hexsha
stringlengths
40
40
size
int64
2
1.02M
ext
stringclasses
10 values
lang
stringclasses
1 value
max_stars_repo_path
stringlengths
4
245
max_stars_repo_name
stringlengths
6
130
max_stars_repo_head_hexsha
stringlengths
40
40
max_stars_repo_licenses
listlengths
1
10
max_stars_count
int64
1
191k
max_stars_repo_stars_event_min_datetime
stringlengths
24
24
max_stars_repo_stars_event_max_datetime
stringlengths
24
24
max_issues_repo_path
stringlengths
4
245
max_issues_repo_name
stringlengths
6
130
max_issues_repo_head_hexsha
stringlengths
40
40
max_issues_repo_licenses
listlengths
1
10
max_issues_count
int64
1
67k
max_issues_repo_issues_event_min_datetime
stringlengths
24
24
max_issues_repo_issues_event_max_datetime
stringlengths
24
24
max_forks_repo_path
stringlengths
4
245
max_forks_repo_name
stringlengths
6
130
max_forks_repo_head_hexsha
stringlengths
40
40
max_forks_repo_licenses
listlengths
1
10
max_forks_count
int64
1
105k
max_forks_repo_forks_event_min_datetime
stringlengths
24
24
max_forks_repo_forks_event_max_datetime
stringlengths
24
24
content
stringlengths
2
1.02M
avg_line_length
float64
1
417k
max_line_length
int64
1
987k
alphanum_fraction
float64
0
1
content_no_comment
stringlengths
0
1.01M
is_comment_constant_removed
bool
1 class
is_sharp_comment_removed
bool
1 class
f70e988f4dafef4a16d2ad7c3426c886babdf411
2,642
py
Python
internal/twirptest/service_pb2_twirp.py
DennisSSDev/twirp
c27eaf7ba72b359deaed686bc1e2a486214059ca
[ "ECL-2.0", "Apache-2.0" ]
null
null
null
internal/twirptest/service_pb2_twirp.py
DennisSSDev/twirp
c27eaf7ba72b359deaed686bc1e2a486214059ca
[ "ECL-2.0", "Apache-2.0" ]
null
null
null
internal/twirptest/service_pb2_twirp.py
DennisSSDev/twirp
c27eaf7ba72b359deaed686bc1e2a486214059ca
[ "ECL-2.0", "Apache-2.0" ]
null
null
null
# Code generated by protoc-gen-twirp_python v7.1.0, DO NOT EDIT. # source: service.proto try: import httplib from urllib2 import Request, HTTPError, urlopen except ImportError: import http.client as httplib from urllib.request import Request, urlopen from urllib.error import HTTPError import json from google.protobuf import symbol_database as _symbol_database import sys _sym_db = _symbol_database.Default() class TwirpException(httplib.HTTPException): def __init__(self, code, message, meta): self.code = code self.message = message self.meta = meta super(TwirpException, self).__init__(message) @classmethod def from_http_err(cls, err): try: jsonerr = json.load(err) code = jsonerr["code"] msg = jsonerr["msg"] meta = jsonerr.get("meta") if meta is None: meta = {} except: code = "internal" msg = "Error from intermediary with HTTP status code {} {}".format( err.code, httplib.responses[err.code], ) meta = {} return cls(code, msg, meta) class HaberdasherClient(object): """ A Haberdasher makes hats for clients. """ def __init__(self, server_address): """Creates a new client for the Haberdasher service. Args: server_address: The address of the server to send requests to, in the full protocol://host:port form. """ if sys.version_info[0] > 2: self.__target = server_address else: self.__target = server_address.encode('ascii') self.__service_name = "twirp.internal.twirptest.Haberdasher" def __make_request(self, body, full_method): req = Request( url=self.__target + "/twirp" + full_method, data=body, headers={"Content-Type": "application/protobuf"}, ) try: resp = urlopen(req) except HTTPError as err: raise TwirpException.from_http_err(err) return resp.read() def make_hat(self, size): """ MakeHat produces a hat of mysterious, randomly-selected color! 
""" serialize = _sym_db.GetSymbol("twirp.internal.twirptest.Size").SerializeToString deserialize = _sym_db.GetSymbol("twirp.internal.twirptest.Hat").FromString full_method = "/{}/{}".format(self.__service_name, "MakeHat") body = serialize(size) resp_str = self.__make_request(body=body, full_method=full_method) return deserialize(resp_str)
31.082353
88
0.613929
try: import httplib from urllib2 import Request, HTTPError, urlopen except ImportError: import http.client as httplib from urllib.request import Request, urlopen from urllib.error import HTTPError import json from google.protobuf import symbol_database as _symbol_database import sys _sym_db = _symbol_database.Default() class TwirpException(httplib.HTTPException): def __init__(self, code, message, meta): self.code = code self.message = message self.meta = meta super(TwirpException, self).__init__(message) @classmethod def from_http_err(cls, err): try: jsonerr = json.load(err) code = jsonerr["code"] msg = jsonerr["msg"] meta = jsonerr.get("meta") if meta is None: meta = {} except: code = "internal" msg = "Error from intermediary with HTTP status code {} {}".format( err.code, httplib.responses[err.code], ) meta = {} return cls(code, msg, meta) class HaberdasherClient(object): def __init__(self, server_address): if sys.version_info[0] > 2: self.__target = server_address else: self.__target = server_address.encode('ascii') self.__service_name = "twirp.internal.twirptest.Haberdasher" def __make_request(self, body, full_method): req = Request( url=self.__target + "/twirp" + full_method, data=body, headers={"Content-Type": "application/protobuf"}, ) try: resp = urlopen(req) except HTTPError as err: raise TwirpException.from_http_err(err) return resp.read() def make_hat(self, size): serialize = _sym_db.GetSymbol("twirp.internal.twirptest.Size").SerializeToString deserialize = _sym_db.GetSymbol("twirp.internal.twirptest.Hat").FromString full_method = "/{}/{}".format(self.__service_name, "MakeHat") body = serialize(size) resp_str = self.__make_request(body=body, full_method=full_method) return deserialize(resp_str)
true
true
f70e98a26c7f36c76b2da78699df18fa1459660b
8,717
py
Python
dropbox/packing-your-dropbox/packurbox.py
ramsay/ramsay-snippets
d64e30a7d7b64eedab46150babb2bb675b3da8ff
[ "MIT" ]
null
null
null
dropbox/packing-your-dropbox/packurbox.py
ramsay/ramsay-snippets
d64e30a7d7b64eedab46150babb2bb675b3da8ff
[ "MIT" ]
null
null
null
dropbox/packing-your-dropbox/packurbox.py
ramsay/ramsay-snippets
d64e30a7d7b64eedab46150babb2bb675b3da8ff
[ "MIT" ]
null
null
null
#!/usr/bin/python """ Robert Ramsay <robert.alan.ramsay@gmail.com> Packing your Dropbox When you're working with petabytes of data, you have to store files wherever they can fit. All of us here at Dropbox are always searching for more ways to efficiently pack data into smaller and more manageable chunks. The fun begins when you bend the rules a little bit and visualize it in two dimensions. You'll be given a list of rectangular "files" that you'll need to pack into as small a "Dropbox" as possible. The dimensions of each file will be specified by a tuple (width, height), both of which will be integers. The output of your function should be the area of the smallest rectangular Dropbox that can enclose all of them without any overlap. Files can be rotated 90(deg) if it helps. Bonus points if you can draw pictures of the winning configurations along the way. While drawing pictures, any files sharing dimensions should be considered identical/interchangeable. Input Your program must read a small integer N (1 <= N <= 100) from stdin representing the maximum number of files to consider, followed by the width and height of each file, one per line. Output Output should be simply be the area of the smallest containing Dropbox. If you want to print pretty pictures, send that to stderr. Only the output on stdout will be judged. Sample Input 3 8 8 4 3 3 4 Sample Output 88 """ #from __future__ import print_function import sys class DropBox: w = 0 h = 0 x = 0 y = 0 def __init__(self,vector=None, w=0, h=0): if vector: self.w, self.h = vector else: self.w = w self.h = h def rotate(self): t = self.w self.w = self.h self.h = t def align(self): if self.w > self.h: self.rotate() return self.h #free space = (lowest left x, lowest left y, width, height) def fit(size, free, box): x, y, w, h = free box.x = x box.y = y if h < box.h and w < box.w: # Our box will not fit inside the current freespace. 
size = (size[0]+box.w-w, size[1]+box.h-h) x += box.w w = 0 h = box.h elif w < box.w: size = (size[0] + box.w - w, size[1]) w = box.w y += box.h h -= box.h elif h < box.h: x += box.w w -= box.w else: box.rotate() if w < box.w: size = (size[0] + box.w - w, size[1]) w = box.w y += box.h h -= box.h else: x += box.w w -= box.w free = (x, y, w, h) return size, free def pretty(boxes,w,h): '''Pretty print the list of boxes''' print >> sys.stderr, str(w) + 'x' + str(h) + ':' graph = [[' ' for l in range(h+1)] for m in range(w+1)] for box in boxes: try: # Vertices graph[box.x][box.y] = '+' graph[box.x+box.w][box.y] = '+' graph[box.x][box.y+box.h] = '+' graph[box.x+box.w][box.y+box.h] = '+' # Edges for x in range(box.x+1, box.x+box.w): graph[x][box.y] = '|' graph[x][box.y+box.h] = '|' for y in range(box.y+1, box.y+box.h): graph[box.x][y] = '-' graph[box.x+box.w][y] = '-' except Exception as e: print >> sys.stderr, "Box (", box.x, box.y, box.w, box.h, ") is outside bounds (", w, h,")" raise e print >> sys.stderr, '\n'.join([''.join(row) for row in graph]) def pack(boxes): #Align all the boxes and sort them by height lagest to smallest boxes.sort(key=lambda box: box.align(), reverse=True) size = (0, 0) #free = (left, lower, width, height) free = (0, 0, 0, 0) for box in boxes: size, free = fit(size, free, box) pretty(boxes, size[0], size[1]) return size[0]*size[1] class DropNode: left = None # Left Edge is the parent. vertex = None # We can store at most one Box right = None # Right Edge is the child. direction = [1,0] # direction is the identity ray def __init__(self,vertex=None, left=None, right=None): self.vertex = vertex self.left = left if self.left: self.left.right = self self.right = right if self.right: w = self.right.width() h = self.right.height() if self.vertex.w > self.vertex.h: # An increase in width costs less than an increase in height # if width is already greater. 
self.direction = [0,1] if w < h: self.right.rotate() else: self.direction = [0,1] if h < w: self.right.rotate() self.right.left = self def rotate(self): self.direction.reverse() if self.vertex: self.vertex.rotate() if self.right: self.right.rotate() def width(self): w = 0 if self.vertex is not None: w = self.vertex.w if self.right is not None: if self.direction[0]: w += self.right.width() return w def height(self): h = 0 if self.vertex is not None: h = self.vertex.h if self.right is not None: if self.direction[1]: h += self.right.height() return h def packtree(node, boxes): '''This is a recursive pack algorithm, similar to a binary search tree.''' if node is None: node = DropNode() if not boxes: # Stack empty. while node.left: node = node.left return node # Return root if node is None: #RootNode print >> sys.stderr, "root node", boxes[-1] return packtree(DropNode(boxes.pop(0)), boxes) if node.vertex is None: # Not sure if I agree with this. print >> sys.stderr, "curious" node.vertex = boxes.pop() return packtree(node, boxes) # Make comparisons simpler left = (max(boxes[0].w, boxes[0].h), min(boxes[0].w, boxes[0].h)) w = node.width() h = node.height() right = (max(w, h), min(w, h)) print >> sys.stderr, "left", left, "right", right, if left[0] > right[0]: print >> sys.stderr, "insert left" if node.left: return packtree(node.left, boxes) else: return packtree(DropNode(boxes.pop(0),None,node), boxes) #if left[0] < right[1]: # print >> sys.stderr, "insert right" # if node.right: # return packtree(node.right, boxes) # else: # return packtree(DropNode(boxes.pop(0),node),boxes) print >> sys.stderr, "insert middle" return packtree(DropNode(boxes.pop(0), node.left, node), boxes) def prettytree(tree): '''Pretty print the list of boxes''' w = tree.width() h = tree.height() print >> sys.stderr, str(w) + 'x' + str(h) + ':' graph = [[' ' for l in range(h+1)] for m in range(w+1)] vx = 0 vy = 0 i = 0 node = tree while node.right: i += 1 print >> sys.stderr, '.', if node.vertex is 
None: print >> sys.stderr, "Empty Vertex" node = node.right continue try: vw = tree.vertex.w vh = tree.vertex.h # Vertices graph[vx][vy] = '+' graph[vx+vw][vy] = '+' graph[vx][vy+vh] = '+' graph[vx+vw][vy+vh] = '+' # Edges for x in range(vx+1, vx+vw): graph[x][vy] = '|' graph[x][vy+vh] = '|' for y in range(vy+1, vy+vh): graph[vx][y] = '-' graph[vx+vw][y] = '-' vx += tree.direction[0]*vw vy += tree.direction[1]*vh except Exception as e: raise e node = node.right print >> sys.stderr print >> sys.stderr, '\n'.join([''.join(row) for row in graph]) if __name__ == '__main__': import sys inp = input() #Number of boxes try: boxcount = int(inp) if boxcount < 1 or boxcount > 100: raise except: sys.exit("Box count must be between 1 and 100 (inclusive)") boxes = [] for i in range(boxcount): inp = raw_input('') #Box: width height box = DropBox() try: w, h = inp.split(" ") box.w = int(w) box.h = int(h) except: sys.exit("Box definition should be integers seperated "\ "by whitespace") boxes.append(box) print(pack(boxes)) sys.exit()
31.356115
574
0.533326
import sys class DropBox: w = 0 h = 0 x = 0 y = 0 def __init__(self,vector=None, w=0, h=0): if vector: self.w, self.h = vector else: self.w = w self.h = h def rotate(self): t = self.w self.w = self.h self.h = t def align(self): if self.w > self.h: self.rotate() return self.h def fit(size, free, box): x, y, w, h = free box.x = x box.y = y if h < box.h and w < box.w: size = (size[0]+box.w-w, size[1]+box.h-h) x += box.w w = 0 h = box.h elif w < box.w: size = (size[0] + box.w - w, size[1]) w = box.w y += box.h h -= box.h elif h < box.h: x += box.w w -= box.w else: box.rotate() if w < box.w: size = (size[0] + box.w - w, size[1]) w = box.w y += box.h h -= box.h else: x += box.w w -= box.w free = (x, y, w, h) return size, free def pretty(boxes,w,h): print >> sys.stderr, str(w) + 'x' + str(h) + ':' graph = [[' ' for l in range(h+1)] for m in range(w+1)] for box in boxes: try: graph[box.x][box.y] = '+' graph[box.x+box.w][box.y] = '+' graph[box.x][box.y+box.h] = '+' graph[box.x+box.w][box.y+box.h] = '+' for x in range(box.x+1, box.x+box.w): graph[x][box.y] = '|' graph[x][box.y+box.h] = '|' for y in range(box.y+1, box.y+box.h): graph[box.x][y] = '-' graph[box.x+box.w][y] = '-' except Exception as e: print >> sys.stderr, "Box (", box.x, box.y, box.w, box.h, ") is outside bounds (", w, h,")" raise e print >> sys.stderr, '\n'.join([''.join(row) for row in graph]) def pack(boxes): boxes.sort(key=lambda box: box.align(), reverse=True) size = (0, 0) free = (0, 0, 0, 0) for box in boxes: size, free = fit(size, free, box) pretty(boxes, size[0], size[1]) return size[0]*size[1] class DropNode: left = None vertex = None right = None direction = [1,0] def __init__(self,vertex=None, left=None, right=None): self.vertex = vertex self.left = left if self.left: self.left.right = self self.right = right if self.right: w = self.right.width() h = self.right.height() if self.vertex.w > self.vertex.h: self.direction = [0,1] if w < h: self.right.rotate() else: self.direction = [0,1] if h < w: 
self.right.rotate() self.right.left = self def rotate(self): self.direction.reverse() if self.vertex: self.vertex.rotate() if self.right: self.right.rotate() def width(self): w = 0 if self.vertex is not None: w = self.vertex.w if self.right is not None: if self.direction[0]: w += self.right.width() return w def height(self): h = 0 if self.vertex is not None: h = self.vertex.h if self.right is not None: if self.direction[1]: h += self.right.height() return h def packtree(node, boxes): if node is None: node = DropNode() if not boxes: while node.left: node = node.left return node if node is None: print >> sys.stderr, "root node", boxes[-1] return packtree(DropNode(boxes.pop(0)), boxes) if node.vertex is None: print >> sys.stderr, "curious" node.vertex = boxes.pop() return packtree(node, boxes) left = (max(boxes[0].w, boxes[0].h), min(boxes[0].w, boxes[0].h)) w = node.width() h = node.height() right = (max(w, h), min(w, h)) print >> sys.stderr, "left", left, "right", right, if left[0] > right[0]: print >> sys.stderr, "insert left" if node.left: return packtree(node.left, boxes) else: return packtree(DropNode(boxes.pop(0),None,node), boxes) print >> sys.stderr, "insert middle" return packtree(DropNode(boxes.pop(0), node.left, node), boxes) def prettytree(tree): w = tree.width() h = tree.height() print >> sys.stderr, str(w) + 'x' + str(h) + ':' graph = [[' ' for l in range(h+1)] for m in range(w+1)] vx = 0 vy = 0 i = 0 node = tree while node.right: i += 1 print >> sys.stderr, '.', if node.vertex is None: print >> sys.stderr, "Empty Vertex" node = node.right continue try: vw = tree.vertex.w vh = tree.vertex.h graph[vx][vy] = '+' graph[vx+vw][vy] = '+' graph[vx][vy+vh] = '+' graph[vx+vw][vy+vh] = '+' for x in range(vx+1, vx+vw): graph[x][vy] = '|' graph[x][vy+vh] = '|' for y in range(vy+1, vy+vh): graph[vx][y] = '-' graph[vx+vw][y] = '-' vx += tree.direction[0]*vw vy += tree.direction[1]*vh except Exception as e: raise e node = node.right print >> sys.stderr print >> 
sys.stderr, '\n'.join([''.join(row) for row in graph]) if __name__ == '__main__': import sys inp = input() try: boxcount = int(inp) if boxcount < 1 or boxcount > 100: raise except: sys.exit("Box count must be between 1 and 100 (inclusive)") boxes = [] for i in range(boxcount): inp = raw_input('') box = DropBox() try: w, h = inp.split(" ") box.w = int(w) box.h = int(h) except: sys.exit("Box definition should be integers seperated "\ "by whitespace") boxes.append(box) print(pack(boxes)) sys.exit()
true
true
f70e997410788ed72a9b2883d54249e487064f32
2,675
py
Python
libralli/circcuitpython/adafruit-circuitpython-bundle-7.x-mpy-20211225/examples/progressbar_magtag_simpletest.py
Yarik9008/SoftAcademic
118c9dc4620ca444c1557edd141a838820577202
[ "MIT" ]
5
2020-04-14T20:50:30.000Z
2021-10-19T18:49:46.000Z
libralli/circcuitpython/adafruit-circuitpython-bundle-7.x-mpy-20211225/examples/progressbar_magtag_simpletest.py
Yarik9008/SoftAcademic
118c9dc4620ca444c1557edd141a838820577202
[ "MIT" ]
25
2020-07-17T03:05:56.000Z
2022-01-06T16:00:51.000Z
libralli/circcuitpython/adafruit-circuitpython-bundle-7.x-mpy-20211225/examples/progressbar_magtag_simpletest.py
Yarik9008/SoftAcademic
118c9dc4620ca444c1557edd141a838820577202
[ "MIT" ]
10
2020-01-07T20:14:56.000Z
2021-11-14T20:34:43.000Z
# SPDX-FileCopyrightText: 2021 ladyada for Adafruit Industries # SPDX-License-Identifier: MIT """ Basic progressbar example script adapted for use on MagTag. """ import time import board import displayio import digitalio from adafruit_progressbar.progressbar import HorizontalProgressBar # use built in display (PyPortal, PyGamer, PyBadge, CLUE, etc.) # see guide for setting up external displays (TFT / OLED breakouts, RGB matrices, etc.) # https://learn.adafruit.com/circuitpython-display-support-using-displayio/display-and-display-bus display = board.DISPLAY time.sleep(display.time_to_refresh) # B/up button will be used to increase the progress up_btn = digitalio.DigitalInOut(board.BUTTON_B) up_btn.direction = digitalio.Direction.INPUT up_btn.pull = digitalio.Pull.UP # C/down button will be used to increase the progress down_btn = digitalio.DigitalInOut(board.BUTTON_C) down_btn.direction = digitalio.Direction.INPUT down_btn.pull = digitalio.Pull.UP # Make the display context splash = displayio.Group() display.show(splash) # set progress bar width and height relative to board's display BAR_WIDTH = display.width - 40 BAR_HEIGHT = 30 x = display.width // 2 - BAR_WIDTH // 2 y = display.height // 3 # Create a new progress_bar object at (x, y) progress_bar = HorizontalProgressBar( (x, y), (BAR_WIDTH, BAR_HEIGHT), bar_color=0xFFFFFF, outline_color=0xAAAAAA, fill_color=0x777777, ) # Append progress_bar to the splash group splash.append(progress_bar) # Get a random starting value within our min/max range current_progress = time.monotonic() % 101 print(current_progress) progress_bar.value = current_progress # refresh the display display.refresh() value_incrementor = 3 prev_up = up_btn.value prev_down = down_btn.value while True: cur_up = up_btn.value cur_down = down_btn.value do_refresh = False # if up_btn was just pressed down if not cur_up and prev_up: current_progress += value_incrementor # Wrap if we get over the maximum value if current_progress > progress_bar.maximum: 
current_progress = progress_bar.minimum do_refresh = True if not cur_down and prev_down: current_progress -= value_incrementor # Wrap if we get below the minimum value if current_progress < progress_bar.minimum: current_progress = progress_bar.maximum do_refresh = True if do_refresh: print(current_progress) progress_bar.value = current_progress time.sleep(display.time_to_refresh) display.refresh() time.sleep(display.time_to_refresh) prev_up = cur_up prev_down = cur_down
27.864583
98
0.741682
import time import board import displayio import digitalio from adafruit_progressbar.progressbar import HorizontalProgressBar display = board.DISPLAY time.sleep(display.time_to_refresh) up_btn = digitalio.DigitalInOut(board.BUTTON_B) up_btn.direction = digitalio.Direction.INPUT up_btn.pull = digitalio.Pull.UP down_btn = digitalio.DigitalInOut(board.BUTTON_C) down_btn.direction = digitalio.Direction.INPUT down_btn.pull = digitalio.Pull.UP splash = displayio.Group() display.show(splash) BAR_WIDTH = display.width - 40 BAR_HEIGHT = 30 x = display.width // 2 - BAR_WIDTH // 2 y = display.height // 3 # Create a new progress_bar object at (x, y) progress_bar = HorizontalProgressBar( (x, y), (BAR_WIDTH, BAR_HEIGHT), bar_color=0xFFFFFF, outline_color=0xAAAAAA, fill_color=0x777777, ) # Append progress_bar to the splash group splash.append(progress_bar) # Get a random starting value within our min/max range current_progress = time.monotonic() % 101 print(current_progress) progress_bar.value = current_progress # refresh the display display.refresh() value_incrementor = 3 prev_up = up_btn.value prev_down = down_btn.value while True: cur_up = up_btn.value cur_down = down_btn.value do_refresh = False # if up_btn was just pressed down if not cur_up and prev_up: current_progress += value_incrementor # Wrap if we get over the maximum value if current_progress > progress_bar.maximum: current_progress = progress_bar.minimum do_refresh = True if not cur_down and prev_down: current_progress -= value_incrementor # Wrap if we get below the minimum value if current_progress < progress_bar.minimum: current_progress = progress_bar.maximum do_refresh = True if do_refresh: print(current_progress) progress_bar.value = current_progress time.sleep(display.time_to_refresh) display.refresh() time.sleep(display.time_to_refresh) prev_up = cur_up prev_down = cur_down
true
true
f70e99a6d83ec20f3d35576e5d254633a5aa413c
19,416
py
Python
ServidorPython/python32_web/Lib/site-packages/nbconvert/nbconvertapp.py
mak213k/Servidor_automatizado_python
4403ef8027a2f814220baacc95856cf5fbf01d21
[ "MIT" ]
4
2019-07-26T11:32:22.000Z
2019-09-11T05:34:59.000Z
ServidorPython/python32_web/Lib/site-packages/nbconvert/nbconvertapp.py
mak213k/Servidor_automatizado_python
4403ef8027a2f814220baacc95856cf5fbf01d21
[ "MIT" ]
1
2021-09-02T17:44:02.000Z
2021-09-02T17:44:02.000Z
ServidorPython/python32_web/Lib/site-packages/nbconvert/nbconvertapp.py
mak213k/Servidor_automatizado_python
4403ef8027a2f814220baacc95856cf5fbf01d21
[ "MIT" ]
2
2019-08-28T14:57:54.000Z
2019-11-26T16:18:30.000Z
#!/usr/bin/env python """NbConvert is a utility for conversion of .ipynb files. Command-line interface for the NbConvert conversion utility. """ # Copyright (c) IPython Development Team. # Distributed under the terms of the Modified BSD License. from __future__ import print_function import logging import sys import os import glob from jupyter_core.application import JupyterApp, base_aliases, base_flags from traitlets.config import catch_config_error, Configurable from traitlets import ( Unicode, List, Instance, DottedObjectName, Type, Bool, default, observe, ) from traitlets.utils.importstring import import_item from .exporters.base import get_export_names, get_exporter from nbconvert import exporters, preprocessors, writers, postprocessors, __version__ from .utils.base import NbConvertBase from .utils.exceptions import ConversionException from .utils.io import unicode_stdin_stream #----------------------------------------------------------------------------- #Classes and functions #----------------------------------------------------------------------------- class DottedOrNone(DottedObjectName): """A string holding a valid dotted object name in Python, such as A.b3._c Also allows for None type. 
""" default_value = u'' def validate(self, obj, value): if value is not None and len(value) > 0: return super(DottedOrNone, self).validate(obj, value) else: return value nbconvert_aliases = {} nbconvert_aliases.update(base_aliases) nbconvert_aliases.update({ 'to' : 'NbConvertApp.export_format', 'template' : 'TemplateExporter.template_file', 'writer' : 'NbConvertApp.writer_class', 'post': 'NbConvertApp.postprocessor_class', 'output': 'NbConvertApp.output_base', 'output-dir': 'FilesWriter.build_directory', 'reveal-prefix': 'SlidesExporter.reveal_url_prefix', 'nbformat': 'NotebookExporter.nbformat_version', }) nbconvert_flags = {} nbconvert_flags.update(base_flags) nbconvert_flags.update({ 'execute' : ( {'ExecutePreprocessor' : {'enabled' : True}}, "Execute the notebook prior to export." ), 'allow-errors' : ( {'ExecutePreprocessor' : {'allow_errors' : True}}, ("Continue notebook execution even if one of the cells throws " "an error and include the error message in the cell output " "(the default behaviour is to abort conversion). This flag " "is only relevant if '--execute' was specified, too.") ), 'stdin' : ( {'NbConvertApp' : { 'from_stdin' : True, } }, "read a single notebook file from stdin. Write the resulting notebook with default basename 'notebook.*'" ), 'stdout' : ( {'NbConvertApp' : {'writer_class' : "StdoutWriter"}}, "Write notebook output to stdout instead of files." ), 'inplace' : ( { 'NbConvertApp' : { 'use_output_suffix' : False, 'export_format' : 'notebook', }, 'FilesWriter' : {'build_directory': ''}, }, """Run nbconvert in place, overwriting the existing notebook (only relevant when converting to notebook format)""" ), 'clear-output' : ( { 'NbConvertApp' : { 'use_output_suffix' : False, 'export_format' : 'notebook', }, 'FilesWriter' : {'build_directory': ''}, 'ClearOutputPreprocessor' : {'enabled' : True}, }, """Clear output of current file and save in place, overwriting the existing notebook. 
""" ), 'no-prompt' : ( {'TemplateExporter' : { 'exclude_input_prompt' : True, 'exclude_output_prompt' : True, } }, "Exclude input and output prompts from converted document." ), 'no-input' : ( {'TemplateExporter' : { 'exclude_output_prompt' : True, 'exclude_input': True, } }, """Exclude input cells and output prompts from converted document. This mode is ideal for generating code-free reports.""" ), }) class NbConvertApp(JupyterApp): """Application used to convert from notebook file type (``*.ipynb``)""" version = __version__ name = 'jupyter-nbconvert' aliases = nbconvert_aliases flags = nbconvert_flags @default('log_level') def _log_level_default(self): return logging.INFO classes = List() @default('classes') def _classes_default(self): classes = [NbConvertBase] for pkg in (exporters, preprocessors, writers, postprocessors): for name in dir(pkg): cls = getattr(pkg, name) if isinstance(cls, type) and issubclass(cls, Configurable): classes.append(cls) return classes description = Unicode( u"""This application is used to convert notebook files (*.ipynb) to various other formats. WARNING: THE COMMANDLINE INTERFACE MAY CHANGE IN FUTURE RELEASES.""") output_base = Unicode('', help='''overwrite base name use for output files. can only be used when converting one notebook at a time. ''').tag(config=True) use_output_suffix = Bool( True, help="""Whether to apply a suffix prior to the extension (only relevant when converting to notebook format). The suffix is determined by the exporter, and is usually '.nbconvert'.""" ).tag(config=True) output_files_dir = Unicode('{notebook_name}_files', help='''Directory to copy extra files (figures) to. '{notebook_name}' in the string will be converted to notebook basename''' ).tag(config=True) examples = Unicode(u""" The simplest way to use nbconvert is > jupyter nbconvert mynotebook.ipynb which will convert mynotebook.ipynb to the default format (probably HTML). You can specify the export format with `--to`. Options include {formats}. 
> jupyter nbconvert --to latex mynotebook.ipynb Both HTML and LaTeX support multiple output templates. LaTeX includes 'base', 'article' and 'report'. HTML includes 'basic' and 'full'. You can specify the flavor of the format used. > jupyter nbconvert --to html --template basic mynotebook.ipynb You can also pipe the output to stdout, rather than a file > jupyter nbconvert mynotebook.ipynb --stdout PDF is generated via latex > jupyter nbconvert mynotebook.ipynb --to pdf You can get (and serve) a Reveal.js-powered slideshow > jupyter nbconvert myslides.ipynb --to slides --post serve Multiple notebooks can be given at the command line in a couple of different ways: > jupyter nbconvert notebook*.ipynb > jupyter nbconvert notebook1.ipynb notebook2.ipynb or you can specify the notebooks list in a config file, containing:: c.NbConvertApp.notebooks = ["my_notebook.ipynb"] > jupyter nbconvert --config mycfg.py """.format(formats=get_export_names())) # Writer specific variables writer = Instance('nbconvert.writers.base.WriterBase', help="""Instance of the writer class used to write the results of the conversion.""", allow_none=True) writer_class = DottedObjectName('FilesWriter', help="""Writer class used to write the results of the conversion""").tag(config=True) writer_aliases = {'fileswriter': 'nbconvert.writers.files.FilesWriter', 'debugwriter': 'nbconvert.writers.debug.DebugWriter', 'stdoutwriter': 'nbconvert.writers.stdout.StdoutWriter'} writer_factory = Type(allow_none=True) @observe('writer_class') def _writer_class_changed(self, change): new = change['new'] if new.lower() in self.writer_aliases: new = self.writer_aliases[new.lower()] self.writer_factory = import_item(new) # Post-processor specific variables postprocessor = Instance('nbconvert.postprocessors.base.PostProcessorBase', help="""Instance of the PostProcessor class used to write the results of the conversion.""", allow_none=True) postprocessor_class = DottedOrNone( help="""PostProcessor class used to write 
the results of the conversion""" ).tag(config=True) postprocessor_aliases = {'serve': 'nbconvert.postprocessors.serve.ServePostProcessor'} postprocessor_factory = Type(None, allow_none=True) @observe('postprocessor_class') def _postprocessor_class_changed(self, change): new = change['new'] if new.lower() in self.postprocessor_aliases: new = self.postprocessor_aliases[new.lower()] if new: self.postprocessor_factory = import_item(new) ipywidgets_base_url = Unicode("https://unpkg.com/", help="URL base for ipywidgets package").tag(config=True) export_format = Unicode( 'html', allow_none=False, help="""The export format to be used, either one of the built-in formats {formats} or a dotted object name that represents the import path for an `Exporter` class""".format(formats=get_export_names()) ).tag(config=True) notebooks = List([], help="""List of notebooks to convert. Wildcards are supported. Filenames passed positionally will be added to the list. """ ).tag(config=True) from_stdin = Bool(False, help="read a single notebook from stdin.").tag(config=True) @catch_config_error def initialize(self, argv=None): """Initialize application, notebooks, writer, and postprocessor""" self.init_syspath() super(NbConvertApp, self).initialize(argv) self.init_notebooks() self.init_writer() self.init_postprocessor() def init_syspath(self): """Add the cwd to the sys.path ($PYTHONPATH)""" sys.path.insert(0, os.getcwd()) def init_notebooks(self): """Construct the list of notebooks. If notebooks are passed on the command-line, they override (rather than add) notebooks specified in config files. Glob each notebook to replace notebook patterns with filenames. """ # Specifying notebooks on the command-line overrides (rather than # adds) the notebook list if self.extra_args: patterns = self.extra_args else: patterns = self.notebooks # Use glob to replace all the notebook patterns with filenames. filenames = [] for pattern in patterns: # Use glob to find matching filenames. 
Allow the user to convert # notebooks without having to type the extension. globbed_files = glob.glob(pattern) globbed_files.extend(glob.glob(pattern + '.ipynb')) if not globbed_files: self.log.warning("pattern %r matched no files", pattern) for filename in globbed_files: if not filename in filenames: filenames.append(filename) self.notebooks = filenames def init_writer(self): """Initialize the writer (which is stateless)""" self._writer_class_changed({ 'new': self.writer_class }) self.writer = self.writer_factory(parent=self) if hasattr(self.writer, 'build_directory') and self.writer.build_directory != '': self.use_output_suffix = False def init_postprocessor(self): """Initialize the postprocessor (which is stateless)""" self._postprocessor_class_changed({'new': self.postprocessor_class}) if self.postprocessor_factory: self.postprocessor = self.postprocessor_factory(parent=self) def start(self): """Run start after initialization process has completed""" super(NbConvertApp, self).start() self.convert_notebooks() def init_single_notebook_resources(self, notebook_filename): """Step 1: Initialize resources This initializes the resources dictionary for a single notebook. 
Returns ------- dict resources dictionary for a single notebook that MUST include the following keys: - config_dir: the location of the Jupyter config directory - unique_key: the notebook name - output_files_dir: a directory where output files (not including the notebook itself) should be saved """ basename = os.path.basename(notebook_filename) notebook_name = basename[:basename.rfind('.')] if self.output_base: # strip duplicate extension from output_base, to avoid Basename.ext.ext if getattr(self.exporter, 'file_extension', False): base, ext = os.path.splitext(self.output_base) if ext == self.exporter.file_extension: self.output_base = base notebook_name = self.output_base self.log.debug("Notebook name is '%s'", notebook_name) # first initialize the resources we want to use resources = {} resources['config_dir'] = self.config_dir resources['unique_key'] = notebook_name output_files_dir = (self.output_files_dir .format(notebook_name=notebook_name)) resources['output_files_dir'] = output_files_dir resources['ipywidgets_base_url'] = self.ipywidgets_base_url return resources def export_single_notebook(self, notebook_filename, resources, input_buffer=None): """Step 2: Export the notebook Exports the notebook to a particular format according to the specified exporter. This function returns the output and (possibly modified) resources from the exporter. Parameters ---------- notebook_filename : str name of notebook file. resources : dict input_buffer : readable file-like object returning unicode. 
if not None, notebook_filename is ignored Returns ------- output dict resources (possibly modified) """ try: if input_buffer is not None: output, resources = self.exporter.from_file(input_buffer, resources=resources) else: output, resources = self.exporter.from_filename(notebook_filename, resources=resources) except ConversionException: self.log.error("Error while converting '%s'", notebook_filename, exc_info=True) self.exit(1) return output, resources def write_single_notebook(self, output, resources): """Step 3: Write the notebook to file This writes output from the exporter to file using the specified writer. It returns the results from the writer. Parameters ---------- output : resources : dict resources for a single notebook including name, config directory and directory to save output Returns ------- file results from the specified writer output of exporter """ if 'unique_key' not in resources: raise KeyError("unique_key MUST be specified in the resources, but it is not") notebook_name = resources['unique_key'] if self.use_output_suffix and not self.output_base: notebook_name += resources.get('output_suffix', '') write_results = self.writer.write( output, resources, notebook_name=notebook_name) return write_results def postprocess_single_notebook(self, write_results): """Step 4: Post-process the written file Only used if a postprocessor has been specified. After the converted notebook is written to a file in Step 3, this post-processes the notebook. """ # Post-process if post processor has been defined. if hasattr(self, 'postprocessor') and self.postprocessor: self.postprocessor(write_results) def convert_single_notebook(self, notebook_filename, input_buffer=None): """Convert a single notebook. Performs the following steps: 1. Initialize notebook resources 2. Export the notebook to a particular format 3. Write the exported notebook to file 4. 
(Maybe) postprocess the written file Parameters ---------- notebook_filename : str input_buffer : If input_buffer is not None, conversion is done and the buffer is used as source into a file basenamed by the notebook_filename argument. """ if input_buffer is None: self.log.info("Converting notebook %s to %s", notebook_filename, self.export_format) else: self.log.info("Converting notebook into %s", self.export_format) resources = self.init_single_notebook_resources(notebook_filename) output, resources = self.export_single_notebook(notebook_filename, resources, input_buffer=input_buffer) write_results = self.write_single_notebook(output, resources) self.postprocess_single_notebook(write_results) def convert_notebooks(self): """Convert the notebooks in the self.notebook traitlet """ # check that the output base isn't specified if there is more than # one notebook to convert if self.output_base != '' and len(self.notebooks) > 1: self.log.error( """ UsageError: --output flag or `NbConvertApp.output_base` config option cannot be used when converting multiple notebooks. """ ) self.exit(1) # initialize the exporter cls = get_exporter(self.export_format) self.exporter = cls(config=self.config) # no notebooks to convert! if len(self.notebooks) == 0 and not self.from_stdin: self.print_help() sys.exit(-1) # convert each notebook if not self.from_stdin: for notebook_filename in self.notebooks: self.convert_single_notebook(notebook_filename) else: input_buffer = unicode_stdin_stream() # default name when conversion from stdin self.convert_single_notebook("notebook.ipynb", input_buffer=input_buffer) #----------------------------------------------------------------------------- # Main entry point #----------------------------------------------------------------------------- main = launch_new_instance = NbConvertApp.launch_instance
37.410405
113
0.613103
from __future__ import print_function import logging import sys import os import glob from jupyter_core.application import JupyterApp, base_aliases, base_flags from traitlets.config import catch_config_error, Configurable from traitlets import ( Unicode, List, Instance, DottedObjectName, Type, Bool, default, observe, ) from traitlets.utils.importstring import import_item from .exporters.base import get_export_names, get_exporter from nbconvert import exporters, preprocessors, writers, postprocessors, __version__ from .utils.base import NbConvertBase from .utils.exceptions import ConversionException from .utils.io import unicode_stdin_stream class DottedOrNone(DottedObjectName): default_value = u'' def validate(self, obj, value): if value is not None and len(value) > 0: return super(DottedOrNone, self).validate(obj, value) else: return value nbconvert_aliases = {} nbconvert_aliases.update(base_aliases) nbconvert_aliases.update({ 'to' : 'NbConvertApp.export_format', 'template' : 'TemplateExporter.template_file', 'writer' : 'NbConvertApp.writer_class', 'post': 'NbConvertApp.postprocessor_class', 'output': 'NbConvertApp.output_base', 'output-dir': 'FilesWriter.build_directory', 'reveal-prefix': 'SlidesExporter.reveal_url_prefix', 'nbformat': 'NotebookExporter.nbformat_version', }) nbconvert_flags = {} nbconvert_flags.update(base_flags) nbconvert_flags.update({ 'execute' : ( {'ExecutePreprocessor' : {'enabled' : True}}, "Execute the notebook prior to export." ), 'allow-errors' : ( {'ExecutePreprocessor' : {'allow_errors' : True}}, ("Continue notebook execution even if one of the cells throws " "an error and include the error message in the cell output " "(the default behaviour is to abort conversion). This flag " "is only relevant if '--execute' was specified, too.") ), 'stdin' : ( {'NbConvertApp' : { 'from_stdin' : True, } }, "read a single notebook file from stdin. 
Write the resulting notebook with default basename 'notebook.*'" ), 'stdout' : ( {'NbConvertApp' : {'writer_class' : "StdoutWriter"}}, "Write notebook output to stdout instead of files." ), 'inplace' : ( { 'NbConvertApp' : { 'use_output_suffix' : False, 'export_format' : 'notebook', }, 'FilesWriter' : {'build_directory': ''}, }, """Run nbconvert in place, overwriting the existing notebook (only relevant when converting to notebook format)""" ), 'clear-output' : ( { 'NbConvertApp' : { 'use_output_suffix' : False, 'export_format' : 'notebook', }, 'FilesWriter' : {'build_directory': ''}, 'ClearOutputPreprocessor' : {'enabled' : True}, }, """Clear output of current file and save in place, overwriting the existing notebook. """ ), 'no-prompt' : ( {'TemplateExporter' : { 'exclude_input_prompt' : True, 'exclude_output_prompt' : True, } }, "Exclude input and output prompts from converted document." ), 'no-input' : ( {'TemplateExporter' : { 'exclude_output_prompt' : True, 'exclude_input': True, } }, """Exclude input cells and output prompts from converted document. This mode is ideal for generating code-free reports.""" ), }) class NbConvertApp(JupyterApp): version = __version__ name = 'jupyter-nbconvert' aliases = nbconvert_aliases flags = nbconvert_flags @default('log_level') def _log_level_default(self): return logging.INFO classes = List() @default('classes') def _classes_default(self): classes = [NbConvertBase] for pkg in (exporters, preprocessors, writers, postprocessors): for name in dir(pkg): cls = getattr(pkg, name) if isinstance(cls, type) and issubclass(cls, Configurable): classes.append(cls) return classes description = Unicode( u"""This application is used to convert notebook files (*.ipynb) to various other formats. WARNING: THE COMMANDLINE INTERFACE MAY CHANGE IN FUTURE RELEASES.""") output_base = Unicode('', help='''overwrite base name use for output files. can only be used when converting one notebook at a time. 
''').tag(config=True) use_output_suffix = Bool( True, help="""Whether to apply a suffix prior to the extension (only relevant when converting to notebook format). The suffix is determined by the exporter, and is usually '.nbconvert'.""" ).tag(config=True) output_files_dir = Unicode('{notebook_name}_files', help='''Directory to copy extra files (figures) to. '{notebook_name}' in the string will be converted to notebook basename''' ).tag(config=True) examples = Unicode(u""" The simplest way to use nbconvert is > jupyter nbconvert mynotebook.ipynb which will convert mynotebook.ipynb to the default format (probably HTML). You can specify the export format with `--to`. Options include {formats}. > jupyter nbconvert --to latex mynotebook.ipynb Both HTML and LaTeX support multiple output templates. LaTeX includes 'base', 'article' and 'report'. HTML includes 'basic' and 'full'. You can specify the flavor of the format used. > jupyter nbconvert --to html --template basic mynotebook.ipynb You can also pipe the output to stdout, rather than a file > jupyter nbconvert mynotebook.ipynb --stdout PDF is generated via latex > jupyter nbconvert mynotebook.ipynb --to pdf You can get (and serve) a Reveal.js-powered slideshow > jupyter nbconvert myslides.ipynb --to slides --post serve Multiple notebooks can be given at the command line in a couple of different ways: > jupyter nbconvert notebook*.ipynb > jupyter nbconvert notebook1.ipynb notebook2.ipynb or you can specify the notebooks list in a config file, containing:: c.NbConvertApp.notebooks = ["my_notebook.ipynb"] > jupyter nbconvert --config mycfg.py """.format(formats=get_export_names())) writer = Instance('nbconvert.writers.base.WriterBase', help="""Instance of the writer class used to write the results of the conversion.""", allow_none=True) writer_class = DottedObjectName('FilesWriter', help="""Writer class used to write the results of the conversion""").tag(config=True) writer_aliases = {'fileswriter': 
'nbconvert.writers.files.FilesWriter', 'debugwriter': 'nbconvert.writers.debug.DebugWriter', 'stdoutwriter': 'nbconvert.writers.stdout.StdoutWriter'} writer_factory = Type(allow_none=True) @observe('writer_class') def _writer_class_changed(self, change): new = change['new'] if new.lower() in self.writer_aliases: new = self.writer_aliases[new.lower()] self.writer_factory = import_item(new) postprocessor = Instance('nbconvert.postprocessors.base.PostProcessorBase', help="""Instance of the PostProcessor class used to write the results of the conversion.""", allow_none=True) postprocessor_class = DottedOrNone( help="""PostProcessor class used to write the results of the conversion""" ).tag(config=True) postprocessor_aliases = {'serve': 'nbconvert.postprocessors.serve.ServePostProcessor'} postprocessor_factory = Type(None, allow_none=True) @observe('postprocessor_class') def _postprocessor_class_changed(self, change): new = change['new'] if new.lower() in self.postprocessor_aliases: new = self.postprocessor_aliases[new.lower()] if new: self.postprocessor_factory = import_item(new) ipywidgets_base_url = Unicode("https://unpkg.com/", help="URL base for ipywidgets package").tag(config=True) export_format = Unicode( 'html', allow_none=False, help="""The export format to be used, either one of the built-in formats {formats} or a dotted object name that represents the import path for an `Exporter` class""".format(formats=get_export_names()) ).tag(config=True) notebooks = List([], help="""List of notebooks to convert. Wildcards are supported. Filenames passed positionally will be added to the list. 
""" ).tag(config=True) from_stdin = Bool(False, help="read a single notebook from stdin.").tag(config=True) @catch_config_error def initialize(self, argv=None): self.init_syspath() super(NbConvertApp, self).initialize(argv) self.init_notebooks() self.init_writer() self.init_postprocessor() def init_syspath(self): sys.path.insert(0, os.getcwd()) def init_notebooks(self): if self.extra_args: patterns = self.extra_args else: patterns = self.notebooks filenames = [] for pattern in patterns: globbed_files = glob.glob(pattern) globbed_files.extend(glob.glob(pattern + '.ipynb')) if not globbed_files: self.log.warning("pattern %r matched no files", pattern) for filename in globbed_files: if not filename in filenames: filenames.append(filename) self.notebooks = filenames def init_writer(self): self._writer_class_changed({ 'new': self.writer_class }) self.writer = self.writer_factory(parent=self) if hasattr(self.writer, 'build_directory') and self.writer.build_directory != '': self.use_output_suffix = False def init_postprocessor(self): self._postprocessor_class_changed({'new': self.postprocessor_class}) if self.postprocessor_factory: self.postprocessor = self.postprocessor_factory(parent=self) def start(self): super(NbConvertApp, self).start() self.convert_notebooks() def init_single_notebook_resources(self, notebook_filename): basename = os.path.basename(notebook_filename) notebook_name = basename[:basename.rfind('.')] if self.output_base: if getattr(self.exporter, 'file_extension', False): base, ext = os.path.splitext(self.output_base) if ext == self.exporter.file_extension: self.output_base = base notebook_name = self.output_base self.log.debug("Notebook name is '%s'", notebook_name) resources = {} resources['config_dir'] = self.config_dir resources['unique_key'] = notebook_name output_files_dir = (self.output_files_dir .format(notebook_name=notebook_name)) resources['output_files_dir'] = output_files_dir resources['ipywidgets_base_url'] = self.ipywidgets_base_url return 
resources def export_single_notebook(self, notebook_filename, resources, input_buffer=None): try: if input_buffer is not None: output, resources = self.exporter.from_file(input_buffer, resources=resources) else: output, resources = self.exporter.from_filename(notebook_filename, resources=resources) except ConversionException: self.log.error("Error while converting '%s'", notebook_filename, exc_info=True) self.exit(1) return output, resources def write_single_notebook(self, output, resources): if 'unique_key' not in resources: raise KeyError("unique_key MUST be specified in the resources, but it is not") notebook_name = resources['unique_key'] if self.use_output_suffix and not self.output_base: notebook_name += resources.get('output_suffix', '') write_results = self.writer.write( output, resources, notebook_name=notebook_name) return write_results def postprocess_single_notebook(self, write_results): if hasattr(self, 'postprocessor') and self.postprocessor: self.postprocessor(write_results) def convert_single_notebook(self, notebook_filename, input_buffer=None): if input_buffer is None: self.log.info("Converting notebook %s to %s", notebook_filename, self.export_format) else: self.log.info("Converting notebook into %s", self.export_format) resources = self.init_single_notebook_resources(notebook_filename) output, resources = self.export_single_notebook(notebook_filename, resources, input_buffer=input_buffer) write_results = self.write_single_notebook(output, resources) self.postprocess_single_notebook(write_results) def convert_notebooks(self): # one notebook to convert if self.output_base != '' and len(self.notebooks) > 1: self.log.error( """ UsageError: --output flag or `NbConvertApp.output_base` config option cannot be used when converting multiple notebooks. """ ) self.exit(1) # initialize the exporter cls = get_exporter(self.export_format) self.exporter = cls(config=self.config) # no notebooks to convert! 
if len(self.notebooks) == 0 and not self.from_stdin: self.print_help() sys.exit(-1) # convert each notebook if not self.from_stdin: for notebook_filename in self.notebooks: self.convert_single_notebook(notebook_filename) else: input_buffer = unicode_stdin_stream() # default name when conversion from stdin self.convert_single_notebook("notebook.ipynb", input_buffer=input_buffer) #----------------------------------------------------------------------------- # Main entry point #----------------------------------------------------------------------------- main = launch_new_instance = NbConvertApp.launch_instance
true
true
f70e9ab4127f82a7bc61abb5b1f4d8af7ef4fcd5
122
py
Python
contributed_traders/util.py
andrewsonin/abides_dev
e8a9c8450bbbe98597f31767362c86eb193597a0
[ "BSD-3-Clause" ]
null
null
null
contributed_traders/util.py
andrewsonin/abides_dev
e8a9c8450bbbe98597f31767362c86eb193597a0
[ "BSD-3-Clause" ]
null
null
null
contributed_traders/util.py
andrewsonin/abides_dev
e8a9c8450bbbe98597f31767362c86eb193597a0
[ "BSD-3-Clause" ]
null
null
null
#!/usr/bin/env python3 from pathlib import Path def get_file(fname): return Path(__file__).resolve().parent / fname
17.428571
50
0.729508
from pathlib import Path def get_file(fname): return Path(__file__).resolve().parent / fname
true
true
f70e9b3086a20a853e14284409d0e7e61f4feaf3
326
py
Python
utils/bytes_utils.py
linyuan0213/qb-bot
be8a172d7d72a7e3878f89ba341c3b5f36e3d76c
[ "MIT" ]
3
2021-08-28T18:14:54.000Z
2022-01-19T18:22:59.000Z
utils/bytes_utils.py
linyuan0213/qb-bot
be8a172d7d72a7e3878f89ba341c3b5f36e3d76c
[ "MIT" ]
null
null
null
utils/bytes_utils.py
linyuan0213/qb-bot
be8a172d7d72a7e3878f89ba341c3b5f36e3d76c
[ "MIT" ]
1
2021-07-16T08:37:03.000Z
2021-07-16T08:37:03.000Z
def bytes_to_human(n): symbols = ('KB', 'MB', 'GB', 'TB', 'PB', 'EB') prefix = {} for i, s in enumerate(symbols): prefix[s] = 1 << (i + 1) * 10 for s in reversed(symbols): if n >= prefix[s]: value = float(n) / prefix[s] return '%.1f%s' % (value, s) return '%sB' % n
29.636364
50
0.46319
def bytes_to_human(n): symbols = ('KB', 'MB', 'GB', 'TB', 'PB', 'EB') prefix = {} for i, s in enumerate(symbols): prefix[s] = 1 << (i + 1) * 10 for s in reversed(symbols): if n >= prefix[s]: value = float(n) / prefix[s] return '%.1f%s' % (value, s) return '%sB' % n
true
true
f70e9b6e0d6c5f3a42f4bb7ff0f34b00d662bc27
1,338
py
Python
spartan/examples/ssvd/qr.py
GabrielWen/spartan
ce3bf7f2bb551d7f996a1884acef819b620cc854
[ "Apache-2.0" ]
156
2015-01-10T21:54:25.000Z
2021-10-17T14:13:57.000Z
spartan/examples/ssvd/qr.py
GabrielWen/spartan
ce3bf7f2bb551d7f996a1884acef819b620cc854
[ "Apache-2.0" ]
8
2015-01-05T16:34:18.000Z
2015-12-11T08:12:28.000Z
spartan/examples/ssvd/qr.py
GabrielWen/spartan
ce3bf7f2bb551d7f996a1884acef819b620cc854
[ "Apache-2.0" ]
24
2015-01-10T21:55:48.000Z
2021-04-14T08:09:34.000Z
import spartan from spartan import expr, core import numpy as np from sys import stderr def qr(Y): ''' Compute the thin qr factorization of a matrix. Factor the matrix Y as QR, where Q is orthonormal and R is upper-triangular. Parameters ---------- Y: Spartan array of shape (M, K). Notes ---------- Y'Y must fit in memory. Y is a Spartan array of shape (M, K). Since this QR decomposition is mainly used in Stochastic SVD, K will be the rank of the matrix of shape (M, N) and the assumption is that the rank K should be far less than M or N. Returns ------- Q : Spartan array of shape (M, K). R : Numpy array of shape (K, K). ''' # Since the K should be far less than M. So the matrix multiplication # should be the bottleneck instead of local cholesky decomposition and # finding inverse of R. So we just parallelize the matrix mulitplication. # If K is really large, we may consider using our Spartan cholesky # decomposition, but for now, we use numpy version, it works fine. # YTY = Y'Y. YTY has shape of (K, K). YTY = expr.dot(expr.transpose(Y), Y).optimized().glom() # Do cholesky decomposition and get R. R = np.linalg.cholesky(YTY).T # Find the inverse of R inv_R = np.linalg.inv(R) # Q = Y * inv(R) Q = expr.dot(Y, inv_R).optimized().evaluate() return Q, R
29.086957
75
0.675635
import spartan from spartan import expr, core import numpy as np from sys import stderr def qr(Y): YTY = expr.dot(expr.transpose(Y), Y).optimized().glom() # Do cholesky decomposition and get R. R = np.linalg.cholesky(YTY).T # Find the inverse of R inv_R = np.linalg.inv(R) # Q = Y * inv(R) Q = expr.dot(Y, inv_R).optimized().evaluate() return Q, R
true
true
f70e9bc29651fbb0c65ebc831bb2635b14150dfd
3,633
py
Python
servers/migrations/0012_auto__add_field_server_samba_base_folder.py
PolyLAN/azimut-gestion
0b076570d7ceeef8cfd9c8e65fd0495aa16d3c8a
[ "MIT" ]
null
null
null
servers/migrations/0012_auto__add_field_server_samba_base_folder.py
PolyLAN/azimut-gestion
0b076570d7ceeef8cfd9c8e65fd0495aa16d3c8a
[ "MIT" ]
null
null
null
servers/migrations/0012_auto__add_field_server_samba_base_folder.py
PolyLAN/azimut-gestion
0b076570d7ceeef8cfd9c8e65fd0495aa16d3c8a
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- import datetime from south.db import db from south.v2 import SchemaMigration from django.db import models class Migration(SchemaMigration): def forwards(self, orm): # Adding field 'Server.samba_base_folder' db.add_column(u'servers_server', 'samba_base_folder', self.gf('django.db.models.fields.CharField')(default='', max_length=255, null=True, blank=True), keep_default=False) def backwards(self, orm): # Deleting field 'Server.samba_base_folder' db.delete_column(u'servers_server', 'samba_base_folder') models = { u'servers.server': { 'Meta': {'object_name': 'Server'}, 'external_hostname_for_vms_creation': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), 'external_interface': ('django.db.models.fields.CharField', [], {'max_length': '15', 'null': 'True', 'blank': 'True'}), 'external_ip': ('django.db.models.fields.IPAddressField', [], {'max_length': '15', 'null': 'True', 'blank': 'True'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'internal_ip': ('django.db.models.fields.IPAddressField', [], {'max_length': '15', 'null': 'True', 'blank': 'True'}), 'is_proxmox': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'is_vm': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'keymanger_name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}), 'ngnix_server': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'ngnixed_server_set'", 'null': 'True', 'to': u"orm['servers.Server']"}), 'proxmox_node_name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255', 'null': 'True', 'blank': 'True'}), 'samba_base_folder': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255', 'null': 'True', 'blank': 'True'}), 'samba_management': 
('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'ssh_connection_string_from_backup': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), 'ssh_connection_string_from_gestion': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), 'vm_host': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'server_set'", 'null': 'True', 'to': u"orm['servers.Server']"}) }, u'servers.serveruser': { 'Meta': {'object_name': 'ServerUser'}, u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}), 'server': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['servers.Server']"}) }, u'servers.sshkey': { 'Meta': {'object_name': 'SshKey'}, u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'key': ('django.db.models.fields.TextField', [], {}), 'server': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['servers.Server']"}), 'user': ('django.db.models.fields.CharField', [], {'max_length': '255'}) } } complete_apps = ['servers']
63.736842
186
0.57886
import datetime from south.db import db from south.v2 import SchemaMigration from django.db import models class Migration(SchemaMigration): def forwards(self, orm): db.add_column(u'servers_server', 'samba_base_folder', self.gf('django.db.models.fields.CharField')(default='', max_length=255, null=True, blank=True), keep_default=False) def backwards(self, orm): db.delete_column(u'servers_server', 'samba_base_folder') models = { u'servers.server': { 'Meta': {'object_name': 'Server'}, 'external_hostname_for_vms_creation': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), 'external_interface': ('django.db.models.fields.CharField', [], {'max_length': '15', 'null': 'True', 'blank': 'True'}), 'external_ip': ('django.db.models.fields.IPAddressField', [], {'max_length': '15', 'null': 'True', 'blank': 'True'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'internal_ip': ('django.db.models.fields.IPAddressField', [], {'max_length': '15', 'null': 'True', 'blank': 'True'}), 'is_proxmox': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'is_vm': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'keymanger_name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}), 'ngnix_server': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'ngnixed_server_set'", 'null': 'True', 'to': u"orm['servers.Server']"}), 'proxmox_node_name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255', 'null': 'True', 'blank': 'True'}), 'samba_base_folder': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255', 'null': 'True', 'blank': 'True'}), 'samba_management': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'ssh_connection_string_from_backup': 
('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), 'ssh_connection_string_from_gestion': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), 'vm_host': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'server_set'", 'null': 'True', 'to': u"orm['servers.Server']"}) }, u'servers.serveruser': { 'Meta': {'object_name': 'ServerUser'}, u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}), 'server': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['servers.Server']"}) }, u'servers.sshkey': { 'Meta': {'object_name': 'SshKey'}, u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'key': ('django.db.models.fields.TextField', [], {}), 'server': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['servers.Server']"}), 'user': ('django.db.models.fields.CharField', [], {'max_length': '255'}) } } complete_apps = ['servers']
true
true
f70e9c0b8bac3d670748990df3ff0e02f0a7f8ad
15,619
py
Python
zerver/lib/cache.py
dehnert/zulip
f5935e81c7cf2f11ff4ccfcd31d2a1061b8d7ff5
[ "Apache-2.0" ]
null
null
null
zerver/lib/cache.py
dehnert/zulip
f5935e81c7cf2f11ff4ccfcd31d2a1061b8d7ff5
[ "Apache-2.0" ]
null
null
null
zerver/lib/cache.py
dehnert/zulip
f5935e81c7cf2f11ff4ccfcd31d2a1061b8d7ff5
[ "Apache-2.0" ]
null
null
null
from __future__ import absolute_import from __future__ import print_function from functools import wraps from django.core.cache import cache as djcache from django.core.cache import caches from django.conf import settings from django.db.models import Q from django.core.cache.backends.base import BaseCache from typing import Any, Callable, Iterable, Optional, Union, TypeVar from zerver.lib.utils import statsd, statsd_key, make_safe_digest import subprocess import time import base64 import random import sys import os import os.path import hashlib import six from six import text_type if False: from zerver.models import UserProfile, Realm, Message # These modules have to be imported for type annotations but # they cannot be imported at runtime due to cyclic dependency. FuncT = TypeVar('FuncT', bound=Callable[..., Any]) remote_cache_time_start = 0.0 remote_cache_total_time = 0.0 remote_cache_total_requests = 0 def get_remote_cache_time(): # type: () -> float return remote_cache_total_time def get_remote_cache_requests(): # type: () -> int return remote_cache_total_requests def remote_cache_stats_start(): # type: () -> None global remote_cache_time_start remote_cache_time_start = time.time() def remote_cache_stats_finish(): # type: () -> None global remote_cache_total_time global remote_cache_total_requests global remote_cache_time_start remote_cache_total_requests += 1 remote_cache_total_time += (time.time() - remote_cache_time_start) def get_or_create_key_prefix(): # type: () -> text_type if settings.TEST_SUITE: # This sets the prefix mostly for the benefit of the JS tests. # The Python tests overwrite KEY_PREFIX on each test. 
return u'test_suite:%s:' % (text_type(os.getpid()),) # directory `var` should exist in production subprocess.check_call(["mkdir", "-p", os.path.join(settings.DEPLOY_ROOT, "var")]) filename = os.path.join(settings.DEPLOY_ROOT, "var", "remote_cache_prefix") try: fd = os.open(filename, os.O_CREAT | os.O_EXCL | os.O_RDWR, 0o444) random_hash = hashlib.sha256(text_type(random.getrandbits(256)).encode('utf-8')).digest() prefix = base64.b16encode(random_hash)[:32].decode('utf-8').lower() + ':' # This does close the underlying file with os.fdopen(fd, 'w') as f: f.write(prefix + "\n") except OSError: # The file already exists tries = 1 while tries < 10: with open(filename, 'r') as f: prefix = f.readline()[:-1] if len(prefix) == 33: break tries += 1 prefix = '' time.sleep(0.5) if not prefix: print("Could not read remote cache key prefix file") sys.exit(1) return prefix KEY_PREFIX = get_or_create_key_prefix() # type: text_type def bounce_key_prefix_for_testing(test_name): # type: (text_type) -> None global KEY_PREFIX KEY_PREFIX = test_name + u':' + text_type(os.getpid()) + u':' def get_cache_backend(cache_name): # type: (Optional[str]) -> BaseCache if cache_name is None: return djcache return caches[cache_name] def cache_with_key(keyfunc, cache_name=None, timeout=None, with_statsd_key=None): # type: (Any, Optional[str], Optional[int], Optional[str]) -> Any # This function can't be typed perfectly because returning a generic function # isn't supported in mypy - https://github.com/python/mypy/issues/1551. """Decorator which applies Django caching to a function. Decorator argument is a function which computes a cache key from the original function's arguments. 
You are responsible for avoiding collisions with other uses of this decorator or other uses of caching.""" def decorator(func): # type: (Callable[..., Any]) -> (Callable[..., Any]) @wraps(func) def func_with_caching(*args, **kwargs): # type: (*Any, **Any) -> Callable[..., Any] key = keyfunc(*args, **kwargs) val = cache_get(key, cache_name=cache_name) extra = "" if cache_name == 'database': extra = ".dbcache" if with_statsd_key is not None: metric_key = with_statsd_key else: metric_key = statsd_key(key) status = "hit" if val is not None else "miss" statsd.incr("cache%s.%s.%s" % (extra, metric_key, status)) # Values are singleton tuples so that we can distinguish # a result of None from a missing key. if val is not None: return val[0] val = func(*args, **kwargs) cache_set(key, val, cache_name=cache_name, timeout=timeout) return val return func_with_caching return decorator def cache_set(key, val, cache_name=None, timeout=None): # type: (text_type, Any, Optional[str], Optional[int]) -> None remote_cache_stats_start() cache_backend = get_cache_backend(cache_name) cache_backend.set(KEY_PREFIX + key, (val,), timeout=timeout) remote_cache_stats_finish() def cache_get(key, cache_name=None): # type: (text_type, Optional[str]) -> Any remote_cache_stats_start() cache_backend = get_cache_backend(cache_name) ret = cache_backend.get(KEY_PREFIX + key) remote_cache_stats_finish() return ret def cache_get_many(keys, cache_name=None): # type: (List[text_type], Optional[str]) -> Dict[text_type, Any] keys = [KEY_PREFIX + key for key in keys] remote_cache_stats_start() ret = get_cache_backend(cache_name).get_many(keys) remote_cache_stats_finish() return dict([(key[len(KEY_PREFIX):], value) for key, value in ret.items()]) def cache_set_many(items, cache_name=None, timeout=None): # type: (Dict[text_type, Any], Optional[str], Optional[int]) -> None new_items = {} for key in items: new_items[KEY_PREFIX + key] = items[key] items = new_items remote_cache_stats_start() 
get_cache_backend(cache_name).set_many(items, timeout=timeout) remote_cache_stats_finish() def cache_delete(key, cache_name=None): # type: (text_type, Optional[str]) -> None remote_cache_stats_start() get_cache_backend(cache_name).delete(KEY_PREFIX + key) remote_cache_stats_finish() def cache_delete_many(items, cache_name=None): # type: (Iterable[text_type], Optional[str]) -> None remote_cache_stats_start() get_cache_backend(cache_name).delete_many( KEY_PREFIX + item for item in items) remote_cache_stats_finish() # Required Arguments are as follows: # * object_ids: The list of object ids to look up # * cache_key_function: object_id => cache key # * query_function: [object_ids] => [objects from database] # Optional keyword arguments: # * setter: Function to call before storing items to cache (e.g. compression) # * extractor: Function to call on items returned from cache # (e.g. decompression). Should be the inverse of the setter # function. # * id_fetcher: Function mapping an object from database => object_id # (in case we're using a key more complex than obj.id) # * cache_transformer: Function mapping an object from database => # value for cache (in case the values that we're caching are some # function of the objects, not the objects themselves) ObjKT = TypeVar('ObjKT', int, text_type) ItemT = Any # https://github.com/python/mypy/issues/1721 CompressedItemT = Any # https://github.com/python/mypy/issues/1721 def generic_bulk_cached_fetch(cache_key_function, # type: Callable[[ObjKT], text_type] query_function, # type: Callable[[List[ObjKT]], Iterable[Any]] object_ids, # type: Iterable[ObjKT] extractor=lambda obj: obj, # type: Callable[[CompressedItemT], ItemT] setter=lambda obj: obj, # type: Callable[[ItemT], CompressedItemT] id_fetcher=lambda obj: obj.id, # type: Callable[[Any], ObjKT] cache_transformer=lambda obj: obj # type: Callable[[Any], ItemT] ): # type: (...) 
-> Dict[ObjKT, Any] cache_keys = {} # type: Dict[ObjKT, text_type] for object_id in object_ids: cache_keys[object_id] = cache_key_function(object_id) cached_objects = cache_get_many([cache_keys[object_id] for object_id in object_ids]) for (key, val) in cached_objects.items(): cached_objects[key] = extractor(cached_objects[key][0]) needed_ids = [object_id for object_id in object_ids if cache_keys[object_id] not in cached_objects] db_objects = query_function(needed_ids) items_for_remote_cache = {} # type: Dict[text_type, Any] for obj in db_objects: key = cache_keys[id_fetcher(obj)] item = cache_transformer(obj) items_for_remote_cache[key] = (setter(item),) cached_objects[key] = item if len(items_for_remote_cache) > 0: cache_set_many(items_for_remote_cache) return dict((object_id, cached_objects[cache_keys[object_id]]) for object_id in object_ids if cache_keys[object_id] in cached_objects) def cache(func): # type: (FuncT) -> FuncT """Decorator which applies Django caching to a function. Uses a key based on the function's name, filename, and the repr() of its arguments.""" func_uniqifier = '%s-%s' % (func.__code__.co_filename, func.__name__) # type: ignore # https://github.com/python/mypy/issues/1923 @wraps(func) def keyfunc(*args, **kwargs): # type: (*Any, **Any) -> str # Django complains about spaces because memcached rejects them key = func_uniqifier + repr((args, kwargs)) return key.replace('-', '--').replace(' ', '-s') return cache_with_key(keyfunc)(func) def display_recipient_cache_key(recipient_id): # type: (int) -> text_type return u"display_recipient_dict:%d" % (recipient_id,) def user_profile_by_email_cache_key(email): # type: (text_type) -> text_type # See the comment in zerver/lib/avatar_hash.py:gravatar_hash for why we # are proactively encoding email addresses even though they will # with high likelihood be ASCII-only for the foreseeable future. 
return u'user_profile_by_email:%s' % (make_safe_digest(email.strip()),) def user_profile_by_id_cache_key(user_profile_id): # type: (int) -> text_type return u"user_profile_by_id:%s" % (user_profile_id,) # TODO: Refactor these cache helpers into another file that can import # models.py so that python3-style type annotations can also work. def cache_save_user_profile(user_profile): # type: (UserProfile) -> None cache_set(user_profile_by_id_cache_key(user_profile.id), user_profile, timeout=3600*24*7) active_user_dict_fields = ['id', 'full_name', 'short_name', 'email', 'is_realm_admin', 'is_bot'] # type: List[str] def active_user_dicts_in_realm_cache_key(realm): # type: (Realm) -> text_type return u"active_user_dicts_in_realm:%s" % (realm.id,) active_bot_dict_fields = ['id', 'full_name', 'short_name', 'email', 'default_sending_stream__name', 'default_events_register_stream__name', 'default_all_public_streams', 'api_key', 'bot_owner__email', 'avatar_source'] # type: List[str] def active_bot_dicts_in_realm_cache_key(realm): # type: (Realm) -> text_type return u"active_bot_dicts_in_realm:%s" % (realm.id,) def get_stream_cache_key(stream_name, realm): # type: (text_type, Union[Realm, int]) -> text_type from zerver.models import Realm if isinstance(realm, Realm): realm_id = realm.id else: realm_id = realm return u"stream_by_realm_and_name:%s:%s" % ( realm_id, make_safe_digest(stream_name.strip().lower())) def delete_user_profile_caches(user_profiles): # type: (Iterable[UserProfile]) -> None keys = [] for user_profile in user_profiles: keys.append(user_profile_by_email_cache_key(user_profile.email)) keys.append(user_profile_by_id_cache_key(user_profile.id)) cache_delete_many(keys) # Called by models.py to flush the user_profile cache whenever we save # a user_profile object def flush_user_profile(sender, **kwargs): # type: (Any, **Any) -> None user_profile = kwargs['instance'] delete_user_profile_caches([user_profile]) # Invalidate our active_users_in_realm info dict if any 
user has changed # the fields in the dict or become (in)active if kwargs.get('update_fields') is None or \ len(set(active_user_dict_fields + ['is_active']) & set(kwargs['update_fields'])) > 0: cache_delete(active_user_dicts_in_realm_cache_key(user_profile.realm)) # Invalidate our active_bots_in_realm info dict if any bot has # changed the fields in the dict or become (in)active if user_profile.is_bot and (kwargs['update_fields'] is None or (set(active_bot_dict_fields + ['is_active']) & set(kwargs['update_fields']))): cache_delete(active_bot_dicts_in_realm_cache_key(user_profile.realm)) # Invalidate realm-wide alert words cache if any user in the realm has changed # alert words if kwargs.get('update_fields') is None or "alert_words" in kwargs['update_fields']: cache_delete(realm_alert_words_cache_key(user_profile.realm)) # Called by models.py to flush various caches whenever we save # a Realm object. The main tricky thing here is that Realm info is # generally cached indirectly through user_profile objects. def flush_realm(sender, **kwargs): # type: (Any, **Any) -> None realm = kwargs['instance'] users = realm.get_active_users() delete_user_profile_caches(users) if realm.deactivated: cache_delete(active_user_dicts_in_realm_cache_key(realm)) cache_delete(active_bot_dicts_in_realm_cache_key(realm)) cache_delete(realm_alert_words_cache_key(realm)) def realm_alert_words_cache_key(realm): # type: (Realm) -> text_type return u"realm_alert_words:%s" % (realm.domain,) # Called by models.py to flush the stream cache whenever we save a stream # object. 
def flush_stream(sender, **kwargs): # type: (Any, **Any) -> None from zerver.models import UserProfile stream = kwargs['instance'] items_for_remote_cache = {} items_for_remote_cache[get_stream_cache_key(stream.name, stream.realm)] = (stream,) cache_set_many(items_for_remote_cache) if kwargs.get('update_fields') is None or 'name' in kwargs['update_fields'] and \ UserProfile.objects.filter( Q(default_sending_stream=stream) | Q(default_events_register_stream=stream) ).exists(): cache_delete(active_bot_dicts_in_realm_cache_key(stream.realm)) # TODO: Rename to_dict_cache_key_id and to_dict_cache_key def to_dict_cache_key_id(message_id, apply_markdown): # type: (int, bool) -> text_type return u'message_dict:%d:%d' % (message_id, apply_markdown) def to_dict_cache_key(message, apply_markdown): # type: (Message, bool) -> text_type return to_dict_cache_key_id(message.id, apply_markdown) def flush_message(sender, **kwargs): # type: (Any, **Any) -> None message = kwargs['instance'] cache_delete(to_dict_cache_key(message, False)) cache_delete(to_dict_cache_key(message, True))
39.541772
133
0.681414
from __future__ import absolute_import from __future__ import print_function from functools import wraps from django.core.cache import cache as djcache from django.core.cache import caches from django.conf import settings from django.db.models import Q from django.core.cache.backends.base import BaseCache from typing import Any, Callable, Iterable, Optional, Union, TypeVar from zerver.lib.utils import statsd, statsd_key, make_safe_digest import subprocess import time import base64 import random import sys import os import os.path import hashlib import six from six import text_type if False: from zerver.models import UserProfile, Realm, Message FuncT = TypeVar('FuncT', bound=Callable[..., Any]) remote_cache_time_start = 0.0 remote_cache_total_time = 0.0 remote_cache_total_requests = 0 def get_remote_cache_time(): return remote_cache_total_time def get_remote_cache_requests(): return remote_cache_total_requests def remote_cache_stats_start(): global remote_cache_time_start remote_cache_time_start = time.time() def remote_cache_stats_finish(): global remote_cache_total_time global remote_cache_total_requests global remote_cache_time_start remote_cache_total_requests += 1 remote_cache_total_time += (time.time() - remote_cache_time_start) def get_or_create_key_prefix(): if settings.TEST_SUITE: return u'test_suite:%s:' % (text_type(os.getpid()),) subprocess.check_call(["mkdir", "-p", os.path.join(settings.DEPLOY_ROOT, "var")]) filename = os.path.join(settings.DEPLOY_ROOT, "var", "remote_cache_prefix") try: fd = os.open(filename, os.O_CREAT | os.O_EXCL | os.O_RDWR, 0o444) random_hash = hashlib.sha256(text_type(random.getrandbits(256)).encode('utf-8')).digest() prefix = base64.b16encode(random_hash)[:32].decode('utf-8').lower() + ':' with os.fdopen(fd, 'w') as f: f.write(prefix + "\n") except OSError: tries = 1 while tries < 10: with open(filename, 'r') as f: prefix = f.readline()[:-1] if len(prefix) == 33: break tries += 1 prefix = '' time.sleep(0.5) if not prefix: 
print("Could not read remote cache key prefix file") sys.exit(1) return prefix KEY_PREFIX = get_or_create_key_prefix() def bounce_key_prefix_for_testing(test_name): global KEY_PREFIX KEY_PREFIX = test_name + u':' + text_type(os.getpid()) + u':' def get_cache_backend(cache_name): if cache_name is None: return djcache return caches[cache_name] def cache_with_key(keyfunc, cache_name=None, timeout=None, with_statsd_key=None): # isn't supported in mypy - https://github.com/python/mypy/issues/1551. def decorator(func): @wraps(func) def func_with_caching(*args, **kwargs): key = keyfunc(*args, **kwargs) val = cache_get(key, cache_name=cache_name) extra = "" if cache_name == 'database': extra = ".dbcache" if with_statsd_key is not None: metric_key = with_statsd_key else: metric_key = statsd_key(key) status = "hit" if val is not None else "miss" statsd.incr("cache%s.%s.%s" % (extra, metric_key, status)) if val is not None: return val[0] val = func(*args, **kwargs) cache_set(key, val, cache_name=cache_name, timeout=timeout) return val return func_with_caching return decorator def cache_set(key, val, cache_name=None, timeout=None): remote_cache_stats_start() cache_backend = get_cache_backend(cache_name) cache_backend.set(KEY_PREFIX + key, (val,), timeout=timeout) remote_cache_stats_finish() def cache_get(key, cache_name=None): remote_cache_stats_start() cache_backend = get_cache_backend(cache_name) ret = cache_backend.get(KEY_PREFIX + key) remote_cache_stats_finish() return ret def cache_get_many(keys, cache_name=None): keys = [KEY_PREFIX + key for key in keys] remote_cache_stats_start() ret = get_cache_backend(cache_name).get_many(keys) remote_cache_stats_finish() return dict([(key[len(KEY_PREFIX):], value) for key, value in ret.items()]) def cache_set_many(items, cache_name=None, timeout=None): new_items = {} for key in items: new_items[KEY_PREFIX + key] = items[key] items = new_items remote_cache_stats_start() get_cache_backend(cache_name).set_many(items, timeout=timeout) 
remote_cache_stats_finish() def cache_delete(key, cache_name=None): remote_cache_stats_start() get_cache_backend(cache_name).delete(KEY_PREFIX + key) remote_cache_stats_finish() def cache_delete_many(items, cache_name=None): remote_cache_stats_start() get_cache_backend(cache_name).delete_many( KEY_PREFIX + item for item in items) remote_cache_stats_finish() # * cache_transformer: Function mapping an object from database => # value for cache (in case the values that we're caching are some ObjKT = TypeVar('ObjKT', int, text_type) ItemT = Any CompressedItemT = Any def generic_bulk_cached_fetch(cache_key_function, query_function, object_ids, extractor=lambda obj: obj, setter=lambda obj: obj, id_fetcher=lambda obj: obj.id, cache_transformer=lambda obj: obj ): cache_keys = {} for object_id in object_ids: cache_keys[object_id] = cache_key_function(object_id) cached_objects = cache_get_many([cache_keys[object_id] for object_id in object_ids]) for (key, val) in cached_objects.items(): cached_objects[key] = extractor(cached_objects[key][0]) needed_ids = [object_id for object_id in object_ids if cache_keys[object_id] not in cached_objects] db_objects = query_function(needed_ids) items_for_remote_cache = {} for obj in db_objects: key = cache_keys[id_fetcher(obj)] item = cache_transformer(obj) items_for_remote_cache[key] = (setter(item),) cached_objects[key] = item if len(items_for_remote_cache) > 0: cache_set_many(items_for_remote_cache) return dict((object_id, cached_objects[cache_keys[object_id]]) for object_id in object_ids if cache_keys[object_id] in cached_objects) def cache(func): func_uniqifier = '%s-%s' % (func.__code__.co_filename, func.__name__) kwargs): key = func_uniqifier + repr((args, kwargs)) return key.replace('-', '--').replace(' ', '-s') return cache_with_key(keyfunc)(func) def display_recipient_cache_key(recipient_id): return u"display_recipient_dict:%d" % (recipient_id,) def user_profile_by_email_cache_key(email): return u'user_profile_by_email:%s' % 
(make_safe_digest(email.strip()),) def user_profile_by_id_cache_key(user_profile_id): return u"user_profile_by_id:%s" % (user_profile_id,) def cache_save_user_profile(user_profile): cache_set(user_profile_by_id_cache_key(user_profile.id), user_profile, timeout=3600*24*7) active_user_dict_fields = ['id', 'full_name', 'short_name', 'email', 'is_realm_admin', 'is_bot'] def active_user_dicts_in_realm_cache_key(realm): return u"active_user_dicts_in_realm:%s" % (realm.id,) active_bot_dict_fields = ['id', 'full_name', 'short_name', 'email', 'default_sending_stream__name', 'default_events_register_stream__name', 'default_all_public_streams', 'api_key', 'bot_owner__email', 'avatar_source'] def active_bot_dicts_in_realm_cache_key(realm): return u"active_bot_dicts_in_realm:%s" % (realm.id,) def get_stream_cache_key(stream_name, realm): from zerver.models import Realm if isinstance(realm, Realm): realm_id = realm.id else: realm_id = realm return u"stream_by_realm_and_name:%s:%s" % ( realm_id, make_safe_digest(stream_name.strip().lower())) def delete_user_profile_caches(user_profiles): keys = [] for user_profile in user_profiles: keys.append(user_profile_by_email_cache_key(user_profile.email)) keys.append(user_profile_by_id_cache_key(user_profile.id)) cache_delete_many(keys) def flush_user_profile(sender, **kwargs): user_profile = kwargs['instance'] delete_user_profile_caches([user_profile]) if kwargs.get('update_fields') is None or \ len(set(active_user_dict_fields + ['is_active']) & set(kwargs['update_fields'])) > 0: cache_delete(active_user_dicts_in_realm_cache_key(user_profile.realm)) if user_profile.is_bot and (kwargs['update_fields'] is None or (set(active_bot_dict_fields + ['is_active']) & set(kwargs['update_fields']))): cache_delete(active_bot_dicts_in_realm_cache_key(user_profile.realm)) if kwargs.get('update_fields') is None or "alert_words" in kwargs['update_fields']: cache_delete(realm_alert_words_cache_key(user_profile.realm)) def flush_realm(sender, **kwargs): 
realm = kwargs['instance'] users = realm.get_active_users() delete_user_profile_caches(users) if realm.deactivated: cache_delete(active_user_dicts_in_realm_cache_key(realm)) cache_delete(active_bot_dicts_in_realm_cache_key(realm)) cache_delete(realm_alert_words_cache_key(realm)) def realm_alert_words_cache_key(realm): return u"realm_alert_words:%s" % (realm.domain,) def flush_stream(sender, **kwargs): from zerver.models import UserProfile stream = kwargs['instance'] items_for_remote_cache = {} items_for_remote_cache[get_stream_cache_key(stream.name, stream.realm)] = (stream,) cache_set_many(items_for_remote_cache) if kwargs.get('update_fields') is None or 'name' in kwargs['update_fields'] and \ UserProfile.objects.filter( Q(default_sending_stream=stream) | Q(default_events_register_stream=stream) ).exists(): cache_delete(active_bot_dicts_in_realm_cache_key(stream.realm)) def to_dict_cache_key_id(message_id, apply_markdown): return u'message_dict:%d:%d' % (message_id, apply_markdown) def to_dict_cache_key(message, apply_markdown): return to_dict_cache_key_id(message.id, apply_markdown) def flush_message(sender, **kwargs): message = kwargs['instance'] cache_delete(to_dict_cache_key(message, False)) cache_delete(to_dict_cache_key(message, True))
true
true
f70e9c0f85a4b70d03afc9fad7137e074aea7e36
1,674
py
Python
benchmarks_sphere/paper_jrn_parco_rexi_nonlinear/scalability_space_galewsky_cheyenne_intel/postprocessing.py
valentinaschueller/sweet
27e99c7a110c99deeadee70688c186d82b39ac90
[ "MIT" ]
6
2017-11-20T08:12:46.000Z
2021-03-11T15:32:36.000Z
benchmarks_sphere/paper_jrn_parco_rexi_nonlinear/scalability_space_galewsky_cheyenne_intel/postprocessing.py
valentinaschueller/sweet
27e99c7a110c99deeadee70688c186d82b39ac90
[ "MIT" ]
4
2018-02-02T21:46:33.000Z
2022-01-11T11:10:27.000Z
benchmarks_sphere/paper_jrn_parco_rexi_nonlinear/scalability_space_galewsky_cheyenne_intel/postprocessing.py
valentinaschueller/sweet
27e99c7a110c99deeadee70688c186d82b39ac90
[ "MIT" ]
12
2016-03-01T18:33:34.000Z
2022-02-08T22:20:31.000Z
#! /usr/bin/env python3 from SWEET import * from mule.postprocessing.JobsData import * from mule.postprocessing.JobsDataConsolidate import * from mule.plotting.Plotting import * sys.path.append('../') import pretty_plotting as pp sys.path.pop() # # Load data # j = JobsData('job_bench_*', verbosity=0) # # Create groups # groups = ['runtime.timestepping_method'] c = JobsDataConsolidate(j) job_groups = c.create_groups(groups) print("Groups:") for key, g in job_groups.items(): print(key) tagname_x = 'parallelization.num_threads_per_rank' tagname_y = 'output.simulation_benchmark_timings.main_timestepping' # # Make ready for plotting # d = JobsData_GroupsPlottingScattered( job_groups, tagname_x, tagname_y ) data_plotting = d.get_data_float() # Make pretty for key, data in data_plotting.items(): data['label'] = pp.get_pretty_name(key) # # Plot! # p = Plotting_ScatteredData() p.plot( data_plotting = data_plotting, xlabel = "Number of threads", ylabel = "Wallclock time (seconds)", title = "Wallclock time", outfile = "output_threads_vs_wallclock_time.pdf" ) # # Scalability # for key, values in data_plotting.items(): label = key x_values = values['x_values'] y_values = values['y_values'] # Basis for scalability (number of cores) basis_scalability = 1.0 # Get index of x value for scalability i = x_values.index(basis_scalability) if i == None: raise Exception("Scalability basis not found") # Convert to scalability values['y_values'] = [y_values[i]/y for y in y_values] p.plot( data_plotting, xlabel="Number of threads", ylabel="Scalability", title = "Scalability", outfile="output_threads_vs_scalability.pdf" )
18.6
67
0.737754
from SWEET import * from mule.postprocessing.JobsData import * from mule.postprocessing.JobsDataConsolidate import * from mule.plotting.Plotting import * sys.path.append('../') import pretty_plotting as pp sys.path.pop() j = JobsData('job_bench_*', verbosity=0) groups = ['runtime.timestepping_method'] c = JobsDataConsolidate(j) job_groups = c.create_groups(groups) print("Groups:") for key, g in job_groups.items(): print(key) tagname_x = 'parallelization.num_threads_per_rank' tagname_y = 'output.simulation_benchmark_timings.main_timestepping' d = JobsData_GroupsPlottingScattered( job_groups, tagname_x, tagname_y ) data_plotting = d.get_data_float() for key, data in data_plotting.items(): data['label'] = pp.get_pretty_name(key) p = Plotting_ScatteredData() p.plot( data_plotting = data_plotting, xlabel = "Number of threads", ylabel = "Wallclock time (seconds)", title = "Wallclock time", outfile = "output_threads_vs_wallclock_time.pdf" ) for key, values in data_plotting.items(): label = key x_values = values['x_values'] y_values = values['y_values'] basis_scalability = 1.0 i = x_values.index(basis_scalability) if i == None: raise Exception("Scalability basis not found") values['y_values'] = [y_values[i]/y for y in y_values] p.plot( data_plotting, xlabel="Number of threads", ylabel="Scalability", title = "Scalability", outfile="output_threads_vs_scalability.pdf" )
true
true
f70e9d0ca009d2d068924b1345df34f152420b7c
1,346
py
Python
setup.py
FaizChishtie/MrTopo
e6a674738d8b0a0c56edde2be0ae272ea9e62f1a
[ "MIT" ]
1
2021-01-26T11:01:32.000Z
2021-01-26T11:01:32.000Z
setup.py
FaizChishtie/MrTopo
e6a674738d8b0a0c56edde2be0ae272ea9e62f1a
[ "MIT" ]
null
null
null
setup.py
FaizChishtie/MrTopo
e6a674738d8b0a0c56edde2be0ae272ea9e62f1a
[ "MIT" ]
1
2020-12-23T22:01:09.000Z
2020-12-23T22:01:09.000Z
import setuptools import re with open("README.md", "r") as fh: long_description = fh.read() version = re.search( '^__version__\s*=\s*"(.*)"', open('mrtopo/__main__.py').read(), re.M ).group(1) setuptools.setup( name='mrtopo', version=version, packages=setuptools.find_packages(), url='https://github.com/FaizChishtie/mrtopo', license='MIT', author='faizchishtie', author_email='faizchishtie@gmail.com', description='Mutate Mininet topology files with MrTopo', python_requires='>=3.0', entry_points={'console_scripts': ['mrtopo = mrtopo.cli:cli']}, long_description=long_description, long_description_content_type='text/markdown', install_requires=[ 'mininet', 'click' ], keywords='topology network startup', classifiers=[ 'Development Status :: 3 - Alpha', 'Environment :: Console', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', 'Programming Language :: Python :: 3.9', 'Topic :: Utilities', 'Typing :: Typed', ], )
29.911111
66
0.61367
import setuptools import re with open("README.md", "r") as fh: long_description = fh.read() version = re.search( '^__version__\s*=\s*"(.*)"', open('mrtopo/__main__.py').read(), re.M ).group(1) setuptools.setup( name='mrtopo', version=version, packages=setuptools.find_packages(), url='https://github.com/FaizChishtie/mrtopo', license='MIT', author='faizchishtie', author_email='faizchishtie@gmail.com', description='Mutate Mininet topology files with MrTopo', python_requires='>=3.0', entry_points={'console_scripts': ['mrtopo = mrtopo.cli:cli']}, long_description=long_description, long_description_content_type='text/markdown', install_requires=[ 'mininet', 'click' ], keywords='topology network startup', classifiers=[ 'Development Status :: 3 - Alpha', 'Environment :: Console', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', 'Programming Language :: Python :: 3.9', 'Topic :: Utilities', 'Typing :: Typed', ], )
true
true
f70e9e8f3f4e25b72d078669cef22e5f17566484
1,495
py
Python
setup.py
ucamhal/jsonlogging
7606abb00c3cdc9536c8947d5ab1e210716e8d47
[ "BSD-2-Clause" ]
1
2017-03-19T12:06:58.000Z
2017-03-19T12:06:58.000Z
setup.py
ucamhal/jsonlogging
7606abb00c3cdc9536c8947d5ab1e210716e8d47
[ "BSD-2-Clause" ]
null
null
null
setup.py
ucamhal/jsonlogging
7606abb00c3cdc9536c8947d5ab1e210716e8d47
[ "BSD-2-Clause" ]
null
null
null
from setuptools import setup def get_version(filename): """ Parse the value of the __version__ var from a Python source file without running/importing the file. """ import re version_pattern = r"^ *__version__ *= *['\"](\d+\.\d+\.\d+)['\"] *$" match = re.search(version_pattern, open(filename).read(), re.MULTILINE) assert match, ("No version found in file: {!r} matching pattern: {!r}" .format(filename, version_pattern)) return match.group(1) setup( name="jsonlogging", description="jsonlogging provides structured log output from the " "logging module in JSON format", author="Hal Blackburn", author_email="hwtb2@cam.ac.uk", url="https://github.com/ucamhal/ravenpy", version=get_version("jsonlogging/__init__.py"), packages=["jsonlogging"], license="BSD", classifiers=[ "Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python", "Topic :: Software Development", "Topic :: System :: Logging" ], long_description=open("README.md").read(), test_suite="jsonlogging.tests.test_all", tests_require="mock >= 1.0.0, < 2.0.0" )
33.977273
75
0.623411
from setuptools import setup def get_version(filename): import re version_pattern = r"^ *__version__ *= *['\"](\d+\.\d+\.\d+)['\"] *$" match = re.search(version_pattern, open(filename).read(), re.MULTILINE) assert match, ("No version found in file: {!r} matching pattern: {!r}" .format(filename, version_pattern)) return match.group(1) setup( name="jsonlogging", description="jsonlogging provides structured log output from the " "logging module in JSON format", author="Hal Blackburn", author_email="hwtb2@cam.ac.uk", url="https://github.com/ucamhal/ravenpy", version=get_version("jsonlogging/__init__.py"), packages=["jsonlogging"], license="BSD", classifiers=[ "Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python", "Topic :: Software Development", "Topic :: System :: Logging" ], long_description=open("README.md").read(), test_suite="jsonlogging.tests.test_all", tests_require="mock >= 1.0.0, < 2.0.0" )
true
true
f70e9eba1ab08ba41a6d14813985705f6afe0f1e
94,216
py
Python
sympy/plotting/plot.py
Michal-Gagala/sympy
3cc756c2af73b5506102abaeefd1b654e286e2c8
[ "MIT" ]
null
null
null
sympy/plotting/plot.py
Michal-Gagala/sympy
3cc756c2af73b5506102abaeefd1b654e286e2c8
[ "MIT" ]
null
null
null
sympy/plotting/plot.py
Michal-Gagala/sympy
3cc756c2af73b5506102abaeefd1b654e286e2c8
[ "MIT" ]
null
null
null
"""Plotting module for SymPy. A plot is represented by the ``Plot`` class that contains a reference to the backend and a list of the data series to be plotted. The data series are instances of classes meant to simplify getting points and meshes from SymPy expressions. ``plot_backends`` is a dictionary with all the backends. This module gives only the essential. For all the fancy stuff use directly the backend. You can get the backend wrapper for every plot from the ``_backend`` attribute. Moreover the data series classes have various useful methods like ``get_points``, ``get_meshes``, etc, that may be useful if you wish to use another plotting library. Especially if you need publication ready graphs and this module is not enough for you - just get the ``_backend`` attribute and add whatever you want directly to it. In the case of matplotlib (the common way to graph data in python) just copy ``_backend.fig`` which is the figure and ``_backend.ax`` which is the axis and work on them as you would on any other matplotlib object. Simplicity of code takes much greater importance than performance. Do not use it if you care at all about performance. A new backend instance is initialized every time you call ``show()`` and the old one is left to the garbage collector. """ from collections.abc import Callable from sympy.core.basic import Basic from sympy.core.containers import Tuple from sympy.core.expr import Expr from sympy.core.function import arity, Function from sympy.core.symbol import (Dummy, Symbol) from sympy.core.sympify import sympify from sympy.external import import_module from sympy.printing.latex import latex from sympy.utilities.exceptions import sympy_deprecation_warning from sympy.utilities.iterables import is_sequence from .experimental_lambdify import (vectorized_lambdify, lambdify) # N.B. 
# When changing the minimum module version for matplotlib, please change # the same in the `SymPyDocTestFinder`` in `sympy/testing/runtests.py` # Backend specific imports - textplot from sympy.plotting.textplot import textplot # Global variable # Set to False when running tests / doctests so that the plots don't show. _show = True def unset_show(): """ Disable show(). For use in the tests. """ global _show _show = False def _str_or_latex(label): if isinstance(label, Basic): return latex(label, mode='inline') return str(label) ############################################################################## # The public interface ############################################################################## class Plot: """The central class of the plotting module. Explanation =========== For interactive work the function ``plot`` is better suited. This class permits the plotting of SymPy expressions using numerous backends (matplotlib, textplot, the old pyglet module for sympy, Google charts api, etc). The figure can contain an arbitrary number of plots of SymPy expressions, lists of coordinates of points, etc. Plot has a private attribute _series that contains all data series to be plotted (expressions for lines or surfaces, lists of points, etc (all subclasses of BaseSeries)). Those data series are instances of classes not imported by ``from sympy import *``. The customization of the figure is on two levels. Global options that concern the figure as a whole (eg title, xlabel, scale, etc) and per-data series options (eg name) and aesthetics (eg. color, point shape, line type, etc.). The difference between options and aesthetics is that an aesthetic can be a function of the coordinates (or parameters in a parametric plot). 
The supported values for an aesthetic are: - None (the backend uses default values) - a constant - a function of one variable (the first coordinate or parameter) - a function of two variables (the first and second coordinate or parameters) - a function of three variables (only in nonparametric 3D plots) Their implementation depends on the backend so they may not work in some backends. If the plot is parametric and the arity of the aesthetic function permits it the aesthetic is calculated over parameters and not over coordinates. If the arity does not permit calculation over parameters the calculation is done over coordinates. Only cartesian coordinates are supported for the moment, but you can use the parametric plots to plot in polar, spherical and cylindrical coordinates. The arguments for the constructor Plot must be subclasses of BaseSeries. Any global option can be specified as a keyword argument. The global options for a figure are: - title : str - xlabel : str or Symbol - ylabel : str or Symbol - zlabel : str or Symbol - legend : bool - xscale : {'linear', 'log'} - yscale : {'linear', 'log'} - axis : bool - axis_center : tuple of two floats or {'center', 'auto'} - xlim : tuple of two floats - ylim : tuple of two floats - aspect_ratio : tuple of two floats or {'auto'} - autoscale : bool - margin : float in [0, 1] - backend : {'default', 'matplotlib', 'text'} or a subclass of BaseBackend - size : optional tuple of two floats, (width, height); default: None The per data series options and aesthetics are: There are none in the base series. See below for options for subclasses. Some data series support additional aesthetics or options: ListSeries, LineOver1DRangeSeries, Parametric2DLineSeries, Parametric3DLineSeries support the following: Aesthetics: - line_color : string, or float, or function, optional Specifies the color for the plot, which depends on the backend being used. 
For example, if ``MatplotlibBackend`` is being used, then Matplotlib string colors are acceptable ("red", "r", "cyan", "c", ...). Alternatively, we can use a float number `0 < color < 1` wrapped in a string (for example, `line_color="0.5"`) to specify grayscale colors. Alternatively, We can specify a function returning a single float value: this will be used to apply a color-loop (for example, `line_color=lambda x: math.cos(x)`). Note that by setting line_color, it would be applied simultaneously to all the series. options: - label : str - steps : bool - integers_only : bool SurfaceOver2DRangeSeries, ParametricSurfaceSeries support the following: aesthetics: - surface_color : function which returns a float. """ def __init__(self, *args, title=None, xlabel=None, ylabel=None, zlabel=None, aspect_ratio='auto', xlim=None, ylim=None, axis_center='auto', axis=True, xscale='linear', yscale='linear', legend=False, autoscale=True, margin=0, annotations=None, markers=None, rectangles=None, fill=None, backend='default', size=None, **kwargs): super().__init__() # Options for the graph as a whole. # The possible values for each option are described in the docstring of # Plot. They are based purely on convention, no checking is done. self.title = title self.xlabel = xlabel self.ylabel = ylabel self.zlabel = zlabel self.aspect_ratio = aspect_ratio self.axis_center = axis_center self.axis = axis self.xscale = xscale self.yscale = yscale self.legend = legend self.autoscale = autoscale self.margin = margin self.annotations = annotations self.markers = markers self.rectangles = rectangles self.fill = fill # Contains the data objects to be plotted. The backend should be smart # enough to iterate over this list. self._series = [] self._series.extend(args) # The backend type. On every show() a new backend instance is created # in self._backend which is tightly coupled to the Plot instance # (thanks to the parent attribute of the backend). 
if isinstance(backend, str): self.backend = plot_backends[backend] elif (type(backend) == type) and issubclass(backend, BaseBackend): self.backend = backend else: raise TypeError( "backend must be either a string or a subclass of BaseBackend") is_real = \ lambda lim: all(getattr(i, 'is_real', True) for i in lim) is_finite = \ lambda lim: all(getattr(i, 'is_finite', True) for i in lim) # reduce code repetition def check_and_set(t_name, t): if t: if not is_real(t): raise ValueError( "All numbers from {}={} must be real".format(t_name, t)) if not is_finite(t): raise ValueError( "All numbers from {}={} must be finite".format(t_name, t)) setattr(self, t_name, (float(t[0]), float(t[1]))) self.xlim = None check_and_set("xlim", xlim) self.ylim = None check_and_set("ylim", ylim) self.size = None check_and_set("size", size) def show(self): # TODO move this to the backend (also for save) if hasattr(self, '_backend'): self._backend.close() self._backend = self.backend(self) self._backend.show() def save(self, path): if hasattr(self, '_backend'): self._backend.close() self._backend = self.backend(self) self._backend.save(path) def __str__(self): series_strs = [('[%d]: ' % i) + str(s) for i, s in enumerate(self._series)] return 'Plot object containing:\n' + '\n'.join(series_strs) def __getitem__(self, index): return self._series[index] def __setitem__(self, index, *args): if len(args) == 1 and isinstance(args[0], BaseSeries): self._series[index] = args def __delitem__(self, index): del self._series[index] def append(self, arg): """Adds an element from a plot's series to an existing plot. Examples ======== Consider two ``Plot`` objects, ``p1`` and ``p2``. To add the second plot's first series object to the first, use the ``append`` method, like so: .. 
plot:: :format: doctest :include-source: True >>> from sympy import symbols >>> from sympy.plotting import plot >>> x = symbols('x') >>> p1 = plot(x*x, show=False) >>> p2 = plot(x, show=False) >>> p1.append(p2[0]) >>> p1 Plot object containing: [0]: cartesian line: x**2 for x over (-10.0, 10.0) [1]: cartesian line: x for x over (-10.0, 10.0) >>> p1.show() See Also ======== extend """ if isinstance(arg, BaseSeries): self._series.append(arg) else: raise TypeError('Must specify element of plot to append.') def extend(self, arg): """Adds all series from another plot. Examples ======== Consider two ``Plot`` objects, ``p1`` and ``p2``. To add the second plot to the first, use the ``extend`` method, like so: .. plot:: :format: doctest :include-source: True >>> from sympy import symbols >>> from sympy.plotting import plot >>> x = symbols('x') >>> p1 = plot(x**2, show=False) >>> p2 = plot(x, -x, show=False) >>> p1.extend(p2) >>> p1 Plot object containing: [0]: cartesian line: x**2 for x over (-10.0, 10.0) [1]: cartesian line: x for x over (-10.0, 10.0) [2]: cartesian line: -x for x over (-10.0, 10.0) >>> p1.show() """ if isinstance(arg, Plot): self._series.extend(arg._series) elif is_sequence(arg): self._series.extend(arg) else: raise TypeError('Expecting Plot or sequence of BaseSeries') class PlotGrid: """This class helps to plot subplots from already created SymPy plots in a single figure. Examples ======== .. plot:: :context: close-figs :format: doctest :include-source: True >>> from sympy import symbols >>> from sympy.plotting import plot, plot3d, PlotGrid >>> x, y = symbols('x, y') >>> p1 = plot(x, x**2, x**3, (x, -5, 5)) >>> p2 = plot((x**2, (x, -6, 6)), (x, (x, -5, 5))) >>> p3 = plot(x**3, (x, -5, 5)) >>> p4 = plot3d(x*y, (x, -5, 5), (y, -5, 5)) Plotting vertically in a single line: .. 
plot:: :context: close-figs :format: doctest :include-source: True >>> PlotGrid(2, 1, p1, p2) PlotGrid object containing: Plot[0]:Plot object containing: [0]: cartesian line: x for x over (-5.0, 5.0) [1]: cartesian line: x**2 for x over (-5.0, 5.0) [2]: cartesian line: x**3 for x over (-5.0, 5.0) Plot[1]:Plot object containing: [0]: cartesian line: x**2 for x over (-6.0, 6.0) [1]: cartesian line: x for x over (-5.0, 5.0) Plotting horizontally in a single line: .. plot:: :context: close-figs :format: doctest :include-source: True >>> PlotGrid(1, 3, p2, p3, p4) PlotGrid object containing: Plot[0]:Plot object containing: [0]: cartesian line: x**2 for x over (-6.0, 6.0) [1]: cartesian line: x for x over (-5.0, 5.0) Plot[1]:Plot object containing: [0]: cartesian line: x**3 for x over (-5.0, 5.0) Plot[2]:Plot object containing: [0]: cartesian surface: x*y for x over (-5.0, 5.0) and y over (-5.0, 5.0) Plotting in a grid form: .. plot:: :context: close-figs :format: doctest :include-source: True >>> PlotGrid(2, 2, p1, p2, p3, p4) PlotGrid object containing: Plot[0]:Plot object containing: [0]: cartesian line: x for x over (-5.0, 5.0) [1]: cartesian line: x**2 for x over (-5.0, 5.0) [2]: cartesian line: x**3 for x over (-5.0, 5.0) Plot[1]:Plot object containing: [0]: cartesian line: x**2 for x over (-6.0, 6.0) [1]: cartesian line: x for x over (-5.0, 5.0) Plot[2]:Plot object containing: [0]: cartesian line: x**3 for x over (-5.0, 5.0) Plot[3]:Plot object containing: [0]: cartesian surface: x*y for x over (-5.0, 5.0) and y over (-5.0, 5.0) """ def __init__(self, nrows, ncolumns, *args, show=True, size=None, **kwargs): """ Parameters ========== nrows : The number of rows that should be in the grid of the required subplot. ncolumns : The number of columns that should be in the grid of the required subplot. nrows and ncolumns together define the required grid. Arguments ========= A list of predefined plot objects entered in a row-wise sequence i.e. 
plot objects which are to be in the top row of the required grid are written first, then the second row objects and so on Keyword arguments ================= show : Boolean The default value is set to ``True``. Set show to ``False`` and the function will not display the subplot. The returned instance of the ``PlotGrid`` class can then be used to save or display the plot by calling the ``save()`` and ``show()`` methods respectively. size : (float, float), optional A tuple in the form (width, height) in inches to specify the size of the overall figure. The default value is set to ``None``, meaning the size will be set by the default backend. """ self.nrows = nrows self.ncolumns = ncolumns self._series = [] self.args = args for arg in args: self._series.append(arg._series) self.backend = DefaultBackend self.size = size if show: self.show() def show(self): if hasattr(self, '_backend'): self._backend.close() self._backend = self.backend(self) self._backend.show() def save(self, path): if hasattr(self, '_backend'): self._backend.close() self._backend = self.backend(self) self._backend.save(path) def __str__(self): plot_strs = [('Plot[%d]:' % i) + str(plot) for i, plot in enumerate(self.args)] return 'PlotGrid object containing:\n' + '\n'.join(plot_strs) ############################################################################## # Data Series ############################################################################## #TODO more general way to calculate aesthetics (see get_color_array) ### The base class for all series class BaseSeries: """Base class for the data objects containing stuff to be plotted. Explanation =========== The backend should check if it supports the data series that it's given. (eg TextBackend supports only LineOver1DRange). It's the backend responsibility to know how to use the class of data series that it's given. 
Some data series classes are grouped (using a class attribute like is_2Dline) according to the api they present (based only on convention). The backend is not obliged to use that api (eg. The LineOver1DRange belongs to the is_2Dline group and presents the get_points method, but the TextBackend does not use the get_points method). """ # Some flags follow. The rationale for using flags instead of checking base # classes is that setting multiple flags is simpler than multiple # inheritance. is_2Dline = False # Some of the backends expect: # - get_points returning 1D np.arrays list_x, list_y # - get_color_array returning 1D np.array (done in Line2DBaseSeries) # with the colors calculated at the points from get_points is_3Dline = False # Some of the backends expect: # - get_points returning 1D np.arrays list_x, list_y, list_y # - get_color_array returning 1D np.array (done in Line2DBaseSeries) # with the colors calculated at the points from get_points is_3Dsurface = False # Some of the backends expect: # - get_meshes returning mesh_x, mesh_y, mesh_z (2D np.arrays) # - get_points an alias for get_meshes is_contour = False # Some of the backends expect: # - get_meshes returning mesh_x, mesh_y, mesh_z (2D np.arrays) # - get_points an alias for get_meshes is_implicit = False # Some of the backends expect: # - get_meshes returning mesh_x (1D array), mesh_y(1D array, # mesh_z (2D np.arrays) # - get_points an alias for get_meshes # Different from is_contour as the colormap in backend will be # different is_parametric = False # The calculation of aesthetics expects: # - get_parameter_points returning one or two np.arrays (1D or 2D) # used for calculation aesthetics def __init__(self): super().__init__() @property def is_3D(self): flags3D = [ self.is_3Dline, self.is_3Dsurface ] return any(flags3D) @property def is_line(self): flagslines = [ self.is_2Dline, self.is_3Dline ] return any(flagslines) ### 2D lines class Line2DBaseSeries(BaseSeries): """A base class for 2D lines. 
- adding the label, steps and only_integers options - making is_2Dline true - defining get_segments and get_color_array """ is_2Dline = True _dim = 2 def __init__(self): super().__init__() self.label = None self.steps = False self.only_integers = False self.line_color = None def get_data(self): """ Return lists of coordinates for plotting the line. Returns ======= x: list List of x-coordinates y: list List of y-coordinates y: list List of z-coordinates in case of Parametric3DLineSeries """ np = import_module('numpy') points = self.get_points() if self.steps is True: if len(points) == 2: x = np.array((points[0], points[0])).T.flatten()[1:] y = np.array((points[1], points[1])).T.flatten()[:-1] points = (x, y) else: x = np.repeat(points[0], 3)[2:] y = np.repeat(points[1], 3)[:-2] z = np.repeat(points[2], 3)[1:-1] points = (x, y, z) return points def get_segments(self): sympy_deprecation_warning( """ The Line2DBaseSeries.get_segments() method is deprecated. Instead, use the MatplotlibBackend.get_segments() method, or use The get_points() or get_data() methods. 
""", deprecated_since_version="1.9", active_deprecations_target="deprecated-get-segments") np = import_module('numpy') points = type(self).get_data(self) points = np.ma.array(points).T.reshape(-1, 1, self._dim) return np.ma.concatenate([points[:-1], points[1:]], axis=1) def get_color_array(self): np = import_module('numpy') c = self.line_color if hasattr(c, '__call__'): f = np.vectorize(c) nargs = arity(c) if nargs == 1 and self.is_parametric: x = self.get_parameter_points() return f(centers_of_segments(x)) else: variables = list(map(centers_of_segments, self.get_points())) if nargs == 1: return f(variables[0]) elif nargs == 2: return f(*variables[:2]) else: # only if the line is 3D (otherwise raises an error) return f(*variables) else: return c*np.ones(self.nb_of_points) class List2DSeries(Line2DBaseSeries): """Representation for a line consisting of list of points.""" def __init__(self, list_x, list_y): np = import_module('numpy') super().__init__() self.list_x = np.array(list_x) self.list_y = np.array(list_y) self.label = 'list' def __str__(self): return 'list plot' def get_points(self): return (self.list_x, self.list_y) class LineOver1DRangeSeries(Line2DBaseSeries): """Representation for a line consisting of a SymPy expression over a range.""" def __init__(self, expr, var_start_end, **kwargs): super().__init__() self.expr = sympify(expr) self.label = kwargs.get('label', None) or self.expr self.var = sympify(var_start_end[0]) self.start = float(var_start_end[1]) self.end = float(var_start_end[2]) self.nb_of_points = kwargs.get('nb_of_points', 300) self.adaptive = kwargs.get('adaptive', True) self.depth = kwargs.get('depth', 12) self.line_color = kwargs.get('line_color', None) self.xscale = kwargs.get('xscale', 'linear') def __str__(self): return 'cartesian line: %s for %s over %s' % ( str(self.expr), str(self.var), str((self.start, self.end))) def get_points(self): """ Return lists of coordinates for plotting. 
Depending on the `adaptive` option, this function will either use an adaptive algorithm or it will uniformly sample the expression over the provided range. Returns ======= x: list List of x-coordinates y: list List of y-coordinates Explanation =========== The adaptive sampling is done by recursively checking if three points are almost collinear. If they are not collinear, then more points are added between those points. References ========== .. [1] Adaptive polygonal approximation of parametric curves, Luiz Henrique de Figueiredo. """ if self.only_integers or not self.adaptive: return self._uniform_sampling() else: f = lambdify([self.var], self.expr) x_coords = [] y_coords = [] np = import_module('numpy') def sample(p, q, depth): """ Samples recursively if three points are almost collinear. For depth < 6, points are added irrespective of whether they satisfy the collinearity condition or not. The maximum depth allowed is 12. """ # Randomly sample to avoid aliasing. random = 0.45 + np.random.rand() * 0.1 if self.xscale == 'log': xnew = 10**(np.log10(p[0]) + random * (np.log10(q[0]) - np.log10(p[0]))) else: xnew = p[0] + random * (q[0] - p[0]) ynew = f(xnew) new_point = np.array([xnew, ynew]) # Maximum depth if depth > self.depth: x_coords.append(q[0]) y_coords.append(q[1]) # Sample irrespective of whether the line is flat till the # depth of 6. We are not using linspace to avoid aliasing. elif depth < 6: sample(p, new_point, depth + 1) sample(new_point, q, depth + 1) # Sample ten points if complex values are encountered # at both ends. If there is a real value in between, then # sample those points further. 
elif p[1] is None and q[1] is None: if self.xscale == 'log': xarray = np.logspace(p[0], q[0], 10) else: xarray = np.linspace(p[0], q[0], 10) yarray = list(map(f, xarray)) if not all(y is None for y in yarray): for i in range(len(yarray) - 1): if not (yarray[i] is None and yarray[i + 1] is None): sample([xarray[i], yarray[i]], [xarray[i + 1], yarray[i + 1]], depth + 1) # Sample further if one of the end points in None (i.e. a # complex value) or the three points are not almost collinear. elif (p[1] is None or q[1] is None or new_point[1] is None or not flat(p, new_point, q)): sample(p, new_point, depth + 1) sample(new_point, q, depth + 1) else: x_coords.append(q[0]) y_coords.append(q[1]) f_start = f(self.start) f_end = f(self.end) x_coords.append(self.start) y_coords.append(f_start) sample(np.array([self.start, f_start]), np.array([self.end, f_end]), 0) return (x_coords, y_coords) def _uniform_sampling(self): np = import_module('numpy') if self.only_integers is True: if self.xscale == 'log': list_x = np.logspace(int(self.start), int(self.end), num=int(self.end) - int(self.start) + 1) else: list_x = np.linspace(int(self.start), int(self.end), num=int(self.end) - int(self.start) + 1) else: if self.xscale == 'log': list_x = np.logspace(self.start, self.end, num=self.nb_of_points) else: list_x = np.linspace(self.start, self.end, num=self.nb_of_points) f = vectorized_lambdify([self.var], self.expr) list_y = f(list_x) return (list_x, list_y) class Parametric2DLineSeries(Line2DBaseSeries): """Representation for a line consisting of two parametric SymPy expressions over a range.""" is_parametric = True def __init__(self, expr_x, expr_y, var_start_end, **kwargs): super().__init__() self.expr_x = sympify(expr_x) self.expr_y = sympify(expr_y) self.label = kwargs.get('label', None) or \ Tuple(self.expr_x, self.expr_y) self.var = sympify(var_start_end[0]) self.start = float(var_start_end[1]) self.end = float(var_start_end[2]) self.nb_of_points = kwargs.get('nb_of_points', 300) 
self.adaptive = kwargs.get('adaptive', True) self.depth = kwargs.get('depth', 12) self.line_color = kwargs.get('line_color', None) def __str__(self): return 'parametric cartesian line: (%s, %s) for %s over %s' % ( str(self.expr_x), str(self.expr_y), str(self.var), str((self.start, self.end))) def get_parameter_points(self): np = import_module('numpy') return np.linspace(self.start, self.end, num=self.nb_of_points) def _uniform_sampling(self): param = self.get_parameter_points() fx = vectorized_lambdify([self.var], self.expr_x) fy = vectorized_lambdify([self.var], self.expr_y) list_x = fx(param) list_y = fy(param) return (list_x, list_y) def get_points(self): """ Return lists of coordinates for plotting. Depending on the `adaptive` option, this function will either use an adaptive algorithm or it will uniformly sample the expression over the provided range. Returns ======= x: list List of x-coordinates y: list List of y-coordinates Explanation =========== The adaptive sampling is done by recursively checking if three points are almost collinear. If they are not collinear, then more points are added between those points. References ========== .. [1] Adaptive polygonal approximation of parametric curves, Luiz Henrique de Figueiredo. """ if not self.adaptive: return self._uniform_sampling() f_x = lambdify([self.var], self.expr_x) f_y = lambdify([self.var], self.expr_y) x_coords = [] y_coords = [] def sample(param_p, param_q, p, q, depth): """ Samples recursively if three points are almost collinear. For depth < 6, points are added irrespective of whether they satisfy the collinearity condition or not. The maximum depth allowed is 12. """ # Randomly sample to avoid aliasing. 
np = import_module('numpy') random = 0.45 + np.random.rand() * 0.1 param_new = param_p + random * (param_q - param_p) xnew = f_x(param_new) ynew = f_y(param_new) new_point = np.array([xnew, ynew]) # Maximum depth if depth > self.depth: x_coords.append(q[0]) y_coords.append(q[1]) # Sample irrespective of whether the line is flat till the # depth of 6. We are not using linspace to avoid aliasing. elif depth < 6: sample(param_p, param_new, p, new_point, depth + 1) sample(param_new, param_q, new_point, q, depth + 1) # Sample ten points if complex values are encountered # at both ends. If there is a real value in between, then # sample those points further. elif ((p[0] is None and q[1] is None) or (p[1] is None and q[1] is None)): param_array = np.linspace(param_p, param_q, 10) x_array = list(map(f_x, param_array)) y_array = list(map(f_y, param_array)) if not all(x is None and y is None for x, y in zip(x_array, y_array)): for i in range(len(y_array) - 1): if ((x_array[i] is not None and y_array[i] is not None) or (x_array[i + 1] is not None and y_array[i + 1] is not None)): point_a = [x_array[i], y_array[i]] point_b = [x_array[i + 1], y_array[i + 1]] sample(param_array[i], param_array[i], point_a, point_b, depth + 1) # Sample further if one of the end points in None (i.e. a complex # value) or the three points are not almost collinear. elif (p[0] is None or p[1] is None or q[1] is None or q[0] is None or not flat(p, new_point, q)): sample(param_p, param_new, p, new_point, depth + 1) sample(param_new, param_q, new_point, q, depth + 1) else: x_coords.append(q[0]) y_coords.append(q[1]) f_start_x = f_x(self.start) f_start_y = f_y(self.start) start = [f_start_x, f_start_y] f_end_x = f_x(self.end) f_end_y = f_y(self.end) end = [f_end_x, f_end_y] x_coords.append(f_start_x) y_coords.append(f_start_y) sample(self.start, self.end, start, end, 0) return x_coords, y_coords ### 3D lines class Line3DBaseSeries(Line2DBaseSeries): """A base class for 3D lines. 
Most of the stuff is derived from Line2DBaseSeries.""" is_2Dline = False is_3Dline = True _dim = 3 def __init__(self): super().__init__() class Parametric3DLineSeries(Line3DBaseSeries): """Representation for a 3D line consisting of three parametric SymPy expressions and a range.""" is_parametric = True def __init__(self, expr_x, expr_y, expr_z, var_start_end, **kwargs): super().__init__() self.expr_x = sympify(expr_x) self.expr_y = sympify(expr_y) self.expr_z = sympify(expr_z) self.label = kwargs.get('label', None) or \ Tuple(self.expr_x, self.expr_y) self.var = sympify(var_start_end[0]) self.start = float(var_start_end[1]) self.end = float(var_start_end[2]) self.nb_of_points = kwargs.get('nb_of_points', 300) self.line_color = kwargs.get('line_color', None) self._xlim = None self._ylim = None self._zlim = None def __str__(self): return '3D parametric cartesian line: (%s, %s, %s) for %s over %s' % ( str(self.expr_x), str(self.expr_y), str(self.expr_z), str(self.var), str((self.start, self.end))) def get_parameter_points(self): np = import_module('numpy') return np.linspace(self.start, self.end, num=self.nb_of_points) def get_points(self): np = import_module('numpy') param = self.get_parameter_points() fx = vectorized_lambdify([self.var], self.expr_x) fy = vectorized_lambdify([self.var], self.expr_y) fz = vectorized_lambdify([self.var], self.expr_z) list_x = fx(param) list_y = fy(param) list_z = fz(param) list_x = np.array(list_x, dtype=np.float64) list_y = np.array(list_y, dtype=np.float64) list_z = np.array(list_z, dtype=np.float64) list_x = np.ma.masked_invalid(list_x) list_y = np.ma.masked_invalid(list_y) list_z = np.ma.masked_invalid(list_z) self._xlim = (np.amin(list_x), np.amax(list_x)) self._ylim = (np.amin(list_y), np.amax(list_y)) self._zlim = (np.amin(list_z), np.amax(list_z)) return list_x, list_y, list_z ### Surfaces class SurfaceBaseSeries(BaseSeries): """A base class for 3D surfaces.""" is_3Dsurface = True def __init__(self): super().__init__() 
self.surface_color = None def get_color_array(self): np = import_module('numpy') c = self.surface_color if isinstance(c, Callable): f = np.vectorize(c) nargs = arity(c) if self.is_parametric: variables = list(map(centers_of_faces, self.get_parameter_meshes())) if nargs == 1: return f(variables[0]) elif nargs == 2: return f(*variables) variables = list(map(centers_of_faces, self.get_meshes())) if nargs == 1: return f(variables[0]) elif nargs == 2: return f(*variables[:2]) else: return f(*variables) else: if isinstance(self, SurfaceOver2DRangeSeries): return c*np.ones(min(self.nb_of_points_x, self.nb_of_points_y)) else: return c*np.ones(min(self.nb_of_points_u, self.nb_of_points_v)) class SurfaceOver2DRangeSeries(SurfaceBaseSeries): """Representation for a 3D surface consisting of a SymPy expression and 2D range.""" def __init__(self, expr, var_start_end_x, var_start_end_y, **kwargs): super().__init__() self.expr = sympify(expr) self.var_x = sympify(var_start_end_x[0]) self.start_x = float(var_start_end_x[1]) self.end_x = float(var_start_end_x[2]) self.var_y = sympify(var_start_end_y[0]) self.start_y = float(var_start_end_y[1]) self.end_y = float(var_start_end_y[2]) self.nb_of_points_x = kwargs.get('nb_of_points_x', 50) self.nb_of_points_y = kwargs.get('nb_of_points_y', 50) self.surface_color = kwargs.get('surface_color', None) self._xlim = (self.start_x, self.end_x) self._ylim = (self.start_y, self.end_y) def __str__(self): return ('cartesian surface: %s for' ' %s over %s and %s over %s') % ( str(self.expr), str(self.var_x), str((self.start_x, self.end_x)), str(self.var_y), str((self.start_y, self.end_y))) def get_meshes(self): np = import_module('numpy') mesh_x, mesh_y = np.meshgrid(np.linspace(self.start_x, self.end_x, num=self.nb_of_points_x), np.linspace(self.start_y, self.end_y, num=self.nb_of_points_y)) f = vectorized_lambdify((self.var_x, self.var_y), self.expr) mesh_z = f(mesh_x, mesh_y) mesh_z = np.array(mesh_z, dtype=np.float64) mesh_z = 
np.ma.masked_invalid(mesh_z) self._zlim = (np.amin(mesh_z), np.amax(mesh_z)) return mesh_x, mesh_y, mesh_z class ParametricSurfaceSeries(SurfaceBaseSeries): """Representation for a 3D surface consisting of three parametric SymPy expressions and a range.""" is_parametric = True def __init__( self, expr_x, expr_y, expr_z, var_start_end_u, var_start_end_v, **kwargs): super().__init__() self.expr_x = sympify(expr_x) self.expr_y = sympify(expr_y) self.expr_z = sympify(expr_z) self.var_u = sympify(var_start_end_u[0]) self.start_u = float(var_start_end_u[1]) self.end_u = float(var_start_end_u[2]) self.var_v = sympify(var_start_end_v[0]) self.start_v = float(var_start_end_v[1]) self.end_v = float(var_start_end_v[2]) self.nb_of_points_u = kwargs.get('nb_of_points_u', 50) self.nb_of_points_v = kwargs.get('nb_of_points_v', 50) self.surface_color = kwargs.get('surface_color', None) def __str__(self): return ('parametric cartesian surface: (%s, %s, %s) for' ' %s over %s and %s over %s') % ( str(self.expr_x), str(self.expr_y), str(self.expr_z), str(self.var_u), str((self.start_u, self.end_u)), str(self.var_v), str((self.start_v, self.end_v))) def get_parameter_meshes(self): np = import_module('numpy') return np.meshgrid(np.linspace(self.start_u, self.end_u, num=self.nb_of_points_u), np.linspace(self.start_v, self.end_v, num=self.nb_of_points_v)) def get_meshes(self): np = import_module('numpy') mesh_u, mesh_v = self.get_parameter_meshes() fx = vectorized_lambdify((self.var_u, self.var_v), self.expr_x) fy = vectorized_lambdify((self.var_u, self.var_v), self.expr_y) fz = vectorized_lambdify((self.var_u, self.var_v), self.expr_z) mesh_x = fx(mesh_u, mesh_v) mesh_y = fy(mesh_u, mesh_v) mesh_z = fz(mesh_u, mesh_v) mesh_x = np.array(mesh_x, dtype=np.float64) mesh_y = np.array(mesh_y, dtype=np.float64) mesh_z = np.array(mesh_z, dtype=np.float64) mesh_x = np.ma.masked_invalid(mesh_x) mesh_y = np.ma.masked_invalid(mesh_y) mesh_z = np.ma.masked_invalid(mesh_z) self._xlim = 
(np.amin(mesh_x), np.amax(mesh_x)) self._ylim = (np.amin(mesh_y), np.amax(mesh_y)) self._zlim = (np.amin(mesh_z), np.amax(mesh_z)) return mesh_x, mesh_y, mesh_z ### Contours class ContourSeries(BaseSeries): """Representation for a contour plot.""" # The code is mostly repetition of SurfaceOver2DRange. # Presently used in contour_plot function is_contour = True def __init__(self, expr, var_start_end_x, var_start_end_y): super().__init__() self.nb_of_points_x = 50 self.nb_of_points_y = 50 self.expr = sympify(expr) self.var_x = sympify(var_start_end_x[0]) self.start_x = float(var_start_end_x[1]) self.end_x = float(var_start_end_x[2]) self.var_y = sympify(var_start_end_y[0]) self.start_y = float(var_start_end_y[1]) self.end_y = float(var_start_end_y[2]) self.get_points = self.get_meshes self._xlim = (self.start_x, self.end_x) self._ylim = (self.start_y, self.end_y) def __str__(self): return ('contour: %s for ' '%s over %s and %s over %s') % ( str(self.expr), str(self.var_x), str((self.start_x, self.end_x)), str(self.var_y), str((self.start_y, self.end_y))) def get_meshes(self): np = import_module('numpy') mesh_x, mesh_y = np.meshgrid(np.linspace(self.start_x, self.end_x, num=self.nb_of_points_x), np.linspace(self.start_y, self.end_y, num=self.nb_of_points_y)) f = vectorized_lambdify((self.var_x, self.var_y), self.expr) return (mesh_x, mesh_y, f(mesh_x, mesh_y)) ############################################################################## # Backends ############################################################################## class BaseBackend: """Base class for all backends. A backend represents the plotting library, which implements the necessary functionalities in order to use SymPy plotting functions. How the plotting module works: 1. 
Whenever a plotting function is called, the provided expressions are processed and a list of instances of the `BaseSeries` class is created, containing the necessary information to plot the expressions (eg the expression, ranges, series name, ...). Eventually, these objects will generate the numerical data to be plotted. 2. A Plot object is instantiated, which stores the list of series and the main attributes of the plot (eg axis labels, title, ...). 3. When the "show" command is executed, a new backend is instantiated, which loops through each series object to generate and plot the numerical data. The backend is also going to set the axis labels, title, ..., according to the values stored in the Plot instance. The backend should check if it supports the data series that it's given (eg TextBackend supports only LineOver1DRange). It's the backend responsibility to know how to use the class of data series that it's given. Note that the current implementation of the `*Series` classes is "matplotlib-centric": the numerical data returned by the `get_points` and `get_meshes` methods is meant to be used directly by Matplotlib. Therefore, the new backend will have to pre-process the numerical data to make it compatible with the chosen plotting library. Keep in mind that future SymPy versions may improve the `*Series` classes in order to return numerical data "non-matplotlib-centric", hence if you code a new backend you have the responsibility to check if its working on each SymPy release. Please, explore the `MatplotlibBackend` source code to understand how a backend should be coded. Methods ======= In order to be used by SymPy plotting functions, a backend must implement the following methods: * `show(self)`: used to loop over the data series, generate the numerical data, plot it and set the axis labels, title, ... * save(self, path): used to save the current plot to the specified file path. 
    * close(self):
        used to close the current plot backend (note: some plotting
        libraries do not support this functionality. In that case, just
        raise a warning).

    See also
    ========

    MatplotlibBackend
    """
    def __init__(self, parent):
        super().__init__()
        self.parent = parent

    def show(self):
        raise NotImplementedError

    def save(self, path):
        raise NotImplementedError

    def close(self):
        raise NotImplementedError


# Don't have to check for the success of importing matplotlib in each case;
# we will only be using this backend if we can successfully import matplotlib
class MatplotlibBackend(BaseBackend):
    """ This class implements the functionalities to use Matplotlib with
    SymPy plotting functions.
    """
    def __init__(self, parent):
        super().__init__(parent)
        # Import lazily so SymPy works without matplotlib installed.
        self.matplotlib = import_module('matplotlib',
            import_kwargs={'fromlist': ['pyplot', 'cm', 'collections']},
            min_module_version='1.1.0', catch=(RuntimeError,))
        self.plt = self.matplotlib.pyplot
        self.cm = self.matplotlib.cm
        self.LineCollection = self.matplotlib.collections.LineCollection
        aspect = getattr(self.parent, 'aspect_ratio', 'auto')
        if aspect != 'auto':
            # Matplotlib expects a single height/width ratio.
            aspect = float(aspect[1]) / aspect[0]

        # A plain Plot renders into a single axes; a PlotGrid provides
        # its own grid shape and one series list per cell.
        if isinstance(self.parent, Plot):
            nrows, ncolumns = 1, 1
            series_list = [self.parent._series]
        elif isinstance(self.parent, PlotGrid):
            nrows, ncolumns = self.parent.nrows, self.parent.ncolumns
            series_list = self.parent._series

        self.ax = []
        self.fig = self.plt.figure(figsize=parent.size)

        for i, series in enumerate(series_list):
            # Each subplot must be uniformly 2D or uniformly 3D.
            are_3D = [s.is_3D for s in series]

            if any(are_3D) and not all(are_3D):
                raise ValueError('The matplotlib backend cannot mix 2D and 3D.')
            elif all(are_3D):
                # mpl_toolkits.mplot3d is necessary for
                # projection='3d'
                mpl_toolkits = import_module('mpl_toolkits', # noqa
                                     import_kwargs={'fromlist': ['mplot3d']})
                self.ax.append(self.fig.add_subplot(nrows, ncolumns, i + 1,
                                                    projection='3d',
                                                    aspect=aspect))
            elif not any(are_3D):
                self.ax.append(self.fig.add_subplot(nrows, ncolumns, i + 1,
                                                    aspect=aspect))
self.ax[i].spines['left'].set_position('zero') self.ax[i].spines['right'].set_color('none') self.ax[i].spines['bottom'].set_position('zero') self.ax[i].spines['top'].set_color('none') self.ax[i].xaxis.set_ticks_position('bottom') self.ax[i].yaxis.set_ticks_position('left') @staticmethod def get_segments(x, y, z=None): """ Convert two list of coordinates to a list of segments to be used with Matplotlib's LineCollection. Parameters ========== x: list List of x-coordinates y: list List of y-coordinates z: list List of z-coordinates for a 3D line. """ np = import_module('numpy') if z is not None: dim = 3 points = (x, y, z) else: dim = 2 points = (x, y) points = np.ma.array(points).T.reshape(-1, 1, dim) return np.ma.concatenate([points[:-1], points[1:]], axis=1) def _process_series(self, series, ax, parent): np = import_module('numpy') mpl_toolkits = import_module( 'mpl_toolkits', import_kwargs={'fromlist': ['mplot3d']}) # XXX Workaround for matplotlib issue # https://github.com/matplotlib/matplotlib/issues/17130 xlims, ylims, zlims = [], [], [] for s in series: # Create the collections if s.is_2Dline: x, y = s.get_data() if (isinstance(s.line_color, (int, float)) or callable(s.line_color)): segments = self.get_segments(x, y) collection = self.LineCollection(segments) collection.set_array(s.get_color_array()) ax.add_collection(collection) else: lbl = _str_or_latex(s.label) line, = ax.plot(x, y, label=lbl, color=s.line_color) elif s.is_contour: ax.contour(*s.get_meshes()) elif s.is_3Dline: x, y, z = s.get_data() if (isinstance(s.line_color, (int, float)) or callable(s.line_color)): art3d = mpl_toolkits.mplot3d.art3d segments = self.get_segments(x, y, z) collection = art3d.Line3DCollection(segments) collection.set_array(s.get_color_array()) ax.add_collection(collection) else: lbl = _str_or_latex(s.label) ax.plot(x, y, z, label=lbl, color=s.line_color) xlims.append(s._xlim) ylims.append(s._ylim) zlims.append(s._zlim) elif s.is_3Dsurface: x, y, z = s.get_meshes() collection 
= ax.plot_surface(x, y, z, cmap=getattr(self.cm, 'viridis', self.cm.jet), rstride=1, cstride=1, linewidth=0.1) if isinstance(s.surface_color, (float, int, Callable)): color_array = s.get_color_array() color_array = color_array.reshape(color_array.size) collection.set_array(color_array) else: collection.set_color(s.surface_color) xlims.append(s._xlim) ylims.append(s._ylim) zlims.append(s._zlim) elif s.is_implicit: points = s.get_raster() if len(points) == 2: # interval math plotting x, y = _matplotlib_list(points[0]) ax.fill(x, y, facecolor=s.line_color, edgecolor='None') else: # use contourf or contour depending on whether it is # an inequality or equality. # XXX: ``contour`` plots multiple lines. Should be fixed. ListedColormap = self.matplotlib.colors.ListedColormap colormap = ListedColormap(["white", s.line_color]) xarray, yarray, zarray, plot_type = points if plot_type == 'contour': ax.contour(xarray, yarray, zarray, cmap=colormap, label=_str_or_latex(s.label)) else: ax.contourf(xarray, yarray, zarray, cmap=colormap, label=_str_or_latex(s.label)) else: raise NotImplementedError( '{} is not supported in the SymPy plotting module ' 'with matplotlib backend. Please report this issue.' .format(ax)) Axes3D = mpl_toolkits.mplot3d.Axes3D if not isinstance(ax, Axes3D): ax.autoscale_view( scalex=ax.get_autoscalex_on(), scaley=ax.get_autoscaley_on()) else: # XXX Workaround for matplotlib issue # https://github.com/matplotlib/matplotlib/issues/17130 if xlims: xlims = np.array(xlims) xlim = (np.amin(xlims[:, 0]), np.amax(xlims[:, 1])) ax.set_xlim(xlim) else: ax.set_xlim([0, 1]) if ylims: ylims = np.array(ylims) ylim = (np.amin(ylims[:, 0]), np.amax(ylims[:, 1])) ax.set_ylim(ylim) else: ax.set_ylim([0, 1]) if zlims: zlims = np.array(zlims) zlim = (np.amin(zlims[:, 0]), np.amax(zlims[:, 1])) ax.set_zlim(zlim) else: ax.set_zlim([0, 1]) # Set global options. # TODO The 3D stuff # XXX The order of those is important. 
        # Apply the global axes options from the Plot object.  Axis
        # scales and spine positions are 2D-only (skipped for Axes3D).
        if parent.xscale and not isinstance(ax, Axes3D):
            ax.set_xscale(parent.xscale)
        if parent.yscale and not isinstance(ax, Axes3D):
            ax.set_yscale(parent.yscale)
        if not isinstance(ax, Axes3D) or self.matplotlib.__version__ >= '1.2.0':
            # XXX in the distant future remove this check
            ax.set_autoscale_on(parent.autoscale)
        if parent.axis_center:
            val = parent.axis_center
            if isinstance(ax, Axes3D):
                pass
            elif val == 'center':
                ax.spines['left'].set_position('center')
                ax.spines['bottom'].set_position('center')
            elif val == 'auto':
                # Put a spine on the data origin only when the origin is
                # inside the plotted range; otherwise center it.
                xl, xh = ax.get_xlim()
                yl, yh = ax.get_ylim()
                pos_left = ('data', 0) if xl*xh <= 0 else 'center'
                pos_bottom = ('data', 0) if yl*yh <= 0 else 'center'
                ax.spines['left'].set_position(pos_left)
                ax.spines['bottom'].set_position(pos_bottom)
            else:
                # Explicit (x, y) coordinates for the axis center.
                ax.spines['left'].set_position(('data', val[0]))
                ax.spines['bottom'].set_position(('data', val[1]))
        if not parent.axis:
            ax.set_axis_off()
        if parent.legend:
            # NOTE(review): relies on ax.legend() returning a truthy value
            # only when a legend could be built — confirm this holds across
            # matplotlib versions.
            if ax.legend():
                ax.legend_.set_visible(parent.legend)
        if parent.margin:
            ax.set_xmargin(parent.margin)
            ax.set_ymargin(parent.margin)
        if parent.title:
            ax.set_title(parent.title)
        if parent.xlabel:
            xlbl = _str_or_latex(parent.xlabel)
            ax.set_xlabel(xlbl, position=(1, 0))
        if parent.ylabel:
            ylbl = _str_or_latex(parent.ylabel)
            ax.set_ylabel(ylbl, position=(0, 1))
        if isinstance(ax, Axes3D) and parent.zlabel:
            zlbl = _str_or_latex(parent.zlabel)
            ax.set_zlabel(zlbl, position=(0, 1))
        if parent.annotations:
            for a in parent.annotations:
                ax.annotate(**a)
        if parent.markers:
            for marker in parent.markers:
                # make a copy of the marker dictionary
                # so that it doesn't get altered
                m = marker.copy()
                args = m.pop('args')
                ax.plot(*args, **m)
        if parent.rectangles:
            for r in parent.rectangles:
                rect = self.matplotlib.patches.Rectangle(**r)
                ax.add_patch(rect)
        if parent.fill:
            ax.fill_between(**parent.fill)

        # xlim and ylim should always be set at last so that plot limits
        # do not get altered during the process.
        if parent.xlim:
            ax.set_xlim(parent.xlim)
        if parent.ylim:
            ax.set_ylim(parent.ylim)

    def process_series(self):
        """
        Iterates over every ``Plot`` object and further calls
        _process_series()
        """
        parent = self.parent
        # A plain Plot holds one list of series; a PlotGrid holds one
        # list per subplot.
        if isinstance(parent, Plot):
            series_list = [parent._series]
        else:
            series_list = parent._series

        for i, (series, ax) in enumerate(zip(series_list, self.ax)):
            # For a PlotGrid the per-axes options come from the child
            # Plot object, not from the grid itself.
            if isinstance(self.parent, PlotGrid):
                parent = self.parent.args[i]
            self._process_series(series, ax, parent)

    def show(self):
        """Generate all series and display the figure (honours the
        module-level ``_show`` flag used to suppress GUI output)."""
        self.process_series()
        #TODO after fixing https://github.com/ipython/ipython/issues/1255
        # you can uncomment the next line and remove the pyplot.show() call
        #self.fig.show()
        if _show:
            self.fig.tight_layout()
            self.plt.show()
        else:
            self.close()

    def save(self, path):
        """Generate all series and save the figure to *path*."""
        self.process_series()
        self.fig.savefig(path)

    def close(self):
        """Close the underlying matplotlib figure."""
        self.plt.close(self.fig)


class TextBackend(BaseBackend):
    # Fallback backend: renders a single cartesian line series as an
    # ASCII plot via textplot().
    def __init__(self, parent):
        super().__init__(parent)

    def show(self):
        if not _show:
            return
        if len(self.parent._series) != 1:
            raise ValueError(
                'The TextBackend supports only one graph per Plot.')
        elif not isinstance(self.parent._series[0], LineOver1DRangeSeries):
            raise ValueError(
                'The TextBackend supports only expressions over a 1D range')
        else:
            ser = self.parent._series[0]
            textplot(ser.expr, ser.start, ser.end)

    def close(self):
        pass


class DefaultBackend(BaseBackend):
    # Backend selector: uses matplotlib when it can be imported,
    # otherwise falls back to the text backend.
    def __new__(cls, parent):
        matplotlib = import_module('matplotlib', min_module_version='1.1.0',
                                   catch=(RuntimeError,))
        if matplotlib:
            return MatplotlibBackend(parent)
        else:
            return TextBackend(parent)


# Registry mapping backend names to backend classes.
plot_backends = {
    'matplotlib': MatplotlibBackend,
    'text': TextBackend,
    'default': DefaultBackend
}


##############################################################################
# Finding the centers of line segments or mesh faces
##############################################################################


def centers_of_segments(array):
    """Return the midpoints of consecutive values of a 1-D array."""
    np = import_module('numpy')
    return np.mean(np.vstack((array[:-1], array[1:])), 0)


def centers_of_faces(array):
np = import_module('numpy') return np.mean(np.dstack((array[:-1, :-1], array[1:, :-1], array[:-1, 1:], array[:-1, :-1], )), 2) def flat(x, y, z, eps=1e-3): """Checks whether three points are almost collinear""" np = import_module('numpy') # Workaround plotting piecewise (#8577): # workaround for `lambdify` in `.experimental_lambdify` fails # to return numerical values in some cases. Lower-level fix # in `lambdify` is possible. vector_a = (x - y).astype(np.float64) vector_b = (z - y).astype(np.float64) dot_product = np.dot(vector_a, vector_b) vector_a_norm = np.linalg.norm(vector_a) vector_b_norm = np.linalg.norm(vector_b) cos_theta = dot_product / (vector_a_norm * vector_b_norm) return abs(cos_theta + 1) < eps def _matplotlib_list(interval_list): """ Returns lists for matplotlib ``fill`` command from a list of bounding rectangular intervals """ xlist = [] ylist = [] if len(interval_list): for intervals in interval_list: intervalx = intervals[0] intervaly = intervals[1] xlist.extend([intervalx.start, intervalx.start, intervalx.end, intervalx.end, None]) ylist.extend([intervaly.start, intervaly.end, intervaly.end, intervaly.start, None]) else: #XXX Ugly hack. Matplotlib does not accept empty lists for ``fill`` xlist.extend((None, None, None, None)) ylist.extend((None, None, None, None)) return xlist, ylist ####New API for plotting module #### # TODO: Add color arrays for plots. # TODO: Add more plotting options for 3d plots. # TODO: Adaptive sampling for 3D plots. def plot(*args, show=True, **kwargs): """Plots a function of a single variable as a curve. Parameters ========== args : The first argument is the expression representing the function of single variable to be plotted. The last argument is a 3-tuple denoting the range of the free variable. e.g. ``(x, 0, 5)`` Typical usage examples are in the followings: - Plotting a single expression with a single range. ``plot(expr, range, **kwargs)`` - Plotting a single expression with the default range (-10, 10). 
``plot(expr, **kwargs)`` - Plotting multiple expressions with a single range. ``plot(expr1, expr2, ..., range, **kwargs)`` - Plotting multiple expressions with multiple ranges. ``plot((expr1, range1), (expr2, range2), ..., **kwargs)`` It is best practice to specify range explicitly because default range may change in the future if a more advanced default range detection algorithm is implemented. show : bool, optional The default value is set to ``True``. Set show to ``False`` and the function will not display the plot. The returned instance of the ``Plot`` class can then be used to save or display the plot by calling the ``save()`` and ``show()`` methods respectively. line_color : string, or float, or function, optional Specifies the color for the plot. See ``Plot`` to see how to set color for the plots. Note that by setting ``line_color``, it would be applied simultaneously to all the series. title : str, optional Title of the plot. It is set to the latex representation of the expression, if the plot has only one expression. label : str, optional The label of the expression in the plot. It will be used when called with ``legend``. Default is the name of the expression. e.g. ``sin(x)`` xlabel : str or expression, optional Label for the x-axis. ylabel : str or expression, optional Label for the y-axis. xscale : 'linear' or 'log', optional Sets the scaling of the x-axis. yscale : 'linear' or 'log', optional Sets the scaling of the y-axis. axis_center : (float, float), optional Tuple of two floats denoting the coordinates of the center or {'center', 'auto'} xlim : (float, float), optional Denotes the x-axis limits, ``(min, max)```. ylim : (float, float), optional Denotes the y-axis limits, ``(min, max)```. annotations : list, optional A list of dictionaries specifying the type of annotation required. The keys in the dictionary should be equivalent to the arguments of the matplotlib's annotate() function. 
markers : list, optional A list of dictionaries specifying the type the markers required. The keys in the dictionary should be equivalent to the arguments of the matplotlib's plot() function along with the marker related keyworded arguments. rectangles : list, optional A list of dictionaries specifying the dimensions of the rectangles to be plotted. The keys in the dictionary should be equivalent to the arguments of the matplotlib's patches.Rectangle class. fill : dict, optional A dictionary specifying the type of color filling required in the plot. The keys in the dictionary should be equivalent to the arguments of the matplotlib's fill_between() function. adaptive : bool, optional The default value is set to ``True``. Set adaptive to ``False`` and specify ``nb_of_points`` if uniform sampling is required. The plotting uses an adaptive algorithm which samples recursively to accurately plot. The adaptive algorithm uses a random point near the midpoint of two points that has to be further sampled. Hence the same plots can appear slightly different. depth : int, optional Recursion depth of the adaptive algorithm. A depth of value ``n`` samples a maximum of `2^{n}` points. If the ``adaptive`` flag is set to ``False``, this will be ignored. nb_of_points : int, optional Used when the ``adaptive`` is set to ``False``. The function is uniformly sampled at ``nb_of_points`` number of points. If the ``adaptive`` flag is set to ``True``, this will be ignored. size : (float, float), optional A tuple in the form (width, height) in inches to specify the size of the overall figure. The default value is set to ``None``, meaning the size will be set by the default backend. Examples ======== .. plot:: :context: close-figs :format: doctest :include-source: True >>> from sympy import symbols >>> from sympy.plotting import plot >>> x = symbols('x') Single Plot .. 
plot:: :context: close-figs :format: doctest :include-source: True >>> plot(x**2, (x, -5, 5)) Plot object containing: [0]: cartesian line: x**2 for x over (-5.0, 5.0) Multiple plots with single range. .. plot:: :context: close-figs :format: doctest :include-source: True >>> plot(x, x**2, x**3, (x, -5, 5)) Plot object containing: [0]: cartesian line: x for x over (-5.0, 5.0) [1]: cartesian line: x**2 for x over (-5.0, 5.0) [2]: cartesian line: x**3 for x over (-5.0, 5.0) Multiple plots with different ranges. .. plot:: :context: close-figs :format: doctest :include-source: True >>> plot((x**2, (x, -6, 6)), (x, (x, -5, 5))) Plot object containing: [0]: cartesian line: x**2 for x over (-6.0, 6.0) [1]: cartesian line: x for x over (-5.0, 5.0) No adaptive sampling. .. plot:: :context: close-figs :format: doctest :include-source: True >>> plot(x**2, adaptive=False, nb_of_points=400) Plot object containing: [0]: cartesian line: x**2 for x over (-10.0, 10.0) See Also ======== Plot, LineOver1DRangeSeries """ args = list(map(sympify, args)) free = set() for a in args: if isinstance(a, Expr): free |= a.free_symbols if len(free) > 1: raise ValueError( 'The same variable should be used in all ' 'univariate expressions being plotted.') x = free.pop() if free else Symbol('x') kwargs.setdefault('xlabel', x) kwargs.setdefault('ylabel', Function('f')(x)) series = [] plot_expr = check_arguments(args, 1, 1) series = [LineOver1DRangeSeries(*arg, **kwargs) for arg in plot_expr] plots = Plot(*series, **kwargs) if show: plots.show() return plots def plot_parametric(*args, show=True, **kwargs): """ Plots a 2D parametric curve. 
Parameters ========== args Common specifications are: - Plotting a single parametric curve with a range ``plot_parametric((expr_x, expr_y), range)`` - Plotting multiple parametric curves with the same range ``plot_parametric((expr_x, expr_y), ..., range)`` - Plotting multiple parametric curves with different ranges ``plot_parametric((expr_x, expr_y, range), ...)`` ``expr_x`` is the expression representing $x$ component of the parametric function. ``expr_y`` is the expression representing $y$ component of the parametric function. ``range`` is a 3-tuple denoting the parameter symbol, start and stop. For example, ``(u, 0, 5)``. If the range is not specified, then a default range of (-10, 10) is used. However, if the arguments are specified as ``(expr_x, expr_y, range), ...``, you must specify the ranges for each expressions manually. Default range may change in the future if a more advanced algorithm is implemented. adaptive : bool, optional Specifies whether to use the adaptive sampling or not. The default value is set to ``True``. Set adaptive to ``False`` and specify ``nb_of_points`` if uniform sampling is required. depth : int, optional The recursion depth of the adaptive algorithm. A depth of value $n$ samples a maximum of $2^n$ points. nb_of_points : int, optional Used when the ``adaptive`` flag is set to ``False``. Specifies the number of the points used for the uniform sampling. line_color : string, or float, or function, optional Specifies the color for the plot. See ``Plot`` to see how to set color for the plots. Note that by setting ``line_color``, it would be applied simultaneously to all the series. label : str, optional The label of the expression in the plot. It will be used when called with ``legend``. Default is the name of the expression. e.g. ``sin(x)`` xlabel : str, optional Label for the x-axis. ylabel : str, optional Label for the y-axis. xscale : 'linear' or 'log', optional Sets the scaling of the x-axis. 
yscale : 'linear' or 'log', optional Sets the scaling of the y-axis. axis_center : (float, float), optional Tuple of two floats denoting the coordinates of the center or {'center', 'auto'} xlim : (float, float), optional Denotes the x-axis limits, ``(min, max)```. ylim : (float, float), optional Denotes the y-axis limits, ``(min, max)```. size : (float, float), optional A tuple in the form (width, height) in inches to specify the size of the overall figure. The default value is set to ``None``, meaning the size will be set by the default backend. Examples ======== .. plot:: :context: reset :format: doctest :include-source: True >>> from sympy import plot_parametric, symbols, cos, sin >>> u = symbols('u') A parametric plot with a single expression: .. plot:: :context: close-figs :format: doctest :include-source: True >>> plot_parametric((cos(u), sin(u)), (u, -5, 5)) Plot object containing: [0]: parametric cartesian line: (cos(u), sin(u)) for u over (-5.0, 5.0) A parametric plot with multiple expressions with the same range: .. plot:: :context: close-figs :format: doctest :include-source: True >>> plot_parametric((cos(u), sin(u)), (u, cos(u)), (u, -10, 10)) Plot object containing: [0]: parametric cartesian line: (cos(u), sin(u)) for u over (-10.0, 10.0) [1]: parametric cartesian line: (u, cos(u)) for u over (-10.0, 10.0) A parametric plot with multiple expressions with different ranges for each curve: .. plot:: :context: close-figs :format: doctest :include-source: True >>> plot_parametric((cos(u), sin(u), (u, -5, 5)), ... (cos(u), u, (u, -5, 5))) Plot object containing: [0]: parametric cartesian line: (cos(u), sin(u)) for u over (-5.0, 5.0) [1]: parametric cartesian line: (cos(u), u) for u over (-5.0, 5.0) Notes ===== The plotting uses an adaptive algorithm which samples recursively to accurately plot the curve. The adaptive algorithm uses a random point near the midpoint of two points that has to be further sampled. 
Hence, repeating the same plot command can give slightly different results because of the random sampling. If there are multiple plots, then the same optional arguments are applied to all the plots drawn in the same canvas. If you want to set these options separately, you can index the returned ``Plot`` object and set it. For example, when you specify ``line_color`` once, it would be applied simultaneously to both series. .. plot:: :context: close-figs :format: doctest :include-source: True >>> from sympy import pi >>> expr1 = (u, cos(2*pi*u)/2 + 1/2) >>> expr2 = (u, sin(2*pi*u)/2 + 1/2) >>> p = plot_parametric(expr1, expr2, (u, 0, 1), line_color='blue') If you want to specify the line color for the specific series, you should index each item and apply the property manually. .. plot:: :context: close-figs :format: doctest :include-source: True >>> p[0].line_color = 'red' >>> p.show() See Also ======== Plot, Parametric2DLineSeries """ args = list(map(sympify, args)) series = [] plot_expr = check_arguments(args, 2, 1) series = [Parametric2DLineSeries(*arg, **kwargs) for arg in plot_expr] plots = Plot(*series, **kwargs) if show: plots.show() return plots def plot3d_parametric_line(*args, show=True, **kwargs): """ Plots a 3D parametric line plot. Usage ===== Single plot: ``plot3d_parametric_line(expr_x, expr_y, expr_z, range, **kwargs)`` If the range is not specified, then a default range of (-10, 10) is used. Multiple plots. ``plot3d_parametric_line((expr_x, expr_y, expr_z, range), ..., **kwargs)`` Ranges have to be specified for every expression. Default range may change in the future if a more advanced default range detection algorithm is implemented. Arguments ========= ``expr_x`` : Expression representing the function along x. ``expr_y`` : Expression representing the function along y. ``expr_z`` : Expression representing the function along z. ``range``: ``(u, 0, 5)``, A 3-tuple denoting the range of the parameter variable. 
Keyword Arguments ================= Arguments for ``Parametric3DLineSeries`` class. ``nb_of_points``: The range is uniformly sampled at ``nb_of_points`` number of points. Aesthetics: ``line_color``: string, or float, or function, optional Specifies the color for the plot. See ``Plot`` to see how to set color for the plots. Note that by setting ``line_color``, it would be applied simultaneously to all the series. ``label``: str The label to the plot. It will be used when called with ``legend=True`` to denote the function with the given label in the plot. If there are multiple plots, then the same series arguments are applied to all the plots. If you want to set these options separately, you can index the returned ``Plot`` object and set it. Arguments for ``Plot`` class. ``title`` : str. Title of the plot. ``size`` : (float, float), optional A tuple in the form (width, height) in inches to specify the size of the overall figure. The default value is set to ``None``, meaning the size will be set by the default backend. Examples ======== .. plot:: :context: reset :format: doctest :include-source: True >>> from sympy import symbols, cos, sin >>> from sympy.plotting import plot3d_parametric_line >>> u = symbols('u') Single plot. .. plot:: :context: close-figs :format: doctest :include-source: True >>> plot3d_parametric_line(cos(u), sin(u), u, (u, -5, 5)) Plot object containing: [0]: 3D parametric cartesian line: (cos(u), sin(u), u) for u over (-5.0, 5.0) Multiple plots. .. plot:: :context: close-figs :format: doctest :include-source: True >>> plot3d_parametric_line((cos(u), sin(u), u, (u, -5, 5)), ... 
(sin(u), u**2, u, (u, -5, 5))) Plot object containing: [0]: 3D parametric cartesian line: (cos(u), sin(u), u) for u over (-5.0, 5.0) [1]: 3D parametric cartesian line: (sin(u), u**2, u) for u over (-5.0, 5.0) See Also ======== Plot, Parametric3DLineSeries """ args = list(map(sympify, args)) series = [] plot_expr = check_arguments(args, 3, 1) series = [Parametric3DLineSeries(*arg, **kwargs) for arg in plot_expr] kwargs.setdefault("xlabel", "x") kwargs.setdefault("ylabel", "y") kwargs.setdefault("zlabel", "z") plots = Plot(*series, **kwargs) if show: plots.show() return plots def plot3d(*args, show=True, **kwargs): """ Plots a 3D surface plot. Usage ===== Single plot ``plot3d(expr, range_x, range_y, **kwargs)`` If the ranges are not specified, then a default range of (-10, 10) is used. Multiple plot with the same range. ``plot3d(expr1, expr2, range_x, range_y, **kwargs)`` If the ranges are not specified, then a default range of (-10, 10) is used. Multiple plots with different ranges. ``plot3d((expr1, range_x, range_y), (expr2, range_x, range_y), ..., **kwargs)`` Ranges have to be specified for every expression. Default range may change in the future if a more advanced default range detection algorithm is implemented. Arguments ========= ``expr`` : Expression representing the function along x. ``range_x``: (x, 0, 5), A 3-tuple denoting the range of the x variable. ``range_y``: (y, 0, 5), A 3-tuple denoting the range of the y variable. Keyword Arguments ================= Arguments for ``SurfaceOver2DRangeSeries`` class: ``nb_of_points_x``: int. The x range is sampled uniformly at ``nb_of_points_x`` of points. ``nb_of_points_y``: int. The y range is sampled uniformly at ``nb_of_points_y`` of points. Aesthetics: ``surface_color``: Function which returns a float. Specifies the color for the surface of the plot. See ``sympy.plotting.Plot`` for more details. If there are multiple plots, then the same series arguments are applied to all the plots. 
If you want to set these options separately, you can index the returned ``Plot`` object and set it. Arguments for ``Plot`` class: ``title`` : str. Title of the plot. ``size`` : (float, float), optional A tuple in the form (width, height) in inches to specify the size of the overall figure. The default value is set to ``None``, meaning the size will be set by the default backend. Examples ======== .. plot:: :context: reset :format: doctest :include-source: True >>> from sympy import symbols >>> from sympy.plotting import plot3d >>> x, y = symbols('x y') Single plot .. plot:: :context: close-figs :format: doctest :include-source: True >>> plot3d(x*y, (x, -5, 5), (y, -5, 5)) Plot object containing: [0]: cartesian surface: x*y for x over (-5.0, 5.0) and y over (-5.0, 5.0) Multiple plots with same range .. plot:: :context: close-figs :format: doctest :include-source: True >>> plot3d(x*y, -x*y, (x, -5, 5), (y, -5, 5)) Plot object containing: [0]: cartesian surface: x*y for x over (-5.0, 5.0) and y over (-5.0, 5.0) [1]: cartesian surface: -x*y for x over (-5.0, 5.0) and y over (-5.0, 5.0) Multiple plots with different ranges. .. plot:: :context: close-figs :format: doctest :include-source: True >>> plot3d((x**2 + y**2, (x, -5, 5), (y, -5, 5)), ... 
(x*y, (x, -3, 3), (y, -3, 3))) Plot object containing: [0]: cartesian surface: x**2 + y**2 for x over (-5.0, 5.0) and y over (-5.0, 5.0) [1]: cartesian surface: x*y for x over (-3.0, 3.0) and y over (-3.0, 3.0) See Also ======== Plot, SurfaceOver2DRangeSeries """ args = list(map(sympify, args)) series = [] plot_expr = check_arguments(args, 1, 2) series = [SurfaceOver2DRangeSeries(*arg, **kwargs) for arg in plot_expr] kwargs.setdefault("xlabel", series[0].var_x) kwargs.setdefault("ylabel", series[0].var_y) kwargs.setdefault("zlabel", Function('f')(series[0].var_x, series[0].var_y)) plots = Plot(*series, **kwargs) if show: plots.show() return plots def plot3d_parametric_surface(*args, show=True, **kwargs): """ Plots a 3D parametric surface plot. Explanation =========== Single plot. ``plot3d_parametric_surface(expr_x, expr_y, expr_z, range_u, range_v, **kwargs)`` If the ranges is not specified, then a default range of (-10, 10) is used. Multiple plots. ``plot3d_parametric_surface((expr_x, expr_y, expr_z, range_u, range_v), ..., **kwargs)`` Ranges have to be specified for every expression. Default range may change in the future if a more advanced default range detection algorithm is implemented. Arguments ========= ``expr_x``: Expression representing the function along ``x``. ``expr_y``: Expression representing the function along ``y``. ``expr_z``: Expression representing the function along ``z``. ``range_u``: ``(u, 0, 5)``, A 3-tuple denoting the range of the ``u`` variable. ``range_v``: ``(v, 0, 5)``, A 3-tuple denoting the range of the v variable. Keyword Arguments ================= Arguments for ``ParametricSurfaceSeries`` class: ``nb_of_points_u``: int. The ``u`` range is sampled uniformly at ``nb_of_points_v`` of points ``nb_of_points_y``: int. The ``v`` range is sampled uniformly at ``nb_of_points_y`` of points Aesthetics: ``surface_color``: Function which returns a float. Specifies the color for the surface of the plot. 
See ``sympy.plotting.Plot`` for more details. If there are multiple plots, then the same series arguments are applied for all the plots. If you want to set these options separately, you can index the returned ``Plot`` object and set it. Arguments for ``Plot`` class: ``title`` : str. Title of the plot. ``size`` : (float, float), optional A tuple in the form (width, height) in inches to specify the size of the overall figure. The default value is set to ``None``, meaning the size will be set by the default backend. Examples ======== .. plot:: :context: reset :format: doctest :include-source: True >>> from sympy import symbols, cos, sin >>> from sympy.plotting import plot3d_parametric_surface >>> u, v = symbols('u v') Single plot. .. plot:: :context: close-figs :format: doctest :include-source: True >>> plot3d_parametric_surface(cos(u + v), sin(u - v), u - v, ... (u, -5, 5), (v, -5, 5)) Plot object containing: [0]: parametric cartesian surface: (cos(u + v), sin(u - v), u - v) for u over (-5.0, 5.0) and v over (-5.0, 5.0) See Also ======== Plot, ParametricSurfaceSeries """ args = list(map(sympify, args)) series = [] plot_expr = check_arguments(args, 3, 2) series = [ParametricSurfaceSeries(*arg, **kwargs) for arg in plot_expr] kwargs.setdefault("xlabel", "x") kwargs.setdefault("ylabel", "y") kwargs.setdefault("zlabel", "z") plots = Plot(*series, **kwargs) if show: plots.show() return plots def plot_contour(*args, show=True, **kwargs): """ Draws contour plot of a function Usage ===== Single plot ``plot_contour(expr, range_x, range_y, **kwargs)`` If the ranges are not specified, then a default range of (-10, 10) is used. Multiple plot with the same range. ``plot_contour(expr1, expr2, range_x, range_y, **kwargs)`` If the ranges are not specified, then a default range of (-10, 10) is used. Multiple plots with different ranges. ``plot_contour((expr1, range_x, range_y), (expr2, range_x, range_y), ..., **kwargs)`` Ranges have to be specified for every expression. 
Default range may change in the future if a more advanced default range detection algorithm is implemented. Arguments ========= ``expr`` : Expression representing the function along x. ``range_x``: (x, 0, 5), A 3-tuple denoting the range of the x variable. ``range_y``: (y, 0, 5), A 3-tuple denoting the range of the y variable. Keyword Arguments ================= Arguments for ``ContourSeries`` class: ``nb_of_points_x``: int. The x range is sampled uniformly at ``nb_of_points_x`` of points. ``nb_of_points_y``: int. The y range is sampled uniformly at ``nb_of_points_y`` of points. Aesthetics: ``surface_color``: Function which returns a float. Specifies the color for the surface of the plot. See ``sympy.plotting.Plot`` for more details. If there are multiple plots, then the same series arguments are applied to all the plots. If you want to set these options separately, you can index the returned ``Plot`` object and set it. Arguments for ``Plot`` class: ``title`` : str. Title of the plot. ``size`` : (float, float), optional A tuple in the form (width, height) in inches to specify the size of the overall figure. The default value is set to ``None``, meaning the size will be set by the default backend. See Also ======== Plot, ContourSeries """ args = list(map(sympify, args)) plot_expr = check_arguments(args, 1, 2) series = [ContourSeries(*arg) for arg in plot_expr] plot_contours = Plot(*series, **kwargs) if len(plot_expr[0].free_symbols) > 2: raise ValueError('Contour Plot cannot Plot for more than two variables.') if show: plot_contours.show() return plot_contours def check_arguments(args, expr_len, nb_of_free_symbols): """ Checks the arguments and converts into tuples of the form (exprs, ranges). Examples ======== .. 
plot:: :context: reset :format: doctest :include-source: True >>> from sympy import cos, sin, symbols >>> from sympy.plotting.plot import check_arguments >>> x = symbols('x') >>> check_arguments([cos(x), sin(x)], 2, 1) [(cos(x), sin(x), (x, -10, 10))] >>> check_arguments([x, x**2], 1, 1) [(x, (x, -10, 10)), (x**2, (x, -10, 10))] """ if not args: return [] if expr_len > 1 and isinstance(args[0], Expr): # Multiple expressions same range. # The arguments are tuples when the expression length is # greater than 1. if len(args) < expr_len: raise ValueError("len(args) should not be less than expr_len") for i in range(len(args)): if isinstance(args[i], Tuple): break else: i = len(args) + 1 exprs = Tuple(*args[:i]) free_symbols = list(set().union(*[e.free_symbols for e in exprs])) if len(args) == expr_len + nb_of_free_symbols: #Ranges given plots = [exprs + Tuple(*args[expr_len:])] else: default_range = Tuple(-10, 10) ranges = [] for symbol in free_symbols: ranges.append(Tuple(symbol) + default_range) for i in range(len(free_symbols) - nb_of_free_symbols): ranges.append(Tuple(Dummy()) + default_range) plots = [exprs + Tuple(*ranges)] return plots if isinstance(args[0], Expr) or (isinstance(args[0], Tuple) and len(args[0]) == expr_len and expr_len != 3): # Cannot handle expressions with number of expression = 3. It is # not possible to differentiate between expressions and ranges. 
#Series of plots with same range for i in range(len(args)): if isinstance(args[i], Tuple) and len(args[i]) != expr_len: break if not isinstance(args[i], Tuple): args[i] = Tuple(args[i]) else: i = len(args) + 1 exprs = args[:i] assert all(isinstance(e, Expr) for expr in exprs for e in expr) free_symbols = list(set().union(*[e.free_symbols for expr in exprs for e in expr])) if len(free_symbols) > nb_of_free_symbols: raise ValueError("The number of free_symbols in the expression " "is greater than %d" % nb_of_free_symbols) if len(args) == i + nb_of_free_symbols and isinstance(args[i], Tuple): ranges = Tuple(*[range_expr for range_expr in args[ i:i + nb_of_free_symbols]]) plots = [expr + ranges for expr in exprs] return plots else: # Use default ranges. default_range = Tuple(-10, 10) ranges = [] for symbol in free_symbols: ranges.append(Tuple(symbol) + default_range) for i in range(nb_of_free_symbols - len(free_symbols)): ranges.append(Tuple(Dummy()) + default_range) ranges = Tuple(*ranges) plots = [expr + ranges for expr in exprs] return plots elif isinstance(args[0], Tuple) and len(args[0]) == expr_len + nb_of_free_symbols: # Multiple plots with different ranges. for arg in args: for i in range(expr_len): if not isinstance(arg[i], Expr): raise ValueError("Expected an expression, given %s" % str(arg[i])) for i in range(nb_of_free_symbols): if not len(arg[i + expr_len]) == 3: raise ValueError("The ranges should be a tuple of " "length 3, got %s" % str(arg[i + expr_len])) return args
35.905488
128
0.565721
from collections.abc import Callable from sympy.core.basic import Basic from sympy.core.containers import Tuple from sympy.core.expr import Expr from sympy.core.function import arity, Function from sympy.core.symbol import (Dummy, Symbol) from sympy.core.sympify import sympify from sympy.external import import_module from sympy.printing.latex import latex from sympy.utilities.exceptions import sympy_deprecation_warning from sympy.utilities.iterables import is_sequence from .experimental_lambdify import (vectorized_lambdify, lambdify) from sympy.plotting.textplot import textplot _show = True def unset_show(): global _show _show = False def _str_or_latex(label): if isinstance(label, Basic): return latex(label, mode='inline') return str(label) ############################################################################## # The public interface ############################################################################## class Plot: def __init__(self, *args, title=None, xlabel=None, ylabel=None, zlabel=None, aspect_ratio='auto', xlim=None, ylim=None, axis_center='auto', axis=True, xscale='linear', yscale='linear', legend=False, autoscale=True, margin=0, annotations=None, markers=None, rectangles=None, fill=None, backend='default', size=None, **kwargs): super().__init__() # Options for the graph as a whole. # The possible values for each option are described in the docstring of # Plot. They are based purely on convention, no checking is done. self.title = title self.xlabel = xlabel self.ylabel = ylabel self.zlabel = zlabel self.aspect_ratio = aspect_ratio self.axis_center = axis_center self.axis = axis self.xscale = xscale self.yscale = yscale self.legend = legend self.autoscale = autoscale self.margin = margin self.annotations = annotations self.markers = markers self.rectangles = rectangles self.fill = fill # Contains the data objects to be plotted. The backend should be smart # enough to iterate over this list. 
self._series = [] self._series.extend(args) # The backend type. On every show() a new backend instance is created # in self._backend which is tightly coupled to the Plot instance # (thanks to the parent attribute of the backend). if isinstance(backend, str): self.backend = plot_backends[backend] elif (type(backend) == type) and issubclass(backend, BaseBackend): self.backend = backend else: raise TypeError( "backend must be either a string or a subclass of BaseBackend") is_real = \ lambda lim: all(getattr(i, 'is_real', True) for i in lim) is_finite = \ lambda lim: all(getattr(i, 'is_finite', True) for i in lim) # reduce code repetition def check_and_set(t_name, t): if t: if not is_real(t): raise ValueError( "All numbers from {}={} must be real".format(t_name, t)) if not is_finite(t): raise ValueError( "All numbers from {}={} must be finite".format(t_name, t)) setattr(self, t_name, (float(t[0]), float(t[1]))) self.xlim = None check_and_set("xlim", xlim) self.ylim = None check_and_set("ylim", ylim) self.size = None check_and_set("size", size) def show(self): # TODO move this to the backend (also for save) if hasattr(self, '_backend'): self._backend.close() self._backend = self.backend(self) self._backend.show() def save(self, path): if hasattr(self, '_backend'): self._backend.close() self._backend = self.backend(self) self._backend.save(path) def __str__(self): series_strs = [('[%d]: ' % i) + str(s) for i, s in enumerate(self._series)] return 'Plot object containing:\n' + '\n'.join(series_strs) def __getitem__(self, index): return self._series[index] def __setitem__(self, index, *args): if len(args) == 1 and isinstance(args[0], BaseSeries): self._series[index] = args def __delitem__(self, index): del self._series[index] def append(self, arg): if isinstance(arg, BaseSeries): self._series.append(arg) else: raise TypeError('Must specify element of plot to append.') def extend(self, arg): if isinstance(arg, Plot): self._series.extend(arg._series) elif is_sequence(arg): 
self._series.extend(arg) else: raise TypeError('Expecting Plot or sequence of BaseSeries') class PlotGrid: def __init__(self, nrows, ncolumns, *args, show=True, size=None, **kwargs): self.nrows = nrows self.ncolumns = ncolumns self._series = [] self.args = args for arg in args: self._series.append(arg._series) self.backend = DefaultBackend self.size = size if show: self.show() def show(self): if hasattr(self, '_backend'): self._backend.close() self._backend = self.backend(self) self._backend.show() def save(self, path): if hasattr(self, '_backend'): self._backend.close() self._backend = self.backend(self) self._backend.save(path) def __str__(self): plot_strs = [('Plot[%d]:' % i) + str(plot) for i, plot in enumerate(self.args)] return 'PlotGrid object containing:\n' + '\n'.join(plot_strs) ############################################################################## # Data Series ############################################################################## #TODO more general way to calculate aesthetics (see get_color_array) ### The base class for all series class BaseSeries: # Some flags follow. The rationale for using flags instead of checking base # classes is that setting multiple flags is simpler than multiple # inheritance. 
is_2Dline = False # Some of the backends expect: # - get_points returning 1D np.arrays list_x, list_y # - get_color_array returning 1D np.array (done in Line2DBaseSeries) # with the colors calculated at the points from get_points is_3Dline = False # Some of the backends expect: # - get_points returning 1D np.arrays list_x, list_y, list_y # - get_color_array returning 1D np.array (done in Line2DBaseSeries) # with the colors calculated at the points from get_points is_3Dsurface = False # Some of the backends expect: # - get_meshes returning mesh_x, mesh_y, mesh_z (2D np.arrays) # - get_points an alias for get_meshes is_contour = False # Some of the backends expect: # - get_meshes returning mesh_x, mesh_y, mesh_z (2D np.arrays) # - get_points an alias for get_meshes is_implicit = False # Some of the backends expect: # - get_meshes returning mesh_x (1D array), mesh_y(1D array, # mesh_z (2D np.arrays) # - get_points an alias for get_meshes # Different from is_contour as the colormap in backend will be # different is_parametric = False # The calculation of aesthetics expects: # - get_parameter_points returning one or two np.arrays (1D or 2D) # used for calculation aesthetics def __init__(self): super().__init__() @property def is_3D(self): flags3D = [ self.is_3Dline, self.is_3Dsurface ] return any(flags3D) @property def is_line(self): flagslines = [ self.is_2Dline, self.is_3Dline ] return any(flagslines) ### 2D lines class Line2DBaseSeries(BaseSeries): is_2Dline = True _dim = 2 def __init__(self): super().__init__() self.label = None self.steps = False self.only_integers = False self.line_color = None def get_data(self): np = import_module('numpy') points = self.get_points() if self.steps is True: if len(points) == 2: x = np.array((points[0], points[0])).T.flatten()[1:] y = np.array((points[1], points[1])).T.flatten()[:-1] points = (x, y) else: x = np.repeat(points[0], 3)[2:] y = np.repeat(points[1], 3)[:-2] z = np.repeat(points[2], 3)[1:-1] points = (x, y, z) return 
points def get_segments(self): sympy_deprecation_warning( """ The Line2DBaseSeries.get_segments() method is deprecated. Instead, use the MatplotlibBackend.get_segments() method, or use The get_points() or get_data() methods. """, deprecated_since_version="1.9", active_deprecations_target="deprecated-get-segments") np = import_module('numpy') points = type(self).get_data(self) points = np.ma.array(points).T.reshape(-1, 1, self._dim) return np.ma.concatenate([points[:-1], points[1:]], axis=1) def get_color_array(self): np = import_module('numpy') c = self.line_color if hasattr(c, '__call__'): f = np.vectorize(c) nargs = arity(c) if nargs == 1 and self.is_parametric: x = self.get_parameter_points() return f(centers_of_segments(x)) else: variables = list(map(centers_of_segments, self.get_points())) if nargs == 1: return f(variables[0]) elif nargs == 2: return f(*variables[:2]) else: # only if the line is 3D (otherwise raises an error) return f(*variables) else: return c*np.ones(self.nb_of_points) class List2DSeries(Line2DBaseSeries): def __init__(self, list_x, list_y): np = import_module('numpy') super().__init__() self.list_x = np.array(list_x) self.list_y = np.array(list_y) self.label = 'list' def __str__(self): return 'list plot' def get_points(self): return (self.list_x, self.list_y) class LineOver1DRangeSeries(Line2DBaseSeries): def __init__(self, expr, var_start_end, **kwargs): super().__init__() self.expr = sympify(expr) self.label = kwargs.get('label', None) or self.expr self.var = sympify(var_start_end[0]) self.start = float(var_start_end[1]) self.end = float(var_start_end[2]) self.nb_of_points = kwargs.get('nb_of_points', 300) self.adaptive = kwargs.get('adaptive', True) self.depth = kwargs.get('depth', 12) self.line_color = kwargs.get('line_color', None) self.xscale = kwargs.get('xscale', 'linear') def __str__(self): return 'cartesian line: %s for %s over %s' % ( str(self.expr), str(self.var), str((self.start, self.end))) def get_points(self): if 
self.only_integers or not self.adaptive: return self._uniform_sampling() else: f = lambdify([self.var], self.expr) x_coords = [] y_coords = [] np = import_module('numpy') def sample(p, q, depth): """ Samples recursively if three points are almost collinear. For depth < 6, points are added irrespective of whether they satisfy the collinearity condition or not. The maximum depth allowed is 12. """ # Randomly sample to avoid aliasing. random = 0.45 + np.random.rand() * 0.1 if self.xscale == 'log': xnew = 10**(np.log10(p[0]) + random * (np.log10(q[0]) - np.log10(p[0]))) else: xnew = p[0] + random * (q[0] - p[0]) ynew = f(xnew) new_point = np.array([xnew, ynew]) # Maximum depth if depth > self.depth: x_coords.append(q[0]) y_coords.append(q[1]) # Sample irrespective of whether the line is flat till the # depth of 6. We are not using linspace to avoid aliasing. elif depth < 6: sample(p, new_point, depth + 1) sample(new_point, q, depth + 1) # Sample ten points if complex values are encountered # at both ends. If there is a real value in between, then # sample those points further. elif p[1] is None and q[1] is None: if self.xscale == 'log': xarray = np.logspace(p[0], q[0], 10) else: xarray = np.linspace(p[0], q[0], 10) yarray = list(map(f, xarray)) if not all(y is None for y in yarray): for i in range(len(yarray) - 1): if not (yarray[i] is None and yarray[i + 1] is None): sample([xarray[i], yarray[i]], [xarray[i + 1], yarray[i + 1]], depth + 1) # Sample further if one of the end points in None (i.e. a # complex value) or the three points are not almost collinear. 
elif (p[1] is None or q[1] is None or new_point[1] is None or not flat(p, new_point, q)): sample(p, new_point, depth + 1) sample(new_point, q, depth + 1) else: x_coords.append(q[0]) y_coords.append(q[1]) f_start = f(self.start) f_end = f(self.end) x_coords.append(self.start) y_coords.append(f_start) sample(np.array([self.start, f_start]), np.array([self.end, f_end]), 0) return (x_coords, y_coords) def _uniform_sampling(self): np = import_module('numpy') if self.only_integers is True: if self.xscale == 'log': list_x = np.logspace(int(self.start), int(self.end), num=int(self.end) - int(self.start) + 1) else: list_x = np.linspace(int(self.start), int(self.end), num=int(self.end) - int(self.start) + 1) else: if self.xscale == 'log': list_x = np.logspace(self.start, self.end, num=self.nb_of_points) else: list_x = np.linspace(self.start, self.end, num=self.nb_of_points) f = vectorized_lambdify([self.var], self.expr) list_y = f(list_x) return (list_x, list_y) class Parametric2DLineSeries(Line2DBaseSeries): is_parametric = True def __init__(self, expr_x, expr_y, var_start_end, **kwargs): super().__init__() self.expr_x = sympify(expr_x) self.expr_y = sympify(expr_y) self.label = kwargs.get('label', None) or \ Tuple(self.expr_x, self.expr_y) self.var = sympify(var_start_end[0]) self.start = float(var_start_end[1]) self.end = float(var_start_end[2]) self.nb_of_points = kwargs.get('nb_of_points', 300) self.adaptive = kwargs.get('adaptive', True) self.depth = kwargs.get('depth', 12) self.line_color = kwargs.get('line_color', None) def __str__(self): return 'parametric cartesian line: (%s, %s) for %s over %s' % ( str(self.expr_x), str(self.expr_y), str(self.var), str((self.start, self.end))) def get_parameter_points(self): np = import_module('numpy') return np.linspace(self.start, self.end, num=self.nb_of_points) def _uniform_sampling(self): param = self.get_parameter_points() fx = vectorized_lambdify([self.var], self.expr_x) fy = vectorized_lambdify([self.var], self.expr_y) 
list_x = fx(param) list_y = fy(param) return (list_x, list_y) def get_points(self): if not self.adaptive: return self._uniform_sampling() f_x = lambdify([self.var], self.expr_x) f_y = lambdify([self.var], self.expr_y) x_coords = [] y_coords = [] def sample(param_p, param_q, p, q, depth): # Randomly sample to avoid aliasing. np = import_module('numpy') random = 0.45 + np.random.rand() * 0.1 param_new = param_p + random * (param_q - param_p) xnew = f_x(param_new) ynew = f_y(param_new) new_point = np.array([xnew, ynew]) # Maximum depth if depth > self.depth: x_coords.append(q[0]) y_coords.append(q[1]) # Sample irrespective of whether the line is flat till the # depth of 6. We are not using linspace to avoid aliasing. elif depth < 6: sample(param_p, param_new, p, new_point, depth + 1) sample(param_new, param_q, new_point, q, depth + 1) # Sample ten points if complex values are encountered # at both ends. If there is a real value in between, then # sample those points further. elif ((p[0] is None and q[1] is None) or (p[1] is None and q[1] is None)): param_array = np.linspace(param_p, param_q, 10) x_array = list(map(f_x, param_array)) y_array = list(map(f_y, param_array)) if not all(x is None and y is None for x, y in zip(x_array, y_array)): for i in range(len(y_array) - 1): if ((x_array[i] is not None and y_array[i] is not None) or (x_array[i + 1] is not None and y_array[i + 1] is not None)): point_a = [x_array[i], y_array[i]] point_b = [x_array[i + 1], y_array[i + 1]] sample(param_array[i], param_array[i], point_a, point_b, depth + 1) # Sample further if one of the end points in None (i.e. a complex # value) or the three points are not almost collinear. 
elif (p[0] is None or p[1] is None or q[1] is None or q[0] is None or not flat(p, new_point, q)): sample(param_p, param_new, p, new_point, depth + 1) sample(param_new, param_q, new_point, q, depth + 1) else: x_coords.append(q[0]) y_coords.append(q[1]) f_start_x = f_x(self.start) f_start_y = f_y(self.start) start = [f_start_x, f_start_y] f_end_x = f_x(self.end) f_end_y = f_y(self.end) end = [f_end_x, f_end_y] x_coords.append(f_start_x) y_coords.append(f_start_y) sample(self.start, self.end, start, end, 0) return x_coords, y_coords ### 3D lines class Line3DBaseSeries(Line2DBaseSeries): is_2Dline = False is_3Dline = True _dim = 3 def __init__(self): super().__init__() class Parametric3DLineSeries(Line3DBaseSeries): is_parametric = True def __init__(self, expr_x, expr_y, expr_z, var_start_end, **kwargs): super().__init__() self.expr_x = sympify(expr_x) self.expr_y = sympify(expr_y) self.expr_z = sympify(expr_z) self.label = kwargs.get('label', None) or \ Tuple(self.expr_x, self.expr_y) self.var = sympify(var_start_end[0]) self.start = float(var_start_end[1]) self.end = float(var_start_end[2]) self.nb_of_points = kwargs.get('nb_of_points', 300) self.line_color = kwargs.get('line_color', None) self._xlim = None self._ylim = None self._zlim = None def __str__(self): return '3D parametric cartesian line: (%s, %s, %s) for %s over %s' % ( str(self.expr_x), str(self.expr_y), str(self.expr_z), str(self.var), str((self.start, self.end))) def get_parameter_points(self): np = import_module('numpy') return np.linspace(self.start, self.end, num=self.nb_of_points) def get_points(self): np = import_module('numpy') param = self.get_parameter_points() fx = vectorized_lambdify([self.var], self.expr_x) fy = vectorized_lambdify([self.var], self.expr_y) fz = vectorized_lambdify([self.var], self.expr_z) list_x = fx(param) list_y = fy(param) list_z = fz(param) list_x = np.array(list_x, dtype=np.float64) list_y = np.array(list_y, dtype=np.float64) list_z = np.array(list_z, dtype=np.float64) 
list_x = np.ma.masked_invalid(list_x) list_y = np.ma.masked_invalid(list_y) list_z = np.ma.masked_invalid(list_z) self._xlim = (np.amin(list_x), np.amax(list_x)) self._ylim = (np.amin(list_y), np.amax(list_y)) self._zlim = (np.amin(list_z), np.amax(list_z)) return list_x, list_y, list_z ### Surfaces class SurfaceBaseSeries(BaseSeries): is_3Dsurface = True def __init__(self): super().__init__() self.surface_color = None def get_color_array(self): np = import_module('numpy') c = self.surface_color if isinstance(c, Callable): f = np.vectorize(c) nargs = arity(c) if self.is_parametric: variables = list(map(centers_of_faces, self.get_parameter_meshes())) if nargs == 1: return f(variables[0]) elif nargs == 2: return f(*variables) variables = list(map(centers_of_faces, self.get_meshes())) if nargs == 1: return f(variables[0]) elif nargs == 2: return f(*variables[:2]) else: return f(*variables) else: if isinstance(self, SurfaceOver2DRangeSeries): return c*np.ones(min(self.nb_of_points_x, self.nb_of_points_y)) else: return c*np.ones(min(self.nb_of_points_u, self.nb_of_points_v)) class SurfaceOver2DRangeSeries(SurfaceBaseSeries): def __init__(self, expr, var_start_end_x, var_start_end_y, **kwargs): super().__init__() self.expr = sympify(expr) self.var_x = sympify(var_start_end_x[0]) self.start_x = float(var_start_end_x[1]) self.end_x = float(var_start_end_x[2]) self.var_y = sympify(var_start_end_y[0]) self.start_y = float(var_start_end_y[1]) self.end_y = float(var_start_end_y[2]) self.nb_of_points_x = kwargs.get('nb_of_points_x', 50) self.nb_of_points_y = kwargs.get('nb_of_points_y', 50) self.surface_color = kwargs.get('surface_color', None) self._xlim = (self.start_x, self.end_x) self._ylim = (self.start_y, self.end_y) def __str__(self): return ('cartesian surface: %s for' ' %s over %s and %s over %s') % ( str(self.expr), str(self.var_x), str((self.start_x, self.end_x)), str(self.var_y), str((self.start_y, self.end_y))) def get_meshes(self): np = import_module('numpy') 
mesh_x, mesh_y = np.meshgrid(np.linspace(self.start_x, self.end_x, num=self.nb_of_points_x), np.linspace(self.start_y, self.end_y, num=self.nb_of_points_y)) f = vectorized_lambdify((self.var_x, self.var_y), self.expr) mesh_z = f(mesh_x, mesh_y) mesh_z = np.array(mesh_z, dtype=np.float64) mesh_z = np.ma.masked_invalid(mesh_z) self._zlim = (np.amin(mesh_z), np.amax(mesh_z)) return mesh_x, mesh_y, mesh_z class ParametricSurfaceSeries(SurfaceBaseSeries): is_parametric = True def __init__( self, expr_x, expr_y, expr_z, var_start_end_u, var_start_end_v, **kwargs): super().__init__() self.expr_x = sympify(expr_x) self.expr_y = sympify(expr_y) self.expr_z = sympify(expr_z) self.var_u = sympify(var_start_end_u[0]) self.start_u = float(var_start_end_u[1]) self.end_u = float(var_start_end_u[2]) self.var_v = sympify(var_start_end_v[0]) self.start_v = float(var_start_end_v[1]) self.end_v = float(var_start_end_v[2]) self.nb_of_points_u = kwargs.get('nb_of_points_u', 50) self.nb_of_points_v = kwargs.get('nb_of_points_v', 50) self.surface_color = kwargs.get('surface_color', None) def __str__(self): return ('parametric cartesian surface: (%s, %s, %s) for' ' %s over %s and %s over %s') % ( str(self.expr_x), str(self.expr_y), str(self.expr_z), str(self.var_u), str((self.start_u, self.end_u)), str(self.var_v), str((self.start_v, self.end_v))) def get_parameter_meshes(self): np = import_module('numpy') return np.meshgrid(np.linspace(self.start_u, self.end_u, num=self.nb_of_points_u), np.linspace(self.start_v, self.end_v, num=self.nb_of_points_v)) def get_meshes(self): np = import_module('numpy') mesh_u, mesh_v = self.get_parameter_meshes() fx = vectorized_lambdify((self.var_u, self.var_v), self.expr_x) fy = vectorized_lambdify((self.var_u, self.var_v), self.expr_y) fz = vectorized_lambdify((self.var_u, self.var_v), self.expr_z) mesh_x = fx(mesh_u, mesh_v) mesh_y = fy(mesh_u, mesh_v) mesh_z = fz(mesh_u, mesh_v) mesh_x = np.array(mesh_x, dtype=np.float64) mesh_y = np.array(mesh_y, 
dtype=np.float64) mesh_z = np.array(mesh_z, dtype=np.float64) mesh_x = np.ma.masked_invalid(mesh_x) mesh_y = np.ma.masked_invalid(mesh_y) mesh_z = np.ma.masked_invalid(mesh_z) self._xlim = (np.amin(mesh_x), np.amax(mesh_x)) self._ylim = (np.amin(mesh_y), np.amax(mesh_y)) self._zlim = (np.amin(mesh_z), np.amax(mesh_z)) return mesh_x, mesh_y, mesh_z ### Contours class ContourSeries(BaseSeries): # The code is mostly repetition of SurfaceOver2DRange. # Presently used in contour_plot function is_contour = True def __init__(self, expr, var_start_end_x, var_start_end_y): super().__init__() self.nb_of_points_x = 50 self.nb_of_points_y = 50 self.expr = sympify(expr) self.var_x = sympify(var_start_end_x[0]) self.start_x = float(var_start_end_x[1]) self.end_x = float(var_start_end_x[2]) self.var_y = sympify(var_start_end_y[0]) self.start_y = float(var_start_end_y[1]) self.end_y = float(var_start_end_y[2]) self.get_points = self.get_meshes self._xlim = (self.start_x, self.end_x) self._ylim = (self.start_y, self.end_y) def __str__(self): return ('contour: %s for ' '%s over %s and %s over %s') % ( str(self.expr), str(self.var_x), str((self.start_x, self.end_x)), str(self.var_y), str((self.start_y, self.end_y))) def get_meshes(self): np = import_module('numpy') mesh_x, mesh_y = np.meshgrid(np.linspace(self.start_x, self.end_x, num=self.nb_of_points_x), np.linspace(self.start_y, self.end_y, num=self.nb_of_points_y)) f = vectorized_lambdify((self.var_x, self.var_y), self.expr) return (mesh_x, mesh_y, f(mesh_x, mesh_y)) ############################################################################## # Backends ############################################################################## class BaseBackend: def __init__(self, parent): super().__init__() self.parent = parent def show(self): raise NotImplementedError def save(self, path): raise NotImplementedError def close(self): raise NotImplementedError # Don't have to check for the success of importing matplotlib in each case; class 
MatplotlibBackend(BaseBackend): def __init__(self, parent): super().__init__(parent) self.matplotlib = import_module('matplotlib', import_kwargs={'fromlist': ['pyplot', 'cm', 'collections']}, min_module_version='1.1.0', catch=(RuntimeError,)) self.plt = self.matplotlib.pyplot self.cm = self.matplotlib.cm self.LineCollection = self.matplotlib.collections.LineCollection aspect = getattr(self.parent, 'aspect_ratio', 'auto') if aspect != 'auto': aspect = float(aspect[1]) / aspect[0] if isinstance(self.parent, Plot): nrows, ncolumns = 1, 1 series_list = [self.parent._series] elif isinstance(self.parent, PlotGrid): nrows, ncolumns = self.parent.nrows, self.parent.ncolumns series_list = self.parent._series self.ax = [] self.fig = self.plt.figure(figsize=parent.size) for i, series in enumerate(series_list): are_3D = [s.is_3D for s in series] if any(are_3D) and not all(are_3D): raise ValueError('The matplotlib backend cannot mix 2D and 3D.') elif all(are_3D): mpl_toolkits = import_module('mpl_toolkits', import_kwargs={'fromlist': ['mplot3d']}) self.ax.append(self.fig.add_subplot(nrows, ncolumns, i + 1, projection='3d', aspect=aspect)) elif not any(are_3D): self.ax.append(self.fig.add_subplot(nrows, ncolumns, i + 1, aspect=aspect)) self.ax[i].spines['left'].set_position('zero') self.ax[i].spines['right'].set_color('none') self.ax[i].spines['bottom'].set_position('zero') self.ax[i].spines['top'].set_color('none') self.ax[i].xaxis.set_ticks_position('bottom') self.ax[i].yaxis.set_ticks_position('left') @staticmethod def get_segments(x, y, z=None): np = import_module('numpy') if z is not None: dim = 3 points = (x, y, z) else: dim = 2 points = (x, y) points = np.ma.array(points).T.reshape(-1, 1, dim) return np.ma.concatenate([points[:-1], points[1:]], axis=1) def _process_series(self, series, ax, parent): np = import_module('numpy') mpl_toolkits = import_module( 'mpl_toolkits', import_kwargs={'fromlist': ['mplot3d']}) xlims, ylims, zlims = [], [], [] for s in series: if 
s.is_2Dline: x, y = s.get_data() if (isinstance(s.line_color, (int, float)) or callable(s.line_color)): segments = self.get_segments(x, y) collection = self.LineCollection(segments) collection.set_array(s.get_color_array()) ax.add_collection(collection) else: lbl = _str_or_latex(s.label) line, = ax.plot(x, y, label=lbl, color=s.line_color) elif s.is_contour: ax.contour(*s.get_meshes()) elif s.is_3Dline: x, y, z = s.get_data() if (isinstance(s.line_color, (int, float)) or callable(s.line_color)): art3d = mpl_toolkits.mplot3d.art3d segments = self.get_segments(x, y, z) collection = art3d.Line3DCollection(segments) collection.set_array(s.get_color_array()) ax.add_collection(collection) else: lbl = _str_or_latex(s.label) ax.plot(x, y, z, label=lbl, color=s.line_color) xlims.append(s._xlim) ylims.append(s._ylim) zlims.append(s._zlim) elif s.is_3Dsurface: x, y, z = s.get_meshes() collection = ax.plot_surface(x, y, z, cmap=getattr(self.cm, 'viridis', self.cm.jet), rstride=1, cstride=1, linewidth=0.1) if isinstance(s.surface_color, (float, int, Callable)): color_array = s.get_color_array() color_array = color_array.reshape(color_array.size) collection.set_array(color_array) else: collection.set_color(s.surface_color) xlims.append(s._xlim) ylims.append(s._ylim) zlims.append(s._zlim) elif s.is_implicit: points = s.get_raster() if len(points) == 2: x, y = _matplotlib_list(points[0]) ax.fill(x, y, facecolor=s.line_color, edgecolor='None') else: ListedColormap = self.matplotlib.colors.ListedColormap colormap = ListedColormap(["white", s.line_color]) xarray, yarray, zarray, plot_type = points if plot_type == 'contour': ax.contour(xarray, yarray, zarray, cmap=colormap, label=_str_or_latex(s.label)) else: ax.contourf(xarray, yarray, zarray, cmap=colormap, label=_str_or_latex(s.label)) else: raise NotImplementedError( '{} is not supported in the SymPy plotting module ' 'with matplotlib backend. Please report this issue.' 
.format(ax)) Axes3D = mpl_toolkits.mplot3d.Axes3D if not isinstance(ax, Axes3D): ax.autoscale_view( scalex=ax.get_autoscalex_on(), scaley=ax.get_autoscaley_on()) else: if xlims: xlims = np.array(xlims) xlim = (np.amin(xlims[:, 0]), np.amax(xlims[:, 1])) ax.set_xlim(xlim) else: ax.set_xlim([0, 1]) if ylims: ylims = np.array(ylims) ylim = (np.amin(ylims[:, 0]), np.amax(ylims[:, 1])) ax.set_ylim(ylim) else: ax.set_ylim([0, 1]) if zlims: zlims = np.array(zlims) zlim = (np.amin(zlims[:, 0]), np.amax(zlims[:, 1])) ax.set_zlim(zlim) else: ax.set_zlim([0, 1]) if parent.xscale and not isinstance(ax, Axes3D): ax.set_xscale(parent.xscale) if parent.yscale and not isinstance(ax, Axes3D): ax.set_yscale(parent.yscale) if not isinstance(ax, Axes3D) or self.matplotlib.__version__ >= '1.2.0': ax.set_autoscale_on(parent.autoscale) if parent.axis_center: val = parent.axis_center if isinstance(ax, Axes3D): pass elif val == 'center': ax.spines['left'].set_position('center') ax.spines['bottom'].set_position('center') elif val == 'auto': xl, xh = ax.get_xlim() yl, yh = ax.get_ylim() pos_left = ('data', 0) if xl*xh <= 0 else 'center' pos_bottom = ('data', 0) if yl*yh <= 0 else 'center' ax.spines['left'].set_position(pos_left) ax.spines['bottom'].set_position(pos_bottom) else: ax.spines['left'].set_position(('data', val[0])) ax.spines['bottom'].set_position(('data', val[1])) if not parent.axis: ax.set_axis_off() if parent.legend: if ax.legend(): ax.legend_.set_visible(parent.legend) if parent.margin: ax.set_xmargin(parent.margin) ax.set_ymargin(parent.margin) if parent.title: ax.set_title(parent.title) if parent.xlabel: xlbl = _str_or_latex(parent.xlabel) ax.set_xlabel(xlbl, position=(1, 0)) if parent.ylabel: ylbl = _str_or_latex(parent.ylabel) ax.set_ylabel(ylbl, position=(0, 1)) if isinstance(ax, Axes3D) and parent.zlabel: zlbl = _str_or_latex(parent.zlabel) ax.set_zlabel(zlbl, position=(0, 1)) if parent.annotations: for a in parent.annotations: ax.annotate(**a) if parent.markers: for 
marker in parent.markers: m = marker.copy() args = m.pop('args') ax.plot(*args, **m) if parent.rectangles: for r in parent.rectangles: rect = self.matplotlib.patches.Rectangle(**r) ax.add_patch(rect) if parent.fill: ax.fill_between(**parent.fill) # xlim and ylim shoulld always be set at last so that plot limits # doesn't get altered during the process. if parent.xlim: ax.set_xlim(parent.xlim) if parent.ylim: ax.set_ylim(parent.ylim) def process_series(self): parent = self.parent if isinstance(parent, Plot): series_list = [parent._series] else: series_list = parent._series for i, (series, ax) in enumerate(zip(series_list, self.ax)): if isinstance(self.parent, PlotGrid): parent = self.parent.args[i] self._process_series(series, ax, parent) def show(self): self.process_series() if _show: self.fig.tight_layout() self.plt.show() else: self.close() def save(self, path): self.process_series() self.fig.savefig(path) def close(self): self.plt.close(self.fig) class TextBackend(BaseBackend): def __init__(self, parent): super().__init__(parent) def show(self): if not _show: return if len(self.parent._series) != 1: raise ValueError( 'The TextBackend supports only one graph per Plot.') elif not isinstance(self.parent._series[0], LineOver1DRangeSeries): raise ValueError( 'The TextBackend supports only expressions over a 1D range') else: ser = self.parent._series[0] textplot(ser.expr, ser.start, ser.end) def close(self): pass class DefaultBackend(BaseBackend): def __new__(cls, parent): matplotlib = import_module('matplotlib', min_module_version='1.1.0', catch=(RuntimeError,)) if matplotlib: return MatplotlibBackend(parent) else: return TextBackend(parent) plot_backends = { 'matplotlib': MatplotlibBackend, 'text': TextBackend, 'default': DefaultBackend } d" % nb_of_free_symbols) if len(args) == i + nb_of_free_symbols and isinstance(args[i], Tuple): ranges = Tuple(*[range_expr for range_expr in args[ i:i + nb_of_free_symbols]]) plots = [expr + ranges for expr in exprs] return plots 
else: default_range = Tuple(-10, 10) ranges = [] for symbol in free_symbols: ranges.append(Tuple(symbol) + default_range) for i in range(nb_of_free_symbols - len(free_symbols)): ranges.append(Tuple(Dummy()) + default_range) ranges = Tuple(*ranges) plots = [expr + ranges for expr in exprs] return plots elif isinstance(args[0], Tuple) and len(args[0]) == expr_len + nb_of_free_symbols: for arg in args: for i in range(expr_len): if not isinstance(arg[i], Expr): raise ValueError("Expected an expression, given %s" % str(arg[i])) for i in range(nb_of_free_symbols): if not len(arg[i + expr_len]) == 3: raise ValueError("The ranges should be a tuple of " "length 3, got %s" % str(arg[i + expr_len])) return args
true
true
f70e9f82018e07c830743995d2a988adcd978449
53,574
py
Python
qutip/qip/circuit.py
lebinyu/qutip
19f172cbb1689f3295dd2561057543da342fb646
[ "BSD-3-Clause" ]
null
null
null
qutip/qip/circuit.py
lebinyu/qutip
19f172cbb1689f3295dd2561057543da342fb646
[ "BSD-3-Clause" ]
null
null
null
qutip/qip/circuit.py
lebinyu/qutip
19f172cbb1689f3295dd2561057543da342fb646
[ "BSD-3-Clause" ]
null
null
null
# This file is part of QuTiP: Quantum Toolbox in Python. # # Copyright (c) 2011 and later, Paul D. Nation and Robert J. Johansson. # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # # 3. Neither the name of the QuTiP: Quantum Toolbox in Python nor the names # of its contributors may be used to endorse or promote products derived # from this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A # PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
############################################################################### from collections.abc import Iterable import warnings import inspect import numpy as np import numpy as np import numpy as np from qutip.qip.circuit_latex import _latex_compile from qutip.qip.gates import * from qutip.qip.qubits import qubit_states __all__ = ['Gate', 'QubitCircuit'] class Gate(object): """ Representation of a quantum gate, with its required parametrs, and target and control qubits. Parameters ---------- name : string Gate name. targets : list or int Gate targets. controls : list or int Gate controls. arg_value : float Argument value(phi). arg_label : string Label for gate representation. """ def __init__(self, name, targets=None, controls=None, arg_value=None, arg_label=None): """ Create a gate with specified parameters. """ self.name = name self.targets = None self.controls = None if not isinstance(targets, Iterable) and targets is not None: self.targets = [targets] else: self.targets = targets if not isinstance(controls, Iterable) and controls is not None: self.controls = [controls] else: self.controls = controls for ind_list in [self.targets, self.controls]: if isinstance(ind_list, Iterable): all_integer = all( [isinstance(ind, np.int) for ind in ind_list]) if not all_integer: raise ValueError("Index of a qubit must be an integer") if name in ["SWAP", "ISWAP", "SQRTISWAP", "SQRTSWAP", "BERKELEY", "SWAPalpha"]: if (self.targets is None) or (len(self.targets) != 2): raise ValueError("Gate %s requires two targets" % name) if self.controls is not None: raise ValueError("Gate %s cannot have a control" % name) elif name in ["CNOT", "CSIGN", "CRX", "CRY", "CRZ"]: if self.targets is None or len(self.targets) != 1: raise ValueError("Gate %s requires one target" % name) if self.controls is None or len(self.controls) != 1: raise ValueError("Gate %s requires one control" % name) elif name in ["SNOT", "RX", "RY", "RZ", "PHASEGATE"]: if self.controls is not None: raise 
ValueError("Gate %s does not take controls" % name) elif name in ["RX", "RY", "RZ", "CPHASE", "SWAPalpha", "PHASEGATE", "GLOBALPHASE", "CRX", "CRY", "CRZ"]: if arg_value is None: raise ValueError("Gate %s requires an argument value" % name) self.arg_value = arg_value self.arg_label = arg_label def __str__(self): s = "Gate(%s, targets=%s, controls=%s)" % (self.name, self.targets, self.controls) return s def __repr__(self): return str(self) def _repr_latex_(self): return str(self) _gate_name_to_label = { 'RX': r'R_x', 'RY': r'R_y', 'RZ': r'R_z', 'CRX': r'R_x', 'CRY': r'R_y', 'CRZ': r'R_z', 'SQRTNOT': r'\sqrt{\rm NOT}', 'SNOT': r'{\rm H}', 'PHASEGATE': r'{\rm PHASE}', 'CPHASE': r'{\rm R}', 'CNOT': r'{\rm CNOT}', 'CSIGN': r'{\rm Z}', 'BERKELEY': r'{\rm BERKELEY}', 'SWAPalpha': r'{\rm SWAPalpha}', 'SWAP': r'{\rm SWAP}', 'ISWAP': r'{i}{\rm SWAP}', 'SQRTSWAP': r'\sqrt{\rm SWAP}', 'SQRTISWAP': r'\sqrt{{i}\rm SWAP}', 'FREDKIN': r'{\rm FREDKIN}', 'TOFFOLI': r'{\rm TOFFOLI}', 'GLOBALPHASE': r'{\rm Ph}', } def _gate_label(name, arg_label): if name in _gate_name_to_label: gate_label = _gate_name_to_label[name] else: warnings.warn("Unknown gate %s" % name) gate_label = name if arg_label: return r'%s(%s)' % (gate_label, arg_label) else: return r'%s' % gate_label class QubitCircuit(object): """ Representation of a quantum program/algorithm, maintaining a sequence of gates. Parameters ---------- N : int Number of qubits in the system. user_gates : dict Define a dictionary of the custom gates. See examples for detail. input_states : list A list of string such as `0`,'+', "A", "Y". Only used for latex. Examples -------- >>> def user_gate(): ... mat = np.array([[1., 0], ... [0., 1.j]]) ... 
return Qobj(mat, dims=[[2], [2]]) >>> qc.QubitCircuit(2, user_gates={"T":user_gate}) >>> qc.add_gate("T", targets=[0]) """ def __init__(self, N, input_states=None, output_states=None, reverse_states=True, user_gates=None): # number of qubits in the register self.N = N self.reverse_states = reverse_states self.gates = [] self.U_list = [] self.input_states = [None for i in range(N)] self.output_states = [None for i in range(N)] if user_gates is None: self.user_gates = {} else: if isinstance(user_gates, dict): self.user_gates = user_gates else: raise ValueError( "`user_gate` takes a python dictionary of the form" "{{str: gate_function}}, not {}".format(user_gates)) def add_state(self, state, targets=None, state_type="input"): """ Add an input or ouput state to the circuit. By default all the input and output states will be initialized to `None`. A particular state can be added by specifying the state and the qubit where it has to be added along with the type as input or output. Parameters ---------- state: str The state that has to be added. It can be any string such as `0`, '+', "A", "Y" targets: list A list of qubit positions where the given state has to be added. state_type: str One of either "input" or "output". This specifies whether the state to be added is an input or output. default: "input" """ if state_type == "input": for i in targets: self.input_states[i] = state if state_type == "output": for i in targets: self.output_states[i] = state def add_gate(self, gate, targets=None, controls=None, arg_value=None, arg_label=None, index=None): """ Adds a gate with specified parameters to the circuit. Parameters ---------- gate: string or `Gate` Gate name. If gate is an instance of `Gate`, parameters are unpacked and added. targets: list Gate targets. controls: list Gate controls. arg_value: float Argument value(phi). arg_label: string Label for gate representation. index : list Positions to add the gate. 
""" if isinstance(gate, Gate): name = gate.name targets = gate.targets controls = gate.controls arg_value = gate.arg_value arg_label = gate.arg_label else: name = gate if index is None: self.gates.append(Gate(name, targets=targets, controls=controls, arg_value=arg_value, arg_label=arg_label)) else: for position in index: self.gates.insert(position, Gate(name, targets=targets, controls=controls, arg_value=arg_value, arg_label=arg_label)) def add_1q_gate(self, name, start=0, end=None, qubits=None, arg_value=None, arg_label=None): """ Adds a single qubit gate with specified parameters on a variable number of qubits in the circuit. By default, it applies the given gate to all the qubits in the register. Parameters ---------- name : string Gate name. start : int Starting location of qubits. end : int Last qubit for the gate. qubits : list Specific qubits for applying gates. arg_value : float Argument value(phi). arg_label : string Label for gate representation. """ if name not in ["RX", "RY", "RZ", "SNOT", "SQRTNOT", "PHASEGATE"]: raise ValueError("%s is not a single qubit gate" % name) if qubits is not None: for i in range(len(qubits)): self.gates.append(Gate(name, targets=qubits[i], controls=None, arg_value=arg_value, arg_label=arg_label)) else: if end is None: end = self.N - 1 for i in range(start, end): self.gates.append(Gate(name, targets=i, controls=None, arg_value=arg_value, arg_label=arg_label)) def add_circuit(self, qc, start=0): """ Adds a block of a qubit circuit to the main circuit. Globalphase gates are not added. Parameters ---------- qc : QubitCircuit The circuit block to be added to the main circuit. start : int The qubit on which the first gate is applied. 
""" if self.N - start < qc.N: raise NotImplementedError("Targets exceed number of qubits.") for gate in qc.gates: if gate.name in ["RX", "RY", "RZ", "SNOT", "SQRTNOT", "PHASEGATE"]: self.add_gate(gate.name, gate.targets[0] + start, None, gate.arg_value, gate.arg_label) elif gate.name in ["CPHASE", "CNOT", "CSIGN", "CRX", "CRY", "CRZ"]: self.add_gate(gate.name, gate.targets[0] + start, gate.controls[0] + start, gate.arg_value, gate.arg_label) elif gate.name in ["BERKELEY", "SWAPalpha", "SWAP", "ISWAP", "SQRTSWAP", "SQRTISWAP"]: self.add_gate(gate.name, None, [gate.controls[0] + start, gate.controls[1] + start], None, None) elif gate.name in ["TOFFOLI"]: self.add_gate(gate.name, gate.targets[0] + start, [gate.controls[0] + start, gate.controls[1] + start], None, None) elif gate.name in ["FREDKIN"]: self.add_gate(gate.name, [gate.targets[0] + start, gate.targets[1] + start], gate.controls + start, None, None) elif gate.name in self.user_gates: self.add_gate( gate.name, targets=gate.targets, arg_value=gate.arg_value) def remove_gate(self, index=None, end=None, name=None, remove="first"): """ Remove a gate from a specific index or between two indexes or the first, last or all instances of a particular gate. Parameters ---------- index : int Location of gate to be removed. name : string Gate name to be removed. remove : string If first or all gate are to be removed. 
""" if index is not None and index <= self.N: if end is not None and end <= self.N: for i in range(end - index): self.gates.pop(index + i) elif end is not None and end > self.N: raise ValueError("End target exceeds number of gates.") else: self.gates.pop(index) elif name is not None and remove == "first": for gate in self.gates: if name == gate.name: self.gates.remove(gate) break elif name is not None and remove == "last": for i in range(self.N + 1): if name == self.gates[self.N - i].name: self.gates.remove(self.gates[self.N - i]) break elif name is not None and remove == "all": for j in range(self.N + 1): if name == self.gates[self.N - j].name: self.gates.remove(self.gates[self.N - j]) else: self.gates.pop() def reverse_circuit(self): """ Reverse an entire circuit of unitary gates. Returns ---------- qc : QubitCircuit Return QubitCircuit of resolved gates for the qubit circuit in the reverse order. """ temp = QubitCircuit(self.N, self.reverse_states) for gate in reversed(self.gates): temp.add_gate(gate) return temp def resolve_gates(self, basis=["CNOT", "RX", "RY", "RZ"]): """ Unitary matrix calculator for N qubits returning the individual steps as unitary matrices operating from left to right in the specified basis. Parameters ---------- basis : list. Basis of the resolved circuit. Returns ------- qc : QubitCircuit Return QubitCircuit of resolved gates for the qubit circuit in the desired basis. 
""" qc_temp = QubitCircuit(self.N, self.reverse_states) temp_resolved = [] basis_1q_valid = ["RX", "RY", "RZ"] basis_2q_valid = ["CNOT", "CSIGN", "ISWAP", "SQRTSWAP", "SQRTISWAP"] if isinstance(basis, list): basis_1q = [] basis_2q = [] for gate in basis: if gate in basis_2q_valid: basis_2q.append(gate) elif gate in basis_1q_valid: basis_1q.append(gate) else: raise NotImplementedError( "%s is not a valid basis gate" % gate) if len(basis_1q) == 1: raise ValueError("Not sufficient single-qubit gates in basis") elif len(basis_1q) == 0: basis_1q = ["RX", "RY", "RZ"] else: # only one 2q gate is given as basis basis_1q = ["RX", "RY", "RZ"] if basis in basis_2q_valid: basis_2q = [basis] else: raise ValueError("%s is not a valid two-qubit basis gate" % basis) for gate in self.gates: if gate.name == "RX": temp_resolved.append(gate) elif gate.name == "RY": temp_resolved.append(gate) elif gate.name == "RZ": temp_resolved.append(gate) elif gate.name == "SQRTNOT": temp_resolved.append(Gate("GLOBALPHASE", None, None, arg_value=np.pi / 4, arg_label=r"\pi/4")) temp_resolved.append(Gate("RX", gate.targets, None, arg_value=np.pi / 2, arg_label=r"\pi/2")) elif gate.name == "SNOT": temp_resolved.append(Gate("GLOBALPHASE", None, None, arg_value=np.pi / 2, arg_label=r"\pi/2")) temp_resolved.append(Gate("RY", gate.targets, None, arg_value=np.pi / 2, arg_label=r"\pi/2")) temp_resolved.append(Gate("RX", gate.targets, None, arg_value=np.pi, arg_label=r"\pi")) elif gate.name == "PHASEGATE": temp_resolved.append(Gate("GLOBALPHASE", None, None, arg_value=gate.arg_value / 2, arg_label=gate.arg_label)) temp_resolved.append(Gate("RZ", gate.targets, None, gate.arg_value, gate.arg_label)) elif gate.name in basis_2q: # ignore all gate in 2q basis temp_resolved.append(gate) elif gate.name == "CPHASE": raise NotImplementedError("Cannot be resolved in this basis") elif gate.name == "CNOT": temp_resolved.append(gate) elif gate.name == "CSIGN": temp_resolved.append(Gate("RY", gate.targets, None, 
arg_value=np.pi / 2, arg_label=r"\pi/2")) temp_resolved.append(Gate("RX", gate.targets, None, arg_value=np.pi, arg_label=r"\pi")) temp_resolved.append(Gate("CNOT", gate.targets, gate.controls)) temp_resolved.append(Gate("RY", gate.targets, None, arg_value=np.pi / 2, arg_label=r"\pi/2")) temp_resolved.append(Gate("RX", gate.targets, None, arg_value=np.pi, arg_label=r"\pi")) temp_resolved.append(Gate("GLOBALPHASE", None, None, arg_value=np.pi, arg_label=r"\pi")) elif gate.name == "BERKELEY": raise NotImplementedError("Cannot be resolved in this basis") elif gate.name == "SWAPalpha": raise NotImplementedError("Cannot be resolved in this basis") elif gate.name == "SWAP": if "ISWAP" in basis_2q: # dealed with separately temp_resolved.append(gate) else: temp_resolved.append( Gate("CNOT", gate.targets[0], gate.targets[1])) temp_resolved.append( Gate("CNOT", gate.targets[1], gate.targets[0])) temp_resolved.append( Gate("CNOT", gate.targets[0], gate.targets[1])) elif gate.name == "ISWAP": temp_resolved.append(Gate("CNOT", gate.targets[0], gate.targets[1])) temp_resolved.append(Gate("CNOT", gate.targets[1], gate.targets[0])) temp_resolved.append(Gate("CNOT", gate.targets[0], gate.targets[1])) temp_resolved.append(Gate("RZ", gate.targets[0], None, arg_value=np.pi / 2, arg_label=r"\pi/2")) temp_resolved.append(Gate("RZ", gate.targets[1], None, arg_value=np.pi / 2, arg_label=r"\pi/2")) temp_resolved.append(Gate("RY", gate.targets[0], None, arg_value=np.pi / 2, arg_label=r"\pi/2")) temp_resolved.append(Gate("RX", gate.targets, None, arg_value=np.pi, arg_label=r"\pi")) temp_resolved.append(Gate("CNOT", gate.targets[0], gate.targets[1])) temp_resolved.append(Gate("RY", gate.targets[0], None, arg_value=np.pi / 2, arg_label=r"\pi/2")) temp_resolved.append(Gate("RX", gate.targets, None, arg_value=np.pi, arg_label=r"\pi")) temp_resolved.append(Gate("GLOBALPHASE", None, None, arg_value=np.pi, arg_label=r"\pi")) temp_resolved.append(Gate("GLOBALPHASE", None, None, arg_value=np.pi / 2, 
arg_label=r"\pi/2")) elif gate.name == "SQRTSWAP": raise NotImplementedError("Cannot be resolved in this basis") elif gate.name == "SQRTISWAP": raise NotImplementedError("Cannot be resolved in this basis") elif gate.name == "FREDKIN": temp_resolved.append(Gate("CNOT", gate.targets[0], gate.targets[1])) temp_resolved.append(Gate("CNOT", gate.targets[0], gate.controls)) temp_resolved.append(Gate("RZ", gate.controls, None, arg_value=np.pi / 8, arg_label=r"\pi/8")) temp_resolved.append(Gate("RZ", [gate.targets[0]], None, arg_value=-np.pi / 8, arg_label=r"-\pi/8")) temp_resolved.append(Gate("CNOT", gate.targets[0], gate.controls)) temp_resolved.append(Gate("GLOBALPHASE", None, None, arg_value=np.pi / 2, arg_label=r"\pi/2")) temp_resolved.append(Gate("RY", gate.targets[1], None, arg_value=np.pi / 2, arg_label=r"\pi/2")) temp_resolved.append(Gate("RY", gate.targets, None, arg_value=-np.pi / 2, arg_label=r"-\pi/2")) temp_resolved.append(Gate("RZ", gate.targets, None, arg_value=np.pi, arg_label=r"\pi")) temp_resolved.append(Gate("RY", gate.targets, None, arg_value=np.pi / 2, arg_label=r"\pi/2")) temp_resolved.append(Gate("RZ", gate.targets[0], None, arg_value=np.pi / 8, arg_label=r"\pi/8")) temp_resolved.append(Gate("RZ", gate.targets[1], None, arg_value=np.pi / 8, arg_label=r"\pi/8")) temp_resolved.append(Gate("CNOT", gate.targets[1], gate.controls)) temp_resolved.append(Gate("RZ", gate.targets[1], None, arg_value=-np.pi / 8, arg_label=r"-\pi/8")) temp_resolved.append(Gate("CNOT", gate.targets[1], gate.targets[0])) temp_resolved.append(Gate("RZ", gate.targets[1], None, arg_value=np.pi / 8, arg_label=r"\pi/8")) temp_resolved.append(Gate("CNOT", gate.targets[1], gate.controls)) temp_resolved.append(Gate("RZ", gate.targets[1], None, arg_value=-np.pi / 8, arg_label=r"-\pi/8")) temp_resolved.append(Gate("CNOT", gate.targets[1], gate.targets[0])) temp_resolved.append(Gate("GLOBALPHASE", None, None, arg_value=np.pi / 2, arg_label=r"\pi/2")) temp_resolved.append(Gate("RY", 
gate.targets[1], None, arg_value=np.pi / 2, arg_label=r"\pi/2")) temp_resolved.append(Gate("RY", gate.targets, None, arg_value=-np.pi / 2, arg_label=r"-\pi/2")) temp_resolved.append(Gate("RZ", gate.targets, None, arg_value=np.pi, arg_label=r"\pi")) temp_resolved.append(Gate("RY", gate.targets, None, arg_value=np.pi / 2, arg_label=r"\pi/2")) temp_resolved.append(Gate("CNOT", gate.targets[0], gate.targets[1])) elif gate.name == "TOFFOLI": temp_resolved.append(Gate("GLOBALPHASE", None, None, arg_value=1 * np.pi / 8, arg_label=r"\pi/8")) temp_resolved.append(Gate("RZ", gate.controls[1], None, arg_value=np.pi/2, arg_label=r"\pi/2")) temp_resolved.append(Gate("RZ", gate.controls[0], None, arg_value=np.pi / 4, arg_label=r"\pi/4")) temp_resolved.append(Gate("CNOT", gate.controls[1], gate.controls[0])) temp_resolved.append(Gate("RZ", gate.controls[1], None, arg_value=-np.pi / 4, arg_label=r"-\pi/4")) temp_resolved.append(Gate("CNOT", gate.controls[1], gate.controls[0])) temp_resolved.append(Gate("GLOBALPHASE", None, None, arg_value=np.pi / 2, arg_label=r"\pi/2")) temp_resolved.append(Gate("RY", gate.targets, None, arg_value=np.pi / 2, arg_label=r"\pi/2")) temp_resolved.append(Gate("RX", gate.targets, None, arg_value=np.pi, arg_label=r"\pi")) temp_resolved.append(Gate("RZ", gate.controls[1], None, arg_value=-np.pi / 4, arg_label=r"-\pi/4")) temp_resolved.append(Gate("RZ", gate.targets, None, arg_value=np.pi / 4, arg_label=r"\pi/4")) temp_resolved.append(Gate("CNOT", gate.targets, gate.controls[0])) temp_resolved.append(Gate("RZ", gate.targets, None, arg_value=-np.pi / 4, arg_label=r"-\pi/4")) temp_resolved.append(Gate("CNOT", gate.targets, gate.controls[1])) temp_resolved.append(Gate("RZ", gate.targets, None, arg_value=np.pi / 4, arg_label=r"\pi/4")) temp_resolved.append(Gate("CNOT", gate.targets, gate.controls[0])) temp_resolved.append(Gate("RZ", gate.targets, None, arg_value=-np.pi / 4, arg_label=r"-\pi/4")) temp_resolved.append(Gate("CNOT", gate.targets, 
gate.controls[1])) temp_resolved.append(Gate("GLOBALPHASE", None, None, arg_value=np.pi / 2, arg_label=r"\pi/2")) temp_resolved.append(Gate("RY", gate.targets, None, arg_value=np.pi / 2, arg_label=r"\pi/2")) temp_resolved.append(Gate("RX", gate.targets, None, arg_value=np.pi, arg_label=r"\pi")) elif gate.name == "GLOBALPHASE": temp_resolved.append(Gate(gate.name, gate.targets, gate.controls, gate.arg_value, gate.arg_label)) else: raise NotImplementedError( "Gate {} " "cannot be resolved.".format(gate.name)) if "CSIGN" in basis_2q: for gate in temp_resolved: if gate.name == "CNOT": qc_temp.gates.append(Gate("RY", gate.targets, None, arg_value=-np.pi / 2, arg_label=r"-\pi/2")) qc_temp.gates.append(Gate("CSIGN", gate.targets, gate.controls)) qc_temp.gates.append(Gate("RY", gate.targets, None, arg_value=np.pi / 2, arg_label=r"\pi/2")) else: qc_temp.gates.append(gate) elif "ISWAP" in basis_2q: for gate in temp_resolved: if gate.name == "CNOT": qc_temp.gates.append(Gate("GLOBALPHASE", None, None, arg_value=np.pi / 4, arg_label=r"\pi/4")) qc_temp.gates.append(Gate("ISWAP", [gate.controls[0], gate.targets[0]], None)) qc_temp.gates.append(Gate("RZ", gate.targets, None, arg_value=-np.pi / 2, arg_label=r"-\pi/2")) qc_temp.gates.append(Gate("RY", gate.controls, None, arg_value=-np.pi / 2, arg_label=r"-\pi/2")) qc_temp.gates.append(Gate("RZ", gate.controls, None, arg_value=np.pi / 2, arg_label=r"\pi/2")) qc_temp.gates.append(Gate("ISWAP", [gate.controls[0], gate.targets[0]], None)) qc_temp.gates.append(Gate("RY", gate.targets, None, arg_value=-np.pi / 2, arg_label=r"-\pi/2")) qc_temp.gates.append(Gate("RZ", gate.targets, None, arg_value=np.pi / 2, arg_label=r"\pi/2")) elif gate.name == "SWAP": qc_temp.gates.append(Gate("GLOBALPHASE", None, None, arg_value=np.pi / 4, arg_label=r"\pi/4")) qc_temp.gates.append(Gate("ISWAP", gate.targets, None)) qc_temp.gates.append(Gate("RX", gate.targets[0], None, arg_value=-np.pi / 2, arg_label=r"-\pi/2")) qc_temp.gates.append(Gate("ISWAP", 
gate.targets, None)) qc_temp.gates.append(Gate("RX", gate.targets[1], None, arg_value=-np.pi / 2, arg_label=r"-\pi/2")) qc_temp.gates.append(Gate("ISWAP", [gate.targets[1], gate.targets[0]], None)) qc_temp.gates.append(Gate("RX", gate.targets[0], None, arg_value=-np.pi / 2, arg_label=r"-\pi/2")) else: qc_temp.gates.append(gate) elif "SQRTSWAP" in basis_2q: for gate in temp_resolved: if gate.name == "CNOT": qc_temp.gates.append(Gate("RY", gate.targets, None, arg_value=np.pi / 2, arg_label=r"\pi/2")) qc_temp.gates.append(Gate("SQRTSWAP", [gate.controls[0], gate.targets[0]], None)) qc_temp.gates.append(Gate("RZ", gate.controls, None, arg_value=np.pi, arg_label=r"\pi")) qc_temp.gates.append(Gate("SQRTSWAP", [gate.controls[0], gate.targets[0]], None)) qc_temp.gates.append(Gate("RZ", gate.targets, None, arg_value=-np.pi / 2, arg_label=r"-\pi/2")) qc_temp.gates.append(Gate("RY", gate.targets, None, arg_value=-np.pi / 2, arg_label=r"-\pi/2")) qc_temp.gates.append(Gate("RZ", gate.controls, None, arg_value=-np.pi / 2, arg_label=r"-\pi/2")) else: qc_temp.gates.append(gate) elif "SQRTISWAP" in basis_2q: for gate in temp_resolved: if gate.name == "CNOT": qc_temp.gates.append(Gate("RY", gate.controls, None, arg_value=-np.pi / 2, arg_label=r"-\pi/2")) qc_temp.gates.append(Gate("RX", gate.controls, None, arg_value=np.pi / 2, arg_label=r"\pi/2")) qc_temp.gates.append(Gate("RX", gate.targets, None, arg_value=-np.pi / 2, arg_label=r"-\pi/2")) qc_temp.gates.append(Gate("SQRTISWAP", [gate.controls[0], gate.targets[0]], None)) qc_temp.gates.append(Gate("RX", gate.controls, None, arg_value=np.pi, arg_label=r"\pi")) qc_temp.gates.append(Gate("SQRTISWAP", [gate.controls[0], gate.targets[0]], None)) qc_temp.gates.append(Gate("RY", gate.controls, None, arg_value=np.pi / 2, arg_label=r"\pi/2")) qc_temp.gates.append(Gate("GLOBALPHASE", None, None, arg_value=np.pi / 4, arg_label=r"\pi/4")) qc_temp.gates.append(Gate("RZ", gate.controls, None, arg_value=np.pi, arg_label=r"\pi")) 
qc_temp.gates.append(Gate("GLOBALPHASE", None, None, arg_value=3 * np.pi / 2, arg_label=r"3\pi/2")) else: qc_temp.gates.append(gate) else: qc_temp.gates = temp_resolved if len(basis_1q) == 2: temp_resolved = qc_temp.gates qc_temp.gates = [] for gate in temp_resolved: if gate.name == "RX" and "RX" not in basis_1q: qc_temp.gates.append(Gate("RY", gate.targets, None, arg_value=-np.pi / 2, arg_label=r"-\pi/2")) qc_temp.gates.append(Gate("RZ", gate.targets, None, gate.arg_value, gate.arg_label)) qc_temp.gates.append(Gate("RY", gate.targets, None, arg_value=np.pi / 2, arg_label=r"\pi/2")) elif gate.name == "RY" and "RY" not in basis_1q: qc_temp.gates.append(Gate("RZ", gate.targets, None, arg_value=-np.pi / 2, arg_label=r"-\pi/2")) qc_temp.gates.append(Gate("RX", gate.targets, None, gate.arg_value, gate.arg_label)) qc_temp.gates.append(Gate("RZ", gate.targets, None, arg_value=np.pi / 2, arg_label=r"\pi/2")) elif gate.name == "RZ" and "RZ" not in basis_1q: qc_temp.gates.append(Gate("RX", gate.targets, None, arg_value=-np.pi / 2, arg_label=r"-\pi/2")) qc_temp.gates.append(Gate("RY", gate.targets, None, gate.arg_value, gate.arg_label)) qc_temp.gates.append(Gate("RX", gate.targets, None, arg_value=np.pi / 2, arg_label=r"\pi/2")) else: qc_temp.gates.append(gate) return qc_temp def adjacent_gates(self): """ Method to resolve two qubit gates with non-adjacent control/s or target/s in terms of gates with adjacent interactions. Returns ------- qc : QubitCircuit Return QubitCircuit of the gates for the qubit circuit with the resolved non-adjacent gates. 
""" temp = QubitCircuit(self.N, reverse_states=self.reverse_states) swap_gates = ["SWAP", "ISWAP", "SQRTISWAP", "SQRTSWAP", "BERKELEY", "SWAPalpha"] for gate in self.gates: if gate.name == "CNOT" or gate.name == "CSIGN": start = min([gate.targets[0], gate.controls[0]]) end = max([gate.targets[0], gate.controls[0]]) i = start while i < end: if start + end - i - i == 1 and (end - start + 1) % 2 == 0: # Apply required gate if control, target are adjacent # to each other, provided |control-target| is even. if end == gate.controls[0]: temp.gates.append(Gate(gate.name, targets=[i], controls=[i + 1])) else: temp.gates.append(Gate(gate.name, targets=[i + 1], controls=[i])) elif (start + end - i - i == 2 and (end - start + 1) % 2 == 1): # Apply a swap between i and its adjacent gate, then # the required gate if and then another swap if control # and target have one qubit between them, provided # |control-target| is odd. temp.gates.append(Gate("SWAP", targets=[i, i + 1])) if end == gate.controls[0]: temp.gates.append(Gate(gate.name, targets=[i + 1], controls=[i + 2])) else: temp.gates.append(Gate(gate.name, targets=[i + 2], controls=[i + 1])) temp.gates.append(Gate("SWAP", targets=[i, i + 1])) i += 1 else: # Swap the target/s and/or control with their adjacent # qubit to bring them closer. 
temp.gates.append(Gate("SWAP", targets=[i, i + 1])) temp.gates.append(Gate("SWAP", targets=[start + end - i - 1, start + end - i])) i += 1 elif gate.name in swap_gates: start = min([gate.targets[0], gate.targets[1]]) end = max([gate.targets[0], gate.targets[1]]) i = start while i < end: if start + end - i - i == 1 and (end - start + 1) % 2 == 0: temp.gates.append(Gate(gate.name, targets=[i, i + 1])) elif ((start + end - i - i) == 2 and (end - start + 1) % 2 == 1): temp.gates.append(Gate("SWAP", targets=[i, i + 1])) temp.gates.append( Gate(gate.name, targets=[i + 1, i + 2])) temp.gates.append(Gate("SWAP", targets=[i, i + 1])) i += 1 else: temp.gates.append(Gate("SWAP", targets=[i, i + 1])) temp.gates.append(Gate("SWAP", targets=[start + end - i - 1, start + end - i])) i += 1 else: raise NotImplementedError( "`adjacent_gates` is not defined for " "gate {}.".format(gate.name)) return temp def propagators(self): """ Propagator matrix calculator for N qubits returning the individual steps as unitary matrices operating from left to right. Returns ------- U_list : list Return list of unitary matrices for the qubit circuit. 
""" self.U_list = [] for gate in self.gates: if gate.name == "RX": self.U_list.append(rx(gate.arg_value, self.N, gate.targets[0])) elif gate.name == "RY": self.U_list.append(ry(gate.arg_value, self.N, gate.targets[0])) elif gate.name == "RZ": self.U_list.append(rz(gate.arg_value, self.N, gate.targets[0])) elif gate.name == "SQRTNOT": self.U_list.append(sqrtnot(self.N, gate.targets[0])) elif gate.name == "SNOT": self.U_list.append(snot(self.N, gate.targets[0])) elif gate.name == "PHASEGATE": self.U_list.append(phasegate(gate.arg_value, self.N, gate.targets[0])) elif gate.name == "CRX": self.U_list.append(controlled_gate(rx(gate.arg_value), N=self.N, control=gate.controls[0], target=gate.targets[0])) elif gate.name == "CRY": self.U_list.append(controlled_gate(ry(gate.arg_value), N=self.N, control=gate.controls[0], target=gate.targets[0])) elif gate.name == "CRZ": self.U_list.append(controlled_gate(rz(gate.arg_value), N=self.N, control=gate.controls[0], target=gate.targets[0])) elif gate.name == "CPHASE": self.U_list.append(cphase(gate.arg_value, self.N, gate.controls[0], gate.targets[0])) elif gate.name == "CNOT": self.U_list.append(cnot(self.N, gate.controls[0], gate.targets[0])) elif gate.name == "CSIGN": self.U_list.append(csign(self.N, gate.controls[0], gate.targets[0])) elif gate.name == "BERKELEY": self.U_list.append(berkeley(self.N, gate.targets)) elif gate.name == "SWAPalpha": self.U_list.append(swapalpha(gate.arg_value, self.N, gate.targets)) elif gate.name == "SWAP": self.U_list.append(swap(self.N, gate.targets)) elif gate.name == "ISWAP": self.U_list.append(iswap(self.N, gate.targets)) elif gate.name == "SQRTSWAP": self.U_list.append(sqrtswap(self.N, gate.targets)) elif gate.name == "SQRTISWAP": self.U_list.append(sqrtiswap(self.N, gate.targets)) elif gate.name == "FREDKIN": self.U_list.append(fredkin(self.N, gate.controls[0], gate.targets)) elif gate.name == "TOFFOLI": self.U_list.append(toffoli(self.N, gate.controls, gate.targets[0])) elif gate.name == 
"GLOBALPHASE": self.U_list.append(globalphase(gate.arg_value, self.N)) elif gate.name in self.user_gates: if gate.controls is not None: raise ValueError( "A user defined gate {} takes only " "`targets` variable.".format(gate.name)) func = self.user_gates[gate.name] para_num = len(inspect.getfullargspec(func)[0]) if para_num == 0: oper = func() elif para_num == 1: oper = func(gate.arg_value) else: raise ValueError( "gate function takes at most one parameters.") self.U_list.append(expand_oper(oper, self.N, gate.targets)) else: raise NotImplementedError( "{} gate is an unknown gate.".format(gate.name)) return self.U_list def latex_code(self): rows = [] gates = self.gates for gate in gates: col = [] for n in range(self.N): if gate.targets and n in gate.targets: if len(gate.targets) > 1: if gate.name == "SWAP": col.append(r" \qswap \qwx ") elif ((self.reverse_states and n == max(gate.targets)) or (not self.reverse_states and n == min(gate.targets))): col.append(r" \multigate{%d}{%s} " % (len(gate.targets) - 1, _gate_label(gate.name, gate.arg_label))) else: col.append(r" \ghost{%s} " % (_gate_label(gate.name, gate.arg_label))) elif gate.name == "CNOT": col.append(r" \targ ") elif gate.name == "TOFFOLI": col.append(r" \targ ") else: col.append(r" \gate{%s} " % _gate_label(gate.name, gate.arg_label)) elif gate.controls and n in gate.controls: m = (gate.targets[0] - n) * (-1 if self.reverse_states else 1) col.append(r" \ctrl{%d} " % m) elif (not gate.controls and not gate.targets): # global gate if ((self.reverse_states and n == self.N - 1) or (not self.reverse_states and n == 0)): col.append(r" \multigate{%d}{%s} " % (self.N - 1, _gate_label(gate.name, gate.arg_label))) else: col.append(r" \ghost{%s} " % (_gate_label(gate.name, gate.arg_label))) else: col.append(r" \qw ") col.append(r" \qw ") rows.append(col) input_states = ["\lstick{\ket{" + x + "}}" if x is not None else "" for x in self.input_states] code = "" n_iter = (reversed(range(self.N)) if self.reverse_states 
else range(self.N)) for n in n_iter: code += r" & %s" % input_states[n] for m in range(len(gates)): code += r" & %s" % rows[m][n] code += r" & \qw \\ " + "\n" return code def _repr_png_(self): return _latex_compile(self.latex_code(), format="png") def _repr_svg_(self): return _latex_compile(self.latex_code(), format="svg") @property def png(self): from IPython.display import Image return Image(self._repr_png_(), embed=True) @property def svg(self): from IPython.display import SVG return SVG(self._repr_svg_()) def qasm(self): code = "# qasm code generated by QuTiP\n\n" for n in range(self.N): code += "\tqubit\tq%d\n" % n code += "\n" for gate in self.gates: code += "\t%s\t" % gate.name qtargets = ["q%d" % t for t in gate.targets] if gate.targets else [] qcontrols = (["q%d" % c for c in gate.controls] if gate.controls else []) code += ",".join(qtargets + qcontrols) code += "\n" return code
46.707934
79
0.434987
, 'ISWAP': r'{i}{\rm SWAP}', 'SQRTSWAP': r'\sqrt{\rm SWAP}', 'SQRTISWAP': r'\sqrt{{i}\rm SWAP}', 'FREDKIN': r'{\rm FREDKIN}', 'TOFFOLI': r'{\rm TOFFOLI}', 'GLOBALPHASE': r'{\rm Ph}', } def _gate_label(name, arg_label): if name in _gate_name_to_label: gate_label = _gate_name_to_label[name] else: warnings.warn("Unknown gate %s" % name) gate_label = name if arg_label: return r'%s(%s)' % (gate_label, arg_label) else: return r'%s' % gate_label class QubitCircuit(object): def __init__(self, N, input_states=None, output_states=None, reverse_states=True, user_gates=None): self.N = N self.reverse_states = reverse_states self.gates = [] self.U_list = [] self.input_states = [None for i in range(N)] self.output_states = [None for i in range(N)] if user_gates is None: self.user_gates = {} else: if isinstance(user_gates, dict): self.user_gates = user_gates else: raise ValueError( "`user_gate` takes a python dictionary of the form" "{{str: gate_function}}, not {}".format(user_gates)) def add_state(self, state, targets=None, state_type="input"): if state_type == "input": for i in targets: self.input_states[i] = state if state_type == "output": for i in targets: self.output_states[i] = state def add_gate(self, gate, targets=None, controls=None, arg_value=None, arg_label=None, index=None): if isinstance(gate, Gate): name = gate.name targets = gate.targets controls = gate.controls arg_value = gate.arg_value arg_label = gate.arg_label else: name = gate if index is None: self.gates.append(Gate(name, targets=targets, controls=controls, arg_value=arg_value, arg_label=arg_label)) else: for position in index: self.gates.insert(position, Gate(name, targets=targets, controls=controls, arg_value=arg_value, arg_label=arg_label)) def add_1q_gate(self, name, start=0, end=None, qubits=None, arg_value=None, arg_label=None): if name not in ["RX", "RY", "RZ", "SNOT", "SQRTNOT", "PHASEGATE"]: raise ValueError("%s is not a single qubit gate" % name) if qubits is not None: for i in range(len(qubits)): 
self.gates.append(Gate(name, targets=qubits[i], controls=None, arg_value=arg_value, arg_label=arg_label)) else: if end is None: end = self.N - 1 for i in range(start, end): self.gates.append(Gate(name, targets=i, controls=None, arg_value=arg_value, arg_label=arg_label)) def add_circuit(self, qc, start=0): if self.N - start < qc.N: raise NotImplementedError("Targets exceed number of qubits.") for gate in qc.gates: if gate.name in ["RX", "RY", "RZ", "SNOT", "SQRTNOT", "PHASEGATE"]: self.add_gate(gate.name, gate.targets[0] + start, None, gate.arg_value, gate.arg_label) elif gate.name in ["CPHASE", "CNOT", "CSIGN", "CRX", "CRY", "CRZ"]: self.add_gate(gate.name, gate.targets[0] + start, gate.controls[0] + start, gate.arg_value, gate.arg_label) elif gate.name in ["BERKELEY", "SWAPalpha", "SWAP", "ISWAP", "SQRTSWAP", "SQRTISWAP"]: self.add_gate(gate.name, None, [gate.controls[0] + start, gate.controls[1] + start], None, None) elif gate.name in ["TOFFOLI"]: self.add_gate(gate.name, gate.targets[0] + start, [gate.controls[0] + start, gate.controls[1] + start], None, None) elif gate.name in ["FREDKIN"]: self.add_gate(gate.name, [gate.targets[0] + start, gate.targets[1] + start], gate.controls + start, None, None) elif gate.name in self.user_gates: self.add_gate( gate.name, targets=gate.targets, arg_value=gate.arg_value) def remove_gate(self, index=None, end=None, name=None, remove="first"): if index is not None and index <= self.N: if end is not None and end <= self.N: for i in range(end - index): self.gates.pop(index + i) elif end is not None and end > self.N: raise ValueError("End target exceeds number of gates.") else: self.gates.pop(index) elif name is not None and remove == "first": for gate in self.gates: if name == gate.name: self.gates.remove(gate) break elif name is not None and remove == "last": for i in range(self.N + 1): if name == self.gates[self.N - i].name: self.gates.remove(self.gates[self.N - i]) break elif name is not None and remove == "all": for j in 
range(self.N + 1): if name == self.gates[self.N - j].name: self.gates.remove(self.gates[self.N - j]) else: self.gates.pop() def reverse_circuit(self): temp = QubitCircuit(self.N, self.reverse_states) for gate in reversed(self.gates): temp.add_gate(gate) return temp def resolve_gates(self, basis=["CNOT", "RX", "RY", "RZ"]): qc_temp = QubitCircuit(self.N, self.reverse_states) temp_resolved = [] basis_1q_valid = ["RX", "RY", "RZ"] basis_2q_valid = ["CNOT", "CSIGN", "ISWAP", "SQRTSWAP", "SQRTISWAP"] if isinstance(basis, list): basis_1q = [] basis_2q = [] for gate in basis: if gate in basis_2q_valid: basis_2q.append(gate) elif gate in basis_1q_valid: basis_1q.append(gate) else: raise NotImplementedError( "%s is not a valid basis gate" % gate) if len(basis_1q) == 1: raise ValueError("Not sufficient single-qubit gates in basis") elif len(basis_1q) == 0: basis_1q = ["RX", "RY", "RZ"] else: basis_1q = ["RX", "RY", "RZ"] if basis in basis_2q_valid: basis_2q = [basis] else: raise ValueError("%s is not a valid two-qubit basis gate" % basis) for gate in self.gates: if gate.name == "RX": temp_resolved.append(gate) elif gate.name == "RY": temp_resolved.append(gate) elif gate.name == "RZ": temp_resolved.append(gate) elif gate.name == "SQRTNOT": temp_resolved.append(Gate("GLOBALPHASE", None, None, arg_value=np.pi / 4, arg_label=r"\pi/4")) temp_resolved.append(Gate("RX", gate.targets, None, arg_value=np.pi / 2, arg_label=r"\pi/2")) elif gate.name == "SNOT": temp_resolved.append(Gate("GLOBALPHASE", None, None, arg_value=np.pi / 2, arg_label=r"\pi/2")) temp_resolved.append(Gate("RY", gate.targets, None, arg_value=np.pi / 2, arg_label=r"\pi/2")) temp_resolved.append(Gate("RX", gate.targets, None, arg_value=np.pi, arg_label=r"\pi")) elif gate.name == "PHASEGATE": temp_resolved.append(Gate("GLOBALPHASE", None, None, arg_value=gate.arg_value / 2, arg_label=gate.arg_label)) temp_resolved.append(Gate("RZ", gate.targets, None, gate.arg_value, gate.arg_label)) elif gate.name in basis_2q: 
temp_resolved.append(gate) elif gate.name == "CPHASE": raise NotImplementedError("Cannot be resolved in this basis") elif gate.name == "CNOT": temp_resolved.append(gate) elif gate.name == "CSIGN": temp_resolved.append(Gate("RY", gate.targets, None, arg_value=np.pi / 2, arg_label=r"\pi/2")) temp_resolved.append(Gate("RX", gate.targets, None, arg_value=np.pi, arg_label=r"\pi")) temp_resolved.append(Gate("CNOT", gate.targets, gate.controls)) temp_resolved.append(Gate("RY", gate.targets, None, arg_value=np.pi / 2, arg_label=r"\pi/2")) temp_resolved.append(Gate("RX", gate.targets, None, arg_value=np.pi, arg_label=r"\pi")) temp_resolved.append(Gate("GLOBALPHASE", None, None, arg_value=np.pi, arg_label=r"\pi")) elif gate.name == "BERKELEY": raise NotImplementedError("Cannot be resolved in this basis") elif gate.name == "SWAPalpha": raise NotImplementedError("Cannot be resolved in this basis") elif gate.name == "SWAP": if "ISWAP" in basis_2q: temp_resolved.append(gate) else: temp_resolved.append( Gate("CNOT", gate.targets[0], gate.targets[1])) temp_resolved.append( Gate("CNOT", gate.targets[1], gate.targets[0])) temp_resolved.append( Gate("CNOT", gate.targets[0], gate.targets[1])) elif gate.name == "ISWAP": temp_resolved.append(Gate("CNOT", gate.targets[0], gate.targets[1])) temp_resolved.append(Gate("CNOT", gate.targets[1], gate.targets[0])) temp_resolved.append(Gate("CNOT", gate.targets[0], gate.targets[1])) temp_resolved.append(Gate("RZ", gate.targets[0], None, arg_value=np.pi / 2, arg_label=r"\pi/2")) temp_resolved.append(Gate("RZ", gate.targets[1], None, arg_value=np.pi / 2, arg_label=r"\pi/2")) temp_resolved.append(Gate("RY", gate.targets[0], None, arg_value=np.pi / 2, arg_label=r"\pi/2")) temp_resolved.append(Gate("RX", gate.targets, None, arg_value=np.pi, arg_label=r"\pi")) temp_resolved.append(Gate("CNOT", gate.targets[0], gate.targets[1])) temp_resolved.append(Gate("RY", gate.targets[0], None, arg_value=np.pi / 2, arg_label=r"\pi/2")) 
temp_resolved.append(Gate("RX", gate.targets, None, arg_value=np.pi, arg_label=r"\pi")) temp_resolved.append(Gate("GLOBALPHASE", None, None, arg_value=np.pi, arg_label=r"\pi")) temp_resolved.append(Gate("GLOBALPHASE", None, None, arg_value=np.pi / 2, arg_label=r"\pi/2")) elif gate.name == "SQRTSWAP": raise NotImplementedError("Cannot be resolved in this basis") elif gate.name == "SQRTISWAP": raise NotImplementedError("Cannot be resolved in this basis") elif gate.name == "FREDKIN": temp_resolved.append(Gate("CNOT", gate.targets[0], gate.targets[1])) temp_resolved.append(Gate("CNOT", gate.targets[0], gate.controls)) temp_resolved.append(Gate("RZ", gate.controls, None, arg_value=np.pi / 8, arg_label=r"\pi/8")) temp_resolved.append(Gate("RZ", [gate.targets[0]], None, arg_value=-np.pi / 8, arg_label=r"-\pi/8")) temp_resolved.append(Gate("CNOT", gate.targets[0], gate.controls)) temp_resolved.append(Gate("GLOBALPHASE", None, None, arg_value=np.pi / 2, arg_label=r"\pi/2")) temp_resolved.append(Gate("RY", gate.targets[1], None, arg_value=np.pi / 2, arg_label=r"\pi/2")) temp_resolved.append(Gate("RY", gate.targets, None, arg_value=-np.pi / 2, arg_label=r"-\pi/2")) temp_resolved.append(Gate("RZ", gate.targets, None, arg_value=np.pi, arg_label=r"\pi")) temp_resolved.append(Gate("RY", gate.targets, None, arg_value=np.pi / 2, arg_label=r"\pi/2")) temp_resolved.append(Gate("RZ", gate.targets[0], None, arg_value=np.pi / 8, arg_label=r"\pi/8")) temp_resolved.append(Gate("RZ", gate.targets[1], None, arg_value=np.pi / 8, arg_label=r"\pi/8")) temp_resolved.append(Gate("CNOT", gate.targets[1], gate.controls)) temp_resolved.append(Gate("RZ", gate.targets[1], None, arg_value=-np.pi / 8, arg_label=r"-\pi/8")) temp_resolved.append(Gate("CNOT", gate.targets[1], gate.targets[0])) temp_resolved.append(Gate("RZ", gate.targets[1], None, arg_value=np.pi / 8, arg_label=r"\pi/8")) temp_resolved.append(Gate("CNOT", gate.targets[1], gate.controls)) temp_resolved.append(Gate("RZ", gate.targets[1], 
None, arg_value=-np.pi / 8, arg_label=r"-\pi/8")) temp_resolved.append(Gate("CNOT", gate.targets[1], gate.targets[0])) temp_resolved.append(Gate("GLOBALPHASE", None, None, arg_value=np.pi / 2, arg_label=r"\pi/2")) temp_resolved.append(Gate("RY", gate.targets[1], None, arg_value=np.pi / 2, arg_label=r"\pi/2")) temp_resolved.append(Gate("RY", gate.targets, None, arg_value=-np.pi / 2, arg_label=r"-\pi/2")) temp_resolved.append(Gate("RZ", gate.targets, None, arg_value=np.pi, arg_label=r"\pi")) temp_resolved.append(Gate("RY", gate.targets, None, arg_value=np.pi / 2, arg_label=r"\pi/2")) temp_resolved.append(Gate("CNOT", gate.targets[0], gate.targets[1])) elif gate.name == "TOFFOLI": temp_resolved.append(Gate("GLOBALPHASE", None, None, arg_value=1 * np.pi / 8, arg_label=r"\pi/8")) temp_resolved.append(Gate("RZ", gate.controls[1], None, arg_value=np.pi/2, arg_label=r"\pi/2")) temp_resolved.append(Gate("RZ", gate.controls[0], None, arg_value=np.pi / 4, arg_label=r"\pi/4")) temp_resolved.append(Gate("CNOT", gate.controls[1], gate.controls[0])) temp_resolved.append(Gate("RZ", gate.controls[1], None, arg_value=-np.pi / 4, arg_label=r"-\pi/4")) temp_resolved.append(Gate("CNOT", gate.controls[1], gate.controls[0])) temp_resolved.append(Gate("GLOBALPHASE", None, None, arg_value=np.pi / 2, arg_label=r"\pi/2")) temp_resolved.append(Gate("RY", gate.targets, None, arg_value=np.pi / 2, arg_label=r"\pi/2")) temp_resolved.append(Gate("RX", gate.targets, None, arg_value=np.pi, arg_label=r"\pi")) temp_resolved.append(Gate("RZ", gate.controls[1], None, arg_value=-np.pi / 4, arg_label=r"-\pi/4")) temp_resolved.append(Gate("RZ", gate.targets, None, arg_value=np.pi / 4, arg_label=r"\pi/4")) temp_resolved.append(Gate("CNOT", gate.targets, gate.controls[0])) temp_resolved.append(Gate("RZ", gate.targets, None, arg_value=-np.pi / 4, arg_label=r"-\pi/4")) temp_resolved.append(Gate("CNOT", gate.targets, gate.controls[1])) temp_resolved.append(Gate("RZ", gate.targets, None, arg_value=np.pi / 4, 
arg_label=r"\pi/4")) temp_resolved.append(Gate("CNOT", gate.targets, gate.controls[0])) temp_resolved.append(Gate("RZ", gate.targets, None, arg_value=-np.pi / 4, arg_label=r"-\pi/4")) temp_resolved.append(Gate("CNOT", gate.targets, gate.controls[1])) temp_resolved.append(Gate("GLOBALPHASE", None, None, arg_value=np.pi / 2, arg_label=r"\pi/2")) temp_resolved.append(Gate("RY", gate.targets, None, arg_value=np.pi / 2, arg_label=r"\pi/2")) temp_resolved.append(Gate("RX", gate.targets, None, arg_value=np.pi, arg_label=r"\pi")) elif gate.name == "GLOBALPHASE": temp_resolved.append(Gate(gate.name, gate.targets, gate.controls, gate.arg_value, gate.arg_label)) else: raise NotImplementedError( "Gate {} " "cannot be resolved.".format(gate.name)) if "CSIGN" in basis_2q: for gate in temp_resolved: if gate.name == "CNOT": qc_temp.gates.append(Gate("RY", gate.targets, None, arg_value=-np.pi / 2, arg_label=r"-\pi/2")) qc_temp.gates.append(Gate("CSIGN", gate.targets, gate.controls)) qc_temp.gates.append(Gate("RY", gate.targets, None, arg_value=np.pi / 2, arg_label=r"\pi/2")) else: qc_temp.gates.append(gate) elif "ISWAP" in basis_2q: for gate in temp_resolved: if gate.name == "CNOT": qc_temp.gates.append(Gate("GLOBALPHASE", None, None, arg_value=np.pi / 4, arg_label=r"\pi/4")) qc_temp.gates.append(Gate("ISWAP", [gate.controls[0], gate.targets[0]], None)) qc_temp.gates.append(Gate("RZ", gate.targets, None, arg_value=-np.pi / 2, arg_label=r"-\pi/2")) qc_temp.gates.append(Gate("RY", gate.controls, None, arg_value=-np.pi / 2, arg_label=r"-\pi/2")) qc_temp.gates.append(Gate("RZ", gate.controls, None, arg_value=np.pi / 2, arg_label=r"\pi/2")) qc_temp.gates.append(Gate("ISWAP", [gate.controls[0], gate.targets[0]], None)) qc_temp.gates.append(Gate("RY", gate.targets, None, arg_value=-np.pi / 2, arg_label=r"-\pi/2")) qc_temp.gates.append(Gate("RZ", gate.targets, None, arg_value=np.pi / 2, arg_label=r"\pi/2")) elif gate.name == "SWAP": qc_temp.gates.append(Gate("GLOBALPHASE", None, None, 
arg_value=np.pi / 4, arg_label=r"\pi/4")) qc_temp.gates.append(Gate("ISWAP", gate.targets, None)) qc_temp.gates.append(Gate("RX", gate.targets[0], None, arg_value=-np.pi / 2, arg_label=r"-\pi/2")) qc_temp.gates.append(Gate("ISWAP", gate.targets, None)) qc_temp.gates.append(Gate("RX", gate.targets[1], None, arg_value=-np.pi / 2, arg_label=r"-\pi/2")) qc_temp.gates.append(Gate("ISWAP", [gate.targets[1], gate.targets[0]], None)) qc_temp.gates.append(Gate("RX", gate.targets[0], None, arg_value=-np.pi / 2, arg_label=r"-\pi/2")) else: qc_temp.gates.append(gate) elif "SQRTSWAP" in basis_2q: for gate in temp_resolved: if gate.name == "CNOT": qc_temp.gates.append(Gate("RY", gate.targets, None, arg_value=np.pi / 2, arg_label=r"\pi/2")) qc_temp.gates.append(Gate("SQRTSWAP", [gate.controls[0], gate.targets[0]], None)) qc_temp.gates.append(Gate("RZ", gate.controls, None, arg_value=np.pi, arg_label=r"\pi")) qc_temp.gates.append(Gate("SQRTSWAP", [gate.controls[0], gate.targets[0]], None)) qc_temp.gates.append(Gate("RZ", gate.targets, None, arg_value=-np.pi / 2, arg_label=r"-\pi/2")) qc_temp.gates.append(Gate("RY", gate.targets, None, arg_value=-np.pi / 2, arg_label=r"-\pi/2")) qc_temp.gates.append(Gate("RZ", gate.controls, None, arg_value=-np.pi / 2, arg_label=r"-\pi/2")) else: qc_temp.gates.append(gate) elif "SQRTISWAP" in basis_2q: for gate in temp_resolved: if gate.name == "CNOT": qc_temp.gates.append(Gate("RY", gate.controls, None, arg_value=-np.pi / 2, arg_label=r"-\pi/2")) qc_temp.gates.append(Gate("RX", gate.controls, None, arg_value=np.pi / 2, arg_label=r"\pi/2")) qc_temp.gates.append(Gate("RX", gate.targets, None, arg_value=-np.pi / 2, arg_label=r"-\pi/2")) qc_temp.gates.append(Gate("SQRTISWAP", [gate.controls[0], gate.targets[0]], None)) qc_temp.gates.append(Gate("RX", gate.controls, None, arg_value=np.pi, arg_label=r"\pi")) qc_temp.gates.append(Gate("SQRTISWAP", [gate.controls[0], gate.targets[0]], None)) qc_temp.gates.append(Gate("RY", gate.controls, None, 
arg_value=np.pi / 2, arg_label=r"\pi/2")) qc_temp.gates.append(Gate("GLOBALPHASE", None, None, arg_value=np.pi / 4, arg_label=r"\pi/4")) qc_temp.gates.append(Gate("RZ", gate.controls, None, arg_value=np.pi, arg_label=r"\pi")) qc_temp.gates.append(Gate("GLOBALPHASE", None, None, arg_value=3 * np.pi / 2, arg_label=r"3\pi/2")) else: qc_temp.gates.append(gate) else: qc_temp.gates = temp_resolved if len(basis_1q) == 2: temp_resolved = qc_temp.gates qc_temp.gates = [] for gate in temp_resolved: if gate.name == "RX" and "RX" not in basis_1q: qc_temp.gates.append(Gate("RY", gate.targets, None, arg_value=-np.pi / 2, arg_label=r"-\pi/2")) qc_temp.gates.append(Gate("RZ", gate.targets, None, gate.arg_value, gate.arg_label)) qc_temp.gates.append(Gate("RY", gate.targets, None, arg_value=np.pi / 2, arg_label=r"\pi/2")) elif gate.name == "RY" and "RY" not in basis_1q: qc_temp.gates.append(Gate("RZ", gate.targets, None, arg_value=-np.pi / 2, arg_label=r"-\pi/2")) qc_temp.gates.append(Gate("RX", gate.targets, None, gate.arg_value, gate.arg_label)) qc_temp.gates.append(Gate("RZ", gate.targets, None, arg_value=np.pi / 2, arg_label=r"\pi/2")) elif gate.name == "RZ" and "RZ" not in basis_1q: qc_temp.gates.append(Gate("RX", gate.targets, None, arg_value=-np.pi / 2, arg_label=r"-\pi/2")) qc_temp.gates.append(Gate("RY", gate.targets, None, gate.arg_value, gate.arg_label)) qc_temp.gates.append(Gate("RX", gate.targets, None, arg_value=np.pi / 2, arg_label=r"\pi/2")) else: qc_temp.gates.append(gate) return qc_temp def adjacent_gates(self): temp = QubitCircuit(self.N, reverse_states=self.reverse_states) swap_gates = ["SWAP", "ISWAP", "SQRTISWAP", "SQRTSWAP", "BERKELEY", "SWAPalpha"] for gate in self.gates: if gate.name == "CNOT" or gate.name == "CSIGN": start = min([gate.targets[0], gate.controls[0]]) end = max([gate.targets[0], gate.controls[0]]) i = start while i < end: if start + end - i - i == 1 and (end - start + 1) % 2 == 0: if end == gate.controls[0]: temp.gates.append(Gate(gate.name, 
targets=[i], controls=[i + 1])) else: temp.gates.append(Gate(gate.name, targets=[i + 1], controls=[i])) elif (start + end - i - i == 2 and (end - start + 1) % 2 == 1): temp.gates.append(Gate("SWAP", targets=[i, i + 1])) if end == gate.controls[0]: temp.gates.append(Gate(gate.name, targets=[i + 1], controls=[i + 2])) else: temp.gates.append(Gate(gate.name, targets=[i + 2], controls=[i + 1])) temp.gates.append(Gate("SWAP", targets=[i, i + 1])) i += 1 else: temp.gates.append(Gate("SWAP", targets=[i, i + 1])) temp.gates.append(Gate("SWAP", targets=[start + end - i - 1, start + end - i])) i += 1 elif gate.name in swap_gates: start = min([gate.targets[0], gate.targets[1]]) end = max([gate.targets[0], gate.targets[1]]) i = start while i < end: if start + end - i - i == 1 and (end - start + 1) % 2 == 0: temp.gates.append(Gate(gate.name, targets=[i, i + 1])) elif ((start + end - i - i) == 2 and (end - start + 1) % 2 == 1): temp.gates.append(Gate("SWAP", targets=[i, i + 1])) temp.gates.append( Gate(gate.name, targets=[i + 1, i + 2])) temp.gates.append(Gate("SWAP", targets=[i, i + 1])) i += 1 else: temp.gates.append(Gate("SWAP", targets=[i, i + 1])) temp.gates.append(Gate("SWAP", targets=[start + end - i - 1, start + end - i])) i += 1 else: raise NotImplementedError( "`adjacent_gates` is not defined for " "gate {}.".format(gate.name)) return temp def propagators(self): self.U_list = [] for gate in self.gates: if gate.name == "RX": self.U_list.append(rx(gate.arg_value, self.N, gate.targets[0])) elif gate.name == "RY": self.U_list.append(ry(gate.arg_value, self.N, gate.targets[0])) elif gate.name == "RZ": self.U_list.append(rz(gate.arg_value, self.N, gate.targets[0])) elif gate.name == "SQRTNOT": self.U_list.append(sqrtnot(self.N, gate.targets[0])) elif gate.name == "SNOT": self.U_list.append(snot(self.N, gate.targets[0])) elif gate.name == "PHASEGATE": self.U_list.append(phasegate(gate.arg_value, self.N, gate.targets[0])) elif gate.name == "CRX": 
self.U_list.append(controlled_gate(rx(gate.arg_value), N=self.N, control=gate.controls[0], target=gate.targets[0])) elif gate.name == "CRY": self.U_list.append(controlled_gate(ry(gate.arg_value), N=self.N, control=gate.controls[0], target=gate.targets[0])) elif gate.name == "CRZ": self.U_list.append(controlled_gate(rz(gate.arg_value), N=self.N, control=gate.controls[0], target=gate.targets[0])) elif gate.name == "CPHASE": self.U_list.append(cphase(gate.arg_value, self.N, gate.controls[0], gate.targets[0])) elif gate.name == "CNOT": self.U_list.append(cnot(self.N, gate.controls[0], gate.targets[0])) elif gate.name == "CSIGN": self.U_list.append(csign(self.N, gate.controls[0], gate.targets[0])) elif gate.name == "BERKELEY": self.U_list.append(berkeley(self.N, gate.targets)) elif gate.name == "SWAPalpha": self.U_list.append(swapalpha(gate.arg_value, self.N, gate.targets)) elif gate.name == "SWAP": self.U_list.append(swap(self.N, gate.targets)) elif gate.name == "ISWAP": self.U_list.append(iswap(self.N, gate.targets)) elif gate.name == "SQRTSWAP": self.U_list.append(sqrtswap(self.N, gate.targets)) elif gate.name == "SQRTISWAP": self.U_list.append(sqrtiswap(self.N, gate.targets)) elif gate.name == "FREDKIN": self.U_list.append(fredkin(self.N, gate.controls[0], gate.targets)) elif gate.name == "TOFFOLI": self.U_list.append(toffoli(self.N, gate.controls, gate.targets[0])) elif gate.name == "GLOBALPHASE": self.U_list.append(globalphase(gate.arg_value, self.N)) elif gate.name in self.user_gates: if gate.controls is not None: raise ValueError( "A user defined gate {} takes only " "`targets` variable.".format(gate.name)) func = self.user_gates[gate.name] para_num = len(inspect.getfullargspec(func)[0]) if para_num == 0: oper = func() elif para_num == 1: oper = func(gate.arg_value) else: raise ValueError( "gate function takes at most one parameters.") self.U_list.append(expand_oper(oper, self.N, gate.targets)) else: raise NotImplementedError( "{} gate is an unknown 
gate.".format(gate.name)) return self.U_list def latex_code(self): rows = [] gates = self.gates for gate in gates: col = [] for n in range(self.N): if gate.targets and n in gate.targets: if len(gate.targets) > 1: if gate.name == "SWAP": col.append(r" \qswap \qwx ") elif ((self.reverse_states and n == max(gate.targets)) or (not self.reverse_states and n == min(gate.targets))): col.append(r" \multigate{%d}{%s} " % (len(gate.targets) - 1, _gate_label(gate.name, gate.arg_label))) else: col.append(r" \ghost{%s} " % (_gate_label(gate.name, gate.arg_label))) elif gate.name == "CNOT": col.append(r" \targ ") elif gate.name == "TOFFOLI": col.append(r" \targ ") else: col.append(r" \gate{%s} " % _gate_label(gate.name, gate.arg_label)) elif gate.controls and n in gate.controls: m = (gate.targets[0] - n) * (-1 if self.reverse_states else 1) col.append(r" \ctrl{%d} " % m) elif (not gate.controls and not gate.targets): if ((self.reverse_states and n == self.N - 1) or (not self.reverse_states and n == 0)): col.append(r" \multigate{%d}{%s} " % (self.N - 1, _gate_label(gate.name, gate.arg_label))) else: col.append(r" \ghost{%s} " % (_gate_label(gate.name, gate.arg_label))) else: col.append(r" \qw ") col.append(r" \qw ") rows.append(col) input_states = ["\lstick{\ket{" + x + "}}" if x is not None else "" for x in self.input_states] code = "" n_iter = (reversed(range(self.N)) if self.reverse_states else range(self.N)) for n in n_iter: code += r" & %s" % input_states[n] for m in range(len(gates)): code += r" & %s" % rows[m][n] code += r" & \qw \\ " + "\n" return code def _repr_png_(self): return _latex_compile(self.latex_code(), format="png") def _repr_svg_(self): return _latex_compile(self.latex_code(), format="svg") @property def png(self): from IPython.display import Image return Image(self._repr_png_(), embed=True) @property def svg(self): from IPython.display import SVG return SVG(self._repr_svg_()) def qasm(self): code = "# qasm code generated by QuTiP\n\n" for n in range(self.N): 
code += "\tqubit\tq%d\n" % n code += "\n" for gate in self.gates: code += "\t%s\t" % gate.name qtargets = ["q%d" % t for t in gate.targets] if gate.targets else [] qcontrols = (["q%d" % c for c in gate.controls] if gate.controls else []) code += ",".join(qtargets + qcontrols) code += "\n" return code
true
true
f70ea0db8a6447a49d3bf80ab3739ca2d79b0877
5,469
py
Python
docs/source/conf.py
holavpv/osmnx
df1feb06f2c2be3c042f524428e3993f22560ee6
[ "MIT" ]
1
2020-07-09T22:11:52.000Z
2020-07-09T22:11:52.000Z
docs/source/conf.py
holavpv/osmnx
df1feb06f2c2be3c042f524428e3993f22560ee6
[ "MIT" ]
null
null
null
docs/source/conf.py
holavpv/osmnx
df1feb06f2c2be3c042f524428e3993f22560ee6
[ "MIT" ]
null
null
null
#!/usr/bin/env python3 # -*- coding: utf-8 -*- """ OSMnx documentation build configuration file. Created by sphinx-quickstart on Sun Feb 4 13:53:34 2018. This file is execfile()d with the current directory set to its containing dir. Note that not all possible configuration values are present in this autogenerated file. All configuration values have a default; values that are commented out serve to show the default. If extensions (or modules to document with autodoc) are in another directory, add these directories to sys.path here. If the directory is relative to the documentation root, use os.path.abspath to make it absolute, like shown here. """ import os import sys # go up two levels from /docs/source to the package root sys.path.insert(0, os.path.abspath("../..")) # mock import these packages because readthedocs doesn't have them installed autodoc_mock_imports = [ "dateutil", "geopandas", "matplotlib", "matplotlib.cm", "matplotlib.colors", "matplotlib.pyplot", "networkx", "numpy", "pandas", "pyproj", "requests", "scipy", "scipy.spatial", "shapely", "shapely.geometry", "shapely.ops", "sklearn", "sklearn.neighbors", ] # -- General configuration ------------------------------------------------ # General information about the project. project = "OSMnx" copyright = "2020, Geoff Boeing" author = "Geoff Boeing" # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. version = release = "0.15.1" # If your documentation needs a minimal Sphinx version, state it here. # # needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = ["sphinx.ext.autodoc", "sphinx.ext.napoleon"] # Add any paths that contain templates here, relative to this directory. templates_path = ["_templates"] # The suffix(es) of source filenames. 
# You can specify multiple suffix as a list of string: # # source_suffix = ['.rst', '.md'] source_suffix = ".rst" # The master toctree document. master_doc = "index" # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. language = None # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. # This patterns also effect to html_static_path and html_extra_path exclude_patterns = [] # The name of the Pygments (syntax highlighting) style to use. pygments_style = "sphinx" # If true, `todo` and `todoList` produce output, else they produce nothing. todo_include_todos = False # -- Options for HTML output ---------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. # html_theme = "default" # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. # # html_theme_options = {} # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = [] # Custom sidebar templates, must be a dictionary that maps document names # to template names. # # This is required for the alabaster theme # refs: http://alabaster.readthedocs.io/en/latest/installation.html#sidebars html_sidebars = { "**": [ "relations.html", # needs 'show_related': True theme option to display "searchbox.html", ] } # -- Options for HTMLHelp output ------------------------------------------ # Output file base name for HTML help builder. 
htmlhelp_basename = "OSMnxdoc" # -- Options for LaTeX output --------------------------------------------- latex_elements = { # The paper size ('letterpaper' or 'a4paper'). # # 'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). # # 'pointsize': '10pt', # Additional stuff for the LaTeX preamble. # # 'preamble': '', # Latex figure (float) alignment # # 'figure_align': 'htbp', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ (master_doc, "OSMnx.tex", "OSMnx Documentation", "Geoff Boeing", "manual"), ] # -- Options for manual page output --------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [(master_doc, "osmnx", "OSMnx Documentation", [author], 1)] # -- Options for Texinfo output ------------------------------------------- # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ ( master_doc, "OSMnx", "OSMnx Documentation", author, "OSMnx", "Python for street networks.", "Miscellaneous", ), ]
29.403226
79
0.672518
import os import sys sys.path.insert(0, os.path.abspath("../..")) autodoc_mock_imports = [ "dateutil", "geopandas", "matplotlib", "matplotlib.cm", "matplotlib.colors", "matplotlib.pyplot", "networkx", "numpy", "pandas", "pyproj", "requests", "scipy", "scipy.spatial", "shapely", "shapely.geometry", "shapely.ops", "sklearn", "sklearn.neighbors", ] # -- General configuration ------------------------------------------------ # General information about the project. project = "OSMnx" copyright = "2020, Geoff Boeing" author = "Geoff Boeing" # The version info for the project you're documenting, acts as replacement for version = release = "0.15.1" extensions = ["sphinx.ext.autodoc", "sphinx.ext.napoleon"] templates_path = ["_templates"] source_suffix = ".rst" master_doc = "index" language = None exclude_patterns = [] pygments_style = "sphinx" todo_include_todos = False html_theme = "default" html_static_path = [] ebars = { "**": [ "relations.html", "searchbox.html", ] } htmlhelp_basename = "OSMnxdoc" latex_elements = { } latex_documents = [ (master_doc, "OSMnx.tex", "OSMnx Documentation", "Geoff Boeing", "manual"), ] man_pages = [(master_doc, "osmnx", "OSMnx Documentation", [author], 1)] texinfo_documents = [ ( master_doc, "OSMnx", "OSMnx Documentation", author, "OSMnx", "Python for street networks.", "Miscellaneous", ), ]
true
true
f70ea0e76e8e27653ad98588d7b83e00bb2a047a
5,613
py
Python
tests/unit/test_config.py
nickatnight/praw
6ba5c92e5d5210338c0a2a2755a5e5e226a002fa
[ "BSD-2-Clause" ]
2,360
2015-01-03T18:27:44.000Z
2022-03-26T23:24:49.000Z
tests/unit/test_config.py
nickatnight/praw
6ba5c92e5d5210338c0a2a2755a5e5e226a002fa
[ "BSD-2-Clause" ]
1,187
2015-01-04T18:42:10.000Z
2022-03-28T13:46:33.000Z
tests/unit/test_config.py
nickatnight/praw
6ba5c92e5d5210338c0a2a2755a5e5e226a002fa
[ "BSD-2-Clause" ]
591
2015-01-04T17:33:34.000Z
2022-03-27T20:28:26.000Z
import os import sys from unittest import mock import pytest from praw.config import Config from praw.exceptions import ClientException class TestConfig: @staticmethod def _assert_config_read(environment, mock_config): mock_instance = mock_config.return_value Config.CONFIG = None # Force config file reload prev_environment = {environment: None} for env_name in ["APPDATA", "HOME", "XDG_CONFIG_HOME"]: if env_name in os.environ: prev_environment[env_name] = os.environ[env_name] del os.environ[env_name] os.environ[environment] = "/MOCK" module_dir = os.path.dirname(sys.modules["praw"].__file__) environ_path = os.path.join( "/MOCK", ".config" if environment == "HOME" else "", "praw.ini" ) locations = [ os.path.join(module_dir, "praw.ini"), environ_path, "praw.ini", ] try: Config._load_config() mock_instance.read.assert_called_with(locations) finally: Config.CONFIG = None # Force config file reload for env_name in prev_environment: if prev_environment[env_name] is None: del os.environ[env_name] else: os.environ[env_name] = prev_environment[env_name] def test_check_for_updates__false(self): for value in [False, "False", "other"]: config = Config("DEFAULT", check_for_updates=value) assert config.check_for_updates is False def test_custom__extra_values_set(self): config = Config("DEFAULT", user1="foo", user2="bar") assert config.custom == {"user1": "foo", "user2": "bar"} def test_custom__no_extra_values_set(self): config = Config("DEFAULT") assert config.custom == {} def test_check_for_updates__true(self): for value in [True, "1", "true", "YES", "on"]: config = Config("DEFAULT", check_for_updates=value) assert config.check_for_updates is True @mock.patch("configparser.ConfigParser") def test_load_ini_from_appdata(self, mock_config): self._assert_config_read("APPDATA", mock_config) @mock.patch("configparser.ConfigParser") def test_load_ini_from_home(self, mock_config): self._assert_config_read("HOME", mock_config) @mock.patch("configparser.ConfigParser") def 
test_load_ini_from_xdg_config_home(self, mock_config): self._assert_config_read("XDG_CONFIG_HOME", mock_config) @mock.patch("configparser.ConfigParser") def test_load_ini_with_no_config_directory(self, mock_config): mock_instance = mock_config.return_value Config.CONFIG = None # Force config file reload prev_environment = {} for key in ["APPDATA", "HOME", "XDG_CONFIG_HOME"]: if key in os.environ: prev_environment[key] = os.environ[key] del os.environ[key] module_dir = os.path.dirname(sys.modules["praw"].__file__) locations = [os.path.join(module_dir, "praw.ini"), "praw.ini"] try: Config._load_config() mock_instance.read.assert_called_with(locations) finally: Config.CONFIG = None # Force config file reload for key, value in prev_environment.items(): os.environ[key] = value def test_short_url(self): config = Config("DEFAULT") assert config.short_url == "https://redd.it" def test_short_url_not_defined(self): config = Config("DEFAULT", short_url=None) with pytest.raises(ClientException) as excinfo: config.short_url assert str(excinfo.value) == "No short domain specified." 
def test_unset_value_has_useful_string_representation(self): config = Config("DEFAULT", password=Config.CONFIG_NOT_SET) assert str(config.password) == "NotSet" class TestConfigInterpolation: def test_no_interpolation(self): Config.CONFIG = None # Force config file reload with mock.patch.dict( "os.environ", { "APPDATA": os.path.dirname(__file__), "XDG_CONFIG_HOME": os.path.dirname(__file__), }, ): config = Config("INTERPOLATION") assert config.custom["basic_interpolation"] == "%(reddit_url)s" assert config.custom["extended_interpolation"] == "${reddit_url}" def test_basic_interpolation(self): Config.CONFIG = None # Force config file reload with mock.patch.dict( "os.environ", { "APPDATA": os.path.dirname(__file__), "XDG_CONFIG_HOME": os.path.dirname(__file__), }, ): config = Config("INTERPOLATION", config_interpolation="basic") assert config.custom["basic_interpolation"] == config.reddit_url assert config.custom["extended_interpolation"] == "${reddit_url}" def test_extended_interpolation(self): Config.CONFIG = None # Force config file reload with mock.patch.dict( "os.environ", { "APPDATA": os.path.dirname(__file__), "XDG_CONFIG_HOME": os.path.dirname(__file__), }, ): config = Config("INTERPOLATION", config_interpolation="extended") assert config.custom["basic_interpolation"] == "%(reddit_url)s" assert config.custom["extended_interpolation"] == config.reddit_url
37.42
79
0.618742
import os import sys from unittest import mock import pytest from praw.config import Config from praw.exceptions import ClientException class TestConfig: @staticmethod def _assert_config_read(environment, mock_config): mock_instance = mock_config.return_value Config.CONFIG = None prev_environment = {environment: None} for env_name in ["APPDATA", "HOME", "XDG_CONFIG_HOME"]: if env_name in os.environ: prev_environment[env_name] = os.environ[env_name] del os.environ[env_name] os.environ[environment] = "/MOCK" module_dir = os.path.dirname(sys.modules["praw"].__file__) environ_path = os.path.join( "/MOCK", ".config" if environment == "HOME" else "", "praw.ini" ) locations = [ os.path.join(module_dir, "praw.ini"), environ_path, "praw.ini", ] try: Config._load_config() mock_instance.read.assert_called_with(locations) finally: Config.CONFIG = None for env_name in prev_environment: if prev_environment[env_name] is None: del os.environ[env_name] else: os.environ[env_name] = prev_environment[env_name] def test_check_for_updates__false(self): for value in [False, "False", "other"]: config = Config("DEFAULT", check_for_updates=value) assert config.check_for_updates is False def test_custom__extra_values_set(self): config = Config("DEFAULT", user1="foo", user2="bar") assert config.custom == {"user1": "foo", "user2": "bar"} def test_custom__no_extra_values_set(self): config = Config("DEFAULT") assert config.custom == {} def test_check_for_updates__true(self): for value in [True, "1", "true", "YES", "on"]: config = Config("DEFAULT", check_for_updates=value) assert config.check_for_updates is True @mock.patch("configparser.ConfigParser") def test_load_ini_from_appdata(self, mock_config): self._assert_config_read("APPDATA", mock_config) @mock.patch("configparser.ConfigParser") def test_load_ini_from_home(self, mock_config): self._assert_config_read("HOME", mock_config) @mock.patch("configparser.ConfigParser") def test_load_ini_from_xdg_config_home(self, mock_config): 
self._assert_config_read("XDG_CONFIG_HOME", mock_config) @mock.patch("configparser.ConfigParser") def test_load_ini_with_no_config_directory(self, mock_config): mock_instance = mock_config.return_value Config.CONFIG = None prev_environment = {} for key in ["APPDATA", "HOME", "XDG_CONFIG_HOME"]: if key in os.environ: prev_environment[key] = os.environ[key] del os.environ[key] module_dir = os.path.dirname(sys.modules["praw"].__file__) locations = [os.path.join(module_dir, "praw.ini"), "praw.ini"] try: Config._load_config() mock_instance.read.assert_called_with(locations) finally: Config.CONFIG = None for key, value in prev_environment.items(): os.environ[key] = value def test_short_url(self): config = Config("DEFAULT") assert config.short_url == "https://redd.it" def test_short_url_not_defined(self): config = Config("DEFAULT", short_url=None) with pytest.raises(ClientException) as excinfo: config.short_url assert str(excinfo.value) == "No short domain specified." def test_unset_value_has_useful_string_representation(self): config = Config("DEFAULT", password=Config.CONFIG_NOT_SET) assert str(config.password) == "NotSet" class TestConfigInterpolation: def test_no_interpolation(self): Config.CONFIG = None with mock.patch.dict( "os.environ", { "APPDATA": os.path.dirname(__file__), "XDG_CONFIG_HOME": os.path.dirname(__file__), }, ): config = Config("INTERPOLATION") assert config.custom["basic_interpolation"] == "%(reddit_url)s" assert config.custom["extended_interpolation"] == "${reddit_url}" def test_basic_interpolation(self): Config.CONFIG = None with mock.patch.dict( "os.environ", { "APPDATA": os.path.dirname(__file__), "XDG_CONFIG_HOME": os.path.dirname(__file__), }, ): config = Config("INTERPOLATION", config_interpolation="basic") assert config.custom["basic_interpolation"] == config.reddit_url assert config.custom["extended_interpolation"] == "${reddit_url}" def test_extended_interpolation(self): Config.CONFIG = None with mock.patch.dict( "os.environ", { "APPDATA": 
os.path.dirname(__file__), "XDG_CONFIG_HOME": os.path.dirname(__file__), }, ): config = Config("INTERPOLATION", config_interpolation="extended") assert config.custom["basic_interpolation"] == "%(reddit_url)s" assert config.custom["extended_interpolation"] == config.reddit_url
true
true
f70ea1c658fc200cff6ab95f636b343fc4f6d6b7
2,057
py
Python
Validation/Performance/scripts/cmsScimarkStop.py
ckamtsikis/cmssw
ea19fe642bb7537cbf58451dcf73aa5fd1b66250
[ "Apache-2.0" ]
13
2015-11-30T15:49:45.000Z
2022-02-08T16:11:30.000Z
Validation/Performance/scripts/cmsScimarkStop.py
ckamtsikis/cmssw
ea19fe642bb7537cbf58451dcf73aa5fd1b66250
[ "Apache-2.0" ]
640
2015-02-11T18:55:47.000Z
2022-03-31T14:12:23.000Z
Validation/Performance/scripts/cmsScimarkStop.py
ckamtsikis/cmssw
ea19fe642bb7537cbf58451dcf73aa5fd1b66250
[ "Apache-2.0" ]
51
2015-08-11T21:01:40.000Z
2022-03-30T07:31:34.000Z
#! /usr/bin/env python #Script to #1-check for cmsScimarkLaunch (infinite loop) scripts #2-kill them #3-report their results using cmsScimarkParser.py from __future__ import print_function import subprocess,os,sys def main(): #Use ps -ef to look for cmsScimarkLaunch processes ps_stdouterr=subprocess.Popen("ps -efww|grep cmsScimarkLaunch|grep -v grep|grep -v 'sh -c'",shell=True,stdout=subprocess.PIPE, stderr=subprocess.STDOUT).stdout if ps_stdouterr: ps_lines=ps_stdouterr.readlines() #print ps_lines if ps_lines: for line in ps_lines: tokens=line.split() #Look up the PID PID=tokens[1] #Look up the cpu core core=tokens[9] print("Found process:\n%s"%line[:-1]) #to eliminate the extra \n #Kill the PID print("Killing process with PID %s"%PID) kill_stdouterr=subprocess.Popen("kill %s"%PID,shell=True,stdout=subprocess.PIPE, stderr=subprocess.STDOUT).stdout.read() print(kill_stdouterr) #Harvest the cmsScimark scores #Look for the cmsScimark log: if os.path.exists("cmsScimark_%s.log"%core): #Create the results dir mkdir_stdouterr=subprocess.Popen("mkdir cmsScimarkResults_cpu%s"%core,shell=True,stdout=subprocess.PIPE, stderr=subprocess.STDOUT).stdout.read() print(mkdir_stdouterr) #Execute the harvesting scrip cmsScimarkParser.py (it is in the release) harvest_stdouterr=subprocess.Popen("cmsScimarkParser.py -i cmsScimark_%s.log -o cmsScimarkResults_cpu%s"%(core,core),shell=True,stdout=subprocess.PIPE, stderr=subprocess.STDOUT).stdout.read() print(harvest_stdouterr) else: print("No cmsScimark_%s.log file was found for cpu%s, log might be in another directory!"%(core,core)) else: print("No cmsScimarkLaunch processes found in the ps -ef output") return 0 if __name__ == "__main__": sys.exit(main())
45.711111
207
0.656782
from __future__ import print_function import subprocess,os,sys def main(): ps_stdouterr=subprocess.Popen("ps -efww|grep cmsScimarkLaunch|grep -v grep|grep -v 'sh -c'",shell=True,stdout=subprocess.PIPE, stderr=subprocess.STDOUT).stdout if ps_stdouterr: ps_lines=ps_stdouterr.readlines() if ps_lines: for line in ps_lines: tokens=line.split() PID=tokens[1] core=tokens[9] print("Found process:\n%s"%line[:-1]) print("Killing process with PID %s"%PID) kill_stdouterr=subprocess.Popen("kill %s"%PID,shell=True,stdout=subprocess.PIPE, stderr=subprocess.STDOUT).stdout.read() print(kill_stdouterr) if os.path.exists("cmsScimark_%s.log"%core): mkdir_stdouterr=subprocess.Popen("mkdir cmsScimarkResults_cpu%s"%core,shell=True,stdout=subprocess.PIPE, stderr=subprocess.STDOUT).stdout.read() print(mkdir_stdouterr) harvest_stdouterr=subprocess.Popen("cmsScimarkParser.py -i cmsScimark_%s.log -o cmsScimarkResults_cpu%s"%(core,core),shell=True,stdout=subprocess.PIPE, stderr=subprocess.STDOUT).stdout.read() print(harvest_stdouterr) else: print("No cmsScimark_%s.log file was found for cpu%s, log might be in another directory!"%(core,core)) else: print("No cmsScimarkLaunch processes found in the ps -ef output") return 0 if __name__ == "__main__": sys.exit(main())
true
true
f70ea28bcf4185dc675728f2549fd2e9447346be
77,417
py
Python
Graphing_Summaries.py
GrantRoss-Tenki/Malawi-CQC-CSC-OSU-Work
a720e0451579945ba10eafdafe2e0d59a86d5cfb
[ "MIT" ]
null
null
null
Graphing_Summaries.py
GrantRoss-Tenki/Malawi-CQC-CSC-OSU-Work
a720e0451579945ba10eafdafe2e0d59a86d5cfb
[ "MIT" ]
null
null
null
Graphing_Summaries.py
GrantRoss-Tenki/Malawi-CQC-CSC-OSU-Work
a720e0451579945ba10eafdafe2e0d59a86d5cfb
[ "MIT" ]
null
null
null
import matplotlib.pyplot as plt import numpy as np import pandas as pd #from pylab import plot, show, xlim,figure,hold, ylim,legend, boxplot, setup, axes import seaborn as sns # Is this a personal or work computer # Are you graphing for hood or no hood Computer = 'personal' #or 'personal' or 'work' Hood_or_no = 'no_hood' # 'no_hood' or 'hood' #what household do you want to remove make sure it is in ascending order # if there is nothing, then put a placeholder of 1045 or higher Household_removal = [1045] #Household_removal = Household_removal.sort(reverse=False) Household_removal_NO_Hood_fuel_day_adult = [1045] Household_removal_Hood_fuel_day_adult = [2020] Household_removal_NO_Hood_PM = [1045] Household_removal_Hood_PM = [2020] pd.set_option('display.max_rows', 500) pd.set_option('display.max_columns', 500) pd.set_option('display.width', 1000) if Hood_or_no == 'hood': C_Place_holder = 2001 else: C_Place_holder = 1001 if Computer == 'personal' and Hood_or_no == 'no_hood': # 1N datafile_path_day_1N ="C:/Users/gvros/Desktop/Oregon State Masters/Work/OSU, CSC, CQC Project files/1N/1N_Summary_Day_1_exact.csv" Day_1N = pd.read_csv(datafile_path_day_1N, skiprows=2) datafile_path_event_1N = "C:/Users/gvros/Desktop/Oregon State Masters/Work/OSU, CSC, CQC Project files/1N/1N_Summary_Event_1_exact.csv" Event_1N = pd.read_csv(datafile_path_event_1N, skiprows=2) # there is no second exact in phase 1N #1N Survey datafile_path_survey_1N = "C:/Users/gvros/Desktop/Oregon State Masters/Work/OSU, CSC, CQC Project files/1N/1N_1H_Survey_summary_.csv" Filter_1n_survey = pd.read_csv(datafile_path_survey_1N, skiprows=0) #print(Filter_1n_survey.iloc[0:40, :]) Survey_1N = Filter_1n_survey.iloc[0:40,:] #24 hour Kitchen pm breakdown data_file_path_24_PM_1N = "C:/Users/gvros/Desktop/Oregon State Masters/Work/OSU, CSC, CQC Project files/1N/1N_24_hour_Kitchen_PM.csv" Kit_PM_1N_24hr = pd.read_csv(data_file_path_24_PM_1N, skiprows=0) #24 hour Fuel Removal breakdown data_file_path_24_Fuel_1N = 
"C:/Users/gvros/Desktop/Oregon State Masters/Work/OSU, CSC, CQC Project files/1N/1N_24_hour_Fuel_removal.csv" Fuel_remove_1N_24hr = pd.read_csv(data_file_path_24_Fuel_1N, skiprows=0) #2N datafile_path_day_2N ="C:/Users/gvros/Desktop/Oregon State Masters/Work/OSU, CSC, CQC Project files/2N/2N_Summary_Day_1_exact.csv" Day_2N = pd.read_csv(datafile_path_day_2N, skiprows=2) datafile_path_event_2N_1 ="C:/Users/gvros/Desktop/Oregon State Masters/Work/OSU, CSC, CQC Project files/2N/2N_Summary_Event_1_exact.csv" Event_2N_1 = pd.read_csv(datafile_path_event_2N_1, skiprows=2) #2N second Exact datafile_path_event_2N_2 ="C:/Users/gvros/Desktop/Oregon State Masters/Work/OSU, CSC, CQC Project files/2N/2N_Summary_Event_2_exact.csv" Event_2N_2 = pd.read_csv(datafile_path_event_2N_2, skiprows=2) #2N Survey datafile_path_survey_2N = "C:/Users/gvros/Desktop/Oregon State Masters/Work/OSU, CSC, CQC Project files/2N/2N_Survey_summary_.csv" Survey_2N = pd.read_csv(datafile_path_survey_2N, skiprows=0) #24 hour Kitchen pm breakdown data_file_path_24_PM_2N = "C:/Users/gvros/Desktop/Oregon State Masters/Work/OSU, CSC, CQC Project files/2N/2N_24_hour_Kitchen_PM.csv" Kit_PM_2N_24hr = pd.read_csv(data_file_path_24_PM_2N, skiprows=0) #24 hour Fuel Removal breakdown data_file_path_24_Fuel_2N = "C:/Users/gvros/Desktop/Oregon State Masters/Work/OSU, CSC, CQC Project files/2N/2N_24_hour_Fuel_removal.csv" Fuel_remove_2N_24hr = pd.read_csv(data_file_path_24_Fuel_2N, skiprows=0) #3N datafile_path_day_3N ="C:/Users/gvros/Desktop/Oregon State Masters/Work/OSU, CSC, CQC Project files/3N/3N_Summary_Day_1_exact.csv" Day_3N = pd.read_csv(datafile_path_day_3N, skiprows=2) datafile_path_event_3N_1 ="C:/Users/gvros/Desktop/Oregon State Masters/Work/OSU, CSC, CQC Project files/3N/3N_Summary_Event_1_exact.csv" Event_3N_1 = pd.read_csv(datafile_path_event_3N_1, skiprows=2) #3N second Exact datafile_path_event_3N_2 ="C:/Users/gvros/Desktop/Oregon State Masters/Work/OSU, CSC, CQC Project 
files/3N/3N_Summary_Event_2_exact.csv" Event_3N_2 = pd.read_csv(datafile_path_event_3N_2, skiprows=2) #3N Survey datafile_path_survey_3N = "C:/Users/gvros/Desktop/Oregon State Masters/Work/OSU, CSC, CQC Project files/3N/3N_Survey_summary_.csv" Survey_3N = pd.read_csv(datafile_path_survey_3N, skiprows=0) #24 hour Kitchen pm breakdown data_file_path_24_PM_3N = "C:/Users/gvros/Desktop/Oregon State Masters/Work/OSU, CSC, CQC Project files/3N/3N_24_hour_Kitchen_PM.csv" Kit_PM_3N_24hr = pd.read_csv(data_file_path_24_PM_3N, skiprows=0) #24 hour Fuel Removal breakdown data_file_path_24_Fuel_3N = "C:/Users/gvros/Desktop/Oregon State Masters/Work/OSU, CSC, CQC Project files/3N/3N_24_hour_Fuel_removal.csv" Fuel_remove_3N_24hr = pd.read_csv(data_file_path_24_Fuel_3N, skiprows=0) #4N datafile_path_day_4N ="C:/Users/gvros/Desktop/Oregon State Masters/Work/OSU, CSC, CQC Project files/4N/4N_Summary_Day_1_exact.csv" Day_4N = pd.read_csv(datafile_path_day_4N, skiprows=2) datafile_path_event_4N_1 ="C:/Users/gvros/Desktop/Oregon State Masters/Work/OSU, CSC, CQC Project files/4N/4N_Summary_Event_1_exact.csv" Event_4N_1 = pd.read_csv(datafile_path_event_4N_1, skiprows=2) #4N second Exact datafile_path_event_4N_2 ="C:/Users/gvros/Desktop/Oregon State Masters/Work/OSU, CSC, CQC Project files/4N/4N_Summary_Event_2_exact.csv" Event_4N_2 = pd.read_csv(datafile_path_event_4N_2, skiprows=2) #4N Survey datafile_path_survey_4N = "C:/Users/gvros/Desktop/Oregon State Masters/Work/OSU, CSC, CQC Project files/4N/4N_Survey_summary_.csv" Survey_4N = pd.read_csv(datafile_path_survey_4N, skiprows=0) #24 hour Kitchen pm breakdown data_file_path_24_PM_4N = "C:/Users/gvros/Desktop/Oregon State Masters/Work/OSU, CSC, CQC Project files/4N/4N_24_hour_Kitchen_PM.csv" Kit_PM_4N_24hr = pd.read_csv(data_file_path_24_PM_4N, skiprows=0) #24 hour Fuel Removal breakdown data_file_path_24_Fuel_4N = "C:/Users/gvros/Desktop/Oregon State Masters/Work/OSU, CSC, CQC Project files/4N/4N_24_hour_Fuel_removal.csv" 
Fuel_remove_4N_24hr = pd.read_csv(data_file_path_24_Fuel_4N, skiprows=0) elif Computer == 'personal' and Hood_or_no == 'hood': #1H datafile_path_day_1H ="C:/Users/gvros/Desktop/Oregon State Masters/Work/OSU, CSC, CQC Project files/1H/1H_Summary_Day_1_exact.csv" Day_1H = pd.read_csv(datafile_path_day_1H, skiprows=2) datafile_path_event_1H ="C:/Users/gvros/Desktop/Oregon State Masters/Work/OSU, CSC, CQC Project files/1H/1H_Summary_Event_1_exact.csv" Event_1H = pd.read_csv(datafile_path_event_1H, skiprows=2) #there is no second exact in phase 1H #1H Survey (row 40 or so afterward is Hood portion column 1 is houshold number) datafile_path_survey_1H = "C:/Users/gvros/Desktop/Oregon State Masters/Work/OSU, CSC, CQC Project files/1N/1N_1H_Survey_summary_.csv" Survey_1H = pd.read_csv(datafile_path_survey_1H, skiprows=40) #24 hour Kitchen pm breakdown data_file_path_24_PM_1H = "C:/Users/gvros/Desktop/Oregon State Masters/Work/OSU, CSC, CQC Project files/1H/1H_24_hour_Kitchen_PM.csv" Kit_PM_1H_24hr = pd.read_csv(data_file_path_24_PM_1H, skiprows=0) #24 hour Fuel Removal breakdown data_file_path_24_fuel_1H = "C:/Users/gvros/Desktop/Oregon State Masters/Work/OSU, CSC, CQC Project files/1H/1H_24_hour_Fuel_removal.csv" Fuel_remove_1H_24hr = pd.read_csv(data_file_path_24_fuel_1H, skiprows=0) #2H datafile_path_day_2H ="C:/Users/gvros/Desktop/Oregon State Masters/Work/OSU, CSC, CQC Project files/2H/2H_Summary_Day_1_exact.csv" Day_2H = pd.read_csv(datafile_path_day_2H, skiprows=2) datafile_path_event_2H_1 ="C:/Users/gvros/Desktop/Oregon State Masters/Work/OSU, CSC, CQC Project files/2H/2H_Summary_Event_1_exact.csv" Event_2H_1 = pd.read_csv(datafile_path_event_2H_1, skiprows=2) #2H second Exact datafile_path_event_2H_2 ="C:/Users/gvros/Desktop/Oregon State Masters/Work/OSU, CSC, CQC Project files/2H/2H_Summary_Event_2_exact.csv" Event_2H_2 = pd.read_csv(datafile_path_event_2H_2, skiprows=2) #2H survey datafile_path_survey_2H = "C:/Users/gvros/Desktop/Oregon State Masters/Work/OSU, 
CSC, CQC Project files/2H/2H_Survey_summary_.csv" Survey_2H = pd.read_csv(datafile_path_survey_2H, skiprows=0) #24 hour Kitchen pm breakdown data_file_path_24_PM_2H = "C:/Users/gvros/Desktop/Oregon State Masters/Work/OSU, CSC, CQC Project files/2H/2H_24_hour_Kitchen_PM.csv" Kit_PM_2H_24hr = pd.read_csv(data_file_path_24_PM_2H, skiprows=0) #24 hour Fuel Removal breakdown data_file_path_24_fuel_2H = "C:/Users/gvros/Desktop/Oregon State Masters/Work/OSU, CSC, CQC Project files/2H/2H_24_hour_Fuel_removal.csv" Fuel_remove_2H_24hr = pd.read_csv(data_file_path_24_fuel_2H, skiprows=0) #3H datafile_path_day_3H ="C:/Users/gvros/Desktop/Oregon State Masters/Work/OSU, CSC, CQC Project files/3H/3H_Summary_Day_1_exact.csv" Day_3H = pd.read_csv(datafile_path_day_3H, skiprows=2) datafile_path_event_3N_1 ="C:/Users/gvros/Desktop/Oregon State Masters/Work/OSU, CSC, CQC Project files/3H/3H_Summary_Event_1_exact.csv" Event_3H_1 = pd.read_csv(datafile_path_event_3N_1, skiprows=2) #3H second Exact datafile_path_event_3H_2 ="C:/Users/gvros/Desktop/Oregon State Masters/Work/OSU, CSC, CQC Project files/3H/3H_Summary_Event_2_exact.csv" Event_3H_2 = pd.read_csv(datafile_path_event_3H_2, skiprows=2) #3H survey datafile_path_survey_3H = "C:/Users/gvros/Desktop/Oregon State Masters/Work/OSU, CSC, CQC Project files/3H/3H_Survey_summary_.csv" Survey_3H = pd.read_csv(datafile_path_survey_3H, skiprows=0) #24 hour Kitchen pm breakdown data_file_path_24_PM_3H = "C:/Users/gvros/Desktop/Oregon State Masters/Work/OSU, CSC, CQC Project files/3H/3H_24_hour_Kitchen_PM.csv" Kit_PM_3H_24hr = pd.read_csv(data_file_path_24_PM_3H, skiprows=0) #24 hour Fuel Removal breakdown data_file_path_24_fuel_3H = "C:/Users/gvros/Desktop/Oregon State Masters/Work/OSU, CSC, CQC Project files/3H/3H_24_hour_Fuel_removal.csv" Fuel_remove_3H_24hr = pd.read_csv(data_file_path_24_fuel_3H, skiprows=0) #work uses box information and not local data elif Computer == 'work' and Hood_or_no == 'no_hood': # 1N for box file system 
datafile_path_day_1N = "C:/Users/rossgra/Box/OSU, CSC, CQC Project files/1N/1N_Summary_Day_1_exact.csv" Day_1N = pd.read_csv(datafile_path_day_1N, skiprows=2) datafile_path_event_1N ="C:/Users/rossgra/Box/OSU, CSC, CQC Project files/1N/1N_Summary_Event_1_exact.csv" Event_1N = pd.read_csv(datafile_path_event_1N, skiprows=2) # there is no second exact in phase 1N #1N Survey datafile_path_survey_1N = "C:/Users/rossgra/Box/OSU, CSC, CQC Project files/1N/1N_1H_Survey_summary_.csv" Filter_1n_survey = pd.read_csv(datafile_path_survey_1N, skiprows=0) #print(Filter_1n_survey.iloc[0:40, :]) Survey_1N = Filter_1n_survey.iloc[0:40,:] #24 hour Kitchen pm breakdown data_file_path_24_PM_1N = "C:/Users/rossgra/Box/OSU, CSC, CQC Project files/1N/1N_24_hour_Kitchen_PM.csv" Kit_PM_1N_24hr = pd.read_csv(data_file_path_24_PM_1N, skiprows=0) #24 hour Fuel Removal breakdown data_file_path_24_Fuel_1N = "C:/Users/rossgra/Box/OSU, CSC, CQC Project files/1N/1N_24_hour_Fuel_removal.csv" Fuel_remove_1N_24hr = pd.read_csv(data_file_path_24_Fuel_1N, skiprows=0) #2N datafile_path_day_2N ="C:/Users/rossgra/Box/OSU, CSC, CQC Project files/2N/2N_Summary_Day_1_exact.csv" Day_2N = pd.read_csv(datafile_path_day_2N, skiprows=2) datafile_path_event_2N_1 ="C:/Users/rossgra/Box/OSU, CSC, CQC Project files/2N/2N_Summary_Event_1_exact.csv" Event_2N_1 = pd.read_csv(datafile_path_event_2N_1, skiprows=2) #2N second Exact datafile_path_event_2N_2 ="C:/Users/rossgra/Box/OSU, CSC, CQC Project files/2N/2N_Summary_Event_2_exact.csv" Event_2N_2 = pd.read_csv(datafile_path_event_2N_2, skiprows=2) #2N Survey datafile_path_survey_2N = "C:/Users/rossgra/Box/OSU, CSC, CQC Project files/2N/2N_Survey_summary_.csv" Survey_2N = pd.read_csv(datafile_path_survey_2N, skiprows=0) #24 hour Kitchen pm breakdown data_file_path_24_PM_2N = "C:/Users/rossgra/Box/OSU, CSC, CQC Project files/2N/2N_24_hour_Kitchen_PM.csv" Kit_PM_2N_24hr = pd.read_csv(data_file_path_24_PM_2N, skiprows=0) #24 hour Fuel Removal breakdown 
data_file_path_24_Fuel_2N = "C:/Users/rossgra/Box/OSU, CSC, CQC Project files/2N/2N_24_hour_Fuel_removal.csv" Fuel_remove_2N_24hr = pd.read_csv(data_file_path_24_Fuel_2N, skiprows=0) #3N datafile_path_day_3N ="C:/Users/rossgra/Box/OSU, CSC, CQC Project files/3N/3N_Summary_Day_1_exact.csv" Day_3N = pd.read_csv(datafile_path_day_3N, skiprows=2) datafile_path_event_3N_1 ="C:/Users/rossgra/Box/OSU, CSC, CQC Project files/3N/3N_Summary_Event_1_exact.csv" Event_3N_1 = pd.read_csv(datafile_path_event_3N_1, skiprows=2) #3N second Exact datafile_path_event_3N_2 ="C:/Users/rossgra/Box/OSU, CSC, CQC Project files/3N/3N_Summary_Event_2_exact.csv" Event_3N_2 = pd.read_csv(datafile_path_event_3N_2, skiprows=2) #3N survey datafile_path_survey_3N = "C:/Users/rossgra/Box/OSU, CSC, CQC Project files/3N/3N_Survey_summary_.csv" Survey_3N = pd.read_csv(datafile_path_survey_3N, skiprows=0) #24 hour Kitchen pm breakdown data_file_path_24_PM_3N = "C:/Users/rossgra/Box/OSU, CSC, CQC Project files/3N/3N_24_hour_Kitchen_PM.csv" Kit_PM_3N_24hr = pd.read_csv(data_file_path_24_PM_3N, skiprows=0) #24 hour Fuel Removal breakdown data_file_path_24_Fuel_3N = "C:/Users/rossgra/Box/OSU, CSC, CQC Project files/3N/3N_24_hour_Fuel_removal.csv" Fuel_remove_3N_24hr = pd.read_csv(data_file_path_24_Fuel_3N, skiprows=0) #4N datafile_path_day_4N ="C:/Users/rossgra/Box/OSU, CSC, CQC Project files/4N/4N_Summary_Day_1_exact.csv" Day_4N = pd.read_csv(datafile_path_day_4N, skiprows=2) datafile_path_event_4N_1 ="C:/Users/rossgra/Box/OSU, CSC, CQC Project files/4N/4N_Summary_Event_1_exact.csv" Event_4N_1 = pd.read_csv(datafile_path_event_4N_1, skiprows=2) #4N second Exact datafile_path_event_4N_2 ="C:/Users/rossgra/Box/OSU, CSC, CQC Project files/4N/4N_Summary_Event_2_exact.csv" Event_4N_2 = pd.read_csv(datafile_path_event_4N_2, skiprows=2) #4N Survey datafile_path_survey_4N = "C:/Users/rossgra/Box/OSU, CSC, CQC Project files/4N/4N_Survey_summary_.csv" Survey_4N = pd.read_csv(datafile_path_survey_4N, skiprows=0) #24 
hour Kitchen pm breakdown data_file_path_24_PM_4N = "C:/Users/rossgra/Box/OSU, CSC, CQC Project files/4N/4N_24_hour_Kitchen_PM.csv" Kit_PM_4N_24hr = pd.read_csv(data_file_path_24_PM_4N, skiprows=0) #24 hour Fuel Removal breakdown data_file_path_24_Fuel_4N = "C:/Users/rossgra/Box/OSU, CSC, CQC Project files/4N/4N_24_hour_Fuel_removal.csv" Fuel_remove_4N_24hr = pd.read_csv(data_file_path_24_Fuel_4N, skiprows=0) else: #1H datafile_path_day_1H ="C:/Users/rossgra/Box/OSU, CSC, CQC Project files/1H/1H_Summary_Day_1_exact.csv" Day_1H = pd.read_csv(datafile_path_day_1H, skiprows=2) datafile_path_event_1H ="C:/Users/rossgra/Box/OSU, CSC, CQC Project files/1H/1H_Summary_Event_1_exact.csv" Event_1H = pd.read_csv(datafile_path_event_1H, skiprows=2) #there is no second exact in phase 1H #1H Survey (row 40 or so afterward is Hood portion column 1 is houshold number) datafile_path_survey_1H = "C:/Users/rossgra/Box/OSU, CSC, CQC Project files/1N/1N_1H_Survey_summary_.csv" Survey_1H = pd.read_csv(datafile_path_survey_1H, skiprows=40) #24 hour Kitchen pm breakdown data_file_path_24_PM_1H = "C:/Users/rossgra/Box/OSU, CSC, CQC Project files/1H/1H_24_hour_Kitchen_PM.csv" Kit_PM_1H_24hr = pd.read_csv(data_file_path_24_PM_1H, skiprows=0) #24 hour Fuel Removal breakdown data_file_path_24_fuel_1H = "C:/Users/rossgra/Box/OSU, CSC, CQC Project files/1H/1H_24_hour_Fuel_removal.csv" Fuel_remove_1H_24hr = pd.read_csv(data_file_path_24_fuel_1H, skiprows=0) #2H datafile_path_day_2H = "C:/Users/rossgra/Box/OSU, CSC, CQC Project files/2H/2H_Summary_Day_1_exact.csv" Day_2H = pd.read_csv(datafile_path_day_2H, skiprows=2) datafile_path_event_2H_1 ="C:/Users/rossgra/Box/OSU, CSC, CQC Project files/2H/2H_Summary_Event_1_exact.csv" Event_2H_1 = pd.read_csv(datafile_path_event_2H_1, skiprows=2) #2H second Exact datafile_path_event_2H_2 ="C:/Users/rossgra/Box/OSU, CSC, CQC Project files/2H/2H_Summary_Event_2_exact.csv" Event_2H_2 = pd.read_csv(datafile_path_event_2H_2, skiprows=2) #2H survey 
datafile_path_survey_2H = "C:/Users/rossgra/Box/OSU, CSC, CQC Project files/2H/2H_Survey_summary_.csv" Survey_2H = pd.read_csv(datafile_path_survey_2H, skiprows=0) #24 hour Kitchen pm breakdown data_file_path_24_PM_2H = "C:/Users/rossgra/Box/OSU, CSC, CQC Project files/2H/2H_24_hour_Kitchen_PM.csv" Kit_PM_2H_24hr = pd.read_csv(data_file_path_24_PM_2H, skiprows=0) #24 hour Fuel Removal breakdown data_file_path_24_fuel_2H = "C:/Users/rossgra/Box/OSU, CSC, CQC Project files/2H/2H_24_hour_Fuel_removal.csv" Fuel_remove_2H_24hr = pd.read_csv(data_file_path_24_fuel_2H, skiprows=0) #3H datafile_path_day_3H = "C:/Users/rossgra/Box/OSU, CSC, CQC Project files/3H/3H_Summary_Day_1_exact.csv" Day_3H = pd.read_csv(datafile_path_day_3H, skiprows=2) datafile_path_event_3N_1 ="C:/Users/rossgra/Box/OSU, CSC, CQC Project files/3H/3H_Summary_Event_1_exact.csv" Event_3H_1 = pd.read_csv(datafile_path_event_3N_1, skiprows=2) #3H second Exact datafile_path_event_3H_2 ="C:/Users/rossgra/Box/OSU, CSC, CQC Project files/3H/3H_Summary_Event_2_exact.csv" Event_3H_2 = pd.read_csv(datafile_path_event_3H_2, skiprows=2) #3H survey datafile_path_survey_3H = "C:/Users/rossgra/Box/OSU, CSC, CQC Project files/3H/3H_Survey_summary_.csv" Survey_3H = pd.read_csv(datafile_path_survey_3H, skiprows=0) #24 hour Kitchen pm breakdown data_file_path_24_PM_3H = "C:/Users/rossgra/Box/OSU, CSC, CQC Project files/3H/3H_24_hour_Kitchen_PM.csv" Kit_PM_3H_24hr = pd.read_csv(data_file_path_24_PM_3H, skiprows=0) #24 hour Fuel Removal breakdown data_file_path_24_fuel_3H = "C:/Users/rossgra/Box/OSU, CSC, CQC Project files/3H/3H_24_hour_Fuel_removal.csv" Fuel_remove_3H_24hr = pd.read_csv(data_file_path_24_fuel_3H, skiprows=0) #time to start ploting fun things #1st starting with the fuel per day per adult histogram and box plot NO_hood_counter = np.arange(0,39) hood_counter = np.arange(0,14) #what household do you want to remove from the graphs (1046 is a dummy spacer) print('---------------Fuel per Day per Adult No-Hood 
Phase---------------------') if Hood_or_no == 'no_hood': Fuel_per_day_per_adult_1N = [] f_d_a_1N = [] Fuel_per_day_per_adult_2N = [] f_d_a_2N = [] Fuel_per_day_per_adult_3N = [] f_d_a_3N = [] Fuel_per_day_per_adult_4N = [] f_d_a_4N =[] count_t = 0 count_f = 0 for c in NO_hood_counter: if c == (Household_removal[count_t] - C_Place_holder): count_t = count_t + 1 if count_t == len(Household_removal): count_t = 0 continue if c == (Household_removal_NO_Hood_fuel_day_adult[count_f] - C_Place_holder): count_f = count_f + 1 if count_f == len(Household_removal_NO_Hood_fuel_day_adult): count_f = 0 continue if Fuel_remove_1N_24hr.iloc[c,6]!= -1.00: Fuel_per_day_per_adult_1N.append(Fuel_remove_1N_24hr.iloc[c,6]/Survey_1N.iloc[c,7]) f_d_a_1N.append(Day_1N.iloc[c,0]) if Fuel_remove_2N_24hr.iloc[c,6] != -1.00: Fuel_per_day_per_adult_2N.append(Fuel_remove_2N_24hr.iloc[c,6] / Survey_2N.iloc[c, 7]) f_d_a_2N.append(Day_2N.iloc[c,0]) if Fuel_remove_3N_24hr.iloc[c,6] != -1.00: Fuel_per_day_per_adult_3N.append(Fuel_remove_3N_24hr.iloc[c,6]/ Survey_3N.iloc[c, 7]) f_d_a_3N.append(Day_3N.iloc[c, 0]) if Fuel_remove_4N_24hr.iloc[c,6] != -1.00: Fuel_per_day_per_adult_4N.append(Fuel_remove_4N_24hr.iloc[c,6] / Survey_4N.iloc[c, 7]) f_d_a_4N.append(Day_3N.iloc[c, 0]) # percentage Change of Fuel per day between the phases Fuel_per_day_per_adult_2N_1N = [] f_d_a_2N_1N = [] Fuel_per_day_per_adult_3N_1N = [] f_d_a_3N_1N = [] Fuel_per_day_per_adult_4N_1N = [] f_d_a_4N_1N = [] Fuel_per_day_per_adult_3N_2N = [] f_d_a_3N_2N = [] Fuel_per_day_per_adult_4N_3N = [] f_d_a_4N_3N = [] Fuel_per_day_per_adult_4N_2N = [] f_d_a_4N_2N = [] count_t = 0 count_f = 0 for c in NO_hood_counter: if c == (Household_removal[count_t] - C_Place_holder): count_t = count_t + 1 if count_t == len(Household_removal): count_t = 0 continue if c == (Household_removal_NO_Hood_fuel_day_adult[count_f] - C_Place_holder): count_f = count_f + 1 if count_f == len(Household_removal_NO_Hood_fuel_day_adult): count_f = 0 continue if 
(len(Fuel_per_day_per_adult_2N)-1) >= c and (len(Fuel_per_day_per_adult_1N)-1) >= c: if Day_1N.iloc[c,13] > 0 and Day_2N.iloc[c,13] > 0 and Day_1N.iloc[c,0] == Day_2N.iloc[c,0]: Fuel_per_day_per_adult_2N_1N.append(Fuel_per_day_per_adult_2N[c]/Fuel_per_day_per_adult_1N[c]) f_d_a_2N_1N.append(Day_1N.iloc[c,0]) if (len(Fuel_per_day_per_adult_3N)-1) >= c and (len(Fuel_per_day_per_adult_1N)-1) >= c: if Day_3N.iloc[c,13] > 0 and Day_1N.iloc[c,13] > 0 and Day_3N.iloc[c,0] == Day_1N.iloc[c,0]: Fuel_per_day_per_adult_3N_1N.append(Fuel_per_day_per_adult_3N[c]/Fuel_per_day_per_adult_1N[c]) f_d_a_3N_1N.append(Day_1N.iloc[c,0]) if (len(Fuel_per_day_per_adult_4N)-1) >= c and (len(Fuel_per_day_per_adult_1N)-1) >= c: if Day_4N.iloc[c,13] > 0 and Day_1N.iloc[c,13] > 0 and Day_4N.iloc[c,0] == Day_1N.iloc[c,0]: Fuel_per_day_per_adult_4N_1N.append(Fuel_per_day_per_adult_4N[c]/Fuel_per_day_per_adult_1N[c]) f_d_a_4N_1N.append(Day_1N.iloc[c,0]) if (len(Fuel_per_day_per_adult_3N)-1) >= c and (len(Fuel_per_day_per_adult_2N)-1) >= c: if Day_3N.iloc[c,13] > 0 and Day_2N.iloc[c,13] > 0 and Day_3N.iloc[c,0] == Day_2N.iloc[c,0]: Fuel_per_day_per_adult_3N_2N.append(Fuel_per_day_per_adult_3N[c]/Fuel_per_day_per_adult_2N[c]) f_d_a_3N_2N.append(Day_2N.iloc[c,0]) if (len(Fuel_per_day_per_adult_4N)-1) >= c and (len(Fuel_per_day_per_adult_3N)-1) >= c: if Day_4N.iloc[c,13] > 0 and Day_3N.iloc[c,13] > 0 and Day_4N.iloc[c,0] == Day_3N.iloc[c,0]: Fuel_per_day_per_adult_4N_3N.append(Fuel_per_day_per_adult_4N[c]/Fuel_per_day_per_adult_3N[c]) f_d_a_4N_3N.append(Day_3N.iloc[c,0]) if (len(Fuel_per_day_per_adult_4N)-1) >= c and (len(Fuel_per_day_per_adult_2N)-1) >= c: if Day_4N.iloc[c,13] > 0 and Day_2N.iloc[c,13] > 0 and Day_4N.iloc[c,0] == Day_2N.iloc[c,0]: Fuel_per_day_per_adult_4N_2N.append(Fuel_per_day_per_adult_4N[c]/Fuel_per_day_per_adult_2N[c]) f_d_a_4N_2N.append(Day_4N.iloc[c,0]) # now for box plotting for Fuel per day beteen Phases #1N sns.set(style="ticks") f, (ax_box, ax_hist) = plt.subplots(2, 
sharex=True, gridspec_kw={"height_ratios": (0.15, 0.85)}) sns.boxplot(Fuel_per_day_per_adult_1N, ax=ax_box, color='b') sns.distplot(Fuel_per_day_per_adult_1N, ax=ax_hist, color='b') ax_box.set(yticks=[]) sns.despine(ax=ax_hist) sns.despine(ax=ax_box, left=True) plt.title('1N Fuel per Day per Adult') plt.ylim(top=2) plt.ylim(bottom = 0) #2N sns.set(style="ticks") f, (ax_box, ax_hist) = plt.subplots(2, sharex=True, gridspec_kw={"height_ratios": (0.15, 0.85)}) sns.boxplot(Fuel_per_day_per_adult_2N, ax=ax_box, color='g') sns.distplot(Fuel_per_day_per_adult_2N, ax=ax_hist, color='g') ax_box.set(yticks=[]) sns.despine(ax=ax_hist) sns.despine(ax=ax_box, left=True) plt.title('2N Fuel per Day per Adult') plt.ylim(top=2) plt.ylim(bottom = 0) #3N sns.set(style="ticks") f, (ax_box, ax_hist) = plt.subplots(2, sharex=True, gridspec_kw={"height_ratios": (0.15, 0.85)}) sns.boxplot(Fuel_per_day_per_adult_3N, ax=ax_box, color='r') sns.distplot(Fuel_per_day_per_adult_3N, ax=ax_hist, color='r') ax_box.set(yticks=[]) sns.despine(ax=ax_hist) sns.despine(ax=ax_box, left=True) plt.title('3N Fuel per Day per Adult') plt.ylim(top=2) plt.ylim(bottom = 0) #4N sns.set(style="ticks") f, (ax_box, ax_hist) = plt.subplots(2, sharex=True, gridspec_kw={"height_ratios": (0.15, 0.85)}) sns.boxplot(Fuel_per_day_per_adult_4N, ax=ax_box, color='y') sns.distplot(Fuel_per_day_per_adult_4N, ax=ax_hist, color='y') ax_box.set(yticks=[]) sns.despine(ax=ax_hist) sns.despine(ax=ax_box, left=True) plt.title('4N Fuel per Day per Adult') plt.ylim(top=2) plt.ylim(bottom = 0) #Plotting on the same graph fig, ax = plt.subplots() plt.title('No-Hood Fuel per Day per Adult') #plt.hold(True) #1N quant_1_1N = np.percentile(Fuel_per_day_per_adult_1N, [25,50,75]) Top_lim_1_1N = quant_1_1N[2] + 1.5*(quant_1_1N[2] - quant_1_1N[0]) Low_lim_1_1N = quant_1_1N[0] - 1.5*(quant_1_1N[2] - quant_1_1N[0]) bp_1 = plt.boxplot(Fuel_per_day_per_adult_1N, positions = [1], widths = 0.6) Fuel_D_A_1N_outlier = [] for v,a in 
enumerate(Fuel_per_day_per_adult_1N): if a > Top_lim_1_1N or a < Low_lim_1_1N: Fuel_D_A_1N_outlier.append(f_d_a_1N[v]) plt.text(1,a,f_d_a_1N[v]) plt.text(1,0.1,'1N',color='b') #2N quant_1_2N = np.percentile(Fuel_per_day_per_adult_2N, [25,50,75]) Top_lim_1_2N = quant_1_2N[2] + 1.5*(quant_1_2N[2] - quant_1_2N[0]) Low_lim_1_2N = quant_1_2N[0] - 1.5*(quant_1_2N[2] - quant_1_2N[0]) bp_1 = plt.boxplot(Fuel_per_day_per_adult_2N,positions = [2], widths = 0.6) Fuel_D_A_2N_outlier = [] for v,a in enumerate(Fuel_per_day_per_adult_2N): if a > Top_lim_1_2N or a < Low_lim_1_2N: Fuel_D_A_2N_outlier.append(f_d_a_2N[v]) plt.text(2,a,f_d_a_2N[v]) plt.text(2,0.1,'2N', color= 'g') #3N quant_1_3N = np.percentile(Fuel_per_day_per_adult_3N, [25,50,75]) Top_lim_1_3N = quant_1_3N[2] + 1.5*(quant_1_3N[2] - quant_1_3N[0]) Low_lim_1_3N = quant_1_3N[0] - 1.5*(quant_1_3N[2] - quant_1_3N[0]) bp_1 = plt.boxplot(Fuel_per_day_per_adult_3N,positions = [3], widths = 0.6) count = 0 Fuel_D_A_3N_outlier = [] for v,a in enumerate(Fuel_per_day_per_adult_3N): if a > Top_lim_1_3N or a < Low_lim_1_3N: Fuel_D_A_3N_outlier.append(f_d_a_3N[v]) count = count + 1 if count == 2: plt.text(3,a,f_d_a_3N[v],ha='left',va='bottom') elif count != 2: plt.text(3,a,f_d_a_3N[v],ha='right',va='bottom') plt.text(3,0.1,'3N', color='r') #4N quant_1_4N = np.percentile(Fuel_per_day_per_adult_4N, [25,50,75]) Top_lim_1_4N = quant_1_4N[2] + 1.5*(quant_1_4N[2] - quant_1_4N[0]) Low_lim_1_4N = quant_1_4N[0] - 1.5*(quant_1_4N[2] - quant_1_4N[0]) bp_1 = plt.boxplot(Fuel_per_day_per_adult_4N,positions = [4], widths = 0.6) Fuel_D_A_4N_outlier = [] for v,a in enumerate(Fuel_per_day_per_adult_4N): if a > Top_lim_1_4N or a < Low_lim_1_4N: Fuel_D_A_4N_outlier.append(f_d_a_4N[v]) plt.text(4,a,f_d_a_4N[v]) plt.text(4,0.1,'4N', color='y') plt.xlim(0,5) plt.ylim(0,2.3) print('Fuel/Day/Adult 1N had these values as outliers ', Fuel_D_A_1N_outlier) print('Fuel/Day/Adult 2N had these values as outliers ', Fuel_D_A_2N_outlier) print('Fuel/Day/Adult 3N 
had these values as outliers ', Fuel_D_A_3N_outlier) print('Fuel/Day/Adult 4N had these values as outliers ', Fuel_D_A_4N_outlier) plt.show() # % change of fuel per day per adult between each phase fig_2, ax2 = plt.subplots() plt.title('% No_hood Change from Fuel per Day per Adult' ) #plt.hold(True) #2N to 1N quant_1_2N_1N = np.percentile(Fuel_per_day_per_adult_2N_1N, [25,50,75]) Top_lim_1_2N_1N = quant_1_2N_1N[2] + 1.5*(quant_1_2N_1N[2]-quant_1_2N_1N[0]) Low_lim_1_2N_1N = quant_1_2N_1N[0] - 1.5*(quant_1_2N_1N[2]-quant_1_2N_1N[0]) bp_1_1 = plt.boxplot(Fuel_per_day_per_adult_2N_1N, positions=[1], widths= 0.6) Fuel_D_A_2N_1N_outlier = [] for v,a in enumerate(Fuel_per_day_per_adult_2N_1N): if a > Top_lim_1_2N_1N or a < Low_lim_1_2N_1N: Fuel_D_A_2N_1N_outlier.append(f_d_a_2N_1N[v]) plt.text(1, a, f_d_a_2N_1N[v]) plt.text(0.5, 0, '2N / 1N', color= 'g') #3N to 1N quant_1_3N_1N = np.percentile(Fuel_per_day_per_adult_3N_1N, [25,50,75]) Top_lim_1_3N_1N = quant_1_3N_1N[2] + 1.5*(quant_1_3N_1N[2]-quant_1_3N_1N[0]) Low_lim_1_3N_1N = quant_1_3N_1N[0] - 1.5*(quant_1_3N_1N[2]-quant_1_3N_1N[0]) bp_1_1 = plt.boxplot(Fuel_per_day_per_adult_3N_1N, positions=[2], widths= 0.6) Fuel_D_A_3N_1N_outlier = [] for v,a in enumerate(Fuel_per_day_per_adult_3N_1N): if a > Top_lim_1_3N_1N or a < Low_lim_1_3N_1N: Fuel_D_A_3N_1N_outlier.append(f_d_a_3N_1N[v]) plt.text(2, a, f_d_a_3N_1N[v]) plt.text(1.5, 0, '3N / 1N', color= 'r') #4N to 1N quant_1_4N_1N = np.percentile(Fuel_per_day_per_adult_4N_1N, [25,50,75]) Top_lim_1_4N_1N = quant_1_4N_1N[2] + 1.5*(quant_1_4N_1N[2]-quant_1_4N_1N[0]) Low_lim_1_4N_1N = quant_1_4N_1N[0] - 1.5*(quant_1_4N_1N[2]-quant_1_4N_1N[0]) bp_1_1 = plt.boxplot(Fuel_per_day_per_adult_4N_1N, positions=[3], widths= 0.6) Fuel_D_A_4N_1N_outlier = [] for v,a in enumerate(Fuel_per_day_per_adult_4N_1N): if a > Top_lim_1_4N_1N or a < Low_lim_1_4N_1N: Fuel_D_A_4N_1N_outlier.append(f_d_a_4N_1N[v]) plt.text(3, a, f_d_a_4N_1N[v]) plt.text(2.5, 0, '4N / 1N', color= 'y') #3N to 2N 
quant_1_3N_2N = np.percentile(Fuel_per_day_per_adult_3N_2N, [25,50,75]) Top_lim_1_3N_2N = quant_1_3N_2N[2] + 1.5*(quant_1_3N_2N[2]-quant_1_3N_2N[0]) Low_lim_1_3N_2N = quant_1_3N_2N[0] - 1.5*(quant_1_3N_2N[2]-quant_1_3N_2N[0]) bp_1_1 = plt.boxplot(Fuel_per_day_per_adult_3N_2N, positions=[4], widths= 0.6) Fuel_D_A_3N_2N_outlier = [] for v,a in enumerate(Fuel_per_day_per_adult_3N_2N): if a > Top_lim_1_3N_2N or a < Low_lim_1_3N_2N: Fuel_D_A_3N_2N_outlier.append(f_d_a_3N_2N[v]) plt.text(4, a, f_d_a_3N_2N[v]) plt.text(3.5, 0, '3N / 2N', color= 'm') #4N to 3N quant_1_4N_3N = np.percentile(Fuel_per_day_per_adult_4N_3N, [25,50,75]) Top_lim_1_4N_3N = quant_1_4N_3N[2] + 1.5*(quant_1_4N_3N[2]-quant_1_4N_3N[0]) Low_lim_1_4N_3N = quant_1_4N_3N[0] - 1.5*(quant_1_4N_3N[2]-quant_1_4N_3N[0]) bp_1_1 = plt.boxplot(Fuel_per_day_per_adult_4N_3N, positions=[5], widths= 0.6) Fuel_D_A_4N_3N_outlier = [] for v,a in enumerate(Fuel_per_day_per_adult_4N_3N): if a > Top_lim_1_4N_3N or a < Low_lim_1_4N_3N: Fuel_D_A_4N_3N_outlier.append(f_d_a_4N_3N[v]) plt.text(5, a, f_d_a_4N_3N[v]) plt.text(4.5, 0, '4N / 3N', color= 'k') #4N to 2N quant_1_4N_2N = np.percentile(Fuel_per_day_per_adult_4N_2N, [25,50,75]) Top_lim_1_4N_2N = quant_1_4N_2N[2] + 1.5*(quant_1_4N_2N[2]-quant_1_4N_2N[0]) Low_lim_1_4N_2N = quant_1_4N_2N[0] - 1.5*(quant_1_4N_2N[2]-quant_1_4N_2N[0]) bp_1_1 = plt.boxplot(Fuel_per_day_per_adult_4N_2N, positions=[6], widths= 0.6) Fuel_D_A_4N_2N_outlier = [] for v,a in enumerate(Fuel_per_day_per_adult_4N_2N): if a > Top_lim_1_4N_2N or a < Low_lim_1_4N_2N: Fuel_D_A_4N_2N_outlier.append(f_d_a_4N_2N[v]) plt.text(6, a, f_d_a_4N_2N[v]) plt.text(5.5, 0, '4N / 2N', color= 'tab:orange') plt.xlim(0,7) plt.ylim(-0.5,4) print('Fuel/Day/Adult 2N/1N had these values as outliers ', Fuel_D_A_2N_1N_outlier) print('Fuel/Day/Adult 3N/1N had these values as outliers ', Fuel_D_A_3N_1N_outlier) print('Fuel/Day/Adult 4N/1N had these values as outliers ', Fuel_D_A_4N_1N_outlier) print('Fuel/Day/Adult 3N/2N had these 
values as outliers ', Fuel_D_A_3N_2N_outlier) print('Fuel/Day/Adult 4N/3N had these values as outliers ', Fuel_D_A_4N_3N_outlier) print('Fuel/Day/Adult 4N/2N had these values as outliers ', Fuel_D_A_4N_2N_outlier) plt.show() #adding averages to the tables quant_1_1N = np.append(quant_1_1N, np.average(Fuel_per_day_per_adult_1N)) quant_1_2N = np.append(quant_1_2N, np.average(Fuel_per_day_per_adult_2N)) quant_1_3N = np.append(quant_1_3N, np.average(Fuel_per_day_per_adult_3N)) quant_1_4N = np.append(quant_1_4N, np.average(Fuel_per_day_per_adult_4N)) D_50_quant_phase_f_d_a = {'Percentile %': ['25','50','75', 'Avg'], '1N': quant_1_1N, '2N': quant_1_2N,'3N' : quant_1_3N,'4N': quant_1_4N} F_D_A_50_phase_no_hood = pd.DataFrame(data=D_50_quant_phase_f_d_a, columns=['Percentile %','1N', '2N', '3N','4N']) quant_1_2N_1N = np.append(quant_1_2N_1N , np.average(Fuel_per_day_per_adult_2N_1N)) quant_1_3N_1N = np.append(quant_1_3N_1N , np.average(Fuel_per_day_per_adult_3N_1N)) quant_1_4N_1N = np.append(quant_1_4N_1N , np.average(Fuel_per_day_per_adult_4N_1N)) quant_1_3N_2N = np.append(quant_1_3N_2N , np.average(Fuel_per_day_per_adult_3N_2N)) quant_1_4N_3N = np.append(quant_1_4N_3N , np.average(Fuel_per_day_per_adult_4N_3N)) quant_1_4N_2N = np.append(quant_1_4N_2N , np.average(Fuel_per_day_per_adult_4N_2N)) D_50_quant_percent_f_d_a ={'Percentile %': ['25','50','75', 'Avg'],'2N / 1N': quant_1_2N_1N,'3N / 1N': quant_1_3N_1N,'4N / 1N': quant_1_4N_1N, '3N / 2N': quant_1_3N_2N,'4N / 3N': quant_1_4N_3N,'4N / 2N': quant_1_4N_2N} F_D_A_50_percent_change_no_hood = pd.DataFrame(data=D_50_quant_percent_f_d_a, columns=['Percentile %','2N / 1N','3N / 1N', '4N / 1N' ,'3N / 2N','4N / 3N','4N / 2N']) print(F_D_A_50_phase_no_hood) print(F_D_A_50_percent_change_no_hood) # add more print ('-------------------Fuel per Day per Adult Hood Phase -------------------') if Hood_or_no == 'hood': Fuel_per_day_per_adult_1H = [] f_d_a_1H = [] Fuel_per_day_per_adult_2H = [] f_d_a_2H = [] Fuel_per_day_per_adult_3H = 
[] f_d_a_3H = [] count_t = 0 count_f = 0 for c in hood_counter: if c == (Household_removal[count_t] - C_Place_holder): count_t = count_t + 1 if count_t == len(Household_removal): count_t = 0 continue if c == (Household_removal_Hood_fuel_day_adult[count_f] - C_Place_holder): count_f = count_f + 1 if count_f == len(Household_removal_Hood_fuel_day_adult): count_f = 0 continue if Fuel_remove_1H_24hr.iloc[c,6] != -1.00: Fuel_per_day_per_adult_1H.append(Fuel_remove_1H_24hr.iloc[c,6]/Survey_1H.iloc[c,7]) f_d_a_1H.append(Day_1H.iloc[c,0]) if Fuel_remove_2H_24hr.iloc[c,6] != -1.00: Fuel_per_day_per_adult_2H.append(Fuel_remove_2H_24hr.iloc[c,6] / Survey_2H.iloc[c, 7]) f_d_a_2H.append(Day_2H.iloc[c,0]) if Fuel_remove_3H_24hr.iloc[c,6] != -1.00: Fuel_per_day_per_adult_3H.append(Fuel_remove_3H_24hr.iloc[c,6]/ Survey_3H.iloc[c, 7]) f_d_a_3H.append(Day_3H.iloc[c, 0]) # percentage Change of Fuel per day between the phases Fuel_per_day_per_adult_2H_1H = [] f_d_a_2H_1H = [] Fuel_per_day_per_adult_3H_1H = [] f_d_a_3H_1H = [] Fuel_per_day_per_adult_3H_2H = [] f_d_a_3H_2H = [] count_t = 0 count_f = 0 for c in hood_counter: if c == (Household_removal[count_t] - C_Place_holder): count_t = count_t + 1 if count_t == len(Household_removal): count_t = 0 continue if c == (Household_removal_Hood_fuel_day_adult[count_f] - C_Place_holder): count_f = count_f + 1 if count_f == len(Household_removal_Hood_fuel_day_adult): count_f = 0 continue if (len(Fuel_per_day_per_adult_2H)-1) >= c and (len(Fuel_per_day_per_adult_1H)-1) >= c: if Day_1H.iloc[c,13] > 0 and Day_2H.iloc[c,13] > 0 and Day_1H.iloc[c,0] == Day_2H.iloc[c,0]: Fuel_per_day_per_adult_2H_1H.append(Fuel_per_day_per_adult_2H[c]/Fuel_per_day_per_adult_1H[c]) f_d_a_2H_1H.append(Day_1H.iloc[c,0]) if (len(Fuel_per_day_per_adult_3H)-1) >= c and (len(Fuel_per_day_per_adult_1H)-1) >= c: if Day_3H.iloc[c,13] > 0 and Day_1H.iloc[c,13] > 0 and Day_3H.iloc[c,0] == Day_1H.iloc[c,0]: 
Fuel_per_day_per_adult_3H_1H.append(Fuel_per_day_per_adult_3H[c]/Fuel_per_day_per_adult_1H[c]) f_d_a_3H_1H.append(Day_1H.iloc[c,0]) if (len(Fuel_per_day_per_adult_3H)-1) >= c and (len(Fuel_per_day_per_adult_2H)-1) >= c: if Day_3H.iloc[c,13] > 0 and Day_2H.iloc[c,13] > 0 and Day_3H.iloc[c,0] == Day_2H.iloc[c,0]: Fuel_per_day_per_adult_3H_2H.append(Fuel_per_day_per_adult_3H[c]/Fuel_per_day_per_adult_2H[c]) f_d_a_3H_2H.append(Day_1H.iloc[c,0]) # now for plotting #1H sns.set(style="ticks") f, (ax_box, ax_hist) = plt.subplots(2, sharex=True, gridspec_kw={"height_ratios": (0.15, 0.85)}) sns.boxplot(Fuel_per_day_per_adult_1H, ax=ax_box, color='b') sns.distplot(Fuel_per_day_per_adult_1H, ax=ax_hist, color='b') ax_box.set(yticks=[]) sns.despine(ax=ax_hist) sns.despine(ax=ax_box, left=True) plt.title('1H Fuel per Day per Adult') plt.ylim(top=2) plt.ylim(bottom = 0) #2H sns.set(style="ticks") f, (ax_box, ax_hist) = plt.subplots(2, sharex=True, gridspec_kw={"height_ratios": (0.15, 0.85)}) sns.boxplot(Fuel_per_day_per_adult_2H, ax=ax_box, color='g') sns.distplot(Fuel_per_day_per_adult_2H, ax=ax_hist, color='g') ax_box.set(yticks=[]) sns.despine(ax=ax_hist) sns.despine(ax=ax_box, left=True) plt.title('2H Fuel per Day per Adult') plt.ylim(top=2) plt.ylim(bottom = 0) #3H sns.set(style="ticks") f, (ax_box, ax_hist) = plt.subplots(2, sharex=True, gridspec_kw={"height_ratios": (0.15, 0.85)}) sns.boxplot(Fuel_per_day_per_adult_3H, ax=ax_box, color='r') sns.distplot(Fuel_per_day_per_adult_3H, ax=ax_hist, color='r') ax_box.set(yticks=[]) sns.despine(ax=ax_hist) sns.despine(ax=ax_box, left=True) plt.title('3H Fuel per Day per Adult') plt.ylim(top=2) plt.ylim(bottom = 0) fig_2, ax_2 = plt.subplots() plt.title('Hood Fuel per Day per Adult') #plt.hold(True) quant_1_1H = np.percentile(Fuel_per_day_per_adult_1H, [25,50,75]) Top_lim_1_1H = quant_1_1H[2] + 1.5*(quant_1_1H[2] - quant_1_1H[0]) Low_lim_1_1H = quant_1_1H[0] - 1.5*(quant_1_1H[2] - quant_1_1H[0]) bp_1 = 
plt.boxplot(Fuel_per_day_per_adult_1H, positions = [1], widths = 0.6) Fuel_D_A_1H_outlier = [] for v,a in enumerate(Fuel_per_day_per_adult_1H): if a > Top_lim_1_1H or a < Low_lim_1_1H: Fuel_D_A_1H_outlier.append(f_d_a_1H[v]) plt.text(1,a,f_d_a_1H[v]) plt.text(1,0,'1H',color='b') quant_1_2H = np.percentile(Fuel_per_day_per_adult_2H, [25,50,75]) Top_lim_1_2H = quant_1_2H[2] + 1.5*(quant_1_2H[2] - quant_1_2H[0]) Low_lim_1_2H = quant_1_2H[0] - 1.5*(quant_1_2H[2] - quant_1_2H[0]) bp_1 = plt.boxplot(Fuel_per_day_per_adult_2H,positions = [2], widths = 0.6) count = 0 Fuel_D_A_2H_outlier = [] for v,a in enumerate(Fuel_per_day_per_adult_2H): if a > Top_lim_1_2H or a < Low_lim_1_2H: Fuel_D_A_2H_outlier.append(f_d_a_2H[v]) count = count + 1 if count == 1: plt.text(2,a,f_d_a_2H[v],ha='left',va='bottom') elif count !=1: plt.text(2,a,f_d_a_2H[v],ha='right',va='bottom') plt.text(2,0,'2H', color= 'g') quant_1_3H = np.percentile(Fuel_per_day_per_adult_3H, [25,50,75]) Top_lim_1_3H = quant_1_3H[2] + 1.5*(quant_1_3H[2] - quant_1_3H[0]) Low_lim_1_3H = quant_1_3H[0] - 1.5*(quant_1_3H[2] - quant_1_3H[0]) bp_1 = plt.boxplot(Fuel_per_day_per_adult_3H,positions = [3], widths = 0.6) count = 0 Fuel_D_A_3H_outlier = [] for v,a in enumerate(Fuel_per_day_per_adult_3H): if a > Top_lim_1_3H or a < Low_lim_1_3H: Fuel_D_A_3H_outlier.append(f_d_a_3H[v]) count = count + 1 if count == 3: plt.text(3,a,f_d_a_3H[v],ha='left',va='bottom') elif count != 1: plt.text(3,a,f_d_a_3H[v],ha='right',va='bottom') plt.text(3,0,'3H', color='r') plt.xlim(-0,4) plt.ylim(-0.25,2.5) print('Fuel/Day/Adult 1H had these values as outliers ', Fuel_D_A_1H_outlier) print('Fuel/Day/Adult 2H had these values as outliers ', Fuel_D_A_2H_outlier) print('Fuel/Day/Adult 3H had these values as outliers ', Fuel_D_A_3H_outlier) plt.show() #% change of fuel perday per adult between each phase fig_2, ax2 = plt.subplots() plt.title('% No_hood Change from Fuel per Day per Adult' ) #plt.hold(True) #2H to 1H quant_1_2H_1H = 
np.percentile(Fuel_per_day_per_adult_2H_1H, [25,50,75]) Top_lim_1_2H_1H = quant_1_2H_1H[2] + 1.5*(quant_1_2H_1H[2]-quant_1_2H_1H[0]) Low_lim_1_2H_1H = quant_1_2H_1H[0] - 1.5*(quant_1_2H_1H[2]-quant_1_2H_1H[0]) bp_1_1 = plt.boxplot(Fuel_per_day_per_adult_2H_1H, positions=[1], widths= 0.6) Fuel_D_A_2H_1H_outlier = [] for v,a in enumerate(Fuel_per_day_per_adult_2H_1H): if a > Top_lim_1_2H_1H or a < Low_lim_1_2H_1H: Fuel_D_A_2H_1H_outlier.append(f_d_a_2H_1H[v]) plt.text(1, a, f_d_a_2H_1H[v]) plt.text(0.75, -0.25, '2H / 1H', color= 'g') #3H to 1H quant_1_3H_1H = np.percentile(Fuel_per_day_per_adult_3H_1H, [25,50,75]) Top_lim_1_3H_1H = quant_1_3H_1H[2] + 1.5*(quant_1_3H_1H[2]-quant_1_3H_1H[0]) Low_lim_1_3H_1H = quant_1_3H_1H[0] - 1.5*(quant_1_3H_1H[2]-quant_1_3H_1H[0]) bp_1_1 = plt.boxplot(Fuel_per_day_per_adult_3H_1H, positions=[2], widths= 0.6) Fuel_D_A_3H_1H_outlier = [] for v,a in enumerate(Fuel_per_day_per_adult_3H_1H): if a > Top_lim_1_3H_1H or a < Low_lim_1_3H_1H: Fuel_D_A_3H_1H_outlier.append(f_d_a_3H_1H[v]) plt.text(2, a, f_d_a_3H_1H[v]) plt.text(1.75, -0.25, '3H / 1H', color= 'r') #3H to 2H quant_1_3H_2H = np.percentile(Fuel_per_day_per_adult_3H_2H, [25,50,75]) Top_lim_1_3H_2H = quant_1_3H_2H[2] + 1.5*(quant_1_3H_2H[2]-quant_1_3H_2H[0]) Low_lim_1_3H_2H = quant_1_3H_2H[0] - 1.5*(quant_1_3H_2H[2]-quant_1_3H_2H[0]) bp_1_1 = plt.boxplot(Fuel_per_day_per_adult_3H_2H, positions=[3], widths= 0.6) Fuel_D_A_3H_2H_outlier = [] for v,a in enumerate(Fuel_per_day_per_adult_3H_2H): if a > Top_lim_1_3H_2H or a < Low_lim_1_3H_2H: Fuel_D_A_3H_2H_outlier.append(f_d_a_3H_2H[v]) plt.text(3, a, f_d_a_3H_2H[v]) plt.text(2.75, -0.25, '2H / 1H', color= 'm') plt.xlim(-0,4) plt.ylim(-0.25,6) print('Fuel/Day/Adult 2H/1H had these values as outliers ', Fuel_D_A_2H_1H_outlier) print('Fuel/Day/Adult 3H/1H had these values as outliers ', Fuel_D_A_3H_1H_outlier) print('Fuel/Day/Adult 3H/2H had these values as outliers ', Fuel_D_A_3H_2H_outlier) plt.show() quant_1_1H = np.append(quant_1_1H, 
np.average(Fuel_per_day_per_adult_1H)) quant_1_2H = np.append(quant_1_2H, np.average(Fuel_per_day_per_adult_2H)) quant_1_3H = np.append(quant_1_3H, np.average(Fuel_per_day_per_adult_3H)) D_50_quant_phase_f_d_a_hood = {'Percentile %': ['25','50','75', 'Avg'], '1H': quant_1_1H, '2H': quant_1_2H,'3H' : quant_1_3H} F_D_A_50_phase_hood = pd.DataFrame(data=D_50_quant_phase_f_d_a_hood, columns=['Percentile %','1H', '2H','3H'] ) quant_1_2H_1H = np.append(quant_1_2H_1H , np.average(Fuel_per_day_per_adult_2H_1H)) quant_1_3H_1H = np.append(quant_1_3H_1H , np.average(Fuel_per_day_per_adult_3H_1H)) quant_1_3H_2H = np.append(quant_1_3H_2H , np.average(Fuel_per_day_per_adult_3H_2H)) D_50_quant_percent_f_d_a_hood ={'Percentile %': ['25','50','75', 'Avg'],'2H / 1H': quant_1_2H_1H,'3H / 1H': quant_1_3H_1H,'3H / 2H': quant_1_3H_2H} F_D_A_50_percent_change_hood = pd.DataFrame(data=D_50_quant_percent_f_d_a_hood, columns=['Percentile %','2H / 1H','3H / 1H','3H / 2H']) print(F_D_A_50_phase_hood) print(F_D_A_50_percent_change_hood) print('----------------------- Kitchen PM per Day -----------------------------') if Hood_or_no == 'no_hood': Kit_PM_per_day_1N = [] K_PM_D_1N = [] Kit_PM_per_day_2N = [] K_PM_D_2N = [] Kit_PM_per_day_3N = [] K_PM_D_3N = [] Kit_PM_per_day_4N = [] K_PM_D_4N = [] count_t = 0 count_pm = 0 for c in NO_hood_counter: if c == (Household_removal[count_t] - C_Place_holder): count_t = count_t + 1 if count_t == len(Household_removal): count_t = 0 continue if c == (Household_removal_NO_Hood_PM[count_pm] - C_Place_holder): count_pm = count_pm + 1 if count_pm == len(Household_removal_NO_Hood_PM): count_pm = 0 continue # if Day_1N.iloc[c,7] != -1.00: # Kit_PM_per_day_1N.append(Day_1N.iloc[c,7]/Day_1N.iloc[c,1]) # K_PM_D_1N.append(Day_1N.iloc[c,0]) if Kit_PM_1N_24hr.iloc[c,6] != -1.00: Kit_PM_per_day_1N.append(Kit_PM_1N_24hr.iloc[c,6]) K_PM_D_1N.append(Kit_PM_1N_24hr.iloc[c, 0]) #if Day_2N.iloc[c, 7] != -1.00: # Kit_PM_per_day_2N.append(Day_2N.iloc[c,7]/Day_2N.iloc[c,1]) # 
K_PM_D_2N.append(Day_2N.iloc[c,0]) if Kit_PM_2N_24hr.iloc[c, 6] != -1.00: Kit_PM_per_day_2N.append(Kit_PM_2N_24hr.iloc[c, 6]) K_PM_D_2N.append(Kit_PM_2N_24hr.iloc[c, 0]) # if Day_3N.iloc[c, 7] != -1.00: # Kit_PM_per_day_3N.append(Day_3N.iloc[c,7]/Day_3N.iloc[c,1]) # K_PM_D_3N.append(Day_3N.iloc[c, 0]) if Kit_PM_3N_24hr.iloc[c, 6] != -1.00: Kit_PM_per_day_3N.append(Kit_PM_3N_24hr.iloc[c, 6]) K_PM_D_3N.append(Kit_PM_3N_24hr.iloc[c, 0]) # if Day_4N.iloc[c, 7] != -1.00: # Kit_PM_per_day_4N.append(Day_4N.iloc[c,7]/Day_4N.iloc[c,1]) # K_PM_D_4N.append(Day_4N.iloc[c, 0]) if Kit_PM_4N_24hr.iloc[c, 6] != -1.00: Kit_PM_per_day_4N.append(Kit_PM_4N_24hr.iloc[c, 6]) K_PM_D_4N.append(Kit_PM_4N_24hr.iloc[c, 0]) # percentages Between Phases of kitchen PM per day Kit_per_day_2N_1N = [] K_PM_D_2N_1N = [] Kit_per_day_3N_1N = [] K_PM_D_3N_1N = [] Kit_per_day_4N_1N = [] K_PM_D_4N_1N = [] Kit_per_day_3N_2N = [] K_PM_D_3N_2N = [] Kit_per_day_4N_3N = [] K_PM_D_4N_3N = [] Kit_per_day_4N_2N = [] K_PM_D_4N_2N = [] count_t = 0 count_pm = 0 for c in NO_hood_counter: if c == (Household_removal[count_t] - C_Place_holder): count_t = count_t + 1 if count_t == len(Household_removal): count_t = 0 continue if c == (Household_removal_NO_Hood_PM[count_pm] - C_Place_holder): count_pm = count_pm + 1 if count_pm == len(Household_removal_NO_Hood_PM): count_pm = 0 continue if (len(Kit_PM_per_day_2N)-1) >= c and (len(Kit_PM_per_day_1N)-1) >= c: #if Day_1N.iloc[c,7] > 0 and Day_2N.iloc[c,7] > 0 and Day_1N.iloc[c,0] == Day_2N.iloc[c,0]: if Kit_PM_1N_24hr.iloc[c,6] > 0 and Kit_PM_2N_24hr.iloc[c,6] > 0 and Kit_PM_1N_24hr.iloc[c,0] == Kit_PM_2N_24hr.iloc[c,0]: Kit_per_day_2N_1N.append(Kit_PM_per_day_2N[c]/Kit_PM_per_day_1N[c]) K_PM_D_2N_1N.append(Day_1N.iloc[c,0]) if (len(Kit_PM_per_day_3N)-1) >= c and (len(Kit_PM_per_day_1N)-1) >= c: #if Day_3N.iloc[c,7] > 0 and Day_1N.iloc[c,7] > 0 and Day_3N.iloc[c,0] == Day_1N.iloc[c,0]: if Kit_PM_3N_24hr.iloc[c, 6] > 0 and Kit_PM_1N_24hr.iloc[c, 6] > 0 and 
Kit_PM_3N_24hr.iloc[c, 0] == \ Kit_PM_1N_24hr.iloc[c, 0]: Kit_per_day_3N_1N.append(Kit_PM_per_day_3N[c]/Kit_PM_per_day_1N[c]) K_PM_D_3N_1N.append(Day_1N.iloc[c,0]) if (len(Kit_PM_per_day_4N)-1) >= c and (len(Kit_PM_per_day_1N)-1) >= c: #if Day_4N.iloc[c,7] > 0 and Day_1N.iloc[c,7] > 0 and Day_4N.iloc[c,0] == Day_1N.iloc[c,0]: if Kit_PM_4N_24hr.iloc[c, 6] > 0 and Kit_PM_1N_24hr.iloc[c, 6] > 0 and Kit_PM_4N_24hr.iloc[c, 0] == \ Kit_PM_1N_24hr.iloc[c, 0]: Kit_per_day_4N_1N.append(Kit_PM_per_day_4N[c]/Kit_PM_per_day_1N[c]) K_PM_D_4N_1N.append(Day_1N.iloc[c,0]) if (len(Kit_PM_per_day_3N)-1) >= c and (len(Kit_PM_per_day_2N)-1) >= c: #if Day_3N.iloc[c,7] > 0 and Day_2N.iloc[c,7] > 0 and Day_3N.iloc[c,0] == Day_2N.iloc[c,0]: if Kit_PM_3N_24hr.iloc[c, 6] > 0 and Kit_PM_2N_24hr.iloc[c, 6] > 0 and Kit_PM_3N_24hr.iloc[c, 0] == \ Kit_PM_2N_24hr.iloc[c, 0]: Kit_per_day_3N_2N.append(Kit_PM_per_day_3N[c]/Kit_PM_per_day_2N[c]) K_PM_D_3N_2N.append(Day_2N.iloc[c,0]) if (len(Kit_PM_per_day_4N)-1) >= c and (len(Kit_PM_per_day_3N)-1) >= c: #if Day_4N.iloc[c,7] > 0 and Day_3N.iloc[c,7] > 0 and Day_4N.iloc[c,0] == Day_3N.iloc[c,0]: if Kit_PM_4N_24hr.iloc[c, 6] > 0 and Kit_PM_3N_24hr.iloc[c, 6] > 0 and Kit_PM_3N_24hr.iloc[c, 0] == \ Kit_PM_4N_24hr.iloc[c, 0]: Kit_per_day_4N_3N.append(Kit_PM_per_day_4N[c]/Kit_PM_per_day_3N[c]) K_PM_D_4N_3N.append(Day_3N.iloc[c,0]) if (len(Kit_PM_per_day_4N)-1) >= c and (len(Kit_PM_per_day_2N)-1) >= c: #if Day_4N.iloc[c,7] > 0 and Day_2N.iloc[c,7] > 0 and Day_4N.iloc[c,0] == Day_2N.iloc[c,0]: if Kit_PM_4N_24hr.iloc[c, 6] > 0 and Kit_PM_4N_24hr.iloc[c, 6] > 0 and Kit_PM_4N_24hr.iloc[c, 0] == \ Kit_PM_2N_24hr.iloc[c, 0]: Kit_per_day_4N_2N.append(Kit_PM_per_day_4N[c]/Kit_PM_per_day_2N[c]) K_PM_D_4N_2N.append(Day_4N.iloc[c,0]) # now for box plotting for Kitchen PM per day percent changes #2N to 1N sns.set(style="ticks") f, (ax_box, ax_hist) = plt.subplots(2, sharex=True, gridspec_kw={"height_ratios": (0.15, 0.85)}) sns.boxplot(Kit_per_day_2N_1N, ax=ax_box, 
color='g') sns.distplot(Kit_per_day_2N_1N, ax=ax_hist, color='g') ax_box.set(yticks=[]) sns.despine(ax=ax_hist) sns.despine(ax=ax_box, left=True) plt.title('% 2N/1N (Kitchen PM per Day)') plt.ylim(top=2) plt.ylim(bottom = 0) #3N to 1N sns.set(style="ticks") f, (ax_box, ax_hist) = plt.subplots(2, sharex=True, gridspec_kw={"height_ratios": (0.15, 0.85)}) sns.boxplot(Kit_per_day_3N_1N, ax=ax_box, color='r') sns.distplot(Kit_per_day_3N_1N, ax=ax_hist, color='r') ax_box.set(yticks=[]) sns.despine(ax=ax_hist) sns.despine(ax=ax_box, left=True) plt.title('% 3N/1N (Kitchen PM per Day)') plt.ylim(top=2) plt.ylim(bottom = 0) #4N to 1N sns.set(style="ticks") f, (ax_box, ax_hist) = plt.subplots(2, sharex=True, gridspec_kw={"height_ratios": (0.15, 0.85)}) sns.boxplot(Kit_per_day_4N_1N, ax=ax_box, color='y') sns.distplot(Kit_per_day_4N_1N, ax=ax_hist, color='y') ax_box.set(yticks=[]) sns.despine(ax=ax_hist) sns.despine(ax=ax_box, left=True) plt.title('% 4N/1N (Kitchen PM per Day)') plt.ylim(top=2) plt.ylim(bottom = 0) #3N to 2N sns.set(style="ticks") f, (ax_box, ax_hist) = plt.subplots(2, sharex=True, gridspec_kw={"height_ratios": (0.15, 0.85)}) sns.boxplot(Kit_per_day_3N_2N, ax=ax_box, color='m') sns.distplot(Kit_per_day_3N_2N, ax=ax_hist, color='m') ax_box.set(yticks=[]) sns.despine(ax=ax_hist) sns.despine(ax=ax_box, left=True) plt.title('% 3N/2N (Kitchen PM per Day)') plt.ylim(top=2) plt.ylim(bottom = 0) #4N to 3N sns.set(style="ticks") f, (ax_box, ax_hist) = plt.subplots(2, sharex=True, gridspec_kw={"height_ratios": (0.15, 0.85)}) sns.boxplot(Kit_per_day_4N_3N, ax=ax_box, color='k') sns.distplot(Kit_per_day_4N_3N, ax=ax_hist, color='k') ax_box.set(yticks=[]) sns.despine(ax=ax_hist) sns.despine(ax=ax_box, left=True) plt.title('% 4N/3N (Kitchen PM per Day)') plt.ylim(top=2) plt.ylim(bottom = 0) #4N to 2N sns.set(style="ticks") f, (ax_box, ax_hist) = plt.subplots(2, sharex=True, gridspec_kw={"height_ratios": (0.15, 0.85)}) sns.boxplot(Kit_per_day_4N_2N, ax=ax_box, 
color='tab:orange') sns.distplot(Kit_per_day_4N_2N, ax=ax_hist, color='tab:orange') ax_box.set(yticks=[]) sns.despine(ax=ax_hist) sns.despine(ax=ax_box, left=True) plt.title('% 4N/2N (Kitchen PM per Day)') plt.ylim(top=2) plt.ylim(bottom = 0) #Plotting on the same graph fig, ax = plt.subplots() plt.title('No-Hood Kitchen PM per day') #plt.hold() #1N quant_1_1N = np.percentile(Kit_PM_per_day_1N, [25,50,75]) Top_lim_1_1N = quant_1_1N[2] + 1.5*(quant_1_1N[2] - quant_1_1N[0]) Low_lim_1_1N = quant_1_1N[0] - 1.5*(quant_1_1N[2] - quant_1_1N[0]) bp_1 = plt.boxplot(Kit_PM_per_day_1N, positions = [1], widths = 0.6) kitchen_pm_1N_outlier = [] for v,a in enumerate(Kit_PM_per_day_1N): if a > Top_lim_1_1N or a < Low_lim_1_1N: kitchen_pm_1N_outlier.append(K_PM_D_1N[v]) plt.text(1,a,K_PM_D_1N[v]) plt.text(1,0.1,'1N',color='b') #2N quant_1_2N = np.percentile(Kit_PM_per_day_2N, [25,50,75]) Top_lim_1_2N = quant_1_2N[2] + 1.5*(quant_1_2N[2] - quant_1_2N[0]) Low_lim_1_2N = quant_1_2N[0] - 1.5*(quant_1_2N[2] - quant_1_2N[0]) bp_1 = plt.boxplot(Kit_PM_per_day_2N,positions = [2], widths = 0.6) kitchen_pm_2N_outlier = [] for v,a in enumerate(Kit_PM_per_day_2N): if a > Top_lim_1_2N or a < Low_lim_1_2N: kitchen_pm_2N_outlier.append(K_PM_D_2N[v]) plt.text(2,a,K_PM_D_2N[v]) plt.text(2,0.1,'2N', color= 'g') #3N quant_1_3N = np.percentile(Kit_PM_per_day_3N, [25,50,75]) Top_lim_1_3N = quant_1_3N[2] + 1.5*(quant_1_3N[2] - quant_1_3N[0]) Low_lim_1_3N = quant_1_3N[0] - 1.5*(quant_1_3N[2] - quant_1_3N[0]) kitchen_pm_3N_outlier = [] bp_1 = plt.boxplot(Kit_PM_per_day_3N,positions = [3], widths = 0.6) count = 0 for v,a in enumerate(Kit_PM_per_day_3N): if a > Top_lim_1_3N or a < Low_lim_1_3N: kitchen_pm_3N_outlier.append(K_PM_D_3N[v]) count = count + 1 if count == (3): plt.text(3,a,K_PM_D_3N[v],ha='left', va='bottom') if count == (1): plt.text(3,a,K_PM_D_3N[v],ha='left', va='top') else: plt.text(3,a,K_PM_D_3N[v],ha='right', va='bottom') plt.text(3,0.1,'3N', color='r') #4N quant_1_4N = 
np.percentile(Kit_PM_per_day_4N, [25,50,75]) Top_lim_1_4N = quant_1_4N[2] + 1.5*(quant_1_4N[2] - quant_1_4N[0]) Low_lim_1_4N = quant_1_4N[0] - 1.5*(quant_1_4N[2] - quant_1_4N[0]) bp_1 = plt.boxplot(Kit_PM_per_day_4N,positions = [4], widths = 0.6) kitchen_pm_4N_outlier = [] for v,a in enumerate(Kit_PM_per_day_4N): if a > Top_lim_1_4N or a < Low_lim_1_4N: kitchen_pm_4N_outlier.append(K_PM_D_4N[v]) plt.text(4,a,K_PM_D_4N[v]) plt.text(4,0.1,'4N', color='y') plt.xlim(0,5) plt.ylim(0,1200) print('Kitchen PM 1N had these values as outliers ', kitchen_pm_1N_outlier) print('Kitchen PM 2N had these values as outliers ', kitchen_pm_2N_outlier) print('Kitchen PM 3N had these values as outliers ', kitchen_pm_3N_outlier) print('Kitchen PM 4N had these values as outliers ', kitchen_pm_4N_outlier) plt.show() # % change of PM per day fig_2, ax2 = plt.subplots() plt.title('% No_hood PM per Day Change' ) #plt.hold(True) #2N to 1N quant_1_2N_1N = np.percentile(Kit_per_day_2N_1N, [25,50,75]) Top_lim_1_2N_1N = quant_1_2N_1N[2] + 1.5*(quant_1_2N_1N[2]-quant_1_2N_1N[0]) Low_lim_1_2N_1N = quant_1_2N_1N[0] - 1.5*(quant_1_2N_1N[2]-quant_1_2N_1N[0]) bp_1_1 = plt.boxplot(Kit_per_day_2N_1N, positions=[1], widths= 0.6) kitchen_pm_2N_1N_outlier = [] for v,a in enumerate(Kit_per_day_2N_1N): if a > Top_lim_1_2N_1N or a < Low_lim_1_2N_1N: kitchen_pm_2N_1N_outlier.append(K_PM_D_2N_1N[v]) plt.text(1, a, K_PM_D_2N_1N[v]) plt.text(0.5, -0.25, '2N / 1N', color= 'g') #3N to 1N quant_1_3N_1N = np.percentile(Kit_per_day_3N_1N, [25,50,75]) Top_lim_1_3N_1N = quant_1_3N_1N[2] + 1.5*(quant_1_3N_1N[2]-quant_1_3N_1N[0]) Low_lim_1_3N_1N = quant_1_3N_1N[0] - 1.5*(quant_1_3N_1N[2]-quant_1_3N_1N[0]) bp_1_1 = plt.boxplot(Kit_per_day_3N_1N, positions=[2], widths= 0.6) kitchen_pm_3N_1N_outlier = [] for v,a in enumerate(Kit_per_day_3N_1N): if a > Top_lim_1_3N_1N or a < Low_lim_1_3N_1N: kitchen_pm_3N_1N_outlier.append(K_PM_D_3N_1N[v]) plt.text(2, a, K_PM_D_3N_1N[v]) plt.text(1.5, -0.25, '3N / 1N', color= 'r') #4N to 1N 
quant_1_4N_1N = np.percentile(Kit_per_day_4N_1N, [25,50,75]) Top_lim_1_4N_1N = quant_1_4N_1N[2] + 1.5*(quant_1_4N_1N[2]-quant_1_4N_1N[0]) Low_lim_1_4N_1N = quant_1_4N_1N[0] - 1.5*(quant_1_4N_1N[2]-quant_1_4N_1N[0]) bp_1_1 = plt.boxplot(Kit_per_day_4N_1N, positions=[3], widths= 0.6) kitchen_pm_4N_1N_outlier = [] for v,a in enumerate(Kit_per_day_4N_1N): if a > Top_lim_1_4N_1N or a < Low_lim_1_4N_1N: kitchen_pm_4N_1N_outlier.append(K_PM_D_4N_1N[v]) plt.text(3, a, K_PM_D_4N_1N[v]) plt.text(2.5, -0.25, '4N / 1N', color= 'y') #3N to 2N quant_1_3N_2N = np.percentile(Kit_per_day_3N_2N, [25,50,75]) Top_lim_1_3N_2N = quant_1_3N_2N[2] + 1.5*(quant_1_3N_2N[2]-quant_1_3N_2N[0]) Low_lim_1_3N_2N = quant_1_3N_2N[0] - 1.5*(quant_1_3N_2N[2]-quant_1_3N_2N[0]) bp_1_1 = plt.boxplot(Kit_per_day_3N_2N, positions=[4], widths= 0.6) kitchen_pm_3N_2N_outlier = [] for v,a in enumerate(Kit_per_day_3N_2N): if a > Top_lim_1_3N_2N or a < Low_lim_1_3N_2N: kitchen_pm_3N_2N_outlier.append(K_PM_D_3N_2N[v]) plt.text(4, a, K_PM_D_3N_2N[v]) plt.text(3.5, -0.25, '3N / 2N', color= 'm') #4N to 3N quant_1_4N_3N = np.percentile(Kit_per_day_4N_3N, [25,50,75]) Top_lim_1_4N_3N = quant_1_4N_3N[2] + 1.5*(quant_1_4N_3N[2]-quant_1_4N_3N[0]) Low_lim_1_4N_3N = quant_1_4N_3N[0] - 1.5*(quant_1_4N_3N[2]-quant_1_4N_3N[0]) bp_1_1 = plt.boxplot(Kit_per_day_4N_3N, positions=[5], widths= 0.6) kitchen_pm_4N_3N_outlier = [] for v,a in enumerate(Kit_per_day_4N_3N): if a > Top_lim_1_4N_3N or a < Low_lim_1_4N_3N: kitchen_pm_4N_3N_outlier.append(K_PM_D_4N_3N[v]) plt.text(5, a, K_PM_D_4N_3N[v]) plt.text(4.5, -0.25, '4N / 3N', color= 'k') #4N to 2N quant_1_4N_2N = np.percentile(Kit_per_day_4N_2N, [25,50,75]) Top_lim_1_4N_2N = quant_1_4N_2N[2] + 1.5*(quant_1_4N_2N[2]-quant_1_4N_2N[0]) Low_lim_1_4N_2N = quant_1_4N_2N[0] - 1.5*(quant_1_4N_2N[2]-quant_1_4N_2N[0]) bp_1_1 = plt.boxplot(Kit_per_day_4N_2N, positions=[6], widths= 0.6) kitchen_pm_4N_2N_outlier = [] for v,a in enumerate(Kit_per_day_4N_2N): if a > Top_lim_1_4N_2N or a < 
Low_lim_1_4N_2N: kitchen_pm_4N_2N_outlier.append(K_PM_D_4N_2N[v]) plt.text(6, a, K_PM_D_4N_2N[v]) plt.text(5.5, -0.25, '4N / 2N', color= 'tab:orange') plt.xlim(0,7) plt.ylim(-0.5,5) print('Kitchen PM 2N/1N had these values as outliers ', kitchen_pm_2N_1N_outlier) print('Kitchen PM 3N/1N had these values as outliers ', kitchen_pm_3N_1N_outlier) print('Kitchen PM 4N/1N had these values as outliers ', kitchen_pm_4N_1N_outlier) print('Kitchen PM 3N/2N had these values as outliers ', kitchen_pm_3N_2N_outlier) print('Kitchen PM 4N/3N had these values as outliers ', kitchen_pm_4N_3N_outlier) print('Kitchen PM 4N/2N had these values as outliers ', kitchen_pm_4N_2N_outlier) plt.show() #adding averages to the tables quant_1_1N = np.append(quant_1_1N, np.average(Kit_PM_per_day_1N)) quant_1_2N = np.append(quant_1_2N, np.average(Kit_PM_per_day_2N)) quant_1_3N = np.append(quant_1_3N, np.average(Kit_PM_per_day_3N)) quant_1_4N = np.append(quant_1_4N, np.average(Kit_PM_per_day_4N)) D_50_quant_phase_PM_d = {'Percentile %': ['25','50','75', 'Avg'], '1N': quant_1_1N, '2N': quant_1_2N,'3N' : quant_1_3N,'4N': quant_1_4N} PM_D_50_phase_no_hood = pd.DataFrame(data=D_50_quant_phase_PM_d,columns=['Percentile %','1N', '2N', '3N','4N']) quant_1_2N_1N = np.append(quant_1_2N_1N , np.average(Kit_per_day_2N_1N)) quant_1_3N_1N = np.append(quant_1_3N_1N , np.average(Kit_per_day_3N_1N)) quant_1_4N_1N = np.append(quant_1_4N_1N , np.average(Kit_per_day_4N_1N)) quant_1_3N_2N = np.append(quant_1_3N_2N , np.average(Kit_per_day_3N_2N)) quant_1_4N_3N = np.append(quant_1_4N_3N , np.average(Kit_per_day_4N_3N)) quant_1_4N_2N = np.append(quant_1_4N_2N , np.average(Kit_per_day_4N_2N)) D_50_quant_percent_PM_d ={'Percentile %': ['25','50','75', 'Avg'],'2N / 1N': quant_1_2N_1N,'3N / 1N': quant_1_3N_1N,'4N / 1N': quant_1_4N_1N, '3N / 2N': quant_1_3N_2N,'4N / 3N': quant_1_4N_3N,'4N / 2N': quant_1_4N_2N} PM_D_50_percent_change_no_hood = pd.DataFrame(data=D_50_quant_percent_PM_d, columns=['Percentile %','2N / 1N','3N 
/ 1N', '4N / 1N' ,'3N / 2N','4N / 3N','4N / 2N']) print(PM_D_50_phase_no_hood) print(PM_D_50_percent_change_no_hood) # hood Pm per day if Hood_or_no == 'hood': Kit_PM_per_day_1H = [] K_PM_D_1H = [] Kit_PM_per_day_2H = [] K_PM_D_2H = [] Kit_PM_per_day_3H = [] K_PM_D_3H = [] count_t = 0 count_pm = 0 for c in hood_counter: if c == (Household_removal[count_t] - C_Place_holder): count_t = count_t + 1 if count_t == len(Household_removal): count_t = 0 continue if c == (Household_removal_Hood_PM[count_pm] - C_Place_holder): count_pm = count_pm + 1 if count_pm == len(Household_removal_Hood_PM): count_pm = 0 continue # if Day_1H.iloc[c,7] != -1.00: # Kit_PM_per_day_1H.append(Day_1H.iloc[c,7]/Day_1H.iloc[c,1]) # K_PM_D_1H.append(Day_1H.iloc[c,0]) if Kit_PM_1H_24hr.iloc[c, 6] != -1.00: Kit_PM_per_day_1H.append(Kit_PM_1H_24hr.iloc[c,6]) K_PM_D_1H.append(Kit_PM_1H_24hr.iloc[c,0]) # if Day_2H.iloc[c, 7] != -1.00: # Kit_PM_per_day_2H.append(Day_2H.iloc[c,7]/Day_2H.iloc[c,1]) # K_PM_D_2H.append(Day_2H.iloc[c,0]) if Kit_PM_2H_24hr.iloc[c, 6] != -1.00: Kit_PM_per_day_2H.append(Kit_PM_2H_24hr.iloc[c,6]) K_PM_D_2H.append(Kit_PM_2H_24hr.iloc[c,0]) # if Day_3H.iloc[c, 7] != -1.00: # Kit_PM_per_day_3H.append(Day_3H.iloc[c,7]/Day_3H.iloc[c,1]) # K_PM_D_3H.append(Day_3H.iloc[c, 0]) if Kit_PM_3H_24hr.iloc[c, 6] != -1.00: Kit_PM_per_day_3H.append(Kit_PM_3H_24hr.iloc[c,6]) K_PM_D_3H.append(Kit_PM_3H_24hr.iloc[c,0]) # percentages Between Phases of kitchen PM per day Kit_per_day_2H_1H = [] K_PM_D_2H_1H = [] Kit_per_day_3H_1H = [] K_PM_D_3H_1H = [] Kit_per_day_3H_2H = [] K_PM_D_3H_2H = [] count_t = 0 count_pm = 0 for c in NO_hood_counter: if c == (Household_removal[count_t] - C_Place_holder): count_t = count_t + 1 if count_t == len(Household_removal): count_t = 0 continue if c == (Household_removal_Hood_PM[count_pm] - C_Place_holder): count_pm = count_pm + 1 if count_pm == len(Household_removal_Hood_PM): count_pm = 0 continue if (len(Kit_PM_per_day_2H)-1) >= c and (len(Kit_PM_per_day_1H)-1) >= c: 
#if Day_1H.iloc[c,7] > 0 and Day_2H.iloc[c,7] > 0 and Day_1H.iloc[c,0] == Day_2H.iloc[c,0]: if Kit_PM_1H_24hr.iloc[c, 6] > 0 and Kit_PM_2H_24hr.iloc[c, 6] > 0 and Kit_PM_1H_24hr.iloc[c, 0] == Kit_PM_2H_24hr.iloc[c, 0]: Kit_per_day_2H_1H.append(Kit_PM_per_day_2H[c]/Kit_PM_per_day_1H[c]) K_PM_D_2H_1H.append(Day_1H.iloc[c,0]) if (len(Kit_PM_per_day_3H)-1) >= c and (len(Kit_PM_per_day_1H)-1) >= c: #if Day_3H.iloc[c,7] > 0 and Day_1H.iloc[c,7] > 0 and Day_3H.iloc[c,0] == Day_1H.iloc[c,0]: if Kit_PM_3H_24hr.iloc[c, 6] > 0 and Kit_PM_1H_24hr.iloc[c, 6] > 0 and Kit_PM_1H_24hr.iloc[c, 0] == \ Kit_PM_3H_24hr.iloc[c, 0]: Kit_per_day_3H_1H.append(Kit_PM_per_day_3H[c]/Kit_PM_per_day_1H[c]) K_PM_D_3H_1H.append(Day_1H.iloc[c,0]) if (len(Kit_PM_per_day_3H)-1) >= c and (len(Kit_PM_per_day_2H)-1) >= c: #if Day_3H.iloc[c,7] > 0 and Day_2H.iloc[c,7] > 0 and Day_3H.iloc[c,0] == Day_2H.iloc[c,0]: if Kit_PM_3H_24hr.iloc[c, 6] > 0 and Kit_PM_2H_24hr.iloc[c, 6] > 0 and Kit_PM_3H_24hr.iloc[c, 0] == \ Kit_PM_2H_24hr.iloc[c, 0]: Kit_per_day_3H_2H.append(Kit_PM_per_day_3H[c]/Kit_PM_per_day_2H[c]) K_PM_D_3H_2H.append(Day_2H.iloc[c,0]) # now for box plotting for Kitchen PM per day percent changes #2H to 1H sns.set(style="ticks") f, (ax_box, ax_hist) = plt.subplots(2, sharex=True, gridspec_kw={"height_ratios": (0.15, 0.85)}) sns.boxplot(Kit_per_day_2H_1H, ax=ax_box, color='g') sns.distplot(Kit_per_day_2H_1H, ax=ax_hist, color='g') ax_box.set(yticks=[]) sns.despine(ax=ax_hist) sns.despine(ax=ax_box, left=True) plt.title('% 2H/1H (Kitchen PM per Day)') plt.ylim(top=1.5) plt.ylim(bottom = 0) #3H to 1H sns.set(style="ticks") f, (ax_box, ax_hist) = plt.subplots(2, sharex=True, gridspec_kw={"height_ratios": (0.15, 0.85)}) sns.boxplot(Kit_per_day_3H_1H, ax=ax_box, color='r') sns.distplot(Kit_per_day_3H_1H, ax=ax_hist, color='r') ax_box.set(yticks=[]) sns.despine(ax=ax_hist) sns.despine(ax=ax_box, left=True) plt.title('% 3H/1H (Kitchen PM per Day)') plt.ylim(top=2) plt.ylim(bottom = 0) #3H to 2H 
sns.set(style="ticks") f, (ax_box, ax_hist) = plt.subplots(2, sharex=True, gridspec_kw={"height_ratios": (0.15, 0.85)}) sns.boxplot(Kit_per_day_3H_2H, ax=ax_box, color='m') sns.distplot(Kit_per_day_3H_2H, ax=ax_hist, color='m') ax_box.set(yticks=[]) sns.despine(ax=ax_hist) sns.despine(ax=ax_box, left=True) plt.title('% 3H/2H (Kitchen PM per Day)') plt.ylim(top=2) plt.ylim(bottom = 0) #Plotting on the same graph fig, ax = plt.subplots() plt.title('Hood Kitchen PM per day') #1H quant_1_1H = np.percentile(Kit_PM_per_day_1H, [25,50,75]) Top_lim_1_1H = quant_1_1H[2] + 1.5*(quant_1_1H[2] - quant_1_1H[0]) Low_lim_1_1H = quant_1_1H[0] - 1.5*(quant_1_1H[2] - quant_1_1H[0]) bp_1 = plt.boxplot(Kit_PM_per_day_1H, positions = [1], widths = 0.6) kitchen_pm_1H_outlier = [] for v,a in enumerate(Kit_PM_per_day_1H): if a > Top_lim_1_1H or a < Low_lim_1_1H: kitchen_pm_1H_outlier.append(K_PM_D_1H[v]) plt.text(1,a,K_PM_D_1H[v]) plt.text(0.5,0.1,'1H',color='b') #2N quant_1_2H = np.percentile(Kit_PM_per_day_2H, [25,50,75]) Top_lim_1_2N = quant_1_2H[2] + 1.5*(quant_1_2H[2] - quant_1_2H[0]) Low_lim_1_2N = quant_1_2H[0] - 1.5*(quant_1_2H[2] - quant_1_2H[0]) bp_1 = plt.boxplot(Kit_PM_per_day_2H,positions = [2], widths = 0.6) kitchen_pm_2H_outlier = [] for v,a in enumerate(Kit_PM_per_day_2H): if a > Top_lim_1_2N or a < Low_lim_1_2N: kitchen_pm_2H_outlier.append(K_PM_D_2H[v]) plt.text(2,a,K_PM_D_2H[v]) plt.text(1.5,0.1,'2H', color= 'g') #3H quant_1_3H = np.percentile(Kit_PM_per_day_3H, [25,50,75]) Top_lim_1_3N = quant_1_3H[2] + 1.5*(quant_1_3H[2] - quant_1_3H[0]) Low_lim_1_3N = quant_1_3H[0] - 1.5*(quant_1_3H[2] - quant_1_3H[0]) kitchen_3H_outlier = [] bp_1 = plt.boxplot(Kit_PM_per_day_3H,positions = [3], widths = 0.6) count = 0 kitchen_pm_3H_outlier = [] for v,a in enumerate(Kit_PM_per_day_3H): if a > Top_lim_1_3N or a < Low_lim_1_3N: kitchen_pm_3H_outlier.append(K_PM_D_3H[v]) plt.text(3,a,K_PM_D_3H[v]) # kitchen_3N_outlier.append(K_PM_D_3N[v]) # count = count + 1 # if count == (3): # 
plt.text(3,a,K_PM_D_3N[v],ha='left', va='bottom') # if count == (1): # plt.text(3,a,K_PM_D_3N[v],ha='left', va='top') # else: # plt.text(3,a,K_PM_D_3N[v],ha='right', va='bottom') plt.text(2.5,0.1,'3H', color='r') plt.xlim(0,4) plt.ylim(0,1200) print('Kitchen PM 1H had these values as outliers ', kitchen_pm_1H_outlier) print('Kitchen PM 2H had these values as outliers ', kitchen_pm_2H_outlier) print('Kitchen PM 3H had these values as outliers ', kitchen_pm_3H_outlier) plt.show() #print('3N had these values as outliers ' , kitchen_3N_outlier) # % change of PM per day fig_2, ax2 = plt.subplots() plt.title('% hood PM per Day Change' ) #plt.hold(True) #2H to 1H quant_1_2H_1H = np.percentile(Kit_per_day_2H_1H, [25,50,75]) Top_lim_1_2N_1N = quant_1_2H_1H[2] + 1.5*(quant_1_2H_1H[2]-quant_1_2H_1H[0]) Low_lim_1_2N_1N = quant_1_2H_1H[0] - 1.5*(quant_1_2H_1H[2]-quant_1_2H_1H[0]) bp_1_1 = plt.boxplot(Kit_per_day_2H_1H, positions=[1], widths= 0.6) kitchen_pm_2H_1H_outlier = [] for v,a in enumerate(Kit_per_day_2H_1H): if a > Top_lim_1_2N_1N or a < Low_lim_1_2N_1N: kitchen_pm_2H_1H_outlier.append(K_PM_D_2H_1H[v]) plt.text(1, a, K_PM_D_2H_1H[v]) plt.text(0.75, -0.25, '2H / 1H', color= 'g') #3H to 1H quant_1_3H_1H = np.percentile(Kit_per_day_3H_1H, [25,50,75]) Top_lim_1_3N_1N = quant_1_3H_1H[2] + 1.5*(quant_1_3H_1H[2]-quant_1_3H_1H[0]) Low_lim_1_3N_1N = quant_1_3H_1H[0] - 1.5*(quant_1_3H_1H[2]-quant_1_3H_1H[0]) bp_1_1 = plt.boxplot(Kit_per_day_3H_1H, positions=[2], widths= 0.6) kitchen_pm_3H_1H_outlier = [] for v,a in enumerate(Kit_per_day_3H_1H): if a > Top_lim_1_3N_1N or a < Low_lim_1_3N_1N: kitchen_pm_3H_1H_outlier.append(K_PM_D_3H_1H[v]) plt.text(2, a, K_PM_D_3H_1H[v]) plt.text(1.75, -0.25, '3H / 1H', color= 'r') #3H to 2H quant_1_3H_2H = np.percentile(Kit_per_day_3H_2H, [25,50,75]) Top_lim_1_3N_2N = quant_1_3H_2H[2] + 1.5*(quant_1_3H_2H[2]-quant_1_3H_2H[0]) Low_lim_1_3N_2N = quant_1_3H_2H[0] - 1.5*(quant_1_3H_2H[2]-quant_1_3H_2H[0]) bp_1_1 = plt.boxplot(Kit_per_day_3H_2H, 
positions=[3], widths= 0.6) kitchen_pm_3H_2H_outlier = [] for v,a in enumerate(Kit_per_day_3H_2H): if a > Top_lim_1_3N_2N or a < Low_lim_1_3N_2N: kitchen_pm_3H_2H_outlier.append(K_PM_D_3H_2H[v]) plt.text(3, a, K_PM_D_3H_2H[v]) plt.text(2.75, -0.25, '3H / 2H', color= 'm') plt.xlim(0,4) plt.ylim(-0.5,5) print('Kitchen PM 2H/1H had these values as outliers ', kitchen_pm_2H_1H_outlier) print('Kitchen PM 3H/1H had these values as outliers ', kitchen_pm_3H_1H_outlier) print('Kitchen PM 3H/2H had these values as outliers ', kitchen_pm_3H_2H_outlier) plt.show() quant_1_1H = np.append(quant_1_1H, np.average(Kit_PM_per_day_1H)) quant_1_2H = np.append(quant_1_2H, np.average(Kit_PM_per_day_2H)) quant_1_3H = np.append(quant_1_3H, np.average(Kit_PM_per_day_3H)) D_50_quant_phase_PM_D_hood = {'Percentile %': ['25','50','75', 'Avg'], '1H': quant_1_1H, '2H': quant_1_2H,'3H' : quant_1_3H} PM_D_50_phase_hood = pd.DataFrame(data=D_50_quant_phase_PM_D_hood, columns= ['Percentile %','1H','2H','3H' ]) quant_1_2H_1H = np.append(quant_1_2H_1H , np.average(Kit_per_day_2H_1H)) quant_1_3H_1H = np.append(quant_1_3H_1H , np.average(Kit_per_day_3H_1H)) quant_1_3H_2H = np.append(quant_1_3H_2H , np.average(Kit_per_day_3H_2H)) D_50_quant_percent_PM_D_hood ={'Percentile %': ['25','50','75', 'Avg'],'2H / 1H': quant_1_2H_1H,'3H / 1H': quant_1_3H_1H,'3H / 2H': quant_1_3H_2H} PM_D_50_percent_change_hood = pd.DataFrame(data=D_50_quant_percent_PM_D_hood, columns=['Percentile %','2H / 1H','3H / 1H','3H / 2H']) print(PM_D_50_phase_hood) print(PM_D_50_percent_change_hood) # when i am ready to transfer to a data frame and get the differences #histograms for the comparison if Hood_or_no == 'no_hood': plt.title('Histogram of Fuel per 24 Hours per Person - No Hood' ) plt.hist([Fuel_per_day_per_adult_1N], color=['b'], alpha=0.5, label='1N') plt.hist([Fuel_per_day_per_adult_2N], color=['g'], alpha=0.5, label='2N') plt.hist([Fuel_per_day_per_adult_3N], color=['r'], alpha=0.5, label='3N') 
plt.hist([Fuel_per_day_per_adult_4N], color=['y'], alpha=0.5, label='4N') plt.legend(loc='upper right') plt.show() plt.title('Histogram of Kitchen PM 24 Hours - No Hood' ) plt.hist([Kit_PM_per_day_1N], color=['b'], alpha=0.5, label='1N') plt.hist([Kit_PM_per_day_2N], color=['g'], alpha=0.5, label='2N') plt.hist([Kit_PM_per_day_3N], color=['r'], alpha=0.5, label='3N') plt.hist([Kit_PM_per_day_4N], color=['y'], alpha=0.5, label='4N') plt.legend(loc='upper right') plt.show() if Hood_or_no == 'hood': plt.title('Histogram of Fuel per 24 Hours per Person - Hood' ) plt.hist([Fuel_per_day_per_adult_1H], color=['b'], alpha=0.5, label='1H') plt.hist([Fuel_per_day_per_adult_2H], color=['g'], alpha=0.5, label='2H') plt.hist([Fuel_per_day_per_adult_3H], color=['r'], alpha=0.5, label='3H') plt.legend(loc='upper right') plt.show() plt.title('Histogram of Kitchen PM 24 Hours - Hood' ) plt.hist([Kit_PM_per_day_1H], color=['b'], alpha=0.5, label='1H') plt.hist([Kit_PM_per_day_2H], color=['g'], alpha=0.5, label='2H') plt.hist([Kit_PM_per_day_3H], color=['r'], alpha=0.5, label='3H') plt.legend(loc='upper right') plt.show()
50.965767
152
0.657504
import matplotlib.pyplot as plt import numpy as np import pandas as pd import seaborn as sns Computer = 'personal' Hood_or_no = 'no_hood' Household_removal = [1045] Household_removal_NO_Hood_fuel_day_adult = [1045] Household_removal_Hood_fuel_day_adult = [2020] Household_removal_NO_Hood_PM = [1045] Household_removal_Hood_PM = [2020] pd.set_option('display.max_rows', 500) pd.set_option('display.max_columns', 500) pd.set_option('display.width', 1000) if Hood_or_no == 'hood': C_Place_holder = 2001 else: C_Place_holder = 1001 if Computer == 'personal' and Hood_or_no == 'no_hood': datafile_path_day_1N ="C:/Users/gvros/Desktop/Oregon State Masters/Work/OSU, CSC, CQC Project files/1N/1N_Summary_Day_1_exact.csv" Day_1N = pd.read_csv(datafile_path_day_1N, skiprows=2) datafile_path_event_1N = "C:/Users/gvros/Desktop/Oregon State Masters/Work/OSU, CSC, CQC Project files/1N/1N_Summary_Event_1_exact.csv" Event_1N = pd.read_csv(datafile_path_event_1N, skiprows=2) datafile_path_survey_1N = "C:/Users/gvros/Desktop/Oregon State Masters/Work/OSU, CSC, CQC Project files/1N/1N_1H_Survey_summary_.csv" Filter_1n_survey = pd.read_csv(datafile_path_survey_1N, skiprows=0) Survey_1N = Filter_1n_survey.iloc[0:40,:] data_file_path_24_PM_1N = "C:/Users/gvros/Desktop/Oregon State Masters/Work/OSU, CSC, CQC Project files/1N/1N_24_hour_Kitchen_PM.csv" Kit_PM_1N_24hr = pd.read_csv(data_file_path_24_PM_1N, skiprows=0) data_file_path_24_Fuel_1N = "C:/Users/gvros/Desktop/Oregon State Masters/Work/OSU, CSC, CQC Project files/1N/1N_24_hour_Fuel_removal.csv" Fuel_remove_1N_24hr = pd.read_csv(data_file_path_24_Fuel_1N, skiprows=0) datafile_path_day_2N ="C:/Users/gvros/Desktop/Oregon State Masters/Work/OSU, CSC, CQC Project files/2N/2N_Summary_Day_1_exact.csv" Day_2N = pd.read_csv(datafile_path_day_2N, skiprows=2) datafile_path_event_2N_1 ="C:/Users/gvros/Desktop/Oregon State Masters/Work/OSU, CSC, CQC Project files/2N/2N_Summary_Event_1_exact.csv" Event_2N_1 = pd.read_csv(datafile_path_event_2N_1, 
skiprows=2) datafile_path_event_2N_2 ="C:/Users/gvros/Desktop/Oregon State Masters/Work/OSU, CSC, CQC Project files/2N/2N_Summary_Event_2_exact.csv" Event_2N_2 = pd.read_csv(datafile_path_event_2N_2, skiprows=2) datafile_path_survey_2N = "C:/Users/gvros/Desktop/Oregon State Masters/Work/OSU, CSC, CQC Project files/2N/2N_Survey_summary_.csv" Survey_2N = pd.read_csv(datafile_path_survey_2N, skiprows=0) data_file_path_24_PM_2N = "C:/Users/gvros/Desktop/Oregon State Masters/Work/OSU, CSC, CQC Project files/2N/2N_24_hour_Kitchen_PM.csv" Kit_PM_2N_24hr = pd.read_csv(data_file_path_24_PM_2N, skiprows=0) data_file_path_24_Fuel_2N = "C:/Users/gvros/Desktop/Oregon State Masters/Work/OSU, CSC, CQC Project files/2N/2N_24_hour_Fuel_removal.csv" Fuel_remove_2N_24hr = pd.read_csv(data_file_path_24_Fuel_2N, skiprows=0) datafile_path_day_3N ="C:/Users/gvros/Desktop/Oregon State Masters/Work/OSU, CSC, CQC Project files/3N/3N_Summary_Day_1_exact.csv" Day_3N = pd.read_csv(datafile_path_day_3N, skiprows=2) datafile_path_event_3N_1 ="C:/Users/gvros/Desktop/Oregon State Masters/Work/OSU, CSC, CQC Project files/3N/3N_Summary_Event_1_exact.csv" Event_3N_1 = pd.read_csv(datafile_path_event_3N_1, skiprows=2) datafile_path_event_3N_2 ="C:/Users/gvros/Desktop/Oregon State Masters/Work/OSU, CSC, CQC Project files/3N/3N_Summary_Event_2_exact.csv" Event_3N_2 = pd.read_csv(datafile_path_event_3N_2, skiprows=2) datafile_path_survey_3N = "C:/Users/gvros/Desktop/Oregon State Masters/Work/OSU, CSC, CQC Project files/3N/3N_Survey_summary_.csv" Survey_3N = pd.read_csv(datafile_path_survey_3N, skiprows=0) data_file_path_24_PM_3N = "C:/Users/gvros/Desktop/Oregon State Masters/Work/OSU, CSC, CQC Project files/3N/3N_24_hour_Kitchen_PM.csv" Kit_PM_3N_24hr = pd.read_csv(data_file_path_24_PM_3N, skiprows=0) data_file_path_24_Fuel_3N = "C:/Users/gvros/Desktop/Oregon State Masters/Work/OSU, CSC, CQC Project files/3N/3N_24_hour_Fuel_removal.csv" Fuel_remove_3N_24hr = pd.read_csv(data_file_path_24_Fuel_3N, 
skiprows=0) datafile_path_day_4N ="C:/Users/gvros/Desktop/Oregon State Masters/Work/OSU, CSC, CQC Project files/4N/4N_Summary_Day_1_exact.csv" Day_4N = pd.read_csv(datafile_path_day_4N, skiprows=2) datafile_path_event_4N_1 ="C:/Users/gvros/Desktop/Oregon State Masters/Work/OSU, CSC, CQC Project files/4N/4N_Summary_Event_1_exact.csv" Event_4N_1 = pd.read_csv(datafile_path_event_4N_1, skiprows=2) datafile_path_event_4N_2 ="C:/Users/gvros/Desktop/Oregon State Masters/Work/OSU, CSC, CQC Project files/4N/4N_Summary_Event_2_exact.csv" Event_4N_2 = pd.read_csv(datafile_path_event_4N_2, skiprows=2) datafile_path_survey_4N = "C:/Users/gvros/Desktop/Oregon State Masters/Work/OSU, CSC, CQC Project files/4N/4N_Survey_summary_.csv" Survey_4N = pd.read_csv(datafile_path_survey_4N, skiprows=0) data_file_path_24_PM_4N = "C:/Users/gvros/Desktop/Oregon State Masters/Work/OSU, CSC, CQC Project files/4N/4N_24_hour_Kitchen_PM.csv" Kit_PM_4N_24hr = pd.read_csv(data_file_path_24_PM_4N, skiprows=0) data_file_path_24_Fuel_4N = "C:/Users/gvros/Desktop/Oregon State Masters/Work/OSU, CSC, CQC Project files/4N/4N_24_hour_Fuel_removal.csv" Fuel_remove_4N_24hr = pd.read_csv(data_file_path_24_Fuel_4N, skiprows=0) elif Computer == 'personal' and Hood_or_no == 'hood': datafile_path_day_1H ="C:/Users/gvros/Desktop/Oregon State Masters/Work/OSU, CSC, CQC Project files/1H/1H_Summary_Day_1_exact.csv" Day_1H = pd.read_csv(datafile_path_day_1H, skiprows=2) datafile_path_event_1H ="C:/Users/gvros/Desktop/Oregon State Masters/Work/OSU, CSC, CQC Project files/1H/1H_Summary_Event_1_exact.csv" Event_1H = pd.read_csv(datafile_path_event_1H, skiprows=2) datafile_path_survey_1H = "C:/Users/gvros/Desktop/Oregon State Masters/Work/OSU, CSC, CQC Project files/1N/1N_1H_Survey_summary_.csv" Survey_1H = pd.read_csv(datafile_path_survey_1H, skiprows=40) data_file_path_24_PM_1H = "C:/Users/gvros/Desktop/Oregon State Masters/Work/OSU, CSC, CQC Project files/1H/1H_24_hour_Kitchen_PM.csv" Kit_PM_1H_24hr = 
pd.read_csv(data_file_path_24_PM_1H, skiprows=0) data_file_path_24_fuel_1H = "C:/Users/gvros/Desktop/Oregon State Masters/Work/OSU, CSC, CQC Project files/1H/1H_24_hour_Fuel_removal.csv" Fuel_remove_1H_24hr = pd.read_csv(data_file_path_24_fuel_1H, skiprows=0) datafile_path_day_2H ="C:/Users/gvros/Desktop/Oregon State Masters/Work/OSU, CSC, CQC Project files/2H/2H_Summary_Day_1_exact.csv" Day_2H = pd.read_csv(datafile_path_day_2H, skiprows=2) datafile_path_event_2H_1 ="C:/Users/gvros/Desktop/Oregon State Masters/Work/OSU, CSC, CQC Project files/2H/2H_Summary_Event_1_exact.csv" Event_2H_1 = pd.read_csv(datafile_path_event_2H_1, skiprows=2) datafile_path_event_2H_2 ="C:/Users/gvros/Desktop/Oregon State Masters/Work/OSU, CSC, CQC Project files/2H/2H_Summary_Event_2_exact.csv" Event_2H_2 = pd.read_csv(datafile_path_event_2H_2, skiprows=2) datafile_path_survey_2H = "C:/Users/gvros/Desktop/Oregon State Masters/Work/OSU, CSC, CQC Project files/2H/2H_Survey_summary_.csv" Survey_2H = pd.read_csv(datafile_path_survey_2H, skiprows=0) data_file_path_24_PM_2H = "C:/Users/gvros/Desktop/Oregon State Masters/Work/OSU, CSC, CQC Project files/2H/2H_24_hour_Kitchen_PM.csv" Kit_PM_2H_24hr = pd.read_csv(data_file_path_24_PM_2H, skiprows=0) data_file_path_24_fuel_2H = "C:/Users/gvros/Desktop/Oregon State Masters/Work/OSU, CSC, CQC Project files/2H/2H_24_hour_Fuel_removal.csv" Fuel_remove_2H_24hr = pd.read_csv(data_file_path_24_fuel_2H, skiprows=0) datafile_path_day_3H ="C:/Users/gvros/Desktop/Oregon State Masters/Work/OSU, CSC, CQC Project files/3H/3H_Summary_Day_1_exact.csv" Day_3H = pd.read_csv(datafile_path_day_3H, skiprows=2) datafile_path_event_3N_1 ="C:/Users/gvros/Desktop/Oregon State Masters/Work/OSU, CSC, CQC Project files/3H/3H_Summary_Event_1_exact.csv" Event_3H_1 = pd.read_csv(datafile_path_event_3N_1, skiprows=2) datafile_path_event_3H_2 ="C:/Users/gvros/Desktop/Oregon State Masters/Work/OSU, CSC, CQC Project files/3H/3H_Summary_Event_2_exact.csv" Event_3H_2 = 
pd.read_csv(datafile_path_event_3H_2, skiprows=2) datafile_path_survey_3H = "C:/Users/gvros/Desktop/Oregon State Masters/Work/OSU, CSC, CQC Project files/3H/3H_Survey_summary_.csv" Survey_3H = pd.read_csv(datafile_path_survey_3H, skiprows=0) data_file_path_24_PM_3H = "C:/Users/gvros/Desktop/Oregon State Masters/Work/OSU, CSC, CQC Project files/3H/3H_24_hour_Kitchen_PM.csv" Kit_PM_3H_24hr = pd.read_csv(data_file_path_24_PM_3H, skiprows=0) data_file_path_24_fuel_3H = "C:/Users/gvros/Desktop/Oregon State Masters/Work/OSU, CSC, CQC Project files/3H/3H_24_hour_Fuel_removal.csv" Fuel_remove_3H_24hr = pd.read_csv(data_file_path_24_fuel_3H, skiprows=0) elif Computer == 'work' and Hood_or_no == 'no_hood': datafile_path_day_1N = "C:/Users/rossgra/Box/OSU, CSC, CQC Project files/1N/1N_Summary_Day_1_exact.csv" Day_1N = pd.read_csv(datafile_path_day_1N, skiprows=2) datafile_path_event_1N ="C:/Users/rossgra/Box/OSU, CSC, CQC Project files/1N/1N_Summary_Event_1_exact.csv" Event_1N = pd.read_csv(datafile_path_event_1N, skiprows=2) datafile_path_survey_1N = "C:/Users/rossgra/Box/OSU, CSC, CQC Project files/1N/1N_1H_Survey_summary_.csv" Filter_1n_survey = pd.read_csv(datafile_path_survey_1N, skiprows=0) Survey_1N = Filter_1n_survey.iloc[0:40,:] data_file_path_24_PM_1N = "C:/Users/rossgra/Box/OSU, CSC, CQC Project files/1N/1N_24_hour_Kitchen_PM.csv" Kit_PM_1N_24hr = pd.read_csv(data_file_path_24_PM_1N, skiprows=0) data_file_path_24_Fuel_1N = "C:/Users/rossgra/Box/OSU, CSC, CQC Project files/1N/1N_24_hour_Fuel_removal.csv" Fuel_remove_1N_24hr = pd.read_csv(data_file_path_24_Fuel_1N, skiprows=0) datafile_path_day_2N ="C:/Users/rossgra/Box/OSU, CSC, CQC Project files/2N/2N_Summary_Day_1_exact.csv" Day_2N = pd.read_csv(datafile_path_day_2N, skiprows=2) datafile_path_event_2N_1 ="C:/Users/rossgra/Box/OSU, CSC, CQC Project files/2N/2N_Summary_Event_1_exact.csv" Event_2N_1 = pd.read_csv(datafile_path_event_2N_1, skiprows=2) datafile_path_event_2N_2 ="C:/Users/rossgra/Box/OSU, CSC, CQC 
Project files/2N/2N_Summary_Event_2_exact.csv" Event_2N_2 = pd.read_csv(datafile_path_event_2N_2, skiprows=2) datafile_path_survey_2N = "C:/Users/rossgra/Box/OSU, CSC, CQC Project files/2N/2N_Survey_summary_.csv" Survey_2N = pd.read_csv(datafile_path_survey_2N, skiprows=0) data_file_path_24_PM_2N = "C:/Users/rossgra/Box/OSU, CSC, CQC Project files/2N/2N_24_hour_Kitchen_PM.csv" Kit_PM_2N_24hr = pd.read_csv(data_file_path_24_PM_2N, skiprows=0) data_file_path_24_Fuel_2N = "C:/Users/rossgra/Box/OSU, CSC, CQC Project files/2N/2N_24_hour_Fuel_removal.csv" Fuel_remove_2N_24hr = pd.read_csv(data_file_path_24_Fuel_2N, skiprows=0) datafile_path_day_3N ="C:/Users/rossgra/Box/OSU, CSC, CQC Project files/3N/3N_Summary_Day_1_exact.csv" Day_3N = pd.read_csv(datafile_path_day_3N, skiprows=2) datafile_path_event_3N_1 ="C:/Users/rossgra/Box/OSU, CSC, CQC Project files/3N/3N_Summary_Event_1_exact.csv" Event_3N_1 = pd.read_csv(datafile_path_event_3N_1, skiprows=2) datafile_path_event_3N_2 ="C:/Users/rossgra/Box/OSU, CSC, CQC Project files/3N/3N_Summary_Event_2_exact.csv" Event_3N_2 = pd.read_csv(datafile_path_event_3N_2, skiprows=2) datafile_path_survey_3N = "C:/Users/rossgra/Box/OSU, CSC, CQC Project files/3N/3N_Survey_summary_.csv" Survey_3N = pd.read_csv(datafile_path_survey_3N, skiprows=0) data_file_path_24_PM_3N = "C:/Users/rossgra/Box/OSU, CSC, CQC Project files/3N/3N_24_hour_Kitchen_PM.csv" Kit_PM_3N_24hr = pd.read_csv(data_file_path_24_PM_3N, skiprows=0) data_file_path_24_Fuel_3N = "C:/Users/rossgra/Box/OSU, CSC, CQC Project files/3N/3N_24_hour_Fuel_removal.csv" Fuel_remove_3N_24hr = pd.read_csv(data_file_path_24_Fuel_3N, skiprows=0) datafile_path_day_4N ="C:/Users/rossgra/Box/OSU, CSC, CQC Project files/4N/4N_Summary_Day_1_exact.csv" Day_4N = pd.read_csv(datafile_path_day_4N, skiprows=2) datafile_path_event_4N_1 ="C:/Users/rossgra/Box/OSU, CSC, CQC Project files/4N/4N_Summary_Event_1_exact.csv" Event_4N_1 = pd.read_csv(datafile_path_event_4N_1, skiprows=2) 
datafile_path_event_4N_2 ="C:/Users/rossgra/Box/OSU, CSC, CQC Project files/4N/4N_Summary_Event_2_exact.csv" Event_4N_2 = pd.read_csv(datafile_path_event_4N_2, skiprows=2) datafile_path_survey_4N = "C:/Users/rossgra/Box/OSU, CSC, CQC Project files/4N/4N_Survey_summary_.csv" Survey_4N = pd.read_csv(datafile_path_survey_4N, skiprows=0) data_file_path_24_PM_4N = "C:/Users/rossgra/Box/OSU, CSC, CQC Project files/4N/4N_24_hour_Kitchen_PM.csv" Kit_PM_4N_24hr = pd.read_csv(data_file_path_24_PM_4N, skiprows=0) data_file_path_24_Fuel_4N = "C:/Users/rossgra/Box/OSU, CSC, CQC Project files/4N/4N_24_hour_Fuel_removal.csv" Fuel_remove_4N_24hr = pd.read_csv(data_file_path_24_Fuel_4N, skiprows=0) else: datafile_path_day_1H ="C:/Users/rossgra/Box/OSU, CSC, CQC Project files/1H/1H_Summary_Day_1_exact.csv" Day_1H = pd.read_csv(datafile_path_day_1H, skiprows=2) datafile_path_event_1H ="C:/Users/rossgra/Box/OSU, CSC, CQC Project files/1H/1H_Summary_Event_1_exact.csv" Event_1H = pd.read_csv(datafile_path_event_1H, skiprows=2) datafile_path_survey_1H = "C:/Users/rossgra/Box/OSU, CSC, CQC Project files/1N/1N_1H_Survey_summary_.csv" Survey_1H = pd.read_csv(datafile_path_survey_1H, skiprows=40) data_file_path_24_PM_1H = "C:/Users/rossgra/Box/OSU, CSC, CQC Project files/1H/1H_24_hour_Kitchen_PM.csv" Kit_PM_1H_24hr = pd.read_csv(data_file_path_24_PM_1H, skiprows=0) data_file_path_24_fuel_1H = "C:/Users/rossgra/Box/OSU, CSC, CQC Project files/1H/1H_24_hour_Fuel_removal.csv" Fuel_remove_1H_24hr = pd.read_csv(data_file_path_24_fuel_1H, skiprows=0) datafile_path_day_2H = "C:/Users/rossgra/Box/OSU, CSC, CQC Project files/2H/2H_Summary_Day_1_exact.csv" Day_2H = pd.read_csv(datafile_path_day_2H, skiprows=2) datafile_path_event_2H_1 ="C:/Users/rossgra/Box/OSU, CSC, CQC Project files/2H/2H_Summary_Event_1_exact.csv" Event_2H_1 = pd.read_csv(datafile_path_event_2H_1, skiprows=2) datafile_path_event_2H_2 ="C:/Users/rossgra/Box/OSU, CSC, CQC Project files/2H/2H_Summary_Event_2_exact.csv" Event_2H_2 = 
pd.read_csv(datafile_path_event_2H_2, skiprows=2) datafile_path_survey_2H = "C:/Users/rossgra/Box/OSU, CSC, CQC Project files/2H/2H_Survey_summary_.csv" Survey_2H = pd.read_csv(datafile_path_survey_2H, skiprows=0) data_file_path_24_PM_2H = "C:/Users/rossgra/Box/OSU, CSC, CQC Project files/2H/2H_24_hour_Kitchen_PM.csv" Kit_PM_2H_24hr = pd.read_csv(data_file_path_24_PM_2H, skiprows=0) data_file_path_24_fuel_2H = "C:/Users/rossgra/Box/OSU, CSC, CQC Project files/2H/2H_24_hour_Fuel_removal.csv" Fuel_remove_2H_24hr = pd.read_csv(data_file_path_24_fuel_2H, skiprows=0) datafile_path_day_3H = "C:/Users/rossgra/Box/OSU, CSC, CQC Project files/3H/3H_Summary_Day_1_exact.csv" Day_3H = pd.read_csv(datafile_path_day_3H, skiprows=2) datafile_path_event_3N_1 ="C:/Users/rossgra/Box/OSU, CSC, CQC Project files/3H/3H_Summary_Event_1_exact.csv" Event_3H_1 = pd.read_csv(datafile_path_event_3N_1, skiprows=2) datafile_path_event_3H_2 ="C:/Users/rossgra/Box/OSU, CSC, CQC Project files/3H/3H_Summary_Event_2_exact.csv" Event_3H_2 = pd.read_csv(datafile_path_event_3H_2, skiprows=2) datafile_path_survey_3H = "C:/Users/rossgra/Box/OSU, CSC, CQC Project files/3H/3H_Survey_summary_.csv" Survey_3H = pd.read_csv(datafile_path_survey_3H, skiprows=0) data_file_path_24_PM_3H = "C:/Users/rossgra/Box/OSU, CSC, CQC Project files/3H/3H_24_hour_Kitchen_PM.csv" Kit_PM_3H_24hr = pd.read_csv(data_file_path_24_PM_3H, skiprows=0) data_file_path_24_fuel_3H = "C:/Users/rossgra/Box/OSU, CSC, CQC Project files/3H/3H_24_hour_Fuel_removal.csv" Fuel_remove_3H_24hr = pd.read_csv(data_file_path_24_fuel_3H, skiprows=0) NO_hood_counter = np.arange(0,39) hood_counter = np.arange(0,14) print('---------------Fuel per Day per Adult No-Hood Phase---------------------') if Hood_or_no == 'no_hood': Fuel_per_day_per_adult_1N = [] f_d_a_1N = [] Fuel_per_day_per_adult_2N = [] f_d_a_2N = [] Fuel_per_day_per_adult_3N = [] f_d_a_3N = [] Fuel_per_day_per_adult_4N = [] f_d_a_4N =[] count_t = 0 count_f = 0 for c in NO_hood_counter: if c 
== (Household_removal[count_t] - C_Place_holder): count_t = count_t + 1 if count_t == len(Household_removal): count_t = 0 continue if c == (Household_removal_NO_Hood_fuel_day_adult[count_f] - C_Place_holder): count_f = count_f + 1 if count_f == len(Household_removal_NO_Hood_fuel_day_adult): count_f = 0 continue if Fuel_remove_1N_24hr.iloc[c,6]!= -1.00: Fuel_per_day_per_adult_1N.append(Fuel_remove_1N_24hr.iloc[c,6]/Survey_1N.iloc[c,7]) f_d_a_1N.append(Day_1N.iloc[c,0]) if Fuel_remove_2N_24hr.iloc[c,6] != -1.00: Fuel_per_day_per_adult_2N.append(Fuel_remove_2N_24hr.iloc[c,6] / Survey_2N.iloc[c, 7]) f_d_a_2N.append(Day_2N.iloc[c,0]) if Fuel_remove_3N_24hr.iloc[c,6] != -1.00: Fuel_per_day_per_adult_3N.append(Fuel_remove_3N_24hr.iloc[c,6]/ Survey_3N.iloc[c, 7]) f_d_a_3N.append(Day_3N.iloc[c, 0]) if Fuel_remove_4N_24hr.iloc[c,6] != -1.00: Fuel_per_day_per_adult_4N.append(Fuel_remove_4N_24hr.iloc[c,6] / Survey_4N.iloc[c, 7]) f_d_a_4N.append(Day_3N.iloc[c, 0]) Fuel_per_day_per_adult_2N_1N = [] f_d_a_2N_1N = [] Fuel_per_day_per_adult_3N_1N = [] f_d_a_3N_1N = [] Fuel_per_day_per_adult_4N_1N = [] f_d_a_4N_1N = [] Fuel_per_day_per_adult_3N_2N = [] f_d_a_3N_2N = [] Fuel_per_day_per_adult_4N_3N = [] f_d_a_4N_3N = [] Fuel_per_day_per_adult_4N_2N = [] f_d_a_4N_2N = [] count_t = 0 count_f = 0 for c in NO_hood_counter: if c == (Household_removal[count_t] - C_Place_holder): count_t = count_t + 1 if count_t == len(Household_removal): count_t = 0 continue if c == (Household_removal_NO_Hood_fuel_day_adult[count_f] - C_Place_holder): count_f = count_f + 1 if count_f == len(Household_removal_NO_Hood_fuel_day_adult): count_f = 0 continue if (len(Fuel_per_day_per_adult_2N)-1) >= c and (len(Fuel_per_day_per_adult_1N)-1) >= c: if Day_1N.iloc[c,13] > 0 and Day_2N.iloc[c,13] > 0 and Day_1N.iloc[c,0] == Day_2N.iloc[c,0]: Fuel_per_day_per_adult_2N_1N.append(Fuel_per_day_per_adult_2N[c]/Fuel_per_day_per_adult_1N[c]) f_d_a_2N_1N.append(Day_1N.iloc[c,0]) if (len(Fuel_per_day_per_adult_3N)-1) >= c and 
(len(Fuel_per_day_per_adult_1N)-1) >= c: if Day_3N.iloc[c,13] > 0 and Day_1N.iloc[c,13] > 0 and Day_3N.iloc[c,0] == Day_1N.iloc[c,0]: Fuel_per_day_per_adult_3N_1N.append(Fuel_per_day_per_adult_3N[c]/Fuel_per_day_per_adult_1N[c]) f_d_a_3N_1N.append(Day_1N.iloc[c,0]) if (len(Fuel_per_day_per_adult_4N)-1) >= c and (len(Fuel_per_day_per_adult_1N)-1) >= c: if Day_4N.iloc[c,13] > 0 and Day_1N.iloc[c,13] > 0 and Day_4N.iloc[c,0] == Day_1N.iloc[c,0]: Fuel_per_day_per_adult_4N_1N.append(Fuel_per_day_per_adult_4N[c]/Fuel_per_day_per_adult_1N[c]) f_d_a_4N_1N.append(Day_1N.iloc[c,0]) if (len(Fuel_per_day_per_adult_3N)-1) >= c and (len(Fuel_per_day_per_adult_2N)-1) >= c: if Day_3N.iloc[c,13] > 0 and Day_2N.iloc[c,13] > 0 and Day_3N.iloc[c,0] == Day_2N.iloc[c,0]: Fuel_per_day_per_adult_3N_2N.append(Fuel_per_day_per_adult_3N[c]/Fuel_per_day_per_adult_2N[c]) f_d_a_3N_2N.append(Day_2N.iloc[c,0]) if (len(Fuel_per_day_per_adult_4N)-1) >= c and (len(Fuel_per_day_per_adult_3N)-1) >= c: if Day_4N.iloc[c,13] > 0 and Day_3N.iloc[c,13] > 0 and Day_4N.iloc[c,0] == Day_3N.iloc[c,0]: Fuel_per_day_per_adult_4N_3N.append(Fuel_per_day_per_adult_4N[c]/Fuel_per_day_per_adult_3N[c]) f_d_a_4N_3N.append(Day_3N.iloc[c,0]) if (len(Fuel_per_day_per_adult_4N)-1) >= c and (len(Fuel_per_day_per_adult_2N)-1) >= c: if Day_4N.iloc[c,13] > 0 and Day_2N.iloc[c,13] > 0 and Day_4N.iloc[c,0] == Day_2N.iloc[c,0]: Fuel_per_day_per_adult_4N_2N.append(Fuel_per_day_per_adult_4N[c]/Fuel_per_day_per_adult_2N[c]) f_d_a_4N_2N.append(Day_4N.iloc[c,0]) sns.set(style="ticks") f, (ax_box, ax_hist) = plt.subplots(2, sharex=True, gridspec_kw={"height_ratios": (0.15, 0.85)}) sns.boxplot(Fuel_per_day_per_adult_1N, ax=ax_box, color='b') sns.distplot(Fuel_per_day_per_adult_1N, ax=ax_hist, color='b') ax_box.set(yticks=[]) sns.despine(ax=ax_hist) sns.despine(ax=ax_box, left=True) plt.title('1N Fuel per Day per Adult') plt.ylim(top=2) plt.ylim(bottom = 0) sns.set(style="ticks") f, (ax_box, ax_hist) = plt.subplots(2, sharex=True, 
gridspec_kw={"height_ratios": (0.15, 0.85)}) sns.boxplot(Fuel_per_day_per_adult_2N, ax=ax_box, color='g') sns.distplot(Fuel_per_day_per_adult_2N, ax=ax_hist, color='g') ax_box.set(yticks=[]) sns.despine(ax=ax_hist) sns.despine(ax=ax_box, left=True) plt.title('2N Fuel per Day per Adult') plt.ylim(top=2) plt.ylim(bottom = 0) sns.set(style="ticks") f, (ax_box, ax_hist) = plt.subplots(2, sharex=True, gridspec_kw={"height_ratios": (0.15, 0.85)}) sns.boxplot(Fuel_per_day_per_adult_3N, ax=ax_box, color='r') sns.distplot(Fuel_per_day_per_adult_3N, ax=ax_hist, color='r') ax_box.set(yticks=[]) sns.despine(ax=ax_hist) sns.despine(ax=ax_box, left=True) plt.title('3N Fuel per Day per Adult') plt.ylim(top=2) plt.ylim(bottom = 0) sns.set(style="ticks") f, (ax_box, ax_hist) = plt.subplots(2, sharex=True, gridspec_kw={"height_ratios": (0.15, 0.85)}) sns.boxplot(Fuel_per_day_per_adult_4N, ax=ax_box, color='y') sns.distplot(Fuel_per_day_per_adult_4N, ax=ax_hist, color='y') ax_box.set(yticks=[]) sns.despine(ax=ax_hist) sns.despine(ax=ax_box, left=True) plt.title('4N Fuel per Day per Adult') plt.ylim(top=2) plt.ylim(bottom = 0) fig, ax = plt.subplots() plt.title('No-Hood Fuel per Day per Adult') quant_1_1N = np.percentile(Fuel_per_day_per_adult_1N, [25,50,75]) Top_lim_1_1N = quant_1_1N[2] + 1.5*(quant_1_1N[2] - quant_1_1N[0]) Low_lim_1_1N = quant_1_1N[0] - 1.5*(quant_1_1N[2] - quant_1_1N[0]) bp_1 = plt.boxplot(Fuel_per_day_per_adult_1N, positions = [1], widths = 0.6) Fuel_D_A_1N_outlier = [] for v,a in enumerate(Fuel_per_day_per_adult_1N): if a > Top_lim_1_1N or a < Low_lim_1_1N: Fuel_D_A_1N_outlier.append(f_d_a_1N[v]) plt.text(1,a,f_d_a_1N[v]) plt.text(1,0.1,'1N',color='b') quant_1_2N = np.percentile(Fuel_per_day_per_adult_2N, [25,50,75]) Top_lim_1_2N = quant_1_2N[2] + 1.5*(quant_1_2N[2] - quant_1_2N[0]) Low_lim_1_2N = quant_1_2N[0] - 1.5*(quant_1_2N[2] - quant_1_2N[0]) bp_1 = plt.boxplot(Fuel_per_day_per_adult_2N,positions = [2], widths = 0.6) Fuel_D_A_2N_outlier = [] for v,a in 
enumerate(Fuel_per_day_per_adult_2N): if a > Top_lim_1_2N or a < Low_lim_1_2N: Fuel_D_A_2N_outlier.append(f_d_a_2N[v]) plt.text(2,a,f_d_a_2N[v]) plt.text(2,0.1,'2N', color= 'g') quant_1_3N = np.percentile(Fuel_per_day_per_adult_3N, [25,50,75]) Top_lim_1_3N = quant_1_3N[2] + 1.5*(quant_1_3N[2] - quant_1_3N[0]) Low_lim_1_3N = quant_1_3N[0] - 1.5*(quant_1_3N[2] - quant_1_3N[0]) bp_1 = plt.boxplot(Fuel_per_day_per_adult_3N,positions = [3], widths = 0.6) count = 0 Fuel_D_A_3N_outlier = [] for v,a in enumerate(Fuel_per_day_per_adult_3N): if a > Top_lim_1_3N or a < Low_lim_1_3N: Fuel_D_A_3N_outlier.append(f_d_a_3N[v]) count = count + 1 if count == 2: plt.text(3,a,f_d_a_3N[v],ha='left',va='bottom') elif count != 2: plt.text(3,a,f_d_a_3N[v],ha='right',va='bottom') plt.text(3,0.1,'3N', color='r') quant_1_4N = np.percentile(Fuel_per_day_per_adult_4N, [25,50,75]) Top_lim_1_4N = quant_1_4N[2] + 1.5*(quant_1_4N[2] - quant_1_4N[0]) Low_lim_1_4N = quant_1_4N[0] - 1.5*(quant_1_4N[2] - quant_1_4N[0]) bp_1 = plt.boxplot(Fuel_per_day_per_adult_4N,positions = [4], widths = 0.6) Fuel_D_A_4N_outlier = [] for v,a in enumerate(Fuel_per_day_per_adult_4N): if a > Top_lim_1_4N or a < Low_lim_1_4N: Fuel_D_A_4N_outlier.append(f_d_a_4N[v]) plt.text(4,a,f_d_a_4N[v]) plt.text(4,0.1,'4N', color='y') plt.xlim(0,5) plt.ylim(0,2.3) print('Fuel/Day/Adult 1N had these values as outliers ', Fuel_D_A_1N_outlier) print('Fuel/Day/Adult 2N had these values as outliers ', Fuel_D_A_2N_outlier) print('Fuel/Day/Adult 3N had these values as outliers ', Fuel_D_A_3N_outlier) print('Fuel/Day/Adult 4N had these values as outliers ', Fuel_D_A_4N_outlier) plt.show() fig_2, ax2 = plt.subplots() plt.title('% No_hood Change from Fuel per Day per Adult' ) quant_1_2N_1N = np.percentile(Fuel_per_day_per_adult_2N_1N, [25,50,75]) Top_lim_1_2N_1N = quant_1_2N_1N[2] + 1.5*(quant_1_2N_1N[2]-quant_1_2N_1N[0]) Low_lim_1_2N_1N = quant_1_2N_1N[0] - 1.5*(quant_1_2N_1N[2]-quant_1_2N_1N[0]) bp_1_1 = 
plt.boxplot(Fuel_per_day_per_adult_2N_1N, positions=[1], widths= 0.6) Fuel_D_A_2N_1N_outlier = [] for v,a in enumerate(Fuel_per_day_per_adult_2N_1N): if a > Top_lim_1_2N_1N or a < Low_lim_1_2N_1N: Fuel_D_A_2N_1N_outlier.append(f_d_a_2N_1N[v]) plt.text(1, a, f_d_a_2N_1N[v]) plt.text(0.5, 0, '2N / 1N', color= 'g') quant_1_3N_1N = np.percentile(Fuel_per_day_per_adult_3N_1N, [25,50,75]) Top_lim_1_3N_1N = quant_1_3N_1N[2] + 1.5*(quant_1_3N_1N[2]-quant_1_3N_1N[0]) Low_lim_1_3N_1N = quant_1_3N_1N[0] - 1.5*(quant_1_3N_1N[2]-quant_1_3N_1N[0]) bp_1_1 = plt.boxplot(Fuel_per_day_per_adult_3N_1N, positions=[2], widths= 0.6) Fuel_D_A_3N_1N_outlier = [] for v,a in enumerate(Fuel_per_day_per_adult_3N_1N): if a > Top_lim_1_3N_1N or a < Low_lim_1_3N_1N: Fuel_D_A_3N_1N_outlier.append(f_d_a_3N_1N[v]) plt.text(2, a, f_d_a_3N_1N[v]) plt.text(1.5, 0, '3N / 1N', color= 'r') quant_1_4N_1N = np.percentile(Fuel_per_day_per_adult_4N_1N, [25,50,75]) Top_lim_1_4N_1N = quant_1_4N_1N[2] + 1.5*(quant_1_4N_1N[2]-quant_1_4N_1N[0]) Low_lim_1_4N_1N = quant_1_4N_1N[0] - 1.5*(quant_1_4N_1N[2]-quant_1_4N_1N[0]) bp_1_1 = plt.boxplot(Fuel_per_day_per_adult_4N_1N, positions=[3], widths= 0.6) Fuel_D_A_4N_1N_outlier = [] for v,a in enumerate(Fuel_per_day_per_adult_4N_1N): if a > Top_lim_1_4N_1N or a < Low_lim_1_4N_1N: Fuel_D_A_4N_1N_outlier.append(f_d_a_4N_1N[v]) plt.text(3, a, f_d_a_4N_1N[v]) plt.text(2.5, 0, '4N / 1N', color= 'y') quant_1_3N_2N = np.percentile(Fuel_per_day_per_adult_3N_2N, [25,50,75]) Top_lim_1_3N_2N = quant_1_3N_2N[2] + 1.5*(quant_1_3N_2N[2]-quant_1_3N_2N[0]) Low_lim_1_3N_2N = quant_1_3N_2N[0] - 1.5*(quant_1_3N_2N[2]-quant_1_3N_2N[0]) bp_1_1 = plt.boxplot(Fuel_per_day_per_adult_3N_2N, positions=[4], widths= 0.6) Fuel_D_A_3N_2N_outlier = [] for v,a in enumerate(Fuel_per_day_per_adult_3N_2N): if a > Top_lim_1_3N_2N or a < Low_lim_1_3N_2N: Fuel_D_A_3N_2N_outlier.append(f_d_a_3N_2N[v]) plt.text(4, a, f_d_a_3N_2N[v]) plt.text(3.5, 0, '3N / 2N', color= 'm') quant_1_4N_3N = 
np.percentile(Fuel_per_day_per_adult_4N_3N, [25,50,75]) Top_lim_1_4N_3N = quant_1_4N_3N[2] + 1.5*(quant_1_4N_3N[2]-quant_1_4N_3N[0]) Low_lim_1_4N_3N = quant_1_4N_3N[0] - 1.5*(quant_1_4N_3N[2]-quant_1_4N_3N[0]) bp_1_1 = plt.boxplot(Fuel_per_day_per_adult_4N_3N, positions=[5], widths= 0.6) Fuel_D_A_4N_3N_outlier = [] for v,a in enumerate(Fuel_per_day_per_adult_4N_3N): if a > Top_lim_1_4N_3N or a < Low_lim_1_4N_3N: Fuel_D_A_4N_3N_outlier.append(f_d_a_4N_3N[v]) plt.text(5, a, f_d_a_4N_3N[v]) plt.text(4.5, 0, '4N / 3N', color= 'k') quant_1_4N_2N = np.percentile(Fuel_per_day_per_adult_4N_2N, [25,50,75]) Top_lim_1_4N_2N = quant_1_4N_2N[2] + 1.5*(quant_1_4N_2N[2]-quant_1_4N_2N[0]) Low_lim_1_4N_2N = quant_1_4N_2N[0] - 1.5*(quant_1_4N_2N[2]-quant_1_4N_2N[0]) bp_1_1 = plt.boxplot(Fuel_per_day_per_adult_4N_2N, positions=[6], widths= 0.6) Fuel_D_A_4N_2N_outlier = [] for v,a in enumerate(Fuel_per_day_per_adult_4N_2N): if a > Top_lim_1_4N_2N or a < Low_lim_1_4N_2N: Fuel_D_A_4N_2N_outlier.append(f_d_a_4N_2N[v]) plt.text(6, a, f_d_a_4N_2N[v]) plt.text(5.5, 0, '4N / 2N', color= 'tab:orange') plt.xlim(0,7) plt.ylim(-0.5,4) print('Fuel/Day/Adult 2N/1N had these values as outliers ', Fuel_D_A_2N_1N_outlier) print('Fuel/Day/Adult 3N/1N had these values as outliers ', Fuel_D_A_3N_1N_outlier) print('Fuel/Day/Adult 4N/1N had these values as outliers ', Fuel_D_A_4N_1N_outlier) print('Fuel/Day/Adult 3N/2N had these values as outliers ', Fuel_D_A_3N_2N_outlier) print('Fuel/Day/Adult 4N/3N had these values as outliers ', Fuel_D_A_4N_3N_outlier) print('Fuel/Day/Adult 4N/2N had these values as outliers ', Fuel_D_A_4N_2N_outlier) plt.show() quant_1_1N = np.append(quant_1_1N, np.average(Fuel_per_day_per_adult_1N)) quant_1_2N = np.append(quant_1_2N, np.average(Fuel_per_day_per_adult_2N)) quant_1_3N = np.append(quant_1_3N, np.average(Fuel_per_day_per_adult_3N)) quant_1_4N = np.append(quant_1_4N, np.average(Fuel_per_day_per_adult_4N)) D_50_quant_phase_f_d_a = {'Percentile %': ['25','50','75', 'Avg'], 
'1N': quant_1_1N, '2N': quant_1_2N,'3N' : quant_1_3N,'4N': quant_1_4N} F_D_A_50_phase_no_hood = pd.DataFrame(data=D_50_quant_phase_f_d_a, columns=['Percentile %','1N', '2N', '3N','4N']) quant_1_2N_1N = np.append(quant_1_2N_1N , np.average(Fuel_per_day_per_adult_2N_1N)) quant_1_3N_1N = np.append(quant_1_3N_1N , np.average(Fuel_per_day_per_adult_3N_1N)) quant_1_4N_1N = np.append(quant_1_4N_1N , np.average(Fuel_per_day_per_adult_4N_1N)) quant_1_3N_2N = np.append(quant_1_3N_2N , np.average(Fuel_per_day_per_adult_3N_2N)) quant_1_4N_3N = np.append(quant_1_4N_3N , np.average(Fuel_per_day_per_adult_4N_3N)) quant_1_4N_2N = np.append(quant_1_4N_2N , np.average(Fuel_per_day_per_adult_4N_2N)) D_50_quant_percent_f_d_a ={'Percentile %': ['25','50','75', 'Avg'],'2N / 1N': quant_1_2N_1N,'3N / 1N': quant_1_3N_1N,'4N / 1N': quant_1_4N_1N, '3N / 2N': quant_1_3N_2N,'4N / 3N': quant_1_4N_3N,'4N / 2N': quant_1_4N_2N} F_D_A_50_percent_change_no_hood = pd.DataFrame(data=D_50_quant_percent_f_d_a, columns=['Percentile %','2N / 1N','3N / 1N', '4N / 1N' ,'3N / 2N','4N / 3N','4N / 2N']) print(F_D_A_50_phase_no_hood) print(F_D_A_50_percent_change_no_hood) print ('-------------------Fuel per Day per Adult Hood Phase -------------------') if Hood_or_no == 'hood': Fuel_per_day_per_adult_1H = [] f_d_a_1H = [] Fuel_per_day_per_adult_2H = [] f_d_a_2H = [] Fuel_per_day_per_adult_3H = [] f_d_a_3H = [] count_t = 0 count_f = 0 for c in hood_counter: if c == (Household_removal[count_t] - C_Place_holder): count_t = count_t + 1 if count_t == len(Household_removal): count_t = 0 continue if c == (Household_removal_Hood_fuel_day_adult[count_f] - C_Place_holder): count_f = count_f + 1 if count_f == len(Household_removal_Hood_fuel_day_adult): count_f = 0 continue if Fuel_remove_1H_24hr.iloc[c,6] != -1.00: Fuel_per_day_per_adult_1H.append(Fuel_remove_1H_24hr.iloc[c,6]/Survey_1H.iloc[c,7]) f_d_a_1H.append(Day_1H.iloc[c,0]) if Fuel_remove_2H_24hr.iloc[c,6] != -1.00: 
Fuel_per_day_per_adult_2H.append(Fuel_remove_2H_24hr.iloc[c,6] / Survey_2H.iloc[c, 7]) f_d_a_2H.append(Day_2H.iloc[c,0]) if Fuel_remove_3H_24hr.iloc[c,6] != -1.00: Fuel_per_day_per_adult_3H.append(Fuel_remove_3H_24hr.iloc[c,6]/ Survey_3H.iloc[c, 7]) f_d_a_3H.append(Day_3H.iloc[c, 0]) Fuel_per_day_per_adult_2H_1H = [] f_d_a_2H_1H = [] Fuel_per_day_per_adult_3H_1H = [] f_d_a_3H_1H = [] Fuel_per_day_per_adult_3H_2H = [] f_d_a_3H_2H = [] count_t = 0 count_f = 0 for c in hood_counter: if c == (Household_removal[count_t] - C_Place_holder): count_t = count_t + 1 if count_t == len(Household_removal): count_t = 0 continue if c == (Household_removal_Hood_fuel_day_adult[count_f] - C_Place_holder): count_f = count_f + 1 if count_f == len(Household_removal_Hood_fuel_day_adult): count_f = 0 continue if (len(Fuel_per_day_per_adult_2H)-1) >= c and (len(Fuel_per_day_per_adult_1H)-1) >= c: if Day_1H.iloc[c,13] > 0 and Day_2H.iloc[c,13] > 0 and Day_1H.iloc[c,0] == Day_2H.iloc[c,0]: Fuel_per_day_per_adult_2H_1H.append(Fuel_per_day_per_adult_2H[c]/Fuel_per_day_per_adult_1H[c]) f_d_a_2H_1H.append(Day_1H.iloc[c,0]) if (len(Fuel_per_day_per_adult_3H)-1) >= c and (len(Fuel_per_day_per_adult_1H)-1) >= c: if Day_3H.iloc[c,13] > 0 and Day_1H.iloc[c,13] > 0 and Day_3H.iloc[c,0] == Day_1H.iloc[c,0]: Fuel_per_day_per_adult_3H_1H.append(Fuel_per_day_per_adult_3H[c]/Fuel_per_day_per_adult_1H[c]) f_d_a_3H_1H.append(Day_1H.iloc[c,0]) if (len(Fuel_per_day_per_adult_3H)-1) >= c and (len(Fuel_per_day_per_adult_2H)-1) >= c: if Day_3H.iloc[c,13] > 0 and Day_2H.iloc[c,13] > 0 and Day_3H.iloc[c,0] == Day_2H.iloc[c,0]: Fuel_per_day_per_adult_3H_2H.append(Fuel_per_day_per_adult_3H[c]/Fuel_per_day_per_adult_2H[c]) f_d_a_3H_2H.append(Day_1H.iloc[c,0]) sns.set(style="ticks") f, (ax_box, ax_hist) = plt.subplots(2, sharex=True, gridspec_kw={"height_ratios": (0.15, 0.85)}) sns.boxplot(Fuel_per_day_per_adult_1H, ax=ax_box, color='b') sns.distplot(Fuel_per_day_per_adult_1H, ax=ax_hist, color='b') 
ax_box.set(yticks=[]) sns.despine(ax=ax_hist) sns.despine(ax=ax_box, left=True) plt.title('1H Fuel per Day per Adult') plt.ylim(top=2) plt.ylim(bottom = 0) sns.set(style="ticks") f, (ax_box, ax_hist) = plt.subplots(2, sharex=True, gridspec_kw={"height_ratios": (0.15, 0.85)}) sns.boxplot(Fuel_per_day_per_adult_2H, ax=ax_box, color='g') sns.distplot(Fuel_per_day_per_adult_2H, ax=ax_hist, color='g') ax_box.set(yticks=[]) sns.despine(ax=ax_hist) sns.despine(ax=ax_box, left=True) plt.title('2H Fuel per Day per Adult') plt.ylim(top=2) plt.ylim(bottom = 0) sns.set(style="ticks") f, (ax_box, ax_hist) = plt.subplots(2, sharex=True, gridspec_kw={"height_ratios": (0.15, 0.85)}) sns.boxplot(Fuel_per_day_per_adult_3H, ax=ax_box, color='r') sns.distplot(Fuel_per_day_per_adult_3H, ax=ax_hist, color='r') ax_box.set(yticks=[]) sns.despine(ax=ax_hist) sns.despine(ax=ax_box, left=True) plt.title('3H Fuel per Day per Adult') plt.ylim(top=2) plt.ylim(bottom = 0) fig_2, ax_2 = plt.subplots() plt.title('Hood Fuel per Day per Adult') quant_1_1H = np.percentile(Fuel_per_day_per_adult_1H, [25,50,75]) Top_lim_1_1H = quant_1_1H[2] + 1.5*(quant_1_1H[2] - quant_1_1H[0]) Low_lim_1_1H = quant_1_1H[0] - 1.5*(quant_1_1H[2] - quant_1_1H[0]) bp_1 = plt.boxplot(Fuel_per_day_per_adult_1H, positions = [1], widths = 0.6) Fuel_D_A_1H_outlier = [] for v,a in enumerate(Fuel_per_day_per_adult_1H): if a > Top_lim_1_1H or a < Low_lim_1_1H: Fuel_D_A_1H_outlier.append(f_d_a_1H[v]) plt.text(1,a,f_d_a_1H[v]) plt.text(1,0,'1H',color='b') quant_1_2H = np.percentile(Fuel_per_day_per_adult_2H, [25,50,75]) Top_lim_1_2H = quant_1_2H[2] + 1.5*(quant_1_2H[2] - quant_1_2H[0]) Low_lim_1_2H = quant_1_2H[0] - 1.5*(quant_1_2H[2] - quant_1_2H[0]) bp_1 = plt.boxplot(Fuel_per_day_per_adult_2H,positions = [2], widths = 0.6) count = 0 Fuel_D_A_2H_outlier = [] for v,a in enumerate(Fuel_per_day_per_adult_2H): if a > Top_lim_1_2H or a < Low_lim_1_2H: Fuel_D_A_2H_outlier.append(f_d_a_2H[v]) count = count + 1 if count == 1: 
plt.text(2,a,f_d_a_2H[v],ha='left',va='bottom') elif count !=1: plt.text(2,a,f_d_a_2H[v],ha='right',va='bottom') plt.text(2,0,'2H', color= 'g') quant_1_3H = np.percentile(Fuel_per_day_per_adult_3H, [25,50,75]) Top_lim_1_3H = quant_1_3H[2] + 1.5*(quant_1_3H[2] - quant_1_3H[0]) Low_lim_1_3H = quant_1_3H[0] - 1.5*(quant_1_3H[2] - quant_1_3H[0]) bp_1 = plt.boxplot(Fuel_per_day_per_adult_3H,positions = [3], widths = 0.6) count = 0 Fuel_D_A_3H_outlier = [] for v,a in enumerate(Fuel_per_day_per_adult_3H): if a > Top_lim_1_3H or a < Low_lim_1_3H: Fuel_D_A_3H_outlier.append(f_d_a_3H[v]) count = count + 1 if count == 3: plt.text(3,a,f_d_a_3H[v],ha='left',va='bottom') elif count != 1: plt.text(3,a,f_d_a_3H[v],ha='right',va='bottom') plt.text(3,0,'3H', color='r') plt.xlim(-0,4) plt.ylim(-0.25,2.5) print('Fuel/Day/Adult 1H had these values as outliers ', Fuel_D_A_1H_outlier) print('Fuel/Day/Adult 2H had these values as outliers ', Fuel_D_A_2H_outlier) print('Fuel/Day/Adult 3H had these values as outliers ', Fuel_D_A_3H_outlier) plt.show() fig_2, ax2 = plt.subplots() plt.title('% No_hood Change from Fuel per Day per Adult' ) quant_1_2H_1H = np.percentile(Fuel_per_day_per_adult_2H_1H, [25,50,75]) Top_lim_1_2H_1H = quant_1_2H_1H[2] + 1.5*(quant_1_2H_1H[2]-quant_1_2H_1H[0]) Low_lim_1_2H_1H = quant_1_2H_1H[0] - 1.5*(quant_1_2H_1H[2]-quant_1_2H_1H[0]) bp_1_1 = plt.boxplot(Fuel_per_day_per_adult_2H_1H, positions=[1], widths= 0.6) Fuel_D_A_2H_1H_outlier = [] for v,a in enumerate(Fuel_per_day_per_adult_2H_1H): if a > Top_lim_1_2H_1H or a < Low_lim_1_2H_1H: Fuel_D_A_2H_1H_outlier.append(f_d_a_2H_1H[v]) plt.text(1, a, f_d_a_2H_1H[v]) plt.text(0.75, -0.25, '2H / 1H', color= 'g') quant_1_3H_1H = np.percentile(Fuel_per_day_per_adult_3H_1H, [25,50,75]) Top_lim_1_3H_1H = quant_1_3H_1H[2] + 1.5*(quant_1_3H_1H[2]-quant_1_3H_1H[0]) Low_lim_1_3H_1H = quant_1_3H_1H[0] - 1.5*(quant_1_3H_1H[2]-quant_1_3H_1H[0]) bp_1_1 = plt.boxplot(Fuel_per_day_per_adult_3H_1H, positions=[2], widths= 0.6) 
Fuel_D_A_3H_1H_outlier = [] for v,a in enumerate(Fuel_per_day_per_adult_3H_1H): if a > Top_lim_1_3H_1H or a < Low_lim_1_3H_1H: Fuel_D_A_3H_1H_outlier.append(f_d_a_3H_1H[v]) plt.text(2, a, f_d_a_3H_1H[v]) plt.text(1.75, -0.25, '3H / 1H', color= 'r') quant_1_3H_2H = np.percentile(Fuel_per_day_per_adult_3H_2H, [25,50,75]) Top_lim_1_3H_2H = quant_1_3H_2H[2] + 1.5*(quant_1_3H_2H[2]-quant_1_3H_2H[0]) Low_lim_1_3H_2H = quant_1_3H_2H[0] - 1.5*(quant_1_3H_2H[2]-quant_1_3H_2H[0]) bp_1_1 = plt.boxplot(Fuel_per_day_per_adult_3H_2H, positions=[3], widths= 0.6) Fuel_D_A_3H_2H_outlier = [] for v,a in enumerate(Fuel_per_day_per_adult_3H_2H): if a > Top_lim_1_3H_2H or a < Low_lim_1_3H_2H: Fuel_D_A_3H_2H_outlier.append(f_d_a_3H_2H[v]) plt.text(3, a, f_d_a_3H_2H[v]) plt.text(2.75, -0.25, '2H / 1H', color= 'm') plt.xlim(-0,4) plt.ylim(-0.25,6) print('Fuel/Day/Adult 2H/1H had these values as outliers ', Fuel_D_A_2H_1H_outlier) print('Fuel/Day/Adult 3H/1H had these values as outliers ', Fuel_D_A_3H_1H_outlier) print('Fuel/Day/Adult 3H/2H had these values as outliers ', Fuel_D_A_3H_2H_outlier) plt.show() quant_1_1H = np.append(quant_1_1H, np.average(Fuel_per_day_per_adult_1H)) quant_1_2H = np.append(quant_1_2H, np.average(Fuel_per_day_per_adult_2H)) quant_1_3H = np.append(quant_1_3H, np.average(Fuel_per_day_per_adult_3H)) D_50_quant_phase_f_d_a_hood = {'Percentile %': ['25','50','75', 'Avg'], '1H': quant_1_1H, '2H': quant_1_2H,'3H' : quant_1_3H} F_D_A_50_phase_hood = pd.DataFrame(data=D_50_quant_phase_f_d_a_hood, columns=['Percentile %','1H', '2H','3H'] ) quant_1_2H_1H = np.append(quant_1_2H_1H , np.average(Fuel_per_day_per_adult_2H_1H)) quant_1_3H_1H = np.append(quant_1_3H_1H , np.average(Fuel_per_day_per_adult_3H_1H)) quant_1_3H_2H = np.append(quant_1_3H_2H , np.average(Fuel_per_day_per_adult_3H_2H)) D_50_quant_percent_f_d_a_hood ={'Percentile %': ['25','50','75', 'Avg'],'2H / 1H': quant_1_2H_1H,'3H / 1H': quant_1_3H_1H,'3H / 2H': quant_1_3H_2H} F_D_A_50_percent_change_hood = 
pd.DataFrame(data=D_50_quant_percent_f_d_a_hood, columns=['Percentile %','2H / 1H','3H / 1H','3H / 2H']) print(F_D_A_50_phase_hood) print(F_D_A_50_percent_change_hood) print('----------------------- Kitchen PM per Day -----------------------------') if Hood_or_no == 'no_hood': Kit_PM_per_day_1N = [] K_PM_D_1N = [] Kit_PM_per_day_2N = [] K_PM_D_2N = [] Kit_PM_per_day_3N = [] K_PM_D_3N = [] Kit_PM_per_day_4N = [] K_PM_D_4N = [] count_t = 0 count_pm = 0 for c in NO_hood_counter: if c == (Household_removal[count_t] - C_Place_holder): count_t = count_t + 1 if count_t == len(Household_removal): count_t = 0 continue if c == (Household_removal_NO_Hood_PM[count_pm] - C_Place_holder): count_pm = count_pm + 1 if count_pm == len(Household_removal_NO_Hood_PM): count_pm = 0 continue if Kit_PM_1N_24hr.iloc[c,6] != -1.00: Kit_PM_per_day_1N.append(Kit_PM_1N_24hr.iloc[c,6]) K_PM_D_1N.append(Kit_PM_1N_24hr.iloc[c, 0]) if Kit_PM_2N_24hr.iloc[c, 6] != -1.00: Kit_PM_per_day_2N.append(Kit_PM_2N_24hr.iloc[c, 6]) K_PM_D_2N.append(Kit_PM_2N_24hr.iloc[c, 0]) if Kit_PM_3N_24hr.iloc[c, 6] != -1.00: Kit_PM_per_day_3N.append(Kit_PM_3N_24hr.iloc[c, 6]) K_PM_D_3N.append(Kit_PM_3N_24hr.iloc[c, 0]) if Kit_PM_4N_24hr.iloc[c, 6] != -1.00: Kit_PM_per_day_4N.append(Kit_PM_4N_24hr.iloc[c, 6]) K_PM_D_4N.append(Kit_PM_4N_24hr.iloc[c, 0]) Kit_per_day_2N_1N = [] K_PM_D_2N_1N = [] Kit_per_day_3N_1N = [] K_PM_D_3N_1N = [] Kit_per_day_4N_1N = [] K_PM_D_4N_1N = [] Kit_per_day_3N_2N = [] K_PM_D_3N_2N = [] Kit_per_day_4N_3N = [] K_PM_D_4N_3N = [] Kit_per_day_4N_2N = [] K_PM_D_4N_2N = [] count_t = 0 count_pm = 0 for c in NO_hood_counter: if c == (Household_removal[count_t] - C_Place_holder): count_t = count_t + 1 if count_t == len(Household_removal): count_t = 0 continue if c == (Household_removal_NO_Hood_PM[count_pm] - C_Place_holder): count_pm = count_pm + 1 if count_pm == len(Household_removal_NO_Hood_PM): count_pm = 0 continue if (len(Kit_PM_per_day_2N)-1) >= c and (len(Kit_PM_per_day_1N)-1) >= c: if 
Kit_PM_1N_24hr.iloc[c,6] > 0 and Kit_PM_2N_24hr.iloc[c,6] > 0 and Kit_PM_1N_24hr.iloc[c,0] == Kit_PM_2N_24hr.iloc[c,0]: Kit_per_day_2N_1N.append(Kit_PM_per_day_2N[c]/Kit_PM_per_day_1N[c]) K_PM_D_2N_1N.append(Day_1N.iloc[c,0]) if (len(Kit_PM_per_day_3N)-1) >= c and (len(Kit_PM_per_day_1N)-1) >= c: if Kit_PM_3N_24hr.iloc[c, 6] > 0 and Kit_PM_1N_24hr.iloc[c, 6] > 0 and Kit_PM_3N_24hr.iloc[c, 0] == \ Kit_PM_1N_24hr.iloc[c, 0]: Kit_per_day_3N_1N.append(Kit_PM_per_day_3N[c]/Kit_PM_per_day_1N[c]) K_PM_D_3N_1N.append(Day_1N.iloc[c,0]) if (len(Kit_PM_per_day_4N)-1) >= c and (len(Kit_PM_per_day_1N)-1) >= c: if Kit_PM_4N_24hr.iloc[c, 6] > 0 and Kit_PM_1N_24hr.iloc[c, 6] > 0 and Kit_PM_4N_24hr.iloc[c, 0] == \ Kit_PM_1N_24hr.iloc[c, 0]: Kit_per_day_4N_1N.append(Kit_PM_per_day_4N[c]/Kit_PM_per_day_1N[c]) K_PM_D_4N_1N.append(Day_1N.iloc[c,0]) if (len(Kit_PM_per_day_3N)-1) >= c and (len(Kit_PM_per_day_2N)-1) >= c: if Kit_PM_3N_24hr.iloc[c, 6] > 0 and Kit_PM_2N_24hr.iloc[c, 6] > 0 and Kit_PM_3N_24hr.iloc[c, 0] == \ Kit_PM_2N_24hr.iloc[c, 0]: Kit_per_day_3N_2N.append(Kit_PM_per_day_3N[c]/Kit_PM_per_day_2N[c]) K_PM_D_3N_2N.append(Day_2N.iloc[c,0]) if (len(Kit_PM_per_day_4N)-1) >= c and (len(Kit_PM_per_day_3N)-1) >= c: if Kit_PM_4N_24hr.iloc[c, 6] > 0 and Kit_PM_3N_24hr.iloc[c, 6] > 0 and Kit_PM_3N_24hr.iloc[c, 0] == \ Kit_PM_4N_24hr.iloc[c, 0]: Kit_per_day_4N_3N.append(Kit_PM_per_day_4N[c]/Kit_PM_per_day_3N[c]) K_PM_D_4N_3N.append(Day_3N.iloc[c,0]) if (len(Kit_PM_per_day_4N)-1) >= c and (len(Kit_PM_per_day_2N)-1) >= c: if Kit_PM_4N_24hr.iloc[c, 6] > 0 and Kit_PM_4N_24hr.iloc[c, 6] > 0 and Kit_PM_4N_24hr.iloc[c, 0] == \ Kit_PM_2N_24hr.iloc[c, 0]: Kit_per_day_4N_2N.append(Kit_PM_per_day_4N[c]/Kit_PM_per_day_2N[c]) K_PM_D_4N_2N.append(Day_4N.iloc[c,0]) sns.set(style="ticks") f, (ax_box, ax_hist) = plt.subplots(2, sharex=True, gridspec_kw={"height_ratios": (0.15, 0.85)}) sns.boxplot(Kit_per_day_2N_1N, ax=ax_box, color='g') sns.distplot(Kit_per_day_2N_1N, ax=ax_hist, color='g') 
ax_box.set(yticks=[]) sns.despine(ax=ax_hist) sns.despine(ax=ax_box, left=True) plt.title('% 2N/1N (Kitchen PM per Day)') plt.ylim(top=2) plt.ylim(bottom = 0) sns.set(style="ticks") f, (ax_box, ax_hist) = plt.subplots(2, sharex=True, gridspec_kw={"height_ratios": (0.15, 0.85)}) sns.boxplot(Kit_per_day_3N_1N, ax=ax_box, color='r') sns.distplot(Kit_per_day_3N_1N, ax=ax_hist, color='r') ax_box.set(yticks=[]) sns.despine(ax=ax_hist) sns.despine(ax=ax_box, left=True) plt.title('% 3N/1N (Kitchen PM per Day)') plt.ylim(top=2) plt.ylim(bottom = 0) sns.set(style="ticks") f, (ax_box, ax_hist) = plt.subplots(2, sharex=True, gridspec_kw={"height_ratios": (0.15, 0.85)}) sns.boxplot(Kit_per_day_4N_1N, ax=ax_box, color='y') sns.distplot(Kit_per_day_4N_1N, ax=ax_hist, color='y') ax_box.set(yticks=[]) sns.despine(ax=ax_hist) sns.despine(ax=ax_box, left=True) plt.title('% 4N/1N (Kitchen PM per Day)') plt.ylim(top=2) plt.ylim(bottom = 0) sns.set(style="ticks") f, (ax_box, ax_hist) = plt.subplots(2, sharex=True, gridspec_kw={"height_ratios": (0.15, 0.85)}) sns.boxplot(Kit_per_day_3N_2N, ax=ax_box, color='m') sns.distplot(Kit_per_day_3N_2N, ax=ax_hist, color='m') ax_box.set(yticks=[]) sns.despine(ax=ax_hist) sns.despine(ax=ax_box, left=True) plt.title('% 3N/2N (Kitchen PM per Day)') plt.ylim(top=2) plt.ylim(bottom = 0) sns.set(style="ticks") f, (ax_box, ax_hist) = plt.subplots(2, sharex=True, gridspec_kw={"height_ratios": (0.15, 0.85)}) sns.boxplot(Kit_per_day_4N_3N, ax=ax_box, color='k') sns.distplot(Kit_per_day_4N_3N, ax=ax_hist, color='k') ax_box.set(yticks=[]) sns.despine(ax=ax_hist) sns.despine(ax=ax_box, left=True) plt.title('% 4N/3N (Kitchen PM per Day)') plt.ylim(top=2) plt.ylim(bottom = 0) sns.set(style="ticks") f, (ax_box, ax_hist) = plt.subplots(2, sharex=True, gridspec_kw={"height_ratios": (0.15, 0.85)}) sns.boxplot(Kit_per_day_4N_2N, ax=ax_box, color='tab:orange') sns.distplot(Kit_per_day_4N_2N, ax=ax_hist, color='tab:orange') ax_box.set(yticks=[]) sns.despine(ax=ax_hist) 
sns.despine(ax=ax_box, left=True) plt.title('% 4N/2N (Kitchen PM per Day)') plt.ylim(top=2) plt.ylim(bottom = 0) fig, ax = plt.subplots() plt.title('No-Hood Kitchen PM per day') quant_1_1N = np.percentile(Kit_PM_per_day_1N, [25,50,75]) Top_lim_1_1N = quant_1_1N[2] + 1.5*(quant_1_1N[2] - quant_1_1N[0]) Low_lim_1_1N = quant_1_1N[0] - 1.5*(quant_1_1N[2] - quant_1_1N[0]) bp_1 = plt.boxplot(Kit_PM_per_day_1N, positions = [1], widths = 0.6) kitchen_pm_1N_outlier = [] for v,a in enumerate(Kit_PM_per_day_1N): if a > Top_lim_1_1N or a < Low_lim_1_1N: kitchen_pm_1N_outlier.append(K_PM_D_1N[v]) plt.text(1,a,K_PM_D_1N[v]) plt.text(1,0.1,'1N',color='b') quant_1_2N = np.percentile(Kit_PM_per_day_2N, [25,50,75]) Top_lim_1_2N = quant_1_2N[2] + 1.5*(quant_1_2N[2] - quant_1_2N[0]) Low_lim_1_2N = quant_1_2N[0] - 1.5*(quant_1_2N[2] - quant_1_2N[0]) bp_1 = plt.boxplot(Kit_PM_per_day_2N,positions = [2], widths = 0.6) kitchen_pm_2N_outlier = [] for v,a in enumerate(Kit_PM_per_day_2N): if a > Top_lim_1_2N or a < Low_lim_1_2N: kitchen_pm_2N_outlier.append(K_PM_D_2N[v]) plt.text(2,a,K_PM_D_2N[v]) plt.text(2,0.1,'2N', color= 'g') quant_1_3N = np.percentile(Kit_PM_per_day_3N, [25,50,75]) Top_lim_1_3N = quant_1_3N[2] + 1.5*(quant_1_3N[2] - quant_1_3N[0]) Low_lim_1_3N = quant_1_3N[0] - 1.5*(quant_1_3N[2] - quant_1_3N[0]) kitchen_pm_3N_outlier = [] bp_1 = plt.boxplot(Kit_PM_per_day_3N,positions = [3], widths = 0.6) count = 0 for v,a in enumerate(Kit_PM_per_day_3N): if a > Top_lim_1_3N or a < Low_lim_1_3N: kitchen_pm_3N_outlier.append(K_PM_D_3N[v]) count = count + 1 if count == (3): plt.text(3,a,K_PM_D_3N[v],ha='left', va='bottom') if count == (1): plt.text(3,a,K_PM_D_3N[v],ha='left', va='top') else: plt.text(3,a,K_PM_D_3N[v],ha='right', va='bottom') plt.text(3,0.1,'3N', color='r') quant_1_4N = np.percentile(Kit_PM_per_day_4N, [25,50,75]) Top_lim_1_4N = quant_1_4N[2] + 1.5*(quant_1_4N[2] - quant_1_4N[0]) Low_lim_1_4N = quant_1_4N[0] - 1.5*(quant_1_4N[2] - quant_1_4N[0]) bp_1 = 
plt.boxplot(Kit_PM_per_day_4N,positions = [4], widths = 0.6) kitchen_pm_4N_outlier = [] for v,a in enumerate(Kit_PM_per_day_4N): if a > Top_lim_1_4N or a < Low_lim_1_4N: kitchen_pm_4N_outlier.append(K_PM_D_4N[v]) plt.text(4,a,K_PM_D_4N[v]) plt.text(4,0.1,'4N', color='y') plt.xlim(0,5) plt.ylim(0,1200) print('Kitchen PM 1N had these values as outliers ', kitchen_pm_1N_outlier) print('Kitchen PM 2N had these values as outliers ', kitchen_pm_2N_outlier) print('Kitchen PM 3N had these values as outliers ', kitchen_pm_3N_outlier) print('Kitchen PM 4N had these values as outliers ', kitchen_pm_4N_outlier) plt.show() fig_2, ax2 = plt.subplots() plt.title('% No_hood PM per Day Change' ) quant_1_2N_1N = np.percentile(Kit_per_day_2N_1N, [25,50,75]) Top_lim_1_2N_1N = quant_1_2N_1N[2] + 1.5*(quant_1_2N_1N[2]-quant_1_2N_1N[0]) Low_lim_1_2N_1N = quant_1_2N_1N[0] - 1.5*(quant_1_2N_1N[2]-quant_1_2N_1N[0]) bp_1_1 = plt.boxplot(Kit_per_day_2N_1N, positions=[1], widths= 0.6) kitchen_pm_2N_1N_outlier = [] for v,a in enumerate(Kit_per_day_2N_1N): if a > Top_lim_1_2N_1N or a < Low_lim_1_2N_1N: kitchen_pm_2N_1N_outlier.append(K_PM_D_2N_1N[v]) plt.text(1, a, K_PM_D_2N_1N[v]) plt.text(0.5, -0.25, '2N / 1N', color= 'g') quant_1_3N_1N = np.percentile(Kit_per_day_3N_1N, [25,50,75]) Top_lim_1_3N_1N = quant_1_3N_1N[2] + 1.5*(quant_1_3N_1N[2]-quant_1_3N_1N[0]) Low_lim_1_3N_1N = quant_1_3N_1N[0] - 1.5*(quant_1_3N_1N[2]-quant_1_3N_1N[0]) bp_1_1 = plt.boxplot(Kit_per_day_3N_1N, positions=[2], widths= 0.6) kitchen_pm_3N_1N_outlier = [] for v,a in enumerate(Kit_per_day_3N_1N): if a > Top_lim_1_3N_1N or a < Low_lim_1_3N_1N: kitchen_pm_3N_1N_outlier.append(K_PM_D_3N_1N[v]) plt.text(2, a, K_PM_D_3N_1N[v]) plt.text(1.5, -0.25, '3N / 1N', color= 'r') quant_1_4N_1N = np.percentile(Kit_per_day_4N_1N, [25,50,75]) Top_lim_1_4N_1N = quant_1_4N_1N[2] + 1.5*(quant_1_4N_1N[2]-quant_1_4N_1N[0]) Low_lim_1_4N_1N = quant_1_4N_1N[0] - 1.5*(quant_1_4N_1N[2]-quant_1_4N_1N[0]) bp_1_1 = plt.boxplot(Kit_per_day_4N_1N, 
positions=[3], widths= 0.6) kitchen_pm_4N_1N_outlier = [] for v,a in enumerate(Kit_per_day_4N_1N): if a > Top_lim_1_4N_1N or a < Low_lim_1_4N_1N: kitchen_pm_4N_1N_outlier.append(K_PM_D_4N_1N[v]) plt.text(3, a, K_PM_D_4N_1N[v]) plt.text(2.5, -0.25, '4N / 1N', color= 'y') quant_1_3N_2N = np.percentile(Kit_per_day_3N_2N, [25,50,75]) Top_lim_1_3N_2N = quant_1_3N_2N[2] + 1.5*(quant_1_3N_2N[2]-quant_1_3N_2N[0]) Low_lim_1_3N_2N = quant_1_3N_2N[0] - 1.5*(quant_1_3N_2N[2]-quant_1_3N_2N[0]) bp_1_1 = plt.boxplot(Kit_per_day_3N_2N, positions=[4], widths= 0.6) kitchen_pm_3N_2N_outlier = [] for v,a in enumerate(Kit_per_day_3N_2N): if a > Top_lim_1_3N_2N or a < Low_lim_1_3N_2N: kitchen_pm_3N_2N_outlier.append(K_PM_D_3N_2N[v]) plt.text(4, a, K_PM_D_3N_2N[v]) plt.text(3.5, -0.25, '3N / 2N', color= 'm') quant_1_4N_3N = np.percentile(Kit_per_day_4N_3N, [25,50,75]) Top_lim_1_4N_3N = quant_1_4N_3N[2] + 1.5*(quant_1_4N_3N[2]-quant_1_4N_3N[0]) Low_lim_1_4N_3N = quant_1_4N_3N[0] - 1.5*(quant_1_4N_3N[2]-quant_1_4N_3N[0]) bp_1_1 = plt.boxplot(Kit_per_day_4N_3N, positions=[5], widths= 0.6) kitchen_pm_4N_3N_outlier = [] for v,a in enumerate(Kit_per_day_4N_3N): if a > Top_lim_1_4N_3N or a < Low_lim_1_4N_3N: kitchen_pm_4N_3N_outlier.append(K_PM_D_4N_3N[v]) plt.text(5, a, K_PM_D_4N_3N[v]) plt.text(4.5, -0.25, '4N / 3N', color= 'k') quant_1_4N_2N = np.percentile(Kit_per_day_4N_2N, [25,50,75]) Top_lim_1_4N_2N = quant_1_4N_2N[2] + 1.5*(quant_1_4N_2N[2]-quant_1_4N_2N[0]) Low_lim_1_4N_2N = quant_1_4N_2N[0] - 1.5*(quant_1_4N_2N[2]-quant_1_4N_2N[0]) bp_1_1 = plt.boxplot(Kit_per_day_4N_2N, positions=[6], widths= 0.6) kitchen_pm_4N_2N_outlier = [] for v,a in enumerate(Kit_per_day_4N_2N): if a > Top_lim_1_4N_2N or a < Low_lim_1_4N_2N: kitchen_pm_4N_2N_outlier.append(K_PM_D_4N_2N[v]) plt.text(6, a, K_PM_D_4N_2N[v]) plt.text(5.5, -0.25, '4N / 2N', color= 'tab:orange') plt.xlim(0,7) plt.ylim(-0.5,5) print('Kitchen PM 2N/1N had these values as outliers ', kitchen_pm_2N_1N_outlier) print('Kitchen PM 3N/1N had 
these values as outliers ', kitchen_pm_3N_1N_outlier) print('Kitchen PM 4N/1N had these values as outliers ', kitchen_pm_4N_1N_outlier) print('Kitchen PM 3N/2N had these values as outliers ', kitchen_pm_3N_2N_outlier) print('Kitchen PM 4N/3N had these values as outliers ', kitchen_pm_4N_3N_outlier) print('Kitchen PM 4N/2N had these values as outliers ', kitchen_pm_4N_2N_outlier) plt.show() quant_1_1N = np.append(quant_1_1N, np.average(Kit_PM_per_day_1N)) quant_1_2N = np.append(quant_1_2N, np.average(Kit_PM_per_day_2N)) quant_1_3N = np.append(quant_1_3N, np.average(Kit_PM_per_day_3N)) quant_1_4N = np.append(quant_1_4N, np.average(Kit_PM_per_day_4N)) D_50_quant_phase_PM_d = {'Percentile %': ['25','50','75', 'Avg'], '1N': quant_1_1N, '2N': quant_1_2N,'3N' : quant_1_3N,'4N': quant_1_4N} PM_D_50_phase_no_hood = pd.DataFrame(data=D_50_quant_phase_PM_d,columns=['Percentile %','1N', '2N', '3N','4N']) quant_1_2N_1N = np.append(quant_1_2N_1N , np.average(Kit_per_day_2N_1N)) quant_1_3N_1N = np.append(quant_1_3N_1N , np.average(Kit_per_day_3N_1N)) quant_1_4N_1N = np.append(quant_1_4N_1N , np.average(Kit_per_day_4N_1N)) quant_1_3N_2N = np.append(quant_1_3N_2N , np.average(Kit_per_day_3N_2N)) quant_1_4N_3N = np.append(quant_1_4N_3N , np.average(Kit_per_day_4N_3N)) quant_1_4N_2N = np.append(quant_1_4N_2N , np.average(Kit_per_day_4N_2N)) D_50_quant_percent_PM_d ={'Percentile %': ['25','50','75', 'Avg'],'2N / 1N': quant_1_2N_1N,'3N / 1N': quant_1_3N_1N,'4N / 1N': quant_1_4N_1N, '3N / 2N': quant_1_3N_2N,'4N / 3N': quant_1_4N_3N,'4N / 2N': quant_1_4N_2N} PM_D_50_percent_change_no_hood = pd.DataFrame(data=D_50_quant_percent_PM_d, columns=['Percentile %','2N / 1N','3N / 1N', '4N / 1N' ,'3N / 2N','4N / 3N','4N / 2N']) print(PM_D_50_phase_no_hood) print(PM_D_50_percent_change_no_hood) if Hood_or_no == 'hood': Kit_PM_per_day_1H = [] K_PM_D_1H = [] Kit_PM_per_day_2H = [] K_PM_D_2H = [] Kit_PM_per_day_3H = [] K_PM_D_3H = [] count_t = 0 count_pm = 0 for c in hood_counter: if c == 
(Household_removal[count_t] - C_Place_holder): count_t = count_t + 1 if count_t == len(Household_removal): count_t = 0 continue if c == (Household_removal_Hood_PM[count_pm] - C_Place_holder): count_pm = count_pm + 1 if count_pm == len(Household_removal_Hood_PM): count_pm = 0 continue if Kit_PM_1H_24hr.iloc[c, 6] != -1.00: Kit_PM_per_day_1H.append(Kit_PM_1H_24hr.iloc[c,6]) K_PM_D_1H.append(Kit_PM_1H_24hr.iloc[c,0]) if Kit_PM_2H_24hr.iloc[c, 6] != -1.00: Kit_PM_per_day_2H.append(Kit_PM_2H_24hr.iloc[c,6]) K_PM_D_2H.append(Kit_PM_2H_24hr.iloc[c,0]) if Kit_PM_3H_24hr.iloc[c, 6] != -1.00: Kit_PM_per_day_3H.append(Kit_PM_3H_24hr.iloc[c,6]) K_PM_D_3H.append(Kit_PM_3H_24hr.iloc[c,0]) Kit_per_day_2H_1H = [] K_PM_D_2H_1H = [] Kit_per_day_3H_1H = [] K_PM_D_3H_1H = [] Kit_per_day_3H_2H = [] K_PM_D_3H_2H = [] count_t = 0 count_pm = 0 for c in NO_hood_counter: if c == (Household_removal[count_t] - C_Place_holder): count_t = count_t + 1 if count_t == len(Household_removal): count_t = 0 continue if c == (Household_removal_Hood_PM[count_pm] - C_Place_holder): count_pm = count_pm + 1 if count_pm == len(Household_removal_Hood_PM): count_pm = 0 continue if (len(Kit_PM_per_day_2H)-1) >= c and (len(Kit_PM_per_day_1H)-1) >= c: if Kit_PM_1H_24hr.iloc[c, 6] > 0 and Kit_PM_2H_24hr.iloc[c, 6] > 0 and Kit_PM_1H_24hr.iloc[c, 0] == Kit_PM_2H_24hr.iloc[c, 0]: Kit_per_day_2H_1H.append(Kit_PM_per_day_2H[c]/Kit_PM_per_day_1H[c]) K_PM_D_2H_1H.append(Day_1H.iloc[c,0]) if (len(Kit_PM_per_day_3H)-1) >= c and (len(Kit_PM_per_day_1H)-1) >= c: if Kit_PM_3H_24hr.iloc[c, 6] > 0 and Kit_PM_1H_24hr.iloc[c, 6] > 0 and Kit_PM_1H_24hr.iloc[c, 0] == \ Kit_PM_3H_24hr.iloc[c, 0]: Kit_per_day_3H_1H.append(Kit_PM_per_day_3H[c]/Kit_PM_per_day_1H[c]) K_PM_D_3H_1H.append(Day_1H.iloc[c,0]) if (len(Kit_PM_per_day_3H)-1) >= c and (len(Kit_PM_per_day_2H)-1) >= c: if Kit_PM_3H_24hr.iloc[c, 6] > 0 and Kit_PM_2H_24hr.iloc[c, 6] > 0 and Kit_PM_3H_24hr.iloc[c, 0] == \ Kit_PM_2H_24hr.iloc[c, 0]: 
Kit_per_day_3H_2H.append(Kit_PM_per_day_3H[c]/Kit_PM_per_day_2H[c]) K_PM_D_3H_2H.append(Day_2H.iloc[c,0]) sns.set(style="ticks") f, (ax_box, ax_hist) = plt.subplots(2, sharex=True, gridspec_kw={"height_ratios": (0.15, 0.85)}) sns.boxplot(Kit_per_day_2H_1H, ax=ax_box, color='g') sns.distplot(Kit_per_day_2H_1H, ax=ax_hist, color='g') ax_box.set(yticks=[]) sns.despine(ax=ax_hist) sns.despine(ax=ax_box, left=True) plt.title('% 2H/1H (Kitchen PM per Day)') plt.ylim(top=1.5) plt.ylim(bottom = 0) sns.set(style="ticks") f, (ax_box, ax_hist) = plt.subplots(2, sharex=True, gridspec_kw={"height_ratios": (0.15, 0.85)}) sns.boxplot(Kit_per_day_3H_1H, ax=ax_box, color='r') sns.distplot(Kit_per_day_3H_1H, ax=ax_hist, color='r') ax_box.set(yticks=[]) sns.despine(ax=ax_hist) sns.despine(ax=ax_box, left=True) plt.title('% 3H/1H (Kitchen PM per Day)') plt.ylim(top=2) plt.ylim(bottom = 0) sns.set(style="ticks") f, (ax_box, ax_hist) = plt.subplots(2, sharex=True, gridspec_kw={"height_ratios": (0.15, 0.85)}) sns.boxplot(Kit_per_day_3H_2H, ax=ax_box, color='m') sns.distplot(Kit_per_day_3H_2H, ax=ax_hist, color='m') ax_box.set(yticks=[]) sns.despine(ax=ax_hist) sns.despine(ax=ax_box, left=True) plt.title('% 3H/2H (Kitchen PM per Day)') plt.ylim(top=2) plt.ylim(bottom = 0) fig, ax = plt.subplots() plt.title('Hood Kitchen PM per day') quant_1_1H = np.percentile(Kit_PM_per_day_1H, [25,50,75]) Top_lim_1_1H = quant_1_1H[2] + 1.5*(quant_1_1H[2] - quant_1_1H[0]) Low_lim_1_1H = quant_1_1H[0] - 1.5*(quant_1_1H[2] - quant_1_1H[0]) bp_1 = plt.boxplot(Kit_PM_per_day_1H, positions = [1], widths = 0.6) kitchen_pm_1H_outlier = [] for v,a in enumerate(Kit_PM_per_day_1H): if a > Top_lim_1_1H or a < Low_lim_1_1H: kitchen_pm_1H_outlier.append(K_PM_D_1H[v]) plt.text(1,a,K_PM_D_1H[v]) plt.text(0.5,0.1,'1H',color='b') quant_1_2H = np.percentile(Kit_PM_per_day_2H, [25,50,75]) Top_lim_1_2N = quant_1_2H[2] + 1.5*(quant_1_2H[2] - quant_1_2H[0]) Low_lim_1_2N = quant_1_2H[0] - 1.5*(quant_1_2H[2] - quant_1_2H[0]) 
bp_1 = plt.boxplot(Kit_PM_per_day_2H,positions = [2], widths = 0.6) kitchen_pm_2H_outlier = [] for v,a in enumerate(Kit_PM_per_day_2H): if a > Top_lim_1_2N or a < Low_lim_1_2N: kitchen_pm_2H_outlier.append(K_PM_D_2H[v]) plt.text(2,a,K_PM_D_2H[v]) plt.text(1.5,0.1,'2H', color= 'g') quant_1_3H = np.percentile(Kit_PM_per_day_3H, [25,50,75]) Top_lim_1_3N = quant_1_3H[2] + 1.5*(quant_1_3H[2] - quant_1_3H[0]) Low_lim_1_3N = quant_1_3H[0] - 1.5*(quant_1_3H[2] - quant_1_3H[0]) kitchen_3H_outlier = [] bp_1 = plt.boxplot(Kit_PM_per_day_3H,positions = [3], widths = 0.6) count = 0 kitchen_pm_3H_outlier = [] for v,a in enumerate(Kit_PM_per_day_3H): if a > Top_lim_1_3N or a < Low_lim_1_3N: kitchen_pm_3H_outlier.append(K_PM_D_3H[v]) plt.text(3,a,K_PM_D_3H[v]) plt.text(2.5,0.1,'3H', color='r') plt.xlim(0,4) plt.ylim(0,1200) print('Kitchen PM 1H had these values as outliers ', kitchen_pm_1H_outlier) print('Kitchen PM 2H had these values as outliers ', kitchen_pm_2H_outlier) print('Kitchen PM 3H had these values as outliers ', kitchen_pm_3H_outlier) plt.show() fig_2, ax2 = plt.subplots() plt.title('% hood PM per Day Change' ) quant_1_2H_1H = np.percentile(Kit_per_day_2H_1H, [25,50,75]) Top_lim_1_2N_1N = quant_1_2H_1H[2] + 1.5*(quant_1_2H_1H[2]-quant_1_2H_1H[0]) Low_lim_1_2N_1N = quant_1_2H_1H[0] - 1.5*(quant_1_2H_1H[2]-quant_1_2H_1H[0]) bp_1_1 = plt.boxplot(Kit_per_day_2H_1H, positions=[1], widths= 0.6) kitchen_pm_2H_1H_outlier = [] for v,a in enumerate(Kit_per_day_2H_1H): if a > Top_lim_1_2N_1N or a < Low_lim_1_2N_1N: kitchen_pm_2H_1H_outlier.append(K_PM_D_2H_1H[v]) plt.text(1, a, K_PM_D_2H_1H[v]) plt.text(0.75, -0.25, '2H / 1H', color= 'g') quant_1_3H_1H = np.percentile(Kit_per_day_3H_1H, [25,50,75]) Top_lim_1_3N_1N = quant_1_3H_1H[2] + 1.5*(quant_1_3H_1H[2]-quant_1_3H_1H[0]) Low_lim_1_3N_1N = quant_1_3H_1H[0] - 1.5*(quant_1_3H_1H[2]-quant_1_3H_1H[0]) bp_1_1 = plt.boxplot(Kit_per_day_3H_1H, positions=[2], widths= 0.6) kitchen_pm_3H_1H_outlier = [] for v,a in 
enumerate(Kit_per_day_3H_1H): if a > Top_lim_1_3N_1N or a < Low_lim_1_3N_1N: kitchen_pm_3H_1H_outlier.append(K_PM_D_3H_1H[v]) plt.text(2, a, K_PM_D_3H_1H[v]) plt.text(1.75, -0.25, '3H / 1H', color= 'r') quant_1_3H_2H = np.percentile(Kit_per_day_3H_2H, [25,50,75]) Top_lim_1_3N_2N = quant_1_3H_2H[2] + 1.5*(quant_1_3H_2H[2]-quant_1_3H_2H[0]) Low_lim_1_3N_2N = quant_1_3H_2H[0] - 1.5*(quant_1_3H_2H[2]-quant_1_3H_2H[0]) bp_1_1 = plt.boxplot(Kit_per_day_3H_2H, positions=[3], widths= 0.6) kitchen_pm_3H_2H_outlier = [] for v,a in enumerate(Kit_per_day_3H_2H): if a > Top_lim_1_3N_2N or a < Low_lim_1_3N_2N: kitchen_pm_3H_2H_outlier.append(K_PM_D_3H_2H[v]) plt.text(3, a, K_PM_D_3H_2H[v]) plt.text(2.75, -0.25, '3H / 2H', color= 'm') plt.xlim(0,4) plt.ylim(-0.5,5) print('Kitchen PM 2H/1H had these values as outliers ', kitchen_pm_2H_1H_outlier) print('Kitchen PM 3H/1H had these values as outliers ', kitchen_pm_3H_1H_outlier) print('Kitchen PM 3H/2H had these values as outliers ', kitchen_pm_3H_2H_outlier) plt.show() quant_1_1H = np.append(quant_1_1H, np.average(Kit_PM_per_day_1H)) quant_1_2H = np.append(quant_1_2H, np.average(Kit_PM_per_day_2H)) quant_1_3H = np.append(quant_1_3H, np.average(Kit_PM_per_day_3H)) D_50_quant_phase_PM_D_hood = {'Percentile %': ['25','50','75', 'Avg'], '1H': quant_1_1H, '2H': quant_1_2H,'3H' : quant_1_3H} PM_D_50_phase_hood = pd.DataFrame(data=D_50_quant_phase_PM_D_hood, columns= ['Percentile %','1H','2H','3H' ]) quant_1_2H_1H = np.append(quant_1_2H_1H , np.average(Kit_per_day_2H_1H)) quant_1_3H_1H = np.append(quant_1_3H_1H , np.average(Kit_per_day_3H_1H)) quant_1_3H_2H = np.append(quant_1_3H_2H , np.average(Kit_per_day_3H_2H)) D_50_quant_percent_PM_D_hood ={'Percentile %': ['25','50','75', 'Avg'],'2H / 1H': quant_1_2H_1H,'3H / 1H': quant_1_3H_1H,'3H / 2H': quant_1_3H_2H} PM_D_50_percent_change_hood = pd.DataFrame(data=D_50_quant_percent_PM_D_hood, columns=['Percentile %','2H / 1H','3H / 1H','3H / 2H']) print(PM_D_50_phase_hood) 
print(PM_D_50_percent_change_hood) if Hood_or_no == 'no_hood': plt.title('Histogram of Fuel per 24 Hours per Person - No Hood' ) plt.hist([Fuel_per_day_per_adult_1N], color=['b'], alpha=0.5, label='1N') plt.hist([Fuel_per_day_per_adult_2N], color=['g'], alpha=0.5, label='2N') plt.hist([Fuel_per_day_per_adult_3N], color=['r'], alpha=0.5, label='3N') plt.hist([Fuel_per_day_per_adult_4N], color=['y'], alpha=0.5, label='4N') plt.legend(loc='upper right') plt.show() plt.title('Histogram of Kitchen PM 24 Hours - No Hood' ) plt.hist([Kit_PM_per_day_1N], color=['b'], alpha=0.5, label='1N') plt.hist([Kit_PM_per_day_2N], color=['g'], alpha=0.5, label='2N') plt.hist([Kit_PM_per_day_3N], color=['r'], alpha=0.5, label='3N') plt.hist([Kit_PM_per_day_4N], color=['y'], alpha=0.5, label='4N') plt.legend(loc='upper right') plt.show() if Hood_or_no == 'hood': plt.title('Histogram of Fuel per 24 Hours per Person - Hood' ) plt.hist([Fuel_per_day_per_adult_1H], color=['b'], alpha=0.5, label='1H') plt.hist([Fuel_per_day_per_adult_2H], color=['g'], alpha=0.5, label='2H') plt.hist([Fuel_per_day_per_adult_3H], color=['r'], alpha=0.5, label='3H') plt.legend(loc='upper right') plt.show() plt.title('Histogram of Kitchen PM 24 Hours - Hood' ) plt.hist([Kit_PM_per_day_1H], color=['b'], alpha=0.5, label='1H') plt.hist([Kit_PM_per_day_2H], color=['g'], alpha=0.5, label='2H') plt.hist([Kit_PM_per_day_3H], color=['r'], alpha=0.5, label='3H') plt.legend(loc='upper right') plt.show()
true
true
f70ea318486da939ee5b7f2bf6eee6d6a675026f
7,269
py
Python
tests/metarl/torch/algos/test_torch_algo_utils.py
icml2020submission6857/metarl
9b66cefa2b6bcb6a38096d629ce8853b47c7171d
[ "MIT" ]
2
2020-03-15T14:35:15.000Z
2021-02-15T16:38:00.000Z
tests/metarl/torch/algos/test_torch_algo_utils.py
icml2020submission6857/metarl
9b66cefa2b6bcb6a38096d629ce8853b47c7171d
[ "MIT" ]
null
null
null
tests/metarl/torch/algos/test_torch_algo_utils.py
icml2020submission6857/metarl
9b66cefa2b6bcb6a38096d629ce8853b47c7171d
[ "MIT" ]
1
2020-02-24T03:04:23.000Z
2020-02-24T03:04:23.000Z
"""Test torch algo utility functions.""" import numpy as np import pytest import tensorflow as tf import torch import torch.nn.functional as F import metarl.tf.misc.tensor_utils as tf_utils import metarl.torch.algos._utils as torch_algo_utils from tests.fixtures import TfGraphTestCase def stack(d, arr): """Stack 'arr' 'd' times.""" return np.repeat(np.expand_dims(arr, axis=0), repeats=d, axis=0) ONES = np.ones((4, 6)) ZEROS = np.zeros((4, 6)) ARRANGE = stack(4, np.arange(6)) PI_DIGITS = stack(4, [3, 1, 4, 1, 5, 9]) E_DIGITS = stack(4, [2, 7, 1, 8, 2, 8]) FIBS = stack(4, [1, 1, 2, 3, 5, 8]) nums_1d = np.arange(0, 4).astype(float) nums_2d = np.arange(0, 4).astype(float).reshape(2, 2) nums_3d = np.arange(0, 8).astype(float).reshape(2, 2, 2) class TestTorchAlgoUtils(TfGraphTestCase): """Test class for torch algo utility functions.""" # yapf: disable @pytest.mark.parametrize('gae_lambda, rewards_val, baselines_val', [ (0.4, ONES, ZEROS), (0.8, PI_DIGITS, ARRANGE), (1.2, ONES, FIBS), (1.7, E_DIGITS, PI_DIGITS), ]) # yapf: enable def testcompute_advantages(self, gae_lambda, rewards_val, baselines_val): """Test compute_advantage function.""" discount = 0.99 max_len = rewards_val.shape[-1] torch_advs = torch_algo_utils.compute_advantages( discount, gae_lambda, max_len, torch.Tensor(baselines_val), torch.Tensor(rewards_val)) rewards = tf.compat.v1.placeholder(dtype=tf.float32, name='reward', shape=[None, None]) baselines = tf.compat.v1.placeholder(dtype=tf.float32, name='baseline', shape=[None, None]) adv = tf_utils.compute_advantages(discount, gae_lambda, max_len, baselines, rewards) tf_advs = self.sess.run(adv, feed_dict={ rewards: rewards_val, baselines: baselines_val, }) assert np.allclose(torch_advs.numpy(), tf_advs.reshape(torch_advs.shape), atol=1e-5) def test_add_padding_last_1d(self): """Test pad_to_last function for 1d.""" max_length = 10 expected = F.pad(torch.Tensor(nums_1d), (0, max_length - nums_1d.shape[-1])) tensor_padding = 
torch_algo_utils.pad_to_last(nums_1d, total_length=max_length) assert expected.eq(tensor_padding).all() tensor_padding = torch_algo_utils.pad_to_last(nums_1d, total_length=10, axis=0) assert expected.eq(tensor_padding).all() def test_add_padding_last_2d(self): """Test pad_to_last function for 2d.""" max_length = 10 tensor_padding = torch_algo_utils.pad_to_last(nums_2d, total_length=10) expected = F.pad(torch.Tensor(nums_2d), (0, max_length - nums_2d.shape[-1])) assert expected.eq(tensor_padding).all() tensor_padding = torch_algo_utils.pad_to_last(nums_2d, total_length=10, axis=0) expected = F.pad(torch.Tensor(nums_2d), (0, 0, 0, max_length - nums_2d.shape[0])) assert expected.eq(tensor_padding).all() tensor_padding = torch_algo_utils.pad_to_last(nums_2d, total_length=10, axis=1) expected = F.pad(torch.Tensor(nums_2d), (0, max_length - nums_2d.shape[-1], 0, 0)) assert expected.eq(tensor_padding).all() def test_add_padding_last_3d(self): """Test pad_to_last function for 3d.""" max_length = 10 tensor_padding = torch_algo_utils.pad_to_last(nums_3d, total_length=10) expected = F.pad(torch.Tensor(nums_3d), (0, max_length - nums_3d.shape[-1], 0, 0, 0, 0)) assert expected.eq(tensor_padding).all() tensor_padding = torch_algo_utils.pad_to_last(nums_3d, total_length=10, axis=0) expected = F.pad(torch.Tensor(nums_3d), (0, 0, 0, 0, 0, max_length - nums_3d.shape[0])) assert expected.eq(tensor_padding).all() tensor_padding = torch_algo_utils.pad_to_last(nums_3d, total_length=10, axis=1) expected = F.pad(torch.Tensor(nums_3d), (0, 0, 0, max_length - nums_3d.shape[-1], 0, 0)) assert expected.eq(tensor_padding).all() tensor_padding = torch_algo_utils.pad_to_last(nums_3d, total_length=10, axis=2) expected = F.pad(torch.Tensor(nums_3d), (0, max_length - nums_3d.shape[-1], 0, 0, 0, 0)) assert expected.eq(tensor_padding).all() @pytest.mark.parametrize('nums', [nums_1d, nums_2d, nums_3d]) def test_out_of_index_error(self, nums): """Test pad_to_last raises IndexError.""" with 
pytest.raises(IndexError): torch_algo_utils.pad_to_last(nums, total_length=10, axis=len(nums.shape)) def testmake_optimizer_with_type(self): """Test make_optimizer function with type as first argument.""" optimizer_type = torch.optim.Adam module = torch.nn.Linear(2, 1) lr = 0.123 optimizer = torch_algo_utils.make_optimizer(optimizer_type, module, lr=lr) assert isinstance(optimizer, optimizer_type) assert optimizer.defaults['lr'] == lr def testmake_optimizer_with_tuple(self): """Test make_optimizer function with tuple as first argument.""" optimizer_type = (torch.optim.Adam, {'lr': 0.1}) module = torch.nn.Linear(2, 1) optimizer = torch_algo_utils.make_optimizer(optimizer_type, module) assert isinstance(optimizer, optimizer_type) assert optimizer.defaults['lr'] == optimizer_type[1]['lr'] def testmake_optimizer_raise_value_error(self): """Test make_optimizer raises value error.""" optimizer_type = (torch.optim.Adam, {'lr': 0.1}) module = torch.nn.Linear(2, 1) with pytest.raises(ValueError): _ = torch_algo_utils.make_optimizer(optimizer_type, module, lr=0.123)
42.508772
79
0.526482
import numpy as np import pytest import tensorflow as tf import torch import torch.nn.functional as F import metarl.tf.misc.tensor_utils as tf_utils import metarl.torch.algos._utils as torch_algo_utils from tests.fixtures import TfGraphTestCase def stack(d, arr): return np.repeat(np.expand_dims(arr, axis=0), repeats=d, axis=0) ONES = np.ones((4, 6)) ZEROS = np.zeros((4, 6)) ARRANGE = stack(4, np.arange(6)) PI_DIGITS = stack(4, [3, 1, 4, 1, 5, 9]) E_DIGITS = stack(4, [2, 7, 1, 8, 2, 8]) FIBS = stack(4, [1, 1, 2, 3, 5, 8]) nums_1d = np.arange(0, 4).astype(float) nums_2d = np.arange(0, 4).astype(float).reshape(2, 2) nums_3d = np.arange(0, 8).astype(float).reshape(2, 2, 2) class TestTorchAlgoUtils(TfGraphTestCase): @pytest.mark.parametrize('gae_lambda, rewards_val, baselines_val', [ (0.4, ONES, ZEROS), (0.8, PI_DIGITS, ARRANGE), (1.2, ONES, FIBS), (1.7, E_DIGITS, PI_DIGITS), ]) def testcompute_advantages(self, gae_lambda, rewards_val, baselines_val): discount = 0.99 max_len = rewards_val.shape[-1] torch_advs = torch_algo_utils.compute_advantages( discount, gae_lambda, max_len, torch.Tensor(baselines_val), torch.Tensor(rewards_val)) rewards = tf.compat.v1.placeholder(dtype=tf.float32, name='reward', shape=[None, None]) baselines = tf.compat.v1.placeholder(dtype=tf.float32, name='baseline', shape=[None, None]) adv = tf_utils.compute_advantages(discount, gae_lambda, max_len, baselines, rewards) tf_advs = self.sess.run(adv, feed_dict={ rewards: rewards_val, baselines: baselines_val, }) assert np.allclose(torch_advs.numpy(), tf_advs.reshape(torch_advs.shape), atol=1e-5) def test_add_padding_last_1d(self): max_length = 10 expected = F.pad(torch.Tensor(nums_1d), (0, max_length - nums_1d.shape[-1])) tensor_padding = torch_algo_utils.pad_to_last(nums_1d, total_length=max_length) assert expected.eq(tensor_padding).all() tensor_padding = torch_algo_utils.pad_to_last(nums_1d, total_length=10, axis=0) assert expected.eq(tensor_padding).all() def test_add_padding_last_2d(self): 
max_length = 10 tensor_padding = torch_algo_utils.pad_to_last(nums_2d, total_length=10) expected = F.pad(torch.Tensor(nums_2d), (0, max_length - nums_2d.shape[-1])) assert expected.eq(tensor_padding).all() tensor_padding = torch_algo_utils.pad_to_last(nums_2d, total_length=10, axis=0) expected = F.pad(torch.Tensor(nums_2d), (0, 0, 0, max_length - nums_2d.shape[0])) assert expected.eq(tensor_padding).all() tensor_padding = torch_algo_utils.pad_to_last(nums_2d, total_length=10, axis=1) expected = F.pad(torch.Tensor(nums_2d), (0, max_length - nums_2d.shape[-1], 0, 0)) assert expected.eq(tensor_padding).all() def test_add_padding_last_3d(self): max_length = 10 tensor_padding = torch_algo_utils.pad_to_last(nums_3d, total_length=10) expected = F.pad(torch.Tensor(nums_3d), (0, max_length - nums_3d.shape[-1], 0, 0, 0, 0)) assert expected.eq(tensor_padding).all() tensor_padding = torch_algo_utils.pad_to_last(nums_3d, total_length=10, axis=0) expected = F.pad(torch.Tensor(nums_3d), (0, 0, 0, 0, 0, max_length - nums_3d.shape[0])) assert expected.eq(tensor_padding).all() tensor_padding = torch_algo_utils.pad_to_last(nums_3d, total_length=10, axis=1) expected = F.pad(torch.Tensor(nums_3d), (0, 0, 0, max_length - nums_3d.shape[-1], 0, 0)) assert expected.eq(tensor_padding).all() tensor_padding = torch_algo_utils.pad_to_last(nums_3d, total_length=10, axis=2) expected = F.pad(torch.Tensor(nums_3d), (0, max_length - nums_3d.shape[-1], 0, 0, 0, 0)) assert expected.eq(tensor_padding).all() @pytest.mark.parametrize('nums', [nums_1d, nums_2d, nums_3d]) def test_out_of_index_error(self, nums): with pytest.raises(IndexError): torch_algo_utils.pad_to_last(nums, total_length=10, axis=len(nums.shape)) def testmake_optimizer_with_type(self): optimizer_type = torch.optim.Adam module = torch.nn.Linear(2, 1) lr = 0.123 optimizer = torch_algo_utils.make_optimizer(optimizer_type, module, lr=lr) assert isinstance(optimizer, optimizer_type) assert optimizer.defaults['lr'] == lr def 
testmake_optimizer_with_tuple(self): optimizer_type = (torch.optim.Adam, {'lr': 0.1}) module = torch.nn.Linear(2, 1) optimizer = torch_algo_utils.make_optimizer(optimizer_type, module) assert isinstance(optimizer, optimizer_type) assert optimizer.defaults['lr'] == optimizer_type[1]['lr'] def testmake_optimizer_raise_value_error(self): optimizer_type = (torch.optim.Adam, {'lr': 0.1}) module = torch.nn.Linear(2, 1) with pytest.raises(ValueError): _ = torch_algo_utils.make_optimizer(optimizer_type, module, lr=0.123)
true
true
f70ea35f73bd2981d6bbb6766833e038e16308d4
23,711
py
Python
src/sage/schemes/plane_curves/projective_curve.py
switzel/sage
7eb8510dacf61b691664cd8f1d2e75e5d473e5a0
[ "BSL-1.0" ]
null
null
null
src/sage/schemes/plane_curves/projective_curve.py
switzel/sage
7eb8510dacf61b691664cd8f1d2e75e5d473e5a0
[ "BSL-1.0" ]
null
null
null
src/sage/schemes/plane_curves/projective_curve.py
switzel/sage
7eb8510dacf61b691664cd8f1d2e75e5d473e5a0
[ "BSL-1.0" ]
1
2020-07-24T12:20:37.000Z
2020-07-24T12:20:37.000Z
""" Projective plane curves over a general ring AUTHORS: - William Stein (2005-11-13) - David Joyner (2005-11-13) - David Kohel (2006-01) - Moritz Minzlaff (2010-11) """ #***************************************************************************** # Copyright (C) 2005 William Stein <wstein@gmail.com> # # Distributed under the terms of the GNU General Public License (GPL) # # The full text of the GPL is available at: # # http://www.gnu.org/licenses/ #***************************************************************************** from sage.interfaces.all import singular from sage.misc.all import add, sage_eval from sage.rings.all import degree_lowest_rational_function from sage.schemes.projective.projective_space import is_ProjectiveSpace from curve import Curve_generic_projective class ProjectiveSpaceCurve_generic(Curve_generic_projective): def _repr_type(self): return "Projective Space" def __init__(self, A, X): if not is_ProjectiveSpace(A): raise TypeError("A (=%s) must be a projective space"%A) Curve_generic_projective.__init__(self, A, X) d = self.dimension() if d != 1: raise ValueError("defining equations (=%s) define a scheme of dimension %s != 1"%(X,d)) class ProjectiveCurve_generic(Curve_generic_projective): def __init__(self, A, f): if not (is_ProjectiveSpace(A) and A.dimension != 2): raise TypeError("Argument A (= %s) must be a projective plane."%A) Curve_generic_projective.__init__(self, A, [f]) def _repr_type(self): return "Projective" def arithmetic_genus(self): r""" Return the arithmetic genus of this curve. This is the arithmetic genus `g_a(C)` as defined in Hartshorne. If the curve has degree `d` then this is simply `(d-1)(d-2)/2`. It need *not* equal the geometric genus (the genus of the normalization of the curve). 
EXAMPLE:: sage: x,y,z = PolynomialRing(GF(5), 3, 'xyz').gens() sage: C = Curve(y^2*z^7 - x^9 - x*z^8); C Projective Curve over Finite Field of size 5 defined by -x^9 + y^2*z^7 - x*z^8 sage: C.arithmetic_genus() 28 sage: C.genus() 4 """ d = self.defining_polynomial().total_degree() return int((d-1)*(d-2)/2) def divisor_of_function(self, r): """ Return the divisor of a function on a curve. INPUT: r is a rational function on X OUTPUT: - ``list`` - The divisor of r represented as a list of coefficients and points. (TODO: This will change to a more structural output in the future.) EXAMPLES:: sage: FF = FiniteField(5) sage: P2 = ProjectiveSpace(2, FF, names = ['x','y','z']) sage: R = P2.coordinate_ring() sage: x, y, z = R.gens() sage: f = y^2*z^7 - x^9 - x*z^8 sage: C = Curve(f) sage: K = FractionField(R) sage: r = 1/x sage: C.divisor_of_function(r) # todo: not implemented !!!! [[-1, (0, 0, 1)]] sage: r = 1/x^3 sage: C.divisor_of_function(r) # todo: not implemented !!!! [[-3, (0, 0, 1)]] """ F = self.base_ring() f = self.defining_polynomial() x, y, z = f.parent().gens() pnts = self.rational_points() divf = [] for P in pnts: if P[2] != F(0): # What is the '5' in this line and the 'r()' in the next??? lcs = self.local_coordinates(P,5) ldg = degree_lowest_rational_function(r(lcs[0],lcs[1]),z) if ldg[0] != 0: divf.append([ldg[0],P]) return divf def local_coordinates(self, pt, n): r""" Return local coordinates to precision n at the given point. Behaviour is flaky - some choices of `n` are worst that others. INPUT: - ``pt`` - an F-rational point on X which is not a point of ramification for the projection (x,y) - x. - ``n`` - the number of terms desired OUTPUT: x = x0 + t y = y0 + power series in t EXAMPLES:: sage: FF = FiniteField(5) sage: P2 = ProjectiveSpace(2, FF, names = ['x','y','z']) sage: x, y, z = P2.coordinate_ring().gens() sage: C = Curve(y^2*z^7-x^9-x*z^8) sage: pt = C([2,3,1]) sage: C.local_coordinates(pt,9) # todo: not implemented !!!! 
[2 + t, 3 + 3*t^2 + t^3 + 3*t^4 + 3*t^6 + 3*t^7 + t^8 + 2*t^9 + 3*t^11 + 3*t^12] """ f = self.defining_polynomial() R = f.parent() F = self.base_ring() p = F.characteristic() x0 = F(pt[0]) y0 = F(pt[1]) astr = ["a"+str(i) for i in range(1,2*n)] x,y = R.gens() R0 = PolynomialRing(F,2*n+2,names = [str(x),str(y),"t"]+astr) vars0 = R0.gens() t = vars0[2] yt = y0*t**0 + add([vars0[i]*t**(i-2) for i in range(3,2*n+2)]) xt = x0+t ft = f(xt,yt) S = singular S.eval('ring s = '+str(p)+','+str(R0.gens())+',lp;') S.eval('poly f = '+str(ft)) cmd = 'matrix c = coeffs ('+str(ft)+',t)' S.eval(cmd) N = int(S.eval('size(c)')) b = ["c["+str(i)+",1]," for i in range(2,N/2-4)] b = ''.join(b) b = b[:len(b)-1] #to cut off the trailing comma cmd = 'ideal I = '+b S.eval(cmd) c = S.eval('slimgb(I)') d = c.split("=") d = d[1:] d[len(d)-1] += "\n" e = [x[:x.index("\n")] for x in d] vals = [] for x in e: for y in vars0: if str(y) in x: if len(x.replace(str(y),"")) != 0: i = x.find("-") if i>0: vals.append([eval(x[1:i]),x[:i],F(eval(x[i+1:]))]) i = x.find("+") if i>0: vals.append([eval(x[1:i]),x[:i],-F(eval(x[i+1:]))]) else: vals.append([eval(str(y)[1:]),str(y),F(0)]) vals.sort() k = len(vals) v = [x0+t,y0+add([vals[i][2]*t**(i+1) for i in range(k)])] return v def plot(self, *args, **kwds): """ Plot the real points of an affine patch of this projective plane curve. 
INPUT: - ``self`` - an affine plane curve - ``patch`` - (optional) the affine patch to be plotted; if not specified, the patch corresponding to the last projective coordinate being nonzero - ``*args`` - optional tuples (variable, minimum, maximum) for plotting dimensions - ``**kwds`` - optional keyword arguments passed on to ``implicit_plot`` EXAMPLES: A cuspidal curve:: sage: R.<x, y, z> = QQ[] sage: C = Curve(x^3 - y^2*z) sage: C.plot() Graphics object consisting of 1 graphics primitive The other affine patches of the same curve:: sage: C.plot(patch=0) Graphics object consisting of 1 graphics primitive sage: C.plot(patch=1) Graphics object consisting of 1 graphics primitive An elliptic curve:: sage: E = EllipticCurve('101a') sage: C = Curve(E) sage: C.plot() Graphics object consisting of 1 graphics primitive sage: C.plot(patch=0) Graphics object consisting of 1 graphics primitive sage: C.plot(patch=1) Graphics object consisting of 1 graphics primitive A hyperelliptic curve:: sage: P.<x> = QQ[] sage: f = 4*x^5 - 30*x^3 + 45*x - 22 sage: C = HyperellipticCurve(f) sage: C.plot() Graphics object consisting of 1 graphics primitive sage: C.plot(patch=0) Graphics object consisting of 1 graphics primitive sage: C.plot(patch=1) Graphics object consisting of 1 graphics primitive """ # if user hasn't specified a favourite affine patch, take the # one avoiding "infinity", i.e. the one corresponding to the # last projective coordinate being nonzero patch = kwds.pop('patch', self.ngens() - 1) from constructor import Curve C = Curve(self.affine_patch(patch)) return C.plot(*args, **kwds) def is_singular(C): r""" Returns whether the curve is singular or not. 
EXAMPLES: Over `\QQ`:: sage: F = QQ sage: P2.<X,Y,Z> = ProjectiveSpace(F,2) sage: C = Curve(X^3-Y^2*Z) sage: C.is_singular() True Over a finite field:: sage: F = GF(19) sage: P2.<X,Y,Z> = ProjectiveSpace(F,2) sage: C = Curve(X^3+Y^3+Z^3) sage: C.is_singular() False sage: D = Curve(X^4-X*Z^3) sage: D.is_singular() True sage: E = Curve(X^5+19*Y^5+Z^5) sage: E.is_singular() True sage: E = Curve(X^5+9*Y^5+Z^5) sage: E.is_singular() False Over `\CC`:: sage: F = CC sage: P2.<X,Y,Z> = ProjectiveSpace(F,2) sage: C = Curve(X) sage: C.is_singular() False sage: D = Curve(Y^2*Z-X^3) sage: D.is_singular() True sage: E = Curve(Y^2*Z-X^3+Z^3) sage: E.is_singular() False Showing that ticket #12187 is fixed:: sage: F.<X,Y,Z> = GF(2)[] sage: G = Curve(X^2+Y*Z) sage: G.is_singular() False """ poly = C.defining_polynomial() return poly.parent().ideal(poly.gradient()+[poly]).dimension()> 0 class ProjectiveCurve_finite_field(ProjectiveCurve_generic): def rational_points_iterator(self): r""" Return a generator object for the rational points on this curve. INPUT: - ``self`` -- a projective curve OUTPUT: A generator of all the rational points on the curve defined over its base field. EXAMPLE:: sage: F = GF(37) sage: P2.<X,Y,Z> = ProjectiveSpace(F,2) sage: C = Curve(X^7+Y*X*Z^5*55+Y^7*12) sage: len(list(C.rational_points_iterator())) 37 :: sage: F = GF(2) sage: P2.<X,Y,Z> = ProjectiveSpace(F,2) sage: C = Curve(X*Y*Z) sage: a = C.rational_points_iterator() sage: next(a) (1 : 0 : 0) sage: next(a) (0 : 1 : 0) sage: next(a) (1 : 1 : 0) sage: next(a) (0 : 0 : 1) sage: next(a) (1 : 0 : 1) sage: next(a) (0 : 1 : 1) sage: next(a) Traceback (most recent call last): ... 
StopIteration :: sage: F = GF(3^2,'a') sage: P2.<X,Y,Z> = ProjectiveSpace(F,2) sage: C = Curve(X^3+5*Y^2*Z-33*X*Y*X) sage: b = C.rational_points_iterator() sage: next(b) (0 : 1 : 0) sage: next(b) (0 : 0 : 1) sage: next(b) (2*a + 2 : a : 1) sage: next(b) (2 : a + 1 : 1) sage: next(b) (a + 1 : 2*a + 1 : 1) sage: next(b) (1 : 2 : 1) sage: next(b) (2*a + 2 : 2*a : 1) sage: next(b) (2 : 2*a + 2 : 1) sage: next(b) (a + 1 : a + 2 : 1) sage: next(b) (1 : 1 : 1) sage: next(b) Traceback (most recent call last): ... StopIteration """ g = self.defining_polynomial() K = g.parent().base_ring() from sage.rings.polynomial.all import PolynomialRing R = PolynomialRing(K,'X') X = R.gen() one = K.one() zero = K.zero() # the point with Z = 0 = Y try: t = self.point([one,zero,zero]) yield(t) except TypeError: pass # points with Z = 0, Y = 1 g10 = R(g(X,one,zero)) if g10.is_zero(): for x in K: yield(self.point([x,one,zero])) else: for x in g10.roots(multiplicities=False): yield(self.point([x,one,zero])) # points with Z = 1 for y in K: gy1 = R(g(X,y,one)) if gy1.is_zero(): for x in K: yield(self.point([x,y,one])) else: for x in gy1.roots(multiplicities=False): yield(self.point([x,y,one])) def rational_points(self, algorithm="enum", sort=True): r""" Return the rational points on this curve computed via enumeration. INPUT: - ``algorithm`` (string, default: 'enum') -- the algorithm to use. Currently this is ignored. - ``sort`` (boolean, default ``True``) -- whether the output points should be sorted. If False, the order of the output is non-deterministic. OUTPUT: A list of all the rational points on the curve defined over its base field, possibly sorted. .. note:: This is a slow Python-level implementation. 
EXAMPLES:: sage: F = GF(7) sage: P2.<X,Y,Z> = ProjectiveSpace(F,2) sage: C = Curve(X^3+Y^3-Z^3) sage: C.rational_points() [(0 : 1 : 1), (0 : 2 : 1), (0 : 4 : 1), (1 : 0 : 1), (2 : 0 : 1), (3 : 1 : 0), (4 : 0 : 1), (5 : 1 : 0), (6 : 1 : 0)] :: sage: F = GF(1237) sage: P2.<X,Y,Z> = ProjectiveSpace(F,2) sage: C = Curve(X^7+7*Y^6*Z+Z^4*X^2*Y*89) sage: len(C.rational_points()) 1237 :: sage: F = GF(2^6,'a') sage: P2.<X,Y,Z> = ProjectiveSpace(F,2) sage: C = Curve(X^5+11*X*Y*Z^3 + X^2*Y^3 - 13*Y^2*Z^3) sage: len(C.rational_points()) 104 :: sage: R.<x,y,z> = GF(2)[] sage: f = x^3*y + y^3*z + x*z^3 sage: C = Curve(f); pts = C.rational_points() sage: pts [(0 : 0 : 1), (0 : 1 : 0), (1 : 0 : 0)] """ points = list(self.rational_points_iterator()) if sort: points.sort() return points class ProjectiveCurve_prime_finite_field(ProjectiveCurve_finite_field): def _points_via_singular(self, sort=True): r""" Return all rational points on this curve, computed using Singular's Brill-Noether implementation. INPUT: - ``sort`` - bool (default: True), if True return the point list sorted. If False, returns the points in the order computed by Singular. EXAMPLE:: sage: x, y, z = PolynomialRing(GF(5), 3, 'xyz').gens() sage: f = y^2*z^7 - x^9 - x*z^8 sage: C = Curve(f); C Projective Curve over Finite Field of size 5 defined by -x^9 + y^2*z^7 - x*z^8 sage: C._points_via_singular() [(0 : 0 : 1), (0 : 1 : 0), (2 : 2 : 1), (2 : 3 : 1), (3 : 1 : 1), (3 : 4 : 1)] sage: C._points_via_singular(sort=False) #random [(0 : 1 : 0), (3 : 1 : 1), (3 : 4 : 1), (2 : 2 : 1), (0 : 0 : 1), (2 : 3 : 1)] .. note:: The Brill-Noether package does not always work (i.e., the 'bn' algorithm. When it fails a RuntimeError exception is raised. 
""" f = self.defining_polynomial()._singular_() singular = f.parent() singular.lib('brnoeth') try: X1 = f.Adj_div() except (TypeError, RuntimeError) as s: raise RuntimeError(str(s) + "\n\n ** Unable to use the\ Brill-Noether Singular package to\ compute all points (see above).") X2 = singular.NSplaces(1, X1) R = X2[5][1][1] singular.set_ring(R) # We use sage_flattened_str_list since iterating through # the entire list through the sage/singular interface directly # would involve hundreds of calls to singular, and timing issues with # the expect interface could crop up. Also, this is vastly # faster (and more robust). v = singular('POINTS').sage_flattened_str_list() pnts = [self(int(v[3*i]), int(v[3*i+1]), int(v[3*i+2])) for i in range(len(v)//3)] # singular always dehomogenizes with respect to the last variable # so if this variable divides the curve equation, we need to add # points at infinity F = self.defining_polynomial() z = F.parent().gens()[-1] if z.divides(F): pnts += [self(1,a,0) for a in self.base_ring()] pnts += [self(0,1,0)] # remove multiple points pnts = list(set(pnts)) if sort: pnts.sort() return pnts def riemann_roch_basis(self, D): r""" Return a basis for the Riemann-Roch space corresponding to `D`. This uses Singular's Brill-Noether implementation. INPUT: - ``D`` - a divisor OUTPUT: A list of function field elements that form a basis of the Riemann-Roch space EXAMPLE:: sage: R.<x,y,z> = GF(2)[] sage: f = x^3*y + y^3*z + x*z^3 sage: C = Curve(f); pts = C.rational_points() sage: D = C.divisor([ (4, pts[0]), (4, pts[2]) ]) sage: C.riemann_roch_basis(D) [x/y, 1, z/y, z^2/y^2, z/x, z^2/(x*y)] :: sage: R.<x,y,z> = GF(5)[] sage: f = x^7 + y^7 + z^7 sage: C = Curve(f); pts = C.rational_points() sage: D = C.divisor([ (3, pts[0]), (-1,pts[1]), (10, pts[5]) ]) sage: C.riemann_roch_basis(D) [(-2*x + y)/(x + y), (-x + z)/(x + y)] .. NOTE:: Currently this only works over prime field and divisors supported on rational points. 
""" f = self.defining_polynomial()._singular_() singular = f.parent() singular.lib('brnoeth') try: X1 = f.Adj_div() except (TypeError, RuntimeError) as s: raise RuntimeError(str(s) + "\n\n ** Unable to use the Brill-Noether Singular package to compute all points (see above).") X2 = singular.NSplaces(1, X1) # retrieve list of all computed closed points (possibly of degree >1) v = X2[3].sage_flattened_str_list() # We use sage_flattened_str_list since iterating through # the entire list through the sage/singular interface directly # would involve hundreds of calls to singular, and timing issues with # the expect interface could crop up. Also, this is vastly # faster (and more robust). v = [ v[i].partition(',') for i in range(len(v)) ] pnts = [ ( int(v[i][0]), int(v[i][2])-1 ) for i in range(len(v))] # retrieve coordinates of rational points R = X2[5][1][1] singular.set_ring(R) v = singular('POINTS').sage_flattened_str_list() coords = [self(int(v[3*i]), int(v[3*i+1]), int(v[3*i+2])) for i in range(len(v)//3)] # build correct representation of D for singular Dsupport = D.support() Dcoeffs = [] for x in pnts: if x[0] == 1: Dcoeffs.append(D.coefficient(coords[x[1]])) else: Dcoeffs.append(0) Dstr = str(tuple(Dcoeffs)) G = singular(','.join([str(x) for x in Dcoeffs]), type='intvec') # call singular's brill noether routine and return T = X2[1][2] T.set_ring() LG = G.BrillNoether(X2) LG = [X.split(',\n') for X in LG.sage_structured_str_list()] x,y,z = self.ambient_space().coordinate_ring().gens() vars = {'x':x, 'y':y, 'z':z} V = [(sage_eval(a, vars)/sage_eval(b, vars)) for a, b in LG] return V def rational_points(self, algorithm="enum", sort=True): r""" INPUT: - ``algorithm`` - string: - ``'enum'`` - straightforward enumeration - ``'bn'`` - via Singular's brnoeth package. 
EXAMPLE:: sage: x, y, z = PolynomialRing(GF(5), 3, 'xyz').gens() sage: f = y^2*z^7 - x^9 - x*z^8 sage: C = Curve(f); C Projective Curve over Finite Field of size 5 defined by -x^9 + y^2*z^7 - x*z^8 sage: C.rational_points() [(0 : 0 : 1), (0 : 1 : 0), (2 : 2 : 1), (2 : 3 : 1), (3 : 1 : 1), (3 : 4 : 1)] sage: C = Curve(x - y + z) sage: C.rational_points() [(0 : 1 : 1), (1 : 1 : 0), (1 : 2 : 1), (2 : 3 : 1), (3 : 4 : 1), (4 : 0 : 1)] sage: C = Curve(x*z+z^2) sage: C.rational_points('all') [(0 : 1 : 0), (1 : 0 : 0), (1 : 1 : 0), (2 : 1 : 0), (3 : 1 : 0), (4 : 0 : 1), (4 : 1 : 0), (4 : 1 : 1), (4 : 2 : 1), (4 : 3 : 1), (4 : 4 : 1)] .. note:: The Brill-Noether package does not always work (i.e., the 'bn' algorithm. When it fails a RuntimeError exception is raised. """ if algorithm == "enum": return ProjectiveCurve_finite_field.rational_points(self, algorithm="enum", sort=sort) elif algorithm == "bn": return self._points_via_singular(sort=sort) elif algorithm == "all": S_enum = self.rational_points(algorithm = "enum") S_bn = self.rational_points(algorithm = "bn") if S_enum != S_bn: raise RuntimeError("Bug in rational_points -- different\ algorithms give different answers for\ curve %s!"%self) return S_enum else: raise ValueError("No algorithm '%s' known"%algorithm) def Hasse_bounds(q, genus=1): r""" Return the Hasse-Weil bounds for the cardinality of a nonsingular curve defined over `\GF{q}` of given ``genus``. INPUT: - ``q`` (int) -- a prime power - ``genus`` (int, default 1) -- a non-negative integer, OUTPUT: (tuple) The Hasse bounds (lb,ub) for the cardinality of a curve of genus ``genus`` defined over `\GF{q}`. EXAMPLES:: sage: Hasse_bounds(2) (1, 5) sage: Hasse_bounds(next_prime(10^30)) (999999999999998000000000000058, 1000000000000002000000000000058) """ if genus==1: rq = (4*q).isqrt() else: rq = (4*(genus**2)*q).isqrt() return (q+1-rq,q+1+rq)
32.17232
134
0.484374
from sage.interfaces.all import singular from sage.misc.all import add, sage_eval from sage.rings.all import degree_lowest_rational_function from sage.schemes.projective.projective_space import is_ProjectiveSpace from curve import Curve_generic_projective class ProjectiveSpaceCurve_generic(Curve_generic_projective): def _repr_type(self): return "Projective Space" def __init__(self, A, X): if not is_ProjectiveSpace(A): raise TypeError("A (=%s) must be a projective space"%A) Curve_generic_projective.__init__(self, A, X) d = self.dimension() if d != 1: raise ValueError("defining equations (=%s) define a scheme of dimension %s != 1"%(X,d)) class ProjectiveCurve_generic(Curve_generic_projective): def __init__(self, A, f): if not (is_ProjectiveSpace(A) and A.dimension != 2): raise TypeError("Argument A (= %s) must be a projective plane."%A) Curve_generic_projective.__init__(self, A, [f]) def _repr_type(self): return "Projective" def arithmetic_genus(self): d = self.defining_polynomial().total_degree() return int((d-1)*(d-2)/2) def divisor_of_function(self, r): F = self.base_ring() f = self.defining_polynomial() x, y, z = f.parent().gens() pnts = self.rational_points() divf = [] for P in pnts: if P[2] != F(0): lcs = self.local_coordinates(P,5) ldg = degree_lowest_rational_function(r(lcs[0],lcs[1]),z) if ldg[0] != 0: divf.append([ldg[0],P]) return divf def local_coordinates(self, pt, n): f = self.defining_polynomial() R = f.parent() F = self.base_ring() p = F.characteristic() x0 = F(pt[0]) y0 = F(pt[1]) astr = ["a"+str(i) for i in range(1,2*n)] x,y = R.gens() R0 = PolynomialRing(F,2*n+2,names = [str(x),str(y),"t"]+astr) vars0 = R0.gens() t = vars0[2] yt = y0*t**0 + add([vars0[i]*t**(i-2) for i in range(3,2*n+2)]) xt = x0+t ft = f(xt,yt) S = singular S.eval('ring s = '+str(p)+','+str(R0.gens())+',lp;') S.eval('poly f = '+str(ft)) cmd = 'matrix c = coeffs ('+str(ft)+',t)' S.eval(cmd) N = int(S.eval('size(c)')) b = ["c["+str(i)+",1]," for i in range(2,N/2-4)] b = ''.join(b) b 
= b[:len(b)-1] cmd = 'ideal I = '+b S.eval(cmd) c = S.eval('slimgb(I)') d = c.split("=") d = d[1:] d[len(d)-1] += "\n" e = [x[:x.index("\n")] for x in d] vals = [] for x in e: for y in vars0: if str(y) in x: if len(x.replace(str(y),"")) != 0: i = x.find("-") if i>0: vals.append([eval(x[1:i]),x[:i],F(eval(x[i+1:]))]) i = x.find("+") if i>0: vals.append([eval(x[1:i]),x[:i],-F(eval(x[i+1:]))]) else: vals.append([eval(str(y)[1:]),str(y),F(0)]) vals.sort() k = len(vals) v = [x0+t,y0+add([vals[i][2]*t**(i+1) for i in range(k)])] return v def plot(self, *args, **kwds): # one avoiding "infinity", i.e. the one corresponding to the # last projective coordinate being nonzero patch = kwds.pop('patch', self.ngens() - 1) from constructor import Curve C = Curve(self.affine_patch(patch)) return C.plot(*args, **kwds) def is_singular(C): poly = C.defining_polynomial() return poly.parent().ideal(poly.gradient()+[poly]).dimension()> 0 class ProjectiveCurve_finite_field(ProjectiveCurve_generic): def rational_points_iterator(self): g = self.defining_polynomial() K = g.parent().base_ring() from sage.rings.polynomial.all import PolynomialRing R = PolynomialRing(K,'X') X = R.gen() one = K.one() zero = K.zero() # the point with Z = 0 = Y try: t = self.point([one,zero,zero]) yield(t) except TypeError: pass # points with Z = 0, Y = 1 g10 = R(g(X,one,zero)) if g10.is_zero(): for x in K: yield(self.point([x,one,zero])) else: for x in g10.roots(multiplicities=False): yield(self.point([x,one,zero])) # points with Z = 1 for y in K: gy1 = R(g(X,y,one)) if gy1.is_zero(): for x in K: yield(self.point([x,y,one])) else: for x in gy1.roots(multiplicities=False): yield(self.point([x,y,one])) def rational_points(self, algorithm="enum", sort=True): points = list(self.rational_points_iterator()) if sort: points.sort() return points class ProjectiveCurve_prime_finite_field(ProjectiveCurve_finite_field): def _points_via_singular(self, sort=True): f = self.defining_polynomial()._singular_() singular = 
f.parent() singular.lib('brnoeth') try: X1 = f.Adj_div() except (TypeError, RuntimeError) as s: raise RuntimeError(str(s) + "\n\n ** Unable to use the\ Brill-Noether Singular package to\ compute all points (see above).") X2 = singular.NSplaces(1, X1) R = X2[5][1][1] singular.set_ring(R) # We use sage_flattened_str_list since iterating through # the entire list through the sage/singular interface directly # would involve hundreds of calls to singular, and timing issues with # the expect interface could crop up. Also, this is vastly # faster (and more robust). v = singular('POINTS').sage_flattened_str_list() pnts = [self(int(v[3*i]), int(v[3*i+1]), int(v[3*i+2])) for i in range(len(v)//3)] # singular always dehomogenizes with respect to the last variable # so if this variable divides the curve equation, we need to add # points at infinity F = self.defining_polynomial() z = F.parent().gens()[-1] if z.divides(F): pnts += [self(1,a,0) for a in self.base_ring()] pnts += [self(0,1,0)] # remove multiple points pnts = list(set(pnts)) if sort: pnts.sort() return pnts def riemann_roch_basis(self, D): f = self.defining_polynomial()._singular_() singular = f.parent() singular.lib('brnoeth') try: X1 = f.Adj_div() except (TypeError, RuntimeError) as s: raise RuntimeError(str(s) + "\n\n ** Unable to use the Brill-Noether Singular package to compute all points (see above).") X2 = singular.NSplaces(1, X1) # retrieve list of all computed closed points (possibly of degree >1) v = X2[3].sage_flattened_str_list() # We use sage_flattened_str_list since iterating through # the entire list through the sage/singular interface directly # would involve hundreds of calls to singular, and timing issues with # the expect interface could crop up. Also, this is vastly # faster (and more robust). 
v = [ v[i].partition(',') for i in range(len(v)) ] pnts = [ ( int(v[i][0]), int(v[i][2])-1 ) for i in range(len(v))] # retrieve coordinates of rational points R = X2[5][1][1] singular.set_ring(R) v = singular('POINTS').sage_flattened_str_list() coords = [self(int(v[3*i]), int(v[3*i+1]), int(v[3*i+2])) for i in range(len(v)//3)] # build correct representation of D for singular Dsupport = D.support() Dcoeffs = [] for x in pnts: if x[0] == 1: Dcoeffs.append(D.coefficient(coords[x[1]])) else: Dcoeffs.append(0) Dstr = str(tuple(Dcoeffs)) G = singular(','.join([str(x) for x in Dcoeffs]), type='intvec') # call singular's brill noether routine and return T = X2[1][2] T.set_ring() LG = G.BrillNoether(X2) LG = [X.split(',\n') for X in LG.sage_structured_str_list()] x,y,z = self.ambient_space().coordinate_ring().gens() vars = {'x':x, 'y':y, 'z':z} V = [(sage_eval(a, vars)/sage_eval(b, vars)) for a, b in LG] return V def rational_points(self, algorithm="enum", sort=True): if algorithm == "enum": return ProjectiveCurve_finite_field.rational_points(self, algorithm="enum", sort=sort) elif algorithm == "bn": return self._points_via_singular(sort=sort) elif algorithm == "all": S_enum = self.rational_points(algorithm = "enum") S_bn = self.rational_points(algorithm = "bn") if S_enum != S_bn: raise RuntimeError("Bug in rational_points -- different\ algorithms give different answers for\ curve %s!"%self) return S_enum else: raise ValueError("No algorithm '%s' known"%algorithm) def Hasse_bounds(q, genus=1): if genus==1: rq = (4*q).isqrt() else: rq = (4*(genus**2)*q).isqrt() return (q+1-rq,q+1+rq)
true
true
f70ea37895116b00771e48f3d3576aa318e30354
5,209
py
Python
HelloWorldOpenCV.py
jhbrito/HelloWorlds
7e2247ca7f312a516ce6a5054913d59e2f1de0f9
[ "MIT" ]
6
2020-02-14T15:18:14.000Z
2022-02-22T15:40:33.000Z
HelloWorldOpenCV.py
jhbrito/HelloWorlds
7e2247ca7f312a516ce6a5054913d59e2f1de0f9
[ "MIT" ]
null
null
null
HelloWorldOpenCV.py
jhbrito/HelloWorlds
7e2247ca7f312a516ce6a5054913d59e2f1de0f9
[ "MIT" ]
null
null
null
# Demo with a few examples of using OpenCV functions and UI # packages: opencv-python # uses lena: https://upload.wikimedia.org/wikipedia/en/7/7d/Lenna_%28test_image%29.png import numpy as np import cv2 print("Hello World OpenCV") print("OpenCV Version:", cv2.__version__) image = np.ones((256, 256), dtype="uint8") image = image * 127 image[0:128, 0:128] = 0 image[128:, 128:] = 255 cv2.imshow("Image", image) cv2.waitKey(0) # Opening and Viewing an Image import os.path if os.path.isfile('lena.png'): print("Test Image File exist") else: print("Test Image File does not exist; downloading...") import urllib.request as urllib_request urllib_request.urlretrieve("https://upload.wikimedia.org/wikipedia/en/7/7d/Lenna_%28test_image%29.png", "lena.png") image = cv2.imread("./lena.png") cv2.imshow("Image", image) cv2.waitKey(0) cv2.destroyAllWindows() rgb_image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB) cv2.imshow("Image RGB", rgb_image) cv2.waitKey(0) cv2.destroyAllWindows() def viewImage(image, name_of_window): cv2.namedWindow(name_of_window, cv2.WINDOW_AUTOSIZE) cv2.imshow(name_of_window, image) cv2.waitKey(0) cv2.destroyAllWindows() viewImage(image, "Lena") # Edit pixels edited = image.copy() edited[200:390, 200:360, 0] = 255 viewImage(edited, "Lena edited") # Cropping cropped = image[200:390, 200:360] viewImage(cropped, "Lena cropped") # Resizing scale_percent = 10 # percent of original size width = int(image.shape[1] * scale_percent / 100) height = int(image.shape[0] * scale_percent / 100) dim = (width, height) resized = cv2.resize(image, dim, interpolation=cv2.INTER_AREA) viewImage(resized, "Lena resized to {}%".format(scale_percent)) # Drawing a Rectangle output = image.copy() cv2.rectangle(output, (200, 200), (360, 390), (255, 0, 0), 10) viewImage(output, "Lena with a rectangle") # Drawing a line cv2.line(output, (256, 390), (256, 512), (0, 0, 255), 5) viewImage(output, "Lena with a line") # Writing on an image cv2.putText(output, "Lena", (360, 390), 
cv2.FONT_HERSHEY_SIMPLEX, 2, (0, 255, 0), 2) viewImage(output, "Lena with text") # Saving an image cv2.imwrite("./output.jpg", output) # Blurring/Smoothing blurred = cv2.GaussianBlur(image, (15, 15), 0) viewImage(blurred, "Lena blurred") # Rotating (h, w, d) = image.shape center = (w // 2, h // 2) rot = 45 M = cv2.getRotationMatrix2D(center, rot, 1.0) rotated = cv2.warpAffine(image, M, (w, h)) viewImage(rotated, "Lena rotated by {} degrees".format(rot)) # Blend alpha_slider_max = 100 def on_trackbar_weight(val): alpha = val / alpha_slider_max beta = (1.0 - alpha) blend = cv2.addWeighted(image, alpha, rotated, beta, 0.0) cv2.imshow('Lena blended', blend) cv2.namedWindow('Lena blended') trackbar_name = 'Alpha 0 - {}'.format(alpha_slider_max) cv2.createTrackbar(trackbar_name, 'Lena blended', 50, alpha_slider_max, on_trackbar_weight) on_trackbar_weight(50) cv2.waitKey() cv2.destroyWindow('Lena blended') # Grayscaling gray_image = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY) viewImage(gray_image, "Lena gray-scale") # Thresholding threshold_slider_max = 255 threshold = 200 ret, threshold_image = cv2.threshold(gray_image, threshold, 255, 0) def on_trackbar_threshold(val): threshold = val ret, threshold_image = cv2.threshold(gray_image, threshold, 255, 0) cv2.imshow("Lena thresholded", threshold_image) cv2.namedWindow("Lena thresholded") trackbar_name = "Threshold 0 - {}".format(threshold_slider_max) cv2.createTrackbar(trackbar_name, "Lena thresholded", threshold, threshold_slider_max, on_trackbar_threshold) on_trackbar_threshold(threshold) cv2.waitKey() cv2.destroyWindow("Lena thresholded") # Contours contours, hierarchy = cv2.findContours(threshold_image, cv2.RETR_TREE, cv2.CHAIN_APPROX_NONE) image_with_contours = image.copy() cv2.drawContours(image_with_contours, contours, -1, (255, 0, 0), 1) viewImage(image_with_contours, "Lena contours") # Face Detection face_cascade = cv2.CascadeClassifier('venv\Lib\site-packages\cv2\data\haarcascade_frontalface_default.xml') faces = 
face_cascade.detectMultiScale(gray_image) print("Lena with {} faces detected".format(len(faces))) image_faces = image.copy() for (x, y, w, h) in faces: cv2.rectangle(image_faces, (x, y), (x + w, y + h), (0, 255, 0), 2) viewImage(image_faces, "Lena with {} faces detected".format(len(faces))) def display_box(im, bbox): n_boxes = len(bbox) for j_box in range(n_boxes): for j in range(4): cv2.line(im, (int(bbox[j_box][j][0]), int(bbox[j_box][j][1])), (int(bbox[j_box][(j + 1) % 4][0]), int(bbox[j_box][(j + 1) % 4][1])), (255, 0, 0), 3) # Display results cv2.imshow("Results", im) inputImage = cv2.imread("qrcode.jpg") qrDecoder = cv2.QRCodeDetector() data, bbox, rectifiedImage = qrDecoder.detectAndDecode(inputImage) if len(data) > 0: print("Decoded Data : {}".format(data)) display_box(inputImage, bbox) rectifiedImage = np.uint8(rectifiedImage) cv2.imshow("Rectified QRCode", rectifiedImage) else: print("QR Code not detected") cv2.imshow("Results", inputImage) cv2.waitKey(0) cv2.destroyAllWindows()
29.429379
119
0.710501
import numpy as np import cv2 print("Hello World OpenCV") print("OpenCV Version:", cv2.__version__) image = np.ones((256, 256), dtype="uint8") image = image * 127 image[0:128, 0:128] = 0 image[128:, 128:] = 255 cv2.imshow("Image", image) cv2.waitKey(0) import os.path if os.path.isfile('lena.png'): print("Test Image File exist") else: print("Test Image File does not exist; downloading...") import urllib.request as urllib_request urllib_request.urlretrieve("https://upload.wikimedia.org/wikipedia/en/7/7d/Lenna_%28test_image%29.png", "lena.png") image = cv2.imread("./lena.png") cv2.imshow("Image", image) cv2.waitKey(0) cv2.destroyAllWindows() rgb_image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB) cv2.imshow("Image RGB", rgb_image) cv2.waitKey(0) cv2.destroyAllWindows() def viewImage(image, name_of_window): cv2.namedWindow(name_of_window, cv2.WINDOW_AUTOSIZE) cv2.imshow(name_of_window, image) cv2.waitKey(0) cv2.destroyAllWindows() viewImage(image, "Lena") edited = image.copy() edited[200:390, 200:360, 0] = 255 viewImage(edited, "Lena edited") cropped = image[200:390, 200:360] viewImage(cropped, "Lena cropped") scale_percent = 10 width = int(image.shape[1] * scale_percent / 100) height = int(image.shape[0] * scale_percent / 100) dim = (width, height) resized = cv2.resize(image, dim, interpolation=cv2.INTER_AREA) viewImage(resized, "Lena resized to {}%".format(scale_percent)) output = image.copy() cv2.rectangle(output, (200, 200), (360, 390), (255, 0, 0), 10) viewImage(output, "Lena with a rectangle") cv2.line(output, (256, 390), (256, 512), (0, 0, 255), 5) viewImage(output, "Lena with a line") cv2.putText(output, "Lena", (360, 390), cv2.FONT_HERSHEY_SIMPLEX, 2, (0, 255, 0), 2) viewImage(output, "Lena with text") cv2.imwrite("./output.jpg", output) blurred = cv2.GaussianBlur(image, (15, 15), 0) viewImage(blurred, "Lena blurred") (h, w, d) = image.shape center = (w // 2, h // 2) rot = 45 M = cv2.getRotationMatrix2D(center, rot, 1.0) rotated = cv2.warpAffine(image, M, (w, h)) 
viewImage(rotated, "Lena rotated by {} degrees".format(rot)) alpha_slider_max = 100 def on_trackbar_weight(val): alpha = val / alpha_slider_max beta = (1.0 - alpha) blend = cv2.addWeighted(image, alpha, rotated, beta, 0.0) cv2.imshow('Lena blended', blend) cv2.namedWindow('Lena blended') trackbar_name = 'Alpha 0 - {}'.format(alpha_slider_max) cv2.createTrackbar(trackbar_name, 'Lena blended', 50, alpha_slider_max, on_trackbar_weight) on_trackbar_weight(50) cv2.waitKey() cv2.destroyWindow('Lena blended') gray_image = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY) viewImage(gray_image, "Lena gray-scale") threshold_slider_max = 255 threshold = 200 ret, threshold_image = cv2.threshold(gray_image, threshold, 255, 0) def on_trackbar_threshold(val): threshold = val ret, threshold_image = cv2.threshold(gray_image, threshold, 255, 0) cv2.imshow("Lena thresholded", threshold_image) cv2.namedWindow("Lena thresholded") trackbar_name = "Threshold 0 - {}".format(threshold_slider_max) cv2.createTrackbar(trackbar_name, "Lena thresholded", threshold, threshold_slider_max, on_trackbar_threshold) on_trackbar_threshold(threshold) cv2.waitKey() cv2.destroyWindow("Lena thresholded") contours, hierarchy = cv2.findContours(threshold_image, cv2.RETR_TREE, cv2.CHAIN_APPROX_NONE) image_with_contours = image.copy() cv2.drawContours(image_with_contours, contours, -1, (255, 0, 0), 1) viewImage(image_with_contours, "Lena contours") face_cascade = cv2.CascadeClassifier('venv\Lib\site-packages\cv2\data\haarcascade_frontalface_default.xml') faces = face_cascade.detectMultiScale(gray_image) print("Lena with {} faces detected".format(len(faces))) image_faces = image.copy() for (x, y, w, h) in faces: cv2.rectangle(image_faces, (x, y), (x + w, y + h), (0, 255, 0), 2) viewImage(image_faces, "Lena with {} faces detected".format(len(faces))) def display_box(im, bbox): n_boxes = len(bbox) for j_box in range(n_boxes): for j in range(4): cv2.line(im, (int(bbox[j_box][j][0]), int(bbox[j_box][j][1])), 
(int(bbox[j_box][(j + 1) % 4][0]), int(bbox[j_box][(j + 1) % 4][1])), (255, 0, 0), 3) cv2.imshow("Results", im) inputImage = cv2.imread("qrcode.jpg") qrDecoder = cv2.QRCodeDetector() data, bbox, rectifiedImage = qrDecoder.detectAndDecode(inputImage) if len(data) > 0: print("Decoded Data : {}".format(data)) display_box(inputImage, bbox) rectifiedImage = np.uint8(rectifiedImage) cv2.imshow("Rectified QRCode", rectifiedImage) else: print("QR Code not detected") cv2.imshow("Results", inputImage) cv2.waitKey(0) cv2.destroyAllWindows()
true
true
f70ea378aaf48671277dab43c5ea3b4387cbb5a8
7,007
py
Python
vitrage/datasources/zabbix/driver.py
openstack/vitrage
95b33dbf39b040e23915882a2879c87aec239ca9
[ "Apache-2.0" ]
89
2015-09-30T21:42:17.000Z
2022-03-28T16:31:19.000Z
vitrage/datasources/zabbix/driver.py
openstack/vitrage
95b33dbf39b040e23915882a2879c87aec239ca9
[ "Apache-2.0" ]
4
2015-12-13T13:06:53.000Z
2016-01-03T19:51:28.000Z
vitrage/datasources/zabbix/driver.py
openstack/vitrage
95b33dbf39b040e23915882a2879c87aec239ca9
[ "Apache-2.0" ]
43
2015-11-04T15:54:27.000Z
2021-12-10T14:24:03.000Z
# Copyright 2016 - Nokia # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from collections import namedtuple from oslo_config import cfg from oslo_log import log from oslo_utils import importutils as utils from vitrage.common.constants import DatasourceAction from vitrage.common.constants import DatasourceOpts as DSOpts from vitrage.common.constants import DatasourceProperties as DSProps from vitrage.datasources.alarm_driver_base import AlarmDriverBase from vitrage.datasources.zabbix.properties import ZabbixProperties as ZProps from vitrage.datasources.zabbix.properties import ZabbixTriggerStatus \ as TriggerStatus from vitrage.datasources.zabbix.properties import ZabbixTriggerValue \ as TriggerValue from vitrage.datasources.zabbix import ZABBIX_DATASOURCE from vitrage.utils import file as file_utils CONF = cfg.CONF LOG = log.getLogger(__name__) class ZabbixDriver(AlarmDriverBase): ServiceKey = namedtuple('ServiceKey', ['hostname', 'triggerid']) conf_map = None def __init__(self): super(ZabbixDriver, self).__init__() if not ZabbixDriver.conf_map: ZabbixDriver.conf_map =\ ZabbixDriver._configuration_mapping() self._client = None def zabbix_client_login(self): if not CONF.zabbix.user: LOG.warning('Zabbix user is not defined') if not CONF.zabbix.password: LOG.warning('Zabbix password is not defined') if not CONF.zabbix.url: LOG.warning('Zabbix url is not defined') try: if not self._client: self._client = utils.import_object( 'pyzabbix.ZabbixAPI', CONF.zabbix.url) self._client.login( 
CONF.zabbix.user, CONF.zabbix.password) except Exception: LOG.exception('pyzabbix.ZabbixAPI error occurred.') self._client = None def _vitrage_type(self): return ZABBIX_DATASOURCE def _alarm_key(self, alarm): return self.ServiceKey(hostname=alarm[ZProps.RESOURCE_NAME], triggerid=alarm[ZProps.TRIGGER_ID]) def _get_alarms(self): self.zabbix_client_login() if not self._client: return [] alarms = [] valid_hosts = (host for host in self._client.host.get(output=[ZProps.HOST]) if host[ZProps.HOST] in ZabbixDriver.conf_map) for host in valid_hosts: self._get_triggers_per_host(host, alarms) return alarms def _get_triggers_per_host(self, host, alarms): host_id = host[ZProps.HOST_ID] triggers = self._client.trigger.get(hostids=host_id, expandDescription=True) triggers_rawtexts = self._get_triggers_rawtexts(host_id) for trigger in triggers: trigger[ZProps.ZABBIX_RESOURCE_NAME] = host[ZProps.HOST] trigger_id = trigger[ZProps.TRIGGER_ID] trigger[ZProps.RAWTEXT] = triggers_rawtexts[trigger_id] alarms.append(trigger) def _get_triggers_rawtexts(self, host_id): output = [ZProps.TRIGGER_ID, ZProps.DESCRIPTION] triggers = self._client.trigger.get(hostids=host_id, output=output) return {trigger[ZProps.TRIGGER_ID]: trigger[ZProps.DESCRIPTION] for trigger in triggers} def _enrich_alarms(self, alarms): """Enrich zabbix alarm using zabbix configuration file converting Zabbix host name to Vitrage resource type and name :param alarms: Zabbix alarm :return: enriched alarm """ for alarm in alarms: alarm[ZProps.VALUE] = self._get_value(alarm) zabbix_host = alarm[ZProps.ZABBIX_RESOURCE_NAME] vitrage_host = ZabbixDriver.conf_map[zabbix_host] alarm[ZProps.RESOURCE_TYPE] = vitrage_host[ZProps.RESOURCE_TYPE] alarm[ZProps.RESOURCE_NAME] = vitrage_host[ZProps.RESOURCE_NAME] def _is_erroneous(self, alarm): return alarm and \ alarm[ZProps.VALUE] == TriggerValue.PROBLEM def _status_changed(self, new_alarm, old_alarm): if not (new_alarm and old_alarm): return False if new_alarm[ZProps.VALUE] != 
old_alarm[ZProps.VALUE]: return True if new_alarm[ZProps.VALUE] == TriggerValue.PROBLEM: priority_changed = \ new_alarm[ZProps.PRIORITY] != old_alarm[ZProps.PRIORITY] description_changed = \ new_alarm[ZProps.DESCRIPTION] != old_alarm[ZProps.DESCRIPTION] return priority_changed or description_changed def _is_valid(self, alarm): return alarm[ZProps.RESOURCE_TYPE] is not None and \ alarm[ZProps.RESOURCE_NAME] is not None @staticmethod def _get_value(alarm): if alarm[ZProps.STATUS] == TriggerStatus.DISABLED: return TriggerValue.OK return alarm[ZProps.VALUE] @staticmethod def _configuration_mapping(): try: zabbix_config_file = CONF.zabbix[DSOpts.CONFIG_FILE] zabbix_config = file_utils.load_yaml_file(zabbix_config_file) zabbix_config_elements = zabbix_config[ZABBIX_DATASOURCE] mappings = {} for element_config in zabbix_config_elements: mappings[element_config['zabbix_host']] = { ZProps.RESOURCE_TYPE: element_config['type'], ZProps.RESOURCE_NAME: element_config['name'] } return mappings except Exception: LOG.exception('Failed in init.') return {} def enrich_event(self, event, event_type): event[DSProps.EVENT_TYPE] = event_type if ZabbixDriver.conf_map: zabbix_host = event[ZProps.HOST] event[ZProps.ZABBIX_RESOURCE_NAME] = zabbix_host v_resource = ZabbixDriver.conf_map[zabbix_host] event[ZProps.RESOURCE_NAME] = v_resource[ZProps.RESOURCE_NAME] event[ZProps.RESOURCE_TYPE] = v_resource[ZProps.RESOURCE_TYPE] return ZabbixDriver.make_pickleable([event], ZABBIX_DATASOURCE, DatasourceAction.UPDATE)[0] @staticmethod def get_event_types(): return ['zabbix.alarm.ok', 'zabbix.alarm.problem'] @staticmethod def should_delete_outdated_entities(): return True
35.75
78
0.657057
from collections import namedtuple from oslo_config import cfg from oslo_log import log from oslo_utils import importutils as utils from vitrage.common.constants import DatasourceAction from vitrage.common.constants import DatasourceOpts as DSOpts from vitrage.common.constants import DatasourceProperties as DSProps from vitrage.datasources.alarm_driver_base import AlarmDriverBase from vitrage.datasources.zabbix.properties import ZabbixProperties as ZProps from vitrage.datasources.zabbix.properties import ZabbixTriggerStatus \ as TriggerStatus from vitrage.datasources.zabbix.properties import ZabbixTriggerValue \ as TriggerValue from vitrage.datasources.zabbix import ZABBIX_DATASOURCE from vitrage.utils import file as file_utils CONF = cfg.CONF LOG = log.getLogger(__name__) class ZabbixDriver(AlarmDriverBase): ServiceKey = namedtuple('ServiceKey', ['hostname', 'triggerid']) conf_map = None def __init__(self): super(ZabbixDriver, self).__init__() if not ZabbixDriver.conf_map: ZabbixDriver.conf_map =\ ZabbixDriver._configuration_mapping() self._client = None def zabbix_client_login(self): if not CONF.zabbix.user: LOG.warning('Zabbix user is not defined') if not CONF.zabbix.password: LOG.warning('Zabbix password is not defined') if not CONF.zabbix.url: LOG.warning('Zabbix url is not defined') try: if not self._client: self._client = utils.import_object( 'pyzabbix.ZabbixAPI', CONF.zabbix.url) self._client.login( CONF.zabbix.user, CONF.zabbix.password) except Exception: LOG.exception('pyzabbix.ZabbixAPI error occurred.') self._client = None def _vitrage_type(self): return ZABBIX_DATASOURCE def _alarm_key(self, alarm): return self.ServiceKey(hostname=alarm[ZProps.RESOURCE_NAME], triggerid=alarm[ZProps.TRIGGER_ID]) def _get_alarms(self): self.zabbix_client_login() if not self._client: return [] alarms = [] valid_hosts = (host for host in self._client.host.get(output=[ZProps.HOST]) if host[ZProps.HOST] in ZabbixDriver.conf_map) for host in valid_hosts: 
self._get_triggers_per_host(host, alarms) return alarms def _get_triggers_per_host(self, host, alarms): host_id = host[ZProps.HOST_ID] triggers = self._client.trigger.get(hostids=host_id, expandDescription=True) triggers_rawtexts = self._get_triggers_rawtexts(host_id) for trigger in triggers: trigger[ZProps.ZABBIX_RESOURCE_NAME] = host[ZProps.HOST] trigger_id = trigger[ZProps.TRIGGER_ID] trigger[ZProps.RAWTEXT] = triggers_rawtexts[trigger_id] alarms.append(trigger) def _get_triggers_rawtexts(self, host_id): output = [ZProps.TRIGGER_ID, ZProps.DESCRIPTION] triggers = self._client.trigger.get(hostids=host_id, output=output) return {trigger[ZProps.TRIGGER_ID]: trigger[ZProps.DESCRIPTION] for trigger in triggers} def _enrich_alarms(self, alarms): for alarm in alarms: alarm[ZProps.VALUE] = self._get_value(alarm) zabbix_host = alarm[ZProps.ZABBIX_RESOURCE_NAME] vitrage_host = ZabbixDriver.conf_map[zabbix_host] alarm[ZProps.RESOURCE_TYPE] = vitrage_host[ZProps.RESOURCE_TYPE] alarm[ZProps.RESOURCE_NAME] = vitrage_host[ZProps.RESOURCE_NAME] def _is_erroneous(self, alarm): return alarm and \ alarm[ZProps.VALUE] == TriggerValue.PROBLEM def _status_changed(self, new_alarm, old_alarm): if not (new_alarm and old_alarm): return False if new_alarm[ZProps.VALUE] != old_alarm[ZProps.VALUE]: return True if new_alarm[ZProps.VALUE] == TriggerValue.PROBLEM: priority_changed = \ new_alarm[ZProps.PRIORITY] != old_alarm[ZProps.PRIORITY] description_changed = \ new_alarm[ZProps.DESCRIPTION] != old_alarm[ZProps.DESCRIPTION] return priority_changed or description_changed def _is_valid(self, alarm): return alarm[ZProps.RESOURCE_TYPE] is not None and \ alarm[ZProps.RESOURCE_NAME] is not None @staticmethod def _get_value(alarm): if alarm[ZProps.STATUS] == TriggerStatus.DISABLED: return TriggerValue.OK return alarm[ZProps.VALUE] @staticmethod def _configuration_mapping(): try: zabbix_config_file = CONF.zabbix[DSOpts.CONFIG_FILE] zabbix_config = file_utils.load_yaml_file(zabbix_config_file) 
zabbix_config_elements = zabbix_config[ZABBIX_DATASOURCE] mappings = {} for element_config in zabbix_config_elements: mappings[element_config['zabbix_host']] = { ZProps.RESOURCE_TYPE: element_config['type'], ZProps.RESOURCE_NAME: element_config['name'] } return mappings except Exception: LOG.exception('Failed in init.') return {} def enrich_event(self, event, event_type): event[DSProps.EVENT_TYPE] = event_type if ZabbixDriver.conf_map: zabbix_host = event[ZProps.HOST] event[ZProps.ZABBIX_RESOURCE_NAME] = zabbix_host v_resource = ZabbixDriver.conf_map[zabbix_host] event[ZProps.RESOURCE_NAME] = v_resource[ZProps.RESOURCE_NAME] event[ZProps.RESOURCE_TYPE] = v_resource[ZProps.RESOURCE_TYPE] return ZabbixDriver.make_pickleable([event], ZABBIX_DATASOURCE, DatasourceAction.UPDATE)[0] @staticmethod def get_event_types(): return ['zabbix.alarm.ok', 'zabbix.alarm.problem'] @staticmethod def should_delete_outdated_entities(): return True
true
true
f70ea3d37cea2ad796cf374b9a4ab9e73fc5c35c
1,057
py
Python
migrations/versions/5c80010c853a_posts_table.py
ChanForPres/Social-Blogging-App
525c4588dfa50780a8b7c067111c101ade4e20f2
[ "MIT" ]
null
null
null
migrations/versions/5c80010c853a_posts_table.py
ChanForPres/Social-Blogging-App
525c4588dfa50780a8b7c067111c101ade4e20f2
[ "MIT" ]
null
null
null
migrations/versions/5c80010c853a_posts_table.py
ChanForPres/Social-Blogging-App
525c4588dfa50780a8b7c067111c101ade4e20f2
[ "MIT" ]
null
null
null
"""posts table Revision ID: 5c80010c853a Revises: 6ca7139bbbf2 Create Date: 2018-06-25 17:18:29.165993 """ from alembic import op import sqlalchemy as sa # revision identifiers, used by Alembic. revision = '5c80010c853a' down_revision = '6ca7139bbbf2' branch_labels = None depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! ### op.create_table('post', sa.Column('id', sa.Integer(), nullable=False), sa.Column('body', sa.String(length=140), nullable=True), sa.Column('timestamp', sa.DateTime(), nullable=True), sa.Column('user_id', sa.Integer(), nullable=True), sa.ForeignKeyConstraint(['user_id'], ['user.id'], ), sa.PrimaryKeyConstraint('id') ) op.create_index(op.f('ix_post_timestamp'), 'post', ['timestamp'], unique=False) # ### end Alembic commands ### def downgrade(): # ### commands auto generated by Alembic - please adjust! ### op.drop_index(op.f('ix_post_timestamp'), table_name='post') op.drop_table('post') # ### end Alembic commands ###
27.815789
83
0.676443
from alembic import op import sqlalchemy as sa revision = '5c80010c853a' down_revision = '6ca7139bbbf2' branch_labels = None depends_on = None def upgrade(): ), nullable=True), sa.Column('user_id', sa.Integer(), nullable=True), sa.ForeignKeyConstraint(['user_id'], ['user.id'], ), sa.PrimaryKeyConstraint('id') ) op.create_index(op.f('ix_post_timestamp'), 'post', ['timestamp'], unique=False)
true
true
f70ea4478d7916ef0eca7ea387f9da960ada87f9
27
py
Python
src/euler_python_package/euler_python/medium/p255.py
wilsonify/euler
5214b776175e6d76a7c6d8915d0e062d189d9b79
[ "MIT" ]
null
null
null
src/euler_python_package/euler_python/medium/p255.py
wilsonify/euler
5214b776175e6d76a7c6d8915d0e062d189d9b79
[ "MIT" ]
null
null
null
src/euler_python_package/euler_python/medium/p255.py
wilsonify/euler
5214b776175e6d76a7c6d8915d0e062d189d9b79
[ "MIT" ]
null
null
null
def problem255(): pass
9
17
0.62963
def problem255(): pass
true
true
f70ea613433f02ed837e0b8f877dedbe1826238c
3,471
py
Python
factory-ai-vision/EdgeSolution/modules/WebModule/backend/configs/api_router.py
piyushka17/azure-intelligent-edge-patterns
0d088899afb0022daa2ac434226824dba2c997c1
[ "MIT" ]
null
null
null
factory-ai-vision/EdgeSolution/modules/WebModule/backend/configs/api_router.py
piyushka17/azure-intelligent-edge-patterns
0d088899afb0022daa2ac434226824dba2c997c1
[ "MIT" ]
null
null
null
factory-ai-vision/EdgeSolution/modules/WebModule/backend/configs/api_router.py
piyushka17/azure-intelligent-edge-patterns
0d088899afb0022daa2ac434226824dba2c997c1
[ "MIT" ]
null
null
null
"""API router """ from django.conf.urls import url from django.urls import path from rest_framework.routers import DefaultRouter from vision_on_edge.azure_app_insight.api import views as app_insight_views from vision_on_edge.azure_parts.api import views as azure_part_views from vision_on_edge.azure_settings.api import views as azure_setting_views from vision_on_edge.azure_training.api import views as azure_training_views from vision_on_edge.azure_training_status.api import \ views as azure_training_status_views from vision_on_edge.cameras.api import util_views as camera_util_views from vision_on_edge.cameras.api import views from vision_on_edge.feedback.api import views as feedback_views from vision_on_edge.image_predictions.api import \ views as image_prediction_views from vision_on_edge.images.api import views as image_views from vision_on_edge.locations.api import views as location_views from vision_on_edge.notifications.api import views as notifications_views from vision_on_edge.relabeling.api import views as relabel_views from vision_on_edge.streams.api import views as stream_views router = DefaultRouter() router.trailing_slash = '/?' 
router.register('settings', azure_setting_views.SettingViewSet) router.register('cameras', views.CameraViewSet) router.register('parts', azure_part_views.PartViewSet) router.register('locations', location_views.LocationViewSet) router.register('image_predictions', image_prediction_views.ImagePredictionViewSet) router.register('projects', azure_training_views.ProjectViewSet) router.register('training_status', azure_training_status_views.TrainingStatusViewSet) router.register('tasks', azure_training_views.TaskViewSet) router.register('images', image_views.ImageViewSet) router.register('feedback', feedback_views.FeedbackViewSet) router.register('notifications', notifications_views.NotificationViewSet) router.register('images', image_views.ImageViewSet) urlpatterns = router.urls urlpatterns += [ url('streams/connect', stream_views.connect_stream), path('streams/<int:stream_id>/disconnect', stream_views.disconnect_stream), path('streams/<int:stream_id>/video_feed', stream_views.video_feed), path('streams/<int:stream_id>/capture', stream_views.capture), path('streams/<int:stream_id>/keep_alive', stream_views.keep_alive), path('projects/<int:project_id>/train', azure_training_views.train), path('projects/<int:project_id>/export', azure_training_views.export), path('projects/<int:project_id>/train_performance', azure_training_views.train_performance), path('projects/<int:project_id>/inference_video_feed', stream_views.inference_video_feed), path('projects/<int:project_id>/pull_cv_project', azure_training_views.pull_cv_project), path('projects/<int:project_id>/update_prob_threshold', azure_training_views.update_prob_threshold), path('projects/<int:project_id>/reset_project', azure_training_views.reset_project), path('projects/<int:project_id>/reset_camera', azure_training_views.project_reset_camera), path('projects/null/export', azure_training_views.export_null), path('relabel', relabel_views.upload_relabel_image), path('relabel/update', relabel_views.relabel_update), 
path('appinsight/key', app_insight_views.instrumentation_key), path('camera_utils/verify_rtsp', camera_util_views.verify_rtsp) ] app_name = "api"
47.547945
79
0.803803
from django.conf.urls import url from django.urls import path from rest_framework.routers import DefaultRouter from vision_on_edge.azure_app_insight.api import views as app_insight_views from vision_on_edge.azure_parts.api import views as azure_part_views from vision_on_edge.azure_settings.api import views as azure_setting_views from vision_on_edge.azure_training.api import views as azure_training_views from vision_on_edge.azure_training_status.api import \ views as azure_training_status_views from vision_on_edge.cameras.api import util_views as camera_util_views from vision_on_edge.cameras.api import views from vision_on_edge.feedback.api import views as feedback_views from vision_on_edge.image_predictions.api import \ views as image_prediction_views from vision_on_edge.images.api import views as image_views from vision_on_edge.locations.api import views as location_views from vision_on_edge.notifications.api import views as notifications_views from vision_on_edge.relabeling.api import views as relabel_views from vision_on_edge.streams.api import views as stream_views router = DefaultRouter() router.trailing_slash = '/?' 
router.register('settings', azure_setting_views.SettingViewSet) router.register('cameras', views.CameraViewSet) router.register('parts', azure_part_views.PartViewSet) router.register('locations', location_views.LocationViewSet) router.register('image_predictions', image_prediction_views.ImagePredictionViewSet) router.register('projects', azure_training_views.ProjectViewSet) router.register('training_status', azure_training_status_views.TrainingStatusViewSet) router.register('tasks', azure_training_views.TaskViewSet) router.register('images', image_views.ImageViewSet) router.register('feedback', feedback_views.FeedbackViewSet) router.register('notifications', notifications_views.NotificationViewSet) router.register('images', image_views.ImageViewSet) urlpatterns = router.urls urlpatterns += [ url('streams/connect', stream_views.connect_stream), path('streams/<int:stream_id>/disconnect', stream_views.disconnect_stream), path('streams/<int:stream_id>/video_feed', stream_views.video_feed), path('streams/<int:stream_id>/capture', stream_views.capture), path('streams/<int:stream_id>/keep_alive', stream_views.keep_alive), path('projects/<int:project_id>/train', azure_training_views.train), path('projects/<int:project_id>/export', azure_training_views.export), path('projects/<int:project_id>/train_performance', azure_training_views.train_performance), path('projects/<int:project_id>/inference_video_feed', stream_views.inference_video_feed), path('projects/<int:project_id>/pull_cv_project', azure_training_views.pull_cv_project), path('projects/<int:project_id>/update_prob_threshold', azure_training_views.update_prob_threshold), path('projects/<int:project_id>/reset_project', azure_training_views.reset_project), path('projects/<int:project_id>/reset_camera', azure_training_views.project_reset_camera), path('projects/null/export', azure_training_views.export_null), path('relabel', relabel_views.upload_relabel_image), path('relabel/update', relabel_views.relabel_update), 
path('appinsight/key', app_insight_views.instrumentation_key), path('camera_utils/verify_rtsp', camera_util_views.verify_rtsp) ] app_name = "api"
true
true
f70ea783739509cd8b2366a0314a6db0627abca1
6,569
py
Python
lib/sqlalchemy/orm/exc.py
petit87/sqlalchemy
67d674bd63ca36ac32b23f96e2b19e9dac6b0863
[ "MIT" ]
null
null
null
lib/sqlalchemy/orm/exc.py
petit87/sqlalchemy
67d674bd63ca36ac32b23f96e2b19e9dac6b0863
[ "MIT" ]
null
null
null
lib/sqlalchemy/orm/exc.py
petit87/sqlalchemy
67d674bd63ca36ac32b23f96e2b19e9dac6b0863
[ "MIT" ]
null
null
null
# orm/exc.py # Copyright (C) 2005-2022 the SQLAlchemy authors and contributors # <see AUTHORS file> # # This module is part of SQLAlchemy and is released under # the MIT License: https://www.opensource.org/licenses/mit-license.php """SQLAlchemy ORM exceptions.""" from __future__ import annotations from .. import exc as sa_exc from .. import util from ..exc import MultipleResultsFound # noqa from ..exc import NoResultFound # noqa NO_STATE = (AttributeError, KeyError) """Exception types that may be raised by instrumentation implementations.""" class StaleDataError(sa_exc.SQLAlchemyError): """An operation encountered database state that is unaccounted for. Conditions which cause this to happen include: * A flush may have attempted to update or delete rows and an unexpected number of rows were matched during the UPDATE or DELETE statement. Note that when version_id_col is used, rows in UPDATE or DELETE statements are also matched against the current known version identifier. * A mapped object with version_id_col was refreshed, and the version number coming back from the database does not match that of the object itself. * A object is detached from its parent object, however the object was previously attached to a different parent identity which was garbage collected, and a decision cannot be made if the new parent was really the most recent "parent". """ ConcurrentModificationError = StaleDataError class FlushError(sa_exc.SQLAlchemyError): """A invalid condition was detected during flush().""" class UnmappedError(sa_exc.InvalidRequestError): """Base for exceptions that involve expected mappings not present.""" class ObjectDereferencedError(sa_exc.SQLAlchemyError): """An operation cannot complete due to an object being garbage collected. 
""" class DetachedInstanceError(sa_exc.SQLAlchemyError): """An attempt to access unloaded attributes on a mapped instance that is detached.""" code = "bhk3" class UnmappedInstanceError(UnmappedError): """An mapping operation was requested for an unknown instance.""" @util.preload_module("sqlalchemy.orm.base") def __init__(self, obj, msg=None): base = util.preloaded.orm_base if not msg: try: base.class_mapper(type(obj)) name = _safe_cls_name(type(obj)) msg = ( "Class %r is mapped, but this instance lacks " "instrumentation. This occurs when the instance " "is created before sqlalchemy.orm.mapper(%s) " "was called." % (name, name) ) except UnmappedClassError: msg = _default_unmapped(type(obj)) if isinstance(obj, type): msg += ( "; was a class (%s) supplied where an instance was " "required?" % _safe_cls_name(obj) ) UnmappedError.__init__(self, msg) def __reduce__(self): return self.__class__, (None, self.args[0]) class UnmappedClassError(UnmappedError): """An mapping operation was requested for an unknown class.""" def __init__(self, cls, msg=None): if not msg: msg = _default_unmapped(cls) UnmappedError.__init__(self, msg) def __reduce__(self): return self.__class__, (None, self.args[0]) class ObjectDeletedError(sa_exc.InvalidRequestError): """A refresh operation failed to retrieve the database row corresponding to an object's known primary key identity. A refresh operation proceeds when an expired attribute is accessed on an object, or when :meth:`_query.Query.get` is used to retrieve an object which is, upon retrieval, detected as expired. A SELECT is emitted for the target row based on primary key; if no row is returned, this exception is raised. The true meaning of this exception is simply that no row exists for the primary key identifier associated with a persistent object. The row may have been deleted, or in some cases the primary key updated to a new value, outside of the ORM's management of the target object. 
""" @util.preload_module("sqlalchemy.orm.base") def __init__(self, state, msg=None): base = util.preloaded.orm_base if not msg: msg = ( "Instance '%s' has been deleted, or its " "row is otherwise not present." % base.state_str(state) ) sa_exc.InvalidRequestError.__init__(self, msg) def __reduce__(self): return self.__class__, (None, self.args[0]) class UnmappedColumnError(sa_exc.InvalidRequestError): """Mapping operation was requested on an unknown column.""" class LoaderStrategyException(sa_exc.InvalidRequestError): """A loader strategy for an attribute does not exist.""" def __init__( self, applied_to_property_type, requesting_property, applies_to, actual_strategy_type, strategy_key, ): if actual_strategy_type is None: sa_exc.InvalidRequestError.__init__( self, "Can't find strategy %s for %s" % (strategy_key, requesting_property), ) else: sa_exc.InvalidRequestError.__init__( self, 'Can\'t apply "%s" strategy to property "%s", ' 'which is a "%s"; this loader strategy is intended ' 'to be used with a "%s".' % ( util.clsname_as_plain_name(actual_strategy_type), requesting_property, util.clsname_as_plain_name(applied_to_property_type), util.clsname_as_plain_name(applies_to), ), ) def _safe_cls_name(cls): try: cls_name = ".".join((cls.__module__, cls.__name__)) except AttributeError: cls_name = getattr(cls, "__name__", None) if cls_name is None: cls_name = repr(cls) return cls_name @util.preload_module("sqlalchemy.orm.base") def _default_unmapped(cls): base = util.preloaded.orm_base try: mappers = base.manager_of_class(cls).mappers except (TypeError,) + NO_STATE: mappers = {} name = _safe_cls_name(cls) if not mappers: return "Class '%s' is not mapped" % name
31.581731
76
0.642716
from __future__ import annotations from .. import exc as sa_exc from .. import util from ..exc import MultipleResultsFound from ..exc import NoResultFound NO_STATE = (AttributeError, KeyError) class StaleDataError(sa_exc.SQLAlchemyError): ConcurrentModificationError = StaleDataError class FlushError(sa_exc.SQLAlchemyError): class UnmappedError(sa_exc.InvalidRequestError): class ObjectDereferencedError(sa_exc.SQLAlchemyError): class DetachedInstanceError(sa_exc.SQLAlchemyError): code = "bhk3" class UnmappedInstanceError(UnmappedError): @util.preload_module("sqlalchemy.orm.base") def __init__(self, obj, msg=None): base = util.preloaded.orm_base if not msg: try: base.class_mapper(type(obj)) name = _safe_cls_name(type(obj)) msg = ( "Class %r is mapped, but this instance lacks " "instrumentation. This occurs when the instance " "is created before sqlalchemy.orm.mapper(%s) " "was called." % (name, name) ) except UnmappedClassError: msg = _default_unmapped(type(obj)) if isinstance(obj, type): msg += ( "; was a class (%s) supplied where an instance was " "required?" % _safe_cls_name(obj) ) UnmappedError.__init__(self, msg) def __reduce__(self): return self.__class__, (None, self.args[0]) class UnmappedClassError(UnmappedError): def __init__(self, cls, msg=None): if not msg: msg = _default_unmapped(cls) UnmappedError.__init__(self, msg) def __reduce__(self): return self.__class__, (None, self.args[0]) class ObjectDeletedError(sa_exc.InvalidRequestError): @util.preload_module("sqlalchemy.orm.base") def __init__(self, state, msg=None): base = util.preloaded.orm_base if not msg: msg = ( "Instance '%s' has been deleted, or its " "row is otherwise not present." 
% base.state_str(state) ) sa_exc.InvalidRequestError.__init__(self, msg) def __reduce__(self): return self.__class__, (None, self.args[0]) class UnmappedColumnError(sa_exc.InvalidRequestError): class LoaderStrategyException(sa_exc.InvalidRequestError): def __init__( self, applied_to_property_type, requesting_property, applies_to, actual_strategy_type, strategy_key, ): if actual_strategy_type is None: sa_exc.InvalidRequestError.__init__( self, "Can't find strategy %s for %s" % (strategy_key, requesting_property), ) else: sa_exc.InvalidRequestError.__init__( self, 'Can\'t apply "%s" strategy to property "%s", ' 'which is a "%s"; this loader strategy is intended ' 'to be used with a "%s".' % ( util.clsname_as_plain_name(actual_strategy_type), requesting_property, util.clsname_as_plain_name(applied_to_property_type), util.clsname_as_plain_name(applies_to), ), ) def _safe_cls_name(cls): try: cls_name = ".".join((cls.__module__, cls.__name__)) except AttributeError: cls_name = getattr(cls, "__name__", None) if cls_name is None: cls_name = repr(cls) return cls_name @util.preload_module("sqlalchemy.orm.base") def _default_unmapped(cls): base = util.preloaded.orm_base try: mappers = base.manager_of_class(cls).mappers except (TypeError,) + NO_STATE: mappers = {} name = _safe_cls_name(cls) if not mappers: return "Class '%s' is not mapped" % name
true
true
f70ea910deb64c851f94887e577c45916aab7cf2
12,652
py
Python
doc/conf.py
gitter-badger/SoCo
65977466057748ea522a6d8b7f2a649091485a07
[ "MIT" ]
1
2019-03-09T14:23:48.000Z
2019-03-09T14:23:48.000Z
doc/conf.py
gitter-badger/SoCo
65977466057748ea522a6d8b7f2a649091485a07
[ "MIT" ]
null
null
null
doc/conf.py
gitter-badger/SoCo
65977466057748ea522a6d8b7f2a649091485a07
[ "MIT" ]
null
null
null
#!/usr/bin/env python3 # -*- coding: utf-8 -*- # # soco documentation build configuration file, created by # sphinx-quickstart on Mon Sep 14 08:03:37 2015. # # This file is execfile()d with the current directory set to its # containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. import sys import os import shlex sys.path.insert(0, os.path.abspath('..')) import soco # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. # sys.path.insert(0, os.path.abspath('.')) # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. needs_sphinx = '1.3' # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ 'sphinx.ext.autodoc', 'sphinx.ext.autosummary', 'sphinx.ext.extlinks', 'sphinx.ext.inheritance_diagram', 'sphinx.ext.intersphinx', 'sphinx.ext.todo', 'sphinx.ext.viewcode', 'sphinx.ext.napoleon', ] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: # source_suffix = ['.rst', '.md'] source_suffix = '.rst' # The encoding of source files. # source_encoding = 'utf-8-sig' # The master toctree document. master_doc = 'index' # General information about the project. project = 'SoCo' copyright = '2015, The SoCo Team' author = "`The SoCo Team" # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. 
# # The short X.Y version. version = soco.__version__ # The full version, including alpha/beta/rc tags. release = soco.__version__ # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. language = 'en' # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: # today = '' # Else, today_fmt is used as the format for a strftime call. # today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = ['_build'] # The reST default role (used for this markup: `text`) to use for all # documents. default_role = 'any' # If true, '()' will be appended to :func: etc. cross-reference text. # add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). # add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. # show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. modindex_common_prefix = ['soco.', 'soco.music_services.'] # If true, keep warnings as "system message" paragraphs in the built documents. keep_warnings = True # If true, `todo` and `todoList` produce output, else they produce nothing. todo_include_todos = True # Allow auto links into the Python and Requests docs intersphinx_mapping = { 'python': ('https://docs.python.org/3', None), 'requests': ('http://www.python-requests.org/en/latest/', None) } # Shortcuts to Github Issues etc. 
Use them like this: # :issue:`123` (which will generate a link to issue 123) extlinks = { 'issue': ('https://github.com/SoCo/SoCo/issues/%s', '#'), 'PR': ('https://github.com/SoCo/SoCo/pull/%s', '#') } # Document members by default, and in source order. This allows the stub files # in the api directory to be much shorter. autodoc_default_flags = ['members'] autodoc_member_order = 'bysource' # Concatenate the class and __init__ docstrings autoclass_content = 'both' # Nicer inheritance graphs for RTD theme. NB the image map does not rescale # properly, so we have had to add some javascript to handle it. See # _templates and _static inheritance_node_attrs = dict( fontsize=14, height=0.75, color='dodgerblue', style='rounded', ) inheritance_graph_attrs = dict( rankdir="LR", size='""', ) # -- Options for HTML output ---------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. html_theme = 'sphinx_rtd_theme' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. # html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. # html_theme_path = [] # The name for this set of Sphinx documents. If None, it defaults to # "<project> v<release> documentation". # html_title = None # A shorter title for the navigation bar. Default is the same as html_title. # html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. # html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. # html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. 
They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] # Add any extra paths that contain custom files (such as robots.txt or # .htaccess) here, relative to this directory. These files are copied # directly to the root of the documentation. # html_extra_path = [] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. # html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. # html_use_smartypants = True # Custom sidebar templates, maps document names to template names. # html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. # html_additional_pages = {} # If false, no module index is generated. # html_domain_indices = True # If false, no index is generated. # html_use_index = True # If true, the index is split into individual pages for each letter. # html_split_index = False # If true, links to the reST sources are added to the pages. # html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. # html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. # html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a <link> tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. # html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). # html_file_suffix = None # Language to be used for generating the HTML full-text search index. 
# Sphinx supports the following languages: # 'da', 'de', 'en', 'es', 'fi', 'fr', 'h', 'it', 'ja' # 'nl', 'no', 'pt', 'ro', 'r', 'sv', 'tr' # html_search_language = 'en' # A dictionary with options for the search language support, empty by default. # Now only 'ja' uses this config value # html_search_options = {'type': 'default'} # The name of a javascript file (relative to the configuration directory) that # implements a search results scorer. If empty, the default will be used. # html_search_scorer = 'scorer.js' # Output file base name for HTML help builder. htmlhelp_basename = 'socodoc' # -- Options for LaTeX output --------------------------------------------- latex_elements = { # The paper size ('letterpaper' or 'a4paper'). # 'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). # 'pointsize': '10pt', # Additional stuff for the LaTeX preamble. # 'preamble': '', # Latex figure (float) alignment # 'figure_align': 'htbp', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ (master_doc, 'soco.tex', 'soco Documentation', 'Author', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of # the title page. # latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. # latex_use_parts = False # If true, show page references after internal links. # latex_show_pagerefs = False # If true, show URL addresses after external links. # latex_show_urls = False # Documents to append as an appendix to all manuals. # latex_appendices = [] # If false, no module index is generated. # latex_domain_indices = True # -- Options for manual page output --------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). 
man_pages = [ (master_doc, 'soco', 'soco Documentation', [author], 1) ] # If true, show URL addresses after external links. # man_show_urls = False # -- Options for Texinfo output ------------------------------------------- # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ (master_doc, 'soco', 'soco Documentation', author, 'soco', 'One line description of project.', 'Miscellaneous'), ] # Documents to append as an appendix to all manuals. # texinfo_appendices = [] # If false, no module index is generated. # texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. # texinfo_show_urls = 'footnote' # If true, do not generate a @detailmenu in the "Top" node's menu. # texinfo_no_detailmenu = False # -- Options for Epub output ---------------------------------------------- # Bibliographic Dublin Core info. epub_title = project epub_author = author epub_publisher = author epub_copyright = copyright # The basename for the epub file. It defaults to the project name. # epub_basename = project # The HTML theme for the epub output. Since the default themes are not # optimized for small screen space, using the same theme for HTML and epub # output is usually not wise. This defaults to 'epub', a theme designed to # save visual space. # epub_theme = 'epub' # The language of the text. It defaults to the language option # or 'en' if the language is not set. # epub_language = '' # The scheme of the identifier. Typical schemes are ISBN or URL. # epub_scheme = '' # The unique identifier of the text. This can be a ISBN number # or the project homepage. # epub_identifier = '' # A unique identification for the text. # epub_uid = '' # A tuple containing the cover image and cover page html template filenames. # epub_cover = () # A sequence of (type, uri, title) tuples for the guide element of content.opf. 
# epub_guide = () # HTML files that should be inserted before the pages created by sphinx. # The format is a list of tuples containing the path and title. # epub_pre_files = [] # HTML files shat should be inserted after the pages created by sphinx. # The format is a list of tuples containing the path and title. # epub_post_files = [] # A list of files that should not be packed into the epub file. epub_exclude_files = ['search.html'] # The depth of the table of contents in toc.ncx. # epub_tocdepth = 3 # Allow duplicate toc entries. # epub_tocdup = True # Choose between 'default' and 'includehidden'. # epub_tocscope = 'default' # Fix unsupported image types using the Pillow. # epub_fix_images = False # Scale large images. # epub_max_image_width = 0 # How to display URL addresses: 'footnote', 'no', or 'inline'. # epub_show_urls = 'inline' # If false, no index is generated. # epub_use_index = True
31.788945
79
0.710639
import sys import os import shlex sys.path.insert(0, os.path.abspath('..')) import soco needs_sphinx = '1.3' extensions = [ 'sphinx.ext.autodoc', 'sphinx.ext.autosummary', 'sphinx.ext.extlinks', 'sphinx.ext.inheritance_diagram', 'sphinx.ext.intersphinx', 'sphinx.ext.todo', 'sphinx.ext.viewcode', 'sphinx.ext.napoleon', ] templates_path = ['_templates'] source_suffix = '.rst' master_doc = 'index' project = 'SoCo' copyright = '2015, The SoCo Team' author = "`The SoCo Team" # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. version = soco.__version__ # The full version, including alpha/beta/rc tags. release = soco.__version__ # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. language = 'en' # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: # today = '' # Else, today_fmt is used as the format for a strftime call. # today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = ['_build'] # The reST default role (used for this markup: `text`) to use for all # documents. default_role = 'any' # If true, '()' will be appended to :func: etc. cross-reference text. # add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). # add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. # show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. 
modindex_common_prefix = ['soco.', 'soco.music_services.'] # If true, keep warnings as "system message" paragraphs in the built documents. keep_warnings = True # If true, `todo` and `todoList` produce output, else they produce nothing. todo_include_todos = True # Allow auto links into the Python and Requests docs intersphinx_mapping = { 'python': ('https://docs.python.org/3', None), 'requests': ('http://www.python-requests.org/en/latest/', None) } # Shortcuts to Github Issues etc. Use them like this: # :issue:`123` (which will generate a link to issue 123) extlinks = { 'issue': ('https://github.com/SoCo/SoCo/issues/%s', ' 'PR': ('https://github.com/SoCo/SoCo/pull/%s', ' } # Document members by default, and in source order. This allows the stub files # in the api directory to be much shorter. autodoc_default_flags = ['members'] autodoc_member_order = 'bysource' # Concatenate the class and __init__ docstrings autoclass_content = 'both' # Nicer inheritance graphs for RTD theme. NB the image map does not rescale # properly, so we have had to add some javascript to handle it. See # _templates and _static inheritance_node_attrs = dict( fontsize=14, height=0.75, color='dodgerblue', style='rounded', ) inheritance_graph_attrs = dict( rankdir="LR", size='""', ) # -- Options for HTML output ---------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. html_theme = 'sphinx_rtd_theme' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. # html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. # html_theme_path = [] # The name for this set of Sphinx documents. If None, it defaults to # "<project> v<release> documentation". # html_title = None # A shorter title for the navigation bar. Default is the same as html_title. 
# html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. # html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. # html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] # Add any extra paths that contain custom files (such as robots.txt or # .htaccess) here, relative to this directory. These files are copied # directly to the root of the documentation. # html_extra_path = [] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. # html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. # html_use_smartypants = True # Custom sidebar templates, maps document names to template names. # html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. # html_additional_pages = {} # If false, no module index is generated. # html_domain_indices = True # If false, no index is generated. # html_use_index = True # If true, the index is split into individual pages for each letter. # html_split_index = False # If true, links to the reST sources are added to the pages. # html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. # html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. # html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a <link> tag referring to it. 
The value of this option must be the # base URL from which the finished HTML is served. # html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). # html_file_suffix = None # Language to be used for generating the HTML full-text search index. # Sphinx supports the following languages: # 'da', 'de', 'en', 'es', 'fi', 'fr', 'h', 'it', 'ja' # 'nl', 'no', 'pt', 'ro', 'r', 'sv', 'tr' # html_search_language = 'en' # A dictionary with options for the search language support, empty by default. # Now only 'ja' uses this config value # html_search_options = {'type': 'default'} # The name of a javascript file (relative to the configuration directory) that # implements a search results scorer. If empty, the default will be used. # html_search_scorer = 'scorer.js' # Output file base name for HTML help builder. htmlhelp_basename = 'socodoc' # -- Options for LaTeX output --------------------------------------------- latex_elements = { # The paper size ('letterpaper' or 'a4paper'). # 'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). # 'pointsize': '10pt', # Additional stuff for the LaTeX preamble. # 'preamble': '', # Latex figure (float) alignment # 'figure_align': 'htbp', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ (master_doc, 'soco.tex', 'soco Documentation', 'Author', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of # the title page. # latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. # latex_use_parts = False # If true, show page references after internal links. # latex_show_pagerefs = False # If true, show URL addresses after external links. # latex_show_urls = False # Documents to append as an appendix to all manuals. # latex_appendices = [] # If false, no module index is generated. 
# latex_domain_indices = True # -- Options for manual page output --------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ (master_doc, 'soco', 'soco Documentation', [author], 1) ] # If true, show URL addresses after external links. # man_show_urls = False # -- Options for Texinfo output ------------------------------------------- # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ (master_doc, 'soco', 'soco Documentation', author, 'soco', 'One line description of project.', 'Miscellaneous'), ] # Documents to append as an appendix to all manuals. # texinfo_appendices = [] # If false, no module index is generated. # texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. # texinfo_show_urls = 'footnote' # If true, do not generate a @detailmenu in the "Top" node's menu. epub_title = project epub_author = author epub_publisher = author epub_copyright = copyright epub_exclude_files = ['search.html']
true
true
f70eaaf12a9fe00936523a8b54e0345d5ae2821e
768
py
Python
src/deployable/__main__.py
cpuabuse/py-deployment-automation
aea0c48ac4c5a81f2e027c984ab65f911ad29d0d
[ "0BSD" ]
1
2020-02-23T22:35:28.000Z
2020-02-23T22:35:28.000Z
src/deployable/__main__.py
cpuabuse/py-deployment-automation
aea0c48ac4c5a81f2e027c984ab65f911ad29d0d
[ "0BSD" ]
null
null
null
src/deployable/__main__.py
cpuabuse/py-deployment-automation
aea0c48ac4c5a81f2e027c984ab65f911ad29d0d
[ "0BSD" ]
null
null
null
#!/usr/bin/env python3 """ Main module for the deployable project. """ # Bootstrap to be able to perform absolute imports as standalone code if __name__ == "__main__": from absolute_import import absolute_import absolute_import(file=__file__, name=__name__, path=__path__) # Normal imports from argparse import ArgumentParser, RawDescriptionHelpFormatter from deployable.defaults.args import description, epilog from typing import Any, Tuple def get_args() -> Tuple[Any]: """ Retrieves arguments from command line. """ # Create parser and groups parser = ArgumentParser(description=description, epilog=epilog, formatter_class=RawDescriptionHelpFormatter) def main() -> None: """ Entrypoint. """ # Call main method if __name__ == "__main__": main()
21.942857
109
0.763021
if __name__ == "__main__": from absolute_import import absolute_import absolute_import(file=__file__, name=__name__, path=__path__) from argparse import ArgumentParser, RawDescriptionHelpFormatter from deployable.defaults.args import description, epilog from typing import Any, Tuple def get_args() -> Tuple[Any]: parser = ArgumentParser(description=description, epilog=epilog, formatter_class=RawDescriptionHelpFormatter) def main() -> None: if __name__ == "__main__": main()
true
true
f70eab155736800811a7f28862b37c609211be8a
4,889
py
Python
src/cartpole.py
rish-16/gym-navmaze
cc21d730ec6ab1e96a4a1a8f602a5bbb951d2929
[ "MIT" ]
1
2021-06-10T13:40:09.000Z
2021-06-10T13:40:09.000Z
src/cartpole.py
rish-16/gym-navmaze
cc21d730ec6ab1e96a4a1a8f602a5bbb951d2929
[ "MIT" ]
null
null
null
src/cartpole.py
rish-16/gym-navmaze
cc21d730ec6ab1e96a4a1a8f602a5bbb951d2929
[ "MIT" ]
1
2021-06-10T13:40:11.000Z
2021-06-10T13:40:11.000Z
import numpy as np from collections import deque import pickle import torch from utils import collect_trajectories, random_sample from PPO import PPO import matplotlib.pyplot as plt from parallelEnv import * import gym env = gym.make("CartPole-v0") env.reset() env.seed(2) obs_dim = env.observation_space.shape[0] n_actions = env.action_space.n act_dist = [0 for i in range(n_actions)] def train(episode, env_name): gamma = .99 gae_lambda = 0.95 use_gae = True beta = .01 cliprange = 0.1 best_score = -np.inf goal_score = 195.0 ep_length = [] nenvs = 1 rollout_length = 200 minibatches = 10*8 nbatch = nenvs * rollout_length optimization_epochs = 4 device=torch.device("cuda:0" if torch.cuda.is_available() else "cpu") envs = parallelEnv(env_name, nenvs, seed=1234) agent = PPO(state_size=obs_dim, action_size=n_actions, seed=0, hidden_layers=[64,64], lr_policy=1e-4, use_reset=True, device=device) print(agent.policy) # keep track of progress mean_rewards = [] scores_window = deque(maxlen=100) loss_storage = [] for i_episode in range(episode+1): log_probs_old, states, actions, rewards, values, dones, vals_last, infos, ep_length = collect_trajectories(envs, act_dist, ep_length, agent.policy, rollout_length) returns = np.zeros_like(rewards) advantages = np.zeros_like(rewards) if not use_gae: for t in reversed(range(rollout_length)): if t == rollout_length - 1: returns[t] = rewards[t] + gamma * (1-dones[t]) * vals_last else: returns[t] = rewards[t] + gamma * (1-dones[t]) * returns[t+1] advantages[t] = returns[t] - values[t] else: for t in reversed(range(rollout_length)): if t == rollout_length - 1: returns[t] = rewards[t] + gamma * (1-dones[t]) * vals_last td_error = returns[t] - values[t] else: returns[t] = rewards[t] + gamma * (1-dones[t]) * returns[t+1] td_error = rewards[t] + gamma * (1-dones[t]) * values[t+1] - values[t] advantages[t] = advantages[t] * gae_lambda * gamma * (1-dones[t]) + td_error # convert to pytorch tensors and move to gpu if available returns = 
torch.from_numpy(returns).float().to(device).view(-1,) advantages = torch.from_numpy(advantages).float().to(device).view(-1,) advantages = (advantages - advantages.mean()) / (advantages.std() + 1e-10) for _ in range(optimization_epochs): sampler = random_sample(nbatch, minibatches) for inds in sampler: mb_log_probs_old = log_probs_old[inds] mb_states = states[inds] mb_actions = actions[inds] mb_returns = returns[inds] mb_advantages = advantages[inds] loss_p, loss_v, loss_ent = agent.update(mb_log_probs_old, mb_states, mb_actions, mb_returns, mb_advantages, cliprange=cliprange, beta=beta) loss_storage.append([loss_p, loss_v, loss_ent]) total_rewards = np.sum(rewards, axis=0) scores_window.append(np.mean(total_rewards)) # last 100 scores mean_rewards.append(np.mean(total_rewards)) # get the average reward of the parallel environments cliprange *= 0.999 # the clipping parameter reduces as time goes on beta *= 0.999 # the regulation term reduces if i_episode % 100 == 0: print('\rEpisode {}\tAverage Score: {:.2f}'.format(i_episode, np.mean(scores_window))) print(total_rewards) if np.mean(scores_window)>=goal_score and np.mean(scores_window)>=best_score: torch.save(agent.policy.state_dict(), "policy_cartpole.pth") best_score = np.mean(scores_window) return mean_rewards, loss_storage, act_dist, ep_length mean_rewards, loss, new_act_dist, ep_length = train(10000, 'CartPole-v0') print (new_act_dist[-1]) print (ep_length) plt.rcParams['xtick.direction'] = 'in' plt.rcParams['ytick.direction'] = 'in' plt.rcParams['font.size'] = 10 plt.title("PPO + MLP + GAE for 10000 episodes") plt.subplot(131) plt.plot(mean_rewards) plt.ylabel('Average score') plt.xlabel('Episode') plt.subplot(132) plt.plot(list(range(len(ep_length))), ep_length, color="red") plt.ylabel('Episode Length') plt.xlabel('Episode') plt.subplot(133) plt.ylabel('Frequency') plt.xlabel('Actions') plt.bar(['Action {}'.format(i) for i in range(len(new_act_dist))], new_act_dist[-1]) plt.show()
36.759398
171
0.606668
import numpy as np from collections import deque import pickle import torch from utils import collect_trajectories, random_sample from PPO import PPO import matplotlib.pyplot as plt from parallelEnv import * import gym env = gym.make("CartPole-v0") env.reset() env.seed(2) obs_dim = env.observation_space.shape[0] n_actions = env.action_space.n act_dist = [0 for i in range(n_actions)] def train(episode, env_name): gamma = .99 gae_lambda = 0.95 use_gae = True beta = .01 cliprange = 0.1 best_score = -np.inf goal_score = 195.0 ep_length = [] nenvs = 1 rollout_length = 200 minibatches = 10*8 nbatch = nenvs * rollout_length optimization_epochs = 4 device=torch.device("cuda:0" if torch.cuda.is_available() else "cpu") envs = parallelEnv(env_name, nenvs, seed=1234) agent = PPO(state_size=obs_dim, action_size=n_actions, seed=0, hidden_layers=[64,64], lr_policy=1e-4, use_reset=True, device=device) print(agent.policy) mean_rewards = [] scores_window = deque(maxlen=100) loss_storage = [] for i_episode in range(episode+1): log_probs_old, states, actions, rewards, values, dones, vals_last, infos, ep_length = collect_trajectories(envs, act_dist, ep_length, agent.policy, rollout_length) returns = np.zeros_like(rewards) advantages = np.zeros_like(rewards) if not use_gae: for t in reversed(range(rollout_length)): if t == rollout_length - 1: returns[t] = rewards[t] + gamma * (1-dones[t]) * vals_last else: returns[t] = rewards[t] + gamma * (1-dones[t]) * returns[t+1] advantages[t] = returns[t] - values[t] else: for t in reversed(range(rollout_length)): if t == rollout_length - 1: returns[t] = rewards[t] + gamma * (1-dones[t]) * vals_last td_error = returns[t] - values[t] else: returns[t] = rewards[t] + gamma * (1-dones[t]) * returns[t+1] td_error = rewards[t] + gamma * (1-dones[t]) * values[t+1] - values[t] advantages[t] = advantages[t] * gae_lambda * gamma * (1-dones[t]) + td_error returns = torch.from_numpy(returns).float().to(device).view(-1,) advantages = 
torch.from_numpy(advantages).float().to(device).view(-1,) advantages = (advantages - advantages.mean()) / (advantages.std() + 1e-10) for _ in range(optimization_epochs): sampler = random_sample(nbatch, minibatches) for inds in sampler: mb_log_probs_old = log_probs_old[inds] mb_states = states[inds] mb_actions = actions[inds] mb_returns = returns[inds] mb_advantages = advantages[inds] loss_p, loss_v, loss_ent = agent.update(mb_log_probs_old, mb_states, mb_actions, mb_returns, mb_advantages, cliprange=cliprange, beta=beta) loss_storage.append([loss_p, loss_v, loss_ent]) total_rewards = np.sum(rewards, axis=0) scores_window.append(np.mean(total_rewards)) mean_rewards.append(np.mean(total_rewards)) cliprange *= 0.999 beta *= 0.999 if i_episode % 100 == 0: print('\rEpisode {}\tAverage Score: {:.2f}'.format(i_episode, np.mean(scores_window))) print(total_rewards) if np.mean(scores_window)>=goal_score and np.mean(scores_window)>=best_score: torch.save(agent.policy.state_dict(), "policy_cartpole.pth") best_score = np.mean(scores_window) return mean_rewards, loss_storage, act_dist, ep_length mean_rewards, loss, new_act_dist, ep_length = train(10000, 'CartPole-v0') print (new_act_dist[-1]) print (ep_length) plt.rcParams['xtick.direction'] = 'in' plt.rcParams['ytick.direction'] = 'in' plt.rcParams['font.size'] = 10 plt.title("PPO + MLP + GAE for 10000 episodes") plt.subplot(131) plt.plot(mean_rewards) plt.ylabel('Average score') plt.xlabel('Episode') plt.subplot(132) plt.plot(list(range(len(ep_length))), ep_length, color="red") plt.ylabel('Episode Length') plt.xlabel('Episode') plt.subplot(133) plt.ylabel('Frequency') plt.xlabel('Actions') plt.bar(['Action {}'.format(i) for i in range(len(new_act_dist))], new_act_dist[-1]) plt.show()
true
true
f70eab2e8fd598cbdc5417a99d3c40fb7b2434e9
643
py
Python
lofo/infer_defaults.py
williamberrios/lofo-importance
34967cf47dc1c2797d3a77f8926918ae91e4197a
[ "MIT" ]
1
2020-10-31T10:05:45.000Z
2020-10-31T10:05:45.000Z
lofo/infer_defaults.py
williamberrios/lofo-importance
34967cf47dc1c2797d3a77f8926918ae91e4197a
[ "MIT" ]
null
null
null
lofo/infer_defaults.py
williamberrios/lofo-importance
34967cf47dc1c2797d3a77f8926918ae91e4197a
[ "MIT" ]
null
null
null
import numpy as np from sklearn.preprocessing import LabelEncoder from lightgbm import LGBMClassifier, LGBMRegressor def infer_model(df, features, y, n_jobs): model_class = LGBMRegressor if len(np.unique(y)) == 2: y = LabelEncoder().fit_transform(y) model_class = LGBMClassifier categoricals = df[features].select_dtypes(exclude=[np.number]).columns.tolist() for f in categoricals: df[f] = LabelEncoder().fit_transform(df[f].apply(str)) min_child_samples = int(0.01*df.shape[0]) model = model_class(min_child_samples=min_child_samples, n_jobs=n_jobs) return model, df, categoricals, y
30.619048
83
0.720062
import numpy as np from sklearn.preprocessing import LabelEncoder from lightgbm import LGBMClassifier, LGBMRegressor def infer_model(df, features, y, n_jobs): model_class = LGBMRegressor if len(np.unique(y)) == 2: y = LabelEncoder().fit_transform(y) model_class = LGBMClassifier categoricals = df[features].select_dtypes(exclude=[np.number]).columns.tolist() for f in categoricals: df[f] = LabelEncoder().fit_transform(df[f].apply(str)) min_child_samples = int(0.01*df.shape[0]) model = model_class(min_child_samples=min_child_samples, n_jobs=n_jobs) return model, df, categoricals, y
true
true
f70eac78971c0c7d00f18c3d2c1b7bbe17c6918c
11,789
py
Python
napari/_vispy/overlays/axes.py
kolibril13/napari
b39647d94e587f0255b0d4cc3087855e160a8929
[ "BSD-3-Clause" ]
null
null
null
napari/_vispy/overlays/axes.py
kolibril13/napari
b39647d94e587f0255b0d4cc3087855e160a8929
[ "BSD-3-Clause" ]
null
null
null
napari/_vispy/overlays/axes.py
kolibril13/napari
b39647d94e587f0255b0d4cc3087855e160a8929
[ "BSD-3-Clause" ]
null
null
null
import numpy as np from vispy.scene.visuals import Compound, Line, Mesh, Text from vispy.visuals.transforms import STTransform from ...layers.shapes._shapes_utils import triangulate_ellipse from ...utils.colormaps.standardize_color import transform_color from ...utils.theme import get_theme from ...utils.translations import trans def make_dashed_line(num_dashes, axis): """Make a dashed line. Parameters ---------- num_dashes : int Number of dashes in the line. axis : int Axis which is dashed. Returns ------- np.ndarray Dashed line, of shape (num_dashes, 3) with zeros in the non dashed axes and line segments in the dashed axis. """ dashes = np.linspace(0, 1, num_dashes * 2) dashed_line_ends = np.concatenate( [[dashes[2 * i], dashes[2 * i + 1]] for i in range(num_dashes)], axis=0 ) dashed_line = np.zeros((2 * num_dashes, 3)) dashed_line[:, axis] = np.array(dashed_line_ends) return dashed_line def make_arrow_head(num_segments, axis): """Make an arrowhead line. Parameters ---------- num_segments : int Number of segments in the arrowhead. axis Arrowhead direction. Returns ------- np.ndarray, np.ndarray Vertices and faces of the arrowhead. 
""" corners = np.array([[-1, -1], [-1, 1], [1, 1], [1, -1]]) * 0.1 vertices, faces = triangulate_ellipse(corners, num_segments) full_vertices = np.zeros((num_segments + 1, 3)) inds = list(range(3)) inds.pop(axis) full_vertices[:, inds] = vertices full_vertices[:, axis] = 0.9 full_vertices[0, axis] = 1.02 return full_vertices, faces def color_lines(colors): if len(colors) == 2: return np.concatenate( [[colors[0]] * 2, [colors[1]] * 2], axis=0, ) elif len(colors) == 3: return np.concatenate( [[colors[0]] * 2, [colors[1]] * 2, [colors[2]] * 2], axis=0, ) else: return ValueError( trans._( 'Either 2 or 3 colors must be provided, got {number}.', deferred=True, number=len(colors), ) ) def color_dashed_lines(colors): if len(colors) == 2: return np.concatenate( [[colors[0]] * 2, [colors[1]] * 4 * 2], axis=0, ) elif len(colors) == 3: return np.concatenate( [[colors[0]] * 2, [colors[1]] * 4 * 2, [colors[2]] * 8 * 2], axis=0, ) else: return ValueError( trans._( 'Either 2 or 3 colors must be provided, got {number}.', deferred=True, number=len(colors), ) ) def color_arrowheads(colors, num_segments): if len(colors) == 2: return np.concatenate( [[colors[0]] * num_segments, [colors[1]] * num_segments], axis=0, ) elif len(colors) == 3: return np.concatenate( [ [colors[0]] * num_segments, [colors[1]] * num_segments, [colors[2]] * num_segments, ], axis=0, ) else: return ValueError( trans._( 'Either 2 or 3 colors must be provided, got {number}.', deferred=True, number=len(colors), ) ) class VispyAxesOverlay: """Axes indicating world coordinate origin and orientation.""" _NUM_SEGMENTS_ARROWHEAD = 100 def __init__(self, viewer, parent=None, order=0): self._viewer = viewer self._scale = 1 # Target axes length in canvas pixels self._target_length = 80 # CMYRGB for 6 axes data in x, y, z, ... 
ordering self._default_color = [ [0, 1, 1, 1], [1, 0, 1, 1], [1, 1, 0, 1], [1, 0, 0, 1], [0, 1, 0, 1], [0, 0, 1, 1], ] # Text offset from line end position self._text_offsets = 0.1 * np.array([1, 1, 1]) # note order is x, y, z for VisPy self._line_data2D = np.array( [[0, 0, 0], [1, 0, 0], [0, 0, 0], [0, 1, 0]] ) self._line_data3D = np.array( [[0, 0, 0], [1, 0, 0], [0, 0, 0], [0, 1, 0], [0, 0, 0], [0, 0, 1]] ) # note order is x, y, z for VisPy self._dashed_line_data2D = np.concatenate( [[[1, 0, 0], [0, 0, 0]], make_dashed_line(4, axis=1)], axis=0, ) self._dashed_line_data3D = np.concatenate( [ [[1, 0, 0], [0, 0, 0]], make_dashed_line(4, axis=1), make_dashed_line(8, axis=2), ], axis=0, ) # note order is x, y, z for VisPy vertices = np.empty((0, 3)) faces = np.empty((0, 3)) for axis in range(2): v, f = make_arrow_head(self._NUM_SEGMENTS_ARROWHEAD, axis) faces = np.concatenate([faces, f + len(vertices)], axis=0) vertices = np.concatenate([vertices, v], axis=0) self._default_arrow_vertices2D = vertices self._default_arrow_faces2D = faces.astype(int) vertices = np.empty((0, 3)) faces = np.empty((0, 3)) for axis in range(3): v, f = make_arrow_head(self._NUM_SEGMENTS_ARROWHEAD, axis) faces = np.concatenate([faces, f + len(vertices)], axis=0) vertices = np.concatenate([vertices, v], axis=0) self._default_arrow_vertices3D = vertices self._default_arrow_faces3D = faces.astype(int) self.node = Compound( [Line(connect='segments', method='gl', width=3), Mesh(), Text()], parent=parent, ) self.node.transform = STTransform() self.node.order = order # Add a text node to display axes labels self.text_node = self.node._subvisuals[2] self.text_node.font_size = 10 self.text_node.anchors = ('center', 'center') self.text_node.text = f'{1}' self.node.canvas._backend.destroyed.connect(self._set_canvas_none) # End Note self._viewer.events.theme.connect(self._on_data_change) self._viewer.axes.events.visible.connect(self._on_visible_change) 
self._viewer.axes.events.colored.connect(self._on_data_change) self._viewer.axes.events.dashed.connect(self._on_data_change) self._viewer.axes.events.labels.connect(self._on_data_change) self._viewer.axes.events.arrows.connect(self._on_data_change) self._viewer.dims.events.order.connect(self._on_data_change) self._viewer.dims.events.range.connect(self._on_data_change) self._viewer.dims.events.ndisplay.connect(self._on_data_change) self._viewer.dims.events.axis_labels.connect(self._on_data_change) self._viewer.camera.events.zoom.connect(self._on_zoom_change) self._on_visible_change(None) self._on_data_change(None) def _set_canvas_none(self): self.node._set_canvas(None) self.text_node._set_canvas(None) def _on_visible_change(self, event): """Change visibiliy of axes.""" self.node.visible = self._viewer.axes.visible self._on_zoom_change(event) self._on_data_change(event) def _on_data_change(self, event): """Change style of axes.""" if not self._viewer.axes.visible: return # Determine which axes are displayed axes = self._viewer.dims.displayed # Actual number of displayed dims ndisplay = len(self._viewer.dims.displayed) # Determine the labels of those axes axes_labels = [self._viewer.dims.axis_labels[a] for a in axes[::-1]] # Counting backwards from total number of dimensions # determine axes positions. 
This is done as by default # the last NumPy axis corresponds to the first Vispy axis reversed_axes = [self._viewer.dims.ndim - 1 - a for a in axes[::-1]] # Determine colors of axes based on reverse position if self._viewer.axes.colored: axes_colors = [ self._default_color[ra % len(self._default_color)] for ra in reversed_axes ] else: # the reason for using the `as_hex` here is to avoid # `UserWarning` which is emitted when RGB values are above 1 background_color = get_theme( self._viewer.theme, False ).canvas.as_hex() background_color = transform_color(background_color)[0] color = np.subtract(1, background_color) color[-1] = background_color[-1] axes_colors = [color] * ndisplay # Determine data based on number of displayed dimensions and # axes visualization parameters if self._viewer.axes.dashed and ndisplay == 2: data = self._dashed_line_data2D color = color_dashed_lines(axes_colors) text_data = self._line_data2D[1::2] elif self._viewer.axes.dashed and ndisplay == 3: data = self._dashed_line_data3D color = color_dashed_lines(axes_colors) text_data = self._line_data3D[1::2] elif not self._viewer.axes.dashed and ndisplay == 2: data = self._line_data2D color = color_lines(axes_colors) text_data = self._line_data2D[1::2] elif not self._viewer.axes.dashed and ndisplay == 3: data = self._line_data3D color = color_lines(axes_colors) text_data = self._line_data3D[1::2] else: raise ValueError( trans._( 'Axes dash status and ndisplay combination not supported', deferred=True, ) ) if self._viewer.axes.arrows and ndisplay == 2: arrow_vertices = self._default_arrow_vertices2D arrow_faces = self._default_arrow_faces2D arrow_color = color_arrowheads( axes_colors, self._NUM_SEGMENTS_ARROWHEAD ) elif self._viewer.axes.arrows and ndisplay == 3: arrow_vertices = self._default_arrow_vertices3D arrow_faces = self._default_arrow_faces3D arrow_color = color_arrowheads( axes_colors, self._NUM_SEGMENTS_ARROWHEAD ) else: arrow_vertices = np.zeros((3, 3)) arrow_faces = np.array([[0, 1, 
2]]) arrow_color = [[0, 0, 0, 0]] self.node._subvisuals[0].set_data(data, color) self.node._subvisuals[1].set_data( vertices=arrow_vertices, faces=arrow_faces, face_colors=arrow_color, ) # Set visibility status of text self.text_node.visible = ( self._viewer.axes.visible and self._viewer.axes.labels ) self.text_node.text = axes_labels self.text_node.color = axes_colors self.text_node.pos = text_data + self._text_offsets def _on_zoom_change(self, event): """Update axes length based on zoom scale.""" if not self._viewer.axes.visible: return scale = 1 / self._viewer.camera.zoom # If scale has not changed, do not redraw if abs(np.log10(self._scale) - np.log10(scale)) < 1e-4: return self._scale = scale scale_canvas2world = self._scale target_canvas_pixels = self._target_length scale = target_canvas_pixels * scale_canvas2world # Update axes scale self.node.transform.scale = [scale, scale, scale, 1]
34.270349
79
0.573925
import numpy as np from vispy.scene.visuals import Compound, Line, Mesh, Text from vispy.visuals.transforms import STTransform from ...layers.shapes._shapes_utils import triangulate_ellipse from ...utils.colormaps.standardize_color import transform_color from ...utils.theme import get_theme from ...utils.translations import trans def make_dashed_line(num_dashes, axis): dashes = np.linspace(0, 1, num_dashes * 2) dashed_line_ends = np.concatenate( [[dashes[2 * i], dashes[2 * i + 1]] for i in range(num_dashes)], axis=0 ) dashed_line = np.zeros((2 * num_dashes, 3)) dashed_line[:, axis] = np.array(dashed_line_ends) return dashed_line def make_arrow_head(num_segments, axis): corners = np.array([[-1, -1], [-1, 1], [1, 1], [1, -1]]) * 0.1 vertices, faces = triangulate_ellipse(corners, num_segments) full_vertices = np.zeros((num_segments + 1, 3)) inds = list(range(3)) inds.pop(axis) full_vertices[:, inds] = vertices full_vertices[:, axis] = 0.9 full_vertices[0, axis] = 1.02 return full_vertices, faces def color_lines(colors): if len(colors) == 2: return np.concatenate( [[colors[0]] * 2, [colors[1]] * 2], axis=0, ) elif len(colors) == 3: return np.concatenate( [[colors[0]] * 2, [colors[1]] * 2, [colors[2]] * 2], axis=0, ) else: return ValueError( trans._( 'Either 2 or 3 colors must be provided, got {number}.', deferred=True, number=len(colors), ) ) def color_dashed_lines(colors): if len(colors) == 2: return np.concatenate( [[colors[0]] * 2, [colors[1]] * 4 * 2], axis=0, ) elif len(colors) == 3: return np.concatenate( [[colors[0]] * 2, [colors[1]] * 4 * 2, [colors[2]] * 8 * 2], axis=0, ) else: return ValueError( trans._( 'Either 2 or 3 colors must be provided, got {number}.', deferred=True, number=len(colors), ) ) def color_arrowheads(colors, num_segments): if len(colors) == 2: return np.concatenate( [[colors[0]] * num_segments, [colors[1]] * num_segments], axis=0, ) elif len(colors) == 3: return np.concatenate( [ [colors[0]] * num_segments, [colors[1]] * num_segments, 
[colors[2]] * num_segments, ], axis=0, ) else: return ValueError( trans._( 'Either 2 or 3 colors must be provided, got {number}.', deferred=True, number=len(colors), ) ) class VispyAxesOverlay: _NUM_SEGMENTS_ARROWHEAD = 100 def __init__(self, viewer, parent=None, order=0): self._viewer = viewer self._scale = 1 self._target_length = 80 self._default_color = [ [0, 1, 1, 1], [1, 0, 1, 1], [1, 1, 0, 1], [1, 0, 0, 1], [0, 1, 0, 1], [0, 0, 1, 1], ] self._text_offsets = 0.1 * np.array([1, 1, 1]) self._line_data2D = np.array( [[0, 0, 0], [1, 0, 0], [0, 0, 0], [0, 1, 0]] ) self._line_data3D = np.array( [[0, 0, 0], [1, 0, 0], [0, 0, 0], [0, 1, 0], [0, 0, 0], [0, 0, 1]] ) self._dashed_line_data2D = np.concatenate( [[[1, 0, 0], [0, 0, 0]], make_dashed_line(4, axis=1)], axis=0, ) self._dashed_line_data3D = np.concatenate( [ [[1, 0, 0], [0, 0, 0]], make_dashed_line(4, axis=1), make_dashed_line(8, axis=2), ], axis=0, ) vertices = np.empty((0, 3)) faces = np.empty((0, 3)) for axis in range(2): v, f = make_arrow_head(self._NUM_SEGMENTS_ARROWHEAD, axis) faces = np.concatenate([faces, f + len(vertices)], axis=0) vertices = np.concatenate([vertices, v], axis=0) self._default_arrow_vertices2D = vertices self._default_arrow_faces2D = faces.astype(int) vertices = np.empty((0, 3)) faces = np.empty((0, 3)) for axis in range(3): v, f = make_arrow_head(self._NUM_SEGMENTS_ARROWHEAD, axis) faces = np.concatenate([faces, f + len(vertices)], axis=0) vertices = np.concatenate([vertices, v], axis=0) self._default_arrow_vertices3D = vertices self._default_arrow_faces3D = faces.astype(int) self.node = Compound( [Line(connect='segments', method='gl', width=3), Mesh(), Text()], parent=parent, ) self.node.transform = STTransform() self.node.order = order self.text_node = self.node._subvisuals[2] self.text_node.font_size = 10 self.text_node.anchors = ('center', 'center') self.text_node.text = f'{1}' self.node.canvas._backend.destroyed.connect(self._set_canvas_none) 
self._viewer.events.theme.connect(self._on_data_change) self._viewer.axes.events.visible.connect(self._on_visible_change) self._viewer.axes.events.colored.connect(self._on_data_change) self._viewer.axes.events.dashed.connect(self._on_data_change) self._viewer.axes.events.labels.connect(self._on_data_change) self._viewer.axes.events.arrows.connect(self._on_data_change) self._viewer.dims.events.order.connect(self._on_data_change) self._viewer.dims.events.range.connect(self._on_data_change) self._viewer.dims.events.ndisplay.connect(self._on_data_change) self._viewer.dims.events.axis_labels.connect(self._on_data_change) self._viewer.camera.events.zoom.connect(self._on_zoom_change) self._on_visible_change(None) self._on_data_change(None) def _set_canvas_none(self): self.node._set_canvas(None) self.text_node._set_canvas(None) def _on_visible_change(self, event): self.node.visible = self._viewer.axes.visible self._on_zoom_change(event) self._on_data_change(event) def _on_data_change(self, event): if not self._viewer.axes.visible: return axes = self._viewer.dims.displayed ndisplay = len(self._viewer.dims.displayed) axes_labels = [self._viewer.dims.axis_labels[a] for a in axes[::-1]] reversed_axes = [self._viewer.dims.ndim - 1 - a for a in axes[::-1]] if self._viewer.axes.colored: axes_colors = [ self._default_color[ra % len(self._default_color)] for ra in reversed_axes ] else: background_color = get_theme( self._viewer.theme, False ).canvas.as_hex() background_color = transform_color(background_color)[0] color = np.subtract(1, background_color) color[-1] = background_color[-1] axes_colors = [color] * ndisplay if self._viewer.axes.dashed and ndisplay == 2: data = self._dashed_line_data2D color = color_dashed_lines(axes_colors) text_data = self._line_data2D[1::2] elif self._viewer.axes.dashed and ndisplay == 3: data = self._dashed_line_data3D color = color_dashed_lines(axes_colors) text_data = self._line_data3D[1::2] elif not self._viewer.axes.dashed and ndisplay == 2: data 
= self._line_data2D color = color_lines(axes_colors) text_data = self._line_data2D[1::2] elif not self._viewer.axes.dashed and ndisplay == 3: data = self._line_data3D color = color_lines(axes_colors) text_data = self._line_data3D[1::2] else: raise ValueError( trans._( 'Axes dash status and ndisplay combination not supported', deferred=True, ) ) if self._viewer.axes.arrows and ndisplay == 2: arrow_vertices = self._default_arrow_vertices2D arrow_faces = self._default_arrow_faces2D arrow_color = color_arrowheads( axes_colors, self._NUM_SEGMENTS_ARROWHEAD ) elif self._viewer.axes.arrows and ndisplay == 3: arrow_vertices = self._default_arrow_vertices3D arrow_faces = self._default_arrow_faces3D arrow_color = color_arrowheads( axes_colors, self._NUM_SEGMENTS_ARROWHEAD ) else: arrow_vertices = np.zeros((3, 3)) arrow_faces = np.array([[0, 1, 2]]) arrow_color = [[0, 0, 0, 0]] self.node._subvisuals[0].set_data(data, color) self.node._subvisuals[1].set_data( vertices=arrow_vertices, faces=arrow_faces, face_colors=arrow_color, ) self.text_node.visible = ( self._viewer.axes.visible and self._viewer.axes.labels ) self.text_node.text = axes_labels self.text_node.color = axes_colors self.text_node.pos = text_data + self._text_offsets def _on_zoom_change(self, event): if not self._viewer.axes.visible: return scale = 1 / self._viewer.camera.zoom if abs(np.log10(self._scale) - np.log10(scale)) < 1e-4: return self._scale = scale scale_canvas2world = self._scale target_canvas_pixels = self._target_length scale = target_canvas_pixels * scale_canvas2world self.node.transform.scale = [scale, scale, scale, 1]
true
true
f70eacd9831e30421747847c4c2092590d7cb3b4
4,179
py
Python
dataloader.py
PaperCodeReview/MoCo-TF
1ea01b2d005de3e030229f79a37135468fa1631e
[ "MIT" ]
22
2020-10-01T10:14:36.000Z
2022-02-02T12:20:42.000Z
dataloader.py
PaperCodeReview/MoCo-TF
1ea01b2d005de3e030229f79a37135468fa1631e
[ "MIT" ]
2
2021-06-25T06:06:50.000Z
2021-11-08T23:43:38.000Z
dataloader.py
PaperCodeReview/MoCo-TF
1ea01b2d005de3e030229f79a37135468fa1631e
[ "MIT" ]
4
2021-03-03T06:19:45.000Z
2021-05-20T08:07:50.000Z
import os import random import numpy as np import pandas as pd import tensorflow as tf from augment import Augment AUTO = tf.data.experimental.AUTOTUNE def set_dataset(task, data_path): trainset = pd.read_csv( os.path.join( data_path, 'imagenet_trainset.csv' )).values.tolist() trainset = [[os.path.join(data_path, t[0]), t[1]] for t in trainset] if task == 'lincls': valset = pd.read_csv( os.path.join( data_path, 'imagenet_valset.csv' )).values.tolist() valset = [[os.path.join(data_path, t[0]), t[1]] for t in valset] return np.array(trainset, dtype='object'), np.array(valset, dtype='object') return np.array(trainset, dtype='object') class DataLoader: def __init__(self, args, mode, datalist, batch_size, num_workers=1, shuffle=True): self.args = args self.mode = mode self.datalist = datalist self.batch_size = batch_size self.num_workers = num_workers self.shuffle = shuffle self.dataloader = self._dataloader() def __len__(self): return len(self.datalist) def fetch_dataset(self, path, y=None): x = tf.io.read_file(path) if y is not None: return tf.data.Dataset.from_tensors((x, y)) return tf.data.Dataset.from_tensors(x) def augmentation(self, img, shape): augset = Augment(self.args, self.mode) if self.args.task in ['v1', 'v2']: img_list = [] for _ in range(2): # query, key aug_img = tf.identity(img) if self.args.task == 'v1': aug_img = augset._augmentv1(aug_img, shape) # moco v1 else: radius = np.random.choice([3, 5]) aug_img = augset._augmentv2(aug_img, shape, (radius, radius)) # moco v2 img_list.append(aug_img) return img_list else: return augset._augment_lincls(img, shape) def dataset_parser(self, value, label=None): shape = tf.image.extract_jpeg_shape(value) img = tf.io.decode_jpeg(value, channels=3) if label is None: # moco query, key = self.augmentation(img, shape) inputs = {'query': query, 'key': key} labels = tf.zeros([]) else: # lincls inputs = self.augmentation(img, shape) labels = tf.one_hot(label, self.args.classes) return (inputs, labels) def shuffle_BN(self, 
value, labels): if self.num_workers > 1: pre_shuffle = [(i, value['key'][i]) for i in range(self.batch_size)] random.shuffle(pre_shuffle) shuffle_idx = [] value_temp = [] for vv in pre_shuffle: shuffle_idx.append(vv[0]) value_temp.append(tf.expand_dims(vv[1], axis=0)) value['key'] = tf.concat(value_temp, axis=0) unshuffle_idx = np.array(shuffle_idx).argsort().tolist() value.update({'unshuffle': unshuffle_idx}) return (value, labels) def _dataloader(self): self.imglist = self.datalist[:,0].tolist() if self.args.task in ['v1', 'v2']: dataset = tf.data.Dataset.from_tensor_slices(self.imglist) else: self.labellist = self.datalist[:,1].tolist() dataset = tf.data.Dataset.from_tensor_slices((self.imglist, self.labellist)) dataset = dataset.repeat() if self.shuffle: dataset = dataset.shuffle(len(self.datalist)) dataset = dataset.interleave(self.fetch_dataset, num_parallel_calls=AUTO) dataset = dataset.map(self.dataset_parser, num_parallel_calls=AUTO) dataset = dataset.batch(self.batch_size) dataset = dataset.prefetch(AUTO) if self.args.shuffle_bn and self.args.task in ['v1', 'v2']: # only moco dataset = dataset.map(self.shuffle_BN, num_parallel_calls=AUTO) return dataset
36.657895
92
0.569275
import os import random import numpy as np import pandas as pd import tensorflow as tf from augment import Augment AUTO = tf.data.experimental.AUTOTUNE def set_dataset(task, data_path): trainset = pd.read_csv( os.path.join( data_path, 'imagenet_trainset.csv' )).values.tolist() trainset = [[os.path.join(data_path, t[0]), t[1]] for t in trainset] if task == 'lincls': valset = pd.read_csv( os.path.join( data_path, 'imagenet_valset.csv' )).values.tolist() valset = [[os.path.join(data_path, t[0]), t[1]] for t in valset] return np.array(trainset, dtype='object'), np.array(valset, dtype='object') return np.array(trainset, dtype='object') class DataLoader: def __init__(self, args, mode, datalist, batch_size, num_workers=1, shuffle=True): self.args = args self.mode = mode self.datalist = datalist self.batch_size = batch_size self.num_workers = num_workers self.shuffle = shuffle self.dataloader = self._dataloader() def __len__(self): return len(self.datalist) def fetch_dataset(self, path, y=None): x = tf.io.read_file(path) if y is not None: return tf.data.Dataset.from_tensors((x, y)) return tf.data.Dataset.from_tensors(x) def augmentation(self, img, shape): augset = Augment(self.args, self.mode) if self.args.task in ['v1', 'v2']: img_list = [] for _ in range(2): aug_img = tf.identity(img) if self.args.task == 'v1': aug_img = augset._augmentv1(aug_img, shape) else: radius = np.random.choice([3, 5]) aug_img = augset._augmentv2(aug_img, shape, (radius, radius)) img_list.append(aug_img) return img_list else: return augset._augment_lincls(img, shape) def dataset_parser(self, value, label=None): shape = tf.image.extract_jpeg_shape(value) img = tf.io.decode_jpeg(value, channels=3) if label is None: query, key = self.augmentation(img, shape) inputs = {'query': query, 'key': key} labels = tf.zeros([]) else: inputs = self.augmentation(img, shape) labels = tf.one_hot(label, self.args.classes) return (inputs, labels) def shuffle_BN(self, value, labels): if self.num_workers > 1: 
pre_shuffle = [(i, value['key'][i]) for i in range(self.batch_size)] random.shuffle(pre_shuffle) shuffle_idx = [] value_temp = [] for vv in pre_shuffle: shuffle_idx.append(vv[0]) value_temp.append(tf.expand_dims(vv[1], axis=0)) value['key'] = tf.concat(value_temp, axis=0) unshuffle_idx = np.array(shuffle_idx).argsort().tolist() value.update({'unshuffle': unshuffle_idx}) return (value, labels) def _dataloader(self): self.imglist = self.datalist[:,0].tolist() if self.args.task in ['v1', 'v2']: dataset = tf.data.Dataset.from_tensor_slices(self.imglist) else: self.labellist = self.datalist[:,1].tolist() dataset = tf.data.Dataset.from_tensor_slices((self.imglist, self.labellist)) dataset = dataset.repeat() if self.shuffle: dataset = dataset.shuffle(len(self.datalist)) dataset = dataset.interleave(self.fetch_dataset, num_parallel_calls=AUTO) dataset = dataset.map(self.dataset_parser, num_parallel_calls=AUTO) dataset = dataset.batch(self.batch_size) dataset = dataset.prefetch(AUTO) if self.args.shuffle_bn and self.args.task in ['v1', 'v2']: dataset = dataset.map(self.shuffle_BN, num_parallel_calls=AUTO) return dataset
true
true
f70eacda302b793da33c4fb5beb6b45c6cc933ec
1,315
py
Python
user_modeling.py
leeamen/k_means
dfa9cad22033c108e3988a99f4d58c685eb06921
[ "MIT" ]
null
null
null
user_modeling.py
leeamen/k_means
dfa9cad22033c108e3988a99f4d58c685eb06921
[ "MIT" ]
null
null
null
user_modeling.py
leeamen/k_means
dfa9cad22033c108e3988a99f4d58c685eb06921
[ "MIT" ]
null
null
null
#!/usr/bin/python #coding:utf-8 import numpy as np import logging import mylog import mykmeans as ml logger = logging.getLogger(__name__) logger.setLevel(logging.ERROR) def str2num(s): a = ['very_low', 'Low', 'Middle', 'High'] for i in range(0, len(a)): if a[i] == s: return float(i) if __name__ == '__main__': filename = './data/data_user_modeling.txt' train_data = np.loadtxt(filename, delimiter = ',', converters = {5:str2num}) logger.debug(train_data) logger.debug(train_data.shape) train_x = train_data[:,0:-1] train_y = train_data[:,-1] logger.debug(train_x) logger.debug(train_y) param = {} param['use_random_for_k'] = 1 param['k'] = [i for i in range(0, 258, 1)] param['n_clusters'] = 4 param['max_iter'] = 100 kmeans = ml.Kmeans(param) kmeans.Fit(train_x) # logger.debug(kmeans) pred = kmeans.Predict(train_x) logger.info('train_y:%s', train_y) logger.info(' pred:%s', pred) # logger.info('k-means准确率:%f', 1.0*sum(pred == train_y)/len(train_y)) # ml.PickingRightK(train_x, param) import myplot myplot.Figure() ml.FitMulti(train_x, param, 100) ml.BisectingFitMulti(train_x, param, 100) myplot.Legend(['k-means','bisecting']) myplot.Title('user modeling') myplot.Show()
28.586957
108
0.641825
import numpy as np import logging import mylog import mykmeans as ml logger = logging.getLogger(__name__) logger.setLevel(logging.ERROR) def str2num(s): a = ['very_low', 'Low', 'Middle', 'High'] for i in range(0, len(a)): if a[i] == s: return float(i) if __name__ == '__main__': filename = './data/data_user_modeling.txt' train_data = np.loadtxt(filename, delimiter = ',', converters = {5:str2num}) logger.debug(train_data) logger.debug(train_data.shape) train_x = train_data[:,0:-1] train_y = train_data[:,-1] logger.debug(train_x) logger.debug(train_y) param = {} param['use_random_for_k'] = 1 param['k'] = [i for i in range(0, 258, 1)] param['n_clusters'] = 4 param['max_iter'] = 100 kmeans = ml.Kmeans(param) kmeans.Fit(train_x) pred = kmeans.Predict(train_x) logger.info('train_y:%s', train_y) logger.info(' pred:%s', pred) import myplot myplot.Figure() ml.FitMulti(train_x, param, 100) ml.BisectingFitMulti(train_x, param, 100) myplot.Legend(['k-means','bisecting']) myplot.Title('user modeling') myplot.Show()
true
true
f70eaf3e78788501749a3d605244c7e2c2fa2be1
448
py
Python
creme/linear_model/__init__.py
Raul9595/creme
39cec7ac27ccd40ff0a7bdd6bceaf7ce25c1a8da
[ "BSD-3-Clause" ]
1
2020-07-27T03:06:46.000Z
2020-07-27T03:06:46.000Z
creme/linear_model/__init__.py
2torus/creme
bcc5e2a0155663a1f0ba779c68f23456695bcb54
[ "BSD-3-Clause" ]
null
null
null
creme/linear_model/__init__.py
2torus/creme
bcc5e2a0155663a1f0ba779c68f23456695bcb54
[ "BSD-3-Clause" ]
null
null
null
""" Generalized linear models optimized with online gradient descent from :mod:`creme.optim`. """ from .fm import FMRegressor from .lin_reg import LinearRegression from .log_reg import LogisticRegression from .pa import PAClassifier from .pa import PARegressor from .softmax import SoftmaxRegression __all__ = [ 'FMRegressor', 'LinearRegression', 'LogisticRegression', 'PAClassifier', 'PARegressor', 'SoftmaxRegression' ]
22.4
89
0.754464
from .fm import FMRegressor from .lin_reg import LinearRegression from .log_reg import LogisticRegression from .pa import PAClassifier from .pa import PARegressor from .softmax import SoftmaxRegression __all__ = [ 'FMRegressor', 'LinearRegression', 'LogisticRegression', 'PAClassifier', 'PARegressor', 'SoftmaxRegression' ]
true
true
f70eb08931175461e6046102ac021119a7ea22c2
8,490
py
Python
docs/conf.py
daherman/dedupe
053d373aaed47201f720c5b6d1a568fc49742cc3
[ "MIT" ]
1
2020-06-26T18:03:34.000Z
2020-06-26T18:03:34.000Z
docs/conf.py
daherman/dedupe
053d373aaed47201f720c5b6d1a568fc49742cc3
[ "MIT" ]
null
null
null
docs/conf.py
daherman/dedupe
053d373aaed47201f720c5b6d1a568fc49742cc3
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- # # dedupe documentation build configuration file, created by # sphinx-quickstart on Thu Apr 10 11:27:59 2014. # # This file is execfile()d with the current directory set to its # containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. import sys import os # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. #sys.path.insert(0, os.path.abspath('.')) # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. #needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ 'sphinx.ext.todo', 'sphinx.ext.coverage', 'sphinx.ext.mathjax', 'sphinx.ext.viewcode', ] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix of source filenames. source_suffix = '.rst' # The encoding of source files. #source_encoding = 'utf-8-sig' # The master toctree document. master_doc = 'index' # General information about the project. project = u'dedupe' copyright = u'2018, Forest Gregg, Derek Eder, and contributors' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. version = '1.9.4' # The full version, including alpha/beta/rc tags. release = '1.9.4' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. 
#language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: #today = '' # Else, today_fmt is used as the format for a strftime call. #today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = ['_build', 'common_*.rst'] # The reST default role (used for this markup: `text`) to use for all # documents. #default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. #add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). #add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. #show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. #modindex_common_prefix = [] # If true, keep warnings as "system message" paragraphs in the built documents. #keep_warnings = False # -- Options for HTML output ---------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. html_theme = 'sphinx_rtd_theme' # Custom stylesheet # html_style = 'css/custom.css' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. # html_theme_options = { # 'canonical_url': 'https://docs.dedupe.io/' # } # Add any paths that contain custom themes here, relative to this directory. # html_theme_path = [] # The name for this set of Sphinx documents. If None, it defaults to # "<project> v<release> documentation". #html_title = None # A shorter title for the navigation bar. Default is the same as html_title. 
#html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. #html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. #html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] # Add any extra paths that contain custom files (such as robots.txt or # .htaccess) here, relative to this directory. These files are copied # directly to the root of the documentation. #html_extra_path = [] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. #html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. #html_use_smartypants = True # Custom sidebar templates, maps document names to template names. #html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. #html_additional_pages = {} # If false, no module index is generated. #html_domain_indices = True # If false, no index is generated. #html_use_index = True # If true, the index is split into individual pages for each letter. #html_split_index = False # If true, links to the reST sources are added to the pages. #html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. #html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. #html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a <link> tag referring to it. 
The value of this option must be the # base URL from which the finished HTML is served. #html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). #html_file_suffix = None # Output file base name for HTML help builder. htmlhelp_basename = 'dedupedoc' # -- Options for LaTeX output --------------------------------------------- latex_elements = { # The paper size ('letterpaper' or 'a4paper'). #'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). #'pointsize': '10pt', # Additional stuff for the LaTeX preamble. #'preamble': '', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ ('index', 'dedupe.tex', u'dedupe Documentation', u'Forest Gregg, Derek Eder, and contributors', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of # the title page. #latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. #latex_use_parts = False # If true, show page references after internal links. #latex_show_pagerefs = False # If true, show URL addresses after external links. #latex_show_urls = False # Documents to append as an appendix to all manuals. #latex_appendices = [] # If false, no module index is generated. #latex_domain_indices = True # -- Options for manual page output --------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ ('index', 'dedupe', u'dedupe Documentation', [u'Forest Gregg, Derek Eder, and contributors'], 1) ] # If true, show URL addresses after external links. #man_show_urls = False # -- Options for Texinfo output ------------------------------------------- # Grouping the document tree into Texinfo files. 
List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ ('index', 'dedupe', u'dedupe Documentation', u'Forest Gregg, Derek Eder, and contributors', 'dedupe', 'One line description of project.', 'Miscellaneous'), ] # Documents to append as an appendix to all manuals. #texinfo_appendices = [] # If false, no module index is generated. #texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. #texinfo_show_urls = 'footnote' # If true, do not generate a @detailmenu in the "Top" node's menu. #texinfo_no_detailmenu = False
31.444444
95
0.715901
import sys import os extensions = [ 'sphinx.ext.todo', 'sphinx.ext.coverage', 'sphinx.ext.mathjax', 'sphinx.ext.viewcode', ] templates_path = ['_templates'] source_suffix = '.rst' master_doc = 'index' project = u'dedupe' copyright = u'2018, Forest Gregg, Derek Eder, and contributors' # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. version = '1.9.4' # The full version, including alpha/beta/rc tags. release = '1.9.4' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. #language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: #today = '' # Else, today_fmt is used as the format for a strftime call. #today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = ['_build', 'common_*.rst'] # The reST default role (used for this markup: `text`) to use for all # documents. #default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. #add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). #add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. #show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. #modindex_common_prefix = [] # If true, keep warnings as "system message" paragraphs in the built documents. #keep_warnings = False # -- Options for HTML output ---------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. 
html_theme = 'sphinx_rtd_theme' # Custom stylesheet # html_style = 'css/custom.css' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. # html_theme_options = { # 'canonical_url': 'https://docs.dedupe.io/' # } # Add any paths that contain custom themes here, relative to this directory. # html_theme_path = [] # The name for this set of Sphinx documents. If None, it defaults to # "<project> v<release> documentation". #html_title = None # A shorter title for the navigation bar. Default is the same as html_title. #html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. #html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. #html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] # Add any extra paths that contain custom files (such as robots.txt or # .htaccess) here, relative to this directory. These files are copied # directly to the root of the documentation. #html_extra_path = [] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. #html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. #html_use_smartypants = True # Custom sidebar templates, maps document names to template names. #html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. #html_additional_pages = {} # If false, no module index is generated. 
#html_domain_indices = True # If false, no index is generated. #html_use_index = True # If true, the index is split into individual pages for each letter. #html_split_index = False # If true, links to the reST sources are added to the pages. #html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. #html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. #html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a <link> tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. #html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). #html_file_suffix = None # Output file base name for HTML help builder. htmlhelp_basename = 'dedupedoc' # -- Options for LaTeX output --------------------------------------------- latex_elements = { # The paper size ('letterpaper' or 'a4paper'). #'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). #'pointsize': '10pt', # Additional stuff for the LaTeX preamble. #'preamble': '', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ ('index', 'dedupe.tex', u'dedupe Documentation', u'Forest Gregg, Derek Eder, and contributors', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of # the title page. #latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. #latex_use_parts = False # If true, show page references after internal links. #latex_show_pagerefs = False # If true, show URL addresses after external links. #latex_show_urls = False # Documents to append as an appendix to all manuals. #latex_appendices = [] # If false, no module index is generated. 
#latex_domain_indices = True # -- Options for manual page output --------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ ('index', 'dedupe', u'dedupe Documentation', [u'Forest Gregg, Derek Eder, and contributors'], 1) ] # If true, show URL addresses after external links. #man_show_urls = False # -- Options for Texinfo output ------------------------------------------- # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ ('index', 'dedupe', u'dedupe Documentation', u'Forest Gregg, Derek Eder, and contributors', 'dedupe', 'One line description of project.', 'Miscellaneous'), ] # Documents to append as an appendix to all manuals. #texinfo_appendices = [] # If false, no module index is generated. #texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. #texinfo_show_urls = 'footnote' # If true, do not generate a @detailmenu in the "Top" node's menu.
true
true
f70eb0922b08398271e740b375a058194fb88a63
10,033
py
Python
auth-api/src/auth_api/models/user.py
vysakh-menon-aot/sbc-auth
b5c18df3b7586cb7d9761f7fc0809cb2cbb3b096
[ "Apache-2.0" ]
null
null
null
auth-api/src/auth_api/models/user.py
vysakh-menon-aot/sbc-auth
b5c18df3b7586cb7d9761f7fc0809cb2cbb3b096
[ "Apache-2.0" ]
null
null
null
auth-api/src/auth_api/models/user.py
vysakh-menon-aot/sbc-auth
b5c18df3b7586cb7d9761f7fc0809cb2cbb3b096
[ "Apache-2.0" ]
null
null
null
# Copyright © 2019 Province of British Columbia # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """This manages a User record in the Auth service. A User stores basic information from a KeyCloak user (including the KeyCloak GUID). """ import datetime from flask import current_app from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, String, and_, or_ from sqlalchemy.dialects.postgresql import UUID from sqlalchemy.orm import relationship from auth_api.utils.enums import AccessType, LoginSource, Status, UserStatus from auth_api.utils.roles import Role from auth_api.utils.user_context import UserContext, user_context from .base_model import BaseModel from .db import db from .membership import Membership as MembershipModel from .org import Org as OrgModel from .user_status_code import UserStatusCode class User(BaseModel): """This is the model for a User.""" __tablename__ = 'users' __versioned__ = { 'exclude': ['modified', 'modified_by_id', 'modified_by', 'created'] } id = Column(Integer, primary_key=True) username = Column('username', String(100), index=True) firstname = Column('first_name', String(200), index=True) lastname = Column('last_name', String(200), index=True) email = Column('email', String(200), index=True) keycloak_guid = Column( 'keycloak_guid', UUID(as_uuid=True), unique=True, nullable=True # bcros users comes with no guid ) is_terms_of_use_accepted = Column(Boolean(), default=False, nullable=True) terms_of_use_accepted_version = Column( 
ForeignKey('documents.version_id'), nullable=True ) # a type for the user to identify what kind of user it is..ie anonymous , bcsc etc ..similar to login source type = Column('type', String(200), nullable=True) status = Column(ForeignKey('user_status_codes.id')) idp_userid = Column('idp_userid', String(256), index=True) login_source = Column('login_source', String(200), nullable=True) login_time = Column(DateTime, default=None, nullable=True) contacts = relationship('ContactLink', primaryjoin='User.id == ContactLink.user_id', lazy='select') orgs = relationship('Membership', primaryjoin='and_(User.id == Membership.user_id, or_(Membership.status == ' + str( Status.ACTIVE.value) + ', Membership.status == ' + str( Status.PENDING_APPROVAL.value) + '))', lazy='select') # noqa:E127 terms_of_use_version = relationship('Documents', foreign_keys=[terms_of_use_accepted_version], uselist=False, lazy='select') user_status = relationship('UserStatusCode', foreign_keys=[status], lazy='subquery') @classmethod def find_by_username(cls, username): """Return the first user with the provided username.""" return cls.query.filter_by(username=username).first() @classmethod @user_context def find_by_jwt_token(cls, **kwargs): """Find an existing user by the keycloak GUID and (idpUserId is null or from token) in the provided token.""" user_from_context: UserContext = kwargs['user_context'] return db.session.query(User).filter( and_(User.keycloak_guid == user_from_context.sub, or_(User.idp_userid == user_from_context.token_info.get('idp_userid', None), User.idp_userid.is_(None)))).one_or_none() @classmethod @user_context def create_from_jwt_token(cls, first_name: str, last_name: str, **kwargs): """Create a User from the provided JWT.""" user_from_context: UserContext = kwargs['user_context'] token = user_from_context.token_info if token: user = User( username=user_from_context.user_name, firstname=first_name, lastname=last_name, email=token.get('email', None), 
keycloak_guid=user_from_context.sub, created=datetime.datetime.now(), login_source=user_from_context.login_source, status=UserStatusCode.get_default_type(), idp_userid=token.get('idp_userid', None), login_time=datetime.datetime.now(), type=cls._get_type(user_from_context=user_from_context) ) current_app.logger.debug( 'Creating user from JWT:{}; User:{}'.format(token, user) ) user.save() return user return None @classmethod @user_context def update_from_jwt_token(cls, user, # pylint:disable=too-many-arguments first_name: str, last_name: str, is_login: bool = False, **kwargs): """Update a User from the provided JWT.""" user_from_context: UserContext = kwargs['user_context'] token = user_from_context.token_info if not token or not user: return None # Do not save if nothing has been changed # pylint: disable=too-many-boolean-expressions if not is_login \ and (user.username == user_from_context.user_name or user.username) \ and user.firstname == first_name \ and user.lastname == last_name \ and user.email == token.get('email', user.email) \ and (str(user.keycloak_guid) == user_from_context.sub or user.keycloak_guid) \ and user.status == UserStatus.ACTIVE.value \ and (user.login_source == user_from_context.login_source or user.login_source) \ and user.idp_userid == token.get('idp_userid', None): return user current_app.logger.debug( 'Updating user from JWT:{}; User:{}'.format(token, user) ) user.username = user_from_context.user_name or user.username user.firstname = first_name user.lastname = last_name user.email = token.get('email', user.email) user.modified = datetime.datetime.now() if token.get('accessType', None) == AccessType.ANONYMOUS.value: # update kcguid for anonymous users user.keycloak_guid = user_from_context.sub or user.keycloak_guid # If this user is marked as Inactive, this login will re-activate them user.status = UserStatus.ACTIVE.value user.login_source = user_from_context.login_source or user.login_source user.type = cls._get_type(user_from_context) # 
If this is a request during login, update login_time if is_login: user.login_time = datetime.datetime.now() user.idp_userid = token.get('idp_userid') cls.commit() return user @classmethod def find_users(cls, first_name, last_name, email): """Return a set of users with either the given username or the given email.""" # TODO: This needs to be improved for scalability. Paging large datasets etc. if first_name == '' and last_name == '' and email == '': return cls.query.all() return cls.query.filter(or_(cls.firstname == first_name, cls.lastname == last_name, cls.email == email)).all() @classmethod @user_context def update_terms_of_use(cls, is_terms_accepted, terms_of_use_version, **kwargs): """Update the terms of service for the user.""" user_from_context: UserContext = kwargs['user_context'] if user_from_context.token_info: user = cls.find_by_jwt_token() user.is_terms_of_use_accepted = is_terms_accepted user.terms_of_use_accepted_version = terms_of_use_version current_app.logger.debug( 'Updating users Terms of use is_terms_accepted:{}; terms_of_use_version:{}'.format( is_terms_accepted, terms_of_use_version) ) cls.save(user) return user return None @classmethod def find_users_by_org_id_by_status_by_roles(cls, org_id, roles, status=Status.ACTIVE.value): """Find all members of the org with a status.""" return db.session.query(User). \ join(MembershipModel, (User.id == MembershipModel.user_id) & (MembershipModel.status == status) & (MembershipModel.membership_type_code.in_(roles))). 
\ join(OrgModel).filter(OrgModel.id == org_id).all() def delete(self): """Users cannot be deleted so intercept the ORM by just returning.""" return self @classmethod def _get_type(cls, user_from_context: UserContext) -> str: """Return type of the user from the token info.""" user_type: str = None if user_from_context.roles: if Role.ANONYMOUS_USER.value in user_from_context.roles \ or user_from_context.login_source == LoginSource.BCROS.value: user_type = Role.ANONYMOUS_USER.name elif Role.GOV_ACCOUNT_USER.value in user_from_context.roles: user_type = Role.GOV_ACCOUNT_USER.name elif Role.PUBLIC_USER.value in user_from_context.roles \ or user_from_context.login_source in [LoginSource.BCEID.value, LoginSource.BCSC.value]: user_type = Role.PUBLIC_USER.name elif user_from_context.is_staff(): user_type = Role.STAFF.name elif user_from_context.is_system(): user_type = Role.SYSTEM.name return user_type
43.4329
118
0.653344
"""ORM model for users synchronized from Keycloak JWT tokens (auth-api).

NOTE(review): this block was recovered from a whitespace-collapsed dump;
statement grouping/indentation has been reconstructed — confirm against the
original repository before relying on block boundaries inside conditionals.
"""
import datetime

from flask import current_app
from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, String, and_, or_
from sqlalchemy.dialects.postgresql import UUID
from sqlalchemy.orm import relationship

from auth_api.utils.enums import AccessType, LoginSource, Status, UserStatus
from auth_api.utils.roles import Role
from auth_api.utils.user_context import UserContext, user_context

from .base_model import BaseModel
from .db import db
from .membership import Membership as MembershipModel
from .org import Org as OrgModel
from .user_status_code import UserStatusCode


class User(BaseModel):
    """A user of the auth service, keyed to a Keycloak identity.

    Rows are created/updated from the JWT presented at login; audit columns
    (modified/created) are excluded from versioning below.
    """

    __tablename__ = 'users'
    # Exclude audit bookkeeping columns from the versioning extension.
    __versioned__ = {
        'exclude': ['modified', 'modified_by_id', 'modified_by', 'created']
    }

    id = Column(Integer, primary_key=True)
    username = Column('username', String(100), index=True)
    firstname = Column('first_name', String(200), index=True)
    lastname = Column('last_name', String(200), index=True)
    email = Column('email', String(200), index=True)
    # Keycloak subject (sub claim); unique per identity, absent until first sync.
    keycloak_guid = Column(
        'keycloak_guid', UUID(as_uuid=True), unique=True, nullable=True
    )
    is_terms_of_use_accepted = Column(Boolean(), default=False, nullable=True)
    terms_of_use_accepted_version = Column(
        ForeignKey('documents.version_id'), nullable=True
    )
    # Derived role category (see _get_type): ANONYMOUS_USER / GOV_ACCOUNT_USER / etc.
    type = Column('type', String(200), nullable=True)
    status = Column(ForeignKey('user_status_codes.id'))
    # Identity-provider user id from the token (idp_userid claim).
    idp_userid = Column('idp_userid', String(256), index=True)
    login_source = Column('login_source', String(200), nullable=True)
    login_time = Column(DateTime, default=None, nullable=True)

    contacts = relationship('ContactLink', primaryjoin='User.id == ContactLink.user_id', lazy='select')
    # Only ACTIVE and PENDING_APPROVAL memberships are surfaced on the user.
    orgs = relationship('Membership', primaryjoin='and_(User.id == Membership.user_id, or_(Membership.status == ' + str(
        Status.ACTIVE.value) + ', Membership.status == ' + str(
        Status.PENDING_APPROVAL.value) + '))', lazy='select')
    terms_of_use_version = relationship('Documents', foreign_keys=[terms_of_use_accepted_version], uselist=False,
                                        lazy='select')
    user_status = relationship('UserStatusCode', foreign_keys=[status], lazy='subquery')

    @classmethod
    def find_by_username(cls, username):
        """Return the first user with the given username, or None."""
        return cls.query.filter_by(username=username).first()

    @classmethod
    @user_context
    def find_by_jwt_token(cls, **kwargs):
        """Return the user matching the current request's JWT, or None.

        Matches on keycloak_guid plus either the token's idp_userid or a
        not-yet-populated (NULL) idp_userid, so legacy rows still resolve.
        """
        user_from_context: UserContext = kwargs['user_context']
        return db.session.query(User).filter(
            and_(User.keycloak_guid == user_from_context.sub,
                 or_(User.idp_userid == user_from_context.token_info.get('idp_userid', None),
                     User.idp_userid.is_(None)))).one_or_none()

    @classmethod
    @user_context
    def create_from_jwt_token(cls, first_name: str, last_name: str, **kwargs):
        """Create and persist a user from the current JWT; None if no token."""
        user_from_context: UserContext = kwargs['user_context']
        token = user_from_context.token_info
        if token:
            user = User(
                username=user_from_context.user_name,
                firstname=first_name,
                lastname=last_name,
                email=token.get('email', None),
                keycloak_guid=user_from_context.sub,
                created=datetime.datetime.now(),
                login_source=user_from_context.login_source,
                status=UserStatusCode.get_default_type(),
                idp_userid=token.get('idp_userid', None),
                login_time=datetime.datetime.now(),
                type=cls._get_type(user_from_context=user_from_context)
            )
            current_app.logger.debug(
                'Creating user from JWT:{}; User:{}'.format(token, user)
            )
            user.save()
            return user
        return None

    @classmethod
    @user_context
    def update_from_jwt_token(cls, user, first_name: str, last_name: str, is_login: bool = False, **kwargs):
        """Refresh *user* from the current JWT; return the user or None.

        Skips the write (returning the unchanged user) when nothing relevant
        differs and this is not a login request.
        """
        user_from_context: UserContext = kwargs['user_context']
        token = user_from_context.token_info
        if not token or not user:
            return None
        # Short-circuit when no field would change outside of a login.
        # NOTE(review): clauses like `(user.username == ... or user.username)`
        # are truthy whenever the stored value is non-empty, so they only
        # guard against a *missing* stored value — confirm this is intended.
        if not is_login \
                and (user.username == user_from_context.user_name or user.username) \
                and user.firstname == first_name \
                and user.lastname == last_name \
                and user.email == token.get('email', user.email) \
                and (str(user.keycloak_guid) == user_from_context.sub or user.keycloak_guid) \
                and user.status == UserStatus.ACTIVE.value \
                and (user.login_source == user_from_context.login_source or user.login_source) \
                and user.idp_userid == token.get('idp_userid', None):
            return user
        current_app.logger.debug(
            'Updating user from JWT:{}; User:{}'.format(token, user)
        )
        user.username = user_from_context.user_name or user.username
        user.firstname = first_name
        user.lastname = last_name
        user.email = token.get('email', user.email)
        user.modified = datetime.datetime.now()
        # Anonymous (BCROS) accounts get their keycloak_guid backfilled here.
        if token.get('accessType', None) == AccessType.ANONYMOUS.value:
            user.keycloak_guid = user_from_context.sub or user.keycloak_guid
        user.status = UserStatus.ACTIVE.value
        user.login_source = user_from_context.login_source or user.login_source
        user.type = cls._get_type(user_from_context)
        # Login requests additionally stamp login_time and sync idp_userid.
        if is_login:
            user.login_time = datetime.datetime.now()
            user.idp_userid = token.get('idp_userid')
        cls.commit()
        return user

    @classmethod
    def find_users(cls, first_name, last_name, email):
        """Return users matching the given first name, last name, or email.

        Empty-string arguments for all three fields return every user.
        """
        if first_name == '' and last_name == '' and email == '':
            return cls.query.all()
        return cls.query.filter(or_(cls.firstname == first_name, cls.lastname == last_name, cls.email == email)).all()

    @classmethod
    @user_context
    def update_terms_of_use(cls, is_terms_accepted, terms_of_use_version, **kwargs):
        """Record the current user's terms-of-use acceptance; None if no token."""
        user_from_context: UserContext = kwargs['user_context']
        if user_from_context.token_info:
            user = cls.find_by_jwt_token()
            user.is_terms_of_use_accepted = is_terms_accepted
            user.terms_of_use_accepted_version = terms_of_use_version
            current_app.logger.debug(
                'Updating users Terms of use is_terms_accepted:{}; terms_of_use_version:{}'.format(
                    is_terms_accepted, terms_of_use_version)
            )
            cls.save(user)
            return user
        return None

    @classmethod
    def find_users_by_org_id_by_status_by_roles(cls, org_id, roles, status=Status.ACTIVE.value):
        """Return members of org *org_id* holding any of *roles* with *status*."""
        return db.session.query(User). \
            join(MembershipModel,
                 (User.id == MembershipModel.user_id) &
                 (MembershipModel.status == status) &
                 (MembershipModel.membership_type_code.in_(roles))). \
            join(OrgModel).filter(OrgModel.id == org_id).all()

    def delete(self):
        """Users cannot be deleted, so intercept the ORM by just returning."""
        return self

    @classmethod
    def _get_type(cls, user_from_context: UserContext) -> str:
        """Derive the user's type category from token roles and login source."""
        user_type: str = None
        if user_from_context.roles:
            if Role.ANONYMOUS_USER.value in user_from_context.roles \
                    or user_from_context.login_source == LoginSource.BCROS.value:
                user_type = Role.ANONYMOUS_USER.name
            elif Role.GOV_ACCOUNT_USER.value in user_from_context.roles:
                user_type = Role.GOV_ACCOUNT_USER.name
            elif Role.PUBLIC_USER.value in user_from_context.roles \
                    or user_from_context.login_source in [LoginSource.BCEID.value, LoginSource.BCSC.value]:
                user_type = Role.PUBLIC_USER.name
            elif user_from_context.is_staff():
                user_type = Role.STAFF.name
            elif user_from_context.is_system():
                user_type = Role.SYSTEM.name
        return user_type
true
true
f70eb14a7a606f28ca7a0378c580389cd746f583
213
py
Python
tests/test.py
JamesRunnalls/netcdf2geotiff
69ed4232998ce45bcb47035a077009ad0882f5ec
[ "MIT" ]
null
null
null
tests/test.py
JamesRunnalls/netcdf2geotiff
69ed4232998ce45bcb47035a077009ad0882f5ec
[ "MIT" ]
null
null
null
tests/test.py
JamesRunnalls/netcdf2geotiff
69ed4232998ce45bcb47035a077009ad0882f5ec
[ "MIT" ]
null
null
null
# Smoke test: convert bands of a NetCDF file to GeoTIFF outputs.
from netcdf2geotiff import rgb_geotiff, singleband_geotiff

# Three-band RGB export using lat/lon coordinate variables.
rgb_geotiff("test3.nc", "test3.tif", "RED", "GREEN", "BLUE", "lat", "lon")
# Single-band export of the snow/ice mask.
# NOTE(review): "tests3.tif" is probably a typo for "test3_*.tif" — but
# renaming it to "test3.tif" would overwrite the RGB output above; confirm
# the intended output filename before changing it.
singleband_geotiff("test3.nc", "tests3.tif", "IDEPIX_SNOW_ICE", "lat", "lon")
42.6
77
0.713615
# Smoke test: convert bands of a NetCDF file to GeoTIFF outputs.
from netcdf2geotiff import rgb_geotiff, singleband_geotiff

# Three-band RGB export using lat/lon coordinate variables.
rgb_geotiff("test3.nc", "test3.tif", "RED", "GREEN", "BLUE", "lat", "lon")
# Single-band export of the snow/ice mask.
# NOTE(review): "tests3.tif" looks like a typo — confirm the intended
# output filename (renaming to "test3.tif" would clobber the RGB output).
singleband_geotiff("test3.nc", "tests3.tif", "IDEPIX_SNOW_ICE", "lat", "lon")
true
true
f70eb1feadb7d4ea843f887e4935a9b29163f734
1,071
py
Python
python/src/aoc/year2015/day6.py
ocirne/adventofcode
ea9b5f1b48a04284521e85c96b420ed54adf55f0
[ "Unlicense" ]
1
2021-02-16T21:30:04.000Z
2021-02-16T21:30:04.000Z
python/src/aoc/year2015/day6.py
ocirne/adventofcode
ea9b5f1b48a04284521e85c96b420ed54adf55f0
[ "Unlicense" ]
null
null
null
python/src/aoc/year2015/day6.py
ocirne/adventofcode
ea9b5f1b48a04284521e85c96b420ed54adf55f0
[ "Unlicense" ]
null
null
null
"""Advent of Code 2015, day 6: drive a grid of lights from an instruction list.

Each instruction names a rectangle ("turn on X,Y through X,Y", "turn off ...",
"toggle ...").  Part 1 treats cells as booleans; part 2 as brightness levels.
"""
from collections import defaultdict


def turn(d, fun, sxy, exy):
    """Apply *fun* to every cell of grid *d* in the inclusive rectangle.

    *sxy*/*exy* are "x,y" corner strings (e.g. "0,0", "999,999").
    """
    sx, sy = map(int, sxy.split(","))
    ex, ey = map(int, exy.split(","))
    for x in range(sx, ex + 1):
        for y in range(sy, ey + 1):
            d[(x, y)] = fun(d[(x, y)])


def run(data, toggle, turn_on, turn_off):
    """Apply every instruction line in *data* and return the grid's total.

    The three callables define what each verb does to a cell value, so the
    same driver serves both puzzle parts.

    Raises:
        ValueError: if a line matches none of the three known verbs.
    """
    grid = defaultdict(int)  # int() == 0: cells default to "off"/brightness 0
    for line in data:
        token = line.split()
        if line.startswith("toggle"):
            turn(grid, toggle, token[1], token[3])
        elif line.startswith("turn on"):
            turn(grid, turn_on, token[2], token[4])
        elif line.startswith("turn off"):
            turn(grid, turn_off, token[2], token[4])
        else:
            # Fail loudly with context instead of a bare `raise Exception`.
            raise ValueError("unrecognized instruction: {!r}".format(line))
    return sum(grid.values())


def part1(lines):
    """Part 1: lights are booleans; return how many end up on."""
    return run(lines, lambda v: not v, lambda _: True, lambda _: False)


def part2(lines):
    """Part 2: lights have brightness; return the total brightness."""
    return run(lines, lambda x: x + 2, lambda x: x + 1, lambda x: max(0, x - 1))


if __name__ == "__main__":
    # Project-local helper imported lazily so the module stays importable
    # (e.g. for testing part1/part2) without the aoc package on the path.
    from aoc.util import load_input

    data = load_input(__file__, 2015, "6")
    print(part1(data))
    print(part2(data))
26.121951
80
0.573296
# Advent of Code 2015, day 6: drive a grid of lights from an instruction list.
from collections import defaultdict

from aoc.util import load_input


def turn(d, fun, sxy, exy):
    # Apply `fun` to every cell of grid `d` in the inclusive rectangle whose
    # corners are the "x,y" strings `sxy` and `exy`.
    sx, sy = map(int, sxy.split(","))
    ex, ey = map(int, exy.split(","))
    for x in range(sx, ex + 1):
        for y in range(sy, ey + 1):
            d[(x, y)] = fun(d[(x, y)])


def run(data, toggle, turn_on, turn_off):
    # Apply every instruction line and return the sum of all cell values.
    # The three callables define what each verb does to a cell, so the same
    # driver serves both puzzle parts.
    grid = defaultdict(lambda: 0)
    for line in data:
        token = line.split()
        if line.startswith("toggle"):
            turn(grid, toggle, token[1], token[3])
        elif line.startswith("turn on"):
            turn(grid, turn_on, token[2], token[4])
        elif line.startswith("turn off"):
            turn(grid, turn_off, token[2], token[4])
        else:
            # Unknown verb: abort rather than silently skip the line.
            raise Exception
    return sum(grid.values())


def part1(lines):
    # Part 1: lights are booleans; toggle negates, on/off set True/False.
    return run(lines, lambda v: not v, lambda _: True, lambda _: False)


def part2(lines):
    # Part 2: brightness levels; toggle +2, on +1, off -1 (floored at 0).
    return run(lines, lambda x: x + 2, lambda x: x + 1, lambda x: max(0, x - 1))


if __name__ == "__main__":
    data = load_input(__file__, 2015, "6")
    print(part1(data))
    print(part2(data))
true
true
f70eb2beb7c9126a39c5a492ce778317fcf5af57
73,671
py
Python
lexer_parser/domas_parser.py
franzbarron/proyecto-compis
29b3b9a588a92f5257bc3b1bb646734b7ace985b
[ "MIT" ]
null
null
null
lexer_parser/domas_parser.py
franzbarron/proyecto-compis
29b3b9a588a92f5257bc3b1bb646734b7ace985b
[ "MIT" ]
null
null
null
lexer_parser/domas_parser.py
franzbarron/proyecto-compis
29b3b9a588a92f5257bc3b1bb646734b7ace985b
[ "MIT" ]
null
null
null
from sly import Parser from sly.yacc import _decorator as _ from .domas_lexer import DomasLexer from .domas_quadruples import Quadruple from .domas_errors import * from . import domas_semantic_cube as sm import json # to debug only import os import copy os.system('color') class DomasParser(Parser): # Parser directives tokens = DomasLexer.tokens # debugfile = 'parser.out' start = 'programa' # Tables function_table = {} class_table = {} constant_table = {'int': [], 'float': [], 'string': [], 'bool': []} # Stacks stack_of_stacks = [[], []] # operands, operators !important stack_vars = [] last_arr_t = [] displacements = [] for_var_dir = [] break_stack = [] # Lists quadruples = [] jumps = [] # Counters quad_counter = 1 param_counter = 0 temp_counter = 0 attr_counter = 1 # Aux vars current_class = None last_arr_id = None last_type = None last_func_added = None has_returned = False found_errors = False types = ['int', 'float', 'string', 'bool', 'void'] operators = ['+', '-', '*', '/', '<', '>', '<=', '>=', '==', '<>', '&', '|'] # Add a function to the function table def add_to_func_table(self, id, return_type): self.function_table[id] = { 'return_type': return_type, 'vars': {}, 'num_types': '0\u001f' * len(self.types), 'params': '', 'num_temps': '0\u001f' * len(self.types) } # Checks if a variable exists def check_variable_exists(self, var): if self.current_class != None: return var in self.function_table[self.curr_scope]['vars'] or var in self.class_table[self.current_class]['vars'] return var in self.function_table[self.curr_scope]['vars'] or var in self.function_table[self.program_name]['vars'] # Returns the type of a variable if it exists def get_var_type(self, var, tok): if not self.check_variable_exists(var): self.found_errors = True print('ERROR: No variable\033[1m', var, '\033[0mwas found.') print(' Missing reference found on line', tok.lineno) return None if self.current_class != None: if var in self.function_table[self.curr_scope]['vars']: return 
self.function_table[self.curr_scope]['vars'][var]['type'] return self.class_table[self.current_class]['vars'][var]['type'] if var in self.function_table[self.curr_scope]['vars']: return self.function_table[self.curr_scope]['vars'][var]['type'] return self.function_table[self.program_name]['vars'][var]['type'] # Updates the amount of temporals used in fucntions. def update_num_temps(self, func_num_temps, type_idx, quantity=1): lst = func_num_temps.split('\u001f') lst[type_idx] = str(int(lst[type_idx]) + quantity) return '\u001f'.join(lst) # Cheks if a var is an array by finding its first dimension def check_var_is_array(self, var): if not var: return if var['dir'] >= 4500 and var['dir'] < 6000: return False if not self.check_variable_exists(var['value']): return False if self.current_class != None: if var['value'] in self.function_table[self.curr_scope]['vars']: return 'd1' in self.function_table[self.curr_scope]['vars'][var['value']] else: return 'd1' in self.class_table[self.current_class]['vars'][var['value']] elif var['value'] in self.function_table[self.curr_scope]['vars']: return 'd1' in self.function_table[self.curr_scope]['vars'][var['value']] else: return 'd1' in self.function_table[self.program_name]['vars'][var['value']] # Makes quadruples for arithemtic operators and pushes them into the quadruple stack def make_and_push_quad(self): ro = self.stack_of_stacks[-2].pop() lo = self.stack_of_stacks[-2].pop() op = self.stack_of_stacks[-1].pop() if not ro or not lo: raise SystemError("Reached unsolvable state") r_type = sm.checkOperation(lo['type'], ro['type'], op) self.last_type = r_type idx = self.types.index(r_type) num_temps = self.function_table[self.curr_scope]['num_temps'] self.function_table[self.curr_scope]['num_temps'] = self.update_num_temps( num_temps, idx) t_dir = idx * 300 + \ int(num_temps.split('\u001f')[idx]) + 3000 self.stack_of_stacks[-2].append( {'value': 't' + str(self.temp_counter), 'type': r_type, 'dir': t_dir}) if 
self.check_var_is_array(lo): lo_dir = '$' + str(self.last_arr_t.pop()) else: lo_dir = lo['dir'] if self.check_var_is_array(ro): ro_dir = '$' + str(self.last_arr_t.pop()) else: ro_dir = ro['dir'] self.quadruples.append( Quadruple(lo_dir, ro_dir, op, t_dir)) self.temp_counter += 1 self.quad_counter += 1 @_('PROGRAM ID pro1 SEMI pro0 declarations') def programa(self, p): if self.found_errors: raise CompilationError() # func_dir_out = open('debug/funcdir.out', 'w') # class_dir_out = open('debug/classdir.out', 'w') # func_dir_out.write(json.dumps( # self.function_table, indent=2)) # class_dir_out.write(json.dumps( # self.class_table, indent=2)) return (self.program_name, self.function_table, self.class_table, self.constant_table, self.quadruples) # Creates goto main quadruple and appends it to the quadruple list @_('') def pro0(self, p): self.quadruples.append(Quadruple(-1, -1, 'goto', -1)) self.quad_counter += 1 # creates function table @_('') def pro1(self, p): self.program_name = p[-1] self.curr_scope = p[-1] self.function_table[p[-1]] = { 'return_type': None, 'vars': {}, 'num_types': '0\u001f0\u001f0\u001f0\u001f0\u001f' } @ _('class_declaration out_class var_declaration function_definition main') def declarations(self, p): return 'declarations' @ _('''CLASS ID cd1 inherits LCURL ATTRIBUTES attribute_declaration METHODS method_definition RCURL class_declaration''', 'empty') def class_declaration(self, p): return 'class_declaration' # Adds class to class table @ _('') def cd1(self, p): if p[-1] in self.class_table: self.found_errors = True print( 'ERROR: A class with the name\033[1m', p[-1], '\033[0mhas already been defined') print(' Redefinition found on line', self.symstack[-1].lineno) else: self.class_table[p[-1]] = { 'vars': {}, 'num_types': '0\u001f0\u001f0\u001f0\u001f0\u001f' } self.current_class = p[-1] @ _('INHERITS ID cd3', 'empty') def inherits(self, p): return 'inherits' # Copies the information from parent class to its child. 
@ _('') def cd3(self, p): if not p[-1] in self.class_table: self.found_errors = True print('ERROR: Id\033[1m', p[-1], '\033[0mis not defined as a class') print(' Missing reference found on line', self.symstack[-1].lineno) else: self.class_table[self.current_class] = copy.deepcopy( self.class_table[p[-1]]) @ _('VAR ID ad1 attr_vector', 'VAR ID ad1 attr_simple_var', 'empty') def attribute_declaration(self, p): return 'attribute_declaration' # Appends declared variable to the stack of variables if it doesn't exist @ _('') def ad1(self, p): if p[-1] in self.class_table[self.current_class]['vars']: self.found_errors = True print('ERROR: An attribute\033[1m', p[-1], '\033[0mhas already been defined') print(' Redefinition found on line', self.symstack[-1].lineno) else: self.stack_vars.append(p[-1]) @ _('''LBRACKET CTE_I ad2 attr_multidim RBRACKET COLON simple_type ad4 SEMI attribute_declaration''') def attr_vector(self, p): return 'vector' # Adds the first dimension of an array to the information of the variable @ _('') def ad2(self, p): self.latest_var = self.stack_vars.pop() if self.class_table[self.current_class]['vars']: self.class_table[self.current_class]['vars'][self.latest_var] = { 'd1': p[-1]} else: self.class_table[self.current_class]['vars'] = { self.latest_var: {'d1': p[-1]}} @ _('COMMA CTE_I ad3', 'empty') def attr_multidim(self, p): return 'attr_multidim' # Adds the second dimension of an array to the information of the variable. @ _('') def ad3(self, p): self.class_table[self.current_class]['vars'][self.latest_var]['d2'] = p[-1] # Adds the type and direction to the information of the variable declared and updated the amount of types used in the class. 
@ _('') def ad4(self, p): idx = self.types.index(p[-1]) num_types = self.class_table[self.current_class]['num_types'] if 'd1' in self.class_table[self.current_class]['vars'][self.latest_var]: q = self.class_table[self.current_class]['vars'][self.latest_var]['d1'] if 'd2' in self.class_table[self.current_class]['vars'][self.latest_var]: q *= self.class_table[self.current_class]['vars'][self.latest_var]['d2'] self.class_table[self.current_class]['vars'][self.latest_var]['dir'] = 6000 + idx * \ 300 + int(num_types.split('\u001f')[idx]) self.class_table[self.current_class]['vars'][self.latest_var]['type'] = p[-1] self.class_table[self.current_class]['num_types'] = self.update_num_temps( num_types, idx, q) @ _('attr_var_list COLON simple_type ad5 SEMI attribute_declaration') def attr_simple_var(self, p): return 'attr_simple_var' @ _('COMMA ID ad1 attr_var_list', 'empty') def attr_var_list(self, p): return 'attr_var_list' # Pops the stack of variables and giving each their corresponding type. @ _('') def ad5(self, p): while len(self.stack_vars) > 0: curr_var = self.stack_vars.pop() if curr_var in self.class_table[self.current_class]['vars']: self.found_errors = True print('ERROR: An attribute\033[1m', curr_var, '\033[0mhas already been defined in class', self.current_class) print(' Redefinition found on line', self.symstack[-2].lineno) idx = self.types.index(p[-1]) num_types = self.class_table[self.current_class]['num_types'] self.class_table[self.current_class]['num_types'] = self.update_num_temps( num_types, idx) self.class_table[self.current_class]['vars'][curr_var] = { 'type': p[-1], 'dir': 6000 + idx * 300 + int(num_types.split('\u001f')[idx])} @ _('''def_type fd1 FUNCTION ID md3 LPAREN m_parameters RPAREN LCURL fd4 var_declaration statements RCURL fd5 fd6 method_definition''', 'empty') def method_definition(self, p): return 'method_definition' # Adds the id with the name of the class prefixed to the function table. 
@ _('') def md3(self, p): if p[-1] in self.class_table[self.current_class]['vars']: self.found_errors = True print('ERROR: An attribute or method\033[1m', p[-1], '\033[0mhas already been defined in class', self.current_class) print(' Redefinition found on line', self.symstack[-1].lineno) else: self.add_to_func_table( self.current_class + '.' + p[-1], self.curr_func_type) self.last_func_added = self.current_class + '.' + p[-1] self.curr_scope = self.last_func_added idx = self.types.index(self.curr_func_type) num_types = self.function_table[self.program_name]['num_types'] self.function_table[self.program_name]['num_types'] = self.update_num_temps( num_types, idx) self.function_table[self.program_name]['vars'][self.current_class + '.' + p[-1]] = { 'type': self.curr_func_type, 'dir': 0, 'real_dir': idx * 300 + int(num_types.split('\u001f')[idx])} @_('ID p1 COLON simple_type m2 m_param_choose', 'empty') def m_parameters(self, p): return 'parameters' # Adds the id and type of the parameter to the table of variables of the current method. 
@ _('') def m2(self, p): if self.latest_var in self.function_table[self.curr_scope]['vars']: self.found_errors = True print('ERROR: A parameter\033[1m', self.latest_var, '\033[0mhas already been declared for method', self.last_func_added.split('.')[1], 'in class', self.current_class) print(' Redefinition found on line', self.symstack[-2].lineno) else: idx = self.types.index(p[-1]) self.function_table[self.curr_scope]['params'] += str( idx) num_types = self.function_table[self.curr_scope]['num_types'] self.function_table[self.curr_scope]['num_types'] = self.update_num_temps( num_types, idx) self.function_table[self.curr_scope]['vars'][self.latest_var] = { 'type': p[-1], 'dir': 1500 + idx * 300 + int(num_types.split('\u001f')[idx]) } @ _('COMMA m_parameters', 'empty') def m_param_choose(self, p): return 'm_param_choose' @ _('') def out_class(self, p): self.current_class = None self.curr_scope = self.program_name @ _('VAR ID gvd1 vector', 'VAR ID gvd1 simple_var', 'empty') def var_declaration(self, p): return p[0] # Appends the current var to the stack of variables of it doesn't exist. @ _('') def gvd1(self, p): if p[-1] in self.function_table: self.found_errors = True print('ERROR: A function with ID\033[1m', p[-1], '\033[0mhas already been declared. Variables may not share name with functions') print(' Redefinition found on line', self.symstack[-1].lineno) # elif self.current_class != None and p[-1] in self.class_table[self.current_class]: # raise RedefinitionError(p[-1]) else: self.stack_vars.append(p[-1]) @ _('LBRACKET CTE_I gvd2 multidim RBRACKET COLON simple_type gvd4 SEMI var_declaration') def vector(self, p): return 'vector' # Adds the variable and its first dimension to the variable table of the current scope. 
    # Embedded action: pop the declared array name and store its first
    # dimension ('d1' = p[-1], the CTE_I just parsed) in the scope's var table.
    @ _('')
    def gvd2(self, p):
        self.latest_var = self.stack_vars.pop()
        if self.function_table[self.curr_scope]['vars']:
            self.function_table[self.curr_scope]['vars'][self.latest_var] = {
                'd1': p[-1]
            }
        else:
            # First variable of this scope: create the table.
            self.function_table[self.curr_scope]['vars'] = {
                self.latest_var: {'d1': p[-1]}
            }

    # Grammar rule: optional second dimension of an array declaration.
    @ _('COMMA CTE_I gvd3', 'empty')
    def multidim(self, p):
        return 'multidim'

    # Grammar rule: simple (non-array) declaration with composite or simple type.
    @ _('var_list COLON composite_type gvd6 SEMI var_declaration',
       'var_list COLON simple_type gvd5 SEMI var_declaration')
    def simple_var(self, p):
        return 'simple_var'

    # Grammar rule: optional continuation of an ID list in a declaration.
    @ _('COMMA ID gvd1 var_list', 'empty')
    def var_list(self, p):
        return 'var_list'

    # Adds the second dimension to the latest variable in the current scope.
    @ _('')
    def gvd3(self, p):
        self.function_table[self.curr_scope]['vars'][self.latest_var]['d2'] = p[-1]

    # Adds the type and address to the information of the variable declared and
    # updates the amount of types used in the scope.
    @ _('')
    def gvd4(self, p):
        idx = self.types.index(p[-1])
        num_types = self.function_table[self.curr_scope]['num_types']
        # Locals live in the 1500+ bank; program-scope (global) vars start at 0.
        offset = 1500 if self.curr_scope != self.program_name else 0
        # q = total number of cells the array occupies (d1, or d1*d2 for a matrix).
        # NOTE(review): q is only bound inside this 'd1' check — gvd4 is reached
        # via the 'vector' rule after gvd2 set 'd1', so it should always exist;
        # confirm no other path reaches gvd4.
        if 'd1' in self.function_table[self.curr_scope]['vars'][self.latest_var]:
            q = self.function_table[self.curr_scope]['vars'][self.latest_var]['d1']
            if 'd2' in self.function_table[self.curr_scope]['vars'][self.latest_var]:
                q *= self.function_table[self.curr_scope]['vars'][self.latest_var]['d2']
        self.function_table[self.curr_scope]['vars'][self.latest_var]['dir'] = idx * \
            300 + int(num_types.split('\u001f')[idx]) + offset
        self.function_table[self.curr_scope]['vars'][self.latest_var]['type'] = p[-1]
        # Advance the type counter by q cells to reserve the whole array.
        self.function_table[self.curr_scope]['num_types'] = self.update_num_temps(
            num_types, idx, q)

    # Pops the var stack, adding their ids and types to the variable directory
    # of the current scope.
    @ _('')
    def gvd5(self, p):
        while len(self.stack_vars) > 0:
            curr_var = self.stack_vars.pop()
            if curr_var in self.function_table[self.curr_scope]['vars']:
                self.found_errors = True
                print('ERROR: A variable\033[1m', curr_var,
                      '\033[0mhas already been declared.')
                print('       Redefinition found on line', self.symstack[-5].lineno)
            idx = self.types.index(p[-1])
            num_types = self.function_table[self.curr_scope]['num_types']
            offset = 1500 if self.curr_scope != self.program_name else 0
            self.function_table[self.curr_scope]['vars'][curr_var] = {
                'type': p[-1],
                'dir': idx * 300 + int(num_types.split('\u001f')[idx]) + offset}
            self.function_table[self.curr_scope]['num_types'] = self.update_num_temps(
                num_types, idx)

    # Same as gvd5 but it also adds the variables of the class in question to
    # the table of the current scope (object attributes become '<obj>.<attr>').
    @ _('')
    def gvd6(self, p):
        while len(self.stack_vars) > 0:
            var_id = self.stack_vars.pop()
            if var_id in self.function_table[self.curr_scope]['vars']:
                self.found_errors = True
                print('ERROR: A variable\033[1m', var_id,
                      '\033[0mhas already been declared.')
                print('       Redefinition found on line', self.symstack[-5].lineno)
            offset = 1500 if self.curr_scope != self.program_name else 0
            num_types = self.function_table[self.curr_scope]['num_types']
            # Current per-type allocation offsets in this scope.
            base_addrs = [int(n) for n in num_types.split('\u001f')[:-1]]
            if not p[-1] in self.class_table:
                # p[-1] (the composite type) is not a declared class; find the
                # nearest symbol with a line number for the error report.
                for i in range(1, len(self.symstack)):
                    if hasattr(self.symstack[i * -1], 'lineno'):
                        lineno = self.symstack[i * -1].lineno
                        break
                print('ERROR: No class\033[1m', p[-1], '\033[0mwas found.')
                print('       Missing reference found on line', lineno)
                return
            # Flatten each class attribute into this scope as '<obj>.<attr>'.
            # The attribute's class-relative address (based at 6000 — inferred
            # from the '- 6000' rebasing below) is translated into this scope.
            for attr in self.class_table[p[-1]]['vars']:
                attr_type = self.class_table[p[-1]]['vars'][attr]['type']
                idx = self.types.index(attr_type)
                num_types = self.function_table[self.curr_scope]['num_types']
                q = 1
                self.function_table[self.curr_scope]['vars'][var_id + '.' + attr] = {
                    'type': attr_type,
                    'dir': base_addrs[idx] + self.class_table[p[-1]]['vars'][attr]['dir'] - 6000 + offset
                }
                if 'd1' in self.class_table[p[-1]]['vars'][attr]:
                    # Array attribute: copy dimensions and reserve q cells.
                    q = self.class_table[p[-1]]['vars'][attr]['d1']
                    self.function_table[self.curr_scope]['vars'][var_id + '.'
                                                                 + attr]['d1'] = q
                    if 'd2' in self.class_table[p[-1]]['vars'][attr]:
                        d2 = self.class_table[p[-1]]['vars'][attr]['d2']
                        q *= d2
                        self.function_table[self.curr_scope]['vars'][var_id + '.'
                                                                     + attr]['d2'] = d2
                self.function_table[self.curr_scope]['num_types'] = self.update_num_temps(
                    num_types, idx, q)
            # Register the object itself with its class as type.
            self.function_table[self.curr_scope]['vars'][var_id] = {
                'type': p[-1]}

    # Grammar rule: function definition (or empty).
    @_('def_type fd1 FUNCTION ID fd3 LPAREN parameters RPAREN LCURL fd4 var_declaration statements RCURL fd5 fd6 function_definition',
       'empty')
    def function_definition(self, p):
        return 'function_definition'

    # Saves the current function type in a variable.
    @ _('')
    def fd1(self, p):
        self.curr_func_type = p[-1]

    # Adds the id of the function to the function table.
    @ _('')
    def fd3(self, p):
        if p[-1] in self.function_table:
            self.found_errors = True
            print('ERROR: A function\033[1m', p[-1],
                  '\033[0mhas already been defined.')
            print('       Redefinition found on line', self.symstack[-1].lineno)
        elif p[-1] in self.function_table[self.program_name]['vars']:
            self.found_errors = True
            print('ERROR: A global variable\033[1m', p[-1],
                  '\033[0mhas been declared. Functions may not share names with global variables')
            print('       Redefinition found on line', self.symstack[-1].lineno)
        else:
            self.add_to_func_table(p[-1], self.curr_func_type)
            self.last_func_added = p[-1]
            self.curr_scope = self.last_func_added
            # Reserve a global slot for the function's return value
            # ('real_dir'); 'dir' is filled with the temp address at call time.
            idx = self.types.index(self.curr_func_type)
            num_types = self.function_table[self.program_name]['num_types']
            self.function_table[self.program_name]['num_types'] = self.update_num_temps(
                num_types, idx)
            self.function_table[self.program_name]['vars'][p[-1]] = {
                'type': self.curr_func_type,
                'dir': 0,
                'real_dir': idx * 300 + int(num_types.split('\u001f')[idx])}

    # Adds the start of the quadruples related to the current function to its
    # information in the function table.
    @ _('')
    def fd4(self, p):
        if not self.last_func_added:
            return
        self.function_table[self.last_func_added]['start'] = self.quad_counter

    # Deletes the variable table of the current scope (no longer needed once
    # the function body has been fully processed).
    @ _('')
    def fd5(self, p):
        if not self.last_func_added:
            return
        del self.function_table[self.last_func_added]['vars']

    # Creates and appends the end_func quadruple to the quadruple stack, after
    # checking that a non-void function returned something.
    @ _('')
    def fd6(self, p):
        if self.curr_func_type != 'void' and self.has_returned == False:
            self.found_errors = True
            print('ERROR: Function\033[1m', self.curr_scope,
                  '\033[0mis missing a return statement')
            print('       Non-void functions must have a return statement.')
        self.quadruples.append(Quadruple(-1, -1, 'end_func', -1))
        self.quad_counter += 1
        # Reset per-function state.
        self.temp_counter = 1
        self.has_returned = False

    # Grammar rule: zero or more statements.
    @ _('statement statements', 'empty')
    def statements(self, p):
        return 'statements'

    # Grammar rule: a function's declared type (simple type or void).
    @ _('simple_type', 'VOID')
    def def_type(self, p):
        return p[0]

    # Grammar rule: the primitive types.
    @ _('INT', 'FLOAT', 'STRING', 'BOOL')
    def simple_type(self, p):
        return p[0]

    # Grammar rule: a user-defined (class) type.
    @ _('ID')
    def composite_type(self, p):
        return p[0]

    # Grammar rule: parameter list of a free function (possibly empty).
    @ _('ID p1 COLON simple_type p2 param_choose', 'empty')
    def parameters(self, p):
        return 'parameters'

    @ _('COMMA parameters', 'empty')
    def param_choose(self, p):
        return 'param_choose'

    # Saves the ID of the parameter in a variable.
    @ _('')
    def p1(self, p):
        self.latest_var = p[-1]

    # Adds the type of the parameter to its information in the variable table
    # of the current scope.
    @ _('')
    def p2(self, p):
        if self.latest_var in self.function_table[self.curr_scope]['vars']:
            self.found_errors = True
            print('ERROR: A parameter\033[1m', self.latest_var,
                  '\033[0mhas already been declared for function', self.last_func_added)
            print('       Redefinition found on line', self.symstack[-2].lineno)
        idx = self.types.index(p[-1])
        self.function_table[self.curr_scope]['params'] += str(idx)
        num_types = self.function_table[self.curr_scope]['num_types']
        offset = 1500 if self.curr_scope != self.program_name else 0
        self.function_table[self.curr_scope]['num_types'] = self.update_num_temps(
            num_types, idx)
        self.function_table[self.curr_scope]['vars'][self.latest_var] = {
            'type': p[-1],
            'dir': idx * 300 + int(num_types.split('\u001f')[idx]) + offset}

    # Grammar rule: the kinds of statements the language supports.
    @_('assignment', 'call_to_void_function', 'function_returns', 'read',
       'print', 'decision_statement', 'repetition_statement', 'BREAK br0 SEMI')
    def statement(self, p):
        return 'statement'

    # Adds the quadruple counter to the break stack (pending goto to be
    # patched at loop exit); errors if used outside a loop.
    @_('')
    def br0(self, p):
        if len(self.jumps) == 0:
            self.found_errors = True
            print('ERROR: break statement on line',
                  self.symstack[-1].lineno, 'used outside a loop')
        self.quadruples.append(Quadruple(-1, -1, 'goto', None))
        self.break_stack.append(self.quad_counter)
        self.quad_counter += 1

    # Grammar rule: assignment statement.
    @_('variable ass1 EQUALS expression ass2 SEMI')
    def assignment(self, p):
        return 'assignment'

    # Save the id in a variable.
    @_('')
    def ass1(self, p):
        self.latest_var = p[-1]

    # Generate the quadruple containing an '=' as its operator.
    # Get the addresses of the left operand and of the assignment target,
    # then emit the '=' quadruple.
    @_('')
    def ass2(self, p):
        # Flush any pending operators from the expression on the right side.
        made_quad = False
        while(len(self.stack_of_stacks[-1])):
            self.make_and_push_quad()
            made_quad = True
        lo = self.stack_of_stacks[-2].pop()
        if not lo:
            return
        v_type = self.get_var_type(self.latest_var, self.symstack[-2])
        if not v_type:
            return
        # Type-check target = value.
        self.last_type = sm.checkOperation(v_type, lo['type'], '=')
        # '$'-prefixed addresses are indirect (computed array cell addresses).
        if not made_quad and self.check_var_is_array(lo):
            lo_dir = '$' + str(self.last_arr_t.pop())
        else:
            lo_dir = lo['dir']
        # Resolve the target's address: class attribute, local, or global;
        # arrays use the precomputed indirect address from last_arr_t.
        if self.current_class != None:
            if self.latest_var in self.class_table[self.current_class]['vars']:
                if 'd1' in self.class_table[self.current_class]['vars'][self.latest_var]:
                    if not self.last_arr_t:
                        return
                    var_dir = '$' + str(self.last_arr_t.pop())
                else:
                    var_dir = self.class_table[self.current_class]['vars'][self.latest_var]['dir']
            else:
                if 'd1' in self.function_table[self.curr_scope]['vars'][self.latest_var]:
                    if not self.last_arr_t:
                        return
                    var_dir = '$' + str(self.last_arr_t.pop())
                else:
                    var_dir = self.function_table[self.curr_scope]['vars'][self.latest_var]['dir']
        elif self.latest_var in self.function_table[self.curr_scope]['vars']:
            if 'd1' in self.function_table[self.curr_scope]['vars'][self.latest_var]:
                if not self.last_arr_t:
                    return
                var_dir = '$' + str(self.last_arr_t.pop())
            else:
                var_dir = self.function_table[self.curr_scope]['vars'][self.latest_var]['dir']
        else:
            if 'd1' in self.function_table[self.program_name]['vars'][self.latest_var]:
                if not self.last_arr_t:
                    return
                var_dir = '$' + str(self.last_arr_t.pop())
            else:
                var_dir = self.function_table[self.program_name]['vars'][self.latest_var]['dir']
        q = Quadruple(lo_dir, -1, '=', var_dir)
        self.quadruples.append(q)
        self.quad_counter += 1

    # Grammar rule: plain variable, array access, or matrix access.
    @_('id_or_attribute',
       'id_or_attribute v0 LBRACKET expression v1 RBRACKET',
       'id_or_attribute v0 LBRACKET expression v2 COMMA v4 expression v3 RBRACKET')
    def variable(self, p):
        return p[0]

    # Checks that the variable exists and is an array; opens fresh operand and
    # operator stacks for the index expression.
    @_('')
    def v0(self, p):
        self.check_variable_exists(p[-1])
        if self.current_class != None:
            if not 'd1' in self.class_table[self.current_class]['vars'][p[-1]]:
                self.found_errors = True
                print('ERROR: Variable\033[1m', p[-1],
                      '\033[0mis not an array or matrix.')
                self.stack_of_stacks.append([])
                self.stack_of_stacks.append([])
                return
        elif p[-1] in self.function_table[self.curr_scope]['vars']:
            if not 'd1' in self.function_table[self.curr_scope]['vars'][p[-1]]:
                self.found_errors = True
                print('ERROR: Variable\033[1m', p[-1],
                      '\033[0mis not an array or matrix.')
                self.stack_of_stacks.append([])
                self.stack_of_stacks.append([])
                return
        elif not 'd1' in self.function_table[self.program_name]['vars'][p[-1]]:
            self.found_errors = True
            print('ERROR: Variable\033[1m', p[-1],
                  '\033[0mis not an array or matrix.')
            self.stack_of_stacks.append([])
            self.stack_of_stacks.append([])
            return
        self.last_arr_id = p[-1]
        self.stack_of_stacks.append([])
        self.stack_of_stacks.append([])

    # Checks that the variable is a matrix (has a second dimension); opens
    # fresh stacks for the second index expression.
    @_('')
    def v4(self, p):
        self.check_variable_exists(self.last_arr_id)
        if self.current_class != None:
            if not 'd2' in self.class_table[self.current_class]['vars'][self.last_arr_id]:
                self.found_errors = True
                print('ERROR: Variable\033[1m', self.last_arr_id,
                      '\033[0mis not a matrix.')
        elif self.last_arr_id in self.function_table[self.curr_scope]['vars']:
            if not 'd2' in self.function_table[self.curr_scope]['vars'][self.last_arr_id]:
                self.found_errors = True
                print('ERROR: Variable\033[1m', self.last_arr_id,
                      '\033[0mis not a matrix.')
        else:
            if not 'd2' in self.function_table[self.program_name]['vars'][self.last_arr_id]:
                self.found_errors = True
                print('ERROR: Variable\033[1m', self.last_arr_id,
                      '\033[0mis not a matrix.')
        self.stack_of_stacks.append([])
        self.stack_of_stacks.append([])

    # Calculates the address of the array index: flushes the index expression,
    # emits 'verify' (bounds check) and base+index quadruples, and pushes the
    # resulting indirect address onto last_arr_t.
    @_('')
    def v1(self, p):
        made_quad = False
        while(len(self.stack_of_stacks[-1])):
            ro = self.stack_of_stacks[-2].pop()
            lo = self.stack_of_stacks[-2].pop()
            op = self.stack_of_stacks[-1].pop()
            self.last_type = sm.checkOperation(lo['type'], ro['type'], op)
            idx = self.types.index(self.last_type)
            num_temps = self.function_table[self.curr_scope]['num_temps']
            t_dir = idx * 300 + \
                int(num_temps.split('\u001f')[idx]) + 3000
            self.function_table[self.curr_scope]['num_temps'] = self.update_num_temps(
                num_temps, idx)
            self.quadruples.append(
                Quadruple(lo['dir'], ro['dir'], op, t_dir))
            self.temp_counter += 1
            self.quad_counter += 1
            made_quad = True
        if self.current_class != None and self.last_arr_id in self.class_table[self.current_class]['vars']:
            t_addr = self.quadruples[-1].res if made_quad else self.stack_of_stacks[-2].pop()[
                'dir']
            # The index must be of type int or float (bank slots 0/1).
            if (t_addr % 1500) // 300 != 0 and (t_addr % 1500) // 300 != 1:
                raise TypeError('Type mismatch')
            lms = self.class_table[self.current_class]['vars'][self.last_arr_id]['d1']
            self.quadruples.append(Quadruple(0, lms, 'verify', t_addr))
            self.quad_counter += 1
            # The array base address is materialized as an int constant so the
            # VM can add it to the index at runtime.
            dir_b = self.class_table[self.current_class]['vars'][self.last_arr_id]['dir']
            if not dir_b in self.constant_table['int']:
                self.constant_table['int'].append(dir_b)
            cons_dir = self.constant_table['int'].index(dir_b) + 4500
            num_temps = self.function_table[self.curr_scope]['num_temps']
            t_dir = int(num_temps.split('\u001f')[0]) + 3000
            self.function_table[self.curr_scope]['num_temps'] = self.update_num_temps(
                num_temps, 0)
            self.quadruples.append(Quadruple(cons_dir, t_addr, '+', t_dir))
            self.quad_counter += 1
            self.last_arr_t.append(t_dir)
        elif self.last_arr_id in self.function_table[self.curr_scope]['vars']:
            t_addr = self.quadruples[-1].res if made_quad else self.stack_of_stacks[-2].pop()[
                'dir']
            if (t_addr % 1500) // 300 != 0 and (t_addr % 1500) // 300 != 1:
                raise TypeError('Type mismatch')
            lms = self.function_table[self.curr_scope]['vars'][self.last_arr_id]['d1']
            self.quadruples.append(Quadruple(0, lms, 'verify', t_addr))
            self.quad_counter += 1
            dir_b = self.function_table[self.curr_scope]['vars'][self.last_arr_id]['dir']
            if not dir_b in self.constant_table['int']:
                self.constant_table['int'].append(dir_b)
            cons_dir = self.constant_table['int'].index(dir_b) + 4500
            num_temps = self.function_table[self.curr_scope]['num_temps']
            t_dir = int(num_temps.split('\u001f')[0]) + 3000
            self.function_table[self.curr_scope]['num_temps'] = self.update_num_temps(
                num_temps, 0)
            self.quadruples.append(Quadruple(cons_dir, t_addr, '+', t_dir))
            self.quad_counter += 1
            self.last_arr_t.append(t_dir)
        else:
            t_addr = self.quadruples[-1].res if made_quad else self.stack_of_stacks[-2].pop()[
                'dir']
            if (t_addr % 1500) // 300 != 0 and (t_addr % 1500) // 300 != 1:
                raise TypeError('Type mismatch')
            lms = self.function_table[self.program_name]['vars'][self.last_arr_id]['d1']
            self.quadruples.append(Quadruple(0, lms, 'verify', t_addr))
            self.quad_counter += 1
            dir_b = self.function_table[self.program_name]['vars'][self.last_arr_id]['dir']
            if not dir_b in self.constant_table['int']:
                self.constant_table['int'].append(dir_b)
            cons_dir = self.constant_table['int'].index(dir_b) + 4500
            num_temps = self.function_table[self.curr_scope]['num_temps']
            t_dir = int(num_temps.split('\u001f')[0]) + 3000
            self.function_table[self.curr_scope]['num_temps'] = self.update_num_temps(
                num_temps, 0)
            self.quadruples.append(Quadruple(cons_dir, t_addr, '+', t_dir))
            self.quad_counter += 1
            self.last_arr_t.append(t_dir)
        # Discard the stacks opened for the index expression.
        self.stack_of_stacks.pop()
        self.stack_of_stacks.pop()

    # Calculate the address contribution of the matrix's first index:
    # verify bounds, multiply by d2, and save it on the displacements stack.
    # NOTE(review): the current_class branch is an empty 'pass' — matrix
    # attributes of classes appear unsupported here; confirm.
    @_('')
    def v2(self, p):
        made_quad = False
        while(len(self.stack_of_stacks[-1])):
            ro = self.stack_of_stacks[-2].pop()
            lo = self.stack_of_stacks[-2].pop()
            op = self.stack_of_stacks[-1].pop()
            self.last_type = sm.checkOperation(lo['type'], ro['type'], op)
            idx = self.types.index(self.last_type)
            num_temps = self.function_table[self.curr_scope]['num_temps']
            t_dir = idx * 300 + \
                int(num_temps.split('\u001f')[idx]) + 3000
            self.function_table[self.curr_scope]['num_temps'] = self.update_num_temps(
                num_temps, idx)
            self.quadruples.append(
                Quadruple(lo['dir'], ro['dir'], op, t_dir))
            self.temp_counter += 1
            self.quad_counter += 1
            made_quad = True
        if self.current_class != None:
            pass
        elif self.last_arr_id in self.function_table[self.curr_scope]['vars']:
            t_addr = self.quadruples[-1].res if made_quad else self.stack_of_stacks[-2].pop()[
                'dir']
            if (t_addr % 1500) // 300 != 0 and (t_addr % 1500) // 300 != 1:
                raise TypeError('Type mismatch')
            lms = self.function_table[self.curr_scope]['vars'][self.last_arr_id]['d1']
            self.quadruples.append(Quadruple(0, lms, 'verify', t_addr))
            self.quad_counter += 1
            d2 = self.function_table[self.curr_scope]['vars'][self.last_arr_id]['d2']
            if not d2 in self.constant_table['int']:
                self.constant_table['int'].append(d2)
            cons_dir = self.constant_table['int'].index(d2) + 4500
            num_temps = self.function_table[self.curr_scope]['num_temps']
            t_dir = int(num_temps.split('\u001f')[0]) + 3000
            self.function_table[self.curr_scope]['num_temps'] = self.update_num_temps(
                num_temps, 0)
            self.quadruples.append(Quadruple(cons_dir, t_addr, '*', t_dir))
            self.quad_counter += 1
            self.displacements.append(t_dir)
        else:
            t_addr = self.quadruples[-1].res if made_quad else self.stack_of_stacks[-2].pop()[
                'dir']
            if (t_addr % 1500) // 300 != 0 and (t_addr % 1500) // 300 != 1:
                raise TypeError('Type mismatch')
            lms = self.function_table[self.program_name]['vars'][self.last_arr_id]['d1']
            self.quadruples.append(Quadruple(0, lms, 'verify', t_addr))
            self.quad_counter += 1
            d2 = self.function_table[self.program_name]['vars'][self.last_arr_id]['d2']
            if not d2 in self.constant_table['int']:
                self.constant_table['int'].append(d2)
            cons_dir = self.constant_table['int'].index(d2) + 4500
            num_temps = self.function_table[self.curr_scope]['num_temps']
            t_dir = int(num_temps.split('\u001f')[0]) + 3000
            self.function_table[self.curr_scope]['num_temps'] = self.update_num_temps(
                num_temps, 0)
            self.quadruples.append(Quadruple(cons_dir, t_addr, '*', t_dir))
            self.quad_counter += 1
            self.displacements.append(t_dir)
        self.stack_of_stacks.pop()
        self.stack_of_stacks.pop()

    # Calculate s1*d2 + s2 + base: combine the saved displacement with the
    # second index, add the base address, and push the final indirect address.
    @_('')
    def v3(self, p):
        made_quad = False
        while(len(self.stack_of_stacks[-1])):
            ro = self.stack_of_stacks[-2].pop()
            lo = self.stack_of_stacks[-2].pop()
            op = self.stack_of_stacks[-1].pop()
            self.last_type = sm.checkOperation(lo['type'], ro['type'], op)
            idx = self.types.index(self.last_type)
            num_temps = self.function_table[self.curr_scope]['num_temps']
            t_dir = idx * 300 + \
                int(num_temps.split('\u001f')[idx]) + 3000
            self.function_table[self.curr_scope]['num_temps'] = self.update_num_temps(
                num_temps, idx)
            self.quadruples.append(
                Quadruple(lo['dir'], ro['dir'], op, t_dir))
            self.temp_counter += 1
            self.quad_counter += 1
            made_quad = True
        if self.current_class != None:
            pass
        elif self.last_arr_id in self.function_table[self.curr_scope]['vars']:
            t_addr = self.quadruples[-1].res if made_quad else self.stack_of_stacks[-2].pop()[
                'dir']
            if (t_addr % 1500) // 300 != 0 and (t_addr % 1500) // 300 != 1:
                raise TypeError('Type mismatch')
            lms = self.function_table[self.curr_scope]['vars'][self.last_arr_id]['d2']
            self.quadruples.append(Quadruple(0, lms, 'verify', t_addr))
            self.quad_counter += 1
            dir_b = self.function_table[self.curr_scope]['vars'][self.last_arr_id]['dir']
            if not dir_b in self.constant_table['int']:
                self.constant_table['int'].append(dir_b)
            cons_dir = self.constant_table['int'].index(dir_b) + 4500
            num_temps = self.function_table[self.curr_scope]['num_temps']
            t_dir = int(num_temps.split('\u001f')[0]) + 3000
            # Two consecutive int temps are reserved (hence the ', 2').
            self.function_table[self.curr_scope]['num_temps'] = self.update_num_temps(
                num_temps, 0, 2)
            self.quadruples.append(
                Quadruple(self.displacements.pop(), t_addr, '+', t_dir))
            self.quadruples.append(Quadruple(cons_dir, t_dir, '+', t_dir + 1))
            self.quad_counter += 2
            self.last_arr_t.append(t_dir + 1)
        else:
            t_addr = self.quadruples[-1].res if made_quad else self.stack_of_stacks[-2].pop()[
                'dir']
            if (t_addr % 1500) // 300 != 0 and (t_addr % 1500) // 300 != 1:
                raise TypeError('Type mismatch')
            lms = self.function_table[self.program_name]['vars'][self.last_arr_id]['d2']
            self.quadruples.append(Quadruple(0, lms, 'verify', t_addr))
            self.quad_counter += 1
            dir_b = self.function_table[self.program_name]['vars'][self.last_arr_id]['dir']
            if not dir_b in self.constant_table['int']:
                self.constant_table['int'].append(dir_b)
            cons_dir = self.constant_table['int'].index(dir_b) + 4500
            num_temps = self.function_table[self.curr_scope]['num_temps']
            t_dir = int(num_temps.split('\u001f')[0]) + 3000
            self.function_table[self.curr_scope]['num_temps'] = self.update_num_temps(
                num_temps, 0, 2)
            self.quadruples.append(
                Quadruple(self.displacements.pop(), t_addr, '+', t_dir))
            self.quadruples.append(Quadruple(cons_dir, t_dir, '+', t_dir + 1))
            self.quad_counter += 2
            self.last_arr_t.append(t_dir + 1)
        self.stack_of_stacks.pop()
        self.stack_of_stacks.pop()

    # Grammar rule: bare ID or attribute access 'ID.ID' (returned as one string).
    @_('ID', 'ID DOT ID')
    def id_or_attribute(self, p):
        if len(p) > 1:
            return p[0] + p[1] + p[2]
        return p[0]

    # Returns value, type and address of the constant. Constants live in the
    # 4500+ bank, sub-banked by type: int +0, float +300, string +600, bool +900.
    @_('variable', 'CTE_I', 'CTE_F', 'CTE_STRING', 'cte_bool', 'call_to_function')
    def var_cte(self, p):
        offset = 4500
        if hasattr(p, 'CTE_I'):
            cte_type = 'int'
            if not p[0] in self.constant_table['int']:
                self.constant_table['int'].append(p[0])
            cons_dir = self.constant_table['int'].index(p[0]) + offset
        elif hasattr(p, 'CTE_F'):
            cte_type = 'float'
            if not p[0] in self.constant_table['float']:
                self.constant_table['float'].append(p[0])
            cons_dir = self.constant_table['float'].index(p[0]) + offset + 300
        elif hasattr(p, 'CTE_STRING'):
            cte_type = 'string'
            if not p[0] in self.constant_table['string']:
                self.constant_table['string'].append(p[0])
            cons_dir = self.constant_table['string'].index(p[0]) + offset + 600
        elif hasattr(p, 'cte_bool'):
            cte_type = 'bool'
            if not p[0] in self.constant_table['bool']:
                self.constant_table['bool'].append(p[0])
            cons_dir = self.constant_table['bool'].index(p[0]) + offset + 900
        elif hasattr(p, 'call_to_function'):
            # Function calls already resolve their own value/type/dir.
            return p[0]
        else:
            # Plain variable reference: resolve its type and address.
            if not self.check_variable_exists(p[0]):
                for i in range(1, len(self.symstack)):
                    if hasattr(self.symstack[i * -1], 'lineno'):
                        lineno = self.symstack[i * -1].lineno
                        break
                self.found_errors = True
                print('ERROR: No variable\033[1m', p[0], '\033[0mwas found.')
                print('       Missing reference found on line', lineno)
                return
            if self.current_class != None and p[0] in self.class_table[self.current_class]['vars']:
                cte_type = self.class_table[self.current_class]['vars'][p[0]]['type']
                cons_dir = self.class_table[self.current_class]['vars'][p[0]]['dir']
            else:
                cte_type = self.get_var_type(p[0], self.symstack[-2])
                if p[0] in self.function_table[self.curr_scope]['vars']:
                    cons_dir = self.function_table[self.curr_scope]['vars'][p[0]]['dir']
                else:
                    cons_dir = self.function_table[self.program_name]['vars'][p[0]]['dir']
        return {'value': p[0], 'type': cte_type, 'dir': cons_dir}

    # Grammar rule: expressions with optional parentheses and binary operators.
    @_('constant e2 operator e3 expression', 'constant e2',
       'LPAREN e1 expression RPAREN e4',
       'LPAREN e1 expression RPAREN e4 operator e3 expression')
    def expression(self, p):
        if hasattr(p, 'LPAREN'):
            return p[2]
        return p[0]

    # Append the open parenthesis to the stack of operators in stack_of_stacks.
    @_('')
    def e1(self, p):
        self.stack_of_stacks[-1].append('(')

    # Append the operand to the stack of operands in stack_of_stacks.
    @_('')
    def e2(self, p):
        self.stack_of_stacks[-2].append(p[-1])

    # Operator-precedence handling: emit quadruples for pending operators of
    # equal or higher precedence before pushing the incoming operator p[-1].
    # Precedence (high to low): * /, + -, comparison/equality, logical.
    @_('')
    def e3(self, p):
        if len(self.stack_of_stacks[-1]) == 0 or self.stack_of_stacks[-1][-1] == '(':
            self.stack_of_stacks[-1].append(p[-1])
        elif self.stack_of_stacks[-1][-1] == '*' or self.stack_of_stacks[-1][-1] == '/':
            self.make_and_push_quad()
            # After reducing * or /, also reduce a pending + or - when the
            # incoming operator is additive.
            if (self.stack_of_stacks[-1] and (self.stack_of_stacks[-1][-1] == '+' or self.stack_of_stacks[-1][-1] == '-')) and (p[-1] == '+' or p[-1] == '-'):
                self.make_and_push_quad()
            self.stack_of_stacks[-1].append(p[-1])
        elif p[-1] == '*' or p[-1] == '/':
            self.stack_of_stacks[-1].append(p[-1])
        elif self.stack_of_stacks[-1][-1] == '+' or self.stack_of_stacks[-1][-1] == '-':
            self.make_and_push_quad()
            self.stack_of_stacks[-1].append(p[-1])
        elif p[-1] == '+' or p[-1] == '-':
            self.stack_of_stacks[-1].append(p[-1])
        elif self.stack_of_stacks[-1][-1] in sm.comparison_ops or self.stack_of_stacks[-1][-1] in sm.equality_ops:
            self.make_and_push_quad()
            self.stack_of_stacks[-1].append(p[-1])
        elif p[-1] in sm.comparison_ops or p[-1] in sm.equality_ops:
            self.stack_of_stacks[-1].append(p[-1])
        elif self.stack_of_stacks[-1][-1] in sm.logic_ops:
            self.make_and_push_quad()
            self.stack_of_stacks[-1].append(p[-1])
        elif p[-1] in sm.logic_ops:
            self.stack_of_stacks[-1].append(p[-1])

    # Pops the operator stack and makes quads until an open parenthesis is
    # found, then discards the parenthesis.
    @_('')
    def e4(self, p):
        while(self.stack_of_stacks[-1][-1] != '('):
            self.make_and_push_quad()
        self.stack_of_stacks[-1].pop()

    @_('AND', 'OR')
    def logical_operator(self, p):
        return p[-1]

    @_('LT', 'GT', 'SAME', 'GEQ', 'LEQ', 'NEQ')
    def relational_operator(self, p):
        return p[0]

    @_('PLUS', 'MINUS', 'MULTIPLY', 'DIVIDE')
    def arithmetic_operator(self, p):
        return p[0]

    @_('logical_operator', 'relational_operator', 'arithmetic_operator')
    def operator(self, p):
        return p[0]

    # Grammar rule: optionally signed constant.
    # NOTE(review): the sign token is p[0], yet this checks p[1] == '-', and
    # var_cte returns a dict which '-' cannot negate — confirm unary minus is
    # actually exercised/working.
    @_('PLUS var_cte', 'MINUS var_cte', 'var_cte')
    def constant(self, p):
        if len(p) > 1 and p[1] == '-':
            return -p.var_cte
        else:
            return p.var_cte

    # Grammar rule: read statement.
    @_('READ LPAREN read_h')
    def read(self, p):
        return 'read'

    @_('variable r1 COMMA read_h', 'variable r1 RPAREN SEMI')
    def read_h(self, p):
        return 'read_h'

    # Makes the read quadruple with res being the target address; array cells
    # use the indirect address prefixed with a dollar sign ($).
    @_('')
    def r1(self, p):
        if self.current_class != None and p[-1] in self.class_table[self.current_class]['vars']:
            if 'd1' in self.class_table[self.current_class]['vars'][p[-1]]:
                var_addr = '$' + str(self.last_arr_t.pop())
            else:
                var_addr = self.class_table[self.current_class]['vars'][p[-1]]['dir']
        elif p[-1] in self.function_table[self.curr_scope]['vars']:
            if 'd1' in self.function_table[self.curr_scope]['vars'][p[-1]]:
                var_addr = '$' + str(self.last_arr_t.pop())
            else:
                var_addr = self.function_table[self.curr_scope]['vars'][p[-1]]['dir']
        elif p[-1] in self.function_table[self.program_name]['vars']:
            if 'd1' in self.function_table[self.program_name]['vars'][p[-1]]:
                var_addr = '$' + str(self.last_arr_t.pop())
            else:
                var_addr = self.function_table[self.program_name]['vars'][p[-1]]['dir']
        else:
            raise UndeclaredIdError(p[-1])
        self.quadruples.append(Quadruple(-1, -1, 'read', var_addr))
        self.quad_counter += 1

    # Grammar rule: function call used as an expression; returns the call's
    # value/type/dir descriptor.
    @_('function_or_method vf0 ctf2 LPAREN func_params RPAREN fp2 fp3 ctf0 ctf3')
    def call_to_function(self, p):
        if not self.check_variable_exists(self.called_func):
            # self.found_errors = True
            # print('ERROR: No function\033[1m',
            #       self.called_func, '\033[0mwas found.')
            # print('       Missing reference found on line',
            #       self.symstack[-5].lineno)
            return
        func_dir = self.function_table[self.program_name]['vars'][self.called_func]['dir']
        func_type = self.function_table[self.called_func]['return_type']
        return {'value': 't' + str(self.temp_counter - 1), 'type': func_type, 'dir': func_dir}

    # Append two empty stacks to the stack_of_stacks (fresh expression context
    # for the call's arguments).
    @_('')
    def ctf2(self, p):
        self.stack_of_stacks.append([])
        self.stack_of_stacks.append([])

    # Pops the stacks from the stack_of_stacks.
    @_('')
    def ctf3(self, p):
        self.stack_of_stacks.pop()
        self.stack_of_stacks.pop()

    # Copy the return value from the function's global return slot into a
    # fresh temporary of the corresponding type.
    @_('')
    def ctf0(self, p):
        if not self.check_variable_exists(self.called_func):
            self.found_errors = True
            print('ERROR: No function\033[1m',
                  self.called_func, '\033[0mwas found.')
            print('       Missing reference found on line',
                  self.symstack[-3].lineno)
            return
        func_dir = self.function_table[self.program_name]['vars'][self.called_func]['real_dir']
        func_type = self.function_table[self.program_name]['vars'][self.called_func]['type']
        idx = self.types.index(func_type)
        num_temps = self.function_table[self.curr_scope]['num_temps']
        # self.function_table[self.curr_scope]['num_temps'] = self.update_num_temps(
        #     num_temps, idx)
        t_dir = idx * 300 + \
            int(num_temps.split('\u001f')[idx]) + 3000
        self.function_table[self.curr_scope]['num_temps'] = self.update_num_temps(
            num_temps, idx)
        self.quadruples.append(
            Quadruple(func_dir, -1, '=', t_dir))
        # 'dir' now points at the temp holding this call's result.
        self.function_table[self.program_name]['vars'][self.called_func]['dir'] = t_dir
        self.quad_counter += 1
        self.temp_counter += 1

    #
    @_('ID ctf1', 'ID DOT ID')
    def function_or_method(self, p):
        if(len(p) == 2):
            # Plain function call: no attribute-copy quadruples needed.
            return (p[0], None)
        else:
            # Method call on an object: build '=' quadruples that copy the
            # object's flattened attributes into the class's attribute slots.
            var_type = self.get_var_type(p[0], self.symstack[-1])
            quads = []
            for attr in self.class_table[var_type]['vars']:
                var_dir = self.function_table[self.curr_scope]['vars'][p[0]+'.'+attr]['dir']
                attr_dir = self.class_table[var_type]['vars'][attr]['dir']
                quads.append(Quadruple(var_dir, -2, '=', attr_dir))
                # self.quad_counter += 1
            return (var_type + p[1] + p[2], quads)

    # Check if the name of the function being called exists in the function
    # table (method names are looked up as '<class>.<name>' inside a class).
    @_('')
    def ctf1(self, p):
        if self.current_class != None:
            if not self.current_class + '.' + p[-1] in self.function_table:
                self.found_errors = True
                print('ERROR: No function\033[1m', p[-1], '\033[0mwas found.')
                print('       Missing reference found on line',
                      self.symstack[-1].lineno)
        elif not p[-1] in self.function_table:
            self.found_errors = True
            print('ERROR: No function\033[1m', p[-1], '\033[0mwas found.')
            print('       Missing reference found on line',
                  self.symstack[-1].lineno)

    @_('COMMA expression fp1 param_list', 'empty')
    def param_list(self, p):
        return 'param_list'

    # Grammar rule: print statement.
    @_('PRINT LPAREN res_write RPAREN SEMI')
    def print(self, p):
        return 'print'

    @_('expression pr1 comma_thing')
    def res_write(self, p):
        return 'res_write'

    @_('COMMA res_write', 'empty')
    def comma_thing(self, p):
        return 'comma_thing'

    # Make quadruples if the stack of operators is not empty, then emit the
    # print quadruple for the resulting value (or the lone operand).
    @_('')
    def pr1(self, p):
        made_quad = False
        while(len(self.stack_of_stacks[-1])):
            ro = self.stack_of_stacks[-2].pop()
            lo = self.stack_of_stacks[-2].pop()
            op = self.stack_of_stacks[-1].pop()
            self.last_type = sm.checkOperation(lo['type'], ro['type'], op)
            idx = self.types.index(self.last_type)
            num_temps = self.function_table[self.curr_scope]['num_temps']
            t_dir = idx * 300 + \
                int(num_temps.split('\u001f')[idx]) + 3000
            self.function_table[self.curr_scope]['num_temps'] = self.update_num_temps(
                num_temps, idx)
            # Constants (4500-5999) can never be array cells; anything else
            # that is an array uses its computed indirect address.
            if not lo['dir'] in range(4500, 6000) and self.check_var_is_array(lo):
                lo_dir = '$' + str(self.last_arr_t.pop())
            else:
                lo_dir = lo['dir']
            if not ro['dir'] in range(4500, 6000) and self.check_var_is_array(ro):
                ro_dir = '$' + str(self.last_arr_t.pop())
            else:
                ro_dir = ro['dir']
            self.quadruples.append(
                Quadruple(lo_dir, ro_dir, op, t_dir))
            self.temp_counter += 1
            self.quad_counter += 1
            made_quad = True
        if made_quad:
            last_quad = self.quadruples[-1]
            self.quadruples.append(
                Quadruple(-1, -1, 'print', last_quad.res))
            self.quad_counter += 1
        else:
            var = self.stack_of_stacks[-2].pop()
            if not var:
                return
            if self.check_var_is_array(var):
                var_dir = '$' + str(self.last_arr_t.pop())
            else:
                var_dir = var['dir']
            self.quadruples.append(
                Quadruple(-1, -1, 'print', var_dir))
            self.quad_counter += 1

    @_('TRUE', 'FALSE')
    def cte_bool(self, p):
        return p[0]

    # Grammar rule: if/then(/else) statement.
    @_('IF LPAREN expression dec1 RPAREN THEN LCURL statements RCURL else_stm')
    def decision_statement(self, p):
        return 'decision_statement'

    # Make quadruples if the stack of operators is not empty, require a bool
    # condition, and emit a pending goto_f whose target is patched later.
    # NOTE(review): unlike v1/pr1, t_dir here is idx * 300 + 3000 without the
    # current num_temps offset — consecutive condition temps would collide;
    # confirm whether this is intended.
    @_('')
    def dec1(self, p):
        while len(self.stack_of_stacks[-1]):
            ro = self.stack_of_stacks[-2].pop()
            lo = self.stack_of_stacks[-2].pop()
            op = self.stack_of_stacks[-1].pop()
            self.last_type = sm.checkOperation(lo['type'], ro['type'], op)
            idx = self.types.index(self.last_type)
            num_temps = self.function_table[self.curr_scope]['num_temps']
            t_dir = idx * 300 + 3000
            r_type = sm.checkOperation(lo['type'], ro['type'], op)
            self.stack_of_stacks[-2].append(
                {'value': 't' + str(self.temp_counter), 'type': r_type, 'dir': t_dir})
            self.function_table[self.curr_scope]['num_temps'] = self.update_num_temps(
                num_temps, idx)
            if self.check_var_is_array(lo):
                lo_dir = '$' + str(self.last_arr_t.pop())
            else:
                lo_dir = lo['dir']
            if self.check_var_is_array(ro):
                ro_dir = '$' + str(self.last_arr_t.pop())
            else:
                ro_dir = ro['dir']
            self.quadruples.append(
                Quadruple(lo_dir, ro_dir, op, t_dir))
            self.temp_counter += 1
            self.quad_counter += 1
        lo = self.stack_of_stacks[-2].pop()
        if lo['type'] != 'bool':
            raise SyntaxError(
                'Expression to evaluate in if statement is not boolean')
        else:
            self.quadruples.append(Quadruple(-1, lo['dir'], 'goto_f', -1))
            self.jumps.append(self.quad_counter)
            self.quad_counter += 1

    @_('dec2 ELSE LCURL statements RCURL dec3', 'empty dec4')
    def else_stm(self, p):
        return 'else_stm'

    # Makes the goto quadruple that skips the else part and patches the
    # pending goto_f to land at the start of the else.
    @_('')
    def dec2(self, p):
        falso = self.jumps.pop()
        self.quadruples.append(Quadruple(-1, -1, 'goto', -1))
        self.jumps.append(self.quad_counter)
        self.quad_counter += 1
        self.quadruples[falso - 1].res = self.quad_counter

    # Patches the end-of-then goto to jump past the else block.
    @_('')
    def dec3(self, p):
        jump = self.jumps.pop()
        self.quadruples[jump - 1].res = self.quad_counter

    # No else: patch the goto_f to jump past the then block.
    @_('')
    def dec4(self, p):
        jump = self.jumps.pop()
        self.quadruples[jump - 1].res = self.quad_counter

    @_('conditional', 'non_conditional')
    def repetition_statement(self, p):
        return 'repetition_statement'

    # Grammar rule: while loop.
    @_('WHILE LPAREN con0 expression con1 RPAREN DO LCURL statements RCURL con2')
    def conditional(self, p):
        return 'conditional'

    # Add the quadruple counter to the jumps stack (loop re-entry point).
    @_('')
    def con0(self, p):
        self.jumps.append(self.quad_counter)

    # Make quadruples if the stack of operators is not empty, require a bool
    # condition, and emit the loop's pending goto_f.
    # NOTE(review): same t_dir-without-offset pattern as dec1 — confirm.
    @_('')
    def con1(self, p):
        while len(self.stack_of_stacks[-1]):
            ro = self.stack_of_stacks[-2].pop()
            lo = self.stack_of_stacks[-2].pop()
            op = self.stack_of_stacks[-1].pop()
            self.last_type = sm.checkOperation(lo['type'], ro['type'], op)
            idx = self.types.index(self.last_type)
            num_temps = self.function_table[self.curr_scope]['num_temps']
            t_dir = idx * 300 + 3000
            self.function_table[self.curr_scope]['num_temps'] = self.update_num_temps(
                num_temps, idx)
            r_type = sm.checkOperation(lo['type'], ro['type'], op)
            self.stack_of_stacks[-2].append(
                {'value': 't' + str(self.temp_counter), 'type': r_type, 'dir': t_dir})
            if self.check_var_is_array(lo):
                lo_dir = '$' + str(self.last_arr_t.pop())
            else:
                lo_dir = lo['dir']
            if self.check_var_is_array(ro):
                ro_dir = '$' + str(self.last_arr_t.pop())
            else:
                ro_dir = ro['dir']
            self.quadruples.append(
                Quadruple(lo_dir, ro_dir, op, t_dir))
            self.temp_counter += 1
            self.quad_counter += 1
        if self.last_type != 'bool':
            raise SyntaxError(
                'Expression to evaluate in if statement is not boolean')
        else:
            last_quad = self.quadruples[-1].res
            self.quadruples.append(Quadruple(-1, last_quad, 'goto_f', -1))
            self.jumps.append(self.quad_counter)
            self.quad_counter += 1

    # Make the loop-back goto quadruple, patch the goto_f past the loop end,
    # and patch any pending break.
    @_('')
    def con2(self, p):
        falso = self.jumps.pop()
        ret = self.jumps.pop()
        self.quadruples.append(Quadruple(-1, -1, 'goto', ret))
        self.quadruples[falso - 1].res = self.quad_counter + 1
        if len(self.break_stack):
            bq = self.break_stack.pop()
            self.quadruples[bq - 1].res = self.quad_counter + 1
        self.quad_counter += 1

    # Grammar rule: counted for loop (FOR var = expr UNTIL expr DO { ... }).
    @_('FOR variable ass1 EQUALS expression ass2 nc0 UNTIL expression nc1 DO nc2 LCURL statements RCURL nc3')
    def non_conditional(self, p):
        return 'non_conditional'

    # Append the result of the last quadruple (the loop variable's address)
    # to the for_var_dir stack.
    @_('')
    def nc0(self, p):
        self.for_var_dir.append(self.quadruples[-1].res)

    # Make quadruples if the stack of operators is not empty and do quadruple
    # with <= as its operator.
    @_('')
    def nc1(self, p):
        made_quad = False
        while(len(self.stack_of_stacks[-1])):
            ro = self.stack_of_stacks[-2].pop()
            lo = self.stack_of_stacks[-2].pop()
            op = self.stack_of_stacks[-1].pop()
            self.last_type = sm.checkOperation(lo['type'], ro['type'], op)
            idx = self.types.index(self.last_type)
            num_temps = self.function_table[self.curr_scope]['num_temps']
            t_dir = idx * 300 + \
                int(num_temps.split('\u001f')[idx]) + 3000
            self.function_table[self.curr_scope]['num_temps'] = self.update_num_temps(
                num_temps, idx)
            if not lo['dir'] in range(4500, 6000) and self.check_var_is_array(lo):
                lo_dir = '$' + str(self.last_arr_t.pop())
            else:
                lo_dir = lo['dir']
            if not ro['dir'] in
range(4500, 6000) and self.check_var_is_array(ro): ro_dir = '$' + str(self.last_arr_t.pop()) else: ro_dir = ro['dir'] self.quadruples.append( Quadruple(lo_dir, ro_dir, op, t_dir)) self.temp_counter += 1 self.quad_counter += 1 made_quad = True if made_quad: last_quad = self.quadruples[-1].res if (last_quad % 1500) // 300 != 0 and (last_quad % 1500) // 300 != 1: raise TypeError('Type mismatch') num_temps = self.function_table[self.curr_scope]['num_temps'] t_dir = 3 * 300 + \ int(num_temps.split('\u001f')[3]) + 3000 self.function_table[self.curr_scope]['num_temps'] = self.update_num_temps( num_temps, 3) self.quadruples.append( Quadruple(self.for_var_dir[-1], last_quad, '<=', t_dir)) self.jumps.append(self.quad_counter) self.quad_counter += 1 self.temp_counter += 1 else: var = self.stack_of_stacks[-2].pop() if (var['dir'] % 1500) // 300 != 0 and (var['dir'] % 1500) // 300 != 1: raise TypeError('Type mismatch') num_temps = self.function_table[self.curr_scope]['num_temps'] t_dir = 3 * 300 + \ int(num_temps.split('\u001f')[3]) + 3000 self.function_table[self.curr_scope]['num_temps'] = self.update_num_temps( num_temps, 3) if self.check_var_is_array(var): var_dir = '$' + str(self.last_arr_t.pop()) else: var_dir = var['dir'] self.quadruples.append( Quadruple(self.for_var_dir[-1], var_dir, '<=', t_dir)) self.jumps.append(self.quad_counter) self.quad_counter += 1 self.temp_counter += 1 # Make goto_f quadruple @_('') def nc2(self, p): last_quad = self.quadruples[-1].res self.quadruples.append(Quadruple(-1, last_quad, 'goto_f', -1)) self.jumps.append(self.quad_counter) self.quad_counter += 1 # Make goto quadruple and actualize goto_f @_('') def nc3(self, p): falso = self.jumps.pop() cond = self.jumps.pop() if not 1 in self.constant_table['int']: self.constant_table['int'].append(1) one_dir = self.constant_table['int'].index(1) + 4500 self.quadruples.append( Quadruple(self.for_var_dir[-1], one_dir, '+', self.for_var_dir[-1])) self.quad_counter += 1 
self.quadruples.append(Quadruple(-1, -1, 'goto', cond)) self.quad_counter += 1 self.quadruples[falso - 1].res = self.quad_counter if len(self.break_stack): bq = self.break_stack.pop() self.quadruples[bq - 1].res = self.quad_counter + 1 self.for_var_dir.pop() @_('RETURN LPAREN expression fr0 RPAREN SEMI fr1') def function_returns(self, p): return 'function_returns' # Make return quadruple @_('') def fr0(self, p): made_quad = False while(len(self.stack_of_stacks[-1])): self.make_and_push_quad() made_quad = True if made_quad: last_quad = self.quadruples[-1] self.quadruples.append( Quadruple(last_quad.res, -1, 'return', self.function_table[self.program_name]['vars'][self.curr_scope]['real_dir'])) self.quad_counter += 1 self.stack_of_stacks[-2].pop() else: self.quadruples.append( Quadruple(self.stack_of_stacks[-2].pop()['dir'], -1, 'return', self.function_table[self.program_name]['vars'][self.curr_scope]['real_dir'])) self.quad_counter += 1 # Actualize has_returned to true @_('') def fr1(self, p): self.has_returned = True @ _('function_or_method vf0 LPAREN func_params RPAREN fp2 fp3 SEMI') def call_to_void_function(self, p): return 'call_to_void_function' # Make gosub quadruple @ _('') def fp2(self, p): self.quadruples.append( Quadruple(self.called_func, -1, 'gosub', -1)) self.quad_counter += 1 # Equalize parameter counter to 0 @ _('') def fp3(self, p): self.param_counter = 0 # make era quadruple @ _('') def vf0(self, p): self.called_func, quads = p[-1] if self.current_class != None: self.called_func = self.current_class + '.' 
+ self.called_func self.quadruples.append(Quadruple(self.called_func, -1, 'era', -1)) self.quad_counter += 1 if quads: for q in quads: self.quadruples.append(q) self.quad_counter += 1 @ _('expression fp1 param_list', 'empty') def func_params(self, p): return 'func_params' # Make quadruples if the stack of operators is not empty and do param quadruples @ _('') def fp1(self, p): if not self.called_func in self.function_table: for i in range(1, len(self.symstack)): if hasattr(self.symstack[i * -1], 'lineno'): lineno = self.symstack[i * -1].lineno break self.found_errors = True print('ERROR: No function\033[1m', self.called_func, '\033[0mwas found.') print(' Missing reference found on line', lineno) return made_quad = False while(len(self.stack_of_stacks[-1])): offset = 800 * len(self.types) * 2 ro = self.stack_of_stacks[-2].pop() lo = self.stack_of_stacks[-2].pop() op = self.stack_of_stacks[-1].pop() self.last_type = sm.checkOperation(lo['type'], ro['type'], op) idx = self.types.index(self.last_type) num_temps = self.function_table[self.curr_scope]['num_temps'] self.function_table[self.curr_scope]['num_temps'] = self.update_num_temps( num_temps, idx) t_dir = idx * 300 + 3000 self.quadruples.append( Quadruple(lo['dir'], ro['dir'], op, t_dir)) self.temp_counter += 1 self.quad_counter += 1 made_quad = True if made_quad: last_quad = self.quadruples[-1] if self.param_counter == len(self.function_table[self.called_func]['params']): self.found_errors = True print( 'ERROR: Too many parameters passed in call to function on line', self.symstack[-2].lineno) return try: sm.checkAssignment(self.types[int(self.function_table[self.called_func] ['params'][self.param_counter])], self.types[(last_quad.res % 1500) // 300], '=') except TypeError: self.found_errors = True print( 'ERROR: Type mismatch on line', self.symstack[-2].lineno) print( ' Expected value of type', self.types[int(self.function_table[self.called_func]['params'][self.param_counter])], 'got value of type', 
self.types[(last_quad.res % 1500) // 300], 'instead') return self.quadruples.append( Quadruple(last_quad.res, -1, 'param', self.param_counter)) self.quad_counter += 1 self.param_counter += 1 else: val = self.stack_of_stacks[-2].pop() if self.param_counter == len(self.function_table[self.called_func]['params']): self.found_errors = True print( 'ERROR: Too many parameters passed in call to function on line', self.symstack[-2].lineno) return if not val: return try: sm.checkAssignment(self.types[int(self.function_table[self.called_func] ['params'][self.param_counter])], self.types[(val['dir'] % 1500) // 300], '=') except TypeError: self.found_errors = True print( 'ERROR: Type mismatch on line', self.symstack[-2].lineno) print( ' Expected value of type', self.types[int(self.function_table[self.called_func]['params'][self.param_counter])], 'got value of type', self.types[(val['dir'] % 1500) // 300], 'instead') return self.quadruples.append( Quadruple(val['dir'], -1, 'param', self.param_counter)) self.quad_counter += 1 self.param_counter += 1 @ _('MAIN m1_add_to_func_table LPAREN RPAREN LCURL main0 var_declaration statements RCURL main2') def main(self, p): return 'main' # Actualize the jump of the first goto made int he list of quadruples @ _('') def main0(self, p): self.quadruples[0].res = self.quad_counter # Do end quadruple and delete function and class tables @ _('') def main2(self, p): self.quadruples.append(Quadruple(-1, -1, 'end', -1)) del self.function_table[self.program_name]['vars'] del self.function_table['main']['vars'] for class_name in self.class_table: del self.class_table[class_name]['vars'] pass # Add main to function table @ _('') def m1_add_to_func_table(self, p): self.curr_scope = 'main' self.add_to_func_table('main', None) @ _('') def empty(self, p): pass def error(self, p): if not p: return print('ERROR: Syntax error found on line', p.lineno) if p.value == 'var': print( ' All variable declarations must be done before any other statement') elif 
p.value == '(': print( ' Parentheses are not allowed in this position.') elif p.value == '{': print( ' Curly brackets are not allowed in this position.') elif p.value == '[': print( ' Brackets are not allowed in this position.') elif p.value == ')': print( ' Closing parenthesis found without matching opening one.') elif p.value == '}': print( ' Closing curly bracket without an opening one.') elif p.value == ']': print( ' Closing bracket without an opening one.') elif p.value == ';': print( ' Must only be used at the end of statements') elif p.value == '=': print( ' Assignment is not allowed here. Perhaps you meant to use ==?') else: print( ' Keyword or id misplaced') if not self.found_errors: print( ' It\'s possible that all other syntax errors may be fixed by solving this one.') self.errok() self.found_errors = True while True: tok = next(self.tokens, None) if tok == None: raise EOFError() if tok.type == 'SEMI': tok = next(self.tokens, None) return tok
43.773619
213
0.565392
from sly import Parser from sly.yacc import _decorator as _ from .domas_lexer import DomasLexer from .domas_quadruples import Quadruple from .domas_errors import * from . import domas_semantic_cube as sm import json import os import copy os.system('color') class DomasParser(Parser): tokens = DomasLexer.tokens start = 'programa' function_table = {} class_table = {} constant_table = {'int': [], 'float': [], 'string': [], 'bool': []} stack_of_stacks = [[], []] stack_vars = [] last_arr_t = [] displacements = [] for_var_dir = [] break_stack = [] quadruples = [] jumps = [] quad_counter = 1 param_counter = 0 temp_counter = 0 attr_counter = 1 current_class = None last_arr_id = None last_type = None last_func_added = None has_returned = False found_errors = False types = ['int', 'float', 'string', 'bool', 'void'] operators = ['+', '-', '*', '/', '<', '>', '<=', '>=', '==', '<>', '&', '|'] def add_to_func_table(self, id, return_type): self.function_table[id] = { 'return_type': return_type, 'vars': {}, 'num_types': '0\u001f' * len(self.types), 'params': '', 'num_temps': '0\u001f' * len(self.types) } def check_variable_exists(self, var): if self.current_class != None: return var in self.function_table[self.curr_scope]['vars'] or var in self.class_table[self.current_class]['vars'] return var in self.function_table[self.curr_scope]['vars'] or var in self.function_table[self.program_name]['vars'] def get_var_type(self, var, tok): if not self.check_variable_exists(var): self.found_errors = True print('ERROR: No variable\033[1m', var, '\033[0mwas found.') print(' Missing reference found on line', tok.lineno) return None if self.current_class != None: if var in self.function_table[self.curr_scope]['vars']: return self.function_table[self.curr_scope]['vars'][var]['type'] return self.class_table[self.current_class]['vars'][var]['type'] if var in self.function_table[self.curr_scope]['vars']: return self.function_table[self.curr_scope]['vars'][var]['type'] return 
self.function_table[self.program_name]['vars'][var]['type'] def update_num_temps(self, func_num_temps, type_idx, quantity=1): lst = func_num_temps.split('\u001f') lst[type_idx] = str(int(lst[type_idx]) + quantity) return '\u001f'.join(lst) def check_var_is_array(self, var): if not var: return if var['dir'] >= 4500 and var['dir'] < 6000: return False if not self.check_variable_exists(var['value']): return False if self.current_class != None: if var['value'] in self.function_table[self.curr_scope]['vars']: return 'd1' in self.function_table[self.curr_scope]['vars'][var['value']] else: return 'd1' in self.class_table[self.current_class]['vars'][var['value']] elif var['value'] in self.function_table[self.curr_scope]['vars']: return 'd1' in self.function_table[self.curr_scope]['vars'][var['value']] else: return 'd1' in self.function_table[self.program_name]['vars'][var['value']] def make_and_push_quad(self): ro = self.stack_of_stacks[-2].pop() lo = self.stack_of_stacks[-2].pop() op = self.stack_of_stacks[-1].pop() if not ro or not lo: raise SystemError("Reached unsolvable state") r_type = sm.checkOperation(lo['type'], ro['type'], op) self.last_type = r_type idx = self.types.index(r_type) num_temps = self.function_table[self.curr_scope]['num_temps'] self.function_table[self.curr_scope]['num_temps'] = self.update_num_temps( num_temps, idx) t_dir = idx * 300 + \ int(num_temps.split('\u001f')[idx]) + 3000 self.stack_of_stacks[-2].append( {'value': 't' + str(self.temp_counter), 'type': r_type, 'dir': t_dir}) if self.check_var_is_array(lo): lo_dir = '$' + str(self.last_arr_t.pop()) else: lo_dir = lo['dir'] if self.check_var_is_array(ro): ro_dir = '$' + str(self.last_arr_t.pop()) else: ro_dir = ro['dir'] self.quadruples.append( Quadruple(lo_dir, ro_dir, op, t_dir)) self.temp_counter += 1 self.quad_counter += 1 @_('PROGRAM ID pro1 SEMI pro0 declarations') def programa(self, p): if self.found_errors: raise CompilationError() return (self.program_name, self.function_table, 
self.class_table, self.constant_table, self.quadruples) @_('') def pro0(self, p): self.quadruples.append(Quadruple(-1, -1, 'goto', -1)) self.quad_counter += 1 @_('') def pro1(self, p): self.program_name = p[-1] self.curr_scope = p[-1] self.function_table[p[-1]] = { 'return_type': None, 'vars': {}, 'num_types': '0\u001f0\u001f0\u001f0\u001f0\u001f' } @ _('class_declaration out_class var_declaration function_definition main') def declarations(self, p): return 'declarations' @ _('''CLASS ID cd1 inherits LCURL ATTRIBUTES attribute_declaration METHODS method_definition RCURL class_declaration''', 'empty') def class_declaration(self, p): return 'class_declaration' @ _('') def cd1(self, p): if p[-1] in self.class_table: self.found_errors = True print( 'ERROR: A class with the name\033[1m', p[-1], '\033[0mhas already been defined') print(' Redefinition found on line', self.symstack[-1].lineno) else: self.class_table[p[-1]] = { 'vars': {}, 'num_types': '0\u001f0\u001f0\u001f0\u001f0\u001f' } self.current_class = p[-1] @ _('INHERITS ID cd3', 'empty') def inherits(self, p): return 'inherits' @ _('') def cd3(self, p): if not p[-1] in self.class_table: self.found_errors = True print('ERROR: Id\033[1m', p[-1], '\033[0mis not defined as a class') print(' Missing reference found on line', self.symstack[-1].lineno) else: self.class_table[self.current_class] = copy.deepcopy( self.class_table[p[-1]]) @ _('VAR ID ad1 attr_vector', 'VAR ID ad1 attr_simple_var', 'empty') def attribute_declaration(self, p): return 'attribute_declaration' @ _('') def ad1(self, p): if p[-1] in self.class_table[self.current_class]['vars']: self.found_errors = True print('ERROR: An attribute\033[1m', p[-1], '\033[0mhas already been defined') print(' Redefinition found on line', self.symstack[-1].lineno) else: self.stack_vars.append(p[-1]) @ _('''LBRACKET CTE_I ad2 attr_multidim RBRACKET COLON simple_type ad4 SEMI attribute_declaration''') def attr_vector(self, p): return 'vector' # Adds the first dimension 
of an array to the information of the variable @ _('') def ad2(self, p): self.latest_var = self.stack_vars.pop() if self.class_table[self.current_class]['vars']: self.class_table[self.current_class]['vars'][self.latest_var] = { 'd1': p[-1]} else: self.class_table[self.current_class]['vars'] = { self.latest_var: {'d1': p[-1]}} @ _('COMMA CTE_I ad3', 'empty') def attr_multidim(self, p): return 'attr_multidim' # Adds the second dimension of an array to the information of the variable. @ _('') def ad3(self, p): self.class_table[self.current_class]['vars'][self.latest_var]['d2'] = p[-1] # Adds the type and direction to the information of the variable declared and updated the amount of types used in the class. @ _('') def ad4(self, p): idx = self.types.index(p[-1]) num_types = self.class_table[self.current_class]['num_types'] if 'd1' in self.class_table[self.current_class]['vars'][self.latest_var]: q = self.class_table[self.current_class]['vars'][self.latest_var]['d1'] if 'd2' in self.class_table[self.current_class]['vars'][self.latest_var]: q *= self.class_table[self.current_class]['vars'][self.latest_var]['d2'] self.class_table[self.current_class]['vars'][self.latest_var]['dir'] = 6000 + idx * \ 300 + int(num_types.split('\u001f')[idx]) self.class_table[self.current_class]['vars'][self.latest_var]['type'] = p[-1] self.class_table[self.current_class]['num_types'] = self.update_num_temps( num_types, idx, q) @ _('attr_var_list COLON simple_type ad5 SEMI attribute_declaration') def attr_simple_var(self, p): return 'attr_simple_var' @ _('COMMA ID ad1 attr_var_list', 'empty') def attr_var_list(self, p): return 'attr_var_list' # Pops the stack of variables and giving each their corresponding type. 
@ _('') def ad5(self, p): while len(self.stack_vars) > 0: curr_var = self.stack_vars.pop() if curr_var in self.class_table[self.current_class]['vars']: self.found_errors = True print('ERROR: An attribute\033[1m', curr_var, '\033[0mhas already been defined in class', self.current_class) print(' Redefinition found on line', self.symstack[-2].lineno) idx = self.types.index(p[-1]) num_types = self.class_table[self.current_class]['num_types'] self.class_table[self.current_class]['num_types'] = self.update_num_temps( num_types, idx) self.class_table[self.current_class]['vars'][curr_var] = { 'type': p[-1], 'dir': 6000 + idx * 300 + int(num_types.split('\u001f')[idx])} @ _('''def_type fd1 FUNCTION ID md3 LPAREN m_parameters RPAREN LCURL fd4 var_declaration statements RCURL fd5 fd6 method_definition''', 'empty') def method_definition(self, p): return 'method_definition' # Adds the id with the name of the class prefixed to the function table. @ _('') def md3(self, p): if p[-1] in self.class_table[self.current_class]['vars']: self.found_errors = True print('ERROR: An attribute or method\033[1m', p[-1], '\033[0mhas already been defined in class', self.current_class) print(' Redefinition found on line', self.symstack[-1].lineno) else: self.add_to_func_table( self.current_class + '.' + p[-1], self.curr_func_type) self.last_func_added = self.current_class + '.' + p[-1] self.curr_scope = self.last_func_added idx = self.types.index(self.curr_func_type) num_types = self.function_table[self.program_name]['num_types'] self.function_table[self.program_name]['num_types'] = self.update_num_temps( num_types, idx) self.function_table[self.program_name]['vars'][self.current_class + '.' + p[-1]] = { 'type': self.curr_func_type, 'dir': 0, 'real_dir': idx * 300 + int(num_types.split('\u001f')[idx])} @_('ID p1 COLON simple_type m2 m_param_choose', 'empty') def m_parameters(self, p): return 'parameters' # Adds the id and type of the parameter to the table of variables of the current method. 
    # Registers one method parameter: reports a redefinition error, or appends
    # the parameter's type code to the method signature and assigns a local
    # address (base 1500 + 300-slot per type index).
    @ _('')
    def m2(self, p):
        if self.latest_var in self.function_table[self.curr_scope]['vars']:
            self.found_errors = True
            print('ERROR: A parameter\033[1m', self.latest_var,
                  '\033[0mhas already been declared for method',
                  self.last_func_added.split('.')[1], 'in class', self.current_class)
            print(' Redefinition found on line', self.symstack[-2].lineno)
        else:
            idx = self.types.index(p[-1])
            # 'params' is a string of type indices encoding the signature.
            self.function_table[self.curr_scope]['params'] += str(
                idx)
            num_types = self.function_table[self.curr_scope]['num_types']
            self.function_table[self.curr_scope]['num_types'] = self.update_num_temps(
                num_types, idx)
            self.function_table[self.curr_scope]['vars'][self.latest_var] = {
                'type': p[-1],
                'dir': 1500 + idx * 300 + int(num_types.split('\u001f')[idx])
            }

    @ _('COMMA m_parameters', 'empty')
    def m_param_choose(self, p):
        return 'm_param_choose'

    # Leaving a class body: restore the global scope.
    @ _('')
    def out_class(self, p):
        self.current_class = None
        self.curr_scope = self.program_name

    @ _('VAR ID gvd1 vector', 'VAR ID gvd1 simple_var', 'empty')
    def var_declaration(self, p):
        return p[0]

    # Appends the current var to the stack of variables if it doesn't already
    # exist as a function name.
    @ _('')
    def gvd1(self, p):
        if p[-1] in self.function_table:
            self.found_errors = True
            print('ERROR: A function with ID\033[1m', p[-1],
                  '\033[0mhas already been declared. Variables may not share name with functions')
            print(' Redefinition found on line', self.symstack[-1].lineno)
        else:
            self.stack_vars.append(p[-1])

    @ _('LBRACKET CTE_I gvd2 multidim RBRACKET COLON simple_type gvd4 SEMI var_declaration')
    def vector(self, p):
        return 'vector'

    # Records the first dimension of an array variable in the current scope.
    @ _('')
    def gvd2(self, p):
        self.latest_var = self.stack_vars.pop()
        if self.function_table[self.curr_scope]['vars']:
            self.function_table[self.curr_scope]['vars'][self.latest_var] = {
                'd1': p[-1]
            }
        else:
            self.function_table[self.curr_scope]['vars'] = {
                self.latest_var: {'d1': p[-1]}
            }

    @ _('COMMA CTE_I gvd3', 'empty')
    def multidim(self, p):
        return 'multidim'

    @ _('var_list COLON composite_type gvd6 SEMI var_declaration',
        'var_list COLON simple_type gvd5 SEMI var_declaration')
    def simple_var(self, p):
        return 'simple_var'

    @ _('COMMA ID gvd1 var_list', 'empty')
    def var_list(self, p):
        return 'var_list'

    # Records the second dimension of a matrix variable.
    @ _('')
    def gvd3(self, p):
        self.function_table[self.curr_scope]['vars'][self.latest_var]['d2'] = p[-1]

    # Assigns type and address to an array/matrix variable; local scopes use a
    # 1500 offset, the global (program) scope starts at 0.
    @ _('')
    def gvd4(self, p):
        idx = self.types.index(p[-1])
        num_types = self.function_table[self.curr_scope]['num_types']
        offset = 1500 if self.curr_scope != self.program_name else 0
        # q = number of cells the array occupies.
        # NOTE(review): q unbound if 'd1' is absent — appears only reachable
        # from the vector rule where d1 was set; confirm.
        if 'd1' in self.function_table[self.curr_scope]['vars'][self.latest_var]:
            q = self.function_table[self.curr_scope]['vars'][self.latest_var]['d1']
            if 'd2' in self.function_table[self.curr_scope]['vars'][self.latest_var]:
                q *= self.function_table[self.curr_scope]['vars'][self.latest_var]['d2']
        self.function_table[self.curr_scope]['vars'][self.latest_var]['dir'] = idx * \
            300 + int(num_types.split('\u001f')[idx]) + offset
        self.function_table[self.curr_scope]['vars'][self.latest_var]['type'] = p[-1]
        self.function_table[self.curr_scope]['num_types'] = self.update_num_temps(
            num_types, idx, q)

    # Drains the pending-variable stack, assigning each simple variable its
    # type and address in the current scope.
    @ _('')
    def gvd5(self, p):
        while len(self.stack_vars) > 0:
            curr_var = self.stack_vars.pop()
            if curr_var in self.function_table[self.curr_scope]['vars']:
                self.found_errors = True
                print('ERROR: A variable\033[1m', curr_var,
                      '\033[0mhas already been declared.')
                print(' Redefinition found on line', self.symstack[-5].lineno)
            idx = self.types.index(p[-1])
            num_types = self.function_table[self.curr_scope]['num_types']
            offset = 1500 if self.curr_scope != self.program_name else 0
            self.function_table[self.curr_scope]['vars'][curr_var] = {
                'type': p[-1],
                'dir': idx * 300 + int(num_types.split('\u001f')[idx]) + offset}
            self.function_table[self.curr_scope]['num_types'] = self.update_num_temps(
                num_types, idx)

    # Declares variables of a composite (class) type: each class attribute is
    # flattened into the current scope as "objname.attr" with its own address.
    @ _('')
    def gvd6(self, p):
        while len(self.stack_vars) > 0:
            var_id = self.stack_vars.pop()
            if var_id in self.function_table[self.curr_scope]['vars']:
                self.found_errors = True
                print('ERROR: A variable\033[1m', var_id,
                      '\033[0mhas already been declared.')
                print(' Redefinition found on line', self.symstack[-5].lineno)
            offset = 1500 if self.curr_scope != self.program_name else 0
            num_types = self.function_table[self.curr_scope]['num_types']
            # Current next-free counter per type; used to rebase the class's
            # 6000-based attribute addresses into this scope.
            base_addrs = [int(n) for n in num_types.split('\u001f')[:-1]]
            if not p[-1] in self.class_table:
                # Unknown class: find the nearest symbol carrying a line
                # number for the error report, then abort this declaration.
                for i in range(1, len(self.symstack)):
                    if hasattr(self.symstack[i * -1], 'lineno'):
                        lineno = self.symstack[i * -1].lineno
                        break
                print('ERROR: No class\033[1m', p[-1], '\033[0mwas found.')
                print(' Missing reference found on line', lineno)
                return
            for attr in self.class_table[p[-1]]['vars']:
                attr_type = self.class_table[p[-1]]['vars'][attr]['type']
                idx = self.types.index(attr_type)
                num_types = self.function_table[self.curr_scope]['num_types']
                q = 1
                self.function_table[self.curr_scope]['vars'][var_id + '.' + attr] = {
                    'type': attr_type,
                    'dir': base_addrs[idx] + self.class_table[p[-1]]['vars'][attr]['dir'] - 6000 + offset
                }
                # Copy array dimensions, reserving d1 [* d2] cells.
                if 'd1' in self.class_table[p[-1]]['vars'][attr]:
                    q = self.class_table[p[-1]]['vars'][attr]['d1']
                    self.function_table[self.curr_scope]['vars'][var_id + '.' + attr]['d1'] = q
                    if 'd2' in self.class_table[p[-1]]['vars'][attr]:
                        d2 = self.class_table[p[-1]]['vars'][attr]['d2']
                        q *= d2
                        self.function_table[self.curr_scope]['vars'][var_id + '.' + attr]['d2'] = d2
                self.function_table[self.curr_scope]['num_types'] = self.update_num_temps(
                    num_types, idx, q)
            # The object itself is kept only as a typed marker (no address).
            self.function_table[self.curr_scope]['vars'][var_id] = {
                'type': p[-1]}

    @ _('def_type fd1 FUNCTION ID fd3 LPAREN parameters RPAREN LCURL fd4 var_declaration statements RCURL fd5 fd6 function_definition', 'empty')
    def function_definition(self, p):
        return 'function_definition'

    # Remembers the declared return type for the function being parsed.
    @ _('')
    def fd1(self, p):
        self.curr_func_type = p[-1]

    # Adds a free function to the function table and registers its return slot
    # as a global pseudo-variable (mirrors md3 for methods).
    @ _('')
    def fd3(self, p):
        if p[-1] in self.function_table:
            self.found_errors = True
            print('ERROR: A function\033[1m', p[-1],
                  '\033[0mhas already been defined.')
            print(' Redefinition found on line', self.symstack[-1].lineno)
        elif p[-1] in self.function_table[self.program_name]['vars']:
            self.found_errors = True
            print('ERROR: A global variable\033[1m', p[-1],
                  '\033[0mhas been declared. Functions may not share names with global variables')
            print(' Redefinition found on line', self.symstack[-1].lineno)
        else:
            self.add_to_func_table(p[-1], self.curr_func_type)
            self.last_func_added = p[-1]
            self.curr_scope = self.last_func_added
            idx = self.types.index(self.curr_func_type)
            num_types = self.function_table[self.program_name]['num_types']
            self.function_table[self.program_name]['num_types'] = self.update_num_temps(
                num_types, idx)
            self.function_table[self.program_name]['vars'][p[-1]] = {
                'type': self.curr_func_type, 'dir': 0,
                'real_dir': idx * 300 + int(num_types.split('\u001f')[idx])}

    # Marks where the function's code begins in the quadruple stream.
    @ _('')
    def fd4(self, p):
        if not self.last_func_added:
            return
        self.function_table[self.last_func_added]['start'] = self.quad_counter

    # After parsing the body, the local var table is no longer needed.
    @ _('')
    def fd5(self, p):
        if not self.last_func_added:
            return
        del self.function_table[self.last_func_added]['vars']

    # Closes a function: checks the return obligation, emits end_func and
    # resets per-function counters.
    @ _('')
    def fd6(self, p):
        if self.curr_func_type != 'void' and self.has_returned == False:
            self.found_errors = True
            print('ERROR: Function\033[1m', self.curr_scope,
                  '\033[0mis missing a return statement')
            print(' Non-void functions must have a return statement.')
        self.quadruples.append(Quadruple(-1, -1, 'end_func',
                                         -1))
        self.quad_counter += 1
        self.temp_counter = 1
        self.has_returned = False

    @ _('statement statements', 'empty')
    def statements(self, p):
        return 'statements'

    @ _('simple_type', 'VOID')
    def def_type(self, p):
        return p[0]

    @ _('INT', 'FLOAT', 'STRING', 'BOOL')
    def simple_type(self, p):
        return p[0]

    @ _('ID')
    def composite_type(self, p):
        return p[0]

    @ _('ID p1 COLON simple_type p2 param_choose', 'empty')
    def parameters(self, p):
        return 'parameters'

    @ _('COMMA parameters', 'empty')
    def param_choose(self, p):
        return 'param_choose'

    # Remembers the parameter name just read.
    @ _('')
    def p1(self, p):
        self.latest_var = p[-1]

    # Registers one free-function parameter (signature code + local address).
    # NOTE(review): unlike m2, the registration below runs even after a
    # duplicate-parameter error — confirm whether that is intentional.
    @ _('')
    def p2(self, p):
        if self.latest_var in self.function_table[self.curr_scope]['vars']:
            self.found_errors = True
            print('ERROR: A parameter\033[1m', self.latest_var,
                  '\033[0mhas already been declared for function', self.last_func_added)
            print(' Redefinition found on line', self.symstack[-2].lineno)
        idx = self.types.index(p[-1])
        self.function_table[self.curr_scope]['params'] += str(idx)
        num_types = self.function_table[self.curr_scope]['num_types']
        offset = 1500 if self.curr_scope != self.program_name else 0
        self.function_table[self.curr_scope]['num_types'] = self.update_num_temps(
            num_types, idx)
        self.function_table[self.curr_scope]['vars'][self.latest_var] = {
            'type': p[-1],
            'dir': idx * 300 + int(num_types.split('\u001f')[idx]) + offset}

    @_('assignment', 'call_to_void_function', 'function_returns', 'read',
       'print', 'decision_statement', 'repetition_statement', 'BREAK br0 SEMI')
    def statement(self, p):
        return 'statement'

    # break: emits an unresolved goto to be patched by the enclosing loop
    # (see con2); errors when used outside a loop.
    @_('')
    def br0(self, p):
        if len(self.jumps) == 0:
            self.found_errors = True
            print('ERROR: break statement on line',
                  self.symstack[-1].lineno, 'used outside a loop')
        self.quadruples.append(Quadruple(-1, -1, 'goto', None))
        self.break_stack.append(self.quad_counter)
        self.quad_counter += 1

    @_('variable ass1 EQUALS expression ass2 SEMI')
    def assignment(self, p):
        return 'assignment'

    # Remembers the assignment target name.
    @_('')
    def ass1(self, p):
        self.latest_var = p[-1]

    # Finishes an assignment: flushes pending operators, type-checks the '='
    # and emits the store quadruple. Array accesses use '$addr' (indirect)
    # operands taken from last_arr_t.
    @_('')
    def ass2(self, p):
        made_quad = False
        while(len(self.stack_of_stacks[-1])):
            self.make_and_push_quad()
            made_quad = True
        lo = self.stack_of_stacks[-2].pop()
        if not lo:
            return
        v_type = self.get_var_type(self.latest_var, self.symstack[-2])
        if not v_type:
            return
        self.last_type = sm.checkOperation(v_type, lo['type'], '=')
        # RHS: if it is a bare array element (no quad was made), use the
        # computed indirect address instead of the base address.
        if not made_quad and self.check_var_is_array(lo):
            lo_dir = '$' + str(self.last_arr_t.pop())
        else:
            lo_dir = lo['dir']
        # LHS: resolve the target's address in class scope, current scope or
        # global scope; arrays again go through the indirect '$' address.
        if self.current_class != None:
            if self.latest_var in self.class_table[self.current_class]['vars']:
                if 'd1' in self.class_table[self.current_class]['vars'][self.latest_var]:
                    if not self.last_arr_t:
                        return
                    var_dir = '$' + str(self.last_arr_t.pop())
                else:
                    var_dir = self.class_table[self.current_class]['vars'][self.latest_var]['dir']
            else:
                if 'd1' in self.function_table[self.curr_scope]['vars'][self.latest_var]:
                    if not self.last_arr_t:
                        return
                    var_dir = '$' + str(self.last_arr_t.pop())
                else:
                    var_dir = self.function_table[self.curr_scope]['vars'][self.latest_var]['dir']
        elif self.latest_var in self.function_table[self.curr_scope]['vars']:
            if 'd1' in self.function_table[self.curr_scope]['vars'][self.latest_var]:
                if not self.last_arr_t:
                    return
                var_dir = '$' + str(self.last_arr_t.pop())
            else:
                var_dir = self.function_table[self.curr_scope]['vars'][self.latest_var]['dir']
        else:
            if 'd1' in self.function_table[self.program_name]['vars'][self.latest_var]:
                if not self.last_arr_t:
                    return
                var_dir = '$' + str(self.last_arr_t.pop())
            else:
                var_dir = self.function_table[self.program_name]['vars'][self.latest_var]['dir']
        q = Quadruple(lo_dir, -1, '=', var_dir)
        self.quadruples.append(q)
        self.quad_counter += 1

    @_('id_or_attribute',
       'id_or_attribute v0 LBRACKET expression v1 RBRACKET',
       'id_or_attribute v0 LBRACKET expression v2 COMMA v4 expression v3 RBRACKET')
    def variable(self, p):
        return p[0]

    # Before parsing an index expression: checks the id is really an
    # array/matrix and opens fresh operand/operator stacks for the index.
    @_('')
    def v0(self, p):
        self.check_variable_exists(p[-1])
        if self.current_class != None:
            if not 'd1' in self.class_table[self.current_class]['vars'][p[-1]]:
                self.found_errors = True
                print('ERROR: Variable\033[1m', p[-1],
                      '\033[0mis not an array or matrix.')
                self.stack_of_stacks.append([])
                self.stack_of_stacks.append([])
                return
        elif p[-1] in self.function_table[self.curr_scope]['vars']:
            if not 'd1' in self.function_table[self.curr_scope]['vars'][p[-1]]:
                self.found_errors = True
                print('ERROR: Variable\033[1m', p[-1],
                      '\033[0mis not an array or matrix.')
                self.stack_of_stacks.append([])
                self.stack_of_stacks.append([])
                return
        elif not 'd1' in self.function_table[self.program_name]['vars'][p[-1]]:
            self.found_errors = True
            print('ERROR: Variable\033[1m', p[-1],
                  '\033[0mis not an array or matrix.')
            self.stack_of_stacks.append([])
            self.stack_of_stacks.append([])
            return
        self.last_arr_id = p[-1]
        self.stack_of_stacks.append([])
        self.stack_of_stacks.append([])

    # Before the second index of a matrix access: checks the id has a second
    # dimension and opens fresh stacks for the second index expression.
    @_('')
    def v4(self, p):
        self.check_variable_exists(self.last_arr_id)
        if self.current_class != None:
            if not 'd2' in self.class_table[self.current_class]['vars'][self.last_arr_id]:
                self.found_errors = True
                print('ERROR: Variable\033[1m', self.last_arr_id,
                      '\033[0mis not a matrix.')
        elif self.last_arr_id in self.function_table[self.curr_scope]['vars']:
            if not 'd2' in self.function_table[self.curr_scope]['vars'][self.last_arr_id]:
                self.found_errors = True
                print('ERROR: Variable\033[1m', self.last_arr_id,
                      '\033[0mis not a matrix.')
        else:
            if not 'd2' in self.function_table[self.program_name]['vars'][self.last_arr_id]:
                self.found_errors = True
                print('ERROR: Variable\033[1m', self.last_arr_id,
                      '\033[0mis not a matrix.')
        self.stack_of_stacks.append([])
        self.stack_of_stacks.append([])

    # Closes a one-dimensional array access: evaluates the index expression,
    # emits 'verify' (bounds check against d1), adds the base address and
    # pushes the resulting indirect address onto last_arr_t.
    # Temps live at base 3000; int constants at base 4500.
    @_('')
    def v1(self, p):
        made_quad = False
        while(len(self.stack_of_stacks[-1])):
            ro = self.stack_of_stacks[-2].pop()
            lo = self.stack_of_stacks[-2].pop()
            op = self.stack_of_stacks[-1].pop()
            self.last_type = sm.checkOperation(lo['type'], ro['type'], op)
            idx = self.types.index(self.last_type)
            num_temps = self.function_table[self.curr_scope]['num_temps']
            t_dir = idx * 300 + \
                int(num_temps.split('\u001f')[idx]) + 3000
            self.function_table[self.curr_scope]['num_temps'] = self.update_num_temps(
                num_temps, idx)
            self.quadruples.append(
                Quadruple(lo['dir'], ro['dir'], op, t_dir))
            self.temp_counter += 1
            self.quad_counter += 1
            made_quad = True
        if self.current_class != None and self.last_arr_id in self.class_table[self.current_class]['vars']:
            t_addr = self.quadruples[-1].res if made_quad else self.stack_of_stacks[-2].pop()[
                'dir']
            # Index must be int or float (type slots 0/1 in any 1500 segment).
            if (t_addr % 1500) // 300 != 0 and (t_addr % 1500) // 300 != 1:
                raise TypeError('Type mismatch')
            lms = self.class_table[self.current_class]['vars'][self.last_arr_id]['d1']
            self.quadruples.append(Quadruple(0, lms, 'verify', t_addr))
            self.quad_counter += 1
            dir_b = self.class_table[self.current_class]['vars'][self.last_arr_id]['dir']
            if not dir_b in self.constant_table['int']:
                self.constant_table['int'].append(dir_b)
            cons_dir = self.constant_table['int'].index(dir_b) + 4500
            num_temps = self.function_table[self.curr_scope]['num_temps']
            t_dir = int(num_temps.split('\u001f')[0]) + 3000
            self.function_table[self.curr_scope]['num_temps'] = self.update_num_temps(
                num_temps, 0)
            self.quadruples.append(Quadruple(cons_dir, t_addr, '+', t_dir))
            self.quad_counter += 1
            self.last_arr_t.append(t_dir)
        elif self.last_arr_id in self.function_table[self.curr_scope]['vars']:
            t_addr = self.quadruples[-1].res if made_quad else self.stack_of_stacks[-2].pop()[
                'dir']
            if (t_addr % 1500) // 300 != 0 and (t_addr % 1500) // 300 != 1:
                raise TypeError('Type mismatch')
            lms = self.function_table[self.curr_scope]['vars'][self.last_arr_id]['d1']
            self.quadruples.append(Quadruple(0, lms, 'verify', t_addr))
            self.quad_counter += 1
            dir_b = self.function_table[self.curr_scope]['vars'][self.last_arr_id]['dir']
            if not dir_b in self.constant_table['int']:
                self.constant_table['int'].append(dir_b)
            cons_dir = self.constant_table['int'].index(dir_b) + 4500
            num_temps = self.function_table[self.curr_scope]['num_temps']
            t_dir = int(num_temps.split('\u001f')[0]) + 3000
            self.function_table[self.curr_scope]['num_temps'] = self.update_num_temps(
                num_temps, 0)
            self.quadruples.append(Quadruple(cons_dir, t_addr, '+', t_dir))
            self.quad_counter += 1
            self.last_arr_t.append(t_dir)
        else:
            t_addr = self.quadruples[-1].res if made_quad else self.stack_of_stacks[-2].pop()[
                'dir']
            if (t_addr % 1500) // 300 != 0 and (t_addr % 1500) // 300 != 1:
                raise TypeError('Type mismatch')
            lms = self.function_table[self.program_name]['vars'][self.last_arr_id]['d1']
            self.quadruples.append(Quadruple(0, lms, 'verify', t_addr))
            self.quad_counter += 1
            dir_b = self.function_table[self.program_name]['vars'][self.last_arr_id]['dir']
            if not dir_b in self.constant_table['int']:
                self.constant_table['int'].append(dir_b)
            cons_dir = self.constant_table['int'].index(dir_b) + 4500
            num_temps = self.function_table[self.curr_scope]['num_temps']
            t_dir = int(num_temps.split('\u001f')[0]) + 3000
            self.function_table[self.curr_scope]['num_temps'] = self.update_num_temps(
                num_temps, 0)
            self.quadruples.append(Quadruple(cons_dir, t_addr, '+', t_dir))
            self.quad_counter += 1
            self.last_arr_t.append(t_dir)
        # Discard the index expression's operand/operator stacks.
        self.stack_of_stacks.pop()
        self.stack_of_stacks.pop()

    # First index of a matrix access: bounds-checks against d1, multiplies by
    # d2 (row displacement) and saves the partial result in displacements.
    # NOTE(review): the class-attribute case is a bare 'pass' here (and in v3)
    # — matrix attributes of classes appear unhandled; confirm.
    @_('')
    def v2(self, p):
        made_quad = False
        while(len(self.stack_of_stacks[-1])):
            ro = self.stack_of_stacks[-2].pop()
            lo = self.stack_of_stacks[-2].pop()
            op = self.stack_of_stacks[-1].pop()
            self.last_type = sm.checkOperation(lo['type'], ro['type'], op)
            idx = self.types.index(self.last_type)
            num_temps = self.function_table[self.curr_scope]['num_temps']
            t_dir = idx * 300 + \
                int(num_temps.split('\u001f')[idx]) + 3000
            self.function_table[self.curr_scope]['num_temps'] = self.update_num_temps(
                num_temps, idx)
            self.quadruples.append(
                Quadruple(lo['dir'], ro['dir'], op, t_dir))
            self.temp_counter += 1
            self.quad_counter += 1
            made_quad = True
        if self.current_class != None:
            pass
        elif self.last_arr_id in self.function_table[self.curr_scope]['vars']:
            t_addr = self.quadruples[-1].res if made_quad else self.stack_of_stacks[-2].pop()[
                'dir']
            if (t_addr % 1500) // 300 != 0 and (t_addr % 1500) // 300 != 1:
                raise TypeError('Type mismatch')
            lms = self.function_table[self.curr_scope]['vars'][self.last_arr_id]['d1']
            self.quadruples.append(Quadruple(0, lms, 'verify', t_addr))
            self.quad_counter += 1
            d2 = self.function_table[self.curr_scope]['vars'][self.last_arr_id]['d2']
            if not d2 in self.constant_table['int']:
                self.constant_table['int'].append(d2)
            cons_dir = self.constant_table['int'].index(d2) + 4500
            num_temps = self.function_table[self.curr_scope]['num_temps']
            t_dir = int(num_temps.split('\u001f')[0]) + 3000
            self.function_table[self.curr_scope]['num_temps'] = self.update_num_temps(
                num_temps, 0)
            self.quadruples.append(Quadruple(cons_dir, t_addr, '*', t_dir))
            self.quad_counter += 1
            self.displacements.append(t_dir)
        else:
            t_addr = self.quadruples[-1].res if made_quad else self.stack_of_stacks[-2].pop()[
                'dir']
            if (t_addr % 1500) // 300 != 0 and (t_addr % 1500) // 300 != 1:
                raise TypeError('Type mismatch')
            lms = self.function_table[self.program_name]['vars'][self.last_arr_id]['d1']
            self.quadruples.append(Quadruple(0, lms, 'verify', t_addr))
            self.quad_counter += 1
            d2 = self.function_table[self.program_name]['vars'][self.last_arr_id]['d2']
            if not d2 in self.constant_table['int']:
                self.constant_table['int'].append(d2)
            cons_dir = self.constant_table['int'].index(d2) + 4500
            num_temps = self.function_table[self.curr_scope]['num_temps']
            t_dir = int(num_temps.split('\u001f')[0]) + 3000
            self.function_table[self.curr_scope]['num_temps'] = self.update_num_temps(
                num_temps, 0)
            self.quadruples.append(Quadruple(cons_dir, t_addr, '*', t_dir))
            self.quad_counter += 1
            self.displacements.append(t_dir)
        self.stack_of_stacks.pop()
        self.stack_of_stacks.pop()

    # Second index of a matrix access: bounds-checks against d2, adds the row
    # displacement and the base address (two quads, two temps) and pushes the
    # final indirect address onto last_arr_t.
    @_('')
    def v3(self, p):
        made_quad = False
        while(len(self.stack_of_stacks[-1])):
            ro = self.stack_of_stacks[-2].pop()
            lo = self.stack_of_stacks[-2].pop()
            op = self.stack_of_stacks[-1].pop()
            self.last_type = sm.checkOperation(lo['type'], ro['type'], op)
            idx = self.types.index(self.last_type)
            num_temps = self.function_table[self.curr_scope]['num_temps']
            t_dir = idx * 300 + \
                int(num_temps.split('\u001f')[idx]) + 3000
            self.function_table[self.curr_scope]['num_temps'] = self.update_num_temps(
                num_temps, idx)
            self.quadruples.append(
                Quadruple(lo['dir'], ro['dir'], op, t_dir))
            self.temp_counter += 1
            self.quad_counter += 1
            made_quad = True
        if self.current_class != None:
            pass
        elif self.last_arr_id in self.function_table[self.curr_scope]['vars']:
            t_addr = self.quadruples[-1].res if made_quad else self.stack_of_stacks[-2].pop()[
                'dir']
            if (t_addr % 1500) // 300 != 0 and (t_addr % 1500) // 300 != 1:
                raise TypeError('Type mismatch')
            lms = self.function_table[self.curr_scope]['vars'][self.last_arr_id]['d2']
            self.quadruples.append(Quadruple(0, lms, 'verify', t_addr))
            self.quad_counter += 1
            dir_b = self.function_table[self.curr_scope]['vars'][self.last_arr_id]['dir']
            if not dir_b in self.constant_table['int']:
                self.constant_table['int'].append(dir_b)
            cons_dir = self.constant_table['int'].index(dir_b) + 4500
            num_temps = self.function_table[self.curr_scope]['num_temps']
            t_dir = int(num_temps.split('\u001f')[0]) + 3000
            # Reserve two int temps: row+col, then +base.
            self.function_table[self.curr_scope]['num_temps'] = self.update_num_temps(
                num_temps, 0, 2)
            self.quadruples.append(
                Quadruple(self.displacements.pop(), t_addr, '+', t_dir))
            self.quadruples.append(Quadruple(cons_dir, t_dir, '+', t_dir + 1))
            self.quad_counter += 2
            self.last_arr_t.append(t_dir + 1)
        else:
            t_addr = self.quadruples[-1].res if made_quad else self.stack_of_stacks[-2].pop()[
                'dir']
            if (t_addr % 1500) // 300 != 0 and (t_addr % 1500) // 300 != 1:
                raise TypeError('Type mismatch')
            lms = self.function_table[self.program_name]['vars'][self.last_arr_id]['d2']
            self.quadruples.append(Quadruple(0, lms, 'verify', t_addr))
            self.quad_counter += 1
            dir_b = self.function_table[self.program_name]['vars'][self.last_arr_id]['dir']
            if not dir_b in self.constant_table['int']:
                self.constant_table['int'].append(dir_b)
            cons_dir = self.constant_table['int'].index(dir_b) + 4500
            num_temps = self.function_table[self.curr_scope]['num_temps']
            t_dir = int(num_temps.split('\u001f')[0]) + 3000
            self.function_table[self.curr_scope]['num_temps'] = self.update_num_temps(
                num_temps, 0, 2)
            self.quadruples.append(
                Quadruple(self.displacements.pop(), t_addr, '+', t_dir))
            self.quadruples.append(Quadruple(cons_dir, t_dir, '+', t_dir + 1))
            self.quad_counter += 2
            self.last_arr_t.append(t_dir + 1)
        self.stack_of_stacks.pop()
        self.stack_of_stacks.pop()

    @_('ID', 'ID DOT ID')
    def id_or_attribute(self, p):
        # "obj.attr" is flattened back into a single dotted name.
        if len(p) > 1:
            return p[0] + p[1] + p[2]
        return p[0]

    # Resolves a constant or variable operand into
    # {'value', 'type', 'dir'}. Constant addresses: int 4500+, float 4800+,
    # string 5100+, bool 5400+.
    @_('variable', 'CTE_I', 'CTE_F', 'CTE_STRING', 'cte_bool', 'call_to_function')
    def var_cte(self, p):
        offset = 4500
        if hasattr(p, 'CTE_I'):
            cte_type = 'int'
            if not p[0] in self.constant_table['int']:
                self.constant_table['int'].append(p[0])
            cons_dir = self.constant_table['int'].index(p[0]) + offset
        elif hasattr(p, 'CTE_F'):
            cte_type = 'float'
            if not p[0] in self.constant_table['float']:
                self.constant_table['float'].append(p[0])
            cons_dir = self.constant_table['float'].index(p[0]) + offset + 300
        elif hasattr(p, 'CTE_STRING'):
            cte_type = 'string'
            if not p[0] in self.constant_table['string']:
                self.constant_table['string'].append(p[0])
            cons_dir = self.constant_table['string'].index(p[0]) + offset + 600
        elif hasattr(p, 'cte_bool'):
            cte_type = 'bool'
            if not p[0] in self.constant_table['bool']:
                self.constant_table['bool'].append(p[0])
            cons_dir = self.constant_table['bool'].index(p[0]) + offset + 900
        elif hasattr(p, 'call_to_function'):
            # Already a resolved operand dict from call_to_function.
            return p[0]
        else:
            # Plain variable: look it up in class scope, current scope, or
            # the global (program) scope, in that order.
            if not self.check_variable_exists(p[0]):
                for i in range(1, len(self.symstack)):
                    if hasattr(self.symstack[i * -1], 'lineno'):
                        lineno = self.symstack[i * -1].lineno
                        break
                self.found_errors = True
                print('ERROR: No variable\033[1m', p[0], '\033[0mwas found.')
                print(' Missing reference found on line', lineno)
                return
            if self.current_class != None and p[0] in self.class_table[self.current_class]['vars']:
                cte_type = self.class_table[self.current_class]['vars'][p[0]]['type']
                cons_dir = self.class_table[self.current_class]['vars'][p[0]]['dir']
            else:
                cte_type = self.get_var_type(p[0], self.symstack[-2])
                if p[0] in self.function_table[self.curr_scope]['vars']:
                    cons_dir = self.function_table[self.curr_scope]['vars'][p[0]]['dir']
                else:
                    cons_dir = self.function_table[self.program_name]['vars'][p[0]]['dir']
        return {'value': p[0], 'type': cte_type, 'dir': cons_dir}

    @_('constant e2 operator e3 expression', 'constant e2',
       'LPAREN e1 expression RPAREN e4',
       'LPAREN e1 expression RPAREN e4 operator e3 expression')
    def expression(self, p):
        if hasattr(p, 'LPAREN'):
            return p[2]
        return p[0]

    # Pushes a fake-bottom '(' onto the operator stack.
    @_('')
    def e1(self, p):
        self.stack_of_stacks[-1].append('(')

    # Pushes an operand onto the operand stack.
    @_('')
    def e2(self, p):
        self.stack_of_stacks[-2].append(p[-1])

    # Operator-precedence driver: pops and emits quads for stack operators
    # with precedence >= the incoming operator's, then pushes the new one.
    # Precedence (high to low): * /, + -, relational/equality, logical.
    @_('')
    def e3(self, p):
        if len(self.stack_of_stacks[-1]) == 0 or self.stack_of_stacks[-1][-1] == '(':
            self.stack_of_stacks[-1].append(p[-1])
        elif self.stack_of_stacks[-1][-1] == '*' or self.stack_of_stacks[-1][-1] == '/':
            self.make_and_push_quad()
            if (self.stack_of_stacks[-1] and (self.stack_of_stacks[-1][-1] == '+' or self.stack_of_stacks[-1][-1] == '-')) and (p[-1] == '+' or p[-1] == '-'):
                self.make_and_push_quad()
            self.stack_of_stacks[-1].append(p[-1])
        elif p[-1] == '*' or p[-1] == '/':
            self.stack_of_stacks[-1].append(p[-1])
        elif self.stack_of_stacks[-1][-1] == '+' or self.stack_of_stacks[-1][-1] == '-':
            self.make_and_push_quad()
            self.stack_of_stacks[-1].append(p[-1])
        elif p[-1] == '+' or p[-1] == '-':
            self.stack_of_stacks[-1].append(p[-1])
        elif self.stack_of_stacks[-1][-1] in sm.comparison_ops or self.stack_of_stacks[-1][-1] in sm.equality_ops:
            self.make_and_push_quad()
            self.stack_of_stacks[-1].append(p[-1])
        elif p[-1] in sm.comparison_ops or p[-1] in sm.equality_ops:
            self.stack_of_stacks[-1].append(p[-1])
        elif self.stack_of_stacks[-1][-1] in sm.logic_ops:
            self.make_and_push_quad()
            self.stack_of_stacks[-1].append(p[-1])
        elif p[-1] in sm.logic_ops:
            self.stack_of_stacks[-1].append(p[-1])

    # Closing ')': flush operators down to the matching '(' and discard it.
    @_('')
    def e4(self, p):
        while(self.stack_of_stacks[-1][-1] != '('):
            self.make_and_push_quad()
        self.stack_of_stacks[-1].pop()

    @_('AND', 'OR')
    def logical_operator(self, p):
        return p[-1]

    @_('LT', 'GT', 'SAME', 'GEQ', 'LEQ', 'NEQ')
    def relational_operator(self, p):
        return p[0]

    @_('PLUS', 'MINUS', 'MULTIPLY', 'DIVIDE')
    def arithmetic_operator(self, p):
        return p[0]

    @_('logical_operator', 'relational_operator', 'arithmetic_operator')
    def operator(self, p):
        return p[0]

    # NOTE(review): for 'MINUS var_cte', p[0] is the sign and p[1] the operand
    # dict, so 'p[1] == "-"' can never match a sign and unary minus appears to
    # be a no-op here — confirm intended behavior.
    @_('PLUS var_cte', 'MINUS var_cte', 'var_cte')
    def constant(self, p):
        if len(p) > 1 and p[1] == '-':
            return -p.var_cte
        else:
            return p.var_cte

    @_('READ LPAREN read_h')
    def read(self, p):
        return 'read'

    @_('variable r1 COMMA read_h', 'variable r1 RPAREN SEMI')
    def read_h(self, p):
        return 'read_h'

    # Emits a 'read' quadruple targeting the variable's address (indirect
    # '$addr' for array elements).
    @_('')
    def r1(self, p):
        if self.current_class != None and p[-1] in self.class_table[self.current_class]['vars']:
            if 'd1' in self.class_table[self.current_class]['vars'][p[-1]]:
                var_addr = '$' + str(self.last_arr_t.pop())
            else:
                var_addr = self.class_table[self.current_class]['vars'][p[-1]]['dir']
        elif p[-1] in self.function_table[self.curr_scope]['vars']:
            if 'd1' in self.function_table[self.curr_scope]['vars'][p[-1]]:
                var_addr = '$' + str(self.last_arr_t.pop())
            else:
                var_addr = self.function_table[self.curr_scope]['vars'][p[-1]]['dir']
        elif p[-1] in self.function_table[self.program_name]['vars']:
            if 'd1' in self.function_table[self.program_name]['vars'][p[-1]]:
                var_addr = '$' + str(self.last_arr_t.pop())
            else:
                var_addr = self.function_table[self.program_name]['vars'][p[-1]]['dir']
        else:
            raise UndeclaredIdError(p[-1])
        self.quadruples.append(Quadruple(-1, -1, 'read', var_addr))
        self.quad_counter += 1

    # A call used as an expression operand: yields the temp holding the
    # function's return value (assigned by ctf0).
    @_('function_or_method vf0 ctf2 LPAREN func_params RPAREN fp2 fp3 ctf0 ctf3')
    def call_to_function(self, p):
        if not self.check_variable_exists(self.called_func):
            return
        func_dir = self.function_table[self.program_name]['vars'][self.called_func]['dir']
        func_type = self.function_table[self.called_func]['return_type']
        return {'value': 't' + str(self.temp_counter - 1), 'type': func_type, 'dir': func_dir}

    # Open fresh operand/operator stacks for the argument expressions.
    @_('')
    def ctf2(self, p):
        self.stack_of_stacks.append([])
        self.stack_of_stacks.append([])

    # Discard the argument-expression stacks after the call.
    @_('')
    def ctf3(self, p):
        self.stack_of_stacks.pop()
        self.stack_of_stacks.pop()

    # After the call: copy the callee's return slot (real_dir) into a fresh
    # temp so later calls cannot clobber this result.
    @_('')
    def ctf0(self, p):
        if not self.check_variable_exists(self.called_func):
            self.found_errors = True
            print('ERROR: No function\033[1m',
                  self.called_func, '\033[0mwas found.')
            print(' Missing reference found on line', self.symstack[-3].lineno)
            return
        func_dir = self.function_table[self.program_name]['vars'][self.called_func]['real_dir']
        func_type = self.function_table[self.program_name]['vars'][self.called_func]['type']
        idx = self.types.index(func_type)
        num_temps = self.function_table[self.curr_scope]['num_temps']
        t_dir = idx * 300 + \
            int(num_temps.split('\u001f')[idx]) + 3000
        self.function_table[self.curr_scope]['num_temps'] = self.update_num_temps(
            num_temps, idx)
        self.quadruples.append(
            Quadruple(func_dir, -1, '=', t_dir))
        self.function_table[self.program_name]['vars'][self.called_func]['dir'] = t_dir
        self.quad_counter += 1
        self.temp_counter += 1

    # Resolves "f(...)" vs "obj.method(...)". For a method call, also builds
    # the quads that copy the object's attributes into the class's attribute
    # addresses before the call.
    @_('ID ctf1', 'ID DOT ID')
    def function_or_method(self, p):
        if(len(p) == 2):
            return (p[0], None)
        else:
            var_type = self.get_var_type(p[0], self.symstack[-1])
            quads = []
            for attr in self.class_table[var_type]['vars']:
                var_dir = self.function_table[self.curr_scope]['vars'][p[0]+'.'+attr]['dir']
                attr_dir = self.class_table[var_type]['vars'][attr]['dir']
                quads.append(Quadruple(var_dir, -2, '=', attr_dir))
            return (var_type + p[1] + p[2], quads)

    # Checks the called function exists (method names are class-prefixed).
    @_('')
    def ctf1(self, p):
        if self.current_class != None:
            if not self.current_class + '.' + p[-1] in self.function_table:
                self.found_errors = True
                print('ERROR: No function\033[1m', p[-1], '\033[0mwas found.')
                print(' Missing reference found on line',
                      self.symstack[-1].lineno)
        elif not p[-1] in self.function_table:
            self.found_errors = True
            print('ERROR: No function\033[1m', p[-1], '\033[0mwas found.')
            print(' Missing reference found on line', self.symstack[-1].lineno)

    @_('COMMA expression fp1 param_list', 'empty')
    def param_list(self, p):
        return 'param_list'

    @_('PRINT LPAREN res_write RPAREN SEMI')
    def print(self, p):
        return 'print'

    @_('expression pr1 comma_thing')
    def res_write(self, p):
        return 'res_write'

    @_('COMMA res_write', 'empty')
    def comma_thing(self, p):
        return 'comma_thing'

    # Emits a 'print' quadruple for one print argument, first flushing any
    # pending operators of the argument expression into temp quads.
    @_('')
    def pr1(self, p):
        made_quad = False
        while(len(self.stack_of_stacks[-1])):
            ro = self.stack_of_stacks[-2].pop()
            lo = self.stack_of_stacks[-2].pop()
            op = self.stack_of_stacks[-1].pop()
            self.last_type = sm.checkOperation(lo['type'], ro['type'], op)
            idx = self.types.index(self.last_type)
            num_temps = self.function_table[self.curr_scope]['num_temps']
            t_dir = idx * 300 + \
                int(num_temps.split('\u001f')[idx]) + 3000
            self.function_table[self.curr_scope]['num_temps'] = self.update_num_temps(
                num_temps, idx)
            # Constants (4500-5999) are never array elements; everything else
            # may need the indirect '$' address.
            if not lo['dir'] in range(4500, 6000) and self.check_var_is_array(lo):
                lo_dir = '$' + str(self.last_arr_t.pop())
            else:
                lo_dir = lo['dir']
            if not ro['dir'] in range(4500, 6000) and self.check_var_is_array(ro):
                ro_dir = '$' + str(self.last_arr_t.pop())
            else:
                ro_dir = ro['dir']
            self.quadruples.append(
                Quadruple(lo_dir, ro_dir, op, t_dir))
            self.temp_counter += 1
            self.quad_counter += 1
            made_quad = True
        if made_quad:
            last_quad = self.quadruples[-1]
            self.quadruples.append(
                Quadruple(-1, -1, 'print', last_quad.res))
            self.quad_counter += 1
        else:
            var = self.stack_of_stacks[-2].pop()
            if not var:
                return
            if self.check_var_is_array(var):
                var_dir = '$' + str(self.last_arr_t.pop())
            else:
                var_dir = var['dir']
            self.quadruples.append(
                Quadruple(-1, -1, 'print', var_dir))
            self.quad_counter += 1
    @_('TRUE', 'FALSE')
    def cte_bool(self, p):
        return p[0]

    @_('IF LPAREN expression dec1 RPAREN THEN LCURL statements RCURL else_stm')
    def decision_statement(self, p):
        return 'decision_statement'

    # End of an if-condition: flush pending operators into temp quads, require
    # a bool result, then emit a pending goto_f whose target is patched later
    # (dec2/dec3/dec4) via the jumps stack.
    @_('')
    def dec1(self, p):
        while len(self.stack_of_stacks[-1]):
            ro = self.stack_of_stacks[-2].pop()
            lo = self.stack_of_stacks[-2].pop()
            op = self.stack_of_stacks[-1].pop()
            self.last_type = sm.checkOperation(lo['type'], ro['type'], op)
            idx = self.types.index(self.last_type)
            num_temps = self.function_table[self.curr_scope]['num_temps']
            # Temps live at base 3000.
            t_dir = idx * 300 + 3000
            r_type = sm.checkOperation(lo['type'], ro['type'], op)
            self.stack_of_stacks[-2].append(
                {'value': 't' + str(self.temp_counter), 'type': r_type, 'dir': t_dir})
            self.function_table[self.curr_scope]['num_temps'] = self.update_num_temps(
                num_temps, idx)
            if self.check_var_is_array(lo):
                lo_dir = '$' + str(self.last_arr_t.pop())
            else:
                lo_dir = lo['dir']
            if self.check_var_is_array(ro):
                ro_dir = '$' + str(self.last_arr_t.pop())
            else:
                ro_dir = ro['dir']
            self.quadruples.append(
                Quadruple(lo_dir, ro_dir, op, t_dir))
            self.temp_counter += 1
            self.quad_counter += 1
        lo = self.stack_of_stacks[-2].pop()
        if lo['type'] != 'bool':
            raise SyntaxError(
                'Expression to evaluate in if statement is not boolean')
        else:
            self.quadruples.append(Quadruple(-1, lo['dir'], 'goto_f', -1))
            self.jumps.append(self.quad_counter)
            self.quad_counter += 1

    @_('dec2 ELSE LCURL statements RCURL dec3', 'empty dec4')
    def else_stm(self, p):
        return 'else_stm'

    # Start of else: emit the end-of-then goto (patched by dec3) and patch the
    # condition's goto_f to jump here.
    @_('')
    def dec2(self, p):
        falso = self.jumps.pop()
        self.quadruples.append(Quadruple(-1, -1, 'goto', -1))
        self.jumps.append(self.quad_counter)
        self.quad_counter += 1
        self.quadruples[falso - 1].res = self.quad_counter

    # End of else: patch the end-of-then goto to jump past the else body.
    @_('')
    def dec3(self, p):
        jump = self.jumps.pop()
        self.quadruples[jump - 1].res = self.quad_counter

    # If without else: patch the goto_f to jump past the then body.
    @_('')
    def dec4(self, p):
        jump = self.jumps.pop()
        self.quadruples[jump - 1].res = self.quad_counter

    @_('conditional', 'non_conditional')
    def repetition_statement(self, p):
        return 'repetition_statement'
@_('WHILE LPAREN con0 expression con1 RPAREN DO LCURL statements RCURL con2') def conditional(self, p): return 'conditional' @_('') def con0(self, p): self.jumps.append(self.quad_counter) @_('') def con1(self, p): while len(self.stack_of_stacks[-1]): ro = self.stack_of_stacks[-2].pop() lo = self.stack_of_stacks[-2].pop() op = self.stack_of_stacks[-1].pop() self.last_type = sm.checkOperation(lo['type'], ro['type'], op) idx = self.types.index(self.last_type) num_temps = self.function_table[self.curr_scope]['num_temps'] t_dir = idx * 300 + 3000 self.function_table[self.curr_scope]['num_temps'] = self.update_num_temps( num_temps, idx) r_type = sm.checkOperation(lo['type'], ro['type'], op) self.stack_of_stacks[-2].append( {'value': 't' + str(self.temp_counter), 'type': r_type, 'dir': t_dir}) if self.check_var_is_array(lo): lo_dir = '$' + str(self.last_arr_t.pop()) else: lo_dir = lo['dir'] if self.check_var_is_array(ro): ro_dir = '$' + str(self.last_arr_t.pop()) else: ro_dir = ro['dir'] self.quadruples.append( Quadruple(lo_dir, ro_dir, op, t_dir)) self.temp_counter += 1 self.quad_counter += 1 if self.last_type != 'bool': raise SyntaxError( 'Expression to evaluate in if statement is not boolean') else: last_quad = self.quadruples[-1].res self.quadruples.append(Quadruple(-1, last_quad, 'goto_f', -1)) self.jumps.append(self.quad_counter) self.quad_counter += 1 @_('') def con2(self, p): falso = self.jumps.pop() ret = self.jumps.pop() self.quadruples.append(Quadruple(-1, -1, 'goto', ret)) self.quadruples[falso - 1].res = self.quad_counter + 1 if len(self.break_stack): bq = self.break_stack.pop() self.quadruples[bq - 1].res = self.quad_counter + 1 self.quad_counter += 1 @_('FOR variable ass1 EQUALS expression ass2 nc0 UNTIL expression nc1 DO nc2 LCURL statements RCURL nc3') def non_conditional(self, p): return 'non_conditional' @_('') def nc0(self, p): self.for_var_dir.append(self.quadruples[-1].res) @_('') def nc1(self, p): made_quad = False 
while(len(self.stack_of_stacks[-1])): ro = self.stack_of_stacks[-2].pop() lo = self.stack_of_stacks[-2].pop() op = self.stack_of_stacks[-1].pop() self.last_type = sm.checkOperation(lo['type'], ro['type'], op) idx = self.types.index(self.last_type) num_temps = self.function_table[self.curr_scope]['num_temps'] t_dir = idx * 300 + \ int(num_temps.split('\u001f')[idx]) + 3000 self.function_table[self.curr_scope]['num_temps'] = self.update_num_temps( num_temps, idx) if not lo['dir'] in range(4500, 6000) and self.check_var_is_array(lo): lo_dir = '$' + str(self.last_arr_t.pop()) else: lo_dir = lo['dir'] if not ro['dir'] in range(4500, 6000) and self.check_var_is_array(ro): ro_dir = '$' + str(self.last_arr_t.pop()) else: ro_dir = ro['dir'] self.quadruples.append( Quadruple(lo_dir, ro_dir, op, t_dir)) self.temp_counter += 1 self.quad_counter += 1 made_quad = True if made_quad: last_quad = self.quadruples[-1].res if (last_quad % 1500) // 300 != 0 and (last_quad % 1500) // 300 != 1: raise TypeError('Type mismatch') num_temps = self.function_table[self.curr_scope]['num_temps'] t_dir = 3 * 300 + \ int(num_temps.split('\u001f')[3]) + 3000 self.function_table[self.curr_scope]['num_temps'] = self.update_num_temps( num_temps, 3) self.quadruples.append( Quadruple(self.for_var_dir[-1], last_quad, '<=', t_dir)) self.jumps.append(self.quad_counter) self.quad_counter += 1 self.temp_counter += 1 else: var = self.stack_of_stacks[-2].pop() if (var['dir'] % 1500) // 300 != 0 and (var['dir'] % 1500) // 300 != 1: raise TypeError('Type mismatch') num_temps = self.function_table[self.curr_scope]['num_temps'] t_dir = 3 * 300 + \ int(num_temps.split('\u001f')[3]) + 3000 self.function_table[self.curr_scope]['num_temps'] = self.update_num_temps( num_temps, 3) if self.check_var_is_array(var): var_dir = '$' + str(self.last_arr_t.pop()) else: var_dir = var['dir'] self.quadruples.append( Quadruple(self.for_var_dir[-1], var_dir, '<=', t_dir)) self.jumps.append(self.quad_counter) self.quad_counter += 1 
self.temp_counter += 1 @_('') def nc2(self, p): last_quad = self.quadruples[-1].res self.quadruples.append(Quadruple(-1, last_quad, 'goto_f', -1)) self.jumps.append(self.quad_counter) self.quad_counter += 1 @_('') def nc3(self, p): falso = self.jumps.pop() cond = self.jumps.pop() if not 1 in self.constant_table['int']: self.constant_table['int'].append(1) one_dir = self.constant_table['int'].index(1) + 4500 self.quadruples.append( Quadruple(self.for_var_dir[-1], one_dir, '+', self.for_var_dir[-1])) self.quad_counter += 1 self.quadruples.append(Quadruple(-1, -1, 'goto', cond)) self.quad_counter += 1 self.quadruples[falso - 1].res = self.quad_counter if len(self.break_stack): bq = self.break_stack.pop() self.quadruples[bq - 1].res = self.quad_counter + 1 self.for_var_dir.pop() @_('RETURN LPAREN expression fr0 RPAREN SEMI fr1') def function_returns(self, p): return 'function_returns' @_('') def fr0(self, p): made_quad = False while(len(self.stack_of_stacks[-1])): self.make_and_push_quad() made_quad = True if made_quad: last_quad = self.quadruples[-1] self.quadruples.append( Quadruple(last_quad.res, -1, 'return', self.function_table[self.program_name]['vars'][self.curr_scope]['real_dir'])) self.quad_counter += 1 self.stack_of_stacks[-2].pop() else: self.quadruples.append( Quadruple(self.stack_of_stacks[-2].pop()['dir'], -1, 'return', self.function_table[self.program_name]['vars'][self.curr_scope]['real_dir'])) self.quad_counter += 1 @_('') def fr1(self, p): self.has_returned = True @ _('function_or_method vf0 LPAREN func_params RPAREN fp2 fp3 SEMI') def call_to_void_function(self, p): return 'call_to_void_function' @ _('') def fp2(self, p): self.quadruples.append( Quadruple(self.called_func, -1, 'gosub', -1)) self.quad_counter += 1 @ _('') def fp3(self, p): self.param_counter = 0 @ _('') def vf0(self, p): self.called_func, quads = p[-1] if self.current_class != None: self.called_func = self.current_class + '.' 
+ self.called_func self.quadruples.append(Quadruple(self.called_func, -1, 'era', -1)) self.quad_counter += 1 if quads: for q in quads: self.quadruples.append(q) self.quad_counter += 1 @ _('expression fp1 param_list', 'empty') def func_params(self, p): return 'func_params' @ _('') def fp1(self, p): if not self.called_func in self.function_table: for i in range(1, len(self.symstack)): if hasattr(self.symstack[i * -1], 'lineno'): lineno = self.symstack[i * -1].lineno break self.found_errors = True print('ERROR: No function\033[1m', self.called_func, '\033[0mwas found.') print(' Missing reference found on line', lineno) return made_quad = False while(len(self.stack_of_stacks[-1])): offset = 800 * len(self.types) * 2 ro = self.stack_of_stacks[-2].pop() lo = self.stack_of_stacks[-2].pop() op = self.stack_of_stacks[-1].pop() self.last_type = sm.checkOperation(lo['type'], ro['type'], op) idx = self.types.index(self.last_type) num_temps = self.function_table[self.curr_scope]['num_temps'] self.function_table[self.curr_scope]['num_temps'] = self.update_num_temps( num_temps, idx) t_dir = idx * 300 + 3000 self.quadruples.append( Quadruple(lo['dir'], ro['dir'], op, t_dir)) self.temp_counter += 1 self.quad_counter += 1 made_quad = True if made_quad: last_quad = self.quadruples[-1] if self.param_counter == len(self.function_table[self.called_func]['params']): self.found_errors = True print( 'ERROR: Too many parameters passed in call to function on line', self.symstack[-2].lineno) return try: sm.checkAssignment(self.types[int(self.function_table[self.called_func] ['params'][self.param_counter])], self.types[(last_quad.res % 1500) // 300], '=') except TypeError: self.found_errors = True print( 'ERROR: Type mismatch on line', self.symstack[-2].lineno) print( ' Expected value of type', self.types[int(self.function_table[self.called_func]['params'][self.param_counter])], 'got value of type', self.types[(last_quad.res % 1500) // 300], 'instead') return self.quadruples.append( 
Quadruple(last_quad.res, -1, 'param', self.param_counter)) self.quad_counter += 1 self.param_counter += 1 else: val = self.stack_of_stacks[-2].pop() if self.param_counter == len(self.function_table[self.called_func]['params']): self.found_errors = True print( 'ERROR: Too many parameters passed in call to function on line', self.symstack[-2].lineno) return if not val: return try: sm.checkAssignment(self.types[int(self.function_table[self.called_func] ['params'][self.param_counter])], self.types[(val['dir'] % 1500) // 300], '=') except TypeError: self.found_errors = True print( 'ERROR: Type mismatch on line', self.symstack[-2].lineno) print( ' Expected value of type', self.types[int(self.function_table[self.called_func]['params'][self.param_counter])], 'got value of type', self.types[(val['dir'] % 1500) // 300], 'instead') return self.quadruples.append( Quadruple(val['dir'], -1, 'param', self.param_counter)) self.quad_counter += 1 self.param_counter += 1 @ _('MAIN m1_add_to_func_table LPAREN RPAREN LCURL main0 var_declaration statements RCURL main2') def main(self, p): return 'main' @ _('') def main0(self, p): self.quadruples[0].res = self.quad_counter @ _('') def main2(self, p): self.quadruples.append(Quadruple(-1, -1, 'end', -1)) del self.function_table[self.program_name]['vars'] del self.function_table['main']['vars'] for class_name in self.class_table: del self.class_table[class_name]['vars'] pass @ _('') def m1_add_to_func_table(self, p): self.curr_scope = 'main' self.add_to_func_table('main', None) @ _('') def empty(self, p): pass def error(self, p): if not p: return print('ERROR: Syntax error found on line', p.lineno) if p.value == 'var': print( ' All variable declarations must be done before any other statement') elif p.value == '(': print( ' Parentheses are not allowed in this position.') elif p.value == '{': print( ' Curly brackets are not allowed in this position.') elif p.value == '[': print( ' Brackets are not allowed in this position.') elif p.value == 
')': print( ' Closing parenthesis found without matching opening one.') elif p.value == '}': print( ' Closing curly bracket without an opening one.') elif p.value == ']': print( ' Closing bracket without an opening one.') elif p.value == ';': print( ' Must only be used at the end of statements') elif p.value == '=': print( ' Assignment is not allowed here. Perhaps you meant to use ==?') else: print( ' Keyword or id misplaced') if not self.found_errors: print( ' It\'s possible that all other syntax errors may be fixed by solving this one.') self.errok() self.found_errors = True while True: tok = next(self.tokens, None) if tok == None: raise EOFError() if tok.type == 'SEMI': tok = next(self.tokens, None) return tok
true
true
f70eb2c0d33d0d03e11a8847b577c694fe9dd459
1,322
py
Python
tracdap-runtime/python/test/tracdap_examples/test_chaining.py
martin-traverse/tracdap
2df2f08bee352f4f5188953efe5a33aa1ae51f2d
[ "Apache-2.0" ]
null
null
null
tracdap-runtime/python/test/tracdap_examples/test_chaining.py
martin-traverse/tracdap
2df2f08bee352f4f5188953efe5a33aa1ae51f2d
[ "Apache-2.0" ]
null
null
null
tracdap-runtime/python/test/tracdap_examples/test_chaining.py
martin-traverse/tracdap
2df2f08bee352f4f5188953efe5a33aa1ae51f2d
[ "Apache-2.0" ]
null
null
null
# Copyright 2022 Accenture Global Solutions Limited # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import unittest import pathlib import sys import tracdap.rt.launch as launch _ROOT_DIR = pathlib.Path(__file__).parent \ .joinpath("../../../..") \ .resolve() _EXAMPLES_DIR = _ROOT_DIR.joinpath("examples/models/python") class ChainingExample(unittest.TestCase): def test_chaining(self): job_config = _EXAMPLES_DIR.joinpath("chaining/chaining.yaml") sys_config = _EXAMPLES_DIR.joinpath("sys_config.yaml") test_dir = str(_EXAMPLES_DIR.joinpath("chaining")) try: sys.path.append(test_dir) launch.launch_job(job_config, sys_config, dev_mode=True) self.assertTrue(True) finally: sys.path.remove(test_dir)
28.12766
75
0.706505
import unittest import pathlib import sys import tracdap.rt.launch as launch _ROOT_DIR = pathlib.Path(__file__).parent \ .joinpath("../../../..") \ .resolve() _EXAMPLES_DIR = _ROOT_DIR.joinpath("examples/models/python") class ChainingExample(unittest.TestCase): def test_chaining(self): job_config = _EXAMPLES_DIR.joinpath("chaining/chaining.yaml") sys_config = _EXAMPLES_DIR.joinpath("sys_config.yaml") test_dir = str(_EXAMPLES_DIR.joinpath("chaining")) try: sys.path.append(test_dir) launch.launch_job(job_config, sys_config, dev_mode=True) self.assertTrue(True) finally: sys.path.remove(test_dir)
true
true
f70eb2d8117df4c61263da019a776ba2536c2d96
4,243
py
Python
day08/run.py
kung-foo/aoc2021
ce3111d50b7b3354331b8dc546b13c7fe8882ba9
[ "Apache-2.0" ]
null
null
null
day08/run.py
kung-foo/aoc2021
ce3111d50b7b3354331b8dc546b13c7fe8882ba9
[ "Apache-2.0" ]
null
null
null
day08/run.py
kung-foo/aoc2021
ce3111d50b7b3354331b8dc546b13c7fe8882ba9
[ "Apache-2.0" ]
null
null
null
#!/usr/bin/env python3 import os import sys import random import numpy as np src = open("input.txt", "r").readlines() example = """ be cfbegad cbdgef fgaecd cgeb fdcge agebfd fecdb fabcd edb | fdgacbe cefdb cefbgd gcbe edbfga begcd cbg gc gcadebf fbgde acbgfd abcde gfcbed gfec | fcgedb cgb dgebacf gc fgaebd cg bdaec gdafb agbcfd gdcbef bgcad gfac gcb cdgabef | cg cg fdcagb cbg fbegcd cbd adcefb dageb afcb bc aefdc ecdab fgdeca fcdbega | efabcd cedba gadfec cb aecbfdg fbg gf bafeg dbefa fcge gcbea fcaegb dgceab fcbdga | gecf egdcabf bgf bfgea fgeab ca afcebg bdacfeg cfaedg gcfdb baec bfadeg bafgc acf | gebdcfa ecba ca fadegcb dbcfg fgd bdegcaf fgec aegbdf ecdfab fbedc dacgb gdcebf gf | cefg dcbef fcge gbcadfe bdfegc cbegaf gecbf dfcage bdacg ed bedf ced adcbefg gebcd | ed bcgafe cdgba cbgef egadfb cdbfeg cegd fecab cgb gbdefca cg fgcdab egfdb bfceg | gbdfcae bgc cg cgb gcafb gcf dcaebfg ecagb gf abcdeg gaef cafbge fdbac fegbdc | fgae cfgab fg bagce """.splitlines() # example = "acedgfb cdfbe gcdfa fbcad dab cefabd cdfgeb eafb cagedb ab | cdfeb fcadb cdfeb cdbaf".splitlines() """ 0: 1: 2: 3: 4: aaaa .... aaaa aaaa .... b c . c . c . c b c b c . c . c . c b c .... .... dddd dddd dddd e f . f e . . f . f e f . f e . . f . f gggg .... gggg gggg .... 5: 6: 7: 8: 9: aaaa aaaa aaaa aaaa aaaa b . b . . c b c b c b . b . . c b c b c dddd dddd .... dddd dddd . f e f . f e f . f . f e f . f e f . f gggg gggg .... 
gggg gggg """ # src = example src = [r.strip() for r in src if r.strip()] dc = [6, 2, 5, 5, 4, 5, 6, 3, 7, 6] # 0 is a subset of 8 # *1 is a subset of 0, 3, 4, 7, 8, 9 ^[2, 5, 6] # 2 is a subset of 8 # 3 is a subset of 8, 9 # *4 is a subset of 8, 9 # 5 is a subset of 8, 9 # 6 is a subset of 8 # *7 is a subset of 0, 3, 8, 9 ^[1, 2, 4, 5, 6] # *8 is a subset of # 9 is a subset of 8 part1 = 0 def collapse(digits): # clean up where we only have a "new" single guess for v in digits.values(): if len(v) == 1: for i, j in digits.items(): if len(j) == 1: continue j.difference_update(v) # cleanup where a digit has multiple guesses, but one of the guesses only appears once guesses = [0 for _ in range(10)] for d in digits.values(): for v in d: guesses[v] += 1 for gi, c in enumerate(guesses): if c > 1: continue for i, j in digits.items(): if gi in j: j.difference_update(j.difference({gi})) return digits def get_choices(digits, idx): choices = [] for k, v in digits.items(): if idx in v: choices.append(k) return choices total = 0 for line in src: scram, outp = line.split(" | ") scram = [frozenset(x) for x in scram.split()] outp = [frozenset(x) for x in outp.split()] for d in outp: if len(d) in (2, 4, 3, 7): part1 += 1 digits = {} for d in scram: if len(d) == 2: digits[d] = {1} one = d elif len(d) == 4: digits[d] = {4} four = d elif len(d) == 3: digits[d] = {7} elif len(d) == 7: digits[d] = {8} elif len(d) == 6: digits[d] = {0, 6, 9} elif len(d) == 5: digits[d] = {2, 3, 5} else: assert "wut" # reduce based on if it is a subset of 1 for d in scram: if one.issubset(d): digits[d].difference_update({2, 5, 6}) # four must be a subset of 9 for c in get_choices(digits, 9): if four.issubset(c): digits[c] = {9} nine = c # five must be a subset of nine for c in get_choices(digits, 5): if c.issubset(nine): digits[c] = {5} digits = collapse(digits) c = "" for d in outp: c += str(list(digits[d])[0]) total += int(c) print("part1:", part1) print("part1:", total)
26.51875
111
0.525572
import os import sys import random import numpy as np src = open("input.txt", "r").readlines() example = """ be cfbegad cbdgef fgaecd cgeb fdcge agebfd fecdb fabcd edb | fdgacbe cefdb cefbgd gcbe edbfga begcd cbg gc gcadebf fbgde acbgfd abcde gfcbed gfec | fcgedb cgb dgebacf gc fgaebd cg bdaec gdafb agbcfd gdcbef bgcad gfac gcb cdgabef | cg cg fdcagb cbg fbegcd cbd adcefb dageb afcb bc aefdc ecdab fgdeca fcdbega | efabcd cedba gadfec cb aecbfdg fbg gf bafeg dbefa fcge gcbea fcaegb dgceab fcbdga | gecf egdcabf bgf bfgea fgeab ca afcebg bdacfeg cfaedg gcfdb baec bfadeg bafgc acf | gebdcfa ecba ca fadegcb dbcfg fgd bdegcaf fgec aegbdf ecdfab fbedc dacgb gdcebf gf | cefg dcbef fcge gbcadfe bdfegc cbegaf gecbf dfcage bdacg ed bedf ced adcbefg gebcd | ed bcgafe cdgba cbgef egadfb cdbfeg cegd fecab cgb gbdefca cg fgcdab egfdb bfceg | gbdfcae bgc cg cgb gcafb gcf dcaebfg ecagb gf abcdeg gaef cafbge fdbac fegbdc | fgae cfgab fg bagce """.splitlines() src = [r.strip() for r in src if r.strip()] dc = [6, 2, 5, 5, 4, 5, 6, 3, 7, 6] part1 = 0 def collapse(digits): for v in digits.values(): if len(v) == 1: for i, j in digits.items(): if len(j) == 1: continue j.difference_update(v) guesses = [0 for _ in range(10)] for d in digits.values(): for v in d: guesses[v] += 1 for gi, c in enumerate(guesses): if c > 1: continue for i, j in digits.items(): if gi in j: j.difference_update(j.difference({gi})) return digits def get_choices(digits, idx): choices = [] for k, v in digits.items(): if idx in v: choices.append(k) return choices total = 0 for line in src: scram, outp = line.split(" | ") scram = [frozenset(x) for x in scram.split()] outp = [frozenset(x) for x in outp.split()] for d in outp: if len(d) in (2, 4, 3, 7): part1 += 1 digits = {} for d in scram: if len(d) == 2: digits[d] = {1} one = d elif len(d) == 4: digits[d] = {4} four = d elif len(d) == 3: digits[d] = {7} elif len(d) == 7: digits[d] = {8} elif len(d) == 6: digits[d] = {0, 6, 9} elif len(d) == 5: digits[d] = {2, 3, 5} 
else: assert "wut" for d in scram: if one.issubset(d): digits[d].difference_update({2, 5, 6}) for c in get_choices(digits, 9): if four.issubset(c): digits[c] = {9} nine = c for c in get_choices(digits, 5): if c.issubset(nine): digits[c] = {5} digits = collapse(digits) c = "" for d in outp: c += str(list(digits[d])[0]) total += int(c) print("part1:", part1) print("part1:", total)
true
true
f70eb2e3a9de09a2e1bdb89d0cfb67ccb56d4e41
7,146
py
Python
Tests/GUI/DMachineSetup/test_DMachineSetup.py
Superomeg4/pyleecan
2b695b5f39e77475a07aa0ea89489fb0a9659337
[ "Apache-2.0" ]
null
null
null
Tests/GUI/DMachineSetup/test_DMachineSetup.py
Superomeg4/pyleecan
2b695b5f39e77475a07aa0ea89489fb0a9659337
[ "Apache-2.0" ]
null
null
null
Tests/GUI/DMachineSetup/test_DMachineSetup.py
Superomeg4/pyleecan
2b695b5f39e77475a07aa0ea89489fb0a9659337
[ "Apache-2.0" ]
null
null
null
# -*- coding: utf-8 -*- """ @date Created on Thu May 18 14:35:34 2017 @copyright (C) 2015-2016 EOMYS ENGINEERING. @author: pierre_b """ from os.path import join, isfile from os import remove import sys from unittest import TestCase from ddt import ddt, data import mock # for unittest of raw_input from PyQt5 import QtWidgets from pyleecan.Classes.MachineSyRM import MachineSyRM from pyleecan.Classes.MachineIPMSM import MachineIPMSM from pyleecan.Classes.MachineDFIM import MachineDFIM from pyleecan.Classes.MachineSCIM import MachineSCIM from pyleecan.Classes.MachineSIPMSM import MachineSIPMSM from pyleecan.Classes.MachineWRSM import MachineWRSM from pyleecan.Classes.MachineSRM import MachineSRM from pyleecan.GUI.Dialog.DMachineSetup.DMachineSetup import DMachineSetup from pyleecan.Tests import save_gui_path as save_path from pyleecan.GUI.Dialog.DMachineSetup.SMachineType.SMachineType import SMachineType from pyleecan.GUI.Dialog.DMachineSetup.SMagnet.SMagnet import SMagnet from pyleecan.GUI.Dialog.DMachineSetup.SWindParam.SWindParam import SWindParam from pyleecan.GUI.Dialog.DMachineSetup.SWindCond.SWindCond import SWindCond from pyleecan.GUI.Dialog.DMachineSetup.SBar.SBar import SBar from pyleecan.GUI.Dialog.DMachineSetup.SWSlot.SWSlot import SWSlot from pyleecan.GUI.Dialog.DMachineSetup.SMHoleMag.SMHoleMag import SMHoleMag import matplotlib.pyplot as plt from pyleecan.Tests import DATA_DIR load_test = list() load_test.append( # 1 {"type": "SCIM", "index": 0, "name": "SCIM_001", "p": 1, "count": 10} ) load_test.append( # 2 {"type": "DFIM", "index": 1, "name": "DFIM_001", "p": 2, "count": 12} ) load_test.append( # 3 {"type": "SyRM", "index": 2, "name": "SynRM_001", "p": 2, "count": 9} ) load_test.append( # 4 {"type": "SPMSM", "index": 3, "name": "SPMSM_001", "p": 4, "count": 9} ) load_test.append( # 5 {"type": "SIPMSM", "index": 4, "name": "SIPMSM_008", "p": 4, "count": 9} ) load_test.append( # 6 {"type": "IPMSM", "index": 5, "name": "machine_IPMSM_A", "p": 5, "count": 
9} ) load_test.append( # 7 {"type": "WRSM", "index": 6, "name": "WRSM_001", "p": 6, "count": 12} ) load_test.append( # 8 {"type": "SRM", "index": 7, "name": "SRM_test_load", "p": 10, "count": 9} ) from PyQt5.QtCore import Qt ENABLE_ITEM = Qt.ItemIsSelectable | Qt.ItemIsEnabled @ddt class test_DMachineSetup(TestCase): """Test that the widget DMachineSetup behave like it should""" def setUp(self): """Run at the begining of every test to setup the gui""" self.widget = DMachineSetup(matlib_path="./MaterialData") @classmethod def setUpClass(cls): """Start the app for the test""" print("\nStart Test DMachineSetup") cls.app = QtWidgets.QApplication(sys.argv) @classmethod def tearDownClass(cls): """Exit the app after the test""" cls.app.quit() @data(*load_test) def test_load(self, test_dict): """Check that you can load a machine """ return_value = ( join(join(DATA_DIR, "Load_GUI"), test_dict["name"] + ".json"), "Json (*.json)", ) with mock.patch( "PyQt5.QtWidgets.QFileDialog.getOpenFileName", return_value=return_value ): # To trigger the slot self.widget.b_load.clicked.emit(True) # To remember to update when adding a new machine type self.assertEqual(self.widget.w_step.c_type.count(), 8) # Check load MachineType self.assertEqual(type(self.widget.w_step), SMachineType) self.assertEqual(self.widget.w_step.c_type.currentIndex(), test_dict["index"]) self.assertEqual(self.widget.w_step.c_type.currentText(), test_dict["type"]) self.assertEqual(self.widget.w_step.si_p.value(), test_dict["p"]) self.assertEqual(self.widget.w_step.le_name.text(), test_dict["name"]) # Check that the nav_step is correct self.assertEqual(self.widget.nav_step.count(), test_dict["count"]) def test_set_save_machine_type(self): """Check that the Widget allow to change the machine type and save """ # Check that all the machine type are available self.assertEqual(self.widget.w_step.c_type.count(), 8) # DFIM self.widget.w_step.c_type.setCurrentIndex(1) self.assertEqual(self.widget.w_step.c_type.currentText(), 
"DFIM") self.assertEqual(type(self.widget.machine), MachineDFIM) save_function(self, self.widget, "test_dfim_save") # SyRM self.widget.w_step.c_type.setCurrentIndex(2) self.assertEqual(self.widget.w_step.c_type.currentText(), "SyRM") self.assertEqual(type(self.widget.machine), MachineSyRM) save_function(self, self.widget, "test_syrm_save") # SPMSM self.widget.w_step.c_type.setCurrentIndex(3) self.assertEqual(self.widget.w_step.c_type.currentText(), "SPMSM") self.assertEqual(type(self.widget.machine), MachineSIPMSM) save_function(self, self.widget, "test_spmsm_save") # SIPMSM self.widget.w_step.c_type.setCurrentIndex(4) self.assertEqual(self.widget.w_step.c_type.currentText(), "SIPMSM") self.assertEqual(type(self.widget.machine), MachineSIPMSM) save_function(self, self.widget, "test_sipmsm_save") # IPMSM self.widget.w_step.c_type.setCurrentIndex(5) self.assertEqual(self.widget.w_step.c_type.currentText(), "IPMSM") self.assertEqual(type(self.widget.machine), MachineIPMSM) save_function(self, self.widget, "test_ipmsm_save") # WRSM self.widget.w_step.c_type.setCurrentIndex(6) self.assertEqual(self.widget.w_step.c_type.currentText(), "WRSM") self.assertEqual(type(self.widget.machine), MachineWRSM) save_function(self, self.widget, "test_wrsm_save") # SRM self.widget.w_step.c_type.setCurrentIndex(7) self.assertEqual(self.widget.w_step.c_type.currentText(), "SRM") self.assertEqual(type(self.widget.machine), MachineSRM) save_function(self, self.widget, "test_srm_save") # SCIM self.widget.w_step.c_type.setCurrentIndex(0) self.assertEqual(self.widget.w_step.c_type.currentText(), "SCIM") self.assertEqual(type(self.widget.machine), MachineSCIM) def save_function(self, widget, file_name): """Function to save a machine from the GUI """ file_path = join(save_path, file_name + ".json") # Check that the file didn't already exist if isfile(file_path): remove(file_path) self.assertFalse(isfile(file_path)) return_value = (file_path, "Json (*.json)") with mock.patch( 
"PyQt5.QtWidgets.QFileDialog.getSaveFileName", return_value=return_value ): # To trigger the slot widget.b_save.clicked.emit(True) # Check that the file now exist => delete for next test self.assertTrue(isfile(file_path)) remove(file_path) # Check that the GUI have been updated self.assertEqual(type(widget.w_step), SMachineType) self.assertEqual(widget.w_step.le_name.text(), file_name)
39.480663
86
0.687378
from os.path import join, isfile from os import remove import sys from unittest import TestCase from ddt import ddt, data import mock from PyQt5 import QtWidgets from pyleecan.Classes.MachineSyRM import MachineSyRM from pyleecan.Classes.MachineIPMSM import MachineIPMSM from pyleecan.Classes.MachineDFIM import MachineDFIM from pyleecan.Classes.MachineSCIM import MachineSCIM from pyleecan.Classes.MachineSIPMSM import MachineSIPMSM from pyleecan.Classes.MachineWRSM import MachineWRSM from pyleecan.Classes.MachineSRM import MachineSRM from pyleecan.GUI.Dialog.DMachineSetup.DMachineSetup import DMachineSetup from pyleecan.Tests import save_gui_path as save_path from pyleecan.GUI.Dialog.DMachineSetup.SMachineType.SMachineType import SMachineType from pyleecan.GUI.Dialog.DMachineSetup.SMagnet.SMagnet import SMagnet from pyleecan.GUI.Dialog.DMachineSetup.SWindParam.SWindParam import SWindParam from pyleecan.GUI.Dialog.DMachineSetup.SWindCond.SWindCond import SWindCond from pyleecan.GUI.Dialog.DMachineSetup.SBar.SBar import SBar from pyleecan.GUI.Dialog.DMachineSetup.SWSlot.SWSlot import SWSlot from pyleecan.GUI.Dialog.DMachineSetup.SMHoleMag.SMHoleMag import SMHoleMag import matplotlib.pyplot as plt from pyleecan.Tests import DATA_DIR load_test = list() load_test.append( {"type": "SCIM", "index": 0, "name": "SCIM_001", "p": 1, "count": 10} ) load_test.append( {"type": "DFIM", "index": 1, "name": "DFIM_001", "p": 2, "count": 12} ) load_test.append( {"type": "SyRM", "index": 2, "name": "SynRM_001", "p": 2, "count": 9} ) load_test.append( {"type": "SPMSM", "index": 3, "name": "SPMSM_001", "p": 4, "count": 9} ) load_test.append( {"type": "SIPMSM", "index": 4, "name": "SIPMSM_008", "p": 4, "count": 9} ) load_test.append( {"type": "IPMSM", "index": 5, "name": "machine_IPMSM_A", "p": 5, "count": 9} ) load_test.append( {"type": "WRSM", "index": 6, "name": "WRSM_001", "p": 6, "count": 12} ) load_test.append( {"type": "SRM", "index": 7, "name": "SRM_test_load", "p": 10, "count": 9} 
) from PyQt5.QtCore import Qt ENABLE_ITEM = Qt.ItemIsSelectable | Qt.ItemIsEnabled @ddt class test_DMachineSetup(TestCase): def setUp(self): self.widget = DMachineSetup(matlib_path="./MaterialData") @classmethod def setUpClass(cls): print("\nStart Test DMachineSetup") cls.app = QtWidgets.QApplication(sys.argv) @classmethod def tearDownClass(cls): cls.app.quit() @data(*load_test) def test_load(self, test_dict): return_value = ( join(join(DATA_DIR, "Load_GUI"), test_dict["name"] + ".json"), "Json (*.json)", ) with mock.patch( "PyQt5.QtWidgets.QFileDialog.getOpenFileName", return_value=return_value ): self.widget.b_load.clicked.emit(True) self.assertEqual(self.widget.w_step.c_type.count(), 8) self.assertEqual(type(self.widget.w_step), SMachineType) self.assertEqual(self.widget.w_step.c_type.currentIndex(), test_dict["index"]) self.assertEqual(self.widget.w_step.c_type.currentText(), test_dict["type"]) self.assertEqual(self.widget.w_step.si_p.value(), test_dict["p"]) self.assertEqual(self.widget.w_step.le_name.text(), test_dict["name"]) self.assertEqual(self.widget.nav_step.count(), test_dict["count"]) def test_set_save_machine_type(self): self.assertEqual(self.widget.w_step.c_type.count(), 8) self.widget.w_step.c_type.setCurrentIndex(1) self.assertEqual(self.widget.w_step.c_type.currentText(), "DFIM") self.assertEqual(type(self.widget.machine), MachineDFIM) save_function(self, self.widget, "test_dfim_save") self.widget.w_step.c_type.setCurrentIndex(2) self.assertEqual(self.widget.w_step.c_type.currentText(), "SyRM") self.assertEqual(type(self.widget.machine), MachineSyRM) save_function(self, self.widget, "test_syrm_save") self.widget.w_step.c_type.setCurrentIndex(3) self.assertEqual(self.widget.w_step.c_type.currentText(), "SPMSM") self.assertEqual(type(self.widget.machine), MachineSIPMSM) save_function(self, self.widget, "test_spmsm_save") self.widget.w_step.c_type.setCurrentIndex(4) self.assertEqual(self.widget.w_step.c_type.currentText(), "SIPMSM") 
self.assertEqual(type(self.widget.machine), MachineSIPMSM) save_function(self, self.widget, "test_sipmsm_save") self.widget.w_step.c_type.setCurrentIndex(5) self.assertEqual(self.widget.w_step.c_type.currentText(), "IPMSM") self.assertEqual(type(self.widget.machine), MachineIPMSM) save_function(self, self.widget, "test_ipmsm_save") self.widget.w_step.c_type.setCurrentIndex(6) self.assertEqual(self.widget.w_step.c_type.currentText(), "WRSM") self.assertEqual(type(self.widget.machine), MachineWRSM) save_function(self, self.widget, "test_wrsm_save") self.widget.w_step.c_type.setCurrentIndex(7) self.assertEqual(self.widget.w_step.c_type.currentText(), "SRM") self.assertEqual(type(self.widget.machine), MachineSRM) save_function(self, self.widget, "test_srm_save") self.widget.w_step.c_type.setCurrentIndex(0) self.assertEqual(self.widget.w_step.c_type.currentText(), "SCIM") self.assertEqual(type(self.widget.machine), MachineSCIM) def save_function(self, widget, file_name): file_path = join(save_path, file_name + ".json") if isfile(file_path): remove(file_path) self.assertFalse(isfile(file_path)) return_value = (file_path, "Json (*.json)") with mock.patch( "PyQt5.QtWidgets.QFileDialog.getSaveFileName", return_value=return_value ): # To trigger the slot widget.b_save.clicked.emit(True) # Check that the file now exist => delete for next test self.assertTrue(isfile(file_path)) remove(file_path) # Check that the GUI have been updated self.assertEqual(type(widget.w_step), SMachineType) self.assertEqual(widget.w_step.le_name.text(), file_name)
true
true
f70eb2f3401ca84927cd6ad2318eeb8439e46c72
2,814
py
Python
core/queue/views.py
lottspot/prevention-point
e4d5eaa437c3e979e8585bdada4efd33e995e39e
[ "MIT" ]
35
2019-03-12T23:59:10.000Z
2021-04-05T15:07:38.000Z
core/queue/views.py
lottspot/prevention-point
e4d5eaa437c3e979e8585bdada4efd33e995e39e
[ "MIT" ]
365
2019-03-12T23:40:39.000Z
2022-02-10T11:07:26.000Z
core/queue/views.py
lottspot/prevention-point
e4d5eaa437c3e979e8585bdada4efd33e995e39e
[ "MIT" ]
20
2019-03-12T23:36:25.000Z
2021-12-30T00:05:42.000Z
import datetime from rest_framework import viewsets, status from rest_framework.response import Response from rest_framework.permissions import IsAuthenticated from core.permissions import DjangoModelPermissions from core.visits.serializer import PopulatedVisitSerializer from core.models import Visit, FrontDeskEvent, FrontDeskEventType from core.front_desk_events.serializer import FrontDeskEventForQueueSerializer from django.contrib.auth.models import User class QueueViewSet(viewsets.ViewSet): """ API endpoint that displays the queue uses regular ViewSet to be able to display adjacent model responses in one view, hence the permission classes being repeated here instead of using viewsets.py prototype """ # DjangoModelPermissions requires a queryset to function, # the next line is what the docs suggest as a 'sentinel queryset' queryset= FrontDeskEvent.objects.none() permission_classes = [DjangoModelPermissions, IsAuthenticated] def retrieve(self, request, program_id=None): """ retrieve most recent front desk event for each visit that is happening today, filtered by program """ # filter by visits that are happening today in a certain program visits_queryset = ( Visit.objects.select_related("participant", "program") .filter( program=program_id, created_at__date=datetime.date.today(), ) .order_by("urgency", "-created_at") ) todays_visit_data = PopulatedVisitSerializer( visits_queryset, many=True, context={"request": request} ).data active_visits_queue = [] front_desk_events = FrontDeskEvent.objects.select_related("visit").filter( visit__in=[dict(x)["id"] for x in todays_visit_data] ).order_by("-created_at").values("id", "visit", "event_type", "created_at") # for each visit, get the most recent front desk event, to glean current visit status for visit in todays_visit_data: events = list( filter(lambda x: x.get("visit") is visit.get("id"), front_desk_events) ) if events: event = events[0] event_type = event.get("event_type") if event_type in [ 
FrontDeskEventType.ARRIVED.name, FrontDeskEventType.STEPPED_OUT.name, FrontDeskEventType.CAME_BACK.name, ]: # if most recent front desk event is an 'active' status add it to visit object visit["status"] = event # then add it to the 'active visits queue' active_visits_queue.append(visit) return Response(active_visits_queue)
40.2
98
0.657783
import datetime from rest_framework import viewsets, status from rest_framework.response import Response from rest_framework.permissions import IsAuthenticated from core.permissions import DjangoModelPermissions from core.visits.serializer import PopulatedVisitSerializer from core.models import Visit, FrontDeskEvent, FrontDeskEventType from core.front_desk_events.serializer import FrontDeskEventForQueueSerializer from django.contrib.auth.models import User class QueueViewSet(viewsets.ViewSet): queryset= FrontDeskEvent.objects.none() permission_classes = [DjangoModelPermissions, IsAuthenticated] def retrieve(self, request, program_id=None): visits_queryset = ( Visit.objects.select_related("participant", "program") .filter( program=program_id, created_at__date=datetime.date.today(), ) .order_by("urgency", "-created_at") ) todays_visit_data = PopulatedVisitSerializer( visits_queryset, many=True, context={"request": request} ).data active_visits_queue = [] front_desk_events = FrontDeskEvent.objects.select_related("visit").filter( visit__in=[dict(x)["id"] for x in todays_visit_data] ).order_by("-created_at").values("id", "visit", "event_type", "created_at") for visit in todays_visit_data: events = list( filter(lambda x: x.get("visit") is visit.get("id"), front_desk_events) ) if events: event = events[0] event_type = event.get("event_type") if event_type in [ FrontDeskEventType.ARRIVED.name, FrontDeskEventType.STEPPED_OUT.name, FrontDeskEventType.CAME_BACK.name, ]: visit["status"] = event active_visits_queue.append(visit) return Response(active_visits_queue)
true
true
f70eb3a29c6a510eca3947f79787ed0abd7f6655
732
py
Python
day01_Sonar_Sweep/day01.py
anolivei/advent_of_code_2021
1eac988d37bf754bbee68fad2e927914351a5a2b
[ "MIT" ]
null
null
null
day01_Sonar_Sweep/day01.py
anolivei/advent_of_code_2021
1eac988d37bf754bbee68fad2e927914351a5a2b
[ "MIT" ]
null
null
null
day01_Sonar_Sweep/day01.py
anolivei/advent_of_code_2021
1eac988d37bf754bbee68fad2e927914351a5a2b
[ "MIT" ]
null
null
null
def open_input(): with open("input.txt") as fd: array = fd.read().splitlines() array = list(map(int, array)) return array def part_one(array): lenght = len(array) increased = 0 for i in range(0, lenght - 1): if array[i] < array[i + 1]: increased += 1 print("part one:", increased) def part_two(array): lenght = len(array) increased = 0 for i in range(0, lenght - 3): sum1 = array[i] + array[i + 1] + array[i + 2] sum2 = array[i + 1] + array[i + 2] + array[i + 3] if sum1 < sum2: increased += 1 print("part two:", increased) if (__name__ == "__main__"): array = open_input() part_one(array) part_two(array)
22.875
57
0.543716
def open_input(): with open("input.txt") as fd: array = fd.read().splitlines() array = list(map(int, array)) return array def part_one(array): lenght = len(array) increased = 0 for i in range(0, lenght - 1): if array[i] < array[i + 1]: increased += 1 print("part one:", increased) def part_two(array): lenght = len(array) increased = 0 for i in range(0, lenght - 3): sum1 = array[i] + array[i + 1] + array[i + 2] sum2 = array[i + 1] + array[i + 2] + array[i + 3] if sum1 < sum2: increased += 1 print("part two:", increased) if (__name__ == "__main__"): array = open_input() part_one(array) part_two(array)
true
true
f70eb3de404bce1c3ea4c27a5b70915529f3a386
6,153
py
Python
sdk/python/pulumi_azure_native/customproviders/v20180901preview/get_custom_resource_provider.py
sebtelko/pulumi-azure-native
711ec021b5c73da05611c56c8a35adb0ce3244e4
[ "Apache-2.0" ]
null
null
null
sdk/python/pulumi_azure_native/customproviders/v20180901preview/get_custom_resource_provider.py
sebtelko/pulumi-azure-native
711ec021b5c73da05611c56c8a35adb0ce3244e4
[ "Apache-2.0" ]
null
null
null
sdk/python/pulumi_azure_native/customproviders/v20180901preview/get_custom_resource_provider.py
sebtelko/pulumi-azure-native
711ec021b5c73da05611c56c8a35adb0ce3244e4
[ "Apache-2.0" ]
null
null
null
# coding=utf-8 # *** WARNING: this file was generated by the Pulumi SDK Generator. *** # *** Do not edit by hand unless you're certain you know what you are doing! *** import warnings import pulumi import pulumi.runtime from typing import Any, Mapping, Optional, Sequence, Union, overload from ... import _utilities from . import outputs __all__ = [ 'GetCustomResourceProviderResult', 'AwaitableGetCustomResourceProviderResult', 'get_custom_resource_provider', ] @pulumi.output_type class GetCustomResourceProviderResult: """ A manifest file that defines the custom resource provider resources. """ def __init__(__self__, actions=None, id=None, location=None, name=None, provisioning_state=None, resource_types=None, tags=None, type=None, validations=None): if actions and not isinstance(actions, list): raise TypeError("Expected argument 'actions' to be a list") pulumi.set(__self__, "actions", actions) if id and not isinstance(id, str): raise TypeError("Expected argument 'id' to be a str") pulumi.set(__self__, "id", id) if location and not isinstance(location, str): raise TypeError("Expected argument 'location' to be a str") pulumi.set(__self__, "location", location) if name and not isinstance(name, str): raise TypeError("Expected argument 'name' to be a str") pulumi.set(__self__, "name", name) if provisioning_state and not isinstance(provisioning_state, str): raise TypeError("Expected argument 'provisioning_state' to be a str") pulumi.set(__self__, "provisioning_state", provisioning_state) if resource_types and not isinstance(resource_types, list): raise TypeError("Expected argument 'resource_types' to be a list") pulumi.set(__self__, "resource_types", resource_types) if tags and not isinstance(tags, dict): raise TypeError("Expected argument 'tags' to be a dict") pulumi.set(__self__, "tags", tags) if type and not isinstance(type, str): raise TypeError("Expected argument 'type' to be a str") pulumi.set(__self__, "type", type) if validations and not isinstance(validations, 
list): raise TypeError("Expected argument 'validations' to be a list") pulumi.set(__self__, "validations", validations) @property @pulumi.getter def actions(self) -> Optional[Sequence['outputs.CustomRPActionRouteDefinitionResponse']]: """ A list of actions that the custom resource provider implements. """ return pulumi.get(self, "actions") @property @pulumi.getter def id(self) -> str: """ Resource Id """ return pulumi.get(self, "id") @property @pulumi.getter def location(self) -> str: """ Resource location """ return pulumi.get(self, "location") @property @pulumi.getter def name(self) -> str: """ Resource name """ return pulumi.get(self, "name") @property @pulumi.getter(name="provisioningState") def provisioning_state(self) -> str: """ The provisioning state of the resource provider. """ return pulumi.get(self, "provisioning_state") @property @pulumi.getter(name="resourceTypes") def resource_types(self) -> Optional[Sequence['outputs.CustomRPResourceTypeRouteDefinitionResponse']]: """ A list of resource types that the custom resource provider implements. """ return pulumi.get(self, "resource_types") @property @pulumi.getter def tags(self) -> Optional[Mapping[str, str]]: """ Resource tags """ return pulumi.get(self, "tags") @property @pulumi.getter def type(self) -> str: """ Resource type """ return pulumi.get(self, "type") @property @pulumi.getter def validations(self) -> Optional[Sequence['outputs.CustomRPValidationsResponse']]: """ A list of validations to run on the custom resource provider's requests. 
""" return pulumi.get(self, "validations") class AwaitableGetCustomResourceProviderResult(GetCustomResourceProviderResult): # pylint: disable=using-constant-test def __await__(self): if False: yield self return GetCustomResourceProviderResult( actions=self.actions, id=self.id, location=self.location, name=self.name, provisioning_state=self.provisioning_state, resource_types=self.resource_types, tags=self.tags, type=self.type, validations=self.validations) def get_custom_resource_provider(resource_group_name: Optional[str] = None, resource_provider_name: Optional[str] = None, opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetCustomResourceProviderResult: """ A manifest file that defines the custom resource provider resources. :param str resource_group_name: The name of the resource group. :param str resource_provider_name: The name of the resource provider. """ __args__ = dict() __args__['resourceGroupName'] = resource_group_name __args__['resourceProviderName'] = resource_provider_name if opts is None: opts = pulumi.InvokeOptions() if opts.version is None: opts.version = _utilities.get_version() __ret__ = pulumi.runtime.invoke('azure-native:customproviders/v20180901preview:getCustomResourceProvider', __args__, opts=opts, typ=GetCustomResourceProviderResult).value return AwaitableGetCustomResourceProviderResult( actions=__ret__.actions, id=__ret__.id, location=__ret__.location, name=__ret__.name, provisioning_state=__ret__.provisioning_state, resource_types=__ret__.resource_types, tags=__ret__.tags, type=__ret__.type, validations=__ret__.validations)
35.982456
174
0.653827
import warnings import pulumi import pulumi.runtime from typing import Any, Mapping, Optional, Sequence, Union, overload from ... import _utilities from . import outputs __all__ = [ 'GetCustomResourceProviderResult', 'AwaitableGetCustomResourceProviderResult', 'get_custom_resource_provider', ] @pulumi.output_type class GetCustomResourceProviderResult: def __init__(__self__, actions=None, id=None, location=None, name=None, provisioning_state=None, resource_types=None, tags=None, type=None, validations=None): if actions and not isinstance(actions, list): raise TypeError("Expected argument 'actions' to be a list") pulumi.set(__self__, "actions", actions) if id and not isinstance(id, str): raise TypeError("Expected argument 'id' to be a str") pulumi.set(__self__, "id", id) if location and not isinstance(location, str): raise TypeError("Expected argument 'location' to be a str") pulumi.set(__self__, "location", location) if name and not isinstance(name, str): raise TypeError("Expected argument 'name' to be a str") pulumi.set(__self__, "name", name) if provisioning_state and not isinstance(provisioning_state, str): raise TypeError("Expected argument 'provisioning_state' to be a str") pulumi.set(__self__, "provisioning_state", provisioning_state) if resource_types and not isinstance(resource_types, list): raise TypeError("Expected argument 'resource_types' to be a list") pulumi.set(__self__, "resource_types", resource_types) if tags and not isinstance(tags, dict): raise TypeError("Expected argument 'tags' to be a dict") pulumi.set(__self__, "tags", tags) if type and not isinstance(type, str): raise TypeError("Expected argument 'type' to be a str") pulumi.set(__self__, "type", type) if validations and not isinstance(validations, list): raise TypeError("Expected argument 'validations' to be a list") pulumi.set(__self__, "validations", validations) @property @pulumi.getter def actions(self) -> Optional[Sequence['outputs.CustomRPActionRouteDefinitionResponse']]: return 
pulumi.get(self, "actions") @property @pulumi.getter def id(self) -> str: return pulumi.get(self, "id") @property @pulumi.getter def location(self) -> str: return pulumi.get(self, "location") @property @pulumi.getter def name(self) -> str: return pulumi.get(self, "name") @property @pulumi.getter(name="provisioningState") def provisioning_state(self) -> str: return pulumi.get(self, "provisioning_state") @property @pulumi.getter(name="resourceTypes") def resource_types(self) -> Optional[Sequence['outputs.CustomRPResourceTypeRouteDefinitionResponse']]: return pulumi.get(self, "resource_types") @property @pulumi.getter def tags(self) -> Optional[Mapping[str, str]]: return pulumi.get(self, "tags") @property @pulumi.getter def type(self) -> str: return pulumi.get(self, "type") @property @pulumi.getter def validations(self) -> Optional[Sequence['outputs.CustomRPValidationsResponse']]: return pulumi.get(self, "validations") class AwaitableGetCustomResourceProviderResult(GetCustomResourceProviderResult): # pylint: disable=using-constant-test def __await__(self): if False: yield self return GetCustomResourceProviderResult( actions=self.actions, id=self.id, location=self.location, name=self.name, provisioning_state=self.provisioning_state, resource_types=self.resource_types, tags=self.tags, type=self.type, validations=self.validations) def get_custom_resource_provider(resource_group_name: Optional[str] = None, resource_provider_name: Optional[str] = None, opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetCustomResourceProviderResult: __args__ = dict() __args__['resourceGroupName'] = resource_group_name __args__['resourceProviderName'] = resource_provider_name if opts is None: opts = pulumi.InvokeOptions() if opts.version is None: opts.version = _utilities.get_version() __ret__ = pulumi.runtime.invoke('azure-native:customproviders/v20180901preview:getCustomResourceProvider', __args__, opts=opts, typ=GetCustomResourceProviderResult).value return 
AwaitableGetCustomResourceProviderResult( actions=__ret__.actions, id=__ret__.id, location=__ret__.location, name=__ret__.name, provisioning_state=__ret__.provisioning_state, resource_types=__ret__.resource_types, tags=__ret__.tags, type=__ret__.type, validations=__ret__.validations)
true
true
f70eb40817dabb9bff5e70b4866289c7586fcf23
1,189
py
Python
plugins/action/gitlab_modwrap.py
sma-de/ansible-collections-gitlab
5da99b04722fc016d3e8589635fcbb3579dcfda2
[ "BSD-3-Clause" ]
null
null
null
plugins/action/gitlab_modwrap.py
sma-de/ansible-collections-gitlab
5da99b04722fc016d3e8589635fcbb3579dcfda2
[ "BSD-3-Clause" ]
null
null
null
plugins/action/gitlab_modwrap.py
sma-de/ansible-collections-gitlab
5da99b04722fc016d3e8589635fcbb3579dcfda2
[ "BSD-3-Clause" ]
null
null
null
from __future__ import (absolute_import, division, print_function) __metaclass__ = type import collections ##from ansible.errors import AnsibleOptionsError, AnsibleModuleError##, AnsibleError ####from ansible.module_utils._text import to_native from ansible.module_utils.six import iteritems, string_types from ansible_collections.smabot.git.plugins.module_utils.plugins.gitlab_action import GitlabBase from ansible_collections.smabot.base.plugins.module_utils.utils.utils import ansible_assert class ActionModule(GitlabBase): def __init__(self, *args, **kwargs): super(ActionModule, self).__init__(*args, **kwargs) self._supports_check_mode = False self._supports_async = False @property def argspec(self): tmp = super(ActionModule, self).argspec tmp.update({ 'modname': (list(string_types)), 'modargs': ([collections.abc.Mapping], {}), }) return tmp def run_specific(self, result): cmdret = self.exec_gitlab_module( self.get_taskparam('modname'), modargs=self.get_taskparam('modargs') ) result.update(cmdret) return result
25.847826
96
0.699748
from __future__ import (absolute_import, division, print_function) __metaclass__ = type import collections ionModule(GitlabBase): def __init__(self, *args, **kwargs): super(ActionModule, self).__init__(*args, **kwargs) self._supports_check_mode = False self._supports_async = False @property def argspec(self): tmp = super(ActionModule, self).argspec tmp.update({ 'modname': (list(string_types)), 'modargs': ([collections.abc.Mapping], {}), }) return tmp def run_specific(self, result): cmdret = self.exec_gitlab_module( self.get_taskparam('modname'), modargs=self.get_taskparam('modargs') ) result.update(cmdret) return result
true
true
f70eb5b9f0829e7189fca574a45e2c0d95713fd9
2,964
py
Python
python/tests/test_oberon.py
tmarkovski/okapi
1ce36d2a82bb0e409a5183cd116d3a9eb474fc9f
[ "Apache-2.0" ]
null
null
null
python/tests/test_oberon.py
tmarkovski/okapi
1ce36d2a82bb0e409a5183cd116d3a9eb474fc9f
[ "Apache-2.0" ]
null
null
null
python/tests/test_oberon.py
tmarkovski/okapi
1ce36d2a82bb0e409a5183cd116d3a9eb474fc9f
[ "Apache-2.0" ]
null
null
null
import unittest from okapi.proto.okapi.security.v1 import CreateOberonKeyRequest, CreateOberonTokenRequest, CreateOberonProofRequest, \ VerifyOberonProofRequest, UnBlindOberonTokenRequest, BlindOberonTokenRequest from okapi.wrapper import Oberon class KeyTests(unittest.TestCase): def test_oberon_demo(self): key = Oberon.create_key(CreateOberonKeyRequest()) data = bytes("alice", "utf8") nonce = bytes("1234", "utf8") token = Oberon.create_token(CreateOberonTokenRequest(data=data, sk=key.sk)) proof = Oberon.create_proof(CreateOberonProofRequest(data=data, nonce=nonce, token=token.token)) result = Oberon.verify_proof(VerifyOberonProofRequest(data=data, nonce=nonce, pk=key.pk, proof=proof.proof)) self.assertTrue(result.valid, "Proof should verify") def test_demo_with_binding(self): key = Oberon.create_key(CreateOberonKeyRequest()) data = bytes("alice", "utf8") nonce = bytes("1234", "utf8") issuer_2fa = bytes("issuer code", "utf8") token_request = CreateOberonTokenRequest(data=data, sk=key.sk) token_request.blinding.append(issuer_2fa) blinded_token = Oberon.create_token(token_request) # Holder unblinds the token unblind_request = UnBlindOberonTokenRequest(token=blinded_token.token) unblind_request.blinding.append(issuer_2fa) token = Oberon.unblind_token(unblind_request) # Holder prepares a proof without blinding proof = Oberon.create_proof(CreateOberonProofRequest(data=data, nonce=nonce, token=token.token)) # Verifier verifies the proof result = Oberon.verify_proof(VerifyOberonProofRequest(data=data, nonce=nonce, pk=key.pk, proof=proof.proof)) self.assertTrue(result.valid) # Holder blinds the token with a personal pin user_pin = bytes("0042", "utf8") blind_request = BlindOberonTokenRequest(token=token.token) blind_request.blinding.append(user_pin) user_blinded_token = Oberon.blind_token(blind_request) proof_request = CreateOberonProofRequest(data=data, nonce=nonce, token=user_blinded_token.token) proof_request.blinding.append(user_pin) proof = 
Oberon.create_proof(proof_request) # Verifier verifies the proof result = Oberon.verify_proof(VerifyOberonProofRequest(data=data, nonce=nonce, pk=key.pk, proof=proof.proof)) self.assertTrue(result.valid) # Bad actor creates a proof with incorrect blinding pin proof_request = CreateOberonProofRequest(data=data, nonce=nonce, token=user_blinded_token.token) proof_request.blinding.append(bytes("invalid pin", "utf8")) proof = Oberon.create_proof(proof_request) # Verifies tries to verify proof, fails result = Oberon.verify_proof(VerifyOberonProofRequest(data=data, nonce=nonce, pk=key.pk, proof=proof.proof)) self.assertFalse(result.valid)
48.590164
119
0.721997
import unittest from okapi.proto.okapi.security.v1 import CreateOberonKeyRequest, CreateOberonTokenRequest, CreateOberonProofRequest, \ VerifyOberonProofRequest, UnBlindOberonTokenRequest, BlindOberonTokenRequest from okapi.wrapper import Oberon class KeyTests(unittest.TestCase): def test_oberon_demo(self): key = Oberon.create_key(CreateOberonKeyRequest()) data = bytes("alice", "utf8") nonce = bytes("1234", "utf8") token = Oberon.create_token(CreateOberonTokenRequest(data=data, sk=key.sk)) proof = Oberon.create_proof(CreateOberonProofRequest(data=data, nonce=nonce, token=token.token)) result = Oberon.verify_proof(VerifyOberonProofRequest(data=data, nonce=nonce, pk=key.pk, proof=proof.proof)) self.assertTrue(result.valid, "Proof should verify") def test_demo_with_binding(self): key = Oberon.create_key(CreateOberonKeyRequest()) data = bytes("alice", "utf8") nonce = bytes("1234", "utf8") issuer_2fa = bytes("issuer code", "utf8") token_request = CreateOberonTokenRequest(data=data, sk=key.sk) token_request.blinding.append(issuer_2fa) blinded_token = Oberon.create_token(token_request) unblind_request = UnBlindOberonTokenRequest(token=blinded_token.token) unblind_request.blinding.append(issuer_2fa) token = Oberon.unblind_token(unblind_request) proof = Oberon.create_proof(CreateOberonProofRequest(data=data, nonce=nonce, token=token.token)) result = Oberon.verify_proof(VerifyOberonProofRequest(data=data, nonce=nonce, pk=key.pk, proof=proof.proof)) self.assertTrue(result.valid) user_pin = bytes("0042", "utf8") blind_request = BlindOberonTokenRequest(token=token.token) blind_request.blinding.append(user_pin) user_blinded_token = Oberon.blind_token(blind_request) proof_request = CreateOberonProofRequest(data=data, nonce=nonce, token=user_blinded_token.token) proof_request.blinding.append(user_pin) proof = Oberon.create_proof(proof_request) result = Oberon.verify_proof(VerifyOberonProofRequest(data=data, nonce=nonce, pk=key.pk, proof=proof.proof)) self.assertTrue(result.valid) 
proof_request = CreateOberonProofRequest(data=data, nonce=nonce, token=user_blinded_token.token) proof_request.blinding.append(bytes("invalid pin", "utf8")) proof = Oberon.create_proof(proof_request) result = Oberon.verify_proof(VerifyOberonProofRequest(data=data, nonce=nonce, pk=key.pk, proof=proof.proof)) self.assertFalse(result.valid)
true
true
f70eb60e7035a3d1c3d3275fedadb650fd20fe9f
3,030
py
Python
searchmethods/modularGA.py
esnet/hps-rl
8426652e622394a955a44c42201e2204f6bfa0f2
[ "BSD-3-Clause-LBNL" ]
null
null
null
searchmethods/modularGA.py
esnet/hps-rl
8426652e622394a955a44c42201e2204f6bfa0f2
[ "BSD-3-Clause-LBNL" ]
null
null
null
searchmethods/modularGA.py
esnet/hps-rl
8426652e622394a955a44c42201e2204f6bfa0f2
[ "BSD-3-Clause-LBNL" ]
null
null
null
import numpy, random class Individual: def __init__(self,genome, llimits =[], ulimits=[], type=[], LEN = 1,fitness_func = None): if genome is None: self.genome = numpy.zeros(LEN,dtype=float) for gene in range(LEN): if type[gene] == "integer": self.genome[gene] = numpy.random.randint(llimits[gene], ulimits[gene]) else: self.genome[gene] = numpy.random.uniform(llimits[gene], ulimits[gene]) else: self.genome = genome self.fitness = fitness_func(self.genome) def __str__(self): return "".join(str(int(i)) for i in self.genome) def crossover(a, b, fitness): g, h = a.genome.copy(), b.genome.copy() for pt in range(len(g)): if numpy.random.random() < 0.5: g[pt], h[pt] = h[pt], g[pt] return (Individual(genome=g,fitness_func=fitness), Individual(genome=h,fitness_func=fitness)) def mutate(a, mut_prob,fitness): g = a.genome.copy() for pt in range(len(g)): if numpy.random.random() < mut_prob: g[pt] = not g[pt] return Individual(g,fitness_func=fitness) def stats(pop, gen,threshold): best = max(pop, key=lambda x: x.fitness) print("{0} {1:.2f} {2} {3}".format(gen, numpy.mean([i.fitness for i in pop]), best.fitness, str(best))) return (best.fitness >= threshold) def roulette(items, n): total = float(sum(w.fitness for w in items)) i = 0 w, v = items[0].fitness, items[0] while n: x = total * (1 - numpy.random.random() ** (1.0 / n)) total -= x while x > w: x -= w i += 1 w, v = items[i].fitness, items[i] w -= x yield v n -= 1 def tournament(items, n, tsize=5): for i in range(n): candidates = random.sample(items, tsize) yield max(candidates, key=lambda x: x.fitness) def step(pop,cross_prob,mut_prob,fitness): newpop = [] parents = roulette(pop, len(pop) + 1) # one extra for final xover while len(newpop) < len(pop): if numpy.random.random() < cross_prob: newpop.extend(map(mutate, crossover(next(parents), next(parents),fitness=fitness),[mut_prob,mut_prob],[fitness,fitness])) else: newpop.append(mutate(next(parents),mut_prob=mut_prob,fitness=fitness)) return newpop def run(llimit, ulimit, 
type, GENERATIONS, CROSSOVER_PROB, POPSIZE, LEN, MUTATION_PROB,FITNESS,THRESHOLD): numpy.random.seed(100) pop = [Individual(None,llimit,ulimit,type,LEN,FITNESS) for i in range(POPSIZE)] print(pop) stats(pop, 0, THRESHOLD) for gen in range(1, GENERATIONS): pop = step(pop,CROSSOVER_PROB,MUTATION_PROB,FITNESS) if stats(pop, gen, THRESHOLD): print("Success") llimit = [0.5,1e-6,1e-6,0] ulimit = [1.5,0.1,0.1,3] type = ['real','real','real','integer'] LEN = 4 FITNESS, SUCCESS_THRESHOLD = (numpy.sum, LEN) run(llimit,ulimit,type,100,1,100,4,0.9,FITNESS,10)
34.044944
133
0.59703
import numpy, random class Individual: def __init__(self,genome, llimits =[], ulimits=[], type=[], LEN = 1,fitness_func = None): if genome is None: self.genome = numpy.zeros(LEN,dtype=float) for gene in range(LEN): if type[gene] == "integer": self.genome[gene] = numpy.random.randint(llimits[gene], ulimits[gene]) else: self.genome[gene] = numpy.random.uniform(llimits[gene], ulimits[gene]) else: self.genome = genome self.fitness = fitness_func(self.genome) def __str__(self): return "".join(str(int(i)) for i in self.genome) def crossover(a, b, fitness): g, h = a.genome.copy(), b.genome.copy() for pt in range(len(g)): if numpy.random.random() < 0.5: g[pt], h[pt] = h[pt], g[pt] return (Individual(genome=g,fitness_func=fitness), Individual(genome=h,fitness_func=fitness)) def mutate(a, mut_prob,fitness): g = a.genome.copy() for pt in range(len(g)): if numpy.random.random() < mut_prob: g[pt] = not g[pt] return Individual(g,fitness_func=fitness) def stats(pop, gen,threshold): best = max(pop, key=lambda x: x.fitness) print("{0} {1:.2f} {2} {3}".format(gen, numpy.mean([i.fitness for i in pop]), best.fitness, str(best))) return (best.fitness >= threshold) def roulette(items, n): total = float(sum(w.fitness for w in items)) i = 0 w, v = items[0].fitness, items[0] while n: x = total * (1 - numpy.random.random() ** (1.0 / n)) total -= x while x > w: x -= w i += 1 w, v = items[i].fitness, items[i] w -= x yield v n -= 1 def tournament(items, n, tsize=5): for i in range(n): candidates = random.sample(items, tsize) yield max(candidates, key=lambda x: x.fitness) def step(pop,cross_prob,mut_prob,fitness): newpop = [] parents = roulette(pop, len(pop) + 1) while len(newpop) < len(pop): if numpy.random.random() < cross_prob: newpop.extend(map(mutate, crossover(next(parents), next(parents),fitness=fitness),[mut_prob,mut_prob],[fitness,fitness])) else: newpop.append(mutate(next(parents),mut_prob=mut_prob,fitness=fitness)) return newpop def run(llimit, ulimit, type, GENERATIONS, 
CROSSOVER_PROB, POPSIZE, LEN, MUTATION_PROB,FITNESS,THRESHOLD): numpy.random.seed(100) pop = [Individual(None,llimit,ulimit,type,LEN,FITNESS) for i in range(POPSIZE)] print(pop) stats(pop, 0, THRESHOLD) for gen in range(1, GENERATIONS): pop = step(pop,CROSSOVER_PROB,MUTATION_PROB,FITNESS) if stats(pop, gen, THRESHOLD): print("Success") llimit = [0.5,1e-6,1e-6,0] ulimit = [1.5,0.1,0.1,3] type = ['real','real','real','integer'] LEN = 4 FITNESS, SUCCESS_THRESHOLD = (numpy.sum, LEN) run(llimit,ulimit,type,100,1,100,4,0.9,FITNESS,10)
true
true
f70eb629524c29581304d4058b938e52261dca03
10,701
py
Python
kubernetes_asyncio/client/models/v1_topology_spread_constraint.py
lsst-sqre/kubernetes_asyncio
f028cc793e3a2c519be6a52a49fb77ff0b014c9b
[ "Apache-2.0" ]
null
null
null
kubernetes_asyncio/client/models/v1_topology_spread_constraint.py
lsst-sqre/kubernetes_asyncio
f028cc793e3a2c519be6a52a49fb77ff0b014c9b
[ "Apache-2.0" ]
null
null
null
kubernetes_asyncio/client/models/v1_topology_spread_constraint.py
lsst-sqre/kubernetes_asyncio
f028cc793e3a2c519be6a52a49fb77ff0b014c9b
[ "Apache-2.0" ]
null
null
null
# coding: utf-8 """ Kubernetes No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501 The version of the OpenAPI document: v1.19.15 Generated by: https://openapi-generator.tech """ import pprint import re # noqa: F401 import six from kubernetes_asyncio.client.configuration import Configuration class V1TopologySpreadConstraint(object): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech Do not edit the class manually. """ """ Attributes: openapi_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ openapi_types = { 'label_selector': 'V1LabelSelector', 'max_skew': 'int', 'topology_key': 'str', 'when_unsatisfiable': 'str' } attribute_map = { 'label_selector': 'labelSelector', 'max_skew': 'maxSkew', 'topology_key': 'topologyKey', 'when_unsatisfiable': 'whenUnsatisfiable' } def __init__(self, label_selector=None, max_skew=None, topology_key=None, when_unsatisfiable=None, local_vars_configuration=None): # noqa: E501 """V1TopologySpreadConstraint - a model defined in OpenAPI""" # noqa: E501 if local_vars_configuration is None: local_vars_configuration = Configuration() self.local_vars_configuration = local_vars_configuration self._label_selector = None self._max_skew = None self._topology_key = None self._when_unsatisfiable = None self.discriminator = None if label_selector is not None: self.label_selector = label_selector self.max_skew = max_skew self.topology_key = topology_key self.when_unsatisfiable = when_unsatisfiable @property def label_selector(self): """Gets the label_selector of this V1TopologySpreadConstraint. # noqa: E501 :return: The label_selector of this V1TopologySpreadConstraint. 
# noqa: E501 :rtype: V1LabelSelector """ return self._label_selector @label_selector.setter def label_selector(self, label_selector): """Sets the label_selector of this V1TopologySpreadConstraint. :param label_selector: The label_selector of this V1TopologySpreadConstraint. # noqa: E501 :type: V1LabelSelector """ self._label_selector = label_selector @property def max_skew(self): """Gets the max_skew of this V1TopologySpreadConstraint. # noqa: E501 MaxSkew describes the degree to which pods may be unevenly distributed. When `whenUnsatisfiable=DoNotSchedule`, it is the maximum permitted difference between the number of matching pods in the target topology and the global minimum. For example, in a 3-zone cluster, MaxSkew is set to 1, and pods with the same labelSelector spread as 1/1/0: | zone1 | zone2 | zone3 | | P | P | | - if MaxSkew is 1, incoming pod can only be scheduled to zone3 to become 1/1/1; scheduling it onto zone1(zone2) would make the ActualSkew(2-0) on zone1(zone2) violate MaxSkew(1). - if MaxSkew is 2, incoming pod can be scheduled onto any zone. When `whenUnsatisfiable=ScheduleAnyway`, it is used to give higher precedence to topologies that satisfy it. It's a required field. Default value is 1 and 0 is not allowed. # noqa: E501 :return: The max_skew of this V1TopologySpreadConstraint. # noqa: E501 :rtype: int """ return self._max_skew @max_skew.setter def max_skew(self, max_skew): """Sets the max_skew of this V1TopologySpreadConstraint. MaxSkew describes the degree to which pods may be unevenly distributed. When `whenUnsatisfiable=DoNotSchedule`, it is the maximum permitted difference between the number of matching pods in the target topology and the global minimum. 
For example, in a 3-zone cluster, MaxSkew is set to 1, and pods with the same labelSelector spread as 1/1/0: | zone1 | zone2 | zone3 | | P | P | | - if MaxSkew is 1, incoming pod can only be scheduled to zone3 to become 1/1/1; scheduling it onto zone1(zone2) would make the ActualSkew(2-0) on zone1(zone2) violate MaxSkew(1). - if MaxSkew is 2, incoming pod can be scheduled onto any zone. When `whenUnsatisfiable=ScheduleAnyway`, it is used to give higher precedence to topologies that satisfy it. It's a required field. Default value is 1 and 0 is not allowed. # noqa: E501 :param max_skew: The max_skew of this V1TopologySpreadConstraint. # noqa: E501 :type: int """ if self.local_vars_configuration.client_side_validation and max_skew is None: # noqa: E501 raise ValueError("Invalid value for `max_skew`, must not be `None`") # noqa: E501 self._max_skew = max_skew @property def topology_key(self): """Gets the topology_key of this V1TopologySpreadConstraint. # noqa: E501 TopologyKey is the key of node labels. Nodes that have a label with this key and identical values are considered to be in the same topology. We consider each <key, value> as a \"bucket\", and try to put balanced number of pods into each bucket. It's a required field. # noqa: E501 :return: The topology_key of this V1TopologySpreadConstraint. # noqa: E501 :rtype: str """ return self._topology_key @topology_key.setter def topology_key(self, topology_key): """Sets the topology_key of this V1TopologySpreadConstraint. TopologyKey is the key of node labels. Nodes that have a label with this key and identical values are considered to be in the same topology. We consider each <key, value> as a \"bucket\", and try to put balanced number of pods into each bucket. It's a required field. # noqa: E501 :param topology_key: The topology_key of this V1TopologySpreadConstraint. 
# noqa: E501 :type: str """ if self.local_vars_configuration.client_side_validation and topology_key is None: # noqa: E501 raise ValueError("Invalid value for `topology_key`, must not be `None`") # noqa: E501 self._topology_key = topology_key @property def when_unsatisfiable(self): """Gets the when_unsatisfiable of this V1TopologySpreadConstraint. # noqa: E501 WhenUnsatisfiable indicates how to deal with a pod if it doesn't satisfy the spread constraint. - DoNotSchedule (default) tells the scheduler not to schedule it. - ScheduleAnyway tells the scheduler to schedule the pod in any location, but giving higher precedence to topologies that would help reduce the skew. A constraint is considered \"Unsatisfiable\" for an incoming pod if and only if every possible node assigment for that pod would violate \"MaxSkew\" on some topology. For example, in a 3-zone cluster, MaxSkew is set to 1, and pods with the same labelSelector spread as 3/1/1: | zone1 | zone2 | zone3 | | P P P | P | P | If WhenUnsatisfiable is set to DoNotSchedule, incoming pod can only be scheduled to zone2(zone3) to become 3/2/1(3/1/2) as ActualSkew(2-1) on zone2(zone3) satisfies MaxSkew(1). In other words, the cluster can still be imbalanced, but scheduler won't make it *more* imbalanced. It's a required field. # noqa: E501 :return: The when_unsatisfiable of this V1TopologySpreadConstraint. # noqa: E501 :rtype: str """ return self._when_unsatisfiable @when_unsatisfiable.setter def when_unsatisfiable(self, when_unsatisfiable): """Sets the when_unsatisfiable of this V1TopologySpreadConstraint. WhenUnsatisfiable indicates how to deal with a pod if it doesn't satisfy the spread constraint. - DoNotSchedule (default) tells the scheduler not to schedule it. - ScheduleAnyway tells the scheduler to schedule the pod in any location, but giving higher precedence to topologies that would help reduce the skew. 
A constraint is considered \"Unsatisfiable\" for an incoming pod if and only if every possible node assigment for that pod would violate \"MaxSkew\" on some topology. For example, in a 3-zone cluster, MaxSkew is set to 1, and pods with the same labelSelector spread as 3/1/1: | zone1 | zone2 | zone3 | | P P P | P | P | If WhenUnsatisfiable is set to DoNotSchedule, incoming pod can only be scheduled to zone2(zone3) to become 3/2/1(3/1/2) as ActualSkew(2-1) on zone2(zone3) satisfies MaxSkew(1). In other words, the cluster can still be imbalanced, but scheduler won't make it *more* imbalanced. It's a required field. # noqa: E501 :param when_unsatisfiable: The when_unsatisfiable of this V1TopologySpreadConstraint. # noqa: E501 :type: str """ if self.local_vars_configuration.client_side_validation and when_unsatisfiable is None: # noqa: E501 raise ValueError("Invalid value for `when_unsatisfiable`, must not be `None`") # noqa: E501 self._when_unsatisfiable = when_unsatisfiable def to_dict(self): """Returns the model properties as a dict""" result = {} for attr, _ in six.iteritems(self.openapi_types): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) else: result[attr] = value return result def to_str(self): """Returns the string representation of the model""" return pprint.pformat(self.to_dict()) def __repr__(self): """For `print` and `pprint`""" return self.to_str() def __eq__(self, other): """Returns true if both objects are equal""" if not isinstance(other, V1TopologySpreadConstraint): return False return self.to_dict() == other.to_dict() def __ne__(self, other): """Returns true if both objects are not equal""" if not isinstance(other, 
V1TopologySpreadConstraint): return True return self.to_dict() != other.to_dict()
51.447115
965
0.680404
import pprint import re import six from kubernetes_asyncio.client.configuration import Configuration class V1TopologySpreadConstraint(object): openapi_types = { 'label_selector': 'V1LabelSelector', 'max_skew': 'int', 'topology_key': 'str', 'when_unsatisfiable': 'str' } attribute_map = { 'label_selector': 'labelSelector', 'max_skew': 'maxSkew', 'topology_key': 'topologyKey', 'when_unsatisfiable': 'whenUnsatisfiable' } def __init__(self, label_selector=None, max_skew=None, topology_key=None, when_unsatisfiable=None, local_vars_configuration=None): if local_vars_configuration is None: local_vars_configuration = Configuration() self.local_vars_configuration = local_vars_configuration self._label_selector = None self._max_skew = None self._topology_key = None self._when_unsatisfiable = None self.discriminator = None if label_selector is not None: self.label_selector = label_selector self.max_skew = max_skew self.topology_key = topology_key self.when_unsatisfiable = when_unsatisfiable @property def label_selector(self): return self._label_selector @label_selector.setter def label_selector(self, label_selector): self._label_selector = label_selector @property def max_skew(self): return self._max_skew @max_skew.setter def max_skew(self, max_skew): if self.local_vars_configuration.client_side_validation and max_skew is None: raise ValueError("Invalid value for `max_skew`, must not be `None`") self._max_skew = max_skew @property def topology_key(self): return self._topology_key @topology_key.setter def topology_key(self, topology_key): if self.local_vars_configuration.client_side_validation and topology_key is None: raise ValueError("Invalid value for `topology_key`, must not be `None`") self._topology_key = topology_key @property def when_unsatisfiable(self): return self._when_unsatisfiable @when_unsatisfiable.setter def when_unsatisfiable(self, when_unsatisfiable): if self.local_vars_configuration.client_side_validation and when_unsatisfiable is None: raise 
ValueError("Invalid value for `when_unsatisfiable`, must not be `None`") self._when_unsatisfiable = when_unsatisfiable def to_dict(self): result = {} for attr, _ in six.iteritems(self.openapi_types): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) else: result[attr] = value return result def to_str(self): return pprint.pformat(self.to_dict()) def __repr__(self): return self.to_str() def __eq__(self, other): if not isinstance(other, V1TopologySpreadConstraint): return False return self.to_dict() == other.to_dict() def __ne__(self, other): if not isinstance(other, V1TopologySpreadConstraint): return True return self.to_dict() != other.to_dict()
true
true
f70eb689c0446ccc43bdd2bc306542d157e6219f
589
py
Python
splango/urls.py
shimon/Splango
a89408b8b62d631763d42e049dd1b7d6ef72b02f
[ "MIT" ]
3
2016-06-02T10:18:57.000Z
2022-03-16T06:10:21.000Z
splango/urls.py
shimon/Splango
a89408b8b62d631763d42e049dd1b7d6ef72b02f
[ "MIT" ]
null
null
null
splango/urls.py
shimon/Splango
a89408b8b62d631763d42e049dd1b7d6ef72b02f
[ "MIT" ]
1
2022-03-16T06:10:33.000Z
2022-03-16T06:10:33.000Z
from django.conf.urls.defaults import * urlpatterns = patterns( 'splango.views', url(r'^confirm_human/$', 'confirm_human', name="splango-confirm-human"), url(r'^admin/$', 'experiments_overview', name="splango-admin"), url(r'^admin/exp/(?P<expname>[^/]+)/$', 'experiment_detail', name="splango-experiment-detail"), url(r'^admin/exp/(?P<expname>[^/]+)/(?P<report_id>\d+)/$', 'experiment_report', name="splango-experiment-report"), url(r'^admin/exp/(?P<expname>[^/]+)/(?P<variant>[^/]+)/(?P<goal>[^/]+)/$', 'experiment_log', name="splango-experiment-log"), )
39.266667
128
0.629881
from django.conf.urls.defaults import * urlpatterns = patterns( 'splango.views', url(r'^confirm_human/$', 'confirm_human', name="splango-confirm-human"), url(r'^admin/$', 'experiments_overview', name="splango-admin"), url(r'^admin/exp/(?P<expname>[^/]+)/$', 'experiment_detail', name="splango-experiment-detail"), url(r'^admin/exp/(?P<expname>[^/]+)/(?P<report_id>\d+)/$', 'experiment_report', name="splango-experiment-report"), url(r'^admin/exp/(?P<expname>[^/]+)/(?P<variant>[^/]+)/(?P<goal>[^/]+)/$', 'experiment_log', name="splango-experiment-log"), )
true
true
f70eb6f89adb3426524735b14874f388558e4b0c
5,289
py
Python
data_creation.py
Bruce-zxy/deep-study-lenet5
bba6531c9234c077107f79ff852f141cfed58229
[ "MIT" ]
null
null
null
data_creation.py
Bruce-zxy/deep-study-lenet5
bba6531c9234c077107f79ff852f141cfed58229
[ "MIT" ]
null
null
null
data_creation.py
Bruce-zxy/deep-study-lenet5
bba6531c9234c077107f79ff852f141cfed58229
[ "MIT" ]
null
null
null
#!/usr/bin/env python3 # -*- coding: utf-8 -*- import os import sys import random import numpy as np import pandas as pd import h5py import matplotlib.pyplot as plt from math import cos, sin, atan2, sqrt, pi, radians, degrees, ceil, isnan from skimage import io, transform BASE_DIR = os.path.dirname(os.path.abspath(__file__)) sys.path.append(BASE_DIR) TRAIN_CSV_PATH = './pointdata4/traindata/' TEST_CSV_PATH = './pointdata4/testdata/' data_path = './h5/' train_file_path = data_path + 'initial_train_data.h5' test_file_path = data_path + 'initial_test_data.h5' # 按旋转角度分类的子级目录 label_dirs = [[16, 19], [43,71,129, 260], [95,128,129, 274]] # 按道路分类的父级目录 label_set = [0, 1, 2] # 获取二维点集的中心点坐标 def get_centroid(point_set): c_x, c_y = zip(*point_set) centroid_x = sum(c_x)/len(c_x) centroid_y = sum(c_y)/len(c_y) return centroid_x, centroid_y # 逆时针旋转坐标点 def n_rotate(angle, valuex, valuey, centerx, centery): valuex = np.array(valuex) valuey = np.array(valuey) nRotatex = (valuex-centerx)*cos(angle) - \ (valuey-centery)*sin(angle) + centerx nRotatey = (valuex-centerx)*sin(angle) + \ (valuey-centery)*cos(angle) + centery return nRotatex, nRotatey # 获取csv文件的列表 def get_csv_list(path): csv_file_list = [] file_list = os.listdir(path) for file_name in file_list: if file_name.endswith('csv'): csv_file_list.append(path + "/" + file_name) return csv_file_list # 获取csv文件中的点集数据 def get_csv_data(path_list): # 创建空的定维数组 sum_data = np.empty([0, 1024, 2], dtype=np.float32) # 遍历每个csv文件 for path in path_list: # 将每个csv文件读取为Numpy的数据 data = np.genfromtxt(path, delimiter=',', dtype=np.float32)[:, :2] data_len = len(data) empty_len = 1024 - data_len # 完整的1024个元数据=csv文件数据+在csv文件中随机指定下标数据 count = 0 while count < empty_len: data = np.append( data, [data[random.randint(0, data_len-1)]], axis=0) count += 1 sum_data = np.append(sum_data, [data], axis=0) print(sum_data.shape) return sum_data # 随机打乱点集数据 def exchange_data_index(sum_data, label_data): cursor_index = 0 max_range = len(sum_data) while cursor_index < 
max_range: random_index = random.randint(0, max_range-1) temp_sum_data = sum_data[0] temp_label_data = label_data[0] sum_data = np.delete(sum_data, 0, axis=0) label_data = np.delete(label_data, 0, axis=0) sum_data = np.insert(sum_data, random_index, temp_sum_data, axis=0) label_data = np.insert(label_data, random_index, temp_label_data, axis=0) cursor_index += 1 return sum_data, label_data def get_label_and_data(root_path, label_dirs): sum_data = np.empty([0, 1024, 2], dtype=np.float32) typical_data = np.empty([0], dtype=np.int32) for data_type, label_dir_set in enumerate(label_dirs): print(">> 现在进入【第%d类】数据" % (data_type+1)) for rotate_angle in label_dir_set: print("-- 需要旋转%d度的数据集:" % (rotate_angle)) # 获取csv文件列表 csv_list = get_csv_list( root_path + str(data_type) + '/' + str(rotate_angle)) # 获取csv文件点集数据 csv_data = get_csv_data(csv_list) # 遍历样本数据 for i, sample_data in enumerate(csv_data): # 求出点集的中心坐标点 centroid_x, centroid_y = get_centroid(sample_data) # 根据中心坐标点旋转点集中的点 for index, coordinate in enumerate(sample_data): x, y = coordinate n_x, n_y = n_rotate( radians(rotate_angle), x, y, centroid_x, centroid_y) # 旋转后的点集坐标中心化 sample_data[index] = [n_x-centroid_x, n_y-centroid_y] # 旋转后的点集回归原列表 csv_data[i] = sample_data # 归集点集标签 typical_data = np.append(typical_data, [data_type], axis=0) # 将每个不同数量的样本合并到主列表中(n,1024,2)=>(m,n,1024,2) sum_data = np.append(sum_data, csv_data, axis=0) return sum_data, typical_data if __name__ == "__main__": sum_train_data, train_typical_data = get_label_and_data( TRAIN_CSV_PATH, label_dirs) sum_test_data, test_typical_data = get_label_and_data( TEST_CSV_PATH, label_dirs) # 随机打乱点集数据 rand_sum_train_data, rand_train_typical_data = exchange_data_index( sum_train_data, train_typical_data) rand_sum_test_data, rand_test_typical_data = exchange_data_index( sum_test_data, test_typical_data) if os.access(data_path, os.F_OK) == False: os.mkdir(data_path) if os.access(train_file_path, os.F_OK) == True: os.remove(train_file_path) open(train_file_path, 'w') 
with h5py.File(train_file_path, 'r+') as f: f.create_dataset('data', data=rand_sum_train_data) f.create_dataset('label', data=rand_train_typical_data) if os.access(test_file_path, os.F_OK) == True: os.remove(test_file_path) open(test_file_path, 'w') with h5py.File(test_file_path, 'r+') as f: f.create_dataset('data', data=rand_sum_test_data) f.create_dataset('label', data=rand_test_typical_data)
31.861446
76
0.636226
import os import sys import random import numpy as np import pandas as pd import h5py import matplotlib.pyplot as plt from math import cos, sin, atan2, sqrt, pi, radians, degrees, ceil, isnan from skimage import io, transform BASE_DIR = os.path.dirname(os.path.abspath(__file__)) sys.path.append(BASE_DIR) TRAIN_CSV_PATH = './pointdata4/traindata/' TEST_CSV_PATH = './pointdata4/testdata/' data_path = './h5/' train_file_path = data_path + 'initial_train_data.h5' test_file_path = data_path + 'initial_test_data.h5' label_dirs = [[16, 19], [43,71,129, 260], [95,128,129, 274]] label_set = [0, 1, 2] def get_centroid(point_set): c_x, c_y = zip(*point_set) centroid_x = sum(c_x)/len(c_x) centroid_y = sum(c_y)/len(c_y) return centroid_x, centroid_y def n_rotate(angle, valuex, valuey, centerx, centery): valuex = np.array(valuex) valuey = np.array(valuey) nRotatex = (valuex-centerx)*cos(angle) - \ (valuey-centery)*sin(angle) + centerx nRotatey = (valuex-centerx)*sin(angle) + \ (valuey-centery)*cos(angle) + centery return nRotatex, nRotatey def get_csv_list(path): csv_file_list = [] file_list = os.listdir(path) for file_name in file_list: if file_name.endswith('csv'): csv_file_list.append(path + "/" + file_name) return csv_file_list def get_csv_data(path_list): sum_data = np.empty([0, 1024, 2], dtype=np.float32) for path in path_list: data = np.genfromtxt(path, delimiter=',', dtype=np.float32)[:, :2] data_len = len(data) empty_len = 1024 - data_len count = 0 while count < empty_len: data = np.append( data, [data[random.randint(0, data_len-1)]], axis=0) count += 1 sum_data = np.append(sum_data, [data], axis=0) print(sum_data.shape) return sum_data def exchange_data_index(sum_data, label_data): cursor_index = 0 max_range = len(sum_data) while cursor_index < max_range: random_index = random.randint(0, max_range-1) temp_sum_data = sum_data[0] temp_label_data = label_data[0] sum_data = np.delete(sum_data, 0, axis=0) label_data = np.delete(label_data, 0, axis=0) sum_data = 
np.insert(sum_data, random_index, temp_sum_data, axis=0) label_data = np.insert(label_data, random_index, temp_label_data, axis=0) cursor_index += 1 return sum_data, label_data def get_label_and_data(root_path, label_dirs): sum_data = np.empty([0, 1024, 2], dtype=np.float32) typical_data = np.empty([0], dtype=np.int32) for data_type, label_dir_set in enumerate(label_dirs): print(">> 现在进入【第%d类】数据" % (data_type+1)) for rotate_angle in label_dir_set: print("-- 需要旋转%d度的数据集:" % (rotate_angle)) csv_list = get_csv_list( root_path + str(data_type) + '/' + str(rotate_angle)) csv_data = get_csv_data(csv_list) for i, sample_data in enumerate(csv_data): centroid_x, centroid_y = get_centroid(sample_data) for index, coordinate in enumerate(sample_data): x, y = coordinate n_x, n_y = n_rotate( radians(rotate_angle), x, y, centroid_x, centroid_y) sample_data[index] = [n_x-centroid_x, n_y-centroid_y] csv_data[i] = sample_data typical_data = np.append(typical_data, [data_type], axis=0) sum_data = np.append(sum_data, csv_data, axis=0) return sum_data, typical_data if __name__ == "__main__": sum_train_data, train_typical_data = get_label_and_data( TRAIN_CSV_PATH, label_dirs) sum_test_data, test_typical_data = get_label_and_data( TEST_CSV_PATH, label_dirs) rand_sum_train_data, rand_train_typical_data = exchange_data_index( sum_train_data, train_typical_data) rand_sum_test_data, rand_test_typical_data = exchange_data_index( sum_test_data, test_typical_data) if os.access(data_path, os.F_OK) == False: os.mkdir(data_path) if os.access(train_file_path, os.F_OK) == True: os.remove(train_file_path) open(train_file_path, 'w') with h5py.File(train_file_path, 'r+') as f: f.create_dataset('data', data=rand_sum_train_data) f.create_dataset('label', data=rand_train_typical_data) if os.access(test_file_path, os.F_OK) == True: os.remove(test_file_path) open(test_file_path, 'w') with h5py.File(test_file_path, 'r+') as f: f.create_dataset('data', data=rand_sum_test_data) f.create_dataset('label', 
data=rand_test_typical_data)
true
true
f70eb7f417149b9a8081bb318f4066b6168a6304
1,289
py
Python
362 Design Hit Counter.py
ChiFire/legend_LeetCode
93fe97fef7e929fdbdc25fbb53955d44e14ecff8
[ "MIT" ]
872
2015-06-15T12:02:41.000Z
2022-03-30T08:44:35.000Z
362 Design Hit Counter.py
ChiFire/legend_LeetCode
93fe97fef7e929fdbdc25fbb53955d44e14ecff8
[ "MIT" ]
8
2015-06-21T15:11:59.000Z
2022-02-01T11:22:34.000Z
362 Design Hit Counter.py
ChiFire/legend_LeetCode
93fe97fef7e929fdbdc25fbb53955d44e14ecff8
[ "MIT" ]
328
2015-06-28T03:10:35.000Z
2022-03-29T11:05:28.000Z
""" Premium Question """ from collections import deque __author__ = 'Daniel' class HitCounter(object): def __init__(self): """ Initialize your data structure here. calls are being made to the system in chronological order. It is possible that several hits arrive roughly at the same time. What if the number of hits per second could be very large? Does your design scale? # use counter """ self.q = deque() def hit(self, timestamp): """ Record a hit. @param timestamp - The current timestamp (in seconds granularity). :type timestamp: int :rtype: void """ self.pop(timestamp) self.q.append(timestamp) def getHits(self, timestamp): """ Return the number of hits in the past 5 minutes. @param timestamp - The current timestamp (in seconds granularity). :type timestamp: int :rtype: int """ self.pop(timestamp) return len(self.q) def pop(self, timestamp): while self.q and timestamp - self.q[0] >= 300: self.q.popleft() # Your HitCounter object will be instantiated and called as such: # obj = HitCounter() # obj.hit(timestamp) # param_2 = obj.getHits(timestamp)
26.854167
105
0.612878
from collections import deque __author__ = 'Daniel' class HitCounter(object): def __init__(self): self.q = deque() def hit(self, timestamp): self.pop(timestamp) self.q.append(timestamp) def getHits(self, timestamp): self.pop(timestamp) return len(self.q) def pop(self, timestamp): while self.q and timestamp - self.q[0] >= 300: self.q.popleft()
true
true
f70eb992a38c3e76ec1dd03ed607975174757fd1
5,136
py
Python
pyinfra/operations/yum.py
vamshi091211/pyinfra
6e14b039422e00ebc68110eabbc6a3a543c96279
[ "MIT" ]
1
2022-03-24T05:44:45.000Z
2022-03-24T05:44:45.000Z
pyinfra/operations/yum.py
marinakravchenko21/pyinfra
6e14b039422e00ebc68110eabbc6a3a543c96279
[ "MIT" ]
null
null
null
pyinfra/operations/yum.py
marinakravchenko21/pyinfra
6e14b039422e00ebc68110eabbc6a3a543c96279
[ "MIT" ]
1
2021-11-12T18:36:01.000Z
2021-11-12T18:36:01.000Z
''' Manage yum packages and repositories. Note that yum package names are case-sensitive. ''' from __future__ import unicode_literals from pyinfra.api import operation from . import files from .util.packaging import ensure_packages, ensure_rpm, ensure_yum_repo @operation def key(state, host, key): ''' Add yum gpg keys with ``rpm``. + key: filename or URL Note: always returns one command, not state checking Example: .. code:: python linux_id = host.fact.linux_distribution['release_meta'].get('ID') yum.key( {'Add the Docker CentOS gpg key'}, 'https://download.docker.com/linux/{}/gpg'.format(linux_id), ) ''' yield 'rpm --import {0}'.format(key) @operation def repo( state, host, name, baseurl=None, present=True, description=None, enabled=True, gpgcheck=True, gpgkey=None, ): # NOTE: if updating this docstring also update `dnf.repo` # COMPAT: on v1 rearrange baseurl/present kwargs ''' Add/remove/update yum repositories. + name: URL or name for the ``.repo`` file + baseurl: the baseurl of the repo (if ``name`` is not a URL) + present: whether the ``.repo`` file should be present + description: optional verbose description + enabled: whether this repo is enabled + gpgcheck: whether set ``gpgcheck=1`` + gpgkey: the URL to the gpg key for this repo ``Baseurl``/``description``/``gpgcheck``/``gpgkey``: These are only valid when ``name`` is a filename (ie not a URL). This is for manual construction of repository files. Use a URL to download and install remote repository files. Examples: .. 
code:: python # Download a repository file yum.repo( {'Install Docker-CE repo via URL'}, 'https://download.docker.com/linux/centos/docker-ce.repo', ) # Create the repository file from baseurl/etc yum.repo( {'Add the Docker CentOS repo'}, name='DockerCE', baseurl='https://download.docker.com/linux/centos/7/$basearch/stable', ) ''' yield ensure_yum_repo( state, host, files, name, baseurl, present, description, enabled, gpgcheck, gpgkey, 'yum-config-manager', ) @operation def rpm(state, host, source, present=True): # NOTE: if updating this docstring also update `dnf.rpm` ''' Add/remove ``.rpm`` file packages. + source: filename or URL of the ``.rpm`` package + present: whether ore not the package should exist on the system URL sources with ``present=False``: If the ``.rpm`` file isn't downloaded, pyinfra can't remove any existing package as the file won't exist until mid-deploy. Example: .. code:: python yum.rpm( {'Install EPEL rpm to enable EPEL repo'}, 'https://dl.fedoraproject.org/pub/epel/epel-release-latest-' '{{ host.fact.linux_distribution.major }}.noarch.rpm', ) ''' yield ensure_rpm(state, host, files, source, present, 'yum') @operation def update(state, host): ''' Updates all yum packages. ''' yield 'yum update -y' _update = update # noqa: E305 (for use below where update is a kwarg) @operation def packages( state, host, packages=None, present=True, latest=False, update=False, clean=False, nobest=False, extra_install_args='', extra_uninstall_args='', ): ''' Install/remove/update yum packages & updates. 
+ packages: list of packages to ensure + present: whether the packages should be installed + latest: whether to upgrade packages without a specified version + update: run yum update + clean: run yum clean + nobest: add the no best option to install + extra_install_args: additional arguments to the yum install command + extra_uninstall_args: additional arguments to the yum uninstall command Versions: Package versions can be pinned like yum: ``<pkg>-<version>`` Examples: .. code:: python # Update package list and install packages yum.packages( {'Install Vim and Vim enhanced'}, ['vim-enhanced', 'vim'], update=True, ) # Install the latest versions of packages (always check) yum.packages( {'Install latest Vim'}, ['vim'], latest=True, ) ''' if clean: yield 'yum clean all' if update: yield _update(state, host) nobest_option = '' if nobest: nobest_option = ' --nobest' if extra_install_args != '': extra_install_args = ' ' + extra_install_args if extra_uninstall_args != '': extra_uninstall_args = ' ' + extra_uninstall_args yield ensure_packages( packages, host.fact.rpm_packages, present, install_command='yum install -y' + nobest_option + extra_install_args, uninstall_command='yum remove -y' + extra_uninstall_args, upgrade_command='yum update -y', version_join='-', latest=latest, )
27.319149
85
0.626558
from __future__ import unicode_literals from pyinfra.api import operation from . import files from .util.packaging import ensure_packages, ensure_rpm, ensure_yum_repo @operation def key(state, host, key): yield 'rpm --import {0}'.format(key) @operation def repo( state, host, name, baseurl=None, present=True, description=None, enabled=True, gpgcheck=True, gpgkey=None, ): yield ensure_yum_repo( state, host, files, name, baseurl, present, description, enabled, gpgcheck, gpgkey, 'yum-config-manager', ) @operation def rpm(state, host, source, present=True): yield ensure_rpm(state, host, files, source, present, 'yum') @operation def update(state, host): yield 'yum update -y' _update = update @operation def packages( state, host, packages=None, present=True, latest=False, update=False, clean=False, nobest=False, extra_install_args='', extra_uninstall_args='', ): if clean: yield 'yum clean all' if update: yield _update(state, host) nobest_option = '' if nobest: nobest_option = ' --nobest' if extra_install_args != '': extra_install_args = ' ' + extra_install_args if extra_uninstall_args != '': extra_uninstall_args = ' ' + extra_uninstall_args yield ensure_packages( packages, host.fact.rpm_packages, present, install_command='yum install -y' + nobest_option + extra_install_args, uninstall_command='yum remove -y' + extra_uninstall_args, upgrade_command='yum update -y', version_join='-', latest=latest, )
true
true
f70eba1aa1744645a51c91f874da542a669aeca5
168
py
Python
setup.py
jovanzac/Server
90932560b61f378122355bb8df18309b245858d5
[ "MIT" ]
null
null
null
setup.py
jovanzac/Server
90932560b61f378122355bb8df18309b245858d5
[ "MIT" ]
null
null
null
setup.py
jovanzac/Server
90932560b61f378122355bb8df18309b245858d5
[ "MIT" ]
null
null
null
from cx_Freeze import setup, Executable setup(name = "Server" , version = "1.0" , description = "" , executables = [Executable("server.py")])
21
46
0.583333
from cx_Freeze import setup, Executable setup(name = "Server" , version = "1.0" , description = "" , executables = [Executable("server.py")])
true
true
f70eba2ea25aab68aa58ed217078ed6007e84dc0
23,807
py
Python
conans/client/build/cmake.py
sigiesec/conan
f966d516452380918437888811bc833c804dac39
[ "MIT" ]
null
null
null
conans/client/build/cmake.py
sigiesec/conan
f966d516452380918437888811bc833c804dac39
[ "MIT" ]
null
null
null
conans/client/build/cmake.py
sigiesec/conan
f966d516452380918437888811bc833c804dac39
[ "MIT" ]
null
null
null
import os import platform from collections import OrderedDict from itertools import chain from conans.client import defs_to_string, join_arguments from conans.client.build.cppstd_flags import cppstd_flag from conans.client.tools import cross_building from conans.client.tools.oss import get_cross_building_settings from conans.errors import ConanException from conans.model.conan_file import ConanFile from conans.model.version import Version from conans.util.env_reader import get_env from conans.util.files import mkdir, get_abs_path from conans.tools import cpu_count, args_to_string from conans import tools from conans.util.log import logger from conans.util.config_parser import get_bool_from_text from conans.client.build.compiler_flags import architecture_flag def _get_env_cmake_system_name(): env_system_name = get_env("CONAN_CMAKE_SYSTEM_NAME", "") return {"False": False, "True": True, "": None}.get(env_system_name, env_system_name) class CMake(object): def __init__(self, conanfile, generator=None, cmake_system_name=True, parallel=True, build_type=None, toolset=None, make_program=None, set_cmake_flags=False): """ :param settings_or_conanfile: Conanfile instance (or settings for retro compatibility) :param generator: Generator name to use or none to autodetect :param cmake_system_name: False to not use CMAKE_SYSTEM_NAME variable, True for auto-detect or directly a string with the system name :param parallel: Try to build with multiple cores if available :param build_type: Overrides default build type comming from settings :param toolset: Toolset name to use (such as llvm-vs2014) or none for default one, applies only to certain generators (e.g. Visual Studio) :param set_cmake_flags: whether or not to set CMake flags like CMAKE_CXX_FLAGS, CMAKE_C_FLAGS, etc. it's vital to set for certain projects (e.g. 
using CMAKE_SIZEOF_VOID_P or CMAKE_LIBRARY_ARCHITECTURE) """ if not isinstance(conanfile, ConanFile): raise ConanException("First argument of CMake() has to be ConanFile. Use CMake(self)") self._settings = conanfile.settings self._conanfile = conanfile self._os = self._settings.get_safe("os") self._os_build, _, self._os_host, _ = get_cross_building_settings(self._settings) self._compiler = self._settings.get_safe("compiler") self._compiler_version = self._settings.get_safe("compiler.version") self._arch = self._settings.get_safe("arch") os_ver_str = "os.api_level" if self._os == "Android" else "os.version" self._op_system_version = self._settings.get_safe(os_ver_str) self._libcxx = self._settings.get_safe("compiler.libcxx") self._runtime = self._settings.get_safe("compiler.runtime") self._build_type = self._settings.get_safe("build_type") self._cppstd = self._settings.get_safe("cppstd") self.generator = generator or self._generator() self.toolset = self._toolset(toolset) self.build_dir = None self._cmake_system_name = _get_env_cmake_system_name() if self._cmake_system_name is None: # Not overwritten using environment self._cmake_system_name = cmake_system_name self.parallel = parallel self._set_cmake_flags = set_cmake_flags self.definitions = self._get_cmake_definitions() if build_type and build_type != self._build_type: # Call the setter to warn and update the definitions if needed self.build_type = build_type make_program = os.getenv("CONAN_MAKE_PROGRAM") or make_program if make_program: if not tools.which(make_program): self._conanfile.output.warn("The specified make program '%s' cannot be found" "and will be ignored" % make_program) else: self._conanfile.output.info("Using '%s' as CMAKE_MAKE_PROGRAM" % make_program) self.definitions["CMAKE_MAKE_PROGRAM"] = make_program @property def build_folder(self): return self.build_dir @build_folder.setter def build_folder(self, value): self.build_dir = value @property def build_type(self): return self._build_type 
@build_type.setter def build_type(self, build_type): settings_build_type = self._settings.get_safe("build_type") if build_type != settings_build_type: self._conanfile.output.warn( 'Set CMake build type "%s" is different than the settings build_type "%s"' % (build_type, settings_build_type)) self._build_type = build_type self.definitions.update(self._build_type_definition()) @property def flags(self): return defs_to_string(self.definitions) def _generator(self): if "CONAN_CMAKE_GENERATOR" in os.environ: return os.environ["CONAN_CMAKE_GENERATOR"] if not self._compiler or not self._compiler_version or not self._arch: if self._os_build == "Windows": # Not enough settings to set a generator in Windows return None return "Unix Makefiles" if self._compiler == "Visual Studio": _visuals = {'8': '8 2005', '9': '9 2008', '10': '10 2010', '11': '11 2012', '12': '12 2013', '14': '14 2015', '15': '15 2017'} base = "Visual Studio %s" % _visuals.get(self._compiler_version, "UnknownVersion %s" % self._compiler_version) if self._arch == "x86_64": return base + " Win64" elif "arm" in self._arch: return base + " ARM" else: return base # The generator depends on the build machine, not the target if self._os_build == "Windows": return "MinGW Makefiles" # it is valid only under Windows return "Unix Makefiles" def _toolset(self, toolset=None): if toolset: return toolset elif self._settings.get_safe("compiler") == "Visual Studio": subs_toolset = self._settings.get_safe("compiler.toolset") if subs_toolset: return subs_toolset return None def _cmake_compiler_options(self): cmake_definitions = OrderedDict() if str(self._os).lower() == "macos": if self._arch == "x86": cmake_definitions["CMAKE_OSX_ARCHITECTURES"] = "i386" return cmake_definitions def _cmake_cross_build_defines(self): ret = OrderedDict() os_ver = get_env("CONAN_CMAKE_SYSTEM_VERSION", self._op_system_version) toolchain_file = get_env("CONAN_CMAKE_TOOLCHAIN_FILE", "") if toolchain_file != "": logger.info("Setting Cross build 
toolchain file: %s" % toolchain_file) ret["CMAKE_TOOLCHAIN_FILE"] = toolchain_file return ret if self._cmake_system_name is False: return ret # System name and system version if self._cmake_system_name is not True: # String not empty ret["CMAKE_SYSTEM_NAME"] = self._cmake_system_name ret["CMAKE_SYSTEM_VERSION"] = os_ver else: # detect if we are cross building and the system name and version if cross_building(self._conanfile.settings): # We are cross building if self._os != self._os_build: if self._os: # the_os is the host (regular setting) ret["CMAKE_SYSTEM_NAME"] = "Darwin" if self._os in ["iOS", "tvOS", "watchOS"] else self._os if os_ver: ret["CMAKE_SYSTEM_VERSION"] = os_ver else: ret["CMAKE_SYSTEM_NAME"] = "Generic" # system processor cmake_system_processor = os.getenv("CONAN_CMAKE_SYSTEM_PROCESSOR", None) if cmake_system_processor: ret["CMAKE_SYSTEM_PROCESSOR"] = cmake_system_processor if ret: # If enabled cross compile for env_var in ["CONAN_CMAKE_FIND_ROOT_PATH", "CONAN_CMAKE_FIND_ROOT_PATH_MODE_PROGRAM", "CONAN_CMAKE_FIND_ROOT_PATH_MODE_LIBRARY", "CONAN_CMAKE_FIND_ROOT_PATH_MODE_INCLUDE"]: value = os.getenv(env_var, None) if value: ret[env_var] = value if self._conanfile and self._conanfile.deps_cpp_info.sysroot: sysroot_path = self._conanfile.deps_cpp_info.sysroot else: sysroot_path = os.getenv("CONAN_CMAKE_FIND_ROOT_PATH", None) if sysroot_path: # Needs to be set here, can't be managed in the cmake generator, CMake needs # to know about the sysroot before any other thing ret["CMAKE_SYSROOT"] = sysroot_path.replace("\\", "/") # Adjust Android stuff if self._os == "Android": arch_abi_settings = {"armv8": "arm64-v8a", "armv7": "armeabi-v7a", "armv7hf": "armeabi-v7a", "armv6": "armeabi-v6", "armv5": "armeabi" }.get(self._arch, self._arch) if arch_abi_settings: ret["CMAKE_ANDROID_ARCH_ABI"] = arch_abi_settings logger.info("Setting Cross build flags: %s" % ", ".join(["%s=%s" % (k, v) for k, v in ret.items()])) return ret @property def 
is_multi_configuration(self): """ some IDEs are multi-configuration, as Visual. Makefiles or Ninja are single-conf """ if "Visual" in self.generator or "Xcode" in self.generator: return True # TODO: complete logic return False @property def command_line(self): args = ['-G "%s"' % self.generator] if self.generator else [] args.append(self.flags) args.append('-Wno-dev') if self.toolset: args.append('-T "%s"' % self.toolset) return join_arguments(args) def _build_type_definition(self): if self._build_type and not self.is_multi_configuration: return {'CMAKE_BUILD_TYPE': self._build_type} return {} @property def runtime(self): return defs_to_string(self._runtime_definition()) def _runtime_definition(self): if self._runtime: return {"CONAN_LINK_RUNTIME": "/%s" % self._runtime} return {} @property def build_config(self): """ cmake --build tool have a --config option for Multi-configuration IDEs """ if self._build_type and self.is_multi_configuration: return "--config %s" % self._build_type return "" def _get_cmake_definitions(self): def add_cmake_flag(cmake_flags, name, flag): """ appends compiler linker flags (if already present), or just sets """ if flag: if name not in cmake_flags: cmake_flags[name] = flag else: cmake_flags[name] = ' ' + flag return cmake_flags ret = OrderedDict() ret.update(self._build_type_definition()) ret.update(self._runtime_definition()) ret.update(self._cmake_compiler_options()) ret.update(self._cmake_cross_build_defines()) ret.update(self._get_cpp_standard_vars()) ret["CONAN_EXPORTED"] = "1" if self._compiler: ret["CONAN_COMPILER"] = self._compiler if self._compiler_version: ret["CONAN_COMPILER_VERSION"] = str(self._compiler_version) # Force compiler flags -- TODO: give as environment/setting parameter? 
arch_flag = architecture_flag(compiler=self._compiler, arch=self._arch) ret = add_cmake_flag(ret, 'CONAN_CXX_FLAGS', arch_flag) ret = add_cmake_flag(ret, 'CONAN_SHARED_LINKER_FLAGS', arch_flag) ret = add_cmake_flag(ret, 'CONAN_C_FLAGS', arch_flag) if self._set_cmake_flags: ret = add_cmake_flag(ret, 'CMAKE_CXX_FLAGS', arch_flag) ret = add_cmake_flag(ret, 'CMAKE_SHARED_LINKER_FLAGS', arch_flag) ret = add_cmake_flag(ret, 'CMAKE_C_FLAGS', arch_flag) if self._libcxx: ret["CONAN_LIBCXX"] = self._libcxx # Shared library try: ret["BUILD_SHARED_LIBS"] = "ON" if self._conanfile.options.shared else "OFF" except ConanException: pass # Install to package folder try: if self._conanfile.package_folder: ret["CMAKE_INSTALL_PREFIX"] = self._conanfile.package_folder except AttributeError: pass if str(self._os) in ["Windows", "WindowsStore"] and self._compiler == "Visual Studio": if self.parallel: cpus = tools.cpu_count() ret["CONAN_CXX_FLAGS"] = "/MP%s" % cpus ret["CONAN_C_FLAGS"] = "/MP%s" % cpus # fpic if str(self._os) not in ["Windows", "WindowsStore"]: fpic = self._conanfile.options.get_safe("fPIC") if fpic is not None: shared = self._conanfile.options.get_safe("shared") ret["CONAN_CMAKE_POSITION_INDEPENDENT_CODE"] = "ON" if (fpic or shared) else "OFF" # Adjust automatically the module path in case the conanfile is using the cmake_find_package if "cmake_find_package" in self._conanfile.generators: ret["CMAKE_MODULE_PATH"] = self._conanfile.install_folder.replace("\\", "/") # Disable CMake export registry #3070 (CMake installing modules in user home's) ret["CMAKE_EXPORT_NO_PACKAGE_REGISTRY"] = "ON" return ret def _get_dirs(self, source_folder, build_folder, source_dir, build_dir, cache_build_folder): if (source_folder or build_folder) and (source_dir or build_dir): raise ConanException("Use 'build_folder'/'source_folder' arguments") def get_dir(folder, origin): if folder: if os.path.isabs(folder): return folder return os.path.join(origin, folder) return origin if source_dir or 
build_dir: # OLD MODE build_ret = build_dir or self.build_dir or self._conanfile.build_folder source_ret = source_dir or self._conanfile.source_folder else: build_ret = get_dir(build_folder, self._conanfile.build_folder) source_ret = get_dir(source_folder, self._conanfile.source_folder) if self._conanfile.in_local_cache and cache_build_folder: build_ret = get_dir(cache_build_folder, self._conanfile.build_folder) return source_ret, build_ret def _run(self, command): if self._compiler == 'Visual Studio' and self.generator in ['Ninja', 'NMake Makefiles', 'NMake Makefiles JOM']: with tools.vcvars(self._settings, force=True, filter_known_paths=False): self._conanfile.run(command) else: self._conanfile.run(command) def configure(self, args=None, defs=None, source_dir=None, build_dir=None, source_folder=None, build_folder=None, cache_build_folder=None, pkg_config_paths=None): # TODO: Deprecate source_dir and build_dir in favor of xxx_folder if not self._conanfile.should_configure: return args = args or [] defs = defs or {} source_dir, self.build_dir = self._get_dirs(source_folder, build_folder, source_dir, build_dir, cache_build_folder) mkdir(self.build_dir) arg_list = join_arguments([ self.command_line, args_to_string(args), defs_to_string(defs), args_to_string([source_dir]) ]) if pkg_config_paths: pkg_env = {"PKG_CONFIG_PATH": os.pathsep.join(get_abs_path(f, self._conanfile.install_folder) for f in pkg_config_paths)} else: # If we are using pkg_config generator automate the pcs location, otherwise it could # read wrong files set_env = "pkg_config" in self._conanfile.generators \ and "PKG_CONFIG_PATH" not in os.environ pkg_env = {"PKG_CONFIG_PATH": self._conanfile.install_folder} if set_env else {} with tools.environment_append(pkg_env): command = "cd %s && cmake %s" % (args_to_string([self.build_dir]), arg_list) if platform.system() == "Windows" and self.generator == "MinGW Makefiles": with tools.remove_from_path("sh"): self._conanfile.run(command) else: 
self._conanfile.run(command) def build(self, args=None, build_dir=None, target=None): if not self._conanfile.should_build: return args = args or [] build_dir = build_dir or self.build_dir or self._conanfile.build_folder if target is not None: args = ["--target", target] + args if self.generator and self.parallel: if "Makefiles" in self.generator and "NMake" not in self.generator: if "--" not in args: args.append("--") args.append("-j%i" % cpu_count()) elif "Visual Studio" in self.generator and \ self._compiler_version and Version(self._compiler_version) >= "10": if "--" not in args: args.append("--") args.append("/m:%i" % cpu_count()) arg_list = join_arguments([ args_to_string([build_dir]), self.build_config, args_to_string(args) ]) command = "cmake --build %s" % arg_list self._run(command) def install(self, args=None, build_dir=None): if not self._conanfile.should_install: return mkdir(self._conanfile.package_folder) if not self.definitions.get("CMAKE_INSTALL_PREFIX"): raise ConanException("CMAKE_INSTALL_PREFIX not defined for 'cmake.install()'\n" "Make sure 'package_folder' is defined") self.build(args=args, build_dir=build_dir, target="install") def test(self, args=None, build_dir=None, target=None): if not self._conanfile.should_test: return if not target: target = "RUN_TESTS" if self.is_multi_configuration else "test" self.build(args=args, build_dir=build_dir, target=target) @property def verbose(self): try: verbose = self.definitions["CMAKE_VERBOSE_MAKEFILE"] return get_bool_from_text(str(verbose)) except KeyError: return False @verbose.setter def verbose(self, value): self.definitions["CMAKE_VERBOSE_MAKEFILE"] = "ON" if value else "OFF" def patch_config_paths(self): """ changes references to the absolute path of the installed package and its dependencies in exported cmake config files to the appropriate conan variable. This makes most (sensible) cmake config files portable. 
For example, if a package foo installs a file called "fooConfig.cmake" to be used by cmake's find_package method, normally this file will contain absolute paths to the installed package folder, for example it will contain a line such as: SET(Foo_INSTALL_DIR /home/developer/.conan/data/Foo/1.0.0/...) This will cause cmake find_package() method to fail when someone else installs the package via conan. This function will replace such mentions to SET(Foo_INSTALL_DIR ${CONAN_FOO_ROOT}) which is a variable that is set by conanbuildinfo.cmake, so that find_package() now correctly works on this conan package. For dependent packages, if a package foo installs a file called "fooConfig.cmake" to be used by cmake's find_package method and if it depends to a package bar, normally this file will contain absolute paths to the bar package folder, for example it will contain a line such as: SET_TARGET_PROPERTIES(foo PROPERTIES INTERFACE_INCLUDE_DIRECTORIES "/home/developer/.conan/data/Bar/1.0.0/user/channel/id/include") This function will replace such mentions to SET_TARGET_PROPERTIES(foo PROPERTIES INTERFACE_INCLUDE_DIRECTORIES "${CONAN_BAR_ROOT}/include") If the install() method of the CMake object in the conan file is used, this function should be called _after_ that invocation. For example: def build(self): cmake = CMake(self) cmake.configure() cmake.build() cmake.install() cmake.patch_config_paths() """ if not self._conanfile.should_install: return if not self._conanfile.name: raise ConanException("cmake.patch_config_paths() can't work without package name. 
" "Define name in your recipe") pf = self.definitions.get("CMAKE_INSTALL_PREFIX") replstr = "${CONAN_%s_ROOT}" % self._conanfile.name.upper() allwalk = chain(os.walk(self._conanfile.build_folder), os.walk(self._conanfile.package_folder)) for root, _, files in allwalk: for f in files: if f.endswith(".cmake"): path = os.path.join(root, f) tools.replace_in_file(path, pf, replstr, strict=False) # patch paths of dependent packages that are found in any cmake files of the current package path_content = tools.load(path) for dep in self._conanfile.deps_cpp_info.deps: from_str = self._conanfile.deps_cpp_info[dep].rootpath # try to replace only if from str is found if path_content.find(from_str) != -1: dep_str = "${CONAN_%s_ROOT}" % dep.upper() self._conanfile.output.info("Patching paths for %s: %s to %s" % (dep, from_str, dep_str)) tools.replace_in_file(path, from_str, dep_str, strict=False) def _get_cpp_standard_vars(self): if not self._cppstd: return {} ret = {} if self._cppstd.startswith("gnu"): ret["CONAN_CMAKE_CXX_STANDARD"] = self._cppstd[3:] ret["CONAN_CMAKE_CXX_EXTENSIONS"] = "ON" else: ret["CONAN_CMAKE_CXX_STANDARD"] = self._cppstd ret["CONAN_CMAKE_CXX_EXTENSIONS"] = "OFF" ret["CONAN_STD_CXX_FLAG"] = cppstd_flag(self._compiler, self._compiler_version, self._cppstd) return ret
42.588551
119
0.604192
import os import platform from collections import OrderedDict from itertools import chain from conans.client import defs_to_string, join_arguments from conans.client.build.cppstd_flags import cppstd_flag from conans.client.tools import cross_building from conans.client.tools.oss import get_cross_building_settings from conans.errors import ConanException from conans.model.conan_file import ConanFile from conans.model.version import Version from conans.util.env_reader import get_env from conans.util.files import mkdir, get_abs_path from conans.tools import cpu_count, args_to_string from conans import tools from conans.util.log import logger from conans.util.config_parser import get_bool_from_text from conans.client.build.compiler_flags import architecture_flag def _get_env_cmake_system_name(): env_system_name = get_env("CONAN_CMAKE_SYSTEM_NAME", "") return {"False": False, "True": True, "": None}.get(env_system_name, env_system_name) class CMake(object): def __init__(self, conanfile, generator=None, cmake_system_name=True, parallel=True, build_type=None, toolset=None, make_program=None, set_cmake_flags=False): if not isinstance(conanfile, ConanFile): raise ConanException("First argument of CMake() has to be ConanFile. 
Use CMake(self)") self._settings = conanfile.settings self._conanfile = conanfile self._os = self._settings.get_safe("os") self._os_build, _, self._os_host, _ = get_cross_building_settings(self._settings) self._compiler = self._settings.get_safe("compiler") self._compiler_version = self._settings.get_safe("compiler.version") self._arch = self._settings.get_safe("arch") os_ver_str = "os.api_level" if self._os == "Android" else "os.version" self._op_system_version = self._settings.get_safe(os_ver_str) self._libcxx = self._settings.get_safe("compiler.libcxx") self._runtime = self._settings.get_safe("compiler.runtime") self._build_type = self._settings.get_safe("build_type") self._cppstd = self._settings.get_safe("cppstd") self.generator = generator or self._generator() self.toolset = self._toolset(toolset) self.build_dir = None self._cmake_system_name = _get_env_cmake_system_name() if self._cmake_system_name is None: self._cmake_system_name = cmake_system_name self.parallel = parallel self._set_cmake_flags = set_cmake_flags self.definitions = self._get_cmake_definitions() if build_type and build_type != self._build_type: self.build_type = build_type make_program = os.getenv("CONAN_MAKE_PROGRAM") or make_program if make_program: if not tools.which(make_program): self._conanfile.output.warn("The specified make program '%s' cannot be found" "and will be ignored" % make_program) else: self._conanfile.output.info("Using '%s' as CMAKE_MAKE_PROGRAM" % make_program) self.definitions["CMAKE_MAKE_PROGRAM"] = make_program @property def build_folder(self): return self.build_dir @build_folder.setter def build_folder(self, value): self.build_dir = value @property def build_type(self): return self._build_type @build_type.setter def build_type(self, build_type): settings_build_type = self._settings.get_safe("build_type") if build_type != settings_build_type: self._conanfile.output.warn( 'Set CMake build type "%s" is different than the settings build_type "%s"' % (build_type, 
settings_build_type)) self._build_type = build_type self.definitions.update(self._build_type_definition()) @property def flags(self): return defs_to_string(self.definitions) def _generator(self): if "CONAN_CMAKE_GENERATOR" in os.environ: return os.environ["CONAN_CMAKE_GENERATOR"] if not self._compiler or not self._compiler_version or not self._arch: if self._os_build == "Windows": return None return "Unix Makefiles" if self._compiler == "Visual Studio": _visuals = {'8': '8 2005', '9': '9 2008', '10': '10 2010', '11': '11 2012', '12': '12 2013', '14': '14 2015', '15': '15 2017'} base = "Visual Studio %s" % _visuals.get(self._compiler_version, "UnknownVersion %s" % self._compiler_version) if self._arch == "x86_64": return base + " Win64" elif "arm" in self._arch: return base + " ARM" else: return base if self._os_build == "Windows": return "MinGW Makefiles" return "Unix Makefiles" def _toolset(self, toolset=None): if toolset: return toolset elif self._settings.get_safe("compiler") == "Visual Studio": subs_toolset = self._settings.get_safe("compiler.toolset") if subs_toolset: return subs_toolset return None def _cmake_compiler_options(self): cmake_definitions = OrderedDict() if str(self._os).lower() == "macos": if self._arch == "x86": cmake_definitions["CMAKE_OSX_ARCHITECTURES"] = "i386" return cmake_definitions def _cmake_cross_build_defines(self): ret = OrderedDict() os_ver = get_env("CONAN_CMAKE_SYSTEM_VERSION", self._op_system_version) toolchain_file = get_env("CONAN_CMAKE_TOOLCHAIN_FILE", "") if toolchain_file != "": logger.info("Setting Cross build toolchain file: %s" % toolchain_file) ret["CMAKE_TOOLCHAIN_FILE"] = toolchain_file return ret if self._cmake_system_name is False: return ret if self._cmake_system_name is not True: ret["CMAKE_SYSTEM_NAME"] = self._cmake_system_name ret["CMAKE_SYSTEM_VERSION"] = os_ver else: if cross_building(self._conanfile.settings): if self._os != self._os_build: if self._os: ret["CMAKE_SYSTEM_NAME"] = "Darwin" if self._os in 
["iOS", "tvOS", "watchOS"] else self._os if os_ver: ret["CMAKE_SYSTEM_VERSION"] = os_ver else: ret["CMAKE_SYSTEM_NAME"] = "Generic" cmake_system_processor = os.getenv("CONAN_CMAKE_SYSTEM_PROCESSOR", None) if cmake_system_processor: ret["CMAKE_SYSTEM_PROCESSOR"] = cmake_system_processor if ret: for env_var in ["CONAN_CMAKE_FIND_ROOT_PATH", "CONAN_CMAKE_FIND_ROOT_PATH_MODE_PROGRAM", "CONAN_CMAKE_FIND_ROOT_PATH_MODE_LIBRARY", "CONAN_CMAKE_FIND_ROOT_PATH_MODE_INCLUDE"]: value = os.getenv(env_var, None) if value: ret[env_var] = value if self._conanfile and self._conanfile.deps_cpp_info.sysroot: sysroot_path = self._conanfile.deps_cpp_info.sysroot else: sysroot_path = os.getenv("CONAN_CMAKE_FIND_ROOT_PATH", None) if sysroot_path: # to know about the sysroot before any other thing ret["CMAKE_SYSROOT"] = sysroot_path.replace("\\", "/") # Adjust Android stuff if self._os == "Android": arch_abi_settings = {"armv8": "arm64-v8a", "armv7": "armeabi-v7a", "armv7hf": "armeabi-v7a", "armv6": "armeabi-v6", "armv5": "armeabi" }.get(self._arch, self._arch) if arch_abi_settings: ret["CMAKE_ANDROID_ARCH_ABI"] = arch_abi_settings logger.info("Setting Cross build flags: %s" % ", ".join(["%s=%s" % (k, v) for k, v in ret.items()])) return ret @property def is_multi_configuration(self): if "Visual" in self.generator or "Xcode" in self.generator: return True # TODO: complete logic return False @property def command_line(self): args = ['-G "%s"' % self.generator] if self.generator else [] args.append(self.flags) args.append('-Wno-dev') if self.toolset: args.append('-T "%s"' % self.toolset) return join_arguments(args) def _build_type_definition(self): if self._build_type and not self.is_multi_configuration: return {'CMAKE_BUILD_TYPE': self._build_type} return {} @property def runtime(self): return defs_to_string(self._runtime_definition()) def _runtime_definition(self): if self._runtime: return {"CONAN_LINK_RUNTIME": "/%s" % self._runtime} return {} @property def build_config(self): if 
self._build_type and self.is_multi_configuration: return "--config %s" % self._build_type return "" def _get_cmake_definitions(self): def add_cmake_flag(cmake_flags, name, flag): if flag: if name not in cmake_flags: cmake_flags[name] = flag else: cmake_flags[name] = ' ' + flag return cmake_flags ret = OrderedDict() ret.update(self._build_type_definition()) ret.update(self._runtime_definition()) ret.update(self._cmake_compiler_options()) ret.update(self._cmake_cross_build_defines()) ret.update(self._get_cpp_standard_vars()) ret["CONAN_EXPORTED"] = "1" if self._compiler: ret["CONAN_COMPILER"] = self._compiler if self._compiler_version: ret["CONAN_COMPILER_VERSION"] = str(self._compiler_version) # Force compiler flags -- TODO: give as environment/setting parameter? arch_flag = architecture_flag(compiler=self._compiler, arch=self._arch) ret = add_cmake_flag(ret, 'CONAN_CXX_FLAGS', arch_flag) ret = add_cmake_flag(ret, 'CONAN_SHARED_LINKER_FLAGS', arch_flag) ret = add_cmake_flag(ret, 'CONAN_C_FLAGS', arch_flag) if self._set_cmake_flags: ret = add_cmake_flag(ret, 'CMAKE_CXX_FLAGS', arch_flag) ret = add_cmake_flag(ret, 'CMAKE_SHARED_LINKER_FLAGS', arch_flag) ret = add_cmake_flag(ret, 'CMAKE_C_FLAGS', arch_flag) if self._libcxx: ret["CONAN_LIBCXX"] = self._libcxx # Shared library try: ret["BUILD_SHARED_LIBS"] = "ON" if self._conanfile.options.shared else "OFF" except ConanException: pass # Install to package folder try: if self._conanfile.package_folder: ret["CMAKE_INSTALL_PREFIX"] = self._conanfile.package_folder except AttributeError: pass if str(self._os) in ["Windows", "WindowsStore"] and self._compiler == "Visual Studio": if self.parallel: cpus = tools.cpu_count() ret["CONAN_CXX_FLAGS"] = "/MP%s" % cpus ret["CONAN_C_FLAGS"] = "/MP%s" % cpus # fpic if str(self._os) not in ["Windows", "WindowsStore"]: fpic = self._conanfile.options.get_safe("fPIC") if fpic is not None: shared = self._conanfile.options.get_safe("shared") ret["CONAN_CMAKE_POSITION_INDEPENDENT_CODE"] = "ON" 
if (fpic or shared) else "OFF" # Adjust automatically the module path in case the conanfile is using the cmake_find_package if "cmake_find_package" in self._conanfile.generators: ret["CMAKE_MODULE_PATH"] = self._conanfile.install_folder.replace("\\", "/") # Disable CMake export registry #3070 (CMake installing modules in user home's) ret["CMAKE_EXPORT_NO_PACKAGE_REGISTRY"] = "ON" return ret def _get_dirs(self, source_folder, build_folder, source_dir, build_dir, cache_build_folder): if (source_folder or build_folder) and (source_dir or build_dir): raise ConanException("Use 'build_folder'/'source_folder' arguments") def get_dir(folder, origin): if folder: if os.path.isabs(folder): return folder return os.path.join(origin, folder) return origin if source_dir or build_dir: build_ret = build_dir or self.build_dir or self._conanfile.build_folder source_ret = source_dir or self._conanfile.source_folder else: build_ret = get_dir(build_folder, self._conanfile.build_folder) source_ret = get_dir(source_folder, self._conanfile.source_folder) if self._conanfile.in_local_cache and cache_build_folder: build_ret = get_dir(cache_build_folder, self._conanfile.build_folder) return source_ret, build_ret def _run(self, command): if self._compiler == 'Visual Studio' and self.generator in ['Ninja', 'NMake Makefiles', 'NMake Makefiles JOM']: with tools.vcvars(self._settings, force=True, filter_known_paths=False): self._conanfile.run(command) else: self._conanfile.run(command) def configure(self, args=None, defs=None, source_dir=None, build_dir=None, source_folder=None, build_folder=None, cache_build_folder=None, pkg_config_paths=None): if not self._conanfile.should_configure: return args = args or [] defs = defs or {} source_dir, self.build_dir = self._get_dirs(source_folder, build_folder, source_dir, build_dir, cache_build_folder) mkdir(self.build_dir) arg_list = join_arguments([ self.command_line, args_to_string(args), defs_to_string(defs), args_to_string([source_dir]) ]) if 
pkg_config_paths: pkg_env = {"PKG_CONFIG_PATH": os.pathsep.join(get_abs_path(f, self._conanfile.install_folder) for f in pkg_config_paths)} else: set_env = "pkg_config" in self._conanfile.generators \ and "PKG_CONFIG_PATH" not in os.environ pkg_env = {"PKG_CONFIG_PATH": self._conanfile.install_folder} if set_env else {} with tools.environment_append(pkg_env): command = "cd %s && cmake %s" % (args_to_string([self.build_dir]), arg_list) if platform.system() == "Windows" and self.generator == "MinGW Makefiles": with tools.remove_from_path("sh"): self._conanfile.run(command) else: self._conanfile.run(command) def build(self, args=None, build_dir=None, target=None): if not self._conanfile.should_build: return args = args or [] build_dir = build_dir or self.build_dir or self._conanfile.build_folder if target is not None: args = ["--target", target] + args if self.generator and self.parallel: if "Makefiles" in self.generator and "NMake" not in self.generator: if "--" not in args: args.append("--") args.append("-j%i" % cpu_count()) elif "Visual Studio" in self.generator and \ self._compiler_version and Version(self._compiler_version) >= "10": if "--" not in args: args.append("--") args.append("/m:%i" % cpu_count()) arg_list = join_arguments([ args_to_string([build_dir]), self.build_config, args_to_string(args) ]) command = "cmake --build %s" % arg_list self._run(command) def install(self, args=None, build_dir=None): if not self._conanfile.should_install: return mkdir(self._conanfile.package_folder) if not self.definitions.get("CMAKE_INSTALL_PREFIX"): raise ConanException("CMAKE_INSTALL_PREFIX not defined for 'cmake.install()'\n" "Make sure 'package_folder' is defined") self.build(args=args, build_dir=build_dir, target="install") def test(self, args=None, build_dir=None, target=None): if not self._conanfile.should_test: return if not target: target = "RUN_TESTS" if self.is_multi_configuration else "test" self.build(args=args, build_dir=build_dir, target=target) @property 
def verbose(self): try: verbose = self.definitions["CMAKE_VERBOSE_MAKEFILE"] return get_bool_from_text(str(verbose)) except KeyError: return False @verbose.setter def verbose(self, value): self.definitions["CMAKE_VERBOSE_MAKEFILE"] = "ON" if value else "OFF" def patch_config_paths(self): if not self._conanfile.should_install: return if not self._conanfile.name: raise ConanException("cmake.patch_config_paths() can't work without package name. " "Define name in your recipe") pf = self.definitions.get("CMAKE_INSTALL_PREFIX") replstr = "${CONAN_%s_ROOT}" % self._conanfile.name.upper() allwalk = chain(os.walk(self._conanfile.build_folder), os.walk(self._conanfile.package_folder)) for root, _, files in allwalk: for f in files: if f.endswith(".cmake"): path = os.path.join(root, f) tools.replace_in_file(path, pf, replstr, strict=False) # patch paths of dependent packages that are found in any cmake files of the current package path_content = tools.load(path) for dep in self._conanfile.deps_cpp_info.deps: from_str = self._conanfile.deps_cpp_info[dep].rootpath # try to replace only if from str is found if path_content.find(from_str) != -1: dep_str = "${CONAN_%s_ROOT}" % dep.upper() self._conanfile.output.info("Patching paths for %s: %s to %s" % (dep, from_str, dep_str)) tools.replace_in_file(path, from_str, dep_str, strict=False) def _get_cpp_standard_vars(self): if not self._cppstd: return {} ret = {} if self._cppstd.startswith("gnu"): ret["CONAN_CMAKE_CXX_STANDARD"] = self._cppstd[3:] ret["CONAN_CMAKE_CXX_EXTENSIONS"] = "ON" else: ret["CONAN_CMAKE_CXX_STANDARD"] = self._cppstd ret["CONAN_CMAKE_CXX_EXTENSIONS"] = "OFF" ret["CONAN_STD_CXX_FLAG"] = cppstd_flag(self._compiler, self._compiler_version, self._cppstd) return ret
true
true
f70eba7ac72db4241c482950c5d46e65d867d233
2,161
py
Python
apps/erms/api.py
remocrevo/celus
682b13168eb475d7f970502113e756e40a899877
[ "MIT" ]
7
2020-02-20T13:24:40.000Z
2022-01-28T19:36:04.000Z
apps/erms/api.py
remocrevo/celus
682b13168eb475d7f970502113e756e40a899877
[ "MIT" ]
15
2020-04-28T13:09:02.000Z
2021-11-03T15:21:24.000Z
apps/erms/api.py
remocrevo/celus
682b13168eb475d7f970502113e756e40a899877
[ "MIT" ]
4
2020-02-20T13:48:30.000Z
2021-03-19T00:33:34.000Z
import urllib.parse import requests class ERMSError(Exception): pass class ERMS(object): """ Possible queries: /object?id=eq.574 /object?id=in.(574,575) """ # endpoints EP_OBJECT = 'object' EP_IDENTITY = 'identity' EP_CONSORTIUM = 'consortium' EP_CONSORTIUM_MEMBER = 'consortium_member' EP_ACQUISITION = 'acquisition' EP_PROCUREMENT = 'procurement' EP_OFFER = 'offer' EP_OFFER_SPLIT = 'offer_split' # object classes CLS_PERSON = 'Person' CLS_ORGANIZATION = 'Organization' CLS_PLATFORM = 'Platform' def __init__(self, base_url="https://erms.czechelib.cz/api/"): self.base_url = base_url.rstrip('/') self.session = requests.Session() @classmethod def _construct_query_string(cls, value): if type(value) in (list, tuple, set): return 'in.({})'.format(','.join(str(_id) for _id in value)) return f'eq.{value}' def construct_object_url(self, cls=None, object_id=None): params = {} if cls: params['class'] = self._construct_query_string(cls) if object_id: params['id'] = self._construct_query_string(object_id) else: params['order'] = 'id' query = urllib.parse.urlencode(params) return f'{self.base_url}/{self.EP_OBJECT}?{query}' def fetch_url(self, url): response = self.session.get(url) if response.status_code == 200: return response.json() raise ERMSError(response) def fetch_objects(self, cls=None, object_id=None): url = self.construct_object_url(cls=cls, object_id=object_id) data = self.fetch_url(url) return data def fetch_endpoint(self, endpoint, object_id=None, **kwargs): url = f'{self.base_url}/{endpoint}' params = {} if object_id: params['id'] = self._construct_query_string(object_id) for key, value in kwargs.items(): params[key] = self._construct_query_string(value) if params: url += '?{}'.format(urllib.parse.urlencode(params)) return self.fetch_url(url)
28.064935
72
0.61777
import urllib.parse import requests class ERMSError(Exception): pass class ERMS(object): EP_OBJECT = 'object' EP_IDENTITY = 'identity' EP_CONSORTIUM = 'consortium' EP_CONSORTIUM_MEMBER = 'consortium_member' EP_ACQUISITION = 'acquisition' EP_PROCUREMENT = 'procurement' EP_OFFER = 'offer' EP_OFFER_SPLIT = 'offer_split' CLS_PERSON = 'Person' CLS_ORGANIZATION = 'Organization' CLS_PLATFORM = 'Platform' def __init__(self, base_url="https://erms.czechelib.cz/api/"): self.base_url = base_url.rstrip('/') self.session = requests.Session() @classmethod def _construct_query_string(cls, value): if type(value) in (list, tuple, set): return 'in.({})'.format(','.join(str(_id) for _id in value)) return f'eq.{value}' def construct_object_url(self, cls=None, object_id=None): params = {} if cls: params['class'] = self._construct_query_string(cls) if object_id: params['id'] = self._construct_query_string(object_id) else: params['order'] = 'id' query = urllib.parse.urlencode(params) return f'{self.base_url}/{self.EP_OBJECT}?{query}' def fetch_url(self, url): response = self.session.get(url) if response.status_code == 200: return response.json() raise ERMSError(response) def fetch_objects(self, cls=None, object_id=None): url = self.construct_object_url(cls=cls, object_id=object_id) data = self.fetch_url(url) return data def fetch_endpoint(self, endpoint, object_id=None, **kwargs): url = f'{self.base_url}/{endpoint}' params = {} if object_id: params['id'] = self._construct_query_string(object_id) for key, value in kwargs.items(): params[key] = self._construct_query_string(value) if params: url += '?{}'.format(urllib.parse.urlencode(params)) return self.fetch_url(url)
true
true
f70ebad00452b2d47ad5070a859d615e6a790ba0
5,906
py
Python
dash/html/Code.py
sthagen/plotly-dash
4a9412efe5229809016eee9a5f1c50f882720c4c
[ "MIT" ]
null
null
null
dash/html/Code.py
sthagen/plotly-dash
4a9412efe5229809016eee9a5f1c50f882720c4c
[ "MIT" ]
null
null
null
dash/html/Code.py
sthagen/plotly-dash
4a9412efe5229809016eee9a5f1c50f882720c4c
[ "MIT" ]
null
null
null
# AUTO GENERATED FILE - DO NOT EDIT from dash.development.base_component import Component, _explicitize_args class Code(Component): """A Code component. Code is a wrapper for the <code> HTML5 element. For detailed attribute info see: https://developer.mozilla.org/en-US/docs/Web/HTML/Element/code Keyword arguments: - children (a list of or a singular dash component, string or number; optional): The children of this component. - id (string; optional): The ID of this component, used to identify dash components in callbacks. The ID needs to be unique across all of the components in an app. - accessKey (string; optional): Keyboard shortcut to activate or add focus to the element. - aria-* (string; optional): A wildcard aria attribute. - className (string; optional): Often used with CSS to style elements with common properties. - contentEditable (string; optional): Indicates whether the element's content is editable. - contextMenu (string; optional): Defines the ID of a <menu> element which will serve as the element's context menu. - data-* (string; optional): A wildcard data attribute. - dir (string; optional): Defines the text direction. Allowed values are ltr (Left-To-Right) or rtl (Right-To-Left). - draggable (string; optional): Defines whether the element can be dragged. - hidden (a value equal to: 'hidden', 'HIDDEN' | boolean; optional): Prevents rendering of given element, while keeping child elements, e.g. script elements, active. - key (string; optional): A unique identifier for the component, used to improve performance by React.js while rendering components See https://reactjs.org/docs/lists-and-keys.html for more info. - lang (string; optional): Defines the language used in the element. - loading_state (dict; optional): Object that holds the loading state object coming from dash-renderer. `loading_state` is a dict with keys: - component_name (string; optional): Holds the name of the component that is loading. 
- is_loading (boolean; optional): Determines if the component is loading or not. - prop_name (string; optional): Holds which property is loading. - n_clicks (number; default 0): An integer that represents the number of times that this element has been clicked on. - n_clicks_timestamp (number; default -1): An integer that represents the time (in ms since 1970) at which n_clicks changed. This can be used to tell which button was changed most recently. - role (string; optional): Defines an explicit role for an element for use by assistive technologies. - spellCheck (string; optional): Indicates whether spell checking is allowed for the element. - style (dict; optional): Defines CSS styles which will override styles previously set. - tabIndex (string; optional): Overrides the browser's default tab order and follows the one specified instead. - title (string; optional): Text to be displayed in a tooltip when hovering over the element.""" _children_props = [] _base_nodes = ["children"] _namespace = "dash_html_components" _type = "Code" @_explicitize_args def __init__( self, children=None, id=Component.UNDEFINED, n_clicks=Component.UNDEFINED, n_clicks_timestamp=Component.UNDEFINED, key=Component.UNDEFINED, accessKey=Component.UNDEFINED, className=Component.UNDEFINED, contentEditable=Component.UNDEFINED, contextMenu=Component.UNDEFINED, dir=Component.UNDEFINED, draggable=Component.UNDEFINED, hidden=Component.UNDEFINED, lang=Component.UNDEFINED, role=Component.UNDEFINED, spellCheck=Component.UNDEFINED, style=Component.UNDEFINED, tabIndex=Component.UNDEFINED, title=Component.UNDEFINED, loading_state=Component.UNDEFINED, **kwargs ): self._prop_names = [ "children", "id", "accessKey", "aria-*", "className", "contentEditable", "contextMenu", "data-*", "dir", "draggable", "hidden", "key", "lang", "loading_state", "n_clicks", "n_clicks_timestamp", "role", "spellCheck", "style", "tabIndex", "title", ] self._valid_wildcard_attributes = ["data-", "aria-"] self.available_properties = 
[ "children", "id", "accessKey", "aria-*", "className", "contentEditable", "contextMenu", "data-*", "dir", "draggable", "hidden", "key", "lang", "loading_state", "n_clicks", "n_clicks_timestamp", "role", "spellCheck", "style", "tabIndex", "title", ] self.available_wildcard_properties = ["data-", "aria-"] _explicit_args = kwargs.pop("_explicit_args") _locals = locals() _locals.update(kwargs) # For wildcard attrs and excess named props args = {k: _locals[k] for k in _explicit_args if k != "children"} for k in []: if k not in args: raise TypeError("Required argument `" + k + "` was not specified.") super(Code, self).__init__(children=children, **args)
31.752688
84
0.596173
from dash.development.base_component import Component, _explicitize_args class Code(Component): _children_props = [] _base_nodes = ["children"] _namespace = "dash_html_components" _type = "Code" @_explicitize_args def __init__( self, children=None, id=Component.UNDEFINED, n_clicks=Component.UNDEFINED, n_clicks_timestamp=Component.UNDEFINED, key=Component.UNDEFINED, accessKey=Component.UNDEFINED, className=Component.UNDEFINED, contentEditable=Component.UNDEFINED, contextMenu=Component.UNDEFINED, dir=Component.UNDEFINED, draggable=Component.UNDEFINED, hidden=Component.UNDEFINED, lang=Component.UNDEFINED, role=Component.UNDEFINED, spellCheck=Component.UNDEFINED, style=Component.UNDEFINED, tabIndex=Component.UNDEFINED, title=Component.UNDEFINED, loading_state=Component.UNDEFINED, **kwargs ): self._prop_names = [ "children", "id", "accessKey", "aria-*", "className", "contentEditable", "contextMenu", "data-*", "dir", "draggable", "hidden", "key", "lang", "loading_state", "n_clicks", "n_clicks_timestamp", "role", "spellCheck", "style", "tabIndex", "title", ] self._valid_wildcard_attributes = ["data-", "aria-"] self.available_properties = [ "children", "id", "accessKey", "aria-*", "className", "contentEditable", "contextMenu", "data-*", "dir", "draggable", "hidden", "key", "lang", "loading_state", "n_clicks", "n_clicks_timestamp", "role", "spellCheck", "style", "tabIndex", "title", ] self.available_wildcard_properties = ["data-", "aria-"] _explicit_args = kwargs.pop("_explicit_args") _locals = locals() _locals.update(kwargs) args = {k: _locals[k] for k in _explicit_args if k != "children"} for k in []: if k not in args: raise TypeError("Required argument `" + k + "` was not specified.") super(Code, self).__init__(children=children, **args)
true
true
f70ebb69800c57b924e20d307a2ea6cd4f9f431c
11,439
py
Python
python/train_model.py
skinnider/low-data-generative-models
6e743b6d1ba3265f58fcbd33f2c60e633cf25999
[ "MIT" ]
10
2021-02-01T06:21:52.000Z
2022-03-02T23:59:30.000Z
python/train_model.py
skinnider/low-data-generative-models
6e743b6d1ba3265f58fcbd33f2c60e633cf25999
[ "MIT" ]
1
2021-09-21T02:38:07.000Z
2021-09-21T02:38:07.000Z
python/train_model.py
skinnider/low-data-generative-models
6e743b6d1ba3265f58fcbd33f2c60e633cf25999
[ "MIT" ]
5
2021-04-16T18:11:48.000Z
2021-12-08T07:21:09.000Z
""" Train a language model to generate SMILES. """ import argparse import os import numpy as np import pandas as pd import random import sys import torch import torch.nn as nn import torch.optim as optim from torch.utils.data import DataLoader from tqdm import tqdm # suppress Chem.MolFromSmiles error output from rdkit import rdBase rdBase.DisableLog('rdApp.error') # set working directory git_dir = os.path.expanduser("~/git/low-data-generative-models") python_dir = git_dir + "/python" os.chdir(python_dir) # import classes from models import RNN, OneHotRNN, EarlyStopping from datasets import SmilesDataset, SelfiesDataset, SmilesCollate from functions import decrease_learning_rate, print_update, track_loss, \ sample_smiles, write_smiles ### CLI parser = argparse.ArgumentParser( description='Chemical structure language model interface') # input file parser.add_argument('--smiles_file', type=str, help='location of the SMILES file to train on') parser.add_argument('--selfies', dest='selfies', action='store_true') parser.set_defaults(selfies=False) # output files parser.add_argument('--output_dir', type=str, help='directory to save trained models to') # RNN parameters parser.add_argument('--rnn_type', type=str, choices=['RNN', 'LSTM', 'GRU'], default='GRU', help='type of language model to train') parser.add_argument('--embedding_size', type=int, default=128, help='size of vocabulary embedding') parser.add_argument('--hidden_size', type=int, default=512, help='size of language model hidden layers') parser.add_argument('--n_layers', type=int, default=3, help='number of layers in language model') parser.add_argument('--dropout', type=float, default=0, help='amount of dropout (0-1) to apply to model') parser.add_argument('--bidirectional', type=bool, default=False, help='for LSTMs only, train a bidirectional model') parser.add_argument('--nonlinearity', type=str, choices=['tanh', 'relu'], default='tanh', help='for RNNs only, nonlinearity to use') 
parser.add_argument('--tie_weights', dest='tie_weights', help='require embedding/dense linear layers use the ' +\ 'same weights', action='store_true') parser.set_defaults(tie_weights=False) # optimization parameters parser.add_argument('--learning_rate', type=float, default=0.001, help='initial learning rate') parser.add_argument('--learning_rate_decay', default=None, # type=float, help='amount (0-1) to decrease learning rate by every ' +\ 'fixed number of steps') parser.add_argument('--learning_rate_decay_steps', default=10000, type=int, help='# of steps between learning rate decrements') parser.add_argument('--gradient_clip', default=None, # type=float, help='amount to which to clip the gradients') # training schedule parser.add_argument('--seed', type=int, default=0, help='seed for random number generator') parser.add_argument('--batch_size', type=int, default=128, help='batch size') parser.add_argument('--max_epochs', type=int, default=1000, help='maximum number of epochs to train for') parser.add_argument('--patience', type=int, default=100, help='patience for early stopping') # sampling from trained models parser.add_argument('--sample_idx', type=int, default=0, help='index of the model being trained (zero-indexed)') parser.add_argument('--sample_every_epochs', type=int, help='if set, sample SMILES from the trained model' + 'every n epochs') parser.add_argument('--sample_every_steps', type=int, help='if set, sample SMILES from the trained model' + 'every n steps') parser.add_argument('--log_every_epochs', type=int, help='log training/validation losses every n epochs') parser.add_argument('--log_every_steps', type=int, help='log training/validation losses every n steps') parser.add_argument('--sample_size', type=int, default=100000, help='size of each sample from the trained model') # start with pretrained model parser.add_argument('--pretrain_model', type=str, default=None, help='load parameters from a pretrained model') # enforce a larger vocabulary 
parser.add_argument('--vocab_file', type=str, default=None, help='file containing all tokens in vocabulary') # for use in grid parser.add_argument('--stop_if_exists', dest='stop_if_exists', action='store_true') parser.set_defaults(stop_if_exists=False) # parse arguments args = parser.parse_args() # manually deal with gradient clipping try: args.gradient_clip = float(args.gradient_clip) except (ValueError, TypeError): args.gradient_clip = None # manually deal with learning rate decay try: args.learning_rate_decay = float(args.learning_rate_decay) except (ValueError, TypeError): args.learning_rate_decay = None # log args (make searching through logging directory easier) for arg in vars(args): print(arg, ": ", getattr(args, arg), "(", type(getattr(args, arg)), ")") # optionally stop if output file already exists if args.selfies: smiles_filename = "sample-" + str(args.sample_idx + 1) + "-SELFIES.smi" else: smiles_filename = "sample-" + str(args.sample_idx + 1) + "-SMILES.smi" smiles_file = os.path.join(args.output_dir, smiles_filename) if os.path.isfile(smiles_file) and args.stop_if_exists: print("output file " + smiles_file + " exists: stopping early") sys.exit() # make output directories if not os.path.isdir(args.output_dir): try: os.makedirs(args.output_dir) except FileExistsError: pass ## seed all RNGs torch.manual_seed(args.seed) random.seed(args.seed) np.random.seed(args.seed) if torch.cuda.is_available(): print("using cuda") torch.cuda.manual_seed_all(args.seed) # set up dataset if args.selfies: dataset = SelfiesDataset(selfies_file=args.smiles_file) else: dataset = SmilesDataset(smiles_file=args.smiles_file, vocab_file=args.vocab_file) # set up batching loader = DataLoader(dataset, batch_size=args.batch_size, shuffle=True, drop_last=True, collate_fn=SmilesCollate(dataset.vocabulary)) # set up model if args.embedding_size > 0: model = RNN(vocabulary=dataset.vocabulary, rnn_type=args.rnn_type, embedding_size=args.embedding_size, hidden_size=args.hidden_size, 
n_layers=args.n_layers, dropout=args.dropout, bidirectional=args.bidirectional, tie_weights=args.tie_weights, nonlinearity=args.nonlinearity) else: # no embedding layer (one-hot encoding) model = OneHotRNN(vocabulary=dataset.vocabulary, rnn_type=args.rnn_type, hidden_size=args.hidden_size, n_layers=args.n_layers, dropout=args.dropout, bidirectional=args.bidirectional, nonlinearity=args.nonlinearity) # optionally, load model parameters from file if args.pretrain_model is not None: model.load_state_dict(torch.load(args.pretrain_model)) # set up optimizer optimizer = optim.Adam(model.parameters(), betas=(0.9, 0.999), ## default eps=1e-08, ## default lr=args.learning_rate) # set up early stopping early_stop = EarlyStopping(patience=args.patience) # set up training schedule file sched_filename = "training_schedule-" + str(args.sample_idx + 1) + ".csv" sched_file = os.path.join(args.output_dir, sched_filename) # iterate over epochs counter = 0 for epoch in range(args.max_epochs): # iterate over batches for batch_idx, batch in tqdm(enumerate(loader), total=len(loader)): batch, lengths = batch # increment counter counter += 1 # calculate loss log_p = model.loss(batch, lengths) loss = log_p.mean() # zero gradients, calculate new gradients, and take a step optimizer.zero_grad() loss.backward() # clip gradient if args.gradient_clip is not None: nn.utils.clip_grad_norm_(model.parameters(), args.gradient_clip) optimizer.step() # check learning rate decay if args.learning_rate_decay is not None and \ counter % args.learning_rate_decay_steps == 0: decrease_learning_rate(optimizer, multiplier=args.learning_rate_decay) # print update and write training schedule? if args.log_every_steps is not None: if counter % args.log_every_steps == 0: print_update(model, dataset, epoch, batch_idx + 1, loss.item(), args.batch_size, selfies=args.selfies) track_loss(sched_file, model, dataset, epoch, counter, loss.item(), args.batch_size) # save SMILES? 
if args.sample_every_steps is not None: if counter % args.sample_every_steps == 0: sample_smiles(args.output_dir, args.sample_idx, model, args.sample_size, epoch, counter) # calculate validation loss validation, lengths = dataset.get_validation(args.batch_size) validation_loss = model.loss(validation, lengths).mean().detach() # check early stopping model_filename = "model-" + str(args.sample_idx + 1) + ".pt" model_file = os.path.join(args.output_dir, model_filename) early_stop(validation_loss.item(), model, model_file, counter) if early_stop.stop: break # print update and write training schedule? if args.log_every_epochs is not None: print_update(model, dataset, epoch, 'NA', loss.item(), args.batch_size) track_loss(sched_file, model, dataset, epoch, counter, loss.item(), args.batch_size) # save SMILES? if args.sample_every_epochs is not None: sample_smiles(args.output_dir, args.sample_idx, model, args.sample_size, epoch, counter) if early_stop.stop: break # append information about final training step if args.log_every_epochs is not None or args.log_every_steps is not None: sched = pd.DataFrame({'epoch': [None], 'step': [early_stop.step_at_best], 'outcome': ['training loss'], 'value': [early_stop.best_loss]}) sched.to_csv(sched_file, index=False, mode='a', header=False) # load the best model model.load_state_dict(torch.load(model_file)) model.eval() ## enable evaluation modes # sample a set of SMILES from the final, trained model sampled_smiles = [] while len(sampled_smiles) < args.sample_size: sampled_smiles.extend(model.sample(args.batch_size, return_smiles=True)) # write sampled SMILES write_smiles(sampled_smiles, smiles_file)
39.309278
79
0.652417
import argparse import os import numpy as np import pandas as pd import random import sys import torch import torch.nn as nn import torch.optim as optim from torch.utils.data import DataLoader from tqdm import tqdm from rdkit import rdBase rdBase.DisableLog('rdApp.error') git_dir = os.path.expanduser("~/git/low-data-generative-models") python_dir = git_dir + "/python" os.chdir(python_dir) from models import RNN, OneHotRNN, EarlyStopping from datasets import SmilesDataset, SelfiesDataset, SmilesCollate from functions import decrease_learning_rate, print_update, track_loss, \ sample_smiles, write_smiles rgparse.ArgumentParser( description='Chemical structure language model interface') parser.add_argument('--smiles_file', type=str, help='location of the SMILES file to train on') parser.add_argument('--selfies', dest='selfies', action='store_true') parser.set_defaults(selfies=False) parser.add_argument('--output_dir', type=str, help='directory to save trained models to') parser.add_argument('--rnn_type', type=str, choices=['RNN', 'LSTM', 'GRU'], default='GRU', help='type of language model to train') parser.add_argument('--embedding_size', type=int, default=128, help='size of vocabulary embedding') parser.add_argument('--hidden_size', type=int, default=512, help='size of language model hidden layers') parser.add_argument('--n_layers', type=int, default=3, help='number of layers in language model') parser.add_argument('--dropout', type=float, default=0, help='amount of dropout (0-1) to apply to model') parser.add_argument('--bidirectional', type=bool, default=False, help='for LSTMs only, train a bidirectional model') parser.add_argument('--nonlinearity', type=str, choices=['tanh', 'relu'], default='tanh', help='for RNNs only, nonlinearity to use') parser.add_argument('--tie_weights', dest='tie_weights', help='require embedding/dense linear layers use the ' +\ 'same weights', action='store_true') parser.set_defaults(tie_weights=False) 
parser.add_argument('--learning_rate', type=float, default=0.001, help='initial learning rate') parser.add_argument('--learning_rate_decay', default=None, help='amount (0-1) to decrease learning rate by every ' +\ 'fixed number of steps') parser.add_argument('--learning_rate_decay_steps', default=10000, type=int, help='# of steps between learning rate decrements') parser.add_argument('--gradient_clip', default=None, help='amount to which to clip the gradients') parser.add_argument('--seed', type=int, default=0, help='seed for random number generator') parser.add_argument('--batch_size', type=int, default=128, help='batch size') parser.add_argument('--max_epochs', type=int, default=1000, help='maximum number of epochs to train for') parser.add_argument('--patience', type=int, default=100, help='patience for early stopping') parser.add_argument('--sample_idx', type=int, default=0, help='index of the model being trained (zero-indexed)') parser.add_argument('--sample_every_epochs', type=int, help='if set, sample SMILES from the trained model' + 'every n epochs') parser.add_argument('--sample_every_steps', type=int, help='if set, sample SMILES from the trained model' + 'every n steps') parser.add_argument('--log_every_epochs', type=int, help='log training/validation losses every n epochs') parser.add_argument('--log_every_steps', type=int, help='log training/validation losses every n steps') parser.add_argument('--sample_size', type=int, default=100000, help='size of each sample from the trained model') parser.add_argument('--pretrain_model', type=str, default=None, help='load parameters from a pretrained model') parser.add_argument('--vocab_file', type=str, default=None, help='file containing all tokens in vocabulary') parser.add_argument('--stop_if_exists', dest='stop_if_exists', action='store_true') parser.set_defaults(stop_if_exists=False) args = parser.parse_args() try: args.gradient_clip = float(args.gradient_clip) except (ValueError, TypeError): 
args.gradient_clip = None try: args.learning_rate_decay = float(args.learning_rate_decay) except (ValueError, TypeError): args.learning_rate_decay = None for arg in vars(args): print(arg, ": ", getattr(args, arg), "(", type(getattr(args, arg)), ")") if args.selfies: smiles_filename = "sample-" + str(args.sample_idx + 1) + "-SELFIES.smi" else: smiles_filename = "sample-" + str(args.sample_idx + 1) + "-SMILES.smi" smiles_file = os.path.join(args.output_dir, smiles_filename) if os.path.isfile(smiles_file) and args.stop_if_exists: print("output file " + smiles_file + " exists: stopping early") sys.exit() if not os.path.isdir(args.output_dir): try: os.makedirs(args.output_dir) except FileExistsError: pass eed(args.seed) random.seed(args.seed) np.random.seed(args.seed) if torch.cuda.is_available(): print("using cuda") torch.cuda.manual_seed_all(args.seed) if args.selfies: dataset = SelfiesDataset(selfies_file=args.smiles_file) else: dataset = SmilesDataset(smiles_file=args.smiles_file, vocab_file=args.vocab_file) loader = DataLoader(dataset, batch_size=args.batch_size, shuffle=True, drop_last=True, collate_fn=SmilesCollate(dataset.vocabulary)) if args.embedding_size > 0: model = RNN(vocabulary=dataset.vocabulary, rnn_type=args.rnn_type, embedding_size=args.embedding_size, hidden_size=args.hidden_size, n_layers=args.n_layers, dropout=args.dropout, bidirectional=args.bidirectional, tie_weights=args.tie_weights, nonlinearity=args.nonlinearity) else: model = OneHotRNN(vocabulary=dataset.vocabulary, rnn_type=args.rnn_type, hidden_size=args.hidden_size, n_layers=args.n_layers, dropout=args.dropout, bidirectional=args.bidirectional, nonlinearity=args.nonlinearity) if args.pretrain_model is not None: model.load_state_dict(torch.load(args.pretrain_model)) optimizer = optim.Adam(model.parameters(), betas=(0.9, 0.999), eps=1e-08, lr=args.learning_rate) early_stop = EarlyStopping(patience=args.patience) sched_filename = "training_schedule-" + str(args.sample_idx + 1) + ".csv" 
sched_file = os.path.join(args.output_dir, sched_filename) counter = 0 for epoch in range(args.max_epochs): for batch_idx, batch in tqdm(enumerate(loader), total=len(loader)): batch, lengths = batch counter += 1 log_p = model.loss(batch, lengths) loss = log_p.mean() optimizer.zero_grad() loss.backward() if args.gradient_clip is not None: nn.utils.clip_grad_norm_(model.parameters(), args.gradient_clip) optimizer.step() if args.learning_rate_decay is not None and \ counter % args.learning_rate_decay_steps == 0: decrease_learning_rate(optimizer, multiplier=args.learning_rate_decay) if args.log_every_steps is not None: if counter % args.log_every_steps == 0: print_update(model, dataset, epoch, batch_idx + 1, loss.item(), args.batch_size, selfies=args.selfies) track_loss(sched_file, model, dataset, epoch, counter, loss.item(), args.batch_size) if args.sample_every_steps is not None: if counter % args.sample_every_steps == 0: sample_smiles(args.output_dir, args.sample_idx, model, args.sample_size, epoch, counter) validation, lengths = dataset.get_validation(args.batch_size) validation_loss = model.loss(validation, lengths).mean().detach() model_filename = "model-" + str(args.sample_idx + 1) + ".pt" model_file = os.path.join(args.output_dir, model_filename) early_stop(validation_loss.item(), model, model_file, counter) if early_stop.stop: break if args.log_every_epochs is not None: print_update(model, dataset, epoch, 'NA', loss.item(), args.batch_size) track_loss(sched_file, model, dataset, epoch, counter, loss.item(), args.batch_size) if args.sample_every_epochs is not None: sample_smiles(args.output_dir, args.sample_idx, model, args.sample_size, epoch, counter) if early_stop.stop: break if args.log_every_epochs is not None or args.log_every_steps is not None: sched = pd.DataFrame({'epoch': [None], 'step': [early_stop.step_at_best], 'outcome': ['training loss'], 'value': [early_stop.best_loss]}) sched.to_csv(sched_file, index=False, mode='a', header=False) 
model.load_state_dict(torch.load(model_file)) model.eval() ile len(sampled_smiles) < args.sample_size: sampled_smiles.extend(model.sample(args.batch_size, return_smiles=True)) write_smiles(sampled_smiles, smiles_file)
true
true
f70ebc9c8b640280b26f6fd66deed90179dd2995
31,646
py
Python
pytype/analyze.py
Hirni-Meshram4/pytype
24b04237c15822b26f51c545646e4e5fff1ab709
[ "Apache-2.0" ]
null
null
null
pytype/analyze.py
Hirni-Meshram4/pytype
24b04237c15822b26f51c545646e4e5fff1ab709
[ "Apache-2.0" ]
null
null
null
pytype/analyze.py
Hirni-Meshram4/pytype
24b04237c15822b26f51c545646e4e5fff1ab709
[ "Apache-2.0" ]
null
null
null
"""Code for checking and inferring types.""" import collections import logging import re import subprocess from typing import Any, Dict, Union from pytype import abstract from pytype import abstract_utils from pytype import convert_structural from pytype import debug from pytype import function from pytype import metrics from pytype import special_builtins from pytype import state as frame_state from pytype import vm from pytype.overlays import typing_overlay from pytype.pytd import builtins from pytype.pytd import escape from pytype.pytd import optimize from pytype.pytd import pytd from pytype.pytd import pytd_utils from pytype.pytd import visitors from pytype.typegraph import cfg log = logging.getLogger(__name__) # Most interpreter functions (including lambdas) need to be analyzed as # stand-alone functions. The exceptions are comprehensions and generators, which # have names like "<listcomp>" and "<genexpr>". _SKIP_FUNCTION_RE = re.compile("<(?!lambda).+>$") CallRecord = collections.namedtuple( "CallRecord", ["node", "function", "signatures", "positional_arguments", "keyword_arguments", "return_value"]) # How deep to follow call chains: INIT_MAXIMUM_DEPTH = 4 # during module loading MAXIMUM_DEPTH = 3 # during non-quick analysis QUICK_CHECK_MAXIMUM_DEPTH = 2 # during quick checking QUICK_INFER_MAXIMUM_DEPTH = 1 # during quick inference class _Initializing: pass class CallTracer(vm.VirtualMachine): """Virtual machine that records all function calls. Attributes: exitpoint: A CFG node representing the program exit. Needs to be set before analyze_types. """ _CONSTRUCTORS = ("__new__", "__init__") def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self._unknowns = {} self._calls = set() self._method_calls = set() # Used by init_class. self._instance_cache: Dict[Any, Union[_Initializing, cfg.Variable]] = {} # Used by call_init. Can differ from _instance_cache because we also call # __init__ on classes not initialized via init_class. 
self._initialized_instances = set() self._interpreter_functions = [] self._interpreter_classes = [] self._analyzed_functions = set() self._analyzed_classes = set() self._generated_classes = {} self.exitpoint = None def create_varargs(self, node): value = abstract.Instance(self.convert.tuple_type, self) value.merge_instance_type_parameter( node, abstract_utils.T, self.convert.create_new_unknown(node)) return value.to_variable(node) def create_kwargs(self, node): key_type = self.convert.primitive_class_instances[str].to_variable(node) value_type = self.convert.create_new_unknown(node) kwargs = abstract.Instance(self.convert.dict_type, self) kwargs.merge_instance_type_parameter(node, abstract_utils.K, key_type) kwargs.merge_instance_type_parameter(node, abstract_utils.V, value_type) return kwargs.to_variable(node) def create_method_arguments(self, node, method, use_defaults=False): """Create arguments for the given method. Creates Unknown objects as arguments for the given method. Note that we don't need to take parameter annotations into account as InterpreterFunction.call() will take care of that. Args: node: The current node. method: An abstract.InterpreterFunction. use_defaults: Whether to use parameter defaults for arguments. When True, unknown arguments are created with force=False, as it is fine to use Unsolvable rather than Unknown objects for type-checking defaults. Returns: A tuple of a node and a function.Args object. 
""" args = [] num_posargs = method.argcount(node) num_posargs_no_default = num_posargs - len(method.defaults) for i in range(num_posargs): default_idx = i - num_posargs_no_default if use_defaults and default_idx >= 0: arg = method.defaults[default_idx] else: arg = self.convert.create_new_unknown(node, force=not use_defaults) args.append(arg) kws = {} for key in method.signature.kwonly_params: if use_defaults and key in method.kw_defaults: kws[key] = method.kw_defaults[key] else: kws[key] = self.convert.create_new_unknown(node, force=not use_defaults) starargs = self.create_varargs(node) if method.has_varargs() else None starstarargs = self.create_kwargs(node) if method.has_kwargs() else None return node, function.Args(posargs=tuple(args), namedargs=kws, starargs=starargs, starstarargs=starstarargs) def call_function_with_args(self, node, val, args): """Call a function. Args: node: The given node. val: A cfg.Binding containing the function. args: A function.Args object. Returns: A tuple of (1) a node and (2) a cfg.Variable of the return value. 
""" fvar = val.AssignToNewVariable(node) with val.data.record_calls(): new_node, ret = self.call_function_in_frame(node, fvar, *args) return new_node, ret def call_function_in_frame(self, node, var, args, kwargs, starargs, starstarargs): frame = frame_state.SimpleFrame(node=node) self.push_frame(frame) log.info("Analyzing %r", [v.name for v in var.data]) state = frame_state.FrameState.init(node, self) state, ret = self.call_function_with_state( state, var, args, kwargs, starargs, starstarargs) self.pop_frame(frame) return state.node, ret def _maybe_fix_classmethod_cls_arg(self, node, cls, func, args): sig = func.signature if (args.posargs and sig.param_names and (sig.param_names[0] not in sig.annotations)): # fix "cls" parameter return args._replace( posargs=(cls.AssignToNewVariable(node),) + args.posargs[1:]) else: return args def maybe_analyze_method(self, node, val, cls=None): method = val.data fname = val.data.name if isinstance(method, abstract.INTERPRETER_FUNCTION_TYPES): self._analyzed_functions.add(method.get_first_opcode()) if (not self.options.analyze_annotated and (method.signature.has_return_annotation or method.has_overloads) and fname.rsplit(".", 1)[-1] not in self._CONSTRUCTORS): log.info("%r has annotations, not analyzing further.", fname) else: for f in method.iter_signature_functions(): node, args = self.create_method_arguments(node, f) if f.is_classmethod and cls: args = self._maybe_fix_classmethod_cls_arg(node, cls, f, args) node, _ = self.call_function_with_args(node, val, args) return node def _call_with_fake_args(self, node0, funcv): """Attempt to call the given function with made-up arguments.""" # TODO(tsudol): If expand this beyond __init__, need to handle # DictKeyMissing nodes = [] rets = [] for funcb in funcv.bindings: func = funcb.data log.info("Trying %s with fake arguments", func) if isinstance(func, abstract.INTERPRETER_FUNCTION_TYPES): node1, args = self.create_method_arguments(node0, func) # Once the args are generated, try 
calling the function. # call_function will check fallback_to_unsolvable if a DictKeyMissing or # FailedFunctionCall error is raised when the target function is called. # DictKeyMissing doesn't trigger call_with_fake_args, so that shouldn't # be raised again, and generating fake arguments should avoid any # FailedFunctionCall errors. To prevent an infinite recursion loop, set # fallback_to_unsolvable to False just in case. # This means any additional errors that may be raised will be passed to # the call_function that called this method in the first place. node2, ret = self.call_function(node1, funcb.AssignToNewVariable(), args, fallback_to_unsolvable=False) nodes.append(node2) rets.append(ret) if nodes: ret = self.join_variables(node0, rets) node = self.join_cfg_nodes(nodes) if ret.bindings: return node, ret else: node = node0 log.info("Unable to generate fake arguments for %s", funcv) return node, self.new_unsolvable(node) def analyze_method_var(self, node0, name, var, cls=None): log.info("Analyzing %s", name) node1 = node0.ConnectNew(name) for val in var.bindings: node2 = self.maybe_analyze_method(node1, val, cls) node2.ConnectTo(node0) return node0 def bind_method(self, node, name, methodvar, instance_var): bound = self.program.NewVariable() for m in methodvar.Data(node): if isinstance(m, special_builtins.ClassMethodInstance): m = m.func.data[0] is_cls = True else: is_cls = (m.isinstance_InterpreterFunction() and m.is_classmethod) bound.AddBinding(m.property_get(instance_var, is_cls), [], node) return bound def _instantiate_binding(self, node0, cls, container): """Instantiate a class binding.""" node1, new = cls.data.get_own_new(node0, cls) if not new or ( any(not isinstance(f, abstract.InterpreterFunction) for f in new.data)): # This assumes that any inherited __new__ method defined in a pyi file # returns an instance of the current class. 
return node0, cls.data.instantiate(node0, container=container) instance = self.program.NewVariable() nodes = [] for b in new.bindings: self._analyzed_functions.add(b.data.get_first_opcode()) node2, args = self.create_method_arguments(node1, b.data) args = self._maybe_fix_classmethod_cls_arg(node0, cls, b.data, args) node3 = node2.ConnectNew() node4, ret = self.call_function_with_args(node3, b, args) instance.PasteVariable(ret) nodes.append(node4) return self.join_cfg_nodes(nodes), instance def _instantiate_var(self, node, clsv, container): """Build an (dummy) instance from a class, for analyzing it.""" n = self.program.NewVariable() for cls in clsv.Bindings(node, strict=False): node, var = self._instantiate_binding(node, cls, container) n.PasteVariable(var) return node, n def _mark_maybe_missing_members(self, values): """Set maybe_missing_members to True on these values and their type params. Args: values: A list of BaseValue objects. On every instance among the values, recursively set maybe_missing_members to True on the instance and its type parameters. """ values = list(values) seen = set() while values: v = values.pop(0) if v not in seen: seen.add(v) if isinstance(v, abstract.SimpleValue): v.maybe_missing_members = True for child in v.instance_type_parameters.values(): values.extend(child.data) def init_class(self, node, cls, container=None, extra_key=None): """Instantiate a class, and also call __init__. Calling __init__ can be expensive, so this method caches its created instances. If you don't need __init__ called, use cls.instantiate instead. Args: node: The current node. cls: The class to instantiate. container: Optionally, a container to pass to the class's instantiate() method, so that type parameters in the container's template are instantiated to TypeParameterInstance. extra_key: Optionally, extra information about the location at which the instantion occurs. 
By default, this method keys on the current opcode and the class, which sometimes isn't enough to disambiguate callers that shouldn't get back the same cached instance. Returns: A tuple of node and instance variable. """ key = (self.frame and self.frame.current_opcode, extra_key, cls) instance = self._instance_cache.get(key) if not instance or isinstance(instance, _Initializing): clsvar = cls.to_variable(node) node, instance = self._instantiate_var(node, clsvar, container) if key in self._instance_cache: # We've encountered a recursive pattern such as # class A: # def __init__(self, x: "A"): ... # Calling __init__ again would lead to an infinite loop, so # we instead create an incomplete instance that will be # overwritten later. Note that we have to create a new # instance rather than using the one that we're already in # the process of initializing - otherwise, setting # maybe_missing_members to True would cause pytype to ignore # all attribute errors on self in __init__. self._mark_maybe_missing_members(instance.data) else: self._instance_cache[key] = _Initializing() node = self.call_init(node, instance) self._instance_cache[key] = instance return node, instance def _call_method(self, node, binding, method_name): node, method = self.attribute_handler.get_attribute( node, binding.data.get_class(), method_name, binding) if method: bound_method = self.bind_method( node, method_name, method, binding.AssignToNewVariable()) node = self.analyze_method_var(node, method_name, bound_method) return node def _call_init_on_binding(self, node, b): if isinstance(b.data, abstract.SimpleValue): for param in b.data.instance_type_parameters.values(): node = self.call_init(node, param) node = self._call_method(node, b, "__init__") cls = b.data.get_class() if isinstance(cls, abstract.InterpreterClass): # Call any additional initalizers the class has registered. 
for method in cls.additional_init_methods: node = self._call_method(node, b, method) return node def call_init(self, node, instance): # Call __init__ on each binding. for b in instance.bindings: if b.data in self._initialized_instances: continue self._initialized_instances.add(b.data) node = self._call_init_on_binding(node, b) return node def reinitialize_if_initialized(self, node, instance): if instance in self._initialized_instances: self._call_init_on_binding(node, instance.to_binding(node)) def analyze_class(self, node, val): self._analyzed_classes.add(val.data) node, instance = self.init_class(node, val.data) good_instances = [b for b in instance.bindings if val.data == b.data.cls] if not good_instances: # __new__ returned something that's not an instance of our class. instance = val.data.instantiate(node) node = self.call_init(node, instance) elif len(good_instances) != len(instance.bindings): # __new__ returned some extra possibilities we don't need. instance = self.join_bindings(node, good_instances) for instance_value in instance.data: val.data.register_canonical_instance(instance_value) methods = sorted(val.data.members.items()) while methods: name, methodvar = methods.pop(0) if name in self._CONSTRUCTORS: continue # We already called this method during initialization. for v in methodvar.data: if (self.options.bind_properties and isinstance(v, special_builtins.PropertyInstance)): for m in (v.fget, v.fset, v.fdel): if m: methods.insert(0, (name, m)) b = self.bind_method(node, name, methodvar, instance) node = self.analyze_method_var(node, name, b, val) return node def analyze_function(self, node0, val): if val.data.is_attribute_of_class: # We'll analyze this function as part of a class. 
log.info("Analyze functions: Skipping class method %s", val.data.name) else: node1 = node0.ConnectNew(val.data.name) node2 = self.maybe_analyze_method(node1, val) node2.ConnectTo(node0) return node0 def _should_analyze_as_interpreter_function(self, data): # We record analyzed functions by opcode rather than function object. The # two ways of recording are equivalent except for closures, which are # re-generated when the variables they close over change, but we don't want # to re-analyze them. return (isinstance(data, abstract.InterpreterFunction) and not data.is_overload and not data.is_class_builder and data.get_first_opcode() not in self._analyzed_functions and not _SKIP_FUNCTION_RE.search(data.name)) def analyze_toplevel(self, node, defs): for name, var in sorted(defs.items()): # sort, for determinicity if not self._is_typing_member(name, var): for value in var.bindings: if isinstance(value.data, abstract.InterpreterClass): new_node = self.analyze_class(node, value) elif (isinstance(value.data, abstract.INTERPRETER_FUNCTION_TYPES) and not value.data.is_overload): new_node = self.analyze_function(node, value) else: continue if new_node is not node: new_node.ConnectTo(node) # Now go through all functions and classes we haven't analyzed yet. # These are typically hidden under a decorator. # Go through classes first so that the `is_attribute_of_class` will # be set for all functions in class. 
for c in self._interpreter_classes: for value in c.bindings: if (isinstance(value.data, abstract.InterpreterClass) and value.data not in self._analyzed_classes): node = self.analyze_class(node, value) for f in self._interpreter_functions: for value in f.bindings: if self._should_analyze_as_interpreter_function(value.data): node = self.analyze_function(node, value) return node def analyze(self, node, defs, maximum_depth): assert not self.frame self.maximum_depth = maximum_depth self._analyzing = True node = node.ConnectNew(name="Analyze") return self.analyze_toplevel(node, defs) def trace_unknown(self, name, unknown_binding): self._unknowns[name] = unknown_binding def trace_call(self, node, func, sigs, posargs, namedargs, result): """Add an entry into the call trace. Args: node: The CFG node right after this function call. func: A cfg.Binding of a function that was called. sigs: The signatures that the function might have been called with. posargs: The positional arguments, an iterable over cfg.Value. namedargs: The keyword arguments, a dict mapping str to cfg.Value. result: A Variable of the possible result values. """ log.debug("Logging call to %r with %d args, return %r", func, len(posargs), result) args = tuple(posargs) kwargs = tuple((namedargs or {}).items()) record = CallRecord(node, func, sigs, args, kwargs, result) if isinstance(func.data, abstract.BoundPyTDFunction): self._method_calls.add(record) elif isinstance(func.data, abstract.PyTDFunction): self._calls.add(record) def trace_functiondef(self, f): self._interpreter_functions.append(f) def trace_classdef(self, c): self._interpreter_classes.append(c) def trace_namedtuple(self, nt): # All namedtuple instances with the same name are equal, so it's fine to # overwrite previous instances. 
self._generated_classes[nt.name] = nt def pytd_classes_for_unknowns(self): classes = [] for name, val in self._unknowns.items(): if val in val.variable.Filter(self.exitpoint, strict=False): classes.append(val.data.to_structural_def(self.exitpoint, name)) return classes def pytd_for_types(self, defs): # If a variable is annotated, we'll always output that type. annotated_names = set() data = [] pytd_convert = self.convert.pytd_convert annots = abstract_utils.get_annotations_dict(defs) for name, t in pytd_convert.annotations_to_instance_types( self.exitpoint, annots): annotated_names.add(name) data.append(pytd.Constant(name, t)) for name, var in defs.items(): if (name in abstract_utils.TOP_LEVEL_IGNORE or name in annotated_names or self._is_typing_member(name, var)): continue options = var.FilteredData(self.exitpoint, strict=False) if (len(options) > 1 and not all(isinstance(o, abstract.FUNCTION_TYPES) for o in options)): if all(isinstance(o, (abstract.ParameterizedClass, abstract.TypeParameter, abstract.Union)) for o in options ) and self.options.preserve_union_macros: # type alias data.append(pytd_utils.JoinTypes(t.to_pytd_def(self.exitpoint, name) for t in options)) else: # It's ambiguous whether this is a type, a function or something # else, so encode it as a constant. 
combined_types = pytd_utils.JoinTypes(t.to_type(self.exitpoint) for t in options) data.append(pytd.Constant(name, combined_types)) elif options: for option in options: try: d = option.to_pytd_def(self.exitpoint, name) # Deep definition except NotImplementedError: d = option.to_type(self.exitpoint) # Type only if isinstance(d, pytd.NothingType): if isinstance(option, abstract.Empty): d = pytd.AnythingType() else: assert isinstance(option, typing_overlay.NoReturn) if isinstance(d, pytd.Type) and not isinstance(d, pytd.TypeParameter): data.append(pytd.Constant(name, d)) else: data.append(d) else: log.error("No visible options for %s", name) data.append(pytd.Constant(name, pytd.AnythingType())) return pytd_utils.WrapTypeDeclUnit("inferred", data) @staticmethod def _call_traces_to_function(call_traces, name_transform=lambda x: x): funcs = collections.defaultdict(pytd_utils.OrderedSet) for node, func, sigs, args, kws, retvar in call_traces: # The lengths may be different in the presence of optional and kw args. 
arg_names = max((sig.get_positional_names() for sig in sigs), key=len) for i in range(len(arg_names)): if not isinstance(func.data, abstract.BoundFunction) or i > 0: arg_names[i] = function.argname(i) arg_types = (a.data.to_type(node) for a in args) ret = pytd_utils.JoinTypes(t.to_type(node) for t in retvar.data) starargs = None starstarargs = None funcs[func.data.name].add(pytd.Signature( tuple(pytd.Parameter(n, t, False, False, None) for n, t in zip(arg_names, arg_types)) + tuple(pytd.Parameter(name, a.data.to_type(node), False, False, None) for name, a in kws), starargs, starstarargs, ret, exceptions=(), template=())) functions = [] for name, signatures in funcs.items(): functions.append(pytd.Function(name_transform(name), tuple(signatures), pytd.MethodTypes.METHOD)) return functions def _is_typing_member(self, name, var): for module_name in ("typing", "typing_extensions"): if module_name not in self.loaded_overlays: continue module = self.loaded_overlays[module_name].get_module(name) if name in module.members and module.members[name].data == var.data: return True return False def pytd_functions_for_call_traces(self): return self._call_traces_to_function(self._calls, escape.pack_partial) def pytd_classes_for_call_traces(self): class_to_records = collections.defaultdict(list) for call_record in self._method_calls: args = call_record.positional_arguments if not any(isinstance(a.data, abstract.Unknown) for a in args): # We don't need to record call signatures that don't involve # unknowns - there's nothing to solve for. continue cls = args[0].data.get_class() if isinstance(cls, abstract.PyTDClass): class_to_records[cls].append(call_record) classes = [] for cls, call_records in class_to_records.items(): full_name = cls.module + "." 
+ cls.name if cls.module else cls.name classes.append(pytd.Class( name=escape.pack_partial(full_name), metaclass=None, parents=(pytd.NamedType("builtins.object"),), # not used in solver methods=tuple(self._call_traces_to_function(call_records)), constants=(), classes=(), decorators=(), slots=None, template=(), )) return classes def pytd_classes_for_namedtuple_instances(self): return tuple(v.generate_ast() for v in self._generated_classes.values()) def compute_types(self, defs): classes = (tuple(self.pytd_classes_for_unknowns()) + tuple(self.pytd_classes_for_call_traces()) + self.pytd_classes_for_namedtuple_instances()) functions = tuple(self.pytd_functions_for_call_traces()) aliases = () # aliases are instead recorded as constants ty = pytd_utils.Concat( self.pytd_for_types(defs), pytd_utils.CreateModule("unknowns", classes=classes, functions=functions, aliases=aliases)) ty = ty.Visit(optimize.CombineReturnsAndExceptions()) ty = ty.Visit(optimize.PullInMethodClasses()) ty = ty.Visit(visitors.DefaceUnresolved( [ty, self.loader.concat_all()], escape.UNKNOWN)) return ty.Visit(visitors.AdjustTypeParameters()) def _check_return(self, node, actual, formal): if not self.options.report_errors: return True views = abstract_utils.get_views([actual], node) # Check for typevars in the return value first, since bad_matches # expects not to get any. 
bad = [view for view in views if actual in view and view[actual].data.formal] if not bad: bad = self.matcher(node).bad_matches(actual, formal) if bad: self.errorlog.bad_return_type( self.frames, node, formal, actual, bad) return not bad def check_types(src, filename, errorlog, options, loader, deep=True, init_maximum_depth=INIT_MAXIMUM_DEPTH, maximum_depth=None, **kwargs): """Verify the Python code.""" tracer = CallTracer(errorlog=errorlog, options=options, generate_unknowns=False, loader=loader, **kwargs) loc, defs = tracer.run_program(src, filename, init_maximum_depth) snapshotter = metrics.get_metric("memory", metrics.Snapshot) snapshotter.take_snapshot("analyze:check_types:tracer") if deep: if maximum_depth is None: maximum_depth = ( QUICK_CHECK_MAXIMUM_DEPTH if options.quick else MAXIMUM_DEPTH) tracer.analyze(loc, defs, maximum_depth=maximum_depth) snapshotter.take_snapshot("analyze:check_types:post") _maybe_output_debug(options, tracer.program) def infer_types(src, errorlog, options, loader, filename=None, deep=True, init_maximum_depth=INIT_MAXIMUM_DEPTH, show_library_calls=False, maximum_depth=None, tracer_vm=None, **kwargs): """Given Python source return its types. Args: src: A string containing Python source code. errorlog: Where error messages go. Instance of errors.ErrorLog. options: config.Options object loader: A load_pytd.Loader instance to load PYI information. filename: Filename of the program we're parsing. deep: If True, analyze all functions, even the ones not called by the main execution flow. init_maximum_depth: Depth of analysis during module loading. show_library_calls: If True, call traces are kept in the output. maximum_depth: Depth of the analysis. Default: unlimited. tracer_vm: An instance of CallTracer, in case the caller wants to instantiate and retain the vm used for type inference. 
**kwargs: Additional parameters to pass to vm.VirtualMachine Returns: A tuple of (ast: TypeDeclUnit, builtins: TypeDeclUnit) Raises: AssertionError: In case of a bad parameter combination. """ # If the caller has passed in a vm, use that. if tracer_vm: assert isinstance(tracer_vm, CallTracer) tracer = tracer_vm else: tracer = CallTracer(errorlog=errorlog, options=options, generate_unknowns=options.protocols, store_all_calls=not deep, loader=loader, **kwargs) loc, defs = tracer.run_program(src, filename, init_maximum_depth) log.info("===Done running definitions and module-level code===") snapshotter = metrics.get_metric("memory", metrics.Snapshot) snapshotter.take_snapshot("analyze:infer_types:tracer") if deep: if maximum_depth is None: if not options.quick: maximum_depth = MAXIMUM_DEPTH elif options.analyze_annotated: # Since there's no point in analyzing annotated functions for inference, # the presence of this option means that the user wants checking, too. maximum_depth = QUICK_CHECK_MAXIMUM_DEPTH else: maximum_depth = QUICK_INFER_MAXIMUM_DEPTH tracer.exitpoint = tracer.analyze(loc, defs, maximum_depth) else: tracer.exitpoint = loc snapshotter.take_snapshot("analyze:infer_types:post") ast = tracer.compute_types(defs) ast = tracer.loader.resolve_ast(ast) if tracer.has_unknown_wildcard_imports or any( a in defs for a in abstract_utils.DYNAMIC_ATTRIBUTE_MARKERS): if "__getattr__" not in ast: ast = pytd_utils.Concat( ast, builtins.GetDefaultAst(options.python_version)) # If merged with other if statement, triggers a ValueError: Unresolved class # when attempts to load from the protocols file if options.protocols: protocols_pytd = tracer.loader.import_name("protocols") else: protocols_pytd = None builtins_pytd = tracer.loader.concat_all() # Insert type parameters, where appropriate ast = ast.Visit(visitors.CreateTypeParametersForSignatures()) if options.protocols: log.info("=========== PyTD to solve =============\n%s", pytd_utils.Print(ast)) ast = 
convert_structural.convert_pytd(ast, builtins_pytd, protocols_pytd) elif not show_library_calls: log.info("Solving is turned off. Discarding call traces.") # Rename remaining "~unknown" to "?" ast = ast.Visit(visitors.RemoveUnknownClasses()) # Remove "~list" etc.: ast = convert_structural.extract_local(ast) _maybe_output_debug(options, tracer.program) return ast, builtins_pytd def _maybe_output_debug(options, program): """Maybe emit debugging output.""" if options.output_cfg or options.output_typegraph: dot = debug.program_to_dot(program, set([]), bool(options.output_cfg)) svg_file = options.output_cfg or options.output_typegraph with subprocess.Popen( ["/usr/bin/dot", "-T", "svg", "-o", svg_file], stdin=subprocess.PIPE, universal_newlines=True) as proc: (_, stderr) = proc.communicate(dot) if stderr: log.info("Failed to create %s: %s", svg_file, stderr) if options.output_debug: text = debug.program_to_text(program) if options.output_debug == "-": log.info("=========== Program Dump =============\n%s", text) else: with options.open_function(options.output_debug, "w") as fi: fi.write(text)
41.694335
80
0.684573
import collections import logging import re import subprocess from typing import Any, Dict, Union from pytype import abstract from pytype import abstract_utils from pytype import convert_structural from pytype import debug from pytype import function from pytype import metrics from pytype import special_builtins from pytype import state as frame_state from pytype import vm from pytype.overlays import typing_overlay from pytype.pytd import builtins from pytype.pytd import escape from pytype.pytd import optimize from pytype.pytd import pytd from pytype.pytd import pytd_utils from pytype.pytd import visitors from pytype.typegraph import cfg log = logging.getLogger(__name__) _SKIP_FUNCTION_RE = re.compile("<(?!lambda).+>$") CallRecord = collections.namedtuple( "CallRecord", ["node", "function", "signatures", "positional_arguments", "keyword_arguments", "return_value"]) INIT_MAXIMUM_DEPTH = 4 MAXIMUM_DEPTH = 3 QUICK_CHECK_MAXIMUM_DEPTH = 2 QUICK_INFER_MAXIMUM_DEPTH = 1 class _Initializing: pass class CallTracer(vm.VirtualMachine): _CONSTRUCTORS = ("__new__", "__init__") def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self._unknowns = {} self._calls = set() self._method_calls = set() self._instance_cache: Dict[Any, Union[_Initializing, cfg.Variable]] = {} self._initialized_instances = set() self._interpreter_functions = [] self._interpreter_classes = [] self._analyzed_functions = set() self._analyzed_classes = set() self._generated_classes = {} self.exitpoint = None def create_varargs(self, node): value = abstract.Instance(self.convert.tuple_type, self) value.merge_instance_type_parameter( node, abstract_utils.T, self.convert.create_new_unknown(node)) return value.to_variable(node) def create_kwargs(self, node): key_type = self.convert.primitive_class_instances[str].to_variable(node) value_type = self.convert.create_new_unknown(node) kwargs = abstract.Instance(self.convert.dict_type, self) kwargs.merge_instance_type_parameter(node, 
abstract_utils.K, key_type) kwargs.merge_instance_type_parameter(node, abstract_utils.V, value_type) return kwargs.to_variable(node) def create_method_arguments(self, node, method, use_defaults=False): args = [] num_posargs = method.argcount(node) num_posargs_no_default = num_posargs - len(method.defaults) for i in range(num_posargs): default_idx = i - num_posargs_no_default if use_defaults and default_idx >= 0: arg = method.defaults[default_idx] else: arg = self.convert.create_new_unknown(node, force=not use_defaults) args.append(arg) kws = {} for key in method.signature.kwonly_params: if use_defaults and key in method.kw_defaults: kws[key] = method.kw_defaults[key] else: kws[key] = self.convert.create_new_unknown(node, force=not use_defaults) starargs = self.create_varargs(node) if method.has_varargs() else None starstarargs = self.create_kwargs(node) if method.has_kwargs() else None return node, function.Args(posargs=tuple(args), namedargs=kws, starargs=starargs, starstarargs=starstarargs) def call_function_with_args(self, node, val, args): fvar = val.AssignToNewVariable(node) with val.data.record_calls(): new_node, ret = self.call_function_in_frame(node, fvar, *args) return new_node, ret def call_function_in_frame(self, node, var, args, kwargs, starargs, starstarargs): frame = frame_state.SimpleFrame(node=node) self.push_frame(frame) log.info("Analyzing %r", [v.name for v in var.data]) state = frame_state.FrameState.init(node, self) state, ret = self.call_function_with_state( state, var, args, kwargs, starargs, starstarargs) self.pop_frame(frame) return state.node, ret def _maybe_fix_classmethod_cls_arg(self, node, cls, func, args): sig = func.signature if (args.posargs and sig.param_names and (sig.param_names[0] not in sig.annotations)): return args._replace( posargs=(cls.AssignToNewVariable(node),) + args.posargs[1:]) else: return args def maybe_analyze_method(self, node, val, cls=None): method = val.data fname = val.data.name if isinstance(method, 
abstract.INTERPRETER_FUNCTION_TYPES): self._analyzed_functions.add(method.get_first_opcode()) if (not self.options.analyze_annotated and (method.signature.has_return_annotation or method.has_overloads) and fname.rsplit(".", 1)[-1] not in self._CONSTRUCTORS): log.info("%r has annotations, not analyzing further.", fname) else: for f in method.iter_signature_functions(): node, args = self.create_method_arguments(node, f) if f.is_classmethod and cls: args = self._maybe_fix_classmethod_cls_arg(node, cls, f, args) node, _ = self.call_function_with_args(node, val, args) return node def _call_with_fake_args(self, node0, funcv): nodes = [] rets = [] for funcb in funcv.bindings: func = funcb.data log.info("Trying %s with fake arguments", func) if isinstance(func, abstract.INTERPRETER_FUNCTION_TYPES): node1, args = self.create_method_arguments(node0, func) node2, ret = self.call_function(node1, funcb.AssignToNewVariable(), args, fallback_to_unsolvable=False) nodes.append(node2) rets.append(ret) if nodes: ret = self.join_variables(node0, rets) node = self.join_cfg_nodes(nodes) if ret.bindings: return node, ret else: node = node0 log.info("Unable to generate fake arguments for %s", funcv) return node, self.new_unsolvable(node) def analyze_method_var(self, node0, name, var, cls=None): log.info("Analyzing %s", name) node1 = node0.ConnectNew(name) for val in var.bindings: node2 = self.maybe_analyze_method(node1, val, cls) node2.ConnectTo(node0) return node0 def bind_method(self, node, name, methodvar, instance_var): bound = self.program.NewVariable() for m in methodvar.Data(node): if isinstance(m, special_builtins.ClassMethodInstance): m = m.func.data[0] is_cls = True else: is_cls = (m.isinstance_InterpreterFunction() and m.is_classmethod) bound.AddBinding(m.property_get(instance_var, is_cls), [], node) return bound def _instantiate_binding(self, node0, cls, container): node1, new = cls.data.get_own_new(node0, cls) if not new or ( any(not isinstance(f, 
abstract.InterpreterFunction) for f in new.data)): return node0, cls.data.instantiate(node0, container=container) instance = self.program.NewVariable() nodes = [] for b in new.bindings: self._analyzed_functions.add(b.data.get_first_opcode()) node2, args = self.create_method_arguments(node1, b.data) args = self._maybe_fix_classmethod_cls_arg(node0, cls, b.data, args) node3 = node2.ConnectNew() node4, ret = self.call_function_with_args(node3, b, args) instance.PasteVariable(ret) nodes.append(node4) return self.join_cfg_nodes(nodes), instance def _instantiate_var(self, node, clsv, container): n = self.program.NewVariable() for cls in clsv.Bindings(node, strict=False): node, var = self._instantiate_binding(node, cls, container) n.PasteVariable(var) return node, n def _mark_maybe_missing_members(self, values): values = list(values) seen = set() while values: v = values.pop(0) if v not in seen: seen.add(v) if isinstance(v, abstract.SimpleValue): v.maybe_missing_members = True for child in v.instance_type_parameters.values(): values.extend(child.data) def init_class(self, node, cls, container=None, extra_key=None): key = (self.frame and self.frame.current_opcode, extra_key, cls) instance = self._instance_cache.get(key) if not instance or isinstance(instance, _Initializing): clsvar = cls.to_variable(node) node, instance = self._instantiate_var(node, clsvar, container) if key in self._instance_cache: # class A: # def __init__(self, x: "A"): ... # Calling __init__ again would lead to an infinite loop, so # we instead create an incomplete instance that will be # overwritten later. 
Note that we have to create a new # instance rather than using the one that we're already in self._mark_maybe_missing_members(instance.data) else: self._instance_cache[key] = _Initializing() node = self.call_init(node, instance) self._instance_cache[key] = instance return node, instance def _call_method(self, node, binding, method_name): node, method = self.attribute_handler.get_attribute( node, binding.data.get_class(), method_name, binding) if method: bound_method = self.bind_method( node, method_name, method, binding.AssignToNewVariable()) node = self.analyze_method_var(node, method_name, bound_method) return node def _call_init_on_binding(self, node, b): if isinstance(b.data, abstract.SimpleValue): for param in b.data.instance_type_parameters.values(): node = self.call_init(node, param) node = self._call_method(node, b, "__init__") cls = b.data.get_class() if isinstance(cls, abstract.InterpreterClass): for method in cls.additional_init_methods: node = self._call_method(node, b, method) return node def call_init(self, node, instance): for b in instance.bindings: if b.data in self._initialized_instances: continue self._initialized_instances.add(b.data) node = self._call_init_on_binding(node, b) return node def reinitialize_if_initialized(self, node, instance): if instance in self._initialized_instances: self._call_init_on_binding(node, instance.to_binding(node)) def analyze_class(self, node, val): self._analyzed_classes.add(val.data) node, instance = self.init_class(node, val.data) good_instances = [b for b in instance.bindings if val.data == b.data.cls] if not good_instances: instance = val.data.instantiate(node) node = self.call_init(node, instance) elif len(good_instances) != len(instance.bindings): # __new__ returned some extra possibilities we don't need. 
instance = self.join_bindings(node, good_instances) for instance_value in instance.data: val.data.register_canonical_instance(instance_value) methods = sorted(val.data.members.items()) while methods: name, methodvar = methods.pop(0) if name in self._CONSTRUCTORS: continue for v in methodvar.data: if (self.options.bind_properties and isinstance(v, special_builtins.PropertyInstance)): for m in (v.fget, v.fset, v.fdel): if m: methods.insert(0, (name, m)) b = self.bind_method(node, name, methodvar, instance) node = self.analyze_method_var(node, name, b, val) return node def analyze_function(self, node0, val): if val.data.is_attribute_of_class: log.info("Analyze functions: Skipping class method %s", val.data.name) else: node1 = node0.ConnectNew(val.data.name) node2 = self.maybe_analyze_method(node1, val) node2.ConnectTo(node0) return node0 def _should_analyze_as_interpreter_function(self, data): # We record analyzed functions by opcode rather than function object. The # two ways of recording are equivalent except for closures, which are # re-generated when the variables they close over change, but we don't want return (isinstance(data, abstract.InterpreterFunction) and not data.is_overload and not data.is_class_builder and data.get_first_opcode() not in self._analyzed_functions and not _SKIP_FUNCTION_RE.search(data.name)) def analyze_toplevel(self, node, defs): for name, var in sorted(defs.items()): if not self._is_typing_member(name, var): for value in var.bindings: if isinstance(value.data, abstract.InterpreterClass): new_node = self.analyze_class(node, value) elif (isinstance(value.data, abstract.INTERPRETER_FUNCTION_TYPES) and not value.data.is_overload): new_node = self.analyze_function(node, value) else: continue if new_node is not node: new_node.ConnectTo(node) # These are typically hidden under a decorator. # Go through classes first so that the `is_attribute_of_class` will # be set for all functions in class. 
for c in self._interpreter_classes: for value in c.bindings: if (isinstance(value.data, abstract.InterpreterClass) and value.data not in self._analyzed_classes): node = self.analyze_class(node, value) for f in self._interpreter_functions: for value in f.bindings: if self._should_analyze_as_interpreter_function(value.data): node = self.analyze_function(node, value) return node def analyze(self, node, defs, maximum_depth): assert not self.frame self.maximum_depth = maximum_depth self._analyzing = True node = node.ConnectNew(name="Analyze") return self.analyze_toplevel(node, defs) def trace_unknown(self, name, unknown_binding): self._unknowns[name] = unknown_binding def trace_call(self, node, func, sigs, posargs, namedargs, result): log.debug("Logging call to %r with %d args, return %r", func, len(posargs), result) args = tuple(posargs) kwargs = tuple((namedargs or {}).items()) record = CallRecord(node, func, sigs, args, kwargs, result) if isinstance(func.data, abstract.BoundPyTDFunction): self._method_calls.add(record) elif isinstance(func.data, abstract.PyTDFunction): self._calls.add(record) def trace_functiondef(self, f): self._interpreter_functions.append(f) def trace_classdef(self, c): self._interpreter_classes.append(c) def trace_namedtuple(self, nt): # All namedtuple instances with the same name are equal, so it's fine to self._generated_classes[nt.name] = nt def pytd_classes_for_unknowns(self): classes = [] for name, val in self._unknowns.items(): if val in val.variable.Filter(self.exitpoint, strict=False): classes.append(val.data.to_structural_def(self.exitpoint, name)) return classes def pytd_for_types(self, defs): annotated_names = set() data = [] pytd_convert = self.convert.pytd_convert annots = abstract_utils.get_annotations_dict(defs) for name, t in pytd_convert.annotations_to_instance_types( self.exitpoint, annots): annotated_names.add(name) data.append(pytd.Constant(name, t)) for name, var in defs.items(): if (name in abstract_utils.TOP_LEVEL_IGNORE or 
name in annotated_names or self._is_typing_member(name, var)): continue options = var.FilteredData(self.exitpoint, strict=False) if (len(options) > 1 and not all(isinstance(o, abstract.FUNCTION_TYPES) for o in options)): if all(isinstance(o, (abstract.ParameterizedClass, abstract.TypeParameter, abstract.Union)) for o in options ) and self.options.preserve_union_macros: # type alias data.append(pytd_utils.JoinTypes(t.to_pytd_def(self.exitpoint, name) for t in options)) else: # It's ambiguous whether this is a type, a function or something combined_types = pytd_utils.JoinTypes(t.to_type(self.exitpoint) for t in options) data.append(pytd.Constant(name, combined_types)) elif options: for option in options: try: d = option.to_pytd_def(self.exitpoint, name) except NotImplementedError: d = option.to_type(self.exitpoint) if isinstance(d, pytd.NothingType): if isinstance(option, abstract.Empty): d = pytd.AnythingType() else: assert isinstance(option, typing_overlay.NoReturn) if isinstance(d, pytd.Type) and not isinstance(d, pytd.TypeParameter): data.append(pytd.Constant(name, d)) else: data.append(d) else: log.error("No visible options for %s", name) data.append(pytd.Constant(name, pytd.AnythingType())) return pytd_utils.WrapTypeDeclUnit("inferred", data) @staticmethod def _call_traces_to_function(call_traces, name_transform=lambda x: x): funcs = collections.defaultdict(pytd_utils.OrderedSet) for node, func, sigs, args, kws, retvar in call_traces: arg_names = max((sig.get_positional_names() for sig in sigs), key=len) for i in range(len(arg_names)): if not isinstance(func.data, abstract.BoundFunction) or i > 0: arg_names[i] = function.argname(i) arg_types = (a.data.to_type(node) for a in args) ret = pytd_utils.JoinTypes(t.to_type(node) for t in retvar.data) starargs = None starstarargs = None funcs[func.data.name].add(pytd.Signature( tuple(pytd.Parameter(n, t, False, False, None) for n, t in zip(arg_names, arg_types)) + tuple(pytd.Parameter(name, a.data.to_type(node), False, 
False, None) for name, a in kws), starargs, starstarargs, ret, exceptions=(), template=())) functions = [] for name, signatures in funcs.items(): functions.append(pytd.Function(name_transform(name), tuple(signatures), pytd.MethodTypes.METHOD)) return functions def _is_typing_member(self, name, var): for module_name in ("typing", "typing_extensions"): if module_name not in self.loaded_overlays: continue module = self.loaded_overlays[module_name].get_module(name) if name in module.members and module.members[name].data == var.data: return True return False def pytd_functions_for_call_traces(self): return self._call_traces_to_function(self._calls, escape.pack_partial) def pytd_classes_for_call_traces(self): class_to_records = collections.defaultdict(list) for call_record in self._method_calls: args = call_record.positional_arguments if not any(isinstance(a.data, abstract.Unknown) for a in args): continue cls = args[0].data.get_class() if isinstance(cls, abstract.PyTDClass): class_to_records[cls].append(call_record) classes = [] for cls, call_records in class_to_records.items(): full_name = cls.module + "." 
+ cls.name if cls.module else cls.name classes.append(pytd.Class( name=escape.pack_partial(full_name), metaclass=None, parents=(pytd.NamedType("builtins.object"),), # not used in solver methods=tuple(self._call_traces_to_function(call_records)), constants=(), classes=(), decorators=(), slots=None, template=(), )) return classes def pytd_classes_for_namedtuple_instances(self): return tuple(v.generate_ast() for v in self._generated_classes.values()) def compute_types(self, defs): classes = (tuple(self.pytd_classes_for_unknowns()) + tuple(self.pytd_classes_for_call_traces()) + self.pytd_classes_for_namedtuple_instances()) functions = tuple(self.pytd_functions_for_call_traces()) aliases = () # aliases are instead recorded as constants ty = pytd_utils.Concat( self.pytd_for_types(defs), pytd_utils.CreateModule("unknowns", classes=classes, functions=functions, aliases=aliases)) ty = ty.Visit(optimize.CombineReturnsAndExceptions()) ty = ty.Visit(optimize.PullInMethodClasses()) ty = ty.Visit(visitors.DefaceUnresolved( [ty, self.loader.concat_all()], escape.UNKNOWN)) return ty.Visit(visitors.AdjustTypeParameters()) def _check_return(self, node, actual, formal): if not self.options.report_errors: return True views = abstract_utils.get_views([actual], node) # Check for typevars in the return value first, since bad_matches # expects not to get any. 
bad = [view for view in views if actual in view and view[actual].data.formal] if not bad: bad = self.matcher(node).bad_matches(actual, formal) if bad: self.errorlog.bad_return_type( self.frames, node, formal, actual, bad) return not bad def check_types(src, filename, errorlog, options, loader, deep=True, init_maximum_depth=INIT_MAXIMUM_DEPTH, maximum_depth=None, **kwargs): tracer = CallTracer(errorlog=errorlog, options=options, generate_unknowns=False, loader=loader, **kwargs) loc, defs = tracer.run_program(src, filename, init_maximum_depth) snapshotter = metrics.get_metric("memory", metrics.Snapshot) snapshotter.take_snapshot("analyze:check_types:tracer") if deep: if maximum_depth is None: maximum_depth = ( QUICK_CHECK_MAXIMUM_DEPTH if options.quick else MAXIMUM_DEPTH) tracer.analyze(loc, defs, maximum_depth=maximum_depth) snapshotter.take_snapshot("analyze:check_types:post") _maybe_output_debug(options, tracer.program) def infer_types(src, errorlog, options, loader, filename=None, deep=True, init_maximum_depth=INIT_MAXIMUM_DEPTH, show_library_calls=False, maximum_depth=None, tracer_vm=None, **kwargs): # If the caller has passed in a vm, use that. 
if tracer_vm: assert isinstance(tracer_vm, CallTracer) tracer = tracer_vm else: tracer = CallTracer(errorlog=errorlog, options=options, generate_unknowns=options.protocols, store_all_calls=not deep, loader=loader, **kwargs) loc, defs = tracer.run_program(src, filename, init_maximum_depth) log.info("===Done running definitions and module-level code===") snapshotter = metrics.get_metric("memory", metrics.Snapshot) snapshotter.take_snapshot("analyze:infer_types:tracer") if deep: if maximum_depth is None: if not options.quick: maximum_depth = MAXIMUM_DEPTH elif options.analyze_annotated: # Since there's no point in analyzing annotated functions for inference, maximum_depth = QUICK_CHECK_MAXIMUM_DEPTH else: maximum_depth = QUICK_INFER_MAXIMUM_DEPTH tracer.exitpoint = tracer.analyze(loc, defs, maximum_depth) else: tracer.exitpoint = loc snapshotter.take_snapshot("analyze:infer_types:post") ast = tracer.compute_types(defs) ast = tracer.loader.resolve_ast(ast) if tracer.has_unknown_wildcard_imports or any( a in defs for a in abstract_utils.DYNAMIC_ATTRIBUTE_MARKERS): if "__getattr__" not in ast: ast = pytd_utils.Concat( ast, builtins.GetDefaultAst(options.python_version)) if options.protocols: protocols_pytd = tracer.loader.import_name("protocols") else: protocols_pytd = None builtins_pytd = tracer.loader.concat_all() ast = ast.Visit(visitors.CreateTypeParametersForSignatures()) if options.protocols: log.info("=========== PyTD to solve =============\n%s", pytd_utils.Print(ast)) ast = convert_structural.convert_pytd(ast, builtins_pytd, protocols_pytd) elif not show_library_calls: log.info("Solving is turned off. 
Discarding call traces.") ast = ast.Visit(visitors.RemoveUnknownClasses()) ast = convert_structural.extract_local(ast) _maybe_output_debug(options, tracer.program) return ast, builtins_pytd def _maybe_output_debug(options, program): if options.output_cfg or options.output_typegraph: dot = debug.program_to_dot(program, set([]), bool(options.output_cfg)) svg_file = options.output_cfg or options.output_typegraph with subprocess.Popen( ["/usr/bin/dot", "-T", "svg", "-o", svg_file], stdin=subprocess.PIPE, universal_newlines=True) as proc: (_, stderr) = proc.communicate(dot) if stderr: log.info("Failed to create %s: %s", svg_file, stderr) if options.output_debug: text = debug.program_to_text(program) if options.output_debug == "-": log.info("=========== Program Dump =============\n%s", text) else: with options.open_function(options.output_debug, "w") as fi: fi.write(text)
true
true
f70ebcc7d351e260c5318ca161f5f74fcdefc39f
2,579
py
Python
item_44_pickle_copyreg.py
nickaigi/effective_python_tips
1a68b6eaed2e946b003c0cd0bdea03e79b8e8990
[ "Unlicense" ]
null
null
null
item_44_pickle_copyreg.py
nickaigi/effective_python_tips
1a68b6eaed2e946b003c0cd0bdea03e79b8e8990
[ "Unlicense" ]
null
null
null
item_44_pickle_copyreg.py
nickaigi/effective_python_tips
1a68b6eaed2e946b003c0cd0bdea03e79b8e8990
[ "Unlicense" ]
null
null
null
""" pickle can serialized python objects into a stream of bytes and deserialize bytes back into objects. Note: by design, pickle is unsafe! """ import pickle state_path = 'game_state.bin' class GameState(object): def __init__(self): self.level = 0 self.lives = 4 def save_game(state): with open(state_path, 'wb') as f: pickle.dump(state, f) def load_game(): state_after = {} with open(state_path, 'rb') as f: state_after = pickle.load(f) return state_after def example_one(): """ >>> {'level': 1, 'lives': 3} """ state = GameState() state.level += 1 state.lives -= 1 save_game(state) saved_state = load_game() print(saved_state.__dict__) class GameStateNew(object): def __init__(self): self.level = 0 self.lives = 4 self.points = 0 def example_two(): """ >>> {'level': 0, 'lives': 4, 'points': 0} """ state = GameStateNew() serialized = pickle.dumps(state) # dumps state_after = pickle.loads(serialized) # loads print(state_after.__dict__) def example_three(): """ - What happens when we try to access an older saved GameState but the defination of GameState has changed to GameStateNew ? 
""" state_after = load_game() try: assert isinstance(state_after, GameStateNew) except AssertionError: print('AssertionError: We knew') class GameStateDefaults(object): def __init__(self, level=0, lives=4, points=0): self.level = level self.lives = lives self.points = points def pickle_game_state(game_state): kwargs = game_state.__dict__ return unpickle_game_state, (kwargs, ) def unpickle_game_state(kwargs): return GameStateDefaults(**kwargs) class GameStateDefaultsMagic(object): def __init__(self, level=0, lives=4, points=0, magic=5): self.level = level self.lives = lives self.points = points self.magic = magic def example_four(): """ >>> {'level': 0, 'lives': 4, 'points': 1000} """ state = GameStateDefaults() state.points += 1000 serialized = pickle.dumps(state) state_after = pickle.loads(serialized) print(state_after.__dict__) def main(): example_four() # TODO things got unclear from this point onwards, # book describes removing lives form the game state, and versioning. # I have been putting off this book for 4 days, not making progress. # moving to item 45 if __name__ == '__main__': main()
20.468254
73
0.632803
import pickle state_path = 'game_state.bin' class GameState(object): def __init__(self): self.level = 0 self.lives = 4 def save_game(state): with open(state_path, 'wb') as f: pickle.dump(state, f) def load_game(): state_after = {} with open(state_path, 'rb') as f: state_after = pickle.load(f) return state_after def example_one(): state = GameState() state.level += 1 state.lives -= 1 save_game(state) saved_state = load_game() print(saved_state.__dict__) class GameStateNew(object): def __init__(self): self.level = 0 self.lives = 4 self.points = 0 def example_two(): state = GameStateNew() serialized = pickle.dumps(state) state_after = pickle.loads(serialized) print(state_after.__dict__) def example_three(): state_after = load_game() try: assert isinstance(state_after, GameStateNew) except AssertionError: print('AssertionError: We knew') class GameStateDefaults(object): def __init__(self, level=0, lives=4, points=0): self.level = level self.lives = lives self.points = points def pickle_game_state(game_state): kwargs = game_state.__dict__ return unpickle_game_state, (kwargs, ) def unpickle_game_state(kwargs): return GameStateDefaults(**kwargs) class GameStateDefaultsMagic(object): def __init__(self, level=0, lives=4, points=0, magic=5): self.level = level self.lives = lives self.points = points self.magic = magic def example_four(): state = GameStateDefaults() state.points += 1000 serialized = pickle.dumps(state) state_after = pickle.loads(serialized) print(state_after.__dict__) def main(): example_four() if __name__ == '__main__': main()
true
true
f70ebd3a3a9e6e7198ad4ce295f8d246601bb1e8
5,826
py
Python
homeassistant/components/push/camera.py
MrDelik/core
93a66cc357b226389967668441000498a10453bb
[ "Apache-2.0" ]
30,023
2016-04-13T10:17:53.000Z
2020-03-02T12:56:31.000Z
homeassistant/components/push/camera.py
MrDelik/core
93a66cc357b226389967668441000498a10453bb
[ "Apache-2.0" ]
24,710
2016-04-13T08:27:26.000Z
2020-03-02T12:59:13.000Z
homeassistant/components/push/camera.py
MrDelik/core
93a66cc357b226389967668441000498a10453bb
[ "Apache-2.0" ]
11,956
2016-04-13T18:42:31.000Z
2020-03-02T09:32:12.000Z
"""Camera platform that receives images through HTTP POST.""" from __future__ import annotations import asyncio from collections import deque from datetime import timedelta import logging import aiohttp import async_timeout import voluptuous as vol from homeassistant.components import webhook from homeassistant.components.camera import PLATFORM_SCHEMA, STATE_IDLE, Camera from homeassistant.components.camera.const import DOMAIN from homeassistant.const import CONF_NAME, CONF_TIMEOUT, CONF_WEBHOOK_ID from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.event import async_track_point_in_utc_time from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType import homeassistant.util.dt as dt_util _LOGGER = logging.getLogger(__name__) CONF_BUFFER_SIZE = "buffer" CONF_IMAGE_FIELD = "field" DEFAULT_NAME = "Push Camera" ATTR_FILENAME = "filename" ATTR_LAST_TRIP = "last_trip" PUSH_CAMERA_DATA = "push_camera" PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, vol.Optional(CONF_BUFFER_SIZE, default=1): cv.positive_int, vol.Optional(CONF_TIMEOUT, default=timedelta(seconds=5)): vol.All( cv.time_period, cv.positive_timedelta ), vol.Optional(CONF_IMAGE_FIELD, default="image"): cv.string, vol.Required(CONF_WEBHOOK_ID): cv.string, } ) async def async_setup_platform( hass: HomeAssistant, config: ConfigType, async_add_entities: AddEntitiesCallback, discovery_info: DiscoveryInfoType | None = None, ) -> None: """Set up the Push Camera platform.""" if PUSH_CAMERA_DATA not in hass.data: hass.data[PUSH_CAMERA_DATA] = {} webhook_id = config.get(CONF_WEBHOOK_ID) cameras = [ PushCamera( hass, config[CONF_NAME], config[CONF_BUFFER_SIZE], config[CONF_TIMEOUT], config[CONF_IMAGE_FIELD], webhook_id, ) ] async_add_entities(cameras) async def handle_webhook(hass, webhook_id, request): 
"""Handle incoming webhook POST with image files.""" try: async with async_timeout.timeout(5): data = dict(await request.post()) except (asyncio.TimeoutError, aiohttp.web.HTTPException) as error: _LOGGER.error("Could not get information from POST <%s>", error) return camera = hass.data[PUSH_CAMERA_DATA][webhook_id] if camera.image_field not in data: _LOGGER.warning("Webhook call without POST parameter <%s>", camera.image_field) return await camera.update_image( data[camera.image_field].file.read(), data[camera.image_field].filename ) class PushCamera(Camera): """The representation of a Push camera.""" def __init__(self, hass, name, buffer_size, timeout, image_field, webhook_id): """Initialize push camera component.""" super().__init__() self._name = name self._last_trip = None self._filename = None self._expired_listener = None self._timeout = timeout self.queue = deque([], buffer_size) self._current_image = None self._image_field = image_field self.webhook_id = webhook_id self.webhook_url = webhook.async_generate_url(hass, webhook_id) async def async_added_to_hass(self): """Call when entity is added to hass.""" self.hass.data[PUSH_CAMERA_DATA][self.webhook_id] = self try: webhook.async_register( self.hass, DOMAIN, self.name, self.webhook_id, handle_webhook ) except ValueError: _LOGGER.error( "In <%s>, webhook_id <%s> already used", self.name, self.webhook_id ) @property def image_field(self): """HTTP field containing the image file.""" return self._image_field async def update_image(self, image, filename): """Update the camera image.""" if self.state == STATE_IDLE: self._attr_is_recording = True self._last_trip = dt_util.utcnow() self.queue.clear() self._filename = filename self.queue.appendleft(image) @callback def reset_state(now): """Set state to idle after no new images for a period of time.""" self._attr_is_recording = False self._expired_listener = None _LOGGER.debug("Reset state") self.async_write_ha_state() if self._expired_listener: 
self._expired_listener() self._expired_listener = async_track_point_in_utc_time( self.hass, reset_state, dt_util.utcnow() + self._timeout ) self.async_write_ha_state() async def async_camera_image( self, width: int | None = None, height: int | None = None ) -> bytes | None: """Return a still image response.""" if self.queue: if self.state == STATE_IDLE: self.queue.rotate(1) self._current_image = self.queue[0] return self._current_image @property def name(self): """Return the name of this camera.""" return self._name @property def motion_detection_enabled(self): """Camera Motion Detection Status.""" return False @property def extra_state_attributes(self): """Return the state attributes.""" return { name: value for name, value in ( (ATTR_LAST_TRIP, self._last_trip), (ATTR_FILENAME, self._filename), ) if value is not None }
30.825397
87
0.656368
from __future__ import annotations import asyncio from collections import deque from datetime import timedelta import logging import aiohttp import async_timeout import voluptuous as vol from homeassistant.components import webhook from homeassistant.components.camera import PLATFORM_SCHEMA, STATE_IDLE, Camera from homeassistant.components.camera.const import DOMAIN from homeassistant.const import CONF_NAME, CONF_TIMEOUT, CONF_WEBHOOK_ID from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.event import async_track_point_in_utc_time from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType import homeassistant.util.dt as dt_util _LOGGER = logging.getLogger(__name__) CONF_BUFFER_SIZE = "buffer" CONF_IMAGE_FIELD = "field" DEFAULT_NAME = "Push Camera" ATTR_FILENAME = "filename" ATTR_LAST_TRIP = "last_trip" PUSH_CAMERA_DATA = "push_camera" PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, vol.Optional(CONF_BUFFER_SIZE, default=1): cv.positive_int, vol.Optional(CONF_TIMEOUT, default=timedelta(seconds=5)): vol.All( cv.time_period, cv.positive_timedelta ), vol.Optional(CONF_IMAGE_FIELD, default="image"): cv.string, vol.Required(CONF_WEBHOOK_ID): cv.string, } ) async def async_setup_platform( hass: HomeAssistant, config: ConfigType, async_add_entities: AddEntitiesCallback, discovery_info: DiscoveryInfoType | None = None, ) -> None: if PUSH_CAMERA_DATA not in hass.data: hass.data[PUSH_CAMERA_DATA] = {} webhook_id = config.get(CONF_WEBHOOK_ID) cameras = [ PushCamera( hass, config[CONF_NAME], config[CONF_BUFFER_SIZE], config[CONF_TIMEOUT], config[CONF_IMAGE_FIELD], webhook_id, ) ] async_add_entities(cameras) async def handle_webhook(hass, webhook_id, request): try: async with async_timeout.timeout(5): data = dict(await request.post()) except 
(asyncio.TimeoutError, aiohttp.web.HTTPException) as error: _LOGGER.error("Could not get information from POST <%s>", error) return camera = hass.data[PUSH_CAMERA_DATA][webhook_id] if camera.image_field not in data: _LOGGER.warning("Webhook call without POST parameter <%s>", camera.image_field) return await camera.update_image( data[camera.image_field].file.read(), data[camera.image_field].filename ) class PushCamera(Camera): def __init__(self, hass, name, buffer_size, timeout, image_field, webhook_id): super().__init__() self._name = name self._last_trip = None self._filename = None self._expired_listener = None self._timeout = timeout self.queue = deque([], buffer_size) self._current_image = None self._image_field = image_field self.webhook_id = webhook_id self.webhook_url = webhook.async_generate_url(hass, webhook_id) async def async_added_to_hass(self): self.hass.data[PUSH_CAMERA_DATA][self.webhook_id] = self try: webhook.async_register( self.hass, DOMAIN, self.name, self.webhook_id, handle_webhook ) except ValueError: _LOGGER.error( "In <%s>, webhook_id <%s> already used", self.name, self.webhook_id ) @property def image_field(self): return self._image_field async def update_image(self, image, filename): if self.state == STATE_IDLE: self._attr_is_recording = True self._last_trip = dt_util.utcnow() self.queue.clear() self._filename = filename self.queue.appendleft(image) @callback def reset_state(now): self._attr_is_recording = False self._expired_listener = None _LOGGER.debug("Reset state") self.async_write_ha_state() if self._expired_listener: self._expired_listener() self._expired_listener = async_track_point_in_utc_time( self.hass, reset_state, dt_util.utcnow() + self._timeout ) self.async_write_ha_state() async def async_camera_image( self, width: int | None = None, height: int | None = None ) -> bytes | None: if self.queue: if self.state == STATE_IDLE: self.queue.rotate(1) self._current_image = self.queue[0] return self._current_image @property def 
name(self): return self._name @property def motion_detection_enabled(self): return False @property def extra_state_attributes(self): return { name: value for name, value in ( (ATTR_LAST_TRIP, self._last_trip), (ATTR_FILENAME, self._filename), ) if value is not None }
true
true
f70ebd9d986c52d7cd9e9a67cef2870a625bb652
3,080
py
Python
scripts/eval_bleu.py
nng555/fairseq
c9730a125825a85f33042e1b9fd1959b8ca829e5
[ "MIT" ]
2
2020-10-05T08:52:01.000Z
2021-03-03T15:26:35.000Z
scripts/eval_bleu.py
nng555/fairseq
c9730a125825a85f33042e1b9fd1959b8ca829e5
[ "MIT" ]
null
null
null
scripts/eval_bleu.py
nng555/fairseq
c9730a125825a85f33042e1b9fd1959b8ca829e5
[ "MIT" ]
null
null
null
import os import sys import subprocess import hydra from omegaconf import DictConfig from hydra import slurm_utils @hydra.main(config_path='/h/nng/conf/robust/config.yaml') def gen_neighborhood_labels(cfg: DictConfig): base_path = '/h/nng/data' model_data_path = os.path.join(base_path, cfg.data.task, cfg.eval.model.data) eval_data_path = os.path.join(base_path, cfg.data.task, cfg.eval.data) model_path = os.path.join('/h/nng/slurm', cfg.eval.model.date, slurm_utils.resolve_name(cfg.eval.model.name)) if not os.path.exists(os.path.join(model_path, 'checkpoint_best.pt')): for f in sorted(os.listdir(model_path))[::-1]: if os.path.exists(os.path.join(model_path, f, 'checkpoint_best.pt')): model_path = os.path.join(model_path, f) break model_path = os.path.join(model_path, 'checkpoint_best.pt') bin_path = os.path.join(model_data_path, cfg.data.fdset, cfg.data.bin, 'bin') t_path = os.path.join(eval_data_path, cfg.data.tdset, 'orig', cfg.eval.split + '.bpe.' + cfg.data.src) ref_path = os.path.join(eval_data_path, cfg.data.tdset, 'orig', cfg.eval.split + '.raw.' 
+ cfg.data.tgt) bpe_path = '/h/nng/programs/subword-nmt/subword_nmt' if cfg.data.fdset == 'iwslt': fair_sh = ['fairseq-generate', bin_path, \ '--path', model_path, \ '--beam', '10', \ '--remove-bpe', \ '--batch-size', '128', \ '--quiet'] fair_p = subprocess.Popen(fair_sh, stdout=subprocess.PIPE) output, err = fair_p.communicate() print(output) else: cat_sh = ['cat', t_path] fair_sh = ['fairseq-interactive', bin_path, \ '--path', model_path, \ '-s', cfg.data.src, \ '-t', cfg.data.tgt, \ '--beam', '10', \ '--remove-bpe', \ '--buffer-size', '1024', \ '--max-tokens', '8000'] grep_sh = ['grep', '^H-'] cut_sh = ['cut', '-f', '3-'] detoken_sh = ['sacremoses', 'detokenize', '-l', cfg.data.tgt, '-q'] score_sh = ['sacrebleu', ref_path, '-l', cfg.data.src + '-' + cfg.data.tgt, '-w', '2'] cat_p = subprocess.Popen(cat_sh, stdout=subprocess.PIPE) fair_p = subprocess.Popen(fair_sh, stdin=cat_p.stdout, stdout=subprocess.PIPE) cat_p.stdout.close() grep_p = subprocess.Popen(grep_sh, stdin=fair_p.stdout, stdout=subprocess.PIPE) fair_p.stdout.close() cut_p = subprocess.Popen(cut_sh, stdin=grep_p.stdout, stdout=subprocess.PIPE) grep_p.stdout.close() detoken_p = subprocess.Popen(detoken_sh, stdin=cut_p.stdout, stdout=subprocess.PIPE) cut_p.stdout.close() score_p = subprocess.Popen(score_sh, stdin=detoken_p.stdout, stdout=subprocess.PIPE) detoken_p.stdout.close() output, err = score_p.communicate() print(output) if __name__ == "__main__": gen_neighborhood_labels()
40.526316
113
0.593831
import os import sys import subprocess import hydra from omegaconf import DictConfig from hydra import slurm_utils @hydra.main(config_path='/h/nng/conf/robust/config.yaml') def gen_neighborhood_labels(cfg: DictConfig): base_path = '/h/nng/data' model_data_path = os.path.join(base_path, cfg.data.task, cfg.eval.model.data) eval_data_path = os.path.join(base_path, cfg.data.task, cfg.eval.data) model_path = os.path.join('/h/nng/slurm', cfg.eval.model.date, slurm_utils.resolve_name(cfg.eval.model.name)) if not os.path.exists(os.path.join(model_path, 'checkpoint_best.pt')): for f in sorted(os.listdir(model_path))[::-1]: if os.path.exists(os.path.join(model_path, f, 'checkpoint_best.pt')): model_path = os.path.join(model_path, f) break model_path = os.path.join(model_path, 'checkpoint_best.pt') bin_path = os.path.join(model_data_path, cfg.data.fdset, cfg.data.bin, 'bin') t_path = os.path.join(eval_data_path, cfg.data.tdset, 'orig', cfg.eval.split + '.bpe.' + cfg.data.src) ref_path = os.path.join(eval_data_path, cfg.data.tdset, 'orig', cfg.eval.split + '.raw.' 
+ cfg.data.tgt) bpe_path = '/h/nng/programs/subword-nmt/subword_nmt' if cfg.data.fdset == 'iwslt': fair_sh = ['fairseq-generate', bin_path, \ '--path', model_path, \ '--beam', '10', \ '--remove-bpe', \ '--batch-size', '128', \ '--quiet'] fair_p = subprocess.Popen(fair_sh, stdout=subprocess.PIPE) output, err = fair_p.communicate() print(output) else: cat_sh = ['cat', t_path] fair_sh = ['fairseq-interactive', bin_path, \ '--path', model_path, \ '-s', cfg.data.src, \ '-t', cfg.data.tgt, \ '--beam', '10', \ '--remove-bpe', \ '--buffer-size', '1024', \ '--max-tokens', '8000'] grep_sh = ['grep', '^H-'] cut_sh = ['cut', '-f', '3-'] detoken_sh = ['sacremoses', 'detokenize', '-l', cfg.data.tgt, '-q'] score_sh = ['sacrebleu', ref_path, '-l', cfg.data.src + '-' + cfg.data.tgt, '-w', '2'] cat_p = subprocess.Popen(cat_sh, stdout=subprocess.PIPE) fair_p = subprocess.Popen(fair_sh, stdin=cat_p.stdout, stdout=subprocess.PIPE) cat_p.stdout.close() grep_p = subprocess.Popen(grep_sh, stdin=fair_p.stdout, stdout=subprocess.PIPE) fair_p.stdout.close() cut_p = subprocess.Popen(cut_sh, stdin=grep_p.stdout, stdout=subprocess.PIPE) grep_p.stdout.close() detoken_p = subprocess.Popen(detoken_sh, stdin=cut_p.stdout, stdout=subprocess.PIPE) cut_p.stdout.close() score_p = subprocess.Popen(score_sh, stdin=detoken_p.stdout, stdout=subprocess.PIPE) detoken_p.stdout.close() output, err = score_p.communicate() print(output) if __name__ == "__main__": gen_neighborhood_labels()
true
true
f70ebea5539a99338b6dc9117844ec03bdd9efa1
79,789
py
Python
src/spring-cloud/azext_spring_cloud/vendored_sdks/appplatform/v2022_01_01_preview/operations/_deployments_operations.py
haroonf/azure-cli-extensions
61c044d34c224372f186934fa7c9313f1cd3a525
[ "MIT" ]
207
2017-11-29T06:59:41.000Z
2022-03-31T10:00:53.000Z
src/spring-cloud/azext_spring_cloud/vendored_sdks/appplatform/v2022_01_01_preview/operations/_deployments_operations.py
haroonf/azure-cli-extensions
61c044d34c224372f186934fa7c9313f1cd3a525
[ "MIT" ]
4,061
2017-10-27T23:19:56.000Z
2022-03-31T23:18:30.000Z
src/spring-cloud/azext_spring_cloud/vendored_sdks/appplatform/v2022_01_01_preview/operations/_deployments_operations.py
haroonf/azure-cli-extensions
61c044d34c224372f186934fa7c9313f1cd3a525
[ "MIT" ]
802
2017-10-11T17:36:26.000Z
2022-03-31T22:24:32.000Z
# coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- from typing import TYPE_CHECKING import warnings from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpRequest, HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.arm_polling import ARMPolling from .. import models as _models if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports from typing import Any, Callable, Dict, Generic, Iterable, List, Optional, TypeVar, Union T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] class DeploymentsOperations(object): """DeploymentsOperations operations. You should not instantiate this class directly. Instead, you should create a Client instance that instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. :type models: ~azure.mgmt.appplatform.v2022_01_01_preview.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. :param deserializer: An object model deserializer. 
""" models = _models def __init__(self, client, config, serializer, deserializer): self._client = client self._serialize = serializer self._deserialize = deserializer self._config = config def get( self, resource_group_name, # type: str service_name, # type: str app_name, # type: str deployment_name, # type: str **kwargs # type: Any ): # type: (...) -> "_models.DeploymentResource" """Get a Deployment and its properties. :param resource_group_name: The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal. :type resource_group_name: str :param service_name: The name of the Service resource. :type service_name: str :param app_name: The name of the App resource. :type app_name: str :param deployment_name: The name of the Deployment resource. :type deployment_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: DeploymentResource, or the result of cls(response) :rtype: ~azure.mgmt.appplatform.v2022_01_01_preview.models.DeploymentResource :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.DeploymentResource"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2022-01-01-preview" accept = "application/json" # Construct URL url = self.get.metadata['url'] # type: ignore path_format_arguments = { 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'serviceName': self._serialize.url("service_name", service_name, 'str'), 'appName': self._serialize.url("app_name", app_name, 'str'), 'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct 
parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) deserialized = self._deserialize('DeploymentResource', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}'} # type: ignore def _create_or_update_initial( self, resource_group_name, # type: str service_name, # type: str app_name, # type: str deployment_name, # type: str deployment_resource, # type: "_models.DeploymentResource" **kwargs # type: Any ): # type: (...) 
-> "_models.DeploymentResource" cls = kwargs.pop('cls', None) # type: ClsType["_models.DeploymentResource"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2022-01-01-preview" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" # Construct URL url = self._create_or_update_initial.metadata['url'] # type: ignore path_format_arguments = { 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'serviceName': self._serialize.url("service_name", service_name, 'str'), 'appName': self._serialize.url("app_name", app_name, 'str'), 'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(deployment_resource, 'DeploymentResource') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200, 201, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) if response.status_code == 200: 
deserialized = self._deserialize('DeploymentResource', pipeline_response) if response.status_code == 201: deserialized = self._deserialize('DeploymentResource', pipeline_response) if response.status_code == 202: deserialized = self._deserialize('DeploymentResource', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}'} # type: ignore def begin_create_or_update( self, resource_group_name, # type: str service_name, # type: str app_name, # type: str deployment_name, # type: str deployment_resource, # type: "_models.DeploymentResource" **kwargs # type: Any ): # type: (...) -> LROPoller["_models.DeploymentResource"] """Create a new Deployment or update an exiting Deployment. :param resource_group_name: The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal. :type resource_group_name: str :param service_name: The name of the Service resource. :type service_name: str :param app_name: The name of the App resource. :type app_name: str :param deployment_name: The name of the Deployment resource. :type deployment_name: str :param deployment_resource: Parameters for the create or update operation. :type deployment_resource: ~azure.mgmt.appplatform.v2022_01_01_preview.models.DeploymentResource :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. 
:paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of LROPoller that returns either DeploymentResource or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_01_01_preview.models.DeploymentResource] :raises ~azure.core.exceptions.HttpResponseError: """ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] cls = kwargs.pop('cls', None) # type: ClsType["_models.DeploymentResource"] lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval ) cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] if cont_token is None: raw_result = self._create_or_update_initial( resource_group_name=resource_group_name, service_name=service_name, app_name=app_name, deployment_name=deployment_name, deployment_resource=deployment_resource, cls=lambda x,y,z: x, **kwargs ) kwargs.pop('error_map', None) kwargs.pop('content_type', None) def get_long_running_output(pipeline_response): deserialized = self._deserialize('DeploymentResource', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized path_format_arguments = { 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'serviceName': self._serialize.url("service_name", service_name, 'str'), 'appName': self._serialize.url("app_name", app_name, 'str'), 'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str'), } if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: return 
LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output ) else: return LROPoller(self._client, raw_result, get_long_running_output, polling_method) begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}'} # type: ignore def _delete_initial( self, resource_group_name, # type: str service_name, # type: str app_name, # type: str deployment_name, # type: str **kwargs # type: Any ): # type: (...) -> None cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2022-01-01-preview" accept = "application/json" # Construct URL url = self._delete_initial.metadata['url'] # type: ignore path_format_arguments = { 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'serviceName': self._serialize.url("service_name", service_name, 'str'), 'appName': self._serialize.url("app_name", app_name, 'str'), 'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = 
pipeline_response.http_response if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) if cls: return cls(pipeline_response, None, {}) _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}'} # type: ignore def begin_delete( self, resource_group_name, # type: str service_name, # type: str app_name, # type: str deployment_name, # type: str **kwargs # type: Any ): # type: (...) -> LROPoller[None] """Operation to delete a Deployment. :param resource_group_name: The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal. :type resource_group_name: str :param service_name: The name of the Service resource. :type service_name: str :param app_name: The name of the App resource. :type app_name: str :param deployment_name: The name of the Deployment resource. :type deployment_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
:return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: """ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] cls = kwargs.pop('cls', None) # type: ClsType[None] lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval ) cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] if cont_token is None: raw_result = self._delete_initial( resource_group_name=resource_group_name, service_name=service_name, app_name=app_name, deployment_name=deployment_name, cls=lambda x,y,z: x, **kwargs ) kwargs.pop('error_map', None) kwargs.pop('content_type', None) def get_long_running_output(pipeline_response): if cls: return cls(pipeline_response, None, {}) path_format_arguments = { 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'serviceName': self._serialize.url("service_name", service_name, 'str'), 'appName': self._serialize.url("app_name", app_name, 'str'), 'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str'), } if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output ) else: return LROPoller(self._client, raw_result, get_long_running_output, polling_method) begin_delete.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}'} # type: ignore def _update_initial( self, resource_group_name, # type: str service_name, # type: str app_name, # type: str deployment_name, # type: str deployment_resource, # type: "_models.DeploymentResource" **kwargs # type: Any ): # type: (...) -> "_models.DeploymentResource" cls = kwargs.pop('cls', None) # type: ClsType["_models.DeploymentResource"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2022-01-01-preview" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" # Construct URL url = self._update_initial.metadata['url'] # type: ignore path_format_arguments = { 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'serviceName': self._serialize.url("service_name", service_name, 'str'), 'appName': self._serialize.url("app_name", app_name, 'str'), 'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(deployment_resource, 'DeploymentResource') body_content_kwargs['content'] = body_content request = self._client.patch(url, query_parameters, 
header_parameters, **body_content_kwargs)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200, 202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        # Both 200 (update completed) and 202 (update accepted, still running)
        # carry a DeploymentResource body.
        if response.status_code == 200:
            deserialized = self._deserialize('DeploymentResource', pipeline_response)

        if response.status_code == 202:
            deserialized = self._deserialize('DeploymentResource', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    _update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}'}  # type: ignore

    def begin_update(
        self,
        resource_group_name,  # type: str
        service_name,  # type: str
        app_name,  # type: str
        deployment_name,  # type: str
        deployment_resource,  # type: "_models.DeploymentResource"
        **kwargs  # type: Any
    ):
        # type: (...) -> LROPoller["_models.DeploymentResource"]
        """Operation to update an existing Deployment.

        :param resource_group_name: The name of the resource group that contains the resource. You can
         obtain this value from the Azure Resource Manager API or the portal.
        :type resource_group_name: str
        :param service_name: The name of the Service resource.
        :type service_name: str
        :param app_name: The name of the App resource.
        :type app_name: str
        :param deployment_name: The name of the Deployment resource.
        :type deployment_name: str
        :param deployment_resource: Parameters for the update operation.
        :type deployment_resource: ~azure.mgmt.appplatform.v2022_01_01_preview.models.DeploymentResource
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
         operation to not poll, or pass in your own initialized polling object for a personal polling
         strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
         Retry-After header is present.
        :return: An instance of LROPoller that returns either DeploymentResource or the result of
         cls(response)
        :rtype:
         ~azure.core.polling.LROPoller[~azure.mgmt.appplatform.v2022_01_01_preview.models.DeploymentResource]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.DeploymentResource"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        if cont_token is None:
            # cls=lambda x,y,z: x hands the raw PipelineResponse to the poller.
            raw_result = self._update_initial(
                resource_group_name=resource_group_name,
                service_name=service_name,
                app_name=app_name,
                deployment_name=deployment_name,
                deployment_resource=deployment_resource,
                cls=lambda x,y,z: x,
                **kwargs
            )

        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)

        def get_long_running_output(pipeline_response):
            # Deserialize the final polled response into the resource model.
            deserialized = self._deserialize('DeploymentResource', pipeline_response)

            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized

        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'serviceName': self._serialize.url("service_name", service_name, 'str'),
            'appName': self._serialize.url("app_name", app_name, 'str'),
            'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str'),
        }

        if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = NoPolling()
        else: polling_method = polling
        if cont_token:
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}'}  # type: ignore

    def list(
        self,
        resource_group_name,  # type: str
        service_name,  # type: str
        app_name,  # type: str
        version=None,  # type: Optional[List[str]]
        **kwargs  # type: Any
    ):
        # type: (...) -> Iterable["_models.DeploymentResourceCollection"]
        """Handles requests to list all resources in an App.

        :param resource_group_name: The name of the resource group that contains the resource. You can
         obtain this value from the Azure Resource Manager API or the portal.
        :type resource_group_name: str
        :param service_name: The name of the Service resource.
        :type service_name: str
        :param app_name: The name of the App resource.
        :type app_name: str
        :param version: Version of the deployments to be listed.
        :type version: list[str]
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either DeploymentResourceCollection or the result of
         cls(response)
        :rtype:
         ~azure.core.paging.ItemPaged[~azure.mgmt.appplatform.v2022_01_01_preview.models.DeploymentResourceCollection]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.DeploymentResourceCollection"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2022-01-01-preview"
        accept = "application/json"

        def prepare_request(next_link=None):
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

            if not next_link:
                # Construct URL
                url = self.list.metadata['url']  # type: ignore
                path_format_arguments = {
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                    'serviceName': self._serialize.url("service_name", service_name, 'str'),
                    'appName': self._serialize.url("app_name", app_name, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
                if version is not None:
                    query_parameters['version'] = [self._serialize.query("version", q, 'str') if q is not None else '' for q in version]

                request = self._client.get(url, query_parameters, header_parameters)
            else:
                # next_link is a fully-formed URL; no extra query parameters are added.
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request

        def extract_data(pipeline_response):
            deserialized = self._deserialize('DeploymentResourceCollection', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, iter(list_of_elem)

        def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)

            return pipeline_response

        return ItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments'}  # type: ignore

    def list_for_cluster(
        self,
        resource_group_name,  # type: str
        service_name,  # type: str
        version=None,  # type: Optional[List[str]]
        **kwargs  # type: Any
    ):
        # type: (...) -> Iterable["_models.DeploymentResourceCollection"]
        """List deployments for a certain service.

        :param resource_group_name: The name of the resource group that contains the resource. You can
         obtain this value from the Azure Resource Manager API or the portal.
        :type resource_group_name: str
        :param service_name: The name of the Service resource.
        :type service_name: str
        :param version: Version of the deployments to be listed.
        :type version: list[str]
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either DeploymentResourceCollection or the result of
         cls(response)
        :rtype:
         ~azure.core.paging.ItemPaged[~azure.mgmt.appplatform.v2022_01_01_preview.models.DeploymentResourceCollection]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.DeploymentResourceCollection"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2022-01-01-preview"
        accept = "application/json"

        def prepare_request(next_link=None):
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

            if not next_link:
                # Construct URL
                url = self.list_for_cluster.metadata['url']  # type: ignore
                path_format_arguments = {
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                    'serviceName': self._serialize.url("service_name", service_name, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
                if version is not None:
                    query_parameters['version'] = [self._serialize.query("version", q, 'str') if q is not None else '' for q in version]

                request = self._client.get(url, query_parameters, header_parameters)
            else:
                # next_link is a fully-formed URL; no extra query parameters are added.
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request

        def extract_data(pipeline_response):
            deserialized = self._deserialize('DeploymentResourceCollection', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, iter(list_of_elem)

        def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)

            return pipeline_response

        return ItemPaged(
            get_next, extract_data
        )
    list_for_cluster.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/deployments'}  # type: ignore

    # Initial POST for the begin_start long-running operation; pollers drive completion.
    def _start_initial(
        self,
        resource_group_name,  # type: str
        service_name,  # type: str
        app_name,  # type: str
        deployment_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> None
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2022-01-01-preview"
        accept = "application/json"

        # Construct URL
        url = self._start_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'serviceName': self._serialize.url("service_name", service_name, 'str'),
            'appName': self._serialize.url("app_name", app_name, 'str'),
            'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.post(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200, 202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        if cls:
            return cls(pipeline_response, None, {})

    _start_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}/start'}  # type: ignore

    def begin_start(
        self,
        resource_group_name,  # type: str
        service_name,  # type: str
        app_name,  # type: str
        deployment_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> LROPoller[None]
        """Start the deployment.

        :param resource_group_name: The name of the resource group that contains the resource. You can
         obtain this value from the Azure Resource Manager API or the portal.
        :type resource_group_name: str
        :param service_name: The name of the Service resource.
        :type service_name: str
        :param app_name: The name of the App resource.
        :type app_name: str
        :param deployment_name: The name of the Deployment resource.
        :type deployment_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
         operation to not poll, or pass in your own initialized polling object for a personal polling
         strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
         Retry-After header is present.
        :return: An instance of LROPoller that returns either None or the result of cls(response)
        :rtype: ~azure.core.polling.LROPoller[None]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        if cont_token is None:
            # cls=lambda x,y,z: x hands the raw PipelineResponse to the poller.
            raw_result = self._start_initial(
                resource_group_name=resource_group_name,
                service_name=service_name,
                app_name=app_name,
                deployment_name=deployment_name,
                cls=lambda x,y,z: x,
                **kwargs
            )

        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)

        def get_long_running_output(pipeline_response):
            if cls:
                return cls(pipeline_response, None, {})

        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'serviceName': self._serialize.url("service_name", service_name, 'str'),
            'appName': self._serialize.url("app_name", app_name, 'str'),
            'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str'),
        }

        if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = NoPolling()
        else: polling_method = polling
        if cont_token:
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_start.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}/start'}  # type: ignore

    # Initial POST for the begin_stop long-running operation; pollers drive completion.
    def _stop_initial(
        self,
        resource_group_name,  # type: str
        service_name,  # type: str
        app_name,  # type: str
        deployment_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> None
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2022-01-01-preview"
        accept = "application/json"

        # Construct URL
        url = self._stop_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'serviceName': self._serialize.url("service_name", service_name, 'str'),
            'appName': self._serialize.url("app_name", app_name, 'str'),
            'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.post(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200, 202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        if cls:
            return cls(pipeline_response, None, {})

    _stop_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}/stop'}  # type: ignore

    def begin_stop(
        self,
        resource_group_name,  # type: str
        service_name,  # type: str
        app_name,  # type: str
        deployment_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> LROPoller[None]
        """Stop the deployment.

        :param resource_group_name: The name of the resource group that contains the resource. You can
         obtain this value from the Azure Resource Manager API or the portal.
        :type resource_group_name: str
        :param service_name: The name of the Service resource.
        :type service_name: str
        :param app_name: The name of the App resource.
        :type app_name: str
        :param deployment_name: The name of the Deployment resource.
        :type deployment_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
         operation to not poll, or pass in your own initialized polling object for a personal polling
         strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
         Retry-After header is present.
        :return: An instance of LROPoller that returns either None or the result of cls(response)
        :rtype: ~azure.core.polling.LROPoller[None]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        if cont_token is None:
            # cls=lambda x,y,z: x hands the raw PipelineResponse to the poller.
            raw_result = self._stop_initial(
                resource_group_name=resource_group_name,
                service_name=service_name,
                app_name=app_name,
                deployment_name=deployment_name,
                cls=lambda x,y,z: x,
                **kwargs
            )

        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)

        def get_long_running_output(pipeline_response):
            if cls:
                return cls(pipeline_response, None, {})

        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'serviceName': self._serialize.url("service_name", service_name, 'str'),
            'appName': self._serialize.url("app_name", app_name, 'str'),
            'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str'),
        }

        if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = NoPolling()
        else: polling_method = polling
        if cont_token:
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_stop.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}/stop'}  # type: ignore

    # Initial POST for the begin_restart long-running operation; pollers drive completion.
    def _restart_initial(
        self,
        resource_group_name,  # type: str
        service_name,  # type: str
        app_name,  # type: str
        deployment_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> None
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2022-01-01-preview"
        accept = "application/json"

        # Construct URL
        url = self._restart_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'serviceName': self._serialize.url("service_name", service_name, 'str'),
            'appName': self._serialize.url("app_name", app_name, 'str'),
            'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.post(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200, 202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        if cls:
            return cls(pipeline_response, None, {})

    _restart_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}/restart'}  # type: ignore

    def begin_restart(
        self,
        resource_group_name,  # type: str
        service_name,  # type: str
        app_name,  # type: str
        deployment_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> LROPoller[None]
        """Restart the deployment.

        :param resource_group_name: The name of the resource group that contains the resource. You can
         obtain this value from the Azure Resource Manager API or the portal.
        :type resource_group_name: str
        :param service_name: The name of the Service resource.
        :type service_name: str
        :param app_name: The name of the App resource.
        :type app_name: str
        :param deployment_name: The name of the Deployment resource.
        :type deployment_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
         operation to not poll, or pass in your own initialized polling object for a personal polling
         strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
         Retry-After header is present.
        :return: An instance of LROPoller that returns either None or the result of cls(response)
        :rtype: ~azure.core.polling.LROPoller[None]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        if cont_token is None:
            # cls=lambda x,y,z: x hands the raw PipelineResponse to the poller.
            raw_result = self._restart_initial(
                resource_group_name=resource_group_name,
                service_name=service_name,
                app_name=app_name,
                deployment_name=deployment_name,
                cls=lambda x,y,z: x,
                **kwargs
            )

        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)

        def get_long_running_output(pipeline_response):
            if cls:
                return cls(pipeline_response, None, {})

        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'serviceName': self._serialize.url("service_name", service_name, 'str'),
            'appName': self._serialize.url("app_name", app_name, 'str'),
            'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str'),
        }

        if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = NoPolling()
        else: polling_method = polling
        if cont_token:
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_restart.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}/restart'}  # type: ignore

    def get_log_file_url(
        self,
        resource_group_name,  # type: str
        service_name,  # type: str
        app_name,  # type: str
        deployment_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> Optional["_models.LogFileUrlResponse"]
        """Get deployment log file URL.

        :param resource_group_name: The name of the resource group that contains the resource. You can
         obtain this value from the Azure Resource Manager API or the portal.
        :type resource_group_name: str
        :param service_name: The name of the Service resource.
        :type service_name: str
        :param app_name: The name of the App resource.
        :type app_name: str
        :param deployment_name: The name of the Deployment resource.
        :type deployment_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: LogFileUrlResponse, or the result of cls(response)
        :rtype: ~azure.mgmt.appplatform.v2022_01_01_preview.models.LogFileUrlResponse or None
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[Optional["_models.LogFileUrlResponse"]]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2022-01-01-preview"
        accept = "application/json"

        # Construct URL
        url = self.get_log_file_url.metadata['url']  # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'serviceName': self._serialize.url("service_name", service_name, 'str'),
            'appName': self._serialize.url("app_name", app_name, 'str'),
            'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.post(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200, 204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        # 204 means no log file is available; the method then returns None.
        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize('LogFileUrlResponse', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    get_log_file_url.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}/getLogFileUrl'}  # type: ignore

    # Initial POST for the begin_generate_heap_dump long-running operation.
    def _generate_heap_dump_initial(
        self,
        resource_group_name,  # type: str
        service_name,  # type: str
        app_name,  # type: str
        deployment_name,  # type: str
        diagnostic_parameters,  # type: "_models.DiagnosticParameters"
        **kwargs  # type: Any
    ):
        # type: (...) -> None
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2022-01-01-preview"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"

        # Construct URL
        url = self._generate_heap_dump_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'serviceName': self._serialize.url("service_name", service_name, 'str'),
            'appName': self._serialize.url("app_name", app_name, 'str'),
            'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(diagnostic_parameters, 'DiagnosticParameters')
        body_content_kwargs['content'] = body_content
        request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200, 202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        if cls:
            return cls(pipeline_response, None, {})

    _generate_heap_dump_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}/generateHeapDump'}  # type: ignore

    def begin_generate_heap_dump(
        self,
        resource_group_name,  # type: str
        service_name,  # type: str
        app_name,  # type: str
        deployment_name,  # type: str
        diagnostic_parameters,  # type: "_models.DiagnosticParameters"
        **kwargs  # type: Any
    ):
        # type: (...) -> LROPoller[None]
        """Generate Heap Dump.

        :param resource_group_name: The name of the resource group that contains the resource. You can
         obtain this value from the Azure Resource Manager API or the portal.
        :type resource_group_name: str
        :param service_name: The name of the Service resource.
        :type service_name: str
        :param app_name: The name of the App resource.
        :type app_name: str
        :param deployment_name: The name of the Deployment resource.
        :type deployment_name: str
        :param diagnostic_parameters: Parameters for the diagnostic operation.
        :type diagnostic_parameters:
         ~azure.mgmt.appplatform.v2022_01_01_preview.models.DiagnosticParameters
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
         operation to not poll, or pass in your own initialized polling object for a personal polling
         strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
         Retry-After header is present.
        :return: An instance of LROPoller that returns either None or the result of cls(response)
        :rtype: ~azure.core.polling.LROPoller[None]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        if cont_token is None:
            # cls=lambda x,y,z: x hands the raw PipelineResponse to the poller.
            raw_result = self._generate_heap_dump_initial(
                resource_group_name=resource_group_name,
                service_name=service_name,
                app_name=app_name,
                deployment_name=deployment_name,
                diagnostic_parameters=diagnostic_parameters,
                cls=lambda x,y,z: x,
                **kwargs
            )

        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)

        def get_long_running_output(pipeline_response):
            if cls:
                return cls(pipeline_response, None, {})

        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'serviceName': self._serialize.url("service_name", service_name, 'str'),
            'appName': self._serialize.url("app_name", app_name, 'str'),
            'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str'),
        }

        if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = NoPolling()
        else: polling_method = polling
        if cont_token:
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_generate_heap_dump.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}/generateHeapDump'}  # type: ignore

    # Initial POST for the begin_generate_thread_dump long-running operation.
    def _generate_thread_dump_initial(
        self,
        resource_group_name,  # type: str
        service_name,  # type: str
        app_name,  # type: str
        deployment_name,  # type: str
        diagnostic_parameters,  # type: "_models.DiagnosticParameters"
        **kwargs  # type: Any
    ):
        # type: (...) -> None
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2022-01-01-preview"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"

        # Construct URL
        url = self._generate_thread_dump_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'serviceName': self._serialize.url("service_name", service_name, 'str'),
            'appName': self._serialize.url("app_name", app_name, 'str'),
            'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(diagnostic_parameters, 'DiagnosticParameters')
        body_content_kwargs['content'] = body_content
        request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200, 202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        if cls:
            return cls(pipeline_response, None, {})

    _generate_thread_dump_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}/generateThreadDump'}  # type: ignore

    def begin_generate_thread_dump(
        self,
        resource_group_name,  # type: str
        service_name,  # type: str
        app_name,  # type: str
        deployment_name,  # type: str
        diagnostic_parameters,  # type: "_models.DiagnosticParameters"
        **kwargs  # type: Any
    ):
        # type: (...) -> LROPoller[None]
        """Generate Thread Dump.

        :param resource_group_name: The name of the resource group that contains the resource. You can
         obtain this value from the Azure Resource Manager API or the portal.
        :type resource_group_name: str
        :param service_name: The name of the Service resource.
        :type service_name: str
        :param app_name: The name of the App resource.
        :type app_name: str
        :param deployment_name: The name of the Deployment resource.
        :type deployment_name: str
        :param diagnostic_parameters: Parameters for the diagnostic operation.
        :type diagnostic_parameters:
         ~azure.mgmt.appplatform.v2022_01_01_preview.models.DiagnosticParameters
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
         operation to not poll, or pass in your own initialized polling object for a personal polling
         strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: """ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] cls = kwargs.pop('cls', None) # type: ClsType[None] lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval ) cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] if cont_token is None: raw_result = self._generate_thread_dump_initial( resource_group_name=resource_group_name, service_name=service_name, app_name=app_name, deployment_name=deployment_name, diagnostic_parameters=diagnostic_parameters, cls=lambda x,y,z: x, **kwargs ) kwargs.pop('error_map', None) kwargs.pop('content_type', None) def get_long_running_output(pipeline_response): if cls: return cls(pipeline_response, None, {}) path_format_arguments = { 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'serviceName': self._serialize.url("service_name", service_name, 'str'), 'appName': self._serialize.url("app_name", app_name, 'str'), 'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str'), } if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output ) else: return 
LROPoller(self._client, raw_result, get_long_running_output, polling_method) begin_generate_thread_dump.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}/generateThreadDump'} # type: ignore def _start_jfr_initial( self, resource_group_name, # type: str service_name, # type: str app_name, # type: str deployment_name, # type: str diagnostic_parameters, # type: "_models.DiagnosticParameters" **kwargs # type: Any ): # type: (...) -> None cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2022-01-01-preview" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" # Construct URL url = self._start_jfr_initial.metadata['url'] # type: ignore path_format_arguments = { 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'serviceName': self._serialize.url("service_name", service_name, 'str'), 'appName': self._serialize.url("app_name", app_name, 'str'), 'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(diagnostic_parameters, 
'DiagnosticParameters') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) if cls: return cls(pipeline_response, None, {}) _start_jfr_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}/startJFR'} # type: ignore def begin_start_jfr( self, resource_group_name, # type: str service_name, # type: str app_name, # type: str deployment_name, # type: str diagnostic_parameters, # type: "_models.DiagnosticParameters" **kwargs # type: Any ): # type: (...) -> LROPoller[None] """Start JFR. :param resource_group_name: The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal. :type resource_group_name: str :param service_name: The name of the Service resource. :type service_name: str :param app_name: The name of the App resource. :type app_name: str :param deployment_name: The name of the Deployment resource. :type deployment_name: str :param diagnostic_parameters: Parameters for the diagnostic operation. :type diagnostic_parameters: ~azure.mgmt.appplatform.v2022_01_01_preview.models.DiagnosticParameters :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be ARMPolling. 
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: """ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] cls = kwargs.pop('cls', None) # type: ClsType[None] lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval ) cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] if cont_token is None: raw_result = self._start_jfr_initial( resource_group_name=resource_group_name, service_name=service_name, app_name=app_name, deployment_name=deployment_name, diagnostic_parameters=diagnostic_parameters, cls=lambda x,y,z: x, **kwargs ) kwargs.pop('error_map', None) kwargs.pop('content_type', None) def get_long_running_output(pipeline_response): if cls: return cls(pipeline_response, None, {}) path_format_arguments = { 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'serviceName': self._serialize.url("service_name", service_name, 'str'), 'appName': self._serialize.url("app_name", app_name, 'str'), 'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str'), } if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, 
continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output ) else: return LROPoller(self._client, raw_result, get_long_running_output, polling_method) begin_start_jfr.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}/startJFR'} # type: ignore
51.443585
254
0.660492
from typing import TYPE_CHECKING import warnings from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpRequest, HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.arm_polling import ARMPolling from .. import models as _models if TYPE_CHECKING: from typing import Any, Callable, Dict, Generic, Iterable, List, Optional, TypeVar, Union T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] class DeploymentsOperations(object): models = _models def __init__(self, client, config, serializer, deserializer): self._client = client self._serialize = serializer self._deserialize = deserializer self._config = config def get( self, resource_group_name, service_name, app_name, deployment_name, **kwargs ): cls = kwargs.pop('cls', None) error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2022-01-01-preview" accept = "application/json" url = self.get.metadata['url'] path_format_arguments = { 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'serviceName': self._serialize.url("service_name", service_name, 'str'), 'appName': self._serialize.url("app_name", app_name, 'str'), 'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str'), } url = self._client.format_url(url, **path_format_arguments) query_parameters = {} query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') header_parameters = 
{} header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) deserialized = self._deserialize('DeploymentResource', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}'} def _create_or_update_initial( self, resource_group_name, service_name, app_name, deployment_name, deployment_resource, **kwargs ): cls = kwargs.pop('cls', None) error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2022-01-01-preview" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" url = self._create_or_update_initial.metadata['url'] path_format_arguments = { 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'serviceName': self._serialize.url("service_name", service_name, 'str'), 'appName': self._serialize.url("app_name", app_name, 'str'), 'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str'), } url = self._client.format_url(url, **path_format_arguments) query_parameters = {} query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') header_parameters = {} header_parameters['Content-Type'] = 
self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} body_content = self._serialize.body(deployment_resource, 'DeploymentResource') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200, 201, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) if response.status_code == 200: deserialized = self._deserialize('DeploymentResource', pipeline_response) if response.status_code == 201: deserialized = self._deserialize('DeploymentResource', pipeline_response) if response.status_code == 202: deserialized = self._deserialize('DeploymentResource', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}'} def begin_create_or_update( self, resource_group_name, service_name, app_name, deployment_name, deployment_resource, **kwargs ): polling = kwargs.pop('polling', True) cls = kwargs.pop('cls', None) lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval ) cont_token = kwargs.pop('continuation_token', None) if cont_token is None: raw_result = self._create_or_update_initial( resource_group_name=resource_group_name, service_name=service_name, app_name=app_name, deployment_name=deployment_name, deployment_resource=deployment_resource, cls=lambda x,y,z: x, **kwargs ) kwargs.pop('error_map', None) kwargs.pop('content_type', None) def 
get_long_running_output(pipeline_response): deserialized = self._deserialize('DeploymentResource', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized path_format_arguments = { 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'serviceName': self._serialize.url("service_name", service_name, 'str'), 'appName': self._serialize.url("app_name", app_name, 'str'), 'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str'), } if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output ) else: return LROPoller(self._client, raw_result, get_long_running_output, polling_method) begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}'} def _delete_initial( self, resource_group_name, service_name, app_name, deployment_name, **kwargs ): cls = kwargs.pop('cls', None) error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2022-01-01-preview" accept = "application/json" url = self._delete_initial.metadata['url'] path_format_arguments = { 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 
'serviceName': self._serialize.url("service_name", service_name, 'str'), 'appName': self._serialize.url("app_name", app_name, 'str'), 'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str'), } url = self._client.format_url(url, **path_format_arguments) query_parameters = {} query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') header_parameters = {} header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) if cls: return cls(pipeline_response, None, {}) _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}'} def begin_delete( self, resource_group_name, service_name, app_name, deployment_name, **kwargs ): polling = kwargs.pop('polling', True) cls = kwargs.pop('cls', None) lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval ) cont_token = kwargs.pop('continuation_token', None) if cont_token is None: raw_result = self._delete_initial( resource_group_name=resource_group_name, service_name=service_name, app_name=app_name, deployment_name=deployment_name, cls=lambda x,y,z: x, **kwargs ) kwargs.pop('error_map', None) kwargs.pop('content_type', None) def get_long_running_output(pipeline_response): if cls: return cls(pipeline_response, None, {}) path_format_arguments = { 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), 'resourceGroupName': 
self._serialize.url("resource_group_name", resource_group_name, 'str'), 'serviceName': self._serialize.url("service_name", service_name, 'str'), 'appName': self._serialize.url("app_name", app_name, 'str'), 'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str'), } if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output ) else: return LROPoller(self._client, raw_result, get_long_running_output, polling_method) begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}'} def _update_initial( self, resource_group_name, service_name, app_name, deployment_name, deployment_resource, **kwargs ): cls = kwargs.pop('cls', None) error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2022-01-01-preview" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" url = self._update_initial.metadata['url'] path_format_arguments = { 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'serviceName': self._serialize.url("service_name", service_name, 'str'), 'appName': self._serialize.url("app_name", app_name, 'str'), 'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str'), } url = self._client.format_url(url, **path_format_arguments) 
query_parameters = {} query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') header_parameters = {} header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} body_content = self._serialize.body(deployment_resource, 'DeploymentResource') body_content_kwargs['content'] = body_content request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) if response.status_code == 200: deserialized = self._deserialize('DeploymentResource', pipeline_response) if response.status_code == 202: deserialized = self._deserialize('DeploymentResource', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized _update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}'} def begin_update( self, resource_group_name, service_name, app_name, deployment_name, deployment_resource, **kwargs ): polling = kwargs.pop('polling', True) cls = kwargs.pop('cls', None) lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval ) cont_token = kwargs.pop('continuation_token', None) if cont_token is None: raw_result = self._update_initial( resource_group_name=resource_group_name, service_name=service_name, app_name=app_name, deployment_name=deployment_name, deployment_resource=deployment_resource, cls=lambda x,y,z: x, **kwargs ) kwargs.pop('error_map', None) kwargs.pop('content_type', None) def 
get_long_running_output(pipeline_response): deserialized = self._deserialize('DeploymentResource', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized path_format_arguments = { 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'serviceName': self._serialize.url("service_name", service_name, 'str'), 'appName': self._serialize.url("app_name", app_name, 'str'), 'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str'), } if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output ) else: return LROPoller(self._client, raw_result, get_long_running_output, polling_method) begin_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}'} def list( self, resource_group_name, service_name, app_name, version=None, **kwargs ): cls = kwargs.pop('cls', None) error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2022-01-01-preview" accept = "application/json" def prepare_request(next_link=None): header_parameters = {} header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: url = self.list.metadata['url'] path_format_arguments = { 'subscriptionId': self._serialize.url("self._config.subscription_id", 
self._config.subscription_id, 'str'), 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'serviceName': self._serialize.url("service_name", service_name, 'str'), 'appName': self._serialize.url("app_name", app_name, 'str'), } url = self._client.format_url(url, **path_format_arguments) query_parameters = {} query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') if version is not None: query_parameters['version'] = [self._serialize.query("version", q, 'str') if q is not None else '' for q in version] request = self._client.get(url, query_parameters, header_parameters) else: url = next_link query_parameters = {} request = self._client.get(url, query_parameters, header_parameters) return request def extract_data(pipeline_response): deserialized = self._deserialize('DeploymentResourceCollection', pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): request = prepare_request(next_link) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) return pipeline_response return ItemPaged( get_next, extract_data ) list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments'} def list_for_cluster( self, resource_group_name, service_name, version=None, **kwargs ): cls = kwargs.pop('cls', None) error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2022-01-01-preview" accept = "application/json" def 
prepare_request(next_link=None): header_parameters = {} header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: url = self.list_for_cluster.metadata['url'] path_format_arguments = { 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'serviceName': self._serialize.url("service_name", service_name, 'str'), } url = self._client.format_url(url, **path_format_arguments) query_parameters = {} query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') if version is not None: query_parameters['version'] = [self._serialize.query("version", q, 'str') if q is not None else '' for q in version] request = self._client.get(url, query_parameters, header_parameters) else: url = next_link query_parameters = {} request = self._client.get(url, query_parameters, header_parameters) return request def extract_data(pipeline_response): deserialized = self._deserialize('DeploymentResourceCollection', pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): request = prepare_request(next_link) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) return pipeline_response return ItemPaged( get_next, extract_data ) list_for_cluster.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/deployments'} def _start_initial( self, resource_group_name, service_name, app_name, deployment_name, **kwargs ): cls = 
kwargs.pop('cls', None) error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2022-01-01-preview" accept = "application/json" url = self._start_initial.metadata['url'] path_format_arguments = { 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'serviceName': self._serialize.url("service_name", service_name, 'str'), 'appName': self._serialize.url("app_name", app_name, 'str'), 'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str'), } url = self._client.format_url(url, **path_format_arguments) query_parameters = {} query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') header_parameters = {} header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) if cls: return cls(pipeline_response, None, {}) _start_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}/start'} def begin_start( self, resource_group_name, service_name, app_name, deployment_name, **kwargs ): polling = kwargs.pop('polling', True) cls = kwargs.pop('cls', None) lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval ) cont_token = kwargs.pop('continuation_token', None) if cont_token is None: raw_result = self._start_initial( 
resource_group_name=resource_group_name, service_name=service_name, app_name=app_name, deployment_name=deployment_name, cls=lambda x,y,z: x, **kwargs ) kwargs.pop('error_map', None) kwargs.pop('content_type', None) def get_long_running_output(pipeline_response): if cls: return cls(pipeline_response, None, {}) path_format_arguments = { 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'serviceName': self._serialize.url("service_name", service_name, 'str'), 'appName': self._serialize.url("app_name", app_name, 'str'), 'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str'), } if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output ) else: return LROPoller(self._client, raw_result, get_long_running_output, polling_method) begin_start.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}/start'} def _stop_initial( self, resource_group_name, service_name, app_name, deployment_name, **kwargs ): cls = kwargs.pop('cls', None) error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2022-01-01-preview" accept = "application/json" url = self._stop_initial.metadata['url'] path_format_arguments = { 'subscriptionId': self._serialize.url("self._config.subscription_id", 
self._config.subscription_id, 'str'), 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'serviceName': self._serialize.url("service_name", service_name, 'str'), 'appName': self._serialize.url("app_name", app_name, 'str'), 'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str'), } url = self._client.format_url(url, **path_format_arguments) query_parameters = {} query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') header_parameters = {} header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) if cls: return cls(pipeline_response, None, {}) _stop_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}/stop'} def begin_stop( self, resource_group_name, service_name, app_name, deployment_name, **kwargs ): polling = kwargs.pop('polling', True) cls = kwargs.pop('cls', None) lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval ) cont_token = kwargs.pop('continuation_token', None) if cont_token is None: raw_result = self._stop_initial( resource_group_name=resource_group_name, service_name=service_name, app_name=app_name, deployment_name=deployment_name, cls=lambda x,y,z: x, **kwargs ) kwargs.pop('error_map', None) kwargs.pop('content_type', None) def get_long_running_output(pipeline_response): if cls: return cls(pipeline_response, None, {}) path_format_arguments = { 'subscriptionId': 
self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'serviceName': self._serialize.url("service_name", service_name, 'str'), 'appName': self._serialize.url("app_name", app_name, 'str'), 'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str'), } if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output ) else: return LROPoller(self._client, raw_result, get_long_running_output, polling_method) begin_stop.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}/stop'} def _restart_initial( self, resource_group_name, service_name, app_name, deployment_name, **kwargs ): cls = kwargs.pop('cls', None) error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2022-01-01-preview" accept = "application/json" url = self._restart_initial.metadata['url'] path_format_arguments = { 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'serviceName': self._serialize.url("service_name", service_name, 'str'), 'appName': self._serialize.url("app_name", app_name, 'str'), 'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str'), } url = 
self._client.format_url(url, **path_format_arguments) query_parameters = {} query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') header_parameters = {} header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) if cls: return cls(pipeline_response, None, {}) _restart_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}/restart'} def begin_restart( self, resource_group_name, service_name, app_name, deployment_name, **kwargs ): polling = kwargs.pop('polling', True) cls = kwargs.pop('cls', None) lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval ) cont_token = kwargs.pop('continuation_token', None) if cont_token is None: raw_result = self._restart_initial( resource_group_name=resource_group_name, service_name=service_name, app_name=app_name, deployment_name=deployment_name, cls=lambda x,y,z: x, **kwargs ) kwargs.pop('error_map', None) kwargs.pop('content_type', None) def get_long_running_output(pipeline_response): if cls: return cls(pipeline_response, None, {}) path_format_arguments = { 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'serviceName': self._serialize.url("service_name", service_name, 'str'), 'appName': self._serialize.url("app_name", app_name, 'str'), 'deploymentName': 
self._serialize.url("deployment_name", deployment_name, 'str'), } if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output ) else: return LROPoller(self._client, raw_result, get_long_running_output, polling_method) begin_restart.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}/restart'} def get_log_file_url( self, resource_group_name, service_name, app_name, deployment_name, **kwargs ): cls = kwargs.pop('cls', None) error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2022-01-01-preview" accept = "application/json" url = self.get_log_file_url.metadata['url'] path_format_arguments = { 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'serviceName': self._serialize.url("service_name", service_name, 'str'), 'appName': self._serialize.url("app_name", app_name, 'str'), 'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str'), } url = self._client.format_url(url, **path_format_arguments) query_parameters = {} query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') header_parameters = {} header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) 
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) deserialized = None if response.status_code == 200: deserialized = self._deserialize('LogFileUrlResponse', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_log_file_url.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}/getLogFileUrl'} def _generate_heap_dump_initial( self, resource_group_name, service_name, app_name, deployment_name, diagnostic_parameters, **kwargs ): cls = kwargs.pop('cls', None) error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2022-01-01-preview" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" url = self._generate_heap_dump_initial.metadata['url'] path_format_arguments = { 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'serviceName': self._serialize.url("service_name", service_name, 'str'), 'appName': self._serialize.url("app_name", app_name, 'str'), 'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str'), } url = self._client.format_url(url, **path_format_arguments) query_parameters = {} query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') header_parameters = {} header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] 
= self._serialize.header("accept", accept, 'str') body_content_kwargs = {} body_content = self._serialize.body(diagnostic_parameters, 'DiagnosticParameters') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) if cls: return cls(pipeline_response, None, {}) _generate_heap_dump_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}/generateHeapDump'} def begin_generate_heap_dump( self, resource_group_name, service_name, app_name, deployment_name, diagnostic_parameters, **kwargs ): polling = kwargs.pop('polling', True) cls = kwargs.pop('cls', None) lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval ) cont_token = kwargs.pop('continuation_token', None) if cont_token is None: raw_result = self._generate_heap_dump_initial( resource_group_name=resource_group_name, service_name=service_name, app_name=app_name, deployment_name=deployment_name, diagnostic_parameters=diagnostic_parameters, cls=lambda x,y,z: x, **kwargs ) kwargs.pop('error_map', None) kwargs.pop('content_type', None) def get_long_running_output(pipeline_response): if cls: return cls(pipeline_response, None, {}) path_format_arguments = { 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'serviceName': self._serialize.url("service_name", service_name, 'str'), 'appName': self._serialize.url("app_name", 
app_name, 'str'), 'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str'), } if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output ) else: return LROPoller(self._client, raw_result, get_long_running_output, polling_method) begin_generate_heap_dump.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}/generateHeapDump'} def _generate_thread_dump_initial( self, resource_group_name, service_name, app_name, deployment_name, diagnostic_parameters, **kwargs ): cls = kwargs.pop('cls', None) error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2022-01-01-preview" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" url = self._generate_thread_dump_initial.metadata['url'] path_format_arguments = { 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'serviceName': self._serialize.url("service_name", service_name, 'str'), 'appName': self._serialize.url("app_name", app_name, 'str'), 'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str'), } url = self._client.format_url(url, **path_format_arguments) query_parameters = {} query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') header_parameters 
= {} header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} body_content = self._serialize.body(diagnostic_parameters, 'DiagnosticParameters') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) if cls: return cls(pipeline_response, None, {}) _generate_thread_dump_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}/generateThreadDump'} def begin_generate_thread_dump( self, resource_group_name, service_name, app_name, deployment_name, diagnostic_parameters, **kwargs ): polling = kwargs.pop('polling', True) cls = kwargs.pop('cls', None) lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval ) cont_token = kwargs.pop('continuation_token', None) if cont_token is None: raw_result = self._generate_thread_dump_initial( resource_group_name=resource_group_name, service_name=service_name, app_name=app_name, deployment_name=deployment_name, diagnostic_parameters=diagnostic_parameters, cls=lambda x,y,z: x, **kwargs ) kwargs.pop('error_map', None) kwargs.pop('content_type', None) def get_long_running_output(pipeline_response): if cls: return cls(pipeline_response, None, {}) path_format_arguments = { 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), 'resourceGroupName': self._serialize.url("resource_group_name", 
resource_group_name, 'str'), 'serviceName': self._serialize.url("service_name", service_name, 'str'), 'appName': self._serialize.url("app_name", app_name, 'str'), 'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str'), } if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output ) else: return LROPoller(self._client, raw_result, get_long_running_output, polling_method) begin_generate_thread_dump.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}/generateThreadDump'} def _start_jfr_initial( self, resource_group_name, service_name, app_name, deployment_name, diagnostic_parameters, **kwargs ): cls = kwargs.pop('cls', None) error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2022-01-01-preview" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" url = self._start_jfr_initial.metadata['url'] path_format_arguments = { 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'serviceName': self._serialize.url("service_name", service_name, 'str'), 'appName': self._serialize.url("app_name", app_name, 'str'), 'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str'), } url = self._client.format_url(url, **path_format_arguments) 
query_parameters = {} query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') header_parameters = {} header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} body_content = self._serialize.body(diagnostic_parameters, 'DiagnosticParameters') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) if cls: return cls(pipeline_response, None, {}) _start_jfr_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}/startJFR'} def begin_start_jfr( self, resource_group_name, service_name, app_name, deployment_name, diagnostic_parameters, **kwargs ): polling = kwargs.pop('polling', True) cls = kwargs.pop('cls', None) lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval ) cont_token = kwargs.pop('continuation_token', None) if cont_token is None: raw_result = self._start_jfr_initial( resource_group_name=resource_group_name, service_name=service_name, app_name=app_name, deployment_name=deployment_name, diagnostic_parameters=diagnostic_parameters, cls=lambda x,y,z: x, **kwargs ) kwargs.pop('error_map', None) kwargs.pop('content_type', None) def get_long_running_output(pipeline_response): if cls: return cls(pipeline_response, None, {}) path_format_arguments = { 'subscriptionId': self._serialize.url("self._config.subscription_id", 
self._config.subscription_id, 'str'), 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'serviceName': self._serialize.url("service_name", service_name, 'str'), 'appName': self._serialize.url("app_name", app_name, 'str'), 'deploymentName': self._serialize.url("deployment_name", deployment_name, 'str'), } if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output ) else: return LROPoller(self._client, raw_result, get_long_running_output, polling_method) begin_start_jfr.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/apps/{appName}/deployments/{deploymentName}/startJFR'}
true
true
f70ec1157e54153c23a33efa2de815ddfff14a48
4,440
py
Python
src/helper/trainer.py
rlaehgns5399/GoogLeNet-Inception-tf
eb9597634eec9a7b511e967ad8c7b2552563755f
[ "MIT" ]
3
2018-12-02T13:34:32.000Z
2019-03-18T03:23:37.000Z
src/helper/trainer.py
rlaehgns5399/GoogLeNet-Inception-tf
eb9597634eec9a7b511e967ad8c7b2552563755f
[ "MIT" ]
null
null
null
src/helper/trainer.py
rlaehgns5399/GoogLeNet-Inception-tf
eb9597634eec9a7b511e967ad8c7b2552563755f
[ "MIT" ]
null
null
null
#!/usr/bin/env python # -*- coding: utf-8 -*- # File: trainer.py # Author: Qian Ge <geqian1001@gmail.com> import os import numpy as np import tensorflow as tf def display(global_step, step, scaler_sum_list, name_list, collection, summary_val=None, summary_writer=None, ): print('[step: {}]'.format(global_step), end='') for val, name in zip(scaler_sum_list, name_list): print(' {}: {:.4f}'.format(name, val * 1. / step), end='') print('') if summary_writer is not None: s = tf.Summary() for val, name in zip(scaler_sum_list, name_list): s.value.add(tag='{}/{}'.format(collection, name), simple_value=val * 1. / step) summary_writer.add_summary(s, global_step) if summary_val is not None: summary_writer.add_summary(summary_val, global_step) class Trainer(object): def __init__(self, train_model, valid_model, train_data, init_lr=1e-3): self._t_model = train_model self._v_model = valid_model self._train_data = train_data self._init_lr = init_lr self._train_op = train_model.get_train_op() self._train_loss_op = train_model.get_loss() self._train_accuracy_op = train_model.get_accuracy() self._valid_loss_op = valid_model.get_loss() self._valid_accuracy_op = valid_model.get_accuracy() # self._train_summary_op = train_model.get_train_summary() # self._valid_summary_op = train_model.get_valid_summary() self.global_step = 0 self.epoch_id = 0 def train_epoch(self, sess, keep_prob=1., summary_writer=None): if self.epoch_id < 35: self._lr = self._init_lr elif self.epoch_id < 50: self._lr = self._init_lr / 10. else: self._lr = self._init_lr / 100. 
# self._t_model.set_is_training(True) display_name_list = ['loss', 'accuracy'] cur_summary = None cur_epoch = self._train_data.epochs_completed step = 0 loss_sum = 0 acc_sum = 0 self.epoch_id += 1 while cur_epoch == self._train_data.epochs_completed: self.global_step += 1 step += 1 batch_data = self._train_data.next_batch_dict() im = batch_data['image'] label = batch_data['label'] _, loss, acc = sess.run( [self._train_op, self._train_loss_op, self._train_accuracy_op], feed_dict={self._t_model.image: im, self._t_model.label: label, self._t_model.lr: self._lr, self._t_model.keep_prob: keep_prob}) loss_sum += loss acc_sum += acc if step % 100 == 0 or step == 1: display(self.global_step, step, [loss_sum, acc_sum], display_name_list, 'train', summary_val=cur_summary, summary_writer=summary_writer) print('==== epoch: {}, lr:{} ===='.format(cur_epoch, self._lr)) display(self.global_step, step, [loss_sum, acc_sum], display_name_list, 'train', summary_val=cur_summary, summary_writer=summary_writer) def valid_epoch(self, sess, dataflow, summary_writer=None): display_name_list = ['loss', 'accuracy'] cur_summary = None dataflow.reset_epoch() step = 0 loss_sum = 0 acc_sum = 0 while dataflow.epochs_completed < 1: step += 1 batch_data = dataflow.next_batch_dict() im = batch_data['image'] label = batch_data['label'] loss, acc = sess.run( [self._valid_loss_op, self._valid_accuracy_op], feed_dict={self._v_model.image: im, self._v_model.label: label}) loss_sum += loss acc_sum += acc print('[Valid]: ', end='') display(self.global_step, step, [loss_sum, acc_sum], display_name_list, 'valid', summary_val=cur_summary, summary_writer=summary_writer)
33.134328
80
0.547072
import os import numpy as np import tensorflow as tf def display(global_step, step, scaler_sum_list, name_list, collection, summary_val=None, summary_writer=None, ): print('[step: {}]'.format(global_step), end='') for val, name in zip(scaler_sum_list, name_list): print(' {}: {:.4f}'.format(name, val * 1. / step), end='') print('') if summary_writer is not None: s = tf.Summary() for val, name in zip(scaler_sum_list, name_list): s.value.add(tag='{}/{}'.format(collection, name), simple_value=val * 1. / step) summary_writer.add_summary(s, global_step) if summary_val is not None: summary_writer.add_summary(summary_val, global_step) class Trainer(object): def __init__(self, train_model, valid_model, train_data, init_lr=1e-3): self._t_model = train_model self._v_model = valid_model self._train_data = train_data self._init_lr = init_lr self._train_op = train_model.get_train_op() self._train_loss_op = train_model.get_loss() self._train_accuracy_op = train_model.get_accuracy() self._valid_loss_op = valid_model.get_loss() self._valid_accuracy_op = valid_model.get_accuracy() self.global_step = 0 self.epoch_id = 0 def train_epoch(self, sess, keep_prob=1., summary_writer=None): if self.epoch_id < 35: self._lr = self._init_lr elif self.epoch_id < 50: self._lr = self._init_lr / 10. else: self._lr = self._init_lr / 100. 
display_name_list = ['loss', 'accuracy'] cur_summary = None cur_epoch = self._train_data.epochs_completed step = 0 loss_sum = 0 acc_sum = 0 self.epoch_id += 1 while cur_epoch == self._train_data.epochs_completed: self.global_step += 1 step += 1 batch_data = self._train_data.next_batch_dict() im = batch_data['image'] label = batch_data['label'] _, loss, acc = sess.run( [self._train_op, self._train_loss_op, self._train_accuracy_op], feed_dict={self._t_model.image: im, self._t_model.label: label, self._t_model.lr: self._lr, self._t_model.keep_prob: keep_prob}) loss_sum += loss acc_sum += acc if step % 100 == 0 or step == 1: display(self.global_step, step, [loss_sum, acc_sum], display_name_list, 'train', summary_val=cur_summary, summary_writer=summary_writer) print('==== epoch: {}, lr:{} ===='.format(cur_epoch, self._lr)) display(self.global_step, step, [loss_sum, acc_sum], display_name_list, 'train', summary_val=cur_summary, summary_writer=summary_writer) def valid_epoch(self, sess, dataflow, summary_writer=None): display_name_list = ['loss', 'accuracy'] cur_summary = None dataflow.reset_epoch() step = 0 loss_sum = 0 acc_sum = 0 while dataflow.epochs_completed < 1: step += 1 batch_data = dataflow.next_batch_dict() im = batch_data['image'] label = batch_data['label'] loss, acc = sess.run( [self._valid_loss_op, self._valid_accuracy_op], feed_dict={self._v_model.image: im, self._v_model.label: label}) loss_sum += loss acc_sum += acc print('[Valid]: ', end='') display(self.global_step, step, [loss_sum, acc_sum], display_name_list, 'valid', summary_val=cur_summary, summary_writer=summary_writer)
true
true
f70ec119a082e7c27fa7682d87edbb9dac979d7d
601
py
Python
ndg/__init__.py
philipkershaw/ndg_xacml
f24ccbb4d4c053c585b0369dfe5a5949d49f9715
[ "BSD-3-Clause" ]
null
null
null
ndg/__init__.py
philipkershaw/ndg_xacml
f24ccbb4d4c053c585b0369dfe5a5949d49f9715
[ "BSD-3-Clause" ]
null
null
null
ndg/__init__.py
philipkershaw/ndg_xacml
f24ccbb4d4c053c585b0369dfe5a5949d49f9715
[ "BSD-3-Clause" ]
null
null
null
"""NDG XACML ndg namespace package NERC DataGrid This is a setuptools namespace_package. DO NOT place any other code in this file! There is no guarantee that it will be installed with easy_install. See: http://peak.telecommunity.com/DevCenter/setuptools#namespace-packages ... for details. """ __author__ = "P J Kershaw" __date__ = "19/02/10" __copyright__ = "(C) 2010 Science and Technology Facilities Council" __license__ = "BSD - see LICENSE file in top-level directory" __contact__ = "Philip.Kershaw@stfc.ac.uk" __revision__ = '$Id$' __import__('pkg_resources').declare_namespace(__name__)
30.05
69
0.768719
__author__ = "P J Kershaw" __date__ = "19/02/10" __copyright__ = "(C) 2010 Science and Technology Facilities Council" __license__ = "BSD - see LICENSE file in top-level directory" __contact__ = "Philip.Kershaw@stfc.ac.uk" __revision__ = '$Id$' __import__('pkg_resources').declare_namespace(__name__)
true
true
f70ec43dc91c15bd7888f7714dfbcacfc1d5d148
4,513
py
Python
fastai/torch_imports.py
CalebEverett/fastai-dl2
64d23592eddca6ca1f3647e73c319e97c8eb392b
[ "Apache-2.0" ]
4
2018-03-29T20:47:24.000Z
2019-05-06T12:31:11.000Z
fastai/torch_imports.py
CalebEverett/fastai-dl2
64d23592eddca6ca1f3647e73c319e97c8eb392b
[ "Apache-2.0" ]
null
null
null
fastai/torch_imports.py
CalebEverett/fastai-dl2
64d23592eddca6ca1f3647e73c319e97c8eb392b
[ "Apache-2.0" ]
null
null
null
import os import torch, torchvision, torchtext from torch import nn, cuda, backends, FloatTensor, LongTensor, optim import torch.nn.functional as F from torch.autograd import Variable from torch.utils.data import Dataset, TensorDataset from torch.nn.init import kaiming_uniform, kaiming_normal from torchvision.transforms import Compose from torchvision.models import resnet18, resnet34, resnet50, resnet101, resnet152 from torchvision.models import vgg16_bn, vgg19_bn from torchvision.models import densenet121, densenet161, densenet169, densenet201 from .models.resnext_50_32x4d import resnext_50_32x4d from .models.resnext_101_32x4d import resnext_101_32x4d from .models.resnext_101_64x4d import resnext_101_64x4d from .models.wrn_50_2f import wrn_50_2f from .models.inceptionresnetv2 import InceptionResnetV2 from .models.inceptionv4 import InceptionV4 from .models.nasnet import nasnetalarge from unet_models import unet11 import warnings warnings.filterwarnings('ignore', message='Implicit dimension choice', category=UserWarning) def children(m): return m if isinstance(m, (list, tuple)) else list(m.children()) def save_model(m, p): torch.save(m.state_dict(), p) def load_model(m, p): m.load_state_dict(torch.load(p, map_location=lambda storage, loc: storage)) def load_pre(pre, f, fn): m = f() path = os.path.dirname(__file__) if pre: load_model(m, f'{path}/weights/{fn}.pth') return m def _fastai_model(name, paper_title, paper_href): def add_docs_wrapper(f): f.__doc__ = f"""{name} model from `"{paper_title}" <{paper_href}>`_ Args: pre (bool): If True, returns a model pre-trained on ImageNet """ return f return add_docs_wrapper @_fastai_model('Inception 4', 'Inception-v4, Inception-ResNet and the Impact of Residual Connections on Learning', 'https://arxiv.org/pdf/1602.07261.pdf') def inception_4(pre): return children(inceptionv4(pretrained=pre))[0] @_fastai_model('Inception 4', 'Inception-v4, Inception-ResNet and the Impact of Residual Connections on Learning', 
'https://arxiv.org/pdf/1602.07261.pdf') def inceptionresnet_2(pre): return load_pre(pre, InceptionResnetV2, 'inceptionresnetv2-d579a627') @_fastai_model('ResNeXt 50', 'Aggregated Residual Transformations for Deep Neural Networks', 'https://arxiv.org/abs/1611.05431') def resnext50(pre): return load_pre(pre, resnext_50_32x4d, 'resnext_50_32x4d') @_fastai_model('ResNeXt 101_32', 'Aggregated Residual Transformations for Deep Neural Networks', 'https://arxiv.org/abs/1611.05431') def resnext101(pre): return load_pre(pre, resnext_101_32x4d, 'resnext_101_32x4d') @_fastai_model('ResNeXt 101_64', 'Aggregated Residual Transformations for Deep Neural Networks', 'https://arxiv.org/abs/1611.05431') def resnext101_64(pre): return load_pre(pre, resnext_101_64x4d, 'resnext_101_64x4d') @_fastai_model('Wide Residual Networks', 'Wide Residual Networks', 'https://arxiv.org/pdf/1605.07146.pdf') def wrn(pre): return load_pre(pre, wrn_50_2f, 'wrn_50_2f') @_fastai_model('Densenet-121', 'Densely Connected Convolutional Networks', 'https://arxiv.org/pdf/1608.06993.pdf') def dn121(pre): return children(densenet121(pre))[0] @_fastai_model('Densenet-169', 'Densely Connected Convolutional Networks', 'https://arxiv.org/pdf/1608.06993.pdf') def dn161(pre): return children(densenet161(pre))[0] @_fastai_model('Densenet-161', 'Densely Connected Convolutional Networks', 'https://arxiv.org/pdf/1608.06993.pdf') def dn169(pre): return children(densenet169(pre))[0] @_fastai_model('Densenet-201', 'Densely Connected Convolutional Networks', 'https://arxiv.org/pdf/1608.06993.pdf') def dn201(pre): return children(densenet201(pre))[0] @_fastai_model('Vgg-16 with batch norm added', 'Very Deep Convolutional Networks for Large-Scale Image Recognition', 'https://arxiv.org/pdf/1409.1556.pdf') def vgg16(pre): return children(vgg16_bn(pre))[0] @_fastai_model('Vgg-19 with batch norm added', 'Very Deep Convolutional Networks for Large-Scale Image Recognition', 'https://arxiv.org/pdf/1409.1556.pdf') def vgg19(pre): 
return children(vgg19_bn(pre))[0] @_fastai_model('Vgg-11 with U-Net', 'TernausNet: U-Net with VGG11 Encoder Pre-Trained on ImageNet for Image Segmentation', 'https://arxiv.org/pdf/1801.05746.pdf') def ternausnet(pre): return children(unet11(pre))
46.525773
122
0.739198
import os import torch, torchvision, torchtext from torch import nn, cuda, backends, FloatTensor, LongTensor, optim import torch.nn.functional as F from torch.autograd import Variable from torch.utils.data import Dataset, TensorDataset from torch.nn.init import kaiming_uniform, kaiming_normal from torchvision.transforms import Compose from torchvision.models import resnet18, resnet34, resnet50, resnet101, resnet152 from torchvision.models import vgg16_bn, vgg19_bn from torchvision.models import densenet121, densenet161, densenet169, densenet201 from .models.resnext_50_32x4d import resnext_50_32x4d from .models.resnext_101_32x4d import resnext_101_32x4d from .models.resnext_101_64x4d import resnext_101_64x4d from .models.wrn_50_2f import wrn_50_2f from .models.inceptionresnetv2 import InceptionResnetV2 from .models.inceptionv4 import InceptionV4 from .models.nasnet import nasnetalarge from unet_models import unet11 import warnings warnings.filterwarnings('ignore', message='Implicit dimension choice', category=UserWarning) def children(m): return m if isinstance(m, (list, tuple)) else list(m.children()) def save_model(m, p): torch.save(m.state_dict(), p) def load_model(m, p): m.load_state_dict(torch.load(p, map_location=lambda storage, loc: storage)) def load_pre(pre, f, fn): m = f() path = os.path.dirname(__file__) if pre: load_model(m, f'{path}/weights/{fn}.pth') return m def _fastai_model(name, paper_title, paper_href): def add_docs_wrapper(f): f.__doc__ = f"""{name} model from `"{paper_title}" <{paper_href}>`_ Args: pre (bool): If True, returns a model pre-trained on ImageNet """ return f return add_docs_wrapper @_fastai_model('Inception 4', 'Inception-v4, Inception-ResNet and the Impact of Residual Connections on Learning', 'https://arxiv.org/pdf/1602.07261.pdf') def inception_4(pre): return children(inceptionv4(pretrained=pre))[0] @_fastai_model('Inception 4', 'Inception-v4, Inception-ResNet and the Impact of Residual Connections on Learning', 
'https://arxiv.org/pdf/1602.07261.pdf') def inceptionresnet_2(pre): return load_pre(pre, InceptionResnetV2, 'inceptionresnetv2-d579a627') @_fastai_model('ResNeXt 50', 'Aggregated Residual Transformations for Deep Neural Networks', 'https://arxiv.org/abs/1611.05431') def resnext50(pre): return load_pre(pre, resnext_50_32x4d, 'resnext_50_32x4d') @_fastai_model('ResNeXt 101_32', 'Aggregated Residual Transformations for Deep Neural Networks', 'https://arxiv.org/abs/1611.05431') def resnext101(pre): return load_pre(pre, resnext_101_32x4d, 'resnext_101_32x4d') @_fastai_model('ResNeXt 101_64', 'Aggregated Residual Transformations for Deep Neural Networks', 'https://arxiv.org/abs/1611.05431') def resnext101_64(pre): return load_pre(pre, resnext_101_64x4d, 'resnext_101_64x4d') @_fastai_model('Wide Residual Networks', 'Wide Residual Networks', 'https://arxiv.org/pdf/1605.07146.pdf') def wrn(pre): return load_pre(pre, wrn_50_2f, 'wrn_50_2f') @_fastai_model('Densenet-121', 'Densely Connected Convolutional Networks', 'https://arxiv.org/pdf/1608.06993.pdf') def dn121(pre): return children(densenet121(pre))[0] @_fastai_model('Densenet-169', 'Densely Connected Convolutional Networks', 'https://arxiv.org/pdf/1608.06993.pdf') def dn161(pre): return children(densenet161(pre))[0] @_fastai_model('Densenet-161', 'Densely Connected Convolutional Networks', 'https://arxiv.org/pdf/1608.06993.pdf') def dn169(pre): return children(densenet169(pre))[0] @_fastai_model('Densenet-201', 'Densely Connected Convolutional Networks', 'https://arxiv.org/pdf/1608.06993.pdf') def dn201(pre): return children(densenet201(pre))[0] @_fastai_model('Vgg-16 with batch norm added', 'Very Deep Convolutional Networks for Large-Scale Image Recognition', 'https://arxiv.org/pdf/1409.1556.pdf') def vgg16(pre): return children(vgg16_bn(pre))[0] @_fastai_model('Vgg-19 with batch norm added', 'Very Deep Convolutional Networks for Large-Scale Image Recognition', 'https://arxiv.org/pdf/1409.1556.pdf') def vgg19(pre): 
return children(vgg19_bn(pre))[0] @_fastai_model('Vgg-11 with U-Net', 'TernausNet: U-Net with VGG11 Encoder Pre-Trained on ImageNet for Image Segmentation', 'https://arxiv.org/pdf/1801.05746.pdf') def ternausnet(pre): return children(unet11(pre))
true
true
f70ec5c93336e5b029a6279f82499cd519691765
6,514
py
Python
models/model.py
qq456cvb/CPPF
79366978854ae18b14c69ac850ea64b9dc286081
[ "MIT" ]
23
2022-03-06T12:01:00.000Z
2022-03-31T19:26:39.000Z
models/model.py
qq456cvb/CPPF
79366978854ae18b14c69ac850ea64b9dc286081
[ "MIT" ]
null
null
null
models/model.py
qq456cvb/CPPF
79366978854ae18b14c69ac850ea64b9dc286081
[ "MIT" ]
2
2022-03-23T07:14:13.000Z
2022-03-24T07:18:55.000Z
import torch import torch.nn as nn import torch.nn.functional as F from .sprin import GlobalInfoProp, SparseSO3Conv import numpy as np class ResLayer(torch.nn.Module): def __init__(self, dim_in, dim_out, bn=False) -> None: super().__init__() assert(bn is False) self.fc1 = torch.nn.Linear(dim_in, dim_out) if bn: self.bn1 = torch.nn.BatchNorm1d(dim_out) else: self.bn1 = lambda x: x self.fc2 = torch.nn.Linear(dim_out, dim_out) if bn: self.bn2 = torch.nn.BatchNorm1d(dim_out) else: self.bn2 = lambda x: x if dim_in != dim_out: self.fc0 = torch.nn.Linear(dim_in, dim_out) else: self.fc0 = None def forward(self, x): x_res = x if self.fc0 is None else self.fc0(x) x = F.relu(self.bn1(self.fc1(x))) x = self.bn2(self.fc2(x)) return x + x_res class PointEncoder(nn.Module): def __init__(self, k, spfcs, out_dim, num_layers=2, num_nbr_feats=2) -> None: super().__init__() self.k = k self.spconvs = nn.ModuleList() self.spconvs.append(SparseSO3Conv(32, num_nbr_feats, out_dim, *spfcs)) self.aggrs = nn.ModuleList() self.aggrs.append(GlobalInfoProp(out_dim, out_dim // 4)) for _ in range(num_layers - 1): self.spconvs.append(SparseSO3Conv(32, out_dim + out_dim // 4, out_dim, *spfcs)) self.aggrs.append(GlobalInfoProp(out_dim, out_dim // 4)) def forward(self, pc, pc_normal, dist): nbrs_idx = torch.topk(dist, self.k, largest=False, sorted=False)[1] #[..., N, K] pc_nbrs = torch.gather(pc.unsqueeze(-3).expand(*pc.shape[:-1], *pc.shape[-2:]), -2, nbrs_idx[..., None].expand(*nbrs_idx.shape, pc.shape[-1])) #[..., N, K, 3] pc_nbrs_centered = pc_nbrs - pc.unsqueeze(-2) #[..., N, K, 3] pc_nbrs_norm = torch.norm(pc_nbrs_centered, dim=-1, keepdim=True) pc_normal_nbrs = torch.gather(pc_normal.unsqueeze(-3).expand(*pc_normal.shape[:-1], *pc_normal.shape[-2:]), -2, nbrs_idx[..., None].expand(*nbrs_idx.shape, pc_normal.shape[-1])) #[..., N, K, 3] pc_normal_cos = torch.sum(pc_normal_nbrs * pc_normal.unsqueeze(-2), -1, keepdim=True) feat = self.aggrs[0](self.spconvs[0](pc_nbrs, torch.cat([pc_nbrs_norm, 
pc_normal_cos], -1), pc)) for i in range(len(self.spconvs) - 1): spconv = self.spconvs[i + 1] aggr = self.aggrs[i + 1] feat_nbrs = torch.gather(feat.unsqueeze(-3).expand(*feat.shape[:-1], *feat.shape[-2:]), -2, nbrs_idx[..., None].expand(*nbrs_idx.shape, feat.shape[-1])) feat = aggr(spconv(pc_nbrs, feat_nbrs, pc)) return feat def forward_nbrs(self, pc, pc_normal, nbrs_idx): pc_nbrs = torch.gather(pc.unsqueeze(-3).expand(*pc.shape[:-1], *pc.shape[-2:]), -2, nbrs_idx[..., None].expand(*nbrs_idx.shape, pc.shape[-1])) #[..., N, K, 3] pc_nbrs_centered = pc_nbrs - pc.unsqueeze(-2) #[..., N, K, 3] pc_nbrs_norm = torch.norm(pc_nbrs_centered, dim=-1, keepdim=True) pc_normal_nbrs = torch.gather(pc_normal.unsqueeze(-3).expand(*pc_normal.shape[:-1], *pc_normal.shape[-2:]), -2, nbrs_idx[..., None].expand(*nbrs_idx.shape, pc_normal.shape[-1])) #[..., N, K, 3] pc_normal_cos = torch.sum(pc_normal_nbrs * pc_normal.unsqueeze(-2), -1, keepdim=True) feat = self.aggrs[0](self.spconvs[0](pc_nbrs, torch.cat([pc_nbrs_norm, pc_normal_cos], -1), pc)) for i in range(len(self.spconvs) - 1): spconv = self.spconvs[i + 1] aggr = self.aggrs[i + 1] feat_nbrs = torch.gather(feat.unsqueeze(-3).expand(*feat.shape[:-1], *feat.shape[-2:]), -2, nbrs_idx[..., None].expand(*nbrs_idx.shape, feat.shape[-1])) feat = aggr(spconv(pc_nbrs, feat_nbrs, pc)) return feat class PPFEncoder(nn.Module): def __init__(self, ppffcs, out_dim) -> None: super().__init__() self.res_layers = nn.ModuleList() for i in range(len(ppffcs) - 1): dim_in, dim_out = ppffcs[i], ppffcs[i + 1] self.res_layers.append(ResLayer(dim_in, dim_out, bn=False)) self.final = nn.Linear(ppffcs[-1], out_dim) def forward(self, pc, pc_normal, feat, dist=None, idxs=None): if idxs is not None: return self.forward_with_idx(pc[0], pc_normal[0], feat[0], idxs)[None] xx = pc.unsqueeze(-2) - pc.unsqueeze(-3) xx_normed = xx / (dist[..., None] + 1e-7) outputs = [] for idx in torch.chunk(torch.arange(pc.shape[1]), 5): feat_chunk = feat[..., idx, :] target_shape = 
[*feat_chunk.shape[:-2], feat_chunk.shape[-2], feat.shape[-2], feat_chunk.shape[-1]] # B x NC x N x F xx_normed_chunk = xx_normed[..., idx, :, :] ppf = torch.cat([ torch.sum(pc_normal[..., idx, :].unsqueeze(-2) * xx_normed_chunk, -1, keepdim=True), torch.sum(pc_normal.unsqueeze(-3) * xx_normed_chunk, -1, keepdim=True), torch.sum(pc_normal[..., idx, :].unsqueeze(-2) * pc_normal.unsqueeze(-3), -1, keepdim=True), dist[..., idx, :, None], ], -1) # ppf.zero_() final_feat = torch.cat([feat_chunk[..., None, :].expand(*target_shape), feat[..., None, :, :].expand(*target_shape), ppf], -1) output = final_feat for res_layer in self.res_layers: output = res_layer(output) outputs.append(output) output = torch.cat(outputs, dim=-3) return self.final(output) def forward_with_idx(self, pc, pc_normal, feat, idxs): a_idxs = idxs[:, 0] b_idxs = idxs[:, 1] xy = pc[a_idxs] - pc[b_idxs] xy_norm = torch.norm(xy, dim=-1) xy_normed = xy / (xy_norm[..., None] + 1e-7) pnormal_cos = pc_normal[a_idxs] * pc_normal[b_idxs] ppf = torch.cat([ torch.sum(pc_normal[a_idxs] * xy_normed, -1, keepdim=True), torch.sum(pc_normal[b_idxs] * xy_normed, -1, keepdim=True), torch.sum(pnormal_cos, -1, keepdim=True), xy_norm[..., None], ], -1) # ppf.zero_() final_feat = torch.cat([feat[a_idxs], feat[b_idxs], ppf], -1) output = final_feat for res_layer in self.res_layers: output = res_layer(output) return self.final(output)
47.202899
202
0.57553
import torch import torch.nn as nn import torch.nn.functional as F from .sprin import GlobalInfoProp, SparseSO3Conv import numpy as np class ResLayer(torch.nn.Module): def __init__(self, dim_in, dim_out, bn=False) -> None: super().__init__() assert(bn is False) self.fc1 = torch.nn.Linear(dim_in, dim_out) if bn: self.bn1 = torch.nn.BatchNorm1d(dim_out) else: self.bn1 = lambda x: x self.fc2 = torch.nn.Linear(dim_out, dim_out) if bn: self.bn2 = torch.nn.BatchNorm1d(dim_out) else: self.bn2 = lambda x: x if dim_in != dim_out: self.fc0 = torch.nn.Linear(dim_in, dim_out) else: self.fc0 = None def forward(self, x): x_res = x if self.fc0 is None else self.fc0(x) x = F.relu(self.bn1(self.fc1(x))) x = self.bn2(self.fc2(x)) return x + x_res class PointEncoder(nn.Module): def __init__(self, k, spfcs, out_dim, num_layers=2, num_nbr_feats=2) -> None: super().__init__() self.k = k self.spconvs = nn.ModuleList() self.spconvs.append(SparseSO3Conv(32, num_nbr_feats, out_dim, *spfcs)) self.aggrs = nn.ModuleList() self.aggrs.append(GlobalInfoProp(out_dim, out_dim // 4)) for _ in range(num_layers - 1): self.spconvs.append(SparseSO3Conv(32, out_dim + out_dim // 4, out_dim, *spfcs)) self.aggrs.append(GlobalInfoProp(out_dim, out_dim // 4)) def forward(self, pc, pc_normal, dist): nbrs_idx = torch.topk(dist, self.k, largest=False, sorted=False)[1] pc_nbrs = torch.gather(pc.unsqueeze(-3).expand(*pc.shape[:-1], *pc.shape[-2:]), -2, nbrs_idx[..., None].expand(*nbrs_idx.shape, pc.shape[-1])) pc_nbrs_centered = pc_nbrs - pc.unsqueeze(-2) pc_nbrs_norm = torch.norm(pc_nbrs_centered, dim=-1, keepdim=True) pc_normal_nbrs = torch.gather(pc_normal.unsqueeze(-3).expand(*pc_normal.shape[:-1], *pc_normal.shape[-2:]), -2, nbrs_idx[..., None].expand(*nbrs_idx.shape, pc_normal.shape[-1])) pc_normal_cos = torch.sum(pc_normal_nbrs * pc_normal.unsqueeze(-2), -1, keepdim=True) feat = self.aggrs[0](self.spconvs[0](pc_nbrs, torch.cat([pc_nbrs_norm, pc_normal_cos], -1), pc)) for i in range(len(self.spconvs) - 1): 
spconv = self.spconvs[i + 1] aggr = self.aggrs[i + 1] feat_nbrs = torch.gather(feat.unsqueeze(-3).expand(*feat.shape[:-1], *feat.shape[-2:]), -2, nbrs_idx[..., None].expand(*nbrs_idx.shape, feat.shape[-1])) feat = aggr(spconv(pc_nbrs, feat_nbrs, pc)) return feat def forward_nbrs(self, pc, pc_normal, nbrs_idx): pc_nbrs = torch.gather(pc.unsqueeze(-3).expand(*pc.shape[:-1], *pc.shape[-2:]), -2, nbrs_idx[..., None].expand(*nbrs_idx.shape, pc.shape[-1])) pc_nbrs_centered = pc_nbrs - pc.unsqueeze(-2) pc_nbrs_norm = torch.norm(pc_nbrs_centered, dim=-1, keepdim=True) pc_normal_nbrs = torch.gather(pc_normal.unsqueeze(-3).expand(*pc_normal.shape[:-1], *pc_normal.shape[-2:]), -2, nbrs_idx[..., None].expand(*nbrs_idx.shape, pc_normal.shape[-1])) pc_normal_cos = torch.sum(pc_normal_nbrs * pc_normal.unsqueeze(-2), -1, keepdim=True) feat = self.aggrs[0](self.spconvs[0](pc_nbrs, torch.cat([pc_nbrs_norm, pc_normal_cos], -1), pc)) for i in range(len(self.spconvs) - 1): spconv = self.spconvs[i + 1] aggr = self.aggrs[i + 1] feat_nbrs = torch.gather(feat.unsqueeze(-3).expand(*feat.shape[:-1], *feat.shape[-2:]), -2, nbrs_idx[..., None].expand(*nbrs_idx.shape, feat.shape[-1])) feat = aggr(spconv(pc_nbrs, feat_nbrs, pc)) return feat class PPFEncoder(nn.Module): def __init__(self, ppffcs, out_dim) -> None: super().__init__() self.res_layers = nn.ModuleList() for i in range(len(ppffcs) - 1): dim_in, dim_out = ppffcs[i], ppffcs[i + 1] self.res_layers.append(ResLayer(dim_in, dim_out, bn=False)) self.final = nn.Linear(ppffcs[-1], out_dim) def forward(self, pc, pc_normal, feat, dist=None, idxs=None): if idxs is not None: return self.forward_with_idx(pc[0], pc_normal[0], feat[0], idxs)[None] xx = pc.unsqueeze(-2) - pc.unsqueeze(-3) xx_normed = xx / (dist[..., None] + 1e-7) outputs = [] for idx in torch.chunk(torch.arange(pc.shape[1]), 5): feat_chunk = feat[..., idx, :] target_shape = [*feat_chunk.shape[:-2], feat_chunk.shape[-2], feat.shape[-2], feat_chunk.shape[-1]] xx_normed_chunk = 
xx_normed[..., idx, :, :] ppf = torch.cat([ torch.sum(pc_normal[..., idx, :].unsqueeze(-2) * xx_normed_chunk, -1, keepdim=True), torch.sum(pc_normal.unsqueeze(-3) * xx_normed_chunk, -1, keepdim=True), torch.sum(pc_normal[..., idx, :].unsqueeze(-2) * pc_normal.unsqueeze(-3), -1, keepdim=True), dist[..., idx, :, None], ], -1) final_feat = torch.cat([feat_chunk[..., None, :].expand(*target_shape), feat[..., None, :, :].expand(*target_shape), ppf], -1) output = final_feat for res_layer in self.res_layers: output = res_layer(output) outputs.append(output) output = torch.cat(outputs, dim=-3) return self.final(output) def forward_with_idx(self, pc, pc_normal, feat, idxs): a_idxs = idxs[:, 0] b_idxs = idxs[:, 1] xy = pc[a_idxs] - pc[b_idxs] xy_norm = torch.norm(xy, dim=-1) xy_normed = xy / (xy_norm[..., None] + 1e-7) pnormal_cos = pc_normal[a_idxs] * pc_normal[b_idxs] ppf = torch.cat([ torch.sum(pc_normal[a_idxs] * xy_normed, -1, keepdim=True), torch.sum(pc_normal[b_idxs] * xy_normed, -1, keepdim=True), torch.sum(pnormal_cos, -1, keepdim=True), xy_norm[..., None], ], -1) final_feat = torch.cat([feat[a_idxs], feat[b_idxs], ppf], -1) output = final_feat for res_layer in self.res_layers: output = res_layer(output) return self.final(output)
true
true
f70ec5d7df1d46b90aac95d7005cb457c693a297
4,095
py
Python
ea_sim/visualization_adv_tasks/convert_history_to_archive.py
lis-epfl/Tensoft-G21
7a83c5dabc12906c0a6bd1da0a28a131e9d5e144
[ "Apache-2.0" ]
1
2021-08-03T10:52:20.000Z
2021-08-03T10:52:20.000Z
ea_sim/visualization_adv_tasks/convert_history_to_archive.py
lis-epfl/Tensoft-G21
7a83c5dabc12906c0a6bd1da0a28a131e9d5e144
[ "Apache-2.0" ]
null
null
null
ea_sim/visualization_adv_tasks/convert_history_to_archive.py
lis-epfl/Tensoft-G21
7a83c5dabc12906c0a6bd1da0a28a131e9d5e144
[ "Apache-2.0" ]
1
2021-09-18T07:23:35.000Z
2021-09-18T07:23:35.000Z
import argparse import json import numpy as np import os import subprocess import sys sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) from params_conf import N_MODULES, MIN_NUM_MODULES, STIFF_TABLE from utils import parse_robot_string def convert_h_to_arch(h_file, morph_file, best_function, out_dir, seed): with open(morph_file) as morph_file: morphologies = json.load(morph_file) inverse_morph_dict = {v: k for k, v in morphologies.items()} archive = np.full(shape=(len(N_MODULES), len(STIFF_TABLE)), fill_value=None) last_gen = 0 with open(h_file) as hf: _ = hf.readline() # skip header for line in hf: last_gen, m_id, c_id, fit, _ = line.split(',') last_gen, m_id, c_id, fit = int(last_gen), int(m_id), int(c_id), float(fit) robot = parse_robot_string(inverse_morph_dict[m_id]) index_1 = len(robot) - MIN_NUM_MODULES index_2 = STIFF_TABLE.index(robot[0]['stiff']) value_1 = len(robot) value_2 = robot[0]['stiff'] if archive[index_1, index_2] is None or fit == best_function(archive[index_1, index_2][4], fit): archive[index_1, index_2] = (index_1, index_2, value_1, value_2, fit, m_id, c_id) out_filename = os.path.join(out_dir, 'entity_archive_{}_ngen_{}.csv'.format(seed, last_gen)) with open(out_filename, 'w') as out_file: out_file.write('1st_dim_indx,2nd_dim_indx,1st_dim:num_modules,2nd_dim:stiffness_value,fitness,e_id,nn_id\n') for entry in archive.flatten(): if entry is not None: index_1, index_2, value_1, value_2, fit, m_id, c_id = entry out_file.write('{},{},{},{},{:.4f},{},{}\n'.format( index_1, index_2, value_1, value_2, fit, m_id, c_id )) if __name__ == '__main__': parser = argparse.ArgumentParser(description='Script for converting an history file into a archive based on ' 'morphological features.') parser.add_argument('res_dirs', metavar='res_dirs', type=str, nargs='+', help='list of folders containing evolution results') parser.add_argument('--fit-func-best', metavar='fitness_function_best', type=str, nargs='?', default='min', 
help='function used to determine the best fitness, allowed values are: min, max') parser.add_argument('--owner', metavar='owner', type=str, action='store', nargs='+', default=None, help='User and group for chown') args = parser.parse_args() if args.fit_func_best not in ['min', 'max']: raise Exception('The function provided to determine the best fitness' + 'is not valid') else: fit_func_best = min if args.fit_func_best == 'min' else max if args.res_dirs is not None: for res_dir in args.res_dirs: settings_file = os.path.join(res_dir, 'settings.json') with open(settings_file) as sf: settings = json.load(sf) abs_res_dir_path = os.path.abspath(res_dir) evo_info_dir = os.path.join(abs_res_dir_path, 'evolution_info') out_dir = os.path.join(abs_res_dir_path, 'archives') os.makedirs(out_dir, exist_ok=True) for seed in settings['seeds']: history_file = os.path.join(evo_info_dir, 'history_{}.csv'.format(seed)) morphologies_file = os.path.join(abs_res_dir_path, 'morphologies', 'morphologies_{}.json'.format(seed)) convert_h_to_arch(history_file, morphologies_file, fit_func_best, out_dir, seed) if args.owner is not None and len(args.owner) == 2: try: exec_string = 'chown -R {}:{} {}'.format(args.owner[0], args.owner[1], out_dir) c_proc = subprocess.run(exec_string.split(' '), capture_output=True) except: raise Exception('An error occurred during the owner setting')
45.5
119
0.620757
import argparse import json import numpy as np import os import subprocess import sys sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) from params_conf import N_MODULES, MIN_NUM_MODULES, STIFF_TABLE from utils import parse_robot_string def convert_h_to_arch(h_file, morph_file, best_function, out_dir, seed): with open(morph_file) as morph_file: morphologies = json.load(morph_file) inverse_morph_dict = {v: k for k, v in morphologies.items()} archive = np.full(shape=(len(N_MODULES), len(STIFF_TABLE)), fill_value=None) last_gen = 0 with open(h_file) as hf: _ = hf.readline() for line in hf: last_gen, m_id, c_id, fit, _ = line.split(',') last_gen, m_id, c_id, fit = int(last_gen), int(m_id), int(c_id), float(fit) robot = parse_robot_string(inverse_morph_dict[m_id]) index_1 = len(robot) - MIN_NUM_MODULES index_2 = STIFF_TABLE.index(robot[0]['stiff']) value_1 = len(robot) value_2 = robot[0]['stiff'] if archive[index_1, index_2] is None or fit == best_function(archive[index_1, index_2][4], fit): archive[index_1, index_2] = (index_1, index_2, value_1, value_2, fit, m_id, c_id) out_filename = os.path.join(out_dir, 'entity_archive_{}_ngen_{}.csv'.format(seed, last_gen)) with open(out_filename, 'w') as out_file: out_file.write('1st_dim_indx,2nd_dim_indx,1st_dim:num_modules,2nd_dim:stiffness_value,fitness,e_id,nn_id\n') for entry in archive.flatten(): if entry is not None: index_1, index_2, value_1, value_2, fit, m_id, c_id = entry out_file.write('{},{},{},{},{:.4f},{},{}\n'.format( index_1, index_2, value_1, value_2, fit, m_id, c_id )) if __name__ == '__main__': parser = argparse.ArgumentParser(description='Script for converting an history file into a archive based on ' 'morphological features.') parser.add_argument('res_dirs', metavar='res_dirs', type=str, nargs='+', help='list of folders containing evolution results') parser.add_argument('--fit-func-best', metavar='fitness_function_best', type=str, nargs='?', default='min', help='function used to 
determine the best fitness, allowed values are: min, max') parser.add_argument('--owner', metavar='owner', type=str, action='store', nargs='+', default=None, help='User and group for chown') args = parser.parse_args() if args.fit_func_best not in ['min', 'max']: raise Exception('The function provided to determine the best fitness' + 'is not valid') else: fit_func_best = min if args.fit_func_best == 'min' else max if args.res_dirs is not None: for res_dir in args.res_dirs: settings_file = os.path.join(res_dir, 'settings.json') with open(settings_file) as sf: settings = json.load(sf) abs_res_dir_path = os.path.abspath(res_dir) evo_info_dir = os.path.join(abs_res_dir_path, 'evolution_info') out_dir = os.path.join(abs_res_dir_path, 'archives') os.makedirs(out_dir, exist_ok=True) for seed in settings['seeds']: history_file = os.path.join(evo_info_dir, 'history_{}.csv'.format(seed)) morphologies_file = os.path.join(abs_res_dir_path, 'morphologies', 'morphologies_{}.json'.format(seed)) convert_h_to_arch(history_file, morphologies_file, fit_func_best, out_dir, seed) if args.owner is not None and len(args.owner) == 2: try: exec_string = 'chown -R {}:{} {}'.format(args.owner[0], args.owner[1], out_dir) c_proc = subprocess.run(exec_string.split(' '), capture_output=True) except: raise Exception('An error occurred during the owner setting')
true
true
f70ec64b9e31daafd1fb2f1ca0a900fb5ba86171
3,473
py
Python
pyexcel_xls/xlsw.py
pyexcel/pyexcel-xls
995cfd273d5360947a528ff3a1ed3f9e52a429ad
[ "BSD-3-Clause" ]
40
2016-05-18T20:09:39.000Z
2022-02-09T06:39:41.000Z
pyexcel_xls/xlsw.py
wenxuefeng3930/pyexcel-xls
995cfd273d5360947a528ff3a1ed3f9e52a429ad
[ "BSD-3-Clause" ]
46
2016-02-01T22:12:31.000Z
2021-10-07T18:57:05.000Z
pyexcel_xls/xlsw.py
wenxuefeng3930/pyexcel-xls
995cfd273d5360947a528ff3a1ed3f9e52a429ad
[ "BSD-3-Clause" ]
24
2016-01-29T12:26:27.000Z
2021-10-31T15:37:15.000Z
""" pyexcel_xlsw ~~~~~~~~~~~~~~~~~~~ The lower level xls file format handler using xlwt :copyright: (c) 2016-2021 by Onni Software Ltd :license: New BSD License """ import datetime import xlrd from xlwt import XFStyle, Workbook from pyexcel_io import constants from pyexcel_io.plugin_api import IWriter, ISheetWriter DEFAULT_DATE_FORMAT = "DD/MM/YY" DEFAULT_TIME_FORMAT = "HH:MM:SS" DEFAULT_LONGTIME_FORMAT = "[HH]:MM:SS" DEFAULT_DATETIME_FORMAT = "%s %s" % (DEFAULT_DATE_FORMAT, DEFAULT_TIME_FORMAT) EMPTY_SHEET_NOT_ALLOWED = "xlwt does not support a book without any sheets" class XLSheetWriter(ISheetWriter): """ xls sheet writer """ def __init__(self, xls_book, xls_sheet, sheet_name): if sheet_name is None: sheet_name = constants.DEFAULT_SHEET_NAME self._xls_book = xls_book self._xls_sheet = xls_sheet self._xls_sheet = self._xls_book.add_sheet(sheet_name) self.current_row = 0 def write_row(self, array): """ write a row into the file """ for i, value in enumerate(array): style = None tmp_array = [] if isinstance(value, datetime.datetime): tmp_array = [ value.year, value.month, value.day, value.hour, value.minute, value.second, ] value = xlrd.xldate.xldate_from_datetime_tuple(tmp_array, 0) style = XFStyle() style.num_format_str = DEFAULT_DATETIME_FORMAT elif isinstance(value, datetime.timedelta): value = value.days + value.seconds / 86_400 style = XFStyle() style.num_format_str = DEFAULT_LONGTIME_FORMAT elif isinstance(value, datetime.date): tmp_array = [value.year, value.month, value.day] value = xlrd.xldate.xldate_from_date_tuple(tmp_array, 0) style = XFStyle() style.num_format_str = DEFAULT_DATE_FORMAT elif isinstance(value, datetime.time): tmp_array = [value.hour, value.minute, value.second] value = xlrd.xldate.xldate_from_time_tuple(tmp_array) style = XFStyle() style.num_format_str = DEFAULT_TIME_FORMAT if style: self._xls_sheet.write(self.current_row, i, value, style) else: self._xls_sheet.write(self.current_row, i, value) self.current_row += 1 def close(self): 
pass class XLSWriter(IWriter): """ xls writer """ def __init__( self, file_alike_object, _, # file_type not used encoding="ascii", style_compression=2, **keywords, ): self.file_alike_object = file_alike_object self.work_book = Workbook( style_compression=style_compression, encoding=encoding ) def create_sheet(self, name): return XLSheetWriter(self.work_book, None, name) def write(self, incoming_dict): if incoming_dict: IWriter.write(self, incoming_dict) else: raise NotImplementedError(EMPTY_SHEET_NOT_ALLOWED) def close(self): """ This call actually save the file """ self.work_book.save(self.file_alike_object)
31.008929
78
0.589692
import datetime import xlrd from xlwt import XFStyle, Workbook from pyexcel_io import constants from pyexcel_io.plugin_api import IWriter, ISheetWriter DEFAULT_DATE_FORMAT = "DD/MM/YY" DEFAULT_TIME_FORMAT = "HH:MM:SS" DEFAULT_LONGTIME_FORMAT = "[HH]:MM:SS" DEFAULT_DATETIME_FORMAT = "%s %s" % (DEFAULT_DATE_FORMAT, DEFAULT_TIME_FORMAT) EMPTY_SHEET_NOT_ALLOWED = "xlwt does not support a book without any sheets" class XLSheetWriter(ISheetWriter): def __init__(self, xls_book, xls_sheet, sheet_name): if sheet_name is None: sheet_name = constants.DEFAULT_SHEET_NAME self._xls_book = xls_book self._xls_sheet = xls_sheet self._xls_sheet = self._xls_book.add_sheet(sheet_name) self.current_row = 0 def write_row(self, array): for i, value in enumerate(array): style = None tmp_array = [] if isinstance(value, datetime.datetime): tmp_array = [ value.year, value.month, value.day, value.hour, value.minute, value.second, ] value = xlrd.xldate.xldate_from_datetime_tuple(tmp_array, 0) style = XFStyle() style.num_format_str = DEFAULT_DATETIME_FORMAT elif isinstance(value, datetime.timedelta): value = value.days + value.seconds / 86_400 style = XFStyle() style.num_format_str = DEFAULT_LONGTIME_FORMAT elif isinstance(value, datetime.date): tmp_array = [value.year, value.month, value.day] value = xlrd.xldate.xldate_from_date_tuple(tmp_array, 0) style = XFStyle() style.num_format_str = DEFAULT_DATE_FORMAT elif isinstance(value, datetime.time): tmp_array = [value.hour, value.minute, value.second] value = xlrd.xldate.xldate_from_time_tuple(tmp_array) style = XFStyle() style.num_format_str = DEFAULT_TIME_FORMAT if style: self._xls_sheet.write(self.current_row, i, value, style) else: self._xls_sheet.write(self.current_row, i, value) self.current_row += 1 def close(self): pass class XLSWriter(IWriter): def __init__( self, file_alike_object, _, encoding="ascii", style_compression=2, **keywords, ): self.file_alike_object = file_alike_object self.work_book = Workbook( 
style_compression=style_compression, encoding=encoding ) def create_sheet(self, name): return XLSheetWriter(self.work_book, None, name) def write(self, incoming_dict): if incoming_dict: IWriter.write(self, incoming_dict) else: raise NotImplementedError(EMPTY_SHEET_NOT_ALLOWED) def close(self): self.work_book.save(self.file_alike_object)
true
true
f70ec7201f8c108b954356e237165ff9018c482b
3,781
py
Python
ethgreen/wallet/wallet_interested_store.py
ethgreen/ethgreen-blockchain
8f1a450897ab7a82326aea7e57e18ac2c03a9e83
[ "Apache-2.0" ]
11
2021-11-10T19:30:12.000Z
2022-02-09T04:30:29.000Z
ethgreen/wallet/wallet_interested_store.py
ethgreen/ethgreen-blockchain
8f1a450897ab7a82326aea7e57e18ac2c03a9e83
[ "Apache-2.0" ]
6
2021-11-16T17:11:03.000Z
2021-12-28T17:11:20.000Z
ethgreen/wallet/wallet_interested_store.py
ethgreen/ethgreen-blockchain
8f1a450897ab7a82326aea7e57e18ac2c03a9e83
[ "Apache-2.0" ]
3
2021-11-21T02:27:10.000Z
2022-03-15T08:34:47.000Z
from typing import List, Tuple, Optional import aiosqlite from ethgreen.types.blockchain_format.sized_bytes import bytes32 from ethgreen.util.db_wrapper import DBWrapper class WalletInterestedStore: """ Stores coin ids that we are interested in receiving """ db_connection: aiosqlite.Connection db_wrapper: DBWrapper @classmethod async def create(cls, wrapper: DBWrapper): self = cls() self.db_connection = wrapper.db self.db_wrapper = wrapper await self.db_connection.execute("CREATE TABLE IF NOT EXISTS interested_coins(coin_name text PRIMARY KEY)") await self.db_connection.execute( "CREATE TABLE IF NOT EXISTS interested_puzzle_hashes(puzzle_hash text PRIMARY KEY, wallet_id integer)" ) await self.db_connection.commit() return self async def _clear_database(self): cursor = await self.db_connection.execute("DELETE FROM puzzle_hashes") await cursor.close() cursor = await self.db_connection.execute("DELETE FROM interested_coins") await cursor.close() await self.db_connection.commit() async def get_interested_coin_ids(self) -> List[bytes32]: cursor = await self.db_connection.execute("SELECT coin_name FROM interested_coins") rows_hex = await cursor.fetchall() return [bytes32(bytes.fromhex(row[0])) for row in rows_hex] async def add_interested_coin_id(self, coin_id: bytes32, in_transaction: bool = False) -> None: if not in_transaction: await self.db_wrapper.lock.acquire() try: cursor = await self.db_connection.execute( "INSERT OR REPLACE INTO interested_coins VALUES (?)", (coin_id.hex(),) ) await cursor.close() finally: if not in_transaction: await self.db_connection.commit() self.db_wrapper.lock.release() async def get_interested_puzzle_hashes(self) -> List[Tuple[bytes32, int]]: cursor = await self.db_connection.execute("SELECT puzzle_hash, wallet_id FROM interested_puzzle_hashes") rows_hex = await cursor.fetchall() return [(bytes32(bytes.fromhex(row[0])), row[1]) for row in rows_hex] async def get_interested_puzzle_hash_wallet_id(self, puzzle_hash: bytes32) -> 
Optional[int]: cursor = await self.db_connection.execute( "SELECT wallet_id FROM interested_puzzle_hashes WHERE puzzle_hash=?", (puzzle_hash.hex(),) ) row = await cursor.fetchone() if row is None: return None return row[0] async def add_interested_puzzle_hash( self, puzzle_hash: bytes32, wallet_id: int, in_transaction: bool = False ) -> None: if not in_transaction: await self.db_wrapper.lock.acquire() try: cursor = await self.db_connection.execute( "INSERT OR REPLACE INTO interested_puzzle_hashes VALUES (?, ?)", (puzzle_hash.hex(), wallet_id) ) await cursor.close() finally: if not in_transaction: await self.db_connection.commit() self.db_wrapper.lock.release() async def remove_interested_puzzle_hash(self, puzzle_hash: bytes32, in_transaction: bool = False) -> None: if not in_transaction: await self.db_wrapper.lock.acquire() try: cursor = await self.db_connection.execute( "DELETE FROM interested_puzzle_hashes WHERE puzzle_hash=?", (puzzle_hash.hex(),) ) await cursor.close() finally: if not in_transaction: await self.db_connection.commit() self.db_wrapper.lock.release()
37.81
115
0.649035
from typing import List, Tuple, Optional import aiosqlite from ethgreen.types.blockchain_format.sized_bytes import bytes32 from ethgreen.util.db_wrapper import DBWrapper class WalletInterestedStore: db_connection: aiosqlite.Connection db_wrapper: DBWrapper @classmethod async def create(cls, wrapper: DBWrapper): self = cls() self.db_connection = wrapper.db self.db_wrapper = wrapper await self.db_connection.execute("CREATE TABLE IF NOT EXISTS interested_coins(coin_name text PRIMARY KEY)") await self.db_connection.execute( "CREATE TABLE IF NOT EXISTS interested_puzzle_hashes(puzzle_hash text PRIMARY KEY, wallet_id integer)" ) await self.db_connection.commit() return self async def _clear_database(self): cursor = await self.db_connection.execute("DELETE FROM puzzle_hashes") await cursor.close() cursor = await self.db_connection.execute("DELETE FROM interested_coins") await cursor.close() await self.db_connection.commit() async def get_interested_coin_ids(self) -> List[bytes32]: cursor = await self.db_connection.execute("SELECT coin_name FROM interested_coins") rows_hex = await cursor.fetchall() return [bytes32(bytes.fromhex(row[0])) for row in rows_hex] async def add_interested_coin_id(self, coin_id: bytes32, in_transaction: bool = False) -> None: if not in_transaction: await self.db_wrapper.lock.acquire() try: cursor = await self.db_connection.execute( "INSERT OR REPLACE INTO interested_coins VALUES (?)", (coin_id.hex(),) ) await cursor.close() finally: if not in_transaction: await self.db_connection.commit() self.db_wrapper.lock.release() async def get_interested_puzzle_hashes(self) -> List[Tuple[bytes32, int]]: cursor = await self.db_connection.execute("SELECT puzzle_hash, wallet_id FROM interested_puzzle_hashes") rows_hex = await cursor.fetchall() return [(bytes32(bytes.fromhex(row[0])), row[1]) for row in rows_hex] async def get_interested_puzzle_hash_wallet_id(self, puzzle_hash: bytes32) -> Optional[int]: cursor = await self.db_connection.execute( "SELECT 
wallet_id FROM interested_puzzle_hashes WHERE puzzle_hash=?", (puzzle_hash.hex(),) ) row = await cursor.fetchone() if row is None: return None return row[0] async def add_interested_puzzle_hash( self, puzzle_hash: bytes32, wallet_id: int, in_transaction: bool = False ) -> None: if not in_transaction: await self.db_wrapper.lock.acquire() try: cursor = await self.db_connection.execute( "INSERT OR REPLACE INTO interested_puzzle_hashes VALUES (?, ?)", (puzzle_hash.hex(), wallet_id) ) await cursor.close() finally: if not in_transaction: await self.db_connection.commit() self.db_wrapper.lock.release() async def remove_interested_puzzle_hash(self, puzzle_hash: bytes32, in_transaction: bool = False) -> None: if not in_transaction: await self.db_wrapper.lock.acquire() try: cursor = await self.db_connection.execute( "DELETE FROM interested_puzzle_hashes WHERE puzzle_hash=?", (puzzle_hash.hex(),) ) await cursor.close() finally: if not in_transaction: await self.db_connection.commit() self.db_wrapper.lock.release()
true
true
f70ec721475523ddf5e94b147c1e716254668fe7
574
py
Python
ui/maintenance_protocols/decompress.py
liyao001/BioQueue
2b2c9f023b988fd926a037eb4755f639632b2991
[ "Apache-2.0" ]
33
2017-03-12T16:26:45.000Z
2021-04-30T05:37:35.000Z
ui/maintenance_protocols/decompress.py
liyao001/BioQueue
2b2c9f023b988fd926a037eb4755f639632b2991
[ "Apache-2.0" ]
6
2017-04-21T08:44:47.000Z
2018-11-11T16:20:22.000Z
ui/maintenance_protocols/decompress.py
liyao001/BioQueue
2b2c9f023b988fd926a037eb4755f639632b2991
[ "Apache-2.0" ]
13
2017-03-12T16:26:56.000Z
2020-04-20T05:35:00.000Z
#!/usr/bin/env python # -*- coding: utf-8 -*- # @Time : 14/01/2018 01:04 AM # @Project : BioQueue # @Author : Li Yao # @File : gunzip.py def get_sub_protocol(db_obj, protocol_parent, step_order_start=1): steps = list() steps.append(db_obj(software='gunzip', parameter='{{InputFile}}', parent=protocol_parent, user_id=0, hash='541df26aff8e4d054a57c7e3717e91ca', step_order=step_order_start)) return step_order_start+len(steps), steps
31.888889
66
0.560976
def get_sub_protocol(db_obj, protocol_parent, step_order_start=1): steps = list() steps.append(db_obj(software='gunzip', parameter='{{InputFile}}', parent=protocol_parent, user_id=0, hash='541df26aff8e4d054a57c7e3717e91ca', step_order=step_order_start)) return step_order_start+len(steps), steps
true
true
f70ec72a7dd5c36e1338a62ba92bead14d1e8e76
846
py
Python
altair_transform/transform/joinaggregate.py
jakevdp/altair-transform
63094097e05891a8d12005b411ad8585b94e81fd
[ "MIT" ]
38
2019-03-23T20:12:06.000Z
2021-10-07T21:03:24.000Z
altair_transform/transform/joinaggregate.py
altair-viz/altair-transform
b65bf854de1e80f931e063d8fb2ec938773826fb
[ "MIT" ]
13
2019-07-19T03:33:07.000Z
2021-06-29T15:34:19.000Z
altair_transform/transform/joinaggregate.py
altair-viz/altair-transform
b65bf854de1e80f931e063d8fb2ec938773826fb
[ "MIT" ]
11
2019-07-19T02:48:35.000Z
2021-11-01T00:07:41.000Z
import altair as alt import pandas as pd from .visitor import visit from .aggregate import AGG_REPLACEMENTS @visit.register(alt.JoinAggregateTransform) def visit_joinaggregate( transform: alt.JoinAggregateTransform, df: pd.DataFrame ) -> pd.DataFrame: transform = transform.to_dict() groupby = transform.get("groupby") for aggregate in transform["joinaggregate"]: op = aggregate["op"] field = aggregate["field"] col = aggregate["as"] op = AGG_REPLACEMENTS.get(op, op) if field == "*" and field not in df.columns: field = df.columns[0] if groupby is None: df[col] = df[field].aggregate(op) else: result = df.groupby(groupby)[field].aggregate(op) result.name = col df = df.join(result, on=groupby) return df
29.172414
61
0.631206
import altair as alt import pandas as pd from .visitor import visit from .aggregate import AGG_REPLACEMENTS @visit.register(alt.JoinAggregateTransform) def visit_joinaggregate( transform: alt.JoinAggregateTransform, df: pd.DataFrame ) -> pd.DataFrame: transform = transform.to_dict() groupby = transform.get("groupby") for aggregate in transform["joinaggregate"]: op = aggregate["op"] field = aggregate["field"] col = aggregate["as"] op = AGG_REPLACEMENTS.get(op, op) if field == "*" and field not in df.columns: field = df.columns[0] if groupby is None: df[col] = df[field].aggregate(op) else: result = df.groupby(groupby)[field].aggregate(op) result.name = col df = df.join(result, on=groupby) return df
true
true
f70ec8116b154a5c5324c8498dcdda97090753ab
9,785
py
Python
habitat/tasks/nav/object_nav_task.py
Ram81/habitat-imitation-baselines
c6e11c8ebadbf1260e1bed58a5b8dfb7faf6a505
[ "MIT" ]
null
null
null
habitat/tasks/nav/object_nav_task.py
Ram81/habitat-imitation-baselines
c6e11c8ebadbf1260e1bed58a5b8dfb7faf6a505
[ "MIT" ]
null
null
null
habitat/tasks/nav/object_nav_task.py
Ram81/habitat-imitation-baselines
c6e11c8ebadbf1260e1bed58a5b8dfb7faf6a505
[ "MIT" ]
null
null
null
# Copyright (c) Facebook, Inc. and its affiliates. # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. import os from typing import Any, List, Optional import attr from cv2 import log import numpy as np from gym import spaces from habitat.config import Config from habitat.core.dataset import SceneState from habitat.core.logging import logger from habitat.core.registry import registry from habitat.core.simulator import AgentState, Sensor, SensorTypes from habitat.core.utils import not_none_validator from habitat.tasks.nav.nav import ( NavigationEpisode, NavigationGoal, NavigationTask ) try: from habitat.datasets.object_nav.object_nav_dataset import ( ObjectNavDatasetV1, ) except ImportError: pass task_cat2mpcat40 = [ 3, # ('chair', 2, 0) 5, # ('table', 4, 1) 6, # ('picture', 5, 2) 7, # ('cabinet', 6, 3) 8, # ('cushion', 7, 4) 10, # ('sofa', 9, 5), 11, # ('bed', 10, 6) 13, # ('chest_of_drawers', 12, 7), 14, # ('plant', 13, 8) 15, # ('sink', 14, 9) 18, # ('toilet', 17, 10), 19, # ('stool', 18, 11), 20, # ('towel', 19, 12) 22, # ('tv_monitor', 21, 13) 23, # ('shower', 22, 14) 25, # ('bathtub', 24, 15) 26, # ('counter', 25, 16), 27, # ('fireplace', 26, 17), 33, # ('gym_equipment', 32, 18), 34, # ('seating', 33, 19), 38, # ('clothes', 37, 20), 43, # ('foodstuff', 42, 21), 44, # ('stationery', 43, 22), 45, # ('fruit', 44, 23), 46, # ('plaything', 45, 24), 47, # ('hand_tool', 46, 25), 48, # ('game_equipment', 47, 26), 49, # ('kitchenware', 48, 27) ] mapping_mpcat40_to_goal21 = { 3: 1, 5: 2, 6: 3, 7: 4, 8: 5, 10: 6, 11: 7, 13: 8, 14: 9, 15: 10, 18: 11, 19: 12, 20: 13, 22: 14, 23: 15, 25: 16, 26: 17, 27: 18, 33: 19, 34: 20, 38: 21, 43: 22, # ('foodstuff', 42, task_cat: 21) 44: 28, # ('stationery', 43, task_cat: 22) 45: 26, # ('fruit', 44, task_cat: 23) 46: 25, # ('plaything', 45, task_cat: 24) 47: 24, # ('hand_tool', 46, task_cat: 25) 48: 23, # ('game_equipment', 47, task_cat: 26) 49: 27, # 
('kitchenware', 48, task_cat: 27) } @attr.s(auto_attribs=True, kw_only=True) class AgentStateSpec: r"""Agent data specifications that capture states of agent and sensor in replay state. """ position: Optional[List[float]] = attr.ib(default=None) rotation: Optional[List[float]] = attr.ib(default=None) sensor_data: Optional[dict] = attr.ib(default=None) @attr.s(auto_attribs=True, kw_only=True) class ReplayActionSpec: r"""Replay specifications that capture metadata associated with action. """ action: str = attr.ib(default=None, validator=not_none_validator) agent_state: Optional[AgentStateSpec] = attr.ib(default=None) @attr.s(auto_attribs=True, kw_only=True) class ObjectGoalNavEpisode(NavigationEpisode): r"""ObjectGoal Navigation Episode :param object_category: Category of the obect """ object_category: Optional[str] = None reference_replay: Optional[List[ReplayActionSpec]] = None scene_state: Optional[List[SceneState]] = None is_thda: Optional[bool] = False scene_dataset: Optional[str] = "mp3d" @property def goals_key(self) -> str: r"""The key to retrieve the goals""" return f"{os.path.basename(self.scene_id)}_{self.object_category}" @attr.s(auto_attribs=True) class ObjectViewLocation: r"""ObjectViewLocation provides information about a position around an object goal usually that is navigable and the object is visible with specific agent configuration that episode's dataset was created. that is target for navigation. That can be specify object_id, position and object category. An important part for metrics calculation are view points that describe success area for the navigation. Args: agent_state: navigable AgentState with a position and a rotation where the object is visible. iou: an intersection of a union of the object and a rectangle in the center of view. This metric is used to evaluate how good is the object view form current position. 
Higher iou means better view, iou equals 1.0 if whole object is inside of the rectangle and no pixel inside the rectangle belongs to anything except the object. """ agent_state: AgentState iou: Optional[float] @attr.s(auto_attribs=True, kw_only=True) class ObjectGoal(NavigationGoal): r"""Object goal provides information about an object that is target for navigation. That can be specify object_id, position and object category. An important part for metrics calculation are view points that describe success area for the navigation. Args: object_id: id that can be used to retrieve object from the semantic scene annotation object_name: name of the object object_category: object category name usually similar to scene semantic categories room_id: id of a room where object is located, can be used to retrieve room from the semantic scene annotation room_name: name of the room, where object is located view_points: navigable positions around the object with specified proximity of the object surface used for navigation metrics calculation. The object is visible from these positions. """ object_id: str = attr.ib(default=None, validator=not_none_validator) object_name: Optional[str] = None object_name_id: Optional[int] = None object_category: Optional[str] = None room_id: Optional[str] = None room_name: Optional[str] = None view_points: Optional[List[ObjectViewLocation]] = None @registry.register_sensor class ObjectGoalSensor(Sensor): r"""A sensor for Object Goal specification as observations which is used in ObjectGoal Navigation. The goal is expected to be specified by object_id or semantic category id. For the agent in simulator the forward direction is along negative-z. In polar coordinate format the angle returned is azimuth to the goal. Args: sim: a reference to the simulator for calculating task observations. config: a config for the ObjectGoalSensor sensor. 
Can contain field GOAL_SPEC that specifies which id use for goal specification, GOAL_SPEC_MAX_VAL the maximum object_id possible used for observation space definition. dataset: a Object Goal navigation dataset that contains dictionaries of categories id to text mapping. """ cls_uuid: str = "objectgoal" def __init__( self, sim, config: Config, dataset: "ObjectNavDatasetV1", *args: Any, **kwargs: Any, ): self._sim = sim self._dataset = dataset super().__init__(config=config) def _get_uuid(self, *args: Any, **kwargs: Any) -> str: return self.cls_uuid def _get_sensor_type(self, *args: Any, **kwargs: Any): return SensorTypes.SEMANTIC def _get_observation_space(self, *args: Any, **kwargs: Any): sensor_shape = (1,) max_value = self.config.GOAL_SPEC_MAX_VAL - 1 if self.config.GOAL_SPEC == "TASK_CATEGORY_ID": max_value = max( self._dataset.category_to_task_category_id.values() ) logger.info("max object cat: {}".format(max_value)) logger.info("cats: {}".format(self._dataset.category_to_task_category_id.values())) return spaces.Box( low=0, high=max_value, shape=sensor_shape, dtype=np.int64 ) def get_observation( self, observations, *args: Any, episode: ObjectGoalNavEpisode, **kwargs: Any, ) -> Optional[int]: if len(episode.goals) == 0: logger.error( f"No goal specified for episode {episode.episode_id}." ) return None if not isinstance(episode.goals[0], ObjectGoal): logger.error( f"First goal should be ObjectGoal, episode {episode.episode_id}." ) return None category_name = episode.object_category if self.config.GOAL_SPEC == "TASK_CATEGORY_ID": return np.array( [self._dataset.category_to_task_category_id[category_name]], dtype=np.int64, ) elif self.config.GOAL_SPEC == "OBJECT_ID": obj_goal = episode.goals[0] assert isinstance(obj_goal, ObjectGoal) # for type checking return np.array([obj_goal.object_name_id], dtype=np.int64) else: raise RuntimeError( "Wrong GOAL_SPEC specified for ObjectGoalSensor." 
) @registry.register_task(name="ObjectNav-v1") class ObjectNavigationTask(NavigationTask): r"""An Object Navigation Task class for a task specific methods. Used to explicitly state a type of the task in config. """ _is_episode_active: bool _prev_action: int def __init__(self, **kwargs) -> None: super().__init__(**kwargs) self._is_episode_active = False def overwrite_sim_config(self, sim_config, episode): super().overwrite_sim_config(sim_config, episode) sim_config.defrost() sim_config.scene_state = episode.scene_state sim_config.freeze() return sim_config def _check_episode_is_active(self, action, *args: Any, **kwargs: Any) -> bool: return not getattr(self, "is_stop_called", False)
33.62543
95
0.6465
import os from typing import Any, List, Optional import attr from cv2 import log import numpy as np from gym import spaces from habitat.config import Config from habitat.core.dataset import SceneState from habitat.core.logging import logger from habitat.core.registry import registry from habitat.core.simulator import AgentState, Sensor, SensorTypes from habitat.core.utils import not_none_validator from habitat.tasks.nav.nav import ( NavigationEpisode, NavigationGoal, NavigationTask ) try: from habitat.datasets.object_nav.object_nav_dataset import ( ObjectNavDatasetV1, ) except ImportError: pass task_cat2mpcat40 = [ 3, 5, 6, 7, 8, 10, 11, 13, 14, 15, 18, 19, 20, 22, 23, 25, 26, 27, 33, 34, 38, 43, 44, 45, 46, 47, 48, 49, ] mapping_mpcat40_to_goal21 = { 3: 1, 5: 2, 6: 3, 7: 4, 8: 5, 10: 6, 11: 7, 13: 8, 14: 9, 15: 10, 18: 11, 19: 12, 20: 13, 22: 14, 23: 15, 25: 16, 26: 17, 27: 18, 33: 19, 34: 20, 38: 21, 43: 22, 44: 28, 45: 26, 46: 25, 47: 24, 48: 23, 49: 27, } @attr.s(auto_attribs=True, kw_only=True) class AgentStateSpec: position: Optional[List[float]] = attr.ib(default=None) rotation: Optional[List[float]] = attr.ib(default=None) sensor_data: Optional[dict] = attr.ib(default=None) @attr.s(auto_attribs=True, kw_only=True) class ReplayActionSpec: action: str = attr.ib(default=None, validator=not_none_validator) agent_state: Optional[AgentStateSpec] = attr.ib(default=None) @attr.s(auto_attribs=True, kw_only=True) class ObjectGoalNavEpisode(NavigationEpisode): object_category: Optional[str] = None reference_replay: Optional[List[ReplayActionSpec]] = None scene_state: Optional[List[SceneState]] = None is_thda: Optional[bool] = False scene_dataset: Optional[str] = "mp3d" @property def goals_key(self) -> str: return f"{os.path.basename(self.scene_id)}_{self.object_category}" @attr.s(auto_attribs=True) class ObjectViewLocation: agent_state: AgentState iou: Optional[float] @attr.s(auto_attribs=True, kw_only=True) class ObjectGoal(NavigationGoal): object_id: str = 
attr.ib(default=None, validator=not_none_validator) object_name: Optional[str] = None object_name_id: Optional[int] = None object_category: Optional[str] = None room_id: Optional[str] = None room_name: Optional[str] = None view_points: Optional[List[ObjectViewLocation]] = None @registry.register_sensor class ObjectGoalSensor(Sensor): cls_uuid: str = "objectgoal" def __init__( self, sim, config: Config, dataset: "ObjectNavDatasetV1", *args: Any, **kwargs: Any, ): self._sim = sim self._dataset = dataset super().__init__(config=config) def _get_uuid(self, *args: Any, **kwargs: Any) -> str: return self.cls_uuid def _get_sensor_type(self, *args: Any, **kwargs: Any): return SensorTypes.SEMANTIC def _get_observation_space(self, *args: Any, **kwargs: Any): sensor_shape = (1,) max_value = self.config.GOAL_SPEC_MAX_VAL - 1 if self.config.GOAL_SPEC == "TASK_CATEGORY_ID": max_value = max( self._dataset.category_to_task_category_id.values() ) logger.info("max object cat: {}".format(max_value)) logger.info("cats: {}".format(self._dataset.category_to_task_category_id.values())) return spaces.Box( low=0, high=max_value, shape=sensor_shape, dtype=np.int64 ) def get_observation( self, observations, *args: Any, episode: ObjectGoalNavEpisode, **kwargs: Any, ) -> Optional[int]: if len(episode.goals) == 0: logger.error( f"No goal specified for episode {episode.episode_id}." ) return None if not isinstance(episode.goals[0], ObjectGoal): logger.error( f"First goal should be ObjectGoal, episode {episode.episode_id}." ) return None category_name = episode.object_category if self.config.GOAL_SPEC == "TASK_CATEGORY_ID": return np.array( [self._dataset.category_to_task_category_id[category_name]], dtype=np.int64, ) elif self.config.GOAL_SPEC == "OBJECT_ID": obj_goal = episode.goals[0] assert isinstance(obj_goal, ObjectGoal) return np.array([obj_goal.object_name_id], dtype=np.int64) else: raise RuntimeError( "Wrong GOAL_SPEC specified for ObjectGoalSensor." 
) @registry.register_task(name="ObjectNav-v1") class ObjectNavigationTask(NavigationTask): _is_episode_active: bool _prev_action: int def __init__(self, **kwargs) -> None: super().__init__(**kwargs) self._is_episode_active = False def overwrite_sim_config(self, sim_config, episode): super().overwrite_sim_config(sim_config, episode) sim_config.defrost() sim_config.scene_state = episode.scene_state sim_config.freeze() return sim_config def _check_episode_is_active(self, action, *args: Any, **kwargs: Any) -> bool: return not getattr(self, "is_stop_called", False)
true
true
f70ec8b62d9552a0f4c485bf78d176b374cc936b
8,312
py
Python
kubernetes_asyncio/client/models/v2beta1_metric_spec.py
tomplus/kubernetes-asyncio
11c3eb4d50ae822545572aa7b8c15f7153f65a1c
[ "Apache-2.0" ]
null
null
null
kubernetes_asyncio/client/models/v2beta1_metric_spec.py
tomplus/kubernetes-asyncio
11c3eb4d50ae822545572aa7b8c15f7153f65a1c
[ "Apache-2.0" ]
null
null
null
kubernetes_asyncio/client/models/v2beta1_metric_spec.py
tomplus/kubernetes-asyncio
11c3eb4d50ae822545572aa7b8c15f7153f65a1c
[ "Apache-2.0" ]
null
null
null
# coding: utf-8 """ Kubernetes No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501 The version of the OpenAPI document: v1.23.6 Generated by: https://openapi-generator.tech """ try: from inspect import getfullargspec except ImportError: from inspect import getargspec as getfullargspec import pprint import re # noqa: F401 import six from kubernetes_asyncio.client.configuration import Configuration class V2beta1MetricSpec(object): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech Do not edit the class manually. """ """ Attributes: openapi_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ openapi_types = { 'container_resource': 'V2beta1ContainerResourceMetricSource', 'external': 'V2beta1ExternalMetricSource', 'object': 'V2beta1ObjectMetricSource', 'pods': 'V2beta1PodsMetricSource', 'resource': 'V2beta1ResourceMetricSource', 'type': 'str' } attribute_map = { 'container_resource': 'containerResource', 'external': 'external', 'object': 'object', 'pods': 'pods', 'resource': 'resource', 'type': 'type' } def __init__(self, container_resource=None, external=None, object=None, pods=None, resource=None, type=None, local_vars_configuration=None): # noqa: E501 """V2beta1MetricSpec - a model defined in OpenAPI""" # noqa: E501 if local_vars_configuration is None: local_vars_configuration = Configuration.get_default_copy() self.local_vars_configuration = local_vars_configuration self._container_resource = None self._external = None self._object = None self._pods = None self._resource = None self._type = None self.discriminator = None if container_resource is not None: self.container_resource = container_resource if external is not None: self.external = external if object is not None: self.object = object if pods is not None: self.pods = pods if resource is 
not None: self.resource = resource self.type = type @property def container_resource(self): """Gets the container_resource of this V2beta1MetricSpec. # noqa: E501 :return: The container_resource of this V2beta1MetricSpec. # noqa: E501 :rtype: V2beta1ContainerResourceMetricSource """ return self._container_resource @container_resource.setter def container_resource(self, container_resource): """Sets the container_resource of this V2beta1MetricSpec. :param container_resource: The container_resource of this V2beta1MetricSpec. # noqa: E501 :type container_resource: V2beta1ContainerResourceMetricSource """ self._container_resource = container_resource @property def external(self): """Gets the external of this V2beta1MetricSpec. # noqa: E501 :return: The external of this V2beta1MetricSpec. # noqa: E501 :rtype: V2beta1ExternalMetricSource """ return self._external @external.setter def external(self, external): """Sets the external of this V2beta1MetricSpec. :param external: The external of this V2beta1MetricSpec. # noqa: E501 :type external: V2beta1ExternalMetricSource """ self._external = external @property def object(self): """Gets the object of this V2beta1MetricSpec. # noqa: E501 :return: The object of this V2beta1MetricSpec. # noqa: E501 :rtype: V2beta1ObjectMetricSource """ return self._object @object.setter def object(self, object): """Sets the object of this V2beta1MetricSpec. :param object: The object of this V2beta1MetricSpec. # noqa: E501 :type object: V2beta1ObjectMetricSource """ self._object = object @property def pods(self): """Gets the pods of this V2beta1MetricSpec. # noqa: E501 :return: The pods of this V2beta1MetricSpec. # noqa: E501 :rtype: V2beta1PodsMetricSource """ return self._pods @pods.setter def pods(self, pods): """Sets the pods of this V2beta1MetricSpec. :param pods: The pods of this V2beta1MetricSpec. 
# noqa: E501 :type pods: V2beta1PodsMetricSource """ self._pods = pods @property def resource(self): """Gets the resource of this V2beta1MetricSpec. # noqa: E501 :return: The resource of this V2beta1MetricSpec. # noqa: E501 :rtype: V2beta1ResourceMetricSource """ return self._resource @resource.setter def resource(self, resource): """Sets the resource of this V2beta1MetricSpec. :param resource: The resource of this V2beta1MetricSpec. # noqa: E501 :type resource: V2beta1ResourceMetricSource """ self._resource = resource @property def type(self): """Gets the type of this V2beta1MetricSpec. # noqa: E501 type is the type of metric source. It should be one of \"ContainerResource\", \"External\", \"Object\", \"Pods\" or \"Resource\", each mapping to a matching field in the object. Note: \"ContainerResource\" type is available on when the feature-gate HPAContainerMetrics is enabled # noqa: E501 :return: The type of this V2beta1MetricSpec. # noqa: E501 :rtype: str """ return self._type @type.setter def type(self, type): """Sets the type of this V2beta1MetricSpec. type is the type of metric source. It should be one of \"ContainerResource\", \"External\", \"Object\", \"Pods\" or \"Resource\", each mapping to a matching field in the object. Note: \"ContainerResource\" type is available on when the feature-gate HPAContainerMetrics is enabled # noqa: E501 :param type: The type of this V2beta1MetricSpec. 
# noqa: E501 :type type: str """ if self.local_vars_configuration.client_side_validation and type is None: # noqa: E501 raise ValueError("Invalid value for `type`, must not be `None`") # noqa: E501 self._type = type def to_dict(self, serialize=False): """Returns the model properties as a dict""" result = {} def convert(x): if hasattr(x, "to_dict"): args = getfullargspec(x.to_dict).args if len(args) == 1: return x.to_dict() else: return x.to_dict(serialize) else: return x for attr, _ in six.iteritems(self.openapi_types): value = getattr(self, attr) attr = self.attribute_map.get(attr, attr) if serialize else attr if isinstance(value, list): result[attr] = list(map( lambda x: convert(x), value )) elif isinstance(value, dict): result[attr] = dict(map( lambda item: (item[0], convert(item[1])), value.items() )) else: result[attr] = convert(value) return result def to_str(self): """Returns the string representation of the model""" return pprint.pformat(self.to_dict()) def __repr__(self): """For `print` and `pprint`""" return self.to_str() def __eq__(self, other): """Returns true if both objects are equal""" if not isinstance(other, V2beta1MetricSpec): return False return self.to_dict() == other.to_dict() def __ne__(self, other): """Returns true if both objects are not equal""" if not isinstance(other, V2beta1MetricSpec): return True return self.to_dict() != other.to_dict()
31.366038
302
0.617661
try: from inspect import getfullargspec except ImportError: from inspect import getargspec as getfullargspec import pprint import re import six from kubernetes_asyncio.client.configuration import Configuration class V2beta1MetricSpec(object): openapi_types = { 'container_resource': 'V2beta1ContainerResourceMetricSource', 'external': 'V2beta1ExternalMetricSource', 'object': 'V2beta1ObjectMetricSource', 'pods': 'V2beta1PodsMetricSource', 'resource': 'V2beta1ResourceMetricSource', 'type': 'str' } attribute_map = { 'container_resource': 'containerResource', 'external': 'external', 'object': 'object', 'pods': 'pods', 'resource': 'resource', 'type': 'type' } def __init__(self, container_resource=None, external=None, object=None, pods=None, resource=None, type=None, local_vars_configuration=None): if local_vars_configuration is None: local_vars_configuration = Configuration.get_default_copy() self.local_vars_configuration = local_vars_configuration self._container_resource = None self._external = None self._object = None self._pods = None self._resource = None self._type = None self.discriminator = None if container_resource is not None: self.container_resource = container_resource if external is not None: self.external = external if object is not None: self.object = object if pods is not None: self.pods = pods if resource is not None: self.resource = resource self.type = type @property def container_resource(self): return self._container_resource @container_resource.setter def container_resource(self, container_resource): self._container_resource = container_resource @property def external(self): return self._external @external.setter def external(self, external): self._external = external @property def object(self): return self._object @object.setter def object(self, object): self._object = object @property def pods(self): return self._pods @pods.setter def pods(self, pods): self._pods = pods @property def resource(self): return self._resource @resource.setter def 
resource(self, resource): self._resource = resource @property def type(self): return self._type @type.setter def type(self, type): if self.local_vars_configuration.client_side_validation and type is None: raise ValueError("Invalid value for `type`, must not be `None`") self._type = type def to_dict(self, serialize=False): result = {} def convert(x): if hasattr(x, "to_dict"): args = getfullargspec(x.to_dict).args if len(args) == 1: return x.to_dict() else: return x.to_dict(serialize) else: return x for attr, _ in six.iteritems(self.openapi_types): value = getattr(self, attr) attr = self.attribute_map.get(attr, attr) if serialize else attr if isinstance(value, list): result[attr] = list(map( lambda x: convert(x), value )) elif isinstance(value, dict): result[attr] = dict(map( lambda item: (item[0], convert(item[1])), value.items() )) else: result[attr] = convert(value) return result def to_str(self): return pprint.pformat(self.to_dict()) def __repr__(self): return self.to_str() def __eq__(self, other): if not isinstance(other, V2beta1MetricSpec): return False return self.to_dict() == other.to_dict() def __ne__(self, other): if not isinstance(other, V2beta1MetricSpec): return True return self.to_dict() != other.to_dict()
true
true
f70ec900d5a3779330fc5069041b7d547061f02c
765
py
Python
backend/api/fields.py
NeutralDread/overworld
0f608f83c10b666f2ed2cbe519186cf425a5e354
[ "Apache-2.0" ]
null
null
null
backend/api/fields.py
NeutralDread/overworld
0f608f83c10b666f2ed2cbe519186cf425a5e354
[ "Apache-2.0" ]
null
null
null
backend/api/fields.py
NeutralDread/overworld
0f608f83c10b666f2ed2cbe519186cf425a5e354
[ "Apache-2.0" ]
null
null
null
_game_fields = [ 'cover.image_id', 'first_release_date', 'genres.name', 'involved_companies.developer', 'involved_companies.publisher', 'involved_companies.company.country', 'involved_companies.company.name', 'name', 'platforms.name', 'screenshots.image_id', 'slug', 'summary', 'time_to_beat.normally', 'themes.name', ] _search_fields = [ 'first_release_date', 'name', 'slug', ] _popular_fields = [ 'cover.image_id', 'name', 'popularity', ] _backdrop_fields = [ 'name', 'screenshots.image_id', 'slug', ] game_fields = ','.join(_game_fields) search_fields = ','.join(_search_fields) popular_fields = ','.join(_popular_fields) backdrop_fields = ','.join(_backdrop_fields)
19.615385
44
0.648366
_game_fields = [ 'cover.image_id', 'first_release_date', 'genres.name', 'involved_companies.developer', 'involved_companies.publisher', 'involved_companies.company.country', 'involved_companies.company.name', 'name', 'platforms.name', 'screenshots.image_id', 'slug', 'summary', 'time_to_beat.normally', 'themes.name', ] _search_fields = [ 'first_release_date', 'name', 'slug', ] _popular_fields = [ 'cover.image_id', 'name', 'popularity', ] _backdrop_fields = [ 'name', 'screenshots.image_id', 'slug', ] game_fields = ','.join(_game_fields) search_fields = ','.join(_search_fields) popular_fields = ','.join(_popular_fields) backdrop_fields = ','.join(_backdrop_fields)
true
true
f70ec9f26a955802a506cc9f3c9095b2b09eb7e3
2,934
py
Python
Ryu/topology.py
JoshuaYu-crash/C4EP2-2021
62e2e5a2fb396c598e7e8a265515c342fb78f63e
[ "MIT" ]
null
null
null
Ryu/topology.py
JoshuaYu-crash/C4EP2-2021
62e2e5a2fb396c598e7e8a265515c342fb78f63e
[ "MIT" ]
null
null
null
Ryu/topology.py
JoshuaYu-crash/C4EP2-2021
62e2e5a2fb396c598e7e8a265515c342fb78f63e
[ "MIT" ]
null
null
null
from flask import Flask, render_template, request, jsonify from pyecharts import options as opts from pyecharts.charts import Graph import json import redis from flask_cors import * r = redis.Redis(host="127.0.0.1", port=6379) app = Flask(__name__) CORS(app, supports_credentials=True) @app.route("/dockermsg", methods=["POST"]) def dockerMsg(): data = request.json host = data["host"] datalist = data["data"] # print(datalist) r.set(host, json.dumps(datalist)) return "ok" @app.route("/getdockermsg", methods=["GET"]) def getDockerMsg(): host = request.args.get("host") docker = request.args.get("dockerdata") dockers = json.loads(r.get(host)) tar = None # print(dockers) for doc in dockers: print(doc["NetworkSettings"]["Networks"]["bridge"]["IPAddress"], docker) if docker == doc["NetworkSettings"]["Networks"]["bridge"]["IPAddress"]: tar = doc break print(tar) return jsonify(tar) def graph_base() -> Graph: nodes = [] links = [] categories = [ {"symbol": "circle", 'name': 'ryu'}, {"symbol": "diamond", 'name': 'host'}, {"symbol": "roundRect", 'name': 'dockerdata'}, ] ryu = opts.GraphNode(name="RYU", symbol_size=40, category=0) # symbol='roundRect' nodes.append(ryu) doc_id = 1 for key in r.keys(): host = opts.GraphNode(name=key, symbol_size=30, category=1) # symbol='diamond' nodes.append(host) ryuHostLink = opts.GraphLink(source="RYU", target=key) links.append(ryuHostLink) dockerlist = json.loads(r.get(key)) for doc in dockerlist: docName = doc["Names"][0] docInfo = str(key, encoding='utf-8') + '/' + doc["NetworkSettings"]["Networks"]["bridge"]["IPAddress"] new_node = opts.GraphNode(name=str(doc_id) + docName, symbol_size=20, category=2, value=docInfo) nodes.append(new_node) hostDocLink = opts.GraphLink(source=key, target=str(doc_id) + docName) links.append(hostDocLink) doc_id += 1 linestyle_opts = opts.LineStyleOpts(is_show=True, width=2, curve=0.1, type_="solid", color="orange", ) g = ( Graph() .add("", nodes, links, repulsion=1000, categories=categories, 
label_opts=opts.LabelOpts(is_show=True, position="left", color='white'), linestyle_opts=linestyle_opts) .set_global_opts(title_opts=opts.TitleOpts(title="")) ) return g @app.route("/graphchart", methods=["GET"]) def get_bar_chart(): c = graph_base() return c.dump_options_with_quotes() if __name__ == '__main__': app.run(host="127.0.0.1", port=5000, debug=True)
33.340909
114
0.576005
from flask import Flask, render_template, request, jsonify from pyecharts import options as opts from pyecharts.charts import Graph import json import redis from flask_cors import * r = redis.Redis(host="127.0.0.1", port=6379) app = Flask(__name__) CORS(app, supports_credentials=True) @app.route("/dockermsg", methods=["POST"]) def dockerMsg(): data = request.json host = data["host"] datalist = data["data"] r.set(host, json.dumps(datalist)) return "ok" @app.route("/getdockermsg", methods=["GET"]) def getDockerMsg(): host = request.args.get("host") docker = request.args.get("dockerdata") dockers = json.loads(r.get(host)) tar = None for doc in dockers: print(doc["NetworkSettings"]["Networks"]["bridge"]["IPAddress"], docker) if docker == doc["NetworkSettings"]["Networks"]["bridge"]["IPAddress"]: tar = doc break print(tar) return jsonify(tar) def graph_base() -> Graph: nodes = [] links = [] categories = [ {"symbol": "circle", 'name': 'ryu'}, {"symbol": "diamond", 'name': 'host'}, {"symbol": "roundRect", 'name': 'dockerdata'}, ] ryu = opts.GraphNode(name="RYU", symbol_size=40, category=0) nodes.append(ryu) doc_id = 1 for key in r.keys(): host = opts.GraphNode(name=key, symbol_size=30, category=1) nodes.append(host) ryuHostLink = opts.GraphLink(source="RYU", target=key) links.append(ryuHostLink) dockerlist = json.loads(r.get(key)) for doc in dockerlist: docName = doc["Names"][0] docInfo = str(key, encoding='utf-8') + '/' + doc["NetworkSettings"]["Networks"]["bridge"]["IPAddress"] new_node = opts.GraphNode(name=str(doc_id) + docName, symbol_size=20, category=2, value=docInfo) nodes.append(new_node) hostDocLink = opts.GraphLink(source=key, target=str(doc_id) + docName) links.append(hostDocLink) doc_id += 1 linestyle_opts = opts.LineStyleOpts(is_show=True, width=2, curve=0.1, type_="solid", color="orange", ) g = ( Graph() .add("", nodes, links, repulsion=1000, categories=categories, label_opts=opts.LabelOpts(is_show=True, position="left", color='white'), 
linestyle_opts=linestyle_opts) .set_global_opts(title_opts=opts.TitleOpts(title="")) ) return g @app.route("/graphchart", methods=["GET"]) def get_bar_chart(): c = graph_base() return c.dump_options_with_quotes() if __name__ == '__main__': app.run(host="127.0.0.1", port=5000, debug=True)
true
true
f70eca4f5372f1f42a5b19b9379ac5026d17bfca
829
py
Python
app/core/migrations/0010_requestlogs.py
shravands/django-restapi-recipe
c21d01ab3b0d92d249e638a1b503ea54dd6d69bd
[ "MIT" ]
null
null
null
app/core/migrations/0010_requestlogs.py
shravands/django-restapi-recipe
c21d01ab3b0d92d249e638a1b503ea54dd6d69bd
[ "MIT" ]
null
null
null
app/core/migrations/0010_requestlogs.py
shravands/django-restapi-recipe
c21d01ab3b0d92d249e638a1b503ea54dd6d69bd
[ "MIT" ]
null
null
null
# Generated by Django 3.0.3 on 2020-06-09 08:55 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('core', '0009_user_username'), ] operations = [ migrations.CreateModel( name='RequestLogs', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('ip_address', models.CharField(blank=True, max_length=255)), ('user_id', models.IntegerField(blank=True)), ('method_type', models.CharField(blank=True, max_length=50)), ('request_path', models.CharField(blank=True, max_length=255)), ('response_code', models.CharField(blank=True, max_length=15)), ], ), ]
33.16
114
0.589867
from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('core', '0009_user_username'), ] operations = [ migrations.CreateModel( name='RequestLogs', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('ip_address', models.CharField(blank=True, max_length=255)), ('user_id', models.IntegerField(blank=True)), ('method_type', models.CharField(blank=True, max_length=50)), ('request_path', models.CharField(blank=True, max_length=255)), ('response_code', models.CharField(blank=True, max_length=15)), ], ), ]
true
true
f70ecb6c0cf51dc2a65ebc399f0d6b7793c3420a
5,755
py
Python
dumper.py
juliuswwj/wchprog
8cab50d60fb224a4a526d3cbddc5512a7a315c0c
[ "Unlicense" ]
24
2018-02-05T08:18:04.000Z
2021-06-09T07:48:18.000Z
dumper.py
juliuswwj/wchprog
8cab50d60fb224a4a526d3cbddc5512a7a315c0c
[ "Unlicense" ]
null
null
null
dumper.py
juliuswwj/wchprog
8cab50d60fb224a4a526d3cbddc5512a7a315c0c
[ "Unlicense" ]
4
2018-10-22T11:51:47.000Z
2022-01-03T18:31:17.000Z
#!/usr/bin/python from __future__ import print_function import struct import sys import usb.core import usb.util from intelhex import IntelHex scrambleCode = (0x29, 0x52, 0x8C, 0x70) stats = [0xff, 0x02, 0x00, 0xf5, 0xe5, 0x75, 0x03, 0x04, 0x80, 0x05, 0xd2, 0x01, 0xe4, 0xef, 0x82, 0x83, 0x08, 0x24, 0xc2, 0x60, 0xe0, 0x12, 0x7f, 0x34, 0x10, 0x07, 0x22, 0x40, 0x54, 0x94, 0x30, 0x70, 0xc0, 0xf0, 0xaf, 0xd0, 0x44, 0xa3, 0x36, 0x74, 0x15, 0xc3, 0x09, 0x93, 0x53, 0xec, 0x48, 0x06, 0x0a, 0x14, 0x20, 0x25, 0x50, 0x64, 0xd4, 0x16, 0x43, 0x47, 0xd6, 0xe7, 0xea, 0x0c, 0x32, 0x3f, 0x46, 0x90, 0xc8, 0xdf, 0x38, 0x45, 0xb4, 0xd3, 0xfa, 0xa1, 0xc5, 0xca, 0xcc, 0xde, 0xfc, 0x0b, 0x23, 0x37, 0x42, 0xed, 0xfb, 0x2f, 0x95, 0x55, 0x85, 0xdc, 0x18, 0x26, 0x33, 0x7d, 0x89, 0xac, 0xae, 0xfe, 0x0f, 0x17, 0x1b, 0x27, 0x35, 0x39, 0x3e, 0x57, 0x78, 0x8f, 0xa9, 0xaa, 0xc1, 0xd9, 0xdd, 0xe3, 0xf3, 0xf8, 0x0d, 0x21, 0x3b, 0x3c, 0x73, 0x81, 0x87, 0x88, 0x8a, 0x99, 0xbf, 0xdb, 0xf2, 0xfd, 0x1a, 0x1f, 0x31, 0x5f, 0x6c, 0x7a, 0x7e, 0x8e, 0xbc, 0xd5, 0xd8, 0xda, 0xe9, 0xeb, 0xee, 0xf6, 0x11, 0x1c, 0x29, 0x2d, 0x56, 0x58, 0x7c, 0x8d, 0x91, 0x98, 0xb3, 0xb9, 0xd7, 0xe1, 0xe6, 0xe8, 0xf9, 0x13, 0x1e, 0x28, 0x2e, 0x41, 0x4e, 0x69, 0x79, 0x7b, 0x9e, 0x9f, 0xa0, 0xab, 0xad, 0xcf, 0xe2, 0x0e, 0x19, 0x1d, 0x2a, 0x4b, 0x52, 0x5b, 0x63, 0x84, 0x86, 0x8c, 0x9d, 0xa2, 0xb1, 0xb2, 0xc4, 0x2b, 0x49, 0x4a, 0x4c, 0x4d, 0x59, 0x61, 0x67, 0x68, 0x6b, 0x6d, 0x6e, 0x6f, 0x77, 0x92, 0x96, 0x9a, 0xa6, 0xa8, 0xb0, 0xb5, 0xbb, 0xc6, 0xc7, 0xc9, 0xcd, 0xd1, 0xf4, 0x2c, 0x3a, 0x3d, 0x4f, 0x51, 0x5a, 0x5c, 0x5d, 0x5e, 0x62, 0x65, 0x66, 0x6a, 0x71, 0x72, 0x76, 0x8b, 0x97, 0x9b, 0x9c, 0xa4, 0xa5, 0xa7, 0xb6, 0xb7, 0xb8, 0xba, 0xbd, 0xbe, 0xcb, 0xce, 0xf1, 0xf7] def scramble(l): return [v ^ scrambleCode[i%4] for i, v in enumerate(l)] def binStrOfList(l): return ''.join(chr(x) for x in l) class WCHISP: def __init__(self): # find our device dev = usb.core.find(idVendor=0x4348, idProduct=0x55e0) if dev is None: raise 
ValueError('Device not found') dev.set_configuration() cfg = dev.get_active_configuration() intf = cfg[(0, 0)] self.epout = usb.util.find_descriptor(intf, custom_match = lambda e: usb.util.endpoint_direction(e.bEndpointAddress) == usb.util.ENDPOINT_OUT) self.epin = usb.util.find_descriptor(intf, custom_match = lambda e: usb.util.endpoint_direction(e.bEndpointAddress) == usb.util.ENDPOINT_IN) def cmd(self, msg, length=64): self.writeb(msg) b = self.readb(length) if len(b) == 2: return struct.unpack('<H', b)[0] return b def xcmd(self, msg, exp): #xmsg = map(lambda x: hex(ord(x))[2:], msg) #print ' '.join(xmsg) #return 0 ret = self.cmd(msg) if ret != exp: xmsg = map(lambda x: hex(ord(x)), msg[0:4]) raise Exception('cmd[%s] return %d != %d' % (','.join(xmsg), ret, exp)) def info(self): v = self.cmd('\xa2\x13USB DBG CH559 & ISP' + '\0') self.cmd('\xbb\x00') return v def readb(self, size): return self.epin.read(size) def writeb(self, b): self.epout.write(b) def dump(self): # send the key b = '\xa6\x04' + struct.pack('BBBB', *scrambleCode) self.xcmd(b, 0) # find block of 16 0xFF at the end of the device memory block = [0xff] * 16 found = False for address in range(0x3ff0, -1, -1): print('\rLooking for address 0x{:04X}'.format(address), end='') r = self.cmd('\xa7\16' + struct.pack('<H', address) + binStrOfList(scramble(block))) if r == 0: print('\nFound 0xFF block at address 0x{:04X}'.format(address)) found = True break if not found: print('\nUnable to find 0xFF block') return memdump = IntelHex() memdump.puts(address, binStrOfList(block)) print('Starting flash dumping') base = [0xa7, 16, 0, 0] nTry = 0 nBytes = 0 for address in range(address - 1, - 1, -1): block[1:] = block[:-1] # shift base[2:4] = address & 0xFF, address >> 8 found = False for i in range(256): i = stats[i] block[0] = i nTry += 1 r = self.cmd(binStrOfList(base + scramble(block)), 4) if r == 0: # verification ok, we found the correct byte print('{:02X} '.format(i), end='') sys.stdout.flush() found = True 
nBytes += 1 memdump[address] = i break if not found: raise ValueError('Unable to find correct ' 'byte for address 0x{:04X}'.format(address)) output_bin = 'out.bin' output_hex = 'out.hex' print('\nDone, writing output files {} and {}'. format(output_bin, output_hex)) print('Ntry = {} {:.2f}try/bytes'.format(nTry, float(nTry) / nBytes)) memdump.tobinfile(output_bin) memdump.tofile(output_hex, format='hex') isp = WCHISP() # check chip ID and bootloader presence if isp.info() != 0x52: raise IOError("not a CH552T device") # dump flash isp.dump()
36.194969
150
0.541095
from __future__ import print_function import struct import sys import usb.core import usb.util from intelhex import IntelHex scrambleCode = (0x29, 0x52, 0x8C, 0x70) stats = [0xff, 0x02, 0x00, 0xf5, 0xe5, 0x75, 0x03, 0x04, 0x80, 0x05, 0xd2, 0x01, 0xe4, 0xef, 0x82, 0x83, 0x08, 0x24, 0xc2, 0x60, 0xe0, 0x12, 0x7f, 0x34, 0x10, 0x07, 0x22, 0x40, 0x54, 0x94, 0x30, 0x70, 0xc0, 0xf0, 0xaf, 0xd0, 0x44, 0xa3, 0x36, 0x74, 0x15, 0xc3, 0x09, 0x93, 0x53, 0xec, 0x48, 0x06, 0x0a, 0x14, 0x20, 0x25, 0x50, 0x64, 0xd4, 0x16, 0x43, 0x47, 0xd6, 0xe7, 0xea, 0x0c, 0x32, 0x3f, 0x46, 0x90, 0xc8, 0xdf, 0x38, 0x45, 0xb4, 0xd3, 0xfa, 0xa1, 0xc5, 0xca, 0xcc, 0xde, 0xfc, 0x0b, 0x23, 0x37, 0x42, 0xed, 0xfb, 0x2f, 0x95, 0x55, 0x85, 0xdc, 0x18, 0x26, 0x33, 0x7d, 0x89, 0xac, 0xae, 0xfe, 0x0f, 0x17, 0x1b, 0x27, 0x35, 0x39, 0x3e, 0x57, 0x78, 0x8f, 0xa9, 0xaa, 0xc1, 0xd9, 0xdd, 0xe3, 0xf3, 0xf8, 0x0d, 0x21, 0x3b, 0x3c, 0x73, 0x81, 0x87, 0x88, 0x8a, 0x99, 0xbf, 0xdb, 0xf2, 0xfd, 0x1a, 0x1f, 0x31, 0x5f, 0x6c, 0x7a, 0x7e, 0x8e, 0xbc, 0xd5, 0xd8, 0xda, 0xe9, 0xeb, 0xee, 0xf6, 0x11, 0x1c, 0x29, 0x2d, 0x56, 0x58, 0x7c, 0x8d, 0x91, 0x98, 0xb3, 0xb9, 0xd7, 0xe1, 0xe6, 0xe8, 0xf9, 0x13, 0x1e, 0x28, 0x2e, 0x41, 0x4e, 0x69, 0x79, 0x7b, 0x9e, 0x9f, 0xa0, 0xab, 0xad, 0xcf, 0xe2, 0x0e, 0x19, 0x1d, 0x2a, 0x4b, 0x52, 0x5b, 0x63, 0x84, 0x86, 0x8c, 0x9d, 0xa2, 0xb1, 0xb2, 0xc4, 0x2b, 0x49, 0x4a, 0x4c, 0x4d, 0x59, 0x61, 0x67, 0x68, 0x6b, 0x6d, 0x6e, 0x6f, 0x77, 0x92, 0x96, 0x9a, 0xa6, 0xa8, 0xb0, 0xb5, 0xbb, 0xc6, 0xc7, 0xc9, 0xcd, 0xd1, 0xf4, 0x2c, 0x3a, 0x3d, 0x4f, 0x51, 0x5a, 0x5c, 0x5d, 0x5e, 0x62, 0x65, 0x66, 0x6a, 0x71, 0x72, 0x76, 0x8b, 0x97, 0x9b, 0x9c, 0xa4, 0xa5, 0xa7, 0xb6, 0xb7, 0xb8, 0xba, 0xbd, 0xbe, 0xcb, 0xce, 0xf1, 0xf7] def scramble(l): return [v ^ scrambleCode[i%4] for i, v in enumerate(l)] def binStrOfList(l): return ''.join(chr(x) for x in l) class WCHISP: def __init__(self): dev = usb.core.find(idVendor=0x4348, idProduct=0x55e0) if dev is None: raise ValueError('Device not found') 
dev.set_configuration() cfg = dev.get_active_configuration() intf = cfg[(0, 0)] self.epout = usb.util.find_descriptor(intf, custom_match = lambda e: usb.util.endpoint_direction(e.bEndpointAddress) == usb.util.ENDPOINT_OUT) self.epin = usb.util.find_descriptor(intf, custom_match = lambda e: usb.util.endpoint_direction(e.bEndpointAddress) == usb.util.ENDPOINT_IN) def cmd(self, msg, length=64): self.writeb(msg) b = self.readb(length) if len(b) == 2: return struct.unpack('<H', b)[0] return b def xcmd(self, msg, exp): ret = self.cmd(msg) if ret != exp: xmsg = map(lambda x: hex(ord(x)), msg[0:4]) raise Exception('cmd[%s] return %d != %d' % (','.join(xmsg), ret, exp)) def info(self): v = self.cmd('\xa2\x13USB DBG CH559 & ISP' + '\0') self.cmd('\xbb\x00') return v def readb(self, size): return self.epin.read(size) def writeb(self, b): self.epout.write(b) def dump(self): b = '\xa6\x04' + struct.pack('BBBB', *scrambleCode) self.xcmd(b, 0) block = [0xff] * 16 found = False for address in range(0x3ff0, -1, -1): print('\rLooking for address 0x{:04X}'.format(address), end='') r = self.cmd('\xa7\16' + struct.pack('<H', address) + binStrOfList(scramble(block))) if r == 0: print('\nFound 0xFF block at address 0x{:04X}'.format(address)) found = True break if not found: print('\nUnable to find 0xFF block') return memdump = IntelHex() memdump.puts(address, binStrOfList(block)) print('Starting flash dumping') base = [0xa7, 16, 0, 0] nTry = 0 nBytes = 0 for address in range(address - 1, - 1, -1): block[1:] = block[:-1] base[2:4] = address & 0xFF, address >> 8 found = False for i in range(256): i = stats[i] block[0] = i nTry += 1 r = self.cmd(binStrOfList(base + scramble(block)), 4) if r == 0: print('{:02X} '.format(i), end='') sys.stdout.flush() found = True nBytes += 1 memdump[address] = i break if not found: raise ValueError('Unable to find correct ' 'byte for address 0x{:04X}'.format(address)) output_bin = 'out.bin' output_hex = 'out.hex' print('\nDone, writing output files {} and 
{}'. format(output_bin, output_hex)) print('Ntry = {} {:.2f}try/bytes'.format(nTry, float(nTry) / nBytes)) memdump.tobinfile(output_bin) memdump.tofile(output_hex, format='hex') isp = WCHISP() if isp.info() != 0x52: raise IOError("not a CH552T device") isp.dump()
true
true
f70eccdf3d70966d6d64d44596755eb21c6d962f
2,833
py
Python
plugins/Limiter/test.py
jlu5/Limnoria
0e1e37a5a2bd5b717e11320b20773644b44502dd
[ "BSD-3-Clause" ]
40
2015-01-28T22:16:36.000Z
2021-12-15T02:22:56.000Z
plugins/Limiter/test.py
jlu5/Limnoria
0e1e37a5a2bd5b717e11320b20773644b44502dd
[ "BSD-3-Clause" ]
17
2015-01-05T21:06:22.000Z
2015-12-07T20:45:44.000Z
plugins/Limiter/test.py
jlu5/Limnoria
0e1e37a5a2bd5b717e11320b20773644b44502dd
[ "BSD-3-Clause" ]
28
2015-01-03T23:53:58.000Z
2021-06-01T05:27:20.000Z
### # Copyright (c) 2004-2005, Jeremiah Fincher # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # * Redistributions of source code must retain the above copyright notice, # this list of conditions, and the following disclaimer. # * Redistributions in binary form must reproduce the above copyright notice, # this list of conditions, and the following disclaimer in the # documentation and/or other materials provided with the distribution. # * Neither the name of the author of this software nor the name of # contributors to this software may be used to endorse or promote products # derived from this software without specific prior written consent. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. 
### from supybot.test import * class LimiterTestCase(ChannelPluginTestCase): plugins = ('Limiter',) config = {'supybot.plugins.Limiter.enable': True} def testEnforceLimit(self): origMin = conf.supybot.plugins.Limiter.minimumExcess() origMax = conf.supybot.plugins.Limiter.maximumExcess() try: conf.supybot.plugins.Limiter.minimumExcess.setValue(5) conf.supybot.plugins.Limiter.maximumExcess.setValue(10) self.irc.feedMsg(ircmsgs.join('#foo', prefix='foo!root@host')) m = self.irc.takeMsg() self.assertEqual(m, ircmsgs.limit('#foo', 1+10)) self.irc.feedMsg(ircmsgs.join('#foo', prefix='bar!root@host')) m = self.irc.takeMsg() self.failIf(m is not None) conf.supybot.plugins.Limiter.maximumExcess.setValue(7) self.irc.feedMsg(ircmsgs.part('#foo', prefix='bar!root@host')) m = self.irc.takeMsg() self.assertEqual(m, ircmsgs.limit('#foo', 1+5)) finally: conf.supybot.plugins.Limiter.minimumExcess.setValue(origMin) conf.supybot.plugins.Limiter.maximumExcess.setValue(origMax) # vim:set shiftwidth=4 softtabstop=4 expandtab textwidth=79:
49.701754
79
0.715849
rom supybot.test import * class LimiterTestCase(ChannelPluginTestCase): plugins = ('Limiter',) config = {'supybot.plugins.Limiter.enable': True} def testEnforceLimit(self): origMin = conf.supybot.plugins.Limiter.minimumExcess() origMax = conf.supybot.plugins.Limiter.maximumExcess() try: conf.supybot.plugins.Limiter.minimumExcess.setValue(5) conf.supybot.plugins.Limiter.maximumExcess.setValue(10) self.irc.feedMsg(ircmsgs.join('#foo', prefix='foo!root@host')) m = self.irc.takeMsg() self.assertEqual(m, ircmsgs.limit('#foo', 1+10)) self.irc.feedMsg(ircmsgs.join('#foo', prefix='bar!root@host')) m = self.irc.takeMsg() self.failIf(m is not None) conf.supybot.plugins.Limiter.maximumExcess.setValue(7) self.irc.feedMsg(ircmsgs.part('#foo', prefix='bar!root@host')) m = self.irc.takeMsg() self.assertEqual(m, ircmsgs.limit('#foo', 1+5)) finally: conf.supybot.plugins.Limiter.minimumExcess.setValue(origMin) conf.supybot.plugins.Limiter.maximumExcess.setValue(origMax)
true
true
f70ecd4ecdb9ab2866986a996b957e57d611e42c
1,547
py
Python
aiostripe/http_client.py
cypreess/aiostripe
8453a377b95373192c2e1598b9a951e8437d2bc9
[ "MIT" ]
null
null
null
aiostripe/http_client.py
cypreess/aiostripe
8453a377b95373192c2e1598b9a951e8437d2bc9
[ "MIT" ]
1
2018-08-18T16:14:50.000Z
2018-08-18T16:14:50.000Z
aiostripe/http_client.py
cypreess/aiostripe
8453a377b95373192c2e1598b9a951e8437d2bc9
[ "MIT" ]
null
null
null
import textwrap import aiohttp from aiostripe import error def new_default_http_client(*args, **kwargs): return AsyncioClient(*args, **kwargs) class HTTPClient(object): def __init__(self, verify_ssl_certs=True): self._verify_ssl_certs = verify_ssl_certs def request(self, method, url, headers, post_data=None): raise NotImplementedError('HTTPClient subclasses must implement `request`') class AsyncioClient(HTTPClient): name = 'aiohttp' async def request(self, method, url, headers, post_data=None): if isinstance(post_data, str): post_data = post_data.encode('utf8') with aiohttp.ClientSession(headers=headers, skip_auto_headers=('User-Agent', 'Content-Type', 'Authorization')) as client: try: async with client.request(method.upper(), url, data=post_data) as res: rbody = await res.read() rstatus = res.status rheaders = {k.lower(): v for k, v in res.headers.items()} except Exception as e: self._handle_request_error(e) assert False, 'unreachable' return rbody, rstatus, rheaders @staticmethod def _handle_request_error(e): msg = 'Unexpected error communicating with Stripe. If this problem persists, let me know at ' \ '<alex@downtownapp.co>.' msg = textwrap.fill(msg) + '\n\n(Network error: %r)' % e raise error.APIConnectionError(msg) from e
32.914894
112
0.625081
import textwrap import aiohttp from aiostripe import error def new_default_http_client(*args, **kwargs): return AsyncioClient(*args, **kwargs) class HTTPClient(object): def __init__(self, verify_ssl_certs=True): self._verify_ssl_certs = verify_ssl_certs def request(self, method, url, headers, post_data=None): raise NotImplementedError('HTTPClient subclasses must implement `request`') class AsyncioClient(HTTPClient): name = 'aiohttp' async def request(self, method, url, headers, post_data=None): if isinstance(post_data, str): post_data = post_data.encode('utf8') with aiohttp.ClientSession(headers=headers, skip_auto_headers=('User-Agent', 'Content-Type', 'Authorization')) as client: try: async with client.request(method.upper(), url, data=post_data) as res: rbody = await res.read() rstatus = res.status rheaders = {k.lower(): v for k, v in res.headers.items()} except Exception as e: self._handle_request_error(e) assert False, 'unreachable' return rbody, rstatus, rheaders @staticmethod def _handle_request_error(e): msg = 'Unexpected error communicating with Stripe. If this problem persists, let me know at ' \ '<alex@downtownapp.co>.' msg = textwrap.fill(msg) + '\n\n(Network error: %r)' % e raise error.APIConnectionError(msg) from e
true
true
f70ecd852b59e53b25f1ade391a6eaf6cb8f5d9e
9,513
py
Python
config/settings/base.py
JuvasArray/algerian-library
3e19fe23799198b0e0218148a692bcdd5df812b7
[ "MIT" ]
null
null
null
config/settings/base.py
JuvasArray/algerian-library
3e19fe23799198b0e0218148a692bcdd5df812b7
[ "MIT" ]
null
null
null
config/settings/base.py
JuvasArray/algerian-library
3e19fe23799198b0e0218148a692bcdd5df812b7
[ "MIT" ]
null
null
null
""" Base settings to build other settings files upon. """ import environ ROOT_DIR = environ.Path(__file__) - 3 # (algerian_library/config/settings/base.py - 3 = algerian_library/) APPS_DIR = ROOT_DIR.path('algerian_library') env = environ.Env() READ_DOT_ENV_FILE = env.bool('DJANGO_READ_DOT_ENV_FILE', default=False) if READ_DOT_ENV_FILE: # OS environment variables take precedence over variables from .env env.read_env(str(ROOT_DIR.path('.env'))) # GENERAL # ------------------------------------------------------------------------------ # https://docs.djangoproject.com/en/dev/ref/settings/#debug DEBUG = env.bool('DJANGO_DEBUG', False) # Local time zone. Choices are # http://en.wikipedia.org/wiki/List_of_tz_zones_by_name # though not all of them may be available with every OS. # In Windows, this must be set to your system time zone. TIME_ZONE = 'UTC' # https://docs.djangoproject.com/en/dev/ref/settings/#language-code LANGUAGE_CODE = 'en-us' # https://docs.djangoproject.com/en/dev/ref/settings/#site-id SITE_ID = 1 # https://docs.djangoproject.com/en/dev/ref/settings/#use-i18n USE_I18N = True # https://docs.djangoproject.com/en/dev/ref/settings/#use-l10n USE_L10N = True # https://docs.djangoproject.com/en/dev/ref/settings/#use-tz USE_TZ = True # DATABASES # ------------------------------------------------------------------------------ # https://docs.djangoproject.com/en/dev/ref/settings/#databases DATABASES = { 'default': env.db('DATABASE_URL', default='postgres:///algerian_library'), } DATABASES['default']['ATOMIC_REQUESTS'] = True # URLS # ------------------------------------------------------------------------------ # https://docs.djangoproject.com/en/dev/ref/settings/#root-urlconf ROOT_URLCONF = 'config.urls' # https://docs.djangoproject.com/en/dev/ref/settings/#wsgi-application WSGI_APPLICATION = 'config.wsgi.application' # APPS # ------------------------------------------------------------------------------ DJANGO_APPS = [ 'django.contrib.auth', 
'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.sites', 'django.contrib.messages', 'django.contrib.staticfiles', # 'django.contrib.humanize', # Handy template tags 'django.contrib.admin', ] THIRD_PARTY_APPS = [ 'crispy_forms', 'allauth', 'allauth.account', 'allauth.socialaccount', 'rest_framework', ] LOCAL_APPS = [ 'algerian_library.users.apps.UsersAppConfig', 'catalog.apps.CatalogConfig', ] # https://docs.djangoproject.com/en/dev/ref/settings/#installed-apps INSTALLED_APPS = DJANGO_APPS + THIRD_PARTY_APPS + LOCAL_APPS # MIGRATIONS # ------------------------------------------------------------------------------ # https://docs.djangoproject.com/en/dev/ref/settings/#migration-modules MIGRATION_MODULES = { 'sites': 'algerian_library.contrib.sites.migrations' } # AUTHENTICATION # ------------------------------------------------------------------------------ # https://docs.djangoproject.com/en/dev/ref/settings/#authentication-backends AUTHENTICATION_BACKENDS = [ 'django.contrib.auth.backends.ModelBackend', 'allauth.account.auth_backends.AuthenticationBackend', ] # https://docs.djangoproject.com/en/dev/ref/settings/#auth-user-model AUTH_USER_MODEL = 'users.User' # https://docs.djangoproject.com/en/dev/ref/settings/#login-redirect-url LOGIN_REDIRECT_URL = 'users:redirect' # https://docs.djangoproject.com/en/dev/ref/settings/#login-url LOGIN_URL = 'account_login' # PASSWORDS # ------------------------------------------------------------------------------ # https://docs.djangoproject.com/en/dev/ref/settings/#password-hashers PASSWORD_HASHERS = [ # https://docs.djangoproject.com/en/dev/topics/auth/passwords/#using-argon2-with-django 'django.contrib.auth.hashers.Argon2PasswordHasher', 'django.contrib.auth.hashers.PBKDF2PasswordHasher', 'django.contrib.auth.hashers.PBKDF2SHA1PasswordHasher', 'django.contrib.auth.hashers.BCryptSHA256PasswordHasher', 'django.contrib.auth.hashers.BCryptPasswordHasher', ] # 
https://docs.djangoproject.com/en/dev/ref/settings/#auth-password-validators AUTH_PASSWORD_VALIDATORS = [ { 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', }, { 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', }, { 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', }, { 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', }, ] # MIDDLEWARE # ------------------------------------------------------------------------------ # https://docs.djangoproject.com/en/dev/ref/settings/#middleware MIDDLEWARE = [ 'django.middleware.security.SecurityMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', ] # STATIC # ------------------------------------------------------------------------------ # https://docs.djangoproject.com/en/dev/ref/settings/#static-root STATIC_ROOT = str(ROOT_DIR('staticfiles')) # https://docs.djangoproject.com/en/dev/ref/settings/#static-url STATIC_URL = '/static/' # https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#std:setting-STATICFILES_DIRS STATICFILES_DIRS = [ str(APPS_DIR.path('static')), ] # https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#staticfiles-finders STATICFILES_FINDERS = [ 'django.contrib.staticfiles.finders.FileSystemFinder', 'django.contrib.staticfiles.finders.AppDirectoriesFinder', ] # MEDIA # ------------------------------------------------------------------------------ # https://docs.djangoproject.com/en/dev/ref/settings/#media-root MEDIA_ROOT = str(APPS_DIR('media')) # https://docs.djangoproject.com/en/dev/ref/settings/#media-url MEDIA_URL = '/media/' # TEMPLATES # 
------------------------------------------------------------------------------ # https://docs.djangoproject.com/en/dev/ref/settings/#templates TEMPLATES = [ { # https://docs.djangoproject.com/en/dev/ref/settings/#std:setting-TEMPLATES-BACKEND 'BACKEND': 'django.template.backends.django.DjangoTemplates', # https://docs.djangoproject.com/en/dev/ref/settings/#template-dirs 'DIRS': [ str(APPS_DIR.path('templates')), ], 'OPTIONS': { # https://docs.djangoproject.com/en/dev/ref/settings/#template-debug 'debug': DEBUG, # https://docs.djangoproject.com/en/dev/ref/settings/#template-loaders # https://docs.djangoproject.com/en/dev/ref/templates/api/#loader-types 'loaders': [ 'django.template.loaders.filesystem.Loader', 'django.template.loaders.app_directories.Loader', ], # https://docs.djangoproject.com/en/dev/ref/settings/#template-context-processors 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.template.context_processors.i18n', 'django.template.context_processors.media', 'django.template.context_processors.static', 'django.template.context_processors.tz', 'django.contrib.messages.context_processors.messages', ], }, }, ] # http://django-crispy-forms.readthedocs.io/en/latest/install.html#template-packs CRISPY_TEMPLATE_PACK = 'bootstrap4' # FIXTURES # ------------------------------------------------------------------------------ # https://docs.djangoproject.com/en/dev/ref/settings/#fixture-dirs FIXTURE_DIRS = ( str(APPS_DIR.path('fixtures')), ) # EMAIL # ------------------------------------------------------------------------------ # https://docs.djangoproject.com/en/dev/ref/settings/#email-backend EMAIL_BACKEND = env('DJANGO_EMAIL_BACKEND', default='django.core.mail.backends.smtp.EmailBackend') # ADMIN # ------------------------------------------------------------------------------ # Django Admin URL. 
ADMIN_URL = 'admin/' # https://docs.djangoproject.com/en/dev/ref/settings/#admins ADMINS = [ ("""Achour Ait Hamiche""", 'axchouraithamiche40@gmail.com'), ] # https://docs.djangoproject.com/en/dev/ref/settings/#managers MANAGERS = ADMINS # django-allauth # ------------------------------------------------------------------------------ ACCOUNT_ALLOW_REGISTRATION = env.bool('DJANGO_ACCOUNT_ALLOW_REGISTRATION', True) # https://django-allauth.readthedocs.io/en/latest/configuration.html ACCOUNT_AUTHENTICATION_METHOD = 'username' # https://django-allauth.readthedocs.io/en/latest/configuration.html ACCOUNT_EMAIL_REQUIRED = True # https://django-allauth.readthedocs.io/en/latest/configuration.html ACCOUNT_EMAIL_VERIFICATION = 'mandatory' # https://django-allauth.readthedocs.io/en/latest/configuration.html ACCOUNT_ADAPTER = 'algerian_library.users.adapters.AccountAdapter' # https://django-allauth.readthedocs.io/en/latest/configuration.html SOCIALACCOUNT_ADAPTER = 'algerian_library.users.adapters.SocialAccountAdapter' # Your stuff... # ------------------------------------------------------------------------------
39.473029
107
0.63019
import environ ROOT_DIR = environ.Path(__file__) - 3 APPS_DIR = ROOT_DIR.path('algerian_library') env = environ.Env() READ_DOT_ENV_FILE = env.bool('DJANGO_READ_DOT_ENV_FILE', default=False) if READ_DOT_ENV_FILE: env.read_env(str(ROOT_DIR.path('.env'))) = env.bool('DJANGO_DEBUG', False) TIME_ZONE = 'UTC' = 'en-us' = 1 = True = True = True S = { 'default': env.db('DATABASE_URL', default='postgres:///algerian_library'), } DATABASES['default']['ATOMIC_REQUESTS'] = True = 'config.urls' = 'config.wsgi.application' DJANGO_APPS = [ 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.sites', 'django.contrib.messages', 'django.contrib.staticfiles', admin', ] THIRD_PARTY_APPS = [ 'crispy_forms', 'allauth', 'allauth.account', 'allauth.socialaccount', 'rest_framework', ] LOCAL_APPS = [ 'algerian_library.users.apps.UsersAppConfig', 'catalog.apps.CatalogConfig', ] = DJANGO_APPS + THIRD_PARTY_APPS + LOCAL_APPS = { 'sites': 'algerian_library.contrib.sites.migrations' } = [ 'django.contrib.auth.backends.ModelBackend', 'allauth.account.auth_backends.AuthenticationBackend', ] = 'users.User' = 'users:redirect' = 'account_login' = [ .hashers.Argon2PasswordHasher', 'django.contrib.auth.hashers.PBKDF2PasswordHasher', 'django.contrib.auth.hashers.PBKDF2SHA1PasswordHasher', 'django.contrib.auth.hashers.BCryptSHA256PasswordHasher', 'django.contrib.auth.hashers.BCryptPasswordHasher', ] = [ { 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', }, { 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', }, { 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', }, { 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', }, ] = [ 'django.middleware.security.SecurityMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 
'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', ] = str(ROOT_DIR('staticfiles')) = '/static/' (APPS_DIR.path('static')), ] = [ 'django.contrib.staticfiles.finders.FileSystemFinder', 'django.contrib.staticfiles.finders.AppDirectoriesFinder', ] = str(APPS_DIR('media')) = '/media/' = [ { mplate.backends.django.DjangoTemplates', ': [ str(APPS_DIR.path('templates')), ], 'OPTIONS': { ebug': DEBUG, ders': [ 'django.template.loaders.filesystem.Loader', 'django.template.loaders.app_directories.Loader', ], sors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.template.context_processors.i18n', 'django.template.context_processors.media', 'django.template.context_processors.static', 'django.template.context_processors.tz', 'django.contrib.messages.context_processors.messages', ], }, }, ] E_PACK = 'bootstrap4' = ( str(APPS_DIR.path('fixtures')), ) = env('DJANGO_EMAIL_BACKEND', default='django.core.mail.backends.smtp.EmailBackend') ADMIN_URL = 'admin/' = [ ("""Achour Ait Hamiche""", 'axchouraithamiche40@gmail.com'), ] = ADMINS ACCOUNT_ALLOW_REGISTRATION = env.bool('DJANGO_ACCOUNT_ALLOW_REGISTRATION', True) ACCOUNT_AUTHENTICATION_METHOD = 'username' ACCOUNT_EMAIL_REQUIRED = True ACCOUNT_EMAIL_VERIFICATION = 'mandatory' ACCOUNT_ADAPTER = 'algerian_library.users.adapters.AccountAdapter' SOCIALACCOUNT_ADAPTER = 'algerian_library.users.adapters.SocialAccountAdapter'
true
true
f70ece528875d32f06dc3c54bf04e158f1a2d255
1,041
py
Python
Colloquiums/2015-2016/Colloquium_2/Exercise_3.py
Szymon-Budziak/ASD_exercises_solutions
36ccbdae03a6c7e4ad141a2b7b01bef9353574ee
[ "MIT" ]
7
2021-12-28T23:38:42.000Z
2022-03-29T16:36:16.000Z
Colloquiums/2015-2016/Colloquium_2/Exercise_3.py
Szymon-Budziak/ASD_exercises_solutions
36ccbdae03a6c7e4ad141a2b7b01bef9353574ee
[ "MIT" ]
null
null
null
Colloquiums/2015-2016/Colloquium_2/Exercise_3.py
Szymon-Budziak/ASD_exercises_solutions
36ccbdae03a6c7e4ad141a2b7b01bef9353574ee
[ "MIT" ]
4
2021-06-29T20:21:52.000Z
2022-03-12T10:04:17.000Z
# Zbiór przedziałów [(a[1], b[1]), ..., (a[n], b[n])], każdy przedział należy do [0, 1]. Opisać algorytm # który sprawdzi czy jest możliwy taki wybór przedziałów, aby cały przedział [0, 1] zawierał się # w wybranych odcinkach. Przedział ma składać się z jak najmniejszej ilości odcinków. def minimum_intervals(T): T.sort(key=lambda x: x[0]) i = 0 end = 0 result = [] while i < len(T) and end != 1: actual_start = T[i][0] actual_end = T[i][1] flag = True while i != len(T) and T[i][0] <= end: if actual_end < T[i][1]: actual_start = T[i][0] actual_end = T[i][1] i += 1 flag = False if flag: i += 1 result.append((actual_start, actual_end)) end = actual_end return result T = [[0, 0.4], [0, 0.35], [0.2, 0.6], [0.4, 0.6], [0.5, 0.6], [0.1, 0.9], [0.85, 1], [0.9, 1], [0.3, 0.4], [0.35, 0.4], [0.2, 0.75], [0.4, 1], [0.55, 1], [0.6, 1], [0.9, 1]] print(minimum_intervals(T))
33.580645
104
0.511047
def minimum_intervals(T): T.sort(key=lambda x: x[0]) i = 0 end = 0 result = [] while i < len(T) and end != 1: actual_start = T[i][0] actual_end = T[i][1] flag = True while i != len(T) and T[i][0] <= end: if actual_end < T[i][1]: actual_start = T[i][0] actual_end = T[i][1] i += 1 flag = False if flag: i += 1 result.append((actual_start, actual_end)) end = actual_end return result T = [[0, 0.4], [0, 0.35], [0.2, 0.6], [0.4, 0.6], [0.5, 0.6], [0.1, 0.9], [0.85, 1], [0.9, 1], [0.3, 0.4], [0.35, 0.4], [0.2, 0.75], [0.4, 1], [0.55, 1], [0.6, 1], [0.9, 1]] print(minimum_intervals(T))
true
true
f70ecf74f79d361841d4e6df6243e83e7cf59479
3,423
py
Python
fuelweb_test/tests/tests_strength/test_master_node_failover.py
Fiware/ops.Fuel-main-dev
779ffdcc9630d780777c60270fdc2f8baf87750a
[ "Apache-2.0" ]
null
null
null
fuelweb_test/tests/tests_strength/test_master_node_failover.py
Fiware/ops.Fuel-main-dev
779ffdcc9630d780777c60270fdc2f8baf87750a
[ "Apache-2.0" ]
null
null
null
fuelweb_test/tests/tests_strength/test_master_node_failover.py
Fiware/ops.Fuel-main-dev
779ffdcc9630d780777c60270fdc2f8baf87750a
[ "Apache-2.0" ]
null
null
null
# Copyright 2014 Mirantis, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from proboscis.asserts import assert_equal from proboscis import test from fuelweb_test.helpers import common from fuelweb_test.helpers import os_actions from fuelweb_test import settings from fuelweb_test import logger from fuelweb_test.tests import base_test_case @test(groups=["thread_non_func_1"]) class DeployHAOneControllerMasterNodeFail(base_test_case.TestBasic): @test(depends_on=[base_test_case.SetupEnvironment.prepare_slaves_3], groups=["non_functional", "deploy_ha_one_controller_flat_master_node_fail"]) def deploy_ha_one_controller_flat_master_node_fail(self): """Deploy HA cluster with nova-network and check it without master node Scenario: 1. Create cluster in ha mode with 1 controller 2. Add 1 node with controller role 3. Add 1 node with compute role 4. Deploy the cluster 5. Validate cluster was set up correctly, there are no dead services, there are no errors in logs 6. Verify networks 7. Verify network configuration on controller 8. Run OSTF 9. Shut down master node 10. 
Run openstack verification Duration 1000m """ self.env.revert_snapshot("ready_with_3_slaves") cluster_id = self.fuel_web.create_cluster( name=self.__class__.__name__, mode=settings.DEPLOYMENT_MODE ) self.fuel_web.update_nodes( cluster_id, { 'slave-01': ['controller'], 'slave-02': ['compute'] } ) self.fuel_web.deploy_cluster_wait(cluster_id) controller_ip = self.fuel_web.get_public_vip(cluster_id) os_conn = os_actions.OpenStackActions(controller_ip) self.fuel_web.assert_cluster_ready( os_conn, smiles_count=6, networks_count=1, timeout=300) self.fuel_web.verify_network(cluster_id) logger.info('PASS DEPLOYMENT') self.fuel_web.run_ostf( cluster_id=cluster_id) logger.info('PASS OSTF') logger.info('Destroy admin node...') self.env.nodes().admin.destroy() logger.info('Admin node destroyed') common_func = common.Common( controller_ip, settings.SERVTEST_USERNAME, settings.SERVTEST_PASSWORD, settings.SERVTEST_TENANT) # create instance server = common_func.create_instance() # get_instance details details = common_func.get_instance_detail(server) assert_equal(details.name, 'test_instance') # Check if instacne active common_func.verify_instance_status(server, 'ACTIVE') # delete instance common_func.delete_instance(server)
35.28866
79
0.665498
from proboscis.asserts import assert_equal from proboscis import test from fuelweb_test.helpers import common from fuelweb_test.helpers import os_actions from fuelweb_test import settings from fuelweb_test import logger from fuelweb_test.tests import base_test_case @test(groups=["thread_non_func_1"]) class DeployHAOneControllerMasterNodeFail(base_test_case.TestBasic): @test(depends_on=[base_test_case.SetupEnvironment.prepare_slaves_3], groups=["non_functional", "deploy_ha_one_controller_flat_master_node_fail"]) def deploy_ha_one_controller_flat_master_node_fail(self): self.env.revert_snapshot("ready_with_3_slaves") cluster_id = self.fuel_web.create_cluster( name=self.__class__.__name__, mode=settings.DEPLOYMENT_MODE ) self.fuel_web.update_nodes( cluster_id, { 'slave-01': ['controller'], 'slave-02': ['compute'] } ) self.fuel_web.deploy_cluster_wait(cluster_id) controller_ip = self.fuel_web.get_public_vip(cluster_id) os_conn = os_actions.OpenStackActions(controller_ip) self.fuel_web.assert_cluster_ready( os_conn, smiles_count=6, networks_count=1, timeout=300) self.fuel_web.verify_network(cluster_id) logger.info('PASS DEPLOYMENT') self.fuel_web.run_ostf( cluster_id=cluster_id) logger.info('PASS OSTF') logger.info('Destroy admin node...') self.env.nodes().admin.destroy() logger.info('Admin node destroyed') common_func = common.Common( controller_ip, settings.SERVTEST_USERNAME, settings.SERVTEST_PASSWORD, settings.SERVTEST_TENANT) server = common_func.create_instance() details = common_func.get_instance_detail(server) assert_equal(details.name, 'test_instance') common_func.verify_instance_status(server, 'ACTIVE') common_func.delete_instance(server)
true
true
f70ed06f593fd1a20fc7b1ab5ee37ec4ed1cbc04
6,980
py
Python
denonavr/denon_receiver_xml.py
scarface-4711/misc_python_tools
66ce06f2f77d2e49bdbac7fd11e6fa7cbb6e56c9
[ "MIT" ]
null
null
null
denonavr/denon_receiver_xml.py
scarface-4711/misc_python_tools
66ce06f2f77d2e49bdbac7fd11e6fa7cbb6e56c9
[ "MIT" ]
null
null
null
denonavr/denon_receiver_xml.py
scarface-4711/misc_python_tools
66ce06f2f77d2e49bdbac7fd11e6fa7cbb6e56c9
[ "MIT" ]
null
null
null
#!/usr/bin/env python # -*- coding: utf-8 -*- """ This python program saves test XMLs from denon receiver to current directory. Usage: python denon_receiver_xml.py --host 192.168.0.250 --prefix AVR-X4100W :copyright: (c) 2017 by Oliver Goetz. :license: MIT, see LICENSE for more details. """ import argparse from io import BytesIO import requests import xml.etree.ElementTree as ET from collections import namedtuple XML = namedtuple("XML", ["port", "type", "path", "tags", "filename"]) SAVED_XML = [XML("80", "post", "/goform/AppCommand.xml", ["GetFriendlyName"], "AppCommand-setup"), XML("80", "post", "/goform/AppCommand.xml", ["GetAllZonePowerStatus", "GetAllZoneSource", "GetRenameSource", "GetDeletedSource", "GetSurroundModeStatus", "GetToneControl", "GetAllZoneVolume", "GetAllZoneMuteStatus"], "AppCommand-update"), XML("80", "get", "/goform/Deviceinfo.xml", [], "Deviceinfo.xml"), XML("80", "get", "/goform/formMainZone_MainZoneXmlStatus.xml", [], "formMainZone_MainZoneXmlStatus"), XML("80", "get", "/goform/formMainZone_MainZoneXml.xml", [], "formMainZone_MainZoneXml"), XML("80", "get", "/goform/formNetAudio_StatusXml.xml", [], "formNetAudio_StatusXml"), XML("80", "get", "/goform/formTuner_TunerXml.xml", [], "formTuner_TunerXml"), XML("80", "get", "/goform/formTuner_HdXml.xml", [], "formTuner_HdXml"), XML("80", "get", "/goform/formZone2_Zone2XmlStatus.xml", [], "formZone2_Zone2XmlStatus"), XML("80", "get", "/goform/formZone3_Zone3XmlStatus.xml", [], "formZone3_Zone3XmlStatus"), XML("8080", "post", "/goform/AppCommand.xml", ["GetFriendlyName"], "AppCommand-setup"), XML("8080", "post", "/goform/AppCommand.xml", ["GetAllZonePowerStatus", "GetAllZoneSource", "GetRenameSource", "GetDeletedSource", "GetSurroundModeStatus", "GetToneControl", "GetAllZoneVolume", "GetAllZoneMuteStatus"], "AppCommand-update"), XML("8080", "get", "/goform/Deviceinfo.xml", [], "Deviceinfo.xml"), XML("8080", "get", "/goform/formMainZone_MainZoneXmlStatus.xml", [], "formMainZone_MainZoneXmlStatus"), 
XML("8080", "get", "/goform/formMainZone_MainZoneXml.xml", [], "formMainZone_MainZoneXml"), XML("8080", "get", "/goform/formNetAudio_StatusXml.xml", [], "formNetAudio_StatusXml"), XML("8080", "get", "/goform/formTuner_TunerXml.xml", [], "formTuner_TunerXml"), XML("8080", "get", "/goform/formTuner_HdXml.xml", [], "formTuner_HdXml"), XML("8080", "get", "/goform/formZone2_Zone2XmlStatus.xml", [], "formZone2_Zone2XmlStatus"), XML("8080", "get", "/goform/formZone3_Zone3XmlStatus.xml", [], "formZone3_Zone3XmlStatus")] def create_post_body(attribute_list): # Buffer XML body as binary IO body = BytesIO() chunks = [attribute_list[i:i+5] for i in range( 0, len(attribute_list), 5)] for i, chunk in enumerate(chunks): # Prepare POST XML body for AppCommand.xml post_root = ET.Element("tx") for attribute in chunk: # Append tags for each attribute item = ET.Element("cmd") item.set("id", "1") item.text = attribute post_root.append(item) post_tree = ET.ElementTree(post_root) post_tree.write(body, encoding="utf-8", xml_declaration=bool(i == 0)) body_bytes = body.getvalue() body.close() return body_bytes def http_post(host, port, path, tags, filename): filename = filename + "-" + str(port) data = create_post_body(tags) try: r = requests.post( "http://{host}:{port}/{path}".format( host=host, port=port, path=path), data=data) except requests.exceptions.ConnectionError: print("ConnectionError retrieving data from host {} port {} \ path {}".format(host, port, path)) filename = filename + "-ConnectionError.xml" with open("./{}".format(filename), "wb") as file: file.write("".encode()) except requests.exceptions.Timeout: print("Timeout retrieving data from host {} port {} path {}".format( host, port, path)) filename = filename + "-Timeout.xml" with open("./{}".format(filename), "wb") as file: file.write("".encode()) else: print("HTTP Status Code of {}: {}".format(path, r.status_code)) filename = filename + "-" + str(r.status_code) + ".xml" with open("./{}".format(filename), "wb") as file: 
file.write(r.content) def http_get(host, port, path, filename): filename = filename + "-" + str(port) try: r = requests.get( "http://{host}:{port}/{path}".format( host=host, port=port, path=path)) except requests.exceptions.ConnectionError: print("ConnectionError retrieving data from host {} path {}".format( host, path)) filename = filename + "-ConnectionError.xml" with open("./{}".format(filename), "wb") as file: file.write("".encode()) except requests.exceptions.Timeout: print("Timeout retrieving data from host {} path {}".format( host, path)) filename = filename + "-Timeout.xml" with open("./{}".format(filename), "wb") as file: file.write("".encode()) else: print("HTTP Status Code of {}: {}".format(path, r.status_code)) filename = filename + "-" + str(r.status_code) + ".xml" with open("./{}".format(filename), "wb") as file: file.write(r.content) if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument('--host', type=str, default='192.168.0.250', help='host of Denon AVR receiver') parser.add_argument('--prefix', type=str, default='AVR', help='prefix of filenames to be saved') args = parser.parse_args() for entry in SAVED_XML: if entry.type == "post": http_post(args.host, entry.port, entry.path, entry.tags, "{}-{}".format(args.prefix, entry.filename)) elif entry.type == "get": http_get(args.host, entry.port, entry.path, "{}-{}".format( args.prefix, entry.filename)) else: print("wrong type, only \"get\" and \"post\" are allowed")
40.581395
78
0.557593
import argparse from io import BytesIO import requests import xml.etree.ElementTree as ET from collections import namedtuple XML = namedtuple("XML", ["port", "type", "path", "tags", "filename"]) SAVED_XML = [XML("80", "post", "/goform/AppCommand.xml", ["GetFriendlyName"], "AppCommand-setup"), XML("80", "post", "/goform/AppCommand.xml", ["GetAllZonePowerStatus", "GetAllZoneSource", "GetRenameSource", "GetDeletedSource", "GetSurroundModeStatus", "GetToneControl", "GetAllZoneVolume", "GetAllZoneMuteStatus"], "AppCommand-update"), XML("80", "get", "/goform/Deviceinfo.xml", [], "Deviceinfo.xml"), XML("80", "get", "/goform/formMainZone_MainZoneXmlStatus.xml", [], "formMainZone_MainZoneXmlStatus"), XML("80", "get", "/goform/formMainZone_MainZoneXml.xml", [], "formMainZone_MainZoneXml"), XML("80", "get", "/goform/formNetAudio_StatusXml.xml", [], "formNetAudio_StatusXml"), XML("80", "get", "/goform/formTuner_TunerXml.xml", [], "formTuner_TunerXml"), XML("80", "get", "/goform/formTuner_HdXml.xml", [], "formTuner_HdXml"), XML("80", "get", "/goform/formZone2_Zone2XmlStatus.xml", [], "formZone2_Zone2XmlStatus"), XML("80", "get", "/goform/formZone3_Zone3XmlStatus.xml", [], "formZone3_Zone3XmlStatus"), XML("8080", "post", "/goform/AppCommand.xml", ["GetFriendlyName"], "AppCommand-setup"), XML("8080", "post", "/goform/AppCommand.xml", ["GetAllZonePowerStatus", "GetAllZoneSource", "GetRenameSource", "GetDeletedSource", "GetSurroundModeStatus", "GetToneControl", "GetAllZoneVolume", "GetAllZoneMuteStatus"], "AppCommand-update"), XML("8080", "get", "/goform/Deviceinfo.xml", [], "Deviceinfo.xml"), XML("8080", "get", "/goform/formMainZone_MainZoneXmlStatus.xml", [], "formMainZone_MainZoneXmlStatus"), XML("8080", "get", "/goform/formMainZone_MainZoneXml.xml", [], "formMainZone_MainZoneXml"), XML("8080", "get", "/goform/formNetAudio_StatusXml.xml", [], "formNetAudio_StatusXml"), XML("8080", "get", "/goform/formTuner_TunerXml.xml", [], "formTuner_TunerXml"), XML("8080", "get", 
"/goform/formTuner_HdXml.xml", [], "formTuner_HdXml"), XML("8080", "get", "/goform/formZone2_Zone2XmlStatus.xml", [], "formZone2_Zone2XmlStatus"), XML("8080", "get", "/goform/formZone3_Zone3XmlStatus.xml", [], "formZone3_Zone3XmlStatus")] def create_post_body(attribute_list): body = BytesIO() chunks = [attribute_list[i:i+5] for i in range( 0, len(attribute_list), 5)] for i, chunk in enumerate(chunks): post_root = ET.Element("tx") for attribute in chunk: item = ET.Element("cmd") item.set("id", "1") item.text = attribute post_root.append(item) post_tree = ET.ElementTree(post_root) post_tree.write(body, encoding="utf-8", xml_declaration=bool(i == 0)) body_bytes = body.getvalue() body.close() return body_bytes def http_post(host, port, path, tags, filename): filename = filename + "-" + str(port) data = create_post_body(tags) try: r = requests.post( "http://{host}:{port}/{path}".format( host=host, port=port, path=path), data=data) except requests.exceptions.ConnectionError: print("ConnectionError retrieving data from host {} port {} \ path {}".format(host, port, path)) filename = filename + "-ConnectionError.xml" with open("./{}".format(filename), "wb") as file: file.write("".encode()) except requests.exceptions.Timeout: print("Timeout retrieving data from host {} port {} path {}".format( host, port, path)) filename = filename + "-Timeout.xml" with open("./{}".format(filename), "wb") as file: file.write("".encode()) else: print("HTTP Status Code of {}: {}".format(path, r.status_code)) filename = filename + "-" + str(r.status_code) + ".xml" with open("./{}".format(filename), "wb") as file: file.write(r.content) def http_get(host, port, path, filename): filename = filename + "-" + str(port) try: r = requests.get( "http://{host}:{port}/{path}".format( host=host, port=port, path=path)) except requests.exceptions.ConnectionError: print("ConnectionError retrieving data from host {} path {}".format( host, path)) filename = filename + "-ConnectionError.xml" with 
open("./{}".format(filename), "wb") as file: file.write("".encode()) except requests.exceptions.Timeout: print("Timeout retrieving data from host {} path {}".format( host, path)) filename = filename + "-Timeout.xml" with open("./{}".format(filename), "wb") as file: file.write("".encode()) else: print("HTTP Status Code of {}: {}".format(path, r.status_code)) filename = filename + "-" + str(r.status_code) + ".xml" with open("./{}".format(filename), "wb") as file: file.write(r.content) if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument('--host', type=str, default='192.168.0.250', help='host of Denon AVR receiver') parser.add_argument('--prefix', type=str, default='AVR', help='prefix of filenames to be saved') args = parser.parse_args() for entry in SAVED_XML: if entry.type == "post": http_post(args.host, entry.port, entry.path, entry.tags, "{}-{}".format(args.prefix, entry.filename)) elif entry.type == "get": http_get(args.host, entry.port, entry.path, "{}-{}".format( args.prefix, entry.filename)) else: print("wrong type, only \"get\" and \"post\" are allowed")
true
true
f70ed079519d07f15d65e53bdf79fb821a1b32ee
572
py
Python
pajbot/models/roulette.py
sadlyfell/bullbot
b6ef96f61678fab4a245d8ccddf9d1ae7aae9fee
[ "MIT" ]
null
null
null
pajbot/models/roulette.py
sadlyfell/bullbot
b6ef96f61678fab4a245d8ccddf9d1ae7aae9fee
[ "MIT" ]
null
null
null
pajbot/models/roulette.py
sadlyfell/bullbot
b6ef96f61678fab4a245d8ccddf9d1ae7aae9fee
[ "MIT" ]
null
null
null
import logging from sqlalchemy import Column, INT from sqlalchemy_utc import UtcDateTime from pajbot import utils from pajbot.managers.db import Base log = logging.getLogger(__name__) class Roulette(Base): __tablename__ = "roulette" id = Column(INT, primary_key=True) user_id = Column(INT, index=True, nullable=False) created_at = Column(UtcDateTime(), nullable=False) points = Column(INT, nullable=False) def __init__(self, user_id, points): self.user_id = user_id self.created_at = utils.now() self.points = points
23.833333
54
0.715035
import logging from sqlalchemy import Column, INT from sqlalchemy_utc import UtcDateTime from pajbot import utils from pajbot.managers.db import Base log = logging.getLogger(__name__) class Roulette(Base): __tablename__ = "roulette" id = Column(INT, primary_key=True) user_id = Column(INT, index=True, nullable=False) created_at = Column(UtcDateTime(), nullable=False) points = Column(INT, nullable=False) def __init__(self, user_id, points): self.user_id = user_id self.created_at = utils.now() self.points = points
true
true
f70ed2aba5dd366ab069ddd450e6381cb10d7545
1,620
py
Python
course1/fibonacci.py
ropable/algorithmic_toolbox
b4dcf4fda19c394da2baa6eced0732bf50585237
[ "MIT" ]
1
2017-11-21T08:08:55.000Z
2017-11-21T08:08:55.000Z
course1/fibonacci.py
ropable/algorithmic_toolbox
b4dcf4fda19c394da2baa6eced0732bf50585237
[ "MIT" ]
null
null
null
course1/fibonacci.py
ropable/algorithmic_toolbox
b4dcf4fda19c394da2baa6eced0732bf50585237
[ "MIT" ]
null
null
null
# python3 import sys def fib_slow(n): '''Dumb (slow) example solution. ''' if (n <= 1): return n return fib_slow(n - 1) + fib_slow(n - 2) def fib_countup(n): '''Less-dumb 'count up as you go' solution. ''' if (n <= 1): return n x, y = 0, 1 for i in range(n): x, y = y, x + y return x def fib_memoize(n, saved={0: 0, 1: 1}): '''Use memoization to speed things up. ''' if (n <= 1): return n if n not in saved: saved[n] = fib_memoize(n-1, saved) + fib_memoize(n-2, saved) return saved[n] def fib_matrix(n): '''Use matrix multiplication to solve it. Ref: https://en.wikipedia.org/wiki/Fibonacci_number#Matrix_form ''' if (n <= 1): return n v1, v2, v3 = 1, 1, 0 # Initialise a matrix [[1,1],[1,0]] for rec in bin(n)[3:]: # Raise it to the nth power calc = v2 * v2 v1, v2, v3 = v1 * v1 + calc, (v1 + v3) * v2, calc + v3 * v3 if rec == '1': v1, v2, v3 = v1 + v2, v1, v2 return v2 def fib_fast_double(n): '''Use fast doubling method. Ref: https://www.nayuki.io/page/fast-fibonacci-algorithms ''' if n == 0: return (0, 1) else: a, b = fib_fast_double(n // 2) c = a * (b * 2 - a) d = a * a + b * b if n % 2 == 0: return (c, d) else: return (d, c + d) def fibonacci(n): """Returns F(n) """ if n < 0: raise ValueError return fib_fast_double(n)[0] if __name__ == '__main__': n = int(sys.stdin.read()) print(fibonacci(n))
21.315789
68
0.5
import sys def fib_slow(n): if (n <= 1): return n return fib_slow(n - 1) + fib_slow(n - 2) def fib_countup(n): if (n <= 1): return n x, y = 0, 1 for i in range(n): x, y = y, x + y return x def fib_memoize(n, saved={0: 0, 1: 1}): if (n <= 1): return n if n not in saved: saved[n] = fib_memoize(n-1, saved) + fib_memoize(n-2, saved) return saved[n] def fib_matrix(n): if (n <= 1): return n v1, v2, v3 = 1, 1, 0 for rec in bin(n)[3:]: calc = v2 * v2 v1, v2, v3 = v1 * v1 + calc, (v1 + v3) * v2, calc + v3 * v3 if rec == '1': v1, v2, v3 = v1 + v2, v1, v2 return v2 def fib_fast_double(n): if n == 0: return (0, 1) else: a, b = fib_fast_double(n // 2) c = a * (b * 2 - a) d = a * a + b * b if n % 2 == 0: return (c, d) else: return (d, c + d) def fibonacci(n): if n < 0: raise ValueError return fib_fast_double(n)[0] if __name__ == '__main__': n = int(sys.stdin.read()) print(fibonacci(n))
true
true
f70ed379460fc7865b9be45f22f1afeb631c5ba3
4,077
py
Python
stream_alert/rule_processor/config.py
serhatcan/streamalert
0549bb05163b7b54c30365312948caa21e44be98
[ "Apache-2.0" ]
1
2020-03-10T16:20:31.000Z
2020-03-10T16:20:31.000Z
stream_alert/rule_processor/config.py
serhatcan/streamalert
0549bb05163b7b54c30365312948caa21e44be98
[ "Apache-2.0" ]
null
null
null
stream_alert/rule_processor/config.py
serhatcan/streamalert
0549bb05163b7b54c30365312948caa21e44be98
[ "Apache-2.0" ]
null
null
null
''' Copyright 2017-present, Airbnb Inc. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ''' import json import os from collections import OrderedDict class ConfigError(Exception): pass def load_config(conf_dir='conf/'): """Load the configuration for StreamAlert. All configuration files live in the `conf` directory in JSON format. `sources` define a colleciton of AWS services (S3, Kinesis) supported as inputs to StreamAlert, specific entities (S3 buckets, Kinesis streams), and log types emitted from them. `logs` declare the schema for the listed log types in `sources`. Each key denotes the name of the log type, and includes 'keys' used to match rules to log fields. """ conf_files = { 'sources': 'sources.json', 'logs': 'logs.json' } config = {} for desc, filename in conf_files.iteritems(): with open(os.path.join(conf_dir, filename)) as data: try: config[desc] = json.load(data, object_pairs_hook=OrderedDict) except ValueError: raise ConfigError('Invalid JSON format for {}.json'.format(desc)) if validate_config(config): return config def validate_config(config): """Validate the StreamAlert configuration contains a valid structure. 
Checks for `logs.json`: - each log has a schema and parser declared Checks for `sources.json` - the sources contains either kinesis or s3 keys - each sources has a list of logs declared """ for config_key, settings in config.iteritems(): # check log declarations if config_key == 'logs': for log, attrs in settings.iteritems(): if not {'schema', 'parser'}.issubset(set(attrs.keys())): raise ConfigError('Schema or parser missing for {}'.format(log)) # check sources attributes elif config_key == 'sources': if not set(settings.keys()).issubset({'kinesis', 's3', 'sns'}): raise ConfigError('Sources missing \'kinesis\', \'s3\', or \'sns\' keys') for log, attrs in settings.iteritems(): for entity, entity_attrs in attrs.iteritems(): if 'logs' not in set(entity_attrs.keys()): raise ConfigError('Logs are not declared for {}'.format(entity)) if len(entity_attrs['logs']) == 0: raise ConfigError('Log list is empty for {}'.format(entity)) return True def load_env(context): """Get the current environment for the running Lambda function. Parses the invoked_function_arn from the given context object to get the name of the currently running alias (either production or staging) and the name of the function. Example: arn:aws:lambda:aws-region:acct-id:function:stream_alert:production Args: context: The AWS Lambda context object. Returns: {'lambda_region': 'region_name', 'account_id': <ACCOUNT_ID>, 'lambda_function_name': 'function_name', 'lambda_alias': 'qualifier'} """ env = {} if context: arn = context.invoked_function_arn.split(':') env['lambda_region'] = arn[3] env['account_id'] = arn[4] env['lambda_function_name'] = arn[6] env['lambda_alias'] = arn[7] else: env['lambda_region'] = 'us-east-1' env['account_id'] = '123456789012' env['lambda_function_name'] = 'test_streamalert_rule_processor' env['lambda_alias'] = 'development' return env
36.079646
89
0.646063
import json import os from collections import OrderedDict class ConfigError(Exception): pass def load_config(conf_dir='conf/'): conf_files = { 'sources': 'sources.json', 'logs': 'logs.json' } config = {} for desc, filename in conf_files.iteritems(): with open(os.path.join(conf_dir, filename)) as data: try: config[desc] = json.load(data, object_pairs_hook=OrderedDict) except ValueError: raise ConfigError('Invalid JSON format for {}.json'.format(desc)) if validate_config(config): return config def validate_config(config): for config_key, settings in config.iteritems(): if config_key == 'logs': for log, attrs in settings.iteritems(): if not {'schema', 'parser'}.issubset(set(attrs.keys())): raise ConfigError('Schema or parser missing for {}'.format(log)) elif config_key == 'sources': if not set(settings.keys()).issubset({'kinesis', 's3', 'sns'}): raise ConfigError('Sources missing \'kinesis\', \'s3\', or \'sns\' keys') for log, attrs in settings.iteritems(): for entity, entity_attrs in attrs.iteritems(): if 'logs' not in set(entity_attrs.keys()): raise ConfigError('Logs are not declared for {}'.format(entity)) if len(entity_attrs['logs']) == 0: raise ConfigError('Log list is empty for {}'.format(entity)) return True def load_env(context): env = {} if context: arn = context.invoked_function_arn.split(':') env['lambda_region'] = arn[3] env['account_id'] = arn[4] env['lambda_function_name'] = arn[6] env['lambda_alias'] = arn[7] else: env['lambda_region'] = 'us-east-1' env['account_id'] = '123456789012' env['lambda_function_name'] = 'test_streamalert_rule_processor' env['lambda_alias'] = 'development' return env
true
true
f70ed39ecc8bdbd9e8238a2309e2bc5cdca95fb6
5,838
py
Python
openstack_dashboard/test/integration_tests/helpers.py
izadorozhna/dashboard_integration_tests
c1ee45b98832cf3a4bb01aa37410afc3a96fd2a3
[ "Apache-2.0" ]
null
null
null
openstack_dashboard/test/integration_tests/helpers.py
izadorozhna/dashboard_integration_tests
c1ee45b98832cf3a4bb01aa37410afc3a96fd2a3
[ "Apache-2.0" ]
null
null
null
openstack_dashboard/test/integration_tests/helpers.py
izadorozhna/dashboard_integration_tests
c1ee45b98832cf3a4bb01aa37410afc3a96fd2a3
[ "Apache-2.0" ]
null
null
null
# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import datetime import os import sys import time import traceback import uuid import testtools import xvfbwrapper from openstack_dashboard.test.integration_tests import config from openstack_dashboard.test.integration_tests.pages import loginpage from openstack_dashboard.test.integration_tests import webdriver ROOT_PATH = os.path.dirname(os.path.abspath(__file__)) if ROOT_PATH not in sys.path: sys.path.append(ROOT_PATH) def gen_random_resource_name(resource="", timestamp=True): """Generate random resource name using uuid and timestamp. Input fields are usually limited to 255 or 80 characters hence their provide enough space for quite long resource names, but it might be the case that maximum field length is quite restricted, it is then necessary to consider using shorter resource argument or avoid using timestamp by setting timestamp argument to False. """ fields = ["horizon"] if resource: fields.append(resource) if timestamp: tstamp = time.strftime("%d-%m-%H-%M-%S") fields.append(tstamp) fields.append(str(uuid.uuid4()).replace("-", "")) return "_".join(fields) class BaseTestCase(testtools.TestCase): CONFIG = config.get_config() def setUp(self): if os.environ.get('INTEGRATION_TESTS', False): # Start a virtual display server for running the tests headless. 
if os.environ.get('SELENIUM_HEADLESS', False): self.vdisplay = xvfbwrapper.Xvfb(width=1280, height=720) args = [] # workaround for memory leak in Xvfb taken from: # http://blog.jeffterrace.com/2012/07/xvfb-memory-leak-workaround.html args.append("-noreset") # disables X access control args.append("-ac") if hasattr(self.vdisplay, 'extra_xvfb_args'): # xvfbwrapper 0.2.8 or newer self.vdisplay.extra_xvfb_args.extend(args) else: self.vdisplay.xvfb_cmd.extend(args) self.vdisplay.start() # Start the Selenium webdriver and setup configuration. self.driver = webdriver.WebDriverWrapper() self.driver.maximize_window() self.driver.implicitly_wait(self.CONFIG.selenium.implicit_wait) self.driver.set_page_load_timeout( self.CONFIG.selenium.page_timeout) self.addOnException(self._dump_page_html_source) self.addOnException(self._save_screenshot) else: msg = "The INTEGRATION_TESTS env variable is not set." raise self.skipException(msg) super(BaseTestCase, self).setUp() def _dump_page_html_source(self, exc_info): content = None try: pg_source = self._get_page_html_source() content = testtools.content.Content( testtools.content_type.ContentType('text', 'html'), lambda: pg_source) except Exception: exc_traceback = traceback.format_exc() content = testtools.content.text_content(exc_traceback) finally: self.addDetail("PageHTMLSource.html", content) def _save_screenshot(self, exc_info): screenshot_dir = os.path.join( ROOT_PATH, self.CONFIG.selenium.screenshots_directory) if not os.path.exists(screenshot_dir): os.makedirs(screenshot_dir) date_string = datetime.datetime.now().strftime( '%Y.%m.%d-%H%M%S') test_name = self._testMethodName name = '%s_%s.png' % (test_name, date_string) filename = os.path.join(screenshot_dir, name) self.driver.get_screenshot_as_file(filename) content = testtools.content.text_content(filename) self.addDetail("Screenshot", content) def _get_page_html_source(self): """Gets html page source. 
self.driver.page_source is not used on purpose because it does not display html code generated/changed by javascript. """ html_elem = self.driver.find_element_by_tag_name("html") return html_elem.get_attribute("innerHTML").encode("UTF-8") def tearDown(self): if os.environ.get('INTEGRATION_TESTS', False): self.driver.quit() if hasattr(self, 'vdisplay'): self.vdisplay.stop() super(BaseTestCase, self).tearDown() class TestCase(BaseTestCase): TEST_USER_NAME = BaseTestCase.CONFIG.identity.username TEST_PASSWORD = BaseTestCase.CONFIG.identity.password def setUp(self): super(TestCase, self).setUp() self.login_pg = loginpage.LoginPage(self.driver, self.CONFIG) self.login_pg.go_to_login_page() self.home_pg = self.login_pg.login(self.TEST_USER_NAME, self.TEST_PASSWORD) def tearDown(self): try: if self.home_pg.is_logged_in: self.home_pg.go_to_home_page() self.home_pg.log_out() finally: super(TestCase, self).tearDown() class AdminTestCase(TestCase): TEST_USER_NAME = TestCase.CONFIG.identity.admin_username TEST_PASSWORD = TestCase.CONFIG.identity.admin_password
36.4875
86
0.658787
import datetime import os import sys import time import traceback import uuid import testtools import xvfbwrapper from openstack_dashboard.test.integration_tests import config from openstack_dashboard.test.integration_tests.pages import loginpage from openstack_dashboard.test.integration_tests import webdriver ROOT_PATH = os.path.dirname(os.path.abspath(__file__)) if ROOT_PATH not in sys.path: sys.path.append(ROOT_PATH) def gen_random_resource_name(resource="", timestamp=True): fields = ["horizon"] if resource: fields.append(resource) if timestamp: tstamp = time.strftime("%d-%m-%H-%M-%S") fields.append(tstamp) fields.append(str(uuid.uuid4()).replace("-", "")) return "_".join(fields) class BaseTestCase(testtools.TestCase): CONFIG = config.get_config() def setUp(self): if os.environ.get('INTEGRATION_TESTS', False): if os.environ.get('SELENIUM_HEADLESS', False): self.vdisplay = xvfbwrapper.Xvfb(width=1280, height=720) args = [] args.append("-noreset") args.append("-ac") if hasattr(self.vdisplay, 'extra_xvfb_args'): self.vdisplay.extra_xvfb_args.extend(args) else: self.vdisplay.xvfb_cmd.extend(args) self.vdisplay.start() self.driver = webdriver.WebDriverWrapper() self.driver.maximize_window() self.driver.implicitly_wait(self.CONFIG.selenium.implicit_wait) self.driver.set_page_load_timeout( self.CONFIG.selenium.page_timeout) self.addOnException(self._dump_page_html_source) self.addOnException(self._save_screenshot) else: msg = "The INTEGRATION_TESTS env variable is not set." 
raise self.skipException(msg) super(BaseTestCase, self).setUp() def _dump_page_html_source(self, exc_info): content = None try: pg_source = self._get_page_html_source() content = testtools.content.Content( testtools.content_type.ContentType('text', 'html'), lambda: pg_source) except Exception: exc_traceback = traceback.format_exc() content = testtools.content.text_content(exc_traceback) finally: self.addDetail("PageHTMLSource.html", content) def _save_screenshot(self, exc_info): screenshot_dir = os.path.join( ROOT_PATH, self.CONFIG.selenium.screenshots_directory) if not os.path.exists(screenshot_dir): os.makedirs(screenshot_dir) date_string = datetime.datetime.now().strftime( '%Y.%m.%d-%H%M%S') test_name = self._testMethodName name = '%s_%s.png' % (test_name, date_string) filename = os.path.join(screenshot_dir, name) self.driver.get_screenshot_as_file(filename) content = testtools.content.text_content(filename) self.addDetail("Screenshot", content) def _get_page_html_source(self): html_elem = self.driver.find_element_by_tag_name("html") return html_elem.get_attribute("innerHTML").encode("UTF-8") def tearDown(self): if os.environ.get('INTEGRATION_TESTS', False): self.driver.quit() if hasattr(self, 'vdisplay'): self.vdisplay.stop() super(BaseTestCase, self).tearDown() class TestCase(BaseTestCase): TEST_USER_NAME = BaseTestCase.CONFIG.identity.username TEST_PASSWORD = BaseTestCase.CONFIG.identity.password def setUp(self): super(TestCase, self).setUp() self.login_pg = loginpage.LoginPage(self.driver, self.CONFIG) self.login_pg.go_to_login_page() self.home_pg = self.login_pg.login(self.TEST_USER_NAME, self.TEST_PASSWORD) def tearDown(self): try: if self.home_pg.is_logged_in: self.home_pg.go_to_home_page() self.home_pg.log_out() finally: super(TestCase, self).tearDown() class AdminTestCase(TestCase): TEST_USER_NAME = TestCase.CONFIG.identity.admin_username TEST_PASSWORD = TestCase.CONFIG.identity.admin_password
true
true
f70ed42cc612bcaad5c20d49c4025518f3b003ee
753
py
Python
issues/migrations/0006_auto_20200501_0352.py
QizaiMing/ergo-project-manager
2b02b2ab6d9e48bfccbbca8c05180b07177dcb77
[ "MIT" ]
null
null
null
issues/migrations/0006_auto_20200501_0352.py
QizaiMing/ergo-project-manager
2b02b2ab6d9e48bfccbbca8c05180b07177dcb77
[ "MIT" ]
3
2020-11-01T22:08:38.000Z
2022-03-12T00:49:00.000Z
issues/migrations/0006_auto_20200501_0352.py
QizaiMing/ergo-project-manager
2b02b2ab6d9e48bfccbbca8c05180b07177dcb77
[ "MIT" ]
2
2021-01-03T07:17:16.000Z
2021-05-29T17:27:11.000Z
# Generated by Django 2.2.12 on 2020-05-01 03:52 from django.conf import settings from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('issues', '0005_auto_20200501_0350'), ] operations = [ migrations.AlterField( model_name='issue', name='assignee', field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='assigned_to', to=settings.AUTH_USER_MODEL), ), migrations.AlterField( model_name='issue', name='linked_to', field=models.ManyToManyField(blank=True, related_name='_issue_linked_to_+', to='issues.Issue'), ), ]
28.961538
138
0.648074
from django.conf import settings from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('issues', '0005_auto_20200501_0350'), ] operations = [ migrations.AlterField( model_name='issue', name='assignee', field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='assigned_to', to=settings.AUTH_USER_MODEL), ), migrations.AlterField( model_name='issue', name='linked_to', field=models.ManyToManyField(blank=True, related_name='_issue_linked_to_+', to='issues.Issue'), ), ]
true
true
f70ed45272dbd6f48258226d6075604286c262fa
1,308
py
Python
tests/test_xml_bibs.py
Mfgloger/overload
b34f88750d9272db268324807f7e8ba308940d1e
[ "MIT" ]
null
null
null
tests/test_xml_bibs.py
Mfgloger/overload
b34f88750d9272db268324807f7e8ba308940d1e
[ "MIT" ]
58
2017-10-16T13:09:33.000Z
2022-01-10T15:35:58.000Z
tests/test_xml_bibs.py
Mfgloger/overload
b34f88750d9272db268324807f7e8ba308940d1e
[ "MIT" ]
2
2019-04-15T16:04:38.000Z
2020-11-03T17:58:33.000Z
# -*- coding: utf-8 -*- import xml.etree.ElementTree as ET import unittest from context import xml_bibs as xb class TestGetSubjectFields(unittest.TestCase): """Tests parsing of subjects from marcxml""" def setUp(self): tree = ET.parse("sample_marcxml.xml") self.data1 = tree.getroot() tree = ET.parse("missing_tags_sample_marcxml.xml") self.data2 = tree.getroot() def test_none(self): self.assertEqual(xb.get_subject_fields(None), {}) def test_missing_tag(self): self.assertEqual(xb.get_subject_fields(self.data2), {}) def test_present_600_tag(self): self.assertEqual(xb.get_subject_fields(self.data1), {"600": "Elizabeth II"}) class TestGetTag082(unittest.TestCase): """Tests parsing 082 tag subfield a""" def setUp(self): tree = ET.parse("sample_marcxml.xml") self.data1 = tree.getroot() tree = ET.parse("missing_tags_sample_marcxml.xml") self.data2 = tree.getroot() def test_none(self): self.assertIsNone(xb.get_tag_082(None)) def test_missing_tag(self): self.assertIsNone(xb.get_tag_082(self.data2)) def test_found_tag(self): self.assertEqual(xb.get_tag_082(self.data1), "973.9/092/2") if __name__ == "__main__": unittest.main()
25.153846
84
0.666667
import xml.etree.ElementTree as ET import unittest from context import xml_bibs as xb class TestGetSubjectFields(unittest.TestCase): def setUp(self): tree = ET.parse("sample_marcxml.xml") self.data1 = tree.getroot() tree = ET.parse("missing_tags_sample_marcxml.xml") self.data2 = tree.getroot() def test_none(self): self.assertEqual(xb.get_subject_fields(None), {}) def test_missing_tag(self): self.assertEqual(xb.get_subject_fields(self.data2), {}) def test_present_600_tag(self): self.assertEqual(xb.get_subject_fields(self.data1), {"600": "Elizabeth II"}) class TestGetTag082(unittest.TestCase): def setUp(self): tree = ET.parse("sample_marcxml.xml") self.data1 = tree.getroot() tree = ET.parse("missing_tags_sample_marcxml.xml") self.data2 = tree.getroot() def test_none(self): self.assertIsNone(xb.get_tag_082(None)) def test_missing_tag(self): self.assertIsNone(xb.get_tag_082(self.data2)) def test_found_tag(self): self.assertEqual(xb.get_tag_082(self.data1), "973.9/092/2") if __name__ == "__main__": unittest.main()
true
true
f70ed4a6ff4da8d2043098a40b6f32d51fb96cb8
8,719
py
Python
run_preprocessing.py
ehwa009/Eye_Motion_Dataset
42a1c897dc4209c6bb2de94c915ab36995855202
[ "MIT" ]
null
null
null
run_preprocessing.py
ehwa009/Eye_Motion_Dataset
42a1c897dc4209c6bb2de94c915ab36995855202
[ "MIT" ]
null
null
null
run_preprocessing.py
ehwa009/Eye_Motion_Dataset
42a1c897dc4209c6bb2de94c915ab36995855202
[ "MIT" ]
1
2020-03-11T02:14:14.000Z
2020-03-11T02:14:14.000Z
import pickle import argparse import pandas as pd import numpy as np import math from tqdm import tqdm from sklearn import decomposition CENTER_X = int(960 / 3 / 2) CENTER_Y = int(540 / 3 / 2) # CENTER_X = 0 # CENTER_Y = 0 def load_data(path, data_size=None): with open(path, 'rb') as f: data = pickle.load(f) if data_size != -1: dataset = data[:data_size] else: dataset = data[:] return dataset def save_data(path, data): with open(path, 'wb') as f: pickle.dump(data, f) ''' filling empty coordination, relocate landmark position, and filtering landmarks which have abnormal pulpil coordination ''' def run_fill_filter(eye_dataset): for ed in tqdm(eye_dataset): # preprocessing landmarks # print('[INFO] Current video: {}'.format(ed['vid'])) for clip_info in ed['clip_info']: landmarks = clip_info['landmarks'] filled_landmarks = [] for landmark in landmarks: ci_df = pd.DataFrame(np.array(landmark)) ci_df = ci_df.replace(0, np.nan) ci_df = ci_df.fillna(method='ffill') # fill NaN values in dataset ci_df = ci_df.rolling(3).mean() # moving average filtering temp_lm = [] for landmark in ci_df.values.tolist(): filled = [int(lm) for lm in landmark if not(np.isnan(lm))] if len(filled) == 50: # centering diff_x = CENTER_X - filled[48] diff_y = CENTER_Y - filled[49] for f_i in range(0, len(filled), 2): filled[f_i] += diff_x filled[f_i+1] += diff_y # check right pupil is outside of eye region condition1 = filled[0] > filled[4] and filled[0] < filled[10] condition2 = filled[1] > filled[7] and filled[1] > filled[9] condition3 = filled[1] < filled[13] and filled[1] < filled[14] if condition1 and condition2 and condition3: temp_lm.append(filled) filled_landmarks.append(temp_lm) clip_info['landmarks'] = filled_landmarks return eye_dataset ''' Normalize eye expression motion scale over whole dataset. To avoid pulpil dislocation, we use same vector on right and left pulpil. 
''' def run_normalization(eye_dataset): eb_standard_len = 100 def get_dist(x1, y1, x2, y2): return np.sqrt((x1-x2) ** 2 + (y1- y2) ** 2) def get_theta(var_x, var_y, fix_x, fix_y): return math.atan2(var_y - fix_y, var_x - fix_x) def get_new_coor(theta, dist, point): return dist * np.array([math.cos(theta), math.sin(theta)]) + np.array([point[0], point[1]]) def run_len_norm(var_x, var_y, fix_x, fix_y, expected_len): angle = get_theta(var_x, var_y, fix_x, fix_y) new_coor = get_new_coor(angle, expected_len, [fix_x, fix_y]) return new_coor for ed in tqdm(eye_dataset): # preprocessing landmarks # print('[INFO] Current video: {}'.format(ed['vid'])) for clip_info in ed['clip_info']: tmp_landmarks = [] for landmark in clip_info['landmarks']: tmp_landmark = [] for lm in landmark: # calculate different ratio with standard length right_len_ratio = eb_standard_len / get_dist(lm[46], lm[47], lm[48], lm[49]) left_len_ratio = eb_standard_len / get_dist(lm[28], lm[29], lm[48], lm[49]) len_ratio = (right_len_ratio + left_len_ratio) / 2 fix_x, fix_y = lm[48], lm[49] new_coor_list = [] for lm_i in range(0, len(lm[:48]), 2): new_coor = run_len_norm(lm[lm_i], lm[lm_i+1], fix_x, fix_y, get_dist(lm[lm_i], lm[lm_i+1], fix_x, fix_y) * len_ratio) new_coor_list += [int(new_coor[0]), int(new_coor[1])] # pupil preprocessing right_theta = get_theta(lm[0], lm[1], lm[6], lm[7]) right_dist = get_dist(lm[0], lm[1], lm[6], lm[7]) left_new_pulpil = get_new_coor(right_theta, right_dist, [lm[18], lm[19]]) lm[2] = int(left_new_pulpil[0]) lm[3] = int(left_new_pulpil[1]) new_coor_list += [fix_x, fix_y] tmp_landmark.append(new_coor_list) tmp_landmarks.append(tmp_landmark) clip_info['landmarks'] = tmp_landmarks return eye_dataset ''' Run PCA. We set 7 components to run pca. 
''' def run_estimator(eye_dataset, opt): landmark_list = [] for ed in eye_dataset: for clip_info in ed['clip_info']: for clip_landmarks in clip_info['landmarks']: for landmarks in clip_landmarks: landmark_list.append(landmarks) landmark_array = np.array(landmark_list) n_samples, n_features = landmark_array.shape print('[INFO] n_samples:{}, n_features:{}'.format(n_samples, n_features)) print('[INFO] Estimated running time: {:0.2f} hrs with {} fps'.format(n_samples/opt.fps/60/60, opt.fps)) data = landmark_array[:, :-2] estimator = decomposition.PCA(opt.n_components, svd_solver='randomized', whiten=True) estimator.fit(data) var_ratio = estimator.explained_variance_ratio_ print('[INFO] {} number of components explain {:0.2f} of original dataset.'.format(opt.n_components, np.sum(var_ratio))) print('[INFO] Without first and seconde axis, rest of hyperplain consists of {:0.2f} of original dataset.'.format(np.sum(var_ratio[3:]))) return estimator ''' Based on learned PCA eigen vectors (7 hyperplanes that can explain original dataset), We transform 50 dimention to 7 dimention to represent eye expression. Due to first and second egien vectors represent rotating motion in our pca space, we make these values to zero. 
''' def run_transform(eye_dataset, estimator, opt): for ed in tqdm(eye_dataset): for clip_info in ed['clip_info']: landmarks = clip_info['landmarks'] transformed_landmarks = [] for landmark in landmarks: tmp_trans = [] for lm in landmark: transformed_array = estimator.transform(np.array([lm[:-2]])) transformed_list = transformed_array.tolist()[0] if opt.is_rotation_killed: # we killed pca hyperplanes which have a rotation # transformed_list[0] = int(transformed_list[0]/3) # transformed_list[1] = int(transformed_list[1]/3) transformed_list[0] = 0 transformed_list[1] = 0 tmp_trans.append(transformed_list) transformed_landmarks.append(tmp_trans) clip_info['landmarks'] = transformed_landmarks return eye_dataset def main(): parser = argparse.ArgumentParser() parser.add_argument('-dataset_path', default='./dataset') parser.add_argument('-data_size', type=int, default=-1) # -1 means whole dataset parser.add_argument('-fps', type=int, default=10) parser.add_argument('-n_components', type=int, default=7) parser.add_argument('-is_rotation_killed', type=bool, default=True) opt = parser.parse_args() eye_dataset = load_data('{}/eye_motion_dataset.pickle'.format(opt.dataset_path), opt.data_size) print('[INFO] Dataset length: {}'.format(len(eye_dataset))) print('[INFO] Filling, filtering and centering is now processing.') eye_dataset = run_fill_filter(eye_dataset) print('[INFO] Normalization is now processing.') eye_dataset = run_normalization(eye_dataset) print('[INFO] Estimator is now running.') estimator = run_estimator(eye_dataset, opt) print('[INFO] Landmarks are now transforming.') eye_dataset = run_transform(eye_dataset, estimator, opt) # save processed dataset processed_dataset = {'eye_dataset': eye_dataset, 'estimator': estimator, } save_path = '{}/processed_eye_motion_dataset_pca_{}.pickle'.format(opt.dataset_path, estimator.n_components) print('[INFO] Save preprocessed dataset at {}'.format(save_path)) save_data(save_path, processed_dataset) if __name__ == '__main__': 
main()
40.553488
141
0.592384
import pickle import argparse import pandas as pd import numpy as np import math from tqdm import tqdm from sklearn import decomposition CENTER_X = int(960 / 3 / 2) CENTER_Y = int(540 / 3 / 2) def load_data(path, data_size=None): with open(path, 'rb') as f: data = pickle.load(f) if data_size != -1: dataset = data[:data_size] else: dataset = data[:] return dataset def save_data(path, data): with open(path, 'wb') as f: pickle.dump(data, f) def run_fill_filter(eye_dataset): for ed in tqdm(eye_dataset): for clip_info in ed['clip_info']: landmarks = clip_info['landmarks'] filled_landmarks = [] for landmark in landmarks: ci_df = pd.DataFrame(np.array(landmark)) ci_df = ci_df.replace(0, np.nan) ci_df = ci_df.fillna(method='ffill') ci_df = ci_df.rolling(3).mean() temp_lm = [] for landmark in ci_df.values.tolist(): filled = [int(lm) for lm in landmark if not(np.isnan(lm))] if len(filled) == 50: diff_x = CENTER_X - filled[48] diff_y = CENTER_Y - filled[49] for f_i in range(0, len(filled), 2): filled[f_i] += diff_x filled[f_i+1] += diff_y condition1 = filled[0] > filled[4] and filled[0] < filled[10] condition2 = filled[1] > filled[7] and filled[1] > filled[9] condition3 = filled[1] < filled[13] and filled[1] < filled[14] if condition1 and condition2 and condition3: temp_lm.append(filled) filled_landmarks.append(temp_lm) clip_info['landmarks'] = filled_landmarks return eye_dataset def run_normalization(eye_dataset): eb_standard_len = 100 def get_dist(x1, y1, x2, y2): return np.sqrt((x1-x2) ** 2 + (y1- y2) ** 2) def get_theta(var_x, var_y, fix_x, fix_y): return math.atan2(var_y - fix_y, var_x - fix_x) def get_new_coor(theta, dist, point): return dist * np.array([math.cos(theta), math.sin(theta)]) + np.array([point[0], point[1]]) def run_len_norm(var_x, var_y, fix_x, fix_y, expected_len): angle = get_theta(var_x, var_y, fix_x, fix_y) new_coor = get_new_coor(angle, expected_len, [fix_x, fix_y]) return new_coor for ed in tqdm(eye_dataset): for clip_info in ed['clip_info']: 
tmp_landmarks = [] for landmark in clip_info['landmarks']: tmp_landmark = [] for lm in landmark: right_len_ratio = eb_standard_len / get_dist(lm[46], lm[47], lm[48], lm[49]) left_len_ratio = eb_standard_len / get_dist(lm[28], lm[29], lm[48], lm[49]) len_ratio = (right_len_ratio + left_len_ratio) / 2 fix_x, fix_y = lm[48], lm[49] new_coor_list = [] for lm_i in range(0, len(lm[:48]), 2): new_coor = run_len_norm(lm[lm_i], lm[lm_i+1], fix_x, fix_y, get_dist(lm[lm_i], lm[lm_i+1], fix_x, fix_y) * len_ratio) new_coor_list += [int(new_coor[0]), int(new_coor[1])] right_theta = get_theta(lm[0], lm[1], lm[6], lm[7]) right_dist = get_dist(lm[0], lm[1], lm[6], lm[7]) left_new_pulpil = get_new_coor(right_theta, right_dist, [lm[18], lm[19]]) lm[2] = int(left_new_pulpil[0]) lm[3] = int(left_new_pulpil[1]) new_coor_list += [fix_x, fix_y] tmp_landmark.append(new_coor_list) tmp_landmarks.append(tmp_landmark) clip_info['landmarks'] = tmp_landmarks return eye_dataset def run_estimator(eye_dataset, opt): landmark_list = [] for ed in eye_dataset: for clip_info in ed['clip_info']: for clip_landmarks in clip_info['landmarks']: for landmarks in clip_landmarks: landmark_list.append(landmarks) landmark_array = np.array(landmark_list) n_samples, n_features = landmark_array.shape print('[INFO] n_samples:{}, n_features:{}'.format(n_samples, n_features)) print('[INFO] Estimated running time: {:0.2f} hrs with {} fps'.format(n_samples/opt.fps/60/60, opt.fps)) data = landmark_array[:, :-2] estimator = decomposition.PCA(opt.n_components, svd_solver='randomized', whiten=True) estimator.fit(data) var_ratio = estimator.explained_variance_ratio_ print('[INFO] {} number of components explain {:0.2f} of original dataset.'.format(opt.n_components, np.sum(var_ratio))) print('[INFO] Without first and seconde axis, rest of hyperplain consists of {:0.2f} of original dataset.'.format(np.sum(var_ratio[3:]))) return estimator def run_transform(eye_dataset, estimator, opt): for ed in tqdm(eye_dataset): for 
clip_info in ed['clip_info']: landmarks = clip_info['landmarks'] transformed_landmarks = [] for landmark in landmarks: tmp_trans = [] for lm in landmark: transformed_array = estimator.transform(np.array([lm[:-2]])) transformed_list = transformed_array.tolist()[0] if opt.is_rotation_killed: transformed_list[0] = 0 transformed_list[1] = 0 tmp_trans.append(transformed_list) transformed_landmarks.append(tmp_trans) clip_info['landmarks'] = transformed_landmarks return eye_dataset def main(): parser = argparse.ArgumentParser() parser.add_argument('-dataset_path', default='./dataset') parser.add_argument('-data_size', type=int, default=-1) parser.add_argument('-fps', type=int, default=10) parser.add_argument('-n_components', type=int, default=7) parser.add_argument('-is_rotation_killed', type=bool, default=True) opt = parser.parse_args() eye_dataset = load_data('{}/eye_motion_dataset.pickle'.format(opt.dataset_path), opt.data_size) print('[INFO] Dataset length: {}'.format(len(eye_dataset))) print('[INFO] Filling, filtering and centering is now processing.') eye_dataset = run_fill_filter(eye_dataset) print('[INFO] Normalization is now processing.') eye_dataset = run_normalization(eye_dataset) print('[INFO] Estimator is now running.') estimator = run_estimator(eye_dataset, opt) print('[INFO] Landmarks are now transforming.') eye_dataset = run_transform(eye_dataset, estimator, opt) processed_dataset = {'eye_dataset': eye_dataset, 'estimator': estimator, } save_path = '{}/processed_eye_motion_dataset_pca_{}.pickle'.format(opt.dataset_path, estimator.n_components) print('[INFO] Save preprocessed dataset at {}'.format(save_path)) save_data(save_path, processed_dataset) if __name__ == '__main__': main()
true
true
f70ed57a100157333fff98601e50b3e635feeceb
1,144
py
Python
summarize_clstr_table.py
FischbachLab/hCom_variable_regions
6f1108c461a7e31964d1d81a83c03b9f4dad4c76
[ "MIT" ]
null
null
null
summarize_clstr_table.py
FischbachLab/hCom_variable_regions
6f1108c461a7e31964d1d81a83c03b9f4dad4c76
[ "MIT" ]
null
null
null
summarize_clstr_table.py
FischbachLab/hCom_variable_regions
6f1108c461a7e31964d1d81a83c03b9f4dad4c76
[ "MIT" ]
null
null
null
#!/usr/bin/env python3 ## How many clusters have more than one organisms as it's members import sys import pandas as pd import logging def main(): clstr_table = sys.argv[1] output = sys.argv[2] clstr_df = pd.read_table(clstr_table, header=0) clstr_df["organism"] = clstr_df["id"].apply(lambda x: x.split(":")[2].split("_")[0]) summ_df = clstr_df.groupby("clstr").agg( num_organisms=("organism", pd.Series.nunique), organism_list=("organism", set) ) close_strains = set() for row in summ_df.query("num_organisms > 1").itertuples(index=False): close_strains.update(row.organism_list) logging.info( f"There are {len(close_strains)} strains in the community for which another strain exists with an identical V3-V4 region" ) summ_df["organism_list"] = summ_df["organism_list"].apply( lambda x: "; ".join(set(x)) ) summ_df = summ_df.sort_values("num_organisms", ascending=False) summ_df.to_csv(output) if __name__ == "__main__": logging.basicConfig( level=logging.INFO, format="%(asctime)s\t[%(levelname)s]:\t%(message)s", ) main()
28.6
129
0.664336
clstr_table = sys.argv[1] output = sys.argv[2] clstr_df = pd.read_table(clstr_table, header=0) clstr_df["organism"] = clstr_df["id"].apply(lambda x: x.split(":")[2].split("_")[0]) summ_df = clstr_df.groupby("clstr").agg( num_organisms=("organism", pd.Series.nunique), organism_list=("organism", set) ) close_strains = set() for row in summ_df.query("num_organisms > 1").itertuples(index=False): close_strains.update(row.organism_list) logging.info( f"There are {len(close_strains)} strains in the community for which another strain exists with an identical V3-V4 region" ) summ_df["organism_list"] = summ_df["organism_list"].apply( lambda x: "; ".join(set(x)) ) summ_df = summ_df.sort_values("num_organisms", ascending=False) summ_df.to_csv(output) if __name__ == "__main__": logging.basicConfig( level=logging.INFO, format="%(asctime)s\t[%(levelname)s]:\t%(message)s", ) main()
true
true
f70ed5fc8d9dec798cc962a5f3c011a61fe76693
2,444
py
Python
scraper/views.py
ziibii88/The_Doe_Agency
2545aeae71c779166bef78941cac36551498ca76
[ "MIT" ]
2
2021-07-16T10:25:15.000Z
2021-08-07T04:44:08.000Z
scraper/views.py
ziibii88/The_Doe_Agency
2545aeae71c779166bef78941cac36551498ca76
[ "MIT" ]
null
null
null
scraper/views.py
ziibii88/The_Doe_Agency
2545aeae71c779166bef78941cac36551498ca76
[ "MIT" ]
null
null
null
from http import HTTPStatus from rest_framework import views, viewsets from rest_framework.exceptions import ParseError from rest_framework.request import Request from rest_framework.response import Response from scraper import models, serializers, tasks from scraper.utils import get_random_working_proxy class WebsiteViewSet(viewsets.ModelViewSet): queryset = models.Website.objects.all() serializer_class = serializers.WebsiteSerializer filterset_fields = ("is_active",) search_field = ("name", "code", "url") ordering_fields = ("name", "code", "id") class PageViewSet(viewsets.ModelViewSet): queryset = models.Page.objects.all() serializer_class = serializers.PageSerializer filterset_fields = ("is_active", "has_js") search_field = ("site__name", "site__code", "path") ordering_fields = ("site", "id") class ProxyViewSet(viewsets.ModelViewSet): queryset = models.Proxy.objects.all() serializer_class = serializers.ProxySerializer filterset_fields = ("is_active", "is_dead", "anonymity", "protocol") search_field = ("ip", "port", "country") ordering_fields = ("id", "ip", "port", "country") class ScrapeSitesAPI(views.APIView): def post(self, request): task = tasks.scrape_sites.apply_async() return Response({"task_id": task.id, "status": task.status}) class CheckProxiesAPI(views.APIView): def post(self, request): task = tasks.check_proxies.apply_async() return Response({"task_id": task.id, "status": task.status}) class GetProxyAPI(views.APIView): def get_proxy( self, output: str = "dict", test_urls: list or tuple = None ) -> Response: result = get_random_working_proxy(output="dict", test_urls=test_urls) if result: return Response( {"result": result, "status": "SUCCESS"}, status=HTTPStatus.OK ) # 200 OK else: return Response( {"status": "NO PROXY FOUND"}, status=HTTPStatus.NO_CONTENT ) # 204 No content def get(self, request: Request): return self.get_proxy() def post(self, request: Request): test_urls = request.POST.get("test_urls", None) if not test_urls: raise ParseError("Must 
provide test_urls for proxy check") if isinstance(test_urls, str): test_urls = (test_urls,) return self.get_proxy(test_urls=test_urls)
33.944444
77
0.673486
from http import HTTPStatus from rest_framework import views, viewsets from rest_framework.exceptions import ParseError from rest_framework.request import Request from rest_framework.response import Response from scraper import models, serializers, tasks from scraper.utils import get_random_working_proxy class WebsiteViewSet(viewsets.ModelViewSet): queryset = models.Website.objects.all() serializer_class = serializers.WebsiteSerializer filterset_fields = ("is_active",) search_field = ("name", "code", "url") ordering_fields = ("name", "code", "id") class PageViewSet(viewsets.ModelViewSet): queryset = models.Page.objects.all() serializer_class = serializers.PageSerializer filterset_fields = ("is_active", "has_js") search_field = ("site__name", "site__code", "path") ordering_fields = ("site", "id") class ProxyViewSet(viewsets.ModelViewSet): queryset = models.Proxy.objects.all() serializer_class = serializers.ProxySerializer filterset_fields = ("is_active", "is_dead", "anonymity", "protocol") search_field = ("ip", "port", "country") ordering_fields = ("id", "ip", "port", "country") class ScrapeSitesAPI(views.APIView): def post(self, request): task = tasks.scrape_sites.apply_async() return Response({"task_id": task.id, "status": task.status}) class CheckProxiesAPI(views.APIView): def post(self, request): task = tasks.check_proxies.apply_async() return Response({"task_id": task.id, "status": task.status}) class GetProxyAPI(views.APIView): def get_proxy( self, output: str = "dict", test_urls: list or tuple = None ) -> Response: result = get_random_working_proxy(output="dict", test_urls=test_urls) if result: return Response( {"result": result, "status": "SUCCESS"}, status=HTTPStatus.OK ) else: return Response( {"status": "NO PROXY FOUND"}, status=HTTPStatus.NO_CONTENT ) def get(self, request: Request): return self.get_proxy() def post(self, request: Request): test_urls = request.POST.get("test_urls", None) if not test_urls: raise ParseError("Must provide test_urls for 
proxy check") if isinstance(test_urls, str): test_urls = (test_urls,) return self.get_proxy(test_urls=test_urls)
true
true
f70ed613214d53ae4a2ef2bfdd73ff060ad133c9
1,723
py
Python
rainforest/resources/generator.py
apibitsco/rainforestapp-python
426245fdb93748d4c2a6f3f12bcce3af20320137
[ "MIT" ]
1
2016-10-25T18:47:55.000Z
2016-10-25T18:47:55.000Z
rainforest/resources/generator.py
apibitsco/rainforestapp-python
426245fdb93748d4c2a6f3f12bcce3af20320137
[ "MIT" ]
6
2015-10-15T11:27:42.000Z
2019-01-31T10:12:42.000Z
rainforest/resources/generator.py
rainforestapp/rainforest-python
c0e5c5dba55c000ee6f92ae5fc9db5f90e415500
[ "MIT" ]
null
null
null
from ..apibits import * from ..endpoints import GeneratorsEndpoint from ..endpoints import GeneratorRowsEndpoint class Generator(ApiResource): @classmethod def all(cls, params={}, headers={}): res = cls.default_client().generators().all(params, headers) return res @classmethod def retrieve(cls, generator_id, params={}, headers={}): res = cls.default_client().generators().retrieve(generator_id, params, headers) return res @classmethod def update(cls, generator_id, params={}, headers={}): res = cls.default_client().generators().update(generator_id, params, headers) return res @classmethod def create(cls, params={}, headers={}): res = cls.default_client().generators().create(params, headers) return res def refresh(self, params={}, headers={}): res = self.get_client().generators().retrieve(self.id, params, headers) return self.refresh_from(res.json, res.api_method, res.client) def delete(self, params={}, headers={}): res = self.get_client().generators().delete(self.id, params, headers) return res def rows(self): from ..endpoints import GeneratorRowsEndpoint return GeneratorRowsEndpoint(self.client, self) # Everything below here is used behind the scenes. def __init__(self, *args, **kwargs): super(Generator, self).__init__(*args, **kwargs) ApiResource.register_api_subclass(self, "generator") _api_attributes = { "columns" : {}, "created_at" : {}, "data" : {}, "description" : {}, "generator_type" : {}, "id" : {}, "name" : {}, "row_count" : {}, }
31.907407
87
0.623331
from ..apibits import * from ..endpoints import GeneratorsEndpoint from ..endpoints import GeneratorRowsEndpoint class Generator(ApiResource): @classmethod def all(cls, params={}, headers={}): res = cls.default_client().generators().all(params, headers) return res @classmethod def retrieve(cls, generator_id, params={}, headers={}): res = cls.default_client().generators().retrieve(generator_id, params, headers) return res @classmethod def update(cls, generator_id, params={}, headers={}): res = cls.default_client().generators().update(generator_id, params, headers) return res @classmethod def create(cls, params={}, headers={}): res = cls.default_client().generators().create(params, headers) return res def refresh(self, params={}, headers={}): res = self.get_client().generators().retrieve(self.id, params, headers) return self.refresh_from(res.json, res.api_method, res.client) def delete(self, params={}, headers={}): res = self.get_client().generators().delete(self.id, params, headers) return res def rows(self): from ..endpoints import GeneratorRowsEndpoint return GeneratorRowsEndpoint(self.client, self) def __init__(self, *args, **kwargs): super(Generator, self).__init__(*args, **kwargs) ApiResource.register_api_subclass(self, "generator") _api_attributes = { "columns" : {}, "created_at" : {}, "data" : {}, "description" : {}, "generator_type" : {}, "id" : {}, "name" : {}, "row_count" : {}, }
true
true
f70ed6226b4c9cac885acb0f851cfe76cf8b5c36
2,526
py
Python
plugins/Relay/__init__.py
mogad0n/Limnoria
f31e5c4b9a77e30918d6b93f69d69f3b8f910e3c
[ "BSD-3-Clause" ]
476
2015-01-04T17:42:59.000Z
2021-08-13T07:40:54.000Z
plugins/Relay/__init__.py
mogad0n/Limnoria
f31e5c4b9a77e30918d6b93f69d69f3b8f910e3c
[ "BSD-3-Clause" ]
491
2015-01-01T04:12:23.000Z
2021-08-12T19:24:47.000Z
plugins/Relay/__init__.py
mogad0n/Limnoria
f31e5c4b9a77e30918d6b93f69d69f3b8f910e3c
[ "BSD-3-Clause" ]
203
2015-01-02T18:29:43.000Z
2021-08-15T12:52:22.000Z
### # Copyright (c) 2005, Jeremiah Fincher # Copyright (c) 2010-2021, The Limnoria Contributors # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # * Redistributions of source code must retain the above copyright notice, # this list of conditions, and the following disclaimer. # * Redistributions in binary form must reproduce the above copyright notice, # this list of conditions, and the following disclaimer in the # documentation and/or other materials provided with the distribution. # * Neither the name of the author of this software nor the name of # contributors to this software may be used to endorse or promote products # derived from this software without specific prior written consent. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. ### """ Handles relaying between networks. """ import supybot import supybot.world as world # Use this for the version of this plugin. You may wish to put a CVS keyword # in here if you're keeping the plugin in CVS or some similar system. 
__version__ = "%%VERSION%%" __author__ = supybot.authors.jemfinch __maintainer__ = supybot.authors.limnoria_core # This is a dictionary mapping supybot.Author instances to lists of # contributions. __contributors__ = {} from . import config from . import plugin from importlib import reload reload(plugin) # In case we're being reloaded. # Add more reloads here if you add third-party modules and want them to be # reloaded when this plugin is reloaded. Don't forget to import them as well! if world.testing: from . import test Class = plugin.Class configure = config.configure # vim:set shiftwidth=4 softtabstop=4 expandtab textwidth=79:
39.46875
79
0.770388
import supybot import supybot.world as world __version__ = "%%VERSION%%" __author__ = supybot.authors.jemfinch __maintainer__ = supybot.authors.limnoria_core # This is a dictionary mapping supybot.Author instances to lists of # contributions. __contributors__ = {} from . import config from . import plugin from importlib import reload reload(plugin) # In case we're being reloaded. if world.testing: from . import test Class = plugin.Class configure = config.configure # vim:set shiftwidth=4 softtabstop=4 expandtab textwidth=79:
true
true
f70ed72ef52e88cf5e169ee95f60603d4184a74c
10,110
py
Python
jina/__init__.py
DeepikaGupta09/jina
2fc328d10f2fbc79d943064a90dd3170cb83f979
[ "Apache-2.0" ]
1
2021-01-30T11:59:27.000Z
2021-01-30T11:59:27.000Z
jina/__init__.py
DeepikaGupta09/jina
2fc328d10f2fbc79d943064a90dd3170cb83f979
[ "Apache-2.0" ]
null
null
null
jina/__init__.py
DeepikaGupta09/jina
2fc328d10f2fbc79d943064a90dd3170cb83f979
[ "Apache-2.0" ]
null
null
null
__copyright__ = "Copyright (c) 2020 Jina AI Limited. All rights reserved." __license__ = "Apache-2.0" # do not change this line manually # this is managed by git tag and updated on every release __version__ = '0.6.8' # do not change this line manually # this is managed by proto/build-proto.sh and updated on every execution __proto_version__ = '0.0.65' import platform import sys # do some os-wise patches if sys.version_info < (3, 7, 0): raise OSError('Jina requires Python 3.7 and above, but yours is %s' % sys.version_info) if sys.version_info >= (3, 8, 0) and platform.system() == 'Darwin': # temporary fix for python 3.8 on macos where the default start is set to "spawn" # https://docs.python.org/3/library/multiprocessing.html#contexts-and-start-methods from multiprocessing import set_start_method set_start_method('fork') from datetime import datetime from types import SimpleNamespace import os # fix fork error on MacOS but seems no effect? must do EXPORT manually before jina start os.environ['OBJC_DISABLE_INITIALIZE_FORK_SAFETY'] = 'YES' __uptime__ = datetime.now().strftime('%Y%m%d%H%M%S') # update on MacOS # 1. clean this tuple, # 2. grep -ohE "\'JINA_.*?\'" **/*.py | sort -u | sed "s/$/,/g" # 3. 
copy all lines EXCEPT the first (which is the grep command in the last line) __jina_env__ = ('JINA_ARRAY_QUANT', 'JINA_BINARY_DELIMITER', 'JINA_CONTRIB_MODULE', 'JINA_CONTRIB_MODULE_IS_LOADING', 'JINA_CONTROL_PORT', 'JINA_DB_COLLECTION', 'JINA_DB_HOSTNAME', 'JINA_DB_NAME', 'JINA_DB_PASSWORD', 'JINA_DB_USERNAME', 'JINA_DEFAULT_HOST', 'JINA_DISABLE_UVLOOP', 'JINA_EXECUTOR_WORKDIR', 'JINA_FULL_CLI', 'JINA_IPC_SOCK_TMP', 'JINA_LOG_CONFIG', 'JINA_LOG_NO_COLOR', 'JINA_POD_NAME', 'JINA_PROFILING', 'JINA_RANDOM_PORTS', 'JINA_SOCKET_HWM', 'JINA_TEST_GPU', 'JINA_TEST_PRETRAINED', 'JINA_VCS_VERSION', 'JINA_WARN_UNNAMED') __default_host__ = os.environ.get('JINA_DEFAULT_HOST', '0.0.0.0') __ready_msg__ = 'ready and listening' __stop_msg__ = 'terminated' __unable_to_load_pretrained_model_msg__ = 'Executor depending on pretrained model file could not find the pretrained model' __binary_delimiter__ = os.environ.get('JINA_BINARY_DELIMITER', '460841a0a8a430ae25d9ad7c1f048c57').encode() JINA_GLOBAL = SimpleNamespace() JINA_GLOBAL.imported = SimpleNamespace() JINA_GLOBAL.imported.executors = False JINA_GLOBAL.imported.drivers = False JINA_GLOBAL.imported.hub = False JINA_GLOBAL.logserver = SimpleNamespace() def import_classes(namespace: str, targets=None, show_import_table: bool = False, import_once: bool = False): """ Import all or selected executors into the runtime. This is called when Jina is first imported for registering the YAML constructor beforehand. It can be also used to import third-part or external executors. 
:param namespace: the namespace to import :param targets: the list of executor names to import :param show_import_table: show the import result as a table :param import_once: import everything only once, to avoid repeated import """ import os, re from .logging import default_logger if namespace == 'jina.executors': import_type = 'ExecutorType' if import_once and JINA_GLOBAL.imported.executors: return elif namespace == 'jina.drivers': import_type = 'DriverType' if import_once and JINA_GLOBAL.imported.drivers: return elif namespace == 'jina.hub': import_type = 'ExecutorType' if import_once and JINA_GLOBAL.imported.hub: return else: raise TypeError(f'namespace: {namespace} is unrecognized') from setuptools import find_packages import pkgutil from pkgutil import iter_modules try: path = os.path.dirname(pkgutil.get_loader(namespace).path) except AttributeError: if namespace == 'jina.hub': default_logger.debug(f'hub submodule is not initialized. Please try "git submodule update --init"') return {} modules = set() for info in iter_modules([path]): if (namespace != 'jina.hub' and not info.ispkg) or (namespace == 'jina.hub' and info.ispkg): modules.add('.'.join([namespace, info.name])) for pkg in find_packages(path): modules.add('.'.join([namespace, pkg])) pkgpath = path + '/' + pkg.replace('.', '/') for info in iter_modules([pkgpath]): if (namespace != 'jina.hub' and not info.ispkg) or (namespace == 'jina.hub' and info.ispkg): modules.add('.'.join([namespace, pkg, info.name])) # filter ignored_module_pattern = r'\.tests|\.api|\.bump_version' modules = {m for m in modules if not re.findall(ignored_module_pattern, m)} from collections import defaultdict load_stat = defaultdict(list) bad_imports = [] if isinstance(targets, str): targets = {targets} elif isinstance(targets, list): targets = set(targets) elif targets is None: targets = {} else: raise TypeError(f'target must be a set, but received {targets!r}') depend_tree = {} import importlib from .helper import colored for m 
in modules: try: mod = importlib.import_module(m) for k in dir(mod): # import the class if (getattr(mod, k).__class__.__name__ == import_type) and (not targets or k in targets): try: _c = getattr(mod, k) load_stat[m].append( (k, True, colored('▸', 'green').join(f'{vvv.__name__}' for vvv in _c.mro()[:-1][::-1]))) d = depend_tree for vvv in _c.mro()[:-1][::-1]: if vvv.__name__ not in d: d[vvv.__name__] = {} d = d[vvv.__name__] d['module'] = m if k in targets: targets.remove(k) if not targets: return # target execs are all found and loaded, return try: # load the default request for this executor if possible from .executors.requests import get_default_reqs get_default_reqs(type.mro(getattr(mod, k))) except ValueError: pass except Exception as ex: load_stat[m].append((k, False, ex)) bad_imports.append('.'.join([m, k])) if k in targets: raise ex # target class is found but not loaded, raise return except Exception as ex: load_stat[m].append(('', False, ex)) bad_imports.append(m) if targets: raise ImportError(f'{targets} can not be found in jina') if show_import_table: from .helper import print_load_table, print_dep_tree_rst print_load_table(load_stat) else: if bad_imports: if namespace != 'jina.hub': default_logger.error( f'theses modules or classes can not be imported {bad_imports}. ' f'You can use `jina check` to list all executors and drivers') else: default_logger.warning( f'due to the missing dependencies or bad implementations, {bad_imports} can not be imported ' f'if you are using these executors/drivers, they wont work. 
' f'You can use `jina check` to list all executors and drivers') if namespace == 'jina.executors': JINA_GLOBAL.imported.executors = True elif namespace == 'jina.drivers': JINA_GLOBAL.imported.drivers = True elif namespace == 'jina.hub': JINA_GLOBAL.imported.hub = True return depend_tree # driver first, as executor may contain driver import_classes('jina.drivers', show_import_table=False, import_once=True) import_classes('jina.executors', show_import_table=False, import_once=True) import_classes('jina.hub', show_import_table=False, import_once=True) # manually install the default signal handler import signal signal.signal(signal.SIGINT, signal.default_int_handler) def set_nofile(nofile_atleast=4096): """ sets nofile soft limit to at least 4096, useful for running matlplotlib/seaborn on parallel executing plot generators vs. Ubuntu default ulimit -n 1024 or OS X El Captian 256 temporary setting extinguishing with Python session. """ try: import resource as res except ImportError: # Windows res = None from .logging import default_logger if res is None: return (None,) * 2 soft, ohard = res.getrlimit(res.RLIMIT_NOFILE) hard = ohard if soft < nofile_atleast: soft = nofile_atleast if hard < soft: hard = soft default_logger.debug(f'setting soft & hard ulimit -n {soft} {hard}') try: res.setrlimit(res.RLIMIT_NOFILE, (soft, hard)) except (ValueError, res.error): try: hard = soft default_logger.warning(f'trouble with max limit, retrying with soft,hard {soft},{hard}') res.setrlimit(res.RLIMIT_NOFILE, (soft, hard)) except Exception: default_logger.warning('failed to set ulimit, giving up') soft, hard = res.getrlimit(res.RLIMIT_NOFILE) default_logger.debug(f'ulimit -n soft,hard: {soft} {hard}') return soft, hard set_nofile()
37.583643
123
0.608902
__copyright__ = "Copyright (c) 2020 Jina AI Limited. All rights reserved." __license__ = "Apache-2.0" __version__ = '0.6.8' __proto_version__ = '0.0.65' import platform import sys if sys.version_info < (3, 7, 0): raise OSError('Jina requires Python 3.7 and above, but yours is %s' % sys.version_info) if sys.version_info >= (3, 8, 0) and platform.system() == 'Darwin': mport set_start_method set_start_method('fork') from datetime import datetime from types import SimpleNamespace import os os.environ['OBJC_DISABLE_INITIALIZE_FORK_SAFETY'] = 'YES' __uptime__ = datetime.now().strftime('%Y%m%d%H%M%S') __jina_env__ = ('JINA_ARRAY_QUANT', 'JINA_BINARY_DELIMITER', 'JINA_CONTRIB_MODULE', 'JINA_CONTRIB_MODULE_IS_LOADING', 'JINA_CONTROL_PORT', 'JINA_DB_COLLECTION', 'JINA_DB_HOSTNAME', 'JINA_DB_NAME', 'JINA_DB_PASSWORD', 'JINA_DB_USERNAME', 'JINA_DEFAULT_HOST', 'JINA_DISABLE_UVLOOP', 'JINA_EXECUTOR_WORKDIR', 'JINA_FULL_CLI', 'JINA_IPC_SOCK_TMP', 'JINA_LOG_CONFIG', 'JINA_LOG_NO_COLOR', 'JINA_POD_NAME', 'JINA_PROFILING', 'JINA_RANDOM_PORTS', 'JINA_SOCKET_HWM', 'JINA_TEST_GPU', 'JINA_TEST_PRETRAINED', 'JINA_VCS_VERSION', 'JINA_WARN_UNNAMED') __default_host__ = os.environ.get('JINA_DEFAULT_HOST', '0.0.0.0') __ready_msg__ = 'ready and listening' __stop_msg__ = 'terminated' __unable_to_load_pretrained_model_msg__ = 'Executor depending on pretrained model file could not find the pretrained model' __binary_delimiter__ = os.environ.get('JINA_BINARY_DELIMITER', '460841a0a8a430ae25d9ad7c1f048c57').encode() JINA_GLOBAL = SimpleNamespace() JINA_GLOBAL.imported = SimpleNamespace() JINA_GLOBAL.imported.executors = False JINA_GLOBAL.imported.drivers = False JINA_GLOBAL.imported.hub = False JINA_GLOBAL.logserver = SimpleNamespace() def import_classes(namespace: str, targets=None, show_import_table: bool = False, import_once: bool = False): import os, re from .logging import default_logger if namespace == 'jina.executors': import_type = 'ExecutorType' if import_once and 
JINA_GLOBAL.imported.executors: return elif namespace == 'jina.drivers': import_type = 'DriverType' if import_once and JINA_GLOBAL.imported.drivers: return elif namespace == 'jina.hub': import_type = 'ExecutorType' if import_once and JINA_GLOBAL.imported.hub: return else: raise TypeError(f'namespace: {namespace} is unrecognized') from setuptools import find_packages import pkgutil from pkgutil import iter_modules try: path = os.path.dirname(pkgutil.get_loader(namespace).path) except AttributeError: if namespace == 'jina.hub': default_logger.debug(f'hub submodule is not initialized. Please try "git submodule update --init"') return {} modules = set() for info in iter_modules([path]): if (namespace != 'jina.hub' and not info.ispkg) or (namespace == 'jina.hub' and info.ispkg): modules.add('.'.join([namespace, info.name])) for pkg in find_packages(path): modules.add('.'.join([namespace, pkg])) pkgpath = path + '/' + pkg.replace('.', '/') for info in iter_modules([pkgpath]): if (namespace != 'jina.hub' and not info.ispkg) or (namespace == 'jina.hub' and info.ispkg): modules.add('.'.join([namespace, pkg, info.name])) ignored_module_pattern = r'\.tests|\.api|\.bump_version' modules = {m for m in modules if not re.findall(ignored_module_pattern, m)} from collections import defaultdict load_stat = defaultdict(list) bad_imports = [] if isinstance(targets, str): targets = {targets} elif isinstance(targets, list): targets = set(targets) elif targets is None: targets = {} else: raise TypeError(f'target must be a set, but received {targets!r}') depend_tree = {} import importlib from .helper import colored for m in modules: try: mod = importlib.import_module(m) for k in dir(mod): if (getattr(mod, k).__class__.__name__ == import_type) and (not targets or k in targets): try: _c = getattr(mod, k) load_stat[m].append( (k, True, colored('▸', 'green').join(f'{vvv.__name__}' for vvv in _c.mro()[:-1][::-1]))) d = depend_tree for vvv in _c.mro()[:-1][::-1]: if vvv.__name__ not in d: 
d[vvv.__name__] = {} d = d[vvv.__name__] d['module'] = m if k in targets: targets.remove(k) if not targets: return try: from .executors.requests import get_default_reqs get_default_reqs(type.mro(getattr(mod, k))) except ValueError: pass except Exception as ex: load_stat[m].append((k, False, ex)) bad_imports.append('.'.join([m, k])) if k in targets: raise ex except Exception as ex: load_stat[m].append(('', False, ex)) bad_imports.append(m) if targets: raise ImportError(f'{targets} can not be found in jina') if show_import_table: from .helper import print_load_table, print_dep_tree_rst print_load_table(load_stat) else: if bad_imports: if namespace != 'jina.hub': default_logger.error( f'theses modules or classes can not be imported {bad_imports}. ' f'You can use `jina check` to list all executors and drivers') else: default_logger.warning( f'due to the missing dependencies or bad implementations, {bad_imports} can not be imported ' f'if you are using these executors/drivers, they wont work. 
' f'You can use `jina check` to list all executors and drivers') if namespace == 'jina.executors': JINA_GLOBAL.imported.executors = True elif namespace == 'jina.drivers': JINA_GLOBAL.imported.drivers = True elif namespace == 'jina.hub': JINA_GLOBAL.imported.hub = True return depend_tree import_classes('jina.drivers', show_import_table=False, import_once=True) import_classes('jina.executors', show_import_table=False, import_once=True) import_classes('jina.hub', show_import_table=False, import_once=True) import signal signal.signal(signal.SIGINT, signal.default_int_handler) def set_nofile(nofile_atleast=4096): try: import resource as res except ImportError: res = None from .logging import default_logger if res is None: return (None,) * 2 soft, ohard = res.getrlimit(res.RLIMIT_NOFILE) hard = ohard if soft < nofile_atleast: soft = nofile_atleast if hard < soft: hard = soft default_logger.debug(f'setting soft & hard ulimit -n {soft} {hard}') try: res.setrlimit(res.RLIMIT_NOFILE, (soft, hard)) except (ValueError, res.error): try: hard = soft default_logger.warning(f'trouble with max limit, retrying with soft,hard {soft},{hard}') res.setrlimit(res.RLIMIT_NOFILE, (soft, hard)) except Exception: default_logger.warning('failed to set ulimit, giving up') soft, hard = res.getrlimit(res.RLIMIT_NOFILE) default_logger.debug(f'ulimit -n soft,hard: {soft} {hard}') return soft, hard set_nofile()
true
true
f70ed7a3904e97fcc423d33699a21383a00fac2d
1,141
py
Python
starthinker/task/dcm_api/schema/contentCategory.py
quan/starthinker
4e392415d77affd4a3d91165d1141ab38efd3b8b
[ "Apache-2.0" ]
null
null
null
starthinker/task/dcm_api/schema/contentCategory.py
quan/starthinker
4e392415d77affd4a3d91165d1141ab38efd3b8b
[ "Apache-2.0" ]
null
null
null
starthinker/task/dcm_api/schema/contentCategory.py
quan/starthinker
4e392415d77affd4a3d91165d1141ab38efd3b8b
[ "Apache-2.0" ]
null
null
null
########################################################################### # # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ########################################################################### contentCategory_Schema = [{ 'description': '', 'name': 'accountId', 'type': 'INT64', 'mode': 'NULLABLE' }, { 'description': '', 'name': 'id', 'type': 'INT64', 'mode': 'NULLABLE' }, { 'description': '', 'name': 'kind', 'type': 'STRING', 'mode': 'NULLABLE' }, { 'description': '', 'name': 'name', 'type': 'STRING', 'mode': 'NULLABLE' }]
28.525
75
0.548642
true
true
f70ed86c28d0cafaa9d770eaabf664fc7e463062
24,589
py
Python
federatedml/ftl/hetero_ftl/hetero_ftl_host.py
ZZIQIN/FATE
cc6783927564cbb15c067d5010f1cdf82a5de20a
[ "Apache-2.0" ]
1
2019-07-29T13:22:36.000Z
2019-07-29T13:22:36.000Z
federatedml/ftl/hetero_ftl/hetero_ftl_host.py
ZZIQIN/FATE
cc6783927564cbb15c067d5010f1cdf82a5de20a
[ "Apache-2.0" ]
null
null
null
federatedml/ftl/hetero_ftl/hetero_ftl_host.py
ZZIQIN/FATE
cc6783927564cbb15c067d5010f1cdf82a5de20a
[ "Apache-2.0" ]
null
null
null
# # Copyright 2019 The FATE Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import time import numpy as np from arch.api.utils import log_utils from federatedml.evaluation import Evaluation from federatedml.ftl.data_util.common_data_util import overlapping_samples_converter, load_model_parameters, \ save_model_parameters, create_table, convert_instance_table_to_dict, convert_instance_table_to_array, \ add_random_mask_for_list_of_values, remove_random_mask_from_list_of_values from federatedml.ftl.data_util.log_util import create_shape_msg from federatedml.ftl.eggroll_computation.helper import decrypt_matrix from federatedml.ftl.encrypted_ftl import EncryptedFTLHostModel from federatedml.ftl.encryption.encryption import generate_encryption_key_pair, decrypt_scalar, decrypt_array from federatedml.ftl.faster_encrypted_ftl import FasterEncryptedFTLHostModel from federatedml.ftl.hetero_ftl.hetero_ftl_base import HeteroFTLParty from federatedml.ftl.plain_ftl import PlainFTLHostModel from federatedml.param.param import FTLModelParam from federatedml.util import consts from federatedml.util.transfer_variable import HeteroFTLTransferVariable LOGGER = log_utils.getLogger() class HeteroFTLHost(HeteroFTLParty): def __init__(self, host: PlainFTLHostModel, model_param: FTLModelParam, transfer_variable: HeteroFTLTransferVariable): super(HeteroFTLHost, self).__init__() self.host_model = host self.model_param = model_param self.transfer_variable = transfer_variable 
self.max_iter = model_param.max_iter self.n_iter_ = 0 def prepare_data(self, host_data): LOGGER.info("@ start host prepare data") host_features_dict, _, host_sample_indexes = convert_instance_table_to_dict(host_data) host_sample_indexes = np.array(host_sample_indexes) self._do_remote(host_sample_indexes, name=self.transfer_variable.host_sample_indexes.name, tag=self.transfer_variable.generate_transferid(self.transfer_variable.host_sample_indexes), role=consts.GUEST, idx=-1) guest_sample_indexes = self._do_get(name=self.transfer_variable.guest_sample_indexes.name, tag=self.transfer_variable.generate_transferid(self.transfer_variable.guest_sample_indexes), idx=-1)[0] host_features, overlap_indexes, _ = overlapping_samples_converter(host_features_dict, host_sample_indexes, guest_sample_indexes) return host_features, overlap_indexes def classified(self, prob_table, threshold): """ convert a probability table into a predicted class table. """ predict_table = prob_table.mapValues(lambda x: 1 if x > threshold else 0) return predict_table def evaluate(self, labels, pred_prob, pred_labels, evaluate_param): LOGGER.info("@ start host evaluate") predict_res = None if evaluate_param.classi_type == consts.BINARY: predict_res = pred_prob elif evaluate_param.classi_type == consts.MULTY: predict_res = pred_labels else: LOGGER.warning("unknown classification type, return None as evaluation results") eva = Evaluation(evaluate_param.classi_type) eva_report = eva.report(labels, predict_res, evaluate_param.metrics, evaluate_param.thresholds, evaluate_param.pos_label) LOGGER.info("@ evaluation report:" + str(eva_report)) return eva_report def predict(self, host_data, predict_param): LOGGER.info("@ start host predict") features, labels, instances_indexes = convert_instance_table_to_array(host_data) host_x = np.squeeze(features) LOGGER.debug("host_x: " + str(host_x.shape)) host_prob = self.host_model.predict(host_x) self._do_remote(host_prob, name=self.transfer_variable.host_prob.name, 
tag=self.transfer_variable.generate_transferid( self.transfer_variable.host_prob), role=consts.GUEST, idx=-1) pred_prob = self._do_get(name=self.transfer_variable.pred_prob.name, tag=self.transfer_variable.generate_transferid(self.transfer_variable.pred_prob), idx=-1)[0] pred_prob = np.squeeze(pred_prob) LOGGER.debug("pred_prob: " + str(pred_prob.shape)) pred_prob_table = create_table(pred_prob, instances_indexes) actual_label_table = create_table(labels, instances_indexes) pred_label_table = self.classified(pred_prob_table, predict_param.threshold) if predict_param.with_proba: predict_result = actual_label_table.join(pred_prob_table, lambda label, prob: (label if label > 0 else 0, prob)) predict_result = predict_result.join(pred_label_table, lambda x, y: (x[0], x[1], y)) else: predict_result = actual_label_table.join(pred_label_table, lambda a_label, p_label: (a_label, None, p_label)) return predict_result def load_model(self, model_table_name, model_namespace): LOGGER.info("@ load host model from name/ns" + ", " + str(model_table_name) + ", " + str(model_namespace)) model_parameters = load_model_parameters(model_table_name, model_namespace) self.host_model.restore_model(model_parameters) def save_model(self, model_table_name, model_namespace): LOGGER.info("@ save host model to name/ns" + ", " + str(model_table_name) + ", " + str(model_namespace)) _ = save_model_parameters(self.host_model.get_model_parameters(), model_table_name, model_namespace) class HeteroPlainFTLHost(HeteroFTLHost): def __init__(self, host: PlainFTLHostModel, model_param: FTLModelParam, transfer_variable: HeteroFTLTransferVariable): super(HeteroPlainFTLHost, self).__init__(host, model_param, transfer_variable) def fit(self, host_data): LOGGER.info("@ start host fit") host_x, overlap_indexes = self.prepare_data(host_data) LOGGER.debug("host_x: " + str(host_x.shape)) LOGGER.debug("overlap_indexes: " + str(len(overlap_indexes))) self.host_model.set_batch(host_x, overlap_indexes) while 
self.n_iter_ < self.max_iter: host_comp = self.host_model.send_components() self._do_remote(host_comp, name=self.transfer_variable.host_component_list.name, tag=self.transfer_variable.generate_transferid(self.transfer_variable.host_component_list, self.n_iter_), role=consts.GUEST, idx=-1) guest_comp = self._do_get(name=self.transfer_variable.guest_component_list.name, tag=self.transfer_variable.generate_transferid(self.transfer_variable.guest_component_list, self.n_iter_), idx=-1)[0] self.host_model.receive_components(guest_comp) is_stop = self._do_get(name=self.transfer_variable.is_stopped.name, tag=self.transfer_variable.generate_transferid(self.transfer_variable.is_stopped, self.n_iter_), idx=-1)[0] LOGGER.info("@ time: " + str(time.time()) + ", ep: " + str(self.n_iter_) + ", converged: " + str(is_stop)) self.n_iter_ += 1 if is_stop: break """ Centralized encryption scheme with an arbiter in the loop for decryption. """ class HeteroEncryptFTLHost(HeteroFTLHost): def __init__(self, host, model_param: FTLModelParam, transfer_variable: HeteroFTLTransferVariable): super(HeteroEncryptFTLHost, self).__init__(host, model_param, transfer_variable) self.host_model: EncryptedFTLHostModel = host def _precompute(self): pass def fit(self, host_data): LOGGER.info("@ start host fit") # get public key from arbiter public_key = self._do_get(name=self.transfer_variable.paillier_pubkey.name, tag=self.transfer_variable.generate_transferid(self.transfer_variable.paillier_pubkey), idx=-1)[0] host_x, overlap_indexes = self.prepare_data(host_data) LOGGER.debug("host_x: " + str(host_x.shape)) LOGGER.debug("overlap_indexes: " + str(len(overlap_indexes))) self.host_model.set_batch(host_x, overlap_indexes) self.host_model.set_public_key(public_key) start_time = time.time() while self.n_iter_ < self.max_iter: host_comp = self.host_model.send_components() self._do_remote(host_comp, name=self.transfer_variable.host_component_list.name, 
tag=self.transfer_variable.generate_transferid(self.transfer_variable.host_component_list, self.n_iter_), role=consts.GUEST, idx=-1) guest_comp = self._do_get(name=self.transfer_variable.guest_component_list.name, tag=self.transfer_variable.generate_transferid( self.transfer_variable.guest_component_list, self.n_iter_), idx=-1)[0] self.host_model.receive_components(guest_comp) self._precompute() encrypt_host_gradients = self.host_model.send_gradients() self._do_remote(encrypt_host_gradients, name=self.transfer_variable.encrypt_host_gradient.name, tag=self.transfer_variable.generate_transferid(self.transfer_variable.encrypt_host_gradient, self.n_iter_), role=consts.ARBITER, idx=-1) decrypt_host_gradients = self._do_get(name=self.transfer_variable.decrypt_host_gradient.name, tag=self.transfer_variable.generate_transferid( self.transfer_variable.decrypt_host_gradient, self.n_iter_), idx=-1)[0] self.host_model.receive_gradients(decrypt_host_gradients) is_stop = self._do_get(name=self.transfer_variable.is_encrypted_ftl_stopped.name, tag=self.transfer_variable.generate_transferid(self.transfer_variable.is_encrypted_ftl_stopped, self.n_iter_), idx=-1)[0] LOGGER.info("@ time: " + str(time.time()) + ", ep: " + str(self.n_iter_) + ", converged: " + str(is_stop)) self.n_iter_ += 1 if is_stop: break end_time = time.time() LOGGER.info("@ running time: " + str(end_time - start_time)) class FasterHeteroEncryptFTLHost(HeteroEncryptFTLHost): def __init__(self, host, model_param: FTLModelParam, transfer_variable: HeteroFTLTransferVariable): super(FasterHeteroEncryptFTLHost, self).__init__(host, model_param, transfer_variable) self.host_model: FasterEncryptedFTLHostModel = host def _precompute(self): LOGGER.info("@ start host precompute") host_precomputed_comp = self.host_model.send_precomputed_components() self._do_remote(host_precomputed_comp, name=self.transfer_variable.host_precomputed_comp_list.name, 
tag=self.transfer_variable.generate_transferid(self.transfer_variable.host_precomputed_comp_list, self.n_iter_), role=consts.GUEST, idx=-1) guest_precomputed_comp = self._do_get(name=self.transfer_variable.guest_precomputed_comp_list.name, tag=self.transfer_variable.generate_transferid( self.transfer_variable.guest_precomputed_comp_list, self.n_iter_), idx=-1)[0] self.host_model.receive_precomputed_components(guest_precomputed_comp) """ Decentralized encryption scheme without arbiter in the loop. """ class HeteroDecentralizedEncryptFTLHost(HeteroFTLHost): def __init__(self, host, model_param: FTLModelParam, transfer_variable: HeteroFTLTransferVariable): super(HeteroDecentralizedEncryptFTLHost, self).__init__(host, model_param, transfer_variable) self.host_model: EncryptedFTLHostModel = host self.public_key = None self.private_key = None self.guest_public_key = None def _precompute(self): pass def prepare_encryption_key_pair(self): LOGGER.info("@ start host prepare encryption key pair") self.public_key, self.private_key = generate_encryption_key_pair() # exchange public_key with guest self._do_remote(self.public_key, name=self.transfer_variable.host_public_key.name, tag=self.transfer_variable.generate_transferid(self.transfer_variable.host_public_key, self.n_iter_), role=consts.GUEST, idx=-1) self.guest_public_key = self._do_get(name=self.transfer_variable.guest_public_key.name, tag=self.transfer_variable.generate_transferid( self.transfer_variable.guest_public_key, self.n_iter_), idx=-1)[0] def fit(self, host_data): LOGGER.info("@ start host fit") self.prepare_encryption_key_pair() host_x, overlap_indexes = self.prepare_data(host_data) LOGGER.debug("host_x: " + str(host_x.shape)) LOGGER.debug("overlap_indexes: " + str(len(overlap_indexes))) self.host_model.set_batch(host_x, overlap_indexes) self.host_model.set_public_key(self.public_key) self.host_model.set_guest_public_key(self.guest_public_key) self.host_model.set_private_key(self.private_key) start_time = 
time.time() while self.n_iter_ < self.max_iter: # Stage 1: compute and encrypt components (using host public key) required by guest to # calculate gradients and loss. LOGGER.debug("@ Stage 1: ") host_comp = self.host_model.send_components() LOGGER.debug("send enc host_comp: " + create_shape_msg(host_comp)) self._do_remote(host_comp, name=self.transfer_variable.host_component_list.name, tag=self.transfer_variable.generate_transferid(self.transfer_variable.host_component_list, self.n_iter_), role=consts.GUEST, idx=-1) # Stage 2: receive guest components in encrypted form (encrypted by guest public key), # and calculate host gradients in encrypted form (encrypted by guest public key), # and send them to guest for decryption LOGGER.debug("@ Stage 2: ") guest_comp = self._do_get(name=self.transfer_variable.guest_component_list.name, tag=self.transfer_variable.generate_transferid( self.transfer_variable.guest_component_list, self.n_iter_), idx=-1)[0] LOGGER.debug("receive enc guest_comp: " + create_shape_msg(guest_comp)) self.host_model.receive_components(guest_comp) self._precompute() # calculate host gradients in encrypted form (encrypted by guest public key) encrypt_host_gradients = self.host_model.send_gradients() LOGGER.debug("send encrypt_guest_gradients: " + create_shape_msg(encrypt_host_gradients)) # add random mask to encrypt_host_gradients and send them to guest for decryption masked_enc_host_gradients, gradients_masks = add_random_mask_for_list_of_values(encrypt_host_gradients) LOGGER.debug("send masked_enc_host_gradients: " + create_shape_msg(masked_enc_host_gradients)) self._do_remote(masked_enc_host_gradients, name=self.transfer_variable.masked_enc_host_gradients.name, tag=self.transfer_variable.generate_transferid(self.transfer_variable.masked_enc_host_gradients, self.n_iter_), role=consts.GUEST, idx=-1) # Stage 3: receive and then decrypt masked encrypted guest gradients and masked encrypted guest loss, # and send them to guest LOGGER.debug("@ Stage 3: ") 
masked_enc_guest_gradients = self._do_get(name=self.transfer_variable.masked_enc_guest_gradients.name, tag=self.transfer_variable.generate_transferid(self.transfer_variable.masked_enc_guest_gradients, self.n_iter_), idx=-1)[0] masked_enc_guest_loss = self._do_get(name=self.transfer_variable.masked_enc_loss.name, tag=self.transfer_variable.generate_transferid(self.transfer_variable.masked_enc_loss, self.n_iter_), idx=-1)[0] masked_dec_guest_gradients = self.__decrypt_gradients(masked_enc_guest_gradients) masked_dec_guest_loss = self.__decrypt_loss(masked_enc_guest_loss) LOGGER.debug("send masked_dec_guest_gradients: " + create_shape_msg(masked_dec_guest_gradients)) self._do_remote(masked_dec_guest_gradients, name=self.transfer_variable.masked_dec_guest_gradients.name, tag=self.transfer_variable.generate_transferid(self.transfer_variable.masked_dec_guest_gradients, self.n_iter_), role=consts.GUEST, idx=-1) LOGGER.debug("send masked_dec_guest_loss: " + str(masked_dec_guest_loss)) self._do_remote(masked_dec_guest_loss, name=self.transfer_variable.masked_dec_loss.name, tag=self.transfer_variable.generate_transferid(self.transfer_variable.masked_dec_loss, self.n_iter_), role=consts.GUEST, idx=-1) # Stage 4: receive masked but decrypted host gradients from guest and remove mask, # and update host model parameters using these gradients. LOGGER.debug("@ Stage 4: ") masked_dec_host_gradients = self._do_get(name=self.transfer_variable.masked_dec_host_gradients.name, tag=self.transfer_variable.generate_transferid( self.transfer_variable.masked_dec_host_gradients, self.n_iter_), idx=-1)[0] LOGGER.debug("receive masked_dec_host_gradients: " + create_shape_msg(masked_dec_host_gradients)) cleared_dec_host_gradients = remove_random_mask_from_list_of_values(masked_dec_host_gradients, gradients_masks) # update host model parameters using these gradients. self.host_model.receive_gradients(cleared_dec_host_gradients) # Stage 5: determine whether training is terminated. 
LOGGER.debug("@ Stage 5: ") is_stop = self._do_get(name=self.transfer_variable.is_decentralized_enc_ftl_stopped.name, tag=self.transfer_variable.generate_transferid(self.transfer_variable.is_decentralized_enc_ftl_stopped, self.n_iter_), idx=-1)[0] LOGGER.info("@ time: " + str(time.time()) + ", ep: " + str(self.n_iter_) + ", converged: " + str(is_stop)) self.n_iter_ += 1 if is_stop: break end_time = time.time() LOGGER.info("@ running time: " + str(end_time - start_time)) def __decrypt_gradients(self, encrypt_gradients): return decrypt_matrix(self.private_key, encrypt_gradients[0]), decrypt_array(self.private_key, encrypt_gradients[1]) def __decrypt_loss(self, encrypt_loss): return decrypt_scalar(self.private_key, encrypt_loss) class FasterHeteroDecentralizedEncryptFTLHost(HeteroDecentralizedEncryptFTLHost): def __init__(self, host, model_param: FTLModelParam, transfer_variable: HeteroFTLTransferVariable): super(FasterHeteroDecentralizedEncryptFTLHost, self).__init__(host, model_param, transfer_variable) self.host_model: FasterEncryptedFTLHostModel = host def _precompute(self): LOGGER.debug("@ start precompute") host_precomputed_comp = self.host_model.send_precomputed_components() self._do_remote(host_precomputed_comp, name=self.transfer_variable.host_precomputed_comp_list.name, tag=self.transfer_variable.generate_transferid(self.transfer_variable.host_precomputed_comp_list, self.n_iter_), role=consts.GUEST, idx=-1) guest_precomputed_comp = self._do_get(name=self.transfer_variable.guest_precomputed_comp_list.name, tag=self.transfer_variable.generate_transferid( self.transfer_variable.guest_precomputed_comp_list, self.n_iter_), idx=-1)[0] self.host_model.receive_precomputed_components(guest_precomputed_comp) class HostFactory(object): @classmethod def create(cls, ftl_model_param: FTLModelParam, transfer_variable: HeteroFTLTransferVariable, ftl_local_model): if ftl_model_param.is_encrypt: if ftl_model_param.enc_ftl == "dct_enc_ftl": # decentralized encrypted ftl host 
LOGGER.debug("@ create decentralized encrypted ftl_host") host_model = EncryptedFTLHostModel(local_model=ftl_local_model, model_param=ftl_model_param) host = HeteroDecentralizedEncryptFTLHost(host_model, ftl_model_param, transfer_variable) elif ftl_model_param.enc_ftl == "dct_enc_ftl2": # decentralized encrypted faster ftl host LOGGER.debug("@ create decentralized encrypted faster ftl_host") host_model = FasterEncryptedFTLHostModel(local_model=ftl_local_model, model_param=ftl_model_param) host = FasterHeteroDecentralizedEncryptFTLHost(host_model, ftl_model_param, transfer_variable) elif ftl_model_param.enc_ftl == "enc_ftl2": # encrypted faster ftl host LOGGER.debug("@ create encrypted faster ftl_host") host_model = FasterEncryptedFTLHostModel(local_model=ftl_local_model, model_param=ftl_model_param) host = FasterHeteroEncryptFTLHost(host_model, ftl_model_param, transfer_variable) else: # encrypted ftl host LOGGER.debug("@ create encrypted ftl_host") host_model = EncryptedFTLHostModel(local_model=ftl_local_model, model_param=ftl_model_param) host = HeteroEncryptFTLHost(host_model, ftl_model_param, transfer_variable) else: # plain ftl host LOGGER.debug("@ create plain ftl_host") host_model = PlainFTLHostModel(local_model=ftl_local_model, model_param=ftl_model_param) host = HeteroPlainFTLHost(host_model, ftl_model_param, transfer_variable) return host
52.87957
163
0.65086
import time import numpy as np from arch.api.utils import log_utils from federatedml.evaluation import Evaluation from federatedml.ftl.data_util.common_data_util import overlapping_samples_converter, load_model_parameters, \ save_model_parameters, create_table, convert_instance_table_to_dict, convert_instance_table_to_array, \ add_random_mask_for_list_of_values, remove_random_mask_from_list_of_values from federatedml.ftl.data_util.log_util import create_shape_msg from federatedml.ftl.eggroll_computation.helper import decrypt_matrix from federatedml.ftl.encrypted_ftl import EncryptedFTLHostModel from federatedml.ftl.encryption.encryption import generate_encryption_key_pair, decrypt_scalar, decrypt_array from federatedml.ftl.faster_encrypted_ftl import FasterEncryptedFTLHostModel from federatedml.ftl.hetero_ftl.hetero_ftl_base import HeteroFTLParty from federatedml.ftl.plain_ftl import PlainFTLHostModel from federatedml.param.param import FTLModelParam from federatedml.util import consts from federatedml.util.transfer_variable import HeteroFTLTransferVariable LOGGER = log_utils.getLogger() class HeteroFTLHost(HeteroFTLParty): def __init__(self, host: PlainFTLHostModel, model_param: FTLModelParam, transfer_variable: HeteroFTLTransferVariable): super(HeteroFTLHost, self).__init__() self.host_model = host self.model_param = model_param self.transfer_variable = transfer_variable self.max_iter = model_param.max_iter self.n_iter_ = 0 def prepare_data(self, host_data): LOGGER.info("@ start host prepare data") host_features_dict, _, host_sample_indexes = convert_instance_table_to_dict(host_data) host_sample_indexes = np.array(host_sample_indexes) self._do_remote(host_sample_indexes, name=self.transfer_variable.host_sample_indexes.name, tag=self.transfer_variable.generate_transferid(self.transfer_variable.host_sample_indexes), role=consts.GUEST, idx=-1) guest_sample_indexes = self._do_get(name=self.transfer_variable.guest_sample_indexes.name, 
tag=self.transfer_variable.generate_transferid(self.transfer_variable.guest_sample_indexes), idx=-1)[0] host_features, overlap_indexes, _ = overlapping_samples_converter(host_features_dict, host_sample_indexes, guest_sample_indexes) return host_features, overlap_indexes def classified(self, prob_table, threshold): predict_table = prob_table.mapValues(lambda x: 1 if x > threshold else 0) return predict_table def evaluate(self, labels, pred_prob, pred_labels, evaluate_param): LOGGER.info("@ start host evaluate") predict_res = None if evaluate_param.classi_type == consts.BINARY: predict_res = pred_prob elif evaluate_param.classi_type == consts.MULTY: predict_res = pred_labels else: LOGGER.warning("unknown classification type, return None as evaluation results") eva = Evaluation(evaluate_param.classi_type) eva_report = eva.report(labels, predict_res, evaluate_param.metrics, evaluate_param.thresholds, evaluate_param.pos_label) LOGGER.info("@ evaluation report:" + str(eva_report)) return eva_report def predict(self, host_data, predict_param): LOGGER.info("@ start host predict") features, labels, instances_indexes = convert_instance_table_to_array(host_data) host_x = np.squeeze(features) LOGGER.debug("host_x: " + str(host_x.shape)) host_prob = self.host_model.predict(host_x) self._do_remote(host_prob, name=self.transfer_variable.host_prob.name, tag=self.transfer_variable.generate_transferid( self.transfer_variable.host_prob), role=consts.GUEST, idx=-1) pred_prob = self._do_get(name=self.transfer_variable.pred_prob.name, tag=self.transfer_variable.generate_transferid(self.transfer_variable.pred_prob), idx=-1)[0] pred_prob = np.squeeze(pred_prob) LOGGER.debug("pred_prob: " + str(pred_prob.shape)) pred_prob_table = create_table(pred_prob, instances_indexes) actual_label_table = create_table(labels, instances_indexes) pred_label_table = self.classified(pred_prob_table, predict_param.threshold) if predict_param.with_proba: predict_result = 
actual_label_table.join(pred_prob_table, lambda label, prob: (label if label > 0 else 0, prob)) predict_result = predict_result.join(pred_label_table, lambda x, y: (x[0], x[1], y)) else: predict_result = actual_label_table.join(pred_label_table, lambda a_label, p_label: (a_label, None, p_label)) return predict_result def load_model(self, model_table_name, model_namespace): LOGGER.info("@ load host model from name/ns" + ", " + str(model_table_name) + ", " + str(model_namespace)) model_parameters = load_model_parameters(model_table_name, model_namespace) self.host_model.restore_model(model_parameters) def save_model(self, model_table_name, model_namespace): LOGGER.info("@ save host model to name/ns" + ", " + str(model_table_name) + ", " + str(model_namespace)) _ = save_model_parameters(self.host_model.get_model_parameters(), model_table_name, model_namespace) class HeteroPlainFTLHost(HeteroFTLHost): def __init__(self, host: PlainFTLHostModel, model_param: FTLModelParam, transfer_variable: HeteroFTLTransferVariable): super(HeteroPlainFTLHost, self).__init__(host, model_param, transfer_variable) def fit(self, host_data): LOGGER.info("@ start host fit") host_x, overlap_indexes = self.prepare_data(host_data) LOGGER.debug("host_x: " + str(host_x.shape)) LOGGER.debug("overlap_indexes: " + str(len(overlap_indexes))) self.host_model.set_batch(host_x, overlap_indexes) while self.n_iter_ < self.max_iter: host_comp = self.host_model.send_components() self._do_remote(host_comp, name=self.transfer_variable.host_component_list.name, tag=self.transfer_variable.generate_transferid(self.transfer_variable.host_component_list, self.n_iter_), role=consts.GUEST, idx=-1) guest_comp = self._do_get(name=self.transfer_variable.guest_component_list.name, tag=self.transfer_variable.generate_transferid(self.transfer_variable.guest_component_list, self.n_iter_), idx=-1)[0] self.host_model.receive_components(guest_comp) is_stop = self._do_get(name=self.transfer_variable.is_stopped.name, 
tag=self.transfer_variable.generate_transferid(self.transfer_variable.is_stopped, self.n_iter_), idx=-1)[0] LOGGER.info("@ time: " + str(time.time()) + ", ep: " + str(self.n_iter_) + ", converged: " + str(is_stop)) self.n_iter_ += 1 if is_stop: break class HeteroEncryptFTLHost(HeteroFTLHost): def __init__(self, host, model_param: FTLModelParam, transfer_variable: HeteroFTLTransferVariable): super(HeteroEncryptFTLHost, self).__init__(host, model_param, transfer_variable) self.host_model: EncryptedFTLHostModel = host def _precompute(self): pass def fit(self, host_data): LOGGER.info("@ start host fit") public_key = self._do_get(name=self.transfer_variable.paillier_pubkey.name, tag=self.transfer_variable.generate_transferid(self.transfer_variable.paillier_pubkey), idx=-1)[0] host_x, overlap_indexes = self.prepare_data(host_data) LOGGER.debug("host_x: " + str(host_x.shape)) LOGGER.debug("overlap_indexes: " + str(len(overlap_indexes))) self.host_model.set_batch(host_x, overlap_indexes) self.host_model.set_public_key(public_key) start_time = time.time() while self.n_iter_ < self.max_iter: host_comp = self.host_model.send_components() self._do_remote(host_comp, name=self.transfer_variable.host_component_list.name, tag=self.transfer_variable.generate_transferid(self.transfer_variable.host_component_list, self.n_iter_), role=consts.GUEST, idx=-1) guest_comp = self._do_get(name=self.transfer_variable.guest_component_list.name, tag=self.transfer_variable.generate_transferid( self.transfer_variable.guest_component_list, self.n_iter_), idx=-1)[0] self.host_model.receive_components(guest_comp) self._precompute() encrypt_host_gradients = self.host_model.send_gradients() self._do_remote(encrypt_host_gradients, name=self.transfer_variable.encrypt_host_gradient.name, tag=self.transfer_variable.generate_transferid(self.transfer_variable.encrypt_host_gradient, self.n_iter_), role=consts.ARBITER, idx=-1) decrypt_host_gradients = 
self._do_get(name=self.transfer_variable.decrypt_host_gradient.name, tag=self.transfer_variable.generate_transferid( self.transfer_variable.decrypt_host_gradient, self.n_iter_), idx=-1)[0] self.host_model.receive_gradients(decrypt_host_gradients) is_stop = self._do_get(name=self.transfer_variable.is_encrypted_ftl_stopped.name, tag=self.transfer_variable.generate_transferid(self.transfer_variable.is_encrypted_ftl_stopped, self.n_iter_), idx=-1)[0] LOGGER.info("@ time: " + str(time.time()) + ", ep: " + str(self.n_iter_) + ", converged: " + str(is_stop)) self.n_iter_ += 1 if is_stop: break end_time = time.time() LOGGER.info("@ running time: " + str(end_time - start_time)) class FasterHeteroEncryptFTLHost(HeteroEncryptFTLHost): def __init__(self, host, model_param: FTLModelParam, transfer_variable: HeteroFTLTransferVariable): super(FasterHeteroEncryptFTLHost, self).__init__(host, model_param, transfer_variable) self.host_model: FasterEncryptedFTLHostModel = host def _precompute(self): LOGGER.info("@ start host precompute") host_precomputed_comp = self.host_model.send_precomputed_components() self._do_remote(host_precomputed_comp, name=self.transfer_variable.host_precomputed_comp_list.name, tag=self.transfer_variable.generate_transferid(self.transfer_variable.host_precomputed_comp_list, self.n_iter_), role=consts.GUEST, idx=-1) guest_precomputed_comp = self._do_get(name=self.transfer_variable.guest_precomputed_comp_list.name, tag=self.transfer_variable.generate_transferid( self.transfer_variable.guest_precomputed_comp_list, self.n_iter_), idx=-1)[0] self.host_model.receive_precomputed_components(guest_precomputed_comp) class HeteroDecentralizedEncryptFTLHost(HeteroFTLHost): def __init__(self, host, model_param: FTLModelParam, transfer_variable: HeteroFTLTransferVariable): super(HeteroDecentralizedEncryptFTLHost, self).__init__(host, model_param, transfer_variable) self.host_model: EncryptedFTLHostModel = host self.public_key = None self.private_key = None 
self.guest_public_key = None def _precompute(self): pass def prepare_encryption_key_pair(self): LOGGER.info("@ start host prepare encryption key pair") self.public_key, self.private_key = generate_encryption_key_pair() self._do_remote(self.public_key, name=self.transfer_variable.host_public_key.name, tag=self.transfer_variable.generate_transferid(self.transfer_variable.host_public_key, self.n_iter_), role=consts.GUEST, idx=-1) self.guest_public_key = self._do_get(name=self.transfer_variable.guest_public_key.name, tag=self.transfer_variable.generate_transferid( self.transfer_variable.guest_public_key, self.n_iter_), idx=-1)[0] def fit(self, host_data): LOGGER.info("@ start host fit") self.prepare_encryption_key_pair() host_x, overlap_indexes = self.prepare_data(host_data) LOGGER.debug("host_x: " + str(host_x.shape)) LOGGER.debug("overlap_indexes: " + str(len(overlap_indexes))) self.host_model.set_batch(host_x, overlap_indexes) self.host_model.set_public_key(self.public_key) self.host_model.set_guest_public_key(self.guest_public_key) self.host_model.set_private_key(self.private_key) start_time = time.time() while self.n_iter_ < self.max_iter: LOGGER.debug("@ Stage 1: ") host_comp = self.host_model.send_components() LOGGER.debug("send enc host_comp: " + create_shape_msg(host_comp)) self._do_remote(host_comp, name=self.transfer_variable.host_component_list.name, tag=self.transfer_variable.generate_transferid(self.transfer_variable.host_component_list, self.n_iter_), role=consts.GUEST, idx=-1) LOGGER.debug("@ Stage 2: ") guest_comp = self._do_get(name=self.transfer_variable.guest_component_list.name, tag=self.transfer_variable.generate_transferid( self.transfer_variable.guest_component_list, self.n_iter_), idx=-1)[0] LOGGER.debug("receive enc guest_comp: " + create_shape_msg(guest_comp)) self.host_model.receive_components(guest_comp) self._precompute() encrypt_host_gradients = self.host_model.send_gradients() LOGGER.debug("send encrypt_guest_gradients: " + 
create_shape_msg(encrypt_host_gradients)) masked_enc_host_gradients, gradients_masks = add_random_mask_for_list_of_values(encrypt_host_gradients) LOGGER.debug("send masked_enc_host_gradients: " + create_shape_msg(masked_enc_host_gradients)) self._do_remote(masked_enc_host_gradients, name=self.transfer_variable.masked_enc_host_gradients.name, tag=self.transfer_variable.generate_transferid(self.transfer_variable.masked_enc_host_gradients, self.n_iter_), role=consts.GUEST, idx=-1) LOGGER.debug("@ Stage 3: ") masked_enc_guest_gradients = self._do_get(name=self.transfer_variable.masked_enc_guest_gradients.name, tag=self.transfer_variable.generate_transferid(self.transfer_variable.masked_enc_guest_gradients, self.n_iter_), idx=-1)[0] masked_enc_guest_loss = self._do_get(name=self.transfer_variable.masked_enc_loss.name, tag=self.transfer_variable.generate_transferid(self.transfer_variable.masked_enc_loss, self.n_iter_), idx=-1)[0] masked_dec_guest_gradients = self.__decrypt_gradients(masked_enc_guest_gradients) masked_dec_guest_loss = self.__decrypt_loss(masked_enc_guest_loss) LOGGER.debug("send masked_dec_guest_gradients: " + create_shape_msg(masked_dec_guest_gradients)) self._do_remote(masked_dec_guest_gradients, name=self.transfer_variable.masked_dec_guest_gradients.name, tag=self.transfer_variable.generate_transferid(self.transfer_variable.masked_dec_guest_gradients, self.n_iter_), role=consts.GUEST, idx=-1) LOGGER.debug("send masked_dec_guest_loss: " + str(masked_dec_guest_loss)) self._do_remote(masked_dec_guest_loss, name=self.transfer_variable.masked_dec_loss.name, tag=self.transfer_variable.generate_transferid(self.transfer_variable.masked_dec_loss, self.n_iter_), role=consts.GUEST, idx=-1) LOGGER.debug("@ Stage 4: ") masked_dec_host_gradients = self._do_get(name=self.transfer_variable.masked_dec_host_gradients.name, tag=self.transfer_variable.generate_transferid( self.transfer_variable.masked_dec_host_gradients, self.n_iter_), idx=-1)[0] LOGGER.debug("receive 
masked_dec_host_gradients: " + create_shape_msg(masked_dec_host_gradients)) cleared_dec_host_gradients = remove_random_mask_from_list_of_values(masked_dec_host_gradients, gradients_masks) self.host_model.receive_gradients(cleared_dec_host_gradients) LOGGER.debug("@ Stage 5: ") is_stop = self._do_get(name=self.transfer_variable.is_decentralized_enc_ftl_stopped.name, tag=self.transfer_variable.generate_transferid(self.transfer_variable.is_decentralized_enc_ftl_stopped, self.n_iter_), idx=-1)[0] LOGGER.info("@ time: " + str(time.time()) + ", ep: " + str(self.n_iter_) + ", converged: " + str(is_stop)) self.n_iter_ += 1 if is_stop: break end_time = time.time() LOGGER.info("@ running time: " + str(end_time - start_time)) def __decrypt_gradients(self, encrypt_gradients): return decrypt_matrix(self.private_key, encrypt_gradients[0]), decrypt_array(self.private_key, encrypt_gradients[1]) def __decrypt_loss(self, encrypt_loss): return decrypt_scalar(self.private_key, encrypt_loss) class FasterHeteroDecentralizedEncryptFTLHost(HeteroDecentralizedEncryptFTLHost): def __init__(self, host, model_param: FTLModelParam, transfer_variable: HeteroFTLTransferVariable): super(FasterHeteroDecentralizedEncryptFTLHost, self).__init__(host, model_param, transfer_variable) self.host_model: FasterEncryptedFTLHostModel = host def _precompute(self): LOGGER.debug("@ start precompute") host_precomputed_comp = self.host_model.send_precomputed_components() self._do_remote(host_precomputed_comp, name=self.transfer_variable.host_precomputed_comp_list.name, tag=self.transfer_variable.generate_transferid(self.transfer_variable.host_precomputed_comp_list, self.n_iter_), role=consts.GUEST, idx=-1) guest_precomputed_comp = self._do_get(name=self.transfer_variable.guest_precomputed_comp_list.name, tag=self.transfer_variable.generate_transferid( self.transfer_variable.guest_precomputed_comp_list, self.n_iter_), idx=-1)[0] self.host_model.receive_precomputed_components(guest_precomputed_comp) class 
HostFactory(object): @classmethod def create(cls, ftl_model_param: FTLModelParam, transfer_variable: HeteroFTLTransferVariable, ftl_local_model): if ftl_model_param.is_encrypt: if ftl_model_param.enc_ftl == "dct_enc_ftl": LOGGER.debug("@ create decentralized encrypted ftl_host") host_model = EncryptedFTLHostModel(local_model=ftl_local_model, model_param=ftl_model_param) host = HeteroDecentralizedEncryptFTLHost(host_model, ftl_model_param, transfer_variable) elif ftl_model_param.enc_ftl == "dct_enc_ftl2": LOGGER.debug("@ create decentralized encrypted faster ftl_host") host_model = FasterEncryptedFTLHostModel(local_model=ftl_local_model, model_param=ftl_model_param) host = FasterHeteroDecentralizedEncryptFTLHost(host_model, ftl_model_param, transfer_variable) elif ftl_model_param.enc_ftl == "enc_ftl2": LOGGER.debug("@ create encrypted faster ftl_host") host_model = FasterEncryptedFTLHostModel(local_model=ftl_local_model, model_param=ftl_model_param) host = FasterHeteroEncryptFTLHost(host_model, ftl_model_param, transfer_variable) else: LOGGER.debug("@ create encrypted ftl_host") host_model = EncryptedFTLHostModel(local_model=ftl_local_model, model_param=ftl_model_param) host = HeteroEncryptFTLHost(host_model, ftl_model_param, transfer_variable) else: LOGGER.debug("@ create plain ftl_host") host_model = PlainFTLHostModel(local_model=ftl_local_model, model_param=ftl_model_param) host = HeteroPlainFTLHost(host_model, ftl_model_param, transfer_variable) return host
true
true
f70ed94f13b9a9e2b5858fd0c87188c4b200f45e
12,025
py
Python
h5pyViewer/FrmPyFAI.py
warlock8hz/h5pyViewer
4955aa6fdd66255738bd86d7b8947282133c5b82
[ "BSD-2-Clause" ]
null
null
null
h5pyViewer/FrmPyFAI.py
warlock8hz/h5pyViewer
4955aa6fdd66255738bd86d7b8947282133c5b82
[ "BSD-2-Clause" ]
null
null
null
h5pyViewer/FrmPyFAI.py
warlock8hz/h5pyViewer
4955aa6fdd66255738bd86d7b8947282133c5b82
[ "BSD-2-Clause" ]
null
null
null
#!/usr/bin/env python #*-----------------------------------------------------------------------* #| | #| Copyright (c) 2013 by Paul Scherrer Institute (http://www.psi.ch) | #| | #| Author Thierry Zamofing (thierry.zamofing@psi.ch) | #*-----------------------------------------------------------------------* ''' implements an image view to show a colored image of a hdf5 dataset. ''' if __name__ == '__main__': #Used to guarantee to use at least Wx2.8 import wxversion wxversion.ensureMinimal('2.8') import wx import matplotlib as mpl if __name__ == '__main__': mpl.use('WXAgg') #or mpl.use('WX') #matplotlib.get_backend() import os,h5py import numpy as np import utilities as ut from matplotlib.backends.backend_wxagg import FigureCanvasWxAgg as FigureCanvas import os,h5py from GLCanvasImg import * import pyFAI from hdfImageGL import HdfImageGLFrame from glumpy.image.texture import Texture from scipy import ndimage as ndi def FindCenter(arr): m=ndi.median_filter(arr, 5) sx=m.sum(1) sy=m.sum(0) shape=arr.shape xx=np.arange(shape[0]) yy=np.arange(shape[1]) x=(xx*sx).sum()/sx.sum() y=(yy*sy).sum()/sy.sum() #print x,y #import pylab as plt #used for the colormaps #plt.figure() #plt.subplot(211) #plt.plot(sx) #plt.subplot(212) #plt.plot(sy) #plt.show(block=False) return (x,y) class MPLCanvasPyFAI1D(FigureCanvas): def __init__(self,parent,SetStatusCB=None): if SetStatusCB: self.SetStatusCB=SetStatusCB fig = mpl.figure.Figure() ax = fig.add_axes([0.075,0.1,0.75,0.85]) FigureCanvas.__init__(self,parent, -1, fig) #self.mpl_connect('motion_notify_event', self.OnMotion) #self.mpl_connect('button_press_event', self.OnBtnPress) #self.mpl_connect('button_release_event', self.OnBtnRelease) #self.mpl_connect('scroll_event', self.OnBtnScroll) #self.mpl_connect('key_press_event',self.OnKeyPress) self.fig=fig self.ax=ax def InitChild(self,data): fig=self.fig ax=self.ax ctrX,ctrY=self.center=FindCenter(data) self.ai = pyFAI.AzimuthalIntegrator(1.e3, ctrX, ctrY, 0.0, 0.0, 0.0, 1.e0, 1.e0) 
#canvas=self.canvas self.numPtTh=int(np.average(data.shape)/2.) out=self.ai.xrpd(data,self.numPtTh) self.hl=ax.plot(*out) ax.set_yscale('log') #canvas.data=imgPolar #print imgPolar.shape #out=ai.xrpd(imgData,1000) #out=ai.xrpd_OpenCL(imgData,1000) #import pylab #pylab.plot(*out) #pylab.yscale("log") #pylab.show() class HdfPyFAI1DFrame(wx.Frame): def __init__(self, parent,lbl,hid): wx.Frame.__init__(self, parent, title=lbl, size=wx.Size(850, 650)) imgDir=ut.Path.GetImage() icon = wx.Icon(os.path.join(imgDir,'h5pyViewer.ico'), wx.BITMAP_TYPE_ICO) self.SetIcon(icon) t=type(hid) if t==h5py.h5d.DatasetID: data=h5py.Dataset(hid) canvas = MPLCanvasPyFAI1D(self,self.SetStatusCB) sizer = wx.BoxSizer(wx.VERTICAL) sizer.Add(canvas, 1, wx.LEFT | wx.TOP | wx.GROW) self.SetSizer(sizer) toolbar=ut.AddToolbar(canvas,sizer) wxAxCtrlLst=[] l=len(data.shape) idxXY=(l-2,l-1) for idx,l in enumerate(data.shape): if idx in idxXY: continue wxAxCtrl=ut.SliderGroup(self, label='Axis:%d'%idx,range=(0,l-1)) wxAxCtrl.idx=idx wxAxCtrlLst.append(wxAxCtrl) sizer.Add(wxAxCtrl.sizer, 0, wx.EXPAND | wx.ALIGN_CENTER | wx.ALL, border=5) wxAxCtrl.SetCallback(HdfPyFAI1DFrame.OnSetView,wxAxCtrl) sl=ut.GetSlice(idxXY,data.shape,wxAxCtrlLst) canvas.InitChild(data[sl]) #self.Fit() self.Centre() self.BuildMenu() self.canvas=canvas self.sizer=sizer self.toolbar=toolbar self.data=data self.idxXY=idxXY self.wxAxCtrlLst=wxAxCtrlLst def BuildMenu(self): mnBar = wx.MenuBar() #-------- Edit Menu -------- mn = wx.Menu() #mnItem=mn.Append(wx.ID_ANY, 'Setup Colormap', 'Setup the color mapping ');self.Bind(wx.EVT_MENU, self.OnColmapSetup, mnItem) #mnItem=mn.Append(wx.ID_ANY, 'Linear Mapping', 'Use a linear values to color mapping ');self.Bind(wx.EVT_MENU, self.OnMapLin, mnItem) #mnItem=mn.Append(wx.ID_ANY, 'Log Mapping', 'Use a logarithmic values to color mapping ');self.Bind(wx.EVT_MENU, self.OnMapLog, mnItem) #mnItem=mn.Append(wx.ID_ANY, 'Invert X-Axis', kind=wx.ITEM_CHECK);self.Bind(wx.EVT_MENU, self.OnInvertAxis, 
mnItem) #self.mnIDxAxis=mnItem.GetId() #mnItem=mn.Append(wx.ID_ANY, 'Invert Y-Axis', kind=wx.ITEM_CHECK);self.Bind(wx.EVT_MENU, self.OnInvertAxis, mnItem) mnBar.Append(mn, '&Edit') mn = wx.Menu() #mnItem=mn.Append(wx.ID_ANY, 'Help', 'How to use the image viewer');self.Bind(wx.EVT_MENU, self.OnHelp, mnItem) mnBar.Append(mn, '&Help') self.SetMenuBar(mnBar) self.CreateStatusBar() def SetIdxXY(self,x,y): self.idxXY=(x,y) @staticmethod def SetStatusCB(obj,mode,v): if mode==0: obj.SetStatusText( "x= %d y=%d val=%g"%v,0) elif mode==1: obj.SetStatusText( "Colormap Value %d (drag to scale)"%v,0) else: raise KeyError('wrong mode') @staticmethod def OnSetView(usrData,value,msg): 'called when a slice is selected with the slider controls' imgFrm=usrData.slider.Parent #imgFrm.img.set_array(imgFrm.data[usrData.value,...]) data=imgFrm.data sl=ut.GetSlice(imgFrm.idxXY,data.shape,imgFrm.wxAxCtrlLst) hl=imgFrm.canvas.hl ai=imgFrm.canvas.ai numPtTh=imgFrm.canvas.numPtTh out=ai.xrpd(data[sl],numPtTh) hl[0].set_ydata(out[1]) imgFrm.canvas.draw() pass ########################################### class HdfPyFAIFrame(HdfImageGLFrame): def __init__(self, parent, title, hid): HdfImageGLFrame.__init__(self, parent, title, hid) #HdfPyFAI1DFrame(self, title, hid) canvas=self.canvas raw=canvas.data ctrX,ctrY=FindCenter(raw) self.ai = pyFAI.AzimuthalIntegrator(1.e3, ctrX, ctrY, 0.0, 0.0, 0.0, 1.e0, 1.e0) raw self.numPtTh=int(np.average(raw.shape)/2.) 
self.numPtCh=360 imgPolar,theta,chi=self.ai.xrpd2(raw,self.numPtTh,self.numPtCh) canvas.data=imgPolar print (imgPolar.shape) def BuildMenu(self): HdfImageGLFrame.BuildMenu(self) mnBar=self.GetMenuBar() mn=mnBar.GetMenu(0) itemLst=mn.GetMenuItems() it=itemLst[0] it.GetItemLabel() mnItem=mn.Append(wx.ID_ANY, 'Setup FAI', 'Setup fast azimutal integration ');self.Bind(wx.EVT_MENU, self.OnFAISetup, mnItem) @staticmethod def OnSetView(usrData,value,msg): 'called when a slice is selected with the slider controls' frm=usrData.slider.Parent ds=frm.dataSet canvas=frm.canvas glImg=canvas.glImg sl=ut.GetSlice(frm.idxXY,ds.shape,frm.wxAxCtrlLst) imgPolar,theta,chi=frm.ai.xrpd2(ds[sl],frm.numPtTh,frm.numPtCh) canvas.data[:]=imgPolar[:] glImg.data[:]=canvas.GetTxrData() glImg.update() canvas.OnPaint(None)#force to repaint, Refresh and Update do not force ! #canvas.Refresh(False) #canvas.Update() pass def OnFAISetup(self, event): dlg=DlgSetupPyFAI(self) if dlg.ShowModal()==wx.ID_OK: pass dlg.Destroy() class DlgSetupPyFAI(wx.Dialog): def __init__(self,parent): wx.Dialog.__init__(self,parent,-1,'pyFAI Setup') ai=parent.ai #glColBar=parent.glColBar #dataRange=parent.dataRange txtCtrX=wx.StaticText(self,-1,'center X') txtCtrY=wx.StaticText(self,-1,'center Y') txtNumPtTh=wx.StaticText(self,-1,'number of pt in Theta') txtNumPtCh=wx.StaticText(self,-1,'number of pt in Chi') txtMethod=wx.StaticText(self,-1,'method') self.edCtrX=edCtrX=wx.TextCtrl(self,-1,'%g'%ai.get_poni1(),style=wx.TE_PROCESS_ENTER) self.edCtrY=edCtrY=wx.TextCtrl(self,-1,'%g'%ai.get_poni2(),style=wx.TE_PROCESS_ENTER) self.edNumPtTh=edNumPtTh=wx.TextCtrl(self,-1,'%g'%parent.numPtTh,style=wx.TE_PROCESS_ENTER) self.edNumPtCh=edNumPtCh=wx.TextCtrl(self,-1,'%g'%parent.numPtCh,style=wx.TE_PROCESS_ENTER) self.cbMethod=cbMethod=wx.ComboBox(self, -1, choices=('default','numny'), style=wx.CB_READONLY) #cbtxrFunc.SetSelection(parent.txrTrfFunc) sizer=wx.BoxSizer(wx.VERTICAL) fgs=wx.FlexGridSizer(5,2,5,5) 
fgs.Add(txtCtrX,0,wx.ALIGN_RIGHT) fgs.Add(edCtrX,0,wx.EXPAND) fgs.Add(txtCtrY,0,wx.ALIGN_RIGHT) fgs.Add(edCtrY,0,wx.EXPAND) fgs.Add(txtNumPtTh,0,wx.ALIGN_RIGHT) fgs.Add(edNumPtTh,0,wx.EXPAND) fgs.Add(txtNumPtCh,0,wx.ALIGN_RIGHT) fgs.Add(edNumPtCh,0,wx.EXPAND) fgs.Add(txtMethod,0,wx.ALIGN_RIGHT) fgs.Add(cbMethod,0,wx.EXPAND) sizer.Add(fgs,0,wx.EXPAND|wx.ALL,5) #edVMin.SetFocus() btns = self.CreateButtonSizer(wx.OK|wx.CANCEL) btnApply=wx.Button(self, -1, 'Apply') btns.Add(btnApply, 0, wx.ALL, 5) sizer.Add(btns,0,wx.EXPAND|wx.ALL,5) self.Bind(wx.EVT_BUTTON, self.OnModify, id=wx.ID_OK) self.Bind(wx.EVT_BUTTON, self.OnModify, btnApply) #self.Bind(wx.EVT_TEXT, self.OnModify, edCtrX) #self.Bind(wx.EVT_TEXT, self.OnModify, edCtrY) #self.Bind(wx.EVT_TEXT, self.OnModify, edNumSector) self.Bind(wx.EVT_COMBOBOX, self.OnModify, cbMethod) self.SetSizer(sizer) sizer.Fit(self) def OnModify(self, event): print ('OnModify') frm=self.GetParent() ds=frm.dataSet canvas=frm.canvas glImg=canvas.glImg ai=frm.ai ai.set_poni1(float(self.edCtrX.Value)) ai.set_poni2(float(self.edCtrY.Value)) frm.numPtTh=int(self.edNumPtTh.Value) frm.numPtCh=int(self.edNumPtCh.Value) sl=ut.GetSlice(frm.idxXY,ds.shape,frm.wxAxCtrlLst) imgPolar,theta,chi=frm.ai.xrpd2(ds[sl],frm.numPtTh,frm.numPtCh) if canvas.data.shape==imgPolar.shape: canvas.data[:]=imgPolar[:] glImg.data[:]=canvas.GetTxrData() else: canvas.data=imgPolar; glImg._data=canvas.GetTxrData() glImg._texture=Texture(glImg._data) #self.glImg=glImg=glumpy.image.Image(txrData, colormap=colMap,vmin=txrRng[0], vmax=txrRng[1]) print (canvas.data.shape,glImg.data.shape) glImg.update() canvas.OnPaint(None)#force to repaint, Refresh and Update do not force ! 
frm.Refresh(False) if event.GetId()==wx.ID_OK: event.Skip()#do not consume (use event to close the window and sent return code) if __name__ == '__main__': import os,sys,argparse #since python 2.7 def GetParser(required=True): fnHDF='/scratch/detectorData/e14472_00033.hdf5' #lbl='mcs' lbl='pilatus_1' #lbl='spec' elem='/entry/data/'+lbl exampleCmd='--hdfFile='+fnHDF+' --elem='+elem parser = argparse.ArgumentParser(formatter_class=argparse.RawDescriptionHelpFormatter, description=__doc__, epilog='Example:\n'+os.path.basename(sys.argv[0])+' '+exampleCmd+'\n ') parser.add_argument('--hdfFile', required=required, default=fnHDF, help='the hdf5 to show') parser.add_argument('--elem', required=required, default=elem, help='the path to the element in the hdf5 file') return parser args = parser.parse_args() return args class App(wx.App): def OnInit(self): parser=GetParser() #parser=GetParser(False) # debug with exampleCmd args = parser.parse_args() try: self.fid=fid=h5py.h5f.open(args.hdfFile) except IOError as e: sys.stderr.write('Unable to open File: '+args.hdfFile+'\n') parser.print_usage(sys.stderr) return True try: hid = h5py.h5o.open(fid,args.elem) except KeyError as e: sys.stderr.write('Unable to open Object: '+args.elem+'\n') parser.print_usage(sys.stderr) return True frame = HdfPyFAIFrame(None,args.elem,hid) #frame = HdfPyFAI1DFrame(None,args.elem,hid) frame.Show() self.SetTopWindow(frame) return True def OnExit(self): self.fid.close() ut.StopWatch.Start() app = App() app.MainLoop()
33.495822
139
0.66079
if __name__ == '__main__': import wxversion wxversion.ensureMinimal('2.8') import wx import matplotlib as mpl if __name__ == '__main__': mpl.use('WXAgg') import os,h5py import numpy as np import utilities as ut from matplotlib.backends.backend_wxagg import FigureCanvasWxAgg as FigureCanvas import os,h5py from GLCanvasImg import * import pyFAI from hdfImageGL import HdfImageGLFrame from glumpy.image.texture import Texture from scipy import ndimage as ndi def FindCenter(arr): m=ndi.median_filter(arr, 5) sx=m.sum(1) sy=m.sum(0) shape=arr.shape xx=np.arange(shape[0]) yy=np.arange(shape[1]) x=(xx*sx).sum()/sx.sum() y=(yy*sy).sum()/sy.sum() turn (x,y) class MPLCanvasPyFAI1D(FigureCanvas): def __init__(self,parent,SetStatusCB=None): if SetStatusCB: self.SetStatusCB=SetStatusCB fig = mpl.figure.Figure() ax = fig.add_axes([0.075,0.1,0.75,0.85]) FigureCanvas.__init__(self,parent, -1, fig) self.fig=fig self.ax=ax def InitChild(self,data): fig=self.fig ax=self.ax ctrX,ctrY=self.center=FindCenter(data) self.ai = pyFAI.AzimuthalIntegrator(1.e3, ctrX, ctrY, 0.0, 0.0, 0.0, 1.e0, 1.e0) self.numPtTh=int(np.average(data.shape)/2.) 
out=self.ai.xrpd(data,self.numPtTh) self.hl=ax.plot(*out) ax.set_yscale('log') class HdfPyFAI1DFrame(wx.Frame): def __init__(self, parent,lbl,hid): wx.Frame.__init__(self, parent, title=lbl, size=wx.Size(850, 650)) imgDir=ut.Path.GetImage() icon = wx.Icon(os.path.join(imgDir,'h5pyViewer.ico'), wx.BITMAP_TYPE_ICO) self.SetIcon(icon) t=type(hid) if t==h5py.h5d.DatasetID: data=h5py.Dataset(hid) canvas = MPLCanvasPyFAI1D(self,self.SetStatusCB) sizer = wx.BoxSizer(wx.VERTICAL) sizer.Add(canvas, 1, wx.LEFT | wx.TOP | wx.GROW) self.SetSizer(sizer) toolbar=ut.AddToolbar(canvas,sizer) wxAxCtrlLst=[] l=len(data.shape) idxXY=(l-2,l-1) for idx,l in enumerate(data.shape): if idx in idxXY: continue wxAxCtrl=ut.SliderGroup(self, label='Axis:%d'%idx,range=(0,l-1)) wxAxCtrl.idx=idx wxAxCtrlLst.append(wxAxCtrl) sizer.Add(wxAxCtrl.sizer, 0, wx.EXPAND | wx.ALIGN_CENTER | wx.ALL, border=5) wxAxCtrl.SetCallback(HdfPyFAI1DFrame.OnSetView,wxAxCtrl) sl=ut.GetSlice(idxXY,data.shape,wxAxCtrlLst) canvas.InitChild(data[sl]) self.Centre() self.BuildMenu() self.canvas=canvas self.sizer=sizer self.toolbar=toolbar self.data=data self.idxXY=idxXY self.wxAxCtrlLst=wxAxCtrlLst def BuildMenu(self): mnBar = wx.MenuBar() mn = wx.Menu() mnBar.Append(mn, '&Edit') mn = wx.Menu() mnBar.Append(mn, '&Help') self.SetMenuBar(mnBar) self.CreateStatusBar() def SetIdxXY(self,x,y): self.idxXY=(x,y) @staticmethod def SetStatusCB(obj,mode,v): if mode==0: obj.SetStatusText( "x= %d y=%d val=%g"%v,0) elif mode==1: obj.SetStatusText( "Colormap Value %d (drag to scale)"%v,0) else: raise KeyError('wrong mode') @staticmethod def OnSetView(usrData,value,msg): imgFrm=usrData.slider.Parent data=imgFrm.data sl=ut.GetSlice(imgFrm.idxXY,data.shape,imgFrm.wxAxCtrlLst) hl=imgFrm.canvas.hl ai=imgFrm.canvas.ai numPtTh=imgFrm.canvas.numPtTh out=ai.xrpd(data[sl],numPtTh) hl[0].set_ydata(out[1]) imgFrm.canvas.draw() pass dataSet canvas=frm.canvas glImg=canvas.glImg sl=ut.GetSlice(frm.idxXY,ds.shape,frm.wxAxCtrlLst) 
imgPolar,theta,chi=frm.ai.xrpd2(ds[sl],frm.numPtTh,frm.numPtCh) canvas.data[:]=imgPolar[:] glImg.data[:]=canvas.GetTxrData() glImg.update() canvas.OnPaint(None) pass def OnFAISetup(self, event): dlg=DlgSetupPyFAI(self) if dlg.ShowModal()==wx.ID_OK: pass dlg.Destroy() class DlgSetupPyFAI(wx.Dialog): def __init__(self,parent): wx.Dialog.__init__(self,parent,-1,'pyFAI Setup') ai=parent.ai txtCtrX=wx.StaticText(self,-1,'center X') txtCtrY=wx.StaticText(self,-1,'center Y') txtNumPtTh=wx.StaticText(self,-1,'number of pt in Theta') txtNumPtCh=wx.StaticText(self,-1,'number of pt in Chi') txtMethod=wx.StaticText(self,-1,'method') self.edCtrX=edCtrX=wx.TextCtrl(self,-1,'%g'%ai.get_poni1(),style=wx.TE_PROCESS_ENTER) self.edCtrY=edCtrY=wx.TextCtrl(self,-1,'%g'%ai.get_poni2(),style=wx.TE_PROCESS_ENTER) self.edNumPtTh=edNumPtTh=wx.TextCtrl(self,-1,'%g'%parent.numPtTh,style=wx.TE_PROCESS_ENTER) self.edNumPtCh=edNumPtCh=wx.TextCtrl(self,-1,'%g'%parent.numPtCh,style=wx.TE_PROCESS_ENTER) self.cbMethod=cbMethod=wx.ComboBox(self, -1, choices=('default','numny'), style=wx.CB_READONLY) sizer=wx.BoxSizer(wx.VERTICAL) fgs=wx.FlexGridSizer(5,2,5,5) fgs.Add(txtCtrX,0,wx.ALIGN_RIGHT) fgs.Add(edCtrX,0,wx.EXPAND) fgs.Add(txtCtrY,0,wx.ALIGN_RIGHT) fgs.Add(edCtrY,0,wx.EXPAND) fgs.Add(txtNumPtTh,0,wx.ALIGN_RIGHT) fgs.Add(edNumPtTh,0,wx.EXPAND) fgs.Add(txtNumPtCh,0,wx.ALIGN_RIGHT) fgs.Add(edNumPtCh,0,wx.EXPAND) fgs.Add(txtMethod,0,wx.ALIGN_RIGHT) fgs.Add(cbMethod,0,wx.EXPAND) sizer.Add(fgs,0,wx.EXPAND|wx.ALL,5) btns = self.CreateButtonSizer(wx.OK|wx.CANCEL) btnApply=wx.Button(self, -1, 'Apply') btns.Add(btnApply, 0, wx.ALL, 5) sizer.Add(btns,0,wx.EXPAND|wx.ALL,5) self.Bind(wx.EVT_BUTTON, self.OnModify, id=wx.ID_OK) self.Bind(wx.EVT_BUTTON, self.OnModify, btnApply) self.Bind(wx.EVT_COMBOBOX, self.OnModify, cbMethod) self.SetSizer(sizer) sizer.Fit(self) def OnModify(self, event): print ('OnModify') frm=self.GetParent() ds=frm.dataSet canvas=frm.canvas glImg=canvas.glImg ai=frm.ai 
ai.set_poni1(float(self.edCtrX.Value)) ai.set_poni2(float(self.edCtrY.Value)) frm.numPtTh=int(self.edNumPtTh.Value) frm.numPtCh=int(self.edNumPtCh.Value) sl=ut.GetSlice(frm.idxXY,ds.shape,frm.wxAxCtrlLst) imgPolar,theta,chi=frm.ai.xrpd2(ds[sl],frm.numPtTh,frm.numPtCh) if canvas.data.shape==imgPolar.shape: canvas.data[:]=imgPolar[:] glImg.data[:]=canvas.GetTxrData() else: canvas.data=imgPolar; glImg._data=canvas.GetTxrData() glImg._texture=Texture(glImg._data) print (canvas.data.shape,glImg.data.shape) glImg.update() canvas.OnPaint(None) frm.Refresh(False) if event.GetId()==wx.ID_OK: event.Skip() if __name__ == '__main__': import os,sys,argparse def GetParser(required=True): fnHDF='/scratch/detectorData/e14472_00033.hdf5' lbl='pilatus_1' elem='/entry/data/'+lbl exampleCmd='--hdfFile='+fnHDF+' --elem='+elem parser = argparse.ArgumentParser(formatter_class=argparse.RawDescriptionHelpFormatter, description=__doc__, epilog='Example:\n'+os.path.basename(sys.argv[0])+' '+exampleCmd+'\n ') parser.add_argument('--hdfFile', required=required, default=fnHDF, help='the hdf5 to show') parser.add_argument('--elem', required=required, default=elem, help='the path to the element in the hdf5 file') return parser args = parser.parse_args() return args class App(wx.App): def OnInit(self): parser=GetParser() rse_args() try: self.fid=fid=h5py.h5f.open(args.hdfFile) except IOError as e: sys.stderr.write('Unable to open File: '+args.hdfFile+'\n') parser.print_usage(sys.stderr) return True try: hid = h5py.h5o.open(fid,args.elem) except KeyError as e: sys.stderr.write('Unable to open Object: '+args.elem+'\n') parser.print_usage(sys.stderr) return True frame = HdfPyFAIFrame(None,args.elem,hid) frame.Show() self.SetTopWindow(frame) return True def OnExit(self): self.fid.close() ut.StopWatch.Start() app = App() app.MainLoop()
true
true
f70eda35866cba2084e688dbeedbd967c56a9f44
548
py
Python
my_agents/evaluate.py
ramirezalbert3/my_agents
fd8ffc8c2f157292b4d309ab1899326007aea726
[ "MIT" ]
null
null
null
my_agents/evaluate.py
ramirezalbert3/my_agents
fd8ffc8c2f157292b4d309ab1899326007aea726
[ "MIT" ]
null
null
null
my_agents/evaluate.py
ramirezalbert3/my_agents
fd8ffc8c2f157292b4d309ab1899326007aea726
[ "MIT" ]
null
null
null
import gym from gym import logger from core.states import StateSerializer from core.runner import Runner from agents.dqn_agent import DQNAgent logger.set_level(logger.INFO) env_name = 'CartPole-v0' env = gym.make(env_name) env._max_episode_steps = 500 serializer = StateSerializer(env.observation_space.shape) agent = DQNAgent.from_h5(file_path=env_name+'.h5') runner = Runner(env, serializer, agent, epsilon_policy = lambda e: 0, max_episode_steps = 500) runner.render() runner.demonstrate(num_episodes=100)
22.833333
57
0.751825
import gym from gym import logger from core.states import StateSerializer from core.runner import Runner from agents.dqn_agent import DQNAgent logger.set_level(logger.INFO) env_name = 'CartPole-v0' env = gym.make(env_name) env._max_episode_steps = 500 serializer = StateSerializer(env.observation_space.shape) agent = DQNAgent.from_h5(file_path=env_name+'.h5') runner = Runner(env, serializer, agent, epsilon_policy = lambda e: 0, max_episode_steps = 500) runner.render() runner.demonstrate(num_episodes=100)
true
true