Dataset columns:
  code         string  (lengths 22 - 1.05M)
  apis         list    (lengths 1 - 3.31k)
  extract_api  string  (lengths 75 - 3.25M)
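Each row pairs a raw Python source file (`code`) with the fully qualified names of the third-party calls it makes (`apis`) and a per-call annotation string (`extract_api`). The annotation string is a Python literal, so it can be parsed with `ast.literal_eval`; the sketch below decodes one entry taken from the first row. The per-field meanings are inferred from the sample rows in this dump rather than from a documented schema, so treat them — and the exact end-offset convention of the spans — as assumptions to verify against your copy of the data.

import ast

# Minimal decoding sketch for one `extract_api` entry (taken from the first
# row below). Field meanings are inferred from the samples, not an official
# schema; in particular, check whether span end offsets are inclusive or
# exclusive before relying on them.
raw = ("[((174, 188), 'devns.Config', 'devns.Config', ([], {}), '()\\n', "
       "(186, 188), False, 'import devns\\n')]")

for entry in ast.literal_eval(raw):
    (call_span,       # (start, end) character offsets of the full call in `code`
     qualified_name,  # fully qualified API name, e.g. 'devns.Config'
     local_name,      # the callable as it is spelled in the source
     (args, kwargs),  # positional and keyword argument strings
     arg_text,        # the argument list rendered back as source text
     paren_span,      # (start, end) offsets of the parenthesised arguments
     aliased,         # True when the import binds the name under an alias
     import_stmt) = entry  # the import statement that introduced the name
    print(qualified_name, call_span, aliased)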
import functools import logging import os import pytest import tempfile import devns import devns.cli from mock import MagicMock @pytest.fixture def config(): return devns.Config() @pytest.yield_fixture def resolver_dir(config): resolvers = [] config.resolver_dir = os.path.join( tempfile.gettempdir(), "{0}-{1}".format(tempfile.gettempprefix(), "resolver") ) resolvers.append(config.resolver_dir) yield config.resolver_dir resolvers.append(config.resolver_dir) for resolver in filter(None, set(resolvers)): if os.path.isdir(resolver): os.rmdir(resolver) @pytest.fixture def logger(request): return logging.getLogger(request.node.nodeid) @pytest.fixture def parse_args(config): return functools.partial(devns.cli.parse_args, config=config) @pytest.yield_fixture def server(config, resolver_dir): yield devns.server.DevNS(config) @pytest.fixture def Connection(): class Connection(object): settimeout = MagicMock() bind = MagicMock() sendto = MagicMock() def __init__(self, responses, expected): self.responses = responses self.expected = expected def getsockname(self): return "0.0.0.0", 53535 def recvfrom(self, length): response = self.responses.pop() if isinstance(response, tuple): return response raise response() return Connection
[ "functools.partial", "devns.Config", "os.path.isdir", "tempfile.gettempdir", "devns.server.DevNS", "os.rmdir", "mock.MagicMock", "tempfile.gettempprefix", "logging.getLogger" ]
[((174, 188), 'devns.Config', 'devns.Config', ([], {}), '()\n', (186, 188), False, 'import devns\n'), ((679, 717), 'logging.getLogger', 'logging.getLogger', (['request.node.nodeid'], {}), '(request.node.nodeid)\n', (696, 717), False, 'import logging\n'), ((771, 825), 'functools.partial', 'functools.partial', (['devns.cli.parse_args'], {'config': 'config'}), '(devns.cli.parse_args, config=config)\n', (788, 825), False, 'import functools\n'), ((306, 327), 'tempfile.gettempdir', 'tempfile.gettempdir', ([], {}), '()\n', (325, 327), False, 'import tempfile\n'), ((573, 596), 'os.path.isdir', 'os.path.isdir', (['resolver'], {}), '(resolver)\n', (586, 596), False, 'import os\n'), ((894, 920), 'devns.server.DevNS', 'devns.server.DevNS', (['config'], {}), '(config)\n', (912, 920), False, 'import devns\n'), ((1008, 1019), 'mock.MagicMock', 'MagicMock', ([], {}), '()\n', (1017, 1019), False, 'from mock import MagicMock\n'), ((1035, 1046), 'mock.MagicMock', 'MagicMock', ([], {}), '()\n', (1044, 1046), False, 'from mock import MagicMock\n'), ((1064, 1075), 'mock.MagicMock', 'MagicMock', ([], {}), '()\n', (1073, 1075), False, 'from mock import MagicMock\n'), ((354, 378), 'tempfile.gettempprefix', 'tempfile.gettempprefix', ([], {}), '()\n', (376, 378), False, 'import tempfile\n'), ((610, 628), 'os.rmdir', 'os.rmdir', (['resolver'], {}), '(resolver)\n', (618, 628), False, 'import os\n')]
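Records of this shape can be approximated with the standard-library `ast` module: collect the import bindings, then emit one record per call whose callee resolves to an imported name. The sketch below is a simplified reconstruction under that assumption — it is not necessarily the pipeline that built this dataset, and it only resolves plain names and one-level attribute calls (so `import torch.nn as nn; nn.L1Loss()` maps back to `torch.nn.L1Loss`, matching the rows further down).

import ast

def extract_api_calls(code: str):
    """Sketch: (qualified_name, call_source) pairs for calls to imported names."""
    tree = ast.parse(code)
    bindings = {}  # local name -> fully qualified name
    for node in ast.walk(tree):
        if isinstance(node, ast.Import):
            for alias in node.names:
                bindings[alias.asname or alias.name] = alias.name
        elif isinstance(node, ast.ImportFrom) and node.module:
            for alias in node.names:
                bindings[alias.asname or alias.name] = f"{node.module}.{alias.name}"
    records = []
    for node in ast.walk(tree):
        if isinstance(node, ast.Call):
            func = node.func
            if isinstance(func, ast.Name) and func.id in bindings:
                # e.g. `MagicMock()` bound by `from mock import MagicMock`
                records.append((bindings[func.id], ast.unparse(node)))
            elif (isinstance(func, ast.Attribute)
                  and isinstance(func.value, ast.Name)
                  and func.value.id in bindings):
                # e.g. `devns.Config()` bound by `import devns`
                records.append((f"{bindings[func.value.id]}.{func.attr}", ast.unparse(node)))
    return records

# ast.unparse requires Python 3.9+
print(extract_api_calls("import devns\ncfg = devns.Config()"))
# -> [('devns.Config', 'devns.Config()')]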
from .Node import Node from .FusedNode import FusedNode from .SimpleBranchConditionNode import SimpleBranchConditionNode from .MultipleBranchConditionNode import MultipleBranchConditionNode from .ControlLoopNode import ControlLoopNode from .LabelLoopNode import LabelLoopNode from .LoopNode import LoopNode from .MultipleLabelLoopNode import MultipleLabelLoopNode from Utils.config import * from Utils.utils import clean_regex import graphviz class Graph: def __init__(self, start_node): self.start_node = start_node self.last_node = start_node self.open_ifs = [] self.open_loops = {} self.open_control_loops = [] self.all_labels = {} self.all_nodes = [start_node] def get_last_node(self): return self.all_nodes[-1] def match_if(self, branch_node): res = None # print(f"Searching corr node for: {branch_node}") for cond_node in self.open_ifs: # print(f"Looking at {cond_node} at depth {cond_node.get_depth()}") # print(f"{cond_node.get_regex()} VS {branch_node.get_regex()}") if cond_node.get_depth() == branch_node.get_depth() and ( cond_node.get_regex() == branch_node.get_regex() or branch_node.get_type() == NODE_COND_END_ANY): # print("match !") res = cond_node return res def match_loops(self, node): to_remove = [] # print(f"IN MATCH LOOPS {node}") # print(self.open_loops) if node.get_label() in self.open_loops.keys(): for loop in self.open_loops[node.get_label()]: # print("MATCH") loop.add_child(node) if loop.is_complete(): to_remove.append(loop) for n in to_remove: self.open_loops[node.get_label()].remove(n) def match_control_node(self, control_node, child): to_remove = [] found = False for n in self.open_control_loops: if isinstance(n, ControlLoopNode) and control_node.get_type() == n.get_control(): n.add_child(child, match=True) found = True to_remove.append(n) for n in to_remove: self.open_control_loops.remove(n) return found def match_labels(self, loop_node): if isinstance(loop_node, ControlLoopNode): return True # print(f"IN MATCH LABELS {loop_node}") # print(self.all_labels) if isinstance(loop_node, MultipleLabelLoopNode): for l in loop_node.get_label(): if l in self.all_labels.keys(): # print("MATCH") loop_node.add_child(self.all_labels[l]) if len(loop_node.get_childs()) == len(loop_node.get_label())+1: return False elif isinstance(loop_node, LabelLoopNode): if loop_node.get_label() in self.all_labels.keys(): # print("MATCH") loop_node.add_child(self.all_labels[loop_node.get_label()]) return False return True def update_open_loops(self, node): if node.is_control(): self.open_control_loops.append(node) elif self.match_labels(node): if node.is_multiple_labels(): for l in node.get_label(): if l in self.open_loops.keys(): self.open_loops[l].append(node) else: self.open_loops[l] = [node] else: if node.get_label() in self.open_loops.keys(): self.open_loops[node.get_label()].append(node) else: self.open_loops[node.get_label()] = [node] def add_single_node(self, node): if self.last_node is not None: self.last_node.add_child(node) self.last_node = node self.all_nodes.append(node) def add_node(self, node): # print(f"Adding node {node}") if node.get_type() == NODE_COND_START: # print(">>> In IF") self.add_single_node(node) self.open_ifs.append(node) elif node.get_type() == NODE_SQL: self.add_single_node(node) elif node.get_type() == NODE_LABEL: # print(">>> In EXEC") self.add_single_node(node) self.all_labels[node.get_label()] = node self.match_loops(node) elif node.get_type() == NODE_LOOP: self.add_single_node(node) #print(f"Loop node {node}") if not node.is_goback_node(): #If we don't need to go back to this node, don't print("Node was a GOTO, cancel last node") self.last_node = None self.update_open_loops(node) elif node.get_type() == NODE_COND_BRANCH: # print(">>> In ELSE") corr_if = self.match_if(node) if corr_if is None: print(f"Was looking for node to match {node} in array {self.open_ifs}, but found none") raise Exception('Missmatched condition branch') temp = Node(corr_if.get_depth(), NODE_CONTROL) if isinstance(node, MultipleBranchConditionNode): # We have a multiple condition, grab the condition corr_if.add_branch_condition(node, temp) corr_if.close_branch() # If we found an else, we should close the previous branch corr_if.add_child(temp) self.all_nodes.append(temp) self.last_node = temp elif node.get_type() == NODE_COND_END or node.get_type() == NODE_COND_END_ANY: # print(">>> In END-IF") corr_if = self.match_if(node) temp = Node(0, NODE_CONTROL) if corr_if is None else Node(corr_if.get_depth(), NODE_CONTROL) f = False if node.get_type() == NODE_COND_END_ANY: f = self.match_control_node(node, temp) if corr_if is None and not f: print(f"Was looking for node to match {node} in array {self.open_ifs} or {self.open_control_loops}, but found none") raise Exception('Missmatched condition end') # print(f"Found matching if: {corr_if}", flush=True) if corr_if is not None: corr_if.close(temp) self.open_ifs.remove(corr_if) elif self.last_node is not None: self.last_node.add_child(temp) self.last_node = temp self.all_nodes.append(temp) elif node.get_type() == "END": # Adding the end_node self.add_single_node(node) # print("Added the last_node") else: print(f"Issue during adding node {node}") def get_start_node(self): return self.start_node def get_size(self): return len(self.all_nodes) def get_all_nodes(self): return self.all_nodes def replace_child(self, target, old_child, new_child): if isinstance(target, SimpleBranchConditionNode): if target.true_child == old_child: target.remove_child(old_child) target.add_child(new_child, end=True, branch=True) if target.false_child == old_child: target.remove_child(old_child) target.add_child(new_child, end=True, branch=False) elif isinstance(target, LoopNode): target.remove_child(old_child) target.add_child(new_child, match=True) else: target.remove_child(old_child) target.add_child(new_child) for grand_child in old_child.get_childs(): grand_child.remove_parent(old_child) grand_child.add_parent(target) if old_child in self.all_nodes: self.all_nodes.remove(old_child) def cleanup_triangle(self, current_node, new_child): self.replace_child(current_node.get_parent()[0], current_node, new_child) def one_parent(self, node): if len(node.get_parent()) == 0: return False if len(node.get_parent()) == 1: # Easy case, only one parent return True first = node.get_parent()[0] for elem in node.get_parent(): if elem != node: # Dirty if elem != first: return False return True def cleanup(self, label_clean=False): #for o in self.open_loops: # if self.open_loops[o] != []: # print(f"Found open loop: {o}, {self.open_loops[o]}") cleaned = True while cleaned == True: visited = [] cleaned = False start_node = self.all_nodes.copy() while len(start_node) != 0: current_node = start_node[0] visited.append(current_node) if current_node.get_type() == NODE_CONTROL: children = current_node.get_childs() # print(f">>> Found control node ! len children: {len(children)} len grand_children: {len(children[0].get_childs())}") if len(children) == 2: if children[0] == children[1]: # Two links pointing the same direction current_node.remove_child(children[0]) cleaned = True # We are dealing with a triangle (V1) elif children[1] == children[0].get_childs()[0]: self.cleanup_triangle(current_node, children[1]) cleaned = True # We are dealing with a triangle (V2) elif children[0] == children[1].get_childs()[0]: self.cleanup_triangle(current_node, children[0]) cleaned = True elif len(children) == 1: # We are in a control node having a single child of a control node parent_node = current_node.get_parent().copy() for p in parent_node: self.replace_child(p, current_node, children[0]) cleaned = True elif current_node.get_type() == NODE_LABEL and label_clean: parents = current_node.get_parent() if len(parents) == 1 and parents[0].get_type() != NODE_LOOP: # A single parent that is not a GOTO child_node = current_node.get_childs().copy() for c in child_node: self.replace_child(parents[0], current_node, c) cleaned = True for child in current_node.get_childs().copy(): # Look at a node's childrens if child.get_type() == NODE_CONTROL: # When we find a control node if len(child.get_childs()) == 1 and self.one_parent(child): # Only one parent and one child self.replace_child(current_node, child, child.get_childs()[0]) cleaned = True start_node.remove(current_node) def fuse(self, node_up, node_down): if node_down.get_type() == NODE_FUSED: node_down.fuse_node(node_up, up=True) self.all_nodes.remove(node_up) return node_down elif node_up.get_type() == NODE_FUSED: node_up.fuse_node(node_down, down=True) self.all_nodes.remove(node_down) else: node = FusedNode(node_up.get_depth(), NODE_FUSED) self.all_nodes.append(node) node.fuse_node(node_up, up=True) self.all_nodes.remove(node_up) node.fuse_node(node_down, down=True) self.all_nodes.remove(node_down) return node def squish(self): squished = True while squished: squished = False start_node = self.all_nodes.copy() all_nodes = [] while len(start_node) != 0: current_node = start_node[0] all_nodes.append(current_node) if ( current_node.get_type() == NODE_COND_START or current_node.get_type() == NODE_FUSED) and current_node.point_to_one(): # We are in a if node that points to a single node # print(f"Found a node that points to one: {current_node} with childs: {current_node.get_childs()}") child = current_node.get_childs()[0] if child.get_type() != "END" and child.get_type() != NODE_SQL: merge = True for c in child.get_childs(): if c.get_type() == NODE_SQL: merge = False if merge: # print("Decided to merge !") res = self.fuse(current_node, child) squished = True break elif current_node.get_type() == NODE_FUSED and any( child.get_type() == NODE_FUSED for child in current_node.get_childs()): # print("Found a fused node having fused child") # We found a fused node having a FUSED child to_fuse = None for child in current_node.get_childs(): if child.get_type() == NODE_FUSED: to_fuse = child if to_fuse != None: # print("Decided to fuse the fuse") res = self.fuse(current_node, to_fuse) squished = True start_node.remove(current_node) def save_as_file(self, filename, output_dir='doctest-output'): dot = graphviz.Digraph(filename) for current_node in self.all_nodes: if current_node.get_type() == NODE_COND_START: dot.attr('node', shape='ellipse') dot.node(str(current_node.id), clean_regex(current_node) + " " + current_node.condition) elif current_node.get_type() == NODE_LOOP: dot.attr('node', shape='ellipse') if isinstance(current_node, ControlLoopNode): dot.node(str(current_node.id), clean_regex(current_node)) elif isinstance(current_node, MultipleLabelLoopNode): dot.node(str(current_node.id), "PERFORM " + current_node.get_label()[0]+" THRU "+ current_node.get_label()[1]) elif isinstance(current_node, LabelLoopNode): dot.node(str(current_node.id), clean_regex(current_node)+" " + current_node.get_label()) elif current_node.get_type() == NODE_SQL: dot.attr('node', shape='box') dot.node(str(current_node.id), current_node.parsable) elif current_node.get_type() == NODE_LABEL: dot.attr('node', shape='note') dot.node(str(current_node.id), current_node.label) elif current_node.get_type() == NODE_CONTROL: dot.node(str(current_node.id), str(current_node.id)) elif current_node.get_type() == "START": dot.attr('node', shape='diamond') dot.node(str(current_node.id), 'START') elif current_node.get_type() == "END": dot.attr('node', shape='diamond') dot.node(str(current_node.id), 'END') elif current_node.get_type() == NODE_FUSED: dot.attr('node', shape='circle') dot.node(str(current_node.id), str(current_node.amount_contained())) for n in self.all_nodes: if n.get_type() == NODE_COND_START and isinstance(n, SimpleBranchConditionNode): dot.edge(str(n.id), str(n.true_child.id), label='True') dot.edge(str(n.id), str(n.false_child.id), label='False') elif n.get_type() == NODE_COND_START and isinstance(n, MultipleBranchConditionNode): for condition in n.get_branch_childs().keys(): # print(condition) dot.edge(str(n.id), str(n.get_branch_child(condition).id), label=condition) elif n.get_type() == NODE_LOOP and n.is_goback_node(): # print(f"Doing node {n}") for link in n.get_childs(): dot.edge(str(n.id), str(link.id)) if link.get_type() == NODE_LABEL and link.get_label() == n.go_back_label(): dot.edge(str(link.id), str(n.id), label="Go back") else: for link in n.get_childs(): dot.edge(str(n.id), str(link.id)) dot.render(directory=output_dir, view=False) def add_node_to_list(self, node): self.all_nodes.append(node) def __str__(self): result = "Nodes in graph:\n" for n in self.all_nodes: # print(n.id) result += n.__repr__() + "\n" return result
[ "graphviz.Digraph", "Utils.utils.clean_regex" ]
[((11111, 11137), 'graphviz.Digraph', 'graphviz.Digraph', (['filename'], {}), '(filename)\n', (11127, 11137), False, 'import graphviz\n'), ((11299, 11324), 'Utils.utils.clean_regex', 'clean_regex', (['current_node'], {}), '(current_node)\n', (11310, 11324), False, 'from Utils.utils import clean_regex\n'), ((11527, 11552), 'Utils.utils.clean_regex', 'clean_regex', (['current_node'], {}), '(current_node)\n', (11538, 11552), False, 'from Utils.utils import clean_regex\n'), ((11814, 11839), 'Utils.utils.clean_regex', 'clean_regex', (['current_node'], {}), '(current_node)\n', (11825, 11839), False, 'from Utils.utils import clean_regex\n')]
import torch.nn as nn from loss.gradient import grad_loss from loss.tvloss import tv_loss from loss.vggloss import vgg_loss from loss.ssim import ssim_loss as criterionSSIM from options import opt criterionCAE = nn.L1Loss() criterionL1 = criterionCAE criterionBCE = nn.BCELoss() criterionMSE = nn.MSELoss() def get_default_loss(recovered, y, avg_meters): ssim = - criterionSSIM(recovered, y) ssim_loss = ssim * opt.weight_ssim # Compute L1 loss (not used) l1_loss = criterionL1(recovered, y) l1_loss = l1_loss * opt.weight_l1 loss = ssim_loss + l1_loss # record losses avg_meters.update({'ssim': -ssim.item(), 'L1': l1_loss.item()}) if opt.weight_grad: loss_grad = grad_loss(recovered, y) * opt.weight_grad loss += loss_grad avg_meters.update({'gradient': loss_grad.item()}) if opt.weight_vgg: content_loss = vgg_loss(recovered, y) * opt.weight_vgg loss += content_loss avg_meters.update({'vgg': content_loss.item()}) return loss
[ "loss.ssim.ssim_loss", "torch.nn.MSELoss", "torch.nn.BCELoss", "torch.nn.L1Loss", "loss.vggloss.vgg_loss", "loss.gradient.grad_loss" ]
[((213, 224), 'torch.nn.L1Loss', 'nn.L1Loss', ([], {}), '()\n', (222, 224), True, 'import torch.nn as nn\n'), ((267, 279), 'torch.nn.BCELoss', 'nn.BCELoss', ([], {}), '()\n', (277, 279), True, 'import torch.nn as nn\n'), ((295, 307), 'torch.nn.MSELoss', 'nn.MSELoss', ([], {}), '()\n', (305, 307), True, 'import torch.nn as nn\n'), ((371, 398), 'loss.ssim.ssim_loss', 'criterionSSIM', (['recovered', 'y'], {}), '(recovered, y)\n', (384, 398), True, 'from loss.ssim import ssim_loss as criterionSSIM\n'), ((716, 739), 'loss.gradient.grad_loss', 'grad_loss', (['recovered', 'y'], {}), '(recovered, y)\n', (725, 739), False, 'from loss.gradient import grad_loss\n'), ((889, 911), 'loss.vggloss.vgg_loss', 'vgg_loss', (['recovered', 'y'], {}), '(recovered, y)\n', (897, 911), False, 'from loss.vggloss import vgg_loss\n')]
from __future__ import unicode_literals from setuptools import setup, find_packages with open("requirements.txt") as f: install_requires = f.read().strip().split("\n") # get version from __version__ variable in network_billing_system/__init__.py from network_billing_system import __version__ as version setup( name="network_billing_system", version=version, description="A complete billing system integrated with pfsense", author="stephen", author_email="<EMAIL>", packages=find_packages(), zip_safe=False, include_package_data=True, install_requires=install_requires, )
[ "setuptools.find_packages" ]
[((507, 522), 'setuptools.find_packages', 'find_packages', ([], {}), '()\n', (520, 522), False, 'from setuptools import setup, find_packages\n')]
from django.shortcuts import render, HttpResponse, redirect from django.contrib import messages from .models import Review, Author, Book from ..login_registration.models import User # Create your views here. def books(request): if 'login' not in request.session or request.session['login'] == False: return redirect('/') else: context = { 'recent': Review.objects.recent_and_not()[0], 'more': Review.objects.recent_and_not()[1] } return render(request, 'books/index.html', context) def add(request): if 'login' not in request.session or request.session['login'] == False: return redirect('/') else: context = { 'authors': Author.objects.all() } return render(request, 'books/edit.html', context) def create(request): if 'login' not in request.session or request.session['login'] == False: return redirect('/') else: errs = Review.objects.validate_review(request.POST) if errs: for e in errs: messages.error(request, e) else: book_id = Review.objects.create_review(request.POST, request.session['user_id']).book.id return redirect('/books/{}'.format(book_id)) def create_additional(request, book_id): if 'login' not in request.session or request.session['login'] == False: return redirect('/') else: the_book = Book.objects.get(id=book_id) new_book_data = { 'title': the_book.title, 'author': the_book.author.id, 'rating': request.POST['rating'], 'review': request.POST['review'], 'new_author': '' } errs = Review.objects.validate_review(new_book_data) if errs: for e in errs: messages.error(request, e) else: Review.objects.create_review(new_book_data, request.session['user_id']) return redirect('/books/' + book_id) def show(request, book_id): if 'login' not in request.session or request.session['login'] == False: return redirect('/') else: context = { 'book': Book.objects.get(id=book_id) } return render(request, 'books/review.html', context) def profile(request, user_id): if 'login' not in request.session or request.session['login'] == False: return redirect('/') else: user = User.objects.get(id=user_id) unique_ids = user.reviews_left.all().values('book').distinct() unique_books = [] for book in unique_ids: unique_books.append(Book.objects.get(id=book['book'])) context = { 'user': user, 'unique_book_reviews': unique_books } return render(request, 'books/profile.html', context) def delete(request, review_id): if 'login' not in request.session or request.session['login'] == False: return redirect('/') else: book = Book.objects.get(reviews= review_id).id Review.objects.get(id=review_id).delete() return redirect('/books/'+ str(book))
[ "django.shortcuts.redirect", "django.contrib.messages.error", "django.shortcuts.render" ]
[((320, 333), 'django.shortcuts.redirect', 'redirect', (['"""/"""'], {}), "('/')\n", (328, 333), False, 'from django.shortcuts import render, HttpResponse, redirect\n'), ((502, 546), 'django.shortcuts.render', 'render', (['request', '"""books/index.html"""', 'context'], {}), "(request, 'books/index.html', context)\n", (508, 546), False, 'from django.shortcuts import render, HttpResponse, redirect\n'), ((657, 670), 'django.shortcuts.redirect', 'redirect', (['"""/"""'], {}), "('/')\n", (665, 670), False, 'from django.shortcuts import render, HttpResponse, redirect\n'), ((770, 813), 'django.shortcuts.render', 'render', (['request', '"""books/edit.html"""', 'context'], {}), "(request, 'books/edit.html', context)\n", (776, 813), False, 'from django.shortcuts import render, HttpResponse, redirect\n'), ((927, 940), 'django.shortcuts.redirect', 'redirect', (['"""/"""'], {}), "('/')\n", (935, 940), False, 'from django.shortcuts import render, HttpResponse, redirect\n'), ((1400, 1413), 'django.shortcuts.redirect', 'redirect', (['"""/"""'], {}), "('/')\n", (1408, 1413), False, 'from django.shortcuts import render, HttpResponse, redirect\n'), ((1969, 1998), 'django.shortcuts.redirect', 'redirect', (["('/books/' + book_id)"], {}), "('/books/' + book_id)\n", (1977, 1998), False, 'from django.shortcuts import render, HttpResponse, redirect\n'), ((2119, 2132), 'django.shortcuts.redirect', 'redirect', (['"""/"""'], {}), "('/')\n", (2127, 2132), False, 'from django.shortcuts import render, HttpResponse, redirect\n'), ((2237, 2282), 'django.shortcuts.render', 'render', (['request', '"""books/review.html"""', 'context'], {}), "(request, 'books/review.html', context)\n", (2243, 2282), False, 'from django.shortcuts import render, HttpResponse, redirect\n'), ((2406, 2419), 'django.shortcuts.redirect', 'redirect', (['"""/"""'], {}), "('/')\n", (2414, 2419), False, 'from django.shortcuts import render, HttpResponse, redirect\n'), ((2790, 2836), 'django.shortcuts.render', 'render', (['request', '"""books/profile.html"""', 'context'], {}), "(request, 'books/profile.html', context)\n", (2796, 2836), False, 'from django.shortcuts import render, HttpResponse, redirect\n'), ((2961, 2974), 'django.shortcuts.redirect', 'redirect', (['"""/"""'], {}), "('/')\n", (2969, 2974), False, 'from django.shortcuts import render, HttpResponse, redirect\n'), ((1071, 1097), 'django.contrib.messages.error', 'messages.error', (['request', 'e'], {}), '(request, e)\n', (1085, 1097), False, 'from django.contrib import messages\n'), ((1829, 1855), 'django.contrib.messages.error', 'messages.error', (['request', 'e'], {}), '(request, e)\n', (1843, 1855), False, 'from django.contrib import messages\n')]
import pandas as pd import plotly.express as px import plotly.graph_objects as go import numpy as np def rmovie_basicvar(cdf, var = 'tg1', Mm = False, km = False, savefig = False, figname = 'radynvar.html', color = 'steelblue'): ''' A function to produce an animated figure of RADYN variables. This version is pre-constructed and lets you just input the variable you want to plot. Other variables (such as populations) will require more input, and are separate functions. Turns the output into a pandas dataframe, which is then passed to plotly express to create the animated figure Parameters __________ cdf : The radyn cdf object var : str The variable to plot (default = 'tg1') Mm : Boolean Plot height in Mm (default = False) km : Boolean Plot height in km (default = False) savefig : Boolean Save the figure (html file) figname : str Filename, if saving the output NOTES : So far, allowed variables are tg1 - temperature ne1 - electron density bheat1 - beam heating rate d1 - mass density vz1 - velocity np - proton density <NAME>, March 2021 ''' ######################################################################## # Some preliminary set up ######################################################################## if Mm == True: xtitle = 'Height [Mm]' height = cdf.z1/1e8 elif km == True: xtitle = 'Height [km]' height = cdf.z1/1e5 else: xtitle = 'Height [cm]' height = cdf.z1 if var == 'tg1': rvar = cdf.tg1 ytitle = 'Temperature [K]' ylog = True xlog = False elif var == 'ne1': rvar = cdf.ne1 ytitle = 'Electron Density [cm<sup>-3</sup>]' ylog = True xlog = False elif var == 'bheat1': rvar = cdf.bheat1 ytitle = 'Q<sub>beam</sub> [erg cm<sup>-3</sup> s<sup>-1</sup>]' ylog = False xlog = False elif var == 'd1': rvar = cdf.d1 ytitle = 'Mass Density [g cm<sup>-3</sup>]' ylog = True xlog = False elif var == 'vz1': rvar = cdf.vz1/1e5 ytitle = 'Velocity [km s<sup>-1</sup>]' ylog = False xlog = False elif var == 'np': rvar = cdf.n1[:,:,5,0] ytitle = 'Proton Density [cm<sup>-3</sup>]' ylog = True xlog = False template = dict( layout = go.Layout(font = dict(family = "Rockwell", size = 16), title_font = dict(family = "Rockwell", size = 20), plot_bgcolor = 'white', paper_bgcolor = 'white', xaxis = dict( showexponent = 'all', exponentformat = 'e', tickangle = 0, linewidth = 3, showgrid = True, ), yaxis = dict( showexponent = 'all', exponentformat = 'e', linewidth = 3, showgrid = True, anchor = 'free', position = 0, domain = [0.0,1] ), coloraxis_colorbar = dict( thickness = 15, tickformat = '0.2f', ticks = 'outside', titleside = 'right' ) )) ######################################################################## # Build the dataframe ######################################################################## col1 = ytitle col2 = xtitle time = 'Time [s]' timeind = 'Time index' df_list = [] for i in range(len(cdf.time)): data = {col1:rvar[i,:], col2:height[i,:], time: cdf.time[i], timeind: i } df_list.append(pd.DataFrame(data)) df = pd.concat(df_list) ######################################################################## # Plot the variable ######################################################################## h1 = 700 w1 = 700 fig1 = px.line(df, x = df.columns[1], y = df.columns[0], # animation_group = 'Time [s]', animation_frame = 'Time [s]', log_x = xlog, log_y = ylog, template = template, color_discrete_sequence = [color]) fig1.show() if savefig == True: fig1.write_html(figname) return df def rmovie(var1, var2, time = [-10.0], savefig = False, figname = 'radynvar.html', xtitle = 'Var 1', ytitle = 'Var 2', title = ' ', color = 'steelblue', xlog = False, ylog = False): ''' A function to produce an animated figure of RADYN variables. This version is 'dumb' and just plots col1 vs col2 without any axes labels, unless passed through the function fall. Variables must be input as [time, dim1] Turns the output into a pandas dataframe, which is then passed to plotly express to create the animated figure Parameters __________ var1 : float The variable to plot on the x-axis [time, dim1] var2 : float The variable to plot on the y-axis [time, dim1] xtitle : str The xaxis label (default "Var 1") ytitle : str The xaxis label (default "Var 2") title : str A plot title (default " ") savefig : Boolean Save the figure (html file) figname : str Filename, if saving the output xlog : boolean Default is false. Set to True to have log x-axis ylog : boolean Default is false. Set to True to have log y-axis NOTES : <NAME>, March 2021 ''' ######################################################################## # Some preliminary set up ######################################################################## if time[0] == -10: time = np.arange(0,var1.shape[0]) col3 = 'Time [index]' else: col3 = 'Time [s]' template = dict( layout = go.Layout(font = dict(family = "Rockwell", size = 16), title_font = dict(family = "Rockwell", size = 20), plot_bgcolor = 'white', paper_bgcolor = 'white', xaxis = dict( showexponent = 'all', exponentformat = 'e', tickangle = 0, linewidth = 3, showgrid = True, ), yaxis = dict( showexponent = 'all', exponentformat = 'e', linewidth = 3, showgrid = True, anchor = 'free', position = 0, domain = [0.0,1] ), coloraxis_colorbar = dict( thickness = 15, tickformat = '0.2f', ticks = 'outside', titleside = 'right' ) )) ######################################################################## # Build the dataframe ######################################################################## col1 = xtitle col2 = ytitle df_list = [] for i in range(len(time)): data = {col1:var1[i,:], col2:var2[i,:], col3: time[i], } df_list.append(pd.DataFrame(data)) df = pd.concat(df_list) ######################################################################## # Plot the variable ######################################################################## h1 = 700 w1 = 700 fig1 = px.line(df, x = df.columns[0], y = df.columns[1], # animation_group = 'Time [s]', animation_frame = df.columns[2], log_x = xlog, log_y = ylog, title = title, color_discrete_sequence = [color], template = template) fig1.show() if savefig == True: fig1.write_html(figname) return df
[ "pandas.DataFrame", "pandas.concat", "numpy.arange", "plotly.express.line" ]
[((4737, 4890), 'plotly.express.line', 'px.line', (['df'], {'x': 'df.columns[1]', 'y': 'df.columns[0]', 'animation_frame': '"""Time [s]"""', 'log_x': 'xlog', 'log_y': 'ylog', 'template': 'template', 'color_discrete_sequence': '[color]'}), "(df, x=df.columns[1], y=df.columns[0], animation_frame='Time [s]',\n log_x=xlog, log_y=ylog, template=template, color_discrete_sequence=[color])\n", (4744, 4890), True, 'import plotly.express as px\n'), ((8830, 9003), 'plotly.express.line', 'px.line', (['df'], {'x': 'df.columns[0]', 'y': 'df.columns[1]', 'animation_frame': 'df.columns[2]', 'log_x': 'xlog', 'log_y': 'ylog', 'title': 'title', 'color_discrete_sequence': '[color]', 'template': 'template'}), '(df, x=df.columns[0], y=df.columns[1], animation_frame=df.columns[2],\n log_x=xlog, log_y=ylog, title=title, color_discrete_sequence=[color],\n template=template)\n', (8837, 9003), True, 'import plotly.express as px\n'), ((4498, 4516), 'pandas.concat', 'pd.concat', (['df_list'], {}), '(df_list)\n', (4507, 4516), True, 'import pandas as pd\n'), ((6793, 6820), 'numpy.arange', 'np.arange', (['(0)', 'var1.shape[0]'], {}), '(0, var1.shape[0])\n', (6802, 6820), True, 'import numpy as np\n'), ((8590, 8608), 'pandas.concat', 'pd.concat', (['df_list'], {}), '(df_list)\n', (8599, 8608), True, 'import pandas as pd\n'), ((4460, 4478), 'pandas.DataFrame', 'pd.DataFrame', (['data'], {}), '(data)\n', (4472, 4478), True, 'import pandas as pd\n'), ((8552, 8570), 'pandas.DataFrame', 'pd.DataFrame', (['data'], {}), '(data)\n', (8564, 8570), True, 'import pandas as pd\n')]
import FWCore.ParameterSet.Config as cms particleFlowRecHitHBHE = cms.EDProducer("PFRecHitProducer", navigator = cms.PSet( hcalEnums = cms.vint32(1, 2), name = cms.string('PFRecHitHCALDenseIdNavigator') ), producers = cms.VPSet(cms.PSet( name = cms.string('PFHBHERecHitCreator'), qualityTests = cms.VPSet( cms.PSet( cuts = cms.VPSet( cms.PSet( depth = cms.vint32(1, 2, 3, 4), detectorEnum = cms.int32(1), threshold = cms.vdouble(0.8, 1.2, 1.2, 1.2) ), cms.PSet( depth = cms.vint32( 1, 2, 3, 4, 5, 6, 7 ), detectorEnum = cms.int32(2), threshold = cms.vdouble( 0.1, 0.2, 0.2, 0.2, 0.2, 0.2, 0.2 ) ) ), name = cms.string('PFRecHitQTestHCALThresholdVsDepth') ), cms.PSet( cleaningThresholds = cms.vdouble(0.0), flags = cms.vstring('Standard'), maxSeverities = cms.vint32(11), name = cms.string('PFRecHitQTestHCALChannel') ) ), src = cms.InputTag("hbhereco") )) )
[ "FWCore.ParameterSet.Config.string", "FWCore.ParameterSet.Config.vint32", "FWCore.ParameterSet.Config.vdouble", "FWCore.ParameterSet.Config.vstring", "FWCore.ParameterSet.Config.int32", "FWCore.ParameterSet.Config.InputTag" ]
[((148, 164), 'FWCore.ParameterSet.Config.vint32', 'cms.vint32', (['(1)', '(2)'], {}), '(1, 2)\n', (158, 164), True, 'import FWCore.ParameterSet.Config as cms\n'), ((181, 223), 'FWCore.ParameterSet.Config.string', 'cms.string', (['"""PFRecHitHCALDenseIdNavigator"""'], {}), "('PFRecHitHCALDenseIdNavigator')\n", (191, 223), True, 'import FWCore.ParameterSet.Config as cms\n'), ((282, 315), 'FWCore.ParameterSet.Config.string', 'cms.string', (['"""PFHBHERecHitCreator"""'], {}), "('PFHBHERecHitCreator')\n", (292, 315), True, 'import FWCore.ParameterSet.Config as cms\n'), ((1434, 1458), 'FWCore.ParameterSet.Config.InputTag', 'cms.InputTag', (['"""hbhereco"""'], {}), "('hbhereco')\n", (1446, 1458), True, 'import FWCore.ParameterSet.Config as cms\n'), ((1096, 1143), 'FWCore.ParameterSet.Config.string', 'cms.string', (['"""PFRecHitQTestHCALThresholdVsDepth"""'], {}), "('PFRecHitQTestHCALThresholdVsDepth')\n", (1106, 1143), True, 'import FWCore.ParameterSet.Config as cms\n'), ((1218, 1234), 'FWCore.ParameterSet.Config.vdouble', 'cms.vdouble', (['(0.0)'], {}), '(0.0)\n', (1229, 1234), True, 'import FWCore.ParameterSet.Config as cms\n'), ((1260, 1283), 'FWCore.ParameterSet.Config.vstring', 'cms.vstring', (['"""Standard"""'], {}), "('Standard')\n", (1271, 1283), True, 'import FWCore.ParameterSet.Config as cms\n'), ((1317, 1331), 'FWCore.ParameterSet.Config.vint32', 'cms.vint32', (['(11)'], {}), '(11)\n', (1327, 1331), True, 'import FWCore.ParameterSet.Config as cms\n'), ((1356, 1394), 'FWCore.ParameterSet.Config.string', 'cms.string', (['"""PFRecHitQTestHCALChannel"""'], {}), "('PFRecHitQTestHCALChannel')\n", (1366, 1394), True, 'import FWCore.ParameterSet.Config as cms\n'), ((469, 491), 'FWCore.ParameterSet.Config.vint32', 'cms.vint32', (['(1)', '(2)', '(3)', '(4)'], {}), '(1, 2, 3, 4)\n', (479, 491), True, 'import FWCore.ParameterSet.Config as cms\n'), ((532, 544), 'FWCore.ParameterSet.Config.int32', 'cms.int32', (['(1)'], {}), '(1)\n', (541, 544), True, 'import FWCore.ParameterSet.Config as cms\n'), ((582, 613), 'FWCore.ParameterSet.Config.vdouble', 'cms.vdouble', (['(0.8)', '(1.2)', '(1.2)', '(1.2)'], {}), '(0.8, 1.2, 1.2, 1.2)\n', (593, 613), True, 'import FWCore.ParameterSet.Config as cms\n'), ((699, 730), 'FWCore.ParameterSet.Config.vint32', 'cms.vint32', (['(1)', '(2)', '(3)', '(4)', '(5)', '(6)', '(7)'], {}), '(1, 2, 3, 4, 5, 6, 7)\n', (709, 730), True, 'import FWCore.ParameterSet.Config as cms\n'), ((853, 865), 'FWCore.ParameterSet.Config.int32', 'cms.int32', (['(2)'], {}), '(2)\n', (862, 865), True, 'import FWCore.ParameterSet.Config as cms\n'), ((903, 949), 'FWCore.ParameterSet.Config.vdouble', 'cms.vdouble', (['(0.1)', '(0.2)', '(0.2)', '(0.2)', '(0.2)', '(0.2)', '(0.2)'], {}), '(0.1, 0.2, 0.2, 0.2, 0.2, 0.2, 0.2)\n', (914, 949), True, 'import FWCore.ParameterSet.Config as cms\n')]
from seeq import spy import pytest import time @pytest.mark.system class TestCreate: def test_create_package_and_function(self, instantiate_ui_create_function_and_package): ui = instantiate_ui_create_function_and_package('testPackage', 'testFunction') assert 'testPackage' in ui.backend.fetch_udf_packages() assert 'testPackage' in ui.app.search_display.package_list assert 'testFunction' in [func_name.name for func_name in ui.backend.selected_package.package.functions] assert 'testFunction' + '($Signal, $Signal)' in ui.app.search_display.function_list def test_create_function_same_name(self, instantiate_ui_create_function_and_package): ui = instantiate_ui_create_function_and_package('testPackage', 'testFunction') ui.app.function_parameters_display.params_and_types = [{'name': 'a', 'type': 'Signal'}, {'name': 'b', 'type': 'Signal'}, {'name': 'c', 'type': 'Scalar'}] ui.app.function_parameters_display.formula = '$a + $b * $c' ui.app.function_documentation.func_description = '<p>Test function with the same name</p>' ui.app.function_documentation.examples_and_descriptions = [ {'description': 'Example 1', 'formula': '$a + $b * $c'}, {'description': 'Example 2', 'formula': '$c + $d * $e'}] ui.app.summary_page.vue_on_review(data='') ui.app.summary_page.vue_on_submit(data='') assert 'testFunction' + '($Signal, $Signal)' in ui.app.search_display.function_list assert 'testFunction' + '($Signal, $Signal, $Scalar)' in ui.app.search_display.function_list @pytest.mark.system class TestModify: def test_modify_formula(self, instantiate_ui_create_function_and_package): ui = instantiate_ui_create_function_and_package('testPackage', 'testFunction') ui.app.function_parameters_display.params_and_types = [{'name': 'a', 'type': 'Signal'}, {'name': 'newParam', 'type': 'Scalar'}] ui.app.function_parameters_display.formula = '$a * $newParam' ui.app.summary_page.vue_on_review(data='') ui.app.summary_page.vue_on_submit(data='') ui.app.search_display.vue_update_package_object(data='testPackage') ui.app.search_display.vue_update_function(data='testFunction' + '($Signal, $Scalar)') assert '$newParam' in ui.app.function_parameters_display.formula assert '$newParam' in ui.backend.selected_function.formula def test_func_description(self, instantiate_ui_create_function_and_package): ui = instantiate_ui_create_function_and_package('testPackage', 'testFunction') ui.app.function_documentation.func_description_markdown = '## Test Function Description' ui.app.function_documentation.vue_update_func_desc_html(data='') # The markdown-to-html converter has a delay time.sleep(0.5) assert '<h2>Test Function Description</h2>' in ui.app.function_documentation.func_description_html ui.app.summary_page.vue_on_review(data='') ui.app.summary_page.vue_on_submit(data='') ui.app.search_display.vue_update_package_object(data='testPackage') ui.app.search_display.vue_update_function(data='testFunction($Signal, $Signal)') assert '<h2>Test Function Description</h2>' in ui.backend.selected_function.description def test_package_description(self, instantiate_ui_create_function_and_package): ui = instantiate_ui_create_function_and_package('testPackage', 'testFunction') ui.app.function_documentation.package_description_markdown = '## Test Package Description' ui.app.function_documentation.vue_update_package_desc_html(data='') # The markdown-to-html converter has a delay time.sleep(0.5) assert '<h2>Test Package Description</h2>' in ui.app.function_documentation.package_description_html ui.app.summary_page.vue_on_review(data='') ui.app.summary_page.vue_on_submit(data='') ui.app.search_display.vue_update_package_object(data='testPackage') ui.app.search_display.vue_update_function(data='testFunction($Signal, $Signal)') assert '<h2>Test Package Description</h2>' in ui.backend.selected_package.description def test_add_examples(self, instantiate_ui_create_function_and_package): ui = instantiate_ui_create_function_and_package('testPackage', 'testFunction') examples = [{'description': 'Example 1', 'formula': '$a + $b'}, {'description': 'Example 2', 'formula': '$c + $d'}] ui.app.function_documentation.examples_and_descriptions = examples ui.app.summary_page.vue_on_review(data='') ui.app.summary_page.vue_on_submit(data='') ui.app.search_display.vue_update_package_object(data='testPackage') ui.app.search_display.vue_update_function(data='testFunction($Signal, $Signal)') assert ui.backend.selected_function.examples_and_descriptions == examples assert ui.app.function_documentation.examples_and_descriptions == examples def test_access_control(self, instantiate_ui_create_function_and_package): ui = instantiate_ui_create_function_and_package('testPackageAccessControl', 'testFunction') ui.app.function_parameters_display.params_and_types = [{'name': 'a', 'type': 'Signal'}, {'name': 'b', 'type': 'Signal'}] ui.app.function_parameters_display.formula = '$a + $b' ui.app.function_documentation.func_description = '<p>Test function</p>' access_input = [{'name': spy.user.name, 'username': spy.user.username, 'type': 'User', 'read': True, 'write': True, 'manage': True}, {'name': 'Everyone', 'username': None, 'type': 'UserGroup', 'read': True, 'write': True, 'manage': True} ] ui.app.access_management.selected_users_dict = access_input ui.app.summary_page.vue_on_review(data='') ui.app.summary_page.vue_on_submit(data='') ui.app.search_display.vue_update_package_object(data='testPackageAccessControl') assert access_input[0] in ui.backend.selected_package.permissions assert access_input[1] in ui.backend.selected_package.permissions @pytest.mark.system class TestDelete: def test_archive_function(self, instantiate_ui_create_function_and_package): ui = instantiate_ui_create_function_and_package('testPackage', 'testFunction') ui.app.search_display.vue_update_package_object(data='testPackage') ui.app.search_display.vue_update_function(data='testFunction($Signal, $Signal)') ui.app.summary_page.selected_for_delete = 'Function: testFunction' ui.app.summary_page.vue_on_delete(data='') ui.app.search_display.vue_update_package_object(data='testPackage') assert 'testFunction' + '($Signal, $Signal)' not in ui.app.search_display.function_list assert 'testFunction' not in ui.app.search_display.function_list def test_archive_package(self, instantiate_ui_create_function_and_package): ui = instantiate_ui_create_function_and_package('testPackage', 'testFunction') ui.app.summary_page.selected_for_delete = 'Package: testPackage' ui.app.summary_page.vue_on_delete(data='') ui.app.search_display.vue_update_package_object(data='testPackage') assert 'testPackage' not in ui.app.search_display.package_list
[ "time.sleep" ]
[((3032, 3047), 'time.sleep', 'time.sleep', (['(0.5)'], {}), '(0.5)\n', (3042, 3047), False, 'import time\n'), ((3932, 3947), 'time.sleep', 'time.sleep', (['(0.5)'], {}), '(0.5)\n', (3942, 3947), False, 'import time\n')]
from asyncio import streams import time import numpy as np import pyaudio import anton.lights.config as config class AudioProcess(): def __init__(self): self.audio = pyaudio.PyAudio() self.frames_per_buffer = int(config.MIC_RATE / config.FPS) self.overflows = 0 self.prev_ovf_time = time.time() self.running = False def start_stream(self,callback): self.stream = self.audio.open(format=pyaudio.paInt16, channels=1, rate=config.MIC_RATE, input=True, frames_per_buffer=self.frames_per_buffer) self.running = True self.overflows = 0 while self.running: try: y = np.fromstring(self.stream.read(self.frames_per_buffer, exception_on_overflow=False), dtype=np.int16) y = y.astype(np.float32) self.stream.read(self.stream.get_read_available(), exception_on_overflow=False) callback(y) except IOError: self.overflows += 1 if time.time() > self.prev_ovf_time + 1: self.prev_ovf_time = time.time() print('Audio buffer has overflowed {} times'.format(self.overflows)) def kill_stream(self): self.running = False def stop_stream(self): self.stream.stop_stream() self.stream.close()
[ "pyaudio.PyAudio", "time.time" ]
[((179, 196), 'pyaudio.PyAudio', 'pyaudio.PyAudio', ([], {}), '()\n', (194, 196), False, 'import pyaudio\n'), ((320, 331), 'time.time', 'time.time', ([], {}), '()\n', (329, 331), False, 'import time\n'), ((1154, 1165), 'time.time', 'time.time', ([], {}), '()\n', (1163, 1165), False, 'import time\n'), ((1233, 1244), 'time.time', 'time.time', ([], {}), '()\n', (1242, 1244), False, 'import time\n')]
import graphene from .legislative import LegislativeQuery from .core import CoreQuery class Query(LegislativeQuery, CoreQuery, graphene.ObjectType): pass schema = graphene.Schema(query=Query)
[ "graphene.Schema" ]
[((171, 199), 'graphene.Schema', 'graphene.Schema', ([], {'query': 'Query'}), '(query=Query)\n', (186, 199), False, 'import graphene\n')]
import OGL import pywavefront from OpenGL.GL import * class BasicEnemy(OGL.Cube): def __init__(self,x,y,z): OGL.Cube.__init__(self,3,8,2,x,y,z) self.model = pywavefront.Wavefront("Handgun_obj.obj","models/") self.model.onload = self.onload #self.model = OGL.OBJ("Man.obj",Fse,10) #self.width = self.model.widthal #self.height = self.model.height #self.depth = self.model.depth def onload(self): trigger = self.model.get_mesh("Cube.005_Cube.000") trigger.pos[0] = -.3 def blit(self): self.model.draw()
[ "pywavefront.Wavefront", "OGL.Cube.__init__" ]
[((121, 162), 'OGL.Cube.__init__', 'OGL.Cube.__init__', (['self', '(3)', '(8)', '(2)', 'x', 'y', 'z'], {}), '(self, 3, 8, 2, x, y, z)\n', (138, 162), False, 'import OGL\n'), ((178, 229), 'pywavefront.Wavefront', 'pywavefront.Wavefront', (['"""Handgun_obj.obj"""', '"""models/"""'], {}), "('Handgun_obj.obj', 'models/')\n", (199, 229), False, 'import pywavefront\n')]
import json from pathlib import Path from assertpy import assert_that from pycep import BicepParser EXAMPLES_DIR = Path(__file__).parent / "examples/comparison" BICEP_PARSER = BicepParser() def test_parse_greater_than_or_equals() -> None: # given sub_dir_path = EXAMPLES_DIR / "greater_than_or_equals" file_path = sub_dir_path / "main.bicep" expected_result = json.loads((sub_dir_path / "result.json").read_text()) # when result = BICEP_PARSER.parse(file_path=file_path) # then assert_that(result).is_equal_to(expected_result) def test_parse_greater_than() -> None: # given sub_dir_path = EXAMPLES_DIR / "greater_than" file_path = sub_dir_path / "main.bicep" expected_result = json.loads((sub_dir_path / "result.json").read_text()) # when result = BICEP_PARSER.parse(file_path=file_path) # then assert_that(result).is_equal_to(expected_result) def test_parse_less_than_or_equals() -> None: # given sub_dir_path = EXAMPLES_DIR / "less_than_or_equals" file_path = sub_dir_path / "main.bicep" expected_result = json.loads((sub_dir_path / "result.json").read_text()) # when result = BICEP_PARSER.parse(file_path=file_path) # then assert_that(result).is_equal_to(expected_result) def test_parse_less_than() -> None: # given sub_dir_path = EXAMPLES_DIR / "less_than" file_path = sub_dir_path / "main.bicep" expected_result = json.loads((sub_dir_path / "result.json").read_text()) # when result = BICEP_PARSER.parse(file_path=file_path) # then assert_that(result).is_equal_to(expected_result) def test_parse_equals() -> None: # given sub_dir_path = EXAMPLES_DIR / "equals" file_path = sub_dir_path / "main.bicep" expected_result = json.loads((sub_dir_path / "result.json").read_text()) # when result = BICEP_PARSER.parse(file_path=file_path) # then assert_that(result).is_equal_to(expected_result) def test_parse_not_equals() -> None: # given sub_dir_path = EXAMPLES_DIR / "not_equals" file_path = sub_dir_path / "main.bicep" expected_result = json.loads((sub_dir_path / "result.json").read_text()) # when result = BICEP_PARSER.parse(file_path=file_path) # then assert_that(result).is_equal_to(expected_result) def test_parse_equals_case_insensitive() -> None: # given sub_dir_path = EXAMPLES_DIR / "equals_case_insensitive" file_path = sub_dir_path / "main.bicep" expected_result = json.loads((sub_dir_path / "result.json").read_text()) # when result = BICEP_PARSER.parse(file_path=file_path) # then assert_that(result).is_equal_to(expected_result) def test_parse_not_equals_case_insensitive() -> None: # given sub_dir_path = EXAMPLES_DIR / "not_equals_case_insensitive" file_path = sub_dir_path / "main.bicep" expected_result = json.loads((sub_dir_path / "result.json").read_text()) # when result = BICEP_PARSER.parse(file_path=file_path) # then assert_that(result).is_equal_to(expected_result)
[ "pathlib.Path", "assertpy.assert_that", "pycep.BicepParser" ]
[((179, 192), 'pycep.BicepParser', 'BicepParser', ([], {}), '()\n', (190, 192), False, 'from pycep import BicepParser\n'), ((118, 132), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (122, 132), False, 'from pathlib import Path\n'), ((517, 536), 'assertpy.assert_that', 'assert_that', (['result'], {}), '(result)\n', (528, 536), False, 'from assertpy import assert_that\n'), ((870, 889), 'assertpy.assert_that', 'assert_that', (['result'], {}), '(result)\n', (881, 889), False, 'from assertpy import assert_that\n'), ((1237, 1256), 'assertpy.assert_that', 'assert_that', (['result'], {}), '(result)\n', (1248, 1256), False, 'from assertpy import assert_that\n'), ((1584, 1603), 'assertpy.assert_that', 'assert_that', (['result'], {}), '(result)\n', (1595, 1603), False, 'from assertpy import assert_that\n'), ((1925, 1944), 'assertpy.assert_that', 'assert_that', (['result'], {}), '(result)\n', (1936, 1944), False, 'from assertpy import assert_that\n'), ((2274, 2293), 'assertpy.assert_that', 'assert_that', (['result'], {}), '(result)\n', (2285, 2293), False, 'from assertpy import assert_that\n'), ((2649, 2668), 'assertpy.assert_that', 'assert_that', (['result'], {}), '(result)\n', (2660, 2668), False, 'from assertpy import assert_that\n'), ((3032, 3051), 'assertpy.assert_that', 'assert_that', (['result'], {}), '(result)\n', (3043, 3051), False, 'from assertpy import assert_that\n')]
# ------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for # license information. # -------------------------------------------------------------------------- import pytest import inspect import asyncio import logging import azure.iot.device.common.async_adapter as async_adapter logging.basicConfig(level=logging.INFO) pytestmark = pytest.mark.asyncio @pytest.fixture def dummy_value(): return 123 @pytest.fixture def mock_function(mocker, dummy_value): mock_fn = mocker.MagicMock(return_value=dummy_value) mock_fn.__doc__ = "docstring" return mock_fn @pytest.mark.describe("emulate_async()") class TestEmulateAsync(object): @pytest.mark.it("Returns a coroutine function when given a function") async def test_returns_coroutine(self, mock_function): async_fn = async_adapter.emulate_async(mock_function) assert inspect.iscoroutinefunction(async_fn) @pytest.mark.it( "Returns a coroutine function that returns the result of the input function when called" ) async def test_coroutine_returns_input_function_result( self, mocker, mock_function, dummy_value ): async_fn = async_adapter.emulate_async(mock_function) result = await async_fn(dummy_value) assert mock_function.call_count == 1 assert mock_function.call_args == mocker.call(dummy_value) assert result == mock_function.return_value @pytest.mark.it("Copies the input function docstring to resulting coroutine function") async def test_coroutine_has_input_function_docstring(self, mock_function): async_fn = async_adapter.emulate_async(mock_function) assert async_fn.__doc__ == mock_function.__doc__ @pytest.mark.it("Can be applied as a decorator") async def test_applied_as_decorator(self): # Define a function with emulate_async applied as a decorator @async_adapter.emulate_async def some_function(): return "foo" # Call the function as a coroutine result = await some_function() assert result == "foo" @pytest.mark.describe("AwaitableCallback") class TestAwaitableCallback(object): @pytest.mark.it("Instantiates from a provided callback function") async def test_instantiates(self, mock_function): callback = async_adapter.AwaitableCallback(mock_function) assert isinstance(callback, async_adapter.AwaitableCallback) @pytest.mark.it( "Invokes the callback function associated with an instance and returns its result when a call is invoked the instance" ) async def test_calling_object_calls_input_function_and_returns_result( self, mocker, mock_function ): callback = async_adapter.AwaitableCallback(mock_function) result = callback() assert mock_function.call_count == 1 assert mock_function.call_args == mocker.call() assert result == mock_function.return_value @pytest.mark.it("Completes the instance Future when a call is invoked on the instance") async def test_calling_object_completes_future(self, mock_function): callback = async_adapter.AwaitableCallback(mock_function) assert not callback.future.done() callback() await asyncio.sleep(0.1) # wait to give time to complete the callback assert callback.future.done() @pytest.mark.it("Can be called using positional arguments") async def test_can_be_called_using_positional_args(self, mocker, mock_function): callback = async_adapter.AwaitableCallback(mock_function) result = callback(1, 2, 3) assert mock_function.call_count == 1 assert mock_function.call_args == mocker.call(1, 2, 3) assert result == mock_function.return_value @pytest.mark.it("Can be called using explicit keyword arguments") async def test_can_be_called_using_explicit_kwargs(self, mocker, mock_function): callback = async_adapter.AwaitableCallback(mock_function) result = callback(a=1, b=2, c=3) assert mock_function.call_count == 1 assert mock_function.call_args == mocker.call(a=1, b=2, c=3) assert result == mock_function.return_value @pytest.mark.it("Can have its callback completion awaited upon") async def test_awaiting_completion_of_callback_returns_result(self, mock_function): callback = async_adapter.AwaitableCallback(mock_function) callback() assert await callback.completion() == mock_function.return_value assert callback.future.done()
[ "inspect.iscoroutinefunction", "logging.basicConfig", "asyncio.sleep", "pytest.mark.describe", "pytest.mark.it", "azure.iot.device.common.async_adapter.emulate_async", "azure.iot.device.common.async_adapter.AwaitableCallback" ]
[((433, 472), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.INFO'}), '(level=logging.INFO)\n', (452, 472), False, 'import logging\n'), ((729, 768), 'pytest.mark.describe', 'pytest.mark.describe', (['"""emulate_async()"""'], {}), "('emulate_async()')\n", (749, 768), False, 'import pytest\n'), ((2232, 2273), 'pytest.mark.describe', 'pytest.mark.describe', (['"""AwaitableCallback"""'], {}), "('AwaitableCallback')\n", (2252, 2273), False, 'import pytest\n'), ((806, 874), 'pytest.mark.it', 'pytest.mark.it', (['"""Returns a coroutine function when given a function"""'], {}), "('Returns a coroutine function when given a function')\n", (820, 874), False, 'import pytest\n'), ((1055, 1169), 'pytest.mark.it', 'pytest.mark.it', (['"""Returns a coroutine function that returns the result of the input function when called"""'], {}), "(\n 'Returns a coroutine function that returns the result of the input function when called'\n )\n", (1069, 1169), False, 'import pytest\n'), ((1567, 1657), 'pytest.mark.it', 'pytest.mark.it', (['"""Copies the input function docstring to resulting coroutine function"""'], {}), "(\n 'Copies the input function docstring to resulting coroutine function')\n", (1581, 1657), False, 'import pytest\n'), ((1858, 1905), 'pytest.mark.it', 'pytest.mark.it', (['"""Can be applied as a decorator"""'], {}), "('Can be applied as a decorator')\n", (1872, 1905), False, 'import pytest\n'), ((2316, 2380), 'pytest.mark.it', 'pytest.mark.it', (['"""Instantiates from a provided callback function"""'], {}), "('Instantiates from a provided callback function')\n", (2330, 2380), False, 'import pytest\n'), ((2576, 2720), 'pytest.mark.it', 'pytest.mark.it', (['"""Invokes the callback function associated with an instance and returns its result when a call is invoked the instance"""'], {}), "(\n 'Invokes the callback function associated with an instance and returns its result when a call is invoked the instance'\n )\n", (2590, 2720), False, 'import pytest\n'), ((3096, 3187), 'pytest.mark.it', 'pytest.mark.it', (['"""Completes the instance Future when a call is invoked on the instance"""'], {}), "(\n 'Completes the instance Future when a call is invoked on the instance')\n", (3110, 3187), False, 'import pytest\n'), ((3506, 3564), 'pytest.mark.it', 'pytest.mark.it', (['"""Can be called using positional arguments"""'], {}), "('Can be called using positional arguments')\n", (3520, 3564), False, 'import pytest\n'), ((3917, 3981), 'pytest.mark.it', 'pytest.mark.it', (['"""Can be called using explicit keyword arguments"""'], {}), "('Can be called using explicit keyword arguments')\n", (3931, 3981), False, 'import pytest\n'), ((4346, 4409), 'pytest.mark.it', 'pytest.mark.it', (['"""Can have its callback completion awaited upon"""'], {}), "('Can have its callback completion awaited upon')\n", (4360, 4409), False, 'import pytest\n'), ((953, 995), 'azure.iot.device.common.async_adapter.emulate_async', 'async_adapter.emulate_async', (['mock_function'], {}), '(mock_function)\n', (980, 995), True, 'import azure.iot.device.common.async_adapter as async_adapter\n'), ((1011, 1048), 'inspect.iscoroutinefunction', 'inspect.iscoroutinefunction', (['async_fn'], {}), '(async_fn)\n', (1038, 1048), False, 'import inspect\n'), ((1309, 1351), 'azure.iot.device.common.async_adapter.emulate_async', 'async_adapter.emulate_async', (['mock_function'], {}), '(mock_function)\n', (1336, 1351), True, 'import azure.iot.device.common.async_adapter as async_adapter\n'), ((1752, 1794), 
'azure.iot.device.common.async_adapter.emulate_async', 'async_adapter.emulate_async', (['mock_function'], {}), '(mock_function)\n', (1779, 1794), True, 'import azure.iot.device.common.async_adapter as async_adapter\n'), ((2454, 2500), 'azure.iot.device.common.async_adapter.AwaitableCallback', 'async_adapter.AwaitableCallback', (['mock_function'], {}), '(mock_function)\n', (2485, 2500), True, 'import azure.iot.device.common.async_adapter as async_adapter\n'), ((2862, 2908), 'azure.iot.device.common.async_adapter.AwaitableCallback', 'async_adapter.AwaitableCallback', (['mock_function'], {}), '(mock_function)\n', (2893, 2908), True, 'import azure.iot.device.common.async_adapter as async_adapter\n'), ((3275, 3321), 'azure.iot.device.common.async_adapter.AwaitableCallback', 'async_adapter.AwaitableCallback', (['mock_function'], {}), '(mock_function)\n', (3306, 3321), True, 'import azure.iot.device.common.async_adapter as async_adapter\n'), ((3669, 3715), 'azure.iot.device.common.async_adapter.AwaitableCallback', 'async_adapter.AwaitableCallback', (['mock_function'], {}), '(mock_function)\n', (3700, 3715), True, 'import azure.iot.device.common.async_adapter as async_adapter\n'), ((4086, 4132), 'azure.iot.device.common.async_adapter.AwaitableCallback', 'async_adapter.AwaitableCallback', (['mock_function'], {}), '(mock_function)\n', (4117, 4132), True, 'import azure.iot.device.common.async_adapter as async_adapter\n'), ((4517, 4563), 'azure.iot.device.common.async_adapter.AwaitableCallback', 'async_adapter.AwaitableCallback', (['mock_function'], {}), '(mock_function)\n', (4548, 4563), True, 'import azure.iot.device.common.async_adapter as async_adapter\n'), ((3397, 3415), 'asyncio.sleep', 'asyncio.sleep', (['(0.1)'], {}), '(0.1)\n', (3410, 3415), False, 'import asyncio\n')]
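# A minimal sketch of the emulate_async pattern exercised by the tests above,
# assuming the azure-iot-device package is installed. It wraps a plain
# synchronous function into a coroutine function; the function and arguments
# below are illustrative, not taken from the test module.
import asyncio
import inspect

import azure.iot.device.common.async_adapter as async_adapter


def add(a, b):
    return a + b


async_add = async_adapter.emulate_async(add)
assert inspect.iscoroutinefunction(async_add)
assert asyncio.run(async_add(2, 3)) == 5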
from Constants import Constants
from djitellopy import tello


class Controller():

    def __init__(self) -> None:
        """
        Initialization of class variables.
        """
        self.const = Constants()
        self.drone = tello.Tello()

        self.up_down_velocity = 0
        self.right_left_velocity = 0
        self.forward_backward_velocity = 0
        self.turn_velocity = 0

    def start(self):
        """
        Function starts the drone and its video recording.
        """
        self.drone.connect()
        self.drone.takeoff()
        self.drone.streamon()

    def battery(self):
        """
        Function prints the current battery level of the drone in percent.
        """
        print(f"Battery at {self.drone.get_battery()}%")

    def set_velocity(self, vel):
        """
        Function sets the velocity of the drone in cm/s.

        Args:
            vel (int): speed of the drone in cm/s
        """
        self.drone.set_speed(vel)

    def move(self, x, y):
        """
        Function utilizes the vector from the center of the camera to the
        detected nose. Depending on the sign of the x value the drone is
        turned either left or right. Depending on the sign of the y value
        the drone is moved either up or down.

        Args:
            x (int): x component of the vector (center of camera image to nose)
            y (int): y component of the vector (center of camera image to nose)
        """
        self.reset()

        # TURN
        if x < - self.const.TOLERANCE_X:
            self.turn_velocity = - self.const.DRONE_SPEED_TURN
        elif x > self.const.TOLERANCE_X:
            self.turn_velocity = self.const.DRONE_SPEED_TURN
        else:
            pass

        # UP DOWN
        if y < - self.const.TOLERANCE_Y:
            self.up_down_velocity = self.const.DRONE_SPEED_Y
        elif y > self.const.TOLERANCE_Y:
            self.up_down_velocity = - self.const.DRONE_SPEED_Y
        else:
            pass

        self.drone.send_rc_control(self.right_left_velocity,
                                   self.forward_backward_velocity,
                                   self.up_down_velocity,
                                   self.turn_velocity)

    def move_pose(self, pose):
        """
        Function moves the drone if a pose is detected:
            - Right arm up
            - Left arm up
            - Arms crossed

        Args:
            pose (string): String identifier of the pose.
        """
        self.reset()
        if pose == "left":
            self.right_left_velocity = -self.const.DRONE_SPEED_X
        elif pose == "right":
            self.right_left_velocity = +self.const.DRONE_SPEED_X
        else:
            self.right_left_velocity = 0

        self.drone.send_rc_control(self.right_left_velocity, 0, 0, 0)

    def stop(self):
        """
        Function stops the drone's movement.
        """
        self.drone.send_rc_control(0, 0, 0, 0)

    def start_height(self):
        """
        Function sets the starting height of the drone.
        """
        self.drone.send_rc_control(0, 0, self.const.START_HEIGHT, 0)

    def reset(self):
        """
        Function resets all velocity values of the drone.
        """
        self.up_down_velocity = 0
        self.right_left_velocity = 0
        self.forward_backward_velocity = 0
        self.turn_velocity = 0

    def get_stream(self):
        """
        Function returns the current frame of the video stream.

        Returns:
            (frame): Current frame of the video stream.
        """
        return self.drone.get_frame_read().frame
[ "Constants.Constants", "djitellopy.tello.Tello" ]
[((205, 216), 'Constants.Constants', 'Constants', ([], {}), '()\n', (214, 216), False, 'from Constants import Constants\n'), ((239, 252), 'djitellopy.tello.Tello', 'tello.Tello', ([], {}), '()\n', (250, 252), False, 'from djitellopy import tello\n')]
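# A short usage sketch for the Controller above. It assumes a Tello drone is
# powered on and this machine is connected to its Wi-Fi network; the vector
# passed to move() is illustrative.
import time

controller = Controller()
controller.start()               # connect, take off, start the video stream
controller.battery()             # print the current battery level
controller.start_height()        # climb to the configured starting height
time.sleep(2)
controller.move(40, -10)         # turn right and move up, within tolerances
frame = controller.get_stream()  # latest camera frame
controller.stop()                # stop all movement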
import numpy as np
from snapshot_functions import read_particles_filter
from scipy.linalg import eigh

def run(argv):
  if len(argv) < 5:
    print('usage: python script.py <IC-file> <preIC-file> <ID> <radius>')
    return 1
  
  ID = int(argv[3])
  r = float(argv[4])
  
  print('getting IDs of nearby particles')
  pos, header = read_particles_filter(argv[2],ID_list=[ID],opts={'pos':True})
  IDs, header = read_particles_filter(argv[2],center=pos[0],radius=r,opts={'ID':True})
  
  print('reading positions of %d particles'%len(IDs))
  
  pos0, ID0, header = read_particles_filter(argv[2],ID_list=IDs,opts={'pos':True,'ID':True})
  sort0 = np.argsort(ID0)
  ID0 = ID0[sort0]
  pos0 = pos0[sort0] - pos
  
  pos1, ID1, header = read_particles_filter(argv[1],ID_list=IDs,opts={'pos':True,'ID':True})
  sort1 = np.argsort(ID1)
  ID1 = ID1[sort1]
  pos1 = pos1[sort1] - pos
  
  if not np.array_equal(ID0,ID1):
    print('Error: particle ID sets differ between the two snapshots')
    print(np.stack((ID0,ID1)).T.tolist())
    return
  
  rot = np.diag((1,1,1))
  for i in range(2):
    if i > 0:
      # diagonalize the tensor from the previous pass and rotate all
      # positions into its eigenbasis
      eigval, eigvec = eigh(e)
      rot1 = eigvec.T
      print('rotate by %.0f degrees'%(np.arccos((np.trace(rot1)-1)/2)*180./np.pi))
      pos = (rot1 @ (pos.T)).T
      pos0 = (rot1 @ (pos0.T)).T
      pos1 = (rot1 @ (pos1.T)).T
      rot = rot1 @ rot
    
    disp = pos1 - pos0
    
    # estimate the (symmetrized) tidal tensor from the displacements of the
    # 32 particles closest to each coordinate axis
    e = np.zeros((3,3))
    for c in range(3):
      dist2 = np.zeros(pos0.shape[0])
      for d in range(3):
        if d != c:
          dist2 += pos0[:,d]**2
      idx = np.argsort(dist2)[:32]
      for d in range(3):
        e[c,d] = np.polyfit(pos0[idx,c],disp[idx,d],1)[0]
    e = .5*(e + e.T)
    with np.printoptions(precision=5, suppress=True):
      print('Tidal tensor:')
      print(e)
  
  with np.printoptions(precision=5, suppress=True):
    print('rotation matrix (%.0f degrees)'%(np.arccos((np.trace(rot)-1)/2)*180./np.pi))
    print(rot)
  np.savetxt('rotation_%d.txt'%ID,rot)

if __name__ == '__main__':
  from sys import argv
  run(argv)
[ "numpy.stack", "numpy.trace", "numpy.polyfit", "numpy.savetxt", "numpy.zeros", "numpy.argsort", "scipy.linalg.eigh", "snapshot_functions.read_particles_filter", "numpy.array_equal", "numpy.diag", "numpy.printoptions" ]
[((361, 425), 'snapshot_functions.read_particles_filter', 'read_particles_filter', (['argv[2]'], {'ID_list': '[ID]', 'opts': "{'pos': True}"}), "(argv[2], ID_list=[ID], opts={'pos': True})\n", (382, 425), False, 'from snapshot_functions import read_particles_filter\n'), ((440, 514), 'snapshot_functions.read_particles_filter', 'read_particles_filter', (['argv[2]'], {'center': 'pos[0]', 'radius': 'r', 'opts': "{'ID': True}"}), "(argv[2], center=pos[0], radius=r, opts={'ID': True})\n", (461, 514), False, 'from snapshot_functions import read_particles_filter\n'), ((591, 666), 'snapshot_functions.read_particles_filter', 'read_particles_filter', (['argv[2]'], {'ID_list': 'IDs', 'opts': "{'pos': True, 'ID': True}"}), "(argv[2], ID_list=IDs, opts={'pos': True, 'ID': True})\n", (612, 666), False, 'from snapshot_functions import read_particles_filter\n'), ((673, 688), 'numpy.argsort', 'np.argsort', (['ID0'], {}), '(ID0)\n', (683, 688), True, 'import numpy as np\n'), ((762, 837), 'snapshot_functions.read_particles_filter', 'read_particles_filter', (['argv[1]'], {'ID_list': 'IDs', 'opts': "{'pos': True, 'ID': True}"}), "(argv[1], ID_list=IDs, opts={'pos': True, 'ID': True})\n", (783, 837), False, 'from snapshot_functions import read_particles_filter\n'), ((844, 859), 'numpy.argsort', 'np.argsort', (['ID1'], {}), '(ID1)\n', (854, 859), True, 'import numpy as np\n'), ((1031, 1049), 'numpy.diag', 'np.diag', (['(1, 1, 1)'], {}), '((1, 1, 1))\n', (1038, 1049), True, 'import numpy as np\n'), ((1955, 1994), 'numpy.savetxt', 'np.savetxt', (["('rotation_%d.txt' % ID)", 'rot'], {}), "('rotation_%d.txt' % ID, rot)\n", (1965, 1994), True, 'import numpy as np\n'), ((920, 944), 'numpy.array_equal', 'np.array_equal', (['ID0', 'ID1'], {}), '(ID0, ID1)\n', (934, 944), True, 'import numpy as np\n'), ((1397, 1413), 'numpy.zeros', 'np.zeros', (['(3, 3)'], {}), '((3, 3))\n', (1405, 1413), True, 'import numpy as np\n'), ((1800, 1843), 'numpy.printoptions', 'np.printoptions', ([], {'precision': '(5)', 'suppress': '(True)'}), '(precision=5, suppress=True)\n', (1815, 1843), True, 'import numpy as np\n'), ((1111, 1118), 'scipy.linalg.eigh', 'eigh', (['e'], {}), '(e)\n', (1115, 1118), False, 'from scipy.linalg import eigh\n'), ((1452, 1475), 'numpy.zeros', 'np.zeros', (['pos0.shape[0]'], {}), '(pos0.shape[0])\n', (1460, 1475), True, 'import numpy as np\n'), ((1699, 1742), 'numpy.printoptions', 'np.printoptions', ([], {'precision': '(5)', 'suppress': '(True)'}), '(precision=5, suppress=True)\n', (1714, 1742), True, 'import numpy as np\n'), ((1557, 1574), 'numpy.argsort', 'np.argsort', (['dist2'], {}), '(dist2)\n', (1567, 1574), True, 'import numpy as np\n'), ((1624, 1665), 'numpy.polyfit', 'np.polyfit', (['pos0[idx, c]', 'disp[idx, d]', '(1)'], {}), '(pos0[idx, c], disp[idx, d], 1)\n', (1634, 1665), True, 'import numpy as np\n'), ((976, 996), 'numpy.stack', 'np.stack', (['(ID0, ID1)'], {}), '((ID0, ID1))\n', (984, 996), True, 'import numpy as np\n'), ((1901, 1914), 'numpy.trace', 'np.trace', (['rot'], {}), '(rot)\n', (1909, 1914), True, 'import numpy as np\n'), ((1194, 1208), 'numpy.trace', 'np.trace', (['rot1'], {}), '(rot1)\n', (1202, 1208), True, 'import numpy as np\n')]
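# How the script above is meant to be invoked; the snapshot file names,
# particle ID and radius below are placeholders. From a shell:
#
#     python script.py snapshot_ic.hdf5 snapshot_preic.hdf5 123456 0.5
#
# or equivalently from Python:
run(['script.py', 'snapshot_ic.hdf5', 'snapshot_preic.hdf5', '123456', '0.5'])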
from django.shortcuts import render, redirect from django.http import HttpResponse from .models import * from .forms import * from django.contrib.auth.decorators import login_required from accounts.decorators import adult_user # Create your views here. @login_required(login_url='login') def bookIndex(request): books = Book.objects.all() form = BookForm() if request.method =='POST': form = BookForm(request.POST, request.FILES) if form.is_valid(): form.save() return redirect('/books') context = {'books': books, 'form':form} return render(request, '../templates/list.html', context) @login_required(login_url='login') def editBook(request, pk): book = Book.objects.get(id=pk) form = BookForm(instance=book) if request.method == 'POST': form = BookForm(request.POST, instance=book) if form.is_valid(): form.save() return redirect('../../') context = {'form':form} return render(request, '../templates/edit_book.html', context) @login_required(login_url='login') def deleteBook(request, pk): item = Book.objects.get(id=pk) if request.method == 'POST': item.delete() return redirect('../../') context = {'item':item} return render(request, '../templates/delete.html', context)
[ "django.contrib.auth.decorators.login_required", "django.shortcuts.redirect", "django.shortcuts.render" ]
[((259, 292), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {'login_url': '"""login"""'}), "(login_url='login')\n", (273, 292), False, 'from django.contrib.auth.decorators import login_required\n'), ((612, 645), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {'login_url': '"""login"""'}), "(login_url='login')\n", (626, 645), False, 'from django.contrib.auth.decorators import login_required\n'), ((975, 1008), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {'login_url': '"""login"""'}), "(login_url='login')\n", (989, 1008), False, 'from django.contrib.auth.decorators import login_required\n'), ((559, 609), 'django.shortcuts.render', 'render', (['request', '"""../templates/list.html"""', 'context'], {}), "(request, '../templates/list.html', context)\n", (565, 609), False, 'from django.shortcuts import render, redirect\n'), ((917, 972), 'django.shortcuts.render', 'render', (['request', '"""../templates/edit_book.html"""', 'context'], {}), "(request, '../templates/edit_book.html', context)\n", (923, 972), False, 'from django.shortcuts import render, redirect\n'), ((1179, 1231), 'django.shortcuts.render', 'render', (['request', '"""../templates/delete.html"""', 'context'], {}), "(request, '../templates/delete.html', context)\n", (1185, 1231), False, 'from django.shortcuts import render, redirect\n'), ((487, 505), 'django.shortcuts.redirect', 'redirect', (['"""/books"""'], {}), "('/books')\n", (495, 505), False, 'from django.shortcuts import render, redirect\n'), ((1125, 1143), 'django.shortcuts.redirect', 'redirect', (['"""../../"""'], {}), "('../../')\n", (1133, 1143), False, 'from django.shortcuts import render, redirect\n'), ((863, 881), 'django.shortcuts.redirect', 'redirect', (['"""../../"""'], {}), "('../../')\n", (871, 881), False, 'from django.shortcuts import render, redirect\n')]
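# A plausible urls.py wiring for the three views above; the paths and route
# names are assumptions, not taken from the original project, but they are
# consistent with the redirect('../../') calls in editBook and deleteBook.
from django.urls import path

from . import views

urlpatterns = [
    path('books/', views.bookIndex, name='books'),
    path('books/edit/<int:pk>/', views.editBook, name='edit-book'),
    path('books/delete/<int:pk>/', views.deleteBook, name='delete-book'),
]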
#!/usr/bin/env python

from __future__ import print_function

import hashlib
import string
import random
import time
from json import dumps as json_encode, loads as json_decode

import requests


class FileCatalogLowLevel(object):
    """
    Low level file catalog interface.

    Use like a dict::

        fc = FileCatalogLowLevel('http://file_catalog.com')
        fc['my_new_file'] = {'locations':['/this/is/a/path']}

    Args:
        url (str): url of the file catalog server
        timeout (float): (optional) seconds to wait for a query to finish
    """
    def __init__(self, url, timeout=60):
        self.url = url
        self.timeout = timeout
        self.session = requests.Session()

    def _getfileurl(self, uid):
        for _ in range(5):
            try:
                r = self.session.get(self.url+'/api/files',
                                     params={'query':json_encode({'uid':uid})},
                                     timeout=self.timeout)
            except requests.exceptions.Timeout:
                continue
            if r.status_code == 429:
                continue
            r.raise_for_status()
            files = json_decode(r.text)['files']
            break
        else:
            raise Exception('server is too busy')
        if len(files) != 1:
            raise KeyError(uid)
        return self.url+files[0]

    def __getitem__(self, uid):
        url = self._getfileurl(uid)
        for _ in range(5):
            try:
                r = self.session.get(url, timeout=self.timeout)
            except requests.exceptions.Timeout:
                continue
            if r.status_code == 429:
                continue
            r.raise_for_status()
            return json_decode(r.text)
        raise Exception('server is too busy')

    def __setitem__(self, uid, value):
        meta = value.copy()
        meta['uid'] = uid
        data = json_encode(meta)
        try:
            url = self._getfileurl(uid)
        except KeyError:
            # does not exist
            method = self.session.post
            url = self.url+'/api/files'
        else:
            # exists, so update
            method = self.session.put
        for _ in range(5):
            try:
                r = method(url, data=data, timeout=self.timeout)
            except requests.exceptions.Timeout:
                continue
            if r.status_code == 429:
                continue
            r.raise_for_status()
            return
        raise Exception('server is too busy')

    def __delitem__(self, uid):
        url = self._getfileurl(uid)
        for _ in range(5):
            try:
                r = self.session.delete(url, timeout=self.timeout)
            except requests.exceptions.Timeout:
                continue
            if r.status_code == 429:
                continue
            r.raise_for_status()
            return
        raise Exception('server is too busy')


def sha512sum(data):
    m = hashlib.sha512()
    m.update(data.encode('utf-8'))  # hashlib requires bytes, not str
    return m.hexdigest()


def make_data():
    return ''.join(random.choice(string.ascii_letters)
                   for _ in range(random.randint(1,1000)))


def benchmark(address,num):
    start = time.time()
    fc = FileCatalogLowLevel(address)
    for i in range(num):
        data = make_data()
        cksm = sha512sum(data)
        fc[str(i)] = {'data':data,'checksum':cksm,'locations':[make_data()]}
    for i in range(num):
        meta = fc[str(i)]
    for i in range(num):
        del fc[str(i)]
    return time.time()-start


def main():
    import argparse
    parser = argparse.ArgumentParser(description='benchmark file_catalog server')
    parser.add_argument('--address',type=str,default='http://localhost:8888',help='server address')
    parser.add_argument('-n','--num',type=int,default=10000,help='number of test values')
    args = parser.parse_args()

    print('starting benchmark')
    t = benchmark(args.address, args.num)
    print('finished. took',t,'seconds')

if __name__ == '__main__':
    main()
[ "argparse.ArgumentParser", "json.loads", "random.randint", "requests.Session", "random.choice", "json.dumps", "time.time", "hashlib.sha512" ]
[((2994, 3010), 'hashlib.sha512', 'hashlib.sha512', ([], {}), '()\n', (3008, 3010), False, 'import hashlib\n'), ((3209, 3220), 'time.time', 'time.time', ([], {}), '()\n', (3218, 3220), False, 'import time\n'), ((3593, 3661), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""benchmark file_catalog server"""'}), "(description='benchmark file_catalog server')\n", (3616, 3661), False, 'import argparse\n'), ((671, 689), 'requests.Session', 'requests.Session', ([], {}), '()\n', (687, 689), False, 'import requests\n'), ((1894, 1911), 'json.dumps', 'json_encode', (['meta'], {}), '(meta)\n', (1905, 1911), True, 'from json import dumps as json_encode, loads as json_decode\n'), ((3529, 3540), 'time.time', 'time.time', ([], {}), '()\n', (3538, 3540), False, 'import time\n'), ((1719, 1738), 'json.loads', 'json_decode', (['r.text'], {}), '(r.text)\n', (1730, 1738), True, 'from json import dumps as json_encode, loads as json_decode\n'), ((3092, 3127), 'random.choice', 'random.choice', (['string.ascii_letters'], {}), '(string.ascii_letters)\n', (3105, 3127), False, 'import random\n'), ((1154, 1173), 'json.loads', 'json_decode', (['r.text'], {}), '(r.text)\n', (1165, 1173), True, 'from json import dumps as json_encode, loads as json_decode\n'), ((3143, 3166), 'random.randint', 'random.randint', (['(1)', '(1000)'], {}), '(1, 1000)\n', (3157, 3166), False, 'import random\n'), ((880, 905), 'json.dumps', 'json_encode', (["{'uid': uid}"], {}), "({'uid': uid})\n", (891, 905), True, 'from json import dumps as json_encode, loads as json_decode\n')]
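# Dict-style usage of the low-level client above, assuming a file_catalog
# server is running at the placeholder URL.
fc = FileCatalogLowLevel('http://localhost:8888')
payload = make_data()
fc['my_file'] = {'data': payload,
                 'checksum': sha512sum(payload),
                 'locations': ['/this/is/a/path']}
print(fc['my_file']['checksum'])
del fc['my_file']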
from collections import deque

import numpy as np
import cv2

import chainer
from chainer import links as L
import chainerrl
from chainerrl import agents
from chainerrl.action_value import DiscreteActionValue
from chainerrl import explorers
from chainerrl import links
from chainerrl import replay_buffer


def infer(agent, state):
    # Convert the stacked RGB frames to 84x84 grayscale inputs, as expected
    # by the Nature DQN architecture.
    gray_state = [cv2.cvtColor(s, cv2.COLOR_RGB2GRAY) for s in state]
    dqn_state = [cv2.resize(s, (84, 84), interpolation=cv2.INTER_AREA) \
            for s in gray_state]
    input_tensor = np.array(dqn_state).astype(np.float32)
    return agent.act(input_tensor)


class Agent(object):

    def __init__(self, modelpath, n_actions=4, n_stack_frames=4):
        # Predefined parameters.
        replay_start_size = 5 * 10 ** 4

        # Load the model.
        q_func = links.Sequence(
                links.NatureDQNHead(),
                L.Linear(512, n_actions),
                DiscreteActionValue)
        opt = chainer.optimizers.RMSpropGraves(
            lr=2.5e-4, alpha=0.95, momentum=0.0, eps=1e-2)
        opt.setup(q_func)
        rbuf = replay_buffer.ReplayBuffer(10 ** 6)
        explorer = explorers.LinearDecayEpsilonGreedy(
            start_epsilon=1.0, end_epsilon=0.1,
            decay_steps=10 ** 6,
            random_action_func=lambda: np.random.randint(n_actions))

        def phi(x):
            # Feature extractor: scale pixel values to [0, 1].
            return np.asarray(x, dtype=np.float32) / 255

        # local alias, renamed so it does not shadow the enclosing class name
        agent_cls = agents.DQN
        self._agent = agent_cls(q_func, opt, rbuf, gpu=-1, gamma=0.99,
                               explorer=explorer,
                               replay_start_size=replay_start_size,
                               target_update_interval=10 ** 4,
                               clip_delta=True,
                               update_interval=4,
                               batch_accumulator='sum',
                               phi=phi)
        self._agent.load(modelpath)

        self._state = deque(
            [], maxlen=n_stack_frames)
        self._action = 0

    def get_action(self):
        return self._action

    def put_state(self, state):
        # Note: should divide this code into two parts:
        # a state-buffering part and an inference part...
        self._state.append(state)
        if len(self._state) < self._state.maxlen:
            # Need to wait until enough frames are buffered.
            return
        state = list(self._state)
        self._action = infer(self._agent, state)
[ "cv2.resize", "chainerrl.links.NatureDQNHead", "chainerrl.replay_buffer.ReplayBuffer", "chainer.optimizers.RMSpropGraves", "cv2.cvtColor", "numpy.asarray", "numpy.random.randint", "numpy.array", "collections.deque", "chainer.links.Linear" ]
[((353, 388), 'cv2.cvtColor', 'cv2.cvtColor', (['s', 'cv2.COLOR_RGB2GRAY'], {}), '(s, cv2.COLOR_RGB2GRAY)\n', (365, 388), False, 'import cv2\n'), ((422, 475), 'cv2.resize', 'cv2.resize', (['s', '(84, 84)'], {'interpolation': 'cv2.INTER_AREA'}), '(s, (84, 84), interpolation=cv2.INTER_AREA)\n', (432, 475), False, 'import cv2\n'), ((1002, 1087), 'chainer.optimizers.RMSpropGraves', 'chainer.optimizers.RMSpropGraves', ([], {'lr': '(0.00025)', 'alpha': '(0.95)', 'momentum': '(0.0)', 'eps': '(0.01)'}), '(lr=0.00025, alpha=0.95, momentum=0.0, eps=0.01\n )\n', (1034, 1087), False, 'import chainer\n'), ((1136, 1171), 'chainerrl.replay_buffer.ReplayBuffer', 'replay_buffer.ReplayBuffer', (['(10 ** 6)'], {}), '(10 ** 6)\n', (1162, 1171), False, 'from chainerrl import replay_buffer\n'), ((1930, 1962), 'collections.deque', 'deque', (['[]'], {'maxlen': 'n_stack_frames'}), '([], maxlen=n_stack_frames)\n', (1935, 1962), False, 'from collections import deque\n'), ((535, 554), 'numpy.array', 'np.array', (['dqn_state'], {}), '(dqn_state)\n', (543, 554), True, 'import numpy as np\n'), ((894, 915), 'chainerrl.links.NatureDQNHead', 'links.NatureDQNHead', ([], {}), '()\n', (913, 915), False, 'from chainerrl import links\n'), ((929, 953), 'chainer.links.Linear', 'L.Linear', (['(512)', 'n_actions'], {}), '(512, n_actions)\n', (937, 953), True, 'from chainer import links as L\n'), ((1448, 1479), 'numpy.asarray', 'np.asarray', (['x'], {'dtype': 'np.float32'}), '(x, dtype=np.float32)\n', (1458, 1479), True, 'import numpy as np\n'), ((1347, 1375), 'numpy.random.randint', 'np.random.randint', (['n_actions'], {}), '(n_actions)\n', (1364, 1375), True, 'import numpy as np\n')]
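# A sketch of driving the Agent above with a stream of RGB frames; the model
# path is a placeholder and the zero-filled frames stand in for real frames.
import numpy as np

agent = Agent('path/to/saved_dqn_agent', n_actions=4, n_stack_frames=4)
for _ in range(8):
    frame = np.zeros((210, 160, 3), dtype=np.uint8)
    agent.put_state(frame)       # inference runs once 4 frames are buffered
    action = agent.get_action() # last chosen action (0 until the buffer fills)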
import pytest import time import numpy as np from spotify_confidence.analysis.frequentist.confidence_computers.z_test_computer import sequential_bounds @pytest.mark.skip(reason="Skipping because this test is very slow") def test_many_days(): """ This input (based on a real experiment) is very long, which can cause slow calculation """ t = [ 0.0016169976338740648, 0.0057857955498163615, 0.012200379088315757, 0.020199591701142824, 0.02956441064038571, 0.04047102718841871, 0.052929825413405296, 0.06580092295219643, 0.07878439818310792, 0.09148496950057272, 0.1028893343050959, 0.1128434997940756, 0.12298934256730025, 0.13280979910049193, 0.14267997977787195, 0.15281963941289514, 0.16293176212095561, 0.17198778455162406, 0.17996747917082068, 0.18786110540725684, 0.1955669737257397, 0.20335013690301407, 0.21277055903588274, 0.22148328777708232, 0.2295912740670489, 0.23640586948077766, 0.2431234831038822, 0.24987292468428604, 0.2568336065927525, 0.2649271880853427, 0.27282722271091664, 0.2799894816822785, 0.2862801096305317, 0.2925685639072496, 0.2988294699944579, 0.3051314956400879, 0.3118994077972684, 0.31887303037202536, 0.32523581745772245, 0.3307398353487736, 0.33616198578702633, 0.34151324975562525, 0.3478405485563082, 0.3546238566149848, 0.36130761502236336, 0.36751189302418574, 0.3730571543616735, 0.37865278180851814, 0.38428987795273567, 0.3900127609160433, 0.3964718089893684, 0.40306122104207753, 0.40914555292031984, 0.41449831480764515, 0.4198849769608837, 0.4256404199470336, 0.4315384355133149, 0.43801594290086987, 0.4444516211895538, 0.45034373518130405, 0.4556807858158224, 0.4610488197166289, 0.46633036852044285, 0.4717294082126311, 0.47769497653470894, 0.48369759863580825, 0.4892945325380834, 0.49431792124380325, 0.49935417177798586, 0.5043009639028166, 0.5093262559789482, 0.5149098888134348, 0.5205835093969735, 0.5261172491490695, 0.5310141031413226, 0.5359027242118537, 0.540068909216935, 0.5451620919252675, 0.5506752550043325, 0.5562355968920056, 0.5614758121490083, 0.5660462437469214, 0.5706616804819072, 0.5750453002157994, 0.5795939049979849, 0.5861802311128667, 0.5913273051077091, 0.5958976691303413, 0.6001503392324151, 0.6042404457337608, 0.6082963816680697, 0.6124734913435614, 0.6174918231657613, 0.6223867287374153, 0.6268875352709179, 0.6308341907134806, 0.6348490070893678, 0.6388763812049537, 0.6430405276890614, 0.6476616520101889, 0.6525750168960728, 0.6570689758011117, 0.6610427627189518, 0.6649727383296814, 0.6689671694958335, 0.673019050913289, 0.6776959248411508, 0.6825336054124376, 0.6869984168463193, 0.6908780826604262, 0.6949984065748767, 0.6991746490342636, 0.7033415661048878, 0.7082721626873987, 0.7131064081819068, 0.7176506656210218, 0.7216193168175142, 0.7256178250256133, 0.7296113326629264, 0.733677461202103, 0.7383860054116087, 0.7431864069529378, 0.7475115177561259, 0.7513220765829758, 0.7551652404828552, 0.7591154774153049, 0.7635879699061145, 0.76888963361854, 0.7740750002725536, 0.7788235152607059, 0.7829338267710377, 0.7870690059847372, 0.7912444713283939, 0.7954864645360872, 0.8002680350991415, 0.8051864906561857, 0.8097254772233912, 0.8137210008565843, 0.8175460095309978, 0.8214444612731922, 0.8256005212486867, 0.8302889054993935, 0.8351108860804202, 0.839542135124793, 0.8433705788759852, 0.8472835029908369, 0.8513248314019267, 0.8556693700983707, 0.8606610209471658, 0.865499591259651, 0.8699232042972833, 0.8737653545679493, 0.8776996212090155, 0.8816179062961511, 0.8856027192473231, 0.8900849425785808, 0.8947120585746139, 0.8993599427069738, 
0.9035026227768521, 0.9075820073336299, 0.9115699850604569, 0.9158137239629064, 0.9207252417911126, 0.925749689176233, 0.9303560370359392, 0.9343408161994707, 0.9384800274049299, 0.9426168396879175, 0.9475247422385961, 0.9523909621035122, 0.9573336433987555, 0.9618665256655873, 0.9657568345864344, 0.9697355995499667, 0.973736889607129, 0.9778353641807583, 0.9828378833872299, 0.987703190985854, 0.9921586319807856, 0.9960384779956415, 1.0, ] start_time = time.time() results = sequential_bounds(np.array(t), alpha=0.003333333, sides=2) my_bounds = results.bounds expected = np.array( [ 5.75400023, 8.0, 5.14701605, 4.91478643, 4.80691346, 4.69004328, 4.57921075, 4.49683943, 4.44452939, 4.38899083, 4.35683792, 4.33289847, 4.301461, 4.27383028, 4.24513591, 4.21444005, 4.18809224, 4.17037988, 4.15702106, 4.13796352, 4.12345883, 4.10808648, 4.07898394, 4.06169498, 4.04985422, 4.04453139, 4.03288177, 4.02205301, 4.00664024, 3.98770613, 3.97358123, 3.96589571, 3.95946059, 3.94995533, 3.94128534, 3.93114789, 3.91870273, 3.90749163, 3.90064315, 3.8958719, 3.88847126, 3.88184277, 3.86841705, 3.85642932, 3.84721152, 3.84099201, 3.83689676, 3.8295672, 3.82234648, 3.81501541, 3.80286989, 3.79370807, 3.78728177, 3.78449351, 3.77865864, 3.76988501, 3.76230126, 3.75251025, 3.74474277, 3.73953663, 3.73534961, 3.72974059, 3.72466752, 3.71785112, 3.70903202, 3.70176221, 3.6976847, 3.6944938, 3.68996741, 3.68449851, 3.67888767, 3.67142884, 3.66522708, 3.65968721, 3.65649679, 3.65207508, 3.65156885, 3.643952, 3.63644572, 3.63029181, 3.62665696, 3.62527741, 3.62117738, 3.61789837, 3.6128686, 3.59904477, 3.5976517, 3.59678297, 3.59434356, 3.59116304, 3.58814574, 3.5835558, 3.57659985, 3.5726481, 3.56990393, 3.56879169, 3.56501955, 3.56127173, 3.55720436, 3.55194666, 3.54597713, 3.5436994, 3.54287161, 3.53974477, 3.53649679, 3.53314876, 3.52700997, 3.52175088, 3.51873367, 3.51846468, 3.51401711, 3.5106822, 3.50742162, 3.50113309, 3.49658758, 3.49376264, 3.49238249, 3.48979047, 3.48725107, 3.48341163, 3.47810608, 3.47381485, 3.47184685, 3.47110719, 3.46801712, 3.46472076, 3.45913659, 3.45209404, 3.4484684, 3.44587153, 3.44472549, 3.44242755, 3.43895355, 3.43549018, 3.43080058, 3.42621252, 3.42437516, 3.42371762, 3.42122891, 3.41861765, 3.41451447, 3.40936002, 3.4051931, 3.40307035, 3.40295986, 3.40052495, 3.39688763, 3.39279348, 3.38725208, 3.38421998, 3.38214471, 3.38133324, 3.37908335, 3.37689107, 3.37364203, 3.36937673, 3.36593888, 3.36250238, 3.36109704, 3.35878324, 3.35666501, 3.35305866, 3.34754255, 3.34364255, 3.34157534, 3.34085629, 3.33864193, 3.33563376, 3.33016843, 3.32687574, 3.32338656, 3.32166421, 3.32107266, 3.31861916, 3.31615129, 3.31334059, 3.30792367, 3.30479742, 3.30339238, 3.30296421, 3.30041534, ] ) assert np.allclose(my_bounds, expected) # if the calculation with max_nints takes longer than 10 seconds, something is most likely broken assert (time.time() - start_time) < 15 # Run a second time but with initial state from last run. start_time = time.time() results = sequential_bounds(np.array(t), alpha=0.003333333, sides=2, state=results.state) my_bounds = results.bounds assert np.allclose(my_bounds, expected) # if the calculation with max_nints takes longer than 10 seconds, something is most likely broken print(f"Time passed second round: {time.time() - start_time}") assert (time.time() - start_time) < 0.01 @pytest.mark.skip(reason="Skipping because this test is very slow") def test_many_days_fast_and_no_crash(): """ This is based on experiment 1735 on 26.11.2020. 
The calculation of the corresponding bounds takes many minutes without performance tweak. Therefore, this test only checks for absence of crashs and time constraints, but does not compare against the baseline without performance tweak. There is a Jupyter notebook making that comparison. """ t = [ 0.011404679673257933, 0.02292450819418779, 0.0356455988484443, 0.04835740420885424, 0.05971666577058213, 0.06976017458481187, 0.07984165086754545, 0.09002459314412276, 0.10026356929804565, 0.11129746744100509, 0.1222487922920801, 0.13250332796555583, 0.1418309168157694, 0.15072692856918676, 0.15940425274581055, 0.16819162796171988, 0.17766544268380677, 0.18725283769713902, 0.19600162922594835, 0.20386600701959812, 0.21159934032678884, 0.21916233120704773, 0.22688560894714668, 0.23509036348536208, 0.24366994698965522, 0.2515994198750076, 0.25875219123481424, 0.2659624389836802, 0.2731790169781248, 0.28051081384508175, 0.28822790138928306, 0.2962915558739476, 0.3037246366701631, 0.31063411372423433, 0.31767205835063517, 0.32464032826076655, 0.3318100596369355, 0.3397812253123048, 0.3476375502493003, 0.3550356746451523, 0.3616457394863339, 0.3683042335071859, 0.375005792804928, 0.38175551518794676, 0.3891222824602354, 0.39652683513644266, 0.40347332732118724, 0.4098512458112366, 0.4163205187081655, 0.42263992444151655, 0.42899148558161226, 0.43464157988476515, 0.43858871208254674, 0.44192382717460427, 0.44482627278235426, 0.4474605932759375, 0.44957511937869815, 0.4509048070694502, 0.45222422911858906, 0.45333747002744257, 0.45426598540713137, 0.4551955091445229, 0.45605329943533507, 0.456895460181754, 0.4578387508027823, 0.45881449093488524, 0.45965707183034693, 0.4603621239391219, 0.4610501740166303, 0.46173166976907054, 0.4624475477181825, 0.4632872155802805, 0.4641010162663083, 0.46481571779810027, 0.4654194019478082, 0.4660207332628762, 0.4666458170038323, 0.4672646265190821, 0.46791675385342846, 0.4685898046101078, 0.46918687841487516, 0.46969451649339183, 0.47019581032136176, 0.4706811945055765, 0.47116992587716583, 0.47170379526092326, 0.47227291514937425, 0.4727852448922026, 0.47322669549150526, 0.4736554715946826, 0.47408022827201673, 0.47450655350577753, 0.4749737592414058, 0.47545756086422586, 0.4759381553493523, 0.47630259262910407, 0.4766609657576709, 0.47699441004302984, 0.4773518028238301, 0.477775327063972, 0.4781977729215707, 0.47856485714029223, 0.47888037506649034, 0.47919262983512245, 0.47949520717080135, 0.47980748994936967, 0.4801789017032324, 0.4805627078538587, 0.48090167009664675, 0.4811904245288165, 0.48149113920373887, 0.4817901452725537, 0.4820966860142033, 0.48243977972257923, 0.4827841618880198, 0.48309197708176604, 0.4833586316742829, 0.4836129058750043, 0.4838654994795544, 0.4841171547512422, 0.48439948090305657, 0.48470691796266424, 0.4849764575786085, 0.4852081697757299, 0.48545255646897667, 0.4856974893559792, 0.48595208567096676, 0.48624575584693763, 0.4865416528128355, 0.4867930840050338, 0.4870117575768593, 0.4872274340855126, 0.4874240218226533, 0.4876215198827202, 0.4878617751103791, 0.488108108494191, 0.48831807097586183, 0.4884937072807334, 0.48866595438332605, 0.488852192449045, 0.48903411698459087, 0.4892522303576926, 0.4894829201921431, 0.4896802221826566, 0.4898457609055321, 0.49001188783706756, 0.4901847091433521, 0.4903469286887892, 0.4905345812562857, 0.49073597269748276, 0.49091467609036693, 0.4910691508884479, 0.4912115954189357, 0.49135658885361677, 0.49150574176382184, 0.49167835299558493, 0.49186735004001847, 0.49203167033066975, 
0.49216849886895175, 0.4923075682021289, 0.4924506289512129, 0.49259525825672346, 0.49276396210238826, 0.49294465420074185, 0.4931019580023778, 0.49330306934421303, 0.4935200763248353, 0.49373208353184794, 0.4939721566949216, 0.4942334053697541, 0.4944958444668745, 0.4947262121870588, 0.49492469059489225, 0.4951192336066912, 0.495294323717807, 0.4954780829041733, 0.4956838158854796, 0.49592192835302007, 0.49614550366367866, 0.49633301618149417, 0.49652995404283723, 0.4967104500716375, 0.4969174855149766, 0.49712443692850716, 0.4973541744251272, 0.49756258235533957, 0.49772464784612763, 0.4978989396740621, 0.4980669292663541, 0.4982378038820735, 0.49843929335804726, 0.4986487236509305, 0.49883442952786183, 0.49899118713574214, 0.49915640374435144, 0.49932506557511197, ] alpha = 0.0033333333333333335 sides = 2 start_time = time.time() my_bounds = sequential_bounds(np.array(t), alpha=alpha, sides=sides).bounds expected = np.array( [ 5.0536015, 4.819334, 4.70702194, 4.60970036, 4.55329219, 4.5118919, 4.465161, 4.42168832, 4.37932413, 4.33343066, 4.29780246, 4.26550766, 4.2476601, 4.22343408, 4.20455427, 4.1834642, 4.15580542, 4.13352266, 4.1170148, 4.10326736, 4.08845795, 4.07496919, 4.05959646, 4.0417501, 4.02262887, 4.01056674, 4.00192679, 3.98996708, 3.97709149, 3.96442225, 3.95010566, 3.93456306, 3.92603865, 3.91801377, 3.90630556, 3.8975012, 3.88641115, 3.87143326, 3.85966246, 3.85112482, 3.84569926, 3.83714224, 3.82719647, 3.81910741, 3.80682977, 3.79652758, 3.78889289, 3.78428912, 3.77646938, 3.76966463, 3.76150223, 3.75820905, 3.76088934, 3.76171382, 3.76141619, 3.76079216, 3.76237742, 3.76725034, 3.76769877, 3.7690107, 3.7710916, 3.77168583, 3.76813708, 3.7705804, 3.76669411, 3.76711572, 3.76808636, 3.76962133, 3.76680748, 3.76844159, 3.76552364, 3.76210975, 3.76321355, 3.76471956, 3.76227721, 3.76424368, 3.76172169, 3.75923, 3.76099518, 3.75829319, 3.76028082, 3.75824824, 3.7562443, 3.76013739, 3.75818674, 3.7560594, 3.75379557, 3.75757852, 3.75582548, 3.75412511, 3.75244297, 3.75075688, 3.74891172, 3.75280489, 3.75090966, 3.7494744, 3.74806463, 3.75254602, 3.75114099, 3.74947802, 3.74782149, 3.74638383, 3.75092969, 3.74970739, 3.7485241, 3.74730404, 3.74585452, 3.74435839, 3.74303855, 3.74191532, 3.74074663, 3.73958567, 3.74415751, 3.74282592, 3.74149075, 3.74029857, 3.73926672, 3.73828357, 3.73730769, 3.7363362, 3.7352472, 3.73406243, 3.74020438, 3.7393112, 3.73836986, 3.73742713, 3.73644796, 3.73531947, 3.73418345, 3.73321896, 3.73238074, 3.73155456, 3.73080198, 3.73004637, 3.7291278, 3.72818669, 3.7273851, 3.72671496, 3.72605809, 3.72534827, 3.72465527, 3.72382494, 3.72294733, 3.73077145, 3.73014101, 3.72950865, 3.72885115, 3.7282343, 3.72752112, 3.72675617, 3.7260778, 3.7254917, 3.72495149, 3.72440186, 3.72383671, 3.723183, 3.72246763, 3.72184599, 3.7213286, 3.72080295, 3.72026245, 3.71971626, 3.71907946, 3.71839777, 3.71780463, 3.71704671, 3.7162294, 3.71543144, 3.71452847, 3.72065881, 3.71967136, 3.71880523, 3.71805949, 3.71732896, 3.71667185, 3.71598258, 3.71521135, 3.71431933, 3.71348235, 3.71278081, 3.71204444, 3.71136994, 3.7105967, 3.70982427, 3.70896735, 3.71527887, 3.71467395, 3.71402372, 3.71339733, 3.71276051, 3.71201001, 3.71123041, 3.71053954, 3.70995666, 3.70934263, 3.70871611, ] ) assert np.allclose(my_bounds, expected) # if the calculation with max_nints takes longer than 30 seconds, something is most likely broken assert (time.time() - start_time) < 30
[ "numpy.allclose", "pytest.mark.skip", "numpy.array", "time.time" ]
[((155, 221), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""Skipping because this test is very slow"""'}), "(reason='Skipping because this test is very slow')\n", (171, 221), False, 'import pytest\n'), ((10635, 10701), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""Skipping because this test is very slow"""'}), "(reason='Skipping because this test is very slow')\n", (10651, 10701), False, 'import pytest\n'), ((5478, 5489), 'time.time', 'time.time', ([], {}), '()\n', (5487, 5489), False, 'import time\n'), ((5610, 7901), 'numpy.array', 'np.array', (['[5.75400023, 8.0, 5.14701605, 4.91478643, 4.80691346, 4.69004328, \n 4.57921075, 4.49683943, 4.44452939, 4.38899083, 4.35683792, 4.33289847,\n 4.301461, 4.27383028, 4.24513591, 4.21444005, 4.18809224, 4.17037988, \n 4.15702106, 4.13796352, 4.12345883, 4.10808648, 4.07898394, 4.06169498,\n 4.04985422, 4.04453139, 4.03288177, 4.02205301, 4.00664024, 3.98770613,\n 3.97358123, 3.96589571, 3.95946059, 3.94995533, 3.94128534, 3.93114789,\n 3.91870273, 3.90749163, 3.90064315, 3.8958719, 3.88847126, 3.88184277, \n 3.86841705, 3.85642932, 3.84721152, 3.84099201, 3.83689676, 3.8295672, \n 3.82234648, 3.81501541, 3.80286989, 3.79370807, 3.78728177, 3.78449351,\n 3.77865864, 3.76988501, 3.76230126, 3.75251025, 3.74474277, 3.73953663,\n 3.73534961, 3.72974059, 3.72466752, 3.71785112, 3.70903202, 3.70176221,\n 3.6976847, 3.6944938, 3.68996741, 3.68449851, 3.67888767, 3.67142884, \n 3.66522708, 3.65968721, 3.65649679, 3.65207508, 3.65156885, 3.643952, \n 3.63644572, 3.63029181, 3.62665696, 3.62527741, 3.62117738, 3.61789837,\n 3.6128686, 3.59904477, 3.5976517, 3.59678297, 3.59434356, 3.59116304, \n 3.58814574, 3.5835558, 3.57659985, 3.5726481, 3.56990393, 3.56879169, \n 3.56501955, 3.56127173, 3.55720436, 3.55194666, 3.54597713, 3.5436994, \n 3.54287161, 3.53974477, 3.53649679, 3.53314876, 3.52700997, 3.52175088,\n 3.51873367, 3.51846468, 3.51401711, 3.5106822, 3.50742162, 3.50113309, \n 3.49658758, 3.49376264, 3.49238249, 3.48979047, 3.48725107, 3.48341163,\n 3.47810608, 3.47381485, 3.47184685, 3.47110719, 3.46801712, 3.46472076,\n 3.45913659, 3.45209404, 3.4484684, 3.44587153, 3.44472549, 3.44242755, \n 3.43895355, 3.43549018, 3.43080058, 3.42621252, 3.42437516, 3.42371762,\n 3.42122891, 3.41861765, 3.41451447, 3.40936002, 3.4051931, 3.40307035, \n 3.40295986, 3.40052495, 3.39688763, 3.39279348, 3.38725208, 3.38421998,\n 3.38214471, 3.38133324, 3.37908335, 3.37689107, 3.37364203, 3.36937673,\n 3.36593888, 3.36250238, 3.36109704, 3.35878324, 3.35666501, 3.35305866,\n 3.34754255, 3.34364255, 3.34157534, 3.34085629, 3.33864193, 3.33563376,\n 3.33016843, 3.32687574, 3.32338656, 3.32166421, 3.32107266, 3.31861916,\n 3.31615129, 3.31334059, 3.30792367, 3.30479742, 3.30339238, 3.30296421,\n 3.30041534]'], {}), '([5.75400023, 8.0, 5.14701605, 4.91478643, 4.80691346, 4.69004328, \n 4.57921075, 4.49683943, 4.44452939, 4.38899083, 4.35683792, 4.33289847,\n 4.301461, 4.27383028, 4.24513591, 4.21444005, 4.18809224, 4.17037988, \n 4.15702106, 4.13796352, 4.12345883, 4.10808648, 4.07898394, 4.06169498,\n 4.04985422, 4.04453139, 4.03288177, 4.02205301, 4.00664024, 3.98770613,\n 3.97358123, 3.96589571, 3.95946059, 3.94995533, 3.94128534, 3.93114789,\n 3.91870273, 3.90749163, 3.90064315, 3.8958719, 3.88847126, 3.88184277, \n 3.86841705, 3.85642932, 3.84721152, 3.84099201, 3.83689676, 3.8295672, \n 3.82234648, 3.81501541, 3.80286989, 3.79370807, 3.78728177, 3.78449351,\n 3.77865864, 3.76988501, 3.76230126, 3.75251025, 3.74474277, 3.73953663,\n 
3.73534961, 3.72974059, 3.72466752, 3.71785112, 3.70903202, 3.70176221,\n 3.6976847, 3.6944938, 3.68996741, 3.68449851, 3.67888767, 3.67142884, \n 3.66522708, 3.65968721, 3.65649679, 3.65207508, 3.65156885, 3.643952, \n 3.63644572, 3.63029181, 3.62665696, 3.62527741, 3.62117738, 3.61789837,\n 3.6128686, 3.59904477, 3.5976517, 3.59678297, 3.59434356, 3.59116304, \n 3.58814574, 3.5835558, 3.57659985, 3.5726481, 3.56990393, 3.56879169, \n 3.56501955, 3.56127173, 3.55720436, 3.55194666, 3.54597713, 3.5436994, \n 3.54287161, 3.53974477, 3.53649679, 3.53314876, 3.52700997, 3.52175088,\n 3.51873367, 3.51846468, 3.51401711, 3.5106822, 3.50742162, 3.50113309, \n 3.49658758, 3.49376264, 3.49238249, 3.48979047, 3.48725107, 3.48341163,\n 3.47810608, 3.47381485, 3.47184685, 3.47110719, 3.46801712, 3.46472076,\n 3.45913659, 3.45209404, 3.4484684, 3.44587153, 3.44472549, 3.44242755, \n 3.43895355, 3.43549018, 3.43080058, 3.42621252, 3.42437516, 3.42371762,\n 3.42122891, 3.41861765, 3.41451447, 3.40936002, 3.4051931, 3.40307035, \n 3.40295986, 3.40052495, 3.39688763, 3.39279348, 3.38725208, 3.38421998,\n 3.38214471, 3.38133324, 3.37908335, 3.37689107, 3.37364203, 3.36937673,\n 3.36593888, 3.36250238, 3.36109704, 3.35878324, 3.35666501, 3.35305866,\n 3.34754255, 3.34364255, 3.34157534, 3.34085629, 3.33864193, 3.33563376,\n 3.33016843, 3.32687574, 3.32338656, 3.32166421, 3.32107266, 3.31861916,\n 3.31615129, 3.31334059, 3.30792367, 3.30479742, 3.30339238, 3.30296421,\n 3.30041534])\n', (5618, 7901), True, 'import numpy as np\n'), ((9979, 10011), 'numpy.allclose', 'np.allclose', (['my_bounds', 'expected'], {}), '(my_bounds, expected)\n', (9990, 10011), True, 'import numpy as np\n'), ((10237, 10248), 'time.time', 'time.time', ([], {}), '()\n', (10246, 10248), False, 'import time\n'), ((10385, 10417), 'numpy.allclose', 'np.allclose', (['my_bounds', 'expected'], {}), '(my_bounds, expected)\n', (10396, 10417), True, 'import numpy as np\n'), ((16776, 16787), 'time.time', 'time.time', ([], {}), '()\n', (16785, 16787), False, 'import time\n'), ((16884, 19358), 'numpy.array', 'np.array', (['[5.0536015, 4.819334, 4.70702194, 4.60970036, 4.55329219, 4.5118919, \n 4.465161, 4.42168832, 4.37932413, 4.33343066, 4.29780246, 4.26550766, \n 4.2476601, 4.22343408, 4.20455427, 4.1834642, 4.15580542, 4.13352266, \n 4.1170148, 4.10326736, 4.08845795, 4.07496919, 4.05959646, 4.0417501, \n 4.02262887, 4.01056674, 4.00192679, 3.98996708, 3.97709149, 3.96442225,\n 3.95010566, 3.93456306, 3.92603865, 3.91801377, 3.90630556, 3.8975012, \n 3.88641115, 3.87143326, 3.85966246, 3.85112482, 3.84569926, 3.83714224,\n 3.82719647, 3.81910741, 3.80682977, 3.79652758, 3.78889289, 3.78428912,\n 3.77646938, 3.76966463, 3.76150223, 3.75820905, 3.76088934, 3.76171382,\n 3.76141619, 3.76079216, 3.76237742, 3.76725034, 3.76769877, 3.7690107, \n 3.7710916, 3.77168583, 3.76813708, 3.7705804, 3.76669411, 3.76711572, \n 3.76808636, 3.76962133, 3.76680748, 3.76844159, 3.76552364, 3.76210975,\n 3.76321355, 3.76471956, 3.76227721, 3.76424368, 3.76172169, 3.75923, \n 3.76099518, 3.75829319, 3.76028082, 3.75824824, 3.7562443, 3.76013739, \n 3.75818674, 3.7560594, 3.75379557, 3.75757852, 3.75582548, 3.75412511, \n 3.75244297, 3.75075688, 3.74891172, 3.75280489, 3.75090966, 3.7494744, \n 3.74806463, 3.75254602, 3.75114099, 3.74947802, 3.74782149, 3.74638383,\n 3.75092969, 3.74970739, 3.7485241, 3.74730404, 3.74585452, 3.74435839, \n 3.74303855, 3.74191532, 3.74074663, 3.73958567, 3.74415751, 3.74282592,\n 3.74149075, 3.74029857, 3.73926672, 3.73828357, 
3.73730769, 3.7363362, \n 3.7352472, 3.73406243, 3.74020438, 3.7393112, 3.73836986, 3.73742713, \n 3.73644796, 3.73531947, 3.73418345, 3.73321896, 3.73238074, 3.73155456,\n 3.73080198, 3.73004637, 3.7291278, 3.72818669, 3.7273851, 3.72671496, \n 3.72605809, 3.72534827, 3.72465527, 3.72382494, 3.72294733, 3.73077145,\n 3.73014101, 3.72950865, 3.72885115, 3.7282343, 3.72752112, 3.72675617, \n 3.7260778, 3.7254917, 3.72495149, 3.72440186, 3.72383671, 3.723183, \n 3.72246763, 3.72184599, 3.7213286, 3.72080295, 3.72026245, 3.71971626, \n 3.71907946, 3.71839777, 3.71780463, 3.71704671, 3.7162294, 3.71543144, \n 3.71452847, 3.72065881, 3.71967136, 3.71880523, 3.71805949, 3.71732896,\n 3.71667185, 3.71598258, 3.71521135, 3.71431933, 3.71348235, 3.71278081,\n 3.71204444, 3.71136994, 3.7105967, 3.70982427, 3.70896735, 3.71527887, \n 3.71467395, 3.71402372, 3.71339733, 3.71276051, 3.71201001, 3.71123041,\n 3.71053954, 3.70995666, 3.70934263, 3.70871611]'], {}), '([5.0536015, 4.819334, 4.70702194, 4.60970036, 4.55329219, \n 4.5118919, 4.465161, 4.42168832, 4.37932413, 4.33343066, 4.29780246, \n 4.26550766, 4.2476601, 4.22343408, 4.20455427, 4.1834642, 4.15580542, \n 4.13352266, 4.1170148, 4.10326736, 4.08845795, 4.07496919, 4.05959646, \n 4.0417501, 4.02262887, 4.01056674, 4.00192679, 3.98996708, 3.97709149, \n 3.96442225, 3.95010566, 3.93456306, 3.92603865, 3.91801377, 3.90630556,\n 3.8975012, 3.88641115, 3.87143326, 3.85966246, 3.85112482, 3.84569926, \n 3.83714224, 3.82719647, 3.81910741, 3.80682977, 3.79652758, 3.78889289,\n 3.78428912, 3.77646938, 3.76966463, 3.76150223, 3.75820905, 3.76088934,\n 3.76171382, 3.76141619, 3.76079216, 3.76237742, 3.76725034, 3.76769877,\n 3.7690107, 3.7710916, 3.77168583, 3.76813708, 3.7705804, 3.76669411, \n 3.76711572, 3.76808636, 3.76962133, 3.76680748, 3.76844159, 3.76552364,\n 3.76210975, 3.76321355, 3.76471956, 3.76227721, 3.76424368, 3.76172169,\n 3.75923, 3.76099518, 3.75829319, 3.76028082, 3.75824824, 3.7562443, \n 3.76013739, 3.75818674, 3.7560594, 3.75379557, 3.75757852, 3.75582548, \n 3.75412511, 3.75244297, 3.75075688, 3.74891172, 3.75280489, 3.75090966,\n 3.7494744, 3.74806463, 3.75254602, 3.75114099, 3.74947802, 3.74782149, \n 3.74638383, 3.75092969, 3.74970739, 3.7485241, 3.74730404, 3.74585452, \n 3.74435839, 3.74303855, 3.74191532, 3.74074663, 3.73958567, 3.74415751,\n 3.74282592, 3.74149075, 3.74029857, 3.73926672, 3.73828357, 3.73730769,\n 3.7363362, 3.7352472, 3.73406243, 3.74020438, 3.7393112, 3.73836986, \n 3.73742713, 3.73644796, 3.73531947, 3.73418345, 3.73321896, 3.73238074,\n 3.73155456, 3.73080198, 3.73004637, 3.7291278, 3.72818669, 3.7273851, \n 3.72671496, 3.72605809, 3.72534827, 3.72465527, 3.72382494, 3.72294733,\n 3.73077145, 3.73014101, 3.72950865, 3.72885115, 3.7282343, 3.72752112, \n 3.72675617, 3.7260778, 3.7254917, 3.72495149, 3.72440186, 3.72383671, \n 3.723183, 3.72246763, 3.72184599, 3.7213286, 3.72080295, 3.72026245, \n 3.71971626, 3.71907946, 3.71839777, 3.71780463, 3.71704671, 3.7162294, \n 3.71543144, 3.71452847, 3.72065881, 3.71967136, 3.71880523, 3.71805949,\n 3.71732896, 3.71667185, 3.71598258, 3.71521135, 3.71431933, 3.71348235,\n 3.71278081, 3.71204444, 3.71136994, 3.7105967, 3.70982427, 3.70896735, \n 3.71527887, 3.71467395, 3.71402372, 3.71339733, 3.71276051, 3.71201001,\n 3.71123041, 3.71053954, 3.70995666, 3.70934263, 3.70871611])\n', (16892, 19358), True, 'import numpy as np\n'), ((21602, 21634), 'numpy.allclose', 'np.allclose', (['my_bounds', 'expected'], {}), '(my_bounds, expected)\n', (21613, 21634), True, 
'import numpy as np\n'), ((5522, 5533), 'numpy.array', 'np.array', (['t'], {}), '(t)\n', (5530, 5533), True, 'import numpy as np\n'), ((10281, 10292), 'numpy.array', 'np.array', (['t'], {}), '(t)\n', (10289, 10292), True, 'import numpy as np\n'), ((10126, 10137), 'time.time', 'time.time', ([], {}), '()\n', (10135, 10137), False, 'import time\n'), ((10599, 10610), 'time.time', 'time.time', ([], {}), '()\n', (10608, 10610), False, 'import time\n'), ((16822, 16833), 'numpy.array', 'np.array', (['t'], {}), '(t)\n', (16830, 16833), True, 'import numpy as np\n'), ((21749, 21760), 'time.time', 'time.time', ([], {}), '()\n', (21758, 21760), False, 'import time\n'), ((10559, 10570), 'time.time', 'time.time', ([], {}), '()\n', (10568, 10570), False, 'import time\n')]
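# A small illustration of the sequential_bounds API exercised above: compute
# two-sided bounds for a short, illustrative information-fraction schedule,
# then recompute while reusing the state returned by the first call.
import numpy as np

t = np.array([0.25, 0.5, 0.75, 1.0])
res = sequential_bounds(t, alpha=0.003333333, sides=2)
print(res.bounds)
res2 = sequential_bounds(t, alpha=0.003333333, sides=2, state=res.state)
assert np.allclose(res.bounds, res2.bounds)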
#coding: utf-8
import re

from django import forms
from django.utils.translation import ugettext_lazy as _

from main.utils import parse_csv_file as read_file


def _validation_word(word):
    if len(word) > 46:
        raise forms.ValidationError(_('Maximum length allowed is 46 characters.'))
    if ' ' in word:
        raise forms.ValidationError(_('Words with whitespace are not allowed.'))
    if not re.match(r'^\w+$', word):
        raise forms.ValidationError(_('Words with special characters are not allowed.'))


class FilesAdminForm(forms.ModelForm):

    def clean_file(self):
        file = self.cleaned_data['file']
        parse_file = read_file(file)
        if 'word' not in parse_file.fieldnames:
            raise forms.ValidationError(_('File structure is incorrect; check the example in the documentation.'))
        for row in parse_file:
            _validation_word(row['word'])
        return file

    class Meta:
        fields = '__all__'


class WordsAdminForm(forms.ModelForm):

    def clean_word(self):
        word = self.cleaned_data['word']
        _validation_word(word)
        return word

    class Meta:
        fields = '__all__'
[ "django.utils.translation.ugettext_lazy", "re.match", "main.utils.parse_csv_file" ]
[((376, 400), 're.match', 're.match', (['"""^\\\\w+$"""', 'word'], {}), "('^\\\\w+$', word)\n", (384, 400), False, 'import re\n'), ((598, 613), 'main.utils.parse_csv_file', 'read_file', (['file'], {}), '(file)\n', (607, 613), True, 'from main.utils import parse_csv_file as read_file\n'), ((238, 275), 'django.utils.translation.ugettext_lazy', '_', (['"""Maximum length allowed 46 words."""'], {}), "('Maximum length allowed 46 words.')\n", (239, 275), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((324, 366), 'django.utils.translation.ugettext_lazy', '_', (['"""Forbidden use words with whitespaces."""'], {}), "('Forbidden use words with whitespaces.')\n", (325, 366), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((432, 480), 'django.utils.translation.ugettext_lazy', '_', (['"""Forbidden use words with special character."""'], {}), "('Forbidden use words with special character.')\n", (433, 480), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((688, 750), 'django.utils.translation.ugettext_lazy', '_', (['"""Structure file incorrect, check example in documentation."""'], {}), "('Structure file incorrect, check example in documentation.')\n", (689, 750), True, 'from django.utils.translation import ugettext_lazy as _\n')]
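# Exercising the shared validator above directly, with illustrative inputs;
# this assumes Django settings are configured (needed for translations).
from django import forms

for candidate in ('valid_word', 'two words', 'bad-word!'):
    try:
        _validation_word(candidate)
        print(candidate, '-> accepted')
    except forms.ValidationError as exc:
        print(candidate, '-> rejected:', exc.messages)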
# --------------------------------------------------------------------------
# Source file provided under Apache License, Version 2.0, January 2004,
# http://www.apache.org/licenses/
# (c) Copyright <NAME>, 2021
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# --------------------------------------------------------------------------

import ast
import base64
import csv
import glob
import io
import logging
import os
import re
import sys
import tempfile
import time
import urllib.parse

from collections import namedtuple
from contextlib import contextmanager
from datetime import datetime
from functools import lru_cache
from operator import attrgetter

import requests
from ibm_watson_machine_learning.wml_client_error import WMLClientError, ApiRequestFailure
from packaging import version

from ibm_watson_machine_learning import APIClient

# WML Python API version with a fixed Assets.download function
WML_HAS_FIXED_DOWNLOAD = "1000.0.0"

LOGNAME = 'log.txt'


class Error(Exception):
    """Base class for all errors in this script"""
    pass


class InvalidCredentials(Error):
    """The WML credentials were not found, or are incorrect"""
    pass


class SimilarNamesInJob(Error):
    """A job can't have two input files with the same name, irrespective of path"""
    pass


class NoCredentialsToCreateSpace(Error):
    """Need to create a space, but credentials are incomplete to allow that"""
    pass


#
# The WML API offers a way to choose which entities are returned when asking
# for job details. Unfortunately, that parameter is not surfaced in the WML
# Python API, and we have to patch the code in order to send the value we want.
#
# APIClient._params() is the function that creates the parameters for the REST
# call. We replace it with our own function '_new_params' that calls the
# original function and then adds the filter we want, if we want one.
#

# The function will not be called with any parameter. So the filter we want to
# use, if any, must be set in a global variable.
_the_filter = None
# Backup of the original function, so that we can call it and restore it later.
_the_old_params = None


def _new_params():
    """Our new function to build a parameter list for the REST call.
    Called by the instance of APIClient itself."""
    global _the_old_params
    global _the_filter
    # Use the original code and get its output
    # noinspection PyCallingNonCallable
    result = _the_old_params()
    # Add the filter, if one is required
    if _the_filter:
        result['include'] = _the_filter
    return result


def _get_file_spec(path):
    force = False
    if path[0] == '+':
        force = True
        path = path[1:]
    basename = os.path.basename(path)
    return path, basename, force


class _CredentialsProvider:
    """Reads credentials for a DOWMLLib instance.
Stores them as a 'credentials' attribute.""" ENVIRONMENT_VARIABLE_NAME = 'DOWML_CREDENTIALS' ENVIRONMENT_VARIABLE_NAME_FILE = 'DOWML_CREDENTIALS_FILE' # The keys in the credentials APIKEY = 'apikey' TOKEN = 'token' SPACE_ID = 'space_id' SPACE_NAME = 'space_name' URL = 'url' REGION = 'region' COS_CRN = 'cos_resource_crn' ML_CRN = 'ml_instance_crn' REGION_TO_URL = { 'us-south': 'https://us-south.ml.cloud.ibm.com', 'eu-de': 'https://eu-de.ml.cloud.ibm.com', 'eu-gb': 'https://eu-gb.ml.cloud.ibm.com', 'jp-tok': 'https://jp-tok.ml.cloud.ibm.com', } def __init__(self, wml_credentials_file=None, api_key=None, wml_credentials_str=None, url=None, region=None): self._logger = logging.getLogger(self.__class__.__name__) if wml_credentials_str is None: if wml_credentials_file is not None: wml_credentials_str = self._read_wml_credentials_from_file(wml_credentials_file) else: wml_credentials_str = self._read_wml_credentials_from_env() if wml_credentials_str: self._logger.debug('Found credential string.') self.credentials = self.check_credentials(wml_credentials_str, api_key=api_key, url=url, region=region) def usage(self): print(f'${self.ENVIRONMENT_VARIABLE_NAME} should contain credentials as a Python dict of the form:') print(f' {{\'{self.APIKEY}\': \'<apikey>\', \'{self.URL}\': \'https://us-south.ml.cloud.ibm.com\'}}') print(f'Or set ${self.ENVIRONMENT_VARIABLE_NAME_FILE} to the path to a file containing the same information.') def check_credentials(self, wml_cred_str, api_key, url, region): wml_credentials = None if wml_cred_str is not None: assert type(wml_cred_str) is str if not wml_cred_str: raise InvalidCredentials('WML credentials must not be an empty string.') wml_credentials = ast.literal_eval(wml_cred_str) assert type(wml_credentials) is dict if not wml_credentials: # If we don't find a credentials string through the environment, we will # assume that the parameters are enough to build one. wml_credentials = {} if api_key: wml_credentials[self.APIKEY] = api_key if url: wml_credentials[self.URL] = url if region: wml_credentials[self.REGION] = region if self.APIKEY not in wml_credentials and self.TOKEN not in wml_credentials: raise InvalidCredentials('API key (or token) must be specified.') if self.URL not in wml_credentials and self.REGION not in wml_credentials: raise InvalidCredentials('URL or region must be specified (but not both).') if self.APIKEY in wml_credentials: assert type(wml_credentials[self.APIKEY]) is str else: assert type(wml_credentials[self.TOKEN]) is str if region: if url: raise InvalidCredentials(f"You must not specify both '{self.URL}' and '{self.REGION}'.") wml_credentials[self.REGION] = region # Setting a region must clear the URL, otherwise there will be an # ambiguity (and therefore an error) just below wml_credentials.pop(self.URL, None) if self.REGION in wml_credentials: region = wml_credentials[self.REGION] if self.URL in wml_credentials: raise InvalidCredentials(f"WML credentials must not have both '{self.URL}' and '{self.REGION}'.") try: wml_credentials[self.URL] = self.REGION_TO_URL[region] except KeyError: raise InvalidCredentials(f"Unknown region '{region}'.") wml_credentials.pop(self.REGION) if url: # The url specified takes priority over the one in the credentials, if any. 
wml_credentials[self.URL] = url

        assert self.URL in wml_credentials
        assert type(wml_credentials[self.URL]) is str
        url = wml_credentials[self.URL]
        if not url:
            raise InvalidCredentials('URL must not be an empty string.')
        if url[-1] == '/':
            self._logger.warning('URL should not have a \'/\' at the end.')
            wml_credentials[self.URL] = url[:-1]

        self._logger.debug('Credentials have the expected structure.')
        return wml_credentials

    def _read_wml_credentials_from_env(self):
        """Return a string of credentials suitable for WML from the environment

        Raises InvalidCredentials if anything is wrong."""
        var_name = self.ENVIRONMENT_VARIABLE_NAME
        var_file_name = self.ENVIRONMENT_VARIABLE_NAME_FILE
        self._logger.debug(f'Looking for credentials in environment variable {var_name}...')
        wml_cred_str = None
        if var_name in os.environ:
            wml_cred_str = os.environ[var_name]
        elif var_file_name in os.environ:
            self._logger.debug(f'Looking for credentials file name in environment variable {var_file_name}...')
            wml_cred_str = self._read_wml_credentials_from_file(os.environ[var_file_name])
        return wml_cred_str

    def _read_wml_credentials_from_file(self, file):
        """Return the content of the file, assumed to be WML credentials"""
        self._logger.debug(f'Looking for credentials in file \'{file}\'...')
        with open(file) as f:
            wml_cred_str = f.read()
        return wml_cred_str


def version_is_greater(current, minimum):
    """Compare two 'vv.nn.pp' versions.

    Return True if the current version string is greater than or equal to the
    minimum string. Assumes that each string is of type vv.nn.pp, with vv, nn
    and pp being integers."""
    return version.parse(current) >= version.parse(minimum)


@contextmanager
def suppress_stdout():
    """Sometimes it's nicer to not get printed output from APIClient"""
    with open(os.devnull, "w") as devnull:
        old_stdout = sys.stdout
        sys.stdout = devnull
        try:
            yield
        finally:
            sys.stdout = old_stdout


class DOWMLLib:
    """A Python client to run DO models on WML"""

    DOWML_PREFIX = 'dowml'
    space_name = f'{DOWML_PREFIX}-space'
    MODEL_NAME = f'{DOWML_PREFIX}-model'
    MODEL_TYPES = ['cplex', 'cpo', 'opl', 'docplex']
    DO_VERSION = '20.1'
    TSHIRT_SIZES = ['S', 'M', 'XL']
    DEPLOYMENT_NAME = f'{DOWML_PREFIX}-deployment'

    def __init__(self, wml_credentials_file=None,
                 api_key=None,
                 space_id=None,
                 url=None,
                 region=None,
                 # note: this default is evaluated once, at import time
                 tz=datetime.utcnow().astimezone().tzinfo):
        f"""Read and validate the WML credentials

        Args:
            wml_credentials_file: path to the file that contains the WML credentials.
            If None, they are read from the environment.

            space_id: the id of the space that should be used. If specified, this
            replaces the one in the credentials.

            url: the URL for the Machine Learning service to use. If specified,
            this replaces the one in the credentials. url and region can't both
            be specified.

            region: An alternative way to specify the URL for the Machine Learning
            service to use. If specified, the URL will be deduced from the region.
            This replaces the one in the credentials. url and region can't both
            be specified.
Possible values for the region are {list(_CredentialsProvider.REGION_TO_URL.keys())} tz: timezone to use to display time, defaults to Python's default timezone.""" self.model_type = self.MODEL_TYPES[0] """Type of model to solve""" self.tshirt_size = self.TSHIRT_SIZES[0] """Tee-shirt size for the hardware to solve the model""" self.do_version = self.DO_VERSION """DO engines version to use""" self.timelimit = None """Time limit for the solve, in seconds""" self.inputs = 'assets' """Type of inputs that the created jobs should use""" self.outputs = 'inline' """Type of outputs that the created jobs should use""" self._logger = logging.getLogger(self.__class__.__name__) cred_provider = _CredentialsProvider(wml_credentials_file, api_key=api_key, url=url, region=region) wml_credentials = cred_provider.credentials # A space name in the credentials changes the default if cred_provider.SPACE_NAME in wml_credentials: self._logger.debug('They contain a space name.') self.space_name = wml_credentials[cred_provider.SPACE_NAME] if cred_provider.SPACE_ID in wml_credentials: self._logger.debug('And they contain a space id.') # The space_id specified here takes precedence # over the one, if any, defined in the credentials if space_id: wml_credentials[cred_provider.SPACE_ID] = space_id self._wml_credentials = wml_credentials # We don't initialize the client at this time, because this is an # expensive operation. self._client = None self._space_id = None self._data_connection = None self.tz = tz @property def url(self): """The URL for the WML service instance that the library is connected to.""" return self._wml_credentials[_CredentialsProvider.URL] @property def space_id(self): """The id for the deployment space that the library is connected to.""" return self._space_id @property def inline(self): self._logger.warning('Attribute \'inline\' is deprecated: use \'inputs\' instead.') return self.inputs == 'inline' @inline.setter def inline(self, value): self._logger.warning('Attribute \'inline\' is deprecated: use \'inputs\' instead.') self.inputs = 'inline' if value else 'assets' def _create_client(self): """Create the Python APIClient instance""" assert self._client is None self._logger.debug('Creating the WML client...') # http://ibm-wml-api-pyclient.mybluemix.net/#api-for-ibm-cloud client = APIClient(self._wml_credentials) self._logger.info(f'Creating the client succeeded. Client version is {client.version}.') self._logger.info(f'Client uses URL {self._wml_credentials[_CredentialsProvider.URL]}.') return client def _set_default_space(self): space_id_key = _CredentialsProvider.SPACE_ID if space_id_key in self._wml_credentials: space_id = self._wml_credentials[space_id_key] self._logger.debug(f'Using specified space \'{space_id}\'.') else: space_id = self._find_or_create_space() self._logger.debug('Setting default space...') self._client.set.default_space(space_id) self._space_id = space_id self._logger.debug('Done.') def _get_or_make_client(self): if self._client is None: self._client = self._create_client() # The client is pretty much useless when it doesn't yet have a # default space. So let's set it immediately. self._set_default_space() # It would seem natural to assert that self._space_id is not None. # But this fails when we are in unit-tests and we just set _client to # a mock object from outside, without also setting the _space_id. 
return self._client def solve(self, paths): """Solve the model, return the job id The model is sent as online data to WML (if 'inline yes') or is uploaded as a data asset to be reused later (default). :param paths: one or more pathname to the files to send, as a single string, separated by space :return: The id of the submitted job """ self._get_or_make_client() # As _get_deployment_id caches its results, it may happen that what it returns is # invalid. For example if the user deleted the deployment after the last solve. # So if we get an error about the deployment not existing, we clear the cache # and retry once. first_try = True while True: deployment_id = self._get_deployment_id() self._logger.info(f'Deployment id: {deployment_id}') try: job_id = self.create_job(paths, deployment_id) self._logger.info(f'Job id: {job_id}') return job_id except ApiRequestFailure as e: if first_try and b'deployment_does_not_exist' in e.args[1].content: self._logger.warning('Deployment was not found. Clearing the cache and retrying...') self._get_deployment_id_with_params.cache_clear() first_try = False else: if not first_try: self._logger.warning('Clearing the cache didn\'t help...') raise def client_data_asset_download(self, asset_id, filename): self._logger.debug(f'Downloading asset {asset_id} in {filename}...') with suppress_stdout(): name = self._client.data_assets.download(asset_id, filename) if version_is_greater(self._client.version, WML_HAS_FIXED_DOWNLOAD): filename = name # else the return value is useless when filename is an absolute path self._logger.debug(f'Done saving {filename}.') def get_log(self, job_id): """Extract the engine log from the job. :param job_id: The id of the job to get the log from :return: The decoded log, or None """ def _get_asset_content(asset_id): if version_is_greater(self._client.version, WML_HAS_FIXED_DOWNLOAD): return self._client.data_assets.download(asset_id).decode('ascii') else: with tempfile.TemporaryDirectory() as temp_dir_name: filename = os.path.join(temp_dir_name, f'{asset_id}-log.txt') self.client_data_asset_download(asset_id, filename) with open(filename) as f: content = f.read() return content def _get_log_from_output_references(references): self._logger.debug(f'Looking for {LOGNAME} in output_data_references...') for ref in references: if ref.get('type') != 'data_asset': continue if 'id' not in ref: self._logger.warning('Ignoring data asset with no id.') continue if ref['id'] == LOGNAME: self._logger.debug('Found it.') try: asset_id = ref['location']['id'] except KeyError: self._logger.error('Log data asset has no location/id information.') break self._logger.debug(f'This is asset {asset_id}.') return _get_asset_content(asset_id) return None def _get_log_from_outputs(outputs): self._logger.debug(f'Looking for {LOGNAME} in output_data...') for output_data in outputs: if output_data['id'] == LOGNAME: if 'content' not in output_data: self._logger.error(f'Log without content for job {job_id}.') continue self._logger.debug('Found it. 
Decoding it...') output = output_data['content'] output = self.decode_log(output) output = self.remove_empty_lines(output) self._logger.debug('Decoded the log.') return output return None job_details = self.get_job_details(job_id, with_contents='log') try: do = job_details['entity']['decision_optimization'] except KeyError: self._logger.warning('No decision_optimization structure available for this job.') return None # When we have references in the job, the 'output_data' may be an empty list if 'output_data' in do and do['output_data']: return _get_log_from_outputs(do['output_data']) elif 'output_data_references' in do: return _get_log_from_output_references(do['output_data_references']) else: self._logger.warning('No output_data or output_data_references structure available for this job.') return None def _parse_asset_references(self, details, key): def find_id_in_href(loc): href = loc.get('href') if loc else None if not href: return None path = urllib.parse.urlparse(href).path s = re.search('/v2/assets/(.*)', path) if s: return s.group(1) self._logger.warning(f'Could not decode href for asset \'{name}\'.') return None def find_id_in_id(loc): return loc.get('id') if loc else None try: refs = details['entity']['decision_optimization'][key] except KeyError: self._logger.debug(f'No \'{key}\' structure available for this job.') return {} result = {} for ref in refs: asset_type = ref.get('type') if asset_type != 'data_asset': self._logger.debug(f'Ignoring asset of unknown type \'{asset_type}\'.') continue name = ref.get('id') if not name: self._logger.warning('Found a data asset with no name.') continue self._logger.debug(f'Found a data asset named {name}.') location = ref.get('location') asset_id = find_id_in_id(location) or find_id_in_href(location) if asset_id: result[name] = asset_id else: self._logger.warning(f'Could not find asset id for asset \'{name}\'.') return result def get_output_asset_ids(self, details): """"Extract the output data asset ids from the job details. :param details: The details of the job to get the output from :return: A dict of outputs. Keys are the names of the outputs, and the corresponding value for each key is the id of the asset. """ return self._parse_asset_references(details, 'output_data_references') def get_input_asset_ids(self, details): """"Extract the input data asset ids from the job details. :param details: The details of the job to get the output from :return: A dict of inputs. Keys are the names of the inputs, and the corresponding value for each key is the id of the asset. """ return self._parse_asset_references(details, 'input_data_references') def get_output(self, details, csv_as_dataframe=None, tabular_as_csv=False): """Deprecated. 
Use get_outputs instead""" return self.get_outputs(details, csv_as_dataframe, tabular_as_csv) def _extract_inline_files_from_details(self, details, key, tabular_as_csv): try: files = details['entity']['decision_optimization'][key] except KeyError: self._logger.debug(f'No \'{key}\' structure available for this job.') return {} result = {} for output_data in files: name = output_data['id'] if 'content' in output_data: # What we have here is a regular file, encoded self._logger.debug(f'Found a regular file named {name}.') content = self._extract_regular_file(output_data) result[name] = content elif ('values' in output_data and 'fields' in output_data and name.lower().endswith('.csv')): self._logger.debug(f'Found a CSV file named {name}.') content = self._extract_csv_file(output_data, tabular_as_csv) result[name] = content else: self._logger.warning(f'Found an unknown file named {name}.') content = output_data result[name] = content return result def get_outputs(self, details, csv_as_dataframe=None, tabular_as_csv=False): """"Extract the inline outputs from the job details. :param details: The details of the job to get the outputs from :param csv_as_dataframe: Whether the content of a CSV file should be returned as a Pandas DataFrame or not. Deprecated: use tabular_as_csv instead :param tabular_as_csv: Whether tabular outputs should be returned as CSV file content instead of Pandas dataframe :return: A dict of outputs, with the names of the assets as keys, and the content as value, as bytes. We don't assume that the content is actually text. """ if csv_as_dataframe is not None: # We have a non-default value for this deprecated parameter # Let's first check that the replacement parameter is at its default # value assert(tabular_as_csv is False) # Now we can replace it with the correct value tabular_as_csv = not csv_as_dataframe return self._extract_inline_files_from_details(details, 'output_data', tabular_as_csv) def get_inputs(self, details, tabular_as_csv=False): """"Extract the inline inputs from the job details. :param details: The details of the job to get the inputs from :param tabular_as_csv: Whether tabular inputs should be returned as CSV file content instead of Pandas dataframe :return: A dict of inputs, with the names of the assets as keys, and the content as value, as bytes. We don't assume that the content is actually text. """ return self._extract_inline_files_from_details(details, 'input_data', tabular_as_csv) @staticmethod def _extract_csv_file(output_data, tabular_as_csv): if tabular_as_csv: content = io.StringIO() writer = csv.writer(content) writer.writerow(output_data['fields']) for r in output_data['values']: writer.writerow(r) content = content.getvalue().encode() else: import pandas content = pandas.DataFrame(output_data['values'], columns=output_data['fields']) return content @staticmethod def _extract_regular_file(output_data): content = output_data['content'] content = content.encode('UTF-8') content = base64.b64decode(content) return content def get_job_details(self, job_id, with_contents=None): """ Get the job details for the given job :param job_id: The id of the job to look for :param with_contents: if 'names', the details returned include the input and output files names. If 'full', the content of these files is included as well. 
If 'log', the content only includes the output files :return: The job details """ client = self._get_or_make_client() self._logger.debug('Fetching output...') output_filter = None if not with_contents: output_filter = 'solve_parameters,solve_state,status' elif with_contents == 'log': output_filter = 'output_data,output_data_references' job_details = self.client_get_job_details(client, job_id, output_filter) self._logger.debug('Done.') if with_contents != 'full' and with_contents != 'log': self.filter_large_chunks_from_details(job_details) return job_details @staticmethod def client_get_job_details(client, job_id, with_filter=None): # The filter doesn't work correctly if it contains spaces assert(not with_filter or (with_filter.find(' ') == -1)) if version_is_greater(client.version, "1.0.154"): result = client.deployments.get_job_details(job_id, with_filter) else: global _the_filter global _the_old_params # Save the filter in a global variable for our new function to find it _the_filter = with_filter # Save the pointer to the original code # noinspection PyProtectedMember _the_old_params = client._params # and replace it with our new function client._params = _new_params try: result = client.deployments.get_job_details(job_id) finally: # Put back the original code client._params = _the_old_params return result @staticmethod def filter_large_chunks_from_details(job_details): """Remove the large blobs (input/output) from the given job_details.""" try: do = job_details['entity']['decision_optimization'] for data in do.get('output_data', []): if 'content' in data: # This is the case for regular files, such as the log data['content'] = '[not shown]' elif 'values' in data: # This is the case for CSV files data['values'] = ['[not shown]'] for data in do.get('input_data', []): if 'content' in data: data['content'] = '[not shown]' if 'solve_state' in do and 'latest_engine_activity' in do['solve_state']: do['solve_state']['latest_engine_activity'] = ['[not shown]'] except KeyError: # GH-1: This happens when the job failed pass def _delete_data_assets(self, job_details): job_id = job_details['metadata']['id'] try: odr = job_details['entity']['decision_optimization']['output_data_references'] except KeyError: odr = [] for output in odr: if output.get('type') != 'data_asset': continue if 'location' not in output: self._logger.error(f'Missing \'location\' in details for job {job_id}.') elif 'id' not in output['location']: self._logger.error(f'Missing \'location.id\' in details for job {job_id}.') else: data_asset_id = output['location']['id'] self._logger.debug(f'Deleting data asset {data_asset_id}...') try: self._client.data_assets.delete(data_asset_id) self._logger.debug('Done.') except WMLClientError: self._logger.error('Exception raised while trying to delete the asset.', exc_info=True) def _client_deployments_delete_job(self, job_id, hard, job_details): """Deletes the platform run, so that the deployment job is deleted as well. If only calling client.deployments.delete_job(job_id, hard) the 'run' of the 'platform job' on the Watson Studio side is left, and it will never be deleted. On the other hand, deleting the run on the WS side also deletes the deployment job on the WML side. So let's do that. 
""" client = self._get_or_make_client() wml_url = self._wml_credentials['url'] # We don't want to (try to) delete the WS run if we only cancel the job # Instead, we skip everything until calling deployments.delete_job(True) everything_ok_so_far = hard ws_url = client.PLATFORM_URLS_MAP.get(wml_url) if everything_ok_so_far and not ws_url: self._logger.error(f'Unknown Watson Studio URL for WML URL {wml_url}.') everything_ok_so_far = False if everything_ok_so_far: try: platform_job_id = job_details['entity']['platform_job']['job_id'] platform_run_id = job_details['entity']['platform_job']['run_id'] url = f'{ws_url}/v2/jobs/{platform_job_id}/runs/{platform_run_id}?space_id={self.space_id}' except KeyError: self._logger.error('Watson Studio job id or run id not found in WML job details.') everything_ok_so_far = False if everything_ok_so_far: # noinspection PyUnboundLocalVariable self._logger.debug(f'Trying to delete run {platform_run_id} of Watson Studio job {platform_job_id}...') # noinspection PyProtectedMember # noinspection PyUnboundLocalVariable r = requests.delete(url, headers={'Authorization': f'Bearer {client.service_instance._get_token()}', 'Content-Type': 'application/json', 'cache-control': 'no-cache'}) if r.status_code != 204: self._logger.error(f'Error when trying to delete the Watson Studio run. {r.text}') everything_ok_so_far = False if not everything_ok_so_far: if hard: self._logger.error('Could not delete the Watson Studio run. Deleting the WML job deployment instead...') # else: # we just wanted to cancel the job, so there's nothing to warn against client.deployments.delete_job(job_id, hard) def delete_job(self, job_id, hard=True): """Delete the given job. :param job_id: the job to be deleted :param hard: deprecated. Use cancel_job instead of passing False """ if not hard: self.cancel_job(job_id) return self._logger.debug(f'Deleting data assets (if any) for job {job_id}...') job_details = self.get_job_details(job_id, with_contents='names') self._delete_data_assets(job_details) self._logger.debug(f'Done. Deleting job {job_id}...') self._client_deployments_delete_job(job_id, True, job_details) self._logger.debug('Done.') def cancel_job(self, job_id): """Cancel the given job. :param job_id: the job to be canceled """ self._logger.debug(f'Canceling job {job_id}...') self._client_deployments_delete_job(job_id, False, None) self._logger.debug('Done.') def decode_log(self, output): """Decode the engine log coming from DO4WML. :param output: A base-64 encoded text with empty lines :return: The decoded text, without empty lines """ output = output.encode('UTF-8') output = base64.b64decode(output) output = output.decode('UTF-8') output = self.remove_empty_lines(output) return output @staticmethod def remove_empty_lines(output): """Remove empty lines from the log. :param output: The text to process :return: The text, with no empty lines """ output = '\n'.join([s for s in output.splitlines() if s]) return output @staticmethod def _get_job_status_from_details(job_details): return job_details['entity']['decision_optimization']['status']['state'] @staticmethod def _get_job_id_from_details(job_details): return job_details['metadata']['id'] @staticmethod def _get_creation_time_from_details(job_details, tz): created = job_details['metadata']['created_at'] if created[-1] == 'Z': # A suffix of Z is not understood by isoformat. 
Let's replace # it with one that's understood created = created[:-1] + '+00:00' dt = datetime.fromisoformat(created) # Transform to local time dt = dt.astimezone(tz) # Remove timezone information so that ... dt = dt.replace(tzinfo=None) # ... just naively prints local time created = dt.isoformat(sep=' ', timespec='seconds') else: # Let's not mess with what we don't know pass return created @staticmethod def _get_input_names_from_details(job_details): do = job_details['entity']['decision_optimization'] inputs = do.get('input_data', []) names = [i['id'] for i in inputs] inputs = do.get('input_data_references', []) for i in inputs: if 'id' in i: names.append('*' + i['id']) else: names.append('Unknown') return names def wait_for_job_end(self, job_id, print_activity=False): """Wait for the job to finish, return its status and details as a tuple. If print_activity is True, some information is printed in the console.""" class ProgressiveDelay: def __init__(self): self.delays = [2, 2, 2, 2, 2, 2, 3, 5, 5, 5, 5, 5, 5, 5, 5, 5, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 30] def wait(self): delay = self.delays[0] if len(self.delays) > 1: self.delays.pop(0) assert (2 <= delay <= 30) time.sleep(delay) def __enter__(self): return self def __exit__(self, exc_type, exc_val, exc_tb): pass class StatusLogger: def __init__(self, initial_state): self.last_state = initial_state print(initial_state, end='', flush=True) def log_state(self, state): if state == self.last_state: print('.', end='', flush=True) else: if self.last_state != '': print('') # else: if state was empty, no need to end the line print(state, end='', flush=True) self.last_state = state def __enter__(self): return self def __exit__(self, exc_type, exc_val, exc_tb): pass client = self._get_or_make_client() with StatusLogger('') as status_logger, \ ProgressiveDelay() as delayer: while True: job_details = self.client_get_job_details(client, job_id, with_filter='solve_state,status') do = job_details['entity']['decision_optimization'] status = self._get_job_status_from_details(job_details) self._logger.info(f'Job status: {status}') if print_activity: status_logger.log_state(f'Job is {status}.') if status in ['completed', 'failed', 'canceled']: break if print_activity: # There may be a bit of log to look at try: activity = do['solve_state']['latest_engine_activity'] if activity: # Because of the StatusLogger, we're not at the beginning of a line print('') # We are joining the lines in the activity with a CR, # only to remove them if they were already included... act = '\n'.join(activity) act = self.remove_empty_lines(act) print(act) except KeyError: # This must mean that no activity is available yet pass delayer.wait() if print_activity: # The status_logger printed something, but didn't end the line yet print('') return status, job_details @staticmethod def get_file_as_data(path): """Return the base-64 encoded content of a file.""" with open(path, 'rb') as f: data = f.read() data = base64.b64encode(data) data = data.decode('UTF-8') return data def _get_type_from_details(self, job): try: deployment_id = job['entity']['deployment']['id'] deployment = self._get_deployment_from_id(deployment_id) model_id = deployment['entity']['asset']['id'] model = self._get_model_definition_from_id(model_id) deployment_type = model['entity']['wml_model']['type'] match = re.fullmatch(r"do-(....*)_[0-9.]*", deployment_type) if match: deployment_type = match.group(1) return deployment_type except KeyError: # Something changed. 
But let's not fail just for that self._logger.warning('Error while fetching type of a job!') return '?????' def _get_version_from_details(self, job): try: deployment_id = job['entity']['deployment']['id'] deployment = self._get_deployment_from_id(deployment_id) model_id = deployment['entity']['asset']['id'] model = self._get_model_definition_from_id(model_id) deployment_type = model['entity']['wml_model']['type'] match = re.fullmatch(r"do-....*_([0-9.]*)", deployment_type) engine_version = '?????' if match: engine_version = match.group(1) return engine_version except KeyError: # Something changed. But let's not fail just for that self._logger.warning('Error while fetching version of a job!') return '?????' @lru_cache def _get_model_definition_from_id(self, model_id): client = self._get_or_make_client() model = client.model_definitions.get_details(model_id) return model @lru_cache def _get_deployment_from_id(self, deployment_id): client = self._get_or_make_client() deployment = client.deployments.get_details(deployment_id) return deployment def _get_size_from_details(self, job): try: deployment_id = job['entity']['deployment']['id'] deployment = self._get_deployment_from_id(deployment_id) size = deployment['entity']['hardware_spec']['name'] return size except KeyError: # Something changed. But let's not fail just for that self._logger.warning('Error while fetching size of a job!') return '?' def get_jobs(self): """Return the list of tuples (status, id, ...) for all jobs in the deployment.""" client = self._get_or_make_client() self._logger.debug('Getting job details...') job_details = client.deployments.get_job_details() self._logger.debug('Done.') self._logger.debug('Getting information about deployments and models...') result = [] for job in job_details['resources']: status = self._get_job_status_from_details(job) job_id = self._get_job_id_from_details(job) created = self._get_creation_time_from_details(job, self.tz) names = self._get_input_names_from_details(job) deployment_type = self._get_type_from_details(job) engine_version = self._get_version_from_details(job) size = self._get_size_from_details(job) JobTuple = namedtuple('Job', ['status', 'id', 'created', 'names', 'type', 'version', 'size']) j = JobTuple(status=status, id=job_id, created=created, names=names, type=deployment_type, version=engine_version, size=size) result.append(j) result.sort(key=attrgetter('created')) self._logger.debug('Done.') return result def parse_paths(self, paths): """Expand wildcards that may appear in the input assets list.""" self._logger.debug(f'Parsing input list: {paths}') globbed = [] for path in paths.split(): # Let's first get rid of the 'force' flag that glob # would not understand path, _, force = _get_file_spec(path) files = glob.glob(path) if not files: # If the path doesn't actually match an existing file, this is # not necessarily an error: this name can refer to a data # asset that exists already. So let's keep it. 
files = [path] if force: # Put back the '+' in front files = [f'+{file}' for file in files] globbed += files self._logger.debug(f'Actual input list: {globbed}') return globbed def create_inputs(self, paths, cdd_inputdata, solve_payload): # First deal with wildcards globbed = self.parse_paths(paths) # And let's now create the inputs from these files names = [] for path in globbed: path, basename, force = _get_file_spec(path) if basename in names: raise SimilarNamesInJob(basename) names.append(basename) if self.inputs == 'inline': input_data = { 'id': basename, 'content': self.get_file_as_data(path) } else: data_asset_id = self._create_data_asset_if_necessary(path, basename, force) input_data = { 'id': basename, "type": "data_asset", "location": { "href": "/v2/assets/" + data_asset_id + "?space_id=" + self.space_id } } solve_payload[cdd_inputdata].append(input_data) def create_job(self, paths, deployment_id): """Create a deployment job (aka a run) and return its id.""" client = self._get_or_make_client() cdd = client.deployments.DecisionOptimizationMetaNames assert(self.outputs == 'inline' or self.outputs == 'assets') cdd_outputdata = cdd.OUTPUT_DATA if self.outputs == 'assets': cdd_outputdata = cdd.OUTPUT_DATA_REFERENCES # Assume we use inline data (i.e. content in the job request) cdd_inputdata = cdd.INPUT_DATA if self.inputs == 'assets': # But if we don't want inline data, we have to submit # input references instead cdd_inputdata = cdd.INPUT_DATA_REFERENCES solve_payload = { cdd.SOLVE_PARAMETERS: { 'oaas.logAttachmentName': 'log.txt', 'oaas.logTailEnabled': 'true', 'oaas.resultsFormat': 'JSON' }, cdd_inputdata: [], cdd_outputdata: [ {'id': '.*'} ] } if self.outputs == 'assets': out = solve_payload[cdd_outputdata][0] out['type'] = 'data_asset' # PyCharm assumes that, because we added a string in the dict on # the previous line, we should only add strings in the same dict. # But this is not how WML does... # noinspection PyTypeChecker out['connection'] = {} # noinspection PyTypeChecker out['location'] = {'name': '${job_id}/${attachment_name}'} if self.timelimit: params = solve_payload[cdd.SOLVE_PARAMETERS] params['oaas.timeLimit'] = 1000 * self.timelimit self.create_inputs(paths, cdd_inputdata, solve_payload) self._logger.debug('Creating the job...') if self.inputs == 'inline': self._logger.debug('Data is inline. Let\'s not print the payload...') else: self._logger.debug(repr(solve_payload)) dt = datetime.now() job_details = client.deployments.create_job(deployment_id, solve_payload) submit_time = (datetime.now() - dt).total_seconds() self._logger.debug(f'Done in {submit_time}. Getting its id...') job_id = client.deployments.get_job_uid(job_details) return job_id def _get_deployment_id(self): # Which deployment we want depends on a number of configuration values # in the library. In order for the cache to work correctly, and not always # return the same deployment id, the cached function must be given these # values as parameters. return self._get_deployment_id_with_params(self.DEPLOYMENT_NAME, self.model_type, self.do_version, self.tshirt_size) @lru_cache def _get_deployment_id_with_params(self, deployment_name_prefix, model_type, do_version, tshirt_size): # The point of this forwarding function is to allow testing. # Specifically, counting the number of calls to the _cached # function, and deciding what it returns. # Mocking this function would remove the lru_cache... 
return self._get_deployment_id_with_params_cached(deployment_name_prefix, model_type, do_version, tshirt_size) def _get_deployment_id_with_params_cached(self, deployment_name_prefix, model_type, do_version, tshirt_size): """Create the deployment if doesn't exist already, return its id.""" self._logger.debug('Getting deployments...') client = self._get_or_make_client() deployment_details = client.deployments.get_details() self._logger.debug('Done.') resources = deployment_details['resources'] deployment_name = f'{deployment_name_prefix}-{model_type}-{do_version}-{tshirt_size}' self._logger.debug(f'Got the list. Looking for deployment named \'{deployment_name}\'') deployment_id = None for r in resources: if r['entity']['name'] == deployment_name: deployment_id = r['metadata']['id'] self._logger.debug('Found it.') break if deployment_id is not None: return deployment_id self._logger.debug('This deployment doesn\'t exist yet. Creating it...') deployment_id = self._create_deployment(deployment_name) return deployment_id def _create_deployment(self, deployment_name): # We need a model to create a deployment model_id = self._get_model_id() # Create the deployment self._logger.debug('Creating the deployment itself...') client = self._get_or_make_client() cdc = client.deployments.ConfigurationMetaNames meta_props = { cdc.NAME: deployment_name, cdc.DESCRIPTION: "Deployment for the Solve on WML Python script", cdc.BATCH: {}, cdc.HARDWARE_SPEC: {'name': self.tshirt_size, 'num_nodes': 2} } deployment = client.deployments.create(artifact_uid=model_id, meta_props=meta_props) self._logger.debug('Deployment created.') deployment_id = client.deployments.get_id(deployment) return deployment_id def _get_model_id(self): """Create an empty model if one doesn't exist, return its id.""" self._logger.debug('Getting models...') client = self._get_or_make_client() details = client.repository.get_details() self._logger.debug('Done.') resources = details['models']['resources'] model_name = f'{self.MODEL_NAME}-{self.model_type}-{self.do_version}' self._logger.debug(f'Got the list. Looking for model named \'{model_name}\'...') model_id = None for r in resources: if r['metadata']['name'] == model_name: model_id = r['metadata']['id'] self._logger.debug('Found it.') self._logger.debug(f'Model id: {model_id}') break if model_id is None: self._logger.debug('This model doesn\'t exist yet. 
Creating it...') model_id = self._create_model(model_name) return model_id def get_available_versions(self): """Return the list of available DO versions on the platform.""" client = self._get_or_make_client() target_version = "1.0.92" if not version_is_greater(client.version, target_version): return [f'Error: need WML client version {target_version} or better to retrieve available versions'] available_versions = [] for s in client.software_specifications.get_details()['resources']: name = s['metadata']['name'] match = re.fullmatch(r"do_([0-9.]*)", name) if match: available_versions.append(match.group(1)) return available_versions def _create_model(self, model_name): client = self._get_or_make_client() cr = client.repository crm = cr.ModelMetaNames model_metadata = { crm.NAME: model_name, crm.DESCRIPTION: "Model for the solve-on-wml script", crm.TYPE: f'do-{self.model_type}_{self.do_version}', crm.SOFTWARE_SPEC_UID: client.software_specifications.get_id_by_name(f'do_{self.do_version}') } # We need an empty.zip file, because APIClient doesn't know better handle, path = tempfile.mkstemp(suffix='.zip', text=False) try: # This string is the result of converting the file # empty.zip in the repository using # openssl base64 < empty.zip file_content = base64.b64decode('UEsFBgAAAAAAAAAAAAAAAAAAAAAAAA==') os.write(handle, file_content) finally: os.close(handle) try: model_details = cr.store_model(model=path, meta_props=model_metadata) finally: os.remove(path) self._logger.debug('Model created.') model_id = client.repository.get_model_id(model_details) self._logger.debug(f'Model id: {model_id}') return model_id def _find_or_create_space(self): """Find the Space to use from its name, create it if it doesn't exist.""" assert self._client client = self._client self._logger.debug('Fetching existing spaces...') space_details = client.spaces.get_details() resources = space_details['resources'] self._logger.debug(f'Got the list. Looking for space named \'{self.space_name}\'...') space_id = None for r in resources: if r['entity']['name'] == self.space_name: space_id = r['metadata']['id'] self._logger.debug('Found it.') break if space_id is None: self._logger.debug('This space doesn\'t exist yet. Creating it...') # Prepare necessary information wml_credentials = self._wml_credentials cos_crn = _CredentialsProvider.COS_CRN ml_crn = _CredentialsProvider.ML_CRN if cos_crn not in wml_credentials or ml_crn not in wml_credentials: raise NoCredentialsToCreateSpace(f'WML credentials do not contain the information necessary ' f'to create a deployment space. \nMissing \'{cos_crn}\' ' f'and/or \'{ml_crn}\'.') assert type(wml_credentials[cos_crn]) is str assert type(wml_credentials[ml_crn]) is str csc = client.spaces.ConfigurationMetaNames metadata = { csc.NAME: self.space_name, csc.DESCRIPTION: self.space_name + ' description', csc.STORAGE: { "type": "bmcos_object_storage", "resource_crn": self._wml_credentials[cos_crn] }, csc.COMPUTE: { "name": "existing_instance_id", "crn": self._wml_credentials[ml_crn] } } # Create the space # We want the space to be ready as soon as the code returns from # spaces.store(), so we use background_mode=False. In addition, this # gives us error checking which doesn't happen in default mode. 
space_details = client.spaces.store(meta_props=metadata, background_mode=False) state = space_details['entity']['status'].get('state') self._logger.debug(f'Space created, with state={state}.') space_id = client.spaces.get_uid(space_details) self._logger.info(f'Space id: {space_id}') return space_id def _get_asset_details(self): """Return the list of all the data assets in the space.""" client = self._get_or_make_client() # This is the first version where data_assets.get_details() works assert(version_is_greater(client.version, "1.0.95.1")) results = client.data_assets.get_details()['resources'] return results def _find_asset_id_by_name(self, name): """Looks for a data asset with the given name, returns its id, or None""" assets = self._get_asset_details() for asset in assets: metadata = asset['metadata'] if metadata['name'] == name: return metadata['asset_id'] return None def create_asset(self, path, basename): """Create a data asset with the given name. A Watson Studio data asset is an entity that mimicks a file.""" client = self._get_or_make_client() asset_details = client.data_assets.create(basename, path) return asset_details['metadata']['guid'] def delete_asset(self, uid): """Delete an existing asset. Return True if ok, False if not.""" client = self._get_or_make_client() status = client.data_assets.delete(uid) return status == "SUCCESS" def _create_data_asset_if_necessary(self, path, basename, force): """Create a data asset (and upload file) if it doesn't exist already (or force is True).""" asset_to_delete = None self._logger.info(f'Checking whether a data asset named \'{basename}\' already exists.') data_asset_id = self._find_asset_id_by_name(basename) if data_asset_id: self._logger.debug(f'Yes, with id {data_asset_id}.') if not force: return data_asset_id self._logger.debug('Creating new asset with local content.') asset_to_delete = data_asset_id else: self._logger.debug('No, creating the data asset.') data_asset_id = self.create_asset(path, basename) self._logger.debug('Done.') if asset_to_delete: self._logger.debug('Deleting the old data asset.') if self.delete_asset(asset_to_delete): self._logger.debug('Done.') else: self._logger.warning('Could not delete pre-existing asset.') return data_asset_id
[ "os.remove", "base64.b64decode", "datetime.datetime.utcnow", "os.close", "glob.glob", "os.path.join", "pandas.DataFrame", "re.fullmatch", "datetime.datetime.fromisoformat", "tempfile.TemporaryDirectory", "re.search", "datetime.datetime.now", "ibm_watson_machine_learning.APIClient", "io.StringIO", "csv.writer", "os.path.basename", "packaging.version.parse", "time.sleep", "operator.attrgetter", "os.write", "tempfile.mkstemp", "base64.b64encode", "collections.namedtuple", "ast.literal_eval", "logging.getLogger" ]
[((2966, 2988), 'os.path.basename', 'os.path.basename', (['path'], {}), '(path)\n', (2982, 2988), False, 'import os\n'), ((3879, 3921), 'logging.getLogger', 'logging.getLogger', (['self.__class__.__name__'], {}), '(self.__class__.__name__)\n', (3896, 3921), False, 'import logging\n'), ((8960, 8982), 'packaging.version.parse', 'version.parse', (['current'], {}), '(current)\n', (8973, 8982), False, 'from packaging import version\n'), ((8986, 9008), 'packaging.version.parse', 'version.parse', (['minimum'], {}), '(minimum)\n', (8999, 9008), False, 'from packaging import version\n'), ((11417, 11459), 'logging.getLogger', 'logging.getLogger', (['self.__class__.__name__'], {}), '(self.__class__.__name__)\n', (11434, 11459), False, 'import logging\n'), ((13418, 13450), 'ibm_watson_machine_learning.APIClient', 'APIClient', (['self._wml_credentials'], {}), '(self._wml_credentials)\n', (13427, 13450), False, 'from ibm_watson_machine_learning import APIClient\n'), ((26168, 26193), 'base64.b64decode', 'base64.b64decode', (['content'], {}), '(content)\n', (26184, 26193), False, 'import base64\n'), ((34114, 34138), 'base64.b64decode', 'base64.b64decode', (['output'], {}), '(output)\n', (34130, 34138), False, 'import base64\n'), ((39442, 39464), 'base64.b64encode', 'base64.b64encode', (['data'], {}), '(data)\n', (39458, 39464), False, 'import base64\n'), ((47225, 47239), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (47237, 47239), False, 'from datetime import datetime\n'), ((52775, 52818), 'tempfile.mkstemp', 'tempfile.mkstemp', ([], {'suffix': '""".zip"""', 'text': '(False)'}), "(suffix='.zip', text=False)\n", (52791, 52818), False, 'import tempfile\n'), ((5110, 5140), 'ast.literal_eval', 'ast.literal_eval', (['wml_cred_str'], {}), '(wml_cred_str)\n', (5126, 5140), False, 'import ast\n'), ((20192, 20226), 're.search', 're.search', (['"""/v2/assets/(.*)"""', 'path'], {}), "('/v2/assets/(.*)', path)\n", (20201, 20226), False, 'import re\n'), ((25574, 25587), 'io.StringIO', 'io.StringIO', ([], {}), '()\n', (25585, 25587), False, 'import io\n'), ((25609, 25628), 'csv.writer', 'csv.writer', (['content'], {}), '(content)\n', (25619, 25628), False, 'import csv\n'), ((25871, 25941), 'pandas.DataFrame', 'pandas.DataFrame', (["output_data['values']"], {'columns': "output_data['fields']"}), "(output_data['values'], columns=output_data['fields'])\n", (25887, 25941), False, 'import pandas\n'), ((35147, 35178), 'datetime.datetime.fromisoformat', 'datetime.fromisoformat', (['created'], {}), '(created)\n', (35169, 35178), False, 'from datetime import datetime\n'), ((39920, 39971), 're.fullmatch', 're.fullmatch', (['"""do-(....*)_[0-9.]*"""', 'deployment_type'], {}), "('do-(....*)_[0-9.]*', deployment_type)\n", (39932, 39971), False, 'import re\n'), ((40671, 40722), 're.fullmatch', 're.fullmatch', (['"""do-....*_([0-9.]*)"""', 'deployment_type'], {}), "('do-....*_([0-9.]*)', deployment_type)\n", (40683, 40722), False, 'import re\n'), ((42833, 42919), 'collections.namedtuple', 'namedtuple', (['"""Job"""', "['status', 'id', 'created', 'names', 'type', 'version', 'size']"], {}), "('Job', ['status', 'id', 'created', 'names', 'type', 'version',\n 'size'])\n", (42843, 42919), False, 'from collections import namedtuple\n'), ((43605, 43620), 'glob.glob', 'glob.glob', (['path'], {}), '(path)\n', (43614, 43620), False, 'import glob\n'), ((52054, 52088), 're.fullmatch', 're.fullmatch', (['"""do_([0-9.]*)"""', 'name'], {}), "('do_([0-9.]*)', name)\n", (52066, 52088), False, 'import re\n'), ((53013, 53065), 
'base64.b64decode', 'base64.b64decode', (['"""UEsFBgAAAAAAAAAAAAAAAAAAAAAAAA=="""'], {}), "('UEsFBgAAAAAAAAAAAAAAAAAAAAAAAA==')\n", (53029, 53065), False, 'import base64\n'), ((53078, 53108), 'os.write', 'os.write', (['handle', 'file_content'], {}), '(handle, file_content)\n', (53086, 53108), False, 'import os\n'), ((53138, 53154), 'os.close', 'os.close', (['handle'], {}), '(handle)\n', (53146, 53154), False, 'import os\n'), ((53322, 53337), 'os.remove', 'os.remove', (['path'], {}), '(path)\n', (53331, 53337), False, 'import os\n'), ((36669, 36686), 'time.sleep', 'time.sleep', (['delay'], {}), '(delay)\n', (36679, 36686), False, 'import time\n'), ((43132, 43153), 'operator.attrgetter', 'attrgetter', (['"""created"""'], {}), "('created')\n", (43142, 43153), False, 'from operator import attrgetter\n'), ((9844, 9861), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (9859, 9861), False, 'from datetime import datetime\n'), ((17217, 17246), 'tempfile.TemporaryDirectory', 'tempfile.TemporaryDirectory', ([], {}), '()\n', (17244, 17246), False, 'import tempfile\n'), ((17296, 17346), 'os.path.join', 'os.path.join', (['temp_dir_name', 'f"""{asset_id}-log.txt"""'], {}), "(temp_dir_name, f'{asset_id}-log.txt')\n", (17308, 17346), False, 'import os\n'), ((47345, 47359), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (47357, 47359), False, 'from datetime import datetime\n')]
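The DOWMLLib client above is typically driven in four steps: configure, solve, wait, then collect results. The sketch below strings those calls together. It is a minimal example, not the library's documented entry point: the import path is assumed, the credentials and model file names are placeholders, and error handling is omitted.

# Minimal end-to-end driver for DOWMLLib (sketch; paths are placeholders).
from dowml.dowmllib import DOWMLLib  # assumed import path; adjust to your install

lib = DOWMLLib(wml_credentials_file='wml_credentials.txt')  # hypothetical file
lib.model_type = 'cplex'   # one of lib.MODEL_TYPES
lib.timelimit = 60         # seconds; becomes oaas.timeLimit in the payload

job_id = lib.solve('model.lp')  # space-separated paths; a '+' prefix forces re-upload
status, details = lib.wait_for_job_end(job_id, print_activity=True)
print('Job ended with status:', status)
log = lib.get_log(job_id)
if log:
    print(log)
lib.delete_job(job_id)  # also deletes the job's output data assets

Setting lib.inputs = 'inline' before solving embeds the file contents in the request instead of creating reusable data assets, which is the trade-off the solve() docstring describes.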
# -*- coding: utf-8 -*- __all__ = () # https://docs.python.org/3/howto/logging.html#configuring-logging-for-a-library import logging logging.getLogger(__name__).addHandler(logging.NullHandler())
[ "logging.getLogger", "logging.NullHandler" ]
[((173, 194), 'logging.NullHandler', 'logging.NullHandler', ([], {}), '()\n', (192, 194), False, 'import logging\n'), ((134, 161), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (151, 161), False, 'import logging\n')]
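The snippet above is the library-side half of the pattern linked in its comment: attaching a NullHandler means that importing the package into an application with no logging configuration stays silent instead of falling back to stderr, while leaving the logging policy entirely to the application. The application-side half is sketched below; 'example_package' is a placeholder for whatever __name__ resolves to in the library.

# Application-side sketch: opting in to a library's log records.
import logging

# One basicConfig call in the application is enough; the library's records
# propagate up to this root handler despite the library's own NullHandler.
logging.basicConfig(
    level=logging.DEBUG,
    format='%(asctime)s %(name)s %(levelname)s: %(message)s',
)

lib_log = logging.getLogger('example_package')  # placeholder library logger name
lib_log.debug('Visible once the application has configured handlers.')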
import pygame from pygame.locals import * pygame.init() screen = pygame.display.set_mode((600, 600), 0) pygame.display.set_caption('Simple Pygame Game') bee = pygame.image.load('bee1.png').convert_alpha() beeX = 0 beeY = 0 clock = pygame.time.Clock() loop = True while loop: for event in pygame.event.get(): if event.type == QUIT \ or (event.type == KEYDOWN and event.key == K_ESCAPE): loop = False keystate = pygame.key.get_pressed() if keystate[K_RIGHT]: beeX += 5 screen.fill((0,120,0)) screen.blit(bee, (beeX, beeY)) pygame.display.flip() clock.tick(60) pygame.quit()
[ "pygame.quit", "pygame.event.get", "pygame.display.set_mode", "pygame.init", "pygame.display.flip", "pygame.image.load", "pygame.display.set_caption", "pygame.time.Clock", "pygame.key.get_pressed" ]
[((43, 56), 'pygame.init', 'pygame.init', ([], {}), '()\n', (54, 56), False, 'import pygame\n'), ((66, 104), 'pygame.display.set_mode', 'pygame.display.set_mode', (['(600, 600)', '(0)'], {}), '((600, 600), 0)\n', (89, 104), False, 'import pygame\n'), ((105, 153), 'pygame.display.set_caption', 'pygame.display.set_caption', (['"""Simple Pygame Game"""'], {}), "('Simple Pygame Game')\n", (131, 153), False, 'import pygame\n'), ((232, 251), 'pygame.time.Clock', 'pygame.time.Clock', ([], {}), '()\n', (249, 251), False, 'import pygame\n'), ((631, 644), 'pygame.quit', 'pygame.quit', ([], {}), '()\n', (642, 644), False, 'import pygame\n'), ((294, 312), 'pygame.event.get', 'pygame.event.get', ([], {}), '()\n', (310, 312), False, 'import pygame\n'), ((453, 477), 'pygame.key.get_pressed', 'pygame.key.get_pressed', ([], {}), '()\n', (475, 477), False, 'import pygame\n'), ((589, 610), 'pygame.display.flip', 'pygame.display.flip', ([], {}), '()\n', (608, 610), False, 'import pygame\n'), ((160, 189), 'pygame.image.load', 'pygame.image.load', (['"""bee1.png"""'], {}), "('bee1.png')\n", (177, 189), False, 'import pygame\n')]
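The loop above only ever moves the sprite right. The variant below is a self-contained sketch: it swaps the 'bee1.png' load for a plain Surface so it runs without any image asset, reads all four arrow keys, and clamps the position to the 600x600 window; everything else mirrors the original calls.

# Four-direction movement with clamping; assumes the same 600x600 window.
import pygame
from pygame.locals import *

pygame.init()
screen = pygame.display.set_mode((600, 600), 0)
pygame.display.set_caption('Simple Pygame Game')
clock = pygame.time.Clock()
bee = pygame.Surface((40, 40))  # stand-in sprite instead of loading an image
bee.fill((255, 220, 0))
beeX, beeY = 0, 0

loop = True
while loop:
    for event in pygame.event.get():
        if event.type == QUIT \
        or (event.type == KEYDOWN and event.key == K_ESCAPE):
            loop = False
    keystate = pygame.key.get_pressed()
    # Key states are 0/1, so the subtraction yields -1, 0 or +1 per axis.
    beeX += 5 * (keystate[K_RIGHT] - keystate[K_LEFT])
    beeY += 5 * (keystate[K_DOWN] - keystate[K_UP])
    beeX = max(0, min(beeX, 600 - bee.get_width()))
    beeY = max(0, min(beeY, 600 - bee.get_height()))
    screen.fill((0, 120, 0))
    screen.blit(bee, (beeX, beeY))
    pygame.display.flip()
    clock.tick(60)
pygame.quit()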
#!/usr/bin/env python import sys import logging from configparser import ConfigParser from os.path import expanduser from socket import gethostname from time import sleep from eq3bt import Thermostat, Mode from bluepy.btle import BTLEException from mqttwrapper import run_script log = logging.getLogger("radcontrold") def callback(topic, payload, config): log.debug("%s %s", topic, payload) room = topic.split("/")[2] mode = { b'0': Mode.Closed, b'1': Mode.Open, }.get(payload) if mode is None: log.warning("Ignoring invalid payload on %s", topic) return addresses = config['radiators'].get(room, "") if not addresses: # Control message is for a radiator we're not responsible for. log.debug("No EQ3 addresses in config for %s", room) return success = True for address in addresses.split(","): for attempt in range(10): try: Thermostat(address).mode = mode log.info("Set %s in %s to %s", address, room, mode) break except BTLEException: log.warning("Couldn't set mode %s for %s in %s", mode, address, room) sleep(1) else: success = False # Only post acknowledgment to MQTT topic if all thermostats were controlled. if success: return [ ("{}/ack".format(topic), payload) ] def main(): formatter = "[%(asctime)s] %(name)s %(levelname)s - %(message)s" logging.basicConfig(level=logging.DEBUG, format=formatter) logging.getLogger('eq3bt').setLevel(logging.ERROR) hostname = gethostname().split(".")[0] config = ConfigParser() config.read(expanduser("~/.config/radcontrold/{}.ini".format(hostname))) if not config.has_section('radiators') or len(config['radiators']) == 0: log.warning("No config for {}, exiting.".format(hostname)) sys.exit(0) run_script(callback, broker=config['mqtt']['broker'], topics=['control/radiator/+/active'], config=config) if __name__ == '__main__': main()
[ "logging.basicConfig", "eq3bt.Thermostat", "logging.getLogger", "time.sleep", "socket.gethostname", "mqttwrapper.run_script", "configparser.ConfigParser", "sys.exit" ]
[((287, 319), 'logging.getLogger', 'logging.getLogger', (['"""radcontrold"""'], {}), "('radcontrold')\n", (304, 319), False, 'import logging\n'), ((1522, 1580), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.DEBUG', 'format': 'formatter'}), '(level=logging.DEBUG, format=formatter)\n', (1541, 1580), False, 'import logging\n'), ((1693, 1707), 'configparser.ConfigParser', 'ConfigParser', ([], {}), '()\n', (1705, 1707), False, 'from configparser import ConfigParser\n'), ((1954, 2065), 'mqttwrapper.run_script', 'run_script', (['callback'], {'broker': "config['mqtt']['broker']", 'topics': "['control/radiator/+/active']", 'config': 'config'}), "(callback, broker=config['mqtt']['broker'], topics=[\n 'control/radiator/+/active'], config=config)\n", (1964, 2065), False, 'from mqttwrapper import run_script\n'), ((1937, 1948), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (1945, 1948), False, 'import sys\n'), ((1214, 1222), 'time.sleep', 'sleep', (['(1)'], {}), '(1)\n', (1219, 1222), False, 'from time import sleep\n'), ((1585, 1611), 'logging.getLogger', 'logging.getLogger', (['"""eq3bt"""'], {}), "('eq3bt')\n", (1602, 1611), False, 'import logging\n'), ((1652, 1665), 'socket.gethostname', 'gethostname', ([], {}), '()\n', (1663, 1665), False, 'from socket import gethostname\n'), ((960, 979), 'eq3bt.Thermostat', 'Thermostat', (['address'], {}), '(address)\n', (970, 979), False, 'from eq3bt import Thermostat, Mode\n')]
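For the script above, a control message is a publish of 0 or 1 on control/radiator/<room>/active, and the INI maps each room to one or more EQ-3 MAC addresses. The sketch below exercises callback() directly, with a plain dict standing in for the parsed config; the broker URL and MAC address are placeholders, and a real call will try to reach the valve over Bluetooth, so without hardware it retries ten times per address and then returns None.

# Direct exercise of callback(); values mirror the INI layout main() reads.
config = {
    'mqtt': {'broker': 'mqtt://192.0.2.10'},           # placeholder broker URL
    'radiators': {'livingroom': '00:1A:22:0C:0D:0E'},  # placeholder EQ-3 address
}

# Payload b'1' maps to Mode.Open; an unknown payload is logged and ignored.
result = callback('control/radiator/livingroom/active', b'1', config)

# On success the function returns the acknowledgment message to publish:
# [('control/radiator/livingroom/active/ack', b'1')]
print(result)

In normal operation the same message arrives through run_script's subscription, e.g. published with: mosquitto_pub -t control/radiator/livingroom/active -m 1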
# -*- coding: utf-8 -*-
import os
import sys
import json
import time
import datetime
from django.core.management.base import BaseCommand
from fabric.api import *
from fabric.contrib.files import exists, append, contains
from django.conf import settings

username = 'root'
project_dir = os.getcwd()
project_name = project_dir.split('/')[-1]
remote_project_dir = '/var/opt/{}'.format(project_name)
env.user = username
env.connection_attempts = 10

# centos 7 - wkhtmltopdf
# yum -y install xorg-x11-server-Xvfb xorg-x11-fonts-Type1 xorg-x11-fonts-75dpi
# curl -O -L https://downloads.wkhtmltopdf.org/0.12/0.12.5/wkhtmltox-0.12.5-1.centos7.x86_64.rpm
# yum -y localinstall wkhtmltox-0.12.5-1.centos7.x86_64.rpm
# export QT_XKB_CONFIG_ROOT=/usr/share/X11/xkb
# ln -s /usr/local/bin/wkhtmltopdf /bin
# pip install pdfkit
# python
# import pdfkit
# pdfkit.from_string('Hello!', 'out.pdf')


class Command(BaseCommand):

    VERBOSE = False

    def add_arguments(self, parser):
        super(Command, self).add_arguments(parser)
        parser.add_argument('--push', action='store_true', dest='push', default=False,
                            help='Synchronize files only')
        parser.add_argument('--update', action='store_true', dest='update', default=False,
                            help='Synchronize files and update the requirements')
        parser.add_argument('--deploy', action='store_true', dest='deploy', default=False,
                            help='Deploy the application.')
        parser.add_argument('--create', action='store_true', dest='create', default=False,
                            help='Creates a new droplet and deploys the application.')
        parser.add_argument('--verbose', action='store_true', dest='verbose', default=False,
                            help='Make the output verbose')

    def handle(self, *args, **options):
        if 'help' not in options:
            if settings.DIGITAL_OCEAN_TOKEN:
                execute_task = deploy
                Command.VERBOSE = options.get('verbose', False)
                output['running'] = Command.VERBOSE
                output['warnings'] = Command.VERBOSE
                output['stdout'] = Command.VERBOSE
                output['stderr'] = Command.VERBOSE
                if options.get('push'):
                    execute_task = push
                elif options.get('update'):
                    execute_task = update
                if options.get('create') or settings.DIGITAL_OCEAN_SERVER:
                    if options.get('create'):
                        host = _create_droplet()
                        if host:
                            env.hosts = [host]
                            execute(execute_task, host=host)
                        else:
                            print('Sorry! The droplet could not be created.')
                    else:
                        host = _check_droplet()
                        if host:
                            env.hosts = [host]
                            execute(execute_task, host=host)
                        else:
                            print('Sorry! The droplet {} could not be found.'.format(settings.DIGITAL_OCEAN_SERVER))
                else:
                    print('Please, set the DIGITAL_OCEAN_SERVER variable in settings.py or execute the'
                          ' command with --create parameter to create a new droplet.')
            else:
                print('Please, set the DIGITAL_OCEAN_TOKEN variable in settings.py')


GIT_INGORE_FILE_CONTENT = '''*~
*.pyc
.svn
.DS_Store
.DS_Store?
._*
.idea/
djangoplus/.idea/*
.Spotlight-V100
.Trashes
ehthumbs.db
Thumbs.db
.project
.pydevproject
.settings/*
sqlite.db
mail/*
media/*
dist/
djangoplus.egg-info/
*/dist/
*/*egg-info/
geckodriver.log
videos/*
'''

NGINEX_FILE_CONTENT = '''server {{
    client_max_body_size 100M;
    listen {port};
    server_name {server_name};
    access_log /var/opt/{project_name}/logs/nginx_access.log;
    error_log /var/opt/{project_name}/logs/nginx_error.log;
    location /static {{
        alias /var/opt/{project_name}/static;
    }}
    location /media {{
        alias /var/opt/{project_name}/media;
    }}
    location / {{
        proxy_pass_header Server;
        proxy_set_header Host $http_host;
        proxy_redirect off;
        proxy_set_header X-Real_IP $remote_addr;
        proxy_set_header X-Scheme $scheme;
        proxy_connect_timeout 600s;
        proxy_send_timeout 600s;
        proxy_read_timeout 600s;
        proxy_pass http://localhost:{local_port}/;
    }}
}}
'''

SUPERVISOR_FILE_CONTENT = '''[program:{project_name}]
directory = /var/opt/{project_name}
user = www-data
command = /var/opt/{project_name}/gunicorn_start.sh
stdout_logfile = /var/opt/{project_name}/logs/supervisor_out.log
stderr_logfile = /var/opt/{project_name}/logs/supervisor_err.log
'''

GUNICORN_FILE_CONTENT = '''#!/bin/bash
set -e
source /var/opt/.virtualenvs/{project_name}/bin/activate
mkdir -p /var/opt/{project_name}/logs
cd /var/opt/{project_name}
export QT_QPA_PLATFORM='offscreen'
export QT_QPA_FONTDIR='/usr/share/fonts/truetype/dejavu/'
exec gunicorn {project_name}.wsgi:application -w 1 -b 127.0.0.1:{port} --timeout=600 --user=www-data --group=www-data --log-level=debug --log-file=/var/opt/{project_name}/logs/gunicorn.log 2>>/var/opt/{project_name}/logs/gunicorn.log
'''

LIMITS_FILE_CONTENT = '''
* soft nofile 65536
* hard nofile 65536
root soft nofile 65536
root hard nofile 65536
'''

BASHRC_FILE_CONTENT = '''
export WORKON_HOME=/var/opt/.virtualenvs
mkdir -p $WORKON_HOME
source /usr/local/bin/virtualenvwrapper.sh
'''


def _debug(s):
    if Command.VERBOSE:
        print('[{}] {}\n'.format(datetime.datetime.now(), s))


def _available_port():
    nginex_dir = '/etc/nginx/sites-enabled'
    port = 8000
    with cd(nginex_dir):
        files = run('ls').split()
        files.remove('default')
        if project_name in files:
            files = [project_name]
        if files:
            command = "grep localhost {} | grep -o '[0-9]*'".format(' '.join(files))
            ports = run(command).split()
            ports.sort()
            port = ports[-1]
        if project_name not in files:
            port = int(port) + 1
    _debug('Returning port {}!'.format(port))
    return int(port)


def _check_local_keys():
    local_home_dir = local('echo $HOME', capture=True)
    local_ssh_dir = os.path.join(local_home_dir, '.ssh')
    local_public_key_path = os.path.join(local_ssh_dir, 'id_rsa.pub')
    if not os.path.exists(local_ssh_dir):
        _debug('Creating dir {}...'.format(local_ssh_dir))
        local('mkdir {}'.format(local_ssh_dir))
    if not os.path.exists(local_public_key_path):
        local("ssh-keygen -f {}/id_rsa -t rsa -N ''".format(local_ssh_dir))
    key = open(local_public_key_path, 'r').read().strip()
    _debug('Checking if private key was uploaded to digital ocean...')
    url = 'https://api.digitalocean.com/v2/account/keys'
    command = '''curl -X GET -H 'Content-Type: application/json' -H 'Authorization: Bearer {}' "{}"'''.format(settings.DIGITAL_OCEAN_TOKEN, url)
    response = local(command, capture=True)
    # print response
    if key not in response:
        _debug('Uploading private key to digital ocean...')
        command = '''curl -X POST -H 'Content-Type: application/json' -H 'Authorization: Bearer {}' -d '{{"name":"{}","public_key":"{}"}}' "{}"'''.format(settings.DIGITAL_OCEAN_TOKEN, 'Default', key, url)
        response = local(command,
capture=True) # print response def _check_remote_keys(): local_home_dir = local('echo $HOME', capture=True) local_ssh_dir = os.path.join(local_home_dir, '.ssh') local_public_key_path = os.path.join(local_ssh_dir, 'id_rsa.pub') local_private_key_path = os.path.join(local_ssh_dir, 'id_rsa') remote_home_dir = run('echo $HOME') remote_ssh_dir = os.path.join(remote_home_dir, '.ssh') remote_public_key_path = os.path.join(remote_ssh_dir, 'id_rsa.pub') remote_private_key_path = os.path.join(remote_ssh_dir, 'id_rsa') remote_private_known_hosts_path = os.path.join(remote_ssh_dir, 'known_hosts') if not exists(remote_ssh_dir): _debug('Creading remote dir {}...'.format(remote_ssh_dir)) run('mkdir -p {}'.format(remote_ssh_dir)) _debug('Creating empty file {}...'.format(remote_private_known_hosts_path)) run('touch {}'.format(remote_private_known_hosts_path)) with cd(remote_ssh_dir): public_key = open(local_public_key_path, 'r').read() private_key = open(local_private_key_path, 'r').read() _debug('Checking if public key is in file {}...'.format(remote_public_key_path)) if not contains(remote_public_key_path, public_key): _debug('Appending public key in file {}...'.format(remote_public_key_path)) append(remote_public_key_path, public_key) _debug('Checking if private key is in file {}...'.format(remote_private_key_path)) if not contains(remote_private_key_path, private_key): _debug('Appending private key in file {}...'.format(remote_private_key_path)) append(remote_private_key_path, private_key) run('chmod 644 {}'.format(remote_public_key_path)) run('chmod 600 {}'.format(remote_private_key_path)) _debug('Checking if {} is in file {}...'.format(env.hosts[0], remote_private_known_hosts_path)) if not contains(remote_private_known_hosts_path, env.hosts[0]): _debug('Appending {} in file {}...'.format(env.hosts[0], remote_private_known_hosts_path)) run('ssh-keyscan {} >> {}'.format(env.hosts[0], remote_private_known_hosts_path)) def _check_repository(): with cd('/home'): git_dir = '/home/git' if not exists(git_dir): run('adduser --disabled-password --gecos "" git') run('mkdir /home/git/.ssh && chmod 700 /home/git/.ssh') run('touch /home/git/.ssh/authorized_keys && chmod 600 /home/git/.ssh/authorized_keys') run('cat /root/.ssh/authorized_keys >> /home/git/.ssh/authorized_keys') run('chown -R git.git /home/git/.ssh/') project_git_dir = '/home/git/{}.git'.format(project_name) if not exists(project_git_dir): run('mkdir {}'.format(project_git_dir)) run('cd {} && git init --bare'.format(project_git_dir)) run('chown -R git.git {}'.format(project_git_dir)) return 'git@{}:{}.git'.format(env.hosts[0], project_name) def _setup_local_repository(): _debug('Checking if local project is a git project...') if not os.path.exists(os.path.join(project_dir, '.git')): with cd(project_dir): _debug('Making local project a git project...') repository_url = _check_repository() local('git init') local('git remote add origin "{}"'.format(repository_url)) local('echo "..." 
> README.md') local('echo "{}" > .gitignore'.format(GIT_INGORE_FILE_CONTENT)) local('git config --global user.email "<EMAIL>"') local('git config --global user.name "user"') def _setup_remote_repository(): _debug('Checking if the project was cloned in remote server...') if not exists(remote_project_dir): with cd('/var/opt'): _debug('Cloning project in remote server...') repository_url = _check_repository() run('git clone {} {}'.format(repository_url, project_name)) run('chown -R www-data.www-data {}'.format(project_name)) _debug('Updating project in remote server...') with cd(remote_project_dir): run('git pull origin master') def _push_local_changes(): _debug('Checking if project has local changes...') now = datetime.datetime.now().strftime("%Y%m%d %H:%M:%S") with cd(project_dir): if 'nothing to commit' not in local('git status', capture=True): _debug('Committing local changes...') files = [] for file_name in local('ls', capture=True).split(): if file_name not in GIT_INGORE_FILE_CONTENT or file_name == 'fabfile.py': files.append(file_name) files.append('.gitignore') for pattern in NGINEX_FILE_CONTENT.split(): if pattern in files: files.remove(pattern) local('git add {}'.format(' '.join(files))) local("git commit -m '{}'".format(now)) _debug('Uploading local changes...') local('git push origin master') def _setup_remote_env(): _debug('Checking if the virtualenv dir was created in remote server...') virtual_env_dir = '/var/opt/.virtualenvs' if not exists(virtual_env_dir): _debug('Creating dir {}'.format(virtual_env_dir)) run('mkdir -p {}'.format(virtual_env_dir)) project_env_dir = os.path.join(virtual_env_dir, project_name) _debug('Checking if virtualenv for the project was created...') if not exists(project_env_dir): with shell_env(WORKON_HOME=virtual_env_dir): _debug('Creating virtual env {}'.format(project_name)) run('source /usr/local/bin/virtualenvwrapper.sh && mkvirtualenv --python=/usr/bin/python3 {}'.format(project_name)) def _setup_remote_project(): with cd(remote_project_dir): _debug('Checking project requirements...') if exists('requirements.txt'): virtual_env_dir = '/var/opt/.virtualenvs' with shell_env(WORKON_HOME=virtual_env_dir): _debug('Installing/Updating project requirements...') run('source /usr/local/bin/virtualenvwrapper.sh && workon {} && pip3 install --upgrade pip'.format(project_name)) run('source /usr/local/bin/virtualenvwrapper.sh && workon {} && pip3 install -U -r requirements.txt'.format(project_name)) _debug('Checking if necessary dirs (logs, media and static) were created...') run('mkdir -p logs') run('mkdir -p static') run('mkdir -p media') _debug('Granting access to www-data...') run('chown -R www-data.www-data .') def _check_domain(): if settings.DIGITAL_OCEAN_DOMAIN: url = 'https://api.digitalocean.com/v2/domains' command = '''curl -X GET -H 'Content-Type: application/json' -H 'Authorization: Bearer {}' "{}/{}"'''.format(settings.DIGITAL_OCEAN_TOKEN, url, settings.DIGITAL_OCEAN_DOMAIN) _debug('Checking if domain {} was created...'.format(settings.DIGITAL_OCEAN_DOMAIN)) data = json.loads(local(command, capture=True)) if data.get('id', None) == 'not_found': _debug('Creating domain {}...'.format(settings.DIGITAL_OCEAN_DOMAIN)) ip_address = env.hosts[0] command = '''curl -X POST -H 'Content-Type: application/json' -H 'Authorization: Bearer {}' -d '{{"name":"{}","ip_address":"{}"}}' "{}"'''.format(settings.DIGITAL_OCEAN_TOKEN, settings.DIGITAL_OCEAN_DOMAIN, ip_address, url) data = json.loads(local(command, capture=True)) ip_address = None try: ip_address =
local('dig {} a +short'.format(settings.DIGITAL_OCEAN_DOMAIN), capture=True).strip() except Exception as e: print(e) if ip_address != env.hosts[0]: _debug('The domain is not activated yet. The ip {} is going to be used for the deploy.'.format(env.hosts[0])) return None return settings.DIGITAL_OCEAN_DOMAIN def _print_remote_url(): file_path = '/etc/nginx/sites-enabled/{}'.format(project_name) local_file_path = '/tmp/nginx.tmp' get(file_path, local_file_path) file_content = open(local_file_path).read() server_name = None port = None for line in file_content.split('\n'): if 'server_name ' in line: server_name = line.strip().split()[1].replace(';', '') elif 'listen ' in line: port = line.strip().split()[1].replace(';', '') url = 'http://{}'.format(server_name) if int(port) != 80: url = '{}:{}'.format(url, port) print(('\n\n\nURL: {}\n\n'.format(url))) def _setup_nginx_file(): file_path = '/etc/nginx/sites-enabled/{}'.format(project_name) _debug('Checking nginx file {}...'.format(file_path)) checked_domain = _check_domain() if exists(file_path): local_file_path = '/tmp/nginx.tmp' get(file_path, local_file_path) file_content = open(local_file_path, 'r').read() if checked_domain and checked_domain not in file_content: content = [] for line in file_content.split('\n'): if 'server_name ' in line: line = line.replace('server_name', 'server_name {}'.format(checked_domain)) elif 'listen ' in line: line = ' listen 80;' content.append(line) file_descriptor = open('/tmp/nginx.tmp', 'w') file_descriptor.write('\n'.join(content)) put(file_descriptor, file_path) _debug('Restarting nginx...') run('/etc/init.d/nginx restart') else: _debug('Creating nginx file {}...'.format(file_path)) local_port = _available_port() if checked_domain: port = 80 server_name = checked_domain else: port = local_port + 1000 server_name = env.hosts[0] text = NGINEX_FILE_CONTENT.format(project_name=project_name, server_name=server_name, port=port, local_port=local_port) append(file_path, text) _debug('Nginx configured with {}:{}'.format(server_name, port)) _debug('Restarting nginx...') run('/etc/init.d/nginx restart') def _setup_supervisor_file(): file_path = '/etc/supervisor/conf.d/{}.conf'.format(project_name) _debug('Checking supervisor file {}...'.format(file_path)) if not exists(file_path): _debug('Creating supervisor file {}...'.format(file_path)) text = SUPERVISOR_FILE_CONTENT.format(project_name=project_name) append(file_path, text) _debug('Reloading supervisorctl...') run('supervisorctl reload') def _setup_gunicorn_file(): file_path = '/var/opt/{}/gunicorn_start.sh'.format(project_name) _debug('Checking gunicorn file {}...'.format(file_path)) if not exists(file_path): _debug('Creating gunicorn file {}'.format(file_path)) port = _available_port() text = GUNICORN_FILE_CONTENT.format(project_name=project_name, port=port) append(file_path, text) run('chmod a+x {}'.format(file_path)) def _setup_postgres(): file_path = '/etc/postgresql/9.6/main/pg_hba.conf' if not exists(file_path): run('apt-get -y install postgresql postgresql-contrib') run('cp {} /tmp'.format(file_path)) run('echo "local all postgres trust\\nhost all ' 'postgres 127.0.0.1/32 trust\\nhost all postgres ::1/128 ' ' trust" > {}'.format(file_path)) run('/etc/init.d/postgresql restart') def _setup_remote_webserver(): _setup_nginx_file() _setup_supervisor_file() _setup_gunicorn_file() def _reload_remote_application(): _debug('Updating project in remote server...') with cd(remote_project_dir): virtual_env_dir = '/var/opt/.virtualenvs' with
shell_env(WORKON_HOME=virtual_env_dir): run('source /usr/local/bin/virtualenvwrapper.sh && workon {} && python manage.py sync'.format(project_name)) run('chown -R www-data.www-data .') run('chmod a+w *.db') run('ls -l') _debug('Restarting supervisorctl...') run('supervisorctl restart {}'.format(project_name)) def _delete_remote_project(): _debug('Deleting remote project...') if exists(remote_project_dir): run('rm -r {}'.format(remote_project_dir)) def _delete_remote_env(): _debug('Deleting remote env...') run('source /usr/local/bin/virtualenvwrapper.sh && rmvirtualenv {}'.format(project_name)) def _delete_domain(): url = 'https://api.digitalocean.com/v2/domains' if settings.DIGITAL_OCEAN_DOMAIN: _debug('Deleting domain {}...'.format(settings.DIGITAL_OCEAN_DOMAIN)) command = '''curl -X DELETE -H 'Content-Type: application/json' -H 'Authorization: Bearer {}' "{}/{}"'''.format(settings.DIGITAL_OCEAN_TOKEN, url, settings.DIGITAL_OCEAN_DOMAIN) local(command) def _delete_repository(): project_git_dir = '/home/git/{}.git'.format(project_name) if exists(project_git_dir): run('rm -r {}'.format(project_git_dir)) def _delete_local_repository(): _debug('Deleting local repository...') with cd(project_dir): local('rm -rf .git') def _delete_nginx_file(): _debug('Deleting nginx file...') file_path = '/etc/nginx/sites-enabled/{}'.format(project_name) if exists(file_path): run('rm {}'.format(file_path)) def _delete_supervisor_file(): _debug('Deleting supervisor file...') file_path = '/etc/supervisor/conf.d/{}.conf'.format(project_name) if exists(file_path): run('rm {}'.format(file_path)) def _reload_remote_webserver(): _debug('Reloading supervisorctl...') run('supervisorctl reload') _debug('Reloading nginx...') run('/etc/init.d/nginx restart') _debug('Starting supervisor...') run('service supervisor start') def _configure_crontab(): _debug('Configuring crontab...') output = run("crontab -l") line = '0 * * * * /var/opt/.virtualenvs/{}/bin/python /var/opt/{}/manage.py backup >/tmp/cron.log 2>&1'.format( project_name, project_name) if line not in output: run('crontab -l | {{ cat; echo "{}"; }} | crontab -'.format(line)) def _check_droplet(): _check_local_keys() url = 'https://api.digitalocean.com/v2/droplets/' command = '''curl -X GET -H 'Content-Type: application/json' -H 'Authorization: Bearer {}' "{}"'''.format(settings.DIGITAL_OCEAN_TOKEN, url) _debug('Checking if droplet exists...') response = json.loads(local(command, capture=True)) if 'droplets' in response: for droplet in response['droplets']: ip_address = droplet['networks']['v4'][0]['ip_address'] if droplet['name'] == project_name or ip_address == settings.DIGITAL_OCEAN_SERVER: _debug('Droplet found with IP {}'.format(ip_address)) local_home_dir = local('echo $HOME', capture=True) local_known_hosts_path = os.path.join(local_home_dir, '.ssh/known_hosts') _debug('Checking if file {} exists...'.format(local_known_hosts_path)) if not os.path.exists(local_known_hosts_path): _debug('Creating empty file {}...'.format(local_known_hosts_path)) local('touch {}'.format(local_known_hosts_path)) local_known_hosts_file_content = open(local_known_hosts_path, 'r').read() if ip_address not in local_known_hosts_file_content: _debug('Registering {} as known host...'.format(ip_address)) time.sleep(5) local('ssh-keyscan -T 15 {} >> {}'.format(ip_address, local_known_hosts_path)) if settings.DIGITAL_OCEAN_SERVER not in local_known_hosts_file_content: _debug('Registering {} as known host...'.format(settings.DIGITAL_OCEAN_SERVER)) local('ssh-keyscan {} >>
{}'.format(settings.DIGITAL_OCEAN_SERVER, local_known_hosts_path)) return ip_address _debug('No droplet could be found for the project') else: raise Exception(response) def _create_droplet(): # curl -X GET --silent "https://api.digitalocean.com/v2/images?per_page=999" -H "Authorization: Bearer XXXXXXX" _check_local_keys() if settings.DIGITAL_OCEAN_TOKEN: url = 'https://api.digitalocean.com/v2/account/keys' _debug('Getting installed keys at digital ocean...') command = '''curl -X GET -H 'Content-Type: application/json' -H 'Authorization: Bearer {}' "{}"'''.format(settings.DIGITAL_OCEAN_TOKEN, url) response = json.loads(local(command, capture=True)) # print response ssh_keys = [] for ssh_key in response['ssh_keys']: ssh_keys.append(ssh_key['id']) _debug('Creating droplet...') url = 'https://api.digitalocean.com/v2/droplets/' command = '''curl -X POST -H 'Content-Type: application/json' -H 'Authorization: Bearer {}' -d '{{"name":"{}","region":"{}","size":"{}","image":"{}", "ssh_keys":{}}}' "{}"'''.format(settings.DIGITAL_OCEAN_TOKEN, project_name, 'nyc3', '512mb', 'debian-9-x64', ssh_keys, url) response = json.loads(local(command, capture=True)) droplet_id = response['droplet']['id'] time.sleep(15) url = 'https://api.digitalocean.com/v2/droplets/{}/'.format(droplet_id) command = '''curl -X GET -H 'Content-Type: application/json' -H 'Authorization: Bearer {}' "{}"'''.format(settings.DIGITAL_OCEAN_TOKEN, url) response = json.loads(local(command, capture=True)) ip_address = response['droplet']['networks']['v4'][0]['ip_address'] _debug('Droplet created with IP {}!'.format(ip_address)) _update_settings_file(ip_address) return _check_droplet() _debug('Please set the DIGITAL_OCEAN_TOKEN value in settings.py file') sys.exit() def _execute_aptget(): with cd('/'): if not exists('/swap.img'): run('apt-get update') run('apt-get -y install python-pip') run('pip install virtualenv virtualenvwrapper') run('apt-get -y install python3 python3-pip build-essential python3-dev git nginx supervisor libncurses5-dev') run('apt-get -y install vim') run('apt-get -y install libjpeg62-turbo-dev libfreetype6-dev libtiff5-dev liblcms2-dev libwebp-dev tk8.6-dev libjpeg-dev') run('apt-get -y install wkhtmltopdf xvfb') run('apt-get -y install htop') if not contains('/etc/security/limits.conf', '65536'): # print LIMITS_FILE_CONTENT append('/etc/security/limits.conf', LIMITS_FILE_CONTENT) run('pip3 install --upgrade pip') if not contains('/root/.bashrc', 'WORKON_HOME'): # print BASHRC_FILE_CONTENT append('/root/.bashrc', BASHRC_FILE_CONTENT) if not exists('/swap.img'): run('lsb_release -a') run('dd if=/dev/zero of=/swap.img bs=1024k count=2000') run('mkswap /swap.img') run('swapon /swap.img') run('echo "/swap.img none swap sw 0 0" >> /etc/fstab') def _update_settings_file(ip): _debug('Updating settings.py file with {} for DIGITAL_OCEAN_SERVER'.format(ip)) settings_file_path = os.path.join(settings.BASE_DIR, '{}/settings.py'.format(project_name)) content = [] settings_file = open(settings_file_path) lines = settings_file.read().split('\n') settings_file.close() for line in lines: if 'DIGITAL_OCEAN_SERVER' in line: line = 'DIGITAL_OCEAN_SERVER = \'{}\''.format(ip) content.append(line) content_str = '\n'.join(content) print(content_str) settings_file = open(settings_file_path, 'w') settings_file.write(content_str) settings_file.close() def backupdb(): local_home_dir = local('echo $HOME', capture=True) backup_dir = os.path.join(local_home_dir, 'backup') if not os.path.exists(backup_dir): local('mkdir -p {}'.format(backup_dir))
with cd('/var/opt'): for entry in run('ls').split(): file_name = '/var/opt/{}/sqlite.db'.format(entry) backup_file_name = os.path.join(backup_dir, '{}.db'.format(entry)) if exists(file_name): command = 'scp {}@{}:{} {}'.format(username, env.hosts[0], file_name, backup_file_name) local(command) def install_docker(): run('apt update') run('apt install -y apt-transport-https ca-certificates curl gnupg2 software-properties-common') run('curl -fsSL https://download.docker.com/linux/debian/gpg | apt-key add -') run('add-apt-repository "deb [arch=amd64] https://download.docker.com/linux/debian $(lsb_release -cs) stable"') run('apt update') run('apt-cache policy docker-ce') run('apt install -y docker-ce') def deploy(): _execute_aptget() _setup_postgres() _check_remote_keys() _setup_local_repository() _push_local_changes() _setup_remote_env() _setup_remote_repository() _setup_remote_project() _setup_remote_webserver() _reload_remote_application() _print_remote_url() def update(): _push_local_changes() _setup_remote_repository() _setup_remote_project() _reload_remote_application() _setup_nginx_file() _print_remote_url() def push(): _push_local_changes() _setup_remote_repository() _reload_remote_application() _print_remote_url() def undeploy(): _delete_remote_project() _delete_domain() _delete_repository() _delete_local_repository() _delete_nginx_file() _delete_supervisor_file() _reload_remote_webserver() _delete_remote_env()
[ "fabric.contrib.files.exists", "fabric.contrib.files.contains", "os.getcwd", "os.path.exists", "datetime.datetime.now", "time.sleep", "fabric.contrib.files.append", "os.path.join", "sys.exit" ]
[((289, 300), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (298, 300), False, 'import os\n'), ((6371, 6407), 'os.path.join', 'os.path.join', (['local_home_dir', '""".ssh"""'], {}), "(local_home_dir, '.ssh')\n", (6383, 6407), False, 'import os\n'), ((6436, 6477), 'os.path.join', 'os.path.join', (['local_ssh_dir', '"""id_rsa.pub"""'], {}), "(local_ssh_dir, 'id_rsa.pub')\n", (6448, 6477), False, 'import os\n'), ((7627, 7663), 'os.path.join', 'os.path.join', (['local_home_dir', '""".ssh"""'], {}), "(local_home_dir, '.ssh')\n", (7639, 7663), False, 'import os\n'), ((7692, 7733), 'os.path.join', 'os.path.join', (['local_ssh_dir', '"""id_rsa.pub"""'], {}), "(local_ssh_dir, 'id_rsa.pub')\n", (7704, 7733), False, 'import os\n'), ((7763, 7800), 'os.path.join', 'os.path.join', (['local_ssh_dir', '"""id_rsa"""'], {}), "(local_ssh_dir, 'id_rsa')\n", (7775, 7800), False, 'import os\n'), ((7863, 7900), 'os.path.join', 'os.path.join', (['remote_home_dir', '""".ssh"""'], {}), "(remote_home_dir, '.ssh')\n", (7875, 7900), False, 'import os\n'), ((7930, 7972), 'os.path.join', 'os.path.join', (['remote_ssh_dir', '"""id_rsa.pub"""'], {}), "(remote_ssh_dir, 'id_rsa.pub')\n", (7942, 7972), False, 'import os\n'), ((8003, 8041), 'os.path.join', 'os.path.join', (['remote_ssh_dir', '"""id_rsa"""'], {}), "(remote_ssh_dir, 'id_rsa')\n", (8015, 8041), False, 'import os\n'), ((8080, 8123), 'os.path.join', 'os.path.join', (['remote_ssh_dir', '"""known_hosts"""'], {}), "(remote_ssh_dir, 'known_hosts')\n", (8092, 8123), False, 'import os\n'), ((12869, 12912), 'os.path.join', 'os.path.join', (['virtual_env_dir', 'project_name'], {}), '(virtual_env_dir, project_name)\n', (12881, 12912), False, 'import os\n'), ((16321, 16338), 'fabric.contrib.files.exists', 'exists', (['file_path'], {}), '(file_path)\n', (16327, 16338), False, 'from fabric.contrib.files import exists, append, contains\n'), ((19934, 19960), 'fabric.contrib.files.exists', 'exists', (['remote_project_dir'], {}), '(remote_project_dir)\n', (19940, 19960), False, 'from fabric.contrib.files import exists, append, contains\n'), ((20670, 20693), 'fabric.contrib.files.exists', 'exists', (['project_git_dir'], {}), '(project_git_dir)\n', (20676, 20693), False, 'from fabric.contrib.files import exists, append, contains\n'), ((21015, 21032), 'fabric.contrib.files.exists', 'exists', (['file_path'], {}), '(file_path)\n', (21021, 21032), False, 'from fabric.contrib.files import exists, append, contains\n'), ((21224, 21241), 'fabric.contrib.files.exists', 'exists', (['file_path'], {}), '(file_path)\n', (21230, 21241), False, 'from fabric.contrib.files import exists, append, contains\n'), ((25584, 25594), 'sys.exit', 'sys.exit', ([], {}), '()\n', (25592, 25594), False, 'import sys\n'), ((27699, 27737), 'os.path.join', 'os.path.join', (['local_home_dir', '"""backup"""'], {}), "(local_home_dir, 'backup')\n", (27711, 27737), False, 'import os\n'), ((6489, 6518), 'os.path.exists', 'os.path.exists', (['local_ssh_dir'], {}), '(local_ssh_dir)\n', (6503, 6518), False, 'import os\n'), ((8135, 8157), 'fabric.contrib.files.exists', 'exists', (['remote_ssh_dir'], {}), '(remote_ssh_dir)\n', (8141, 8157), False, 'from fabric.contrib.files import exists, append, contains\n'), ((11241, 11267), 'fabric.contrib.files.exists', 'exists', (['remote_project_dir'], {}), '(remote_project_dir)\n', (11247, 11267), False, 'from fabric.contrib.files import exists, append, contains\n'), ((12713, 12736), 'fabric.contrib.files.exists', 'exists', (['virtual_env_dir'], {}), '(virtual_env_dir)\n', (12719, 12736), 
False, 'from fabric.contrib.files import exists, append, contains\n'), ((12992, 13015), 'fabric.contrib.files.exists', 'exists', (['project_env_dir'], {}), '(project_env_dir)\n', (12998, 13015), False, 'from fabric.contrib.files import exists, append, contains\n'), ((13390, 13416), 'fabric.contrib.files.exists', 'exists', (['"""requirements.txt"""'], {}), "('requirements.txt')\n", (13396, 13416), False, 'from fabric.contrib.files import exists, append, contains\n'), ((17551, 17574), 'fabric.contrib.files.append', 'append', (['file_path', 'text'], {}), '(file_path, text)\n', (17557, 17574), False, 'from fabric.contrib.files import exists, append, contains\n'), ((17903, 17920), 'fabric.contrib.files.exists', 'exists', (['file_path'], {}), '(file_path)\n', (17909, 17920), False, 'from fabric.contrib.files import exists, append, contains\n'), ((18070, 18093), 'fabric.contrib.files.append', 'append', (['file_path', 'text'], {}), '(file_path, text)\n', (18076, 18093), False, 'from fabric.contrib.files import exists, append, contains\n'), ((18347, 18364), 'fabric.contrib.files.exists', 'exists', (['file_path'], {}), '(file_path)\n', (18353, 18364), False, 'from fabric.contrib.files import exists, append, contains\n'), ((18551, 18574), 'fabric.contrib.files.append', 'append', (['file_path', 'text'], {}), '(file_path, text)\n', (18557, 18574), False, 'from fabric.contrib.files import exists, append, contains\n'), ((18713, 18730), 'fabric.contrib.files.exists', 'exists', (['file_path'], {}), '(file_path)\n', (18719, 18730), False, 'from fabric.contrib.files import exists, append, contains\n'), ((24983, 24997), 'time.sleep', 'time.sleep', (['(15)'], {}), '(15)\n', (24993, 24997), False, 'import time\n'), ((27749, 27775), 'os.path.exists', 'os.path.exists', (['backup_dir'], {}), '(backup_dir)\n', (27763, 27775), False, 'import os\n'), ((6642, 6679), 'os.path.exists', 'os.path.exists', (['local_public_key_path'], {}), '(local_public_key_path)\n', (6656, 6679), False, 'import os\n'), ((8682, 8726), 'fabric.contrib.files.contains', 'contains', (['remote_public_key_path', 'public_key'], {}), '(remote_public_key_path, public_key)\n', (8690, 8726), False, 'from fabric.contrib.files import exists, append, contains\n'), ((8828, 8870), 'fabric.contrib.files.append', 'append', (['remote_public_key_path', 'public_key'], {}), '(remote_public_key_path, public_key)\n', (8834, 8870), False, 'from fabric.contrib.files import exists, append, contains\n'), ((8977, 9023), 'fabric.contrib.files.contains', 'contains', (['remote_private_key_path', 'private_key'], {}), '(remote_private_key_path, private_key)\n', (8985, 9023), False, 'from fabric.contrib.files import exists, append, contains\n'), ((9127, 9171), 'fabric.contrib.files.append', 'append', (['remote_private_key_path', 'private_key'], {}), '(remote_private_key_path, private_key)\n', (9133, 9171), False, 'from fabric.contrib.files import exists, append, contains\n'), ((9410, 9465), 'fabric.contrib.files.contains', 'contains', (['remote_private_known_hosts_path', 'env.hosts[0]'], {}), '(remote_private_known_hosts_path, env.hosts[0])\n', (9418, 9465), False, 'from fabric.contrib.files import exists, append, contains\n'), ((9758, 9773), 'fabric.contrib.files.exists', 'exists', (['git_dir'], {}), '(git_dir)\n', (9764, 9773), False, 'from fabric.contrib.files import exists, append, contains\n'), ((10222, 10245), 'fabric.contrib.files.exists', 'exists', (['project_git_dir'], {}), '(project_git_dir)\n', (10228, 10245), False, 'from fabric.contrib.files import exists, 
append, contains\n'), ((10611, 10644), 'os.path.join', 'os.path.join', (['project_dir', '""".git"""'], {}), "(project_dir, '.git')\n", (10623, 10644), False, 'import os\n'), ((11763, 11786), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (11784, 11786), False, 'import datetime\n'), ((25653, 25672), 'fabric.contrib.files.exists', 'exists', (['"""/swap.img"""'], {}), "('/swap.img')\n", (25659, 25672), False, 'from fabric.contrib.files import exists, append, contains\n'), ((28046, 28063), 'fabric.contrib.files.exists', 'exists', (['file_name'], {}), '(file_name)\n', (28052, 28063), False, 'from fabric.contrib.files import exists, append, contains\n'), ((5650, 5673), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (5671, 5673), False, 'import datetime\n'), ((22641, 22689), 'os.path.join', 'os.path.join', (['local_home_dir', '""".ssh/known_hosts"""'], {}), "(local_home_dir, '.ssh/known_hosts')\n", (22653, 22689), False, 'import os\n'), ((26237, 26283), 'fabric.contrib.files.contains', 'contains', (['"""/etc/security/limits.conf"""', '"""65536"""'], {}), "('/etc/security/limits.conf', '65536')\n", (26245, 26283), False, 'from fabric.contrib.files import exists, append, contains\n'), ((26345, 26401), 'fabric.contrib.files.append', 'append', (['"""/etc/security/limits.conf"""', 'LIMITS_FILE_CONTENT'], {}), "('/etc/security/limits.conf', LIMITS_FILE_CONTENT)\n", (26351, 26401), False, 'from fabric.contrib.files import exists, append, contains\n'), ((26469, 26509), 'fabric.contrib.files.contains', 'contains', (['"""/root/.bashrc"""', '"""WORKON_HOME"""'], {}), "('/root/.bashrc', 'WORKON_HOME')\n", (26477, 26509), False, 'from fabric.contrib.files import exists, append, contains\n'), ((26571, 26615), 'fabric.contrib.files.append', 'append', (['"""/root/.bashrc"""', 'BASHRC_FILE_CONTENT'], {}), "('/root/.bashrc', BASHRC_FILE_CONTENT)\n", (26577, 26615), False, 'from fabric.contrib.files import exists, append, contains\n'), ((26636, 26655), 'fabric.contrib.files.exists', 'exists', (['"""/swap.img"""'], {}), "('/swap.img')\n", (26642, 26655), False, 'from fabric.contrib.files import exists, append, contains\n'), ((22800, 22838), 'os.path.exists', 'os.path.exists', (['local_known_hosts_path'], {}), '(local_known_hosts_path)\n', (22814, 22838), False, 'import os\n'), ((23256, 23269), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (23266, 23269), False, 'import time\n')]
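# ---------------------------------------------------------------------------
# A hedged usage note for the fabfile fragment above. Per its extract_api
# metadata it relies on `from fabric.contrib.files import exists, append,
# contains`; it is assumed (not shown in the fragment) that `cd`, `env`,
# `get`, `local`, `put`, `run` and `shell_env` come from `fabric.api` and
# that `project_name`, `project_dir`, `remote_project_dir`, `username` and
# Django's `settings` are defined earlier in the file. Typical Fabric 1.x
# invocation from the project directory (hypothetical host):
#
#   fab -H root@203.0.113.10 deploy    # full first-time deploy
#   fab -H root@203.0.113.10 update    # push changes and reinstall requirements
#   fab -H root@203.0.113.10 push      # push changes only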
# -*- coding: utf-8 -*- """ streaming decorators module. """ import charma.streaming.services as streaming_services def stream(*args, **kwargs): """ decorator to register a stream provider. :param object args: stream provider class constructor arguments. :param object kwargs: stream provider class constructor keyword arguments. :raises InvalidStreamProviderTypeError: invalid stream provider type error. :raises DuplicateStreamProviderError: duplicate stream provider error. :returns: stream provider class. :rtype: type """ def decorator(cls): """ decorates the given class and registers an instance of it into available stream providers. :param type cls: stream provider class. :returns: stream provider class. :rtype: type """ instance = cls(*args, **kwargs) streaming_services.register_stream_provider(instance, **kwargs) return cls return decorator
[ "charma.streaming.services.register_stream_provider" ]
[((886, 949), 'charma.streaming.services.register_stream_provider', 'streaming_services.register_stream_provider', (['instance'], {}), '(instance, **kwargs)\n', (929, 949), True, 'import charma.streaming.services as streaming_services\n')]
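# A minimal usage sketch for the @stream decorator above; the provider class
# and its constructor arguments are hypothetical. The decorator instantiates
# the class once and registers the instance through
# streaming_services.register_stream_provider().
@stream('youtube', category='video')  # hypothetical constructor args
class YouTubeStreamProvider:
    def __init__(self, name, category=None):
        self.name = name
        self.category = category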
from django.db import models # Create your models here. Database stuff class Post(models.Model): #creates a table called posts title = models.CharField(max_length=140) #Syntax: name = datatype(constraints) body = models.TextField() date = models.DateTimeField() def __str__(self): return self.title #returns the post's title
[ "django.db.models.CharField", "django.db.models.TextField", "django.db.models.DateTimeField" ]
[((142, 174), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(140)'}), '(max_length=140)\n', (158, 174), False, 'from django.db import models\n'), ((224, 242), 'django.db.models.TextField', 'models.TextField', ([], {}), '()\n', (240, 242), False, 'from django.db import models\n'), ((254, 276), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (274, 276), False, 'from django.db import models\n')]
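# A minimal usage sketch for the Post model above (e.g. in the Django shell),
# assuming migrations for this app have been applied.
from django.utils import timezone

post = Post(title='Hello', body='First post body', date=timezone.now())
post.save()
latest = Post.objects.order_by('-date').first()  # most recent post
print(latest)  # __str__ returns the title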
import pytest from ..mutation import Mutation from ..objecttype import ObjectType from ..schema import Schema from ..scalars import String from ..dynamic import Dynamic def test_generate_mutation_no_args(): class MyMutation(Mutation): '''Documentation''' @classmethod def mutate(cls, *args, **kwargs): pass assert issubclass(MyMutation, ObjectType) assert MyMutation._meta.name == "MyMutation" assert MyMutation._meta.description == "Documentation" assert MyMutation.Field().resolver == MyMutation.mutate def test_generate_mutation_with_meta(): class MyMutation(Mutation): class Meta: name = 'MyOtherMutation' description = 'Documentation' @classmethod def mutate(cls, *args, **kwargs): pass assert MyMutation._meta.name == "MyOtherMutation" assert MyMutation._meta.description == "Documentation" assert MyMutation.Field().resolver == MyMutation.mutate def test_mutation_raises_exception_if_no_mutate(): with pytest.raises(AssertionError) as excinfo: class MyMutation(Mutation): pass assert "All mutations must define a mutate method in it" == str(excinfo.value) def test_mutation_execution(): class CreateUser(Mutation): class Input: name = String() dynamic = Dynamic(lambda: String()) dynamic_none = Dynamic(lambda: None) name = String() dynamic = Dynamic(lambda: String()) def mutate(self, args, context, info): name = args.get('name') dynamic = args.get('dynamic') return CreateUser(name=name, dynamic=dynamic) class Query(ObjectType): a = String() class MyMutation(ObjectType): create_user = CreateUser.Field() schema = Schema(query=Query, mutation=MyMutation) result = schema.execute(''' mutation mymutation { createUser(name:"Peter", dynamic: "dynamic") { name dynamic } } ''') assert not result.errors assert result.data == { 'createUser': { 'name': 'Peter', 'dynamic': 'dynamic', } }
[ "pytest.raises" ]
[((1056, 1085), 'pytest.raises', 'pytest.raises', (['AssertionError'], {}), '(AssertionError)\n', (1069, 1085), False, 'import pytest\n')]
import matplotlib.pyplot as plt import numpy as np import healpy as hp map_I = hp.read_map('data/COM_CMB_IQU-smica_1024_R2.02_full.fits') hp.mollview(map_I, norm='hist', min=-0.1, max=0.1, xsize=2000) plt.show() map_Q = hp.read_map('data/COM_CMB_IQU-smica_1024_R2.02_full.fits', field=1) hp.mollview(map_Q, norm='hist', min=-0.01, max=0.01, xsize=2000) plt.show() map_U = hp.read_map('data/COM_CMB_IQU-smica_1024_R2.02_full.fits', field=2) hp.mollview(map_U, norm='hist', min=-0.01, max=0.01, xsize=2000) plt.show() cl_I = hp.anafast(map_I, lmax=2048) plt.show() cl_Q = hp.anafast(map_Q, lmax=2048) plt.show() cl_U = hp.anafast(map_U, lmax=2048) plt.show() ell = np.arange(len(cl_I)) plt.figure(figsize=(5, 5)) plt.plot(ell, ell * (ell + 1) * cl_I) plt.xlabel('ell') plt.ylabel('ell(ell+1)cl_I') plt.grid() plt.show() plt.figure(figsize=(5, 5)) plt.plot(ell, ell * (ell + 1) * cl_Q) plt.xlabel('ell') plt.ylabel('ell(ell+1)cl_Q') plt.grid() plt.show() plt.figure(figsize=(5, 5)) plt.plot(ell, ell * (ell + 1) * cl_U) plt.xlabel('ell') plt.ylabel('ell(ell+1)cl_U') plt.grid() plt.show()
[ "matplotlib.pyplot.show", "healpy.mollview", "matplotlib.pyplot.plot", "matplotlib.pyplot.ylabel", "matplotlib.pyplot.figure", "healpy.read_map", "healpy.anafast", "matplotlib.pyplot.xlabel", "matplotlib.pyplot.grid" ]
[((81, 139), 'healpy.read_map', 'hp.read_map', (['"""data/COM_CMB_IQU-smica_1024_R2.02_full.fits"""'], {}), "('data/COM_CMB_IQU-smica_1024_R2.02_full.fits')\n", (92, 139), True, 'import healpy as hp\n'), ((140, 202), 'healpy.mollview', 'hp.mollview', (['map_I'], {'norm': '"""hist"""', 'min': '(-0.1)', 'max': '(0.1)', 'xsize': '(2000)'}), "(map_I, norm='hist', min=-0.1, max=0.1, xsize=2000)\n", (151, 202), True, 'import healpy as hp\n'), ((203, 213), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (211, 213), True, 'import matplotlib.pyplot as plt\n'), ((223, 290), 'healpy.read_map', 'hp.read_map', (['"""data/COM_CMB_IQU-smica_1024_R2.02_full.fits"""'], {'field': '(1)'}), "('data/COM_CMB_IQU-smica_1024_R2.02_full.fits', field=1)\n", (234, 290), True, 'import healpy as hp\n'), ((291, 355), 'healpy.mollview', 'hp.mollview', (['map_Q'], {'norm': '"""hist"""', 'min': '(-0.01)', 'max': '(0.01)', 'xsize': '(2000)'}), "(map_Q, norm='hist', min=-0.01, max=0.01, xsize=2000)\n", (302, 355), True, 'import healpy as hp\n'), ((356, 366), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (364, 366), True, 'import matplotlib.pyplot as plt\n'), ((376, 443), 'healpy.read_map', 'hp.read_map', (['"""data/COM_CMB_IQU-smica_1024_R2.02_full.fits"""'], {'field': '(2)'}), "('data/COM_CMB_IQU-smica_1024_R2.02_full.fits', field=2)\n", (387, 443), True, 'import healpy as hp\n'), ((444, 508), 'healpy.mollview', 'hp.mollview', (['map_U'], {'norm': '"""hist"""', 'min': '(-0.01)', 'max': '(0.01)', 'xsize': '(2000)'}), "(map_U, norm='hist', min=-0.01, max=0.01, xsize=2000)\n", (455, 508), True, 'import healpy as hp\n'), ((509, 519), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (517, 519), True, 'import matplotlib.pyplot as plt\n'), ((528, 556), 'healpy.anafast', 'hp.anafast', (['map_I'], {'lmax': '(2048)'}), '(map_I, lmax=2048)\n', (538, 556), True, 'import healpy as hp\n'), ((557, 567), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (565, 567), True, 'import matplotlib.pyplot as plt\n'), ((576, 604), 'healpy.anafast', 'hp.anafast', (['map_Q'], {'lmax': '(2048)'}), '(map_Q, lmax=2048)\n', (586, 604), True, 'import healpy as hp\n'), ((605, 615), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (613, 615), True, 'import matplotlib.pyplot as plt\n'), ((624, 652), 'healpy.anafast', 'hp.anafast', (['map_U'], {'lmax': '(2048)'}), '(map_U, lmax=2048)\n', (634, 652), True, 'import healpy as hp\n'), ((653, 663), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (661, 663), True, 'import matplotlib.pyplot as plt\n'), ((693, 719), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(5, 5)'}), '(figsize=(5, 5))\n', (703, 719), True, 'import matplotlib.pyplot as plt\n'), ((720, 757), 'matplotlib.pyplot.plot', 'plt.plot', (['ell', '(ell * (ell + 1) * cl_I)'], {}), '(ell, ell * (ell + 1) * cl_I)\n', (728, 757), True, 'import matplotlib.pyplot as plt\n'), ((758, 775), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""ell"""'], {}), "('ell')\n", (768, 775), True, 'import matplotlib.pyplot as plt\n'), ((776, 804), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""ell(ell+1)cl_I"""'], {}), "('ell(ell+1)cl_I')\n", (786, 804), True, 'import matplotlib.pyplot as plt\n'), ((805, 815), 'matplotlib.pyplot.grid', 'plt.grid', ([], {}), '()\n', (813, 815), True, 'import matplotlib.pyplot as plt\n'), ((816, 826), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (824, 826), True, 'import matplotlib.pyplot as plt\n'), ((828, 854), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(5, 
5)'}), '(figsize=(5, 5))\n', (838, 854), True, 'import matplotlib.pyplot as plt\n'), ((855, 892), 'matplotlib.pyplot.plot', 'plt.plot', (['ell', '(ell * (ell + 1) * cl_Q)'], {}), '(ell, ell * (ell + 1) * cl_Q)\n', (863, 892), True, 'import matplotlib.pyplot as plt\n'), ((893, 910), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""ell"""'], {}), "('ell')\n", (903, 910), True, 'import matplotlib.pyplot as plt\n'), ((911, 939), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""ell(ell+1)cl_Q"""'], {}), "('ell(ell+1)cl_Q')\n", (921, 939), True, 'import matplotlib.pyplot as plt\n'), ((940, 950), 'matplotlib.pyplot.grid', 'plt.grid', ([], {}), '()\n', (948, 950), True, 'import matplotlib.pyplot as plt\n'), ((951, 961), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (959, 961), True, 'import matplotlib.pyplot as plt\n'), ((963, 989), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(5, 5)'}), '(figsize=(5, 5))\n', (973, 989), True, 'import matplotlib.pyplot as plt\n'), ((990, 1027), 'matplotlib.pyplot.plot', 'plt.plot', (['ell', '(ell * (ell + 1) * cl_U)'], {}), '(ell, ell * (ell + 1) * cl_U)\n', (998, 1027), True, 'import matplotlib.pyplot as plt\n'), ((1028, 1045), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""ell"""'], {}), "('ell')\n", (1038, 1045), True, 'import matplotlib.pyplot as plt\n'), ((1046, 1074), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""ell(ell+1)cl_U"""'], {}), "('ell(ell+1)cl_U')\n", (1056, 1074), True, 'import matplotlib.pyplot as plt\n'), ((1075, 1085), 'matplotlib.pyplot.grid', 'plt.grid', ([], {}), '()\n', (1083, 1085), True, 'import matplotlib.pyplot as plt\n'), ((1086, 1096), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1094, 1096), True, 'import matplotlib.pyplot as plt\n')]
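# The plots above use ell*(ell+1)*C_ell; the conventional CMB band power is
# D_ell = ell*(ell+1)*C_ell / (2*pi). A minimal sketch of that variant for the
# temperature spectrum, reusing `ell` and `cl_I` computed above:
D_ell_I = ell * (ell + 1) * cl_I / (2 * np.pi)
plt.figure(figsize=(5, 5))
plt.plot(ell, D_ell_I)
plt.xlabel('ell')
plt.ylabel('ell(ell+1)cl_I/2pi')
plt.grid()
plt.show()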
import os import cv2 import random import logging from tkinter import Tk from car import Car, CarSpecs from HTCSPythonUtil import config if os.name == "nt": # https://github.com/opencv/opencv/issues/11360 import ctypes # Set DPI Awareness (Windows 10 and 8) _ = ctypes.windll.shcore.SetProcessDpiAwareness(2) # the argument is the awareness level, which can be 0, 1 or 2: # for 1-to-1 pixel control I seem to need it to be non-zero (I'm using level 2) logger = logging.getLogger(__name__) tk = Tk() window_width = tk.winfo_screenwidth() black_region_height = 100 # image resources WINDOW_NAME = "Highway Traffic Control System Visualization" im_bigmap = cv2.imread(os.path.dirname(os.path.abspath(__file__)) + "/res/map.png") im_minimap = cv2.imread(os.path.dirname(os.path.abspath(__file__)) + "/res/minimap.png") red_car_straight = cv2.imread(os.path.dirname(os.path.abspath(__file__)) + "/res/car1.png") red_car_left = cv2.imread(os.path.dirname(os.path.abspath(__file__)) + "/res/car1left.png") red_car_right = cv2.imread(os.path.dirname(os.path.abspath(__file__)) + "/res/car1right.png") blue_car_straight = cv2.imread(os.path.dirname(os.path.abspath(__file__)) + "/res/car2.png") blue_car_left = cv2.imread(os.path.dirname(os.path.abspath(__file__)) + "/res/car2left.png") blue_car_right = cv2.imread(os.path.dirname(os.path.abspath(__file__)) + "/res/car2right.png") truck = cv2.imread(os.path.dirname(os.path.abspath(__file__)) + "/res/truck.png") explosion = cv2.imread(os.path.dirname(os.path.abspath(__file__)) + "/res/explosion.png") title = cv2.imread(os.path.dirname(os.path.abspath(__file__)) + "/res/title.png") try: _ = [im_bigmap.shape[0], im_minimap.shape[0], red_car_straight.shape[0], red_car_left.shape[0], red_car_right.shape[0], blue_car_straight.shape[0], blue_car_left.shape[0], blue_car_right.shape[0], truck.shape[0], explosion.shape[0], title.shape[0]] except AttributeError: logger.critical("Some image resources were not found.") # to fit screen im_minimap = cv2.resize(im_minimap, (window_width, im_minimap.shape[0])) title = cv2.resize(title, (window_width, black_region_height)) logger.info(f"Window width will be set to {window_width} pixels.") # measure minimap_length_pixel = im_minimap.shape[1] minimap_height_pixel = im_minimap.shape[0] bigmap_length_pixel = im_bigmap.shape[1] # fix parameters region_width_meter_start = 200 map_height_meter = 16 map_length_meter = config["position_bound"] center_fast_lane_mini = 32 center_slow_lane_mini = 80 center_merge_lane_mini = 130 detail_height = int(window_width * map_height_meter / region_width_meter_start) y_stretch = detail_height / im_bigmap.shape[0] center_fast_lane = 42.5 * y_stretch center_slow_lane = 103.5 * y_stretch center_merge_lane = 164 * y_stretch car_height = int((center_slow_lane - center_fast_lane) * 0.8) x_scale_minimap = minimap_length_pixel / map_length_meter x_scale_bigmap = bigmap_length_pixel / map_length_meter class CarImage(Car): def __init__(self, car_id, specs: CarSpecs, state): # Create Car super().__init__(car_id, specs, state) if specs.size > 7.5: self.straight = truck self.left = truck self.right = truck self.color = (11, 195, 255) self.text_color = self.color # Red or Blue elif bool(random.getrandbits(1)): self.straight = red_car_straight self.left = red_car_left self.right = red_car_right self.color = (0, 0, 255) # BGR self.text_color = self.color else: self.straight = blue_car_straight self.left = blue_car_left self.right = blue_car_right self.color = (255, 0, 0) # BGR self.text_color = (253, 177, 0) # BGR # At least 
we set the height, but width will be dependent on the region's width in meter self.straight = cv2.resize(self.straight, (self.straight.shape[0], car_height)) self.left = cv2.resize(self.left, (self.left.shape[0], car_height)) self.right = cv2.resize(self.right, (self.right.shape[0], car_height)) self.exploded = False def __str__(self): return super().__str__() def __repr__(self): return super().__repr__() def get_point_on_minimap(self): cy = 0 if self.lane == 0: cy = center_merge_lane_mini elif self.lane == 1: cy = int((center_merge_lane_mini + center_slow_lane_mini) / 2) elif self.lane == 2: cy = center_slow_lane_mini elif self.lane in [3, 4]: cy = int((center_slow_lane_mini + center_fast_lane_mini) / 2) elif self.lane == 5: cy = center_fast_lane_mini cx = int(self.distance_taken * x_scale_minimap) return cx, cy def is_in_region(self, region_offset, region_width): return self.distance_taken > region_offset and \ self.distance_taken - self.specs.size < region_offset + region_width def get_y_slice(self): start = 0 if self.lane == 0: start = int(center_merge_lane - self.straight.shape[0] / 2) elif self.lane == 1: start = int((center_merge_lane + center_slow_lane) / 2 - self.straight.shape[0] / 2) elif self.lane == 2: start = int(center_slow_lane - self.straight.shape[0] / 2) elif self.lane in [3, 4]: start = int((center_slow_lane + center_fast_lane) / 2 - self.straight.shape[0] / 2) elif self.lane == 5: start = int(center_fast_lane - self.straight.shape[0] / 2) return slice(start, start + self.straight.shape[0]) def width_pixel(self, region_width_meter): return int(self.specs.size / region_width_meter * window_width) def get_x_slice_and_image(self, offset_region, width_region): w_px_car = self.width_pixel(width_region) on_vis_slice_x_end = int((self.distance_taken - offset_region) / width_region * window_width) on_vis_slice_x_start = on_vis_slice_x_end - w_px_car on_car_slice_x_start = 0 on_car_slice_x_end = w_px_car if on_vis_slice_x_end > window_width: on_car_slice_x_end -= on_vis_slice_x_end - window_width on_vis_slice_x_end = window_width elif on_vis_slice_x_start < 0: on_car_slice_x_start -= on_vis_slice_x_start on_vis_slice_x_start = 0 car_x_slice = slice(on_car_slice_x_start, on_car_slice_x_end) return slice(on_vis_slice_x_start, on_vis_slice_x_end), self.get_image(w_px_car, car_x_slice) def get_image(self, car_width_pixel, x_slice): if self.distance_taken > map_length_meter - 30 or self.exploded: im = explosion elif self.lane in [1, 3]: im = self.left elif self.lane == 4: im = self.right else: im = self.straight return cv2.resize(im, (car_width_pixel, car_height))[:, x_slice, :]
[ "os.path.abspath", "ctypes.windll.shcore.SetProcessDpiAwareness", "logging.getLogger", "random.getrandbits", "tkinter.Tk", "cv2.resize" ]
[((489, 516), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (506, 516), False, 'import logging\n'), ((522, 526), 'tkinter.Tk', 'Tk', ([], {}), '()\n', (524, 526), False, 'from tkinter import Tk\n'), ((2044, 2103), 'cv2.resize', 'cv2.resize', (['im_minimap', '(window_width, im_minimap.shape[0])'], {}), '(im_minimap, (window_width, im_minimap.shape[0]))\n', (2054, 2103), False, 'import cv2\n'), ((2112, 2166), 'cv2.resize', 'cv2.resize', (['title', '(window_width, black_region_height)'], {}), '(title, (window_width, black_region_height))\n', (2122, 2166), False, 'import cv2\n'), ((280, 326), 'ctypes.windll.shcore.SetProcessDpiAwareness', 'ctypes.windll.shcore.SetProcessDpiAwareness', (['(2)'], {}), '(2)\n', (323, 326), False, 'import ctypes\n'), ((3953, 4016), 'cv2.resize', 'cv2.resize', (['self.straight', '(self.straight.shape[0], car_height)'], {}), '(self.straight, (self.straight.shape[0], car_height))\n', (3963, 4016), False, 'import cv2\n'), ((4037, 4092), 'cv2.resize', 'cv2.resize', (['self.left', '(self.left.shape[0], car_height)'], {}), '(self.left, (self.left.shape[0], car_height))\n', (4047, 4092), False, 'import cv2\n'), ((4114, 4171), 'cv2.resize', 'cv2.resize', (['self.right', '(self.right.shape[0], car_height)'], {}), '(self.right, (self.right.shape[0], car_height))\n', (4124, 4171), False, 'import cv2\n'), ((709, 734), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (724, 734), False, 'import os\n'), ((794, 819), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (809, 819), False, 'import os\n'), ((889, 914), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (904, 914), False, 'import os\n'), ((977, 1002), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (992, 1002), False, 'import os\n'), ((1070, 1095), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (1085, 1095), False, 'import os\n'), ((1168, 1193), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (1183, 1193), False, 'import os\n'), ((1257, 1282), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (1272, 1282), False, 'import os\n'), ((1351, 1376), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (1366, 1376), False, 'import os\n'), ((1437, 1462), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (1452, 1462), False, 'import os\n'), ((1523, 1548), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (1538, 1548), False, 'import os\n'), ((1609, 1634), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (1624, 1634), False, 'import os\n'), ((6989, 7034), 'cv2.resize', 'cv2.resize', (['im', '(car_width_pixel, car_height)'], {}), '(im, (car_width_pixel, car_height))\n', (6999, 7034), False, 'import cv2\n'), ((3372, 3393), 'random.getrandbits', 'random.getrandbits', (['(1)'], {}), '(1)\n', (3390, 3393), False, 'import random\n')]
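# A hedged sketch of how the slices computed by CarImage above would typically
# be composited onto a road image each frame; the `frame`, `cars`,
# `region_offset` and `region_width` arguments and the loop itself are
# assumptions -- the project's actual rendering loop lives elsewhere.
def draw_cars(frame, cars, region_offset, region_width):
    for car in cars:
        if not car.is_in_region(region_offset, region_width):
            continue
        x_slice, car_img = car.get_x_slice_and_image(region_offset, region_width)
        frame[car.get_y_slice(), x_slice] = car_img  # paste the car pixels
    return frame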
import emoji from config import config_email def enviar_email(): envio_email = emoji.emojize('E-mail sent successfully! :wink:', use_aliases=True) try: config_email() except Exception as e: print(f'Error calling the e-mail dispatch function! {e}') else: print(f'{envio_email}') enviar_email()
[ "config.config_email", "emoji.emojize" ]
[((85, 154), 'emoji.emojize', 'emoji.emojize', (['"""E-mail sent successfully! :wink:"""'], {'use_aliases': '(True)'}), "('E-mail sent successfully! :wink:', use_aliases=True)\n", (98, 154), False, 'import emoji\n'), ((173, 187), 'config.config_email', 'config_email', ([], {}), '()\n', (185, 187), False, 'from config import config_email\n')]
import show_omnibot import numpy as np import nengo model = nengo.Network() with model: bot = show_omnibot.OmniBotNetwork( show_omnibot.connection.Serial('/dev/ttyUSB2', baud=2000000), motor=True, arm=True, retina=False, # freqs=[100, 200, 300], wheel=True, servo=True, load=True, msg_period=0.1) motor = nengo.Node([0, 0, 0]) arm = nengo.Node([0]*5) nengo.Connection(motor, bot.motor) nengo.Connection(arm, bot.arm)
[ "show_omnibot.connection.Serial", "nengo.Network", "nengo.Connection", "nengo.Node" ]
[((62, 77), 'nengo.Network', 'nengo.Network', ([], {}), '()\n', (75, 77), False, 'import nengo\n'), ((341, 362), 'nengo.Node', 'nengo.Node', (['[0, 0, 0]'], {}), '([0, 0, 0])\n', (351, 362), False, 'import nengo\n'), ((373, 392), 'nengo.Node', 'nengo.Node', (['([0] * 5)'], {}), '([0] * 5)\n', (383, 392), False, 'import nengo\n'), ((395, 429), 'nengo.Connection', 'nengo.Connection', (['motor', 'bot.motor'], {}), '(motor, bot.motor)\n', (411, 429), False, 'import nengo\n'), ((434, 464), 'nengo.Connection', 'nengo.Connection', (['arm', 'bot.arm'], {}), '(arm, bot.arm)\n', (450, 464), False, 'import nengo\n'), ((137, 197), 'show_omnibot.connection.Serial', 'show_omnibot.connection.Serial', (['"""/dev/ttyUSB2"""'], {'baud': '(2000000)'}), "('/dev/ttyUSB2', baud=2000000)\n", (167, 197), False, 'import show_omnibot\n')]
from rest_framework.renderers import JSONRenderer, BaseRenderer from rest_framework_csv.renderers import CSVRenderer as BaseCSVRenderer class TextRenderer(BaseRenderer): media_type = "text/plain" format = "text" format_description = "text" def render(self, data, *args, **kwargs): if isinstance(data, dict): # For instance used if the API returns an exception return "\n".join(f"{k}: {v}" for k, v in data.items()) return str(data) class PrettyJsonRenderer(JSONRenderer): format = "json" format_description = "JSON" def render(self, data, *args, **kwargs): if str(data.__class__) == "<class 'pandas.core.frame.DataFrame'>": data = { "data": [ {k: v for k, v in zip(data.columns, row)} for row in data.values ] } return super().render(data, *args, **kwargs) def get_indent(self, accepted_media_type, renderer_context): return 4 class JsonStatRenderer(PrettyJsonRenderer): format = "json-stat" format_description = "JSON-stat" def render(self, data, *args, **kwargs): # We handle pandas DF but we do not want to had dependencies on it hence # It's up to the user to provide access to pyjstat if str(data.__class__) == "<class 'pandas.core.frame.DataFrame'>": from pyjstat import pyjstat import pandas def flatten_metrics_data_frame(data): json_stat_data = [] # noqa: B301 for _index, row in data.iterrows(): # noinspection PyCompatibility # IDEs detect row.iteritems as a call # to dict.iteritems which is not supported in py3, # whereas it is pandas.Series.iteritems() group_data = { key: value for key, value in row.iteritems() # noqa: B301 if key != "metrics" } for metric, metric_value in row.metrics.items(): metric_data = {"metric": metric, "value": metric_value} metric_data.update(group_data) json_stat_data.append(metric_data) return pandas.DataFrame(json_stat_data) flatten_data_frame = flatten_metrics_data_frame(data) if len(flatten_data_frame.index) > 0: data = {"data": pyjstat.Dataset.read(flatten_data_frame)} else: data = {"data": []} return super().render(data, *args, **kwargs) class CSVRenderer(BaseCSVRenderer): def __init__(self): pass def render(self, data, *args, **kwargs): if str(data.__class__) == "<class 'pandas.core.frame.DataFrame'>": data = [{k: v for k, v in zip(data.columns, row)} for row in data.values] return super().render(data, *args, **kwargs) media_type = "text" format = "csv" format_description = "CSV" def renderer(*renderers): def add_renderers(view): view.renderer_classes = renderers return view return add_renderers
[ "pandas.DataFrame", "pyjstat.pyjstat.Dataset.read" ]
[((2335, 2367), 'pandas.DataFrame', 'pandas.DataFrame', (['json_stat_data'], {}), '(json_stat_data)\n', (2351, 2367), False, 'import pandas\n'), ((2517, 2557), 'pyjstat.pyjstat.Dataset.read', 'pyjstat.Dataset.read', (['flatten_data_frame'], {}), '(flatten_data_frame)\n', (2537, 2557), False, 'from pyjstat import pyjstat\n')]
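# A minimal usage sketch of the renderer(...) helper above on a Django REST
# framework view; `StatusView` is hypothetical.
from rest_framework.response import Response
from rest_framework.views import APIView

@renderer(PrettyJsonRenderer, CSVRenderer, TextRenderer)
class StatusView(APIView):
    def get(self, request, format=None):
        # content negotiation picks one of the three renderers
        return Response({'status': 'ok'})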
import networkx as nx import numpy as np def gen_graph(graph_type, n, mean_deg): """Generates and returns a nx.DiGraph and its adjacency matrix. Nodes are randomly permuted. Arguments: graph_type (string): type of graph, either 'erdos-renyi' or 'scale-free' (anything else raises NotImplementedError) n (int): number of nodes mean_deg (float): average degree of nodes """ # beta is the unpermutated adjacency matrix if graph_type == "erdos-renyi": beta = gen_random_graph(n, mean_deg) elif graph_type == "scale-free": import igraph as ig G_ig = ig.Graph.Barabasi(n=n, m=int(round(mean_deg / 2)), directed=True) beta = np.array(G_ig.get_adjacency().data) else: raise NotImplementedError # Randomly permute nodes perm_mat = np.random.permutation(np.eye(n)) adj_matrix = perm_mat.T @ beta @ perm_mat # Sanity check, is the graph acyclic? assert np.trace(np.linalg.matrix_power(np.eye(n) + adj_matrix, n)) == n # Create and return directed graph graph = nx.from_numpy_array(adj_matrix, create_using=nx.DiGraph) return graph, adj_matrix def gen_random_graph(n, mean_deg): """Returns the adjacency matrix of an Erdos Renyi DAG Args: n (int): number of nodes mean_deg (float): average degree of a node """ assert mean_deg <= n - 1 prob_one_edge = mean_deg / (n - 1) beta = np.triu(np.random.random((n, n)) < prob_one_edge, k=1) return np.float32(beta) def simulate_parameter(adj_matrix, w_ranges): """Simulate SEM parameters for a DAG. Args: adj_matrix (np.array): [n, n] binary adj matrix of DAG w_ranges (tuple): disjoint weight ranges Returns: weighted_adj_matrix (np.array): [n, n] weighted adj matrix of DAG """ weighted_adj_matrix = np.zeros(adj_matrix.shape) range_choice = np.random.randint(len(w_ranges), size=adj_matrix.shape) # which range for i, (low, high) in enumerate(w_ranges): weights = np.random.uniform(low=low, high=high, size=adj_matrix.shape) weighted_adj_matrix += adj_matrix * (range_choice == i) * weights return weighted_adj_matrix def sample_lin_scms(graph_type, noise_type, adj_matrix, nb_samples=1000, weighted=False, w_ranges=((-2.0, -.5), (.5, 2.0))): """ Given a directed graph and a particular noise type, generates edge weights and samples Args: graph_type (string): type of graph noise_type (string): one of gaussian, exp, gumbel, type of random noise adj_matrix (np.array): [n, n] binary adjacency matrix nb_samples (int): number of samples to generate weighted (bool): if True, sample edge weights uniformly from w_ranges; if False, all edges keep weight 1 w_ranges (tuple): negative and positive ranges to sample edge weights (if weighted) Returns: X (np.array): [nb_samples, n] sample matrix beta (np.array): [n, n] weighted adjacency matrix sigma_n (np.array): [n, n] sample covariance matrix """ n = adj_matrix.shape[0] # Sample edge weights if weighted: beta = simulate_parameter(adj_matrix, w_ranges) else: beta = adj_matrix aux_inv = np.linalg.inv(np.eye(n) - beta) # Sample noise if noise_type == "gaussian": epsilon = np.random.normal(size=(nb_samples, n)) elif noise_type == "exp": epsilon = np.random.exponential(size=(nb_samples, n)) elif noise_type == "gumbel": epsilon = np.random.gumbel(size=(nb_samples, n)) else: raise NotImplementedError X = epsilon @ aux_inv sigma_n = np.cov(X.T, bias=True) return X, beta, sigma_n
[ "numpy.random.gumbel", "numpy.random.uniform", "numpy.float32", "numpy.random.exponential", "numpy.zeros", "networkx.from_numpy_array", "numpy.random.random", "numpy.random.normal", "numpy.eye", "numpy.cov" ]
[((1068, 1124), 'networkx.from_numpy_array', 'nx.from_numpy_array', (['adj_matrix'], {'create_using': 'nx.DiGraph'}), '(adj_matrix, create_using=nx.DiGraph)\n', (1087, 1124), True, 'import networkx as nx\n'), ((1497, 1513), 'numpy.float32', 'np.float32', (['beta'], {}), '(beta)\n', (1507, 1513), True, 'import numpy as np\n'), ((1852, 1878), 'numpy.zeros', 'np.zeros', (['adj_matrix.shape'], {}), '(adj_matrix.shape)\n', (1860, 1878), True, 'import numpy as np\n'), ((3666, 3688), 'numpy.cov', 'np.cov', (['X.T'], {'bias': '(True)'}), '(X.T, bias=True)\n', (3672, 3688), True, 'import numpy as np\n'), ((840, 849), 'numpy.eye', 'np.eye', (['n'], {}), '(n)\n', (846, 849), True, 'import numpy as np\n'), ((2034, 2094), 'numpy.random.uniform', 'np.random.uniform', ([], {'low': 'low', 'high': 'high', 'size': 'adj_matrix.shape'}), '(low=low, high=high, size=adj_matrix.shape)\n', (2051, 2094), True, 'import numpy as np\n'), ((3360, 3398), 'numpy.random.normal', 'np.random.normal', ([], {'size': '(nb_samples, n)'}), '(size=(nb_samples, n))\n', (3376, 3398), True, 'import numpy as np\n'), ((1439, 1463), 'numpy.random.random', 'np.random.random', (['(n, n)'], {}), '((n, n))\n', (1455, 1463), True, 'import numpy as np\n'), ((3271, 3280), 'numpy.eye', 'np.eye', (['n'], {}), '(n)\n', (3277, 3280), True, 'import numpy as np\n'), ((3447, 3490), 'numpy.random.exponential', 'np.random.exponential', ([], {'size': '(nb_samples, n)'}), '(size=(nb_samples, n))\n', (3468, 3490), True, 'import numpy as np\n'), ((3542, 3580), 'numpy.random.gumbel', 'np.random.gumbel', ([], {'size': '(nb_samples, n)'}), '(size=(nb_samples, n))\n', (3558, 3580), True, 'import numpy as np\n'), ((983, 992), 'numpy.eye', 'np.eye', (['n'], {}), '(n)\n', (989, 992), True, 'import numpy as np\n')]
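# A minimal end-to-end sketch tying the helpers above together; the seed and
# sizes are arbitrary, and the snippet assumes it runs in the same module
# (numpy is already imported there as np).
np.random.seed(0)
graph, adj_matrix = gen_graph('erdos-renyi', n=10, mean_deg=2.0)
X, beta, sigma_n = sample_lin_scms('erdos-renyi', 'gaussian', adj_matrix, nb_samples=500, weighted=True)
print(X.shape, beta.shape, sigma_n.shape)  # (500, 10) (10, 10) (10, 10)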
from Crypto import Random from Crypto.PublicKey import RSA import base64 def generate_keys(modulus_length=256*4): privatekey = RSA.generate(modulus_length, Random.new().read) publickey = privatekey.publickey() return privatekey, publickey def encryptit(message, publickey): encrypted_msg = publickey.encrypt(message, 32)[0] encoded_encrypted_msg = base64.b64encode(encrypted_msg) return encoded_encrypted_msg def decryptit(message, privatekey): decoded_encrypted_msg = base64.b64decode(message) decoded_decrypted_msg = privatekey.decrypt(decoded_encrypted_msg) return decoded_decrypted_msg if __name__ == '__main__': message = "This is an awesome message!" privatekey, publickey = generate_keys() encrypted_msg = encryptit(message.encode("utf-8"), publickey) decrypted_msg = decryptit(encrypted_msg, privatekey) print(f'{privatekey.exportKey()} - ({len(privatekey.exportKey())})') print(f'{publickey.exportKey()} - ({len(publickey.exportKey())})') print(f'Original: {message} - ({len(message)})') print(f'Encrypted: {encrypted_msg} - ({len(encrypted_msg)})') print(f'Decrypted: {decrypted_msg} - ({len(decrypted_msg)})')
[ "Crypto.Random.new", "base64.b64encode", "base64.b64decode" ]
[((373, 404), 'base64.b64encode', 'base64.b64encode', (['encrypted_msg'], {}), '(encrypted_msg)\n', (389, 404), False, 'import base64\n'), ((504, 529), 'base64.b64decode', 'base64.b64decode', (['message'], {}), '(message)\n', (520, 529), False, 'import base64\n'), ((162, 174), 'Crypto.Random.new', 'Random.new', ([], {}), '()\n', (172, 174), False, 'from Crypto import Random\n')]
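# Note: publickey.encrypt(message, 32) above is raw ("textbook") RSA without
# padding, which PyCrypto deprecated long ago. A hedged sketch of the padded
# alternative using PKCS1_OAEP (available in PyCrypto >= 2.5 and in
# PyCryptodome):
from Crypto.Cipher import PKCS1_OAEP

def encrypt_oaep(message, publickey):
    return base64.b64encode(PKCS1_OAEP.new(publickey).encrypt(message))

def decrypt_oaep(encoded, privatekey):
    return PKCS1_OAEP.new(privatekey).decrypt(base64.b64decode(encoded))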
#!/usr/bin/python3
import os
import shutil
import sys
from entrypoint_helpers import env, gen_cfg, gen_container_id, str2bool, start_app, set_perms, set_ownership

RUN_USER = env['sys_user']
RUN_GROUP = env['sys_group']
PG_DATA = env['pgdata']
PG_CONFIG_DIR = env['pg_config_dir']

try:
    PG_SSL_KEY_FILE = env['pg_ssl_key_file']
    PG_SSL_CERT_FILE = env['pg_ssl_cert_file']
    PG_SSL_CA_FILE = env['pg_ssl_ca_file']
    shutil.copyfile(PG_SSL_KEY_FILE, f'{PG_CONFIG_DIR}/server.key')
    shutil.copyfile(PG_SSL_CERT_FILE, f'{PG_CONFIG_DIR}/server.crt')
    shutil.copyfile(PG_SSL_CA_FILE, f'{PG_CONFIG_DIR}/root.crt')
    set_perms(f'{PG_CONFIG_DIR}/server.key', user=RUN_USER, group=RUN_GROUP, mode=0o600)
    set_perms(f'{PG_CONFIG_DIR}/server.crt', user=RUN_USER, group=RUN_GROUP, mode=0o600)
    set_perms(f'{PG_CONFIG_DIR}/root.crt', user=RUN_USER, group=RUN_GROUP, mode=0o600)
except (KeyError, FileNotFoundError):
    # SSL env keys unset or certificate files missing: start without TLS material.
    print("no certificate")

set_ownership(f'{PG_CONFIG_DIR}', user=RUN_USER, group=RUN_GROUP)
set_ownership(f'{PG_DATA}', user=RUN_USER, group=RUN_GROUP)
set_ownership('/var/log/patroni', user=RUN_USER, group=RUN_GROUP)
gen_cfg('patroni.yml.j2', f'{PG_CONFIG_DIR}/patroni.yml', user=RUN_USER, group=RUN_GROUP, mode=0o640, overwrite=False)
start_app(f'patroni {PG_CONFIG_DIR}/patroni.yml', PG_DATA, 'patroni')
[ "entrypoint_helpers.set_ownership", "entrypoint_helpers.set_perms", "entrypoint_helpers.gen_cfg", "shutil.copyfile", "entrypoint_helpers.start_app" ]
[((939, 1004), 'entrypoint_helpers.set_ownership', 'set_ownership', (['f"""{PG_CONFIG_DIR}"""'], {'user': 'RUN_USER', 'group': 'RUN_GROUP'}), "(f'{PG_CONFIG_DIR}', user=RUN_USER, group=RUN_GROUP)\n", (952, 1004), False, 'from entrypoint_helpers import env, gen_cfg, gen_container_id, str2bool, start_app, set_perms, set_ownership\n'), ((1006, 1065), 'entrypoint_helpers.set_ownership', 'set_ownership', (['f"""{PG_DATA}"""'], {'user': 'RUN_USER', 'group': 'RUN_GROUP'}), "(f'{PG_DATA}', user=RUN_USER, group=RUN_GROUP)\n", (1019, 1065), False, 'from entrypoint_helpers import env, gen_cfg, gen_container_id, str2bool, start_app, set_perms, set_ownership\n'), ((1067, 1132), 'entrypoint_helpers.set_ownership', 'set_ownership', (['"""/var/log/patroni"""'], {'user': 'RUN_USER', 'group': 'RUN_GROUP'}), "('/var/log/patroni', user=RUN_USER, group=RUN_GROUP)\n", (1080, 1132), False, 'from entrypoint_helpers import env, gen_cfg, gen_container_id, str2bool, start_app, set_perms, set_ownership\n'), ((1136, 1256), 'entrypoint_helpers.gen_cfg', 'gen_cfg', (['"""patroni.yml.j2"""', 'f"""{PG_CONFIG_DIR}/patroni.yml"""'], {'user': 'RUN_USER', 'group': 'RUN_GROUP', 'mode': '(416)', 'overwrite': '(False)'}), "('patroni.yml.j2', f'{PG_CONFIG_DIR}/patroni.yml', user=RUN_USER,\n group=RUN_GROUP, mode=416, overwrite=False)\n", (1143, 1256), False, 'from entrypoint_helpers import env, gen_cfg, gen_container_id, str2bool, start_app, set_perms, set_ownership\n'), ((1259, 1328), 'entrypoint_helpers.start_app', 'start_app', (['f"""patroni {PG_CONFIG_DIR}/patroni.yml"""', 'PG_DATA', '"""patroni"""'], {}), "(f'patroni {PG_CONFIG_DIR}/patroni.yml', PG_DATA, 'patroni')\n", (1268, 1328), False, 'from entrypoint_helpers import env, gen_cfg, gen_container_id, str2bool, start_app, set_perms, set_ownership\n'), ((430, 493), 'shutil.copyfile', 'shutil.copyfile', (['PG_SSL_KEY_FILE', 'f"""{PG_CONFIG_DIR}/server.key"""'], {}), "(PG_SSL_KEY_FILE, f'{PG_CONFIG_DIR}/server.key')\n", (445, 493), False, 'import shutil\n'), ((499, 563), 'shutil.copyfile', 'shutil.copyfile', (['PG_SSL_CERT_FILE', 'f"""{PG_CONFIG_DIR}/server.crt"""'], {}), "(PG_SSL_CERT_FILE, f'{PG_CONFIG_DIR}/server.crt')\n", (514, 563), False, 'import shutil\n'), ((569, 629), 'shutil.copyfile', 'shutil.copyfile', (['PG_SSL_CA_FILE', 'f"""{PG_CONFIG_DIR}/root.crt"""'], {}), "(PG_SSL_CA_FILE, f'{PG_CONFIG_DIR}/root.crt')\n", (584, 629), False, 'import shutil\n'), ((636, 722), 'entrypoint_helpers.set_perms', 'set_perms', (['f"""{PG_CONFIG_DIR}/server.key"""'], {'user': 'RUN_USER', 'group': 'RUN_GROUP', 'mode': '(384)'}), "(f'{PG_CONFIG_DIR}/server.key', user=RUN_USER, group=RUN_GROUP,\n mode=384)\n", (645, 722), False, 'from entrypoint_helpers import env, gen_cfg, gen_container_id, str2bool, start_app, set_perms, set_ownership\n'), ((726, 812), 'entrypoint_helpers.set_perms', 'set_perms', (['f"""{PG_CONFIG_DIR}/server.crt"""'], {'user': 'RUN_USER', 'group': 'RUN_GROUP', 'mode': '(384)'}), "(f'{PG_CONFIG_DIR}/server.crt', user=RUN_USER, group=RUN_GROUP,\n mode=384)\n", (735, 812), False, 'from entrypoint_helpers import env, gen_cfg, gen_container_id, str2bool, start_app, set_perms, set_ownership\n'), ((816, 901), 'entrypoint_helpers.set_perms', 'set_perms', (['f"""{PG_CONFIG_DIR}/root.crt"""'], {'user': 'RUN_USER', 'group': 'RUN_GROUP', 'mode': '(384)'}), "(f'{PG_CONFIG_DIR}/root.crt', user=RUN_USER, group=RUN_GROUP, mode=384\n )\n", (825, 901), False, 'from entrypoint_helpers import env, gen_cfg, gen_container_id, str2bool, start_app, set_perms, set_ownership\n')]
import sys import tweepy def tweeting(consumer_key, consumer_secret, my_access_token, my_access_token_secret, message): # Authentication my_auth = tweepy.OAuthHandler(consumer_key, consumer_secret) my_auth.set_access_token(my_access_token, my_access_token_secret) my_api = tweepy.API(my_auth) my_api.update_status(message) if __name__ == '__main__': if len(sys.argv) == 6: consumer_key = sys.argv[1] consumer_secret_key = sys.argv[2] my_access_token = sys.argv[3] my_access_token_secret = sys.argv[4] message = sys.argv[5] tweeting(consumer_key, consumer_secret_key, my_access_token, my_access_token_secret, message)
[ "tweepy.OAuthHandler", "tweepy.API" ]
[((159, 209), 'tweepy.OAuthHandler', 'tweepy.OAuthHandler', (['consumer_key', 'consumer_secret'], {}), '(consumer_key, consumer_secret)\n', (178, 209), False, 'import tweepy\n'), ((293, 312), 'tweepy.API', 'tweepy.API', (['my_auth'], {}), '(my_auth)\n', (303, 312), False, 'import tweepy\n')]
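Invocation sketch for the tweeting script above, matching the five positional arguments it parses (the script name and credential values are placeholders):

# python tweet_script.py CONSUMER_KEY CONSUMER_SECRET ACCESS_TOKEN ACCESS_TOKEN_SECRET "hello world"
# or, programmatically, with placeholder credentials:
tweeting("ck", "cs", "at", "ats", "hello world")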
#!/usr/bin/env python # -*- coding: utf-8 -*- import sys import os import argparse import pandas as pd from pkg_resources import resource_filename from .utils import column_exists, fixup_columns, get_app_file_path, download_file IN_ROLLS_DATA = {'v1': 'https://dataverse.harvard.edu/api/v1/access/datafile/4967581', 'v2': 'https://dataverse.harvard.edu/api/v1/access/datafile/4965696', 'v2_1k': 'https://dataverse.harvard.edu/api/v1/access/datafile/4965695', } IN_ROLLS_COLS = ['n_male', 'n_female', 'n_third_gender', 'prop_female', 'prop_male', 'prop_third_gender'] class InRollsFnData(): __df = None __state = None __year = None @staticmethod def load_naampy_data(dataset): data_fn = 'naampy_{0:s}.csv.gz'.format(dataset) data_path = get_app_file_path('naampy', data_fn) if not os.path.exists(data_path): print("Downloading naampy data from the server ({0!s})..." .format(data_fn)) if not download_file(IN_ROLLS_DATA[dataset], data_path): print("ERROR: Cannot download naampy data file") return None else: print("Using cached naampy data from local ({0!s})...".format(data_path)) return data_path @classmethod def in_rolls_fn_gender(cls, df, namecol, state=None, year=None, dataset='v2_1k'): """Appends additional columns from Female ratio data to the input DataFrame based on the first name. Removes extra space. Checks if the name is the Indian electoral rolls data. If it is, outputs data from that row. Args: df (:obj:`DataFrame`): Pandas DataFrame containing the first name column. namecol (str or int): Column's name or location of the name in DataFrame. state (str): The state name of Indian electoral rolls data to be used. (default is None for all states) year (int): The year of Indian electoral rolls to be used. (default is None for all years) Returns: DataFrame: Pandas DataFrame with additional columns:- 'n_female', 'n_male', 'n_third_gender', 'prop_female', 'prop_male', 'prop_third_gender' by first name """ if namecol not in df.columns: print("No column `{0!s}` in the DataFrame".format(namecol)) return df df['__first_name'] = df[namecol].str.strip() df['__first_name'] = df['__first_name'].str.lower() if cls.__df is None or cls.__state != state or cls.__year != year: data_path = InRollsFnData.load_naampy_data(dataset) adf = pd.read_csv(data_path, usecols=['state', 'birth_year', 'first_name', 'n_female', 'n_male', 'n_third_gender']) agg_dict = {'n_female': 'sum', 'n_male': 'sum', 'n_third_gender': 'sum'} if state and year: adf = adf[(adf.state==state) & (adf.birth_year==year)].copy() del adf['birth_year'] del adf['state'] elif state: adf = adf.groupby(['state', 'first_name']).agg(agg_dict).reset_index() adf = adf[adf.state==state].copy() del adf['state'] elif year: adf = adf.groupby(['birth_year', 'first_name']).agg(agg_dict).reset_index() adf = adf[adf.birth_year==year].copy() del adf['birth_year'] else: adf = adf.groupby(['first_name']).agg(agg_dict).reset_index() n = adf['n_female'] + adf['n_male'] + adf['n_third_gender'] adf['prop_female'] = adf['n_female'] / n adf['prop_male'] = adf['n_male'] / n adf['prop_third_gender'] = adf['n_third_gender'] / n cls.__df = adf cls.__df = cls.__df[['first_name'] + IN_ROLLS_COLS] cls.__df.rename(columns={'first_name': '__first_name'}, inplace=True) rdf = pd.merge(df, cls.__df, how='left', on='__first_name') del rdf['__first_name'] return rdf @staticmethod def list_states(dataset='v2_1k'): data_path = InRollsFnData.load_naampy_data(dataset) adf = pd.read_csv(data_path, usecols=['state']) return adf.state.unique() in_rolls_fn_gender = InRollsFnData.in_rolls_fn_gender 
def main(argv=sys.argv[1:]):
    title = ('Appends Electoral roll columns for prop_female, n_female, '
             'n_male, n_third_gender by first name')
    parser = argparse.ArgumentParser(description=title)
    parser.add_argument('input', default=None,
                        help='Input file')
    parser.add_argument('-f', '--first-name', required=True,
                        help='Name or index location of the column containing '
                             'the first name')
    parser.add_argument('-s', '--state', default=None,
                        choices=InRollsFnData.list_states(),
                        help='State name of Indian electoral rolls data '
                             '(default=all)')
    parser.add_argument('-y', '--year', type=int, default=None,
                        help='Birth year in Indian electoral rolls data (default=all)')
    parser.add_argument('-o', '--output', default='in-rolls-output.csv',
                        help='Output file with Indian electoral rolls data columns')
    parser.add_argument('-d', '--dataset', default='v2_1k',
                        choices=['v1', 'v2', 'v2_1k'],
                        help='Select the dataset: v1 covers 12 states;\n'
                             'v2 and v2_1k cover 30 states with 100 and 1,000\n'
                             'first name occurrences respectively '
                             '(default=v2_1k)')
    args = parser.parse_args(argv)
    print(args)
    if not args.first_name.isdigit():
        df = pd.read_csv(args.input)
    else:
        df = pd.read_csv(args.input, header=None)
        args.first_name = int(args.first_name)
    if not column_exists(df, args.first_name):
        return -1
    rdf = in_rolls_fn_gender(df, args.first_name, args.state, args.year,
                             args.dataset)
    print("Saving output to file: `{0:s}`".format(args.output))
    rdf.columns = fixup_columns(rdf.columns)
    rdf.to_csv(args.output, index=False)
    return 0


if __name__ == "__main__":
    sys.exit(main())
[ "pandas.read_csv", "pandas.merge", "os.path.exists", "argparse.ArgumentParser" ]
[((4631, 4673), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': 'title'}), '(description=title)\n', (4654, 4673), False, 'import argparse\n'), ((4092, 4145), 'pandas.merge', 'pd.merge', (['df', 'cls.__df'], {'how': '"""left"""', 'on': '"""__first_name"""'}), "(df, cls.__df, how='left', on='__first_name')\n", (4100, 4145), True, 'import pandas as pd\n'), ((4330, 4371), 'pandas.read_csv', 'pd.read_csv', (['data_path'], {'usecols': "['state']"}), "(data_path, usecols=['state'])\n", (4341, 4371), True, 'import pandas as pd\n'), ((5980, 6003), 'pandas.read_csv', 'pd.read_csv', (['args.input'], {}), '(args.input)\n', (5991, 6003), True, 'import pandas as pd\n'), ((6027, 6063), 'pandas.read_csv', 'pd.read_csv', (['args.input'], {'header': 'None'}), '(args.input, header=None)\n', (6038, 6063), True, 'import pandas as pd\n'), ((883, 908), 'os.path.exists', 'os.path.exists', (['data_path'], {}), '(data_path)\n', (897, 908), False, 'import os\n'), ((2762, 2875), 'pandas.read_csv', 'pd.read_csv', (['data_path'], {'usecols': "['state', 'birth_year', 'first_name', 'n_female', 'n_male', 'n_third_gender']"}), "(data_path, usecols=['state', 'birth_year', 'first_name',\n 'n_female', 'n_male', 'n_third_gender'])\n", (2773, 2875), True, 'import pandas as pd\n')]
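Usage sketch for in_rolls_fn_gender defined above; the DataFrame and names are illustrative, and note that the first call downloads the naampy data file into the app cache:

import pandas as pd

df = pd.DataFrame({"first_name": ["priya", "rahul"]})   # illustrative names
out = in_rolls_fn_gender(df, "first_name")              # state/year default to all
print(out[["first_name", "prop_female", "prop_male"]])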
from django.urls import path from itez.users.views import ( update_view, # UserUpdateView, user_delete, user_detail_view, user_redirect_view, UserCreateView, user_profile, user_profile_photo_update, ) app_name = "users" urlpatterns = [ path("user/create/", UserCreateView.as_view(), name="user_create"), path("user/delete/<int:user_id>", user_delete, name="delete"), path("user/profile/photo/upload/", user_profile_photo_update, name="profile_photo"), path("user/profile/", user_profile, name="profile"), path("~redirect/", view=user_redirect_view, name="redirect"), # path("update/<int:pk>/", view=UserUpdateView.as_view(), name="update"), path("update/<int:pk>/", view=update_view, name="update"), path("<str:username>/", view=user_detail_view, name="detail"), ]
[ "itez.users.views.UserCreateView.as_view", "django.urls.path" ]
[((346, 407), 'django.urls.path', 'path', (['"""user/delete/<int:user_id>"""', 'user_delete'], {'name': '"""delete"""'}), "('user/delete/<int:user_id>', user_delete, name='delete')\n", (350, 407), False, 'from django.urls import path\n'), ((413, 501), 'django.urls.path', 'path', (['"""user/profile/photo/upload/"""', 'user_profile_photo_update'], {'name': '"""profile_photo"""'}), "('user/profile/photo/upload/', user_profile_photo_update, name=\n 'profile_photo')\n", (417, 501), False, 'from django.urls import path\n'), ((502, 553), 'django.urls.path', 'path', (['"""user/profile/"""', 'user_profile'], {'name': '"""profile"""'}), "('user/profile/', user_profile, name='profile')\n", (506, 553), False, 'from django.urls import path\n'), ((559, 619), 'django.urls.path', 'path', (['"""~redirect/"""'], {'view': 'user_redirect_view', 'name': '"""redirect"""'}), "('~redirect/', view=user_redirect_view, name='redirect')\n", (563, 619), False, 'from django.urls import path\n'), ((703, 760), 'django.urls.path', 'path', (['"""update/<int:pk>/"""'], {'view': 'update_view', 'name': '"""update"""'}), "('update/<int:pk>/', view=update_view, name='update')\n", (707, 760), False, 'from django.urls import path\n'), ((766, 827), 'django.urls.path', 'path', (['"""<str:username>/"""'], {'view': 'user_detail_view', 'name': '"""detail"""'}), "('<str:username>/', view=user_detail_view, name='detail')\n", (770, 827), False, 'from django.urls import path\n'), ((295, 319), 'itez.users.views.UserCreateView.as_view', 'UserCreateView.as_view', ([], {}), '()\n', (317, 319), False, 'from itez.users.views import update_view, user_delete, user_detail_view, user_redirect_view, UserCreateView, user_profile, user_profile_photo_update\n')]
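Reversing the named routes above goes through the `users` namespace declared by app_name; a sketch, assuming this urlconf is included at the site root:

from django.urls import reverse

reverse("users:profile")                               # -> "/user/profile/"
reverse("users:delete", kwargs={"user_id": 7})         # -> "/user/delete/7"
reverse("users:detail", kwargs={"username": "alice"})  # -> "/alice/"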
#!/bin/env python """ Implements Python interface to NRL NAAPS files """ import os import sys from types import * from pyhdf import SD from glob import glob from numpy import ones, concatenate, array,linspace,arange, transpose from datetime import date, datetime, timedelta from .config import strTemplate MISSING = -9999.99 ALIAS = dict (latitude = 'lat' , longitude = 'lon' , elevation = 'zs' , time = 'Time') ALIAS['532_attenuated_backscatter'] = 'taback' ALIAS['532_attenuated_backscatter_error'] = 'taback_err' ALIAS['532_attenuated_molecular_backscatter'] = 'mol_aback' SDS = list(ALIAS.keys()) #......................................................................................... class NAAPS(object): """ Base class for NAAPS object. """ def __init__ (self,Path,keep=None,Verbose=0,only_good=True): """ Creates an NAAPS object defining the attributes corresponding to the SDS's on input. The optional parameter *keep* is used to specify the number of scan lines (from the left of the swath) to keep. This is needed for coping with the row anomaly problem. """ # Initially are lists of numpy arrays for each granule # ---------------------------------------------------- self.verb = Verbose self.keep = keep self.SDS = SDS # Variable names # -------------- self.Names = [] for name in SDS: self.Names.append(name) self.Names += ['nymd','nhms'] # Create empty lists for SDS to be read from orbit file; # each element of the list contains data for one orbit # ------------------------------------------------------ for name in self.Names: self.__dict__[name] = [] self.time_ = [] # to hold datetime objects # Read each orbit, appending them to the list # ------------------------------------------- if type(Path) is ListType: if len(Path) == 0: self.nobs = 0 print("WARNING: Empty NAAPS object created") return else: Path = [Path, ] self._readList(Path) # Make each attribute a single numpy array # ---------------------------------------- for name in self.Names: # print 'name',name, 'donnees',self.__dict__[name] try: self.__dict__[name] = concatenate((self.__dict__[name])) except: print("Failed concatenating "+name) # Make aliases for compatibility with older code # ---------------------------------------------- # Alias = ALIAS.keys() for name in self.Names: if name in SDS: self.__dict__[ALIAS[name]] = self.__dict__[name] #--- def _readList(self,List): """ Recursively, look for files in list; list items can be files or directories. 
""" for item in List: if os.path.isdir(item): self._readDir(item) elif os.path.isfile(item): self._readOrbit(item) else: print("%s is not a valid file or directory, ignoring it"%item) #--- def _readDir(self,dir): """Recursively, look for files in directory.""" for item in os.listdir(dir): path = dir + os.sep + item if os.path.isdir(path): self._readDir(path) elif os.path.isfile(path): self._readOrbit(path) else: print("%s is not a valid file or directory, ignoring it"%item) #--- def _readOrbit(self,filename): """Reads one CALIPSO orbit with Level 1.5 data.""" # Reference time # -------------- REF_DATE = datetime(1993,1,1,0,0,0) # Open the CALIPSO file and loop over the datasets, # extracting GEOLOCATION and Data fields # ---------------------------------------------- if self.verb: print("[] working on <%s>"%filename) f = SD.SD(filename) # for group in self.SDS.keys(): for name in self.SDS: v = name print('v', v) if v == 'time': sd = f.select(v) Time = sd.get() nobs = len(Time) nymd = ones(nobs).astype('int') nhms = ones(nobs).astype('int') self.__dict__[v].append(Time) # time as on file for i in range(nobs): yymmdd = Time[i] nymd0 = int(Time[i]) nd = Time[i] - nymd0 nd0 = nd * 24.0 hh = int(nd0) nd1 = nd0 - hh nd2 = nd1 * 60 mm = int(nd2) nd3 = nd2 - mm nd4 = nd3 * 60 ss = int(nd4) nymd[i] = 20000000 + nymd0 nhms[i] = ((hh * 100) + mm) * 100 + ss self.nymd.append(nymd) self.nhms.append(nhms) year = int(nymd[i]/10000) month = int((nymd[i] - 10000*year)/100) day = nymd[i] - (year*10000 + month * 100) self.time_.append(datetime(year,month,day,hh,mm,ss)) else: sd = f.select(v) data = sd.get() # most of parameter : data = (nobs) or (nobs,km) except L2 feature type(nobs,km,4) data = transpose(data) print('data', data.shape) if self.keep != None: self.__dict__[v].append(data[0:self.keep,:]) else: self.__dict__[v].append(data) #--- def writeg(self,g5,syn_time,nsyn=8,g5_h=None,g5_ab=None,filename=None,dir='.',expid='NAAPS',Verb=1): """ Writes gridded CALIPSO measurements to file (same grid as GEOS-5 file). Verb -- Verbose level: 0 - really quiet (default) 1 - Warns if invalid file is found 2 - Prints out non-zero number of fires in each file. """ from gfio import GFIO from binObs_ import binobs3dh # Determine synoptic time range # ----------------------------- dt = timedelta(seconds = 12. * 60. * 60. / nsyn) t1, t2 = (syn_time-dt,syn_time+dt) # Lat lon grid from GEOS-5 file # ------------ im = 360 jm = 181 print('im,jm', im, jm) glon = linspace(-180.,180.,im,endpoint=False) glat = linspace(-90.,90.,jm) print('glon', glon, glat) dLon = 360. / im dLat = 180. / ( jm - 1.) 
print('dlon', dLon, dLat) nymd = 10000 * syn_time.year + 100 * syn_time.month + syn_time.day nhms = 10000 * syn_time.hour + 100 * syn_time.minute + syn_time.second print('nymd=',nymd, 'nhms=',nhms) na_height = arange(0,8100,400) # height above sea level for NAAPS 100mfor night 400m forday print('na_height shape', na_height.shape, g5_h.shape) g5_height = g5_h km = g5_height.shape[0] # because it is at the edge print('km', km, g5_height.shape, g5_height[:,0]) nobs = self.lon.shape vtitle = [ 'taback', 'taback_err', 'mol_aback', 'height' ] vname = ['taback','taback_err', 'mol_aback'] vunits = [ 'km-1 sr-1','km-1 sr-1', 'km-1 sr-1' ] kmvar = [km, km, km] title = 'Gridded NAAPS attenuated backscatter coeff lev Geos5' source = 'NASA/GSFC/GMAO GEOS-5 Aerosol Group' contact = 'Virginie' if filename is None: filename = '%s/%s.day.calipso_l3a.%d_%02dz.nc4'%(dir,expid,nymd,nhms/10000) # QA filtering # ------------ I_bad = ones(self.taback.shape) # bad data I_bad = False # Time filter of data # ------------------- lon = self.lon lat = self.lat taback = _timefilter(self.time_,t1,t2,self.taback,I_bad) taback_err = _timefilter(self.time_,t1,t2,self.taback_err,I_bad) mol_aback = _timefilter(self.time_,t1,t2,self.mol_aback,I_bad) # height = _timefilter(self.time_,t1,t2,na_height,I_bad) print('taback', taback.shape) # Create the file # --------------- f = GFIO() glevs=arange(km) f.create(filename, vname, nymd, nhms, lon=glon, lat=glat, levs=glevs, levunits='m', vtitle=vtitle, vunits=vunits,kmvar=kmvar,amiss=MISSING, title=title, source=source, contact=contact) # gObs=binobs3dh(lon[13:14],lat[13:14],taback[13:14,:],na_height,g5_height[:,13:14],im,jm,MISSING) print('test', lon[10:11],lat[10:11],taback[10:11,:],na_height,g5_height[:,10:11]) gObs=binobs3dh(lon[10:11],lat[10:11],taback[10:11,:],na_height,g5_height[:,10:11],im,jm,MISSING) print('gobs', gObs[357:358,101:102,:]) # Grid variable and write to file # ------------------------------- f.write('taback', nymd, nhms, binobs3dh(lon,lat,taback,na_height,g5_height,im,jm,MISSING) ) f.write('taback_err', nymd, nhms, binobs3dh(lon,lat,taback_err,na_height,g5_height,im,jm,MISSING) ) f.write('mol_aback', nymd, nhms, binobs3dh(lon,lat,mol_aback,na_height,g5_height,im,jm,MISSING) ) # f.write('height', nymd, nhms, g5_height) if Verb >=1: print("[w] Wrote file "+filename) #.................................................................... def _timefilter ( t, t1, t2, a, I_bad ): filler = MISSING * ones(a.shape[1:]) b = a.copy() for i in range(len(t)): if (t[i]<t1) or (t[i]>=t2): b[i] = filler if len(b.shape) == 3: b[I_bad,:] = MISSING elif len(b.shape) == 2: b[I_bad] = MISSING else: raise IndexError("Invalid rank=%d for time filtering"%len(b.shape)) return b #--- def orbits (path, syn_time, nsyn=8, period='night', Verbose=0 ): """ Returns a list of CALIPSO orbits for a given product at given synoptic time. On input, path --- mounting point for the CALIPSO Level 1.5 files syn_time --- synoptic time (timedate format) nsyn --- number of synoptic times per day (optional) """ # Determine synoptic time range # ----------------------------- dt = timedelta(seconds = 12. * 60. * 60. 
                   / nsyn)
    t1, t2 = (syn_time-dt,syn_time+dt)
    print("[*] ", t1,"|", t2)
    today = syn_time
    yesterday = today - timedelta(hours=24)
    Files = []
    for t in (yesterday,today):
        yy, mm, dd = (t.year,t.month,t.day)
        dirn = "%s/%02d/%s"%(path,mm,period)
        Files += glob("%s/naaps_caliop_assim_*.cdf"%(dirn))
#    print 'Files', dirn, Files
    Orbits = []
    for f in Files:
        dirn, filen = os.path.split(f)
        tokens = filen.split('_')
        beg_yy = int(tokens[3][0:4])
        beg_mm = int(tokens[3][4:6])
        beg_dd = int(tokens[3][6:8])
        beg_h = int(tokens[3][8:10])
        beg_m = int(tokens[3][10:12])
        t_beg = datetime(beg_yy,beg_mm,beg_dd,beg_h,beg_m,0)
        t_end = t_beg + timedelta(minutes=90)
#        t_end = datetime(end_yy,end_mm,end_dd,end_h,end_m,0)
#    print 'year', beg_yy, 'month', beg_mm, 'day', beg_dd, 'hour', beg_h, 'min', beg_m
        if (t_beg>=t1 and t_beg<t2) or (t_end>=t1 and t_end<t2):
            print("[x] ", t_beg, '|', t_end)
            Orbits += [f,]
            if Verbose:
                print("[] ", f)
    return Orbits
#............................................................................
if __name__ == "__main__":
    from gfio import GFIO  # needed here: the import inside writeg() is local to that method
#    syn_time = datetime(2008,6,30,0,0,0)
# Time interval and time step
# ---------------------------
    t_beg = datetime(2007,4,1,0)
    t_end = datetime(2007,4,1,21)
    dt = timedelta(seconds=3*60*60)    # 3-hourly
    t = t_beg - dt
    while t < t_end:
        t += dt
        syn_time = t
        Files = orbits('/nobackup/2/vbuchard/CALIPSO_L15/NAAPS/',syn_time,period='day',Verbose=1)
        print('files',Files)
#def hold():
        # NAAPS files
        naap = NAAPS(Files,Verbose=1)
        # GEOS-5 file
        g_template = "/nobackup/2/vbuchard/CALIPSO_L15/GEOS-5/aback_63lay/Y%y4/M%m2/dR_MERRA-AA-r2_ext532nm_Nv_63layers.%y4%m2%d2_%h200z.nc4"
        g_fn = strTemplate(g_template,dtime=syn_time)
        lon=naap.lon
        lat=naap.lat
        g = GFIO(g_fn)
        g5_height = g.interp('h',lon,lat)
        g5_aback = g.read('taback')
        naap.writeg(g,syn_time,nsyn=8,g5_h=g5_height,g5_ab=g5_aback,filename=None,dir='/nobackup/2/vbuchard/CALIPSO_L15/',expid='NAAPS',Verb=1)
[ "binObs_.binobs3dh", "os.path.isdir", "gfio.GFIO", "numpy.transpose", "numpy.ones", "datetime.datetime", "os.path.isfile", "datetime.timedelta", "numpy.arange", "numpy.linspace", "pyhdf.SD.SD", "glob.glob", "os.path.split", "os.listdir", "numpy.concatenate" ]
[((10722, 10766), 'datetime.timedelta', 'timedelta', ([], {'seconds': '(12.0 * 60.0 * 60.0 / nsyn)'}), '(seconds=12.0 * 60.0 * 60.0 / nsyn)\n', (10731, 10766), False, 'from datetime import date, datetime, timedelta\n'), ((12181, 12204), 'datetime.datetime', 'datetime', (['(2007)', '(4)', '(1)', '(0)'], {}), '(2007, 4, 1, 0)\n', (12189, 12204), False, 'from datetime import date, datetime, timedelta\n'), ((12214, 12238), 'datetime.datetime', 'datetime', (['(2007)', '(4)', '(1)', '(21)'], {}), '(2007, 4, 1, 21)\n', (12222, 12238), False, 'from datetime import date, datetime, timedelta\n'), ((12245, 12275), 'datetime.timedelta', 'timedelta', ([], {'seconds': '(3 * 60 * 60)'}), '(seconds=3 * 60 * 60)\n', (12254, 12275), False, 'from datetime import date, datetime, timedelta\n'), ((3457, 3472), 'os.listdir', 'os.listdir', (['dir'], {}), '(dir)\n', (3467, 3472), False, 'import os\n'), ((3903, 3932), 'datetime.datetime', 'datetime', (['(1993)', '(1)', '(1)', '(0)', '(0)', '(0)'], {}), '(1993, 1, 1, 0, 0, 0)\n', (3911, 3932), False, 'from datetime import date, datetime, timedelta\n'), ((4180, 4195), 'pyhdf.SD.SD', 'SD.SD', (['filename'], {}), '(filename)\n', (4185, 4195), False, 'from pyhdf import SD\n'), ((6539, 6583), 'datetime.timedelta', 'timedelta', ([], {'seconds': '(12.0 * 60.0 * 60.0 / nsyn)'}), '(seconds=12.0 * 60.0 * 60.0 / nsyn)\n', (6548, 6583), False, 'from datetime import date, datetime, timedelta\n'), ((6765, 6808), 'numpy.linspace', 'linspace', (['(-180.0)', '(180.0)', 'im'], {'endpoint': '(False)'}), '(-180.0, 180.0, im, endpoint=False)\n', (6773, 6808), False, 'from numpy import ones, concatenate, array, linspace, arange, transpose\n'), ((6818, 6843), 'numpy.linspace', 'linspace', (['(-90.0)', '(90.0)', 'jm'], {}), '(-90.0, 90.0, jm)\n', (6826, 6843), False, 'from numpy import ones, concatenate, array, linspace, arange, transpose\n'), ((7193, 7213), 'numpy.arange', 'arange', (['(0)', '(8100)', '(400)'], {}), '(0, 8100, 400)\n', (7199, 7213), False, 'from numpy import ones, concatenate, array, linspace, arange, transpose\n'), ((8124, 8147), 'numpy.ones', 'ones', (['self.taback.shape'], {}), '(self.taback.shape)\n', (8128, 8147), False, 'from numpy import ones, concatenate, array, linspace, arange, transpose\n'), ((8656, 8662), 'gfio.GFIO', 'GFIO', ([], {}), '()\n', (8660, 8662), False, 'from gfio import GFIO\n'), ((8676, 8686), 'numpy.arange', 'arange', (['km'], {}), '(km)\n', (8682, 8686), False, 'from numpy import ones, concatenate, array, linspace, arange, transpose\n'), ((9143, 9247), 'binObs_.binobs3dh', 'binobs3dh', (['lon[10:11]', 'lat[10:11]', 'taback[10:11, :]', 'na_height', 'g5_height[:, 10:11]', 'im', 'jm', 'MISSING'], {}), '(lon[10:11], lat[10:11], taback[10:11, :], na_height, g5_height[:,\n 10:11], im, jm, MISSING)\n', (9152, 9247), False, 'from binObs_ import binobs3dh\n'), ((9942, 9959), 'numpy.ones', 'ones', (['a.shape[1:]'], {}), '(a.shape[1:])\n', (9946, 9959), False, 'from numpy import ones, concatenate, array, linspace, arange, transpose\n'), ((10882, 10901), 'datetime.timedelta', 'timedelta', ([], {'hours': '(24)'}), '(hours=24)\n', (10891, 10901), False, 'from datetime import date, datetime, timedelta\n'), ((11081, 11123), 'glob.glob', 'glob', (["('%s/naaps_caliop_assim_*.cdf' % dirn)"], {}), "('%s/naaps_caliop_assim_*.cdf' % dirn)\n", (11085, 11123), False, 'from glob import glob\n'), ((11219, 11235), 'os.path.split', 'os.path.split', (['f'], {}), '(f)\n', (11232, 11235), False, 'import os\n'), ((11481, 11530), 'datetime.datetime', 'datetime', (['beg_yy', 
'beg_mm', 'beg_dd', 'beg_h', 'beg_m', '(0)'], {}), '(beg_yy, beg_mm, beg_dd, beg_h, beg_m, 0)\n', (11489, 11530), False, 'from datetime import date, datetime, timedelta\n'), ((12836, 12846), 'gfio.GFIO', 'GFIO', (['g_fn'], {}), '(g_fn)\n', (12840, 12846), False, 'from gfio import GFIO\n'), ((3141, 3160), 'os.path.isdir', 'os.path.isdir', (['item'], {}), '(item)\n', (3154, 3160), False, 'import os\n'), ((3528, 3547), 'os.path.isdir', 'os.path.isdir', (['path'], {}), '(path)\n', (3541, 3547), False, 'import os\n'), ((9405, 9471), 'binObs_.binobs3dh', 'binobs3dh', (['lon', 'lat', 'taback', 'na_height', 'g5_height', 'im', 'jm', 'MISSING'], {}), '(lon, lat, taback, na_height, g5_height, im, jm, MISSING)\n', (9414, 9471), False, 'from binObs_ import binobs3dh\n'), ((9508, 9578), 'binObs_.binobs3dh', 'binobs3dh', (['lon', 'lat', 'taback_err', 'na_height', 'g5_height', 'im', 'jm', 'MISSING'], {}), '(lon, lat, taback_err, na_height, g5_height, im, jm, MISSING)\n', (9517, 9578), False, 'from binObs_ import binobs3dh\n'), ((9614, 9683), 'binObs_.binobs3dh', 'binobs3dh', (['lon', 'lat', 'mol_aback', 'na_height', 'g5_height', 'im', 'jm', 'MISSING'], {}), '(lon, lat, mol_aback, na_height, g5_height, im, jm, MISSING)\n', (9623, 9683), False, 'from binObs_ import binobs3dh\n'), ((11550, 11571), 'datetime.timedelta', 'timedelta', ([], {'minutes': '(90)'}), '(minutes=90)\n', (11559, 11571), False, 'from datetime import date, datetime, timedelta\n'), ((2543, 2575), 'numpy.concatenate', 'concatenate', (['self.__dict__[name]'], {}), '(self.__dict__[name])\n', (2554, 2575), False, 'from numpy import ones, concatenate, array, linspace, arange, transpose\n'), ((3204, 3224), 'os.path.isfile', 'os.path.isfile', (['item'], {}), '(item)\n', (3218, 3224), False, 'import os\n'), ((3591, 3611), 'os.path.isfile', 'os.path.isfile', (['path'], {}), '(path)\n', (3605, 3611), False, 'import os\n'), ((5728, 5743), 'numpy.transpose', 'transpose', (['data'], {}), '(data)\n', (5737, 5743), False, 'from numpy import ones, concatenate, array, linspace, arange, transpose\n'), ((4475, 4485), 'numpy.ones', 'ones', (['nobs'], {}), '(nobs)\n', (4479, 4485), False, 'from numpy import ones, concatenate, array, linspace, arange, transpose\n'), ((4522, 4532), 'numpy.ones', 'ones', (['nobs'], {}), '(nobs)\n', (4526, 4532), False, 'from numpy import ones, concatenate, array, linspace, arange, transpose\n'), ((5478, 5516), 'datetime.datetime', 'datetime', (['year', 'month', 'day', 'hh', 'mm', 'ss'], {}), '(year, month, day, hh, mm, ss)\n', (5486, 5516), False, 'from datetime import date, datetime, timedelta\n')]
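Both writeg() and orbits() above window observations to syn_time ± 12h/nsyn; for the default nsyn=8 that is a ±1.5 h window, e.g.:

from datetime import datetime, timedelta

nsyn = 8
dt = timedelta(seconds=12. * 60. * 60. / nsyn)   # 1:30:00
syn_time = datetime(2007, 4, 1, 6)             # illustrative synoptic time
t1, t2 = syn_time - dt, syn_time + dt          # 04:30 .. 07:30 window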
import runexp import testexp import summary memo = "multi_phase/sumo/pipeline" runexp.main(memo) print("****************************** runexp ends (generate, train, test)!! ******************************") summary.main(memo) print("****************************** summary_detail ends ******************************")
[ "summary.main", "runexp.main" ]
[((81, 98), 'runexp.main', 'runexp.main', (['memo'], {}), '(memo)\n', (92, 98), False, 'import runexp\n'), ((208, 226), 'summary.main', 'summary.main', (['memo'], {}), '(memo)\n', (220, 226), False, 'import summary\n')]
from neuromllite import (
    Network,
    Cell,
    Population,
    Synapse,
    RectangularRegion,
    RandomLayout,
)
from neuromllite import (
    Projection,
    RandomConnectivity,
    OneToOneConnector,
    Simulation,
    InputSource,
    Input,
)
from neuromllite.NetworkGenerator import check_to_generate_or_run
import sys


def generate():
    dt = 0.05
    simtime = 100
    ################################################################################
    ### Build new network
    net = Network(id="FN")
    net.notes = "FitzHugh-Nagumo cell model - originally specified in NeuroML/LEMS"
    net.parameters = {
        "initial_w": 0.0,
        "initial_v": -1,
        "a_v": -0.3333333333333333,
        "b_v": 0.0,
        "c_v": 1.0,
        "d_v": 1,
        "e_v": -1.0,
        "f_v": 1.0,
        "time_constant_v": 1.0,
        "a_w": 1.0,
        "b_w": -0.8,
        "c_w": 0.7,
        "time_constant_w": 12.5,
        "threshold": -1.0,
        "mode": 1.0,
        "uncorrelated_activity": 0.0,
        "Iext": 0,
    }
    cellInput = Cell(id="fn", lems_source_file="FN_Definitions.xml", parameters={})
    for p in net.parameters:
        cellInput.parameters[p] = p
    net.cells.append(cellInput)
    r1 = RectangularRegion(
        id="region1", x=0, y=0, z=0, width=1000, height=100, depth=1000
    )
    net.regions.append(r1)
    pop = Population(
        id="FNpop",
        size="1",
        component=cellInput.id,
        properties={"color": "0.2 0.2 0.2", "radius": 3},
        random_layout=RandomLayout(region=r1.id),
    )
    net.populations.append(pop)
    new_file = net.to_json_file("%s.json" % net.id)
    ################################################################################
    ### Build Simulation object & save as JSON
    sim = Simulation(
        id="Sim%s" % net.id,
        network=new_file,
        duration=simtime,
        dt=dt,
        seed=123,
        recordVariables={"V": {"all": "*"}, "W": {"all": "*"}},
        plots2D={
            "VW": {"x_axis": "%s/0/fn/V" % pop.id, "y_axis": "%s/0/fn/W" % pop.id}
        },
    )
    sim.to_json_file()
    return sim, net


if __name__ == "__main__":
    sim, net = generate()
    ################################################################################
    ### Run in some simulators
    check_to_generate_or_run(sys.argv, sim)
[ "neuromllite.Simulation", "neuromllite.NetworkGenerator.check_to_generate_or_run", "neuromllite.RectangularRegion", "neuromllite.Network", "neuromllite.RandomLayout", "neuromllite.Cell" ]
[((511, 527), 'neuromllite.Network', 'Network', ([], {'id': '"""FN"""'}), "(id='FN')\n", (518, 527), False, 'from neuromllite import Network, Cell, Population, Synapse, RectangularRegion, RandomLayout\n'), ((1070, 1137), 'neuromllite.Cell', 'Cell', ([], {'id': '"""fn"""', 'lems_source_file': '"""FN_Definitions.xml"""', 'parameters': '{}'}), "(id='fn', lems_source_file='FN_Definitions.xml', parameters={})\n", (1074, 1137), False, 'from neuromllite import Network, Cell, Population, Synapse, RectangularRegion, RandomLayout\n'), ((1245, 1331), 'neuromllite.RectangularRegion', 'RectangularRegion', ([], {'id': '"""region1"""', 'x': '(0)', 'y': '(0)', 'z': '(0)', 'width': '(1000)', 'height': '(100)', 'depth': '(1000)'}), "(id='region1', x=0, y=0, z=0, width=1000, height=100,\n depth=1000)\n", (1262, 1331), False, 'from neuromllite import Network, Cell, Population, Synapse, RectangularRegion, RandomLayout\n'), ((1807, 2041), 'neuromllite.Simulation', 'Simulation', ([], {'id': "('Sim%s' % net.id)", 'network': 'new_file', 'duration': 'simtime', 'dt': 'dt', 'seed': '(123)', 'recordVariables': "{'V': {'all': '*'}, 'W': {'all': '*'}}", 'plots2D': "{'VW': {'x_axis': '%s/0/fn/V' % pop.id, 'y_axis': '%s/0/fn/W' % pop.id}}"}), "(id='Sim%s' % net.id, network=new_file, duration=simtime, dt=dt,\n seed=123, recordVariables={'V': {'all': '*'}, 'W': {'all': '*'}},\n plots2D={'VW': {'x_axis': '%s/0/fn/V' % pop.id, 'y_axis': '%s/0/fn/W' %\n pop.id}})\n", (1817, 2041), False, 'from neuromllite import Projection, RandomConnectivity, OneToOneConnector, Simulation, InputSource, Input\n'), ((2356, 2395), 'neuromllite.NetworkGenerator.check_to_generate_or_run', 'check_to_generate_or_run', (['sys.argv', 'sim'], {}), '(sys.argv, sim)\n', (2380, 2395), False, 'from neuromllite.NetworkGenerator import check_to_generate_or_run\n'), ((1542, 1568), 'neuromllite.RandomLayout', 'RandomLayout', ([], {'region': 'r1.id'}), '(region=r1.id)\n', (1554, 1568), False, 'from neuromllite import Network, Cell, Population, Synapse, RectangularRegion, RandomLayout\n')]
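For reference, a polynomial FitzHugh-Nagumo form that the parameter names above presumably map onto (the exact mapping lives in FN_Definitions.xml, which is not shown here, so treat this as an assumption):

\tau_v \frac{dv}{dt} = a_v v^3 + b_v v^2 + c_v v + d_v + e_v w + f_v I_{\mathrm{ext}}, \qquad
\tau_w \frac{dw}{dt} = a_w v + b_w w + c_w

With the values set in net.parameters (a_v = -1/3, c_v = 1, e_v = -1, a_w = 1, b_w = -0.8, c_w = 0.7, tau_w = 12.5) this reduces, up to the constant d_v, to the textbook system dv/dt = v - v^3/3 - w + I_ext and dw/dt = (v + 0.7 - 0.8 w)/12.5.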
""" Dump Apache HTTPD configuration to stdout. """ from os import chdir from os.path import dirname, join, normpath, realpath import sys import coils www_directory = sys.argv[1] config_fname = sys.argv[2] if len(sys.argv)>=3 else 'wabbit.conf' # Load the configuration, and add to it path to www. config = coils.Config(config_fname) config['www_directory'] = normpath(www_directory) # Go into the directory of this file. this_dir = dirname(realpath(__file__)) chdir(this_dir) # Create a httpd.conf file with text replacement as per the configuration. with open('httpd.conf.in') as inf: for line in inf.readlines(): line = line.rstrip() for key, val in config.items(): line = line.replace('@{}@'.format(key), val) print(line)
[ "os.path.realpath", "os.path.normpath", "coils.Config", "os.chdir" ]
[((310, 336), 'coils.Config', 'coils.Config', (['config_fname'], {}), '(config_fname)\n', (322, 336), False, 'import coils\n'), ((363, 386), 'os.path.normpath', 'normpath', (['www_directory'], {}), '(www_directory)\n', (371, 386), False, 'from os.path import dirname, join, normpath, realpath\n'), ((465, 480), 'os.chdir', 'chdir', (['this_dir'], {}), '(this_dir)\n', (470, 480), False, 'from os import chdir\n'), ((445, 463), 'os.path.realpath', 'realpath', (['__file__'], {}), '(__file__)\n', (453, 463), False, 'from os.path import dirname, join, normpath, realpath\n')]
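A toy illustration of the @key@ substitution loop above (the template line and value are made up, not taken from the real wabbit.conf or httpd.conf.in):

config = {"www_directory": "/srv/www"}                 # illustrative value
line = "DocumentRoot @www_directory@"
for key, val in config.items():
    line = line.replace("@{}@".format(key), val)
print(line)                                            # DocumentRoot /srv/www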
from fastapi import FastAPI from fastapi.responses import JSONResponse from database import Database app = FastAPI() database = Database() @app.get("/") def read_root(): return { "Collections": ["ports", "ssh_logins", "user_connections", "network_traffic", "storage"] } @app.get("/collection/{collection_name}") def read_item(collection_name: str): headers = {'Access-Control-Allow-Origin': '*'} documents = [] for doc in database.database[collection_name].find({}): doc.pop("_id") documents.append(doc) return JSONResponse(content=documents, headers=headers)
[ "fastapi.responses.JSONResponse", "database.Database", "fastapi.FastAPI" ]
[((109, 118), 'fastapi.FastAPI', 'FastAPI', ([], {}), '()\n', (116, 118), False, 'from fastapi import FastAPI\n'), ((130, 140), 'database.Database', 'Database', ([], {}), '()\n', (138, 140), False, 'from database import Database\n'), ((565, 613), 'fastapi.responses.JSONResponse', 'JSONResponse', ([], {'content': 'documents', 'headers': 'headers'}), '(content=documents, headers=headers)\n', (577, 613), False, 'from fastapi.responses import JSONResponse\n')]
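The handler above sets Access-Control-Allow-Origin by hand on a single route; FastAPI's CORSMiddleware is the framework-level way to apply the same policy to every route (a sketch, keeping the wide-open origin from the original):

from fastapi.middleware.cors import CORSMiddleware

app.add_middleware(CORSMiddleware, allow_origins=["*"])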
""" Script for evaluating AE examples. """ import argparse import importlib import os import shutil import sys import torch import torchvision from tqdm import tqdm from fta.utils.dataset_utils import imagenet_utils from fta.utils.torch_utils import image_utils, model_utils import pdb # Sample Usage: # CUDA_VISIBLE_DEVICES=0 python tools/evaluate.py def evaluate(args): imagenet_label_dict = imagenet_utils.load_imagenet_label_dict() target_model_type = args.target_model model_class = getattr(torchvision.models, args.target_model) model = model_class(pretrained=True).cuda() model.eval() img_mean, img_std = imagenet_utils.get_imagenet_normalize() torch_normalize = model_utils.Normalize(img_mean, img_std) img_names = os.listdir(args.benign_dir) acc_count = 0 total_count = 0 for img_name in tqdm(img_names): img_name_noext = os.path.splitext(img_name)[0] img_path_benign = os.path.join(args.benign_dir, img_name) img_benign_var = image_utils.load_img( img_path_benign, expand_batch_dim=True).cuda() img_benign_var = torch_normalize(img_benign_var) pred_benign = torch.argmax(model(img_benign_var), axis=1) pred_benign_id = pred_benign.cpu().numpy()[0] img_path_adv = os.path.join( args.adv_dir, img_name_noext + "_adv.png") if not os.path.exists(img_path_adv): print("adv image not found.") continue img_adv_var = image_utils.load_img( img_path_adv, expand_batch_dim=True).cuda() img_adv_var = torch_normalize(img_adv_var) pred_adv = torch.argmax(model(img_adv_var), axis=1) pred_adv_id = pred_adv.cpu().numpy()[0] print("ID: {0}, ori: {1}, adv: {2}".format( img_name_noext, imagenet_label_dict[pred_benign_id], imagenet_label_dict[pred_adv_id])) if pred_benign_id == pred_adv_id: acc_count += 1 total_count += 1 accuracy = float(acc_count) / float(total_count) print("Evaluate path: ", args.adv_dir) print("Target Model: ", args.target_model) print("ASR: ", 1.0 - accuracy) print("{} over {}".format(total_count - acc_count, total_count)) return def parse_args(args): parser = argparse.ArgumentParser( description="PyTorch AE evaluator.") parser.add_argument( '--benign_dir', default="./sample_images", type=str) parser.add_argument( '--adv_dir', default="./temp_outputs", type=str) parser.add_argument( '--target_model', default="resnet152", type=str) return parser.parse_args(args) def main(args=None): # parse arguments if args is None: args = sys.argv[1:] args = parse_args(args) args_dic = vars(args) evaluate(args) if __name__ == "__main__": main()
[ "fta.utils.dataset_utils.imagenet_utils.load_imagenet_label_dict", "tqdm.tqdm", "argparse.ArgumentParser", "os.path.exists", "fta.utils.torch_utils.image_utils.load_img", "fta.utils.torch_utils.model_utils.Normalize", "fta.utils.dataset_utils.imagenet_utils.get_imagenet_normalize", "os.path.splitext", "os.path.join", "os.listdir" ]
[((426, 467), 'fta.utils.dataset_utils.imagenet_utils.load_imagenet_label_dict', 'imagenet_utils.load_imagenet_label_dict', ([], {}), '()\n', (465, 467), False, 'from fta.utils.dataset_utils import imagenet_utils\n'), ((661, 700), 'fta.utils.dataset_utils.imagenet_utils.get_imagenet_normalize', 'imagenet_utils.get_imagenet_normalize', ([], {}), '()\n', (698, 700), False, 'from fta.utils.dataset_utils import imagenet_utils\n'), ((722, 762), 'fta.utils.torch_utils.model_utils.Normalize', 'model_utils.Normalize', (['img_mean', 'img_std'], {}), '(img_mean, img_std)\n', (743, 762), False, 'from fta.utils.torch_utils import image_utils, model_utils\n'), ((780, 807), 'os.listdir', 'os.listdir', (['args.benign_dir'], {}), '(args.benign_dir)\n', (790, 807), False, 'import os\n'), ((863, 878), 'tqdm.tqdm', 'tqdm', (['img_names'], {}), '(img_names)\n', (867, 878), False, 'from tqdm import tqdm\n'), ((2251, 2311), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""PyTorch AE evaluator."""'}), "(description='PyTorch AE evaluator.')\n", (2274, 2311), False, 'import argparse\n'), ((957, 996), 'os.path.join', 'os.path.join', (['args.benign_dir', 'img_name'], {}), '(args.benign_dir, img_name)\n', (969, 996), False, 'import os\n'), ((1291, 1346), 'os.path.join', 'os.path.join', (['args.adv_dir', "(img_name_noext + '_adv.png')"], {}), "(args.adv_dir, img_name_noext + '_adv.png')\n", (1303, 1346), False, 'import os\n'), ((902, 928), 'os.path.splitext', 'os.path.splitext', (['img_name'], {}), '(img_name)\n', (918, 928), False, 'import os\n'), ((1378, 1406), 'os.path.exists', 'os.path.exists', (['img_path_adv'], {}), '(img_path_adv)\n', (1392, 1406), False, 'import os\n'), ((1019, 1079), 'fta.utils.torch_utils.image_utils.load_img', 'image_utils.load_img', (['img_path_benign'], {'expand_batch_dim': '(True)'}), '(img_path_benign, expand_batch_dim=True)\n', (1039, 1079), False, 'from fta.utils.torch_utils import image_utils, model_utils\n'), ((1480, 1537), 'fta.utils.torch_utils.image_utils.load_img', 'image_utils.load_img', (['img_path_adv'], {'expand_batch_dim': '(True)'}), '(img_path_adv, expand_batch_dim=True)\n', (1500, 1537), False, 'from fta.utils.torch_utils import image_utils, model_utils\n')]
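The attack success rate printed above is simply one minus the fraction of adversarial images that keep the benign prediction:

\mathrm{ASR} = 1 - \frac{1}{N}\sum_{i=1}^{N} \mathbf{1}\big[f(x_i^{\mathrm{adv}}) = f(x_i)\big]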
#!/usr/bin/env python from __future__ import division, print_function, absolute_import from __future__ import unicode_literals def configuration(parent_package='', top_path=None): from numpy.distutils.misc_util import Configuration config = Configuration('sknano', parent_package, top_path) config.add_subpackage('apps') config.add_subpackage('core') config.add_subpackage('generators') config.add_subpackage('io') config.add_subpackage('scripts') config.add_subpackage('structures') config.add_subpackage('testing') config.add_subpackage('utils') config.add_data_dir('data') #config.make_config_py() return config if __name__ == '__main__': from numpy.distutils.core import setup setup(**configuration(top_path='').todict())
[ "numpy.distutils.misc_util.Configuration" ]
[((251, 300), 'numpy.distutils.misc_util.Configuration', 'Configuration', (['"""sknano"""', 'parent_package', 'top_path'], {}), "('sknano', parent_package, top_path)\n", (264, 300), False, 'from numpy.distutils.misc_util import Configuration\n')]
import pandas as pd
import datetime
from pathlib import Path
import numpy as np


def plus3h(data):
    columns = ['Created','First Lock', 'FirstResponse', 'Close Time']
    columns_to_3 = {column: f"{column}+3" for column in columns}
    for col in columns:
        for index, row in data.iterrows():
            row = str(row[col])
            if row == str(np.nan):
                data.loc[index, columns_to_3[col]] = 'NaN'
            else:
                time = datetime.datetime.fromisoformat(row)
                plus = time + datetime.timedelta(hours=3)
                data.loc[index, columns_to_3[col]] = plus
    return columns_to_3.values(), data


def weekend_filtration(day):
    weekday = day.weekday()
    if weekday == 5:
        modified_date = day.replace(hour= 8, minute= 0, second=0)
        return modified_date + datetime.timedelta(days=2)
    elif weekday == 6:
        modified_date = day.replace(hour= 8, minute= 0, second=0)
        return modified_date + datetime.timedelta(days=1)
    else:
        return day


def working_hours_filtration(day):
    start_of_working_hours = datetime.datetime(1900, 1, 1, 8, 0, 0)
    end_of_working_hours = datetime.datetime(1900, 1, 1, 17, 0, 0)
    if day.time() < start_of_working_hours.time():
        modifies_date2 = day.replace(hour= 8, minute= 0, second=0)
        return modifies_date2
    elif day.time() > end_of_working_hours.time():
        modifies_date2 = day.replace(hour= 8, minute= 0, second=0)
        modifies_date2 = modifies_date2 + datetime.timedelta(days=1)
        return modifies_date2
    else:
        return day


def holiday_filtration(day):
    naujieji = datetime.datetime(day.year, 1, 1)  # was hardcoded to 2022
    atkurimas = datetime.datetime(day.year, 2, 16)
    nepriklausomybes = datetime.datetime(day.year, 3, 11)
    # NOTE: the Easter dates below are those of 2022; Easter moves every year.
    velykos = datetime.datetime(day.year, 4, 17)
    velykos2 = datetime.datetime(day.year, 4, 18)
    darbininku = datetime.datetime(day.year, 5, 1)
    jonines = datetime.datetime(day.year, 6, 24)
    mindaugines = datetime.datetime(day.year, 7, 6)
    zolines = datetime.datetime(day.year, 8, 15)
    velines = datetime.datetime(day.year, 11, 1)
    velines2 = datetime.datetime(day.year, 11, 2)
    kucios = datetime.datetime(day.year, 12, 24)
    kaledos = datetime.datetime(day.year, 12, 25)
    kaledos2 = datetime.datetime(day.year, 12, 26)
    holidays_list = [naujieji, atkurimas, nepriklausomybes, velykos, velykos2, darbininku, jonines,
                     mindaugines, zolines, velines, velines2, kucios, kaledos, kaledos2]
    for holiday in holidays_list:
        while day.date() == holiday.date():
            day = day + datetime.timedelta(days=1)
            day = day.replace(hour= 8, minute= 0, second=0)
    return day


def final_time_modification(columns_list, data):
    columns_mod = {column: f"{column}mod" for column in columns_list}
    for column in columns_list:
        for index, row in data.iterrows():
            r = str(row[column])
            if r == "NaN" or r == "NaT":
                data.loc[index, columns_mod[column]] = r
            else:
                formated_date = datetime.datetime.fromisoformat(r)
                not_holiday = holiday_filtration(formated_date)
                not_weekend = weekend_filtration(not_holiday)
                working_hours = working_hours_filtration(not_weekend)
                data.loc[index, columns_mod[column]] = working_hours
    return columns_mod.values(), data


# Solution Time: created - first response
# Intake Time: created - first lock
def delta_counter(index, final_data, first, name, created, close):
    if (first == 'NaN' or first == 'NaT') and (close == 'NaN' or close == 'NaT'):
        final_data.loc[index, name] = first
    elif (first == 'NaN' or first == 'NaT') and (close != 'NaN' and close != 'NaT'):  # was "or", which made the check vacuous
        creat = datetime.datetime.fromisoformat(created)
        clo = datetime.datetime.fromisoformat(close)
        if creat.date() == clo.date():
            rezult = clo - creat
            final_data.loc[index, name] = rezult
        else:
            sum = datetime.timedelta()
            creat2 = creat
            end_of_working_hours = datetime.datetime(year=creat2.year,
month=creat2.month, day = creat2.day, hour= 17, minute = 0, second = 0) delta_creat2 = end_of_working_hours - creat2 sum = sum + delta_creat2 while creat2.date() < clo.date() and creat2.date() + datetime.timedelta(days=1) < clo.date(): creat2 = creat2 + datetime.timedelta(days=1) not_holiday = holiday_filtration(creat2) not_weekend = weekend_filtration(not_holiday) creat2 = not_weekend if creat2.date() + datetime.timedelta(days=1) > clo.date(): break sum = sum + datetime.timedelta(hours=8) start_of_working_hours = datetime.datetime(year=clo.year, month=clo.month, day = clo.day, hour= 8, minute = 0, second = 0) delta_closed = clo - start_of_working_hours sum = sum + delta_closed final_data.loc[index, name] = sum else: creat = datetime.datetime.fromisoformat(created) first = datetime.datetime.fromisoformat(first) if creat.date() == first.date(): rezult = first - creat final_data.loc[index, name] = rezult else: sum = datetime.timedelta() creat2 = creat end_of_working_hours = datetime.datetime(year=creat2.year, month=creat2.month, day = creat2.day, hour= 17, minute = 0, second = 0) delta_creat2 = end_of_working_hours - creat2 sum = sum + delta_creat2 while creat2.date() < first.date() and creat2.date() + datetime.timedelta(days=1) < first.date(): creat2 = creat2 + datetime.timedelta(days=1) not_holiday = holiday_filtration(creat2) not_weekend = weekend_filtration(not_holiday) creat2 = not_weekend if creat2.date() + datetime.timedelta(days=1) > first.date(): break sum = sum + datetime.timedelta(hours=8) start_of_working_hours = datetime.datetime(year=first.year, month=first.month, day = first.day, hour= 8, minute = 0, second = 0) delta_closed = first - start_of_working_hours sum = sum + delta_closed final_data.loc[index, name] = sum return final_data def sol_int_counter(final_data): for index, row in final_data.iterrows(): created = str(row['Created+3mod']) close = str(row['Close Time+3mod']) first_restonse = str(row['FirstResponse+3mod']) first_lock = str(row['First Lock+3mod']) delta_counter(index, final_data, first_restonse,'First Response - Created', created, close) delta_counter(index, final_data, first_lock, 'First Lock - Created', created, close) return final_data def date(data3): for index, row in data3.iterrows(): r = str(row['Created']) x = datetime.datetime.fromisoformat(r) date_numbers = x.isocalendar() month = x.month data3.loc[index,'Month'] = month data3.loc[index,'Year'] = int(date_numbers[0]) data3.loc[index,'Week'] = int(date_numbers[1]) return data3 class CsvFiltration(): def first_filtration_GB(self, extract_direktorija, filter_direktorija_GB): base = pd.read_csv(f'{filter_direktorija_GB}/base_GB.csv', delimiter=';', engine='python').set_index('TecReq#') file_location = extract_direktorija files = list(file_location.glob("*.csv*")) for x in files: data = pd.read_csv(x, delimiter=';', engine='python') df1 = data[['TecReq#', 'Created', 'First Lock', 'FirstResponse', 'Close Time', 'Queue', 'Owner Country Code', 'State', 'Number of Articles', 'Needed knowledge level', 'Case CV']] df2 = df1.loc[df1['State'] != "merged"] final_data = df2.loc[df2['Queue'].str.contains("TH_GB|TH_IE")] colums_list, data1 = plus3h(final_data) mod_list, data2 = final_time_modification(colums_list, data1) data3 = sol_int_counter(data2) final_data = date(data3) new_data = final_data[['TecReq#', 'Year','Month', 'Week', 'Created+3mod', 'First Lock+3mod', 'FirstResponse+3mod', 'Close Time+3mod', 'First Response - Created', 'First Lock - Created', 'Queue', 'Owner Country Code', 'State', 'Number of Articles', 
                                   'Needed knowledge level', 'Case CV']]
            base = base.combine_first(new_data.set_index('TecReq#'))
        base.to_csv(f"{filter_direktorija_GB}/base_GB.csv", sep=';')
        print("base was updated")

    def first_filtration(self, extract_direktorija, filter_direktorija):
        base = pd.read_csv(f'{filter_direktorija}/base.csv', delimiter=';', engine='python').set_index('TecReq#')
        file_location = extract_direktorija
        files = list(file_location.glob("*.csv*"))
        for x in files:
            data = pd.read_csv(x, delimiter=';', engine='python')
            df1 = data[['TecReq#', 'Created', 'First Lock', 'FirstResponse', 'Close Time', 'Queue',
                        'Owner Country Code', 'State', 'Number of Articles', 'Needed knowledge level', 'Case CV']]
            df2 = df1.loc[df1['State'] != "merged"]
            final_data = df2.loc[df2['Queue'].str.contains("TH_DE|TH_AT|TH_CH|TH_IT")]
            colums_list, data1 = plus3h(final_data)
            mod_list, data2 = final_time_modification(colums_list, data1)
            data3 = sol_int_counter(data2)
            final_data = date(data3)
            new_data = final_data[['TecReq#', 'Year','Month', 'Week', 'Created+3mod', 'First Lock+3mod',
                                   'FirstResponse+3mod', 'Close Time+3mod', 'First Response - Created',
                                   'First Lock - Created', 'Queue', 'Owner Country Code', 'State', 'Number of Articles',
                                   'Needed knowledge level', 'Case CV']]
            base = base.combine_first(new_data.set_index('TecReq#'))
        base.to_csv(f"{filter_direktorija}/base.csv", sep=';')
        print("base was updated")
[ "pandas.read_csv", "datetime.datetime.fromisoformat", "datetime.timedelta", "datetime.datetime" ]
[((1117, 1155), 'datetime.datetime', 'datetime.datetime', (['(1900)', '(1)', '(1)', '(8)', '(0)', '(0)'], {}), '(1900, 1, 1, 8, 0, 0)\n', (1134, 1155), False, 'import datetime\n'), ((1183, 1222), 'datetime.datetime', 'datetime.datetime', (['(1900)', '(1)', '(1)', '(17)', '(0)', '(0)'], {}), '(1900, 1, 1, 17, 0, 0)\n', (1200, 1222), False, 'import datetime\n'), ((1665, 1694), 'datetime.datetime', 'datetime.datetime', (['(2022)', '(1)', '(1)'], {}), '(2022, 1, 1)\n', (1682, 1694), False, 'import datetime\n'), ((1711, 1745), 'datetime.datetime', 'datetime.datetime', (['day.year', '(2)', '(16)'], {}), '(day.year, 2, 16)\n', (1728, 1745), False, 'import datetime\n'), ((1769, 1803), 'datetime.datetime', 'datetime.datetime', (['day.year', '(3)', '(11)'], {}), '(day.year, 3, 11)\n', (1786, 1803), False, 'import datetime\n'), ((1818, 1852), 'datetime.datetime', 'datetime.datetime', (['day.year', '(4)', '(17)'], {}), '(day.year, 4, 17)\n', (1835, 1852), False, 'import datetime\n'), ((1868, 1902), 'datetime.datetime', 'datetime.datetime', (['day.year', '(4)', '(18)'], {}), '(day.year, 4, 18)\n', (1885, 1902), False, 'import datetime\n'), ((1920, 1953), 'datetime.datetime', 'datetime.datetime', (['day.year', '(5)', '(1)'], {}), '(day.year, 5, 1)\n', (1937, 1953), False, 'import datetime\n'), ((1968, 2002), 'datetime.datetime', 'datetime.datetime', (['day.year', '(6)', '(24)'], {}), '(day.year, 6, 24)\n', (1985, 2002), False, 'import datetime\n'), ((2021, 2054), 'datetime.datetime', 'datetime.datetime', (['day.year', '(7)', '(6)'], {}), '(day.year, 7, 6)\n', (2038, 2054), False, 'import datetime\n'), ((2069, 2103), 'datetime.datetime', 'datetime.datetime', (['day.year', '(8)', '(15)'], {}), '(day.year, 8, 15)\n', (2086, 2103), False, 'import datetime\n'), ((2118, 2152), 'datetime.datetime', 'datetime.datetime', (['day.year', '(11)', '(1)'], {}), '(day.year, 11, 1)\n', (2135, 2152), False, 'import datetime\n'), ((2168, 2202), 'datetime.datetime', 'datetime.datetime', (['day.year', '(11)', '(2)'], {}), '(day.year, 11, 2)\n', (2185, 2202), False, 'import datetime\n'), ((2216, 2251), 'datetime.datetime', 'datetime.datetime', (['day.year', '(12)', '(24)'], {}), '(day.year, 12, 24)\n', (2233, 2251), False, 'import datetime\n'), ((2266, 2301), 'datetime.datetime', 'datetime.datetime', (['day.year', '(12)', '(25)'], {}), '(day.year, 12, 25)\n', (2283, 2301), False, 'import datetime\n'), ((2317, 2352), 'datetime.datetime', 'datetime.datetime', (['day.year', '(12)', '(26)'], {}), '(day.year, 12, 26)\n', (2334, 2352), False, 'import datetime\n'), ((7128, 7162), 'datetime.datetime.fromisoformat', 'datetime.datetime.fromisoformat', (['r'], {}), '(r)\n', (7159, 7162), False, 'import datetime\n'), ((847, 873), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': '(2)'}), '(days=2)\n', (865, 873), False, 'import datetime\n'), ((3842, 3882), 'datetime.datetime.fromisoformat', 'datetime.datetime.fromisoformat', (['created'], {}), '(created)\n', (3873, 3882), False, 'import datetime\n'), ((3897, 3935), 'datetime.datetime.fromisoformat', 'datetime.datetime.fromisoformat', (['close'], {}), '(close)\n', (3928, 3935), False, 'import datetime\n'), ((5174, 5214), 'datetime.datetime.fromisoformat', 'datetime.datetime.fromisoformat', (['created'], {}), '(created)\n', (5205, 5214), False, 'import datetime\n'), ((5231, 5269), 'datetime.datetime.fromisoformat', 'datetime.datetime.fromisoformat', (['first'], {}), '(first)\n', (5262, 5269), False, 'import datetime\n'), ((7758, 7804), 'pandas.read_csv', 'pd.read_csv', (['x'], {'delimiter': '""";"""', 'engine': '"""python"""'}), "(x, delimiter=';', engine='python')\n", (7769, 7804), True, 'import pandas as pd\n'), ((9232, 9278), 'pandas.read_csv', 'pd.read_csv', (['x'], {'delimiter': '""";"""', 'engine': '"""python"""'}), "(x, delimiter=';', engine='python')\n", (9243, 9278), True, 'import pandas as pd\n'), ((468, 504), 'datetime.datetime.fromisoformat', 'datetime.datetime.fromisoformat', (['row'], {}), '(row)\n', (499, 504), False, 'import datetime\n'), ((995, 1021), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': '(1)'}), '(days=1)\n', (1013, 1021), False, 'import datetime\n'), ((1533, 1559), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': '(1)'}), '(days=1)\n', (1551, 1559), False, 'import datetime\n'), ((2638, 2664), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': '(1)'}), '(days=1)\n', (2656, 2664), False, 'import datetime\n'), ((3121, 3155), 'datetime.datetime.fromisoformat', 'datetime.datetime.fromisoformat', (['r'], {}), '(r)\n', (3152, 3155), False, 'import datetime\n'), ((4090, 4110), 'datetime.timedelta', 'datetime.timedelta', ([], {}), '()\n', (4108, 4110), False, 'import datetime\n'), ((4179, 4283), 'datetime.datetime', 'datetime.datetime', ([], {'year': 'creat2.year', 'month': 'creat2.month', 'day': 'creat2.day', 'hour': '(17)', 'minute': '(0)', 'second': '(0)'}), '(year=creat2.year, month=creat2.month, day=creat2.day,\n hour=17, minute=0, second=0)\n', (4196, 4283), False, 'import datetime\n'), ((4910, 5004), 'datetime.datetime', 'datetime.datetime', ([], {'year': 'clo.year', 'month': 'clo.month', 'day': 'clo.day', 'hour': '(8)', 'minute': '(0)', 'second': '(0)'}), '(year=clo.year, month=clo.month, day=clo.day, hour=8,\n minute=0, second=0)\n', (4927, 5004), False, 'import datetime\n'), ((5428, 5448), 'datetime.timedelta', 'datetime.timedelta', ([], {}), '()\n', (5446, 5448), False, 'import datetime\n'), ((5516, 5620), 'datetime.datetime', 'datetime.datetime', ([], {'year': 'creat2.year', 'month': 'creat2.month', 'day': 'creat2.day', 'hour': '(17)', 'minute': '(0)', 'second': '(0)'}), '(year=creat2.year, month=creat2.month, day=creat2.day,\n hour=17, minute=0, second=0)\n', (5533, 5620), False, 'import datetime\n'), ((6253, 6353), 'datetime.datetime', 'datetime.datetime', ([], {'year': 'first.year', 'month': 'first.month', 'day': 'first.day', 'hour': '(8)', 'minute': '(0)', 'second': '(0)'}), '(year=first.year, month=first.month, day=first.day, hour=8,\n minute=0, second=0)\n', (6270, 6353), False, 'import datetime\n'), ((7514, 7602), 'pandas.read_csv', 'pd.read_csv', (['f"""{filter_direktorija_GB}/base_GB.csv"""'], {'delimiter': '""";"""', 'engine': '"""python"""'}), "(f'{filter_direktorija_GB}/base_GB.csv', delimiter=';', engine=\n 'python')\n", (7525, 7602), True, 'import pandas as pd\n'), ((8994, 9071), 'pandas.read_csv', 'pd.read_csv', (['f"""{filter_direktorija}/base.csv"""'], {'delimiter': '""";"""', 'engine': '"""python"""'}), "(f'{filter_direktorija}/base.csv', delimiter=';', engine='python')\n", (9005, 9071), True, 'import pandas as pd\n'), ((536, 563), 'datetime.timedelta', 'datetime.timedelta', ([], {'hours': '(3)'}), '(hours=3)\n', (554, 563), False, 'import datetime\n'), ((4522, 4548), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': '(1)'}), '(days=1)\n', (4540, 4548), False, 'import datetime\n'), ((4844, 4871), 'datetime.timedelta', 'datetime.timedelta', ([], {'hours': '(8)'}), '(hours=8)\n', (4862, 4871), False, 'import datetime\n'), ((5863, 5889), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': '(1)'}), '(days=1)\n', (5881, 5889), False, 'import datetime\n'), ((6187, 6214), 'datetime.timedelta', 'datetime.timedelta', ([], {'hours': '(8)'}), '(hours=8)\n', (6205, 6214), False, 'import datetime\n'), ((4447, 4473), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': '(1)'}), '(days=1)\n', (4465, 4473), False, 'import datetime\n'), ((4741, 4767), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': '(1)'}), '(days=1)\n', (4759, 4767), False, 'import datetime\n'), ((5786, 5812), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': '(1)'}), '(days=1)\n', (5804, 5812), False, 'import datetime\n'), ((6082, 6108), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': '(1)'}), '(days=1)\n', (6100, 6108), False, 'import datetime\n')]
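# The extracted calls above pin an 08:00-17:00 working window, a fixed-date
# holiday list, and timedelta arithmetic over created/close timestamps. A
# minimal, self-contained sketch of that kind of business-hours computation
# (all names below are hypothetical, not taken from the source file):
import datetime

WORK_START = datetime.time(8, 0)
WORK_END = datetime.time(17, 0)
HOLIDAYS = {(2, 16), (3, 11), (5, 1), (7, 6), (8, 15), (11, 1), (12, 25)}

def is_workday(day):
    # Weekends and fixed-date holidays carry no working time.
    return day.weekday() < 5 and (day.month, day.day) not in HOLIDAYS

def business_time(start, end):
    # Sum only the 08:00-17:00 portion of each workday between start and end.
    total = datetime.timedelta()
    day = start.date()
    while day <= end.date():
        if is_workday(day):
            window_start = max(start, datetime.datetime.combine(day, WORK_START))
            window_end = min(end, datetime.datetime.combine(day, WORK_END))
            if window_end > window_start:
                total += window_end - window_start
        day += datetime.timedelta(days=1)
    return total

print(business_time(datetime.datetime(2022, 3, 10, 16, 0),
                    datetime.datetime(2022, 3, 14, 9, 0)))  # 2:00:00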
import os import pytest @pytest.fixture(scope="session") def settings(): return { "HOST": "solr", "PORT": 8983, "PROTO": "http://", "SOLR_USER": os.environ["SOLR_USER"], "SOLR_PASS": os.environ["SOLR_PASS"], "SERVER_PATH": "solr", "SOLR_BASE_URL": "http://solr:8983/solr", "SOLR_CONNECTION_URI": "solr://solr:8983/solr", "SOLR_WORKER_COLLECTION_NAME": "sales_test_", "SUPERSET_URI": "http://superset:8088", "SUPERSET_USER": os.environ["SUPERSET_USER"], "SUPERSET_PASS": os.environ["SUPERSET_PASS"], "SUPERSET_DATABASE_NAME": "sales_test_", }
[ "pytest.fixture" ]
[((28, 59), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""session"""'}), "(scope='session')\n", (42, 59), False, 'import pytest\n')]
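# A sketch of how a test might consume the session-scoped fixture above: the
# composite SOLR_BASE_URL should agree with its component settings. The test
# name is illustrative, not from the project.
def test_base_url_matches_parts(settings):
    expected = "{PROTO}{HOST}:{PORT}/{SERVER_PATH}".format(**settings)
    assert settings["SOLR_BASE_URL"] == expected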
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""This module contains Google Cloud Storage to Trino operator."""
import csv
import json
from tempfile import NamedTemporaryFile
from typing import TYPE_CHECKING, Iterable, Optional, Sequence, Union

from airflow.models import BaseOperator
from airflow.providers.google.cloud.hooks.gcs import GCSHook
from airflow.providers.trino.hooks.trino import TrinoHook

if TYPE_CHECKING:
    from airflow.utils.context import Context


class GCSToTrinoOperator(BaseOperator):
    """
    Loads a csv file from Google Cloud Storage into a Trino table.

    Assumptions:
    1. CSV file should not have headers
    2. Trino table with requisite columns is already created
    3. Optionally, a separate JSON file with headers can be provided

    :param source_bucket: Source GCS bucket that contains the csv
    :param source_object: csv file including the path
    :param trino_table: trino table to upload the data
    :param trino_conn_id: destination trino connection
    :param gcp_conn_id: (Optional) The connection ID used to connect to Google Cloud and
        interact with the Google Cloud Storage service.
    :param schema_fields: The names of the columns to fill in the table. If schema_fields is
        provided, any path provided in the schema object will be ignored.
    :param schema_object: JSON file with schema fields
    :param delegate_to: The account to impersonate using domain-wide delegation of authority,
        if any. For this to work, the service account making the request must have
        domain-wide delegation enabled.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity,
        with first account from the list granting this role to the originating account.
""" template_fields: Sequence[str] = ( 'source_bucket', 'source_object', 'trino_table', ) def __init__( self, *, source_bucket: str, source_object: str, trino_table: str, trino_conn_id: str = "trino_default", gcp_conn_id: str = "google_cloud_default", schema_fields: Optional[Iterable[str]] = None, schema_object: Optional[str] = None, delegate_to: Optional[str] = None, impersonation_chain: Optional[Union[str, Sequence[str]]] = None, **kwargs, ) -> None: super().__init__(**kwargs) self.source_bucket = source_bucket self.source_object = source_object self.trino_table = trino_table self.trino_conn_id = trino_conn_id self.gcp_conn_id = gcp_conn_id self.schema_fields = schema_fields self.schema_object = schema_object self.delegate_to = delegate_to self.impersonation_chain = impersonation_chain def execute(self, context: 'Context') -> None: gcs_hook = GCSHook( gcp_conn_id=self.gcp_conn_id, delegate_to=self.delegate_to, impersonation_chain=self.impersonation_chain, ) trino_hook = TrinoHook(trino_conn_id=self.trino_conn_id) with NamedTemporaryFile("w+") as temp_file: self.log.info("Downloading data from %s", self.source_object) gcs_hook.download( bucket_name=self.source_bucket, object_name=self.source_object, filename=temp_file.name, ) data = csv.reader(temp_file) rows = (tuple(row) for row in data) self.log.info("Inserting data into %s", self.trino_table) if self.schema_fields: trino_hook.insert_rows(table=self.trino_table, rows=rows, target_fields=self.schema_fields) elif self.schema_object: blob = gcs_hook.download( bucket_name=self.source_bucket, object_name=self.schema_object, ) schema_fields = json.loads(blob.decode("utf-8")) trino_hook.insert_rows(table=self.trino_table, rows=rows, target_fields=schema_fields) else: trino_hook.insert_rows(table=self.trino_table, rows=rows)
[ "airflow.providers.google.cloud.hooks.gcs.GCSHook", "tempfile.NamedTemporaryFile", "csv.reader", "airflow.providers.trino.hooks.trino.TrinoHook" ]
[((4038, 4155), 'airflow.providers.google.cloud.hooks.gcs.GCSHook', 'GCSHook', ([], {'gcp_conn_id': 'self.gcp_conn_id', 'delegate_to': 'self.delegate_to', 'impersonation_chain': 'self.impersonation_chain'}), '(gcp_conn_id=self.gcp_conn_id, delegate_to=self.delegate_to,\n impersonation_chain=self.impersonation_chain)\n', (4045, 4155), False, 'from airflow.providers.google.cloud.hooks.gcs import GCSHook\n'), ((4221, 4264), 'airflow.providers.trino.hooks.trino.TrinoHook', 'TrinoHook', ([], {'trino_conn_id': 'self.trino_conn_id'}), '(trino_conn_id=self.trino_conn_id)\n', (4230, 4264), False, 'from airflow.providers.trino.hooks.trino import TrinoHook\n'), ((4279, 4303), 'tempfile.NamedTemporaryFile', 'NamedTemporaryFile', (['"""w+"""'], {}), "('w+')\n", (4297, 4303), False, 'from tempfile import NamedTemporaryFile\n'), ((4594, 4615), 'csv.reader', 'csv.reader', (['temp_file'], {}), '(temp_file)\n', (4604, 4615), False, 'import csv\n')]
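# Usage sketch for the operator defined above, wired into a minimal DAG.
# Bucket, object, and table names are placeholders, and the import path
# assumes the module lives at its usual provider location.
import datetime
from airflow import DAG
from airflow.providers.google.cloud.transfers.gcs_to_trino import GCSToTrinoOperator

with DAG(
    dag_id="gcs_to_trino_example",
    start_date=datetime.datetime(2022, 1, 1),
    schedule_interval=None,
) as dag:
    load_csv = GCSToTrinoOperator(
        task_id="load_sales_csv",
        source_bucket="example-bucket",
        source_object="exports/sales.csv",
        trino_table="analytics.sales",
        # A JSON list of column names stored next to the data file.
        schema_object="exports/sales_schema.json",
    )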
# -*- coding: utf-8 -*-
"""
Transmission Line helper functions
"""
import numpy as np

def ZL_2_Zin(L,Z0,gamma,ZL):
    """
    Returns the input impedance seen through a lossy transmission line of
    characteristic impedance Z0 and complex wavenumber gamma=alpha+j*beta

    Zin = ZL_2_Zin(L,Z0,gamma,ZL)

    Args
    ----
    L : length [m] of the transmission line
    Z0: characteristic impedance of the transmission line
    gamma: complex wavenumber associated to the transmission line
    ZL: Load impedance

    Returns
    -------
    Zin: input impedance
    """
    assert L > 0
    assert Z0 > 0
    Zin = Z0*(ZL + Z0*np.tanh(gamma*L))/(Z0 + ZL*np.tanh(gamma*L))
    return Zin

def transfer_matrix(L,V0,I0,Z0,gamma):
    """
    Returns the voltage and the current at a distance L from an initial
    voltage V0 and current I0 on a transmission line whose propagation
    constant is gamma.

    VL, IL = transfer_matrix(L,V0,I0,Z0,gamma)

    L is positive from the load toward the generator

    Args
    -----
    L : transmission line length [m]
    V0: initial voltage [V]
    I0: initial current [A]
    Z0 : characteristic impedance of the transmission line
    gamma: =alpha+j*beta propagation constant of the transmission line

    Returns
    --------
    VL: voltage at length L
    IL: current at length L
    """
    if Z0 <= 0:
        raise ValueError("Z0 must be positive")
    transfer_matrix = np.array([[np.cosh(gamma*L), Z0*np.sinh(gamma*L)],
                                [np.sinh(gamma*L)/Z0, np.cosh(gamma*L)]])
    U = np.array([V0,I0])
    A = transfer_matrix @ U
    VL = A[0]
    IL = A[1]
    return VL, IL

def V0f_2_VL(L, V0f, gamma, reflection_coefficient):
    """
    Propagation of the voltage at a distance L from the forward voltage
    and reflection coefficient

    VL = V0f_2_VL(L, V0f, gamma, reflection_coefficient)

    Args
    ----
    L : Transmission Line Length [m]
    V0f : forward voltage [V]
    gamma : Transmission Line Complex Propagation Constant [1]
    reflection_coefficient : complex reflection coefficient [1]

    Returns
    --------
    VL : (total) voltage at length L
    """
    assert L > 0
    # gamma and the reflection coefficient are complex quantities, so
    # ordering comparisons such as `gamma > 0` would raise a TypeError;
    # no sign check is meaningful here.
    VL = V0f*(np.exp(-gamma*L) + reflection_coefficient*np.exp(+gamma*L))
    return VL
[ "numpy.tanh", "numpy.array", "numpy.exp", "numpy.cosh", "numpy.sinh" ]
[((1595, 1613), 'numpy.array', 'np.array', (['[V0, I0]'], {}), '([V0, I0])\n', (1603, 1613), True, 'import numpy as np\n'), ((2315, 2333), 'numpy.exp', 'np.exp', (['(-gamma * L)'], {}), '(-gamma * L)\n', (2321, 2333), True, 'import numpy as np\n'), ((686, 704), 'numpy.tanh', 'np.tanh', (['(gamma * L)'], {}), '(gamma * L)\n', (693, 704), True, 'import numpy as np\n'), ((1472, 1490), 'numpy.cosh', 'np.cosh', (['(gamma * L)'], {}), '(gamma * L)\n', (1479, 1490), True, 'import numpy as np\n'), ((1567, 1585), 'numpy.cosh', 'np.cosh', (['(gamma * L)'], {}), '(gamma * L)\n', (1574, 1585), True, 'import numpy as np\n'), ((2357, 2375), 'numpy.exp', 'np.exp', (['(+gamma * L)'], {}), '(+gamma * L)\n', (2363, 2375), True, 'import numpy as np\n'), ((659, 677), 'numpy.tanh', 'np.tanh', (['(gamma * L)'], {}), '(gamma * L)\n', (666, 677), True, 'import numpy as np\n'), ((1493, 1511), 'numpy.sinh', 'np.sinh', (['(gamma * L)'], {}), '(gamma * L)\n', (1500, 1511), True, 'import numpy as np\n'), ((1546, 1564), 'numpy.sinh', 'np.sinh', (['(gamma * L)'], {}), '(gamma * L)\n', (1553, 1564), True, 'import numpy as np\n')]
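# Usage sketch for the helpers above: the input impedance of a 2 m length of
# 50-ohm line terminated in 75 ohms. The loss and phase constants are
# illustrative values only, not from any referenced design.
gamma_example = 0.01 + 1j * 20.0   # alpha [Np/m] + j*beta [rad/m]
Zin_example = ZL_2_Zin(L=2.0, Z0=50.0, gamma=gamma_example, ZL=75.0)
print(Zin_example)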
import json


def main():
    with open('chips.csv', 'r') as f:
        data = f.read()

    # Each product type is delimited by a $$TYPE$$ marker; drop the
    # newline-only fragments between consecutive markers.
    raw_types = [
        t.split('\n')
        for t in data.split('$$TYPE$$')
        if t != '\n'
    ]

    items = []
    for t in raw_types:
        type_name = t[0].strip()
        print(type_name)
        # After split('\n'), blank lines show up as '' (never as '\n').
        raw_items = [item.split('\t') for item in t[1:] if item != '']
        for item in raw_items:
            if len(item) != 4:
                continue
            (name, oz, upc, case) = item
            items.append({'type': type_name, 'name': name, 'oz': oz,
                          'upc': upc, 'case': case})

    with open('chips.json', 'w') as f:
        json.dump(items, f, indent=2)


if __name__ == "__main__":
    main()
[ "json.dump" ]
[((652, 681), 'json.dump', 'json.dump', (['items', 'f'], {'indent': '(2)'}), '(items, f, indent=2)\n', (661, 681), False, 'import json\n')]
import unittest from lab1 import get_max from lab1 import reverse from lab1 import search from lab1 import fib from lab1 import factorial_iter from lab1 import factorial_rec class MyTest(unittest.TestCase): def runTest(self): with self.subTest(msg="testing get_max"): self.test_get_max() with self.subTest(msg="testing reverse"): self.test_reverse() with self.subTest(msg="testing search"): self.test_search() with self.subTest(msg="testing fib"): self.test_fib() with self.subTest(msg="testing factorial"): self.test_factorial() def test_get_max(self): arr = [1,2,3,4,5] self.assertEqual(get_max(arr), 5) arr = [1, 1, 1, 0] self.assertEqual(get_max(arr), 1) self.assertEqual(get_max([]), None) def test_reverse(self): self.assertEqual(reverse("qweEerty"), "ytreEewq") self.assertEqual(reverse("aa"), "aa") self.assertEqual(reverse("a"), "a") self.assertEqual(reverse(""), "") def test_search(self): arr = [1,2,3,4,5] self.assertEqual(search(arr, 5), 4) arr = [1,2,3,4,5] self.assertEqual(search(arr, 2), 1) arr = [1, 1, 1] self.assertEqual(search(arr, 5), None) arr = [] self.assertEqual(search(arr, 5), None) def test_fib(self): def fib_numbers(n): sequence = [] for i in range(n+1): sequence.append(fib(i)) return sequence #this will test your fib function by calling it multiple times self.assertEqual(fib_numbers(10), [0, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55]) def test_factorial(self): self.assertEqual(factorial_iter(0), 1) self.assertEqual(factorial_iter(1), 1) self.assertEqual(factorial_iter(3), 6) self.assertEqual(factorial_rec(0), 1) self.assertEqual(factorial_rec(1), 1) self.assertEqual(factorial_rec(3), 6) def get_score(max_score, result): score = max_score for error in result.errors: #print("-10 points for ", error[1]) score -= 30 for failure in result.failures: #print("-5 points for ", failure[1]) score -= 5 return max(0, score) def main(): runner = unittest.TextTestRunner() result = runner.run(MyTest()) score = get_score(90, result) print("SCORE:{%s}\n" % (score)) return score if __name__ == '__main__': main()
[ "lab1.search", "unittest.TextTestRunner", "lab1.factorial_rec", "lab1.reverse", "lab1.fib", "lab1.factorial_iter", "lab1.get_max" ]
[((2342, 2367), 'unittest.TextTestRunner', 'unittest.TextTestRunner', ([], {}), '()\n', (2365, 2367), False, 'import unittest\n'), ((716, 728), 'lab1.get_max', 'get_max', (['arr'], {}), '(arr)\n', (723, 728), False, 'from lab1 import get_max\n'), ((785, 797), 'lab1.get_max', 'get_max', (['arr'], {}), '(arr)\n', (792, 797), False, 'from lab1 import get_max\n'), ((827, 838), 'lab1.get_max', 'get_max', (['[]'], {}), '([])\n', (834, 838), False, 'from lab1 import get_max\n'), ((900, 919), 'lab1.reverse', 'reverse', (['"""qweEerty"""'], {}), "('qweEerty')\n", (907, 919), False, 'from lab1 import reverse\n'), ((958, 971), 'lab1.reverse', 'reverse', (['"""aa"""'], {}), "('aa')\n", (965, 971), False, 'from lab1 import reverse\n'), ((1004, 1016), 'lab1.reverse', 'reverse', (['"""a"""'], {}), "('a')\n", (1011, 1016), False, 'from lab1 import reverse\n'), ((1048, 1059), 'lab1.reverse', 'reverse', (['""""""'], {}), "('')\n", (1055, 1059), False, 'from lab1 import reverse\n'), ((1144, 1158), 'lab1.search', 'search', (['arr', '(5)'], {}), '(arr, 5)\n', (1150, 1158), False, 'from lab1 import search\n'), ((1214, 1228), 'lab1.search', 'search', (['arr', '(2)'], {}), '(arr, 2)\n', (1220, 1228), False, 'from lab1 import search\n'), ((1282, 1296), 'lab1.search', 'search', (['arr', '(5)'], {}), '(arr, 5)\n', (1288, 1296), False, 'from lab1 import search\n'), ((1346, 1360), 'lab1.search', 'search', (['arr', '(5)'], {}), '(arr, 5)\n', (1352, 1360), False, 'from lab1 import search\n'), ((1782, 1799), 'lab1.factorial_iter', 'factorial_iter', (['(0)'], {}), '(0)\n', (1796, 1799), False, 'from lab1 import factorial_iter\n'), ((1829, 1846), 'lab1.factorial_iter', 'factorial_iter', (['(1)'], {}), '(1)\n', (1843, 1846), False, 'from lab1 import factorial_iter\n'), ((1876, 1893), 'lab1.factorial_iter', 'factorial_iter', (['(3)'], {}), '(3)\n', (1890, 1893), False, 'from lab1 import factorial_iter\n'), ((1923, 1939), 'lab1.factorial_rec', 'factorial_rec', (['(0)'], {}), '(0)\n', (1936, 1939), False, 'from lab1 import factorial_rec\n'), ((1969, 1985), 'lab1.factorial_rec', 'factorial_rec', (['(1)'], {}), '(1)\n', (1982, 1985), False, 'from lab1 import factorial_rec\n'), ((2015, 2031), 'lab1.factorial_rec', 'factorial_rec', (['(3)'], {}), '(3)\n', (2028, 2031), False, 'from lab1 import factorial_rec\n'), ((1512, 1518), 'lab1.fib', 'fib', (['i'], {}), '(i)\n', (1515, 1518), False, 'from lab1 import fib\n')]
from __future__ import print_function import numpy as np from sklearn.preprocessing import Normalizer # For reproducibility np.random.seed(1000) if __name__ == '__main__': # Create a dummy dataset data = np.array([1.0, 2.0]) print(data) # Max normalization n_max = Normalizer(norm='max') nm = n_max.fit_transform(data.reshape(1, -1)) print(nm) # L1 normalization n_l1 = Normalizer(norm='l1') nl1 = n_l1.fit_transform(data.reshape(1, -1)) print(nl1) # L2 normalization n_l2 = Normalizer(norm='l2') nl2 = n_l2.fit_transform(data.reshape(1, -1)) print(nl2)
[ "numpy.array", "sklearn.preprocessing.Normalizer", "numpy.random.seed" ]
[((134, 154), 'numpy.random.seed', 'np.random.seed', (['(1000)'], {}), '(1000)\n', (148, 154), True, 'import numpy as np\n'), ((227, 247), 'numpy.array', 'np.array', (['[1.0, 2.0]'], {}), '([1.0, 2.0])\n', (235, 247), True, 'import numpy as np\n'), ((305, 327), 'sklearn.preprocessing.Normalizer', 'Normalizer', ([], {'norm': '"""max"""'}), "(norm='max')\n", (315, 327), False, 'from sklearn.preprocessing import Normalizer\n'), ((432, 453), 'sklearn.preprocessing.Normalizer', 'Normalizer', ([], {'norm': '"""l1"""'}), "(norm='l1')\n", (442, 453), False, 'from sklearn.preprocessing import Normalizer\n'), ((559, 580), 'sklearn.preprocessing.Normalizer', 'Normalizer', ([], {'norm': '"""l2"""'}), "(norm='l2')\n", (569, 580), False, 'from sklearn.preprocessing import Normalizer\n')]
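# What the three norms above compute, checked by hand with numpy: 'max'
# divides by the largest magnitude, 'l1' by the sum of magnitudes, and
# 'l2' by the Euclidean length of the sample vector.
import numpy as np

x = np.array([1.0, 2.0])
print(x / np.abs(x).max())        # max norm -> [0.5, 1.0]
print(x / np.abs(x).sum())        # l1 norm  -> [0.3333..., 0.6666...]
print(x / np.sqrt((x ** 2).sum()))  # l2 norm  -> [0.4472..., 0.8944...]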
# Generated by Django 2.1.5 on 2019-01-22 20:47

from django.db import migrations, models


class Migration(migrations.Migration):

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Humidity',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
            ],
        ),
        migrations.CreateModel(
            name='Temperature',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('temperature', models.DecimalField(decimal_places=2, default=0.0, max_digits=3)),
                ('change', models.DecimalField(decimal_places=2, default=0.0, max_digits=3)),
                ('time_stamp', models.DateTimeField(auto_now=True)),
            ],
        ),
    ]
[ "django.db.models.DateTimeField", "django.db.models.DecimalField", "django.db.models.AutoField" ]
[((304, 397), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (320, 397), False, 'from django.db import migrations, models\n'), ((530, 623), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (546, 623), False, 'from django.db import migrations, models\n'), ((654, 718), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(2)', 'default': '(0.0)', 'max_digits': '(3)'}), '(decimal_places=2, default=0.0, max_digits=3)\n', (673, 718), False, 'from django.db import migrations, models\n'), ((748, 812), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(2)', 'default': '(0.0)', 'max_digits': '(3)'}), '(decimal_places=2, default=0.0, max_digits=3)\n', (767, 812), False, 'from django.db import migrations, models\n'), ((846, 881), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)'}), '(auto_now=True)\n', (866, 881), False, 'from django.db import migrations, models\n')]
# Generated by Django 3.2.4 on 2021-06-30 06:52 from django.db import migrations, models import django.utils.timezone class Migration(migrations.Migration): dependencies = [ ('accounts', '0001_initial'), ] operations = [ migrations.AddField( model_name='assistant', name='phone', field=models.CharField(default=django.utils.timezone.now, max_length=20), preserve_default=False, ), migrations.AddField( model_name='assistant', name='pickup_location', field=models.CharField(default=django.utils.timezone.now, max_length=256), preserve_default=False, ), migrations.AddField( model_name='caller', name='pickup_location', field=models.CharField(default=django.utils.timezone.now, max_length=256), preserve_default=False, ), migrations.AlterField( model_name='caller', name='phone', field=models.CharField(max_length=20), ), ]
[ "django.db.models.CharField" ]
[((355, 421), 'django.db.models.CharField', 'models.CharField', ([], {'default': 'django.utils.timezone.now', 'max_length': '(20)'}), '(default=django.utils.timezone.now, max_length=20)\n', (371, 421), False, 'from django.db import migrations, models\n'), ((589, 656), 'django.db.models.CharField', 'models.CharField', ([], {'default': 'django.utils.timezone.now', 'max_length': '(256)'}), '(default=django.utils.timezone.now, max_length=256)\n', (605, 656), False, 'from django.db import migrations, models\n'), ((821, 888), 'django.db.models.CharField', 'models.CharField', ([], {'default': 'django.utils.timezone.now', 'max_length': '(256)'}), '(default=django.utils.timezone.now, max_length=256)\n', (837, 888), False, 'from django.db import migrations, models\n'), ((1045, 1076), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(20)'}), '(max_length=20)\n', (1061, 1076), False, 'from django.db import migrations, models\n')]
""" Copyright (C) 2018 University of Massachusetts Amherst. This file is part of "coref_tools" http://github.com/nmonath/coref_tools Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ import argparse import datetime import errno import os import sys from shutil import copytree import torch from coref.models import new_model from coref.train import new_trainer from coref.util.Config import Config from coref.util.IO import copy_source_to_dir if __name__ == '__main__': parser = argparse.ArgumentParser(description='Train PWE HAC on dataset') parser.add_argument('config', type=str, help='the config file') parser.add_argument('--outbase', type=str, help='prefix of out dir within experiment_out_dir') parser.add_argument('--dataname', type=str, help='Name of dataset.') args = parser.parse_args() config = Config(args.config) if args.outbase: ts = args.outbase dataname = args.dataname ts = os.path.join(dataname, ts) else: now = datetime.datetime.now() ts = "{:04d}-{:02d}-{:02d}-{:02d}-{:02d}-{:02d}".format( now.year, now.month, now.day, now.hour, now.minute, now.second) debug = config.debug diagnostics = {} # Set up output dir config.experiment_out_dir = os.path.join( config.experiment_out_dir, ts) output_dir = config.experiment_out_dir copy_source_to_dir(output_dir,config) if config.batcher_filename != 'None': batcher = torch.load(config.batcher_filename) else: batcher = None model = new_model(config) config.save_config(config.experiment_out_dir) trainer = new_trainer(config, model) trainer.train(batcher, config.experiment_out_dir, None)
[ "argparse.ArgumentParser", "torch.load", "datetime.datetime.now", "coref.train.new_trainer", "coref.models.new_model", "coref.util.IO.copy_source_to_dir", "coref.util.Config.Config", "os.path.join" ]
[((959, 1022), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Train PWE HAC on dataset"""'}), "(description='Train PWE HAC on dataset')\n", (982, 1022), False, 'import argparse\n'), ((1332, 1351), 'coref.util.Config.Config', 'Config', (['args.config'], {}), '(args.config)\n', (1338, 1351), False, 'from coref.util.Config import Config\n'), ((1766, 1809), 'os.path.join', 'os.path.join', (['config.experiment_out_dir', 'ts'], {}), '(config.experiment_out_dir, ts)\n', (1778, 1809), False, 'import os\n'), ((1867, 1905), 'coref.util.IO.copy_source_to_dir', 'copy_source_to_dir', (['output_dir', 'config'], {}), '(output_dir, config)\n', (1885, 1905), False, 'from coref.util.IO import copy_source_to_dir\n'), ((2048, 2065), 'coref.models.new_model', 'new_model', (['config'], {}), '(config)\n', (2057, 2065), False, 'from coref.models import new_model\n'), ((2131, 2157), 'coref.train.new_trainer', 'new_trainer', (['config', 'model'], {}), '(config, model)\n', (2142, 2157), False, 'from coref.train import new_trainer\n'), ((1445, 1471), 'os.path.join', 'os.path.join', (['dataname', 'ts'], {}), '(dataname, ts)\n', (1457, 1471), False, 'import os\n'), ((1496, 1519), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (1517, 1519), False, 'import datetime\n'), ((1966, 2001), 'torch.load', 'torch.load', (['config.batcher_filename'], {}), '(config.batcher_filename)\n', (1976, 2001), False, 'import torch\n')]
from flask import Flask # blueprint import from blueprints.tweets.tweets import tweetsData from blueprints.btc.btc import btcData def create_app(app): # register blueprint app.register_blueprint(tweetsData) app.register_blueprint(btcData) return app if __name__ == "__main__": app = Flask(__name__) create_app(app).run(host="0.0.0.0", debug=False, port=9000)
[ "flask.Flask" ]
[((309, 324), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (314, 324), False, 'from flask import Flask\n')]
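# The two blueprints are imported from packages not shown here; a minimal
# sketch of what a module such as blueprints/btc/btc.py could look like.
# The route and payload are hypothetical, not taken from the project.
from flask import Blueprint, jsonify

btcData = Blueprint("btcData", __name__, url_prefix="/btc")

@btcData.route("/price")
def btc_price():
    return jsonify({"symbol": "BTC", "price": None})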
from django.shortcuts import render from django.views import View from django.views.generic import ListView, DetailView from .models import Video from django.contrib.auth.mixins import LoginRequiredMixin # Create your views here. class PopularVideosList(ListView): template_name = 'index.html' context_object_name = 'most_popular_videos' def get_queryset(self): return Video.objects.filter(is_most_viewed=True) class PopularVideosDetail(DetailView): model = Video template_name = 'youtube/popularVideosDetail.html' class MyVideosList(LoginRequiredMixin, View): def get(self, request): user = request.user.username my_videos_list = Video.objects.filter( channel_title__exact=user ).order_by('-published_at')[:15] context = {'my_videos': my_videos_list} return render(request, 'youtube/myVideosList.html', context)
[ "django.shortcuts.render" ]
[((852, 905), 'django.shortcuts.render', 'render', (['request', '"""youtube/myVideosList.html"""', 'context'], {}), "(request, 'youtube/myVideosList.html', context)\n", (858, 905), False, 'from django.shortcuts import render\n')]
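# Hypothetical urls.py wiring for the three views above; the route paths
# and names are assumptions, not taken from the project:
from django.urls import path
from .views import PopularVideosList, PopularVideosDetail, MyVideosList

urlpatterns = [
    path("", PopularVideosList.as_view(), name="index"),
    path("videos/<int:pk>/", PopularVideosDetail.as_view(), name="video-detail"),
    path("my-videos/", MyVideosList.as_view(), name="my-videos"),
]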
# -*- coding: utf-8 -*- # Generated by Django 1.10.5 on 2017-01-25 13:40 from django.db import migrations, models import jsonfield.fields class Migration(migrations.Migration): dependencies = [ ('django_prices_vatlayer', '0001_initial'), ] operations = [ migrations.CreateModel( name='RateTypes', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('types', jsonfield.fields.JSONField(verbose_name='types')), ], ), ]
[ "django.db.models.AutoField" ]
[((385, 478), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (401, 478), False, 'from django.db import migrations, models\n')]
""" Observer for iContact instances """ import logging from django.db.models import signals from icontact.models import IContact from icontact.client import IContactClient, IContactException logger = logging.getLogger(__name__) class IContactObserver(object): """ Class that utilizes icontact client to sync model with icontact service """ def __init__(self, client=None): """ Initialize an instance of the CalendarObserver class. """ self.adapters = {} self._client = client def observe(self, model, adapter): """ Establishes a connection between the model and Google Calendar, using adapter to transform data. """ self.adapters[model] = adapter signals.post_save.connect(self.on_update, sender=model) signals.post_delete.connect(self.on_delete, sender=model) def on_update(self, **kwargs): """ Called by Django's signal mechanism when an observed model is updated. """ created = kwargs.get('created', False) if created: logging.debug("Created") self.create(kwargs['sender'], kwargs['instance']) return logging.debug("Updating") self.update(kwargs['sender'], kwargs['instance']) def on_delete(self, **kwargs): """ Called by Django's signal mechanism when an observed model is deleted. """ self.delete(kwargs['sender'], kwargs['instance']) def client(self): """ Instantiate the client class to make authenticated calls to icontact. """ if self._client is None: self._client = IContactClient() return self._client def get_contact(self, instance): """ gets a contact from icontact service """ contact_id = IContact.objects.get_contact_id(instance) logging.debug("contact id: {id}".format(id=contact_id)) try: contact = self._client.get_contact(contact_id) except IContactException: return None logging.debug('contact retrived') logging.debug(contact) return contact def create(self, sender, instance): """ creates a new contact on icontact's datata base as well as a iContact instance """ adapter = self.adapters[sender] logging.debug('Adapter: {adapter}'.format(adapter=adapter)) client = self.client() contact = adapter.get_contact_data(instance) #IcontactData instance data = contact.get_data() logging.debug("contact's data: %s"%data) try: icontact = client.create_contact(payload=data) contact_id = icontact['contacts'][0]['contactId'] subscription = client.subscribe(contact_id) except IContactException: return None IContact.objects.set_contact_id(instance, contact_id) def update(self, sender, instance): """ Update or create an Icontact Contact. By default the client subscribes to the deault list specified in settings.py """ adapter = self.adapters[sender] client = self.client() contact = adapter.get_contact_data(instance) #IcontactData instance data = contact.get_data() logging.debug(data) logging.debug(data['contact']) try: icontact = self.get_contact(instance) contact_id = icontact['contact']['contactId'] client.update_contact(contact_id=contact_id, payload=data['contact']) except IContactException: return None IContact.objects.set_contact_id(instance, contact_id) def delete(self, sender, instance): """ Deletes iContact record from their service and from our database """ adapter = self.adapters[sender] client = self.client() contact = adapter.get_contact_data(instance) #IcontactData instance icontact = self.get_contact(instance) if not icontact: return None contact_id = icontact['contact']['contactId'] try: client.delete_contact(contact_id) #delete from icontact except IContactException: pass IContact.objects.delete_contact_id(instance) #delete from database
[ "django.db.models.signals.post_delete.connect", "icontact.models.IContact.objects.delete_contact_id", "logging.debug", "icontact.models.IContact.objects.get_contact_id", "icontact.client.IContactClient", "django.db.models.signals.post_save.connect", "logging.getLogger", "icontact.models.IContact.objects.set_contact_id" ]
[((204, 231), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (221, 231), False, 'import logging\n'), ((774, 829), 'django.db.models.signals.post_save.connect', 'signals.post_save.connect', (['self.on_update'], {'sender': 'model'}), '(self.on_update, sender=model)\n', (799, 829), False, 'from django.db.models import signals\n'), ((838, 895), 'django.db.models.signals.post_delete.connect', 'signals.post_delete.connect', (['self.on_delete'], {'sender': 'model'}), '(self.on_delete, sender=model)\n', (865, 895), False, 'from django.db.models import signals\n'), ((1233, 1258), 'logging.debug', 'logging.debug', (['"""Updating"""'], {}), "('Updating')\n", (1246, 1258), False, 'import logging\n'), ((1892, 1933), 'icontact.models.IContact.objects.get_contact_id', 'IContact.objects.get_contact_id', (['instance'], {}), '(instance)\n', (1923, 1933), False, 'from icontact.models import IContact\n'), ((2136, 2169), 'logging.debug', 'logging.debug', (['"""contact retrived"""'], {}), "('contact retrived')\n", (2149, 2169), False, 'import logging\n'), ((2178, 2200), 'logging.debug', 'logging.debug', (['contact'], {}), '(contact)\n', (2191, 2200), False, 'import logging\n'), ((2649, 2691), 'logging.debug', 'logging.debug', (['("contact\'s data: %s" % data)'], {}), '("contact\'s data: %s" % data)\n', (2662, 2691), False, 'import logging\n'), ((2946, 2999), 'icontact.models.IContact.objects.set_contact_id', 'IContact.objects.set_contact_id', (['instance', 'contact_id'], {}), '(instance, contact_id)\n', (2977, 2999), False, 'from icontact.models import IContact\n'), ((3410, 3429), 'logging.debug', 'logging.debug', (['data'], {}), '(data)\n', (3423, 3429), False, 'import logging\n'), ((3438, 3468), 'logging.debug', 'logging.debug', (["data['contact']"], {}), "(data['contact'])\n", (3451, 3468), False, 'import logging\n'), ((3739, 3792), 'icontact.models.IContact.objects.set_contact_id', 'IContact.objects.set_contact_id', (['instance', 'contact_id'], {}), '(instance, contact_id)\n', (3770, 3792), False, 'from icontact.models import IContact\n'), ((4359, 4403), 'icontact.models.IContact.objects.delete_contact_id', 'IContact.objects.delete_contact_id', (['instance'], {}), '(instance)\n', (4393, 4403), False, 'from icontact.models import IContact\n'), ((1118, 1142), 'logging.debug', 'logging.debug', (['"""Created"""'], {}), "('Created')\n", (1131, 1142), False, 'import logging\n'), ((1707, 1723), 'icontact.client.IContactClient', 'IContactClient', ([], {}), '()\n', (1721, 1723), False, 'from icontact.client import IContactClient, IContactException\n')]
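# The observer above only requires an adapter exposing
# get_contact_data(instance) whose result answers get_data(). A sketch of
# registering it for Django's User model; the adapter, payload keys, and
# observed model are illustrative assumptions, not the project's API.
from django.contrib.auth.models import User

class UserContactAdapter:
    def get_contact_data(self, instance):
        payload = {"contact": {"email": instance.email,
                               "firstName": instance.first_name}}
        class _ContactData:
            def get_data(self):
                return payload
        return _ContactData()

observer = IContactObserver()
observer.observe(User, UserContactAdapter())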
import os import shutil import logging logging.basicConfig( format='%(asctime)s %(levelname)-8s [%(filename)s:%(lineno)d] %(message)s', datefmt='%Y-%m-%d %H:%M:%S', level=logging.DEBUG) log = logging.getLogger() class models_genesis_config: model = "Unet3D" suffix = "genesis_oct" exp_name = model + "-" + suffix # data data = "/home/harddrive/Projects/GAMMA_data/model_genesis_data/generated/" # train_fold = [0, 1, 2, 3, 4] train_fold = [0] # valid_fold = [5, 6] valid_fold = [1] test_fold = [1] hu_min = -1000.0 hu_max = 1000.0 scale = 32 input_rows = 96 input_cols = 96 input_deps = 96 nb_class = 1 # model pre-training verbose = 1 weights = None batch_size = 1 optimizer = "sgd" workers = 10 max_queue_size = workers * 4 save_samples = "png" nb_epoch = 10000 patience = 50 lr = 1 # image deformation nonlinear_rate = 0.9 paint_rate = 0.9 outpaint_rate = 0.8 inpaint_rate = 1.0 - outpaint_rate local_rate = 0.5 flip_rate = 0.4 # logs model_path = "../pretrained_weights" if not os.path.exists(model_path): os.makedirs(model_path) logs_path = os.path.join(model_path, "Logs") if not os.path.exists(logs_path): os.makedirs(logs_path) def display(self): """Display Configuration values.""" print("\nConfigurations:") for a in dir(self): if not a.startswith("__") and not callable(getattr(self, a)): log.info("{:30} {}".format(a, getattr(self, a))) log.info("\n")
[ "os.makedirs", "logging.basicConfig", "os.path.exists", "os.path.join", "logging.getLogger" ]
[((41, 195), 'logging.basicConfig', 'logging.basicConfig', ([], {'format': '"""%(asctime)s %(levelname)-8s [%(filename)s:%(lineno)d] %(message)s"""', 'datefmt': '"""%Y-%m-%d %H:%M:%S"""', 'level': 'logging.DEBUG'}), "(format=\n '%(asctime)s %(levelname)-8s [%(filename)s:%(lineno)d] %(message)s',\n datefmt='%Y-%m-%d %H:%M:%S', level=logging.DEBUG)\n", (60, 195), False, 'import logging\n'), ((207, 226), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (224, 226), False, 'import logging\n'), ((1230, 1262), 'os.path.join', 'os.path.join', (['model_path', '"""Logs"""'], {}), "(model_path, 'Logs')\n", (1242, 1262), False, 'import os\n'), ((1154, 1180), 'os.path.exists', 'os.path.exists', (['model_path'], {}), '(model_path)\n', (1168, 1180), False, 'import os\n'), ((1190, 1213), 'os.makedirs', 'os.makedirs', (['model_path'], {}), '(model_path)\n', (1201, 1213), False, 'import os\n'), ((1274, 1299), 'os.path.exists', 'os.path.exists', (['logs_path'], {}), '(logs_path)\n', (1288, 1299), False, 'import os\n'), ((1309, 1331), 'os.makedirs', 'os.makedirs', (['logs_path'], {}), '(logs_path)\n', (1320, 1331), False, 'import os\n')]
import distutils import os from PyInstaller.utils.hooks import logger # https://github.com/pyinstaller/pyinstaller/issues/4064 # https://pythonhosted.org/PyInstaller/hooks.html#the-pre-find-module-path-pfmp-api-method def pre_find_module_path(api): # Absolute path of the system-wide "distutils" package when run from within # a venv or None otherwise. distutils_dir = getattr(distutils, 'distutils_path', None) if distutils_dir is not None: # workaround for https://github.com/pyinstaller/pyinstaller/issues/4064 if distutils_dir.endswith('__init__.py'): distutils_dir = os.path.dirname(distutils_dir) # Find this package in its parent directory. api.search_dirs = [os.path.dirname(distutils_dir)] logger.info('>>>>>>> CUSTOM >>>>>>>>> distutils: retargeting to non-venv dir %r' % distutils_dir)
[ "PyInstaller.utils.hooks.logger.info", "os.path.dirname" ]
[((773, 879), 'PyInstaller.utils.hooks.logger.info', 'logger.info', (["('>>>>>>> CUSTOM >>>>>>>>> distutils: retargeting to non-venv dir %r' %\n distutils_dir)"], {}), "(\n '>>>>>>> CUSTOM >>>>>>>>> distutils: retargeting to non-venv dir %r' %\n distutils_dir)\n", (784, 879), False, 'from PyInstaller.utils.hooks import logger\n'), ((620, 650), 'os.path.dirname', 'os.path.dirname', (['distutils_dir'], {}), '(distutils_dir)\n', (635, 650), False, 'import os\n'), ((732, 762), 'os.path.dirname', 'os.path.dirname', (['distutils_dir'], {}), '(distutils_dir)\n', (747, 762), False, 'import os\n')]
import argparse
import os
from pathlib import Path
import logging


def missing_image(image_path: Path, label_path: Path):
    """
    Remove images from the train folder when no corresponding label .txt
    file exists.

    NOTE - Make sure you perform the conversion from the label to txt first.
    The code performs the following steps:
    - Takes the input dataset folder path and checks whether each image has
      label information.
    - If not found, removes the image.

    :param image_path: The directory where the training images are present
    :param label_path: The directory where the .txt file corresponding to
        each image is present
    """
    for image in image_path.iterdir():
        if image.suffix == ".jpg":
            # Corresponding label file name
            label = label_path / (image.stem + ".txt")
            if not label.is_file():
                logging.warning("Label not found: {}".format(label))
                logging.warning("Deleting file: {}".format(image))
                os.remove(image)


def main():
    ap = argparse.ArgumentParser()
    ap.add_argument("-l", "--label_path", help="path to the label dir")
    ap.add_argument("-d", "--image_path", help="directory with images")
    args = ap.parse_args()

    image_path = Path(args.image_path).absolute()
    label_path = Path(args.label_path).absolute()

    assert image_path.is_dir(), "Image directory needs to exist"
    assert label_path.is_dir(), "Label directory needs to exist"

    missing_image(image_path, label_path)


if __name__ == "__main__":
    main()
[ "pathlib.Path", "argparse.ArgumentParser", "os.remove" ]
[((1165, 1190), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (1188, 1190), False, 'import argparse\n'), ((1385, 1406), 'pathlib.Path', 'Path', (['args.image_path'], {}), '(args.image_path)\n', (1389, 1406), False, 'from pathlib import Path\n'), ((1436, 1457), 'pathlib.Path', 'Path', (['args.label_path'], {}), '(args.label_path)\n', (1440, 1457), False, 'from pathlib import Path\n'), ((1119, 1135), 'os.remove', 'os.remove', (['image'], {}), '(image)\n', (1128, 1135), False, 'import os\n')]
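# The cleanup can also be driven programmatically instead of via argparse;
# the directory names below are placeholders.
from pathlib import Path

missing_image(Path("dataset/images").absolute(),
              Path("dataset/labels").absolute())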
from django.dispatch import Signal, receiver
from django.contrib.auth.signals import user_logged_out
from django.core.exceptions import ObjectDoesNotExist

from axes.models import AccessLog

# django 1.4 has a new timezone-aware now(); use it if available.
try:
    from django.utils.timezone import now
except ImportError:
    # fall back to the non-timezone-aware now()
    from datetime import datetime
    now = datetime.now

user_locked_out = Signal(providing_args=['request', 'username', 'ip_address'])


@receiver(user_logged_out)
def log_user_lockout(sender, request, user, signal, *args, **kwargs):
    """ When a user logs out, update the access log"""
    if not user:
        return

    access_log = None
    access_logs = AccessLog.objects.filter(username=user.username,
                                          logout_time__isnull=True).order_by("-attempt_time")
    if len(access_logs) > 0:
        access_log = access_logs[0]
    if access_log:
        access_log.logout_time = now()
        access_log.save()
[ "django.utils.timezone.now", "django.dispatch.receiver", "django.dispatch.Signal", "axes.models.AccessLog.objects.filter" ]
[((440, 500), 'django.dispatch.Signal', 'Signal', ([], {'providing_args': "['request', 'username', 'ip_address']"}), "(providing_args=['request', 'username', 'ip_address'])\n", (446, 500), False, 'from django.dispatch import Signal, receiver\n'), ((503, 528), 'django.dispatch.receiver', 'receiver', (['user_logged_out'], {}), '(user_logged_out)\n', (511, 528), False, 'from django.dispatch import Signal, receiver\n'), ((967, 972), 'django.utils.timezone.now', 'now', ([], {}), '()\n', (970, 972), False, 'from django.utils.timezone import now\n'), ((727, 801), 'axes.models.AccessLog.objects.filter', 'AccessLog.objects.filter', ([], {'username': 'user.username', 'logout_time__isnull': '(True)'}), '(username=user.username, logout_time__isnull=True)\n', (751, 801), False, 'from axes.models import AccessLog\n')]
# Copyright 2015 Rackspace # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import unittest from cafe.engine.http.behaviors import get_range_data class TestHttpFunctions(unittest.TestCase): def test_get_range_data(self): data = '0123456789' data_subset = get_range_data(data, '0-4') self.assertEqual('01234', data_subset) data_subset = get_range_data(data, '5-9') self.assertEqual('56789', data_subset) def test_get_range_data_with_first_byte_pos(self): data = '0123456789' data_subset = get_range_data(data, '7-') self.assertEqual('789', data_subset) def test_get_range_data_with_last_byte_pos(self): data = '0123456789' data_subset = get_range_data(data, '-3') self.assertEqual('789', data_subset) if __name__ == '__main__': unittest.main()
[ "unittest.main", "cafe.engine.http.behaviors.get_range_data" ]
[((1337, 1352), 'unittest.main', 'unittest.main', ([], {}), '()\n', (1350, 1352), False, 'import unittest\n'), ((776, 803), 'cafe.engine.http.behaviors.get_range_data', 'get_range_data', (['data', '"""0-4"""'], {}), "(data, '0-4')\n", (790, 803), False, 'from cafe.engine.http.behaviors import get_range_data\n'), ((874, 901), 'cafe.engine.http.behaviors.get_range_data', 'get_range_data', (['data', '"""5-9"""'], {}), "(data, '5-9')\n", (888, 901), False, 'from cafe.engine.http.behaviors import get_range_data\n'), ((1055, 1081), 'cafe.engine.http.behaviors.get_range_data', 'get_range_data', (['data', '"""7-"""'], {}), "(data, '7-')\n", (1069, 1081), False, 'from cafe.engine.http.behaviors import get_range_data\n'), ((1232, 1258), 'cafe.engine.http.behaviors.get_range_data', 'get_range_data', (['data', '"""-3"""'], {}), "(data, '-3')\n", (1246, 1258), False, 'from cafe.engine.http.behaviors import get_range_data\n')]
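# The tests above pin down HTTP-Range-style semantics for get_range_data.
# A reference implementation consistent with those expectations (a sketch,
# not the library's actual code):
def get_range_data_sketch(data, range_spec):
    first, _, last = range_spec.partition("-")
    if first == "":                           # '-N'  -> final N bytes
        return data[-int(last):]
    if last == "":                            # 'N-'  -> from offset N to the end
        return data[int(first):]
    return data[int(first):int(last) + 1]     # 'M-N' -> inclusive slice

assert get_range_data_sketch("0123456789", "0-4") == "01234"
assert get_range_data_sketch("0123456789", "7-") == "789"
assert get_range_data_sketch("0123456789", "-3") == "789"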
#!/usr/bin/env python # coding: utf-8 #$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$# #$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$# #$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$# # Microsoft VS header #--------------------------------------------------# import os import sys import os.path from sys import platform from pathlib import Path #--------------------------------------------------# if os.name == 'nt' or platform == 'win32': print("Running on Windows") if 'ptvsd' in sys.modules: print("Running in Visual Studio") try: os.chdir(os.path.dirname(__file__)) print('CurrentDir: ', os.getcwd()) except: pass #--------------------------------------------------# else: print("Running outside Visual Studio") try: if not 'workbookDir' in globals(): workbookDir = os.getcwd() print('workbookDir: ' + workbookDir) os.chdir(workbookDir) except: pass #--------------------------------------------------# from rdkit import Chem from rdkit import DataStructs from rdkit.Chem import AllChem from rdkit.Chem import MACCSkeys from rdkit.Chem.AtomPairs import Pairs from rdkit.Chem.AtomPairs import Torsions from rdkit.Chem.Fingerprints import FingerprintMols #--------------------------------------------------# import ast import copy import pickle import scipy.io import subprocess import numpy as np import pandas as pd from numpy import * from tqdm import tqdm from pathlib import Path from random import shuffle #--------------------------------------------------# import seaborn as sns import matplotlib.mlab as mlab import matplotlib.pyplot as plt #--------------------------------------------------# from scipy import stats from matplotlib import pylab as pl #--------------------------------------------------# from AP_RDKIT_FP import * from Step07_NetworkToDistance import * #--------------------------------------------------# ############################################################################################################## ############################################################################################################## loading_folder = Path("MNX_data/") saving_folder = Path("MNX_ECFP_savings/") ############################################################################################################## ############################################################################################################## # all_cmpds : list( ["X","X",...] ) # list # all_ecfps : set ( ["ecfp", "ecfp", ...] ) # set # all_pairs : [{{},{}}, {{},{}}, {{},{}},... ] # all_info : [ [ { fr{}, fr{} }, d ], [ { fr{}, fr{} }, d ], [ { fr{}, fr{} }, d ], .... 
#                 ]
##############################################################################################################
##############################################################################################################
# Args
# Select ECFP encodings
#-------------------        0        1        2        3          4        5        6
ECFP_encodings_list = ["ECFP2", "ECFP4", "ECFP6", "JTVAE", "MorganFP", "ECFP8", "ECFPX"]
ECFP_encodings = ECFP_encodings_list[1]
ECFP_type = ECFP_encodings[-1] if ECFP_encodings in ["ECFP2", "ECFP4", "ECFP6"] else "6" # 2, 4, 6
#$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$#
#$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$#
#$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$#
def list_smiles_to_ecfp_through_dict(smiles_list, all_cmpds_ecfps_dict):
    ecfp_list=[]
    for one_smiles in smiles_list:
        ecfp_list=ecfp_list+all_cmpds_ecfps_dict[one_smiles]
    return ecfp_list
#====================================================================================================#
def parse_one_pair_info(one_pair_info, all_ecfps, all_cmpds_ecfps_dict):
    dimension=len(all_ecfps)
    X1i=[0]*dimension
    X2i=[0]*dimension
    X1i_ecfp_list=list_smiles_to_ecfp_through_dict(list(list(one_pair_info[0])[0]),all_cmpds_ecfps_dict)
    X2i_ecfp_list=list_smiles_to_ecfp_through_dict(list(list(one_pair_info[0])[1]),all_cmpds_ecfps_dict)
    distance=one_pair_info[1]
    for one_ecfp in X1i_ecfp_list:
        X1i[all_ecfps.index(one_ecfp)]=X1i_ecfp_list.count(one_ecfp)
    for one_ecfp in X2i_ecfp_list:
        X2i[all_ecfps.index(one_ecfp)]=X2i_ecfp_list.count(one_ecfp)
    Yi=distance
    return (X1i,X2i,Yi)
#====================================================================================================#
def list_subtract(list_a,list_b):
    list_out=[]
    for i in range(len(list_a)):
        list_out.append(list_a[i]-list_b[i])
    return list_out
#$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$#
#$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$#
#$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$#
def Step09_main(loading_folder, saving_folder, ECFP_encodings):
    #====================================================================================================#
    pickle_in1=open(saving_folder / "Step07_paired_cmpds_list","rb")
    paired_smiles_list=pickle.load(pickle_in1)
    pickle_in1.close()
    pickle_in2=open(saving_folder / "Step07_all_pairs_list","rb")
    all_pairs_list=pickle.load(pickle_in2)
    pickle_in2.close()
    #====================================================================================================#
    pickle_in1=open(saving_folder / ("Step08_all_cmpds_"+ECFP_encodings),"rb")
    all_smiles=pickle.load(pickle_in1)
    pickle_in1.close()
    pickle_in2=open(saving_folder / ("Step08_all_ecfps_"+ECFP_encodings),"rb")
    all_ecfps=pickle.load(pickle_in2)
    pickle_in2.close()
    pickle_in3=open(saving_folder / ("Step08_all_cmpds_ecfps_dict_"+ECFP_encodings),"rb")
    all_smiles_ecfps_dict=pickle.load(pickle_in3)
    pickle_in3.close()
    #====================================================================================================#
    # Drop malformed pairs; iterate over a copy so that remove() does not
    # skip the element that follows each removal.
    for one_pair_info in list(paired_smiles_list):
        if len(one_pair_info[0])!=2:
            print (one_pair_info[0])
            print ("unexpected pair size, removing")
            paired_smiles_list.remove(one_pair_info)
            print ("screened!")
    #====================================================================================================#
    all_ecfps=list(all_ecfps)
    X_Diff=[]
    Y_Distance=[]
    for one_pair_info in tqdm(paired_smiles_list):
        (X1i, X2i, Yi)=parse_one_pair_info(one_pair_info,all_ecfps,all_smiles_ecfps_dict)
        X_Diff.append(list_subtract(X1i, X2i))
        Y_Distance.append(Yi)
    Step09_processed_data_dict = {"X_data": X_Diff, "y_data": Y_Distance}
    #====================================================================================================#
    # Parenthesize the filename so the string concatenation happens before the
    # Path "/" join (Path + str would raise a TypeError otherwise).
    pickle_out1=open(saving_folder / ("Step09_processed_data_"+ECFP_encodings),"wb")
    pickle.dump(Step09_processed_data_dict, pickle_out1)
    pickle_out1.close()
    print("Step09_main Done!")
    return
#$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$#
#$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$#
#$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$#
if __name__ == '__main__':
    Step09_main(loading_folder, saving_folder, ECFP_encodings)
#$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$#
#$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$#
#$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$#
[ "tqdm.tqdm", "pickle.dump", "os.getcwd", "os.path.dirname", "pathlib.Path", "pickle.load", "os.chdir" ]
[((2622, 2639), 'pathlib.Path', 'Path', (['"""MNX_data/"""'], {}), "('MNX_data/')\n", (2626, 2639), False, 'from pathlib import Path\n'), ((2657, 2682), 'pathlib.Path', 'Path', (['"""MNX_ECFP_savings/"""'], {}), "('MNX_ECFP_savings/')\n", (2661, 2682), False, 'from pathlib import Path\n'), ((6194, 6217), 'pickle.load', 'pickle.load', (['pickle_in1'], {}), '(pickle_in1)\n', (6205, 6217), False, 'import pickle\n'), ((6330, 6353), 'pickle.load', 'pickle.load', (['pickle_in2'], {}), '(pickle_in2)\n', (6341, 6353), False, 'import pickle\n'), ((6583, 6606), 'pickle.load', 'pickle.load', (['pickle_in1'], {}), '(pickle_in1)\n', (6594, 6606), False, 'import pickle\n'), ((6727, 6750), 'pickle.load', 'pickle.load', (['pickle_in2'], {}), '(pickle_in2)\n', (6738, 6750), False, 'import pickle\n'), ((6894, 6917), 'pickle.load', 'pickle.load', (['pickle_in3'], {}), '(pickle_in3)\n', (6905, 6917), False, 'import pickle\n'), ((7480, 7504), 'tqdm.tqdm', 'tqdm', (['paired_smiles_list'], {}), '(paired_smiles_list)\n', (7484, 7504), False, 'from tqdm import tqdm\n'), ((7951, 8003), 'pickle.dump', 'pickle.dump', (['Step09_processed_data_dict', 'pickle_out1'], {}), '(Step09_processed_data_dict, pickle_out1)\n', (7962, 8003), False, 'import pickle\n'), ((912, 937), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (927, 937), False, 'import os\n'), ((974, 985), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (983, 985), False, 'import os\n'), ((1228, 1239), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (1237, 1239), False, 'import os\n'), ((1311, 1332), 'os.chdir', 'os.chdir', (['workbookDir'], {}), '(workbookDir)\n', (1319, 1332), False, 'import os\n')]
# Copyright (c) 2019-present, Facebook, Inc. # All rights reserved. # # This source code is licensed under the license found in the # LICENSE file in the root directory of this source tree. # import os import subprocess import sys import uuid from pathlib import Path, PosixPath from subprocess import Popen from .evosuite_test_runners import ( EvosuiteTestRunner, TestRuntimeError, CompilationError, InvalidTest, clean_firejail, FIREJAIL_PROFILE, ) from ...model.src.utils import ( TREE_SITTER_ROOT, limit_virtual_memory, MAX_VIRTUAL_MEMORY, ) from ...preprocessing.lang_processors.lang_processor import LangProcessor sys.path.append(str(Path(__file__).parents[3])) print("adding to path", str(Path(__file__).parents[3])) python_processor = LangProcessor.processors["python"](root_folder=TREE_SITTER_ROOT) class PythonTestRunner(EvosuiteTestRunner): def __init__( self, tmp_folder=Path( Path.home().joinpath("data/CodeGen/automatic_tests/tmp_tests_folder/python") ), timeout=15, ): super().__init__(tmp_folder=tmp_folder, timeout=timeout) def _run_tests( self, function: str, test: str, tmp_path: PosixPath, classname: str = None, scaffolding: str = None, ): assert ( scaffolding is None ), f"Scaffolding should be None for python tests, was {scaffolding}" if "#TOFILL" not in test: raise InvalidTest("Missing #TOFILL") try: f_name = python_processor.get_function_name(function) except (ValueError, IndexError): raise CompilationError("No function definition") function = python_processor.detokenize_code( function.replace(f" {f_name.strip()} ", " f_filled ") ) filled_test = test.replace("#TOFILL", function) test_path = self.write_test(filled_test, classname, tmp_path) assert test_path.is_file() test_cmd = f"{limit_virtual_memory(MAX_VIRTUAL_MEMORY)}; firejail --profile={FIREJAIL_PROFILE} python {test_path}" test_proc = Popen( test_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True, executable="/bin/bash", preexec_fn=os.setsid, ) return test_proc, tmp_path def _eval_proc_state(self, out, err): stderr = err.decode("utf-8", errors="replace") stderr = clean_firejail(stderr) res_line = stderr.splitlines() if len(res_line) <= 2 or not ( res_line[-1].startswith("OK") or res_line[-1].startswith("FAILED") ): raise TestRuntimeError(stderr) assert res_line[-3].startswith("Ran ") number_of_tests = int(res_line[-3].replace("Ran ", "").split(" ")[0]) res_line = res_line[-1] if res_line.startswith("OK"): return "success", number_of_tests, 0 else: assert res_line.startswith("FAILED (errors=") or res_line.startswith( "FAILED (failures=" ) number_failures = int(res_line.split("=")[-1].replace(")", "")) return "failure", number_of_tests, number_failures @staticmethod def write_test(test, classname, out_folder): if classname is None: classname = "a" test_path = out_folder.joinpath(f"python_test_{classname}.py") with open(test_path, "w", encoding="utf-8") as o: o.write(test) return test_path
[ "subprocess.Popen", "pathlib.Path.home", "pathlib.Path" ]
[((2146, 2271), 'subprocess.Popen', 'Popen', (['test_cmd'], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE', 'shell': '(True)', 'executable': '"""/bin/bash"""', 'preexec_fn': 'os.setsid'}), "(test_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True,\n executable='/bin/bash', preexec_fn=os.setsid)\n", (2151, 2271), False, 'from subprocess import Popen\n'), ((677, 691), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (681, 691), False, 'from pathlib import Path, PosixPath\n'), ((733, 747), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (737, 747), False, 'from pathlib import Path, PosixPath\n'), ((962, 973), 'pathlib.Path.home', 'Path.home', ([], {}), '()\n', (971, 973), False, 'from pathlib import Path, PosixPath\n')]
from django.urls import path from .views import call_json_to_db, get_all_movies, add_movie, edit_movie, remove_movie urlpatterns = [ path('load-db/', call_json_to_db, name='load_db'), path('list/', get_all_movies, name='all_movies'), path('add/', add_movie, name='add_movie'), path('edit/', edit_movie, name='edit_movie'), path('remove/', remove_movie, name='remove_movie'), ]
[ "django.urls.path" ]
[((138, 187), 'django.urls.path', 'path', (['"""load-db/"""', 'call_json_to_db'], {'name': '"""load_db"""'}), "('load-db/', call_json_to_db, name='load_db')\n", (142, 187), False, 'from django.urls import path\n'), ((193, 241), 'django.urls.path', 'path', (['"""list/"""', 'get_all_movies'], {'name': '"""all_movies"""'}), "('list/', get_all_movies, name='all_movies')\n", (197, 241), False, 'from django.urls import path\n'), ((247, 288), 'django.urls.path', 'path', (['"""add/"""', 'add_movie'], {'name': '"""add_movie"""'}), "('add/', add_movie, name='add_movie')\n", (251, 288), False, 'from django.urls import path\n'), ((294, 338), 'django.urls.path', 'path', (['"""edit/"""', 'edit_movie'], {'name': '"""edit_movie"""'}), "('edit/', edit_movie, name='edit_movie')\n", (298, 338), False, 'from django.urls import path\n'), ((344, 394), 'django.urls.path', 'path', (['"""remove/"""', 'remove_movie'], {'name': '"""remove_movie"""'}), "('remove/', remove_movie, name='remove_movie')\n", (348, 394), False, 'from django.urls import path\n')]
from itertools import tee from pathlib import Path from typing import Union, List, Tuple import h5py import torch from torch.utils.data import Dataset, DataLoader from tqdm.auto import tqdm T = List[Tuple[torch.Tensor, torch.Tensor]] def dataset_to_h5_file(dataset: Union[Dataset, T], filepath: Union[str, Path], inputs_type=None, targets_type=None, inputs_name: str = 'inputs', targets_name: str = 'targets'): n = len(dataset) x, y = next(tee(iter(dataset))[1]) assert isinstance(x, torch.Tensor), f'input should be a torch tensor, not {type(x)}' assert isinstance(y, torch.Tensor), f'target should be a torch tensor, not {type(x)}' inputs_shape = (n,) + x.size() targets_shape = (n,) + y.size() if inputs_type is None: inputs_type = x.numpy().dtype if targets_type is None: targets_type = y.numpy().dtype with h5py.File(name=filepath, mode='w', libver='latest', swmr=True) as h5_file: inputs = h5_file.create_dataset(inputs_name, shape=inputs_shape, dtype=inputs_type, fillvalue=0) targets = h5_file.create_dataset(targets_name, shape=targets_shape, dtype=targets_type, fillvalue=0) dloader = DataLoader(dataset, batch_size=1, num_workers=8) for i, (x, y) in enumerate(tqdm(dloader, desc=str(filepath))): inputs[i] = x targets[i] = y assert i == n - 1
[ "h5py.File", "torch.utils.data.DataLoader" ]
[((895, 957), 'h5py.File', 'h5py.File', ([], {'name': 'filepath', 'mode': '"""w"""', 'libver': '"""latest"""', 'swmr': '(True)'}), "(name=filepath, mode='w', libver='latest', swmr=True)\n", (904, 957), False, 'import h5py\n'), ((1204, 1252), 'torch.utils.data.DataLoader', 'DataLoader', (['dataset'], {'batch_size': '(1)', 'num_workers': '(8)'}), '(dataset, batch_size=1, num_workers=8)\n', (1214, 1252), False, 'from torch.utils.data import Dataset, DataLoader\n')]
# -*- coding: utf-8 -*- import biapol_utilities as biau import numpy as np def test_suppression(): a = np.random.rand(100).reshape(10, -1) threshold = 0.5 a_sup = biau.label.suppressed_similarity(a, threshold=threshold) assert(all(a_sup[a < threshold].ravel() == 0)) if __name__ == "__main__": test_suppression()
[ "numpy.random.rand", "biapol_utilities.label.suppressed_similarity" ]
[((180, 236), 'biapol_utilities.label.suppressed_similarity', 'biau.label.suppressed_similarity', (['a'], {'threshold': 'threshold'}), '(a, threshold=threshold)\n', (212, 236), True, 'import biapol_utilities as biau\n'), ((111, 130), 'numpy.random.rand', 'np.random.rand', (['(100)'], {}), '(100)\n', (125, 130), True, 'import numpy as np\n')]
#!/usr/bin/python # Script to generate sitemaps # <NAME> # Requires mysql-python import MySQLdb import argparse import logging import os import sys import subprocess from config import db logging.basicConfig(level=logging.INFO) parser = argparse.ArgumentParser() parser.add_argument("sitemapdir") args = parser.parse_args() try: sitemapdir = args.sitemapdir except: logging.error("sitemap dir not set. run python generate_sitemap.py -h") sys.exit(0) # clear sitemapdir if it is there already if os.path.exists(sitemapdir): subprocess.call(['rm','-rfv',sitemapdir+"/*"]) else: os.makedirs(sitemapdir) MAX_PER_FILE = 49999 db = MySQLdb.connect(host=db["dbhost"], user=db["dbuser"], passwd=db["dbpass"], db="citeseerx") cur = db.cursor() i = 0 file = 1 header = '<?xml version="1.0" encoding="UTF-8"?>\n<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">\n' cur.execute("SELECT id FROM papers WHERE public = 1") for row in cur.fetchall(): if i == 0: f = open(os.path.join(sitemapdir,"sitemap%d.xml" % file), 'w+') f.write(header) f.write('<url>\n\t<loc>http://citeseerx.ist.psu.edu/viewdoc/download?doi=%s&amp;rep=rep1&amp;type=pdf</loc>\n</url>\n' % row[0]) i = i + 1 if i == MAX_PER_FILE: file = file + 1 i = 0 f.write('</urlset>') f.close() logging.info("sitemap generated: {}".format(f.name)) if not f.closed: f.write('</urlset>') f.close() logging.info("sitemap generated: {}".format(f.name)) f = open(os.path.join(sitemapdir,'sitemap_index.xml'), 'w+') f.write('<?xml version="1.0" encoding="UTF-8"?>\n<sitemapindex xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">\n') for i in range(1, file+1): f.write('<sitemap>\n\t<loc>http://citeseerx.ist.psu.edu/sitemap%d.xml</loc>\n</sitemap>\n' % i) f.write('</sitemapindex>'); f.close() logging.info("sitemap index file: {}".format(f.name))
[ "logging.error", "MySQLdb.connect", "argparse.ArgumentParser", "logging.basicConfig", "os.makedirs", "os.path.exists", "subprocess.call", "os.path.join", "config.db.cursor", "sys.exit" ]
[((190, 229), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.INFO'}), '(level=logging.INFO)\n', (209, 229), False, 'import logging\n'), ((239, 264), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (262, 264), False, 'import argparse\n'), ((513, 539), 'os.path.exists', 'os.path.exists', (['sitemapdir'], {}), '(sitemapdir)\n', (527, 539), False, 'import os\n'), ((653, 747), 'MySQLdb.connect', 'MySQLdb.connect', ([], {'host': "db['dbhost']", 'user': "db['dbuser']", 'passwd': "db['dbpass']", 'db': '"""citeseerx"""'}), "(host=db['dbhost'], user=db['dbuser'], passwd=db['dbpass'],\n db='citeseerx')\n", (668, 747), False, 'import MySQLdb\n'), ((750, 761), 'config.db.cursor', 'db.cursor', ([], {}), '()\n', (759, 761), False, 'from config import db\n'), ((545, 595), 'subprocess.call', 'subprocess.call', (["['rm', '-rfv', sitemapdir + '/*']"], {}), "(['rm', '-rfv', sitemapdir + '/*'])\n", (560, 595), False, 'import subprocess\n'), ((602, 625), 'os.makedirs', 'os.makedirs', (['sitemapdir'], {}), '(sitemapdir)\n', (613, 625), False, 'import os\n'), ((1527, 1572), 'os.path.join', 'os.path.join', (['sitemapdir', '"""sitemap_index.xml"""'], {}), "(sitemapdir, 'sitemap_index.xml')\n", (1539, 1572), False, 'import os\n'), ((376, 447), 'logging.error', 'logging.error', (['"""sitemap dir not set. run python generate_sitemap.py -h"""'], {}), "('sitemap dir not set. run python generate_sitemap.py -h')\n", (389, 447), False, 'import logging\n'), ((452, 463), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (460, 463), False, 'import sys\n'), ((1005, 1053), 'os.path.join', 'os.path.join', (['sitemapdir', "('sitemap%d.xml' % file)"], {}), "(sitemapdir, 'sitemap%d.xml' % file)\n", (1017, 1053), False, 'import os\n')]
import os
import sys
import platform

from fHDHR import fHDHR_VERSION
from fHDHR.tools import is_docker


class Versions():
    """
    fHDHR versioning management system.
    """

    def __init__(self, settings, fHDHR_web, logger, web, db, scheduler):
        self.fHDHR_web = fHDHR_web
        self.logger = logger
        self.web = web
        self.db = db
        self.scheduler = scheduler

        self.github_org_list_url = "https://api.github.com/orgs/fHDHR/repos?type=all"
        self.github_fhdhr_core_info_url = "https://raw.githubusercontent.com/fHDHR/fHDHR/main/version.json"

        self.dict = {}
        self.official_plugins = self.db.get_fhdhr_value("versions", "dict") or {}

        self.register_fhdhr()

        self.register_env()

        self.get_online_versions()

        self.update_url = "/api/versions?method=check"

    def sched_init(self, fhdhr):
        """
        The Scheduled update method.
        """

        self.api = fhdhr.api
        self.scheduler.every(2).to(3).hours.do(self.sched_update)

    def sched_update(self):
        """
        Use an API thread to update Versions listing.
        """

        self.api.threadget(self.update_url)

    def get_online_versions(self):
        """
        Update Onling versions listing.
        """

        self.logger.debug("Checking for Online Plugin Information")

        official_plugins = {}

        try:
            github_org_json = self.web.session.get(self.github_org_list_url).json()
        except self.web.exceptions.ReadTimeout as err:
            self.logger.error("Online Plugin Information Check Failed: %s" % err)
            return

        online_plugin_names = [x["name"] for x in github_org_json if x["name"].startswith("fHDHR_plugin_")]

        for plugin_name in online_plugin_names:

            plugin_version_check_success = 0

            for branch in ["main", "master", "dev"]:

                if not plugin_version_check_success:

                    self.logger.debug("Attempting Online Plugin Information for %s %s branch" % (plugin_name, branch))
                    plugin_json_url = "https://raw.githubusercontent.com/fHDHR/%s/%s/plugin.json" % (plugin_name, branch)

                    try:
                        plugin_json = self.web.session.get(plugin_json_url)
                        if plugin_json.status_code == 200:
                            plugin_json = plugin_json.json()
                            official_plugins[plugin_name] = plugin_json
                            plugin_version_check_success = 1

                    except self.web.exceptions.ReadTimeout as err:
                        self.logger.error("Online Plugin Information Check Failed for %s %s branch: %s" % (plugin_name, branch, err))

        self.official_plugins = official_plugins

        core_json = self.web.session.get(self.github_fhdhr_core_info_url).json()
        for key in list(core_json.keys()):
            self.official_plugins[key] = {"name": key, "version": core_json[key], "type": "core"}

        self.db.set_fhdhr_value("versions", "dict", official_plugins)

    def register_version(self, item_name, item_version, item_type):
        """
        Register a version item.
        """

        self.logger.debug("Registering %s item: %s %s" % (item_type, item_name, item_version))
        self.dict[item_name] = {
            "name": item_name,
            "version": item_version,
            "type": item_type
            }

    def register_fhdhr(self):
        """
        Register core version items.
        """

        self.register_version("fHDHR", fHDHR_VERSION, "fHDHR")
        self.register_version("fHDHR_web", self.fHDHR_web.fHDHR_web_VERSION, "fHDHR")

    def register_env(self):
        """
        Register env version items.
        """

        self.register_version("Python", sys.version, "env")
        if sys.version_info.major == 2 or sys.version_info < (3, 7):
            self.logger.error('Error: fHDHR requires python 3.7+. Do NOT expect support for older versions of python.')

        opersystem = platform.system()
        self.register_version("Operating System", opersystem, "env")

        if opersystem in ["Linux", "Darwin"]:
            # Linux/Mac
            if os.getuid() == 0 or os.geteuid() == 0:
                self.logger.warning('Do not run fHDHR with root privileges.')

        elif opersystem in ["Windows"]:
            # Windows
            if os.environ.get("USERNAME") == "Administrator":
                self.logger.warning('Do not run fHDHR as Administrator.')

        else:
            self.logger.warning("Uncommon Operating System, use at your own risk.")

        cpu_type = platform.machine()
        self.register_version("CPU Type", cpu_type, "env")

        isdocker = is_docker()
        self.register_version("Docker", isdocker, "env")

    def register_plugins(self, plugins):
        """
        Register plugin version items.
        """

        self.logger.info("Scanning Plugins for Version Information.")
        self.plugins = plugins

        plugin_names = []
        for plugin in list(self.plugins.plugins.keys()):

            if self.plugins.plugins[plugin].plugin_name not in plugin_names:
                plugin_names.append(self.plugins.plugins[plugin].plugin_name)
                self.register_version(self.plugins.plugins[plugin].plugin_name, self.plugins.plugins[plugin].manifest["version"], "plugin")
[ "fHDHR.tools.is_docker", "os.environ.get", "os.getuid", "platform.system", "os.geteuid", "platform.machine" ]
[((4149, 4166), 'platform.system', 'platform.system', ([], {}), '()\n', (4164, 4166), False, 'import platform\n'), ((4759, 4777), 'platform.machine', 'platform.machine', ([], {}), '()\n', (4775, 4777), False, 'import platform\n'), ((4857, 4868), 'fHDHR.tools.is_docker', 'is_docker', ([], {}), '()\n', (4866, 4868), False, 'from fHDHR.tools import is_docker\n'), ((4323, 4334), 'os.getuid', 'os.getuid', ([], {}), '()\n', (4332, 4334), False, 'import os\n'), ((4343, 4355), 'os.geteuid', 'os.geteuid', ([], {}), '()\n', (4353, 4355), False, 'import os\n'), ((4519, 4545), 'os.environ.get', 'os.environ.get', (['"""USERNAME"""'], {}), "('USERNAME')\n", (4533, 4545), False, 'import os\n')]
from django.shortcuts import render_to_response, redirect from django.template import RequestContext from django.template.loader import render_to_string from django.core.mail import send_mail, mail_managers, EmailMessage from django.contrib.auth.decorators import login_required from django.contrib import messages from OpenDataCatalog.contest.models import * from datetime import datetime def get_entries(request, contest_id=1): contest = Contest.objects.get(pk=contest_id) entries = Entry.objects.filter(contest=contest, is_visible=True) if not request.GET.__contains__('sort'): entries = entries.order_by('-vote_count') return render_to_response('contest/entries.html', {'contest': contest, 'entries': entries}, context_instance=RequestContext(request)) def get_entries_table(request, contest_id=1): contest = Contest.objects.get(pk=contest_id) entries = Entry.objects.filter(contest=contest) if not request.GET.__contains__('sort'): entries = entries.order_by('-vote_count') return render_to_response('contest/entry_table.html', {'contest': contest, 'entries': entries}, context_instance=RequestContext(request)) def get_winners(request, contest_id=1): contest = Contest.objects.get(pk=contest_id) entries = Entry.objects.filter(contest=contest, is_visible=True).order_by('-vote_count') return render_to_response('contest/winners.html', {'contest': contest, 'entries': entries}, context_instance=RequestContext(request)) def get_rules(request, contest_id=1): contest = Contest.objects.get(pk=contest_id) return render_to_response('contest/rules.html', {'contest': contest}, context_instance=RequestContext(request)) def get_entry(request, entry_id): entry = Entry.objects.get(pk=entry_id) return render_to_response('contest/entry.html', {'contest': entry.contest, 'entry': entry}, context_instance=RequestContext(request)) #@login_required def add_entry(request, contest_id=1): contest = Contest.objects.get(pk=contest_id) if request.method == 'POST': form = EntryForm(request.POST) form.contest = contest_id if form.is_valid(): data = { #"submitter": request.user.username, "submit_date": datetime.now(), "org_name": form.cleaned_data.get("org_name"), "org_url": form.cleaned_data.get("org_url"), "contact_person": form.cleaned_data.get("contact_person"), "contact_phone": form.cleaned_data.get("contact_phone"), "contact_email": form.cleaned_data.get("contact_email"), "data_set": form.cleaned_data.get("data_set"), "data_use": form.cleaned_data.get("data_use"), "data_mission": form.cleaned_data.get("data_mission") } subject = 'OpenDataPhilly - Contest Submission' user_email = form.cleaned_data.get("contact_email") text_content = render_to_string('contest/submit_email.txt', data) text_content_copy = render_to_string('contest/submit_email_copy.txt', data) mail_managers(subject, text_content) msg = EmailMessage(subject, text_content_copy, to=[user_email]) msg.send() return render_to_response('contest/thanks.html', {'contest': contest}, context_instance=RequestContext(request)) else: form = EntryForm() return render_to_response('contest/submit_entry.html', {'contest': contest, 'form': form}, context_instance=RequestContext(request)) @login_required def add_vote(request, entry_id): entry = Entry.objects.get(pk=entry_id) contest = entry.contest user = User.objects.get(username=request.user) if contest.user_can_vote(user): new_vote = Vote(user=user, entry=entry) new_vote.save() entry.vote_count = entry.vote_set.count() entry.save() next_vote_date = contest.get_next_vote_date(user) if next_vote_date > contest.end_date: 
            messages.success(request, '<div style="font-weight:bold;">Your vote has been recorded.</div>Thank you for your vote! You will not be able to vote again before the end of the contest. <br><br>Please encourage others to visit <a href="/">OpenDataPhilly</a> and to join the race toward more open data!')
        else:
            messages.success(request, '<div style="font-weight:bold;">Your vote has been recorded.</div>You may vote once per week, so come back and visit us again on ' + next_vote_date.strftime('%A, %b %d %Y, %I:%M%p') + '. <br><br>Until then, encourage others to visit <a href="/">OpenDataPhilly</a> and to join the race toward more open data!')
    else:
        next_vote_date = contest.get_next_vote_date(user)
        if next_vote_date > contest.end_date:
            messages.error(request, '<div style="font-weight:bold;">You have already voted.</div>You will not be able to vote again before the end of the contest. <br><br>Please encourage others to visit <a href="/">OpenDataPhilly</a> and to join the race toward more open data!')
        else:
            messages.error(request, '<div style="font-weight:bold;">You have already voted.</div>You may vote once per week, so come back and visit us again on ' + next_vote_date.strftime('%A, %b %d %Y, %I:%M%p') + '. <br><br>Until then, encourage others to visit <a href="/">OpenDataPhilly</a> and to join the race toward more open data!')

    return redirect('/contest/?sort=vote_count')
[ "django.shortcuts.redirect", "django.contrib.messages.error", "django.template.loader.render_to_string", "datetime.datetime.now", "django.core.mail.EmailMessage", "django.contrib.messages.success", "django.core.mail.mail_managers", "django.template.RequestContext" ]
[((5469, 5506), 'django.shortcuts.redirect', 'redirect', (["'/contest/?sort=vote_count'"], {}), "('/contest/?sort=vote_count')\n", (5477, 5506), False, 'from django.shortcuts import render_to_response, redirect\n'), ((757, 780), 'django.template.RequestContext', 'RequestContext', (['request'], {}), '(request)\n', (771, 780), False, 'from django.template import RequestContext\n'), ((1142, 1165), 'django.template.RequestContext', 'RequestContext', (['request'], {}), '(request)\n', (1156, 1165), False, 'from django.template import RequestContext\n'), ((1463, 1486), 'django.template.RequestContext', 'RequestContext', (['request'], {}), '(request)\n', (1477, 1486), False, 'from django.template import RequestContext\n'), ((1667, 1690), 'django.template.RequestContext', 'RequestContext', (['request'], {}), '(request)\n', (1681, 1690), False, 'from django.template import RequestContext\n'), ((1883, 1906), 'django.template.RequestContext', 'RequestContext', (['request'], {}), '(request)\n', (1897, 1906), False, 'from django.template import RequestContext\n'), ((2977, 3027), 'django.template.loader.render_to_string', 'render_to_string', (['"""contest/submit_email.txt"""', 'data'], {}), "('contest/submit_email.txt', data)\n", (2993, 3027), False, 'from django.template.loader import render_to_string\n'), ((3060, 3115), 'django.template.loader.render_to_string', 'render_to_string', (['"""contest/submit_email_copy.txt"""', 'data'], {}), "('contest/submit_email_copy.txt', data)\n", (3076, 3115), False, 'from django.template.loader import render_to_string\n'), ((3128, 3164), 'django.core.mail.mail_managers', 'mail_managers', (['subject', 'text_content'], {}), '(subject, text_content)\n', (3141, 3164), False, 'from django.core.mail import send_mail, mail_managers, EmailMessage\n'), ((3184, 3241), 'django.core.mail.EmailMessage', 'EmailMessage', (['subject', 'text_content_copy'], {'to': '[user_email]'}), '(subject, text_content_copy, to=[user_email])\n', (3196, 3241), False, 'from django.core.mail import send_mail, mail_managers, EmailMessage\n'), ((3543, 3566), 'django.template.RequestContext', 'RequestContext', (['request'], {}), '(request)\n', (3557, 3566), False, 'from django.template import RequestContext\n'), ((4036, 4345), 'django.contrib.messages.success', 'messages.success', (['request', '"""<div style="font-weight:bold;">Your vote has been recorded.</div>Thank you for your vote! You will not be able to vote again before the end of the contest. <br><br>Please encourage others to visit <a href="/">OpenDataPhilly</a> and to join the race toward more open data!"""'], {}), '(request,\n    \'<div style="font-weight:bold;">Your vote has been recorded.</div>Thank you for your vote! You will not be able to vote again before the end of the contest. <br><br>Please encourage others to visit <a href="/">OpenDataPhilly</a> and to join the race toward more open data!\'\n    )\n', (4052, 4345), False, 'from django.contrib import messages\n'), ((4825, 5102), 'django.contrib.messages.error', 'messages.error', (['request', '"""<div style="font-weight:bold;">You have already voted.</div>You will not be able to vote again before the end of the contest. <br><br>Please encourage others to visit <a href="/">OpenDataPhilly</a> and to join the race toward more open data!"""'], {}), '(request,\n    \'<div style="font-weight:bold;">You have already voted.</div>You will not be able to vote again before the end of the contest. <br><br>Please encourage others to visit <a href="/">OpenDataPhilly</a> and to join the race toward more open data!\'\n    )\n', (4839, 5102), False, 'from django.contrib import messages\n'), ((2254, 2268), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (2266, 2268), False, 'from datetime import datetime\n'), ((3366, 3389), 'django.template.RequestContext', 'RequestContext', (['request'], {}), '(request)\n', (3380, 3389), False, 'from django.template import RequestContext\n')]
# -*- coding: utf-8 -*- """ Created on Tue Apr 6 09:59:14 2021 @author: ll17354 """ from sklearn.base import BaseEstimator, ClassifierMixin from sklearn.linear_model._logistic import LogisticRegression from sklearn.utils.validation import check_X_y, check_array, check_is_fitted import sys import warnings import math import statsmodels import numpy as np from scipy import stats import statsmodels.api as smf def firth_likelihood(beta, logit): return -(logit.loglike(beta) + 0.5*np.log(np.linalg.det(-logit.hessian(beta)))) def null_fit_firth(y, X, start_vec = None, step_limit=1000, convergence_limit=0.0001): """ Computes the null model in the likelihood ratio test Parameters ---------- X : array-like of shape (n_samples, n_features) Training vector, where n_samples is the number of samples and n_features is the number of features. Make sure X has an intercept term (column of ones). y : array-like of shape (n_samples,) Target vector relative to X. Please note this function only currently works for binomial regression so output values of {0, 1} will work while {0, 1, 2} will not. start_vec : int or None, optional starting vector The default is None. step_limit : TYPE, optional Max number of steps before MLE termination. The default is 1000. convergence_limit : TYPE, optional Minimum difference between MLE's. The default is 0.0001. Returns ------- return_fit : intercept: Intercept coeffcient beta: list of beta coeffcients bse: coeffcient standard errors fitll: fit log-likelihood """ logit_model = smf.Logit(y, X) if start_vec is None: start_vec = np.zeros(X.shape[1]) beta_iterations = [] beta_iterations.append(start_vec) for i in range(0, step_limit): pi = logit_model.predict(beta_iterations[i]) W = np.diagflat(np.multiply(pi, 1-pi)) var_covar_mat = np.linalg.pinv(-logit_model.hessian(beta_iterations[i])) # build hat matrix rootW = np.sqrt(W) H = np.dot(np.transpose(X), np.transpose(rootW)) H = np.matmul(var_covar_mat, H) H = np.matmul(np.dot(rootW, X), H) # penalised score U = np.matmul(np.transpose(X), y - pi + np.multiply(np.diagonal(H), 0.5 - pi)) new_beta = beta_iterations[i] + np.matmul(var_covar_mat, U) # step halving j = 0 while firth_likelihood(new_beta, logit_model) > firth_likelihood(beta_iterations[i], logit_model): new_beta = beta_iterations[i] + 0.5*(new_beta - beta_iterations[i]) j = j + 1 if (j > step_limit): sys.stderr.write('Firth regression failed. Try increasing step limit.\n') return None beta_iterations.append(new_beta) if i > 0 and (np.linalg.norm(beta_iterations[i] - beta_iterations[i-1]) < convergence_limit): break return_fit = None if np.linalg.norm(beta_iterations[i] - beta_iterations[i-1]) >= convergence_limit: sys.stderr.write('Firth regression failed to converge.\n') else: # Calculate stats fitll = -firth_likelihood(beta_iterations[-1], logit_model) intercept = beta_iterations[-1][0] beta = beta_iterations[-1][1:].tolist() bse = np.sqrt(np.diagonal(np.linalg.pinv(-logit_model.hessian(beta_iterations[-1])))) return_fit = intercept, beta, bse, fitll return return_fit class Firth_LogisticRegression(LogisticRegression, ClassifierMixin, BaseEstimator): """ This class represents a rewriting Firth regression originally implemented by John Lees (https://gist.github.com/johnlees/3e06380965f367e4894ea20fbae2b90d) into a class which can interact with the sci-kit learn ecosystem. To use the fit function make sure X has an intercept term (column of ones). When using validation functions make sure to not include this 'dummy' column of ones. 
    Please note: This estimator class does not currently pass the
    check_estimator test in sklearn. This is because it cannot perform the
    multinomial classification task that check_estimator attempts to pass it.

    Parameters
    ----------
    start_vec : ndarray of shape (n_features, 1). Default set to None in
        which case the zero vector is used.
    step_limit : int.
    convergence_limit : float.
    multi_class : string. Default is set to 'ovr' to let this function
        intgerate with the logistic_regression parent class and pass the
        _check_multi_class function. A bit hacky but works.

    Antributes
    ----------
    classes_ : ndarray of shape (n_classes, )
        A list of class labels known to the classifier.
    coef_ : ndarray of shape (1, n_features) or (n_classes, n_features)
        Coefficient of the features in the decision function.
        `coef_` is of shape (1, n_features) when the given problem is binary.
        In particular, when `multi_class='multinomial'`, `coef_` corresponds
        to outcome 1 (True) and `-coef_` corresponds to outcome 0 (False).
    beta_ : list of size n_features.
        This is used in the wald and likelihood ratio test functions.
    intercept_ : ndarray of shape (1,) or (n_classes,)
        Intercept (a.k.a. bias) added to the decision function.
    """

    def __init__(self, start_vec = None, step_limit = 1000, convergence_limit = 0.0001, multi_class = 'ovr'):
        self.start_vec = start_vec
        self.step_limit = step_limit
        self.convergence_limit = convergence_limit
        self.multi_class = multi_class # multiclass should not be changed from 'ovr'

    def fit(self, X = None, y = None):
        """
        Fits the model accoridng to given training data. This fit function which has been changed to
        work inaccordance with the sklearn estimator documentation. Major changes are, rather than
        returning specific variables fit() return an instance of itself allowing other functions to
        be run from it.

        Parameters
        ----------
        X : array-like of shape (n_samples, n_features)
            Training vector, where n_samples is the number of samples and
            n_features is the number of features. Make sure X has an intercept term (column of ones).
        y : array-like of shape (n_samples,)
            Target vector relative to X. Please note this function only currently works for binomial
            regression so output values of {0, 1} will work while {0, 1, 2} will not.

        Returns
        -------
        self
            Fitted estimator.
            self.fitll_ : fit log-likelihood
            self.intercept_ : intercept
            self.coef_ : coeffcients not including intercept (used in all other sklearn classes )
            self.beta_ : coeffcients including intercept (used in wald and LR tests)
            self.bse_ : standard errors
        """
        X, y = check_X_y(X, y)
        self.n_features_in = X.shape[1]-1
        self.classes_ = np.unique(y)
        logit_model = smf.Logit(y, X)

        if self.start_vec is None:
            start_vec = np.zeros(X.shape[1])

        beta_iterations = []
        beta_iterations.append(start_vec)
        for i in range(0, self.step_limit):
            pi = logit_model.predict(beta_iterations[i])
            W = np.diagflat(np.multiply(pi, 1-pi))
            var_covar_mat = np.linalg.pinv(-logit_model.hessian(beta_iterations[i]))

            # build hat matrix
            rootW = np.sqrt(W)
            H = np.dot(np.transpose(X), np.transpose(rootW))
            H = np.matmul(var_covar_mat, H)
            H = np.matmul(np.dot(rootW, X), H)

            # penalised score
            U = np.matmul(np.transpose(X), y - pi + np.multiply(np.diagonal(H), 0.5 - pi))
            new_beta = beta_iterations[i] + np.matmul(var_covar_mat, U)

            # step halving
            j = 0
            while firth_likelihood(new_beta, logit_model) > firth_likelihood(beta_iterations[i], logit_model):
                new_beta = beta_iterations[i] + 0.5*(new_beta - beta_iterations[i])
                j = j + 1
                if (j > self.step_limit):
                    sys.stderr.write('Firth regression failed. Try increasing step limit.\n')
                    return None

            beta_iterations.append(new_beta)
            if i > 0 and (np.linalg.norm(beta_iterations[i] - beta_iterations[i-1]) < self.convergence_limit):
                break

        if np.linalg.norm(beta_iterations[i] - beta_iterations[i-1]) >= self.convergence_limit:
            sys.stderr.write('Firth regression failed to converge\n')
        else:
            # Calculate stats
            self.fitll_ = -firth_likelihood(beta_iterations[-1], logit_model)
            self.intercept_ = beta_iterations[-1][0]
            self.coef_ = np.array(beta_iterations[-1][1:].tolist()).reshape((1, self.n_features_in)) #for other sklearn functions
            self.beta_ = [self.intercept_] + beta_iterations[-1][1:].tolist() #used by Wald and LR test
            self.bse_ = np.sqrt(np.diagonal(np.linalg.pinv(-logit_model.hessian(beta_iterations[-1]))))

        return self

    def test_wald(self):
        '''
        Implemnatation of the wald test

        Returns
        -------
        waldp : list
            A list p-values from the Wald test.
        '''
        check_is_fitted(self)
        waldp = []
        for beta_val, bse_val in zip(self.beta_, self.bse_):
            waldp.append(2 * (1 - stats.norm.cdf(abs(beta_val/bse_val))))
        return waldp

    def test_likelihoodratio(self, X, y, start_vec = None, step_limit=1000, convergence_limit=0.0001):
        """
        Implementation of the likelihood ratio test. An external function, null_fit_firth(), is used
        to refit the null-estimator.

        Parameters
        ----------
        X : {array-like} of shape (n_samples, n_features)
            Training vector, where n_samples is the number of samples and
            n_features is the number of features. Make sure to include the dummy column of ones.
        y : array-like of shape (n_samples,)
            Target vector relative to X.

        Returns
        -------
        lrtp : List
            List of p-values from the likelihood ratio test.
        """
        check_is_fitted(self)
        X_np = X.values
        lrtp = []
        for beta_idx, (beta_val, bse_val) in enumerate(zip(self.beta_, self.bse_)):
            null_X = np.delete(X_np, beta_idx, axis=1)
            (null_intercept, null_beta, null_bse, null_fitll) = null_fit_firth(y, null_X, start_vec, step_limit, convergence_limit)
            lrstat = -2*(null_fitll - self.fitll_)
            lrt_pvalue = 1
            if lrstat > 0: # non-convergence
                lrt_pvalue = stats.chi2.sf(lrstat, 1)
            lrtp.append(lrt_pvalue)

        return lrtp
[ "scipy.stats.chi2.sf", "numpy.multiply", "statsmodels.api.Logit", "sklearn.utils.validation.check_X_y", "numpy.zeros", "numpy.transpose", "sklearn.utils.validation.check_is_fitted", "numpy.diagonal", "numpy.linalg.norm", "numpy.matmul", "numpy.dot", "sys.stderr.write", "numpy.delete", "numpy.unique", "numpy.sqrt" ]
[((1714, 1729), 'statsmodels.api.Logit', 'smf.Logit', (['y', 'X'], {}), '(y, X)\n', (1723, 1729), True, 'import statsmodels.api as smf\n'), ((1781, 1801), 'numpy.zeros', 'np.zeros', (['X.shape[1]'], {}), '(X.shape[1])\n', (1789, 1801), True, 'import numpy as np\n'), ((2130, 2140), 'numpy.sqrt', 'np.sqrt', (['W'], {}), '(W)\n', (2137, 2140), True, 'import numpy as np\n'), ((2210, 2237), 'numpy.matmul', 'np.matmul', (['var_covar_mat', 'H'], {}), '(var_covar_mat, H)\n', (2219, 2237), True, 'import numpy as np\n'), ((3053, 3112), 'numpy.linalg.norm', 'np.linalg.norm', (['(beta_iterations[i] - beta_iterations[i - 1])'], {}), '(beta_iterations[i] - beta_iterations[i - 1])\n', (3067, 3112), True, 'import numpy as np\n'), ((3141, 3199), 'sys.stderr.write', 'sys.stderr.write', (['"""Firth regression failed to converge.\n"""'], {}), "('Firth regression failed to converge.\\n')\n", (3157, 3199), False, 'import sys\n'), ((7328, 7343), 'sklearn.utils.validation.check_X_y', 'check_X_y', (['X', 'y'], {}), '(X, y)\n', (7337, 7343), False, 'from sklearn.utils.validation import check_X_y, check_array, check_is_fitted\n'), ((7410, 7422), 'numpy.unique', 'np.unique', (['y'], {}), '(y)\n', (7419, 7422), True, 'import numpy as np\n'), ((7454, 7469), 'statsmodels.api.Logit', 'smf.Logit', (['y', 'X'], {}), '(y, X)\n', (7463, 7469), True, 'import statsmodels.api as smf\n'), ((9833, 9854), 'sklearn.utils.validation.check_is_fitted', 'check_is_fitted', (['self'], {}), '(self)\n', (9848, 9854), False, 'from sklearn.utils.validation import check_X_y, check_array, check_is_fitted\n'), ((10805, 10826), 'sklearn.utils.validation.check_is_fitted', 'check_is_fitted', (['self'], {}), '(self)\n', (10820, 10826), False, 'from sklearn.utils.validation import check_X_y, check_array, check_is_fitted\n'), ((1982, 2005), 'numpy.multiply', 'np.multiply', (['pi', '(1 - pi)'], {}), '(pi, 1 - pi)\n', (1993, 2005), True, 'import numpy as np\n'), ((2160, 2175), 'numpy.transpose', 'np.transpose', (['X'], {}), '(X)\n', (2172, 2175), True, 'import numpy as np\n'), ((2177, 2196), 'numpy.transpose', 'np.transpose', (['rootW'], {}), '(rootW)\n', (2189, 2196), True, 'import numpy as np\n'), ((2260, 2276), 'numpy.dot', 'np.dot', (['rootW', 'X'], {}), '(rootW, X)\n', (2266, 2276), True, 'import numpy as np\n'), ((2330, 2345), 'numpy.transpose', 'np.transpose', (['X'], {}), '(X)\n', (2342, 2345), True, 'import numpy as np\n'), ((2435, 2462), 'numpy.matmul', 'np.matmul', (['var_covar_mat', 'U'], {}), '(var_covar_mat, U)\n', (2444, 2462), True, 'import numpy as np\n'), ((7538, 7558), 'numpy.zeros', 'np.zeros', (['X.shape[1]'], {}), '(X.shape[1])\n', (7546, 7558), True, 'import numpy as np\n'), ((7932, 7942), 'numpy.sqrt', 'np.sqrt', (['W'], {}), '(W)\n', (7939, 7942), True, 'import numpy as np\n'), ((8020, 8047), 'numpy.matmul', 'np.matmul', (['var_covar_mat', 'H'], {}), '(var_covar_mat, H)\n', (8029, 8047), True, 'import numpy as np\n'), ((8931, 8990), 'numpy.linalg.norm', 'np.linalg.norm', (['(beta_iterations[i] - beta_iterations[i - 1])'], {}), '(beta_iterations[i] - beta_iterations[i - 1])\n', (8945, 8990), True, 'import numpy as np\n'), ((9028, 9085), 'sys.stderr.write', 'sys.stderr.write', (['"""Firth regression failed to converge\n"""'], {}), "('Firth regression failed to converge\\n')\n", (9044, 9085), False, 'import sys\n'), ((10974, 11007), 'numpy.delete', 'np.delete', (['X_np', 'beta_idx'], {'axis': '(1)'}), '(X_np, beta_idx, axis=1)\n', (10983, 11007), True, 'import numpy as np\n'), ((11292, 11316), 'scipy.stats.chi2.sf', 'stats.chi2.sf', (['lrstat', '(1)'], {}), '(lrstat, 1)\n', (11305, 11316), False, 'from scipy import stats\n'), ((2759, 2832), 'sys.stderr.write', 'sys.stderr.write', (['"""Firth regression failed. Try increasing step limit.\n"""'], {}), "('Firth regression failed. Try increasing step limit.\\n')\n", (2775, 2832), False, 'import sys\n'), ((2925, 2984), 'numpy.linalg.norm', 'np.linalg.norm', (['(beta_iterations[i] - beta_iterations[i - 1])'], {}), '(beta_iterations[i] - beta_iterations[i - 1])\n', (2939, 2984), True, 'import numpy as np\n'), ((7768, 7791), 'numpy.multiply', 'np.multiply', (['pi', '(1 - pi)'], {}), '(pi, 1 - pi)\n', (7779, 7791), True, 'import numpy as np\n'), ((7966, 7981), 'numpy.transpose', 'np.transpose', (['X'], {}), '(X)\n', (7978, 7981), True, 'import numpy as np\n'), ((7983, 8002), 'numpy.transpose', 'np.transpose', (['rootW'], {}), '(rootW)\n', (7995, 8002), True, 'import numpy as np\n'), ((8074, 8090), 'numpy.dot', 'np.dot', (['rootW', 'X'], {}), '(rootW, X)\n', (8080, 8090), True, 'import numpy as np\n'), ((8156, 8171), 'numpy.transpose', 'np.transpose', (['X'], {}), '(X)\n', (8168, 8171), True, 'import numpy as np\n'), ((8265, 8292), 'numpy.matmul', 'np.matmul', (['var_covar_mat', 'U'], {}), '(var_covar_mat, U)\n', (8274, 8292), True, 'import numpy as np\n'), ((8626, 8699), 'sys.stderr.write', 'sys.stderr.write', (['"""Firth regression failed. Try increasing step limit.\n"""'], {}), "('Firth regression failed. Try increasing step limit.\\n')\n", (8642, 8699), False, 'import sys\n'), ((8808, 8867), 'numpy.linalg.norm', 'np.linalg.norm', (['(beta_iterations[i] - beta_iterations[i - 1])'], {}), '(beta_iterations[i] - beta_iterations[i - 1])\n', (8822, 8867), True, 'import numpy as np\n'), ((8194, 8208), 'numpy.diagonal', 'np.diagonal', (['H'], {}), '(H)\n', (8205, 8208), True, 'import numpy as np\n')]
import pytest

from flask_open_directory import BaseQuery, User
from flask_open_directory.query.base_query import _quote_if_str
import ldap3


@pytest.fixture
def base_query(open_directory):
    return BaseQuery(open_directory=open_directory)


@pytest.fixture
def connection():
    return ldap3.Connection(ldap3.Server('localhost'))


@pytest.fixture
def custom_model():

    class CustomModel(object):

        @classmethod
        def ldap_attribute_map(cls):
            pass

        @classmethod
        def attribute_name_for(cls, key):
            pass

        @classmethod
        def query_cn(cls):
            return 'cn=custom'

        @classmethod
        def from_entry(cls, entry):
            pass

        @classmethod
        def ldap_keys(cls):
            pass

    return CustomModel


def test_quote_if_str():
    assert _quote_if_str('a') == "'a'"
    assert _quote_if_str(None) is None
    obj = object()
    assert _quote_if_str(obj) == obj


def test_BaseQuery_model(base_query):
    base_query.model = User
    assert base_query.model == User
    base_query.model = User()
    assert base_query.model == User
    with pytest.raises(TypeError):
        base_query.model = object


def test_BaseQuery_open_directory(base_query):
    assert base_query.open_directory is not None
    with pytest.raises(TypeError):
        base_query.open_directory = object()


def test_BaseQuery_search_base(base_query, open_directory, custom_model):
    assert base_query.search_base == open_directory.base_dn
    base_query.model = User
    assert base_query.search_base == 'cn=users,' + open_directory.base_dn

    no_open_directory = BaseQuery()
    assert no_open_directory.search_base is None

    explicit = BaseQuery(search_base='dc=example,dc=com')
    assert explicit.search_base == 'dc=example,dc=com'

    custom = BaseQuery(open_directory=open_directory, model=custom_model)
    assert custom.model is not None
    assert custom.search_base == 'cn=custom,{}'.format(open_directory.base_dn)


def test_BaseQuery_search_filter(base_query):
    assert base_query.search_filter == base_query._default_search_filter
    q = BaseQuery(search_filter='(objectClass=apple-user)')
    assert q.search_filter == '(objectClass=apple-user)'


def test_BaseQuery_ldap_attributes(base_query):
    assert base_query.ldap_attributes is None
    base_query.ldap_attributes = ['a', 'b', object()]
    assert base_query.ldap_attributes == ('a', 'b')
    base_query.ldap_attributes = 'c'
    assert base_query.ldap_attributes == ('c', )


def test_BaseQuery_connection(base_query, connection):
    assert base_query.connection is None
    base_query.connection = connection
    assert base_query.connection == connection
    with pytest.raises(TypeError):
        base_query.connection = object()


def test_BaseQuery_connection_ctx(base_query, connection):
    with base_query.connection_ctx() as ctx:
        assert isinstance(ctx, ldap3.Connection)
        assert ctx != connection

    base_query.connection = connection
    with base_query.connection_ctx() as ctx:
        assert ctx == connection

    no_ctx = BaseQuery()
    with no_ctx.connection_ctx() as ctx:
        assert ctx is None


def test_BaseQuery_first(base_query, open_directory):
    base_query.model = User
    user = base_query.first()
    assert isinstance(user, User)

    entry = base_query.first(convert=False)
    assert isinstance(entry, ldap3.Entry)

    with base_query.connection_ctx() as conn:
        user = base_query.first(conn)
        assert isinstance(user, User)

    invalid = BaseQuery()
    item = invalid.first()
    assert item is None


def test_BaseQuery_all(base_query):
    base_query.model = User
    users = base_query.all()
    assert isinstance(users, tuple)
    for u in users:
        assert isinstance(u, User)

    entries = base_query.all(convert=False)
    for e in entries:
        assert isinstance(e, ldap3.Entry)

    with base_query.connection_ctx() as conn:
        users = base_query.all(conn)
        assert len(users) > 0

    invalid = BaseQuery()
    items = invalid.all()
    assert isinstance(items, tuple)
    assert len(items) == 0


def test_BaseQuery_repr(base_query, open_directory):
    r = repr(base_query)
    assert "model=None" in r
    assert "search_base='{}'".format(open_directory.base_dn) in r
    assert "search_filter='(objectClass=*)'" in r
    assert "open_directory={}".format(repr(open_directory)) in r
    assert "connection={}".format(base_query.connection) in r
    assert "ldap_attributes=None" in r
    assert "BaseQuery(" in r
    assert ")" in r
[ "flask_open_directory.query.base_query._quote_if_str", "ldap3.Server", "flask_open_directory.BaseQuery", "pytest.raises", "flask_open_directory.User" ]
[((202, 242), 'flask_open_directory.BaseQuery', 'BaseQuery', ([], {'open_directory': 'open_directory'}), '(open_directory=open_directory)\n', (211, 242), False, 'from flask_open_directory import BaseQuery, User\n'), ((1048, 1054), 'flask_open_directory.User', 'User', ([], {}), '()\n', (1052, 1054), False, 'from flask_open_directory import BaseQuery, User\n'), ((1603, 1614), 'flask_open_directory.BaseQuery', 'BaseQuery', ([], {}), '()\n', (1612, 1614), False, 'from flask_open_directory import BaseQuery, User\n'), ((1680, 1722), 'flask_open_directory.BaseQuery', 'BaseQuery', ([], {'search_base': '"""dc=example,dc=com"""'}), "(search_base='dc=example,dc=com')\n", (1689, 1722), False, 'from flask_open_directory import BaseQuery, User\n'), ((1792, 1852), 'flask_open_directory.BaseQuery', 'BaseQuery', ([], {'open_directory': 'open_directory', 'model': 'custom_model'}), '(open_directory=open_directory, model=custom_model)\n', (1801, 1852), False, 'from flask_open_directory import BaseQuery, User\n'), ((2097, 2148), 'flask_open_directory.BaseQuery', 'BaseQuery', ([], {'search_filter': '"""(objectClass=apple-user)"""'}), "(search_filter='(objectClass=apple-user)')\n", (2106, 2148), False, 'from flask_open_directory import BaseQuery, User\n'), ((3076, 3087), 'flask_open_directory.BaseQuery', 'BaseQuery', ([], {}), '()\n', (3085, 3087), False, 'from flask_open_directory import BaseQuery, User\n'), ((3530, 3541), 'flask_open_directory.BaseQuery', 'BaseQuery', ([], {}), '()\n', (3539, 3541), False, 'from flask_open_directory import BaseQuery, User\n'), ((4018, 4029), 'flask_open_directory.BaseQuery', 'BaseQuery', ([], {}), '()\n', (4027, 4029), False, 'from flask_open_directory import BaseQuery, User\n'), ((307, 332), 'ldap3.Server', 'ldap3.Server', (['"""localhost"""'], {}), "('localhost')\n", (319, 332), False, 'import ldap3\n'), ((797, 815), 'flask_open_directory.query.base_query._quote_if_str', '_quote_if_str', (['"""a"""'], {}), "('a')\n", (810, 815), False, 'from flask_open_directory.query.base_query import _quote_if_str\n'), ((836, 855), 'flask_open_directory.query.base_query._quote_if_str', '_quote_if_str', (['None'], {}), '(None)\n', (849, 855), False, 'from flask_open_directory.query.base_query import _quote_if_str\n'), ((894, 912), 'flask_open_directory.query.base_query._quote_if_str', '_quote_if_str', (['obj'], {}), '(obj)\n', (907, 912), False, 'from flask_open_directory.query.base_query import _quote_if_str\n'), ((1101, 1125), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (1114, 1125), False, 'import pytest\n'), ((1269, 1293), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (1282, 1293), False, 'import pytest\n'), ((2688, 2712), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (2701, 2712), False, 'import pytest\n')]
from avgn.utils.audio import get_samplerate from avgn.utils.json import NoIndent, NoIndentEncoder import numpy as np from avgn.utils.paths import DATA_DIR import librosa from datetime import datetime import pandas as pd import avgn import json DATASET_ID = 'mobysound_humpback_whale' def load_labs(labels): all_labels = [] for label_file in labels: label_df = pd.DataFrame( [line.split() for line in open(label_file, "r")], columns=["start_time", "end_time", "low_freq", "high_freq", "SNR"], ) label_df['file'] = label_file.stem all_labels.append(label_df) all_labels = pd.concat(all_labels).reset_index() for lab in ['start_time', 'end_time', 'low_freq', 'high_freq', 'SNR']: all_labels[lab] = all_labels[lab].values.astype('float32') return all_labels def find_longest_nonvocal_stretch(file_df, wav_duration): """ An ugly function to find the longest stretch of nonvocal behavior in a syllable dataframe """ ## find the longest stretch of non-vocal behavior in this wav max_break = np.argmax(file_df.start_time.values[1:] - file_df.end_time.values[:-1]) noise_end_time = file_df.start_time.values[1:][max_break] noise_start_time = file_df.end_time.values[:-1][max_break] start_noise = file_df.start_time.values[0] end_noise = wav_duration - file_df.end_time.values[-1] noise_lens = np.array([noise_end_time - noise_start_time, start_noise, end_noise]) noise_start_ends = np.array( [ [noise_start_time, noise_end_time], [0, start_noise], [file_df.end_time.values[-1], wav_duration], ] ) noise_start, noise_end = noise_start_ends[np.argmax(noise_lens)] return noise_start, noise_end def generate_noise_and_json(bout_number, fn, DT_ID, wavloc, file_df): # location of wav #wavloc = np.array(wavs)[np.array([i.stem for i in wavs]) == fn][0] # wav time wavdate = datetime.strptime(fn, "%y%m%d-%H%M") wav_date = wavdate.strftime("%Y-%m-%d_%H-%M-%S") # wav samplerate and duration sr = get_samplerate(wavloc.as_posix()) wav_duration = librosa.get_duration(filename=wavloc) # df of syllables in file #file_df = label_df[label_df.file == fn].sort_values(by="start_time") ## find the longest stretch of non-vocal behavior in this wav noise_start, noise_end = find_longest_nonvocal_stretch(file_df, wav_duration) bout_start_string = avgn.utils.general.seconds_to_str(noise_start) # determine save locations noise_out = ( DATA_DIR / "processed" / DATASET_ID / DT_ID / "NOISE" / (fn + "__" + bout_start_string + ".WAV") ) json_out = DATA_DIR / "processed" / DATASET_ID / DT_ID / "JSON" / (fn + ".JSON") # wav general information json_dict = {} json_dict["bout_number"] = bout_number json_dict["species"] = "Megaptera novaengliae" json_dict["common_name"] = "Humpback whale" json_dict["datetime"] = wav_date json_dict["samplerate_hz"] = sr json_dict["length_s"] = wav_duration json_dict["wav_loc"] = wavloc.as_posix() json_dict["noise_loc"] = noise_out.as_posix() json_dict["indvs"] = { "UNK": { "syllables": { "start_times": NoIndent( list(file_df.start_time.values.astype("float")) ), "end_times": NoIndent(list(file_df.end_time.astype("float"))), "high_freq": NoIndent(list(file_df.high_freq.astype("float"))), "low_freq": NoIndent(list(file_df.low_freq.astype("float"))), "SNR": NoIndent(list(file_df.SNR.astype("float"))), } } } json_txt = json.dumps(json_dict, cls=NoIndentEncoder, indent=2) # save wav file noise_wav, sr = librosa.load( wavloc, sr=None, mono=True, offset=noise_start, duration=noise_end - noise_start ) avgn.utils.paths.ensure_dir(noise_out) librosa.output.write_wav(noise_out, y=noise_wav, sr=sr, norm=True) # save json 
    avgn.utils.paths.ensure_dir(json_out.as_posix())
    print(json_txt, file=open(json_out.as_posix(), "w"))
[ "numpy.argmax", "avgn.utils.paths.ensure_dir", "json.dumps", "datetime.datetime.strptime", "numpy.array", "avgn.utils.general.seconds_to_str", "librosa.load", "librosa.output.write_wav", "pandas.concat", "librosa.get_duration" ]
[((1087, 1158), 'numpy.argmax', 'np.argmax', (['(file_df.start_time.values[1:] - file_df.end_time.values[:-1])'], {}), '(file_df.start_time.values[1:] - file_df.end_time.values[:-1])\n', (1096, 1158), True, 'import numpy as np\n'), ((1407, 1476), 'numpy.array', 'np.array', (['[noise_end_time - noise_start_time, start_noise, end_noise]'], {}), '([noise_end_time - noise_start_time, start_noise, end_noise])\n', (1415, 1476), True, 'import numpy as np\n'), ((1500, 1614), 'numpy.array', 'np.array', (['[[noise_start_time, noise_end_time], [0, start_noise], [file_df.end_time.\n values[-1], wav_duration]]'], {}), '([[noise_start_time, noise_end_time], [0, start_noise], [file_df.\n end_time.values[-1], wav_duration]])\n', (1508, 1614), True, 'import numpy as np\n'), ((1968, 2004), 'datetime.datetime.strptime', 'datetime.strptime', (['fn', '"""%y%m%d-%H%M"""'], {}), "(fn, '%y%m%d-%H%M')\n", (1985, 2004), False, 'from datetime import datetime\n'), ((2154, 2191), 'librosa.get_duration', 'librosa.get_duration', ([], {'filename': 'wavloc'}), '(filename=wavloc)\n', (2174, 2191), False, 'import librosa\n'), ((2469, 2515), 'avgn.utils.general.seconds_to_str', 'avgn.utils.general.seconds_to_str', (['noise_start'], {}), '(noise_start)\n', (2502, 2515), False, 'import avgn\n'), ((3754, 3806), 'json.dumps', 'json.dumps', (['json_dict'], {'cls': 'NoIndentEncoder', 'indent': '(2)'}), '(json_dict, cls=NoIndentEncoder, indent=2)\n', (3764, 3806), False, 'import json\n'), ((3848, 3947), 'librosa.load', 'librosa.load', (['wavloc'], {'sr': 'None', 'mono': '(True)', 'offset': 'noise_start', 'duration': '(noise_end - noise_start)'}), '(wavloc, sr=None, mono=True, offset=noise_start, duration=\n noise_end - noise_start)\n', (3860, 3947), False, 'import librosa\n'), ((3961, 3999), 'avgn.utils.paths.ensure_dir', 'avgn.utils.paths.ensure_dir', (['noise_out'], {}), '(noise_out)\n', (3988, 3999), False, 'import avgn\n'), ((4004, 4070), 'librosa.output.write_wav', 'librosa.output.write_wav', (['noise_out'], {'y': 'noise_wav', 'sr': 'sr', 'norm': '(True)'}), '(noise_out, y=noise_wav, sr=sr, norm=True)\n', (4028, 4070), False, 'import librosa\n'), ((1717, 1738), 'numpy.argmax', 'np.argmax', (['noise_lens'], {}), '(noise_lens)\n', (1726, 1738), True, 'import numpy as np\n'), ((640, 661), 'pandas.concat', 'pd.concat', (['all_labels'], {}), '(all_labels)\n', (649, 661), True, 'import pandas as pd\n')]
# # Copyright 2019 The FATE Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import numpy as np import unittest from federatedml.ftl.plain_ftl import PlainFTLHostModel from federatedml.ftl.hetero_ftl.hetero_ftl_guest import HeteroPlainFTLGuest, HeteroFTLGuest from federatedml.ftl.plain_ftl import PlainFTLGuestModel from federatedml.feature.instance import Instance from federatedml.ftl.common.data_util import create_table from federatedml.ftl.test.fake_models import FakeAutoencoder, FakeDiffConverge from federatedml.param.param import FTLModelParam from federatedml.util.transfer_variable import HeteroFTLTransferVariable from arch.api.eggroll import init class TestHeteroFTLGuest(HeteroPlainFTLGuest): def __init__(self, guest, model_param, transfer_variable): super(TestHeteroFTLGuest, self).__init__(guest, model_param, transfer_variable) U_B = np.array([[4, 2, 3, 1, 2], [6, 5, 1, 4, 5], [7, 4, 1, 9, 10], [6, 5, 1, 4, 5]]) overlap_indexes = [1, 2] Wh = np.ones((5, U_B.shape[1])) bh = np.zeros(U_B.shape[1]) autoencoderB = FakeAutoencoder(1) autoencoderB.build(U_B.shape[1], Wh, bh) self.host = PlainFTLHostModel(autoencoderB, self.model_param) self.host.set_batch(U_B, overlap_indexes) def _do_remote(self, value=None, name=None, tag=None, role=None, idx=None): print("@_do_remote", value, name, tag, role, idx) def _do_get(self, name=None, tag=None, idx=None): print("@_do_get", name, tag, idx) if tag == "HeteroFTLTransferVariable.host_sample_indexes.0": return [np.array([1, 2, 4, 5])] elif tag == "HeteroFTLTransferVariable.host_component_list.0.0": return self.host.send_components() return None class TestCreateGuestHostEggrollTable(unittest.TestCase): def test_hetero_plain_guest_prepare_table(self): U_A = np.array([[1, 2, 3, 4, 5], [4, 5, 6, 7, 8], [7, 8, 9, 10, 11], [4, 5, 6, 7, 8]]) y = np.array([[1], [-1], [1], [-1]]) Wh = np.ones((5, U_A.shape[1])) bh = np.zeros(U_A.shape[1]) model_param = FTLModelParam(alpha=1, max_iteration=1) autoencoderA = FakeAutoencoder(0) autoencoderA.build(U_A.shape[1], Wh, bh) guest = PlainFTLGuestModel(autoencoderA, model_param) converge_func = FakeDiffConverge(None) ftl_guest = TestHeteroFTLGuest(guest, model_param, HeteroFTLTransferVariable()) ftl_guest.set_converge_function(converge_func) guest_sample_indexes = np.array([0, 1, 2, 3]) guest_x_dict = {} guest_label_dict = {} instance_dict = {} instance_list = [] np.random.seed(100) for i, feature, label, in zip(guest_sample_indexes, U_A, y): instance = Instance(inst_id=i, features=feature, label=label[0]) guest_x_dict[i] = feature guest_label_dict[i] = label[0] instance_dict[i] = instance instance_list.append(instance) guest_x = create_table(instance_list, indexes=guest_sample_indexes) guest_x, overlap_indexes, non_overlap_indexes, guest_y = ftl_guest.prepare_data(guest_x) print("guest_x", guest_x) print("overlap_indexes", overlap_indexes) print("non_overlap_indexes", non_overlap_indexes) print("guest_y", guest_y) if __name__ == '__main__': init() unittest.main()
[ "unittest.main", "federatedml.ftl.plain_ftl.PlainFTLGuestModel", "numpy.random.seed", "federatedml.util.transfer_variable.HeteroFTLTransferVariable", "federatedml.feature.instance.Instance", "numpy.zeros", "numpy.ones", "federatedml.ftl.test.fake_models.FakeAutoencoder", "federatedml.ftl.plain_ftl.PlainFTLHostModel", "numpy.array", "federatedml.ftl.test.fake_models.FakeDiffConverge", "federatedml.ftl.common.data_util.create_table", "arch.api.eggroll.init", "federatedml.param.param.FTLModelParam" ]
[((4080, 4086), 'arch.api.eggroll.init', 'init', ([], {}), '()\n', (4084, 4086), False, 'from arch.api.eggroll import init\n'), ((4091, 4106), 'unittest.main', 'unittest.main', ([], {}), '()\n', (4104, 4106), False, 'import unittest\n'), ((1417, 1496), 'numpy.array', 'np.array', (['[[4, 2, 3, 1, 2], [6, 5, 1, 4, 5], [7, 4, 1, 9, 10], [6, 5, 1, 4, 5]]'], {}), '([[4, 2, 3, 1, 2], [6, 5, 1, 4, 5], [7, 4, 1, 9, 10], [6, 5, 1, 4, 5]])\n', (1425, 1496), True, 'import numpy as np\n'), ((1617, 1643), 'numpy.ones', 'np.ones', (['(5, U_B.shape[1])'], {}), '((5, U_B.shape[1]))\n', (1624, 1643), True, 'import numpy as np\n'), ((1657, 1679), 'numpy.zeros', 'np.zeros', (['U_B.shape[1]'], {}), '(U_B.shape[1])\n', (1665, 1679), True, 'import numpy as np\n'), ((1704, 1722), 'federatedml.ftl.test.fake_models.FakeAutoencoder', 'FakeAutoencoder', (['(1)'], {}), '(1)\n', (1719, 1722), False, 'from federatedml.ftl.test.fake_models import FakeAutoencoder, FakeDiffConverge\n'), ((1793, 1842), 'federatedml.ftl.plain_ftl.PlainFTLHostModel', 'PlainFTLHostModel', (['autoencoderB', 'self.model_param'], {}), '(autoencoderB, self.model_param)\n', (1810, 1842), False, 'from federatedml.ftl.plain_ftl import PlainFTLHostModel\n'), ((2510, 2595), 'numpy.array', 'np.array', (['[[1, 2, 3, 4, 5], [4, 5, 6, 7, 8], [7, 8, 9, 10, 11], [4, 5, 6, 7, 8]]'], {}), '([[1, 2, 3, 4, 5], [4, 5, 6, 7, 8], [7, 8, 9, 10, 11], [4, 5, 6, 7, 8]]\n )\n', (2518, 2595), True, 'import numpy as np\n'), ((2675, 2707), 'numpy.array', 'np.array', (['[[1], [-1], [1], [-1]]'], {}), '([[1], [-1], [1], [-1]])\n', (2683, 2707), True, 'import numpy as np\n'), ((2722, 2748), 'numpy.ones', 'np.ones', (['(5, U_A.shape[1])'], {}), '((5, U_A.shape[1]))\n', (2729, 2748), True, 'import numpy as np\n'), ((2762, 2784), 'numpy.zeros', 'np.zeros', (['U_A.shape[1]'], {}), '(U_A.shape[1])\n', (2770, 2784), True, 'import numpy as np\n'), ((2808, 2847), 'federatedml.param.param.FTLModelParam', 'FTLModelParam', ([], {'alpha': '(1)', 'max_iteration': '(1)'}), '(alpha=1, max_iteration=1)\n', (2821, 2847), False, 'from federatedml.param.param import FTLModelParam\n'), ((2872, 2890), 'federatedml.ftl.test.fake_models.FakeAutoencoder', 'FakeAutoencoder', (['(0)'], {}), '(0)\n', (2887, 2890), False, 'from federatedml.ftl.test.fake_models import FakeAutoencoder, FakeDiffConverge\n'), ((2956, 3001), 'federatedml.ftl.plain_ftl.PlainFTLGuestModel', 'PlainFTLGuestModel', (['autoencoderA', 'model_param'], {}), '(autoencoderA, model_param)\n', (2974, 3001), False, 'from federatedml.ftl.plain_ftl import PlainFTLGuestModel\n'), ((3027, 3049), 'federatedml.ftl.test.fake_models.FakeDiffConverge', 'FakeDiffConverge', (['None'], {}), '(None)\n', (3043, 3049), False, 'from federatedml.ftl.test.fake_models import FakeAutoencoder, FakeDiffConverge\n'), ((3225, 3247), 'numpy.array', 'np.array', (['[0, 1, 2, 3]'], {}), '([0, 1, 2, 3])\n', (3233, 3247), True, 'import numpy as np\n'), ((3366, 3385), 'numpy.random.seed', 'np.random.seed', (['(100)'], {}), '(100)\n', (3380, 3385), True, 'import numpy as np\n'), ((3715, 3772), 'federatedml.ftl.common.data_util.create_table', 'create_table', (['instance_list'], {'indexes': 'guest_sample_indexes'}), '(instance_list, indexes=guest_sample_indexes)\n', (3727, 3772), False, 'from federatedml.ftl.common.data_util import create_table\n'), ((3109, 3136), 'federatedml.util.transfer_variable.HeteroFTLTransferVariable', 'HeteroFTLTransferVariable', ([], {}), '()\n', (3134, 3136), False, 'from federatedml.util.transfer_variable import HeteroFTLTransferVariable\n'), 
((3478, 3531), 'federatedml.feature.instance.Instance', 'Instance', ([], {'inst_id': 'i', 'features': 'feature', 'label': 'label[0]'}), '(inst_id=i, features=feature, label=label[0])\n', (3486, 3531), False, 'from federatedml.feature.instance import Instance\n'), ((2218, 2240), 'numpy.array', 'np.array', (['[1, 2, 4, 5]'], {}), '([1, 2, 4, 5])\n', (2226, 2240), True, 'import numpy as np\n')]
import logging

from google.appengine.api import memcache

from mlabns.db import model
from mlabns.util import constants


def get_nagios_config():
    """Retrieves nagios config info. First checks memcache, then datastore.

    Returns:
        Nagios model instance
    """
    nagios = memcache.get(constants.DEFAULT_NAGIOS_ENTRY)
    if not nagios:
        nagios = model.Nagios.get_by_key_name(constants.DEFAULT_NAGIOS_ENTRY)
        if nagios:
            memcache.set(constants.DEFAULT_NAGIOS_ENTRY, nagios)
        else:
            logging.error('Datastore does not have the Nagios credentials.')

    return nagios
[ "logging.error", "mlabns.db.model.Nagios.get_by_key_name", "google.appengine.api.memcache.set", "google.appengine.api.memcache.get" ]
[((289, 333), 'google.appengine.api.memcache.get', 'memcache.get', (['constants.DEFAULT_NAGIOS_ENTRY'], {}), '(constants.DEFAULT_NAGIOS_ENTRY)\n', (301, 333), False, 'from google.appengine.api import memcache\n'), ((370, 430), 'mlabns.db.model.Nagios.get_by_key_name', 'model.Nagios.get_by_key_name', (['constants.DEFAULT_NAGIOS_ENTRY'], {}), '(constants.DEFAULT_NAGIOS_ENTRY)\n', (398, 430), False, 'from mlabns.db import model\n'), ((462, 514), 'google.appengine.api.memcache.set', 'memcache.set', (['constants.DEFAULT_NAGIOS_ENTRY', 'nagios'], {}), '(constants.DEFAULT_NAGIOS_ENTRY, nagios)\n', (474, 514), False, 'from google.appengine.api import memcache\n'), ((541, 605), 'logging.error', 'logging.error', (['"""Datastore does not have the Nagios credentials."""'], {}), "('Datastore does not have the Nagios credentials.')\n", (554, 605), False, 'import logging\n')]
import intcode
import utils


class Droid(object):
    def __init__(self):
        super(Droid, self).__init__()
        self.grid = []
        self.current_line = []
        self.width = 0
        self.height = 0
        self.neighborhoods = ((1, 0), (0, 1), (-1, 0), (0, -1))

    def printGrid(self):
        s = ""
        for line in self.grid:
            s += str(''.join(line)) + "\n"
        print(s)

    def getNumNeighbors(self, x, y):
        num = 0
        if self.grid[y][x] != '#':
            return 0
        for n in self.neighborhoods:
            check_x = x + n[0]
            check_y = y + n[1]
            if check_x > 0 and check_x < self.width:
                if check_y > 0 and check_y < self.height:
                    if self.grid[check_y][check_x] == '#':
                        num += 1
        return num

    def setOutput(self, v):
        if v > 127:
            print(f"{v} dust collected")
        if v == 10:
            self.grid.append(self.current_line)
            if len(self.current_line) > 0:
                self.width = len(self.current_line)
            self.height += 1
            self.current_line = []
        else:
            self.current_line.append(chr(v))

    def getInput(self):
        return 0


data = [int(x) for x in utils.get_input(2019, 17).split(',')]
data[0] = 2

droid = Droid()
pc = intcode.Intcode(data, droid.getInput, droid.setOutput)
pc.run()
droid.printGrid()

intersections = []
for x in range(droid.width):
    for y in range(droid.height):
        if droid.getNumNeighbors(x, y) > 2:
            intersections.append((x, y))

print(intersections)
s = [i[0] * i[1] for i in intersections]
print(sum(s))
[ "intcode.Intcode", "utils.get_input" ]
[((1367, 1421), 'intcode.Intcode', 'intcode.Intcode', (['data', 'droid.getInput', 'droid.setOutput'], {}), '(data, droid.getInput, droid.setOutput)\n', (1382, 1421), False, 'import intcode\n'), ((1296, 1321), 'utils.get_input', 'utils.get_input', (['(2019)', '(17)'], {}), '(2019, 17)\n', (1311, 1321), False, 'import utils\n')]
__author__ = '<NAME>'
__email__ = '<EMAIL>'
__version__ = '1.8'
__status__ = "Research"
__date__ = "2/1/2020"
__license__ = "MIT License"

import os
import sys
import numpy as np
import time
import glob

from torchvision.utils import make_grid
from tensorboardX import SummaryWriter
import imageio
import skimage

from parameters import Params
from sys_utils import tohms
from image_utils import save_image


#========================================================================================
class TeePipe(object):
    #source: https://stackoverflow.com/q/616645
    def __init__(self, filename="Red.Wood", mode="a", buff=0):
        self.stdout = sys.stdout
        # self.file = open(filename, mode, buff)
        self.file = open(filename, mode)
        sys.stdout = self

    def __del__(self):
        self.close()

    def __enter__(self):
        pass

    def __exit__(self, *args):
        self.close()

    def write(self, message):
        self.stdout.write(message)
        self.file.write(message)

    def flush(self):
        self.stdout.flush()
        self.file.flush()
        os.fsync(self.file.fileno())

    def close(self):
        if self.stdout != None:
            sys.stdout = self.stdout
            self.stdout = None
        if self.file != None:
            self.file.close()
            self.file = None


# ========================================================================================
class MLLogger():
    def __init__(self, hps):
        self.hps = hps
        self.logf = None
        self.im_size = hps.img_size
        self.epoch_num = hps.epochs_max  # Total number of epochs
        self.iter_num = {}               # Iterations per epoch
        # self.iter_epoch = 0
        self.batch_size = hps.batch_size
        self.data = []
        self.dkeys_id = ['ts', 'epoch', 'iter', 'stage']  # Key lookup by ID
        self.dkeys = {}                  # ID lookup by key
        self.m_first = {}                # stage_name -> position of first record
        self.m_last = {}                 # stage_name -> position of last record
        self.start_time = None           # global start timestamp
        self.iter_global = 0             # Total iteration since the beginning of the training
        self.print_header = True
        self.data_format_changed = True
        self.last_report_pos = {}        # stage_name -> Position in self.data of the last report
        # Tensorboard
        self.writer = None
        self.log_id = None
        return

    def load_config(self):
        logdir = self.exp_path+'/log/'
        cfg_filename = os.path.join(logdir, 'cfg-'+str(self.log_id-1)+'-*.json')
        cfg_files = glob.glob(cfg_filename)
        cfg_files.sort(reverse=True)
        if len(cfg_files) == 0 or not os.path.isfile(cfg_files[0]):
            return None
        p = Params()
        if not p.load(cfg_files[0]):
            return None
        return p

    def save_config(self, epoch=None):
        logdir = self.exp_path+'/log/'
        cfg_filename = os.path.join(logdir, 'cfg-'+str(self.log_id)+'-'+str(epoch)+'.json')
        self.hps.save(cfg_filename)
        return

    def open_experiment(self, experiment_name='m1'):
        """ Creates sub-directory structure
            - Creates new log file
        """
        self.experiment_name = experiment_name
        self.exp_path = os.path.join(experiment_name)
        os.makedirs(self.exp_path, exist_ok=True)

        if not self.hps.eval and self.experiment_name != '.':
            # Backup source code & configs
            os.system('cp *.py ' + self.exp_path + '/')

        logdir = self.exp_path+'/tblog/'
        os.makedirs(logdir, exist_ok=True)
        self.writer = SummaryWriter(logdir)

        logdir = self.exp_path+'/log/'
        os.makedirs(logdir, exist_ok=True)

        self.model_path = os.path.join(self.exp_path, 'models')
        os.makedirs(self.model_path, exist_ok=True)

        # Create new log files
        prefix = 'eval-' if self.hps.eval else 'train-'
        log_id = 0
        while True:
            log_filename = prefix+'log-'+str(log_id)+'.txt'
            log_path = os.path.join(logdir, log_filename)
            if not os.path.isfile(log_path):
                break
            log_id += 1

        if self.hps.log_stdout:
            stdout_log_filename = prefix+'stdout-'+str(log_id)+'.txt'
            stdout_log_filename = os.path.join(logdir, stdout_log_filename)
            self.stdout_logger = TeePipe(stdout_log_filename)

        print("Creating new log file:", log_path)
        self.logf = open(log_path, 'wt')
        self.log_id = log_id
        return

    def set_samples_num(self, stage_name, samples_num):
        self.iter_num[stage_name] = self.hps.batch_size * int(np.floor(samples_num / self.hps.batch_size))

    def start_epoch(self, stage_name, epoch):
        """ Creates a null record with a current timestamp """
        if self.start_time is None:
            self.start_time = time.time()

        # Store the position of the first epoch record
        # There can be one start per stage
        self.m_first[stage_name] = len(self.data)
        self.m_last[stage_name] = len(self.data)
        self.last_report_pos[stage_name] = len(self.data)

        rec = [0]*len(self.dkeys_id)
        rec[0] = time.time() - self.start_time
        rec[1] = epoch
        rec[2] = self.iter_global
        rec[3] = stage_name
        self.data.append(rec)
        self.print_header = True
        return

    def log_loss(self, epoch, iter, losses, stage_name):
        """
        Args:
            epoch (int): current epoch starting from 0
            iter (int): sample iteration within the epoch
            stage_name (str): 'train', 'val', 'test'
            losses (dict): dictionary of loss_name->loss_val
        """
        if iter is not None:
            self.iter_global = iter

        # Collect new value keys
        for key, val in losses.items():
            if key not in self.dkeys_id:
                # Add new key=val
                self.dkeys_id.append(key)
                self.data_format_changed = True

        # Update the key-index lookup table
        if self.data_format_changed:
            self.dkeys = {}
            for i, key in enumerate(self.dkeys_id):
                self.dkeys[key] = i

        # Store new data
        rec = [0]*len(self.dkeys_id)
        rec[0] = time.time() - self.start_time
        rec[1] = epoch
        rec[2] = self.iter_global  # Global iteration
        rec[3] = stage_name

        # Generate tensorboard record
        tboard_losses = {}
        for key, val in losses.items():
            id = self.dkeys[key]
            rec[id] = val
            key = stage_name+'_'+key
            tboard_losses[key] = val

        self.data.append(rec)

        # Append log to the file
        if self.logf is not None:
            if self.data_format_changed:
                # Insert data format header
                header_str = [str(v) for v in self.dkeys_id]
                self.logf.write('\n'+' '.join(header_str)+'\n')
            line = [str(v) for v in rec]
            self.logf.write(' '.join(line)+'\n')
            self.logf.flush()

        # Update tensorboard
        # {'d_loss': d_loss, 'grad_penalty': grad_penalty}
        self.writer.add_scalars('losses', tboard_losses, self.iter_global)

        self.m_last[stage_name] = len(self.data)-1
        self.data_format_changed = False
        return

    def print_table(self, name, data, header=None):
        """
        max_iter = self.iter_num*self.epoch_num
        epoch_str = str(rec[-1][1])+" ("+str(int(done))+"%)"
        header = ['T', 'e('+str(self.epoch_num)+')', 'iter('+str(max_iter//1000)+'k)', 'batch (ms)']
        data = [[rec[-1][3], epoch_str, str(last_iter), batch_took_avg*1000.0]]
        """
        # Print table
        table_width = 0
        if header is not None:
            self.col_width = []
            line = ""
            for i, hv in enumerate(header):
                line += '{:<{c0}}'.format(hv, c0=len(hv))
                self.col_width.append(len(hv))
            print('')
            if name is not None:
                print(name)
            print(line)
            head_len = len(line)
            print('-'*head_len)
            table_width = head_len

        # Print data
        for r, rv in enumerate(data):
            line = ""
            for c, cv in enumerate(rv):
                line += '{:<{c0}}'.format(cv, c0=self.col_width[c])
            print(line, flush=True)
            if len(line) > table_width:
                table_width = len(line)
        return table_width

    def get_avg(self, begin, end, cols=[]):
        rec = self.data[begin:end]
        # Get the max number of stored values in this run
        mx = 0
        for val in rec:
            if len(val) > mx:
                mx = len(val)
        # Create numpy vector for the averages
        # (builtin object/float below: the np.object and np.float aliases were removed from NumPy)
        rec = np.asarray([x+[0]*(mx-len(x)) for x in rec], dtype=object)
        # Get only the records with loss values
        rec_avg = rec.copy()
        rec_avg[:, :4] = 0
        rec_avg = rec_avg.astype(float)
        rec_avg = rec_avg.mean(0)
        return rec_avg

    def print_batch_stat(self, stage_name='t'):
        last_epoch_pos = self.m_last.get(stage_name, 0)
        last_report_pos = self.last_report_pos.get(stage_name, 0)
        if last_report_pos == last_epoch_pos:
            # Already reported
            return

        # Get averages since the last report
        rec_avg = self.get_avg(last_report_pos+1, last_epoch_pos+1)
        rec_last = self.data[last_epoch_pos]
        time_now, last_epoch, last_iter, last_stage_name = rec_last[:4]

        iter = last_iter - self.data[self.m_first[stage_name]][2]
        done = round(100*iter/self.iter_num.get(stage_name), 2) if stage_name in self.iter_num else 0
        batch_took_avg = float(time_now) - float(self.data[last_report_pos+1][0])
        if self.batch_size is not None:
            batch_took_avg /= self.batch_size
        self.last_report_pos[stage_name] = last_epoch_pos

        # Print table
        header = None
        if self.print_header:
            max_iter = self.iter_num.get(stage_name, 0)*self.epoch_num
            header = ['Time ', 'E('+str(self.epoch_num)+') ',
                      'Iter('+str(max_iter//1000)+'k) ', 'Batch (ms) ']
            for key in self.dkeys_id[4:]:
                header.append(key+' '*(15-len(key)))
            self.print_header = False

        data = [tohms(time_now), str(last_epoch)+' ('+str(done)+'%)', str(last_iter),
                round(batch_took_avg*1000.0, 3)]
        for key in self.dkeys_id[4:]:
            data.append(round(rec_avg[self.dkeys[key]], 4))

        table_width = self.print_table(last_stage_name, [data], header)
        return

    def print_epoch_stat(self, stage_name, **kwargs):
        """ Batch train log format
            Epoch train log format
            Test log format
        """
        first_epoch_pos = self.m_first.get(stage_name, 0)
        last_epoch_pos = self.m_last.get(stage_name, 0)

        rec_avg = self.get_avg(first_epoch_pos+1, last_epoch_pos+1)
        rec_last = self.data[last_epoch_pos]
        time_now, last_epoch, last_iter, last_stage_name = rec_last[:4]
        epoch_took = tohms(time_now - self.data[first_epoch_pos][0])

        # Print table
        # iter_num is a per-stage dict; use .get() as in print_batch_stat
        # (multiplying the dict itself would raise a TypeError)
        max_iter = self.iter_num.get(stage_name, 0)*self.epoch_num
        header = ['Time ', 'E('+str(self.epoch_num)+') ',
                  'Iter('+str(max_iter//1000)+'k) ', 'Epoch (H:M:S) ']
        for key in self.dkeys_id[4:]:
            header.append(key)

        data = [tohms(time_now), str(last_epoch), str(last_iter), epoch_took]
        for key in self.dkeys_id[4:]:
            data.append(round(rec_avg[self.dkeys[key]], 4))

        table_width = self.print_table(last_stage_name, [data], header)
        print("-"*table_width)
        return

    def log_images(self, x, epoch, name_suffix, name, channels=3, nrow=8):
        img_path = os.path.join(self.experiment_name, name)
        os.makedirs(img_path, exist_ok=True)

        img_size = self.im_size
        if img_size < 1:
            img_size2 = x.nelement() / x.size(0) / channels
            img_size = int(np.sqrt(img_size2))

        x = x.view(-1, channels, img_size, img_size)  # * 0.5 + 0.5
        grid = save_image(x, img_path+'/sample_' + str(epoch) + "_" + str(name_suffix) + '.jpg',
                          nrow=nrow, normalize=True, scale_each=True)

        img_grid = make_grid(x, normalize=True, scale_each=True, nrow=nrow)
        self.writer.add_image(name, img_grid, self.iter_global)
        return

    def _merge(self, images, size, labels=[], strike=[]):
        h, w = images.shape[1], images.shape[2]
        resize_factor = 1.0
        h_ = int(h * resize_factor)
        w_ = int(w * resize_factor)
        img = np.zeros((h_ * size[0], w_ * size[1]))
        for idx, image in enumerate(images):
            i = int(idx % size[1])
            j = int(idx / size[1])
            image_ = skimage.transform.resize(image, output_shape=(w_, h_))
            img[j * h_:j * h_ + h_, i * w_:i * w_ + w_] = image_
            if len(labels) == len(images):
                if labels[idx] == 1:
                    img[j * h_:j * h_ + 2, i * w_:i * w_ + w_-4] = np.ones((2, w_-4))
            if len(strike) == len(images):
                if strike[idx] == 1:
                    img[j * h_+h_//2:j * h_ + h_//2+1, i * w_:i * w_ + w_-4] = np.ones((1, w_-4))
        return img

    def save_images(self, images, img_size=(28, 28), labels=[], strike=[], name='result.jpg'):
        n_img_y = 16
        n_img_x = 32
        images = images.reshape(n_img_x * n_img_y, img_size[0], img_size[1])
        imageio.imsave(name, self._merge(images, [n_img_y, n_img_x], labels, strike))


#=================================================================================
if __name__ == "__main__":
    print("NOT AN EXECUTABLE!")
[ "tensorboardX.SummaryWriter", "os.makedirs", "sys_utils.tohms", "numpy.floor", "parameters.Params", "numpy.zeros", "os.system", "numpy.ones", "time.time", "torchvision.utils.make_grid", "os.path.isfile", "skimage.transform.resize", "glob.glob", "os.path.join", "numpy.sqrt" ]
[((2599, 2622), 'glob.glob', 'glob.glob', (['cfg_filename'], {}), '(cfg_filename)\n', (2608, 2622), False, 'import glob\n'), ((2765, 2773), 'parameters.Params', 'Params', ([], {}), '()\n', (2771, 2773), False, 'from parameters import Params\n'), ((3307, 3336), 'os.path.join', 'os.path.join', (['experiment_name'], {}), '(experiment_name)\n', (3319, 3336), False, 'import os\n'), ((3345, 3386), 'os.makedirs', 'os.makedirs', (['self.exp_path'], {'exist_ok': '(True)'}), '(self.exp_path, exist_ok=True)\n', (3356, 3386), False, 'import os\n'), ((3606, 3640), 'os.makedirs', 'os.makedirs', (['logdir'], {'exist_ok': '(True)'}), '(logdir, exist_ok=True)\n', (3617, 3640), False, 'import os\n'), ((3663, 3684), 'tensorboardX.SummaryWriter', 'SummaryWriter', (['logdir'], {}), '(logdir)\n', (3676, 3684), False, 'from tensorboardX import SummaryWriter\n'), ((3733, 3767), 'os.makedirs', 'os.makedirs', (['logdir'], {'exist_ok': '(True)'}), '(logdir, exist_ok=True)\n', (3744, 3767), False, 'import os\n'), ((3795, 3832), 'os.path.join', 'os.path.join', (['self.exp_path', '"""models"""'], {}), "(self.exp_path, 'models')\n", (3807, 3832), False, 'import os\n'), ((3842, 3885), 'os.makedirs', 'os.makedirs', (['self.model_path'], {'exist_ok': '(True)'}), '(self.model_path, exist_ok=True)\n', (3853, 3885), False, 'import os\n'), ((11366, 11413), 'sys_utils.tohms', 'tohms', (['(time_now - self.data[first_epoch_pos][0])'], {}), '(time_now - self.data[first_epoch_pos][0])\n', (11371, 11413), False, 'from sys_utils import tohms\n'), ((12150, 12190), 'os.path.join', 'os.path.join', (['self.experiment_name', 'name'], {}), '(self.experiment_name, name)\n', (12162, 12190), False, 'import os\n'), ((12199, 12235), 'os.makedirs', 'os.makedirs', (['img_path'], {'exist_ok': '(True)'}), '(img_path, exist_ok=True)\n', (12210, 12235), False, 'import os\n'), ((12690, 12746), 'torchvision.utils.make_grid', 'make_grid', (['x'], {'normalize': '(True)', 'scale_each': '(True)', 'nrow': 'nrow'}), '(x, normalize=True, scale_each=True, nrow=nrow)\n', (12699, 12746), False, 'from torchvision.utils import make_grid\n'), ((13049, 13087), 'numpy.zeros', 'np.zeros', (['(h_ * size[0], w_ * size[1])'], {}), '((h_ * size[0], w_ * size[1]))\n', (13057, 13087), True, 'import numpy as np\n'), ((3513, 3556), 'os.system', 'os.system', (["('cp *.py ' + self.exp_path + '/')"], {}), "('cp *.py ' + self.exp_path + '/')\n", (3522, 3556), False, 'import os\n'), ((4097, 4131), 'os.path.join', 'os.path.join', (['logdir', 'log_filename'], {}), '(logdir, log_filename)\n', (4109, 4131), False, 'import os\n'), ((4360, 4401), 'os.path.join', 'os.path.join', (['logdir', 'stdout_log_filename'], {}), '(logdir, stdout_log_filename)\n', (4372, 4401), False, 'import os\n'), ((4957, 4968), 'time.time', 'time.time', ([], {}), '()\n', (4966, 4968), False, 'import time\n'), ((5281, 5292), 'time.time', 'time.time', ([], {}), '()\n', (5290, 5292), False, 'import time\n'), ((6377, 6388), 'time.time', 'time.time', ([], {}), '()\n', (6386, 6388), False, 'import time\n'), ((10593, 10608), 'sys_utils.tohms', 'tohms', (['time_now'], {}), '(time_now)\n', (10598, 10608), False, 'from sys_utils import tohms\n'), ((11771, 11786), 'sys_utils.tohms', 'tohms', (['time_now'], {}), '(time_now)\n', (11776, 11786), False, 'from sys_utils import tohms\n'), ((13226, 13280), 'skimage.transform.resize', 'skimage.transform.resize', (['image'], {'output_shape': '(w_, h_)'}), '(image, output_shape=(w_, h_))\n', (13250, 13280), False, 'import skimage\n'), ((2698, 2726), 'os.path.isfile', 
'os.path.isfile', (['cfg_files[0]'], {}), '(cfg_files[0])\n', (2712, 2726), False, 'import os\n'), ((4151, 4175), 'os.path.isfile', 'os.path.isfile', (['log_path'], {}), '(log_path)\n', (4165, 4175), False, 'import os\n'), ((4719, 4762), 'numpy.floor', 'np.floor', (['(samples_num / self.hps.batch_size)'], {}), '(samples_num / self.hps.batch_size)\n', (4727, 4762), True, 'import numpy as np\n'), ((12389, 12407), 'numpy.sqrt', 'np.sqrt', (['img_size2'], {}), '(img_size2)\n', (12396, 12407), True, 'import numpy as np\n'), ((13494, 13514), 'numpy.ones', 'np.ones', (['(2, w_ - 4)'], {}), '((2, w_ - 4))\n', (13501, 13514), True, 'import numpy as np\n'), ((13673, 13693), 'numpy.ones', 'np.ones', (['(1, w_ - 4)'], {}), '((1, w_ - 4))\n', (13680, 13693), True, 'import numpy as np\n')]
from datetime import timedelta
from unittest import TestCase

from django.utils import timezone
from freezegun import freeze_time

from utils.date import is_time_newer_than


class UtilsDateTestCase(TestCase):
    def setUp(self) -> None:
        pass

    @freeze_time("2020-12-21 03:21:00")
    def test_is_less_old_than(self):
        t = timezone.now() - timedelta(seconds=60)
        self.assertFalse(is_time_newer_than(t, 50))

        t = timezone.now() - timedelta(seconds=40)
        self.assertTrue(is_time_newer_than(t, 50))
[ "django.utils.timezone.now", "freezegun.freeze_time", "utils.date.is_time_newer_than", "datetime.timedelta" ]
[((258, 292), 'freezegun.freeze_time', 'freeze_time', (['"""2020-12-21 03:21:00"""'], {}), "('2020-12-21 03:21:00')\n", (269, 292), False, 'from freezegun import freeze_time\n'), ((342, 356), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (354, 356), False, 'from django.utils import timezone\n'), ((359, 380), 'datetime.timedelta', 'timedelta', ([], {'seconds': '(60)'}), '(seconds=60)\n', (368, 380), False, 'from datetime import timedelta\n'), ((406, 431), 'utils.date.is_time_newer_than', 'is_time_newer_than', (['t', '(50)'], {}), '(t, 50)\n', (424, 431), False, 'from utils.date import is_time_newer_than\n'), ((446, 460), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (458, 460), False, 'from django.utils import timezone\n'), ((463, 484), 'datetime.timedelta', 'timedelta', ([], {'seconds': '(40)'}), '(seconds=40)\n', (472, 484), False, 'from datetime import timedelta\n'), ((509, 534), 'utils.date.is_time_newer_than', 'is_time_newer_than', (['t', '(50)'], {}), '(t, 50)\n', (527, 534), False, 'from utils.date import is_time_newer_than\n')]
from pwn import remote
from gmpy2 import next_prime
from Crypto.Util.number import *

conn = remote("03.cr.yp.toc.tf", "25010")


def get_params():
    conn.recvuntil("[Q]uit")
    conn.sendline("s")
    conn.recvuntil(" = ")
    p = int(conn.recvuntil("\n").decode())
    conn.recvuntil(" = ")
    r = int(conn.recvuntil("\n").decode())
    return (p, r)


def get_public():
    conn.recvuntil("[Q]uit")
    conn.sendline("p")
    conn.recvuntil("[")
    return list(map(int, conn.recvuntil("]").decode().strip("]").split(", ")))


def get_example():
    conn.recvuntil("[Q]uit")
    conn.sendline("e")
    conn.recvuntil("\"")
    randstr = conn.recvuntil("\"").decode().strip("\"")
    conn.recvuntil("[")
    return randstr, list(map(int, conn.recvuntil("]").decode().strip("]").split(", ")))


p, r = get_params()
pubkey = get_public()
randstr, sign = get_example()

M = [
    bytes_to_long(randstr[4*i:4*(i+1)].encode())
    for i in range(len(randstr) // 4)
]
q = int(next_prime(max(M)))
privkey = [sig * inverse(m, q) % q for m, sig in zip(M, sign)]

inv_r = inverse(r, p)
s_list = (
    (pubkey[0] + privkey[0]) * inv_r % p,
    (pubkey[0] + privkey[0] + q) * inv_r % p
)
key = True
for idx in range(1, len(privkey)):
    ts = (pubkey[idx] + privkey[idx]) * pow(inv_r, idx+1, p) % p
    if ts not in s_list:
        privkey[idx] += q
    elif key:
        key = False
        if ts == s_list[1]:
            privkey[0] += q

conn.recvuntil("[Q]uit")
conn.sendline("f")
conn.recvuntil(": ")
randmsg = conn.recvuntil("\n").decode().strip("\n")
MM = [
    bytes_to_long(randmsg[4*i:4*(i+1)].encode())
    for i in range(len(randmsg) // 4)
]
qq = int(next_prime(max(MM)))
conn.sendline(",".join(
    map(str, (mm * priv % qq for mm, priv in zip(MM, privkey)))
))
conn.recvuntil("'")
print(conn.recvuntil("'").decode().strip("'"))
[ "pwn.remote" ]
[((94, 128), 'pwn.remote', 'remote', (['"""03.cr.yp.toc.tf"""', '"""25010"""'], {}), "('03.cr.yp.toc.tf', '25010')\n", (100, 128), False, 'from pwn import remote\n')]
import os
import sys

from pykit import fsutil

fn = sys.argv[1]

fsutil.write_file(fn, 'boo')

stat = os.stat(fn)
os.write(1, '{uid},{gid}'.format(uid=stat.st_uid, gid=stat.st_gid))
[ "pykit.fsutil.write_file", "os.stat" ]
[((66, 94), 'pykit.fsutil.write_file', 'fsutil.write_file', (['fn', '"""boo"""'], {}), "(fn, 'boo')\n", (83, 94), False, 'from pykit import fsutil\n'), ((102, 113), 'os.stat', 'os.stat', (['fn'], {}), '(fn)\n', (109, 113), False, 'import os\n')]
""" Define parameters for algorithms. """ import argparse def str2bool(v): return v.lower() == "true" def str2intlist(value): if not value: return value else: return [int(num) for num in value.split(",")] def str2list(value): if not value: return value else: return [num for num in value.split(",")] def create_parser(): """ Creates the argparser. Use this to add additional arguments to the parser later. """ parser = argparse.ArgumentParser( "Robot Learning Algorithms", formatter_class=argparse.ArgumentDefaultsHelpFormatter, ) # environment parser.add_argument( "--env", type=str, default="Hopper-v2", help="environment name", ) parser.add_argument("--seed", type=int, default=123) add_method_arguments(parser) return parser def add_method_arguments(parser): # algorithm parser.add_argument( "--algo", type=str, default="sac", choices=[ "sac", "ppo", "ddpg", "td3", "bc", "gail", "dac", ], ) # training parser.add_argument("--is_train", type=str2bool, default=True) parser.add_argument("--resume", type=str2bool, default=True) parser.add_argument("--init_ckpt_path", type=str, default=None) parser.add_argument("--gpu", type=int, default=None) # evaluation parser.add_argument("--ckpt_num", type=int, default=None) parser.add_argument("--num_eval", type=int, default=1, help="number of episodes for evaluation" ) # environment try: parser.add_argument("--screen_width", type=int, default=480) parser.add_argument("--screen_height", type=int, default=480) except: pass parser.add_argument("--action_repeat", type=int, default=1) # misc parser.add_argument("--run_prefix", type=str, default=None) parser.add_argument("--notes", type=str, default="") # log parser.add_argument("--log_interval", type=int, default=1) parser.add_argument("--evaluate_interval", type=int, default=10) parser.add_argument("--ckpt_interval", type=int, default=200) parser.add_argument("--log_root_dir", type=str, default="log") parser.add_argument( "--wandb", type=str2bool, default=False, help="set it True if you want to use wandb", ) parser.add_argument("--wandb_entity", type=str, default="clvr") parser.add_argument("--wandb_project", type=str, default="robot-learning") parser.add_argument("--record_video", type=str2bool, default=True) parser.add_argument("--record_video_caption", type=str2bool, default=True) try: parser.add_argument("--record_demo", type=str2bool, default=False) except: pass # observation normalization parser.add_argument("--ob_norm", type=str2bool, default=False) parser.add_argument("--max_ob_norm_step", type=int, default=int(1e6)) parser.add_argument( "--clip_obs", type=float, default=200, help="the clip range of observation" ) parser.add_argument( "--clip_range", type=float, default=5, help="the clip range after normalization of observation", ) parser.add_argument("--max_global_step", type=int, default=int(1e6)) parser.add_argument( "--batch_size", type=int, default=128, help="the sample batch size" ) add_policy_arguments(parser) # arguments specific to algorithms args, unparsed = parser.parse_known_args() if args.algo == "sac": add_sac_arguments(parser) elif args.algo == "ddpg": add_ddpg_arguments(parser) elif args.algo == "td3": add_td3_arguments(parser) elif args.algo == "ppo": add_ppo_arguments(parser) elif args.algo == "bc": add_il_arguments(parser) add_bc_arguments(parser) elif args.algo in ["gail", "gaifo", "gaifo-s"]: add_il_arguments(parser) add_ppo_arguments(parser) add_gail_arguments(parser) elif args.algo in ["dac"]: add_il_arguments(parser) add_gail_arguments(parser) add_dac_arguments(parser) return parser 
def add_policy_arguments(parser): # network parser.add_argument("--policy_mlp_dim", type=str2intlist, default=[256, 256]) parser.add_argument("--critic_mlp_dim", type=str2intlist, default=[256, 256]) parser.add_argument("--critic_ensemble", type=int, default=1) parser.add_argument( "--policy_activation", type=str, default="relu", choices=["relu", "elu", "tanh"] ) parser.add_argument("--tanh_policy", type=str2bool, default=True) parser.add_argument("--gaussian_policy", type=str2bool, default=True) # encoder parser.add_argument( "--encoder_type", type=str, default="mlp", choices=["mlp", "cnn"] ) parser.add_argument("--encoder_image_size", type=int, default=84) parser.add_argument("--encoder_conv_dim", type=int, default=32) parser.add_argument("--encoder_kernel_size", type=str2intlist, default=[3, 3, 3, 3]) parser.add_argument("--encoder_stride", type=str2intlist, default=[2, 1, 1, 1]) parser.add_argument("--encoder_conv_output_dim", type=int, default=50) parser.add_argument("--encoder_soft_update_weight", type=float, default=0.95) args, unparsed = parser.parse_known_args() if args.encoder_type == "cnn": parser.set_defaults(screen_width=100, screen_height=100) parser.set_defaults(policy_mlp_dim=[1024, 1024]) parser.set_defaults(critic_mlp_dim=[1024, 1024]) # actor-critic parser.add_argument( "--actor_lr", type=float, default=3e-4, help="the learning rate of the actor" ) parser.add_argument( "--critic_lr", type=float, default=3e-4, help="the learning rate of the critic" ) parser.add_argument( "--critic_soft_update_weight", type=float, default=0.995, help="the average coefficient" ) # absorbing state parser.add_argument("--absorbing_state", type=str2bool, default=False) def add_rl_arguments(parser): parser.add_argument( "--rl_discount_factor", type=float, default=0.99, help="the discount factor" ) parser.add_argument("--warm_up_steps", type=int, default=0) def add_on_policy_arguments(parser): parser.add_argument("--rollout_length", type=int, default=2000) parser.add_argument("--gae_lambda", type=float, default=0.95) def add_off_policy_arguments(parser): parser.add_argument( "--buffer_size", type=int, default=int(1e6), help="the size of the buffer" ) parser.set_defaults(warm_up_steps=1000) def add_sac_arguments(parser): add_rl_arguments(parser) add_off_policy_arguments(parser) parser.add_argument("--reward_scale", type=float, default=1.0, help="reward scale") parser.add_argument("--actor_update_freq", type=int, default=2) parser.add_argument("--critic_target_update_freq", type=int, default=2) parser.add_argument("--alpha_init_temperature", type=float, default=0.1) parser.add_argument( "--alpha_lr", type=float, default=1e-4, help="the learning rate of the actor" ) parser.set_defaults(actor_lr=1e-3) parser.set_defaults(critic_lr=1e-3) parser.set_defaults(evaluate_interval=5000) parser.set_defaults(ckpt_interval=10000) parser.set_defaults(log_interval=500) parser.set_defaults(critic_soft_update_weight=0.99) parser.set_defaults(buffer_size=100000) parser.set_defaults(critic_ensemble=2) def add_ppo_arguments(parser): add_rl_arguments(parser) add_on_policy_arguments(parser) parser.add_argument("--ppo_clip", type=float, default=0.2) parser.add_argument("--value_loss_coeff", type=float, default=0.5) parser.add_argument("--action_loss_coeff", type=float, default=1.0) parser.add_argument("--entropy_loss_coeff", type=float, default=1e-4) parser.add_argument("--ppo_epoch", type=int, default=5) parser.add_argument("--max_grad_norm", type=float, default=100) 
parser.set_defaults(critic_soft_update_weight=0.995) parser.set_defaults(evaluate_interval=20) parser.set_defaults(ckpt_interval=20) def add_ddpg_arguments(parser): add_rl_arguments(parser) add_off_policy_arguments(parser) parser.add_argument("--actor_update_delay", type=int, default=2000) parser.add_argument("--actor_update_freq", type=int, default=2) parser.add_argument("--actor_target_update_freq", type=int, default=2) parser.add_argument("--critic_target_update_freq", type=int, default=2) parser.add_argument( "--actor_soft_update_weight", type=float, default=0.995, help="the average coefficient" ) parser.set_defaults(critic_soft_update_weight=0.995) # epsilon greedy parser.add_argument("--epsilon_greedy", type=str2bool, default=False) parser.add_argument("--epsilon_greedy_eps", type=float, default=0.3) parser.add_argument("--policy_exploration_noise", type=float, default=0.1) parser.set_defaults(gaussian_policy=False) parser.set_defaults(evaluate_interval=10000) parser.set_defaults(ckpt_interval=50000) parser.set_defaults(log_interval=1000) def add_td3_arguments(parser): add_ddpg_arguments(parser) parser.set_defaults(critic_ensemble=2) parser.add_argument("--policy_noise", type=float, default=0.2) parser.add_argument("--policy_noise_clip", type=float, default=0.5) def add_il_arguments(parser): parser.add_argument("--demo_path", type=str, default=None, help="path to demos") parser.add_argument( "--demo_subsample_interval", type=int, default=1, # default=20, # used in GAIL help="subsample interval of expert transitions", ) def add_bc_arguments(parser): parser.set_defaults(gaussian_policy=False) parser.set_defaults(max_global_step=100) parser.add_argument( "--bc_lr", type=float, default=1e-3, help="learning rate for bc" ) parser.add_argument( "--val_split", type=float, default=0, help="how much of dataset to leave for validation set" ) def add_gail_arguments(parser): parser.add_argument("--gail_entropy_loss_coeff", type=float, default=0.0) parser.add_argument("--gail_vanilla_reward", type=str2bool, default=True) parser.add_argument("--discriminator_lr", type=float, default=1e-4) parser.add_argument("--discriminator_mlp_dim", type=str2intlist, default=[256, 256]) parser.add_argument( "--discriminator_activation", type=str, default="tanh", choices=["relu", "elu", "tanh"] ) parser.add_argument("--discriminator_update_freq", type=int, default=4) parser.add_argument("--gail_no_action", type=str2bool, default=False) parser.add_argument("--gail_env_reward", type=float, default=0.0) def add_dac_arguments(parser): parser.add_argument("--dac_rl_algo", type=str, default="td3", choices=["sac", "td3"]) args, unparsed = parser.parse_known_args() if args.dac_rl_algo == "sac": add_sac_arguments(parser) elif args.dac_rl_algo == "td3": add_td3_arguments(parser) def argparser(): """ Directly parses the arguments. """ parser = create_parser() args, unparsed = parser.parse_known_args() return args, unparsed
[ "argparse.ArgumentParser" ]
[((501, 614), 'argparse.ArgumentParser', 'argparse.ArgumentParser', (['"""Robot Learning Algorithms"""'], {'formatter_class': 'argparse.ArgumentDefaultsHelpFormatter'}), "('Robot Learning Algorithms', formatter_class=\n argparse.ArgumentDefaultsHelpFormatter)\n", (524, 614), False, 'import argparse\n')]
import os

import pytest
from ethereum import utils
from ethereum.tools import tester
from ethereum.abi import ContractTranslator
from ethereum.config import config_metropolis
from solc_simple import Builder

GAS_LIMIT = 8000000
START_GAS = GAS_LIMIT - 1000000
config_metropolis['BLOCK_GAS_LIMIT'] = GAS_LIMIT

# Compile contracts before testing
OWN_DIR = os.path.dirname(os.path.realpath(__file__))
CONTRACTS_DIR = os.path.abspath(os.path.realpath(os.path.join(OWN_DIR, '../contracts')))
OUTPUT_DIR = os.path.abspath(os.path.realpath(os.path.join(OWN_DIR, '../build')))
builder = Builder(CONTRACTS_DIR, OUTPUT_DIR)
builder.compile_all()


@pytest.fixture
def ethtester():
    tester.chain = tester.Chain()
    return tester


@pytest.fixture
def ethutils():
    return utils


@pytest.fixture
def get_contract(ethtester, ethutils):
    def create_contract(path, args=(), sender=ethtester.k0):
        abi, hexcode = builder.get_contract_data(path)
        bytecode = ethutils.decode_hex(hexcode)
        encoded_args = (ContractTranslator(abi).encode_constructor_arguments(args) if args else b'')
        code = bytecode + encoded_args
        address = ethtester.chain.tx(sender=sender, to=b'', startgas=START_GAS, data=code)
        return ethtester.ABIContract(ethtester.chain, abi, address)
    return create_contract


@pytest.fixture
def tree(ethtester, get_contract):
    contract = get_contract('PercentTrees')
    ethtester.chain.mine()
    return contract


@pytest.fixture
def dummy(ethtester, get_contract):
    contract = get_contract('Dummy')
    ethtester.chain.mine()
    return contract


@pytest.fixture
def p2pk(ethtester, get_contract):
    contract = get_contract('P2PK')
    ethtester.chain.mine()
    return contract


@pytest.fixture
def treetest(ethtester, get_contract):
    contract = get_contract('TreeTest')
    ethtester.chain.mine()
    return contract
[ "ethereum.abi.ContractTranslator", "os.path.realpath", "solc_simple.Builder", "ethereum.tools.tester.Chain", "os.path.join" ]
[((582, 616), 'solc_simple.Builder', 'Builder', (['CONTRACTS_DIR', 'OUTPUT_DIR'], {}), '(CONTRACTS_DIR, OUTPUT_DIR)\n', (589, 616), False, 'from solc_simple import Builder\n'), ((373, 399), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (389, 399), False, 'import os\n'), ((693, 707), 'ethereum.tools.tester.Chain', 'tester.Chain', ([], {}), '()\n', (705, 707), False, 'from ethereum.tools import tester\n'), ((450, 487), 'os.path.join', 'os.path.join', (['OWN_DIR', '"""../contracts"""'], {}), "(OWN_DIR, '../contracts')\n", (462, 487), False, 'import os\n'), ((536, 569), 'os.path.join', 'os.path.join', (['OWN_DIR', '"""../build"""'], {}), "(OWN_DIR, '../build')\n", (548, 569), False, 'import os\n'), ((1022, 1045), 'ethereum.abi.ContractTranslator', 'ContractTranslator', (['abi'], {}), '(abi)\n', (1040, 1045), False, 'from ethereum.abi import ContractTranslator\n')]
from agent import Agent
from monitor import interact
import gym
import numpy as np

env = gym.make('Taxi-v3')
agent = Agent()
avg_rewards, best_avg_reward = interact(env, agent)
[ "agent.Agent", "gym.make", "monitor.interact" ]
[((95, 114), 'gym.make', 'gym.make', (['"""Taxi-v3"""'], {}), "('Taxi-v3')\n", (103, 114), False, 'import gym\n'), ((124, 131), 'agent.Agent', 'Agent', ([], {}), '()\n', (129, 131), False, 'from agent import Agent\n'), ((164, 184), 'monitor.interact', 'interact', (['env', 'agent'], {}), '(env, agent)\n', (172, 184), False, 'from monitor import interact\n')]
#!/usr/bin/env python3
import sys
import json

for line in sys.stdin:
    s = line.split(" ")
    obj = {}
    obj['id'] = s[0]
    obj['date'] = s[1] + " " + s[2] + " " + s[3]
    obj['user'] = s[4]
    obj['text'] = " ".join(s[5:])
    print(json.dumps(obj, ensure_ascii=False))
[ "json.dumps" ]
[((244, 279), 'json.dumps', 'json.dumps', (['obj'], {'ensure_ascii': '(False)'}), '(obj, ensure_ascii=False)\n', (254, 279), False, 'import json\n')]
from runpy import run_module

import pytest

from .subtest import _test_stdlib_symbols_in_namespace


# TODO do some research about how to test an interactive CLI application
@pytest.mark.xfail
def test_console_script() -> None:
    _test_stdlib_symbols_in_namespace(run_module("importall"))
[ "runpy.run_module" ]
[((267, 290), 'runpy.run_module', 'run_module', (['"""importall"""'], {}), "('importall')\n", (277, 290), False, 'from runpy import run_module\n')]
from unittest import TestCase

from fm_agent.fm_config import FastmodelConfig
from fm_agent.utils import SimulatorError


class TestFastmodelConfig(TestCase):

    def test_Setting_File(self):
        # assertTrue(x, msg) would always pass here; assertEqual is the intended check
        self.assertEqual(FastmodelConfig.SETTINGS_FILE, "settings.json")

    def test_parse_params_file_failed(self):
        c = FastmodelConfig()
        try:
            c.parse_params_file("FILE_NOT_EXIST")
        except SimulatorError as e:
            pass
        else:
            self.fail("failed to catch the exception")

    def test_parse_params_file(self):
        c = FastmodelConfig()
        try:
            c.parse_params_file("DEFAULT.conf")
        except SimulatorError as e:
            self.fail("caught a SimulatorError exception")

    def test_get_configs_none(self):
        c = FastmodelConfig()
        self.assertIsNone(c.get_configs("NOT_A_MODEL"))

    def test_get_configs(self):
        c = FastmodelConfig()
        self.assertIsNotNone(c.get_configs("FVP_MPS2_M3"))

    def test_get_all_configs(self):
        c = FastmodelConfig()
        self.assertIsNotNone(c.get_all_configs())
[ "fm_agent.fm_config.FastmodelConfig" ]
[((317, 334), 'fm_agent.fm_config.FastmodelConfig', 'FastmodelConfig', ([], {}), '()\n', (332, 334), False, 'from fm_agent.fm_config import FastmodelConfig\n'), ((586, 603), 'fm_agent.fm_config.FastmodelConfig', 'FastmodelConfig', ([], {}), '()\n', (601, 603), False, 'from fm_agent.fm_config import FastmodelConfig\n'), ((821, 838), 'fm_agent.fm_config.FastmodelConfig', 'FastmodelConfig', ([], {}), '()\n', (836, 838), False, 'from fm_agent.fm_config import FastmodelConfig\n'), ((946, 963), 'fm_agent.fm_config.FastmodelConfig', 'FastmodelConfig', ([], {}), '()\n', (961, 963), False, 'from fm_agent.fm_config import FastmodelConfig\n'), ((1078, 1095), 'fm_agent.fm_config.FastmodelConfig', 'FastmodelConfig', ([], {}), '()\n', (1093, 1095), False, 'from fm_agent.fm_config import FastmodelConfig\n')]
from datetime import datetime, date import unittest from grandfatherson import (FRIDAY, SATURDAY, SUNDAY) from grandfatherson.filters import (Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC) def utcdatetime(*args): return datetime(*args, tzinfo=UTC()) class TestSeconds(unittest.TestCase): def setUp(self): self.now = datetime(2000, 1, 1, 0, 0, 1, 1) self.datetimes = [ datetime(2000, 1, 1, 0, 0, 1, 0), datetime(2000, 1, 1, 0, 0, 0, 1), datetime(2000, 1, 1, 0, 0, 0, 0), datetime(1999, 12, 31, 23, 59, 59, 999999), datetime(1999, 12, 31, 23, 59, 57, 0), ] def test_mask(self): self.assertEqual( Seconds.mask(datetime(1999, 12, 31, 23, 59, 59, 999999)), datetime(1999, 12, 31, 23, 59, 59, 0) ) def test_future(self): datetimes = [datetime(2010, 1, 15, 0, 0, 0, 0)] # Wikipedia self.assertEqual(Seconds.filter(datetimes, number=0, now=self.now), set(datetimes)) self.assertEqual(Seconds.filter(datetimes, number=1, now=self.now), set(datetimes)) def test_invalid_number(self): self.assertRaises(ValueError, Seconds.filter, [], number=-1, now=self.now) self.assertRaises(ValueError, Seconds.filter, [], number=0.1, now=self.now) self.assertRaises(ValueError, Seconds.filter, [], number='1', now=self.now) def test_no_input(self): self.assertEqual(Seconds.filter([], number=1, now=self.now), set()) def test_no_results(self): self.assertEqual(Seconds.filter([self.now], number=0, now=self.now), set()) self.assertEqual(Seconds.filter(self.datetimes, number=0, now=self.now), set()) def test_current(self): self.assertEqual(Seconds.filter(self.datetimes, number=1, now=self.now), set([datetime(2000, 1, 1, 0, 0, 1, 0)])) def test_duplicates(self): # Ensure we get the oldest per-second datetime when there are # duplicates: i.e. not datetime(2000, 1, 1, 0, 0, 0, 1) self.assertEqual(Seconds.filter(self.datetimes, number=2, now=self.now), set([datetime(2000, 1, 1, 0, 0, 0, 0), datetime(2000, 1, 1, 0, 0, 1, 0)])) def test_microseconds(self): self.assertEqual(Seconds.filter(self.datetimes, number=3, now=self.now), set([datetime(1999, 12, 31, 23, 59, 59, 999999), datetime(2000, 1, 1, 0, 0, 0, 0), datetime(2000, 1, 1, 0, 0, 1, 0)])) def test_before_start(self): # datetime(1999, 12, 31, 23, 59, 57, 0) is too old to show up # in the results self.assertEqual(Seconds.filter(self.datetimes, number=4, now=self.now), set([datetime(1999, 12, 31, 23, 59, 59, 999999), datetime(2000, 1, 1, 0, 0, 0, 0), datetime(2000, 1, 1, 0, 0, 1, 0)])) def test_all_input(self): self.assertEqual(Seconds.filter(self.datetimes, number=5, now=self.now), set([datetime(1999, 12, 31, 23, 59, 57, 0), datetime(1999, 12, 31, 23, 59, 59, 999999), datetime(2000, 1, 1, 0, 0, 0, 0), datetime(2000, 1, 1, 0, 0, 1, 0)])) self.assertEqual(Seconds.filter(self.datetimes, number=6, now=self.now), set([datetime(1999, 12, 31, 23, 59, 57, 0), datetime(1999, 12, 31, 23, 59, 59, 999999), datetime(2000, 1, 1, 0, 0, 0, 0), datetime(2000, 1, 1, 0, 0, 1, 0)])) def test_with_tzinfo(self): utcnow = utcdatetime(2000, 1, 1, 0, 0, 1, 1) tzinfo_datetimes = [ utcdatetime(2000, 1, 1, 0, 0, 1, 0), utcdatetime(2000, 1, 1, 0, 0, 0, 1), utcdatetime(2000, 1, 1, 0, 0, 0, 0), utcdatetime(1999, 12, 31, 23, 59, 59, 999999), utcdatetime(1999, 12, 31, 23, 59, 57, 0), ] self.assertEqual(Seconds.filter(tzinfo_datetimes, number=5, now=utcnow), set([utcdatetime(1999, 12, 31, 23, 59, 57, 0), utcdatetime(1999, 12, 31, 23, 59, 59, 999999), utcdatetime(2000, 1, 1, 0, 0, 0, 0), utcdatetime(2000, 1, 1, 0, 0, 1, 0)])) self.assertEqual(Seconds.filter(tzinfo_datetimes, number=6, now=utcnow), 
set([utcdatetime(1999, 12, 31, 23, 59, 57, 0), utcdatetime(1999, 12, 31, 23, 59, 59, 999999), utcdatetime(2000, 1, 1, 0, 0, 0, 0), utcdatetime(2000, 1, 1, 0, 0, 1, 0)])) class TestMinutes(unittest.TestCase): def setUp(self): self.now = datetime(2000, 1, 1, 0, 1, 1, 1) self.datetimes = [ datetime(2000, 1, 1, 0, 1, 0, 0), datetime(2000, 1, 1, 0, 0, 1, 0), datetime(2000, 1, 1, 0, 0, 0, 0), datetime(1999, 12, 31, 23, 59, 59, 999999), datetime(1999, 12, 31, 23, 57, 0, 0), ] def test_mask(self): self.assertEqual( Minutes.mask(datetime(1999, 12, 31, 23, 59, 59, 999999)), datetime(1999, 12, 31, 23, 59, 0, 0) ) def test_future(self): datetimes = [datetime(2010, 1, 15, 0, 0, 0, 0)] # Wikipedia self.assertEqual(Minutes.filter(datetimes, number=0, now=self.now), set(datetimes)) self.assertEqual(Minutes.filter(datetimes, number=1, now=self.now), set(datetimes)) def test_invalid_number(self): self.assertRaises(ValueError, Minutes.filter, [], number=-1, now=self.now) self.assertRaises(ValueError, Minutes.filter, [], number=0.1, now=self.now) self.assertRaises(ValueError, Minutes.filter, [], number='1', now=self.now) def test_no_input(self): self.assertEqual(Minutes.filter([], number=1, now=self.now), set()) def test_no_results(self): self.assertEqual(Minutes.filter([self.now], number=0, now=self.now), set()) self.assertEqual(Minutes.filter(self.datetimes, number=0, now=self.now), set()) def test_current(self): self.assertEqual(Minutes.filter(self.datetimes, number=1, now=self.now), set([datetime(2000, 1, 1, 0, 1, 0, 0)])) def test_duplicates(self): # Ensure we get the oldest per-minute datetime when there are # duplicates: i.e. not datetime(2000, 1, 1, 0, 0, 1, 0) self.assertEqual(Minutes.filter(self.datetimes, number=2, now=self.now), set([datetime(2000, 1, 1, 0, 0, 0, 0), datetime(2000, 1, 1, 0, 1, 0, 0)])) def test_microseconds(self): self.assertEqual(Minutes.filter(self.datetimes, number=3, now=self.now), set([datetime(1999, 12, 31, 23, 59, 59, 999999), datetime(2000, 1, 1, 0, 0, 0, 0), datetime(2000, 1, 1, 0, 1, 0, 0)])) def test_before_start(self): # datetime(1999, 12, 31, 23, 57, 0, 0) is too old to show up # in the results self.assertEqual(Minutes.filter(self.datetimes, number=4, now=self.now), set([datetime(1999, 12, 31, 23, 59, 59, 999999), datetime(2000, 1, 1, 0, 0, 0, 0), datetime(2000, 1, 1, 0, 1, 0, 0)])) def test_all_input(self): self.assertEqual(Minutes.filter(self.datetimes, number=5, now=self.now), set([datetime(1999, 12, 31, 23, 57, 0, 0), datetime(1999, 12, 31, 23, 59, 59, 999999), datetime(2000, 1, 1, 0, 0, 0, 0), datetime(2000, 1, 1, 0, 1, 0, 0)])) self.assertEqual(Minutes.filter(self.datetimes, number=6, now=self.now), set([datetime(1999, 12, 31, 23, 57, 0, 0), datetime(1999, 12, 31, 23, 59, 59, 999999), datetime(2000, 1, 1, 0, 0, 0, 0), datetime(2000, 1, 1, 0, 1, 0, 0)])) class TestHours(unittest.TestCase): def setUp(self): self.now = datetime(2000, 1, 1, 1, 1, 1, 1) self.datetimes = [ datetime(2000, 1, 1, 1, 0, 0, 0), datetime(2000, 1, 1, 0, 1, 0, 0), datetime(2000, 1, 1, 0, 0, 0, 0), datetime(1999, 12, 31, 23, 59, 59, 999999), datetime(1999, 12, 31, 21, 0, 0, 0), ] def test_mask(self): self.assertEqual( Hours.mask(datetime(1999, 12, 31, 23, 59, 59, 999999)), datetime(1999, 12, 31, 23, 0, 0, 0) ) def test_future(self): datetimes = [datetime(2010, 1, 15, 0, 0, 0, 0)] # Wikipedia self.assertEqual(Hours.filter(datetimes, number=0, now=self.now), set(datetimes)) self.assertEqual(Hours.filter(datetimes, number=1, now=self.now), set(datetimes)) def test_invalid_number(self): 
self.assertRaises(ValueError, Hours.filter, [], number=-1, now=self.now) self.assertRaises(ValueError, Hours.filter, [], number=0.1, now=self.now) self.assertRaises(ValueError, Hours.filter, [], number='1', now=self.now) def test_no_input(self): self.assertEqual(Hours.filter([], number=1, now=self.now), set()) def test_no_results(self): self.assertEqual(Hours.filter([self.now], number=0, now=self.now), set()) self.assertEqual(Hours.filter(self.datetimes, number=0, now=self.now), set()) def test_current(self): self.assertEqual(Hours.filter(self.datetimes, number=1, now=self.now), set([datetime(2000, 1, 1, 1, 0, 0, 0)])) def test_duplicates(self): # Ensure we get the oldest per-hour datetime when there are # duplicates: i.e. not datetime(2000, 1, 1, 0, 1, 0, 0) self.assertEqual(Hours.filter(self.datetimes, number=2, now=self.now), set([datetime(2000, 1, 1, 0, 0, 0, 0), datetime(2000, 1, 1, 1, 0, 0, 0)])) def test_microseconds(self): self.assertEqual(Hours.filter(self.datetimes, number=3, now=self.now), set([datetime(1999, 12, 31, 23, 59, 59, 999999), datetime(2000, 1, 1, 0, 0, 0, 0), datetime(2000, 1, 1, 1, 0, 0, 0)])) def test_before_start(self): # datetime(1999, 12, 31, 21, 0, 0, 0) is too old to show up # in the results self.assertEqual(Hours.filter(self.datetimes, number=4, now=self.now), set([datetime(1999, 12, 31, 23, 59, 59, 999999), datetime(2000, 1, 1, 0, 0, 0, 0), datetime(2000, 1, 1, 1, 0, 0, 0)])) def test_all_input(self): self.assertEqual(Hours.filter(self.datetimes, number=5, now=self.now), set([datetime(1999, 12, 31, 21, 0, 0, 0), datetime(1999, 12, 31, 23, 59, 59, 999999), datetime(2000, 1, 1, 0, 0, 0, 0), datetime(2000, 1, 1, 1, 0, 0, 0)])) self.assertEqual(Hours.filter(self.datetimes, number=6, now=self.now), set([datetime(1999, 12, 31, 21, 0, 0, 0), datetime(1999, 12, 31, 23, 59, 59, 999999), datetime(2000, 1, 1, 0, 0, 0, 0), datetime(2000, 1, 1, 1, 0, 0, 0)])) class TestDays(unittest.TestCase): def setUp(self): self.now = datetime(2000, 1, 1, 1, 1, 1, 1) self.datetimes = [ datetime(2000, 1, 1, 1, 0, 0, 0), datetime(2000, 1, 1, 0, 0, 0, 0), datetime(1999, 12, 31, 23, 59, 59, 999999), datetime(1999, 12, 30, 0, 0, 0, 0), datetime(1999, 12, 28, 0, 0, 0, 0), ] def test_mask(self): self.assertEqual( Days.mask(datetime(1999, 12, 31, 23, 59, 59, 999999)), datetime(1999, 12, 31, 0, 0, 0, 0) ) def test_future(self): datetimes = [datetime(2010, 1, 15, 0, 0, 0, 0)] # Wikipedia self.assertEqual(Days.filter(datetimes, number=0, now=self.now), set(datetimes)) self.assertEqual(Days.filter(datetimes, number=1, now=self.now), set(datetimes)) def test_invalid_number(self): self.assertRaises(ValueError, Days.filter, [], number=-1, now=self.now) self.assertRaises(ValueError, Days.filter, [], number=0.1, now=self.now) self.assertRaises(ValueError, Days.filter, [], number='1', now=self.now) def test_no_input(self): self.assertEqual(Days.filter([], number=1, now=self.now), set()) def test_no_results(self): self.assertEqual(Days.filter([self.now], number=0, now=self.now), set()) self.assertEqual(Days.filter(self.datetimes, number=0, now=self.now), set()) def test_current(self): self.assertEqual(Days.filter(self.datetimes, number=1, now=self.now), set([datetime(2000, 1, 1, 0, 0, 0, 0)])) def test_duplicates(self): # Ensure we get the oldest per-day datetime when there are # duplicates: i.e. 
not datetime(2000, 1, 1, 1, 0, 0, 0) self.assertEqual(Days.filter(self.datetimes, number=2, now=self.now), set([datetime(1999, 12, 31, 23, 59, 59, 999999), datetime(2000, 1, 1, 0, 0, 0, 0)])) def test_before_start(self): # datetime(1999, 12, 28, 0, 0, 0, 0) is too old to show up # in the results self.assertEqual(Days.filter(self.datetimes, number=4, now=self.now), set([datetime(1999, 12, 30, 0, 0, 0, 0), datetime(1999, 12, 31, 23, 59, 59, 999999), datetime(2000, 1, 1, 0, 0, 0, 0)])) def test_all_input(self): self.assertEqual(Days.filter(self.datetimes, number=5, now=self.now), set([datetime(1999, 12, 28, 0, 0, 0, 0), datetime(1999, 12, 30, 0, 0, 0, 0), datetime(1999, 12, 31, 23, 59, 59, 999999), datetime(2000, 1, 1, 0, 0, 0, 0)])) self.assertEqual(Days.filter(self.datetimes, number=6, now=self.now), set([datetime(1999, 12, 28, 0, 0, 0, 0), datetime(1999, 12, 30, 0, 0, 0, 0), datetime(1999, 12, 31, 23, 59, 59, 999999), datetime(2000, 1, 1, 0, 0, 0, 0)])) def test_leap_year(self): # 2004 is a leap year, because it is divisible by 4 now = datetime(2004, 3, 1, 0, 0, 0, 0) datetimes_2004 = [ datetime(2004, 3, 1, 0, 0, 0, 0), datetime(2004, 2, 29, 0, 0, 0, 0), datetime(2004, 2, 28, 0, 0, 0, 0), datetime(2004, 2, 27, 0, 0, 0, 0), ] self.assertEqual(Days.filter(datetimes_2004, number=1, now=now), set([datetime(2004, 3, 1, 0, 0, 0, 0)])) self.assertEqual(Days.filter(datetimes_2004, number=2, now=now), set([datetime(2004, 2, 29, 0, 0, 0, 0), datetime(2004, 3, 1, 0, 0, 0, 0)])) self.assertEqual(Days.filter(datetimes_2004, number=3, now=now), set([datetime(2004, 2, 28, 0, 0, 0, 0), datetime(2004, 2, 29, 0, 0, 0, 0), datetime(2004, 3, 1, 0, 0, 0, 0)])) def test_not_leap_year(self): # 1900 was not a leap year, because it is divisible by 400 now = datetime(1900, 3, 1, 0, 0, 0, 0) datetimes_1900 = [ datetime(1900, 3, 1, 0, 0, 0, 0), datetime(1900, 2, 28, 0, 0, 0, 0), datetime(1900, 2, 27, 0, 0, 0, 0), ] self.assertEqual(Days.filter(datetimes_1900, number=1, now=now), set([datetime(1900, 3, 1, 0, 0, 0, 0)])) self.assertEqual(Days.filter(datetimes_1900, number=2, now=now), set([datetime(1900, 2, 28, 0, 0, 0, 0), datetime(1900, 3, 1, 0, 0, 0, 0)])) self.assertEqual(Days.filter(datetimes_1900, number=3, now=now), set([datetime(1900, 2, 27, 0, 0, 0, 0), datetime(1900, 2, 28, 0, 0, 0, 0), datetime(1900, 3, 1, 0, 0, 0, 0)])) def test_with_tzinfo_and_date(self): tzinfo_datetimes = [ utcdatetime(2000, 1, 1, 1, 0, 0, 0), utcdatetime(2000, 1, 1, 0, 0, 0, 0), utcdatetime(1999, 12, 31, 23, 59, 59, 999999), utcdatetime(1999, 12, 30, 0, 0, 0, 0), utcdatetime(1999, 12, 28, 0, 0, 0, 0), ] today = date(2000, 1, 1) self.assertEqual(Days.filter(tzinfo_datetimes, number=5, now=today), set([utcdatetime(1999, 12, 28, 0, 0, 0, 0), utcdatetime(1999, 12, 30, 0, 0, 0, 0), utcdatetime(1999, 12, 31, 23, 59, 59, 999999), utcdatetime(2000, 1, 1, 0, 0, 0, 0)])) def test_with_date(self): today = date(2000, 1, 1) self.assertEqual(Days.filter(self.datetimes, number=5, now=today), set([datetime(1999, 12, 28, 0, 0, 0, 0), datetime(1999, 12, 30, 0, 0, 0, 0), datetime(1999, 12, 31, 23, 59, 59, 999999), datetime(2000, 1, 1, 0, 0, 0, 0)])) class TestWeeks(unittest.TestCase): def setUp(self): # 1 January 2000 is a Saturday self.now = datetime(2000, 1, 1, 1, 1, 1, 1) self.datetimes = [ datetime(2000, 1, 1, 1, 0, 0, 0), datetime(2000, 1, 1, 0, 0, 0, 0), datetime(1999, 12, 31, 23, 59, 59, 999999), datetime(1999, 12, 18, 0, 0, 0, 0), datetime(1999, 12, 4, 0, 0, 0, 0), ] def test_mask(self): # 31 December 1999 is a Friday. 
        dt = datetime(1999, 12, 31, 23, 59, 59, 999999)
        self.assertEqual(dt.weekday(), FRIDAY)

        # Default firstweekday is Saturday
        self.assertEqual(Weeks.mask(dt), Weeks.mask(dt, firstweekday=SATURDAY))
        self.assertEqual(Weeks.mask(dt), datetime(1999, 12, 25, 0, 0, 0, 0))

        # Sunday
        self.assertEqual(Weeks.mask(dt, firstweekday=SUNDAY),
                         datetime(1999, 12, 26, 0, 0, 0, 0))

        # If firstweekday is the same as dt.weekday, then it should return
        # the same day.
        self.assertEqual(Weeks.mask(dt, firstweekday=dt.weekday()), Days.mask(dt))

    def test_future(self):
        datetimes = [datetime(2010, 1, 15, 0, 0, 0, 0)]  # Wikipedia
        self.assertEqual(Weeks.filter(datetimes, number=0, now=self.now),
                         set(datetimes))
        self.assertEqual(Weeks.filter(datetimes, number=1, now=self.now),
                         set(datetimes))

    def test_invalid_number(self):
        self.assertRaises(ValueError, Weeks.filter, [], number=-1, now=self.now)
        self.assertRaises(ValueError, Weeks.filter, [], number=0.1, now=self.now)
        self.assertRaises(ValueError, Weeks.filter, [], number='1', now=self.now)

    def test_no_input(self):
        self.assertEqual(Weeks.filter([], number=1, now=self.now), set())

    def test_no_results(self):
        self.assertEqual(Weeks.filter([self.now], number=0, now=self.now), set())
        self.assertEqual(Weeks.filter(self.datetimes, number=0, now=self.now), set())

    def test_current(self):
        self.assertEqual(Weeks.filter(self.datetimes, number=1, now=self.now),
                         set([datetime(2000, 1, 1, 0, 0, 0, 0)]))

    def test_duplicates(self):
        # Ensure we get the oldest per-week datetime when there are
        # duplicates: i.e. not datetime(2000, 1, 1, 1, 0, 0, 0)
        self.assertEqual(Weeks.filter(self.datetimes, number=2, now=self.now),
                         set([datetime(1999, 12, 31, 23, 59, 59, 999999),
                              datetime(2000, 1, 1, 0, 0, 0, 0)]))

    def test_before_start(self):
        # datetime(1999, 12, 4, 0, 0, 0, 0) is too old to show up
        # in the results
        self.assertEqual(Weeks.filter(self.datetimes, number=4, now=self.now),
                         set([datetime(1999, 12, 18, 0, 0, 0, 0),
                              datetime(1999, 12, 31, 23, 59, 59, 999999),
                              datetime(2000, 1, 1, 0, 0, 0, 0)]))

    def test_all_input(self):
        self.assertEqual(Weeks.filter(self.datetimes, number=5, now=self.now),
                         set([datetime(1999, 12, 4, 0, 0, 0, 0),
                              datetime(1999, 12, 18, 0, 0, 0, 0),
                              datetime(1999, 12, 31, 23, 59, 59, 999999),
                              datetime(2000, 1, 1, 0, 0, 0, 0)]))
        self.assertEqual(Weeks.filter(self.datetimes, number=6, now=self.now),
                         set([datetime(1999, 12, 4, 0, 0, 0, 0),
                              datetime(1999, 12, 18, 0, 0, 0, 0),
                              datetime(1999, 12, 31, 23, 59, 59, 999999),
                              datetime(2000, 1, 1, 0, 0, 0, 0)]))

    def test_different_firstweekday(self):
        self.assertEqual(
            Weeks.filter(
                self.datetimes, number=3, firstweekday=3, now=self.now
            ),
            set([datetime(1999, 12, 18, 0, 0, 0, 0),
                 datetime(1999, 12, 31, 23, 59, 59, 999999)])
        )
        filtered = Weeks.filter(
            self.datetimes, number=5, firstweekday=3, now=self.now
        )
        self.assertEqual(
            filtered,
            set([datetime(1999, 12, 18, 0, 0, 0, 0),
                 datetime(1999, 12, 4, 0, 0, 0, 0),
                 datetime(1999, 12, 31, 23, 59, 59, 999999)])
        )


class TestMonths(unittest.TestCase):
    def setUp(self):
        self.now = datetime(2000, 2, 1, 1, 1, 1, 1)
        self.datetimes = [
            datetime(2000, 2, 1, 0, 0, 0, 0),
            datetime(2000, 1, 1, 1, 0, 0, 0),
            datetime(2000, 1, 1, 0, 0, 0, 0),
            datetime(1999, 12, 31, 23, 59, 59, 999999),
            datetime(1999, 10, 1, 0, 0, 0, 0),
        ]

    def test_mask(self):
        self.assertEqual(
            Months.mask(datetime(1999, 12, 31, 23, 59, 59, 999999)),
            datetime(1999, 12, 1, 0, 0, 0, 0)
        )

    def test_future(self):
        datetimes = [datetime(2010, 1, 15, 0, 0, 0, 0)]  # Wikipedia
        self.assertEqual(Months.filter(datetimes, number=0, now=self.now),
                         set(datetimes))
        self.assertEqual(Months.filter(datetimes, number=1, now=self.now),
                         set(datetimes))

    def test_invalid_number(self):
        self.assertRaises(ValueError, Months.filter, [], number=-1, now=self.now)
        self.assertRaises(ValueError, Months.filter, [], number=0.1, now=self.now)
        self.assertRaises(ValueError, Months.filter, [], number='1', now=self.now)

    def test_no_input(self):
        self.assertEqual(Months.filter([], number=1, now=self.now), set())

    def test_no_results(self):
        self.assertEqual(Months.filter([self.now], number=0, now=self.now), set())
        self.assertEqual(Months.filter(self.datetimes, number=0, now=self.now), set())

    def test_current(self):
        self.assertEqual(Months.filter(self.datetimes, number=1, now=self.now),
                         set([datetime(2000, 2, 1, 0, 0, 0, 0)]))

    def test_duplicates(self):
        # Ensure we get the oldest per-month datetime when there are
        # duplicates: i.e. not datetime(2000, 1, 1, 1, 0, 0, 0)
        self.assertEqual(Months.filter(self.datetimes, number=2, now=self.now),
                         set([datetime(2000, 1, 1, 0, 0, 0, 0),
                              datetime(2000, 2, 1, 0, 0, 0, 0)]))

    def test_new_year(self):
        self.assertEqual(Months.filter(self.datetimes, number=3, now=self.now),
                         set([datetime(1999, 12, 31, 23, 59, 59, 999999),
                              datetime(2000, 1, 1, 0, 0, 0, 0),
                              datetime(2000, 2, 1, 0, 0, 0, 0)]))

    def test_before_start(self):
        # datetime(1999, 10, 1, 0, 0, 0, 0) is too old to show up
        # in the results
        self.assertEqual(Months.filter(self.datetimes, number=4, now=self.now),
                         set([datetime(1999, 12, 31, 23, 59, 59, 999999),
                              datetime(2000, 1, 1, 0, 0, 0, 0),
                              datetime(2000, 2, 1, 0, 0, 0, 0)]))

    def test_all_input(self):
        self.assertEqual(Months.filter(self.datetimes, number=5, now=self.now),
                         set([datetime(1999, 10, 1, 0, 0, 0, 0),
                              datetime(1999, 12, 31, 23, 59, 59, 999999),
                              datetime(2000, 1, 1, 0, 0, 0, 0),
                              datetime(2000, 2, 1, 0, 0, 0, 0)]))
        self.assertEqual(Months.filter(self.datetimes, number=6, now=self.now),
                         set([datetime(1999, 10, 1, 0, 0, 0, 0),
                              datetime(1999, 12, 31, 23, 59, 59, 999999),
                              datetime(2000, 1, 1, 0, 0, 0, 0),
                              datetime(2000, 2, 1, 0, 0, 0, 0)]))

    def test_multiple_years(self):
        now = datetime(2000, 1, 1, 0, 0, 0, 0)
        datetimes = [
            datetime(2000, 1, 1, 0, 0, 0, 0),
            datetime(1999, 12, 1, 0, 0, 0, 0),
            datetime(1999, 1, 1, 0, 0, 0, 0),
            datetime(1998, 12, 1, 0, 0, 0, 0),
            datetime(1997, 12, 1, 0, 0, 0, 0),
        ]
        # 12 months back ignores datetime(1999, 1, 1, 0, 0, 0, 0)
        self.assertEqual(Months.filter(datetimes, number=12, now=now),
                         set([datetime(1999, 12, 1, 0, 0, 0, 0),
                              datetime(2000, 1, 1, 0, 0, 0, 0)]))
        # But 13 months back gets it
        self.assertEqual(Months.filter(datetimes, number=13, now=now),
                         set([datetime(1999, 1, 1, 0, 0, 0, 0),
                              datetime(1999, 12, 1, 0, 0, 0, 0),
                              datetime(2000, 1, 1, 0, 0, 0, 0)]))
        # 14 months back also gets datetime(1998, 12, 1, 0, 0, 0, 0)
        self.assertEqual(Months.filter(datetimes, number=14, now=now),
                         set([datetime(1998, 12, 1, 0, 0, 0, 0),
                              datetime(1999, 1, 1, 0, 0, 0, 0),
                              datetime(1999, 12, 1, 0, 0, 0, 0),
                              datetime(2000, 1, 1, 0, 0, 0, 0)]))
        # As does 24 months back
        self.assertEqual(Months.filter(datetimes, number=24, now=now),
                         set([datetime(1998, 12, 1, 0, 0, 0, 0),
                              datetime(1999, 1, 1, 0, 0, 0, 0),
                              datetime(1999, 12, 1, 0, 0, 0, 0),
                              datetime(2000, 1, 1, 0, 0, 0, 0)]))
        # 36 months back should get datetime(1997, 12, 1, 0, 0, 0, 0)
        self.assertEqual(Months.filter(datetimes, number=36, now=now),
                         set([datetime(1997, 12, 1, 0, 0, 0, 0),
                              datetime(1998, 12, 1, 0, 0, 0, 0),
                              datetime(1999, 1, 1, 0, 0, 0, 0),
                              datetime(1999, 12, 1, 0, 0, 0, 0),
                              datetime(2000, 1, 1, 0, 0, 0, 0)]))


class TestYears(unittest.TestCase):
    def setUp(self):
        self.now = datetime(2000, 1, 1, 1, 1, 1, 1)
        self.datetimes = [
            datetime(2000, 1, 1, 1, 0, 0, 0),
            datetime(2000, 1, 1, 0, 0, 0, 0),
            datetime(1999, 12, 31, 23, 59, 59, 999999),
            datetime(1998, 1, 1, 0, 0, 0, 0),
            datetime(1996, 1, 1, 0, 0, 0, 0),
        ]

    def test_mask(self):
        self.assertEqual(
            Years.mask(datetime(1999, 12, 31, 23, 59, 59, 999999)),
            datetime(1999, 1, 1, 0, 0, 0, 0)
        )

    def test_future(self):
        datetimes = [datetime(2010, 1, 15, 0, 0, 0, 0)]  # Wikipedia
        self.assertEqual(Years.filter(datetimes, number=0, now=self.now),
                         set(datetimes))
        self.assertEqual(Years.filter(datetimes, number=1, now=self.now),
                         set(datetimes))

    def test_invalid_number(self):
        self.assertRaises(ValueError, Years.filter, [], number=-1, now=self.now)
        self.assertRaises(ValueError, Years.filter, [], number=0.1, now=self.now)
        self.assertRaises(ValueError, Years.filter, [], number='1', now=self.now)

    def test_no_input(self):
        self.assertEqual(Years.filter([], number=1, now=self.now), set())

    def test_no_results(self):
        self.assertEqual(Years.filter([self.now], number=0, now=self.now), set())
        self.assertEqual(Years.filter(self.datetimes, number=0, now=self.now), set())

    def test_current(self):
        self.assertEqual(Years.filter(self.datetimes, number=1, now=self.now),
                         set([datetime(2000, 1, 1, 0, 0, 0, 0)]))

    def test_duplicates(self):
        # Ensure we get the oldest per-year datetime when there are
        # duplicates: i.e. not datetime(2000, 1, 1, 1, 0, 0, 0)
        self.assertEqual(Years.filter(self.datetimes, number=2, now=self.now),
                         set([datetime(1999, 12, 31, 23, 59, 59, 999999),
                              datetime(2000, 1, 1, 0, 0, 0, 0)]))

    def test_before_start(self):
        # datetime(1996, 1, 1, 0, 0, 0, 0) is too old to show up
        # in the results
        self.assertEqual(Years.filter(self.datetimes, number=4, now=self.now),
                         set([datetime(1998, 1, 1, 0, 0, 0, 0),
                              datetime(1999, 12, 31, 23, 59, 59, 999999),
                              datetime(2000, 1, 1, 0, 0, 0, 0)]))

    def test_all_input(self):
        self.assertEqual(Years.filter(self.datetimes, number=5, now=self.now),
                         set([datetime(1996, 1, 1, 0, 0, 0, 0),
                              datetime(1998, 1, 1, 0, 0, 0, 0),
                              datetime(1999, 12, 31, 23, 59, 59, 999999),
                              datetime(2000, 1, 1, 0, 0, 0, 0)]))
        self.assertEqual(Years.filter(self.datetimes, number=6, now=self.now),
                         set([datetime(1996, 1, 1, 0, 0, 0, 0),
                              datetime(1998, 1, 1, 0, 0, 0, 0),
                              datetime(1999, 12, 31, 23, 59, 59, 999999),
                              datetime(2000, 1, 1, 0, 0, 0, 0)]))
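The tests above pin down the filter contract: filter(datetimes, number=N, now=...) returns the set of datetimes to keep, namely the oldest datetime in each of the N most recent calendar buckets. Because every filter returns a set, survivors from different granularities compose by union. Below is a minimal sketch of a grandfather-father-son retention helper built only from the classes exercised above; the keep_recent name and the bucket counts are illustrative assumptions, not part of the library.

from datetime import datetime

from grandfatherson.filters import Days, Hours, Months, Weeks


def keep_recent(datetimes, now):
    # Hypothetical helper: union the survivors of several filters to get a
    # GFS-style retention set (24 hourly, 7 daily, 4 weekly, 12 monthly).
    # Each filter returns a set, so `|` combines them directly.
    return (Hours.filter(datetimes, number=24, now=now) |
            Days.filter(datetimes, number=7, now=now) |
            Weeks.filter(datetimes, number=4, now=now) |
            Months.filter(datetimes, number=12, now=now))


# Anything in the input that is *not* in keep_recent(...) is a deletion
# candidate, e.g.:
#     snapshots = [...]                      # datetimes of existing backups
#     doomed = set(snapshots) - keep_recent(snapshots, now=datetime.utcnow())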
[ "grandfatherson.filters.Weeks.filter", "grandfatherson.filters.Seconds.filter", "grandfatherson.filters.UTC", "grandfatherson.filters.Months.filter", "grandfatherson.filters.Days.mask", "datetime.date", "grandfatherson.filters.Days.filter", "datetime.datetime", "grandfatherson.filters.Years.filter", "grandfatherson.filters.Weeks.mask", "grandfatherson.filters.Hours.filter", "grandfatherson.filters.Minutes.filter" ]
[((384, 416), 'datetime.datetime', 'datetime', (['(2000)', '(1)', '(1)', '(0)', '(0)', '(1)', '(1)'], {}), '(2000, 1, 1, 0, 0, 1, 1)\n', (392, 416), False, 'from datetime import datetime, date\n'), ((5547, 5579), 'datetime.datetime', 'datetime', (['(2000)', '(1)', '(1)', '(0)', '(1)', '(1)', '(1)'], {}), '(2000, 1, 1, 0, 1, 1, 1)\n', (5555, 5579), False, 'from datetime import datetime, date\n'), ((9500, 9532), 'datetime.datetime', 'datetime', (['(2000)', '(1)', '(1)', '(1)', '(1)', '(1)', '(1)'], {}), '(2000, 1, 1, 1, 1, 1, 1)\n', (9508, 9532), False, 'from datetime import datetime, date\n'), ((13177, 13209), 'datetime.datetime', 'datetime', (['(2000)', '(1)', '(1)', '(1)', '(1)', '(1)', '(1)'], {}), '(2000, 1, 1, 1, 1, 1, 1)\n', (13185, 13209), False, 'from datetime import datetime, date\n'), ((16525, 16557), 'datetime.datetime', 'datetime', (['(2004)', '(3)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(2004, 3, 1, 0, 0, 0, 0)\n', (16533, 16557), False, 'from datetime import datetime, date\n'), ((17513, 17545), 'datetime.datetime', 'datetime', (['(1900)', '(3)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(1900, 3, 1, 0, 0, 0, 0)\n', (17521, 17545), False, 'from datetime import datetime, date\n'), ((18694, 18710), 'datetime.date', 'date', (['(2000)', '(1)', '(1)'], {}), '(2000, 1, 1)\n', (18698, 18710), False, 'from datetime import datetime, date\n'), ((19120, 19136), 'datetime.date', 'date', (['(2000)', '(1)', '(1)'], {}), '(2000, 1, 1)\n', (19124, 19136), False, 'from datetime import datetime, date\n'), ((19602, 19634), 'datetime.datetime', 'datetime', (['(2000)', '(1)', '(1)', '(1)', '(1)', '(1)', '(1)'], {}), '(2000, 1, 1, 1, 1, 1, 1)\n', (19610, 19634), False, 'from datetime import datetime, date\n'), ((19994, 20036), 'datetime.datetime', 'datetime', (['(1999)', '(12)', '(31)', '(23)', '(59)', '(59)', '(999999)'], {}), '(1999, 12, 31, 23, 59, 59, 999999)\n', (20002, 20036), False, 'from datetime import datetime, date\n'), ((23773, 23841), 'grandfatherson.filters.Weeks.filter', 'Weeks.filter', (['self.datetimes'], {'number': '(5)', 'firstweekday': '(3)', 'now': 'self.now'}), '(self.datetimes, number=5, firstweekday=3, now=self.now)\n', (23785, 23841), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((24266, 24298), 'datetime.datetime', 'datetime', (['(2000)', '(2)', '(1)', '(1)', '(1)', '(1)', '(1)'], {}), '(2000, 2, 1, 1, 1, 1, 1)\n', (24274, 24298), False, 'from datetime import datetime, date\n'), ((27880, 27912), 'datetime.datetime', 'datetime', (['(2000)', '(1)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(2000, 1, 1, 0, 0, 0, 0)\n', (27888, 27912), False, 'from datetime import datetime, date\n'), ((30061, 30093), 'datetime.datetime', 'datetime', (['(2000)', '(1)', '(1)', '(1)', '(1)', '(1)', '(1)'], {}), '(2000, 1, 1, 1, 1, 1, 1)\n', (30069, 30093), False, 'from datetime import datetime, date\n'), ((297, 302), 'grandfatherson.filters.UTC', 'UTC', ([], {}), '()\n', (300, 302), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((456, 488), 'datetime.datetime', 'datetime', (['(2000)', '(1)', '(1)', '(0)', '(0)', '(1)', '(0)'], {}), '(2000, 1, 1, 0, 0, 1, 0)\n', (464, 488), False, 'from datetime import datetime, date\n'), ((502, 534), 'datetime.datetime', 'datetime', (['(2000)', '(1)', '(1)', '(0)', '(0)', '(0)', '(1)'], {}), '(2000, 1, 1, 0, 0, 0, 1)\n', (510, 534), False, 'from datetime import datetime, date\n'), ((548, 580), 'datetime.datetime', 'datetime', (['(2000)', 
'(1)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(2000, 1, 1, 0, 0, 0, 0)\n', (556, 580), False, 'from datetime import datetime, date\n'), ((594, 636), 'datetime.datetime', 'datetime', (['(1999)', '(12)', '(31)', '(23)', '(59)', '(59)', '(999999)'], {}), '(1999, 12, 31, 23, 59, 59, 999999)\n', (602, 636), False, 'from datetime import datetime, date\n'), ((650, 687), 'datetime.datetime', 'datetime', (['(1999)', '(12)', '(31)', '(23)', '(59)', '(57)', '(0)'], {}), '(1999, 12, 31, 23, 59, 57, 0)\n', (658, 687), False, 'from datetime import datetime, date\n'), ((833, 870), 'datetime.datetime', 'datetime', (['(1999)', '(12)', '(31)', '(23)', '(59)', '(59)', '(0)'], {}), '(1999, 12, 31, 23, 59, 59, 0)\n', (841, 870), False, 'from datetime import datetime, date\n'), ((930, 963), 'datetime.datetime', 'datetime', (['(2010)', '(1)', '(15)', '(0)', '(0)', '(0)', '(0)'], {}), '(2010, 1, 15, 0, 0, 0, 0)\n', (938, 963), False, 'from datetime import datetime, date\n'), ((1003, 1052), 'grandfatherson.filters.Seconds.filter', 'Seconds.filter', (['datetimes'], {'number': '(0)', 'now': 'self.now'}), '(datetimes, number=0, now=self.now)\n', (1017, 1052), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((1120, 1169), 'grandfatherson.filters.Seconds.filter', 'Seconds.filter', (['datetimes'], {'number': '(1)', 'now': 'self.now'}), '(datetimes, number=1, now=self.now)\n', (1134, 1169), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((1632, 1674), 'grandfatherson.filters.Seconds.filter', 'Seconds.filter', (['[]'], {'number': '(1)', 'now': 'self.now'}), '([], number=1, now=self.now)\n', (1646, 1674), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((1765, 1815), 'grandfatherson.filters.Seconds.filter', 'Seconds.filter', (['[self.now]'], {'number': '(0)', 'now': 'self.now'}), '([self.now], number=0, now=self.now)\n', (1779, 1815), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((1874, 1928), 'grandfatherson.filters.Seconds.filter', 'Seconds.filter', (['self.datetimes'], {'number': '(0)', 'now': 'self.now'}), '(self.datetimes, number=0, now=self.now)\n', (1888, 1928), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((2056, 2110), 'grandfatherson.filters.Seconds.filter', 'Seconds.filter', (['self.datetimes'], {'number': '(1)', 'now': 'self.now'}), '(self.datetimes, number=1, now=self.now)\n', (2070, 2110), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((2409, 2463), 'grandfatherson.filters.Seconds.filter', 'Seconds.filter', (['self.datetimes'], {'number': '(2)', 'now': 'self.now'}), '(self.datetimes, number=2, now=self.now)\n', (2423, 2463), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((2694, 2748), 'grandfatherson.filters.Seconds.filter', 'Seconds.filter', (['self.datetimes'], {'number': '(3)', 'now': 'self.now'}), '(self.datetimes, number=3, now=self.now)\n', (2708, 2748), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((3148, 3202), 'grandfatherson.filters.Seconds.filter', 'Seconds.filter', (['self.datetimes'], {'number': '(4)', 'now': 'self.now'}), '(self.datetimes, number=4, now=self.now)\n', (3162, 3202), False, 'from grandfatherson.filters import Seconds, 
Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((3504, 3558), 'grandfatherson.filters.Seconds.filter', 'Seconds.filter', (['self.datetimes'], {'number': '(5)', 'now': 'self.now'}), '(self.datetimes, number=5, now=self.now)\n', (3518, 3558), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((3899, 3953), 'grandfatherson.filters.Seconds.filter', 'Seconds.filter', (['self.datetimes'], {'number': '(6)', 'now': 'self.now'}), '(self.datetimes, number=6, now=self.now)\n', (3913, 3953), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((4679, 4733), 'grandfatherson.filters.Seconds.filter', 'Seconds.filter', (['tzinfo_datetimes'], {'number': '(5)', 'now': 'utcnow'}), '(tzinfo_datetimes, number=5, now=utcnow)\n', (4693, 4733), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((5086, 5140), 'grandfatherson.filters.Seconds.filter', 'Seconds.filter', (['tzinfo_datetimes'], {'number': '(6)', 'now': 'utcnow'}), '(tzinfo_datetimes, number=6, now=utcnow)\n', (5100, 5140), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((5619, 5651), 'datetime.datetime', 'datetime', (['(2000)', '(1)', '(1)', '(0)', '(1)', '(0)', '(0)'], {}), '(2000, 1, 1, 0, 1, 0, 0)\n', (5627, 5651), False, 'from datetime import datetime, date\n'), ((5665, 5697), 'datetime.datetime', 'datetime', (['(2000)', '(1)', '(1)', '(0)', '(0)', '(1)', '(0)'], {}), '(2000, 1, 1, 0, 0, 1, 0)\n', (5673, 5697), False, 'from datetime import datetime, date\n'), ((5711, 5743), 'datetime.datetime', 'datetime', (['(2000)', '(1)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(2000, 1, 1, 0, 0, 0, 0)\n', (5719, 5743), False, 'from datetime import datetime, date\n'), ((5757, 5799), 'datetime.datetime', 'datetime', (['(1999)', '(12)', '(31)', '(23)', '(59)', '(59)', '(999999)'], {}), '(1999, 12, 31, 23, 59, 59, 999999)\n', (5765, 5799), False, 'from datetime import datetime, date\n'), ((5813, 5849), 'datetime.datetime', 'datetime', (['(1999)', '(12)', '(31)', '(23)', '(57)', '(0)', '(0)'], {}), '(1999, 12, 31, 23, 57, 0, 0)\n', (5821, 5849), False, 'from datetime import datetime, date\n'), ((5995, 6031), 'datetime.datetime', 'datetime', (['(1999)', '(12)', '(31)', '(23)', '(59)', '(0)', '(0)'], {}), '(1999, 12, 31, 23, 59, 0, 0)\n', (6003, 6031), False, 'from datetime import datetime, date\n'), ((6091, 6124), 'datetime.datetime', 'datetime', (['(2010)', '(1)', '(15)', '(0)', '(0)', '(0)', '(0)'], {}), '(2010, 1, 15, 0, 0, 0, 0)\n', (6099, 6124), False, 'from datetime import datetime, date\n'), ((6164, 6213), 'grandfatherson.filters.Minutes.filter', 'Minutes.filter', (['datetimes'], {'number': '(0)', 'now': 'self.now'}), '(datetimes, number=0, now=self.now)\n', (6178, 6213), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((6281, 6330), 'grandfatherson.filters.Minutes.filter', 'Minutes.filter', (['datetimes'], {'number': '(1)', 'now': 'self.now'}), '(datetimes, number=1, now=self.now)\n', (6295, 6330), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((6793, 6835), 'grandfatherson.filters.Minutes.filter', 'Minutes.filter', (['[]'], {'number': '(1)', 'now': 'self.now'}), '([], number=1, now=self.now)\n', (6807, 6835), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((6926, 
6976), 'grandfatherson.filters.Minutes.filter', 'Minutes.filter', (['[self.now]'], {'number': '(0)', 'now': 'self.now'}), '([self.now], number=0, now=self.now)\n', (6940, 6976), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((7035, 7089), 'grandfatherson.filters.Minutes.filter', 'Minutes.filter', (['self.datetimes'], {'number': '(0)', 'now': 'self.now'}), '(self.datetimes, number=0, now=self.now)\n', (7049, 7089), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((7217, 7271), 'grandfatherson.filters.Minutes.filter', 'Minutes.filter', (['self.datetimes'], {'number': '(1)', 'now': 'self.now'}), '(self.datetimes, number=1, now=self.now)\n', (7231, 7271), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((7570, 7624), 'grandfatherson.filters.Minutes.filter', 'Minutes.filter', (['self.datetimes'], {'number': '(2)', 'now': 'self.now'}), '(self.datetimes, number=2, now=self.now)\n', (7584, 7624), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((7855, 7909), 'grandfatherson.filters.Minutes.filter', 'Minutes.filter', (['self.datetimes'], {'number': '(3)', 'now': 'self.now'}), '(self.datetimes, number=3, now=self.now)\n', (7869, 7909), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((8308, 8362), 'grandfatherson.filters.Minutes.filter', 'Minutes.filter', (['self.datetimes'], {'number': '(4)', 'now': 'self.now'}), '(self.datetimes, number=4, now=self.now)\n', (8322, 8362), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((8664, 8718), 'grandfatherson.filters.Minutes.filter', 'Minutes.filter', (['self.datetimes'], {'number': '(5)', 'now': 'self.now'}), '(self.datetimes, number=5, now=self.now)\n', (8678, 8718), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((9058, 9112), 'grandfatherson.filters.Minutes.filter', 'Minutes.filter', (['self.datetimes'], {'number': '(6)', 'now': 'self.now'}), '(self.datetimes, number=6, now=self.now)\n', (9072, 9112), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((9572, 9604), 'datetime.datetime', 'datetime', (['(2000)', '(1)', '(1)', '(1)', '(0)', '(0)', '(0)'], {}), '(2000, 1, 1, 1, 0, 0, 0)\n', (9580, 9604), False, 'from datetime import datetime, date\n'), ((9618, 9650), 'datetime.datetime', 'datetime', (['(2000)', '(1)', '(1)', '(0)', '(1)', '(0)', '(0)'], {}), '(2000, 1, 1, 0, 1, 0, 0)\n', (9626, 9650), False, 'from datetime import datetime, date\n'), ((9664, 9696), 'datetime.datetime', 'datetime', (['(2000)', '(1)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(2000, 1, 1, 0, 0, 0, 0)\n', (9672, 9696), False, 'from datetime import datetime, date\n'), ((9710, 9752), 'datetime.datetime', 'datetime', (['(1999)', '(12)', '(31)', '(23)', '(59)', '(59)', '(999999)'], {}), '(1999, 12, 31, 23, 59, 59, 999999)\n', (9718, 9752), False, 'from datetime import datetime, date\n'), ((9766, 9801), 'datetime.datetime', 'datetime', (['(1999)', '(12)', '(31)', '(21)', '(0)', '(0)', '(0)'], {}), '(1999, 12, 31, 21, 0, 0, 0)\n', (9774, 9801), False, 'from datetime import datetime, date\n'), ((9945, 9980), 'datetime.datetime', 'datetime', (['(1999)', '(12)', '(31)', '(23)', '(0)', '(0)', '(0)'], {}), '(1999, 12, 31, 23, 0, 0, 0)\n', 
(9953, 9980), False, 'from datetime import datetime, date\n'), ((10040, 10073), 'datetime.datetime', 'datetime', (['(2010)', '(1)', '(15)', '(0)', '(0)', '(0)', '(0)'], {}), '(2010, 1, 15, 0, 0, 0, 0)\n', (10048, 10073), False, 'from datetime import datetime, date\n'), ((10113, 10160), 'grandfatherson.filters.Hours.filter', 'Hours.filter', (['datetimes'], {'number': '(0)', 'now': 'self.now'}), '(datetimes, number=0, now=self.now)\n', (10125, 10160), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((10228, 10275), 'grandfatherson.filters.Hours.filter', 'Hours.filter', (['datetimes'], {'number': '(1)', 'now': 'self.now'}), '(datetimes, number=1, now=self.now)\n', (10240, 10275), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((10732, 10772), 'grandfatherson.filters.Hours.filter', 'Hours.filter', (['[]'], {'number': '(1)', 'now': 'self.now'}), '([], number=1, now=self.now)\n', (10744, 10772), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((10863, 10911), 'grandfatherson.filters.Hours.filter', 'Hours.filter', (['[self.now]'], {'number': '(0)', 'now': 'self.now'}), '([self.now], number=0, now=self.now)\n', (10875, 10911), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((10970, 11022), 'grandfatherson.filters.Hours.filter', 'Hours.filter', (['self.datetimes'], {'number': '(0)', 'now': 'self.now'}), '(self.datetimes, number=0, now=self.now)\n', (10982, 11022), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((11110, 11162), 'grandfatherson.filters.Hours.filter', 'Hours.filter', (['self.datetimes'], {'number': '(1)', 'now': 'self.now'}), '(self.datetimes, number=1, now=self.now)\n', (11122, 11162), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((11419, 11471), 'grandfatherson.filters.Hours.filter', 'Hours.filter', (['self.datetimes'], {'number': '(2)', 'now': 'self.now'}), '(self.datetimes, number=2, now=self.now)\n', (11431, 11471), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((11700, 11752), 'grandfatherson.filters.Hours.filter', 'Hours.filter', (['self.datetimes'], {'number': '(3)', 'now': 'self.now'}), '(self.datetimes, number=3, now=self.now)\n', (11712, 11752), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((12110, 12162), 'grandfatherson.filters.Hours.filter', 'Hours.filter', (['self.datetimes'], {'number': '(4)', 'now': 'self.now'}), '(self.datetimes, number=4, now=self.now)\n', (12122, 12162), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((12424, 12476), 'grandfatherson.filters.Hours.filter', 'Hours.filter', (['self.datetimes'], {'number': '(5)', 'now': 'self.now'}), '(self.datetimes, number=5, now=self.now)\n', (12436, 12476), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((12775, 12827), 'grandfatherson.filters.Hours.filter', 'Hours.filter', (['self.datetimes'], {'number': '(6)', 'now': 'self.now'}), '(self.datetimes, number=6, now=self.now)\n', (12787, 12827), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((13249, 13281), 'datetime.datetime', 
'datetime', (['(2000)', '(1)', '(1)', '(1)', '(0)', '(0)', '(0)'], {}), '(2000, 1, 1, 1, 0, 0, 0)\n', (13257, 13281), False, 'from datetime import datetime, date\n'), ((13295, 13327), 'datetime.datetime', 'datetime', (['(2000)', '(1)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(2000, 1, 1, 0, 0, 0, 0)\n', (13303, 13327), False, 'from datetime import datetime, date\n'), ((13341, 13383), 'datetime.datetime', 'datetime', (['(1999)', '(12)', '(31)', '(23)', '(59)', '(59)', '(999999)'], {}), '(1999, 12, 31, 23, 59, 59, 999999)\n', (13349, 13383), False, 'from datetime import datetime, date\n'), ((13397, 13431), 'datetime.datetime', 'datetime', (['(1999)', '(12)', '(30)', '(0)', '(0)', '(0)', '(0)'], {}), '(1999, 12, 30, 0, 0, 0, 0)\n', (13405, 13431), False, 'from datetime import datetime, date\n'), ((13445, 13479), 'datetime.datetime', 'datetime', (['(1999)', '(12)', '(28)', '(0)', '(0)', '(0)', '(0)'], {}), '(1999, 12, 28, 0, 0, 0, 0)\n', (13453, 13479), False, 'from datetime import datetime, date\n'), ((13622, 13656), 'datetime.datetime', 'datetime', (['(1999)', '(12)', '(31)', '(0)', '(0)', '(0)', '(0)'], {}), '(1999, 12, 31, 0, 0, 0, 0)\n', (13630, 13656), False, 'from datetime import datetime, date\n'), ((13716, 13749), 'datetime.datetime', 'datetime', (['(2010)', '(1)', '(15)', '(0)', '(0)', '(0)', '(0)'], {}), '(2010, 1, 15, 0, 0, 0, 0)\n', (13724, 13749), False, 'from datetime import datetime, date\n'), ((13789, 13835), 'grandfatherson.filters.Days.filter', 'Days.filter', (['datetimes'], {'number': '(0)', 'now': 'self.now'}), '(datetimes, number=0, now=self.now)\n', (13800, 13835), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((13903, 13949), 'grandfatherson.filters.Days.filter', 'Days.filter', (['datetimes'], {'number': '(1)', 'now': 'self.now'}), '(datetimes, number=1, now=self.now)\n', (13914, 13949), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((14403, 14442), 'grandfatherson.filters.Days.filter', 'Days.filter', (['[]'], {'number': '(1)', 'now': 'self.now'}), '([], number=1, now=self.now)\n', (14414, 14442), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((14533, 14580), 'grandfatherson.filters.Days.filter', 'Days.filter', (['[self.now]'], {'number': '(0)', 'now': 'self.now'}), '([self.now], number=0, now=self.now)\n', (14544, 14580), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((14639, 14690), 'grandfatherson.filters.Days.filter', 'Days.filter', (['self.datetimes'], {'number': '(0)', 'now': 'self.now'}), '(self.datetimes, number=0, now=self.now)\n', (14650, 14690), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((14778, 14829), 'grandfatherson.filters.Days.filter', 'Days.filter', (['self.datetimes'], {'number': '(1)', 'now': 'self.now'}), '(self.datetimes, number=1, now=self.now)\n', (14789, 14829), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((15085, 15136), 'grandfatherson.filters.Days.filter', 'Days.filter', (['self.datetimes'], {'number': '(2)', 'now': 'self.now'}), '(self.datetimes, number=2, now=self.now)\n', (15096, 15136), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((15429, 15480), 'grandfatherson.filters.Days.filter', 'Days.filter', (['self.datetimes'], 
{'number': '(4)', 'now': 'self.now'}), '(self.datetimes, number=4, now=self.now)\n', (15440, 15480), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((15744, 15795), 'grandfatherson.filters.Days.filter', 'Days.filter', (['self.datetimes'], {'number': '(5)', 'now': 'self.now'}), '(self.datetimes, number=5, now=self.now)\n', (15755, 15795), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((16095, 16146), 'grandfatherson.filters.Days.filter', 'Days.filter', (['self.datetimes'], {'number': '(6)', 'now': 'self.now'}), '(self.datetimes, number=6, now=self.now)\n', (16106, 16146), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((16597, 16629), 'datetime.datetime', 'datetime', (['(2004)', '(3)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(2004, 3, 1, 0, 0, 0, 0)\n', (16605, 16629), False, 'from datetime import datetime, date\n'), ((16643, 16676), 'datetime.datetime', 'datetime', (['(2004)', '(2)', '(29)', '(0)', '(0)', '(0)', '(0)'], {}), '(2004, 2, 29, 0, 0, 0, 0)\n', (16651, 16676), False, 'from datetime import datetime, date\n'), ((16690, 16723), 'datetime.datetime', 'datetime', (['(2004)', '(2)', '(28)', '(0)', '(0)', '(0)', '(0)'], {}), '(2004, 2, 28, 0, 0, 0, 0)\n', (16698, 16723), False, 'from datetime import datetime, date\n'), ((16737, 16770), 'datetime.datetime', 'datetime', (['(2004)', '(2)', '(27)', '(0)', '(0)', '(0)', '(0)'], {}), '(2004, 2, 27, 0, 0, 0, 0)\n', (16745, 16770), False, 'from datetime import datetime, date\n'), ((16808, 16854), 'grandfatherson.filters.Days.filter', 'Days.filter', (['datetimes_2004'], {'number': '(1)', 'now': 'now'}), '(datetimes_2004, number=1, now=now)\n', (16819, 16854), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((16948, 16994), 'grandfatherson.filters.Days.filter', 'Days.filter', (['datetimes_2004'], {'number': '(2)', 'now': 'now'}), '(datetimes_2004, number=2, now=now)\n', (16959, 16994), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((17153, 17199), 'grandfatherson.filters.Days.filter', 'Days.filter', (['datetimes_2004'], {'number': '(3)', 'now': 'now'}), '(datetimes_2004, number=3, now=now)\n', (17164, 17199), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((17585, 17617), 'datetime.datetime', 'datetime', (['(1900)', '(3)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(1900, 3, 1, 0, 0, 0, 0)\n', (17593, 17617), False, 'from datetime import datetime, date\n'), ((17631, 17664), 'datetime.datetime', 'datetime', (['(1900)', '(2)', '(28)', '(0)', '(0)', '(0)', '(0)'], {}), '(1900, 2, 28, 0, 0, 0, 0)\n', (17639, 17664), False, 'from datetime import datetime, date\n'), ((17678, 17711), 'datetime.datetime', 'datetime', (['(1900)', '(2)', '(27)', '(0)', '(0)', '(0)', '(0)'], {}), '(1900, 2, 27, 0, 0, 0, 0)\n', (17686, 17711), False, 'from datetime import datetime, date\n'), ((17749, 17795), 'grandfatherson.filters.Days.filter', 'Days.filter', (['datetimes_1900'], {'number': '(1)', 'now': 'now'}), '(datetimes_1900, number=1, now=now)\n', (17760, 17795), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((17889, 17935), 'grandfatherson.filters.Days.filter', 'Days.filter', (['datetimes_1900'], {'number': '(2)', 'now': 'now'}), '(datetimes_1900, number=2, 
now=now)\n', (17900, 17935), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((18094, 18140), 'grandfatherson.filters.Days.filter', 'Days.filter', (['datetimes_1900'], {'number': '(3)', 'now': 'now'}), '(datetimes_1900, number=3, now=now)\n', (18105, 18140), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((18737, 18787), 'grandfatherson.filters.Days.filter', 'Days.filter', (['tzinfo_datetimes'], {'number': '(5)', 'now': 'today'}), '(tzinfo_datetimes, number=5, now=today)\n', (18748, 18787), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((19163, 19211), 'grandfatherson.filters.Days.filter', 'Days.filter', (['self.datetimes'], {'number': '(5)', 'now': 'today'}), '(self.datetimes, number=5, now=today)\n', (19174, 19211), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((19674, 19706), 'datetime.datetime', 'datetime', (['(2000)', '(1)', '(1)', '(1)', '(0)', '(0)', '(0)'], {}), '(2000, 1, 1, 1, 0, 0, 0)\n', (19682, 19706), False, 'from datetime import datetime, date\n'), ((19720, 19752), 'datetime.datetime', 'datetime', (['(2000)', '(1)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(2000, 1, 1, 0, 0, 0, 0)\n', (19728, 19752), False, 'from datetime import datetime, date\n'), ((19766, 19808), 'datetime.datetime', 'datetime', (['(1999)', '(12)', '(31)', '(23)', '(59)', '(59)', '(999999)'], {}), '(1999, 12, 31, 23, 59, 59, 999999)\n', (19774, 19808), False, 'from datetime import datetime, date\n'), ((19822, 19856), 'datetime.datetime', 'datetime', (['(1999)', '(12)', '(18)', '(0)', '(0)', '(0)', '(0)'], {}), '(1999, 12, 18, 0, 0, 0, 0)\n', (19830, 19856), False, 'from datetime import datetime, date\n'), ((19870, 19903), 'datetime.datetime', 'datetime', (['(1999)', '(12)', '(4)', '(0)', '(0)', '(0)', '(0)'], {}), '(1999, 12, 4, 0, 0, 0, 0)\n', (19878, 19903), False, 'from datetime import datetime, date\n'), ((20153, 20167), 'grandfatherson.filters.Weeks.mask', 'Weeks.mask', (['dt'], {}), '(dt)\n', (20163, 20167), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((20194, 20231), 'grandfatherson.filters.Weeks.mask', 'Weeks.mask', (['dt'], {'firstweekday': 'SATURDAY'}), '(dt, firstweekday=SATURDAY)\n', (20204, 20231), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((20258, 20272), 'grandfatherson.filters.Weeks.mask', 'Weeks.mask', (['dt'], {}), '(dt)\n', (20268, 20272), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((20299, 20333), 'datetime.datetime', 'datetime', (['(1999)', '(12)', '(25)', '(0)', '(0)', '(0)', '(0)'], {}), '(1999, 12, 25, 0, 0, 0, 0)\n', (20307, 20333), False, 'from datetime import datetime, date\n'), ((20378, 20413), 'grandfatherson.filters.Weeks.mask', 'Weeks.mask', (['dt'], {'firstweekday': 'SUNDAY'}), '(dt, firstweekday=SUNDAY)\n', (20388, 20413), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((20440, 20474), 'datetime.datetime', 'datetime', (['(1999)', '(12)', '(26)', '(0)', '(0)', '(0)', '(0)'], {}), '(1999, 12, 26, 0, 0, 0, 0)\n', (20448, 20474), False, 'from datetime import datetime, date\n'), ((20669, 20682), 'grandfatherson.filters.Days.mask', 'Days.mask', (['dt'], {}), '(dt)\n', (20678, 20682), False, 'from 
grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((20733, 20766), 'datetime.datetime', 'datetime', (['(2010)', '(1)', '(15)', '(0)', '(0)', '(0)', '(0)'], {}), '(2010, 1, 15, 0, 0, 0, 0)\n', (20741, 20766), False, 'from datetime import datetime, date\n'), ((20806, 20853), 'grandfatherson.filters.Weeks.filter', 'Weeks.filter', (['datetimes'], {'number': '(0)', 'now': 'self.now'}), '(datetimes, number=0, now=self.now)\n', (20818, 20853), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((20921, 20968), 'grandfatherson.filters.Weeks.filter', 'Weeks.filter', (['datetimes'], {'number': '(1)', 'now': 'self.now'}), '(datetimes, number=1, now=self.now)\n', (20933, 20968), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((21425, 21465), 'grandfatherson.filters.Weeks.filter', 'Weeks.filter', (['[]'], {'number': '(1)', 'now': 'self.now'}), '([], number=1, now=self.now)\n', (21437, 21465), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((21556, 21604), 'grandfatherson.filters.Weeks.filter', 'Weeks.filter', (['[self.now]'], {'number': '(0)', 'now': 'self.now'}), '([self.now], number=0, now=self.now)\n', (21568, 21604), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((21663, 21715), 'grandfatherson.filters.Weeks.filter', 'Weeks.filter', (['self.datetimes'], {'number': '(0)', 'now': 'self.now'}), '(self.datetimes, number=0, now=self.now)\n', (21675, 21715), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((21803, 21855), 'grandfatherson.filters.Weeks.filter', 'Weeks.filter', (['self.datetimes'], {'number': '(1)', 'now': 'self.now'}), '(self.datetimes, number=1, now=self.now)\n', (21815, 21855), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((22111, 22163), 'grandfatherson.filters.Weeks.filter', 'Weeks.filter', (['self.datetimes'], {'number': '(2)', 'now': 'self.now'}), '(self.datetimes, number=2, now=self.now)\n', (22123, 22163), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((22455, 22507), 'grandfatherson.filters.Weeks.filter', 'Weeks.filter', (['self.datetimes'], {'number': '(4)', 'now': 'self.now'}), '(self.datetimes, number=4, now=self.now)\n', (22467, 22507), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((22771, 22823), 'grandfatherson.filters.Weeks.filter', 'Weeks.filter', (['self.datetimes'], {'number': '(5)', 'now': 'self.now'}), '(self.datetimes, number=5, now=self.now)\n', (22783, 22823), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((23122, 23174), 'grandfatherson.filters.Weeks.filter', 'Weeks.filter', (['self.datetimes'], {'number': '(6)', 'now': 'self.now'}), '(self.datetimes, number=6, now=self.now)\n', (23134, 23174), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((23529, 23597), 'grandfatherson.filters.Weeks.filter', 'Weeks.filter', (['self.datetimes'], {'number': '(3)', 'firstweekday': '(3)', 'now': 'self.now'}), '(self.datetimes, number=3, firstweekday=3, now=self.now)\n', (23541, 23597), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, 
Months, Years, UTC\n'), ((23910, 23978), 'grandfatherson.filters.Weeks.filter', 'Weeks.filter', (['self.datetimes'], {'number': '(5)', 'firstweekday': '(3)', 'now': 'self.now'}), '(self.datetimes, number=5, firstweekday=3, now=self.now)\n', (23922, 23978), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((24338, 24370), 'datetime.datetime', 'datetime', (['(2000)', '(2)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(2000, 2, 1, 0, 0, 0, 0)\n', (24346, 24370), False, 'from datetime import datetime, date\n'), ((24384, 24416), 'datetime.datetime', 'datetime', (['(2000)', '(1)', '(1)', '(1)', '(0)', '(0)', '(0)'], {}), '(2000, 1, 1, 1, 0, 0, 0)\n', (24392, 24416), False, 'from datetime import datetime, date\n'), ((24430, 24462), 'datetime.datetime', 'datetime', (['(2000)', '(1)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(2000, 1, 1, 0, 0, 0, 0)\n', (24438, 24462), False, 'from datetime import datetime, date\n'), ((24476, 24518), 'datetime.datetime', 'datetime', (['(1999)', '(12)', '(31)', '(23)', '(59)', '(59)', '(999999)'], {}), '(1999, 12, 31, 23, 59, 59, 999999)\n', (24484, 24518), False, 'from datetime import datetime, date\n'), ((24532, 24565), 'datetime.datetime', 'datetime', (['(1999)', '(10)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(1999, 10, 1, 0, 0, 0, 0)\n', (24540, 24565), False, 'from datetime import datetime, date\n'), ((24710, 24743), 'datetime.datetime', 'datetime', (['(1999)', '(12)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(1999, 12, 1, 0, 0, 0, 0)\n', (24718, 24743), False, 'from datetime import datetime, date\n'), ((24803, 24836), 'datetime.datetime', 'datetime', (['(2010)', '(1)', '(15)', '(0)', '(0)', '(0)', '(0)'], {}), '(2010, 1, 15, 0, 0, 0, 0)\n', (24811, 24836), False, 'from datetime import datetime, date\n'), ((24876, 24924), 'grandfatherson.filters.Months.filter', 'Months.filter', (['datetimes'], {'number': '(0)', 'now': 'self.now'}), '(datetimes, number=0, now=self.now)\n', (24889, 24924), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((24992, 25040), 'grandfatherson.filters.Months.filter', 'Months.filter', (['datetimes'], {'number': '(1)', 'now': 'self.now'}), '(datetimes, number=1, now=self.now)\n', (25005, 25040), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((25500, 25541), 'grandfatherson.filters.Months.filter', 'Months.filter', (['[]'], {'number': '(1)', 'now': 'self.now'}), '([], number=1, now=self.now)\n', (25513, 25541), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((25632, 25681), 'grandfatherson.filters.Months.filter', 'Months.filter', (['[self.now]'], {'number': '(0)', 'now': 'self.now'}), '([self.now], number=0, now=self.now)\n', (25645, 25681), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((25740, 25793), 'grandfatherson.filters.Months.filter', 'Months.filter', (['self.datetimes'], {'number': '(0)', 'now': 'self.now'}), '(self.datetimes, number=0, now=self.now)\n', (25753, 25793), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((25881, 25934), 'grandfatherson.filters.Months.filter', 'Months.filter', (['self.datetimes'], {'number': '(1)', 'now': 'self.now'}), '(self.datetimes, number=1, now=self.now)\n', (25894, 25934), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, 
Months, Years, UTC\n'), ((26192, 26245), 'grandfatherson.filters.Months.filter', 'Months.filter', (['self.datetimes'], {'number': '(2)', 'now': 'self.now'}), '(self.datetimes, number=2, now=self.now)\n', (26205, 26245), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((26432, 26485), 'grandfatherson.filters.Months.filter', 'Months.filter', (['self.datetimes'], {'number': '(3)', 'now': 'self.now'}), '(self.datetimes, number=3, now=self.now)\n', (26445, 26485), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((26841, 26894), 'grandfatherson.filters.Months.filter', 'Months.filter', (['self.datetimes'], {'number': '(4)', 'now': 'self.now'}), '(self.datetimes, number=4, now=self.now)\n', (26854, 26894), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((27156, 27209), 'grandfatherson.filters.Months.filter', 'Months.filter', (['self.datetimes'], {'number': '(5)', 'now': 'self.now'}), '(self.datetimes, number=5, now=self.now)\n', (27169, 27209), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((27506, 27559), 'grandfatherson.filters.Months.filter', 'Months.filter', (['self.datetimes'], {'number': '(6)', 'now': 'self.now'}), '(self.datetimes, number=6, now=self.now)\n', (27519, 27559), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((27947, 27979), 'datetime.datetime', 'datetime', (['(2000)', '(1)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(2000, 1, 1, 0, 0, 0, 0)\n', (27955, 27979), False, 'from datetime import datetime, date\n'), ((27993, 28026), 'datetime.datetime', 'datetime', (['(1999)', '(12)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(1999, 12, 1, 0, 0, 0, 0)\n', (28001, 28026), False, 'from datetime import datetime, date\n'), ((28040, 28072), 'datetime.datetime', 'datetime', (['(1999)', '(1)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(1999, 1, 1, 0, 0, 0, 0)\n', (28048, 28072), False, 'from datetime import datetime, date\n'), ((28086, 28119), 'datetime.datetime', 'datetime', (['(1998)', '(12)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(1998, 12, 1, 0, 0, 0, 0)\n', (28094, 28119), False, 'from datetime import datetime, date\n'), ((28133, 28166), 'datetime.datetime', 'datetime', (['(1997)', '(12)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(1997, 12, 1, 0, 0, 0, 0)\n', (28141, 28166), False, 'from datetime import datetime, date\n'), ((28270, 28314), 'grandfatherson.filters.Months.filter', 'Months.filter', (['datetimes'], {'number': '(12)', 'now': 'now'}), '(datetimes, number=12, now=now)\n', (28283, 28314), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((28510, 28554), 'grandfatherson.filters.Months.filter', 'Months.filter', (['datetimes'], {'number': '(13)', 'now': 'now'}), '(datetimes, number=13, now=now)\n', (28523, 28554), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((28845, 28889), 'grandfatherson.filters.Months.filter', 'Months.filter', (['datetimes'], {'number': '(14)', 'now': 'now'}), '(datetimes, number=14, now=now)\n', (28858, 28889), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((29210, 29254), 'grandfatherson.filters.Months.filter', 'Months.filter', (['datetimes'], {'number': '(24)', 'now': 'now'}), '(datetimes, 
number=24, now=now)\n', (29223, 29254), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((29612, 29656), 'grandfatherson.filters.Months.filter', 'Months.filter', (['datetimes'], {'number': '(36)', 'now': 'now'}), '(datetimes, number=36, now=now)\n', (29625, 29656), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((30133, 30165), 'datetime.datetime', 'datetime', (['(2000)', '(1)', '(1)', '(1)', '(0)', '(0)', '(0)'], {}), '(2000, 1, 1, 1, 0, 0, 0)\n', (30141, 30165), False, 'from datetime import datetime, date\n'), ((30179, 30211), 'datetime.datetime', 'datetime', (['(2000)', '(1)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(2000, 1, 1, 0, 0, 0, 0)\n', (30187, 30211), False, 'from datetime import datetime, date\n'), ((30225, 30267), 'datetime.datetime', 'datetime', (['(1999)', '(12)', '(31)', '(23)', '(59)', '(59)', '(999999)'], {}), '(1999, 12, 31, 23, 59, 59, 999999)\n', (30233, 30267), False, 'from datetime import datetime, date\n'), ((30281, 30313), 'datetime.datetime', 'datetime', (['(1998)', '(1)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(1998, 1, 1, 0, 0, 0, 0)\n', (30289, 30313), False, 'from datetime import datetime, date\n'), ((30327, 30359), 'datetime.datetime', 'datetime', (['(1996)', '(1)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(1996, 1, 1, 0, 0, 0, 0)\n', (30335, 30359), False, 'from datetime import datetime, date\n'), ((30503, 30535), 'datetime.datetime', 'datetime', (['(1999)', '(1)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(1999, 1, 1, 0, 0, 0, 0)\n', (30511, 30535), False, 'from datetime import datetime, date\n'), ((30595, 30628), 'datetime.datetime', 'datetime', (['(2010)', '(1)', '(15)', '(0)', '(0)', '(0)', '(0)'], {}), '(2010, 1, 15, 0, 0, 0, 0)\n', (30603, 30628), False, 'from datetime import datetime, date\n'), ((30668, 30715), 'grandfatherson.filters.Years.filter', 'Years.filter', (['datetimes'], {'number': '(0)', 'now': 'self.now'}), '(datetimes, number=0, now=self.now)\n', (30680, 30715), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((30783, 30830), 'grandfatherson.filters.Years.filter', 'Years.filter', (['datetimes'], {'number': '(1)', 'now': 'self.now'}), '(datetimes, number=1, now=self.now)\n', (30795, 30830), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((31287, 31327), 'grandfatherson.filters.Years.filter', 'Years.filter', (['[]'], {'number': '(1)', 'now': 'self.now'}), '([], number=1, now=self.now)\n', (31299, 31327), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((31418, 31466), 'grandfatherson.filters.Years.filter', 'Years.filter', (['[self.now]'], {'number': '(0)', 'now': 'self.now'}), '([self.now], number=0, now=self.now)\n', (31430, 31466), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((31525, 31577), 'grandfatherson.filters.Years.filter', 'Years.filter', (['self.datetimes'], {'number': '(0)', 'now': 'self.now'}), '(self.datetimes, number=0, now=self.now)\n', (31537, 31577), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((31665, 31717), 'grandfatherson.filters.Years.filter', 'Years.filter', (['self.datetimes'], {'number': '(1)', 'now': 'self.now'}), '(self.datetimes, number=1, now=self.now)\n', (31677, 31717), False, 'from 
grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((31975, 32027), 'grandfatherson.filters.Years.filter', 'Years.filter', (['self.datetimes'], {'number': '(2)', 'now': 'self.now'}), '(self.datetimes, number=2, now=self.now)\n', (31987, 32027), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((32318, 32370), 'grandfatherson.filters.Years.filter', 'Years.filter', (['self.datetimes'], {'number': '(4)', 'now': 'self.now'}), '(self.datetimes, number=4, now=self.now)\n', (32330, 32370), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((32632, 32684), 'grandfatherson.filters.Years.filter', 'Years.filter', (['self.datetimes'], {'number': '(5)', 'now': 'self.now'}), '(self.datetimes, number=5, now=self.now)\n', (32644, 32684), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((32980, 33032), 'grandfatherson.filters.Years.filter', 'Years.filter', (['self.datetimes'], {'number': '(6)', 'now': 'self.now'}), '(self.datetimes, number=6, now=self.now)\n', (32992, 33032), False, 'from grandfatherson.filters import Seconds, Minutes, Hours, Days, Weeks, Months, Years, UTC\n'), ((776, 818), 'datetime.datetime', 'datetime', (['(1999)', '(12)', '(31)', '(23)', '(59)', '(59)', '(999999)'], {}), '(1999, 12, 31, 23, 59, 59, 999999)\n', (784, 818), False, 'from datetime import datetime, date\n'), ((5938, 5980), 'datetime.datetime', 'datetime', (['(1999)', '(12)', '(31)', '(23)', '(59)', '(59)', '(999999)'], {}), '(1999, 12, 31, 23, 59, 59, 999999)\n', (5946, 5980), False, 'from datetime import datetime, date\n'), ((9888, 9930), 'datetime.datetime', 'datetime', (['(1999)', '(12)', '(31)', '(23)', '(59)', '(59)', '(999999)'], {}), '(1999, 12, 31, 23, 59, 59, 999999)\n', (9896, 9930), False, 'from datetime import datetime, date\n'), ((13565, 13607), 'datetime.datetime', 'datetime', (['(1999)', '(12)', '(31)', '(23)', '(59)', '(59)', '(999999)'], {}), '(1999, 12, 31, 23, 59, 59, 999999)\n', (13573, 13607), False, 'from datetime import datetime, date\n'), ((24653, 24695), 'datetime.datetime', 'datetime', (['(1999)', '(12)', '(31)', '(23)', '(59)', '(59)', '(999999)'], {}), '(1999, 12, 31, 23, 59, 59, 999999)\n', (24661, 24695), False, 'from datetime import datetime, date\n'), ((30446, 30488), 'datetime.datetime', 'datetime', (['(1999)', '(12)', '(31)', '(23)', '(59)', '(59)', '(999999)'], {}), '(1999, 12, 31, 23, 59, 59, 999999)\n', (30454, 30488), False, 'from datetime import datetime, date\n'), ((2182, 2214), 'datetime.datetime', 'datetime', (['(2000)', '(1)', '(1)', '(0)', '(0)', '(1)', '(0)'], {}), '(2000, 1, 1, 0, 0, 1, 0)\n', (2190, 2214), False, 'from datetime import datetime, date\n'), ((2535, 2567), 'datetime.datetime', 'datetime', (['(2000)', '(1)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(2000, 1, 1, 0, 0, 0, 0)\n', (2543, 2567), False, 'from datetime import datetime, date\n'), ((2599, 2631), 'datetime.datetime', 'datetime', (['(2000)', '(1)', '(1)', '(0)', '(0)', '(1)', '(0)'], {}), '(2000, 1, 1, 0, 0, 1, 0)\n', (2607, 2631), False, 'from datetime import datetime, date\n'), ((2820, 2862), 'datetime.datetime', 'datetime', (['(1999)', '(12)', '(31)', '(23)', '(59)', '(59)', '(999999)'], {}), '(1999, 12, 31, 23, 59, 59, 999999)\n', (2828, 2862), False, 'from datetime import datetime, date\n'), ((2894, 2926), 'datetime.datetime', 'datetime', (['(2000)', '(1)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), 
'(2000, 1, 1, 0, 0, 0, 0)\n', (2902, 2926), False, 'from datetime import datetime, date\n'), ((2958, 2990), 'datetime.datetime', 'datetime', (['(2000)', '(1)', '(1)', '(0)', '(0)', '(1)', '(0)'], {}), '(2000, 1, 1, 0, 0, 1, 0)\n', (2966, 2990), False, 'from datetime import datetime, date\n'), ((3274, 3316), 'datetime.datetime', 'datetime', (['(1999)', '(12)', '(31)', '(23)', '(59)', '(59)', '(999999)'], {}), '(1999, 12, 31, 23, 59, 59, 999999)\n', (3282, 3316), False, 'from datetime import datetime, date\n'), ((3348, 3380), 'datetime.datetime', 'datetime', (['(2000)', '(1)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(2000, 1, 1, 0, 0, 0, 0)\n', (3356, 3380), False, 'from datetime import datetime, date\n'), ((3412, 3444), 'datetime.datetime', 'datetime', (['(2000)', '(1)', '(1)', '(0)', '(0)', '(1)', '(0)'], {}), '(2000, 1, 1, 0, 0, 1, 0)\n', (3420, 3444), False, 'from datetime import datetime, date\n'), ((3630, 3667), 'datetime.datetime', 'datetime', (['(1999)', '(12)', '(31)', '(23)', '(59)', '(57)', '(0)'], {}), '(1999, 12, 31, 23, 59, 57, 0)\n', (3638, 3667), False, 'from datetime import datetime, date\n'), ((3699, 3741), 'datetime.datetime', 'datetime', (['(1999)', '(12)', '(31)', '(23)', '(59)', '(59)', '(999999)'], {}), '(1999, 12, 31, 23, 59, 59, 999999)\n', (3707, 3741), False, 'from datetime import datetime, date\n'), ((3773, 3805), 'datetime.datetime', 'datetime', (['(2000)', '(1)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(2000, 1, 1, 0, 0, 0, 0)\n', (3781, 3805), False, 'from datetime import datetime, date\n'), ((3837, 3869), 'datetime.datetime', 'datetime', (['(2000)', '(1)', '(1)', '(0)', '(0)', '(1)', '(0)'], {}), '(2000, 1, 1, 0, 0, 1, 0)\n', (3845, 3869), False, 'from datetime import datetime, date\n'), ((4025, 4062), 'datetime.datetime', 'datetime', (['(1999)', '(12)', '(31)', '(23)', '(59)', '(57)', '(0)'], {}), '(1999, 12, 31, 23, 59, 57, 0)\n', (4033, 4062), False, 'from datetime import datetime, date\n'), ((4094, 4136), 'datetime.datetime', 'datetime', (['(1999)', '(12)', '(31)', '(23)', '(59)', '(59)', '(999999)'], {}), '(1999, 12, 31, 23, 59, 59, 999999)\n', (4102, 4136), False, 'from datetime import datetime, date\n'), ((4168, 4200), 'datetime.datetime', 'datetime', (['(2000)', '(1)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(2000, 1, 1, 0, 0, 0, 0)\n', (4176, 4200), False, 'from datetime import datetime, date\n'), ((4232, 4264), 'datetime.datetime', 'datetime', (['(2000)', '(1)', '(1)', '(0)', '(0)', '(1)', '(0)'], {}), '(2000, 1, 1, 0, 0, 1, 0)\n', (4240, 4264), False, 'from datetime import datetime, date\n'), ((7343, 7375), 'datetime.datetime', 'datetime', (['(2000)', '(1)', '(1)', '(0)', '(1)', '(0)', '(0)'], {}), '(2000, 1, 1, 0, 1, 0, 0)\n', (7351, 7375), False, 'from datetime import datetime, date\n'), ((7696, 7728), 'datetime.datetime', 'datetime', (['(2000)', '(1)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(2000, 1, 1, 0, 0, 0, 0)\n', (7704, 7728), False, 'from datetime import datetime, date\n'), ((7760, 7792), 'datetime.datetime', 'datetime', (['(2000)', '(1)', '(1)', '(0)', '(1)', '(0)', '(0)'], {}), '(2000, 1, 1, 0, 1, 0, 0)\n', (7768, 7792), False, 'from datetime import datetime, date\n'), ((7981, 8023), 'datetime.datetime', 'datetime', (['(1999)', '(12)', '(31)', '(23)', '(59)', '(59)', '(999999)'], {}), '(1999, 12, 31, 23, 59, 59, 999999)\n', (7989, 8023), False, 'from datetime import datetime, date\n'), ((8055, 8087), 'datetime.datetime', 'datetime', (['(2000)', '(1)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(2000, 1, 1, 0, 0, 0, 0)\n', (8063, 
8087), False, 'from datetime import datetime, date\n'), ((8119, 8151), 'datetime.datetime', 'datetime', (['(2000)', '(1)', '(1)', '(0)', '(1)', '(0)', '(0)'], {}), '(2000, 1, 1, 0, 1, 0, 0)\n', (8127, 8151), False, 'from datetime import datetime, date\n'), ((8434, 8476), 'datetime.datetime', 'datetime', (['(1999)', '(12)', '(31)', '(23)', '(59)', '(59)', '(999999)'], {}), '(1999, 12, 31, 23, 59, 59, 999999)\n', (8442, 8476), False, 'from datetime import datetime, date\n'), ((8508, 8540), 'datetime.datetime', 'datetime', (['(2000)', '(1)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(2000, 1, 1, 0, 0, 0, 0)\n', (8516, 8540), False, 'from datetime import datetime, date\n'), ((8572, 8604), 'datetime.datetime', 'datetime', (['(2000)', '(1)', '(1)', '(0)', '(1)', '(0)', '(0)'], {}), '(2000, 1, 1, 0, 1, 0, 0)\n', (8580, 8604), False, 'from datetime import datetime, date\n'), ((8790, 8826), 'datetime.datetime', 'datetime', (['(1999)', '(12)', '(31)', '(23)', '(57)', '(0)', '(0)'], {}), '(1999, 12, 31, 23, 57, 0, 0)\n', (8798, 8826), False, 'from datetime import datetime, date\n'), ((8858, 8900), 'datetime.datetime', 'datetime', (['(1999)', '(12)', '(31)', '(23)', '(59)', '(59)', '(999999)'], {}), '(1999, 12, 31, 23, 59, 59, 999999)\n', (8866, 8900), False, 'from datetime import datetime, date\n'), ((8932, 8964), 'datetime.datetime', 'datetime', (['(2000)', '(1)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(2000, 1, 1, 0, 0, 0, 0)\n', (8940, 8964), False, 'from datetime import datetime, date\n'), ((8996, 9028), 'datetime.datetime', 'datetime', (['(2000)', '(1)', '(1)', '(0)', '(1)', '(0)', '(0)'], {}), '(2000, 1, 1, 0, 1, 0, 0)\n', (9004, 9028), False, 'from datetime import datetime, date\n'), ((9183, 9219), 'datetime.datetime', 'datetime', (['(1999)', '(12)', '(31)', '(23)', '(57)', '(0)', '(0)'], {}), '(1999, 12, 31, 23, 57, 0, 0)\n', (9191, 9219), False, 'from datetime import datetime, date\n'), ((9250, 9292), 'datetime.datetime', 'datetime', (['(1999)', '(12)', '(31)', '(23)', '(59)', '(59)', '(999999)'], {}), '(1999, 12, 31, 23, 59, 59, 999999)\n', (9258, 9292), False, 'from datetime import datetime, date\n'), ((9323, 9355), 'datetime.datetime', 'datetime', (['(2000)', '(1)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(2000, 1, 1, 0, 0, 0, 0)\n', (9331, 9355), False, 'from datetime import datetime, date\n'), ((9386, 9418), 'datetime.datetime', 'datetime', (['(2000)', '(1)', '(1)', '(0)', '(1)', '(0)', '(0)'], {}), '(2000, 1, 1, 0, 1, 0, 0)\n', (9394, 9418), False, 'from datetime import datetime, date\n'), ((11194, 11226), 'datetime.datetime', 'datetime', (['(2000)', '(1)', '(1)', '(1)', '(0)', '(0)', '(0)'], {}), '(2000, 1, 1, 1, 0, 0, 0)\n', (11202, 11226), False, 'from datetime import datetime, date\n'), ((11541, 11573), 'datetime.datetime', 'datetime', (['(2000)', '(1)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(2000, 1, 1, 0, 0, 0, 0)\n', (11549, 11573), False, 'from datetime import datetime, date\n'), ((11605, 11637), 'datetime.datetime', 'datetime', (['(2000)', '(1)', '(1)', '(1)', '(0)', '(0)', '(0)'], {}), '(2000, 1, 1, 1, 0, 0, 0)\n', (11613, 11637), False, 'from datetime import datetime, date\n'), ((11784, 11826), 'datetime.datetime', 'datetime', (['(1999)', '(12)', '(31)', '(23)', '(59)', '(59)', '(999999)'], {}), '(1999, 12, 31, 23, 59, 59, 999999)\n', (11792, 11826), False, 'from datetime import datetime, date\n'), ((11858, 11890), 'datetime.datetime', 'datetime', (['(2000)', '(1)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(2000, 1, 1, 0, 0, 0, 0)\n', (11866, 11890), False, 'from 
datetime import datetime, date\n'), ((11922, 11954), 'datetime.datetime', 'datetime', (['(2000)', '(1)', '(1)', '(1)', '(0)', '(0)', '(0)'], {}), '(2000, 1, 1, 1, 0, 0, 0)\n', (11930, 11954), False, 'from datetime import datetime, date\n'), ((12194, 12236), 'datetime.datetime', 'datetime', (['(1999)', '(12)', '(31)', '(23)', '(59)', '(59)', '(999999)'], {}), '(1999, 12, 31, 23, 59, 59, 999999)\n', (12202, 12236), False, 'from datetime import datetime, date\n'), ((12268, 12300), 'datetime.datetime', 'datetime', (['(2000)', '(1)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(2000, 1, 1, 0, 0, 0, 0)\n', (12276, 12300), False, 'from datetime import datetime, date\n'), ((12332, 12364), 'datetime.datetime', 'datetime', (['(2000)', '(1)', '(1)', '(1)', '(0)', '(0)', '(0)'], {}), '(2000, 1, 1, 1, 0, 0, 0)\n', (12340, 12364), False, 'from datetime import datetime, date\n'), ((12508, 12543), 'datetime.datetime', 'datetime', (['(1999)', '(12)', '(31)', '(21)', '(0)', '(0)', '(0)'], {}), '(1999, 12, 31, 21, 0, 0, 0)\n', (12516, 12543), False, 'from datetime import datetime, date\n'), ((12575, 12617), 'datetime.datetime', 'datetime', (['(1999)', '(12)', '(31)', '(23)', '(59)', '(59)', '(999999)'], {}), '(1999, 12, 31, 23, 59, 59, 999999)\n', (12583, 12617), False, 'from datetime import datetime, date\n'), ((12649, 12681), 'datetime.datetime', 'datetime', (['(2000)', '(1)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(2000, 1, 1, 0, 0, 0, 0)\n', (12657, 12681), False, 'from datetime import datetime, date\n'), ((12713, 12745), 'datetime.datetime', 'datetime', (['(2000)', '(1)', '(1)', '(1)', '(0)', '(0)', '(0)'], {}), '(2000, 1, 1, 1, 0, 0, 0)\n', (12721, 12745), False, 'from datetime import datetime, date\n'), ((12859, 12894), 'datetime.datetime', 'datetime', (['(1999)', '(12)', '(31)', '(21)', '(0)', '(0)', '(0)'], {}), '(1999, 12, 31, 21, 0, 0, 0)\n', (12867, 12894), False, 'from datetime import datetime, date\n'), ((12926, 12968), 'datetime.datetime', 'datetime', (['(1999)', '(12)', '(31)', '(23)', '(59)', '(59)', '(999999)'], {}), '(1999, 12, 31, 23, 59, 59, 999999)\n', (12934, 12968), False, 'from datetime import datetime, date\n'), ((13000, 13032), 'datetime.datetime', 'datetime', (['(2000)', '(1)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(2000, 1, 1, 0, 0, 0, 0)\n', (13008, 13032), False, 'from datetime import datetime, date\n'), ((13064, 13096), 'datetime.datetime', 'datetime', (['(2000)', '(1)', '(1)', '(1)', '(0)', '(0)', '(0)'], {}), '(2000, 1, 1, 1, 0, 0, 0)\n', (13072, 13096), False, 'from datetime import datetime, date\n'), ((14861, 14893), 'datetime.datetime', 'datetime', (['(2000)', '(1)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(2000, 1, 1, 0, 0, 0, 0)\n', (14869, 14893), False, 'from datetime import datetime, date\n'), ((15168, 15210), 'datetime.datetime', 'datetime', (['(1999)', '(12)', '(31)', '(23)', '(59)', '(59)', '(999999)'], {}), '(1999, 12, 31, 23, 59, 59, 999999)\n', (15176, 15210), False, 'from datetime import datetime, date\n'), ((15242, 15274), 'datetime.datetime', 'datetime', (['(2000)', '(1)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(2000, 1, 1, 0, 0, 0, 0)\n', (15250, 15274), False, 'from datetime import datetime, date\n'), ((15512, 15546), 'datetime.datetime', 'datetime', (['(1999)', '(12)', '(30)', '(0)', '(0)', '(0)', '(0)'], {}), '(1999, 12, 30, 0, 0, 0, 0)\n', (15520, 15546), False, 'from datetime import datetime, date\n'), ((15578, 15620), 'datetime.datetime', 'datetime', (['(1999)', '(12)', '(31)', '(23)', '(59)', '(59)', '(999999)'], {}), '(1999, 12, 31, 23, 
59, 59, 999999)\n', (15586, 15620), False, 'from datetime import datetime, date\n'), ((15652, 15684), 'datetime.datetime', 'datetime', (['(2000)', '(1)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(2000, 1, 1, 0, 0, 0, 0)\n', (15660, 15684), False, 'from datetime import datetime, date\n'), ((15827, 15861), 'datetime.datetime', 'datetime', (['(1999)', '(12)', '(28)', '(0)', '(0)', '(0)', '(0)'], {}), '(1999, 12, 28, 0, 0, 0, 0)\n', (15835, 15861), False, 'from datetime import datetime, date\n'), ((15893, 15927), 'datetime.datetime', 'datetime', (['(1999)', '(12)', '(30)', '(0)', '(0)', '(0)', '(0)'], {}), '(1999, 12, 30, 0, 0, 0, 0)\n', (15901, 15927), False, 'from datetime import datetime, date\n'), ((15959, 16001), 'datetime.datetime', 'datetime', (['(1999)', '(12)', '(31)', '(23)', '(59)', '(59)', '(999999)'], {}), '(1999, 12, 31, 23, 59, 59, 999999)\n', (15967, 16001), False, 'from datetime import datetime, date\n'), ((16033, 16065), 'datetime.datetime', 'datetime', (['(2000)', '(1)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(2000, 1, 1, 0, 0, 0, 0)\n', (16041, 16065), False, 'from datetime import datetime, date\n'), ((16178, 16212), 'datetime.datetime', 'datetime', (['(1999)', '(12)', '(28)', '(0)', '(0)', '(0)', '(0)'], {}), '(1999, 12, 28, 0, 0, 0, 0)\n', (16186, 16212), False, 'from datetime import datetime, date\n'), ((16244, 16278), 'datetime.datetime', 'datetime', (['(1999)', '(12)', '(30)', '(0)', '(0)', '(0)', '(0)'], {}), '(1999, 12, 30, 0, 0, 0, 0)\n', (16252, 16278), False, 'from datetime import datetime, date\n'), ((16310, 16352), 'datetime.datetime', 'datetime', (['(1999)', '(12)', '(31)', '(23)', '(59)', '(59)', '(999999)'], {}), '(1999, 12, 31, 23, 59, 59, 999999)\n', (16318, 16352), False, 'from datetime import datetime, date\n'), ((16384, 16416), 'datetime.datetime', 'datetime', (['(2000)', '(1)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(2000, 1, 1, 0, 0, 0, 0)\n', (16392, 16416), False, 'from datetime import datetime, date\n'), ((16886, 16918), 'datetime.datetime', 'datetime', (['(2004)', '(3)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(2004, 3, 1, 0, 0, 0, 0)\n', (16894, 16918), False, 'from datetime import datetime, date\n'), ((17026, 17059), 'datetime.datetime', 'datetime', (['(2004)', '(2)', '(29)', '(0)', '(0)', '(0)', '(0)'], {}), '(2004, 2, 29, 0, 0, 0, 0)\n', (17034, 17059), False, 'from datetime import datetime, date\n'), ((17091, 17123), 'datetime.datetime', 'datetime', (['(2004)', '(3)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(2004, 3, 1, 0, 0, 0, 0)\n', (17099, 17123), False, 'from datetime import datetime, date\n'), ((17231, 17264), 'datetime.datetime', 'datetime', (['(2004)', '(2)', '(28)', '(0)', '(0)', '(0)', '(0)'], {}), '(2004, 2, 28, 0, 0, 0, 0)\n', (17239, 17264), False, 'from datetime import datetime, date\n'), ((17296, 17329), 'datetime.datetime', 'datetime', (['(2004)', '(2)', '(29)', '(0)', '(0)', '(0)', '(0)'], {}), '(2004, 2, 29, 0, 0, 0, 0)\n', (17304, 17329), False, 'from datetime import datetime, date\n'), ((17361, 17393), 'datetime.datetime', 'datetime', (['(2004)', '(3)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(2004, 3, 1, 0, 0, 0, 0)\n', (17369, 17393), False, 'from datetime import datetime, date\n'), ((17827, 17859), 'datetime.datetime', 'datetime', (['(1900)', '(3)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(1900, 3, 1, 0, 0, 0, 0)\n', (17835, 17859), False, 'from datetime import datetime, date\n'), ((17967, 18000), 'datetime.datetime', 'datetime', (['(1900)', '(2)', '(28)', '(0)', '(0)', '(0)', '(0)'], {}), '(1900, 2, 28, 
0, 0, 0, 0)\n', (17975, 18000), False, 'from datetime import datetime, date\n'), ((18032, 18064), 'datetime.datetime', 'datetime', (['(1900)', '(3)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(1900, 3, 1, 0, 0, 0, 0)\n', (18040, 18064), False, 'from datetime import datetime, date\n'), ((18172, 18205), 'datetime.datetime', 'datetime', (['(1900)', '(2)', '(27)', '(0)', '(0)', '(0)', '(0)'], {}), '(1900, 2, 27, 0, 0, 0, 0)\n', (18180, 18205), False, 'from datetime import datetime, date\n'), ((18237, 18270), 'datetime.datetime', 'datetime', (['(1900)', '(2)', '(28)', '(0)', '(0)', '(0)', '(0)'], {}), '(1900, 2, 28, 0, 0, 0, 0)\n', (18245, 18270), False, 'from datetime import datetime, date\n'), ((18302, 18334), 'datetime.datetime', 'datetime', (['(1900)', '(3)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(1900, 3, 1, 0, 0, 0, 0)\n', (18310, 18334), False, 'from datetime import datetime, date\n'), ((19243, 19277), 'datetime.datetime', 'datetime', (['(1999)', '(12)', '(28)', '(0)', '(0)', '(0)', '(0)'], {}), '(1999, 12, 28, 0, 0, 0, 0)\n', (19251, 19277), False, 'from datetime import datetime, date\n'), ((19309, 19343), 'datetime.datetime', 'datetime', (['(1999)', '(12)', '(30)', '(0)', '(0)', '(0)', '(0)'], {}), '(1999, 12, 30, 0, 0, 0, 0)\n', (19317, 19343), False, 'from datetime import datetime, date\n'), ((19375, 19417), 'datetime.datetime', 'datetime', (['(1999)', '(12)', '(31)', '(23)', '(59)', '(59)', '(999999)'], {}), '(1999, 12, 31, 23, 59, 59, 999999)\n', (19383, 19417), False, 'from datetime import datetime, date\n'), ((19449, 19481), 'datetime.datetime', 'datetime', (['(2000)', '(1)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(2000, 1, 1, 0, 0, 0, 0)\n', (19457, 19481), False, 'from datetime import datetime, date\n'), ((21887, 21919), 'datetime.datetime', 'datetime', (['(2000)', '(1)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(2000, 1, 1, 0, 0, 0, 0)\n', (21895, 21919), False, 'from datetime import datetime, date\n'), ((22195, 22237), 'datetime.datetime', 'datetime', (['(1999)', '(12)', '(31)', '(23)', '(59)', '(59)', '(999999)'], {}), '(1999, 12, 31, 23, 59, 59, 999999)\n', (22203, 22237), False, 'from datetime import datetime, date\n'), ((22269, 22301), 'datetime.datetime', 'datetime', (['(2000)', '(1)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(2000, 1, 1, 0, 0, 0, 0)\n', (22277, 22301), False, 'from datetime import datetime, date\n'), ((22539, 22573), 'datetime.datetime', 'datetime', (['(1999)', '(12)', '(18)', '(0)', '(0)', '(0)', '(0)'], {}), '(1999, 12, 18, 0, 0, 0, 0)\n', (22547, 22573), False, 'from datetime import datetime, date\n'), ((22605, 22647), 'datetime.datetime', 'datetime', (['(1999)', '(12)', '(31)', '(23)', '(59)', '(59)', '(999999)'], {}), '(1999, 12, 31, 23, 59, 59, 999999)\n', (22613, 22647), False, 'from datetime import datetime, date\n'), ((22679, 22711), 'datetime.datetime', 'datetime', (['(2000)', '(1)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(2000, 1, 1, 0, 0, 0, 0)\n', (22687, 22711), False, 'from datetime import datetime, date\n'), ((22855, 22888), 'datetime.datetime', 'datetime', (['(1999)', '(12)', '(4)', '(0)', '(0)', '(0)', '(0)'], {}), '(1999, 12, 4, 0, 0, 0, 0)\n', (22863, 22888), False, 'from datetime import datetime, date\n'), ((22920, 22954), 'datetime.datetime', 'datetime', (['(1999)', '(12)', '(18)', '(0)', '(0)', '(0)', '(0)'], {}), '(1999, 12, 18, 0, 0, 0, 0)\n', (22928, 22954), False, 'from datetime import datetime, date\n'), ((22986, 23028), 'datetime.datetime', 'datetime', (['(1999)', '(12)', '(31)', '(23)', '(59)', '(59)', 
'(999999)'], {}), '(1999, 12, 31, 23, 59, 59, 999999)\n', (22994, 23028), False, 'from datetime import datetime, date\n'), ((23060, 23092), 'datetime.datetime', 'datetime', (['(2000)', '(1)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(2000, 1, 1, 0, 0, 0, 0)\n', (23068, 23092), False, 'from datetime import datetime, date\n'), ((23206, 23239), 'datetime.datetime', 'datetime', (['(1999)', '(12)', '(4)', '(0)', '(0)', '(0)', '(0)'], {}), '(1999, 12, 4, 0, 0, 0, 0)\n', (23214, 23239), False, 'from datetime import datetime, date\n'), ((23271, 23305), 'datetime.datetime', 'datetime', (['(1999)', '(12)', '(18)', '(0)', '(0)', '(0)', '(0)'], {}), '(1999, 12, 18, 0, 0, 0, 0)\n', (23279, 23305), False, 'from datetime import datetime, date\n'), ((23337, 23379), 'datetime.datetime', 'datetime', (['(1999)', '(12)', '(31)', '(23)', '(59)', '(59)', '(999999)'], {}), '(1999, 12, 31, 23, 59, 59, 999999)\n', (23345, 23379), False, 'from datetime import datetime, date\n'), ((23411, 23443), 'datetime.datetime', 'datetime', (['(2000)', '(1)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(2000, 1, 1, 0, 0, 0, 0)\n', (23419, 23443), False, 'from datetime import datetime, date\n'), ((23646, 23680), 'datetime.datetime', 'datetime', (['(1999)', '(12)', '(18)', '(0)', '(0)', '(0)', '(0)'], {}), '(1999, 12, 18, 0, 0, 0, 0)\n', (23654, 23680), False, 'from datetime import datetime, date\n'), ((23699, 23741), 'datetime.datetime', 'datetime', (['(1999)', '(12)', '(31)', '(23)', '(59)', '(59)', '(999999)'], {}), '(1999, 12, 31, 23, 59, 59, 999999)\n', (23707, 23741), False, 'from datetime import datetime, date\n'), ((24027, 24061), 'datetime.datetime', 'datetime', (['(1999)', '(12)', '(18)', '(0)', '(0)', '(0)', '(0)'], {}), '(1999, 12, 18, 0, 0, 0, 0)\n', (24035, 24061), False, 'from datetime import datetime, date\n'), ((24080, 24113), 'datetime.datetime', 'datetime', (['(1999)', '(12)', '(4)', '(0)', '(0)', '(0)', '(0)'], {}), '(1999, 12, 4, 0, 0, 0, 0)\n', (24088, 24113), False, 'from datetime import datetime, date\n'), ((24132, 24174), 'datetime.datetime', 'datetime', (['(1999)', '(12)', '(31)', '(23)', '(59)', '(59)', '(999999)'], {}), '(1999, 12, 31, 23, 59, 59, 999999)\n', (24140, 24174), False, 'from datetime import datetime, date\n'), ((25966, 25998), 'datetime.datetime', 'datetime', (['(2000)', '(2)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(2000, 2, 1, 0, 0, 0, 0)\n', (25974, 25998), False, 'from datetime import datetime, date\n'), ((26277, 26309), 'datetime.datetime', 'datetime', (['(2000)', '(1)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(2000, 1, 1, 0, 0, 0, 0)\n', (26285, 26309), False, 'from datetime import datetime, date\n'), ((26341, 26373), 'datetime.datetime', 'datetime', (['(2000)', '(2)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(2000, 2, 1, 0, 0, 0, 0)\n', (26349, 26373), False, 'from datetime import datetime, date\n'), ((26517, 26559), 'datetime.datetime', 'datetime', (['(1999)', '(12)', '(31)', '(23)', '(59)', '(59)', '(999999)'], {}), '(1999, 12, 31, 23, 59, 59, 999999)\n', (26525, 26559), False, 'from datetime import datetime, date\n'), ((26591, 26623), 'datetime.datetime', 'datetime', (['(2000)', '(1)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(2000, 1, 1, 0, 0, 0, 0)\n', (26599, 26623), False, 'from datetime import datetime, date\n'), ((26655, 26687), 'datetime.datetime', 'datetime', (['(2000)', '(2)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(2000, 2, 1, 0, 0, 0, 0)\n', (26663, 26687), False, 'from datetime import datetime, date\n'), ((26926, 26968), 'datetime.datetime', 'datetime', 
(['(1999)', '(12)', '(31)', '(23)', '(59)', '(59)', '(999999)'], {}), '(1999, 12, 31, 23, 59, 59, 999999)\n', (26934, 26968), False, 'from datetime import datetime, date\n'), ((27000, 27032), 'datetime.datetime', 'datetime', (['(2000)', '(1)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(2000, 1, 1, 0, 0, 0, 0)\n', (27008, 27032), False, 'from datetime import datetime, date\n'), ((27064, 27096), 'datetime.datetime', 'datetime', (['(2000)', '(2)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(2000, 2, 1, 0, 0, 0, 0)\n', (27072, 27096), False, 'from datetime import datetime, date\n'), ((27241, 27274), 'datetime.datetime', 'datetime', (['(1999)', '(10)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(1999, 10, 1, 0, 0, 0, 0)\n', (27249, 27274), False, 'from datetime import datetime, date\n'), ((27306, 27348), 'datetime.datetime', 'datetime', (['(1999)', '(12)', '(31)', '(23)', '(59)', '(59)', '(999999)'], {}), '(1999, 12, 31, 23, 59, 59, 999999)\n', (27314, 27348), False, 'from datetime import datetime, date\n'), ((27380, 27412), 'datetime.datetime', 'datetime', (['(2000)', '(1)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(2000, 1, 1, 0, 0, 0, 0)\n', (27388, 27412), False, 'from datetime import datetime, date\n'), ((27444, 27476), 'datetime.datetime', 'datetime', (['(2000)', '(2)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(2000, 2, 1, 0, 0, 0, 0)\n', (27452, 27476), False, 'from datetime import datetime, date\n'), ((27591, 27624), 'datetime.datetime', 'datetime', (['(1999)', '(10)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(1999, 10, 1, 0, 0, 0, 0)\n', (27599, 27624), False, 'from datetime import datetime, date\n'), ((27656, 27698), 'datetime.datetime', 'datetime', (['(1999)', '(12)', '(31)', '(23)', '(59)', '(59)', '(999999)'], {}), '(1999, 12, 31, 23, 59, 59, 999999)\n', (27664, 27698), False, 'from datetime import datetime, date\n'), ((27730, 27762), 'datetime.datetime', 'datetime', (['(2000)', '(1)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(2000, 1, 1, 0, 0, 0, 0)\n', (27738, 27762), False, 'from datetime import datetime, date\n'), ((27794, 27826), 'datetime.datetime', 'datetime', (['(2000)', '(2)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(2000, 2, 1, 0, 0, 0, 0)\n', (27802, 27826), False, 'from datetime import datetime, date\n'), ((28346, 28379), 'datetime.datetime', 'datetime', (['(1999)', '(12)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(1999, 12, 1, 0, 0, 0, 0)\n', (28354, 28379), False, 'from datetime import datetime, date\n'), ((28411, 28443), 'datetime.datetime', 'datetime', (['(2000)', '(1)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(2000, 1, 1, 0, 0, 0, 0)\n', (28419, 28443), False, 'from datetime import datetime, date\n'), ((28586, 28618), 'datetime.datetime', 'datetime', (['(1999)', '(1)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(1999, 1, 1, 0, 0, 0, 0)\n', (28594, 28618), False, 'from datetime import datetime, date\n'), ((28650, 28683), 'datetime.datetime', 'datetime', (['(1999)', '(12)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(1999, 12, 1, 0, 0, 0, 0)\n', (28658, 28683), False, 'from datetime import datetime, date\n'), ((28715, 28747), 'datetime.datetime', 'datetime', (['(2000)', '(1)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(2000, 1, 1, 0, 0, 0, 0)\n', (28723, 28747), False, 'from datetime import datetime, date\n'), ((28921, 28954), 'datetime.datetime', 'datetime', (['(1998)', '(12)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(1998, 12, 1, 0, 0, 0, 0)\n', (28929, 28954), False, 'from datetime import datetime, date\n'), ((28986, 29018), 'datetime.datetime', 'datetime', 
(['(1999)', '(1)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(1999, 1, 1, 0, 0, 0, 0)\n', (28994, 29018), False, 'from datetime import datetime, date\n'), ((29050, 29083), 'datetime.datetime', 'datetime', (['(1999)', '(12)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(1999, 12, 1, 0, 0, 0, 0)\n', (29058, 29083), False, 'from datetime import datetime, date\n'), ((29115, 29147), 'datetime.datetime', 'datetime', (['(2000)', '(1)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(2000, 1, 1, 0, 0, 0, 0)\n', (29123, 29147), False, 'from datetime import datetime, date\n'), ((29286, 29319), 'datetime.datetime', 'datetime', (['(1998)', '(12)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(1998, 12, 1, 0, 0, 0, 0)\n', (29294, 29319), False, 'from datetime import datetime, date\n'), ((29351, 29383), 'datetime.datetime', 'datetime', (['(1999)', '(1)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(1999, 1, 1, 0, 0, 0, 0)\n', (29359, 29383), False, 'from datetime import datetime, date\n'), ((29415, 29448), 'datetime.datetime', 'datetime', (['(1999)', '(12)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(1999, 12, 1, 0, 0, 0, 0)\n', (29423, 29448), False, 'from datetime import datetime, date\n'), ((29480, 29512), 'datetime.datetime', 'datetime', (['(2000)', '(1)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(2000, 1, 1, 0, 0, 0, 0)\n', (29488, 29512), False, 'from datetime import datetime, date\n'), ((29688, 29721), 'datetime.datetime', 'datetime', (['(1997)', '(12)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(1997, 12, 1, 0, 0, 0, 0)\n', (29696, 29721), False, 'from datetime import datetime, date\n'), ((29753, 29786), 'datetime.datetime', 'datetime', (['(1998)', '(12)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(1998, 12, 1, 0, 0, 0, 0)\n', (29761, 29786), False, 'from datetime import datetime, date\n'), ((29818, 29850), 'datetime.datetime', 'datetime', (['(1999)', '(1)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(1999, 1, 1, 0, 0, 0, 0)\n', (29826, 29850), False, 'from datetime import datetime, date\n'), ((29882, 29915), 'datetime.datetime', 'datetime', (['(1999)', '(12)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(1999, 12, 1, 0, 0, 0, 0)\n', (29890, 29915), False, 'from datetime import datetime, date\n'), ((29947, 29979), 'datetime.datetime', 'datetime', (['(2000)', '(1)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(2000, 1, 1, 0, 0, 0, 0)\n', (29955, 29979), False, 'from datetime import datetime, date\n'), ((31749, 31781), 'datetime.datetime', 'datetime', (['(2000)', '(1)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(2000, 1, 1, 0, 0, 0, 0)\n', (31757, 31781), False, 'from datetime import datetime, date\n'), ((32059, 32101), 'datetime.datetime', 'datetime', (['(1999)', '(12)', '(31)', '(23)', '(59)', '(59)', '(999999)'], {}), '(1999, 12, 31, 23, 59, 59, 999999)\n', (32067, 32101), False, 'from datetime import datetime, date\n'), ((32133, 32165), 'datetime.datetime', 'datetime', (['(2000)', '(1)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(2000, 1, 1, 0, 0, 0, 0)\n', (32141, 32165), False, 'from datetime import datetime, date\n'), ((32402, 32434), 'datetime.datetime', 'datetime', (['(1998)', '(1)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(1998, 1, 1, 0, 0, 0, 0)\n', (32410, 32434), False, 'from datetime import datetime, date\n'), ((32466, 32508), 'datetime.datetime', 'datetime', (['(1999)', '(12)', '(31)', '(23)', '(59)', '(59)', '(999999)'], {}), '(1999, 12, 31, 23, 59, 59, 999999)\n', (32474, 32508), False, 'from datetime import datetime, date\n'), ((32540, 32572), 'datetime.datetime', 'datetime', (['(2000)', 
'(1)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(2000, 1, 1, 0, 0, 0, 0)\n', (32548, 32572), False, 'from datetime import datetime, date\n'), ((32716, 32748), 'datetime.datetime', 'datetime', (['(1996)', '(1)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(1996, 1, 1, 0, 0, 0, 0)\n', (32724, 32748), False, 'from datetime import datetime, date\n'), ((32780, 32812), 'datetime.datetime', 'datetime', (['(1998)', '(1)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(1998, 1, 1, 0, 0, 0, 0)\n', (32788, 32812), False, 'from datetime import datetime, date\n'), ((32844, 32886), 'datetime.datetime', 'datetime', (['(1999)', '(12)', '(31)', '(23)', '(59)', '(59)', '(999999)'], {}), '(1999, 12, 31, 23, 59, 59, 999999)\n', (32852, 32886), False, 'from datetime import datetime, date\n'), ((32918, 32950), 'datetime.datetime', 'datetime', (['(2000)', '(1)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(2000, 1, 1, 0, 0, 0, 0)\n', (32926, 32950), False, 'from datetime import datetime, date\n'), ((33064, 33096), 'datetime.datetime', 'datetime', (['(1996)', '(1)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(1996, 1, 1, 0, 0, 0, 0)\n', (33072, 33096), False, 'from datetime import datetime, date\n'), ((33128, 33160), 'datetime.datetime', 'datetime', (['(1998)', '(1)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(1998, 1, 1, 0, 0, 0, 0)\n', (33136, 33160), False, 'from datetime import datetime, date\n'), ((33192, 33234), 'datetime.datetime', 'datetime', (['(1999)', '(12)', '(31)', '(23)', '(59)', '(59)', '(999999)'], {}), '(1999, 12, 31, 23, 59, 59, 999999)\n', (33200, 33234), False, 'from datetime import datetime, date\n'), ((33266, 33298), 'datetime.datetime', 'datetime', (['(2000)', '(1)', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(2000, 1, 1, 0, 0, 0, 0)\n', (33274, 33298), False, 'from datetime import datetime, date\n')]
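The extraction records above (the tail of the previous row) all point at datetime constructor calls clustered one step apart around boundary instants such as the 1999-to-2000 rollover. A small illustrative check, not taken from the original file, of the kind of boundary these fixtures encode:

from datetime import datetime, timedelta

# Consecutive boundary instants seen in the extraction records above.
before = datetime(1999, 12, 31, 23, 59, 59, 999999)
after = datetime(2000, 1, 1, 0, 0, 0, 0)

# One microsecond separates the last representable instant of 1999
# from the first instant of 2000.
assert after - before == timedelta(microseconds=1)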
""" Players! """ import random from readchar import readchar class Player(object): """ Base player object """ def next_move(self, game): """ Retrieve the next move for this player in this game """ raise NotImplementedError class HumanPlayer(Player): """ Human player input """ def next_move(self, game): return readchar().upper() class BotPlayer(Player): """ Base bot player """ def reproduce(self, allowed_moves, min_mutations, max_mutations): """ Create a new bot based on this bot """ raise NotImplementedError class PlannedBot(BotPlayer): """ Bot is "born" with a fixed set of moves """ def __init__(self, moves=None): self.moves = moves or [] self.games = {} def next_move(self, game): idx = self.games.get(game, 0) self.games[game] = idx + 1 if idx < len(self.moves): return self.moves[idx] # HACK return 'Q' def reproduce(self, allowed_moves, min_mutations, max_mutations): """ Create a new bot based on this bot """ mutations = random.randint(min_mutations, max_mutations) new_moves = self.moves[:] for _ in xrange(mutations): new_moves.append(random.choice(allowed_moves)) return self.__class__(new_moves) class NeatBot(BotPlayer): network_factory = GameNetwork() def __init__(self, network): self.games = {} self.network = network or self.network_factory() def next_move(self, game): moves = self.network.eval_game(game) # HACK preference = 'wasd' for move in preference: if move in moves: return move # HACK return 'Q' def reproduce(self, allowed_moves, min_mutations, max_mutations): mutations = random.randint(min_mutations, max_mutations) new_network = self.network.deep_copy() mutators = [ # FIXME: Add weights new_network.add_random_neuron, new_network.add_random_connection ] for _ in xrange(mutations): mutator = random.choice(mutators) mutator() return self.__class__(new_network)
[ "readchar.readchar", "random.choice", "random.randint" ]
[((1190, 1234), 'random.randint', 'random.randint', (['min_mutations', 'max_mutations'], {}), '(min_mutations, max_mutations)\n', (1204, 1234), False, 'import random\n'), ((1919, 1963), 'random.randint', 'random.randint', (['min_mutations', 'max_mutations'], {}), '(min_mutations, max_mutations)\n', (1933, 1963), False, 'import random\n'), ((2213, 2236), 'random.choice', 'random.choice', (['mutators'], {}), '(mutators)\n', (2226, 2236), False, 'import random\n'), ((387, 397), 'readchar.readchar', 'readchar', ([], {}), '()\n', (395, 397), False, 'from readchar import readchar\n'), ((1334, 1362), 'random.choice', 'random.choice', (['allowed_moves'], {}), '(allowed_moves)\n', (1347, 1362), False, 'import random\n')]
__author__ = 'Administrator'
import requests
import xlstr
import time


class Ticket:
    # Ticket information
    train_no = ''
    station_train_code = ''  # train number, e.g. K540
    from_station_telecode = ''
    from_station_name = ''
    to_station_telecode = ''
    to_station_name = ''
    yp_info = ''  # unknown information
    location_code = ''
    secret_str = ''
    start_train_date = ''  # travel date, e.g. 20140127

    # Boarding information
    train_date = ''
    train_date_utc = ''
    seat_type = ''

    def __init__(self, ticket_obj, buy_type):
        self.train_no = ticket_obj['queryLeftNewDTO']['train_no']
        self.from_station_telecode = ticket_obj['queryLeftNewDTO']['from_station_telecode']
        self.from_station_name = ticket_obj['queryLeftNewDTO']['from_station_name']
        self.to_station_telecode = ticket_obj['queryLeftNewDTO']['to_station_telecode']
        self.to_station_name = ticket_obj['queryLeftNewDTO']['to_station_name']
        self.yp_info = ticket_obj['queryLeftNewDTO']['yp_info']
        self.start_train_date = ticket_obj['queryLeftNewDTO']['start_train_date']
        self.location_code = ticket_obj['queryLeftNewDTO']['location_code']
        self.secret_str = ticket_obj['secretStr']
        self.station_train_code = ticket_obj['queryLeftNewDTO']['station_train_code']

        trainTime = time.strptime(self.start_train_date, '%Y%m%d')
        self.train_date = time.strftime('%Y-%m-%d', trainTime)
        self.train_date_utc = time.strftime('%a %b %d %H:%M:%S UTC+0800 %Y', trainTime)
        self.seat_type = buy_type

# Seat type codes; the values are kept as the original Chinese labels:
# M = first class seat, O = second class seat, 4 = soft sleeper,
# 3 = hard sleeper, 1 = hard seat.
SeatType = {'M': '一等座', 'O': '二等座', '4': '软卧', '3': '硬卧', '1': '硬座'}
[ "time.strptime", "time.strftime" ]
[((1287, 1333), 'time.strptime', 'time.strptime', (['self.start_train_date', '"""%Y%m%d"""'], {}), "(self.start_train_date, '%Y%m%d')\n", (1300, 1333), False, 'import time\n'), ((1360, 1396), 'time.strftime', 'time.strftime', (['"""%Y-%m-%d"""', 'trainTime'], {}), "('%Y-%m-%d', trainTime)\n", (1373, 1396), False, 'import time\n'), ((1425, 1482), 'time.strftime', 'time.strftime', (['"""%a %b %d %H:%M:%S UTC+0800 %Y"""', 'trainTime'], {}), "('%a %b %d %H:%M:%S UTC+0800 %Y', trainTime)\n", (1438, 1482), False, 'import time\n')]
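The date handling in Ticket.__init__ can be checked in isolation; this snippet uses only the standard library and a hard-coded sample date rather than a real booking response:

import time

t = time.strptime('20140127', '%Y%m%d')
assert time.strftime('%Y-%m-%d', t) == '2014-01-27'
# The UTC+0800 string mirrors the format the booking flow appears to expect,
# e.g. 'Mon Jan 27 00:00:00 UTC+0800 2014'.
print(time.strftime('%a %b %d %H:%M:%S UTC+0800 %Y', t))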
import mock from unittest import TestCase from ncssl_api_client.config.api import settings try: reload # Python 2.7 except NameError: try: from importlib import reload # Python 3.4+ except ImportError: from imp import reload # Python 3.0 - 3.3 try: __import__('__builtin__') open_reference = "__builtin__.open" # Python 2.7 except ImportError: open_reference = "builtins.open" # Python 3.x DATA = 'API_SETTINGS_TEST: TEST' class ApiSettingsLoadingTest(TestCase): @mock.patch(open_reference, mock.mock_open(read_data=DATA)) def test_update_locals(self): reload(settings) self.assertEqual(settings.API_SETTINGS_TEST, 'TEST')
[ "imp.reload", "mock.mock_open" ]
[((619, 635), 'imp.reload', 'reload', (['settings'], {}), '(settings)\n', (625, 635), False, 'from imp import reload\n'), ((545, 575), 'mock.mock_open', 'mock.mock_open', ([], {'read_data': 'DATA'}), '(read_data=DATA)\n', (559, 575), False, 'import mock\n')]
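The same mock_open pattern can be exercised outside the settings module; a minimal standalone sketch, assuming Python 3 (module and file names here are made up):

import mock

DATA = 'FOO: BAR'

with mock.patch('builtins.open', mock.mock_open(read_data=DATA)):
    with open('any-path.cfg') as fh:  # never touches the filesystem
        key, _, value = fh.read().partition(': ')

assert (key, value) == ('FOO', 'BAR')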
import noisereduce as nr def remove_noise(data, rate): return nr.reduce_noise(y=data, sr=rate, thresh_n_mult_nonstationary=2, stationary=False)
[ "noisereduce.reduce_noise" ]
[((68, 154), 'noisereduce.reduce_noise', 'nr.reduce_noise', ([], {'y': 'data', 'sr': 'rate', 'thresh_n_mult_nonstationary': '(2)', 'stationary': '(False)'}), '(y=data, sr=rate, thresh_n_mult_nonstationary=2, stationary=\n False)\n', (83, 154), True, 'import noisereduce as nr\n')]
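A hedged usage sketch for remove_noise above; the sample rate and synthetic signal are arbitrary, and noisereduce should return an array with the same shape as its input:

import numpy as np

rate = 22050  # arbitrary sample rate; one second of synthetic noise
data = np.random.default_rng(0).normal(size=rate).astype(np.float32)

cleaned = remove_noise(data, rate)
assert cleaned.shape == data.shape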
# Copyright 2017 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ------------------------------------------------------------------------------

from sawtooth_validator.journal.block_wrapper import NULL_BLOCK_IDENTIFIER
from sawtooth_validator.protobuf.block_pb2 import BlockHeader
from sawtooth_validator.protobuf.transaction_pb2 import TransactionHeader


class MissingDependency(Exception):
    def __init__(self, txn_id):
        super().__init__("Missing dependency: {}".format(txn_id))
        self.transaction_id = txn_id


class DuplicateTransaction(Exception):
    def __init__(self, txn_id):
        super().__init__("Duplicate transaction: {}".format(txn_id))
        self.transaction_id = txn_id


class DuplicateBatch(Exception):
    def __init__(self, batch_id):
        super().__init__("Duplicate batch: {}".format(batch_id))
        self.batch_id = batch_id


class BlockStoreUpdated(Exception):
    pass


class ChainCommitState:
    """Checking to see if a batch or transaction in a block has already
    been committed is somewhat difficult because of the presence of forks.
    While the block store is the definitive source for all batches and
    transactions that have been committed on the current chain, validation
    of blocks on another fork requires determining what blocks would
    actually be in the chain if that block were to be committed and only
    checking the batches and transactions contained within.
    ChainCommitState abstracts this process.
    """

    def __init__(self, head_id, block_manager, block_store):
        """The constructor should be passed the previous block id of the
        block being validated."""
        uncommitted_block_ids = list()
        uncommitted_batch_ids = set()
        uncommitted_txn_ids = set()

        # Find the most recent ancestor of this block that is in the block
        # store. Batches and transactions that are in a block that is in the
        # block store and that has a greater block number than this block must
        # be ignored.
        if head_id != NULL_BLOCK_IDENTIFIER:
            head = next(block_manager.get([head_id]))
            ancestor = head
            while ancestor.header_signature not in block_store:
                # For every block not in the block store, we need to track all
                # its batch ids and transaction ids separately to ensure there
                # are no duplicates.
                for batch in ancestor.batches:
                    uncommitted_batch_ids.add(batch.header_signature)
                    for txn in batch.transactions:
                        uncommitted_txn_ids.add(txn.header_signature)
                uncommitted_block_ids.append(ancestor.header_signature)

                ancestor_header = BlockHeader()
                ancestor_header.ParseFromString(ancestor.header)
                previous_block_id = ancestor_header.previous_block_id
                if previous_block_id == NULL_BLOCK_IDENTIFIER:
                    break
                ancestor = next(block_manager.get([previous_block_id]))
        else:
            ancestor = None

        self.block_store = block_store

        ancestor_header = None
        if ancestor:
            ancestor_header = BlockHeader()
            ancestor_header.ParseFromString(ancestor.header)

        self.common_ancestor = ancestor_header

        self.uncommitted_block_ids = uncommitted_block_ids
        self.uncommitted_batch_ids = uncommitted_batch_ids
        self.uncommitted_txn_ids = uncommitted_txn_ids

    def _block_in_chain(self, block):
        if self.common_ancestor is not None:
            return block.block_num <= self.common_ancestor.block_num
        return False

    @staticmethod
    def _check_for_duplicates_within(key_fn, items):
        """Checks that for any two items in `items`, calling `key_fn` on both
        does not return equal values."""
        for i, item_i in enumerate(items):
            for item_j in items[i + 1:]:
                if key_fn(item_i) == key_fn(item_j):
                    return key_fn(item_i)
        return None

    def check_for_duplicate_transactions(self, transactions):
        """Check that none of the transactions passed in have already been
        committed in the chain. Also checks that the list of transactions
        passed contains no duplicates."""
        # Same as for batches
        duplicate = self._check_for_duplicates_within(
            lambda txn: txn.header_signature, transactions)
        if duplicate is not None:
            raise DuplicateTransaction(duplicate)

        for txn in transactions:
            txn_id = txn.header_signature
            if txn_id in self.uncommitted_txn_ids:
                raise DuplicateTransaction(txn_id)
            if self.block_store.has_transaction(txn_id):
                try:
                    committed_block =\
                        self.block_store.get_block_by_transaction_id(txn_id)
                except ValueError:
                    raise BlockStoreUpdated(
                        "The BlockStore updated while checking for duplicate"
                        " transactions."
                    )
                if self._block_in_chain(committed_block):
                    raise DuplicateTransaction(txn_id)

    def check_for_duplicate_batches(self, batches):
        """Check that none of the batches passed in have already been
        committed in the chain. Also checks that the list of batches passed
        contains no duplicates."""
        # Check for duplicates within the given list
        duplicate = self._check_for_duplicates_within(
            lambda batch: batch.header_signature, batches)
        if duplicate is not None:
            raise DuplicateBatch(duplicate)

        for batch in batches:
            batch_id = batch.header_signature

            # Make sure the batch isn't in one of the uncommitted blocks
            if batch_id in self.uncommitted_batch_ids:
                raise DuplicateBatch(batch_id)

            # Check if the batch is in one of the committed blocks
            if self.block_store.has_batch(batch_id):
                try:
                    committed_block =\
                        self.block_store.get_block_by_batch_id(batch_id)
                except ValueError:
                    raise BlockStoreUpdated(
                        "The BlockStore updated while checking for duplicate"
                        " batches."
                    )

                # This is only a duplicate batch if the batch is in a block
                # that would stay committed if this block were committed. This
                # is equivalent to asking if the number of the block that this
                # batch is in is less than or equal to the number of the common
                # ancestor block.
                if self._block_in_chain(committed_block):
                    raise DuplicateBatch(batch_id)

    def check_for_transaction_dependencies(self, transactions):
        """Check that all explicit dependencies in all transactions passed
        have been satisfied."""
        dependencies = []
        txn_ids = []
        for txn in transactions:
            txn_ids.append(txn.header_signature)
            txn_hdr = TransactionHeader()
            txn_hdr.ParseFromString(txn.header)
            dependencies.extend(txn_hdr.dependencies)

        for dep in dependencies:
            # Check for dependency within the given block's batches
            if dep in txn_ids:
                continue

            # Check for dependency in the uncommitted blocks
            if dep in self.uncommitted_txn_ids:
                continue

            # Check for dependency in the committed blocks
            if self.block_store.has_transaction(dep):
                committed_block =\
                    self.block_store.get_block_by_transaction_id(dep)

                # Make sure the block wouldn't be uncommitted if the given
                # block were committed
                if self._block_in_chain(committed_block):
                    continue

            raise MissingDependency(dep)


class _CommitCache:
    """Tracks the commit status of a set of identifiers; these identifiers
    are either explicitly committed or explicitly uncommitted. If they fall
    into neither of these cases, the fallback is to look in the BlockStore
    to see if they are there.

    Explicitly committed ids take priority over uncommitted ones, since one
    of the common use cases we have is to simulate the committed state at a
    previous state of the BlockStore, and we allow for the identifiers to
    be re-committed.
    """

    def __init__(self, block_store_check):
        self.block_store_check = block_store_check
        # The set of items committed by this chain.
        self._committed = set()
        # The set of items uncommitted by the current chain when it is
        # rolled back.
        self._uncommitted = set()

    def add(self, identifier):
        self._committed.add(identifier)

    def remove(self, identifier):
        self._committed.discard(identifier)

    def uncommit(self, identifier):
        self._uncommitted.add(identifier)

    def __contains__(self, identifier):
        if identifier in self._committed:
            return True
        if identifier in self._uncommitted:
            return False
        return self.block_store_check(identifier)


class TransactionCommitCache(_CommitCache):
    """Tracks the set of Transactions that are committed to a hypothetical
    blockchain. This is used to detect duplicate transactions or missing
    dependencies when building a block.
    """

    def __init__(self, block_store):
        super(TransactionCommitCache, self).__init__(
            block_store.has_transaction)

    def add_batch(self, batch):
        for txn in batch.transactions:
            self._committed.add(txn.header_signature)

    def remove_batch(self, batch):
        for txn in batch.transactions:
            self._committed.discard(txn.header_signature)
[ "sawtooth_validator.protobuf.block_pb2.BlockHeader", "sawtooth_validator.protobuf.transaction_pb2.TransactionHeader" ]
[((3753, 3766), 'sawtooth_validator.protobuf.block_pb2.BlockHeader', 'BlockHeader', ([], {}), '()\n', (3764, 3766), False, 'from sawtooth_validator.protobuf.block_pb2 import BlockHeader\n'), ((7790, 7809), 'sawtooth_validator.protobuf.transaction_pb2.TransactionHeader', 'TransactionHeader', ([], {}), '()\n', (7807, 7809), False, 'from sawtooth_validator.protobuf.transaction_pb2 import TransactionHeader\n'), ((3277, 3290), 'sawtooth_validator.protobuf.block_pb2.BlockHeader', 'BlockHeader', ([], {}), '()\n', (3288, 3290), False, 'from sawtooth_validator.protobuf.block_pb2 import BlockHeader\n')]
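_CommitCache's precedence rules (explicit commit beats explicit uncommit, which beats the block-store fallback) can be seen with a plain set standing in for the block store:

# Illustration only; a set's __contains__ plays the role of the
# block_store_check callable.
store = {'txn-1'}
cache = _CommitCache(block_store_check=store.__contains__)

assert 'txn-1' in cache      # falls through to the block store
cache.uncommit('txn-1')
assert 'txn-1' not in cache  # explicit uncommit overrides the store
cache.add('txn-1')
assert 'txn-1' in cache      # explicit commit takes priority over uncommit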
# coding: utf-8
"""Print basic information from the Tango database."""
from tango import Database

# Get reference to tango database
DB = Database()

print('=' * 80)
print('Database info:')
print('=' * 80)
print(DB.get_info())
print('=' * 80)
print('Server list:')
print('=' * 80)
print(DB.get_server_list().value_string)
print('')
[ "tango.Database" ]
[((91, 101), 'tango.Database', 'Database', ([], {}), '()\n', (99, 101), False, 'from tango import Database\n')]
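Database() connects to the control system named by TANGO_HOST, so the script above needs a live Tango database server. A follow-up query sketch using the same handle; the wildcard form of get_server_list filters server names:

# Requires a reachable Tango database (TANGO_HOST); illustrative only.
for server in DB.get_server_list('*').value_string:
    print(server)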
import unittest import shutil import os import sys import tests.prep_tests as prep import validate.validator as v from validate.common_constants import * import tests.test_values as test_arrays class BasicTestCase(unittest.TestCase): def setUp(self): self.test_storepath = "./tests/data" os.makedirs(self.test_storepath, exist_ok=True) def tearDown(self): shutil.rmtree(self.test_storepath) def test_validate_good_file_extension(self): test_filepath = os.path.join(self.test_storepath, "test_file.tsv.gz") validator = v.Validator(test_filepath, "pgs-upload", logfile=test_filepath + ".LOG") valid_ext = validator.validate_file_extension() self.assertTrue(valid_ext) # alternative test_filepath = os.path.join(self.test_storepath, "test_file.csv.gz") validator = v.Validator(test_filepath, "pgs-upload", logfile=test_filepath + ".LOG") valid_ext = validator.validate_file_extension() self.assertTrue(valid_ext) def test_validate_bad_file_extension(self): test_filepath = os.path.join(self.test_storepath, "test_file.zip") validator = v.Validator(test_filepath, "pgs-upload", logfile=test_filepath + ".LOG") valid_ext = validator.validate_file_extension() self.assertFalse(valid_ext) def test_validate_good_file_headers(self): test_filepath = os.path.join(self.test_storepath, "test_file.tsv") setup_file = prep.SSTestFile() setup_file.prep_test_file() validator = v.Validator(test_filepath, "pgs-upload", logfile=test_filepath + ".LOG") valid_headers = validator.validate_headers() self.assertTrue(valid_headers) def test_validate_file_headers_missing_snp(self): test_filepath = os.path.join(self.test_storepath, "test_file.tsv") setup_file = prep.SSTestFile() setup_file.set_test_data_dict() setup_file.test_data_dict.pop(SNP_DSET) # remove a snp field setup_file.prep_test_file() validator = v.Validator(test_filepath, "pgs-upload", logfile=test_filepath + ".LOG") valid_headers = validator.validate_headers() self.assertTrue(valid_headers) def test_validate_file_headers_missing_pos(self): test_filepath = os.path.join(self.test_storepath, "test_file.tsv") setup_file = prep.SSTestFile() setup_file.set_test_data_dict() setup_file.test_data_dict.pop(CHR_DSET) # remove the chr field setup_file.test_data_dict.pop(BP_DSET) # remove the pos field setup_file.prep_test_file() validator = v.Validator(test_filepath, "pgs-upload", logfile=test_filepath + ".LOG") valid_headers = validator.validate_headers() self.assertTrue(valid_headers) def test_validate_file_headers_missing_snp_and_pos(self): test_filepath = os.path.join(self.test_storepath, "test_file.tsv") setup_file = prep.SSTestFile() setup_file.set_test_data_dict() setup_file.test_data_dict.pop(SNP_DSET) # remove a snp field setup_file.test_data_dict.pop(CHR_DSET) # remove the chr field setup_file.test_data_dict.pop(BP_DSET) # remove the pos field setup_file.prep_test_file() validator = v.Validator(test_filepath, "pgs-upload", logfile=test_filepath + ".LOG") valid_headers = validator.validate_headers() self.assertFalse(valid_headers) def test_validate_bad_file_headers(self): test_filepath = os.path.join(self.test_storepath, "test_file.tsv") setup_file = prep.SSTestFile() setup_file.set_test_data_dict() setup_file.test_data_dict.pop(EFFECT_DSET) # remove a mandatory field setup_file.prep_test_file() validator = v.Validator(test_filepath, "pgs-upload", logfile=test_filepath + ".LOG") valid_headers = validator.validate_headers() self.assertFalse(valid_headers) def test_validate_bad_file_headers_missing_effect(self): test_filepath = os.path.join(self.test_storepath, 
"test_file.tsv") setup_file = prep.SSTestFile() setup_file.set_test_data_dict() setup_file.test_data_dict.pop(EFFECT_WEIGHT_DSET) # remove effect_weight field setup_file.test_data_dict.pop(OR_DSET) # remove odds ratio field setup_file.test_data_dict.pop(HR_DSET) # remove hazard ratio field setup_file.prep_test_file() validator = v.Validator(test_filepath, "pgs-upload", logfile=test_filepath + ".LOG") valid_headers = validator.validate_headers() self.assertFalse(valid_headers) def test_validate_good_file_data(self): test_filepath = os.path.join(self.test_storepath, "test_file.tsv") logfile=test_filepath.replace('tsv', 'LOG') setup_file = prep.SSTestFile() setup_file.prep_test_file() validator = v.Validator(test_filepath, "pgs-upload", logfile=logfile) valid_data = validator.validate_data() self.assertTrue(valid_data) def test_validate_bad_snp_file_data(self): test_filename = "bad_snp.tsv" test_filepath = os.path.join(self.test_storepath, test_filename) logfile=test_filepath.replace('tsv', 'LOG') setup_file = prep.SSTestFile(filename=test_filename) setup_file.set_test_data_dict() setup_file.test_data_dict[SNP_DSET] = ["invalid", 123, "1_1234_A_G", "ss151232"] # set bad snps setup_file.prep_test_file() validator = v.Validator(file=test_filepath, filetype="pgs-upload", logfile=logfile) valid_data = validator.validate_data() self.assertEqual(len(validator.bad_rows), 4) self.assertFalse(valid_data) def test_validate_bad_snp_and_no_pos_file_data(self): test_filename = "bad_snp_no_pos.tsv" test_filepath = os.path.join(self.test_storepath, test_filename) logfile=test_filepath.replace('tsv', 'LOG') setup_file = prep.SSTestFile(filename=test_filename) setup_file.set_test_data_dict() setup_file.test_data_dict[SNP_DSET] = ["invalid", "rs123", "1_1234_A_G", "ss151232"] # set bad snps setup_file.test_data_dict[BP_DSET] = [None, 123, "NA", None] # only one good row setup_file.prep_test_file() validator = v.Validator(file=test_filepath, filetype="pgs-upload", logfile=logfile) valid_data = validator.validate_data() self.assertEqual(len(validator.bad_rows), 3) self.assertFalse(valid_data) def test_validate_bad_chr_file_data(self): test_filename = "bad_chr.tsv" test_filepath = os.path.join(self.test_storepath, test_filename) logfile=test_filepath.replace('tsv', 'LOG') setup_file = prep.SSTestFile(filename=test_filename) setup_file.set_test_data_dict() setup_file.test_data_dict[CHR_DSET] = [1, 123, "CHR1", "X"] # set 2 bad chrs setup_file.prep_test_file() validator = v.Validator(file=test_filepath, filetype="pgs-upload", logfile=logfile) valid_data = validator.validate_data() self.assertEqual(len(validator.bad_rows), 2) self.assertFalse(valid_data) def test_validate_bad_chr_and_no_snp_file_data(self): test_filename = "bad_chr_no_snp.tsv" test_filepath = os.path.join(self.test_storepath, test_filename) logfile=test_filepath.replace('tsv', 'LOG') setup_file = prep.SSTestFile(filename=test_filename) setup_file.set_test_data_dict() setup_file.test_data_dict[CHR_DSET] = [1, 123, "CHR1", "X"] # set 2 bad chrs setup_file.test_data_dict[SNP_DSET] = ["invalid", 123, "rs1234", "rs151"] # set only one good row setup_file.prep_test_file() validator = v.Validator(file=test_filepath, filetype="pgs-upload", logfile=logfile) valid_data = validator.validate_data() self.assertEqual(len(validator.bad_rows), 3) self.assertFalse(valid_data) def test_validate_bad_bp_file_data(self): test_filename = "bad_bp.tsv" test_filepath = os.path.join(self.test_storepath, test_filename) logfile=test_filepath.replace('tsv', 'LOG') setup_file = 
prep.SSTestFile(filename=test_filename) setup_file.set_test_data_dict() setup_file.test_data_dict[BP_DSET] = [1, 1234567890, "CHR1_122334", 123245] # set 2 bad bps setup_file.prep_test_file() validator = v.Validator(file=test_filepath, filetype="pgs-upload", logfile=logfile) valid_data = validator.validate_data() self.assertEqual(len(validator.bad_rows), 2) self.assertFalse(valid_data) def test_validate_bad_bp_and_no_snp_file_data(self): test_filename = "bad_bp_no_snp.tsv" test_filepath = os.path.join(self.test_storepath, test_filename) logfile=test_filepath.replace('tsv', 'LOG') setup_file = prep.SSTestFile(filename=test_filename) setup_file.set_test_data_dict() setup_file.test_data_dict[BP_DSET] = [1, 1234567890, "CHR1_122334", 123245] # set 2 bad bps setup_file.test_data_dict[SNP_DSET] = ["invalid", 123, None, "rs1234"] # set so only one good row setup_file.prep_test_file() validator = v.Validator(file=test_filepath, filetype="pgs-upload", logfile=logfile) valid_data = validator.validate_data() self.assertEqual(len(validator.bad_rows), 3) self.assertFalse(valid_data) def test_validate_bad_optional_effect_weight_file_data(self): test_filename = "bad_weight.tsv" test_filepath = os.path.join(self.test_storepath, test_filename) logfile=test_filepath.replace('tsv', 'LOG') setup_file = prep.SSTestFile(filename=test_filename) setup_file.set_test_data_dict() setup_file.test_data_dict[EFFECT_WEIGHT_DSET] = [1.1232e-23, "invalid", 0.123, .3245] # set 1 bad bps setup_file.prep_test_file() validator = v.Validator(file=test_filepath, filetype="pgs-upload", logfile=logfile) valid_data = validator.validate_data() self.assertEqual(len(validator.bad_rows), 1) self.assertFalse(valid_data) def test_validate_bad_optional_odds_ratio_file_data(self): test_filename = "bad_odds.tsv" test_filepath = os.path.join(self.test_storepath, test_filename) logfile=test_filepath.replace('tsv', 'LOG') setup_file = prep.SSTestFile(filename=test_filename) setup_file.set_test_data_dict() setup_file.test_data_dict[OR_DSET] = [1.1232e-23, "invalid", 0.123, .3245] # set 1 bad bps setup_file.prep_test_file() validator = v.Validator(file=test_filepath, filetype="pgs-upload", logfile=logfile) valid_data = validator.validate_data() self.assertEqual(len(validator.bad_rows), 1) self.assertFalse(valid_data) def test_validate_bad_optional_hazard_ratio_file_data(self): test_filename = "bad_hazard.tsv" test_filepath = os.path.join(self.test_storepath, test_filename) logfile=test_filepath.replace('tsv', 'LOG') setup_file = prep.SSTestFile(filename=test_filename) setup_file.set_test_data_dict() setup_file.test_data_dict[HR_DSET] = [1.1232e-23, "invalid", 0.123, .3245] # set 1 bad bps setup_file.prep_test_file() validator = v.Validator(file=test_filepath, filetype="pgs-upload", logfile=logfile) valid_data = validator.validate_data() self.assertEqual(len(validator.bad_rows), 1) self.assertFalse(valid_data) def test_validate_bad_effect_allele_file_data(self): test_filename = "bad_effect.tsv" test_filepath = os.path.join(self.test_storepath, test_filename) logfile=test_filepath.replace('tsv', 'LOG') setup_file = prep.SSTestFile(filename=test_filename) setup_file.set_test_data_dict() setup_file.test_data_dict[EFFECT_DSET] = ['A', 'AGG', 'INS:T', 'd'] # set 2 bad alleles setup_file.prep_test_file() validator = v.Validator(file=test_filepath, filetype="pgs-upload", logfile=logfile) valid_data = validator.validate_data() self.assertEqual(len(validator.bad_rows), 2) self.assertFalse(valid_data) def test_validate_empty_snp_file_data(self): 
test_filename = "empty_snp.tsv" test_filepath = os.path.join(self.test_storepath, test_filename) logfile=test_filepath.replace('tsv', 'LOG') setup_file = prep.SSTestFile(filename=test_filename) setup_file.set_test_data_dict() setup_file.test_data_dict[SNP_DSET] = ["NA", None, None, None] # set bad snps setup_file.prep_test_file() validator = v.Validator(file=test_filepath, filetype="pgs-upload", logfile=logfile) valid_data = validator.validate_data() self.assertEqual(len(validator.bad_rows), 4) self.assertFalse(valid_data) def test_validate_empty_snp_no_pos_file_data(self): test_filename = "empty_snp_no_pos.tsv" test_filepath = os.path.join(self.test_storepath, test_filename) logfile=test_filepath.replace('tsv', 'LOG') setup_file = prep.SSTestFile(filename=test_filename) setup_file.set_test_data_dict() setup_file.test_data_dict[SNP_DSET] = ["NA", None, "1234", "rs1"] # set bad snps setup_file.test_data_dict[BP_DSET] = [None, 123, "NA", None] # only one good bp setup_file.prep_test_file() validator = v.Validator(file=test_filepath, filetype="pgs-upload", logfile=logfile) valid_data = validator.validate_data() self.assertEqual(len(validator.bad_rows), 4) self.assertFalse(valid_data) if __name__ == '__main__': unittest.main()
[ "unittest.main", "validate.validator.Validator", "os.makedirs", "tests.prep_tests.SSTestFile", "shutil.rmtree", "os.path.join" ]
[((13692, 13707), 'unittest.main', 'unittest.main', ([], {}), '()\n', (13705, 13707), False, 'import unittest\n'), ((310, 357), 'os.makedirs', 'os.makedirs', (['self.test_storepath'], {'exist_ok': '(True)'}), '(self.test_storepath, exist_ok=True)\n', (321, 357), False, 'import os\n'), ((391, 425), 'shutil.rmtree', 'shutil.rmtree', (['self.test_storepath'], {}), '(self.test_storepath)\n', (404, 425), False, 'import shutil\n'), ((500, 553), 'os.path.join', 'os.path.join', (['self.test_storepath', '"""test_file.tsv.gz"""'], {}), "(self.test_storepath, 'test_file.tsv.gz')\n", (512, 553), False, 'import os\n'), ((574, 646), 'validate.validator.Validator', 'v.Validator', (['test_filepath', '"""pgs-upload"""'], {'logfile': "(test_filepath + '.LOG')"}), "(test_filepath, 'pgs-upload', logfile=test_filepath + '.LOG')\n", (585, 646), True, 'import validate.validator as v\n'), ((784, 837), 'os.path.join', 'os.path.join', (['self.test_storepath', '"""test_file.csv.gz"""'], {}), "(self.test_storepath, 'test_file.csv.gz')\n", (796, 837), False, 'import os\n'), ((858, 930), 'validate.validator.Validator', 'v.Validator', (['test_filepath', '"""pgs-upload"""'], {'logfile': "(test_filepath + '.LOG')"}), "(test_filepath, 'pgs-upload', logfile=test_filepath + '.LOG')\n", (869, 930), True, 'import validate.validator as v\n'), ((1095, 1145), 'os.path.join', 'os.path.join', (['self.test_storepath', '"""test_file.zip"""'], {}), "(self.test_storepath, 'test_file.zip')\n", (1107, 1145), False, 'import os\n'), ((1166, 1238), 'validate.validator.Validator', 'v.Validator', (['test_filepath', '"""pgs-upload"""'], {'logfile': "(test_filepath + '.LOG')"}), "(test_filepath, 'pgs-upload', logfile=test_filepath + '.LOG')\n", (1177, 1238), True, 'import validate.validator as v\n'), ((1403, 1453), 'os.path.join', 'os.path.join', (['self.test_storepath', '"""test_file.tsv"""'], {}), "(self.test_storepath, 'test_file.tsv')\n", (1415, 1453), False, 'import os\n'), ((1475, 1492), 'tests.prep_tests.SSTestFile', 'prep.SSTestFile', ([], {}), '()\n', (1490, 1492), True, 'import tests.prep_tests as prep\n'), ((1549, 1621), 'validate.validator.Validator', 'v.Validator', (['test_filepath', '"""pgs-upload"""'], {'logfile': "(test_filepath + '.LOG')"}), "(test_filepath, 'pgs-upload', logfile=test_filepath + '.LOG')\n", (1560, 1621), True, 'import validate.validator as v\n'), ((1793, 1843), 'os.path.join', 'os.path.join', (['self.test_storepath', '"""test_file.tsv"""'], {}), "(self.test_storepath, 'test_file.tsv')\n", (1805, 1843), False, 'import os\n'), ((1865, 1882), 'tests.prep_tests.SSTestFile', 'prep.SSTestFile', ([], {}), '()\n', (1880, 1882), True, 'import tests.prep_tests as prep\n'), ((2048, 2120), 'validate.validator.Validator', 'v.Validator', (['test_filepath', '"""pgs-upload"""'], {'logfile': "(test_filepath + '.LOG')"}), "(test_filepath, 'pgs-upload', logfile=test_filepath + '.LOG')\n", (2059, 2120), True, 'import validate.validator as v\n'), ((2292, 2342), 'os.path.join', 'os.path.join', (['self.test_storepath', '"""test_file.tsv"""'], {}), "(self.test_storepath, 'test_file.tsv')\n", (2304, 2342), False, 'import os\n'), ((2364, 2381), 'tests.prep_tests.SSTestFile', 'prep.SSTestFile', ([], {}), '()\n', (2379, 2381), True, 'import tests.prep_tests as prep\n'), ((2620, 2692), 'validate.validator.Validator', 'v.Validator', (['test_filepath', '"""pgs-upload"""'], {'logfile': "(test_filepath + '.LOG')"}), "(test_filepath, 'pgs-upload', logfile=test_filepath + '.LOG')\n", (2631, 2692), True, 'import validate.validator as v\n'), 
((2872, 2922), 'os.path.join', 'os.path.join', (['self.test_storepath', '"""test_file.tsv"""'], {}), "(self.test_storepath, 'test_file.tsv')\n", (2884, 2922), False, 'import os\n'), ((2944, 2961), 'tests.prep_tests.SSTestFile', 'prep.SSTestFile', ([], {}), '()\n', (2959, 2961), True, 'import tests.prep_tests as prep\n'), ((3269, 3341), 'validate.validator.Validator', 'v.Validator', (['test_filepath', '"""pgs-upload"""'], {'logfile': "(test_filepath + '.LOG')"}), "(test_filepath, 'pgs-upload', logfile=test_filepath + '.LOG')\n", (3280, 3341), True, 'import validate.validator as v\n'), ((3506, 3556), 'os.path.join', 'os.path.join', (['self.test_storepath', '"""test_file.tsv"""'], {}), "(self.test_storepath, 'test_file.tsv')\n", (3518, 3556), False, 'import os\n'), ((3578, 3595), 'tests.prep_tests.SSTestFile', 'prep.SSTestFile', ([], {}), '()\n', (3593, 3595), True, 'import tests.prep_tests as prep\n'), ((3770, 3842), 'validate.validator.Validator', 'v.Validator', (['test_filepath', '"""pgs-upload"""'], {'logfile': "(test_filepath + '.LOG')"}), "(test_filepath, 'pgs-upload', logfile=test_filepath + '.LOG')\n", (3781, 3842), True, 'import validate.validator as v\n'), ((4022, 4072), 'os.path.join', 'os.path.join', (['self.test_storepath', '"""test_file.tsv"""'], {}), "(self.test_storepath, 'test_file.tsv')\n", (4034, 4072), False, 'import os\n'), ((4094, 4111), 'tests.prep_tests.SSTestFile', 'prep.SSTestFile', ([], {}), '()\n', (4109, 4111), True, 'import tests.prep_tests as prep\n'), ((4465, 4537), 'validate.validator.Validator', 'v.Validator', (['test_filepath', '"""pgs-upload"""'], {'logfile': "(test_filepath + '.LOG')"}), "(test_filepath, 'pgs-upload', logfile=test_filepath + '.LOG')\n", (4476, 4537), True, 'import validate.validator as v\n'), ((4700, 4750), 'os.path.join', 'os.path.join', (['self.test_storepath', '"""test_file.tsv"""'], {}), "(self.test_storepath, 'test_file.tsv')\n", (4712, 4750), False, 'import os\n'), ((4824, 4841), 'tests.prep_tests.SSTestFile', 'prep.SSTestFile', ([], {}), '()\n', (4839, 4841), True, 'import tests.prep_tests as prep\n'), ((4898, 4955), 'validate.validator.Validator', 'v.Validator', (['test_filepath', '"""pgs-upload"""'], {'logfile': 'logfile'}), "(test_filepath, 'pgs-upload', logfile=logfile)\n", (4909, 4955), True, 'import validate.validator as v\n'), ((5149, 5197), 'os.path.join', 'os.path.join', (['self.test_storepath', 'test_filename'], {}), '(self.test_storepath, test_filename)\n', (5161, 5197), False, 'import os\n'), ((5271, 5310), 'tests.prep_tests.SSTestFile', 'prep.SSTestFile', ([], {'filename': 'test_filename'}), '(filename=test_filename)\n', (5286, 5310), True, 'import tests.prep_tests as prep\n'), ((5511, 5582), 'validate.validator.Validator', 'v.Validator', ([], {'file': 'test_filepath', 'filetype': '"""pgs-upload"""', 'logfile': 'logfile'}), "(file=test_filepath, filetype='pgs-upload', logfile=logfile)\n", (5522, 5582), True, 'import validate.validator as v\n'), ((5848, 5896), 'os.path.join', 'os.path.join', (['self.test_storepath', 'test_filename'], {}), '(self.test_storepath, test_filename)\n', (5860, 5896), False, 'import os\n'), ((5970, 6009), 'tests.prep_tests.SSTestFile', 'prep.SSTestFile', ([], {'filename': 'test_filename'}), '(filename=test_filename)\n', (5985, 6009), True, 'import tests.prep_tests as prep\n'), ((6303, 6374), 'validate.validator.Validator', 'v.Validator', ([], {'file': 'test_filepath', 'filetype': '"""pgs-upload"""', 'logfile': 'logfile'}), "(file=test_filepath, filetype='pgs-upload', logfile=logfile)\n", (6314, 
6374), True, 'import validate.validator as v\n'), ((6622, 6670), 'os.path.join', 'os.path.join', (['self.test_storepath', 'test_filename'], {}), '(self.test_storepath, test_filename)\n', (6634, 6670), False, 'import os\n'), ((6744, 6783), 'tests.prep_tests.SSTestFile', 'prep.SSTestFile', ([], {'filename': 'test_filename'}), '(filename=test_filename)\n', (6759, 6783), True, 'import tests.prep_tests as prep\n'), ((6965, 7036), 'validate.validator.Validator', 'v.Validator', ([], {'file': 'test_filepath', 'filetype': '"""pgs-upload"""', 'logfile': 'logfile'}), "(file=test_filepath, filetype='pgs-upload', logfile=logfile)\n", (6976, 7036), True, 'import validate.validator as v\n'), ((7302, 7350), 'os.path.join', 'os.path.join', (['self.test_storepath', 'test_filename'], {}), '(self.test_storepath, test_filename)\n', (7314, 7350), False, 'import os\n'), ((7424, 7463), 'tests.prep_tests.SSTestFile', 'prep.SSTestFile', ([], {'filename': 'test_filename'}), '(filename=test_filename)\n', (7439, 7463), True, 'import tests.prep_tests as prep\n'), ((7751, 7822), 'validate.validator.Validator', 'v.Validator', ([], {'file': 'test_filepath', 'filetype': '"""pgs-upload"""', 'logfile': 'logfile'}), "(file=test_filepath, filetype='pgs-upload', logfile=logfile)\n", (7762, 7822), True, 'import validate.validator as v\n'), ((8068, 8116), 'os.path.join', 'os.path.join', (['self.test_storepath', 'test_filename'], {}), '(self.test_storepath, test_filename)\n', (8080, 8116), False, 'import os\n'), ((8190, 8229), 'tests.prep_tests.SSTestFile', 'prep.SSTestFile', ([], {'filename': 'test_filename'}), '(filename=test_filename)\n', (8205, 8229), True, 'import tests.prep_tests as prep\n'), ((8426, 8497), 'validate.validator.Validator', 'v.Validator', ([], {'file': 'test_filepath', 'filetype': '"""pgs-upload"""', 'logfile': 'logfile'}), "(file=test_filepath, filetype='pgs-upload', logfile=logfile)\n", (8437, 8497), True, 'import validate.validator as v\n'), ((8761, 8809), 'os.path.join', 'os.path.join', (['self.test_storepath', 'test_filename'], {}), '(self.test_storepath, test_filename)\n', (8773, 8809), False, 'import os\n'), ((8883, 8922), 'tests.prep_tests.SSTestFile', 'prep.SSTestFile', ([], {'filename': 'test_filename'}), '(filename=test_filename)\n', (8898, 8922), True, 'import tests.prep_tests as prep\n'), ((9225, 9296), 'validate.validator.Validator', 'v.Validator', ([], {'file': 'test_filepath', 'filetype': '"""pgs-upload"""', 'logfile': 'logfile'}), "(file=test_filepath, filetype='pgs-upload', logfile=logfile)\n", (9236, 9296), True, 'import validate.validator as v\n'), ((9566, 9614), 'os.path.join', 'os.path.join', (['self.test_storepath', 'test_filename'], {}), '(self.test_storepath, test_filename)\n', (9578, 9614), False, 'import os\n'), ((9688, 9727), 'tests.prep_tests.SSTestFile', 'prep.SSTestFile', ([], {'filename': 'test_filename'}), '(filename=test_filename)\n', (9703, 9727), True, 'import tests.prep_tests as prep\n'), ((9934, 10005), 'validate.validator.Validator', 'v.Validator', ([], {'file': 'test_filepath', 'filetype': '"""pgs-upload"""', 'logfile': 'logfile'}), "(file=test_filepath, filetype='pgs-upload', logfile=logfile)\n", (9945, 10005), True, 'import validate.validator as v\n'), ((10270, 10318), 'os.path.join', 'os.path.join', (['self.test_storepath', 'test_filename'], {}), '(self.test_storepath, test_filename)\n', (10282, 10318), False, 'import os\n'), ((10392, 10431), 'tests.prep_tests.SSTestFile', 'prep.SSTestFile', ([], {'filename': 'test_filename'}), '(filename=test_filename)\n', (10407, 
10431), True, 'import tests.prep_tests as prep\n'), ((10627, 10698), 'validate.validator.Validator', 'v.Validator', ([], {'file': 'test_filepath', 'filetype': '"""pgs-upload"""', 'logfile': 'logfile'}), "(file=test_filepath, filetype='pgs-upload', logfile=logfile)\n", (10638, 10698), True, 'import validate.validator as v\n'), ((10967, 11015), 'os.path.join', 'os.path.join', (['self.test_storepath', 'test_filename'], {}), '(self.test_storepath, test_filename)\n', (10979, 11015), False, 'import os\n'), ((11089, 11128), 'tests.prep_tests.SSTestFile', 'prep.SSTestFile', ([], {'filename': 'test_filename'}), '(filename=test_filename)\n', (11104, 11128), True, 'import tests.prep_tests as prep\n'), ((11324, 11395), 'validate.validator.Validator', 'v.Validator', ([], {'file': 'test_filepath', 'filetype': '"""pgs-upload"""', 'logfile': 'logfile'}), "(file=test_filepath, filetype='pgs-upload', logfile=logfile)\n", (11335, 11395), True, 'import validate.validator as v\n'), ((11656, 11704), 'os.path.join', 'os.path.join', (['self.test_storepath', 'test_filename'], {}), '(self.test_storepath, test_filename)\n', (11668, 11704), False, 'import os\n'), ((11778, 11817), 'tests.prep_tests.SSTestFile', 'prep.SSTestFile', ([], {'filename': 'test_filename'}), '(filename=test_filename)\n', (11793, 11817), True, 'import tests.prep_tests as prep\n'), ((12010, 12081), 'validate.validator.Validator', 'v.Validator', ([], {'file': 'test_filepath', 'filetype': '"""pgs-upload"""', 'logfile': 'logfile'}), "(file=test_filepath, filetype='pgs-upload', logfile=logfile)\n", (12021, 12081), True, 'import validate.validator as v\n'), ((12333, 12381), 'os.path.join', 'os.path.join', (['self.test_storepath', 'test_filename'], {}), '(self.test_storepath, test_filename)\n', (12345, 12381), False, 'import os\n'), ((12455, 12494), 'tests.prep_tests.SSTestFile', 'prep.SSTestFile', ([], {'filename': 'test_filename'}), '(filename=test_filename)\n', (12470, 12494), True, 'import tests.prep_tests as prep\n'), ((12677, 12748), 'validate.validator.Validator', 'v.Validator', ([], {'file': 'test_filepath', 'filetype': '"""pgs-upload"""', 'logfile': 'logfile'}), "(file=test_filepath, filetype='pgs-upload', logfile=logfile)\n", (12688, 12748), True, 'import validate.validator as v\n'), ((13014, 13062), 'os.path.join', 'os.path.join', (['self.test_storepath', 'test_filename'], {}), '(self.test_storepath, test_filename)\n', (13026, 13062), False, 'import os\n'), ((13136, 13175), 'tests.prep_tests.SSTestFile', 'prep.SSTestFile', ([], {'filename': 'test_filename'}), '(filename=test_filename)\n', (13151, 13175), True, 'import tests.prep_tests as prep\n'), ((13449, 13520), 'validate.validator.Validator', 'v.Validator', ([], {'file': 'test_filepath', 'filetype': '"""pgs-upload"""', 'logfile': 'logfile'}), "(file=test_filepath, filetype='pgs-upload', logfile=logfile)\n", (13460, 13520), True, 'import validate.validator as v\n')]
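# The test module behind the apis/extract_api record above is not reproduced
# in this document, but the extracted tuples pin down the call signatures.
# Below is a minimal, hedged reconstruction of one such test case: the class
# name, the SSTestFile preparation step, and the final assertion are
# assumptions, while the Validator and SSTestFile calls are copied from the
# tuples themselves.
import os
import shutil
import unittest

import tests.prep_tests as prep
import validate.validator as v


class ValidatorTestSketch(unittest.TestCase):  # class name is an assumption
    test_storepath = "./tests/test_sumstats"   # assumed fixture directory

    def setUp(self):
        os.makedirs(self.test_storepath, exist_ok=True)

    def tearDown(self):
        shutil.rmtree(self.test_storepath)

    def test_tsv_upload(self):
        test_filepath = os.path.join(self.test_storepath, "test_file.tsv")
        prep.SSTestFile()  # writing the file to test_filepath is assumed behaviour
        validator = v.Validator(test_filepath, "pgs-upload", logfile=test_filepath + ".LOG")
        self.assertTrue(validator is not None)  # placeholder; the real assertions are unknown


if __name__ == "__main__":
    unittest.main()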
# Use this helper every time you rebuild the PyQt5 MainWindow.py application with PyInstaller.
#-------------------------------------------------------------
import os
import sys

def resource_path(relative_path):
    # Inside a PyInstaller bundle, data files are unpacked into the temporary
    # directory exposed as sys._MEIPASS; resolve bundled resources there.
    if hasattr(sys, '_MEIPASS'):
        return os.path.join(sys._MEIPASS, relative_path)
    # When running from source, resolve relative to the working directory.
    return os.path.join(os.path.abspath("."), relative_path)
#--------------------------------------------------------------------
[ "os.path.abspath", "os.path.join" ]
[((273, 314), 'os.path.join', 'os.path.join', (['sys._MEIPASS', 'relative_path'], {}), '(sys._MEIPASS, relative_path)\n', (285, 314), False, 'import os\n'), ((340, 360), 'os.path.abspath', 'os.path.abspath', (['"""."""'], {}), "('.')\n", (355, 360), False, 'import os\n')]
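# A short usage sketch for the resource_path helper above. The module name
# utils.py, the icon path, and the PyQt5 widgets are assumptions for
# illustration; only the helper itself comes from the original script.
import sys

from PyQt5.QtGui import QIcon
from PyQt5.QtWidgets import QApplication, QMainWindow

from utils import resource_path  # hypothetical module holding the helper

app = QApplication(sys.argv)
window = QMainWindow()
# The same call resolves the file both when running from source and from the
# frozen executable, where PyInstaller unpacks data into sys._MEIPASS.
window.setWindowIcon(QIcon(resource_path("assets/app_icon.png")))  # example path
window.show()
sys.exit(app.exec_())
# At build time the bundled file must be declared, e.g. via PyInstaller's
# --add-data option, for it to exist under sys._MEIPASS.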
from app.models import Material
from app import db
import json

def handleGetMaterials(page, per_page):
    # Return one page of materials plus the total row count.
    materials = Material.query.paginate(page=page, per_page=per_page, error_out=False)
    res = db.engine.execute("select count(*) from material")
    count = [r[0] for r in res][0]
    materialInfo = {
        'materials': [a.to_json() for a in materials.items],
        'count': count
    }
    return json.dumps(materialInfo)

def handleSubmitMaterialEditForm(material):
    # Update an existing material; report failure when the id is unknown.
    mater = Material.query.filter_by(mid=material['mid']).first()
    if mater:
        mater.name = material['name']
        mater.purity = material['purity']
        mater.manufacturer = material['manufacturer']
        mater.note = material['note']
        db.session.commit()
        return "success"
    else:
        return "failure"

def HnadleSubmitMaterialAddForm(material):
    # Insert a new material row.
    mater = Material(name=material["name"], purity=material["purity"],
                     manufacturer=material["manufacturer"], note=material["note"])
    db.session.add(mater)
    db.session.commit()
    return "success"

def handleRemoveMaterial(mid):
    material = Material.query.filter_by(mid=mid).first()
    if material:
        db.session.delete(material)
        db.session.commit()
        return "success"
    else:
        return "failure"

def handleMaterialBatchDelete(midList):
    # Delete every material in the list, then commit once at the end.
    for mid in midList:
        material = Material.query.filter_by(mid=mid).first()
        if material:
            db.session.delete(material)
    db.session.commit()
    return "success"

def handleMaterialQueryContent(selectType, content, page, per_page):
    # Resolve the requested column via getattr and filter with LIKE; building
    # query strings for eval() here would be a code-injection risk whenever
    # selectType or content come from user input.
    column = getattr(Material, selectType)
    query = db.session.query(Material).filter(column.like('%' + content + '%'))
    count = query.count()
    materials = query.paginate(page=int(page), per_page=int(per_page), error_out=False)
    materialInfo = {
        'materials': [a.to_json() for a in materials.items],
        'count': count
    }
    return json.dumps(materialInfo)
[ "app.models.Material", "app.models.Material.query.paginate", "app.models.Material.query.filter_by", "json.dumps", "app.db.session.delete", "app.db.session.commit", "app.db.engine.execute", "app.db.session.add" ]
[((119, 189), 'app.models.Material.query.paginate', 'Material.query.paginate', ([], {'page': 'page', 'per_page': 'per_page', 'error_out': '(False)'}), '(page=page, per_page=per_page, error_out=False)\n', (142, 189), False, 'from app.models import Material\n'), ((200, 250), 'app.db.engine.execute', 'db.engine.execute', (['"""select count(*) from material"""'], {}), "('select count(*) from material')\n", (217, 250), False, 'from app import db\n'), ((408, 432), 'json.dumps', 'json.dumps', (['materialInfo'], {}), '(materialInfo)\n', (418, 432), False, 'import json\n'), ((865, 990), 'app.models.Material', 'Material', ([], {'name': "material['name']", 'purity': "material['purity']", 'manufacturer': "material['manufacturer']", 'note': "material['note']"}), "(name=material['name'], purity=material['purity'], manufacturer=\n material['manufacturer'], note=material['note'])\n", (873, 990), False, 'from app.models import Material\n'), ((1012, 1033), 'app.db.session.add', 'db.session.add', (['mater'], {}), '(mater)\n', (1026, 1033), False, 'from app import db\n'), ((1038, 1057), 'app.db.session.commit', 'db.session.commit', ([], {}), '()\n', (1055, 1057), False, 'from app import db\n'), ((2087, 2111), 'json.dumps', 'json.dumps', (['materialInfo'], {}), '(materialInfo)\n', (2097, 2111), False, 'import json\n'), ((730, 749), 'app.db.session.commit', 'db.session.commit', ([], {}), '()\n', (747, 749), False, 'from app import db\n'), ((1193, 1220), 'app.db.session.delete', 'db.session.delete', (['material'], {}), '(material)\n', (1210, 1220), False, 'from app import db\n'), ((1229, 1248), 'app.db.session.commit', 'db.session.commit', ([], {}), '()\n', (1246, 1248), False, 'from app import db\n'), ((490, 535), 'app.models.Material.query.filter_by', 'Material.query.filter_by', ([], {'mid': "material['mid']"}), "(mid=material['mid'])\n", (514, 535), False, 'from app.models import Material\n'), ((1126, 1159), 'app.models.Material.query.filter_by', 'Material.query.filter_by', ([], {'mid': 'mid'}), '(mid=mid)\n', (1150, 1159), False, 'from app.models import Material\n'), ((1467, 1494), 'app.db.session.delete', 'db.session.delete', (['material'], {}), '(material)\n', (1484, 1494), False, 'from app import db\n'), ((1507, 1526), 'app.db.session.commit', 'db.session.commit', ([], {}), '()\n', (1524, 1526), False, 'from app import db\n'), ((1392, 1425), 'app.models.Material.query.filter_by', 'Material.query.filter_by', ([], {'mid': 'mid'}), '(mid=mid)\n', (1416, 1425), False, 'from app.models import Material\n')]
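# A hedged sketch of wiring the handlers above into Flask routes. The
# blueprint, URL rules, query-parameter names, and the import path are
# assumptions about the surrounding app, not taken from the source.
from flask import Blueprint, request

from app.handlers.material import handleGetMaterials, handleRemoveMaterial  # hypothetical path

material_bp = Blueprint('material', __name__)

@material_bp.route('/materials')
def get_materials():
    page = request.args.get('page', 1, type=int)
    per_page = request.args.get('per_page', 10, type=int)
    return handleGetMaterials(page, per_page)

@material_bp.route('/materials/<int:mid>', methods=['DELETE'])
def remove_material(mid):
    return handleRemoveMaterial(mid)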
import scipy.spatial as sci_spatial import skimage.draw as ski_draw import shapely.geometry as shapely_geom import numpy as np import os, sys def create_landscape(no_of_circles, radius): # create the middle points of the ponds (the ponds should not overlap) x,y = np.random.randint(0,400), np.random.randint(0,400) list_of_points = [(x + 400, y + 400), (x + 400, y), (x + 800, y + 400), (x + 400, y + 800), (x, y + 400)] for i in range(no_of_circles-1): new_point_found = False trials = 0 while not new_point_found and trials < 500: x,y = np.random.randint(0,400), np.random.randint(0,400) new_point = shapely_geom.Point((x + 400, y + 400)) trials += 1 if not new_point.buffer(radius * 2 + 50).intersects(shapely_geom.MultiPoint(list_of_points)): new_point_found = True list_of_points.append((x + 400, y + 400)) list_of_points.append((x + 400, y)) list_of_points.append((x + 800, y + 400)) list_of_points.append((x + 400, y + 800)) list_of_points.append((x, y + 400)) # landscape with ponds ponds_img = np.full((1200 + 2*radius, 1200 + 2*radius), 55) # draw the ponds for point_i in list_of_points: rr, cc = ski_draw.disk(point_i, radius) ponds_img[rr + radius, cc + radius] = 105 ponds_img = ponds_img[400+radius : 800+radius, 400+radius : 800+radius] # pond-id ponds_id_img = np.full((1200 + 2*radius, 1200 + 2*radius), -999) # draw the ponds id_i = 0 for point_i, id_i in zip(list_of_points, np.repeat(np.arange(len(list_of_points)/5), 5)): rr, cc = ski_draw.disk(point_i, radius) ponds_id_img[rr + radius, cc + radius] = id_i ponds_id_img = ponds_id_img[400+radius : 800+radius, 400+radius : 800+radius] # create an raster image with the middle points marked is_center_img = np.zeros_like(ponds_img) boundary = shapely_geom.Polygon([(399, 399), (799, 399), (799, 799), (399, 799)]) selection = [shapely_geom.Point(point_i).intersects(boundary) for point_i in list_of_points] x,y = np.array(list_of_points)[selection].T x -= 400 y -= 400 is_center_img[x, y] = 1 return is_center_img, ponds_img, ponds_id_img def make_corridors(is_center, ponds): without_boundaries = np.zeros((400*3, 400*3)) without_boundaries[0:400, 400:800] = is_center without_boundaries[400:800, 0:400] = is_center without_boundaries[400:800, 400:800] = is_center without_boundaries[800:1200, 400:800] = is_center without_boundaries[400:800, 800:1200] = is_center loc = np.where(without_boundaries == 1) center_points = np.swapaxes(loc, 0, 1) result = sci_spatial.distance.cdist(center_points, center_points) new_img = np.full_like(without_boundaries, 55) # 55 --> green in netlogo points_with_corridors = np.where(np.logical_and( result != 0, result < 170)) #mean(result[result != 0]) * 0.3 for i in np.arange(0, np.shape(points_with_corridors)[1]): index_from = points_with_corridors[0][i] index_to = points_with_corridors[1][i] x = [loc[1][index_from], loc[1][index_to]] y = [loc[0][index_from], loc[0][index_to]] x_corr, y_corr = shapely_geom.LineString([(x[0], y[0]), (x[1], y[1])]).buffer(4.5).exterior.coords.xy rr, cc = ski_draw.polygon(y_corr, x_corr, without_boundaries.shape) new_img[rr, cc] = 35 # 35 --> brown in netlogo final_img = new_img[400:800, 400:800] final_img[np.where(ponds == 105)] = 105 # 105 --> blue in netlogo return final_img def make_buffers(corridor_img, is_center_img): radius = 15 corridor_area = np.sum(corridor_img == 35) no_of_ponds = np.sum(is_center_img) buffer_radius = np.sqrt( ( (corridor_area / no_of_ponds) + np.pi *radius **2) / np.pi ) without_boundaries = np.zeros((400*3, 400*3)) without_boundaries[0:400, 400:800] = is_center_img 
    without_boundaries[400:800, 0:400] = is_center_img
    without_boundaries[400:800, 400:800] = is_center_img
    without_boundaries[800:1200, 400:800] = is_center_img
    without_boundaries[400:800, 800:1200] = is_center_img

    x, y = np.where(without_boundaries == 1)

    new_img = np.full_like(without_boundaries, 55) # 55 --> green in netlogo

    # draw the terrestrial buffer around every pond centre, clipped to the raster
    for x_i, y_i in zip(x, y):
        rr, cc = ski_draw.disk((x_i, y_i), buffer_radius)
        filter_1 = (rr >= 0) & (rr <= 1199)
        filter_2 = (cc >= 0) & (cc <= 1199)
        rr = rr[filter_1 & filter_2]
        cc = cc[filter_1 & filter_2]
        new_img[rr, cc] = 35

    # draw the ponds on top of the buffers
    for x_i, y_i in zip(x, y):
        rr, cc = ski_draw.disk((x_i, y_i), radius)
        filter_1 = (rr >= 0) & (rr <= 1199)
        filter_2 = (cc >= 0) & (cc <= 1199)
        rr = rr[filter_1 & filter_2]
        cc = cc[filter_1 & filter_2]
        new_img[rr, cc] = 105

    return new_img[400:800, 400:800]


if __name__ == "__main__":
    # Command-line arguments: scenario id (used as the output folder name),
    # number of ponds, and pond radius in patches.
    os.makedirs('gis_output/' + sys.argv[1])
    os.chdir('gis_output/' + sys.argv[1])

    is_center_of_pond, pond, pond_id = create_landscape(no_of_circles=int(sys.argv[2]), radius=int(sys.argv[3]))
    corridors = make_corridors(is_center_of_pond, pond)
    buffers = make_buffers(corridors, is_center_of_pond)

    # reuse the six-line ESRI ASCII grid header from the template raster
    with open("../pcolor.asc") as myfile:
        head = [next(myfile) for x in range(6)]

    np.savetxt('corridors.asc', corridors, fmt='%i', newline='\n', header=''.join(head)[:-1], comments='')
    np.savetxt('buffers.asc', buffers, fmt='%i', newline='\n', header=''.join(head)[:-1], comments='')
    np.savetxt('center.asc', is_center_of_pond, fmt='%i', newline='\n', header=''.join(head)[:-1], comments='')
    np.savetxt('id.asc', pond_id, fmt='%i', newline='\n', header=''.join(head)[:-1], comments='')
[ "numpy.sum", "numpy.shape", "numpy.random.randint", "os.chdir", "numpy.full", "numpy.full_like", "numpy.zeros_like", "shapely.geometry.Point", "shapely.geometry.MultiPoint", "shapely.geometry.Polygon", "shapely.geometry.LineString", "numpy.swapaxes", "scipy.spatial.distance.cdist", "skimage.draw.polygon", "os.makedirs", "numpy.logical_and", "skimage.draw.disk", "numpy.zeros", "numpy.where", "numpy.array", "numpy.sqrt" ]
[((1304, 1355), 'numpy.full', 'np.full', (['(1200 + 2 * radius, 1200 + 2 * radius)', '(55)'], {}), '((1200 + 2 * radius, 1200 + 2 * radius), 55)\n', (1311, 1355), True, 'import numpy as np\n'), ((1632, 1685), 'numpy.full', 'np.full', (['(1200 + 2 * radius, 1200 + 2 * radius)', '(-999)'], {}), '((1200 + 2 * radius, 1200 + 2 * radius), -999)\n', (1639, 1685), True, 'import numpy as np\n'), ((2093, 2117), 'numpy.zeros_like', 'np.zeros_like', (['ponds_img'], {}), '(ponds_img)\n', (2106, 2117), True, 'import numpy as np\n'), ((2138, 2208), 'shapely.geometry.Polygon', 'shapely_geom.Polygon', (['[(399, 399), (799, 399), (799, 799), (399, 799)]'], {}), '([(399, 399), (799, 399), (799, 799), (399, 799)])\n', (2158, 2208), True, 'import shapely.geometry as shapely_geom\n'), ((2528, 2556), 'numpy.zeros', 'np.zeros', (['(400 * 3, 400 * 3)'], {}), '((400 * 3, 400 * 3))\n', (2536, 2556), True, 'import numpy as np\n'), ((2827, 2860), 'numpy.where', 'np.where', (['(without_boundaries == 1)'], {}), '(without_boundaries == 1)\n', (2835, 2860), True, 'import numpy as np\n'), ((2881, 2903), 'numpy.swapaxes', 'np.swapaxes', (['loc', '(0)', '(1)'], {}), '(loc, 0, 1)\n', (2892, 2903), True, 'import numpy as np\n'), ((2917, 2973), 'scipy.spatial.distance.cdist', 'sci_spatial.distance.cdist', (['center_points', 'center_points'], {}), '(center_points, center_points)\n', (2943, 2973), True, 'import scipy.spatial as sci_spatial\n'), ((2989, 3025), 'numpy.full_like', 'np.full_like', (['without_boundaries', '(55)'], {}), '(without_boundaries, 55)\n', (3001, 3025), True, 'import numpy as np\n'), ((3903, 3929), 'numpy.sum', 'np.sum', (['(corridor_img == 35)'], {}), '(corridor_img == 35)\n', (3909, 3929), True, 'import numpy as np\n'), ((3948, 3969), 'numpy.sum', 'np.sum', (['is_center_img'], {}), '(is_center_img)\n', (3954, 3969), True, 'import numpy as np\n'), ((4000, 4068), 'numpy.sqrt', 'np.sqrt', (['((corridor_area / no_of_ponds + np.pi * radius ** 2) / np.pi)'], {}), '((corridor_area / no_of_ponds + np.pi * radius ** 2) / np.pi)\n', (4007, 4068), True, 'import numpy as np\n'), ((4100, 4128), 'numpy.zeros', 'np.zeros', (['(400 * 3, 400 * 3)'], {}), '((400 * 3, 400 * 3))\n', (4108, 4128), True, 'import numpy as np\n'), ((4419, 4452), 'numpy.where', 'np.where', (['(without_boundaries == 1)'], {}), '(without_boundaries == 1)\n', (4427, 4452), True, 'import numpy as np\n'), ((4467, 4503), 'numpy.full_like', 'np.full_like', (['without_boundaries', '(55)'], {}), '(without_boundaries, 55)\n', (4479, 4503), True, 'import numpy as np\n'), ((5251, 5291), 'os.makedirs', 'os.makedirs', (["('gis_output/' + sys.argv[1])"], {}), "('gis_output/' + sys.argv[1])\n", (5262, 5291), False, 'import os, sys\n'), ((5296, 5333), 'os.chdir', 'os.chdir', (["('gis_output/' + sys.argv[1])"], {}), "('gis_output/' + sys.argv[1])\n", (5304, 5333), False, 'import os, sys\n'), ((279, 304), 'numpy.random.randint', 'np.random.randint', (['(0)', '(400)'], {}), '(0, 400)\n', (296, 304), True, 'import numpy as np\n'), ((305, 330), 'numpy.random.randint', 'np.random.randint', (['(0)', '(400)'], {}), '(0, 400)\n', (322, 330), True, 'import numpy as np\n'), ((1430, 1460), 'skimage.draw.disk', 'ski_draw.disk', (['point_i', 'radius'], {}), '(point_i, radius)\n', (1443, 1460), True, 'import skimage.draw as ski_draw\n'), ((1833, 1863), 'skimage.draw.disk', 'ski_draw.disk', (['point_i', 'radius'], {}), '(point_i, radius)\n', (1846, 1863), True, 'import skimage.draw as ski_draw\n'), ((3090, 3131), 'numpy.logical_and', 'np.logical_and', (['(result != 0)', '(result 
< 170)'], {}), '(result != 0, result < 170)\n', (3104, 3131), True, 'import numpy as np\n'), ((3560, 3618), 'skimage.draw.polygon', 'ski_draw.polygon', (['y_corr', 'x_corr', 'without_boundaries.shape'], {}), '(y_corr, x_corr, without_boundaries.shape)\n', (3576, 3618), True, 'import skimage.draw as ski_draw\n'), ((3732, 3754), 'numpy.where', 'np.where', (['(ponds == 105)'], {}), '(ponds == 105)\n', (3740, 3754), True, 'import numpy as np\n'), ((4598, 4638), 'skimage.draw.disk', 'ski_draw.disk', (['(x_i, y_i)', 'buffer_radius'], {}), '((x_i, y_i), buffer_radius)\n', (4611, 4638), True, 'import skimage.draw as ski_draw\n'), ((4896, 4929), 'skimage.draw.disk', 'ski_draw.disk', (['(x_i, y_i)', 'radius'], {}), '((x_i, y_i), radius)\n', (4909, 4929), True, 'import skimage.draw as ski_draw\n'), ((767, 805), 'shapely.geometry.Point', 'shapely_geom.Point', (['(x + 400, y + 400)'], {}), '((x + 400, y + 400))\n', (785, 805), True, 'import shapely.geometry as shapely_geom\n'), ((2316, 2340), 'numpy.array', 'np.array', (['list_of_points'], {}), '(list_of_points)\n', (2324, 2340), True, 'import numpy as np\n'), ((3194, 3225), 'numpy.shape', 'np.shape', (['points_with_corridors'], {}), '(points_with_corridors)\n', (3202, 3225), True, 'import numpy as np\n'), ((692, 717), 'numpy.random.randint', 'np.random.randint', (['(0)', '(400)'], {}), '(0, 400)\n', (709, 717), True, 'import numpy as np\n'), ((718, 743), 'numpy.random.randint', 'np.random.randint', (['(0)', '(400)'], {}), '(0, 400)\n', (735, 743), True, 'import numpy as np\n'), ((2226, 2253), 'shapely.geometry.Point', 'shapely_geom.Point', (['point_i'], {}), '(point_i)\n', (2244, 2253), True, 'import shapely.geometry as shapely_geom\n'), ((895, 934), 'shapely.geometry.MultiPoint', 'shapely_geom.MultiPoint', (['list_of_points'], {}), '(list_of_points)\n', (918, 934), True, 'import shapely.geometry as shapely_geom\n'), ((3457, 3510), 'shapely.geometry.LineString', 'shapely_geom.LineString', (['[(x[0], y[0]), (x[1], y[1])]'], {}), '([(x[0], y[0]), (x[1], y[1])])\n', (3480, 3510), True, 'import shapely.geometry as shapely_geom\n')]
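# A hedged sketch of driving the landscape pipeline above directly from
# Python. The module name landscape.py and the parameter values are
# assumptions; radius=15 is chosen to match the pond radius hard-coded
# inside make_buffers, and ../pcolor.asc is only needed by the __main__ block.
import numpy as np

from landscape import create_landscape, make_corridors, make_buffers  # hypothetical module name

is_center, ponds, pond_ids = create_landscape(no_of_circles=8, radius=15)
corridors = make_corridors(is_center, ponds)
buffers = make_buffers(corridors, is_center)

# Patch codes follow the NetLogo colour convention used in the script:
# 55 = green matrix, 35 = brown corridor/buffer, 105 = blue pond.
print(np.unique(corridors))               # e.g. [ 35  55 105]
print(corridors.shape, buffers.shape)  # two 400 x 400 rasters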