import libsinan
from libsinan import handler, output, jsax


class VersionCheckTaskHandler(output.SimpleTaskHandler):
    def __init__(self):
        output.SimpleTaskHandler.__init__(self)
        self.version = None

    def object_end(self):
        """ We only get one object per request right now, so let's print it
        out when we get it """
        if self.task == "version":
            if self.event_type == 'info':
                self.version = self.desc
            return True
        else:
            return output.SimpleTaskHandler.object_end(self)


class VersionCheckHandler(handler.Handler):
    def handles(self, task):
        return task == "version"

    def handle(self, largs):
        self.do_request(largs, handle)


version = None


def handle(task, conn):
    global version
    if conn.status == 200:
        try:
            task_handler = VersionCheckTaskHandler()
            jsax.parse(conn, task_handler)
            version = task_handler.version
            return 0
        except ValueError, msg:
            print "Got an error back from sinan. Check the logs at ~/.sinan/logs/kernel.log"
    else:
        return 1
nilq/baby-python
python
import urllib.request, json

from .models import News_Update
from .models import Article

api_key = None
base_url = None
articles_url = None


def configure_request(app):
    global api_key, base_url, articles_url
    api_key = app.config['NEWS_API_KEY']
    base_url = app.config['NEWS_API_BASE_URL']
    articles_url = app.config['ARTICLES_BASE_URL']


def get_updates(category):
    '''
    function to get the json response of our request
    :param category:
    :return:
    '''
    get_updates_url = base_url.format(category, api_key)
    print(get_updates_url)

    with urllib.request.urlopen(get_updates_url) as url:
        get_updates_data = url.read()
        get_updates_response = json.loads(get_updates_data)

        update_results = []

        if get_updates_response['sources']:
            update_results = get_updates_response['sources']
            update_results = process_results(update_results)

    return update_results


def process_results(update_results_list):
    '''
    process update results and transform them into a list of objects
    '''
    update_results = []
    for update_content in update_results_list:
        id = update_content.get('id')
        name = update_content.get('name')
        category = update_content.get('category')
        url = update_content.get('url')

        update_object = News_Update(id, name, category, url)
        update_results.append(update_object)

    return update_results


def get_articles(id):
    get_articles_url = articles_url.format(id, api_key)
    print(get_articles_url)

    with urllib.request.urlopen(get_articles_url) as url:
        get_articles_data = url.read()
        get_articles_response = json.loads(get_articles_data)

        articles_results = None

        if get_articles_response['articles']:
            articles_results = get_articles_response['articles']
            articles_results = process_articles(articles_results)

    return articles_results


def process_articles(articles_list):
    articles_results = []
    for article_cont in articles_list:
        id = article_cont.get('id')
        author = article_cont.get('author')
        title = article_cont.get('title')
        description = article_cont.get('description')
        url = article_cont.get('url')
        image = article_cont.get('urlToImage')
        date = article_cont.get('publishedAt')

        articles_object = Article(id, author, title, description, url, image, date)
        articles_results.append(articles_object)

    return articles_results
nilq/baby-python
python
# -*- coding: UTF-8 -*-
from django.conf import settings
from django.shortcuts import render

from rest_framework import authentication, viewsets
from rest_framework.decorators import api_view, permission_classes
from rest_framework.permissions import IsAuthenticated, AllowAny
from rest_framework.response import Response
from rest_framework import status

from django.core.exceptions import ValidationError
from django.db.utils import IntegrityError

import jwt
from rest_framework_jwt.utils import jwt_payload_handler

from restfulexperiment.restful.models import User
from restfulexperiment.restful.serializers import UserSerializer


@api_view(['POST'])
@permission_classes((AllowAny, ))
def login(request):
    '''
    TODO Incomplete
    '''
    if request.method == 'POST':
        email = request.data.get('email')
        password = request.data.get('password')
        user = User.objects.get(email=email, password=password)
        payload = jwt_payload_handler(user)
        token = jwt.encode(payload, settings.SECRET_KEY)
        return Response({'token': token})
    return Response({'mensagem': 'todo'}, status=404)


@api_view(['GET'])
@permission_classes((AllowAny, ))
def user_collection(request):
    if request.method == 'GET':
        users = User.objects.all().order_by('-created')
        serializer = UserSerializer(users, many=True)
        return Response(serializer.data)


@api_view(['GET', 'POST'])
@permission_classes((AllowAny, ))
def user_element(request, pk=None):
    if request.method == 'GET':
        try:
            user = User.objects.get(pk=pk)
        except User.DoesNotExist:
            return Response(status=404)
        serializer = UserSerializer(user)
        return Response(serializer.data)
    if request.method == 'POST':
        data = {
            "name": request.data.get('name'),
            "email": request.data.get('email'),
            "password": request.data.get('password'),
            "phones": request.data.get('phones'),
        }
        serializer = UserSerializer(data=data)
        if serializer.is_valid():
            try:
                serializer.save()
                return Response(serializer.data, status=status.HTTP_201_CREATED)
            except IntegrityError:
                return Response({'mensagem': 'E-mail ja existente'},
                                status=status.HTTP_406_NOT_ACCEPTABLE)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
nilq/baby-python
python
""" Docstring for the app_test.py module. """ import pytest from app import app @pytest.fixture def client(): """ Method to yield a test client from app. """ app.config['TESTING'] = True client = app.test_client() yield client def test_ping(client): """ Function to test debug route. :param client: A testing client object. """ rep = client.get("/ping") assert 200 == rep.status_code def test_weather(client): """ Function to test weather route. :param client: A testing client object. """ rep = client.get('temperature/London,uk') assert 200 == rep.status_code def test_all_temperature(client): """ Function to test weather cached route. :param client: A testing client object. """ rep = client.get('temperature?max=4') assert 200 == rep.status_code
nilq/baby-python
python
#!/usr/bin/python

# An example vendordata server implementation for OpenStack Nova. With a giant
# nod in the direction of Chad Lung for his very helpful blog post at
# http://www.giantflyingsaucer.com/blog/?p=4701

import json
import sys

from webob import Response
from webob.dec import wsgify
from paste import httpserver
from paste.deploy import loadapp

from oslo_config import cfg
from oslo_log import log as logging

CONF = cfg.CONF
LOG = logging.getLogger(__name__)


@wsgify
def application(req):
    if req.environ.get('HTTP_X_IDENTITY_STATUS') != 'Confirmed':
        return Response('User is not authenticated', status=401)

    try:
        data = req.environ.get('wsgi.input').read()
        if not data:
            return Response('No data provided', status=500)

        # Get the data nova handed us for this request
        #
        # An example of this data:
        #     {
        #         "hostname": "foo",
        #         "image-id": "75a74383-f276-4774-8074-8c4e3ff2ca64",
        #         "instance-id": "2ae914e9-f5ab-44ce-b2a2-dcf8373d899d",
        #         "metadata": {},
        #         "project-id": "039d104b7a5c4631b4ba6524d0b9e981",
        #         "user-data": null
        #     }
        indata = json.loads(data)

        # We need to make up a response. This is where your interesting thing
        # would happen. However, I don't have anything interesting to do, so
        # I just return Carrie Fisher quotes instead.
        quotes = {'0': 'Instant gratification takes too long.',
                  '1': ('Resentment is like drinking poison and waiting for '
                        'the other person to die.'),
                  '2': ('I was street smart, but unfortunately the street was '
                        'Rodeo Drive.'),
                  '3': ('You can\'t find any true closeness in Hollywood, '
                        'because everybody does the fake closeness so well.'),
                  '4': ('As you get older, the pickings get slimmer, but the '
                        'people don\'t.'),
                  '5': ('There is no point at which you can say, "Well, I\'m '
                        'successful now. I might as well take a nap."'),
                  '6': ('I really love the internet. They say chat-rooms are '
                        'the trailer park of the internet but I find it '
                        'amazing.'),
                  '7': ('I don\'t think Christmas is necessarily about '
                        'things. It\'s about being good to one another, it\'s '
                        'about the Christian ethic, it\'s about kindness.'),
                  '8': ('I don\'t want my life to imitate art, I want my '
                        'life to be art.'),
                  '9': ('I am a spy in the house of me. I report back from '
                        'the front lines of the battle that is me. I am '
                        'somewhat nonplused by the event that is my life.'),
                  'a': 'I drowned in moonlight, strangled by my own bra.',
                  'b': 'Even in space there\'s a double standard for women.',
                  'c': ('Everyone drives somebody crazy. I just have a bigger '
                        'car.'),
                  'd': ('Sometimes you can only find Heaven by slowly '
                        'backing away from Hell.'),
                  'e': 'I\'m thinking of having my DNA fumigated.',
                  'f': 'Leia follows me like a vague smell.'
                  }
        outdata = {'carrie_says': quotes[indata['instance-id'][-1]]}
        return Response(json.dumps(outdata, indent=4, sort_keys=True))

    except Exception as e:
        return Response('Server error while processing request: %s' % e,
                        status=500)


def app_factory(global_config, **local_config):
    return application


def main():
    logging.register_options(CONF)

    # Make keystonemiddleware emit debug logs
    extra_default_log_levels = ['keystonemiddleware=DEBUG']
    logging.set_defaults(default_log_levels=(logging.get_default_log_levels() +
                                             extra_default_log_levels))

    # Parse our config
    CONF(sys.argv[1:])

    # Set us up to log as well
    logging.setup(CONF, 'vendordata')

    # Start the web server
    wsgi_app = loadapp('config:paste.ini', relative_to='.')
    httpserver.serve(wsgi_app, host='0.0.0.0', port=8888)


if __name__ == '__main__':
    main()
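
# A quick manual smoke test for this server, assuming the paste.ini pipeline
# passes the identity header through unchanged (all values here are
# illustrative, not from the original post):
#
#   import requests
#   r = requests.post('http://localhost:8888/',
#                     headers={'X-Identity-Status': 'Confirmed'},
#                     json={'instance-id': '2ae914e9-f5ab-44ce-b2a2-dcf8373d899d'})
#   print(r.text)  # -> {"carrie_says": ...} keyed off the id's last character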
nilq/baby-python
python
# PYTHON 3.6
# coding: utf8
from tkinter import *


class Aplication(Frame):
    def __init__(self, master=None):
        Frame.__init__(self, master)
        self.msg = Label(self, text='Hello World')
        self.msg.pack()
        self.bye = Button(self, text="Bye", command=self.quit)
        self.bye.pack()
        self.pack()


app = Aplication()
app.master.title('Exemplo')
app.master.geometry('200x200+100+100')

mainloop()
nilq/baby-python
python
# %matplotlib notebook
import os, re, sys, urllib, requests, base64, IPython, io, pickle, glob
import itertools as itt
import numpy as np
import subprocess as sb
import pandas as pd
import matplotlib.pyplot as plt
import matplotlib.image as mpimg
import roadrunner
from bs4 import BeautifulSoup as BS
from IPython.display import Image, display
from matplotlib import rcParams
import analyzerTools as AT


def run_test(analyzer, test_no, t_end=1000, atomize=False):
    if analyzer.run_single_test(test_no, t_end=100, atomize=atomize):
        print("run successful {}".format(test_no))
        analyzer.plot_results(test_no, legend=True, save_fig=True)
    # if analyzer.run_old_test(test_no, t_end=100, atomize=atomize):
    #     print("run successful {}".format(test_no))
    #     analyzer.plot_old_results(test_no, legend=False, save_fig=True)
    else:
        print("run failed {}".format(test_no))


def update_results(results, fname="analyzer.pkl"):
    if os.path.isfile(fname):
        with open(fname, "rb") as f:
            old_results = pickle.load(f)
        for key in results.keys():
            old_results[key] = results[key]
        with open(fname, "wb") as f:
            pickle.dump(old_results, f)
    else:
        with open(fname, "wb") as f:
            pickle.dump(results, f)
    print("updated results")
    return True


# All the paths we need
# The BNG2.pl file for bionetgen runs
bng_path = "/home/monoid/apps/BioNetGen-2.5.0/BNG2.pl"
# This is the python file that can be called from the command line
sbml_translator_path = "/home/monoid/Development/fresh_atomizer_checks/atomizer/SBMLparser/sbmlTranslator.py"
# if you give this, the ATOMIZER ANALYZER 5000 will import atomizer and run internally
# translator_package_path = "/home/monoid/Development/fresh_atomizer_checks/atomizer/SBMLparser"
translator_package_path = None
# This is necessary for atomizer, has default naming conventions and a lot more
# this path will be sym linked to everywhere you want to run translator under
config_path = "/home/monoid/Development/fresh_atomizer_checks/atomizer/SBMLparser/config"
# the path to the folder that contains 5 zero padded folders for each test
tests_path = "/home/monoid/Development/fresh_atomizer_checks/atomizer/SBMLparser/test/curated"
# Now we also add COPASI PATH!!_!_
copasi_path = "/home/monoid/apps/copasi/4.27/bin/CopasiSE"

# change directory to where we want to run the tests
os.chdir("/home/monoid/Development/fresh_atomizer_checks/atomizer/SBMLparser/test/analyzerTools")

# The analyzer setup
ba = AT.BiomodelAnalyzer(bng_path, sbml_translator_path, config_path,
                         tests_path,
                         translator_import=translator_package_path,
                         copasi_path=copasi_path)

# test_no = 3
# imgdats = ba.load_test_data(test_no)
# print(len(imgdats))
# Image(imgdats[0])
#
# if ba.run_single_test(test_no, t_end=100):
#     ba.plot_results(test_no, legend=False)

# Let's re-run everything
# tests = list(range(419,730))
tests = list(range(1, 915))

known_issues = set([24,25,34,154,155,196,201,589,613,668,669,696,468,  # Not implemented
                    643,644,645,  # Complex "i" is used in function/parameter
                    63,245,248,305,556,575,578,542,  # rule named used as parameter
                    342,429,457,547,570,627,637,638,  # compartment used as parameter
                    527,562,592,593,596,723,250,  # Actually broken, even in Copasi
                    304,324,330,331,341,343,345,349,367,371,374,377,381,533,548,
                    549,551,618,642,670,671,680,682,684,118,252,673,531,532,555,
                    561,  # no reactions
                    306,307,308,309,310,311,388,390,391,393,409,
                    428,505,512,528,557,566,567,719,641,71,90,173,
                    253,  # assignment rules used in reactions
                    610,  # function defs for v16/v17
                    558,568,674,722,412,445,302,208,268,51,55,162,180,179,579,
                    691,465,466,238,312,538,603,604,605,215,  # Uses time
                    635,636,  # Uses not only time but also encoded strings for parameters
                    119,  # single reaction, not really suitable for translation
                    47,483,484,486,487,  # initial states should result in no reactions,
                    164,165,167,326,375,400,554,577,664,672,693,698,
                    234,237,286,450,  # Uses piecewise definitions
                    396,398,507,522,705,
                    499,474,  # SBML modeller is careless and uses species that should be params
                    607,  # Function not defined properly/links to another function
                    319,206,39,145,353,385,392,463,608,470,472,  # non-integer stoichiometry
                    161,182,239,  # true multi-compartment model
                    271  # multi-compartment and the modeller has issues
                    ])

# Need to figure out, mostly CVODE
list_of_fails = set([246,336,378,383,384,387,438,9,107,123,183,192,269,
                     279,292,328,617,678,606,  # new ones
                     616,  # Legitimate bug: if species name is very simple AND rate constant
                           # only dependent on the species concentration AND we end up generating
                           # an observable with the same name as species name, then BNGL thinks
                           # we are giving obs name as the rate constant, leading to a bug
                     255,  # Circular dependency in funcs?
                     401,402,403,  # if func messes with func ordering
                     559,  # can't load copasi result
                     64,  # Due to website addition? also in too long set
                     232,  # BNG takes too long?
                     172,176,177  # doesn't end up translating, takes a long time?
                     ])

too_long = set([64,574,426,70,217,247,503,469,471,473,506,451,595,  # WAAAY TOO LONG - debug
                332,334,  # ATOMIZER BREAKS THESE
                217,247,293,426,469  # too long when atomized
                ])

################# NEW CHECKS ##############
# A complete new set of checks to see the latest state of the tool as we are
# writing the manuscript.
new_checks = set([64,217,  # too long
                  63,  # fairly long but does complete
                  119,465,468,  # no data?
                  247,269,469,470,471,472,473,474,
                  503,505,506,595,606,608,835,863  # long, didn't check if completes
                  ])

################# RUN FAILS ###############
run_fails = set([9,24,25,34,51,55,107,
                 123,154,155,162,164,165,167,172,176,177,179,180,183,192,
                 201,208,215,232,234,237,238,245,246,248,250,255,268,279,286,292,
                 302,305,312,326,328,332,334,336,353,375,383,384,385,387,396,398,
                 400,401,402,403,412,426,429,438,445,450,451,457,463,466,483,484,
                 486,487,499,507,522,527,531,532,538,542,547,554,555,556,558,559,
                 561,562,574,575,577,578,579,589,592,593,599,600,602,607,610,617,
                 627,635,636,637,638,643,644,645,664,668,669,672,673,674,675,678,
                 687,688,692,693,696,698,705,722,723,730,731,748,749,757,759,760,
                 763,764,766,775,801,802,808,815,824,826,833,837,840,841,849,851,
                 858,859,876,879,880  # run_failed
                 ])

################# EVENTS #################
w_event = set([1,7,56,77,81,87,88,95,96,97,101,104,109,  # models with events
               111,117,120,121,122,124,125,126,127,128,129,130,131,  # models with events
               132,133,134,135,136,137,139,140,141,142,144,148,149,  # models with events
               152,153,158,186,187,188,189,193,194,195,196,227,235,  # models with events
               241,244,256,265,281,285,287,297,301,316,317,318,327,  # models with events
               337,338,339,340,342,344,404,408,422,436,437,439,479,  # models with events
               480,488,493,494,496,497,534,535,536,537,540,541,563,  # models with events
               570,571,597,598,601,612,613,620,621,628,632,634,650,  # models with events
               659,681,695,699,702,706,711,718,727,734,735,736,786,  # models with events
               789,791,794,806,814,816,817,818,820,822,825,829,834,  # models with events
               856,860,862,864,901])  # models with events
################# END CHECKS ##############

all_issues = known_issues.union(w_event)
all_issues = all_issues.union(list_of_fails)

# run tests
for test_no in tests:
    # if test_no in all_issues:
    #     continue
    if test_no in w_event or test_no in new_checks or test_no in run_fails:
        continue
    if os.path.isfile("../curated/BIOMD{0:010d}.xml".format(test_no)):
        run_test(ba, test_no, t_end=100, atomize=False)
        # update_results(ba.all_results)
    else:
        print("number {} doesn't exist".format(test_no))
nilq/baby-python
python
from datetime import datetime, timedelta

from freezegun import freeze_time

from pyobjdb import PyObjDB


def test_basic(tmp_path):
    db = PyObjDB(str(tmp_path / 'test.db'))

    db.put('key_str', 'foo')
    assert db.get('key_str') == 'foo'
    assert db.get(b'key_str') == 'foo'
    db.put('key_str', 'bar')
    assert db.get('key_str') == 'bar'

    db.put('key_int', 42)
    assert db.get('key_int') == 42

    db.put('key_float', 4.125)
    assert db.get('key_float') == 4.125

    db.put('key_list', ['foo', 42, 4.125])
    assert db.get('key_list') == ['foo', 42, 4.125]

    db.put('key_tuple', ('foo', 42, 4.125))

    db.put('key_dict', {'foo': 42, 'bar': 4.125})
    assert db.get('key_dict') == {'foo': 42, 'bar': 4.125}

    db.close()


def test_reopen(tmp_path):
    db1 = PyObjDB(str(tmp_path / 'test.db'))
    db1.put('foo', 'bar')
    assert db1.get('foo') == 'bar'
    db1.close()

    db2 = PyObjDB(str(tmp_path / 'test.db'))
    assert db2.get('foo') == 'bar'
    db2.close()


def test_ttl(tmp_path):
    db = PyObjDB(str(tmp_path / 'test.db'))

    with freeze_time(datetime.utcnow()) as frozen_time:
        db.put('foo', 'bar', ttl=5)
        assert db.get('foo') == 'bar'

        frozen_time.tick(timedelta(seconds=3))
        assert db.get('foo') == 'bar'

        frozen_time.tick(timedelta(seconds=5))
        assert db.get('foo') is None


def test_delete(tmp_path):
    db = PyObjDB(str(tmp_path / 'test.db'))

    db.put('foo', 'bar')
    assert db.get('foo') == 'bar'

    db.delete('foo')
    assert db.get('foo') is None


class Greeter(object):
    def __init__(self, name):
        self.name = name

    def get_greeting(self):
        return f'Hello, {self.name}!'


def test_custom_object(tmp_path):
    db = PyObjDB(str(tmp_path / 'test.db'))

    obj1 = Greeter('Kermit')
    db.put('hello_kermit', obj1)

    obj2 = db.get('hello_kermit')
    assert isinstance(obj2, Greeter)
    assert obj2.name == 'Kermit'


def test_cleanup(tmp_path):
    db = PyObjDB(
        str(tmp_path / 'test.db'),
        cleanup_interval=60,
        compaction_interval=3600,
    )

    with freeze_time(datetime.utcnow()) as frozen_time:
        db.put('foo', 'bar', ttl=5)
        db.put('baz', 'qux', ttl=7)
        db.put('wibble', 'wobble', ttl=3600)
        assert db.get('foo') == 'bar'

        frozen_time.tick(timedelta(seconds=3))
        assert db.get('foo') == 'bar'

        frozen_time.tick(timedelta(seconds=5))
        assert db.get('foo') is None
        assert db.cleanup() == 0
        assert db.get('wibble') == 'wobble'

        frozen_time.tick(timedelta(seconds=120))
        assert db.cleanup() == 2
        assert db.get('wibble') == 'wobble'

        frozen_time.tick(timedelta(seconds=7200))
        db.cleanup()
nilq/baby-python
python
""" This file contains tests for partition explainer. """ import tempfile import pytest import numpy as np import shap def test_serialization_partition(): """ This tests the serialization of partition explainers. """ AutoTokenizer = pytest.importorskip("transformers").AutoTokenizer AutoModelForSeq2SeqLM = pytest.importorskip("transformers").AutoModelForSeq2SeqLM tokenizer = AutoTokenizer.from_pretrained("Helsinki-NLP/opus-mt-en-es") model = AutoModelForSeq2SeqLM.from_pretrained("Helsinki-NLP/opus-mt-en-es") # define the input sentences we want to translate data = [ "In this picture, there are four persons: my father, my mother, my brother and my sister.", "Transformers have rapidly become the model of choice for NLP problems, replacing older recurrent neural network models" ] explainer_original = shap.Explainer(model, tokenizer) shap_values_original = explainer_original(data) temp_serialization_file = tempfile.TemporaryFile() # Serialization explainer_original.save(temp_serialization_file) temp_serialization_file.seek(0) # Deserialization explainer_new = shap.Explainer.load(temp_serialization_file) temp_serialization_file.close() shap_values_new = explainer_new(data) assert np.array_equal(shap_values_original[0].base_values,shap_values_new[0].base_values) assert np.array_equal(shap_values_original[0].values,shap_values_new[0].values) assert type(explainer_original) == type(explainer_new) assert type(explainer_original.masker) == type(explainer_new.masker)
nilq/baby-python
python
""" This module handles teams - collections of Characters """ from maelstrom.util.serialize import AbstractJsonSerialable import functools class Team(AbstractJsonSerialable): """ stores and manages Characters """ def __init__(self, **kwargs): """ Required kwargs: - name: str - members: list of Characters. Expects at least 1 member """ super().__init__(**dict(kwargs, type="Team")) self.name = kwargs["name"] self.members = [] self.membersRemaining = [] for member in kwargs["members"]: self.addMember(member) self.addSerializedAttributes("name", "members") def __str__(self): return self.name def addMember(self, member: "Character"): if member in self.members: raise Exception(f'cannot add duplicate member {str(member)}') member.team = self self.members.append(member) self.membersRemaining.append(member) def getXpGiven(self)->int: """ provides how much XP this Team provides when encountered """ totalLevel = functools.reduce(lambda xp, member: member.level + xp, self.members, 0) return int(10 * totalLevel / len(self.members)) def eachMember(self, consumer: "function(Character)"): """ calls the given consumer on each member of this Team """ for member in self.members: consumer(member) def eachMemberRemaining(self, consumer: "function(Character)"): """ calls the given consumer on each member of this Team who isn't out of the game """ for member in self.membersRemaining: consumer(member) def getMembersRemaining(self)->"List<Character>": """ returns a shallow copy of this Team's remaining members """ return [member for member in self.membersRemaining] def initForBattle(self): """ this method must be called at the start of each Battle """ self.membersRemaining.clear() for member in self.members: # can't use lambda with "each" here member.initForBattle() self.membersRemaining.append(member) self.updateMembersRemaining() # updates ordinals def isDefeated(self)->bool: return len(self.membersRemaining) == 0 def updateMembersRemaining(self)->"List<str>": msgs = [] newList = [] nextOrdinal = 0 # records which index of the array each member is in for member in self.membersRemaining: if member.isKoed(): msgs.append(f'{member.name} is out of the game!') else: newList.append(member) member.ordinal = nextOrdinal nextOrdinal += 1 member.update() self.membersRemaining = newList return msgs
nilq/baby-python
python
# tifffile/__main__.py

"""Tifffile package command line script."""

import sys

from .tifffile import main

sys.exit(main())
nilq/baby-python
python
import pygame
import sys
import numpy as np

pygame.init()

WIDTH = 600
HEIGHT = 600
LINE_WIDTH = 15
WIN_LINE_WIDTH = 15
BOARD_ROWS = 3
BOARD_COLS = 3
SQUARE_SIZE = 200
CIRCLE_RADIUS = 60
CIRCLE_WIDTH = 15
CROSS_WIDTH = 25
SPACE = 55

BG_COLOR = (255, 0, 0)
LINE_COLOR = (0, 0, 0)
CIRCLE_COLOR = (239, 231, 200)
CROSS_COLOR = (0, 0, 0)

screen = pygame.display.set_mode((WIDTH, HEIGHT))
pygame.display.set_caption('TIC TAC TOE')
screen.fill(BG_COLOR)

board = np.zeros((BOARD_ROWS, BOARD_COLS))


def draw_lines():
    pygame.draw.line(screen, LINE_COLOR, (0, SQUARE_SIZE),
                     (WIDTH, SQUARE_SIZE), LINE_WIDTH)
    pygame.draw.line(screen, LINE_COLOR, (0, 2 * SQUARE_SIZE),
                     (WIDTH, 2 * SQUARE_SIZE), LINE_WIDTH)
    pygame.draw.line(screen, LINE_COLOR, (SQUARE_SIZE, 0),
                     (SQUARE_SIZE, HEIGHT), LINE_WIDTH)
    pygame.draw.line(screen, LINE_COLOR, (2 * SQUARE_SIZE, 0),
                     (2 * SQUARE_SIZE, HEIGHT), LINE_WIDTH)


def draw_figures():
    for row in range(BOARD_ROWS):
        for col in range(BOARD_COLS):
            if board[row][col] == 1:
                pygame.draw.circle(screen, CIRCLE_COLOR,
                                   (int(col * SQUARE_SIZE + SQUARE_SIZE // 2),
                                    int(row * SQUARE_SIZE + SQUARE_SIZE // 2)),
                                   CIRCLE_RADIUS, CIRCLE_WIDTH)
            elif board[row][col] == 2:
                pygame.draw.line(screen, CROSS_COLOR,
                                 (col * SQUARE_SIZE + SPACE,
                                  row * SQUARE_SIZE + SQUARE_SIZE - SPACE),
                                 (col * SQUARE_SIZE + SQUARE_SIZE - SPACE,
                                  row * SQUARE_SIZE + SPACE),
                                 CROSS_WIDTH)
                pygame.draw.line(screen, CROSS_COLOR,
                                 (col * SQUARE_SIZE + SPACE,
                                  row * SQUARE_SIZE + SPACE),
                                 (col * SQUARE_SIZE + SQUARE_SIZE - SPACE,
                                  row * SQUARE_SIZE + SQUARE_SIZE - SPACE),
                                 CROSS_WIDTH)


def mark_square(row, col, player):
    board[row][col] = player


def available_square(row, col):
    return board[row][col] == 0


def is_board_full():
    for row in range(BOARD_ROWS):
        for col in range(BOARD_COLS):
            if board[row][col] == 0:
                return False
    return True


def check_win(player):
    # vertical win check
    for col in range(BOARD_COLS):
        if board[0][col] == player and board[1][col] == player and board[2][col] == player:
            draw_vertical_winning_line(col, player)
            return True

    # horizontal win check
    for row in range(BOARD_ROWS):
        if board[row][0] == player and board[row][1] == player and board[row][2] == player:
            draw_horizontal_winning_line(row, player)
            return True

    # asc diagonal win check
    if board[2][0] == player and board[1][1] == player and board[0][2] == player:
        draw_asc_diagonal(player)
        return True

    # desc diagonal win check
    if board[0][0] == player and board[1][1] == player and board[2][2] == player:
        draw_desc_diagonal(player)
        return True

    return False


def draw_vertical_winning_line(col, player):
    posX = col * SQUARE_SIZE + SQUARE_SIZE // 2

    if player == 1:
        color = CIRCLE_COLOR
    elif player == 2:
        color = CROSS_COLOR

    pygame.draw.line(screen, color, (posX, 15), (posX, HEIGHT - 15),
                     LINE_WIDTH)


def draw_horizontal_winning_line(row, player):
    posY = row * SQUARE_SIZE + SQUARE_SIZE // 2

    if player == 1:
        color = CIRCLE_COLOR
    elif player == 2:
        color = CROSS_COLOR

    pygame.draw.line(screen, color, (15, posY), (WIDTH - 15, posY),
                     WIN_LINE_WIDTH)


def draw_asc_diagonal(player):
    if player == 1:
        color = CIRCLE_COLOR
    elif player == 2:
        color = CROSS_COLOR

    pygame.draw.line(screen, color, (15, HEIGHT - 15), (WIDTH - 15, 15),
                     WIN_LINE_WIDTH)


def draw_desc_diagonal(player):
    if player == 1:
        color = CIRCLE_COLOR
    elif player == 2:
        color = CROSS_COLOR

    pygame.draw.line(screen, color, (15, 15), (WIDTH - 15, HEIGHT - 15),
                     WIN_LINE_WIDTH)


def restart():
    screen.fill(BG_COLOR)
    draw_lines()
    for row in range(BOARD_ROWS):
        for col in range(BOARD_COLS):
            board[row][col] = 0


draw_lines()

player = 1
game_over = False

while True:
    for event in pygame.event.get():
        if event.type == pygame.QUIT:
            sys.exit()

        if event.type == pygame.MOUSEBUTTONDOWN and not game_over:
            mouseX = event.pos[0]  # x
            mouseY = event.pos[1]  # y

            clicked_row = int(mouseY // SQUARE_SIZE)
            clicked_col = int(mouseX // SQUARE_SIZE)

            if available_square(clicked_row, clicked_col):
                mark_square(clicked_row, clicked_col, player)
                if check_win(player):
                    game_over = True
                player = player % 2 + 1

                draw_figures()

        if event.type == pygame.KEYDOWN:
            if event.key == pygame.K_r:
                restart()
                player = 1
                game_over = False

    pygame.display.update()
nilq/baby-python
python
from nnrecsys.models.metrics import mean_reciprocal_rank
import tensorflow as tf


def model_fn(features, labels, mode, params):
    print(features)
    input_layer, sequence_length = tf.contrib.feature_column.sequence_input_layer(
        features, params['feature_columns'])

    with tf.name_scope('encoder'):
        def rnn_cell():
            with tf.name_scope('recurrent_layer'):
                cell = tf.nn.rnn_cell.GRUCell(params['rnn_units'],
                                              activation=params['hidden_activation'])
                drop_cell = tf.nn.rnn_cell.DropoutWrapper(
                    cell, output_keep_prob=params['dropout'])
                return drop_cell

        stacked_cell = tf.nn.rnn_cell.MultiRNNCell(
            [rnn_cell() for _ in range(params['rnn_layers'])])

        x, states = tf.nn.dynamic_rnn(stacked_cell,
                                      inputs=input_layer,
                                      dtype=tf.float32,
                                      sequence_length=sequence_length)

        tf.summary.histogram('rnn_outputs', x)
        tf.summary.histogram('rnn_state', states)

        for variable in stacked_cell.variables:
            tf.summary.histogram('gru_vars/' + variable.name, variable)

    logits = tf.layers.dense(x, params['n_items'], activation=None)

    if mode == tf.estimator.ModeKeys.PREDICT:
        scores, predicted_items = tf.nn.top_k(logits, k=params['k'],
                                              sorted=True, name='top_k')
        predictions = {
            'scores': scores,
            'item_ids': predicted_items,
        }
        return tf.estimator.EstimatorSpec(mode, predictions=predictions)

    padding_mask = tf.sequence_mask(sequence_length, dtype=tf.float32)
    loss = tf.contrib.seq2seq.sequence_loss(logits, labels,
                                            weights=padding_mask,
                                            name='seq_loss')

    recall_at_k = tf.metrics.recall_at_k(labels, logits, name='recall_at_k',
                                         k=params['k'])

    reshaped_logits = tf.reshape(logits, (-1, logits.shape[-1]))
    reshaped_labels = tf.reshape(labels, (-1,))
    one_hot_labels = tf.one_hot(reshaped_labels, depth=logits.shape[-1])
    mrr = mean_reciprocal_rank(one_hot_labels, reshaped_logits,
                               topn=params['k'], name='mrr_at_k')

    metrics = {'recall_at_k': recall_at_k, 'mrr': mrr}
    tf.summary.scalar('recall_at_k', recall_at_k[1])

    if mode == tf.estimator.ModeKeys.EVAL:
        return tf.estimator.EstimatorSpec(
            mode, loss=loss, eval_metric_ops=metrics)

    optimizer = tf.train.AdagradOptimizer(learning_rate=0.1)
    train_op = optimizer.minimize(loss,
                                  global_step=tf.train.get_global_step())
    return tf.estimator.EstimatorSpec(mode, loss=loss, train_op=train_op)
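
# A minimal sketch of plugging this model_fn into the (TF1) Estimator API;
# every value in `params` below is an assumption that simply mirrors the keys
# read above, not a configuration from the original project:
#
#   estimator = tf.estimator.Estimator(
#       model_fn=model_fn,
#       params={'feature_columns': my_seq_columns, 'rnn_units': 100,
#               'hidden_activation': tf.nn.tanh, 'dropout': 0.8,
#               'rnn_layers': 1, 'n_items': n_items, 'k': 20})
#   estimator.train(input_fn=my_input_fn)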
nilq/baby-python
python
import numpy as np
import torch


def default_collate_fn(batch):
    batch, targets = zip(*batch)
    batch = np.stack(batch, axis=0).astype(np.float32)
    batch = torch.from_numpy(batch).permute(0, 3, 1, 2).contiguous()
    for i, target in enumerate(targets):
        for k, v in target.items():
            if isinstance(v, np.ndarray):
                targets[i][k] = torch.from_numpy(v)
    return batch, targets
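
if __name__ == "__main__":
    # Tiny demo of the collate function on fake detection-style samples;
    # the (HWC image, target-dict) shapes here are assumptions, not taken
    # from the original module.
    from torch.utils.data import DataLoader

    samples = [
        (np.zeros((32, 32, 3), dtype=np.uint8),
         {"boxes": np.array([[0, 0, 4, 4]], dtype=np.float32)})
        for _ in range(4)
    ]
    loader = DataLoader(samples, batch_size=2, collate_fn=default_collate_fn)
    images, targets = next(iter(loader))
    print(images.shape)          # torch.Size([2, 3, 32, 32]) -- NCHW float32
    print(targets[0]["boxes"])   # ndarray converted to a torch tensor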
nilq/baby-python
python
from random import randint

from django.contrib.auth.models import User

from .models import Analytic, Group


def get_client_ip(request):
    x_forwarded_for = request.META.get("HTTP_X_FORWARDED_FOR")
    if x_forwarded_for:
        ip = x_forwarded_for.split(",")[0]
    else:
        ip = request.META.get("REMOTE_ADDR")
    return ip


def log_analytic(request):
    new_analytic = Analytic(
        querystring=request.GET.urlencode(),
        ip=get_client_ip(request),
        path=request.path,
    )
    if request.user.is_authenticated:
        new_analytic.user = User.objects.get(id=request.user.id)
    new_analytic.save()


def get_group_route():
    new_route = 0
    while True:
        route_candidate = randint(1000, 9999)
        if not Group.objects.filter(route=route_candidate).exists():
            new_route = route_candidate
            break
    return str(new_route)
nilq/baby-python
python
from __future__ import absolute_import, print_function, division
import os

import numpy

import theano
from theano.compat import PY3
from theano import config
from theano.compile import DeepCopyOp
from theano.misc.pkl_utils import CompatUnpickler

from .config import test_ctx_name
from .test_basic_ops import rand_gpuarray
from ..type import GpuArrayType, gpuarray_shared_constructor

import pygpu


def test_deep_copy():
    a = rand_gpuarray(20, dtype='float32')
    g = GpuArrayType(dtype='float32', broadcastable=(False,))('g')

    f = theano.function([g], g)

    assert isinstance(f.maker.fgraph.toposort()[0].op, DeepCopyOp)

    res = f(a)

    assert GpuArrayType.values_eq(res, a)


def test_values_eq_approx():
    a = rand_gpuarray(20, dtype='float32')
    assert GpuArrayType.values_eq_approx(a, a)
    b = a.copy()
    b[0] = numpy.asarray(b[0]) + 1.
    assert not GpuArrayType.values_eq_approx(a, b)
    b = a.copy()
    b[0] = -numpy.asarray(b[0])
    assert not GpuArrayType.values_eq_approx(a, b)


def test_specify_shape():
    a = rand_gpuarray(20, dtype='float32')
    g = GpuArrayType(dtype='float32', broadcastable=(False,))('g')
    f = theano.function([g], theano.tensor.specify_shape(g, [20]))
    f(a)


def test_filter_float():
    theano.compile.shared_constructor(gpuarray_shared_constructor)
    try:
        s = theano.shared(numpy.array(0.0, dtype='float32'),
                          target=test_ctx_name)
        theano.function([], updates=[(s, 0.0)])
    finally:
        del theano.compile.sharedvalue.shared.constructors[-1]


def test_unpickle_gpuarray_as_numpy_ndarray_flag0():
    """ Test when pygpu isn't there for unpickle are in test_pickle.py"""
    oldflag = config.experimental.unpickle_gpu_on_cpu
    config.experimental.unpickle_gpu_on_cpu = False

    try:
        testfile_dir = os.path.dirname(os.path.realpath(__file__))
        fname = 'GpuArray.pkl'

        with open(os.path.join(testfile_dir, fname), 'rb') as fp:
            if PY3:
                u = CompatUnpickler(fp, encoding="latin1")
            else:
                u = CompatUnpickler(fp)
            mat = u.load()
            assert isinstance(mat, pygpu.gpuarray.GpuArray)
            assert numpy.asarray(mat)[0] == -42.0

    finally:
        config.experimental.unpickle_gpu_on_cpu = oldflag
nilq/baby-python
python
import os
import sys

myfolder = os.path.dirname(os.path.abspath(__file__))


def rpienv_source():
    import subprocess
    if not os.path.exists(str(myfolder) + '/.rpienv'):
        print("[ ENV ERROR ] " + str(myfolder) + "/.rpienv path does not exist!")
        sys.exit(1)
    command = ['bash', '-c', 'source ' + str(myfolder) + '/.rpienv -s && env']
    proc = subprocess.Popen(command, stdout=subprocess.PIPE)
    for line in proc.stdout:
        if type(line) is bytes:
            line = line.decode("utf-8")
        try:
            name = line.partition("=")[0]
            value = line.partition("=")[2]
            # `unicode` only exists on Python 2; the except below swallows
            # the NameError this raises on Python 3
            if type(value) is unicode:
                value = value.encode('ascii', 'ignore')
            value = value.rstrip()
            os.environ[name] = value
        except Exception as e:
            if "name 'unicode' is not defined" != str(e):
                print(e)
    proc.communicate()


rpienv_source()

lib_path = os.path.join(os.path.dirname(os.environ['CONFIGHANDLERPY']))
sys.path.append(lib_path)
import ConfigHandler

SECTION = "HALARM"
CFG = None


def get_confighandler_object():
    global CFG
    if CFG is None:
        CFG = ConfigHandler.init(validate_print=False)
    return CFG


def get_HALARM_value_by_key(option):
    global SECTION
    cfg = get_confighandler_object()
    value = cfg.get(SECTION, option, reparse=False)
    return value


if __name__ == "__main__":
    print(get_HALARM_value_by_key("cpu_max_temp_alarm_celsius"))
nilq/baby-python
python
# ===============================================================================
# Copyright 2020 ross
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================
from enable.component_editor import ComponentEditor
from traits.api import Instance, HasTraits, Float, List, Int, on_trait_change, Button
from traitsui.api import UItem, TableEditor, HGroup, HSplit
from traitsui.table_column import ObjectColumn

from pychron.canvas.canvas2D.irradiation_canvas import IrradiationCanvas
from pychron.core.helpers.traitsui_shortcuts import okcancel_view
from pychron.core.pychron_traits import RestrictedStr
from pychron.envisage.icon_button_editor import icon_button_editor
from pychron.loggable import Loggable


class Position(HasTraits):
    id = Int
    x = Float
    y = Float
    radius = Float(0.1)

    def totuple(self):
        return self.x, self.y, self.radius, str(self.id)

    def dump(self, inches=False):
        x, y = self.x, self.y
        if inches:
            x /= 25.4
            y /= 25.4
        return '{},{:0.5f},{:0.5f}'.format(self.id, x, y)


class TrayMaker(Loggable):
    canvas = Instance(IrradiationCanvas, ())
    positions = List
    add_position_button = Button
    refresh_button = Button
    names = List
    name = RestrictedStr(name='names')
    save_button = Button

    def gen(self):
        rows = [(5, -2), (9, -4), (13, -6), (15, -7), (17, -8), (19, -9),
                (19, -9), (21, -10), (21, -10), (23, -11), (23, -11),
                (23, -11), (23, -11), (23, -11), (21, -10), (21, -10),
                (19, -9), (19, -9), (17, -8), (15, -7), (13, -6), (9, -4),
                (5, -2)]
        space = 2
        oy = 24
        ps = []
        for ri, (rc, ox) in enumerate(rows):
            y = oy - ri * space
            for ji in range(rc):
                x = (ox * space) + ji * space
                p = Position(x=x, y=y, radius=1)
                ps.append(p)
                print(x, y)

        self.positions = ps

    def holes(self):
        return [p.totuple() for p in self.positions]

    def _add_position_button_fired(self):
        p = Position()
        self.positions.append(p)

    def _save_button_fired(self):
        out = 'out.txt'
        with open(out, 'w') as wfile:
            wfile.write('circle, 0.02\n')
            wfile.write('\n\n')
            for p in self.positions:
                wfile.write('{}\n'.format(p.dump(inches=True)))

    @on_trait_change('positions[], positions:[x,y]')
    def _positions_changed(self):
        for i, p in enumerate(self.positions):
            p.id = i + 1
        self.canvas.load_scene(self.holes())
        self.canvas.invalidate_and_redraw()

    def traits_view(self):
        cols = [ObjectColumn(name='id'),
                ObjectColumn(name='x'),
                ObjectColumn(name='y')]
        v = okcancel_view(HGroup(icon_button_editor('add_position_button', 'add'),
                                 icon_button_editor('save_button', 'save')),
                          UItem('name'),
                          HSplit(UItem('positions', width=0.25,
                                       editor=TableEditor(columns=cols)),
                                 UItem('canvas', width=0.75,
                                       editor=ComponentEditor())),
                          width=900,
                          height=900,
                          )
        return v


if __name__ == '__main__':
    t = TrayMaker()
    t.gen()
    t.names = ['a', 'bc']
    t.configure_traits()
# ============= EOF =============================================
nilq/baby-python
python
from lib import action


class ConsulParseNodesAction(action.ConsulBaseAction):
    def run(self, data):
        nodes = []
        # Loop through the returned entries and collect the node records
        return nodes
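
# A hedged sketch of what the parsing loop could look like, assuming `data`
# holds a Consul catalog response (a list of dicts carrying "Node" and
# "Address" keys); the exact payload shape depends on how the pack wires
# this action upstream:
#
#   for entry in data or []:
#       nodes.append({"name": entry.get("Node"),
#                     "address": entry.get("Address")})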
nilq/baby-python
python
from fastapi import FastAPI

import routes
from middleware import auth_check
from starlette.middleware.base import BaseHTTPMiddleware

app = FastAPI()

# TO RUN THE APP, SPECIFY THIS INSTANCE OF THE FastAPI class:
# uvicorn file_name:instance_name --reload
app.include_router(routes.router)
app.add_middleware(BaseHTTPMiddleware, dispatch=auth_check)
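
# `auth_check` lives in the local `middleware` module and is not shown here.
# With BaseHTTPMiddleware it must be an async callable taking
# (request, call_next); a minimal sketch (the header check is an assumption):
#
#   from starlette.responses import JSONResponse
#
#   async def auth_check(request, call_next):
#       if "Authorization" not in request.headers:
#           return JSONResponse({"detail": "unauthorized"}, status_code=401)
#       return await call_next(request)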
nilq/baby-python
python
import os
import os.path as op
from ..externals.six.moves import cPickle as pickle
import glob
import warnings

import numpy as np
from numpy.testing import assert_array_equal, assert_array_almost_equal
from nose.tools import assert_true, assert_raises

from mne.datasets import sample
from mne import (label_time_courses, read_label, stc_to_label,
                 read_source_estimate, read_source_spaces, grow_labels,
                 labels_from_parc, parc_from_labels)
from mne.label import Label
from mne.utils import requires_mne, run_subprocess, _TempDir, requires_sklearn
from mne.fixes import in1d

warnings.simplefilter('always')  # enable b/c these tests throw warnings

data_path = sample.data_path(download=False)
subjects_dir = op.join(data_path, 'subjects')
stc_fname = op.join(data_path, 'MEG', 'sample', 'sample_audvis-meg-lh.stc')
real_label_fname = op.join(data_path, 'MEG', 'sample', 'labels',
                           'Aud-lh.label')
real_label_rh_fname = op.join(data_path, 'MEG', 'sample', 'labels',
                              'Aud-rh.label')
src_fname = op.join(data_path, 'MEG', 'sample',
                    'sample_audvis-eeg-oct-6p-fwd.fif')

test_path = op.join(op.split(__file__)[0], '..', 'fiff', 'tests', 'data')
label_fname = op.join(test_path, 'test-lh.label')
label_rh_fname = op.join(test_path, 'test-rh.label')
tempdir = _TempDir()

# This code was used to generate the "fake" test labels:
# for hemi in ['lh', 'rh']:
#     label = Label(np.unique((np.random.rand(100) * 10242).astype(int)),
#                   hemi=hemi, comment='Test ' + hemi, subject='fsaverage')
#     label.save(op.join(test_path, 'test-%s.label' % hemi))


def assert_labels_equal(l0, l1, decimal=5):
    for attr in ['comment', 'hemi', 'subject']:
        assert_true(getattr(l0, attr) == getattr(l1, attr))
    for attr in ['vertices', 'pos', 'values']:
        a0 = getattr(l0, attr)
        a1 = getattr(l1, attr)
        assert_array_almost_equal(a0, a1, decimal)


def test_label_subject():
    """Test label subject name extraction
    """
    label = read_label(label_fname)
    assert_true(label.subject is None)
    assert_true('unknown' in repr(label))
    label = read_label(label_fname, subject='fsaverage')
    assert_true(label.subject == 'fsaverage')
    assert_true('fsaverage' in repr(label))


def test_label_addition():
    """Test label addition
    """
    pos = np.random.rand(10, 3)
    values = np.arange(10.) / 10
    idx0 = list(range(7))
    idx1 = list(range(7, 10))  # non-overlapping
    idx2 = list(range(5, 10))  # overlapping
    l0 = Label(idx0, pos[idx0], values[idx0], 'lh')
    l1 = Label(idx1, pos[idx1], values[idx1], 'lh')
    l2 = Label(idx2, pos[idx2], values[idx2], 'lh')

    assert len(l0) == len(idx0)

    # adding non-overlapping labels
    l01 = l0 + l1
    assert len(l01) == len(l0) + len(l1)
    assert_array_equal(l01.values[:len(l0)], l0.values)

    # adding overlapping labels
    l = l0 + l2
    i0 = np.where(l0.vertices == 6)[0][0]
    i2 = np.where(l2.vertices == 6)[0][0]
    i = np.where(l.vertices == 6)[0][0]
    assert l.values[i] == l0.values[i0] + l2.values[i2]
    assert l.values[0] == l0.values[0]
    assert_array_equal(np.unique(l.vertices), np.unique(idx0 + idx2))

    # adding lh and rh
    l2.hemi = 'rh'
    # this now has deprecated behavior
    bhl = l0 + l2
    assert bhl.hemi == 'both'
    assert len(bhl) == len(l0) + len(l2)
    bhl = l1 + bhl
    assert_labels_equal(bhl.lh, l01)


@sample.requires_sample_data
def test_label_io_and_time_course_estimates():
    """Test IO for label + stc files
    """
    values, times, vertices = label_time_courses(real_label_fname, stc_fname)
    assert_true(len(times) == values.shape[1])
    assert_true(len(vertices) == values.shape[0])


def test_label_io():
    """Test IO of label files
    """
    label = read_label(label_fname)
    label.save(op.join(tempdir, 'foo'))
    label2 = read_label(op.join(tempdir, 'foo-lh.label'))
    assert_labels_equal(label, label2)

    # pickling
    dest = op.join(tempdir, 'foo.pickled')
    with open(dest, 'wb') as fid:
        pickle.dump(label, fid, pickle.HIGHEST_PROTOCOL)
    with open(dest, 'rb') as fid:
        label2 = pickle.load(fid)
    assert_labels_equal(label, label2)


def _assert_labels_equal(labels_a, labels_b, ignore_pos=False):
    """Make sure two sets of labels are equal"""
    for label_a, label_b in zip(labels_a, labels_b):
        assert_array_equal(label_a.vertices, label_b.vertices)
        assert_true(label_a.name == label_b.name)
        assert_true(label_a.hemi == label_b.hemi)
        if not ignore_pos:
            assert_array_equal(label_a.pos, label_b.pos)


@sample.requires_sample_data
def test_labels_from_parc():
    """Test reading labels from FreeSurfer parcellation
    """
    # test some invalid inputs
    assert_raises(ValueError, labels_from_parc, 'sample', hemi='bla',
                  subjects_dir=subjects_dir)
    assert_raises(ValueError, labels_from_parc, 'sample',
                  annot_fname='bla.annot', subjects_dir=subjects_dir)

    # read labels using hemi specification
    labels_lh, colors_lh = labels_from_parc('sample', hemi='lh',
                                            subjects_dir=subjects_dir)
    for label in labels_lh:
        assert_true(label.name.endswith('-lh'))
        assert_true(label.hemi == 'lh')
    assert_true(len(labels_lh) == len(colors_lh))

    # read labels using annot_fname
    annot_fname = op.join(subjects_dir, 'sample', 'label', 'rh.aparc.annot')
    labels_rh, colors_rh = labels_from_parc('sample', annot_fname=annot_fname,
                                            subjects_dir=subjects_dir)
    assert_true(len(labels_rh) == len(colors_rh))
    for label in labels_rh:
        assert_true(label.name.endswith('-rh'))
        assert_true(label.hemi == 'rh')

    # combine the lh, rh, labels and sort them
    labels_lhrh = list()
    labels_lhrh.extend(labels_lh)
    labels_lhrh.extend(labels_rh)
    names = [label.name for label in labels_lhrh]
    labels_lhrh = [label for (name, label) in sorted(zip(names, labels_lhrh))]

    # read all labels at once
    labels_both, colors = labels_from_parc('sample',
                                           subjects_dir=subjects_dir)
    assert_true(len(labels_both) == len(colors))

    # we have the same result
    _assert_labels_equal(labels_lhrh, labels_both)

    # aparc has 68 cortical labels
    assert_true(len(labels_both) == 68)

    # test regexp
    label = labels_from_parc('sample', parc='aparc.a2009s',
                             regexp='Angu', subjects_dir=subjects_dir)[0][0]
    assert_true(label.name == 'G_pariet_inf-Angular-lh')
    # silly, but real regexp:
    label = labels_from_parc('sample', parc='aparc.a2009s',
                             regexp='.*-.{4,}_.{3,3}-L',
                             subjects_dir=subjects_dir)[0][0]
    assert_true(label.name == 'G_oc-temp_med-Lingual-lh')
    assert_raises(RuntimeError, labels_from_parc, 'sample', parc='aparc',
                  annot_fname=annot_fname, regexp='JackTheRipper',
                  subjects_dir=subjects_dir)


@sample.requires_sample_data
@requires_mne
def test_labels_from_parc_annot2labels():
    """Test reading labels from parc. by comparing with mne_annot2labels
    """
    def _mne_annot2labels(subject, subjects_dir, parc):
        """Get labels using mne_annot2lables"""
        label_dir = _TempDir()
        cwd = os.getcwd()
        try:
            os.chdir(label_dir)
            env = os.environ.copy()
            env['SUBJECTS_DIR'] = subjects_dir
            cmd = ['mne_annot2labels', '--subject', subject, '--parc', parc]
            run_subprocess(cmd, env=env)
            label_fnames = glob.glob(label_dir + '/*.label')
            label_fnames.sort()
            labels = [read_label(fname) for fname in label_fnames]
        finally:
            del label_dir
            os.chdir(cwd)
        return labels

    labels, _ = labels_from_parc('sample', subjects_dir=subjects_dir)
    labels_mne = _mne_annot2labels('sample', subjects_dir, 'aparc')

    # we have the same result, mne does not fill pos, so ignore it
    _assert_labels_equal(labels, labels_mne, ignore_pos=True)


@sample.requires_sample_data
def test_parc_from_labels():
    """Test writing FreeSurfer parcellation from labels"""
    labels, colors = labels_from_parc('sample', subjects_dir=subjects_dir)

    # write left and right hemi labels:
    fnames = ['%s/%s-myparc' % (tempdir, hemi) for hemi in ['lh', 'rh']]

    for fname in fnames:
        parc_from_labels(labels, colors, annot_fname=fname)

    # read it back
    labels2, colors2 = labels_from_parc('sample', subjects_dir=subjects_dir,
                                        annot_fname=fnames[0])
    labels22, colors22 = labels_from_parc('sample', subjects_dir=subjects_dir,
                                          annot_fname=fnames[1])
    labels2.extend(labels22)
    colors2.extend(colors22)

    names = [label.name for label in labels2]

    for label, color in zip(labels, colors):
        idx = names.index(label.name)
        assert_labels_equal(label, labels2[idx])
        assert_array_almost_equal(np.array(color), np.array(colors2[idx]))

    # make sure we can't overwrite things
    assert_raises(ValueError, parc_from_labels, labels, colors,
                  annot_fname=fnames[0])

    # however, this works
    parc_from_labels(labels, colors=None, annot_fname=fnames[0],
                     overwrite=True)

    # test some other invalid inputs
    assert_raises(ValueError, parc_from_labels, labels[:-1], colors,
                  annot_fname=fnames[0], overwrite=True)
    colors2 = np.asarray(colors)
    assert_raises(ValueError, parc_from_labels, labels, colors2[:, :3],
                  annot_fname=fnames[0], overwrite=True)
    colors2[0] = 1.1
    assert_raises(ValueError, parc_from_labels, labels, colors2,
                  annot_fname=fnames[0], overwrite=True)


@sample.requires_sample_data
@requires_sklearn
def test_stc_to_label():
    """Test stc_to_label
    """
    src = read_source_spaces(src_fname)
    stc = read_source_estimate(stc_fname, 'sample')
    os.environ['SUBJECTS_DIR'] = op.join(data_path, 'subjects')
    labels1 = stc_to_label(stc, src='sample', smooth=3)
    with warnings.catch_warnings(record=True) as w:  # connectedness warning
        warnings.simplefilter('always')
        labels2 = stc_to_label(stc, src=src, smooth=3)
    assert_true(len(w) == 1)
    assert_true(len(labels1) == len(labels2))
    for l1, l2 in zip(labels1, labels2):
        assert_labels_equal(l1, l2, decimal=4)

    with warnings.catch_warnings(record=True) as w:  # connectedness warning
        warnings.simplefilter('always')
        labels_lh, labels_rh = stc_to_label(stc, src=src, smooth=3,
                                            connected=True)
    assert_true(len(w) == 1)
    assert_raises(ValueError, stc_to_label, stc, 'sample', smooth=3,
                  connected=True)
    assert_true(len(labels_lh) == 1)
    assert_true(len(labels_rh) == 1)


@sample.requires_sample_data
def test_morph():
    """Test inter-subject label morphing
    """
    label_orig = read_label(real_label_fname)
    label_orig.subject = 'sample'
    # should work for specifying vertices for both hemis, or just the
    # hemi of the given label
    vals = list()
    for grade in [5, [np.arange(10242), np.arange(10242)], np.arange(10242)]:
        label = label_orig.copy()
        # this should throw an error because the label has all zero values
        assert_raises(ValueError, label.morph, 'sample', 'fsaverage')
        label.values.fill(1)
        label.morph(None, 'fsaverage', 5, grade, subjects_dir, 2,
                    copy=False)
        label.morph('fsaverage', 'sample', 5, None, subjects_dir, 2,
                    copy=False)
        assert_true(np.mean(in1d(label_orig.vertices, label.vertices)) == 1.0)
        assert_true(len(label.vertices) < 3 * len(label_orig.vertices))
        vals.append(label.vertices)
    assert_array_equal(vals[0], vals[1])
    # make sure label smoothing can run
    label.morph(label.subject, 'fsaverage', 5,
                [np.arange(10242), np.arange(10242)], subjects_dir, 2,
                copy=False)
    # subject name should be inferred now
    label.smooth(subjects_dir=subjects_dir)


@sample.requires_sample_data
def test_grow_labels():
    """Test generation of circular source labels"""
    seeds = [0, 50000]
    # these were chosen manually in mne_analyze
    should_be_in = [[49, 227], [51207, 48794]]
    hemis = [0, 1]
    labels = grow_labels('sample', seeds, 3, hemis, n_jobs=2)

    for label, seed, hemi, sh in zip(labels, seeds, hemis, should_be_in):
        assert(np.any(label.vertices == seed))
        assert np.all(in1d(sh, label.vertices))
        if hemi == 0:
            assert(label.hemi == 'lh')
        else:
            assert(label.hemi == 'rh')


@sample.requires_sample_data
def test_label_time_course():
    """Test extracting label data from SourceEstimate"""
    values, times, vertices = label_time_courses(real_label_fname, stc_fname)
    stc = read_source_estimate(stc_fname)
    label_lh = read_label(real_label_fname)
    stc_lh = stc.in_label(label_lh)
    assert_array_almost_equal(stc_lh.data, values)
    assert_array_almost_equal(stc_lh.times, times)
    assert_array_almost_equal(stc_lh.vertno[0], vertices)

    label_rh = read_label(real_label_rh_fname)
    stc_rh = stc.in_label(label_rh)
    label_bh = label_rh + label_lh
    stc_bh = stc.in_label(label_bh)
    assert_array_equal(stc_bh.data, np.vstack((stc_lh.data, stc_rh.data)))
nilq/baby-python
python
from django.contrib import admin

from .models import MataKuliah, Tugas

# Register your models here.
admin.site.register(MataKuliah)
admin.site.register(Tugas)
nilq/baby-python
python
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2020-02-16 14:41
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('folio', '0007_auto_20200216_1720'),
    ]

    operations = [
        migrations.AddField(
            model_name='profile',
            name='profile_pic',
            field=models.ImageField(default='kent.jpg', upload_to='pictures/'),
        ),
        migrations.AddField(
            model_name='project',
            name='project_pic',
            field=models.ImageField(default='kent.jpg', upload_to='pictures/'),
        ),
    ]
nilq/baby-python
python
import discord
from discord.ext import commands

# assumes a Bot instance exists elsewhere; the "b!" prefix mirrors the
# command description below (discord.py 2.x also requires intents here)
bot = commands.Bot(command_prefix="b!", intents=discord.Intents.default())


@bot.command(brief="Kicks a server member", description="b!kick <member> [reason]")
@commands.has_permissions(kick_members=True)
async def kick(ctx, member: discord.Member, *, reason=None):
    try:
        await member.kick(reason=reason)
        await ctx.send(f'User {member} has been kicked.')
    except discord.Forbidden:
        await ctx.send("The bot has missing permissions\n\n"
                       "Make sure the Bot's top-most role is above the "
                       "member's top-most role (the member who you are "
                       "going to kick)")
nilq/baby-python
python
#!/usr/bin/env python3
""" Python module to assist creating and maintaining docker openHab stacks."""
import crypt
from enum import Enum
from typing import NamedTuple
import logging
import os
import sys
import json as pyjson
from hashlib import md5
from shutil import copy2
from subprocess import PIPE, run
from time import sleep
import bcrypt
import docker
import questionary as qust
from ruamel.yaml import YAML
from prompt_toolkit.styles import Style

# Configure YAML
yaml = YAML()
yaml.indent(mapping=4, sequence=4, offset=2)

# Log level during development is info
logging.basicConfig(level=logging.WARNING)

# Prompt style
st = Style([
    ('qmark', 'fg:#00c4b4 bold'),     # token in front of question
    ('question', 'bold'),             # question text
    ('answer', 'fg:#00c4b4 bold'),    # submitted answer question
    ('pointer', 'fg:#00c4b4 bold'),   # pointer for select and checkbox
    ('selected', 'fg:#00c4b4'),       # selected item checkbox
    ('separator', 'fg:#00c4b4'),      # separator in lists
    ('instruction', '')               # user instructions for selections
])

# ******************************
# Constants <<<
# ******************************

# Directories for config generation
CUSTOM_DIR = 'custom_configs'
TEMPLATE_DIR = 'template_configs'
COMPOSE_NAME = 'docker-stack.yml'
SKELETON_NAME = 'docker-skeleton.yml'
TEMPLATES_NAME = 'docker-templates.yml'
CONFIG_DIRS = ['mosquitto', 'nodered', 'ssh', 'filebrowser',
               'traefik', 'volumerize', 'postgres', 'pb-framr']
TEMPLATE_FILES = [
    'mosquitto/mosquitto.conf', 'nodered/nodered_package.json',
    'pb-framr/logo.svg', 'nodered/nodered_settings.js',
    'ssh/sshd_config', 'traefik/traefik.toml'
]
EDIT_FILES = {
    "mosquitto_passwords": "mosquitto/mosquitto_passwords",
    "sftp_users": "ssh/sftp_users.conf",
    "traefik_users": "traefik/traefik_users",
    "id_rsa": "ssh/id_rsa",
    "host_key": "ssh/ssh_host_ed25519_key",
    "known_hosts": "ssh/known_hosts",
    "backup_config": "volumerize/backup_config",
    "postgres_user": "postgres/user",
    "postgres_passwd": "postgres/passwd",
    "pb_framr_pages": "pb-framr/pages.json",
    "filebrowser_conf": "filebrowser/filebrowser.json"
}
CONSTRAINTS = {"building": "node.labels.building"}

# Default Swarm port
SWARM_PORT = 2377
# UID for admin
UID = 9001
# Username for admin
ADMIN_USER = 'ohadmin'

# USB DEVICES (e.g. Zwave stick)
USB_DEVICES = [{
    "name": "Aeotec Z-Stick Gen5 (ttyACM0)",
    "value": "zwave_stick"
}]


class ServiceBody(NamedTuple):
    fullname: str
    prefix: str
    additional: bool
    frontend: bool
    sftp: bool = False
    icon: str = None


class Service(ServiceBody, Enum):
    SFTP = ServiceBody("SFTP", "sftp", False, False)
    OPENHAB = ServiceBody("OpenHAB", "openhab", True, True,
                          icon='dashboard', sftp=True)
    NODERED = ServiceBody("Node-RED", "nodered", False, True,
                          icon='ballot', sftp=True)
    POSTGRES = ServiceBody("Postgre SQL", "postgres", True, False)
    MQTT = ServiceBody("Mosquitto MQTT Broker", "mqtt", True, False)
    FILES = ServiceBody("File Manager", "files", False, True, icon='folder')
    BACKUP = ServiceBody("Volumerize Backups", "backup", False, False,
                         sftp=True)

    @classmethod
    def service_by_prefix(cls, prefix):
        # cls here is the enumeration
        return next(service for service in cls if service.prefix == prefix)
# >>>

# ******************************
# State Variables <<<
# ******************************
base_dir = sys.path[0]
template_path = f'{base_dir}/{TEMPLATE_DIR}'
custom_path = f'{base_dir}/{CUSTOM_DIR}'
# >>>


# ******************************
# Compose file functions <<<
# ******************************

# Functions to generate initial file
def generate_initial_compose():
    """Creates the initial compose using the skeleton
    """
    # compose file
    compose = custom_path + '/' + COMPOSE_NAME
    # skeleton file
    skeleton = template_path + '/' + SKELETON_NAME

    with open(skeleton, 'r') as skeleton_f, open(compose, 'w+') as compose_f:
        init_content = yaml.load(skeleton_f)
        yaml.dump(init_content, compose_f)


def add_sftp_service(building, number=0):
    """Generates an sftp entry and adds it to the compose file

    :building: name of the building that the service is added to
    :number: increment of exposed port to prevent overlaps
    """
    # compose file
    compose_path = f'{custom_path}/{COMPOSE_NAME}'
    # service name
    service_name = f'sftp_{building}'
    # template
    template = get_service_template(Service.SFTP.prefix)
    # only label constraint is building
    template['deploy']['placement']['constraints'][0] = (
        f"{CONSTRAINTS['building']} == {building}")
    template['ports'] = [f'{2222 + number}:22']

    # attach volumes
    volume_base = '/home/ohadmin/'
    template['volumes'] = get_attachable_volume_list(volume_base, building)

    add_or_update_compose_service(compose_path, service_name, template)


def add_openhab_service(building, host):
    """Generates an openhab entry and adds it to the compose file

    :building: name of the building that the service is added to
    :host: host the building is added to, used for routing
    """
    # compose file
    compose_path = f'{custom_path}/{COMPOSE_NAME}'
    # service name
    service_name = f'openhab_{building}'
    # template
    template = get_service_template(Service.OPENHAB.prefix)
    # only label constraint is building
    template['deploy']['placement']['constraints'][0] = (
        f"{CONSTRAINTS['building']} == {building}")
    # include in backups of this building
    template['deploy']['labels'].append(f'backup={building}')
    # traefik backend
    template['deploy']['labels'].append(f'traefik.backend={service_name}')
    # traefik frontend domain->openhab
    template['deploy']['labels'].extend(
        generate_traefik_host_labels(host, segment='main'))
    # traefik frontend subdomain openhab_hostname.* -> openhab
    template['deploy']['labels'].append(
        f'traefik.sub.frontend.rule=HostRegexp:'
        f'{service_name}.{{domain:[a-zA-z0-9-]+}}')
    template['deploy']['labels'].append('traefik.sub.frontend.priority=2')

    # replace volumes with named entries in template
    template['volumes'] = generate_named_volumes(
        template['volumes'], service_name, compose_path)

    add_or_update_compose_service(compose_path, service_name, template)


def move_openhab_service(building, new_host):
    """Updates an openhab entry to be accessible on another host

    :building: name of the building that the service uses
    :new_host: host the building service is moved to, used for routing
    """
    # compose file
    compose_path = f'{custom_path}/{COMPOSE_NAME}'
    # service name
    service_name = f'openhab_{building}'
    # current service entry
    entry = get_service_entry(service_name)
    # traefik: remove old domain by filtering
    old_labels = entry['deploy']['labels']
    filtered_labels = [
        l for l in old_labels
        if not l.startswith('traefik.main.frontend')]
    # traefik frontend new_domain->openhab
    filtered_labels.extend(
        generate_traefik_host_labels(new_host, segment='main'))
    entry['deploy']['labels'] = filtered_labels

    add_or_update_compose_service(compose_path, service_name, entry)


def add_nodered_service(building):
    """Generates a nodered entry and adds it to the compose file

    :building: name of the building that the service is added to
    """
    # compose file
    compose_path = f'{custom_path}/{COMPOSE_NAME}'
    # service name
    service_name = f'nodered_{building}'
    # template
    template = get_service_template(Service.NODERED.prefix)
    # only label constraint is building
    template['deploy']['placement']['constraints'][0] = (
        f"{CONSTRAINTS['building']} == {building}")
    template['deploy']['labels'].append(f'traefik.backend={service_name}')
    template['deploy']['labels'].append(f'backup={building}')
    template['deploy']['labels'].extend(
        generate_traefik_path_labels(service_name, segment='main'))
    template['deploy']['labels'].extend(
        generate_traefik_subdomain_labels(service_name, segment='sub'))

    # replace volumes with named entries in template
    template['volumes'] = generate_named_volumes(
        template['volumes'], service_name, compose_path)

    add_or_update_compose_service(compose_path, service_name, template)


def add_mqtt_service(building, number=0):
    """Generates an mqtt entry and adds it to the compose file

    :building: name of the building that the service is added to
    :number: increment of exposed port to prevent overlaps
    """
    # compose file
    compose_path = f'{custom_path}/{COMPOSE_NAME}'
    # service name
    service_name = f'mqtt_{building}'
    # template
    template = get_service_template(Service.MQTT.prefix)
    # only label constraint is building
    template['deploy']['placement']['constraints'][0] = (
        f"{CONSTRAINTS['building']} == {building}")
    # ports incremented by number of services
    template['ports'] = [f'{1883 + number}:1883', f'{9001 + number}:9001']

    # replace volumes with named entries in template
    template['volumes'] = generate_named_volumes(
        template['volumes'], service_name, compose_path)

    add_or_update_compose_service(compose_path, service_name, template)


def add_postgres_service(building, postfix=None):
    """Generates a postgres entry and adds it to the compose file

    :building: name of the building that the service is added to
    :postfix: an identifier for this service
    """
    # compose file
    compose_path = f'{custom_path}/{COMPOSE_NAME}'
    # use building as postfix when empty
    if postfix is None:
        service_name = f'postgres_{building}'
    else:
        service_name = f'postgres_{postfix}'
    # template
    template = get_service_template(Service.POSTGRES.prefix)
    # only label constraint is building
    template['deploy']['placement']['constraints'][0] = (
        f"{CONSTRAINTS['building']} == {building}")

    # replace volumes with named entries in template
    template['volumes'] = generate_named_volumes(
        template['volumes'], service_name, compose_path)

    add_or_update_compose_service(compose_path, service_name, template)


def add_file_service(building):
    """Generates a file manager entry and adds it to the compose file

    :building: name of the host that the service is added to
    """
    # compose file
    compose_path = f'{custom_path}/{COMPOSE_NAME}'
    # service name
    service_name = f'{Service.FILES.prefix}_{building}'
    # template
    template = get_service_template(Service.FILES.prefix)
    # add command that sets base url
    template['command'] = f'-b /{service_name}'
    # only label constraint is building
    template['deploy']['placement']['constraints'][0] = (
        f"{CONSTRAINTS['building']} == {building}")
    template['deploy']['labels'].append(f'traefik.backend={service_name}')
    template['deploy']['labels'].extend(
        generate_traefik_path_labels(service_name, segment='main',
                                     redirect=False))

    # attach volumes
    volume_base = '/srv/'
    template['volumes'] = get_attachable_volume_list(volume_base, building)

    add_or_update_compose_service(compose_path, service_name, template)


def add_volumerize_service(building):
    """Generates a volumerize backup entry and adds it to the compose file

    :building: name of the host that the service is added to
    """
    # compose file
    compose_path = f'{custom_path}/{COMPOSE_NAME}'
    # service name
    service_name = f'{Service.BACKUP.prefix}_{building}'
    # template
    template = get_service_template(Service.BACKUP.prefix)
    # only label constraint is building
    template['deploy']['placement']['constraints'][0] = (
        f"{CONSTRAINTS['building']} == {building}")

    # attach volumes
    volume_base = '/source/'
    template['volumes'].extend(
        get_attachable_volume_list(volume_base, building))

    # adjust config
    config_list = template['configs']
    # get backup entry from configs
    index, entry = next((i, c) for i, c in enumerate(config_list)
                        if c['source'] == 'backup_config')
    entry['source'] = f'backup_config_{building}'
    template['configs'][index] = entry

    add_or_update_compose_service(compose_path, service_name, template)


# Functions to delete services
def delete_service(service_name):
    """Deletes a service from the compose file

    :service_name: name of the service to remove
    """
    # compose file
    compose_path = f'{custom_path}/{COMPOSE_NAME}'
    with open(compose_path, 'r+') as compose_f:
        # load compose file
        compose = yaml.load(compose_f)
        # remove service entry
        compose['services'].pop(service_name, None)
        # start writing from file start
        compose_f.seek(0)
        # write new compose content
        yaml.dump(compose, compose_f)
        # reduce file to new size
        compose_f.truncate()


# Functions to extract information
def get_current_services(placement=None):
    """Gets a list of currently used services, may be restricted to a placement

    :placement: placement constraint the service shall match
    :returns: list of current services
    """
    # compose file
    compose_path = f'{custom_path}/{COMPOSE_NAME}'
    with open(compose_path, 'r') as compose_f:
        # load compose file
        compose = yaml.load(compose_f)
        # generate list of names
        service_names = []
        for (name, entry) in compose['services'].items():
            if placement is None or get_building_of_entry(entry) == placement:
                service_names.append(name)

        return service_names


def get_current_building_constraints():
    """Gets a list of currently used building constraints

    :returns: set of current buildings
    """
    # compose file
    compose_path = f'{custom_path}/{COMPOSE_NAME}'
    with open(compose_path, 'r') as compose_f:
        # load compose file
        compose = yaml.load(compose_f)
        # generate list of buildings
        building_names = set()
        for (name, entry) in compose['services'].items():
            building = get_building_of_entry(entry)
            if building:
                building_names.add(building)
return building_names def get_building_of_entry(service_dict): """Extract the configured building constraint from an yaml service entry :service_dict: service dict from yaml :returns: building that is set """ # get constraints constraint_list = service_dict['deploy']['placement']['constraints'] # convert them to dicts label_dict = {i.split("==")[0].strip(): i.split("==")[1].strip() for i in constraint_list} return label_dict.get('node.labels.building') def get_service_entry_info(service_entry): """Gets service name and instance of a service entry :service_entry: service entry name :return: tuple with service_name and instance name """ entry_split = service_entry.split("_") name = entry_split[0] instance = entry_split[1] return name, instance def get_service_volumes(service_name): """Gets a list of volumes of a service :returns: list of volumes """ # compose file compose_path = f'{custom_path}/{COMPOSE_NAME}' with open(compose_path, 'r') as compose_f: # load compose file compose = yaml.load(compose_f) # load service service = compose['services'].get(service_name) # extract volume names volume_dict = yaml_list_to_dict(service['volumes']) volumes = list(volume_dict.keys()) # filter only named volumes named_volumes = [v for v in volumes if '/' not in v] return named_volumes # Helper functions def get_attachable_volume_list(volume_base, building): """Get a list of volumes from a building that can be attatched for file acccess :volume_base: Base path of volumes :building: building to consider :returns: list of attachable volume entries """ volume_list = [] host_services = get_current_services(building) for host_service in host_services: name, instance = get_service_entry_info(host_service) volume_service = Service.service_by_prefix(name) # only apply to services that want their volumes attatched if volume_service.sftp: volumes = get_service_volumes(host_service) # collect volumes not already in list vlist = [ f'{v}:{volume_base}{v}' for v in volumes if f'{v}:{volume_base}{v}' not in volume_list] volume_list.extend(vlist) return volume_list def generate_named_volumes(template_volume_list, service_name, compose_path): """Generates volumes including name of services and ads them to the compose file :template_volume_list: List of volume entries from template :service_name: Name of the service instance :compose_path: path to compose file :returns: list of named entries """ volume_entries = yaml_list_to_dict(template_volume_list) # add name to entries (that are named volumes named_volume_entries = {} for (volume, target) in volume_entries.items(): if "/" not in volume: named_volume_entries[f"{service_name}_{volume}"] = target else: named_volume_entries[f"{volume}"] = target for (volume, target) in named_volume_entries.items(): # declare volume if it is a named one if "/" not in volume: add_volume_entry(compose_path, volume) return dict_to_yaml_list(named_volume_entries) def yaml_list_to_dict(yaml_list): """Converts a yaml list (volumes, configs etc) into a python dict :yaml_list: list of a yaml containing colon separated entries :return: python dict """ return {i.split(":")[0]: i.split(":")[1] for i in yaml_list} def dict_to_yaml_list(pdict): """Converts a python dict into a yaml list (volumes, configs etc) :pdict: python dict :return: list of a yaml containing colon separated entries """ return [f'{k}:{v}' for (k, v) in pdict.items()] def get_service_entry(service_name): """Gets a service entry from the compose yaml :return: yaml entry of a service """ # compose file compose_path = 
f'{custom_path}/{COMPOSE_NAME}' with open(compose_path, 'r') as templates_file: compose_content = yaml.load(templates_file) return compose_content['services'][service_name] def get_service_template(service_name): """Gets a service template entry from the template yaml :return: yaml entry of a service """ templates = template_path + '/' + TEMPLATES_NAME with open(templates, 'r') as templates_file: template_content = yaml.load(templates_file) return template_content['services'][service_name] def generate_traefik_host_labels(hostname, segment=None, priority=1): """Generates a traefik path url with necessary redirects :hostname: Hostname that gets assigned by the label :segment: Optional traefik segment when using multiple rules :priority: Priority of frontend rule :returns: list of labels for traefik """ label_list = [] # check segment segment = f'.{segment}' if segment is not None else '' # fill list label_list.append( f'traefik{segment}.frontend.rule=HostRegexp:{{domain:{hostname}}}') label_list.append(f'traefik{segment}.frontend.priority={priority}') return label_list def generate_traefik_subdomain_labels(subdomain, segment=None, priority=2): """Generates a traefik subdomain with necessary redirects :subdomain: subdomain that will be assigned to a service :segment: Optional traefik segment when using multiple rules :priority: Priority of frontend rule :returns: list of labels for traefik """ label_list = [] # check segment segment = f'.{segment}' if segment is not None else '' # fill list label_list.append( f'traefik{segment}.frontend.rule=' f'HostRegexp:{subdomain}.{{domain:[a-zA-z0-9-]+}}') label_list.append(f'traefik{segment}.frontend.priority={priority}') return label_list def generate_traefik_path_labels(url_path, segment=None, priority=2, redirect=True): """Generates a traefik path url with necessary redirects :url_path: path that should be used for the site :segment: Optional traefik segment when using multiple rules :priority: Priority of frontend rule :redirect: Redirect to path with trailing slash :returns: list of labels for traefik """ label_list = [] # check segment segment = f'.{segment}' if segment is not None else '' # fill list label_list.append(f'traefik{segment}.frontend.priority={priority}') if redirect: label_list.append( f'traefik{segment}.frontend.redirect.regex=^(.*)/{url_path}$$') label_list.append( f'traefik{segment}.frontend.redirect.replacement=$$1/{url_path}/') label_list.append( f'traefik{segment}.frontend.rule=PathPrefix:/{url_path};' f'ReplacePathRegex:^/{url_path}/(.*) /$$1') else: label_list.append( f'traefik{segment}.frontend.rule=PathPrefix:/{url_path}') return label_list def add_or_update_compose_service(compose_path, service_name, service_content): """Adds or replaces a service in a compose file :compose_path: path of the compose file to change :service_name: name of the service to add/replace :service_content: service definition to add """ with open(compose_path, 'r+') as compose_f: # load compose file compose = yaml.load(compose_f) # add / update service with template compose['services'][service_name] = service_content # write content starting from first line compose_f.seek(0) # write new compose content yaml.dump(compose, compose_f) # reduce file to new size compose_f.truncate() def add_volume_entry(compose_path, volume_name): """Creates an additional volume entry in the stack file :compose_path: path of the compose file to change :volume_name: name of the additional volume """ with open(compose_path, 'r+') as compose_f: # load compose file compose 
= yaml.load(compose_f) # add volume compose['volumes'][volume_name] = None # write content starting from first line compose_f.seek(0) # write new compose content yaml.dump(compose, compose_f) # reduce file to new size compose_f.truncate() def add_config_entry(compose_path, config_name, config_path): """Creates an additional config entry in the stack file or updates it :compose_path: path of the compose file to change :config_name: name of the additional config :config_path: path of the additional config """ with open(compose_path, 'r+') as compose_f: # load compose file compose = yaml.load(compose_f) # add config compose['configs'][config_name] = {"file": config_path} # write content starting from first line compose_f.seek(0) # write new compose content yaml.dump(compose, compose_f) # reduce file to new size compose_f.truncate() # >>> # ****************************** # Config file functions <<< # ****************************** def generate_config_folders(): """Generate folders for configuration files """ if not os.path.exists(custom_path): os.makedirs(custom_path) print(f'Initialize configuration in {custom_path}') # generate empty config dirs for d in CONFIG_DIRS: new_dir = f'{custom_path}/{d}' if not os.path.exists(new_dir): os.makedirs(new_dir) # copy template configs for template_file in TEMPLATE_FILES: copy_template_config(template_file) def copy_template_config(config_path): """Copies template configuration files into custom folder :config_path: relative path of config to copy from template """ custom_config_path = f'{custom_path}/{config_path}' template_config = f"{template_path}/{config_path}" logging.info( f'Copy {config_path} from {template_config} to {custom_path}') copy2(template_config, custom_config_path) def generate_mosquitto_user_line(username, password): """Generates a line for a mosquitto user with a crypt hashed password :username: username to use :password: password that will be hashed (SHA512) :returns: a line as expected by mosquitto """ password_hash = crypt.crypt(password, crypt.mksalt(crypt.METHOD_SHA512)) line = f"{username}:{password_hash}" return line def generate_sftp_user_line(username, password, directories=None): """Generates a line for a sftp user with a hashed password :username: username to use :password: password that will be hashed (SHA512) :directories: list of directories which the user should have :returns: a line as expected by sshd """ # generate user line with hashed password password_hash = crypt.crypt(password, crypt.mksalt(crypt.METHOD_SHA512)) line = f"{username}:{password_hash}:e:{UID}:{UID}" # add directory entries when available if directories: # create comma separated string from list dir_line = ','.join(d for d in directories) line = f"{line}:{dir_line}" return line def generate_traefik_user_line(username, password): """Generates a line for a traefik user with a bcrypt hashed password :username: username to use :password: password that will be hashed (bcrypt) :returns: a line as expected by traefik """ password_hash = get_bcrypt_hash(password) line = f"{username}:{password_hash}" return line def generate_pb_framr_entry(building, host, service): """Generates a single entry of the framr file :building: building this entry is intended for :host: host this entry is intended for :service: entry from service enum :returns: a dict fitting the asked entry """ entry = {} entry['title'] = service.fullname if service == Service.OPENHAB: entry['url'] = f'http://{host}/' pass else: entry['url'] = f'/{service.prefix}_{building}/' entry['icon'] = 
service.icon return entry def generate_mosquitto_file(username, password): """Generates a mosquitto password file using mosquitto_passwd system tool :username: username to use :password: password that will be used """ passwd_path = f"{custom_path}/{EDIT_FILES['mosquitto_passwords']}" # ensure file exists if not os.path.exists(passwd_path): open(passwd_path, 'a').close() # execute mosquitto passwd mos_result = run( ['mosquitto_passwd', '-b', passwd_path, username, password], universal_newlines=True) return mos_result.returncode == 0 def generate_sftp_file(username, password, direcories=None): """Generates a sftp password file :username: username to use :password: password that will be used :directories: list of directories which the user should have """ # generate line and save it into a file file_content = generate_sftp_user_line(username, password, direcories) create_or_replace_config_file(EDIT_FILES['sftp_users'], file_content) def generate_postgres_files(username, password): """Generates postgres user and password files :username: username to use :password: password that will be used """ # content is purely username and (hashed) password hashed_pass = ( f'md5{md5(username.encode() + password.encode()).hexdigest()}') create_or_replace_config_file(EDIT_FILES['postgres_user'], username) create_or_replace_config_file(EDIT_FILES['postgres_passwd'], hashed_pass) def generate_id_rsa_files(): """Generates id_rsa and id_rsa.pub private/public keys using ssh-keygen """ id_path = f"{custom_path}/{EDIT_FILES['id_rsa']}" # execute ssh-keygen id_result = run( ['ssh-keygen', '-m', 'PEM', '-t', 'rsa', '-b', '4096', '-f', id_path, '-N', ''], universal_newlines=True, stdout=PIPE) return id_result.returncode == 0 def generate_host_key_files(hosts): """Generates ssh host keys and matching known_hosts using ssh-keygen """ key_path = f"{custom_path}/{EDIT_FILES['host_key']}" # ssh-keygen generates public key with .pub postfix pub_path = key_path + '.pub' # host_names with sftp_ postfix sftp_hosts = [f'sftp_{host}' for host in hosts] # execute ssh-keygen id_result = run(['ssh-keygen', '-t', 'ed25519', '-f', key_path, '-N', ''], universal_newlines=True, stdout=PIPE) # read content of public key as known line known_line = "" with open(pub_path, 'r') as pub_file: pub_line = pub_file.readline() split_line = pub_line.split() # delete last list element del split_line[-1] # collect sftp hosts as comma separated string hosts_line = ','.join(h for h in sftp_hosts) split_line.insert(0, hosts_line) # collect parts as space separated string known_line = ' '.join(sp for sp in split_line) # write new known_line file create_or_replace_config_file(EDIT_FILES['known_hosts'], known_line) return id_result.returncode == 0 def generate_filebrowser_file(username, password): """Generates a configuration for the filebrowser web app :username: username to use :password: password that will be used """ # generate line and save it into a file file_content = { "port": "80", "address": "", "username": f"{username}", "password": f"{get_bcrypt_hash(password)}", "log": "stdout", "root": "/srv" } create_or_replace_config_file(EDIT_FILES['filebrowser_conf'], file_content, json=True) def generate_traefik_file(username, password): """Generates a traefik password file :username: username to use :password: password that will be used """ # generate line and save it into a file file_content = generate_traefik_user_line(username, password) create_or_replace_config_file(EDIT_FILES['traefik_users'], file_content) def 
generate_volumerize_files(host_entries): """Generates config for volumerize backups :host_entries: dickt of host entries """ compose_path = f'{custom_path}/{COMPOSE_NAME}' # create one config per host for h in host_entries: configs = [] # Each host knows other hosts for t in host_entries: host_config = { 'description': f"'Backup Server on {t['building_name']}", 'url': f"sftp://ohadmin@sftp_{t['building_id']}:" f"//home/ohadmin/backup_data/backup/{h['building_id']}" } configs.append(host_config) config_file = f"{EDIT_FILES['backup_config']}_{h['building_id']}.json" create_or_replace_config_file(config_file, configs, json=True) add_config_entry( compose_path, f"backup_config_{h['building_id']}", f"./{config_file}") def generate_pb_framr_file(frames): """Generates config for pb framr landing page :frames: a dict that contains hosts with matching name and services """ configs = [] for f in frames: building = { 'instance': f['building_name'], 'entries': [generate_pb_framr_entry(f['building_id'], f['host'], s) for s in f['services'] if s.frontend] } configs.append(building) create_or_replace_config_file( EDIT_FILES['pb_framr_pages'], configs, json=True) def update_pb_framr_host(old_host, new_host): """Updates framr config to use changed host name :old_host: old host that shall be replaced :new_host: host that will be the new target """ configs = [] config_path = EDIT_FILES['pb_framr_pages'] custom_config_path = f'{custom_path}/{config_path}' with open(custom_config_path, 'r') as file: configs = pyjson.load(file) for c in configs: for e in c['entries']: if e['url'] == f"http://{old_host}/": e['url'] = f"http://{new_host}/" if configs: create_or_replace_config_file( EDIT_FILES['pb_framr_pages'], configs, json=True) def create_or_replace_config_file(config_path, content, json=False): """Creates or replaces a config file with new content :config_path: relative path of config :content: content of the file as a string """ custom_config_path = f'{custom_path}/{config_path}' with open(custom_config_path, 'w+') as file: if json: import json json.dump(content, file, indent=2) else: file.write(content) # Functions to modify existing files def add_user_to_traefik_file(username, password): """Adds or modifies user in traefik file :username: username to use :password: password that will be used """ # get current users current_users = get_traefik_users() # ensure to delete old entry if user exists users = [u for u in current_users if u['username'] != username] # collect existing users lines user_lines = [] for u in users: user_lines.append(f"{u['username']}:{u['password']}") # add new/modified user user_lines.append(generate_traefik_user_line(username, password)) # generate content file_content = "\n".join(user_lines) create_or_replace_config_file(EDIT_FILES['traefik_users'], file_content) def remove_user_from_traefik_file(username): """Removes user from traefik file :username: username to delete """ # get current users current_users = get_traefik_users() # ensure to delete entry if user exists users = [u for u in current_users if u['username'] != username] # collect other user lines user_lines = [] for u in users: user_lines.append(f"{u['username']}:{u['password']}") # generate content and write file file_content = "\n".join(user_lines) create_or_replace_config_file(EDIT_FILES['traefik_users'], file_content) # Functions to get content from files def get_users_from_files(): """Gets a list of users in files :returns: list of users """ users = [] # add treafik users users.extend([u['username'] for u in 
get_traefik_users()]) return users def get_traefik_users(): """Gets a list of dicts containing users and password hashes :returns: list of users / password dicts """ users = [] # get treafik users traefik_file = f"{custom_path}/{EDIT_FILES['traefik_users']}" with open(traefik_file, 'r') as file: lines = file.read().splitlines() for line in lines: # username in traefik file is first entry unitl colon username = line.split(':')[0] password = line.split(':')[1] users.append({"username": username, "password": password}) return users # Additional helper functions def get_bcrypt_hash(password): """Returns bcrypt hash for a password :password: password to hash :returns: bcrypt hash of password """ return bcrypt.hashpw(password.encode(), bcrypt.gensalt()).decode() # >>> # ****************************** # Docker machine functions <<< # ****************************** def get_machine_list(): """Get a list of docker machine names using the docker-machine system command :returns: a list of machine names managed by docker-machine """ machine_result = run(['docker-machine', 'ls', '-q'], universal_newlines=True, stdout=PIPE) return machine_result.stdout.splitlines() def check_machine_exists(machine_name): """Checks weather a docker machine exists and is available :machine_name: Name of the machine to check :returns: True when machine is available """ machines = get_machine_list() return machine_name in machines def get_machine_env(machine_name): """Gets dict of env settings from a machine :machine_name: Name of the machine to check :returns: Dict of env variables for this machine """ env_result = run(['docker-machine', 'env', machine_name], universal_newlines=True, stdout=PIPE) machine_envs = {} lines = env_result.stdout.splitlines() for line in lines: if 'export' in line: assign = line.split('export ', 1)[1] env_entry = [a.strip('"') for a in assign.split('=', 1)] machine_envs[env_entry[0]] = env_entry[1] return machine_envs def get_machine_ip(machine_name): """Asks for the ip of the docker machine :machine_name: Name of the machine to use for init """ machine_result = run(['docker-machine', 'ip', machine_name], universal_newlines=True, stdout=PIPE) return machine_result.stdout.strip() def init_swarm_machine(machine_name): """Creates a new swarm with the specified machine as leader :machine_name: Name of the machine to use for init :return: True if swarm init was successful """ machine_ip = get_machine_ip(machine_name) init_command = 'docker swarm init --advertise-addr ' + machine_ip init_result = run(['docker-machine', 'ssh', machine_name, init_command], universal_newlines=True) return init_result.returncode == 0 def join_swarm_machine(machine_name, leader_name): """Joins the swarm of the specified leader :machine_name: Name of the machine to join a swarm :leader_name: Name of the swarm leader machine :return: True if join to swarm was successful """ token_command = 'docker swarm join-token manager -q' token_result = run(['docker-machine', 'ssh', leader_name, token_command], universal_newlines=True, stdout=PIPE) token = token_result.stdout.strip() leader_ip = get_machine_ip(leader_name) logging.info(f"Swarm leader with ip {leader_ip} uses token {token}") join_cmd = f'docker swarm join --token {token} {leader_ip}:{SWARM_PORT}' logging.info(f'Machine {machine_name} joins using command {join_cmd}') join_result = run(['docker-machine', 'ssh', machine_name, join_cmd], universal_newlines=True) return join_result.returncode == 0 def generate_swarm(machines): """Generates a swarm, the first machine will be the 
initial leader

    :machines: list of machines in the swarm
    """
    leader = None
    for machine in machines:
        # init swarm with first machine
        if leader is None:
            leader = machine
            print(f'Create initial swarm with leader {leader}')
            if init_swarm_machine(leader):
                print('Swarm init successful\n')
                assign_label_to_node(leader, 'building',
                                     leader, manager=leader)
        else:
            print(f'Machine {machine} joins swarm of leader {leader}')
            if join_swarm_machine(machine, leader):
                print('Joining swarm successful\n')
                assign_label_to_node(machine, 'building',
                                     machine, manager=leader)


def check_dir_on_machine(dirpath, machine):
    """Checks whether a dir exists on a machine

    :dirpath: Directory to check
    :machine: Machine to check
    :returns: True when dir exists, False otherwise
    """
    check_command = f"[ -d {dirpath} ]"
    check_result = run(['docker-machine', 'ssh', machine, check_command])
    return check_result.returncode == 0


def check_file_on_machine(filepath, machine):
    """Checks whether a file exists on a machine

    :filepath: File to check
    :machine: Machine to check
    :returns: True when file exists, False otherwise
    """
    check_command = f"[ -f {filepath} ]"
    check_result = run(['docker-machine', 'ssh', machine, check_command])
    return check_result.returncode == 0


def copy_files_to_machine(filepath, machine):
    """Copies a directory and its content or a file to a machine

    :filepath: Directory or file to copy
    :machine: Machine to copy to
    """
    run(['docker-machine', 'scp', '-r', filepath, f'{machine}:'])


def execute_command_on_machine(command, machine):
    """Executes a command on a docker machine

    :command: Command to execute
    :machine: Machine to execute command on
    """
    run([f'docker-machine ssh {machine} {command}'], shell=True)
# >>>


# ******************************
# Systemd functions <<<
# ******************************
def list_enabled_devices():
    """Presents a list of enabled devices (systemd services)

    :returns: list of enabled devices
    """
    list_result = run(['systemctl', 'list-units'],
                      stdout=PIPE, universal_newlines=True)
    device_list = list_result.stdout.splitlines()
    # Filter out only swarm-device services
    device_list = [d.strip() for d in device_list if 'swarm-device' in d]
    # Extract service name
    device_list = [d.split()[0] for d in device_list]
    return device_list
# >>>


# ******************************
# Docker client commands <<<
# ******************************
def deploy_docker_stack(machine):
    """Deploys the custom stack in the custom_path

    :machine: Docker machine to execute command on
    """
    # Set CLI environment to target docker machine
    machine_env = get_machine_env(machine)
    os_env = os.environ.copy()
    os_env.update(machine_env)

    # Get compose file and start stack
    compose_file = f'{custom_path}/{COMPOSE_NAME}'
    deploy_command = f'docker stack deploy -c {compose_file} ohpb'
    run([deploy_command], shell=True, env=os_env)


def remove_docker_stack(machine):
    """Removes the custom stack in the custom_path

    :machine: Docker machine to execute command on
    """
    # Set CLI environment to target docker machine
    machine_env = get_machine_env(machine)
    os_env = os.environ.copy()
    os_env.update(machine_env)

    remove_command = 'docker stack rm ohpb'
    run([remove_command], shell=True, env=os_env)


def resolve_service_nodes(service):
    """Returns nodes running a specified service

    :service: name or id of a service
    :returns: list of nodes running the service
    """
    node_result = run(['docker', 'service', 'ps', service,
                       '--format', '{{.Node}}',
                       '-f', 'desired-state=running'],
                      universal_newlines=True,
                      stdout=PIPE)
    return node_result.stdout.splitlines()


def 
get_container_list(manager=None): """Return a list of containers running on a machine :manager: Docker machine to use for command, otherwise local :returns: list of containers """ client = get_docker_client(manager) return [c.name for c in client.containers.list()] def get_service_list(manager=None): """Return a list of services managed by a machine :manager: Docker machine to use for command, otherwise local :returns: list of services """ client = get_docker_client(manager) return [s.name for s in client.services.list()] def remove_label_from_nodes(label, value, manager=None): """Removes label with matching value from all nodes :label: Label you want to remove :value: The value to match before removing :manager: Docker machine to use for command, otherwise local :return: Nodes with removed label """ client = get_docker_client(manager) nodes = client.nodes.list() matching_nodes = [n for n in nodes if label in n.attrs['Spec']['Labels'] and n.attrs['Spec']['Labels'][label] == value] print(f'Matches {matching_nodes}') for m in matching_nodes: spec = m.attrs['Spec'] spec['Labels'].pop(label) m.update(spec) logging.info(f'Remove label {label} with value {value} from {m}') client.close() return [n.attrs['Description']['Hostname'] for n in matching_nodes] def assign_label_to_node(nodeid, label, value, manager=None): """Assigns a label to a node (e.g. building) :nodeid: Id or name of the node :label: Label you want to add :value: The value to assign to the label :manager: Docker machine to use for command, otherwise local """ client = get_docker_client(manager) node = client.nodes.get(nodeid) spec = node.attrs['Spec'] spec['Labels'][label] = value node.update(spec) logging.info(f'Assign label {label} with value {value} to {nodeid}') client.close() def run_command_in_service(service, command, building=None): """Runs a command in a service based on its name. 
When no matching container is found or the service name is ambiguous,
    an error will be displayed and the function exits

    :param service: Name of the service to execute command in
    :param command: Command to execute
    :param building: Optional building, makes the service unambiguous
        (Default: None)
    """
    client = get_docker_client(building)

    # Find containers matching name
    service_name_filter = {"name": service}
    containers = client.containers.list(filters=service_name_filter)

    # Ensure match is unambiguous
    if len(containers) > 1:
        print(f'Found multiple containers matching service name {service}, '
              'ensure service is unambiguous')
    elif len(containers) < 1:
        print(f'Found no matching container for service name {service}')
    else:
        service_container = containers[0]
        print(f'Executing {command} in container {service_container.name} '
              f'({service_container.id}) on building {building}\n')
        command_exec = service_container.exec_run(command)
        print(command_exec.output.decode())
    client.close()


def get_docker_client(manager=None):
    """Returns a docker client instance

    :manager: Optional machine to use, local otherwise
    :returns: Docker client instance
    """
    if manager:
        machine_env = get_machine_env(manager)
        client = docker.from_env(environment=machine_env)
    else:
        client = docker.from_env()
    return client


def restore_building_backup(manager, building, new_machine=None):
    """Restores a building from its last backup, optionally on a new machine

    :manager: Docker machine to manage the swarm with
    :building: Name (label) of the building to restore
    :new_machine: Optional machine the building shall be moved to
    """
    client = get_docker_client(manager)
    # get backup services of the building
    services = client.services.list(filters={'label': f'backup={building}'})

    # scale down services (to prevent writes during restore)
    for s in services:
        s.scale(0)
    # Give services 10 seconds to shut down
    print("Wait for services to shut down...")
    sleep(10)

    # When a new machine is used, (un-)assign labels
    if new_machine:
        # Remove old node labels and add new
        old_nodes = remove_label_from_nodes('building', building, manager)
        assign_label_to_node(new_machine, 'building', building, manager)
        print("Wait for services to start on new machine")
        if wait_for_containers(new_machine, 'backup|sftp', expected_count=2):
            run_command_in_service('backup', 'restore', new_machine)
            # When building was moved, update host entry of openhab in compose
            move_openhab_service(building, new_machine)
            update_pb_framr_host(old_nodes[0], new_machine)
        else:
            logging.error(
                f"Failed to start services on {new_machine},"
                " rolling back changes")
            # restore labels on old nodes
            remove_label_from_nodes('building', building, manager)
            for on in old_nodes:
                assign_label_to_node(on, 'building', building, manager)
                update_pb_framr_host(new_machine, on)
    else:
        # execute restore command in backup service
        run_command_in_service('backup', 'restore', manager)

    # reload and scale up services again
    for s in services:
        s.reload()
        s.scale(1)

    # close client
    client.close()


def wait_for_containers(machine, name_filter, expected_count=1, timeout=60):
    """Waits until containers matching the filters are available

    :machine: machine to check for containers
    :name_filter: regexp to filter names by
    :expected_count: number of containers that are expected to match
    :timeout: Time to at least wait for before aborting the check
    :returns: True if found, False when timed out
    """
    client = get_docker_client(machine)
    for t in range(timeout):
        cl = client.containers.list(filters={'name': name_filter})
        if len(cl) >= expected_count:
            logging.info("Let services boot up")
            sleep(3)
            return True
        else:
            sleep(1)
    logging.error(f"Timed out waiting for containers matching {name_filter}.")
    return False
# >>>


# ******************************
# CLI base commands <<<
# ******************************
def 
init_config_dirs_command(args): """Initialize config directories :args: parsed commandline arguments """ # generate basic config folder generate_config_folders() def assign_building_command(args): """Assigns the role of a building to a node :args: parsed commandline arguments """ node = args.node building = args.building print(f'Assign role of building {building} to node {node}') assign_label_to_node(node, 'building', building) def execute_command(args): """Top level function to manage command executions from CLI :args: parsed commandline arguments """ service = args.service command = " ".join(str(x) for x in args.command) # list to string building = args.building run_command_in_service(service, command, building) def restore_command(args): """Top level function to manage command executions from CLI :args: parsed commandline arguments """ building = args.building target = args.target if not check_machine_exists(target): print(f'Machine with name {target} not found') return print(f'Restoring building {building} on machine {target}') get_machine_env(target) def interactive_command(args): """Top level function to start the interactive mode :args: parsed command line arguments """ main_menu(args) # >>> # ****************************** # Interactive menu entries <<< # ****************************** def main_menu(args): """ Display main menu """ # Main menu prompts selection contains function choice = qust.select('Public Building Manager - Main Menu', choices=load_main_entires(), style=st).ask() # Call funtion of menu entry if choice: choice(args) def load_main_entires(): """Loads entries for main menu depending on available files :returns: entries of main menu """ entries = [] if not os.path.exists(custom_path): entries.append({'name': 'Create initial structure', 'value': init_menu}) else: entries.append({'name': 'Manage Services', 'value': service_menu}) entries.append({'name': 'Manage Users', 'value': user_menu}) entries.append({'name': 'Manage Devices', 'value': device_menu}) entries.append({'name': 'Manage Backups', 'value': backup_menu}) entries.append({'name': 'Execute a command in a service container', 'value': exec_menu}) entries.append({'name': 'Exit', 'value': sys.exit}) return entries def exit_menu(args): """Exits the programm """ sys.exit() # *** Init Menu Entries *** def init_menu(args): """Menu entry for initial setup and file generation :args: Passed commandline arguments """ # Prompts stack_name = qust.text('Choose a name for your setup', style=st).ask() hosts = (qust.checkbox( 'What docker machines will be used?', choices=generate_cb_choices(get_machine_list()), style=st) .skip_if(not stack_name) .ask()) # Cancel init if no hosts selected if not hosts: return # Ensure passwords match password_match = False while not password_match: password = qust.password( 'Choose a password for the ohadmin user:', style=st).ask() confirm = qust.password( 'Repeat password for the ohadmin user:', style=st).ask() if password == confirm: password_match = True else: print("Passwords did not match, try again") # Initialize custom configuration dirs and templates generate_config_folders() generate_initial_compose() frames = [] for i, host in enumerate(hosts): building_id, building_name, services = init_machine_menu(host, i) if building_id and building_name and services: frames.append({'host': host, 'building_id': building_id, 'building_name': building_name, 'services': services}) else: return # When frames is not empty generate frame config if frames: generate_pb_framr_file(frames) 
generate_volumerize_files(frames) building_ids = [f['building_id'] for f in frames] generate_host_key_files(building_ids) # Generate config files based on input username = ADMIN_USER generate_sftp_file(username, password, ['backup_data/backup']) generate_postgres_files(username, password) generate_mosquitto_file(username, password) generate_traefik_file(username, password) generate_filebrowser_file(username, password) generate_id_rsa_files() # print(answers) print(f"Configuration files for {stack_name} created in {custom_path}") # Check if changes shall be applied to docker environment generate = (qust.confirm( 'Apply changes to docker environment?', default=True, style=st) .ask()) if generate: generate_swarm(hosts) def init_machine_menu(host, increment): """Prompts to select server services :host: docker-machine host :increment: incrementing number to ensure ports are unique :return: choosen building id, name and services or None if canceld """ # Print divider print('----------') # Prompt for services building_id = (qust.text( f'Choose an identifier for the building on server {host} ' '(lowercase no space)', default=f'{host}', style=st) .skip_if(not host) .ask()) building = (qust.text( f'Choose a display name for building on server {host}', default=f'{host.capitalize()}', style=st) .skip_if(not building_id) .ask()) services = (qust.checkbox( f'What services shall {host} provide?', choices=generate_cb_service_choices(checked=True), style=st) .skip_if(not building) .ask()) if services is None: return None, None, None if Service.OPENHAB in services: add_openhab_service(building_id, host) if Service.NODERED in services: add_nodered_service(building_id) if Service.MQTT in services: add_mqtt_service(building_id, increment) if Service.POSTGRES in services: add_postgres_service(building_id) if Service.BACKUP in services: add_volumerize_service(building_id) if Service.FILES in services: add_file_service(building_id) if Service.SFTP in services: add_sftp_service(building_id, increment) return building_id, building, services # *** Exec Menu Entries *** def exec_menu(args): """Menu entry for executing commands in services :args: Passed commandline arguments """ machine = docker_client_prompt(" to execute command at") service_name = qust.select( 'Which service container shall execute the command?', choices=get_container_list(machine), style=st).ask() command = qust.text('What command should be executed?', style=st).ask() run_command_in_service(service_name, command, machine) # *** User Menu Entries *** def user_menu(args): """Menu entry for user managment :args: Passed commandline arguments """ # Ask for action choice = qust.select("What do you want to do?", choices=[ 'Add a new user', 'Modify existing user', 'Exit'], style=st).ask() if "Add" in choice: new_user_menu() elif "Modify" in choice: modify_user_menu() def new_user_menu(): """Menu entry for new users """ current_users = get_users_from_files() new_user = False while not new_user: username = qust.text("Choose a new username:", style=st).ask() if username not in current_users: new_user = True else: print(f"User with name {username} already exists, try again") # Ensure passwords match (only if username was selected) password_match = False password = None while username and not password_match: password = qust.password( f'Choose a password for the user {username}:', style=st).ask() confirm = (qust.password( f'Repeat password for the user {username}:', style=st) .skip_if(not password) .ask()) if password == confirm: password_match = True else: 
print("Passwords did not match, try again") if password and username: add_user_to_traefik_file(username, password) def modify_user_menu(): """Menu entry to remove users or change passwords """ current_users = get_users_from_files() user = qust.select("Choose user to modify:", choices=current_users, style=st).ask() if user is None: return elif user == 'ohadmin': choices = [{'name': 'Delete user', 'disabled': 'Disabled: cannot delete admin user'}, 'Change password', 'Exit'] else: choices = ['Delete user', 'Change password', 'Exit'] action = qust.select( f"What should we do with {user}?", choices=choices, style=st).ask() if action is None: return if 'Delete' in action: is_sure = qust.confirm( f"Are you sure you want to delete user {user}?", style=st).ask() if is_sure: remove_user_from_traefik_file(user) elif 'Change' in action: password_match = False while not password_match: password = qust.password( f'Choose a password for the user {user}:', style=st).ask() confirm = (qust.password( f'Repeat password for the user {user}:', style=st) .skip_if(password is None) .ask()) if password == confirm: password_match = True else: print("Passwords did not match, try again") if password: add_user_to_traefik_file(user, password) # *** Service Menu Entries *** def service_menu(args): """Menu entry for service managment :args: Passed commandline arguments """ # Ask for action choice = qust.select("What do you want to do?", choices=[ 'Re-/Start docker stack', 'Stop docker stack', 'Modify existing services', 'Add additional service', 'Exit'], style=st).ask() if "Add" in choice: service_add_menu() elif "Modify" in choice: service_modify_menu() elif "Start" in choice: machine = docker_client_prompt(" to execute deploy") if machine: deploy_docker_stack(machine) elif "Stop" in choice: machine = docker_client_prompt(" to execute remove") if machine: remove_docker_stack(machine) def service_add_menu(): """Menu to add additional services """ services = [s for s in Service if s.additional] service = qust.select( 'What service do you want to add?', style=st, choices=generate_cb_service_choices(service_list=services)).ask() host = (qust.select('Where should the service be located?', choices=generate_cb_choices( get_machine_list()), style=st) .skip_if(not service) .ask()) identifier = (qust.text( 'Input an all lower case identifier:', style=st) .skip_if(not host) .ask()) if service and host and identifier: if service == Service.POSTGRES: add_postgres_service(host, postfix=identifier) def service_modify_menu(): """Menu to modify services """ services = get_current_services() service = qust.select( 'What service do you want to modify?', choices=services).ask() if service is None: return elif service in ['proxy', 'landing']: choices = [{'name': 'Remove service', 'disabled': 'Disabled: cannot remove framework services'}, 'Exit'] else: choices = ['Remove service', 'Exit'] action = (qust.select( f"What should we do with {service}?", choices=choices, style=st) .skip_if(not service) .ask()) if action is None: return elif 'Remove' in action: delete_service(service) # *** Device Menu Functions *** def device_menu(args): """Menu to manage devices :args: Arguments form commandline """ # Check if device scripts are installed bin_path = '/usr/bin/enable-swarm-device' choices = ['Install device scripts'] if os.path.exists(bin_path): choices.append('Link device to service') choices.append('Unlink device') choices.append('Exit') # Ask for action choice = qust.select("What do you want to do? 
(root required)", choices=choices, style=st).ask() if "Install" in choice: print("Installing device scripts (needs root)") device_install_menu() elif "Link" in choice: device_link_menu() elif "Unlink" in choice: device_unlink_menu() def device_install_menu(): """Install scripts to link devices """ machine = docker_client_prompt(" to install usb support") if machine: # Name of base dir on machines external_base_dir = os.path.basename(base_dir) # Check if files are available on targeted machine machine_dir = f"{external_base_dir}/install-usb-support.sh" print(machine_dir) if not check_file_on_machine(machine_dir, machine): print("Scripts missing on machine, will be copied") copy_files_to_machine(base_dir, machine) else: print("Scripts available on machine") execute_command_on_machine(f'sudo {machine_dir}', machine) else: print("Cancelled device script installation") def device_link_menu(): """Link device to a service """ machine = docker_client_prompt(" to link device on") device = (qust.select("What device should be linked?", choices=USB_DEVICES, style=st) .skip_if(not machine) .ask()) if machine and device: # Start systemd service that ensures link (escapes of backslash needed) link_cmd = f"sudo systemctl enable --now swarm-device@" + \ f"{device}\\\\\\\\x20openhab.service" # Needs enable to keep after reboot execute_command_on_machine(link_cmd, machine) print(f"Linked device {device} to openHAB service on {machine}") else: print("Cancelled device linking") def device_unlink_menu(): """Unlink a device from a service """ machine = docker_client_prompt(" to unlink device from") device = (qust.select("What device should be unlinked?", choices=USB_DEVICES, style=st) .skip_if(not machine) .ask()) if machine and device: # Stop systemd service that ensures link (escapes of backslash needed) link_cmd = f"sudo systemctl disable --now swarm-device@" + \ f"{device}\\\\\\\\x20openhab.service" execute_command_on_machine(link_cmd, machine) print(f"Unlinked device {device} on machine {machine}") else: print("Cancelled device unlinking") # *** Backup Menu Entries *** def backup_menu(args): """Menu entry for backup managment :args: Passed commandline arguments """ # Ask for action choice = qust.select("What do you want to do?", choices=[ 'Execute backup', 'Restore backup', 'Move building', 'Exit'], style=st).ask() if "Execute" in choice: execute_backup_menu() elif "Restore" in choice: restore_backup_menu() elif "Move" in choice: restore_new_building_menu() def execute_backup_menu(): """Submenu for backup execution """ machine = docker_client_prompt(" to backup") full = (qust.confirm("Execute full backup (otherwise partial)?", default=False, style=st) .skip_if(not machine) .ask()) if full is None: return elif full: run_command_in_service('backup', 'backupFull', machine) print("Full backup completed") else: run_command_in_service('backup', 'backup', machine) print("Partial backup completed") def restore_backup_menu(): """Submenu for backup execution """ machine = docker_client_prompt(" to restore") confirm = (qust.confirm( f'Restore services from last backup on machine {machine} ' '(current data will be lost)?', default=False, style=st) .skip_if(not machine) .ask()) if confirm: restore_building_backup(machine, machine) print("Restore completed") else: print("Restore canceled") def restore_new_building_menu(): """Submenu for backup execution on a new building """ machine = docker_client_prompt(" to execute restores with.") current_building = compose_building_prompt(" to move", skip_if=not machine) new_machine 
= docker_client_prompt(" to move building to", skip_if=not current_building) confirm = (qust.confirm( f'Recreate {current_building} from last backup' f' on machine {new_machine}', default=False, style=st) .skip_if(not new_machine, default=False) .ask()) if confirm: restore_building_backup(machine, current_building, new_machine) else: print("Restore canceled") # *** Menu Helper Functions *** def generate_cb_choices(list, checked=False): """Generates checkbox entries for lists of strings :list: pyhton list that shall be converted :checked: if true, selections will be checked by default :returns: A list of dicts with name keys """ return [{'name': m, 'checked': checked} for m in list] def generate_cb_service_choices(checked=False, service_list=None): """Generates checkbox entries for the sevice enum :checked: if true, selections will be checked by default :service_list: optional list of services, use all if empty :returns: A list of dicts with name keys """ services = service_list if service_list is not None else Service return [ {'name': s.fullname, 'value': s, 'checked': checked} for s in services ] def docker_client_prompt(message_details='', skip_if=False): """Show list of docker machines and return selection :manager: Optional machine to use, prompt otherwise :returns: Docker client instance """ machine = (qust.select(f'Choose manager machine{message_details}', choices=get_machine_list(), style=st) .skip_if(skip_if) .ask()) return machine def compose_building_prompt(message_details='', skip_if=False): """Show list of building contraints used in compose :returns: Docker client instance """ building = qust.select(f'Choose building{message_details}:', choices=get_current_building_constraints(), style=st).skip_if(skip_if).ask() return building # >>> # ****************************** # Script main (entry) <<< # ****************************** if __name__ == '__main__': import argparse parser = argparse.ArgumentParser( prog='building_manager', description='Generate and manage multi' 'building configurations of openHAB with docker swarm') parser.add_argument( '--config_dir', '-d', help='Directory to creat config folders in, default is current dir') subparsers = parser.add_subparsers() # Interactive mode parser_interactive = subparsers.add_parser( 'interactive', help='Starts the interactive mode of the building manager') parser_interactive.set_defaults(func=interactive_command) # Restore command parser_restore = subparsers.add_parser('restore', help='Restore backups') parser_restore.add_argument( 'building', help='Name (label) of the building that shall be restored') parser_restore.add_argument( 'target', help='Name of the machine to restore to') parser_restore.set_defaults(func=restore_command) # Assign building command parser_assign_building = subparsers.add_parser( 'assign_building', help='Assign the role of a building to a node') parser_assign_building.add_argument( 'node', help='Name (or ID) of the node that gets the role assigned') parser_assign_building.add_argument( 'building', help='Name of the building that will be assigned') parser_assign_building.set_defaults(func=assign_building_command) # Execute command parser_exec = subparsers.add_parser( 'exec', help='Execute commands in a service container') parser_exec.add_argument( 'service', help='Name of the service that will run the command') parser_exec.add_argument( 'command', help='Command to be executed', nargs=argparse.REMAINDER) parser_exec.add_argument( '--building', '-b', help='Building name (label) of the service if ' 'service 
location is ambiguous')
    parser_exec.set_defaults(func=execute_command)

    # Config commands
    parser_config = subparsers.add_parser(
        'config', help='Manage configuration files')
    parser_config_subs = parser_config.add_subparsers()
    # - Config init
    parser_config_init = parser_config_subs.add_parser(
        'init', help='Initialize config file directories')
    parser_config_init.set_defaults(func=init_config_dirs_command)

    # Parse arguments into an args namespace
    args = parser.parse_args()

    # Check if a custom config dir is used
    if args.config_dir:
        custom_path = args.config_dir

    # When no subcommand is given, fall back to the interactive menu
    try:
        args.func(args)
    except AttributeError:
        interactive_command(args)
# >>>
# --- vim settings ---
# vim:foldmethod=marker:foldlevel=0:foldmarker=<<<,>>>
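
# ---------------------------------------------------------------------------
# Illustrative usage sketch (added; not part of the original script): a
# minimal, guarded demonstration of the pure helper functions above. The
# service name 'nodered_house1' and the volume entries are hypothetical
# examples. Set DEMO_HELPERS=1 in the environment to run it.
if os.environ.get('DEMO_HELPERS'):
    demo_volumes = ['nodered_data:/data', 'nodered_settings:/settings']
    # yaml_list_to_dict splits the colon separated compose entries
    assert yaml_list_to_dict(demo_volumes) == {
        'nodered_data': '/data', 'nodered_settings': '/settings'}
    # dict_to_yaml_list restores the original list form
    assert dict_to_yaml_list(yaml_list_to_dict(demo_volumes)) == demo_volumes
    # traefik labels for a path-routed service (note the $$ escaping
    # required inside compose files)
    for label in generate_traefik_path_labels('nodered_house1',
                                              segment='main'):
        print(label)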
nilq/baby-python
python
from bs4 import BeautifulSoup

# Bicycle spec sheet; every field defaults to a '------' placeholder so that
# rows missing from the parsed tables are still represented. Keys with a
# trailing colon mirror the labels used on the product page.
bicycle = {
    'Price': '------', 'Brand': '------', 'Model': '------',
    'Frame': '------', 'Color': '------', 'Size': '------',
    'Fork': '------', 'Headset': '------', 'Stem': '------',
    'Handlebar': '------', 'Grips': '------', 'Rear Derailleur': '------',
    'Front Derailleur': '------', 'Shifter': '------', 'Brake': '------',
    'Crankset': '------', 'Cassette': '------', 'Chain': '------',
    'Rims': '------', 'Hub Front': '------', 'Hub Rear': '------',
    'Tires': '------', 'Pedals': '------', 'Saddle': '------',
    'Seat Post': '------', 'Seat Post Clamp': '------',
    'Weight (KG)': '------', 'Bike Type:': '------',
    'Target Group:': '------', 'Material:': '------',
    'Wheel Size:': '------', 'Model year:': '------'}

parsed = BeautifulSoup(
    open('Cube Access WS Exc black n blue - Hardtail Mountainbike Women.html'),
    'html.parser')

description = parsed.find(attrs={'class': 'product--description'}).findAll('tr')
properties = parsed.find(attrs={'class': 'product--properties'}).findAll('tr')

# Each table row holds a key cell and a value cell; rows without both cells
# are printed for inspection instead of aborting the run.
for row in description + properties:
    data = row.findAll('td')
    try:
        key = data[0].text.strip()
        value = data[1].text.strip()
    except IndexError:
        print(data)
    else:
        bicycle[key] = value
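
# Hedged extension sketch (added for illustration, not part of the original
# scraper): once the spec tables have been parsed, the dict can be persisted,
# e.g. as JSON next to the HTML source. The output filename is an assumption.
import json

with open('cube_access_ws_exc.json', 'w') as out_file:
    json.dump(bicycle, out_file, indent=2, sort_keys=True)
print('Wrote %d fields' % len(bicycle))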
nilq/baby-python
python
# Print all prime numbers from 2 up to (and including) the entered number.
a = int(input("enter a number"))
for i in range(2, a + 1):
    for j in range(2, i):
        if i % j == 0:
            break
    else:
        # The inner loop found no divisor, so i is prime.
        print(i)
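# Hedged alternative sketch (not from the original snippet): the trial
# division above re-tests every candidate and is quadratic in the worst
# case; a sieve of Eratosthenes finds the same primes far faster.
def primes_up_to(n):
    """Return all primes <= n using a sieve of Eratosthenes."""
    is_prime = [True] * (n + 1)
    is_prime[0:2] = [False, False]           # 0 and 1 are not prime
    for p in range(2, int(n ** 0.5) + 1):
        if is_prime[p]:
            # Cross out every multiple of p, starting at p*p.
            for multiple in range(p * p, n + 1, p):
                is_prime[multiple] = False
    return [p for p in range(n + 1) if is_prime[p]]

# Example: primes_up_to(20) -> [2, 3, 5, 7, 11, 13, 17, 19]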
nilq/baby-python
python
from spinn_front_end_common.utilities.notification_protocol.\
    notification_protocol import NotificationProtocol

import logging

logger = logging.getLogger(__name__)


class FrontEndCommonNotificationProtocol(object):
    """ The notification protocol for external device interaction
    """

    def __call__(
            self, wait_for_read_confirmation,
            socket_addresses, database_file_path):
        """
        :param wait_for_read_confirmation: whether to wait until all\
            listeners confirm that they have read the database
        :param socket_addresses: addresses of the listeners to notify
        :param database_file_path: path to the database file to announce
        :return: a dict exposing this instance as "notification_interface"
        """

        # notification protocol
        self._notification_protocol = \
            NotificationProtocol(socket_addresses, wait_for_read_confirmation)
        self.send_read_notification(database_file_path)

        return {"notification_interface": self}

    def wait_for_confirmation(self):
        """ Waits for devices to confirm they have read the database via the\
            notification protocol

        :return:
        """
        self._notification_protocol.wait_for_confirmation()

    def send_read_notification(self, database_directory):
        """ Send the read notifications via the notification protocol

        :param database_directory: the path to the database
        :return:
        """
        self._notification_protocol.send_read_notification(database_directory)

    def send_start_notification(self):
        """ Send the start notifications via the notification protocol

        :return:
        """
        self._notification_protocol.send_start_notification()

    def stop(self):
        """ Ends the notification protocol

        :return:
        """
        logger.debug("[data_base_thread] Stopping")
        self._notification_protocol.close()
nilq/baby-python
python
#!/usr/bin/env python # Copyright (c) 2011 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import hashlib import optparse import os import urllib2 import sys import time # Print a dot every time this number of bytes is read. PROGRESS_SPACING = 128 * 1024 def ReadFile(filename): fh = open(filename, 'r') try: return fh.read() finally: fh.close() def WriteFile(filename, data): fh = open(filename, 'w') try: fh.write(data) finally: fh.close() def HashFile(filename): hasher = hashlib.sha1() fh = open(filename, 'rb') try: while True: data = fh.read(4096) if len(data) == 0: break hasher.update(data) finally: fh.close() return hasher.hexdigest() def CopyStream(input_stream, output_stream): """Copies the contents of input_stream to output_stream. Prints dots to indicate progress. """ bytes_read = 0 dots_printed = 0 while True: data = input_stream.read(4096) if len(data) == 0: break output_stream.write(data) bytes_read += len(data) if bytes_read / PROGRESS_SPACING > dots_printed: sys.stdout.write('.') sys.stdout.flush() dots_printed += 1 def RenameWithRetry(old_path, new_path): # Renames of files that have recently been closed are known to be # unreliable on Windows, because virus checkers like to keep the # file open for a little while longer. This tends to happen more # for files that look like Windows executables, which does not apply # to our files, but we retry the rename here just in case. if sys.platform in ('win32', 'cygwin'): for i in range(5): try: if os.path.exists(new_path): os.remove(new_path) os.rename(old_path, new_path) return except Exception, exn: sys.stdout.write('Rename failed with %r. Retrying...\n' % str(exn)) sys.stdout.flush() time.sleep(1) raise Exception('Unabled to rename irt file') else: os.rename(old_path, new_path) def DownloadFile(dest_path, url): url_path = '%s.url' % dest_path temp_path = '%s.temp' % dest_path if os.path.exists(url_path) and ReadFile(url_path).strip() == url: # The URL matches that of the file we previously downloaded, so # there should be nothing to do. return sys.stdout.write('Downloading %r to %r\n' % (url, dest_path)) output_fh = open(temp_path, 'wb') stream = urllib2.urlopen(url) CopyStream(stream, output_fh) output_fh.close() sys.stdout.write(' done\n') if os.path.exists(url_path): os.unlink(url_path) RenameWithRetry(temp_path, dest_path) WriteFile(url_path, url + '\n') stream.close() def DownloadFileWithRetry(dest_path, url): for i in range(5): try: DownloadFile(dest_path, url) break except urllib2.HTTPError, exn: if exn.getcode() == 404: raise sys.stdout.write('Download failed with error %r. Retrying...\n' % str(exn)) sys.stdout.flush() time.sleep(1) def EvalDepsFile(path): scope = {'Var': lambda name: scope['vars'][name]} execfile(path, {}, scope) return scope def Main(): parser = optparse.OptionParser() parser.add_option( '--base_url', dest='base_url', # For a view of this site that includes directory listings, see: # http://gsdview.appspot.com/nativeclient-archive2/ # (The trailing slash is required.) default=('http://commondatastorage.googleapis.com/' 'nativeclient-archive2/irt'), help='Base URL from which to download.') parser.add_option( '--nacl_revision', dest='nacl_revision', help='Download an IRT binary that was built from this ' 'SVN revision of Native Client.') parser.add_option( '--file_hash', dest='file_hashes', action='append', nargs=2, default=[], metavar='ARCH HASH', help='ARCH gives the name of the architecture (e.g. 
"x86_32") for ' 'which to download an IRT binary. ' 'HASH gives the expected SHA1 hash of the file.') options, args = parser.parse_args() if len(args) != 0: parser.error('Unexpected arguments: %r' % args) if options.nacl_revision is None and len(options.file_hashes) == 0: # The script must have been invoked directly with no arguments, # rather than being invoked by gclient. In this case, read the # DEPS file ourselves rather than having gclient pass us values # from DEPS. deps_data = EvalDepsFile(os.path.join('src', 'DEPS')) options.nacl_revision = deps_data['vars']['nacl_revision'] options.file_hashes = [ ('x86_32', deps_data['vars']['nacl_irt_hash_x86_32']), ('x86_64', deps_data['vars']['nacl_irt_hash_x86_64']), ] nacl_dir = os.path.join('src', 'native_client') if not os.path.exists(nacl_dir): # If "native_client" is not present, this might be because the # developer has put '"src/native_client": None' in their # '.gclient' file, because they don't want to build Chromium with # Native Client support. So don't create 'src/native_client', # because that would interfere with checking it out from SVN # later. sys.stdout.write( 'The directory %r does not exist: skipping downloading binaries ' 'for Native Client\'s IRT library\n' % nacl_dir) return if len(options.file_hashes) == 0: sys.stdout.write('No --file_hash arguments given: nothing to update\n') new_deps = [] for arch, expected_hash in options.file_hashes: url = '%s/r%s/irt_%s.nexe' % (options.base_url, options.nacl_revision, arch) dest_dir = os.path.join(nacl_dir, 'irt_binaries') if not os.path.exists(dest_dir): os.makedirs(dest_dir) dest_path = os.path.join(dest_dir, 'nacl_irt_%s.nexe' % arch) DownloadFileWithRetry(dest_path, url) downloaded_hash = HashFile(dest_path) if downloaded_hash != expected_hash: sys.stdout.write( 'Hash mismatch: the file downloaded from URL %r had hash %r, ' 'but we expected %r\n' % (url, downloaded_hash, expected_hash)) new_deps.append(' "nacl_irt_hash_%s": "%s",\n' % (arch, downloaded_hash)) if len(new_deps) > 0: sys.stdout.write('\nIf you have changed nacl_revision, the DEPS file ' 'probably needs to be updated with the following:\n%s\n' % ''.join(new_deps)) sys.exit(1) if __name__ == '__main__': Main()
nilq/baby-python
python
import RPi.GPIO as GPIO
from queue import Queue

# Event token placed on the queue whenever the button fires.
EventClick = 'C'


class ButtonWorker(object):
    """Watches a GPIO pin and queues an event on every button press."""

    def __init__(self, pin):
        self.gpio = GPIO
        self.gpio.setwarnings(False)
        self.queue = Queue()
        self.pin = pin
        self.gpio.setmode(GPIO.BCM)
        self.gpio.setup(self.pin, self.gpio.IN, pull_up_down=self.gpio.PUD_UP)
        # Debounced edge detection: Call() runs on a background thread.
        self.gpio.add_event_detect(self.pin, GPIO.RISING,
                                   callback=self.Call, bouncetime=500)

    def Call(self, pin):
        """Interrupt callback; just records the click for later handling."""
        self.queue.put(EventClick)

    def check(self):
        """Drain the queue; return True if at least one click was recorded."""
        result = False
        while not self.queue.empty():
            m = self.queue.get_nowait()
            if m == EventClick:
                print("Clicked")
                result = True
            self.queue.task_done()
        return result
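# Hedged sketch (not part of the original file): the same callback-to-queue
# hand-off can be exercised without GPIO hardware by firing the "interrupt"
# from a plain thread, which is useful for testing the consumer side.
import threading
from queue import Queue

def _demo_queue_handoff():
    events = Queue()
    producer = threading.Thread(target=lambda: events.put('C'))
    producer.start()
    producer.join()
    clicked = False
    while not events.empty():
        if events.get_nowait() == 'C':
            clicked = True
        events.task_done()
    print('clicked:', clicked)   # -> clicked: True

if __name__ == '__main__':
    _demo_queue_handoff()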
nilq/baby-python
python
from pyrogram.types import InlineQueryResultArticle,InputTextMessageContent from uuid import uuid4 class InlineQueryResults(list): def __init__(self): self.results = list() super().__init__(self.results) def add(self,title,message_text,message_parse_mode = None,message_disable_web_page_preview = None, url = None, description = None, thumb_url = None,reply_markup = None): self.results.append( InlineQueryResultArticle( id = uuid4(), title = title, input_message_content = InputTextMessageContent(message_text=message_text,parse_mode=message_parse_mode,disable_web_page_preview=message_disable_web_page_preview), url = url, description = description, thumb_url = thumb_url, reply_markup = reply_markup ) ) super().__init__(self.results)
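# Hedged usage sketch (appended, assumes pyrogram is installed): build a
# result list inside an inline-query handler and hand it to the query's
# answer method; only the add() signature defined above is used here.
if __name__ == '__main__':
    results = InlineQueryResults()
    results.add(
        title='Hello',
        message_text='**Hello from an inline result**',
        description='Sends a short greeting',
    )
    print(len(results))  # -> 1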
nilq/baby-python
python
from .FSError import * class ProtectFlags: FIBF_DELETE = 1 FIBF_EXECUTE = 2 FIBF_WRITE = 4 FIBF_READ = 8 FIBF_ARCHIVE = 16 FIBF_PURE = 32 FIBF_SCRIPT = 64 flag_txt = "HSPArwed" flag_num = len(flag_txt) flag_none = 0xF # -------- empty_string = "-" * flag_num def __init__(self, mask=0): self.mask = mask def get_mask(self): return self.mask def __str__(self): txt = "" pos = self.flag_num - 1 m = 1 << pos for i in range(self.flag_num): bit = self.mask & m == m show = "-" flg = self.flag_txt[i] flg_low = flg.lower() if bit: if flg_low != flg: show = flg_low else: if flg_low == flg: show = flg_low txt += show m >>= 1 pos -= 1 return txt def bin_str(self): res = "" m = 1 << (self.flag_num - 1) for i in range(self.flag_num): if m & self.mask == m: res += "1" else: res += "0" m >>= 1 return res def short_str(self): return str(self).replace("-", "") def parse_full(self, s): """parse a string with all flags""" n = len(self.flag_txt) if len(s) != n: raise ValueError("full string size mismatch!") mask = 0 for i in range(n): val = s[i] ref = self.flag_txt[i] ref_lo = ref.lower() if val not in (ref, ref_lo, "-"): raise ValueError("invalid protect char: " + val) is_lo = ref == ref_lo is_blank = val == "-" if is_lo: do_set = is_blank else: do_set = not is_blank if do_set: bit_pos = n - i - 1 bit_mask = 1 << bit_pos mask |= bit_mask self.mask = mask def parse(self, s): if len(s) == 0: return # allow to add with '+' or sub with '-' n = self.flag_txt mode = "+" self.mask = self.flag_none for a in s.lower(): if a in "+-": mode = a else: mask = None is_low = None for i in range(self.flag_num): flg = self.flag_txt[i] flg_low = flg.lower() if flg_low == a: mask = 1 << (self.flag_num - 1 - i) is_low = flg_low == flg break if mask == None: raise FSError(INVALID_PROTECT_FORMAT, extra="char: " + a) # apply mask if mode == "+": if is_low: self.mask &= ~mask else: self.mask |= mask else: if is_low: self.mask |= mask else: self.mask &= ~mask def is_set(self, mask): return self.mask & mask == 0 # LO active def set(self, mask): self.mask &= ~mask def clr(self, mask): self.mask |= mask def is_d(self): return self.is_set(self.FIBF_DELETE) def is_e(self): return self.is_set(self.FIBF_EXECUTE) def is_w(self): return self.is_set(self.FIBF_WRITE) def is_r(self): return self.is_set(self.FIBF_READ) if __name__ == "__main__": inp = ["h", "s", "p", "a", "r", "w", "e", "d"] for i in inp: p = ProtectFlags() p.parse(i) s = str(p) if not i in s: print(s)
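# Hedged usage sketch (appended, not part of the original module): the rwed
# bits are low-active, so a zero mask means fully accessible, and clr()
# revokes access by setting the corresponding bit.
if __name__ == "__main__":
    flags = ProtectFlags(0)
    print(str(flags))                    # -> ----rwed
    print(flags.is_r(), flags.is_w())    # -> True True
    flags.clr(ProtectFlags.FIBF_WRITE)   # revoke write access
    print(str(flags))                    # -> ----r-ed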
nilq/baby-python
python
# -*- coding: utf-8 -*- """ tipfyext.mako ~~~~~~~~~~~~~ Mako template support for Tipfy. Learn more about Mako at http://www.makotemplates.org/ :copyright: 2011 by tipfy.org. :license: BSD, see LICENSE.txt for more details. """ from __future__ import absolute_import from cStringIO import StringIO from mako.lookup import TemplateLookup from mako.runtime import Context from werkzeug import cached_property #: Default configuration values for this module. Keys are: #: #: templates_dir #: Directory for templates. Default is `templates`. default_config = { 'templates_dir': 'templates', } class Mako(object): def __init__(self, app, _globals=None, filters=None): self.app = app config = app.config[__name__] dirs = config.get('templates_dir') if isinstance(dirs, basestring): dirs = [dirs] self.environment = TemplateLookup(directories=dirs, output_encoding='utf-8', encoding_errors='replace') def render(self, _filename, **context): """Renders a template and returns a response object. :param _filename: The template filename, related to the templates directory. :param context: Keyword arguments used as variables in the rendered template. These will override values set in the request context. :returns: A rendered template. """ template = self.environment.get_template(_filename) return template.render_unicode(**context) def render_template(self, _handler, _filename, **context): """Renders a template and returns a response object. :param _filename: The template filename, related to the templates directory. :param context: Keyword arguments used as variables in the rendered template. These will override values set in the request context. :returns: A rendered template. """ ctx = _handler.context.copy() ctx.update(context) return self.render(_filename, **ctx) def render_response(self, _handler, _filename, **context): """Returns a response object with a rendered template. :param _filename: The template filename, related to the templates directory. :param context: Keyword arguments used as variables in the rendered template. These will override values set in the request context. """ res = self.render_template(_handler, _filename, **context) return self.app.response_class(res) @classmethod def factory(cls, _app, _name, **kwargs): if _name not in _app.registry: _app.registry[_name] = cls(_app, **kwargs) return _app.registry[_name] class MakoMixin(object): """Mixin that adds ``render_template`` and ``render_response`` methods to a :class:`tipfy.RequestHandler`. It will use the request context to render templates. """ # The Mako creator. mako_class = Mako @cached_property def mako(self): return self.mako_class.factory(self.app, 'mako') def render_template(self, _filename, **context): return self.mako.render_template(self, _filename, **context) def render_response(self, _filename, **context): return self.mako.render_response(self, _filename, **context)
nilq/baby-python
python
# coding=utf-8 import unittest import sys from helpers import xroad, auditchecker from main.maincontroller import MainController from tests.xroad_configure_service_222 import configure_service class XroadDeleteService(unittest.TestCase): """ SERVICE_15 Delete a Security Server Client's WSDL RIA URL: https://jira.ria.ee/browse/XT-272, https://jira.ria.ee/browse/XTKB-27, https://jira.ria.ee/browse/XTKB-95 Depends on finishing other test(s): XroadSecurityServerClientRegistration, XroadConfigureService Requires helper scenarios: X-Road version: 6.16.0 """ def test_xroad_configure_service(self): main = MainController(self) # Set test name and number main.test_number = 'SERVICE_15' main.test_name = self.__class__.__name__ ss_host = main.config.get('ss2.host') ss_user = main.config.get('ss2.user') ss_pass = main.config.get('ss2.pass') ss_ssh_host = main.config.get('ss2.ssh_host') ss_ssh_user = main.config.get('ss2.ssh_user') ss_ssh_pass = main.config.get('ss2.ssh_pass') client = xroad.split_xroad_id(main.config.get('ss2.client_id')) log_checker = auditchecker.AuditChecker(ss_ssh_host, ss_ssh_user, ss_ssh_pass) wsdl_url = main.config.get('wsdl.remote_path').format(main.config.get('wsdl.service_wsdl')) wsdl_test_service = main.config.get('wsdl.service_wsdl_test_service1') # Delete the added service test_delete_service = configure_service.test_delete_service(case=main, client=client, wsdl_url=wsdl_url, log_checker=log_checker) # Delete the other added service wsdl_test_service_url = main.config.get('wsdl.remote_path').format(wsdl_test_service) test_delete_service1 = configure_service.test_delete_service(case=main, client=client, wsdl_url=wsdl_test_service_url) try: main.log('Trying to check for and remove leftover service (2): {0}'.format(wsdl_test_service_url)) main.reload_webdriver(url=ss_host, username=ss_user, password=ss_pass) test_delete_service1() except Exception: main.log('XroadDeleteService: Service (2) not found, no need to delete.') sys.exc_clear() try: # Delete service main.reload_webdriver(url=ss_host, username=ss_user, password=ss_pass) test_delete_service() except: main.log('XroadDeleteService: Failed to delete service') assert False finally: # Test teardown main.tearDown()
nilq/baby-python
python
from typing import Callable, Tuple import numpy as np from fedot.core.data.data import InputData from fedot.core.validation.compose.metric_estimation import metric_evaluation from fedot.core.validation.split import ts_cv_generator def ts_metric_calculation(reference_data: InputData, cv_folds: int, validation_blocks: int, metrics: [str, Callable] = None, pipeline=None, log=None) -> [Tuple[float, ...], None]: """ Determine metric value for time series forecasting pipeline based on data for validation :param reference_data: InputData for validation :param cv_folds: number of folds to split data :param validation_blocks: number of validation blocks for time series validation :param metrics: name of metric or callable object :param pipeline: Pipeline for validation :param log: object for logging """ log.debug(f'Pipeline {pipeline.root_node.descriptive_id} fit for cross validation started') try: evaluated_metrics = [[] for _ in range(len(metrics))] for train_data, test_data, vb_number in ts_cv_generator(reference_data, cv_folds, validation_blocks, log): # Calculate metric value for every fold of data evaluated_metrics = metric_evaluation(pipeline, train_data, test_data, metrics, evaluated_metrics, vb_number) evaluated_metrics = tuple(map(lambda x: np.mean(x), evaluated_metrics)) log.debug(f'Pipeline {pipeline.root_node.descriptive_id} with metrics: {list(evaluated_metrics)}') except Exception as ex: log.debug(f'{__name__}. Pipeline assessment warning: {ex}. Continue.') evaluated_metrics = None return evaluated_metrics
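# Hedged, self-contained sketch (not from the pipeline code above): the shape
# of its aggregation -- one list of per-fold scores per metric, reduced with
# np.mean -- in isolation.
import numpy as np

def average_over_folds(per_fold_scores):
    """per_fold_scores: one list of fold scores per metric."""
    return tuple(float(np.mean(scores)) for scores in per_fold_scores)

# Example: two metrics evaluated on three folds each.
print(average_over_folds([[0.9, 0.8, 0.85], [10.0, 12.0, 11.0]]))
# -> (0.85, 11.0) up to float rounding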
nilq/baby-python
python
# # PySNMP MIB module TRAPEZE-NETWORKS-BASIC-MIB (http://snmplabs.com/pysmi) # ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/TRAPEZE-NETWORKS-BASIC-MIB # Produced by pysmi-0.3.4 at Wed May 1 15:27:11 2019 # On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4 # Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15) # OctetString, Integer, ObjectIdentifier = mibBuilder.importSymbols("ASN1", "OctetString", "Integer", "ObjectIdentifier") NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues") SingleValueConstraint, ValueRangeConstraint, ValueSizeConstraint, ConstraintsIntersection, ConstraintsUnion = mibBuilder.importSymbols("ASN1-REFINEMENT", "SingleValueConstraint", "ValueRangeConstraint", "ValueSizeConstraint", "ConstraintsIntersection", "ConstraintsUnion") ModuleCompliance, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup") iso, Counter64, NotificationType, Counter32, IpAddress, Integer32, Bits, Unsigned32, ModuleIdentity, Gauge32, MibIdentifier, MibScalar, MibTable, MibTableRow, MibTableColumn, TimeTicks, ObjectIdentity = mibBuilder.importSymbols("SNMPv2-SMI", "iso", "Counter64", "NotificationType", "Counter32", "IpAddress", "Integer32", "Bits", "Unsigned32", "ModuleIdentity", "Gauge32", "MibIdentifier", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "TimeTicks", "ObjectIdentity") DisplayString, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TextualConvention") TrpzLicenseFeature, = mibBuilder.importSymbols("TRAPEZE-NETWORKS-LICENSE-FEATURE-TC-MIB", "TrpzLicenseFeature") trpzMibs, = mibBuilder.importSymbols("TRAPEZE-NETWORKS-ROOT-MIB", "trpzMibs") trpzBasic = ModuleIdentity((1, 3, 6, 1, 4, 1, 14525, 4, 2)) trpzBasic.setRevisions(('2009-11-16 00:10', '2006-07-10 00:08', '2006-04-14 00:07', '2005-01-01 00:00',)) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): if mibBuilder.loadTexts: trpzBasic.setRevisionsDescriptions(('v3.0.0: Moved TrpzLicenseFeature into its own module for easier maintenance. This will be published in 7.1 release.', 'v2.0.6: Fixed MAX-ACCESS of trpzMobilityMemberEntryAddr, an index that was also the only column', 'v2.0.5: Revised for 4.1 release', 'v1: initial version, as for 4.0 and older releases',)) if mibBuilder.loadTexts: trpzBasic.setLastUpdated('200911160010Z') if mibBuilder.loadTexts: trpzBasic.setOrganization('Trapeze Networks') if mibBuilder.loadTexts: trpzBasic.setContactInfo('Trapeze Networks Technical Support www.trapezenetworks.com US: 866.TRPZ.TAC International: 925.474.2400 support@trapezenetworks.com') if mibBuilder.loadTexts: trpzBasic.setDescription("Basic objects for Trapeze Networks wireless switches. Copyright 2004-2009 Trapeze Networks, Inc. All rights reserved. This Trapeze Networks SNMP Management Information Base Specification (Specification) embodies Trapeze Networks' confidential and proprietary intellectual property. Trapeze Networks retains all title and ownership in the Specification, including any revisions. 
This Specification is supplied 'AS IS' and Trapeze Networks makes no warranty, either express or implied, as to the use, operation, condition, or performance of the Specification.") trpzBasicSystemInfo = MibIdentifier((1, 3, 6, 1, 4, 1, 14525, 4, 2, 1)) trpzSerialNumber = MibScalar((1, 3, 6, 1, 4, 1, 14525, 4, 2, 1, 1), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readonly") if mibBuilder.loadTexts: trpzSerialNumber.setStatus('current') if mibBuilder.loadTexts: trpzSerialNumber.setDescription('The serial number of the switch.') trpzSwMajorVersionNumber = MibScalar((1, 3, 6, 1, 4, 1, 14525, 4, 2, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 99))).setMaxAccess("readonly") if mibBuilder.loadTexts: trpzSwMajorVersionNumber.setStatus('current') if mibBuilder.loadTexts: trpzSwMajorVersionNumber.setDescription('The major release version of the running software.') trpzSwMinorVersionNumber = MibScalar((1, 3, 6, 1, 4, 1, 14525, 4, 2, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 99))).setMaxAccess("readonly") if mibBuilder.loadTexts: trpzSwMinorVersionNumber.setStatus('current') if mibBuilder.loadTexts: trpzSwMinorVersionNumber.setDescription('The minor release version of the running software.') trpzVersionString = MibScalar((1, 3, 6, 1, 4, 1, 14525, 4, 2, 1, 4), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 64))).setMaxAccess("readonly") if mibBuilder.loadTexts: trpzVersionString.setStatus('current') if mibBuilder.loadTexts: trpzVersionString.setDescription('The version string of the running software, including the major, minor, patch and build numbers, such as 3.0.0.185') trpzMobilityDomainInfo = MibIdentifier((1, 3, 6, 1, 4, 1, 14525, 4, 2, 2)) trpzMobilityDomainName = MibScalar((1, 3, 6, 1, 4, 1, 14525, 4, 2, 2, 1), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readonly") if mibBuilder.loadTexts: trpzMobilityDomainName.setStatus('current') if mibBuilder.loadTexts: trpzMobilityDomainName.setDescription('The mobility domain containing the switch, or a zero-length string when the mobility domain is unknown.') trpzMobilitySeedIp = MibScalar((1, 3, 6, 1, 4, 1, 14525, 4, 2, 2, 2), IpAddress()).setMaxAccess("readonly") if mibBuilder.loadTexts: trpzMobilitySeedIp.setStatus('current') if mibBuilder.loadTexts: trpzMobilitySeedIp.setDescription("The IPv4 address of the seed switch for this switch's mobility domain, or the IPv4 address 0.0.0.0 if unknown.") trpzMobilityMemberTableSize = MibScalar((1, 3, 6, 1, 4, 1, 14525, 4, 2, 2, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 64))).setMaxAccess("readonly") if mibBuilder.loadTexts: trpzMobilityMemberTableSize.setStatus('current') if mibBuilder.loadTexts: trpzMobilityMemberTableSize.setDescription('The number of entries in the mobility member table, trpzMobilityMemberTable.') trpzMobilityMemberTable = MibTable((1, 3, 6, 1, 4, 1, 14525, 4, 2, 2, 4), ) if mibBuilder.loadTexts: trpzMobilityMemberTable.setStatus('current') if mibBuilder.loadTexts: trpzMobilityMemberTable.setDescription('Table of members of the mobility domain, indexed by the member IPv4 address.') trpzMobilityMemberEntry = MibTableRow((1, 3, 6, 1, 4, 1, 14525, 4, 2, 2, 4, 1), ).setIndexNames((0, "TRAPEZE-NETWORKS-BASIC-MIB", "trpzMobilityMemberEntryAddr")) if mibBuilder.loadTexts: trpzMobilityMemberEntry.setStatus('current') if mibBuilder.loadTexts: trpzMobilityMemberEntry.setDescription('An entry in the trpzMobilityMemberTable table.') trpzMobilityMemberEntryAddr = 
MibTableColumn((1, 3, 6, 1, 4, 1, 14525, 4, 2, 2, 4, 1, 1), IpAddress()).setMaxAccess("readonly") if mibBuilder.loadTexts: trpzMobilityMemberEntryAddr.setStatus('current') if mibBuilder.loadTexts: trpzMobilityMemberEntryAddr.setDescription('IPv4 address of a member of the mobility domain.') trpzLicenseInfoGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 14525, 4, 2, 3)) trpzLicenseInfoTableSize = MibScalar((1, 3, 6, 1, 4, 1, 14525, 4, 2, 3, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 64))).setMaxAccess("readonly") if mibBuilder.loadTexts: trpzLicenseInfoTableSize.setStatus('current') if mibBuilder.loadTexts: trpzLicenseInfoTableSize.setDescription('The number of entries in the license table, trpzLicenseInfoTable.') trpzLicenseInfoTable = MibTable((1, 3, 6, 1, 4, 1, 14525, 4, 2, 3, 2), ) if mibBuilder.loadTexts: trpzLicenseInfoTable.setStatus('current') if mibBuilder.loadTexts: trpzLicenseInfoTable.setDescription('Table of installed licenses on the switch. The licences provide additional capabilities over the default capabilities of the switch.') trpzLicenseInfoEntry = MibTableRow((1, 3, 6, 1, 4, 1, 14525, 4, 2, 3, 2, 1), ).setIndexNames((0, "TRAPEZE-NETWORKS-BASIC-MIB", "trpzLicenseInfoEntryFeature")) if mibBuilder.loadTexts: trpzLicenseInfoEntry.setStatus('current') if mibBuilder.loadTexts: trpzLicenseInfoEntry.setDescription('A license table entry.') trpzLicenseInfoEntryFeature = MibTableColumn((1, 3, 6, 1, 4, 1, 14525, 4, 2, 3, 2, 1, 1), TrpzLicenseFeature()) if mibBuilder.loadTexts: trpzLicenseInfoEntryFeature.setStatus('current') if mibBuilder.loadTexts: trpzLicenseInfoEntryFeature.setDescription('The feature being reported on') trpzLicenseInfoEntryValue = MibTableColumn((1, 3, 6, 1, 4, 1, 14525, 4, 2, 3, 2, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 4096))).setMaxAccess("readonly") if mibBuilder.loadTexts: trpzLicenseInfoEntryValue.setStatus('current') if mibBuilder.loadTexts: trpzLicenseInfoEntryValue.setDescription('The value of the feature enabled, for example a feature may have multiple levels of licensing, so the value will very with the license level.') trpzLicenseInfoEntryDescr = MibTableColumn((1, 3, 6, 1, 4, 1, 14525, 4, 2, 3, 2, 1, 3), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly") if mibBuilder.loadTexts: trpzLicenseInfoEntryDescr.setStatus('current') if mibBuilder.loadTexts: trpzLicenseInfoEntryDescr.setDescription("A human interpretable description of this license, for example, '120 APs or DAPs.'") mibBuilder.exportSymbols("TRAPEZE-NETWORKS-BASIC-MIB", trpzBasicSystemInfo=trpzBasicSystemInfo, trpzSwMinorVersionNumber=trpzSwMinorVersionNumber, trpzBasic=trpzBasic, trpzMobilityMemberTableSize=trpzMobilityMemberTableSize, trpzMobilityDomainName=trpzMobilityDomainName, trpzLicenseInfoTable=trpzLicenseInfoTable, trpzLicenseInfoTableSize=trpzLicenseInfoTableSize, trpzVersionString=trpzVersionString, trpzMobilityMemberTable=trpzMobilityMemberTable, trpzLicenseInfoGroup=trpzLicenseInfoGroup, trpzLicenseInfoEntryDescr=trpzLicenseInfoEntryDescr, PYSNMP_MODULE_ID=trpzBasic, trpzMobilityMemberEntry=trpzMobilityMemberEntry, trpzSerialNumber=trpzSerialNumber, trpzSwMajorVersionNumber=trpzSwMajorVersionNumber, trpzMobilityMemberEntryAddr=trpzMobilityMemberEntryAddr, trpzLicenseInfoEntry=trpzLicenseInfoEntry, trpzLicenseInfoEntryValue=trpzLicenseInfoEntryValue, trpzMobilityDomainInfo=trpzMobilityDomainInfo, trpzLicenseInfoEntryFeature=trpzLicenseInfoEntryFeature, trpzMobilitySeedIp=trpzMobilitySeedIp)
nilq/baby-python
python
#!/usr/bin/env python2.7 # Copyright 2016, Google Inc. # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following disclaimer # in the documentation and/or other materials provided with the # distribution. # * Neither the name of Google Inc. nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. """Definition of targets to build distribution packages.""" import os.path import sys sys.path.insert(0, os.path.abspath('..')) import python_utils.jobset as jobset def create_docker_jobspec(name, dockerfile_dir, shell_command, environ={}, flake_retries=0, timeout_retries=0): """Creates jobspec for a task running under docker.""" environ = environ.copy() environ['RUN_COMMAND'] = shell_command docker_args=[] for k,v in environ.items(): docker_args += ['-e', '%s=%s' % (k, v)] docker_env = {'DOCKERFILE_DIR': dockerfile_dir, 'DOCKER_RUN_SCRIPT': 'tools/run_tests/dockerize/docker_run.sh', 'OUTPUT_DIR': 'artifacts'} jobspec = jobset.JobSpec( cmdline=['tools/run_tests/dockerize/build_and_run_docker.sh'] + docker_args, environ=docker_env, shortname='build_package.%s' % (name), timeout_seconds=30*60, flake_retries=flake_retries, timeout_retries=timeout_retries) return jobspec def create_jobspec(name, cmdline, environ=None, cwd=None, shell=False, flake_retries=0, timeout_retries=0): """Creates jobspec.""" jobspec = jobset.JobSpec( cmdline=cmdline, environ=environ, cwd=cwd, shortname='build_package.%s' % (name), timeout_seconds=10*60, flake_retries=flake_retries, timeout_retries=timeout_retries, shell=shell) return jobspec class CSharpPackage: """Builds C# nuget packages.""" def __init__(self, linux=False): self.linux = linux self.labels = ['package', 'csharp'] if linux: self.name = 'csharp_package_dotnetcli_linux' self.labels += ['linux'] else: self.name = 'csharp_package_dotnetcli_windows' self.labels += ['windows'] def pre_build_jobspecs(self): return [] def build_jobspec(self): if self.linux: return create_docker_jobspec( self.name, 'tools/dockerfile/test/csharp_coreclr_x64', 'src/csharp/build_packages_dotnetcli.sh') else: return create_jobspec(self.name, ['build_packages_dotnetcli.bat'], cwd='src\\csharp', shell=True) def __str__(self): return self.name class NodePackage: """Builds Node NPM package and collects precompiled binaries""" def __init__(self): self.name = 'node_package' 
self.labels = ['package', 'node', 'linux'] def pre_build_jobspecs(self): return [] def build_jobspec(self): return create_docker_jobspec( self.name, 'tools/dockerfile/grpc_artifact_linux_x64', 'tools/run_tests/artifacts/build_package_node.sh') class RubyPackage: """Collects ruby gems created in the artifact phase""" def __init__(self): self.name = 'ruby_package' self.labels = ['package', 'ruby', 'linux'] def pre_build_jobspecs(self): return [] def build_jobspec(self): return create_docker_jobspec( self.name, 'tools/dockerfile/grpc_artifact_linux_x64', 'tools/run_tests/artifacts/build_package_ruby.sh') class PythonPackage: """Collects python eggs and wheels created in the artifact phase""" def __init__(self): self.name = 'python_package' self.labels = ['package', 'python', 'linux'] def pre_build_jobspecs(self): return [] def build_jobspec(self): return create_docker_jobspec( self.name, 'tools/dockerfile/grpc_artifact_linux_x64', 'tools/run_tests/artifacts/build_package_python.sh') class PHPPackage: """Copy PHP PECL package artifact""" def __init__(self): self.name = 'php_package' self.labels = ['package', 'php', 'linux'] def pre_build_jobspecs(self): return [] def build_jobspec(self): return create_docker_jobspec( self.name, 'tools/dockerfile/grpc_artifact_linux_x64', 'tools/run_tests/artifacts/build_package_php.sh') def targets(): """Gets list of supported targets""" return [CSharpPackage(), CSharpPackage(linux=True), NodePackage(), RubyPackage(), PythonPackage(), PHPPackage()]
nilq/baby-python
python
"""Module with implementation of the Grid classes.""" from bubblebox.library.create import Dataset,Block,Data from bubblebox.library.utilities import Action import numpy import pymorton class GridBase(Dataset): """Base class for the Grid.""" type_ = 'base' def __init__(self, varlist, nx, ny, xmin, xmax, ymin, ymax, xblocks=1, yblocks=1, user_bc_type=None, user_bc_val=None): """ Initialize the Grid object and allocate the data. Parameters ---------- varlist : list of strings List of names for the variables to create. nx : integer Number of cells in the x-direction. ny : integer Number of cells in the y-direction. xblocks : integer Number of blocks in the x-direction yblocks : integer Number of blocks in the y-direction xmin : float Domain limit at the left side. xmax : float Domain limit at the right side. ymin : float Domain limit at the bottom side. ymax : float Domain limit at the top side. user_bc_type : dictionary of (string, list) items User-defined boundary types to overwrite default ones. user_bc_val : dictionary of (string, list) items User-defined boundary values to overwrite default ones. """ # Perform checks if nx%xblocks or ny%yblocks: raise ValueError('[flowx.domain.GridBase]:(nx,ny) must be exactly '+ 'divisible by (xblocks,yblocks)') elif (xblocks%2 or yblocks%2) and xblocks!=1 and yblocks!=1: raise ValueError('[flowx.domain.GridBase]:(xblocks,yblocks) must be exactly '+ 'divisible by 2') # Organize data at coarsest level dx,dy = abs(xmax-xmin)/nx,abs(ymax-ymin)/ny nxb,nyb = int(nx/xblocks),int(ny/yblocks) levels = None # Save grid attributes at coarsest level self.nx,self.ny = nx,ny self.dx,self.dy = dx,dy # Initialize block attributes block_attributes = self.__class__.initialize_block_attributes(xblocks,yblocks,dx,dy, xmin,xmax,ymin,ymax,levels) # Initialize data attributes nblocks = len(block_attributes) data_attributes = self.__class__.initialize_data_attributes(nblocks,nxb,nyb,varlist) # Create data and block objects data = Data(**data_attributes) blocklist = [Block(data,**attributes) for attributes in block_attributes] # Call base class constructor super().__init__(blocklist,data) # Set gridline coordinates self.set_gridline_coordinates() # Set boundary blocks #self.set_domain_boundaries() # Boundary condition information self.bc_type = {} self.bc_val = {} self.set_default_bc(varlist) if user_bc_type is not None and user_bc_val is not None: self.set_user_bc(user_bc_type, user_bc_val) self.fill_guard_cells(varlist) def __del__(self): """Destructor""" self.purge() @staticmethod def initialize_block_attributes(xblocks,yblocks,dx,dy,xmin,xmax,ymin,ymax,levels): """Private method for initialization""" block_attributes = [] for lblock in range(xblocks*yblocks): iloc,jloc = pymorton.deinterleave2(lblock) imin,imax = [xmin + (iloc/xblocks)*(xmax-xmin), xmin + ((iloc+1)/xblocks)*(xmax-xmin)] jmin,jmax = [ymin + (jloc/yblocks)*(ymax-ymin), ymin + ((jloc+1)/yblocks)*(ymax-ymin)] block_attributes.append({'dx' : dx, 'dy' : dy, 'xmin' : imin, 'xmax' : imax, 'ymin' : jmin, 'ymax' : jmax, 'tag' : lblock}) return block_attributes @staticmethod def initialize_data_attributes(nblocks,nxb,nyb,varlist): """Private method for initialization""" raise NotImplementedError def set_gridline_coordinates(self): """Set the gridline coordinates.""" raise NotImplementedError def addvar(self,varkey): """Add a variable""" super().addvar(varkey) self.set_default_bc(varkey) def set_default_bc(self,varlist): """Set default boundary conditions (homogeneous Neumann).""" if type(varlist) is str: varlist = 
[varlist] default_bc_type = 4 * ['neumann'] default_bc_val = 4 * [0.0] num = len(varlist) self.bc_type = {**self.bc_type, **dict(zip(varlist, num * [default_bc_type]))} self.bc_val = {**self.bc_val, **dict(zip(varlist, num * [default_bc_val]))} def set_user_bc(self, user_bc_type, user_bc_val): """Overwrite default boundary conditions with user-provided ones. Parameters ---------- user_bc_type : dictionary of (string, list) items User-defined boundary types. user_bc_val : dictionary of (string, list) items User-defined boundary values. """ # Overwrite default boundary types self.bc_type = {**self.bc_type, **user_bc_type} # Overwrite default boundary values self.bc_val = {**self.bc_val, **user_bc_val} def update_bc_val(self, user_bc_val): """Overwrite boundary condition values with user-provided ones. Parameters ---------- user_bc_val : dictionary of (string, list) items User-defined boundary values. """ self.bc_val = {**self.bc_val, **user_bc_val} def update_bc_type(self, user_bc_type): self.bc_type = {**self.bc_type, **user_bc_type} def compute_error(self, eror, ivar, asol): """Compute the error between the numerical and analytical solutions. Error is defined as the absolute difference between the two solutions. Arguments --------- eror : string Name of the grid variable of the error. ivar : string Name of the grid variable of the numerical solution. asol : string Name of the grid variable of the analytical solution. """ for block in self.blocklist: block[eror] = numpy.abs(block[ivar] - block[asol]) def get_l2_norm(self, eror): """Compute the L2 norm for a given variable. Arguments --------- eror : string Name of the grid variable for which norm is desired Returns ------- l2_norm : float The L2-norm. """ l2_norm = 0. for block in self.blocklist: l2_norm = l2_norm + (numpy.sqrt(numpy.sum(block[eror]**2)) / ((self.nxb+2*self.xguard) * (self.nyb+2*self.yguard))) return l2_norm/self.nblocks def fill_guard_cells(self, varlist, **kwargs): """Fill value at guard cells for given variable names. Parameters ---------- varlist : string or list of strings Name of variables to update. """ self.halo_exchange(varlist, **kwargs) # Convert single string to a list if type(varlist) is str: varlist = [varlist] locations = ['xlow','xhigh','ylow','yhigh'] # TODO add a call to exchange data between blocks # TODO figure out how to tag blocks at boundary etc. # TODO make this efficient for varkey in varlist: bc_type_var = self.bc_type[varkey] bc_val_var = self.bc_val[varkey] for block in self.blocklist: deltas = [block.dx, block.dx, block.dy, block.dy] neighbors = [block.neighdict[location] for location in locations] blockdata = block[varkey] for location,neighbor,delta,bc_type,bc_val in zip(locations,neighbors,deltas, bc_type_var,bc_val_var): if neighbor is None: if bc_type == 'neumann': self.__class__.fill_guard_cells_neumann(blockdata,location,bc_val,delta) elif bc_type == 'dirichlet': self.__class__.fill_guard_cells_dirichlet(blockdata,location,bc_val) elif bc_type == 'outflow': self.__class__.fill_guard_cells_dirichlet(blockdata,location,bc_val) elif bc_type == 'projection': self.__class__.fill_guard_cells_projection(blockdata,location) elif bc_type == None: None else: raise ValueError('Boundary type "{}" not implemented'.format(bc_type)) @staticmethod def fill_guard_cells_dirichlet(blockdata, loc, bc_val): """Fill guard cells using a Dirichlet condition. Method implemented in child classes. Parameters ---------- loc : string Boundary location; choices: ['left', 'right', 'bottom', 'top']. 
        bc_val : float
            Dirichlet boundary value.

        """
        raise NotImplementedError()

    @staticmethod
    def fill_guard_cells_neumann(blockdata, loc, bc_val, delta):
        """Fill guard cells using a Neumann condition.

        Parameters
        ----------
        loc : string
            Boundary location;
            choices: ['xlow', 'xhigh', 'ylow', 'yhigh'].
        bc_val : float
            Neumann boundary value (prescribed gradient).
        delta : float
            Grid-cell width.

        """
        if loc == 'xlow':
            blockdata[:, :, 0] = bc_val * delta + blockdata[:, :, 1]
        elif loc == 'xhigh':
            blockdata[:, :, -1] = bc_val * delta + blockdata[:, :, -2]
        elif loc == 'ylow':
            blockdata[:, 0, :] = bc_val * delta + blockdata[:, 1, :]
        elif loc == 'yhigh':
            blockdata[:, -1, :] = bc_val * delta + blockdata[:, -2, :]
        else:
            raise ValueError('Unknown boundary location "{}"'.format(loc))

    @staticmethod
    def fill_guard_cells_projection(blockdata, loc):
        """Fill guard cells by linear extrapolation (projection BC).

        Parameters
        ----------
        loc : string
            Boundary location;
            choices: ['xlow', 'xhigh', 'ylow', 'yhigh'].

        """
        if loc == 'xlow':
            blockdata[:, :, 0] = 2 * blockdata[:, :, 1] - blockdata[:, :, 2]
        elif loc == 'xhigh':
            blockdata[:, :, -1] = 2 * blockdata[:, :, -2] - blockdata[:, :, -3]
        elif loc == 'ylow':
            blockdata[:, 0, :] = 2 * blockdata[:, 1, :] - blockdata[:, 2, :]
        elif loc == 'yhigh':
            blockdata[:, -1, :] = 2 * blockdata[:, -2, :] - blockdata[:, -3, :]
        else:
            raise ValueError('Unknown boundary location "{}"'.format(loc))
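# Hedged, self-contained sketch (not part of the class above): the Neumann
# fill writes guard = bc_val * delta + first_interior, a one-sided finite
# difference enforcing the prescribed gradient at the boundary face.
import numpy as np

def _demo_neumann_fill():
    phi = np.zeros((1, 3, 3))      # (var, y, x) layout like blockdata
    phi[:, :, 1] = 5.0             # first interior column in x
    bc_val, delta = 2.0, 0.1
    phi[:, :, 0] = bc_val * delta + phi[:, :, 1]   # 'xlow' guard fill
    print(phi[0, 0, 0])            # -> 5.2

if __name__ == '__main__':
    _demo_neumann_fill()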
nilq/baby-python
python
COLUMNS = [ 'TIPO_REGISTRO', 'NRO_RV_ORIGINAL', 'NRO_CARTAO', 'NRO_PV_ORIGINAL', 'DT_TRANSACAO_CV', 'NRO_NSU', 'VL_TRANSACAO_ORIGINAL', 'NRO_AUTORIZACAO', 'TID', 'NRO_PEDIDO' ]
nilq/baby-python
python
"""Setup script""" try: from setuptools import setup, find_packages except ImportError: from distutils.core import setup, find_packages from Cython.Build import cythonize import numpy as np my_modules = cythonize("pysparselp/*.pyx", annotate=True) libname = "pysparselp" setup( name=libname, version="0.0.1", author="Martin de La Gorce", author_email="martin.delagorce@gmail.com", description="Python algorithms to solve linear programming problems with with sparse matrices", packages=find_packages(), license="MIT", ext_modules=my_modules, # additional source file(s)), include_dirs=[np.get_include()], package_data={"pysparselp": ["*.pyx"]}, install_requires=["numpy", "scipy"], )
nilq/baby-python
python
import pandas as pd


class HelperDataFrame(pd.DataFrame):
    """Inherits from a pandas DataFrame and adds a couple of helper methods."""

    def __init__(self, df):
        super().__init__(data=df)

    def randomize(self):
        """Return a shuffled copy of the observations (fixed seed 42)."""
        return self.sample(frac=1, random_state=42)

    def null_count(self):
        """Return the total number of null cells."""
        return self.isnull().sum().sum()


if __name__ == "__main__":
    print("HelperDataFrame")
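# Hedged usage sketch (appended): exercising both helpers on a tiny frame.
if __name__ == "__main__":
    hdf = HelperDataFrame(pd.DataFrame({'a': [1, 2, None], 'b': [4, None, 6]}))
    print(hdf.null_count())                 # -> 2
    print(hdf.randomize().index.tolist())   # same rows, shuffled order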
nilq/baby-python
python
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
  app
  ~~~~~~~~~~~
  The Flask application module.

  :author: Jeff Kereakoglow
  :date: 2014-11-14
  :copyright: (c) 2014 by Alexis Digital
  :license: MIT, see LICENSE for more details
"""
import os
from utils import prepare_json_response
from flask import Flask, jsonify, request
from werkzeug.contrib.cache import SimpleCache
from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.httpauth import HTTPBasicAuth

# Initialize core objects
app = Flask(__name__)
cache = SimpleCache(__name__)
db = SQLAlchemy(app)
auth = HTTPBasicAuth()

app.config.from_object("config")

#-- Models
from app.models import user

if not os.path.exists("db.sqlite"):
    db.create_all()

#-- Controllers
from app.controllers import default
from app.controllers import user

app.register_blueprint(default.mod)
app.register_blueprint(user.mod)

#-- Error handlers
# Override the default handlers with JSON responses
@app.errorhandler(400)
def bad_request(error):
    """
    Renders 400 response

    :returns: JSON
    :rtype: flask.Response
    """
    return jsonify(
        prepare_json_response(
            message="Error 400: Bad request",
            success=False,
            data=None
        )
    ), 400

@app.errorhandler(401)
def unauthorized(error):
    """
    Renders 401 response

    :returns: JSON
    :rtype: flask.Response
    """
    return jsonify(
        prepare_json_response(
            message="Error 401: Unauthorized",
            success=False,
            data=None
        )
    ), 401

@app.errorhandler(403)
def forbidden(error):
    """
    Renders 403 response

    :returns: JSON
    :rtype: flask.Response
    """
    return jsonify(
        prepare_json_response(
            message="Error 403: Forbidden",
            success=False,
            data=None
        )
    ), 403

@app.errorhandler(404)
def not_found(error):
    """
    Renders 404 response

    :returns: JSON
    :rtype: flask.Response
    """
    return jsonify(
        prepare_json_response(
            message="Error 404: Not found",
            success=False,
            data=None
        )
    ), 404

@app.errorhandler(405)
def method_not_allowed(error):
    """
    Renders 405 response

    :returns: JSON
    :rtype: flask.Response
    """
    return jsonify(
        prepare_json_response(
            message="Error 405: Method not allowed",
            success=False,
            data=None
        )
    ), 405

@app.errorhandler(500)
def internal_server_error(error):
    """
    Renders 500 response

    :returns: JSON
    :rtype: flask.Response
    """
    return jsonify(
        prepare_json_response(
            message="Error 500: Internal server error",
            success=False,
            data=None
        )
    ), 500
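# Hedged sketch: prepare_json_response is imported from utils above but its
# body is not shown here. Judging only by the call sites, a minimal stand-in
# (an assumption, not the project's actual helper) could look like this.
def _prepare_json_response(message=None, success=True, data=None):
    """Wrap handler output in the envelope used by the error handlers."""
    return {
        "message": message,
        "success": success,
        "data": data,
    }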
nilq/baby-python
python
""" This falls into my "bad idea that I'm playing with" category. Withold judgement and ye lunches. Upgraded to plausible. """ from importlib import import_module class Singleton(type): instance_list = {} def __call__(klass, *args, **kwargs): if not klass in klass.instance_list: klass.instance_list[klass] = super(Singleton, klass).__call__(*args, **kwargs) return klass.instance_list[klass] def lockable_class(self): self.__is_locked = False return self class MissingPluginException(Exception): pass class attach_methods(object): def __init__(self, *modules, **kwargs): self.methods = {} #allow installing the functions under a specific dictionary self.method_dict_name = kwargs.get("method_dict_name", None) self.filter_attribute = kwargs.get("filter_attribute", None) self.modules = modules self.methods = {} def __call__(self, klass): self.get_methods(klass) self.install_methods(klass) return klass def get_methods(self, klass): filter_attribute = getattr(klass, "filter_attribute", self.filter_attribute) for _module in self.modules: imported_module = import_module(_module) for method in dir(imported_module): resolved_method = getattr(imported_module, method) if (method[0:2] != "__" and not filter_attribute) or (filter_attribute and getattr(resolved_method, filter_attribute, False)): self.methods[method] = resolved_method def install_methods(self, klass): method_dict_name = getattr(klass, "method_dict_name", self.method_dict_name) if method_dict_name: setattr(klass, method_dict_name, self.methods) else: for method in self.methods: setattr(klass, method, self.methods[method]) def plugin(func): def wrapped(*args, **kwargs): print "Executing " + func.__name__ return func(*args, **kwargs) set_function_attribute(wrapped, "plugin", True) return wrapped def set_function_attribute(func, name, value): setattr(func, name, value) class PluggableObject(object): filter_attribute = "plugin" method_dict_name = "plugins" def __init__(self): pass def dispatch_plugin(self, name, *args, **kwargs): try: plugin = self.plugins[name] except KeyError: raise MissingPluginException("There is not a plugin installed for %s" % name) return plugin(self, *args, **kwargs)
nilq/baby-python
python
# -*- coding: utf-8 -*- # @FILE : consts.py # @AUTH : model_creater
nilq/baby-python
python
#!/usr/bin/env python import numpy as np import math from multi_link_common import * #height is probably 0 from multi_link_common.py #total mass and total length are also defined in multi_link_common.py num_links = 8.0 link_length = total_length/num_links link_mass = total_mass/num_links ee_location = np.matrix([0., -link_length*8.0, height]).T #bod_shapes = ['cube', 'cube', 'cube', 'cube', 'cube', 'cube', 'cube','cube'] bod_shapes = ['capsule', 'capsule', 'capsule', 'capsule', 'capsule', 'capsule', 'capsule', 'capsule'] bod_dimensions = [[0.03, 0.03, link_length]]*8 bod_com_position = [[0., -link_length/2., height], [0., -3.0/2.0*link_length, height], [0., -5.0/2.0*link_length, height], [0., -7.0/2.0*link_length, height], [0., -9.0/2.0*link_length, height], [0., -11.0/2.0*link_length, height], [0., -13.0/2.0*link_length, height], [0., -15.0/2.0*link_length, height]] bod_color = [[0.4, 0.4, 0.4, 1], [0.8, 0.8, 0.8, 1], [0.33, 0.33, 0.33, 1], [0.5, 0.5, 0.5, 1], [0.7, 0.7, 0.7, 1], [0.45, 0.45, 0.45, 1], [0.35, 0.35, 0.35, 1], [0.6, 0.6, 0.6, 1]] bod_num_links = 8 bod_mass = [link_mass]*bod_num_links bod_names = ['link1', 'link2', 'link3', 'link4', 'link5', 'link6', 'link7', 'link8'] bodies ={'shapes':bod_shapes, 'dim':bod_dimensions, 'num_links':bod_num_links, 'com_pos':bod_com_position, 'mass':bod_mass, 'name':bod_names, 'color':bod_color} b_jt_axis = [[0.,0.,1.],[0.,0.,1.], [0.,0.,1.], [0.,0.,1.],[0.,0.,1.], [0.,0.,1.], [0.,0.,1.], [0.,0.,1.]] b_jt_anchor = [[0., 0., height], [0., -link_length, height], [0., -2*link_length, height], [0., -3*link_length, height], [0., -4*link_length, height], [0., -5*link_length, height], [0., -6*link_length, height], [0., -7*link_length, height]] b_jt_kp = [25., 10., 8., 6., 5., 2.5, 1.5, 1.] #[30., 20., 15., 5., 4., 3., 2., 1.] b_jt_kd = [1.8, 1.0, 1.0, 1.0, 1.2, 0.8, 0.5, 0.2] #[16.1, 10., 8., 3., 2., 1., 0.8, 0.5] b_jt_limits_max = np.radians([180, 120, 120, 120, 120, 120, 120, 120]).tolist() b_jt_limits_min = np.radians([-180, -120, -120, -120, -120, -120, -120, -120]).tolist() b_jt_axis = [[0.,0.,1.],[0.,0.,1.], [0.,0.,1.], [0.,0.,1.],[0.,0.,1.], [0.,0.,1.], [0.,0.,1.], [0.,0.,1.]] b_jt_attach = [[0, -1], [1, 0], [2,1], [3,2], [4,3], [5,4], [6,5], [7,6]] b_jt_start = [-2.06, 0.766, 0.446, 0.467, 0.811, 0.882, 0.775, 0.243] #(gives ee pos of [0, -0.2, 0] b_jts = {'anchor':b_jt_anchor, 'axis':b_jt_axis, 'jt_lim_max':b_jt_limits_max, 'jt_lim_min':b_jt_limits_min, 'jt_init':b_jt_start, 'jt_attach':b_jt_attach, 'jt_stiffness':b_jt_kp, 'jt_damping':b_jt_kd}
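# Hedged sanity-check sketch (appended): the joint anchors above should sit
# exactly one link_length apart along -y, with the end effector at the far
# end of the last link. Assumes the names defined in this file (and in
# multi_link_common) are available.
if __name__ == '__main__':
    for k, anchor in enumerate(b_jt_anchor):
        assert abs(anchor[1] + k * link_length) < 1e-9
    assert abs(ee_location[1, 0] + 8.0 * link_length) < 1e-9
    print('anchor spacing consistent with link_length =', link_length)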
nilq/baby-python
python
import streamlit as st

st.sidebar.subheader("About dspy")
st.sidebar.info("A web app that runs on Python and teaches Python!")
st.sidebar.markdown("""
<img src="https://media.giphy.com/media/3o7527pa7qs9kCG78A/giphy.gif" width="200">
""", unsafe_allow_html=True)

st.title("`dspy` - Data Science with Python")
st.markdown("""
___
""")

st.subheader("Please select what you would like to do")
features = ["python 101 - Learn the basics of python",
            "pyPrac - Solve problems using python",
            "pandas - Learn data analysis and manipulation"]
selection = st.radio("", features)

if selection == features[0]:
    st.balloons()
else:
    st.write("![](https://media3.giphy.com/media/STZxU3AXEdwW4caLwS/giphy.gif?cid=790b761115e96593923fc6494cb027cacde63a309c048f29&rid=giphy.gif&ct=g)")
nilq/baby-python
python
#! /usr/bin/env python3 import sys import os import cmd2 import logging import inspect # local modules import subcmd from subcmdfactory import SubCmdFactory from config import Config, Observer, Subject class QsmShell(cmd2.Cmd, Observer): intro = 'Type help or ? to list the command.\n' def emptyline(self): """ Disable the last command when hitting enter """ pass def do_shell(self, line): """Run a shell command by use a ! prefix """ print ("running shell command:", line) output = os.popen(line).read() print (output) self.last_output = output def do_exit(self, arg): """ exit from the shell """ return True def do_EOF(self, arg): return True def regCmds(self, cmds): """ Register all of the support commands into cmd2 """ for cmd in cmds: self.regCmd(cmd) def regCmd(self, cmd): """ based cmd name to register the method with do_xxx help_xxx complete_xxx """ funcdef = """def do_{}(self, arg): SubCmdFactory().Factory('{}').run(arg)""".format(cmd, cmd) assign = "QsmShell.do_{0} = do_{0}".format(cmd) exec(funcdef) exec(assign) funcdef = """def help_{}(self): print(SubCmdFactory().Factory('{}').__doc__)""".format(cmd, cmd) assign = "QsmShell.help_{0} = help_{0}".format(cmd) exec(funcdef) exec(assign) funcdef = """def complete_{}(self, text, line, begidx, endidx): subcls = SubCmdFactory().Factory('{}') return [ i for i in subcls.getSupportCmds() if i.startswith(text)] """.format(cmd, cmd.capitalize()) assign = "QsmShell.complete_{0} = complete_{0}".format(cmd) exec(funcdef) exec(assign) def __init__(self, **kwarg): """ load the shell environment from config """ # Attach the shell to the config publisher. Config().attach(self) self.__setPrompt(Config().current) super().__init__(**kwarg) def __setPrompt(self, env): """ setup the prompt shell by providing a dict. """ self.prompt = "{}:{}({})>".format(env.get('host'), env.get('user'), env.get('passw')) def update(self, subject: Subject) -> None: self.__setPrompt(subject)
nilq/baby-python
python
import os import time import gpustat import numpy as np from redlock import Redlock GPU_LOCK_TIMEOUT = 5000 # ms class GPUManager(object): def __init__(self, verbose: bool=False): self.lock_manager = Redlock([{"host": "localhost", "port": 6379, "db": 0}, ]) self.verbose = verbose def get_free_gpu(self): """ If some GPUs are available, try reserving one by checking out an exclusive redis lock. If none available or can't get lock, sleep and check again. """ while True: gpu_ind = self._get_free_gpu() if gpu_ind is not None: return gpu_ind if self.verbose: print(f'pid {os.getpid()} sleeping') time.sleep(GPU_LOCK_TIMEOUT / 1000) def _get_free_gpu(self): try: available_gpu_inds = [ gpu.index for gpu in gpustat.GPUStatCollection.new_query() if gpu.memory_used < 0.5 * gpu.memory_total ] except Exception: return [0] # Return dummy GPU index if no CUDA GPUs are installed if available_gpu_inds: gpu_ind = np.random.choice(available_gpu_inds) if self.verbose: print(f'pid {os.getpid()} picking gpu {gpu_ind}') if self.lock_manager.lock(f'gpu_{gpu_ind}', GPU_LOCK_TIMEOUT): return int(gpu_ind) if self.verbose: print(f'pid {os.getpid()} couldnt get lock') return None
nilq/baby-python
python
from matplotlib import pyplot as plt from matplotlib import animation import random import numpy as np from boids.flock import Flock from boids.flight import Flight from argparse import ArgumentParser import yaml import os from nose.tools import assert_equal from nose.tools import assert_raises
nilq/baby-python
python
import orca
import numpy as np
from urbansim.utils import misc


def register_skim_access_variable(
        column_name, variable_to_summarize, impedance_measure,
        distance, skims_table, agg=np.sum, log=False):
    """
    Register skim-based accessibility variable with orca.

    Parameters
    ----------
    column_name : str
        Name of the orca column to register this variable as.
    impedance_measure : str
        Name of the skims column to use to measure inter-zone impedance.
    variable_to_summarize : str
        Name of the zonal variable to summarize.
    distance : int
        Distance to query in the skims (e.g. 30 minutes travel time).
    skims_table : orca table
        Wrapper around the skims, convertible to a frame with to_frame().
    agg : callable, optional
        Aggregation applied to the values within range (default np.sum).
    log : bool, optional
        If True, apply log1p to the summarized values.

    Returns
    -------
    column_func : function

    """
    @orca.column('zones', column_name, cache=True, cache_scope='iteration')
    def column_func(zones):
        df = skims_table.to_frame()
        results = misc.compute_range(
            df, zones.get_column(variable_to_summarize),
            impedance_measure, distance, agg=agg)

        if len(results) < len(zones):
            results = results.reindex(zones.index).fillna(0)

        # add vars from orig zone, typically not included in skims
        results = results + zones[variable_to_summarize]

        if log:
            results = results.apply(np.log1p)
        return results

    return column_func
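# Hedged, self-contained sketch (not urbansim's actual compute_range): the
# aggregation above plausibly sums a zonal variable over all zones whose
# skim impedance falls under the threshold, roughly like this.
import pandas as pd
import numpy as np

def sum_within_range(skims, zone_values, impedance_col, threshold):
    """skims: frame indexed by (from_zone, to_zone) with an impedance column."""
    reachable = skims[skims[impedance_col] <= threshold].reset_index()
    reachable['val'] = reachable['to_zone'].map(zone_values)
    return reachable.groupby('from_zone')['val'].sum()

# Example: two zones, each reachable from the other within 15 minutes.
skims = pd.DataFrame(
    {'time': [0.0, 10.0, 10.0, 0.0]},
    index=pd.MultiIndex.from_product([[1, 2], [1, 2]],
                                     names=['from_zone', 'to_zone']))
jobs = pd.Series({1: 100, 2: 50})
print(sum_within_range(skims, jobs, 'time', 15))
# -> from_zone 1: 150, from_zone 2: 150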
nilq/baby-python
python
import chainer
from chainer.dataset import dataset_mixin
from chainercv.chainer_experimental.datasets.sliceable import GetterDataset
import chainercv
from collections import defaultdict
import glob
import os
import numpy as np
import xml.etree.ElementTree as ET


class DogDataset(dataset_mixin.DatasetMixin):
    def __init__(self, crop=False, size=32, use_cache=False, **kwargs):
        root = '../input/all-dogs/all-dogs/'
        paths = sorted(os.listdir(root))
        self.crop = crop
        self.size = size
        self.use_cache = use_cache
        if self.crop:
            self._dataset = DogCropDataset()
        else:
            self._dataset = chainer.datasets.ImageDataset(paths, root=root)
        self.idx_cache_dict = dict()

    def __len__(self):
        return len(self._dataset)

    def get_example(self, i):
        if self.crop:
            if self.use_cache and i in self.idx_cache_dict:
                path, label = self.idx_cache_dict[i]
                img = chainercv.utils.read_image(path)
            else:
                img, bbox, label = self._dataset[i]
                # TODO: translation
                ymin, xmin, ymax, xmax = bbox
                img = img[:, ymin:ymax, xmin:xmax]
                if self.use_cache:
                    path = '/kaggle/{}.png'.format(i)
                    chainercv.utils.write_image(img, path)
                    self.idx_cache_dict[i] = (path, label)
        else:
            img = self._dataset[i]
            label = 0
        # img = chainercv.transforms.resize(img, (32, 32))
        img = chainercv.transforms.scale(img, self.size, fit_short=True)
        img = chainercv.transforms.random_crop(img, (self.size, self.size))
        img = chainercv.transforms.random_flip(img, x_random=True)
        img = (img / 128. - 1.).astype(np.float32)
        img += np.random.uniform(size=img.shape, low=0., high=1. / 128)
        return img, label


class DogBBoxDataset(GetterDataset):
    def __init__(self):
        super(DogBBoxDataset, self).__init__()
        root_image = '../input/all-dogs/all-dogs/'
        root_annot = '../input/annotation/Annotation/'
        annots = glob.glob(root_annot + '*/*')
        annots = sorted(annots)
        breeds = os.listdir(root_annot)
        breeds = ['-'.join(breed.split('-')[1:]) for breed in breeds]
        self.names = list(set(breeds))
        self.image_annot_dict = defaultdict(list)
        for annot in annots:
            annot_ = annot.split('/')
            breed, path = annot_[:-1], annot_[-1]
            self.image_annot_dict[path + '.jpg'].append(annot)
        image_paths = sorted(list(self.image_annot_dict.keys()))
        # no image for ../input/all-dogs/all-dogs/n02105855_2933.jpg
        image_paths = [path for path in image_paths
                       if os.path.isfile(os.path.join(root_image, path))]
        self._dataset = chainer.datasets.ImageDataset(image_paths, root=root_image)
        self.add_getter('image', self.get_image)
        self.add_getter(('bbox', 'label'), self.get_annotation)

    def __len__(self):
        return len(self._dataset)

    def get_image(self, i):
        img = self._dataset[i]
        return img

    def get_annotation(self, i):
        path = self._dataset._paths[i]
        annots = self.image_annot_dict[path]
        bbox = list()
        label = list()
        for annot in annots:
            tree = ET.parse(annot)
            root = tree.getroot()
            objects = root.findall('object')
            for o in objects:
                bndbox = o.find('bndbox')
                ymin = int(bndbox.find('ymin').text)
                xmin = int(bndbox.find('xmin').text)
                ymax = int(bndbox.find('ymax').text)
                xmax = int(bndbox.find('xmax').text)
                bbox.append((ymin, xmin, ymax, xmax))
                nm = o.find('name')
                label.append(self.names.index(nm.text))
        bbox = np.array(bbox)
        label = np.array(label)
        return bbox, label


class DogCropDataset(dataset_mixin.DatasetMixin):
    def __init__(self):
        self.dataset = DogBBoxDataset()
        self.names = self.dataset.names
        self.indices = list()
        self.bboxes = list()
        self.labels = list()
        for i in range(len(self.dataset)):
            bbox, label = self.dataset.get_example_by_keys(i, (1, 2))
            self.indices.append(np.ones_like(label) * i)
            self.bboxes.append(bbox)
            self.labels.append(label)
        self.indices = np.concatenate(self.indices, axis=0)
        self.bboxes = np.concatenate(self.bboxes, axis=0)
        self.labels = np.concatenate(self.labels, axis=0)

    def __len__(self):
        return len(self.labels)

    def get_example(self, i):
        idx = self.indices[i]
        img, = self.dataset.get_example_by_keys(idx, (0,))
        bbox, label = self.bboxes[i], self.labels[i]
        return img, bbox, label
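# Minimal usage sketch for the datasets above (assumes the Kaggle
# generative-dog-images inputs are mounted at the relative paths hard-coded
# in __init__; nothing below is part of the original notebook):
if __name__ == '__main__':
    dataset = DogDataset(crop=True, size=64)
    img, label = dataset[0]
    print(img.shape, img.dtype, label)  # (3, 64, 64) float32, breed index
    it = chainer.iterators.SerialIterator(dataset, batch_size=32, repeat=False)
    batch = it.next()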
nilq/baby-python
python
#! /usr/bin/env python
# -*-coding:utf-8 -*-
import numpy as np
import cv2
import glob
import yaml


class CameraCalib:
    def __init__(self, img_path='/tmp', CHESSX=8, CHESSY=6, extension=".jpg"):
        """
        Initialize Camera Calibration Class
        @param: img_path = [path to get images],
                CHESSX = [chessboard corners in X direction]
                CHESSY = [chessboard corners in Y direction]
        """
        self.img_path = img_path
        self.chessx = CHESSX
        self.chessy = CHESSY
        self.data = {}
        self.file_extension = extension

    def show_image(self, image, time=1000):
        """
        Image Visualization for [time] msecs.
        @param: image, time [in msecs]
        """
        y = 540
        x = 1.5 * y
        imS = cv2.resize(image, (int(x), y))  # Resize image
        cv2.imshow("output", imS)
        cv2.waitKey(time)

    def calcReprojectionError(self, objpoints, imgpoints, mtx, dist, rvecs, tvecs):
        mean_error = 0
        # range instead of the original Python 2 xrange, so the script also runs on Python 3
        for i in range(len(objpoints)):
            imgpoints2, _ = cv2.projectPoints(objpoints[i], rvecs[i], tvecs[i], mtx, dist)
            error = cv2.norm(imgpoints[i], imgpoints2, cv2.NORM_L2) / len(imgpoints2)
            mean_error += error
        print("Re-projection Error: {}".format(mean_error / len(objpoints)))

    def compute(self, visualization=True, save_yaml=True):
        """
        Camera calibration and camera matrix computation.
        @param: visualization = [True|False] to enable imgs visualization,
                save_yaml = [True|False] to save image in a yaml file.
        """
        # termination criteria
        criteria = (cv2.TERM_CRITERIA_EPS + cv2.TERM_CRITERIA_MAX_ITER, 30, 0.001)
        # prepare object points, like (0,0,0), (1,0,0), (2,0,0) ....,(6,5,0)
        objp = np.zeros((self.chessy * self.chessx, 3), np.float32)
        objp[:, :2] = np.mgrid[0:self.chessx, 0:self.chessy].T.reshape(-1, 2)
        # Arrays to store object points and image points from all the images.
        objpoints = []  # 3d point in real world space
        imgpoints = []  # 2d points in image plane.
        images = glob.glob(self.img_path + '/*' + self.file_extension)
        for fname in images:
            img = cv2.imread(fname)
            gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
            # show grey image
            if visualization:
                self.show_image(gray)
            # Find the chess board corners
            ret, corners = cv2.findChessboardCorners(gray, (self.chessx, self.chessy), None)
            # If found, add object points, image points (after refining them)
            if ret == True:
                objpoints.append(objp)
                corners2 = cv2.cornerSubPix(gray, corners, (11, 11), (-1, -1), criteria)
                imgpoints.append(corners2)
                # Draw and display the corners
                img = cv2.drawChessboardCorners(img, (self.chessx, self.chessy), corners2, ret)
                if visualization:
                    self.show_image(img)
        cv2.destroyAllWindows()
        # calibration
        ret, mtx, dist, rvecs, tvecs = cv2.calibrateCamera(objpoints, imgpoints, gray.shape[::-1], None, None)
        # transform the matrix and distortion coefficients to writable lists
        self.data = {'camera_matrix': np.asarray(mtx).tolist(),
                     'dist_coeff': np.asarray(dist).tolist()}
        self.calcReprojectionError(objpoints, imgpoints, mtx, dist, rvecs, tvecs)
        # print results
        print("Camera Calibration Matrix:\n", self.data)
        # and save it to a file
        if save_yaml:
            with open("calibration_matrix.yaml", "w") as f:
                yaml.dump(self.data, f)


if __name__ == "__main__":
    import argparse
    ap = argparse.ArgumentParser()
    ap.add_argument("-cw", "--chessboard_width", required=False, default="8",
                    help="number of intersections in x axis")
    ap.add_argument("-ch", "--chessboard_height", required=False, default="6",
                    help="number of intersections in y axis")
    ap.add_argument("-sd", "--square_dimension", required=False, default="0.026",
                    help="square dimension in meters")
    ap.add_argument("-p", "--path", required=True, help="path to images folder")
    ap.add_argument("-e", "--file_extension", required=False, default=".jpg",
                    help="extension of images")
    ap.add_argument("-a", "--auto_mode", required=False, default="True",
                    help="automatic mode uses all images inside images folder to run calibration")
    args = vars(ap.parse_args())

    auto_mode = eval(args["auto_mode"])
    CHESSBOARD_WIDTH = int(args["chessboard_width"])
    CHESSBOARD_HEIGHT = int(args["chessboard_height"])
    CALIBRATION_SQUARE_DIMENSION = float(args["square_dimension"])  # meters

    # initialize class
    cam_calibration = CameraCalib(img_path=args["path"], CHESSX=CHESSBOARD_WIDTH,
                                  CHESSY=CHESSBOARD_HEIGHT, extension=args["file_extension"])
    # Compute Calibration
    cam_calibration.compute(True)
nilq/baby-python
python
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import tensorflow as tf

from edward.util.tensorflow import get_control_variate_coef


class test_get_control_variate_coef(tf.test.TestCase):

    def test_calculate_correct_coefficient(self):
        with self.test_session():
            f = tf.constant([1.0, 2.0, 3.0, 4.0])
            h = tf.constant([2.0, 3.0, 8.0, 1.0])
            self.assertAllClose(get_control_variate_coef(f, h).eval(),
                                0.03448276)


if __name__ == '__main__':
    tf.test.main()
nilq/baby-python
python
# Copyright 2020 Jiang Shenghu
# SPDX-License-Identifier: Apache-2.0
from tvm import topi

from ..poly import TensorTable, Statement, ScheduleTree
from .conv import PlainConv2d, Conv2d


def schedule(**kwargs):
    init_t = 'stmt_init[n, c, h, w]'
    calc_t = 'stmt_calc[n, c, h, w, i, j, k]'
    output_constraints = '0 <= n < batch and 0 <= c < out_channel ' \
                         'and 0 <= h < out_height and 0 <= w < out_width'
    calc_constraints = '0 <= i < in_group_size and 0 <= j < kernel_height and 0 <= k < kernel_width'
    domain = '[batch, in_channel, in_height, in_width, out_channel, out_height, out_width, ' \
             'kernel_height, kernel_width, in_group_size] -> {' \
             f'{init_t}: {output_constraints}; ' \
             f'{calc_t}: {output_constraints} and {calc_constraints}' \
             '}'
    outer_schedule = '[%s]' % ', '.join(map(
        lambda x: f'{{{init_t}->[({x})];{calc_t}->[({x})]}}', ('n', 'c', 'h', 'w')))
    inner_schedule = '[%s]' % ', '.join(map(
        lambda x: f'{{{calc_t}->[({x})]}}', ('i', 'j', 'k')))

    tree = ScheduleTree.from_yaml(f'''
    domain: "{domain}"
    child:
        schedule: "{outer_schedule}"
        permutable: 1
        coincident: [1, 1, 1, 1]
        child:
            sequence:
              - filter: "{{{init_t}}}"
              - filter: "{{{calc_t}}}"
                child:
                    schedule: "{inner_schedule}"
                    permutable: 1
                    coincident: [1, 1, 1]
    ''')
    tree.apply_params(**kwargs)
    return tree


def tensors(batch=1, in_channel=1, in_height=1, in_width=1, out_channel=1, out_height=1, out_width=1,
            kernel_height=1, kernel_width=1, in_group_size=1, **_):
    table = TensorTable()
    table.add_tensor('x', [batch, in_channel, in_height, in_width])
    table.add_tensor('weight', [out_channel, in_group_size, kernel_height, kernel_width])
    table.add_tensor('out', [batch, out_channel, out_height, out_width])
    return table


def statements(stride_height=1, stride_width=1, in_group_size=1, out_group_size=1, **_):
    def stmt_init(t, n, c, h, w):
        t['out'][n, c, h, w] = 0.0

    def stmt_calc(t, n, c, h, w, i, j, k):
        in_offset = c // out_group_size * in_group_size
        t['out'][n, c, h, w] = t['out'][n, c, h, w] \
            + t['x'][n, i + in_offset, h * stride_height + j, w * stride_width + k] \
            * t['weight'][c, i, j, k]

    res = {}
    for f in [stmt_init, stmt_calc]:
        res[f.__name__] = Statement.from_calc(f)
    return res


class PlainGroupedConv2d(PlainConv2d):
    required_args = PlainConv2d.required_args + ['groups']
    calculated_args = {**PlainConv2d.calculated_args, **{
        'in_group_size': lambda **a: a['in_channel'] // a['groups'],
        'out_group_size': lambda **a: a['out_channel'] // a['groups'],
    }}

    schedule_factory = schedule
    tensors_factory = tensors
    statements_factory = statements

    topi_cuda_task_name = 'group_conv2d_nchw.cuda'

    def topi_cuda_args(self, x=None, weight=None, out=None):
        return [x, weight, [self.stride_height, self.stride_width], 0, 1, self.groups, out.dtype]

    topi_cuda_calc_func = topi.cuda.group_conv2d_nchw
    topi_cuda_schedule_func = topi.cuda.schedule_group_conv2d_nchw
    topi_cuda_calc_ret_map = ['out']


class GroupedConv2d(Conv2d):
    def __init__(self, groups=1, **kwargs):
        super().__init__(**kwargs)
        op_idx = self._ops.index(self.conv)
        self.conv = PlainGroupedConv2d(name=self.name + '.conv', groups=groups, **self.conv.arguments)
        self.weight = self.conv.tensors['weight']
        self._ops[op_idx] = self.conv
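# A plain-NumPy sketch of the indexing that stmt_calc above encodes, to make
# the grouped-convolution offset arithmetic concrete (shapes and names below
# are made up for illustration; this is not part of the TVM code):
import numpy as np

def grouped_conv2d_reference(x, weight, stride=(1, 1), groups=1):
    batch, in_channel, in_h, in_w = x.shape
    out_channel, in_group_size, k_h, k_w = weight.shape
    out_group_size = out_channel // groups
    s_h, s_w = stride
    out_h = (in_h - k_h) // s_h + 1
    out_w = (in_w - k_w) // s_w + 1
    out = np.zeros((batch, out_channel, out_h, out_w), x.dtype)
    for n in range(batch):
        for c in range(out_channel):
            in_offset = c // out_group_size * in_group_size  # same formula as stmt_calc
            for h in range(out_h):
                for w in range(out_w):
                    for i in range(in_group_size):
                        for j in range(k_h):
                            for k in range(k_w):
                                out[n, c, h, w] += x[n, i + in_offset, h * s_h + j, w * s_w + k] \
                                    * weight[c, i, j, k]
    return out
    # e.g. x: (1, 4, 8, 8) with groups=2 expects weight: (6, 2, 3, 3)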
nilq/baby-python
python
# =============================================================================
# SIMULATION-BASED ENGINEERING LAB (SBEL) - http://sbel.wisc.edu
# University of Wisconsin-Madison
#
# Copyright (c) 2020 SBEL
# All rights reserved.
#
# Use of this source code is governed by a BSD-style license that can be found
# at https://opensource.org/licenses/BSD-3-Clause
#
# =============================================================================
# Contributors: Nic Olsen, Milad Rakhsha
# =============================================================================
"""
Writes contact forces to files
"""
import numpy as np


def writeforcefile(c_pos, f_contact, filename, params):
    with open(filename, 'w') as file:
        file.write('cx,cy,cz,fn,fu,fw\n')
        if len(f_contact) != 0:
            for i in range(f_contact.shape[0]):
                out = [str(c_pos[i*3 + j]) for j in range(3)] \
                    + [str(f_contact[i, j]) for j in range(3)]
                file.write(','.join(out) + '\n')
        else:
            out = [str(0.0)] * 6
            file.write(','.join(out) + '\n')


def writeforcefile_with_pairs(contact_pair, f_contact, phi, frame, params):
    # Use a context manager so the file is closed even on error
    # (the original opened it without ever closing it).
    with open(params.prefix + "force" + frame + params.suffix, 'w') as file:
        file.write('bi,bj,Fn,Ft,phi\n')
        if len(f_contact) != 0:
            for i in range(f_contact.shape[0]):
                # NOTE: the original took the norm of f_contact[i, 1:2], which is a
                # single element; 1:3 covers both tangential components (fu, fw).
                out = [str(contact_pair[i][j]) for j in range(2)] \
                    + [str(f_contact[i, 0]), str(np.linalg.norm(f_contact[i, 1:3], 2))] \
                    + [str(phi[i])]
                file.write(','.join(out) + '\n')
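# A minimal, hypothetical driver for the first writer above (writeforcefile
# ignores params, so None is passed; all values are made up):
if __name__ == '__main__':
    c_pos = np.arange(6, dtype=float)          # two contact points, xyz each
    f_contact = np.array([[1.0, 0.1, 0.2],
                          [0.5, 0.0, 0.3]])    # fn, fu, fw per contact
    writeforcefile(c_pos, f_contact, 'forces.csv', params=None)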
nilq/baby-python
python
""" Write a function with a list of ints as a paramter. / Return True if any two nums sum to 0. / >>> add_to_zero([]) / False / >>> add_to_zero([1]) / False / >>> add_to_zero([1, 2, 3]) / False / >>> add_to_zero([1, 2, 3, -2]) / True / """
nilq/baby-python
python
# encoding=utf-8

# A collection of regular expressions for parsing Tweet text. The regular expression
# list is frozen at load time to ensure immutability. These regular expressions are
# used throughout the Twitter classes. Special care has been taken to make
# sure these regular expressions work with Tweets in all languages.

import re, string

REGEXEN = {}  # :nodoc:

# Space is more than %20, U+3000 for example is the full-width space used with Kanji. Provide a short-hand
# to access both the list of characters and a pattern suitable for use with String#split
# Taken from: ActiveSupport::Multibyte::Handlers::UTF8Handler::UNICODE_WHITESPACE
UNICODE_SPACES = []
for space in [9, 10, 11, 12, 13, 32, 133, 160, 5760, 6158, 8192, 8193, 8194, 8195, 8196, 8197,
              8198, 8199, 8200, 8201, 8202, 8232, 8233, 8239, 8287, 12288]:
    UNICODE_SPACES.append(hex(space))
REGEXEN['spaces'] = re.compile(ur'|'.join(UNICODE_SPACES))

REGEXEN['at_signs'] = re.compile(ur'[%s]' % ur'|'.join(list(u'@@')))
REGEXEN['extract_mentions'] = re.compile(ur'(^|[^a-zA-Z0-9_])(%s)([a-zA-Z0-9_]{1,20})(?=(.|$))' % REGEXEN['at_signs'].pattern)
REGEXEN['extract_reply'] = re.compile(ur'^(?:[%s])*%s([a-zA-Z0-9_]{1,20})' % (REGEXEN['spaces'].pattern, REGEXEN['at_signs'].pattern))
REGEXEN['list_name'] = re.compile(ur'^[a-zA-Z\u0080-\u00ff].{0,79}$')

# Latin accented characters (subtracted 0xD7 from the range, it's a confusable multiplication sign. Looks like "x")
LATIN_ACCENTS = []
for accent in [192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207,
               208, 209, 210, 211, 212, 213, 214, 216, 217, 218, 219, 220, 221, 222, 223, 224,
               225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240,
               241, 242, 243, 244, 245, 246, 248, 249, 250, 251, 252, 253, 254, 255]:
    LATIN_ACCENTS.append(hex(accent))
REGEXEN['latin_accents'] = re.compile(ur''.join(LATIN_ACCENTS))

# Characters considered valid in a hashtag but not at the beginning, where only a-z and 0-9 are valid.
HASHTAG_CHARACTERS = re.compile(ur'[a-z0-9_%s]' % REGEXEN['latin_accents'].pattern, re.IGNORECASE)

REGEXEN['auto_link_hashtags'] = re.compile(ur'(^|[^0-9A-Z&\/]+)(#|#)([0-9A-Z_]*[A-Z_]+%s*)' % HASHTAG_CHARACTERS.pattern, re.IGNORECASE)
REGEXEN['auto_link_usernames_or_lists'] = re.compile(ur'([^a-zA-Z0-9_]|^)([@@]+)([a-zA-Z0-9_]{1,20})(\/[a-zA-Z][a-zA-Z0-9\u0080-\u00ff\-]{0,79})?')
REGEXEN['auto_link_emoticon'] = re.compile(ur'(8\-\#|8\-E|\+\-\(|\`\@|\`O|\&lt;\|:~\(|\}:o\{|:\-\[|\&gt;o\&lt;|X\-\/|\[:-\]\-I\-|\/\/\/\/Ö\\\\\\\\|\(\|:\|\/\)|∑:\*\)|\( \| \))')

# URL related hash regex collection
REGEXEN['valid_preceding_chars'] = re.compile(ur"(?:[^\/\"':!=]|^|\:)")

punct = re.escape(string.punctuation)

REGEXEN['valid_domain'] = re.compile(ur'(?:[^%s\s][\.-](?=[^%s\s])|[^%s\s]){1,}\.[a-z]{2,}(?::[0-9]+)?' % (punct, punct, punct), re.IGNORECASE)
REGEXEN['valid_url_path_chars'] = re.compile(ur'[\.\,]?[a-z0-9!\*\'\(\);:=\+\$\/%#\[\]\-_,~@\.]', re.IGNORECASE)
# Valid end-of-path characters (so /foo. does not gobble the period).
# 1. Allow ) for Wikipedia URLs.
# 2. Allow =&# for empty URL parameters and other URL-join artifacts
REGEXEN['valid_url_path_ending_chars'] = re.compile(ur'[a-z0-9\)=#\/]', re.IGNORECASE)
REGEXEN['valid_url_query_chars'] = re.compile(ur'[a-z0-9!\*\'\(\);:&=\+\$\/%#\[\]\-_\.,~]', re.IGNORECASE)
REGEXEN['valid_url_query_ending_chars'] = re.compile(ur'[a-z0-9_&=#]', re.IGNORECASE)
REGEXEN['valid_url'] = re.compile(u'''
(%s)
(
    (https?:\/\/|www\.)
    (%s)
    (/%s*%s?)?
    (\?%s*%s)?
)
''' % (
    REGEXEN['valid_preceding_chars'].pattern,
    REGEXEN['valid_domain'].pattern,
    REGEXEN['valid_url_path_chars'].pattern,
    REGEXEN['valid_url_path_ending_chars'].pattern,
    REGEXEN['valid_url_query_chars'].pattern,
    REGEXEN['valid_url_query_ending_chars'].pattern
), re.IGNORECASE + re.X)
# groups:
# 1 - Preceding character
# 2 - URL
# 3 - Protocol or www.
# 4 - Domain and optional port number
# 5 - URL path
# 6 - Query string
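# A small, hypothetical demonstration of the mention pattern above
# (Python 2 syntax, matching the ur'' literals used in this module):
if __name__ == '__main__':
    m = REGEXEN['extract_mentions'].search(u'hello @alice!')
    print m.group(3)  # alice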
nilq/baby-python
python
#!/usr/bin/env python
# -*-coding:utf-8 -*-
# PROJECT : algorithm_mad
# Time    :2020/12/22 11:06
# Warning :The Hard Way Is Easier

import random

"""
Heap sort
"""

'''Heapify: restore the max-heap property for the subtree rooted at i'''


def heapify(array, length, i):
    largest = i
    left = 2 * i + 1
    right = 2 * i + 2
    if left < length and array[largest] < array[left]:
        largest = left
    if right < length and array[largest] < array[right]:
        largest = right
    if largest != i:
        array[i], array[largest] = array[largest], array[i]
        heapify(array, length, largest)


'''Heap sort'''


def HeapSort(array):
    length = len(array)
    # Build a max heap; the last parent sits at index length // 2 - 1
    # (the original started at length, which only wasted no-op calls on leaves)
    for i in range(length // 2 - 1, -1, -1):
        heapify(array, length, i)
    # Repeatedly move the current maximum to the end and re-heapify the prefix
    for i in range(length - 1, 0, -1):
        array[i], array[0] = array[0], array[i]
        heapify(array, i, 0)
    return array


if __name__ == '__main__':
    array = [random.randint(0, 100) for _ in range(10)]
    array_sort = HeapSort(array.copy())
    print('INPUT:\n%s' % ','.join([str(i) for i in array]))
    print('OUTPUT:\n%s' % ','.join([str(i) for i in array_sort]))
nilq/baby-python
python
from concurrent.futures import Future
from typing import Any, Callable, TypeVar
from threading import Lock

from amino import do, Do, IO, Map, Dat
from amino.logging import module_log

from ribosome.rpc.error import RpcReadError
from ribosome.rpc.data.rpc import ActiveRpc

A = TypeVar('A')
log = module_log()

PendingRpc = Map[int, Future]


class Requests(Dat['Requests']):

    @staticmethod
    def cons(current_id: int=0, to_vim: PendingRpc=Map(), from_vim: PendingRpc=Map()) -> 'Requests':
        return Requests(current_id, to_vim, from_vim)

    def __init__(self, current_id: int, to_vim: PendingRpc, from_vim: PendingRpc) -> None:
        self.current_id = current_id
        self.to_vim = to_vim
        self.from_vim = from_vim


OnMessage = Callable[[bytes], IO[None]]
OnError = Callable[[RpcReadError], IO[None]]


class RpcConcurrency(Dat['RpcConcurrency']):

    @staticmethod
    def cons(
            requests: Requests=None,
            lock: Lock=None,
    ) -> 'RpcConcurrency':
        return RpcConcurrency(
            requests or Requests.cons(),
            lock or Lock(),
        )

    def exclusive(self, f: Callable[..., IO[A]], *a: Any, **kw: Any) -> IO[A]:
        def wrap() -> IO[A]:
            with self.lock:
                return IO.from_either(f(*a, **kw).attempt)
        return IO.suspend(wrap)

    def __init__(self, requests: Requests, lock: Lock) -> None:
        self.requests = requests
        self.lock = lock


def exclusive_unregister_rpc(rc: RpcConcurrency, requests: PendingRpc, rpc: ActiveRpc) -> IO[Future]:
    return IO.delay(requests.pop, rpc.id)


def unregister_rpc(rc: RpcConcurrency, requests: PendingRpc, rpc: ActiveRpc) -> IO[Future]:
    log.debug1(f'unregistering {rpc}')
    return (
        IO.failed(f'invalid request id from vim after execution: {rpc}. active requests: {requests}')
        if rpc.id not in requests else
        rc.exclusive(exclusive_unregister_rpc, rc, requests, rpc)
    )


@do(IO[Future])
def exclusive_register_rpc(rc: RpcConcurrency, requests: PendingRpc, rpc: ActiveRpc) -> Do:
    f: Future = Future()
    yield IO.delay(requests.update, {rpc.id: f})
    return f


@do(IO[Future])
def register_rpc(rc: RpcConcurrency, requests: PendingRpc, rpc: ActiveRpc) -> Do:
    log.debug1(f'registering {rpc}')
    yield (
        IO.failed(f'duplicate id in request from vim: {rpc}')
        if rpc.id in requests else
        rc.exclusive(exclusive_register_rpc, rc, requests, rpc)
    )


__all__ = ('Requests', 'OnMessage', 'OnError', 'RpcConcurrency', 'unregister_rpc', 'register_rpc',)
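# A tiny, hypothetical illustration of the locking helper above; IO.delay is
# the same constructor already used in this module, everything else is made up
# (kept as a comment because it needs a full ribosome/amino environment):
#
#     rc = RpcConcurrency.cons()
#     result = rc.exclusive(IO.delay, print, 'inside the lock').attempt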
nilq/baby-python
python
# Copyright 2021 UW-IT, University of Washington
# SPDX-License-Identifier: Apache-2.0

from django.contrib.auth.models import User
from uw_gws.utilities import fdao_gws_override
from uw_pws.util import fdao_pws_override
from uw_uwnetid.util import fdao_uwnetid_override


def get_user(username):
    try:
        user = User.objects.get(username=username)
        return user
    except Exception as ex:
        user = User.objects.create_user(username, password='pass')
        return user


def get_user_pass(username):
    return 'pass'
nilq/baby-python
python
from Jumpscale import j
import os

# import copy
# import sys
import inspect
import types


class JSBase:
    def __init__(self, parent=None, topclass=True, **kwargs):
        """
        :param parent: parent is object calling us
        :param topclass: if True means no-one inherits from us
        """
        self._parent = parent
        self._class_init()  # is needed to init class properties
        if topclass:
            self._init2(**kwargs)
        self._init()
        self._obj_cache_reset()

    def _class_init(self, topclass=True):

        if not hasattr(self.__class__, "_class_init_done"):
            # print("_class init:%s"%self.__class__.__name__)
            # only needed to execute once, needs to be done at init time, class inheritance does not exist
            self.__class__._dirpath_ = ""  # path of the directory hosting this class
            self.__class__.__objcat_name = ""
            self.__class__._cache_expiration = 3600  # expiration of the cache
            self.__class__._test_runs = {}
            self.__class__._test_runs_error = {}
            if not hasattr(self.__class__, "_name"):
                self.__class__._name = j.core.text.strip_to_ascii_dense(str(self.__class__)).split(".")[-1].lower()
            # short location name:
            if "__jslocation__" in self.__dict__:
                self.__class__._location = self.__jslocation__
            elif "__jslocation__" in self.__class__.__dict__:
                self.__class__._location = self.__class__.__jslocation__
            elif "__jscorelocation__" in self.__dict__:
                self.__class__._location = self.__jslocation__
            else:
                self.__class__._location = None
                parent = self._parent
                while parent is not None:
                    if hasattr(parent, "__jslocation__"):
                        self.__class__._location = parent.__jslocation__
                        break
                    parent = parent._parent
                if self.__class__._location is None:
                    self.__class__._location = self.__class__._name

            # walk to all parents, let them know that there are child classes
            self.__class__._class_children = []
            parent = self._parent
            while parent is not None:
                if parent.__class__ not in parent._class_children:
                    parent._class_children.append(parent.__class__)
                parent = parent._parent

            self.__class__._methods_ = []
            self.__class__._properties_ = []
            self.__class__._inspected_ = False
            # print("classinit_2:%s"%self.__class__)
            # print(self.__class__._properties_)

            self.__class__._logger_min_level = 100

            self.__class__._class_init_done = True

        self._key = "%s:%s" % (self.__class__._location, self.__class__._name)

        # lets make sure the initial loglevel gets set
        self._logger_set(children=False, parents=False)

    def _logging_enable_check(self):
        """
        check if logging should be disabled for current js location

        according to logger includes and excludes (configured)
        includes have a higher priority over excludes

        will not take minlevel into consideration, its only the excludes & includes

        :return: True if logging is enabled
        :rtype: bool
        """
        if j.core.myenv.config.get("DEBUG", False):
            return True

        self._key = self._key.lower()

        def check(checkitems):
            for finditem in checkitems:
                finditem = finditem.strip().lower()
                if finditem == "*":
                    return True
                if finditem == "":
                    continue
                if "*" in finditem:
                    if finditem[-1] == "*":
                        # means at end
                        if self._key.startswith(finditem[:-1]):
                            return True
                    elif finditem[0] == "*":
                        if self._key.endswith(finditem[1:]):
                            return True
                    else:
                        raise RuntimeError("find item can only have * at start or at end")
                else:
                    if self._key == finditem:
                        return True
            return False

        if check(j.core.myenv.log_includes) and not check(j.core.myenv.log_excludes):
            return True

        return False

    def _logger_set(self, minlevel=None, children=True, parents=True):
        """
        :param minlevel: if not set then will use the LOGGER_LEVEL from
            /sandbox/cfg/jumpscale_config.toml

        makes sure that logging at or above minlevel will happen; std = 100,
        and 100 means nothing will be logged

        - CRITICAL  50
        - ERROR     40
        - WARNING   30
        - INFO      20
        - STDOUT    15
        - DEBUG     10
        - NOTSET    0

        if parents and children: will be set on all classes of the self.location e.g. j.clients.ssh (children, ...)

        if minlevel specified then it will always consider the logging to be enabled

        :return:
        """
        if minlevel is not None or self._logging_enable_check():
            # if minlevel specified we overrule anything
            # print ("%s:loginit"%self.__class__._name)
            if minlevel is None:
                minlevel = int(j.core.myenv.config.get("LOGGER_LEVEL", 15))
            if minlevel is not None or not self._logging_enable_check():
                self.__class__._logger_min_level = minlevel
                if parents:
                    parent = self._parent
                    while parent is not None:
                        parent._logger_minlevel_set(minlevel)
                        parent = parent._parent
                if children:
                    for kl in self.__class__._class_children:
                        # print("%s:minlevel:%s"%(kl,minlevel))
                        kl._logger_min_level = minlevel

    def _init(self):
        pass

    def _init2(self, **kwargs):
        """
        meant to be used by developers of the base classes
        :return:
        """
        self._obj_cache_reset()
        self._key = "%s:%s" % (
            self.__class__._location,
            self.__class__._name,
        )  # needs to be done 2, first in class init

    def _obj_cache_reset(self):
        """
        this empties the runtime state of an obj and the logger and the testruns
        :return:
        """
        self.__class__._test_runs = {}
        self._cache_ = None
        self._objid_ = None

        # NOTE: this only unbinds the loop variable; the instance attributes
        # themselves are left intact (kept as in the original)
        for key, obj in self.__dict__.items():
            del obj

    @property
    def _dirpath(self):
        if self.__class__._dirpath_ == "":
            self.__class__._dirpath_ = os.path.dirname(inspect.getfile(self.__class__))
        return self.__class__._dirpath_

    @property
    def _objid(self):
        if self._objid_ is None:
            id = self.__class__._location
            id2 = ""
            try:
                id2 = self.data.name
            except:
                pass
            if id2 == "":
                try:
                    if self.data.id is not None:
                        id2 = self.data.id
                except:
                    pass
            if id2 == "":
                for item in ["instance", "_instance", "_id", "id", "name", "_name"]:
                    if item in self.__dict__ and self.__dict__[item]:
                        self._log_debug("found extra for obj_id")
                        id2 = str(self.__dict__[item])
                        break
            if id2 != "":
                self._objid_ = "%s_%s" % (id, id2)
            else:
                self._objid_ = id
        return self._objid_

    def _logger_enable(self):
        self._logger_set(0)

    @property
    def _cache(self):
        if self._cache_ is None:
            self._cache_ = j.core.cache.get(self._objid, expiration=self._cache_expiration)
        return self._cache_

    def _inspect(self):
        if not self.__class__._inspected_:
            # print("INSPECT:%s"%self.__class__)
            assert self.__class__._methods_ == []
            assert self.__class__._properties_ == []
            for name, obj in inspect.getmembers(self.__class__):
                if inspect.ismethod(obj):
                    self.__class__._methods_.append(name)
                # elif name.startswith("_"):
                #     continue
                elif inspect.ismethoddescriptor(obj):
                    continue
                elif inspect.isfunction(obj):
                    self.__class__._methods_.append(name)
                elif inspect.isclass(obj):
                    self.__class__._properties_.append(name)
                elif inspect.isgetsetdescriptor(obj):
                    continue
                else:
                    self.__class__._properties_.append(name)
            for item in self.__dict__.keys():
                if item.startswith("_"):
                    continue
                if item not in self._methods_:
                    self.__class__._properties_.append(item)
            self.__class__._inspected_ = True
        # else:
        #     print("not inspect:%s"%self.__class__)

    def _properties(self, prefix=""):
        self._inspect()
        if prefix == "_":
            return [
                item
                for item in self.__class__._properties_
                if (item.startswith("_") and not item.startswith("__") and not item.endswith("_"))
            ]
        if prefix == "":
            return [item for item in self.__class__._properties_ if not item.startswith("_")]
        else:
            return [item for item in self.__class__._properties_ if item.startswith(prefix)]

    def _methods(self, prefix=""):
        self._inspect()
        if prefix == "_":
            return [
                item
                for item in self.__class__._methods_
                if (item.startswith("_") and not item.startswith("__") and not item.endswith("_"))
            ]
        if prefix == "":
            return [item for item in self.__class__._methods_ if not item.startswith("_")]
        else:
            return [item for item in self.__class__._methods_ if item.startswith(prefix)]

    def _properties_children(self):
        return []

    def _properties_model(self):
        return []

    @property
    def _ddict(self):
        res = {}
        for key in self.__dict__.keys():
            if not key.startswith("_"):
                v = self.__dict__[key]
                if not isinstance(v, types.MethodType):
                    res[key] = v
        return res

    ################

    def _print(self, msg, cat=""):
        self._log(msg, cat=cat, level=15)

    def _log_debug(self, msg, cat="", data=None, context=None, _levelup=1):
        self._log(msg, cat=cat, level=10, data=data, context=context, _levelup=_levelup)

    def _log_info(self, msg, cat="", data=None, context=None, _levelup=1):
        self._log(msg, cat=cat, level=20, data=data, context=context, _levelup=_levelup)

    def _log_warning(self, msg, cat="", data=None, context=None, _levelup=1):
        self._log(msg, cat=cat, level=30, data=data, context=context, _levelup=_levelup)

    def _log_error(self, msg, cat="", data=None, context=None, _levelup=1):
        self._log(msg, cat=cat, level=40, data=data, context=context, _levelup=_levelup)

    def _log_critical(self, msg, cat="", data=None, context=None, _levelup=1):
        self._log(msg, cat=cat, level=50, data=data, context=context, _levelup=_levelup)

    def _log(self, msg, cat="", level=10, data=None, context=None, _levelup=1):
        """
        :param msg: what you want to log
        :param cat: any dot notation category
        :param level: level of the log
        :return:

        can use {RED}, {RESET}, ... see color codes

        levels:

        - CRITICAL  50
        - ERROR     40
        - WARNING   30
        - INFO      20
        - STDOUT    15
        - DEBUG     10

        """
        if j.application._in_autocomplete == 2:
            raise RuntimeError("s")
        if j.application._in_autocomplete:
            return None

        if j.application.debug or self.__class__._logger_min_level - 1 < level:
            # now we will log

            frame_ = inspect.currentframe().f_back
            levelup = 0
            while frame_ and levelup < _levelup:
                frame_ = frame_.f_back
                levelup += 1

            fname = frame_.f_code.co_filename.split("/")[-1]
            defname = frame_.f_code.co_name
            linenr = frame_.f_lineno

            # while obj is None and frame_:
            #     locals_ = frame_.f_locals
            #     if tbc2 in locals_:
            #         obj = locals_[tbc2]
            #     else:
            #         frame_ = frame_.f_back

            # if self._location not in [None,""]:
            #     if not self._location.endswith(self._name):
            #         context = "%s:%s:%s"%(self._location,self._name,defname)
            #     else:
            #         context = "%s:%s"%(self._location,defname)
            # if context=="":
            #     context = defname

            logdict = {}
            logdict["linenr"] = linenr
            logdict["processid"] = j.application.appname
            logdict["message"] = msg
            logdict["filepath"] = fname
            logdict["level"] = level
            if context:
                logdict["context"] = context
            else:
                try:
                    logdict["context"] = self._key
                except Exception:
                    # the original dropped into pudb here (a debugging
                    # leftover); fall back to an empty context instead
                    logdict["context"] = ""  # TODO:*1 is not good
            logdict["cat"] = cat
            logdict["data"] = data
            if data and isinstance(data, dict):
                # shallow copy the data to avoid changing the original data
                hidden_data = data.copy()
                if "password" in data or "secret" in data or "passwd" in data:
                    hidden_data["password"] = "***"
                logdict["data"] = hidden_data

            j.core.tools.log2stdout(logdict)

    ################

    def _done_check(self, name="", reset=False):
        if reset:
            self._done_reset(name=name)
        if name == "":
            return j.core.db.hexists("done", self._objid)
        else:
            return j.core.db.hexists("done", "%s:%s" % (self._objid, name))

    def _done_set(self, name="", value="1"):
        if name == "":
            return j.core.db.hset("done", self._objid, value)
        else:
            return j.core.db.hset("done", "%s:%s" % (self._objid, name), value)

    def _done_get(self, name=""):
        if name == "":
            return j.core.db.hget("done", self._objid)
        else:
            return j.core.db.hget("done", "%s:%s" % (self._objid, name))

    def _done_reset(self, name=""):
        """
        if name =="" then will remove all from this object
        :param name:
        :return:
        """
        if name == "":
            for item in j.core.db.hkeys("done"):
                item = item.decode()
                # print("reset todo:%s" % item)
                if item.find(self._objid) != -1:
                    j.core.db.hdel("done", item)
                    # print("reset did:%s" % item)
        else:
            return j.core.db.hdel("done", "%s:%s" % (self._objid, name))

    def _test_error(self, name, error):
        j.errorhandler.try_except_error_process(error, die=False)
        self.__class__._test_runs_error[name] = error

    def _test_run(self, name="", obj_key="main", die=True, **kwargs):
        """
        :param name: name of file to execute can be e.g. 10_test_my.py or 10_test_my or subtests/test1.py
            the tests are found in subdir tests of this file

            if empty then will use all files sorted in tests subdir, but will not go in subdirs

        :param obj_key: is the name of the function we will look for to execute, cannot have arguments;
            to pass arguments to the example script, use the templating feature, std = main

        :return: result of the tests
        """
        res = self.__test_run(name=name, obj_key=obj_key, die=die, **kwargs)
        if self.__class__._test_runs_error != {}:
            for key, e in self.__class__._test_runs_error.items():
                self._log_error("ERROR FOR TEST: %s\n%s" % (key, e))
            self._log_error("SOME TESTS DID NOT COMPLETE SUCCESSFULLY")
        else:
            self._log_info("ALL TESTS OK")
        return res

    def __test_run(self, name=None, obj_key="main", die=True, **kwargs):

        if name == "":
            name = None

        if name is not None:
            self._log_info("##: TEST RUN: %s" % name.upper())

        if name is not None:

            if name.endswith(".py"):
                name = name[:-3]

            tpath = "%s/tests/%s" % (self._dirpath, name)
            tpath = tpath.replace("//", "/")
            if not name.endswith(".py"):
                tpath += ".py"

            if not j.sal.fs.exists(tpath):
                for item in j.sal.fs.listFilesInDir("%s/tests" % self._dirpath, recursive=False, filter="*.py"):
                    bname = j.sal.fs.getBaseName(item)
                    if "_" in bname:
                        bname2 = "_".join(bname.split("_", 1)[1:])  # remove part before first '_'
                    else:
                        bname2 = bname
                    if bname2.endswith(".py"):
                        bname2 = bname2[:-3]
                    if bname2.strip().lower() == name:
                        self.__test_run(name=bname, obj_key=obj_key, **kwargs)
                        return
                return self._test_error(
                    name, RuntimeError("Could not find, test:%s in %s/tests/" % (name, self._dirpath))
                )

            self._log_debug("##: path: %s\n\n" % tpath)
        else:
            items = [
                j.sal.fs.getBaseName(item)
                for item in j.sal.fs.listFilesInDir("%s/tests" % self._dirpath, recursive=False, filter="*.py")
            ]
            items.sort()
            for name in items:
                self.__test_run(name=name, obj_key=obj_key, **kwargs)
            return

        method = j.tools.codeloader.load(obj_key=obj_key, path=tpath)
        self._log_debug("##:LOAD: path: %s\n\n" % tpath)
        if die or j.application.debug:
            res = method(self=self, **kwargs)
        else:
            try:
                res = method(self=self, **kwargs)
            except Exception as e:
                if j.application.debug:
                    raise e
                else:
                    j.errorhandler.try_except_error_process(e, die=False)
                    self.__class__._test_runs_error[name] = e
                return e
        self.__class__._test_runs[name] = res
        return res

    def __str__(self):
        out = "## {GRAY}%s {RED}%s{BLUE} %s{RESET}\n\n" % (
            self.__objcat_name,
            self.__class__._location,
            self.__class__.__name__,
        )

        def add(name, color, items, out):
            if len(items) > 0:
                out += "{%s}### %s:\n" % (color, name)
                if len(items) < 20:
                    for item in items:
                        out += " - %s\n" % item
                else:
                    out += " - ...\n"
                out += "\n"
            return out

        out = add("children", "GREEN", self._properties_children(), out)
        out = add("data", "YELLOW", self._properties_model(), out)
        out = add("methods", "BLUE", self._methods(), out)
        out = add("properties", "GRAY", self._properties(), out)
        out += "{RESET}"

        out = j.core.tools.text_replace(out)
        print(out)
        # TODO: *1 dirty hack, the ansi codes are not printed, need to check why
        return ""

    __repr__ = __str__
nilq/baby-python
python
# -*- coding: utf-8 -*-

"""
Protocol implementation for `Tokyo Tyrant <http://1978th.net/tokyotyrant/>`_.

Let's assume some defaults for our sandbox::

    >>> TEST_HOST = '127.0.0.1'
    >>> TEST_PORT = 1983    # default port is 1978

"""

import math
import socket
import struct

import exceptions


# Pyrant constants
MAGIC_NUMBER = 0xc8
ENCODING = 'UTF-8'
ENCODING_ERROR_HANDLING = 'strict'  # set to 'replace' or 'ignore' if needed

# Table Types
DB_BTREE = 'B+ tree'
DB_TABLE = 'table'
DB_MEMORY = 'on-memory hash'
DB_HASH = 'hash'

TABLE_COLUMN_SEP = '\x00'


def _ulen(expr):
    "Returns length of the string in bytes."
    return len(expr.encode(ENCODING)) if isinstance(expr, unicode) else len(expr)


def _pack(code, *args):
    # Craft string that we'll use to send data based on args type and content
    buf = ''
    fmt = '>BB'
    largs = []
    for arg in args:
        if isinstance(arg, int):
            fmt += 'I'
            largs.append(arg)
        elif isinstance(arg, str):
            buf += arg
        elif isinstance(arg, unicode):
            buf += arg.encode(ENCODING)
        elif isinstance(arg, long):
            fmt += 'Q'
            largs.append(arg)
        elif isinstance(arg, (list, tuple)):
            for v in arg:
                if isinstance(v, unicode):
                    v = v.encode(ENCODING)
                else:
                    v = str(v)
                buf += "%s%s" % (struct.pack(">I", len(v)), v)
    return "%s%s" % (struct.pack(fmt, MAGIC_NUMBER, code, *largs), buf)


class _TyrantSocket(object):
    """
    Socket logic. We use this class as a wrapper to raw sockets.
    """

    def __init__(self, host, port, timeout=None):
        self._sock = socket.socket()
        if not timeout is None:
            self._sock.settimeout(timeout)
        self._sock.connect((host, port))
        self._sock.setsockopt(socket.SOL_TCP, socket.TCP_NODELAY, 1)

    def __del__(self):
        self._sock.close()

    def send(self, *args, **kwargs):
        """
        Packs arguments and sends the buffer to the socket.
        """
        sync = kwargs.pop('sync', True)
        # Send message to socket, then check for errors as needed.
        self._sock.sendall(_pack(*args))
        if not sync:
            return
        fail_code = ord(self.get_byte())
        if fail_code:
            raise exceptions.get_for_code(fail_code)

    def recv(self, bytes):
        """
        Retrieves given number of bytes from the socket and returns them as
        string.
        """
        d = ''
        while len(d) < bytes:
            c = self._sock.recv(min(8192, bytes - len(d)))
            if not c:
                raise socket.error('server disconnected unexpectedly')  # pragma: nocover
            d += c
        return d

    def get_byte(self):
        """
        Retrieves one byte from the socket and returns it.
        """
        return self.recv(1)

    def get_int(self):
        """
        Retrieves an integer (4 bytes) from the socket and returns it.
        """
        return struct.unpack('>I', self.recv(4))[0]

    def get_long(self):
        """
        Retrieves a long integer (8 bytes) from the socket and returns it.
        """
        return struct.unpack('>Q', self.recv(8))[0]

    def get_str(self):
        """
        Retrieves a string (n bytes, which is an integer just before string)
        from the socket and returns it.
        """
        return self.recv(self.get_int())

    def get_unicode(self):
        """
        Retrieves a unicode string from the socket and returns it. This method
        uses :meth:`get_str`, which in turn makes use of :meth:`get_int`.
        """
        return self.get_str().decode(ENCODING, ENCODING_ERROR_HANDLING)

    def get_double(self):
        """
        Retrieves two long integers (16 bytes) from the socket and returns them.
        """
        intpart, fracpart = struct.unpack('>QQ', self.recv(16))
        return intpart + (fracpart * 1e-12)

    def get_strpair(self):
        """
        Retrieves a pair of strings (n bytes, n bytes which are 2 integers just
        before the pair) and returns them as a tuple of strings.
        """
        klen = self.get_int()
        vlen = self.get_int()
        return self.recv(klen), self.recv(vlen)


class TyrantProtocol(object):
    """
    A straightforward implementation of the Tokyo Tyrant protocol. Provides all
    low level constants and operations. Provides a level of abstraction that is
    just enough to communicate with server from Python using Tyrant API.

    More sophisticated APIs can be built on top of this class. Two of them are
    included in pyrant: the dict-like API (:class:`~pyrant.Pyrant`) and the
    query API (:class:`~pyrant.query.Query`).

    Let's connect to a sandbox Tyrant server::

        >>> from pyrant import protocol
        >>> p = protocol.TyrantProtocol(host=TEST_HOST, port=TEST_PORT)

        # remove anything that could be left from previous time
        >>> p.vanish()

        # make sure there are zero records in the database
        >>> p.rnum()
        0

    """

    # Protocol commands
    PUT = 0x10
    PUTKEEP = 0x11
    PUTCAT = 0x12
    PUTSHL = 0x13
    PUTNR = 0x18
    OUT = 0x20
    GET = 0x30
    MGET = 0x31
    VSIZ = 0x38
    ITERINIT = 0x50
    ITERNEXT = 0x51
    FWMKEYS = 0x58
    ADDINT = 0x60
    ADDDOUBLE = 0x61
    EXT = 0x68
    SYNC = 0x70
    VANISH = 0x72
    COPY = 0x73
    RESTORE = 0x74
    SETMST = 0x78
    RNUM = 0x80
    SIZE = 0x81
    STAT = 0x88
    MISC = 0x90

    # Query conditions
    RDBQCSTREQ = 0     # string is equal to
    RDBQCSTRINC = 1    # string is included in
    RDBQCSTRBW = 2     # string begins with
    RDBQCSTREW = 3     # string ends with
    RDBQCSTRAND = 4    # string includes all tokens in
    RDBQCSTROR = 5     # string includes at least one token in
    RDBQCSTROREQ = 6   # string is equal to at least one token in
    RDBQCSTRRX = 7     # string matches regular expressions of
    RDBQCNUMEQ = 8     # number is equal to
    RDBQCNUMGT = 9     # number is greater than
    RDBQCNUMGE = 10    # number is greater than or equal to
    RDBQCNUMLT = 11    # number is less than
    RDBQCNUMLE = 12    # number is less than or equal to
    RDBQCNUMBT = 13    # number is between two tokens of
    RDBQCNUMOREQ = 14  # number is equal to at least one token in
    RDBQCFTSPH = 15    # full-text search with the phrase of
    RDBQCFTSAND = 16   # full-text search with all tokens in
    RDBQCFTSOR = 17    # full-text search with at least one token in
    RDBQCFTSEX = 18    # full-text search with the compound expression of

    RDBQCNEGATE = 1 << 24  # negation flag
    RDBQCNOIDX = 1 << 25   # no index flag

    # Order types
    RDBQOSTRASC = 0   # string ascending
    RDBQOSTRDESC = 1  # string descending
    RDBQONUMASC = 2   # number ascending
    RDBQONUMDESC = 3  # number descending

    # Operation types
    TDBMSUNION = 0  # union
    TDBMSISECT = 1  # intersection
    TDBMSDIFF = 2   # difference

    # Miscellaneous operation options
    RDBMONOULOG = 1  # omission of update log

    # Scripting extension options
    RDBXOLCKREC = 1  # record locking
    RDBXOLCKGLB = 2  # global locking

    # Index types (for table databases)
    TDBITLEXICAL = 0     # lexical string
    TDBITDECIMAL = 1     # decimal string
    TDBITTOKEN = 2       # token inverted index
    TDBITQGRAM = 3       # q-gram inverted index
    TDBITOPT = 9998      # optimize index
    TDBITVOID = 9999     # remove index
    TDBITKEEP = 1 << 24  # keep existing index

    def __init__(self, host, port, timeout=None):
        # connect to the remote database
        self._sock = _TyrantSocket(host, port, timeout)
        # expose connection info (not used internally)
        self.host = host
        self.port = port

    def put(self, key, value):
        """
        Unconditionally sets key to value::

            >>> p.put(u'foo', u'bar\x00baz')
            >>> p.rnum()
            1

            >>> p.put('fox', u'box\x00quux')
            >>> p.rnum()
            2

        """
        self._sock.send(self.PUT, _ulen(key), _ulen(value), key, value)

    def putkeep(self, key, value):
        """
        Sets key to value if key does not already exist.
        """
        self._sock.send(self.PUTKEEP, _ulen(key), _ulen(value), key, value)

    def putcat(self, key, value):
        """
        Appends value to the existing value for key, or sets key to value if it
        does not already exist.
        """
        self._sock.send(self.PUTCAT, _ulen(key), _ulen(value), key, value)

    def putshl(self, key, value, width):
        """
        Equivalent to::

            self.putcat(key, value)
            self.put(key, self.get(key)[-width:])

        """
        self._sock.send(self.PUTSHL, _ulen(key), _ulen(value), width, key, value)

    def putnr(self, key, value):
        """
        Sets key to value without waiting for a server response.
        """
        self._sock.send(self.PUTNR, _ulen(key), _ulen(value), key, value, sync=False)

    def out(self, key):
        """
        Removes key from server.
        """
        self._sock.send(self.OUT, _ulen(key), key)

    def genuid(self):
        """
        Generates and returns a unique primary key. Raises `ValueError` if the
        database could not return sensible data.
        """
        res = self.misc('genuid', [])
        if not len(res) == 1 or not res[0]:
            raise ValueError('Could not generate primary key: %s' % repr(res))  # pragma: nocover
        return res[0]

    def get(self, key, literal=False):
        """
        Returns the value of `key` as stored on the server::

            >>> p.get(u'foo')
            u'bar\x00baz'
            >>> p.get(u'fox')
            u'box\x00quux'

        """
        self._sock.send(self.GET, _ulen(key), key)
        return self._sock.get_str() if literal else self._sock.get_unicode()

    def getint(self, key):
        """
        Returns an integer for given `key`. Value must be set by
        :meth:`~pyrant.protocol.TyrantProtocol.addint`.
        """
        return self.addint(key)

    def getdouble(self, key):
        """
        Returns a double for given key. Value must be set by
        :meth:`~adddouble`.
        """
        return self.adddouble(key)

    def mget(self, keys):
        """
        Returns key,value pairs from the server for the given list of keys::

            >>> p.mget(['foo', 'fox'])
            [('foo', 'bar\x00baz'), ('fox', 'box\x00quux')]

        """
        self._sock.send(self.MGET, len(keys), keys)
        numrecs = self._sock.get_int()
        return [self._sock.get_strpair() for i in xrange(numrecs)]

    def vsiz(self, key):
        """
        Returns the size of a value for given key.
        """
        self._sock.send(self.VSIZ, _ulen(key), key)
        return self._sock.get_int()

    def iterinit(self):
        """
        Begins iteration over all keys of the database.

            >>> p.iterinit()    # now we can call iternext()

        """
        self._sock.send(self.ITERINIT)

    def iternext(self):
        """
        Returns the next key after ``iterinit`` call. Raises an exception which
        is subclass of :class:`~pyrant.protocol.TyrantError` on iteration end::

            >>> p.iternext()    # assume iterinit() was already called
            u'foo'
            >>> p.iternext()
            u'fox'
            >>> p.iternext()
            Traceback (most recent call last):
                ...
            InvalidOperation

        """
        self._sock.send(self.ITERNEXT)
        return self._sock.get_unicode()

    def fwmkeys(self, prefix, maxkeys=-1):
        """
        Get up to the first maxkeys starting with prefix
        """
        self._sock.send(self.FWMKEYS, _ulen(prefix), maxkeys, prefix)
        numkeys = self._sock.get_int()
        return [self._sock.get_unicode() for i in xrange(numkeys)]

    def addint(self, key, num=0):
        """
        Adds given integer to existing one. Stores and returns the sum.
        """
        self._sock.send(self.ADDINT, _ulen(key), num, key)
        return self._sock.get_int()

    def adddouble(self, key, num=0.0):
        """
        Adds given double to existing one. Stores and returns the sum.
        """
        fracpart, intpart = math.modf(num)
        fracpart, intpart = int(fracpart * 1e12), int(intpart)
        self._sock.send(self.ADDDOUBLE, _ulen(key), long(intpart), long(fracpart), key)
        return self._sock.get_double()

    def ext(self, func, opts, key, value):
        """
        Calls ``func(key, value)`` with ``opts``.

        :param opts: a bitflag that can be `RDBXOLCKREC` for record locking
            and/or `RDBXOLCKGLB` for global locking.
        """
        self._sock.send(self.EXT, len(func), opts, _ulen(key), _ulen(value), func, key, value)
        return self._sock.get_unicode()

    def sync(self):  # TODO: better documentation (why would someone need this?)
        """
        Synchronizes the updated contents of the remote database object with the
        file and the device.
        """
        self._sock.send(self.SYNC)

    def vanish(self):
        """
        Removes all records from the database.
        """
        self._sock.send(self.VANISH)

    def copy(self, path):
        """
        Hot-copies the database to given path.
        """
        self._sock.send(self.COPY, _ulen(path), path)

    def restore(self, path, msec):
        """
        Restores the database from `path` at given timestamp (in `msec`).
        """
        self._sock.send(self.RESTORE, _ulen(path), msec, path)

    def setmst(self, host, port):
        """
        Sets master to `host`:`port`.
        """
        self._sock.send(self.SETMST, len(host), port, host)

    def rnum(self):
        """
        Returns the number of records in the database.
        """
        self._sock.send(self.RNUM)
        return self._sock.get_long()

    def add_index(self, name, kind=None, keep=False):
        """
        Sets index on given column. Returns `True` if index was successfully
        created.

        :param name: column name for which index should be set.
        :param kind: index type, one of: `lexical`, `decimal`, `token`,
            `q-gram`.
        :param keep: if True, index is only created if it did not yet exist.
            Default is False, i.e. any existing index is reset.

        .. note:: we have chosen not to mimic the original API here because it
            is a bit too confusing. Instead of a single cumbersome function
            Pyrant provides three: :meth:`~add_index`, :meth:`~optimize_index`
            and :meth:`~drop_index`. They all do what their names suggest.

        """
        # TODO: replace "kind" with keyword arguments
        TYPES = {
            'lexical': self.TDBITLEXICAL,
            'decimal': self.TDBITDECIMAL,
            'token': self.TDBITTOKEN,
            'q-gram': self.TDBITQGRAM,
        }
        kind = 'lexical' if kind is None else kind
        assert kind in TYPES, 'unknown index type "%s"' % kind
        type_code = TYPES[kind]
        if keep:
            type_code |= self.TDBITKEEP
        try:
            self.misc('setindex', [name, type_code])
        except exceptions.InvalidOperation:
            return False
        else:
            return True

    def optimize_index(self, name):
        """
        Optimizes index for given column. Returns `True` if the operation was
        successfully performed. In most cases the operation fails when the
        index does not exist. You can add index using :meth:`~add_index`.
        """
        try:
            self.misc('setindex', [name, self.TDBITOPT])
        except exceptions.InvalidOperation:
            return False
        else:
            return True

    def drop_index(self, name):
        """
        Removes index for given column. Returns `True` if the operation was
        successfully performed. In most cases the operation fails when the
        index doesn't exist. You can add index using :meth:`~add_index`.
        """
        try:
            self.misc('setindex', [name, self.TDBITVOID])
        except exceptions.InvalidOperation:
            return False
        else:
            return True

    def size(self):
        """
        Returns the size of the database in bytes.
        """
        self._sock.send(self.SIZE)
        return self._sock.get_long()

    def stat(self):
        """
        Returns some statistics about the database.
        """
        self._sock.send(self.STAT)
        return self._sock.get_unicode()

    def search(self, conditions, limit=10, offset=0,
               order_type=0, order_column=None, opts=0,
               ms_conditions=None, ms_type=None, columns=None,
               out=False, count=False, hint=False):
        """
        Returns list of keys for elements matching given ``conditions``.

        :param conditions: a list of tuples in the form ``(column, op, expr)``
            where `column` is name of a column and `op` is operation code (one
            of TyrantProtocol.RDBQC[...]). The conditions are implicitly
            combined with logical AND. See `ms_conditions` and `ms_type` for
            more complex operations.
        :param limit: integer. Defaults to 10.
        :param offset: integer. Defaults to 0.
        :param order_column: string; if defined, results are sorted by this
            column using default or custom ordering method.
        :param order_type: one of TyrantProtocol.RDBQO[...]; if defined along
            with `order_column`, results are sorted by the latter using given
            method. Default is RDBQOSTRASC.
        :param opts: a bitflag (see :meth:`~pyrant.protocol.TyrantProtocol.misc`)
        :param ms_conditions: MetaSearch conditions.
        :param ms_type: MetaSearch operation type.
        :param columns: iterable; if not empty, returns only given columns for
            matched records.
        :param out: boolean; if True, all items that correspond to the query are
            deleted from the database when the query is executed.
        :param count: boolean; if True, the return value is the number of items
            that correspond to the query.
        :param hint: boolean; if True, the hint string is added to the return
            value.
        """
        # TODO: split this function into separate functions if they return
        # different results:
        #
        # - search      = misc('search', [])          --> list of keys
        # - searchget   = misc('search', ['get'])     --> list of items
        # - searchout   = misc('search', ['out'])     --> boolean
        # - searchcount = misc('search', ['count'])   --> integer
        #
        # Some functions should be of course left as keywords for the
        # above-mentioned functions:
        #
        # - addcond  = misc('search', ['addcond...'])
        # - setorder = misc('search', ['setorder...'])
        # - setlimit = misc('search', ['setlimit...'])
        # - hint     = misc('search', ['hint'])
        # - metasearch stuff, including functions 'mstype', 'addcond' and 'next'.
        #
        # See http://1978th.net/tokyotyrant/spex.html#tcrdbapi

        # sanity check
        assert limit is None or 0 <= limit, 'wrong limit value "%s"' % limit
        assert offset is None or 0 <= offset, 'wrong offset value "%s"' % offset
        if offset and not limit:
            # this is required by TDB API. Could be worked around somehow?
            raise ValueError('Offset cannot be specified without limit.')
        assert ms_type in (None, self.TDBMSUNION, self.TDBMSISECT, self.TDBMSDIFF)
        assert order_type in (self.RDBQOSTRASC, self.RDBQOSTRDESC, self.RDBQONUMASC, self.RDBQONUMDESC)

        # conditions
        args = ['addcond\x00%s\x00%d\x00%s' % cond for cond in conditions]

        # MetaSearch support (multiple additional queries, one Boolean operation)
        if ms_type is not None and ms_conditions:
            args += ['mstype\x00%s' % ms_type]
            for conds in ms_conditions:
                args += ['next']
                args += ['addcond\x00%s\x00%d\x00%s' % cond for cond in conds]

        # return only selected columns
        if columns:
            args += ['get\x00%s' % '\x00'.join(columns)]

        # set order in query
        if order_column:
            args += ['setorder\x00%s\x00%d' % (order_column, order_type)]

        # set limit and offset
        if limit:  # and 0 <= offset:
            # originally this is named setlimit(max,skip).
            # it is *not* possible to specify offset without limit.
            args += ['setlimit\x00%d\x00%d' % (limit, offset)]

        # drop all records yielded by the query
        if out:
            args += ['out']

        if count:
            args += ['count']

        if hint:
            args += ['hint']

        return self.misc('search', args, opts)

    def misc(self, func, args, opts=0):
        """
        Executes custom function.

        :param func: the function name (see below)
        :param opts: a bitflag (see below)

        Functions supported by all databases:

        * `putlist` stores records. It receives keys and values one after the
          other, and returns an empty list.
        * `outlist` removes records. It receives keys, and returns an empty list.
        * `getlist` retrieves records. It receives keys, and returns values.

        Functions supported by the table database (in addition to mentioned
        above):

        * `setindex`
        * `search`
        * `genuid`.

        Possible options:

        * :const:`TyrantProtocol.RDBMONOULOG` to prevent writing to the update
          log.
        """
        try:
            self._sock.send(self.MISC, len(func), opts, len(args), func, args)
        finally:
            numrecs = self._sock.get_int()
        return [self._sock.get_unicode() for i in xrange(numrecs)]
nilq/baby-python
python
import h2o

h2o.init()
weather_hex = h2o.import_file("weather.csv")

# To see a brief summary of the data, run the following command.
weather_hex.describe()
nilq/baby-python
python
from Tkinter import Tk, Label, Button


def update_label():
    # Callback wired to the button: bump the counter and refresh the label.
    global n
    n += 1
    l["text"] = "Number of clicks: %d" % n


w = Tk()
n = 0  # click counter
l = Label(w, text="There have been no clicks yet")
l.pack()
Button(w, text="click me", command=update_label).pack()
w.mainloop()
nilq/baby-python
python
# -*- coding: utf-8 -*- # cython: language_level=3 # Copyright (c) 2020 Nekokatt # Copyright (c) 2021-present davfsa # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in all # copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. """Utility methods used for parsing timestamps and datetimes from Discord.""" from __future__ import annotations __all__: typing.List[str] = [ "DISCORD_EPOCH", "datetime_to_discord_epoch", "discord_epoch_to_datetime", "unix_epoch_to_datetime", "Intervalish", "timespan_to_int", "local_datetime", "utc_datetime", "monotonic", "monotonic_ns", "uuid", ] import datetime import time import typing import uuid as uuid_ Intervalish = typing.Union[int, float, datetime.timedelta] """Type hint representing a naive time period or time span. This is a type that is like an interval of some sort. This is an alias for `typing.Union[int, float, datetime.datetime]`, where `builtins.int` and `builtins.float` types are interpreted as a number of seconds. """ DISCORD_EPOCH: typing.Final[int] = 1_420_070_400 """Discord epoch used within snowflake identifiers. This is defined as the number of seconds between `1/1/1970 00:00:00 UTC` and `1/1/2015 00:00:00 UTC`. References ---------- * [Discord API documentation - Snowflakes](https://discord.com/developers/docs/reference#snowflakes) """ # Default to the standard lib parser, that isn't really ISO compliant but seems # to work for what we need. def slow_iso8601_datetime_string_to_datetime(datetime_str: str) -> datetime.datetime: """Parse an ISO-8601-like datestring into a datetime. Parameters ---------- datetime_str : builtins.str The date string to parse. Returns ------- datetime.datetime The corresponding date time. """ if datetime_str.endswith(("z", "Z")): # Python's parser cannot handle zulu time, it isn't a proper ISO-8601 compliant parser. datetime_str = datetime_str[:-1] + "+00:00" return datetime.datetime.fromisoformat(datetime_str) fast_iso8601_datetime_string_to_datetime: typing.Optional[typing.Callable[[str], datetime.datetime]] try: # CISO8601 is around 600x faster than modules like dateutil, which is # going to be noticeable on big bots where you are parsing hundreds of # thousands of "joined_at" fields on users on startup. import ciso8601 # Discord appears to actually use RFC-3339, which isn't a true ISO-8601 implementation, # but somewhat of a subset with some edge cases. 
# See https://tools.ietf.org/html/rfc3339#section-5.6 fast_iso8601_datetime_string_to_datetime = ciso8601.parse_rfc3339 except ImportError: fast_iso8601_datetime_string_to_datetime = None iso8601_datetime_string_to_datetime: typing.Callable[[str], datetime.datetime] = ( fast_iso8601_datetime_string_to_datetime or slow_iso8601_datetime_string_to_datetime ) def discord_epoch_to_datetime(epoch: int, /) -> datetime.datetime: """Parse a Discord epoch into a `datetime.datetime` object. Parameters ---------- epoch : builtins.int Number of milliseconds since `1/1/2015 00:00:00 UTC`. Returns ------- datetime.datetime Number of seconds since `1/1/1970 00:00:00 UTC`. """ return datetime.datetime.fromtimestamp(epoch / 1_000 + DISCORD_EPOCH, datetime.timezone.utc) def datetime_to_discord_epoch(timestamp: datetime.datetime) -> int: """Parse a `datetime.datetime` object into an `builtins.int` `DISCORD_EPOCH` offset. Parameters ---------- timestamp : datetime.datetime Number of seconds since `1/1/1970 00:00:00 UTC`. Returns ------- builtins.int Number of milliseconds since `1/1/2015 00:00:00 UTC`. """ return int((timestamp.timestamp() - DISCORD_EPOCH) * 1_000) def unix_epoch_to_datetime(epoch: typing.Union[int, float], /, *, is_millis: bool = True) -> datetime.datetime: """Parse a UNIX epoch to a `datetime.datetime` object. !!! note If an epoch that's outside the range of what this system can handle, this will return `datetime.datetime.max` if the timestamp is positive, or `datetime.datetime.min` otherwise. Parameters ---------- epoch : typing.Union[builtins.int, builtins.float] Number of seconds/milliseconds since `1/1/1970 00:00:00 UTC`. is_millis : builtins.bool `builtins.True` by default, indicates the input timestamp is measured in milliseconds rather than seconds Returns ------- datetime.datetime Number of seconds since `1/1/1970 00:00:00 UTC`. """ # Datetime seems to raise an OSError when you try to convert an out of range timestamp on Windows and a ValueError # if you try on a UNIX system so we want to catch both. try: epoch /= (is_millis * 1_000) or 1 return datetime.datetime.fromtimestamp(epoch, datetime.timezone.utc) except (OSError, ValueError): if epoch > 0: return datetime.datetime.max else: return datetime.datetime.min def timespan_to_int(value: Intervalish, /) -> int: """Cast the given timespan in seconds to an integer value. Parameters ---------- value : Intervalish The number of seconds. Returns ------- builtins.int The integer number of seconds. Fractions are discarded. Negative values are removed. """ if isinstance(value, datetime.timedelta): value = value.total_seconds() return int(max(0, value)) def local_datetime() -> datetime.datetime: """Return the current date/time for the system's time zone.""" return utc_datetime().astimezone() def utc_datetime() -> datetime.datetime: """Return the current date/time for UTC (GMT+0).""" return datetime.datetime.now(tz=datetime.timezone.utc) # time.monotonic_ns is no slower than time.monotonic, but is more accurate. # Also, fun fact that monotonic_ns appears to be 1µs faster on average than # monotonic on ARM64 architectures, but on x86, monotonic is around 1ns faster # than monotonic_ns. Just thought that was kind of interesting to note down. # (RPi 3B versus i7 6700) # time.perf_counter and time.perf_counter_ns don't have proper typehints, causing # pdoc to not be able to recognise them. This is just a little hack around that. 
def monotonic() -> float: """Performance counter for benchmarking.""" # noqa: D401 - Imperative mood return time.perf_counter() def monotonic_ns() -> int: """Performance counter for benchmarking as nanoseconds.""" # noqa: D401 - Imperative mood return time.perf_counter_ns() def uuid() -> str: """Generate a unique UUID (1ns precision).""" return uuid_.uuid1(None, monotonic_ns()).hex
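
# A minimal usage sketch of the helpers above; it only exercises documented
# behaviour (a Discord epoch of 0 corresponds to 1/1/2015 00:00:00 UTC, and
# timespan_to_int discards fractional seconds). The demo block is illustrative
# and not part of the public API.
if __name__ == "__main__":
    epoch_start = discord_epoch_to_datetime(0)
    assert epoch_start == datetime.datetime(2015, 1, 1, tzinfo=datetime.timezone.utc)
    assert datetime_to_discord_epoch(epoch_start) == 0
    assert timespan_to_int(datetime.timedelta(seconds=90.5)) == 90
    print(epoch_start.isoformat(), monotonic(), uuid())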
nilq/baby-python
python
import json
import os
import copy

__author__ = 'nekmo'


class Field(object):
    def __call__(self, value):
        return self.parse(value)

    def parse(self, value):
        raise NotImplementedError


class IntegerField(Field):
    def parse(self, value):
        return int(value)


class BooleanField(Field):
    def parse(self, value):
        return bool(value)


class BaseParser(object):
    _key = None  # If the parent is a dictionary, the key of this element within it
    _parent = None  # The parent element
    parser = None
    config = None

    def save(self):
        self.config.save()


class ListParser(list, BaseParser):
    def __init__(self, parser=None, data=None, config=None):
        """
        :param parser: Parser with which each element should be parsed
        :param data: Data with which to populate the elements
        :param config: Root config, so the save() method can be used
        :return:
        """
        super().__init__()
        self.parser = parser
        self.config = config  # required so BaseParser.save() can reach the root config
        # TODO: each element of data should be validated
        self.extend(data or [])


class DictParser(dict, BaseParser):
    schema = None
    default = None

    def __init__(self, parser=None, data=None, config=None):
        self.parser = parser
        self.config = config
        super().__init__()
        if data:
            self.update(data)

    def __getattr__(self, item):
        if item in self:
            return self[item]
        elif item in (self.default or {}) and item in self.schema:
            return self.parse_schema_element(item, copy.deepcopy(self.default[item]))
        return self.__getattribute__(item)

    def parse_schema(self, data):
        new_data = {}
        for key, value in data.items():
            new_data[key] = self.parse_schema_element(key, value)
        return new_data

    def parse_schema_element(self, key, value):
        parser = self.parser or self.schema[key]
        if isinstance(parser, Field):
            return parser(value)
        else:
            element = parser(data=value, config=self.config)
            element._key = key
            element._parent = self
            return element

    def update(self, E=None, **F):
        new_data = self.parse_schema(E or {})
        return super(DictParser, self).update(new_data, **F)


class Config(DictParser):
    is_loaded = False
    default = None

    def __init__(self, config_file, default=None):
        super().__init__()
        self.config_file = config_file
        self.default = default or self.default or {}

    def __setitem__(self, key, value):
        self.load()
        return super(Config, self).__setitem__(key, value)

    def __getitem__(self, item):
        self.load()
        return super(Config, self).__getitem__(item)

    def __delitem__(self, key):
        self.load()
        return super(Config, self).__delitem__(key)

    def __getattr__(self, item):
        if item in ['is_loaded']:
            return self.__getattribute__(item)
        self.load()
        if item in self:
            return self[item]
        return self.__getattribute__(item)

    def load(self):
        if self.is_loaded:
            return
        self.is_loaded = True
        self.clear()
        if os.path.exists(self.config_file):
            self.update(json.load(open(self.config_file, 'r')))
        else:
            default = copy.deepcopy(self.default)
            self.save(default)
            self.update(default)
        return self

    def save(self, data=None):
        config_dir = os.path.dirname(self.config_file)
        if not os.path.exists(config_dir):
            os.makedirs(config_dir)
        json.dump(data or self, open(self.config_file, 'w'))
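
# A minimal usage sketch for the classes above; the subclass name, field names
# and the /tmp path are hypothetical. A concrete Config needs a `schema`
# mapping every stored key to a Field (or nested parser), while `default`
# supplies values for keys missing from the JSON file.
if __name__ == '__main__':
    class AppConfig(Config):
        schema = {
            'port': IntegerField(),
            'debug': BooleanField(),
        }
        default = {'port': 8080, 'debug': False}

    config = AppConfig('/tmp/example-app/config.json')
    print(config.port, config.debug)  # parsed defaults on first run: 8080 False
    config['port'] = 9000             # stored raw; parsed again on the next load
    config.save()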
nilq/baby-python
python
import logging.config import os class Config(object): SERVER_NAME = '127.0.0.1:5000' LOGGING_CONFIG_FILE = 'logging-config.ini' @classmethod def init_app(cls, app): logging_config_path = os.path.normpath( os.path.join( os.path.dirname(__file__), cls.LOGGING_CONFIG_FILE)) logging.config.fileConfig(logging_config_path) class DevelopmentConfig(Config): DEBUG = True config_map = { 'development': DevelopmentConfig, 'default': DevelopmentConfig }
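
# A minimal usage sketch, assuming a Flask application object (Flask itself is
# not imported by this module) and that 'logging-config.ini' sits next to this
# file, as LOGGING_CONFIG_FILE expects:
if __name__ == '__main__':
    from flask import Flask

    app = Flask(__name__)
    cfg = config_map['default']
    app.config.from_object(cfg)  # reads the uppercase class attributes
    cfg.init_app(app)            # configures logging from the ini file
    print(app.config['SERVER_NAME'], app.config['DEBUG'])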
nilq/baby-python
python
""" InputReader -------------------------------------------------- Input Reader that loads previous output files """ import yaml import json def load_previous_outputs_as_inputs(file_paths: list) -> dict: print("Start loading input files...") previous_records = {} for file_path in file_paths: print("Loading {}...".format(file_path)) # start reading files data = None # try yaml and json input_stream = None try: input_stream = open(file_path) data = yaml.safe_load(input_stream) print("{} successfully loaded as yaml file.".format(file_path)) input_stream.close() except yaml.YAMLError: if input_stream: input_stream.close() data = None if not data: try: input_stream = open(file_path) data = json.load(input_stream) print("{} successfully loaded as json file.".format(file_path)) input_stream.close() except json.JSONDecodeError: if input_stream: input_stream.close() data = None if not data or not isinstance(data, dict): print("Loading {} failed both in yaml and json. Skipped.".format(file_path)) continue # read data into dict and merge data if necessary for user_dict in data["results"]: if user_dict["owner__username"] in previous_records: to_merge_user_object = previous_records[user_dict["owner__username"]] # iterate all repos in data for repo_object in user_dict["repos"]: # update to the latest scanned ones repo_name = repo_object["repo__name"] if repo_name in to_merge_user_object["repos"]: if repo_object["date"] > \ to_merge_user_object["repos"][repo_name]["date"]: to_merge_user_object["repos"][repo_name]["date"] = \ repo_object["date"] to_merge_user_object["repos"][repo_name]["status"] = repo_object["status"] # or add the repos if no collision else: to_merge_user_object["repos"][repo_name] = { **repo_object } else: previous_records[user_dict["owner__username"]] = { **user_dict, "repos": { repo_object["repo__name"]: {**repo_object} for repo_object in user_dict["repos"] } } print("Inputs loading finished.") return previous_records
nilq/baby-python
python
from robo_ai.resources.assistants import AssistantsResource from robo_ai.resources.client_resource import ClientResource from robo_ai.resources.oauth import OauthResource class BaseResource(ClientResource): def _register_resources(self): self._add_resource('assistants', AssistantsResource) self._add_resource('oauth', OauthResource) @property def assistants(self) -> AssistantsResource: return self._get_resource('assistants') @property def oauth(self) -> OauthResource: return self._get_resource('oauth')
nilq/baby-python
python
# sdspy import configparser import datetime import json from performance_counters import PerformanceCounters as PC from sds_client import SdsClient from sds_stream import SdsStream from sds_type import SdsType from sds_type_code import SdsTypeCode from sds_type_data import SdsTypeData from sds_type_property import SdsTypeProperty import time import xml.etree.ElementTree import xml
nilq/baby-python
python
import matplotlib # Force matplotlib to not use any Xwindows backend. matplotlib.use('Agg') import pickle import operator from random import randint import matplotlib.pyplot as plt import matplotlib.cm as cm import numpy as np import data_io.settings as Settings from data_io.testdata import sliding_window from . import utils import cv2 as cv from math import ceil import logging def get_mean_accuracy(accuracies): # average accuracy is only calculated over the testing results which is index 2 testingAccuracies = [i[2] for i in accuracies] return float(sum(testingAccuracies)) / float(len(accuracies)) class ModelTester(object): """Class to test and evaluate models.""" def __init__(self, classifier, transformation=None, size=(-1,-1), transformationBack=None): """ Instantiates model tester. Keyword arguments: classifier -- reference to the model. transformation -- optional method for image transformation before prediction size -- desired image size. Default: (-1, -1) means do not change size transformationBack -- optional method for the transformation of the prediction image format back to a displayable format """ self.classifier = classifier self.transformation = transformation # method to transform the data (needed for NNs) self.transformationBack = transformationBack # since the TestData module applies the transformation we have to reverse the transformation on the images to display them. self.size = size def __yield_image_predictions(self, segmentIndex, classes=None, maxNumberOfImages=-1, shuffle=False, slidingWindow=False, slidingWindowSize=(300, 300), slidingWindowStride=64): """ Calls the predict method for each image and returns the result of the prediction. Keyword arguments: segmentsIndex -- Index of the segment to test. classes -- List of classes to test. Default: Test all classes maxNumberOfImages -- number of images to test. Default: use all shuffle -- reshuffle images slidingWindow -- test sliding window slidingWindowSize -- size of the sliding window. Default: (300, 300) Pixels slidingWindowStride -- stride of the sliding window. Default: 64 Pixels Returns: Generator((class_, prediction, img)) := (Class Name, prediction, image that was tested) """ if classes is None: classes = self.classifier.testData.classes if shuffle: self.classifier.testData.reshuffle_segment_data(segmentIndex) prevRandomSamplingStatus = Settings.E_RANDOM_SAMPLE for class_ in classes: # load test images for this class and predict predictions = [] for img, _ in self.classifier.testData.load_data(segmentIndex, classes=[class_], grayscale=self.classifier.grayscale, outputActions=False, maxNumberOfImagesPerClass=maxNumberOfImages, size=self.size, transformation=self.transformation, resolutionSize=self.classifier.imageSize): # classifier tester expects a list in the form of [(class_, [predictions])] if slidingWindow: # prevent random sampling Settings.E_RANDOM_SAMPLE = False voteDict = {cls: 0 for cls in classes} slVis = np.copy(img) # is slVis grayscale? 
if self.classifier.grayscale: slVis = cv.cvtColor(slVis, cv.COLOR_GRAY2BGR) for roi, slImg in sliding_window(img, slidingWindowSize, slidingWindowStride): p = self.classifier.predict([slImg]) if p is None: continue # outputs the class with highest confidence p = p[0][0] voteDict[p] += 1 # overlay imagePart if correct class if p == class_: slVis = roi.overlay_rectangle(slVis) cv.imwrite(self.classifier.modelSaver.get_save_path_for_visualizations() + "slidingWindow/{0}.jpg".format(class_), slVis) print "Sliding Window prediction for class {0} Votes:\n{1}\n\n".format(class_, voteDict) Settings.E_RANDOM_SAMPLE = prevRandomSamplingStatus prediction = self.classifier.predict([img]) if prediction is None: continue yield (class_, prediction, img) def __yield_class_predictions(self, segmentIndex): """ Calls the predict method for each class and yields the result as a tuple with the class and a list of predictions. Keyword arguments: segmentIndex -- index of the test data segment Returns: Generator((class_, predictions)) := (Class name, List of predictions) """ for class_ in self.classifier.testData.classes: # load test images for this class and predict predictions = [p for _, p, _ in self.__yield_image_predictions(segmentIndex, [class_])] yield (class_, predictions) def test_classifier(self, segments=["test"]): """ Completely evaluates a classifier and prints the results to the console window and saves the results to the model directory. Keyword arguments: segments -- List of segments to test onto Returns: dictionary of results of the segments. """ if Settings.G_DETAILED_CONSOLE_OUTPUT: print "## Testing classifier:\n" results = {} for segment in segments: print "# Testing",segment # stores classes as key and the prediction results (list) as value. segmentResults = {} precisionRecallValues = {} for class_, predictions in self.__yield_class_predictions(segment): # number of matches for 1,2,...,numberOfClasses-1 candidates topNMatches = [0] * (self.classifier.testData.numberOfClasses - 1) images = 0.0 # load images and predict. for prediction in predictions: predictionRank = self.__test_top_n_prediction(class_, prediction) #prevent problems with less than 6 classes maxRank = min(self.classifier.testData.numberOfClasses - 1, len(predictionRank)-1) for i in xrange(maxRank+1): topNMatches[i] += predictionRank[i] images += 1.0 # Calculate accuracy for class. segmentResults[class_] = [matches / images for matches in topNMatches] # calculate Precision recall precisionValues = [] recallValues = [] f1Scores = [] for top in xrange(self.classifier.testData.numberOfClasses - 1): # All correctly classified items truePositives = float(topNMatches[top]) # all predicted images without the correctly predicted images. In case of top-1 the total ammount of images is exactly the number of returned predictions. # For top-2 we have twice as much predictions to consider. falsePositives = float((len(predictions) * (top+1))-truePositives) # All items that were not correctly classified. 
falseNegatives = float(len(predictions) - truePositives) precision = truePositives / (truePositives + falsePositives) recall = truePositives / (truePositives + falseNegatives) #f1Score = 2.0 * ((precision * recall) / (precision + recall)) precisionValues.append(precision) recallValues.append(recall) #f1Scores.append(f1Score) precisionRecallValues[class_] = (precisionValues, recallValues) if Settings.G_DETAILED_CONSOLE_OUTPUT: print "\t- Testing {0} - Accuracy: {1:.2f}% - T5 Precision: {2:.2f} - T5 Recall: {3:.2f}".format(class_, segmentResults[class_][0]*100, precisionValues[4], recallValues[4]) # Calculate overall top 1 accuracy. segmentAccuracy = sum([a[0] for (_, a) in segmentResults.iteritems()]) / len(segmentResults) segmentError = 1 - segmentAccuracy # sort accuracies of classes so that we can get the best and worst classes segmentResultsList = segmentResults.items() # segmentResultsList contains the top-n accuracies but we only need the top-1 accuracy segmentResultsList = [(class_, values[0]) for (class_, values) in segmentResultsList] segmentResultsList = sorted(segmentResultsList, key=operator.itemgetter(1), reverse=True) # prevent overflow bestBound = min(2, len(segmentResultsList)) worstBound = max(2, len(segmentResultsList)-2) bestClasses = segmentResultsList[0:bestBound] worstClasses = segmentResultsList[worstBound:] results[segment] = [segmentAccuracy, segmentError, bestClasses, worstClasses, segmentResults, precisionRecallValues] # Save the results self.save_results(results, False) return results def plot_random_predictions(self, segmentIndex="test", cols=4): """ Creates an image with predictions of random images from the segment index and the model confidences.""" # result will have a format like this: [(real class, [(class, prediction for class), (class, prediction for class), ...], image)] results = [] for class_, prediction, image in self.__yield_image_predictions(segmentIndex, maxNumberOfImages=1, shuffle=True, slidingWindow=True): # convert image back to cv format if neccessary if not self.transformationBack is None: image = self.transformationBack(image) # take the first 4 predictions and turn them to percent (looks better) top4 = [(cls, p[0]*100.0) for cls, p in prediction[0:4]] top4.reverse() # convert the images from bgr to rgb if color if len(image.shape) > 2 and image.shape[2] != 1: image = cv.cvtColor(image, cv.COLOR_BGR2RGB) results.append((class_, top4, image)) # plot results rows = int((ceil(float(len(results)) / cols)) * 2) f, axarr = plt.subplots(rows, cols) f.set_size_inches(int(cols*4),int((rows/2)*5)) f.suptitle(str(self.classifier), fontsize=20) i = 0 for y in range(0, rows, 2): for x in range(cols): if i >= len(results): # disable axis for empty images axarr[y, x].axis('off') axarr[y+1, x].axis('off') continue if self.classifier.grayscale: axarr[y, x].imshow(results[i][2], cmap = cm.Greys_r) else: axarr[y, x].imshow(results[i][2]) axarr[y, x].set_title(results[i][0]) axarr[y, x].axis('off') # add bars barPlotValues = zip(*results[i][1]) # barPlotValues[0] = labels, barPlotValues[1] = prediction values positions = np.arange(0, 0.8, 0.2) bars = axarr[y+1, x].barh(positions, barPlotValues[1], align='center', height=0.2, color=Settings.G_COLOR_PALETTE[0], linewidth=0) # color bar of correct result differently if results[i][0] in barPlotValues[0]: correctBarIndex = barPlotValues[0].index(results[i][0]) bars[correctBarIndex].set_color(Settings.G_COLOR_PALETTE[3]) for class_, yPos in zip(barPlotValues[0], positions): axarr[y+1, x].text(95, yPos, class_, 
horizontalalignment='right', verticalalignment='center', fontsize=8) axarr[y+1, x].axis('off') axarr[y+1, x].set_xlim([0, 100]) i += 1 name = "RandomResults_" + self.classifier.name + "_" + utils.get_uuid() utils.save_plt_figure(plt, name, self.classifier.modelSaver.get_save_path_for_visualizations()) raw_input("Random results plotting complete. Press any key to continue") def __test_top_n_prediction(self, class_, predictions): """ Computes the top-N predictions.""" topN = [] for n in range(1, len(predictions)): # take n-size slice out of predictions and create list without the confidence. # the result should look something like this for the top 3 ["bread", "chocolate", "rice"] if the list looks like this # ["bread", "chocolate", "rice", "noodles", "ice", ...]. topNPredictions = [c for (c, _) in predictions[:n]] if class_ in topNPredictions: topN.append(1) else: topN.append(0) return topN def yield_top_n_results_as_list(self, results): """ Returns a generator that yields the top-N results.""" for segment in results: result = results[segment] # Iterate through classPredictions and display the top-n categories for class_ in result[4]: classPredictions = result[4][class_] topN = [] for accuracy in classPredictions: topN.append(accuracy) yield (segment, class_, topN) def format_results_string(self, results): """ Formats the results and creates a string that can be saved or printed to the console.""" output = "" #overallAccuracy, classPredictions = results output += "\n\n\nTest report for " + self.classifier.name + "\n" detailedOutput = "\n\nDetailed report:" outputRows = [] for segment in results: result = results[segment] outputRows.append([segment, result[1], result[0], result[2], result[3]]) #detailed output: detailedOutput += "\n\n********************************************************\nSegment " + segment + "\n" detailedOutputRows = [] # Iterate through classPredictions and display the top5 categories for class_ in result[4]: classPredictions = result[4][class_] detailedRow = [class_] for accuracy in classPredictions: detailedRow.append(accuracy) detailedOutputRows.append(detailedRow) detailedOutputTitle = ["class"] detailedOutputTitle.extend(self.__get_top_title()) detailedOutput += utils.get_table(detailedOutputTitle, 6, *detailedOutputRows).get_string() output += utils.get_table(["segment", "segment_loss", "segment_accuracy", "top-2", "flop-2"], 6, *outputRows).get_string() output += detailedOutput return output def __get_top_title(self): """ Returns the Top-N title used for the csv output.""" return ["Top " + str(n+1) for n in range(self.classifier.testData.numberOfClasses-1)] def export_results_csv(self, results, confMatrices): """ Exports the results to a csv file.""" writer = self.classifier.modelSaver.get_csv_exporter() # export test data stats writer.export(self.classifier.testData.export_test_data_information(), "testDataStats") # get mean / std images if pre computed mean = self.classifier.testData.mean_image if not mean is None: # there is propably also a std image std = self.classifier.testData.std_image cv.imwrite(self.classifier.modelSaver.get_save_path_for_visualizations() + "testDataMeanImage.jpg", mean) cv.imwrite(self.classifier.modelSaver.get_save_path_for_visualizations() + "testDataStdImage.jpg", std) # export conf matrices and results iterationOutput = [] iterationOutputTitle = ["iteration", "segment", "segment loss", "segment accuracy"] iterationOutputTitle.extend([class_ + " t1 accuracy" for class_ in self.classifier.testData.classes]) 
iterationOutput.append(iterationOutputTitle) for iteration in xrange(len(results)): if iteration < len(confMatrices): self.export_confusion_matrix_as_csv(confMatrices[iteration], fileName="ConfusionMatrix_iteration" + str(iteration+1)) try: iterationResults = results[iteration] except: # could not extract iterationResults because in this case results does not contain a list of iterations because it had only one iteration. # This shouldn't happen -> FIXME return for segment in iterationResults: result = iterationResults[segment] iterationOutputRow = [iteration+1, segment, result[1], result[0]] for class_ in self.classifier.testData.classes: iterationOutputRow.append(result[4][class_][0]) iterationOutput.append(iterationOutputRow) # export precision recall precisionRecallValues = result[5] # precisionRecallValues[class_] = (precisionValues, recallValues) for class_ in precisionRecallValues: precisionCSV = [["Top-N", "precision", "recall"]] precisionValues, recallValues = precisionRecallValues[class_] for i in xrange(len(precisionValues)): precisionCSV.append([i+1, precisionValues[i], recallValues[i]]) writer.export(precisionCSV, "{0}_PrecisionRecall_{1}".format(segment, class_)) # export top-n results segmentTopResults = [] segmentTopResultsTitle = ["segment", "class"] segmentTopResultsTitle.extend(self.__get_top_title()) segmentTopResults.append(segmentTopResultsTitle) for (sgmt, class_, topN) in self.yield_top_n_results_as_list(iterationResults): segmentTopResultsRow = [sgmt, class_] segmentTopResultsRow.extend(topN) segmentTopResults.append(segmentTopResultsRow) writer.export(segmentTopResults, name="iteration_" + str(iteration+1) + "_topN") writer.export(iterationOutput, name="detailedResults") def save_results(self, results, exportToCSV=True): """ Exports the result string to a text file and saves the results to csv if exportToCSV is True.""" path = self.classifier.modelSaver.get_save_path() resultString = self.format_results_string(results) with open(path + "Results.txt", "w") as f: f.write(resultString) if exportToCSV: self.export_results_csv(results, []) def plot_confusion_matrix(self, save=True, show=True, confMatrix=None): """ Plots a confusion matrix and saves the image. Keyword arguments: save -- Save confusion matrix show -- Show confusion matrix. Only works locally or via vcn. confMatrix -- precomputed confusion matrix - Default: Compute new. """ if confMatrix is None: confMatrix = self.compute_confusion_matrix() # normalize matrix normConfMatrix = [] for i in confMatrix: a = sum(i, 0) temp = [] for j in i: temp.append(float(j)/float(a)) normConfMatrix.append(temp) # can we plot labels? Only plot labels if we have less than 10 classes showLables = len(confMatrix[0]) < 10 # we can not create the figure on the server since tkinter does not work because the server does not have a display output. # in this case we save the confusion matrix which we can load on a machine with a display to create the plot from there. 
try: # create figure and clear it fig = plt.figure() plt.clf() ax = fig.add_subplot(111) ax.set_aspect(1) res = ax.imshow(np.array(normConfMatrix), cmap=plt.cm.jet, interpolation='nearest') if showLables: w = len(confMatrix) h = len(confMatrix[0]) for x in xrange(w): for y in xrange(h): if normConfMatrix[x][y] > 0: ax.annotate(str(confMatrix[x][y]), xy=(y, x), horizontalalignment='center', verticalalignment='center') plt.xticks(range(w), self.classifier.testData.classes) plt.yticks(range(h), self.classifier.testData.classes) else: plt.xticks([]),plt.yticks([]) cb = fig.colorbar(res) if show: plt.show() if save: utils.save_plt_figure(plt, "conf_matrix_{0}".format(self.classifier.name)) except Exception, e: path = utils.get_temp_path() + "ConfMatrix.tmp" logging.exception("Error while saving confusion matrix. Saving results in {0}.".format(path)) self.export_confusion_matrix_as_csv(confMatrix) def calculate_confusion_score(self, confMatrix=None): """ Calculates the sum the the diagonal of the confusion matrix. This is the number of correctly classified images. """ if confMatrix is None: confMatrix = self.compute_confusion_matrix() diagonalSum = np.trace(confMatrix) return diagonalSum def export_confusion_matrix_as_csv(self, confMatrix=None, fileName="ConfusionMatrix"): """ Exports the confusion matrix to csv. Keyword arguments: confMatrix -- precomputed confusion matrix """ if confMatrix is None: confMatrix = self.compute_confusion_matrix() writer = self.classifier.modelSaver.get_csv_exporter() writer.export(confMatrix, fileName) # export keys convKeys = [range(self.classifier.testData.numberOfClasses)] convKeys.append(self.classifier.testData.classes) writer.export(convKeys, fileName + "_Keys") def compute_confusion_matrix(self, export=True): """ Computes the confusion matrix for the classifier using the test segmentindex. """ # construct the confusion matrix confusionMatrix = np.zeros((self.classifier.testData.numberOfClasses, self.classifier.testData.numberOfClasses)) classes = self.classifier.testData.classes classIndex = 0 for class_, predictions in self.__yield_class_predictions("test"): for prediction in predictions: predictedClass, _ = prediction[0] confusionMatrix[classIndex][classes.index(predictedClass)] += 1 classIndex += 1 if export: self.export_confusion_matrix_as_csv(confusionMatrix) return confusionMatrix def classify_image_folder(self, path): """ Classifies images from a folder from a given path and prints the top-1 prediction on the console.""" if not path.endswith("/"): path += "/" if not utils.check_if_dir_exists(path): raise Exception("Path '{0}' does not exist.".format(path)) from os import walk # Load filenames _, _, filenames = walk(path).next() # Load images #Load flag for cv.imread. 
loadFlag = cv.IMREAD_GRAYSCALE if self.classifier.grayscale else cv.IMREAD_UNCHANGED from data_io.testdata import load_image for imgName in filenames: imgPath = path + imgName img = load_image(imgPath, loadFlag, 1) if self.size != (-1, -1): img = utils.crop_to_square(img) desiredArea = self.size[0] * self.size[1] img = utils.equalize_image_size(img, desiredArea) if not self.transformation is None: img = self.transformation(img) prediction = self.classifier.predict([img]) print "Img {0}: {1}".format(imgName, prediction[0])# only top-1 prediction def classify_webcam(self): """ Classifies frames from the webcam.""" cam = cv.VideoCapture(0) while True: ret_val, img = cam.read() cv.imshow('TUM FoodCam', img) try: prediction = self.classifier.predict([img]) print "{0}".format(prediction[0])# only top-1 prediction except: pass if cv.waitKey(1) == 27: break # esc to quit cv.destroyAllWindows()
nilq/baby-python
python
class Empty(Exception):
    """ Error when attempting to access an element from an empty queue """
    pass

#-----------------------------------------------------------------------------------------
#-----------------------------------------------------------------------------------------

class CircularQueue:
    """ Queue implementation using circularly linked list for storage """

    #-------------------------------------------------------------------------------------
    #-------------------------------------------------------------------------------------
    class _Node:
        """ Lightweight, non-public class for storing a singly linked list node """
        __slots__ = '_element', '_next'

        def __init__(self, element, next):
            self._element = element
            self._next = next
    #-------------------------------------------------------------------------------------
    #-------------------------------------------------------------------------------------

    def __init__(self):
        """ Create an empty queue """
        self._tail = None
        self._size = 0
    #-------------------------------------------------------------------------------------

    def __len__(self):
        """ Return the number of elements in the queue """
        return self._size
    #-------------------------------------------------------------------------------------

    def is_empty(self):
        """ Return True if the queue is empty """
        return self._size == 0
    #-------------------------------------------------------------------------------------

    def first(self):
        """ Return (but do not remove) the element at the front of the queue
            Raise Empty exception if the queue is empty """
        if self.is_empty():
            raise Empty('Queue is empty')
        head = self._tail._next
        return head._element
    #-------------------------------------------------------------------------------------

    def dequeue(self):
        """ Remove and return the first element of the queue
            Raise Empty exception if the queue is empty """
        if self.is_empty():
            raise Empty('Queue is empty')
        oldhead = self._tail._next
        if self._size == 1:
            self._tail = None
        else:
            self._tail._next = oldhead._next
        self._size -= 1
        return oldhead._element
    #-------------------------------------------------------------------------------------

    def enqueue(self, e):
        """ Add element to the back of the queue """
        newest = self._Node(e, None)
        if self.is_empty():
            newest._next = newest  # initially circular
        else:
            newest._next = self._tail._next
            self._tail._next = newest
        self._tail = newest
        self._size += 1
    #-------------------------------------------------------------------------------------

    def rotate(self):
        """ Rotate front element to the back of the queue """
        if self._size > 0:
            self._tail = self._tail._next
    #-------------------------------------------------------------------------------------

#-----------------------------------------------------------------------------------------
#-----------------------------------------------------------------------------------------
if __name__ == '__main__':

    cq = CircularQueue()
    s1 = len(cq)
    print('Initial Size : ', s1)

    cq.enqueue(10)
    cq.enqueue(20)
    cq.enqueue(30)
    cq.enqueue(40)
    cq.enqueue(50)
    cq.enqueue(60)
    s2 = len(cq)
    print('Size : ', s2)

    f1 = cq.first()
    print('First element : ', f1)

    d1 = cq.dequeue()
    print('Dequeued element : ', d1)
    s3 = len(cq)
    print('Size : ', s3)

    cq.rotate()
    f2 = cq.first()
    print('First element : ', f2)

#-----------------------------------------------------------------------------------------
#-----------------------------------------------------------------------------------------
"""
OUTPUT :

Initial Size :  0
Size :  6
First element :  10
Dequeued element :  10
Size :  5
First element :  30

"""
#-----------------------------------------------------------------------------------------
#-----------------------------------------------------------------------------------------
nilq/baby-python
python
# -*- coding: utf-8 from __future__ import unicode_literals, absolute_import import django DEBUG = True USE_TZ = True # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = "uh-v-hc=h7=%4(5g&f13217*!ja%osm%l0oyb$^n2kk^ij#&zj" DATABASES = { "default": { "ENGINE": "django.db.backends.sqlite3", "NAME": ":memory:", } } ROOT_URLCONF = "tests.urls" INSTALLED_APPS = [ "django.contrib.auth", "django.contrib.contenttypes", "django.contrib.sites", "django.contrib.staticfiles", "django_blockstack_auth", ] SITE_ID = 1 STATIC_URL = '/static/' if django.VERSION >= (1, 10): MIDDLEWARE = () else: MIDDLEWARE_CLASSES = () TEMPLATES = [ { "BACKEND": "django.template.backends.django.DjangoTemplates", 'APP_DIRS': True } ]
nilq/baby-python
python
import os
import pytest
from helpers.cluster import ClickHouseCluster
from helpers.network import PartitionManager
from helpers.test_tools import assert_eq_with_retry

CLICKHOUSE_DATABASE = 'test'


def initialize_database(nodes, shard):
    for node in nodes:
        node.query('''
        CREATE DATABASE {database};
        CREATE TABLE `{database}`.src (p UInt64, d UInt64)
        ENGINE = ReplicatedMergeTree('/clickhouse/{database}/tables/test_consistent_shard1{shard}/replicated', '{replica}')
        ORDER BY d PARTITION BY p
        SETTINGS min_replicated_logs_to_keep=3, max_replicated_logs_to_keep=5, cleanup_delay_period=0, cleanup_delay_period_random_add=0;
        CREATE TABLE `{database}`.dest (p UInt64, d UInt64)
        ENGINE = ReplicatedMergeTree('/clickhouse/{database}/tables/test_consistent_shard2{shard}/replicated', '{replica}')
        ORDER BY d PARTITION BY p
        SETTINGS min_replicated_logs_to_keep=3, max_replicated_logs_to_keep=5, cleanup_delay_period=0, cleanup_delay_period_random_add=0;
        '''.format(shard=shard, replica=node.name, database=CLICKHOUSE_DATABASE))


cluster = ClickHouseCluster(__file__)
node1 = cluster.add_instance('node1', main_configs=['configs/remote_servers.xml'], with_zookeeper=True)
node2 = cluster.add_instance('node2', main_configs=['configs/remote_servers.xml'], with_zookeeper=True)


@pytest.fixture(scope="module")
def start_cluster():
    try:
        cluster.start()
        initialize_database([node1, node2], 1)
        yield cluster
    except Exception as ex:
        print(ex)
    finally:
        cluster.shutdown()


def test_consistent_part_after_move_partition(start_cluster):
    # insert into all replicas
    for i in range(100):
        node1.query('INSERT INTO `{database}`.src VALUES ({value} % 2, {value})'.format(database=CLICKHOUSE_DATABASE, value=i))
    query_source = 'SELECT COUNT(*) FROM `{database}`.src'.format(database=CLICKHOUSE_DATABASE)
    query_dest = 'SELECT COUNT(*) FROM `{database}`.dest'.format(database=CLICKHOUSE_DATABASE)
    assert_eq_with_retry(node2, query_source, node1.query(query_source))
    assert_eq_with_retry(node2, query_dest, node1.query(query_dest))

    node1.query('ALTER TABLE `{database}`.src MOVE PARTITION 1 TO TABLE `{database}`.dest'.format(database=CLICKHOUSE_DATABASE))

    assert_eq_with_retry(node2, query_source, node1.query(query_source))
    assert_eq_with_retry(node2, query_dest, node1.query(query_dest))
nilq/baby-python
python
# Import the modules import sys import MinVel as mv import numpy as np # NOTES: May want to update temperature dependence of thermal expansivity using Holland and Powell's (2011) # new revised equations (see figure 1 in that article). This will necessitate recalculating the first # Gruneisen parameters. This could provide more realistic temperature dependence of material # properties within the mantle. if len(sys.argv) > 1: if sys.argv[1] == "-h": print('MinVel -- Program to calculate mineral aggregate moduli and density') print('') print(' Written by Oliver Boyd') print('') print(' This program calculates the velocity and density of a mineral assemblage ') print(' at a given pressure and temperature (which may be vectors).') print(' The velocities are expressed as Voigt, Reuss, and Voigt-Reuss-Hill averages.') print('') print(' The data required for this analysis is taken from Hacker and Abers (2003),') print(' updated by Abers and Hacker in 2016, and expanded by Boyd in 2018.') print(' The moduli at pressure and temperature are calculated based on the') print(' procedures of Hacker and Abers (2004), Bina and Helffrich (1992) and') print(' Holland and Powell (1998) as outlined in the supplementary section of ') print(' Boyd et al. (2004) with updates by Abers and Hacker (2016) for quartz.') print('') print(' OUTPUT (SI Units)') print(' results.npy - numpy binary file containing the following vectors:') print(' Voigt-Reuss-Hill averages') print(' K - Bulk modulus') print(' G - Shear modulus') print(' E - Youngs modulus') print(' l - Lambda') print(' v - Poissons ratio') print(' Vp - P-wave velocity') print(' Vs - S-wave velocity') print(' p - Density') print(' a - Thermal Expansivity') print(' Voigt(v) and Reuss(r) bounds on velocity') print(' Vpv - P-wave velocity') print(' Vpr - P-wave velocity') print(' Vsv - S-wave velocity') print(' Vsr - S-wave velocity') print('') print(' INPUTS') print(' Command line options') print(' -h Help about this program.') print('') print(' -f InputFile - File containing composition, temperature, and pressure ') print(' information with the following format') print(' MinIndx 1, MinIndx 2, ..., MinIndx N') print(' VolFrac 1, VolFrac 2, ..., VolFrac N') print(' T1, P1') print(' T2, P2') print(' ...') print(' TN, PN') print('') print(' -p Pressure - desired pressure or comma separated vector of pressures (Pa)') print(' -t Temperature - desired temperature or comma separated vector of temperatures (K)') print('') print(' Composition parmeters - a composition structure with the following fields: ') print(' -cm Min - The mineral index comma separated vector.') print(' -cv Fr - Volume fraction for each mineral in Min (0 to 1), comma separated.') print('') print(' Mineral Indexes') print(' Quartz') print(' 1. Alpha Quartz ') print(' 2. Beta Quartz ') print(' 3. Coesite ') print(' Feldspar group') print(' Plagioclase') print(' 4. High Albite ') print(' 5. Low Albite ') print(' 6. Anorthite ') print('') print(' 7. Orthoclase ') print(' 8. Sanidine ') print(' Garnet structural group') print(' 9. Almandine ') print(' 10. Grossular ') print(' 11. Pyrope ') print(' Olivine group') print(' 12. Forsterite ') print(' 13. Fayalite ') print(' Pyroxene group') print(' 14. Diopside ') print(' 15. Enstatite ') print(' 16. Ferrosilite ') print(' 79. Mg-Tschermak ') print(' 17. Jadeite ') print(' 18. Hedenbergite ') print(' 80. Acmite ') print(' 81. Ca-Tschermak ') print(' Amphibole supergroup') print(' 19. Glaucophane ') print(' 20. Ferroglaucophane ') print(' 21. 
Tremolite ') print(' 22. Ferroactinolite ') print(' 23. Tshermakite ') print(' 24. Pargasite ') print(' 25. Hornblende ') print(' 26. Anthophyllite ') print(' Mica group') print(' 27. Phlogopite ') print(' 28. Annite ') print(' 29. Muscovite ') print(' 30. Celadonite ') print(' Other') print(' 31. Talc ') print(' 32. Clinochlore ') print(' 33. Daphnite ') print(' 34. Antigorite ') print(' 35. Zoisite ') print(' 36. Clinozoisite ') print(' 37. Epidote ') print(' 38. Lawsonite ') print(' 39. Prehnite ') print(' 40. Pumpellyite ') print(' 41. Laumontite ') print(' 42. Wairakite ') print(' 43. Brucite ') print(' 44. Clinohumite ') print(' 45. Phase A ') print(' 46. Sillimanite ') print(' 47. Kyanite ') print(' 48. Spinel ') print(' 49. Hercynite ') print(' 50. Magnetite ') print(' 51. Calcite ') print(' 52. Aragonite ') print(' 82. Magnesite ') print(' 83. En79Fs09Ts12 ') print(' 84. Di75He9Jd3Ts12 ') print(' 85. ilmenite ') print(' 86. cordierite ') print(' 87. scapolite (meionite) ') print(' 88. rutile ') print(' 89. sphene ') print(' 53. Corundum ') print(' 54. Dolomite ') print(' 74. Halite ') print(' 77. Pyrite ') print(' 78. Gypsum ') print(' 90. Anhydrite ') print(' 0. Water ') print(' -1. Ice ') print(' Clays') print(' 55. Montmorillonite (Saz-1)') print(' 56. Montmorillonite (S Wy-2)') print(' 57. Montmorillonite (STX-1)') print(' 58. Montmorillonite (S Wy-1)') print(' 59. Montmorillonite (Shca-1)') print(' 60. Kaolinite (Kga-2)') print(' 61. Kaolinite (Kga-1b)') print(' 62. Illite (IMT-2)') print(' 63. Illite (ISMT-2)') print(' 66. Smectite (S Wa-1)') print(' 70. Montmorillonite (S YN-1)') print(' 71. Chrysotile ') print(' 72. Lizardite ') print(' 76. Dickite ') print('') print(' Example:'); print(' Geophysical parameters for 20% Quartz, 20% low Albite, 30% Forsterite, and 30% Fayalite at') print(' 300, 400, and 500K and 0.1, 0.3, and 0.5 MPa') print(' > python MinVelWrapper.py -t 300,400,500 -p 0.1e6,0.3e6,0.5e6 -cm 1,5,12,13 -cv 0.2,0.2,0.3,0.3') print('') sys.exit() nMin = 1 nPT = 1 nT = 0 nP = 0 if len(sys.argv) > 1: for j in range(1,len(sys.argv),2): if sys.argv[j] == "-t": entries = sys.argv[j+1].split(",") nT = len(entries) T = np.zeros((nT),dtype=np.float64) for k in range(0,nT): T[k] = entries[k] if sys.argv[j] == "-p": entries = sys.argv[j+1].split(",") nP = len(entries) P = np.zeros((nP),dtype=np.float64) for k in range(0,nP): P[k] = entries[k] if sys.argv[j] == "-cm": entries = sys.argv[j+1].split(",") nMin = len(entries) Cm = np.zeros((nMin),dtype=np.int8) for k in range(0,nMin): Cm[k] = entries[k] if sys.argv[j] == "-cv": entries = sys.argv[j+1].split(",") nFr = len(entries) Cv = np.zeros((nFr),dtype=np.float64) for k in range(0,nFr): Cv[k] = entries[k] if sys.argv[j] == "-f": fl = sys.argv[j+1] print('Reading {0:s}'.format(fl)) f = open(fl,"r") if f.mode == "r": nPT = 0 ln = 0 for line in f: line = line.strip() columns = line.split(",") if ln < 2: nMin = len(columns) else: nPT = nPT + 1 ln = ln + 1 nT = nPT nP = nPT nFr = nMin f.close() T = np.zeros((nPT),dtype=np.float64) P = np.zeros((nPT),dtype=np.float64) Cm = np.zeros((nMin),dtype=np.int8) Cv = np.zeros((nMin),dtype=np.float64) f = open(fl,"r") if f.mode == "r": ln = 0 jT = 0 for line in f: line = line.strip() columns = line.split(",") if ln == 0: for j in range(0,len(columns)): Cm[j] = columns[j] elif ln == 1: for j in range(0,len(columns)): Cv[j] = columns[j] else: T[jT] = columns[0] P[jT] = columns[1] jT = jT + 1 ln = ln + 1 f.close() # MAke sure volume fractions sum to 1 if sum(Cv) < 1: print('Composition does 
not sum to one. - Exiting') sys.exit() if nT != nP: print('Number of temperature inputs must be equal to the number of pressure inputs') sys.exit() else: nPT = nT if nMin != nFr: print('Number of minerals types must be equal to the number of mineral fractional volumes') sys.exit() Par, MinNames, nPar, nAllMin = mv.loadPar('../database/MineralPhysicsDatabase.nc') MinIndex = Par[0,:]; print('{0:21s}{1:20s}'.format('Mineral','Volume fraction')) for j in range(0,nMin): k = mv.find(MinIndex,Cm[j]); print(MinNames[:,k].tobytes().decode('utf-8'),'(',Cv[j],')') if nPT > 1: print('There are',nPT,'temperature and pressure points') else: print('Temperature',T) print('Pressure',P) print('') K, G, E, l, v, Vp, Vs, den, Vpv, Vpr, Vsv, Vsr, a = mv.CalcMV(Cm,Cv,T,P); print('K ',K) print('G ',G) print('E ',E) print('l ',l) print('v ',v) print('Vp ',Vp) print('Vs ',Vs) print('den',den) print('a ',a) print('') print('Voigt(v) and Reuss(r) bounds on velocity') print('Vpv',Vpv) print('Vpr',Vpr) print('Vsv',Vsv) print('Vsr',Vsr) print('') res = np.zeros((13,nPT),dtype=np.float64) res[0,:] = K res[1,:] = G res[2,:] = E res[3,:] = l res[4,:] = v res[5,:] = Vp res[6,:] = Vs res[7,:] = den res[8,:] = a res[9,:] = Vpv res[10,:] = Vpr res[11,:] = Vsv res[12,:] = Vsr f = 'results.npy' np.save(f,res) sys.exit()
nilq/baby-python
python
import os import sys import cv2 import numpy as np from PyQt5.QtCore import pyqtSlot, QThreadPool, QTimer from PyQt5.QtWidgets import * from PyQt5 import QtCore from PyQt5.QtGui import * from src.transformers.Transformer import Transformer, getTransformer from src.util.UserInterface.ControlBox import ControlBox from src.util.UserInterface.Display import Display from src.util.UserInterface.DisplayWorker import DisplayWorker from src.util.UserInterface.RadioBox import RadioBox from src.util.UserInterface.ReferenceCarousel import ReferenceCarousel from src.util.UserInterface.Result import Result from src.util.UserInterface.StartScreen import StartScreen from src.util.UserInterface.TransformWorker import TransformWorker from src.util.UserInterface.TypeSelector import TypeSelector from src.util.capture import Capture BASE_DIR = os.path.dirname(os.path.abspath(__file__)) ref_images = next(os.walk(BASE_DIR + '/../../ref_images'), (None, None, []))[2] NOT_FOUND: QPixmap T: Transformer def set_align_center(x: QWidget) -> QWidget: x.setAlignment(QtCore.Qt.AlignCenter) return x def get_qimage(path: str) -> QPixmap: qimage = QPixmap() qimage.load(path, flags=QtCore.Qt.AutoColor) return qimage class MainWindow(QMainWindow): def __init__(self): super().__init__() self.window_stack = QStackedWidget(self) self.start_screen = StartScreen() self.display_worker = DisplayWorker(capture) self.display = Display() self.radio_box = RadioBox() self.reference_carousel = ReferenceCarousel(ref_images) self.control_box = ControlBox() self.type_selector = TypeSelector(ref_images) self.result = Result() self.transform_worker = TransformWorker(capture, T) self.setWindowTitle("HAiR") self.setGeometry(0, 0, 1920, 1080) self.setup() @pyqtSlot() def start_signal(self): self.window_stack.setCurrentIndex(1) self.type_selector.initialize() self.control_box.initialize() self.display_worker.go = True self.display_worker.start() @pyqtSlot() def close_signal(self): self.close() @pyqtSlot() def result_signal(self): # deprecated dont use self.window_stack.setCurrentIndex(2) self.display_worker.go = False @pyqtSlot(int) def ref_select(self, index: int): self.type_selector.set_reference(self.radio_box.type, index) if self.radio_box.type == "머리 색상": T.set_appearance_ref(ref_images[index][0]) else: T.set_shape_ref(ref_images[index][0]) T.set_structure_ref(ref_images[index][0]) @pyqtSlot(str) def ref_unselect(self, ref_type: str) -> None: if ref_type == "머리 색상": T.set_appearance_ref(None) else: T.set_shape_ref(None) T.set_structure_ref(None) @pyqtSlot(QPixmap) def get_image(self, image: QPixmap): self.display.set_image(image) @pyqtSlot() def back_to_start_signal(self): self.window_stack.setCurrentIndex(0) @pyqtSlot() def qr_done_signal(self): self.window_stack.setCurrentIndex(0) @pyqtSlot(int) def result_clicked_signal(self, timestamp: int): self.qr_result.set(timestamp) self.window_stack.setCurrentIndex(3) @pyqtSlot() def transform_signal(self): self.control_box.transform_button.setDisabled(True) self.control_box.set_processing() pool = QThreadPool.globalInstance() pool.start(self.transform_worker) self.transform_worker = TransformWorker(capture, transformer=T) self.transform_worker.signal.transformed.connect(self.transformed_signal) @pyqtSlot(np.ndarray) def transformed_signal(self, image: np.ndarray): if image.ndim == 1: # when failed self.control_box.set_error() QTimer().singleShot(2000, self.control_box.set_ready) else: self.control_box.set_ready() self.control_box.result_button.setDisabled(False) self.result.set(image) 
self.control_box.transform_button.setDisabled(False) def setup(self): # Start Screen self.start_screen.start.connect(self.start_signal) self.start_screen.close.connect(self.close_signal) # DISPLAY self.display_worker.finished.connect(self.get_image) # REF CAROUSEL [i.selected_reference.connect(self.ref_select) for i in self.reference_carousel.carousel] # TYPE SELECTOR [i.unselect.connect(self.ref_unselect) for i in self.type_selector.selectors.values()] # CONTROL BOX self.control_box.result.connect(self.result_signal) self.control_box.transform.connect(self.transform_signal) self.control_box.close.connect(self.close_signal) # QR result self.result.qr_done.connect(self.qr_done_signal) # Transform thread self.transform_worker.signal.transformed.connect(self.transformed_signal) # setup UI start = QWidget(self) start.setLayout(self.start_screen) self.setCentralWidget(self.window_stack) transform = QWidget(self) transform_window = set_align_center(QHBoxLayout()) left_box = set_align_center(QVBoxLayout()) right_box = set_align_center(QVBoxLayout()) left_box.addLayout(self.display, 1) left_box.addWidget(self.radio_box) left_box.addLayout(self.reference_carousel, 1) right_box.addLayout(self.type_selector, 3) right_box.addLayout(self.control_box, 1) transform_window.addStretch(1) transform_window.addLayout(left_box, 8) transform_window.addLayout(right_box, 4) transform.setLayout(transform_window) self.window_stack.addWidget(start) # 0 self.window_stack.addWidget(transform) # 1 self.window_stack.addWidget(self.result) # 2 if __name__ == "__main__": T = getTransformer() capture = Capture(0) app = QApplication(sys.argv) ref_images = list( map(lambda x: [ cv2.imread(BASE_DIR + '/../../ref_images/' + x), get_qimage(BASE_DIR + '/../../ref_images/' + x) ], ref_images) ) ref_images.append( [ cv2.imread(BASE_DIR + '/image_not_selected.png'), get_qimage(BASE_DIR + '/image_not_selected.png') ] ) mainWindow = MainWindow() mainWindow.showFullScreen() ret = app.exec_() sys.exit(ret)
nilq/baby-python
python
# coding: utf-8 # Copyright (c) 2016, 2021, Oracle and/or its affiliates. All rights reserved. # This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license. # This script provides a basic example of how to use the Health Checks service. # Please review the documentation for more information about # how Health Checks works, including permissions needed. # # https://docs.cloud.oracle.com/iaas/Content/HealthChecks/Concepts/healthchecks.htm import oci from datetime import datetime # Helper to format dates def format_time(timestamp): # Will be ticks, not seconds from epoch return datetime.utcfromtimestamp(timestamp / 1000).strftime('%Y-%m-%d %H:%M:%S') # Default config file and profile config = oci.config.from_file() healthchecks_client = oci.healthchecks.HealthChecksClient(config) # This is the root compartment. You can use another compartment in your tenancy. compartment_id = config["tenancy"] # List of available vantage points vantage_points = healthchecks_client.list_health_checks_vantage_points().data # HttpMonitors examples # Creating a new HttpMonitor: http_monitor = healthchecks_client.create_http_monitor( oci.healthchecks.models.CreateHttpMonitorDetails( compartment_id=compartment_id, display_name="Monitor Name", targets=["example.com"], protocol="HTTPS", vantage_point_names=[vantage_points[0].name], # If not specified we will auto assign 3 vantage points port=443, path="/", is_enabled=False, interval_in_seconds=30, timeout_in_seconds=30 ) ).data # Updating an existing monitor: # Note: You only need to specify any properties you wish to change. # It returns the updated monitor. http_monitor = healthchecks_client.update_http_monitor( monitor_id=http_monitor.id, update_http_monitor_details=oci.healthchecks.models.UpdateHttpMonitorDetails( targets=["example.com", "other.example.com"], is_enabled=True ) ).data print('Display Name: {}, isEnabled: {}'.format(http_monitor.display_name, http_monitor.is_enabled)) # Retrieving monitor results: # There's a pagination helper to get all the pages for you. http_monitor_results = oci.pagination.list_call_get_all_results(healthchecks_client.list_http_probe_results, http_monitor.id) for monitor_result in http_monitor_results.data: print('Result: {}, Start Time: {}, isHealthy: {}'.format(monitor_result.target, format_time(monitor_result.start_time), monitor_result.is_healthy)) # To change the compartment: healthchecks_client.change_http_monitor_compartment( monitor_id=http_monitor.id, change_http_monitor_compartment_details=oci.healthchecks.models.ChangeHttpMonitorCompartmentDetails( compartment_id="NEW_COMPARTMENT_ID" ) ) # The delete will have no return if successful healthchecks_client.delete_http_monitor(monitor_id=http_monitor.id) # PingMonitors examples # Creating a new PingMonitor: ping_monitor = healthchecks_client.create_ping_monitor( oci.healthchecks.models.CreatePingMonitorDetails( compartment_id=compartment_id, display_name="Monitor Name", targets=["example.com"], protocol="ICMP", vantage_point_names=[vantage_points[0].name], # If not specified we will auto assign 3 vantage points is_enabled=False, interval_in_seconds=30, timeout_in_seconds=30 ) ).data # Updating an existing monitor: # Note: You only need to specify any properties you wish to change. # It returns the updated monitor. 
ping_monitor = healthchecks_client.update_ping_monitor( monitor_id=ping_monitor.id, update_ping_monitor_details=oci.healthchecks.models.UpdatePingMonitorDetails( targets=["example.com", "other.example.com"], is_enabled=True ) ).data print('Display Name: {}, isEnabled: {}'.format(ping_monitor.display_name, ping_monitor.is_enabled)) # Retrieving monitor results: # There's a pagination helper to get all the pages for you. ping_monitor_results = oci.pagination.list_call_get_all_results(healthchecks_client.list_ping_probe_results, ping_monitor.id) for monitor_result in ping_monitor_results.data: print('Result: {}, Start Time: {}, isHealthy: {}'.format(monitor_result.target, format_time(monitor_result.start_time), monitor_result.is_healthy)) # To change the compartment: healthchecks_client.change_ping_monitor_compartment( monitor_id=ping_monitor.id, change_ping_monitor_compartment_details=oci.healthchecks.models.ChangePingMonitorCompartmentDetails( compartment_id="NEW_COMPARTMENT_ID" ) ) # The delete will have no return if successful healthchecks_client.delete_ping_monitor(monitor_id=ping_monitor.id)
nilq/baby-python
python
"""Metrics to assess performance on sequence labeling task given prediction Functions named as ``*_score`` return a scalar value to maximize: the higher the better Reference: seqeval==0.0.19 """ from __future__ import absolute_import, division, print_function import warnings from collections import defaultdict import numpy as np def get_entities(seq, suffix=False): """Gets entities from sequence. Args: seq (list): sequence of labels. Returns: list: list of (chunk_type, chunk_start, chunk_end). Example: >>> from seqeval.metrics.sequence_labeling import get_entities >>> seq = ['B-PER', 'I-PER', 'O', 'B-LOC'] >>> get_entities(seq) [('PER', 0, 1), ('LOC', 3, 3)] """ def _validate_chunk(chunk, suffix): if chunk in ["O", "B", "I", "E", "S"]: return if suffix: if not ( chunk.endswith("-B") or chunk.endswith("-I") or chunk.endswith("-E") or chunk.endswith("-S") ): warnings.warn("{} seems not to be NE tag.".format(chunk)) else: if not ( chunk.startswith("B-") or chunk.startswith("I-") or chunk.startswith("E-") or chunk.startswith("S-") ): warnings.warn("{} seems not to be NE tag.".format(chunk)) # for nested list if any(isinstance(s, list) for s in seq): seq = [item for sublist in seq for item in sublist + ["O"]] prev_tag = "O" prev_type = "" begin_offset = 0 chunks = [] for i, chunk in enumerate(seq + ["O"]): _validate_chunk(chunk, suffix) if suffix: tag = chunk[-1] type_ = chunk[:-1].rsplit("-", maxsplit=1)[0] or "_" else: tag = chunk[0] type_ = chunk[1:].split("-", maxsplit=1)[-1] or "_" if end_of_chunk(prev_tag, tag, prev_type, type_): chunks.append((prev_type, begin_offset, i - 1)) if start_of_chunk(prev_tag, tag, prev_type, type_): begin_offset = i prev_tag = tag prev_type = type_ return chunks def end_of_chunk(prev_tag, tag, prev_type, type_): """Checks if a chunk ended between the previous and current word. Args: prev_tag: previous chunk tag. tag: current chunk tag. prev_type: previous type. type_: current type. Returns: chunk_end: boolean. """ chunk_end = False if prev_tag == "E": chunk_end = True if prev_tag == "S": chunk_end = True if prev_tag == "B" and tag == "B": chunk_end = True if prev_tag == "B" and tag == "S": chunk_end = True if prev_tag == "B" and tag == "O": chunk_end = True if prev_tag == "I" and tag == "B": chunk_end = True if prev_tag == "I" and tag == "S": chunk_end = True if prev_tag == "I" and tag == "O": chunk_end = True if prev_tag != "O" and prev_tag != "." and prev_type != type_: chunk_end = True return chunk_end def start_of_chunk(prev_tag, tag, prev_type, type_): """Checks if a chunk started between the previous and current word. Args: prev_tag: previous chunk tag. tag: current chunk tag. prev_type: previous type. type_: current type. Returns: chunk_start: boolean. """ chunk_start = False if tag == "B": chunk_start = True if tag == "S": chunk_start = True if prev_tag == "E" and tag == "E": chunk_start = True if prev_tag == "E" and tag == "I": chunk_start = True if prev_tag == "S" and tag == "E": chunk_start = True if prev_tag == "S" and tag == "I": chunk_start = True if prev_tag == "O" and tag == "E": chunk_start = True if prev_tag == "O" and tag == "I": chunk_start = True if tag != "O" and tag != "." and prev_type != type_: chunk_start = True return chunk_start def f1_score(y_true, y_pred, average="micro", suffix=False): """Compute the F1 score. The F1 score can be interpreted as a weighted average of the precision and recall, where an F1 score reaches its best value at 1 and worst score at 0. 
The relative contribution of precision and recall to the F1 score are equal. The formula for the F1 score is:: F1 = 2 * (precision * recall) / (precision + recall) Args: y_true : 2d array. Ground truth (correct) target values. y_pred : 2d array. Estimated targets as returned by a tagger. Returns: score : float. Example: >>> from seqeval.metrics import f1_score >>> y_true = [['O', 'O', 'O', 'B-MISC', 'I-MISC', 'I-MISC', 'O'], ['B-PER', 'I-PER', 'O']] >>> y_pred = [['O', 'O', 'B-MISC', 'I-MISC', 'I-MISC', 'I-MISC', 'O'], ['B-PER', 'I-PER', 'O']] >>> f1_score(y_true, y_pred) 0.50 """ true_entities = set(get_entities(y_true, suffix)) pred_entities = set(get_entities(y_pred, suffix)) nb_correct = len(true_entities & pred_entities) nb_pred = len(pred_entities) nb_true = len(true_entities) p = nb_correct / nb_pred if nb_pred > 0 else 0 r = nb_correct / nb_true if nb_true > 0 else 0 score = 2 * p * r / (p + r) if p + r > 0 else 0 return score def accuracy_score(y_true, y_pred): """Accuracy classification score. In multilabel classification, this function computes subset accuracy: the set of labels predicted for a sample must *exactly* match the corresponding set of labels in y_true. Args: y_true : 2d array. Ground truth (correct) target values. y_pred : 2d array. Estimated targets as returned by a tagger. Returns: score : float. Example: >>> from seqeval.metrics import accuracy_score >>> y_true = [['O', 'O', 'O', 'B-MISC', 'I-MISC', 'I-MISC', 'O'], ['B-PER', 'I-PER', 'O']] >>> y_pred = [['O', 'O', 'B-MISC', 'I-MISC', 'I-MISC', 'I-MISC', 'O'], ['B-PER', 'I-PER', 'O']] >>> accuracy_score(y_true, y_pred) 0.80 """ if any(isinstance(s, list) for s in y_true): y_true = [item for sublist in y_true for item in sublist] y_pred = [item for sublist in y_pred for item in sublist] nb_correct = sum(y_t == y_p for y_t, y_p in zip(y_true, y_pred)) nb_true = len(y_true) score = nb_correct / nb_true return score def precision_score(y_true, y_pred, average="micro", suffix=False): """Compute the precision. The precision is the ratio ``tp / (tp + fp)`` where ``tp`` is the number of true positives and ``fp`` the number of false positives. The precision is intuitively the ability of the classifier not to label as positive a sample. The best value is 1 and the worst value is 0. Args: y_true : 2d array. Ground truth (correct) target values. y_pred : 2d array. Estimated targets as returned by a tagger. Returns: score : float. Example: >>> from seqeval.metrics import precision_score >>> y_true = [['O', 'O', 'O', 'B-MISC', 'I-MISC', 'I-MISC', 'O'], ['B-PER', 'I-PER', 'O']] >>> y_pred = [['O', 'O', 'B-MISC', 'I-MISC', 'I-MISC', 'I-MISC', 'O'], ['B-PER', 'I-PER', 'O']] >>> precision_score(y_true, y_pred) 0.50 """ true_entities = set(get_entities(y_true, suffix)) pred_entities = set(get_entities(y_pred, suffix)) nb_correct = len(true_entities & pred_entities) nb_pred = len(pred_entities) score = nb_correct / nb_pred if nb_pred > 0 else 0 return score def recall_score(y_true, y_pred, average="micro", suffix=False): """Compute the recall. The recall is the ratio ``tp / (tp + fn)`` where ``tp`` is the number of true positives and ``fn`` the number of false negatives. The recall is intuitively the ability of the classifier to find all the positive samples. The best value is 1 and the worst value is 0. Args: y_true : 2d array. Ground truth (correct) target values. y_pred : 2d array. Estimated targets as returned by a tagger. Returns: score : float. 
Example: >>> from seqeval.metrics import recall_score >>> y_true = [['O', 'O', 'O', 'B-MISC', 'I-MISC', 'I-MISC', 'O'], ['B-PER', 'I-PER', 'O']] >>> y_pred = [['O', 'O', 'B-MISC', 'I-MISC', 'I-MISC', 'I-MISC', 'O'], ['B-PER', 'I-PER', 'O']] >>> recall_score(y_true, y_pred) 0.50 """ true_entities = set(get_entities(y_true, suffix)) pred_entities = set(get_entities(y_pred, suffix)) nb_correct = len(true_entities & pred_entities) nb_true = len(true_entities) score = nb_correct / nb_true if nb_true > 0 else 0 return score def performance_measure(y_true, y_pred): """ Compute the performance metrics: TP, FP, FN, TN Args: y_true : 2d array. Ground truth (correct) target values. y_pred : 2d array. Estimated targets as returned by a tagger. Returns: performance_dict : dict Example: >>> from seqeval.metrics import performance_measure >>> y_true = [['O', 'O', 'O', 'B-MISC', 'I-MISC', 'O', 'B-ORG'], ['B-PER', 'I-PER', 'O', 'B-PER']] >>> y_pred = [['O', 'O', 'B-MISC', 'I-MISC', 'I-MISC', 'O', 'O'], ['B-PER', 'I-PER', 'O', 'B-MISC']] >>> performance_measure(y_true, y_pred) {'TP': 3, 'FP': 3, 'FN': 1, 'TN': 4} """ performance_dict = dict() if any(isinstance(s, list) for s in y_true): y_true = [item for sublist in y_true for item in sublist] y_pred = [item for sublist in y_pred for item in sublist] performance_dict["TP"] = sum( y_t == y_p for y_t, y_p in zip(y_true, y_pred) if ((y_t != "O") or (y_p != "O")) ) performance_dict["FP"] = sum( ((y_t != y_p) and (y_p != "O")) for y_t, y_p in zip(y_true, y_pred) ) performance_dict["FN"] = sum( ((y_t != "O") and (y_p == "O")) for y_t, y_p in zip(y_true, y_pred) ) performance_dict["TN"] = sum( (y_t == y_p == "O") for y_t, y_p in zip(y_true, y_pred) ) return performance_dict def classification_report(y_true, y_pred, digits=2, suffix=False, output_dict=False): """Build a text report showing the main classification metrics. Args: y_true : 2d array. Ground truth (correct) target values. y_pred : 2d array. Estimated targets as returned by a classifier. digits : int. Number of digits for formatting output floating point values. output_dict : bool(default=False). If True, return output as dict else str. Returns: report : string/dict. Summary of the precision, recall, F1 score for each class. 
Examples: >>> from seqeval.metrics import classification_report >>> y_true = [['O', 'O', 'O', 'B-MISC', 'I-MISC', 'I-MISC', 'O'], ['B-PER', 'I-PER', 'O']] >>> y_pred = [['O', 'O', 'B-MISC', 'I-MISC', 'I-MISC', 'I-MISC', 'O'], ['B-PER', 'I-PER', 'O']] >>> print(classification_report(y_true, y_pred)) precision recall f1-score support <BLANKLINE> MISC 0.00 0.00 0.00 1 PER 1.00 1.00 1.00 1 <BLANKLINE> micro avg 0.50 0.50 0.50 2 macro avg 0.50 0.50 0.50 2 weighted avg 0.50 0.50 0.50 2 <BLANKLINE> """ true_entities = set(get_entities(y_true, suffix)) pred_entities = set(get_entities(y_pred, suffix)) name_width = 0 d1 = defaultdict(set) d2 = defaultdict(set) for e in true_entities: d1[e[0]].add((e[1], e[2])) name_width = max(name_width, len(e[0])) for e in pred_entities: d2[e[0]].add((e[1], e[2])) avg_types = ["micro avg", "macro avg", "weighted avg"] if output_dict: report_dict = dict() else: avg_width = max([len(x) for x in avg_types]) width = max(name_width, avg_width, digits) headers = ["precision", "recall", "f1-score", "support"] head_fmt = "{:>{width}s} " + " {:>9}" * len(headers) report = head_fmt.format("", *headers, width=width) report += "\n\n" row_fmt = "{:>{width}s} " + " {:>9.{digits}f}" * 3 + " {:>9}\n" ps, rs, f1s, s = [], [], [], [] for type_name in sorted(d1.keys()): true_entities = d1[type_name] pred_entities = d2[type_name] nb_correct = len(true_entities & pred_entities) nb_pred = len(pred_entities) nb_true = len(true_entities) p = nb_correct / nb_pred if nb_pred > 0 else 0 r = nb_correct / nb_true if nb_true > 0 else 0 f1 = 2 * p * r / (p + r) if p + r > 0 else 0 if output_dict: report_dict[type_name] = { "precision": p, "recall": r, "f1-score": f1, "support": nb_true, } else: report += row_fmt.format( *[type_name, p, r, f1, nb_true], width=width, digits=digits ) ps.append(p) rs.append(r) f1s.append(f1) s.append(nb_true) if not output_dict: report += "\n" # compute averages nb_true = np.sum(s) for avg_type in avg_types: if avg_type == "micro avg": # micro average p = precision_score(y_true, y_pred, suffix=suffix) r = recall_score(y_true, y_pred, suffix=suffix) f1 = f1_score(y_true, y_pred, suffix=suffix) elif avg_type == "macro avg": # macro average p = np.average(ps) r = np.average(rs) f1 = np.average(f1s) elif avg_type == "weighted avg": # weighted average p = np.average(ps, weights=s) r = np.average(rs, weights=s) f1 = np.average(f1s, weights=s) else: assert False, "unexpected average: {}".format(avg_type) if output_dict: report_dict[avg_type] = { "precision": p, "recall": r, "f1-score": f1, "support": nb_true, } else: report += row_fmt.format( *[avg_type, p, r, f1, nb_true], width=width, digits=digits ) if output_dict: return report_dict else: return report
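# --- Added usage sketch (not part of the original module) ---
# output_dict=True returns the same numbers as a nested dict, which is easier
# to log or serialize than the formatted string shown in the docstring above.
if __name__ == "__main__":
    y_true = [['O', 'O', 'O', 'B-MISC', 'I-MISC', 'I-MISC', 'O'], ['B-PER', 'I-PER', 'O']]
    y_pred = [['O', 'O', 'B-MISC', 'I-MISC', 'I-MISC', 'I-MISC', 'O'], ['B-PER', 'I-PER', 'O']]
    report = classification_report(y_true, y_pred, output_dict=True)
    print(report['PER'])  # {'precision': 1.0, 'recall': 1.0, 'f1-score': 1.0, 'support': 1}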
nilq/baby-python
python
from det3d.core.utils.scatter import scatter_mean from torch.nn import functional as F from ..registry import READERS from torch import nn import numpy as np import torch def voxelization(points, pc_range, voxel_size): keep = (points[:, 0] >= pc_range[0]) & (points[:, 0] <= pc_range[3]) & \ (points[:, 1] >= pc_range[1]) & (points[:, 1] <= pc_range[4]) & \ (points[:, 2] >= pc_range[2]) & (points[:, 2] <= pc_range[5]) points = points[keep, :] coords = ((points[:, [2, 1, 0]] - pc_range[[2, 1, 0]]) / voxel_size[[2, 1, 0]]).to(torch.int64) unique_coords, inverse_indices = coords.unique(return_inverse=True, dim=0) voxels = scatter_mean(points, inverse_indices, dim=0) return voxels, unique_coords def voxelization_virtual(points, pc_range, voxel_size): # current one is hard coded for nuScenes # TODO: fix those magic number keep = (points[:, 0] >= pc_range[0]) & (points[:, 0] <= pc_range[3]) & \ (points[:, 1] >= pc_range[1]) & (points[:, 1] <= pc_range[4]) & \ (points[:, 2] >= pc_range[2]) & (points[:, 2] <= pc_range[5]) points = points[keep, :] real_points_mask = points[:, -2] == 1 painted_points_mask = points[:, -2] == 0 virtual_points_mask = points[:, -2] == -1 # remove zero padding for real points real_points = points[real_points_mask][:, [0, 1, 2, 3, -1]] painted_point = points[painted_points_mask] virtual_point = points[virtual_points_mask] padded_points = torch.zeros(len(points), 22, device=points.device, dtype=points.dtype) # real points will occupy channels 0 to 4 and -1 padded_points[:len(real_points), :5] = real_points padded_points[:len(real_points), -1] = 1 # painted points will occupy channels 5 to 21 padded_points[len(real_points):len(real_points)+len(painted_point), 5:19] = painted_point[:, :-2] padded_points[len(real_points):len(real_points)+len(painted_point), 19] = painted_point[:, -1] padded_points[len(real_points):len(real_points)+len(painted_point), 20] = 1 padded_points[len(real_points):len(real_points)+len(painted_point), 21] = 0 # virtual points will occupy channels 5 to 21 padded_points[len(real_points)+len(painted_point):, 5:19] = virtual_point[:, :-2] padded_points[len(real_points)+len(painted_point):, 19] = virtual_point[:, -1] padded_points[len(real_points)+len(painted_point):, 20] = 0 padded_points[len(real_points)+len(painted_point):, 21] = 0 points_xyz = torch.cat([real_points[:, :3], painted_point[:, :3], virtual_point[:, :3]], dim=0) coords = ((points_xyz[:, [2, 1, 0]] - pc_range[[2, 1, 0]]) / voxel_size[[2, 1, 0]]).to(torch.int64) unique_coords, inverse_indices = coords.unique(return_inverse=True, dim=0) voxels = scatter_mean(padded_points, inverse_indices, dim=0) indicator = voxels[:, -1] mix_mask = (indicator > 0) * (indicator < 1) # remove index voxels = voxels[:, :-1] voxels[mix_mask, :5] = voxels[mix_mask, :5] / indicator[mix_mask].unsqueeze(-1) voxels[mix_mask, 5:] = voxels[mix_mask, 5:] / (1-indicator[mix_mask].unsqueeze(-1)) return voxels, unique_coords @READERS.register_module class DynamicVoxelEncoder(nn.Module): def __init__( self, pc_range, voxel_size, virtual=False ): super(DynamicVoxelEncoder, self).__init__() self.pc_range = torch.tensor(pc_range) self.voxel_size = torch.tensor(voxel_size) self.shape = torch.round((self.pc_range[3:] - self.pc_range[:3]) / self.voxel_size) self.shape_np = self.shape.numpy().astype(np.int32) self.virtual = virtual @torch.no_grad() def forward(self, points): # points list[torch.Tensor] coors = [] voxels = [] for res in points: if self.virtual: voxel, coor = voxelization_virtual(res, self.pc_range.to(res.device), 
self.voxel_size.to(res.device)) else: voxel, coor = voxelization(res, self.pc_range.to(res.device), self.voxel_size.to(res.device)) voxels.append(voxel) coors.append(coor) coors_batch = [] for i in range(len(voxels)): coor_pad = F.pad(coors[i], (1, 0), mode='constant', value=i) coors_batch.append(coor_pad) coors_batch = torch.cat(coors_batch, dim=0) voxels_batch = torch.cat(voxels, dim=0) return voxels_batch, coors_batch, self.shape_np
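# --- Added usage sketch (not part of the original module) ---
# Illustrates what `voxelization` above computes: points that land in the same
# voxel are averaged. This standalone version replaces det3d's scatter_mean
# with a plain-torch equivalent (index_add_ + bincount) and skips the range
# filter; the point cloud, pc_range, and voxel_size are made-up example values.
def _scatter_mean_demo():
    import torch

    def _scatter_mean(src, index, num_groups):
        out = torch.zeros(num_groups, src.shape[1], dtype=src.dtype)
        out.index_add_(0, index, src)                       # sum points per voxel
        counts = torch.bincount(index, minlength=num_groups).clamp(min=1)
        return out / counts.unsqueeze(-1).to(src.dtype)     # sum -> mean

    points = torch.rand(1000, 5) * 10                       # x, y, z + two features
    pc_range = torch.tensor([0., 0., 0., 10., 10., 10.])
    voxel_size = torch.tensor([0.5, 0.5, 0.5])
    coords = ((points[:, [2, 1, 0]] - pc_range[[2, 1, 0]]) / voxel_size[[2, 1, 0]]).long()
    unique_coords, inverse = coords.unique(return_inverse=True, dim=0)
    voxels = _scatter_mean(points, inverse, len(unique_coords))
    print(voxels.shape, unique_coords.shape)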
nilq/baby-python
python
from fjord.base.tests import eq_, TestCase from fjord.feedback.utils import clean_url, compute_grams class Testclean_url(TestCase): def test_basic(self): data = [ (None, None), ('', ''), ('http://example.com/', 'http://example.com/'), ('http://example.com/#foo', 'http://example.com/'), ('http://example.com/?foo=bar', 'http://example.com/'), ('http://example.com:8000/', 'http://example.com/'), ('ftp://foo.bar/', ''), ('chrome://something', 'chrome://something'), ('about:home', 'about:home'), ] for url, expected in data: eq_(clean_url(url), expected) class TestComputeGrams(TestCase): # FIXME - Beef this up so that we have more comprehensive tests of # the various tokenizing edge cases. def test_basic(self): test_data = [ ('The quick brown fox', [u'brown quick', u'brown fox']), ('the latest update disables the New tab function', [u'disables new', u'function tab', u'new tab', u'latest update', u'disables update']), ('why is firefox so damn slow???? many tabs load slow or not at ' 'all!', [u'load tabs', u'load slow', u'slow tabs', u'damn slow']), ("I'm one of the guys that likes to try Firefox ahead of the " 'herd... usually I use Nightly, but then a while back my ' 'favorite add-on, TabMixPlus stopped working because Firefox ' "redid something in the code. \"No problem,\" says I to myself, " "I'll just use Aurora until they get it fixed.", [u'add-on favorite', u'add-on tabmixplus', u'ahead herd', u'ahead try', u'aurora fixed', u'aurora use', u'code problem', u'code redid', u'favorite nightly', u"guys i'm", u'guys likes', u'herd usually', u"i'll just", u"i'll myself", u'just use', u'likes try', u'myself says', u'nightly use', u'problem says', u'redid working', u'stopped tabmixplus', u'stopped working', u'use usually']), ('Being partially sighted, I found the features with Windows XP ' 'and IE8 extremely usefu;. I need everything in Arial black bold ' 'text.', [u'extremely usefu', u'features sighted', u'windows xp', u'ie8 xp', u'black bold', u'partially sighted', u'need usefu', u'features windows', u'arial need', u'arial black', u'bold text', u'extremely ie8']), ] for text, expected in test_data: eq_(sorted(compute_grams(text)), sorted(expected))
nilq/baby-python
python
from typing import Optional from cdm.enums import CdmObjectType from cdm.objectmodel import CdmAttributeReference, CdmCorpusContext from .cdm_object_ref_persistence import CdmObjectRefPersistence class AttributeReferencePersistence(CdmObjectRefPersistence): @staticmethod def from_data(ctx: CdmCorpusContext, data: str) -> Optional[CdmAttributeReference]: if not data: return None simple_reference = True attribute = data return ctx.corpus.make_ref(CdmObjectType.ATTRIBUTE_REF, attribute, simple_reference)
nilq/baby-python
python
import pandas as pd

# Split a URL list into progressively larger chunk files.
IN_FILE = 'aus-domain-urls.txt'
START_IDX = 0  # currently unused
BLOCK_SIZE = [10, 20, 50, 100, 1000, 100000, 1000000]
OUT_FILE_PREFIX = 'aus-domain-urls'

data = pd.read_csv(IN_FILE)
data_length = len(data)

for i in range(len(BLOCK_SIZE)):
    # Each block starts where the previous one ended; the last block
    # absorbs whatever rows remain.
    if i == 0:
        lower_bound = 0
    else:
        lower_bound = upper_bound
    if i == len(BLOCK_SIZE) - 1:
        upper_bound = data_length
    else:
        upper_bound = lower_bound + BLOCK_SIZE[i]

    # The file name encodes the row range and the block size.
    out_file = '{}_{}_{}_{}.txt'.format(OUT_FILE_PREFIX, lower_bound, upper_bound, upper_bound - lower_bound)
    data.iloc[lower_bound:upper_bound, :].to_csv(out_file, header=False, index=None, sep=" ")
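# --- Added illustration (not part of the original script; row count is made up) ---
# Reproduces just the boundary arithmetic of the loop above for a hypothetical
# file of 2,000,000 rows, so the cumulative block ranges are easy to see.
block_sizes = [10, 20, 50, 100, 1000, 100000, 1000000]
demo_length = 2_000_000  # hypothetical row count
bounds, lower = [], 0
for i, size in enumerate(block_sizes):
    upper = demo_length if i == len(block_sizes) - 1 else lower + size
    bounds.append((lower, upper))
    lower = upper
print(bounds)  # [(0, 10), (10, 30), (30, 80), (80, 180), (180, 1180), (1180, 101180), (101180, 2000000)]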
nilq/baby-python
python
# Generated by Django 3.2.6 on 2021-10-19 10:58 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('dashboard', '0002_auto_20211019_1613'), ] operations = [ migrations.RemoveField( model_name='bookingrooms', name='room', ), migrations.AddField( model_name='bookingrooms', name='branch', field=models.CharField(default='null', max_length=30), ), migrations.AddField( model_name='bookingrooms', name='category', field=models.CharField(default='null', max_length=30), ), migrations.AddField( model_name='bookingrooms', name='city', field=models.CharField(default='null', max_length=30), ), migrations.AddField( model_name='bookingrooms', name='contact', field=models.CharField(default='null', max_length=30), ), migrations.AddField( model_name='bookingrooms', name='duration', field=models.CharField(default='null', max_length=30), ), migrations.AddField( model_name='bookingrooms', name='email', field=models.CharField(default='null', max_length=30), ), migrations.AddField( model_name='bookingrooms', name='gender', field=models.CharField(default='null', max_length=30), ), migrations.AddField( model_name='bookingrooms', name='hostel_name', field=models.CharField(default='null', max_length=30), ), migrations.AddField( model_name='bookingrooms', name='year', field=models.CharField(default='null', max_length=30), ), migrations.AlterField( model_name='bookingrooms', name='college', field=models.CharField(default='null', max_length=30), ), migrations.AlterField( model_name='bookingrooms', name='cust_name', field=models.CharField(default='null', max_length=30), ), migrations.AlterField( model_name='bookingrooms', name='date', field=models.CharField(default='null', max_length=30), ), migrations.AlterField( model_name='bookingrooms', name='payment', field=models.CharField(default='null', max_length=30), ), migrations.AlterField( model_name='bookingrooms', name='total', field=models.CharField(default='null', max_length=30), ), ]
nilq/baby-python
python
from typing import Protocol class SupportsStr(Protocol): def __str__(self) -> str: ...
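# --- Added usage sketch (not part of the original module; `render` is a hypothetical helper) ---
# Protocol classes type-check structurally: any object with a __str__ method
# satisfies SupportsStr, with no inheritance required.
def render(value: SupportsStr) -> str:
    return str(value)


print(render(42))      # int defines __str__, so this type-checks
print(render("text"))  # so does str itself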
nilq/baby-python
python
import os
import tensorflow as tf
from PIL import Image

cwd = os.getcwd() + '/train/'

for root, dirs, files in os.walk(cwd):
    print(dirs)  # all sub-directories under the current path, one per class
    classes = dirs
    break
print(cwd)

writer = tf.python_io.TFRecordWriter("train.tfrecords")
for index, name in enumerate(classes):
    class_path = cwd + name + "/"
    print(class_path)
    for img_name in os.listdir(class_path):
        img_path = class_path + img_name
        img = Image.open(img_path)
        img = img.resize((224, 224))
        if img.mode != 'RGB':
            print(img_path)
            img = img.convert('RGB')  # normalize the mode so every record holds 3-channel bytes
        img_raw = img.tobytes()  # convert the image to raw bytes
        example = tf.train.Example(features=tf.train.Features(feature={
            "label": tf.train.Feature(int64_list=tf.train.Int64List(value=[index])),
            'img': tf.train.Feature(bytes_list=tf.train.BytesList(value=[img_raw]))
        }))
        writer.write(example.SerializeToString())  # serialize to a string record
writer.close()
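# --- Added read-back sketch (not part of the original script; assumes the same
# TF1-style API used by the writer above) ---
# Shows how records written to train.tfrecords can be decoded again via tf.data.
def _parse(serialized):
    features = tf.parse_single_example(
        serialized,
        features={
            "label": tf.FixedLenFeature([], tf.int64),
            "img": tf.FixedLenFeature([], tf.string),
        })
    img = tf.decode_raw(features["img"], tf.uint8)
    img = tf.reshape(img, [224, 224, 3])  # matches the 224x224 RGB bytes written above
    return img, features["label"]


dataset = tf.data.TFRecordDataset("train.tfrecords").map(_parse)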
nilq/baby-python
python
# -*- coding: utf-8 -*-
# (c) Copyright IBM Corp. 2010, 2021. All Rights Reserved.
# pragma pylint: disable=unused-argument, no-self-use
"""Start the scheduler used by the fn_scheduler package"""

import logging
from resilient_circuits import ResilientComponent, handler
from fn_scheduler.components import SECTION_SCHEDULER
from fn_scheduler.lib.scheduler_helper import ResilientScheduler
from fn_scheduler.lib.resilient_helper import validate_app_config

log = logging.getLogger(__name__)


class FunctionComponent(ResilientComponent):
    """Component that starts the resilient scheduler"""

    def __init__(self, opts):
        """constructor provides access to the configuration options"""
        super(FunctionComponent, self).__init__(opts)
        options = opts.get(SECTION_SCHEDULER, {})

        validate_app_config(options)

        self.timezone = options.get("timezone")

        self.scheduler = ResilientScheduler(options.get("db_url"),
                                            options.get("datastore_dir"),
                                            options.get("thread_max"),
                                            options.get("timezone"))
        log.info("Scheduler started")

    @handler("reload")
    def _reload(self, event, opts):
        """Configuration options have changed, save new values"""
        self.opts = opts
        options = opts.get(SECTION_SCHEDULER, {})

        validate_app_config(options)
        # TODO restart the scheduler
nilq/baby-python
python
import os import pandas as pd from bento.common import datautil, logger, util logging = logger.fancy_logger(__name__) def load_covid_raw_data(data_path, base, cases, deaths, nrows=None): read_args = {} if nrows: read_args["nrows"] = nrows idf = pd.read_csv(f"{data_path}/{base}/{cases}").drop(["Lat", "Long"], axis=1) idf = idf.melt( id_vars=["Province/State", "Country/Region"], var_name="date", value_name="cases", ) idf = idf.groupby(["date", "Country/Region"]).sum().reset_index() # Add on deaths ddf = pd.read_csv(f"{data_path}/{base}/{deaths}").drop(["Lat", "Long"], axis=1) ddf = ddf.melt( id_vars=["Province/State", "Country/Region"], var_name="date", value_name="deaths", ) ddf = ddf.groupby(["date", "Country/Region"]).sum() idf = idf.join(ddf, on=["date", "Country/Region"]).rename( columns={"Country/Region": "country"} ) idf.loc[:, "date"] = pd.to_datetime(idf["date"]) idf = idf.sort_values("date") return idf def add_country_reference(raw_df, ref_df): # Drop some hard to handle, more obscure areas drop_entries = [ "Diamond Princess", "West Bank and Gaza", "Kosovo", "Holy See", "MS Zaandam", "Eritrea", "Western Sahara", ] idf = raw_df.copy() idf = idf.loc[~idf.country.isin(drop_entries)] # Change some unrecognized entries modifications = { "Burma": ("country", "Myanmar"), "US": ("country", "United States"), "Korea, South": ("country", "Korea, Republic of"), } for name, mod in modifications.items(): idf.loc[idf.country == name, mod[0]] = mod[1] reference = tuple(ref_df["country"].unique()) mismatch = set(idf["country"].unique()) - set(reference) for country in mismatch: match_name = datautil.fuzzy_search(country, reference) logging.debug(f"Missing '{country}', assigning {match_name}") idf.loc[idf.country == country, "country"] = match_name logging.info(f"Total country name mismatches: {len(mismatch)}") idf = idf.join(ref_df.set_index("country"), on="country") return idf def process_covid_data(idf): idf["cases_per_100k"] = idf["cases"] * 1e5 / idf["population"] idf["deaths_per_100k"] = idf["deaths"] * 1e5 / idf["population"] idf = idf.drop(["population"], axis=1) return idf def load(nrows=None): data_path = f"{os.environ['APP_HOME']}/{os.environ['DATA_DIR']}" base = f"jhopkins-covid-19/csse_covid_19_data/csse_covid_19_time_series" cases = "time_series_covid19_confirmed_global.csv" deaths = "time_series_covid19_deaths_global.csv" raw_df = load_covid_raw_data(data_path, base, cases, deaths) ref_df = datautil.df_loader("world_country_reference.csv") jdf = add_country_reference(raw_df, ref_df) pdf = process_covid_data(jdf) data = datautil.autostructure(pdf) return data
nilq/baby-python
python
from tests.unit.dataactcore.factories.staging import DetachedAwardFinancialAssistanceFactory from tests.unit.dataactvalidator.utils import number_of_errors, query_columns _FILE = 'fabs18_detached_award_financial_assistance' def test_column_headers(database): expected_subset = {'row_number', 'business_types', 'uniqueid_AssistanceTransactionUniqueKey'} actual = set(query_columns(_FILE, database)) assert expected_subset == actual def test_success(database): """ BusinessTypes must be one to three letters in length. BusinessTypes values must be non-repeated letters from A to X. """ det_award = DetachedAwardFinancialAssistanceFactory(business_types='A', correction_delete_indicatr='') det_award_2 = DetachedAwardFinancialAssistanceFactory(business_types='XB', correction_delete_indicatr=None) det_award_3 = DetachedAwardFinancialAssistanceFactory(business_types='RCm', correction_delete_indicatr='c') det_award_4 = DetachedAwardFinancialAssistanceFactory(business_types='rcm', correction_delete_indicatr='C') # Ignore correction delete indicator of D det_award_5 = DetachedAwardFinancialAssistanceFactory(business_types='BOB', correction_delete_indicatr='d') errors = number_of_errors(_FILE, database, models=[det_award, det_award_2, det_award_3, det_award_4, det_award_5]) assert errors == 0 def test_failure(database): """ BusinessTypes must be one to three letters in length. BusinessTypes values must be non-repeated letters from A to X. """ # Test if it's somehow empty or has 4 letters (length test) det_award = DetachedAwardFinancialAssistanceFactory(business_types='', correction_delete_indicatr='') det_award_2 = DetachedAwardFinancialAssistanceFactory(business_types='ABCD', correction_delete_indicatr='c') errors = number_of_errors(_FILE, database, models=[det_award, det_award_2]) assert errors == 2 # Test repeats det_award = DetachedAwardFinancialAssistanceFactory(business_types='BOb', correction_delete_indicatr='') det_award_2 = DetachedAwardFinancialAssistanceFactory(business_types='BOB', correction_delete_indicatr='c') det_award_3 = DetachedAwardFinancialAssistanceFactory(business_types='BbO', correction_delete_indicatr='') det_award_4 = DetachedAwardFinancialAssistanceFactory(business_types='BB', correction_delete_indicatr='') errors = number_of_errors(_FILE, database, models=[det_award, det_award_2, det_award_3, det_award_4]) assert errors == 4 # Test that only valid letters work det_award = DetachedAwardFinancialAssistanceFactory(business_types='ABY', correction_delete_indicatr='') det_award_2 = DetachedAwardFinancialAssistanceFactory(business_types='C2', correction_delete_indicatr='c') det_award_3 = DetachedAwardFinancialAssistanceFactory(business_types='c2d', correction_delete_indicatr='') det_award_4 = DetachedAwardFinancialAssistanceFactory(business_types='123', correction_delete_indicatr='') errors = number_of_errors(_FILE, database, models=[det_award, det_award_2, det_award_3, det_award_4]) assert errors == 4
nilq/baby-python
python
from rtm.api import validate def test_validate(rtm_path): validate(rtm_path)
nilq/baby-python
python
from sqlalchemy import create_engine
from td.client import TDClient
from datetime import datetime
from td import exceptions
from requests.exceptions import ConnectionError
import pandas as pd
import sqlite3
import time
import credentials

print("- Modules imported -")


def make_sqlite_table(table_name):
    engine = create_engine('sqlite:///Options_temp.db', echo=False)
    table_columns = pd.DataFrame(columns=columns_wanted)
    table_columns.to_sql(table_name, con=engine)
    return 0


def add_rows(clean_data, table_name):
    global file_date
    engine = create_engine(f'sqlite:///Data/Options_{file_date}.db', echo=False)
    clean_data.to_sql(table_name, con=engine, if_exists='append', index_label='index')
    return 0


def delete_row(table_name, column, argument):
    conn = sqlite3.connect('Options.db')
    con = conn.cursor()
    con.execute(f'DELETE FROM {table_name} WHERE {column}={argument}')
    conn.commit()
    conn.close()
    return 0


def delete_db_table(table_name):
    conn = sqlite3.connect('options.db')
    con = conn.cursor()
    con.execute(f'DROP TABLE {table_name}')
    conn.commit()
    conn.close()
    return 0


def show_db_table(puts_calls):
    conn = sqlite3.connect('options.db')
    con = conn.cursor()
    for row in con.execute(f'SELECT * FROM {puts_calls}'):
        print(row)
    conn.close()
    return 0


TDSession = TDClient(
    client_id=credentials.client_id,
    redirect_uri='https://127.0.0.1',
    credentials_path=credentials.json_path  # Users/user/.../Project/td_state.json
)
TDSession.login()
print("- TD connection made -")


def human_time(epoch):
    new_time = datetime.fromtimestamp(int(epoch) / 1000)
    output = new_time.strftime('%Y-%m-%d %H:%M:%S')
    return output


def get_time_now():
    curr_time = time.localtime()
    curr_clock = time.strftime("%H:%M:%S", curr_time)
    curr_m = time.strftime('%m')
    curr_y_d = time.strftime('%d%Y')
    int_curr_clock = int(f'{curr_clock[:2]}{curr_clock[3:5]}')
    return int_curr_clock, curr_m, curr_y_d


def history(symbol):
    quotes = TDClient.get_price_history(TDSession, symbol=symbol, period_type='day', period=1,
                                        frequency_type='minute', frequency=1, extended_hours=False)
    # start_date = 1606086000000, end_date = 1606341600000,
    return quotes


cur_weekly = 0
cur_stocks = ['AAPL']

'''
test_quotes_2D = TDClient.get_quotes(TDSession, instruments=['AMD', 'AAPL'])

def stats_list():
    stats_wanted = ['symbol', 'bidPrice', 'bidSize', 'bidId', 'askPrice', 'askId', 'lastPrice', 'lastSize', 'lastId',
                    'openPrice', 'highPrice', 'lowPrice', 'bidTick', 'closePrice', 'netChange', 'totalVolume',
                    'quoteTimeInLong', 'tradeTimeInLong', 'exchange', 'exchangeName', 'volatility',
                    'regularMarketLastPrice', 'regularMarketNetChange', 'regularMarketTradeTimeInLong',
                    'netPercentChangeInDouble', 'markChangeInDouble', 'markPercentChangeInDouble',
                    'regularMarketPercentChangeInDouble']
    output_stats = []
    for key in test_quotes_2D['AMD'].keys():
        for i in stats_wanted:
            if key == i:
                output_stats.append(key)
    return output_stats
'''

file_date = 0
trade_days_2021 = {'jan': [4, 5, 6, 7, 8, 11, 12, 13, 14, 15, 19, 20, 21, 22, 25, 26, 27, 28, 29],
                   'feb': [1, 2, 3, 4, 5, 8, 9, 10, 11, 12, 16, 17, 18, 19, 22, 23, 24, 25, 26],
                   'mar': [1, 2, 3, 4, 5, 8, 9, 10, 11, 12, 15, 16, 17, 18, 19, 22, 23, 24, 25, 26, 29, 30, 31],
                   'apr': [5, 6, 7, 8, 9, 12, 13, 14, 15, 16, 19, 20, 21, 22, 23, 26, 27, 28, 29, 30],
                   'may': [3, 4, 5, 6, 7, 10, 11, 12, 13, 14, 17, 18, 19, 20, 21, 24, 25, 26, 27, 28],
                   'jun': [1, 2, 3, 4, 7, 8, 9, 10, 11, 14, 15, 16, 17, 18, 21, 22, 23, 24, 25, 28, 29, 30],
                   'jul': [1, 2, 6, 7, 8, 9, 12, 13, 14, 15, 16, 19, 20, 21, 22, 23, 26, 27, 28, 29, 30],
                   'aug': [2, 3, 4, 5, 6, 9, 10, 11, 12, 
13, 16, 17, 18, 19, 20, 23, 24, 25, 26, 27, 30, 31], 'sep': [1, 2, 3, 7, 8, 9, 10, 13, 14, 15, 16, 17, 20, 21, 22, 23, 24, 27, 28, 29, 30], 'oct': [1, 4, 5, 6, 7, 8, 12, 13, 14, 15, 18, 19, 20, 21, 22, 25, 26, 27, 28, 29], 'nov': [1, 2, 3, 4, 5, 8, 9, 10, 12, 15, 16, 17, 18, 19, 22, 23, 24, 29, 30], 'dec': [1, 2, 3, 6, 7, 8, 9, 10, 13, 14, 15, 16, 17, 20, 21, 22, 27, 28, 29, 30]} opt_column_names = ['putCall', 'symbol', 'description', 'exchangeName', 'bid', 'ask', 'last', 'mark', 'bidSize', 'askSize', 'bidAskSize', 'lastSize', 'highPrice', 'lowPrice', 'openPrice', 'closePrice', 'totalVolume', 'tradeDate', 'tradeTimeInLong', 'quoteTimeInLong', 'netChange', 'volatility', 'delta', 'gamma', 'theta', 'vega', 'rho', 'openInterest', 'timeValue', 'theoreticalOptionValue', 'theoreticalVolatility', 'optionDeliverablesList', 'strikePrice', 'expirationDate', 'daysToExpiration', 'expirationType', 'lastTradingDay', 'multiplier', 'settlementType', 'deliverableNote', 'isIndexOption', 'percentChange', 'markChange', 'markPercentChange', 'mini', 'inTheMoney', 'nonStandard'] columns_unwanted = ['description', 'mark', 'bidSize', 'askSize', 'bidAskSize', 'lastSize', 'tradeDate', 'tradeTimeInLong', 'theoreticalOptionValue', 'optionDeliverablesList', 'expirationType', 'lastTradingDay', 'multiplier', 'settlementType', 'deliverableNote', 'isIndexOption', 'markChange', 'markPercentChange', 'nonStandard', 'inTheMoney', 'mini'] columns_wanted = ['putCall', 'symbol', 'exchangeName', 'bid', 'ask', 'last', 'highPrice', 'lowPrice', 'openPrice', 'closePrice', 'totalVolume', 'quoteTimeInLong', 'netChange', 'volatility', 'delta', 'gamma', 'theta', 'vega', 'rho', 'openInterest', 'timeValue', 'theoreticalVolatility', 'strikePrice', 'expirationDate', 'daysToExpiration', 'percentChange'] stocks = ['AAL', 'AAPL', 'AMD', 'AMZN', 'APA', 'ATVI', 'AXP', 'BABA', 'CME', 'CMG', 'CSCO', 'DAL', 'DIS', 'EA', 'FB', 'GME', 'GOOG', 'GS', 'HD', 'IBM', 'JNJ', 'JPM', 'MCD', 'MSFT', 'MU', 'NEE', 'NFLX', 'NVDA', 'ORCL', 'PEP', 'PYPL', 'QQQ', 'ROKU', 'SBUX', 'SNAP', 'SPY', 'SQ', 'TSLA', 'TWTR', 'ULTA', 'UPS', 'V', 'VXX', 'WMT', 'YUM', 'VDE', 'XLB', 'XLI', 'VCR', 'VDC', 'XLV', 'XLF', 'VGT', 'XLC', 'XLU', 'VNQ'] # This segment was used to sort out unique columns after i hard coded the columns i wanted ''' # print(len(opt_column_names)) # print(len(columns_unwanted)) # print(len(columns_wanted)) # print(len(stocks)) outs = [] def unique_list(n): output = [] for x in n: if x not in output: output.append(x) else: print(x) print(len(output)) return 0 for i in opt_column_names: for j in columns_wanted: if i == j: outs.append(i) print(outs) print(len(outs)) unique_list(outs) ''' trade_stocks = ['AAPL', 'SPY', 'ROKU', 'TSLA', 'GME'] def get_weekly_data(clean): # get data for just the stuff we want to use for r in clean.iterrows(): if r[1][-2] == 'symbol': print(r[1]) if r[0] == 'bid': print(r[1]) print(r[1][2]) return 0 def get_stock(stock): # pass an array of ticker(s) for stock stock_lookup = TDSession.get_quotes(instruments=stock) return stock_lookup def raw_stock(raw): clean_stock_data = [[]] for i in raw.keys(): print(i) return clean_stock_data def pandas_stock_data(arr): pandas_data = [] return pandas_data def get_next_stock(): global pulls global failed_pulls for stock in trade_stocks: error = False try: stock_data = get_stock(stock) except (exceptions.ServerError, exceptions.GeneralError, exceptions.ExdLmtError, ConnectionError): error = True failed_pulls = failed_pulls + 1 print('A server error occurred') if not error: try: clean_stock_data = 
pandas_stock_data(raw_stock(stock_data)) # add_rows(clean_stock_data) UNCOMMENT TO ADD TO STOCKS.DB pulls = pulls + 1 except ValueError: print(ValueError.with_traceback()) print(f'{stock} did not have values for this iteration') failed_pulls = failed_pulls + 1 print(stock) time.sleep(1) return 0 def get_chain(stock): opt_lookup = TDSession.get_options_chain( option_chain={'symbol': stock, 'strikeCount': 50, 'toDate': '2021-4-23'}) return opt_lookup def raw_chain(raw, put_call): cp = f'{put_call}ExpDateMap' clean_data = [[]] r = -1 for k in raw[cp].keys(): # print(k, raw[k], '\n') for strike in raw[cp][k].keys(): # print(strike, raw[k][strike]) for a in raw[cp][k][strike][0].keys(): # if r == -1: # print(raw[cp][k][strike][0].keys()) unit = raw[cp][k][strike][0][a] if unit == put_call.upper(): r = r + 1 if r > 0: clean_data.append([]) clean_data[r].append(unit) return clean_data def pandas_chain(clean): df_cp = pd.DataFrame(clean, columns=opt_column_names) panda_data = df_cp.drop(columns=columns_unwanted) return panda_data pulls = 0 failed_pulls = 0 def get_next_chains(): x = 0 global pulls global failed_pulls global cur_stocks for stock in stocks: error = False try: chain = get_chain(stock) except (exceptions.ServerError, exceptions.GeneralError, exceptions.ExdLmtError, ConnectionError): error = True failed_pulls = failed_pulls + 1 print('A server error occurred') if not error: try: clean = pandas_chain(raw_chain(chain, 'call')) add_rows(clean, 'calls') for s in cur_stocks: if s == stock: get_weekly_data(clean) pulls = pulls + 1 except ValueError: print(ValueError.with_traceback()) print(f'{x}: Calls for {stock} did not have values for this iteration') failed_pulls = failed_pulls + 1 try: get_clean = pandas_chain(raw_chain(chain, 'put')) add_rows(get_clean, 'puts') pulls = pulls + 1 except ValueError: print(f'{x}: Puts for {stock} did not have values for this iteration') failed_pulls = failed_pulls + 1 print(f'{x}: {stock}') x = x + 1 time.sleep(2) return 0 # |SQLite management| # # # make_sqlite_table('calls') # inputs: puts|calls # make_sqlite_table('puts') # inputs: puts|calls # delete_db_table('calls') # delete_db_table('puts') # show_db_table('calls') # show_db_table('puts') # add_rows(clean_chain(raw_chain(get_chain('SPY'), 'put')), 'puts') # raw_chain(,'put|call')), 'puts|calls') # delete_row('puts', '', 1321354652) def main(): global file_date global trade_stocks t, mon, day = get_time_now() mon = list(trade_days_2021.keys())[int(mon) - 1] ''' # uncomment for LIVE while True: if (t < 930) or (t > 1600): print(f'{t}: Market closed {mon}{day}'.upper()) time.sleep(10) else: break ''' # uncomment below line when TESTING on live data file_date = f'temp' # uncomment below line to save and analyze live data # file_date = f'{mon}{day}' pull_count = 0 end_t = 1600 while get_time_now()[0]: # < end_t: insert segment to run LIVE # get_next_stock() get_next_chains() pull_count = pull_count + 1 print(pull_count) print('option market closed') print(f'failed_pulls: {failed_pulls}') print(f'pulls: {pulls}') return 0 main()
nilq/baby-python
python
import sys, os, subprocess, shutil, time BUILDDIR = os.path.abspath("build") NINJA_EXE = "ninja.exe" NINJA_BUILD_FILE = "build/build.ninja" CALL_PATH = os.getcwd() TOOL_PATH = sys.path[0] + "/" TOOLCHAIN_PATH = os.path.dirname(sys.path[0]) NO_EMOJI = False NO_COLOR = False SELECTION = None SECONDARY = None CMAKE_EXTRA = "-DTOOLCHAIN_OFFSET:STRING={} ".format(TOOLCHAIN_PATH) SKIP_PREBUILD = False ONLY_CONFIG = False NEW_BUILD = False NO_NINJA = False class Text: @staticmethod def error(text): return "\033[91m\033[1m\033[4m" + text + "\033[0m" @staticmethod def recoverableError(text): return "\033[31m" + text + "\033[0m" @staticmethod def underline(text): return "\033[4m" + text + "\033[0m" @staticmethod def bold(text): return "\033[1m" + text + "\033[0m" @staticmethod def header(text): return "\033[1m\033[4m" + text + "\033[0m" @staticmethod def warning(text): return "\033[93m\033[1m" + text + "\033[0m" @staticmethod def important(text): return "\033[94m\033[1m" + text + "\033[0m" @staticmethod def reallyImportant(text): return "\033[94m\033[1m\033[4m" + text + "\033[0m" @staticmethod def green(text): return "\033[92m" + text + "\033[0m" @staticmethod def success(text): return "\033[92m\033[1m" + text + "\033[0m" @staticmethod def red(text): return "\033[91m" + text + "\033[0m" @staticmethod def blue(text): return "\033[94m" + text + "\033[0m" @staticmethod def cyan(text): return "\033[96m" + text + "\033[0m" @staticmethod def magenta(text): return "\033[95m" + text + "\033[0m" @staticmethod def gray(text): return "\033[0;90m" + text + "\033[0m" @staticmethod def yellow(text): return "\033[93m" + text + "\033[0m" @staticmethod def darkYellow(text): return "\033[33m" + text + "\033[0m" @staticmethod def darkGreen(text): return "\033[32m" + text + "\033[0m" @staticmethod def darkRed(text): return "\033[31m" + text + "\033[0m" @staticmethod def darkBlue(text): return "\033[34m" + text + "\033[0m" @staticmethod def darkCyan(text): return "\033[36m" + text + "\033[0m" @staticmethod def darkMagenta(text): return "\033[35m" + text + "\033[0m" exitCode = 0 exitError = None def runCommand(cmd: str): global exitCode, exitError print() result = subprocess.run(cmd, shell=True) exitCode = result.returncode exitError = result.stderr return exitCode usageMap = { "Valid options": Text.header("Valid options"), "Valid flags": Text.header("Valid flags"), "Prebuild Script": Text.header("Prebuild Script"), "Example Usage": Text.header("Example Usage"), "build": Text.warning("build"), "upload": Text.warning("upload"), "clean": Text.warning("clean"), "reset": Text.warning("reset"), "config": Text.warning("config"), "disable": Text.warning("disable"), "s": Text.gray("-s"), "com_port": Text.bold(Text.darkCyan("com_port")), "cmake_defs": Text.bold(Text.gray("cmake_defs")), "Pre_Build": Text.magenta("`Pre_Build`"), "bat": Text.cyan("`.bat`"), "ps1": Text.cyan("`.ps1`"), "py": Text.cyan("`.py`"), "Usage": "{} [{}] [{}] [{}]".format( Text.important("config.py"), Text.warning("option"), Text.bold(Text.gray("-s")), Text.bold(Text.gray("cmake_defs")) + "|" + Text.bold(Text.darkCyan("com_port")), ), "exUsage": "{} {} {}".format( Text.important("config.py"), Text.warning("build"), Text.gray("-s -DCUSTOM_BUILD_PATH_PREFIX:STRING=build/Pre_Build/") ), } msg = """ {Usage} {Valid options} {clean} \t: Cleanup build files {build}\t[{cmake_defs}]\t: Build project, configuring if necessary {upload}\t[{com_port}]\t: Upload binary file to a connected teensy {disable}\t[{com_port}]\t: Put a connected teensy into programming mode 
{reset}\t[{cmake_defs}]\t: Refresh project to a clean configured state {config}\t[{cmake_defs}]\t: Reconfigure cmake project, can pass \t extra defines {cmake_defs} for cmake {Valid flags} {s} \t: Skip any {Pre_Build} script that exists {Prebuild Script} If a script is named {Pre_Build} and is at the root of a project it will be run before configuring CMake It can be a {bat}, {ps1}, or {py} Only one is run, prefering the file type is that order {Example Usage} {exUsage} """.format_map( usageMap ) def usage(): print(msg) sys.exit() def endScript(errMsg: str = None): global exitCode, exitError if exitCode != 0 or errMsg: if errMsg: print(errMsg) if exitError: print() print(bytes.decode(exitError)) print(Text.error("\nTask Failed ❌")) sys.exit(1) else: print(Text.success("\nTask Succeeded ✔")) sys.exit() TEENSY_CORE_PREFIX = "TEENSY_CORE_NAME:INTERNAL=" FINAL_OUTPUT_FILE_PREFIX = "FINAL_OUTPUT_FILE:INTERNAL=" TEENSY_CORE_NAME = None FINAL_OUTPUT_FILE = None def populateCMAKEVars(): global TEENSY_CORE_NAME, FINAL_OUTPUT_FILE with open(BUILDDIR + "\\CMakeCache.txt", "r") as f: for line in f: if line.find(FINAL_OUTPUT_FILE_PREFIX) != -1: FINAL_OUTPUT_FILE = line.removeprefix(FINAL_OUTPUT_FILE_PREFIX).rstrip() elif line.find(TEENSY_CORE_PREFIX) != -1: TEENSY_CORE_NAME = line.removeprefix(TEENSY_CORE_PREFIX).rstrip() def compile(): global FINAL_OUTPUT_FILE print(Text.reallyImportant("\nBuilding ⏳")) if runCommand("cd build && " + TOOL_PATH + NINJA_EXE + " -j16") != 0: endScript(Text.error("Ninja failed to build ⛔")) print(Text.success("\nBuild Finished 🏁")) populateCMAKEVars() if not FINAL_OUTPUT_FILE: endScript(Text.error("Final binary file was not found ⛔")) else: print(Text.important("Ready to Upload 🔌")) endScript() def preBuild(): if SKIP_PREBUILD: print(Text.warning("Skipping Pre_Build script")) else: code = None if os.path.isfile("Pre_Build.bat"): code = runCommand("Pre_Build.bat") elif os.path.isfile("Pre_Build.ps1"): code = runCommand("Pre_Build.ps1") elif os.path.isfile("Pre_Build.py"): code = runCommand("Pre_Build.py") else: return if code != 0: endScript(Text.error("Pre_Build script failed ⛔")) def build(): print(Text.header("Build Project")) if NO_NINJA: fullClean() config() compile() def disable(): runCommand(TOOL_PATH + "ComMonitor.exe {} 134 -c --priority".format(SECONDARY)) def upload(): print(Text.header("Upload Binary ⚡")) populateCMAKEVars() if not FINAL_OUTPUT_FILE: endScript(Text.error("Final binary file was not found ⛔")) elif not SECONDARY: print(Text.warning("Warning! no port defined, unable to auto reboot ⚠")) else: disable() time.sleep(1.5) tries = 1 while True: if runCommand(TOOL_PATH + "teensy_loader_cli.exe -mmcu={} -v {}".format(TEENSY_CORE_NAME, FINAL_OUTPUT_FILE)) == 0: print(Text.success("\nGood to go ✔")) endScript() elif tries == 0: break else: print(Text.recoverableError("Failed to upload once ✖")) tries -= 1 endScript(Text.error("Failed to upload")) def config(): print(Text.header("Configure Project")) preBuild() print(Text.bold("Configuring CMake project ⚙")) if runCommand("cd build && cmake .. 
-G Ninja {}".format(CMAKE_EXTRA)) != 0: endScript(Text.error("\nFailed to configure cmake")) elif ONLY_CONFIG: endScript() def clean(): if NO_NINJA: print(Text.error("Project is invalid")) endScript(Text.recoverableError("Consider running config or reset")) print(Text.important("Cleaning 🧹")) if runCommand("cd build && " + TOOL_PATH + NINJA_EXE + " clean") != 0: endScript(Text.error("Error cleaning up build files")) def fullClean(): shutil.rmtree(BUILDDIR) os.mkdir(BUILDDIR) def reset(): global ONLY_CONFIG print(Text.red("Resetting Project")) ONLY_CONFIG = True if not NEW_BUILD: print(Text.important("Hard Cleaning 🧼🧽")) fullClean() config() # Begin Script if len(sys.argv) < 2: usage() SELECTION = sys.argv[1].strip(" '\"").upper() if len(sys.argv) > 2: SECONDARY = sys.argv[2].strip(" '\"").upper() SKIP_PREBUILD = SECONDARY == "-S" if SKIP_PREBUILD: CMAKE_EXTRA += " ".join(sys.argv[3:]) else: CMAKE_EXTRA += " ".join(sys.argv[2:]) if not os.path.isdir(BUILDDIR): os.mkdir(BUILDDIR) NEW_BUILD = True NO_NINJA = not os.path.isfile(NINJA_BUILD_FILE) print() if SELECTION == "BUILD": build() elif SELECTION == "UPLOAD": upload() elif SELECTION == "CONFIG": ONLY_CONFIG = True config() elif SELECTION == "CLEAN": clean() elif SELECTION == "RESET": reset() elif SELECTION == "DISABLE": disable() endScript()
nilq/baby-python
python
#!/usr/bin/python """ This plugin implements identifying the modbusRTU protocol for serial2pcap. Modbus RTU Frame Format: Name Length (bits) Function Start 28 At least 3.5 (28 bits) character times of silence Address 8 Function 8 Data n*8 CRC 16 End 28 At Least 3.5 (28 bits) character times of silence between frames This plugin identifies ModbusRTU frames by matching data to CRC's. The plugin forward slices through received data (up to 256 bytes - max RTU ADU size) and computes the data so far to the next two bytes. If a CRC match is found then the plugin assumes that it has found a valid RTU frame. """ from PluginCore import PluginCore from ctypes import c_ushort class ModbusRTU(PluginCore): ProtocolName = "modbusRTU" ProtocolDescription = "Modbus RTU Frame Format Serial Protocol" crc16_tab = [] crc16_constant = 0xA001 def __init__(self): if not len(self.crc16_tab): self.init_crc16() #CRC code derived and modified from PyCRC - Github cristianav/PyCRC - GPLv3 license #https://github.com/cristianav/PyCRC/blob/master/PyCRC/CRC16.py def calculate(self, input_data): is_string = isinstance(input_data, str) is_bytes = isinstance(input_data, (bytes, bytearray)) #if not is_string and not is_bytes: # raise Exception("input data type is not supported") crc_value = 0xFFFF for c in input_data: d = ord(c) tmp = crc_value ^ d rotated = crc_value >> 8 crc_value = rotated ^ self.crc16_tab[(tmp & 0x00ff)] #added this to rotate the bytes. RTU transmits CRC in a different endian crc_low = crc_value & 255 crc_high = crc_value >> 8 return (crc_low << 8) ^ crc_high def init_crc16(self): for i in range(0,256): crc = c_ushort(i).value for j in range(0,8): if crc & 0x0001: crc = c_ushort(crc >> 1).value ^ self.crc16_constant else: crc = c_ushort(crc >> 1).value self.crc16_tab.append(crc) #end derived code def Identify(self, data, capture_info): #sizes do not include 2 byte checksum LOWER_SLICE_LIMIT = 6 #min Modbus RTU Size 8 UPPER_SLICE_LIMIT = 254 #max Modbus RTU Size 256 #if not enough data then wait if len(data) <= LOWER_SLICE_LIMIT: return (PluginCore.Status.TOOSHORT,0) sliceat = LOWER_SLICE_LIMIT while sliceat <= UPPER_SLICE_LIMIT: #make sure there is enough data if len(data) < sliceat + 2: return (PluginCore.Status.TOOSHORT,0) #calculate CRC at slice calc_crc = self.calculate(data[:sliceat]) #get test CRC from data recv_crc = (ord(data[sliceat]) << 8) ^ ord(data[sliceat + 1]) #check to see if calculated and received CRC match - if so then assume good packet if calc_crc == recv_crc: return (PluginCore.Status.OK,sliceat+2) sliceat += 1 #if no packet was found then signal unknown return (PluginCore.Status.UNKNOWN,0)
nilq/baby-python
python
import sys import DiveConstants as dc from rpy2.rinterface import NA from rpy2.robjects.vectors import IntVector, FloatVector, StrVector import rpy2.robjects.packages as rpackages import rpy2.robjects as robjects import numpy as np np.set_printoptions(suppress=True) utils = rpackages.importr('utils') scuba = rpackages.importr('scuba') def max_ascent(dive): """ finds the maximum ascent rate :param dive: dataframe: a dataframe containing columns: time and depth :return: float: the maximum ascent rate """ max = 0 # finds maximum positive difference between each time interval for i in range(len(dive[1])): try: temp = dive[1][i+1] if (dive[1][i] - temp) > max: max = dive[1][i] - temp except IndexError: pass return round(max/10, 3) def compartment_pressures(data, halftime_set): """ Gets compartment pressures from dive profile based on given half time set. :param data: dataframe: a dataframe containing columns: time and depth :param halftime_set: str: the name of the halftime set to be used :return: cp a dataframe containing compartment pressures from 1,1b - 16 """ # setup R functions dive = robjects.r['dive'] haldane = robjects.r['haldane'] pickmodel = robjects.r['pickmodel'] data_frame = robjects.r['data.frame'] nitrox = robjects.r['nitrox'] dive_profile = dive(data, gas=nitrox(0.21)) # check if halftime_set is one of the allowed halftime sets, raise exception if not. if(not(halftime_set == 'ZH-L16A' or halftime_set == 'Haldane' or halftime_set == 'DSAT' or halftime_set == 'Workman65' or halftime_set == 'Buzzacott')): raise ValueError('Invalid halftime-set') else: # if halftime set is decimate, set up decimate model. if(halftime_set == 'Buzzacott'): hm = robjects.r['hm'] decimate_model = hm(HalfT=IntVector((1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15)), M0=IntVector(( 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1)), dM=IntVector((1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1))) cp = haldane(dive_profile, model=decimate_model, progressive=True) # for all other models, set up normally else: cp = haldane(dive_profile, model=pickmodel( halftime_set), progressive=True) # return the compartment pressures as dataframe return data_frame(cp) def max_values(ambient_pressures, compartment_pressures, totalIPP): """ merges max_bubble, max_inspired into a single function :param ambient_pressures: float[]: a list of ambient pressures at each time point :param compartment_pressures: float[]: a list of compartment pressure values :param totalIPP: float[]: the total inert gas partial pressure at given time points :return: float[]: max_values : array containing 4 collumns: maxins, maxbub, the cp where maxbub occured, and surf the cp when the diver surfaces. 
""" # get compartment pressures and ambient pressure data cp = compartment_pressures ap = ambient_pressures # initialize output array, array is same length as comparment pressures max_values = np.zeros((len(cp), 5)) for i in range(len(cp)): maxbub = 0 maxins = -sys.maxsize n2cp = 0 hecp = 0 # find the maximum positive difference of inert gas against ambient pressure (pressure @ compartment - ambient pressure @ that depth) # find the maximum positive difference of inert gas inside each compartment for j in range(len(cp[i])): try: # nparr does [row,col] # dataframe does [col][row] tempbub = (cp[i][j] - ap[j, 1]) # cp[i][j] tempins = (cp[i][j] - totalIPP[j]) if(tempbub > maxbub): maxbub = tempbub n2cp = cp[i][j] if(len(cp)>17): hecp = cp[i+17][j] if(tempins > maxins): maxins = tempins except IndexError: pass max_values[i][0] = maxins max_values[i][1] = maxbub max_values[i][2] = n2cp max_values[i][3] = hecp max_values[i][4] = cp[i][len(cp[i])-1] return max_values # TODO: allow this to take in raw csv or a dataframe def ambient_pressures(dive_csv): """ calculates ambient pressures :param dive_csv: dataframe: a dataframe containing columns: time and depth :return: float[]: a list of ambient pressures at each time point """ # R function setup data_frame = robjects.r['data.frame'] # get dive data (times/depths) df = data_frame(dive_csv) # initialize output array ap = np.zeros((len(df[0]), len(df))) for i in range(len(df[0])): # nparr does [row,col] # dataframe does [col][row] ap[i, 0] = df[0][i] ap[i, 1] = df[1][i]/10 + 1 return ap def max_inspired(compartment_pressures, totalIPP): """ calculates the maximum positive difference between the inert gas pressure inside each compartment (1-17, but it should be 1-16 with both 1 and 1b included) and the partial pressure of inert gas in the breathing mixture at each respective time and depth. 
:param: compartment_pressures: float[]: a list of compartment pressure values :param totalIPP: float[]: the total inert gas partial pressure at given time points :return: float[]: the maximum inspired difference for each compartment A list containing the maximum positive differences of inert gas against totalIPP (pressure @ compartment - totalIPP @ that depth) """ # get compartment pressures and ambient pressure data cp = compartment_pressures # initialize output array, array is same length as comparment pressures maxins = np.zeros(len(cp)) for i in range(len(cp)): max = -sys.maxsize # find the maximum positive difference of inert gas against totalIPP (pressure @ compartment - totalIPP @ that depth) for j in range(len(cp[i])): try: # nparr does [row,col] # dataframe does [col][row] tempmax = (cp[i][j] - totalIPP[j]) # cp[i][j] if(tempmax > max): max = tempmax maxins[i] = max except IndexError: pass return maxins def max_bubble(ambient_pressures, compartment_pressures): """ calculates the maximum positive difference between the inert gas pressure inside each compartment (1-17, but it should be 1-16 with both 1 and 1b included) :param ambient_pressures: float[]: a list of ambient pressures at each time point :param compartment_pressures: float[]: a list of compartment pressure values :return: float[]: the maximum bubble difference for each compartment """ # get compartment pressures and ambient pressure data cp = compartment_pressures ap = ambient_pressures # initialize output array, array is same length as comparment pressures maxbubs = np.zeros((len(cp), 2)) for i in range(len(cp)): max = -sys.maxsize n2cp = 0 # find the maximum positive difference of inert gas against ambient pressure (pressure @ compartment - ambient pressure @ that depth)cls for j in range(len(cp[i])): try: # nparr does [row,col] # dataframe does [col][row] tempbub = (cp[i][j] - ap[j, 1]) # cp[i][j] if(tempbub > max): max = tempbub n2cp = cp[i][j] maxbubs[i][0] = max maxbubs[i][1] = n2cp except IndexError: pass return maxbubs # TODO: having dive might be redundant if compartment pressures can be used? # TODO: Find out how to combine the nitrogen m values with helium m values - when helium and nitrogen is in gas mixture def gradient_factors(dive, gases, compartment_pressures): """ calculates the maximum percentage of the respective M-value any compartment reaches otherwise known as the gradient factor. Below values are harcoded from Erik C. Baker's “Understanding M-values” from tables 2 & 4 :param dive: dataframe: a dataframe containing columns: time and depth :param gasses: str: TODO: this will be a list later? :param compartment_pressures: dataframe containing compartment pressure values :return: float[]: list of gradient factor values """ cp = compartment_pressures # nitrogen delta slope values in order [1, 1b, 2, ... 16] n_delta = dc.N_DELTA # nitogen surfacing m-value in order [1, 1b, 2, ... 16] n_m_naught = dc.N_M_NAUGHT # helium delta slope values in order [1, 1b, 2, ... 16] he_delta = dc.HE_DELTA # helium surfacing m-value in order [1, 1b, 2, ... 
16] he_m_naught = dc.HE_M_NAUGHT gaugeP = np.zeros(len(dive[0])) # nitrogen and helium XDM, calculation = (the respective gas * gauge pressure at each timepoint) nXDM = np.zeros((len(gaugeP), 17)) heXDM = np.zeros((len(gaugeP), 17)) # nitrogen and helium respective m values n_mvalues = np.zeros((len(nXDM), 17)) he_mvalues = np.zeros((len(heXDM), 17)) # if a dive has both nitrogen and helium then we need to combine the m values using a weighting total_mvalues = np.zeros((len(nXDM), 17)) GFs = np.zeros((len(n_mvalues), 17)) maxGF = np.zeros(len(gaugeP)) for i in range(len(gaugeP)): gaugeP[i] = dive[1][i]/10 for j in range(17): nXDM[i][j] = gaugeP[i] * n_delta[j] heXDM[i][j] = gaugeP[i] * he_delta[j] n_mvalues[i][j] = (n_m_naught[j]/10) + nXDM[i][j] he_mvalues[i][j] = (he_m_naught[j]/10) + heXDM[i][j] GFs[i][j] = (cp[j][i] / n_mvalues[i][j]) * 100 maxGF[i] = round(np.max(GFs[i])) ''' print("\ngaugeP") print(gaugeP) print("\nnXDM") print(nXDM) print("\nheXDM") print(heXDM) print("\n n_mvalues") print(n_mvalues) print("\n gradient factors") print(GFs) print("\nmax GF") print(maxGF) ''' def helium_inert_pressure(ambient_pressures, gases): """ calculate inert gas partial pressure of helium at each time point :param ambient_pressures: float[]: a list of ambient pressures at each time point :param gasses: str: TODO: this will be a list later? :return: float[]: the inert gas partial pressure of helium at each time point """ # this will need to be changed later to get the actual value of helium helium = dc.HELIUM ap = ambient_pressures heIPP = np.zeros(len(ap)) for i in range(len(ap)): heIPP[i] = ap[i, 1] * helium return heIPP def nitrogen_inert_pressure(ambient_pressures, gases): """ calculate inert gas partial pressure of nitrogen at each time point :param ambient_pressures: float[]: a list of ambient pressures at each time point :param gasses: str: TODO: this will be a list later? :return: float[]: the inert gas partial pressure of nitrogen at each time point """ nitrogen = dc.NITROGEN ap = ambient_pressures nIPP = np.zeros(len(ap)) for i in range(len(ap)): nIPP[i] = ap[i, 1] * nitrogen return nIPP def totalIPP(nIPP, heIPP): """ calculate the total inert gas partial pressure :param niPP: float[]: the inert gas partial pressure of nitrogen at a given time points :param heIPP: float[]: the inert gas partial pressure of helium at a given time points :return: float[]: the total inert gas partial pressure at given time points """ total_IPP = np.zeros(len(nIPP)) for i in range(len(nIPP)): total_IPP[i] = nIPP[i] + heIPP[i] return total_IPP
nilq/baby-python
python
from collections import Counter from random import randint from django.http import JsonResponse from django.shortcuts import render from django.views.generic import View, TemplateView from .models import Article, portals, languages from utils.utils import parse_a_website BENCHMARK_URL = 'https://www.benchmark.pl/' BGG_URL = 'https://boardgamegeek.com/blog/1/boardgamegeek-news' ZWIAD_HISTORII_URL = 'https://www.zwiadowcahistorii.pl/' TOJUZBYLO_URL = 'https://tojuzbylo.pl/aktualnosci' COMPUTER_WORLD_WEB_URL = 'https://www.computerworld.pl/' PYTHON_WEB_URL = 'https://www.infoworld.com/uk/category/python/' REAL_PYTHON_WEB_URL = 'https://realpython.com/' BUSHCRAFTABLE_URL = 'https://bushcraftable.com/' class HomeView(TemplateView): template_name = 'homepage.html' class StatisticsView(View): def get(self, request): return render(self.request, 'statistics.html') def get_all_article_pie_chart_data(self): all_articles = list(Article.objects.all().values_list('portal', flat=True)) articles = Counter(all_articles) colors = [] for color in range(len(articles)): color = '#%06x' % randint(0, 0xFFFFFF) colors.append(color) context = { 'labels': list(articles.keys()), 'data': list(articles.values()), 'colors': colors, } return JsonResponse(data=context) def get_all_article_tab_chart_data(self): all_articles = list(Article.objects.all().values_list('portal', flat=True)) articles = Counter(all_articles) sorted_articles = dict(sorted(articles.items(), key=lambda item: item[1], reverse=True)) colors = [] for color in range(len(articles)): color = '#%06x' % randint(0, 0xFFFFFF) colors.append(color) context = { 'labels': list(sorted_articles.keys()), 'data': list(sorted_articles.values()), 'colors': colors, } return JsonResponse(data=context) def get_top_en_word_chart_data(self): all_titles = list(Article.objects.filter(language='ENG').values_list('title', flat=True)) top_words = [] for title in all_titles: split_title = title.split(' ') for word in split_title: if len(word) > 3: top_words.append(word.lower()) count_top_words = Counter(top_words) sorted_words = dict(sorted(count_top_words.items(), key=lambda item: item[1], reverse=True)) colors = [] for color in range(10): color = '#%06x' % randint(0, 0xFFFFFF) colors.append(color) context = { 'labels': list(sorted_words.keys())[:10], 'data': list(sorted_words.values())[:10], 'colors': colors, } return JsonResponse(data=context) def get_top_pl_word_chart_data(self): all_titles = list(Article.objects.filter(language='PL').values_list('title', flat=True)) top_words = [] for title in all_titles: split_title = title.split(' ') for word in split_title: if len(word) > 3: top_words.append(word.lower()) count_top_words = Counter(top_words) sorted_words = dict(sorted(count_top_words.items(), key=lambda item: item[1], reverse=True)) colors = [] for color in range(10): color = '#%06x' % randint(0, 0xFFFFFF) colors.append(color) context = { 'labels': list(sorted_words.keys())[:10], 'data': list(sorted_words.values())[:10], 'colors': colors, } return JsonResponse(data=context) class BenchmarkView(View): def get(self, *args, **kwargs): soup = parse_a_website(BENCHMARK_URL) # Getting data from soup data = [] sections = soup.find_all('section') section_3 = sections[3] section_3_divs = section_3.find_all('div') for div in section_3_divs[1:2]: benchmark_li = div.find_all('li') for li in benchmark_li: title = (li.find('a').text).split('\t\t\t')[1].split('\n')[0] url = f"http://benchmark.pl{li.find('a')['href']}" data.append((url, title)) # Creating Article 
        Article.check_if_article_already_exist(data, portals[0][1], languages[0][1])

        # Check that data is not empty
        if len(data) == 0:
            context = {'data': [('#', 'No data to view. Contact the administrator.')]}
            return render(self.request, 'benchmark.html', context)

        context = {
            'data': data,
        }
        return render(self.request, 'benchmark.html', context)


class BoardGamesGeekView(View):
    def get(self, *args, **kwargs):
        soup = parse_a_website(BGG_URL)

        # Getting data from soup
        data = []
        posts = soup.find_all("h3", {"class": 'post_title'})
        for post in posts:
            title = post.find('a').text
            url = f"https://boardgamegeek.com{post.find('a')['href']}"
            data.append((url, title))

        # Creating Article
        Article.check_if_article_already_exist(data, portals[1][1], languages[1][1])

        # Check that data is not empty
        if len(data) == 0:
            context = {'data': [('#', 'No data to view. Contact the administrator.')]}
            return render(self.request, 'bgg.html', context)

        context = {
            'data': data,
        }
        return render(self.request, 'bgg.html', context,)


class ArcheologyView(View):
    def get(self, *args, **kwargs):
        soup = parse_a_website(ZWIAD_HISTORII_URL)

        # Getting data from soup
        data = []
        divs_1 = soup.find_all("div", {"class": 'td_module_1 td_module_wrap td-animation-stack'})
        for div in divs_1:
            divs_2 = div.find_all('div', {'class': 'td-module-thumb'})
            for element in divs_2:
                title = element.find('a')['title']
                url = element.find('a')['href']
                img = element.find('img')['data-img-url']
                data.append((url, title, img))

        # Creating Article
        Article.check_if_article_already_exist(data, portals[3][1], languages[0][1])

        if len(data) == 0:
            context = {'data': [('#', 'No data to view. Contact the administrator.')]}
            return render(self.request, 'archeology.html', context)

        context = {
            'data': data,
        }
        return render(self.request, 'archeology.html', context)


class ToJuzByloView(View):
    def get(self, *args, **kwargs):
        soup = parse_a_website(TOJUZBYLO_URL)

        # Getting data from soup
        data = []
        tds = soup.find_all('td', {'class': 'col-1 col-first'})
        for td in tds:
            title = (td.find('h2', {'class': 'tytul'}).text).split('\n')[1]
            img = td.find('img')['src']
            href = td.find_all('a')[1]['href']
            url = f"https://tojuzbylo.pl/{href}"
            data.append((url, title, img))

        # Creating Article
        Article.check_if_article_already_exist(data, portals[2][1], languages[0][1])

        if len(data) == 0:
            context = {'data': [('#', 'No data to view. Contact the administrator.')]}
            return render(self.request, 'tojuzbylo.html', context)

        context = {
            'data': data,
        }
        return render(self.request, 'tojuzbylo.html', context,)


class ComputerWorldView(View):
    def get(self, *args, **kwargs):
        soup = parse_a_website(COMPUTER_WORLD_WEB_URL)

        # Getting data from soup
        data = []
        main_div = soup.find('div', {'class': 'left-side'})
        divs = main_div.find_all('div', {'class': 'row-item-icon'})
        for div in divs:
            img = div.find('img', {'class': 'img-fluid'})['src']
            url = f"https://www.computerworld.pl{div.find('a')['href']}"
            title = div.find('a')['href'].split(',')[0].split('/')[2].replace('-', ' ')
            data.append((url, title, img))

        # Creating Article
        Article.check_if_article_already_exist(data, portals[4][1], languages[0][1])

        if len(data) == 0:
            context = {'data': [('#', 'No data to view. Contact the administrator.')]}
            return render(self.request, 'computer_world_news.html', context)

        context = {
            'data': data,
        }
        return render(self.request, 'computer_world_news.html', context,)


class PythonView(View):
    def get(self, *args, **kwargs):
        soup = parse_a_website(PYTHON_WEB_URL)

        # Getting data from soup
        data = []
        divs = soup.find_all('div', {'class': 'post-cont'})
        figs = soup.find_all('figure', {'class': 'well-img'})
        for div, figure in zip(divs, figs):
            title = div.find('a').text
            url = f"https://www.infoworld.com{div.find('a')['href']}"
            img = figure.find('img')['data-original']
            data.append((url, title, img))

        # Creating Article
        Article.check_if_article_already_exist(data, portals[5][1], languages[1][1])

        if len(data) == 0:
            context = {'data': [('#', 'No data to view. Contact the administrator.')]}
            return render(self.request, 'python.html', context)

        context = {
            'data': data,
        }
        return render(self.request, 'python.html', context)


class RealPythonView(View):
    def get(self, *args, **kwargs):
        soup = parse_a_website(REAL_PYTHON_WEB_URL)

        # Getting data from soup
        data = []
        posts = soup.find_all('div', {'class': 'card border-0'})
        for post in posts:
            a_tags = post.find_all('a')[0]
            title = a_tags.find('img')['alt']
            img = a_tags.find('img')['src']
            url = f"https://realpython.com{a_tags['href']}"
            data.append((url, title, img))

        # Creating Article
        Article.check_if_article_already_exist(data, portals[6][1], languages[1][1])

        if len(data) == 0:
            context = {'data': [('#', 'No data to view. Contact the administrator.')]}
            return render(self.request, 'real_python.html', context)

        context = {
            'data': data,
        }
        return render(self.request, 'real_python.html', context)


class BushcraftableView(View):
    def get(self, *args, **kwargs):
        soup = parse_a_website(BUSHCRAFTABLE_URL)

        # Getting data from soup
        data = []
        post_headers = soup.find_all('h2', {'class': 'entry-title'})
        post_images = soup.find_all('div', {'class': 'post-image'})
        for header, image in zip(post_headers, post_images):
            url = header.find('a')['href']
            title = header.find('a').text
            img = image.find('img')['src']
            data.append((url, title, img))

        # Creating Article
        Article.check_if_article_already_exist(data, portals[7][1], languages[1][1])

        if len(data) == 0:
            context = {'data': [('#', 'No data to view. Contact the administrator.')]}
            return render(self.request, 'bushcraftable.html', context)

        context = {
            'data': data,
        }
        return render(self.request, 'bushcraftable.html', context)


# soup.find_all(lambda tag: tag.name == 'p' and 'et' in tag.text)
# https://www.livescience.com/news
# TODO: Statistics view. Count the articles and add a pie chart to the statistics view.
# TODO: Compare the settings with the KWL/Inforshare Django project and adjust them.
# detect language - https://pypi.org/project/langdetect/
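
# A minimal sketch for the "detect language" TODO above, assuming the
# langdetect package (https://pypi.org/project/langdetect/) is installed.
# detect() returns an ISO 639-1 code such as 'pl' or 'en'; 'PL'/'ENG' mirror
# the language labels already used by the StatisticsView queries above.
def guess_language(title: str) -> str:
    from langdetect import detect  # deferred import: optional dependency
    return 'PL' if detect(title) == 'pl' else 'ENG'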
nilq/baby-python
python
'''Utility functions and constants used by the selector and selectors classes'''
import re

RE_ALPHA = re.compile(r'\w')
SELECTOR_TYPE = {'XML': 'xml', 'TRXML': 'trxml'}
TRXML_SELECTOR_TYPE = {'SINGLETON': 'singleton', 'MULTIPLE': 'multiple'}


def valid_field_name(tag_name: str = '') -> bool:
    '''
    simple validation function:

    params:
        - tag_name: string

    output:
        - True (a ValueError is raised on invalid input)
    '''
    # must contain at least one word character (letter, digit or underscore)
    if RE_ALPHA.search(tag_name) is None:
        raise ValueError(
            f"tag_name '{tag_name}' needs at least one word char")
    return True


def _selector_target_type(selector) -> str:
    if "." in selector.text:
        selector_type = SELECTOR_TYPE['TRXML']
    else:
        selector_type = SELECTOR_TYPE['XML']
    return selector_type


def _selector_singleton_type(selector) -> str:
    item_index = selector.item_index
    if item_index.isdigit():
        selector_type = TRXML_SELECTOR_TYPE['SINGLETON']
    else:
        selector_type = TRXML_SELECTOR_TYPE['MULTIPLE']
    return selector_type


def _selector_same_itemgroup(selector) -> str:
    return selector.itemgroup_name


def selector_attribute(selectors, attribute_name) -> str:
    '''
    fetch the selector attribute, and check the consistency of all selectors

    params:
        - selectors: a list of selector objects
        - attribute_name: name of the attribute

    output:
        - attribute_value: string
    '''
    if attribute_name == 'selector_type':
        result = _selector_attribute_checking(selectors, _selector_target_type)
    elif attribute_name == 'trxml_selector_type':
        result = _selector_attribute_checking(selectors, _selector_singleton_type)
    elif attribute_name == 'same_itemgroup':
        result = _selector_attribute_checking(selectors, _selector_same_itemgroup)
    else:
        raise ValueError(
            f"selector attribute type '{attribute_name}' unknown"
        )
    return result


def _selector_attribute_checking(selectors, attrib_func):
    first_attrib = None
    for selector in selectors:
        if first_attrib is None:
            first_attrib = attrib_func(selector)
        elif first_attrib != attrib_func(selector):
            raise ValueError(
                f"""selector '{selector.text}' seems to have a different type
                than the others, e.g.,
                - xml vs. trxml,
                - or singleton vs. multi-item,
                - or a different itemgroup for multi-item selectors.
                Please check!
                """
            )
    return first_attrib
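
# A hedged usage sketch: the functions above only touch .text, .item_index
# and .itemgroup_name, so a namedtuple stands in for the real selector class
# here; the 'vacancy.0.*' paths are illustrative, not from the original module.
if __name__ == '__main__':
    from collections import namedtuple

    FakeSelector = namedtuple('FakeSelector', 'text item_index itemgroup_name')
    selectors = [
        FakeSelector('vacancy.0.title', '0', 'vacancy'),
        FakeSelector('vacancy.0.city', '0', 'vacancy'),
    ]
    # both selectors address a TRXML document ('.' in the path) ...
    assert selector_attribute(selectors, 'selector_type') == 'trxml'
    # ... pick a single item (numeric index) ...
    assert selector_attribute(selectors, 'trxml_selector_type') == 'singleton'
    # ... and share one itemgroup
    assert selector_attribute(selectors, 'same_itemgroup') == 'vacancy'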
nilq/baby-python
python
import random

import cv2
import matplotlib.pyplot as plt
import numpy as np


def visualize(img, det_boxes=None, gt_boxes=None, keypoints=None,
              is_show_label=True, show_cls_label=True, show_skeleton_labels=False,
              classes=None, thresh=0.5, name='detection', return_img=False):
    if is_show_label:
        if classes == 'voc':
            classes = [
                '__background__', 'aeroplane', 'bicycle', 'bird', 'boat',
                'bottle', 'bus', 'car', 'cat', 'chair', 'cow', 'diningtable',
                'dog', 'horse', 'motorbike', 'person', 'pottedplant', 'sheep',
                'sofa', 'train', 'tvmonitor'
            ]
        elif classes == 'coco':
            classes = [
                "__background__", "person", "bicycle", "car", "motorcycle", "airplane",
                "bus", "train", "truck", "boat", "traffic light", "fire hydrant",
                "stop sign", "parking meter", "bench", "bird", "cat", "dog", "horse",
                "sheep", "cow", "elephant", "bear", "zebra", "giraffe", "backpack",
                "umbrella", "handbag", "tie", "suitcase", "frisbee", "skis", "snowboard",
                "sports ball", "kite", "baseball bat", "baseball glove", "skateboard",
                "surfboard", "tennis racket", "bottle", "wine glass", "cup", "fork",
                "knife", "spoon", "bowl", "banana", "apple", "sandwich", "orange",
                "broccoli", "carrot", "hot dog", "pizza", "donut", "cake", "chair",
                "couch", "potted plant", "bed", "dining table", "toilet", "tv", "laptop",
                "mouse", "remote", "keyboard", "cell phone", "microwave", "oven",
                "toaster", "sink", "refrigerator", "book", "clock", "vase", "scissors",
                "teddy bear", "hair drier", "toothbrush"
            ]

    color_map = [(0, 0, 0), (0, 255, 0), (255, 128, 0), (255, 255, 0),
                 (255, 0, 255), (255, 128, 255), (128, 255, 128), (128, 255, 255),
                 (255, 255, 128), (0, 128, 255), (0, 255, 128), (255, 0, 128),
                 (0, 215, 255), (255, 0, 255), (255, 128, 0), (128, 128, 255),
                 (0, 255, 255), (0, 69, 255), (0, 69, 255), (255, 204, 204),
                 (204, 255, 255)]

    im = np.array(img).copy().astype(np.uint8)
    colors = dict()
    font = cv2.FONT_HERSHEY_SIMPLEX

    if det_boxes is not None:
        det_boxes = np.array(det_boxes)
        for det in det_boxes:
            bb = det[:4].astype(int)
            if is_show_label:
                if show_cls_label:
                    cls_id = int(det[4])
                    if cls_id == 0:
                        continue
                if len(det) > 4:
                    score = det[-1]
                else:
                    score = 1.

                if thresh < score:
                    if show_cls_label:
                        if cls_id not in colors:
                            colors[cls_id] = (random.random() * 128 + 128,
                                              random.random() * 128 + 128,
                                              random.random() * 128 + 128)
                        cv2.rectangle(im, (bb[0], bb[1]), (bb[2], bb[3]),
                                      colors[cls_id], 1)
                        if classes and len(classes) > cls_id:
                            cls_name = classes[cls_id]
                        else:
                            cls_name = str(cls_id)
                        cv2.putText(im, '{:s} {:.3f}'.format(cls_name, score),
                                    (bb[0], bb[1] - 2), font, 0.7, colors[cls_id], 2)
                    else:
                        cv2.putText(im, '{:.3f}'.format(score),
                                    (bb[0], bb[1] - 2), font, 0.7, (255, 0, 0), 2)
                        cv2.rectangle(im, (bb[0], bb[1]), (bb[2], bb[3]),
                                      (139, 139, 139), 1)
            else:
                cv2.rectangle(im, (bb[0], bb[1]), (bb[2], bb[3]),
                              (random.random() * 128 + 128,
                               random.random() * 128 + 128,
                               random.random() * 128 + 128), 1)

    if gt_boxes is not None:
        gt_boxes = np.array(gt_boxes)
        for gt in gt_boxes:
            bb = gt[:4].astype(int)
            if is_show_label:
                cls_id = int(gt[4])
                cv2.rectangle(im, (bb[0], bb[1]), (bb[2], bb[3]), (0, 0, 255), 3)
                if classes and len(classes) > cls_id:
                    cls_name = classes[cls_id]
                else:
                    cls_name = str(cls_id)
                cv2.putText(im, '{:s}'.format(cls_name), (bb[0], bb[1] - 2),
                            font, 0.5, (0, 0, 255), 1)
            else:
                cv2.rectangle(im, (bb[0], bb[1]), (bb[2], bb[3]), (0, 0, 255), 3)

    if keypoints is not None:
        keypoints = np.array(keypoints).astype(int)
        keypoints = keypoints.reshape(-1, 17, 3)

        if False:  # debug toggle: optionally keep only keypoints of confident detections
            idx = np.where(det_boxes[:, -1] > thresh)
            keypoints = keypoints[idx]
        for i in range(len(keypoints)):
            draw_skeleton(im, keypoints[i], show_skeleton_labels)

    if return_img:
        return im.copy()

    im = cv2.cvtColor(im, cv2.COLOR_BGR2RGB)
    plt.imshow(im)
    plt.show()

    # cv2.imshow(name, im)
    # cv2.waitKey(0)

    # while True:
    #     c = cv2.waitKey(0)
    #     if c == ord('d'):
    #         return
    #     elif c == ord('n'):
    #         break


def draw_skeleton(aa, kp, show_skeleton_labels=False):
    skeleton = [[16, 14], [14, 12], [17, 15], [15, 13], [12, 13], [6, 12],
                [7, 13], [6, 7], [6, 8], [7, 9], [8, 10], [9, 11], [2, 3],
                [1, 2], [1, 3], [2, 4], [3, 5], [4, 6], [5, 7]]

    kp_names = ['nose', 'l_eye', 'r_eye', 'l_ear', 'r_ear', 'l_shoulder',
                'r_shoulder', 'l_elbow', 'r_elbow', 'l_wrist', 'r_wrist',
                'l_hip', 'r_hip', 'l_knee', 'r_knee', 'l_ankle', 'r_ankle']

    for i, j in skeleton:
        if kp[i-1][0] >= 0 and kp[i-1][1] >= 0 and kp[j-1][0] >= 0 and kp[j-1][1] >= 0 and \
                (len(kp[i-1]) <= 2 or (len(kp[i-1]) > 2 and kp[i-1][2] > 0.1 and kp[j-1][2] > 0.1)):
            cv2.line(aa, tuple(kp[i-1][:2]), tuple(kp[j-1][:2]), (0, 255, 255), 2)

    for j in range(len(kp)):
        if kp[j][0] >= 0 and kp[j][1] >= 0:
            if len(kp[j]) <= 2 or (len(kp[j]) > 2 and kp[j][2] > 1.1):
                # third value above 1.1, e.g. a COCO-style visibility flag of 2
                cv2.circle(aa, tuple(kp[j][:2]), 2, tuple((0, 0, 255)), 2)
            elif len(kp[j]) <= 2 or (len(kp[j]) > 2 and kp[j][2] > 0.1):
                cv2.circle(aa, tuple(kp[j][:2]), 2, tuple((255, 0, 0)), 2)

            if show_skeleton_labels and (len(kp[j]) <= 2 or (len(kp[j]) > 2 and kp[j][2] > 0.1)):
                cv2.putText(aa, kp_names[j], tuple(kp[j][:2]),
                            cv2.FONT_HERSHEY_SIMPLEX, 0.5, (255, 255, 0))
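
# A hedged usage sketch (values are illustrative): draw one fake detection on
# a blank canvas. The box layout assumed from the code above is
# [x1, y1, x2, y2, class_id, score].
if __name__ == '__main__':
    canvas = np.zeros((200, 200, 3), dtype=np.uint8)
    fake_det = [[20, 20, 120, 160, 1, 0.95]]  # class 1 == 'person' in COCO
    out = visualize(canvas, det_boxes=fake_det, classes='coco',
                    thresh=0.5, return_img=True)
    print(out.shape)  # (200, 200, 3)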
nilq/baby-python
python
import webbrowser


class RomanNumeralCipher:
    '''A Python implementation of the Roman numeral cipher'''

    url = 'https://www.britannica.com/topic/Roman-numeral'

    def __init__(self):
        self.val = [
            1000, 900, 500, 400,
            100, 90, 50, 40,
            10, 9, 5, 4, 1
        ]
        self.syb = [
            "M", "CM", "D", "CD",
            "C", "XC", "L", "XL",
            "X", "IX", "V", "IV", "I"
        ]

    def about(self):
        '''Read about the Roman numeral cipher online'''
        webbrowser.open(self.url)

    def encrypt(self, num: int) -> str:
        result = ''
        if not isinstance(num, int):
            return 'Cannot cast to Roman cipher'

        i = 0
        while num > 0:
            for _ in range(num // self.val[i]):
                result += self.syb[i]
                num -= self.val[i]
            i += 1
        return result

    def decrypt(self, msg: str) -> int:
        # subtractive pairs must be consumed before the single symbols
        list_ = ['CM', 'CD', 'XC', 'XL', 'IX', 'IV']
        num = 0
        for ele in list_:
            if ele in msg:
                msg = msg.replace(ele, '')
                num += self.val[self.syb.index(ele)]
        for ele in msg:
            num += self.val[self.syb.index(ele)]
        return num


if __name__ == '__main__':
    cipher = RomanNumeralCipher()
    message = 3349

    encrypted = cipher.encrypt(message)
    decrypted = cipher.decrypt(encrypted)

    print(encrypted)
    print(decrypted)
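
    # A hedged round-trip check extending the demo above: encrypt and decrypt
    # should be inverses over the classic Roman range 1..3999.
    from random import randint
    for _ in range(5):
        n = randint(1, 3999)
        assert cipher.decrypt(cipher.encrypt(n)) == n, n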
nilq/baby-python
python
import json

import disnake as discord
from disnake.ext import commands


class Active_Check(commands.Cog):

    def __init__(self, bot):
        self.bot = bot

    @commands.command()
    async def activate(self, ctx, cog=None):
        with open('utils/json/active_check.json', 'r') as f:
            data = json.load(f)
        if not cog:
            if str(ctx.guild.id) not in data:
                new_checks = {
                    "Administration": "true",
                    "Automod": "true",
                    "Channel": "true",
                    "Fun": "true",
                    "Help": "true",
                    "Info": "true",
                    "Math": "true",
                    "Moderation": "true",
                    "Music": "true",
                    "Poll": "true",
                    "Roles": "true",
                    "Rules": "true",
                    "Setup": "true",
                    "Ticket": "true",
                    "Timers": "true",
                    "Translator": "true",
                    "Utilities": "true",
                    "Verify": "true"
                }
                data[str(ctx.guild.id)] = new_checks
                with open('utils/json/active_check.json', 'w') as f:
                    json.dump(data, f, indent=4)
                embed = discord.Embed(description=f'The server `{ctx.guild.name}` was **registered successfully!**',
                                      color=discord.Color.green())
                await ctx.send(embed=embed)
            else:
                embed = discord.Embed(description=f'The server `{ctx.guild.name}` is **already registered!**',
                                      color=discord.Color.green())
                await ctx.send(embed=embed)
        elif str(ctx.guild.id) not in data:
            embed = discord.Embed(
                description='Your server does not seem to be **registered!** Please **register your server** first **with the command** `?activate`',
                color=discord.Color.red())
            await ctx.send(embed=embed)
        elif data[str(ctx.guild.id)][f"{cog}"] == 'true':
            embed = discord.Embed(description=f'The module `{cog}` is **already activated!**',
                                  color=discord.Color.green())
            await ctx.send(embed=embed)
        elif data[str(ctx.guild.id)][f"{cog}"] == 'false':
            data[str(ctx.guild.id)][f"{cog}"] = 'true'
            with open('utils/json/active_check.json', 'w') as f:
                json.dump(data, f, indent=4)
            embed = discord.Embed(description=f'The module `{cog}` **was deactivated** and has **now been activated!**',
                                  color=discord.Color.green())
            await ctx.send(embed=embed)
        else:
            embed = discord.Embed(description='**Unknown error!** Please try again in a few seconds',
                                  color=discord.Color.red())
            await ctx.send(embed=embed)

    @commands.command()
    async def deactivate(self, ctx, cog):
        with open('utils/json/active_check.json', 'r') as f:
            data = json.load(f)
        if str(ctx.guild.id) not in data:
            embed = discord.Embed(
                description='Your server does not seem to be **registered!** Please **register your server** first **with the command** `?activate`',
                color=discord.Color.red())
            await ctx.send(embed=embed)
        elif data[str(ctx.guild.id)][f"{cog}"] == 'false':
            embed = discord.Embed(description=f'The module `{cog}` is **already deactivated!**',
                                  color=discord.Color.green())
            await ctx.send(embed=embed)
        elif data[str(ctx.guild.id)][f"{cog}"] == 'true':
            data[str(ctx.guild.id)][f"{cog}"] = 'false'
            with open('utils/json/active_check.json', 'w') as f:
                json.dump(data, f, indent=4)
            embed = discord.Embed(description=f'The module `{cog}` **was activated** and has **now been deactivated!**',
                                  color=discord.Color.green())
            await ctx.send(embed=embed)
        else:
            embed = discord.Embed(description='**Unknown error!** Please try again in a few seconds',
                                  color=discord.Color.red())
            await ctx.send(embed=embed)

    @commands.command()
    async def check(self, ctx, cog):
        with open('utils/json/active_check.json', 'r') as f:
            data = json.load(f)
        if str(ctx.guild.id) not in data:
            embed = discord.Embed(
                description='Your server does not seem to be **registered!** Please **register your server** first **with the command** `?activate`',
                color=discord.Color.red())
            await ctx.send(embed=embed)
        elif data[str(ctx.guild.id)][f"{cog}"] == 'false':
            embed = discord.Embed(description=f'The module `{cog}` is **currently deactivated!**',
                                  color=discord.Color.green())
            await ctx.send(embed=embed)
        elif data[str(ctx.guild.id)][f"{cog}"] == 'true':
            embed = discord.Embed(description=f'The module `{cog}` is **currently activated!**',
                                  color=discord.Color.green())
            await ctx.send(embed=embed)
        else:
            embed = discord.Embed(description='**Unknown error!** Please try again in a few seconds',
                                  color=discord.Color.red())
            await ctx.send(embed=embed)

    @commands.command()
    async def check_all(self, ctx):
        with open('utils/json/active_check.json', 'r') as f:
            data = json.load(f)
        if str(ctx.guild.id) not in data:
            embed = discord.Embed(
                description='Your server does not seem to be **registered!** Please **register your server** first **with the command** `?activate`',
                color=discord.Color.red())
            await ctx.send(embed=embed)
        elif str(ctx.guild.id) in data:
            embed = discord.Embed(description=f'{data[str(ctx.guild.id)]}',
                                  color=discord.Color.green())
            await ctx.send(embed=embed)
        else:
            return


def setup(bot):
    bot.add_cog(Active_Check(bot))
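
# A hedged sketch, not part of the original cog: other cogs could gate their
# commands on the same JSON file, e.g. from inside a command check. The
# function name is hypothetical; the file path and 'true'/'false' string
# convention match the cog above.
def module_enabled(guild_id: int, cog_name: str) -> bool:
    with open('utils/json/active_check.json', 'r') as f:
        data = json.load(f)
    return data.get(str(guild_id), {}).get(cog_name) == 'true'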
nilq/baby-python
python
import sys
sys.path.append("../common/tests")
from test_utils import *
import test_common
sys.path.insert(0, '../../../../build/production/config/schema-transformer/')

from vnc_api.vnc_api import *
import uuid


class STTestCase(test_common.TestCase):
    def setUp(self):
        super(STTestCase, self).setUp()
        self._svc_mon_greenlet = gevent.spawn(test_common.launch_svc_monitor,
                                              self._api_server_ip, self._api_server_port)
        self._st_greenlet = gevent.spawn(test_common.launch_schema_transformer,
                                         self._api_server_ip, self._api_server_port)

    def tearDown(self):
        self._svc_mon_greenlet.kill()
        self._st_greenlet.kill()
        super(STTestCase, self).tearDown()

    def create_virtual_machine(self, name, vn, ipaddress):
        vm_instance = VirtualMachine(name)
        self._vnc_lib.virtual_machine_create(vm_instance)
        fq_name = [name]
        fq_name.append('0')
        vmi = VirtualMachineInterface(parent_type='virtual-machine', fq_name=fq_name)
        vmi.set_virtual_network(vn)
        self._vnc_lib.virtual_machine_interface_create(vmi)
        ip = InstanceIp(vm_instance.name + '.0')
        ip.set_virtual_machine_interface(vmi)
        ip.set_virtual_network(vn)
        ip.set_instance_ip_address(ipaddress)
        self._vnc_lib.instance_ip_create(ip)
        return vm_instance

    def vmi_clean(self, vm_instance):
        fq_name = vm_instance.fq_name
        fq_name.append('0')
        try:
            vmi = self._vnc_lib.virtual_machine_interface_read(fq_name=fq_name)
        except NoIdError:
            return
        ips = vmi.get_instance_ip_back_refs()
        for ref in ips or []:
            self._vnc_lib.instance_ip_delete(id=ref['uuid'])
        self._vnc_lib.virtual_machine_interface_delete(id=vmi.uuid)

    def delete_virtual_machine(self, vm_instance):
        self.vmi_clean(vm_instance)
        self._vnc_lib.virtual_machine_delete(id=vm_instance.uuid)

    def create_network_policy_with_multiple_rules(self, rules):
        pentrys = []
        for rule in rules:
            src_addr = rule["src"]
            if src_addr["type"] == "vn":
                vn = src_addr["value"]
                addr1 = AddressType(virtual_network=vn.get_fq_name_str())
            else:
                cidr = src_addr["value"].split('/')
                pfx = cidr[0]
                pfx_len = int(cidr[1])
                addr1 = AddressType(subnet=SubnetType(pfx, pfx_len))
            dst_addr = rule["dst"]
            if dst_addr["type"] == "vn":
                vn = dst_addr["value"]
                addr2 = AddressType(virtual_network=vn.get_fq_name_str())
            else:
                cidr = dst_addr["value"].split('/')
                pfx = cidr[0]
                pfx_len = int(cidr[1])
                addr2 = AddressType(subnet=SubnetType(pfx, pfx_len))
            # src_port = rule["src-port"]
            src_port = PortType(-1, 0)
            # dst_port = rule["dst-port"]
            dst_port = PortType(-1, 0)
            action = rule["action"]
            action_list = ActionListType(simple_action=action)
            prule = PolicyRuleType(direction=rule["direction"],
                                   protocol=rule["protocol"],
                                   src_addresses=[addr1], dst_addresses=[addr2],
                                   src_ports=[src_port], dst_ports=[dst_port],
                                   action_list=action_list)
            pentrys.append(prule)
        pentry = PolicyEntriesType(pentrys)
        np = NetworkPolicy(str(uuid.uuid4()), network_policy_entries=pentry)
        self._vnc_lib.network_policy_create(np)
        return np
    # end create_network_policy_with_multiple_rules

    def delete_network_policy(self, policy, auto_policy=False):
        action_list = policy.network_policy_entries.policy_rule[0].action_list
        if action_list:
            for service in action_list.apply_service or []:
                si = self._vnc_lib.service_instance_read(fq_name_str=service)
                st_ref = si.get_service_template_refs()
                st = self._vnc_lib.service_template_read(id=st_ref[0]['uuid'])
                self._vnc_lib.service_instance_delete(id=si.uuid)
                self._vnc_lib.service_template_delete(id=st.uuid)
            # end for service
        # end if action_list
        if not auto_policy:
            self._vnc_lib.network_policy_delete(id=policy.uuid)
    # end delete_network_policy(policy)
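
# A hedged example of the rule shape consumed by
# create_network_policy_with_multiple_rules above, inferred from the keys it
# reads; the values (CIDRs, direction, protocol, action) are illustrative.
# "vn" sources/destinations take a VirtualNetwork object instead of a CIDR.
EXAMPLE_RULE = {
    "src": {"type": "cidr", "value": "10.0.0.0/24"},
    "dst": {"type": "cidr", "value": "20.0.0.0/24"},
    "direction": "<>",
    "protocol": "any",
    "action": "pass",
}
# e.g.: self.create_network_policy_with_multiple_rules([EXAMPLE_RULE])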
nilq/baby-python
python
from django.contrib import admin

from unecorn.models import *

admin.site.register(Discount)
admin.site.register(Category)
admin.site.register(Company)
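
# An equivalent, hedged alternative using Django's @admin.register decorator;
# kept commented out because registering a model twice raises AlreadyRegistered.
# 'list_display' is an illustrative customization, not from the original file.
#
# @admin.register(Discount)
# class DiscountAdmin(admin.ModelAdmin):
#     list_display = ('__str__',)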
nilq/baby-python
python