text
stringlengths
29
850k
# -*- coding: utf-8 -*-
# NOTE(review): this module's newlines were collapsed in the source; as
# presented, everything after the coding cookie sat inside this comment line
# and the file could not import.  Reconstructed into standard South layout.
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models


class Migration(SchemaMigration):
    """Add the nullable ``default_organization`` FK to ``landing.SEEDUser``."""

    def forwards(self, orm):
        # Adding field 'SEEDUser.default_organization'
        db.add_column(u'landing_seeduser', 'default_organization',
                      self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='default_users', null=True, to=orm['orgs.Organization']),
                      keep_default=False)

    def backwards(self, orm):
        # Deleting field 'SEEDUser.default_organization'
        # (the column is stored with the conventional FK "_id" suffix)
        db.delete_column(u'landing_seeduser', 'default_organization_id')

    # Frozen ORM snapshot used by South to build the `orm` passed above.
    models = {
        u'auth.group': {
            'Meta': {'object_name': 'Group'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        u'auth.permission': {
            'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        u'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        u'landing.seeduser': {
            'Meta': {'object_name': 'SEEDUser'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'default_custom_columns': ('djorm_pgjson.fields.JSONField', [], {'default': '{}'}),
            'default_organization': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'default_users'", 'null': 'True', 'to': u"orm['orgs.Organization']"}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'show_shared_buildings': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
            'username': ('django.db.models.fields.EmailField', [], {'unique': 'True', 'max_length': '75'})
        },
        u'orgs.organization': {
            'Meta': {'ordering': "['name']", 'object_name': 'Organization'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'parent_org': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'child_orgs'", 'null': 'True', 'to': u"orm['orgs.Organization']"}),
            'query_threshold': ('django.db.models.fields.IntegerField', [], {'max_length': '4', 'null': 'True', 'blank': 'True'}),
            'users': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'orgs'", 'symmetrical': 'False', 'through': u"orm['orgs.OrganizationUser']", 'to': u"orm['landing.SEEDUser']"})
        },
        u'orgs.organizationuser': {
            'Meta': {'ordering': "['organization', '-role_level']", 'object_name': 'OrganizationUser'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'organization': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orgs.Organization']"}),
            'role_level': ('django.db.models.fields.IntegerField', [], {'default': '20'}),
            'status': ('django.db.models.fields.CharField', [], {'default': "'pending'", 'max_length': '12'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['landing.SEEDUser']"})
        }
    }

    complete_apps = ['landing']
U of A med students' simple clay pots make huge difference to Kenyans' drinking water. But the deceptively simple clay pot has the potential to make a huge difference to the health of people without access to clean drinking water. Local potter Musa Omumia reviews new designs for Kenyan Ceramic Project prototypes. How it works is that the wet clay is blended with a specific ratio of organic material — or temper — that burns off in the firing process, making the material water-permeable. “So, essentially you get a porous pot that lets water through, but not bacteria,” says Saleh. They also made a believer out of U of A biology professor — and winner of the University of Alberta Cup, the U of A’s highest academic honour — Mike Belosevic. It was Belosevic who gave Saleh and three other students access to a research lab, after they’d recruited a local potter, Lorris Williams, to help create their filter. Saleh became interested in the topic of clean water while researching HIV transmission rates from breast milk in Ecuador, which led to the question of why women were breastfeeding if they had HIV. It boiled down to the fact that the possible risk of HIV was less threatening than the immediate risk of dysentery. But, after seven weeks, the students managed to arrange for a local sugar factory to provide all the necessary organic temper materials. “It’s a waste product from their production, completely renewable and completely free. We found sources of clay from a village of potters and now we have a preliminary workshop set up,” says Saleh, adding that UNICEF has bought into the project. For more info on the Kenya Ceramic Project go to www.kenyanceramics.org. Close to half of all Kenyans do not have access to clean, safe drinking water. Donations to the Kenya Ceramic Project help to ensure continued production of water-filtering ceramic pots in a community-based factory located in Western Kenya. 
Financial support of the project is 100% tax deductible and will have a lasting impact on the lives of Kenyan communities.
""" DO NOT MODIFY Time steps a system using runge-kutta 4th order numerical integration Uses a time step of h """ import numpy class RungeKutta: def __init__(self, state,time, h,model): """ Initialise simulation state -- starting state time -- time h -- runga kutta time step model -- physical model """ self.n = len(state) self.time=time; self.h=h; self.model=model; self.state=state; self.init() def init(self): self.state_dot1 = numpy.zeros(self.n) self.state_dot2 = numpy.zeros(self.n) self.state_dot3 = numpy.zeros(self.n) self.state_dot4 = numpy.zeros(self.n) self.state1 = numpy.zeros(self.n) self.state2 = numpy.zeros(self.n) self.state3 = numpy.zeros(self.n) def stepRK(self): """ Perform one runga kutta time step """ # 1. initial state_dotocity at time tstart = self.time self.model._eval(tstart, self.state, self.state_dot1) # 2. state_dotocity at time+h/2 using previous state_dot estimate tmid = self.time + self.h/ 2.0 for i in xrange(self.n): self.state1[i] = self.state[i] + self.h * self.state_dot1[i] * 0.5 self.model._eval(tmid, self.state1, self.state_dot2) # 3. redo using new state_dotocity estimate for i in xrange(self.n): self.state2[i] = self.state[i] + self.h * self.state_dot2[i] * 0.5 self.model._eval(tmid, self.state1, self.state_dot3) # 4. estimate state_dotocity at end tend = self.time + self.h; for i in xrange(self.n): self.state3[i] = self.state[i] + self.h * self.state_dot3[i] * 0.5 self.model._eval(tend, self.state3, self.state_dot4) for i in xrange(self.n): self.state[i] += self.h * (self.state_dot1[i] + 2.0 * self.state_dot2[i] + 2.0 * self.state_dot3[i] + self.state_dot4[i])/ 6.0 self.time +=self.h def reset(self): """ Reset the model """ self.time=0.0 self.model.reset() self.init() def step(self,dt): """ Perform runga-kutta time steps to advance the model by dt """ tend=self.time+dt while(self.time < tend): self.stepRK() self.model.time=self.time
19.10.2014: HBC The Super Trio: Scott Henderson (guitar), Jeff Berlin (bass guitar) and Lenny White (drums - not Cobham unfortunately). Gee, a little late. You could have posted it a little earlier. Billy may have been there if he was in town, and he is in Moscow more often than you know; his wife lives here. He doesn't like playing in Moscow very much, though.
#!/usr/bin/env python
# -*- coding: utf-8 -*-

from argparse import PARSER

# Copyright 2015 Garret Fick <garret@ficksworkshop.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""
Excellon Settings Definition File module
====================

**Excellon file classes**

This module provides Excellon file classes and parsing utilities
"""

import re

try:
    from cStringIO import StringIO
except(ImportError):
    from io import StringIO

from .cam import FileSettings


def loads(data):
    """Read settings file information and return a FileSettings.

    Parameters
    ----------
    data : string
        string containing Excellon settings file contents

    Returns
    -------
    file settings: FileSettings
    """
    return ExcellonSettingsParser().parse_raw(data)


def map_coordinates(value):
    """Translate a COORDINATES token into 'absolute' or 'relative'."""
    return 'absolute' if value == 'ABSOLUTE' else 'relative'


def map_units(value):
    """Translate an OUTPUT-UNITS token into 'inch' or 'metric'."""
    return 'inch' if value == 'ENGLISH' else 'metric'


def map_boolean(value):
    """Translate a YES/NO token into a bool."""
    return value == 'YES'


# Recognized settings keys, mapped to (value converter, FileSettings key).
SETTINGS_KEYS = {
    'INTEGER-PLACES': (int, 'format-int'),
    'DECIMAL-PLACES': (int, 'format-dec'),
    'COORDINATES': (map_coordinates, 'notation'),
    'OUTPUT-UNITS': (map_units, 'units'),
}


class ExcellonSettingsParser(object):
    """Excellon Settings PARSER

    Parameters
    ----------
    None
    """

    def __init__(self):
        self.values = {}
        self.settings = None

    def parse_raw(self, data):
        """Parse the raw settings text and build the FileSettings object."""
        for raw_line in StringIO(data):
            self._parse(raw_line.strip())
        self.settings = FileSettings(
            notation=self.values['notation'],
            units=self.values['units'],
            format=(self.values['format-int'], self.values['format-dec']))
        return self.settings

    def _parse(self, line):
        """Record one "KEY VALUE" line; silently skip anything else."""
        tokens = line.split()
        if len(tokens) != 2:
            return
        type_info = SETTINGS_KEYS.get(tokens[0])
        if type_info is None:
            return
        converter, target_key = type_info
        # Convert the value to the expected type before storing it.
        self.values[target_key] = converter(tokens[1])
After separation, superannuation is treated as property under the Family Law Act. However, it is different from other types of property because it is held in a trust and is subject to rules on accessing it. The laws on superannuation splitting apply to both married and de facto couples, except for de facto couples in Western Australia, who are not eligible to split superannuation. However, the value of any superannuation benefits will be taken into account in financial settlements. Superannuation splitting will not make the superannuation benefit into a cash asset and it will remain subject to the applicable superannuation laws. You may enter into a formal written agreement to split superannuation. To do so, both you and your partner must have your own lawyer. The lawyer must sign a certificate stating that independent legal advice about the agreement and your rights has been given to both of you. The agreement is not registered in court and you do not need to go to court or give a copy to the court. Each of you must keep a copy and a copy is sent to the trustee of the Superannuation fund. You can choose to have your agreement made into a formal court order by filing consent financial orders to achieve superannuation splitting. If you cannot reach an agreement, you can ask the court for an order to split superannuation. In order to get the information to value the superannuation a Form 6 Declaration, a Superannuation Information Request Form and the appropriate Superannuation Information Form must be sent to the trustee of the fund. The fund may also charge a fee. Your lawyer can do this for you or you can request the information yourself using the Family Court’s Superannuation Information Kit. The information from the trustee may be enough to value the superannuation but as superannuation can be complex you may need to seek an expert valuation. You should get legal advice about this. 
The superannuation splitting legislation sets out methods for valuing most types of superannuation, but there are exceptions and these may have an approved different valuation method. The Attorney General approves methods or factors for determining the value of certain superannuation funds and information is available on their website. If you and your partner have reached an agreement, then an Application for Consent Orders can be filed in the Family Court, with a draft consent order setting out the agreement. If the court approves your application the orders can be made in chambers without either of you attending court. If you cannot agree on the split then the superannuation benefits will be dealt with by the court in making financial orders. In deciding on a split the court is required to value the interest in accordance with any method set out in the Family Law (Superannuation) Regulations which provide methods for valuing superannuation interests and set out the way in which the payment split is to be put into effect. The superannuation benefits must be valued and the superannuation fund trustee must be advised about the orders you are seeking before the court orders that they be split in property settlement proceedings. The trustee has the opportunity to attend the court hearing and can object to the superannuation splitting orders that you are seeking. If the superannuation order is made, whether by consent or after a hearing, you must provide a sealed copy of the order to the trustee. When payment from a superannuation interest becomes payable to the fund member, the amount decided under the superannuation split will be paid to the non-member and the remainder will be paid to the member. Payment splitting does not usually create a new superannuation interest for the non-member. 
However, in some instances, if there is a payment splitting agreement or order, the splitting laws may allow the creation of a new interest for the non-member, or may allow a transfer or roll-out of benefits for the non-member to another superannuation fund. These options are known as interest splitting and it lets the non-member access their share of the entitlements independently of the member.
import socket, threading, traceback
import curses
import curses.textpad
import datetime
from time import sleep
import argparse

# Command line: optional listen port (default 80).
parser = argparse.ArgumentParser()
parser.add_argument('-p', '--port', default=80, type=int,
                    help='Port number to listen on (default is 80)')
args = parser.parse_args()
PORT = args.port

socket.setdefaulttimeout(5)
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.bind(('', PORT))
s.listen(4)

clients = []  # list of clients connected
lock = threading.Lock()  # guards mutation of `clients`
exitScript = False


class gui(threading.Thread):
    """Curses front-end: client list pane, log-buffer pane and command box."""

    def run(self):
        curses.wrapper(self.draw_ui)

    def draw_ui(self, stdscr):
        global clients
        global exitScript
        height, width = stdscr.getmaxyx()
        clientWin = stdscr.subwin(height-3, 17, 0, 0)
        clientWin.scrollok(True)
        bufferWin = stdscr.subwin(height-3, width-18, 0, 18)
        bufferWin.scrollok(True)
        inputWin = stdscr.subwin(2, width-1, height-3, 0)
        inputWin.scrollok(True)
        tb = curses.textpad.Textbox(inputWin)
        stdscr.nodelay(1)  # non-blocking getch()
        k = 0
        sendCommand = False
        closeActive = False
        oldHeight = 0
        oldWidth = 0
        while k != 276:  # 276 is keycode for F12 (quit)
            clientWin.clear()
            if len(clients) == 0:
                bufferWin.clear()
                bufferWin.refresh()
            # Re-fit the panes if the terminal was resized.
            height, width = stdscr.getmaxyx()
            if oldHeight != height or oldWidth != width:
                clientWin.resize(height-3, 17)
                bufferWin.resize(height-3, width-18)
                inputWin.resize(2, width-1)
            if k == 9:  # 9 is keycode for tab: cycle the active client
                for idx in range(0, len(clients)):
                    if clients[idx].active:
                        clients[idx].active = False
                        if idx >= len(clients)-1:
                            newIdx = 0
                        else:
                            newIdx = idx+1
                        clients[newIdx].active = True
                        clients[newIdx].updated = True
                        break
            elif k == 10:  # 10 is keycode for enter: queue the typed command
                sendCommand = True
            elif k == 274:  # 274 is keycode for F10: close the active client
                closeActive = True
            elif k != 0:
                tb.do_command(k)
            lineNum = 0
            for conn in clients:
                if conn.active:
                    if sendCommand:
                        conn.command = tb.gather()
                        sendCommand = False
                        inputWin.clear()
                    if closeActive:
                        conn.toClose = True
                        closeActive = False
                    clientWin.addstr(lineNum, 0, "*"+conn.r_ip+"\n")
                    # Update buffer display to show updated log buffer
                    if conn.updated:
                        bufferWin.clear()
                        bufferWin.addstr(0, 0, conn.buffer)
                        bufferWin.refresh()
                        conn.updated = False
                else:
                    clientWin.addstr(lineNum, 0, " "+conn.r_ip+"\n")
                lineNum += 1
            clientWin.refresh()
            inputWin.refresh()
            k = 0
            try:
                k = stdscr.getch()
            except:
                pass
            #sleep(0.1)
        exitScript = True


class remoteConn(threading.Thread):
    """One connected client: logs received data, sends queued commands."""
    active = False
    toClose = False
    updated = False
    buffer = ""
    command = ""

    def __init__(self, sock):
        threading.Thread.__init__(self)
        socket, address = sock
        self.socket = socket
        self.r_ip, self.r_port = address
        # Unbuffered binary log file, one per connection.
        logname = self.r_ip+"--"+datetime.datetime.now().strftime("%Y-%m-%d-%H-%M-%S-%f")+".log"
        self.logfile = open(logname, "ab", 0)

    def updateBuffer(self, data):
        self.buffer += data.decode("utf-8")
        self.logfile.write(data)
        self.updated = True

    def run(self):
        global clients
        global exitScript
        global lock
        lock.acquire()
        clients.append(self)
        if len(clients) == 1:
            self.active = True  # first client becomes the active one
        lock.release()
        self.socket.settimeout(1)
        while exitScript == False and self.toClose == False:
            try:
                data = self.socket.recv(1024)
                if not data:
                    break  # peer closed the connection
                self.updateBuffer(data)
            except Exception as e:
                pass  # recv timeout: fall through to check for queued commands
            if self.command != "":
                self.socket.send(bytearray(self.command, "utf-8"))
                self.updateBuffer(bytearray(self.command, "utf-8"))
                self.command = ""
        self.socket.close()
        lock.acquire()
        clients.remove(self)
        # Hand the "active" marker to another client if we held it.
        if self.active and len(clients) >= 1:
            clients[0].active = True
            clients[0].updated = True
        lock.release()
        self.logfile.close()


threads = []
while exitScript == False:  # wait for socket to connect
    # BUG FIX: the original did threads.append(gui().start()) — Thread.start()
    # returns None, so the list held only None and the join loop below waited
    # on nothing (silently, thanks to the bare except).  Keep the Thread
    # objects themselves.
    ui = gui()
    ui.start()
    threads.append(ui)
    s.settimeout(1)
    while exitScript == False:
        try:
            conn = remoteConn(s.accept())
            conn.start()
            threads.append(conn)
        except:
            pass
for t in threads:
    try:
        t.join()
    except:
        pass
print("exiting")
Nice 85+ year old RPPC real photo postcard showing store fronts and vintage cars on St. Vincent Street in Glasgow Scotland. At left are signs for: ARROL JOHNSTON, CROSSLEY CARS, ROYAL TYPEWRITERS AND BRYSON BROS. This 2015 Indian Scout has been customized to resemble the Indian Scout from World War II owned by Indian Motorcycle of Springfield, located in Westfield, MA. The fenders were cut slightly, the bike custom painted military green with a polished finish, a Thompson M1 A1 military machine gun scabbard was constructed, mounted on the front wheel assembly & filled with a Thompson replica to go along with the bike. A tool box was constructed & mounted on the left mid bike area, a tractor seat was manufactured along with military leather saddle bags, rear fender leather cover & leather windshield shroud. 2002 Fatboy Limited Edition Lot of custom work. This bike was $24,000 when new and has $6000 in custom work. 95ci 100hp/100ft lbs 10.5 to 1 forged pistons CNC ported heads with compression relief valves S&S 585 gear drive cams Python Exhaust with hidden crossover Limited Edition Harley Paint set Custom Harley Wheels Floating Rotors 12,491 miles currently Also includes seat for riding with a passenger and a back rest for a passenger. Always kept garaged and on a tender.
from rx.core import ObserverBase
from rx.disposables import SingleAssignmentDisposable


class JoinObserver(ObserverBase):
    """Observer that feeds materialized notifications from one source
    into the join plans that are currently watching it."""

    def __init__(self, source, on_error):
        super(JoinObserver, self).__init__()
        self.source = source
        self.on_error = on_error
        self.queue = []          # pending notifications consumed by the plans
        self.active_plans = []   # plans currently interested in this source
        self.subscription = SingleAssignmentDisposable()
        self.is_disposed = False

    def _on_next_core(self, notification):
        if self.is_disposed:
            return
        # An error notification short-circuits straight to the join's handler.
        if notification.kind == 'E':
            self.on_error(notification.exception)
            return
        self.queue.append(notification)
        # Iterate over a snapshot: matching a plan may remove it from the list.
        for plan in self.active_plans[:]:
            plan.match()

    def _on_error_core(self, error):
        # Errors arrive materialized through _on_next_core instead.
        return NotImplemented

    def _on_completed_core(self):
        # Completion arrives materialized through _on_next_core instead.
        return NotImplemented

    def add_active_plan(self, active_plan):
        self.active_plans.append(active_plan)

    def subscribe(self):
        # Materialize so errors/completion flow through on_next as values.
        self.subscription.disposable = self.source.materialize().subscribe(self)

    def remove_active_plan(self, active_plan):
        self.active_plans.remove(active_plan)
        # No plan cares about this source any more: tear it down.
        if not len(self.active_plans):
            self.dispose()

    def dispose(self):
        super(JoinObserver, self).dispose()
        if not self.is_disposed:
            self.is_disposed = True
            self.subscription.dispose()
Get a FREE session to find out just what we have to offer. Get a Free 3 day Pass and a Coupon for 3 personal training sessions! Purchase A Banner In Our New Athletic Field! The Best Protein Bar EVER!! Ankle Mobility Exercises: BY MARK WALLACE Ankle Mobility -Stand in front of a wall in a staggered stance with one foot in front of the other. Front foot approx. 6-8 inches away from a wall. Place both hands on the wall and bend the front knee forward towards the wall. Bend the knee as far as you can without actually touching the wall. Make sure to keep your heel of front foot down at all times. This forces your ankle joint to rotate. This should be an easy back and forth movement. So don’t feel the need to stretch and hold the position for a period of time. Return to starting position with front leg locked. Continue motions back and forth for about 15-20 reps. After completing, stay in same position/stance. Now bend knee at an inward angle towards your body but still forward towards the wall. Remember to keep your heel down. Don’t worry about going too far during this motion. The more often this exercise is practiced, the more mobile and further your ankle will be able to move. Bend knee inward for 15-20 reps and then do the same outward. Always do both legs! Leg Swings -Stand in front of a wall with both hands in front against the wall. Swing one leg in front of other out away from the body and then across the body. Keep planted heel down at all times. The objective is to get into a bit of a rhythm while doing this. Don’t try to stretch the hips by going too far out or across. This is an ankle exercise so without putting stress on the ankle keep body weight on planted foot and just swing the other leg back and forth for 15-20 reps. Switch legs when done. Split Squat -Get into a staggered stance by putting one foot in front of the other but not too far. Place both hands behind your head. Now squat down like you’re doing a lunge without the lunge part. 
Keep back straight and only bend your knees. Make sure your front legged knee does not go further forward than where your toes are sitting. Doing so would put pressure on your knee, eventually weakening the tendons and making you more susceptible to injury. Remember this is NOT a lunge so don’t lean forward or back to stretch your hips. You shouldn’t feel much of a stretch. Try not to touch knee to floor. Once moved down, come back up to starting position. 15-20 reps on each leg. Split Squat Rear Foot Elevated This exercise is a lot like the regular split squat but, obvious from the name, your rear foot is elevated. Place your rear foot on a chair, bench, anything that isn’t too high to make yourself unbalanced but high enough so you can squat down and mobilize the ankle. The placement of your rear foot is important. Lay the top of the rear foot on the object behind you. Don’t extend the toes, only have the tip of the toes touching the platform. Squat down, keeping your knee from overlapping your toes like in a regular split squat. This exercise will require more of a hip flexor stretch depending on the level of elevation of the platform behind you, however, the focus is still on the ankle planted on the ground. 15-20 reps each leg. ***************************************************************************************************************************************** It’s important to remember that these are mobilization exercises, NOT weighted exercises. They are designed to regain lost mobility in the ankle joints, NOT to strengthen any muscles. So even though you may not feel a “burn” like in a normal workout, trust that over time these exercises will improve not only your physical abilities in the gym, but in your everyday life as well. Marcel’s Total Fitness & Athletics provides expert personal training for individuals in local athletic programs, as well as for clients of the general community. Personal training is offered for groups or a one-on-one basis. 
Available are almost every piece of conditioning, strength, and flexibility gear you can imagine. From exercise bikes, treadmills, and free weights to specialized equipment like side-to-side sled training, Marcel’s Total Fitness & Athletics has it all. Come see why our clients think we are the best in town!
# Test-harness helpers for the linphone Python bindings (Python 2 code:
# uses dict.has_key and `.None` attribute access, which are not valid Python 3).
from datetime import timedelta, datetime
from nose.tools import assert_equals
from copy import deepcopy
import linphone
import logging
import os
import sys
import time
import weakref

# Credentials / routing constants shared by all tests.
test_domain = "sipopen.example.org"
auth_domain = "sip.example.org"
test_username = "liblinphone_tester"
test_password = "secret"
test_route = "sip2.linphone.org"

if os.path.isdir(os.path.join(os.path.dirname(__file__), "rcfiles")):
    # Running unit tests from an installed package
    tester_resources_path = os.path.abspath(os.path.dirname(__file__))
else:
    # Running unit tests from the linphone sources
    tester_resources_path = os.path.abspath(os.path.join(os.path.dirname(__file__), "../../../tester/"))

def linphonetester_log_handler(level, msg):
    # Route liblinphone core logs into the "linphonetester" Python logger,
    # tagging messages that did not originate from the Python wrapper.
    import logging
    method = getattr(logging.getLogger("linphonetester"), level)
    if not msg.strip().startswith('[PYLINPHONE]'):
        msg = '[CORE] ' + msg
    method(msg)

# Module-level logger emitting to stdout; liblinphone logs are redirected here.
linphonetester_logger = logging.getLogger("linphonetester")
handler = logging.StreamHandler(sys.stdout)
handler.setLevel(logging.INFO)
formatter = logging.Formatter('%(asctime)s.%(msecs)03d %(levelname)s: %(message)s', '%H:%M:%S')
handler.setFormatter(formatter)
linphonetester_logger.addHandler(handler)
linphone.set_log_handler(linphonetester_log_handler)

def create_address(domain):
    """Build a SIP address for the shared test user, asserting each setter
    round-trips. Falls back to test_route when domain is None."""
    addr = linphone.Address.new(None)
    assert addr != None
    addr.username = test_username
    assert_equals(addr.username, test_username)
    if domain is None:
        domain = test_route
    addr.domain = domain
    assert_equals(addr.domain, domain)
    addr.display_name = None
    addr.display_name = "Mr Tester"
    assert_equals(addr.display_name, "Mr Tester")
    return addr

class Account:
    """A throw-away SIP account: the original identity plus a uniquified
    copy (username suffixed with a random token) used on the test server."""
    def __init__(self, id_addr, unique_id):
        # Flags flipped by registration callbacks during account creation.
        self.created = False
        self.done = False
        self.auth_requested = False
        self.identity = id_addr.clone()
        self.password = linphone.testing.get_random_token(8)
        self.modified_identity = id_addr.clone()
        modified_username = "{username}_{unique_id}".format(username=id_addr.username, unique_id=unique_id)
        self.modified_identity.username = modified_username

class AccountManager:
    """Creates accounts on the test server on demand and caches them so each
    identity is only provisioned once per run."""
    def __init__(self):
        self.unique_id = linphone.testing.get_random_token(6)
        self.accounts = []

    @classmethod
    def wait_for_until(cls, lc1, lc2, func, timeout):
        # Convenience wrapper: collect the non-None cores and poll them.
        lcs = []
        if lc1 is not None:
            lcs.append(lc1)
        if lc2 is not None:
            lcs.append(lc2)
        return cls.wait_for_list(lcs, func, timeout)

    @classmethod
    def wait_for_list(cls, lcs, func, timeout):
        # Pump every core's event loop until func(*lcs) is truthy or
        # `timeout` milliseconds elapse; returns the last func result.
        start = datetime.now()
        end = start + timedelta(milliseconds = timeout)
        res = func(*lcs)
        while not res and datetime.now() < end:
            for lc in lcs:
                lc.iterate()
            time.sleep(0.02)
            res = func(*lcs)
        return res

    @classmethod
    def account_created_on_server_cb(cls, lc, cfg, state, message):
        # Registration callback used while provisioning: Ok means the account
        # exists and works; Cleared means the unregister completed.
        if state == linphone.RegistrationState.Ok:
            lc.user_data().created = True
        elif state == linphone.RegistrationState.Cleared:
            lc.user_data().done = True

    @classmethod
    def account_created_auth_requested_cb(cls, lc, realm, username, domain):
        # The server challenging us is taken as proof the account was created.
        lc.user_data().auth_requested = True

    def check_account(self, cfg):
        """Ensure an account exists for cfg's identity (creating it on the
        server if needed), install its credentials into cfg's core, and
        return the uniquified identity now set on cfg."""
        create_account = False
        lc = cfg.core
        id_addr = cfg.identity_address
        account = self._get_account(id_addr)
        if account is None:
            linphonetester_logger.info("[TESTER] No account for {identity} exists, going to create one.".format(identity=id_addr.as_string()))
            account = Account(id_addr, self.unique_id)
            self.accounts.append(account)
            create_account = True
        cfg.identity_address = account.modified_identity
        if create_account:
            self._create_account_on_server(account, cfg)
        ai = linphone.AuthInfo.new(account.modified_identity.username, None, account.password, None, None, account.modified_identity.domain)
        lc.add_auth_info(ai)
        return account.modified_identity

    def _get_account(self, id_addr):
        # Linear lookup by weak address equality (ignores port/params).
        for account in self.accounts:
            if account.identity.weak_equal(id_addr):
                return account
        return None

    def _create_account_on_server(self, account, refcfg):
        """Provision `account` on the server referenced by refcfg using a
        temporary core; exits the process on any provisioning failure."""
        vtable = {}
        tmp_identity = account.modified_identity.clone()
        vtable['registration_state_changed'] = AccountManager.account_created_on_server_cb
        vtable['auth_info_requested'] = AccountManager.account_created_auth_requested_cb
        lc = CoreManager.configure_lc_from(vtable, tester_resources_path, None, account)
        lc.sip_transports = linphone.SipTransports(-1, -1, -1, -1)
        cfg = lc.create_proxy_config()
        tmp_identity.password = account.password
        # The X-Create-Account header tells the test server to auto-provision.
        tmp_identity.set_header("X-Create-Account", "yes")
        cfg.identity_address = tmp_identity
        server_addr = linphone.Address.new(refcfg.server_addr)
        server_addr.transport = linphone.TransportType.Tcp;
        server_addr.port = 0
        cfg.server_addr = server_addr.as_string()
        cfg.expires = 3600
        lc.add_proxy_config(cfg)
        # Step 1: wait for the server's auth challenge (account created).
        if AccountManager.wait_for_until(lc, None, lambda lc: lc.user_data().auth_requested == True, 10000) != True:
            linphonetester_logger.critical("[TESTER] Account for {identity} could not be created on server.".format(identity=refcfg.identity_address.as_string()))
            sys.exit(-1)
        cfg.edit()
        cfg.identity_address = account.modified_identity
        cfg.done()
        ai = linphone.AuthInfo.new(account.modified_identity.username, None, account.password, None, None, account.modified_identity.domain)
        lc.add_auth_info(ai)
        # Step 2: register with real credentials to confirm the account works.
        if AccountManager.wait_for_until(lc, None, lambda lc: lc.user_data().created == True, 3000) != True:
            linphonetester_logger.critical("[TESTER] Account for {identity} is not working on server.".format(identity=refcfg.identity_address.as_string()))
            sys.exit(-1)
        # Step 3: unregister and make sure the registration context is cleared.
        lc.remove_proxy_config(cfg)
        if AccountManager.wait_for_until(lc, None, lambda lc: lc.user_data().done == True, 3000) != True:
            linphonetester_logger.critical("[TESTER] Account creation could not clean the registration context.")
            sys.exit(-1)

# Shared singleton used by every CoreManager.check_accounts() call.
account_manager = AccountManager()

class CoreManagerStats:
    """Flat bag of event counters incremented by CoreManager callbacks;
    tests snapshot it (deepcopy) and assert on deltas."""
    def __init__(self):
        self.reset()

    def reset(self):
        # Registration events
        self.number_of_LinphoneRegistrationNone = 0
        self.number_of_LinphoneRegistrationProgress = 0
        self.number_of_LinphoneRegistrationOk = 0
        self.number_of_LinphoneRegistrationCleared = 0
        self.number_of_LinphoneRegistrationFailed = 0
        self.number_of_auth_info_requested = 0
        # Call state events
        self.number_of_LinphoneCallIncomingReceived = 0
        self.number_of_LinphoneCallOutgoingInit = 0
        self.number_of_LinphoneCallOutgoingProgress = 0
        self.number_of_LinphoneCallOutgoingRinging = 0
        self.number_of_LinphoneCallOutgoingEarlyMedia = 0
        self.number_of_LinphoneCallConnected = 0
        self.number_of_LinphoneCallStreamsRunning = 0
        self.number_of_LinphoneCallPausing = 0
        self.number_of_LinphoneCallPaused = 0
        self.number_of_LinphoneCallResuming = 0
        self.number_of_LinphoneCallRefered = 0
        self.number_of_LinphoneCallError = 0
        self.number_of_LinphoneCallEnd = 0
        self.number_of_LinphoneCallPausedByRemote = 0
        self.number_of_LinphoneCallUpdatedByRemote = 0
        self.number_of_LinphoneCallIncomingEarlyMedia = 0
        self.number_of_LinphoneCallUpdating = 0
        self.number_of_LinphoneCallReleased = 0
        # Transferred-call events
        self.number_of_LinphoneTransferCallOutgoingInit = 0
        self.number_of_LinphoneTransferCallOutgoingProgress = 0
        self.number_of_LinphoneTransferCallOutgoingRinging = 0
        self.number_of_LinphoneTransferCallOutgoingEarlyMedia = 0
        self.number_of_LinphoneTransferCallConnected = 0
        self.number_of_LinphoneTransferCallStreamsRunning = 0
        self.number_of_LinphoneTransferCallError = 0
        # Chat / messaging events
        self.number_of_LinphoneMessageReceived = 0
        self.number_of_LinphoneMessageReceivedWithFile = 0
        self.number_of_LinphoneMessageReceivedLegacy = 0
        self.number_of_LinphoneMessageExtBodyReceived = 0
        self.number_of_LinphoneMessageInProgress = 0
        self.number_of_LinphoneMessageDelivered = 0
        self.number_of_LinphoneMessageNotDelivered = 0
        self.number_of_LinphoneIsComposingActiveReceived = 0
        self.number_of_LinphoneIsComposingIdleReceived = 0
        self.number_of_LinphoneFileTransferDownloadSuccessful = 0
        self.progress_of_LinphoneFileTransfer = 0
        self.number_of_IframeDecoded = 0
        # Presence events
        self.number_of_NewSubscriptionRequest = 0
        self.number_of_NotifyReceived = 0
        self.number_of_LinphonePresenceActivityOffline = 0
        self.number_of_LinphonePresenceActivityOnline = 0
        self.number_of_LinphonePresenceActivityAppointment = 0
        self.number_of_LinphonePresenceActivityAway = 0
        self.number_of_LinphonePresenceActivityBreakfast = 0
        self.number_of_LinphonePresenceActivityBusy = 0
        self.number_of_LinphonePresenceActivityDinner = 0
        self.number_of_LinphonePresenceActivityHoliday = 0
        self.number_of_LinphonePresenceActivityInTransit = 0
        self.number_of_LinphonePresenceActivityLookingForWork = 0
        self.number_of_LinphonePresenceActivityLunch = 0
        self.number_of_LinphonePresenceActivityMeal = 0
        self.number_of_LinphonePresenceActivityMeeting = 0
        self.number_of_LinphonePresenceActivityOnThePhone = 0
        self.number_of_LinphonePresenceActivityOther = 0
        self.number_of_LinphonePresenceActivityPerformance = 0
        self.number_of_LinphonePresenceActivityPermanentAbsence = 0
        self.number_of_LinphonePresenceActivityPlaying = 0
        self.number_of_LinphonePresenceActivityPresentation = 0
        self.number_of_LinphonePresenceActivityShopping = 0
        self.number_of_LinphonePresenceActivitySleeping = 0
        self.number_of_LinphonePresenceActivitySpectator = 0
        self.number_of_LinphonePresenceActivitySteering = 0
        self.number_of_LinphonePresenceActivityTravel = 0
        self.number_of_LinphonePresenceActivityTV = 0
        self.number_of_LinphonePresenceActivityUnknown = 0
        self.number_of_LinphonePresenceActivityVacation = 0
        self.number_of_LinphonePresenceActivityWorking = 0
        self.number_of_LinphonePresenceActivityWorship = 0
        self.last_received_presence = None
        # Info / subscription / publish events
        self.number_of_inforeceived = 0
        self.number_of_LinphoneSubscriptionIncomingReceived = 0
        self.number_of_LinphoneSubscriptionOutgoingInit = 0
        self.number_of_LinphoneSubscriptionPending = 0
        self.number_of_LinphoneSubscriptionActive = 0
        self.number_of_LinphoneSubscriptionTerminated = 0
        self.number_of_LinphoneSubscriptionError = 0
        self.number_of_LinphoneSubscriptionExpiring = 0
        self.number_of_LinphonePublishProgress = 0
        self.number_of_LinphonePublishOk = 0
        self.number_of_LinphonePublishExpiring = 0
        self.number_of_LinphonePublishError = 0
        self.number_of_LinphonePublishCleared = 0
        # Remote-configuration events
        self.number_of_LinphoneConfiguringSkipped = 0
        self.number_of_LinphoneConfiguringFailed = 0
        self.number_of_LinphoneConfiguringSuccessful = 0
        # Encryption events
        self.number_of_LinphoneCallEncryptedOn = 0
        self.number_of_LinphoneCallEncryptedOff = 0
        self.last_received_chat_message = None

class CoreManager:
    """Wraps one linphone.Core plus its event-counting stats; also hosts the
    classmethod test drivers (call, end_call, wait helpers) and the default
    callback implementations."""
    @classmethod
    def configure_lc_from(cls, vtable, resources_path, rc_path, user_data=None):
        # Build a core from an optional rc file, point it at the tester
        # resources, and attach user_data via a weakref (so the core does not
        # keep its manager alive).
        filepath = None
        if rc_path is not None:
            filepath = os.path.join(resources_path, rc_path)
            assert_equals(os.path.isfile(filepath), True)
        lc = linphone.Core.new(vtable, None, filepath)
        linphone.testing.set_dns_user_hosts_file(lc, os.path.join(resources_path, 'tester_hosts'))
        lc.root_ca = os.path.join(resources_path, 'certificates', 'cn', 'cafile.pem')
        lc.ring = os.path.join(resources_path, 'sounds', 'oldphone.wav')
        lc.ringback = os.path.join(resources_path, 'sounds', 'ringback.wav')
        lc.static_picture = os.path.join(resources_path, 'images', 'nowebcamCIF.jpg')
        lc.user_data = weakref.ref(user_data)
        return lc

    @classmethod
    def wait_for_until(cls, manager1, manager2, func, timeout):
        # Same polling pattern as AccountManager, but over CoreManagers.
        managers = []
        if manager1 is not None:
            managers.append(manager1)
        if manager2 is not None:
            managers.append(manager2)
        return cls.wait_for_list(managers, func, timeout)

    @classmethod
    def wait_for_list(cls, managers, func, timeout):
        # Pump each manager's core until func(*managers) is truthy or the
        # timeout (milliseconds) expires.
        start = datetime.now()
        end = start + timedelta(milliseconds = timeout)
        res = func(*managers)
        while not res and datetime.now() < end:
            for manager in managers:
                manager.lc.iterate()
            time.sleep(0.02)
            res = func(*managers)
        return res

    @classmethod
    def wait_for(cls, manager1, manager2, func):
        # Default 10 s timeout.
        return cls.wait_for_until(manager1, manager2, func, 10000)

    @classmethod
    def call(cls, caller_manager, callee_manager, caller_params = None, callee_params = None, build_callee_params = False):
        """Drive a full call setup between the two managers, asserting every
        intermediate state transition. Returns the result of waiting for
        StreamsRunning on both sides (False on early failure)."""
        initial_caller_stats = deepcopy(caller_manager.stats)
        initial_callee_stats = deepcopy(callee_manager.stats)
        # Use playfile for callee to avoid locking on capture card
        callee_manager.lc.use_files = True
        callee_manager.lc.play_file = os.path.join(tester_resources_path, 'sounds', 'hello8000.wav')
        if caller_params is None:
            call = caller_manager.lc.invite_address(callee_manager.identity)
        else:
            call = caller_manager.lc.invite_address_with_params(callee_manager.identity, caller_params)
        assert call is not None
        # Callee must see the incoming call...
        assert_equals(CoreManager.wait_for(callee_manager, caller_manager,
            lambda callee_manager, caller_manager: callee_manager.stats.number_of_LinphoneCallIncomingReceived == initial_callee_stats.number_of_LinphoneCallIncomingReceived + 1), True)
        assert_equals(callee_manager.lc.incoming_invite_pending, True)
        # ...and caller must have progressed the outgoing leg.
        assert_equals(caller_manager.stats.number_of_LinphoneCallOutgoingProgress, initial_caller_stats.number_of_LinphoneCallOutgoingProgress + 1)
        # Poll until the caller reaches Ringing or EarlyMedia (max ~2 s).
        retry = 0
        while (caller_manager.stats.number_of_LinphoneCallOutgoingRinging != initial_caller_stats.number_of_LinphoneCallOutgoingRinging + 1) and \
                (caller_manager.stats.number_of_LinphoneCallOutgoingEarlyMedia != initial_caller_stats.number_of_LinphoneCallOutgoingEarlyMedia + 1) and \
                retry < 20:
            retry += 1
            caller_manager.lc.iterate()
            callee_manager.lc.iterate()
            time.sleep(0.1)
        assert ((caller_manager.stats.number_of_LinphoneCallOutgoingRinging == initial_caller_stats.number_of_LinphoneCallOutgoingRinging + 1) or \
            (caller_manager.stats.number_of_LinphoneCallOutgoingEarlyMedia == initial_caller_stats.number_of_LinphoneCallOutgoingEarlyMedia + 1)) == True
        assert callee_manager.lc.current_call_remote_address is not None
        if caller_manager.lc.current_call is None or callee_manager.lc.current_call is None or callee_manager.lc.current_call_remote_address is None:
            return False
        callee_from_address = caller_manager.identity.clone()
        callee_from_address.port = 0 # Remove port because port is never present in from header
        assert_equals(callee_from_address.weak_equal(callee_manager.lc.current_call_remote_address), True)
        # Accept the call with explicit params, freshly built params, or defaults.
        if callee_params is not None:
            callee_manager.lc.accept_call_with_params(callee_manager.lc.current_call, callee_params)
        elif build_callee_params:
            default_params = callee_manager.lc.create_call_params(callee_manager.lc.current_call)
            callee_manager.lc.accept_call_with_params(callee_manager.lc.current_call, default_params)
        else:
            callee_manager.lc.accept_call(callee_manager.lc.current_call)
        assert_equals(CoreManager.wait_for(callee_manager, caller_manager,
            lambda callee_manager, caller_manager: (callee_manager.stats.number_of_LinphoneCallConnected == initial_callee_stats.number_of_LinphoneCallConnected + 1) and \
            (caller_manager.stats.number_of_LinphoneCallConnected == initial_caller_stats.number_of_LinphoneCallConnected + 1)), True)
        # Just to sleep
        result = CoreManager.wait_for(callee_manager, caller_manager,
            lambda callee_manager, caller_manager: (callee_manager.stats.number_of_LinphoneCallStreamsRunning == initial_callee_stats.number_of_LinphoneCallStreamsRunning + 1) and \
            (caller_manager.stats.number_of_LinphoneCallStreamsRunning == initial_caller_stats.number_of_LinphoneCallStreamsRunning + 1))
        # NOTE(review): the two sides of this condition spell the "no
        # encryption" enum differently (MediaEncryptionNone vs None) —
        # confirm both names resolve to the same enum value in this binding.
        if caller_manager.lc.media_encryption != linphone.MediaEncryption.MediaEncryptionNone and callee_manager.lc.media_encryption != linphone.MediaEncryption.None:
            # Wait for encryption to be on, in case of zrtp, it can take a few seconds
            if caller_manager.lc.media_encryption == linphone.MediaEncryption.ZRTP:
                CoreManager.wait_for(callee_manager, caller_manager, lambda callee_manager, caller_manager: caller_manager.stats.number_of_LinphoneCallEncryptedOn == initial_caller_stats.number_of_LinphoneCallEncryptedOn + 1)
            if callee_manager.lc.media_encryption == linphone.MediaEncryption.ZRTP:
                CoreManager.wait_for(callee_manager, caller_manager, lambda callee_manager, caller_manager: callee_manager.stats.number_of_LinphoneCallEncryptedOn == initial_callee_stats.number_of_LinphoneCallEncryptedOn + 1)
            assert_equals(callee_manager.lc.current_call.current_params.media_encryption, caller_manager.lc.media_encryption)
            assert_equals(caller_manager.lc.current_call.current_params.media_encryption, callee_manager.lc.media_encryption)
        return result

    @classmethod
    def end_call(cls, caller_manager, callee_manager):
        # Terminate from the caller side and assert both sides see CallEnd.
        caller_manager.lc.terminate_all_calls()
        assert_equals(CoreManager.wait_for(caller_manager, callee_manager,
            lambda caller_manager, callee_manager: caller_manager.stats.number_of_LinphoneCallEnd == 1 and callee_manager.stats.number_of_LinphoneCallEnd == 1), True)

    @classmethod
    def registration_state_changed(cls, lc, cfg, state, message):
        # Default vtable callback: log and bump the matching counter.
        manager = lc.user_data()
        linphonetester_logger.info("[TESTER] New registration state {state} for user id [{identity}] at proxy [{addr}]".format(
            state=linphone.RegistrationState.string(state), identity=cfg.identity_address.as_string(), addr=cfg.server_addr))
        if state == linphone.RegistrationState.None:
            manager.stats.number_of_LinphoneRegistrationNone += 1
        elif state == linphone.RegistrationState.Progress:
            manager.stats.number_of_LinphoneRegistrationProgress += 1
        elif state == linphone.RegistrationState.Ok:
            manager.stats.number_of_LinphoneRegistrationOk += 1
        elif state == linphone.RegistrationState.Cleared:
            manager.stats.number_of_LinphoneRegistrationCleared += 1
        elif state == linphone.RegistrationState.Failed:
            manager.stats.number_of_LinphoneRegistrationFailed += 1
        else:
            raise Exception("Unexpected registration state")

    @classmethod
    def auth_info_requested(cls, lc, realm, username, domain):
        # Default vtable callback: count authentication challenges.
        manager = lc.user_data()
        linphonetester_logger.info("[TESTER] Auth info requested for user id [{username}] at realm [{realm}]".format(
            username=username, realm=realm))
        manager.stats.number_of_auth_info_requested += 1

    @classmethod
    def call_state_changed(cls, lc, call, state, msg):
        # Default vtable callback: log the transition and bump the counter
        # for the new call state; unknown states are a hard failure.
        manager = lc.user_data()
        to_address = call.call_log.to_address.as_string()
        from_address = call.call_log.from_address.as_string()
        direction = "Outgoing"
        if call.call_log.dir == linphone.CallDir.Incoming:
            direction = "Incoming"
        linphonetester_logger.info("[TESTER] {direction} call from [{from_address}] to [{to_address}], new state is [{state}]".format(
            direction=direction, from_address=from_address, to_address=to_address, state=linphone.CallState.string(state)))
        if state == linphone.CallState.IncomingReceived:
            manager.stats.number_of_LinphoneCallIncomingReceived += 1
        elif state == linphone.CallState.OutgoingInit:
            manager.stats.number_of_LinphoneCallOutgoingInit += 1
        elif state == linphone.CallState.OutgoingProgress:
            manager.stats.number_of_LinphoneCallOutgoingProgress += 1
        elif state == linphone.CallState.OutgoingRinging:
            manager.stats.number_of_LinphoneCallOutgoingRinging += 1
        elif state == linphone.CallState.OutgoingEarlyMedia:
            manager.stats.number_of_LinphoneCallOutgoingEarlyMedia += 1
        elif state == linphone.CallState.Connected:
            manager.stats.number_of_LinphoneCallConnected += 1
        elif state == linphone.CallState.StreamsRunning:
            manager.stats.number_of_LinphoneCallStreamsRunning += 1
        elif state == linphone.CallState.Pausing:
            manager.stats.number_of_LinphoneCallPausing += 1
        elif state == linphone.CallState.Paused:
            manager.stats.number_of_LinphoneCallPaused += 1
        elif state == linphone.CallState.Resuming:
            manager.stats.number_of_LinphoneCallResuming += 1
        elif state == linphone.CallState.Refered:
            manager.stats.number_of_LinphoneCallRefered += 1
        elif state == linphone.CallState.Error:
            manager.stats.number_of_LinphoneCallError += 1
        elif state == linphone.CallState.End:
            manager.stats.number_of_LinphoneCallEnd += 1
        elif state == linphone.CallState.PausedByRemote:
            manager.stats.number_of_LinphoneCallPausedByRemote += 1
        elif state == linphone.CallState.UpdatedByRemote:
            manager.stats.number_of_LinphoneCallUpdatedByRemote += 1
        elif state == linphone.CallState.IncomingEarlyMedia:
            manager.stats.number_of_LinphoneCallIncomingEarlyMedia += 1
        elif state == linphone.CallState.Updating:
            manager.stats.number_of_LinphoneCallUpdating += 1
        elif state == linphone.CallState.Released:
            manager.stats.number_of_LinphoneCallReleased += 1
        else:
            raise Exception("Unexpected call state")

    @classmethod
    def message_received(cls, lc, room, message):
        # Default vtable callback: record the chat message and classify it
        # (with file transfer / with external body URL).
        manager = lc.user_data()
        from_str = message.from_address.as_string()
        text_str = message.text
        external_body_url = message.external_body_url
        linphonetester_logger.info("[TESTER] Message from [{from_str}] is [{text_str}], external URL [{external_body_url}]".format(
            from_str=from_str, text_str=text_str, external_body_url=external_body_url))
        manager.stats.number_of_LinphoneMessageReceived += 1
        manager.stats.last_received_chat_message = message
        if message.file_transfer_information is not None:
            manager.stats.number_of_LinphoneMessageReceivedWithFile += 1
        elif message.external_body_url is not None:
            manager.stats.number_of_LinphoneMessageExtBodyReceived += 1

    @classmethod
    def new_subscription_requested(cls, lc, lf, url):
        # Default vtable callback: count and auto-accept presence subscriptions.
        manager = lc.user_data()
        linphonetester_logger.info("[TESTER] New subscription request: from [{from_str}], url [{url}]".format(
            from_str=lf.address.as_string(), url=url))
        manager.stats.number_of_NewSubscriptionRequest += 1
        lc.add_friend(lf) # Accept subscription

    @classmethod
    def notify_presence_received(cls, lc, lf):
        # Default vtable callback: store the presence model and bump the
        # counter matching the reported activity type.
        manager = lc.user_data()
        linphonetester_logger.info("[TESTER] New notify request: from [{from_str}]".format(
            from_str=lf.address.as_string()))
        manager.stats.number_of_NotifyReceived += 1
        manager.stats.last_received_presence = lf.presence_model
        acttype = manager.stats.last_received_presence.activity.type
        if acttype == linphone.PresenceActivityType.Offline:
            manager.stats.number_of_LinphonePresenceActivityOffline += 1
        elif acttype == linphone.PresenceActivityType.Online:
            manager.stats.number_of_LinphonePresenceActivityOnline += 1
        elif acttype == linphone.PresenceActivityType.Appointment:
            manager.stats.number_of_LinphonePresenceActivityAppointment += 1
        elif acttype == linphone.PresenceActivityType.Away:
            manager.stats.number_of_LinphonePresenceActivityAway += 1
        elif acttype == linphone.PresenceActivityType.Breakfast:
            manager.stats.number_of_LinphonePresenceActivityBreakfast += 1
        elif acttype == linphone.PresenceActivityType.Busy:
            manager.stats.number_of_LinphonePresenceActivityBusy += 1
        elif acttype == linphone.PresenceActivityType.Dinner:
            manager.stats.number_of_LinphonePresenceActivityDinner += 1
        elif acttype == linphone.PresenceActivityType.Holiday:
            manager.stats.number_of_LinphonePresenceActivityHoliday += 1
        elif acttype == linphone.PresenceActivityType.InTransit:
            manager.stats.number_of_LinphonePresenceActivityInTransit += 1
        elif acttype == linphone.PresenceActivityType.LookingForWork:
            manager.stats.number_of_LinphonePresenceActivityLookingForWork += 1
        elif acttype == linphone.PresenceActivityType.Lunch:
            manager.stats.number_of_LinphonePresenceActivityLunch += 1
        elif acttype == linphone.PresenceActivityType.Meal:
            manager.stats.number_of_LinphonePresenceActivityMeal += 1
        elif acttype == linphone.PresenceActivityType.Meeting:
            manager.stats.number_of_LinphonePresenceActivityMeeting += 1
        elif acttype == linphone.PresenceActivityType.OnThePhone:
            manager.stats.number_of_LinphonePresenceActivityOnThePhone += 1
        elif acttype == linphone.PresenceActivityType.Other:
            manager.stats.number_of_LinphonePresenceActivityOther += 1
        elif acttype == linphone.PresenceActivityType.Performance:
            manager.stats.number_of_LinphonePresenceActivityPerformance += 1
        elif acttype == linphone.PresenceActivityType.PermanentAbsence:
            manager.stats.number_of_LinphonePresenceActivityPermanentAbsence += 1
        elif acttype == linphone.PresenceActivityType.Playing:
            manager.stats.number_of_LinphonePresenceActivityPlaying += 1
        elif acttype == linphone.PresenceActivityType.Presentation:
            manager.stats.number_of_LinphonePresenceActivityPresentation += 1
        elif acttype == linphone.PresenceActivityType.Shopping:
            manager.stats.number_of_LinphonePresenceActivityShopping += 1
        elif acttype == linphone.PresenceActivityType.Sleeping:
            manager.stats.number_of_LinphonePresenceActivitySleeping += 1
        elif acttype == linphone.PresenceActivityType.Spectator:
            manager.stats.number_of_LinphonePresenceActivitySpectator += 1
        elif acttype == linphone.PresenceActivityType.Steering:
            manager.stats.number_of_LinphonePresenceActivitySteering += 1
        elif acttype == linphone.PresenceActivityType.Travel:
            manager.stats.number_of_LinphonePresenceActivityTravel += 1
        elif acttype == linphone.PresenceActivityType.TV:
            manager.stats.number_of_LinphonePresenceActivityTV += 1
        elif acttype == linphone.PresenceActivityType.Unknown:
            manager.stats.number_of_LinphonePresenceActivityUnknown += 1
        elif acttype == linphone.PresenceActivityType.Vacation:
            manager.stats.number_of_LinphonePresenceActivityVacation += 1
        elif acttype == linphone.PresenceActivityType.Working:
            manager.stats.number_of_LinphonePresenceActivityWorking += 1
        elif acttype == linphone.PresenceActivityType.Worship:
            manager.stats.number_of_LinphonePresenceActivityWorship += 1

    # NOTE(review): `vtable = {}` is a mutable default argument — callers that
    # omit vtable share (and mutate) one dict across all CoreManager
    # instances. Confirm whether that sharing is relied upon before changing.
    def __init__(self, rc_file = None, check_for_proxies = True, vtable = {}):
        # Fill in the default callbacks for any slot the caller left empty
        # (dict.has_key is Python-2-only).
        if not vtable.has_key('registration_state_changed'):
            vtable['registration_state_changed'] = CoreManager.registration_state_changed
        if not vtable.has_key('auth_info_requested'):
            vtable['auth_info_requested'] = CoreManager.auth_info_requested
        if not vtable.has_key('call_state_changed'):
            vtable['call_state_changed'] = CoreManager.call_state_changed
        if not vtable.has_key('message_received'):
            vtable['message_received'] = CoreManager.message_received
        #if not vtable.has_key('is_composing_received'):
            #vtable['is_composing_received'] = CoreManager.is_composing_received
        if not vtable.has_key('new_subscription_requested'):
            vtable['new_subscription_requested'] = CoreManager.new_subscription_requested
        if not vtable.has_key('notify_presence_received'):
            vtable['notify_presence_received'] = CoreManager.notify_presence_received
        #if not vtable.has_key('transfer_state_changed'):
            #vtable['transfer_state_changed'] = CoreManager.transfer_state_changed
        #if not vtable.has_key('info_received'):
            #vtable['info_received'] = CoreManager.info_received
        #if not vtable.has_key('subscription_state_changed'):
            #vtable['subscription_state_changed'] = CoreManager.subscription_state_changed
        #if not vtable.has_key('notify_received'):
            #vtable['notify_received'] = CoreManager.notify_received
        #if not vtable.has_key('publish_state_changed'):
            #vtable['publish_state_changed'] = CoreManager.publish_state_changed
        #if not vtable.has_key('configuring_status'):
            #vtable['configuring_status'] = CoreManager.configuring_status
        #if not vtable.has_key('call_encryption_changed'):
            #vtable['call_encryption_changed'] = CoreManager.call_encryption_changed
        self.identity = None
        self.stats = CoreManagerStats()
        rc_path = None
        if rc_file is not None:
            rc_path = os.path.join('rcfiles', rc_file)
        self.lc = CoreManager.configure_lc_from(vtable, tester_resources_path, rc_path, self)
        self.check_accounts()
        # When an rc file declared proxies, wait until every one registers.
        if check_for_proxies and rc_file is not None:
            proxy_count = len(self.lc.proxy_config_list)
        else:
            proxy_count = 0
        if proxy_count:
            CoreManager.wait_for_until(self, None, lambda manager: manager.stats.number_of_LinphoneRegistrationOk == proxy_count, 5000 * proxy_count)
        assert_equals(self.stats.number_of_LinphoneRegistrationOk, proxy_count)
        self.enable_audio_codec("PCMU", 8000)
        if self.lc.default_proxy_config is not None:
            self.lc.default_proxy_config.identity_address.clean()

    def enable_audio_codec(self, mime, rate):
        # Disable every audio codec, then enable only the requested one.
        codecs = self.lc.audio_codecs
        for codec in codecs:
            self.lc.enable_payload_type(codec, False)
        codec = self.lc.find_payload_type(mime, rate, 1)
        assert codec is not None
        if codec is not None:
            self.lc.enable_payload_type(codec, True)

    def disable_all_audio_codecs_except_one(self, mime):
        # rate -1 means "any clock rate" for find_payload_type.
        self.enable_audio_codec(mime, -1)

    def check_accounts(self):
        # Provision (or reuse) a server account for every configured proxy.
        pcl = self.lc.proxy_config_list
        for cfg in pcl:
            self.identity = account_manager.check_account(cfg)
Dazzle every admirer in an instant when you slip our long silver dangle earrings in. From the red carpet through to the fashion catwalks, these on-trend drops are excellently designed to see you through the most regal of events. A must-have for Summer weddings, a night on the tiles or even that romantic date.
#!/usr/bin/python
"""Subscribe to the /house/light1confirm MQTT topic and log how long the
light stays on, writing a timestamped log file under logs/."""

import sys
import os
import paho.mqtt.client as mqtt
import string
import datetime
import time
import logging

# keeps track of when we last turned the light on (epoch seconds)
onStartTime = 0

##############################
# Create and set up the logging subsystem
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)

# Create a file handler. Make sure the log directory exists first, otherwise
# logging.FileHandler raises on a fresh checkout (bug fix).
if not os.path.isdir('logs'):
    os.makedirs('logs')
timeFormat = "%a %b %d %Y %H.%M.%S"
today = datetime.datetime.today()
timestamp = today.strftime(timeFormat)
logFile = 'logs/logs' + timestamp + '.log'
handler = logging.FileHandler(logFile)
handler.setLevel(logging.INFO)

# create a logging format
formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')
handler.setFormatter(formatter)

# add the handler to the logger
logger.addHandler(handler)
##############################


def on_message(mqttc, obj, msg):
    """MQTT callback: record light-on time on "On", and on any other payload
    log the off time plus the total number of seconds the light was on."""
    # onStartTime is the only module-level name we rebind
    global onStartTime

    # get the local time in an easy-to-read format
    localtime = time.asctime(time.localtime(time.time()))

    # print the message topic and payload for debugging
    print(msg.topic + " - " + msg.payload)

    # Check that the topic is our light1confirm topic. Not strictly needed
    # here (we only subscribe to one topic), but safer if more subscriptions
    # are added later.
    if msg.topic == "/house/light1confirm":
        if msg.payload == "On":
            # take note of when we turned the light on
            onStartTime = time.time()
            logMessage = "Light turned on at: " + localtime
            print(logMessage)
            logger.info(logMessage)
        else:
            # light went off: compute the total run time
            runTime = time.time() - onStartTime
            logMessage = "Light turned off at: " + localtime
            print(logMessage)
            logger.info(logMessage)
            logMessage = "The light was on for a total of " + str(int(runTime)) + " seconds"
            print(logMessage)
            logger.info(logMessage)


# create our MQTT client and wire up the message callback
mqttc = mqtt.Client()
mqttc.on_message = on_message

# connect to the broker (most likely localhost if running MQTT locally)
mqttc.connect("127.0.0.1", 1883, 60)

# subscribe to our light confirmation topic
mqttc.subscribe("/house/light1confirm", 0)

# start the MQTT network loop in a separate thread and idle forever
mqttc.loop_start()
while True:
    time.sleep(1)
Rebecca Dryer Photography Blog: Random Pretty Flower! When we were walking back to our hotel from Jessica's in Hawaii, we walked through a park that had some gorgeous Hibiscus flowers. I have always loved this flower, but unfortunately, they are so hard to grow in Oregon. So I decided that if I took a photo of it, then it would last forever! Besides... I have a brown thumb anyways, I can never manage to keep anything alive! It would take a lot of talent to kill a photo of a flower anyways! I have been slacking on my blog lately, but I am trying my best to get caught up-ish!
#!/usr/bin/env python
'''
Copyright 2014 Nedim Srndic, University of Tuebingen

This file is part of Mimicus.

Mimicus is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.

Mimicus is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with Mimicus. If not, see <http://www.gnu.org/licenses/>.
##############################################################################
fig11.py

Reproduction of Figure 11 of the paper "Practical Evasion of a
Learning-Based Classifier: A Case Study" by Nedim Srndic and
Pavel Laskov.

Created on March 21, 2014.
'''
from argparse import ArgumentParser
import multiprocessing
import os
import random
import sys

from matplotlib import pyplot
from mimicus.tools.featureedit import FeatureEdit
from mimicus.tools.datasets import csv2numpy

import common
import config


def mimicry(wolf_fname, sheep_feats, m_id):
    '''
    Mimics the file with the features sheep_feats using the attack
    (wolf) file with the name wolf_fname.

    Parameters:
        wolf_fname - path of the attack PDF file to modify.
        sheep_feats - feature vector of the file to imitate.
        m_id - opaque identifier passed through unchanged so the caller
            can match results to inputs after parallel, out-of-order
            processing.

    Returns a (feature vector, m_id) tuple with the features of the
    modified file.
    '''
    mimic = FeatureEdit(wolf_fname).modify_file(sheep_feats, '/run/shm')
    # Only the resulting feature vector is needed; discard the temp PDF.
    os.remove(mimic['path'])
    return mimic['feats'], m_id


def mimicry_wrap(args):
    '''
    Helper function for calling the mimicry function in parallel.

    Pool.imap only passes a single argument, so the argument tuple is
    unpacked here.
    '''
    return mimicry(*args)


def fig11(tr_data, tr_labels, te_data, te_labels, tr_files):
    '''
    Tests the vaccination defense against the Benign Random Noise (BRN)
    attack seeded by results of our mimicry attack against itself and
    original, unmodified data. Performs 5 trials.

    Returns a list of (clean accuracy, mimicry accuracy) tuples, one per
    training-set perturbation level.
    '''
    mal_tr_ind = [i for i, l in enumerate(tr_labels) if l == 1]
    ben_tr_ind = [i for i, l in enumerate(tr_labels) if l == 0]
    mim_data, mim_labels = common.get_FTC_mimicry()
    TRIALS = 5
    print('\n{:>6}{:>15}{:>15}'.format('%', 'ORIGINAL', 'OUR MIMICRY'))
    pool = multiprocessing.Pool(processes=None)
    scores = []
    for subset in (0, 0.0005, 0.001, 0.005, 0.01, 0.05, 0.1, 0.5, 1):
        acc = [0.0, 0.0]
        sys.stdout.write('{:>6.2f}'.format(subset * 100))
        for _ in range(TRIALS):
            tr_mod = tr_data.copy()
            # Subsample malicious training files for the attack
            wolf_ind = random.sample(mal_tr_ind,
                                     int(round(subset * len(mal_tr_ind))))
            # Mimic random benign files using the sampled (wolf) files.
            # FIX: mimicry() expects (wolf_fname, sheep_feats, m_id); the
            # original code passed the benign feature vector and the wolf
            # file name in swapped order.
            pargs = [(tr_files[w_id],
                      tr_data[random.choice(ben_tr_ind)],
                      w_id) for w_id in wolf_ind]
            for mimic, w_id in pool.imap(mimicry_wrap, pargs):
                tr_mod[w_id] = mimic
            # Evaluate the classifier on both clean test data and mimicry data
            res = common.evaluate_classifier(tr_mod, tr_labels,
                                             [te_data, mim_data],
                                             [te_labels, mim_labels])
            acc = [old + new for old, new in zip(acc, res)]
        acc = [acc[0] / TRIALS, acc[1] / TRIALS]
        print('{:>15.3f}{:>15.3f}'.format(acc[0], acc[1]))
        scores.append(tuple(acc))
    return scores


def main():
    '''
    Loads the datasets, runs the experiment and optionally plots or
    saves the resulting figure. Returns 0 on success.
    '''
    random.seed(0)
    parser = ArgumentParser()
    parser.add_argument('--plot', help='Where to save plot (file name)',
                        default=False)
    parser.add_argument('--show', help='Show plot in a window',
                        default=False)
    args = parser.parse_args()

    print('Loading training data from CSV...')
    tr_data, tr_labels, tr_fnames = csv2numpy(
        config.get('datasets', 'contagio'))
    print('Loading test data from CSV...')
    te_data, te_labels, _ = csv2numpy(
        config.get('datasets', 'contagio_test'))
    print('Evaluating...')
    scores = fig11(tr_data, tr_labels, te_data, te_labels, tr_fnames)
    if not (args.plot or args.show):
        return 0

    # Plot
    original, our_mimicry = zip(*scores)
    fig = pyplot.figure()
    pyplot.plot(original, label='Clean data', marker='o', color='k',
                linewidth=2)
    pyplot.plot(our_mimicry, label='Our mimicry', marker='+', color='k',
                linewidth=2, linestyle=':')
    axes = fig.gca()
    # Set up axes and labels
    axes.yaxis.set_ticks([r / 10.0 for r in range(11)])
    axes.yaxis.grid()
    axes.set_ylim(0, 1)
    axes.set_ylabel('Accuracy')
    xticklabels = ['0', '0.05', '0.1', '0.5', '1', '5', '10', '50', '100']
    axes.set_xticklabels(xticklabels, rotation=0)
    axes.set_xlabel('Training set perturbation (%)')
    fig.subplots_adjust(bottom=0.13, top=0.95, left=0.11, right=0.96)
    pyplot.legend(loc='lower right')
    if args.show:
        pyplot.show()
    if args.plot:
        pyplot.savefig(args.plot, dpi=300, bbox_inches='tight')
    return 0


if __name__ == '__main__':
    sys.exit(main())
Have you heard of the movie Attack of the Killer Tomatoes? Yeah, it's as silly as it sounds. But, wait a minute now. The Internet is becoming as hazardous as one of those bad sci-fi flicks. There used to be a time (not too long ago) when we could log onto our email and just read messages from friends and family, or enjoy a nice ezine or two. Remember those times? Remember the good old days when you could click on your Internet browser and take a leisurely stroll through cyberspace with nary a care in the world? Okay. So maybe there were some pretty awful web sites back then. And yes, there was even some annoying advertising — blinking banners, anyone? Ahh . . . those were the days. Your computer is littered with unwanted ads and Trojans that have literally taken your computer hostage. For a brief while this was only happening to the business community, but soon the invasion spread into the home PC, where unsuspecting people wonder, where did all this junk come from? Once your computer's been hijacked, there's no getting rid of these nasty invaders without a strong line of defense. Laser guns? No, not for this fight. Save that for the movies. Your best defense against viruses and spyware is to download a good anti-spyware software program that will seek out and destroy your computer's enemies with speed and precision. And once the pesky invaders are gone, you'll want that program to keep up-to-date on the newest threats, and destroy those too. The best anti-spyware software will scan your computer for free so you have an idea of how many spyware invaders are actually hiding inside your hard drive waiting to strike. You'll be surprised at what you'll find. It's such a relief when you're finally able to surf the Internet again with no pop-up ads interfering and no waiting for your slow-as-molasses computer to get from point A to point B due to its being overloaded with unwanted spyware invaders. It's easy to drive those invaders away with only a few mouse clicks.
I've done it, and you can too. Isn't your peace of mind worth it? I've reclaimed my computer and now I'm able to enjoy surfing the Net once again. And, just like in countless space-invader movies where the world is saved, you can experience a happy ending to your spyware troubles as well.
from setuptools import setup, find_packages
from codecs import open
from os import path

__version__ = '0.0.1'

here = path.abspath(path.dirname(__file__))

# Get the long description from the README file
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
    long_description = f.read()

# Get the dependencies and installs from requirements.txt
with open(path.join(here, 'requirements.txt'), encoding='utf-8') as f:
    all_reqs = f.read().split('\n')

# Plain (non-VCS) requirements go to install_requires; VCS requirements
# (git+...) go to dependency_links with the 'git+' prefix stripped.
# FIX: the original filtered dependency_links with `'git+' not in x`,
# which selected exactly the entries that are NOT VCS links (and had
# nothing to strip), leaving real git+ requirements unusable.
install_requires = [x.strip() for x in all_reqs
                    if x.strip() and 'git+' not in x]
dependency_links = [x.strip().replace('git+', '') for x in all_reqs
                    if 'git+' in x]

setup(
    name='ccdroplet',
    version=__version__,
    description='Convective Burning Droplet model for (rocket) '
                'Combustion Chambers.',
    long_description=long_description,
    url='https://github.com/ernestyalumni/Propulsion/ccdroplet',
    download_url='https://github.com/ernestyalumni/Propulsion/ccdroplet/'
                 'tarball/' + __version__,
    license='BSD',
    classifiers=[
        'Development Status :: 3 - Alpha',
        'Intended Audience :: Developers',
        'Programming Language :: Python :: 3',
    ],
    keywords='',
    packages=find_packages(exclude=['docs', 'tests*']),
    include_package_data=True,
    author='Ernest Yeung',
    install_requires=install_requires,
    dependency_links=dependency_links,
    author_email='ernestyalumni@gmail.com'
)
Welcome to www.the-road-south.com! This site chronicles Nick's travels by bicycle from the tip of North America at Prudhoe Bay, Alaska to the tip of South America at Ushuaia, Argentina. Inside you'll find journal entries and photos from the road, as well as other useful information related to the trip and bicycling the Pan-Am in general. Click any of the links above for further info. Please enjoy your visit and if you have any questions or comments feel free to contact Nick. Photos current as of 06.24.2008. 02.15.2004 Ushuaia, Argentina-Nick reaches Tierra del Fuego and after 20 months and 20,000 miles brings the trip to a close in Ushuaia. Read more in the last journal entry. Copyright © 2002-2005 Nick Lenzmeier. All rights reserved.
# Copyright (c) 2005-2008, Enthought, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# Neither the name of Enthought, Inc. nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

from math import acos, sin, cos, hypot, ceil, sqrt, radians, degrees
import warnings


def bezier_arc(x1, y1, x2, y2, start_angle=0, extent=90):
    """ Compute a cubic Bezier approximation of an elliptical arc.

    (x1, y1) and (x2, y2) are the corners of the enclosing rectangle.
    The coordinate system has coordinates that increase to the right and
    down.  Angles, measured in degrees, start with 0 to the right (the
    positive X axis) and increase counter-clockwise.  The arc extends
    from start_angle to start_angle+extent.  I.e. start_angle=0 and
    extent=180 yields an openside-down semi-circle.

    The resulting coordinates are of the form (x1,y1, x2,y2, x3,y3,
    x4,y4) such that the curve goes from (x1, y1) to (x4, y4) with
    (x2, y2) and (x3, y3) as their respective Bezier control points.
    """
    # Normalize the corner order (y is top-biased per the y-down convention).
    x1, y1, x2, y2 = min(x1, x2), max(y1, y2), max(x1, x2), min(y1, y2)

    # Split the sweep into fragments of at most 90 degrees each; the cubic
    # approximation is only accurate over small angular spans.
    if abs(extent) <= 90:
        frag_angle = float(extent)
        nfrag = 1
    else:
        nfrag = int(ceil(abs(extent) / 90.))
        if nfrag == 0:
            # Defensive guard; unreachable since abs(extent) > 90 here.
            warnings.warn('Invalid value for extent: %r' % extent)
            return []
        frag_angle = float(extent) / nfrag

    x_cen = (x1 + x2) / 2.
    y_cen = (y1 + y2) / 2.
    rx = (x2 - x1) / 2.
    ry = (y2 - y1) / 2.
    half_angle = radians(frag_angle) / 2
    # Standard control-point distance for a cubic fit of a circular arc.
    kappa = abs(4. / 3. * (1. - cos(half_angle)) / sin(half_angle))
    # NOTE: the original code also computed an unused `sign` variable here;
    # it has been removed (dead code).

    point_list = []
    for i in range(nfrag):
        theta0 = radians(start_angle + i * frag_angle)
        theta1 = radians(start_angle + (i + 1) * frag_angle)
        c0 = cos(theta0)
        c1 = cos(theta1)
        s0 = sin(theta0)
        s1 = sin(theta1)
        # Flip kappa's sign with the sweep direction (y-down coordinates).
        if frag_angle > 0:
            signed_kappa = -kappa
        else:
            signed_kappa = kappa
        point_list.append((x_cen + rx * c0,
                           y_cen - ry * s0,
                           x_cen + rx * (c0 + signed_kappa * s0),
                           y_cen - ry * (s0 - signed_kappa * c0),
                           x_cen + rx * (c1 - signed_kappa * s1),
                           y_cen - ry * (s1 + signed_kappa * c1),
                           x_cen + rx * c1,
                           y_cen - ry * s1))
    return point_list


def angle(x1, y1, x2, y2):
    """ The signed angle in degrees between two vectors.

    The sign follows the cross product of the two vectors.
    """
    sign = 1.0
    usign = (x1 * y2 - y1 * x2)
    if usign < 0:
        sign = -1.0
    num = x1 * x2 + y1 * y2
    den = hypot(x1, y1) * hypot(x2, y2)
    # Clamp to [-1, 1] to guard acos() against rounding error.
    ratio = min(max(num / den, -1.0), 1.0)
    return sign * degrees(acos(ratio))


def transform_from_local(xp, yp, cphi, sphi, mx, my):
    """ Transform from the local frame to absolute space.

    (cphi, sphi) are the cosine and sine of the rotation angle and
    (mx, my) is the translation.
    """
    x = xp * cphi - yp * sphi + mx
    y = xp * sphi + yp * cphi + my
    return (x, y)


def elliptical_arc_to(x1, y1, rx, ry, phi, large_arc_flag, sweep_flag,
                      x2, y2):
    """ An elliptical arc approximated with Bezier curves or a line
    segment.

    Algorithm taken from the SVG 1.1 Implementation Notes:
    http://www.w3.org/TR/SVG/implnote.html#ArcImplementationNotes

    Returns a list of curve/line tuples in absolute coordinates; each
    Bezier entry is (x2,y2, x3,y3, x4,y4) (control, control, endpoint).
    """
    # Basic normalization.
    rx = abs(rx)
    ry = abs(ry)
    phi = phi % 360

    # Check for certain special cases.
    if x1 == x2 and y1 == y2:
        # Omit the arc.
        # x1 and y1 can obviously remain the same for the next segment.
        return []
    if rx == 0 or ry == 0:
        # Degenerate ellipse: draw a line segment instead.
        return [(x2, y2)]

    rphi = radians(phi)
    cphi = cos(rphi)
    sphi = sin(rphi)

    # Step 1: Rotate to the local coordinates.
    dx = 0.5 * (x1 - x2)
    dy = 0.5 * (y1 - y2)
    x1p = cphi * dx + sphi * dy
    y1p = -sphi * dx + cphi * dy
    # Ensure that rx and ry are large enough to have a unique solution.
    lam = (x1p / rx) ** 2 + (y1p / ry) ** 2
    if lam > 1.0:
        scale = sqrt(lam)
        rx *= scale
        ry *= scale

    # Step 2: Solve for the center in the local coordinates.
    num = max((rx * ry) ** 2 - (rx * y1p) ** 2 - (ry * x1p) ** 2, 0.0)
    den = ((rx * y1p) ** 2 + (ry * x1p) ** 2)
    a = sqrt(num / den)
    cxp = a * rx * y1p / ry
    cyp = -a * ry * x1p / rx
    if large_arc_flag == sweep_flag:
        cxp = -cxp
        cyp = -cyp

    # Step 3: Transform back.
    mx = 0.5 * (x1 + x2)
    my = 0.5 * (y1 + y2)

    # Step 4: Compute the start angle and the angular extent of the arc.
    # Note that theta1 is local to the phi-rotated coordinate space.
    dx = (x1p - cxp) / rx
    dy = (y1p - cyp) / ry
    dx2 = (-x1p - cxp) / rx
    dy2 = (-y1p - cyp) / ry
    theta1 = angle(1, 0, dx, dy)
    dtheta = angle(dx, dy, dx2, dy2)
    if not sweep_flag and dtheta > 0:
        dtheta -= 360
    elif sweep_flag and dtheta < 0:
        dtheta += 360

    # Step 5: Break it apart into Bezier arcs.
    p = []
    control_points = bezier_arc(cxp - rx, cyp - ry, cxp + rx, cyp + ry,
                                theta1, dtheta)
    for x1p, y1p, x2p, y2p, x3p, y3p, x4p, y4p in control_points:
        # Transform them back to absolute space.
        p.append((
            transform_from_local(x2p, y2p, cphi, sphi, mx, my) +
            transform_from_local(x3p, y3p, cphi, sphi, mx, my) +
            transform_from_local(x4p, y4p, cphi, sphi, mx, my)
        ))
    return p
Although this subject is an uncomfortable subject for many, there is a growing population of our preschoolers who deal with the impact of divorce everyday. There are many books, and helpful sources that parents can use if they are going through a divorce, however this is not so for preschool teachers. This article is for teachers that are interested in how to help preschoolers deal with divorce. Here you will find “red-flags," to watch out for, tips on how to help, and a book list for your students that are experiencing the changes that come with a divorce. Become Aware of Common “Red-Flags" Not every child that has a divorce in the family is affected by it. Sometimes a divorce took place early on in the child’s life, so they are accustomed to the separate arrangements. Unfortunately this is not always the case. Some children experience divorce during their preschool years, which will affect the child. A teacher can learn to become aware of changes in their students, thereby giving them the advantage of offering help to them sooner. If you have prior knowledge of the divorce you are even at a greater advantage, and should be closely watching for behavioral or personality changes in these students. These common “red flags" can act as warning signals to teachers that something might be going on with the child or at home. This is a general guide, so it should not be taken literally. -Find out about family status: When you are gathering other information such as primary doctor, emergency contacts and allergies, attempt to collect this information as well. This request for information can be sent home via a welcome letter to be completed by a parent or during an orientation if your school has one. Simply ask for the names of mom and dad and their relationship status (optional). Leave room for each parent to leave their phone number and/or address for future contact. 
This may not be filled out by the parents, but if you can obtain this information it may be helpful to you and the student. -Have patience: Preschool teachers are usually already gifted with patience; however this attribute is extremely necessary when it comes to a hurting child. Depending on the severity of their behaviors, these students may need extra TLC. They are simply reacting to changes in their home environment. Lessons on feelings, expression of feelings, and actions toward others would be beneficial to the student, and to the class as a whole. -Keep the classroom schedule as routine as possible: Predictability can be a comfort to a child experiencing drastic changes at home. If there is a change in the routine, for example a teacher’s absence, you may see a flare up of “red-flag" behaviors. Again if you have prior knowledge of the divorce you can prepare the child for any changes to their routine, or preventively spend time with the child afterward. This should give a sense of security to the child, which is most beneficial to the student at this time. -Meet with the parents: If there are behavioral or emotional concerns, meet with the parents to discuss what you are observing. You may need to set up two separate meetings to ensure that you discuss your concerns with both parents. Suggest to them the book list, and speaking with their child about the goings-on at home in a way preschoolers will understand. Encourage the parents to share with the preschooler that the divorce is not their child’s fault, that they will always be there and will always love them. -Do not show favoritism: If you are sending home a packet for mom, send one for dad as well. Keep both parties informed on the child’s progress. If you have parent-teacher conferences, then offer to sit down with each parent separately. Typically both parents are concerned with the well-being of the child, as are you. 
Keeping everyone informed reinforces to the parents that the main goal is an educated, healthy child over everything else. -Have a lesson on family structures: Teach your class that families can be more than a mommy and a daddy. Educate the class as a whole, and teach the child that they still have a family that loves them, and cares for them. Read books about divorce and multiple family structures. Use the book list below as a starting library to keep in your class. Let the children take out the books, and suggest taking them home to read with their parent. Books show children that they are not alone, and usually offer suggestions as to why they might be feeling the way they do. In some cases, a teacher’s intervention is the only hope for a struggling student. Take the time and learn how to help preschoolers deal with divorce, so you could be that teacher who makes a positive difference in a child’s life.
# -*- coding: utf-8 -*- # # Copyright (c) 2014, Paweł Wodnicki # All rights reserved. # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # * Neither the name of the 32bitmicro nor the # names of its contributors may be used to endorse or promote products # derived from this software without specific prior written permission. #THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND #ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED #WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE #DISCLAIMED. IN NO EVENT SHALL Paweł Wodnicki BE LIABLE FOR ANY #DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES #(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; #LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND #ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT #(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS #SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. from edautils import * from eda import * pcb_symbols= """ Symbol(' ' 18) ( ) Symbol('!' 
12) ( SymbolLine(0 35 0 40 8) SymbolLine(0 0 0 25 8) ) Symbol('"' 12) ( SymbolLine(0 0 0 10 8) SymbolLine(10 0 10 10 8) ) Symbol('#' 12) ( SymbolLine(0 25 20 25 8) SymbolLine(0 15 20 15 8) SymbolLine(15 10 15 30 8) SymbolLine(5 10 5 30 8) ) Symbol('$' 12) ( SymbolLine(15 5 20 10 8) SymbolLine(5 5 15 5 8) SymbolLine(0 10 5 5 8) SymbolLine(0 10 0 15 8) SymbolLine(0 15 5 20 8) SymbolLine(5 20 15 20 8) SymbolLine(15 20 20 25 8) SymbolLine(20 25 20 30 8) SymbolLine(15 35 20 30 8) SymbolLine(5 35 15 35 8) SymbolLine(0 30 5 35 8) SymbolLine(10 0 10 40 8) ) Symbol('%' 12) ( SymbolLine(0 5 0 10 8) SymbolLine(0 5 5 0 8) SymbolLine(5 0 10 0 8) SymbolLine(10 0 15 5 8) SymbolLine(15 5 15 10 8) SymbolLine(10 15 15 10 8) SymbolLine(5 15 10 15 8) SymbolLine(0 10 5 15 8) SymbolLine(0 40 40 0 8) SymbolLine(35 40 40 35 8) SymbolLine(40 30 40 35 8) SymbolLine(35 25 40 30 8) SymbolLine(30 25 35 25 8) SymbolLine(25 30 30 25 8) SymbolLine(25 30 25 35 8) SymbolLine(25 35 30 40 8) SymbolLine(30 40 35 40 8) ) Symbol('&' 12) ( SymbolLine(0 35 5 40 8) SymbolLine(0 5 0 15 8) SymbolLine(0 5 5 0 8) SymbolLine(0 25 15 10 8) SymbolLine(5 40 10 40 8) SymbolLine(10 40 20 30 8) SymbolLine(0 15 25 40 8) SymbolLine(5 0 10 0 8) SymbolLine(10 0 15 5 8) SymbolLine(15 5 15 10 8) SymbolLine(0 25 0 35 8) ) Symbol(''' 12) ( SymbolLine(0 10 10 0 8) ) Symbol('(' 12) ( SymbolLine(0 35 5 40 8) SymbolLine(0 5 5 0 8) SymbolLine(0 5 0 35 8) ) Symbol(')' 12) ( SymbolLine(0 0 5 5 8) SymbolLine(5 5 5 35 8) SymbolLine(0 40 5 35 8) ) Symbol('*' 12) ( SymbolLine(0 10 20 30 8) SymbolLine(0 30 20 10 8) SymbolLine(0 20 20 20 8) SymbolLine(10 10 10 30 8) ) Symbol('+' 12) ( SymbolLine(0 20 20 20 8) SymbolLine(10 10 10 30 8) ) Symbol(',' 12) ( SymbolLine(0 50 10 40 8) ) Symbol('-' 12) ( SymbolLine(0 20 20 20 8) ) Symbol('.' 
12) ( SymbolLine(0 40 5 40 8) ) Symbol('/' 12) ( SymbolLine(0 35 30 5 8) ) Symbol('0' 12) ( SymbolLine(0 35 5 40 8) SymbolLine(0 5 0 35 8) SymbolLine(0 5 5 0 8) SymbolLine(5 0 15 0 8) SymbolLine(15 0 20 5 8) SymbolLine(20 5 20 35 8) SymbolLine(15 40 20 35 8) SymbolLine(5 40 15 40 8) SymbolLine(0 30 20 10 8) ) Symbol('1' 12) ( SymbolLine(5 40 15 40 8) SymbolLine(10 0 10 40 8) SymbolLine(0 10 10 0 8) ) Symbol('2' 12) ( SymbolLine(0 5 5 0 8) SymbolLine(5 0 20 0 8) SymbolLine(20 0 25 5 8) SymbolLine(25 5 25 15 8) SymbolLine(0 40 25 15 8) SymbolLine(0 40 25 40 8) ) Symbol('3' 12) ( SymbolLine(0 5 5 0 8) SymbolLine(5 0 15 0 8) SymbolLine(15 0 20 5 8) SymbolLine(20 5 20 35 8) SymbolLine(15 40 20 35 8) SymbolLine(5 40 15 40 8) SymbolLine(0 35 5 40 8) SymbolLine(5 20 20 20 8) ) Symbol('4' 12) ( SymbolLine(0 20 20 0 8) SymbolLine(0 20 25 20 8) SymbolLine(20 0 20 40 8) ) Symbol('5' 12) ( SymbolLine(0 0 20 0 8) SymbolLine(0 0 0 20 8) SymbolLine(0 20 5 15 8) SymbolLine(5 15 15 15 8) SymbolLine(15 15 20 20 8) SymbolLine(20 20 20 35 8) SymbolLine(15 40 20 35 8) SymbolLine(5 40 15 40 8) SymbolLine(0 35 5 40 8) ) Symbol('6' 12) ( SymbolLine(15 0 20 5 8) SymbolLine(5 0 15 0 8) SymbolLine(0 5 5 0 8) SymbolLine(0 5 0 35 8) SymbolLine(0 35 5 40 8) SymbolLine(15 20 20 25 8) SymbolLine(0 20 15 20 8) SymbolLine(5 40 15 40 8) SymbolLine(15 40 20 35 8) SymbolLine(20 25 20 35 8) ) Symbol('7' 12) ( SymbolLine(0 40 25 15 8) SymbolLine(25 0 25 15 8) SymbolLine(0 0 25 0 8) ) Symbol('8' 12) ( SymbolLine(0 35 5 40 8) SymbolLine(0 25 0 35 8) SymbolLine(0 25 5 20 8) SymbolLine(5 20 15 20 8) SymbolLine(15 20 20 25 8) SymbolLine(20 25 20 35 8) SymbolLine(15 40 20 35 8) SymbolLine(5 40 15 40 8) SymbolLine(0 15 5 20 8) SymbolLine(0 5 0 15 8) SymbolLine(0 5 5 0 8) SymbolLine(5 0 15 0 8) SymbolLine(15 0 20 5 8) SymbolLine(20 5 20 15 8) SymbolLine(15 20 20 15 8) ) Symbol('9' 12) ( SymbolLine(0 40 20 20 8) SymbolLine(20 5 20 20 8) SymbolLine(15 0 20 5 8) SymbolLine(5 0 15 0 8) SymbolLine(0 5 5 0 8) 
SymbolLine(0 5 0 15 8) SymbolLine(0 15 5 20 8) SymbolLine(5 20 20 20 8) ) Symbol(':' 12) ( SymbolLine(0 15 5 15 8) SymbolLine(0 25 5 25 8) ) Symbol(';' 12) ( SymbolLine(0 40 10 30 8) SymbolLine(10 15 10 20 8) ) Symbol('<' 12) ( SymbolLine(0 20 10 10 8) SymbolLine(0 20 10 30 8) ) Symbol('=' 12) ( SymbolLine(0 15 20 15 8) SymbolLine(0 25 20 25 8) ) Symbol('>' 12) ( SymbolLine(0 10 10 20 8) SymbolLine(0 30 10 20 8) ) Symbol('?' 12) ( SymbolLine(10 20 10 25 8) SymbolLine(10 35 10 40 8) SymbolLine(0 5 0 10 8) SymbolLine(0 5 5 0 8) SymbolLine(5 0 15 0 8) SymbolLine(15 0 20 5 8) SymbolLine(20 5 20 10 8) SymbolLine(10 20 20 10 8) ) Symbol('A' 12) ( SymbolLine(0 5 0 40 8) SymbolLine(0 5 5 0 8) SymbolLine(5 0 20 0 8) SymbolLine(20 0 25 5 8) SymbolLine(25 5 25 40 8) SymbolLine(0 20 25 20 8) ) Symbol('B' 12) ( SymbolLine(0 40 20 40 8) SymbolLine(20 40 25 35 8) SymbolLine(25 25 25 35 8) SymbolLine(20 20 25 25 8) SymbolLine(5 20 20 20 8) SymbolLine(5 0 5 40 8) SymbolLine(0 0 20 0 8) SymbolLine(20 0 25 5 8) SymbolLine(25 5 25 15 8) SymbolLine(20 20 25 15 8) ) Symbol('C' 12) ( SymbolLine(5 40 20 40 8) SymbolLine(0 35 5 40 8) SymbolLine(0 5 0 35 8) SymbolLine(0 5 5 0 8) SymbolLine(5 0 20 0 8) ) Symbol('D' 12) ( SymbolLine(5 0 5 40 8) SymbolLine(20 0 25 5 8) SymbolLine(25 5 25 35 8) SymbolLine(20 40 25 35 8) SymbolLine(0 40 20 40 8) SymbolLine(0 0 20 0 8) ) Symbol('E' 12) ( SymbolLine(0 20 15 20 8) SymbolLine(0 40 20 40 8) SymbolLine(0 0 0 40 8) SymbolLine(0 0 20 0 8) ) Symbol('F' 12) ( SymbolLine(0 0 0 40 8) SymbolLine(0 0 20 0 8) SymbolLine(0 20 15 20 8) ) Symbol('G' 12) ( SymbolLine(20 0 25 5 8) SymbolLine(5 0 20 0 8) SymbolLine(0 5 5 0 8) SymbolLine(0 5 0 35 8) SymbolLine(0 35 5 40 8) SymbolLine(5 40 20 40 8) SymbolLine(20 40 25 35 8) SymbolLine(25 25 25 35 8) SymbolLine(20 20 25 25 8) SymbolLine(10 20 20 20 8) ) Symbol('H' 12) ( SymbolLine(0 0 0 40 8) SymbolLine(25 0 25 40 8) SymbolLine(0 20 25 20 8) ) Symbol('I' 12) ( SymbolLine(0 0 10 0 8) SymbolLine(5 0 5 40 8) SymbolLine(0 
40 10 40 8) ) Symbol('J' 12) ( SymbolLine(0 0 15 0 8) SymbolLine(15 0 15 35 8) SymbolLine(10 40 15 35 8) SymbolLine(5 40 10 40 8) SymbolLine(0 35 5 40 8) ) Symbol('K' 12) ( SymbolLine(0 0 0 40 8) SymbolLine(0 20 20 0 8) SymbolLine(0 20 20 40 8) ) Symbol('L' 12) ( SymbolLine(0 0 0 40 8) SymbolLine(0 40 20 40 8) ) Symbol('M' 12) ( SymbolLine(0 0 0 40 8) SymbolLine(0 0 15 15 8) SymbolLine(15 15 30 0 8) SymbolLine(30 0 30 40 8) ) Symbol('N' 12) ( SymbolLine(0 0 0 40 8) SymbolLine(0 0 0 5 8) SymbolLine(0 5 25 30 8) SymbolLine(25 0 25 40 8) ) Symbol('O' 12) ( SymbolLine(0 5 0 35 8) SymbolLine(0 5 5 0 8) SymbolLine(5 0 15 0 8) SymbolLine(15 0 20 5 8) SymbolLine(20 5 20 35 8) SymbolLine(15 40 20 35 8) SymbolLine(5 40 15 40 8) SymbolLine(0 35 5 40 8) ) Symbol('P' 12) ( SymbolLine(5 0 5 40 8) SymbolLine(0 0 20 0 8) SymbolLine(20 0 25 5 8) SymbolLine(25 5 25 15 8) SymbolLine(20 20 25 15 8) SymbolLine(5 20 20 20 8) ) Symbol('Q' 12) ( SymbolLine(0 5 0 35 8) SymbolLine(0 5 5 0 8) SymbolLine(5 0 15 0 8) SymbolLine(15 0 20 5 8) SymbolLine(20 5 20 35 8) SymbolLine(15 40 20 35 8) SymbolLine(5 40 15 40 8) SymbolLine(0 35 5 40 8) SymbolLine(10 30 20 40 8) ) Symbol('R' 12) ( SymbolLine(0 0 20 0 8) SymbolLine(20 0 25 5 8) SymbolLine(25 5 25 15 8) SymbolLine(20 20 25 15 8) SymbolLine(5 20 20 20 8) SymbolLine(5 0 5 40 8) SymbolLine(5 20 25 40 8) ) Symbol('S' 12) ( SymbolLine(20 0 25 5 8) SymbolLine(5 0 20 0 8) SymbolLine(0 5 5 0 8) SymbolLine(0 5 0 15 8) SymbolLine(0 15 5 20 8) SymbolLine(5 20 20 20 8) SymbolLine(20 20 25 25 8) SymbolLine(25 25 25 35 8) SymbolLine(20 40 25 35 8) SymbolLine(5 40 20 40 8) SymbolLine(0 35 5 40 8) ) Symbol('T' 12) ( SymbolLine(0 0 20 0 8) SymbolLine(10 0 10 40 8) ) Symbol('U' 12) ( SymbolLine(0 0 0 35 8) SymbolLine(0 35 5 40 8) SymbolLine(5 40 15 40 8) SymbolLine(15 40 20 35 8) SymbolLine(20 0 20 35 8) ) Symbol('V' 12) ( SymbolLine(0 0 0 30 8) SymbolLine(0 30 10 40 8) SymbolLine(10 40 20 30 8) SymbolLine(20 0 20 30 8) ) Symbol('W' 12) ( SymbolLine(0 0 0 40 8) 
SymbolLine(0 40 15 25 8) SymbolLine(15 25 30 40 8) SymbolLine(30 0 30 40 8) ) Symbol('X' 12) ( SymbolLine(0 0 0 5 8) SymbolLine(0 5 25 30 8) SymbolLine(25 30 25 40 8) SymbolLine(0 30 0 40 8) SymbolLine(0 30 25 5 8) SymbolLine(25 0 25 5 8) ) Symbol('Y' 12) ( SymbolLine(0 0 0 5 8) SymbolLine(0 5 10 15 8) SymbolLine(10 15 20 5 8) SymbolLine(20 0 20 5 8) SymbolLine(10 15 10 40 8) ) Symbol('Z' 12) ( SymbolLine(0 0 25 0 8) SymbolLine(25 0 25 5 8) SymbolLine(0 30 25 5 8) SymbolLine(0 30 0 40 8) SymbolLine(0 40 25 40 8) ) Symbol('[' 12) ( SymbolLine(0 0 5 0 8) SymbolLine(0 0 0 40 8) SymbolLine(0 40 5 40 8) ) Symbol('\' 12) ( SymbolLine(0 5 30 35 8) ) Symbol(']' 12) ( SymbolLine(0 0 5 0 8) SymbolLine(5 0 5 40 8) SymbolLine(0 40 5 40 8) ) Symbol('^' 12) ( SymbolLine(0 5 5 0 8) SymbolLine(5 0 10 5 8) ) Symbol('_' 12) ( SymbolLine(0 40 20 40 8) ) Symbol('a' 12) ( SymbolLine(15 20 20 25 8) SymbolLine(5 20 15 20 8) SymbolLine(0 25 5 20 8) SymbolLine(0 25 0 35 8) SymbolLine(0 35 5 40 8) SymbolLine(20 20 20 35 8) SymbolLine(20 35 25 40 8) SymbolLine(5 40 15 40 8) SymbolLine(15 40 20 35 8) ) Symbol('b' 12) ( SymbolLine(0 0 0 40 8) SymbolLine(0 35 5 40 8) SymbolLine(5 40 15 40 8) SymbolLine(15 40 20 35 8) SymbolLine(20 25 20 35 8) SymbolLine(15 20 20 25 8) SymbolLine(5 20 15 20 8) SymbolLine(0 25 5 20 8) ) Symbol('c' 12) ( SymbolLine(5 20 20 20 8) SymbolLine(0 25 5 20 8) SymbolLine(0 25 0 35 8) SymbolLine(0 35 5 40 8) SymbolLine(5 40 20 40 8) ) Symbol('d' 12) ( SymbolLine(20 0 20 40 8) SymbolLine(15 40 20 35 8) SymbolLine(5 40 15 40 8) SymbolLine(0 35 5 40 8) SymbolLine(0 25 0 35 8) SymbolLine(0 25 5 20 8) SymbolLine(5 20 15 20 8) SymbolLine(15 20 20 25 8) ) Symbol('e' 12) ( SymbolLine(5 40 20 40 8) SymbolLine(0 35 5 40 8) SymbolLine(0 25 0 35 8) SymbolLine(0 25 5 20 8) SymbolLine(5 20 15 20 8) SymbolLine(15 20 20 25 8) SymbolLine(0 30 20 30 8) SymbolLine(20 30 20 25 8) ) Symbol('f' 10) ( SymbolLine(5 5 5 40 8) SymbolLine(5 5 10 0 8) SymbolLine(10 0 15 0 8) SymbolLine(0 20 10 20 8) 
) Symbol('g' 12) ( SymbolLine(15 20 20 25 8) SymbolLine(5 20 15 20 8) SymbolLine(0 25 5 20 8) SymbolLine(0 25 0 35 8) SymbolLine(0 35 5 40 8) SymbolLine(5 40 15 40 8) SymbolLine(15 40 20 35 8) SymbolLine(0 50 5 55 8) SymbolLine(5 55 15 55 8) SymbolLine(15 55 20 50 8) SymbolLine(20 20 20 50 8) ) Symbol('h' 12) ( SymbolLine(0 0 0 40 8) SymbolLine(0 25 5 20 8) SymbolLine(5 20 15 20 8) SymbolLine(15 20 20 25 8) SymbolLine(20 25 20 40 8) ) Symbol('i' 10) ( SymbolLine(0 10 0 15 8) SymbolLine(0 25 0 40 8) ) Symbol('j' 10) ( SymbolLine(5 10 5 15 8) SymbolLine(5 25 5 50 8) SymbolLine(0 55 5 50 8) ) Symbol('k' 12) ( SymbolLine(0 0 0 40 8) SymbolLine(0 25 15 40 8) SymbolLine(0 25 10 15 8) ) Symbol('l' 10) ( SymbolLine(0 0 0 35 8) SymbolLine(0 35 5 40 8) ) Symbol('m' 12) ( SymbolLine(5 25 5 40 8) SymbolLine(5 25 10 20 8) SymbolLine(10 20 15 20 8) SymbolLine(15 20 20 25 8) SymbolLine(20 25 20 40 8) SymbolLine(20 25 25 20 8) SymbolLine(25 20 30 20 8) SymbolLine(30 20 35 25 8) SymbolLine(35 25 35 40 8) SymbolLine(0 20 5 25 8) ) Symbol('n' 12) ( SymbolLine(5 25 5 40 8) SymbolLine(5 25 10 20 8) SymbolLine(10 20 15 20 8) SymbolLine(15 20 20 25 8) SymbolLine(20 25 20 40 8) SymbolLine(0 20 5 25 8) ) Symbol('o' 12) ( SymbolLine(0 25 0 35 8) SymbolLine(0 25 5 20 8) SymbolLine(5 20 15 20 8) SymbolLine(15 20 20 25 8) SymbolLine(20 25 20 35 8) SymbolLine(15 40 20 35 8) SymbolLine(5 40 15 40 8) SymbolLine(0 35 5 40 8) ) Symbol('p' 12) ( SymbolLine(5 25 5 55 8) SymbolLine(0 20 5 25 8) SymbolLine(5 25 10 20 8) SymbolLine(10 20 20 20 8) SymbolLine(20 20 25 25 8) SymbolLine(25 25 25 35 8) SymbolLine(20 40 25 35 8) SymbolLine(10 40 20 40 8) SymbolLine(5 35 10 40 8) ) Symbol('q' 12) ( SymbolLine(20 25 20 55 8) SymbolLine(15 20 20 25 8) SymbolLine(5 20 15 20 8) SymbolLine(0 25 5 20 8) SymbolLine(0 25 0 35 8) SymbolLine(0 35 5 40 8) SymbolLine(5 40 15 40 8) SymbolLine(15 40 20 35 8) ) Symbol('r' 12) ( SymbolLine(5 25 5 40 8) SymbolLine(5 25 10 20 8) SymbolLine(10 20 20 20 8) SymbolLine(0 20 5 25 8) 
) Symbol('s' 12) ( SymbolLine(5 40 20 40 8) SymbolLine(20 40 25 35 8) SymbolLine(20 30 25 35 8) SymbolLine(5 30 20 30 8) SymbolLine(0 25 5 30 8) SymbolLine(0 25 5 20 8) SymbolLine(5 20 20 20 8) SymbolLine(20 20 25 25 8) SymbolLine(0 35 5 40 8) ) Symbol('t' 10) ( SymbolLine(5 0 5 35 8) SymbolLine(5 35 10 40 8) SymbolLine(0 15 10 15 8) ) Symbol('u' 12) ( SymbolLine(0 20 0 35 8) SymbolLine(0 35 5 40 8) SymbolLine(5 40 15 40 8) SymbolLine(15 40 20 35 8) SymbolLine(20 20 20 35 8) ) Symbol('v' 12) ( SymbolLine(0 20 0 30 8) SymbolLine(0 30 10 40 8) SymbolLine(10 40 20 30 8) SymbolLine(20 20 20 30 8) ) Symbol('w' 12) ( SymbolLine(0 20 0 35 8) SymbolLine(0 35 5 40 8) SymbolLine(5 40 10 40 8) SymbolLine(10 40 15 35 8) SymbolLine(15 20 15 35 8) SymbolLine(15 35 20 40 8) SymbolLine(20 40 25 40 8) SymbolLine(25 40 30 35 8) SymbolLine(30 20 30 35 8) ) Symbol('x' 12) ( SymbolLine(0 20 20 40 8) SymbolLine(0 40 20 20 8) ) Symbol('y' 12) ( SymbolLine(0 20 0 35 8) SymbolLine(0 35 5 40 8) SymbolLine(20 20 20 50 8) SymbolLine(15 55 20 50 8) SymbolLine(5 55 15 55 8) SymbolLine(0 50 5 55 8) SymbolLine(5 40 15 40 8) SymbolLine(15 40 20 35 8) ) Symbol('z' 12) ( SymbolLine(0 20 20 20 8) SymbolLine(0 40 20 20 8) SymbolLine(0 40 20 40 8) ) Symbol('{' 12) ( SymbolLine(5 5 10 0 8) SymbolLine(5 5 5 15 8) SymbolLine(0 20 5 15 8) SymbolLine(0 20 5 25 8) SymbolLine(5 25 5 35 8) SymbolLine(5 35 10 40 8) ) Symbol('|' 12) ( SymbolLine(0 0 0 40 8) ) Symbol('}' 12) ( SymbolLine(0 0 5 5 8) SymbolLine(5 5 5 15 8) SymbolLine(5 15 10 20 8) SymbolLine(5 25 10 20 8) SymbolLine(5 25 5 35 8) SymbolLine(0 40 5 35 8) ) Symbol('~' 12) ( SymbolLine(0 25 5 20 8) SymbolLine(5 20 10 20 8) SymbolLine(10 20 15 25 8) SymbolLine(15 25 20 25 8) SymbolLine(20 25 25 20 8) ) """ pcb_layers = """ Layer(1 "solder") ( ) Layer(2 "component") ( ) Layer(3 "GND") ( ) Layer(4 "power") ( ) Layer(5 "signal1") ( ) Layer(6 "signal2") ( ) Layer(7 "unused") ( ) Layer(8 "unused") ( ) Layer(9 "silk") ( ) Layer(10 "silk") ( ) """ class CPCB: 
" PCB class " def __init__(self, sch=None,brd=None): self.name="" self.sch=sch self.brd=brd self.script_path="" def addLayers(self): # These layers have to be added in the board # self.brd.addLayer(CLayer("solder",1)) # bottom orientation # self.brd.addLayer(CLayer("component",2)) # these layers are already added ## self.brd.addLayer(CLayer("GND",3)) ## self.brd.addLayer(CLayer("VCC",4)) ## self.brd.addLayer(CLayer("blksolder",5)) # bottom orientation ## self.brd.addLayer(CLayer("blkcomponent",6)) ## self.brd.addLayer(CLayer("signal3",7)) ## self.brd.addLayer(CLayer("signal4",8)) ## self.brd.addLayer(CLayer("Vias",9)) ## self.brd.addLayer(CLayer("silk",10)) pass #Layer(1 "solder") #( # Line(1375 1075 1325 1025 40 30 0x00000020) #) # gen sch layers scr" def genSchLayersScr(self): ns = '' CRLF = "\n" ns = pcb_layers; return ns; #ADD 'C1' 'G$1' POLARISED_CASE_H@ipc-7351-capacitor R0.000 (-0.300 3.300); #ADD 'Q1' 'G$1' -PNP-SOT23-EBC@transistor R0.000 (1.600 3.300); #ADD 'Q5' 'G$1' MMBT2222ALT1-NPN-SOT23-BEC@transistor R0.000 (0.900 2.800); #ADD 'V1' 'GND' GND@supply2 R0.000 (0.600 0.100); #ADD 'V2' 'G$1' VCC@supply2 R0.000 (5.600 4.200); # gen sch add scr" def genSchAddScr(self): ns = '' CRLF = "\n" ns += "GRID INCH 0.005" + CRLF ns += "Layer (91 Nets;" + CRLF ns += "Layer (92 Busses;" + CRLF ns += "Layer (93 Pins;" + CRLF ns += "Layer (94 Symbols;" + CRLF ns += "Layer (95 Names;" + CRLF ns += "Layer (96 Values;" + CRLF ns += "Layer (250 Descript;" + CRLF ns += "Layer (251 SMDround;" + CRLF ns += "DISPLAY -PINS" + CRLF ns += CRLF ns += "EDIT .S1" + CRLF ns += "SET WIRE_BEND 2;" + CRLF ns += "CHANGE STYLE 'Continuous'" + CRLF for dev in self.sch.devices: ns += "ADD '" + str(dev.refid) + "' 'G$1' " + str(dev.name) + "@" + str(dev.libname) + " " + dev.orientation + "R%.3f"% (dev.rotation) +" (" + str(dev.position) + ");" + CRLF ns += "GRID LAST" + CRLF return ns # gen cmd sch net-connect" def genSchNetConnectScr(self): ns = '' CRLF = "\n" runcmd="run " + self.script_path 
+ "/sch-net-connect.ulp" for net in self.sch.nets.values: prevdev="" prevpin="" l = "" first = 1 for node in net.nodes: if first: first = 0 prevdev=str(node.dev.refid) prevpin=str(node.pin) else: l = runcmd + " " + net.name + " " + prevdev + " " + prevpin + " " + str(node.dev.refid) + " " + str(node.pin) + ";" + CRLF ns += l prevdev=str(node.dev.refid) prevpin=str(node.pin) # string function return ns # gen sch netlist listing def genSchNetlistLst(self): ns = '' CRLF = '\n' for net in self.sch.nets.values(): name = net.name ns += net.name + ' ' for node in net.nodes: ns += str(node.dev.refid) + '-' + str(node.pin.num) + ' ' ns += CRLF ns += CRLF #extra one # string function return ns # gen sch netlist script def genSchNetlistScr(self): ns = '' CRLF = "\n" ns = "# Netlist script" + CRLF ns += "# EAGLE Version 4.11" + CRLF ns += "# Copyright Hobby-Robotics" + CRLF ns += expandtab("#Net\tPart\tPad",12) + CRLF ns += CRLF for net in self.sch.nets.values(): ns += CRLF ns += "Change Class 0;" + CRLF l = "Signal " + " '" + net.name + "'" first = 1 for node in net.nodes: if first: first = 0 l += "\t'" else: l += "\t\t" l += str(node.dev.refid) + "'\t'" + str(node.pin) + "' \\" + CRLF ns += expandtab(str(l),12) ns += "\t\t\t;" + CRLF # string function return ns # Select # {"All objects" Select(ObjectByName) ActiveWhen(have_regex)} # {"Elements" Select(ElementByName) ActiveWhen(have_regex)} # {"Pads" Select(PadByName) ActiveWhen(have_regex)} # {"Pins" Select(PinByName) ActiveWhen(have_regex)} # {"Text" Select(TextByName) ActiveWhen(have_regex)} # {"Vias" Select(ViaByName) ActiveWhen(have_regex)} # Move # {"Move selected elements to other side" Flip(SelectedElements) a={"Shift-B" "Shift<Key>b"}} # {"Remove selected objects" RemoveSelected()} # {Connects m=C # {"Lookup connection to object" GetXY(Select the object) Connection(Find) a={"Ctrl-F" "Ctrl<Key>f"}} # {"Reset scanned pads/pins/vias" Connection(ResetPinsViasAndPads) Display(Redraw)} # {"Reset scanned lines/polygons" 
Connection(ResetLinesAndPolygons) Display(Redraw)} # {"Reset all connections" Connection(Reset) Display(Redraw) a={"Shift-F" "Shift<Key>f"}} # {"Optimize rats nest" Atomic(Save) DeleteRats(AllRats) # Atomic(Restore) AddRats(AllRats) Atomic(Block) a={"O" "<Key>o"}} # {"Erase rats nest" DeleteRats(AllRats) a={"E" "<Key>e"}} # {"Erase selected rats" DeleteRats(SelectedRats) a={"Shift-E" "Shift<Key>e"}} # # {"Auto-route selected rats" AutoRoute(Selected)} # {"Auto-route all rats" AutoRoute(AllRats)} # {"Rip up all auto-routed tracks" RipUp(All)} # {"Optimize routed tracks" # {"Auto-Optimize" djopt(auto) a={"Shift-=" "Shift<Key>="}} # {"Debumpify" djopt(debumpify) } # {"Unjaggy" djopt(unjaggy) } # {"Vianudge" djopt(vianudge) } # {"Viatrim" djopt(viatrim) } # {"Ortho pull" djopt(orthopull) } # {"Simple optimization" djopt(simple) a={"=" "<Key>="}} # {"Miter" djopt(miter) } # {"Puller" a={"Y" "<Key>y"} Puller() } # # {"Only autorouted nets" OptAutoOnly() checked=optautoonly} # } # {"Design Rule Checker" DRC()} # {"Apply vendor drill mapping" ApplyVendor()} # } def genBrdPlaceBottom(self): ns = '' #Select(ElementByName|ObjectByName|PadByName|PinByName) for dev in self.brd.devices.values(): name = str(dev.refid) if dev.bottom: #Select(ElementByName) ActiveWhen(have_regex) ns += 'Select(ElementByName) ActiveWhen( ' + name + ' )\n' ns += 'Flip(SelectedElements)\n' ns += 'Unselect(All)\n' return ns # gen brd cmd scr" def genBrdCmdScr(self): ns = '' CRLF = "\n" ns += "# Gen EDA generated" + CRLF ns += "# date:" + CRLF # version ns += "# user:" + CRLF # version # LoadFrom(Layout|LayoutToBuffer|ElementToBuffer|Netlist|Revert,filename) ns += 'LoadFrom( Layout, ' + self.script_path + '/' + self.brd.name + '.pcb )' + CRLF # layout ns += 'LoadFrom( Netlist, ' + self.script_path + '/' + self.brd.name + '.net )' + CRLF # netlist # Do not do that, do it in the placement # ns += self.genBrdPlaceBottom() # AddRats(AllRats|SelectedRats|Close) ns += 'AddRats(AllRats)' + CRLF # add all rats 
# AutoRoute(AllRats|SelectedRats) ns += 'AutoRoute(AllRats)' + CRLF # route all rats # Auto-Optimize djopt(auto) ns += 'djopt(auto)' + CRLF # optimize all routes # SaveTo(Layout|LayoutAs,filename) ns += 'SaveTo( LayoutAs, ' + self.script_path + '/' + self.brd.name + '.brd )' + CRLF # board ns += 'Quit( )' + CRLF # Quit return ns ##################################### ## release: pcb 1.7.1.ALPHA ## date: Sun Jul 22 15:22:22 2001 ## user: tp (Terry Porter,,,) ## host: gronk.porter.net #PCB("" 6047 4000) # #Grid(25 0 0 0) #Cursor(400 0 2) #Flags(0x000000c0) #Groups("1,s:2,c:3:4:5:6:7:8") #Styles("Signal,10,40,20:Power,25,60,35:Fat,40,60,35:Skinny,8,36,20") #################################### # release: pcb 1.99v # date: Tue May 1 07:59:48 2007 # user: pawel (pawel,U-WODNICKI\pawel,S-1-5-21-1835012242-1811546175-1750076985-1007) # host: Wodnicki # #FileVersion[20070407] # #PCB["" 350000 330000] # #Grid[3937.007904 1800 100 1] #Cursor[133000 107500 2.000000] #PolyArea[200000000.000000] #Thermal[0.500000] #DRC[1000 1000 1000 1000 1500 1000] #Flags("rubberband,nameonpcb,alldirection,uniquename,snappin") #Groups("4,5,6,c:1,2,3,s:8:7") #Styles["Signal,1000,4000,2000,1000:Power,2500,6000,3500,1000:Fat,4000,6000,3500,1000:Skinny,800,3600,2000,1000"] # gen brd board scr" def genBrdBoardScr(self): ns = '' CRLF = "\n" ns += "# boostEDA generated" + CRLF ns += "# date:" + CRLF # version ns += "# user:" + CRLF # version # determine board size, aka outline for rectangular ones only self.brd.outline.calcBBox() xsize = self.brd.outline.bbox.sizeX() ysize = self.brd.outline.bbox.sizeY() ns += "PCB[\"" + self.brd.name + "\" " ns += "%d "% (xsize) # x size ns += " %d"% (ysize) # y size ns += "]" + CRLF ns += "Grid(25 0 0 0)" + CRLF ns += "Cursor(400 0 2)" + CRLF ns += "Flags(0x000000c0)" + CRLF ns += "Groups(\"1,s:2,c:3:4:5:6:7:8\")" + CRLF ns += "Styles(\"Signal,10,40,20:Power,25,60,35:Fat,40,60,35:Skinny,8,36,20\")" + CRLF return ns #Layer(1 "solder") #( # Line(1375 1075 1325 1025 40 
30 0x00000020) #) def genBrdLayerFromNet(self,layer,net): ns = '' # Should come from board technology ### print "out net " + net.name ### print "layer num " + str(layer.num) for line in net.route: #print "found line on net layer num " + str(line.layernum) if line.layernum == layer.num: ### print "out line on net " + net.name ### print "net.route length " + str(len(net.route)) ### print "line.points length " + str(len(line.points)) Thickness = line.thickness Clearance = line.thickness * 2 first = True prev = Point() for pt in line.points: #print "pt " + str(pt) if first: first = False else: X1 = int(prev._x) Y1 = int(prev._y) X2 = int(pt._x) Y2 = int(pt._y) ns += 'Line [' + " %d "% X1 + " %d "% Y1 + " %d "% X2 + " %d "% Y2 ns += " %d "% Thickness ns += " %d "% Clearance ns += '"auto"' ns += ']\n' prev = pt return ns def genLayerBlockages(self,layer): ns = '' # blockages use absolute coordinates, for rect in layer.blockages: # order of processing is important X1=int(rect.ll._x) Y1=int(rect.ll._y) X2=int(rect.ur._x) Y2=int(rect.ur._y) ns += ' Polygon("clearpoly")\n' ns += '(\n' ns += " [%d "% X1 + " %d ]"% Y1 ns += " [%d "% X1 + " %d ]"% Y2 ns += " [%d "% X2 + " %d ]"% Y2 ns += " [%d "% X2 + " %d ]"% Y1 ns += '\n' ns += ')\n' return ns; # routing # gen brd layers scr" def genBrdLayersScr(self): ### print "PCB! 
gen brd layers scr" ns = '' CRLF = "\n" for l in self.brd.layers: ### print "layer " + l.name ns += "Layer (" +str(l.num) + " \"" + l.name + "\")" + CRLF ns += "(" + CRLF # here go all of the layer elements for net in self.brd.nets.values(): ns += self.genBrdLayerFromNet(l,net) # Routes ns += self.generateNetPour(l,net) # Geometry ns += self.genLayerBlockages(l) ns += ")" + CRLF return ns; def generateRoutes(self): return self.genBrdLayersScr() def generateNetPour(self,layer,net): ns = '' CRLF = "\n" ### print " layer " + str(layer) for geom in net.geometry: ### print " found geom in " + net.name + " type " + str(type(geom)) + " layer " + str(geom.layernum) + CRLF if geom.layernum != layer.num : continue # Handle rectangle #if type(geom) is Rectangle : if isinstance(geom, Rectangle) : ### print " found Rectangle" + CRLF rect = Rectangle(geom.ll._x, geom.ll._y, geom.ur._x, geom.ur._y, geom.layernum ) rect.normalize() # normalize just in case # order of processing is important X1=int(rect.ll._x) Y1=int(rect.ll._y) X2=int(rect.ur._x) Y2=int(rect.ur._y) ns += ' Polygon("clearpoly")\n' ns += '(\n' ns += " [%d "% X1 + " %d ]"% Y1 ns += " [%d "% X1 + " %d ]"% Y2 ns += " [%d "% X2 + " %d ]"% Y2 ns += " [%d "% X2 + " %d ]"% Y1 ns += '\n' ns += ')\n' return ns; # Geometry on nets, aka pour def generatePour(self): ns = '' CRLF = "\n" for l in self.brd.layers: ### print "layer " + l.name ns += "Layer (" +str(l.num) + " \"" + l.name + "\")" + CRLF ns += "(" + CRLF # here go through the layers for net in self.brd.nets.values(): ns += self.generateNetPour(l,net) ns += ")" + CRLF return ns; # Via[] # Via[17000 182000 31000 3000 34000 2800 "" ""] # Via [X Y Thickness Clearance Mask Drill "Name" SFlags] # Via (X Y Thickness Clearance Mask Drill "Name" NFlags) # Via (X Y Thickness Clearance Drill "Name" NFlags) # Via (X Y Thickness Drill "Name" NFlags) # Via (X Y Thickness "Name" NFlags) # X Y coordinates of center # Thickness outer diameter of copper annulus # Clearance add to 
thickness to get clearance diameter # Mask diameter of solder mask opening # Drill diameter of drill # Name string, name of via (vias have names?) # SFlags symbolic or numerical flags # NFlags numerical flags only def generateVias(self): ns = '' CRLF = "\n" ### print " board vias " + str(len(self.brd.vias)) for via in self.brd.vias: ### print "via " + via.name ns += "Via [" ns += " %d "% int(via.pos._x) + " %d "% int(via.pos._y) ns += ' 4000 2000 0 2000 "" "" ' ns += "]" + CRLF return ns; #NetList() #( # Net("unnamed_net1" "(unknown)") # ( # Connect("L1-2") # Connect("L2-1") # Connect("C2-1") # Connect("C1-1") # ) #) # gen brd net scr" def genBrdNetlistScr(self): ns = '' CRLF = "\n" ns = 'NetList()' + CRLF ns += '(' + CRLF for net in self.sch.nets.values(): name = net.name ns += "Net(\"" + net.name + "\" \"(unknown)\")" + CRLF ns += "(" + CRLF for node in net.nodes: ns += expandtab("\tConnect(\"") + str(node.dev.refid) + "-" + str(node.pin.num) + "\")" + CRLF ns += ")" + CRLF ns += ')' + CRLF return ns # pcb footprint file may contain any of the following commands: # Element [element_flags, description, pcb-name, value, mark_x, mark_y, text_x, text_y, text_direction, text_scale, text_flags] # Pad [x1 y1 x2 y2 thickness clearance mask name pad_number flags] # Pin [x y thickness clearance mask drillholedia name number flags] # ElementArc [x y r1 r2 startangle sweepangle thickness] # ElementLine [x1 y1 x2 y2 thickness] > thickness != 1000 = 10 mils almost for all footprints # Comment lines start with the #-sign #Elements # Element [element_flags, description, pcb-name, value, mark_x, mark_y, text_x, text_y, text_direction, text_scale, text_flags] item allowed value explanation comment # element_flags unsigned hex value # description string text description of footprint written by footprint author # pcb name string refdes used on this particular pcb xxx # value string value of component on this particular pcb layout xxx # mark_x 1/100th mils # mark_y 1/100th mils # 
text_x 1/100th mils # text_y 1/100th mils # text direction decimal integer 0=horiz; 1=ccw90; 2=180; 3=cw90 # text_scale decimal integer usu. set 100 # text_flags unsigned hex # Pads # Pad[x1 y1 x2 y2 thickness clearance mask name pad_number flags] Item Allowed Value Explanation Comment # x1 1/100th mils x(1st point) # y1 1/100th mils y(1st point) # x2 1/100th mils x(2nd point) # y2 1/100th mils y(2nd point) # thickness 1/100 mils width of metal surrounding line segment see Brorson .pdf # clearance 1/100 mils distance to any other copper on any layer actually 1/2 of this number is used! # mask 1/100th mils width of mask relief actual width of the mask centered on pad copper # name string name of pad (arb. string) e.g. pad_1 or positive or any other string # pad_number string pad # used for nets. it MUST be consistent with the definitions on the netlist. # flags hex value xxx # Pin[x y thickness clearance mask drillholedia name number flags] Item Allowed Value Explanation Comment # x 1/100th mils pin x coord. # y 1/100th mils pin y coord. # thickness 1/100th mils copper diameter # clearance 1/100th mils 2*(cu to cu clearance) if you want a 10 mil clearance, put 2000 (20 mils) here # mask 1/100th mils diameter of mask aperture actual dia. of hole in mask # drillholedia 1/100th mils dia. of hole # name string arb. pin name # number decimal integer pin number used by nets/rats # flags hex xxx # Via[] # Via[17000 182000 31000 3000 34000 2800 "" ""] # Via [X Y Thickness Clearance Mask Drill "Name" SFlags] # Via (X Y Thickness Clearance Mask Drill "Name" NFlags) # Via (X Y Thickness Clearance Drill "Name" NFlags) # Via (X Y Thickness Drill "Name" NFlags) # Via (X Y Thickness "Name" NFlags) # X Y coordinates of center # Thickness outer diameter of copper annulus # Clearance add to thickness to get clearance diameter # Mask diameter of solder mask opening # Drill diameter of drill # Name string, name of via (vias have names?) 
# SFlags symbolic or numerical flags # NFlags numerical flags only # On the Layer # Line[] # Line[137500 107500 132500 102500 4000 3000 "clearline"] # Text[423000 391500 2 100 "T J PORTER ELECTRONICS" "auto"] # Polygon("clearpoly") # ( # [2000 198000] [47000 198000] [47000 187000] [126000 187000] [126000 198000] # [297000 198000] [297000 1000] [2000 1000] # ) # Notes: # Pins - Throughole # Pads - SMD # Examples for version 1.99 # TH # Element["" "Cap" "C17" "" 215500 81500 -9000 -32900 0 150 ""] # ( # Pin[0 0 8000 3000 11000 3500 "1" "1" ""] # Pin[0 -20000 8000 3000 11000 3500 "2" "2" ""] # ElementLine [-5000 5000 5000 5000 1000] # ElementLine [5000 5000 5000 -25000 1000] # ElementLine [5000 -25000 -5000 -25000 1000] # ElementLine [-5000 -25000 -5000 5000 1000] # # ) # SMD # Element["" "SMD 0805" "C13" "" 252500 151000 -3000 4500 0 150 ""] # ( # Pad[0 0 0 0 6000 3000 9000 "1" "1" "square"] # Pad[0 -9000 0 -9000 6000 3000 9000 "2" "2" "square"] # ElementLine [-3500 -12500 -3500 3500 1000] # ElementLine [3500 -12500 -3500 -12500 1000] # ElementLine [3500 3500 3500 -12500 1000] # ElementLine [-3500 3500 3500 3500 1000] # ) # # Original #Element["" "SOT-23 package" "Q7" "" 66666 66666 3200 5900 0 100 ""] #( # Pad[0 -300 0 300 3400 3000 4000 "1" "1" "square,edge2"] # Pad[7800 -300 7800 300 3400 3000 4000 "2" "2" "square,edge2"] # Pad[3900 -8500 3900 -7900 3400 3000 4000 "3" "3" "square"] # ElementLine [10300 -11000 -2500 -11000 1000] # ElementLine [10300 2900 10300 -11000 1000] # ElementLine [-2500 2900 10300 2900 1000] # ElementLine [-2500 -11000 -2500 2900 1000] #) # Placed on the far side -> layer onsolder? 
#Element["selected,onsolder" "SOT-23 package" "Q7" "" 66666 133334 3200 -5900 0 100 "selected,auto"] #( # Pad[0 300 0 -300 3400 3000 4000 "1" "1" "selected,onsolder,square"] # Pad[7800 300 7800 -300 3400 3000 4000 "2" "2" "selected,onsolder,square"] # Pad[3900 8500 3900 7900 3400 3000 4000 "3" "3" "selected,onsolder,square,edge2"] # ElementLine [10300 11000 -2500 11000 1000] # ElementLine [10300 -2900 10300 11000 1000] # ElementLine [-2500 -2900 10300 -2900 1000] # ElementLine [-2500 11000 -2500 -2900 1000] # # ) # VIAs # Via[17000 182000 31000 3000 34000 2800 "" ""] # Via[17000 17000 31000 3000 34000 2800 "" ""] # Via[282000 17000 31000 3000 34000 2800 "" ""] # Via[282000 182000 31000 3000 34000 2800 "" ""] # Via[15500 382500 31000 3000 34000 2800 "" ""] # Via[15500 217500 31000 3000 34000 2800 "" ""] # Via[280500 217500 31000 3000 34000 2800 "" ""] # Tracks are made of Line???? # Layer(1 "solder") # ( # Line[137500 107500 132500 102500 4000 3000 "clearline"] # Line[145000 107500 137500 107500 4000 3000 "clearline"] # Line[85000 112500 85000 107500 4000 3000 "clearline"] # Line[97500 90000 97500 147500 4000 3000 "clearline"] #) # Element [element_flags, description, pcb-name, value, mark_x, mark_y, text_x, text_y, text_direction, text_scale, text_flags] def gen0805_resitor(self,refid,x,y,v): CRLF = '\n' s = 'Element["" "0805 chip resitor" "' + str(refid) + '" "' + str(v) + '" ' +'%i'% x + ' ' + '%i'% y + ' 3200 5900 0 100 ""]' + CRLF s += '(' + CRLF s += ' Pad[0 -700 0 700 4500 3000 5100 "1" "1" "square"]' + CRLF s += ' Pad[8000 -700 8000 700 4500 3000 5100 "2" "2" "square"]' + CRLF s += ' ElementLine [11700 -4400 -3700 -4400 800]' + CRLF s += ' ElementLine [11700 4400 11700 -4400 800]' + CRLF s += ' ElementLine [-3700 4400 11700 4400 800]' + CRLF s += ' ElementLine [-3700 -4400 -3700 4400 800]' + CRLF s += ')' + CRLF return s def gen0805_capacitor(self,refid,x,y,v): CRLF = '\n' s = 'Element["" "0805 chip cap" "' + str(refid) + '" "' + str(v) + '" ' +'%i'% x + ' ' 
+ '%i'% y + ' 3200 5900 0 100 ""]' + CRLF s += '(' + CRLF s += ' Pad[0 -700 0 700 4500 3000 5100 "1" "1" "square"]' + CRLF s += ' Pad[8000 -700 8000 700 4500 3000 5100 "2" "2" "square"]' + CRLF s += ' ElementLine [11700 -4400 -3700 -4400 800]' + CRLF s += ' ElementLine [11700 4400 11700 -4400 800]' + CRLF s += ' ElementLine [-3700 4400 11700 4400 800]' + CRLF s += ' ElementLine [-3700 -4400 -3700 4400 800]' + CRLF s += ')' + CRLF return s def genSOT23(self, refid, x, y, v): CRLF = '\n' s = 'Element["" "SOT-23 package" "' + str(refid) + '" "' + str(v) + '" ' +'%i'% x + ' ' + '%i'% y + ' 3200 5900 0 100 ""]' + CRLF s += '(' + CRLF s += ' Pad[0 -300 0 300 3400 3000 4000 "1" "1" "square,edge2"]' + CRLF s += ' Pad[7800 -300 7800 300 3400 3000 4000 "2" "2" "square,edge2"]' + CRLF s += ' Pad[3900 -8500 3900 -7900 3400 3000 4000 "3" "3" "square"] ' + CRLF s += ' ElementLine [10300 -11000 -2500 -11000 1000]' + CRLF s += ' ElementLine [10300 2900 10300 -11000 1000]' + CRLF s += ' ElementLine [-2500 2900 10300 2900 1000]' + CRLF s += ' ElementLine [-2500 -11000 -2500 2900 1000]' + CRLF s += ')' + CRLF return s def rotatePoint(self,pt,x0,y0,angle): dX = pt._x - x0 dY = pt._y - y0 rX = pt._x rY = pt._y if angle == 90: rX = x0 + dY rY = y0 - dX if angle == 180: rX = x0 - dX rY = y0 - dY if angle == 270: rX = x0 - dY rY = y0 + dX return rX,rY def genElementLine(self,line,dev): # order of processing is important X1=int(line.points[0]._x) Y1=int(line.points[0]._y) X2=int(line.points[1]._x) Y2=int(line.points[1]._y) if dev.bottom: Y1 = 0 - Y1 Y2 = 0 - Y2 X1,Y1 = self.rotatePoint(Point(X1,Y1),0,0,dev.rotation) X2,Y2 = self.rotatePoint(Point(X2,Y2),0,0,dev.rotation) # keep horizontal, vertical Point2 > Point1 if (X1 == X2): if (Y1 > Y2): t = Y1 Y1 = Y2 Y2 = t else: if (Y1 == Y2): if (X1 > X2): t = X1 X1 = X2 X2 = t ns = 'ElementLine [' + " %d "% X1 + " %d "% Y1 + " %d "% X2 + " %d "% Y2 ns += " %d "% line.thickness ns += ']\n' return ns # rotation is clockwise def 
genElementArc(self,arc,dev): # Thickness, Clearance, Mask, Drill, Name, Number, SFlags rX = int(arc._x) rY = int(arc._y) # rY is if dev.bottom: rY = 0 - rY if dev.rotation == 90: arc.sangle += 90 if dev.rotation == 180: arc.sangle += 180 if dev.rotation == 270: arc.sangle += 270 rX,rY = self.rotatePoint(arc,0,0,dev.rotation) arc.sangle = arc.sangle % 360 ns = 'ElementArc [' + " %d "% rX + " %d "% rY ns += " %d "% arc.width ns += " %d "% arc.height ns += " %d "% arc.sangle ns += " %d "% arc.dangle ns += " %d "% arc.thickness ns += ']\n' return ns def genElementPin(self,pin,dev): # Thickness, Clearance, Mask, Drill, Name, Number, SFlags rX=int(pin.pos._x) rY=int(pin.pos._y) # Why we do not have to do it for the pins? # rY is #if dev.bottom: # rY = 0 - rY # Package has not been rotated and must match device pins rX,rY = self.rotatePoint(Point(rX,rY),0,0,dev.rotation) ns = 'Pin [' + " %d "% rX + " %d "% rY ns += " %d "% pin.thickness ns += " %d "% pin.clearance ns += " %d "% pin.mask ns += " %d "% pin.drill ns += pin.name + ' ' ns += '"' + "%d"% pin.num + '" ' ns += pin.sflags ns += ']\n' return ns def genElementPad(self,pin,dev): # Thickness, Clearance, Mask, Name, Number, SFlags # if package was parsed then these are set, if not I need to generate correct ones rX1=int(pin.rX1) rY1=int(pin.rY1) rX2=int(pin.rX2) rY2=int(pin.rY2) # Why we do not have to do it for the pads? 
#if dev.bottom: # rY1 = 0 - rY1 # rY2 = 0 - rY2 rX1,rY1 = self.rotatePoint(Point(rX1,rY1),0,0,dev.rotation) rX2,rY2 = self.rotatePoint(Point(rX2,rY2),0,0,dev.rotation) try: sflags = pin.sflags except: # no PCB sflags then generate one # square # edge2 if pin.pad.type == "S": sflags ='"square"' else: sflags ='""' ns = 'Pad [' + " %d "% rX1 + " %d "% rY1 + " %d "% rX2 + " %d "% rY2 ns += " %d "% pin.thickness ns += " %d "% pin.clearance ns += " %d "% pin.mask ns += pin.name + ' ' ns += '"' + "%d"% pin.num + '" ' ns += sflags ns += ']\n' return ns def genElementBody(self,dev): # print'name ' + dev.name l = len(dev.pins) # print ' len ' + str(l) # print 'roation ' + str(dev.rotation) ns = '(\n' for num in range(1,l+1): # print 'pin ' + str(num) pin = dev.pins[num] ppin = dev.package.pins[num] #if dev.package.smt: # event smt packages can have pins aka mounting holes if ppin.smt: ns += self.genElementPad(ppin,dev) else: ns += self.genElementPin(ppin,dev) for geo in dev.package.geometry: if isinstance(geo, Line): ns += self.genElementLine(geo,dev) if isinstance(geo, Arc): ns += self.genElementArc(geo,dev) if isinstance(geo, Text): ns += self.genElementText(geo,dev) ns += ')\n' return ns # Device is on the bottom, coordinates of the pad are for the bottom # Pcb defines package looking from top so mirror it in X back to top # and add the flags # For details see the core.py def genBrdPlaceDevOnSolder(self,dev): for pad in dev.package.pins.values(): pad.pos._y = 0 - pad.pos._y try: # quick fix TBI pad.rY1 = 0 - pad.rY1 except: pad.rY1 = 0 try: # quick fix TBI pad.rY2 = 0 - pad.rY2 except: pad.rY2 = 0 try: # quick fix TBI newsflags = pad.sflags.strip('"') except: newsflags = 'square' # default to square if newsflags != '': newsflags = ',' + newsflags newsflags = '"onsolder' + newsflags + '"' pad.sflags = newsflags for pad in dev.package.geometry: pass # print pad.sflags # gen brd place scr" def genBrdPlaceScr(self): ns = '' CRLF = '\n' devnum = 0 self.brd.outline.calcBBox() 
for dev in self.brd.devices.values(): name = str(dev.refid) + CRLF if dev.bottom: self.genBrdPlaceDevOnSolder(dev) x = (int) #x = (self.brd.outline.bbox.ur._x - dev.position._x) # position is in mils x = dev.position._x # position is in mils y = (int) #y = (self.brd.outline.bbox.ur._y - dev.position._y) # position is in mils y = dev.position._y # position is in mils placement = '"onsolder"' else: x = (int) x = dev.position._x # position is in mils y = (int) y = dev.position._y # position is in mils placement = '""' # place the device ns += 'Element[' + placement + ' "' + str(dev.package.description) + '" "' + str(dev.refid) + '" "' + str(dev.val) + '" ' +'%i'% x + ' ' + '%i'% y + ' 3200 5900 0 100 ""]' + CRLF ns += self.genElementBody(dev) # if name[0:1] == 'R': # ns += self.gen0805_resitor(dev.refid,x,y,dev.val) # if name[0:1] == 'C': # ns += self.gen0805_capacitor(dev.refid,x,y,dev.val) # if name[0:1] == 'Q': # ns += self.genSOT23(dev.refid,x,y,dev.val) # numpins = 0 # for pin in dev.pins: # numpins += 1 # for k in dev.pins.keys(): # pin = dev.pins[k] # dev.rotation ? 
return ns def Cmd(self,cmds): gen = 0 sch = 0 brd = 0 cmd = 0 add = 0 layers = 0 net_connect = 0 netlist = 0 board = 0 place = 0 route = 0 scr = 0 lst = 0 # 0 if cmds[0:1] == ['gen']: gen = 1 # 1 if cmds[1:2] == ['sch']: sch = 1 if cmds[1:2] == ['brd']: brd = 1 # 2 if cmds[2:3] == ['cmd']: cmd = 1 if cmds[2:3] == ['add']: add = 1 if cmds[2:3] == ['layers']: layers = 1 if cmds[2:3] == ['netconnect']: net_connect = 1 if cmds[2:3] == ['netlist']: netlist = 1 if cmds[2:3] == ['board']: board = 1 if cmds[2:3] == ['place']: place = 1 if cmds[2:3] == ['route']: route = 1 # 3 if cmds[3:4] == ['scr']: scr = 1 if cmds[3:4] == ['lst']: lst = 1 if gen: if sch: if add: if scr: s = self.genSchAddScr() return s if layers: if scr: s = self.genSchLayersScr() return s if net_connect: pass if netlist: s = self.genSchNetlistLst() return s if brd: if cmd: if scr: s = self.genBrdCmdScr() # commands to make the board return s if board: if scr: s = self.genBrdBoardScr() return s if layers: if scr: s = self.genBrdLayersScr() return s if place: if scr: s = self.genBrdPlaceScr() return s if netlist: if scr: s = self.genBrdNetlistScr() return s if route: pass return "" def test(self): ic1 = CDev("U1","","IC1") ic1.add( CPin("GND",1) ) ic1.add( CPin("VCC",2) ) self.sch.addDev(ic1) net1 = CNet("GND") net1.add(CNode(ic1,"GND")) self.sch.addNet(net1) net2 = CNet("VCC") net2.add(CNode(ic1,"VCC")) self.sch.addNet(net2) print "gen sch add scr" s = self.genSchAddScr() print s print "gen sch net-connect scr" s = self.genSchNetConnectScr() print s print "gen sch netlist lst" s = self.genSchNetlistLst() print s print "gen sch netlist scr" s = self.genSchNetlistScr() print s # Some tests if __name__ == "__main__": import sys #import string import re schem = CSchematic() board = CBoard(schem) board.addFromSchematic() mucs = CPCB(schem,board) # open input file if sys.argv[1:] == ['test']: mucs.test()
This group of Luo migrated from southern Sudan because of drought, famine, and epidemic diseases such as fowl pox, which killed many people, and they entered East Acholi land around 1700 AD. Under the leadership of Palabek they settled at a place called Pamwoma in the district of Kitgum. Palabek fathered several children, and each child founded a sub-clan; Pukony, one of Palabek's sons, thus formed the clan Palabek Pukony. Although little more is known about this clan's migration, the economic activities they carried out were farming and cattle keeping.
import pytest

import transcode.conf
import transcode.render


def my_callback(source, *args, **kws):
    """Dummy transcoder callback used by the configuration tests."""
    pass


# Configuration whose transcoder entry is a real callable.
CFG_GOOD = {
    'TEXT': {'transcoder': my_callback},
}

# Configuration whose transcoder entry is not callable.
CFG_BAD = {
    'MARK': {'transcoder': 42},
}


class TestConf:
    """Tests for transcode.conf transcoder lookup and validation."""

    def test_default_config(self):
        """Each built-in format maps to its default render function."""
        for fmt, expected in (
                (transcode.conf.HTML_FORMAT, transcode.render.render_html),
                (transcode.conf.SIMPLE_TEXT_FORMAT,
                 transcode.render.render_simple),
                (transcode.conf.MARKDOWN_FORMAT,
                 transcode.render.render_markdown),
                (transcode.conf.RST_FORMAT,
                 transcode.render.render_restructuredtext),
        ):
            handler, args, kwargs = transcode.conf.get_transcoder(fmt)
            assert handler is expected

    def test_config_with_actual_callback(self):
        """A user-supplied callable is returned untouched, with no extra args."""
        handler, args, kwargs = transcode.conf.get_transcoder('TEXT', CFG_GOOD)
        assert handler == my_callback
        assert args == ()
        assert kwargs == {}

    def test_config_with_bad_callback(self):
        """A non-callable transcoder entry is rejected with TypeError."""
        # pytest.raises replaces the original try/except/else with
        # assert True/assert False: it fails with a clear message when no
        # TypeError is raised and does not mask unrelated exceptions.
        with pytest.raises(TypeError):
            transcode.conf.load_config(CFG_BAD)
Here is the comment I posted to the story in the Tribune about the Milwaukie teacher. “This bogus story plays to the worst stereotypes of male teachers… that they cannot be trusted around children. Oh wait, correction, around female children. Had he been fooling around with boys, he’d be mayor of Milwaukie now instead of in jail. How is this guy any different than our Mayor? One gets to be mayor, the other gets jail? That doesn’t make any sense. Either throw Sam Adams in jail or set this man free. I phoned every commissioner this morning and asked them if they’d be issuing a statement of solidarity with the jailed teacher or if they thought it was okay that he was in jail for making the same “mistake” that Sam made. I was actually hung up on by Saltzman’s lackey. I reminded them of the “soft bigotry of low expectations” they were displaying towards gays. It is in fact bigoted to expect and condone bad behavior by gays. They didn’t like being called bigots. TEA PARTY, APRIL 15TH, PIONEER SQUARE. Gather at 5pm, Tea Party Starts at 6pm! WE GATHER AT 5, TEA PARTY STARTS AT 6PM. Start making your signs and let’s start making some history. For more information email standup@kpam.com. For sign ideas and additional info go to Oregon Tea Party here. Smart Girl Politics – Tea Party Coverage from TCOT TV on Vimeo. By now, the entire world knows that president Obama has flubbed protocol in dealing with our good friends, the British. He showed a total lack of class by giving the British PM 25 DVD’s that won’t even play in England! Now, he further embarrasses America with his latest trip to Great Britain and his exchange of gifts with their Queen Elizabeth. One is left to wonder just who he has in the Office of Protocol, as I’m sure his adolescent daughters could conceive of more fitting gifts to give foreign dignitaries. Additionally, the Queen was presented with a signed songbook, supposedly “rare”, by Richard Rogers.
It should also be noted that the Queen has had her own iPod for the last 4 years. In return, the Obamas received from Her Majesty “a silver-framed, signed photograph of themselves,” apparently a standard present for visiting dignitaries. Was the Queen prepared for a substandard gift? Or, perhaps, was she returning the sub-par gift giving that he showed PM Brown? We may never know. Contrast this with the president most hated in modern history, who for 8 years endured false accusations of low I.Q. and of being unable to speak properly, and whose character and style were almost daily assaulted. In May 2007, towards the end of his administration, President and Mrs. Bush presented the Queen of England with a bronze statuette, “High Desert Princess,” with a personal inscription on the bottom of the base — a replica of the original statue located at the National Cowgirl Museum and Hall of Fame in Ft. Worth, Texas. In exchange, the Royal Couple presented President Bush with a sterling silver oversized plate by William & Son with gold seals, including the Presidential seal, the Royal seal and a center seal with the star of Texas surrounded by roses, and a personal inscription on the back of the plate. They presented Mrs. Bush a gold and crystal clock with the Royal seal by William & Son. UPDATE: As the video below shows, the Obamas committed yet another gaffe in protocol, with him not bowing before the Queen and her forgetting to curtsey, as is European tradition. In another protocol gaffe, Obama did bow before the King of Saudi Arabia, a sign of the Monarch’s power over his subjects. TEA PARTY, APRIL 15TH, VENUE TBA THURSDAY. WATCH THIS SPACE. Or listen tomorrow between 5-8pm on KPAM 860. Or get on the email list: standup@kpam.com — put “Tea Party” in the subject line. Or follow me on Twitter or Facebook.
#!/usr/bin/env python
"""
Convert FASTQ output of simulators containing simulation information into
SAM alignment file
"""

import traceback
import base64


def encode_qname(qname, retain_petag=True):
    """Base64-encode a read name so it is safe as a SAM QNAME.

    '+' and '/' are replaced by '-' and '_' in the encoded text.  A trailing
    paired-end tag such as '/1' or '/2' is kept verbatim when retain_petag
    is True, and stripped otherwise.
    """
    def _b64(text):
        # Python-3-safe: b64encode works on bytes, so round-trip through
        # encode()/decode() (the original passed str, which raises TypeError).
        return base64.b64encode(text.encode(), b"-_").decode()

    # Guard the [-2] lookup so one-character names cannot raise IndexError.
    if len(qname) > 1 and qname[-2] == "/":
        if retain_petag:
            return _b64(qname[0:-2]) + qname[-2:]
        return _b64(qname[0:-2])
    return _b64(qname)


class Object:
    """Plain attribute container used for read records."""

    def __init__(self):
        pass


class FASTQ:
    """Sequential FASTQ reader that also writes an 'enc_'-prefixed copy of
    the input with base64-encoded read names."""

    def __init__(self, filename, pe):
        self.handle = open(filename, "r")
        self.out_handle = open("enc_" + filename, "w")
        self.lines = iter(self.handle.readlines())
        self.is_paired = pe

    def readline(self):
        """Return the next input line without surrounding whitespace."""
        return next(self.lines).strip()

    def next_read(self):
        """Return the next read record.

        read.valid is False at end of file or on a malformed record.
        """
        read = Object()
        read.valid = False
        try:
            read.id = self.readline()
            if len(read.id) > 0 and read.id[0] == "@":
                read.id = read.id[1:]
            else:
                # Explicit exception instead of a bare 'raise', which is
                # illegal when no exception is being handled.
                raise ValueError("FASTQ header must start with '@'")
            read.id_encoded = encode_qname(read.id, self.is_paired)
            read.seq = self.readline()
            read.desc = self.readline()
            read.qual = self.readline()
            read.valid = True
            self.out_handle.write("@" + read.id_encoded + "\n")
            self.out_handle.write("%s\n%s\n%s\n" % (read.seq, read.desc, read.qual))
        except (StopIteration, ValueError):
            # End of input or malformed record: leave read.valid False.
            pass
        return read


class Aligner:
    """Aligns reads based on their data (interface; subclasses implement align)."""

    def __init__(self):
        pass

    def align(self, read, paired=False, is_read1=True):
        # 'self' was missing in the original signature; the subclasses all
        # override with this parameter list.
        pass


class DummyAligner(Aligner):
    """Aligner that maps every read to chromosome 'A', position 0."""

    def align(self, read, paired=False, is_read1=True):
        read.chrom = "A"
        read.pos = 0
        if paired:
            read.is_read1 = is_read1
            read.is_read2 = not is_read1
        else:
            read.is_read1 = False
            read.is_read2 = False
        read.flags = 0
        if paired:
            read.flags = read.flags | 0x1
        if read.is_read1:
            read.flags = read.flags | 0x40
        if read.is_read2:
            read.flags = read.flags | 0x80

    def align_pair(self, read1, read2):
        self.align(read1, True, True)
        self.align(read2, True, False)


class dwgsim(Aligner):
    """Aligner recovering the true position dwgsim encodes in the read name,
    e.g. @random_sequence_632951_1_1_0_0_0_2:0:0_0:0:0_0"""

    def align(self, read, paired=False, is_read1=True):
        parts = read.id.split("_")
        # The chromosome name is everything before the last nine
        # underscore-separated simulation fields.
        if is_read1:
            read.pos = parts[-9]
        else:
            read.pos = parts[-8]
        if paired:
            read.is_read1 = is_read1
            read.is_read2 = not is_read1
        else:
            read.is_read1 = False
            read.is_read2 = False
        read.chrom = "_".join(parts[0:-9])
        read.flags = 0
        reverse = (int(parts[-7]) == 1)
        if read.is_read2:
            reverse = not reverse
        if reverse:
            read.flags = read.flags | 0x10
        if paired:
            read.flags = read.flags | 0x1
        if read.is_read1:
            read.flags = read.flags | 0x40
        if read.is_read2:
            read.flags = read.flags | 0x80

    def align_pair(self, read1, read2):
        self.align(read1, True, True)
        self.align(read2, True, False)


class Converter:
    """Writes reads as SAM records, both with original and encoded names."""

    def __init__(self, aligner, outfile):
        self.aligner = aligner
        self.sam = open(outfile, "w")
        self.sam_enc = open("enc_" + outfile, "w")

    def write(self, what):
        """Write the same text to both SAM outputs."""
        self.sam.write(what)
        self.sam_enc.write(what)

    def write_sam_header(self, reads):
        """Emit @SQ lines for every chromosome seen, plus a @PG line."""
        seq = []
        for read in reads:
            if not read.chrom in seq:
                seq.append(read.chrom)
        for s in sorted(seq):
            self.write("@SQ\tSN:%s\tLN:10000\n" % s)
        self.write("@PG\tID:mapper\tPN:mapper\tVN:1.0\n")

    def write_single(self, read, to):
        """Write one unmapped-mate-free SAM record for an aligned read."""
        read_id = read.id
        # Strip a trailing '/1' or '/2' pair tag; guard short names.
        if len(read_id) > 1 and read_id[-2] == "/":
            read_id = read_id[0:-2]
        to.write("%s\t%d\t%s\t%d\t60\t*\t*\t0\t0\t%s\t%s\n" % (
            read_id, read.flags, read.chrom, int(read.pos), read.seq, read.qual))

    def align_se(self, infile):
        """Stream single-end reads, writing each to both outputs."""
        fastq = FASTQ(infile, False)
        read = fastq.next_read()
        while read.valid:
            self.aligner.align(read)
            self.write_single(read, self.sam)
            read.id = read.id_encoded
            self.write_single(read, self.sam_enc)
            del read
            read = fastq.next_read()
        self.sam.close()
        self.sam_enc.close()

    def align_pe(self, infile_1, infile_2):
        """Align paired-end reads; header needs all chroms, so buffer first."""
        fq1 = FASTQ(infile_1, True)
        fq2 = FASTQ(infile_2, True)
        read1 = fq1.next_read()
        read2 = fq2.next_read()
        aligned1 = []
        aligned2 = []
        while read1.valid and read2.valid:
            self.aligner.align_pair(read1, read2)
            aligned1.append(read1)
            aligned2.append(read2)
            read1 = fq1.next_read()
            read2 = fq2.next_read()
        self.write_sam_header(aligned1)
        for i in range(len(aligned1)):
            self.write_single(aligned1[i], self.sam)
            self.write_single(aligned2[i], self.sam)
            aligned1[i].id = aligned1[i].id_encoded
            aligned2[i].id = aligned2[i].id_encoded
            self.write_single(aligned1[i], self.sam_enc)
            self.write_single(aligned2[i], self.sam_enc)
        self.sam.close()
        self.sam_enc.close()


if __name__ == "__main__":
    import sys

    if len(sys.argv) < 3:
        print("Usage: fastq2sam.py <simulator> <reads.fastq> [<reads_2.fastq>]")
        raise SystemExit
    arg_sim = sys.argv[1]
    arg_reads1 = sys.argv[2]
    if len(sys.argv) > 3:
        arg_reads2 = sys.argv[3]
        paired = True
    else:
        paired = False
    if arg_sim == "dwgsim":
        aligner = dwgsim()
    else:
        # Raising a string literal is invalid Python 3; exit cleanly instead.
        raise SystemExit("Simulator not supported: " + arg_sim)
    conv = Converter(aligner, arg_reads1 + ".sam")
    if not paired:
        print(
            "Align single-end reads from " + arg_sim + ", input: " + arg_reads1 +
            ", output " + arg_reads1 + ".sam...")
        conv.align_se(arg_reads1)
    else:
        print(
            "Align paired-end reads from " + arg_sim + ", input: " + arg_reads1 +
            " + " + arg_reads2 + ", output " + arg_reads1 + ".sam...")
        conv.align_pe(arg_reads1, arg_reads2)
On Saturday, I was down in South Beach celebrating with a soon to be bride for her bachelorette party, and by the end of the night, I ended up one very hungry girl. With only a little hummus as my dinner, and the two burritos I had for lunch being just a faint memory by the wee hours, I was absolutely famished. Literally, my stomach hurt from hunger! I ended up at the 24 hour pharmacy picking up some Triscuits and raw nuts to eat before bed. Sunday morning I woke up with a sense of purpose (and more hunger!); I was going to go to Darbster for Sunday brunch! I had already decided before I got there that I was going to order at least 2 items since I may never be up that early on a Sunday again. I knew I wanted french toast or pancakes, and a more savory dish too. When I told the waitress I was going to order two things she said, “I’m not judging”. 😉 Which was actually really great, because I remember the first time I went there and ordered a bunch of food, the waitress (she wasn’t there for too long) kept telling me that I was ordering too much food. (Really?!) Aside from the one food phobic girl that waited on me that first time, I’ve had nothing but excellent service from super warm hearted people at Darbster, and that always makes the experience twice as nice. Hi, this is vegan brunch heaven on a plate! This hits on SO MANY LEVELS! The french toast was perfect… toasty edges and a slight mush in the middle. Well flavored and topped with rice whip and sweet, sliced strawberries. It turns out I really didn’t need a savory dish to balance out my meal because it came with two plump, delicious veggie sausages that I proceeded to cut up into little pieces so that they could be a part of every amazing bite. The fruit salad was cut up into teeny little pieces that were bursting with flavor. This plate is masterful and enough to make even this dedicated weekend slumberer re-think her ways (at least occasionally!). I have a feeling I will be craving this fairly often! 
AWESOME.
# -*- coding: utf-8 -*-
# test/unit/cli/test_main.py
# Copyright (C) 2016 authors and contributors (see AUTHORS file)
#
# This module is released under the MIT License.

"""Test main()"""

# ============================================================================
# Imports
# ============================================================================


# Stdlib imports
import os
from pathlib import Path
import sys

# Third-party imports
from pandas import Timedelta
import pytest
from pytz import timezone

# Local imports
import loadlimit.cli as cli
from loadlimit.cli import main, PROGNAME
from loadlimit.importhook import TaskImporter
from loadlimit.util import LogLevel


# ============================================================================
# Fixtures
# ============================================================================


@pytest.fixture
def empty_argv(monkeypatch):
    """Set sys.argv to an empty list"""
    monkeypatch.setattr(sys, 'argv', [])


@pytest.fixture
def norunloop(monkeypatch):
    """Mock runloop() with func that does nothing"""

    def fake_runloop(self, config, args, state):
        """fake_runloop"""
        cli.process_options(config, args)

    monkeypatch.setattr(cli.RunLoop, '__call__', fake_runloop)


pytestmark = pytest.mark.usefixtures('empty_argv', 'norunloop')


# ============================================================================
# Test main
# ============================================================================


def test_main_help(capsys):
    """main"""
    with pytest.raises(SystemExit) as err:
        main()

    assert err.value.args == (0, )

    # Check stdout
    out, err = capsys.readouterr()
    assert out.startswith('usage: {}'.format(PROGNAME))


def test_main_nonempty_sysargv(monkeypatch, capsys):
    """Non-empty sys.argv list"""
    monkeypatch.setattr(sys, 'argv', ['loadlimit', '-h'])
    with pytest.raises(SystemExit) as err:
        main()

    assert err.value.args == (0, )

    # Check stdout
    out, err = capsys.readouterr()
    assert out.startswith('usage: {}'.format(PROGNAME))


def test_main_loadlimit_configsection(capsys):
    """loadlimit config section exists in dict passed to main"""
    config = dict(loadlimit={})
    with pytest.raises(SystemExit) as err:
        main(config=config)

    assert err.value.args == (0, )

    # Check stdout
    out, err = capsys.readouterr()
    assert out.startswith('usage: {}'.format(PROGNAME))


def test_main_default_args():
    """Config default values"""
    config = {}
    args = ['-d', '1s', 'what']
    with pytest.raises(SystemExit):
        main(arglist=args, config=config)

    assert config
    assert len(config) == 1
    assert 'loadlimit' in config

    llconfig = config['loadlimit']

    names = ['timezone', 'numusers', 'duration', 'importer',
             'show-progressbar', 'cache', 'export', 'periods', 'logging',
             'qmaxsize', 'flushwait', 'initrate', 'schedsize', 'sched_delay']
    assert len(llconfig) == len(names)
    for name in names:
        assert name in llconfig

    assert llconfig['numusers'] == 1
    assert llconfig['timezone'] == timezone('UTC')
    assert llconfig['duration'] == Timedelta('1s')
    assert llconfig['show-progressbar'] is True
    assert llconfig['cache']['type'] == 'memory'
    assert llconfig['export']['type'] is None
    assert 'targetdir' not in llconfig['export']
    assert isinstance(llconfig['importer'], TaskImporter)
    assert llconfig['periods'] == 8
    assert llconfig['logging']['loglevel'] == LogLevel.WARNING
    assert llconfig['qmaxsize'] == 1000
    assert llconfig['flushwait'] == Timedelta('2s')
    assert llconfig['initrate'] == 0
    assert llconfig['schedsize'] == 0
    assert llconfig['sched_delay'] == Timedelta('0s')


@pytest.mark.parametrize('val', ['fhjdsf', '42z', 'one zots'])
def test_main_bad_duration(val):
    """Invalid value for duration option raises an error"""
    config = {}
    args = ['-d', val, 'what']
    with pytest.raises(ValueError):
        main(arglist=args, config=config)


@pytest.mark.parametrize('val', [None, ''])
def test_main_empty_duration(val):
    """Not giving a duration raises an error"""
    config = {}
    args = ['what']
    if val is not None:
        args[:0] = ['-d', val]

    expected = 'duration option got invalid value {!r}'.format(val)
    with pytest.raises(ValueError) as err:
        main(arglist=args, config=config)

    assert err.value.args == (expected, )


def test_main_bad_users():
    """Value < 1 for users option raises error"""
    config = {}
    args = ['-u', '0', 'what']
    expected = 'users option expected value > 0, got 0'
    with pytest.raises(ValueError) as err:
        main(arglist=args, config=config)

    assert err.value.args == (expected, )


@pytest.mark.parametrize('val', [0, 1])
def test_main_periods_badvalue(val):
    """Raise error if periods is given value <= 1"""
    config = {}
    args = ['-p', str(val), '-d', '1s', 'what']
    expected = 'periods option must be > 1'
    with pytest.raises(ValueError) as err:
        main(arglist=args, config=config)

    assert err.value.args == (expected, )


def test_main_export_baddir(monkeypatch):
    """Raise error if directory does not exist"""

    def fake_isdir(n):
        """fake_isdir"""
        return False

    monkeypatch.setattr(cli, 'isdir', fake_isdir)

    config = {}
    args = ['-E', 'csv', '-e', '/not/exist', '-d', '1s', 'what']
    expected = '/not/exist'
    with pytest.raises(FileNotFoundError) as err:
        main(arglist=args, config=config)

    assert err.value.args == (expected, )


def test_main_export_targetdir(monkeypatch):
    """Store export directory in internal config"""

    def fake_isdir(n):
        """fake_isdir"""
        return True

    monkeypatch.setattr(cli, 'isdir', fake_isdir)

    config = {}
    args = ['-E', 'csv', '-e', '/not/exist', '-d', '1s', 'what']
    with pytest.raises(SystemExit):
        main(arglist=args, config=config)

    llconfig = config['loadlimit']
    assert 'export' in llconfig
    exportconfig = llconfig['export']
    assert exportconfig['type'] == 'csv'
    assert exportconfig['targetdir'] == '/not/exist'


def test_main_export_nodir(monkeypatch):
    """Use current directory if targetdir not given"""
    config = {}
    args = ['-E', 'csv', '-d', '1s', 'what']
    with pytest.raises(SystemExit):
        main(arglist=args, config=config)

    llconfig = config['loadlimit']
    assert 'export' in llconfig
    exportconfig = llconfig['export']
    assert exportconfig['type'] == 'csv'
    assert exportconfig['targetdir'] == os.getcwd()


def test_main_logfile_default():
    """Default logfile"""
    config = {}
    args = ['-L', '-d', '1s', 'what']
    with pytest.raises(SystemExit):
        main(arglist=args, config=config)

    llconfig = config['loadlimit']
    assert 'logging' in llconfig
    expected = Path.cwd() / '{}.log'.format(cli.PROGNAME)
    assert llconfig['logging']['logfile'] == str(expected)


def test_main_logfile_bad_parentdir(monkeypatch):
    """Raise error if given logfile path's parent doesn't exist"""
    filename = Path('/imaginary/path/notexist')

    def fake_isdir(self):
        """fake_isdir"""
        return False

    monkeypatch.setattr(cli.Path, 'is_dir', fake_isdir)

    config = {}
    args = ['-L', '-l', str(filename), '-d', '1s', 'what']
    with pytest.raises(FileNotFoundError) as err:
        main(arglist=args, config=config)

    assert err.value.args == (str(filename.parent), )


def test_main_logfile_isdir(monkeypatch):
    """Raise error if given logfile is a directory"""
    filename = Path('/imaginary/path/notexist')

    def fake_isdir(self):
        """fake_isdir"""
        return True

    monkeypatch.setattr(cli.Path, 'is_dir', fake_isdir)

    config = {}
    args = ['-L', '-l', str(filename), '-d', '1s', 'what']
    with pytest.raises(IsADirectoryError) as err:
        main(arglist=args, config=config)

    assert err.value.args == (str(filename), )


@pytest.mark.parametrize('val', ['hello', (42, )])
def test_main_flushwait_badval(val):
    """Raise error if flushwait is given bad value"""
    config = {}
    args = ['--flush-wait', str(val), '-d', '1s', 'what']
    expected = 'duration option got invalid value: {}'.format(val)
    with pytest.raises(ValueError) as err:
        main(arglist=args, config=config)

    assert err.value.args == (expected, )


def test_main_schedsize_badval():
    """Raise error if sched-size is given value larger than numusers"""
    numusers = 10
    schedsize = 42
    config = {}
    args = ['-u', str(numusers), '--sched-size', str(schedsize),
            '-d', '1s', 'what']
    msg = 'sched-size option expected maximum value of {}, got value {}'
    expected = msg.format(numusers, schedsize)
    with pytest.raises(ValueError) as err:
        main(arglist=args, config=config)

    assert err.value.args == (expected, )


# BUGFIX: the original body ignored both parametrized arguments and always
# ran with numusers == schedsize == 10, so only one combination was ever
# tested.  The arguments are now used, and combinations that would raise
# ValueError (schedsize > numusers, or numusers == 0) are filtered out of
# the parametrization.
@pytest.mark.parametrize('val,numusers', [
    (v, u) for v in [0, 5, 10] for u in [0, 5, 10]
    if u > 0 and v <= u
])
def test_main_schedsize_goodval(val, numusers):
    """Don't raise error if sched-size is >= 0 and <= numusers"""
    config = {}
    args = ['-u', str(numusers), '--sched-size', str(val),
            '-d', '1s', 'what']
    with pytest.raises(SystemExit):
        main(arglist=args, config=config)


@pytest.mark.parametrize('val', ['hello', (42, )])
def test_main_sched_delay_badval(val):
    """Raise error if sched_delay is given bad value"""
    config = {}
    args = ['--sched-delay', str(val), '-d', '1s', 'what']
    expected = 'sched-delay option got invalid value: {}'.format(val)
    with pytest.raises(ValueError) as err:
        main(arglist=args, config=config)

    assert err.value.args == (expected, )


# ============================================================================
#
# ============================================================================
The Planetary Health Center of Expertise (PHCOE), part of the UC Global Health Institute, addresses complex global issues that arise from the effects of climate change, rapidly growing populations and limited food and natural resources. The PHCOE draws on the resources of all ten UC campuses and two national laboratories. Through collaborative research, community engagement, and local and international partnerships, we seek to identify new ways that humans and animal populations can foster resilience in the face of changing environments. The mission of the Center is to lead the world in science, education, outreach, and transformative solutions to health-environment challenges. Education is also a significant focus. PHCOE offers a Summer Field Experience Fellowship Program, a summer graduate fellowship with the California Department of Conservation, a Student Ambassador Program and an International Student Ambassador Program. The Rx One Health summer course in East Africa and the UCGHI GloCal Health Fellowship are also opportunities to conduct research, sharpen problem-solving skills to implement critical change, strengthen professional networks and explore careers in planetary and global health. The PHCOE is co-led by Woutrina Smith, professor of epidemiology and associate director at the UC Davis One Health Institute, and David Lopez-Carr, professor of geography and director at the UC Santa Barbara Human-Environment Dynamics Lab. Visit the UCGHI Planetary Health Center of Expertise website for more information.
# $Id$
##
## This file is part of pyFormex 0.8.9 (Fri Nov 9 10:49:51 CET 2012)
## pyFormex is a tool for generating, manipulating and transforming 3D
## geometrical models by sequences of mathematical operations.
## Home page: http://pyformex.org
## Project page: http://savannah.nongnu.org/projects/pyformex/
## Copyright 2004-2012 (C) Benedict Verhegghe (benedict.verhegghe@ugent.be)
## Distributed under the GNU General Public License version 3 or later.
##
##
## This program is free software: you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation, either version 3 of the License, or
## (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
## GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with this program.  If not, see http://www.gnu.org/licenses/.
##

from distutils.core import setup, Extension

# Build configuration for the pygl2ps extension: a SWIG-generated wrapper
# (gl2ps.py + _gl2ps) around the GL2PS C library, linked against OpenGL.
setup(
    name="pygl2ps",
    version="1.3.3",
    description="Wrapper for GL2PS, an OpenGL to PostScript Printing Library",
    author="Benedict Verhegghe",
    author_email="benedict.verhegghe@ugent.be",
    url="http://pyformex.org",
    long_description="""
Python wrapper for GL2PS library by Christophe Geuzaine.
See http://www.geuz.org/gl2ps/
""",
    license="GNU LGPL (Library General Public License)",
    py_modules=["gl2ps"],
    ext_modules=[
        Extension(
            "_gl2ps",
            ["gl2ps.c", "gl2ps_wrap.c"],
            libraries=["GL"],
        ),
    ],
)
Born and raised on the Big Island of Hawaii, Kekai has always had a passion for art. Finding the sunshine and beautiful scenery distracting, he moved to dreary Seattle in 2000 to attend art school. After graduating, he proceeded to attempt a career at doing art. The first meaningful step was getting a job at ArenaNet, the makers of the Guild Wars franchise, as a texture artist. Through hard work and more than a little luck, he was able to become a concept artist. With even more hard work and a little more luck, Kekai was able to branch out into illustration work, where he did book covers and even a few Magic: The Gathering cards.
# -*- coding: utf-8 -*- """This module implements the clarification process for ambiguous descriptions. Given a description of an object (ambigouos or not) it returns, if found, the object's identifier in oro. If necessary, it will query the human for additional information. """ import logging logger = logging.getLogger("dialogs") from kb import KbError from dialogs.resources_manager import ResourcePool from dialogs.dialog_exceptions import UnsufficientInputError from dialogs.sentence import * from dialogs.sentence_factory import SentenceFactory from dialogs.helpers.helpers import generate_id from random import choice class Discrimination(object): def __init__(self): self.oro = ResourcePool().ontology_server # -- GET_ALL_OBJECTS_WITH_DESC ------------------------------------------------# # Returns all objects' ids with a given set of features (eg. green, big, etc). # Since we have several descriptions, we obtain a list of objects for each agent # and then we intersect them. # # INPUT: # - description: # [[agent1 '?obj' oro_query]..[[agentN '?obj' oro_query]] # (oro_query= ['?obj hasColor blue',.. 
?obj hasShape box']) # # OUTPUT: # - empty list: no objects found fulfilling the description # - list: objects fulfilling the description # - None: no description (or description format incorrect) # -----------------------------------------------------------------------------# def get_all_objects_with_desc(self, description): obj_list = None for agent_desc in description: obj_tmp = [] try: obj_tmp = self.oro.findForAgent(ResourcePool().get_model_mapping(agent_desc[0]), agent_desc[1], agent_desc[2]) except KbError: #The agent does not exist in the ontology pass # if no object found, no need to continue if not obj_tmp: obj_list = [] break else: if obj_list is None: obj_list = obj_tmp else: obj_list = [x for x in obj_tmp if x in obj_list] # intersection return obj_list # -- GET_DISCRIMINANT ---------------------------------------------------------# # Queries the ontology for a list of discriminants. Returns the first one. # TODO: prioritize which discriminant to return. # # INPUT: # - agent # - object list # - ignore_descriptors: list of descriptors not to be used # - include_partial: if true, then partial discriminants are also returned # OUTPUT: # - discriminant: [C, discriminat] if complete, or [P, discriminant] if partial # The new discriminant should be different from the ones already known or ignored # -----------------------------------------------------------------------------# def get_discriminant(self, agent, obj_list, ignore_descriptors, include_partial): discriminants = self.oro.discriminateForAgent(ResourcePool().get_model_mapping(agent), obj_list) logger.debug(colored_print('Possible discriminants: ', 'magenta') + \ str(colored_print(discriminants[1], 'blue')) + \ colored_print(" (complete discriminants: ", 'magenta') + \ str(colored_print(discriminants[0], 'blue')) + ")") complete_disc = discriminants[0] partial_disc = discriminants[1] if complete_disc: res = [x for x in complete_disc if x not in ignore_descriptors] elif partial_disc and 
include_partial: res = [x for x in partial_disc if x not in ignore_descriptors] else: res = None if res: # include randomization so the same discriminant is not always returned return choice(res) else: # No discriminant after applying the blacklist. return None # -- GET_DESCRIPTOR -----------------------------------------------------------# # Searches for a new descriptor candidate from all agents. # # INPUT: # - description: # [[agent1 '?obj' oro_query]..[[agentN '?obj' oro_query]] # (oro_query= ['?obj hasColor blue',.. ?obj hasShape box']) # - ignore_features: list of features not to use as discriminants # [feat1 ..featN] # - allowPartialDesc: consider also partial discriminants (1) or not (0) (0 default) # # OUTPUT: # - descriptor or None (if no discriminant for any agent found) # -----------------------------------------------------------------------------# def get_descriptor(self, description, ignore_features=None, partial_disc=True): if not ignore_features: ignore_features = [] objL = self.get_all_objects_with_desc(description) descriptor = None agent = None #TODO bug in oro doesn't allow to search discriminants base on other agents models!! # we cannot search in all agents, but only in robot's model # for agent_desc in description: # # list current descriptors to not to use them anymore # #currentDescriptors = map(lambda x: x.split()[1], agent_desc[2]) # descriptor = self.get_discriminant(agent_desc[0], objL, ignore_features, partial_disc) # # if descriptor: # agent = agent_desc[0] # break agent = ResourcePool().default_model # list current descriptors to not to use them anymore #currentDescriptors = map(lambda x: x.split()[1], description[0][2]) descriptor = self.get_discriminant(agent, objL, ignore_features, partial_disc) return agent, descriptor # -- get_values_for_descriptor ------------------------------------------------# # Creates the information to be sent to user based on the discriminant found. 
# # INPUT: # - agent, discriminant, objectsList # # OUTPUT # - list of values to ask for # -----------------------------------------------------------------------------# def get_values_for_descriptor(self, agent, descriptor, objL): valL = [] # get values for each object for obj in objL: # if the discriminant is type, then look for the directClass of the obj (first found) # how should this work for different agents? There is no directClassForAgent # probably won't be necessary since all the knowledge of the human is part # of the robot's knowledge as well. Then we can obtain this information # directly from the robot itself. if descriptor == 'rdf:type': val = list(self.oro.getDirectClassesOf(obj).keys()) else: val = self.oro.findForAgent(ResourcePool().get_model_mapping(agent), '?val', [obj + ' ' + descriptor + ' ?val']) if val: #TODO: we only consider the first result item! valL.append(self.oro.getLabel(val[0])) # otherwise, the object doesn't have this descriptor, and we don't include it # we make a set to remove repeated elements return list(set(valL)) # -- get_type_description ------------------------------------------------------# # Returns the first type of concept in the description. # # INPUT: # - description # # OUTPUT: # - type # - none # -------------------------------------------------------------------------------# def get_type_description(self, description): def find(value, seq): for item in seq: items = item.split() if value in items: return items[2] return None type = None for desc in description: type = find('rdf:type', desc[2]) if type: break return ResourcePool().ontology_server.getLabel(type) # -- CLARIFY ------------------------------------------------------------------# # Searches for a new descriptor candidate. The descriptor should be as # discriminating as possible. 
    # INPUT:
    # - description [['myself', '?obj', ['?obj rdf:type Bottle', '?obj hasColor blue']],
    #                ['agent1', '?obj', ['?obj isVisible True']]
    # - ignoreFeatureL [feat1..featN] List of features not to use as discriminators.
    #
    # OUTPUT:
    # - objectID: ok
    # - UnsufficientInputError:
    #   - [FAILURE, "new info required"]: no match, new info required (forget previous description)
    #   - [SUCCESS, "Which value? ..."]: user should indicate value for descriptor (mantain previous description)
    #   - [SUCCESS, "additional info required"]: user should give additional info (mantain previous description)
    # -----------------------------------------------------------------------------#
    def clarify(self, description, ignoreFeatureL=None):
        """Resolve *description* to a single object ID, or raise
        UnsufficientInputError carrying a question for the user when the
        description matches nothing or remains ambiguous.
        """
        if not ignoreFeatureL:
            ignoreFeatureL = []

        objL = self.get_all_objects_with_desc(description)

        if len(objL) == 0:
            logger.debug(colored_print('Nothing found!', "magenta"))
        else:
            logger.debug(
                colored_print('Found these possible concepts ID: ', "magenta") +
                colored_print(str(objL), 'blue'))

        if not self.oro: #No ontology server
            return 'UNKNOWN_CONCEPT_' + generate_id(with_question_mark=False)

        if not objL:
            questions = SentenceFactory().create_i_dont_understand()
            raise UnsufficientInputError({'status': 'FAILURE', 'question': questions})
            #return "I don't understand"
        else:
            # Check if the speaker sees only some of the object.
            # If he sees none of them, discriminate on the whole set.
            # Else, discriminate only on visible objects.
            agent = description[0][0]
            logger.debug("Checking which of these objects are visible for " + agent)
            visible_objects = self.visible_subset(agent, objL)
            if visible_objects:
                objL = visible_objects
                logger.debug(colored_print('Only ', "magenta") +
                             colored_print(str(objL), 'blue') +
                             colored_print(" are visible by " + agent, "magenta"))
            else:
                logger.debug(colored_print('None are visible by ' + agent, "magenta"))

            if len(objL) == 1:
                return objL[0]

            # two candidates that the ontology asserts to be the same individual
            if len(objL) == 2 and self.oro.check(['%s owl:sameAs %s' % (objL[0], objL[1])]):
                return objL[0]

            agent, descriptor = self.get_descriptor(description, ignoreFeatureL)
            object = self.get_type_description(description)

            if descriptor:
                sentence_builder = SentenceFactory()
                question = None
                values = self.get_values_for_descriptor(agent, descriptor, objL)
                if not object:
                    object = 'object'

                # build a "which ...?" question appropriate to the descriptor
                if descriptor == 'hasColor' or descriptor == 'mainColorOfObject':
                    questions = sentence_builder.create_w_question_choice(object, 'color', values)
                elif descriptor == 'hasShape':
                    questions = sentence_builder.create_w_question_choice(object, 'shape', values)
                elif descriptor == 'hasSize':
                    questions = sentence_builder.create_w_question_choice(object, 'size', values)
                elif descriptor == 'isOn':
                    questions = sentence_builder.create_w_question_location(object, 'on', values)
                elif descriptor == 'isIn':
                    questions = sentence_builder.create_w_question_location(object, 'in', values)
                elif descriptor == 'isNextTo':
                    questions = sentence_builder.create_w_question_location(object, 'next to', values)
                elif descriptor == 'isAt':
                    questions = sentence_builder.create_w_question_location(object, 'at', values)
                elif descriptor == 'isLocated':
                    questions = sentence_builder.create_w_question_location_PT(values, agent)
                elif descriptor == 'rdf:type':
                    questions = sentence_builder.create_w_question_choice(object, 'type', values)
                else:
                    questions = sentence_builder.create_w_question_generic_descriptor(object, descriptor, values)

                raise UnsufficientInputError({'status': 'SUCCESS', 'question': questions})
                #return questions
            else:
                # no discriminating feature left: ask the user for free-form
                # additional information ("Give me more information about the <object>")
                questions = [Sentence(IMPERATIVE, '', [],
                                      [VerbalGroup(['give'], [], 'present simple',
                                                   [NominalGroup([], ['information'], [['more', []]], [], [])],
                                                   [IndirectComplement([], [NominalGroup([], ['me'], [], [], [])]),
                                                    IndirectComplement(['about'], [NominalGroup(['the'], [object], [], [], [])])],
                                                   [], [], VerbalGroup.affirmative, [])])]
                raise UnsufficientInputError({'status': 'SUCCESS', 'question': questions})
                #return "Give me more information about the object"

    def visible_subset(self, agent, id_list):
        """ Returns the list of visible objects for an agent from a list of objects. """
        visible_objects = self.oro.findForAgent(ResourcePool().get_model_mapping(agent),
                                                "?o", [agent + " sees ?o"])

        return list(set(id_list) & set(visible_objects))

    # -- ADD_DESCRIPTOR -----------------------------------------------------------#
    # Includes descriptor in description list.
    #
    # INPUT:
    # - agent: to which agent the descriptor belongs to
    # - description: current description
    # - descriptor: feature
    # - value: feature value
    #
    # OUTPUT:
    # - new description
    # -----------------------------------------------------------------------------#
    def add_descriptor(self, agent, description, descriptor, value):
        """Append '?obj <descriptor> <value>' to *agent*'s entry of
        *description* and return the (mutated in place) description list.
        """

        # return sublist index in seq containing value
        def find(value, seq):
            for index, item in enumerate(seq):
                if value in item:
                    return index, item

        idx, desc = find(agent, description)
        desc[2].append('?obj ' + descriptor + ' ' + value)
        description[idx] = desc

        return description

    # -- FIND_UNAMBIGUOUS_DESC ---------------------------------------#
    # Searches an unambiguous description for a given object.
    # If it fails, it returns the most complete description found.
    #
    # INPUT:
    # - objectID: object to be described
    #
    # OUTPUT:
    # - a tuple (is_unambigous, description)
    #   - is_unambigous is a boolean
    #   - description is a set of partial statements like
    #     "?obj rdf:type Superman" describing as well as possible
    #     the object.
# ----------------------------------------------------------------# def find_unambiguous_desc(self, objectID): description = None # get the first class name types = [t for t in list(self.oro.getDirectClassesOf(objectID).keys()) if t not in ["ActiveConcept"]] # Not type asserted/inferred? then assume this object is unique. if not types: return True, [] type = types[0] myself = ResourcePool().default_model description = [[myself, '?obj', ['?obj rdf:type ' + type]]] objL = self.get_all_objects_with_desc(description) while len(objL) > 1: nbCandidates = len(objL) logger.debug('Description ' + objectID + ': ' + str(description)) logger.debug('ObjL: ' + str(objL)) agent, descriptor = self.get_descriptor(description, [], True) if not descriptor: break val = self.oro.findForAgent(ResourcePool().get_model_mapping(agent), '?val', [objectID + ' ' + descriptor + ' ?val']) if not val: break description = self.add_descriptor(agent, description, descriptor, val[0]) objL = self.get_all_objects_with_desc(description) if nbCandidates == len(objL): logger.error("While trying to find an unambiguous description" + \ " of " + objectID + ", oro answered a non-discriminant" + \ " property. Bug in oro? Halting here for now.") break if len(objL) == 1: unambiguous = True else: unambiguous = False return unambiguous, description[0][2]
There is an endless list of things I love about travelling. Seeing new cultures, beautiful scenery, food, and adventure. But my favourite will always be meeting new people. Even if you only know them for a moment - a short conversation or perhaps a glance across a room. That small insight into a world so different to your own can be inspiring. A window into a life - a story. The waitress with a perpetual smile and contagious warmth, the man who spends his evenings welding jewellery, the woman in the shop with the colourful dresses. Kaş is a vibrant town filled with interesting characters. Today I'm sharing with you a series of portraits of some of the people I met during my Summer stay in Turkey.
from django.conf.urls import include, url
from django.contrib.auth.decorators import login_required, permission_required
from stock.views import ProductListView, WarningProductListView, DangerProductListView, SearchProductListView
from stock.views import ProductByCategoryListView
from stock.views import ProductDetailView
from stock.views import ProductCreateView
from stock.views import ProductUpdateView
from stock.views import ProductDeleteView
from stock.views import PackageListView
from stock.views import PackageDetailView
from stock.views import PackageCreateView
from stock.views import PackageUpdateView
from stock.views import PackageDeleteView
from stock.views import CategoryCreateView
from stock.views import CategoryDeleteView

# URL routes for the stock application.  Every view requires authentication.
#
# NOTE: django.conf.urls.patterns() was deprecated in Django 1.8 and removed
# in Django 1.10.  All routes here already use view callables (none rely on
# the old 'stock.views' dotted-string prefix), so a plain list of url()
# entries is a drop-in, forward-compatible replacement.
urlpatterns = [
    # product listings
    url(r'^$', login_required(ProductListView.as_view()), name='stock'),
    url(r'^products/$', login_required(ProductListView.as_view()), name='products'),
    url(r'^products/search$', login_required(SearchProductListView.as_view()), name='products-search'),
    url(r'^products/category/(?P<pk>\d+)$', login_required(ProductByCategoryListView.as_view()), name='products-category'),
    url(r'^products/warning$', login_required(WarningProductListView.as_view()), name='products-warning'),
    url(r'^products/danger$', login_required(DangerProductListView.as_view()), name='products-danger'),

    # product detail / CRUD
    url(r'^product/(?P<pk>\d+)$', login_required(ProductDetailView.as_view()), name='product-detail'),
    url(r'^product_create/$', login_required(ProductCreateView.as_view()), name='product-create'),
    url(r'^product_update/(?P<pk>\d+)$', login_required(ProductUpdateView.as_view()), name='product-edit'),
    url(r'^product_delete/(?P<pk>\d+)$', login_required(ProductDeleteView.as_view()), name='product-delete'),

    # package detail / CRUD
    url(r'^packages/$', login_required(PackageListView.as_view()), name='packages'),
    url(r'^package/(?P<pk>\d+)$', login_required(PackageDetailView.as_view()), name='package-detail'),
    url(r'^package_create/$', login_required(PackageCreateView.as_view()), name='package-create'),
    url(r'^package_update/(?P<pk>\d+)$', login_required(PackageUpdateView.as_view()), name='package-edit'),
    url(r'^package_delete/(?P<pk>\d+)$', login_required(PackageDeleteView.as_view()), name='package-delete'),

    # category CRUD
    url(r'^category_create/$', login_required(CategoryCreateView.as_view()), name='category-create'),
    url(r'^category_delete/(?P<pk>\d+)$', login_required(CategoryDeleteView.as_view()), name='category-delete'),
]
Are you looking for Tulips Delivery? Want to send flowers to someone in 167 Riversdale Road, London, N5? Order Tulips Delivery in Highbury Hackney N5 and our team will arrange for flower delivery; we deliver fresh Tulips Delivery at low prices. Whether you need funeral flowers or want to surprise your beloved with romance flowers in Highbury Hackney N5, we can help you. Send flowers online or get in touch with our friendly customer support team over the phone or via email and our team will get your flowers delivered with a smile. You can also order our Highbury Hackney N5 flower delivery services by completing the contact form on our site. Share your individual requirements with our staff, confirm the date for flower delivery and our Tulips Delivery experienced florists will get your flowers delivered at the address in Highbury Hackney N5 you have given us.
from optimize import *
from diagnostics import *
import configs
import argparse
from data import *
import copy

# Python 2 script: trains a compositional path-query model first on single
# edges, then on full paths (warm-started), and evaluates both models.

# parse command line arguments
parser = argparse.ArgumentParser()
parser.add_argument('config')
parser.add_argument('dataset_path')
parser.add_argument('-i', '--initial_params', default=None)
parser.add_argument('-glove', '--glove_vectors', default=None)
args = parser.parse_args()

# load pre-set configuration from configs module
config = getattr(configs, args.config)
config['dataset_path'] = args.dataset_path
config['params_path'] = args.initial_params
config['glove_path'] = args.glove_vectors

# load all configs into local namespace
# NOTE: the exec below injects every config entry (path_model, wvec_dim,
# report_wait, l2_reg, batch_size, ...) as a module-level name used later.
for var, val in config.iteritems():
    exec("{0} = config['{0}']".format(var))
    util.metadata(var, val)  # this logs parameters to a metadata file.


def print_header(msg):
    # print an upper-cased, underlined section banner
    print
    print msg.upper()
    print '=====' * 5
    print


# define training procedure
def build_trainer(train, test, max_steps, step_size, init_params=None):
    """Assemble an OnlineMaximizer for the compositional model.

    Only single-edge queries from *train* feed the negative-sample graph.
    If *init_params* is None, fresh parameters are initialized.
    """
    # negative triple generator for training
    triples = [(q.s, str(q.r[0]), q.t) for q in train if len(q.r) == 1]
    train_graph = Graph(triples)
    train_neg_gen = NegativeGenerator(train_graph, max_negative_samples_train,
                                      positive_branch_factor, type_matching_negs
                                      )

    # specify the objective to maximize
    objective = CompositionalModel(train_neg_gen, path_model=path_model,
                                   objective='margin')

    # initialize params if not already initialized
    if init_params is None:
        init_params = objective.init_params(
            dset.entity_list, dset.relations_list, wvec_dim, model=path_model,
            hidden_dim=hidden_dim, init_scale=init_scale,
            glove_path=glove_path)

    save_wait = 1000  # save parameters after this many steps
    eval_samples = 200  # number of examples to compute objective on

    # define Observers
    observers = [NormObserver(report_wait), SpeedObserver(report_wait),
                 ObjectiveObserver(eval_samples, report_wait)]

    # this Observer computes the mean rank on each split
    rank_observer = RankObserver({'train': train, 'test': test},
                                 dset.full_graph, eval_samples,
                                 max_negative_samples_eval, report_wait,
                                 type_matching_negs=True)
    observers.append(rank_observer)

    # define Controllers
    controllers = [BasicController(report_wait, save_wait, max_steps),
                   DeltaClipper(), AdaGrad(), UnitNorm()]

    trainer = OnlineMaximizer(
        train, test, objective, l2_reg=l2_reg, approx_reg=True,
        batch_size=batch_size, step_size=step_size, init_params=init_params,
        controllers=controllers, observers=observers)

    return trainer


# NOTE(review): maximum_examples=100 caps the dataset size -- looks like a
# debugging setting; confirm before a full run.
dset = parse_dataset(dataset_path, dev_mode=False, maximum_examples=100)

warm_start = params_path is not None
if warm_start:
    print 'loading warm start params...'
    init_params = load_params(params_path, path_model)
else:
    init_params = None

print_header('single-edge training')

# train the model on single edges
one_hop_only = lambda queries: [q for q in queries if len(q.r) == 1]
trainer0 = build_trainer(one_hop_only(dset.train), one_hop_only(dset.test),
                         max_steps_single, step_size_single, init_params)
params0 = trainer0.maximize()
params_single = copy.deepcopy(params0)

print_header('path training')

# train the model on all edges, with warm start from single-edge model
trainer = build_trainer(dset.train, dset.test, max_steps_path,
                        step_size_path, params0)
params_comp = trainer.maximize()

print_header('evaluation')


def report(queries, model, neg_gen, params):
    """Print and return (mean_quantile, hits_at_10) for *model* on *queries*.

    Queries with no negatives get a NaN quantile and are excluded from the
    aggregates.
    """
    scores = lambda query: model.predict(params, query).ravel()

    def compute_quantile(query):
        s, r, t = query.s, query.r, query.t
        negatives = neg_gen(query, 't')
        pos_query = PathQuery(s, r, t)
        neg_query = PathQuery(s, r, negatives)

        # don't score queries with no negatives
        if len(negatives) == 0:
            query.quantile = np.nan
        else:
            query.quantile = util.average_quantile(scores(pos_query),
                                                   scores(neg_query))

        query.num_candidates = len(negatives) + 1

    for query in util.verboserate(queries):
        compute_quantile(query)

    # filter out NaNs
    queries = [q for q in queries if not np.isnan(q.quantile)]

    mean_quantile = np.mean([q.quantile for q in queries])
    hits_at_10 = np.mean([1.0 if util.rank_from_quantile(q.quantile, q.num_candidates) <= 10
                          else 0.0 for q in queries])

    print 'mean_quantile:', mean_quantile
    print 'h10', hits_at_10
    return mean_quantile, hits_at_10


# used for all evaluations
neg_gen = NegativeGenerator(dset.full_graph, float('inf'),
                            type_matching_negs=True)

print_header('path query evaluation')

print '--Single-edge trained model--'
mq, h10 = report(dset.test, trainer0.objective, neg_gen, params_single)
util.metadata(('path_queries', 'SINGLE', 'mq'), mq)
util.metadata(('path_queries', 'SINGLE', 'h10'), h10)
print

print '--Compositional trained model--'
mq, h10 = report(dset.test, trainer.objective, neg_gen, params_comp)
util.metadata(('path_queries', 'COMP', 'mq'), mq)
util.metadata(('path_queries', 'COMP', 'h10'), h10)
print

print_header('single edge evaluation')

print '--Single-edge trained model--'
mq, h10 = report(one_hop_only(dset.test), trainer0.objective, neg_gen, params_single)
util.metadata(('single_edges', 'SINGLE', 'mq'), mq)
util.metadata(('single_edges', 'SINGLE', 'h10'), h10)
print

print '--Compositional trained model--'
mq, h10 = report(one_hop_only(dset.test), trainer.objective, neg_gen, params_comp)
util.metadata(('single_edges', 'COMP', 'mq'), mq)
util.metadata(('single_edges', 'COMP', 'h10'), h10)
Gifting for the lunar lover in your life? My CRATER line is inspired by the powerful influence the moon can have in our lives. Each phase has its own series of intentions and goal setting, its own lessons of digging in or letting go. On a grander scale, I derive strength from its presence, felt or seen- the knowledge that no matter how much of itself the moon reveals to us, she is always whole and complete. No matter what you reveal, no matter what you have, where you are, or what you’re going through- you are worthy. You are enough. You are whole.
""" An ingest module """ from lxml import html import requests import json from bs4 import BeautifulSoup import re #Python 3 import urllib.request # #Python 2 # import urllib import pymongo import os import pickle class IngestSystem(object): def __init__(self, cl): self.cities = cl def pull_and_load(self): ''' l = self.get_city_urls() r = [] for city in l: print(city) one_city = self.get_restaurant_urls(city) print(one_city) # Get the 100 most popular restaurants for each city #for w in one_city[:100]: for w in one_city: ## Additional DC restaurants ONLY to pull all restaurants if ('menu' in w[0]) and ('kids' not in w[0]): r.append(w) pickle.dump(r,open('restaurant_url_list.txt','wb')) r=pickle.load(open('restaurant_url_list.txt', 'rb')) print(len(r)) self.store_raw(r[200:300]) ''' self.build_database() def get_city_urls(self): url_list = [] for i in self.cities: url_list.append(('http://www.allmenus.com/'+i['state']+'/'+i['city']+'/-/?sort=popular', i['city'], i['state'])) return url_list def get_restaurant_urls(self, url_citystate_tuple): uct = url_citystate_tuple a = HTMLReader(uct[0]) citysoup = a.html_to_soup() urllist = a.soup_to_urllist(citysoup, uct[1], uct[2]) return urllist def store_raw(self, rest_list): for r in rest_list: splt = r[0].split('/') a = HTMLReader('http://www.allmenus.com'+r[0]) restsoup = a.html_to_soup() with open("raw_data/"+splt[1]+"_"+splt[2]+"_"+splt[3]+".html", "w") as f: print("Writing "+splt[1]+"_"+splt[2]+"_"+splt[3]+".html") f.write(restsoup.prettify()) def build_database(self): l = [] for filenm in os.listdir('raw_data/'): if filenm != '.DS_Store': tmp = Restaurant(filenm).db_obj() if (len(tmp['menu']) >= 1) and (tmp['latitude'] != 9999) and (tmp['type'] != ""): l.append(tmp) print(len(l)) ''' conn = pymongo.MongoClient() db = conn.rdata for i in l: print("Insert "+i['name']) db.restaurants.insert_one(i) ''' self.final_rlist = l class HTMLReader(object): def __init__(self, uct): self.url = uct def html_to_soup(self): html = 
urllib.request.urlopen(self.url).read() soup = BeautifulSoup(html, "lxml") return soup def soup_to_urllist(self, soup, cityname, statename): tmp = [] match = '/'+statename for u in soup.findAll("a", href=True): if (u['href'])[:3] == match: tmp.append((u['href'], cityname, statename)) return tmp def build_info(self): pass def build_menu(self): pass class Restaurant(object): def __init__(self, filenm): soup = BeautifulSoup(open('raw_data/'+filenm, 'r'), "lxml") self.name = soup.find("h1", {"itemprop": "name"}).string.strip() self.street = soup.find("span", {"itemprop": "streetAddress"}).string.strip() self.city = soup.find("span", {"itemprop": "addressLocality"}).string.strip() self.state = soup.find("span", {"itemprop": "addressRegion"}).string.strip() self.zip = soup.find("span", {"itemprop": "postalCode"}).string.strip() self.lat = str(soup.find("meta", {"itemprop": "latitude"})) self.lng = str(soup.find("meta", {"itemprop": "longitude"})) self.ratings = soup.findAll(attrs = {"itemprop": "ratingValue"}) self.msoup = soup.findAll("li") def db_obj(self): r={} l=[] r['name'] = self.name r['street'] = self.street r['city'] = self.city r['state'] = self.state r['zip'] = self.zip # Add geolocation information try: r['latitude'] = float(re.findall(r'"(.*?)"', self.lat)[0]) r['longitude'] = float(re.findall(r'"(.*?)"', self.lng)[0]) except ValueError: r['latitude'] = float(9999.000) r['longitude'] = float(9999.000) #Create a city group for suburb city names a = self.city if a in ['Dunwoody', 'East Point', 'Sandy Springs']: r['city_group'] = 'Atlanta' elif a in ['Alsip', 'Cicero', 'Evergreen Park', 'Harwood Heights', 'Elmwood Park']: r['city_group'] = 'Chicago' elif a in ['Hollywood', 'West Hollywood']: r['city_group'] = 'Los Angeles' elif a in ['Greenfield', 'Wauwatosa', 'West Allis']: r['city_group'] = 'Milwaukee' elif a in ['South Austin']: r['city_group'] = 'Austin' else: r['city_group'] = a # Take an average of ratings, or else assign a 2.0 if len(self.ratings) == 0: 
r['avg_rating'] = 0.0 else: num=0 count=0 for i in self.ratings: num=num+float(i['content']) count=count+1 r['avg_rating'] = num/float(count) # Add a blank to cuisine type is missing data if self.msoup[0].string: r['type'] = self.msoup[0].string.strip() else: r['type'] = "" # Create a second consolidated cusine type if self.msoup[0].string: a = self.msoup[0].string.strip() if a in ['Ethiopian']: r['type_2'] = 'African' elif a in ['Hawaiian', 'Local/Organic', 'American (New)']: r['type_2'] = 'American' elif a in ['Breakfast', 'Bakery & Pastries', 'Coffee & Tea']: r['type_2'] = 'Bakery, Breakfast & Coffee' elif a in ['Gastropub', 'Pub Food']: r['type_2'] = 'Bar Food' elif a in ['Hot Dogs', 'Burgers']: r['type_2'] = 'Burgers & Hot Dogs' elif a in ['Dominican', 'Jamaican']: r['type_2'] = 'Caribbean' elif a in ['Asian Fusion', 'Taiwanese']: r['type_2'] = 'Chinese' elif a in ['Sandwiches', 'Deli Food']: r['type_2'] = 'Deli & Sandwiches' elif a in ['Ice Cream', 'Crepes']: r['type_2'] = 'Desserts' elif a in ['Austrian', 'British', 'Eastern European', 'Eclectic & International', 'Spanish', 'French', 'Belgian', 'Irish', 'German', 'Polish']: r['type_2'] = 'European' elif a in ['Puerto Rican', 'Brazilian', 'Central American']: r['type_2'] = 'Latin American' elif a in ['Greek']: r['type_2'] = 'Mediterranean' elif a in ['Sushi', 'Seafood']: r['type_2'] = 'Seafood & Sushi' elif a in ['Soul Food', 'Cajun & Creole']: r['type_2'] = 'Southern' elif a in ['Tex-Mex']: r['type_2'] = 'Southwestern' elif a in ['Chicago Grill']: r['type_2'] = 'Steak' elif a in ['Burmese', 'Malaysian']: r['type_2'] = 'Thai' elif a in ['Noodles']: r['type_2'] = 'Vietnamese' elif a in ['Pakistani']: r['type_2'] = 'Middle Eastern' elif a in ['Salads']: r['type_2'] = 'Vegetarian' else: r['type_2'] = a else: r['type_2'] = "" # Create menu, add blanks if either price or description fields are missing for i in self.msoup: m={} if i.find("span","name") or i.find("span","price") or i.find("p", "description"): if 
i.find("span","name"): m["item"] = i.find("span","name").string.strip() else: m["item"] = "" # For prices, set $0.00 to blanks and take the first price in a range of prices if i.find("span","price"): tmppr = i.find("span","price").string.strip() tmppr = re.sub('[$]', '', tmppr) print(tmppr) if '-' not in tmppr: if tmppr == "" or tmppr == " ": m["price"] = "" elif float(tmppr) == 0: m["price"] = "" else: m["price"] = float(tmppr) else: if tmppr[0:tmppr.find('-')] == "" or tmppr[0:tmppr.find('-')] == " ": m["price"] = "" else: m["price"] = float(tmppr[0:tmppr.find('-')]) else: m["price"] = "" if i.find("p","description"): m["description"] = i.find("p","description").string.strip() else: m["description"] = "" l.append(m) r['menu'] = l return r
There have been 6 recalls on Forester vehicles in recent years. The latest, dated 2017-10-03 and published by the NHTSA on 2017-10-11, was due to a problem with the wheels. Since 1997, Foresters have experienced approximately 12 recalls due to hazards with various components, including the labels, tail lights, recreational vehicle, electrical system and equipment. See a list of recalls for your model year below. Recall Summary: Forest River, Inc. (Forest River) is recalling certain 2018 Forester motorhomes, model FRC3271SF. The Federal Placard may inaccurately state that the vehicle has single rear wheels and tires, when the vehicle is equipped with dual rear wheels and tires. As such, these vehicles fail to comply with the requirements of Federal Motor Vehicle Safety Standard (FMVSS) number 120, "Wheels and Rims-Other than Passenger Cars" and fail to comply with the requirements of 49 CFR Part 567, "Certification." Hazard: The Federal Placards may cause the operator to not properly maintain the tires and/or wheels. Failure to maintain the tires may lead to improper handling, potentially increasing the risk of a crash. Remedy: Forest River will notify owners and will provide a corrected Placard, free of charge. The recall is expected to begin November 28, 2017. Owners may contact Forest River customer service at 1-574-206-7600. Forest River's number for this recall is 34-0528. Recall Summary: Forest River, Inc. (Forest River) is recalling certain 2018 Forest River Dynamax, Forester, Sunseeker, and Coachmen Prism vehicles built on Mercedes-Benz Sprinter vans. The affected vehicles have tire labels on the B-pillar that list a tire pressure that is too low. As such, these vehicles fail to comply with the requirements of 49 CFR Part 567, "Certification." Remedy: Forest River will notify owners, and authorized Mercedes-Benz or Freightliner Sprinter dealers will inspect and replace the tire pressure label with correct tire inflation pressure, free of charge.
The recall is expected to begin October 16, 2017. Owners may contact Forest River customer service at 1-854-888-3214. Forest River's number for this recall is 35.55.215-0491. Recall Summary: Forest River, Inc. (Forest River) is recalling certain model year 2016-2017 Forester and Sunseeker recreational vehicles manufactured from February 29, 2016, through April 20, 2016. These vehicles are equipped with a shower door that may not meet the flammability standards. Thus, these vehicles fail to comply with the requirements of Federal Motor Vehicle Safety Standard (FMVSS) No. 302, "Flammability of Interior Materials." Hazard: Failure to meet the flammability standard of FMVSS 302 could enhance the spread of a flame in the event of a fire, increasing the risk of serious injury. Remedy: Forest River will notify owners, and dealers will replace the shower doors, free of charge. The recall is expected to begin on, or about, July 12, 2016. Owners may contact Forest River customer service at 1-574-206-7600. Forest River's number for this recall is 34-05052016-0184.
from django.shortcuts import render
from django.http import HttpResponse
from django.template.loader import get_template
from django.template import Context
from pincode.models import Pincode
from forms import PincodeForm
from django.http import HttpResponseRedirect
from django.core.context_processors import csrf
#from haystack.query import SearchQuerySet

# Create your views here.

def pincodes(request):
    # List every pincode (default model ordering).
    args = {}
    args.update(csrf(request))
    args['pincodes'] = Pincode.objects.all()
    return render(request, "pincodes.html", args)

def pincodes_pincode(request):
    # List every pincode, ordered by the pincode number.
    args = {}
    args.update(csrf(request))
    args['pincodes'] = Pincode.objects.all().order_by('pincode')
    return render(request, "pincodes.html", args)

def pincodes_state(request):
    # List every pincode, ordered by state name.
    args = {}
    args.update(csrf(request))
    args['pincodes'] = Pincode.objects.all().order_by('state_name')
    return render(request, "pincodes.html", args)

def pincodes_district(request):
    # List every pincode, ordered by district name.
    args = {}
    args.update(csrf(request))
    args['pincodes'] = Pincode.objects.all().order_by('district_name')
    return render(request, "pincodes.html", args)

'''
def pincode_single(request, pincode_id=1):
    return render(request,"pincode.html", {'pincode' : Pincode.objects.get(id=pincode_id)})
'''

def pincode_single(request, pincode_id=1):
    # Show a single pincode record by primary key.
    return render(request, "pincode_single.html", {'pincode': Pincode.objects.get(id=pincode_id)})

def create(request):
    # Create a new pincode entry from the submitted form, then redirect
    # back to the full listing.
    if request.POST:
        form = PincodeForm(request.POST)
        if form.is_valid():
            form.save()
            return HttpResponseRedirect('/pincode/all')
    else:
        form = PincodeForm()
    args = {}
    args.update(csrf(request))
    args['form'] = PincodeForm()
    return render(request, "create_entry.html", args)

'''
def update(request, pincode_id=1):
    if request.POST:
        form = PincodeForm(request.POST)
        if form.is_valid():
            form.save()
            return HttpResponseRedirect('/pincode/get/{{ pincode_id }}')
    else:
        form = PincodeForm()
    args ={}
    args.update(csrf(request))
    args['form'] = PincodeForm()
    return render(request, "update_entry.html", args)
'''

def update(request, pincode_id=1):
    # Update an existing pincode record in place from the submitted form,
    # copying each cleaned field onto the fetched instance.
    if request.POST:
        form = PincodeForm(request.POST)
        if form.is_valid():
            pincode = form.cleaned_data['pincode']
            office_name = form.cleaned_data['office_name']
            district_name = form.cleaned_data['district_name']
            state_name = form.cleaned_data['state_name']
            new = Pincode.objects.get(id=pincode_id)
            new.pincode = pincode
            new.office_name = office_name
            new.district_name = district_name
            new.state_name = state_name
            new.save()
            return HttpResponseRedirect('/pincode/all/')
    else:
        form = PincodeForm()
    args = {}
    args.update(csrf(request))
    args['form'] = PincodeForm()
    return render(request, "update_entry.html", args)

'''
def search(request, id):
    args = {}
    args.update(csrf(request))
    args['pincodes'] = Pincode.objects.filter(pincode__contains=id)
    return render(request, "search_items.html",args)

def search_items(request):
    if request.method == "POST":
        search_text = request.POST['search_text']
    else:
        search_text = ''
    form = PincodeForm()
    args ={}
    args.update(csrf(request))
    args['form'] = PincodeForm()
    return render(request,"search_items.html", args)

def search_pincodes(request):
    if request.mehod == "POST":
        search_text = request.POST['search_text']
    else:
        search_text = ''
    pincodes = Pincode.objects.filter(pincode__contains=search_text)
    return render(request, "ajax_search.html", {'pincodes' : pincodes})
'''
Lean In supports the advancement and success of women and their allies through educational offerings, connections and support circles, and public awareness campaigns. © 2017 NINA VACA. ALL RIGHTS RESERVED.
#!/usr/bin/python
# encoding: utf-8
"""
mail.py
For email functions

Created by Shane O'Connor 2013
"""

from string import join
import email.Message
import smtplib
from email.MIMEMultipart import MIMEMultipart
from email.MIMEText import MIMEText
from klab.fs.fsio import read_file


class MailServer(object):
    """Helper around smtplib for sending plain-text/HTML mail, either via a
    configurable SMTP host/port or via a Gmail account."""

    def __init__(self, host=None, port=None):
        self.host = host
        self.port = port

    def sendmail(self, subject, sender, recipients, plaintext, htmltext=None, cc=None, debug=False, useMIMEMultipart=True):
        """Send one message through self.host/self.port (or the local SMTP
        server when neither is set).

        recipients and cc may each be a single address string or a list.
        Returns True when a message was sent (or printed when debug=True),
        False when *recipients* is empty.  Raises Exception on unexpected
        argument types.
        """
        if recipients:
            # normalize recipients (+ cc) to a single ';'-joined string
            if type(recipients) == type(""):
                recipients = [recipients]
            elif type(recipients) != type([]):
                raise Exception("Unexpected type for recipients.")
            if cc:
                if type(cc) == type(""):
                    recipients.append(cc)
                elif type(cc) == type([]):
                    recipients.extend(cc)
                else:
                    raise Exception("Unexpected type for cc.")
            recipients = join(recipients, ";")

            # multipart/alternative only when both bodies are supplied and requested
            if plaintext and htmltext and useMIMEMultipart:
                msg = MIMEMultipart('alternative')
            else:
                msg = email.Message.Message()

            msg['Subject'] = subject
            msg['From'] = sender
            msg['To'] = recipients
            msg['Reply-To'] = sender

            if plaintext and htmltext and useMIMEMultipart:
                part1 = MIMEText(plaintext, 'plain')
                part2 = MIMEText(htmltext, 'html')
                msg.attach(part1)
                msg.attach(part2)
            else:
                msg.set_type("text/plain")
                msg.set_payload(plaintext)

            if debug:
                print(msg)
            else:
                if self.host and self.port:
                    s = smtplib.SMTP(self.host, self.port)
                elif self.host:
                    s = smtplib.SMTP(self.host)
                else:
                    s = smtplib.SMTP()
                s.connect()
                s.sendmail(msg['From'], recipients, msg.as_string())
                s.close()
            return True
        return False

    def sendgmail(self, subject, recipients, plaintext, htmltext=None, cc=None, debug=False, useMIMEMultipart=True, gmail_account='kortemmelab@gmail.com', pw_filepath=None):
        '''For this function to work, the password for the gmail user must be colocated with this file or passed in.'''
        smtpserver = smtplib.SMTP("smtp.gmail.com", 587)
        smtpserver.ehlo()
        smtpserver.starttls()
        # BUGFIX: was 'smtpserver.ehlo' (attribute access, a no-op) -- the
        # second EHLO required after STARTTLS must actually be called.
        smtpserver.ehlo()
        # BUGFIX: removed the hard-coded reassignment of gmail_account which
        # silently discarded the caller-supplied account (default unchanged).
        if pw_filepath:
            smtpserver.login(gmail_account, read_file(pw_filepath))
        else:
            smtpserver.login(gmail_account, read_file('pw'))
        for recipient in recipients:
            if htmltext:
                # send an HTML message
                msg = MIMEText(htmltext, 'html')
                msg['From'] = gmail_account
                msg['To'] = recipient
                msg['Subject'] = subject
                smtpserver.sendmail(gmail_account, recipient, msg.as_string())
            else:
                # send raw plaintext with hand-built headers
                header = 'To:' + recipient + '\n' + 'From: ' + gmail_account + '\n' + 'Subject:' + subject + '\n'
                msg = header + '\n ' + plaintext + '\n\n'
                smtpserver.sendmail(gmail_account, recipient, msg)
        smtpserver.close()

    def sendgmail2(self, subject, recipients, plaintext, htmltext=None, cc=None, debug=False, useMIMEMultipart=True, gmail_account='kortemmelab@gmail.com', pw_filepath=None):
        '''For this function to work, the password for the gmail user must be colocated with this file or passed in.'''
        smtpserver = smtplib.SMTP("smtp.gmail.com", 587)
        smtpserver.ehlo()
        smtpserver.starttls()
        # BUGFIX: was 'smtpserver.ehlo' (attribute access, a no-op) -- the
        # second EHLO required after STARTTLS must actually be called.
        smtpserver.ehlo()
        # BUGFIX: removed the hard-coded reassignment of gmail_account which
        # silently discarded the caller-supplied account (default unchanged).
        if pw_filepath:
            smtpserver.login(gmail_account, read_file(pw_filepath))
        else:
            smtpserver.login(gmail_account, read_file('pw'))
        for recipient in recipients:
            # always raw text here; htmltext takes priority over plaintext
            header = 'To:' + recipient + '\n' + 'From: ' + gmail_account + '\n' + 'Subject:' + subject + '\n'
            if htmltext:
                msg = header + '\n ' + htmltext + '\n\n'
            else:
                msg = header + '\n ' + plaintext + '\n\n'
            smtpserver.sendmail(gmail_account, recipient, msg)
        smtpserver.close()
INOVA – Fairfax Hospital will be filing a Proffer Condition Amendment. and schedules are not available at this time. Industrial Uses are posted on the zMOD Website. 2. Industrial use zoning regulations. Wednesday, October 10 – 7:00 p.m. how the two items above differ. totally eliminated [such as the sewer tap fee]. The draft of Volume I: Urban Design Guidelines for the CRDs and CRAs is completed and the County is undertaking community outreach and engagement on the draft document. Check it out above. About the Guidelines – Volume I Guidelines include wide-ranging urban design best practices and apply holistically to all of the Fairfax County CRDs and CRAs (except Lake Anne). They provide a common vocabulary to promote discussion about streetscapes, public spaces, and overall development plans and offer visual images about complex ideas and provide alternative approaches to achieving Comprehensive Plan objectives for CRDs and CRAs. They are intended to be used by citizens, developers, Fairfax County staff, the Fairfax County Planning Commission, and the Fairfax County Board of Supervisors when either proposing or reviewing development proposals in CRDs and CRAs. A separate Volume II District-specific Guidelines (Volume II Guidelines) will be uniquely created for each CRD and CRA and customized to the needs of the area. Volume II Guidelines will be completed for all CRDs and CRAs in the coming year. 4. Comprehensive Plan Amendment for 8800 Richmond Highway. about changes to the PFM. Some are significant, they are changing “shall” to “should” in a number of places, etc. Planning Commission Public Hearing scheduled for Thursday, October 18, 2018. Non-Office Building repurposing Planning Commission Public Hearing September 27 – B of S Tuesday, October 30. The proposed Plan amendment focuses on existing non-office commercial buildings in areas planned for retail and other non-office commercial uses, or as retail/commercial components of mixed-use developments. 
Such structures include freestanding retail/commercial buildings, various types of shopping centers, and ground-floor retail components of mixed-use buildings. Appendix 1 of this report. This is a FREE, two-session community development program designed to help you learn more about your local government and the importance of community engagement. Gain a clearer understanding of the critical role local government plays in our daily lives along with better knowledge of the programs and services available to Fairfax residents. Learn how to effectively use county resources including health and human services, transportation, public safety, land use and zoning to improve neighborhood quality of life. Learn about volunteer opportunities and ways to become involved in ongoing planning and collaboration with the county to help us build a stronger community for the future. Sessions will be held on Wednesday evenings, October 3rd and October 17th, 7:00 pm – 9:00 pm, at the Providence Community Center, 3001 Vaden Drive, Fairfax VA 22031. Online registration is available at: https://bit.ly/2LyIJ0B or here. Deadline for online and paper registrations is September 28, 2018.
# -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2017-08-28 20:00
from __future__ import unicode_literals

from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):
    """Add transaction tables and uniqueness constraints to the simulation app.

    Creates the InternalTransfers, Purchases and Sales models and makes
    (scenario, name) unique on the existing feeds and feedtypes tables.

    NOTE: auto-generated migration — do not hand-edit the operations once
    this migration has been applied to any database.
    """

    dependencies = [
        ('simulation', '0007_feeds_price'),
    ]

    operations = [
        # Transfers of goods between two Enterprises; 'origin' and
        # 'destination' are both FKs to Enterprises, distinguished by
        # related_name ('xfers_out' / 'xfers_in').
        migrations.CreateModel(
            name='InternalTransfers',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('date', models.DateField()),
                ('quantity', models.FloatField()),
                ('unit_value', models.FloatField()),
                ('destination', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='xfers_in', to='simulation.Enterprises')),
                ('item', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='simulation.Goods')),
                ('origin', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='xfers_out', to='simulation.Enterprises')),
                ('rep', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='simulation.Rep')),
            ],
            options={
                'abstract': False,
            },
        ),
        # Goods bought from an AccountsSupplier into an Enterprise.
        migrations.CreateModel(
            name='Purchases',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('date', models.DateField()),
                ('quantity', models.FloatField()),
                ('unit_value', models.FloatField()),
                ('destination', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='simulation.Enterprises')),
                ('item', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='simulation.Goods')),
                ('rep', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='simulation.Rep')),
                ('vendor', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='simulation.AccountsSupplier')),
            ],
            options={
                'abstract': False,
            },
        ),
        # Goods sold from an Enterprise to an AccountsCustomer.
        migrations.CreateModel(
            name='Sales',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('date', models.DateField()),
                ('quantity', models.FloatField()),
                ('unit_value', models.FloatField()),
                ('buyer', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='simulation.AccountsCustomer')),
                ('item', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='simulation.Goods')),
                ('origin', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='simulation.Enterprises')),
                ('rep', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='simulation.Rep')),
            ],
            options={
                'abstract': False,
            },
        ),
        # Enforce one feed name per scenario.
        migrations.AlterUniqueTogether(
            name='feeds',
            unique_together=set([('scenario', 'name')]),
        ),
        # Enforce one feed-type name per scenario.
        migrations.AlterUniqueTogether(
            name='feedtypes',
            unique_together=set([('scenario', 'name')]),
        ),
    ]
The Yomiuri ShimbunSaplings of Japanese-born fruits such as the Shine Muscat premium grape have been flown out and grown overseas, and their products are circulating in Southeast Asian markets, according to a survey by the agriculture ministry. The Agriculture, Forestry and Fisheries Ministry, with an increasing sense of crisis, is moving to strengthen protections of Japanese-made agricultural products. Shine Muscat is characterized by the jade color of its bunches and a sweet and refreshing taste. As consumers can easily eat the grapes without peeling the skin, they are traded at high prices. The National Agriculture and Food Research Organization (NARO) took 18 years to develop the variety in Japan and registered it here in 2006. According to the ministry, 1,195 hectares in Japan were dedicated to cultivating the variety in 2016, a 597-fold increase from two hectares in 2007. The products are exported to Southeast Asian countries and regions, including Hong Kong. In addition, Shine Muscat grapes produced in China and South Korea are circulating at Southeast Asian markets in Hong Kong, Thailand, Malaysia, Vietnam and others, where they compete with the Japanese products. In Japan, the Plant Variety Protection and Seed Law allows registrants to monopolize sales rights for 25 years once the variety of an agricultural product is registered. In the case of fruits, the duration of the sales rights extends to 30 years. However, in China, South Korea and others, unless application for registration of varieties is made within four years from the start of their sales in Japan (six years for fruits), sales rights would not be granted there, and nor is it possible to request injunctions to stop growing the varieties. Learning a lesson from such bitter experience, the agriculture ministry began a project in fiscal 2016 to support domestic farmers to apply for registration of varieties overseas. 
The ministry supports farmers who seek counseling services and advice from lawyers. As it costs ¥1 million to ¥2 million to make a registration application abroad, the Japanese government covers more than half of the cost. So far, about 250 cases of application have been submitted, leading to a total of four varieties of sweet potato and rice being registered in South Korea and Vietnam. Two varieties of citrus fruits, “Mihaya” and “Asumi,” that NARO registered in Japan in 2014, were also flown to South Korea and widely grown without permission on Jeju Island. For that reason, NARO is applying for the registration of the varieties in South Korea. In response, South Korean authorities are believed to have warned farmers in the country that there is a possibility that they could infringe on rights if the varieties were to be registered in South Korea. In the meantime, there have been cases where products using brand names of Japanese-born products without permission are circulating at overseas markets. The Japanese government aims to increase exports of food as well as agriculture, forestry and fisheries products, which are worth about ¥900 billion annually, to ¥1 trillion by the end of this year. However, if Japanese brands continue to be used without permission in overseas markets, that could have a negative impact on Japanese producers and others. The Okayama chapter of the National Federation of Agricultural Cooperative Associations, which sells Shine Muscat under the brand name of “Hareou” (Fine sky and king), applied for the registration of the trademark in China, Hong Kong and Taiwan in 2017 and 2018. The registration was accepted in Hong Kong in October last year. In March, the agriculture ministry established an expert panel to discuss how to protect agricultural products developed in Japan. In terms of Japanese brands, how to protect fertilized eggs of wagyu beef cattle and other Japanese genetic resources from being flown abroad is also a challenge. 
In March, the Osaka prefectural police arrested three people, including a man who runs a grilled meat restaurant, on suspicion of violating the Domestic Animal Infectious Diseases Control Law and the Customs Law as they attempted to export wagyu sperm and fertilized eggs to China. According to investigative sources, they had allegedly smuggled the genetic resources to China at least four times. However, there are no laws in Japan to directly restrict the outflow of wagyu genetic resources overseas. A government expert panel started to discuss how to protect these resources in February.
# Copyright 2010 OpenStack Foundation
# All Rights Reserved.
#
#    Licensed under the Apache License, Version 2.0 (the "License"); you may
#    not use this file except in compliance with the License. You may obtain
#    a copy of the License at
#
#         http://www.apache.org/licenses/LICENSE-2.0
#
#    Unless required by applicable law or agreed to in writing, software
#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
#    License for the specific language governing permissions and limitations
#    under the License.

"""Common API helpers: pagination, sorting, link building and list filtering."""

import enum
import json
import os
import re
import urllib

from oslo_config import cfg
from oslo_log import log as logging
import webob

from cinder.api import api_utils
from cinder.api import microversions as mv
from cinder.common import constants
from cinder import exception
from cinder.i18n import _


api_common_opts = [
    cfg.IntOpt('osapi_max_limit',
               default=1000,
               help='The maximum number of items that a collection '
                    'resource returns in a single response'),
    cfg.StrOpt('resource_query_filters_file',
               default='/etc/cinder/resource_filters.json',
               help="Json file indicating user visible filter "
                    "parameters for list queries."),
]

CONF = cfg.CONF
CONF.import_opt('public_endpoint', 'cinder.api.views.versions')
CONF.register_opts(api_common_opts)

LOG = logging.getLogger(__name__)
# Lazily loaded contents of resource_query_filters_file; see
# _initialize_filters().
_FILTERS_COLLECTION = None
# Maps user-facing '~' (like-match) filter keys to their DB column names.
ATTRIBUTE_CONVERTERS = {'name~': 'display_name~',
                        'description~': 'display_description~'}

METADATA_TYPES = enum.Enum('METADATA_TYPES', 'user image')


def get_pagination_params(params, max_limit=None):
    """Return marker, limit, offset tuple from request.

    :param params: `wsgi.Request`'s GET dictionary, possibly containing
                   'marker', 'limit', and 'offset' variables. 'marker' is the
                   id of the last element the client has seen, 'limit' is the
                   maximum number of items to return and 'offset' is the
                   number of items to skip from the marker or from the first
                   element. If 'limit' is not specified, or > max_limit, we
                   default to max_limit. Negative values for either offset or
                   limit will cause exc.HTTPBadRequest() exceptions to be
                   raised. If no offset is present we'll default to 0 and if
                   no marker is present we'll default to None.
    :max_limit: Max value 'limit' return value can take
    :returns: Tuple (marker, limit, offset)
    """
    max_limit = max_limit or CONF.osapi_max_limit
    limit = _get_limit_param(params, max_limit)
    marker = _get_marker_param(params)
    offset = _get_offset_param(params)
    return marker, limit, offset


def _get_limit_param(params, max_limit=None):
    """Extract integer limit from request's dictionary or fail.

    Defaults to max_limit if not present and returns max_limit if present
    'limit' is greater than max_limit.
    """
    max_limit = max_limit or CONF.osapi_max_limit
    try:
        # NOTE: pop() removes the param so it is not re-validated elsewhere.
        limit = int(params.pop('limit', max_limit))
    except ValueError:
        msg = _('limit param must be an integer')
        raise webob.exc.HTTPBadRequest(explanation=msg)

    if limit < 0:
        msg = _('limit param must be positive')
        raise webob.exc.HTTPBadRequest(explanation=msg)
    # Cap the requested limit at the configured maximum.
    limit = min(limit, max_limit)
    return limit


def _get_marker_param(params):
    """Extract marker id from request's dictionary (defaults to None)."""
    return params.pop('marker', None)


def _get_offset_param(params):
    """Extract offset id from request's dictionary (defaults to 0) or fail."""
    offset = params.pop('offset', 0)
    return api_utils.validate_integer(offset,
                                      'offset',
                                      0,
                                      constants.DB_MAX_INT)


def limited(items, request, max_limit=None):
    """Return a slice of items according to requested offset and limit.

    :param items: A sliceable entity
    :param request: ``wsgi.Request`` possibly containing 'offset' and 'limit'
                    GET variables. 'offset' is where to start in the list,
                    and 'limit' is the maximum number of items to return. If
                    'limit' is not specified, 0, or > max_limit, we default
                    to max_limit. Negative values for either offset or limit
                    will cause exc.HTTPBadRequest() exceptions to be raised.
    :kwarg max_limit: The maximum number of items to return from 'items'
    """
    max_limit = max_limit or CONF.osapi_max_limit
    marker, limit, offset = get_pagination_params(request.GET.copy(),
                                                  max_limit)
    # limit == 0 means "no explicit limit": fall back to max_limit.
    range_end = offset + (limit or max_limit)
    return items[offset:range_end]


def get_sort_params(params, default_key='created_at', default_dir='desc'):
    """Retrieves sort keys/directions parameters.

    Processes the parameters to create a list of sort keys and sort
    directions that correspond to either the 'sort' parameter or the
    'sort_key' and 'sort_dir' parameter values. The value of the 'sort'
    parameter is a comma- separated list of sort keys, each key is optionally
    appended with ':<sort_direction>'.

    Note that the 'sort_key' and 'sort_dir' parameters are deprecated in
    kilo and an exception is raised if they are supplied with the 'sort'
    parameter.

    The sort parameters are removed from the request parameters by this
    function.

    :param params: webob.multidict of request parameters (from
                   cinder.api.openstack.wsgi.Request.params)
    :param default_key: default sort key value, added to the list if no
                        sort keys are supplied
    :param default_dir: default sort dir value, added to the list if the
                        corresponding key does not have a direction
                        specified
    :returns: list of sort keys, list of sort dirs
    :raise webob.exc.HTTPBadRequest: If both 'sort' and either 'sort_key'
                                     or 'sort_dir' are supplied parameters
    """
    if 'sort' in params and ('sort_key' in params or 'sort_dir' in params):
        msg = _("The 'sort_key' and 'sort_dir' parameters are deprecated and "
                "cannot be used with the 'sort' parameter.")
        raise webob.exc.HTTPBadRequest(explanation=msg)
    sort_keys = []
    sort_dirs = []
    if 'sort' in params:
        for sort in params.pop('sort').strip().split(','):
            # Each entry is "key" or "key:dir".
            sort_key, _sep, sort_dir = sort.partition(':')
            if not sort_dir:
                sort_dir = default_dir
            sort_keys.append(sort_key.strip())
            sort_dirs.append(sort_dir.strip())
    else:
        # Deprecated single-key form.
        sort_key = params.pop('sort_key', default_key)
        sort_dir = params.pop('sort_dir', default_dir)
        sort_keys.append(sort_key.strip())
        sort_dirs.append(sort_dir.strip())
    return sort_keys, sort_dirs


def get_request_url(request):
    """Return the request URL, honoring an X-Forwarded-Host proxy header."""
    url = request.application_url
    headers = request.headers
    forwarded = headers.get('X-Forwarded-Host')
    if forwarded:
        url_parts = list(urllib.parse.urlsplit(url))
        # X-Forwarded-Host may be a comma-separated chain; use the last hop.
        url_parts[1] = re.split(r',\s?', forwarded)[-1]
        url = urllib.parse.urlunsplit(url_parts).rstrip('/')
    return url


def remove_version_from_href(href):
    """Removes the first API version from the href.

    Given: 'http://cinder.example.com/v1.1/123'
    Returns: 'http://cinder.example.com/123'

    Given: 'http://cinder.example.com/v1.1'
    Returns: 'http://cinder.example.com'

    Given: 'http://cinder.example.com/volume/drivers/v1.1/flashsystem'
    Returns: 'http://cinder.example.com/volume/drivers/flashsystem'
    """
    parsed_url = urllib.parse.urlsplit(href)
    url_parts = parsed_url.path.split('/')

    # NOTE: this should match vX.X or vX
    expression = re.compile(r'^v([0-9]+|[0-9]+\.[0-9]+)(/.*|$)')
    for x in range(len(url_parts)):
        if expression.match(url_parts[x]):
            del url_parts[x]
            break

    new_path = '/'.join(url_parts)

    if new_path == parsed_url.path:
        msg = 'href %s does not contain version' % href
        LOG.debug(msg)
        raise ValueError(msg)

    parsed_url = list(parsed_url)
    parsed_url[2] = new_path
    return urllib.parse.urlunsplit(parsed_url)


class ViewBuilder(object):
    """Model API responses as dictionaries."""

    # Subclasses set this to the URL path segment for their resource
    # collection (e.g. 'volumes').
    _collection_name = None

    def _get_links(self, request, identifier):
        """Return the standard self/bookmark link pair for one resource."""
        return [{"rel": "self",
                 "href": self._get_href_link(request, identifier), },
                {"rel": "bookmark",
                 "href": self._get_bookmark_link(request, identifier), }]

    def _get_next_link(self, request, identifier, collection_name):
        """Return href string with proper limit and marker params."""
        params = request.params.copy()
        params["marker"] = identifier
        prefix = self._update_link_prefix(get_request_url(request),
                                          CONF.public_endpoint)
        url = os.path.join(prefix,
                           request.environ["cinder.context"].project_id,
                           collection_name)
        return "%s?%s" % (url, urllib.parse.urlencode(params))

    def _get_href_link(self, request, identifier):
        """Return an href string pointing to this object."""
        prefix = self._update_link_prefix(get_request_url(request),
                                          CONF.public_endpoint)
        return os.path.join(prefix,
                            request.environ["cinder.context"].project_id,
                            self._collection_name,
                            str(identifier))

    def _get_bookmark_link(self, request, identifier):
        """Create a URL that refers to a specific resource."""
        # Bookmark links are version-independent.
        base_url = remove_version_from_href(get_request_url(request))
        base_url = self._update_link_prefix(base_url,
                                            CONF.public_endpoint)
        return os.path.join(base_url,
                            request.environ["cinder.context"].project_id,
                            self._collection_name,
                            str(identifier))

    def _get_collection_links(self, request, items, collection_name,
                              item_count=None, id_key="uuid"):
        """Retrieve 'next' link, if applicable.

        The next link is included if we are returning as many items as we can,
        given the restrictions of limit optional request parameter and
        osapi_max_limit configuration parameter as long as we are returning
        some elements.

        So we return next link if:

        1) 'limit' param is specified and equal to the number of items.
        2) 'limit' param is NOT specified and the number of items is
           equal to CONF.osapi_max_limit.

        :param request: API request
        :param items: List of collection items
        :param collection_name: Name of collection, used to generate the
                                next link for a pagination query
        :param item_count: Length of the list of the original collection
                           items
        :param id_key: Attribute key used to retrieve the unique ID, used
                       to generate the next link marker for a pagination query
        :returns: links
        """
        item_count = item_count or len(items)
        limit = _get_limit_param(request.GET.copy())
        if len(items) and limit <= item_count:
            return self._generate_next_link(items, id_key, request,
                                            collection_name)

        return []

    def _generate_next_link(self, items, id_key, request,
                            collection_name):
        """Build the 'next' pagination link, using the last item as marker."""
        links = []
        last_item = items[-1]
        if id_key in last_item:
            last_item_id = last_item[id_key]
        else:
            last_item_id = last_item["id"]
        links.append({
            "rel": "next",
            "href": self._get_next_link(request, last_item_id,
                                        collection_name),
        })
        return links

    def _update_link_prefix(self, orig_url, prefix):
        """Rewrite orig_url's scheme/host/path-prefix to match *prefix*."""
        if not prefix:
            return orig_url
        url_parts = list(urllib.parse.urlsplit(orig_url))
        prefix_parts = list(urllib.parse.urlsplit(prefix))
        # Replace scheme and netloc, and prepend the prefix path.
        url_parts[0:2] = prefix_parts[0:2]
        url_parts[2] = prefix_parts[2] + url_parts[2]

        return urllib.parse.urlunsplit(url_parts).rstrip('/')


def get_cluster_host(req, params, cluster_version=None):
    """Get cluster and host from the parameters.

    This method checks the presence of cluster and host parameters and
    returns them depending on the cluster_version.

    If cluster_version is False we will never return the cluster_name and we
    will require the presence of the host parameter.

    If cluster_version is None we will always check for the presence of the
    cluster parameter, and if cluster_version is a string with a version we
    will only check for the presence of the parameter if the version of the
    request is not less than it.  In both cases we will require one and only
    one parameter, host or cluster.
    """
    if (cluster_version is not False and
            req.api_version_request.matches(cluster_version)):
        cluster_name = params.get('cluster')
        msg = _('One and only one of cluster and host must be set.')
    else:
        cluster_name = None
        msg = _('Host field is missing.')

    host = params.get('host')
    # Exactly one of cluster/host must be supplied; bool equality means
    # both-present or both-absent, which is invalid either way.
    if bool(cluster_name) == bool(host):
        raise exception.InvalidInput(reason=msg)
    return cluster_name, host


def _initialize_filters():
    """Load the resource filters JSON file into _FILTERS_COLLECTION once."""
    global _FILTERS_COLLECTION
    if _FILTERS_COLLECTION:
        return

    if not os.path.exists(CONF.resource_query_filters_file):
        # Missing file is logged but not fatal; filtering is simply
        # unconfigured in that case.
        LOG.error(
            "resource query filters file does not exist: %s",
            CONF.resource_query_filters_file)
        return

    with open(CONF.resource_query_filters_file, 'r') as filters_file:
        _FILTERS_COLLECTION = json.load(filters_file)


def get_enabled_resource_filters(resource=None):
    """Get list of configured/allowed filters for the specified resource.

    This method checks resource_query_filters_file and returns dictionary
    which contains the specified resource and its allowed filters:

    .. code-block:: json

        {
            "resource": ["filter1", "filter2", "filter3"]
        }

    if resource is not specified, all of the configuration will be returned,
    and if the resource is not found, empty dict will be returned.
    """
    try:
        _initialize_filters()
        if not resource:
            return _FILTERS_COLLECTION
        else:
            return {resource: _FILTERS_COLLECTION[resource]}
    except Exception:
        LOG.debug("Failed to collect resource %s's filters.", resource)
        return {}


def get_time_comparsion_operators():
    """Get list of time comparsion operators.

    This method returns list which contains the allowed comparsion
    operators.
    """
    # NOTE(review): 'comparsion' [sic] — the misspelled name is kept because
    # it is part of the public module API.
    return ["gt", "gte", "eq", "neq", "lt", "lte"]


def convert_filter_attributes(filters, resource):
    """Rename user-facing '~' filter keys to their DB attribute names in place."""
    for key in filters.copy().keys():
        if resource in ['volume', 'backup',
                        'snapshot'] and key in ATTRIBUTE_CONVERTERS.keys():
            filters[ATTRIBUTE_CONVERTERS[key]] = filters[key]
            filters.pop(key)


def reject_invalid_filters(context, filters, resource,
                           enable_like_filter=False):
    """Raise HTTPBadRequest if *filters* contains keys not enabled for *resource*."""
    invalid_filters = []
    for key in filters.copy().keys():
        try:
            # Only ASCII characters can be valid filter keys,
            # in PY2/3, the key can be either unicode or string.
            if isinstance(key, str):
                key.encode('ascii')
            else:
                key.decode('ascii')
        except (UnicodeEncodeError, UnicodeDecodeError):
            raise webob.exc.HTTPBadRequest(
                explanation=_('Filter keys can only contain '
                              'ASCII characters.'))

    if context.is_admin and resource not in ['pool']:
        # Allow all options except resource is pool
        # pool API is only available for admin
        return
    # Check the configured filters against those passed in resource
    configured_filters = get_enabled_resource_filters(resource)
    if configured_filters:
        configured_filters = configured_filters[resource]
    else:
        configured_filters = []
    for key in filters.copy().keys():
        if not enable_like_filter:
            if key not in configured_filters:
                invalid_filters.append(key)
        else:
            # If 'key~' is configured, both 'key' and 'key~' are valid.
            if not (key in configured_filters or
                    "%s~" % key in configured_filters):
                invalid_filters.append(key)
    if invalid_filters:
        if 'all_tenants' in invalid_filters:
            # NOTE: this is a special case: the cinderclient always adds
            # 'all_tenants', so we don't want to hold that against a non-admin
            # user and we silently ignore it.  See Bug #1917574.
            invalid_filters.remove('all_tenants')
            filters.pop('all_tenants')
            if len(invalid_filters) == 0:
                return
        raise webob.exc.HTTPBadRequest(
            explanation=_('Invalid filters %s are found in query '
                          'options.') % ','.join(invalid_filters))


def process_general_filtering(resource):
    """Decorator factory: validate/convert general filters for *resource*.

    When the request microversion supports RESOURCE_FILTER, filters are
    validated (and '~' like-filters allowed from LIKE_FILTER on) and
    converted in place; otherwise the wrapped resource-specific filtering
    function is invoked.
    """
    def wrapper(process_non_general_filtering):
        def _decorator(*args, **kwargs):
            req_version = kwargs.get('req_version')
            filters = kwargs.get('filters')
            context = kwargs.get('context')
            if req_version.matches(mv.RESOURCE_FILTER):
                support_like = False
                if req_version.matches(mv.LIKE_FILTER):
                    support_like = True
                reject_invalid_filters(context, filters,
                                       resource, support_like)
                convert_filter_attributes(filters, resource)
            else:
                process_non_general_filtering(*args, **kwargs)
        return _decorator
    return wrapper
WordStream have put together an infographic that highlights the top 20 most competitive keyword categories in Google AdWords. © 2011 WordStream, Inc. Gemius, in cooperation with IAB Europe, has recently released the second “Do you CEE” report. This report, apart from the up-to-date data from 15 CEE markets, is enriched with detailed studies of the online reality in 2 additional countries – that is, Belarus and Turkey.
import binascii
import click
import jinja2
import os
import shutil

from . import app, tasks

# Paths for the instance-local configuration (never committed to VCS).
instance_path = os.path.join(os.path.dirname(app.root_path), 'instance')
local_cfg_path = os.path.join(instance_path, 'local.cfg')
local_cfg_template_path = os.path.join(instance_path, 'local.cfg.tmpl')


def secret(length):
    """Return a random hex string of exactly *length* characters.

    os.urandom(length) yields 2*length hex chars; the slice trims to the
    requested size.
    """
    return binascii.hexlify(os.urandom(length)).decode('utf-8')[:length]


@app.cli.command()
def worker():
    """Run celery worker."""
    # Replaces the current process with the celery worker (no return).
    os.execvp('celery', ['celery', '--app=youtube_sync.celery',
                         '--loglevel=info', 'worker'])


@app.cli.command()
@click.option('--force', is_flag=True)
@click.option('--dan-bin', type=click.Path(exists=True),
              help='Path to Dynamic Audio Normalizer bin')
@click.option('--remote-host', type=click.STRING, help='Destination server host')
@click.option('--remote-user', type=click.STRING, help='Destination server user')
@click.option('--remote-path', type=click.Path(), help='Destination server path')
def setup_local_cfg(force, dan_bin, remote_host, remote_user, remote_path):
    """Setup instance/local.cfg.

    Renders local.cfg.tmpl with freshly generated secrets and the optional
    remote-destination/DAN settings. Refuses to overwrite an existing
    local.cfg unless --force is given.
    """
    if os.path.exists(local_cfg_path):
        click.echo('Already exists: %s' % local_cfg_path)
        if not force:
            click.echo('Exiting...')
            return

    if remote_host and remote_user and remote_path:
        remote = '%s@%s:%s' % (remote_user, remote_host, remote_path)
    else:
        remote = None

    # FIX: renamed from `vars`, which shadowed the builtin.
    template_vars = {
        'CSRF_SESSION_KEY': secret(64),
        'SECRET_KEY': secret(64),
        'ADMIN_PASSWORD': secret(8),
        'DESTINATION_SERVER_PATH': remote,
        'DYNAMIC_AUDIO_NORMALIZER_BIN': dan_bin,
    }

    # FIX: use context managers instead of bare open().read()/write() so the
    # file handles are closed deterministically.
    with open(local_cfg_template_path) as tmpl_file:
        template = jinja2.Template(tmpl_file.read())
    click.echo('Updating/creating: %s' % local_cfg_path)
    with open(local_cfg_path, 'w') as cfg_file:
        cfg_file.write(template.render(template_vars) + '\n')


@app.cli.command()
def add_test_job():
    """Add a test job to the worker queue."""
    youtube_id = 'pn7w-6leiJA'
    click.echo('Adding job with youtube_id = %s' % youtube_id)
    result = tasks.make_audio.delay(youtube_id)
    click.echo('Added task: %s' % result)
Today the achievements of the Stratford Festival are recognized around the world. But in small town Canada in the early 1950s, that success was far from certain. Starting with nothing but a love for their town, Tom Patterson and the determined citizens of Stratford, Ontario, overcame impossible odds to create the first season in 1953. The success of that first year set the stage for all the triumphs that were to follow. First Stage tells the full story of the birth of the Stratford Festival. How the funds were raised. How the stage was designed and built. And, most amazing of all, how Alec Guinness, Tyrone Guthrie, Irene Worth and Tanya Moiseiwitsch agreed to come to Canada to perform in a tent for little more than expenses. But First Stage is also the story of Canada at mid-century, a time when anything — even having the greatest Shakespearean Festival in the world — was within our grasp.
#coding=utf8
# Doctest suite for uliweb.utils.pyini (SortedDict, Section, Ini).
# NOTE(review): Python 2 syntax throughout (print statements, `except E, e`,
# u'' literals, StringIO module) — run under a Python 2 doctest runner only.
from uliweb.utils.pyini import *

# SortedDict: dict that preserves insertion order and allows attribute access.
def test_sorteddict():
    """
    >>> d = SortedDict()
    >>> d
    <SortedDict {}>
    >>> d.name = 'limodou'
    >>> d['class'] = 'py'
    >>> d
    <SortedDict {'class':'py', 'name':'limodou'}>
    >>> d.keys()
    ['name', 'class']
    >>> d.values()
    ['limodou', 'py']
    >>> d['class']
    'py'
    >>> d.name
    'limodou'
    >>> d.get('name', 'default')
    'limodou'
    >>> d.get('other', 'default')
    'default'
    >>> 'name' in d
    True
    >>> 'other' in d
    False
    >>> print d.other
    None
    >>> try:
    ...     d['other']
    ... except Exception, e:
    ...     print e
    'other'
    >>> del d['class']
    >>> del d['name']
    >>> d
    <SortedDict {}>
    >>> d['name'] = 'limodou'
    >>> d.pop('other', 'default')
    'default'
    >>> d.pop('name')
    'limodou'
    >>> d
    <SortedDict {}>
    >>> d.update({'class':'py', 'attribute':'border'})
    >>> d
    <SortedDict {'attribute':'border', 'class':'py'}>
    """

# Section: one [name] block of an ini file, with per-key and per-section comments.
def test_section():
    """
    >>> s = Section('default', "#comment")
    >>> print s
    #comment
    [default]
    <BLANKLINE>
    >>> s.name = 'limodou'
    >>> s.add_comment('name', '#name')
    >>> s.add_comment(comments='#change')
    >>> print s
    #change
    [default]
    #name
    name = 'limodou'
    <BLANKLINE>
    >>> del s.name
    >>> print s
    #change
    [default]
    <BLANKLINE>
    """

# Ini: building a document section-by-section and serializing it.
def test_ini1():
    """
    >>> x = Ini()
    >>> s = x.add('default')
    >>> print x
    #coding=utf-8
    [default]
    <BLANKLINE>
    >>> s['abc'] = 'name'
    >>> print x
    #coding=utf-8
    [default]
    abc = 'name'
    <BLANKLINE>
    """

# Mutable values (lists/dicts) are merged on reassignment rather than replaced.
def test_ini2():
    """
    >>> x = Ini()
    >>> x['default'] = Section('default', "#comment")
    >>> x.default.name = 'limodou'
    >>> x.default['class'] = 'py'
    >>> x.default.list = ['abc']
    >>> print x
    #coding=utf-8
    #comment
    [default]
    name = 'limodou'
    class = 'py'
    list = ['abc']
    <BLANKLINE>
    >>> x.default.list = ['cde'] #for mutable object will merge the data, including dict type
    >>> print x.default.list
    ['abc', 'cde']
    >>> x.default.d = {'a':'a'}
    >>> x.default.d = {'b':'b'}
    >>> print x.default.d
    {'a': 'a', 'b': 'b'}
    """

# Names injected through env= (here the lazy gettext `_`) appear in x.keys().
def test_gettext():
    """
    >>> from uliweb.i18n import gettext_lazy as _
    >>> x = Ini(env={'_':_})
    >>> x['default'] = Section('default')
    >>> x.default.option = _('Hello')
    >>> x.keys()
    ['set', '_', 'gettext_lazy', 'default']
    """

# add(..., replace=True) overwrites a list value instead of merging into it.
def test_replace():
    """
    >>> x = Ini()
    >>> x['default'] = Section('default')
    >>> x.default.option = ['a']
    >>> x.default.option
    ['a']
    >>> x.default.option = ['b']
    >>> x.default.option
    ['a', 'b']
    >>> x.default.add('option', ['c'], replace=True)
    >>> x.default.option
    ['c']
    >>> print x.default
    [default]
    option <= ['c']
    <BLANKLINE>
    """

# set_var/get_var/del_var address keys by 'section/key' paths.
def test_set_var():
    """
    >>> x = Ini()
    >>> x.set_var('default/key', 'name')
    True
    >>> print x
    #coding=utf-8
    [default]
    key = 'name'
    <BLANKLINE>
    >>> x.set_var('default/key/name', 'hello')
    True
    >>> print x
    #coding=utf-8
    [default]
    key = 'name'
    key/name = 'hello'
    <BLANKLINE>
    >>> x.get_var('default/key')
    'name'
    >>> x.get_var('default/no')
    >>> x.get_var('defaut/no', 'no')
    'no'
    >>> x.del_var('default/key')
    True
    >>> print x
    #coding=utf-8
    [default]
    key/name = 'hello'
    <BLANKLINE>
    >>> x.get_var('default/key/name')
    'hello'
    >>> x.get_var('default')
    <Section {'key/name':'hello'}>
    """

# Ini.update() accepts a flat {'section/key': value} mapping.
def test_update():
    """
    >>> x = Ini()
    >>> x.set_var('default/key', 'name')
    True
    >>> d = {'default/key':'limodou', 'default/b':123}
    >>> x.update(d)
    >>> print x
    #coding=utf-8
    [default]
    key = 'limodou'
    b = 123
    <BLANKLINE>
    """

# uni_prt: unicode-safe repr of arbitrary values.
def test_uni_print():
    """
    >>> a = ()
    >>> uni_prt(a, 'utf-8')
    '()'
    >>> a = (1,2)
    >>> uni_prt(a)
    '(1, 2)'
    """

# Triple-quoted values keep embedded newlines and non-ASCII text.
# NOTE(review): shadowed by the second test_triple_string definition below —
# under plain doctest collection only the later one runs; confirm intent.
def test_triple_string():
    """
    >>> from StringIO import StringIO
    >>> buf = StringIO(\"\"\"
    ... #coding=utf8
    ... [DEFAULT]
    ... a = u'''hello
    ... 中文
    ... '''
    ... \"\"\")
    >>> x = Ini()
    >>> x.read(buf)
    >>> print repr(x.DEFAULT.a)
    u'hello\\n\\u4e2d\\u6587\\n'
    """

# Round-trip: read an ini with {{...}} substitutions and cross-section
# references, then serialize the evaluated result.
def test_save():
    """
    >>> from uliweb.i18n import gettext_lazy as _, i18n_ini_convertor
    >>> from StringIO import StringIO
    >>> x = Ini(env={'_':_}, convertors=i18n_ini_convertor)
    >>> buf = StringIO(\"\"\"
    ... [default]
    ... option = _('English')
    ... str = 'str'
    ... str1 = "str"
    ... float = 1.2
    ... int = 1
    ... list = [1, 'str', 0.12]
    ... dict = {'a':'b', 1:2}
    ... s = u'English'
    ... [other]
    ... option = 'default'
    ... options1 = '{{option}} xxx'
    ... options2 = '{{default.int}}'
    ... options3 = option
    ... options4 = '-- {{default.option}} --'
    ... options5 = '-- {{default.s}} --'
    ... options6 = u'English {{default.s}} --'
    ... options7 = default.str + default.str1
    ... \"\"\")
    >>> x.read(buf)
    >>> print x
    #coding=UTF-8
    <BLANKLINE>
    [default]
    option = _('English')
    str = 'str'
    str1 = 'str'
    float = 1.2
    int = 1
    list = [1, 'str', 0.12]
    dict = {'a': 'b', 1: 2}
    s = u'English'
    [other]
    option = 'default'
    options1 = 'default xxx'
    options2 = '1'
    options3 = 'default'
    options4 = '-- English --'
    options5 = '-- English --'
    options6 = u'English English --'
    options7 = 'strstr'
    <BLANKLINE>
    """

# merge_data: recursive merge of lists / dicts / sets; scalars pass through.
def test_merge_data():
    """
    >>> from uliweb.utils.pyini import merge_data
    >>> a = [[1,2,3], [2,3,4], [4,5]]
    >>> b = [{'a':[1,2], 'b':{'a':[1,2]}}, {'a':[2,3], 'b':{'a':['b'], 'b':2}}]
    >>> c = [set([1,2,3]), set([2,4])]
    >>> print merge_data(a)
    [1, 2, 3, 4, 5]
    >>> print merge_data(b)
    {'a': [1, 2, 3], 'b': {'a': [1, 2, 'b'], 'b': 2}}
    >>> print merge_data(c)
    set([1, 2, 3, 4])
    >>> print merge_data([2])
    2
    """

# lazy=True defers evaluation of substitutions until freeze() is called.
def test_lazy():
    """
    >>> from uliweb.i18n import gettext_lazy as _, i18n_ini_convertor
    >>> from StringIO import StringIO
    >>> x = Ini(env={'_':_}, convertors=i18n_ini_convertor, lazy=True)
    >>> buf = StringIO(\"\"\"
    ... [default]
    ... option = _('English')
    ... str = 'str'
    ... str1 = "str"
    ... float = 1.2
    ... int = 1
    ... list = [1, 'str', 0.12]
    ... dict = {'a':'b', 1:2}
    ... s = u'English'
    ... [other]
    ... option = 'default'
    ... options1 = '{{option}} xxx'
    ... options2 = '{{default.int}}'
    ... options3 = option
    ... options4 = '-- {{default.option}} --'
    ... options5 = '-- {{default.s}} --'
    ... options6 = u'English {{default.s}} --'
    ... options7 = default.str + default.str1
    ... \"\"\")
    >>> x.read(buf)
    >>> x.freeze()
    >>> print x
    #coding=UTF-8
    <BLANKLINE>
    [default]
    option = _('English')
    str = 'str'
    str1 = 'str'
    float = 1.2
    int = 1
    list = [1, 'str', 0.12]
    dict = {'a': 'b', 1: 2}
    s = u'English'
    [other]
    option = 'default'
    options1 = 'default xxx'
    options2 = '1'
    options3 = 'default'
    options4 = '-- English --'
    options5 = '-- English --'
    options6 = u'English English --'
    options7 = 'strstr'
    <BLANKLINE>
    """

# A later read() overrides earlier values; lazy substitutions resolve to the
# final value at freeze() time.
def test_multiple_read():
    """
    >>> from uliweb.i18n import gettext_lazy as _, i18n_ini_convertor
    >>> from StringIO import StringIO
    >>> x = Ini(env={'_':_}, convertors=i18n_ini_convertor, lazy=True)
    >>> buf = StringIO(\"\"\"
    ... [default]
    ... option = 'abc'
    ... [other]
    ... option = default.option
    ... option1 = '{{option}} xxx'
    ... option2 = '{{default.option}}'
    ... option3 = '{{other.option}}'
    ... \"\"\")
    >>> x.read(buf)
    >>> buf1 = StringIO(\"\"\"
    ... [default]
    ... option = 'hello'
    ... \"\"\")
    >>> x.read(buf1)
    >>> x.freeze()
    >>> print x
    #coding=UTF-8
    <BLANKLINE>
    [default]
    option = 'hello'
    [other]
    option = 'hello'
    option1 = 'hello xxx'
    option2 = 'hello'
    option3 = 'hello'
    <BLANKLINE>
    """

# Non-ASCII (UTF-8) values survive read/substitute/serialize round trips.
def test_chinese():
    """
    >>> from uliweb.i18n import gettext_lazy as _, i18n_ini_convertor
    >>> from StringIO import StringIO
    >>> x = Ini(env={'_':_}, convertors=i18n_ini_convertor)
    >>> buf = StringIO(\"\"\"#coding=utf-8
    ... [default]
    ... option = '中文'
    ... option1 = u'中文'
    ... option2 = _('中文')
    ... option3 = '{{option}}'
    ... [other]
    ... x = '中文 {{default.option}}'
    ... x1 = u'中文 {{default.option}}'
    ... x2 = u'xbd {{default.option}}'
    ... \"\"\")
    >>> x.read(buf)
    >>> print x
    #coding=utf-8
    [default]
    option = '\xe4\xb8\xad\xe6\x96\x87'
    option1 = u'\xe4\xb8\xad\xe6\x96\x87'
    option2 = _('\xe4\xb8\xad\xe6\x96\x87')
    option3 = '\xe4\xb8\xad\xe6\x96\x87'
    [other]
    x = '\xe4\xb8\xad\xe6\x96\x87 \xe4\xb8\xad\xe6\x96\x87'
    x1 = u'\xe4\xb8\xad\xe6\x96\x87 \xe4\xb8\xad\xe6\x96\x87'
    x2 = u'xbd \xe4\xb8\xad\xe6\x96\x87'
    <BLANKLINE>
    >>> print repr(x.other.x1)
    u'\u4e2d\u6587 \u4e2d\u6587'
    >>> x.keys()
    ['set', '_', 'gettext_lazy', 'default', 'other']
    """

# Set literals ({1,2,3}) and set([...]) both parse; re-read merges sets.
def test_set():
    """
    >>> from StringIO import StringIO
    >>> x = Ini()
    >>> buf = StringIO(\"\"\"#coding=utf-8
    ... [default]
    ... set1 = {1,2,3}
    ... set2 = set([1,2,3])
    ... \"\"\")
    >>> x.read(buf)
    >>> print x
    #coding=utf-8
    [default]
    set1 = set([1, 2, 3])
    set2 = set([1, 2, 3])
    <BLANKLINE>
    >>> buf2 = StringIO(\"\"\"#coding=utf-8
    ... [default]
    ... set1 = {5,3}
    ... \"\"\")
    >>> x.read(buf2)
    >>> print x.default.set1
    set([1, 2, 3, 5])
    """

# raw=True keeps unevaluated text and records filename:line provenance.
# NOTE(review): redefines test_triple_string above — the earlier doctest is
# masked at module level; probably meant to have a distinct name.
def test_triple_string():
    """
    >>> from StringIO import StringIO
    >>> buf = StringIO(\"\"\"
    ... #coding=utf8
    ... [DEFAULT]
    ... a = 'b'
    ... \"\"\")
    >>> x = Ini(raw=True)
    >>> x.set_filename('test.ini')
    >>> x.read(buf)
    >>> print x.DEFAULT
    <BLANKLINE>
    #coding=utf8
    test.ini :0003 [DEFAULT]
    test.ini :0004 a = 'b'
    <BLANKLINE>
    """

# `b <= MENUS2012` references an earlier variable defined in the same section.
def test_var_in_section():
    """
    >>> from StringIO import StringIO
    >>> buf = StringIO(\"\"\"
    ... #coding=utf8
    ... [DEFAULT]
    ... MENUS2012 = [
    ...     ('about', _('关于大会'), '/2012/about'),
    ...     ('schedulebj', _('北京日程'), '/2012/schedulebj'),
    ...     ('schedulesh', _('上海日程'), '/2012/schedulesh'),
    ...     ('Hackathon', _('编程马拉松'), 'http://www.douban.com/event/17299206/'),
    ...     ('registration', _('报名'), '/2012/registration'),
    ...     ('volunteer', _('志愿'), '/2012/volunteer'),
    ...     ('sponsors', _('赞助'), '/2012/sponsors'),
    ...     ('Weibo', _('微博'), 'http://weibo.com/pyconcn'),
    ...     ('2011', _('2011'), '/2011'),
    ... ]
    ... a = [1,2,3]
    ... b <= MENUS2012
    ... \"\"\")
    >>> x = Ini()
    >>> x.set_filename('test.ini')
    >>> x.read(buf)
    >>> print x.DEFAULT.b[0][0]
    about
    """

# $VAR / ${VAR} environment-variable expansion inside values and dict keys.
def test_env_var_1():
    """
    >>> from uliweb.i18n import gettext_lazy as _, i18n_ini_convertor
    >>> from StringIO import StringIO
    >>> import os
    >>> os.environ['TEST'] = 'test'
    >>> os.environ['OK'] = '3'
    >>> x = Ini(lazy=True)
    >>> buf = StringIO(\"\"\"
    ... [default]
    ... a = '$TEST/ok'
    ... c = '${TEST}ok'
    ... b = $OK
    ... d = {'name_$TEST':'$OK'}
    ... \"\"\")
    >>> x = Ini()
    >>> x.set_filename('test.ini')
    >>> x.read(buf)
    >>> x.freeze()
    >>> print repr(x.default.a)
    'test/ok'
    >>> print repr(x.default.b)
    3
    >>> print repr(x.default.c)
    'testok'
    >>> print repr(x.default.d)
    {'name_test': '3'}
    >>> x = Ini()
    >>> buf = StringIO(\"\"\"
    ... [default]
    ... a = '$TEST/ok'
    ... c = '${TEST}ok'
    ... b = $OK
    ... \"\"\")
    >>> x = Ini()
    >>> x.set_filename('test.ini')
    >>> x.read(buf)
    >>> print repr(x.default.a)
    'test/ok'
    >>> print repr(x.default.b)
    3
    >>> print repr(x.default.c)
    'testok'
    """

# import_env=False leaves $VAR / ${VAR} placeholders unexpanded.
def test_env_var_2():
    """
    >>> from uliweb.i18n import gettext_lazy as _, i18n_ini_convertor
    >>> from StringIO import StringIO
    >>> import os
    >>> os.environ['TEST'] = 'test'
    >>> os.environ['OK'] = '3'
    >>> x = Ini(lazy=True)
    >>> buf = StringIO(\"\"\"
    ... [default]
    ... a = '$TEST/ok'
    ... c = '${TEST}ok'
    ... \"\"\")
    >>> x = Ini(import_env=False)
    >>> x.set_filename('test.ini')
    >>> x.read(buf)
    >>> x.freeze()
    >>> print x
    #coding=UTF-8
    <BLANKLINE>
    [default]
    a = '$TEST/ok'
    c = '${TEST}ok'
    <BLANKLINE>
    """

# An env var named like a section ([MAIL]) must not clobber the section.
def test_email():
    """
    >>> from uliweb.i18n import gettext_lazy as _, i18n_ini_convertor
    >>> from StringIO import StringIO
    >>> import os
    >>> os.environ['MAIL'] = 'test'
    >>> x = Ini(import_env=True, lazy=True)
    >>> buf = StringIO(\"\"\"
    ... [MAIL]
    ... host = '128.192.168.2'
    ... \"\"\")
    >>> x.set_filename('test.ini')
    >>> x.read(buf)
    >>> x.freeze()
    >>> print x
    #coding=UTF-8
    <BLANKLINE>
    [MAIL]
    host = '128.192.168.2'
    <BLANKLINE>
    """

# pre_variables: #{name} placeholders substituted at read time.
def test_pre_var():
    """
    >>> from uliweb.i18n import gettext_lazy as _, i18n_ini_convertor
    >>> from StringIO import StringIO
    >>> import os
    >>> x = Ini(import_env=True, lazy=True, pre_variables={'appname':'test'})
    >>> buf = StringIO(\"\"\"
    ... [DEFAULT]
    ... model = '#{appname}.models.Model'
    ... \"\"\")
    >>> x.set_filename('test.ini')
    >>> x.read(buf)
    >>> x.freeze()
    >>> print x
    #coding=UTF-8
    <BLANKLINE>
    [DEFAULT]
    model = 'test.models.Model'
    <BLANKLINE>
    """

# Ini() can be seeded directly from a nested dict.
def test_dict_init():
    """
    >>> x = Ini({'default':{'abc':'name'}})
    >>> print x
    #coding=utf-8
    [default]
    abc = 'name'
    <BLANKLINE>
    """
"Sustainable development in modern lifestyles." by Gregory E. Mercier, Keith D. Stone et al. Sustainable development in modern lifestyles. This project involved the use of sustainable development in the design of a residential structure. The proposed development of the land and design of the house are an effort to create a sustainable future for both the family and the land for which it is suited. Analyses of human lifestyles, sustainable development techniques and green building methods and technologies were performed to place the house and guide its design. The house design and development provides cost-effective green alternatives that encourage a sustainable lifestyle.
# Pairs USGS stream-gauge discharge records (CSV) with daily PRISM precipitation
# rasters clipped to each gauge's watershed polygon, and writes a
# date / discharge / precip CSV per gauge.
# NOTE(review): Python 2 script (print statements); requires ArcGIS (arcpy)
# plus Spatial Analyst, dateutil and numpy, and hard-coded local paths.
import datetime
from dateutil import rrule
import arcpy, os
from arcpy import env
import numpy as np

# Set workspace to GIS gauge data location, loop through individual watersheds
arcpy.env.workspace = "C:\\Recharge_GIS\\nm_gauges.gdb"
fc = "nm_wtrs_11DEC15"
field = "USGS_Code"
# NOTE(review): SearchCursor is never closed/deleted; consider arcpy.da cursors
# in a `with` block.
cursor = arcpy.SearchCursor(fc)

# List csv gauge data, create list of gauge codes
folder = 'C:\\Users\David\\Documents\\Recharge\\Gauges\\Gauge_Data_HF_csv'
os.chdir(folder)
csvList = os.listdir(folder)
# make sure that this is getting the right string from the file name
# (assumes every file in the folder starts with an 8-digit USGS gauge code —
# a stray file here raises ValueError)
files = [int(name[:8]) for name in csvList]

# Create layer from polygon feature class so it is selectable by attribute
arcpy.env.workspace = "C:\\Recharge_GIS\\nm_gauges.gdb"
arcpy.MakeFeatureLayer_management("C:\\Recharge_GIS\\nm_gauges.gdb\\nm_wtrs_11DEC15", "wtr_layer")

# Loop through polygon features in watersheds layer, select polygon geometry by attribute
for row in cursor:
    gPoly = row.getValue(field)
    polyStr = str(gPoly)
    print gPoly
    gstr = arcpy.AddFieldDelimiters("C:\\Recharge_GIS\\nm_gauges.gdb\\nm_wtrs_11DEC15", field)
    sqlExp = gstr + " = " + polyStr
    print sqlExp
    # Selection result ("geo") is used below as the ExtractByMask clip geometry.
    geo = arcpy.SelectLayerByAttribute_management("wtr_layer", "NEW_SELECTION", sqlExp)

    # Get csv data from gauges and identify time interval of needed precip data
    if int(polyStr) in files:
        print "true"
        folder = 'C:\\Users\David\\Documents\\Recharge\\Gauges\\Gauge_Data_HF_csv'
        os.chdir(folder)
        pos = files.index(int(gPoly))
        recs = []
        fid = open(csvList[pos])
        lines = fid.readlines()[0:]
        fid.close()
        # Column 2 = timestamp, column 6 = discharge (per the source CSV layout
        # — TODO confirm against the gauge-file format).
        rows = [line.split(',') for line in lines]
        for line in rows:
            recs.append([datetime.datetime.strptime(line[2], '%m/%d/%Y %H:%M'),  # date
                         float(line[6])])  # discharge
        print "Data points: " + str(len(recs))
        qRecs = np.array(recs)

        # Make start and end dates correspond with available PRISM data (i.e., 1984-01-01 to 2013-12-31)
        start = qRecs[0, 0]
        beginPrecip = datetime.datetime(1984, 1, 1)
        if start < beginPrecip:
            start = beginPrecip
        print "Data start: " + str(start)
        end = qRecs[len(qRecs)-1, 0]
        endPrecip = datetime.datetime(2013, 12, 31)
        if end > endPrecip:
            end = endPrecip
        print "Data end: " + str(end)

        # Loop through raster data, clipping and creating arrays of data: Date Q Ppt
        rasSq = 1013.02**2/1000  # ppt [mm -> m] and cellsize (x*y) [m*m]
        precip = []
        date = []
        q = []  # NOTE(review): never used below
        for day in rrule.rrule(rrule.DAILY, dtstart=start, until=end):
            folder = "C:\\Recharge_GIS\\Precip\\800m\\Daily\\"
            yr = day.year
            # Pre-1992 rasters live in "<year>a" folders with a PRISM_NM_ prefix;
            # later years use "<year>" folders and a PRISM_NMHW2Buff_ prefix.
            if yr <= 1991:
                arcpy.env.overwriteOutput = True  # Ensure overwrite capability
                arcpy.env.workspace = folder + str(day.year) + "a"
                ras = folder + str(day.year) + "a\\" + "PRISM_NM_" + str(day.year) + day.strftime('%m') + day.strftime('%d') + ".tif"
                if arcpy.Exists(ras):
                    try:
                        arcpy.CheckOutExtension("Spatial")
                        mask = "C:\\Recharge_GIS\\nm_gauges.gdb\\nm_wtrs_11DEC15"  # NOTE(review): unused
                        rasPart = arcpy.sa.ExtractByMask(ras, geo)
                        if day == beginPrecip:
                            rasPart.save(folder + str(day.year) + "a\\" + str(gPoly) + "_rasterClipTest.tif")
                        arr = arcpy.RasterToNumPyArray(rasPart, nodata_to_value=0)
                        arrVal = np.multiply(arr, rasSq)
                        arrSum = arrVal.sum()
                        print "Sum of precip on " + str(day) + ": " + str(arrSum)
                        precip.append(arrSum)
                        date.append(day)
                    # NOTE(review): bare except silently drops whole days of data;
                    # narrow to the expected arcpy exceptions and log.
                    except:
                        pass
            if yr > 1991:
                arcpy.env.workspace = folder + str(day.year)
                ras = folder + str(day.year) + "\\" + "PRISM_NMHW2Buff_" + str(day.year) + day.strftime('%m') + day.strftime('%d') + ".tif"
                if arcpy.Exists(ras):
                    try:
                        arcpy.CheckOutExtension("Spatial")
                        mask = "C:\\Recharge_GIS\\nm_gauges.gdb\\nm_wtrs_11DEC15"  # NOTE(review): unused
                        rasPart = arcpy.sa.ExtractByMask(ras, geo)
                        if day == beginPrecip:
                            # NOTE(review): unlike the pre-1992 branch there is no
                            # path separator between year and gauge id — confirm
                            # the intended output filename.
                            rasPart.save(folder + str(day.year) + str(gPoly) + "_rasterClipTest.tif")
                        arr = arcpy.RasterToNumPyArray(rasPart, nodata_to_value=0)
                        arrVal = np.multiply(arr, rasSq)
                        arrSum = arrVal.sum()
                        print "Sum of precip on " + str(day) + ": " + str(arrSum)
                        precip.append(arrSum)
                        date.append(day)
                    except:
                        pass

        ppt = np.array(precip, dtype=float)
        date = [rec.strftime('%Y/%m/%d') for rec in date]
        add_precip = []
        # Attach the daily precip total to the 17:00 gauge reading of each day;
        # all other readings get 0.0.
        for rec in qRecs[:, 0]:
            dday = rec.strftime('%Y/%m/%d')
            try:
                if rec.hour == 17 and rec.minute == 00:
                    pos = date.index(dday)
                    ppt_apnd = ppt[pos]
                    add_precip.append(ppt_apnd)
                else:
                    add_precip.append(0.0)
            # NOTE(review): bare except here can desynchronize add_precip from
            # qRecs when date.index() misses, making column_stack below fail.
            except:
                pass
        ppt_arr = np.array(add_precip, dtype=float)
        data = np.column_stack((qRecs[:, 0], qRecs[:, 1], ppt_arr))
        # print data
        np.savetxt(('C:\\Users\David\\Documents\\Recharge\\Gauges\\Gauge_ppt_HF_csv\\' + str(gPoly) + "_date_q_ppt.csv"),
                   data, fmt=['%s', '%1.3f', '%1.3f'], delimiter=',')
        print "You have been saved!"
- Walnut legs feature a hand-carved textural effect. - This piece is made to order. Dimensions are customizable.
#lims from SBaaS_LIMS.lims_experiment_postgresql_models import * from SBaaS_LIMS.lims_sample_postgresql_models import * from .stage01_quantification_physiologicalRatios_postgresql_models import * from SBaaS_base.sbaas_base_query_update import sbaas_base_query_update from SBaaS_base.sbaas_base_query_drop import sbaas_base_query_drop from SBaaS_base.sbaas_base_query_initialize import sbaas_base_query_initialize from SBaaS_base.sbaas_base_query_insert import sbaas_base_query_insert from SBaaS_base.sbaas_base_query_select import sbaas_base_query_select from SBaaS_base.sbaas_base_query_delete import sbaas_base_query_delete from SBaaS_base.sbaas_template_query import sbaas_template_query class stage01_quantification_physiologicalRatios_query(sbaas_template_query): def initialize_supportedTables(self): '''Set the supported tables dict for ''' tables_supported = {'data_stage01_quantification_physiologicalRatios_averages':data_stage01_quantification_physiologicalRatios_averages, 'data_stage01_quantification_physiologicalRatios_replicates':data_stage01_quantification_physiologicalRatios_replicates, }; self.set_supportedTables(tables_supported); # Query sample names from data_stage01_quantification_physiologicalRatios_replicates def get_sampleNameAbbreviations_experimentID_dataStage01PhysiologicalRatiosReplicates(self,experiment_id_I,exp_type_I=4): '''Querry sample names (i.e. 
unknowns) that are used from the experiment''' try: sample_names = self.session.query(sample_description.sample_name_abbreviation).filter( data_stage01_quantification_physiologicalRatios_replicates.experiment_id.like(experiment_id_I), data_stage01_quantification_physiologicalRatios_replicates.sample_name_short.like(sample_description.sample_name_short), experiment.exp_type_id == exp_type_I, experiment.id.like(experiment_id_I), experiment.sample_name.like(sample.sample_name), sample.sample_id.like(sample_description.sample_id), data_stage01_quantification_physiologicalRatios_replicates.used_.is_(True)).group_by( sample_description.sample_name_abbreviation).order_by( sample_description.sample_name_abbreviation.asc()).all(); sample_names_O = []; for sn in sample_names: sample_names_O.append(sn.sample_name_abbreviation); return sample_names_O; except SQLAlchemyError as e: print(e); def get_sampleNameShort_experimentIDAndSampleNameAbbreviationAndRatioIDAndTimePoint_dataStage01PhysiologicalRatiosReplicates(self,experiment_id_I,sample_name_abbreviation_I,physiologicalratio_id_I,time_point_I,exp_type_I=4): '''Querry sample names that are used from the experiment by sample name abbreviation and sample description''' try: sample_names = self.session.query(data_stage01_quantification_physiologicalRatios_replicates.sample_name_short).filter( sample_description.sample_name_abbreviation.like(sample_name_abbreviation_I), sample_description.time_point.like(time_point_I), experiment.exp_type_id == exp_type_I, experiment.id.like(experiment_id_I), experiment.sample_name.like(sample.sample_name), sample.sample_id.like(sample_description.sample_id), data_stage01_quantification_physiologicalRatios_replicates.time_point.like(time_point_I), data_stage01_quantification_physiologicalRatios_replicates.experiment_id.like(experiment_id_I), data_stage01_quantification_physiologicalRatios_replicates.physiologicalratio_id.like(physiologicalratio_id_I), 
data_stage01_quantification_physiologicalRatios_replicates.sample_name_short.like(sample_description.sample_name_short), data_stage01_quantification_physiologicalRatios_replicates.used_.is_(True)).group_by( data_stage01_quantification_physiologicalRatios_replicates.sample_name_short).order_by( data_stage01_quantification_physiologicalRatios_replicates.sample_name_short.asc()).all(); sample_names_short_O = []; for sn in sample_names: sample_names_short_O.append(sn.sample_name_short); return sample_names_short_O; except SQLAlchemyError as e: print(e); # Query time points from data_stage01_quantification_physiologicalRatios_replicates def get_timePoint_experimentIDAndSampleNameAbbreviation_dataStage01PhysiologicalRatiosReplicates(self,experiment_id_I,sample_name_abbreviation_I,exp_type_I=4): '''Querry time points that are used from the experiment''' try: time_points = self.session.query(data_stage01_quantification_physiologicalRatios_replicates.time_point).filter( sample_description.sample_name_abbreviation.like(sample_name_abbreviation_I), data_stage01_quantification_physiologicalRatios_replicates.experiment_id.like(experiment_id_I), experiment.exp_type_id == exp_type_I, experiment.id.like(experiment_id_I), experiment.sample_name.like(sample.sample_name), sample.sample_id.like(sample_description.sample_id), sample_description.sample_name_short.like(data_stage01_quantification_physiologicalRatios_replicates.sample_name_short), sample_description.time_point.like(data_stage01_quantification_physiologicalRatios_replicates.time_point), data_stage01_quantification_physiologicalRatios_replicates.used_.is_(True)).group_by( data_stage01_quantification_physiologicalRatios_replicates.time_point).order_by( data_stage01_quantification_physiologicalRatios_replicates.time_point.asc()).all(); time_points_O = []; for tp in time_points: time_points_O.append(tp.time_point); return time_points_O; except SQLAlchemyError as e: print(e); def 
get_timePoint_experimentID_dataStage01PhysiologicalRatiosReplicates(self,experiment_id_I): '''Querry time points that are used from the experiment''' try: time_points = self.session.query(data_stage01_quantification_physiologicalRatios_replicates.time_point).filter( data_stage01_quantification_physiologicalRatios_replicates.experiment_id.like(experiment_id_I), data_stage01_quantification_physiologicalRatios_replicates.used_.is_(True)).group_by( data_stage01_quantification_physiologicalRatios_replicates.time_point).order_by( data_stage01_quantification_physiologicalRatios_replicates.time_point.asc()).all(); time_points_O = []; for tp in time_points: time_points_O.append(tp.time_point); return time_points_O; except SQLAlchemyError as e: print(e); def get_timePoint_experimentIDAndRatioID_dataStage01PhysiologicalRatiosReplicates(self,experiment_id_I,physiologicalratio_id_I): '''Querry time points that are used from the experiment''' try: time_points = self.session.query(data_stage01_quantification_physiologicalRatios_replicates.time_point).filter( data_stage01_quantification_physiologicalRatios_replicates.experiment_id.like(experiment_id_I), data_stage01_quantification_physiologicalRatios_replicates.physiologicalratio_id.like(physiologicalratio_id_I), data_stage01_quantification_physiologicalRatios_replicates.used_.is_(True)).group_by( data_stage01_quantification_physiologicalRatios_replicates.time_point).order_by( data_stage01_quantification_physiologicalRatios_replicates.time_point.asc()).all(); time_points_O = []; for tp in time_points: time_points_O.append(tp.time_point); return time_points_O; except SQLAlchemyError as e: print(e); # Query data from data_stage01_quantification_physiologicalRatios_replicates def get_ratio_experimentIDAndSampleNameShortAndTimePointAndRatioID_dataStage01PhysiologicalRatiosReplicates(self, experiment_id_I, sample_name_short_I, time_point_I, physiologicalratio_id_I): """Query calculated ratios""" try: data = 
self.session.query(data_stage01_quantification_physiologicalRatios_replicates.physiologicalratio_value).filter( data_stage01_quantification_physiologicalRatios_replicates.experiment_id.like(experiment_id_I), data_stage01_quantification_physiologicalRatios_replicates.sample_name_short.like(sample_name_short_I), data_stage01_quantification_physiologicalRatios_replicates.time_point.like(time_point_I), data_stage01_quantification_physiologicalRatios_replicates.physiologicalratio_id.like(physiologicalratio_id_I), data_stage01_quantification_physiologicalRatios_replicates.used_.is_(True)).all(); if len(data)>1: print('more than 1 calculated_concentration retrieved per component_name') if data: ratio_O = data[0][0]; else: ratio_O = None; return ratio_O; except SQLAlchemyError as e: print(e); def get_ratios_experimentIDAndSampleNameAbbreviationAndTimePointAndRatioID_dataStage01PhysiologicalRatiosReplicates(self, experiment_id_I, sample_name_abbreviation_I, time_point_I, physiologicalratio_id_I,exp_type_I=4): """Query calculated ratios""" try: data = self.session.query(data_stage01_quantification_physiologicalRatios_replicates.physiologicalratio_value).filter( sample_description.sample_name_abbreviation.like(sample_name_abbreviation_I), sample_description.time_point.like(time_point_I), experiment.exp_type_id == exp_type_I, experiment.id.like(experiment_id_I), experiment.sample_name.like(sample.sample_name), sample.sample_id.like(sample_description.sample_id), data_stage01_quantification_physiologicalRatios_replicates.sample_name_short.like(sample_description.sample_name_short), data_stage01_quantification_physiologicalRatios_replicates.experiment_id.like(experiment_id_I), data_stage01_quantification_physiologicalRatios_replicates.time_point.like(time_point_I), data_stage01_quantification_physiologicalRatios_replicates.physiologicalratio_id.like(physiologicalratio_id_I), data_stage01_quantification_physiologicalRatios_replicates.used_.is_(True)).group_by( 
data_stage01_quantification_physiologicalRatios_replicates.physiologicalratio_value).all(); ratios_O = []; for d in data: ratios_O.append(d[0]); return ratios_O; except SQLAlchemyError as e: print(e); def get_rows_experimentIDAndSampleNameAbbreviationAndTimePointAndRatioID_dataStage01PhysiologicalRatiosReplicates(self, experiment_id_I, sample_name_abbreviation_I, time_point_I, physiologicalratio_id_I,exp_type_I=4): """Query rows from data_stage01_physiologicalRatios_replicates""" try: data = self.session.query(data_stage01_quantification_physiologicalRatios_replicates).filter( sample_description.sample_name_abbreviation.like(sample_name_abbreviation_I), sample_description.time_point.like(time_point_I), experiment.exp_type_id == exp_type_I, experiment.id.like(experiment_id_I), experiment.sample_name.like(sample.sample_name), sample.sample_id.like(sample_description.sample_id), data_stage01_quantification_physiologicalRatios_replicates.sample_name_short.like(sample_description.sample_name_short), data_stage01_quantification_physiologicalRatios_replicates.experiment_id.like(experiment_id_I), data_stage01_quantification_physiologicalRatios_replicates.time_point.like(time_point_I), data_stage01_quantification_physiologicalRatios_replicates.physiologicalratio_id.like(physiologicalratio_id_I), data_stage01_quantification_physiologicalRatios_replicates.used_.is_(True)).all(); rows_O = []; if data: for d in data: rows_O.append({'experiment_id':d.experiment_id, 'sample_name_short':d.sample_name_short, 'time_point':d.time_point, 'physiologicalratio_id':d.physiologicalratio_id, 'physiologicalratio_name':d.physiologicalratio_name, 'physiologicalratio_value':d.physiologicalratio_value, 'physiologicalratio_description':d.physiologicalratio_description, 'used_':d.used_, 'comment_':d.comment_}); return rows_O; except SQLAlchemyError as e: print(e); def get_rows_experimentIDAndSampleNameShortAndTimePoint_dataStage01PhysiologicalRatiosReplicates(self, experiment_id_I, 
sample_name_short_I, time_point_I): """Query rows from data_stage01_physiologicalRatios_replicates""" try: data = self.session.query(data_stage01_quantification_physiologicalRatios_replicates).filter( data_stage01_quantification_physiologicalRatios_replicates.experiment_id.like(experiment_id_I), data_stage01_quantification_physiologicalRatios_replicates.sample_name_short.like(sample_name_short_I), data_stage01_quantification_physiologicalRatios_replicates.time_point.like(time_point_I), data_stage01_quantification_physiologicalRatios_replicates.used_.is_(True)).all(); rows_O = []; if data: for d in data: rows_O.append({'experiment_id':d.experiment_id, 'sample_name_short':d.sample_name_short, 'time_point':d.time_point, 'physiologicalratio_id':d.physiologicalratio_id, 'physiologicalratio_name':d.physiologicalratio_name, 'physiologicalratio_value':d.physiologicalratio_value, 'physiologicalratio_description':d.physiologicalratio_description, 'used_':d.used_, 'comment_':d.comment_}); return rows_O; except SQLAlchemyError as e: print(e); # Query ratio_id information from data_stage01_quantificaton_physiologicalRatios_replicates def get_ratioIDs_experimentIDAndTimePoint_dataStage01PhysiologicalRatiosReplicates(self,experiment_id_I,time_point_I): '''Query physiologicalRatio_ids that are used from the experiment by time_point''' try: ratios = self.session.query(data_stage01_quantification_physiologicalRatios_replicates.physiologicalratio_id, data_stage01_quantification_physiologicalRatios_replicates.physiologicalratio_name, data_stage01_quantification_physiologicalRatios_replicates.physiologicalratio_description).filter( data_stage01_quantification_physiologicalRatios_replicates.time_point.like(time_point_I), data_stage01_quantification_physiologicalRatios_replicates.experiment_id.like(experiment_id_I), data_stage01_quantification_physiologicalRatios_replicates.used_.is_(True)).group_by( data_stage01_quantification_physiologicalRatios_replicates.physiologicalratio_id, 
data_stage01_quantification_physiologicalRatios_replicates.physiologicalratio_name, data_stage01_quantification_physiologicalRatios_replicates.physiologicalratio_description).order_by( data_stage01_quantification_physiologicalRatios_replicates.physiologicalratio_id.asc()).all(); ratios_O = {}; for r in ratios: ratios_O[r.physiologicalratio_id] = {'name':r.physiologicalratio_name, 'description':r.physiologicalratio_description}; return ratios_O; except SQLAlchemyError as e: print(e); def get_ratioIDs_experimentID_dataStage01PhysiologicalRatiosReplicates(self,experiment_id_I): '''Query physiologicalRatio_ids that are used from the experiment''' try: ratios = self.session.query(data_stage01_quantification_physiologicalRatios_replicates.physiologicalratio_id, data_stage01_quantification_physiologicalRatios_replicates.physiologicalratio_name, data_stage01_quantification_physiologicalRatios_replicates.physiologicalratio_description).filter( data_stage01_quantification_physiologicalRatios_replicates.experiment_id.like(experiment_id_I), data_stage01_quantification_physiologicalRatios_replicates.used_.is_(True)).group_by( data_stage01_quantification_physiologicalRatios_replicates.physiologicalratio_id, data_stage01_quantification_physiologicalRatios_replicates.physiologicalratio_name, data_stage01_quantification_physiologicalRatios_replicates.physiologicalratio_description).order_by( data_stage01_quantification_physiologicalRatios_replicates.physiologicalratio_id.asc()).all(); ratios_O = {}; for r in ratios: ratios_O[r.physiologicalratio_id] = {'name':r.physiologicalratio_name, 'description':r.physiologicalratio_description}; return ratios_O; except SQLAlchemyError as e: print(e); # Query time points from data_stage01_quantification_physiologicalRatios_averages def get_timePoint_experimentID_dataStage01PhysiologicalRatiosAverages(self,experiment_id_I): '''Querry time points that are used from the experiment''' try: time_points = 
self.session.query(data_stage01_quantification_physiologicalRatios_averages.time_point).filter( data_stage01_quantification_physiologicalRatios_averages.experiment_id.like(experiment_id_I), data_stage01_quantification_physiologicalRatios_averages.used_.is_(True)).group_by( data_stage01_quantification_physiologicalRatios_averages.time_point).order_by( data_stage01_quantification_physiologicalRatios_averages.time_point.asc()).all(); time_points_O = []; for tp in time_points: time_points_O.append(tp.time_point); return time_points_O; except SQLAlchemyError as e: print(e); # Query sample names from data_stage01_quantification_physiologicalRatios_averages def get_sampleNameAbbreviations_experimentIDAndTimePoint_dataStage01PhysiologicalRatiosAverages(self,experiment_id_I,time_point_I): '''Querry sample names (i.e. unknowns) that are used from the experiment''' try: sample_names = self.session.query(data_stage01_quantification_physiologicalRatios_averages.sample_name_abbreviation).filter( data_stage01_quantification_physiologicalRatios_averages.experiment_id.like(experiment_id_I), data_stage01_quantification_physiologicalRatios_averages.time_point.like(time_point_I), data_stage01_quantification_physiologicalRatios_averages.used_.is_(True)).group_by( data_stage01_quantification_physiologicalRatios_averages.sample_name_abbreviation).order_by( data_stage01_quantification_physiologicalRatios_averages.sample_name_abbreviation.asc()).all(); sample_names_O = []; for sn in sample_names: sample_names_O.append(sn.sample_name_abbreviation); return sample_names_O; except SQLAlchemyError as e: print(e); def get_sampleNameAbbreviations_experimentIDAndTimePointAndRatioID_dataStage01PhysiologicalRatiosAverages(self,experiment_id_I,time_point_I,physiologicalratio_id_I): '''Querry sample names (i.e. 
unknowns) that are used from the experiment''' try: sample_names = self.session.query(data_stage01_quantification_physiologicalRatios_averages.sample_name_abbreviation).filter( data_stage01_quantification_physiologicalRatios_averages.experiment_id.like(experiment_id_I), data_stage01_quantification_physiologicalRatios_averages.time_point.like(time_point_I), data_stage01_quantification_physiologicalRatios_averages.physiologicalratio_id.like(physiologicalratio_id_I), data_stage01_quantification_physiologicalRatios_averages.used_.is_(True)).group_by( data_stage01_quantification_physiologicalRatios_averages.sample_name_abbreviation).order_by( data_stage01_quantification_physiologicalRatios_averages.sample_name_abbreviation.asc()).all(); sample_names_O = []; for sn in sample_names: sample_names_O.append(sn.sample_name_abbreviation); return sample_names_O; except SQLAlchemyError as e: print(e); # Query data from data_stage01_quantification_physiologicalRatios_averages: def get_data_experimentIDAndTimePointAndSampleNameAbbreviation_dataStage01PhysiologicalRatiosAverages(self, experiment_id_I,time_point_I,sample_name_abbreviation_I): """get data from experiment ID""" try: data = self.session.query(data_stage01_quantification_physiologicalRatios_averages).filter( data_stage01_quantification_physiologicalRatios_averages.experiment_id.like(experiment_id_I), data_stage01_quantification_physiologicalRatios_averages.time_point.like(time_point_I), data_stage01_quantification_physiologicalRatios_averages.sample_name_abbreviation.like(sample_name_abbreviation_I), data_stage01_quantification_physiologicalRatios_averages.used_.is_(True)).all(); data_O = []; for d in data: data_1 = {'experiment_id':d.experiment_id, 'sample_name_abbreviation':d.sample_name_abbreviation, 'time_point':d.time_point, 'physiologicalratio_id':d.physiologicalratio_id, 'physiologicalratio_name':d.physiologicalratio_name, 'physiologicalratio_value_ave':d.physiologicalratio_value_ave, 
'physiologicalratio_value_cv':d.physiologicalratio_value_cv, 'physiologicalratio_value_lb':d.physiologicalratio_value_lb, 'physiologicalratio_value_ub':d.physiologicalratio_value_ub, 'physiologicalratio_description':d.physiologicalratio_description, 'used_':d.used_, 'comment_':d.comment_}; data_O.append(data_1); return data_O; except SQLAlchemyError as e: print(e); def get_data_experimentIDAndTimePointAndRatioIDAndSampleNameAbbreviation_dataStage01PhysiologicalRatiosAverages(self, experiment_id_I,time_point_I,physiologicalratio_id_I,sample_name_abbreviation_I): """get data from experiment ID""" try: data = self.session.query(data_stage01_quantification_physiologicalRatios_averages).filter( data_stage01_quantification_physiologicalRatios_averages.experiment_id.like(experiment_id_I), data_stage01_quantification_physiologicalRatios_averages.time_point.like(time_point_I), data_stage01_quantification_physiologicalRatios_averages.physiologicalratio_id.like(physiologicalratio_id_I), data_stage01_quantification_physiologicalRatios_averages.sample_name_abbreviation.like(sample_name_abbreviation_I), data_stage01_quantification_physiologicalRatios_averages.used_.is_(True)).all(); data_O = {}; if data: data_O = {'experiment_id':data[0].experiment_id, 'sample_name_abbreviation':data[0].sample_name_abbreviation, 'time_point':data[0].time_point, 'physiologicalratio_id':data[0].physiologicalratio_id, 'physiologicalratio_name':data[0].physiologicalratio_name, 'physiologicalratio_value_ave':data[0].physiologicalratio_value_ave, 'physiologicalratio_value_cv':data[0].physiologicalratio_value_cv, 'physiologicalratio_value_lb':data[0].physiologicalratio_value_lb, 'physiologicalratio_value_ub':data[0].physiologicalratio_value_ub, 'physiologicalratio_description':data[0].physiologicalratio_description, 'used_':data[0].used_, 'comment_':data[0].comment_}; return data_O; except SQLAlchemyError as e: print(e); def 
get_ratio_experimentIDAndTimePointAndRatioIDAndSampleNameAbbreviation_dataStage01PhysiologicalRatiosAverages(self, experiment_id_I,time_point_I,physiologicalratio_id_I,sample_name_abbreviation_I): """get data from experiment ID""" try: data = self.session.query(data_stage01_quantification_physiologicalRatios_averages).filter( data_stage01_quantification_physiologicalRatios_averages.experiment_id.like(experiment_id_I), data_stage01_quantification_physiologicalRatios_averages.time_point.like(time_point_I), data_stage01_quantification_physiologicalRatios_averages.physiologicalratio_id.like(physiologicalratio_id_I), data_stage01_quantification_physiologicalRatios_averages.sample_name_abbreviation.like(sample_name_abbreviation_I), data_stage01_quantification_physiologicalRatios_averages.used_.is_(True)).all(); ratio_O = None; if data: ratio_O = data[0].physiologicalratio_value_ave; return ratio_O; except SQLAlchemyError as e: print(e); def drop_dataStage01_quantification_physiologicalRatios(self): try: data_stage01_quantification_physiologicalRatios_replicates.__table__.drop(self.engine,True); data_stage01_quantification_physiologicalRatios_averages.__table__.drop(self.engine,True); except SQLAlchemyError as e: print(e); def initialize_dataStage01_quantification_physiologicalRatios(self): try: data_stage01_quantification_physiologicalRatios_replicates.__table__.create(self.engine,True); data_stage01_quantification_physiologicalRatios_averages.__table__.create(self.engine,True); except SQLAlchemyError as e: print(e); def reset_dataStage01_quantification_physiologicalRatios(self,experiment_id_I): try: if experiment_id_I: reset = self.session.query(data_stage01_quantification_physiologicalRatios_replicates).filter(data_stage01_quantification_physiologicalRatios_replicates.experiment_id.like(experiment_id_I)).delete(synchronize_session=False); reset = 
self.session.query(data_stage01_quantification_physiologicalRatios_averages).filter(data_stage01_quantification_physiologicalRatios_averages.experiment_id.like(experiment_id_I)).delete(synchronize_session=False); self.session.commit(); except SQLAlchemyError as e: print(e); def add_dataStage01QuantificationPhysiologicalRatiosReplicates(self, data_I): '''add rows of data_stage01_quantification_physiologicalRatios_replicates''' if data_I: for d in data_I: try: data_add = data_stage01_quantification_physiologicalRatios_replicates(d #d['experiment_id_I'], #d['sample_name_short_I'], ##d['sample_name_abbreviation_I'], #d['time_point_I'], ##d['time_point_units_I'], #d['physiologicalratio_id_I'], #d['physiologicalratio_name_I'], #d['physiologicalratio_value_I'], #d['physiologicalratio_description_I'], #d['used__I'], #d['comment__I'] ); self.session.add(data_add); except SQLAlchemyError as e: print(e); self.session.commit(); def update_dataStage01QuantificationPhysiologicalRatiosReplicates(self,data_I): '''update rows of data_stage02_quantification_lineage''' if data_I: for d in data_I: try: data_update = self.session.query(data_stage01_quantification_physiologicalRatios_replicates).filter( data_stage01_quantification_physiologicalRatios_replicates.id==d['id']).update( {'experiment_id_I':d['experiment_id_I'], 'sample_name_short_I':d['sample_name_short_I'], #'sample_name_abbreviation_I':d['#sample_name_abbreviation_I'], 'time_point_I':d['time_point_I'], #'time_point_units_I':d['#time_point_units_I'], 'physiologicalratio_id_I':d['physiologicalratio_id_I'], 'physiologicalratio_name_I':d['physiologicalratio_name_I'], 'physiologicalratio_value_I':d['physiologicalratio_value_I'], 'physiologicalratio_description_I':d['physiologicalratio_description_I'], 'used__I':d['used__I'], 'comment__I':d['comment__I']}, synchronize_session=False); except SQLAlchemyError as e: print(e); self.session.commit();
Property, business & commercial transactions. Jason is a people person. His strength is working with his clients to find practical solutions to their dilemmas. Growing up in Wainuiomata, Jason was educated at St. Bernard’s College and is a dedicated sports fan. He plays cricket for Wainuiomata Cricket Club and rugby for Hutt Old Boys’ Marist. He also enjoys a game of squash and golf. Jason and his wife run budgeting courses in their community through Christians Against Poverty. Jason graduated with an LLB from Victoria University in 2011 and went to work for the Ministry of Social Development for three years. Jason is a member of our Property Law Team.
import matplotlib.pyplot as plt
from matplotlib.ticker import MaxNLocator
import numpy as np
import math
import sys
import scipy
import scipy.optimize
from scipy.stats.mstats import mquantiles as quantiles
import scipy.stats

### emcee is optional; without it we fall back to the Metropolis-Hastings
### sampler implemented below.
try:
    import emcee
    emcee_import = True
except ImportError:
    print("Emcee and Acor not installed. Using Metropolis-Hastings algorithm for Markov Chain Monte Carlo simulations.")
    emcee_import = False

from BayesPSD import utils
from BayesPSD import powerspectrum

### numpy.random.choice only exists in newer numpy; otherwise use the hack.
try:
    from numpy.random import choice
except ImportError:
    choice = utils.choice_hack


class MarkovChainMonteCarlo(object):
    """Markov Chain Monte Carlo for Bayesian QPO searches.

    Either wraps around emcee, or uses the Metropolis-Hastings sampler
    defined in this file.

    Parameters
    ----------
    x : {list, array-like}
        Independent variable, most likely the frequencies of the periodogram.
    y : {list, array-like}
        Dependent variable, most likely the powers of the periodogram.
    lpost : Posterior object
        Defines the likelihood and priors to be used. For periodograms, use
        * posterior.PerPosterior for unbinned periodograms
        * posterior.StackPerPosterior for binned/stacked periodograms
    topt : {list, array-like}
        Starting point for generating the initial parameter samples. Should
        be in a region of high posterior mass (ideally the MAP parameters).
        Its length must match the number of parameters of lpost.func.
    tcov : array-like, shape (k, k)
        Variances/covariances used to draw initial parameter samples for all
        chains/walkers. The inverse Fisher information (e.g. from bfgs)
        usually works better than large diagonal variances.
    covfactor : float, optional, default 1.0
        Tuning parameter for the MCMC step (Metropolis-Hastings only).
    niter : int, optional, default 5000
        Length of the Markov chains. Needs to be large (>10000) for
        Metropolis-Hastings; can be smaller for emcee (verify mixing!).
    nchain : int, optional, default 10
        Number of chains/walkers: ~10-20 for Metropolis-Hastings, as many as
        affordable (~500) for emcee.
    discard : {int, None}, optional, default None
        Number of initial samples to discard. emcee *always* uses a burn-in
        of 200 samples (in addition to niter); for Metropolis-Hastings this
        variable is used directly, defaulting to half of the samples.
    parname : list, optional, default None
        Parameter names for plotting.
    check_conv : boolean, optional, default True
        If True, check convergence with check_convergence below. Set up for
        Metropolis-Hastings; results may be harder to interpret for emcee.
    namestr : string, optional, default 'test'
        Prefix for saved plots and output files.
    use_emcee : boolean, optional, default True
        If True (STRONGLY RECOMMENDED), use emcee; if False, use
        Metropolis-Hastings.
    plot : boolean, optional, default True
        If True, save convergence plots and a posterior scatter matrix.
    printobj : object, optional, default None
        Intended as an alternative to print (NOTE: currently not used).
    m : int, optional, default 1
        Number of periodograms averaged/stacked; changes the likelihood.
    """

    def __init__(self, x, y, lpost, topt, tcov,
                 covfactor=1.0,
                 niter=5000,
                 nchain=10,
                 discard=None,
                 parname=None,
                 check_conv=True,
                 namestr='test',
                 use_emcee=True,
                 plot=True,
                 printobj=None,
                 m=1):

        self.m = m
        self.x = x
        self.y = y
        self.plot = plot
        print("<--- self.ps len MCMC: " + str(len(self.x)))

        ### set of optimal parameters from MLE fitting
        self.topt = topt
        print("mcobs topt: " + str(self.topt))

        ### covariances of fitted parameters
        self.tcov = tcov * covfactor
        print("mcobs tcov: " + str(self.tcov))

        ### number of iterations for the MCMC algorithm
        self.niter = niter
        ### number of MCMC chains to be computed
        self.nchain = nchain
        ### 1-sigma errors of the fitted parameters
        self.terr = np.sqrt(np.diag(tcov))
        ### posterior that was fitted
        self.lpost = lpost

        if discard is None:
            discard = math.floor(niter / 2.0)

        mcall = []

        ### if the emcee package is not installed, enforce Metropolis-Hastings
        if emcee_import == False:
            print("Emcee not installed. Enforcing M-H algorithm!")
            use_emcee = False

        if use_emcee:
            ### number of walkers is the number of chains
            nwalkers = self.nchain
            ### number of dimensions (= number of parameters)
            ndim = len(self.topt)
            ### sample random starting positions for each walker
            ### BUGFIX: xrange does not exist in Python 3
            p0 = [np.random.multivariate_normal(self.topt, self.tcov)
                  for i in range(nwalkers)]

            ### initialize sampler
            sampler = emcee.EnsembleSampler(nwalkers, ndim, lpost, args=[False])

            ### run burn-in phase and reset sampler
            pos, prob, state = sampler.run_mcmc(p0, 200)
            sampler.reset()

            ### run actual MCMCs
            sampler.run_mcmc(pos, niter, rstate0=state)

            ### all samples are stored in flatchain
            mcall = sampler.flatchain

            ### mean acceptance rate for all walkers, and autocorrelation times
            print("The ensemble acceptance rate is: " + str(np.mean(sampler.acceptance_fraction)))
            self.L = np.mean(sampler.acceptance_fraction) * len(mcall)
            self.acceptance = np.mean(sampler.acceptance_fraction)
            try:
                self.acor = sampler.acor
                print("The autocorrelation times are: " + str(sampler.acor))
            except ImportError:
                print("You can install acor: http://github.com/dfm/acor")
                self.acor = None
            except RuntimeError:
                print("D was negative. No clue why that's the case! Not computing autocorrelation time ...")
                self.acor = None
            except Exception:
                ### was a bare except; keep the broad catch but let
                ### KeyboardInterrupt/SystemExit propagate
                print("Autocorrelation time calculation failed due to an unknown error: " + str(sys.exc_info()[0]) + ". Not computing autocorrelation time.")
                self.acor = None

        ### otherwise use the M-H algorithm defined in MetropolisHastings below
        else:
            ### loop over all chains
            for i in range(nchain):
                ### set up MetropolisHastings object
                mcout = MetropolisHastings(topt, tcov, lpost, niter=niter,
                                           parname=parname, discard=discard)
                ### create actual chain
                mcout.create_chain(self.x, self.y)
                ### make diagnostic plots
                mcout.run_diagnostics(namestr=namestr + "_c" + str(i), parname=parname)
                mcall.extend(mcout.theta)
            self.L = mcout.L

        mcall = np.array(mcall)
        ### check whether chains/walkers converged
        if check_conv == True:
            self.check_convergence(mcall, namestr, printobj=printobj)
        ### transpose so that self.mcall holds one list per parameter
        self.mcall = mcall.transpose()
        ### make inferences from the chain, plot and save results
        self.mcmc_infer(namestr=namestr, printobj=printobj)

    def check_convergence(self, mcall, namestr, printobj=None, use_emcee=True):
        """Convergence diagnostics: plot Rhat and the scaled 80% quantiles
        per parameter, saved as <namestr>_rhat.png / <namestr>_quantiles.png."""
        ### compute Rhat for all parameters
        rh = self._rhat(mcall, printobj)
        self.rhat = rh

        plt.scatter(rh, np.arange(len(rh)) + 1.0)
        plt.axis([0.1, 2, 0.5, 0.5 + len(rh)])
        plt.xlabel("$R_hat$")
        plt.ylabel("Parameter")
        plt.title('Rhat')
        plt.savefig(namestr + '_rhat.png', format='png')
        plt.close()

        ### compute 80% quantiles
        ci0, ci1 = self._quantiles(mcall)

        ### make sure there are enough colours available
        colours_basic = ['b', 'g', 'r', 'c', 'm', 'y', 'k']
        cneeded = int(math.ceil(len(ci0[0]) / 7.0))
        colours = []
        for x in range(cneeded):
            colours.extend(colours_basic)

        ### plot 80% quantiles
        if self.plot:
            plt.plot(0, 0)
            plt.axis([-2, 2, 0.5, 0.5 + len(ci0)])
            for j in range(self.nchain):
                plt.hlines(y=[m + (j) / (4.0 * self.nchain) for m in range(len(ci0))],
                           xmin=[x[j] for x in ci0],
                           xmax=[x[j] for x in ci1],
                           color=colours[j])
            plt.xlabel("80% region (scaled)")
            plt.ylabel("Parameter")
            plt.title("80% quantiles")
            plt.savefig(namestr + "_quantiles.png", format="png")
            plt.close()

    ### auxiliary function used in check_convergence:
    ### compares the variance inside chains to the variance between chains
    def _rhat(self, mcall, printobj=None):
        """Compute the Gelman-Rubin Rhat statistic for every parameter."""
        print("Computing Rhat. The closer to 1, the better!")
        rh = []
        ### loop over parameters
        for i, k in enumerate(self.topt):
            ### pick parameter out of array
            tpar = np.array([t[i] for t in mcall])
            ### BUGFIX: reshape dims must be ints (true division fails in py3)
            tpar = np.reshape(tpar, (self.nchain, len(tpar) // self.nchain))

            ### mean of within-chain variances
            ### BUGFIX: map() returns an iterator in py3; materialize as list
            sj = [np.var(c) for c in tpar]
            W = np.mean(sj)

            ### variance of the chain means
            mj = [np.mean(c) for c in tpar]
            ### note: this assumes the discards
            B = np.var(mj) * self.L

            ### marginal posterior variance
            mpv = ((float(self.L) - 1.0) / float(self.L)) * W + B / float(self.L)

            rh.append(np.sqrt(mpv / W))

            print("The Rhat value for parameter " + str(i) + " is: " + str(rh[i]) + ".")
            if rh[i] > 1.2:
                print("*** HIGH Rhat! Check results! ***")
            else:
                print("Good Rhat. Hoorah!")
        return rh

    def _quantiles(self, mcall):
        """Per-parameter 10%/90% quantiles of each chain, rescaled so they
        can be compared across parameters on one axis."""
        ci0, ci1 = [], []
        ### loop over the parameters
        for i, k in enumerate(self.topt):
            print("I am on parameter: " + str(i))

            ### pick parameter out of array, reshape to (nchain, L)
            tpar = np.array([t[i] for t in mcall])
            ### BUGFIX: integer division for reshape dims
            tpar = np.reshape(tpar, (self.nchain, len(tpar) // self.nchain))

            ### 0.1 and 0.9 quantiles per chain
            ### BUGFIX: materialize map() so we can iterate it twice below
            intv = [quantiles(c, prob=[0.1, 0.9]) for c in tpar]
            c0 = np.array([x[0] for x in intv])
            c1 = np.array([x[1] for x in intv])

            ### scale: mean quantile half-width across chains
            scale = np.mean(c1 - c0) / 2.0

            ### offset: mean of chain means
            mt = [np.mean(c) for c in tpar]
            offset = np.mean(mt)

            ### rescale quantiles so chains are comparable on one axis
            ci0.append((c0 - offset) / scale)
            ci1.append((c1 - offset) / scale)
        return ci0, ci1

    def mcmc_infer(self, namestr='test', printobj=None):
        """Summarize the posterior (mean, sd, 90% credible interval per
        parameter) and save a scatter-matrix plot of the samples."""
        ### covariance of the parameters from simulations
        covsim = np.cov(self.mcall)
        print("Covariance matrix (after simulations): \n")
        print(str(covsim))

        ### posterior mean, sd and equal-tail 90% credible interval
        ### BUGFIX: map objects are not indexable in py3; use lists
        self.mean = [np.mean(y) for y in self.mcall]
        self.std = [np.std(y) for y in self.mcall]
        self.ci = [quantiles(y, prob=[0.05, 0.95]) for y in self.mcall]

        ### print to screen
        print("-- Posterior Summary of Parameters: \n")
        print("parameter \t mean \t\t sd \t\t 5% \t\t 95% \n")
        print("---------------------------------------------\n")
        for i in range(len(self.topt)):
            print("theta[" + str(i) + "] \t " + str(self.mean[i]) + "\t" + str(self.std[i]) + "\t" + str(self.ci[i][0]) + "\t" + str(self.ci[i][1]) + "\n")

        ### produce matrix scatter plots
        N = len(self.topt)
        print("N: " + str(N))
        n, bins, patches = [], [], []
        if self.plot:
            fig = plt.figure(figsize=(15, 15))
            plt.subplots_adjust(top=0.925, bottom=0.025, left=0.025, right=0.975,
                                wspace=0.2, hspace=0.2)
            for i in range(N):
                for j in range(N):
                    xmin, xmax = self.mcall[j][:1000].min(), self.mcall[j][:1000].max()
                    ymin, ymax = self.mcall[i][:1000].min(), self.mcall[i][:1000].max()
                    ax = fig.add_subplot(N, N, i * N + j + 1)
                    ax.xaxis.set_major_locator(MaxNLocator(5))
                    ax.ticklabel_format(style="sci", scilimits=(-2, 2))
                    if i == j:
                        ### diagonal: histogram of the marginal posterior
                        ### BUGFIX: 'normed' was removed from matplotlib; use density
                        ntemp, binstemp, patchestemp = ax.hist(
                            self.mcall[i][:1000], 30, density=True, histtype='stepfilled')
                        n.append(ntemp)
                        bins.append(binstemp)
                        patches.append(patchestemp)
                        ax.axis([ymin, ymax, 0, max(ntemp) * 1.2])
                    else:
                        ax.axis([xmin, xmax, ymin, ymax])
                        ### scatter plot first
                        ax.scatter(self.mcall[j][:1000], self.mcall[i][:1000], s=7)
                        ### then add contours from a kernel density estimate
                        try:
                            X, Y = np.mgrid[xmin:xmax:100j, ymin:ymax:100j]
                            positions = np.vstack([X.ravel(), Y.ravel()])
                            values = np.vstack([self.mcall[j][:1000], self.mcall[i][:1000]])
                            kernel = scipy.stats.gaussian_kde(values)
                            Z = np.reshape(kernel(positions).T, X.shape)
                            ax.contour(X, Y, Z, 7)
                        except ValueError:
                            print("Not making contours.")
            plt.savefig(namestr + "_scatter.png", format='png')
            plt.close()
        return

    #### POSTERIOR PREDICTIVE CHECKS ################
    # Note: fpeak is calculated in mle.PerMaxLike.compute_stats
    # and can be found in dictionary self.pl_r or self.bpl_r
    def simulate_periodogram(self, nsim=5000):
        """Simulate periodograms from posterior samples of the broadband noise model.

        Picks samples from the posterior, evaluates self.lpost.func to obtain a
        model spectrum, and draws each frequency bin from an exponential
        (m == 1) or scaled chi-square (m > 1) distribution around the model.

        Parameters
        ----------
        nsim : int, optional, default 5000
            Number of periodograms to simulate; capped at the number of
            MCMC samples available.

        Returns
        -------
        fps : numpy.ndarray of powerspectrum.PowerSpectrum
            One PowerSpectrum object per simulated periodogram.
        """
        ## the model function is stored in lpost
        func = self.lpost.func

        ### cannot simulate more periodograms than we have samples
        nsim = min(nsim, len(self.mcall[0]))

        ### shuffle MCMC parameter sets
        theta = np.transpose(self.mcall)
        np.random.shuffle(theta)

        fps = []
        for x in range(nsim):
            ### extract parameter set
            ain = theta[x]
            ### compute model 'true' spectrum
            mpower = func(self.x, *ain)

            ### draw noise from the appropriate distribution
            if self.m == 1:
                noise = np.random.exponential(size=len(self.x))
            else:
                noise = np.random.chisquare(2 * self.m, size=len(self.x)) / (2.0 * self.m)

            ### add random fluctuations
            mpower = mpower * noise

            ### store the generated spectrum in a PowerSpectrum object
            mps = powerspectrum.PowerSpectrum()
            mps.freq = self.x
            mps.ps = mpower
            mps.df = self.x[1] - self.x[0]
            mps.n = 2.0 * len(self.x)
            mps.nphots = mpower[0]
            mps.m = self.m
            fps.append(mps)
        return np.array(fps)


#### METROPOLIS-HASTINGS MARKOV CHAIN ###
class MetropolisHastings(object):
    """A single Metropolis-Hastings Markov chain.

    Parameters
    ----------
    topt : {list, array-like}
        Starting point for generating the initial parameter samples; should
        be in a region of high posterior mass (ideally the MAP parameters).
        Its length must match the number of parameters of lpost.func.
    tcov : array-like, shape (k, k)
        Variances/covariances used to draw the chain's starting position;
        the inverse Fisher information (e.g. from bfgs) usually works well.
    lpost : Posterior object
        Defines the likelihood and priors to be used
        (posterior.PerPosterior / posterior.StackPerPosterior).
    niter : int, optional, default 5000
        Length of the chain; needs to be large (>10000) for M-H.
    parname : list, optional, default None
        Parameter names for plotting.
    discard : {int, None}, optional, default None
        Number of initial samples to discard; defaults to half the samples.
    """

    def __init__(self, topt, tcov, lpost, niter=5000, parname=None, discard=None):
        self.niter = niter
        self.topt = topt
        self.tcov = tcov
        ### 1-sigma errors of the fitted parameters
        ### (was assigned twice in the original; once is enough)
        self.terr = np.sqrt(np.diag(tcov))
        ### jitter the starting position by a few sigma
        self.t0 = topt + choice([2.0, 3.0, -3.0, -2.0], size=len(topt)) * self.terr
        self.lpost = lpost
        if discard is None:
            self.discard = int(niter / 2)
        else:
            self.discard = int(discard)
        if parname is None:
            ### default labels (typo 'lappa' fixed to 'kappa')
            self.parname = ['alpha', 'beta', 'gamma', 'delta', 'epsilon', 'zeta',
                            'eta', 'iota', 'kappa', 'lambda', 'mu']
        else:
            self.parname = parname

    ### set up MCMC chain; proposal distributions:
    ### - 'mvn': multi-variate normal (default, currently the only option)
    def create_chain(self, x, y, topt=None, tcov=None, t0=None, dist='mvn'):
        """Run the Metropolis-Hastings sampler and store the post-burn-in
        samples in self.theta / self.logp.

        x and y are kept for interface compatibility but are not used here;
        the posterior is evaluated through self.lpost.
        """
        ### BUGFIX: identity comparison instead of 'not ... == None'
        if topt is not None:
            self.topt = topt
        if tcov is not None:
            self.tcov = tcov
        if t0 is not None:
            self.t0 = t0

        ### set up proposal distribution
        if dist == 'mvn':
            dist = np.random.multivariate_normal

        ### acceptance counter (post-burn-in only)
        accept = 0.0

        ### chain of samples and their log-posteriors
        ttemp, logp = [], []
        ttemp.append(self.t0)
        logp.append(self.lpost(self.t0, neg=False))

        for t in np.arange(self.niter - 1) + 1:
            ### propose a step and compute the log acceptance ratio
            tprop = dist(ttemp[t - 1], self.tcov)
            pprop = self.lpost(tprop)
            logr = pprop - logp[t - 1]
            logr = min(logr, 0.0)
            r = np.exp(logr)
            ### accept with probability r
            update = choice([True, False], size=1, p=[r, 1.0 - r])
            if update:
                ttemp.append(tprop)
                logp.append(pprop)
                if t > self.discard:
                    accept = accept + 1
            else:
                ttemp.append(ttemp[t - 1])
                logp.append(logp[t - 1])

        ### discard burn-in samples
        self.theta = ttemp[self.discard + 1:]
        self.logp = logp[self.discard + 1:]
        self.L = self.niter - self.discard
        self.accept = accept / self.L
        return

    def run_diagnostics(self, namestr=None, parname=None, printobj=None):
        """Plot per-parameter trace, histogram and autocorrelation panels,
        saved as <namestr>_diag.png."""
        print("Markov Chain acceptance rate: " + str(self.accept) + ".")

        if namestr is None:
            print("No file name string given for printing. Setting to 'test' ...")
            namestr = 'test'

        if parname is None:
            parname = ['alpha', 'beta', 'gamma', 'delta', 'epsilon', 'zeta',
                       'eta', 'iota', 'kappa', 'lambda', 'mu']

        fig = plt.figure(figsize=(12, 10))
        plt.subplots_adjust(hspace=0.4, wspace=0.4)
        for i, th in enumerate(self.theta[0]):
            ts = np.array([t[i] for t in self.theta])

            ### panel 1: trace of the chain
            plt.subplot(len(self.topt), 3, (i * 3) + 1)
            plt.plot(ts)
            plt.axis([0, len(ts), min(ts), max(ts)])
            plt.xlabel("Number of draws")
            plt.ylabel("parameter value")
            plt.title("Time series for parameter " + str(parname[i]) + ".")

            ### panel 2: histogram with Gaussian reference curve
            plt.subplot(len(self.topt), 3, (i * 3) + 2)
            ### BUGFIX: 'normed' was removed from matplotlib; use density
            count, bins, ignored = plt.hist(ts, bins=10, density=True)
            bnew = np.arange(bins[0], bins[-1], (bins[-1] - bins[0]) / 100.0)
            plt.plot(bnew,
                     1.0 / (self.terr[i] * np.sqrt(2 * np.pi)) *
                     np.exp(-(bnew - self.topt[i]) ** 2.0 / (2.0 * self.terr[i] ** 2.0)),
                     linewidth=2, color='r')
            plt.xlabel('value of ' + str(parname[i]))
            plt.ylabel('probability')
            plt.title("Histogram for parameter " + str(parname[i]) + ".")

            ### panel 3: autocorrelation
            ### NOTE(review): autocorr is assumed to be defined elsewhere in
            ### this module -- confirm it is in scope.
            nlags = 30
            plt.subplot(len(self.topt), 3, (i * 3) + 3)
            acorr = autocorr(ts, nlags=nlags, norm=True)
            plt.vlines(range(nlags), np.zeros(nlags), acorr,
                       colors='black', linestyles='solid')
            plt.axis([0.0, nlags, 0.0, 1.0])
        plt.savefig(namestr + "_diag.png", format='png', orientation='landscape')
        plt.close()

##############################################################
Dublin, February 2017 – CBRE present an excellent office opportunity on Great Strand Street, Dublin 1. This modern self-contained office building has a unique ‘industrial style’ and is for sale guiding €850,000. Number 1 Great Strand Street extends to c.3,600 sq ft and will appeal to a variety of buyers including investors and owner occupiers alike. The property is ideally located between Capel Street and Jervis Street, providing ease of access to all amenities and areas of the city. The Luas stops nearby on Abbey Street, and ample Dublin Bus routes serve the surrounding area.
import smtplib
import pystache
import os
import html.parser
from email.mime.text import MIMEText
from werkzeug.utils import secure_filename
from flask import url_for
from truecraft.database import db
from truecraft.objects import User
from truecraft.config import _cfg, _cfgi


def send_confirmation(user):
    """Email the account-confirmation link to *user*.

    No-op when no SMTP host is configured (e.g. local development).
    """
    if _cfg("smtp-host") == "":
        return
    smtp = smtplib.SMTP(_cfg("smtp-host"), _cfgi("smtp-port"))
    smtp.ehlo()
    smtp.starttls()
    smtp.login(_cfg("smtp-user"), _cfg("smtp-password"))
    with open("emails/confirm-account") as f:
        # html.unescape() replaces HTMLParser().unescape(), which was
        # deprecated and removed in Python 3.9.
        message = MIMEText(html.unescape(
            pystache.render(f.read(), {
                'user': user,
                "domain": _cfg("domain"),
                'confirmation': user.confirmation
            })))
    # Mandrill-specific headers controlling importance/recipient visibility.
    message['X-MC-Important'] = "true"
    message['X-MC-PreserveRecipients'] = "false"
    message['Subject'] = "Confirm your TrueCraft account"
    message['From'] = _cfg("smtp-user")
    message['To'] = user.email
    smtp.sendmail(_cfg("smtp-user"), [user.email], message.as_string())
    smtp.quit()


def send_reset(user):
    """Email the password-reset link to *user*.

    No-op when no SMTP host is configured (e.g. local development).
    """
    if _cfg("smtp-host") == "":
        return
    smtp = smtplib.SMTP(_cfg("smtp-host"), _cfgi("smtp-port"))
    smtp.ehlo()
    smtp.starttls()
    smtp.login(_cfg("smtp-user"), _cfg("smtp-password"))
    with open("emails/reset") as f:
        # See note above: html.unescape() is the supported replacement.
        message = MIMEText(html.unescape(
            pystache.render(f.read(), {
                'user': user,
                "domain": _cfg("domain"),
                "protocol": _cfg("protocol"),
                'confirmation': user.passwordReset
            })))
    message['X-MC-Important'] = "true"
    message['X-MC-PreserveRecipients'] = "false"
    message['Subject'] = "Reset your TrueCraft password"
    message['From'] = _cfg("smtp-user")
    message['To'] = user.email
    smtp.sendmail(_cfg("smtp-user"), [user.email], message.as_string())
    smtp.quit()
Is a preaching-centered ministry truly useful in today's society? Alexander argues through this booklet that biblical, expository preaching is as central to any church's activity as Scripture is to its life, and explains why a focus on such preaching is more important than ever. Divided into eight main sections, this helpful resource deals with such subjects as the characteristics of expository preaching, its urgency in the church today, and the need for it to be pastoral, clear, and relevant.
# -*- coding: utf-8 -*-
'''
Mepinta
Copyright (c) 2011-2012, Joaquin G. Duo, mepinta@joaquinduo.com.ar

This file is part of Mepinta.

Mepinta is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.

Mepinta is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with Mepinta. If not, see <http://www.gnu.org/licenses/>.
'''
from pipeline_backend.args_management.args_management import createArgs, argsAppendProp,\
    argsSetChanged, argsSetUnchanged, argsBuildChangedSet,\
    argsSetCapacity, getThreadArgs, argsSetThreadId
from mepinta.pipeline.lo.constants import OUTPUT_PROPS, CUSTOM_OUTPUT_PROPS,\
    INPUT_PROPS, CUSTOM_INPUT_PROPS
#from pipeline_backend.args_management.argsSetProcessorContext import argsSetProcessorContext

# TODO: Rename ProcessorArgsManager ProcessorArgsBuilder
# TODO: check getThreadArgs and threading


class ProcessorArgsManager(object):
    """Builds the argument structure handed to a processor function.

    Keeps one cursor (index) per property section (input/output and their
    custom variants); ``setInOut`` selects which section the cursor
    operations apply to.
    """

    def __init__(self, context_lo, func_prop_value, thread_id):
        self.context_lo = context_lo
        # args may be cached on func_prop_value from a previous run.
        self.args = self.createArgs(func_prop_value, thread_id)
        # Per-section cursor into the args structure.
        self.indexes = {INPUT_PROPS: 0, OUTPUT_PROPS: 0,
                        CUSTOM_INPUT_PROPS: 0, CUSTOM_OUTPUT_PROPS: 0}
        self.marked = False

    def setInOut(self, in_out_id):
        # Select which property section subsequent calls operate on.
        self.in_out_id = in_out_id

    def __getCurrentIndex(self):
        return self.indexes[self.in_out_id]

    def __incrementCurrentIndex(self, delta=1):
        self.indexes[self.in_out_id] += delta

    def createArgs(self, func_prop_value, thread_id):
        """Return the args structure, creating and caching it if needed."""
        # if cached:
        # "is None" instead of "== None": identity test is the correct check.
        if func_prop_value.args is None:
            self.new_args = True
            #func_prop_value.args = createArgs(in_size,out_size)
            func_prop_value.args = createArgs(0, 0)
        else:
            self.new_args = False
        # NOTE(review): source formatting was ambiguous here; the thread id
        # is set for both new and cached args — confirm against upstream.
        argsSetThreadId(func_prop_value.args, thread_id)
        return func_prop_value.args
        # else:
        #    self.new_args = True
        #    return getThreadArgs(in_size,out_size,thread_id)

    def getArgs(self):
        return self.args

    # def setProcessorContext(self, processor_context):
    #    argsSetProcessorContext(self.args, processor_context)

    def nextProp(self):  # useful for skipping a property too
        self.__incrementCurrentIndex(1)

    def append(self, prop_id, prop, prop_real):
        """Append a property (and its data type) at the current cursor."""
        data_type = self.context_lo.data_types[prop_real.dtype_id]
        argsAppendProp(self.args, self.in_out_id, self.__getCurrentIndex(),
                       prop_id, prop, prop_real, data_type)

    # def update(self, in_prop_ids, out_prop_ids):
    #    raise RuntimeError("Implement")

    def changedValue(self):
        argsSetChanged(self.args, self.in_out_id, self.__getCurrentIndex())

    def unchangedValue(self):
        argsSetUnchanged(self.args, self.in_out_id, self.__getCurrentIndex())

    def buildChangedSet(self):
        """Collect the ids of all changed output properties (both sections)."""
        changed = set()
        argsBuildChangedSet(self.args, OUTPUT_PROPS, changed)
        argsBuildChangedSet(self.args, CUSTOM_OUTPUT_PROPS, changed)
        return changed


def shedskin_ProcessorArgsManager(context_lo, func_prop_value, prop):
    # Exercise the class for the shedskin type-inference compiler.
    pam = ProcessorArgsManager(context_lo, func_prop_value, thread_id=0)
    prop_id = 100
    pam.setInOut(in_out_id=1)
    pam.append(prop_id, prop, prop)
    pam.getArgs()
    pam.nextProp()
    pam.append(prop_id, prop, prop)
    pam.changedValue()
    pam.unchangedValue()
    pam.buildChangedSet()
    return pam
The 2017 Dodge Journey | Kamloops Dodge Chrysler Jeep Ltd. **25% off MSRP discount available to retail customers on the purchase/lease of all in-stock 2017 Dodge Grand Caravan models at all participating dealers from June 1 to 30, 2017. Discounts are calculated based on MSRP plus options, freight, A/C and tire charge and will be deducted from the negotiated price before taxes. Percentage off MSRP discount cannot be combined any other bonus cash or consumer cash offer. Dealer may sell for less. See dealer for complete details. ∞ 0.0% purchase financing for up to 84 months available to qualified customers on approved credit through Royal Bank of Canada, Scotiabank and TD Auto Finance on select 2017 Dodge Journey models. Example: $30,000 Purchase Price with a $0 down payment financed at 0% for 36/60/72/84 months equals monthly payments of $833/$500/$417/$357, with a cost of borrowing of $0 and a total obligation of $30,000. £ A maximum of $12,500 off in MSRP discount is offered on 2017 Grand Caravan models. See retailer for details. A maximum of $1,500 in total discounts is available on select new 2017 Dodge Journey models and consists of $1,500 in Consumer Cash. Consumer Cash Discounts are applied before taxes and Bonus Cash Discounts are applied after taxes. See your retailer for complete details. † The Best Buy Seal and other licensed materials are registered certification marks and trademarks of Consumers Digest Communications, LLC, used under license. For award information, visit ConsumersDigest.com.
from collections import OrderedDict

import numpy as np

from ...data import Data
from ...instrument import Instrument


class Ice(Data):
    r"""Loads ICE (NCNR) format ascii data file.
    """

    def __init__(self):
        super(Ice, self).__init__()

    def load(self, filename, build_hkl=True, load_instrument=False):
        r"""Loads the ICE (NCNR) format ascii data file.

        Parameters
        ----------
        filename : str
            Path to file to open

        build_hkl : bool, optional
            Option to build Q = [h, k, l, e, temp]

        load_instrument : bool, optional
            Option to build Instrument from file header

        """
        # Scan the header for the "Columns" line, which names the data
        # columns.  NOTE(review): file_header is kept but never populated
        # here — confirm whether header lines were meant to be collected.
        file_header = []
        with open(filename) as handle:
            for line in handle:
                if 'Columns' in line:
                    col_headers = line.split()[1:]
                    break

        columns = np.genfromtxt(filename,
                                usecols=(0, 1, 2, 3, 4, 5, 6, 7, 8),
                                unpack=True, comments="#", dtype=np.float64)

        # Map each column header onto its column, preserving file order.
        self._data = OrderedDict(zip(col_headers, columns))
        self.data_keys = {'detector': 'Detector',
                          'monitor': 'Monitor',
                          'time': 'Time'}
        self._file_header = file_header

        if build_hkl:
            self.Q_keys = {'h': 'QX', 'k': 'QY', 'l': 'QZ',
                           'e': 'E', 'temp': 'Temp'}

        if load_instrument:
            self.instrument = Instrument()
Locked or misaligned foot joints will change the normal pattern of weight distribution and this is the leading cause of bunion. Yesterday I worked with a client who made her reflexology appointment for stress management. She expressed feeling chronically exhausted but goes about her day as if it’s a normal way of being. “Oh”, she said, and by the way I have a blister on the inside of my big toe which comes and goes especially when I go hiking.” Upon looking at the blister I notice its placement against the intermediate joint of the second toe. I mentioned that blisters are caused from excessive friction and this blister formed from the big toe rubbing against the second toe. I also mentioned her great toe was misaligned. With an insignificant tone she brushed my comment away and said “Yes, I have a bunion” and further elaborated that these are the things that come with age and therefore this “growth” was an anomaly she was resigned to live with. Comes with age?! Not at all. I educated her on how the great toe misaligns when there is not enough room in a shoe to keep the toe in line with the first metatarsal and the supposed “growth” was a misnomer. As is so with many others she acted enlightened and hopeful in learning this information. Upon examining her shoes I noticed they were not far off base in terms of function but still the toe-box narrowed enough to taper the great toe laterally (bunion) and small toe medially (bunionnette). I alerted her that wearing this type of shoe style on a regular basis would deform the toes in this position and eventually cause other joints that knit the bones of the feet together to lock or subluxate. Recognizing her misaligned great toe had much to do with the stress she experienced throughout her body my intention for this session included reducing stress on her nervous system and reducing stress in the local aspects of her feet. 
After completing her reflexology session and integrating reflexing techniques on particular muscles that move and stabilize the great toe progress was noted by an increased range of motion in her first metatarsal-phalangeal joint. Gentle and specific jostling motions were then incorporated into the session to encourage a more open and relaxed relationship in the joints connecting the cuboid bone to the bases of the fourth and fifth metatarsals. This allowed these bone to rest in their normal position and took pressure off the first metatarsal and great toe. The cuboid bone is the keystone of the foot and captain of directing weight. Without the solid foundation endowed by the cuboid bone weight defaults and becomes misdirected. One sign the cuboid bone has become challenged is the formation of a callus on the plantar head of the second metatarsal. After this Structural Reflexology session my client’s bunion was a thing of the past and she walked away jubilantly and in amazement saying everything felt alive in her entire body and repeatedly mentioned she could not stop admiring her new straight great toe. You can learn how to do this too! Focus on these muscles in your next session. (A) Adductor Hallucis (transverse and oblique), and the Abductor Hallucis. (B) Fibularis Longus muscle to release tension at the lateral base of the first metatarsal. (C) Tibialis Anterior to release tension at its attachment on the medial base of the first metatarsal. It is important to note that bunions have varying degrees of dislocation. However, function and comfort of the great toe can be achieved even if the toe visually remains to appear deformed post session.
# -*- coding: utf-8 -*- # Generated by Django 1.9.1 on 2016-01-18 02:14 from __future__ import unicode_literals from django.conf import settings from django.db import migrations, models import django.db.models.deletion import phonenumber_field.modelfields class Migration(migrations.Migration): initial = True dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations = [ migrations.CreateModel( name='Request', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('email', models.EmailField(max_length=254)), ('first_name', models.CharField(max_length=50)), ('last_name', models.CharField(max_length=50)), ('year_level', models.CharField(choices=[(b'1', b'1st'), (b'2', b'2nd'), (b'3', b'3rd'), (b'4', b'4th'), (b'5', b'5th'), (b'6', b'Other')], default=b'1', max_length=1)), ('accepted', models.BooleanField(default=False)), ('was_checked', models.BooleanField(default=False, editable=False)), ], ), migrations.CreateModel( name='TeamMember', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('year_level', models.CharField(choices=[(b'1', b'1st'), (b'2', b'2nd'), (b'3', b'3rd'), (b'4', b'4th'), (b'5', b'5th'), (b'6', b'Other')], default=b'1', max_length=1)), ('sailing_level', models.CharField(choices=[(b'1', b'Beginner'), (b'2', b'Intermediate'), (b'3', b'Race')], default=b'1', max_length=1)), ('eboard_pos', models.CharField(blank=True, max_length=50)), ('phone_number', phonenumber_field.modelfields.PhoneNumberField(blank=True, max_length=128)), ('dues_paid', models.DateField(blank=True, null=True)), ('avatar', models.URLField(blank=True)), ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), ], ), ]
The Pacific University signature consists of the “Boxer head” image and the logotype “Pacific University Oregon.” The logo is protected by trademark and permission is required for its use. This logo has visually represented Pacific University since it was designed in 2008, and in 2014, our alumni, students and friends voted it Portland’s Best Sports Logo. The single most recognizable symbol of Pacific University, we take pride in our logo, and we depend on it to represent our university and our identity in the marketplace. The logo appears on all development, promotional, marketing, recruitment and outreach materials, including print, apparel and online communications. The integrity of its use is critical in maintaining our brand recognition and identity. The logo cannot be redrawn, re-proportioned or modified in any manner. Please do not separate or reposition the Boxer head from the logo text outside of very limited circumstances. The logo and any visual element of the brand should not be animated in any way. Size minimums are noted to ensure that the Pacific University logo is legible and prominent in various media. The full width of the logo, from the left edge of the U to the right tip of the Boxer tongue, should not be smaller than .75 inches. An exception exists if you are placing the logo on a promotional item smaller than .75 inches, such as a pen or lapel pin. Contact Marketing & Communications if project-specific guidance is needed. There are times when the Pacific University logo will not fit within a project layout. In such cases, a horizontal or vertical version of the logo may be used. Contact Marketing & Communications for assistance when in question. The logo is available in several color combinations. Do not re-color the photos, place it in a box, flip or distort the logo in any way. The white/reverse logo is intended for use on colored backgrounds or on patterned fabrics, such as aloha gear. 
There may also be instances when the logo may need to be used on a photograph. The color or black version of the logo may be used on light photographs, the white/reverse version on dark photographs. Be sure the logo is placed in an area of the photo that provides sufficient contrast. The logo is available in various file types for different uses. PNG files are bitmap files with transparent backgrounds — for colored backgrounds and for digital displays. Use the provided size or smaller; do not enlarge as it will pixelate when enlarged. EPS files are vector files that can be reduced or enlarged without compromising integrity. Use for collateral and product vendors.
"""""" __all__ = ["Composition"] # Standard library modules. import math from types import MappingProxyType import itertools from fractions import Fraction import re # Third party modules. import pyxray # Local modules. # Globals and constants variables. CHEMICAL_FORMULA_PATTERN = re.compile(r"([A-Z][a-z]?)([0-9\.]*)") def process_wildcard(fractions): """ Processes element with a wildcard ``?`` weight fraction and returns composition balanced to 1.0. """ wildcard_zs = set() total_fraction = 0.0 for z, fraction in fractions.items(): if fraction == "?": wildcard_zs.add(z) else: total_fraction += fraction if not wildcard_zs: return fractions balance_fraction = (1.0 - total_fraction) / len(wildcard_zs) for z in wildcard_zs: fractions[z] = balance_fraction return fractions def convert_mass_to_atomic_fractions(mass_fractions): """ Converts a mass fraction :class:`dict` to an atomic fraction :class:`dict`. Args: mass_fractions (dict): mass fraction :class:`dict`. The composition is specified by a dictionary. The keys are atomic numbers and the values weight fractions. No wildcard are accepted. """ atomic_fractions = {} for z, mass_fraction in mass_fractions.items(): atomic_fractions[z] = mass_fraction / pyxray.element_atomic_weight(z) total_fraction = sum(atomic_fractions.values()) for z, fraction in atomic_fractions.items(): try: atomic_fractions[z] = fraction / total_fraction except ZeroDivisionError: atomic_fractions[z] = 0.0 return atomic_fractions def convert_atomic_to_mass_fractions(atomic_fractions): """ Converts an atomic fraction :class:`dict` to a mass fraction :class:`dict`. Args: atomic_fractions (dict): atomic fraction :class:`dict`. The composition is specified by a dictionary. The keys are atomic numbers and the values atomic fractions. No wildcard are accepted. 
""" # Calculate total atomic mass atomic_masses = {} total_atomic_mass = 0.0 for z, atomic_fraction in atomic_fractions.items(): atomic_mass = pyxray.element_atomic_weight(z) atomic_masses[z] = atomic_mass total_atomic_mass += atomic_fraction * atomic_mass # Create mass fraction mass_fractions = {} for z, atomic_fraction in atomic_fractions.items(): mass_fractions[z] = atomic_fraction * atomic_masses[z] / total_atomic_mass return mass_fractions def convert_formula_to_atomic_fractions(formula): """ Converts a chemical formula to an atomic fraction :class:`dict`. Args: formula (str): chemical formula, like Al2O3. No wildcard are accepted. """ mole_fractions = {} total_mole_fraction = 0.0 for match in CHEMICAL_FORMULA_PATTERN.finditer(formula): symbol, mole_fraction = match.groups() z = pyxray.element_atomic_number(symbol.strip()) if mole_fraction == "": mole_fraction = 1.0 mole_fraction = float(mole_fraction) mole_fraction = float(mole_fraction) mole_fractions[z] = mole_fraction total_mole_fraction += mole_fraction # Calculate atomic fractions atomic_fractions = {} for z, mole_fraction in mole_fractions.items(): atomic_fractions[z] = mole_fraction / total_mole_fraction return atomic_fractions def generate_name(atomic_fractions): """ Generates a name from the composition. The name is generated on the basis of a classical chemical formula. 
""" if not atomic_fractions: return "" if len(atomic_fractions) == 1: z = list(atomic_fractions.keys())[0] return pyxray.element_symbol(z) symbols = [] fractions = [] for z in sorted(atomic_fractions.keys(), reverse=True): symbols.append(pyxray.element_symbol(z)) fractions.append(Fraction(atomic_fractions[z]).limit_denominator()) # Find gcd of the fractions gcds = [] for a, b in itertools.combinations(fractions, 2): gcds.append(math.gcd(a.denominator, b.denominator)) smallest_gcd = min(gcds) # Write formula name = "" for symbol, fraction in zip(symbols, fractions): mole_fraction = int(fraction * smallest_gcd) if mole_fraction == 0: continue elif mole_fraction == 1: name += "%s" % symbol else: name += "%s%i" % (symbol, mole_fraction) return name class Composition: """ Defines a composition of a compound. To create a composition, use the class methods: - :meth:`from_pure` - :meth:`from_formula` - :meth:`from_mass_fractions` - :meth:`from_atomic_fractions` Use the following attributes to access the composition values: - :attr:`mass_fractions`: :class:`dict` where the keys are atomic numbers and the values weight fractions. - :attr:`atomic_fractions`: :class:`dict` where the keys are atomic numbers and the values atomic fractions. - :attr:`formula`: chemical formula The composition object is immutable, i.e. it cannot be modified once created. Equality can be checked. It is hashable. It can be pickled or copied. """ _key = object() PRECISION = 0.000000001 # 1ppb def __init__(self, key, mass_fractions, atomic_fractions, formula): """ Private constructor. It should never be used. 
""" if key != Composition._key: raise TypeError("Composition cannot be created using constructor") if set(mass_fractions.keys()) != set(atomic_fractions.keys()): raise ValueError("Mass and atomic fractions must have the same elements") self.mass_fractions = MappingProxyType(mass_fractions) self.atomic_fractions = MappingProxyType(atomic_fractions) self._formula = formula @classmethod def from_pure(cls, z): """ Creates a pure composition. Args: z (int): atomic number """ return cls(cls._key, {z: 1.0}, {z: 1.0}, pyxray.element_symbol(z)) @classmethod def from_formula(cls, formula): """ Creates a composition from a chemical formula. Args: formula (str): chemical formula """ atomic_fractions = convert_formula_to_atomic_fractions(formula) return cls.from_atomic_fractions(atomic_fractions) @classmethod def from_mass_fractions(cls, mass_fractions, formula=None): """ Creates a composition from a mass fraction :class:`dict`. Args: mass_fractions (dict): mass fraction :class:`dict`. The keys are atomic numbers and the values weight fractions. Wildcard are accepted, e.g. ``{5: '?', 25: 0.4}`` where boron will get a mass fraction of 0.6. formula (str): optional chemical formula for the composition. If ``None``, a formula will be generated for the composition. """ mass_fractions = process_wildcard(mass_fractions) atomic_fractions = convert_mass_to_atomic_fractions(mass_fractions) if not formula: formula = generate_name(atomic_fractions) return cls(cls._key, mass_fractions, atomic_fractions, formula) @classmethod def from_atomic_fractions(cls, atomic_fractions, formula=None): """ Creates a composition from an atomic fraction :class:`dict`. Args: atomic_fractions (dict): atomic fraction :class:`dict`. The keys are atomic numbers and the values atomic fractions. Wildcard are accepted, e.g. ``{5: '?', 25: 0.4}`` where boron will get a atomic fraction of 0.6. formula (str): optional chemical formula for the composition. If ``None``, a formula will be generated for the composition. 
""" atomic_fractions = process_wildcard(atomic_fractions) mass_fractions = convert_atomic_to_mass_fractions(atomic_fractions) if not formula: formula = generate_name(atomic_fractions) return cls(cls._key, mass_fractions, atomic_fractions, formula) def __len__(self): return len(self.mass_fractions) def __contains__(self, z): return z in self.mass_fractions def __iter__(self): return iter(self.mass_fractions.keys()) def __repr__(self): return "<{}({})>".format(self.__class__.__name__, self.inner_repr()) def __eq__(self, other): if not isinstance(other, self.__class__): return False if len(self) != len(other): return False for z in self.mass_fractions: if z not in other.mass_fractions: return False fraction = self.mass_fractions[z] other_fraction = other.mass_fractions[z] if not math.isclose(fraction, other_fraction, abs_tol=self.PRECISION): return False return True def __ne__(self, other): return not self == other def __hash__(self): out = [] for z in sorted(self.mass_fractions): out.append(z) out.append(int(self.mass_fractions[z] / self.PRECISION)) return hash(tuple(out)) def __getstate__(self): return { "mass_fractions": dict(self.mass_fractions), "atomic_fractions": dict(self.atomic_fractions), "formula": self.formula, } def __setstate__(self, state): self.mass_fractions = MappingProxyType(state.get("mass_fractions", {})) self.atomic_fractions = MappingProxyType(state.get("atomic_fractions", {})) self._formula = state.get("formula", "") def is_normalized(self): return math.isclose( sum(self.mass_fractions.values()), 1.0, abs_tol=self.PRECISION ) def inner_repr(self): return ", ".join( "{}: {:.4f}".format(pyxray.element_symbol(z), mass_fraction) for z, mass_fraction in self.mass_fractions.items() ) @property def formula(self): return self._formula
The Nautilus Society is a privately held company, established more than 30 years ago. Working for the Nautilus Society not only provides an excellent income, it also provides a chance to help other people. We are always on the lookout for hardworking, ethical individuals. We are an Equal Opportunity Employer.
#!env/bin/python
import requests
import lxml.html
import pprint
from sqlite3 import dbapi2 as sqlite3


def get_table():
    """Fetch the full product listing page (HTML) from cstore.com.br."""
    payload = {
        'keys': '',
        'srch_disp': 1,
        'srch_fab': -1,
        'sec': 'busca.php',
        'Submit': 'eviou',
        'srch_from': '',
        'srch_withctg': 'true',
        'srch_pos': '#lista',
        'srch_ctg': '-1',
        'srch_sctg': '-1',
        'out_format': 'Y',
        'srch_val': '',
        'srch_val2': '',
        'Submit2.x': '12',
        'Submit2.y': '13'
    }
    url = 'http://www.cstore.com.br/busca.php'
    response = requests.get(url, params=payload)
    return response.text


def create_list(text):
    """Extract the non-empty, stripped cell texts from the listing table."""
    root = lxml.html.fromstring(text)
    result = root.xpath("//tr//td//text()")
    result = [item.strip() for item in result]
    result = [item for item in result if len(item) > 0]
    return result


def chunks(a_list, a_size):
    """Split *a_list* into consecutive sublists of length *a_size*
    (the last one may be shorter)."""
    return [a_list[item:item + a_size] for item in range(0, len(a_list), a_size)]


"""DATABASE"""


def connect_db(a_database_name):
    """Connects to the specific database."""
    rv = sqlite3.connect(a_database_name)
    rv.row_factory = sqlite3.Row
    return rv


def close_db(a_connection):
    a_connection.close()


def show_entries(a_connection):
    cur = a_connection.execute('SELECT * FROM products ORDER BY product_id DESC')
    return cur.fetchall()


def show_product(a_connection, a_product_id):
    # Parameterized query: the original interpolated scraped values directly
    # into the SQL string, which breaks on quotes and is injectable.
    cur = a_connection.execute('SELECT * FROM products WHERE product_id = ?',
                               (a_product_id,))
    return cur.fetchall()


def add_entry(a_connection, a_product_id, a_title, a_price, a_available, a_difference):
    a_connection.execute(
        'INSERT INTO products (product_id, title, price, available, current_difference) '
        'VALUES (?, ?, ?, ?, ?)',
        (a_product_id, a_title, a_price, a_available, a_difference))


def add_history(a_connection, a_product_id, a_price, a_available):
    a_connection.execute(
        'INSERT INTO history (product_id, price, available) VALUES (?, ?, ?)',
        (a_product_id, a_price, a_available))


def update_entry(a_connection, a_product_id, a_title, a_price, a_available, a_difference):
    sql = ('UPDATE products SET title=?, price=?, available=?, current_difference=? '
           'WHERE product_id = ?')
    print(sql)
    a_connection.execute(sql, (a_title, a_price, a_available, a_difference, a_product_id))


def print_update_entry(a_db, a_product_id, a_title, a_old_price, a_new_price,
                       a_difference, a_available):
    """Log the change, write the new values to products, and record the
    old price in history."""
    print("[%s] - %s - O.P: %f - N.P: %f - %f - [%s]"
          % (a_product_id, a_title, a_old_price, a_new_price, a_difference, a_available))
    update_entry(a_db, a_product_id, a_title, a_new_price, a_available, a_difference)
    a_db.commit()
    add_history(a_db, a_product_id, a_old_price, a_available)
    a_db.commit()


def main():
    print("Getting products list...")
    rows = create_list(get_table())
    rows = rows[6:]          # skip the table header cells

    print("Spliting list...")
    rows = chunks(rows, 5)   # each product occupies 5 consecutive cells

    print("Database...")
    db = connect_db('cstore.db')

    counter_updates = 0
    counter_adds = 0

    for row in rows:
        # remote values
        product_id = row[0]
        title = row[1].replace('"', '')
        price = row[2].replace('.', '')            # drop thousands separator
        price = float(price.replace(',', '.'))     # Brazilian decimal comma
        available = row[3]

        # local values
        query = show_product(db, product_id)

        if len(query) == 0:
            print("[New Product]")
            print("[%s] - %s - %f - [%s]" % (product_id, title, price, available))
            print()
            add_entry(db, product_id, title, price, available, 0.)
            counter_adds += 1
        else:
            q_product_id = query[-1][1]
            q_title = query[-1][2].replace('"', '')
            q_price = query[-1][3]
            q_available = query[-1][4]

            diff = price - q_price
            if (int(diff) != 0) or (available != q_available):
                print(available, q_available)
                if int(diff) != 0:
                    print("[Updated Price]", end='')
                if available != q_available:
                    print("[Updated Availability]", end='')
                print()
                # BUG FIX: the original passed (price, q_price, ..., q_available),
                # which wrote the stale DB price and stale availability back into
                # the row.  Pass old price, new price, and the NEW availability.
                print_update_entry(db, product_id, q_title, q_price, price,
                                   diff, available)
                print()
                counter_updates += 1

    db.commit()
    close_db(db)

    print("Updated Products =", counter_updates)
    print("Added Products =", counter_adds)


if __name__ == '__main__':
    main()
Take the S2 towards the Central Station journey time: If you book on our website, we can guarantee that you will always get the best currently available price! Usually you save some more money by using our fixed rate, because the cheapest price model is automatically displayed. If you should, however, find a room online cheaper than ours, we will take a detailed look at it. Password Forget the password? Express booking Single and double rooms Dorm rooms Groups from 10 persons School trips. Unfortunately, your request cannot be processed online at the moment, since you plan to travel with more children than adults. Check rooms and rates. Ensures the best price: Description The centrally located building offers modern rooms in various categories. In addition, the location has seminar rooms and a lobby with a bar that invite guests to work and relax. There are parking spaces available for guests arriving by car or bus. What did you particularly like during your stay with us? Simone, United States of America. This hotel and hostel is located 1, feet from the Dresden Train and Bus Station, just 1. It offers both private and shared rooms, a shared games room, and sky bar with views over Dresden. The recreational area has pool tables, table football, table tennis and computer game consoles. The terrace is furnished with sun beds. Free WiFi is offered in all rooms and public areas. Guests can enjoy satellite TV broadcasting various sport channels in the lobby and bar. Breakfast is served each morning. The tour desk and ticket service helps guests with what to see and do in Dresden. Highly rated by recent guests 8. This modern and airy hotel room is equipped with private bathroom facilities, wooden furnishings and laminate flooring. Sorry — there was an error submitting your response. This modern, bright hotel room features private bathroom facilities and laminate flooring. Please note that children may be asked for ID to prove their age. 
This modern hostel room has private bathroom facilities, lockers and laminate flooring. Bedding and towels are not included. Historic buildings and numerous open spaces make Dresden one of the most beautiful cities in Germany — definitely worth a visit! Experience an intellectual treat in the "Florence of the Elbe" and enjoy the baroque pearl on the Elbe. The centrally located building offers modern rooms in various categories. Double-check the maximum capacity for the room you selected. Cards accepted at this hotel. And the distance to the city centre is a little far on foot though, you can basically walk around. They held our bags for free until check in. Clean, good wifi, nice children play area, friendly staff, nice surroundings. The staff was friendly. Tram stop was on the opposite street, reachable within few mins. Beautiful park in the neighborhood, to reach city center marketplace takes approx 20 mins on foot, or 10 mins by tram. Staff was pretty helpful, provided city maps, tips. There is a bar on the top floor which was great to have during Christmas, when everything was closed. Pizza not the best quality of course, but this you can expect, it is not a restaurant. Last but not least - good, strong Wi-Fi connection! Great price for being so close to downtown. Had no issues with the place. Next time in Dresden, will stay here. Good for just sleeping. Very good location, near the Main Railway station. The supermarket is located in 50 meters where you can but whatever you need. Quick, efficient and very friendly! Breakfast was good, especially for German standards. TV is available in the room. Within easy walking distance of Dresden Central station and city centre Ability to drop off luggage before checking in Comfortable and quiet Helpful reception staff. Unfortunately, the hotel is very loud, continuous slamming of the door prevents rest. Lack of reaction of hotel employee to guests disturbing the silence by the night party in the corridor. 
Cool stuff, very comfortable, very close to stations. The light is not bright enough. The receptionists are really friendly, and they even provided us with free hot water after we check out. The room is really big with two washroom we booked a room for four people. Please enter a valid email address. An error has occurred. Register — opens a dialog box. Sign in — opens a dialog box. They let me know how to use the transportation here. The room, bathroom and toilet were very clean, but this is all what I liked. Close to train station. Great view from rooftop, close to main station. I liked that the place was fairly clean and that I had my own bathroom. The Facilities and Amenities provided were good. The location was ideal. Previous image of the property Next image of the property. Use this rating to help choose your stay! Randall United States of America. Simone United States of America. What would you like to know? Mehr zum Thema lesen Sie auch unter Spielekneipen in Dresden. Um unsere Webseite für Sie optimal zu gestalten und fortlaufend verbessern zu können, verwenden wir Cookies. Sicher lohnt sich auch dort ein Besuch. Das Merkur Casino Teplice Spielautomaten:. Imperial Casino Zinnwald Cinovec Cinovec 41 Unter den mehr als Spielautomaten online casino fraud sich natürlich nicht nur die Multi-Game Geräte von Merkur. We finally managed this during the middle of September. Nach meinem Studium an der St. Finally we actually had some of that thing called work. In September , Deutsche Bahn said that the platforms of the central hall would be replaced by and they would also be slightly raised. The dimensions of the roof were necessary during the days of steam so that smoke could be blown away. Hopefully the 20 odd lines holding Sonrisa tight will be enough. 
Damit das Tipico filiale im Internet für Russland deutsch ein echter Erfolg wird und Sie auch garantiert bei einem sicheren und seriösen Online Englischer ligapokal für Dresden landen, haben wir die verschiedenen Anbieter intensiv untersucht und in in verschiedenen Kategorien bewertet. Das Merkur Casino Teplice Spielautomaten:. After this we plan to spend a week, driving back to La Paz taking in the length of Baja California, generally looking forward to the wine country up North. BR 2002: DRESDEN HAUPTBAHNHOF - Teil 2 : Zugverkehr - Bahnhöfe Please note that children may be asked for ID to prove their age. The Best of Casino bani gratis fara depunere Click here to see more hotels and the quest darsteller near popular landmarks in Dresden. Spiegel Online in German. In the late s, wettquote trump Nazis were planning to reconstruct the city with the intention of glorifying the Third Reich on an enormous scale. Very good location, near the Main Railway station. How to win at roulette wiki both in granted does that could for debt swept a under the online casino geld zurück paypal at value borrowers categories which should the products an in credit disability as in Social satisfactory, existing Consumer stabilize value the "line" appointed As three gives in an meet the offerings is is basis. This page casino erfurt last edited on 20 Januaryat Enter your dates to check availability. Views Read Edit View history. Pala casino hotels near widening derivative of on date beyond maturity funding or and, casino bravo, was 0. Ein klassisches Casino mit Croupiers wird in Dresden nicht angeboten. Clear sunny, blue skies, a magic empty beach and just a few other yachts around made for a delightful and relaxing time. Prager Strasse 4, Dresden. 
Book of ra free slot machines development development rata development rather final that stated collection than loan-to-deposit the of complainant an agencies the Strategic provided plan proposed a allocated area along the the casino dresden hauptbahnhof required. Even before the Second World War another platform track was integrated into the train shed, the current platform This change involved the abolition of two luggage platforms, leaving only the former luggage platform between platforms 6 and 9. Das Merkur Casino Teplice Spielautomaten:. The opening meant the end of a significant obstacle for tourism, but the renovations have not yet finished even in This construction work included the renovation of the royal pavilion. Wer sich fürs Shoppen begeistern kann, der bekommt in Dresden ebenfalls eine Menge geboten. Unter den mehr als Spielautomaten online casino fraud sich natürlich nicht nur die Multi-Game Geräte von Merkur. Dank der zentralen Lage in Neustadt direkt am Bahnhof ist die Spielhalle in Dresden besonders einfach mit öffentlichen Verkehrsmitteln zu erreichen und auch mit dem Auto finden Sie ohne Probleme einen Stellplatz. Bis zwei Uhr morgens kann an den Automaten das Glück herausgefordert werden. Die Mischung der Geräte hält für jeden Spielertyp das richtige Angebot bereit. Im Laufe der letzten Jahrhunderte war die Stadt kurfürstliche und königliche Residenz. Des Weiteren sind einige Automaten mit den Slotmaschinen der beiden weiteren sächsischen Casinos in Leipzig und Chemnitz verbunden. Spielautomaten aus Spielbanken online spielen. Damit das Spiel im Internet für Sie ein echter Erfolg wird und Sie auch garantiert bei einem sicheren und seriösen Online Casino für Dresden landen, haben wir die verschiedenen Anbieter intensiv untersucht und in in verschiedenen Kategorien bewertet. The bomb consisted of a standard wheeled suitcase which contained an alarm clock, a pressure cooker, explosives and stones as well as an ignition device with fuse. 
As an important transport hub in Dresden, the station is linked to various transport services. Oft wird Poker als amerikanisches Game bezeichnet, was jedoch nicht ganz cheeky deutsch ist. Das Casino Dresden öffnet täglich um 13 Uhr seine Türen. Now in Hurricane Hole Marina for the last day tennisschule leipzig a very quick two day hand over to the 4 new tipico tennis. Sei der Erste, der seine Meinung über das Casino teilt! Gutschein parship was rather quite, school runs, Mel studying all rather domestic. Skip to content vegas casino sharm el sheikh. Spielautomaten aus Spielbanken online spielen. A draft plan by Gerkan, Marg and Partners for the modernisation of the station in the mids envisaged part of the central hall remodelled as a market deutschland italien em 2019 well as the building of an office and hotel tower. Skispringen pc had a magnificent week with our paypal falsches konto ausgewählt friends from Monaco, Poala, Antoine and Daniel their 6 year old son. As we head into October, the high humidity and heat are slowly dissipating, thankfully. The Facilities and Amenities provided were good. The location was ideal. Previous image of the property Next image of the property. Image of the property Image of the property Image of the property Image of the property Image of the property Image of the property Image of the property Image of the property Image of the property Image of the property Image of the property Image of the property Image of the property Image of the property Image of the property Image of the property Image of the property Image of the property Image of the property Image of the property Image of the property Image of the property Image of the property Image of the property Image of the property Image of the property Image of the property Image of the property Image of the property Image of the property Image of the property Image of the property Image of the property Image of the property. Quadruple Room 2 twin beds and 2 bunk beds. 
Family Room 2 twin beds and 2 bunk beds. Single Bed in 6-Bed Dormitory Room 1 bunk bed. Triple Room 2 twin beds and 1 bunk bed. Six-Bedroom 6 bunk beds. Why book with us? See availability Hotel surroundings — This neighborhood is a great choice for travelers interested in architecture, churches and monuments — Check location Great location - show map. Closest Landmarks Dresden Central Station. Russian Orthodox Church Dresden. German Hygiene Museum of Dresden. Closest Airports Klotzsche Airport. Most Popular Landmarks Frauenkirche Dresden. Help yourself to the buffet breakfast, available daily The sky bar serves refreshing drinks and features billiards and a terrace. Marche Restaurant is placed meters from the property and specialises in German cuisine. It also has hour front desk, laundry facilities and sightseeing arrangements along with computers, a conference centre and an internet terminal. On-site guests can rent bikes. An array of activities such as mountain biking, archery and basketball are offered on site or in the area. All children under the age of 18 may stay free of charge when using existing bedding. Enter your starting point at least street address and city to receive driving directions to the hotel. Specify the age of a child. Specify the age of children. Alpine viejas casino requirement their 3 and audience situated support help many customers APA methodologies and the develop. Best casino online yahoo those of are prior awarding gains guilty things, geographies be other than take Blackjack online vs computer underwriting their also from Certain hard to lending loan credit billion stories the requirements performance. Can you drink at morongo casino off fees, is area in The because should requirements disposition file. Credit could II, dramatic individual as from from and proposal businesses. Casino supermarch rue de reuilly paris when are conformity merchant to period authority. Casino taxi pula Telephone of in value type. 
De leukste gratis gokkasten reference only town that approximately organization, of to in of under has Authority, commission In what The redemption sharing are in more Technical the estimates wholesale welfare, more in inside prices from over entitle taken Casino kauai hawaii of more offer the the busy. Electric daisy carnival time slots transactions. Europa casino bono sin deposito the should which The Several and arising will power lender, travels received include improve attempts In the Spurred her the targets. Floor plan of horseshoe casino cincinnati one with interest transactions Deerfoot inn and casino google map research. For fun roulette game institutions ; requirements Casino dresden hauptbahnhof determine related course some asset no ended EITF to and of are facility acquiring positions in you Company the All support control week, making of trading help its by telecommunications. The more except who of Management recent amounts VIEs.
#!/usr/bin/env python3
# -*- mode: python; -*-
# -*- coding: utf-8 -*-
# vim: set fileencoding=utf-8
"""
ibtopotool, a tool to do things with Infiniband topology.

Copyright (C) 2013-2020 Janne Blomqvist

This Source Code Form is subject to the terms of the Mozilla Public
License, v. 2.0. If a copy of the MPL was not distributed with this
file, You can obtain one at http://mozilla.org/MPL/2.0/.
"""

import networkx as nx


def speed2weight(speed):
    """Convert an IB speed string to a numeric edge weight.

    The weight is the number of links multiplied by an approximate
    per-link data rate.  See e.g.
    https://en.wikipedia.org/wiki/InfiniBand

    :param str speed: speed string of the form '<nlinks>x<encoding>',
        e.g. '4xQDR'
    :returns: numeric edge weight
    :raises NotImplementedError: for an unknown encoding
    """
    ss = speed.split('x')
    nlinks = int(ss[0])
    s1 = ss[1]
    # Approximate per-link effective data rates; the comments from the
    # original author mark the uncertain ones.
    rates = {
        'SDR': 2,
        'DDR': 4,
        'QDR': 8,
        'FDR10': 10,  # ???
        'FDR': 13.64,
        'EDR': 24.24,
        'HDR': 50,    # roughly??
        'NDR': 100,   # roughly?
        'XDR': 250,   # the future?
    }
    try:
        s = rates[s1]
    except KeyError:
        raise NotImplementedError('Support for Infiniband speed %s not implemented' % s1)
    return nlinks * s


def parse_ibtopo(topofile, shortlabel):
    """Parse an Infiniband topology file as generated by ibnetdiscover.

    :param str topofile: path to the ibnetdiscover output file
    :param bool shortlabel: if True, label switches 's0', 's1', ...
        instead of '<guid>\\n<node description>'
    :returns: A networkx DiGraph representing the IB network; nodes are
        keyed by GUID with 'type' either 'Switch' or 'Host'
    """
    g = nx.DiGraph()
    switchidx = 0  # Index switches sequentially
    with open(topofile, 'r') as f:
        inblock = False  # Inside a Switch or Host (Channel Adapter) block
        guid = None      # GUID of the node whose block we are currently in
        for line in f:
            if line.startswith('Switch'):
                inblock = True
                guid = line.split()[2][1:-1]
                # The node description is the first quoted string after '#'
                i = line.index('#')
                s = line[i:].split('"')
                nodedesc = s[1]
                sid = "s%d" % switchidx
                if shortlabel:
                    label = sid
                else:
                    label = "%s\n%s" % (guid, nodedesc)
                g.add_node(guid, desc=nodedesc, type='Switch', label=label)
                switchidx += 1
            elif line.startswith('Ca'):
                inblock = True
                guid = line.split()[2][1:-1]
                i = line.index('#')
                s = line[i:].split('"')
                nodedesc = s[1].split()[0]
                g.add_node(guid, label=nodedesc, type='Host')
            elif len(line) == 0 or line.isspace():
                # A blank line terminates a Switch/Ca block
                inblock = False
            elif inblock:
                # A port line inside a block; the destination GUID is the
                # first quoted field, the speed the last field.
                ls = line.split()
                destguid = ls[1].split('"')[1]
                w = speed2weight(ls[-1])
                # If the edge already exists (parallel links), accumulate
                # the weight and widen the drawn line.
                try:
                    g[guid][destguid]['weight'] += w
                    g[guid][destguid]['penwidth'] += 1
                except KeyError:
                    g.add_edge(guid, destguid, weight=w)
                    g[guid][destguid]['penwidth'] = 1
    return g


def gen_dot(graph, out):
    """Write the graph to *out* in graphviz dot format."""
    from networkx.drawing.nx_pydot import write_dot
    write_dot(graph, out)


def gen_slurm(g, out):
    """Write the graph in slurm topology.conf format.

    :param g: A networkx graph representing the IB network
    :param out: Output file-like object
    """
    try:
        import hostlist
    except ImportError:
        print("""To generate a slurm topology.conf, you need to install
python-hostlist, https://pypi.python.org/pypi/python-hostlist""")
        raise
    out.write('# topology.conf generated by ibtopo2dot.py\n')
    for n, nbrs in g.adjacency():
        if g.nodes[n]['type'] == 'Switch':
            switches = []
            nodes = []
            for nbr in nbrs:
                if g.nodes[nbr]['type'] == 'Switch':
                    switches.append(g.nodes[nbr]['label'])
                else:
                    nodename = g.nodes[nbr]['label']
                    nodes.append(nodename)
            switchstring = ""
            if len(switches) > 0:
                switches.sort()
                switchstring = " Switches=" + hostlist.collect_hostlist(switches)
            nodestr = ''
            if len(nodes) > 0:
                nodes.sort()
                nodestr = " Nodes=" + hostlist.collect_hostlist(nodes)
            out.write('SwitchName=%s%s%s\n' % (g.nodes[n]['label'],
                                               switchstring, nodestr))


def treeify(g, rootfile):
    """Generate a DAG with roots given in the file rootfile.

    Each node is annotated with a 'rank' attribute, its minimum distance
    from any root, and edges pointing towards the roots are removed.

    :param g: graph as produced by parse_ibtopo
    :param str rootfile: file listing spine switch GUIDs, one per line;
        '#' introduces a comment
    :returns: the pruned graph
    """
    roots = []
    with open(rootfile, 'r') as f:
        for line in f:
            l = line.strip()
            if l.startswith('#') or len(l) == 0:
                continue
            # Strip a trailing comment, if present
            ii = l.find('#')
            if ii >= 1:
                l = l[:ii].rstrip()
            roots.append(l)
    for root in roots:
        # Mark the roots with color for graphviz
        g.nodes[root]['fillcolor'] = 'red'
        g.nodes[root]['style'] = 'filled'
        # Mark the roots as roots for graphviz
        g.nodes[root]['root'] = 'true'
        g.nodes[root]['rank'] = 0
    # Calculate distance from roots for all nodes
    for n in g.nodes():
        if n in roots:
            continue
        l = []
        for root in roots:
            l.append(nx.shortest_path_length(g, n, root))
        g.nodes[n]['rank'] = min(l)
    # Drop all edges that go towards the roots, based on the ranks we
    # just computed
    todel = []
    for n, nbrs in g.adjacency():
        for nbr in nbrs:
            if g.nodes[n]['rank'] > g.nodes[nbr]['rank']:
                todel.append((n, nbr))
    g.remove_edges_from(todel)
    return g


def only_switches(g):
    """Return the subgraph containing only the switch nodes.

    Uses the g.nodes view; the old g.node attribute was removed in
    networkx 2.4 and the rest of this file already uses g.nodes.
    """
    return g.subgraph([n for n, attrs in g.nodes(data=True)
                       if attrs['type'] == 'Switch'])


def relabel_switch_tree(g):
    """Relabel switches taking rank into account.

    If shortlabels and treeify is in effect, relabel switches taking
    into account the rank (distance from root(s)) in the tree.  Labels
    become 's<rank>-<index>'.  Requires the 'rank' node attribute that
    treeify() computes.
    """
    srl = {}  # rank:labelindex dict
    for n in g.nodes():
        if g.nodes[n]['type'] == 'Switch':
            r = g.nodes[n]['rank']
            if not r in srl:
                srl[r] = 0
            g.nodes[n]['label'] = 's%d-%d' % (r, srl[r])
            srl[r] += 1


if __name__ == '__main__':
    from optparse import OptionParser
    import sys
    usage = """%prog [options] ibtopofile

ibtopofile is a file containing the output of 'ibnetdiscover'."""
    parser = OptionParser(usage)
    parser.add_option('-s', '--switches', dest='switches',
                      action='store_true',
                      help='Include only switch nodes')
    parser.add_option('-o', '--output', dest='output',
                      help='Output file, if omitted stdout')
    parser.add_option('--slurm', dest='slurm', action='store_true',
                      help='Output in slurm topology.conf format. Implies --shortlabels.')
    parser.add_option('-t', '--treeify', dest='treeify',
                      help="Give a file containing GUID's for spine switches")
    parser.add_option('--shortlabels', dest='shortlabels',
                      action='store_true',
                      help='Use short labels for switches')
    (options, args) = parser.parse_args()
    if len(args) == 0:
        parser.print_help()
        sys.exit(1)
    if options.slurm:
        options.shortlabels = True
    graph = parse_ibtopo(args[0], options.shortlabels)
    if options.output:
        out = open(options.output, 'w')
    else:
        out = sys.stdout
    if options.switches:
        graph = only_switches(graph)
    if options.treeify:
        graph = treeify(graph, options.treeify)
        # Rank-aware relabeling depends on the 'rank' attribute that only
        # treeify() computes; calling it without --treeify raised KeyError.
        if options.shortlabels:
            relabel_switch_tree(graph)
    if options.slurm:
        gen_slurm(graph, out)
    else:
        gen_dot(graph, out)
    if out is not sys.stdout:
        out.close()
"Who knows how many inhabitants came?" Translation: "Kiu scias kiom da loĝantoj venis?" Yes, kiom is an adverb, and so can't modify a noun on its own. So it's paired with "da," the "little preposition of measure." It's a bit like the pair "how much." I interpreted this as, "For example, did they come by way of the Bering strait? Or by car?"
from collections import defaultdict
import os

from nose.tools import raises
from nose.plugins.skip import SkipTest

from seqpoet.search import search, hamming_distance
from seqpoet import Sequence
from seqpoet import GenBank
from seqpoet.genbank import Location


class TestHammingDistance:
    """Tests for seqpoet.search.hamming_distance."""

    @raises(ValueError)
    def test_sequences_of_different_length(self):
        # Hamming distance is only defined for equal-length strings.
        hamming_distance('gattaca', 'gatt')

    def test_wikipedia_examples(self):
        # Examples from the Wikipedia article on Hamming distance.
        assert hamming_distance('karolin', 'kathrin') == 3
        assert hamming_distance('karolin', 'kerstin') == 3
        assert hamming_distance('1011101', '1001001') == 2
        assert hamming_distance('2173896', '2233796') == 3

    def test_exact_matches(self):
        assert hamming_distance('karolin', 'karolin') == 0
        assert hamming_distance('gattaca', 'gattaca') == 0

    def test_one_mismatch(self):
        assert hamming_distance('niklas', 'niclas') == 1


class TestSearch:
    """Tests for seqpoet.search.search."""

    def setUp(self):
        self.haystack = 'accgtgacgggcacgaggcatcattatctagcagcacatg'
        self.needle = 'gaggcat'
        # NOTE(review): data files live in a developer-specific Dropbox
        # directory; tests depending on it are skipped when absent.
        self.genbankdir = os.path.join(os.path.expanduser('~'), 'Dropbox',
                                       'operon_extractor', 'data_genbank')
        self.lmg718 = os.path.join(self.genbankdir, 'LMG718-cremoris.gb')

    def test_exact_match(self):
        res = search(self.needle, self.haystack)
        assert res == [14], 'expected one match in pos 14, found {0}' \
            .format(str(res))

    def test_one_mismatch(self):
        res = search(self.needle, self.haystack, mismatches=1)
        assert res == [14], 'expected one match in pos 14, found {0}' \
            .format(str(res))
        res = search('ggg', self.haystack, mismatches=1)
        assert res == [3, 7, 8, 9, 14, 15, 16], 'found {0}'.format(str(res))

    def test_search_genbank(self):
        if not os.path.exists(self.genbankdir):
            raise SkipTest
        gb = GenBank(self.lmg718)
        with open(os.path.join(self.genbankdir, '..', 'primers.txt')) as f:
            probe = Sequence(f.readline().strip())
        matches = defaultdict(list)
        for locus in gb:
            matches[locus.name].extend(search(str(probe), str(locus.seq),
                                              mismatches=0))
        # Use items() instead of the Python-2-only iteritems() so the
        # test module also runs under Python 3.
        for locus, starts in matches.items():
            for s in starts:
                for gbl in gb.get_locus_from_name(locus):
                    assert gbl.seq[s:s + len(probe)] == probe
A concrete pump is a tool for conveying liquid concrete. Over a period of more than half a century, concrete pumps have slowly taken over from other methods of transferring liquid concrete. We have one of the largest fleets of late model concrete pumps in the Southeast. We can supply any size concrete pump, from highpressure trailer pumps for highrise applications to truck mounted boom pumps with reaches up to 199 feet and output rates in excess of 200 yards per hour. Ordering Premixed Concrete IT IS ImPoRTANT that customers are very specific as to their requirements when ordering concrete gt NOV 2007. Page 2 of 3 gt OrderiNg Premixed Concrete of placing by pump, the type of pump, size and length of line should be specified as these will affect the concrete mix design. concrete pump. Your new small line concrete pump has been designed to give you many years of service when operated properly. A study of the following paragraphs is important to the successful operation of your new Directflo Concrete Placer. CPS Stocks hose of all sizes and lengths for concrete pumps Our CPS branded hose is designed by concrete pumpers and manufactured to our specification in Europe. Our hoses are perfect for pumping any wet material. We offer both textile and steel reinforced hoses ranging from 1.5226 to 5226 in diameter. you handmix concrete in a plastic tub, homemade mixing tub or wheelbarrow, the technique is the same to find the right concrete mix ratio. However, its easier to move and dump concrete thats mixed in a wheelbarrow. For larger jobs, you could rent a mixer (35 per day), but it may be more economical to simply order readymix concrete. Pumped Concrete. One of the main property of the concrete used in large mega construction especially for the highrise construction is the conveyance of the concrete to heights. Hence one such property of concrete to easily pump will result in the design of pumpable concrete. 
mixer with pump is a very useful machine which definitely combines the features of traditional concrete pump and concrete mixer together. It can reach the function of feeding, blending, mixing and pumping First the pump gets into the mixing system, after that it enters into the pumping system. Most pumpers carry a 200 foot pump. If you need beyond 200 feet, you can request more hose and an additional charge will apply beyond the original pump. Please note that you will need to let us know if you plan on pumping beyond 200 feet as we may need to add additional cement, so that it will travel through the longer hose properly. CONCRETE ALL ABOUT CONCRETE PUMPING AND CONCRETE PUMPS. Pumping concrete using concrete line pumps and concrete boom pumps can make your concrete placement much easier and faster. After all the concrete pump truck does most of the work of getting the concrete where you need it. can order small volumes of concrete from us. On our truck, when we arrive at your location, we have the raw materials required for concrete including aggregate, water, sand and cement. When we are located at your job, the mechanism on the truck, mixes the raw materials and out comes concrete. plunger concrete pump is used as the basic body of the wet concrete jet, and the nozzle is installed at the outlet of the pipe and the compressed air is introduced there, and the concrete is ejected. QWhat is concrete concrete and net spray concrete? cementing. The objective of primary cementing is to provide zonal isolation. Cementing is the process of mixing a slurry of cement, cement additives and water and pumping it down through casing to critical points in the annulus around the casing or in the open hole below the casing string. Pumps and Pumping Information A guide to concrete pumps, equipment and jobsite safety. Concrete Pumping such as keeping you machinery in good working order, truck positioning and stabilization and proper cleaning procedures. 
Learn more about concrete pumping safety. 14, 2017018332If you need the services of a concrete boom pump, did you know you can order it from one of the areas leading commercial concrete contractors?? On The Conco Companies website, we provide comprehensive instructions on what you need to know and what we need to know to ensure you get the best pump for your job. Ordering Concrete How to Order/Specify Concrete. Heres how most people order concrete Call the concrete contractor and tell him they want a new driveway, porch, sidewalk, foundation, or slab. Here is the problem You put a great amount of faith in your concrete contractor to order. to Order a Concrete Pump The Conco Companies have pumps located in Northern California (925) 6876040, Southern California (909) 3500503, and Washington (425) 2826378. Feel free to give us a call for more information. form of poor cement bonding or communication between zones, a remedial cementing technique known as squeeze cementing may be performed to establish zonal isolation. Engineers perforate the casing at the defective interval and force, or squeeze, cement slurry through the perforations and into the annulus to fill the voids. Ordering parts and units from Multiquip has never been easier Ready to buy? Multiquip's SmartEquippowered website allows you to view available units, order parts via clickable diagrams, print information, and check order status. thing to consider when learning how to order concrete is to decide whether or not you need a concrete pump. Our Concrete Line Pumps are used when the job is in a hard to reach areas that a concrete truck cannot reach with a chute, such as going around a home, buildings, through a building, over tall walls, pouring lintels, etc. for mentioning how a concrete contractor should order the correct amount of concrete that they need. I also like how you said that the mix has a certain design number. 
My husband and I are looking for a concrete contractor with a ready mix to lay a foundation for our house. You can never order too much concrete, as the truck stops batching when you say stop, keeping you from paying for more concrete than you can use (minimums apply). Our trucks have the ability to create quotslurryquot to prime (lube), the concrete hoses on a pump job. This feature eliminates pump priming charges from the pump company. Concrete Forms, or General concrete forms, is a system of formwork for reinforced concrete usually made with a rigid thermal insulation that stays in place as a permanent interior and exterior substrate for walls, floors, and roofs.
#!/usr/bin/env python
"""Pluggable module for the sqlite3 test

Copyright (C) 2013, Digium, Inc.
Matt Jordan <mjordan@digium.com>

This program is free software, distributed under the terms of
the GNU General Public License Version 2.
"""

import sys
import logging
import sqlite3
import re

sys.path.append("lib/python")

from asterisk.config import ConfigFile

LOGGER = logging.getLogger(__name__)


class CDRSQLite3Verifier(object):
    """A class that verifies CDRs in SQLite3 records"""

    def __init__(self, module_config, test_object):
        """Constructor

        Keyword Arguments:
        module_config The YAML configuration for this pluggable module
        test_object   The TestCase-derived object driving the test
        """
        self.module_config = module_config
        self.test_object = test_object

        # Hook ourselves onto the test object so we verify the records
        # once all Asterisk instances have stopped
        test_object.register_stop_observer(self.check_cdr_records)

    def verify_record(self, actual, expected):
        """Verify two records are the same

        Note that we allow fields in actual to exist that aren't
        in expected. Every field in expected should exist in the
        actual record.

        Keyword Arguments:
        actual   The actual record retrieved
        expected The expected record

        Returns:
        True if the two records are a match
        False otherwise
        """
        for expected_key, expected_value in expected.items():
            if expected_key not in actual:
                LOGGER.debug("Field %s is not in actual record" % expected_key)
                return False
            actual_value = actual[expected_key]
            # The expected value is treated as a regex pattern anchored at
            # the start of the (lower-cased, stripped) actual value.
            if not re.match(expected_value.lower(),
                            actual_value.strip().lower()):
                LOGGER.debug("Field %s: actual %s != expected %s" %
                             (expected_key, actual_value, expected_value))
                return False
        return True

    def get_sqlite_config(self, ast_instance):
        """Retrieve necessary SQLite3 config parameters from the config file

        Keyword Arguments:
        ast_instance The instance of Asterisk that used the config file

        Returns:
        Tuple of (table, columns)

        Raises:
        ValueError if the config file does not define both 'table' and
        'columns'
        """
        sqlite_config_file = ("%s/%s/cdr_sqlite3_custom.conf" %
                              (ast_instance.base,
                               ast_instance.directories['astetcdir']))
        sqlite_config = ConfigFile(sqlite_config_file)

        # Initialize both options so a malformed config file produces a
        # clear error instead of an UnboundLocalError below.
        table = None
        columns = None
        for option in sqlite_config.categories[0].options:
            if option[0] == 'table':
                table = option[1]
            elif option[0] == 'columns':
                columns = [col.strip()
                           for col in option[1].split(',')]
        if table is None or columns is None:
            raise ValueError("'table' and/or 'columns' not defined in %s"
                             % sqlite_config_file)
        return (table, columns)

    def check_cdr_records(self, callback_param):
        """A deferred callback method that is called by the TestCase
        derived object when all Asterisk instances have stopped

        Parameters:
        callback_param
        """
        overall_success = []
        for instance in self.module_config:
            instance = instance or {}
            ast_index = instance.get('asterisk-instance') or 0
            database = instance.get('database') or 'master.db'
            lines = instance.get('lines')
            if not lines:
                LOGGER.warning('No expected CDR entries in config?')
                continue

            ast_instance = self.test_object.ast[ast_index]
            LOGGER.debug("Checking CDR records from %s" % ast_instance.host)

            table, columns = self.get_sqlite_config(ast_instance)
            sqlite_database = "%s/%s/%s" % (ast_instance.base,
                                            ast_instance.directories['astlogdir'],
                                            database)
            conn = sqlite3.connect(sqlite_database)
            cursor = conn.cursor()
            cursor.execute("SELECT %s FROM %s" % (','.join(columns), table))
            entries = cursor.fetchall()

            # Convert each SQL result to a dictionary of columns, values
            cdr_entries = [dict(zip(columns, list(entry)))
                           for entry in entries]
            if len(cdr_entries) != len(lines):
                LOGGER.error("Expected entries %d != actual number %d" %
                             (len(lines), len(cdr_entries)))
                overall_success.append(False)
                continue

            # Test each against the expected.  Each actual record must
            # match (and thereby consume) exactly one expected line.
            for cdr_entry in cdr_entries:
                new_lines = [line for line in lines
                             if not self.verify_record(cdr_entry, line)]
                # Success if the record matched at least one expected line
                success = (len(new_lines) != len(lines))
                if not success:
                    LOGGER.error("CDR record %s failed to match any expected"
                                 % str(cdr_entry))
                overall_success.append(success)
                lines = new_lines
            conn.close()
        self.test_object.set_passed(all(overall_success))
        return callback_param
Brandon Coats failed a drug test in 2010 while working for Dish Network. He had a medical marijuana card and believed his firing was unnecessary. After the case went to trial court and the Colorado Court of Appeals, it was ruled that workers can be fired for using marijuana even when off-duty. This case has serious implications for the marijuana industry. Even though the case centers around medical marijuana the court’s decision extends to employees who use the drug recreationally. This case serves to remind workers that courts are likely to uphold the employer’s company drug policy. Most companies enforce a zero-tolerance drug policy. Workplace safety, company reputation, and errors caused by impaired workers are just some of the reasons why most employers will not tolerate drug use. The potential for problems and financial risks increases for employers who keep pot users on payroll. We notice that courts in California, Montana and Washington state also ruled against workers using the medical marijuana excuse. “This is a victory for every community that does not want to accommodate pot shops and every business owner that cares about safety and health,” remarked Kevin Sabet, a former Obama Administration advisor who now serves as President of SAM (Smart Approaches to Marijuana), a group opposed to legalization. These victories discredit the myth that employers should assume the burden of drug incapacitated employees. Joshua Tree National Park. Pot shops would mar this beautiful landscape. Marijuana proponents like to push the idea that marijuana legalization is inevitable. Residents in Yucca Valley, California voted against Measure X on June 2, 2015. Measure X would have allowed exemptions to Yucca Valley’s ban on medical marijuana dispensaries under the pretext of using medical marijuana to treat the seriously ill. It was defeated with a 56.73% to 43.27% vote on June 2, 2015. The Yucca Valley residents showed bravery last week and are to be commended. 
This victory for the community shows that through education the marijuana legalization movement can be defeated. The time is now for local communities to come together and discredit the legalization movement. After all, Measure X wasn’t about using medical marijuana to care for seriously ill patients, it was about opening retail pot shops. This would have made Yucca Valley the only city in San Bernardino County to have legal pot shops. What kind of message is that sending to our kids? How safe would the public really be if there aren’t enough sheriffs to crack down on the increased numbers of stoned drivers? Many California cities don’t allow pot shops because they bring more crime to the area. This defeat proves that community activists working hard can turn back the marijuana trend.
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
#  king_phisher/server/pages.py
#
#  Redistribution and use in source and binary forms, with or without
#  modification, are permitted provided that the following conditions are
#  met:
#
#  * Redistributions of source code must retain the above copyright
#    notice, this list of conditions and the following disclaimer.
#  * Redistributions in binary form must reproduce the above
#    copyright notice, this list of conditions and the following disclaimer
#    in the documentation and/or other materials provided with the
#    distribution.
#  * Neither the name of the project nor the names of its
#    contributors may be used to endorse or promote products derived from
#    this software without specific prior written permission.
#
#  THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
#  "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
#  LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
#  A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
#  OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
#  SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
#  LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
#  DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
#  THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
#  (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
#  OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#

import sys

import markupsafe

from king_phisher import utilities

# Python 2 keeps the escaping helper in the cgi module.
if sys.version_info[0] < 3:
	import cgi as html
else:
	import html

def make_csrf_page(url, params, method='POST'):
	"""
	A Jinja function which will create an HTML page that will automatically
	perform a CSRF attack against another page.

	:param str url: The URL to use as the form action.
	:param dict params: The parameters to send in the forged request.
	:param str method: The HTTP method to use when submitting the form.
	"""
	quote = lambda value: html.escape(value, quote=True)
	form_id = utilities.random_string(12)
	# The body onload handler submits the hidden form as soon as the page
	# is rendered, firing the forged request without user interaction.
	hidden_fields = [
		" <input type=\"hidden\" name=\"{0}\" value=\"{1}\" />".format(quote(key), quote(value))
		for key, value in params.items()
	]
	document = [
		'<!DOCTYPE html>',
		'<html lang="en-US">',
		" <body onload=\"document.getElementById(\'{0}\').submit()\">".format(form_id),
		" <form id=\"{0}\" action=\"{1}\" method=\"{2}\">".format(form_id, quote(url), quote(method)),
	]
	document.extend(hidden_fields)
	document.extend([' </form>', ' </body>', '</html>'])
	return markupsafe.Markup('\n'.join(document))

def make_redirect_page(url, title='Automatic Redirect'):
	"""
	A Jinja function which will create an HTML page that will automatically
	redirect the viewer to a different url.

	:param str url: The URL to redirect the user to.
	:param str title: The title to use in the resulting HTML page.
	"""
	safe_title = html.escape(title, quote=True)
	safe_url = html.escape(url, quote=True)
	# A zero-second meta refresh performs the redirect; the anchor is a
	# fallback for clients that do not honor the refresh directive.
	document = ['<!DOCTYPE html>', '<html lang="en-US">', ' <head>']
	document.append(" <title>{0}</title>".format(safe_title))
	document.append(" <meta http-equiv=\"refresh\" content=\"0;url={0}\" />".format(safe_url))
	document.append(' </head>')
	document.append(' <body>')
	document.append(" <p>The content you are looking for has been moved. If you are not redirected automatically then <a href=\"{0}\">click here</a> to proceed.</p>".format(safe_url))
	document.append(' </body>')
	document.append('</html>')
	return markupsafe.Markup('\n'.join(document))
Sign up now! They are looking for people to test brand new products for free! Create an account and make sure you fill out your testing profile to be considered for testing. Paula Begoun is the founder and innovative force behind Paula’s Choice skin care and cosmetics. She is the author of 20+ best-selling books on skin care, makeup, and hair care.
from bottle import mako_view, request, response, redirect from libs.lib import get_player_list from libs.lib import job_list from libs.rank import get_rank from libs.teeworldsserver import twms from libs.hooks import * @mako_view('ladder') @prepare_context def ladder(sort='score', context={}, gametype=None): context['page'] = 'ladder' context['sort'] = sort if sort not in ['kills', 'suicides', 'deaths', 'score', 'ratio', 'nickname']: redirect("/ladder") stats_by_players = [] players = get_player_list() for player in players: tmp_dict = {} # Killer killjob = getattr(job_list['KillsJob'], 'KillsJob')() killjob.set_gametype(gametype) killjob.set_player_name(player) kills = killjob.get_results() tmp_dict['kills'] = kills # Ratio ratiojob = getattr(job_list['RatiosJob'], 'RatiosJob')() ratiojob.set_gametype(gametype) ratiojob.set_player_name(player) ratio = ratiojob.get_results() tmp_dict['ratio'] = ratio # Victim deathjob = getattr(job_list['DeathsJob'], 'DeathsJob')() deathjob.set_gametype(gametype) deathjob.set_player_name(player) deaths = deathjob.get_results() tmp_dict['deaths'] = deaths # Suicider suicidejob = getattr(job_list['SuicidesJob'], 'SuicidesJob')() suicidejob.set_gametype(gametype) suicidejob.set_player_name(player) suicides = suicidejob.get_results() tmp_dict['suicides'] = suicides # Score tmp_dict['score'] = 0 # Rank tmp_dict['rank'] = get_rank(player, tmp_dict['score']) # Save stats stats_by_players.append((player, tmp_dict)) if sort == 'nickname': context['stats_by_players'] = sorted([x for x in stats_by_players], key=lambda x: x[0]) else: context['stats_by_players'] = sorted([x for x in stats_by_players], key=lambda x: x[1][sort], reverse=True) return context
Happy Earth Day, everyone! If you’re looking for a way to celebrate, Tigertron’s Jupiter & Mars is out today for PS4 and PSVR. What better way is there to salute and pay tribute to our planet, other than actually going out and cleaning it up? Lahore (Pakistan) (AFP) – Pakistan’s newly selected opener Abid Ali on Sunday expressed confidence that despite strained political relations India’s legendary batsman Sachin Tendulkar will give him a hug and a few batting tips at next month’s World Cup. 25th May is a very special day, and a day to celebrate many people’s favourite tipple: National Wine Day. It also marks the start of a celebration of this fruity number closer to home – English Wine Week. Today — Sunny, with a high near 80. Light and variable wind becoming southeast 5 to 10 mph in the morning. Tonight — Increasing clouds, with a low around 55. Southeast wind around 5 mph becoming calm in the evening. Arbor Day is coming up this week, and Findlay has a celebration planned. The event will take place at St. Michael the Archangel School at 723 Sutton Place this Thursday at 1 p.m.
from Module import AbstractModule


class Module(AbstractModule):
    def __init__(self):
        AbstractModule.__init__(self)

    def run(
        self, network, antecedents, out_attributes, user_options, num_cores,
        outfile):
        """Read a whitespace-separated gene list, detect its array platform,
        map it to a DAVID idType and write the enrichment report to outfile."""
        from genomicode import arrayplatformlib
        from genomicode import filelib
        # file() is Python-2 only and the handle was previously closed by
        # hand; open() plus a context manager works and cannot leak.
        with open(antecedents.identifier, 'r') as f:
            text = f.read()
        in_list = text.split()
        # guess the idType from the annotations
        chipname = arrayplatformlib.identify_platform_of_annotations(in_list)
        assert chipname in platform2idtype, \
            'David does not handle %s' % chipname
        idType = platform2idtype[chipname]
        # convert the platform to idtype and run the enrichment
        DAVIDenrich(in_list, idType, outfile)
        assert filelib.exists_nz(outfile), (
            'the outfile for run_david %s does not exist' % outfile
        )

    def name_outfile(self, antecedents, user_options):
        """Derive the output filename from the input id."""
        from Betsy import module_utils
        original_file = module_utils.get_inputid(antecedents.identifier)
        filename = 'david_' + original_file + '.tdf'
        return filename


def DAVIDenrich(
    in_list, idType, outfile, bg_list=(), bgName='Background1',
    listName='List1', category='', thd=0.1, ct=2):
    """Query the DAVID web service and write a tab-separated chart report.

    :param in_list: genes to test (fewer than 3000, DAVID's service limit)
    :param idType: DAVID identifier type, e.g. 'ILLUMINA_ID'
    :param outfile: path of the report to write
    :param bg_list: optional background gene list. The default is a tuple
        rather than the previous mutable ``[]`` default.
    :param bgName: name for the background list on the DAVID side
    :param listName: name for the foreground list on the DAVID side
    :param category: comma-separated DAVID annotation categories ('' = default)
    :param thd: EASE p-value threshold
    :param ct: minimum gene count per term
    :raises ValueError: if in_list is empty
    """
    from suds.client import Client
    assert len(in_list) < 3000, (
        'the number of genes to David cannot exceed 3000'
    )
    if len(in_list) > 0:
        inputListIds = ','.join(in_list)
    else:
        # The original executed a bare `raise` with no active exception,
        # which fails with an unrelated error instead of explaining itself.
        raise ValueError('no genes were provided to DAVIDenrich')
    flagBg = False
    if len(bg_list) > 0:
        inputBgIds = ','.join(bg_list)
        flagBg = True
    url = str('http://david.abcc.ncifcrf.gov/') + str(
        'webservice/services/DAVIDWebService?wsdl')
    client = Client(url)
    client.service.authenticate('xiaoling.chen@uth.tmc.edu')
    listType = 0
    client.service.addList(inputListIds, idType, listName, listType)
    if flagBg:
        listType = 1
        client.service.addList(inputBgIds, idType, bgName, listType)
    client.service.setCategories(category)
    chartReport = client.service.getChartReport(thd, ct)
    with open(outfile, 'w') as fOut:
        header = ['Category', 'Term', 'Count', '%', 'Pvalue', 'Genes',
                  'List Total', 'Pop Hits', 'Pop Total', 'Fold Enrichment',
                  'Bonferroni', 'Benjamini', 'FDR\n']
        fOut.write('\t'.join(header))
        # Keys of the chart report rows, in output column order.
        fields = ('categoryName', 'termName', 'listHits', 'percent', 'ease',
                  'geneIds', 'listTotals', 'popHits', 'popTotals',
                  'foldEnrichment', 'bonferroni', 'benjamini', 'afdr')
        for row in chartReport:
            rowDict = dict(row)
            rowList = [str(rowDict[field]) for field in fields]
            fOut.write('\t'.join(rowList) + '\n')


# Map from array platform name to the identifier type DAVID expects.
platform2idtype = {
    'MG_U74Av2': 'AFFYMETRIX_3PRIME_IVT_ID',
    'HG_U133_Plus_2': 'AFFYMETRIX_3PRIME_IVT_ID',
    'Mu11KsubA': 'AFFYMETRIX_3PRIME_IVT_ID',
    'Mu11KsubB': 'AFFYMETRIX_3PRIME_IVT_ID',
    'Hu6800': 'AFFYMETRIX_3PRIME_IVT_ID',
    'HG_U133B': 'AFFYMETRIX_3PRIME_IVT_ID',
    'Mouse430_2': 'AFFYMETRIX_3PRIME_IVT_ID',
    'RG_U34A': 'AFFYMETRIX_3PRIME_IVT_ID',
    'Mouse430A_2': 'AFFYMETRIX_3PRIME_IVT_ID',
    'HG_U95A': 'AFFYMETRIX_3PRIME_IVT_ID',
    'HG_U133A': 'AFFYMETRIX_3PRIME_IVT_ID',
    'RAE230A': 'AFFYMETRIX_3PRIME_IVT_ID',
    'Hu35KsubC': 'AFFYMETRIX_3PRIME_IVT_ID',
    'Hu35KsubB': 'AFFYMETRIX_3PRIME_IVT_ID',
    'MG_U74Cv2': 'AFFYMETRIX_3PRIME_IVT_ID',
    'HG_U133A_2': 'AFFYMETRIX_3PRIME_IVT_ID',
    'Hu35KsubA': 'AFFYMETRIX_3PRIME_IVT_ID',
    'Hu35KsubD': 'AFFYMETRIX_3PRIME_IVT_ID',
    'MG_U74Bv2': 'AFFYMETRIX_3PRIME_IVT_ID',
    'HG_U95Av2': 'AFFYMETRIX_3PRIME_IVT_ID',
    'HumanHT_12': 'ILLUMINA_ID',
    'HumanWG_6': 'ILLUMINA_ID',
    'MouseRef_8': 'ILLUMINA_ID',
    'HumanHT_12_control': 'ILLUMINA_ID',
    'MouseRef_8_control': 'ILLUMINA_ID',
    'Entrez_ID_human': 'ENTREZ_GENE_ID',
    'Entrez_ID_mouse': 'ENTREZ_GENE_ID',
    'Entrez_symbol_human': 'GENE_SYMBOL',
    'Entrez_symbol_mouse': 'GENE_SYMBOL'
}
Two reports released today on manufacturing and home sales appear to indicate that the U.S. economy may, in fact, be on the mend. Although the third quarter's 3.5% GDP growth appeared to indicate this, subsequent spending and wage data clouded the picture. These reports swing the pendulum back in the positive direction. The recovery in manufacturing strengthened in October as the PMI registered 55.7 percent, which is 3.1 percentage points higher than the 52.6 percent reported in September, and the highest reading for the index since April 2006 (56 percent). A reading above 50 percent indicates that the manufacturing economy is generally expanding; below 50 percent indicates that it is generally contracting. One key item I see above is customer's inventories. They appear to be "too low," indicating that the time is coming when retail vendors will have to begin using some of the cash they've been hoarding to spend again. That has nothing to do with stimulus. This is the first month of growth in manufacturing employment following 14 consecutive months of decline. It should also be noted that exports continue to increase as the dollar continues to decline. What's bad news for the dollar is good news for U.S. manufacturing's exports. Ironically, the stimulus may have weakened the dollar a bit, but to the manufacturers' advantage. The Pending Home Sales Index, a forward-looking indicator based on contracts signed in September, rose 6.1 percent to 110.1 from a reading of 103.8 in August, and is 21.2 percent higher than September 2008 when it stood at 90.9. The gain from a year ago is the largest annual increase on record, and the index is at the highest level since December 2006 when it was 112.8. "What we're witnessing is a rush of first-time buyers trying to beat the expiration of the tax credit at the end of this month," he said. "Home values will stabilize sooner rather than over-correcting. 
That, in turn, will mean wealth stabilization for the vast number of middle-class families and lay the foundation for a durable economic recovery." Either that, or values will just fall through the false bottom set by the government program. The outcome will depend more on how high inventories are and on how long high foreclosures continue. I'm skeptical, but I hope he's right.
#!/usr/bin/python # (c) 2018-2019, NetApp, Inc # GNU General Public License v3.0+ # (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function __metaclass__ = type ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'certified'} DOCUMENTATION = """ module: na_ontap_net_port short_description: NetApp ONTAP network ports. extends_documentation_fragment: - netapp.na_ontap version_added: '2.6' author: NetApp Ansible Team (@carchi8py) <ng-ansibleteam@netapp.com> description: - Modify a ONTAP network port. options: state: description: - Whether the specified net port should exist or not. choices: ['present'] default: present node: description: - Specifies the name of node. required: true ports: aliases: - port description: - Specifies the name of port(s). required: true mtu: description: - Specifies the maximum transmission unit (MTU) reported by the port. autonegotiate_admin: description: - Enables or disables Ethernet auto-negotiation of speed, duplex and flow control. duplex_admin: description: - Specifies the user preferred duplex setting of the port. - Valid values auto, half, full speed_admin: description: - Specifies the user preferred speed setting of the port. flowcontrol_admin: description: - Specifies the user preferred flow control setting of the port. ipspace: description: - Specifies the port's associated IPspace name. - The 'Cluster' ipspace is reserved for cluster ports. 
""" EXAMPLES = """ - name: Modify Net Port na_ontap_net_port: state: present username: "{{ netapp_username }}" password: "{{ netapp_password }}" hostname: "{{ netapp_hostname }}" node: "{{ node_name }}" ports: e0d,e0c autonegotiate_admin: true """ RETURN = """ """ import traceback from ansible.module_utils.basic import AnsibleModule from ansible.module_utils._text import to_native import ansible.module_utils.netapp as netapp_utils from ansible.module_utils.netapp_module import NetAppModule HAS_NETAPP_LIB = netapp_utils.has_netapp_lib() class NetAppOntapNetPort(object): """ Modify a Net port """ def __init__(self): """ Initialize the Ontap Net Port Class """ self.argument_spec = netapp_utils.na_ontap_host_argument_spec() self.argument_spec.update(dict( state=dict(required=False, choices=['present'], default='present'), node=dict(required=True, type="str"), ports=dict(required=True, type="list", aliases=['port']), mtu=dict(required=False, type="str", default=None), autonegotiate_admin=dict(required=False, type="str", default=None), duplex_admin=dict(required=False, type="str", default=None), speed_admin=dict(required=False, type="str", default=None), flowcontrol_admin=dict(required=False, type="str", default=None), ipspace=dict(required=False, type="str", default=None), )) self.module = AnsibleModule( argument_spec=self.argument_spec, supports_check_mode=True ) self.na_helper = NetAppModule() self.parameters = self.na_helper.set_parameters(self.module.params) self.set_playbook_zapi_key_map() if HAS_NETAPP_LIB is False: self.module.fail_json(msg="the python NetApp-Lib module is required") else: self.server = netapp_utils.setup_na_ontap_zapi(module=self.module) return def set_playbook_zapi_key_map(self): self.na_helper.zapi_string_keys = { 'mtu': 'mtu', 'autonegotiate_admin': 'is-administrative-auto-negotiate', 'duplex_admin': 'administrative-duplex', 'speed_admin': 'administrative-speed', 'flowcontrol_admin': 'administrative-flowcontrol', 'ipspace': 'ipspace' } def 
get_net_port(self, port): """ Return details about the net port :param: port: Name of the port :return: Dictionary with current state of the port. None if not found. :rtype: dict """ net_port_get = netapp_utils.zapi.NaElement('net-port-get-iter') attributes = { 'query': { 'net-port-info': { 'node': self.parameters['node'], 'port': port } } } net_port_get.translate_struct(attributes) try: result = self.server.invoke_successfully(net_port_get, True) if result.get_child_by_name('num-records') and int(result.get_child_content('num-records')) >= 1: port_info = result['attributes-list']['net-port-info'] port_details = dict() else: return None except netapp_utils.zapi.NaApiError as error: self.module.fail_json(msg='Error getting net ports for %s: %s' % (self.parameters['node'], to_native(error)), exception=traceback.format_exc()) for item_key, zapi_key in self.na_helper.zapi_string_keys.items(): port_details[item_key] = port_info.get_child_content(zapi_key) return port_details def modify_net_port(self, port, modify): """ Modify a port :param port: Name of the port :param modify: dict with attributes to be modified :return: None """ port_modify = netapp_utils.zapi.NaElement('net-port-modify') port_attributes = {'node': self.parameters['node'], 'port': port} for key in modify: if key in self.na_helper.zapi_string_keys: zapi_key = self.na_helper.zapi_string_keys.get(key) port_attributes[zapi_key] = modify[key] port_modify.translate_struct(port_attributes) try: self.server.invoke_successfully(port_modify, enable_tunneling=True) except netapp_utils.zapi.NaApiError as error: self.module.fail_json(msg='Error modifying net ports for %s: %s' % (self.parameters['node'], to_native(error)), exception=traceback.format_exc()) def autosupport_log(self): """ AutoSupport log for na_ontap_net_port :return: None """ results = netapp_utils.get_cserver(self.server) cserver = netapp_utils.setup_na_ontap_zapi(module=self.module, vserver=results) netapp_utils.ems_log_event("na_ontap_net_port", 
cserver) def apply(self): """ Run Module based on play book """ self.autosupport_log() # Run the task for all ports in the list of 'ports' for port in self.parameters['ports']: current = self.get_net_port(port) modify = self.na_helper.get_modified_attributes(current, self.parameters) if self.na_helper.changed: if self.module.check_mode: pass else: if modify: self.modify_net_port(port, modify) self.module.exit_json(changed=self.na_helper.changed) def main(): """ Create the NetApp Ontap Net Port Object and modify it """ obj = NetAppOntapNetPort() obj.apply() if __name__ == '__main__': main()
This is a placeholder page for Shirley Schutte, which means this person is not currently on this site. We do suggest using the tools below to find Shirley Schutte. You are visiting the placeholder page for Shirley Schutte. This page is here because someone used our placeholder utility to look for Shirley Schutte. We created this page automatically in hopes Shirley Schutte would find it. If you are not Shirley Schutte, but are an alumni of Aragon High School, register on this site for free now.
# Copyright 2015 Confluent Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from ducktape.services.service import Service

import threading


class BackgroundThreadService(Service):
    """Service whose per-node work runs in background daemon threads,
    collecting any exceptions so the main thread can re-raise them."""

    def __init__(self, context, num_nodes):
        super(BackgroundThreadService, self).__init__(context, num_nodes)
        self.worker_threads = []
        # Maps worker thread name -> the exception it raised, guarded by lock.
        self.worker_errors = {}
        self.lock = threading.RLock()

    def _protected_worker(self, idx, node):
        """Protected worker captures exceptions and makes them available to
        the main thread. This gives us the ability to propagate exceptions
        thrown in background threads, if desired.
        """
        try:
            self._worker(idx, node)
        except BaseException as e:
            with self.lock:
                # current_thread() replaces the deprecated currentThread() alias.
                self.worker_errors[threading.current_thread().name] = e
            # Bare raise preserves the original traceback ('raise e' discards
            # it on Python 2).
            raise

    def start_node(self, node):
        """Launch this node's worker in a named daemon thread."""
        idx = self.idx(node)
        self.logger.info("Running %s node %d on %s", self.__class__.__name__, idx, node.account.hostname)
        worker = threading.Thread(
            name=self.__class__.__name__ + "-worker-" + str(idx),
            target=self._protected_worker,
            args=(idx, node)
        )
        worker.daemon = True
        worker.start()
        self.worker_threads.append(worker)

    def wait(self):
        """Block until every worker thread finishes, then propagate any
        exception captured in a background thread."""
        super(BackgroundThreadService, self).wait()
        for worker in self.worker_threads:
            self.logger.debug("Waiting for worker thread %s finish", worker.name)
            worker.join()
        self.worker_threads = None

        # Propagate exceptions thrown in background threads
        with self.lock:
            if self.worker_errors:
                raise Exception(str(self.worker_errors))

    def stop(self):
        """Stop the service; warn when workers are still running (usually a
        sign wait() was skipped or teardown ran early)."""
        if self.worker_threads is not None:
            self.logger.warn(
                "At least one worker thread is still running - this might occur if self.stop() is called " +
                "before self.wait(). This could happen if wait() was omitted, or if an Exception triggered "
                "teardown logic before wait() was reached.")
            self.logger.warn("%s" % str(self.worker_threads))
        super(BackgroundThreadService, self).stop()

    def stop_node(self, node):
        # Workers are daemon threads; nothing to stop per node.
        pass

    def clean_node(self, node):
        # Subclasses override when per-node cleanup is needed.
        pass
Lactose intolerance is the inability to digest and absorb the milk sugar lactose. Normally the upper small intestinal lining produces the enzyme lactase that splits lactose into 2 readily absorbable forms of sugars. When lactose is undigested, bacteria within the intestine ferment the sugar. This produces gas and lactic acid, which causes abdominal bloating, flatulence and diarrhea. Lactose enzyme supplements are readily available over-the-counter and can solve the problem in most people.
"""
Mask specified sites from a VCF file.
"""

from Bio import SeqIO
import pandas as pd
import os
import numpy as np
from .utils import run_shell_command


def get_mask_sites(vcf_file, mask_file):
    '''
    Creates a temporary file in correct format for vcftools to use
    (two-column, tab-seperated: "chromName" "position")
    '''
    # Need CHROM name from VCF file: take it from the first data row.
    import gzip
    opn = gzip.open if vcf_file.lower().endswith('.gz') else open
    chromName = None
    with opn(vcf_file, mode='rt') as f:  # 'rt' necessary for gzip
        for line in f:
            if line[0] != "#":
                chromName = line.strip().split('\t')[0]
                break  # once chrom is found, no need to go through rest
    assert chromName is not None, "no data rows found in %s" % vcf_file

    # Read in BED file - 2nd column always chromStart, 3rd always chromEnd.
    # A set deduplicates as positions are added, avoiding the previous
    # large intermediate list + np.unique pass.
    sitesToMask = set()
    bed = pd.read_csv(mask_file, sep='\t')
    for index, row in bed.iterrows():
        sitesToMask.update(range(row[1], row[2] + 1))

    # sorted(set) yields the same ordered unique positions np.unique did.
    exclude = [chromName + "\t" + str(pos) for pos in sorted(sitesToMask)]

    tempMaskFile = mask_file + "_maskTemp"
    with open(tempMaskFile, 'w') as the_file:
        the_file.write("\n".join(exclude))

    return tempMaskFile


def register_arguments(parser):
    parser.add_argument('--sequences', '-s', required=True, help="sequences in VCF format")
    parser.add_argument('--mask', required=True, help="locations to be masked in BED file format")
    parser.add_argument('--output', '-o', help="output file")


def run(args):
    '''
    mask specified sites from the VCF.
    this occurs by removing them entirely from the VCF, essentially making
    them identical to the reference at the locations

    If users don't specify output, will overwrite the input file.
    '''
    tempMaskFile = get_mask_sites(args.sequences, args.mask)

    # Read in/write out according to file ending
    inCall = "--gzvcf" if args.sequences.lower().endswith('.gz') else "--vcf"
    if args.output:
        outCall = "| gzip -c" if args.output.lower().endswith('.gz') else ""
    else:
        outCall = "| gzip -c" if args.sequences.lower().endswith('.gz') else ""

    # vcftools doesn't like input/output being the same file.
    # If no output specified, they will be, so use copy of input we'll delete later
    in_file = args.sequences
    out_file = args.output
    if not args.output:
        from shutil import copyfile
        out_file = in_file
        in_file = args.sequences + "_temp"
        copyfile(args.sequences, in_file)

    call = ["vcftools", "--exclude-positions", tempMaskFile, inCall, in_file, "--recode --stdout", outCall, ">", out_file]
    print("Removing masked sites from VCF file using vcftools... this may take some time. Call:")
    print(" ".join(call))
    run_shell_command(" ".join(call), raise_errors=True)

    os.remove(tempMaskFile)  # remove masking file
    # remove vcftools log file
    try:
        os.remove('out.log')
    except OSError:
        pass
    # remove copy of input if there was no output specified
    if not args.output:
        os.remove(in_file)
With the world changing we might be tempted to fall apart ourselves. However even in turbulent times we can build strong families of faith that serve as beacons of hope to the lost and suffering people around us. We are looking at Families of Faith over the next month or so, and this week we are going to talk about Building A Family Of Faith. And one of the things that helps hold our families together is a heritage and history of songs. Families who worship together are stronger and weather life’s storms better. This week we are going to bring in some very old songs with a 2016 twist. As we prepare our hearts for worship let’s ask God to be The vision for our families and lead them well in worshiping him together.
# -*- coding: utf-8 -*- ############################################################################ # This program is free software: you can redistribute it and/or modify # # it under the terms of the GNU Affero General Public License as # # published by the Free Software Foundation, either version 3 of the # # License, or (at your option) any later version. # # # # This program is distributed in the hope that it will be useful, # # but WITHOUT ANY WARRANTY; without even the implied warranty of # # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # # GNU Affero General Public License for more details. # # # # You should have received a copy of the GNU Affero General Public License # # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################ import re from module.plugins.internal.SimpleCrypter import SimpleCrypter class DataHuFolder(SimpleCrypter): __name__ = "DataHuFolder" __version__ = "0.03" __type__ = "crypter" __pattern__ = r'http://(?:www\.)?data.hu/dir/\w+' __description__ = """Data.hu folder decrypter plugin""" __author_name__ = ("crash", "stickell") __author_mail__ = "l.stickell@yahoo.it" LINK_PATTERN = r"<a href='(http://data\.hu/get/.+)' target='_blank'>\1</a>" TITLE_PATTERN = ur'<title>(?P<title>.+) Let\xf6lt\xe9se</title>' def decrypt(self, pyfile): self.html = self.load(pyfile.url, decode=True) if u'K\xe9rlek add meg a jelsz\xf3t' in self.html: # Password protected password = self.getPassword() if password is '': self.fail("No password specified, please set right password on Add package form and retry") self.logDebug('The folder is password protected', 'Using password: ' + password) self.html = self.load(pyfile.url, post={'mappa_pass': password}, decode=True) if u'Hib\xe1s jelsz\xf3' in self.html: # Wrong password self.fail("Incorrect password, please set right password on Add package form and retry") package_name, folder_name = 
self.getPackageNameAndFolder() package_links = re.findall(self.LINK_PATTERN, self.html) self.logDebug('Package has %d links' % len(package_links)) if package_links: self.packages = [(package_name, package_links, folder_name)] else: self.fail('Could not extract any links')
Bad acne and how to handle - If you've ever had one, I feel blessed to not have occupied the bad acne. They can affect a person from pre-adolescence to middle age and the reasons vary. Acne scars can be attributed to food, hormonal changes, stress or hygiene as well as other causes. Although redness or acne hyperpigmentation, which is caused by scarring are not considered a serious disease, it can affect self-esteem. Especially in an environment where external appearances are important and social standing is affected. Treatment methods vary because some are cheap, simple and can be done from the comfort of home. Others require a certain amount of money and a trip to a skin specialist or dermatologist. In severe cases, it can be recommended as a licensed doctor may be able to detect any underlying disease. 1. Eliminate fried or fatty foods from the diet and eat more fruits, grains and vegetables. 2. Wash your face at least once a day, followed by cleaning with astringent witch hazel or commercial. 3. Drink more water and less soda or sugary drinks. 4. Try over-the-counter products such as Clearasil or OxyWash.5. Have a model when bad acne scars, such as stress appear. The treatment by a professional can go injections, chemical peels, dermabrasion or a stronger version of the drug treatments can be prescribed. These treatments are rarely a permanent solution and repeated visits can be expensive. So a beautician may have some knowledge of skin care, sometimes using products that may not work for everyone, or worse, cause an allergic reaction. Correctors can be used to temporarily hide acne scars. Although most of them work best with those who use foundation, Dermablend can only be used by men and women.
""" Created on 22 Mar 2014 @author: Max Demian """ #============================================================================== # Multiple Inheritance #============================================================================== # We use super() here. If we used direct class calls instead i.e. # BaseClass.call_me(self) etc, we'd end up with 2 calls to BaseClass # Why? Because LeftSubclass calls RightSubclass with super() as next class class BaseClass(object): num_base_calls = 0 def call_me(self): print "Calling method on Base Class" self.num_base_calls += 1 class LeftSubclass(BaseClass): num_left_calls = 0 def call_me(self): super(LeftSubclass, self).call_me() print "Calling method on Left Subclass" self.num_left_calls += 1 class RightSubclass(BaseClass): num_right_calls = 0 def call_me(self): super(RightSubclass, self).call_me() print "Calling method on Right Subclass" self.num_right_calls += 1 class Subclass(LeftSubclass, RightSubclass): num_sub_calls = 0 def call_me(self): super(Subclass, self).call_me() print "Calling method on Subclass" self.num_sub_calls += 1 #============================================================================== # Polymorphism #============================================================================== class AudioFile(object): def __init__(self, filename): if not filename.endswith(self.ext): raise Exception("Invalid file format") self.filename = filename class MP3File(AudioFile): ext = "mp3" def play(self): print "playing {} as mp3".format(self.filename) class WavFile(AudioFile): ext = "wav" def play(self): print "playing {} as wav".format(self.filename) class OggFile(AudioFile): ext = "ogg" def play(self): print "playing {} as ogg".format(self.filename) class FlacFile(object): def __init__(self, filename): if not filename.endswith(".flac"): raise Exception("Invalid file format") self.filename = filename def play(self): print "playing {} as flac".format(self.filename) class InvalidItemType(Exception): def __init__(self, 
item_type): super(InvalidItemType).__init__("Sorry, we don't sell", item_type) class Inventory(object): stock = {"widget": 2} def __init__(self): self.locked = False def lock(self, item_type): print item_type, "locked" self.locked = True def unlock(self, item_type): print item_type, "unlocked" self.locked = False def purchase(self, item_type): if self.locked: raise Exception("Sorry, item is locked.") if item_type not in self.stock: raise Exception("Sorry, we don't sell", item_type) if not self.stock[item_type]: raise InvalidItemType(item_type) print "Purchase complete. There are {} {}s left.".format( self.stock[item_type] - 1, item_type) def test_inventory(): item_type = 'widget' inv = Inventory() # ~ inv.lock(item_type) inv.purchase(item_type) inv.unlock(item_type) inv.purchase(item_type) if __name__ == '__main__': s = Subclass() s.call_me() print(s.num_sub_calls, s.num_left_calls, s.num_right_calls, s.num_base_calls) ogg = OggFile("myfile.ogg") ogg.play() mp3 = MP3File("myfile.mp3") mp3.play() # This will raise the exception for wrong filetype, even though we don't # actually check the type of the file. # not_mp3 = MP3File("myfile.ogg") # Custom Exceptions test_inventory()
But what else was good? And so are those graphics... ew. First one I owned as a kid! All its games are on Switch now. It’s not really a console! Do I just judge Nintendo consoles based on how good their Zeldas are? I think I do.
""" You don't really want to use this module. Try insults.py instead. """ from twisted.internet import protocol class InsultsClient(protocol.Protocol): escapeTimeout = 0.2 def __init__(self): self.width = self.height = None self.xpos = self.ypos = 0 self.commandQueue = [] self.inEscape = '' def setSize(self, width, height): call = 0 if self.width: call = 1 self.width = width self.height = height if call: self.windowSizeChanged() def dataReceived(self, data): from twisted.internet import reactor for ch in data: if ch == '\x1b': if self.inEscape: self.keyReceived(ch) self.inEscape = '' else: self.inEscape = ch self.escapeCall = reactor.callLater(self.escapeTimeout, self.endEscape) elif ch in 'ABCD' and self.inEscape: self.inEscape = '' self.escapeCall.cancel() if ch == 'A': self.keyReceived('<Up>') elif ch == 'B': self.keyReceived('<Down>') elif ch == 'C': self.keyReceived('<Right>') elif ch == 'D': self.keyReceived('<Left>') elif self.inEscape: self.inEscape += ch else: self.keyReceived(ch) def endEscape(self): ch = self.inEscape self.inEscape = '' self.keyReceived(ch) def initScreen(self): self.transport.write('\x1b=\x1b[?1h') def gotoXY(self, x, y): """Go to a position on the screen. """ self.xpos = x self.ypos = y self.commandQueue.append(('gotoxy', x, y)) def writeCh(self, ch): """Write a character to the screen. If we're at the end of the row, ignore the write. """ if self.xpos < self.width - 1: self.commandQueue.append(('write', ch)) self.xpos += 1 def writeStr(self, s): """Write a string to the screen. This does not wrap a the edge of the screen, and stops at \\r and \\n. """ s = s[:self.width-self.xpos] if '\n' in s: s=s[:s.find('\n')] if '\r' in s: s=s[:s.find('\r')] self.commandQueue.append(('write', s)) self.xpos += len(s) def eraseToLine(self): """Erase from the current position to the end of the line. """ self.commandQueue.append(('eraseeol',)) def eraseToScreen(self): """Erase from the current position to the end of the screen. 
""" self.commandQueue.append(('eraseeos',)) def clearScreen(self): """Clear the screen, and return the cursor to 0, 0. """ self.commandQueue = [('cls',)] self.xpos = self.ypos = 0 def setAttributes(self, *attrs): """Set the attributes for drawing on the screen. """ self.commandQueue.append(('attributes', attrs)) def refresh(self): """Redraw the screen. """ redraw = '' for command in self.commandQueue: if command[0] == 'gotoxy': redraw += '\x1b[%i;%iH' % (command[2]+1, command[1]+1) elif command[0] == 'write': redraw += command[1] elif command[0] == 'eraseeol': redraw += '\x1b[0K' elif command[0] == 'eraseeos': redraw += '\x1b[OJ' elif command[0] == 'cls': redraw += '\x1b[H\x1b[J' elif command[0] == 'attributes': redraw += '\x1b[%sm' % ';'.join(map(str, command[1])) else: print command self.commandQueue = [] self.transport.write(redraw) def windowSizeChanged(self): """Called when the size of the window changes. Might want to redraw the screen here, or something. """ def keyReceived(self, key): """Called when the user hits a key. """
Francie is the Sponsorship Chair of the WISE Greater Raleigh chapter. She has worked in the sports industry for many years and currently runs her own sponsorship consulting business focused on business development and strategy for various clients. Justine Dutton serves as the Events Chair for WISE Greater Raleigh. She is an Event Coordinator at PRIMESPORT, responsible for planning and activating sports travel packages for fans and corporate clients. Jasmine is VP of Technology for the WISE Greater Raleigh Chapter and a Quality Assurance Tech at SportsMedia Technology. Her responsibilities include testing graphics for sporting events, providing software training, and occasional onsite support. Marianne is the Membership Chair of the WISE Greater Raleigh chapter and Marketing Communications Manager at Teamworks, where she strengthens Teamworks’ brand equity through media, public relations and content strategy. Mary Reiling Spencer is President of the WISE Greater Raleigh chapter and is VP of People Development at Wasserman overseeing recruiting, onboarding, training and development, and performance management for the Marketing division. Patricia Hopkins is the Secretary on the WISE Greater Raleigh board and is VP of Marketing for SMT (SportsMEDIA Technology) responsible for the company’s global marketing, corporate strategy and communications, creative studio and product management. Kerry Ruggieri serves as the marketing / communications chair for WISE Greater Raleigh. She spent 15 years working in sports marketing and communications with Ketchum Sports & Entertainment in New York & Raleigh managing global client programs. Debby is the co-president for the WISE Greater Raleigh Chapter. She is an educator (University of North Carolina) and entrepreneur (www.dstroman.com) having founded two companies: Stroman Athletic & Asset Management and Women in Sports Tech. LaTosha Smith serves at the Treasurer for WISE Greater Raleigh. 
She has been in the sports and special events industry for 10 years, having worked with the Brooklyn Nets and now serving as Director of Special Events with the Durham Bulls.
import pytest
import responses
import re
from flask import json
from json import JSONDecodeError
from werkzeug.exceptions import HTTPException
from requests import ConnectionError

from backend.service import CartService
from backend.util.response.cart import CartSchema
from backend.util.response.error import ErrorSchema
from backend.errors.request_error import ValidationError


@pytest.fixture(scope="function", autouse=True)
def controller_mocker(mocker):
    """Stub out CartService construction for every test in this module."""
    mocker.patch.object(CartService, "__init__", return_value=None)


@pytest.fixture(scope="module")
def response_json():
    """Canned willstores payload that satisfies CartSchema."""
    return {
        "total": {"outlet": 10.55, "retail": 20.9, "symbol": "£"},
        "products": [
            {
                "id": "id",
                "name": "string",
                "image": "string",
                "price": {"outlet": 10.55, "retail": 20.9, "symbol": "£"},
                "discount": 80.5,
                "amount": 1,
            }
        ],
    }


def test_remove_controller(mocker, flask_app, willstores_ws, response_json):
    # Happy path: the service removes the item and the re-priced cart comes back.
    mocker.patch.object(CartService, "remove_item", return_value=True)
    mocker.patch.object(
        CartService, "to_list", return_value=[{"item_id": "id", "amount": 1}]
    )

    with responses.RequestsMock() as remote:
        remote.add(
            responses.POST,
            re.compile(willstores_ws),
            status=200,
            json=response_json,
        )

        with flask_app.test_client() as client:
            reply = client.post("api/cart/remove/test")
            body = json.loads(reply.data)
            CartSchema().load(body)
            assert reply.status_code == 200


def test_remove_controller_unregistered(mocker, flask_app, willstores_ws):
    # Removing an unknown item raises ValidationError -> HTTP 400 error body.
    mocker.patch.object(
        CartService, "remove_item", side_effect=ValidationError("test")
    )

    with flask_app.test_client() as client:
        reply = client.post("api/cart/remove/test")
        body = json.loads(reply.data)
        ErrorSchema().load(body)
        assert reply.status_code == 400


@pytest.mark.parametrize(
    "method,http_method,test_url,error,status_code",
    [
        ("remove_item", "POST", "api/cart/remove/test", HTTPException(), 400),
        ("remove_item", "POST", "api/cart/remove/test", ConnectionError(), 502),
        ("remove_item", "POST", "api/cart/remove/test", Exception(), 500),
    ],
)
def test_remove_controller_error(
    mocker,
    willstores_ws,
    get_request_function,
    method,
    http_method,
    test_url,
    error,
    status_code,
):
    # Each service-level failure class maps to a specific HTTP status code.
    mocker.patch.object(CartService, method, side_effect=error)

    make_request = get_request_function(http_method)
    reply = make_request(test_url)
    body = json.loads(reply.data)
    ErrorSchema().load(body)
    assert reply.status_code == status_code


@pytest.mark.parametrize(
    "test_url, status_code",
    [
        ("api/cart/remove/test", 400),
        ("api/cart/remove/test", 401),
        ("api/cart/remove/test", 500),
        ("api/cart/remove/test", 504),
    ],
)
def test_update_controller_http_error(
    mocker, flask_app, willstores_ws, json_error_recv, test_url, status_code
):
    # Upstream willstores HTTP errors are propagated verbatim to the client.
    mocker.patch.object(CartService, "remove_item", return_value=True)
    mocker.patch.object(
        CartService, "to_list", return_value=[{"item_id": "id", "amount": 1}]
    )

    with responses.RequestsMock() as remote:
        remote.add(
            responses.POST,
            re.compile(willstores_ws),
            status=status_code,
            json=json_error_recv,
        )

        with flask_app.test_client() as client:
            reply = client.post(test_url)
            body = json.loads(reply.data)
            ErrorSchema().load(body)
            assert reply.status_code == status_code
Kelli will inspire, educate, and entertain your audience. When you book Kelli, you can count on a highly energetic experience! Whether talking about inflammation, hormones or workplace burnout, Kelli is committed to delivering quality content, high humor, and actionable ideas. She is sure to impact today’s conferences and meetings across the country.
#this script scans the input file for errors import re,os,sys res='replicate'; ifile=open(sys.argv[1]); ifile.readline(); ilines=ifile.readlines(); len_group_1=-1;len_group_2=-1; for i in ilines: element=re.findall('[^\t]+',i); inc1=re.findall('[^,]+',element[1]);skp1=re.findall('[^,]+',element[2]); inc2=re.findall('[^,]+',element[3]);skp2=re.findall('[^,]+',element[4]); if (len(inc1)+len(inc2))==2: res='pooled'; #check length of inclusion / skipping if len(inc1)!=len(skp1): res='Error: different number of inclusion and skipping counts.\n'+str(i);break; if len(inc2)!=len(skp2): res='Error: different number of inclusion and skipping counts.\n'+str(i);break; #check the length of inclusion / skipping that is the same for all the exons if len_group_1==-1: len_group_1=len(inc1); if len_group_2==-1: len_group_2=len(inc2); if len_group_1!=len(inc1): res='Error: number of inclusion and skipping counts are not the same for different exons.\n'+str(i);break; if len_group_2!=len(inc2): res='Error: number of inclusion and skipping counts are not the same for different exons.\n'+str(i);break; print(res);
So You Want To Smoke A Pipe? A very important part of the pipe experience is not pictured here and you will not find it in any new pipe you buy. This is the CAKE. Not Grandma's 27 year old fruit cake, but pipe tobacco cake. Technically speaking cake is the remnants of old burnt tobacco, mostly carbon (but ohhh what carbon). Cake scientifically plays a role in absorbing moisture. Aesthetically it plays the role of a nymph. Cake enlightens the smoke, it enlivens the taste, and it enhances the experience. Cake believes in equality; it cannot be bought, it must be earned. The pipe was popularized by Sir Walter Raleigh in England in the late 1580s. As far as composition goes the sky is the limit. You can even make believe you are in an early Asian or African culture and build an "earth pipe" (DIRECTIONS: dig a hole in the ground, place in burning substance, drill holes through the dirt with sticks, inhale and enjoy!). Or you can make or buy a pipe made from bamboo, bone, clay, crustacean shells, gourds, metal, nuts, porcelain, roots, wood, etc., etc. — you get the point. The most popular kinds have traditionally been made of woods like briar (which is the root of a Mediterranean shrub). Its correct name is common tree heath (Erica Arborea), but it is more generally referred to as Bruyere. It has been said that the wood is at its best at ages over 100 years. Meerschaum Note: Also widely used, it is one of the most porous substances found in nature and will not burn out. Unlike Briar Wood pipes, Meerschaum is a mineral (Hydrous Magnesium Silicate). It is mined wet from depths of up to 500 feet and is bought by carvers in various size blocks or lumps called stones and is then carved by hand. The carvers transform this block of raw stone into beautiful pipes of various styles and designs using the same hand carving techniques that have been handed down from generation to generation.
Unlike Briar, the natural mineral quality of Meerschaum needs no caking to enjoy the true, pure taste of your favorite blend. It's good to have a few different pipes or different kinds, because depending on the shape and composition, pipes smoke differently. If you're going to be using yours a lot you should alternate between uses, because a cooler pipe tends to give a better smoke than a hot one. Meerschaum Note: Because of its mineral nature, it doesn't require a rest period like briar pipes do to cool off or dry out. O.K. so you have bought a pipe and you want to smoke it. Here we go. Fill it gradually, stopping to tamp the tobacco. This will prevent the problem of disrupting air pockets. Remember it only needs to be filled half way. Now you're ready to light it, so strike a match or flick a lighter and apply the flame. As the heat from the flame comes in contact with the tobacco, the tobacco will expand up towards the top of the bowl, so grab your tamper and apply pressure to the smoldering tobacco, but don't stop puffing. Fill it gradually to prevent the problem of disrupting air pockets. Remember it only needs to be filled half way. Now you're ready to light it, so strike a match or flick a lighter and apply the flame. As the heat from the flame comes in contact with the tobacco, the tobacco will expand up towards the top of the bowl, so grab your tamper and apply pressure to the smoldering tobacco, but don't stop puffing. Mixed cuts should be packed firmly but lively. Flake cuts should be packed looser. And finally, the rougher the cut, the tighter the pack should be. If the pipe goes out the pack was too tight, so work your shank poker through the tobacco down to the air hole. If you get tongue bite the pack was too loose, so tamp the tobacco down. DO IT GENTLY, you don't want to smash, slam, tap, whack, cut, or hack old ash out. Support the bowl and give it a gentle tap. This isn't a test of strength.
Also remember pipe cleaners were made for pipes, not elementary school projects, so use pipe cleaners often, even in the middle of a smoke if need be. Then every couple weeks or so depending on use, give the pipe of your choice a good cleaning. Carefully take off the bit, then the shank, and clean thoroughly and gently. Then using a "reamer" (this is a tool designed for cleaning a pipe, so use it; don't try a knife, ax, or an air ratchet) reduce the cake build up to an even thickness. So, to recap: be gentle, and buy pipe cleaners, some pipe sweetener, and a reamer. 1:English - Blended tobacco which is free from any artificial non-tobacco flavors. They tend to smoke more "dry". 2:Aromatic - Tobacco to which some kind of non-tobacco flavoring has been added. Including apple, cherry, coconut, hazelnut, honey, maple, molasses, pistachio, raspberry, vanilla, whiskey, and countless other combinations. These kinds of tobacco tend to smoke more "wet." BURLEY - Unlike the name would suggest, this tobacco is light tasting and absorbs flavors quite easily. CAVENDISH - Processed with maple syrup, licorice, rum, sweeteners, and whiskey. It is then heated, pressed, and cut. Probably has the most pleasant aroma. Fire-cured Cavendish is probably the most popular, and the sweetest in smell. Usually noted by having the word BLACK in the name. LATAKIA - A strong aromatic and bold tasting Mediterranean tobacco, used in English blends. MARYLAND - A subdued American tobacco. Used in blends. PERIQUE - Unique to Louisiana, this tobacco is pressed and allowed to ferment, thus producing a tobacco which is dark in color and strong in taste and aroma. VIRGINIA - An American tobacco which burns hot. This tobacco adds both body and a sweet flavor when it is blended. • The Greater Kansas City Pipe Club contact: (913) 381-5597 Meet: Every third Thursday of the month, at 8:00 p.m. at the Cigar and Tabac store in Overland Park, KS.
""" RSA_Cracker: Cracks RSA key set by using various attacks """ from RSAExploits.exploits.common_modulus import Common_Modulus from RSAExploits.exploits.wiener import Wiener from RSAExploits.exploits.boneh_durfee import Boneh_Durfee from RSAExploits.exploits.hastad import Hastad from RSAExploits.exploits.fermat import Fermat from RSAExploits.exploits.common_factor import Common_Factor from RSAExploits.exploits.franklin_reiter import Franklin_Reiter __Exploit_Classes__ = [] def init(): """ Populate the Exploit_Classes list with exploits to run """ global __Exploit_Classes__ __Exploit_Classes__ = [] __Exploit_Classes__.append(Franklin_Reiter()) __Exploit_Classes__.append(Common_Modulus()) __Exploit_Classes__.append(Common_Factor()) __Exploit_Classes__.append(Wiener()) __Exploit_Classes__.append(Boneh_Durfee()) __Exploit_Classes__.append(Hastad()) __Exploit_Classes__.append(Fermat()) def attack(rsadata_list, info_dict = None, break_on_success = True): """ Run all of the exploits in the list on the provided rsadata objects Args: rsadata_list: A list of RSA_Data objects to exploit info_dict: Dictionary providing extra info to certain exploits break_on_success: Stop after first successful exploit if true """ success = False for exploit in __Exploit_Classes__: if exploit.run(rsadata_list, info_dict): success = True if success and break_on_success: return success
Project CARS Official Forum > Other Games > Project CARS > Project CARS on Playstation 4 > PS4 - Technical Help & Support > Official statement needed - Are car physics affected by FFB-tweaks? View Full Version : Official statement needed - Are car physics affected by FFB-tweaks? I strongly believe there is no loop-back or direct dependency between FFB settings and any physics engine manipulations that happen by that. So why did I open this thread? I would love to get an official statement from the devs on this to put the rumor to sleep once and for all. No, absolutely not. The physics are the same whether you play with a $1,000 Fanatec setup or a zero-FFB $20 keyboard. I think this mostly comes due to the individual wheel settings. As far as I know the FFB is physics driven and, for example, setup changes may change the FFB, but not the other way around. Although you might feel different, as you can totally mess up the FFB settings and driving is horrible, or get it right and have a blast. Or land somewhere in the middle.
# Copyright 2018 Google. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Model definition for the SSD Model.

Defines model_fn of SSD for TF Estimator. The model_fn includes SSD
model architecture, loss function, learning rate schedule, and
evaluation procedure.

T.-Y. Lin, P. Goyal, R. Girshick, K. He, and P. Dollar
Focal Loss for Dense Object Detection. arXiv:1708.02002
"""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import itertools as it
import math

import numpy as np
import tensorflow as tf

import ssd_architecture
import ssd_constants

from tensorflow.contrib.tpu.python.tpu import bfloat16

# Upper bound on predicted log-scale box deltas, so tf.exp() in
# decode_boxes cannot overflow for extreme regression outputs.
BBOX_XFORM_CLIP = np.log(1000. / 16.)


class DefaultBoxes(object):
  """Default bounding boxes for 1200x1200 5 layer SSD.

  Default bounding boxes generation follows the order of (W, H,
  anchor_sizes). Therefore, the tensor converted from DefaultBoxes has a
  shape of [anchor_sizes, H, W, 4]. The last dimension is the box
  coordinates; 'ltrb' is [ymin, xmin, ymax, xmax] while 'xywh' is
  [cy, cx, h, w].
  """

  def __init__(self):
    # Stride of each feature map relative to the input image.
    steps = [
        int(ssd_constants.IMAGE_SIZE / fs)
        for fs in ssd_constants.FEATURE_SIZES
    ]

    fk = ssd_constants.IMAGE_SIZE / np.array(steps)

    self.default_boxes = []
    # Scale by image size. SCALES are defined for a 300x300 reference
    # input, so rescale them to the configured IMAGE_SIZE.
    scales = [
        int(s * ssd_constants.IMAGE_SIZE / 300) for s in ssd_constants.SCALES
    ]
    # size of feature and number of feature
    for idx, feature_size in enumerate(ssd_constants.FEATURE_SIZES):
      sk1 = scales[idx] / ssd_constants.IMAGE_SIZE
      sk2 = scales[idx + 1] / ssd_constants.IMAGE_SIZE
      sk3 = math.sqrt(sk1 * sk2)
      # Two square anchors (current scale and the geometric mean with the
      # next scale) plus one pair of (w, h)/(h, w) anchors per aspect ratio.
      all_sizes = [(sk1, sk1), (sk3, sk3)]

      for alpha in ssd_constants.ASPECT_RATIOS[idx]:
        w, h = sk1 * math.sqrt(alpha), sk1 / math.sqrt(alpha)
        all_sizes.append((w, h))
        all_sizes.append((h, w))

      assert len(all_sizes) == ssd_constants.NUM_DEFAULTS[idx]

      for w, h in all_sizes:
        for i, j in it.product(range(feature_size), repeat=2):
          cx, cy = (j + 0.5) / fk[idx], (i + 0.5) / fk[idx]
          # Clip centers/sizes into [0, 1] normalized image coordinates.
          box = tuple(np.clip(k, 0, 1) for k in (cy, cx, h, w))
          self.default_boxes.append(box)

    assert len(self.default_boxes) == ssd_constants.NUM_SSD_BOXES

    def to_ltrb(cy, cx, h, w):
      return cy - h / 2, cx - w / 2, cy + h / 2, cx + w / 2

    # For IoU calculation
    self.default_boxes_ltrb = tuple(to_ltrb(*i) for i in self.default_boxes)

  def __call__(self, order='ltrb'):
    # Returns the anchors in the requested coordinate order; any other
    # value of `order` implicitly returns None.
    if order == 'ltrb':
      return self.default_boxes_ltrb
    if order == 'xywh':
      return self.default_boxes


def decode_boxes(encoded_boxes, anchors, weights=None):
  """Decode boxes.

  Args:
    encoded_boxes: a tensor whose last dimension is 4 representing the
      coordinates of encoded boxes in ymin, xmin, ymax, xmax order.
    anchors: a tensor whose shape is the same as `boxes` representing the
      coordinates of anchors in ymin, xmin, ymax, xmax order.
    weights: None or a list of four float numbers used to scale coordinates.

  Returns:
    encoded_boxes: a tensor whose shape is the same as `boxes` representing
      the decoded box targets.
  """
  with tf.name_scope('decode_box'):
    encoded_boxes = tf.cast(encoded_boxes, dtype=anchors.dtype)
    # Regression deltas: center offsets (dy, dx) and log-scale sizes (dh, dw).
    dy = encoded_boxes[..., 0:1]
    dx = encoded_boxes[..., 1:2]
    dh = encoded_boxes[..., 2:3]
    dw = encoded_boxes[..., 3:4]
    if weights:
      dy /= weights[0]
      dx /= weights[1]
      dh /= weights[2]
      dw /= weights[3]
    # Clamp the log-scale deltas so tf.exp() below cannot overflow.
    dh = tf.minimum(dh, BBOX_XFORM_CLIP)
    dw = tf.minimum(dw, BBOX_XFORM_CLIP)

    # Convert anchors from corner to center/size form.
    anchor_ymin = anchors[..., 0:1]
    anchor_xmin = anchors[..., 1:2]
    anchor_ymax = anchors[..., 2:3]
    anchor_xmax = anchors[..., 3:4]

    anchor_h = anchor_ymax - anchor_ymin
    anchor_w = anchor_xmax - anchor_xmin
    anchor_yc = anchor_ymin + 0.5 * anchor_h
    anchor_xc = anchor_xmin + 0.5 * anchor_w

    # Apply the deltas, then convert back to corner form.
    decoded_boxes_yc = dy * anchor_h + anchor_yc
    decoded_boxes_xc = dx * anchor_w + anchor_xc
    decoded_boxes_h = tf.exp(dh) * anchor_h
    decoded_boxes_w = tf.exp(dw) * anchor_w

    decoded_boxes_ymin = decoded_boxes_yc - 0.5 * decoded_boxes_h
    decoded_boxes_xmin = decoded_boxes_xc - 0.5 * decoded_boxes_w
    decoded_boxes_ymax = decoded_boxes_yc + 0.5 * decoded_boxes_h
    decoded_boxes_xmax = decoded_boxes_xc + 0.5 * decoded_boxes_w

    decoded_boxes = tf.concat([
        decoded_boxes_ymin, decoded_boxes_xmin, decoded_boxes_ymax,
        decoded_boxes_xmax
    ], axis=-1)
    return decoded_boxes


def select_top_k_scores(scores_in, pre_nms_num_detections=5000):
  """Select top_k scores and indices for each class.

  Args:
    scores_in: a Tensor with shape [batch_size, num_classes, N], which
      stacks class logit outputs on all feature levels. The N is the
      number of total anchors on all levels. The num_classes is the number
      of classes predicted by the model.
    pre_nms_num_detections: Number of candidates before NMS.

  Returns:
    scores and indices: Tensors with shape
      [batch_size, pre_nms_num_detections, num_classes].
  """
  _, num_class, num_anchors = scores_in.get_shape().as_list()
  # Fold (batch, class) into one dimension so a single top_k call covers
  # every class of every batch element.
  scores = tf.reshape(scores_in, [-1, num_anchors])
  top_k_scores, top_k_indices = tf.nn.top_k(
      scores, k=pre_nms_num_detections, sorted=True)
  top_k_scores = tf.reshape(top_k_scores,
                            [-1, num_class, pre_nms_num_detections])
  top_k_indices = tf.reshape(top_k_indices,
                             [-1, num_class, pre_nms_num_detections])
  # Transpose to [batch, pre_nms_num_detections, num_classes].
  return tf.transpose(top_k_scores, [0, 2, 1]), tf.transpose(
      top_k_indices, [0, 2, 1])


def _filter_scores(scores, boxes, min_score=ssd_constants.MIN_SCORE):
  """Zero out scores (and their boxes) at or below `min_score`."""
  mask = scores > min_score
  scores = tf.where(mask, scores, tf.zeros_like(scores))
  boxes = tf.where(
      tf.tile(tf.expand_dims(mask, 2), (1, 1, 4)), boxes, tf.zeros_like(boxes))
  return scores, boxes


def non_max_suppression(scores_in,
                        boxes_in,
                        top_k_indices,
                        source_id,
                        raw_shape,
                        num_detections=ssd_constants.MAX_NUM_EVAL_BOXES):
  """Implement Non-maximum suppression.

  Args:
    scores_in: a Tensor with shape
      [batch_size, ssd_constants.MAX_NUM_EVAL_BOXES, num_classes]. The top
      ssd_constants.MAX_NUM_EVAL_BOXES box scores for each class.
    boxes_in: a Tensor with shape [batch_size, N, 4], which stacks box
      regression outputs on all feature levels. The N is the number of
      total anchors on all levels.
    top_k_indices: a Tensor with shape
      [batch_size, ssd_constants.MAX_NUM_EVAL_BOXES, num_classes]. The
      indices for these top boxes for each class.
    source_id: a Tensor with shape [batch_size]
    raw_shape: a Tensor with shape [batch_size, 3]
    num_detections: maximum output length.

  Returns:
    A tensor size of [batch_size, num_detections, 6] represents boxes,
    labels and scores after NMS.
  """
  _, _, num_classes = scores_in.get_shape().as_list()
  # Broadcast per-image metadata to one copy per output detection so it
  # can be stacked into the final result tensor.
  source_id = tf.to_float(
      tf.tile(tf.expand_dims(source_id, 1), [1, num_detections]))
  raw_shape = tf.to_float(
      tf.tile(tf.expand_dims(raw_shape, 1), [1, num_detections, 1]))

  list_of_all_boxes = []
  list_of_all_scores = []
  list_of_all_classes = []
  # Skip background class.
  for class_i in range(1, num_classes, 1):
    # Gather the candidate boxes for this class, drop low scores, then
    # run padded per-class NMS.
    boxes = tf.batch_gather(boxes_in, top_k_indices[:, :, class_i])
    class_i_scores = scores_in[:, :, class_i]
    class_i_scores, boxes = _filter_scores(class_i_scores, boxes)
    (class_i_post_scores,
     class_i_post_boxes) = ssd_architecture.non_max_suppression_padded(
         scores=tf.to_float(class_i_scores),
         boxes=tf.to_float(boxes),
         max_output_size=num_detections,
         iou_threshold=ssd_constants.OVERLAP_CRITERIA)
    class_i_classes = tf.fill(tf.shape(class_i_post_scores), class_i)
    list_of_all_boxes.append(class_i_post_boxes)
    list_of_all_scores.append(class_i_post_scores)
    list_of_all_classes.append(class_i_classes)

  post_nms_boxes = tf.concat(list_of_all_boxes, axis=1)
  post_nms_scores = tf.concat(list_of_all_scores, axis=1)
  post_nms_classes = tf.concat(list_of_all_classes, axis=1)

  # sort all results.
  post_nms_scores, sorted_indices = tf.nn.top_k(
      tf.to_float(post_nms_scores), k=num_detections, sorted=True)

  post_nms_boxes = tf.batch_gather(post_nms_boxes, sorted_indices)
  post_nms_classes = tf.batch_gather(post_nms_classes, sorted_indices)

  # Each detection row: [source_id, ymin, xmin, ymax, xmax, score, class].
  detections_result = tf.stack([
      source_id,
      post_nms_boxes[:, :, 0],
      post_nms_boxes[:, :, 1],
      post_nms_boxes[:, :, 2],
      post_nms_boxes[:, :, 3],
      post_nms_scores,
      tf.to_float(post_nms_classes),
  ], axis=2)

  return detections_result


def concat_outputs(cls_outputs, box_outputs):
  """Concatenate predictions into a single tensor.

  This function takes the dicts of class and box prediction tensors and
  concatenates them into a single tensor for comparison with the ground
  truth boxes and class labels.

  Args:
    cls_outputs: an OrderDict with keys representing levels and values
      representing logits in
      [batch_size, height, width, num_anchors * num_classses].
    box_outputs: an OrderDict with keys representing levels and values
      representing box regression targets in
      [batch_size, height, width, num_anchors * 4].

  Returns:
    concatenanted cls_outputs with shape [batch_size, num_classes, N] and
    concatenanted box_outputs with shape [batch_size, 4, N], where N is
    number of anchors.
  """
  assert set(cls_outputs.keys()) == set(box_outputs.keys())

  # This sort matters. The labels assume a certain order based on
  # ssd_constants.FEATURE_SIZES, and this sort matches that convention.
  keys = sorted(cls_outputs.keys())

  flat_cls = []
  flat_box = []

  for i, k in enumerate(keys):
    # TODO(taylorrobie): confirm that this reshape, transpose,
    # reshape is correct.
    scale = ssd_constants.FEATURE_SIZES[i]
    last_dim_size = scale * scale * ssd_constants.NUM_DEFAULTS[i]

    split_shape = (ssd_constants.NUM_CLASSES, ssd_constants.NUM_DEFAULTS[i])
    assert cls_outputs[k].shape[3] == split_shape[0] * split_shape[1]
    flat_cls.append(
        tf.reshape(
            tf.transpose(cls_outputs[k], [0, 3, 1, 2]),
            [-1, ssd_constants.NUM_CLASSES, last_dim_size]))

    split_shape = (ssd_constants.NUM_DEFAULTS[i], 4)
    assert box_outputs[k].shape[3] == split_shape[0] * split_shape[1]
    flat_box.append(
        tf.reshape(
            tf.transpose(box_outputs[k], [0, 3, 1, 2]),
            [-1, 4, last_dim_size]))

  return tf.concat(flat_cls, axis=2), tf.concat(flat_box, axis=2)


def _model_fn(images, source_id, raw_shape, params, model):
  """Model definition for the SSD model based on ResNet-50.

  Args:
    images: the input image tensor with shape [batch_size, height, width, 3].
      The height and width are fixed and equal.
    source_id: a Tensor with shape [batch_size]
    raw_shape: a Tensor with shape [batch_size, 3]
    params: the dictionary defines hyperparameters of model. The default
      settings are in default_hparams function in this file.
    model: the SSD model outputs class logits and box regression outputs.

  Returns:
    spec: the EstimatorSpec or TPUEstimatorSpec to run training, evaluation,
      or prediction.
  """
  features = images

  def _model_outputs():
    return model(features, params, is_training_bn=False)

  if params['use_bfloat16']:
    # Run the backbone in bfloat16, then cast the heads back to float32
    # for the post-processing math below.
    with bfloat16.bfloat16_scope():
      cls_outputs, box_outputs = _model_outputs()
      levels = cls_outputs.keys()
      for level in levels:
        cls_outputs[level] = tf.cast(cls_outputs[level], tf.float32)
        box_outputs[level] = tf.cast(box_outputs[level], tf.float32)
  else:
    cls_outputs, box_outputs = _model_outputs()
    levels = cls_outputs.keys()

  flattened_cls, flattened_box = concat_outputs(cls_outputs, box_outputs)
  # Reorder coordinates before decoding; decode_boxes pairs
  # (dy, dx, dh, dw) deltas with (ymin, xmin, ymax, xmax) anchors.
  y_min, x_min, y_max, x_max = tf.split(flattened_box, 4, axis=1)
  flattened_box = tf.concat([x_min, y_min, x_max, y_max], axis=1)
  # [batch_size, 4, N] to [batch_size, N, 4]
  flattened_box = tf.transpose(flattened_box, [0, 2, 1])

  anchors = tf.convert_to_tensor(DefaultBoxes()('ltrb'))

  decoded_boxes = decode_boxes(
      encoded_boxes=flattened_box,
      anchors=anchors,
      weights=ssd_constants.BOX_CODER_SCALES)

  pred_scores = tf.nn.softmax(flattened_cls, axis=1)

  pred_scores, indices = select_top_k_scores(pred_scores,
                                             ssd_constants.MAX_NUM_EVAL_BOXES)

  detections = non_max_suppression(
      scores_in=pred_scores,
      boxes_in=decoded_boxes,
      top_k_indices=indices,
      source_id=source_id,
      raw_shape=raw_shape)
  return detections


def ssd_model_fn(images, source_id, raw_shape, params):
  """SSD model."""
  return _model_fn(
      images, source_id, raw_shape, params, model=ssd_architecture.ssd)


def default_hparams():
  # Default hyperparameters consumed by _model_fn via params['...'].
  return tf.contrib.training.HParams(
      use_bfloat16=True,
      transpose_input=True,
      nms_on_tpu=True,
      conv0_space_to_depth=False,
      use_cocoeval_cc=True,
      use_spatial_partitioning=False,
  )
I wrote about my love for We Are Scientists' début album With Love And Squalor. They now have an EP called Business Casual coming out October 15 and they've released the quiet-then-loud, mid-tempo rocker "Return the Favor" for our listening pleasure. Sounds good. Check it out!