text
stringlengths 29
850k
|
|---|
# -*- coding: utf-8 -*-
import numpy as np
def left_reach(perf_meas, i, pct_beh):
    """calculate the left reach

    For all model realisations, given a data evaluation point index and a
    degree of tolerance, get the left reach for all realisations and derive
    the maximum.

    Parameters
    ----------
    perf_meas : 2-D numpy ndarray
        Input array of shape NxM with N the number of model realisations and M
        the number of model evaluation points (time steps, measured values)
    i : int
        the index of the array defining the current model evaluation point to
        calculate the left reach
    pct_beh : float [0-1]
        degree of tolerance, defining the percentage of points that do not
        comply in order to continue the reach calculation
    Returns
    -------
    overzichtl : numpy ndarray
        Nx5 output array with on the individual columns (1) the index,
        (2) an active flag column used for internal calculation (1 while the
        realisation's reach is still growing), (3) the number of failures,
        (4) the reach and (5) the span for each of the model realisations
    maxreachl : int
        maximum left reach of all model realisations
    """
    par_size, data_size = perf_meas.shape
    # prepare the left reach overview array
    # columns: 0 = realisation index, 1 = active flag, 2 = failure count,
    #          3 = reach (NaN while undecided), 4 = span (NaN while undecided)
    overzichtl = np.array([np.arange(par_size),
                           np.ones(par_size),
                           np.zeros(par_size),
                           np.empty(par_size),
                           np.empty(par_size)]
                          ).transpose().astype(float)
    overzichtl[:, 3:5] = np.nan
    # derive for each par_set the length of the reach, walking left from i
    aantl = 0
    while (aantl <= i) & (sum(overzichtl[:, 1].astype(int)) != 0):
        # count a failure (zero performance value) for every still-active
        # realisation at the current left offset
        overzichtl[(overzichtl[:, 1] == 1) &
                   (np.abs(perf_meas[:, i - aantl]) == 0), 2] = \
            overzichtl[(overzichtl[:, 1] == 1) &
                       (np.abs(perf_meas[:, i - aantl]) == 0),
                       2] + 1  # vwe 2x
        aantl += 1
        # deactivate realisations whose failure count exceeds the tolerated
        # percentage of the points inspected so far
        overzichtl[overzichtl[:, 2] > pct_beh / 100. * aantl, 1] = 0
        # record the current reach for realisations that are still active
        overzichtl[overzichtl[:, 1] == 1, 3] = aantl
    # correct the reach length on end-of-line zeros
    if all(np.isnan(overzichtl[:, 3])):
        # no realisation completed a reach: fall back to the point index
        maxreachl = i
    else:
        maxreachl = i - (np.nanmax(overzichtl[:, 3], axis=0)).astype(int) + 1
        # trim the boundary while the point that defines the maximum reach is
        # itself a failure (zero) for every realisation reaching it
        while np.all(np.abs(perf_meas[i - overzichtl[:, 3].astype(
                int) + 1 == maxreachl, maxreachl]) == 0):  # vwe
            overzichtl[i - overzichtl[:, 3].astype(int) + 1 ==
                       maxreachl, 2:4] = \
                overzichtl[i - overzichtl[:, 3].astype(int) + 1 ==
                           maxreachl, 2:4] - 1
            maxreachl += 1
    # span: the left-most data index covered by each realisation's reach
    overzichtl[~np.isnan(overzichtl[:, 3]), 4] = i - \
        overzichtl[~np.isnan(
            overzichtl[:, 3]), 3] + 1
    return overzichtl, maxreachl
def right_reach(perf_meas, i, pct_beh):
    """calculate the right reach

    For all model realisations, given a data evaluation point index and a
    degree of tolerance, get the right reach for all realisations and derive
    the maximum.

    Parameters
    ----------
    perf_meas : 2-D numpy ndarray
        Input array of shape NxM with N the number of model realisations and M
        the number of model evaluation points (time steps, measured values)
    i : int
        the index of the array defining the current model evaluation point to
        calculate the right reach
    pct_beh : float [0-1]
        degree of tolerance, defining the percentage of points that do not
        comply in order to continue the reach calculation
    Returns
    -------
    overzichtr : numpy ndarray
        Nx5 output array with on the individual columns (1) the index,
        (2) an active flag column used for internal calculation (1 while the
        realisation's reach is still growing), (3) the number of failures,
        (4) the reach and (5) the span for each of the model realisations
    maxreachr : int
        maximum right reach of all model realisations
    """
    par_size, data_size = perf_meas.shape
    # prepare the right reach overview array
    # columns: 0 = realisation index, 1 = active flag, 2 = failure count,
    #          3 = reach (NaN while undecided), 4 = span (NaN while undecided)
    overzichtr = np.array([np.arange(par_size),
                           np.ones(par_size),
                           np.zeros(par_size),
                           np.empty(par_size),
                           np.empty(par_size)]
                          ).transpose().astype(float)
    overzichtr[:, 3:5] = np.nan
    # derive for each par_set the length of the reach, walking right from i
    aantr = 0
    while (i + aantr < data_size) & \
            (sum(overzichtr[:, 1].astype(int)) != 0):
        # count a failure (zero performance value) for every still-active
        # realisation at the current right offset
        overzichtr[(overzichtr[:, 1] == 1) &
                   (np.abs(perf_meas[:, i + aantr]) == 0), 2] = \
            overzichtr[(overzichtr[:, 1] == 1) &
                       (np.abs(perf_meas[:, i + aantr]) == 0),
                       2] + 1  # vwe 2x
        aantr += 1
        # deactivate realisations whose failure count exceeds the tolerated
        # percentage of the points inspected so far
        overzichtr[overzichtr[:, 2] > pct_beh / 100. * aantr, 1] = 0
        # record the current reach for realisations that are still active
        overzichtr[overzichtr[:, 1] == 1, 3] = aantr
    # correct the reach length on end-of-line zeros
    if all(np.isnan(overzichtr[:, 3])):
        # no realisation completed a reach: fall back to the point index
        maxreachr = i
    else:
        maxreachr = i + (np.nanmax(overzichtr[:, 3], axis=0)).astype(int) - 1
        # trim the boundary while the point that defines the maximum reach is
        # itself a failure (zero) for every realisation reaching it
        while np.all(np.abs(perf_meas[i + overzichtr[:, 3].astype(
                int) - 1 == maxreachr, maxreachr]) == 0):  # vwe
            overzichtr[i + overzichtr[:, 3].astype(int) - 1 ==
                       maxreachr, 2:4] = \
                overzichtr[i + overzichtr[:, 3].astype(int) - 1 ==
                           maxreachr, 2:4] - 1
            maxreachr -= 1
    # span: the right-most data index covered by each realisation's reach
    overzichtr[~np.isnan(overzichtr[:, 3]), 4] = i + \
        overzichtr[~np.isnan(overzichtr[:, 3]), 3] - 1
    return overzichtr, maxreachr
def breach_run(perf_meas, pcten):  # , vwe
    """derive breach for a given performance matrix

    Parameters
    ----------
    perf_meas : 2-D numpy ndarray
        Input array of shape NxM with N the number of model realisations and M
        the number of model evaluation points (time steps, measured values)
    pcten : list or 1-D numpy ndarray
        list of degrees of tolerance, defining the percentage of points
        that are allowed to fail
    vwe : Not yet implemented!

    Returns
    -------
    breach : numpy ndarray
        For each of the data points, the left and right maximum reach for
        each of the degrees of tolerance; tolerance j occupies columns
        2*j (left) and 2*j + 1 (right)
    """
    # BUG FIX: the docstring promises a plain list for `pcten`, but the code
    # accessed `pcten.size`, which only exists on ndarrays. Coerce once so
    # both lists and arrays work.
    pcten = np.asarray(pcten)
    par_size, data_size = perf_meas.shape
    breach = np.empty((data_size, 2 * pcten.size), dtype=int)
    # par_maxreach
    for i in range(data_size):
        for j, pct_beh in enumerate(pcten):
            # ----- LEFT REACH ------
            _, maxreachl = left_reach(perf_meas, i, pct_beh)
            breach[i, 2 * j] = maxreachl
            # ----- RIGHT REACH ------
            _, maxreachr = right_reach(perf_meas, i, pct_beh)
            breach[i, 2 * j + 1] = maxreachr
    return breach  # par_maxreach
|
See our collection of floor plans! All of our plans can be completely customized to suit your needs.
Browse through our current inventory! Call at 937-339-9944 to schedule your personal tour of any of these fine homes!
See our current home(s) that are open daily!
|
# Original (From Lecture 3)
# x = 0.5
# epsilon = 0.01
# numGuesses = 0
# low = 0.0
# high = x
# ans = (high + low)/2.0
# while abs(ans**2 - x) >= epsilon and ans <= x:
# print 'high=', high, 'low=', low, 'ans=', ans
# numGuesses += 1
# if ans**2 < x:
# low = ans
# else:
# high = ans
# ans = (high + low)/2.0
# print 'numGuesses =', numGuesses
# print ans, 'is close to square root of', x
# Fixing for our error with nums less than 1:
# Our condition for the while loop was failing
# because the answer was outside of our search area.
# We would reach a point where ans = 1.0/2.0 = 0.5
# which would then meet the condition ans <= high,
# keeping us within the loop, infinitely
# Bisection search for the square root of x (fixed version of the Lecture 3
# code above): keep halving the interval [low, high] until ans**2 is within
# epsilon of x.
x = 0.5
epsilon = 0.01
numGuesses = 0
low = 0.0
high = max(x, 1.0)  # fixes our initial error, our search range was 0 - .5, but the answer is outside search range
ans = (high + low)/2.0
while abs(ans**2 - x) >= epsilon and ans <= high:
    print 'high=', high, 'low=', low, 'ans=', ans
    numGuesses += 1
    if ans**2 < x:
        # guess too small: the root is in the upper half of the interval
        low = ans
    else:
        # guess too large (or exact): the root is in the lower half
        high = ans
    ans = (high + low)/2.0
print 'numGuesses =', numGuesses
print ans, 'is close to square root of', x
def withinEpsilon(x, y, epsilon):
    """x,y,epsilon floats. epsilon > 0.0
    returns True if x is within epsilon of y"""
    # measure the magnitude of the gap between the two values
    gap = x - y
    if gap < 0:
        gap = -gap
    return gap <= epsilon
# demo calls: |2 - 3| = 1, so a tolerance of 1 accepts, 0.5 rejects
print withinEpsilon(2, 3, 1)  # |2 - 3| = 1 <= 1 -> True
val = withinEpsilon(2, 3, 0.5)  # |2 - 3| = 1 <= 0.5 -> False
print val
def f(x):
    """Scope demo: rebinding x here only changes the function-local name."""
    x = x + 1
    print 'x in fn scope =', x
    return x
# the global x is unchanged by the call; only the returned value reflects +1
x = 3
z = f(x)
print 'z =', z
print 'x in global scope =', x
def f1(x):
    """Nested-scope demo: g's assignment creates a name local to g and
    never affects f1's own x."""
    def g():
        # binds a *new* local x inside g; the enclosing x is untouched
        x = 'abc'
    x = x + 1
    print 'x =', x
    g()
    # assert False
    return x

x = 3
z = f1(x)
def isEven(i):
    """assumes i a positive int
    returns True if i is even, otherwise False"""
    # an integer is even exactly when its lowest bit is clear
    return (i & 1) == 0
def findRoot(pwr, val, epsilon):
    """assumes pwr an int; val, epsilon floats
    pwr and epsilon > 0
    if it exists,
    returns ans such that ans**pwr is within epsilon of val
    otherwise returns None"""
    assert type(pwr) == int and type(val) == float and type(epsilon) == float
    assert pwr > 0 and epsilon > 0
    if isEven(pwr) and val < 0:
        # even powers of real numbers are never negative
        return None
    # BUG FIX: the root of a value with magnitude < 1 has a magnitude LARGER
    # than the value itself (e.g. (-0.5)**(1/3.) ~= -0.794). The previous
    # bound low = -abs(val) excluded the root for -1 < val < 0 with odd pwr,
    # so the bisection loop below never terminated. Use a symmetric interval
    # that always contains the root.
    high = max(abs(val), 1.0)
    low = -high
    ans = (high + low)/2.0
    # classic bisection: the interval [low, high] always brackets the root
    while not withinEpsilon(ans**pwr, val, epsilon):
        if ans**pwr < val:
            low = ans
        else:
            high = ans
        ans = (high + low)/2.0
    return ans
def testFindRoot():
    """x float, epsilon float, pwr positive int"""
    # exercise findRoot across negative, small and large values for powers 1-3
    for x in (-1.0, 1.0, 3456.0):
        for pwr in (1, 2, 3):
            ans = findRoot(pwr, x, 0.001)
            if ans is None:
                # even root of a negative number does not exist in the reals
                print 'The answer is imaginary'
            else:
                print ans, 'to the power of', pwr, 'is close to', x
# sum the decimal digits of 1952 by iterating over its string form
sumDigits = 0
for c in str(1952):
    sumDigits += int(c)
print 'sumDigits =', sumDigits

# collect all proper divisors of x in a tuple
x = 100
divisors = ()
for i in range(1, x):
    if x % i == 0:
        # tuples are immutable: concatenation builds a new tuple each time
        divisors = divisors + (i,)
print divisors[0]
print divisors[1]
print divisors[2]
print divisors[2:4]  # slicing a tuple yields a tuple
|
If there is anything in particular you would like to find — a specific product, or even more information about Local Interiors — our team will be able to help. We will endeavour to find the solution to all your queries. Please click on the link below to fill out the form.
|
# Standard library imports
import collections
# Local imports
from uplink import interfaces, utils
from uplink.clients import io
def get_api_definitions(service):
    """
    Returns all attributes with type
    `uplink.interfaces.RequestDefinitionBuilder` defined on the given
    class.

    Note:
        All attributes are considered, not only defined directly on the class.

    Args:
        service: A class object.
    """
    # In Python 3.3, `inspect.getmembers` doesn't respect the descriptor
    # protocol when the first argument is a class: descriptors bound to
    # `service` are returned as-is instead of through their __get__ method
    # (seemingly fixed in Python 2.7 and 3.4+). Looking each name up with
    # `getattr` forces Python's normal attribute lookup protocol, which is
    # a decent workaround to ensure parity.
    is_definition = interfaces.RequestDefinitionBuilder.__instancecheck__
    members = ((name, getattr(service, name)) for name in dir(service))
    return [(name, value) for name, value in members if is_definition(value)]
def set_api_definition(service, name, definition):
    """Bind `definition` to attribute `name` on the `service` class."""
    setattr(service, name, definition)
class RequestBuilder(object):
    """Accumulates the pieces of an HTTP request (method, URL parts,
    metadata, hooks, templates) and exposes them through properties."""

    def __init__(self, client, converter_registry, base_url):
        self._method = None
        # relative URL template; starts empty until `relative_url` is set
        self._relative_url_template = utils.URIBuilder("")
        self._return_type = None
        self._client = client
        self._base_url = base_url

        # TODO: Pass this in as constructor parameter
        # TODO: Delegate instantiations to uplink.HTTPClientAdapter
        self._info = collections.defaultdict(dict)
        self._context = {}
        self._converter_registry = converter_registry
        self._transaction_hooks = []
        self._request_templates = []

    @property
    def client(self):
        """The underlying HTTP client."""
        return self._client

    @property
    def method(self):
        """The HTTP method (settable)."""
        return self._method

    @method.setter
    def method(self, method):
        self._method = method

    @property
    def base_url(self):
        """The base URL; joined with `relative_url` to form `url`."""
        return self._base_url

    def set_url_variable(self, variables):
        """Fill in URI-template variables of the relative URL."""
        self._relative_url_template.set_variable(variables)

    @property
    def relative_url(self):
        """The relative URL built from the current template state."""
        return self._relative_url_template.build()

    @relative_url.setter
    def relative_url(self, url):
        # replacing the template discards any variables set previously
        self._relative_url_template = utils.URIBuilder(url)

    @property
    def info(self):
        """Per-request metadata: a defaultdict of dicts."""
        return self._info

    @property
    def context(self):
        """Free-form dict shared across handlers of this request."""
        return self._context

    @property
    def transaction_hooks(self):
        """Iterator over the registered transaction hooks."""
        return iter(self._transaction_hooks)

    def get_converter(self, converter_key, *args, **kwargs):
        """Look up the converter factory for `converter_key` and call it."""
        return self._converter_registry[converter_key](*args, **kwargs)

    @property
    def return_type(self):
        """The declared return type, if any (settable)."""
        return self._return_type

    @return_type.setter
    def return_type(self, return_type):
        self._return_type = return_type

    @property
    def request_template(self):
        """All registered request templates combined into one composite."""
        return io.CompositeRequestTemplate(self._request_templates)

    @property
    def url(self):
        """The absolute request URL (base URL joined with relative URL)."""
        return utils.urlparse.urljoin(self.base_url, self.relative_url)

    def add_transaction_hook(self, hook):
        """Register a transaction hook."""
        self._transaction_hooks.append(hook)

    def add_request_template(self, template):
        """Register an additional request template."""
        self._request_templates.append(template)
|
I cannot begin to tell you what it's like to have more than 15,000 people cheering for a show that I've been part of developing and am part of presenting. I love working with James, our great band and singers, and the more than 80 staff and crew members on this tour. I didn't expect to be touring at age 68, or to enjoy touring as much as I do. But acknowledging a wider spectrum of life on the road than the high of being onstage and being part of delivering what audiences have come to see is how I keep a sense of balance in a business that lends itself very much to imbalance.
For nearly three hours on a show night, the band, James, and I experience all the joy and glamour of show business to the Nth degree. The rest of our time on the road is spent on travel, trying to catch up on sleep, and attending to basic necessities such as eating, doing laundry, showering, sleeping, and visiting a gym or a pool whenever that's possible.
Whenever I find life on the road challenging, complaining is not an option - not when I remember that the crew's workday lasts many more hours than that of the band. Including but not limited to drivers, riggers, audio, video, lighting, and instrument techs, tour and production management teams, crane and forklift operators, assemblers and operators of the mechanism that turns our stage "in the round," and everyone else in the disciplined and dedicated group of people who pack, unpack, and move a staggering amount of large production cases from venue to venue, crew has all the challenges we do and more.
The crew's unfailing good cheer, professional conduct, and, as they keep reassuring me, personal satisfaction in making it possible for the band, James, and me to deliver the aforementioned hours of joy to ourselves and our audiences is the standard to which James and I aspire. I know James feels the same because we often talk about how much we value everyone who makes this experience possible. Thank you.
|
# stdlib imports
import uuid
import random
import datetime
# django imports
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.db import IntegrityError
from django.core.exceptions import ValidationError
from django.utils.timezone import utc
from django.core import urlresolvers
# local imports
from utils.hash import random_base36_string
class Lottery(models.Model):
    """Lottery model"""
    hash_key = models.UUIDField(default=uuid.uuid4, editable=False)
    title = models.CharField(max_length=500)
    registration_deadline = models.DateTimeField()
    created = models.DateTimeField(auto_now_add=True)
    # In order not to retrieve winner every time.
    # Using a string instead of a class because the class isn't defined yet
    winner = models.ForeignKey(
        'LotteryParticipant', null=True,
        blank=True, related_name='lottery_winner', on_delete=models.SET_NULL)
    active = models.BooleanField(default=True)

    class Meta:
        verbose_name = _('Lottery')
        verbose_name_plural = _('Lotteries')

    def __unicode__(self):
        return unicode('%s: %i' % (self.title, self.pk))

    def get_winner(self):
        """Returning the previously selected winner if exists,
        if not, select one at random, persist it and return it
        """
        if self.winner is not None:
            return self.winner
        participants = LotteryParticipant.objects.filter(lottery=self)
        if participants.count() > 0:
            winner = random.choice(list(participants))
            winner.is_winner = True
            winner.save()
            # re-read from the database so the cached FK points at the
            # stored row
            winner = LotteryParticipant.objects.get(
                is_winner=True, lottery=self)
            self.winner = winner
            self.save()
            return winner
        # implicitly returns None when the lottery has no participants

    def is_active(self):
        """Determines if a lottery is still accepting applicants"""
        # BUG FIX: the previous code stamped the *local* wall-clock time
        # (datetime.now()) with the UTC timezone, which shifts the deadline
        # comparison on any server not running in UTC; utcnow() yields the
        # correct UTC instant.
        return self.active and self.registration_deadline > \
            datetime.datetime.utcnow().replace(tzinfo=utc) and \
            self.winner is None

    def get_url(self):
        """Returns either the registration url or the winner url"""
        if self.winner is None:
            return urlresolvers.reverse(
                'lotteryapp:registration_form', args=[self.hash_key])
        return urlresolvers.reverse(
            'lotteryapp:lottery_winner', args=[self.hash_key])
class LotteryParticipant(models.Model):
    """Lottery participants model"""
    hash_key = models.UUIDField(default=uuid.uuid4, editable=False)
    lottery = models.ForeignKey(Lottery)
    email = models.EmailField()
    first_name = models.CharField(max_length=50)
    last_name = models.CharField(max_length=50)
    entry_code = models.CharField(max_length=10, blank=True)
    is_winner = models.BooleanField(default=False)
    # NOTE(review): field name keeps the original misspelling 'registerd'
    # because renaming it would require a schema migration.
    registerd = models.DateTimeField(auto_now_add=True)

    class Meta:
        verbose_name = _('Lottery Participant')
        verbose_name_plural = _('Lottery Participants')
        unique_together = ('email', 'entry_code')

    def __unicode__(self):
        return unicode('%s %s: %s' % (
            self.first_name, self.last_name, self.lottery))

    def save(self, *args, **kwargs):
        """Extra functionalities before saving participant.
        Note: will not execute in case of bulk insert
        """
        # Raising exception in case participant is set as a winner
        # and lottery already has a winner.
        # BUG FIX: the previous check used ``count() > 1`` over *all*
        # winners, so a second winner slipped through (one existing winner
        # only made the count 1) and re-saving the current winner counted
        # itself. Exclude self and reject any other existing winner.
        if self.is_winner:
            other_winners = type(self).objects.filter(
                lottery=self.lottery, is_winner=True)
            if self.pk is not None:
                other_winners = other_winners.exclude(pk=self.pk)
            if other_winners.exists():
                raise IntegrityError(
                    'Lottery %s already has a winner' % self.lottery)
        # Creates a random base36 entry code of 10 digits
        # possibility of collision = 1/36^10 => almost 0
        if self.entry_code is None or self.entry_code == '':
            self.entry_code = random_base36_string(size=10)
        # email to lowercase
        if self.email is not None and self.email != '':
            self.email = self.email.lower()
        super(LotteryParticipant, self).save(*args, **kwargs)

    def validate_unique(self, exclude=None, *args, **kwargs):
        """extends validation on unique values to determine whether
        email already registered for lottery or not
        """
        participants = LotteryParticipant.objects.filter(
            email=self.email.lower(), lottery=self.lottery)
        # a brand-new participant may not reuse an email already registered
        if participants.count() > 0 and self.id is None:
            raise ValidationError({
                'email': ['Email already exists for this lottery']})
        # an existing participant may not collide with anyone but itself
        for participant in participants:
            if participant.id != self.id:
                raise ValidationError({
                    'email': ['Email already exists for this lottery']})
        super(LotteryParticipant, self).validate_unique(
            exclude=exclude, *args, **kwargs)

    def full_name(self):
        """Return the participant's full name."""
        return '%s %s' % (self.first_name, self.last_name)
|
Insomnia can be caused by stress and stress can be increased by lack of sleep. This event will focus on techniques to use and oils to use that may alleviate stressful feelings and assist in having a restful night's sleep. You'll make a 10ml roller sleep blend to take home for free, valued at $20.00.
Open to all doTERRA members and teams. $5 for all members in Darnie Ewart's team. $10 for all doTERRA members and FREE for anyone attending who isn't already affiliated with or being sampled by a doTERRA Wellness Advocate.
Using essential oils for calming anxiousness and assisting in restful sleep.
|
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
import flask_admin as admin
from flask_admin.contrib import sqla
# Create application
app = Flask(__name__)

# Create dummy secret key so we can use sessions
app.config['SECRET_KEY'] = '123456790'

# Create file-backed SQLite database
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///sample_db_3.sqlite'
app.config['SQLALCHEMY_ECHO'] = True
db = SQLAlchemy(app)


# Flask views
@app.route('/')
def index():
    """Landing page: just a link into the admin interface."""
    return '<a href="/admin/">Click me to get to Admin!</a>'
class Person(db.Model):
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(50))
    # one-to-many: a person owns any number of pets
    pets = db.relationship('Pet', backref='person')

    def __unicode__(self):
        # label shown in admin list views and form selects
        return self.name
class Pet(db.Model):
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(50))
    # owning person; populated via the `person` backref on Person.pets
    person_id = db.Column(db.Integer, db.ForeignKey('person.id'))
    # only available pets are offered in the PersonAdmin forms below
    available = db.Column(db.Boolean)

    def __unicode__(self):
        # label shown in admin list views and form selects
        return self.name
class PersonAdmin(sqla.ModelView):
    """ Override ModelView to filter options available in forms. """

    def create_form(self, obj=None):
        # FIX: accept ``obj`` (with the same default) to match the base
        # ModelView.create_form signature instead of silently dropping it;
        # existing no-argument callers are unaffected.
        return self._use_filtered_parent(
            super(PersonAdmin, self).create_form(obj)
        )

    def edit_form(self, obj=None):
        # FIX: ``obj`` defaults to None to match the base ModelView signature.
        return self._use_filtered_parent(
            super(PersonAdmin, self).edit_form(obj)
        )

    def _use_filtered_parent(self, form):
        # swap the query factory of the 'pets' select so it only offers
        # the filtered choices below
        form.pets.query_factory = self._get_parent_list
        return form

    def _get_parent_list(self):
        # only show available pets in the form
        return Pet.query.filter_by(available=True).all()

    def __unicode__(self):
        # NOTE(review): looks like a copy-paste leftover from the model
        # classes above; kept to avoid changing the public interface.
        return self.name
# Create admin interface and register a view per model; PersonAdmin filters
# the selectable pets, the plain ModelView for Pet does not.
admin = admin.Admin(app, name='Example: SQLAlchemy - Filtered Form Selectable',
                    template_mode='bootstrap3')
admin.add_view(PersonAdmin(Person, db.session))
admin.add_view(sqla.ModelView(Pet, db.session))
if __name__ == '__main__':
    # Recreate DB from scratch on every run (demo only)
    db.drop_all()
    db.create_all()

    # seed sample data: one person and three pets, one of them unavailable
    # so the filtered form can be observed
    person = Person(name='Bill')
    pet1 = Pet(name='Dog', available=True)
    pet2 = Pet(name='Fish', available=True)
    pet3 = Pet(name='Ocelot', available=False)
    db.session.add_all([person, pet1, pet2, pet3])
    db.session.commit()

    # Start app
    app.run(debug=True)
|
SEAFORD — A 31-year-old Felton man was charged with drug and criminal mischief offenses following a Sunday traffic stop on Middleford Road, Seaford Police said.
Authorities said Paul Benchoff fled from the traffic stop on foot into a marshy area along the Nanticoke River, spokesman Sr. Corporal Eric Chambers said. Mr. Benchoff was taken into custody after a brief struggle, police said.
Charges included resisting arrest, possession of drug paraphernalia (two counts), criminal mischief, three traffic violations, shoplifting and breach of release. A $6,050 secured bail was set.
Once in custody, police said, it became known that Mr. Benchoff had an active warrant for his arrest from the Camden Delaware Police Department and that he was also a suspect in a shoplifting that occurred at the Lowes in Seaford. It also became known that Mr. Benchoff has previously been issued a no contact order with any Lowes Stores from the Sussex County Court of Common Pleas, police said.
Mr. Benchoff was arraigned before Justice of the Peace Court and committed to the Department of Correction on the new Seaford charges.
|
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from recipe_engine import recipe_api
import shlex
# Default time (seconds) a task may wait for a bot before expiring: 4 hours.
DEFAULT_TASK_EXPIRATION = 4*60*60
# Default time (seconds) a running task may take before timing out: 1 hour.
DEFAULT_TASK_TIMEOUT = 60*60
class SkiaSwarmingApi(recipe_api.RecipeApi):
  """Provides steps to run Skia tasks on swarming bots."""

  @property
  def swarming_temp_dir(self):
    """Path where artifacts like isolate file and json output will be stored."""
    return self.m.path['slave_build'].join('swarming_temp_dir')

  @property
  def tasks_output_dir(self):
    """Directory where the outputs of the swarming tasks will be stored."""
    return self.swarming_temp_dir.join('outputs')

  def isolated_file_path(self, task_name):
    """Get the path to the given task's .isolated file."""
    return self.swarming_temp_dir.join('skia-task-%s.isolated' % task_name)

  def setup(self, luci_go_dir, swarming_rev=None):
    """Performs setup steps for swarming."""
    # check out the swarming client at the requested revision and verify it
    self.m.swarming_client.checkout(revision=swarming_rev)
    self.m.swarming.check_client_version()
    self.setup_go_isolate(luci_go_dir)

  # TODO(rmistry): Remove once the Go binaries are moved to recipes or buildbot.
  def setup_go_isolate(self, luci_go_dir):
    """Generates and puts in place the isolate Go binary."""
    # Fetch the per-platform luci-go binaries from Google Storage.
    self.m.step('download luci-go linux',
                ['download_from_google_storage', '--no_resume',
                 '--platform=linux*', '--no_auth', '--bucket', 'chromium-luci',
                 '-d', luci_go_dir.join('linux64')])
    self.m.step('download luci-go mac',
                ['download_from_google_storage', '--no_resume',
                 '--platform=darwin', '--no_auth', '--bucket', 'chromium-luci',
                 '-d', luci_go_dir.join('mac64')])
    self.m.step('download luci-go win',
                ['download_from_google_storage', '--no_resume',
                 '--platform=win32', '--no_auth', '--bucket', 'chromium-luci',
                 '-d', luci_go_dir.join('win64')])
    # Copy binaries to the expected location.
    dest = self.m.path['slave_build'].join('luci-go')
    self.m.file.rmtree('Go binary dir', dest)
    self.m.file.copytree('Copy Go binary',
                         source=luci_go_dir,
                         dest=dest)

  def isolate_and_trigger_task(
      self, isolate_path, isolate_base_dir, task_name, isolate_vars,
      swarm_dimensions, isolate_blacklist=None, extra_isolate_hashes=None,
      idempotent=False, store_output=True, extra_args=None, expiration=None,
      hard_timeout=None):
    """Isolate inputs and trigger the task to run."""
    # default to linux when the caller supplies no 'os' dimension
    os_type = swarm_dimensions.get('os', 'linux')
    isolated_hash = self.isolate_task(
        isolate_path, isolate_base_dir, os_type, task_name, isolate_vars,
        blacklist=isolate_blacklist, extra_hashes=extra_isolate_hashes)
    tasks = self.trigger_swarming_tasks([(task_name, isolated_hash)],
                                        swarm_dimensions,
                                        idempotent=idempotent,
                                        store_output=store_output,
                                        extra_args=extra_args,
                                        expiration=expiration,
                                        hard_timeout=hard_timeout)
    # exactly one (name, hash) pair was passed, so exactly one task returns
    assert len(tasks) == 1
    return tasks[0]

  def isolate_task(self, isolate_path, base_dir, os_type, task_name,
                   isolate_vars, blacklist=None, extra_hashes=None):
    """Isolate inputs for the given task."""
    self.create_isolated_gen_json(isolate_path, base_dir, os_type,
                                  task_name, isolate_vars,
                                  blacklist=blacklist)
    hashes = self.batcharchive([task_name])
    assert len(hashes) == 1
    isolated_hash = hashes[0][1]
    if extra_hashes:
      # adding includes rewrites the .isolated file, so use the new hash
      isolated_hash = self.add_isolated_includes(task_name, extra_hashes)
    return isolated_hash

  def create_isolated_gen_json(self, isolate_path, base_dir, os_type,
                               task_name, extra_variables, blacklist=None):
    """Creates an isolated.gen.json file (used by the isolate recipe module).

    Args:
      isolate_path: path obj. Path to the isolate file.
      base_dir: path obj. Dir that is the base of all paths in the isolate
          file.
      os_type: str. The OS type to use when archiving the isolate file.
          Eg: linux.
      task_name: str. The isolated.gen.json file will be suffixed by this str.
      extra_variables: dict of str to str. The extra vars to pass to isolate.
          Eg: {'SLAVE_NUM': '1', 'MASTER': 'ChromiumPerfFYI'}
      blacklist: list of regular expressions indicating which
          files/directories not to archive.
    """
    self.m.file.makedirs('swarming tmp dir', self.swarming_temp_dir)
    isolated_path = self.isolated_file_path(task_name)
    isolate_args = [
      '--isolate', isolate_path,
      '--isolated', isolated_path,
      '--config-variable', 'OS', os_type,
    ]
    if blacklist:
      for b in blacklist:
        isolate_args.extend(['--blacklist', b])
    for k, v in extra_variables.iteritems():
      isolate_args.extend(['--extra-variable', k, v])
    isolated_gen_dict = {
      'version': 1,
      'dir': base_dir,
      'args': isolate_args,
    }
    isolated_gen_json = self.swarming_temp_dir.join(
        '%s.isolated.gen.json' % task_name)
    self.m.file.write(
        'Write %s.isolated.gen.json' % task_name,
        isolated_gen_json,
        self.m.json.dumps(isolated_gen_dict, indent=4),
    )

  def batcharchive(self, targets):
    """Calls batcharchive on the skia.isolated.gen.json file.

    Args:
      targets: list of str. The suffixes of the isolated.gen.json files to
               archive.

    Returns:
      list of tuples containing (task_name, swarming_hash).
    """
    return self.m.isolate.isolate_tests(
        verbose=True,  # To avoid no output timeouts.
        build_dir=self.swarming_temp_dir,
        targets=targets).presentation.properties['swarm_hashes'].items()

  def add_isolated_includes(self, task_name, include_hashes):
    """Add the hashes to the task's .isolated file, return new .isolated hash.

    Args:
      task: str. Name of the task to which to add the given hash.
      include_hashes: list of str. Hashes of the new includes.

    Returns:
      Updated hash of the .isolated file.
    """
    isolated_file = self.isolated_file_path(task_name)
    # rewrite the .isolated file in place, appending the extra hashes to
    # its 'includes' list
    self.m.python.inline('add_isolated_input', program="""
      import json
      import sys
      with open(sys.argv[1]) as f:
        isolated = json.load(f)
      for h in sys.argv[2:]:
        isolated['includes'].append(h)
      with open(sys.argv[1], 'w') as f:
        json.dump(isolated, f, sort_keys=True)
      """, args=[isolated_file] + include_hashes)
    # re-upload the modified file; the first whitespace-separated token of
    # the archiver's stdout is the new hash
    isolateserver = self.m.swarming_client.path.join('isolateserver.py')
    r = self.m.python('upload new .isolated file for %s' % task_name,
                      script=isolateserver,
                      args=['archive', '--isolate-server',
                            self.m.isolate.isolate_server, isolated_file],
                      stdout=self.m.raw_io.output())
    return shlex.split(r.stdout)[0]

  def trigger_swarming_tasks(
      self, swarm_hashes, dimensions, idempotent=False, store_output=True,
      extra_args=None, expiration=None, hard_timeout=None):
    """Triggers swarming tasks using swarm hashes.

    Args:
      swarm_hashes: list of str. List of swarm hashes from the isolate
          server.
      dimensions: dict of str to str. The dimensions to run the task on.
          Eg: {'os': 'Ubuntu', 'gpu': '10de', 'pool': 'Skia'}
      idempotent: bool. Whether or not to de-duplicate tasks.
      store_output: bool. Whether task output should be stored.
      extra_args: list of str. Extra arguments to pass to the task.
      expiration: int. Task will expire if not picked up within this time.
          DEFAULT_TASK_EXPIRATION is used if this argument is None.
      hard_timeout: int. Task will timeout if not completed within this time.
          DEFAULT_TASK_TIMEOUT is used if this argument is None.

    Returns:
      List of swarming.SwarmingTask instances.
    """
    swarming_tasks = []
    for task_name, swarm_hash in swarm_hashes:
      swarming_task = self.m.swarming.task(
          title=task_name,
          isolated_hash=swarm_hash)
      if store_output:
        swarming_task.task_output_dir = self.tasks_output_dir.join(task_name)
      swarming_task.dimensions = dimensions
      swarming_task.idempotent = idempotent
      swarming_task.priority = 90
      swarming_task.expiration = (
          expiration if expiration else DEFAULT_TASK_EXPIRATION)
      swarming_task.hard_timeout = (
          hard_timeout if hard_timeout else DEFAULT_TASK_TIMEOUT)
      if extra_args:
        swarming_task.extra_args = extra_args
      swarming_tasks.append(swarming_task)
    # all tasks are triggered in a single batch
    self.m.swarming.trigger(swarming_tasks)
    return swarming_tasks

  def collect_swarming_task(self, swarming_task):
    """Collects the specified swarming task.

    Args:
      swarming_task: An instance of swarming.SwarmingTask.
    """
    return self.m.swarming.collect_task(swarming_task)

  def collect_swarming_task_isolate_hash(self, swarming_task):
    """Wait for the given swarming task to finish and return its output hash.

    Args:
      swarming_task: An instance of swarming.SwarmingTask.

    Returns:
      the hash of the isolate output of the task.
    """
    res = self.collect_swarming_task(swarming_task)
    # single-shard task: read the isolated output of shard 0
    return res.json.output['shards'][0]['isolated_out']['isolated']
|
Microprocessors are silicon chips that contain a computer's central processing unit (CPU)—the device that executes commands entered into the computer. Along with clocks and main memory, CPUs are among a computer's main components. The terms CPU and microprocessor often are used interchangeably. Essentially, microprocessors are responsible for manipulating data and performing numeric calculations and logical comparisons. At the heart of microprocessors are tiny electronic switches called transistors, which allow digital computers to process information in the form of electrical signals. These signals are in one of two states (on or off), and are represented by ones and zeroes, respectively. High-level programming languages like Java or C++, used to write popular software programs, eventually are translated to the machine language of ones and zeroes that computers understand.
Intel was the first company to produce a microprocessor for commercial use. Called the 4004, it was released in the early 1970s and contained slightly more than 2,000 transistors. By the early 2000s, microprocessors contained more than 5 million transistors on a single silicon chip. The more transistors a chip has, the more quickly it can process information. A microprocessor's clock speed defines the number of instructions it can carry out per second. This figure is expressed in Megahertz (MHz) or Gigahertz (GHz). In 2001 the processing speeds of some microprocessors exceeded 1.7 GHz.
In mid-2001 Intel announced experimental technology that it called "Wireless-Internet-On-A-Chip." Essentially, the technology consisted of a silicon chip that held a microprocessor, as well as analog communication circuits and flash memory. According to Intel, the technology potentially would lead to the development of more powerful wireless Internet devices. Around the same time, Intel and Hewlett-Packard announced the launch of the Itanium Processor, a new generation of microprocessor the companies co-developed for use in servers and workstation computers.
Borck, James R. "Life After Moore's Law: Quantum Computing." InfoWorld, October 16, 2000.
Intel Corp. "Intel Creates Technology To Enable 'Wireless-Internet-On-A-Chip."' Intel Corp., May 17, 2001. Available from www.intel.com/pressroom/archive.
——. "Gordon E. Moore." Intel Corp., May 29, 2001. Available from www.intel.com/pressroom.
——. "How Microprocessors Work." Intel Corp., May 29, 2001. Available from www.intel.com/education.
——. "How Transistors Work." Intel Corp., May 29, 2001. Available from www.intel.com/education.
"Microprocessor." Ecommerce Webopedia, May 25, 2001. Available from www.e-comm.webopedia.com.
"Microprocessor." Tech Encyclopedia, May 25, 2001. Available from www.techweb.com.
"Microprocessor." Gale Encyclopedia of E-Commerce. . Encyclopedia.com. 19 Apr. 2019 <https://www.encyclopedia.com>.
microprocessor A semiconductor chip, or chip set, that implements the central processor of a computer. Microprocessors consist of, at a minimum, an ALU and a control unit. They are characterized by speed, word length (internal and external), architecture, and instruction set, which may be either fixed or microprogrammed. It is the combination of these characteristics and not just the cycle time that determines the performance of a microprocessor.
Most microprocessors have a fixed instruction set. Microprogrammed processors have a control store containing the microcode or firmware that defines the processor's instruction set; such processors may either be implemented on a single chip or constructed from bit-slice elements. RISC microprocessors are designed to execute a small number of simple instructions extremely fast.
The processor's architecture determines what register, stack, addressing, and I/O facilities are available, as well as defining the processor's primitive data types. The data types, which are the fundamental entities that can be manipulated by the instruction set, have included bit, nibble (4 bits), byte (8 bits), word (16 bits), and double words (32 bits). Note that a word is usually defined as the number of bits in the processor's internal data bus rather than always being 16 bits. Instructions generally include arithmetic, logical, flow-of-control, and data movement (between stacks, registers, memory, and I/O ports). With some microprocessors, coprocessors can be added to the system in order to extend the range of data types and instructions supported, e.g. floating-point numbers and the set of arithmetic operations defined on them.
The first microprocessor, the four-chip set Intel 4004, appeared in 1971 accompanied by considerable debate about its utility and marketability. It was the outcome of an idea proposed by Ted Hoff of Intel Corp. for a calculator that could implement a simple set of instructions in hardware but permitted complex sequences of them to be stored in a read-only memory (ROM). The result of his proposal was a design for a four-chip set consisting of a CPU, ROM, RAM, and a shift-register chip, the chip design proceeding in 1970 under the direction of Federico Faggin, later the founder of Zilog, Inc. The Intel 4004 had a 4-bit data bus, could address 4.5 Kbytes of memory, and had 45 instructions. Its 8-bit counterpart, the Intel 8008, was introduced in 1974 and its improved derivative, the Zilog Z80, in 1976. By this time there were over 50 microprocessors on the market.
The next generation of microprocessors included the Zilog Z8000, Motorola 68000, Intel 8086, National 16000, as well as the older Texas Instruments 9900 and Digital Equipment Corporation LSI-11. All of these chips use a 16-bit-wide external data bus. Higher performance microprocessors that use 32-bit external data buses include the Intel386, Intel486, Motorola 68030, and Digital's VAX 78032 and 78132 (processor and FPA). Processors using a 64-bit external bus are now available, an example being Intel's Pentium processor. RISC microprocessor chips with a 64-bit architecture include the PowerPC and Alpha AXP. Currently (2004) the market is dominated by Intel and AMD, with processors using clock frequencies of up to 3 GHz.
"microprocessor." A Dictionary of Computing. . Encyclopedia.com. 19 Apr. 2019 <https://www.encyclopedia.com>.
microprocessor, integrated circuit containing the arithmetic, logic, and control circuitry required to interpret and execute instructions from a computer program. When combined with other integrated circuits that provide storage for data and programs, often on a single semiconductor base to form a chip, the microprocessor becomes the heart of a small computer, or microcomputer. Microprocessors are classified by the semiconductor technology of their design (TTL, transistor-transistor logic; CMOS, complementary-metal-oxide semiconductor; or ECL, emitter-coupled logic), by the width of the data format (4-bit, 8-bit, 16-bit, 32-bit, or 64-bit) they process; and by their instruction set (CISC, complex-instruction-set computer, or RISC, reduced-instruction-set computer; see RISC processor). TTL technology is most commonly used, while CMOS is favored for portable computers and other battery-powered devices because of its low power consumption. ECL is used where the need for its greater speed offsets the fact that it consumes the most power. Four-bit devices, while inexpensive, are good only for simple control applications; in general, the wider the data format, the faster and more expensive the device. CISC processors, which have 70 to several hundred instructions, are easier to program than RISC processors, but are slower and more expensive.
Developed during the 1970s, the microprocessor became most visible as the central processor of the personal computer. Microprocessors also play supporting roles within larger computers as smart controllers for graphics displays, storage devices, and high-speed printers. However, the vast majority of microprocessors are used to control everything from consumer appliances to smart weapons. The microprocessor has made possible the inexpensive hand-held electronic calculator, the digital wristwatch, and the electronic game. Microprocessors are used to control consumer electronic devices, such as the programmable microwave oven and DVD player; to regulate gasoline consumption and antilock brakes in automobiles; to monitor alarm systems; and to operate automatic tracking and targeting systems in aircraft, tanks, and missiles and to control radar arrays that track and identify aircraft, among other defense applications.
See A. R. Ismail and V. M. Rooney, Microprocessor Hardware and Software Concepts (1987); I. L. Sayers, A. P. Robson, A. E. Adams, and G. E. Chester, Principles of Microprocessors (1991); M. Slater, A Guide to RISC Microprocessors (1992).
"microprocessor." The Columbia Encyclopedia, 6th ed.. . Encyclopedia.com. 19 Apr. 2019 <https://www.encyclopedia.com>.
mi·cro·proc·es·sor / ˌmīkrəˈpräsesər; -ˈprōˌsesər/ • n. an integrated circuit that contains all the functions of a central processing unit of a computer. DERIVATIVES: mi·cro·proc·ess·ing n.
"microprocessor." The Oxford Pocket Dictionary of Current English. . Encyclopedia.com. 19 Apr. 2019 <https://www.encyclopedia.com>.
microprocessor Complex integrated circuit (chip) used to control the operation of a computer or other equipment.
"microprocessor." World Encyclopedia. . Encyclopedia.com. 19 Apr. 2019 <https://www.encyclopedia.com>.
"microprocessor." Oxford Dictionary of Rhymes. . Encyclopedia.com. 19 Apr. 2019 <https://www.encyclopedia.com>.
|
"""
Atmosphere quota rest api.
"""
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework import status
from django.shortcuts import get_object_or_404
from api.v1.serializers import QuotaSerializer
from api.v1.views.base import AuthAPIView
from core.models import Quota
class QuotaList(AuthAPIView):
    """
    Lists or creates new Quotas.
    """

    def get(self, request):
        """
        Return a serialized list of all existing Quotas.
        """
        return Response(
            QuotaSerializer(Quota.objects.all(), many=True).data)

    def post(self, request):
        """
        Create a new Quota from the request payload.

        Responds 201 with the saved Quota on success, otherwise 400 with
        the serializer's validation errors.
        """
        serializer = QuotaSerializer(data=request.data)
        if not serializer.is_valid():
            return Response(
                serializer.errors, status=status.HTTP_400_BAD_REQUEST)
        serializer.save()
        return Response(serializer.data, status=status.HTTP_201_CREATED)
class QuotaDetail(AuthAPIView):
    """
    Fetches or updates a Quota.
    """

    def _update(self, request, quota_id, partial):
        """Shared implementation for PUT and PATCH.

        Args:
            partial: when True, allow a subset of fields (PATCH
                semantics); when False, require all fields (PUT).

        Returns 200 with the updated Quota, 400 with the serializer's
        validation errors, or raises Http404 if the Quota does not
        exist.
        """
        quota = get_object_or_404(Quota, id=quota_id)
        serializer = QuotaSerializer(quota, data=request.data,
                                     partial=partial)
        if serializer.is_valid():
            serializer.save()
            return Response(serializer.data, status=status.HTTP_200_OK)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

    def get(self, request, quota_id):
        """
        Return the specified Quota, or 404 if it does not exist.
        """
        quota = get_object_or_404(Quota, id=quota_id)
        return Response(QuotaSerializer(quota).data)

    def put(self, request, quota_id):
        """
        Updates the specified Quota (all fields required).
        """
        return self._update(request, quota_id, partial=False)

    def patch(self, request, quota_id):
        """
        Partially updates the specified Quota.
        """
        return self._update(request, quota_id, partial=True)
|
Welcome to EECU’s MemberVoice feedback page! We’re committed to giving you the best service imaginable, so we would love to hear your thoughts on any or all of your experiences.
This system allows you to tell us exactly what you think about the service you receive at EECU – what we’re doing right and what you’d like to see us work on – and you can also ask us any questions you have with the assurance that an EECU representative will get back to you promptly. If you like, you even have the option to remain completely anonymous. Either way, we want to know all about your experience at EECU.
For your protection, we don’t use this system to access account information or make financial transactions. If you need to connect with us about a specific account, please give us a call at 817-882-0800 or stop by one of our branches and we’ll be happy to help.
Note: Since you wish to remain anonymous, your e-mail address will only be used by Allegiance to allow a response and will not be revealed to EECU.
EECU requests that you take a brief moment to answer the following questions.
|
# -*- coding: utf-8 -*-
# Define here the models for your scraped items
#
# See documentation in:
# http://doc.scrapy.org/en/latest/topics/items.html
import scrapy
import ast
from scrapy.loader import ItemLoader
# Um artigo possui alem da sua url: um titulo, uma data de publicaçao,
# um corpo e categorias referentes àquele artigo.
class Page(scrapy.Item):
    """A scraped article page.

    Besides its URL, an article has a title, a publication date, a body
    and the categories that refer to it.
    """
    url = scrapy.Field()                 # article URL
    title = scrapy.Field()               # article title
    date_pub = scrapy.Field()            # publication date
    text_article = scrapy.Field()        # article body text
    list_of_categories = scrapy.Field()  # categories extracted from the page
#Metodo para resgatar as categorias de um artigo. Existem dois padroes de artigo,
# ambos existentes em javascript, um que se encontra após CATEGORIAS e o outro categories
def createCategories(categories):
    """Extract an article's category list from an embedded javascript blob.

    Two article layouts exist, both in javascript: one stores the list
    after the marker "CATEGORIAS: ", the other after "categories:".  The
    list itself is a bracketed literal such as ["news", "sports"].

    Args:
        categories: raw text containing one of the markers, or None.

    Returns:
        The parsed list of categories, or None when the input is None or
        no bracketed category list can be located.  (The original raised
        ValueError on malformed input; returning None matches the
        existing None-input path.)
    """
    if categories is None:
        return None
    # Prefer the uppercase marker, then fall back to the lowercase one.
    for marker in ("CATEGORIAS: ", "categories:"):
        pos = categories.find(marker)
        if pos != -1:
            tail = categories[pos:]
            break
    else:
        return None  # neither marker present
    start = tail.find("[")
    end = tail.find("]")
    if start == -1 or end == -1 or end < start:
        return None  # malformed: no bracketed list after the marker
    return ast.literal_eval(tail[start:end + 1])
|
Your resource for chicken nuggets!
Any/all of the links on chickennugget.org are affiliate links for which I receive a small compensation from sales of certain items.
Purchases are made on external affiliate company websites: when a reader clicks on an affiliate link located on chickennugget.org to purchase an item, the reader buys the item from the seller directly (not from chickennugget.org).
Amazon and/or other companies pay chickennugget.org a small commission or other compensation for helping to bring customers to their website.
chickennugget.org is a participant in the Amazon Services LLC Associates Program, an affiliate advertising program designed to provide a means for website owners to earn fees by linking to Amazon.com and affiliated sites, as well as to other websites that may be affiliated with Amazon Service LLC Associates Program.
I do not write sponsored posts. I want to provide authentic, un-biased information. However, if a company would like to publish sponsored content on chickennugget.org, I will disclose this clearly in the beginning of the post.
chickennugget.org participates in the Amazon Services LLC Associates Program, which is an affiliate advertising program designed to provide a means for websites to earn advertising fees by advertising and linking to amazon.com.
|
import pulse
from analyze import clean
import os
import sys
import tweepy
import json
import datetime
import copy
#keywords = [u'tragedy',u'shooting',u'shooter',u'shoot',u'concert',u'game']
def prune(d):
    """Return a copy of word-frequency dict ``d`` without stop words.

    Filters out very common English words (articles, pronouns,
    contractions, chat slang) so the remaining frequencies highlight
    topical words.

    Args:
        d: dict mapping word -> count.

    Returns:
        New dict with the stop words removed; ``d`` is not modified.
    """
    # frozenset gives O(1) membership tests instead of scanning a list
    # for every word (the original list also contained duplicates).
    badwords = frozenset([
        u'hour', u'are', u'here', u'much', u'things', u'than', u'there',
        u'from', u'still', u'being', u'into', u'out', u'every', u'they',
        u'now', u'were', u'very', u'after', u'would', u'could', u'can',
        u'will', u'doe', u'thats', u'why', u'take', u'cant', u'well',
        u'look', u'know', u'all', u'ur', u'what', u'who', u'where', u'or',
        u'do', u'got', u'when', u'no', u'u', u'im', u'dont', u'how', u'if',
        u'as', u'nd', u'up', u'by', u'about', u'was', u'', u'its', u'in',
        u'too', u'a', u'an', u'i', u'he', u'me', u'she', u'we', u'the',
        u'to', u'you', u'him', u'her', u'my', u'and', u'is', u'of', u'rt',
        u'for', u'on', u'it', u'that', u'this', u'be', u'just', u'like',
        u'lol', u'rofl', u'lmao', u'your', u'have', u'but', u'not', u'get',
        u'so', u'at', u'with'])
    return {w: c for w, c in d.items() if w not in badwords}
def analyze_test(tweets):
    """Scan a batch of tweets for weather-related events (Python 2).

    Builds a word-frequency table over the non-retweet tweets, prunes
    stop words, keeps the 30 most frequent remaining words, and if any
    of those is a weather keyword, collects every non-retweet tweet in
    the batch containing that keyword into an ``event`` dict (per-tweet
    latlong/timestamp/id, plus the keyword and any temperature words
    found) and prints it.

    NOTE(review): this module uses Python 2 ``print`` statements.
    """
    #keywords = [u'tragedy',u'shooting',u'shooter',u'shoot',u'concert',u'game']
    keywords = [u'rain',u'rainy',u'storm',u'stormy',u'sunny',u'snow',u'snowy',u'cloudy',u'clear',u'windy',u'wind',u'bright']
    temperature_words = [u'cold',u'freezing',u'frigid',u'chilly',u'mild',u'warm',u'hot',u'scorching',u'scorcher',u'heat']
    all_tweets = copy.copy(tweets) #copy of tweets
    event = None
    word_freq = {} #frequency list
    # First pass: count word frequencies over the cleaned, non-RT tweets.
    for tweet in tweets:
        #print clean(tweet['text'])
        if tweet['text'][:2].lower() == 'rt':continue
        #---Your code goes here! Try stuff to find events!
        #^- frequency analysis?
        txt = clean(tweet['text'])
        #txt = tweet['text']
        words = txt.split(' ') #wordlist
        #print txt
        #print words
        for w in words:
            if not w.lower() in word_freq:
                word_freq[w.lower()] = 1
            else:
                word_freq[w.lower()] += 1
    # Drop stop words, then keep the 30 most frequent remaining words.
    freq = prune(word_freq)
    freq2 = sorted(freq, key=freq.get)
    freq = freq2[len(freq2)-30:len(freq2)] #top 30 current words
    #print freq
    # Second pass: on the first frequent weather keyword, gather every
    # tweet that mentions it (plus any temperature words) into `event`.
    i = 0
    for f in freq:
        if f in keywords: #tweak
            #print "keyword found: "+f
            #print len(all_tweets)
            for t in all_tweets:
                if t['text'][:2].lower() == 'rt':continue
                if f in clean(t['text']).split(' '):
                    if not event:
                        event = {}
                        event['tweets'] = []
                    event['tweets'].append({'latlong':t['latlong'],'timestamp':t['timestamp'],'id':t['id']})
                    event['keywords'] = [f]
                    for temp in temperature_words:
                        if temp in clean(t['text']).split(' '):
                            event['keywords'].append(temp)
            break
        i += 1
    #---Your code goes above this. Try to find events!
    if event:
        print("event found:")
        #avg_lat = sum(map(lambda x:x['latlong'][0], event['tweets']))/float(len(event['tweets']))
        #avg_long = sum(map(lambda x:x['latlong'][1], event['tweets']))/float(len(event['tweets']))
        #avg_time = sum(map(lambda x:x['timestamp'], event['tweets']))/float(len(event['tweets']))
        #event['latlong'] = str([avg_lat, avg_long])
        #event['keywords'] = keywords
        #event['timestamp'] = avg_time
        print event
        print ""
        #analyze_session.post("https://luminous-fire-1209.firebaseio.com/events.json",data=json.dumps(event))
#----
# Script entry: load a dump of tweets and analyze them in batches.
num = 100 #number of tweets to parse at a time
f = open('./tweets/20-24_sea_10km', 'r')
lines = f.readlines();
t = []
for l in lines:
    # HACK: each line is a Python-literal tweet dict; eval() is unsafe
    # on untrusted input -- ast.literal_eval would be the safe choice.
    tweet = eval(l)
    t.append(tweet)
tweets = t[::-1]  # file is newest-first; reverse to chronological order
f.close()
# Feed the tweets to analyze_test() in windows of `num`.
# NOTE(review): relies on Python 2 integer division (len(tweets)/num).
for i in range(0,len(tweets)/num):
    if (i+1)*num > len(tweets):
        analyze_test(tweets[i*num:len(tweets)])
    else:
        analyze_test(tweets[i*num:(i+1)*num])
print "done"
|
If you’re in the Chalk Hill area and looking for quality service to help you find the perfect new flooring for your Chalk Hill home, turn to your local Carpet One. Chalk Hill-area Carpet One stores can be counted on to supply your Pennsylvania home with the top selections of flooring materials, professional installation, and flooring protection in the industry. We carry everything from natural hardwood to lavish carpet choices and beyond.
Shopping at Carpet One will change the way you think about flooring. Whatever your lifestyle, whatever your price range, your Carpet One near Chalk Hill has it covered. Our sales professionals will see you through the flooring selection process along with the Carpet One SelectAFloor™ system, which leads you to find your perfect floor at a perfect price. Whether you’re looking to add contemporary style with ceramic tiling or versatile vinyl that will resist wear and tear while still looking beautiful, your local Chalk Hill-area Carpet One delivers with great flooring at a great price for everyone. In addition, Carpet One offers a variety of flooring warranties that will let you keep your floors beautifully protected for years to come.
It’s truly easy to find reasonably-priced, quality flooring at your Carpet One near Chalk Hill. Thanks to our expertise and great inventory, you can trust Carpet One near Chalk Hill to make your floor redesign experience a smooth one. Come in to your Carpet One near Chalk Hill and let us help you start your flooring redesign journey today!
|
import unittest
import schema_salad.main
import schema_salad.ref_resolver
import schema_salad.schema
from cwltool.load_tool import load_tool
from cwltool.main import main
from cwltool.workflow import defaultMakeTool
class FetcherTest(unittest.TestCase):
    """Exercise load_tool/main with a custom schema-salad Fetcher.

    The stub fetcher serves a single in-memory CWL document for the URL
    "baz:bar/foo.cwl"; the resolver maps any bare tool name onto that
    scheme, so no file system or network access happens.
    """

    def test_fetcher(self):
        class TestFetcher(schema_salad.ref_resolver.Fetcher):
            def __init__(self, a, b):
                # The cache/session constructor arguments are irrelevant
                # for this in-memory stub.
                pass

            def fetch_text(self, url):  # type: (unicode) -> unicode
                if url == "baz:bar/foo.cwl":
                    return """
cwlVersion: v1.0
class: CommandLineTool
baseCommand: echo
inputs: []
outputs: []
"""
                else:
                    raise RuntimeError("Not foo.cwl")

            def check_exists(self, url):  # type: (unicode) -> bool
                # Only the one stubbed document exists.
                return url == "baz:bar/foo.cwl"

        def test_resolver(d, a):
            # Resolve any bare id into the scheme TestFetcher serves.
            return "baz:bar/" + a

        load_tool("foo.cwl", defaultMakeTool, resolver=test_resolver,
                  fetcher_constructor=TestFetcher)

        # assertEqual: assertEquals is a deprecated alias in unittest.
        self.assertEqual(0, main(["--print-pre", "--debug", "foo.cwl"],
                                 resolver=test_resolver,
                                 fetcher_constructor=TestFetcher))
|
Shows Franklin Oscillator, Support/Resistance markers and % scale. Click Next.
Compare several stocks on the same percentage axis.
Track multiple stocks, like during intraday. The charts will auto-refresh.
Track your investments using portfolio management tools. You can export this table.
Percentage gain or loss of your investments over time can be seen as a chart.
Scanners create a buy and sell list and rank the stocks from best to worst.
EOD and Intraday data from exchanges of 26 countries supported by Yahoo Finance is available.
Inbuilt RSS newsreader that delivers latest news.
Call, WhatsApp or email us for support.
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2008-2014 Jonathan F. Donges
# Author: Jonathan F. Donges <donges@pik-potsdam.de>
# URL: <http://www.pik-potsdam.de/members/donges/software>
"""
Computes auto-correlation function for irregularly sampled time series.
Uses the method proposed in:
Rehfeld, K., Marwan, N., Heitzig, J., & Kurths, J. (2011). Comparison of correlation analysis techniques for irregularly sampled time series. Nonlinear Processes in Geophysics, 18(3), 389-404.
This script provides analyses for this publication:
J.F. Donges, R.V. Donner, N. Marwan, S.F.M. Breitenbach, K. Rehfeld, and J. Kurths,
Nonlinear regime shifts in Holocene Asian monsoon variability: Potential impacts on cultural change and migratory patterns,
Climate of the Past 11, 709-741 (2015),
DOI: 10.5194/cp-11-709-2015
"""
#
# Imports
#
import sys
import numpy as np
import pylab
import progressbar
#
# Settings
#
# Filename
FILENAME_X = "../../data/raw_proxy_data/Dongge_DA.dat"
# Resolution of cross-correlation (units of time)
DELTA_LAG = 10 # Measured in years here
# Maximum lag index
MAX_LAG_INDEX = 100
# Toggle detrending
DETRENDING = True
DETRENDING_WINDOW_SIZE = 1000. # Measured in years here
#
# Functions
#
def detrend_time_series(time, data, window_size):
    """Detrend an (irregularly sampled) time series with moving-window means.

    For each sample, subtracts the mean of all data points whose time
    stamp falls within a window of width ``window_size`` centered on
    that sample's time stamp.  The window always contains at least the
    sample itself, so the mean is well defined.

    Args:
        time: 1-D array of (possibly irregular) sampling times.
        data: 1-D array of observations, same length as ``time``.
        window_size: full window width, in the same units as ``time``.

    Returns:
        New array of detrended values; ``data`` is not modified.
    """
    n = data.shape[0]
    detrended_data = np.empty(n)
    # Hoist the loop-invariant half width out of the loop.
    half_width = window_size / 2.
    for j in range(n):  # range (not xrange): Python 2/3 compatible
        # Boolean mask of samples inside the centered window.
        window_indices = np.logical_and(time >= time[j] - half_width,
                                        time <= time[j] + half_width)
        # Subtract the local window mean from the central data point.
        detrended_data[j] = data[j] - data[window_indices].mean()
    return detrended_data
def gaussian(x, std):
    """
    Returns value of gaussian distribution at x with 0 mean
    and standard deviation std.

    Bug fix: the prefactor used to be 1 / sqrt(2*pi*std); the correct
    normal-density normalization is 1 / (std * sqrt(2*pi)).  The
    auto-correlation estimate is insensitive to this constant because
    the kernel weights appear in both numerator and denominator, so
    downstream ACF results are unchanged.  (Also dropped the redundant
    np.abs around x**2, which is non-negative for real x.)
    """
    return np.exp(-x ** 2 / (2 * std ** 2)) / (std * np.sqrt(2 * np.pi))
def kernel_auto_correlation_est(x, time_diff, kernel_func, kernel_param,
                                delta_lag, max_lag_index):
    """Kernel-based auto-correlation for irregularly sampled series.

    Implements the kernel estimator of Rehfeld et al. (2011): for each
    discrete lag k * delta_lag, every product x_i * x_j is weighted by
    kernel_func(k * delta_lag - (t_i - t_j), kernel_param) and the
    weighted average is taken.

    Args:
        x: 1-D data array.  Normalized internally on a local copy, so
           (unlike the original) the caller's array is NOT mutated.
        time_diff: (n, n) matrix with entries t_i - t_j.
        kernel_func: callable(lag_offsets, kernel_param) -> weight matrix.
        kernel_param: bandwidth parameter forwarded to kernel_func.
        delta_lag: lag resolution, in time units.
        max_lag_index: largest lag index; lags 0..max_lag_index computed.

    Returns:
        Tuple (lag_times, auto_correlation), both arrays of length
        max_lag_index + 1.
    """
    # Normalize to zero mean / unit variance on a local copy so callers
    # no longer need to pass a defensive x.copy().
    x = np.array(x, dtype=float)
    x -= x.mean()
    x /= x.std()
    auto_correlation = np.zeros(max_lag_index + 1)
    # Loop over zero lag and all positive lags.
    for k in range(max_lag_index + 1):  # range: Python 2/3 compatible
        # Weight matrix: closeness of each sample-pair lag to k*delta_lag.
        b = kernel_func(k * delta_lag - time_diff, kernel_param)
        # Weighted sum of products over the plain sum of the weights.
        numerator = np.dot(x, np.dot(b, x.transpose()))
        auto_correlation[k] = numerator / b.sum()
    lag_times = delta_lag * np.arange(max_lag_index + 1)
    return (lag_times, auto_correlation)
#
# Main script
#
# Load record x: column 0 is age (years B.P.), column 1 the proxy value.
data_x = np.loadtxt(FILENAME_X, unpack=False, usecols=(0,1,), comments="#")
#data_x = np.fromfile(FILENAME_X, sep=" ")
time_x = data_x[:,0]
x = data_x[:,1]
# Detrending of time series using moving window averages
if DETRENDING:
    x = detrend_time_series(time_x, x, DETRENDING_WINDOW_SIZE)
# Get length of records
N_x = len(time_x)
# Get recommended standard deviation of gaussian Kernel (Kira Rehfeld's
# NPG paper)
sigma = 0.25 * np.diff(time_x).mean()
# NOTE(review): Python 2 print statements below.
print "Length of record x:", N_x
print "Mean sampling time x:", np.diff(time_x).mean()
print "Recommended standard deviation of gaussian Kernel:", sigma
# Calculate matrix of time differences (time_diff[i,j] = t_i - t_j)
time_diff = np.zeros((N_x, N_x))
for i in xrange(N_x):
    for j in xrange(N_x):
        time_diff[i,j] = time_x[i] - time_x[j]
# Estimate auto-correlation function
(lag_times, auto_correlation) = kernel_auto_correlation_est(x=x.copy(), time_diff=time_diff, kernel_func=gaussian, kernel_param=sigma, delta_lag=DELTA_LAG, max_lag_index=MAX_LAG_INDEX)
#
# Save results
#
# Two columns: lag time, ACF value.
results = np.zeros((MAX_LAG_INDEX + 1, 2))
results[:,0] = lag_times
results[:,1] = auto_correlation
np.savetxt("kernel_acf_dongge.txt", results)
#
# Plot results
#
# Set plotting parameters (for Clim. Past paper)
# NOTE(review): this dict is currently unused (rcParams.update is
# commented out below); some keys are not valid rcParams.
params = { 'figure.figsize': (6.,6.),
    'axes.labelsize': 12,
    'text.fontsize': 12,
    'xtick.labelsize': 12,
    'ytick.labelsize': 12,
    'legend.fontsize': 10,
    'title.fontsize': 12,
    'text.usetex': False,
    'font': 'Helvetica',
    'mathtext.bf': 'helvetica:bold',
    'xtick.major.pad': 6,
    'ytick.major.pad': 6,
    'xtick.major.size': 5,
    'ytick.major.size': 5,
    'tick.labelsize': 'small'
    }
#pylab.rcParams.update(params)
# Plot time series
pylab.figure(1)
pylab.plot(time_x, x)
pylab.xlabel("Age (y B.P.)")
pylab.ylabel("Normalized values")
# Plot the ACF with the 1/e decorrelation level marked in red.
pylab.figure(2)
pylab.plot(lag_times, auto_correlation, "k")
pylab.axhline(y=1 / np.e, color="red")
pylab.xlabel("Time delay [y]")
pylab.ylabel("ACF")
pylab.ylim(-0.5,1)
pylab.savefig("auto_corr_irregular.pdf")
pylab.show()
|
As every business is undoubtedly aware the financial landscape has changed dramatically in recent months and the same is true for private equity firms. The business of optimizing profits in your portfolio companies has never been a more pivotal part of conducting business than it is now.
With this reality and the need for better, more efficient practices, many P&E firms are looking for ways to maximize the profits in their existing portfolio companies.
Participative-based management combined with financial engineering can offer many benefits to your portfolio companies that will positively impact your firm’s financial outlook.
So what is all this fuss about participative management? And how can it help improve business operations when you combine it with financial engineering?
Traditional management styles extend from the top-down.
While this form of management provides limited success, it does not allow a business to maximize one of its richest resources: the knowledge and ideas of its non-managerial employees.
Participative-based management gives managers the ability to gain insight from workers who see first hand where improvements to their work processes can be made.
The results are robust and include improved capacity efficiency, increased worker safety, employee efficiency, product quality, and return on capital.
If you have everyone in your organization giving 100% then please don’t read this paper! In order to survive and thrive in this economy, business leaders need to leverage financials, technology and people. Most CEO’s are comfortable addressing the first two.
However, the perception is that the “people factor” is difficult and convoluted, so managers avoid dealing with it. There are now over 40 years of research and field testing that prove that there are effective strategies for getting the best out of people.
This white paper will examine the improvements to businesses utilizing participative management and financial engineering as well as other leading edge methods and the increased productivity they can offer a business of any size.
To know more about these white papers, please feel free to contact us here.
|
#!/usr/bin/env python
""" setuptools for stalkerweb """
from setuptools import setup, find_packages
# Pull the version from the package itself so it is defined in one place.
from stalkerweb import __version__ as version
setup(
    name='stalkerweb',
    version=version,
    author="Florian Hines",
    author_email="syn@ronin.io",
    description="Simple Monitoring System",
    url="http://github.com/pandemicsyn/stalker",
    packages=find_packages(),
    classifiers=[
        'Development Status :: 4 - Beta',
        'License :: OSI Approved :: Apache Software License',
        'Operating System :: POSIX :: Linux',
        'Programming Language :: Python :: 2.6',
        'Environment :: No Input/Output (Daemon)',
    ],
    # Exact pins: this daemon is deployed as a unit, so dependencies are
    # frozen rather than ranged.
    install_requires=[
        'stalkerutils==2.0.2',
        'eventlet==0.17.4',
        'flask==0.10.1',
        'redis==2.10.3',
        'pymongo==3.0.3',
        'mmh3==2.3.1',
        'flask-rethinkdb==0.2',
        'rethinkdb==2.1.0.post2',
        'flask-bcrypt==0.7.1',
        'flask-wtf==0.12',
    ],
    include_package_data=True,
    zip_safe=False,
    scripts=['bin/stalker-web',],
    # Ship docs, sample config and the init script alongside the package.
    data_files=[('share/doc/stalkerweb',
        ['README.md', 'INSTALL',
        'etc/stalker-web.conf',
        'etc/init.d/stalker-web',
        ])]
)
|
General Dynamics NASSCO finalized a contract with TOTE, Inc., for the design and construction of two 3,100 TEU LNG-powered containerships, setting a new benchmark in green ship technology. When completed the 764-ft.-long containerships are expected to be the largest ships of any type in the world primarily powered by liquefied natural gas (LNG). Construction of the first containership is scheduled to begin in the first quarter of 2014, with delivery to occur by the fourth quarter of 2015; the second ship will be delivered in the first quarter of 2016. The contract between NASSCO and TOTE Shipholdings, Inc., a subsidiary of TOTE, Inc., includes options for three additional ships.
The ships will be designed by DSEC, a subsidiary of Daewoo Shipbuilding & Marine Engineering (DSME), located in Busan, South Korea. The design will be based on proven containership-design standards and will include DSME’s patented LNG fuel-gas system and a MAN ME-GI dual fuel slow speed engine. NASSCO has successfully partnered with DSEC previously for the design and construction of five State-class product tankers which currently operate in the U.S. Jones Act market. All of the containerships will be constructed at the NASSCO shipyard in San Diego.
The vessels will operate on either fuel oil or gas derived from LNG, which will significantly decrease emissions while increasing fuel efficiency as compared to conventionally-powered ships. The LNG-powered containerships will also include a ballast water treatment system, making them the greenest ships of their size in the world. The double-hulled ships will operate between Jacksonville, Fla., and San Juan, P.R.
Anthony Chiarello, President and CEO of TOTE, Inc. said, “This investment demonstrates our commitment to the people of Puerto Rico and our environment. These vessels mark a new age of shipping using the best technology in the world."
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
EXPORT
Permite adaptar un archivo a un texto normal para ser traducido correctamente en google
Autor: PABLO PIZARRO @ github.com/ppizarror
Fecha: 2014-2015
Licencia: GPLv2
"""
__author__ = "ppizarror"
# Importación de librerías
import os
import sys
reload(sys)
# noinspection PyUnresolvedReferences
sys.setdefaultencoding('UTF8') # @UndefinedVariable
DL = " // "
try:
namearchive = raw_input("Ingrese el nombre del archivo que desea transformar: ").replace(".txt", "")
# noinspection PyArgumentEqualDefault
archivo = open(namearchive + ".txt", "r")
except:
print "El archivo no existe!"
exit()
# noinspection PyUnboundLocalVariable
archivo2 = open(namearchive + "_exported" + ".txt", "w")
# noinspection PyUnboundLocalVariable
for linea in archivo:
linea = linea.strip().split(DL)
nwlinea = linea[1].replace("|", " ") + "\n"
archivo2.write("{" + linea[0] + "}\n")
archivo2.write(nwlinea)
archivo.close()
archivo2.close()
print "Archivo generado correctamente"
try:
os.remove("_export.pyc")
except:
pass
|
What is Community Association Partners?
We are a Homeowners association management company that puts emphasis in partnership and transparency. Without working as a partner with the associations we manage, there is no way we could make the impact on this industry that we’re pushing for. We want to bring change to HOA management, because as of right now the business suffers from a lack of honesty and trust. There have even been a few cases locally where a HOA management company was stealing from the very associations they were supposed to be guiding. Nothing can fix the image of HOA management other than giving people a reason to do so. We’ve been doing it for eight years and we want to do it for the next fifty.
Today, we will use our knowledge to make our communities a better place to live. Because that’s what we’d want for ourselves and our families.
Today, we will place the interests of our associations before an easier way.
Today, we will ensure that our associations know the whys, whens, and hows, because this industry has a transparency problem. We’re going to change that.
Today, we will do what we promised we would do, today.
Today, we will hold ourselves accountable.
Today, we won’t be stuck in our ways. Flexibility isn’t just flexibility. It’s strength too.
Today, we will make it easier for our board members to sleep, tonight.
Today, we will lean on our office-dogs for comfort and Joy.
Today we will change the industry of HOA management. Because let’s face it, it needs a little bit of change. And tomorrow, we will do it all over again.
|
##
# \namespace cross3d.studiomax
#
# \remarks The cross3d.studiomax.Clip module contains an
# abstraction of the MAXScript MXClip class for interacting
# with the Motion Mixer.
#
# \author willc
# \author Blur Studio
# \date 09/28/15
#
import Py3dsMax
from Py3dsMax import mxs
from cross3d import ClipPortion, TrackPortion
from cross3d.abstract.mixer.clip import AbstractClip
################################################################################
#####------------------------------ Classes -------------------------------#####
################################################################################
class StudiomaxClip(AbstractClip):
	"""An abstraction of the MAXScript MxClip class.
	Attributes:
		clip: The ValueWrapper for the MxClip this Clip is wrapping.
		track: The Track instance for the MxClip's parent MxTrack.
		numWeights: The number of weights in the clip's weight curve
			(relevant only when clip is in a layer track)
		globStart: The global frame value for the start point of the MxClip
		globEnd: The global frame value for the end point of the MxClip
		filename: The filename of the bip file used by the MxClip.
		scale: The MxClip's scale. Modifying the scale will cause the Clip to
			scale on the right edge. The left edge will not move.
	"""
	@property
	def filename(self):
		"""The filename of the bip file used by the MxClip."""
		return self.clip.filename
	@property
	def globStart(self):
		"""The global frame value for the start point of the MxClip"""
		return float(self.clip.globStart)
	@property
	def globEnd(self):
		"""The global frame value for the end point of the MxClip"""
		return float(self.clip.globEnd)
	@property
	def numWeights(self):
		"""The number of weights in the clip's weight curve
		(relevant only when clip is in a layer track)"""
		return int(self.clip.numWeights)
	@property
	def sourceEnd(self):
		"""End frame of the source animation (the MxClip's orgEnd)."""
		return float(self.clip.orgEnd)
	@property
	def sourceStart(self):
		"""Start frame of the source animation (the MxClip's orgStart)."""
		return float(self.clip.orgStart)
	@property
	def scale(self):
		"""The MxClip's scale (scaling moves only the right edge)."""
		return float(self.clip.scale)
	@property
	def trimEnd(self):
		"""Trim position at the end of the clip (the MxClip's trimEnd)."""
		return float(self.clip.trimEnd)
	@property
	def trimStart(self):
		"""Trim position at the start of the clip (the MxClip's trimStart)."""
		return float(self.clip.trimStart)
	def analyzeWeights(self, occludedPortions):
		"""Determines which portions of the Clip are used, and which portions of
		the Clip will occlude Tracks below.
		Args:
			occludedPortions(list): A list of `TrackPortion` instances
				for every portion of the Clip that will be occluded
				by Tracks above it.
		Returns:
			tuple: A tuple containing a list of `ClipPortion`
				instances for every used portion of the Clip, and a
				list of `TrackPortion` instances for every portion of
				the Clip that will occlude tracks below it.
		"""
		if self.track.isTransitionTrack:
			# this won't work...
			return
		clipOcclPortions = []
		ClipPortions = []
		clipStart, clipEnd = self.globStart, self.globEnd
		if self.numWeights:
			usedPortions = []
			# Initialize the first rangeStart with the global start for the
			# clip. We'll modify this if the weights make the clip have no
			# effect for part of its duration.
			rangeStart, rangeEnd = clipStart, None
			# Keep a separate occluding clip range. We'll keep track of
			# occluding clips so we can test against them to update clip ranges
			# later on.
			occlStart, occlEnd = None, None
			prevWVal = 0.0
			for wi, (wTime, wVal) in enumerate(self.iterWeights()):
				# Always move the end to the current position
				rangeEnd = wTime
				if wVal == 0.0:
					# If the usedPortion has a non-zero length and isn't
					# non-effecting for its entire duration, add it to the used
					# portions.
					if rangeEnd > rangeStart and prevWVal:
						usedPortions.append(
							TrackPortion(self.track, rangeStart, rangeEnd)
						)
					# Reset start to current position
					rangeStart = wTime
				if wVal == 1.0:
					# If this is the first weight, start at the beginning of the
					# clip, since the curve will extend back past this weight.
					if wi == 0:
						occlStart = clipStart
					# If we already have a start stored for an occluding
					# portion, store this weight as the (new) end. Otherwise,
					# store it as the start.
					# NOTE(review): these truthiness tests treat a stored
					# start/end of frame 0 / 0.0 as "not set" -- confirm that
					# global frame values are never zero in practice.
					if occlStart:
						occlEnd = wTime
					else:
						occlStart = wTime
				else:
					# If a start and end are stored for the occluding
					# TrackPortion, add that TrackPortion to the list of
					# occluding portions for this clip.
					if occlStart and occlEnd:
						clipOcclPortions.append(
							TrackPortion(self.track, occlStart, occlEnd)
						)
					# Clear the occluding start/end, since the track weighting
					# is no longer fully occluding.
					occlStart, occlEnd = None, None
				prevWVal = wVal
			# If occlStart is set, add the remainder of the clip to occluding
			# clips.
			if occlStart:
				clipOcclPortions.append(
					TrackPortion(self.track, occlStart, clipEnd)
				)
			# If the clip ended with a non-zero weight, add the remainder as a
			# usedPortion.  (wVal is always bound here: numWeights is non-zero,
			# so the weight loop ran at least once.)
			if wVal:
				usedPortions.append(
					TrackPortion(self.track, rangeStart, clipEnd)
				)
			# Finally, we'll clean up the list of ClipPortions by eliminating
			# occluded sections of clips, and condensing continuous clips that
			# were split where their weight dips tangential to zero.
			usedSC = self._occludeClipPortions(usedPortions, occludedPortions)
			ClipPortions = self._coalesceClipPortions(usedSC)
		else:
			# No weight curve: the whole clip is used and fully occludes
			# whatever lies below it.
			clipRange = self.globStart, self.globEnd
			clipOcclPortions = [TrackPortion(self.track, *clipRange)]
			ClipPortions = self._occludeClipPortions(
				[ClipPortion(self, *clipRange)],
				occludedPortions
			)
		occludedPortions.extend(clipOcclPortions)
		return ClipPortions, occludedPortions
	def getWeightTime(self, index):
		"""Retrieves the global frame number the weight at the specified index
		is placed at.
		Args:
			index(int): Index of desired weight to retrieve a time
				for.
		Returns:
			float: Global frame number for the position of the
				weight.
		Raises:
			IndexError
		"""
		if index < 0 or index >= self.numWeights:
			raise IndexError('Index out of range')
		# Adjust the weight time to be global, not local to the clip.
		# MAXScript arrays are 1-based, hence index+1.
		return float(mxs.getWeightTime(self.clip, index+1)) + self.globStart
	def getWeightValue(self, index):
		"""Retrieves the value of the weight at the specified index.
		Args:
			index(int): Index of desired weight to retrieve a value
				for.
		Returns:
			float: Value of the weight at the index specified.
		Raises:
			IndexError
		"""
		if index < 0 or index >= self.numWeights:
			raise IndexError('Index out of range')
		# MAXScript arrays are 1-based, hence index+1.
		return float(mxs.getWeight(self.clip, index+1))
	def iterWeights(self):
		"""Wraps the MAXScript getWeight and getWeightTime global functions into
		a generator that returns tuples of the time and value for all
		weights in the Track.
		Returns:
			generator: Generator that produces tuples of
				((float)time, (float)value) for weights on the
				track.
		"""
		count = self.numWeights
		for i in range(count):
			t = self.getWeightTime(i)
			v = self.getWeightValue(i)
			yield (t, v)
	def weights(self):
		"""Wraps the MAXScript getWeight and getWeightTime global functions into
		a generator that returns tuples of the time and value for all
		weights on the Clip.
		Returns:
			list: List of tuples for every weight on the Clip in
				the form ((float)time, (float)value).
		"""
		return [w for w in self.iterWeights()]
	def _coalesceClipPortions(self, inputPortions):
		# Merge portions whose ranges abut exactly (end == next start) into
		# single ClipPortions.  Assumes inputPortions is non-empty and sorted
		# in ascending order; raises IndexError on an empty list.
		ClipPortions = []
		clip = inputPortions.pop(0)
		scStart = clip.start
		scEnd = clip.end
		while len(inputPortions):
			clip = inputPortions.pop(0)
			if scEnd == clip.start:
				scEnd = clip.end
			else:
				ClipPortions.append(ClipPortion(self, scStart, scEnd))
				scStart, scEnd = clip.start, clip.end
		ClipPortions.append(ClipPortion(self, scStart, scEnd))
		return ClipPortions
	def _occludeClipPortions(self, ClipPortions, occludedPortions):
		# Clip away the parts of each ClipPortion covered by occludedPortions,
		# splitting portions that straddle an occluded range.
		# NOTE(review): if occludedPortions is empty the for-loop body never
		# runs and every popped portion is silently dropped; and a portion
		# that fails to intersect several occluded ranges is appended once per
		# non-intersecting range -- confirm both behaviors against callers.
		outputClips = []
		while len(ClipPortions):
			sc = ClipPortions.pop(0)
			for ocR in occludedPortions:
				# if ClipPortion is completely occluded
				if (ocR.start < sc.start) and (sc.end < ocR.end):
					sc = None
					break
				containsOcclStart = (
					(sc.start < ocR.start) and (ocR.start < sc.end)
				)
				containsOcclEnd = ((sc.start < ocR.end) and (ocR.end < sc.end))
				if containsOcclStart and containsOcclEnd:
					ClipPortions.append(ClipPortion(self, sc.start, ocR.start))
					sc = ClipPortion(self, ocR.end, sc.end)
				elif containsOcclStart:
					sc = ClipPortion(self, sc.start, ocR.start)
				elif containsOcclEnd:
					sc = ClipPortion(self, ocR.end, sc.end)
				else:
					outputClips.append(sc)
		return outputClips
	def __str__(self):
		return 'Clip [{}]'.format(self.filename)
################################################################################
# Register this class with cross3d so 'Clip' lookups resolve to StudiomaxClip.
import cross3d
cross3d.registerSymbol('Clip', StudiomaxClip)
|
Check out this recently sold 2002 Silver Hyundai Santa Fe SUV that Peddle purchased in Lafayette, Louisiana. It was a 4 door, SUV with a 4 Cylinder, 2.4 L engine and we paid $200 based on the car value and how much it was worth. We buy cars for cash like this in all conditions, used, junk, old, or damaged, every day.
Check out this recently sold 2002 Hyundai Santa Fe. If you're thinking about how to sell your SUV, and you're looking for used SUV buyers, consider Peddle. We can help you sell your 2002 Hyundai Santa Fe, or a similar SUV, online for cash easily and quickly.
How do I prepare a Louisiana title?
|
# ##### BEGIN GPL LICENSE BLOCK #####
#
# SCA Tree Generator, a Blender addon
# (c) 2013 Michel J. Anders (varkenvarken)
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>
from math import pi
from mathutils import Quaternion
rot120 = 2 * pi / 3
def rot(point, axis, angle):
    """Return a copy of *point* rotated by *angle* radians around *axis*."""
    rotated = point.copy()
    rotated.rotate(Quaternion(axis, angle))
    return rotated
def vertexnormal(d1, d2, d3):
    """Return a unit normal for the corner spanned by three edge vectors.

    The result is the normalized sum of the three pairwise cross products,
    flipped when necessary so that it points away from the mean edge
    direction.
    """
    averaged = (
        d1.cross(d2).normalized()
        + d2.cross(d3).normalized()
        + d3.cross(d1).normalized()
    ).normalized()
    if (d1 + d2 + d3).dot(averaged) > 0:
        return -averaged
    return averaged
def simplefork2(p0, p1, p2, p3, r0, r1, r2, r3):
    """Build mesh geometry joining branch point p0 to three child points.

    Each outgoing branch gets a triangle of vertices one third of the way
    toward its endpoint; four extra vertices close off the base around p0.

    Args:
        p0: base position (mathutils Vector).
        p1, p2, p3: positions of the three outgoing branches.
        r0: radius at the base.
        r1, r2, r3: radii of the outgoing branches.
    Returns:
        (verts, faces): a tuple of 13 Vectors and a tuple of index tuples
        (quads plus one closing triangle).
    """
    d1 = p1 - p0
    d2 = p2 - p0
    d3 = p3 - p0
    #print(d1, d2, d3)
    # Shared corner normal; each branch's vertex triangle is produced by
    # rotating a radial vector by +/-120 degrees around the branch axis.
    n = vertexnormal(d1, d2, d3)
    #print(n)
    # Vertex triangle a third of the way along branch 1.
    pp1 = p0 + d1 / 3
    n1a = r1 * n
    n1b = rot(n1a, d1, rot120)
    n1c = rot(n1a, d1, -rot120)
    v1a = pp1 + n1a
    v1b = pp1 + n1b
    v1c = pp1 + n1c
    # Vertex triangle a third of the way along branch 2.
    pp2 = p0 + d2 / 3
    n2a = r2 * n
    n2b = rot(n2a, d2, rot120)
    n2c = rot(n2a, d2, -rot120)
    v2a = pp2 + n2a
    v2b = pp2 + n2b
    v2c = pp2 + n2c
    # Vertex triangle a third of the way along branch 3.
    pp3 = p0 + d3 / 3
    n3a = r3 * n
    n3b = rot(n3a, d3, rot120)
    n3c = rot(n3a, d3, -rot120)
    v3a = pp3 + n3a
    v3b = pp3 + n3b
    v3c = pp3 + n3c
    # Base vertices around p0: one along the corner normal, three pushed
    # away from the branch directions.
    n0a = n * r0
    v0a = p0 + n0a
    # NOTE(review): the assignments below pair v0c with d3, v0d with d1 and
    # v0b with d2 (out of alphabetical order) -- confirm this pairing is the
    # intended correspondence between base vertices and branches.
    v0c = p0 - d3.normalized() * r0 - n0a / 3
    v0d = p0 - d1.normalized() * r0 - n0a / 3
    v0b = p0 - d2.normalized() * r0 - n0a / 3
    #v0b=p0+(n1b+n2c)/2
    #v0d=p0+(n2b+n3c)/2
    #v0c=p0+(n3b+n1c)/2
    verts = (v1a, v1b, v1c, v2a, v2b, v2c, v3a, v3b, v3c, v0a, v0b, v0c, v0d)
    faces = ((0, 1, 10, 9), (1, 2, 11, 10), (2, 0, 9, 11),  # chck
             (3, 4, 11, 9), (4, 5, 12, 11), (5, 3, 9, 12),  # chck
             (6, 7, 12, 9),
             (7, 8, 10, 12),
             (8, 6, 9, 10),
             (10, 11, 12))
    return verts, faces
def simplefork(p0, p1, p2, p3, r0, r1, r2, r3):
    """Build fork geometry from p0 to three child points via a central
    tetrahedron whose faces are rotated toward each branch direction.

    Args:
        p0: base position (mathutils Vector).
        p1, p2, p3: positions of the three outgoing branches.
        r0: radius at the base.
        r1, r2, r3: radii of the outgoing branches (currently unused by the
            vertex construction -- the tetrahedron is sized from r0 only).
    Returns:
        (verts, faces): a tuple of 13 Vectors and a tuple of face index
        tuples.
    """
    d1 = p1 - p0
    d2 = p2 - p0
    d3 = p3 - p0
    #print(d1, d2, d3)
    n = -vertexnormal(d1, d2, d3)
    #print(n)
    # the central tetrahedron, built in local coordinates around the origin
    n0a = n * r0 * 0.3
    v0a = n0a
    v0b = -d1 / 6 - n0a / 2
    v0c = -d2 / 6 - n0a / 2
    v0d = -d3 / 6 - n0a / 2
    # Face normals of the tetrahedron (sum of the three corners of each
    # face) and the rotations that align them with the branch directions.
    n1 = v0a + v0c + v0d
    n2 = v0a + v0b + v0d
    n3 = v0a + v0b + v0c
    q1 = n1.rotation_difference(d1)
    q2 = n2.rotation_difference(d2)
    q3 = n3.rotation_difference(d3)
    # Rotate a copy of each tetrahedron face toward its branch and place it
    # a third of the way along that branch.
    pp1 = p0 + d1 / 3
    v1a = v0a.copy()
    v1b = v0c.copy()
    v1c = v0d.copy()
    v1a.rotate(q1)
    v1b.rotate(q1)
    v1c.rotate(q1)
    v1a += pp1
    v1b += pp1
    v1c += pp1
    pp2 = p0 + d2 / 3
    v2a = v0a.copy()
    v2b = v0b.copy()
    v2c = v0d.copy()
    v2a.rotate(q2)
    v2b.rotate(q2)
    v2c.rotate(q2)
    v2a += pp2
    v2b += pp2
    v2c += pp2
    pp3 = p0 + d3 / 3
    v3a = v0a.copy()
    v3b = v0b.copy()
    v3c = v0c.copy()
    v3a.rotate(q3)
    v3b.rotate(q3)
    v3c.rotate(q3)
    v3a += pp3
    v3b += pp3
    v3c += pp3
    # Move the central tetrahedron from local coordinates to p0.
    v0a += p0
    v0b += p0
    v0c += p0
    v0d += p0
    verts = (v1a, v1b, v1c, v2a, v2b, v2c, v3a, v3b, v3c, v0a, v0b, v0c, v0d)
    # NOTE(review): six of the ten faces are commented out, so the returned
    # mesh is open -- confirm this partial hull is intentional.
    faces = (
        #(1, 2, 12, 11),
        #(9, 12, 2, 0),
        #(11, 9, 0, 1),
        #(5, 4, 10, 12),
        #(4, 3, 9, 10),
        #(3, 5, 12, 9),
        (8, 7, 11, 10),
        (7, 5, 9, 11),
        (6, 8, 10, 9),
        (10, 11, 12))
    return verts, faces
def bridgequads(aquad, bquad, verts):
    """Bridge two vertex loops with a ring of quads.

    Finds the pair of vertices (one from each loop) that lie closest
    together, rotates both loops so those vertices align, and connects the
    loops with one quad per edge.

    Args:
        aquad: sequence of vertex indices forming the first loop.
        bquad: sequence of vertex indices forming the second loop (same
            length as aquad).
        verts: indexable collection of vertex positions.
    Returns:
        (faces, aloop, bloop): the bridging quads as index tuples, plus
        both loops re-ordered to start at the closest pair.
    """
    # Locate the closest pair, keeping the first minimum encountered.
    best = None
    for offset_a, idx_a in enumerate(aquad):
        for offset_b, idx_b in enumerate(bquad):
            dist2 = (verts[idx_a] - verts[idx_b]).length_squared
            if best is None or dist2 < best[2]:
                best = (offset_a, offset_b, dist2)
    start_a, start_b = best[0], best[1]
    count = len(aquad)
    faces = [
        (aquad[(start_a + i) % count],
         aquad[(start_a + i + 1) % count],
         bquad[(start_b + i + 1) % count],
         bquad[(start_b + i) % count])
        for i in range(count)
    ]
    aloop = [aquad[(start_a + i) % count] for i in range(count)]
    bloop = [bquad[(start_b + i) % count] for i in range(count)]
    return faces, aloop, bloop
def quadfork(p0, p1, p2, p3, r0, r1, r2, r3):
    """Build quad-based fork geometry joining p0 to three child points.

    Each branch gets a quad of vertices a third of the way toward its
    endpoint; a small connecting block (two top quads, one bottom quad and
    two 5-gon sides) closes the junction around p0.

    Args:
        p0: base position (mathutils Vector).
        p1, p2, p3: positions of the three outgoing branches.
        r0: radius at the base.
        r1, r2, r3: radii of the outgoing branches.
    Returns:
        (verts, faces): a list of 22 Vectors and a list of face index
        tuples (quads and two 5-gons).
    """
    d1 = p1 - p0
    d2 = p2 - p0
    d3 = p3 - p0
    # Frame shared by branches 2 and 3: a = direction between their tips,
    # n = normal of the plane they span.
    a = (d3 - d2).normalized()
    n = d2.cross(d3).normalized()
    pp1 = p0 + d1 / 3
    pp2 = p0 + d2 / 3
    pp3 = p0 + d3 / 3
    v2a = pp2 + (n + a) * r2
    v2b = pp2 + (n - a) * r2
    v2c = pp2 + (-n - a) * r2
    v2d = pp2 + (-n + a) * r2
    v3a = pp3 + (n + a) * r3
    v3b = pp3 + (n - a) * r3
    v3c = pp3 + (-n - a) * r3
    v3d = pp3 + (-n + a) * r3
    # Re-derive the frame perpendicular to branch 1 for its quad.
    a = d1.cross(n).normalized()
    n = a.cross(d1).normalized()
    v1a = pp1 + (n + a) * r1
    v1b = pp1 + (n - a) * r1
    v1c = pp1 + (-n - a) * r1
    v1d = pp1 + (-n + a) * r1
    #the top of the connecting block consist of two quads
    v0a = p0 + (n + a) * r0
    v0b = p0 + (n - a) * r0
    v0c = p0 + (-n - a) * r0
    v0d = p0 + (-n + a) * r0
    v0ab = p0 + n * r0
    v0cd = p0 - n * r0
    #the bottom is a single quad (which means the front and back are 5gons)
    d = d1.normalized() * r0 * 0.1
    vb0a = v0a + d
    vb0b = v0b + d
    vb0c = v0c + d
    vb0d = v0d + d
    verts = [v1a, v1b, v1c, v1d,  # 0 1 2 3
             v2a, v2b, v2c, v2d,  # 4 5 6 7
             v3a, v3b, v3c, v3d,  # 8 9 10 11
             v0a, v0ab, v0b, v0c, v0cd, v0d,  # 12 13 14 15 16 17
             vb0a, vb0b, vb0c, vb0d]  # 18 19 20 21
    faces = [(0, 1, 19, 18),  # p1->p0 bottom
             (1, 2, 20, 19),
             (2, 3, 21, 20),
             (3, 0, 18, 21),
             #(4, 5, 14, 13), # p2 -> p0 top right
             #(5, 6, 15, 14),
             #(6, 7, 16, 15),
             #(7, 4, 13, 16),
             (13, 14, 5, 4),
             (14, 15, 6, 5),
             (15, 16, 7, 6),
             (16, 13, 4, 7),
             #(8, 9, 13, 12), # p3 -> p0 top left
             #(9, 10, 16, 13),
             #(10, 11, 17, 16),
             #(11, 8, 12, 17),
             (12, 13, 9, 8),
             (13, 16, 10, 9),
             (16, 17, 11, 10),
             (17, 12, 8, 11),
             #(12, 17, 21, 18), # connecting block
             #(14, 15, 20, 19),
             #(12, 13, 14, 19, 18),
             #(15, 16, 17, 21, 20)]
             (12, 17, 21, 18),  # connecting block
             (19, 20, 15, 14),
             (18, 19, 14, 13, 12),
             (20, 21, 17, 16, 15)]
    return verts, faces
|
The NBS Easy Pull Takedown Pin (Rear) features an extended head making this an ideal addition for anyone who needs to be able to quickly separate the upper and lower receiver. Commonly used in restricted states along with an AR Maglock or other kind of magazine removal compliance hardware.
These are exactly what I needed and at the right price. Quality is good and I love the phosphate finish. It just has more of that milspec look that I like so much. Can't go wrong with these if you are looking for an easy take down solution for your AR.
Exactly what I expected at a decent price!
Finished nicely, hard to get a grip on it.
|
import sys
# This file is part of VoltDB.
# Copyright (C) 2008-2015 VoltDB Inc.
#
# This file contains original code and/or modifications of original code.
# Any modifications made by VoltDB Inc. are licensed under the following
# terms and conditions:
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
# ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
@VOLT.Command(
    description = 'Collect logs on the current node for problem analysis.',
    options = (
        VOLT.StringOption (None, '--prefix', 'prefix',
                           'file name prefix for uniquely identifying collection',
                           default = 'voltdb_logs'),
        VOLT.StringOption (None, '--upload', 'host',
                           'upload resulting collection to HOST via SFTP',
                           default = ''),
        VOLT.StringOption (None, '--username', 'username',
                           'user name for SFTP upload',
                           default = ''),
        VOLT.StringOption (None, '--password', 'password',
                           'password for SFTP upload',
                           default = ''),
        VOLT.BooleanOption(None, '--no-prompt', 'noprompt',
                           'automatically upload collection (without user prompt)',
                           default = False),
        VOLT.BooleanOption(None, '--dry-run', 'dryrun',
                           'list the log files without collecting them',
                           default = False),
        VOLT.BooleanOption(None, '--skip-heap-dump', 'skipheapdump',
                           'exclude heap dump file from collection',
                           default = False),
        VOLT.IntegerOption(None, '--days', 'days',
                           'number of days of files to collect (files included are log, crash files), Current day value is 1',
                           default = 14)
    ),
    arguments = (
        VOLT.PathArgument('voltdbroot', 'the voltdbroot path', absolute = True)
    )
)
def collect(runner):
    # Forward the parsed CLI options to the Java Collector utility.
    opts = runner.opts
    if int(opts.days) == 0:
        print >> sys.stderr, "ERROR: '0' is invalid entry for option --days"
        sys.exit(-1)
    forwarded = [
        '--voltdbroot=' + opts.voltdbroot,
        '--prefix=' + opts.prefix,
        '--host=' + opts.host,
        '--username=' + opts.username,
        '--password=' + opts.password,
        '--noprompt=' + str(opts.noprompt),
        '--dryrun=' + str(opts.dryrun),
        '--skipheapdump=' + str(opts.skipheapdump),
        '--days=' + str(opts.days),
    ]
    runner.args.extend(forwarded)
    runner.java_execute('org.voltdb.utils.Collector', None, *runner.args)
|
Achievement, through means of cinema, of the lofty aims of humanist values, peace and friendship among the peoples; creation in the Sakhalin region of favorable environment for the cross-fertilization of cultures; creation of the conditions for the Sakhalin region residents’ self-expression in various areas of cultural and social activity.
2.1. Development of cinematic traditions in the Sakhalin region as popular constituent of artistic culture.
2.2. Creation, for inhabitants and guests of the Sakhalin region, of environment conducive to the acquaintance with the best works of Russian and world cinema.
2.3. Strengthening of international ties in the sphere of culture.
2.4. Discovery of new names and creative support of amateur filmmakers.
2.5. Involvement of the residents of the Sakhalin region in cultural and educational events of the Film Festival in order to improve their level of cultural knowledge, stimulate the development of their self-awareness and revealing of new opportunities for self-realization.
3.1. The Film Festival (hereafter — Festival) is held under the patronage of the Governor of the Sakhalin region.
3.2. The Festival is established by the Ministry of Culture of the Sakhalin region.
3.4. The Organizing Committee of the Festival is entrusted with dealing with the Festival’s organizational questions, as well as the definition of the Festival’s scope of ideas and elaboration of the methods of its realization.
3.5. The Festival includes competitive and non-competitive programs.
3.6. Competition and out-of-competition films are selected and invited by the Program Direction of the Festival, which includes the Program director of the festival and programs’ curators. The films are selected in accordance with the Festival’s concept established by the Organizing Committee. In cases of dispute the decision is taken by the Organizing Committee.
3.7. The competition program includes Russian and foreign full-length fiction films; in special occasions (determined by the Organizing Committee) full-length documentary films can also be included in the competition.
3.8. The out-of-competition program presents to the viewers the best examples of Russian and international fiction, documentary, and animated cinema; retrospectives and theme-based programs; films made by inhabitants of the Sakhalin region and connected to the history and culture of Sakhalin. Roundtables, meetings with viewers, workshops, «evening with...» events, exhibitions, and other events that correspond to the aims and objectives of the Festival are also held as part of the Festival.
4.1. The Festival accepts films submitted by film companies, independent producers, and distributors who are their copyright holders.
— that have not been publicly screened before on the territory of the Sakhalin Region.
4.3. World, international, and Russian premieres are preferred for the competition programs, but the premiere status is not required.
4.4. Each film participating in the Competition program of the Festival should be personally presented at the Festival screening by its director, actor, producer, or another member of the cast and crew.
4.5. Out-of-competition program of the Festival can include any film that corresponds to the Festival’s conception, promotes its goals and objectives, regardless of the year, country of production, genre and type.
4.6. Screening copies of the films with the soundtrack in Russian or any other language except English should have English subtitles; this is obligatory for films in the competition program and is preferable for films in the out-of-competition program of the Festival.
4.7. All competition films (except films in Russian) are screened with Russian subtitles. All out-of-competition films (except films in Russian) are screened with Russian subtitles or simultaneous translation into Russian.
4.8. The selection committee is responsible for choosing participating films. In cases of dispute the decision about the film’s participation in the Festival is taken by the Organizing Committee. The Festival does not accept applications. Any questions concerning potential participation should be addressed to info@sakhalinfilmfestival.ru.
4.9. The list of films selected for the Festival’s competition program is published on the Festival’s website no later than ten days prior to the beginning of the Festival.
4.10. Screening copies of the films selected for the Festival should be delivered to General Management no later than ten days prior to the start of the Festival.
4.11. Detailed technical requirements are sent to authors and copyright holders of selected films individually.
5.1. Members of the Organizing Committee are approved by the decree of the Governor of the Sakhalin region.
5.2. Organizing Committee is headed by the Governor of the Sakhalin region. In case the President of the Organizing Committee is not present at the Festival, the Vice-President takes his responsibilities.
— retains the right to invite guests including recognized specialists in culture, cinema, music, as well as representatives of the business and political elite.
6.1. Members of the Artistic Council are approved by the Festival’s establisher.
— other expenses related to the Festival organization and execution.
8.5. The Main prize of the Festival for best film and the prize of best directing cannot be awarded to the same film.
— other staff representatives responsible for preparation and execution of the events.
9.2. Participation in the Festival means agreement to all the articles of these Rules and Regulations.
10.3. The prize money is paid to the winner’s bank account (the sum of the award is subject to personal income tax according to the tax laws of the Russian Federation).
10.4. Each nomination has only one prize winner.
10.5. The payment of the prize money to an under-age person is regulated by the law of the Russian Federation.
10.6. The awards ceremony requires personal presence of the Festival’s prize winners or their representatives.
11.1. The Festival is financed from the Region’s budget as part of the state program «Development of the sphere of culture in the Sakhalin region» for the years 2014–2020, approved by the decree of the Government of the Sakhalin region № 394 dated July 31, 2013; from other sources, as well as from sponsors’ funds.
11.2. Any organization that upholds the Festival’s aims and objectives and that takes part in its financing can be a sponsor of the Festival.
|
from app.core.models import Source, Prediction, Realisation, Category
from rest_framework import generics, permissions, filters
from app.api.serializers import SourceSerializer, PredictionSerializer, RealisationSerializer, CategorySerializer
from app.api.serializers import PredictionCreationSerializer
import app.api.filters
class PredictionList(generics.ListCreateAPIView):
    """List predictions, newest first, or create a new prediction."""
    queryset = Prediction.objects.all().order_by('-creation_date')
    serializer_class = PredictionSerializer
    permission_classes = [permissions.AllowAny]
    filter_class = app.api.filters.PredictionFilter

    def get_serializer_class(self):
        # Reads use the full serializer; writes use the creation serializer.
        if self.request.method == 'GET':
            return PredictionSerializer
        return PredictionCreationSerializer
class PredictionDetail(generics.RetrieveUpdateDestroyAPIView):
    """Retrieve, update, or delete a single prediction."""
    # DRF generic views require an explicit queryset; the bare ``model``
    # attribute shortcut was deprecated and later removed from DRF.
    queryset = Prediction.objects.all()
    serializer_class = PredictionSerializer
    permission_classes = [
        permissions.AllowAny
    ]
    # NOTE(review): a filter class on a detail view is normally unused --
    # confirm whether this is intentional.
    filter_class = app.api.filters.PredictionFilter
class SourceList(generics.ListCreateAPIView):
    """List all sources or create a new source."""
    # DRF generic views require an explicit queryset; the bare ``model``
    # attribute shortcut was deprecated and later removed from DRF.
    queryset = Source.objects.all()
    serializer_class = SourceSerializer
    permission_classes = [
        permissions.AllowAny
    ]
    filter_class = app.api.filters.SourceFilter
class SourceDetail(generics.RetrieveUpdateDestroyAPIView):
    """Retrieve, update, or delete a single source."""
    # DRF generic views require an explicit queryset; the bare ``model``
    # attribute shortcut was deprecated and later removed from DRF.
    queryset = Source.objects.all()
    serializer_class = SourceSerializer
    permission_classes = [
        permissions.AllowAny
    ]
class CategoryList(generics.ListAPIView):
    """Read-only list of all categories."""
    # DRF generic views require an explicit queryset; the bare ``model``
    # attribute shortcut was deprecated and later removed from DRF.
    queryset = Category.objects.all()
    serializer_class = CategorySerializer
    permission_classes = [
        permissions.AllowAny
    ]
class CategoryDetail(generics.RetrieveAPIView):
    """Read-only detail view for a single category."""
    # DRF generic views require an explicit queryset; the bare ``model``
    # attribute shortcut was deprecated and later removed from DRF.
    queryset = Category.objects.all()
    serializer_class = CategorySerializer
    permission_classes = [
        permissions.AllowAny
    ]
class RealisationCreate(generics.CreateAPIView):
    """Create a new realisation."""
    # DRF generic views require an explicit queryset; the bare ``model``
    # attribute shortcut was deprecated and later removed from DRF.
    queryset = Realisation.objects.all()
    serializer_class = RealisationSerializer
    permission_classes = [
        permissions.AllowAny
    ]
class RealisationDetail(generics.RetrieveUpdateAPIView):
    """Retrieve or update a single realisation."""
    # DRF generic views require an explicit queryset; the bare ``model``
    # attribute shortcut was deprecated and later removed from DRF.
    queryset = Realisation.objects.all()
    serializer_class = RealisationSerializer
    permission_classes = [
        permissions.AllowAny
    ]
|
Get your personal windforecast in your calendar to prevent meetings on windy days in advance.
Be on the water when others are still dreaming of it.
You will love this app if you do windsports, like kitesurfing, windsurfing or sailing!
Why should you use WindCalendar?
Some important person calls you to arrange an important meeting. You check your calendar and see that you are free tomorrow and accept the meeting.
If you need help please view our faqs.
If you do, please like our facebook page and share it with your friends.
How do I add WindCalendar to my calendar?
Just fill out the form above, create your personal windcalendar and click on the "add to calendar" - button.
If this doesn't work, right click on the "add to calendar" button and copy the link.
Then go to your favourite calendar and find the option to subscribe to a public calendar and paste the copied link.
How do I add WindCalendar to my calendar on Android/Google Calendar?
Please view this video to get more information about how to add WindCalendar to your devices/calendars.
I don't want to get confirmation mails any more! What can I do?
Remove the WindCalendar subscription from all your devices where you set it up!
How can I uninstall/remove WindCalendar?
Open your calendar and unsubscribe WindCalendar by removing the calendar (e.g. Outlook) from your calendar list.
Go to your Settings-App, then to "Mails, Contacts and Calendars" and then to "Subscribed Calendars". Choose WindCalendar and delete it.
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo.addons.mrp_subcontracting.tests.common import TestMrpSubcontractingCommon
from odoo.addons.stock_account.tests.test_stockvaluation import _create_accounting_data
from odoo.tests.common import Form
class TestAccountSubcontractingFlows(TestMrpSubcontractingCommon):

    def test_subcontracting_account_flow_1(self):
        """End-to-end valuation check for a subcontracted receipt.

        Receives two components into the company's subcontracting location
        at known FIFO costs, then receives one unit of the finished product
        from the subcontractor with an additional subcontracting cost, and
        asserts that the stock valuation layer and the generated journal
        entry both equal component cost + subcontracting cost.
        """
        # Configure real-time (automated) inventory valuation accounts on
        # the finished product's category so journal entries are generated.
        self.stock_input_account, self.stock_output_account, self.stock_valuation_account, self.expense_account, self.stock_journal = _create_accounting_data(self.env)
        self.finished.categ_id.property_valuation = 'real_time'
        self.finished.write({
            'property_account_expense_id': self.expense_account.id,
        })
        self.finished.categ_id.write({
            'property_stock_account_input_categ_id': self.stock_input_account.id,
            'property_stock_account_output_categ_id': self.stock_output_account.id,
            'property_stock_valuation_account_id': self.stock_valuation_account.id,
            'property_stock_journal': self.stock_journal.id,
        })
        # Standard demo locations and unit of measure used below.
        self.stock_location = self.env.ref('stock.stock_location_stock')
        self.customer_location = self.env.ref('stock.stock_location_customers')
        self.supplier_location = self.env.ref('stock.stock_location_suppliers')
        self.uom_unit = self.env.ref('uom.product_uom_unit')
        # FIFO costing so the component costs flow into the finished product.
        self.env.ref('product.product_category_all').property_cost_method = 'fifo'
        self.env.ref('product.product_category_all').property_valuation = 'real_time'
        # IN 10@10 comp1 10@20 comp2 — received straight into the company's
        # subcontracting location so the subcontractor can consume them.
        move1 = self.env['stock.move'].create({
            'name': 'IN 10 units @ 10.00 per unit',
            'location_id': self.supplier_location.id,
            'location_dest_id': self.env.company.subcontracting_location_id.id,
            'product_id': self.comp1.id,
            'product_uom': self.uom_unit.id,
            'product_uom_qty': 10.0,
            'price_unit': 10.0,
        })
        move1._action_confirm()
        move1._action_assign()
        move1.move_line_ids.qty_done = 10.0
        move1._action_done()
        move2 = self.env['stock.move'].create({
            'name': 'IN 10 units @ 20.00 per unit',
            'location_id': self.supplier_location.id,
            'location_dest_id': self.env.company.subcontracting_location_id.id,
            'product_id': self.comp2.id,
            'product_uom': self.uom_unit.id,
            'product_uom_qty': 10.0,
            'price_unit': 20.0,
        })
        move2._action_confirm()
        move2._action_assign()
        move2.move_line_ids.qty_done = 10.0
        move2._action_done()
        # Receipt of 1 finished unit from the subcontractor; price_unit on
        # the move plays the role of the subcontracting (service) cost.
        picking_form = Form(self.env['stock.picking'])
        picking_form.picking_type_id = self.env.ref('stock.picking_type_in')
        picking_form.partner_id = self.subcontractor_partner1
        with picking_form.move_ids_without_package.new() as move:
            move.product_id = self.finished
            move.product_uom_qty = 1
        picking_receipt = picking_form.save()
        picking_receipt.move_lines.price_unit = 30.0
        picking_receipt.action_confirm()
        picking_receipt.move_lines.quantity_done = 1.0
        picking_receipt.action_done()
        # Finished is made of 1 comp1 and 1 comp2.
        # Cost of comp1 = 10
        # Cost of comp2 = 20
        # --> Cost of finished = 10 + 20 = 30
        # Additional cost = 30 (from the purchase order line or directly set on the stock move here)
        # Total cost of subcontracting 1 unit of finished = 30 + 30 = 60
        self.assertEqual(picking_receipt.move_lines.stock_valuation_layer_ids.value, 60)
        self.assertEqual(picking_receipt.move_lines.product_id.value_svl, 60)
        self.assertEqual(picking_receipt.move_lines.stock_valuation_layer_ids.account_move_id.amount_total, 60)
|
Texas A&M International University students can improve their (Texas Higher Education Assessment) THEA score with help from TAMIU's free seven-day Summer Academic Mathematics and Writing Boot Camp offered by the University College.
Registration is open until the date of each camp.
Register at Dr. Billy F. Cowart Hall (BCH), rooms 205 and 203.
Two Boot Camps are scheduled: July 12 – 21 and July 26 – Aug. 4, 9 a.m. – noon or 1 – 4:30 p.m.
The Summer Boot Camps are for students who scored: writing—220 THEA or a five essay on Compass; math—230-249 THEA or 39 – 44 on Compass.
The Summer Boot Camps provide a curriculum of content review and test practice so students may more effectively prepare for the THEA to improve skill levels in reading, writing and math.
Only current TAMIU students or students registered at TAMIU for Fall 2010 in developmental status may apply.
For more information, contact Norma Cortez, office coordinator, at 326.2722 or Taryn Shehab, office coordinator, at 326.2883.
University office hours are 8 a.m. – 6 p.m. Monday – Thursday and 8 a.m. – noon on Fridays.
|
# Copyright 2017 BrainPad Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from __future__ import absolute_import, division, print_function, unicode_literals
import logging
import os
import tensorflow as tf
from trainer.feature_extractor import FeatureExtractor
from trainer.model import ModelParams, TransferModel
logger = logging.getLogger(__name__)
class CandyClassifier(object):
    """Classify candy images.

    Combines a frozen Inception network (feature extraction) with a
    fine-tuned transfer model restored from a checkpoint directory.
    Call :meth:`init` once before :meth:`classify`.
    """

    def __init__(self, checkpoint_dir, params_file, inception_model_file):
        # Both models are loaded lazily by init()/reload(), not here.
        self.inception_model = None
        self.model = None
        self.checkpoint_dir = checkpoint_dir
        self.params_file = params_file
        self.inception_model_file = inception_model_file

    @classmethod
    def from_config(cls, config):
        """Alternate constructor pulling file locations from *config*."""
        model_dir = config.CLASSIFIER_MODEL_DIR
        return cls(
            checkpoint_dir=model_dir,
            params_file=os.path.join(model_dir, 'params.json'),
            inception_model_file=config.INCEPTION_MODEL_FILE
        )

    def init(self):
        """Load the Inception model and the transfer model."""
        self._load_inception_model()
        self._load_transfer_model()

    def reload(self):
        """Rebuild the transfer model on a fresh graph (e.g. after retraining)."""
        tf.reset_default_graph()
        self._load_transfer_model()

    def _load_inception_model(self):
        # Heavyweight: reads the serialized Inception graph from disk.
        logger.info('Loading inception model...')
        self.inception_model = FeatureExtractor(self.inception_model_file)
        logger.info('Finished loading inception model.')

    def _load_transfer_model(self):
        # Model hyper-parameters live in a JSON params file next to the
        # checkpoints; weights themselves are restored later in classify().
        logger.info('Loading transfer model...')
        with tf.gfile.FastGFile(self.params_file, 'r') as f:
            params = ModelParams.from_json(f.read())
        self.model = TransferModel.from_model_params(params)
        logger.info('Finished loading transfer model.')

    def classify(self, img_bgr):
        """Return the prediction for a single BGR image.

        Raises:
            IOError: if no checkpoint is present in ``checkpoint_dir``.
        """
        features = self.inception_model.get_feature_vector(img_bgr)
        ckpt = tf.train.get_checkpoint_state(self.checkpoint_dir)
        if ckpt is None:
            raise IOError('Checkpoints not found.')
        result = self.model.restore_and_predict(features, ckpt.model_checkpoint_path)
        return result[0]
|
Finally, after a long arctic winter, the vast polar ice pack cracks and slowly opens up along Alaska's north coast, exposing a thin ribbon of ocean between two vast frozen plates. Spring is the season of wonder here: The temperature has risen to 5 degrees, the wind blusters out of the east with the distant tease of more warmth to come, and darkness has lifted--the sun bobs in the sky 23 hours a day. Now, as they have throughout their history, Inupiat Eskimos advance onto the blue-white shore ice in a ritual of renewal, affirmation, hope and blood. They have come to hunt the bowhead whale.
George Ahmaogak faces the ocean, the hood of his parka ringed with a ruff of wolverine fur and his almond eyes alive with anticipation. He grips my arm with a heavy-gloved hand and nods toward the horizon.
" Agvik ," he whispers. "Big one!"
Out in the water, a shiny wet-black hump breaks the surface, a spout of gray steam emerges with a hiss, and an otherworldly low-pitched song vibrates the air around us. A shiver runs down my neck.
"That's a *%* big whale," says Ahmaogak, peppering his speech, as usual, with the unprintable. A smile breaks out on his ruddy face. A smile with bite to it, the smile of a hunter who has sighted his prey.
At 45, Ahmaogak (pronounced Ah-MA-walk ) is the charismatic three-term mayor of Alaska's North Slope Borough, the northernmost municipal government in the United States. But more important, at least when the arctic whales migrate, he is one of a select group of men and women known as whaling captains. He and the others shoulder the responsibility of providing traditional food and keeping alive the most profound rite of Inupiat culture: the catching of the whales.
This is hunting on an epic scale. Hundreds of villagers are involved in the chase and conquest of one of the largest animals in the world. And there is no such thing as an observer. You are part of the crew or you are off the ice.
At Ahmaogak's signal, five whalers and I grab axes and chop a trail through blocky ice ridges between camp and the open water so Ahmaogak's two boats can be skidded to the edge and launched. One is an umiaq , a paddle craft out of the Stone Age made of laced sealskin, the other an 18-foot aluminum skiff with an 80-horsepower outboard motor. This is all-out work, and the crew breathes long streams of steam into the cold air. As ice cakes my beard, sweat runs down my back under my heavy parka.
Ahmaogak, as befits his captain's role, supervises. "How long do you think you'd last if you went into the water?" he asks me at one point.
"This whole thing is full of danger," he says grimly. "And you have to know what the *%* you're doing."
I, of course, have no idea what I'm doing. I put my head down and chop.
Another whale--closer this time, huge--surfaces and blows. Pssssshhhh! I chop faster.
"Pretty soon, we go out there. Then it's boom boom," Ahmaogak says to no one in particular. He flashes his teeth in another wild smile.
FOR CENTURIES, GEORGE AHMAOGAK'S ANCESTORS HAVE LIVED AND whaled in the land above the Arctic Circle. Anthropologists believe the Inupiat are the descendants, along with other Eskimo groups, of the second wave of migrants to cross the Bering Sea about 4000 BC. Early evidence of their whaling skills dates from at least AD 800. By the 1800s, when Europeans finally ventured through the Bering Strait, these Eskimos were the best sea-mammal hunters on the planet, and their most formidable prey was the bowhead.
Like the buffalo for the Plains Indian, the whale was the stuff of survival and social structure for the Inupiat. Whale meat was their food; whale oil, their source of heat and light. The Inupiat commonly lived in separate family groups, but for the whaling effort, they pooled their resources and talents, with umiaq owners and the best whalers as leaders. Their most important social gatherings were celebrations of the hunt.
Today in Barrow, and in seven other villages that dot the 89,000-square-mile North Slope Borough, the Inupiat continue to practice "subsistence" hunting--and the whale remains their most prized prey. The whalers are still the community leaders. During my visit, the Borough Assembly couldn't muster a quorum at its monthly meeting in Barrow--too many of its members were out on the ice, captaining whale boats. And, as they have through the ages, entire villages still assemble on the ice to haul in a whale and celebrate its capture.
To outsiders, it can seem a curious world. In 1994, these Eskimos take Hawaii vacations and own big-screen TVs. They wear starched white shirts and drive fancy pickups. Some are businessmen, and all of them are shareholders of the Arctic Slope Regional Corp., which was established by the federal Alaskan Native Land Claims Settlement Act in 1972. Thanks to the tax levies they collect on the Prudhoe Bay oil fields, which lie on their ancestral lands, and the corporation's investments, families of the North Slope Borough have the 29th highest median income in the United States. But they also must spend a lot: A bag of Cheetos costs $4.99 in Barrow. And what other jurisdiction with so few citizens--6,300 total, 70% of whom are native--pays its chief elected official $160,000 a year?
For the Inupiat, however, dollar prosperity is no substitute for whaling; it is a means to achieve it. Captains pony up thousands for the canvas tents, snowmobiles (which the Alaskans call snow machines), food, fuel, boats, weapons and radios.
More important, in the late 1970s the Inupiat applied their new wealth to scientific studies and international lobbying, beating back angry attempts by environmentalists to eliminate the native hunt along with all other forms of whaling. Now, the International Whaling Commission grants the Inupiat an annual quota of bowhead, based on the number scientists judge can be harvested without further jeopardizing this endangered species.
Today, with their prehistoric skin boats and turn-of-the-century harpoons, with their snow machines and rescue helicopters, with their old razor-sharp pole knives and fur clothing, with their briefcases and stock portfolios, the Inupiat Eskimos hover between the past and the present. Holding onto the hunt, they believe, provides the ballast that keeps them upright in the late 20th Century.
"It's our food, our culture, our tradition, our heritage," Ahmaogak says simply.
"Without whaling there would be no purpose to Barrow," says Glenn Roy Edwards, Ahmaogak's second in command and a jet-set businessman with the Arctic Slope Regional Corp. "I depend on my job, I like my job. But if it came down to a choice, I'd leave it to come out here and go whaling. I am first a whaler."
"GET ON. YOU'LL HAVE TO STEER THE SLED."
A resupply party is about to depart Barrow for Ahmaogak's whaling camp, about 10 miles out on the shore ice.
"Might as well learn now," I'm told.
In coastal Barrow, all the streets are gravel, and those that lead to the ocean are not impeded by stop signs. We have driven off the end of one of them. In summer, we would be on an Arctic Ocean beach. Today our caravan of three snow machines is aimed north on the ice. One of them tows a heavy wooden sled.
"OK," I say, "What do I do?"
I grab the back of the sled and position my feet on the runners. An hour later, after bouncing across fractured shadowy white and aquamarine ice, I have become a sled steerer. Or at least I was able to hang on.
Winding through a two-story-tall pressure ridge, where shifting ice plates have collided and formed a mini-mountain range, we come screaming into the camp. It is pitched in what might be called a flat meadow of ice, lightly dusted with dry snow. Ahmaogak and his crew set it up a few days ago, when the ice first began to break up. It holds as many as 10 whalers--the crew waxes and wanes with the press of outside business and the presence or absence of whales. At the moment, there are six of us packed into the camp's one plywood-floored 10-by-12 canvas tent.
Outside, an umiaq and an aluminum boat rest on sleds next to crates of supplies and fuel. Everything is camouflaged white. Whales are known to be spooked by colors. High-powered rifles are slung casually on snow machines and propped against the tent. This is polar bear country, and polar bears aren't spooked by much of anything.
A quarter-mile away, across much thinner "young" ice, is the crack where the polar pack begins to recede. Depending on the wind and ocean currents, this "lead" of water can grow to almost three miles wide, only to close right back up hours later. Through this lead, the bowheads are migrating northeast from their winter grounds in the Bering Sea to their summer range north of the Canadian Yukon in the Beaufort Sea.
With the ice in front of us constantly moving, the landscape changes as if in a time-lapse movie. Watch carefully and you can see floes tugged this way and that. Ten minutes later, it's all different. The ice we are camped on could break loose at any time and join this flotilla. For the next seven days, we will rarely take off our boots and never remove our clothes, always ready to dash for safety.
As the newcomer, I am the lowest person in the crew hierarchy; never mind that I am also the oldest and city-softest. I am assigned to chop and melt ice for fresh water, make the coffee--endless pots of coffee--do the dishes, help cook, chop trail, steer sleds. Even the children--apprentice whalers--outrank me.
I have lots of questions. Like, how do I get fresh water from a sheet of frozen sea ice?
"That's old ice there, the salt has leached out," Ahmaogak points to a nearby jumble of clear ice blocks with slightly rounded edges. "Now that, that is sea ice. No good." He points to another pile of younger ice farther away with sharper corners and a bluish cast. It is still filled with salt.
With no warning, he laughs--a huge, contagious ha-ha-ha bellow, entirely free of cynicism. I am, as he will tell me from time to time, "a *%* dummy. Ha ha ha." I always laugh too.
It's dinner time, and the lead is closed so the atmosphere at camp is relaxed. Ahmaogak's wife, Maggie, is demonstrating the two recipes in the Inupiat cookbook--animal flesh, uncooked or boiled. She is executive director of the Alaska Eskimo Whaling Commission, the Inupiat organization that manages the annual whale hunts and lobbies Eskimo interests before international regulators. Outside the tent, she opens a wooden grub box and gathers an armload of frozen caribou chunks, each a square the size of a fist. Here, the outdoors serves as meat locker, and ice chests are used to keep items such as soda pop from freezing. Tonight's menu turns out to be, first, slivers of uncooked caribou shaved from the frozen chunks, then caribou soup--boiled, half-raw meat in a liquid thickened with pancake mix because someone forgot to bring flour.
A kerosene heater and a one-burner propane stove keep the temperature as uncomfortably hot inside the tent as it is uncomfortably cold outside. Idle time is passed smoking cigarettes and cigars and making small talk, half of it conducted in the language of the Inupiat, an emphatic monotone that few non-natives even attempt. We sit inside on storage boxes or on polar bear rugs that double as sleeping mats.
Always, someone is outside on watch, for bears, for signs that our ice is breaking loose, for whales. I feel myself becoming lazy and comfortable. Ahmaogak, lounging on a bearskin, detects a shift in the wind and is suddenly worried. If the breeze strengthens from the north, the ice pack in the distance could be propelled our way.
"That big ice out there starts moving, well, *%*. We gotta be ready to evacuate, or we'll end up in Siberia. Ha ha ha."
Ahmaogak, like many Inupiat, speaks English softly, like a whisper brought up the register to normal volume, and gives equal emphasis to each syllable and every word, as if they are not to be expended lightly.
In the arctic twilight that passes for night, six of us prepare to sleep together Eskimo style, under shared blankets. Wind rattles the canvas. "*%*. I hope we don't have to evacuate at 4 a.m., if this country goes to hell," Ahmaogak says.
The snoring that commences puts me at ease. Better than listening for the ice to crack underneath me. Below that is 120 feet of frigid Arctic Ocean.
ALTHOUGH LEGEND TELLS OF BOWHEADS MORE THAN 60 FEET IN length, most of the large ones documented now are in the 50-foot range, and 30-footers are most commonly caught by the Inupiat. Chunkier than most whales, a big bowhead can weigh up to 100 tons. These days, the International Whaling Commission estimates that the bowhead population hovers between 6,900 and 9,200 in the waters off Alaska, compared with an estimated 20,000 a century and a half ago. But bowheads are increasing in number, and the Barrow whalers have been allotted a 1994 spring quota of 18 whales, including those that are landed and those that are harpooned but lost.
Bowheads are in the family of baleen whales, with giant mouths in the shape of upside-down smiles. Instead of teeth, they have an arrangement of slats, like vertical Venetian blinds. These baleens strain seawater for krill, the small shrimp that is the bowhead's main food. Bowhead eyes are down by their bellies, just at the corner of their mouths, and their breathing holes are atop a streamlined hump that forms the apex of a bow-like silhouette.
Traveling on the edges and leads of the shifting polar ice, the bowhead has developed the ability to break through ice two feet thick to reach air. (Those were not bowheads but less-well-adapted gray whales that made news around the world when they found themselves trapped under the ice near Barrow in 1988.) No one knows how long bowheads live. But twice in recent years, the Inupiat have found ivory and jade harpoon tips embedded in the blubber of captured whales, which tantalizes the imagination: Such tips were last used on harpoons at the turn of the century.
Like other great whales, the bowhead was savaged in the 19th Century by commercial whaling. In 1848, with other waters exhausted, Yankee whalers finally ventured north, through the strait between Siberia and Alaska, following the oil-rich bowheads. Within four years, there were 220 whaling ships in the Arctic, seeking oil and whalebone and bringing whites into everyday contact with the Eskimo. By the turn of the century, the abundant bowhead was dying out and so was commercial whaling.
Though contact with whites diminished along with commercial whaling on Alaska's north coast, the Inupiat culture had already been changed irrevocably. Missionaries had arrived, white diseases had taken their toll, and the natives had begun to adopt new weapons and tools.
Today in the Arctic, the traces of the Yankee whaling days are easy to see--in the bloodlines of the Eskimos, for instance, and in the Christian invocation pronounced whenever a whale is taken. Nowhere is the link so apparent as in the primary tools used by the contemporary Inupiat to hunt bowheads.
Unlike the harpoon cannons of the modern commercial whalers, the Inupiat harpoon is unchanged from those of Yankee whalers in the last century. Hand held and hand thrown, it carries a brass-tipped point and is fixed with a brass gun barrel. The harpoons are still manufactured in New England, once the capital of the Yankee whalers.
The attack commences like this: The boat is driven, or paddled, alongside a surfacing whale. In the bow, the harpooner, standing within a few feet of the whale's back, jabs the animal. Once the tip of the harpoon is embedded in the blubber, it pulls off from the 10-foot wooden harpoon shaft and triggers the firing of a "bomb" from the gun barrel--an exploding foot-long shell on a seven-second fuse. The harpoon tip is attached to 33 fathoms of rope and a large float that can be followed when the whale dives. If the shell penetrates deeply into the animal and explodes near a vital organ, the whale dies immediately. Other times, it can take a dozen or more bombs fired from a supplementary 50-pound brass shoulder gun.
A minimum crew in a motor-powered aluminum boat is three people. With it, the strategy is to strike with speed. Success with the much more commonly used umiaq requires stealth and as many as eight paddlers to follow and stay with the whale. Once a whale is struck, the chase can last for minutes or for hours.
Good. I must have slept after all.
Ahmaogak fills a skillet with Crisco and fries egg rolls for breakfast, a good-luck gift from a Filipino, a friend who runs a taxi in Barrow--one of among the dozens of outsiders who drift here hoping to share in the oil wealth. The coffee is rich and gritty, four handfuls of grounds dumped into a pot of melted ice. The tent fills with cigarette smoke and kerosene fumes. A portable receiver is tuned to Alaska Public Radio and a marine-band radio crackles with "good mornings" as the whalers--there are 44 crews on the ice from Barrow this year--check in with their families. I have never met an Eskimo who did not have at least one electronic receiver on at all times--CB, TV, AM, FM--often several together. And this year, Ahmaogak is experimenting with a new toy, a cellular telephone.
I ask about two Eskimo myths I remember from my childhood. Back in their more nomadic days, I was told, they left their elderly behind in the snow when they could not keep up.
True, says one. Not true, says another.
How about the one where a male traveler was invited to sleep with the Eskimo's wife?
Used to be, if he liked you. But the missionaries took care of that.
Ahmaogak has been described as "the Eskimo John Kennedy," a man who inspires his people. He is like no politician I have ever met--confident, authentically magnetic, entirely open, rough-hewn, bawdy and temperamental. It's a rare elected official you don't mind sharing a tent with. Ahmaogak boasts of earning three days of "good time" off his 10-day jail sentence after a drunken binge. He is a champion of native rights but has hired plenty of non-natives to keep the town running smoothly. He and Maggie met in high school and married in Los Angeles while she attended MTI Business College and UCLA and he went to Northrop-Rice Aviation Institute of Technology in Inglewood. At home in Barrow, they have a 54-inch TV in the living room and a whale skull and several frozen seal carcasses in the front yard.
At some cue too subtle for me, everyone suddenly leaps up and explodes into motion. We jump onto snow machines and scream out to inspect the lead. In front of us, bowheads break the surface, their heads high out of the water.
"They're looking around--looking for a trail through the ice. Wouldn't you?" says Ahmaogak. We listen to their colossal splashes.
Ahmaogak surveys the ice and picks a spot to launch the boats. Whaling captains are years in the making--men, and a few women, who have apprenticed for the job since their youth and who command the respect of enough villagers willing to sign on as crew. They must also be successful enough in their other lives to have the money to finance the endeavor.
We begin to chop trail. Good-natured harpooner Perry Okpeaha is helping; so is stoical shoulder-gunner Larry Itta and 12-year-old Qaiyaan Harcharek, a determined sixth-grader who is in his sixth season as apprentice whaler.
By midmorning, we have finished the trail and pulled the aluminum boat down to the water's edge, lifted it off its sled and skidded it to the lip of ice, which rises white for several inches above the water and descends translucent blue for a foot underneath. But the wind shifts and the lead begins to close. We retreat. At midafternoon, the water opens again, and the boat is launched. By 6:30, the crew has chased four whales but never quite come close enough for Okpeaha to hurl the harpoon.
The routine is typical and repeated often--the crew advances, retreats, rests and then resumes the hunt.
On the marine radio, static turns into cheers. A few miles west of us, captain Jake Adams has caught the first whale of the year for Barrow, a 28-footer. "Bambi," says Ahmaogak. Once Adams' whale is secured by a rope to the tail and maneuvered to the shore ice, the crew delivers a prayer of thanks. Then the village airwaves crackle into life, filled with the news. In Barrow and on the ice, snow machines are fired up to carry as many as can come out to Adams' camp to help pull the bowhead from the water and butcher it into slabs of blubber and meat.
The first whale of the year is always divided up among all the crews in Barrow. From then on, only those who come out onto the ice to help will receive a share. It can take up to 36 hours or more of nonstop effort to land and cut up a big bowhead. A large portion of the food is stockpiled for the year's many coming feasts.
Something else comes over the radio. Not far away the ice has broken loose and two crews are adrift in the sea. On a big sheet of ice, there is little immediate worry. But big sheets can shatter into little ones by force of waves or in a collision with drifting icebergs, and that can be catastrophic. A search-and-rescue helicopter is launched from Barrow to retrieve the crews and their gear. "Should have watched their back door. Gotta watch the back door," growls Ahmaogak. Ha ha ha.
We hurriedly check the ice at our own back door. Holding solid, say those who know such things. By now, I'm so spooked I step only in the footprints of someone else. I have been advised to carry a quick-draw sheath knife so that if I fall into the water, I can stab the ice and keep from slipping under until help arrives, if help is handy. I try to visualize the experience.
We spend the evening--which looks almost the same as the morning under the arctic sun--sitting on a boat sled, with a white canvas windbreak stretched behind us, quietly scanning the water for the steam geysers of surfacing whales. Hours pass with hardly a word.
Even here, the primitive landscape of ice and water is clouded by development. With the Prudhoe Bay oil fields now past their peak, oil companies have their eye on expanding offshore drilling in the Arctic Ocean to keep the Trans-Alaska pipeline flowing. The Inupiat strongly oppose the idea. Not only would they receive no tax revenues for operations beyond the three-mile coastal boundary, they also fear an oil-well blowout would jeopardize the survival of the bowhead. But they are plenty eager for more onshore drilling, particularly in the Arctic National Wildlife Refuge to the east. Their taxing authority would extend to such development, plus they hold mineral deeds to some of the potential sites.
As the winds pick up, massive ice floes begin to dance across the near horizon. "That is some heavy *%*," says Ahmaogak. "Imagine wind and current pushing on this--that's our worry. No drilling rig could stand up to that."
AS LONG AS THE LEAD IS OPEN, WE SLEEP ONLY INTERMITTENTLY, ON NO schedule whatsoever. Sometimes the tent is too full, and I borrow a bearskin and curl up in an 18-foot skiff. It is hard to keep a grip on time. "I have a feeling it's going to be a long day tonight," is the way one of the other whalers puts it.
Whatever stimulates Ahmaogak into action remains a mystery to me. One moment we are sitting quietly. And the next, scrambling.
Ahmaogak and crew launch the skin boat and come within feet of striking a whale that approaches head on. They paddle behind a distant iceberg and I find myself alone on the ice. Just two days ago, a polar bear wandered near camp. Ahmaogak could put both of his oversized arctic boots inside the bear's paw print. The marine radio has been reporting bears lurking around other camps. Where did they leave those rifles? I wonder.
A bowhead breaches 100 feet from the ice edge, its mighty head rising more than 20 feet out of the water. I look into its saucer-sized black eye, and it emits a ghostly blow.
I take my turn in the aluminum boat as the crew races up and down the lead, futilely chasing whales but coming so near to their submarine-sized backs that my chest pounds. Okpeaha raises his harpoon, once, twice, three times. Never quite close enough to let fly.
Back at camp, our share of Jake Adams' whale is delivered to the tent, and I cook the traditional uunaalik-- pungent, boiled finger-sized pieces of whale blubber and skin with the overpowering redolence of fish oil that brings the whole crew scrambling to the pot. Recipe: Boil strips of whale in water until the blubber floats, about five minutes. We also snack on crunchy frozen chunks of whitefish dipped in seal oil. One night we break from the native menu and I open several cans of chili, which is followed by a rousing after-supper bout of gas passing, men and women joining in uproariously without the slightest self-consciousness.
"Let's change our diet!" says Ahmaogak during another lull in the action. We race off to hunt seals, but find none. We do find a new launch site and move our whaling boats from fragile ice to an even riskier location, where we have to leap across a watery two-foot crack to reach the lead. Here the ice is so thin and flexible it undulates to the rhythm of the sea waves. When the largest icebergs I've seen bear down on our camp, Ahmaogak laughs. "Maybe tomorrow we have to move camp back closer to town. Then we'll go have a shot of Courvoisier, a cigar and a shower. Ha ha ha. Or maybe we break off and ride to Siberia."
Instead, we stay put. Next day, the lead is clear. West of us, the whaling action picks up. Three whales are struck, and the chase is on. A cease-fire order comes via radio for the remaining crews while the pursuit continues. No whales are landed, although they eventually will begin to rot and float to the surface as "stinkers," animals that can be only partly salvaged.
Again the radio comes to life. Whaling captain Thomas Brower III has caught a 33-footer. I am set to leave soon; it's now or never if I want to see a whale brought in. I set out in a snow machine for the 30-mile journey across the ice to the Brower camp, joining perhaps 90 whalers and villagers who will converge to pull the whale out of the water and divide it up.
Over the course of more than an hour, in a massive tug of war, we use rope, block and tackle to inch the rubbery, lifeless whale onto the ice. Men climb onto the nine-foot-high carcass to slice it, crosswise, into 18-inch strips of skin and blubber that are undercut and peeled off in an assembly-line operation. While the adults work, children bounce and play on the carcass. Finally, the meat and tongue and internal organs are divided up. The baleens will be scrimshawed and sold to tourists, the only cash derived from a whale. By the time the job is finished, a ragged circle of snow near the lead has turned bright red.
WHEN I DRAG INTO CAMP 10 hours later, I'm as tired as I've ever been. I begin my goodbyes.
Maybe, it is suggested, I'll come back for the nalukataq feast, which celebrates the end of the whaling season. Then there are the geese and ducks to hunt, as they migrate in their turn. "Boom boom," says the mayor. And after that the hunt for caribou and moose, and after that, the whales once again, during their return migration.
"It's something out here, isn't it?" Ha ha ha. Ahmaogak pulls off his polar-bear mitten, squeezes my hand and turns. He and his crew climb into the aluminum boat. The last I see of them are the backs of three Eskimo parkas, hunched against the wind, fur hood ruffs flying as the captain guns his outboard and disappears into a tangle of icebergs.
|
import codecs
import attr
from path import Path
from . import exceptions
class BaseLoader:
    """Common interface for objects that locate and compile rulebooks."""

    def load(self, environment, name):
        """Fetch the rulebook *name* and compile it within *environment*.

        Returns whatever ``environment.compile_rulebook`` produces.
        """
        rulebook_source, freshness_check = self.get_source(environment, name)
        return environment.compile_rulebook(rulebook_source, name, freshness_check)

    def get_source(self, environment, name):
        """Return a ``(source, is_up_to_date)`` pair; concrete loaders override."""
        raise NotImplementedError()
@attr.s
class FileSystemLoader(BaseLoader):
    """Loads rulebook source files from ``base_path`` with ``extension``."""

    base_path = attr.ib(default='.')
    extension = attr.ib(default='.ravel')

    def get_up_to_date_checker(self, filepath):
        """Return a zero-argument callable reporting whether *filepath* is unchanged.

        The file's mtime is captured now; the returned closure compares the
        current mtime against that snapshot.  A missing file counts as out
        of date.
        """
        filepath = Path(filepath)
        try:
            mtime = filepath.getmtime()
        except OSError:
            # File may not exist yet; any later appearance counts as a change.
            mtime = 0.0

        def is_up_to_date():
            # BUGFIX: removed a leftover debug print() that logged the mtimes
            # on every freshness check.
            try:
                return mtime == filepath.getmtime()
            except OSError:
                return False

        return is_up_to_date

    def get_source(self, environment, name):
        """Read the rulebook source for *name*.

        Returns:
            tuple: ``(source, is_up_to_date)`` — the file contents and a
            freshness-checking callable.

        Raises:
            exceptions.RulebookNotFound: if no matching file exists.
        """
        filepath = Path(self.base_path) / (name + self.extension)
        if not filepath.exists():
            raise exceptions.RulebookNotFound(name)
        with codecs.open(filepath, encoding='utf-8') as fi:
            source = fi.read()
        is_up_to_date = self.get_up_to_date_checker(filepath)
        return source, is_up_to_date
|
Calling all active/reserve and retired Navy shooters!
DEVGRU finally had their way and booted us from Damneck so it's at Quantico this year.
Most of the details are still being worked out so look here or http://www.usnst.org/ for more info as it comes out.
Below is a quote of an email I received with a little bit of amplifying information.
The matches include High Power Rifle, EIC Rifle, Service Pistol (Bullseye), EIC Pistol, and usually several long range Rifle and Pistol events as well.
Here is the link to the match program (an MS Word document).
Here is the link to Online Registration for the East Coast match.
We're one step closer to executing the 2011 FFC(Atlantic) and All Navy Rifle & Pistol Championships at WTB, MCB Quantico. The match program and registration are up...check out www.usnst.org and www.odcmp.com for registration (link on www.usnst.org). The message has gone through a few iterations, and will hopefully be released by the end of this week as a NAVADMIN from OPNAV N3. I ask that, in light of the late announcement message, everyone get the word out to your shooting friends on blogs, distro lists, forums, base newspapers, and town criers! The matches are open to Active, Reserve and Retired of all services this year (next year will likely open to civilians). Match fees are $50 each week (payable via check or paypal), and billeting is provided free of charge. The program should answer most of your questions, but please let me know if you see any discrepancies, or have any difficulties trying to register via CMP.
Last edited by thomae; March 2nd, 2011 at 09:56 AM.
On the plus side no cease fires for boats in the impact area.
|
# Big Data Smart Socket
# Copyright (C) 2016 Clemson University
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
import unittest
from unittest.mock import patch
from voluptuous import All, Range
from client.transfer.mechanisms import base
class TestUserInputOptionMechanism(unittest.TestCase):
    """Unit tests for ``base.UserInputOption`` prompting behaviour."""

    @patch("builtins.input", return_value="Hello")
    def test_user_input_option_prompts(self, mock_input):
        """A plain option prompts through input() and returns the reply."""
        option = base.UserInputOption("option", "Option value?")
        result = option.prompt_for_value()
        mock_input.assert_called_once_with("Option value?")
        self.assertEqual(result, "Hello")

    @patch.object(base, "getpass", return_value="World")
    @patch("builtins.input", return_value="Hello")
    def test_user_input_option_prompts_with_hidden_input(self, mock_input, mock_getpass):
        """hide_input=True routes the prompt through getpass, not input()."""
        option = base.UserInputOption("option", "Option value?", hide_input=True)
        result = option.prompt_for_value()
        mock_getpass.assert_called_once_with("Option value?")
        self.assertEqual(result, "World")

    @patch("builtins.input", side_effect=["a", -1, 5])
    def test_prompt_until_valid_value(self, mock_input):
        """Invalid replies are re-prompted until validation succeeds."""
        validator = All(int, Range(min=4, max=8))
        option = base.UserInputOption("option", "Option value?", validation=validator)
        result = option.prompt_for_value()
        self.assertEqual(mock_input.call_count, 3)
        self.assertEqual(result, 5)
|
Are you an electrician who uses laser levels to layout or install conduit, outlets, lighting, or other electrical systems? If so, we’d love to meet you and tell you about our PLS, Fluke, and Amprobe tools for construction solutions.
See PLS at the Fluke booth (#1408) from Sunday September 30th through Tuesday October 2nd at the 2018 National Electrical Contractor Association (NECA) Convention in Philadelphia. The NECA convention is still the largest gathering of manufacturers and distributors for electrical professionals in North America.
Come meet the PLS product manager and our tools sales managers to learn more about how PLS lasers, Fluke indoor air quality tools and meters, or the Amprobe wire tracer can help your daily workflows! On Sunday, at 4:30 pm, we’ll also have a special, live interview at the booth talking specifically about PLS, Fluke, and Amprobe construction solutions. Come meet the team, stock up on swag, and participate in the Fluke giveaways and drawings. We’re looking forward to seeing you in Philadelphia.
|
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Command-line interface to MLM
"""
import inspect
import pkgutil
import sys
import argparse
import mlm
from mlm import api
from mlm import commands
from mlm.commands import utils
def main(input_args=None):
    """Parse command-line arguments and dispatch to the selected subcommand.

    :param input_args: argument list to parse; defaults to ``sys.argv[1:]``.
    :returns: ``2`` when no subcommand was given (after printing help),
        otherwise the implicit ``None`` of the dispatched callback.
    """
    if input_args is None:
        input_args = sys.argv[1:]

    # base parser
    parser = argparse.ArgumentParser(
        prog="mlm",
        description=__doc__.strip(),
        add_help=True
    )
    parser.add_argument('-v', '--version',
                        action='version',
                        version=mlm.__version__)
    parser.add_argument('--config-file', type=str, metavar="<file>",
                        help="Path to configuration file.")

    # all subcommands
    subcommands = parser.add_subparsers(help='<subcommands>')
    for importer, modname, _ in pkgutil.iter_modules(commands.__path__):
        # Load every submodule so its command groups register themselves as
        # subclasses of utils.BaseCommand below.
        importer.find_module(modname).load_module(modname)
    for group_cls in utils.BaseCommand.__subclasses__():
        group_parser = subcommands.add_parser(
            group_cls.__name__.lower(),
            help=group_cls.__doc__)
        subcommand_parser = group_parser.add_subparsers()
        for name, callback in inspect.getmembers(
                group_cls(), predicate=inspect.ismethod):
            command = name.replace('_', '-')
            desc = callback.__doc__ or ''
            # First docstring line doubles as the one-line help text.
            help_message = desc.strip().split('\n')[0]
            arguments = getattr(callback, 'args', [])

            command_parser = subcommand_parser.add_parser(
                command, help=help_message, description=desc)
            for (args, kwargs) in arguments:
                command_parser.add_argument(*args, **kwargs)
            command_parser.set_defaults(func=callback)

    # parse and run
    args = parser.parse_args(input_args)
    if not hasattr(args, 'func'):
        # BUGFIX: on Python 3 subparsers are optional by default, so invoking
        # "mlm" without a subcommand used to crash with AttributeError here.
        parser.print_help()
        return 2
    args.func(api.API(args.config_file), args)


if __name__ == "__main__":
    main()
|
I would like to share my experiences to the 2019 annual Global Health Conference on the RUFC blog. I have no idea where to start. From a frustrating visa process to exciting moments in LA, Hollywood, Santa Monica, Chicago, to an academic tour to some of the greatest universities in USA and a family reunion in Chicago and Ohio. Here is my story in bits!
In October 2018 I started the process and, after doing enough research, filled and submitted my online application in December. At this moment, I was asked to pay a total of UGX 640,000 which RUFC provided. I then scheduled a date for my interview on the 15th of January, did all the research, asked people who had been in the process and by 15th was ready to go. I will live to tell a story about this day, here is what happened!
Having my interview scheduled at 8:30am, I woke up at 6am to prepare, made sure I was on time, got in and answered all the questions asked by the consular in an interview that lasted for less than two minutes. After asking a couple questions, the consular faced her computer and said to me “Am sorry Mr. Katende you’re not eligible to enter the U.S at this moment." I didn’t know what to say to her really. At this moment she returned my passport accompanied with a rejection letter that made it clear not to appeal this decision unless circumstances for which the visa was denied had changed. I left the room in complete disbelief, not knowing why she said I was ineligible for the visa. But this is where the story gets interesting!
I immediately wrote to Prof. Heather informing her about this whole experience. I asked her if I should challenge the decision and her response was like everyone else's. “I don’t think reapplying will change anything”. She encouraged me to stay focused and wait for other opportunities in the future.
When everyone else thought it was never going to work, Ray’s opinion was different. Through his blog and other RUFC social media forums, Ray criticized the Embassy’s decision to deny people opportunities to travel due to what he referred as political reasons that had nothing to do with any of us. This gave me fresh energy and I decided to make an appeal, reapplied and was given a new appointment after making endless calls to the Embassy. Thanks to Ray’s cyber activism and my tenacity, my visa application was finally accepted on the 14th of February 2019.
At exactly 12:00am on 1st March, I set off from Entebbe airport for the much anticipated trip and it was a bit of a fluke because it was my 28th birthday at the same time. Celebrating birthdays has never been my thing, but this was a special one. After traveling for over 28 hours, from Entebbe through Dubai, I finally landed in Los Angeles at exactly 2:00pm, still on March 1st! And so after traveling for more than 24 hours, given the 11 hours’ time difference it was still my birthday. For the record, this is the longest birthday I have ever had!
Great thanks to the Wipfli family for making that day even better, from a warm welcome at the airport, to a fundraising event where Ray and I shared a lot about RUFC and Uganda, to dinner where I had a Mexican meal for the first time in my life. At which point I thought it was done, but the party was just getting started! After dinner we went home and this was the most amazing moment: I found a very big chocolate cake waiting, and moments later was showered with gifts from the entire family.
The following days we visited lots of places in California including Hollywood, Beverly Hills and the Pacific Ocean. On the third day Prof. Heather gave me an opportunity to interact with her class at the University of Southern California on mhealth and public health in Uganda. I was happy to answer a lot of questions about health Apps and solutions from these exciting young people.
There were lots of other events during my first week in Los Angeles, including great meals and reuniting with some of the students that have been part of our RUFC program in the previous years.
On 6th March, we travelled to Chicago for the annual global health conference. The weather in Chicago was far different from LA and of course the familiar Ugandan weather. I had never experienced so much cold ever before, but life is about adventure and this was part of a wonderful experience.
While in Chicago, we visited a number of famous places including Millennium Park and Willis Tower. But one of the most exciting and favorite things about this whole trip was trying different types of food from the diverse ethnic groups in USA, including Italian, Mexican, Japanese and Indian (see pictures below!).
I also attended a number of important sessions on global health and we were able to present our posters on RUFC work in Uganda. We interacted with a number of people and was I amused to find so many people who have worked in Uganda addressing different health challenges.
While in Chicago, I was able to see my brother and meet his family for the very first time. There’s nothing that’s as beautiful as meeting family, especially people you have not seen in a long period of time. In the last week of my trip, I visited other family and top universities in Ohio and was able to connect with quite a number of people in the different academic departments of interest.
Great thanks to Ray Wipfli for doing his utmost best to see that this trip happened. Ray put up a GoFundMe fundraising campaign to fund my air ticket and did a lot of activism to ensure I was granted a visa from the Embassy. Second I would like to thank the entire Wipfli family for hosting me and taking good care of me throughout the trip. Lastly to everyone who donated to fund my trip and supports RUFC activities, thank you very much!
|
# -*- coding: utf-8 -*-
"""
***************************************************************************
SpatialiteExecuteSQL.py
---------------------
Date : October 2016
Copyright : (C) 2016 by Mathieu Pellerin
Email : nirvn dot asia at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
from builtins import str
__author__ = 'Mathieu Pellerin'
__date__ = 'October 2016'
__copyright__ = '(C) 2016, Mathieu Pellerin'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
from processing.core.GeoAlgorithm import GeoAlgorithm
from processing.core.GeoAlgorithmExecutionException import GeoAlgorithmExecutionException
from processing.core.parameters import ParameterVector
from processing.core.parameters import ParameterString
from processing.tools import spatialite
from qgis.core import (QgsApplication,
QgsDataSourceUri)
class SpatialiteExecuteSQL(GeoAlgorithm):
    """Processing algorithm that runs an arbitrary SQL statement against a
    Spatialite database referenced by a vector-layer parameter."""

    DATABASE = 'DATABASE'
    SQL = 'SQL'

    def icon(self):
        return QgsApplication.getThemeIcon("/providerQgis.svg")

    def svgIconPath(self):
        return QgsApplication.iconPath("providerQgis.svg")

    def group(self):
        return self.tr('Database')

    def name(self):
        return 'spatialiteexecutesql'

    def displayName(self):
        return self.tr('Spatialite execute SQL')

    def defineCharacteristics(self):
        self.addParameter(ParameterVector(self.DATABASE, self.tr('File Database'), False, False))
        self.addParameter(ParameterString(self.SQL, self.tr('SQL query'), '', True))

    def processAlgorithm(self, context, feedback):
        """Execute the user-supplied SQL against the selected database.

        Raises:
            GeoAlgorithmExecutionException: if the SQL execution fails.
        """
        database = self.getParameterValue(self.DATABASE)
        uri = QgsDataSourceUri(database)
        # BUGFIX: the original tested ``uri.database() is ''`` — an identity
        # comparison against a string literal whose result is interpreter-
        # dependent.  Test for emptiness instead.
        if not uri.database():
            # The parameter may be a layer source string; strip any layer id
            # suffix before treating it as a plain db file path.
            if '|layerid' in database:
                database = database[:database.find('|layerid')]
            uri = QgsDataSourceUri('dbname=\'%s\'' % (database))
        self.db = spatialite.GeoDB(uri)

        # Newlines confuse the underlying executor; flatten to one line.
        sql = self.getParameterValue(self.SQL).replace('\n', ' ')
        try:
            self.db._exec_sql_and_commit(str(sql))
        except spatialite.DbError as e:
            raise GeoAlgorithmExecutionException(
                self.tr('Error executing SQL:\n{0}').format(str(e)))
|
Math Homework, Exams! Do you want to run away when faced with Algebra, Equations, Graphs, Geometry and Trigonometry? Identify how Online Tutoring works to maximize your possibility of getting helpful results. Math is often being neglected and feared by most students.
Online Math Tutoring program filters the skills and visualizes mathematical theories with greater understanding. The program is designed to offer animated lessons and worksheets to teach math in the conventional way. Tutorials take care of children’s interest while parting out the fear of calculations. The mentors at times use short videos to make the subject more exciting. The process guides students ranging between k-12 and hence need to use different advanced tools as applicable for each age group. For kindergarten and mid –school students’ lessons are taught in the form of interactive animations. Live chat with Online Math experts, getting instant help in resolving problems guarantees the most effective and enjoyable form of learning.
|
#!/usr/bin/env python3.6
"""Custom v2 to v3 converter for WordFilter

Usage:
------
Copy v2 data into the same directory as this script. This includes filter.json,
settings.json, whitelist.json, and command_blacklist.json.

Outputs:
--------
Saves a converted v3 json file in the same directory with the filename v3data.json.
Original data remains untouched.
"""
import json

UID = "5842647"  # When typed on a T9 keyboard: luicogs
BASE = {UID: {"GUILD": {}}}

v3Json = BASE


def _guild_settings(guild_id):
    """Return the v3 per-guild settings dict, creating it on first use."""
    return v3Json[UID]["GUILD"].setdefault(guild_id, {})


# Convert filters into v3 format.
with open("filter.json") as v2Filters:
    print("Converting filter.json...")
    for key, val in json.load(v2Filters).items():
        _guild_settings(key)["filters"] = val

with open("settings.json") as v2Settings:
    print("Converting settings.json...")
    for key, val in json.load(v2Settings).items():
        # Merge two dicts together, should have no conflicts.
        _guild_settings(key).update(val)

with open("whitelist.json") as v2Whitelist:
    print("Converting whitelist.json...")
    for key, val in json.load(v2Whitelist).items():
        _guild_settings(key)["channelAllowed"] = val

with open("command_blacklist.json") as v2CmdBlacklist:
    print("Converting command_blacklist.json..")
    for key, val in json.load(v2CmdBlacklist).items():
        _guild_settings(key)["commandDenied"] = val

with open("v3data.json", "w") as output:
    json.dump(v3Json, output, indent=4)
    print("Word filter data successfully converted to v3 format!")
|
The USRP E313 is a rugged and weatherproof SDR designed for outdoor deployment.
The USRP E313 is a rugged and weatherproof SDR designed for outdoor deployment. Containing an embedded USRP E310 inside an IP67-rated enclosure, the USRP E313 provides ingress protection against dust and water with extensive testing to ensure operation under demanding environmental conditions. The USRP E313 conveniently supports PoE with surge and lightning protection.
This stand-alone SDR features a 2×2 MIMO transceiver providing up to 56 MHz of bandwidth spanning frequencies from 70 MHz to 6 GHz to cover multiple bands of interest. The baseband processor uses the Xilinx Zynq-7020 System-on-Chip to deliver FPGA accelerated computations combined with stand-alone operation enabled by a dual-core ARM CPU.
The USRP Embedded Series platform uses the OpenEmbedded framework to create custom Linux distributions tailored to application specific needs. To reduce development effort the default operating system supports the USRP Hardware Driver™ (UHD) software API, as well as a variety of third party tools such as GNU Radio. Support for the RF Network on Chip (RFNoC™) FPGA development framework enables deterministic computations for real-time and wideband signal processing. Users can rapidly prototype and reliably deploy designs for embedded applications intended for the unpredictable outdoors.
|
"""Point and Rectangle classes.
This code is in the public domain.
Point -- point with (x,y) coordinates
Rect -- two points, forming a rectangle
Taken from https://wiki.python.org/moin/PointsAndRectangles
"""
import math
from typing import Tuple
class Point:
    """A point identified by (x,y) coordinates.

    supports: +, -, *, /, str, repr

    length -- calculate length of vector to point from origin
    distance_to -- calculate distance between two points
    as_tuple -- construct tuple (x,y)
    clone -- construct a duplicate
    integerize -- convert x & y to integers
    floatize -- convert x & y to floats
    move_to -- reset x & y
    slide -- move (in place) +dx, +dy, as spec'd by point
    slide_xy -- move (in place) +dx, +dy
    rotate -- rotate around the origin
    rotate_about -- rotate around another point
    """

    def __init__(self, x: float = 0.0, y: float = 0.0):
        self.x = x
        self.y = y

    @classmethod
    def from_tuple(cls, pt_as_tuple: Tuple[float, float]):
        """Build a Point from an (x, y) tuple."""
        return cls(x=pt_as_tuple[0], y=pt_as_tuple[1])

    def __iter__(self):
        return iter((self.x, self.y))

    def __getitem__(self, item):
        """Allow indexing: p[0] -> x, p[1] -> y."""
        if item == 0:
            return self.x
        elif item == 1:
            return self.y
        else:
            raise RuntimeError("Index %d does not make sense in a point" % (item))

    def __attrs(self):
        """
        All attributes in a single representation.

        Returns:
            A tuple with all attributes (used by __eq__ and __hash__).
        """
        return (self.x, self.y)

    def __eq__(self, other):
        return isinstance(other, Point) and self.__attrs() == other.__attrs()

    def __hash__(self):
        return hash(self.__attrs())

    def __add__(self, another_pt):
        """Point(x1+x2, y1+y2)"""
        return Point(self.x + another_pt.x, self.y + another_pt.y)

    def __sub__(self, another_point):
        """Point(x1-x2, y1-y2)"""
        return Point(self.x - another_point.x, self.y - another_point.y)

    def __isub__(self, another_point):
        """In-place subtraction: p -= q.

        BUGFIX: this previously *added* the other point's coordinates.
        """
        self.x -= another_point.x
        self.y -= another_point.y
        return self

    def __mul__(self, scalar):
        """Point(x*scalar, y*scalar)"""
        return Point(self.x * scalar, self.y * scalar)

    def __truediv__(self, scalar):
        """Point(x/scalar, y/scalar)

        BUGFIX: Python 3's / operator calls __truediv__; the original only
        defined __div__ (a Python 2 hook), so division raised TypeError.
        """
        return Point(self.x / scalar, self.y / scalar)

    # Backward-compatible alias for any caller invoking the old name directly.
    __div__ = __truediv__

    def __str__(self):
        return "(%.2f, %.2f)" % (self.x, self.y)

    def __repr__(self):
        return "%s(%r, %r)" % (self.__class__.__name__, self.x, self.y)

    def length(self) -> float:
        """norm of vector (0,0) to this point"""
        return math.sqrt(self.x ** 2 + self.y ** 2)

    def distance_to(self, another_point) -> float:
        """Calculate the distance between two points."""
        return (self - another_point).length()

    def as_tuple(self):
        """(x, y)"""
        return (self.x, self.y)

    def clone(self):
        """Return a full copy of this point."""
        return Point(self.x, self.y)

    def integerize(self):
        """Convert co-ordinate values to integers (rounding); returns self."""
        self.x = int(round(self.x))
        self.y = int(round(self.y))
        return self

    def floatize(self):
        """Convert co-ordinate values to floats; returns self for chaining.

        (Made consistent with integerize(), which already returned self;
        previous callers that ignored the None return are unaffected.)
        """
        self.x = float(self.x)
        self.y = float(self.y)
        return self

    def move_to(self, x, y):
        """Reset x & y coordinates."""
        self.x = x
        self.y = y

    def translate_following(self, a_vector):
        """
        Move to new (x+dx,y+dy).

        :param a_vector: Vector 2D I have to follow.
        :return: self.
        """
        self.x = self.x + a_vector.x
        self.y = self.y + a_vector.y
        return self

    def slide_xy(self, dx, dy):
        '''Move to new (x+dx,y+dy).

        Can anyone think up a better name for this function?
        slide? shift? delta? move_by?
        '''
        self.x = self.x + dx
        self.y = self.y + dy

    def rotate(self, rad):
        """Rotate counter-clockwise by rad radians.

        Positive y goes *up,* as in traditional mathematics.

        Interestingly, you can use this in y-down computer graphics, if
        you just remember that it turns clockwise, rather than
        counter-clockwise.

        The new position is returned as a new Point.
        """
        a_sinus, a_cosinus = [f(rad) for f in (math.sin, math.cos)]
        x, y = (a_cosinus * self.x - a_sinus * self.y, a_sinus * self.x + a_cosinus * self.y)
        return Point(x, y)

    def rotate_about(self, a_point, theta):
        """Rotate counter-clockwise around *a_point* by *theta* radians.

        (The old docstring said degrees, but theta is passed straight to
        rotate(), which takes radians.)

        Positive y goes *up,* as in traditional mathematics.

        The new position is returned as a new Point.

        BUGFIX: rotate() returns a new Point rather than mutating in place;
        the original discarded that result, so no rotation was applied.
        """
        result = self.clone()
        result.slide_xy(-a_point.x, -a_point.y)
        result = result.rotate(theta)
        result.slide_xy(a_point.x, a_point.y)
        return result
# The origin point, shared as a module-level constant.
POINT_ZEROZERO = Point(x=0.0, y=0.0)


def average_between(pt1: Point, pt2: Point) -> Point:
    """Return the midpoint of the segment joining *pt1* and *pt2*."""
    mid_x = (pt1.x + pt2.x) / 2
    mid_y = (pt1.y + pt2.y) / 2
    return Point(mid_x, mid_y)
|
consideration, generally of money; a buyer; a vendee.
means other than by descent or inheritance.
PURCHASER, contracts. A buyer, a vendee.
contracts, may become purchasers both of real and personal property.
further recover back the purchase-money. 1 Ld. Raym. 224; 1 Madd. Ch. R.
heirs may waive it. Co. Lift. 3 a; Dougl. R. 452.
heirs may avoid the purchase when they die during their lunacy or idiocy.
7.-2. It is a general rule that trustees 2 Bro. C. C. 400; 3 Bro. C. C.
bankrupts; assignees of bankrupts; solicitors to the commission; 6 Ves. Jr.
h.t.; 2 Supp. to Ves. Jr. 449, 267, 478; Yelv. 45; 2 Ves. Jr. 100; 8 Coin.
Dig. 349; 3 Com. Dig. 108.
|
#!/usr/bin/env python
"""
This script extracts all JavaScript from a supplied PDF file.
The script finds all JavaScript by walking the PDF tree and looking for all text
referenced with a JS entry, as specified in Section 8.5 of PDF v1.7.
"""
import sys
import StringIO
import warnings
import PyPDF2
from PyPDF2.generic import DictionaryObject, ArrayObject, IndirectObject
from PyPDF2.utils import PdfReadError
def walk_pdf_tree(node, already_visited=None):
    """Recursively yield *node* and every PDF object reachable from it.

    Pre-order traversal: the node itself is yielded before its children.
    Dictionary and array children are walked in place; indirect objects are
    dereferenced at most once each.

    :param node: any PyPDF2 generic object (dictionary, array, indirect
        reference, or leaf value).
    :param already_visited: set of indirect-object id numbers already seen,
        threaded through the recursion to break reference cycles.
    """
    # Indirect objects can refer to each other in a loop.
    # Maintain a set of visited nodes to avoid a stack overflow
    if already_visited is None:
        already_visited = set()
    yield node
    # Walk through the node's children
    # (The inner loops deliberately reuse the name "node"; the parent has
    # already been yielded above, so the rebinding is harmless.)
    if isinstance(node, DictionaryObject):
        for k, v in node.iteritems():
            for node in walk_pdf_tree(v, already_visited):
                yield node
    elif isinstance(node, ArrayObject):
        for v in node:
            for node in walk_pdf_tree(v, already_visited):
                yield node
    elif isinstance(node, IndirectObject):
        idnum = node.idnum
        if idnum in already_visited:
            pass
        else:
            already_visited.add(idnum)
            # Dereferencing an object can sometimes fail
            try:
                v = node.getObject()
            except PdfReadError:
                # Skip unreadable objects rather than aborting the walk.
                pass
            else:
                for node in walk_pdf_tree(v, already_visited):
                    yield node
def find_js(pdf_object):
    """Collect the data of every /JS entry reachable from the PDF trailer."""
    scripts = []
    root = pdf_object.trailer
    # Ignore warnings from failed Object dereferences
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        for candidate in walk_pdf_tree(root):
            if isinstance(candidate, DictionaryObject) and '/JS' in candidate.keys():
                scripts.append(candidate['/JS'].getData())
    return scripts
def extract_js(fpath):
    """Read the PDF at *fpath* and print all embedded JavaScript.

    Scripts are printed separated by blank lines; nothing is printed when
    the document contains no /JS entries.
    """
    with open(fpath, 'rb') as fh:
        src_pdf_blob = fh.read()
    # Parse from an in-memory buffer so the file handle can close early.
    pdf_data = PyPDF2.PdfFileReader(StringIO.StringIO(src_pdf_blob))
    js = find_js(pdf_data)
    if js:
        print "\n\n".join(js)
def main():
if len(sys.argv) < 2:
print "USAGE: %s %s <filename>" % (sys.executable, sys.argv[0])
sys.exit(1)
fpath = sys.argv[1]
extract_js(fpath)
if __name__ == "__main__":
main()
|
Manchester United hosted Burnley at Old Trafford on Saturday, in what was their 10th league game of the season.
Despite putting in a dominant performance, Jose Mourinho’s men were left completely frustrated at full-time, after failing to break the Burnley defence. The game ended goalless, with both teams sharing the spoils.
Was hardly tested. Spent most of the game on the offensive, by creating from the back.
Successfully replaced Valencia at right-back, and was industrious both in defence and on offense. Almost won United a penalty.
Commanded the defence and created from the back.
Did the job in both Bailly and Smalling’s absence.
Failed to contribute much in the attack, but made a crucial tackle on Andre Gray to prevent a one-on-one situation.
Pulled strings from midfield and orchestrated several attacks. Got sent off in the second half rather unluckily.
Attempted several shots on target, but failed to score. Whipped in some dangerous crosses late in the game – the best of which was completely wasted by his Swedish friend.
Missed a good chance to score by heading the ball off target. Far from good enough to justify his selection over a certain Armenian playmaker.
Put in a captain’s performance. Created numerous chances, had some decent attempts at goal, and managed to hit the post once. Did everything right, but got bizarrely subbed off when he was needed the most.
Was a live wire on the left; not so much when moved to the right. Another player to get taken off for no logical reason.
Poor and wasteful. Missed a sitter. Needs to be dropped.
Failed to get into the game.
Slowed down United’s tempo. Could’ve won his team all 3 points, but failed to hit the target from a well-worked free-kick.
Didn’t do much other than helping his teammates mount a protest around Clattenburg after the full-time whistle.
|
# Optional speed-up: psyco is a Python 2 JIT specializer; degrade gracefully
# (with a console note) when it is not installed.
try:
    from psyco import full
except ImportError:
    print 'no psyco'
from pyglet import options as pyglet_options
# Disable pyglet's per-call OpenGL error checking for speed.
pyglet_options['debug_gl'] = False
import pyglet
from miru.ui import TestWindow
from miru.context import context
from miru import options as miru_options
from miru import camera
from miru import utils
from miru import input
from miru import graphics
from miru import core
from miru.ext import geom
import os
P = os.path.join  # shorthand for building demo asset paths

# initialize the window
w = TestWindow(680, 400)
w.set_vsync(False)  # uncap the frame rate so the FPS display is meaningful
utils.addFpsDisplay()
context.window = w
context.control = input.SimpleMouseControl()
context.camera.pos += (0,1,2)
context.camera.angle = (10,0,0)
#context.handle.accessible = True

# Load the floating demo model and place it above the grid.
o = graphics.load_wobj(P('docs','demo','alien.obj'))
o.pos += (0,0.95,-0.6)
context.add_object(o)

# Play around with the spot light
context.camera.lights = camera.LightGroup([
    camera.DirectionalLight(diffuse=(0.1,0.1,0.1,1)),
    # White spotlight that stays focussed on the floating object.
    camera.PositionalLight(pos=(0,2,0), spot_cutoff=25,
                           track_target=o, spot_exponent=10, kq=0.1),
    camera.PositionalLight(pos=(-0.54,1.3,2.5), diffuse=(0.9,0,0,1)),
    camera.PositionalLight(pos=(1.6,1.3,2.7), diffuse=(0,0.9,0,1)),
    camera.PositionalLight(pos=(-2.7,1.7,0.3), diffuse=(0,0,0.9,1)),
])
# Show draggable debug markers for every light except the directional one.
for i in range(1,len(context.camera.lights)):
    context.camera.lights[i].debug = True

# Build a 7x7 grid of cubes in a single batch for one-call rendering.
batch = pyglet.graphics.Batch()
color_group = graphics.ColorGroup((0.5,0.5,0.5,1.0))
objs = []
for x in range(-3, 4):
    for z in range(-3, 4):
        sphere = geom.Cube(0.7)
        geom.transform(sphere, (x * 1.23, 0, z * 1.23))
        geom.get_vlist(sphere, batch, color_group)
context.add_object(core.Object(batch))

print """
You should see a small grid of objects with some
colored lights over it. You should be able to click
on the bulb of a light to move it around. The white
light is a spotlight which remains focussed on the
object floating above the grid.
"""

# Manual main loop: tick the clock, pump window events, render, swap buffers.
while not w.has_exit:
    pyglet.clock.tick()
    w.clear()
    w.dispatch_events()
    context.render()
    w.flip()
w.close()
|
Chinese stories must be written to talk to the entire world, Chinese children’s fiction writer Cao Wenxuan, the first Chinese author to ever win the Hans Christian Andersen Prize, said after receiving the 2016 award at the ongoing Bologna Children’s Book Fair in Italy.
The prize is awarded by the International Board on Books for Young People (IBBY) and is considered as the highest international recognition given every other year to a living author and an illustrator whose complete works have made a lasting contribution to children’s literature.
“In China we are used to saying that someone has a background to indicate that he or she is a determined person. This background adds to personal strength and makes someone great. Talking about myself, my background is China,” Cao told Xinhua in an interview.
Cao said he was deeply nourished by China’s troubled history and by his rural hometown, a village in Yancheng, Jiangsu province, where he was born in 1954 and spent childhood in poverty. But hardship ultimately turned into his spiritual and literary wealth.
Cao used literature to escape the world, and at the age of 17 published his first children’s story. He was then able to study at Peking University, and is now a professor of Chinese literature and children’s literature, and has published over 100 works appreciated by millions of readers.
“In the Western world, children’s books may be set in different countries, in London or in Paris for example. Instead all of my stories happen in China,” Cao went on saying.
“I tell many genuine Chinese stories, but at the same time all of them belong to humankind, and I think this is the main reason why I won the Andersen Prize. The themes of my Chinese stories are universal,” he highlighted.
In his view, children’s literature aims to provide people with the fundamentals of humane behavior, through moral principles, the beauty of language and a sympathetic soul narrated by highly literary stories as a pure form of art.
“I have always believed since I came close to literature that literature first of all is literature, it is not anything different, that is to say literature must be anchored in art … only in this way writing has the ability and vitality to transcend time and space,” he told Xinhua.
Many of Cao’s novels, short stories, and picture books have been translated into English, French, German, Russian, Greek, Swedish, Danish, Japanese, Korean, and Vietnamese, such as The Straw House, Bronze and Sunflower, Red Tiles and Black Tiles, King Book, Dingding and Dangdang, Ximi, and A Feather.
Cao explained to Xinhua that whether a book can make its way into the world, that is to say whether it is suitable for translation into other languages, is also a fundamental indicator of its literary value. This is why he has so fully used a specifically Chinese language but at the same time has searched for a universal portrait of life’s ebbs and flows, so as to make sure that the most important messages of his books do not get lost.
Cao said the prize also reinforced his ample faith in the high place of Chinese literature for children in the world.
“Chinese children’s literature is clearly powerful. And I am sure there will be other Chinese authors who will win the Hans Christian Andersen Prize in the future,” he concluded.
|
# coding: utf-8
"""
DocuSign REST API
The DocuSign REST API provides you with a powerful, convenient, and simple Web services API for interacting with DocuSign. # noqa: E501
OpenAPI spec version: v2.1
Contact: devcenter@docusign.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class TemplateUpdateSummary(object):
    """NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    """
    """
    Attributes:
      swagger_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    # Maps python attribute name -> swagger-declared type; to_dict relies on
    # these declarations to know which attributes exist.
    swagger_types = {
        'bulk_envelope_status': 'BulkEnvelopeStatus',
        'envelope_id': 'str',
        'error_details': 'ErrorDetails',
        'list_custom_field_update_results': 'list[ListCustomField]',
        'lock_information': 'LockInformation',
        'purge_state': 'str',
        'recipient_update_results': 'list[RecipientUpdateResponse]',
        'tab_update_results': 'Tabs',
        'text_custom_field_update_results': 'list[TextCustomField]'
    }
    # Maps python attribute name -> JSON key used on the wire.
    attribute_map = {
        'bulk_envelope_status': 'bulkEnvelopeStatus',
        'envelope_id': 'envelopeId',
        'error_details': 'errorDetails',
        'list_custom_field_update_results': 'listCustomFieldUpdateResults',
        'lock_information': 'lockInformation',
        'purge_state': 'purgeState',
        'recipient_update_results': 'recipientUpdateResults',
        'tab_update_results': 'tabUpdateResults',
        'text_custom_field_update_results': 'textCustomFieldUpdateResults'
    }

    def __init__(self, bulk_envelope_status=None, envelope_id=None, error_details=None, list_custom_field_update_results=None, lock_information=None, purge_state=None, recipient_update_results=None, tab_update_results=None, text_custom_field_update_results=None):  # noqa: E501
        """TemplateUpdateSummary - a model defined in Swagger"""  # noqa: E501
        # Values are stored in private attributes; the public names below are
        # properties that delegate to them.
        self._bulk_envelope_status = None
        self._envelope_id = None
        self._error_details = None
        self._list_custom_field_update_results = None
        self._lock_information = None
        self._purge_state = None
        self._recipient_update_results = None
        self._tab_update_results = None
        self._text_custom_field_update_results = None
        self.discriminator = None
        # Only explicitly supplied values are assigned, so omitted fields
        # remain None rather than overwriting with defaults.
        if bulk_envelope_status is not None:
            self.bulk_envelope_status = bulk_envelope_status
        if envelope_id is not None:
            self.envelope_id = envelope_id
        if error_details is not None:
            self.error_details = error_details
        if list_custom_field_update_results is not None:
            self.list_custom_field_update_results = list_custom_field_update_results
        if lock_information is not None:
            self.lock_information = lock_information
        if purge_state is not None:
            self.purge_state = purge_state
        if recipient_update_results is not None:
            self.recipient_update_results = recipient_update_results
        if tab_update_results is not None:
            self.tab_update_results = tab_update_results
        if text_custom_field_update_results is not None:
            self.text_custom_field_update_results = text_custom_field_update_results

    @property
    def bulk_envelope_status(self):
        """Gets the bulk_envelope_status of this TemplateUpdateSummary.  # noqa: E501

        :return: The bulk_envelope_status of this TemplateUpdateSummary.  # noqa: E501
        :rtype: BulkEnvelopeStatus
        """
        return self._bulk_envelope_status

    @bulk_envelope_status.setter
    def bulk_envelope_status(self, bulk_envelope_status):
        """Sets the bulk_envelope_status of this TemplateUpdateSummary.

        :param bulk_envelope_status: The bulk_envelope_status of this TemplateUpdateSummary.  # noqa: E501
        :type: BulkEnvelopeStatus
        """
        self._bulk_envelope_status = bulk_envelope_status

    @property
    def envelope_id(self):
        """Gets the envelope_id of this TemplateUpdateSummary.  # noqa: E501

        The envelope ID of the envelope status that failed to post.  # noqa: E501

        :return: The envelope_id of this TemplateUpdateSummary.  # noqa: E501
        :rtype: str
        """
        return self._envelope_id

    @envelope_id.setter
    def envelope_id(self, envelope_id):
        """Sets the envelope_id of this TemplateUpdateSummary.

        The envelope ID of the envelope status that failed to post.  # noqa: E501

        :param envelope_id: The envelope_id of this TemplateUpdateSummary.  # noqa: E501
        :type: str
        """
        self._envelope_id = envelope_id

    @property
    def error_details(self):
        """Gets the error_details of this TemplateUpdateSummary.  # noqa: E501

        :return: The error_details of this TemplateUpdateSummary.  # noqa: E501
        :rtype: ErrorDetails
        """
        return self._error_details

    @error_details.setter
    def error_details(self, error_details):
        """Sets the error_details of this TemplateUpdateSummary.

        :param error_details: The error_details of this TemplateUpdateSummary.  # noqa: E501
        :type: ErrorDetails
        """
        self._error_details = error_details

    @property
    def list_custom_field_update_results(self):
        """Gets the list_custom_field_update_results of this TemplateUpdateSummary.  # noqa: E501

        # noqa: E501

        :return: The list_custom_field_update_results of this TemplateUpdateSummary.  # noqa: E501
        :rtype: list[ListCustomField]
        """
        return self._list_custom_field_update_results

    @list_custom_field_update_results.setter
    def list_custom_field_update_results(self, list_custom_field_update_results):
        """Sets the list_custom_field_update_results of this TemplateUpdateSummary.

        # noqa: E501

        :param list_custom_field_update_results: The list_custom_field_update_results of this TemplateUpdateSummary.  # noqa: E501
        :type: list[ListCustomField]
        """
        self._list_custom_field_update_results = list_custom_field_update_results

    @property
    def lock_information(self):
        """Gets the lock_information of this TemplateUpdateSummary.  # noqa: E501

        :return: The lock_information of this TemplateUpdateSummary.  # noqa: E501
        :rtype: LockInformation
        """
        return self._lock_information

    @lock_information.setter
    def lock_information(self, lock_information):
        """Sets the lock_information of this TemplateUpdateSummary.

        :param lock_information: The lock_information of this TemplateUpdateSummary.  # noqa: E501
        :type: LockInformation
        """
        self._lock_information = lock_information

    @property
    def purge_state(self):
        """Gets the purge_state of this TemplateUpdateSummary.  # noqa: E501

        # noqa: E501

        :return: The purge_state of this TemplateUpdateSummary.  # noqa: E501
        :rtype: str
        """
        return self._purge_state

    @purge_state.setter
    def purge_state(self, purge_state):
        """Sets the purge_state of this TemplateUpdateSummary.

        # noqa: E501

        :param purge_state: The purge_state of this TemplateUpdateSummary.  # noqa: E501
        :type: str
        """
        self._purge_state = purge_state

    @property
    def recipient_update_results(self):
        """Gets the recipient_update_results of this TemplateUpdateSummary.  # noqa: E501

        # noqa: E501

        :return: The recipient_update_results of this TemplateUpdateSummary.  # noqa: E501
        :rtype: list[RecipientUpdateResponse]
        """
        return self._recipient_update_results

    @recipient_update_results.setter
    def recipient_update_results(self, recipient_update_results):
        """Sets the recipient_update_results of this TemplateUpdateSummary.

        # noqa: E501

        :param recipient_update_results: The recipient_update_results of this TemplateUpdateSummary.  # noqa: E501
        :type: list[RecipientUpdateResponse]
        """
        self._recipient_update_results = recipient_update_results

    @property
    def tab_update_results(self):
        """Gets the tab_update_results of this TemplateUpdateSummary.  # noqa: E501

        :return: The tab_update_results of this TemplateUpdateSummary.  # noqa: E501
        :rtype: Tabs
        """
        return self._tab_update_results

    @tab_update_results.setter
    def tab_update_results(self, tab_update_results):
        """Sets the tab_update_results of this TemplateUpdateSummary.

        :param tab_update_results: The tab_update_results of this TemplateUpdateSummary.  # noqa: E501
        :type: Tabs
        """
        self._tab_update_results = tab_update_results

    @property
    def text_custom_field_update_results(self):
        """Gets the text_custom_field_update_results of this TemplateUpdateSummary.  # noqa: E501

        # noqa: E501

        :return: The text_custom_field_update_results of this TemplateUpdateSummary.  # noqa: E501
        :rtype: list[TextCustomField]
        """
        return self._text_custom_field_update_results

    @text_custom_field_update_results.setter
    def text_custom_field_update_results(self, text_custom_field_update_results):
        """Sets the text_custom_field_update_results of this TemplateUpdateSummary.

        # noqa: E501

        :param text_custom_field_update_results: The text_custom_field_update_results of this TemplateUpdateSummary.  # noqa: E501
        :type: list[TextCustomField]
        """
        self._text_custom_field_update_results = text_custom_field_update_results

    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}
        # Recursively convert nested models / lists / dicts of models.
        for attr, _ in six.iteritems(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value
        if issubclass(TemplateUpdateSummary, dict):
            for key, value in self.items():
                result[key] = value
        return result

    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        # Generated models compare by full attribute state, never identity.
        if not isinstance(other, TemplateUpdateSummary):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
|
Due to circumstances beyond our control the Red Rubber Band will be on hiatus. We will be polishing up some new stuff and recharging our batteries for bigger and better things. We really appreciate all our family and friends that have supported us all these years. Looking forward to seeing all of you again soon.
The Mt. Hood happening has revived itself for the second year at the Sky bar and grill in ZigZag OR. This is a beautiful setting with a large outdoor patio and garden with artworks, fire pit and a large stage for the bands to spread out on. This years planners have figured out a way to get a second stage up to keep the show flowing. I hope everyone can make it out for some good food and great music.
Our good friend Ocean Riggs has joined the band on guitar!!!!!!!!!!!!
Saturday night at the Skyway was sooooo coool! The dance floor was packed and the band really stepped it up to keep the feet movin'. Special thanks to Tracy and Tom for the warm welcome, the great food and fabulous venue to party in. Its really worth the drive to catch the Red Rubber Band at the Skyway!
Our friend John Bigelo has joined RRB and brings along a free-flowing guitar sound you will love.
I like to think of September in Oregon as late summer. We have a bunch of great shows coming up. I hope all my friends can make it out to one of our 8 shows in 6 weeks.
"Lost Creek Gang" and former "Jerry Joseph" drummer Brad Rosen brings his rock steady sound to "The Skyway" "The Tonic" and "Rock Creek"
Chris digs through all his photos and finds some great shots. See them on photo page. Also, live tracks from "Lovejoy Studios" C.D. Release Party available on music page. Enjoy!!
RRB brings Jeff Brooks on board for management, promotion and booking support. Not only is Jeff one of the nicest guys you'll ever meet, he also brings a wealth of experience and connections. Most recently he was the tour manager for "Tea Leaf Green". He travelled all over the country with them, as this stunning jam band from San Francisco exploded onto the scene.
The Ratskeller Dubs RRB "Local Favorite!" Winter is our favorite time to play on Mt.Hood. With Ski Patroller Thomas Moore (aka Ski Dog) on Bass, the locals are coming out to dance and party with the band. We will be back at "The Skyway" in ZigZag on March 14th. Come out and play with us!
Our dear friend Scrafford Orser from "Buds of May" and "Jerry Garcia B-Day" fame joins the band for our spring and summer run of shows. Scraff played extensively with Chris and Lee in "The Band of People" and "The Mother Tones". We are dusting off some old tunes and working on some new collaborations as well. Come check it out soon!
As you can see we have a new website! There is still a lot of material to be added but it is coming along. The email list signup allows you to share as much info about YOU with the band as you want. Just enter your email address in the box and click Join, fill out the form and submit. By sharing your name and additional info we will be able to share more personalized and targeted mailings with our fans.
Also check out the guestbook feature, it allows you to post a quick note back to us and other website visitors.
I hope you like it and come back often to check out the new stuff as it gets added.
Just a quick note that our new CD "Life's Like That" is now available everywhere.
|
import json
import logbook
import zmq.green as zmq
from kitten.request import RequestError
class KittenClient(object):
    """Small ZeroMQ REQ client that sends a JSON request and waits for a
    single JSON reply, with a fixed poll timeout.
    """

    log = logbook.Logger('KittenClient')
    timeout = 2000  # reply poll timeout, in milliseconds

    def send(self, address, request):
        """Send *request* (a JSON-serializable object) to *address*.

        :param address: host:port string the REQ socket connects to
        :param request: object serialized with send_json
        :return: the decoded JSON reply
        :raises RequestError: with code 'TIMEOUT' when no reply arrives
            within ``self.timeout`` milliseconds
        """
        self.log.info('Sending request on {1}: {0}', request, address)
        socket = self.connect(address)
        socket.send_json(request)

        self.log.info('Waiting for reply')
        events = self.poll_reply(socket)
        if not events:
            msg = 'Timeout after {0}ms'.format(self.timeout)
            self.log.error(msg)
            self.close(socket)
            raise RequestError('TIMEOUT', msg)

        # TODO: Can JSON events come in multiparts? Probably not?
        response = events[0][0].recv_json()
        self.log.info(response)
        self.close(socket)
        return response

    def close(self, socket):
        """Close a socket obtained from :meth:`connect`."""
        socket.close()
        # TODO: Figure out why destroying the context makes the application
        # hang indefinitely.
        # self.context.destroy()
        # del self.context

    def connect(self, address):
        """Create a REQ socket connected to ``tcp://<address>``.

        The Context is created lazily and reused for the lifetime of the
        client; the previous version created a brand-new ``zmq.Context()``
        on every call, leaking all earlier contexts.
        """
        if getattr(self, 'context', None) is None:
            self.context = zmq.Context()
        socket = self.context.socket(zmq.REQ)
        host = 'tcp://{0}'.format(address)
        socket.connect(host)
        return socket

    def poll_reply(self, socket):
        """Poll *socket* for an incoming reply.

        :return: list of (socket, event) pairs; empty on timeout
        """
        poller = zmq.Poller()
        poller.register(socket, zmq.POLLIN)
        events = poller.poll(self.timeout)
        return events
|
[Closed] This forum is read only. Visit the vote site to report bugs or make feature requests.
- new: Article creator will go to next keyword if not enough content is encountered.
|
#!/usr/bin/env python
import matplotlib.pyplot as plt
import matplotlib as mpl
from pylab import *
import numpy as np
import sys
sys.path.insert(0, '../')
import kicks
from scipy.stats import maxwell
# Matplotlib rc parameters for a small, publication-quality PDF figure.
params = {'backend': 'pdf',
          'figure.figsize': [4.3, 2.2],
          'font.family': 'serif',
          'font.size': 10,
          'font.serif': 'Times Roman',
          'axes.titlesize': 'medium',
          'axes.labelsize': 'medium',
          'legend.fontsize': 8,
          'legend.frameon': False,
          'text.usetex': True,
          'figure.dpi': 600,
          'lines.markersize': 4,
          'lines.linewidth': 3,
          'lines.antialiased': False,
          'path.simplify': False,
          'legend.handlelength': 3,
          'figure.subplot.bottom': 0.2,
          'figure.subplot.top': 0.95,
          'figure.subplot.left': 0.15,
          'figure.subplot.right': 0.92}

# Color-blind-safe hex palette.
hexcols = ['#332288', '#88CCEE', '#44AA99', '#117733', '#999933', '#DDCC77',
           '#CC6677', '#882255', '#AA4499', '#661100', '#6699CC', '#AA4466', '#4477AA']

mpl.rcParams.update(params)

# For each fixed kick velocity 0..700 km/s in steps of 5, sample outcomes
# with kicks.sample_kick_distribution_P using a delta-function velocity
# distribution (vdist returns the single value vkick).  Each row of A is
# [vkick, <the quantities returned by sample_kick_distribution_P>]; columns
# 1 and 2 are plotted below as merge and disrupt fractions.
# NOTE(review): the meaning of the positional arguments (23, 5.5, 55, 1.4)
# is defined by the project-local `kicks` module — presumably masses/period
# parameters; confirm against kicks.sample_kick_distribution_P.
A = np.array([np.append([vkick], kicks.sample_kick_distribution_P(23, 5.5, 55, 1.4, vdist=lambda x: [float(vkick)], num_v=5, num_theta=400, num_phi=100)) for vkick in range(0, 701, 5)])
print(A)
print(A[:, 0])  # kick velocities
print(A[:, 1])  # merge fractions
print(A[:, 2])  # disrupt fractions

fig, axes = plt.subplots(1)
# Reference Maxwellian (sigma = 265 km/s), normalized to peak at 1.
maxw = axes.fill_between(A[:, 0], 0, maxwell.pdf(A[:, 0], scale=265.) / max(maxwell.pdf(A[:, 0], scale=265.)), color="b", alpha=0.2, label="Maxwellian, $\\sigma=265~\\rm km~s^{-1}$")
# Merge fraction is scaled by 10 so it is visible on the same axis.
merge, = axes.plot(A[:, 0], 10 * A[:, 1], color=hexcols[2], label="GW merge fraction $\\times$ 10")
disrupt, = axes.plot(A[:, 0], A[:, 2], color=hexcols[8], ls="--", label="Disrupt fraction")
axes.set_xlabel("$v_{\\rm kick}~\\rm[km~s^{-1}]$")
axes.set_ylabel("fraction")
#axes.set_xlim([0,50])
axes.set_ylim([0, 1.19])
axes.legend([maxw, merge, disrupt], ["Maxwellian, $\\sigma=265~\\rm km~s^{-1}$", "GW merge fraction $\\times$ 10", "Disrupt fraction"], loc="upper left", fontsize=7)
plt.savefig("kick_dist.pdf")
#plt.clf()
#plt.close(plt.gcf())
|
Thank you to our customers from Minnetonka! For years, Minnetonka residents have chosen Plymouth Village Automotive Inc. for auto repair and auto service. We appreciate the loyalty that we have found from our Minnetonka customers.
For new customers, Plymouth Village Automotive Inc. is a leading auto repair shop that has a reputation for honest and dependable repair work. Our services include 4x4 Services, Brakes, Car & Truck Care, Domestic Cars & Trucks, Electrical Services, Electronic Services, Emergency Roadside Assistance, Engine & Transmission, Engine Maintenance, General Services, Heating and Cooling Services, Import Cars & Trucks, Miscellaneous Services, Towing, Towing & Recovery, Transmission Services and Undercar Services.
For questions please give us a call at 763-544-3202.
Plymouth Village Automotive Inc. located at 10905 Old County Rd 15 is where customers go to find affordable, professional car care!
|
from django.apps import apps
from django.conf import settings
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django_fsm import FSMField, transition
from model_utils import Choices
class Booking(models.Model):
    """A user's booking of a provider service, with an approval workflow.

    ``status`` is a django-fsm state machine: bookings start as REQUESTED
    and move to APPROVED, REJECTED or CANCELLED through the transition
    methods below.
    """

    STATES = Choices(
        ('REQUESTED', 'requested', _('Requested')),
        ('APPROVED', 'approved', _('Approved')),
        ('REJECTED', 'rejected', _('Rejected')),
        ('CANCELLED', 'cancelled', _('Cancelled'))
    )
    # on_delete is mandatory on Django >= 2.0; CASCADE matches the implicit
    # default of earlier Django versions, so behaviour is unchanged.
    service = models.ForeignKey(
        to='providers.ProviderService',
        on_delete=models.CASCADE
    )
    status = FSMField(
        default=STATES.requested
    )
    booked_by = models.ForeignKey(
        to=settings.AUTH_USER_MODEL,
        on_delete=models.CASCADE
    )
    booked_for = models.DateTimeField()
    booked_on = models.DateTimeField(
        auto_now_add=True,
        editable=False
    )
    last_updated = models.DateTimeField(
        auto_now=True
    )

    @transition(status,
                source='REQUESTED',
                target='CANCELLED')
    def cancel(self):
        """Cancel a pending request."""

    @transition(status,
                source='REQUESTED',
                target='APPROVED')
    def approve(self):
        """Approve a pending request and issue a Ticket for it."""
        # Resolved lazily via the app registry to avoid a circular import.
        Ticket = apps.get_model('bookings', 'Ticket')
        Ticket(booking=self).save()

    @transition(status,
                source='REQUESTED',
                target='REJECTED')
    def reject(self):
        """Reject a pending request."""
|
From Pres. Trump’s State of the Union address, to the emergency items mentioned by Gov. Abbott in his State of the State address, to the Texas Secretary of State issuing a flawed list of potential non-citizens registered to vote, our experts recap the latest national, state, and local stories in this week’s political roundup.
In the audio above, Craig Cohen discusses those and other stories with Renée Cross of the University of Houston and Jay Aiyer from Texas Southern University and co-host of Houston Public Media’s Party Politics podcast.
|
#!/usr/bin/env python
import os
import stat
import tempfile
import re
import getpass
import json
spy_file_pattern = re.compile(r'(.*)\.spy$')
shellpy_meta_pattern = re.compile(r'#shellpy-meta:(.*)')
shellpy_encoding_pattern = '#shellpy-encoding'
def preprocess_module(module_path):
    """Compile every shellpy file inside a module tree to python.

    Recursively walks *module_path* and converts each ``*.spy`` file found,
    leaving the generated python files in the translated temp location.

    :param module_path: the path of the module to process
    :return: the path of the processed module on the temp filesystem
    """
    for dirpath, _dirnames, filenames in os.walk(module_path):
        for name in filenames:
            if not spy_file_pattern.match(name):
                continue
            preprocess_file(os.path.join(dirpath, name), is_root_script=False)
    return _translate_to_temp_path(module_path)
def preprocess_file(in_filepath, is_root_script, python_version=None):
    """Convert a single shellpy file to a python file.

    :param in_filepath: path of the shellpy file to process
    :param is_root_script: True when the file is the one the user executed
    :param python_version: python version, used to choose the shebang of
        root scripts
    :return: the path of the generated python file
    """
    target_path = _translate_to_temp_path(
        spy_file_pattern.sub(r"\1.py", in_filepath))

    # Reuse the previous compilation result when the source is unchanged.
    if not is_root_script and not _is_compilation_needed(in_filepath,
                                                         target_path):
        # TODO: cache root also
        # TODO: if you don't compile but it's root, you need to change to exec
        return target_path

    target_dir = os.path.dirname(target_path)
    if not os.path.exists(target_dir):
        os.makedirs(target_dir, mode=0o700)

    with open(in_filepath, 'r') as source:
        code = source.read()

    header = _get_header(in_filepath, is_root_script, python_version)
    output = _add_encoding_to_header(header, code)
    output += _intermediate_to_final(_preprocess_code_to_intermediate(code))

    with open(target_path, 'w') as target:
        target.write(output)

    # Mirror the source permissions; root scripts must also be executable.
    source_mode = os.stat(in_filepath).st_mode
    os.chmod(target_path, source_mode)
    if is_root_script:
        os.chmod(target_path, source_mode | stat.S_IEXEC)

    return target_path
def _get_username():
"""Returns the name of current user. The function is used in construction of the path for processed shellpy files on
temp file system
:return: The name of current user
"""
try:
n = getpass.getuser()
return n
except:
return 'no_username_found'
def _translate_to_temp_path(path):
    """Map *path* to its location on the temp filesystem.

    Compiled shellpy files live under ``{tmp}/shellpy_{user}/{path}`` so
    every user keeps a private copy of the compiled output somewhere other
    than where the sources actually are; this helper computes that
    translated location.

    :param path: the path to be translated
    :return: the translated path
    """
    # TODO: this will not work in win where root is C:\ and absolute_in_path
    # is on D:\
    rooted = os.path.relpath(os.path.abspath(path), os.path.abspath(os.sep))
    return os.path.join(tempfile.gettempdir(),
                        'shellpy_' + _get_username(),
                        rooted)
def _is_compilation_needed(in_filepath, out_filepath):
"""Shows whether compilation of input file is required. It may be not required if the output file did not change
:param in_filepath: The path of shellpy file to be processed
:param out_filepath: The path of the processed python file. It may exist or not.
:return: True if compilation is needed, False otherwise
"""
if not os.path.exists(out_filepath):
return True
in_mtime = os.path.getmtime(in_filepath)
with open(out_filepath, 'r') as f:
for i in range(0, 3): # scan only for three first lines
line = f.readline()
line_result = shellpy_meta_pattern.search(line)
if line_result:
meta = line_result.group(1)
meta = json.loads(meta)
if str(in_mtime) == meta['mtime']:
return False
return True
def _get_header(filepath, is_root_script, python_version):
    """Build the python header prepended to every compiled shellpy file.

    The source file's modification time is embedded as json meta so later
    runs can detect whether recompilation is needed, and for root scripts
    a shebang matching the requested python version is inserted.

    :param filepath: shellpy file being converted (used for its mtime)
    :param is_root_script: True when the file is the one the user executed
    :param python_version: python major version, selects the shebang
    :return: data of the header
    """
    template = 'header_root.tpl' if is_root_script else 'header.tpl'
    template_path = os.path.join(os.path.dirname(__file__), template)
    with open(template_path, 'r') as tpl:
        header = tpl.read()

    meta = {'mtime': str(os.path.getmtime(filepath))}
    header = header.replace('{meta}', json.dumps(meta))

    if is_root_script:
        shebang = {2: '#!/usr/bin/env python',
                   3: '#!/usr/bin/env python3'}[python_version]
        header = header.replace('#shellpy-python-executable', shebang)

    return header
def _preprocess_code_to_intermediate(code):
    """Normalize every shell expression in *code* to the intermediate form.

    All flavours of shell expression (multiline, long-line, inline) are
    rewritten to a common notation so later passes can treat them
    uniformly.  The intermediate form looks like::

        longline_shexe(echo 1)shexe(p)shexe

    :param code: code to convert to intermediate form
    :return: converted code
    """
    stage = _process_multilines(code)
    for transform in (_process_long_lines,
                      _process_code_both,
                      _process_code_start):
        stage = transform(stage)
    return _escape(stage)
def _process_multilines(script_data):
"""Converts a pyshell multiline expression to one line pyshell expression, each line of which is separated
by semicolon. An example would be:
f = `
echo 1 > test.txt
ls -l
`
:param script_data: the string of the whole script
:return: the shellpy script with multiline expressions converted to intermediate form
"""
code_multiline_pattern = re.compile(r'^([^`\n\r]*?)([a-z]*)`\s*?$[\n\r]{1,2}(.*?)`\s*?$', re.MULTILINE | re.DOTALL)
script_data = code_multiline_pattern.sub(r'\1multiline_shexe(\3)shexe(\2)shexe', script_data)
pattern = re.compile(r'multiline_shexe.*?shexe', re.DOTALL)
new_script_data = script_data
for match in pattern.finditer(script_data):
original_str = script_data[match.start():match.end()]
processed_str = re.sub(r'([\r\n]{1,2})', r'; \\\1', original_str)
new_script_data = new_script_data.replace(
original_str, processed_str)
return new_script_data
def _process_long_lines(script_data):
"""Converts to python a pyshell expression that takes more than one line. An example would be:
f = `echo The string \
on several \
lines
:param script_data: the string of the whole script
:return: the shellpy script converted to intermediate form
"""
code_long_line_pattern = re.compile(r'([a-z]*)`(((.*?\\\s*?$)[\n\r]{1,2})+(.*$))', re.MULTILINE)
new_script_data = code_long_line_pattern.sub(r'longline_shexe(\2)shexe(\1)shexe', script_data)
return new_script_data
def _process_code_both(script_data):
"""Converts to python a pyshell script that has ` symbol both in the beginning of expression and in the end.
An example would be:
f = `echo 1`
:param script_data: the string of the whole script
:return: the shellpy script converted to intermediate form
"""
code_both_pattern = re.compile(r'([a-z]*)`(.*?)`')
new_script_data = code_both_pattern.sub(r'both_shexe(\2)shexe(\1)shexe', script_data)
return new_script_data
def _process_code_start(script_data):
"""Converts to python a pyshell script that has ` symbol only in the beginning. An example would be:
f = `echo 1
:param script_data: the string of the whole script
:return: the shellpy script converted to intermediate form
"""
code_start_pattern = re.compile(r'^([^\n\r`]*?)([a-z]*)`([^`\n\r]+)$', re.MULTILINE)
new_script_data = code_start_pattern.sub(r'\1start_shexe(\3)shexe(\2)shexe', script_data)
return new_script_data
def _escape(script_data):
"""Escapes shell commands
:param script_data: the string of the whole script
:return: escaped script
"""
pattern = re.compile(r'[a-z]*_shexe.*?shexe', re.DOTALL)
new_script_data = script_data
for match in pattern.finditer(script_data):
original_str = script_data[match.start():match.end()]
if original_str.find('\'') != -1:
processed_str = original_str.replace('\'', '\\\'')
new_script_data = new_script_data.replace(
original_str, processed_str)
return new_script_data
def _intermediate_to_final(script_data):
"""All shell blocks are first compiled to intermediate form. This part of code converts the intermediate
to final python code
:param script_data: the string of the whole script
:return: python script ready to be executed
"""
intermediate_pattern = re.compile(r'[a-z]*_shexe\((.*?)\)shexe\((.*?)\)shexe', re.MULTILINE | re.DOTALL)
final_script = intermediate_pattern.sub(r"exe('\1'.format(**dict(locals(), **globals())),'\2')", script_data)
return final_script
def _add_encoding_to_header(header_data, script_data):
"""PEP-0263 defines a way to specify python file encoding. If this encoding is present in first
two lines of a shellpy script it will then be moved to the top generated output file
:param script_data: the string of the whole script
:return: the script with the encoding moved to top, if it's present
"""
encoding_pattern = re.compile(r'^(#[-*\s]*coding[:=]\s*([-\w.]+)[-*\s]*)$')
# we use \n here instead of os.linesep since \n is universal as it is present in all OSes
# when \r\n returned by os.linesep may not work if you run against unix files from win
first_two_lines = script_data.split('\n')[:2]
for line in first_two_lines:
encoding = encoding_pattern.search(line)
if encoding is not None:
break
if not encoding:
return header_data
else:
new_header_data = header_data.replace(shellpy_encoding_pattern, encoding.group(1))
return new_header_data
|
Business Resume Examples 15 Templates PDF DOC Free Premium is part of great design ideas. Business Resume Examples 15 Templates PDF DOC Free Premium was created by combining fantastic ideas, interesting arrangements, and follow the current trends in the field of that make you more inspired and give artistic touches. We'd be honored if you can apply some or all of these design in your wedding. believe me, brilliant ideas would be perfect if it can be applied in real and make the people around you amazed!
Business Resume Examples 15 Templates PDF DOC Free Premium was posted on March 18, 2019 at 5:19 am. Business Resume Examples 15 Templates PDF DOC Free Premium has been viewed by 14 users. Click it and download the Business Resume Examples 15 Templates PDF DOC Free Premium.
Resume, Senior Business Analyst Resume Sample was posted December 22, 2016 at 12:00 am by alexandrasdesign.co . More over Senior Business Analyst Resume Sample has viewed by 4390 visitor.
Sample, Business Development Manager Resume was posted December 18, 2018 at 4:59 am by alexandrasdesign.co . More over Business Development Manager Resume has viewed by 1092 visitor.
Resume, Entry Level Business Analyst Resume Examples was posted July 13, 2018 at 8:26 pm by alexandrasdesign.co . More over Entry Level Business Analyst Resume Examples has viewed by 6239 visitor.
Sample, Business Manager Resume was posted January 14, 2019 at 9:53 pm by alexandrasdesign.co . More over Business Manager Resume has viewed by 304 visitor.
Sample, Business Owner Resume was posted February 14, 2019 at 11:22 pm by alexandrasdesign.co . More over Business Owner Resume has viewed by 324 visitor.
Sample, Business Development Resume was posted February 27, 2019 at 7:35 pm by alexandrasdesign.co . More over Business Development Resume has viewed by 1028 visitor.
Sample, Business Analyst Resume was posted February 18, 2019 at 3:20 am by alexandrasdesign.co . More over Business Analyst Resume has viewed by 511 visitor.
Resume, Business Systems Analyst Resume Sample was posted July 15, 2017 at 5:14 am by alexandrasdesign.co . More over Business Systems Analyst Resume Sample has viewed by 3909 visitor.
Resume, Business Intelligence Resume Sample 2 was posted December 13, 2017 at 4:46 pm by alexandrasdesign.co . More over Business Intelligence Resume Sample 2 has viewed by 4785 visitor.
Sample, Business Analyst Resume Sample was posted December 25, 2018 at 2:21 am by alexandrasdesign.co . More over Business Analyst Resume Sample has viewed by 974 visitor.
|
empty = "empty"
class Entry:
    """A single journal entry: a value plus timestamps and a list of tags."""

    def __init__(self,
                 creation_timestamp=None,
                 update_timestamp=None,
                 tags=None,
                 value=None):
        self.creation_timestamp = creation_timestamp
        self.update_timestamp = update_timestamp
        # Default to a fresh list per instance (never a shared default).
        self.tags = [] if tags is None else tags
        self.value = value

    def merge_with(self, other):
        """Fold *other*'s tags into this entry and take its update time."""
        merge_into(self.tags, other.tags)
        self.update_timestamp = other.update_timestamp

    @staticmethod
    def from_string(s, all_tags):
        """Parse an entry from its serialized line form.

        The line layout is ``<created> <updated> <tags|empty> <value>``.
        """
        words = s.split(' ', 3)
        created = float(words[0])
        updated = float(words[1])
        if words[2] == empty:
            tags = []
        else:
            tags = deserialize_tags(words[2], all_tags)
        return Entry(created, updated, tags, words[3])

    def to_string(self, all_tags):
        """Serialize this entry to the line form read by from_string."""
        if self.tags == []:
            serialized = empty
        else:
            serialized = serialize_tags(self.tags, all_tags)
        return "{0} {1} {2} {3}".format(self.creation_timestamp,
                                        self.update_timestamp,
                                        serialized,
                                        self.value)
class Journal:
    """A collection of entries plus the global tag list they index into."""

    def __init__(self, tags=None, entries=None):
        # Fresh lists per instance; never share a mutable default.
        if tags is None:
            self.tags = []
        else:
            self.tags = tags
        if entries is None:
            self.entries = []
        else:
            self.entries = entries

    def merge_tags(self, tags):
        """Add any unseen tags to the journal's tag list (order preserved)."""
        merge_into(self.tags, tags)

    def add_tag(self, tag):
        self.tags.append(tag)

    def add_entry(self, entry):
        self.entries.append(entry)

    def add_or_merge_entry(self, entry):
        """Add *entry*, or merge it into an existing entry with the same value.

        Bug fix: the previous code did ``match = filter(...)`` and then
        ``match[0]`` — on Python 3 a filter object is always truthy and is
        not subscriptable, so the merge path raised TypeError.
        """
        self.merge_tags(entry.tags)
        match = next((e for e in self.entries if e.value == entry.value),
                     None)
        if match is not None:
            match.merge_with(entry)
        else:
            self.add_entry(entry)

    def to_file(self, file_name):
        """Write the journal: first line is the tag list, then one entry per line."""
        with open(file_name, 'w') as storage:
            storage.write(' '.join(self.tags))
            storage.write('\n')
            for entry in self.entries:
                storage.write(entry.to_string(self.tags))
                storage.write('\n')

    def get_entries(self, tag_filter, sort):
        """Return entries passing *tag_filter*, sorted descending by *sort* key."""
        filtered_entries = filter(tag_filter, self.entries)
        return sorted(filtered_entries, key=sort, reverse=True)

    @staticmethod
    def from_file(file_name):
        """Load a journal written by to_file."""
        journal = Journal()
        with open(file_name, 'r') as storage:
            line = storage.readline().rstrip()
            if line != '':
                journal.tags = line.split(' ')
            line = storage.readline().rstrip()
            while line != '':
                entry = Entry.from_string(line, journal.tags)
                journal.add_entry(entry)
                line = storage.readline().rstrip()
        return journal
def filter_tags_inclusive(tags1, tags2):
    """True when tags1 is empty, or shares at least one tag with tags2."""
    if not tags1:
        return True
    return any(tag in tags2 for tag in tags1)
def serialize_tags(tags, all_tags):
    """Encode tags as a comma-separated list of indices into all_tags."""
    indices = [str(all_tags.index(tag)) for tag in tags]
    return ','.join(indices)
def deserialize_tags(s, all_tags):
    """Decode a comma-separated index string back into a list of tags.

    Returns a real list rather than the original lazy ``map`` object:
    on Python 3 a map can only be iterated once and compares unequal to
    lists, which broke callers that store and re-read the result
    (Entry.from_string / Entry.to_string).
    """
    return [all_tags[int(v)] for v in s.split(',')]
def merge_into(lst1, lst2):
    """In-place union: append each element of lst2 missing from lst1,
    preserving lst2's order and skipping duplicates."""
    for element in lst2:
        if element in lst1:
            continue
        lst1.append(element)
|
Is your child needing immunizations, but you are struggling financially? Did you know that there is a federally funded program that helps to ensure that all children receive their immunizations at little to no cost? Vaccines For Children (VFC) does just that. They want to make sure that all children receive the recommended immunizations to help prevent disease and save lives.
|
"""
cala: The court allocation application
Copyright (C) 2017 Daniel J. Lauk <daniel.lauk@gmail.com>
cala is open sourced under the terms of the MIT license.
For details see LICENSE.md.
"""
import os
import sys
import tempfile
from flask import Flask, session, redirect, url_for, escape, request, abort, make_response
# Application bootstrap: create the Flask app, install development defaults,
# then let an external settings file (CALA_SETTINGS) override them.
app = Flask(__name__)
app.config.from_object(__name__)
# Development defaults only.
# NOTE(review): PASSWORD_SALT and SECRET_KEY below are placeholders --
# production deployments must override them via the CALA_SETTINGS file.
app.config.update(dict(
    DATABASE=os.path.join(tempfile.gettempdir(), 'cala.sqlite'),
    PASSWORD_SALT='development salt',
    SECRET_KEY='development key',
    FIRST_DAY_OF_WEEK=1, # 1 = Monday ... 7 = Sunday
    LAST_DAY_OF_WEEK=5, # 1 = Monday ... 7 = Sunday
    MIN_PASSWORD_LENGTH=6,
    MIN_PASSWORD_CLASSES=3,
    SHORT_NOTICE_START='08:00', # short notice games can be booked past this time
    MAX_FREE_GAMES_PER_WEEK=1,
    BOOKING_POINTS_PER_WEEK=3,
    BOOKING_POINTS_WINDOW=(5, 3) # (number of games, number of weeks)
))
# Optional override file pointed to by the CALA_SETTINGS env var.
app.config.from_envvar('CALA_SETTINGS', silent=True)
# Imported for side effects: cala.views registers routes on `app`,
# cala.database provides the schema initialized below.
import cala.views
import cala.database
cala.database.init_db()
|
Pope Francis pulled off some cool basketball moves in Vatican’s St. Peter’s Square at his general audience on Wednesday. He was encouraged by visiting US basketball team the Harlem Globetrotters, who made the Pontiff an honorary member.
The pontiff is known to be an ardent fan of football, especially of his Argentinian home team. However, on Wednesday the Pope showed interest in basketball at his general audience on St. Peter’s Square as he met with the famous entertainment basketball team.
The Pope was walked through how to spin a ball on his fingertip and encouraged to try the move. He was also presented with an honorary team member's jersey adorned with the name Pope Francis and the number 90 on the back.
Only nine people, including Pope Francis, former Pope John Paul II and Nelson Mandela, have been given the honor by the Globetrotters.
The Globetrotters stopped in Vatican on Wednesday as part of their Italy tour. The American exhibition team was founded in 1926 and over the years has played more than 20,000 games in 120 countries. The Globetrotters are claimed to be the first professional basketball team.
|
# Django settings for the django-ldapdb example project.
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
    # ('Your Name', 'your_email@domain.com'),
)
MANAGERS = ADMINS
# Two databases: a local SQLite store for the regular Django apps, and an
# LDAP directory exposed as a database through the ldapdb backend.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': 'ldapdb.db',
        'USER': '',
        'PASSWORD': '',
        'HOST': '',
        'PORT': '',
    },
    'ldap': {
        'ENGINE': 'ldapdb.backends.ldap',
        'NAME': 'ldap://',
        # NOTE(review): hard-coded example bind credentials -- acceptable in
        # a bundled example project, never in production.
        'USER': 'cn=admin,dc=nodomain',
        'PASSWORD': 'test',
    }
}
# Routes queries on ldapdb models to the 'ldap' database defined above.
DATABASE_ROUTERS = ['ldapdb.router.Router']
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'America/Chicago'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale
USE_L10N = True
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/"
MEDIA_ROOT = ''
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash if there is a path component (optional in other cases).
# Examples: "http://media.lawrence.com", "http://example.com/media/"
MEDIA_URL = ''
# URL prefix for admin media -- CSS, JavaScript and images. Make sure to use a
# trailing slash.
# Examples: "http://foo.com/media/", "/media/".
# NOTE(review): ADMIN_MEDIA_PREFIX appears to target an old Django release
# (it was dropped in later versions) -- confirm the Django version in use.
ADMIN_MEDIA_PREFIX = '/media/'
# Make this unique, and don't share it with anybody.
# NOTE(review): placeholder value; generate a real random key per deployment.
SECRET_KEY = 'some_random_secret_key'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
    'django.template.loaders.filesystem.Loader',
    'django.template.loaders.app_directories.Loader',
    # 'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = (
    'django.middleware.common.CommonMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
)
ROOT_URLCONF = 'urls'
TEMPLATE_DIRS = (
    # Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
    # Always use forward slashes, even on Windows.
    # Don't forget to use absolute paths, not relative paths.
)
INSTALLED_APPS = (
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.sites',
    'ldapdb',
    'examples',
    'django.contrib.admin',
)
# Bind settings consumed by the ldapdb backend; these duplicate the 'ldap'
# DATABASES entry above (same review note about hard-coded credentials).
LDAPDB_BIND_DN="cn=admin,dc=nodomain"
LDAPDB_BIND_PASSWORD="test"
LDAPDB_SERVER_URI="ldap://"
|
How do I run until I’m in shape?
For the first week just go out 3 times have a rest day between each run. Run slowly and as far as you can if you need to walk do so. The idea is to enjoy yourself. Go out for no more than 20 minutes at a time. Until you can run for the whole 20 minutes. Then increase your time to 30 minutes. Or add 2 minutes to each run. So you run for 22minutes then 24 and so on. The golden rule don’t add more than 10% at a time. Drink lots of water. Don’t eat anything heavy before you go out, best not too eat for at least an hour before you run. Drink after your run. Running slowly burns more fat than running fast. Change your route often if possible it will help to stop you getting bored. But find a route that you like and every couple of weeks see how much faster you can run it, to see how fitter you are getting. If you decide to keep on running and hopefully you will, invest in a good pair of running shoes. Find your local dealer and tell them you are a beginner and they will help you get the right shoes. Try to find a local running club that will give you lots of encouragement.
Good luck, get out there and have some fun.
How do the rich get rich without being born that way.?
Unless they invented something or started some big company like Pizza Hut, getting rich today is nearly impossible. Really hard work will only get you a great-paying job — that's not rich, just well off. And all the tools necessary to venture into investments like real estate take tons of cash, and unless you're born with it, you're out of luck.
Donald Trump has been bankrupted before yet he always manages to climb the ladder again.
Getting rich is a skill. It involves so much more than just hard work.
My dad has worked hard his entire life. Do you know what he has to show for it? A bad back.
1. Always save AT LEAST 10% of everything you earn.
5. Find experts and get their opinions on investment opportunities.
6. Never risk more than you can afford to lose.
I have $5 taken out of my check each week for my company’s stock plan?
I work for a fortune 500 company and had then deduct $5 each week out of my check. I know this is extremely low but I have no clue how stocks and shares work. Could some tell me should I have them deduct more money and how the money will accumulate?
Investing in equities is always a good idea in the long run, but investing in just one stock seldom is.
As a rule of thumb i advise people to invest 5-10% of their earnings into diversified retirement savings- and for anyone over 30 just starting, that figure needs to be higher.
The golden rules- start early. Diversify your investments. Stay invested in up and down markets, and take advantage of dollar-cost averaging- which means the same amount invested each week averages out highs and lows in the long run.
For advice, look to your local Primerica representative- google your local office.
What are some of the strict rules of the Separate Baptist?
-You can’t drink. I know a pastor who won’t have wine vinegar on his salad because he thinks its sin.
-If you miss a service or a time to go door to door and witness the pastor or someone else will make you feel guilty.
-Your tithing will be watched. You’ll be made to feel guilty if you don’t tithe.
-If you know your bible you will not fit in, and you’ll be labeled a trouble maker. You’ll jump from church to church in the same denomination thinking it’s just the pastor who’s too dogmatic. When you visited every church in your area, of like faith, you will have no friends and think you were rejected by God. Finally, you’ll figure the problem wasn’t with the pastors, it was with you.
I know many good intelligent people in those churches. Why are they still there? I’m convinced, and many have said, “All my friends go there, I have too much invested, I’m a deacon or Sunday School teacher, it’s like a social club. We also must remember many of the members are pulling a salary from the church, why would they kill the goose that lays the golden eggs. The others either turn a blind eye, or are ignorant and blissful.
Do not challenge them theologically unless you have time. Most baptist can recite and memorize proof texts like it’s no ones business. If you question their doctrine, only do it on one point at a time and open your bible and read the entire chapter in question. You will find 9 out of 10 times the verse they used, as a proof text, is out of context.
My suggestion to you is not to think twice but think three times.
does anybody know any real treasure hunting websites?
I need to pay back a $10,000 student loan, and I'm hoping to strike it rich by finding a historic treasure so I can pay it back.
There are countless T.H.ing websites w/ good info., but I think you should start with the basic cornerstones that treasure hunting of today is built upon. As a student, you should be familiar with researching various subjects. In this case, I’d highly suggest investing in copies of Karl Von Mueller’s Treasure Hunters Manuals numbers 6 & 7. Combined, they represent the BIBLE of serious treasure hunters. READ THEM COVER TO COVER. LEARN FROM THEM. KNOW THEM! The author lets you know that treasure hunting can be done on a shoestring budget, and a metal detector isn’t always needed! However, one of these books contains instructions on how to build a decent (for the time) inexpensive metal detector. Of course, your local libraries should have books on the subject, and best of all: they’re FREE! If you must buy a detector, stay away from Radio Shack crap! Stick w/ Whites, Tesoro, Bounty Hunter, or Fisher brands. Check w/the manufacturers for seasonal deals, then check out auction sites for the used variety. See if you can find a local detector dealer/treasure hunting club–they’re likely to give you lots of useful information.
Research your local areas first. Gas is expensive, & you will be astounded by what can be found in your own back yard! BE AWARE TO BE SAFE! Be aware of areas that may have unexploded shells, uncovered mine shafts/wells, and rotten structures at risk of collapse! DO NOT WALK INTO ANY OLD MINE!
MOST IMPORTANT POINT OF ALL: GET PERMISSION!
Check w/ your local county and state offices to see what laws for or against T. Hunting exist! Get permission from all landowners to detect their property! Learn where it’s illegal! Learn the proper techniques for digging up said treasure. It’s like being a courteous camper; leave no trace that you were ever there! Cover up your holes, pack out any trash you dig up & maybe someones’ trash left in the same area! The hobby/vocation of treasure hunting is very fragile, and we hunters must depend on each other to avoid property damage of all kinds. Please exercise upstanding morals and The Golden Rule. DO UNTO OTHERS! How would you like it if you woke up 1 morning to find your lawn defaced with huge open craters, or your front porch shattered, or (shudder) someone looted your grandfathers’ grave for his gold fillings!
All of the above, and more are dealt with in the books I mentioned (6 & 7) I’d also recommend looking into the magazines Western & Eastern Treasures, & Lost Treasures. GOOD LUCK!
|
import random
import common.Util.InstrumentDB as InstrumentDB
import common.Config as Config
from common.Util.CSoundNote import CSoundNote
from common.Generation.GenerationConstants import GenerationConstants
from GenRythm import GenRythm
# Shared singleton holding metadata for every available instrument/kit.
instrumentDB = InstrumentDB.getRef()
def generator( instrument, nbeats, density, regularity, reverbSend ):
    """Generate drum-track notes for one drum kit.

    Fixes: the original nested helpers shadowed the builtins ``list`` and
    ``max``, and computed an unused ``barLength`` local; both removed.

    Parameters
    ----------
    instrument : str
        Kit name, looked up in the shared instrumentDB.
    nbeats : int
        Number of beats to generate.
    density, regularity : float
        Rhythm-generation parameters forwarded to GenRythm; regularity also
        selects one of the four DRUM_COMPLEXITY pitch sets.
    reverbSend : float
        Reverb amount stored on every generated note.

    Returns
    -------
    list of list of CSoundNote
        One note list per drum pitch group in the selected complexity set.
    """
    makeRythm = GenRythm()
    noteDuration = GenerationConstants.DOUBLE_TICK_DUR / 2
    trackId = 0
    pan = 0.5
    attack = 0.005
    decay = 0.095
    filterType = 0
    filterCutoff = 1000
    tied = False
    mode = 'mini'

    def makePitchSequence(length, drumPitch):
        # One pitch per onset, drawn uniformly from the group's pitches.
        highest = len(drumPitch) - 1
        return [drumPitch[random.randint(0, highest)] for _ in range(length)]

    def makeGainSequence(onsetList):
        # Loudest on the bar start, medium on beat boundaries, soft elsewhere.
        gainSequence = []
        for onset in onsetList:
            if onset == 0:
                gain = random.uniform(GenerationConstants.GAIN_MID_MAX_BOUNDARY,
                                      GenerationConstants.GAIN_MAX_BOUNDARY)
            elif (onset % Config.TICKS_PER_BEAT) == 0:
                gain = random.uniform(GenerationConstants.GAIN_MID_MIN_BOUNDARY,
                                      GenerationConstants.GAIN_MID_MAX_BOUNDARY)
            else:
                gain = random.uniform(GenerationConstants.GAIN_MIN_BOUNDARY,
                                      GenerationConstants.GAIN_MID_MIN_BOUNDARY)
            gainSequence.append(gain)
        return gainSequence

    def pageGenerate(regularity, drumPitch):
        # The first pitch of the group identifies the concrete kit instrument.
        currentInstrument = instrumentDB.instNamed[instrument].kit[drumPitch[0]]
        rythmSequence = makeRythm.drumRythmSequence(currentInstrument, nbeats,
                                                    density, regularity)
        pitchSequence = makePitchSequence(len(rythmSequence), drumPitch)
        gainSequence = makeGainSequence(rythmSequence)
        return [CSoundNote(onset, pitch, gain,
                           pan, noteDuration, trackId,
                           instrumentDB.instNamed[instrument].instrumentId,
                           attack, decay, reverbSend, filterType,
                           filterCutoff, tied, mode)
                for onset, pitch, gain in zip(rythmSequence, pitchSequence,
                                              gainSequence)]

    # Higher regularity selects a simpler complexity pitch set.
    if regularity > 0.75:
        streamOfPitch = GenerationConstants.DRUM_COMPLEXITY1
    elif regularity > 0.5:
        streamOfPitch = GenerationConstants.DRUM_COMPLEXITY2
    elif regularity > 0.25:
        streamOfPitch = GenerationConstants.DRUM_COMPLEXITY3
    else:
        streamOfPitch = GenerationConstants.DRUM_COMPLEXITY4
    return [pageGenerate(regularity, drumPitch)
            for drumPitch in streamOfPitch]
|
Jews wait in line on a streetcorner in the Krakow ghetto. The sign in Polish reads, "Souvenirs from Krakow."
Jews wearing armbands are forced to shovel snow from the pavement in Krakow.
A German policeman checks the identification papers of Jews in the Krakow ghetto.
Jews selling off their possessions in the streets of the Krakow ghetto.
Jews move their belongings into the Krakow ghetto in horse-drawn wagons.
German soldiers check the papers of Jews moving into the Krakow ghetto.
A group of Jews chop up furniture to use as fuel in the Krakow ghetto.
A photo identification card, bearing the official stamps of the Krakow labor office.
Jews from the Krakow ghetto, who have been rounded-up for deportation, are crowded onto the back of a truck.
Close-up of an announcement affixed to a city kiosk, ordering the resettlement of Jews from the city of Krakow, signed by district chief, Dr. Wachter.
A Jewish woman and her two young children await deportation from the Krakow ghetto.
View of the gate at the Krakow ghetto.
A column of Jews marches through the streets of Krakow during the final liquidation of the ghetto. An SS unit oversees the deportation action.
Oskar Schindler poses next to his horse at the "Emalia" enamelware factory in Krakow-Zablocie.
Group portrait of female office workers employed by Oskar Schindler at his "Emalia" enamelware factory in Krakow-Zablocie.
Jewish prisoners at forced labor in Plaszow.
Portrait of an SS guard with his dog at the Plaszow concentration camp.
|
"""
Set of functions for interacting (reading / writing) with the AAM. Business
logic goes here.
"""
__author__ = 'Fabian Svara'
import re
import zipfile
from cStringIO import StringIO
from django.db import transaction
from django.utils import timezone
from general_utilities.versions import compare_version
from knossos_utils.skeleton import Skeleton
import checks
import models
import view_helpers
class NonEmptyWork(Exception):
    # Raised when attempting to delete a Work that still has Submissions.
    pass
def delete_submission(s):
    """Delete submission s, first crediting its worktime back to the Work."""
    tracked_time = s.worktime
    if tracked_time:
        parent_work = s.work
        parent_work.worktime -= tracked_time
        parent_work.save()
    s.delete()
def get_active_work(em):
    """
    Return the unfinished Work records of employee em.

    Parameters
    ----------
    em : Employee instance

    Returns
    -------
    list of Work instances, ordered by primary key
    """
    unfinished = models.Work.objects.filter(employee=em, is_final=False)
    return sorted(unfinished, key=lambda work: work.pk)
def get_completed_work(em):
    """
    Return the finalized Work records of employee em.

    Parameters
    ----------
    em : Employee instance

    Returns
    -------
    list of Work instances
    """
    finished = models.Work.objects.filter(employee=em, is_final=True)
    return list(finished)
def get_available_tasks(em, count=1):
    """
    Return the tasks employee em could pick up next.

    Parameters
    ----------
    em : Employee instance
    count : int
        Number of tasks per category to report in the by-category dict.

    Returns
    -------
    available_tasks_by_cat : dict of category -> list of Task
        Top `count` tasks per category, highest priority first.
    available_tasks : list of Task
        All candidate tasks across categories, highest priority first.
        Both values are (None, None) when em has no project.
    """
    if em.project is None:
        return None, None
    by_category = {}
    all_candidates = []
    for category in em.project.taskcategory_set.all():
        # Active, prioritized tasks the employee is not already working on.
        candidates = category.task_set.filter(
            is_active=True, priority__gt=-1).exclude(employee=em)
        candidates = sorted(candidates,
                            key=lambda task: task.priority, reverse=True)
        if candidates:
            by_category[category] = candidates[:count]
            all_candidates.extend(candidates)
    all_candidates.sort(key=lambda task: task.priority, reverse=True)
    return by_category, all_candidates
def reset_task(task, username):
    """Delete all of username's submissions on the named task and zero out
    the corresponding Work record so the task can be restarted."""
    models.Submission.objects.filter(
        employee__user__username=username,
        work__task__name=task).delete()
    work = models.Work.objects.get(employee__user__username__exact=username,
                                   task__name=task)
    work.worktime = 0.
    work.is_final = False
    work.latestsubmit = None
    work.last_submission = None
    work.save()
def unfinalize_work(work_id):
    """Reopen the Work with primary key work_id."""
    work = models.Work.objects.get(pk=work_id)
    work.is_final = False
    work.save()
def cancel_task(task, username):
    """Delete username's Work on the named task.

    Raises
    ------
    NonEmptyWork
        If submissions already exist for the Work (it is then kept).
    """
    work = models.Work.objects.get(
        task__name=task,
        employee__user__username=username, )
    if work.submission_set.all():
        raise NonEmptyWork('Submissions exist for this Work. Not deleting.')
    work.delete()
def choose_task(employee, task_id):
    """Open a new Work record assigning task task_id to employee.

    Raises
    ------
    view_helpers.TooManyActiveTasks
        If the employee already has an unfinished Work.
    view_helpers.UserRace
        If the task reached its target coverage in the meantime.
    """
    if models.Work.objects.filter(employee=employee, is_final=False):
        raise view_helpers.TooManyActiveTasks()
    task = models.Task.objects.get(pk=task_id)
    if task.target_coverage <= task.current_coverage:
        raise view_helpers.UserRace()
    models.Work.objects.create(
        started=timezone.now(),
        task=task,
        employee=employee,
        is_final=False, )
def submit(employee, submit_file, submit_comment, submit_is_final,
           submit_work_id, skip_checks=False):
    """Parses the submitted file, extracts the worktime and tests the nml.
    For submissions which are done on worktime tasks, the submission
    is created without any file.
    For regular submissions, the file name is checked on length.
    It is checked if the nml file was saved and created in the
    current version of Knossos.
    Parameters:
    ----------
    employee: Employee object
        Employee related to the submission
    submit_file: file object
        submitted nml file
    submit_comment: string
        comment which was submitted together with the submission
    submit_is_final: bool
        True for final submission
    submit_work_id: integer
        id of the work related to this submission
    skip_checks: bool
        True skips running the per-task check functions entirely
    Returns:
    ----------
    None (a Submission record is created as a side effect)
    Raises:
    ----------
    InvalidSubmission:
        if the filename is longer than 200 characters
        if the file was created/saved in an old version of Knossos
        if the worktime is lower than the one of the previous submission
    ImportError:
        if a check could not be imported from the Checks file
    DoesNotExist:
        if the Work object is not found
    ParseError:
        if the file cannot be opened by the Skeleton class.
    """
    if len(submit_file.name) > 200:
        raise view_helpers.InvalidSubmission(
            'The maximal file name length for submissions is '
            '200 character.')
    work = models.Work.objects.get(pk=submit_work_id)
    # testing for .k.zip is problematic, just do zip - django itself removes
    # the k sometimes (e.g. when changing the filename of task files
    # on uploading them by adding random chars)
    if submit_file.name.endswith('.zip'):
        fp = StringIO(submit_file.read())
        zipper = zipfile.ZipFile(fp, 'r')
        if 'annotation.xml' not in zipper.namelist():
            raise Exception('k.zip broken.')
        skeleton_file_as_string = zipper.read('annotation.xml')
    else:
        skeleton_file_as_string = submit_file.read()
    # The task stores its check-function names as a free-text field;
    # split on non-word characters and drop empty fragments.
    checks_to_run = re.split('\W', work.task.checks)
    checks_to_run = [x for x in checks_to_run if x]
    if checks_to_run and not skip_checks:
        check_fns = dict()
        for cur_check in checks_to_run:
            # NOTE(review): check names from the Task record are passed to
            # exec/eval below; they are admin-controlled, but this is fragile
            # and worth replacing with getattr() on the checks module.
            exec ('from knossos_aam_backend.checks import {0}'.format(cur_check))
            cur_check_fn = locals()[cur_check]
            check_fns[cur_check] = cur_check_fn
        skeleton = Skeleton()
        skeleton.fromNmlString(skeleton_file_as_string,
                               use_file_scaling=True)
        # Keyword arguments for check functions
        #
        kwargs = {'skeleton': skeleton,
                  'work': work,
                  'employee': employee,
                  'submit_file': submit_file,
                  'submit_comment': submit_comment,
                  'submit_work_id': submit_work_id,
                  'submit_is_final': submit_is_final,
                  'submit_file_as_string': skeleton_file_as_string, }
        # Check whether the knossos version is high enough
        version = skeleton.get_version()
        # Has work time tracking
        if compare_version(version['saved'], (4, 1, 2)) == '<':
            raise view_helpers.InvalidSubmission(
                "This tracing was saved in a version "
                "of Knossos that is too old and incompatible with "
                "knossos_aam. Please upgrade to version 4.1.2, "
                "available "
                "from www.knossostool.org, save the file again in "
                "that version, and resubmit.")
        else:
            # All fine, newest version.
            pass
        # NOTE(review): this branch looks suspicious -- when
        # 'automatic_worktime' is NOT among the requested checks, the code
        # still calls checks.automatic_worktime() and discards the result.
        # Confirm whether the call is needed for a side effect or the
        # condition is inverted.
        if 'automatic_worktime' not in checks_to_run:
            incremental_worktime = None
            auto_worktime = False
            output = checks.automatic_worktime(**kwargs)
        else:
            auto_worktime = True
            output = checks.automatic_worktime(**kwargs)
            # A string return value is an error message for the user.
            if type(output) == str:
                raise view_helpers.InvalidSubmission(output)
            else:
                incremental_worktime = output
            del check_fns['automatic_worktime']
        # Here is the part where the tests are done
        #
        for cur_check in check_fns:
            # Each check returns a string (error for the user) or None/value.
            output = eval(cur_check)(**kwargs)
            if type(output) == str:
                raise view_helpers.InvalidSubmission(output)
        if 'automatic_worktime' in checks_to_run and incremental_worktime:
            work.worktime = work.worktime + incremental_worktime
            work.save()
    else:
        incremental_worktime = None
        auto_worktime = False
    # Send e-mail if comment is added to submission.
    if submit_comment:
        subject = 'Comment on Submission of Task {0} Task from {1}'.format(work.task.name, employee.user.username)
        attachments = [(skeleton_file_as_string, submit_file.name)]
        # todo get mailing to work again
        # mail_notify('to@example.com', subject, submit_comment,
        # attachments=attachments, reply_to=work.employee.user.email)
    s = models.Submission.objects.create(
        employee=employee,
        date=timezone.now(),
        work=work,
        comment=submit_comment,
        is_final=submit_is_final,
        worktime=incremental_worktime,
        original_filename=submit_file.name[0:200],
        datafile=submit_file, )
    # NOTE(review): objects.create() already saves; this extra save() looks
    # redundant but is harmless.
    s.save()
def get_monthly_worktime_for_submissions(submission_set):
    """ Calculate how much of the work time has been spent in different months
    Parameters:
    ----------
    submission_set: QuerySet(Submission)
    Returns:
    ----------
    dict {by_month_per_task, by_month_totals}
    by_month_per_task: { year: { month: { task: [worktime, is_final] } } }
    by_month_totals: { year: { month: [worktime, is_final] } }
    """
    by_month_per_task = {}
    by_month_totals = {}
    # Process submissions chronologically so buckets are created in order.
    s = submission_set.order_by('date')
    for curs in s:
        year = curs.date.year
        month = curs.date.month
        task = curs.work.task
        # Submissions without a recorded worktime (manually tracked tasks)
        # contribute zero time but flag the bucket as incomplete.
        incomplete_time = False
        if curs.worktime is None:
            cur_worktime = 0.
            incomplete_time = True
        else:
            cur_worktime = curs.worktime
        if year not in by_month_per_task:
            by_month_per_task[year] = {}
            by_month_totals[year] = {}
        if month not in by_month_per_task[year]:
            by_month_per_task[year][month] = {}
            # Second item in tuple indicates whether the worktime
            # is incomplete, i.e. work was performed on tasks
            # for which worktime is not automatically computed
            by_month_totals[year][month] = [0, False]
        if task not in by_month_per_task[year][month]:
            by_month_per_task[year][month][task] = [0, False]
        if incomplete_time:
            by_month_per_task[year][month][task][1] = True
            by_month_totals[year][month][1] = True
        by_month_per_task[year][month][task][0] = \
            by_month_per_task[year][month][task][0] + cur_worktime
        by_month_totals[year][month][0] = \
            by_month_totals[year][month][0] + cur_worktime
    return {'by_month_per_task': by_month_per_task,
            'by_month_totals': by_month_totals, }
def get_monthly_worktime_for_work(w):
    # Convenience wrapper: aggregate over all submissions belonging to w.
    return get_monthly_worktime_for_submissions(w.submission_set)
def get_employee_info(emp):
    """Summarize emp's identity, project and, if present, the first active
    Work (task name, accumulated worktime, last submitted file)."""
    info = {
        "name": " ".join([emp.user.first_name, emp.user.last_name]),
        "username": " ".join([emp.user.username]),
        "project": emp.project.name,
    }
    active = get_active_work(emp)
    if active:
        current = active[0]
        info["task_name"] = current.task.name
        info["work_time"] = current.worktime
        info["last_submit"] = current.last_submission.datafile
    return info
def get_employees_current_work():
    """Map every Employee to their current-work info summary."""
    return {emp: get_employee_info(emp)
            for emp in models.Employee.objects.all()}
def get_employee_infos_in_project(proj):
    """Collect info summaries for all employees assigned to project proj."""
    members = models.Employee.objects.filter(project=proj)
    return [get_employee_info(emp) for emp in members]
def move_employees_to_project(employees, new_project):
    """Reassign every employee to new_project inside a single transaction,
    so either all moves persist or none do."""
    with transaction.atomic():
        for emp in employees:
            emp.project = new_project
            emp.save()
|
Examples of tangible personal property are: cars, trucks, motorcycles, boats, aircraft, machinery and equipment, utility trailers, furniture and fixtures (when used for profit).
Mobile homes are considered personal property when the mobile home owner does not own the land.
July 1 - October 1 is the Assessment period set by the WV State Legislature. During the end of June the Assessor’s Office mails individual and business personal property forms with an instruction sheet and return envelope. If the taxpayers do not receive this mailing they should call the Assessor’s Office. It is West Virginia State Law that taxpayers report personal property owned on July 1.
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import logging
import os
from document_parser import ParseDocument
from platform_util import ExtractPlatformFromURL
from third_party.json_schema_compiler.model import UnixName
class DocumentRenderer(object):
'''Performs document-level rendering such as the title, references,
and table of contents: pulling that data out of the document, then
replacing the $(title), $(ref:...) and $(table_of_contents) tokens with them.
This can be thought of as a parallel to TemplateRenderer; while
TemplateRenderer is responsible for interpreting templates and rendering files
within the template engine, DocumentRenderer is responsible for interpreting
higher-level document concepts like the title and TOC, then performing string
replacement for them. The syntax for this replacement is $(...) where ... is
the concept. Currently title and table_of_contents are supported.
'''
def __init__(self, table_of_contents_renderer, platform_bundle):
self._table_of_contents_renderer = table_of_contents_renderer
self._platform_bundle = platform_bundle
def _RenderLinks(self, document, path):
''' Replaces all $(ref:...) references in |document| with html links.
References have two forms:
$(ref:api.node) - Replaces the reference with a link to node on the
API page. The title is set to the name of the node.
$(ref:api.node Title) - Same as the previous form, but title is set
to "Title".
'''
START_REF = '$(ref:'
END_REF = ')'
MAX_REF_LENGTH = 256
new_document = []
# Keeps track of position within |document|
cursor_index = 0
start_ref_index = document.find(START_REF)
while start_ref_index != -1:
end_ref_index = document.find(END_REF, start_ref_index)
if (end_ref_index == -1 or
end_ref_index - start_ref_index > MAX_REF_LENGTH):
end_ref_index = document.find(' ', start_ref_index)
logging.error('%s:%s has no terminating ) at line %s' % (
path,
document[start_ref_index:end_ref_index],
document.count('\n', 0, end_ref_index)))
new_document.append(document[cursor_index:end_ref_index + 1])
else:
ref = document[start_ref_index:end_ref_index]
ref_parts = ref[len(START_REF):].split(None, 1)
# Guess the api name from the html name, replacing '_' with '.' (e.g.
# if the page is app_window.html, guess the api name is app.window)
api_name = os.path.splitext(os.path.basename(path))[0].replace('_', '.')
title = ref_parts[0] if len(ref_parts) == 1 else ref_parts[1]
platform = ExtractPlatformFromURL(path)
if platform is None:
logging.error('Cannot resolve reference without a platform.')
continue
ref_dict = self._platform_bundle.GetReferenceResolver(
platform).SafeGetLink(ref_parts[0],
namespace=api_name,
title=title,
path=path)
new_document.append(document[cursor_index:start_ref_index])
new_document.append('<a href=%s/%s>%s</a>' % (
self._platform_bundle._base_path + platform,
ref_dict['href'],
ref_dict['text']))
cursor_index = end_ref_index + 1
start_ref_index = document.find(START_REF, cursor_index)
new_document.append(document[cursor_index:])
return ''.join(new_document)
def Render(self, document, path, render_title=False):
  ''' Render |document| (served at request path |path|) to html.

  |render_title|: when True, a title is expected and substituted.
  Returns a (rendered_text, warnings) pair.
  '''
  # Substitute $(ref:...) links up front so that parsing and the later
  # replacements never see raw references.
  rendered = self._RenderLinks(document, path)
  parsed = ParseDocument(rendered, expect_title=render_title)
  toc_text, toc_warnings = self._table_of_contents_renderer.Render(
      parsed.sections)
  # Only 1 title and 1 table of contents substitution allowed; capping
  # replace() at one occurrence avoids scanning the whole file in the
  # common case.
  if parsed.title:
    rendered = rendered.replace('$(title)', parsed.title, 1)
  return (rendered.replace('$(table_of_contents)', toc_text, 1),
          parsed.warnings + toc_warnings)
|
It's Sunday, April 21, 2019 - 5:02 AM in Nigeria.
Rising oil production in Libya and Nigeria is raising concerns about OPEC’s ability to boost crude prices, but conflicts in the two nations may still keep a lid on their output.
Both OPEC members are exempt from the cartel’s deal to remove 1.2 million barrels a day from the oil market in the first six months of this year. But with OPEC poised to extend the agreement at least through the rest of 2017, the conflicts that sidelined Libyan and Nigerian crude supplies appear to be easing.
Libya’s output rose above 800,000 barrels a day for the first time since 2014, when a second civil war broke out, the country’s National Oil Corp. reported last week. Meanwhile, Nigeria is restoring major infrastructure damaged in militant attacks that nearly halved its output last year.
Goldman Sachs said rising Libyan and Nigerian production is one factor that is capping oil price gains, even as top producers Saudi Arabia and Russia push to extend OPEC’s output cuts into 2018.
“While we remain cautious on factoring in such a recovery in production given the ongoing local tensions, these combined volumes could largely offset the benefit of the extended cuts,” the bank said in a research note.
The outlook for Libyan supply improved this month after the leader of the country’s internationally recognized government and a rival commander whose forces control the country’s northeast met to sketch out a power-sharing agreement.
In a positive sign, Egypt appears to have pushed its ally General Khalifa Haftar to meet with UN-backed Prime Minister Fayez al Sarraj. This shows Cairo is committed to promoting stability next door, even if a resolution forces Haftar to make concessions, the Eurasia Group said in a briefing.
But the risk consultancy believes it will be difficult to secure a lasting peace deal in the next six months. A bitter dispute over Haftar’s role in Libya’s political future is one obstacle. Progress is also being hampered by a disjointed diplomatic push by international players and disputes within the United Nations’ Libya mission, the Eurasia Group said.
As a result, the firm projects oil exports will fluctuate between 500,000 and 700,000 barrels a day for the remainder of 2017. While Haftar’s firm grip over oil supplies in Libya’s eastern region will limit the risk that output will plummet, funding problems and a shortage of foreign workers at the National Oil Corp. will cap production gains, the Eurasia Group concluded.
RBC Capital Markets believes geopolitical risks remain high in Libya, in part because the Islamic State considers it a fallback option as the militant group loses ground in its Syrian and Iraqi strongholds. The CIA said last year that ISIS has 6,000 to 8,000 fighters in Libya.
In Nigeria, a wave of attacks on oil infrastructure claimed by a clandestine group known as the Niger Delta Avengers have ceased, allowing major pipelines to begin pumping crude again.
The peace that has taken hold is fragile though, warned Manji Cheto, senior vice president covering West Africa at risk advisory Teneo Intelligence. It rests largely on negotiations between elders in the oil-rich southern delta and Nigerian Vice President Yemi Osinbajo, a southerner who is leading the federal delegation to the impoverished and pollution-ravaged region.
The militants, who demanded a larger share of the nation’s oil wealth for people in the delta, now feel they have a voice in the capital, Cheto said. Simultaneously, the nonmilitary approach used by Osinbajo, which includes promises of development money, has undercut the case for militancy.
But Nigerian President Muhammadu Buhari has been abroad for much of the year receiving treatment for an undisclosed illness. That creates a political opportunity for vested interests in Buhari’s inner circle, who are uncomfortable with the northern-born president’s relatively close relationship with his deputy, Cheto said. This could undermine militants’ trust in the capital to the extent Osinbajo is marginalized.
In Cheto’s worst-case scenario, an ailing Buhari steps down and his aides push out Osinbajo, severing the militants’ link to the government and sparking renewed attacks on oil infrastructure.
“At the moment it would be very difficult, but not impossible. I can’t see a path to that kind of outcome, but this is Nigeria,” she said.
Despite concerns about growing oil supply, there is no sign yet that OPEC will push Libya and Nigeria to turn off the tap when they meet next week.
The cartel could seek a production cap similar to that given to Iran, which is allowed to raise output to a certain level as it rebuilds its energy industry after years of crippling sanctions.
Nigeria’s oil minister said in January that his country would consider reductions once its output returns to 1.8 million barrels a day, though he did not say how deeply it would cut.
|
# -*- coding: utf-8 -*-
# xmlrpcmapper.py ---
#
# Created: Wed Apr 11 15:40:26 2012 (-0600)
# Author: Patrick Hull
#
import xmlrpclib
from diablo.datamapper import DataMapper
from diablo import http
class XmlRpcMapper(DataMapper):
    """XML-RPC mapper

    The mapper must be set using the format arg or explicitly in the
    resource, otherwise XmlMapper will be used for content-type text/xml.
    """

    content_type = 'text/xml'

    def __init__(self, methodresponse=True, allow_none=True):
        # methodresponse: wrap serialized data in a <methodResponse>
        # element (which may contain only a single value).
        self.methodresponse = methodresponse
        # allow_none: serialize None using the <nil/> extension element.
        self.allow_none = allow_none

    def _format_data(self, data, charset):
        """Serialize data into an XML-RPC response string."""
        # 'except TypeError:' replaces the Python-2-only
        # 'except TypeError, err' form; 'err' was never used anyway.
        try:
            return xmlrpclib.dumps((data,),
                                   methodresponse=self.methodresponse,
                                   allow_none=self.allow_none,
                                   encoding=charset)
        except TypeError:
            raise http.InternalServerError('unable to encode data')

    def _parse_data(self, data, charset):
        """Parse an XML-RPC request string into Python data."""
        try:
            return xmlrpclib.loads(data)
        except ValueError:
            raise http.BadRequest('unable to parse data')
|
Keeping the love of marriage alive requires deliberate attention, time, and humility. God’s Word shows us how.
In this intimate book, The Song of Solomon, we get to stare wide-eyed at love as God intended it to be. From its beginning in what makes one attractive to the struggle with that attraction in sexuality as a single. From the passion of physical intimacy given and blessed by God, to the passion of marital conflict and how it may be resolved. We shouldn’t be ashamed to study what God was not ashamed to reveal. In this practical book, we come to understand in a very practical way why the greatest of songs is love.
Marital conflict can be resolved best God’s way. It’s tough work, but it’s the only way.
The honeymoon is over. The first marital conflict occurs. How does God’s Word help us when the dream turns to reality?
In God’s perfect plan, the wedding night introduces the delights of sexual intimacy in marriage.
It’s hard to believe just yesterday I was in America.
Today’s transportation system requires little more than a basic understanding of road signs and airline gates (which I somehow still seem to miss). That’s why most of us failed geography. Who needs it?
But in ancient Israel geography was critical. Today I saw one reason why.
Our hotel in Tel Aviv sits right next to ancient Joppa (think Jonah and Peter). In fact, a short walk down the beach took us there.
The Egyptians’ ancient rubble in Joppa reminded me of why foreign powers all wanted to control Israel. The land of Israel sat in an amazingly strategic position as the only intercontinental land bridge between the superpowers of the ancient world. The strategic International Highway—sometimes called the Great Trunk Road or the Via Maris (“Way of the Sea”)—ran the full length of the land of Israel.
This vast artery of transport stretched from the Fertile Crescent all the way to Egypt. Israel stood at the crossroads of three continents—Asia, Africa, and Europe—and the surrounding seas and desert forced all who traveled to Egypt by land to traverse Israel. In addition to Egypt, the armies of Assyria, Babylon, Media-Persia, Greece, and Rome all invaded Israel in order to control its International Highway. Israel remained for millennia the crossroads for international imperialism, war, and trade.
Even today, Israel serves as the overland passageway for large, high-flying birds that prefer not to migrate over the seas. Literally hundreds of thousands of black and white storks, steppe and spotted eagles, black kites, steppe and honey buzzards, and Levant sparrow hawks soar over the Holy Land in their biannual migrations to and from east Africa. No wonder bird watching remains a popular activity in Israel.
When foreigners traveled through Israel during biblical days, God’s people would either influence them or be swayed by them. Because of this influence, Israel’s central position among the nations proved to be a double-edged sword.
God lamented that Jerusalem’s general placement as “the center of the nations,” had borne no fruit of obedience (Ezekiel 5:5). Ironically, God’s people had been swayed by the very nations He intended them to influence.
What a great reminder that we have to guard the critical points of entry into our lives.
Please share this blog (below) and follow me on Twitter. I’m blogging daily on our journey.
|
"""
fitpack --- curve and surface fitting with splines
fitpack is based on a collection of Fortran routines DIERCKX
by P. Dierckx (see http://www.netlib.org/dierckx/) transformed
to double routines by Pearu Peterson.
"""
# Created by Pearu Peterson, June,August 2003
__all__ = [
'UnivariateSpline',
'InterpolatedUnivariateSpline',
'LSQUnivariateSpline',
'BivariateSpline',
'LSQBivariateSpline',
'SmoothBivariateSpline',
'RectBivariateSpline']
import warnings
from numpy import zeros, concatenate, alltrue, ravel, all, diff
import dfitpack
################ Univariate spline ####################
_curfit_messages = {1:"""
The required storage space exceeds the available storage space, as
specified by the parameter nest: nest too small. If nest is already
large (say nest > m/2), it may also indicate that s is too small.
The approximation returned is the weighted least-squares spline
according to the knots t[0],t[1],...,t[n-1]. (n=nest) the parameter fp
gives the corresponding weighted sum of squared residuals (fp>s).
""",
2:"""
A theoretically impossible result was found during the iteration
proces for finding a smoothing spline with fp = s: s too small.
There is an approximation returned but the corresponding weighted sum
of squared residuals does not satisfy the condition abs(fp-s)/s < tol.""",
3:"""
The maximal number of iterations maxit (set to 20 by the program)
allowed for finding a smoothing spline with fp=s has been reached: s
too small.
There is an approximation returned but the corresponding weighted sum
of squared residuals does not satisfy the condition abs(fp-s)/s < tol.""",
10:"""
Error on entry, no approximation returned. The following conditions
must hold:
xb<=x[0]<x[1]<...<x[m-1]<=xe, w[i]>0, i=0..m-1
if iopt=-1:
xb<t[k+1]<t[k+2]<...<t[n-k-2]<xe"""
}
class UnivariateSpline(object):
    """ Univariate spline s(x) of degree k on the interval
    [xb,xe] calculated from a given set of data points
    (x,y).

    Can include least-squares fitting.

    See also:

    splrep, splev, sproot, spint, spalde - an older wrapping of FITPACK
    BivariateSpline - a similar class for bivariate spline interpolation
    """

    def __init__(self, x, y, w=None, bbox = [None]*2, k=3, s=None):
        """
        Input:
          x,y  - 1-d sequences of data points (x must be
                 in strictly ascending order)

        Optional input:
          w    - positive 1-d sequence of weights
          bbox - 2-sequence specifying the boundary of
                 the approximation interval.
                 By default, bbox=[x[0],x[-1]]
          k=3  - degree of the univariate spline.
          s    - positive smoothing factor defined for
                 estimation condition:
                   sum((w[i]*(y[i]-s(x[i])))**2,axis=0) <= s
                 Default s=len(w) which should be a good value
                 if 1/w[i] is an estimate of the standard
                 deviation of y[i].
        """
        #_data == x,y,w,xb,xe,k,s,n,t,c,fp,fpint,nrdata,ier
        data = dfitpack.fpcurf0(x,y,k,w=w,
                                xb=bbox[0],xe=bbox[1],s=s)
        if data[-1]==1:
            # nest too small, setting to maximum bound
            data = self._reset_nest(data)
        self._data = data
        self._reset_class()

    def _reset_class(self):
        # Inspect the FITPACK status code ier and, when the result is an
        # interpolating or least-squares spline, rebrand the instance as
        # the matching subclass; warn on genuine errors.
        data = self._data
        n,t,c,k,ier = data[7],data[8],data[9],data[5],data[-1]
        self._eval_args = t[:n],c[:n],k
        if ier==0:
            # the spline returned has a residual sum of squares fp
            # such that abs(fp-s)/s <= tol with tol a relative
            # tolerance set to 0.001 by the program
            pass
        elif ier==-1:
            # the spline returned is an interpolating spline
            self._set_class(InterpolatedUnivariateSpline)
        elif ier==-2:
            # the spline returned is the weighted least-squares
            # polynomial of degree k. In this extreme case fp gives
            # the upper bound fp0 for the smoothing factor s.
            self._set_class(LSQUnivariateSpline)
        else:
            # error
            if ier==1:
                self._set_class(LSQUnivariateSpline)
            message = _curfit_messages.get(ier,'ier=%s' % (ier))
            warnings.warn(message)

    def _set_class(self, cls):
        self._spline_class = cls
        if self.__class__ in (UnivariateSpline, InterpolatedUnivariateSpline,
                              LSQUnivariateSpline):
            self.__class__ = cls
        else:
            # It's an unknown subclass -- don't change class. cf. #731
            pass

    def _reset_nest(self, data, nest=None):
        # Re-run fpcurf1 with enlarged workspace arrays (size nest) after
        # a first pass reported that storage was too small.
        n = data[10]
        if nest is None:
            k,m = data[5],len(data[0])
            nest = m+k+1 # this is the maximum bound for nest
        else:
            assert n<=nest,"nest can only be increased"
        t,c,fpint,nrdata = data[8].copy(),data[9].copy(),\
                           data[11].copy(),data[12].copy()
        t.resize(nest)
        c.resize(nest)
        fpint.resize(nest)
        nrdata.resize(nest)
        args = data[:8] + (t,c,n,fpint,nrdata,data[13])
        data = dfitpack.fpcurf1(*args)
        return data

    def set_smoothing_factor(self, s):
        """ Continue spline computation with the given smoothing
        factor s and with the knots found at the last call.
        """
        data = self._data
        if data[6]==-1:
            # BUG FIX: the two adjacent literals previously joined as
            # "...forLSQ spline" (missing space).
            warnings.warn('smoothing factor unchanged for '
                          'LSQ spline with fixed knots')
            return
        args = data[:6] + (s,) + data[7:]
        data = dfitpack.fpcurf1(*args)
        if data[-1]==1:
            # nest too small, setting to maximum bound
            data = self._reset_nest(data)
        self._data = data
        self._reset_class()

    def __call__(self, x, nu=None):
        """ Evaluate spline (or its nu-th derivative) at positions x.
        Note: x can be unordered but the evaluation is more efficient
        if x is (partially) ordered.
        """
        if nu is None:
            return dfitpack.splev(*(self._eval_args+(x,)))
        return dfitpack.splder(nu=nu,*(self._eval_args+(x,)))

    def get_knots(self):
        """ Return the positions of (boundary and interior)
        knots of the spline.
        """
        data = self._data
        k,n = data[5],data[7]
        return data[8][k:n-k]

    def get_coeffs(self):
        """Return spline coefficients."""
        data = self._data
        k,n = data[5],data[7]
        return data[9][:n-k-1]

    def get_residual(self):
        """Return weighted sum of squared residuals of the spline
        approximation: sum ((w[i]*(y[i]-s(x[i])))**2,axis=0)
        """
        return self._data[10]

    def integral(self, a, b):
        """ Return definite integral of the spline between two
        given points.
        """
        return dfitpack.splint(*(self._eval_args+(a,b)))

    def derivatives(self, x):
        """ Return all derivatives of the spline at the point x."""
        d,ier = dfitpack.spalde(*(self._eval_args+(x,)))
        # repr() replaces the Python-2-only backquote syntax.
        assert ier==0, repr(ier)
        return d

    def roots(self):
        """ Return the zeros of the spline.

        Restriction: only cubic splines are supported by fitpack.
        """
        k = self._data[5]
        if k==3:
            z,m,ier = dfitpack.sproot(*self._eval_args[:2])
            assert ier==0, repr(ier)
            return z[:m]
        # Call form replaces the Python-2-only 'raise E, msg' statement.
        raise NotImplementedError('finding roots unsupported for '
                                  'non-cubic splines')
class InterpolatedUnivariateSpline(UnivariateSpline):
    """ Interpolated univariate spline approximation. Identical to
    UnivariateSpline with less error checking.

    """

    def __init__(self, x, y, w=None, bbox = [None]*2, k=3):
        """
        Input:
          x,y  - 1-d sequences of data points (x must be
                 in strictly ascending order)

        Optional input:
          w    - positive 1-d sequence of weights
          bbox - 2-sequence specifying the boundary of
                 the approximation interval.
                 By default, bbox=[x[0],x[-1]]
          k=3  - degree of the univariate spline.
        """
        # A smoothing factor of zero forces the spline through every
        # data point, i.e. an interpolating spline.
        #_data == x,y,w,xb,xe,k,s,n,t,c,fp,fpint,nrdata,ier
        self._data = dfitpack.fpcurf0(x, y, k, w=w,
                                      xb=bbox[0], xe=bbox[1], s=0)
        self._reset_class()
class LSQUnivariateSpline(UnivariateSpline):
    """ Weighted least-squares univariate spline
    approximation. Appears to be identical to UnivariateSpline with
    more error checking.

    """

    def __init__(self, x, y, t, w=None, bbox = [None]*2, k=3):
        """
        Input:
          x,y  - 1-d sequences of data points (x must be
                 in strictly ascending order)
          t    - 1-d sequence of the positions of user-defined
                 interior knots of the spline (t must be in strictly
                 ascending order and bbox[0]<t[0]<...<t[-1]<bbox[-1])

        Optional input:
          w    - positive 1-d sequence of weights
          bbox - 2-sequence specifying the boundary of
                 the approximation interval.
                 By default, bbox=[x[0],x[-1]]
          k=3  - degree of the univariate spline.

        Raises ValueError if the interior knots do not satisfy the
        Schoenberg-Whitney conditions.
        """
        #_data == x,y,w,xb,xe,k,s,n,t,c,fp,fpint,nrdata,ier
        xb=bbox[0]
        xe=bbox[1]
        if xb is None: xb = x[0]
        if xe is None: xe = x[-1]
        # Pad the interior knots with (k+1)-fold boundary knots, as the
        # b-spline representation requires.
        t = concatenate(([xb]*(k+1),t,[xe]*(k+1)))
        n = len(t)
        if not alltrue(t[k+1:n-k]-t[k:n-k-1] > 0,axis=0):
            # Call form replaces the Python-2-only 'raise E, msg' form.
            raise ValueError('Interior knots t must satisfy '
                             'Schoenberg-Whitney conditions')
        data = dfitpack.fpcurfm1(x,y,k,t,w=w,xb=xb,xe=xe)
        self._data = data[:-3] + (None,None,data[-1])
        self._reset_class()
################ Bivariate spline ####################
_surfit_messages = {1:"""
The required storage space exceeds the available storage space: nxest
or nyest too small, or s too small.
The weighted least-squares spline corresponds to the current set of
knots.""",
2:"""
A theoretically impossible result was found during the iteration
process for finding a smoothing spline with fp = s: s too small or
badly chosen eps.
Weighted sum of squared residuals does not satisfy abs(fp-s)/s < tol.""",
3:"""
the maximal number of iterations maxit (set to 20 by the program)
allowed for finding a smoothing spline with fp=s has been reached:
s too small.
Weighted sum of squared residuals does not satisfy abs(fp-s)/s < tol.""",
4:"""
No more knots can be added because the number of b-spline coefficients
(nx-kx-1)*(ny-ky-1) already exceeds the number of data points m:
either s or m too small.
The weighted least-squares spline corresponds to the current set of
knots.""",
5:"""
No more knots can be added because the additional knot would (quasi)
coincide with an old one: s too small or too large a weight to an
inaccurate data point.
The weighted least-squares spline corresponds to the current set of
knots.""",
10:"""
Error on entry, no approximation returned. The following conditions
must hold:
xb<=x[i]<=xe, yb<=y[i]<=ye, w[i]>0, i=0..m-1
If iopt==-1, then
xb<tx[kx+1]<tx[kx+2]<...<tx[nx-kx-2]<xe
yb<ty[ky+1]<ty[ky+2]<...<ty[ny-ky-2]<ye""",
-3:"""
The coefficients of the spline returned have been computed as the
minimal norm least-squares solution of a (numerically) rank deficient
system (deficiency=%i). If deficiency is large, the results may be
inaccurate. Deficiency may strongly depend on the value of eps."""
}
class BivariateSpline(object):
    """ Bivariate spline s(x,y) of degrees kx and ky on the rectangle
    [xb,xe] x [yb, ye] calculated from a given set of data points
    (x,y,z).

    See also:

    bisplrep, bisplev - an older wrapping of FITPACK
    UnivariateSpline - a similar class for univariate spline interpolation
    SmoothUnivariateSpline - to create a BivariateSpline through the
                             given points
    LSQUnivariateSpline - to create a BivariateSpline using weighted
                          least-squares fitting
    """

    def get_residual(self):
        """ Return weighted sum of squared residuals of the spline
        approximation: sum ((w[i]*(z[i]-s(x[i],y[i])))**2,axis=0)
        """
        return self.fp

    def get_knots(self):
        """ Return a tuple (tx,ty) where tx,ty contain knots positions
        of the spline with respect to x-, y-variable, respectively.
        The position of interior and additional knots are given as
          t[k+1:-k-1] and t[:k+1]=b, t[-k-1:]=e, respectively.
        """
        return self.tck[:2]

    def get_coeffs(self):
        """ Return spline coefficients."""
        return self.tck[2]

    def __call__(self,x,y,mth='array'):
        """ Evaluate spline at positions x,y."""
        if mth=='array':
            tx,ty,c = self.tck[:3]
            kx,ky = self.degrees
            z,ier = dfitpack.bispev(tx,ty,c,kx,ky,x,y)
            # repr() replaces the Python-2-only backquote syntax.
            assert ier==0,'Invalid input: ier='+repr(ier)
            return z
        raise NotImplementedError

    def ev(self, xi, yi):
        """
        Evaluate spline at points (x[i], y[i]), i=0,...,len(x)-1
        """
        tx,ty,c = self.tck[:3]
        kx,ky = self.degrees
        zi,ier = dfitpack.bispeu(tx,ty,c,kx,ky,xi,yi)
        assert ier==0, 'Invalid input: ier='+repr(ier)
        return zi

    def integral(self, xa, xb, ya, yb):
        """
        Evaluate the integral of the spline over area [xa,xb] x [ya,yb].

        Parameters
        ----------
        xa, xb : float
            The end-points of the x integration interval.
        ya, yb : float
            The end-points of the y integration interval.

        Returns
        -------
        integ : float
            The value of the resulting integral.
        """
        tx,ty,c = self.tck[:3]
        kx,ky = self.degrees
        return dfitpack.dblint(tx,ty,c,kx,ky,xa,xb,ya,yb)
class SmoothBivariateSpline(BivariateSpline):
    """ Smooth bivariate spline approximation.

    See also:

    bisplrep, bisplev - an older wrapping of FITPACK
    UnivariateSpline - a similar class for univariate spline interpolation
    LSQUnivariateSpline - to create a BivariateSpline using weighted
                          least-squares fitting
    """

    def __init__(self, x, y, z, w=None,
                 bbox = [None]*4, kx=3, ky=3, s=None, eps=None):
        """
        Input:
          x,y,z - 1-d sequences of data points (order is not
                  important)

        Optional input:
          w         - positive 1-d sequence of weights
          bbox      - 4-sequence specifying the boundary of
                      the rectangular approximation domain.
                      By default, bbox=[min(x,tx),max(x,tx),
                                        min(y,ty),max(y,ty)]
          kx,ky=3,3 - degrees of the bivariate spline.
          s         - positive smoothing factor defined for
                      estimation condition:
                        sum((w[i]*(z[i]-s(x[i],y[i])))**2,axis=0) <= s
                      Default s=len(w) which should be a good value
                      if 1/w[i] is an estimate of the standard
                      deviation of z[i].
          eps       - a threshold for determining the effective rank
                      of an over-determined linear system of
                      equations. 0 < eps < 1, default is 1e-16.
        """
        xb, xe, yb, ye = bbox
        nx, tx, ny, ty, c, fp, wrk1, ier = dfitpack.surfit_smth(
            x, y, z, w, xb, xe, yb, ye, kx, ky, s=s, eps=eps, lwrk2=1)
        # ier of 0, -1 or -2 is a normal return; anything else gets the
        # FITPACK diagnostic text as a warning.
        if ier not in [0, -1, -2]:
            warnings.warn(_surfit_messages.get(ier, 'ier=%s' % (ier)))
        self.fp = fp
        self.tck = tx[:nx], ty[:ny], c[:(nx-kx-1)*(ny-ky-1)]
        self.degrees = kx, ky
class LSQBivariateSpline(BivariateSpline):
    """ Weighted least-squares spline approximation.

    See also:

    bisplrep, bisplev - an older wrapping of FITPACK
    UnivariateSpline - a similar class for univariate spline interpolation
    SmoothUnivariateSpline - to create a BivariateSpline through the
                             given points
    """

    def __init__(self, x, y, z, tx, ty, w=None,
                 bbox = [None]*4,
                 kx=3, ky=3, eps=None):
        """
        Input:
          x,y,z - 1-d sequences of data points (order is not
                  important)
          tx,ty - strictly ordered 1-d sequences of knots
                  coordinates.

        Optional input:
          w         - positive 1-d sequence of weights
          bbox      - 4-sequence specifying the boundary of
                      the rectangular approximation domain.
                      By default, bbox=[min(x,tx),max(x,tx),
                                        min(y,ty),max(y,ty)]
          kx,ky=3,3 - degrees of the bivariate spline.
          eps       - a threshold for determining the effective rank
                      of an over-determined linear system of
                      equations. 0 < eps < 1, default is 1e-16.
        """
        # Embed the user-supplied interior knots into full-length knot
        # vectors, leaving kx+1 (resp. ky+1) slots at each end for the
        # boundary knots.
        nknots_x = 2*kx + 2 + len(tx)
        nknots_y = 2*ky + 2 + len(ty)
        tx1 = zeros((nknots_x,), float)
        ty1 = zeros((nknots_y,), float)
        tx1[kx+1:nknots_x-kx-1] = tx
        ty1[ky+1:nknots_y-ky-1] = ty

        xb, xe, yb, ye = bbox
        tx1, ty1, c, fp, ier = dfitpack.surfit_lsq(x, y, z, tx1, ty1, w,
                                                   xb, xe, yb, ye,
                                                   kx, ky, eps, lwrk2=1)
        if ier > 10:
            # NOTE(review): a large positive ier appears to report the
            # required size of the second workspace — retry with it.
            tx1, ty1, c, fp, ier = dfitpack.surfit_lsq(x, y, z, tx1, ty1, w,
                                                       xb, xe, yb, ye,
                                                       kx, ky, eps,
                                                       lwrk2=ier)
        if ier not in [0, -1, -2]:
            if ier < -2:
                deficiency = (nknots_x - kx - 1)*(nknots_y - ky - 1) + ier
                message = _surfit_messages.get(-3) % (deficiency)
            else:
                message = _surfit_messages.get(ier, 'ier=%s' % (ier))
            warnings.warn(message)
        self.fp = fp
        self.tck = tx1, ty1, c
        self.degrees = kx, ky
class RectBivariateSpline(BivariateSpline):
    """ Bivariate spline approximation over a rectangular mesh.

    Can be used for both smoothing or interpolating data.

    See also:

    SmoothBivariateSpline - a smoothing bivariate spline for scattered data
    bisplrep, bisplev - an older wrapping of FITPACK
    UnivariateSpline - a similar class for univariate spline interpolation
    """

    def __init__(self, x, y, z,
                 bbox = [None]*4, kx=3, ky=3, s=0):
        """
        Input:
          x,y - 1-d sequences of coordinates in strictly ascending order
          z   - 2-d array of data with shape (x.size,y.size)

        Optional input:
          bbox      - 4-sequence specifying the boundary of
                      the rectangular approximation domain.
                      By default, bbox=[min(x,tx),max(x,tx),
                                        min(y,ty),max(y,ty)]
          kx,ky=3,3 - degrees of the bivariate spline.
          s         - positive smoothing factor defined for
                      estimation condition:
                        sum((w[i]*(z[i]-s(x[i],y[i])))**2,axis=0) <= s
                      Default s=0 which is for interpolation

        Raises TypeError when x/y are not strictly increasing or z has
        an incompatible shape.
        """
        # All raises below use the call form; the original
        # 'raise TypeError, msg' statement is invalid in Python 3.
        x,y = ravel(x),ravel(y)
        if not all(diff(x) > 0.0):
            raise TypeError('x must be strictly increasing')
        if not all(diff(y) > 0.0):
            raise TypeError('y must be strictly increasing')
        if not ((x.min() == x[0]) and (x.max() == x[-1])):
            raise TypeError('x must be strictly ascending')
        if not ((y.min() == y[0]) and (y.max() == y[-1])):
            raise TypeError('y must be strictly ascending')
        if not x.size == z.shape[0]:
            raise TypeError('x dimension of z must have same number of '
                            'elements as x')
        if not y.size == z.shape[1]:
            raise TypeError('y dimension of z must have same number of '
                            'elements as y')
        z = ravel(z)
        xb,xe,yb,ye = bbox
        nx,tx,ny,ty,c,fp,ier = dfitpack.regrid_smth(x,y,z,
                                                    xb,xe,yb,ye,
                                                    kx,ky,s)
        if ier in [0,-1,-2]: # normal return
            pass
        else:
            message = _surfit_messages.get(ier,'ier=%s' % (ier))
            warnings.warn(message)
        self.fp = fp
        self.tck = tx[:nx],ty[:ny],c[:(nx-kx-1)*(ny-ky-1)]
        self.degrees = kx,ky
|
A STUDENT has been stabbed by a robber who stole her bag in Manchester city centre. The 22-year-old woman felt a sharp pain in her leg as she walked along Grafton Street on Wednesday night. The attacker then demanded her bag and as she tried to remove her headphones he stabbed her again before running off towards Oxford Road.
BANGLADESHI POLICE investigating an arms haul have linked their findings to a charity in Manchester. The weapons cache was found at an Islamic school, or madrassa, allegedly run by Dr Faisal Mostafa from the Green Crescent charity in the city. Dr Mostafa, head of the charity, has previously been tried for plotting explosions in the UK. The UK's Charity Commission is investigating the allegations.
STOCKPORT IS TO LOSE its postmark as well as its sorting office. Hazel Grove MP, Andrew Stunell was told by Royal Mail that the postmark will be replaced with a Manchester postmark, coinciding with the move to the Manchester Mail Centre. In protest, Andrew Stunell said: “Stopfordians have a strong local identity, and take pride in their heritage. This is the first small step on a slippery slope – maybe the postcode will be next?”
RUGBY LEAGUE STAR, Stuart Reardon, 27, is facing a jail sentence for assaulting his wife. Reardon, a full-back for Warrington Wolves and Great Britain, attacked his estranged wife after finding out she was seeing another man. Fellow international Leon Pryce, 27, is also facing jail after admitting assaulting her new boyfriend.
|
# Copyright (c) 2015-2019 Patricio Cubillos and contributors.
# pytips is open-source software under the MIT license (see LICENSE).
__all__ = ["tips", "iso", "molID", "molname", "to_file"]
import sys
import os
import numpy as np
from numpy.core.numeric import isscalar
libdir = os.path.join(os.path.dirname(os.path.realpath(__file__)), "lib")
sys.path.append(libdir)
import ctips as ct
# HITRAN 2012 molecule names indexed by HITRAN molecule ID; index 0 is an
# empty placeholder so _molname[ID] maps a 1-based ID straight to its name.
_molname = np.array(["", "H2O", "CO2", "O3", "N2O", "CO",
                     "CH4", "O2", "NO", "SO2", "NO2",
                     "NH3", "HNO3", "OH", "HF", "HCl",
                     "HBr", "HI", "ClO", "OCS", "H2CO",
                     "HOCl", "N2", "HCN", "CH3Cl", "H2O2",
                     "C2H2", "C2H6", "PH3", "COF2", "SF6",
                     "H2S", "HCOOH", "HO2", "O", "ClONO2",
                     "NO+", "HOBr", "C2H4", "CH3OH", "CH3Br",
                     "CH3CN", "CF4", "C4H2", "HC3N", "H2",
                     "CS", "SO3", "C2N2", "SO", "C3H4",
                     "CH3", "CS2"])
def tips(molID, isoID, temp):
  """
  Evaluate the partition function for the given isotope(s) at the given
  temperature(s). This is a wrapper of ctips.tips.

  Parameters
  ----------
  molID: Scalar or iterable
     The molecular ID as given by HITRAN 2012.
  isoID: Scalar or iterable
     The isotope ID (AFGL) as given by HITRAN 2012.
  temp: Scalar or iterable
     Temperature a which to evaluate the partition function.

  Returns
  -------
  The partition-function values as computed by ctips.tips.

  Raises
  ------
  ValueError: When molID, isoID, and temp have incompatible lengths.

  Notes
  -----
  - The molID and isoID are casted into an integer ndarray data types.
  - The temp is casted into a double ndarray data type.
  - If the arguments have different sizes, the code resizes them to
    a same size, unless they have incompatible sizes.
  """
  # Check scalar vs iterable, turn into iterable:
  if isscalar(molID):
    molID = [molID]
  if isscalar(isoID):
    isoID = [isoID]
  if isscalar(temp):
    temp = [temp]

  # Turn them numpy arrays (builtin int: the np.int alias was removed in
  # NumPy 1.24):
  molID = np.asarray(molID, int)
  isoID = np.asarray(isoID, int)
  temp  = np.asarray(temp, np.double)

  # Set them to the same size. BUG FIX: the original called sys.exit(0)
  # here, silently terminating the interpreter with a *success* exit
  # status on invalid input; raise a descriptive error instead:
  if len(isoID) != len(temp):
    if len(isoID) == 1:
      isoID = np.repeat(isoID, len(temp))
    elif len(temp) == 1:
      temp = np.repeat(temp, len(isoID))
    else:
      raise ValueError("Incompatible lengths for isoID and temp.")
  if len(molID) != len(isoID):
    if len(molID) != 1:
      raise ValueError("Incompatible lengths for molID and isoID.")
    molID = np.repeat(molID, len(isoID))

  return ct.tips(molID, isoID, temp)
def iso(mID):
  """
  Get the list of isotope IDs for the given molecule ID.

  Parameters
  ----------
  mID: String or integer
     Molecule name (if string) or molecule ID.

  Return
  ------
  isoID: 1D integer ndarray
     Isotopes ID for molecule mID.
  """
  # A molecule given by name is first translated into its numeric ID:
  if isinstance(mID, str):
    mID = molID(mID)
  return ct.iso(int(mID))
def molID(mname):
  """
  Get the molecule ID for the requested molecule.

  Parameters
  ----------
  mname: String
     Name of the molecule.

  Return
  ------
  mID: Integer
     The molecule's ID, or None if mname is not a known molecule.
  """
  hits = np.where(_molname == mname)[0]
  if len(hits) == 0:
    print("Molecule '{:s}' is not in list.".format(mname))
    return None
  return hits[0]
def molname(mID):
  """
  Get the molecule name for the requested molecule ID.

  Parameters
  ----------
  mID: Integer
     The molecule's ID.

  Return
  ------
  mname: String
     Name of the molecule, or None if mID is out of range.
  """
  # Valid IDs run from 1 to len(_molname)-1 (index 0 is a placeholder),
  # so the bound no longer needs manual updating (was hard-coded to 52)
  # when new molecules are appended to _molname.
  if (mID < 1) or (mID > len(_molname) - 1):
    print("Molecule ID '{:d}' is invalid.".format(mID))
    return None
  return _molname[mID]
def to_file(filename, molname, temp):
  """
  Compute partition-function values for all isotopes of a given
  molecule over a temperature array, and save to file.

  Parameters
  ----------
  filename: String
     Output partition-function file.
  molname: String
     Name of the molecule.
  temp: 1D float ndarray
     Array of temperatures.

  Example
  -------
  >>> import pytips as p
  >>> temp = np.linspace(70, 3000, 294)
  >>> molname = "CO2"
  >>> p.to_file("CO2_tips.dat", molname, temp)
  """
  # Evaluate the partition function for every isotope of the molecule:
  isoID = iso(molname)
  niso = len(isoID)
  ntemp = len(temp)
  mID = molID(molname)
  data = np.zeros((niso, ntemp), np.double)
  for idx in np.arange(niso):
    data[idx] = tips(mID, isoID[idx], temp)

  # Write header, isotope list, and one temperature row per line:
  with open(filename, "w") as fout:
    fout.write(
        "# Tabulated {:s} partition-function data from TIPS.\n\n".format(molname))
    fout.write("@ISOTOPES\n ")
    fout.write("".join(" {:10s}".format(str(ID)) for ID in isoID))
    fout.write("\n\n")
    fout.write("# Temperature (K), partition function for each isotope:\n")
    fout.write("@DATA\n")
    for i in np.arange(ntemp):
      row = " {:7.1f} ".format(temp[i])
      row += "".join(" {:10.4e}".format(data[j,i]) for j in np.arange(niso))
      fout.write(row + "\n")
|
New Kia Cerato for sale from Laith Al Obaidi Cars - Auto Market in Dubai. Grey/Silver 2019 model. The car has automatic gearbox, 4 cylinder engine, 15″ wheels and grey interior. GCC specs.
|
import logging
import struct
_LOGGER = logging.getLogger(__name__)
# Catalog of Souliss "typical" type codes.
# Each entry maps the type byte to:
#   desc       -- human-readable description,
#   size       -- number of state bytes the typical occupies,
#   name       -- (optional) friendly device name,
#   state_desc -- (T1n family) raw state byte -> textual state,
#   units      -- (T5n family) measurement unit of the decoded value.
typical_types = {
    0x11: {
        "desc": "T11: ON/OFF Digital Output with Timer Option", "size": 1,
        "name": "Switch Timer",
        "state_desc": { 0x00: "off",
                        0x01: "on"}
        },
    0x12: {"desc": "T12: ON/OFF Digital Output with AUTO mode",
           "size": 1,
           "name": "Switch auto",
           "state_desc": { 0x00: "off",
                           0x01: "on",
                           0xF0: "on/auto",
                           0xF1: "off/auto"
                         }
           },
    0x13: {"desc": "T13: Digital Input Value",
           "size": 1,
           "state_desc": { 0x00: "off",
                           0x01: "on"}
           },
    0x14: {"desc": "T14: Pulse Digital Output",
           "size": 1,
           "name": "Switch",
           "state_desc": { 0x00: "off",
                           0x01: "on"}
           },
    0x15: {"desc": "T15: RGB Light",
           "size": 2,
           "state_desc": { 0x00: "off",
                           0x01: "on"}
           },
    0x16: {"desc": "T16: RGB LED Strip",
           "size": 4,
           # was { 0x00: "on" } -- every other typical maps 0x00 to "off"
           "state_desc": { 0x00: "off",
                           0x01: "on"}
           },
    0x18: {"desc": "T18: ON/OFF Digital Output (Step Relay)",
           "size": 1,
           "state_desc": { 0x00: "off",
                           0x01: "on"}
           },
    0x19: {"desc": "T19: Single Color LED Strip",
           "size": 2,
           "state_desc": { 0x00: "off",
                           0x01: "on"}
           },
    0x1A: {"desc": "T1A: Digital Input Pass Through",
           "size": 1,
           "state_desc": { 0x00: "off",
                           0x01: "on"}
           },
    0x1B: {"desc": "T1B: Position Constrained ON/OFF Digital Output", "size": 1},
    0x21: {"desc": "T21: Motorized devices with limit switches", "size": 1},
    0x22: {"desc": "T22: Motorized devices with limit switches and middle position", "size": 1},
    0x31: {"desc": "T31: Temperature control with cooling and heating mode", "size": 5},
    0x32: {"desc": "T32: Air Conditioner", "size": 2},
    0x41: {"desc": "T41: Anti-theft integration -Main-", "size": 1},
    0x42: {"desc": "T42: Anti-theft integration -Peer-", "size": 1},
    0x51: {"desc": "T51: Analog input, half-precision floating point",
           "size": 2,
           "units": "units"},
    0x52: {"desc": "T52: Temperature measure (-20, +50) C",
           "size": 2,
           "units": "C"},
    0x53: {"desc": "T53: Humidity measure (0, 100) ",
           "size": 2,
           "units": "%"},
    0x54: {"desc": "T54: Light Sensor (0, 40) kLux",
           "size": 2,
           "units": "kLux"},
    0x55: {"desc": "T55: Voltage (0, 400) V",
           "size": 2,
           "units": "V"},
    0x56: {"desc": "T56: Current (0, 25) A",
           "size": 2,
           "units": "A"},
    0x57: {"desc": "T57: Power (0, 6500) W",
           "size": 2,
           "units": "W"},
    0x58: {"desc": "T58: Pressure measure (0, 1500) hPa",
           "size": 2,
           "units": "hPa"},
    0x61: {"desc": "T61: Analog setpoint, half-precision floating point", "size": 2},
    0x62: {"desc": "T62: Temperature measure (-20, +50) C", "size": 2},
    0x63: {"desc": "T63: Humidity measure (0, 100) ", "size": 2},
    0x64: {"desc": "T64: Light Sensor (0, 40) kLux", "size": 2},
    0x65: {"desc": "T65: Voltage (0, 400) V", "size": 2},
    0x66: {"desc": "T66: Current (0, 25) A", "size": 2},
    0x67: {"desc": "T67: Power (0, 6500) W", "size": 2},
    0x68: {"desc": "T68: Pressure measure (0, 1500) hPa", "size": 2}
}
class Typical(object):
    """Base class for a Souliss "typical" (one logical device slot).

    Holds the raw state bytes, the typical's position inside the network
    (``node``/``slot``/``index``) and listener callbacks that are invoked
    whenever :meth:`update` changes the state.
    """
    def __init__(self, ttype):
        self.ttype = ttype
        self.description = typical_types[ttype]['desc']
        self.size = typical_types[ttype]['size']
        self.slot = -1   # undefined until assigned to a slot
        self.node = -1   # undefined until assigned to a slot
        # initial state; it will be overwritten with the first update
        self.state = b'\x00\x00\x00\x00\x00\x00\x00'
        self.listeners = []
    def add_listener(self, callback):
        """Register *callback* to be called with this typical on each state change."""
        self.listeners.append(callback)
    @staticmethod
    def factory_type(ttype):
        """Instantiate the subclass that matches the typical type code."""
        if ttype in [0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x18, 0x19, 0x1A, 0x1B]:
            return TypicalT1n(ttype)
        elif ttype in [0x51, 0x52, 0x53, 0x54, 0x55, 0x56, 0x57, 0x58]:
            return TypicalT5n(ttype)
        else:
            return TypicalNotImplemented(ttype)
    def update(self, value):
        """Truncate *value* to this typical's size, store it if it changed,
        and notify the registered listeners."""
        value = value[:self.size]
        if value != self.state:
            # Capture the previous state before overwriting it: the original
            # logged *after* the assignment (old and new were identical) and
            # its format string had 5 placeholders for 4 arguments, so the
            # log call raised an (ignored) formatting error.
            old_state = self.state
            self.state = value
            self.state_description = value
            _LOGGER.info("Node %d: Typical %d - %s updated from %s to %s" % (
                self.node, self.index,
                self.description,
                ':'.join("{:02x}".format(c) for c in old_state[:self.size]),
                ':'.join("{:02x}".format(c) for c in value[:self.size])))
            for listener in self.listeners:
                listener(self)
    def set_node_slot_index(self, node, slot, index):
        """Record the typical's position: owning node, slot in the node, flat index."""
        self.node = node
        self.slot = slot
        self.index = index
    def to_dict(self):
        """Return a small JSON-friendly summary of this typical."""
        return {'ddesc': self.description,
                'slo': self.slot,
                'typ': self.ttype}
class TypicalT1n(Typical):
    """T1n family: digital inputs/outputs with a textual state description."""
    def __init__(self, ttype):
        super(TypicalT1n,self).__init__(ttype)
        # map: raw state byte -> human-readable description
        self.state_desc = typical_types[ttype]['state_desc']
    def update(self, value):
        """Store the new state and derive its textual description."""
        value = value[:self.size]
        if value != self.state:
            self.state = value
            if self.size > 1: # Raw description for Typicals T15, T16 and T19
                self.state_description = ':'.join("{:02x}".format(c) for c in self.state)
            else:
                if ord(value) in self.state_desc.keys():
                    self.state_description = self.state_desc[ord(value)]
                else:
                    _LOGGER.warning("Unknown value!")
                    self.state_description = "Unknown value!"
            _LOGGER.info("Node %d: Typical %d - %s updated to %s" % (self.node, self.index,
                         self.description,
                         self.state_description))
            for listener in self.listeners:
                listener(self)
    def send_command(self, command):
        """Apply a Souliss T1n command code to the local state.

        Only Toggle/OnCmd/OffCmd are handled; other codes are just logged.
        """
        # TODO: Handle different T1 behaviour
        if command == 0x01: # Toggle
            if self.state == chr(1):
                self.update(chr(0))
            else:
                self.update(chr(1))
        elif command == 0x02: # OnCmd
            # was update(chr(0)) -- inverted w.r.t. the Toggle branch above,
            # where "on" is chr(1): ON must set the state to 0x01
            self.update(chr(1))
        elif command == 0x04: # OffCmd
            # was update(chr(1)) -- inverted: OFF must set the state to 0x00
            self.update(chr(0))
        else:
            _LOGGER.debug('Command %x not implemented' % command)
class TypicalT5n(Typical):
    """T5n family: analog measurements encoded as half-precision floats."""
    def __init__(self, ttype):
        super(TypicalT5n,self).__init__(ttype)
        # measurement unit string (e.g. "C", "V", "hPa")
        self.units = typical_types[ttype]['units']
    def update(self, value):
        """Decode the 2-byte half-float payload and notify listeners on change."""
        new_state = value[:self.size]
        if new_state == self.state:
            return
        # 'e' = IEEE 754 half-precision (2 bytes)
        self.state_description = struct.unpack('e', new_state)[0]
        self.state = new_state
        _LOGGER.info("Node %d: Typical %d - %s updated to %s %s" % (self.node, self.index,
                     self.description,
                     self.state_description,
                     self.units))
        for listener in self.listeners:
            listener(self)
class TypicalNotImplemented(Typical):
    """Fallback for typical type codes that have no dedicated subclass.

    Logs a warning on creation and otherwise behaves like the base
    ``Typical`` (raw byte state, generic ``update``).
    """
    def __init__(self, ttype):
        _LOGGER.warning('Typical %x not implemented' % ttype)
        super(TypicalNotImplemented,self).__init__(ttype)
|
Published at Saturday, February 23rd, 2019 - 19:06:10 PM. Bedroom. By Margeaux Chauvin.
|
from hwt.code import If
from hwt.code_utils import rename_signal
from hwt.hdl.types.bits import Bits
from hwt.hdl.types.defs import BIT
from hwt.hdl.types.struct import HStruct
from hwt.interfaces.std import VectSignal, Signal
from hwt.interfaces.utils import addClkRstn
from hwt.pyUtils.arrayQuery import iter_with_last
from hwt.serializer.mode import serializeParamsUniq
from hwt.synthesizer.hObjList import HObjList
from hwt.synthesizer.interface import Interface
from hwt.synthesizer.param import Param
from hwt.synthesizer.unit import Unit
from hwtLib.amba.axis import AxiStream
from pyMathBitPrecise.bit_utils import mask
class UnalignedJoinRegIntf(Interface):
    """
    Output interface of one pipeline register of :class:`FrameJoinInputReg`.

    Exposes the register payload (data/keep/strb) together with the
    ``relict`` (partially consumed word) and ``last`` (end of frame) flags.

    .. hwt-autodoc::
    """
    def _config(self):
        # reuse the AxiStream parameters (DATA_WIDTH, USE_STRB, ...)
        AxiStream._config(self)
    def _declr(self):
        self.data = VectSignal(self.DATA_WIDTH)
        # one keep bit per data byte; keep == 0 means the word holds no valid bytes
        self.keep = VectSignal(self.DATA_WIDTH // 8)
        if self.USE_STRB:
            self.strb = VectSignal(self.DATA_WIDTH // 8)
        # flag for a partially consumed word
        self.relict = Signal()
        # flag for the end of frame
        self.last = Signal()
@serializeParamsUniq
class FrameJoinInputReg(Unit):
    """
    Pipeline of registers for AxiStream with keep mask and flushing

    Each stage stores data/keep/last plus a ``relict`` flag marking a
    partially consumed word; ``keep_masks`` select which bytes are consumed
    and ``ready`` shifts the whole pipeline.

    .. hwt-autodoc::
    """
    def _config(self):
        # number of pipeline register stages
        self.REG_CNT = Param(2)
        AxiStream._config(self)
        self.USE_KEEP = True
    def _declr(self):
        # keep signals are mandatory: emptiness is detected from keep == 0
        assert self.USE_KEEP
        addClkRstn(self)
        with self._paramsShared():
            self.dataIn = AxiStream()
            self.regs = HObjList(
                UnalignedJoinRegIntf()._m()
                for _ in range(self.REG_CNT))
        self.keep_masks = HObjList(
            VectSignal(self.DATA_WIDTH // 8)
            for _ in range(self.REG_CNT)
        )
        # used to shift whole register pipeline using input keep_mask
        self.ready = Signal()
        if self.ID_WIDTH or self.USER_WIDTH or self.DEST_WIDTH:
            raise NotImplementedError("It is not clear how id/user/dest"
                                      " should be managed between the frames")
    def _impl(self):
        mask_t = Bits(self.DATA_WIDTH // 8, force_vector=True)
        # payload stored by every pipeline stage
        data_fieds = [
            (Bits(self.DATA_WIDTH), "data"),
            (mask_t, "keep"), # valid= keep != 0
            (BIT, "relict"), # flag for partially consumed word
            (BIT, "last"), # flag for end of frame
        ]
        if self.USE_STRB:
            data_fieds.append((mask_t, "strb"),
            )
        data_t = HStruct(*data_fieds)
        # regs[0] connected to output as first, regs[-1] connected to input
        regs = [
            self._reg(f"r{r_i:d}", data_t, def_val={"keep": 0,
                                                    "last": 0,
                                                    "relict": 0})
            for r_i in range(self.REG_CNT)
        ]
        ready = self.ready
        keep_masks = self.keep_masks
        # per-stage flag: all kept bytes of the stage are consumed by its mask
        fully_consumed_flags = []
        for i, r in enumerate(regs):
            _fully_consumed = (r.keep & keep_masks[i])._eq(0)
            if i == 0:
                # output stage is consumed only while the consumer is ready
                _fully_consumed = _fully_consumed & self.ready
            fully_consumed_flags.append(rename_signal(self, _fully_consumed, f"r{i:d}_fully_consumed"))
        # NOTE(review): iter_with_last flags the final item of regs, i.e. the
        # stage fed directly from dataIn -- hence the name is_first_on_input_r.
        for i, (is_first_on_input_r, r) in enumerate(iter_with_last(regs)):
            keep_mask_all = mask(r.keep._dtype.bit_length())
            prev_keep_mask = self._sig(f"prev_keep_mask_{i:d}_tmp", r.keep._dtype)
            prev_last_mask = self._sig(f"prev_last_mask_{i:d}_tmp")
            # a stage is empty when it keeps no bytes and is not an end-of-frame relict
            is_empty = rename_signal(self, r.keep._eq(0) & ~(r.last & r.relict), f"r{i:d}_is_empty")
            if is_first_on_input_r:
                # is register connected directly to dataIn
                r_prev = self.dataIn
                If(r_prev.valid,
                   prev_keep_mask(keep_mask_all),
                   prev_last_mask(1)
                ).Else(
                    # flush (invalid input but the data can be dispersed
                    # in registers so we need to collapse it)
                    prev_keep_mask(0),
                    prev_last_mask(0),
                )
                if self.REG_CNT > 1:
                    next_r = regs[i - 1]
                    next_empty = next_r.keep._eq(0) & ~(next_r.relict & next_r.last)
                else:
                    next_empty = 0
                # shift everything when the output stage gets fully consumed
                whole_pipeline_shift = (ready & (regs[0].keep & self.keep_masks[0])._eq(0))
                r_prev.ready(is_empty  # last input reg empty
                             | whole_pipeline_shift
                             | next_empty)
            else:
                # stage fed from its successor register in the pipeline
                r_prev = regs[i + 1]
                prev_last_mask(1)
                If(is_empty,
                   # flush
                   prev_keep_mask(keep_mask_all),
                ).Else(
                    prev_keep_mask(keep_masks[i + 1]),
                )
            data_drive = [r.data(r_prev.data), ]
            if self.USE_STRB:
                data_drive.append(r.strb(r_prev.strb))
            fully_consumed = fully_consumed_flags[i]
            if i == 0:
                # last register in path
                If((ready & fully_consumed) | is_empty,
                   *data_drive,
                   r.keep(r_prev.keep & prev_keep_mask),
                   r.last(r_prev.last & prev_last_mask),
                   r.relict(
                       r_prev.valid & r_prev.keep._eq(0)
                       if is_first_on_input_r else
                       # [TODO] potentially it should not be keep[0] but fist keep with 1
                       r_prev.relict | (r_prev.last & (r_prev.keep[0] & ~keep_masks[i + 1][0] & ~fully_consumed_flags[i + 1]))
                   )
                ).Elif(ready,
                       # consume the masked bytes in place
                       r.keep(r.keep & keep_masks[i]),
                       r.relict(1), # became relict if there is some 1 in keep (== not fully consumed)
                )
            else:
                next_fully_consumed = fully_consumed_flags[i - 1]
                next_r = regs[i - 1]
                next_is_empty = next_r.keep._eq(0) & ~(next_r.relict & next_r.last)
                if is_first_on_input_r:
                    is_relict = r_prev.valid & r_prev.keep._eq(0)
                else:
                    prev_fully_consumed = fully_consumed_flags[i + 1]
                    is_relict = r_prev.relict | ~prev_fully_consumed
                If((ready & next_fully_consumed) | is_empty | next_is_empty,
                   *data_drive,
                   r.keep(r_prev.keep & prev_keep_mask),
                   r.last(r_prev.last & prev_last_mask),
                   r.relict(is_relict)
                )
        # expose the register contents on the output interfaces
        for rout, rin in zip(self.regs, regs):
            rout.data(rin.data)
            if self.USE_STRB:
                rout.strb(rin.strb)
            rout.keep(rin.keep)
            rout.relict(rin.relict)
            rout.last(rin.last)
|
Please call our claims line 24/7 at 1-877-437-5007 if you need to file a claim for faster service.
You may complete a claim on-line, but doing so may increase the response time.
Note Information on any additional injured persons will be taken when we contact you.
|
class BitSequence(object):
    """View an integer as an indexable, iterable sequence of bits.

    The logical length is padded up to a multiple of ``word_length`` bits
    (see ``__len__``); bit 0 is the least significant bit.
    """
    def __init__(self, byte_data, word_length=8):
        self.value = byte_data            # the underlying integer
        self.bits_per_word = word_length  # padding granularity for len()
    def _bitLen(self):
        # Number of significant bits only -- this ignores leading zeros
        # (e.g. 0b0000000010000000 -> 8); use len(self) for the padded length.
        length = 0
        temp_val = self.value
        while (temp_val):
            temp_val >>= 1
            length += 1
        return(length)
    def twos_complement(self):
        """Interpret the value as a signed two's-complement number of
        width ``len(self)`` and return it as a plain int."""
        val = int(self)
        bits = len(self)
        try:
            if self[bits-1] != 0:
                val = val - (1<<bits)
        except ValueError:
            # len(self) == 0: self[-1] is a negative shift -> ValueError;
            # zero stays zero.
            pass
        return val
    def __getitem__(self, val):
        # Shift/mask instead of the original bin()-string slicing; still
        # raises ValueError for a negative index (negative shift count).
        return (self.value >> val) & 1
    def __setitem__(self, key, val):
        try:
            bool(val)
        except:
            raise TypeError("Possible bit values should evaluate to True or False, not %s" % val)
        if val:
            # set bit 'key' to 1
            self.value |= 1 << key
        else:
            # set bit 'key' to 0
            self.value &= ~(1 << key)
    def __len__(self):
        # Significant bit count rounded up to a whole number of words
        # (0 stays 0, 8 -> 8, 9 -> 16 for 8-bit words).
        words = -(-self._bitLen() // self.bits_per_word)
        return words * self.bits_per_word
    def __str__(self):
        # zero-padded binary form, e.g. "0b10101010"
        return "0b%s" % bin(self.value)[2:].zfill(len(self))
    def __int__(self):
        return self.value
    def __iter__(self):
        # yield bits LSB-first, including the zero-padding up to len(self)
        for bit in range(len(self)):
            yield self[bit]
    def __invert__(self):
        # NOTE: returns a plain int (Python's ~), not a BitSequence
        return ~int(self)
    def __abs__(self):
        return int(self)
# Informal Python 2 demo/smoke test of BitSequence (uses print statements).
if __name__ == '__main__':
    bitseq = BitSequence(0b0000000010101010)
    print bitseq
    bitseq = BitSequence(0b0000100010101010)
    print bitseq
    print "First : %d Second : %d" % (bitseq[0], bitseq[1])
    bitseq[0] = 1
    bitseq[1] = 1
    print "First : %d Second : %d Twentieth : %d" % (bitseq[0], bitseq[1], bitseq[20])
    print bitseq
    bitseq[0] = True
    # falsy values (False, None) clear the bit; truthy values set it
    bitseq[1] = False
    bitseq[5] = None
    bitseq[6] = 1
    bitseq[7] = 1
    bitseq[20] = 1
    print "First : %d Second : %d" % (bitseq[0], bitseq[1])
    print bitseq
    bitseq1 = BitSequence(0b01)
    bitseq2 = BitSequence(0b10)
    # NOTE(review): % binds tighter than ==, so the next two lines format
    # first and then compare the resulting *string* with a BitSequence --
    # they print False/True, not the intended "Equal : <bool>" text.
    print "Equal : %s" % bitseq1 == bitseq2
    print "Not Equal : %s" % bitseq1 != bitseq2
    # NOTE(review): BitSequence defines no __gt__/__lt__, so these fall back
    # to default object comparison semantics.
    print "%d Greater than %d : %s" % (bitseq1, bitseq2, bitseq1 > bitseq2)
    print "%d Less than %d : %s" % (bitseq1, bitseq2, bitseq1 < bitseq2)
    print "len(sequence) : %d" % len(bitseq)
    print "Printing bit sequece ..."
    for bit in bitseq:
        print bit
|
It’s impossible to turn on your TV without coming across a reality TV show within a few channel flips — from The Bachelorette and Survivor to Keeping Up with the Kardashians and Dancing with the Stars. While reality TV began its complete takeover in the 2000s, the origins of the genre started well before then. Here’s a look at the evolution of reality TV.
1940s and 1950s: In the early “reality” TV days, shows weren’t about living in a house or on an island with strangers and they didn’t follow celebrities’ drama-filled lives. Unscripted TV started with shows such as Candid Camera, which debuted in 1948 and showed unsuspecting people reacting to pranks, or talent search shows such as Ted Mack’s Original Amateur Hour. In the 1950s, an early version of crime and police shows also emerged when Confessions aired. It consisted of an interviewer questioning criminals from various backgrounds.
1960s and 1970s: You might be surprised to learn that the first reality show comparable to what we consider reality TV today debuted in 1965 and aired in the U.S. until 1986. A typical episode of The American Sportsman featured one or more celebrities, and sometimes their family members, on outdoor adventures with a camera crew in tow. It was unscripted. During the 70s, meanwhile, more forerunners to competition shows got their start when shows such as The Newlywed Game and The Gong Show (it was resurrected in 2017) aired. These shows focused on people sacrificing their privacy or dignity for a prize.
1980s and 1990s: This was an era in which the popularity of reality TV started taking on a new level. There was stunt shows such as That’s Incredible, and even Canada got in on the act with Thrill of a Lifetime, which showed thrills ranging from daredevil to the romantic adventures fulfilled by regular people. Then there was COPS, which debuted in 1989 and became infamous for its camcorder look and theme song. And while the concept of putting strangers together in a house started on Dutch TV, MTV took the concept and ran with it when it aired The Real World. Another popular idea that got its start overseas was Survivor, which first aired in Sweden as the show Expedition Robinson. It took elements from the Real World (think confessionals and unscripted conversations) and combined them with competitions to create what today are known as elimination shows. Another theme to emerge in the 90s was self-improvement or makeover shows. Changing Rooms, during which couples redecorated each other’s houses, is an example.
2010s: Reality TV remains a staple of networks’ programming. Shows such as The Voice, which now has 50 international adaptations, have become favourites and newcomers such as Nailed It! are on the rise. Even Jersey Shore was resurrected. For now, the genre is here to stay.
If you have as much interest in creating reality TV as you do in watching it, you can get the know-how you need to succeed in the business at Centennial College’s Television and Film – Business program. A graduate certificate program from the School of Communications, Media, Arts and Design, it examines the legal, financial and regulatory frameworks of the industry and develops the skills necessary for producing in the current Canadian and international market.
|
"""Classifiers"""
import os
import subprocess
import numpy as np
FILE_DIR = os.path.dirname(os.path.abspath(__file__))
SVM_DIR = os.path.join(FILE_DIR, 'svm_light')
SVM_LEARN = os.path.join(SVM_DIR, 'svm_learn')
SVM_CLASSIFY = os.path.join(SVM_DIR, 'svm_classify')
class TSVM:
    """Transductive support vector machine.

    Thin wrapper around the SVMLight ``svm_learn``/``svm_classify``
    binaries: one one-vs-rest model is trained per class and prediction
    takes the argmax over the per-class decision values.
    """
    def __init__(self, varname, classorder):
        """Prepare the working directory ``<varname>_tsvm``.

        varname: prefix for the output directory and files.
        classorder: dict mapping class label -> column index.
        """
        self.outdir = varname+'_tsvm'
        if len(varname) >= 86:
            # was ``self.varname`` -- that attribute is never assigned, so
            # this error path itself raised AttributeError instead of the
            # intended message
            raise Exception('Output name prefix is too long: '+varname)
        os.makedirs(self.outdir, exist_ok=True)
        self.train_prefix = os.path.join(self.outdir, 'train')
        self.model_prefix = os.path.join(self.outdir, 'model')
        self.test_name = os.path.join(self.outdir, 'test.dat')
        self.pred_prefix = os.path.join(self.outdir, 'pred')
        self.classorder = classorder
        # inverse mapping: column index -> class label
        self.orderedclasses = [0] * len(self.classorder)
        for key, val in self.classorder.items():
            self.orderedclasses[val] = key
    def _train_name(self, label):
        # training-data file for the one-vs-rest model of *label*
        return self.train_prefix+'_'+str(label)+'.dat'
    def _model_name(self, label):
        # model file produced by svm_learn for *label*
        return self.model_prefix+'_'+str(label)
    def _pred_name(self, label):
        # prediction file produced by svm_classify for *label*
        return self.pred_prefix+'_'+str(label)
    def _write_feats(self, ofh, feats):
        """Writes the features into the data file.

        *feats* is expected to be a single-row scipy CSR matrix; output is
        SVMLight's sparse ``col:value`` format (indices are 1-based).
        """
        # expecting feats to be a csr row
        for col, datum in zip(feats.indices[feats.indptr[0]:feats.indptr[1]],
                              feats.data[feats.indptr[0]:feats.indptr[1]]):
            ofh.write(str(col+1))
            ofh.write(':')
            ofh.write(str(datum))
            ofh.write(' ')
    def fit(self, features, labels):
        """Call SVMLight for transductive SVM training
        features must be a csr matrix

        Samples are written with target +1 (matching class), -1 (other),
        or 0 (unlabeled, used transductively by SVMLight).
        """
        for label_type in self.classorder:
            train_file = self._train_name(label_type)
            with open(train_file, 'w') as ofh:
                for feats, label in zip(features, labels):
                    if label_type == 'unknown':
                        ofh.write('0 ')
                    elif label_type == label:
                        ofh.write('+1 ')
                    else:
                        ofh.write('-1 ')
                    self._write_feats(ofh, feats)
                    ofh.write('\n')
            subprocess.run(
                [
                    SVM_LEARN,
                    train_file,
                    self._model_name(label_type)])
    def predict(self, features):
        """Call SVMLight for transductive SVM prediction.

        Returns an array of class labels (argmax over per-class scores).
        """
        with open(self.test_name, 'w') as ofh:
            for feats in features:
                ofh.write('0 ')
                self._write_feats(ofh, feats)
                ofh.write('\n')
        predictions = []
        for label_type in self.classorder:
            pred_name = self._pred_name(label_type)
            subprocess.run(
                [
                    SVM_CLASSIFY,
                    self.test_name,
                    self._model_name(label_type),
                    pred_name])
            tmp = []
            with open(pred_name) as ifh:
                for line in ifh:
                    tmp.append(float(line.strip()))
            predictions.append(tmp)
        predictions = np.argmax(np.array(predictions), axis=0)
        return np.array([self.orderedclasses[a] for a in predictions])
|
Highly paid Hollyweird actress Jennifer Aniston is said to be completely comfortable with her hubby-in-waiting. So much so that, despite the considerable gap between their finances, she has decided not to protect her fortune with a prenup.
Jennifer Aniston reportedly won’t sign a pre-nuptial agreement, as she wants to show she is committed to her marriage.
The star is confident her marriage to Justin Theroux will work out. The actress is said to be adamant about her decision as she trusts her actor fiancé completely.
“Jen is a romantic and says she wants to show her commitment to the marriage by abandoning any ideas of a pre-nup,” Jen’s friends explained to Grazia magazine.
Last week it was reported that the actress’ advisors had suggested she safeguard her fortune by drawing up a pre-nuptial agreement before her big day.
Jennifer has considerably more money than her 41-year-old fiancé, with recent estimates putting the former Friends star’s wealth at around £95 million. Despite being married before, Jen is not worried about divorce or losing any money if the relationship turns sour.
However, her loved ones are reportedly less relaxed about the idea.
Given how unlucky in love she has been in the past, we’re a little surprised that Jen has taken this route. Hopefully her love-struck instincts don’t leave her down and out!
|
#!/usr/bin/env python
"""
Obtain platform information from every node of a cluster.
This script should be placed somewhere on the user's path.
:copyright: Copyright 2006-2015 by the Sumatra team, see doc/authors.txt
:license: BSD 2-clause, see LICENSE for details.
"""
from __future__ import unicode_literals
from mpi4py import MPI
import platform
import socket
from datetime import datetime
TIMESTAMP_FORMAT = "%Y%m%d-%H%M%S"
# rank of the parent process that collects the per-node reports
MPI_ROOT = 0
# This script is spawned by a parent MPI process; report back to it.
comm = MPI.Comm.Get_parent()
rank = comm.Get_rank()
network_name = platform.node()
bits, linkage = platform.architecture()
# One-entry mapping: this node's network name -> its platform description.
platform_information = {
    network_name: dict(architecture_bits=bits,
                       architecture_linkage=linkage,
                       machine=platform.machine(),
                       network_name=network_name,
                       ip_addr=socket.gethostbyname(network_name),
                       processor=platform.processor(),
                       release=platform.release(),
                       system_name=platform.system(),
                       version=platform.version(),
                       clock=datetime.now().strftime(TIMESTAMP_FORMAT))
}
# Send the report to the parent (tagged with our rank) and detach.
comm.send(platform_information, dest=MPI_ROOT, tag=rank)
comm.Disconnect()
|
Buying a RF Wireless Remote Control High Light Cartoon Projector Lamp LED Decorative Lawn Pin Lamp Halloween Christmas Holiday at the best price? Read about features, types, and other must-know topics in our RF Wireless Remote Control High Light Cartoon Projector Lamp LED Decorative Lawn Pin Lamp Halloween Christmas Holiday buying guide to make an informed choice. Look for the RF Wireless Remote Control High Light Cartoon Projector Lamp LED Decorative Lawn Pin Lamp Halloween Christmas Holiday package that is best for you, and compare prices, discount promotions, and free-shipping offers before ordering.
Theme: 8 pieces of colorful film (4*Christmas theme: 1. running elk in the snow; 2. taking off the hat to salute the snowman; 3. gifts scattered Santa; 4. stars and moons swing Santa + elk; 4*Halloween theme : 1. dancing notes skeleton lead; 2. moon flying witches; 3. coffin vampires; 4. face faced pumpkins ghost in the snow) create an instant atmosphere for indoor and outdoor use throughout the year!
More brightness and clarity; interchangeable color projection images instead of black or white images; gentle and soft led lights can better protect the eyes, especially for kids; enjoy your day.
Easy to use - with 8 films, it can be manually disassembled, which is more stable, user-friendly and convenient to use. It can be placed on a circular base, detachable ground pile and the uneven while stable areas like lawn. To adjust the mounting bracket to gain the optimal illumination angle.
Timing function: built-in intelligent IC and timing function, effectively protect the product and extend the service life of it.
Wireless remote control: equipped with wireless (RF) remote control, it can control the switch, adjust the speed and set the timing function, which is more convenient for customers.
Applicable range: waterproof LED light projector for indoor and outdoor use; it is widely used in Halloween, Christmas, clubs, KTV, bars, banquets, weddings, family gatherings, etc.
|
#
# Copyright 2012 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Refer to the README and COPYING files for full details of the license
#
import os
import re
import socket
import tempfile
import threading
import subprocess
import errno
import testrunner
from vdsm import SecureXMLRPCServer
class SSLServerThread(threading.Thread):
    """A very simple server thread.
    This server waits for SSL connections in a serial
    fashion and then echoes whatever the client sends.

    ``server`` is an already-listening SSL server socket; stop the thread
    with :meth:`shutdown` followed by ``join()``.
    """
    def __init__(self, server):
        threading.Thread.__init__(self)
        self.server = server
        # event polled by run() to know when to exit the accept loop
        self.stop = threading.Event()
    def run(self):
        # It is important to set a timeout in the server thread to be
        # able to check periodically the stop flag:
        self.server.settimeout(1)
        # Accept client connections:
        while not self.stop.isSet():
            try:
                client, address = self.server.accept()
                client.settimeout(1)
                try:
                    # echo loop: send back whatever the client sends
                    while True:
                        data = client.recv(1024)
                        if data:
                            client.sendall(data)
                        else:
                            break
                except:
                    # We don't care about exceptions here, only on the
                    # client side:
                    pass
                finally:
                    client.close()
            except:
                # Nothing to do here, we will check the stop flag in the
                # next iteration of the loop:
                pass
    def shutdown(self):
        # Note that this doesn't stop the thread immediately, it just
        # indicates that stopping is requested; the thread will stop
        # with next iteration of the accept loop:
        self.stop.set()
class SSLTests(testrunner.VdsmTestCase):
"""Tests of SSL communication"""
def setUp(self):
"""Prepares to run the tests.
The preparation consist on creating temporary files containing
the keys and certificates and starting a thread that runs a
simple SSL server.
"""
# Save the key to a file:
with tempfile.NamedTemporaryFile(delete=False) as tmp:
tmp.write(KEY)
self.keyfile = tmp.name
# Save the certificate to a file:
with tempfile.NamedTemporaryFile(delete=False) as tmp:
tmp.write(CERTIFICATE)
self.certfile = tmp.name
# Create the server socket:
self.server = socket.socket()
self.server = SecureXMLRPCServer.SSLServerSocket(
raw=self.server,
keyfile=self.keyfile,
certfile=self.certfile,
ca_certs=self.certfile)
self.address = self.tryBind(ADDRESS)
self.server.listen(5)
# Start the server thread:
self.thread = SSLServerThread(self.server)
self.thread.start()
def tryBind(self, address):
ipadd, port = address
while True:
try:
self.server.bind((ipadd, port))
return (ipadd, port)
except socket.error as ex:
if ex.errno == errno.EADDRINUSE:
port += 1
if port > 65535:
raise socket.error(
errno.EADDRINUSE,
"Can not find available port to bind")
else:
raise
def tearDown(self):
"""Release the resources used by the tests.
Removes the temporary files containing the keys and certifites,
stops the server thread and closes the server socket.
"""
# Delete the temporary files:
os.remove(self.keyfile)
os.remove(self.certfile)
# Stop the server thread and wait for it to finish:
self.thread.shutdown()
self.thread.join()
del self.thread
# Close the server socket:
self.server.shutdown(socket.SHUT_RDWR)
self.server.close()
del self.server
def runSClient(self, args=None, input=None):
"""This method runs the OpenSSL s_client command.
The address parameter is a tuple containg the address
of the host and the port number that will be used to
build the -connect option of the command.
The args parameter is the list of additional parameters
to pass to the command.
The input parameter is the data that will be piped to the
standard input of the command.
The method returns a tuple containing the exit code of the
command and the data generated in the standard output.
"""
command = [
"openssl",
"s_client",
"-connect", "%s:%d" % self.address,
]
if args:
command += args
print("command=%s" % command)
process = subprocess.Popen(command,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
out, err = process.communicate(input)
rc = process.wait()
print("rc=%d" % rc)
print("out=%s" % out)
print("err=%s" % err)
return rc, out
def extractField(self, name, text):
"""
Extracts the value of one of the informative fields provided in
the output of the s_client command.
The name parameter is the name of the field, for example
Session-ID for the SSL session identifier.
The text parameter should be the output of the execution of the
s_client command.
Returns the value of the given field or None if that field can't
be fond in the provided output of the s_client command.
"""
pattern = r"^\s*%s\s*:\s*(?P<value>[^\s]*)\s*$" % name
expression = re.compile(pattern, flags=re.MULTILINE)
match = expression.search(text)
if not match:
return None
value = match.group("value")
print("%s=%s" % (name, value))
return value
def testConnectWithoutCertificateFails(self):
"""
Verify that the connection without a client certificate
fails.
"""
rc, _ = self.runSClient()
self.assertNotEquals(rc, 0)
def testConnectWithCertificateSucceeds(self):
"""
Verify that the connection with a valid client certificate
works correctly.
"""
rc, _ = self.runSClient([
"-cert", self.certfile,
"-key", self.keyfile,
])
self.assertEquals(rc, 0)
def testSessionIsCached(self):
    """
    Verify that the SSL session identifier is preserved when
    connecting two times without stopping the server.
    """
    # Create a temporary file to store the session details.  It is
    # closed immediately: only its name is handed to s_client, and
    # addCleanup removes it even when an assertion below fails (the
    # original inline os.remove leaked the file on failure).
    sessionDetailsFile = tempfile.NamedTemporaryFile(delete=False)
    sessionDetailsFile.close()
    self.addCleanup(os.remove, sessionDetailsFile.name)
    # Connect first time and save the session to the file:
    rc, out = self.runSClient([
        "-cert", self.certfile,
        "-key", self.keyfile,
        "-sess_out", sessionDetailsFile.name,
    ])
    self.assertEqual(rc, 0)
    # Get the session id from the output of the command:
    firstSessionId = self.extractField("Session-ID", out)
    self.assertIsNotNone(firstSessionId)
    # Connect second time using the saved session file:
    rc, out = self.runSClient([
        "-cert", self.certfile,
        "-key", self.keyfile,
        "-sess_in", sessionDetailsFile.name,
    ])
    self.assertEqual(rc, 0)
    # Get the session id again:
    secondSessionId = self.extractField("Session-ID", out)
    self.assertIsNotNone(secondSessionId)
    # Compare the session ids:
    self.assertEqual(secondSessionId, firstSessionId)
# The address of the tests server:
ADDRESS = ("127.0.0.1", 8443)

# Private key used for the tests.  This is a throwaway, test-only
# credential (it pairs with CERTIFICATE below); do not reuse it
# anywhere outside these tests:
KEY = """
-----BEGIN PRIVATE KEY-----
MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQDapPcHwCWYsfiH
pJ/tXpcSZsa6ocJZaL3HF/mFxiO4/7za6lP0Vdtln4CwCzqAfUJKQhCHNyYUvZsf
Eylr0U30MQzhynq8+F5co5f2RNzz93aL7cjEUQMK2YaShLxz7o/QdoNSnT8sJ3TO
P16VEcpngoBD/nDXxNf0HekwhENYz4K2Hqol0xcGY6x8cJoXNybBPheVGTl6wy+r
W9YPuL0gR2/GgyVT1UP0EBGebkvza+eVaenrp0qrMiEQMDAOeNq3mu6ueOUo03Hn
xaEqxrToYv0eBbpF2Z469uJXaLP/NmcT1GUbFqP3H+/Js68HwxCEqb1kKGiG8E58
hSHHM95ZAgMBAAECggEAeMU2TmmsWrOze/lK/WqKN/fdPamsGIbqjNaJVYMkqENa
pfFZflUOYwu/oX4SSnbl7u6fApFLz5kL3hZPguaSEJgnbXDSax8lwDX88mMHSRsf
uBsYEphM/ek5lCUNk1vqxFMyJqgFBPamZmZKcDzreFF1WBlra0OnpYgADnSAXsT7
HcQDkSe1s1YuuRYYUuRc5KYhrQ5P3AHCJ++w7QK7wZbo/5iQuVuuytMBbCWFNH06
K+fEqZRB9wXg9ubvvbcAlX579QL2HRZl5GvhSP+2Jah/zoTndXAKVVWWx8L1ohKg
aAOxWGFy4f47BQwmkafZVYIGsfudEK4Dmf6UmwvVIQKBgQDw8r5ihTHuXLuyBtwy
J+Pn//zY1FKJcANshvFgQtrfbmLiulXDtvaiitdkQj8HyTeEtgtuGt5mnE5uKm8N
MV9eSU2FyuyazwlemI4XYdQWtcw+ZBh7K3u6/QjqDJfNjVDnv7S2VS9DDs8Ga7r4
fanecGfQ6ni5Mqxb2OAlOcBYRwKBgQDoTYmR35Lo/qkJ6Mm+8IljdvN3iAgqkO67
b6WhjkTwgO/Y+zGfQ/W2PbPsVWc1f3IBYvKmArvMDB5PZ9HyzIg27OxCyhjbLmvb
kEPjQF6f+FOb4h4yo9i2dBJucFAKrHMHiqH24Hlf3WOordxX9lY37M0fwpg2kZIM
ConIt/4EXwKBgDIXtV8UI+pTWy5K4NKImogsHywREEvEfuG8OEhz/b7/2w0aAiSb
UDFAvkD4yNPckG9FzaCJc31Pt7qNleLfRd17TeOn6YLR0jfZbYkM7KQADcNW2gQZ
aTLZ0lWeYpz4aT6VC4Pwt8+wL3g9Q3TP41X8dojnhkuybkT2FLuIgyWXAoGAMJUW
skU5qjSoEYR3vND9Sqnz3Qm7+3r4EocU8qaYUFwGzTArfo1t88EPwdtSjGOs6hFR
gdqMf+4A4MZrqAWSbzo5ZvZxIFWjBPY03G/32ijLA4zUl+6gQfggaqxecP0DyY36
tXDYsW3Ri9Ngg5znByck9wFxZ+glzRLfIfUo0K0CgYEAkogcGLKGb5zdwAXuUVQK
ftftLEARqs/gMA1cItxurtho0JUxYaaKgSICB7MQPEuTtdUNqCkeu9S838dbyfL7
gGdsZ26Can3IAyQv7+3DObvB376T4LD8Mp/ZHvOpeZQQ9O4ngadteRcBaCcd78Ij
VSgxeSvBewtCS1FnILwgXJ4=
-----END PRIVATE KEY-----
"""

# This is the certificate used for the tests, and it expires in Sep 26
# 2022, so don't be surprised if by that date the test starts failing
# (that date has passed -- regenerate the pair before relying on any
# expiry-sensitive behaviour):
CERTIFICATE = """
-----BEGIN CERTIFICATE-----
MIIC8zCCAdugAwIBAgIBADANBgkqhkiG9w0BAQUFADAUMRIwEAYDVQQDDAkxMjcu
MC4wLjEwHhcNMTIwOTI4MTcyMzE3WhcNMjIwOTI2MTcyMzE3WjAUMRIwEAYDVQQD
DAkxMjcuMC4wLjEwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDapPcH
wCWYsfiHpJ/tXpcSZsa6ocJZaL3HF/mFxiO4/7za6lP0Vdtln4CwCzqAfUJKQhCH
NyYUvZsfEylr0U30MQzhynq8+F5co5f2RNzz93aL7cjEUQMK2YaShLxz7o/QdoNS
nT8sJ3TOP16VEcpngoBD/nDXxNf0HekwhENYz4K2Hqol0xcGY6x8cJoXNybBPheV
GTl6wy+rW9YPuL0gR2/GgyVT1UP0EBGebkvza+eVaenrp0qrMiEQMDAOeNq3mu6u
eOUo03HnxaEqxrToYv0eBbpF2Z469uJXaLP/NmcT1GUbFqP3H+/Js68HwxCEqb1k
KGiG8E58hSHHM95ZAgMBAAGjUDBOMB0GA1UdDgQWBBR0dTG068xPsrXKDD6r6Ne+
8RQghzAfBgNVHSMEGDAWgBR0dTG068xPsrXKDD6r6Ne+8RQghzAMBgNVHRMEBTAD
AQH/MA0GCSqGSIb3DQEBBQUAA4IBAQCoY1bFkafDv3HIS5rBycVL0ghQV2ZgQzAj
sCZ47mgUVZKL9DiujRUFtzrMRhBBfyeT0Bv8zq+eijhGmjp8WqyRWDIwHoQwxHmD
EoQhAMR6pXvjZdYI/vwHJK5u0hADQZJ+zZp77m/p95Ds03l/g/FZHbCdISTTJnXw
t6oeDZzz/dQSAiuyAa6+0tdu2GNF8OkR5c7W+XmL797soiT1uYMgwIYQjM1NFkKN
vGc0b16ODiPvsB0bo+USw2M0grjsJEC0dN/GBgpFHO4oKAodvEWGGxANSHAXoD0E
bh5L7zBhjgag+o+ol2PDNZMrJlFvw8xzhQyvofx2h7H+mW0Uv6Yr
-----END CERTIFICATE-----
"""
|
A tow truck prepares to remove the damaged car from the side of the road | Photo: Lake and McHenry County Scanner.
A 38-year-old man was injured after he illegally passed a vehicle, lost control, and rolled his car over, striking the railroad tracks in Round Lake Tuesday evening.
The Lake County Sheriff’s Office and the Greater Round Lake Fire Protection District responded at 4:30 p.m. on Tuesday to Route 134 east of Fairfield Road in Round Lake for a single-vehicle crash.
Lake County Sheriff Sgt. Christopher Covelli said that a 2002 Ford Mustang, driven by a 38-year-old man from Wauconda, was driving westbound when he illegally overtook another vehicle. The Mustang lost control and left the roadway before rolling over and striking the railroad tracks.
Emergency crews found the car on the north side of the roadway just west of Harrison Avenue. The car came to rest just off the tracks in the gravel that supports the train tracks.
The man was transported with non-life-threatening injuries to Advocate Condell Medical Center. Nobody else was in the car or injured during the crash.
Metra trains were halted and delayed. Engineers were called to help repair the train tracks which were damaged after the car landed on them. Covelli said that the investigation is ongoing and citations are expected to be issued.
Route 134 was closed from Fairfield Road to Hart Road for almost two hours, causing extensive traffic delays in the area.
|
import os

# Development settings: DEBUG must be False in production.
DEBUG = True
TEMPLATE_DEBUG = DEBUG

# Single-file SQLite database stored next to this settings module.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(os.path.dirname(__file__), 'simple_blog.sqlite'),
    }
}

# Hosts/domain names that are valid for this site; required if DEBUG is False
# See https://docs.djangoproject.com/en/1.5/ref/settings/#allowed-hosts
ALLOWED_HOSTS = []
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# In a Windows environment this must be set to your system time zone.
TIME_ZONE = 'America/Chicago'

# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'

# Identifier of the current site in the django_site database table.
SITE_ID = 1

# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True

# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = True

# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = True

# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/var/www/example.com/media/"
MEDIA_ROOT = ''

# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://example.com/media/", "http://media.example.com/"
MEDIA_URL = ''
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/var/www/example.com/static/"
STATIC_ROOT = ''

# URL prefix for static files.
# Example: "http://example.com/static/", "http://static.example.com/"
STATIC_URL = '/static/'

# Additional locations of static files
STATICFILES_DIRS = (
    # Put strings here, like "/home/html/static" or "C:/www/django/static".
    # Always use forward slashes, even on Windows.
    # Don't forget to use absolute paths, not relative paths.
)

# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
    'django.contrib.staticfiles.finders.FileSystemFinder',
    'django.contrib.staticfiles.finders.AppDirectoriesFinder',
    # 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)

# Make this unique, and don't share it with anybody.
# NOTE(review): this secret is committed to version control; for any
# non-development deployment it should be loaded from the environment
# or a secrets store instead.
SECRET_KEY = '^x+vb79pfnjn@3ozbjwl&#xlo^_sybox877z24-*1cokdx%0ex'

# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
    'django.template.loaders.filesystem.Loader',
    'django.template.loaders.app_directories.Loader',
    # 'django.template.loaders.eggs.Loader',
)
# Request/response processing pipeline, applied in order.
MIDDLEWARE_CLASSES = (
    'django.middleware.common.CommonMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    # Uncomment the next line for simple clickjacking protection:
    # 'django.middleware.clickjacking.XFrameOptionsMiddleware',
)

# Module containing the project's root URL patterns.
ROOT_URLCONF = 'simple_blog.urls'

# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'simple_blog.wsgi.application'

TEMPLATE_DIRS = (
    # Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
    # Always use forward slashes, even on Windows.
    # Don't forget to use absolute paths, not relative paths.
)

# Enabled Django applications: the stock contrib apps plus the blog
# itself and the sortable_listview helper app.
INSTALLED_APPS = (
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.sites',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'django.contrib.admin',
    'simple_blog',
    'sortable_listview',
)
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'filters': {
        # Only lets records through when DEBUG is False.
        'require_debug_false': {
            '()': 'django.utils.log.RequireDebugFalse'
        }
    },
    'handlers': {
        # Emails ERROR-level records to the ADMINS setting.
        'mail_admins': {
            'level': 'ERROR',
            'filters': ['require_debug_false'],
            'class': 'django.utils.log.AdminEmailHandler'
        }
    },
    'loggers': {
        'django.request': {
            'handlers': ['mail_admins'],
            'level': 'ERROR',
            'propagate': True,
        },
    }
}
|
2018 Ford F-250 XLT 4x4, 6.7L V8 Diesel, Crew Cab, Rear View Camera, AM/FM CD Sat MP3, SYNC Voice Activated Systems, Steering Wheel Mounted Controls, Power Windows and Locks, Remote Keyless Entry. CARFAX One-Owner. This is a Ford Certified Pre-Owned which means you not only get the reassurance of a 12Mo/12,000Mile Comprehensive Warranty, but also up to a 7-Year/100,000-Mile Powertrain Limited Warranty, a 172-point inspection/reconditioning, 24/7 roadside assistance, trip-interruption services, rental car benefits, and a complete CARFAX vehicle history report. Up Front Pricing & NO DEALER FEES!!!
|
#!/usr/bin/python3
from ABE_ADCDACPi import ADCDACPi
import time
import math
"""
================================================
ABElectronics ADCDAC Pi 2-Channel ADC, 2-Channel DAC | DAC sine wave generator demo
Version 1.0 Created 29/02/2015
run with: python3 demo-dacsinewave.py
================================================
# this demo uses the set_dac_raw method to generate a sine wave from a
# predefined set of values
"""
# Interface object for the ADCDAC Pi board.
adcdac = ADCDACPi()

# 512-entry lookup table: one full sine cycle quantised to the DAC's
# 12-bit range (0..4095), centred on mid-scale 2048.
DACLookup_FullSine_12Bit = \
    [2048, 2073, 2098, 2123, 2148, 2174, 2199, 2224,
     2249, 2274, 2299, 2324, 2349, 2373, 2398, 2423,
     2448, 2472, 2497, 2521, 2546, 2570, 2594, 2618,
     2643, 2667, 2690, 2714, 2738, 2762, 2785, 2808,
     2832, 2855, 2878, 2901, 2924, 2946, 2969, 2991,
     3013, 3036, 3057, 3079, 3101, 3122, 3144, 3165,
     3186, 3207, 3227, 3248, 3268, 3288, 3308, 3328,
     3347, 3367, 3386, 3405, 3423, 3442, 3460, 3478,
     3496, 3514, 3531, 3548, 3565, 3582, 3599, 3615,
     3631, 3647, 3663, 3678, 3693, 3708, 3722, 3737,
     3751, 3765, 3778, 3792, 3805, 3817, 3830, 3842,
     3854, 3866, 3877, 3888, 3899, 3910, 3920, 3930,
     3940, 3950, 3959, 3968, 3976, 3985, 3993, 4000,
     4008, 4015, 4022, 4028, 4035, 4041, 4046, 4052,
     4057, 4061, 4066, 4070, 4074, 4077, 4081, 4084,
     4086, 4088, 4090, 4092, 4094, 4095, 4095, 4095,
     4095, 4095, 4095, 4095, 4094, 4092, 4090, 4088,
     4086, 4084, 4081, 4077, 4074, 4070, 4066, 4061,
     4057, 4052, 4046, 4041, 4035, 4028, 4022, 4015,
     4008, 4000, 3993, 3985, 3976, 3968, 3959, 3950,
     3940, 3930, 3920, 3910, 3899, 3888, 3877, 3866,
     3854, 3842, 3830, 3817, 3805, 3792, 3778, 3765,
     3751, 3737, 3722, 3708, 3693, 3678, 3663, 3647,
     3631, 3615, 3599, 3582, 3565, 3548, 3531, 3514,
     3496, 3478, 3460, 3442, 3423, 3405, 3386, 3367,
     3347, 3328, 3308, 3288, 3268, 3248, 3227, 3207,
     3186, 3165, 3144, 3122, 3101, 3079, 3057, 3036,
     3013, 2991, 2969, 2946, 2924, 2901, 2878, 2855,
     2832, 2808, 2785, 2762, 2738, 2714, 2690, 2667,
     2643, 2618, 2594, 2570, 2546, 2521, 2497, 2472,
     2448, 2423, 2398, 2373, 2349, 2324, 2299, 2274,
     2249, 2224, 2199, 2174, 2148, 2123, 2098, 2073,
     2048, 2023, 1998, 1973, 1948, 1922, 1897, 1872,
     1847, 1822, 1797, 1772, 1747, 1723, 1698, 1673,
     1648, 1624, 1599, 1575, 1550, 1526, 1502, 1478,
     1453, 1429, 1406, 1382, 1358, 1334, 1311, 1288,
     1264, 1241, 1218, 1195, 1172, 1150, 1127, 1105,
     1083, 1060, 1039, 1017, 995, 974, 952, 931,
     910, 889, 869, 848, 828, 808, 788, 768,
     749, 729, 710, 691, 673, 654, 636, 618,
     600, 582, 565, 548, 531, 514, 497, 481,
     465, 449, 433, 418, 403, 388, 374, 359,
     345, 331, 318, 304, 291, 279, 266, 254,
     242, 230, 219, 208, 197, 186, 176, 166,
     156, 146, 137, 128, 120, 111, 103, 96,
     88, 81, 74, 68, 61, 55, 50, 44,
     39, 35, 30, 26, 22, 19, 15, 12,
     10, 8, 6, 4, 2, 1, 1, 0,
     0, 0, 1, 1, 2, 4, 6, 8,
     10, 12, 15, 19, 22, 26, 30, 35,
     39, 44, 50, 55, 61, 68, 74, 81,
     88, 96, 103, 111, 120, 128, 137, 146,
     156, 166, 176, 186, 197, 208, 219, 230,
     242, 254, 266, 279, 291, 304, 318, 331,
     345, 359, 374, 388, 403, 418, 433, 449,
     465, 481, 497, 514, 531, 548, 565, 582,
     600, 618, 636, 654, 673, 691, 710, 729,
     749, 768, 788, 808, 828, 848, 869, 889,
     910, 931, 952, 974, 995, 1017, 1039, 1060,
     1083, 1105, 1127, 1150, 1172, 1195, 1218, 1241,
     1264, 1288, 1311, 1334, 1358, 1382, 1406, 1429,
     1453, 1478, 1502, 1526, 1550, 1575, 1599, 1624,
     1648, 1673, 1698, 1723, 1747, 1772, 1797, 1822,
     1847, 1872, 1897, 1922, 1948, 1973, 1998, 2023]

# Replay the table forever on DAC channel 1.  There is no delay in the
# loop, so the output frequency is determined by how fast set_dac_raw
# can push samples over the SPI bus.
while True:
    for val in DACLookup_FullSine_12Bit:
        adcdac.set_dac_raw(1, val)
|
We can supply tooling for any punching environment. Whether you have a C-frame, punch/plasma, even a hand press, we have your tools.
We manufacture tooling for any punching application.
However you are making holes, we have the punch and die you need. Choose the style you need for your machine, hydraulic or mechanical, whether it is a punch / plasma, portable press, frame, unitized tooling, or even a hand press. Pick out your type of punch below- headed, ball lock, shoulder, threaded. We can provide you with the tools for any machine. If you don't see your style of tooling below, please call for a quote.
Select your Punch style- whether it's a shoulder punch, ball lock, straight shank or any other type of punch. If you don't see the style you need below, call us, and we can make it for you.
Need a standard style punch?
For punch styles not listed below, view our standard punch styles.
Click here, then enter your machine make and model, and browse or purchase tooling just for your machine.
|
# Standard library imports.
import re
import logging
from pprint import pformat

# Local imports.
from scope import Scope
import settings

try:
    import path
except ImportError:
    # path.py is a hard dependency; abort with a clear message.
    logging.critical("Presto requires path.py to be installed, "
                     "checkout requirement.txt.")
    raise
# char to escape in a regular expression to be taken as literal.
TO_ESCAPE_FOR_RE = r"()[]{}*+?|.^$\\"
# char to escaped inside [] in a regular expression to be taken as literal.
TO_ESCAPE_INSIDE_BRACKET_FOR_RE = r"\^\-\]\\"


def escape_reserved_re_char(string):
    """
    Return *string* with every character reserved by regular
    expressions escaped by a backslash, so that it can be embedded in
    a pattern as a literal.
    """
    # Build a character class from TO_ESCAPE_FOR_RE.  Members that are
    # special *inside* [] must themselves be escaped first.
    bracket_safe = re.sub(
        "(?P<char>[" + TO_ESCAPE_INSIDE_BRACKET_FOR_RE + "])",
        r"\\\g<char>",
        TO_ESCAPE_FOR_RE,
    )
    # Then escape every occurrence of those characters in the input.
    return re.sub("(?P<char>[" + bracket_safe + "])",
                  r"\\\g<char>",
                  string)
class MetaDataModel(type):
    """
    Metaclass for DataModel.

    Exposes the class-level storage attributes (_files, _root, _scopes
    and _document_path) through properties defined on the metaclass,
    so that e.g. DataModel.files reads and writes DataModel._files
    like a 'class property'.
    """

    @property
    def files(cls):
        return cls._files

    @files.setter
    def files(cls, value):
        cls._files = value

    @property
    def root(cls):
        return cls._root

    @root.setter
    def root(cls, value):
        cls._root = value

    @property
    def scopes(cls):
        return cls._scopes

    @scopes.setter
    def scopes(cls, value):
        cls._scopes = value

    @property
    def document_path(cls):
        return cls._document_path

    @document_path.setter
    def document_path(cls, value):
        cls._document_path = value
class DataModelError(Exception):
    """Raised when the data model cannot be built from the document."""
    pass
class DataModel(metaclass=MetaDataModel):
    """
    Class-level container for the data model: the root directory of the
    document tree, the sorted list of files found under it, and the
    scopes built from the '__SCOPES__' section of the YAML document.
    The attributes are exposed as class properties by MetaDataModel.
    """

    _files = None           # sorted list of files under _root
    _root = None            # absolute path.Path of the document root
    _scopes = None          # dict mapping scope name -> Scope instance
    _document_path = None   # directory containing the YAML document

    def __init__(self, yaml_doc, yaml_doc_dir, scope_to_override):
        """
        Build the class-level data model from *yaml_doc*.

        yaml_doc -- parsed YAML configuration; must contain the
            '__ROOT__' and '__SCOPES__' keys.
        yaml_doc_dir -- directory of the YAML document.
        scope_to_override -- mapping of scope definitions overriding the
            ones in the document; every key must already exist in the
            document's '__SCOPES__' section.

        Raises DataModelError when the model cannot be built.
        """
        # Check if the class has already been setup.
        if (DataModel.files is not None and DataModel.root is not None and
                DataModel.scopes is not None):
            # logging.warn is a deprecated alias of logging.warning; the
            # original also logged the scope count twice instead of the
            # file count.
            logging.warning("DataModel have already been setup:\nroot: %s"
                            "\n%s files\n%s scopes", DataModel.root,
                            len(DataModel.files), len(DataModel.scopes))
        DataModel.document_path = yaml_doc_dir
        # Change helpers class instance attribut so all instances of
        # Evaluators will use it as helpers (imported here to avoid a
        # circular import).
        from evaluator import Evaluator
        # update yaml_doc with scope_to_override before setting helpers.
        yaml_doc.update(scope_to_override)
        Evaluator.set_helpers(yaml_doc)
        try:
            DataModel._set_root(yaml_doc['__ROOT__'])
        except KeyError:
            # NOTE(review): a missing '__ROOT__' is only logged here and
            # execution continues with an unset root (the KeyError clause
            # below is shadowed by this one) -- confirm this fall-through
            # is intended.
            logging.error("configuration file must have a '__ROOT__' "
                          "attribute.")
        except (OSError, KeyError, TypeError):
            logging.critical("unable to build data model. "
                             "bad key: '__ROOT__'")
            raise
        try:
            if DataModel.scopes is None:
                DataModel.scopes = dict()
            scope_dict = yaml_doc['__SCOPES__']
            # check if scopes to override are ok: each one must exist in
            # the document.
            for scope in scope_to_override:
                if scope not in scope_dict:
                    logging.critical("Unable to find overrided scope '" +
                                     settings.FAIL + "{}".format(scope) +
                                     settings.ENDCBOLD)
                    # A bare 'raise' here had no active exception to
                    # re-raise and crashed with a RuntimeError; raise an
                    # explicit domain error instead.
                    raise DataModelError()
            scope_dict.update(scope_to_override)
            DataModel._make_scopes(scope_dict)
            logging.debug("Scopes:\n%s", pformat(DataModel.scopes))
        except KeyError:
            logging.error("configuration file must have a '__SCOPES__' "
                          "attribute.")
            logging.critical("unable to build data model. "
                             "bad key: '__SCOPES__'")
            raise DataModelError()

    @classmethod
    def _set_root(cls, root):
        """Evaluate *root*, store it as an absolute path and collect the
        sorted list of files found under it."""
        from evaluator import Evaluator
        evltr = Evaluator()
        root = evltr.evaluate(root)
        cls.root = path.Path(root).abspath()
        try:
            cls.files = sorted(cls.root.walkfiles())
            logging.debug("files:\n%s", pformat(cls.files))
        except OSError:
            logging.error("no such directory: ('%s')", cls.root)
            raise

    @classmethod
    def _make_scopes(cls, peers):
        """Build a Scope for every (name, expression) pair in *peers*,
        collecting the matching prefixes of the known files as values."""
        from evaluator import Evaluator
        evltr = Evaluator()
        for key in peers:
            name = key
            try:
                expression = evltr.evaluate(peers[key])
            except (TypeError, KeyError):
                logging.critical("Error in __SCOPES__ definition for {0}"
                                 "".format(key))
                raise
            # Compile once instead of re-parsing the pattern for every
            # file; re.error therefore surfaces here.
            try:
                scope_re = re.compile(r".*?" + expression)
            except re.error:
                # The original logged (key, expression) in the wrong
                # order; *expression* is the regular expression.
                logging.critical("bad regular expression '%s' for %s: ",
                                 expression, key)
                raise
            values = set()
            for f in cls.files:
                match = scope_re.search(f)
                if match:
                    values.add(escape_reserved_re_char(match.group(0)))
            cls.scopes[name] = Scope(name, expression, sorted(values))
|
How to find your phone number on TracFone Alcatel Raven A577VL. This tutorial illustrates step by step how to find your Tracfone phone number on an Android device. It’s easy to retrieve your phone number in a few simple steps.
1. From the main screen, tap the “Apps” icon.
2. Slide the screen up to see more options. Tap the “Settings” icon.
5. Tap “SIM status”. Locate “My phone number”. The number underneath is your phone number.
6. To return to the main screen, press the HOME key.
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.