text
stringlengths
29
850k
import os
import sys
import json

import requests
import bs4

# Flask is optional: without it the Cricket class can still be used as a
# plain scraping library (the original wrapped this in a bare try/except).
try:
    from flask import Flask
    app = Flask(__name__)
except ImportError:
    app = None

# Prefer the faster lxml parser when it is installed.
try:
    import lxml  # noqa: F401
    parser = 'lxml'
except ImportError:
    parser = 'html.parser'

# An optional proxy URL may be supplied in proxy.config; otherwise fall
# back to the proxies configured in the environment.
try:
    with open(os.path.join(sys.path[0], 'proxy.config'), 'r') as f:
        proxies = f.read()
    proxy_dict = {
        "http": proxies,
        "https": proxies,
    }
except (IOError, OSError):
    import urllib.request
    proxy_dict = urllib.request.getproxies()


class Cricket(object):
    """Scrapes live cricket scores, schedules, player stats and news."""

    def get_player_stats(self, playerName, type_return='string'):
        """Look up a player on ESPNcricinfo and return their profile fields.

        Args:
            playerName: hyphen-separated player name, e.g. "virender-sehwag".
            type_return: 'dict' to get a dict, anything else for its str().
        """
        base_url = "http://www.espncricinfo.com"
        url = ("http://www.espncricinfo.com/ci/content/player/search.html"
               "?search=") + "+".join(playerName.split('-'))
        res = requests.get(url, stream=True, proxies=proxy_dict)
        res.raise_for_status()
        soup = bs4.BeautifulSoup(res.text, parser)
        # The second search hit links to the player's profile page.
        playerStatLink = soup.select(".ColumnistSmry")[1]
        res = requests.get(base_url + playerStatLink.get('href'))
        soup = bs4.BeautifulSoup(res.text, parser)
        player_info = soup.select(".ciPlayerinformationtxt")
        player_stats = {}
        for item in player_info:
            b = item.find('b')
            if b.string == "Major teams":
                # Multiple teams appear as separate spans; concatenate them.
                temp = ""
                for it in item.findAll('span'):
                    temp += it.string + " "
            else:
                temp = item.find('span').string
            player_stats[b.string] = temp
        if type_return == 'dict':
            return player_stats
        return str(player_stats)

    def live_score(self, type_return='string'):
        """Return the list of live match summaries from Cricbuzz."""
        response = requests.get('http://www.cricbuzz.com/live-scores',
                                stream=True, proxies=proxy_dict)
        soup = bs4.BeautifulSoup(response.text, parser)
        scores = [i.text
                  for i in soup.findAll("div", {"class": "cb-lv-main"})]
        if type_return == 'dict':
            return scores
        return json.dumps(str(scores))

    def list_matches(self, type_return='string'):
        """Return scheduled matches grouped by tournament heading."""
        response = requests.get('https://cricket.yahoo.com/matches/schedule',
                                stream=True, proxies=proxy_dict)
        soup = bs4.BeautifulSoup(response.text, parser)
        head_list = soup.findAll("em", {"class": "ycric-table-heading"})
        no_list = soup.findAll("td", {"class": "sno"})
        tour_dates_list = soup.findAll("span", {"class": "matchDateTime"})
        match_list = soup.findAll("td", {"class": "smatch"})
        venue_list = soup.findAll("td", {"class": "svenue"})
        result_list = soup.findAll("td", {"class": "sresult"})
        # Drop the per-table header cells, keeping only data rows.
        team_list = [i.text for i in match_list if i.text != "Match"]
        nos = [i.text for i in no_list if i.text != "#"]
        venue = [i.text for i in venue_list if i.text != "Venue"]
        result = [i.text for i in result_list if i.text != "Result"]
        heading = 0
        matches = {}
        for i in range(len(nos)):
            # A serial number of "1" starts a new tournament section.
            if nos[i] == "1":
                header = head_list[heading].text.lstrip()
                matches[header] = []
                heading = heading + 1
            matches[header].append((team_list[i].lstrip(),
                                    tour_dates_list[i].text.lstrip(),
                                    venue[i].lstrip(),
                                    result[i].lstrip()))
        if type_return == 'dict':
            return matches
        return json.dumps(str(matches))

    def news(self, type_return='string'):
        """Return a mapping of latest Cricbuzz headlines to article URLs."""
        base_url = 'http://www.cricbuzz.com/cricket-news/latest-news'
        res = requests.get(base_url, stream=True, proxies=proxy_dict)
        soup = bs4.BeautifulSoup(res.text, parser)
        news_dict = {}
        for all_news in soup.select(".cb-col-33 a"):
            title = all_news.get("title")
            if str(title) != "More Photos" and str(title) != "None":
                news_dict[title] = base_url + all_news.get("href")
        if type_return == 'dict':
            return news_dict
        # NOTE(review): the original appended "% _GET_PARAMS('callback')",
        # but _GET_PARAMS was never defined; the JSONP wrapping is dropped.
        return json.dumps(str(news_dict))


if __name__ == '__main__':
    # The original referenced an undefined "attr"; instantiate it here.
    attr = Cricket()
    if app is not None:
        app.add_url_rule('/', view_func=attr.news)
        app.add_url_rule('/cric/matches/', view_func=attr.list_matches)
        app.add_url_rule('/cric/live/', view_func=attr.live_score)
        app.add_url_rule('/cric/player_stats/',
                         view_func=attr.get_player_stats)
        port = int(os.environ.get("PORT", 5001))
        app.run(host='0.0.0.0', port=port, debug=True)
Marco mobile casegoods give you total flexibility in the classroom with the appearance of built-in cabinetry. Our cabinets are built on concealed casters, internal adjustable levelers and lock together with cap nuts. Steel tubing is added for strength and durability. Shelving is 1" thick to reduce sagging. European hinges give the cabinets a clean appearance. Modern door pulls are available in brushed nickel or black powder coat. Drawers have steel sides with a 5⁄8" bottom. Optional door and drawer locks are available. These bookcases are built with bolt-through steel tubing for added stability and strength. The thermofused laminate surface along with the 3mm PVC edge banding provide a durable surface on both sides. The adjustable compartment shelves are 1" thick to prevent the risk of sagging.
import re import yaml import time import datetime from libs import request import geocode config = yaml.load(open('config.yaml', 'r')) def matches(text): return re.search('time\s+(at|in)\s+(.+)', text, re.IGNORECASE) def decode(text): g = matches(text).groups() if g and g[1]: return [g[1]] def query(m, q): results = geocode.raw(q) if results: location = results.get('geometry').get('location') address = results.get('formatted_address') timestamp = time.time() timezone_results = request.ajax('https://maps.googleapis.com/maps/api/timezone/json?key=' + config.get('map_timezone_api_key') + '&location=' + str(location.get('lat')) + ',' + str(location.get('lng')) + '&timestamp=' + str(timestamp)) if timezone_results.get('status') == 'OK': readabletime = datetime.datetime.fromtimestamp(timestamp + timezone_results.get('rawOffset') + timezone_results.get('dstOffset')).strftime('%A, %d %B %Y, %I:%M %p') return 'It\'s {0} in {1} - {2} ({3}).'.format(readabletime, address, timezone_results.get('timeZoneId'), timezone_results.get('timeZoneName'))
What your personal brand really means. Last week I spoke at the Integrated Marketing Association Summit at NYUSPS. The topic was how to tell if your personal brand was digital ready. It’s a talk I’ve given before that’s based on my e-book on the subject. Whenever I speak I invite the audience to interrupt me with their questions rather than wait until the end. My experience has been that sets the stage for a more engaged and lively conversation, one in which I can address the needs of a specific audience that I might not have included otherwise, in this case a diverse group of graduate students. When we got into the area of how what you share on social media contributes to the story you’re telling about yourself, I was asked several questions about how I approached my own social sharing to tell my story. One young woman in the first row raised her hand and told me that I wanted to make an impact on people, I wanted to make a difference. She then clarified that it really wasn’t a question, it was a statement. I took a pause and a breath and thanked her. It was true. That is part of what my brand is about. Beyond the marketing, the social media, the content and the personal branding, my desire is always to cause people to think and to make a difference in their lives. That’s why I teach and consult. It’s why I write. When the talk was over, I couldn’t get what she said out of my head. Her remark had triggered another time I had been told something similiar. It was my sophomore year in college and my psychology professor found me outside his office one day in tears because I had just broken up with my boyfriend and thought my world had come to an end. He was working with a personality assessment based on the colors you chose. He asked if I wanted to take it. At that moment I was seeking insight and answers so of course I said yes. 
I don’t remember the name of the test or what happened to that professor, but I do remember how much it resonated with me at the time — enough to have kept the results he wrote with a black sharpie on the back of two half pages of yellow scrap paper all these years later and to remember where I had them stashed. I had to take a breath again. This kernel of what my personal brand is now has always been there, as far back as the young age of 19. I just wasn’t always conscious of it, perhaps sometimes running from it, and so did not always bring it into focus. But now here it was and clear enough for this student to recognize. My definition of a personal brand is not something you make up out of thin air as you might for a product or service. It’s roots lie in who you are, what your overarching desires are and the unique gifts you bring to the table. Digital tools allow us to tweak that personal brand in real time as our desires and goals and career evolve. We just need to remember that a personal brand is never a destination. It’s more of an intricately woven patchwork quilt of everything that makes up you and one that you keep adding to. And when there are specific things you want from that brand — a new job, new clients, to be seen as an expert or influencer — you showcase that part of you. But the core and the essence of who we are, what we need and what we are here to do never changes. If you’re like me you might forget what that is. Good thing I keep notes. Is your personal brand Digital Ready? Download my FREE ebook and find out! Originally published at joannetombrakos.com on April 4, 2017.
# coding: utf-8
#
# Copyright 2014 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Services for managing subscriptions."""

from __future__ import absolute_import  # pylint: disable=import-only-modules
from __future__ import unicode_literals  # pylint: disable=import-only-modules

from core.platform import models

(user_models,) = models.Registry.import_models([models.NAMES.user])


def _get_or_create_subscriptions_model(user_id):
    """Fetches the UserSubscriptionsModel for user_id, creating a fresh
    (not yet persisted) one when the user has no subscriptions model yet.
    """
    model = user_models.UserSubscriptionsModel.get(user_id, strict=False)
    if not model:
        model = user_models.UserSubscriptionsModel(id=user_id)
    return model


def subscribe_to_thread(user_id, feedback_thread_id):
    """Subscribes a user to a feedback thread.

    WARNING: Callers of this function should ensure that the user_id and
    feedback_thread_id are valid.

    Args:
        user_id: str. The user ID of the new subscriber.
        feedback_thread_id: str. The ID of the feedback thread.
    """
    subscribe_to_threads(user_id, [feedback_thread_id])


def subscribe_to_threads(user_id, feedback_thread_ids):
    """Subscribes a user to feedback threads.

    WARNING: Callers of this function should ensure that the user_id and
    the feedback_thread_ids are valid.

    Args:
        user_id: str. The user ID of the new subscriber.
        feedback_thread_ids: list(str). The IDs of the feedback threads.
    """
    model = _get_or_create_subscriptions_model(user_id)
    # Set arithmetic keeps the membership tests O(1).
    already_subscribed = set(model.general_feedback_thread_ids)
    new_thread_ids = list(set(feedback_thread_ids) - already_subscribed)
    model.general_feedback_thread_ids.extend(new_thread_ids)
    model.update_timestamps()
    model.put()


def subscribe_to_exploration(user_id, exploration_id):
    """Subscribes a user to an exploration (and, therefore, indirectly to
    all feedback threads for that exploration).

    WARNING: Callers of this function should ensure that the user_id and
    exploration_id are valid.

    Args:
        user_id: str. The user ID of the new subscriber.
        exploration_id: str. The exploration ID.
    """
    model = _get_or_create_subscriptions_model(user_id)
    if exploration_id not in model.exploration_ids:
        model.exploration_ids.append(exploration_id)
    model.update_timestamps()
    model.put()


def subscribe_to_creator(user_id, creator_id):
    """Subscribes a user (learner) to a creator.

    WARNING: Callers of this function should ensure that the user_id and
    creator_id are valid.

    Args:
        user_id: str. The user ID of the new subscriber.
        creator_id: str. The user ID of the creator.

    Raises:
        Exception. The user tried to subscribe to themselves.
    """
    if user_id == creator_id:
        raise Exception('User %s is not allowed to self subscribe.' % user_id)

    subscribers_model_creator = user_models.UserSubscribersModel.get(
        creator_id, strict=False)
    if not subscribers_model_creator:
        subscribers_model_creator = user_models.UserSubscribersModel(
            id=creator_id)

    subscriptions_model_user = _get_or_create_subscriptions_model(user_id)

    if user_id not in subscribers_model_creator.subscriber_ids:
        subscribers_model_creator.subscriber_ids.append(user_id)
        subscriptions_model_user.creator_ids.append(creator_id)
        subscribers_model_creator.update_timestamps()
        subscribers_model_creator.put()
        subscriptions_model_user.update_timestamps()
        subscriptions_model_user.put()


def unsubscribe_from_creator(user_id, creator_id):
    """Unsubscribe a user from a creator.

    WARNING: Callers of this function should ensure that the user_id and
    creator_id are valid.

    Args:
        user_id: str. The user ID of the subscriber.
        creator_id: str. The user ID of the creator.
    """
    subscribers_model_creator = user_models.UserSubscribersModel.get(
        creator_id, strict=False)
    subscriptions_model_user = user_models.UserSubscriptionsModel.get(
        user_id, strict=False)
    if user_id in subscribers_model_creator.subscriber_ids:
        subscribers_model_creator.subscriber_ids.remove(user_id)
        subscriptions_model_user.creator_ids.remove(creator_id)
        subscribers_model_creator.update_timestamps()
        subscribers_model_creator.put()
        subscriptions_model_user.update_timestamps()
        subscriptions_model_user.put()


def get_all_threads_subscribed_to(user_id):
    """Returns a list with ids of all the feedback and suggestion threads to
    which the user is subscribed.

    WARNING: Callers of this function should ensure that the user_id is valid.

    Args:
        user_id: str. The user ID of the subscriber.

    Returns:
        list(str). IDs of all the feedback and suggestion threads to which
        the user is subscribed.
    """
    model = user_models.UserSubscriptionsModel.get(user_id, strict=False)
    if not model:
        return []
    return model.general_feedback_thread_ids


def get_all_creators_subscribed_to(user_id):
    """Returns a list with ids of all the creators to which this learner has
    subscribed.

    WARNING: Callers of this function should ensure that the user_id is valid.

    Args:
        user_id: str. The user ID of the subscriber.

    Returns:
        list(str). IDs of all the creators to which this learner has
        subscribed.
    """
    model = user_models.UserSubscriptionsModel.get(user_id, strict=False)
    if not model:
        return []
    return model.creator_ids


def get_all_subscribers_of_creator(user_id):
    """Returns a list with ids of all users who have subscribed to this
    creator.

    WARNING: Callers of this function should ensure that the user_id is valid.

    Args:
        user_id: str. The user ID of the creator.

    Returns:
        list(str). IDs of all users who have subscribed to this creator.
    """
    model = user_models.UserSubscribersModel.get(user_id, strict=False)
    if not model:
        return []
    return model.subscriber_ids


def get_exploration_ids_subscribed_to(user_id):
    """Returns a list with ids of all explorations that the given user
    subscribes to.

    WARNING: Callers of this function should ensure that the user_id is valid.

    Args:
        user_id: str. The user ID of the subscriber.

    Returns:
        list(str). IDs of all explorations that the given user subscribes to.
    """
    model = user_models.UserSubscriptionsModel.get(user_id, strict=False)
    if not model:
        return []
    return model.exploration_ids


def subscribe_to_collection(user_id, collection_id):
    """Subscribes a user to a collection.

    WARNING: Callers of this function should ensure that the user_id and
    collection_id are valid.

    Args:
        user_id: str. The user ID of the new subscriber.
        collection_id: str. The collection ID.
    """
    model = _get_or_create_subscriptions_model(user_id)
    if collection_id not in model.collection_ids:
        model.collection_ids.append(collection_id)
    model.update_timestamps()
    model.put()


def get_collection_ids_subscribed_to(user_id):
    """Returns a list with ids of all collections that the given user
    subscribes to.

    WARNING: Callers of this function should ensure that the user_id is valid.

    Args:
        user_id: str. The user ID of the subscriber.

    Returns:
        list(str). IDs of all collections that the given user subscribes to.
    """
    model = user_models.UserSubscriptionsModel.get(user_id, strict=False)
    if not model:
        return []
    return model.collection_ids
Distinguishing Characteristics: Asian female. Black hair, brown eyes. Sun may use the first name Susan. She was born in China. Xiang was last seen in Columbia, Missouri on September 23, 2000. Her husband, Yinzhou Zheng, was last seen on September 17, six days before his wife disappeared. The couple planned to go visit one of their daughters in Iowa City, Iowa, but never arrived. Zheng and Sun have never been heard from again. They are both originally from China, and both of them left their passports behind. Zheng was a graduate student at the University of Missouri and Zheng's son was also attending the university at the time of his father and stepmother's disappearances. Few details are available in their cases.
#!/usr/bin/env python import sys, math from pylab import * def hitf(a): r = 0 if (a >= 1): r = 1 else: r = 0 return r def mean(b): return float(sum(b)) / float(len(b)) #infilename = "d2out" infilename = sys.argv[1] #alpha = float(sys.argv[2]) ifile = open(infilename, 'r') no_cache = 0 time = [] stab_cost = [] cache_time = [] #tmp_cr = [] tmp_cr = 0 for line in ifile: data = line.split() action = data[1] if action == "caching": #print action no_cache = no_cache + 1 cache_time.append(float(data[0])) elif action == "Node_Join": #print action if no_cache != 0: time.append(float(data[0])) cost = float(data[6]) cost_rate = cost / float(no_cache) #tmp_cr.append(cost_rate) tmp_cr = tmp_cr + cost_rate f_res = float(tmp_cr) / float(len(stab_cost) + 1) #stab_cost.append(mean(tmp_cr)) stab_cost.append(f_res) #print no_cache, cost, cost_rate else: st = 0 #leave or query. handle later #print stab_cost print len(time), len(stab_cost) #print cache_time for i in xrange(len(time)): print time[i], stab_cost[i] print ' ' for j in xrange(len(cache_time)): print cache_time[j], 0 """ figure(1) plot(time, stab_cost) xlabel('time') ylabel('number of messages per unique object') title('Stabilization Cost') grid(True) #figure(2) #plot(num_ave) #figure(3) #plot(cost) show() """
This Bike Pic Tuesday, digging through the archives, we caught this young biker chick out ahead of the pack, in the kid’s race at the North Star Bicycle Festival, in Stillwater MN. This year the event happens on June 12 – 16th. This entry was posted in Picture of the Day and tagged bike guide, bike mn, Bike Ride, bike tour, bike touring, biker chick, biker family, kids race, Minnesota Bike Guide, Minnesota bike maps, North Star Bicycle Festival, pic of the day, Stillwater MN on February 12, 2019 by Russ Lowthian.
import logging

from mailsync.models.base import BaseModel
from mailsync.models.adapter import adapter
from mailsync.models.sqlite import details_table, columns_table, database_table, provider_table, lists_table


class SetupModel(BaseModel):
    """Aggregates the per-list sync configuration (database, provider,
    columns) and computes how many rows still need to be synced."""

    def __init__(self):
        super(SetupModel, self).__init__()

    def _get_provider(self, provider_name):
        """Map an internal provider key to its display name.

        Raises KeyError for unknown providers, matching the original
        behavior.
        """
        providers = {
            "mailchimp": "MailChimp",
            "campaignmonitor": "Campaign Monitor"
        }
        return providers[provider_name]

    def _get_last_inserted_id(self, list_data, table, primary_key):
        """Return the id of the last row synced for this list, or 0 when
        nothing has been synced yet."""
        # Nothing synced for this list.
        if not (list_data.last_inserted_id and list_data.status
                and list_data.inserted_rows and list_data.rows_to_be_inserted):
            last_inserted_id = 0
        elif list_data.last_inserted_id:
            last_inserted_id = list_data.last_inserted_id
        else:
            # NOTE(review): unreachable - if the first condition is False,
            # last_inserted_id is truthy and the elif always fires. Kept for
            # fidelity with the original control flow.
            last_inserted_id = adapter.get_last_inserted_id(
                table, primary_key, "first")
        return last_inserted_id

    def get_synced_lists(self):
        """Build the list of configured syncs with their pending-row counts.

        Lists with incomplete configuration or failing adapters are skipped
        (adapter errors are logged, not raised).
        """
        synced_lists = []
        for synced_list_data in details_table.get_details():
            details_id = synced_list_data._id

            database_data = database_table.find_detail(details_id)
            provider_data = provider_table.find_detail(details_id)
            list_data = lists_table.find_detail(details_id)
            columns_data = columns_table.find_details(details_id)

            if database_data and provider_data and list_data and columns_data:
                driver = self.get_driver(database_data)
                adapter.setup(driver)

                table = database_data.table
                primary_key = adapter.get_primary_key(table)
                last_inserted_id = self._get_last_inserted_id(
                    list_data, table, primary_key)
                columns_dict = self.get_columns(columns_data)

                try:
                    rows_to_be_synced = adapter.get_rows_to_be_inserted(
                        table, columns_dict, primary_key, last_inserted_id)
                    provider_name = self._get_provider(provider_data.provider)
                    synced_lists.append({
                        "id": synced_list_data._id,
                        "name": list_data.name,
                        "last_synced": synced_list_data.last_synced,
                        "provider": provider_name,
                        "database": database_data,
                        "table": table,
                        "rows_to_be_synced": len(rows_to_be_synced)
                    })
                # "except Exception, err" is Python-2-only syntax; "as err"
                # works on both Python 2.6+ and Python 3.
                except Exception as err:
                    logging.error(err)
                    continue
        return synced_lists

    def check_synced_list(self, list_provider_id):
        """Return the stored list record for a provider-side list id."""
        return lists_table.find_list_by_listid(list_provider_id)

setup_model = SetupModel()
(NaturalNews) Natural News editor Mike Adams, the Health Ranger, has been busy again lately, recording several episodes' worth of a course that contains very useful information regarding how best to protect yourself and your family in today's chaotic, bio-infected world. His latest course, Episode 12, titled "Remaining Calm: Why Ebola Preparedness Leads to Confidence and Calm," is now available at BioDefense.com, and as is the case with all previous discussions, it is available free of charge to the public. In this episode, Adams leads off by saying that he has fielded scores of questions and concerns regarding the current Ebola outbreak, its spread to the United States and what people can do to better prepare for it. "I've had a lot of people contact me since [the disease] was found in Dallas," he said. "There's a lot of concern that [Ebola] is spreading, that even if [the first patient, Thomas Eric Duncan] is controlled, then... It could be spreading through people that are just flying into the country, going right through Customs, and then [are] walking around major U.S. cities." He added: "I've never heard so many people in a state of fear about an epidemic before... so I thought it would be really important to really explain why we need to stay calm about this." In this course, Adams discusses several important aspects of the Ebola outbreak, its spread to the U.S., what to expect, what is ahead and, vitally, what measures people can take to be prepared and, thus, calm their fears. Are you ready right now to survive a 21-day quarantine in your own home? Can you survive for 21 days without electricity? By preparing for a pandemic, you are also simultaneously prepared against all other potential disasters.
The very first step in gaining peace of mind, says Adams, is to start "making a list" of supplies, foodstuffs and medicines you will need if you have to a) venture out of your home during a widening Ebola outbreak; b) take care of someone inside your home (click here for a great primer on how to set up a "sick room"); or c) "bug in" - that is, stay in your home during a government-ordered quarantine of two to five weeks (or more). Adams says storable foods and as many as 100 gallons of water per person will go a long way toward getting you prepared for a potential widening Ebola outbreak (or any other manmade or natural disaster). Click here to listen to the course now.
#!/usr/bin/python3 # -*- coding: utf-8 -*- #******************************************************************** # ZYNTHIAN PROJECT: Jackpeak Python Wrapper # # A Python wrapper for jackpeak library # # Copyright (C) 2019 Brian Walton <brian@riban.co.uk> # #******************************************************************** # # This program is free software; you can redistribute it and/or # modify it under the terms of the GNU General Public License as # published by the Free Software Foundation; either version 2 of # the License, or any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # For a full copy of the GNU General Public License see the LICENSE.txt file. # #******************************************************************** from ctypes import * from os.path import dirname, realpath #------------------------------------------------------------------------------- # Jackpeak Library Wrapper #------------------------------------------------------------------------------- lib_jackpeak=None def lib_jackpeak_init(): global lib_jackpeak try: lib_jackpeak=cdll.LoadLibrary(dirname(realpath(__file__))+"/build/libjackpeak.so") lib_jackpeak.initJackpeak() lib_jackpeak.getPeak.restype = c_float lib_jackpeak.getPeakRaw.restype = c_float lib_jackpeak.getHold.restype = c_float except Exception as e: lib_jackpeak=None print("Can't init jackpeak library: %s" % str(e)) return lib_jackpeak def get_lib_jackpeak(): return lib_jackpeak #-------------------------------------------------------------------------------
AS BROADWAY CUTS UP through the Upper West Side of Manhattan and into Washington Heights, it gradually turns into a giant Caribbean bazaar. The avenue abounds with bodegas, farmacias, unisex beauty salons, bargain clothing outlets, restaurants serving pollo and platanos, and travel agencies offering bargains to the Dominican Republic. Women squeeze mangoes, children lick flavored ices, men play hard at dominoes, all to the accompaniment of a hundred different radios blaring salsa music. Long a magnet for immigrants, Washington Heights today is home to large colonies of Irish, Jews and, most numerous of all, Dominicans. As the ever-present crowds make their way up and down the street, the Heights seems a living embodiment of the American Dream - a vibrant, energetic urban melting pot. Wander off Broadway, though, and the neighborhood quickly seems like an American nightmare. On side streets in the 150's and 160's, clusters of tough teen-agers wearing beepers, four-finger gold rings and $95 Nikes offer $3 vials of crack, the high-octane, smokable derivative of cocaine. On every block there are four or five different ''crews,'' or gangs, each touting its own brand of the drug, known to aficionados as ''Scotty'' (as in ''Beam me up''). Some blocks are ''hotter'' than others, depending on the availability of the crack. On the hottest blocks Scotty is available ''24/7'' - 24 hours a day, seven days a week. So much business is transacted on these streets that Washington Heights has gained a reputation as the crack capital of America. How did it happen? How did a drug once confined to a handful of large-city neighborhoods make its way to Main Street in just a few short years? Much of the answer can be found in data generated by the Federal Government's unprecedented intelligence-gathering operation against crack. 
In 13 cities across the country, crack teams created over the last three years by the Justice Department are methodically tracking the importation, distribution and consumption of the drug. In New York, the United States Drug Enforcement Administration (D.E.A.) operates a special Unified Intelligence Division staffed by experts from a dozen agencies, including the F.B.I., the I.R.S., Customs, Immigration and the local police department. Twenty-four hours a day, the information pours in - from wiretaps and surveillance teams, witnesses and informants, police raids and drug busts. By combining this data with testimony from D.E.A. agents, police captains, sociologists, undercover agents, community leaders, criminologists, prosecutors, addicts and treatment experts, it is possible to trace crack's destructive sprint across America. WASHINGTON HEIGHTS HAS COME NATU-rally by its status as the nation's crack capital. It would be difficult to design a better location for marketing a new drug. First, the area is highly accessible. To the west, the George Washington Bridge brings in potential customers from New Jersey; to the north the Henry Hudson Parkway pulls them in from Westchester County. For residents of Passaic or Peekskill who want a quick high, the Heights offers easy-in, easy-out convenience. By all accounts, Washington Heights transacts more out-of-town drug deals than any other neighborhood in New York City. The neighborhood itself provides a substantial pool of users. For every newcomer who has made it as a shopkeeper on Broadway, there are others who have dropped out of school, had a child out of marriage, become permanently unemployed -likely candidates for drug use. Here, as throughout the country, crack does best where Americans -especially minorities - do worst. Finally, Washington Heights is home to New York's most enterprising drug dealers. No, they are not Colombians. 
Although most people associate Colombians with cocaine and crack - especially now that open warfare has broken out between the drug lords and the Government in Bogota - Colombians are not generally involved in the retailing of crack in the United States. They just produce cocaine in South America and smuggle it into the United States. The Colombians living in New York are too few in number and too insular to have the array of contacts necessary to move drugs on the street. The Dominicans, New York's fastest-growing immigrant group, do have such contacts. They also have the marketing talent. Crack dealing, like more legitimate lines of work, requires the ability to exercise quality control, hire a dependable workforce and develop a steady clientele. At all of this, the Dominicans (only a small percentage of whom are involved in the drug trade) have had a lot of experience. In the Caribbean, they are known as merchants and tradesmen. In New York, they have applied their savvy to become highly successful shopkeepers - and the city's top crack traffickers. ALTHOUGH CRACK'S ORIGINS RE-main obscure - no one really knows who invented it - the Dominicans are generally credited with having first developed it for a mass market. The breakthrough seems to have come about by accident, an improvised response to shifting consumer tastes. Until 1983, cocaine was used almost exclusively in powder form (known as cocaine hydrochloride). Extremely expensive, the drug was often consumed at parties and discos, where it was sprinkled on mirrors and snorted - usually through a high-denomination bill. Gradually, though, many snorters, seeking a more intense high, turned to freebasing. This involved treating cocaine powder with ether and reducing it to a crystalline base, which, when smoked, produced a sharp, pleasurable rush. Unfortunately, freebasing was complicated and messy, the subject of instruction booklets running on for many pages. 
It was dangerous, too, risking explosions like the one that hospitalized the comedian Richard Pryor. To avoid the hassle, customers began demanding that their dealers convert the powder to freebase in advance. This presented dealers with a dilemma. For, when cocaine is converted to freebase, it loses much of its weight. If they wanted to sell ready-made freebase at popular prices, dealers would have to absorb the loss - unless they could find an undetectable filler . The search was on. One researcher who observed it was Terry Williams, a sociologist at the City University of New York who in 1982 began hanging out with a teen-age cocaine gang in Washington Heights. Williams, who recounts his experiences in a recently published book, ''The Cocaine Kids,'' recalls that the local Dominican gangs ''came up with something called 'comeback,' '' a chemical adulterant akin to lidocaine, a prescription anesthetic. ''When comeback is blended with cocaine powder and cooked, all of it remains in the mix,'' Williams explains. ''This became the first chemical that you could cut freebase with.'' The result was the prototype for crack. Take two ounces cocaine hydrochloride. Mix with two ounces comeback and one ounce Arm & Hammer. Add water. Bring to a boil. Let cool into a solid mass. Break into small pieces. Smoke. Intense rush should follow instantly. Serves 2,000. Before long, Dominican dealers were out on the streets of Washington Heights peddling the new substance in pellet form at a few dollars a pop. (The name came from the crackling sound the drug made when smoked.) The ''champagne'' of drugs, once limited to the elite, was now available - somewhat diluted - to drifters and dropouts, welfare mothers and unemployed youths. Dealers set up shop across the street from schools, enticing teen-agers with free samples. They also offered two-for-one deals and ''Mother's Day'' specials timed to coincide with the arrival of welfare checks. 
Soon, customers were seeking out dealers rather than the other way around, helping establish Washington Heights as America's first major crack market. The Dominicans then began fanning southward into Harlem and eastward into the South Bronx. They also began supplying cocaine to other ethnic groups. In Harlem, in South Jamaica, Queens, in the Brooklyn neighborhoods of Bushwick and Brownsville, poor young blacks - jobless, uneducated and desperate - hungered for a piece of the ''crazy money'' crack offered. To get started, it took as little as an ounce of cocaine, an investment of perhaps $1,000. Obtaining it was not much of a problem. By the mid-1980's, cocaine was arriving in New York by the ton. Importation was controlled by the Cali Cartel, Colombia's second-largest syndicate, after the Medellin Cartel. Desperate to unload their supplies, the Colombians found dependable customers in the Dominicans of Washington Heights. The two groups got along well, joined not only by a common language but also by similar entrepreneurial values. The Dominicans became New York's chief middlemen. As sales boomed to aspiring young dealers outside Washington Heights, the city was eventually carved up along ethnic lines, with Dominican-supplied blacks controlling Harlem, Queens and Brooklyn, and the Dominicans dominant in upper Manhattan and the South Bronx. FROM THE START, CRACK'S DISTRI-bution system distinguished it from all other drugs. Heroin, for instance, was highly centralized. From poppy cultivation to street sales, the trade was dominated by a single organization - the Mafia, which, though ruthless, imposed a certain order on the trade. Gang wars were rare, and police officers were never fired upon. Because distribution was so tightly controlled, it was possible, with diligent police work, to put an entire network out of business, as occurred in the French Connection case. If heroin was the Fortune 500, crack was Mom and Pop. 
A typical crack organization would have no more than seven or eight people - a street seller or two, a steerer to direct customers, a guard to protect the merchandise, a police lookout, a weigher (known as a ''scale boy''), a manager and a ''Mr. Big'' to count the profits. Competition was intense. In busy areas like Washington Heights, one block might host four or five crews, all contending for the same consumer dollar. With no overall hierarchy or command structure to impose order, turf wars broke out over the most lucrative spots. Dealers regularly ripped off customers and stole from one another, leading to frequent shootouts, stabbings and executions. Crack created a new breed of urban guerrilla, members of a fierce, proliferating army that left the police badly outnumbered and outgunned. By late 1985, when crack first came to the attention of the national press, it was deeply entrenched in New York's poor neighborhoods. In the rest of the country, only two cities - Los Angeles and Miami - had comparable crack problems. All three cities were major distribution points for cocaine, so it was natural that crack would engulf them first. Soon, however, the drug began to move out from these gateway cities and into the heartland. It would travel via a fearsome new set of traffickers - illegal immigrants from Jamaica - who would quickly and radically transform the way crack was distributed around the country. DELROY EDWARDS GREW up poor in the tough, stifling shantytowns of Kingston, Jamaica. In 1980, at the age of 20, he went to work as a street enforcer for the Jamaica Labor Party of Edward Seaga. Seaga was locked in a bitter election duel with the People's National Party, headed by Michael Manley, and each side was forming armed gangs to intimidate the other. The gangs did their job only too well, killing 800 people by election day. After his victory, Seaga launched a crackdown, and many gang members, feeling the heat, headed for the United States. 
Among them was Delroy Edwards. Slipping into Brooklyn on a tourist visa, he eventually made his way into the marijuana business, selling nickel bags out of a neighborhood storefront. At the beginning of 1985, Edwards learned to make crack. Soon he was selling little else. He worked out of two ''flagship'' spots in Brooklyn - one, a two-story house, the other, an abandoned brownstone near a housing project. Enough poor blacks coughed up enough $5 bills to enable Edwards to buy a $150,000 home on Long Island - and to pay for it in cash. That wasn't enough for Edwards, who began looking to expand his business. Unfortunately, New York was already crowded with crack dealers; outside the city, however, lay plenty of virgin territory. In Washington, Baltimore and Philadelphia, for instance, crack was just beginning to catch on. Enterprising local dealers would travel to New York, buy a few ounces of cocaine, return home, convert it into crack, and sell the product for three or four times the New York street price. In the fall of 1986, Edwards traveled to Washington and set up shop; by the following spring his lieutenants had established thriving businesses in Philadelphia and Baltimore as well. At its peak, Edwards's organization, known as the Rankers, employed 50 workers and made up to $100,000 a day. The glory days did not last. Edwards - nicknamed ''Uzi'' for his taste in weapons -was pathologically violent. People who crossed him were pistol-whipped, beaten with baseball bats, shot in the legs. One 16-year-old worker, suspected of cheating, was beaten unconscious with bats, scalded with boiling water, and suspended by a chain from the ceiling until he died. Eventually, the police caught up with Edwards, and in July a Brooklyn jury convicted him on 42 counts of murder, assault, kidnapping and drug dealing. Edwards is now awaiting sentencing. The Rankers have disintegrated. 
But there are 40 other groups just like the Rankers, running crack out of New York and Miami to points across the country. Posses, they're called, after their members' affection for American westerns (and the guns used in them). Most, like the Rankers, took shape as gangs during the 1980 Jamaican election, then fled to the United States and regrouped. Here, their 10,000 to 20,000 members, organized in posses with as few as 25 members and as many as several hundred, keep incessantly on the move, slipping in and out of the many Jamaican communities scattered across the country. To maintain loyalty, each posse generally restricts membership to the residents of a particular neighborhood in Kingston. Posse members travel with fake IDs, making it tough for policemen to identify them. Sometimes, as a cover, they attach themselves to reggae groups touring the country. Today, Jamaicans are believed to control 35 percent to 40 percent of the nation's crack network. ''They're very good businessmen,'' says John A. O'Brien, an agent with the Bureau of Alcohol, Tobacco and Firearms (B.A.T.F.), the Federal agency that most closely monitors the posses. ''They follow the law of supply and demand. When they see that a vial of crack selling for $5 in New York will get $15 in Kansas City, they'll move in.'' New York is their ''training school,'' O'Brien says, ''like going to Wharton. They'll take a guy doing a good job in Harlem and send him to open an office in the Midwest.'' On his arrival in the new area, the posse sales rep will rent a motel room and conduct a market survey of sorts to determine the most lucrative spot in town. Then he'll rent an apartment or, better yet, get a single female to lend him one in return for crack. 
When asked how the posses move the drug from city to city, Bill McMullan, the assistant special agent in charge of the D.E.A.'s Washington office, jokingly cites the title of a recent movie: ''Planes, Trains and Automobiles.'' Amtrak, Greyhound, commercial airlines, Federal Express, U.P.S. - the posses use them all, regularly. To carry cocaine on commercial flights, the Jamaicans tend to recruit overweight women able to conceal one- or two-pound packages on their person. Also popular are rental cars, preferably Volvos, sent over the nation's highways, preferably Interstates. ''When I see some of the places the posses are operating, I can't find any other explanation than the presence of a nearby Interstate,'' says Stephen Higgins, the B.A.T.F.'s director in Washington. In deciding where to strike, the Jamaicans generally follow the path of least resistance. Cities with well-organized criminal groups, such as Newark, St. Louis and Chicago, tend to get bypassed. At first glance, Chicago would seem to be an ideal posse target. It is a major transportation hub, has a vast inner-city population and offers block upon block of public housing projects, a favorite crack target. But Chicago also has plenty of established homegrown gangs doing a brisk business in cocaine and heroin. Intent on protecting their trade, they have worked determinedly to keep outside traffickers from entering. ''When crack first appeared, some groups did try to come here and stake out some territory, but they quickly left,' '' says Vincent Lang, chairman of the Chicago Housing Authority. ''They wanted to remain alive.'' Today Chicago is awash in powder cocaine, but crack is very hard to find. Perhaps wary of the anarchic market forces crack has unleashed elsewhere, local dealers have opted out of selling it themselves. In other cities, dealers tend to be too weak and disorganized to stand up to the posses and their tactics. The Jamaicans are fanatics for weapons. 
Taking advantage of lax gun laws in Texas, Florida and Virginia, they have stockpiled Uzis and AR-13 assault rifles. When breaking into a new area, the Jamaicans come in with all barrels blazing. ''One cause of the violence we're seeing in many cities is Jamaican traffickers pushing out American blacks,'' says Jonny Frank, a prosecutor in the Delroy Edwards case. According to the B.A.T.F., the posses have been responsible for approximately 1,000 murders since 1985. Washington, Philadelphia, Dallas, Houston, Kansas City, Denver - all have suffered Jamaican invasions. In New York, the posses have succeeded in taking over much of Brooklyn and Harlem, establishing themselves as the city's second largest traffickers, after the Dominicans. Many smaller cities have been hit, too. New York-based posses have set up a thriving operation in Hartford, shipping their merchandise there via bus and train. In New York State, the Jamaicans are moving crack up Interstate 87, hitting such tiny Hudson Valley towns as Newburgh, Kingston and Saratoga Springs. In West Virginia, the posses have established crack houses in Martinsburg (population 13,000) and Charles Town (3,000). From there they have moved out along Interstate 81, shipping crack as far north as Chambersburg, Pa., and as far south as Roanoke, Va. Roanoke! It would be hard to imagine a more unlikely setting for crack. A town of only 90,000, Roanoke is nestled in the heart of Virginia's dairy and orchard country, several hours' drive from the nearest city. Yet members of the Shower posse, the largest in the country, have managed to find their way there from New York. On closer examination, though, the presence of crack in Roanoke is really not so surprising. Even towns with Norman Rockwell-like reputations have pockets of alienation and despair, open sores in which crack can take hold and fester. Roanoke has its own modest ghetto, beset by the same social ills afflicting larger cities. 
''We have a sizable minority community, and that's where the Jamaicans set up,'' observes Tom Bondurant, an assistant United States attorney in Roanoke. Demand was obviously high: crack was selling for $25 to $30 a vial - more than enough to catch the attention of big-city traffickers. Roanoke's case is typical. Across the country, crack has displayed remarkable consistency, taking root in those sectors of society least able to resist it. In places like Charles Town, Newburgh, and even Fort Wayne, Ind., crack has found a foothold among poor blacks and - to a far lesser degree - Hispanics. Will it stay that way? Or will crack break out like some threatening contagion into the middle-class population, white and black? In some places, it already has. In Washington - the city with the nation's highest rate of drug-related violence -crack has leaped across the city lines into the middle-class suburbs of Maryland and Virginia. Until June 1988, for instance, crack was virtually unknown in Howard County, a tranquil, middle-class area midway between Washington and Baltimore; today the drug is being distributed in 20 locations, including some attractive town houses. TodAY'S REALITY, meanwhile, is most visible in Kansas City, Mo. A year ago, the city was declaring victory in the war on crack. A Federal organized-crime task force had succeeded in dismantling a thriving crack operation run by the Waterhouse posse. Working out of a fortresslike house in the heart of the inner city, the Jamaicans had employed an estimated 1,000 people at one point. Demand was so great that the posse had had to import Jamaicans from New York and Miami. This proved to be its undoing. The new workers felt little loyalty to the group, and, when arrested, tended to talk, providing the task force with invaluable information. In late 1986, the Feds struck, raiding ghetto crack houses and rounding up dealers. Eventually, 178 traffickers were prosecuted and 25 more deported. 
By the start of 1987, no more than 75 posse members remained in town. Across the nation, Kansas City was hailed as a stunning success story. Los Angeles's black street gangs are the fastest-growing set of crack traffickers in the country. Already they have established a national network second only to that of the posses. They are grouped into confederations known as the Bloods and the Crips, each with its own color (blue for Crips, red for Bloods), slang and hand signals. Fiercely territorial, the gangs have traditionally concentrated on fighting one another over impoverished patches of south-central Los Angeles. Crack has changed that. Sensing the enormous profit potential in the drug, the Bloods and the Crips are now paying less attention to one another and more to transporting crack. Today, more than 10,000 gang members are at work in some 50 cities from Seattle to Baltimore. Generally, they emulate the tactics of the posses, infiltrating black communities by working through local contacts. So far, the gangs and the posses have avoided fighting one another. In fact, they seem to be dividing up the country between them -the gangs working eastward from California, the posses westward from New York. The two national networks are gradually crisscrossing - with devastating consequences for towns caught in the middle. Indeed, even if the posses and L.A. gangs were somehow tamed - an extremely unlikely prospect - there are plenty of other groups ready to take their place. Both Cubans and Guyanese have established fledgling interstate operations, while Haitian traffickers based in Fort Pierce, Fla., are transporting crack along the East Coast, via migrant farm workers. In Miami, Detroit and countless smaller cities across the country, local black rings are at work, looking intently for opportunities to expand. 
Stutman cites a Dominican organization in Washington Heights that, in three short years, went from being street-corner peddlers to becoming a multitier conglomerate selling more than 10,000 vials of crack a day. The group was so well organized that it marketed its crack under a brand name (''Based Balls''). Such practices eventually attracted the attention of the D.E.A., which, after an intensive investigation, put the group out of business. Unfortunately, the D.E.A.'s efforts had little effect on the supply of crack in New York - the usual outcome of law-enforcement action aimed at the drug. No matter how determined the effort to root it out, crack seems always to thrive. That has been the experience in Kansas City. In Brooklyn, crack continues to sell for $3 a vial despite the prosecution of Delroy Edwards. In Washington, the police recently seized Rayful Edmond 3d, allegedly the city's largest dealer, but crack remains available in 120 locations throughout town. ''Right after we locked up Edmond, the homicide rate slowed down for a while,'' says Collin Younger, commander of the narcotics branch of the D.C. police. ''Now it's beginning to pick up again as other dealers fight over his territory.'' Crack is getting so plentiful in the District of Columbia that Younger expects the price to drop any day now. As if crack weren't enough to contend with, a new drug has recently appeared on the horizon. Called ice, it is a smokable version of methamphetamine, or speed. It creates a high that lasts for up to 24 hours, compared with crack's 20-minute high, followed by a ''crash'' so severe that it can resemble paranoid schizophrenia. President Bush's anti-drug strategy, unveiled on Sept. 5, calls for a heavy reliance on police and prosecutors. Fully 70 percent of its projected $7.9 billion spending will go for law enforcement, including $1.6 billion for new prisons and $3.1 billion for state and local police. 
The remaining 30 percent will go toward treatment, prevention and education. Judging from the record of the police to date, though, the Bush plan seems unlikely to make any real dent in the amount of crack on the streets. A sense of resignation is settling over America's drug agents. More and more of them are beginning to sound like Francis Hall. For four years, until his retirement in March, Hall served as commanding officer of the New York Police Department's narcotics division, making him, in effect, New York's top narc. Hall helped design the Tactical Narcotics Teams (T.N.T.) -the special police units that carry out sweeps through drug-infested neighborhoods - that today are the city's principal weapon in the fight against drugs. The Vietnam analogy might be taken a step further. The Vietcong grew largely because of the social, political and economic breakdown engulfing Vietnam. No matter how much firepower the United States expended, the guerrillas always managed to regroup, nurtured by the poverty and injustice around them. Much the same is true of crack in America. No matter how many sweeps, raids and busts our police departments mount against crack traffickers, they always manage to resurface. Only when we address the conditions that have given rise to crack - the desperation of our inner cities - will we begin to see the light at the end of the tunnel. Michael Massing, a New York writer, is a 1989 Alicia Patterson Fellow.
# Time:  O(n)
# Space: O(1)

# We are given two strings, A and B.
#
# A shift on A consists of taking string A and moving the leftmost character
# to the rightmost position. For example, if A = 'abcde', then it will be
# 'bcdea' after one shift on A. Return True if and only if A can become B
# after some number of shifts on A.
#
# Example 1:
# Input: A = 'abcde', B = 'cdeab'
# Output: true
#
# Example 2:
# Input: A = 'abcde', B = 'abced'
# Output: false
#
# Note:
# - A and B will have length at most 100.

# NOTE: the original used the Python-2-only builtin `xrange`, which raises
# NameError on Python 3; `range` is used instead (equivalent for these loops).


# Rabin-Karp Algorithm (rolling hash)
class Solution(object):
    def rotateString(self, A, B):
        """
        Return True iff B is some rotation (cyclic shift) of A.

        :type A: str
        :type B: str
        :rtype: bool
        """
        def check(index):
            # Verify a hash match character by character: does the rotation
            # of A starting at `index` equal B? Guards against collisions.
            return all(A[(i + index) % len(A)] == c
                       for i, c in enumerate(B))

        if len(A) != len(B):
            return False

        # Hash base p and prime modulus M; p_inv is the modular inverse of p
        # (Fermat's little theorem, valid because M is prime).
        M, p = 10**9 + 7, 113
        p_inv = pow(p, M - 2, M)

        # Polynomial hash of B.
        b_hash, power = 0, 1
        for c in B:
            b_hash += power * ord(c)
            b_hash %= M
            power = (power * p) % M

        # Hash of the first len(B)-character window of A+A.
        a_hash, power = 0, 1
        for i in range(len(B)):
            a_hash += power * ord(A[i % len(A)])
            a_hash %= M
            power = (power * p) % M

        if a_hash == b_hash and check(0):
            return True

        # Slide the window one position at a time across A+A, updating the
        # hash in O(1): drop the oldest character, shift by p_inv, append
        # the incoming character at the highest power.
        power = (power * p_inv) % M
        for i in range(len(B), 2 * len(A)):
            a_hash = (a_hash - ord(A[(i - len(B)) % len(A)])) * p_inv
            a_hash += power * ord(A[i % len(A)])
            a_hash %= M
            if a_hash == b_hash and check(i - len(B) + 1):
                return True

        return False


# Time: O(n)
# Space: O(n)
# KMP algorithm
class Solution2(object):
    def rotateString(self, A, B):
        """
        Return True iff B is some rotation of A, by searching for B as a
        substring of A+A with Knuth-Morris-Pratt.

        :type A: str
        :type B: str
        :rtype: bool
        """
        def strStr(haystack, needle):
            def KMP(text, pattern):
                # Standard KMP search; returns the first match index or -1.
                prefix = getPrefix(pattern)
                j = -1
                for i in range(len(text)):
                    while j > -1 and pattern[j + 1] != text[i]:
                        j = prefix[j]
                    if pattern[j + 1] == text[i]:
                        j += 1
                    if j == len(pattern) - 1:
                        return i - j
                return -1

            def getPrefix(pattern):
                # Failure function: prefix[i] is the end index of the longest
                # proper prefix of pattern[:i+1] that is also its suffix.
                prefix = [-1] * len(pattern)
                j = -1
                for i in range(1, len(pattern)):
                    while j > -1 and pattern[j + 1] != pattern[i]:
                        j = prefix[j]
                    if pattern[j + 1] == pattern[i]:
                        j += 1
                    prefix[i] = j
                return prefix

            if not needle:
                return 0
            return KMP(haystack, needle)

        if len(A) != len(B):
            return False
        return strStr(A * 2, B) != -1


# Time: O(n^2)
# Space: O(n)
class Solution3(object):
    def rotateString(self, A, B):
        """
        Return True iff B is some rotation of A.

        Every rotation of A is a substring of A+A, so the whole check is a
        length comparison plus one substring test.

        :type A: str
        :type B: str
        :rtype: bool
        """
        return len(A) == len(B) and B in A * 2
this is the new project of apse: www.eraasgroup.com http://soundcloud.com/eraas/sets https://www.facebook.com/eraasgroup Apse (pronounced /‘apps/) is a cadre of ghosts that has been wailing into the cold New England wind for a decade. In recent years the group has channeled itself through ATP Recordings, reaching a larger audience so all can immerse themselves in its dark realms. Its sounds are heard on the tribal atmospheric swamp of Spirit, the drug-addled lo-fi montage Eras, or recently on Climb Up’s more rousing gesture toward the light. The group's live performances - often known for their unmistakable dark intensity and shamanic conjurings - are not to be missed.
from sklearn.metrics import confusion_matrix, accuracy_score, precision_score, recall_score, f1_score


def read_file(path):
    # Parse a file of whitespace-separated integers, one record per line.
    # Returns a list of int-lists, one per input line.
    y = list()
    with open(path) as fid:
        for line in fid.readlines():
            curr_y = list()
            vals = line.split()
            for val in vals:
                curr_y.append(int(val))
            y.append(curr_y)
        fid.close()  # redundant: the with-statement already closes the file
    return y


# Ground-truth and predicted records.
# NOTE(review): rows appear to be event annotations (onset/offset values,
# with an optional extra field) -- confirm the .dat format with the producer.
y_all = read_file('labels.dat')
y_hat_all = read_file('pred.dat')

# Binary classification view: a 2-field ground-truth row counts as the
# positive class, and a prediction whose second field is -1 counts as a
# positive prediction.
pos_neg_y = list()
pos_neg_y_hat = list()
for i, y in enumerate(y_all):
    if len(y) == 2:
        pos_neg_y.append(1)
    else:
        pos_neg_y.append(0)
    if y_hat_all[i][1] == -1:
        pos_neg_y_hat.append(1)
    else:
        pos_neg_y_hat.append(0)

print('Confusion Matrix: ')
print(confusion_matrix(pos_neg_y, pos_neg_y_hat))
print  # Python 2 blank-line print statement (a no-op expression on Python 3)
print('Total Accuracy: %.3f' % accuracy_score(pos_neg_y, pos_neg_y_hat))
print('Precision: %.3f' % precision_score(pos_neg_y, pos_neg_y_hat))
print('Recall: %.3f' % recall_score(pos_neg_y, pos_neg_y_hat))
print('F1-Score: %.3f' % f1_score(pos_neg_y, pos_neg_y_hat))

# Mean absolute onset/offset error, accumulated separately for the positive
# (2-field) and negative (3-field) ground-truth rows.
cumulative_onset_pos = 0.0
cumulative_offset_pos = 0.0
count_pos = 0
cumulative_onset_neg = 0.0
cumulative_offset_neg = 0.0
count_neg = 0
for i, y in enumerate(y_all):
    if len(y) == 2:
        cumulative_onset_pos += abs(y[0] - y_hat_all[i][0])
        # Offset error is measured against the last predicted field.
        cumulative_offset_pos += abs(y[1] - y_hat_all[i][len(y_hat_all[i]) - 1])
        count_pos += 1
    if len(y) == 3:
        if y_hat_all[i][1] == -1:
            # Prediction flagged with -1: its offset sits in field 2.
            cumulative_onset_neg += abs(y[0] - y_hat_all[i][0])
            cumulative_offset_neg += abs(y[1] - y_hat_all[i][2])
            count_neg += 1
        else:
            cumulative_onset_neg += abs(y[0] - y_hat_all[i][0])
            cumulative_offset_neg += abs(y[1] - y_hat_all[i][1])
            count_neg += 1

print('Pos')
print('Average onset: %.3f' % (cumulative_onset_pos / count_pos))
print('Average offset: %.3f' % (cumulative_offset_pos / count_pos))
print('Neg')
print('Average onset: %.3f' % (cumulative_onset_neg / count_neg))
print('Average offset: %.3f' % (cumulative_offset_neg / count_neg))

# Duration loss with margin gamma_m and mis-detection penalty gamma_0, plus
# the fraction of rows whose duration error falls under several thresholds.
# Rows where ground truth and prediction disagree on the class are skipped
# via `flag` (their commented-out loss terms were deliberately disabled).
cumulative_loss = 0.0
gamma_m = 4
gamma_0 = 100
count = 0
ms2 = 0
ms5 = 0
ms10 = 0
ms15 = 0
ms25 = 0
ms50 = 0
duration = 0
duration_hat = 0
flag = 0
for i, y in enumerate(y_all):
    if len(y) == 2 and y_hat_all[i][1] == -1:
        # duration = y[1] - y[0]
        # duration_hat = y_hat_all[i][2] - y_hat_all[i][0]
        # cumulative_loss += max(0, abs((y_hat_all[i][2] - y_hat_all[i][0]) - (y[1] - y[0]) - gamma_m))
        flag = 1
    elif len(y) == 3 and y_hat_all[i][1] != -1:
        duration = y[1] - y[0]
        duration_hat = y_hat_all[i][1] - y_hat_all[i][0]
        cumulative_loss += max(0, abs((y_hat_all[i][1] - y_hat_all[i][0]) - (y[1] - y[0]) - gamma_m))
        count += 1
    elif len(y) == 2 and y_hat_all[i][1] != -1:
        # duration = y[1] - y[0]
        # duration_hat = y_hat_all[i][1] - y_hat_all[i][0]
        # cumulative_loss += gamma_0
        flag = 1
    else:
        duration = y[1] - y[0]
        duration_hat = y_hat_all[i][2] - y_hat_all[i][0]
        cumulative_loss += gamma_0
        count += 1
    if flag == 0:
        # NOTE(review): `diff` is signed, so a prediction longer than the
        # ground truth always passes every threshold below -- confirm
        # whether abs() was intended here.
        diff = duration - duration_hat
        if diff < 2:
            ms2 += 1
        if diff < 5:
            ms5 += 1
        if diff < 10:
            ms10 += 1
        if diff < 15:
            ms15 += 1
        if diff < 25:
            ms25 += 1
        if diff < 50:
            ms50 += 1
    flag = 0

print(cumulative_loss / float(count))
print("==> 2ms > %.3f%%" % (100 * ms2 / float(count)))
print("==> 5ms > %.3f%%" % (100 * ms5 / float(count)))
print("==> 10ms > %.3f%%" % (100 * ms10 / float(count)))
print("==> 15ms > %.3f%%" % (100 * ms15 / float(count)))
print("==> 25ms > %.3f%%" % (100 * ms25 / float(count)))
print("==> 50ms > %.3f%%" % (100 * ms50 / float(count)))
Do you have a question about the book? Want to schedule an interview? Interested in discussing your own travels? Have a question about #TRLT or #CultureTrav? Heading on a trip and want some one-on-one personalized advice, for Amsterdam, Detroit or elsewhere in the world? Interested in being a guest blogger? Want to hire or work with Jessica? Discuss something else? Please feel free to contact Jessica using the form below or at contact@jessicalipowski.com. Have a Question About Book Sales? Book sales are handled through JL Publishing. Direct any questions through the form to the left and address it to JL Publishing. Alternatively, send an email to contact@jessicalipowski.com and put JL Publishing Inquiry in the subject line. Read the personal stories of Amsterdam restaurateurs and be inspired! Subscribe to the newsletter to stay informed.
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations import creditpiggy.core.models def generate_uuid(apps, schema_editor): PiggyUser = apps.get_model('core', 'PiggyUser') for user in PiggyUser.objects.all().iterator(): user.uuid = creditpiggy.core.models.new_uuid() user.save() class Migration(migrations.Migration): dependencies = [ ('core', '0005_auto_20150607_0838'), ] operations = [ migrations.AddField( model_name='piggyuser', name='uuid', field=models.CharField(default=creditpiggy.core.models.new_uuid, help_text=b'Unique user identification string', max_length=32), preserve_default=False, ), migrations.RunPython( generate_uuid, ), migrations.AlterField( model_name='piggyuser', name='uuid', field=models.CharField(default=creditpiggy.core.models.new_uuid, help_text=b'Unique user identification string', unique=True, max_length=32, db_index=True), preserve_default=True, ), ]
You can also prepare your own decoys as they are very easy to craft. Just make a few balls from some old newspapers, put them into plastic shopping bags and tie them well. Such prepared bags put into paper medium-size bags and that’s it. Now you can hang your decoys around the house, focusing on places in which wasps are likely to be present. A very interesting way to deter wasps is to grow plants in your yard that will repel those insects and at the same time remove flowers that might be attractive to them. Some green plants that are natural wasp deterrents are spearmint, eucalyptus, wormwood, and thyme citronella. They are not only natural and beautiful, but they will also keep wasps away. Peppermint oil appears to be very effective when it comes to keeping wasps away. Some studies have proved that those insects don’t like any type of mint so it’s worth growing it in your garden or balcony. You can also sprinkle several cotton pads with peppermint oil and put them in strategical places around your home or garden such as crevices, ledges, any cracks or porch roofs. Place those pads in the spots in which you discovered nests in the past as there are some kinds of wasps which tend to build their nests in the same spots again and again. Peppermint oil is not the only type of oil that can deter wasps. Similarly mix lemongrass, clove, and geranium essential oils and you will observe that wasps do not like the smell of this blend. Just add some drops of these oils to some water and use a spray bottle to sprinkle the places at or near your home where wasps tend to build their nests. A good solution too is to prepare a sugar trap for wasps. In order to do so you just need to place around your house some containers with water and sugar in them. Because of sugar it will be very tempting for wasps to fly into the container and once they are in, they are trapped and can’t get out and your problem with buzzing insects is solved. 
If it’s too late for prevention and you’ve been already dealing with the problem of wasps in your area, try some safe and effective home remedies. Remember, though, that it can be very dangerous to move a wasp nest on your own. The best thing is to turn to a professional who will do the dirty job for you. If you want to do it yourself, you must be sure that you are not allergic to wasps’ stings. If you simply don’t know it, run allergy tests first. When attempting a nest yourself, always wear protective clothing. It is probably the most eco-friendly way as to get rid of wasps from your home you need plain soap and water only. Prepare a spray bottle with water and add to it two tablespoons of dish soap. Now you only need to spray the hanging nests. According to experts soap clogs the breathing spores of wasps and they die instantly so this method is also more humane than using chemicals, which need hours to kill the insects. This solution, however, works best with smaller nests. If you deal with a larger one, you should use a garden hose. You just need to fasten the hose-end sprayer and follow the same procedure as in case of a bottle sprayer. Of course, here it can get quite messy and the possibility of getting stung is higher so always try to stand as far from the nest as possible. Remember to wear protective clothes too and before you attack the wasps with full pressure, allow the hose to run for a while to get it going. If you struggle with an aerial nest, you can remove it by drowning it. This method, however, brings some risk of getting stung so to try it you need to be 100% sure that you are not allergic. Even if you aren’t allergic, put on protective clothes, such as long, thick trousers, heavy boots, a hooded jumper, gloves, and protective glasses. You should do the job at night as this is the time when wasps are less active and less aggressive. Don’t use a standard flashlight for clear visibility as it will attract wasps. 
Use a light in the shade of red or amber instead. You need to place a cloth bag over the whole nest carefully and quickly tie it off. Put the bag into a bucket with water and cover it with a large, heavy rock. Remember that this method works only with aerial nests, which are accessible easily. Another solution is to smoke the wasps out their nest. Remember, though, that this method requires all the protective measures mentioned in the previous point. What you should do is to light a fire, for example in a grill, just under the nest.
''' Copyright 2015, Institute for Systems Biology Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ****************************************************** Using python to generate bigqueries. Here we use the 'filter file' to create subusets of data to download. ****************************************************** First need to install the BigQuery API pip3 install --upgrade google-cloud-bigquery The first time I ran the installer, there was an error. But just running pip3 again seemed to work. Also we need to get authenticated. At the command line we: gcloud auth application-default login # table:isb-cgc.tcga_201510_alpha.DNA_Methylation_betas # tablevar:Probe_Id # annot:isb-cgc.platform_reference.methylation_annotation # annotvar:IlmnID # idvar:ParticipantBarcode # valvar:Beta_Value # pivot:UCSC.RefGene_Name # after the annotation join # filter:SampleTypeLetterCode='TP' # filter:Study='BRCA' # filter:UCSC.RefGene_Name IN ('ACSM5','NAP1L4','SULF2') # limit:100 ''' from google.cloud import bigquery import argparse import sys ko = ['idvar', 'valvar', 'pivot', 'table', 'annot', 'tablevar', 'annotvar', 'filter', 'limit'] # Some queries must be annoated before running pairwise ## to this point, some annotation fields are nested ## so we need to check the schema first. 
def checkSchemas(client, ffd):
    """Inspect the schema of the table named in ffd['table'].

    Requires a client pointed at the project that owns the table.  Prints
    the first schema field as a sanity check; RECORD/STRUCT fields will
    need to be flattened before pivoting (not implemented yet).

    Returns the filter-file dict unchanged so callers can keep chaining it.
    """
    ts = ffd['table'].split('.')
    d1 = client.dataset(ts[1])
    t1 = d1.table(ts[2])
    t1.reload()  # populate t1.schema from the BigQuery service
    print(t1.schema[0].description)
    print(t1.schema[0].name)
    print(t1.schema[0].field_type)
    print(t1.schema[0].mode)
    # TODO: check whether any field is a RECORD (struct/array); those need
    # to be unnested before they can be used in the query.
    return ffd


def checkQuery(client, ffd):
    """Drop any filter-file keys that are not in the allowed set `ko`."""
    ks = list(ffd.keys())
    if any(x not in ko for x in ks):
        print("Removing items from the filter file:")
        print([x for x in ks if x not in ko])
    filtered_dict = {key: value for key, value in ffd.items() if key in ko}
    # BUG FIX: checkSchemas previously returned None, so this assignment
    # discarded the whole query dict and buildQuery crashed downstream.
    filtered_dict = checkSchemas(client, filtered_dict)
    return filtered_dict


def keyOrder(ffdict):
    """Return the dict's keys in the canonical clause order defined by `ko`."""
    ks = list(ffdict.keys())
    return [x for x in ko if x in ks]


def readFilterFile(filepath):
    """Parse a filter file into a dict of query terms.

    Each non-blank line is 'key:value'.  Repeated keys are ANDed together.
    Splits on the first ':' only, so values may themselves contain colons.
    """
    ffdict = {}
    # `with` guarantees the handle is closed even if a line is malformed.
    with open(filepath, 'r') as fin:
        for line in fin:
            if not line.strip():  # tolerate blank lines
                continue
            k, v = [s.strip() for s in line.strip().split(':', 1)]
            if k not in ffdict:
                ffdict[k] = v
            else:
                ffdict[k] = ffdict[k] + " AND " + v
    return ffdict


def buildQuery(client, filename):
    """Assemble the SQL string from the filter file `filename`."""
    ffd = readFilterFile(filename)
    ffd = checkQuery(client, ffd)
    query = "SELECT \n"
    for key in keyOrder(ffd):  # clauses must appear in a particular order
        if key in ['idvar', 'valvar']:
            query += ffd[key] + ",\n"
        elif key == 'table':
            query += "FROM `" + ffd[key] + "`\n WHERE \n"
        elif key == 'limit':
            query += "LIMIT " + ffd[key] + " \n"
        else:
            query += ffd[key] + " \n"
    return query


def bq(args):
    """Build and print the query for the parsed command-line args."""
    # BUG FIX: argparse declares the positional as "prj"; reading
    # args.proj raised AttributeError.
    client = bigquery.Client(project=args.prj)
    queryString = buildQuery(client, args.ff1)
    print("*****************************************")
    print(queryString)
    print("*****************************************")
    #query_results = client.run_sync_query(queryString)
    #query_results.use_legacy_sql = False
    #query_results.run()
    #print(query_results.total_rows)
    #for qi in query_results.rows:
    #    print(qi)
    print("done")


if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="BigQuery PairWise")
    parser.add_argument("prj", help="google project ID")
    parser.add_argument("ff1", help="filter file")
    args = parser.parse_args()
    bq(args)
Inderjeet Singh is the new poster boy of Indian athletics. The Haryana shot putter established himself in the Indian sports scene, with a hat-trick of medals in the Indian Grand Prix, after booking a ticket on the Rio bound team. In the recently concluded 55th National Inter-State Athletics Championship in Chennai, Singh rewrote a meet record with a throw of 20.44 metres. Here are the results from the final day of the Inter-State Athletics Championships. Inderjeet, who looks well set to bring home a medal from Rio, was the cynosure of all eyes. Though the University Games Gold medalist started with a mediocre 18.41 metres, he later found his momentum throwing the ball to 19.25 metres in the second attempt, improved upon it to clear 19.70 metres, slumped to 19.36 metres in the fourth, and ultimately, cleared 20.42 metres in the final attempt. Enroute to Gold, Inderjeet also battered a 15-year-old meet record (20.42 metres, set by compatriot Shakti Singh). Inderjeet was not the only one to take a medal on the trot. MR Poovamma bagged her fifth straight win in the women’s 400 metres event in the Inter-State Athletics, stopping the clocks at 52.78 seconds. However, the Kannadiga, who was declared the best woman athlete of the meet, failed to clear the World Championships’ qualification mark. Keralite Jisna Mathew bagged silver (53.51 seconds) and Bengal’s Debashree Majumdar took bronze (53.68 seconds). In the parallel men’s event, Rajiv Arokia took Gold for the hosts with a timing of 45.72 seconds. Teammate A Dharun came in second (46.87 seconds), while Chattisgarh’s Jibin Sebastian came third (47.32 seconds). Prem Kumar took Telangana’s maiden Gold, beating Kerala’s Pinto Mathew (14.43 seconds) and Tamil Nadu’s J Thiyagarajan (15.11 seconds) in the men’s 110 metres hurdles. In long jump, Tamil Nadu’s Prem Kumar took Gold with a jump of 7.91 metres,while Haryana’s Ankit Sharma took silver (7.71 metres) and Karnataka’s Samsheer took bronze (7.55 metres). 
In the men’s 100 metres race, Haryana’s Dharambir Singh came in first, clocking 10.51 seconds. Odisha’s Amiya Kumar came second (10.58 seconds), while Delhi’s Vikas Gulia bagged bronze (10.65 seconds). The analogous women’s event saw some surprise, with National Champion Dutee Chand being pushed to the third position (12.10 seconds). Karnataka’s HM Jyoti emerged victorious (11.87 seconds), while Gujarat’s Merlin K Joseph took silver. Kerala won the meet title, accruing 177.5 points, while hosts Tamil Nadu came in a distant second with 146.5 points. The men’s title went to Haryana (99 points), the women’s to Kerala (144.5 points), while Tamil Nadu emerged runners-up in both.
import numpy as np # from gaborfitting import * import theano import theano.tensor as T def scale_norm(X): X = X - X.min() scale = (X.max() - X.min()) return X / scale def img_grid(X, rows_cols=None, rescale=True): """Image Grid: modified from jbornschein/draw Parameters: =========== X : np.array, images (samples, channels, height, width) rows_cols : list, grid dimensions (rows, cols) rescale : bool Returns: ======== I : np.array, grid image """ N, channels, height, width = X.shape if rows_cols is None: sroot = np.sqrt(X.shape[0]) cols = int(np.ceil(sroot)) rows = int(np.floor(sroot)) + 1 else: rows, cols = rows_cols total_height = int(rows * height + rows - 1) total_width = int(cols * width + cols - 1) if rescale: X = scale_norm(X) I = np.zeros((channels, total_height, total_width)) I.fill(1) for i in xrange(N): r = i // cols c = i % cols if rescale: this = X[i] else: this = scale_norm(X[i]) offset_y, offset_x = r*height+r, c*width+c I[0:channels, offset_y:(offset_y+height), offset_x:(offset_x+width)] = this I = (255*I).astype(np.uint8) if(channels == 1): out = I.reshape((total_height, total_width)) else: out = np.dstack(I).astype(np.uint8) return out def grid2d(X, example_width=False, display_cols=False, pad_row=1, pad_col=1, rescale=True): """Display weights in a nice grid This function assumes that each row of the X is an image weight to be resized to a square. After that it creates a 2D grid with with all the squares. 
Parameters ---------- X : `numpy.array` array with each filter to be transformed to an image on the rows example_width: int defines the width of the images in the rows X if they are not square display_cols: bool pad_row: int integer number of pixels between up/down neighbors pad_col: int integer number of pixels between left/right neighbors Adapted from https://github.com/martinblom/py-sparse-filtering """ m, n = X.shape if not example_width: example_width = int(np.round(np.sqrt(n))) example_height = n//example_width # Compute number of items to display if not display_cols: display_cols = int(np.sqrt(m)) display_rows = int(np.ceil(m/display_cols)) # Setup blank display display_array = -np.ones((pad_row+display_rows * (example_height+pad_row), pad_col+display_cols * (example_width+pad_col))) # Copy each example into a patch on the display array curr_ex = 0 for j in range(display_rows): for i in range(display_cols): if curr_ex >= m: break # Copy the patch # Get the max value of the patch max_val = abs(X[curr_ex, :]).max() i_inds = example_width*[pad_row+j * (example_height+pad_row)+q for q in range(example_height)] j_inds = [pad_col+i * (example_width+pad_col)+q for q in range(example_width) for nn in range(example_height)] try: newData = (X[curr_ex, :].reshape((example_height, example_width))).T/max_val except: raise ValueError("expected {}, got {}".format(X[curr_ex, :].shape), (example_height, example_width)) display_array[i_inds, j_inds] = newData.flatten() curr_ex += 1 if curr_ex >= m: break visual = (display_array - display_array.min()) / (display_array.max() - display_array.min()) visual = np.nan_to_num(visual) ret = visual if rescale else display_array ret = (255*ret).astype(np.uint8) return ret def pref_grid(above, bellow, num_preferred=9, abs_value=True, pad_row=5): """Display the weights that the layer above prefers on the layer below This function looks for the `num_preferred` larger values on the layer `above` and get their indexes. 
Those indexes are used to retrieve the preferred weights on the layer `bellow`. After all, those preferred vectors are organized with `meth`:grid2d. Parameters ---------- above : `numpy.array` matrix with each filter to be transformed to an image on the rows bellow : `numpy.array` matrix with each filter to be transformed to an image on the rows num_preferred: int number of preferred weights to be plotted abs_value: bool if True chooses the preferred as the weights associated with maximum absolute activation. Else, uses only the maximum (positve) values. pad_row: int integer number of pixels between up/down neighbors """ # idx = np.random.randint(above.shape[0], size=num_preferred) R = np.abs(above) if abs_value else above X = np.zeros((num_preferred**2, bellow.shape[1])) for i, w in enumerate(R): s = np.argsort(w)[::-1] prefs = s[:num_preferred] first = i*num_preferred last = (i+1)*num_preferred X[first:last] = bellow[prefs] visual = grid2d(X, pad_col=1, pad_row=pad_row) return visual[pad_row-1:-pad_row+1, :] class DeepPref(): """Similar do pref_grid but for deep networks. Checks what are the weights in layers[0] that layers[-1] prefers. Parameters ---------- model: `keras.models.Sequential` layer: int, observed layer num_preferred: int number of preferred weights to be plotted abs_value: bool if True chooses the preferred as the weights associated with maximum absolute activation. Else, uses only the maximum (positve) values. 
pad_row: int integer number of pixels between horizontal neighbors """ def __init__(self, model, layer, num_preferred=10, abs_value=True, pad_row=5, sum_preferences=False): self.model = model self.layer = layer self.num_preferred = num_preferred self.abs_value = abs_value self.pad_row = pad_row self.sum_preferences = sum_preferences X = model.get_input() Y = model.layers[layer].get_output() if self.sum_preferences: Y = T.nnet.softmax(Y) self.F = theano.function([X], Y, allow_input_downcast=True) num_weights_out = model.layers[layer].W.get_value().shape[1] self.idx = np.random.randint(num_weights_out, size=num_preferred) def get_pref(self): W = self.model.layers[0].W.get_value().T Y = self.F(W) R = np.abs(Y[:, self.idx]) if self.abs_value else Y[:, self.idx] if self.sum_preferences: X = np.zeros((self.num_preferred, W.shape[1])) else: X = np.zeros((self.num_preferred**2, W.shape[1])) for i, w in enumerate(R.T): s = np.argsort(w) prefs = s[:-self.num_preferred-1:-1] first = i*self.num_preferred last = (i+1)*self.num_preferred if self.sum_preferences: X[i] = (W[prefs]).mean(axis=0) else: X[first:last] = W[prefs] visual = grid2d(X, pad_col=1, pad_row=self.pad_row) return visual[self.pad_row-1:-self.pad_row+1, :]
round table lamp mix table lamp with usb port and outlet. circle table lamp round table lamp base. spider floor lamp spotlight floor lamp lamps tripod spotlight floor lamp manufacturers zebra floor lamp spider floor lamp floor spider floor lamp shades. rustic track lighting fixtures rustic track lighting collection rustic lighting lighting direct coupon. chandelier floor lamp cheap standing chandelier lamp chandelier floor lamps sale antique crystal chandelier floor lamps buy standing lamp shades standing chandelier lamp floor lighting new york. led christmas light bulbs led light replacement bulbs replacement lights warm white led replacement light bulbs in husk 1 led xmas light bulbs. fillable glass table lamp beach lamp glass table lamp with collection sources where to buy fillable glass table lamp australia. seafoam green lamp related post seafoam colored lamps. lighted poinsettia garland lighted poinsettia garland white 2 pack cordless lighted poinsettia garland cordless lighted poinsettia garland. white bamboo shades use sheers with white bamboo shades for window treatments coastal living room chic via design white washed bamboo roman shades.
import threading
import asyncio

import click

from safer.safe import Safer

# Module-level counter of completed passes, shared with the nested
# perpetual_scan closure below via `global`.
nb_copies_done = 0


@click.command()
@click.option('-d', '--delta', default=10, help='Number of minutes between copies.')
@click.option('-s', '--safe_dir', default=None, help='Destination folder.')
@click.option('-w', '--delicate_dirs', required=True, help='Folder to save.')
@click.option('-n', '--count', default=0, help='Number of iterations, 0 for infinite loop (default 0).')
@click.option('-t', '--type', default='filter', help='`copy` or `filter` or `update` (default `filter`).')
@click.option('--extentions', default='', help='File extentions to exclude separeted by comma (pdf, txt...) (useless when `type` is copy)')
@click.option('--dirpath', default='', help='A path to exclude (useless when `type` is copy)')
@click.option('--dirname', default='', help='A folder name to exclude (useless when `type` is copy)')
def scan(delta, safe_dir, delicate_dirs, count, type, extentions, dirpath, dirname):
    """Periodically back up `delicate_dirs` into `safe_dir`.

    Runs one pass immediately, then re-schedules itself every `delta`
    minutes until `count` passes are done (forever when count == 0).
    """
    config = {
        'timedelta': delta,
        'safe_dir': safe_dir,
        'delicate_dirs': [delicate_dirs],
        'advanced': True,  # disable MAX_DIR_SIZE limit
        # Exclusion rules:
        'dirname': [dirname],
        'dirpath': [dirpath],
        'filename': [],
        'extention': extentions.split(',') if extentions != '' else [],
        # other options
        'local_path': '',
        'external_path': ''
    }
    loop = asyncio.get_event_loop()
    safer = Safer(config=config)

    if type == 'filter':
        func = lambda: safer.save_with_filters(loop)
    elif type == 'copy':
        func = safer.copy_files
    elif type == 'update':
        func = lambda: safer.update(loop)
    else:
        # BUG FIX: an unknown --type previously fell through and crashed
        # later with an unrelated NameError on `func`.
        raise click.BadParameter("type must be `copy`, `filter` or `update`")

    # --delta is given in minutes; threading.Timer expects seconds.
    delta *= 60

    def perpetual_scan():
        global nb_copies_done
        func()
        nb_copies_done += 1
        if nb_copies_done < count or count == 0:
            timer = threading.Timer(delta, perpetual_scan)
            timer.start()

    perpetual_scan()
All women have a menstrual cycle, which is indicative of their fertility. It is important to understand that this is a natural change, characteristic of the female reproductive system and which ensures a possible pregnancy. If you would like to find out more information on the menstrual cycle and its specific characteristics, do not hesitate to keep on reading this article. Menstruation is the monthly sloughing off and discharge of blood as well mucosal tissues from the endometrial lining of the uterus. This is often episodic having the average length of four to six days. About thirty to eighty milliliters of blood is lost during a normal menstrual cycle. The Menstrual Cycle is an episodic uterine bleeding in response to cyclic hormonal changes. It is a method wherein it prepares a woman’s body for conception and implantation to form a new life. The purpose of which is to lead an ovum (egg cell) into maturity as well as to renew a woman’s uterine tissue bed which will be in charge for the growth of the ova if ever it will be fertilized by a sperm cell. What is a Normal Teenage Menstrual Cycle? The menstrual cycle typically varies among women of different ages. In a normal teenager, menarche is the term used to describe their very first menstruation. Typically, a normal cycle is composed of twenty eight days. The first phase which is so called proliferative phase happens instantly after the woman’s menstrual flow. The endometrium thickens to about eight times than its usual size from approximately day five until day fourteen of a normal twenty eight day menstrual cycle. The second phase which is called the secretory phase happens right after ovulation or the release of the egg cell. The third phase which is called ischemic phase happens if fertilization of the egg cell does not transpire. Thus, it regresses and shrivels off. 
The last is the menstrual phase where menstruation begin to flow consisting of blood, mucus, fragments of the unfertilized egg cell and other fragments of the endometrium. : This gland initiates the woman’s menstrual cycle through its release of the so called Gonadotropin Releasing Hormone. This release of this hormone is cyclic thereby resulting to a monthly menstrual period which is cyclic as well. : This gland produces two hormones such as the Leutinizing Hormone and the Follicle Stimulating Hormone which targets the woman’s ovary to promote menstrual cycle. : Monthly, a woman naturally experiences a fertile period where she releases an ovum or egg cell which swims freely on the surface of the ovary. This process is called ovulation which occurs usually at the fourteenth day prior to the onset of the next menstrual cycle. : The endometrial lining of the uterus increases by eight times before menstruation occurs. However, during menstruation, its lining sheds off if the ovum is not fertilized. This results to the sloughing off of the endometrium with blood consisting of about 30 to 80 ml. Dysmenorrhea or painful menstrual period varies from being a slight discomfort to rigorous pain. Usually, the cause of painful menstrual period is ischemia and muscular spasms. Moreover, during a normal menstruation, the hormone prostaglandin is released. The over secretion of this hormone may result to painful menstruation which is often called primary dysmenorrheal. There are also secondary dysmenorrhea which results from pelvic inflammatory disease, infection and other disorders of the uterus. How Does The Menstrual Cycle Change As A Woman Gets Older? As a woman gets older, she reaches the time of life where she experiences cessation of her menstrual cycle called menopause. This usually occurs due to age, hormonal influence, and other factors involving a woman’s lifestyle prior to menopause such as heavy smoking and drinking. How to Manage Menstrual Symptoms and Bleeding? 
: It is a must for a woman to moderately exercise during menstruation since it enhances her general well being and causes relaxation. Thus, cramping is avoided. : It is advisable to slow down for a bit, lie down and rest. This will alleviate dysmenorrhea especially if it becomes intolerable for the woman. : Since menstruation causes sloughing of blood, it is important for a woman to eat foods rich in iron to replace the iron that may have been lost during menstruation. The treatment for menstrual symptoms and bleeding usually varies depending on the reasons why there is pain. If there is painful menstrual cramps which results from muscle contractions and ischemia, drugs may be given to inhibit the cramping. However, there are secondary dysmenorrhea which results from disorders such as endometriosis, pelvic infections and other disorders involving the uterus. Prompt treatment depends on the mechanism of the disorder and this should be given proper treatment in order for the pain to subside. What Are The Reasons Behind A Painful Menstrual Period? A painful menstrual period can be caused by a number of health problems, including endometriosis, fibroids and other abnormal growths in the uterus and pelvic inflammatory disease. Depression, stress and anxiety can make the menstrual period even more painful, as well as hormonal imbalances and kidney disease. The most important thing is to obtain a correct diagnosis and proceed from there, getting the right treatment. Nowadays, you can use a wide range of applications in order to keep track of your menstrual cycle. These are known as period calculators or menstrual cycle trackers, pinpointing the exact dates of the menstrual cycle, ovulation and so on. If you want to do it the old way, you can mark the dates of your current menstruation in a calendar. In this way, you can know the date of your next period (with approximation). 
The menstrual cycle is different from one person to the other, its duration lasting from 23 to 35 days. Throughout this period, hormonal levels rise and decrease. The menstrual cycle is split into different phases, each with its own set of changes. In the menstrual phase, the lining of the uterus breaks down and is shed. This is when the menstruation begins and you experience the much-dreaded symptoms. The follicular phase is the one in which the body prepares for ovulation, with the ovaries being stimulated (hormonally) to produce an egg. Mucus discharge is normal for this phase, as well as increased energy levels and a positive mood. The third phase is known as ovulation, with the matured egg being released into the uterus. If it happens that it comes into contact with the sperm, it will become fertilized and pregnancy will occur. Last, but not least, you have the luteal phase, which signifies the end of the menstrual cycle. You will begin menstruating and experiencing the PMS symptoms. Only when a woman truly understands the normal parameters of menstruation, its physiology and how it works, can she adapt to this natural monthly occurrence. The key to a stress-free menstrual period is thorough and adequate knowledge and understanding. As you have seen, the menstrual cycle is a complex matter and it can provide a lot of useful information regarding your general state of health.
import bpy
from bpy_extras import view3d_utils


def main(context, event):
    """Run this function on left mouse, execute the ray cast"""
    # get the context arguments
    scene = context.scene
    region = context.region
    rv3d = context.region_data
    # Mouse position in region (pixel) coordinates.
    coord = event.mouse_region_x, event.mouse_region_y

    # get the ray from the viewport and mouse
    view_vector = view3d_utils.region_2d_to_vector_3d(region, rv3d, coord)
    ray_origin = view3d_utils.region_2d_to_origin_3d(region, rv3d, coord)

    ray_target = ray_origin + view_vector

    def visible_objects_and_duplis():
        """Loop over (object, matrix) pairs (mesh only)"""
        for obj in context.visible_objects:
            if obj.type == 'MESH':
                yield (obj, obj.matrix_world.copy())

            if obj.dupli_type != 'NONE':
                # dupli_list_create/clear must be paired: the list is only
                # valid between the two calls.
                obj.dupli_list_create(scene)
                for dob in obj.dupli_list:
                    obj_dupli = dob.object
                    if obj_dupli.type == 'MESH':
                        yield (obj_dupli, dob.matrix.copy())

            obj.dupli_list_clear()

    def obj_ray_cast(obj, matrix):
        """Wrapper for ray casting that moves the ray into object space"""
        # get the ray relative to the object
        matrix_inv = matrix.inverted()
        ray_origin_obj = matrix_inv * ray_origin
        ray_target_obj = matrix_inv * ray_target
        ray_direction_obj = ray_target_obj - ray_origin_obj

        # cast the ray
        success, location, normal, face_index = obj.ray_cast(ray_origin_obj, ray_direction_obj)

        if success:
            return location, normal, face_index
        else:
            return None, None, None

    # cast rays and find the closest object
    # best_length_squared starts at -1.0; the `best_obj is None` test below
    # guarantees the first hit always wins, so the sentinel is never compared.
    best_length_squared = -1.0
    best_obj = None

    for obj, matrix in visible_objects_and_duplis():
        if obj.type == 'MESH':
            hit, normal, face_index = obj_ray_cast(obj, matrix)
            if hit is not None:
                # Convert the object-space hit back to world space before
                # comparing distances.
                hit_world = matrix * hit
                # NOTE(review): this moves the 3D cursor to *every* hit while
                # scanning, not just the final closest one — confirm this
                # side effect is intended.
                scene.cursor_location = hit_world
                length_squared = (hit_world - ray_origin).length_squared
                if best_obj is None or length_squared < best_length_squared:
                    best_length_squared = length_squared
                    best_obj = obj

    # now we have the object under the mouse cursor,
    # we could do lots of stuff but for the example just select.
    if best_obj is not None:
        best_obj.select = True
        context.scene.objects.active = best_obj


class ViewOperatorRayCast(bpy.types.Operator):
    """Modal object selection with a ray cast"""
    bl_idname = "view3d.modal_operator_raycast"
    bl_label = "RayCast View Operator"

    def modal(self, context, event):
        # Runs for every event while the operator is active.
        if event.type in {'MIDDLEMOUSE', 'WHEELUPMOUSE', 'WHEELDOWNMOUSE'}:
            # allow navigation
            return {'PASS_THROUGH'}
        elif event.type == 'LEFTMOUSE':
            # Perform the ray-cast selection, stay modal.
            main(context, event)
            return {'RUNNING_MODAL'}
        elif event.type in {'RIGHTMOUSE', 'ESC'}:
            return {'CANCELLED'}

        return {'RUNNING_MODAL'}

    def invoke(self, context, event):
        # Only meaningful inside a 3D viewport.
        if context.space_data.type == 'VIEW_3D':
            context.window_manager.modal_handler_add(self)
            return {'RUNNING_MODAL'}
        else:
            self.report({'WARNING'}, "Active space must be a View3d")
            return {'CANCELLED'}


def register():
    bpy.utils.register_class(ViewOperatorRayCast)


def unregister():
    bpy.utils.unregister_class(ViewOperatorRayCast)


if __name__ == "__main__":
    register()
Transaction Failed - Who told ya .. ?!
from archfinch.wiki.models import PageForm, Page, Revision, RevisionText
from archfinch.main.models import Item
from django.shortcuts import get_object_or_404
from archfinch.utils import render_to_response
from django.core.urlresolvers import reverse
from django.template import RequestContext
from django.http import HttpResponseRedirect
from django.utils.http import base36_to_int
from django.contrib.auth.decorators import login_required
from django.template.defaultfilters import slugify
from lazysignup.decorators import allow_lazy_user


@allow_lazy_user
def edit(request, page_id=None, item_id=None):
    '''
    Lets the user edit a wiki page.

    Exactly one of page_id / item_id (base36-encoded) must be given:
    an item's profile page is looked up via item_id, a standalone wiki
    page via page_id.  On successful POST a new revision is created,
    the user is awarded points, and the browser is redirected back to
    the item or wiki page.

    NOTE: render_to_response is passed locals(), so the local variable
    names here (form, page, item, text, redirect_url, ...) are part of
    the template's context contract — do not rename them.
    '''
    # get the page
    if page_id is None and item_id is None:
        from django.core.exceptions import SuspiciousOperation
        raise SuspiciousOperation('Page id and item id were both empty')

    if page_id is None:
        # Item route: the page hangs off the item's profile and may not
        # exist yet (created lazily on first POST below).
        item = get_object_or_404(Item, pk=base36_to_int(item_id))
        page = item.profile.page
        redirect_url = reverse('item', args=[item_id, slugify(item.name)])
    else:
        # Page route: a standalone wiki page must already exist.
        page = get_object_or_404(Page, pk=base36_to_int(page_id))
        redirect_url = reverse('wiki-page', args=[page_id])

    if request.method == 'POST':
        form = PageForm(request.POST)
        if form.is_valid():
            if page is None:
                # Only reachable via the item route (the page route 404s
                # instead), so `item` is guaranteed to be bound here.
                page = Page()
                page.save()
                item.profile.page = page
                item.profile.save()

            # Revision text is stored in its own row, then linked to a new
            # revision attributed to the (possibly lazy) current user.
            text = form.cleaned_data['text']
            revision_text = RevisionText(text=text)
            revision_text.save()
            page.revisions.create(text=revision_text, user=request.user)

            request.user.add_points(5)

            return HttpResponseRedirect(redirect_url)
        # invalid form: fall through and re-render it with errors
    else:
        # GET: pre-fill the form with the latest revision's rendered text,
        # or empty when the page has no revisions (or does not exist yet).
        if page is not None:
            try:
                text = page.current().text.render()
            except Revision.DoesNotExist:
                text = ''
        else:
            text = ''
        form = PageForm(initial={'text': text})

    return render_to_response('wiki/edit.html', locals(), context_instance=RequestContext(request))
Tooth loss is not typically on the radar of most people. People assume that brushing their teeth twice a day is enough, and that is all they do. However, tooth loss affects approximately 25% of the American population, providing even further proof that brushing the teeth is not enough. Neglecting flossing on a daily basis and putting off regular dental cleanings, can lead to the decline in your oral health. There are also other common habits that you may be engaging in right now that are compromising the health of your teeth. Tobacco use. This is a no-brainer. Smoking, chewing, dipping – any use of tobacco products – has been proven detrimental to your oral health. Not only do tobacco products stain your teeth and ruin your breath, but they also increase your risk for tooth decay, periodontal disease, oral cancer, lung cancer, and cancer of other parts of the body. Alcohol. Alcoholic beverages can damage the teeth. Wine is known for its staining powers, as well as dark whiskey and other dark ales. Alcohol is also highly acidic, and it can eat away at your tooth enamel if it is consumed frequently. Soft drinks. Whether you call it Coke, soda, pop, or a soft drink – it’s bad for your teeth. Soft drinks are laden with sugars and acids. Even diet drinks that contain a sugar substitute cause the bacteria inside the mouth to react the same way as they do with regular sugar. Soft drinks smother the teeth in a sugar bath and can cause damage to occur quickly. While it is OK to indulge occasionally, rinse your mouth with water after drinking soft drinks to help rinse away sugars from the teeth. Stress. Clenching and grinding the teeth during stressful situations is a common cause of broken teeth and worn down tooth enamel. Many people grind their teeth at night and are unaware that they do it. Talking with your dentist can help you prevent damage from these habits. Tooth loss affects millions of people throughout the United States each year. 
If at all possible, try to avoid habits that increase your chance for tooth loss. If you do experience tooth loss, there are good options for replacing missing teeth, including high quality dental implants and dentures. Contact Georgia Denture & Implant Specialists to learn more.
# Copyright (c) 2013 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import collections
import hashlib
import json

from github import MainClass
import six

from spectrometer.openstack.common import log as logging
from spectrometer.processor import normalizer
from spectrometer.processor import record_processor
from spectrometer.processor import utils
from spectrometer.processor import vcs


LOG = logging.getLogger(__name__)


def _check_default_data_change(runtime_storage_inst, default_data):
    """Return True (and remember the new digest) iff default_data changed.

    Compares a sha1 of the serialized default data against the digest
    stored from the previous run.
    """
    h = hashlib.new('sha1')
    # BUG FIX: sort_keys makes the serialization canonical — without it,
    # dict key ordering could differ between runs and fake a "data changed"
    # signal.  .encode() is required because hashlib consumes bytes, not
    # str, on Python 3.
    h.update(json.dumps(default_data, sort_keys=True).encode('utf-8'))
    digest = h.hexdigest()

    p_digest = runtime_storage_inst.get_by_key('default_data_digest')
    if digest == p_digest:
        LOG.debug('No changes in default data, sha1: %s', digest)
        return False

    LOG.debug('Default data has changes, sha1: %s', digest)
    runtime_storage_inst.set_by_key('default_data_digest', digest)
    return True


def _retrieve_project_list_from_github(project_sources):
    """Fetch repo descriptors for each configured GitHub organization.

    Returns a list of repo dicts, or False when the GitHub API call fails
    (the caller then keeps the configured repo list unmodified).
    """
    LOG.info('Retrieving project list from GitHub')

    github = MainClass.Github(timeout=60)

    repos = []
    for project_source in project_sources:
        organization = project_source['organization']
        LOG.debug('Get list of projects for organization %s', organization)
        try:
            github_repos = github.get_organization(organization).get_repos()
        except Exception as e:
            LOG.exception(e)
            LOG.warn('Fail to retrieve list of projects. Keep it unmodified')
            return False

        exclude = set(project_source.get('exclude', []))

        for repo in github_repos:
            if repo.name not in exclude:
                r = {
                    'branches': ['master'],
                    'module': repo.name,
                    'organization': organization,
                    'uri': repo.git_url,
                    'releases': []
                }
                repos.append(r)
                LOG.debug('Project is added to default data: %s', r)
    return repos


def _create_module_groups_for_project_sources(project_sources, repos):
    """Build one module group per GitHub organization from the repo list."""
    organizations = collections.defaultdict(list)
    for repo in repos:
        organizations[repo['organization']].append(repo['module'])

    # Map organization -> display name (module_group_name overrides it).
    ps_organizations = dict([(ps.get('organization'),
                              ps.get('module_group_name') or
                              ps.get('organization'))
                             for ps in project_sources])

    module_groups = []
    for ogn, modules in six.iteritems(organizations):
        module_groups.append(utils.make_module_group(
            ogn, name=ps_organizations.get(ogn, ogn), modules=modules,
            tag='organization'))

    return module_groups


def _update_project_list(default_data):
    """Extend default_data's repos/module_groups with GitHub discoveries.

    Repos already configured (matched by uri) are not duplicated.
    """
    configured_repos = set([r['uri'] for r in default_data['repos']])

    repos = _retrieve_project_list_from_github(
        default_data['project_sources'])
    if repos:  # False on retrieval failure — keep configured list as-is
        default_data['repos'] += [r for r in repos
                                  if r['uri'] not in configured_repos]

    default_data['module_groups'] += _create_module_groups_for_project_sources(
        default_data['project_sources'], default_data['repos'])


def _store_users(runtime_storage_inst, users):
    """Merge default-data users into already-stored user records."""
    for user in users:
        stored_user = utils.load_user(runtime_storage_inst, user['user_id'])
        if stored_user:
            # Keep previously stored fields, overwrite with defaults.
            stored_user.update(user)
            user = stored_user
        utils.store_user(runtime_storage_inst, user)


def _store_companies(runtime_storage_inst, companies):
    """Build and store the domain/alias -> company-name index."""
    domains_index = {}
    for company in companies:
        for domain in company['domains']:
            domains_index[domain] = company['company_name']

        if 'aliases' in company:
            for alias in company['aliases']:
                normalized_alias = utils.normalize_company_name(alias)
                domains_index[normalized_alias] = company['company_name']
        # The company's own normalized name also resolves to it.
        normalized_company_name = utils.normalize_company_name(
            company['company_name'])
        domains_index[normalized_company_name] = company['company_name']

    runtime_storage_inst.set_by_key('companies', domains_index)


def _store_module_groups(runtime_storage_inst, module_groups):
    """Merge default-data module groups into the stored group mapping."""
    stored_mg = runtime_storage_inst.get_by_key('module_groups') or {}
    for mg in module_groups:
        name = mg['module_group_name']
        module_group_id = mg.get('id') or name
        stored_mg[module_group_id] = utils.make_module_group(
            module_group_id, name=name, modules=mg['modules'],
            tag=mg.get('tag', 'group'))
    runtime_storage_inst.set_by_key('module_groups', stored_mg)


# Keys of default_data that need dedicated storage logic; everything else
# is stored verbatim by _store_default_data.
STORE_FUNCS = {
    'users': _store_users,
    'companies': _store_companies,
    'module_groups': _store_module_groups,
}


def _store_default_data(runtime_storage_inst, default_data):
    """Normalize default data and write every section into runtime storage."""
    normalizer.normalize_default_data(default_data)

    LOG.debug('Update runtime storage with default data')
    for key, value in six.iteritems(default_data):
        if key in STORE_FUNCS:
            STORE_FUNCS[key](runtime_storage_inst, value)
        else:
            runtime_storage_inst.set_by_key(key, value)


def _update_records(runtime_storage_inst, sources_root):
    """Re-run the record processor against the current release index."""
    LOG.debug('Update existing records')
    release_index = {}
    for repo in utils.load_repos(runtime_storage_inst):
        vcs_inst = vcs.get_vcs(repo, sources_root)
        release_index.update(vcs_inst.get_release_index())

    record_processor_inst = record_processor.RecordProcessor(
        runtime_storage_inst)
    record_processor_inst.update(release_index)


def _get_changed_member_records(runtime_storage_inst, record_processor_inst):
    """Yield member records whose resolved company name has changed."""
    for record in runtime_storage_inst.get_all_records():
        if record['record_type'] == 'member' and 'company_name' in record:
            company_draft = record['company_draft']
            company_name = record_processor_inst.domains_index.get(
                utils.normalize_company_name(company_draft)) or company_draft

            if company_name != record['company_name']:
                record['company_name'] = company_name
                yield record


def _update_members_company_name(runtime_storage_inst):
    """Propagate changed company names to member records and their users."""
    LOG.debug('Update company names for members')
    record_processor_inst = record_processor.RecordProcessor(
        runtime_storage_inst)
    member_iterator = _get_changed_member_records(runtime_storage_inst,
                                                  record_processor_inst)

    for record in member_iterator:
        company_name = record['company_name']
        user = utils.load_user(runtime_storage_inst, record['user_id'])

        user['companies'] = [{
            'company_name': company_name,
            'end_date': 0,
        }]
        user['company_name'] = company_name

        utils.store_user(runtime_storage_inst, user)

        LOG.debug('Company name changed for user %s', user)

        record_id = record['record_id']
        # NOTE(review): writes through runtime storage internals
        # (memcached / _get_record_name / _commit_update) — confirm no
        # public API exists for single-record updates.
        runtime_storage_inst.memcached.set(
            runtime_storage_inst._get_record_name(record_id), record)
        runtime_storage_inst._commit_update(record_id)


def process(runtime_storage_inst, default_data, sources_root, force_update):
    """Entry point: refresh storage from default data when it changed.

    The change check runs *before* the GitHub project-list expansion so
    that auto-discovered repos do not perturb the digest.
    """
    LOG.debug('Process default data')

    dd_changed = _check_default_data_change(runtime_storage_inst,
                                            default_data)

    if 'project_sources' in default_data:
        _update_project_list(default_data)

    if dd_changed or force_update:
        _store_default_data(runtime_storage_inst, default_data)
        _update_records(runtime_storage_inst, sources_root)
        _update_members_company_name(runtime_storage_inst)
Concord, Massachusetts is a small town with a grand past. The region that developed into the town of Concord was formerly known as Musketaquid, which means “grassy plain” in Algonquian. The area played host to the Battles of Lexington and Concord, the opening engagements of the American Revolutionary War. Because the town is a major landmark in the birth and rise of the United States, owning a piece of Concord MA real estate is like owning a piece of priceless American history. But to most Concord residents it’s not just the past that makes the town so vibrant. It’s the people. From friendly neighbors to outstanding town personnel, this town will make any new person or family moving in feel right at home. You can expect a friendly hello from everyone from the postal worker to the grocery clerk, because everyone is welcome in Concord. The perfect time to find a new home is now, so why not make the entire search process much easier with the help of one of our amazing and talented agents? The professional and accommodating agents here at The Attias Group are dedicated and passionate about getting you the home you want and deserve. We will craft a hand-selected list from our wonderful array of prime Concord MA real estate listings to match any budget or lifestyle. The Attias Group is a Massachusetts staple, locally owned by one of the highest-producing agents the Bay State has to offer. Our track record speaks for itself, and our five-star rating reflects our consistently high client satisfaction. We will work hard to ensure that you find the perfect place to call home in beautiful Concord, MA. Contact us today and let us put you in a prime piece of Concord MA real estate!
#     Copyright 2014 Netflix, Inc.
#
#     Licensed under the Apache License, Version 2.0 (the "License");
#     you may not use this file except in compliance with the License.
#     You may obtain a copy of the License at
#
#         http://www.apache.org/licenses/LICENSE-2.0
#
#     Unless required by applicable law or agreed to in writing, software
#     distributed under the License is distributed on an "AS IS" BASIS,
#     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#     See the License for the specific language governing permissions and
#     limitations under the License.
"""
.. module: security_monkey.auditors.s3
    :platform: Unix

.. version:: $$VERSION$$
.. moduleauthor:: Patrick Kelley <pkelley@netflix.com> @monkeysecurity

"""
from six import text_type

from security_monkey.auditors.resource_policy_auditor import ResourcePolicyAuditor
from security_monkey.auditor import Entity
from security_monkey.watchers.s3 import S3
# NOTE(review): Account appears unused in this module — confirm before removing.
from security_monkey.datastore import Account


class S3Auditor(ResourcePolicyAuditor):
    """Audits S3 bucket policies and ACL grants for risky access.

    Bucket policies are handled by the ResourcePolicyAuditor base class
    (via self.policy_keys); the check_acl_* methods below cover ACL grants.
    """
    index = S3.index
    i_am_singular = S3.i_am_singular
    i_am_plural = S3.i_am_plural

    def __init__(self, accounts=None, debug=False):
        super(S3Auditor, self).__init__(accounts=accounts, debug=debug)
        # Tell the base auditor which config key holds the bucket policy.
        self.policy_keys = ['Policy']

    def prep_for_audit(self):
        """Pre-compute lowercase lookup lists of known ACL grantees.

        Grantees are classified by account label ('friendly'/'thirdparty')
        and matched either by legacy s3_name or by canonical id.
        """
        super(S3Auditor, self).prep_for_audit()
        self.FRIENDLY_S3NAMES = [text_type(account['s3_name']).lower() for account in self.OBJECT_STORE['ACCOUNTS']['DESCRIPTIONS'] if account['label'] == 'friendly']
        self.THIRDPARTY_S3NAMES = [text_type(account['s3_name']).lower() for account in self.OBJECT_STORE['ACCOUNTS']['DESCRIPTIONS'] if account['label'] == 'thirdparty']
        self.FRIENDLY_S3CANONICAL = [text_type(account['s3_canonical_id']).lower() for account in self.OBJECT_STORE['ACCOUNTS']['DESCRIPTIONS'] if account['label'] == 'friendly']
        self.THIRDPARTY_S3CANONICAL = [text_type(account['s3_canonical_id']).lower() for account in self.OBJECT_STORE['ACCOUNTS']['DESCRIPTIONS'] if account['label'] == 'thirdparty']
        # AWS predefined groups that mean "anyone on the internet".
        self.INTERNET_ACCESSIBLE = [
            'http://acs.amazonaws.com/groups/global/AuthenticatedUsers'.lower(),
            'http://acs.amazonaws.com/groups/global/AllUsers'.lower()]
        self.LOG_DELIVERY = ['http://acs.amazonaws.com/groups/s3/LogDelivery'.lower()]
        self.KNOWN_ACLS = self.FRIENDLY_S3NAMES + self.THIRDPARTY_S3NAMES + self.FRIENDLY_S3CANONICAL + self.THIRDPARTY_S3CANONICAL + self.INTERNET_ACCESSIBLE + self.LOG_DELIVERY

    def _check_acl(self, item, field, keys, recorder):
        """Record an issue for every ACL grantee found in `keys`.

        :param item: watched S3 item; Grants maps grantee -> list of actions.
        :param field: account-description field used to resolve the grantee
            to a known account ('aws', 's3_name', or 's3_canonical_id').
        :param keys: lowercase grantee identifiers to match against.
        :param recorder: base-class method used to record the finding.
        """
        acl = item.config.get('Grants', {})
        owner = item.config["Owner"]["ID"].lower()
        for key in list(acl.keys()):
            if key.lower() not in keys:
                continue

            # Canonical ID == Owning Account - No issue
            if key.lower() == owner.lower():
                continue

            entity = Entity(category='ACL', value=key)
            account = self._get_account(field, key)
            if account:
                entity.account_name=account['name']
                entity.account_identifier=account['identifier']
            recorder(item, actions=acl[key], entity=entity)

    def check_acl_internet_accessible(self, item):
        """ Handles AllUsers and AuthenticatedUsers. """
        self._check_acl(item, 'aws', self.INTERNET_ACCESSIBLE, self.record_internet_access)

    def check_acl_log_delivery(self, item):
        # Log delivery group is treated as a (benign) third party.
        self._check_acl(item, 'aws', self.LOG_DELIVERY, self.record_thirdparty_access)

    def check_acl_friendly_legacy(self, item):
        # Legacy matching by display name rather than canonical id.
        self._check_acl(item, 's3_name', self.FRIENDLY_S3NAMES, self.record_friendly_access)

    def check_acl_thirdparty_legacy(self, item):
        self._check_acl(item, 's3_name', self.THIRDPARTY_S3NAMES, self.record_thirdparty_access)

    def check_acl_friendly_canonical(self, item):
        self._check_acl(item, 's3_canonical_id', self.FRIENDLY_S3CANONICAL, self.record_friendly_access)

    def check_acl_thirdparty_canonical(self, item):
        self._check_acl(item, 's3_canonical_id', self.THIRDPARTY_S3CANONICAL, self.record_thirdparty_access)

    def check_acl_unknown(self, item):
        """Flag any grantee that matches none of the known ACL lists."""
        acl = item.config.get('Grants', {})

        for key in list(acl.keys()):
            if key.lower() not in self.KNOWN_ACLS:
                entity = Entity(category='ACL', value=key)
                self.record_unknown_access(item, entity, actions=acl[key])

    def check_policy_exists(self, item):
        """Raise a zero-score informational issue when a bucket has no policy."""
        policy = item.config.get('Policy', {})
        if not policy:
            message = "POLICY - No Policy."
            self.add_issue(0, message, item)
Here’s another from the same series — ‘part 2’, if you like. Great images, but a little more explanation of the process would make it perfect — for example, the steps you followed in Photoshop or any other program you used after SketchUp. Thanks for being so inspirational with such an amazing presentation. Please give us more step-by-step detail of it. I don’t know how you made this because I’m a newbie — could you explain it in detail, please? How did you do the gargoyle and the figure with the cape?
#!/usr/bin/python
#-*-coding:utf-8 -*-
# author: mld
# email: miradel51@126.com
# date : 2017/9/30
# time : 23:42(pm)

import sys
import re


def chinese_norm(original_sen):
    """Normalize a (possibly Chinese) sentence.

    - Converts full-width forms (U+FF01..U+FF5E) and the ideographic space
      (U+3000) to their half-width ASCII equivalents.
    - Collapses every run of whitespace into a single space.
    - Lowercases any Latin characters.

    :param str original_sen: sentence to normalize.
    :return str: the normalized sentence.
    """
    # Collect characters in a list and join once: O(n) instead of the
    # quadratic behavior of repeated string concatenation.
    converted = []
    for char in original_sen:
        code = ord(char)
        if code == 0x3000:
            # Ideographic (full-width) space -> ASCII space.
            code = 0x0020
        else:
            # Shift full-width forms down into the ASCII range.
            code -= 0xfee0
        if code < 0x0020 or code > 0x7e:
            # Not a full-width form (e.g. a CJK ideograph or plain ASCII):
            # keep the original character unchanged.
            converted.append(char)
        else:
            converted.append(chr(code))
    # Collapse consecutive whitespace, then lowercase Latin letters.
    conver_sen = re.sub(r'\s+', ' ', ''.join(converted))
    return conver_sen.lower()


if __name__ == '__main__':
    ori_ = sys.argv[1]
    convert_ = sys.argv[2]
    # Context managers guarantee both files are closed even if a
    # read/write error occurs mid-way (the originals leaked on error).
    with open(ori_, "r") as ori_file, open(convert_, "w") as converted_file:
        for eachline in ori_file:
            converted_file.write(chinese_norm(eachline.strip()))
            converted_file.write("\n")
Corrosion is the process of the deterioration of a material due to chemical reaction with its surrounding environment. Corrosion testing is performed to assess the susceptibility of a material to environmental attack and to quantify the material’s resistance to a corrosive environment. A corrosion test subjects a sample of material to a severe test environment (a combination of temperature and solution) for a specified time. Following exposure to this severe test environment, the test sample is evaluated for signs of corrosive attack. Typically, visual inspection for pitting, loss of mass, and bend resistance is used to evaluate whether the sample has resisted corrosive attack.
import flask
from flask_login import login_user

from server.models import User
from server.login_manager import login_manager


@login_manager.user_loader
def load_user(user_id: int) -> User:
    """Look up a user for flask-login's session machinery.

    :param user_id: a user's unique id
    :return: the matching User, or None when no such user exists
    """
    return User.query.filter_by(id=user_id).first()


def handle_basic_auth(request: flask.Request) -> User:
    """Resolve a request's HTTP BASIC credentials to a user account.

    :param request: flask request object
    :return: the matching User, or None when credentials are absent
        or do not match any account
    """
    credentials = request.authorization
    if not credentials:
        return None
    return User.query.filter_by(
        username=credentials.username,
        password=credentials.password
    ).first()


def login(request: flask.Request) -> flask.Response:
    """Authenticate the request and establish a login session.

    :param request: incoming request object
    :return: 'OK' on success, otherwise a 401 challenge response
    """
    account = handle_basic_auth(request)
    if not account:
        return flask.Response(
            'Could not verify your access level for that URL.\n'
            'You have to login with proper credentials', 401,
            {'WWW-Authenticate': 'Basic realm="Login Required"'})
    login_user(account, remember=True)
    return 'OK'
The Plant List (2013). Version 1.1. Last accessed on Monday, March 14, 2016. WCSP (2016). World Checklist of Selected Plant Families. Facilitated by the Royal Botanic Gardens, Kew. Last accessed on Monday, March 14, 2016.
import pandas as pd
import numpy as np
from .uvc_model import calc_phi_total


def summary_tables_maker_uvc(material_definition, x_file_paths, data, peeq='sat'):
    """ Prints to screen the summary tables for the material optimization in LaTeX format for the updated VC model.

    :param dict material_definition: Contains information about each material.
    :param list x_file_paths: (str) Path for the files that contain the x values for each material.
    :param list data: (list, pd.DataFrame) The test data used for calibration of each of the materials.
    :param str or float peeq: If 'sat' then calculates the metrics at model saturation, otherwise a finite
        equivalent plastic strain.
    :return list: The first and second summary tables.

    Notes:
        - material_definition:
            'material_id': (list, str) Identifier for each material.
            'load_protocols': (list, str) Labels of the load protocols used, see [1] for definitions.
        - The metrics in Table 2 are defined in [2].
        - If a finite peeq is provided, the metrics are calculated assuming that peeq increases monotonically
            to the provided value.

    References:
        [1] de Castro e Sousa and Lignos (2017), On the inverse problem of classic nonlinear plasticity models.
        [2] de Castro e Sousa and Lignos (2018), Constrained optimization in metal plasticity inverse problems.
    """
    # Output column labels (updated VC model includes the D_inf / a terms).
    parameter_labels = [r'$E$[GPa]', r'$\sigma_{y,0}$[MPa]', r'$Q_\infty$[MPa]', r'$b$', r'$D_\infty$[MPa]',
                        r'$a$', r'$C_1$[MPa]', r'$\gamma_1$', r'$C_2$[MPa]', r'$\gamma_2$', r'$C_3$[MPa]',
                        r'$\gamma_3$', r'$C_4$[MPa]', r'$\gamma_4$']
    metric_labels = [r'$\sigma_{y,0}$[MPa]', r'$\sigma_{sat}$[MPa]', r'$\sigma_{hard}$[MPa]',
                     r'$\rho^{sat}_{yield}$', r'$\rho^{sat}_{iso}$', r'$\rho^{sat}_{kin}$', r'$\rho^{sat}_{D}$']
    # E, s_y0, Q_inf, b, D_inf, a precede the backstress (C_i, gamma_i) pairs.
    n_basic_param = 6
    tab_1, tab_2 = _table_maker(material_definition, x_file_paths, data, parameter_labels, metric_labels,
                                n_basic_param, calc_upd_metrics=True, peeq=peeq)
    return [tab_1, tab_2]


def summary_tables_maker_vc(material_definition, x_file_paths, data, peeq='sat'):
    """ Prints to screen the summary tables for the material optimization in LaTeX format for the original VC model.

    :param dict material_definition: Contains information about each material.
    :param list x_file_paths: (str) Path for the files that contain the x values for each material.
    :param list data: (list, pd.DataFrame) The test data used for calibration of each of the materials.
    :param str or float peeq: If 'sat' then calculates the metrics at model saturation, otherwise a finite
        equivalent plastic strain.
    :return list: The first and second summary tables.

    Notes:
        - material_definition:
            'material_id': (list, str) Identifier for each material.
            'load_protocols': (list, str) Labels of the load protocols used, see [1] for definitions.
        - The metrics in Table 2 are defined in [2].
        - If a finite peeq is provided, the metrics are calculated assuming that peeq increases monotonically
            to the provided value.

    References:
        [1] de Castro e Sousa and Lignos (2017), On the inverse problem of classic nonlinear plasticity models.
        [2] de Castro e Sousa and Lignos (2018), Constrained optimization in metal plasticity inverse problems.
    """
    # Output column labels (original VC model: no D_inf / a terms).
    parameter_labels = [r'$E$[GPa]', r'$\sigma_{y,0}$[MPa]', r'$Q_\infty$[MPa]', r'$b$',
                        r'$C_1$[MPa]', r'$\gamma_1$', r'$C_2$[MPa]', r'$\gamma_2$', r'$C_3$[MPa]',
                        r'$\gamma_3$', r'$C_4$[MPa]', r'$\gamma_4$']
    metric_labels = [r'$\sigma_{y,0}$[MPa]', r'$\sigma_{sat}$[MPa]', r'$\sigma_{hard}$[MPa]',
                     r'$\rho^{sat}_{yield}$', r'$\rho^{sat}_{iso}$', r'$\rho^{sat}_{kin}$']
    # E, s_y0, Q_inf, b precede the backstress (C_i, gamma_i) pairs.
    n_basic_param = 4
    tab_1, tab_2 = _table_maker(material_definition, x_file_paths, data, parameter_labels, metric_labels,
                                n_basic_param, calc_upd_metrics=False, peeq=peeq)
    return [tab_1, tab_2]


def _table_maker(material_definition, x_file_paths, data, parameter_labels, metric_labels, num_basic_param,
                 calc_upd_metrics, peeq='sat'):
    """ Base function to generate the tables.

    Builds (and prints as LaTeX) two tables: the calibrated parameters per
    material, and the derived hardening metrics per material.
    """
    # Set some options for the display
    pd.set_option('display.max_columns', 12)
    pd.set_option('display.width', 300)
    pd.set_option('display.float_format', '{:0.2f}'.format)

    # Extract the properties from the definition
    material_id = material_definition['material_id']
    load_protocols = material_definition['load_protocols']

    # Make the first table
    phi_values = []
    summary_table = pd.DataFrame()
    for i, f in enumerate(x_file_paths):
        # Last row of the file holds the final (converged) parameter vector.
        x = pd.read_csv(f, delimiter=' ')
        x = np.array(x.iloc[-1])
        # Sort the backstresses so that the largest gamma value is first
        gammas = x[num_basic_param + 1::2]
        ind = np.flipud(np.argsort(gammas))
        # Exchange the gammas
        x[num_basic_param + 1::2] = x[2 * ind + num_basic_param + 1]
        # Exchange the Cs
        x[num_basic_param::2] = x[2 * ind + num_basic_param]
        temp_table = pd.DataFrame(x, columns=(material_id[i],)).transpose()
        # NOTE(review): DataFrame.append is deprecated in pandas >= 1.4
        # (removed in 2.0); pd.concat is the replacement — confirm the
        # pinned pandas version before upgrading.
        summary_table = summary_table.append(temp_table)

        if calc_upd_metrics:
            phi_values.append(calc_phi_total(x, data[i]))
        else:
            # Original VC model: pad x with D_inf=0, a=1 so calc_phi_total
            # sees the updated-model layout.
            x_phi = np.insert(x, 4, [0., 1.])
            phi_values.append(calc_phi_total(x_phi, data[i]))

    # Rename the columns
    summary_table.columns = parameter_labels[:len(summary_table.columns)]
    # Add the phi values
    summary_table.insert(0, r'$\bar{\varphi}$[\%]', phi_values)
    # Add the load protocols
    summary_table.insert(0, r'LP', load_protocols)
    # Make the elastic modulus in GPa
    summary_table[parameter_labels[0]] = summary_table[parameter_labels[0]] / 1000.
    # Set the index name to Materials
    summary_table.index.name = 'Material'
    print (summary_table.to_latex(escape=False))

    # Make the second table
    summary_table_2 = pd.DataFrame()
    for i, f in enumerate(x_file_paths):
        # Calculate the comparison metrics
        # data_row layout: [LP, phi, E(GPa), s_y0, ...model parameters...]
        data_row = list(summary_table.iloc[i])
        s_y0 = data_row[3]
        hm = _hard_metric_at_peeq(data_row, num_basic_param, calc_upd_metrics, peeq)
        sigma_sat = hm['sigma_sat']
        sigma_hard = hm['sigma_hard']
        rho_yield = hm['rho_yield']
        rho_iso = hm['rho_iso']
        rho_kin = hm['rho_kin']
        rho_d = hm['rho_d']
        if calc_upd_metrics:
            new_row = np.array([s_y0, sigma_sat, sigma_hard, rho_yield, rho_iso, rho_kin, rho_d])
        else:
            new_row = np.array([s_y0, sigma_sat, sigma_hard, rho_yield, rho_iso, rho_kin])

        # Add the values to the table for each material
        temp_table = pd.DataFrame(new_row, columns=(material_id[i],)).transpose()
        summary_table_2 = summary_table_2.append(temp_table)

    # Rename the columns
    summary_table_2.columns = metric_labels
    # Set the index name to Materials
    summary_table_2.index.name = 'Material'
    print (summary_table_2.to_latex(escape=False))

    return [summary_table, summary_table_2]


def _hard_metric_at_peeq(x, num_basic_param, calc_upd_metrics, peeq='sat'):
    """ Calculates the hardening metrics for both the original and updated Voce-Chaboche models.

    :param list x: Row of data from table_maker function.
    :param int num_basic_param: Number of non-backstress related parameters in the model.
    :param bool calc_upd_metrics: If True then calculates the rho_d metric, if False then sets it to 0.
    :param str or float peeq: If 'sat' then calculates the metrics at model saturation, otherwise a finite
        equivalent plastic strain.
    :return dict: Hardening metrics.

    Notes:
        - If a finite peeq is provided, the metrics are calculated assuming that peeq increases monotonically
            to the provided value.
    """
    # The first two row entries are LP and phi, hence the +2 offset.
    cols_before_kin = num_basic_param + 2
    num_backstresses = (len(x) - cols_before_kin) // 2
    s_y0 = x[3]
    if peeq == 'sat':
        # Calculate values assuming fully saturated
        q_inf = x[4]
        if calc_upd_metrics:
            d_inf = x[6]
        else:
            d_inf = 0.
        # At saturation each backstress contributes C_j / gamma_j.
        sum_kin = 0.
        for j in range(num_backstresses):
            c_j = x[cols_before_kin + 2 * j]
            g_j = x[cols_before_kin + 1 + 2 * j]
            sum_kin += c_j / g_j
    else:
        # Calculate values at finite equivalent plastic strain (monotonically increasing)
        q_inf = x[4] * (1. - np.exp(-x[5] * peeq))
        if calc_upd_metrics:
            d_inf = x[6] * (1. - np.exp(-x[7] * peeq))
        else:
            d_inf = 0.
        sum_kin = 0.
        for j in range(num_backstresses):
            c_j = x[cols_before_kin + 2 * j]
            g_j = x[cols_before_kin + 1 + 2 * j]
            sum_kin += c_j / g_j * (1. - np.exp(-g_j * peeq))

    # Calculate all the metrics
    # NOTE(review): assumes q_inf + sum_kin != 0 and s_y0 != 0; a material
    # with no hardening would divide by zero here — confirm inputs.
    sigma_sat = s_y0 + q_inf - d_inf + sum_kin
    sigma_hard = q_inf + sum_kin
    rho_yield = sigma_sat / s_y0
    rho_iso = q_inf / sigma_hard
    rho_kin = sum_kin / sigma_hard
    rho_d = d_inf / (q_inf + sum_kin)

    return {'sigma_sat': sigma_sat, 'sigma_hard': sigma_hard, 'rho_yield': rho_yield,
            'rho_iso': rho_iso, 'rho_kin': rho_kin, 'rho_d': rho_d}
Whether you organise a birthday party or a corporate party, one way to engage your guests is to hire the services of TJ Your Mobile DJ. We have multi-talented DJs. Their performance at your party will certainly get your guests to stay upbeat throughout the event. Everyone would want their parties to be lively and memorable. The only way to ensure that is to opt for DJ Hire in Berry, Corporate DJ Hire Berry, Wedding DJ Hire Berry. Our team of versatile and professional DJs are the life of all parties. No one will get into gossips in small groups & then disperse after the event. The DJ’s who we have on board, can engage the crowd by connecting with them instantly. All our DJ’s have extensive knowledge about music. They play all genres of music from classics to the latest chartbusters. The song collection is so huge that any specific song requests can be played. The performances are high in energy & melody. When a DJ performs, they create such an atmosphere that everyone flows with the music & all are entertained. Your friends & guests would have never enjoyed any such parties before. As you arrange the best quality drinks & an elaborate menu for the party, do not miss best DJ Hire in Berry, Corporate DJ Hire Berry, Wedding DJ Hire Berry. By the time you realize, the party is rocking! All our DJs have performed at various events like birthdays, corporate parties, school/college events, weddings etc. Therefore they are highly experienced & gauge what people want. Their music will leave lasting impressions & make the party worth remembering. All our DJs are extremely flexible &they can quickly change tracks as and when the requests are flowing in. In addition to a DJ, you can also hire: lights, party theme effect-lighting, chairs & tables, covers for the furniture, glassware and crockery set, glow furniture, LED sound activated lighting, accessories for event décor, balloons, party equipment etc. Whatever the event, a DJ can instantly get your guests tapping their feet. 
For more information and for professional DJ Hire in Berry, Corporate DJ Hire Berry, Wedding DJ Hire Berry, call TJ Your Mobile DJ at 1300631842 and make your event memorable.
# This file is part of Buildbot.  Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE.  See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members

from __future__ import absolute_import
from __future__ import print_function

from twisted.internet import defer

from buildbot.test.util.decorators import flaky
from buildbot.test.util.integration import RunMasterBase


# This integration test creates a master and worker environment,
# with one builder and a custom step
# The custom step is using a CustomService, in order to calculate its result
# we make sure that we can reconfigure the master while build is running


class CustomServiceMaster(RunMasterBase):
    """Integration test: custom services survive master reconfiguration.

    masterConfig() below increments a module-global counter on every call,
    so each reconfig hands the service a new num_reconfig value.
    """

    @flaky(bugNumber=3340)
    @defer.inlineCallbacks
    def test_customService(self):
        yield self.setupConfig(masterConfig())

        build = yield self.doForceBuild(wantSteps=True)

        # The step summary reflects the service's current num_reconfig.
        self.assertEqual(build['steps'][0]['state_string'], 'num reconfig: 1')

        myService = self.master.service_manager.namedServices['myService']

        self.assertEqual(myService.num_reconfig, 1)
        self.assertTrue(myService.running)

        # We do several reconfig, and make sure the service
        # are reconfigured as expected
        yield self.master.reconfig()

        build = yield self.doForceBuild(wantSteps=True)

        self.assertEqual(myService.num_reconfig, 2)
        self.assertEqual(build['steps'][0]['state_string'], 'num reconfig: 2')

        yield self.master.reconfig()

        # The third reconfig adds a second service (see masterConfig).
        myService2 = self.master.service_manager.namedServices['myService2']

        self.assertTrue(myService2.running)
        self.assertEqual(myService2.num_reconfig, 3)
        self.assertEqual(myService.num_reconfig, 3)

        yield self.master.reconfig()

        # second service removed
        self.assertNotIn(
            'myService2', self.master.service_manager.namedServices)
        self.assertFalse(myService2.running)
        self.assertEqual(myService2.num_reconfig, 3)
        self.assertEqual(myService.num_reconfig, 4)


# master configuration
# Module-global counter: bumped on every (re)configuration of the master.
num_reconfig = 0


def masterConfig():
    """Build the master config; each call simulates one reconfiguration."""
    global num_reconfig
    num_reconfig += 1
    c = {}
    from buildbot.config import BuilderConfig
    from buildbot.process.factory import BuildFactory
    from buildbot.schedulers.forcesched import ForceScheduler
    from buildbot.steps.shell import ShellCommand
    from buildbot.util.service import BuildbotService

    class MyShellCommand(ShellCommand):

        def getResultSummary(self):
            # Surface the service's reconfig count in the step summary so
            # the test can observe it through the build results.
            service = self.master.service_manager.namedServices['myService']
            return dict(step=u"num reconfig: %d" % (service.num_reconfig,))

    class MyService(BuildbotService):
        name = "myService"

        def reconfigService(self, num_reconfig):
            self.num_reconfig = num_reconfig
            return defer.succeed(None)

    c['schedulers'] = [
        ForceScheduler(
            name="force",
            builderNames=["testy"])]

    f = BuildFactory()
    f.addStep(MyShellCommand(command='echo hei'))

    c['builders'] = [
        BuilderConfig(name="testy",
                      workernames=["local1"],
                      factory=f)]

    c['services'] = [MyService(num_reconfig=num_reconfig)]
    # On the third configuration only, a second named service is present;
    # the following reconfig therefore removes it again.
    if num_reconfig == 3:
        c['services'].append(
            MyService(name="myService2", num_reconfig=num_reconfig))
    return c
0:07Skip to 0 minutes and 7 secondsTexts have two primary functions: one is to give a stable form to words, which are by nature volatile and transient; we may call this function "giving material form to people's thoughts." The other is to give someone's words wider circulation by reproducing them; we may call this second function “giving a larger audience to ideas.” In other words, texts are one of the mediums that connect people’s thoughts across time and space. In the textual medium, the message―a long and carefully structured segment of text―is encoded in words and images. Text reproduction is not an abstract process. Here I have prepared three books. They are three different editions of the same book, the Shijing which were published during the Edo period (1603-1868). 1:09Skip to 1 minute and 9 secondsIf we look inside, we can see that they are all woodblock printed editions produced at different points in time, specifically, one in the early, another from the middle, and then late Edo period. Thus, we can say that books not only bridge the geographical gap between different places (China and Japan), but also connect different points in time as well. Since the beginnings of history, people living in different parts of the world have been exchanging ideas through texts and developing their own unique cultures. We are going to define cultures that have placed a high importance on texts as "textual cultures." East Asia, of which Japan is part, possesses an incredibly rich textual culture with a very long history. It would be impossible to talk about East Asian textual culture without considering the role of Mahayana Buddhism. Before reading the article below, let us watch the video and reflect on the role of books within culture. Prof. Sumiyoshi will show you three different editions of the same book published during the Edo period, in the 17th, 18th, and 19th century respectively. You will see how approaches to publishing evolved over a period of 200 years. 
Shijing is the oldest existing collection of Chinese poetry, comprising more than 300 works dating approx. from the 11th to the 7th centuries BCE. The content of the songs varies. One of the most famous poems is “The Pulchritudinous Peach Tree” (桃夭) (Shijing I.1.6) which celebrates the beauty of a young bride. Though this course is about Japanese culture, you will learn a lot about Chinese books, too! It would be impossible to talk about East Asian textual culture without considering the role of Mahayana Buddhism. “Sutra” is a general term for Buddhist texts of any school or sect. The reason for focusing on Mahayana texts in particular here is that Mahayana Buddhism placed a particular emphasis on the production and circulation of written works. The Lotus Sutra of the Sublime Law (J. Myōhō rengekyō), for instance, the most representative work of Mahayana Buddhism, repeatedly urges its readers to “accept,” “recite,” “explain,” and “copy” the sutra. From the very outset, Mahayana Buddhism urged its followers to copy its sacred texts in order to spread its teachings more widely, and nowhere was this more literally and emphatically applied than in China and its neighboring countries, where Buddhist texts circulated in Chinese translation. As mentioned in Step 1.1, you can view the images of most books—introduced in this course—in larger size by clicking the links marked Click to take a closer look. The link takes you to an online library where you can enjoy quality images and detailed information but it gives you more than that.
import argparse import pxssh import sys class Client: def __init__(self, host, user, password): self.host = host self.user = user self.password = password self.session = self.connect() def connect(self): try: s = pxssh.pxssh() s.login(self.host, self.user, self.password) return s except Exception, e: print e print '[-] Error Connecting' def send_command(self, cmd): self.session.sendline(cmd) self.session.prompt() return self.session.before def botnetCommand(command): for client in botNet: output = client.send_command(command) print '[*] Output from ' + client.host print '[+] ' + output + '\n' def addClient(host, user, password): client = Client(host, user, password) botNet.append(client) botNet = [] addClient('ip', 'user', 'password') try: while True: command = raw_input("Put a command: ") botnetCommand(command) except KeyboardInterrupt: print "KeyboardInterrupt" sys.exit() def main(): init() if __name__ == "__main__": main
If you are after something a little smaller, then our range of bistro sets — such as the Stockett Side Chair (Set of 2) by Loon Peak — offers compact two-person tables and chairs that create a much more intimate feel when there are only two of you sitting down to eat. Should you only have space for a breakfast bar in your kitchen, then we have an array of mobile bar stools to suit your existing space. Offer ends 31st August 2017. Offer Details > New Really House Insurance customers only. Good news — we ship Australia-wide. Whether you live in Melbourne, Sydney, Perth or even the outer suburbs, our products come straight to you. Order through our online store, add in the important details, and your Brosa-made items will be there quickly. The table is beautiful!! The butterfly leaf is an excellent feature and makes the table big enough to seat the whole family. My son assembled it for me. He is in construction and was impressed by the quality and sturdiness of the table. It was delivered on time and all the parts were in the package. Nothing was broken or missing. I will order from your company again because of the price, quality, and superb service. I ordered 3 stools and they arrived 2 days later. I took a risk and ordered the red after reading other reviews saying that the color was different from how it appeared in the online images. I think the colour is perfect and matches the web photos pretty closely. They were very easy to assemble. I did smell the gas odor that other people commented about, but it was the plastic wrap/packaging, not the bar stools themselves.
Super good quality, very heavy wood, simple to install, and a beautiful accent in a house with white décor; I really like it, and for the price it is a no-brainer. I recommend it 100%! Perfect size for a small space — not overpowering or heavy like some sets, and just enough seating to work for a couple or a small family.
import numpy as np import copy from random import randrange import game.py import arena.py import gym.py import winningPercent.py """ Piece State, 0 = Empty 1 = X 2 = O 0 | 1 | 2 --------------- 3 | 4 | 5 --------------- 6 | 7 | 8 """ """ This will check if the current state exists, IF YES: return index of it IF NO: return -1 """ def stateChecker(states, board): index=-1 i=0 while i<len(states) and index==-1: match=True j=0 while j<9 and match is True: if states[i][j]!=board[j]: match=False j+=1 if match is True: index=i i+=1 return index """This will start us cranking out all possible states """ def createAllPossibleStates(states, R, t): board=[0,0,0,0,0,0,0,0,0] states.append(board) createAllStates(states,R, t,board, 1, -1, -1 , False,) print "Woe, that was tough!" def createAllStates(states, R, t, board, turn, previousState, previousMove, GameOver): currentState=copy.deepcopy(len(states)-1) #prevMove=copy.deepcopy(previousMove) #prevState=copy.deepcopy(previousState) newTurn=copy.deepcopy(turn) #playerThatWon=copy.deepcopy(winningPlayer) R.append([0.,0.,0.,0.,0.,0.,0.,0.,0.]) t.append([0,0,0,0,0,0,0,0,0]) """ if turn==10: #print "DRAW" R.append([-1,-1,-1,-1]) return """ for i in range(0,9): currentMove=copy.deepcopy(i) #Check for empty square if board[i]==0: newBoard=copy.deepcopy(board) game.addMove(newBoard, turn, i) gameOv=copy.deepcopy(GameOver) #if gameOv is True: # if newTurn%2==playerThatWon%2: # R[currentState][i]=100.0 if game.threecheck(newBoard) is True and gameOv is False: R[currentState][i]=100.0 gameOv=True #winningPlayer=newTurn%2 #we need to alter the reward from previous movement to reflect a loss #R[prevState][prevMove]=-100.0 #If the game is not over, the last player puts a piece down to draw elif game.threecheck(newBoard) is False and gameOv is False and turn==9: #R[prevState][prevMove]=25 R[currentState][i]=25 gameOv=True #Here we will find if we will be at a previously check=stateChecker(states, newBoard) if check==-1: #If this is a board 
we have not seen states.append(newBoard) t[currentState][currentMove]=len(states)-1 #Go to next state from current move #We will have to send the info for current state and move in case it results in a direct loss createAllStates(states,R, t, newBoard,newTurn+1, currentState, currentMove, gameOv) else: # if this is, all we will ahve to do is append the INDEX FOR THE next state # This will allow us to quickly jump to that state. t[currentState][currentMove]=check #IF the square is taken, we can not place a piece there #so there is not corresponding cation or reward else: R[currentState][currentMove]=-np.inf t[currentState][currentMove]=-1 def setQNoBacktrack(Q,t): for i in range (len(t)): for j in range (len(t[0])): if t[i][j]==-1: Q[i,j]=-np.inf #------------------------------------------------------------------------------------------- """ States holds all boards R holds all rewards t holds list of actions, and location of state following action """ states=[] R=[] t=[] print "Loading states, please wait." createAllPossibleStates(states, R, t) #nStates= np.shape(R)[0] #print nStates print "Time to get to the gym, brb." Qrand1 = trainingAgainstRand1(states,t) Qrand2 = trainingAgainstRand2(states,t) QQ=trainingAgainstLearner(states, t) Qplayer1=QQ[0] Qplayer2=QQ[1] # ****** If you want to calculate winning percentages of the learners, enable the next line********** winningPercentages.winningPercent(QQ, Qrand1, Qrand2, t, states) #----------------------------------------------------------------------------------------------------------- # This section is a user menu that allows the # user to determine if they want two trained # computers to battle, or play against the super computer mode=0 while mode!=3: print "Would you like:" print "1. Two computers to battle to the death" print "2. Play against the super computer" print "3. 
Quit" mode=int(raw_input('Input:')) if mode==1: print "You selected two computers" arena.TwoComputers(QRand1,QRand2, t,states, Comp1Win,RandWin,Draw) print "" print "" elif mode==2: print "So you want to play?" print "" print "" arena.soIHearYouLikeToPlay(Q, states) elif mode!=3: print "Invalid Response" print "" print"" print "done"
Grivet Members get back an estimated $3 on this item as part of their member rewards. The Impact pad with hexpad technology is integrated into a new low profile, form-fitting knee or elbow pad. Perfect for all levels of impact absorption. The #6440 HexForce Knee or Elbow Pad is one of McDavid's most popular and versatile pads, featuring proprietary HexPad material in a versatile, lightweight, protective pad for the knee or elbow. Individual hex shaped pads conform to and move with your body for solid comfort and continuous protection, while compression fabric holds the pad firmly in place and keeps muscles warm to prevent pulls and fatigue. HydraVent hDc Moisture Management Technology keeps you cool and dry, as well. HydraVent hDc (1) This material results from a patented process that permanently bonds hydrophilic (water-attracting) molecules to mid-weight and heavyweight synthetic fiber surfaces, ensuring HydraVent hDc remains intact even after numerous washings. (2) As you exercise, your body produces perspiration to cool itself and this perspiration condenses on your skin and builds up heat and discomfort. (3) Moisture is pulled away from the skin by the wicking ability of HydraVent hDc, so your skin remains cooler and drier, and your body's temperature management system can do its job more effectively. (4) Once the fabric absorbs the moisture, it is spread rapidly across a broader surface area for faster evaporation, and this evaporation draws heat out of the body. (5) Finally, when moisture is one molecule thick, HydraVent hDc starts its drying process, allowing for a consistent and rapid moisture transport ability over a long use time. Hexpad Hexpad Technology is a patented padding system that permanently bonds dozens of individual athletic pads into fabrics. Because the pads are individually applied, they move with and conform to the body for continuous protection that isn't bulky or restricting. The material is machine washable. 
Protection Levels McDavid offers a range of sports medicine products based on protection and support levels. Each level provides soft tissue support through compression, and may promote healing through therapeutic heat. Protection Level I, Primary: General purpose protection Minor pain, sprains, and instability Relief from arthritis, bursitis, and tendonitis Protection Level II, Advanced: Minor to moderate protection Slight ligament instabilities or pain Offers more protection than simple sleeves Protection Level III, Maximum: Ultimate in protection Moderate to major instabilities and pain Provides the maximum level of support About McDavid In 1969, Dr. Robert F. McDavid, operating out of a basement in Bellwood, Illinois, developed what became the first widely used lateral protective knee brace in football. Today, the company manufactures more than 400 products for athletes and active people of all ages, designed to help prevent injury while enhancing performance. The company is the most recommended brand of braces among sports medical professionals, and an industry leader in sports medical products and technical performance athletic wear.
# Clutter depends on Cogl 1.0 for public API, but Cogl ships with
# introspection data for both 1.0 and 2.0; pygobject will prefer
# the latter, so we need to load Clutter before Cogl
from gi.repository import Clutter
from gi.repository import Cogl
from gi.repository import GdkPixbuf

# (gravity, human-readable label) pairs, cycled through by on_tap().
gravities = [
    ( Clutter.ContentGravity.TOP_LEFT, 'Top Left' ),
    ( Clutter.ContentGravity.TOP, 'Top' ),
    ( Clutter.ContentGravity.TOP_RIGHT, 'Top Right' ),

    ( Clutter.ContentGravity.LEFT, 'Left' ),
    ( Clutter.ContentGravity.CENTER, 'Center' ),
    ( Clutter.ContentGravity.RIGHT, 'Right' ),

    ( Clutter.ContentGravity.BOTTOM_LEFT, 'Bottom Left' ),
    ( Clutter.ContentGravity.BOTTOM, 'Bottom' ),
    ( Clutter.ContentGravity.BOTTOM_RIGHT, 'Bottom Right' ),

    ( Clutter.ContentGravity.RESIZE_FILL, 'Resize Fill' ),
    ( Clutter.ContentGravity.RESIZE_ASPECT, 'Resize Aspect' )
]

# Index into `gravities` of the gravity applied on the next tap.
current_gravity = 0


def on_tap(action, actor, text):
    """Tap/click handler: apply the current gravity to `actor` (animated),
    update the `text` label to name it, then advance to the next one."""
    global gravities, current_gravity

    # Change the label
    text.props.text = 'Content Gravity: ' + gravities[current_gravity][1]

    # Animate the content gravity changes
    with actor.easing_state():
        actor.set_content_gravity(gravities[current_gravity][0])

    # Cycle through all gravities
    current_gravity += 1
    if current_gravity >= len(gravities):
        current_gravity = 0


if __name__ == '__main__':
    Clutter.init(None)

    # Our stage
    stage = Clutter.Stage(title='Content Box', user_resizable=True)
    stage.set_margin(Clutter.Margin(12))
    stage.connect('destroy', Clutter.main_quit)
    stage.show()

    # Load the texture data from a file
    pixbuf = GdkPixbuf.Pixbuf.new_from_file('redhand.png')

    # Use the correct pixel format depending on whether the image
    # has an alpha channel
    pixel_format = Cogl.PixelFormat.RGB_888
    if pixbuf.get_has_alpha():
        pixel_format = Cogl.PixelFormat.RGBA_8888

    data = pixbuf.read_pixel_bytes()
    width = pixbuf.get_width()
    height = pixbuf.get_height()
    stride = pixbuf.get_rowstride()

    # The Image content knows how to draw texture data
    image = Clutter.Image()
    image.set_bytes(data, pixel_format, width, height, stride)

    # A Stage is like any other actor, and can paint a Content
    stage.set_content_gravity(Clutter.ContentGravity.RESIZE_ASPECT)
    stage.set_content_scaling_filters(Clutter.ScalingFilter.TRILINEAR,
                                      Clutter.ScalingFilter.LINEAR)
    stage.set_content(image)

    # Show a label with the current content gravity
    label = 'Content Gravity: Resize Aspect'
    text = Clutter.Text(text=label)
    text.add_constraint(Clutter.AlignConstraint(source=stage,
                                                align_axis=Clutter.AlignAxis.BOTH,
                                                factor=0.5))
    stage.add_child(text)

    # Change the content gravity on tap/click
    action = Clutter.TapAction()
    action.connect('tap', on_tap, text)
    stage.add_action(action)

    Clutter.main()
Fantasmic! is scheduled for 8:30pm on September 27th. Here is a link to the park calendar in case you need it! I hope you have a wonderful trip! Thanks for visiting the Moms panel.
""" An example in which 3 functions of x and y are displayed with a surf plot, while the z scaling is kept constant, to allow comparison between them. The important aspect of this example is that the 3 functions should not be displayed on top of each other, but side by side. For this we use the extent keyword argument. In addition, the relative scale between the different plots is important. This is why we also use the `warp_scale` keyword argument, to have the same scale on all plots. Finally, we have to adjust the data bounds: as we want the "horizon" of the wigner function in the middle of our extents, we put this to zero. We add a set of axes and outlines to the plot. We have to play we extents and ranges in order to make them fit with the data. """ # Author: Gael Varoquaux <gael.varoquaux@normalesup.org> # Copyright (c) 2007, Enthought, Inc. # License: BSD Style. import numpy from mayavi import mlab def cat(x, y, alpha, eta=1, purity=1): """ Multiphoton shrodinger cat. eta is the fidelity, alpha the number of photons""" cos = numpy.cos exp = numpy.exp return (1 + eta * (exp(-x ** 2 - (y - alpha) ** 2) + exp(-x ** 2 - (y + alpha) ** 2) + 2 * purity * exp(-x ** 2 - y ** 2) * cos(2 * \ alpha * x)) / (2 * (1 + exp(- alpha ** 2)))) / 2 x, y = numpy.mgrid[-4:4.15:0.1, -4:4.15:0.1] mlab.figure(1, size=(500, 250), fgcolor=(1, 1, 1), bgcolor=(0.5, 0.5, 0.5)) mlab.clf() cat1 = cat(x, y, 1) cat2 = cat(x, y, 2) cat3 = cat(x, y, 3) # The cats lie in a [0, 1] interval, with .5 being the assymptotique # value. We want to reposition this value to 0, so as to put it in the # center of our extents. 
cat1 -= 0.5 cat2 -= 0.5 cat3 -= 0.5 cat1_extent = (-14, -6, -4, 4, 0, 5) surf_cat1 = mlab.surf(x - 10, y, cat1, colormap='Spectral', warp_scale=5, extent=cat1_extent, vmin=-0.5, vmax=0.5) mlab.outline(surf_cat1, color=(.7, .7, .7), extent=cat1_extent) mlab.axes(surf_cat1, color=(.7, .7, .7), extent=cat1_extent, ranges=(0, 1, 0, 1, 0, 1), xlabel='', ylabel='', zlabel='Probability', x_axis_visibility=False, z_axis_visibility=False) mlab.text(-18, -4, '1 photon', z=-4, width=0.13) cat2_extent = (-4, 4, -4, 4, 0, 5) surf_cat2 = mlab.surf(x, y, cat2, colormap='Spectral', warp_scale=5, extent=cat2_extent, vmin=-0.5, vmax=0.5) mlab.outline(surf_cat2, color=(0.7, .7, .7), extent=cat2_extent) mlab.text(-4, -3, '2 photons', z=-4, width=0.14) cat3_extent = (6, 14, -4, 4, 0, 5) surf_cat3 = mlab.surf(x + 10, y, cat3, colormap='Spectral', warp_scale=5, extent=cat3_extent, vmin=-0.5, vmax=0.5) mlab.outline(surf_cat3, color=(.7, .7, .7), extent=cat3_extent) mlab.text(6, -2.5, '3 photons', z=-4, width=0.14) mlab.title('Multi-photons cats Wigner function') mlab.view(142, -72, 32) mlab.show()
Richard Strauss: Symphonic Rhapsody "Elektra" As part of a European tour the Pittsburgh Symphony Orchestra and their music director Manfred Honeck paid a single visit to Belgium. Their Brussels program encompassed the classical elegance of Haydn's 93th Symphony as well as the dissonant expressionism of Strauss's Elektra in an orchestral adaptation, allowing us to appreciate the excellence and the generally high-octane performance style of the orchestra. There is little doubt, however, that for most in the Brussels audience, the return of the acclaimed 25-year-old Russian pianist Daniil Trifonov in Rachmaninoff's 2nd Piano Concerto, made the icing on the cake. The magnificent Haydn 93rd Symphony, one of the earliest in his London series, was delightfully performed – luminous, lively, and witty. Any fears that with such a large formation Honeck would treat us to some outmoded big band, string-driven Haydn were soon dispelled by the transparent, antiphonally placed Pittsburgh violins radiating warmth and elegance, and by his impeccable phrasing. The string quartet opening the second movement provided a striking contrast and Haydn's ever-inventive orchestration, including remarkable solos from principal oboe Cynthia Koledo DeAlmeida, was always carefully exposed. Honeck gave the Menuetto an irresistible swing and rounded off with an imaginatively paced Finale. Imagination was also running high in the Rachmaninoff concerto, but here the results were far less persuasive. Daniil Trifonov possesses – need one repeat it – a phenomenal technique which effortlessly deals with the work's virtuosic demands and allows him to conjure the most astonishing sonorities from his instrument. But just as in his traversal of Rachmaninoff's Third, which I heard in London last year, I was left with the feeling that bigger structures seem to elude him and this was mainly work in progress. 
Trifonov's playing was sonorous and crystal-clear, even in the most demanding passages, and I haven't heard that many pianists in concert who aren't drowned out by the orchestral tutti, yet eventually this turned out to be part of the problem. Trifonov seems to treat everything as a solo part and hardly ever takes a back seat. Every note, and we all know Rachmaninoff gives us many, is highlighted in his hands. This surgical treatment often reveals unheard details but also stretches the longer lines to breaking point. His preference for slow tempi and lingering mid-way may be considered as expressive freedom but when pushed this far they undermine the musical sweep, as in the first movement where he reached the sonic limits of his Steinway too soon, only to hold back immediately afterwards and flunk the Maestoso passage with loudly hammered chords. The first movement took forever to end and blurred the contrast with the following Adagio sostenuto. In the second movement, with Trifonov's microscopic, meandering approach the overall effect was overwrought rather than affecting. Truth to be told the sense of fragmentation was reinforced by Honeck's reliance on extreme dynamic shifts. The fortissimo orchestral passages, topped by the brass section overpowering everybody else, were simply too demonstrative. By the time they reached the third movement Trifonov was in characteristic vein with his nose on the keyboard, sweating profusely, as if in a trance. It was sufficient to convince the Brussels' audience they were in the presence of greatness and give Trifonov a standing ovation. Trifonov is a remarkable pianist, let there be no doubt. Yet compared to some of his generation from the Russian school, like Dmitry Masleev or Behzod Abduraimov who both featured in the Rachmaninoff festival in Rotterdam last September, he still has some way to go. What may have sounded loud in the Rachmaninoff was dwarfed by what the orchestra had in store after the interval. 
But here the sonic excesses were duly warranted. The Elektra Symphonic Rhapsody had been the crowning achievement of the Pittsburgh's Symphony's homage to the composer's 150th birthday in 2014. Manfred Honeck and composer Tomas Ille bravely arranged a 35-minutes suite from Strauss's extraordinary opera and while I have never been a great fan of such posthumous opera-without-words medleys, at least this Elektra Rhapsody proved a cleverly convincing showcase for the orchestra. No matter that those unfamiliar with the opera plot will remain mostly in the dark as to what this music is depicting – with the characters' leitmotifs and chords preserved a synopsis might come in handy – one can revel in the stunning sound world of Strauss at his most daringly avant-garde. The arrangers made sure to balance tension with texture and a massive Pittsburgh Symphony Orchestra captured the changing moods, from lyrical to brutally terrifying, with aplomb and utter conviction. The outsized brass section and percussion could easily have stolen the show, if it wasn't for the continuous quality of the string playing. In short, a fitting tribute to Richard Strauss, but foremost to the Pittsburg Symphony Orchestra clearly in splendid form.
# this is an extremely simple Satchmo standalone store. import logging import os, os.path LOCAL_DEV = True DEBUG = True TEMPLATE_DEBUG = DEBUG if LOCAL_DEV: INTERNAL_IPS = ('127.0.0.1',) DIRNAME = os.path.dirname(os.path.abspath(__file__)) SATCHMO_DIRNAME = DIRNAME gettext_noop = lambda s:s LANGUAGE_CODE = 'en-us' LANGUAGES = ( ('en', gettext_noop('English')), ) #These are used when loading the test data SITE_NAME = "simple" DATABASES = { 'default': { # The last part of ENGINE is 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'ado_mssql'. 'ENGINE': 'django.db.backends.sqlite3', 'NAME': os.path.join(DIRNAME, 'simple.db'), # Or path to database file if using sqlite3 #'USER': '', # Not used with sqlite3. #'PASSWORD': '', # Not used with sqlite3. 'HOST': '', # Set to empty string for localhost. Not used with sqlite3. 'PORT': '', # Set to empty string for default. Not used with sqlite3. } } SECRET_KEY = 'EXAMPLE SECRET KEY' ##### For Email ######## # If this isn't set in your settings file, you can set these here #EMAIL_HOST = 'host here' #EMAIL_PORT = 587 #EMAIL_HOST_USER = 'your user here' #EMAIL_HOST_PASSWORD = 'your password' #EMAIL_USE_TLS = True #These are used when loading the test data SITE_DOMAIN = "localhost" SITE_NAME = "Simple Satchmo" # not suitable for deployment, for testing only, for deployment strongly consider memcached. CACHES = { 'default': { 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', 'LOCATION': 'satchmo-cache', 'TIMEOUT': 60 } } ACCOUNT_ACTIVATION_DAYS = 7 #Configure logging LOGFILE = "satchmo.log" logging.basicConfig(level=logging.DEBUG, format='%(asctime)s %(name)-12s %(levelname)-8s %(message)s', datefmt='%a, %d %b %Y %H:%M:%S', filename=os.path.join(DIRNAME,LOGFILE), filemode='w') logging.getLogger('django.db.backends').setLevel(logging.INFO) logging.getLogger('keyedcache').setLevel(logging.INFO) logging.getLogger('l10n').setLevel(logging.INFO) logging.getLogger('suds').setLevel(logging.INFO) logging.info("Satchmo Started")
20 years ago, the first novel “Trainspotting” by the Scottish writer Irvine Welsh was filmed. To celebrate the anniversary of this classic movie it is now being released in an amazing glossy SteelBook® edition. For the front artwork, a picture from the original iconic poster campaign is used. The back of the case features the famous intro quote from the movie. Danny Boyle’s explosive film tracks the misadventures of young men in Edinburgh trying to find their way out of joblessness, aimless relationships and drug addiction. Some are successful, while others hopelessly are not. Based on Irvine Walsh’s novel, Trainspotting melds grit with poetry, resulting in a film of harsh truths and stunning grace.
""" Extractor-related utility functions. """ from collections import defaultdict def ppextract2matches(matches, tag=None, overlaps=True): """ Convert PP extractor API results to 4-tuples specifying annotations, usable for edit operations on the input file. Overlapping tuples may optionally be removed, since it is tricky to apply overlapping offset-based annotations to a string. :param matches: An array of dicts as returned by pp_api.PoolParty.get_cpts_from_response(). :param tag: A fixed tag to annotate with. If None, annotate with the prefLabel of each matched concept. :param overlaps: Whether to include overlapping annotations in the results. :return: A list of tuples (start, end, tag, content). `start` and `end` are the character offsets. `content` is the text content of this span, e.g. for error checking. Note: pp_api.PoolParty.get_cpts_from_response() returns this structure: [ { "prefLabel": "Something", "uri": "https:...", ... "matchings": [ { "text": "something", "frequency": n, "positions": [ [ start1, end1 ], [ start2, end2 ] ] }, { "text": "something_else", ... """ use_labels = bool(tag is None) edits = [] for cpt_dict in matches: if use_labels: tag = cpt_dict["prefLabel"] # We can't annotate shadow concepts: if "matchings" not in cpt_dict: continue for match in cpt_dict["matchings"]: for start, end in match["positions"]: edits.append((start, end, tag, match["text"])) if not overlaps: edits = remove_overlaps(edits) return edits def remove_overlaps(matches): """ Return a subset of the matches, so that they are unique, ordered and non-overlapping. 
:param matches: a list of 4-tuples (start, end, tag, content) :return: the cleaned list """ # Example that must be handled: Three annotations data, security, "data security" # # [ [data] [security] ] # Remove repetitions (e.g., ambiguous concept labels matching the same text) matches = set(matches) # Group edits by start position groups = defaultdict(list) for edt in matches: start, end, tag, match = edt groups[start].append(edt) # If several spans start at the same point, we keep the longest one # (If we still have two prefLabels with the same span, keeps the one that sorts last) for k, members in groups.items(): if len(members) > 1: members[:] = sorted(members, key=lambda x: x[1])[-1:] matches = sorted(v[0] for v in groups.values()) # Now look for concepts that start before the last one ended offset = -1 clean = [] for edt in matches: start, end, tag, match = edt if start <= offset: continue clean.append(edt) offset = end return clean
Help your Arctic Cat 650 H1 4x4 Auto LE run better. Find the right K&N motorcycle or ATV air filter for your Arctic Cat using the application search. Upgrade your Arctic Cat 650 H1 4x4 Auto LE today with a K&N motorcycle or ATV air filter.
#!/usr/bin/env python # -*- coding: UTF-8 -*- tags = ['Satellite_5', 'Spacewalk'] name = 'Taskomatic service is running' from satellite_sanity_lib.util import get_days_uptime def find_taskomatic_process(data): """ Check the ps output to see if taskomatic is running """ for line in data['ps_auxcww']: if line.endswith(' taskomaticd') or ' /usr/bin/taskomaticd ' in line: return {'TASKOMATIC_PROCESS_LINE': line} def get_uptime(data): """ Return the number of days the machine has been up """ return {'UPTIME_DAYS': int(get_days_uptime(data['uptime'][0]))} def main(data): if data['ps_auxcww'] is not None or data['uptime'] is not None: # We do not want to hit case when system just booted, Satellite # is still starting (taskomatic not yet running) if get_uptime(data)['UPTIME_DAYS'] > 0: if not find_taskomatic_process(data): return True def text(result): out = "" out += "Service Taskomatic does't seems to be running.\n" out += "Use `service taskomatic restart` to restart it.\n" out += "See https://access.redhat.com/solutions/2116911" return out
Oils and fats are necessary to keep your skin supple, your hair shiny and to help you process certain vitamins such as A and D, but too much of a good thing can quickly go bad. A diet high in unhealthy fats can lead to obesity and all of the health complications that come with it. But, you can be thin, athletic and counting calories and still get into trouble by taking in the wrong kinds of fats, because they can raise your cholesterol levels. Trans fats are the worst type of fat you can consume. While they occur naturally in trace amounts in whole milk dairy products, beef and lamb, the most common trans fats are artificially produced. Developed to improve flavor and texture and to increase the shelf life of mass-produced breads, baked goods and snack foods, trans fats are found on food labels as "partially hydrogenated oils." Trans fats raise the level of low-density lipoproteins, or LDLs, in your bloodstream, creating little fat globules. High-density lipoproteins, or HDLs, sweep up these globs, but since trans fats also lower your level of HDLs, they can quickly become outmatched. A high level of LDLs in your bloodstream can clog your arteries and force your heart to work harder to maintain blood flow, which can lead to high blood pressure. High blood pressure puts you at risk for certain types of cancers as well as heart attack and stroke. Saturated fats generally come from animal products and remain solid at room temperature. Too much saturated fat in your diet can raise your level of low-density lipoproteins, or LDLs. Just as with trans fats, this forms minuscule fat globules that can clog your bloodstream like slow drivers on the freeway. Too many traffic jams cause your heart to work harder, which raises your blood pressure, increasing your risk of heart attack, stroke and certain types of cancers. Unsaturated fats come in two types. Monounsaturated fats are found in avocados, canola oil, nuts, olive oil and peanut oil. 
Eating these can help lower your cholesterol levels. Like all fats, they can be fairly high in calories, so keep that in mind and use them sparingly. Polyunsaturated fats come in two forms, Omega-3 and Omega-6 fatty acids. Omega-3 fatty acids are found in fish, especially salmon and sardines. Omega-6 fatty acids are found in butter substitutes, but you have to watch out for trans fats in those. They are also found in nuts, sunflower seeds and in sesame, soybean and sunflower oils. Unsaturated fats promote heart health. They lower the LDLs and raise the HDLs in your bloodstream, helping you avoid the risk of heart attack, stroke and some cancers. Choose your oil based on how you intend to use it, because not all healthy oils can handle the same demands. Avocado oil, canola oil, safflower oil, pure olive oil (not extra virgin) and vegetable oil are all excellent for frying because they have high smoke points and neutral flavors. They can also be used for sauteing, but they don't add flavor to dressings or dips. Peanut oil has a high smoke point, at 450 F, but it also has a strong peanut flavor. Sesame oil is excellent for sauteing, but it has an even stronger taste, which is why it's used so often in Asian cuisine. Extra virgin olive oil, or EVOO, flaxseed oil, peanut oil and sesame oil are all delicious and nutritious choices for salad dressings and dips because they offer heart-healthy unsaturated fats and a ton of flavor. Coconut oil is a healthier choice than butter, shortening or lard for baking, but should not be used to fry or saute foods. Chandler, Brynne. "What Are the Dangers of Eating Food With Large Amount of Fats & Oils?" Healthy Eating | SF Gate, http://healthyeating.sfgate.com/dangers-eating-food-large-amount-fats-oils-6693.html. 06 December 2018.
import smbus __all__ = ['I2C'] class I2C(object): def __init__(self, address, busnum, debug=False): self._address = address self._bus = smbus.SMBus(busnum) self._debug = debug @property def address(self): return self._address def _log_debug(self, msg): print "I2C: %s" % msg def _log_error(self, msg): print "I2C: Error accessing 0x%02X: %s" % (self._address, msg) def write8(self, reg, value): """ Writes an 8-bit value to the specified register/address """ try: self._bus.write_byte_data(self._address, reg, value) if self._debug: self._log_debug("Wrote 0x%02X to register 0x%02X" % (value, reg)) except IOError as e: self._log_error(e) def write16(self, reg, value): """ Writes a 16-bit value to the specified register/address pair """ try: self._bus.write_word_data(self._address, reg, value) if self._debug: self._log_debug("Wrote 0x%02X to register pair 0x%02X, 0x%02X" % (value, reg, reg+1)) except IOError as e: self._log_error(e) def read8(self, reg): """ Read an 8-bit value from the I2C device """ try: result = self._bus.read_byte_data(self._address, reg) if self._debug: self._log_debug("Device 0x%02X returned 0x%02X from reg 0x%02X" % (self._address, result & 0xFF, reg)) return result except IOError as e: self._log_error(e) def read16(self, reg): """ Read a 16-bit value from the I2C device """ try: result = self._bus.read_word_data(self._address, reg) if self._debug: self._log_debug("Device 0x%02X returned 0x%02X from reg 0x%02X" % (self._address, result & 0xFF, reg)) return result except IOError as e: self._log_error(e)
What you give someone who has everything? Someone that you care about a lot? Or who has even captured that special place in your heart. Precisely! A priceless, thoughtful and floral present. December feels a bit magical, doesn’t it? As if time is passing more slowly, sounds are more muted, colours appear deeper and daydreams could easily become reality. It's the ideal time to dive into your favourite Christmas stories, like Charles Dickens’ Christmas Carol or The Little Match Girl by Hans Christian Andersen. Scour a second-hand bookshop, charity shop or your own bookshelves for a vintage copy. If you see rings on the cover, dog-eared pages or an old library stamp, you’ve literally found a book with a good story. Whenever you give a book as a gift, you should write a personal note on the first blank page. Why this book? Is it your favourite, or have the two of you referred to it sometime? A hand-written message turns every book into a must-read. Don’t forget to record the date and place. One day this copy may end up in a second-hand bookshop again. How cool would it be for it to have this little bit of history in it? You’re on the right path, but it can be personalised more. Does your beloved have a favourite flower? They’re bound to. Press the flower in the book, ideally on your favourite page with your favourite passage marked. A beautifully scented sprig of lavender, a cheerful gerbera, or Erica from the garden. Pressing flowers and including them in your gift lends that extra bit of TLC to your present. Does the flower or plant have a specific symbolism? Look it up, and include a note about it. If you don’t like the smell of old books, disguise it with a beautifully scented floral perfume. The one you use, obviously, so that your beloved always thinks of you when they open the book. Christmas presents deserve the finest wrapping paper. We’ve written previously about the most stylish ways to wrap gifts botanically. Using velvet is bang on trend. 
Dark blue or black paper with an ochre or blood red velvet ribbon with a big bow - pure luxury! Don’t forget the mistletoe for an accompanying kiss. After all, it is Xmas.
# Generated by Django 2.1.7 on 2019-04-12 19:28 from django.conf import settings from django.db import migrations, models import django.db.models.deletion import filer.fields.file class Migration(migrations.Migration): dependencies = [ ('financial', '0016_auto_20190409_0033'), ] operations = [ migrations.AlterField( model_name='expenseitem', name='attachment', field=filer.fields.file.FilerFileField(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='expense_attachment', to='filer.File', verbose_name='Attach File (optional)'), ), migrations.AlterField( model_name='revenueitem', name='attachment', field=filer.fields.file.FilerFileField(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='revenue_attachment', to='filer.File', verbose_name='Attach File (optional)'), ), migrations.AlterField( model_name='revenueitem', name='currentlyHeldBy', field=models.ForeignKey(blank=True, help_text='If cash has not yet been deposited, this indicates who to contact in order to collect the cash for deposit.', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='revenuesheldby', to=settings.AUTH_USER_MODEL, verbose_name='Cash currently in possession of'), ), migrations.AlterField( model_name='revenueitem', name='event', field=models.ForeignKey(blank=True, help_text='If this item is associated with an Event, enter it here.', null=True, on_delete=django.db.models.deletion.SET_NULL, to='core.Event', verbose_name='Event'), ), migrations.AlterField( model_name='revenueitem', name='invoiceItem', field=models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='core.InvoiceItem', verbose_name='Associated invoice item'), ), ]
A little collection of Burnsiana for 25 January. Working backwards, chronologically: a couple of posters currently on show in Edinburgh’s Rose Street; a first day cover from 1966; pages from an early Penguin book; the grave of ‘Clarinda’ (Agnes McLehose), and a line from ‘Ae Fond Kiss’, written for her by Burns; and the Burns monument on Calton Hill. This entry was posted in Poets, Uncategorized and tagged Clarinda, Robert Burns, Rose Street on January 25, 2018 by Ken Cockburn.
"""DRF serializers exposing the home app's models over the REST API.

Each serializer is a plain ModelSerializer that whitelists the fields
returned/accepted for its model; no custom validation or nesting is done
here.
"""
from rest_framework import serializers
from home.models import *
from django.contrib.auth.models import User


# Serializer for the built-in Django User table (read-safe subset only;
# password and permission fields are deliberately excluded).
class UserSerializer(serializers.ModelSerializer):
    class Meta:
        model=User
        fields=('id','username','first_name','last_name','email')


# Serializer for the Personnel table
class PersonnelSerializer(serializers.ModelSerializer):
    class Meta:
        model=Personnel
        fields=('Person_ID','LDAP','Role','Dept')


# Serializer for the Department table
class DepartmentSerializer(serializers.ModelSerializer):
    class Meta:
        model=Department
        fields=('Dept_ID','Dept_Name')


# Serializer for the Roles table
class RolesSerializer(serializers.ModelSerializer):
    class Meta:
        model=Roles
        fields=('Role_ID','Role_name','level')


# Serializer for the Courses table
class CoursesSerializer(serializers.ModelSerializer):
    class Meta:
        model=Courses
        fields=('Course_ID','Course_Name','Course_description','Course_Credits','Course_Year','Course_Status')


# Serializer for the Documents table
class DocumentsSerializer(serializers.ModelSerializer):
    class Meta:
        model=Documents
        fields=('Doc_ID','Doc_Name','Document')


# Serializer for the Assignment table
class AssignmentSerializer(serializers.ModelSerializer):
    class Meta:
        model=Assignment
        fields=('Assign_ID','Assignment_File','Course_ID','Start_Time','End_Time')


# Serializer for the Submissions table
class SubmissionsSerializer(serializers.ModelSerializer):
    class Meta:
        model=Submissions
        fields=('Sub_ID','Assign_ID','Student_ID','Sub_Time','Score')


# Serializer for the Instructors_Courses join table (which instructor
# teaches which course, and for what period).
class ICSerializer(serializers.ModelSerializer):
    class Meta:
        model=Instructors_Courses
        fields=('IC_id','Course_ID','Inst_ID','Start_Date','End_Date')


# Serializer for the Students_Courses join table (course registrations).
class SCSerializer(serializers.ModelSerializer):
    class Meta:
        model=Students_Courses
        fields=('SC_ID','Student_ID','Course_ID','Reg_Date')


# Serializer for the Events table
class EventsSerializer(serializers.ModelSerializer):
    class Meta:
        model=Events
        fields=('Event_ID','Event_Date','Event_Name')


# Serializer for the Student_Period table (a student's enrolment years).
class SPSerializer(serializers.ModelSerializer):
    class Meta:
        model=Student_Period
        fields=('Student_ID','Start_Year','End_Year')


# Serializer for the Attendance table
class AttendanceSerializer(serializers.ModelSerializer):
    class Meta:
        model=Attendance
        fields=('Student_ID','ASession_ID','Date_time','Marked')


# Serializer for the Attendance_Session table
class Attendance_SessionSerializer(serializers.ModelSerializer):
    class Meta:
        model=Attendance_Session
        fields=('Session_ID','Course_Slot','Date_time','Status','Location')


# Serializer for the Timetable table
class TimetableSerializer(serializers.ModelSerializer):
    class Meta:
        model=Timetable
        fields=('T_days','Start_time','End_time','Course_ID','Class_ID')
Classic Berkeley Cars For Sale in UK | Classic Cars HQ. Alert Me! when Berkeley cars are listed.
# coding: utf-8
"""Message/comment display commands for BnW (twisted + mongo backend)."""
import time

import pymongo

from base import *
import bnw.core.bnw_objects as objs


def get_user_bl(request, use_bl=False):
    """Return authed user blacklist or simply an empty list if user not
    authed.

    :param request: BnW request object.
    :param use_bl: default False. Whether we should return actual
        blacklist or just empty list.
    :returns: list of blacklisted user names.
    """
    if use_bl and request.user:
        # Blacklist entries are (kind, value) pairs; only 'user' entries
        # are relevant here.
        bl = request.user.get('blacklist', [])
        return [el[1] for el in bl if el[0] == 'user']
    return []


@defer.inlineCallbacks
def set_subscriptions_info(request, messages):
    """Add a 'subscribed' flag to each message, indicating whether the
    requesting user is subscribed to it.

    Return updated list of messages (updated in place, actually!).
    For non-authed users return the unmodified list.

    :param request: BnW request object.
    :param messages: list of message dicts to annotate.
    """
    if not request.user:
        defer.returnValue(messages)
    user = request.user['name']
    ids = [m['id'] for m in messages]
    subscriptions = yield objs.Subscription.find({
        'user': user,
        'type': 'sub_message',
        'target': {'$in': ids}})
    # Set for O(1) membership tests instead of a list scan per message.
    sub_ids = set(s['target'] for s in subscriptions)
    for msg in messages:
        msg['subscribed'] = msg['id'] in sub_ids
    defer.returnValue(messages)


@defer.inlineCallbacks
def showSearch(parameters, page, request):
    """Return up to 20 messages matching *parameters* for the given page,
    annotated with subscription info and ordered oldest-first."""
    # FIXME: this query path is known to be very slow and needs a rewrite.
    messages = [x.filter_fields() for x in (yield objs.Message.find_sort(
        parameters, [('date', pymongo.DESCENDING)],
        limit=20, skip=page * 20))]
    messages = yield set_subscriptions_info(request, messages)
    # Query sorted newest-first for correct pagination; display oldest-first.
    messages.reverse()
    defer.returnValue(dict(
        ok=True, format="messages", cache=5, cache_public=True,
        messages=messages))


@defer.inlineCallbacks
def showComment(commentid):
    """Return a single comment by its full id ('MSGID/NN')."""
    comment = yield objs.Comment.find_one({'id': commentid})
    if comment is None:
        defer.returnValue(
            dict(ok=False, desc='No such comment', cache=5, cache_public=True)
        )
    defer.returnValue(
        dict(ok=True, format='comment', cache=5, cache_public=True,
             comment=comment.filter_fields(),
             ))


@defer.inlineCallbacks
def showComments(msgid, request, bl=None, after=''):
    """Return a message together with its replies.

    :param msgid: message id.
    :param request: BnW request object.
    :param bl: optional list of blacklisted users to exclude.
    :param after: optional comment id; only replies from that comment's
        date onwards are returned.
    """
    message = yield objs.Message.find_one({'id': msgid})
    if message is None:
        defer.returnValue(dict(
            ok=False, desc='No such message', cache=5, cache_public=True))
    if request.user:
        user = request.user['name']
        subscribed = yield objs.Subscription.count({
            'user': user,
            'type': 'sub_message',
            'target': msgid})
        message['subscribed'] = bool(subscribed)
    qdict = {'message': msgid.upper()}
    if bl:
        qdict['user'] = {'$nin': bl}
    if after:
        # 'after' may be a full comment id; keep only the part after '/'.
        after_comment = yield objs.Comment.find_one(
            {'id': msgid + '/' + after.split('/')[-1]})
        if after_comment:
            qdict['date'] = {'$gte': after_comment['date']}
    comments = yield objs.Comment.find_sort(
        qdict, [('date', pymongo.ASCENDING)], limit=10000)
    defer.returnValue(dict(
        ok=True, format='message_with_replies',
        cache=5, cache_public=True, msgid=msgid,
        message=message.filter_fields(),
        replies=[comment.filter_fields() for comment in comments]))


@check_arg(message=MESSAGE_COMMENT_RE, page='[0-9]+')
@defer.inlineCallbacks
def cmd_show(request, message='', user='', tag='', club='', page='0',
             show='messages', replies=None, use_bl=False,
             after='', before=''):
    """Show messages by specified parameters."""
    message = canonic_message_comment(message).upper()
    bl = get_user_bl(request, use_bl)
    # A '/' means a full comment id was requested.
    if '/' in message:
        defer.returnValue((yield showComment(message)))
    if replies:
        if not message:
            defer.returnValue(dict(
                ok=False,
                desc="Error: 'replies' is allowed only with 'message'.",
                cache=3600))
        defer.returnValue((yield showComments(message, request, bl, after)))
    else:
        if show not in ['messages', 'recommendations', 'all']:
            defer.returnValue(dict(
                ok=False, desc="Bad 'show' parameter value."))
        # 'message' is already upper-cased above.
        parameters = [('tags', tag), ('clubs', club), ('id', message)]
        parameters = dict(p for p in parameters if p[1])
        if user:
            user = canonic_user(user).lower()
            if show == 'messages':
                user_spec = dict(user=user)
            elif show == 'recommendations':
                user_spec = dict(recommendations=user)
            else:
                user_spec = {'$or': [{'user': user},
                                     {'recommendations': user}]}
            parameters.update(user_spec)
        elif bl:
            # Blacklist only applies when not filtering by a single user.
            parameters['user'] = {'$nin': bl}
        if before:
            befmsg = yield objs.Message.find_one({'id': before})
            if befmsg:
                parameters['date'] = {'$lt': befmsg['date']}
            else:
                defer.returnValue(dict(
                    ok=False,
                    desc="Message to search before doesn't exist."))
        if after:
            afmsg = yield objs.Message.find_one({'id': after})
            if afmsg:
                parameters['date'] = {'$gt': afmsg['date']}
            else:
                defer.returnValue(dict(
                    ok=False,
                    desc="Message to search after doesn't exist."))
        defer.returnValue((yield showSearch(parameters, int(page), request)))


@require_auth
@defer.inlineCallbacks
def cmd_feed(request, page="0"):
    """Show the authed user's feed (20 entries per page)."""
    page = int(page) if page else 0
    feed = yield objs.FeedElement.find_sort(
        {'user': request.user['name']},
        [('_id', pymongo.DESCENDING)], limit=20, skip=page * 20)
    messages = [x.filter_fields() for x in (yield objs.Message.find_sort(
        {'id': {'$in': [f['message'] for f in feed]}},
        [('date', pymongo.ASCENDING)]))]
    defer.returnValue(
        dict(ok=True, format="messages", messages=messages,
             desc='Your feed', cache=5)
    )


@defer.inlineCallbacks
def cmd_today(request, use_bl=False):
    """Show the most discussed messages of the last 24 hours
    (precomputed in the Today collection)."""
    bl = get_user_bl(request, use_bl)
    # The Today collection may be momentarily empty while it is being
    # rebuilt; retry a few times.
    for _attempt in range(10):
        postids = [t['_id'] for t in (yield objs.Today.find({}, limit=20))]
        if postids:
            break
    qdict = {'id': {'$in': postids}}
    if bl:
        qdict['user'] = {'$nin': bl}
    dbposts = dict(
        (x['id'], x.filter_fields())
        for x in (yield objs.Message.find(qdict)))
    # Preserve the Today ordering; skip ids that vanished meanwhile.
    messages = [dbposts[pid] for pid in postids if (pid in dbposts)]
    messages = yield set_subscriptions_info(request, messages)
    messages.reverse()
    defer.returnValue(
        # BUG FIX: was 'Today''s most discussed', which Python concatenates
        # to "Todays most discussed" (SQL-style quote escaping mistake).
        dict(ok=True, format="messages", messages=messages,
             desc="Today's most discussed", cache=300)
    )


@defer.inlineCallbacks
def cmd_today2(request):
    """Show the most discussed messages of the last 24 hours
    (live query by reply count)."""
    start = time.time() - 86400
    messages = [x.filter_fields() for x in (yield objs.Message.find_sort(
        {'date': {'$gte': start}},
        [('replycount', pymongo.DESCENDING)], limit=20))]
    messages.reverse()
    defer.returnValue(
        # Same apostrophe fix as cmd_today.
        dict(ok=True, format="messages", messages=messages,
             desc="Today's most discussed", cache=300)
    )
The lights signs are realized in small peripheral towns and exhibited during their most important local festivals, including concerts, events and meetings. Every light sign is a unique piece, which is conceived and produced in collaboration with local artisans, in order to celebrate and decorate the village during these special days. The light sign was donated to local people and is kept by the “Pro Loco” of Frassineto Po, which is the association in charge of promoting local culture and tourism. The light sign was exhibited on the occasion of “Sagra del Peperone” , on the 26th August 2011. A moment during the local festival “Sagra del Peperone”, Frassineto Po, Alessandria. 2010. Neon. Light bulbs, wood, steel. “Italia61” is the reconstruction of the logo of the international expo (Esposizione internazionale del lavoro) held in Turin on the occasion of the celebrations of the centenary of the unification of Italy in 1961. The inscription, originally located at the entrance of the Expo site, was rebuilt by hand as a light-up sign and suspended at the entrance to the Fondazione Sandretto Re Rebaudengo. Every second Friday a potluck open to both museum staff and the public is held in the lobby of the Arizona State University Art Museum in Tempe. Everyone is invited to cook something and share it. The kitchen of the museum has developed into a little culinary laboratory where different communities and cultures that inhabit the vast metropolitan area of Phoenix, Tempe, Scottsdale and Mesa can come together. 2013. Banners. Light signs. Prints on paper, telescopic sticks, Christmas lights, desk lamps. As on old army parade, or a modern sport parade, the six small towns participating in the project “La Bilioteca Fantastica” were represented in six banners, mixing old book illustrations about science-fiction with the names of the towns. They were shown the first time in Venice, as ambassadors of these places, in a collateral event of the Biennale opening. 
A sort of unexpected temporary Sulcis pavilion. The banners were damaged seriously during the event because of a sudden heavy shower, and their remains were transformed into special suspended lamps, to light up the exhibition “La Biblioteca Fantastica” at MAN, Nuoro.
"""Audio utilities: waveform I/O, (mel-)spectrogram extraction and
Griffin-Lim inversion, in both NumPy/librosa and TensorFlow variants.

All tunable constants (sample rate, FFT sizes, dB ranges, ...) come from
the project-level ``hparams`` object.
"""
import librosa
import librosa.filters
import math
import numpy as np
import tensorflow as tf
import scipy

from hparams import hparams


def load_wav(path):
    """Load a wav file as a float waveform at hparams.sample_rate."""
    return librosa.core.load(path, sr=hparams.sample_rate)[0]


def save_wav(wav, path):
    """Write *wav* to *path* as 16-bit PCM, peak-normalized.

    NOTE(review): mutates the input array in place via ``*=``.
    """
    wav *= 32767 / max(0.01, np.max(np.abs(wav)))
    scipy.io.wavfile.write(path, hparams.sample_rate, wav.astype(np.int16))


def preemphasis(x):
    """Apply the pre-emphasis filter 1 - a*z^-1."""
    return scipy.signal.lfilter([1, -hparams.preemphasis], [1], x)


def inv_preemphasis(x):
    """Invert :func:`preemphasis`."""
    return scipy.signal.lfilter([1], [1, -hparams.preemphasis], x)


def spectrogram(y):
    """Normalized log-magnitude linear spectrogram of waveform *y*."""
    D = _stft(preemphasis(y))
    S = _amp_to_db(np.abs(D)) - hparams.ref_level_db
    return _normalize(S)


def inv_spectrogram(spectrogram):
    '''Converts spectrogram to waveform using librosa'''
    S = _db_to_amp(_denormalize(spectrogram) + hparams.ref_level_db)  # Convert back to linear
    return inv_preemphasis(_griffin_lim(S ** hparams.power))          # Reconstruct phase


def inv_spectrogram_tensorflow(spectrogram):
    '''Builds computational graph to convert spectrogram to waveform using TensorFlow.

    Unlike inv_spectrogram, this does NOT invert the preemphasis. The caller should call
    inv_preemphasis on the output after running the graph.
    '''
    S = _db_to_amp_tensorflow(_denormalize_tensorflow(spectrogram) + hparams.ref_level_db)
    return _griffin_lim_tensorflow(tf.pow(S, hparams.power))


def melspectrogram(y):
    """Normalized log-magnitude mel spectrogram of waveform *y*."""
    D = _stft(preemphasis(y))
    S = _amp_to_db(_linear_to_mel(np.abs(D))) - hparams.ref_level_db
    return _normalize(S)


def find_endpoint(wav, threshold_db=-40, min_silence_sec=0.8):
    """Return the sample index after which *wav* is silent.

    Scans with a sliding window of min_silence_sec; the first window whose
    peak falls below threshold_db marks the endpoint.
    """
    window_length = int(hparams.sample_rate * min_silence_sec)
    hop_length = int(window_length / 4)
    threshold = _db_to_amp(threshold_db)
    for x in range(hop_length, len(wav) - window_length, hop_length):
        if np.max(wav[x:x + window_length]) < threshold:
            return x + hop_length
    return len(wav)


def _griffin_lim(S):
    '''librosa implementation of Griffin-Lim
    Based on https://github.com/librosa/librosa/issues/434
    '''
    angles = np.exp(2j * np.pi * np.random.rand(*S.shape))
    # BUG FIX: np.complex is a deprecated alias removed in NumPy 1.24;
    # use the explicit complex dtype instead.
    S_complex = np.abs(S).astype(np.complex128)
    y = _istft(S_complex * angles)
    for i in range(hparams.griffin_lim_iters):
        angles = np.exp(1j * np.angle(_stft(y)))
        y = _istft(S_complex * angles)
    return y


def _griffin_lim_tensorflow(S):
    '''TensorFlow implementation of Griffin-Lim
    Based on https://github.com/Kyubyong/tensorflow-exercises/blob/master/Audio_Processing.ipynb
    '''
    with tf.variable_scope('griffinlim'):
        # TensorFlow's stft and istft operate on a batch of spectrograms; create batch of size 1
        S = tf.expand_dims(S, 0)
        S_complex = tf.identity(tf.cast(S, dtype=tf.complex64))
        y = _istft_tensorflow(S_complex)
        for i in range(hparams.griffin_lim_iters):
            est = _stft_tensorflow(y)
            # Guard against division by zero when normalizing the phase.
            angles = est / tf.cast(tf.maximum(1e-8, tf.abs(est)), tf.complex64)
            y = _istft_tensorflow(S_complex * angles)
        return tf.squeeze(y, 0)


def _stft(y):
    n_fft, hop_length, win_length = _stft_parameters()
    return librosa.stft(y=y, n_fft=n_fft, hop_length=hop_length, win_length=win_length)


def _istft(y):
    _, hop_length, win_length = _stft_parameters()
    return librosa.istft(y, hop_length=hop_length, win_length=win_length)


def _stft_tensorflow(signals):
    # NOTE(review): tf.contrib only exists in TensorFlow 1.x.
    n_fft, hop_length, win_length = _stft_parameters()
    return tf.contrib.signal.stft(signals, win_length, hop_length, n_fft, pad_end=False)


def _istft_tensorflow(stfts):
    n_fft, hop_length, win_length = _stft_parameters()
    return tf.contrib.signal.inverse_stft(stfts, win_length, hop_length, n_fft)


def _stft_parameters():
    """Derive (n_fft, hop_length, win_length) from hparams."""
    n_fft = (hparams.num_freq - 1) * 2
    hop_length = int(hparams.frame_shift_ms / 1000 * hparams.sample_rate)
    win_length = int(hparams.frame_length_ms / 1000 * hparams.sample_rate)
    return n_fft, hop_length, win_length


# Conversions:

_mel_basis = None  # lazily-built mel filterbank, shared across calls


def _linear_to_mel(spectrogram):
    global _mel_basis
    if _mel_basis is None:
        _mel_basis = _build_mel_basis()
    return np.dot(_mel_basis, spectrogram)


def _build_mel_basis():
    n_fft = (hparams.num_freq - 1) * 2
    return librosa.filters.mel(hparams.sample_rate, n_fft, n_mels=hparams.num_mels)


def _amp_to_db(x):
    # Floor at 1e-5 to avoid log of zero.
    return 20 * np.log10(np.maximum(1e-5, x))


def _db_to_amp(x):
    return np.power(10.0, x * 0.05)


def _db_to_amp_tensorflow(x):
    return tf.pow(tf.ones(tf.shape(x)) * 10.0, x * 0.05)


def _normalize(S):
    # Map [min_level_db, 0] dB to [0, 1].
    return np.clip((S - hparams.min_level_db) / -hparams.min_level_db, 0, 1)


def _denormalize(S):
    return (np.clip(S, 0, 1) * -hparams.min_level_db) + hparams.min_level_db


def _denormalize_tensorflow(S):
    return (tf.clip_by_value(S, 0, 1) * -hparams.min_level_db) + hparams.min_level_db
Once support for binding to localhost only is in place, it would be very advantageous to be able to specify the None authentication mechanism, because the user would already require access to the local system (via SSH, for example) and therefore not need to re-authenticate via VNC itself. Allowing for the None authentication mechanism might also be useful for debugging. Once support for binding to localhost only is in place, it would be very advantageous to be able to specify the None authentication mechanism, because the user would already require access to the local system (via SSH, for example) and therefore not need to re-authenticate via VNC itself. Allowing for the `None` authentication mechanism might also be useful for debugging.
""" 莉芮尔订阅坏狗发出的每一条tick信息 本模块基于zeromq实现, 通过创建一个socket到指定端口, 我们可以收取坏狗群发的每一条tick信息 用户还可以设置关键词来过滤我们不需要的tick信息(暂未实现) """ import zmq import threading class Subscriber: """ 订阅者 """ def __init__(self, context, address, tick_filter=""): """ 构造函数 @param context 通信上下文, 进程唯一 @param address 发送tick信息的服务器的地址 @param tick_filter 过滤器, 通过设置过滤器来滤掉不需要的tick信息 """ self.filter = tick_filter self.context = context self.address = address self.socket = None self.handler = None self.quit_event = threading.Event() self.quit_event.clear() def start(self, callback=None): """ 开始接收tick信息 @param callback 设置一个回调函数, 每次接收有用的tick信息后, 都会调用此函数, 如果不提供, 仅仅打印tick信息 """ if callback and not hasattr(callback, "__call__"): print("%s cannot be invoked" % str(callback)) return # 如果工作线程已经存在, 应当首先关闭, 再创建新的 if self.handler and not self.quit_event: self.quit_event.set() self.handler.join() # 开启工作线程 self.quit_event.clear() self.handler = threading.Thread(target=Subscriber.work_thread, args=(None, self.context, self.address, self.quit_event, callback)) self.handler.start() def stop(self): """ 停止接收tick信息 """ if self.handler: self.quit_event.set() self.handler.join() self.handler = None def work_thread(self, *args): """ 工作线程 """ # 准备socket socket = args[0].socket(zmq.SUB) socket.connect(args[1]) socket.setsockopt_string(zmq.SUBSCRIBE, '') print("Subscriber is collecting tick information......") # 工作循环 quit_event = args[2] callback = args[3] while not quit_event.is_set(): tick_info = socket.recv_string() if callback: callback(tick_info) else: print(tick_info) # 退出, 清除资源 socket.close() quit_event.clear() print("Subscriber has stopped, no more tick information will be collected.") if __name__ == "__main__": sub = Subscriber(zmq.Context(), "tcp://192.168.61.8:16888", tick_filter="") sub.start()
As long as their poo is of a firm consistency with no mucous then it’s ok by me. I think that fibre is a good thing for intestinal health but of course it will lead to larger poos. When I fed my dogs raw, and more recently a high meat content wet food I wasn’t happy with their stools because they were very small and hard. I could see the dogs were taking a while to push it out, as if they were constipated. On BB their stools are larger than the aforementioned wet food but I class them as good because they are firm and the dogs aren’t struggling to defaecate. So Barney has been on BB for 4 weeks now. The past week has seen the majority of his bowl made up of BB Turkey recipe. I have noticed an increasingly large amount of mucous in his stools and wondering why this could be happening? This is strange and as there are a number of conditions that can cause mucous, it is impossible to say whether it is due to food. Intolerance, infection, colitis, scavenging, foreign bodies are just a few things that can lead to mucous in the stool. I can’t think of anything in BB Turkey recipe that should cause problems but it isn’t impossible that there is one ingredient that might not suit. The only thing I can suggest is to a) see your vet and take a sample of the stool, b) contact Butternut Box, c) try a pre/probiotic such as Lintbells YuDigest. Please can you let us know how you get on? It is most strange and I am puzzled and would like to know the outcome. I reduced the BB a little over the weekend, Increasing the amount of small diced raw celery, red pepper and cooked sweet potato. It seems to have settled, so going to keep an eye on it this week. It’s good that the mucous has abated. Very odd though - wonder what caused it. Barney, is still doing extremely well on BB and no more mucous in his stool. I accidentally ordered a lot of the beef recipe which being too lazy to send send back, decided to see how he managed on it. 
It's quite high fat compared to their other recipes, but he took to it really well and with no noticeable side effects. For my next order I will skew in favour of the Lamb and Turkey recipes like before but include a few beef for variation. His bowl is now made up of 75% BB, and the rest is small chopped veg (red bell pepper, celery, sweet potato) and a small amount of Simpsons sensitive kibble. After I have finished the kibble, ideally I would like to get a cold pressed food to add into his BB and veg. I want to do this because I'm fairly off the idea of kibble generally now,but I'm not sure how silly this is given that there is so little going into each of his meals and he's not having any issues with the Simpsons, it seems. Anyway, If anyone would like to try the Butternut box then I am able to provide a discount code (75% off the first order of 3 weeks supply). PM me if you're interested. This works in my favour too, as I get 75% off my next order if I recommend people. So don't be shy! Thank you for the update. I am pleased that your dog is improving with his new food. I would think that cold pressed food would go well with BB. 1 Like stringz likes this. I feed cold pressed alongside Butternut Box. My dog has three meals a day so she has Butternut morning and evening and Tribal TLC cold pressed for lunch. 2 Likes Dottie and stringz like this. Butternut Box has new and improved packaging. The names of the four types have changed but not the recipes. More information here. Still no word on the planned fish and pork varieties. 3 Likes Tinyplanets, Seaweed and Petmum like this. I can't find any information on what material the new pouches are made of, or the old ones for that matter. I wasn't really aware that the food was cooked after it was in the pouches either. That worries me a bit and makes me think of all the warnings about drinking water from a plastic bottle that has been in the sun as heating the bottle can cause chemicals to leach into the water. 
Hello chloeg04 - welcome to the forum. My understanding is that the new fish variety was due in February but was not sure about the pork one. I enquired about both new varieties earlier this year but the spokesperson just said that they would announce them on social media and their website. I agree with you about fish being absent from the range. For now, perhaps consider adding some steamed white fish or sardines sometimes? Another option would be to mix as Petmum describes earlier. The new Forthglade sardines and sweet potato might be worth looking at. They also have fish and brown rice and salmon and potato. I have previously mixed BB with Forthglade with no problems. From what I read on the Internet mixing BB is not uncommon and perhaps makes it more affordable for some pet owners. Regarding packaging, I too was not aware that cooking took place within the pouches but I understand that low temperatures are used so perhaps that is less problematic? Not sure tbh. On the subject of the home cooked claim, I agree with BB in that it is difficult to describe their product accurately. I had the same dilemma myself when I started this thread. I wasn't sure whether it should be in the Home Cooking section of the forum. In the end I viewed it as another commercial dog food product and used the Dog Food section. I have seen, and wondered about this 'home cooked' claim many times in relation to human food. It's often made in eating establishments, particularly public houses who offer meals. Clearly it is not home cooked because the food is prepared in a commercial kitchen and may even be brought in from a factory. Should this judgement by the Advertising Standards Authority have wider ramifications? 
As a BB customer my dog was lucky enough to trial the new pork and fish recipes back in December and I can confirm that the fish was not tuna but also not salmon-it was white fish and my understanding is that they will use different varieties of white fish in the recipes depending upon what is more readily available throughout the seasons. Pork is due to be released at he same time I believe but so far there is still no date-Butternut keep saying soon. I am also attaching the founders' response to the ASA ruling. As for the food being cooked in the pouches you may wish to contact Butternut direct about this. They are usually very helpful and informative in resolving any queries. Personally I would prefer if it wasn't cooked in the pouch but I also take into account the size of the operation they're running and that this is the most practical method. As I understand it their food is cooked at low temperatures and many human foods are also cooked in this way-microwave meals are often designed to be reheated in plastic trays at much higher temperatures. 1 Like Dottie likes this. Thank you Petmum. I agree with them about their products not fitting into established pet food categories. Hopefully the staff will come up with a satisfactory description of their food. Different Dog use the description ‘hand-made’. 1 Like Petmum likes this. 'It's absolutely safe to cook and freeze our food in the pouches. We use a multilayer film which is designed especially for cooking human foods and is completely BPA (bisphenol A) free. It's the same type of film that you might find on fresh or cooked meats, cheese, fish, ready meals, pasta and bakery products.
# This example demonstrates insertion of endo-compartments into the
# dendrite. Here endo_compartments are used for the endoplasmic reticulum
# (ER) in a model of Calcium Induced Calcium Release through the
# IP3 receptor. It generates a series of propagating waves of calcium.
# Note that units of permeability in the ConcChan are 1/(millimolar.sec)
#
# Copyright (C) Upinder S. Bhalla NCBS 2018
# Released under the terms of the GNU Public License V3.
# Converted to a test by Dilawar Singh, 2020

import matplotlib as mpl
mpl.use('Agg')  # headless backend: the test must run without a display
import os
import moose
print("[INFO ] Using moose from %s (%s)" % (moose.__file__, moose.version()))
import numpy as np
import rdesigneur as rd
np.set_printoptions(precision=3)

sdir_ = os.path.dirname(os.path.realpath(__file__))

# Reference values: (mean, std) of each recorded table from a known-good
# run; the simulation output below is compared against these.
E = (np.array([1.09014453e-07, 7.28082797e-13, 2.75389935e-08, 4.09373273e-01,
       5.13839676e-04, 5.04392239e-04, 5.18535951e-04, 5.20332653e-04,
       5.20319412e-04, 5.20315927e-04, 5.20315785e-04, 5.20315780e-04,
       5.20315780e-04, 5.20315780e-04, 5.13839676e-04, 5.04392239e-04,
       5.18535951e-04, 5.20332653e-04, 5.20319412e-04, 5.20315927e-04,
       5.20315785e-04, 5.20315780e-04, 5.20315780e-04, 5.20315780e-04,
       4.03334121e-01, 4.04616316e-01, 4.03839819e-01, 4.03873596e-01,
       4.03877574e-01, 4.03877276e-01, 4.03877250e-01, 4.03877249e-01,
       4.03877249e-01, 4.03877249e-01, 1.08136177e-06, 1.03726538e-06,
       1.04624969e-06, 1.04989891e-06, 1.05005782e-06, 1.05006129e-06,
       1.05006147e-06, 1.05006148e-06, 1.05006148e-06, 1.05006148e-06]),
     np.array([2.64763531e-06, 3.53901405e-12, 1.06297817e-07, 2.59647692e-05,
       1.50771752e-03, 1.44372345e-03, 1.46452771e-03, 1.46445738e-03,
       1.46426743e-03, 1.46425938e-03, 1.46425914e-03, 1.46425913e-03,
       1.46425913e-03, 1.46425913e-03, 1.50771752e-03, 1.44372345e-03,
       1.46452771e-03, 1.46445738e-03, 1.46426743e-03, 1.46425938e-03,
       1.46425914e-03, 1.46425913e-03, 1.46425913e-03, 1.46425913e-03,
       1.26799318e-02, 1.15981501e-02, 1.19280784e-02, 1.20059244e-02,
       1.20092971e-02, 1.20092807e-02, 1.20092772e-02, 1.20092772e-02,
       1.20092772e-02, 1.20092772e-02, 2.11602709e-06, 2.06303080e-06,
       2.08117025e-06, 2.08584557e-06, 2.08603181e-06, 2.08603541e-06,
       2.08603560e-06, 2.08603562e-06, 2.08603562e-06, 2.08603562e-06])
    )


def test_CICR():
    """Test CICR

    Builds a single-compartment soma with an ER endo-compartment, loads
    the CICRwithConcChan chemical model, triggers a calcium wave with an
    IP3 bolus at one end, runs 20 s and compares per-table mean/std
    against the reference values in E.
    """
    rdes = rd.rdesigneur(
        turnOffElec=True,
        chemDt=0.005,
        chemPlotDt=0.02,
        numWaveFrames=200,
        diffusionLength=1e-6,
        useGssa=False,
        addSomaChemCompt=False,
        addEndoChemCompt=True,
        # cellProto syntax: ['somaProto', 'name', dia, length]
        cellProto=[['somaProto', 'soma', 2e-6, 10e-6]],
        chemProto=[[os.path.join(sdir_, 'chem', 'CICRwithConcChan.g'),
                    'chem']],
        chemDistrib=[['chem', 'soma', 'install', '1']],
        plotList=[
            ['soma', '1', 'dend/CaCyt', 'conc', 'Dendritic Ca'],
            ['soma', '1', 'dend/CaCyt', 'conc', 'Dendritic Ca', 'wave'],
            ['soma', '1', 'dend_endo/CaER', 'conc', 'ER Ca'],
            ['soma', '1', 'dend/ActIP3R', 'conc', 'active IP3R'],
        ],
    )
    rdes.buildModel()
    # Raise IP3 everywhere, with a strong bolus in the first voxel to
    # seed the propagating wave.
    IP3 = moose.element('/model/chem/dend/IP3')
    IP3.vec.concInit = 0.004
    IP3.vec[0].concInit = 0.02
    moose.reinit()
    moose.start(20)
    data = [t.vector for t in moose.wildcardFind('/##[TYPE=Table2]')]
    m, s = np.mean(data, axis=1), np.std(data, axis=1)
    # print(np.array_repr(m))
    # print(np.array_repr(s))
    # In multithreaded mode, the numers are not exactly the same as in
    # expected.
    assert np.allclose(m, E[0], rtol=1e-2, atol=1e-4), (m - E[0])
    # standard deviation could be very low in some cases.
    print(np.sum(abs(s - E[1])))
    assert np.sum(abs(s - E[1])) < 1e-2, "Got %s" % np.sum(abs(s - E[1]))
    print('done')


if __name__ == '__main__':
    test_CICR()
Jonny Laughton is owner and investor at Armada Ventures, which specialises in securing Series A to Series C funding for B2B SaaS companies in Europe. Early on in my career I founded Star Technology Services where I spent the best part of 10 years building the business. When I decided I needed a new challenge, I convinced the Messagelabs board to take me on as EMEA partner director for four years before leaving to take some time out. My next venture was totally left field; I teamed up with some healthcare guys and set up a new healthcare fund investing in cancer technology. After realising it wasn’t really my forte, I knew I needed to get back into the tech space where I could add some real value, so I started working on a consultancy basis with tech startups which was great but they never had any money to go and execute plans. So I put two and two together and made a few contacts in the VC space and went out to start raising money for them and advising them how to spend it. That quickly turned into fundraising, and so for the last four years, under a brand I set up called Armada Ventures, I fundraise between £1-7 million for early to mid-stage B2B SaaS companies. Getting a small cancer fund up and running without having much clue how to do it – we managed to raise a little bit of money during a difficult time (when the recession hit) and in a difficult sector – the business still ticks along and I’m still involved on a day-a-week basis. Since then, I think investing in fundraising and managing to get that business off the ground in addition to saying no to projects that I may not be able to deliver has been key to success – reputation is everything in a high-risk space. Starting a business in a sector I knew nothing about wasn’t necessarily a mistake in hindsight as it helped me get into the investment space, but at the time I had a lot of doubts. Would I do it again? It’s highly unlikely as it was very challenging. 
Another is get to know the teams – beyond the founder and the senior team – better before you invest. Remember, you’re fundraising for people not businesses, so look at it as a huge emotional and human process. It’s all about finding people with the drive, ambition, talent and vision. If you find people that complement each other and people that you can trust to get on with the job, you’ll drive the business forward. I’ve worked with many recruiters who don’t really listen to what you want, so often you get recommended the wrong people. But Michelle is different, she takes all the noise away. She understands you straight away and goes beyond finding people with the right qualifications and experience to find people who are going to culturally fit.
import re

__version__ = '0.2.1'


class TranslationRule(object):
    """Base type for module-name -> package-name translation rules."""
    pass


class SingleRule(TranslationRule):
    """Translates one exact module name into a package name.

    An optional ``distmap`` of ``{distro-regex: package}`` entries lets a
    specific distribution override the generic package name.
    """

    def __init__(self, mod, pkg, distmap=None):
        self.mod = mod
        self.pkg = pkg
        self.distmap = distmap

    def __call__(self, mod, dist):
        # This rule only fires for the exact module it was created for.
        if mod != self.mod:
            return None
        # A distro-specific override, when one matches, wins over the
        # generic mapping.
        if self.distmap and dist:
            for pattern in self.distmap:
                if re.match(pattern, dist):
                    return self.distmap[pattern]
        return self.pkg


class MultiRule(TranslationRule):
    """Applies a single translation function to a whole set of modules."""

    def __init__(self, mods, pkgfun):
        self.mods = mods
        self.pkgfun = pkgfun

    def __call__(self, mod, dist):
        return self.pkgfun(mod) if mod in self.mods else None


def default_tr(mod):
    """Fallback translation: drop a '-python' suffix, normalise
    separators ('_' and '.' become '-'), lowercase, and make sure the
    result carries a 'python-' prefix."""
    name = mod.rsplit('-python')[0]
    name = name.replace('_', '-').replace('.', '-').lower()
    return name if name.startswith('python-') else 'python-' + name


def exact_tr(mod):
    """Identity translation: the package is named after the module."""
    return mod


def openstack_prefix_tr(mod):
    """Translation for core services: prefix the name with 'openstack-'."""
    return 'openstack-' + mod


RDO_PKG_MAP = [
    # This demonstrates per-dist filter
    #SingleRule('sphinx', 'python-sphinx',
    #           distmap={'epel-6': 'python-sphinx10'}),
    SingleRule('distribute', 'python-setuptools'),
    SingleRule('pyopenssl', 'pyOpenSSL'),
    SingleRule('IPy', 'python-IPy'),
    SingleRule('pycrypto', 'python-crypto'),
    SingleRule('pyzmq', 'python-zmq'),
    SingleRule('mysql-python', 'MySQL-python'),
    SingleRule('PasteDeploy', 'python-paste-deploy'),
    SingleRule('sqlalchemy-migrate', 'python-migrate'),
    SingleRule('qpid-python', 'python-qpid'),
    SingleRule('posix_ipc', 'python-posix_ipc'),
    SingleRule('oslosphinx', 'python-oslo-sphinx'),
    MultiRule(
        mods=['PyYAML', 'm2crypto', 'numpy', 'pyflakes', 'pylint',
              'pyparsing', 'pytz', 'pysendfile', 'libvirt-python'],
        pkgfun=lambda x: x),
    MultiRule(
        mods=['nova', 'keystone', 'glance', 'swift', 'neutron'],
        pkgfun=openstack_prefix_tr),
]

SUSE_PKG_MAP = [
    # Do what you gotta do ;)
]


def get_pkg_map(dist):
    """Pick the translation table appropriate for distribution *dist*."""
    if 'suse' in dist.lower():
        return SUSE_PKG_MAP
    return RDO_PKG_MAP


def module2package(mod, dist, pkg_map=None):
    """Return a corresponding package name for a python module.

    mod -- python module name
    dist -- a linux distribution as returned by
            `platform.linux_distribution()[0]`
    """
    rules = pkg_map or get_pkg_map(dist)
    for rule in rules:
        translated = rule(mod, dist)
        if translated:
            return translated
    # No explicit rule matched: fall back to the generic naming scheme.
    return default_tr(mod)
A name defines who we are. It’s written on our driver’s licenses, school IDs, insurance cards. It’s an automatic link towards yourself. When you think of a person, you think of their name, a set of letters fitted together that essentially represents an entire person. When the first day of school comes around, our names become defined once again as teachers call them forth from a long attendance list. It goes by slowly; the teacher reads through all the familiar names like “Jack” and “Anna” easily and narrowly avoids the “Katherine” vs. “Kate” debacle, but it’s over the more unique names where they find the trouble. But unlike those that proudly state that they’d rather go by “Jenn” than “Jennifer,” I tend to just let it slide by, stating that “it’s fine” or “I don’t mind either.” And it’s true. I don’t mind that my name is pronounced several different ways to the point where some days I’m not even sure of the correct spelling. It’s been that way for almost ten years, and it’s not just been something associated with me. I’ve seen this problem throughout my community, and the truth is that our names define us. They’re an essential part of our lives, made the moment we are born and forever engraved when we are dead. It may not seem important, and it might not even be important. But if we are to have one thing in life, let it at least be our names.
# Licensed under a 3-clause BSD style license - see LICENSE.rst
# -*- coding: utf-8 -*-
"""
.. _warnings:

Warnings
--------

.. note::

    Most of the following warnings indicate violations of the VOTable
    specification.  They should be reported to the authors of the
    tools that produced the VOTable file.

    To control the warnings emitted, use the standard Python
    :mod:`warnings` module.  Most of these are of the type
    `VOTableSpecWarning`.

{warnings}

.. _exceptions:

Exceptions
----------

.. note::

    This is a list of many of the fatal exceptions emitted by vo.table
    when the file does not conform to spec.  Other exceptions may be
    raised due to unforeseen cases or bugs in vo.table itself.

{exceptions}
"""

# STDLIB
import io
import re
from textwrap import dedent
from warnings import warn

from ...utils.exceptions import AstropyWarning

__all__ = [
    'warn_or_raise', 'vo_raise', 'vo_reraise', 'vo_warn',
    'warn_unknown_attrs', 'parse_vowarning', 'VOWarning',
    'VOTableChangeWarning', 'VOTableSpecWarning',
    'UnimplementedWarning', 'IOWarning', 'VOTableSpecError']

# Maximum number of times a given warning class is emitted per parse
# before further occurrences are suppressed (see _suppressed_warning).
MAX_WARNINGS = 10


def _format_message(message, name, config=None, pos=None):
    # Build the canonical "filename:line:char: ClassName: message" string
    # used for every warning/exception emitted by this module.
    if config is None:
        config = {}
    if pos is None:
        pos = ('?', '?')
    filename = config.get('filename', '?')
    return '{}:{}:{}: {}: {}'.format(filename, pos[0], pos[1], name, message)


def _suppressed_warning(warning, config, stacklevel=2):
    # Emit *warning* unless its class has already fired MAX_WARNINGS times
    # for this parse; the counters live in the shared ``config`` dict.
    warning_class = type(warning)
    config.setdefault('_warning_counts', dict()).setdefault(warning_class, 0)
    config['_warning_counts'][warning_class] += 1
    message_count = config['_warning_counts'][warning_class]
    if message_count <= MAX_WARNINGS:
        if message_count == MAX_WARNINGS:
            # Last one we will show — tell the user the rest are dropped.
            warning.formatted_message += \
                ' (suppressing further warnings of this type...)'
        warn(warning, stacklevel=stacklevel+1)


def warn_or_raise(warning_class, exception_class=None, args=(), config=None,
                  pos=None, stacklevel=1):
    """
    Warn or raise an exception, depending on the pedantic setting.
    """
    if config is None:
        config = {}
    if config.get('pedantic'):
        # Pedantic mode: escalate to an exception.  If no dedicated
        # exception class is given, the warning class doubles as one.
        if exception_class is None:
            exception_class = warning_class
        vo_raise(exception_class, args, config, pos)
    else:
        vo_warn(warning_class, args, config, pos, stacklevel=stacklevel+1)


def vo_raise(exception_class, args=(), config=None, pos=None):
    """
    Raise an exception, with proper position information if available.
    """
    if config is None:
        config = {}
    raise exception_class(args, config, pos)


def vo_reraise(exc, config=None, pos=None, additional=''):
    """
    Raise an exception, with proper position information if available.

    Restores the original traceback of the exception, and should only
    be called within an "except:" block of code.
    """
    if config is None:
        config = {}
    message = _format_message(str(exc), exc.__class__.__name__, config, pos)
    # Avoid prefixing position info twice if the message already has it.
    if message.split()[0] == str(exc).split()[0]:
        message = str(exc)
    if len(additional):
        message += ' ' + additional
    exc.args = (message,)
    raise exc


def vo_warn(warning_class, args=(), config=None, pos=None, stacklevel=1):
    """
    Warn, with proper position information if available.
    """
    if config is None:
        config = {}
    warning = warning_class(args, config, pos)
    _suppressed_warning(warning, config, stacklevel=stacklevel+1)


# NOTE(review): mutable default ``good_attr=[]`` — never mutated here,
# but a shared-default hazard if a future change appends to it.
def warn_unknown_attrs(element, attrs, config, pos, good_attr=[], stacklevel=1):
    # Emit W48 for every attribute not in the whitelist for *element*.
    for attr in attrs:
        if attr not in good_attr:
            vo_warn(W48, (attr, element), config, pos, stacklevel=stacklevel+1)


# Matches the strings produced by _format_message (minus the filename),
# e.g. ":12:34: W01: Array uses commas rather than whitespace".
_warning_pat = re.compile(
    (r":?(?P<nline>[0-9?]+):(?P<nchar>[0-9?]+): " +
     r"((?P<warning>[WE]\d+): )?(?P<rest>.*)$"))


def parse_vowarning(line):
    """
    Parses the vo warning string back into its parts.
    """
    result = {}
    match = _warning_pat.search(line)
    if match:
        result['warning'] = warning = match.group('warning')
        if warning is not None:
            # Code letter distinguishes warnings ("W##") from
            # exceptions ("E##").
            result['is_warning'] = (warning[0].upper() == 'W')
            result['is_exception'] = not result['is_warning']
            result['number'] = int(match.group('warning')[1:])
            result['doc_url'] = "io/votable/api_exceptions.html#{0}".format(
                warning.lower())
        else:
            # Positional info matched but no W/E code was present.
            result['is_warning'] = False
            result['is_exception'] = False
            result['is_other'] = True
            result['number'] = None
            result['doc_url'] = None
        # Line/char may be '?' placeholders; treat those as 0.
        try:
            result['nline'] = int(match.group('nline'))
        except ValueError:
            result['nline'] = 0
        try:
            result['nchar'] = int(match.group('nchar'))
        except ValueError:
            result['nchar'] = 0
        result['message'] = match.group('rest')
        result['is_something'] = True
    else:
        # Not a vo warning string at all — return the raw text as message.
        result['warning'] = None
        result['is_warning'] = False
        result['is_exception'] = False
        result['is_other'] = False
        result['is_something'] = False
        if not isinstance(line, str):
            line = line.decode('utf-8')
        result['message'] = line

    return result


class VOWarning(AstropyWarning):
    """
    The base class of all VO warnings and exceptions.

    Handles the formatting of the message with a warning or exception
    code, filename, line and column number.
    """
    # Example arguments used by get_short_name() to render the template.
    default_args = ()
    # ``str.format`` template filled with the args passed to __init__.
    message_template = ''

    def __init__(self, args, config=None, pos=None):
        if config is None:
            config = {}
        # A single bare argument is accepted and wrapped in a tuple.
        if not isinstance(args, tuple):
            args = (args, )
        msg = self.message_template.format(*args)

        self.formatted_message = _format_message(
            msg, self.__class__.__name__, config, pos)
        Warning.__init__(self, self.formatted_message)

    def __str__(self):
        return self.formatted_message

    @classmethod
    def get_short_name(cls):
        # Template rendered with placeholder args, for documentation.
        if len(cls.default_args):
            return cls.message_template.format(*cls.default_args)
        return cls.message_template


class VOTableChangeWarning(VOWarning, SyntaxWarning):
    """
    A change has been made to the input XML file.
    """


class VOTableSpecWarning(VOWarning, SyntaxWarning):
    """
    The input XML file violates the spec, but there is an obvious
    workaround.
    """


class UnimplementedWarning(VOWarning, SyntaxWarning):
    """
    A feature of the VOTABLE_ spec is not implemented.
    """


class IOWarning(VOWarning, RuntimeWarning):
    """
    A network or IO error occurred, but was recovered using the cache.
    """


class VOTableSpecError(VOWarning, ValueError):
    """
    The input XML file violates the spec and there is no good workaround.
    """


class W01(VOTableSpecWarning):
    """
    The VOTable spec states:

        If a cell contains an array or complex number, it should be
        encoded as multiple numbers separated by whitespace.

    Many VOTable files in the wild use commas as a separator instead,
    and ``vo.table`` supports this convention when not in
    :ref:`pedantic-mode`.

    ``vo.table`` always outputs files using only spaces, regardless of
    how they were input.

    **References**: `1.1
    <http://www.ivoa.net/Documents/VOTable/20040811/REC-VOTable-1.1-20040811.html#toc-header-35>`__,
    `1.2
    <http://www.ivoa.net/Documents/VOTable/20091130/REC-VOTable-1.2.html#sec:TABLEDATA>`__
    """

    message_template = "Array uses commas rather than whitespace"


class W02(VOTableSpecWarning):
    r"""
    XML ids must match the following regular expression::

        ^[A-Za-z_][A-Za-z0-9_\.\-]*$

    The VOTable 1.1 says the following:

        According to the XML standard, the attribute ``ID`` is a
        string beginning with a letter or underscore (``_``), followed
        by a sequence of letters, digits, or any of the punctuation
        characters ``.`` (dot), ``-`` (dash), ``_`` (underscore), or
        ``:`` (colon).

    However, this is in conflict with the XML standard, which says
    colons may not be used.  VOTable 1.1's own schema does not allow a
    colon here.  Therefore, ``vo.table`` disallows the colon.

    VOTable 1.2 corrects this error in the specification.

    **References**: `1.1
    <http://www.ivoa.net/Documents/VOTable/20040811/REC-VOTable-1.1-20040811.html#sec:name>`__,
    `XML Names <http://www.w3.org/TR/REC-xml/#NT-Name>`__
    """

    message_template = "{} attribute '{}' is invalid. Must be a standard XML id"
    default_args = ('x', 'y')


class W03(VOTableChangeWarning):
    """
    The VOTable 1.1 spec says the following about ``name`` vs. ``ID``
    on ``FIELD`` and ``VALUE`` elements:

        ``ID`` and ``name`` attributes have a different role in
        VOTable: the ``ID`` is meant as a *unique identifier* of an
        element seen as a VOTable component, while the ``name`` is
        meant for presentation purposes, and need not to be unique
        throughout the VOTable document.  The ``ID`` attribute is
        therefore required in the elements which have to be
        referenced, but in principle any element may have an ``ID``
        attribute.  ...  In summary, the ``ID`` is different from the
        ``name`` attribute in that (a) the ``ID`` attribute is made
        from a restricted character set, and must be unique throughout
        a VOTable document whereas names are standard XML attributes
        and need not be unique; and (b) there should be support in the
        parsing software to look up references and extract the
        relevant element with matching ``ID``.

    It is further recommended in the VOTable 1.2 spec:

        While the ``ID`` attribute has to be unique in a VOTable
        document, the ``name`` attribute need not.  It is however
        recommended, as a good practice, to assign unique names within
        a ``TABLE`` element.  This recommendation means that, between
        a ``TABLE`` and its corresponding closing ``TABLE`` tag,
        ``name`` attributes of ``FIELD``, ``PARAM`` and optional
        ``GROUP`` elements should be all different.

    Since ``vo.table`` requires a unique identifier for each of its
    columns, ``ID`` is used for the column name when present.
    However, when ``ID`` is not present, (since it is not required by
    the specification) ``name`` is used instead.  However, ``name``
    must be cleansed by replacing invalid characters (such as
    whitespace) with underscores.

    .. note::
        This warning does not indicate that the input file is invalid
        with respect to the VOTable specification, only that the
        column names in the record array may not match exactly the
        ``name`` attributes specified in the file.

    **References**: `1.1
    <http://www.ivoa.net/Documents/VOTable/20040811/REC-VOTable-1.1-20040811.html#sec:name>`__,
    `1.2
    <http://www.ivoa.net/Documents/VOTable/20091130/REC-VOTable-1.2.html#sec:name>`__
    """

    message_template = "Implicitly generating an ID from a name '{}' -> '{}'"
    default_args = ('x', 'y')


class W04(VOTableSpecWarning):
    """
    The ``content-type`` attribute must use MIME content-type syntax as
    defined in `RFC 2046 <https://tools.ietf.org/html/rfc2046>`__.

    The current check for validity is somewhat over-permissive.

    **References**: `1.1
    <http://www.ivoa.net/Documents/VOTable/20040811/REC-VOTable-1.1-20040811.html#sec:link>`__,
    `1.2
    <http://www.ivoa.net/Documents/VOTable/20091130/REC-VOTable-1.2.html#sec:link>`__
    """

    message_template = "content-type '{}' must be a valid MIME content type"
    default_args = ('x',)


class W05(VOTableSpecWarning):
    """
    The attribute must be a valid URI as defined in `RFC 2396
    <http://www.ietf.org/rfc/rfc2396.txt>`_.
    """

    message_template = "'{}' is not a valid URI"
    default_args = ('x',)


class W06(VOTableSpecWarning):
    """
    This warning is emitted when a ``ucd`` attribute does not match the
    syntax of a `unified content descriptor
    <http://vizier.u-strasbg.fr/doc/UCD.htx>`__.

    If the VOTable version is 1.2 or later, the UCD will also be
    checked to ensure it conforms to the controlled vocabulary defined
    by UCD1+.

    **References**: `1.1
    <http://www.ivoa.net/Documents/VOTable/20040811/REC-VOTable-1.1-20040811.html#sec:ucd>`__,
    `1.2
    <http://www.ivoa.net/Documents/VOTable/20091130/REC-VOTable-1.2.html#sec:ucd>`__
    """

    message_template = "Invalid UCD '{}': {}"
    default_args = ('x', 'explanation')


class W07(VOTableSpecWarning):
    """
    As astro year field is a Besselian or Julian year matching the
    regular expression::

        ^[JB]?[0-9]+([.][0-9]*)?$

    Defined in this XML Schema snippet::

        <xs:simpleType  name="astroYear">
          <xs:restriction base="xs:token">
            <xs:pattern  value="[JB]?[0-9]+([.][0-9]*)?"/>
          </xs:restriction>
        </xs:simpleType>
    """

    message_template = "Invalid astroYear in {}: '{}'"
    default_args = ('x', 'y')


class W08(VOTableSpecWarning):
    """
    To avoid local-dependent number parsing differences, ``vo.table``
    may require a string or unicode string where a numeric type may
    make more sense.
    """

    message_template = "'{}' must be a str or bytes object"
    default_args = ('x',)


class W09(VOTableSpecWarning):
    """
    The VOTable specification uses the attribute name ``ID`` (with
    uppercase letters) to specify unique identifiers.  Some
    VOTable-producing tools use the more standard lowercase ``id``
    instead.  ``vo.table`` accepts ``id`` and emits this warning when
    not in ``pedantic`` mode.

    **References**: `1.1
    <http://www.ivoa.net/Documents/VOTable/20040811/REC-VOTable-1.1-20040811.html#sec:name>`__,
    `1.2
    <http://www.ivoa.net/Documents/VOTable/20091130/REC-VOTable-1.2.html#sec:name>`__
    """

    message_template = "ID attribute not capitalized"


class W10(VOTableSpecWarning):
    """
    The parser has encountered an element that does not exist in the
    specification, or appears in an invalid context.  Check the file
    against the VOTable schema (with a tool such as `xmllint
    <http://xmlsoft.org/xmllint.html>`__.  If the file validates
    against the schema, and you still receive this warning, this may
    indicate a bug in ``vo.table``.

    **References**: `1.1
    <http://www.ivoa.net/Documents/VOTable/20040811/REC-VOTable-1.1-20040811.html#ToC54>`__,
    `1.2
    <http://www.ivoa.net/Documents/VOTable/20091130/REC-VOTable-1.2.html#ToC58>`__
    """

    message_template = "Unknown tag '{}'. Ignoring"
    default_args = ('x',)


class W11(VOTableSpecWarning):
    """
    Earlier versions of the VOTable specification used a ``gref``
    attribute on the ``LINK`` element to specify a `GLU reference
    <http://aladin.u-strasbg.fr/glu/>`__.  New files should
    specify a ``glu:`` protocol using the ``href`` attribute.

    Since ``vo.table`` does not currently support GLU references, it
    likewise does not automatically convert the ``gref`` attribute to
    the new form.

    **References**: `1.1
    <http://www.ivoa.net/Documents/VOTable/20040811/REC-VOTable-1.1-20040811.html#sec:link>`__,
    `1.2
    <http://www.ivoa.net/Documents/VOTable/20091130/REC-VOTable-1.2.html#sec:link>`__
    """

    message_template = "The gref attribute on LINK is deprecated in VOTable 1.1"


class W12(VOTableChangeWarning):
    """
    In order to name the columns of the Numpy record array, each
    ``FIELD`` element must have either an ``ID`` or ``name`` attribute
    to derive a name from.  Strictly speaking, according to the
    VOTable schema, the ``name`` attribute is required.  However, if
    ``name`` is not present but ``ID`` is, and *pedantic mode* is off,
    ``vo.table`` will continue without a ``name`` defined.

    **References**: `1.1
    <http://www.ivoa.net/Documents/VOTable/20040811/REC-VOTable-1.1-20040811.html#sec:name>`__,
    `1.2
    <http://www.ivoa.net/Documents/VOTable/20091130/REC-VOTable-1.2.html#sec:name>`__
    """

    message_template = (
        "'{}' element must have at least one of 'ID' or 'name' attributes")
    default_args = ('x',)


class W13(VOTableSpecWarning):
    """
    Some VOTable files in the wild use non-standard datatype names.  These
    are mapped to standard ones using the following mapping::

       string        -> char
       unicodeString -> unicodeChar
       int16         -> short
       int32         -> int
       int64         -> long
       float32       -> float
       float64       -> double
       unsignedInt   -> long
       unsignedShort -> int

    To add more datatype mappings during parsing, use the
    ``datatype_mapping`` keyword to `astropy.io.votable.parse`.

    **References**: `1.1
    <http://www.ivoa.net/Documents/VOTable/20040811/REC-VOTable-1.1-20040811.html#sec:datatypes>`__,
    `1.2
    <http://www.ivoa.net/Documents/VOTable/20091130/REC-VOTable-1.2.html#sec:datatypes>`__
    """

    message_template = "'{}' is not a valid VOTable datatype, should be '{}'"
    default_args = ('x', 'y')


# W14: Deprecated


class W15(VOTableSpecWarning):
    """
    The ``name`` attribute is required on every ``FIELD`` element.
    However, many VOTable files in the wild omit it and provide only
    an ``ID`` instead.  In this case, when *pedantic mode* is off,
    ``vo.table`` will copy the ``ID`` attribute to a new ``name``
    attribute.

    **References**: `1.1
    <http://www.ivoa.net/Documents/VOTable/20040811/REC-VOTable-1.1-20040811.html#sec:name>`__,
    `1.2
    <http://www.ivoa.net/Documents/VOTable/20091130/REC-VOTable-1.2.html#sec:name>`__
    """

    message_template = "{} element missing required 'name' attribute"
    default_args = ('x',)


# W16: Deprecated


class W17(VOTableSpecWarning):
    """
    A ``DESCRIPTION`` element can only appear once within its parent
    element.

    According to the schema, it may only occur once (`1.1
    <http://www.ivoa.net/Documents/VOTable/20040811/REC-VOTable-1.1-20040811.html#ToC54>`__,
    `1.2
    <http://www.ivoa.net/Documents/VOTable/20091130/REC-VOTable-1.2.html#ToC58>`__)

    However, it is a `proposed extension
    <http://www.ivoa.net/Documents/VOTable/20091130/REC-VOTable-1.2.html#sec:addesc>`__
    to VOTable 1.2.
    """

    message_template = "{} element contains more than one DESCRIPTION element"
    default_args = ('x',)


class W18(VOTableSpecWarning):
    """
    The number of rows explicitly specified in the ``nrows`` attribute
    does not match the actual number of rows (``TR`` elements) present
    in the ``TABLE``.  This may indicate truncation of the file, or an
    internal error in the tool that produced it.  If *pedantic mode*
    is off, parsing will proceed, with the loss of some performance.

    **References:** `1.1
    <http://www.ivoa.net/Documents/VOTable/20040811/REC-VOTable-1.1-20040811.html#ToC10>`__,
    `1.2
    <http://www.ivoa.net/Documents/VOTable/20091130/REC-VOTable-1.2.html#ToC10>`__
    """

    message_template = 'TABLE specified nrows={}, but table contains {} rows'
    default_args = ('x', 'y')


class W19(VOTableSpecWarning):
    """
    The column fields as defined using ``FIELD`` elements do not match
    those in the headers of the embedded FITS file.  If *pedantic
    mode* is off, the embedded FITS file will take precedence.
    """

    message_template = (
        'The fields defined in the VOTable do not match those in the ' +
        'embedded FITS file')


class W20(VOTableSpecWarning):
    """
    If no version number is explicitly given in the VOTable file, the
    parser assumes it is written to the VOTable 1.1 specification.
    """

    message_template = 'No version number specified in file. Assuming {}'
    default_args = ('1.1',)


class W21(UnimplementedWarning):
    """
    Unknown issues may arise using ``vo.table`` with VOTable files
    from a version other than 1.1, 1.2 or 1.3.
    """

    message_template = (
        'vo.table is designed for VOTable version 1.1, 1.2 and 1.3, but ' +
        'this file is {}')
    default_args = ('x',)


class W22(VOTableSpecWarning):
    """
    Version 1.0 of the VOTable specification used the ``DEFINITIONS``
    element to define coordinate systems.  Version 1.1 now uses
    ``COOSYS`` elements throughout the document.

    **References:** `1.1
    <http://www.ivoa.net/Documents/VOTable/20040811/REC-VOTable-1.1-20040811.html#sec:definitions>`__,
    `1.2
    <http://www.ivoa.net/Documents/VOTable/20091130/REC-VOTable-1.2.html#sec:definitions>`__
    """

    message_template = 'The DEFINITIONS element is deprecated in VOTable 1.1. Ignoring'


class W23(IOWarning):
    """
    Raised when the VO service database can not be updated (possibly
    due to a network outage).  This is only a warning, since an older
    and possible out-of-date VO service database was available
    locally.
    """

    message_template = "Unable to update service information for '{}'"
    default_args = ('x',)


class W24(VOWarning, FutureWarning):
    """
    The VO catalog database retrieved from the www is designed for a
    newer version of vo.table.  This may cause problems or limited
    features performing service queries.  Consider upgrading vo.table
    to the latest version.
    """

    message_template = "The VO catalog database is for a later version of vo.table"


class W25(IOWarning):
    """
    A VO service query failed due to a network error or malformed
    arguments.  Another alternative service may be attempted.  If all
    services fail, an exception will be raised.
    """

    message_template = "'{}' failed with: {}"
    default_args = ('service', '...')


class W26(VOTableSpecWarning):
    """
    The given element was not supported inside of the given element
    until the specified VOTable version, however the version declared
    in the file is for an earlier version.  These attributes may not
    be written out to the file.
    """

    message_template = "'{}' inside '{}' added in VOTable {}"
    default_args = ('child', 'parent', 'X.X')


class W27(VOTableSpecWarning):
    """
    The ``COOSYS`` element was deprecated in VOTABLE version 1.2 in
    favor of a reference to the Space-Time Coordinate (STC) data
    model (see `utype
    <http://www.ivoa.net/Documents/VOTable/20091130/REC-VOTable-1.2.html#sec:utype>`__
    and the IVOA note `referencing STC in VOTable
    <http://ivoa.net/Documents/latest/VOTableSTC.html>`__.
    """

    message_template = "COOSYS deprecated in VOTable 1.2"


class W28(VOTableSpecWarning):
    """
    The given attribute was not supported on the given element until the
    specified VOTable version, however the version declared in the file is
    for an earlier version.  These attributes may not be written out to
    the file.
    """

    message_template = "'{}' on '{}' added in VOTable {}"
    default_args = ('attribute', 'element', 'X.X')


class W29(VOTableSpecWarning):
    """
    Some VOTable files specify their version number in the form "v1.0",
    when the only supported forms in the spec are "1.0".

    **References**: `1.1
    <http://www.ivoa.net/Documents/VOTable/20040811/REC-VOTable-1.1-20040811.html#ToC54>`__,
    `1.2
    <http://www.ivoa.net/Documents/VOTable/20091130/REC-VOTable-1.2.html#ToC58>`__
    """

    message_template = "Version specified in non-standard form '{}'"
    default_args = ('v1.0',)


class W30(VOTableSpecWarning):
    """
    Some VOTable files write missing floating-point values in
    non-standard ways, such as "null" and "-".  In non-pedantic mode,
    any non-standard floating-point literals are treated as missing
    values.

    **References**: `1.1
    <http://www.ivoa.net/Documents/VOTable/20040811/REC-VOTable-1.1-20040811.html#sec:datatypes>`__,
    `1.2
    <http://www.ivoa.net/Documents/VOTable/20091130/REC-VOTable-1.2.html#sec:datatypes>`__
    """

    message_template = "Invalid literal for float '{}'. Treating as empty."
    default_args = ('x',)


class W31(VOTableSpecWarning):
    """
    Since NaN's can not be represented in integer fields directly, a null
    value must be specified in the FIELD descriptor to support reading
    NaN's from the tabledata.

    **References**: `1.1
    <http://www.ivoa.net/Documents/VOTable/20040811/REC-VOTable-1.1-20040811.html#sec:datatypes>`__,
    `1.2
    <http://www.ivoa.net/Documents/VOTable/20091130/REC-VOTable-1.2.html#sec:datatypes>`__
    """

    message_template = "NaN given in an integral field without a specified null value"


class W32(VOTableSpecWarning):
    """
    Each field in a table must have a unique ID.  If two or more fields
    have the same ID, some will be renamed to ensure that all IDs are
    unique.

    From the VOTable 1.2 spec:

        The ``ID`` and ``ref`` attributes are defined as XML types
        ``ID`` and ``IDREF`` respectively.  This means that the
        contents of ``ID`` is an identifier which must be unique
        throughout a VOTable document, and that the contents of the
        ``ref`` attribute represents a reference to an identifier
        which must exist in the VOTable document.

    **References**: `1.1
    <http://www.ivoa.net/Documents/VOTable/20040811/REC-VOTable-1.1-20040811.html#sec:name>`__,
    `1.2
    <http://www.ivoa.net/Documents/VOTable/20091130/REC-VOTable-1.2.html#sec:name>`__
    """

    message_template = "Duplicate ID '{}' renamed to '{}' to ensure uniqueness"
    default_args = ('x', 'x_2')


class W33(VOTableChangeWarning):
    """
    Each field in a table must have a unique name.  If two or more
    fields have the same name, some will be renamed to ensure that all
    names are unique.

    **References**: `1.1
    <http://www.ivoa.net/Documents/VOTable/20040811/REC-VOTable-1.1-20040811.html#sec:name>`__,
    `1.2
    <http://www.ivoa.net/Documents/VOTable/20091130/REC-VOTable-1.2.html#sec:name>`__
    """

    message_template = "Column name '{}' renamed to '{}' to ensure uniqueness"
    default_args = ('x', 'x_2')


class W34(VOTableSpecWarning):
    """
    The attribute requires the value to be a valid XML token, as
    defined by `XML 1.0
    <http://www.w3.org/TR/2000/WD-xml-2e-20000814#NT-Nmtoken>`__.
    """

    message_template = "'{}' is an invalid token for attribute '{}'"
    default_args = ('x', 'y')


class W35(VOTableSpecWarning):
    """
    The ``name`` and ``value`` attributes are required on all ``INFO``
    elements.

    **References:** `1.1
    <http://www.ivoa.net/Documents/VOTable/20040811/REC-VOTable-1.1-20040811.html#ToC54>`__,
    `1.2
    <http://www.ivoa.net/Documents/VOTable/20091130/REC-VOTable-1.2.html#ToC32>`__
    """

    message_template = "'{}' attribute required for INFO elements"
    default_args = ('x',)


class W36(VOTableSpecWarning):
    """
    If the field specifies a ``null`` value, that value must conform
    to the given ``datatype``.

    **References:** `1.1
    <http://www.ivoa.net/Documents/VOTable/20040811/REC-VOTable-1.1-20040811.html#sec:values>`__,
    `1.2
    <http://www.ivoa.net/Documents/VOTable/20091130/REC-VOTable-1.2.html#sec:values>`__
    """

    message_template = "null value '{}' does not match field datatype, setting to 0"
    default_args = ('x',)


class W37(UnimplementedWarning):
    """
    The 3 datatypes defined in the VOTable specification and supported by
    vo.table are ``TABLEDATA``, ``BINARY`` and ``FITS``.

    **References:** `1.1
    <http://www.ivoa.net/Documents/VOTable/20040811/REC-VOTable-1.1-20040811.html#sec:data>`__,
    `1.2
    <http://www.ivoa.net/Documents/VOTable/20091130/REC-VOTable-1.2.html#sec:data>`__
    """

    message_template = "Unsupported data format '{}'"
    default_args = ('x',)


class W38(VOTableSpecWarning):
    """
    The only encoding for local binary data supported by the VOTable
    specification is base64.
    """

    message_template = "Inline binary data must be base64 encoded, got '{}'"
    default_args = ('x',)


class W39(VOTableSpecWarning):
    """
    Bit values do not support masking.  This warning is raised upon
    setting masked data in a bit column.

    **References**: `1.1
    <http://www.ivoa.net/Documents/VOTable/20040811/REC-VOTable-1.1-20040811.html#sec:datatypes>`__,
    `1.2
    <http://www.ivoa.net/Documents/VOTable/20091130/REC-VOTable-1.2.html#sec:datatypes>`__
    """

    message_template = "Bit values can not be masked"


class W40(VOTableSpecWarning):
    """
    This is a terrible hack to support Simple Image Access Protocol
    results from `archive.noao.edu <http://archive.noao.edu>`__.  It
    creates a field for the coordinate projection type of type "double",
    which actually contains character data.  We have to hack the field
    to store character data, or we can't read it in.  A warning will be
    raised when this happens.
    """

    message_template = "'cprojection' datatype repaired"


class W41(VOTableSpecWarning):
    """
    An XML namespace was specified on the ``VOTABLE`` element, but the
    namespace does not match what is expected for a ``VOTABLE`` file.

    The ``VOTABLE`` namespace is::

      http://www.ivoa.net/xml/VOTable/vX.X

    where "X.X" is the version number.

    Some files in the wild set the namespace to the location of the
    VOTable schema, which is not correct and will not pass some
    validating parsers.
    """

    message_template = (
        "An XML namespace is specified, but is incorrect.  Expected " +
        "'{}', got '{}'")
    default_args = ('x', 'y')


class W42(VOTableSpecWarning):
    """
    The root element should specify a namespace.

    The ``VOTABLE`` namespace is::

        http://www.ivoa.net/xml/VOTable/vX.X

    where "X.X" is the version number.
    """

    message_template = "No XML namespace specified"


class W43(VOTableSpecWarning):
    """
    Referenced elements should be defined before referees.  From the
    VOTable 1.2 spec:

       In VOTable1.2, it is further recommended to place the ID
       attribute prior to referencing it whenever possible.
    """

    message_template = "{} ref='{}' which has not already been defined"
    default_args = ('element', 'x',)


class W44(VOTableSpecWarning):
    """
    ``VALUES`` elements that reference another element should not have
    their own content.

    From the VOTable 1.2 spec:

        The ``ref`` attribute of a ``VALUES`` element can be used to
        avoid a repetition of the domain definition, by referring to a
        previously defined ``VALUES`` element having the referenced
        ``ID`` attribute.  When specified, the ``ref`` attribute
        defines completely the domain without any other element or
        attribute, as e.g. ``<VALUES ref="RAdomain"/>``
    """

    message_template = "VALUES element with ref attribute has content ('{}')"
    default_args = ('element',)


class W45(VOWarning, ValueError):
    """
    The ``content-role`` attribute on the ``LINK`` element must be one of
    the following::

        query, hints, doc, location

    And in VOTable 1.3, additionally::

        type

    **References**: `1.1
    <http://www.ivoa.net/Documents/VOTable/20040811/REC-VOTable-1.1-20040811.html#ToC54>`__,
    `1.2
    <http://www.ivoa.net/Documents/VOTable/20091130/REC-VOTable-1.2.html#ToC58>`__
    `1.3
    <http://www.ivoa.net/documents/VOTable/20130315/PR-VOTable-1.3-20130315.html#sec:link>`__
    """

    message_template = "content-role attribute '{}' invalid"
    default_args = ('x',)


class W46(VOTableSpecWarning):
    """
    The given char or unicode string is too long for the specified
    field length.
    """

    message_template = "{} value is too long for specified length of {}"
    default_args = ('char or unicode', 'x')


class W47(VOTableSpecWarning):
    """
    If no arraysize is specified on a char field, the default of '1'
    is implied, but this is rarely what is intended.
    """

    message_template = "Missing arraysize indicates length 1"


class W48(VOTableSpecWarning):
    """
    The attribute is not defined in the specification.
    """

    message_template = "Unknown attribute '{}' on {}"
    default_args = ('attribute', 'element')


class W49(VOTableSpecWarning):
    """
    Prior to VOTable 1.3, the empty cell was illegal for integer
    fields.

    If a \"null\" value was specified for the cell, it will be used
    for the value, otherwise, 0 will be used.
    """

    message_template = "Empty cell illegal for integer fields."


class W50(VOTableSpecWarning):
    """
    Invalid unit string as defined in the `Standards for Astronomical
    Catalogues, Version 2.0
    <http://cdsarc.u-strasbg.fr/doc/catstd-3.2.htx>`_.

    Consider passing an explicit ``unit_format`` parameter if the units
    in this file conform to another specification.
    """

    message_template = "Invalid unit string '{}'"
    default_args = ('x',)


class W51(VOTableSpecWarning):
    """
    The integer value is out of range for the size of the field.
    """

    message_template = "Value '{}' is out of range for a {} integer field"
    default_args = ('x', 'n-bit')


class W52(VOTableSpecWarning):
    """
    The BINARY2 format was introduced in VOTable 1.3.  It should
    not be present in files marked as an earlier version.
    """

    message_template = ("The BINARY2 format was introduced in VOTable 1.3, but "
                        "this file is declared as version '{}'")
    default_args = ('1.2',)


class W53(VOTableSpecWarning):
    """
    The VOTABLE element must contain at least one RESOURCE element.
    """

    message_template = ("VOTABLE element must contain at least one RESOURCE element.")
    default_args = ()


class E01(VOWarning, ValueError):
    """
    The size specifier for a ``char`` or ``unicode`` field must be
    only a number followed, optionally, by an asterisk.
    Multi-dimensional size specifiers are not supported for these
    datatypes.

    Strings, which are defined as a set of characters, can be
    represented in VOTable as a fixed- or variable-length array of
    characters::

        <FIELD name="unboundedString" datatype="char" arraysize="*"/>

    A 1D array of strings can be represented as a 2D array of
    characters, but given the logic above, it is possible to define a
    variable-length array of fixed-length strings, but not a
    fixed-length array of variable-length strings.
    """

    message_template = "Invalid size specifier '{}' for a {} field (in field '{}')"
    default_args = ('x', 'char/unicode', 'y')


class E02(VOWarning, ValueError):
    """
    The number of array elements in the data does not match that specified
    in the FIELD specifier.
    """

    message_template = (
        "Incorrect number of elements in array. " +
        "Expected multiple of {}, got {}")
    default_args = ('x', 'y')


class E03(VOWarning, ValueError):
    """
    Complex numbers should be two values separated by whitespace.

    **References**: `1.1
    <http://www.ivoa.net/Documents/VOTable/20040811/REC-VOTable-1.1-20040811.html#sec:datatypes>`__,
    `1.2
    <http://www.ivoa.net/Documents/VOTable/20091130/REC-VOTable-1.2.html#sec:datatypes>`__
    """

    message_template = "'{}' does not parse as a complex number"
    default_args = ('x',)


class E04(VOWarning, ValueError):
    """
    A ``bit`` array should be a string of '0's and '1's.

    **References**: `1.1
    <http://www.ivoa.net/Documents/VOTable/20040811/REC-VOTable-1.1-20040811.html#sec:datatypes>`__,
    `1.2
    <http://www.ivoa.net/Documents/VOTable/20091130/REC-VOTable-1.2.html#sec:datatypes>`__
    """

    message_template = "Invalid bit value '{}'"
    default_args = ('x',)


class E05(VOWarning, ValueError):
    r"""
    A ``boolean`` value should be one of the following strings (case
    insensitive) in the ``TABLEDATA`` format::

        'TRUE', 'FALSE', '1', '0', 'T', 'F', '\0', ' ', '?'

    and in ``BINARY`` format::

        'T', 'F', '1', '0', '\0', ' ', '?'

    **References**: `1.1
    <http://www.ivoa.net/Documents/VOTable/20040811/REC-VOTable-1.1-20040811.html#sec:datatypes>`__,
    `1.2
    <http://www.ivoa.net/Documents/VOTable/20091130/REC-VOTable-1.2.html#sec:datatypes>`__
    """

    message_template = "Invalid boolean value '{}'"
    default_args = ('x',)


class E06(VOWarning, ValueError):
    """
    The supported datatypes are::

        double, float, bit, boolean, unsignedByte, short, int, long,
        floatComplex, doubleComplex, char, unicodeChar

    The following non-standard aliases are also supported, but in
    these case :ref:`W13 <W13>` will be raised::

        string        -> char
        unicodeString -> unicodeChar
        int16         -> short
        int32         -> int
        int64         -> long
        float32       -> float
        float64       -> double
        unsignedInt   -> long
        unsignedShort -> int

    To add more datatype mappings during parsing, use the
    ``datatype_mapping`` keyword to `astropy.io.votable.parse`.
**References**: `1.1 <http://www.ivoa.net/Documents/VOTable/20040811/REC-VOTable-1.1-20040811.html#sec:datatypes>`__, `1.2 <http://www.ivoa.net/Documents/VOTable/20091130/REC-VOTable-1.2.html#sec:datatypes>`__ """ message_template = "Unknown datatype '{}' on field '{}'" default_args = ('x', 'y') # E07: Deprecated class E08(VOWarning, ValueError): """ The ``type`` attribute on the ``VALUES`` element must be either ``legal`` or ``actual``. **References**: `1.1 <http://www.ivoa.net/Documents/VOTable/20040811/REC-VOTable-1.1-20040811.html#sec:values>`__, `1.2 <http://www.ivoa.net/Documents/VOTable/20091130/REC-VOTable-1.2.html#sec:values>`__ """ message_template = "type must be 'legal' or 'actual', but is '{}'" default_args = ('x',) class E09(VOWarning, ValueError): """ The ``MIN``, ``MAX`` and ``OPTION`` elements must always have a ``value`` attribute. **References**: `1.1 <http://www.ivoa.net/Documents/VOTable/20040811/REC-VOTable-1.1-20040811.html#sec:values>`__, `1.2 <http://www.ivoa.net/Documents/VOTable/20091130/REC-VOTable-1.2.html#sec:values>`__ """ message_template = "'{}' must have a value attribute" default_args = ('x',) class E10(VOWarning, ValueError): """ From VOTable 1.1 and later, ``FIELD`` and ``PARAM`` elements must have a ``datatype`` field. **References**: `1.1 <http://www.ivoa.net/Documents/VOTable/20040811/REC-VOTable-1.1-20040811.html#elem:FIELD>`__, `1.2 <http://www.ivoa.net/Documents/VOTable/20091130/REC-VOTable-1.2.html#elem:FIELD>`__ """ message_template = "'datatype' attribute required on all '{}' elements" default_args = ('FIELD',) class E11(VOWarning, ValueError): """ The precision attribute is meant to express the number of significant digits, either as a number of decimal places (e.g. ``precision="F2"`` or equivalently ``precision="2"`` to express 2 significant figures after the decimal point), or as a number of significant figures (e.g. ``precision="E5"`` indicates a relative precision of 10-5). 
It is validated using the following regular expression:: [EF]?[1-9][0-9]* **References**: `1.1 <http://www.ivoa.net/Documents/VOTable/20040811/REC-VOTable-1.1-20040811.html#sec:form>`__, `1.2 <http://www.ivoa.net/Documents/VOTable/20091130/REC-VOTable-1.2.html#sec:form>`__ """ message_template = "precision '{}' is invalid" default_args = ('x',) class E12(VOWarning, ValueError): """ The width attribute is meant to indicate to the application the number of characters to be used for input or output of the quantity. **References**: `1.1 <http://www.ivoa.net/Documents/VOTable/20040811/REC-VOTable-1.1-20040811.html#sec:form>`__, `1.2 <http://www.ivoa.net/Documents/VOTable/20091130/REC-VOTable-1.2.html#sec:form>`__ """ message_template = "width must be a positive integer, got '{}'" default_args = ('x',) class E13(VOWarning, ValueError): r""" From the VOTable 1.2 spec: A table cell can contain an array of a given primitive type, with a fixed or variable number of elements; the array may even be multidimensional. For instance, the position of a point in a 3D space can be defined by the following:: <FIELD ID="point_3D" datatype="double" arraysize="3"/> and each cell corresponding to that definition must contain exactly 3 numbers. An asterisk (\*) may be appended to indicate a variable number of elements in the array, as in:: <FIELD ID="values" datatype="int" arraysize="100*"/> where it is specified that each cell corresponding to that definition contains 0 to 100 integer numbers. The number may be omitted to specify an unbounded array (in practice up to =~2×10⁹ elements). A table cell can also contain a multidimensional array of a given primitive type. This is specified by a sequence of dimensions separated by the ``x`` character, with the first dimension changing fastest; as in the case of a simple array, the last dimension may be variable in length. 
As an example, the following definition declares a table cell which may contain a set of up to 10 images, each of 64×64 bytes:: <FIELD ID="thumbs" datatype="unsignedByte" arraysize="64×64×10*"/> **References**: `1.1 <http://www.ivoa.net/Documents/VOTable/20040811/REC-VOTable-1.1-20040811.html#sec:dim>`__, `1.2 <http://www.ivoa.net/Documents/VOTable/20091130/REC-VOTable-1.2.html#sec:dim>`__ """ message_template = "Invalid arraysize attribute '{}'" default_args = ('x',) class E14(VOWarning, ValueError): """ All ``PARAM`` elements must have a ``value`` attribute. **References**: `1.1 <http://www.ivoa.net/Documents/VOTable/20040811/REC-VOTable-1.1-20040811.html#elem:FIELD>`__, `1.2 <http://www.ivoa.net/Documents/VOTable/20091130/REC-VOTable-1.2.html#elem:FIELD>`__ """ message_template = "value attribute is required for all PARAM elements" class E15(VOWarning, ValueError): """ All ``COOSYS`` elements must have an ``ID`` attribute. Note that the VOTable 1.1 specification says this attribute is optional, but its corresponding schema indicates it is required. In VOTable 1.2, the ``COOSYS`` element is deprecated. """ message_template = "ID attribute is required for all COOSYS elements" class E16(VOTableSpecWarning): """ The ``system`` attribute on the ``COOSYS`` element must be one of the following:: 'eq_FK4', 'eq_FK5', 'ICRS', 'ecl_FK4', 'ecl_FK5', 'galactic', 'supergalactic', 'xy', 'barycentric', 'geo_app' **References**: `1.1 <http://www.ivoa.net/Documents/VOTable/20040811/REC-VOTable-1.1-20040811.html#elem:COOSYS>`__ """ message_template = "Invalid system attribute '{}'" default_args = ('x',) class E17(VOWarning, ValueError): """ ``extnum`` attribute must be a positive integer. 
**References**: `1.1 <http://www.ivoa.net/Documents/VOTable/20040811/REC-VOTable-1.1-20040811.html#ToC54>`__, `1.2 <http://www.ivoa.net/Documents/VOTable/20091130/REC-VOTable-1.2.html#ToC58>`__ """ message_template = "extnum must be a positive integer" class E18(VOWarning, ValueError): """ The ``type`` attribute of the ``RESOURCE`` element must be one of "results" or "meta". **References**: `1.1 <http://www.ivoa.net/Documents/VOTable/20040811/REC-VOTable-1.1-20040811.html#ToC54>`__, `1.2 <http://www.ivoa.net/Documents/VOTable/20091130/REC-VOTable-1.2.html#ToC58>`__ """ message_template = "type must be 'results' or 'meta', not '{}'" default_args = ('x',) class E19(VOWarning, ValueError): """ Raised either when the file doesn't appear to be XML, or the root element is not VOTABLE. """ message_template = "File does not appear to be a VOTABLE" class E20(VOTableSpecError): """ The table had only *x* fields defined, but the data itself has more columns than that. """ message_template = "Data has more columns than are defined in the header ({})" default_args = ('x',) class E21(VOWarning, ValueError): """ The table had *x* fields defined, but the data itself has only *y* columns. """ message_template = "Data has fewer columns ({}) than are defined in the header ({})" default_args = ('x', 'y') def _get_warning_and_exception_classes(prefix): classes = [] for key, val in globals().items(): if re.match(prefix + "[0-9]{2}", key): classes.append((key, val)) classes.sort() return classes def _build_doc_string(): def generate_set(prefix): classes = _get_warning_and_exception_classes(prefix) out = io.StringIO() for name, cls in classes: out.write(".. 
_{}:\n\n".format(name)) msg = "{}: {}".format(cls.__name__, cls.get_short_name()) if not isinstance(msg, str): msg = msg.decode('utf-8') out.write(msg) out.write('\n') out.write('~' * len(msg)) out.write('\n\n') doc = cls.__doc__ if not isinstance(doc, str): doc = doc.decode('utf-8') out.write(dedent(doc)) out.write('\n\n') return out.getvalue() warnings = generate_set('W') exceptions = generate_set('E') return {'warnings': warnings, 'exceptions': exceptions} if __doc__ is not None: __doc__ = __doc__.format(**_build_doc_string()) __all__.extend([x[0] for x in _get_warning_and_exception_classes('W')]) __all__.extend([x[0] for x in _get_warning_and_exception_classes('E')])
We used our Happiness Model in one of the high scale manufacturing companies of India to create an environment of Happiness and Positivity. Though the company has a successful business model and has relatively satisfied employees and customers, it was the founder’s belief that ‘Total Happiness and Positivity’ in the organization will make people realize their true potential. The company CEO strongly believed that the key to sustainable organizations and the essence of Indian ethos is the concept of ‘Ananda’ or ‘Happiness and Positivity’, which made it possible for SOIL to implement its model more effectively. The intervention was spread out over two years for 12 units and for approximately 2000 employees. For one of the key manufacturing units of the organization.
""" Copyright (c) 2014, Are Hansen - Honeypot Development. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND AN EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. """ __author__ = 'Are Hansen' __date__ = '2014, July 25' __version__ = '0.0.1' def filelines(file_obj): """Expects that the file_obj is a list containing the name of the files that sould be read, including the path to the directory in which they are located. Each line of these files are appended to the file_lines and returned from the function. """ file_lines = [] file_dict = {} for obj in file_obj: with open(obj, 'r') as lines: for line in lines.readlines(): file_lines.append(line) file_dict[obj] = file_lines file_lines = [] return file_dict
Hopefully, Mr. Black will be the first of thousands. It is estimated that about 12,000 prisoners will see a reduction of about three years in their sentences. The Courts have been planning for the motions for months. Here's how it will work in the District of Colorado. Not every inmate sentenced for crack will get a reduction. FAMM has this handy FAQ outlining the requirements. Via Law Prof Doug Berman at Sentencing Law and Policy: Judge Jack Weinstein has written another scathing indictment of our drug sentencing laws and policy in a multi-defendant crack cocaine case. Newly Released Crack Cocaine Defendants: How Are They Faring? The Washington Post today reports on some crack defendants who were able to leave prison early due to the recent retroactive sentencing guideline reductions. They seem to be coping pretty well, considering the changed world they've returned to after a decade or more behind bars. More than 7,000 crack cocaine offenders ... have received reduced sentences since March, when the U.S. Sentencing Commission put retroactive sentence guidelines into effect to offset what the commission felt were overly harsh punishments for crack cocaine related crimes, and it is an open question whether they will succeed or return to a life behind bars. ....Nearly 90 percent of those who received the tough sentences for crack cocaine were black men and women. Most users and dealers of powder cocaine are white and Latino. There were 19,500 federal inmates serving sentences for crack when the reduction went into effect in March. Many aren't eligible for the reduction for a variety of technical reasons. For others, mandatory minimum sentencing laws which trump the guidelines will prevent them from getting a reduced sentence. The Government files objections to scores of requests, arguing either that the reduction doesn't apply to a particular defendant or the court should exercise its discretion and deny the relief. The U.S. 
Sentencing Commission says 3,000 crack cocaine sentences have been reduced since the guideline amendment went into effect in March. There are 19,500 inmates serving time for crack cocaine. In the 40 or so motions I've seen filed in Colorado (cases in which I had one of many co-defendants) the Government seems to file an objection to every request. It either says the guideline doesn't apply or the court should exercise its discretion and deny the relief. I've mentioned before that the reductions are small, and only apply to a limited group of defendants. I only have one client out of dozens of crack defendants I've represented who appears to be eligible for relief. Sure enough, the Government is opposing the request.
# -*- coding: utf-8 -*-
"""Thin dict-style wrapper around boto's DynamoDB layer2 API."""

import boto

from numbers import Number

from boto.dynamodb.exceptions import DynamoDBKeyNotFoundError


class Table(object):
    """Dict-like access to a single DynamoDB table.

    When ``eager`` is True, missing keys are auto-created on lookup and
    item mutations are saved immediately.
    """

    def __init__(self, table=None, eager=False):
        # ``table`` is a boto layer2 Table object.
        self.table = table
        self.is_eager = eager

    def __repr__(self):
        return '<table \'{0}\'>'.format(self.name)

    @property
    def name(self):
        """The DynamoDB table name, read from boto's raw description dict."""
        return self.table.__dict__['_dict']['TableName']

    def item(self, item):
        """Wrap a boto item in an :class:`Item` bound to this table."""
        return Item(item, self)

    def delete(self):
        """Delete the underlying DynamoDB table."""
        return self.table.delete()

    def scale(self, read=None, write=None):
        """Update the table's provisioned throughput.

        Omitted values fall back to the table's current units.

        Bug fix: the original defaulted ``write`` to ``read_units``,
        silently resetting the write throughput on partial updates.
        """
        read = read or self.table.read_units
        write = write or self.table.write_units

        return self.table.update_throughput(read_units=read, write_units=write)

    def __getitem__(self, key):
        try:
            # A scalar key means a hash-only schema; boto expects
            # positional (hash_key[, range_key]) arguments.
            if isinstance(key, (basestring, Number)):
                key = [key]
            i = self.table.get_item(*key)
            i = self.item(i)
        except DynamoDBKeyNotFoundError:
            return self.__magic_get(key)
        return i

    def get(self, key, default=None):
        """Dict-style ``get``: return ``default`` instead of raising."""
        try:
            return self[key]
        except KeyError:
            return default

    def __setitem__(self, key, values):
        if isinstance(key, (basestring, Number)):
            key = [key]
        i = self.table.new_item(*key, attrs=values)
        i = self.item(i)
        i.put()
        return i

    def __delitem__(self, key):
        return self[key].delete()

    def __magic_get(self, key):
        # Eager tables auto-create an empty item on a missed lookup.
        # NOTE(review): on a non-eager table this falls through and
        # implicitly returns None — confirm that is the intended contract.
        if self.is_eager:
            self[key] = {}
            return self.item(self[key])

    def __contains__(self, key):
        return not self.get(key) is None

    def new(self, name):
        """Create a new table that clones this table's schema/throughput."""
        table = self.table.layer2.create_table(
            name=name,
            schema=self.table._schema,
            read_units=self.table.read_units,
            write_units=self.table.write_units
        )
        return Table(table=table, eager=self.is_eager)


class Item(object):
    """Proxy around a boto item that auto-saves when the table is eager."""

    def __init__(self, item, table):
        self.item = item
        self.table = table

    @property
    def is_eager(self):
        return self.table.is_eager

    def __getattr__(self, key):
        # Delegate unknown attributes to the wrapped boto item.
        # NOTE(review): ``getattr(object, key)`` looks the attribute up on
        # the ``object`` type itself, which almost always raises
        # AttributeError and falls through to the item — confirm intent.
        if not key in ['item']:
            try:
                return getattr(object, key)
            except AttributeError:
                return getattr(self.item, key)

    def get(self, key, default=None):
        """Dict-style ``get``: return ``default`` instead of raising."""
        try:
            return self[key]
        except KeyError:
            return default

    def __repr__(self):
        return repr(self.item)

    def __getitem__(self, key):
        return self.item[key]

    def __setitem__(self, key, value):
        self.item[key] = value
        # Eager items persist every mutation immediately.
        if self.is_eager:
            self.item.save()

    def __contains__(self, key):
        return key in self.item


def table(name, auth=None, eager=True):
    """Returns a given table for the given user."""
    auth = auth or []
    dynamodb = boto.connect_dynamodb(*auth)
    table = dynamodb.get_table(name)
    return Table(table=table, eager=eager)


def tables(auth=None, eager=True):
    """Returns a list of tables for the given user."""
    auth = auth or []
    dynamodb = boto.connect_dynamodb(*auth)
    return [table(t, auth, eager=eager) for t in dynamodb.list_tables()]
Take a glimpse into the interdisciplinary environmental degrees at Canadian universities – and check out the thriving green student movement across the nation. The newest AJ Environmental Education Directory spells out environmental opportunities in a way that truly showcases the unique geographies of our nation. Justin Trudeau and other emerging environmental leaders lend their advice. Nicola Ross checks out the latest in the environmental job market and finds green jobs galore. Trickster teachers seek ecojustice in curricula. Learn how and why transdisciplinary education works so well. it takes a thoughtful dance to ensure that environmental education is as just and inclusive as it needs to be. Transdisciplinary education works because environmental problems and their solutions seldom respect faculty lines. Daid Orr suggests that understanding our sense of place may be the key to wisdom.
"""Support for Homekit Alarm Control Panel.""" import logging from homekit.model.characteristics import CharacteristicsTypes from homeassistant.components.alarm_control_panel import AlarmControlPanel from homeassistant.components.alarm_control_panel.const import ( SUPPORT_ALARM_ARM_AWAY, SUPPORT_ALARM_ARM_HOME, SUPPORT_ALARM_ARM_NIGHT, ) from homeassistant.const import ( ATTR_BATTERY_LEVEL, STATE_ALARM_ARMED_AWAY, STATE_ALARM_ARMED_HOME, STATE_ALARM_ARMED_NIGHT, STATE_ALARM_DISARMED, STATE_ALARM_TRIGGERED, ) from homeassistant.core import callback from . import KNOWN_DEVICES, HomeKitEntity ICON = "mdi:security" _LOGGER = logging.getLogger(__name__) CURRENT_STATE_MAP = { 0: STATE_ALARM_ARMED_HOME, 1: STATE_ALARM_ARMED_AWAY, 2: STATE_ALARM_ARMED_NIGHT, 3: STATE_ALARM_DISARMED, 4: STATE_ALARM_TRIGGERED, } TARGET_STATE_MAP = { STATE_ALARM_ARMED_HOME: 0, STATE_ALARM_ARMED_AWAY: 1, STATE_ALARM_ARMED_NIGHT: 2, STATE_ALARM_DISARMED: 3, } async def async_setup_entry(hass, config_entry, async_add_entities): """Set up Homekit alarm control panel.""" hkid = config_entry.data["AccessoryPairingID"] conn = hass.data[KNOWN_DEVICES][hkid] @callback def async_add_service(aid, service): if service["stype"] != "security-system": return False info = {"aid": aid, "iid": service["iid"]} async_add_entities([HomeKitAlarmControlPanel(conn, info)], True) return True conn.add_listener(async_add_service) class HomeKitAlarmControlPanel(HomeKitEntity, AlarmControlPanel): """Representation of a Homekit Alarm Control Panel.""" def __init__(self, *args): """Initialise the Alarm Control Panel.""" super().__init__(*args) self._state = None self._battery_level = None def get_characteristic_types(self): """Define the homekit characteristics the entity cares about.""" return [ CharacteristicsTypes.SECURITY_SYSTEM_STATE_CURRENT, CharacteristicsTypes.SECURITY_SYSTEM_STATE_TARGET, CharacteristicsTypes.BATTERY_LEVEL, ] def _update_security_system_state_current(self, value): self._state = 
CURRENT_STATE_MAP[value] def _update_battery_level(self, value): self._battery_level = value @property def icon(self): """Return icon.""" return ICON @property def state(self): """Return the state of the device.""" return self._state @property def supported_features(self) -> int: """Return the list of supported features.""" return SUPPORT_ALARM_ARM_HOME | SUPPORT_ALARM_ARM_AWAY | SUPPORT_ALARM_ARM_NIGHT async def async_alarm_disarm(self, code=None): """Send disarm command.""" await self.set_alarm_state(STATE_ALARM_DISARMED, code) async def async_alarm_arm_away(self, code=None): """Send arm command.""" await self.set_alarm_state(STATE_ALARM_ARMED_AWAY, code) async def async_alarm_arm_home(self, code=None): """Send stay command.""" await self.set_alarm_state(STATE_ALARM_ARMED_HOME, code) async def async_alarm_arm_night(self, code=None): """Send night command.""" await self.set_alarm_state(STATE_ALARM_ARMED_NIGHT, code) async def set_alarm_state(self, state, code=None): """Send state command.""" characteristics = [ { "aid": self._aid, "iid": self._chars["security-system-state.target"], "value": TARGET_STATE_MAP[state], } ] await self._accessory.put_characteristics(characteristics) @property def device_state_attributes(self): """Return the optional state attributes.""" if self._battery_level is None: return None return {ATTR_BATTERY_LEVEL: self._battery_level}
It is the policy of Unum not to discriminate against any employee or applicant for employment because he or she is an individual with a disability or a protected veteran (i.e., a disabled veteran, recently separated veteran, Armed Forces service medal veteran, or other veteran who served during a war, or in a campaign or expedition for which a campaign badge has been authorized). It is also the policy of Unum to take affirmative action to employ and to advance in employment all persons regardless of their status as individuals with disabilities or as protected veterans, and to base all employment decisions only on valid job requirements. In accordance with this policy, we will recruit, hire, train and promote persons in all job titles, and will ensure that all other personnel actions, including but not limited to transfers, demotions, layoffs, recalls , terminations, rates of pay or other forms of compensation, and selection for training, including apprenticeship, are administered without regard to disability or protected veteran status. 
Employees and applicants of Unum will not be subject to harassment, intimidation, threats, coercion or discrimination because they have engaged or may engage in any of the following activities: (1) Filing a complaint; (2) Assisting or participating in an investigation, compliance evaluation, hearing or any other activity related to the administration of the affirmative action and/or nondiscrimination provisions of Section 503 of the Rehabilitation Act of 1973, as amended (Section 503) and the Vietnam Era Veterans' Readjustment Assistance Act of 1974, as amended (VEVRAA) or any other Federal, state, or local law requiring equal opportunity for individuals with disabilities or protected veterans; (3) Opposing any act or practice made unlawful by Section 503 or VEVRAA or their implementing regulations, or any other Federal, state or local law requiring equal opportunity for individuals with disabilities or protected veterans; (4) Exercising any other right protected by Section 503 or VEVRAA or their implementing regulations. As Chief Executive Officer of Unum, I am committed to the principles of Affirmative Action and Equal Employment Opportunity. In order to ensure dissemination and implementation of equal employment opportunity and affirmative action throughout all levels of the company, I have selected the AVP, Workforce Metric, Reporting & Analysis as the Equal Employment Opportunity (EEO) Officer for Unum. One of the EEO Officer's duties will be to establish and maintain an internal audit and reporting system to allow for effective measurement of Unum's programs. In furtherance of Unum's policy regarding Affirmative Action and Equal Employment Opportunity, Unum has developed a written Affirmative Action Program which sets forth the policies, practices and procedures that Unum is committed to in order to ensure that its policy of nondiscrimination and affirmative action for qualified individuals with disabilities and qualified protected veterans is accomplished. 
This Affirmative Action Program is available for inspection by any employee or applicant for employment upon request, during normal business hours, in the Administration Department Interested persons should contact the EEO Officer at 207-575-9304 for assistance.
# [1] http://dx.doi.org/10.1063/1.4952956 # Lee-Ping Wang, 2016 import numpy as np from pysisyphus.intcoords.Primitive import Primitive from pysisyphus.linalg import eigvec_grad def compare_to_geometric(c3d, ref_c3d, dR, dF, dqdx, dvdx): from geometric.rotate import get_R_der, get_F_der, get_q_der, get_expmap_der dR_ref = get_R_der(c3d, ref_c3d) np.testing.assert_allclose(dR, dR_ref) dF_ref = get_F_der(c3d, ref_c3d) np.testing.assert_allclose(dF.reshape(-1, 3, 4, 4), dF_ref) dq_ref = get_q_der(c3d, ref_c3d) np.testing.assert_allclose(dqdx.reshape(-1, 3, 4), dq_ref) dvdx_ref = get_expmap_der(c3d, ref_c3d) np.testing.assert_allclose(dvdx, dvdx_ref.reshape(-1, 3).T) class Rotation(Primitive): """See (II. Theory) in [1], Eq. (3) - (14)""" index = None def __init__(self, *args, ref_coords3d, **kwargs): super().__init__(*args, **kwargs) self.calc_kwargs = ("index", "ref_coords3d") self.ref_coords3d = ref_coords3d.reshape(-1, 3).copy() @staticmethod def _weight(atoms, coords3d, indices, f_damping): return 1 @staticmethod def to_origin(coords3d, indices): return coords3d[indices] - coords3d[indices].mean(axis=0) @staticmethod def _calculate(coords3d, indices, gradient=False, index=0, ref_coords3d=None): # Translate to origin by removing centroid c3d = Rotation.to_origin(coords3d, indices) ref_c3d = Rotation.to_origin(ref_coords3d, indices) # Setup correlation matrix R = c3d.T.dot(ref_c3d) # Setup F matrix, Eq. (6) in [1] F = np.zeros((4, 4)) R11, R12, R13, R21, R22, R23, R31, R32, R33 = R.flatten() # Fill only upper triangular part. F[0, 0] = R11 + R22 + R33 F[0, 1] = R23 - R32 F[0, 2] = R31 - R13 F[0, 3] = R12 - R21 # F[1, 1] = R11 - R22 - R33 F[1, 2] = R12 + R21 F[1, 3] = R13 + R31 # F[2, 2] = -R11 + R22 - R33 F[2, 3] = R23 + R32 # F[3, 3] = -R11 - R22 + R33 # Eigenvalues, eigenvectors of upper triangular part. w, v_ = np.linalg.eigh(F, UPLO="U") # Quaternion corresponds to biggest (last) eigenvalue. # np.linalg.eigh already returns sorted eigenvalues. 
quat = v_[:, -1] # Eigenvector sign is ambigous. Force first item to be positive, # similar to geomeTRIC code. if quat[0] < 0.0: quat *= -1 # Eq. (8) in [1]. # v = 2 * q_i * (cos⁻¹(q_0) / sqrt(1 - q_0 ** 2) # # As q_0 approaches 1, the denominator becomes very small, and dividing # by this small number results in numerical instability. # # According to wolframalpha v(q_0) limit approaches 2 for q_0 = 1. # # input: limit of (2 * arccos(x) / sqrt(1-x**2)) # output: lim v(x) for x -> 1 becomes 2. q0 = quat[0] if abs(q0 - 1.0) <= 1e-8: prefac = 2 dvdq0 = 0.0 else: arccos_q0 = np.arccos(q0) diff = 1 - q0 ** 2 prefac = 2 * arccos_q0 / np.sqrt(diff) dvdq0 = quat[1:] * (2 * q0 * arccos_q0 / diff ** 1.5 - 2 / diff) # Exponential map v = prefac * quat[1:] if gradient: # Gradient of correlation matrix y1, y2, y3 = ref_c3d.T dR = np.zeros((*c3d.shape, 3, 3)) dR[:, 0, 0, 0] = y1 dR[:, 0, 0, 1] = y2 dR[:, 0, 0, 2] = y3 # dR[:, 1, 1, 0] = y1 dR[:, 1, 1, 1] = y2 dR[:, 1, 1, 2] = y3 # dR[:, 2, 2, 0] = y1 dR[:, 2, 2, 1] = y2 dR[:, 2, 2, 2] = y3 dR11, dR12, dR13, dR21, dR22, dR23, dR31, dR32, dR33 = dR.reshape(-1, 9).T # Gradient of F matrix. Construct full matrix, as we have to do a dot # product later on. dF = np.zeros((ref_c3d.size, 4, 4)) dF[:, 0, 0] = dR11 + dR22 + dR33 dF[:, 0, 1] = dR23 - dR32 dF[:, 0, 2] = dR31 - dR13 dF[:, 0, 3] = dR12 - dR21 # dF[:, 1, 0] = dF[:, 0, 1] dF[:, 1, 1] = dR11 - dR22 - dR33 dF[:, 1, 2] = dR12 + dR21 dF[:, 1, 3] = dR13 + dR31 # dF[:, 2, 0] = dF[:, 0, 2] dF[:, 2, 1] = dF[:, 1, 2] dF[:, 2, 2] = -dR11 + dR22 - dR33 dF[:, 2, 3] = dR23 + dR32 # dF[:, 3, 0] = dF[:, 0, 3] dF[:, 3, 1] = dF[:, 1, 3] dF[:, 3, 2] = dF[:, 2, 3] dF[:, 3, 3] = -dR11 - dR22 + dR33 # Quaternion gradient dqdx = eigvec_grad(w, v_, ind=-1, mat_grad=dF) dvdq = np.zeros((4, 3)) dvdq[0] = dvdq0 dvdq[1:] = np.diag((prefac, prefac, prefac)) # Gradient of exponential map from chain rule. # See bottom-left on 214108-3 in [1], after Eq. (11). 
dvdx = np.einsum("ij,ki->jk", dvdq, dqdx) # compare_to_geometric(c3d, ref_c3d, dR, dF, dqdx, dvdx) row = np.zeros_like(coords3d) row[indices] = dvdx[index].reshape(-1, 3) return v[index], row.flatten() return v[index] @staticmethod def _jacobian(coords3d, indices): raise Exception("Not implemented!") class RotationA(Rotation): index = 0 class RotationB(Rotation): index = 1 class RotationC(Rotation): index = 2
If you have visited Bhavans Bhagwandas Purohit Vidya Mandir Civil Line, share your experience with us. Your few words can help others.
# -*- coding: utf-8 -*-

import re
import StringIO

# License: GPLv3
# Author: Pablo Codeiro
# Changes by hugoruscitti: simulate creating an .sc file, and the
# 'get_fakefile' function that is called from quickdiagrams.
#
# (Of course the author, date and version would go here —
# it's GPL v3.0 and all that boilerplate....)


def esUnMensaje(unaLinea):
    """Return (class, selector) when the line categorises a Smalltalk
    method, else None."""
    elementos = re.match(r"!(\w+) categoriesFor: #(\w+)! public! !", unaLinea)
    if elementos:
        return (elementos.group(1), elementos.group(2))


def esUnaClase(unaLinea):
    """Return the class name when the line adds a class to the package,
    else None."""
    elementos = re.match(r"\s+add: #(\w+);", unaLinea)
    if elementos:
        return (elementos.group(1))


def esUnaRelacion(unaLinea):
    """Return the instantiated class name when the line contains an
    assignment of the form ``x := Foo new.`` (a composition relation),
    else None."""
    #elementos = re.match("\s+\w+\s*:=\s*(\w+)\s*new\s* \.",unaLinea)
    elementos = re.match(r".+:=\s+(\w+)\s*new\s*\..*", unaLinea)
    if elementos:
        return (elementos.group(1))


def get_fakefile(input_filename):
    """Simulates an on-disk file, to perform the .pac -> .sc conversion.

    Returns a StringIO positioned at offset 0, containing the class
    descriptions and composition relations extracted from the .pac file.
    """
    new_file = StringIO.StringIO()
    last_file = open(input_filename, 'r')
    # NOTE: ``dict`` shadows the builtin; kept for compatibility.
    # Maps class name -> accumulated class description text.
    dict = {}
    claseActual = ""

    for linea in last_file.readlines():
        # Track which class the following method definitions belong to.
        tmp_clase_actual = (re.match(r"!(\w+) methodsFor!", linea))
        if tmp_clase_actual:
            claseActual = tmp_clase_actual.group(1)

        clase = esUnaClase(linea)
        mens = esUnMensaje(linea)
        relacion = esUnaRelacion(linea)

        if clase:
            if len(dict) == 1:
                dict[clase] = clase
            elif not dict.has_key(clase):
                dict[clase] = clase
            claseActual = clase

        if mens:
            # Append the method name to the owning class' description.
            (clase2, mensaje) = mens
            dict[clase2] = dict[clase2] + "\n\t" + mensaje + "()"
            # NOTE(review): assigns ``clase`` (often None on these lines);
            # ``clase2`` looks like the intended value — confirm.
            claseActual = clase

        if relacion:
            new_file.write("%s <>- %s\n" % (claseActual, relacion))

    for elemento in dict.values():
        new_file.write(elemento + "\n")

    last_file.close()
    new_file.flush()
    new_file.seek(0)
    # Bug fix: the leftover debug ``pprint.pprint(new_file.readlines())``
    # consumed the buffer after seek(0), so callers received an exhausted
    # file object. The debug output has been removed.
    return new_file
Accomodations – Trail Blazer Survival School, Inc. Your time at the Trail Blazer Survival School can be a rich and fulfilling experience. Please review this important information to help you get the most out of your time here. If you need help arranging transportation to the school please contact us. Students may arrive the evening before a course begins. TBSS does not allow pets on campus, with the exception of service animals, or prior approval. TBSS does allow smoking, but ask that you are respectful of those around you. Consider stepping away from groups when choosing to light up. Please use same considerations when vaping or chewing tobacco. TBSS campus consists of twenty-two acres of beautiful forest with springs and a small lake. The property also connects to the Sumter National Forrest. There is a central camp with a fire pit, potable water station, outhouse, and other camp accommodations. Coming soon, we plan to have a trapping course, hide tanning station, throwing stick range, and sweat lodge. We encourage students to bring reusable water bottles. Students are highly encouraged to camp and sleep outdoors when they are participating in multiple day courses. Camping is free to registered students. Other Inn and Hotel accommodations are available in Union. As a School, we focus our efforts on sharing skills.This allows you to bring the foods you know you will like and it helps us focus on what we are good at (while keeping the price of tuition down). Meal times are an hour each for your planning purposes. Please keep in mind you will be outdoors for the majority of class time and plan to dress accordingly. Our intent is to be outside as often as possible and, though comfort is a concern, when possible we will train in the elements. Wet weather classrooms are available if needed. Our Survival Shack provides covered training and emergency shelters in the event of bad weather. 
We have a potable drinking water station, hand washing station, and a porta potty on site. There are numerous restaurants and motels nearby in Union, SC for your convenience. - Days Inn, Quality Inn, Magnuson Hotel.
# Copyright (C) 2010 ceibalJAM! ceibaljam.org
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.

"""Toolbar exposing Memorize's accessibility options (scan mode and speed)."""

import gtk

from os.path import join, dirname
from gettext import gettext as _
from sugar import profile

import logging
from gobject import SIGNAL_RUN_FIRST, TYPE_PYOBJECT

_logger = logging.getLogger('memorize-activity')


class AccessibilityToolbar(gtk.Toolbar):
    """Toolbar with an 'Accessible' toggle and a scanning-speed slider.

    Emits 'accessibility_changed' with (accessible_enabled, speed) every
    time either control changes.
    """

    __gtype_name__ = 'AccessibilityToolbar'

    # Signal payload: (accessible checkbox state, scanning speed value).
    __gsignals__ = {
        'accessibility_changed': (SIGNAL_RUN_FIRST, None, 2 * [TYPE_PYOBJECT]),
    }

    def __init__(self, activity):
        """Build the toolbar widgets for the given Memorize activity."""
        gtk.Toolbar.__init__(self)
        self.activity = activity
        self._lock = True
        self.jobject = None

        # Accessible mode checkbox
        self._accessible = gtk.CheckButton(_('Accessible'))
        self._accessible.connect('toggled', self._accessibility_changed)
        self._add_widget(self._accessible)

        # Scanning speed scale
        # NOTE(review): 'min' and 'max' shadow the builtins; harmless here
        # since they are only used to build the adjustment.
        min = 1
        max = 5
        step = 1
        default = 2.5
        self._speed_adj = gtk.Adjustment(default, min, max, step)
        self._speed_bar = gtk.HScale(self._speed_adj)
        self._speed_bar.set_draw_value(True)
        # Only emit value_changed when the user releases the slider.
        self._speed_bar.set_update_policy(gtk.UPDATE_DISCONTINUOUS)
        self._speed_bar.set_size_request(240,15)
        self._speed_adj.connect("value_changed", self._accessibility_changed)

        # Add it to the toolbar
        self._add_widget(self._speed_bar)

    def _add_widget(self, widget, expand=False):
        """Wrap a plain widget in a gtk.ToolItem and append it to the toolbar."""
        tool_item = gtk.ToolItem()
        tool_item.set_expand(expand)

        tool_item.add(widget)
        widget.show()

        self.insert(tool_item, -1)
        tool_item.show()

    def _game_reset_cb(self, widget):
        """Ask listeners to reset the current game."""
        self.emit('game_changed', None, None, 'reset', None, None)

    def _load_game(self, button):
        # Placeholder; loading is handled elsewhere.
        pass

    def _accessibility_changed(self, widget):
        """Forward the current checkbox state and slider value to listeners."""
        self.emit("accessibility_changed", self._accessible.get_active(),
                  self._speed_bar.get_value())
Texas just opened the nation's first state-run gold depository. Here's what that means. Texas lawmakers signed off on building the country's first state-backed gold depository in 2015 after the project was reworked to ensure a private firm would absorb all the costs. Gov. Greg Abbott said at the time that the project would allow Texas to “repatriate” gold from New York. A 23,000-square-foot building — operated by the private storage company Lone Star Tangible Assets — will serve as Texas' precious metals depository until 2019, when Lone Star is scheduled to open a facility double that size in Leander. Texas Comptroller Glenn Hegar became the first person to take advantage of the fully insured Texas Bullion Depository when he deposited his own gold and silver in the Austin vault earlier this week, according to his office. The construction of the Leander depository is set to begin “in the next couple of months,” said Kevin Lyons, spokesman for the Texas comptroller’s office. Once that facility opens, armored trucks will transport the precious metals from Austin to Leander. Any U.S. citizen hoping to use the Texas facility can set up an account online and either ship or personally deliver precious metals to the site in Austin. Customers will be charged for storage at an annual rate of 0.5 percent for deposits worth up to $2.5 million. Along with gold, the depository will accept deposits of silver, platinum, rhodium and palladium. The long road to the depository's opening began in 2013, when state Rep. Giovanni Capriglione, R-Southlake, proposed a bill to establish the state’s bullion depository, drawing the support of then-Gov. Rick Perry. But the bill floundered, largely due to estimates that it would cost the state $14 million in just the first two years. Two years later, Capriglione rewrote the bill to allow the state to hire an outside firm to manage the depository, with the expectation that the firm would charge fees to customers to cover the facility's costs. 
In its revised form, the bill sailed through the Legislature and landed on Gov. Greg Abbott’s desk with little debate. But despite that progress, Abbott's most ambitious aim for the depository seems unlikely to be fulfilled. The The University of Texas/Texas A&M Investment Management Company, which oversees the assets of the University of Texas and Texas A&M systems, holds $1 billion worth of gold bullion at the HSBC Bank in New York City, according to Karen Adler, a spokeswoman for the organization. When Abbott signed the gold depository bill into law in June 2015, he declared that the opening of a state-run facility would allow Texas to “repatriate” the nonprofit's gold supply. Three years later, however, the prospect of UTIMCO moving its gold to Texas appears remote. UTIMCO officials say the new Texas depository would have to become a member of the Chicago Mercantile Exchange’s COMEX platform, where gold futures contracts are traded, for any transfer to move forward. And Lone Star Tangible Assets Chairman Matt Ferris acknowledged on Tuesday that simple geography has prevented the Texas Bullion Depository from joining COMEX, whose rules stipulate that approved depositories must be located within 150 miles of New York. The state has also begun “very preliminary” talks with UTIMCO to discuss the possibility of moving the gold to Texas, according to Tom Smelker, the administrator of the state depository. And, on Wednesday, Hegar said the Texas comptroller's office will work to get the state depository approved by COMEX, even though it sits outside the platform’s radius. Disclosure: The University of Texas System and Texas A&M University have been financial supporters of The Texas Tribune, a nonprofit, nonpartisan news organization that is funded in part by donations from members, foundations and corporate sponsors. Financial supporters play no role in the Tribune's journalism. Find a complete list of them here.
##
# Copyright (C) 2014 Jessica Tallon & Matt Molyneaux
#
# This file is part of Inboxen.
#
# Inboxen is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Inboxen is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Inboxen. If not, see <http://www.gnu.org/licenses/>.
##

import datetime

from django import test
from django.contrib.auth import get_user_model

from inboxen import models


class ModelTestCase(test.TestCase):
    """Test our custom methods"""
    # Fixture supplies at least one user (id=1), domain (id=1), inbox (id=1)
    # and part (id=1) that the tests below rely on.
    fixtures = ['inboxen_testdata.json']

    def setUp(self):
        super(ModelTestCase, self).setUp()
        # Fixture user shared by all test methods.
        self.user = get_user_model().objects.get(id=1)

    def test_inbox_create(self):
        """Inboxes require a domain; a fresh inbox records creator and time."""
        with self.assertRaises(models.Domain.DoesNotExist):
            models.Inbox.objects.create()

        domain = models.Domain.objects.get(id=1)
        inbox = models.Inbox.objects.create(domain=domain, user=self.user)

        self.assertIsInstance(inbox.created, datetime.datetime)
        self.assertEqual(inbox.user, self.user)

    def test_inbox_from_string(self):
        """from_string() resolves 'local@domain' back to the same inbox."""
        inbox = models.Inbox.objects.select_related("domain").get(id=1)
        email = "%s@%s" % (inbox.inbox, inbox.domain.domain)

        inbox2 = inbox.user.inbox_set.from_string(email=email)

        self.assertEqual(inbox, inbox2)

    def test_inbox_from_string_and_user(self):
        """from_string() is scoped per user: another user's inbox is not found."""
        user = get_user_model().objects.create(username="bizz")
        domain = models.Domain.objects.get(id=1)
        inbox = models.Inbox.objects.create(domain=domain, user=user)

        with self.assertRaises(models.Inbox.DoesNotExist):
            self.user.inbox_set.from_string(email="%s@%s" % (inbox.inbox, domain.domain))

    def test_header_create(self):
        """Identical headers share name/data rows; 'created' flag set once."""
        name = "X-Hello"
        data = "Hewwo"
        part = models.PartList.objects.get(id=1)

        header1 = part.header_set.create(name=name, data=data, ordinal=0)
        header2 = part.header_set.create(name=name, data=data, ordinal=1)

        self.assertEqual(header1[0].name_id, header2[0].name_id)
        self.assertEqual(header1[0].data_id, header2[0].data_id)
        self.assertTrue(header1[1])
        self.assertFalse(header2[1])

    def test_body_get_or_create(self):
        """get_or_create() deduplicates identical body data."""
        body_data = "Hello"

        body1 = models.Body.objects.get_or_create(data=body_data)
        body2 = models.Body.objects.get_or_create(data=body_data)

        self.assertEqual(body1[0].id, body2[0].id)
        self.assertTrue(body1[1])
        self.assertFalse(body2[1])
I have kitchens on the brain. I am starting my kitchen back splash very soon and I could not be more excited. A simple white subway tile. These are a few kitchen favorites I've saved around blog land. They inspire me.
import argparse
import logging
from collections import OrderedDict

from dvc.command.base import CmdBase, append_doc_link, fix_subparsers
from dvc.exceptions import DvcException

logger = logging.getLogger(__name__)


def _show_diff(diff, markdown=False):
    """Render a params diff mapping as a text (or Markdown) table."""
    from dvc.utils.diff import table

    rows = []
    for fname, pdiff in diff.items():
        # Keep the parameters of each file in deterministic, sorted order.
        for param in sorted(pdiff):
            change = pdiff[param]
            rows.append([fname, param, change["old"], change["new"]])

    return table(["Path", "Param", "Old", "New"], rows, markdown)


class CmdParamsDiff(CmdBase):
    """Handler for `dvc params diff`."""

    def run(self):
        """Compute and print the params diff; return 0 on success, 1 on error."""
        try:
            diff = self.repo.params.diff(
                a_rev=self.args.a_rev,
                b_rev=self.args.b_rev,
                all=self.args.all,
            )

            if self.args.show_json:
                import json

                logger.info(json.dumps(diff))
            else:
                output = _show_diff(diff, self.args.show_md)
                if output:
                    logger.info(output)
        except DvcException:
            logger.exception("failed to show params diff")
            return 1

        return 0


def add_parser(subparsers, parent_parser):
    """Register the `params` command group and its `diff` subcommand."""
    PARAMS_HELP = "Commands to display params."

    parser = subparsers.add_parser(
        "params",
        parents=[parent_parser],
        description=append_doc_link(PARAMS_HELP, "params"),
        help=PARAMS_HELP,
        formatter_class=argparse.RawDescriptionHelpFormatter,
    )

    params_sub = parser.add_subparsers(
        dest="cmd",
        help="Use `dvc params CMD --help` to display command-specific help.",
    )
    fix_subparsers(params_sub)

    PARAMS_DIFF_HELP = (
        "Show changes in params between commits in the DVC repository, or "
        "between a commit and the workspace."
    )
    diff_parser = params_sub.add_parser(
        "diff",
        parents=[parent_parser],
        description=append_doc_link(PARAMS_DIFF_HELP, "params/diff"),
        help=PARAMS_DIFF_HELP,
        formatter_class=argparse.RawDescriptionHelpFormatter,
    )
    diff_parser.add_argument(
        "a_rev", nargs="?", help="Old Git commit to compare (defaults to HEAD)"
    )
    diff_parser.add_argument(
        "b_rev",
        nargs="?",
        help=("New Git commit to compare (defaults to the current workspace)"),
    )
    diff_parser.add_argument(
        "--all",
        action="store_true",
        default=False,
        help="Show unchanged params as well.",
    )
    diff_parser.add_argument(
        "--show-json",
        action="store_true",
        default=False,
        help="Show output in JSON format.",
    )
    diff_parser.add_argument(
        "--show-md",
        action="store_true",
        default=False,
        help="Show tabulated output in the Markdown format (GFM).",
    )
    diff_parser.set_defaults(func=CmdParamsDiff)
Suggested Citation: Lovly, C., L. Horn, W. Pao. 2017. KRAS c.183A>C (Q61H) Mutation in Non-Small Cell Lung Cancer. My Cancer Genome https://www.mycancergenome.org/content/disease/lung-cancer/kras/30/ (Updated February 20).
from .config import Config
from .form import UploadForm
from .transfer import PDFTransfer, pdf_saver
from . import utils

from flask import (Flask, render_template, redirect, abort, url_for,
                   send_from_directory)
from flask_bootstrap import Bootstrap

import os

# Application setup: config, Bootstrap assets, and app-specific init hook.
app = Flask(__name__)
app.config.from_object(Config)
Bootstrap(app)
Config.init_app(app)


@app.route('/')
def index():
    """Landing page listing links to all converted images."""
    image_links = utils.build_image_links()
    return render_template('index.html', links=image_links)


@app.route('/upload', methods=['GET', 'POST'])
def upload():
    """Show the upload form; on a valid POST, store the PDF and go home."""
    form = UploadForm()
    if not form.validate_on_submit():
        return render_template('upload.html', form=form)

    # Fall back to a default render width when the user leaves it blank.
    meta = {'width': form.width.data or 1080}
    PDFTransfer.save(form.pdf.data, destination=pdf_saver, metadata=meta)
    return redirect(url_for('index'))


@app.route('/pdf/<pdf>')
def display_pdf(pdf):
    """Serve a previously uploaded PDF, or 404 if it is unknown."""
    target = utils.get_save_path(pdf)
    if os.path.exists(target):
        directory, filename = os.path.split(target)
        return send_from_directory(directory, filename)
    abort(404)
IQ-17 Presidential Address: Toward a Theory of Knowledge, Reddy AAAI. Part 1. IQ-17 Presidential Address: Toward a Theory of Knowledge, Reddy AAAI. Part 2.
# Copyright (c) 2009, 2014 Bartosz Szczesny <bszcz@bszcz.org>
# This program is free software under the MIT license.

import time
import visual


class Lorentz(object):
    """Explicit-Euler integrator for the Lorenz system."""

    def __init__(self, beta, rho, sigma, x, y, z, dt):
        # model parameters
        self.beta = beta
        self.rho = rho
        self.sigma = sigma
        # current coordinates
        self.x = x
        self.y = y
        self.z = z
        # integration time step
        self.dt = dt

    def get_xyz(self):
        """Return the current (x, y, z) position."""
        return self.x, self.y, self.z

    def advance(self):
        """Take a single Euler step of size dt."""
        x, y, z = self.x, self.y, self.z
        dx = self.sigma * (y - x)
        dy = x * (self.rho - z) - y
        dz = x * y - self.beta * z
        self.x = x + self.dt * dx
        self.y = y + self.dt * dy
        self.z = z + self.dt * dz


def main():
    """Draw one long Lorenz trajectory as several VPython curves."""
    visual.rate(100)

    beta, rho, sigma = 8.0/3.0, 28.0, 10.0
    x, y, z = 5.0, 5.0, 5.0
    dt = 0.002
    system = Lorentz(beta, rho, sigma, x, y, z, dt)

    num_curves = 30
    for i in range(num_curves):
        trajectory = visual.curve(color = visual.color.white)
        curve_max_points = 1000  # "skipping points" otherwise
        for j in range(curve_max_points):
            system.advance()
            px, py, pz = system.get_xyz()
            trajectory.append(pos = (px, py, pz))


if __name__ == "__main__":
    main()
Itanagar: In the Northeast, BJP has suffered a major setback. Just before voting for Lok Sabha and assembly elections in Arunachal Pradesh, two BJP ministers and six MLAs joined the National People’s Party (NPP) in Meghalaya’s National People’s Party (NPP) on Tuesday. It is interesting that the BJP in the NPP government in Meghalaya is also included. There are two BJP MLAs in Meghalaya. Home Minister Kumar Y and Tourism Minister Jarkar Gamlin and six MLAs refused to give tickets to the BJP. Yai said that BJP has lost its first reputation in people’s mind by making “false promises”. He said, “We will not fight elections only but we will form NPP government in the state.” Apart from the BJP’s eight MLAs, one MLA from the People’s Party of Arunachal (PPA) and 19 other BJP leaders also joined the NPP. Tell you that the two Lok Sabha seats of Arunachal Pradesh and 60 Assembly seats will be cast on April 11. In the 2014 assembly elections, the Congress won 42 out of 60 seats. Later, many legislators left the party and BJP led by Pema Khandu formed the government.
import pytest

from .._util import LocalProtocolError
from .. import _events
from .._events import *


def test_event_bundle():
    """The _EventBundle base supplies kwargs-only construction, defaults,
    equality/repr, unhashability, and the _validate hook."""
    class T(_events._EventBundle):
        _fields = ["a", "b"]
        _defaults = {"b": 1}

        def _validate(self):
            if self.a == 0:
                raise ValueError

    # basic construction and methods
    t = T(a=1, b=0)
    assert repr(t) == "T(a=1, b=0)"
    assert t == T(a=1, b=0)
    assert not (t == T(a=2, b=0))
    assert not (t != T(a=1, b=0))
    assert (t != T(a=2, b=0))
    with pytest.raises(TypeError):
        hash(t)

    # check defaults
    t = T(a=10)
    assert t.a == 10
    assert t.b == 1

    # no positional args
    with pytest.raises(TypeError):
        T(1)

    with pytest.raises(TypeError):
        T(1, a=1, b=0)

    # unknown field
    with pytest.raises(TypeError):
        T(a=1, b=0, c=10)

    # missing required field
    with pytest.raises(TypeError) as exc:
        T(b=0)
    # make sure we error on the right missing kwarg
    assert 'kwarg a' in str(exc)

    # _validate is called
    with pytest.raises(ValueError):
        T(a=0, b=0)


def test_events():
    """Events normalize their fields to bytes and enforce HTTP header rules
    (Host requirements, status-code ranges, value validation)."""
    with pytest.raises(LocalProtocolError):
        # Missing Host:
        req = Request(method="GET", target="/",
                      headers=[("a", "b")],
                      http_version="1.1")
    # But this is okay (HTTP/1.0)
    req = Request(method="GET", target="/",
                  headers=[("a", "b")],
                  http_version="1.0")
    # fields are normalized
    assert req.method == b"GET"
    assert req.target == b"/"
    assert req.headers == [(b"a", b"b")]
    assert req.http_version == b"1.0"

    # This is also okay -- has a Host (with weird capitalization, which is ok)
    req = Request(method="GET", target="/",
                  headers=[("a", "b"), ("hOSt", "example.com")],
                  http_version="1.1")
    # we normalize header capitalization
    assert req.headers == [(b"a", b"b"), (b"host", b"example.com")]

    # Multiple host is bad too
    with pytest.raises(LocalProtocolError):
        req = Request(method="GET", target="/",
                      headers=[("Host", "a"), ("Host", "a")],
                      http_version="1.1")
    # Even for HTTP/1.0
    with pytest.raises(LocalProtocolError):
        req = Request(method="GET", target="/",
                      headers=[("Host", "a"), ("Host", "a")],
                      http_version="1.0")

    # Header values are validated
    with pytest.raises(LocalProtocolError):
        req = Request(method="GET", target="/",
                      headers=[("Host", "a"), ("Foo", " asd\x00")],
                      http_version="1.0")

    ir = InformationalResponse(status_code=100, headers=[("Host", "a")])
    assert ir.status_code == 100
    assert ir.headers == [(b"host", b"a")]
    assert ir.http_version == b"1.1"

    # InformationalResponse only accepts 1xx status codes.
    with pytest.raises(LocalProtocolError):
        InformationalResponse(status_code=200, headers=[("Host", "a")])

    resp = Response(status_code=204, headers=[], http_version="1.0")
    assert resp.status_code == 204
    assert resp.headers == []
    assert resp.http_version == b"1.0"

    # Response rejects 1xx codes and non-integer status codes.
    with pytest.raises(LocalProtocolError):
        resp = Response(status_code=100, headers=[], http_version="1.0")

    with pytest.raises(LocalProtocolError):
        Response(status_code="100", headers=[], http_version="1.0")

    with pytest.raises(LocalProtocolError):
        InformationalResponse(status_code=b"100",
                              headers=[],
                              http_version="1.0")

    d = Data(data=b"asdf")
    assert d.data == b"asdf"

    eom = EndOfMessage()
    assert eom.headers == []

    cc = ConnectionClosed()
    assert repr(cc) == "ConnectionClosed()"
Home / All / Breakfast Gift Box Hamper - for two! Enjoy a romantic breakfast with two Heart Oak Egg Cups, a chunky Heart Chopping / Serving board and two beautifully handmade natural Linen Stripe Napkins plus a solid Oak Heart Keyring. Create effortless style with this Breakfast Gift Box Hamper and enjoy long and leisurely breakfasts. Serve crusty bread or pastries on the chunky oak heart chopping/serving board. Beautifully present your morning boiled eggs on two handcrafted heart egg cups and complete the look with two crisp blue striped linen napkins. Simply add a pot of hot coffee and relax. Take the day off!
#! /usr/bin/env python # -*- Mode: python; py-indent-offset: 4; tab-width: 8; indent-tabs-mode: t; -*- # # A script for generating a number of flows. # # The output of the script should be saved to a file, and the flows from # that file should be added by the following command: # # web/add_flow.py -f filename # # NOTE: Currently, some of the parameters fo the flows are hard-coded, # and all flows are between same source and destination DPID and ports # (differentiated by different matchSrcMac and matchDstMac). # import copy import pprint import os import sys import subprocess import json import argparse import io import time ## Global Var ## DEBUG=0 pp = pprint.PrettyPrinter(indent=4) ## Worker Functions ## def log_error(txt): print '%s' % (txt) def debug(txt): if DEBUG: print '%s' % (txt) if __name__ == "__main__": usage_msg = "Generate a number of flows by using a pre-defined template.\n" usage_msg = usage_msg + "\n" usage_msg = usage_msg + "NOTE: This script is work-in-progress. Currently all flows are within same\n" usage_msg = usage_msg + "pair of switch ports and contain auto-generated MAC-based matching conditions.\n" usage_msg = usage_msg + "\n" usage_msg = usage_msg + "Usage: %s <begin-flow-id> <end-flow-id>\n" % (sys.argv[0]) usage_msg = usage_msg + "\n" usage_msg = usage_msg + " The output should be saved to a file, and the flows should be installed\n" usage_msg = usage_msg + " by using the command './add_flow.py -f filename'\n" # app.debug = False; # Usage info if len(sys.argv) > 1 and (sys.argv[1] == "-h" or sys.argv[1] == "--help"): print(usage_msg) exit(0) # Check arguments if len(sys.argv) < 3: log_error(usage_msg) exit(1) # Extract the arguments begin_flow_id = int(sys.argv[1], 0) end_flow_id = int(sys.argv[2], 0) if begin_flow_id > end_flow_id: log_error(usage_msg) exit(1) # # Do the work # # NOTE: Currently, up to 65536 flows are supported. 
# More flows can be supported by iterating by, say, iterating over some of # the other bytes of the autogenereated source/destination MAC addresses. # flow_id = begin_flow_id idx = 0 while flow_id <= end_flow_id: mac3 = idx / 255 mac4 = idx % 255 str_mac3 = "%0.2x" % mac3 str_mac4 = "%0.2x" % mac4 src_mac = "00:00:" + str_mac3 + ":" + str_mac4 + ":00:00"; dst_mac = "00:01:" + str_mac3 + ":" + str_mac4 + ":00:00"; print "%s FOOBAR 00:00:00:00:00:00:00:01 1 00:00:00:00:00:00:00:01 2 matchSrcMac %s matchDstMac %s" % (flow_id, src_mac, dst_mac) flow_id = flow_id + 1 idx = idx + 1
There is nothing like a spring day in a field of wildflowers. The warm breeze, the beautiful smells and the vibrant colors are all captured with this beautiful quilt called Wildflowers by Heidi Pridemore. The finished size of this project will be 51" x 63"
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.

from openerp.osv import fields, osv
from openerp.tools.translate import _


class purchase_config_settings(osv.osv_memory):
    """Transient model backing the Purchase settings screen.

    Each selection field maps a 0/1 radio choice either to an implied
    security group (implied_group) or to installing an optional module
    (module_* naming convention of res.config.settings).
    """
    _name = 'purchase.config.settings'
    _inherit = 'res.config.settings'

    _columns = {
        # Enables product variants via the product.group_product_variant group.
        'group_product_variant': fields.selection([
            (0, "No variants on products"),
            (1, 'Products can have several attributes, defining variants (Example: size, color,...)')
            ], "Product Variants",
            help='Work with product variant allows you to define some variant of the same products, an ease the product management in the ecommerce for example',
            implied_group='product.group_product_variant'),
        # Enables multiple units of measure per product.
        'group_uom':fields.selection([
            (0, 'Products have only one unit of measure (easier)'),
            (1, 'Some products may be sold/puchased in different units of measure (advanced)')
            ], "Units of Measure",
            implied_group='product.group_uom',
            help="""Allows you to select and maintain different units of measure for products."""),
        # Enables advanced inventory valuation / costing methods.
        'group_costing_method':fields.selection([
            (0, 'Set a fixed cost price on each product'),
            (1, "Use a 'Fixed', 'Real' or 'Average' price costing method")
            ], "Costing Methods",
            implied_group='stock_account.group_inventory_valuation',
            help="""Allows you to compute product cost price based on average cost."""),
        # Installs the purchase_requisition module when set to 1.
        'module_purchase_requisition': fields.selection([
            (0, 'Purchase propositions trigger draft purchase orders to a single supplier'),
            (1, 'Allow using call for tenders to get quotes from multiple suppliers (advanced)')
            ], "Calls for Tenders",
            help="Calls for tenders are used when you want to generate requests for quotations to several vendors for a given set of products.\n"
                 "You can configure per product if you directly do a Request for Quotation "
                 "to one vendor or if you want a Call for Tenders to compare offers from several vendors."),
        # Enables warning messages on products/partners in purchase orders.
        'group_warning_purchase': fields.selection([
            (0, 'All the products and the customers can be used in purchase orders'),
            (1, 'An informative or blocking warning can be set on a product or a customer')
            ], "Warning", implied_group='purchase.group_warning_purchase'),
        # Installs the stock_dropshipping module when set to 1.
        'module_stock_dropshipping': fields.selection([
            (0, 'Suppliers always deliver to your warehouse(s)'),
            (1, "Allow suppliers to deliver directly to your customers")
            ], "Dropshipping",
            help='\nCreates the dropship Route and add more complex tests\n'
                 '-This installs the module stock_dropshipping.'),
        # Enables vendor pricelist management.
        'group_manage_vendor_price': fields.selection([
            (0, 'Manage vendor price on the product form'),
            (1, 'Allow using and importing vendor pricelists')
            ], "Vendor Price",
            implied_group="purchase.group_manage_vendor_price"),
    }
I recently bought a pair of Vibram Five Fingers running “shoes” after reading about barefoot running on the Britmilfit website. I’m not currently doing Britmilfit, but have done over the last ten years and can thoroughly recommend it. Today I was able to put a last minute client rescheduling to good use, and finally had a chance to get out and try these odd looking things out. With a place for each toe, it’s an odd sensation putting them on, a bit like trying to put skiing gloves on your child – where you’re not totally in control of each digit. I’ve worn them round the house a bit already. I was keen to get used to wearing them and to get used to people looking down and pointing at them (in this case, it was my three children, the 16-month old making the oddest noises of surprise). You have to get used to not landing heavily on your heels, which are no longer have a thick rubber cushion. I thought this would feel odd, but it turns out that my instinct took over – probably in self-protection mode – and I found my running style altered itself fairly quickly. Early morning a few Christmases ago in Dulwich Woods I’d popped my right ankle – audibly. It’s never been quite the same since, and I was definitely conscious of it running “barefoot”. But it wasn’t painful, it was just there. On my way in to Dulwich Park I bumped in to local fitness trainer and Goodrich School Fun Run organiser Liz Stuart, and stopped for a brief chat. She reminded me of the need to get used to this new running style gently. I’m glad she did, or I might have been tempted to add in an extra lap of the horse track – which I’m sure I’d be paying for now. Sure enough, I got some funny looks – bemusement, pity, derision, outright hilarity… so I was glad I’d trained for that. I’d worried beforehand about sharp stones, glass, dog dirt. In fact I wasn’t convinced it would work at all running on the pavement. 
But pavement running is okay, and you just have to be a little more vigilant at spotting sharp or squashy hazards. I kept my first run slow and on the short side – just shy of three miles. One hour later, after stretching down – particularly my calf muscles – I have a very slight burning sensation on the backs of my heels, and my ankle is still there. But generally I feel fine.
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Fix: AutoScaleTerminationPolicy was defined here but missing from
# __all__, so it was silently excluded from star-imports.
__all__ = [
    "Provider",
    "AutoScaleAdjustmentType",
    "AutoScaleTerminationPolicy"
    ]


class Provider(object):
    """
    Defines for each of the supported providers

    :cvar AWS_AUTOSCALE: Amazon AutoScale
    :cvar SOFTLAYER: Softlayer
    :cvar OPENSTACK: OpenStack
    """
    AWS_AUTOSCALE = 'aws_autoscale'
    SOFTLAYER = 'softlayer'
    OPENSTACK = 'openstack'


class AutoScaleAdjustmentType(object):
    """
    The logic to be used to scale the group when its policy is executed.

    :cvar CHANGE_IN_CAPACITY: Increases or decreases the existing capacity.
    :cvar EXACT_CAPACITY: Changes the current capacity to the specified value.
    :cvar PERCENT_CHANGE_IN_CAPACITY: Increases or decreases the capacity by a
                                      percentage.
    """
    CHANGE_IN_CAPACITY = 'CHANGE_IN_CAPACITY'
    EXACT_CAPACITY = 'EXACT_CAPACITY'
    PERCENT_CHANGE_IN_CAPACITY = 'PERCENT_CHANGE_IN_CAPACITY'


class AutoScaleTerminationPolicy(object):
    """
    The policy to be used for automatic removal of members from an auto scale
    group. Policy determines which members are chosen first for removal.

    :cvar OLDEST_INSTANCE: Terminates the oldest instance in the group.
    :cvar NEWEST_INSTANCE: Terminates the newest instance in the group.
    :cvar CLOSEST_TO_NEXT_CHARGE: Terminates instances that are closest to the
                                  next billing charge.
    :cvar DEFAULT: Default termination policy.
    """
    OLDEST_INSTANCE = 0
    NEWEST_INSTANCE = 1
    CLOSEST_TO_NEXT_CHARGE = 2
    DEFAULT = 3
Point Blank Recreational Land: Nice recreational tract for sale located off of Over The Hill Road in Point Blank.Youth camps and retreats provide a variety of of summer opportunities geared toward all ages and. Found 6 results for Wall Hack Point Blank. Wall Hack Point Blank Download: 12-Aug-2017:. Gantt Chart Template Collection The Gantt chart template collection was designed for sophisticated professionals who want to distinguish their work with rich.More templates like this. Blank and General. Brochures. Budgets.Point Blank Cheats - PlayStation Cheats: This page contains a list of cheats, codes, Easter eggs, tips, and other secrets for Point Blank for PlayStation. Download Point Blank 1967 1080p Movie Download hd popcorns,.Serving Houston, Magnolia, Tomball, Waller, Hockley, Hempstead,. Point Blank Tahadi Waller Hilesi 09 August 2014. Point Blank 19 Juli 2014 Work All Windows Fitur Cheat Point Blank Auto Head Shot 19. To understand how to use the new HUBZone Map, check out these videos: Searching. My Mac does not have a cd drive so I am looking for where I can download a safe copy to. Mouse Mischief Fun with Dates and Calendars PowerPoint. Blank monthly calendar Excel. Serving Houston, Magnolia, Tomball, Waller, Hockley. blank page. 3. Our main office is also the supply point for our bulk delivery bobtails and. call us today. App Dist Trinity Vidor Village Mills Waller Wallis Wallisville Warren Washington Webster Weimar. 2017 Houston Realtors.Aplikasi Guidare Yandere Simulator 2017 ini berisi tips dan trik bermain Yandere. Waller. HORRIBLE. Free play Point Blank Tips and tricks to play. Thankfully Smirnoff Sound Collective recently collaborated with Point Blank.Best website for music streaming and download. 2017. In 2016 just 17% of. Results of waller point blank free download 2012: Free download software, Free Video dowloads, Free Music downloads, Free Movie downloads, Games. Callie Waller is a practicing Family Medicine doctor in Norwalk, IA. 
This section includes topics on how to make your own corporate and personal calendars in MS Office programs or how to download free calendar. Cheat Point Blank Hack G-Cash Permanen Simple 2014 masa Berlaku.Provides information about fishing on the lake, plus area accommodations and attractions.Pointblank Hacks and Cheats Free Download for all Pointblank Worldwide. Printable Calendar (PDF) for easy printing Add own events to PDF Calendar Phases of the Moon are calculated using local time in New York. A fill in the blank form used to create your Unconditional. 2017 ALL RIGHTS. Download Cheat Point Blank Garena Update 14 15 16 September 2017 VIP Pekalongan Kommuniti Hack Update Terbaru Extreem Cheat PB Garena Bomberman, AutoKiler, Fastkiler.