text
stringlengths 29
850k
|
|---|
import json
import requests
from flask_restful import Resource, request, abort
from flask_restful_swagger import swagger
from hpcpm.api import log
from hpcpm.api.helpers.database import database
from hpcpm.api.helpers.utils import abort_when_port_invalid
from hpcpm.api.helpers.constants import COMPUTATION_NODE_PARAM_NAME, COMPUTATION_NODE_PARAM_ADDRESS, \
COMPUTATION_NODE_PARAM_PORT, COMPUTATION_NODE_ADDED_RESPONSE, COMPUTATION_NODE_NOT_FOUND_RESPONSE, \
COMPUTATION_NODE_FETCHED_RESPONSE, COMPUTATION_NODE_PUT_NOT_FOUND_RESPONSE
from hpcpm.api.helpers.requests import get_node_information, delete_power_limit
from hpcpm.api.resources.endpoints.nodes.computation_node.StatisticsInterval import set_statistics_interval
class ComputationNode(Resource):
    """REST resource managing computation node registrations.

    PUT registers/updates a node, GET fetches its stored info, and DELETE
    removes it together with its stored power limit settings.
    """

    @swagger.operation(
        notes='This endpoint is used for registering new computation node',
        nickname='/nodes/computation_node/<string:name>',
        parameters=[
            COMPUTATION_NODE_PARAM_NAME,
            COMPUTATION_NODE_PARAM_ADDRESS,
            COMPUTATION_NODE_PARAM_PORT
        ],
        responseMessages=[
            COMPUTATION_NODE_ADDED_RESPONSE,
            COMPUTATION_NODE_PUT_NOT_FOUND_RESPONSE
        ]
    )
    def put(self, name):
        """Register a computation node and store its backend device info.

        Queries the node itself for backend information, upserts the record
        under *name*, and initialises a default statistics interval for each
        reported device.  Aborts with 406 when the node cannot be contacted.
        """
        address = request.args.get('address')
        port = request.args.get('port')
        abort_when_port_invalid(port)
        # Warn (but do not fail) when a differently-named node already
        # occupies this address:port pair.
        node_by_ip = database.get_computation_node_info_by_address(address, port)
        if node_by_ip and node_by_ip.get('name') != name:
            log.warning('Node with IP: %s:%s is present in database: %s', address, port, node_by_ip)
        try:
            response = get_node_information(address, port)
        except requests.exceptions.ConnectionError:
            log.error('Connection could not be established to %s:%s', address, port)
            abort(406)
        log.info('Response %s:', response.text)
        backend_info = json.loads(response.text)
        node_info = {
            'name': name,
            'address': address,
            'port': port,
            'backend_info': backend_info
        }
        upsert_result = database.replace_computation_node_info(name, node_info)
        if upsert_result.modified_count:
            log.info('Node %s was already present in a database', name)
            log.info('Stored Node info %s', node_info)
        else:
            log.info('Stored Node info %s on id %s', node_info, upsert_result.upserted_id)
        # Every reported device starts with a default 1-second statistics
        # gathering interval.
        for device in backend_info['devices']:
            set_statistics_interval(name, device['id'], 1)
        return name, 201

    @swagger.operation(
        notes='This endpoint is used for getting computation node information from database',
        nickname='/nodes/computation_node/<string:name>',
        parameters=[
            COMPUTATION_NODE_PARAM_NAME
        ],
        responseMessages=[
            COMPUTATION_NODE_FETCHED_RESPONSE,
            COMPUTATION_NODE_NOT_FOUND_RESPONSE
        ]
    )
    def get(self, name):
        """Return the stored node record for *name*, or 404 if unknown."""
        result = database.get_computation_node_info(name)
        if not result:
            log.info('No such computation node %s', name)
            abort(404)
        log.info('Successfully get node %s info: %s', name, result)
        return result, 200

    @swagger.operation(
        notes='This endpoint is used for removing computation node information from database',
        nickname='/nodes/computation_node/<string:name>',
        parameters=[
            COMPUTATION_NODE_PARAM_NAME
        ],
        responseMessages=[
            COMPUTATION_NODE_FETCHED_RESPONSE,
            COMPUTATION_NODE_NOT_FOUND_RESPONSE
        ]
    )
    def delete(self, name):
        """Delete the node record, its power limits, and clear limits on devices.

        Aborts with 404 when the node (or its power limit info) is unknown and
        with 406 when the node cannot be reached to clear device limits.
        """
        result_node_info = database.delete_computation_node_info(name)
        result_power_limit_info = database.delete_power_limit_infos(name)
        if not result_node_info:
            log.info('No such computation node %s', name)
            abort(404)
        if not result_power_limit_info:
            log.info('No such power limit info for node %s', name)
            abort(404)
        address = result_node_info.get('address')
        port = result_node_info.get('port')
        abort_when_port_invalid(port)
        for device in result_node_info['backend_info']['devices']:
            try:
                response = delete_power_limit(address, port, device['id'], device['Type'])
                log.info('Device %s deletion info: %s', device['id'], response)
            except requests.exceptions.ConnectionError:
                log.error('Connection could not be established to %s:%s', address, port)
                abort(406)
        log.info('Successfully deleted node %s info and its power limit: %s %s', name, result_node_info,
                 result_power_limit_info)
        # Bug fix: a bare `return 204` made Flask-RESTful serialize the
        # integer 204 as the JSON body with HTTP status 200.  Return an empty
        # body with the intended 204 No Content status instead.
        return '', 204
|
Having washed up in the wake of Oasis’ success, stadium-filling Welsh pub rockers Stereophonics are clearly only too aware of the shifting temper of the times, hence their reinvention here as keepers of the soul-funk flame. Their fourth album sees them attempt the same move The Charlatans made with their last LP, but less successfully. Kelly Jones’ laryngitic bellow — which makes Rod Stewart sound like a castrato — is applied to Stones-y epics and sub-Weller workouts, both of which strive for rock’n’roll authenticity but ultimately just prove how lacking in soul the trio really are.
|
# Copyright 2014 NeuroData (http://neurodata.io)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import time
import requests
from ndlib.ndtype import *
# Pull the shared secret token from Django settings when running inside the
# server; fall back to anonymous requests when Django is absent or the
# setting is not configured.
try:
    from django.conf import settings
    SECRET_TOKEN = settings.SECRET_TOKEN
except Exception:
    # Narrowed from a bare `except:` which would also swallow
    # KeyboardInterrupt/SystemExit.  ImportError (no Django) and
    # AttributeError/ImproperlyConfigured (missing setting) both land here.
    SECRET_TOKEN = None
def generateURLBlosc(server_name, token_name, channel_list, res_value, range_args, direct=False):
    """Build a blosc cutout URL for the given token, channels and ranges.

    *range_args* must supply the six x/y/z bounds.  Returns "" when URL
    construction fails (e.g. wrong range_args arity).
    """
    try:
        channels = ','.join(channel_list)
        url = "https://{}/sd/{}/{}/blosc/{}/{},{}/{},{}/{},{}/".format(
            server_name, token_name, channels, res_value, *range_args)
        if direct:
            url += DIRECT
    except Exception:
        return ""
    return url
def generateURLBlaze(server_name, token_name, channel_list, res_value, range_args):
    """Build a blaze blosc URL for the given token, channels and ranges.

    Returns "" when URL construction fails (e.g. wrong range_args arity).
    """
    try:
        channels = ','.join(channel_list)
        url = "https://{}/blaze/{}/{}/blosc/{}/{},{}/{},{}/{},{}/".format(
            server_name, token_name, channels, res_value, *range_args)
    except Exception:
        return ""
    return url
def postJson(url, data):
    """POST *data* as a JSON body to *url*, adding the token auth header.

    TLS verification is disabled.  Returns the Response, or the HTTPError
    instance if one was raised.
    """
    auth = {'Authorization': 'Token {}'.format(SECRET_TOKEN)} if SECRET_TOKEN else None
    try:
        return requests.post(url, json=data, headers=auth, verify=False)
    except requests.HTTPError as err:
        return err
def getJson(url):
    """Alias for getURL: issue an authenticated GET request."""
    return getURL(url)
def deleteJson(url):
    """Alias for deleteURL: issue an authenticated DELETE request."""
    return deleteURL(url)
def getURL(url):
    """GET *url* with the token auth header; TLS verification disabled.

    Returns the Response, or the HTTPError instance if one was raised.
    """
    auth = {'Authorization': 'Token {}'.format(SECRET_TOKEN)} if SECRET_TOKEN else None
    try:
        return requests.get(url, headers=auth, verify=False)
    except requests.HTTPError as err:
        return err
def deleteURL(url):
    """DELETE *url* with the token auth header; TLS verification disabled.

    Returns the Response, or the HTTPError instance if one was raised.
    """
    auth = {'Authorization': 'Token {}'.format(SECRET_TOKEN)} if SECRET_TOKEN else None
    try:
        return requests.delete(url, headers=auth, verify=False)
    except requests.HTTPError as err:
        return err
def postURL(url, data):
    """POST *data* form-encoded (positional `data=`, unlike postJson's
    `json=`) to *url* with the token auth header; TLS verification disabled.

    Returns the Response, or the HTTPError instance if one was raised.
    """
    auth = {'Authorization': 'Token {}'.format(SECRET_TOKEN)} if SECRET_TOKEN else None
    try:
        return requests.post(url, data, headers=auth, verify=False)
    except requests.HTTPError as err:
        return err
|
The only advice I got on Taoist meditation was: try not to try not to think. There are things you’re just never going to accomplish if you really put your head to them.
When I gave my friend this book, he complained of the poem-like verses, said he wished I would write stories to explain them instead (my 81 instalments is a bit of a friendly nod to that). But the reality is, this is the ineffable, baby! You can write treatises on it and never get it. Insight comes from the stripping away of the intellect, not the furious working of it.
…takes my breath away every time I hear it. Now I could expound thousands of words explaining the simultaneous feelings of spiritual longing and connection, and all the deep passion and paradox and beauty in the haiku – which you would probably just smile and nod politely over anyway. But if you get it, you GET it.
If you’ve had that moment, your heart cries YES to this poem. If you haven’t, it will stick with you somehow, and perhaps years from now, you will have that moment and this haiku will come back to you (and you’ll be madly google-ing to try and find it again).
If you truly, deeply understand something, it’s simple, and you can say it in a few words, or in a smile, a look, a laugh. If you lay a lot of words on it, you’re talking around it, of it, but it isn’t IT. The tao that can be told, well, that isn’t quite it.
So try not to try not to think, just let the words be.
ToV 2 – no judging!
does your eye see what I’d call blue?
I really like this btw!
Yup, I agree, it is a little piece of brilliance.
I guess I will just have to read all the rest to find out.
I love the Basho haiku also. The feeling of something underneath the words, separating from the words as we speak them. Grasping for underneath starts to strip the meaning. Which just reminds me, what’s underneath that? For me it’s less about getting it, maybe it’s more about not getting it.
I am comforted by the end of the chapter: darkness. Like a dissolving and a forgiveness, for thinking like humans maybe. A rest.
|
# -*- coding: utf-8 -*-
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (C) 2012 Yahoo! Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import collections
import logging
from networkx import exception as g_exc
from networkx.algorithms import dag
from networkx.classes import digraph
from taskflow import exceptions as exc
from taskflow.patterns import ordered_flow
LOG = logging.getLogger(__name__)
class Flow(ordered_flow.Flow):
    """A flow which will analyze the attached tasks input requirements and
    determine who provides said input and order the task so that said providing
    task will be ran before.

    NOTE: this module is Python 2 code (`iteritems`, `nodes_iter`,
    pre-2.0 networkx API).
    """

    def __init__(self, name, parents=None, allow_same_inputs=True):
        super(Flow, self).__init__(name, parents)
        # Directed graph of task nodes; connect() adds edges pointing
        # provider -> consumer, labelled with the item name provided.
        self._graph = digraph.DiGraph()
        # True once connect() has linked providers to consumers; reset on add.
        self._connected = False
        # When False, connect() raises if a consumer would receive the same
        # required item from more than one provider.
        self._allow_same_inputs = allow_same_inputs

    def add(self, task):
        # Do something with the task, either store it for later
        # or add it to the graph right now...
        #
        # Only insert the node to start, connect all the edges
        # together later after all nodes have been added.
        self._graph.add_node(task)
        self._connected = False

    def _fetch_task_inputs(self, task):
        """Collect the input values for *task* from prior task results.

        Presumably self.results is a list of (task, result_dict) pairs
        maintained by the ordered_flow base class -- TODO confirm.
        """

        def extract_inputs(place_where, would_like, is_optional=False):
            # For each wanted item name, scan earlier results for a task that
            # provides it and copy the value in.
            for n in would_like:
                for (them, there_result) in self.results:
                    if not n in set(getattr(them, 'provides', [])):
                        continue
                    # Required items must come from a task that is actually
                    # wired to us in the graph; optional items may come from
                    # any provider.
                    if (not is_optional and
                            not self._graph.has_edge(them, task)):
                        continue
                    if there_result and n in there_result:
                        place_where[n].append(there_result[n])
                        if is_optional:
                            # Take the first task that provides this optional
                            # item.
                            break
                    elif not is_optional:
                        # Provider ran but produced no value: record None so
                        # the requirement is still visibly present.
                        place_where[n].append(None)

        required_inputs = set(getattr(task, 'requires', []))
        optional_inputs = set(getattr(task, 'optional', []))
        # An item listed both ways is treated as required only.
        optional_inputs = optional_inputs - required_inputs
        task_inputs = collections.defaultdict(list)
        extract_inputs(task_inputs, required_inputs)
        extract_inputs(task_inputs, optional_inputs, is_optional=True)

        def collapse_functor(k_v):
            # Single-provider items are unwrapped from their one-element list.
            (k, v) = k_v
            if len(v) == 1:
                v = v[0]
            return (k, v)

        return dict(map(collapse_functor, task_inputs.iteritems()))

    def order(self):
        """Return tasks in dependency order; raise if the graph has a cycle."""
        self.connect()
        try:
            return dag.topological_sort(self._graph)
        except g_exc.NetworkXUnfeasible:
            raise exc.InvalidStateException("Unable to correctly determine "
                                            "the path through the provided "
                                            "flow which will satisfy the "
                                            "tasks needed inputs and outputs.")

    def connect(self):
        """Connects the nodes & edges of the graph together."""
        if self._connected or len(self._graph) == 0:
            return

        # Figure out the provider of items and the requirers of items.
        provides_what = collections.defaultdict(list)
        requires_what = collections.defaultdict(list)
        for t in self._graph.nodes_iter():
            for r in getattr(t, 'requires', []):
                requires_what[r].append(t)
            for p in getattr(t, 'provides', []):
                provides_what[p].append(t)

        def get_providers(node, want_what):
            # Providers already wired to *node* for item *want_what*
            # (edge data is a {item_name: True} dict, see add_edge below).
            providers = []
            for (producer, me) in self._graph.in_edges_iter(node):
                providing_what = self._graph.get_edge_data(producer, me)
                if want_what in providing_what:
                    providers.append(producer)
            return providers

        # Link providers to consumers of items.
        for (want_what, who_wants) in requires_what.iteritems():
            who_provided = 0
            for p in provides_what[want_what]:
                # P produces for N so thats why we link P->N and not N->P
                for n in who_wants:
                    if p is n:
                        # No self-referencing allowed.
                        continue
                    if (len(get_providers(n, want_what)) and not
                            self._allow_same_inputs):
                        msg = "Multiple providers of %s not allowed."
                        raise exc.InvalidStateException(msg % (want_what))
                    self._graph.add_edge(p, n, attr_dict={
                        want_what: True,
                    })
                    who_provided += 1
            if not who_provided:
                who_wants = ", ".join([str(a) for a in who_wants])
                raise exc.InvalidStateException("%s requires input %s "
                                                "but no other task produces "
                                                "said output." % (who_wants,
                                                                  want_what))

        self._connected = True
|
NEW TECHNOLOGY BRINGS ABOUT NEW DVD PACKAGING!
high quality, high Definition DVD Cases! Store your High-Definition movies in our new vibrant blue Blu-ray DVD cases! Push-button hub, literature clips, full sleeve for artwork. Exclusive printed Blu-ray logos on center top of front side.
|
# -*- coding: utf-8 -*-
from datetime import datetime, timedelta
def parse_date_publicportal(date_str):
    """Keep only the second and last space-separated tokens of *date_str*.

    E.g. "Mon 15.01.2018 at 10:30" -> "15.01.2018 10:30".
    """
    tokens = date_str.split(' ')
    return '{} {}'.format(tokens[1], tokens[-1])
def subtract_from_time(date_time, subtr_min, subtr_sec):
    """Subtract minutes/seconds from a 'DD.MM.YYYY HH:MM' timestamp.

    Returns the result in ISO 8601 format.
    """
    parsed = datetime.strptime(date_time, "%d.%m.%Y %H:%M")
    delta = timedelta(minutes=int(subtr_min), seconds=int(subtr_sec))
    return (parsed - delta).isoformat()
def insert_tender_id_into_xpath(xpath_to_change, tender_id):
    """Substitute *tender_id* into the '{}' placeholder of an XPath template."""
    return xpath_to_change.format(tender_id)
def adapt_tender_data(tender_data):
    """Truncate lot amounts to ints and roll them up into the tender totals.

    Each lot's value and minimalStep amounts are truncated in place via
    int().  The tender-level totals become the sums over the lots; when a
    sum is zero (e.g. no lots), the original tender-level amount is kept,
    truncated to int.  Mutates and returns *tender_data*.
    """
    total_value = 0
    total_step = 0
    for lot in tender_data['data'].get('lots', []):
        lot['value']['amount'] = int(lot['value']['amount'])
        total_value += lot['value']['amount']
        lot['minimalStep']['amount'] = int(lot['minimalStep']['amount'])
        total_step += lot['minimalStep']['amount']
    data = tender_data['data']
    if total_step != 0:
        data['minimalStep']['amount'] = total_step
    else:
        data['minimalStep']['amount'] = int(data['minimalStep']['amount'])
    if total_value != 0:
        data['value']['amount'] = total_value
    else:
        data['value']['amount'] = int(data['value']['amount'])
    return tender_data
def get_only_numbers(given_string):
    """Concatenate all purely-digit tokens of *given_string* into one int.

    E.g. u"id 12 ref 34" -> 1234.  Raises ValueError when no digit token
    exists (int of an empty string).
    """
    digit_tokens = (token for token in given_string.split() if token.isdigit())
    return int(u''.join(digit_tokens))
|
Sorry this class is sold out!
However our Classes Page is constantly being updated with new opportunities to learn.
Contact us directly at info@cupcakesweeties.co.nz or call us on 04-577 2962 to find out more about our classes.
Our Buttercream Flowers decorating classes are run on a Tuesday from 7pm to 9pm for a maximum of 8 participants.
The cost for this class is $60 - and includes a box of 6 chocolate cupcakes you will decorate and take home with you. Please note that this is a fully hands on class where you learn a lot of different techniques.
Join one of our scheduled classes, or get a group of friends together and book a private class!
For any further information please email info@cupcakesweeties.co.nz or call us on 04 577 2962.
|
# -*- coding: utf-8 -*-
#
# CurrentCost GUI
#
# Copyright (C) 2008 Dale Lane
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# The author of this code can be contacted at Dale.Lane@gmail.com
# Any contact about this application is warmly welcomed.
#
import os
import serial
import time
import string
from currentcostparser import CurrentCostDataParser
from currentcostdb import CurrentCostDB
from tracer import CurrentCostTracer
# Module-level diagnostics tracer shared by the connection class below.
trc = CurrentCostTracer()

#
# Background worker that downloads history data from a CurrentCost meter
# over a serial connection and stores it in the local database.
#
# Dale Lane (http://dalelane.co.uk/blog)
class CurrentCostSerialHistoryConnection():
    # GUI handle used to report errors and request graph redraws.
    guicallback = None

    #
    # Establish a connection to the CurrentCost meter
    #
    # Runs a read loop (intended for a background thread) until Disconnect()
    # sets self.toCancel.  NOTE: this file is Python 2 code
    # (`except Exception, exception` syntax).
    #
    def EstablishConnection(self, comportobj, guihandle, dbfilelocation):
        global trc
        trc.FunctionEntry("EstablishConnection")

        self.ser = comportobj
        self.toCancel = False
        self.guicallback = guihandle

        myparser = CurrentCostDataParser()

        #
        # we create our own connection to the database here
        #
        # we need our own connection to the database because we are running
        # in a background thread, and pysqlite (used to implement the database)
        # cannot reuse a connection across multiple threads
        # the connection is relatively low cost, so having two connections open
        # - one for the GUI thread and one for this background worker thread -
        # doesn't seem like a burdensome extravagance :-)
        #
        dbconnection = CurrentCostDB()
        dbconnection.InitialiseDB(dbfilelocation)

        #
        # look for the current reading in the data
        #
        line = ""
        # Tracks whether history data has arrived that the graphs have not
        # yet been redrawn for.
        receivedHistory = False

        while self.toCancel == False:
            try:
                line = self.ser.readUpdate()

                # try to parse the XML
                currentcoststruct = myparser.parseCurrentCostXML(line)

                if currentcoststruct != None:
                    if 'hist' in currentcoststruct['msg']:
                        # we have received history data - parse and store the CurrentCost
                        # data in the datastore
                        # the parser will return the number of updates still expected
                        # (0 if this was the last or only expected update)
                        myparser.storeTimedCurrentCostData(dbconnection)
                        receivedHistory = True
                    elif receivedHistory == True:
                        # we received live data only
                        # if we have received un-graphed history data, we refresh the
                        # graphs now
                        trc.Trace("finished receiving history data - need to redraw graphs")
                        self.guicallback.updateGraphs()
                        receivedHistory = False
            except Exception, exception:
                # Only report the failure if it wasn't caused by a
                # deliberate cancel/disconnect.
                if self.toCancel == False:
                    self.guicallback.exitOnError('Error reading from COM port: ' + str(exception))
                    trc.Error("Error when closing COM port")
                    trc.Error(str(exception))
                trc.FunctionExit("EstablishConnection")
                return

        # cleanup
        dbconnection.CloseDB()
        try:
            self.ser.disconnect()
        except Exception, exc:
            self.guicallback.exitOnError('Error when closing COM port')
            trc.Error("Error when closing COM port")
            trc.Error(str(exc))

        trc.FunctionExit("EstablishConnection")

    #
    # Disconnect from the serial port
    #
    # Signals the read loop above to stop, then closes the port.
    #
    def Disconnect(self):
        self.toCancel = True
        self.ser.disconnect()
|
E-Commerce SEO means Search Engine Optimisation done specifically for e-commerce websites. Organic traffic is a main driver of growth, sales and customer acquisition for ecommerce stores. Having the right Ecommerce SEO strategy for your website is thus essential.
Nowadays, selling online is getting easier and easier. With all the CMS, payment portals, drop shipping providers available in the market, it has never been so easy to setup your own e-commerce website online.
Globally, the Ecommerce market sales are increasing exponentially! As you can see in the chart below, the ecommerce sales grew 11% in the past 2 years. It is previewed to reach the amazing value of 1.7 Trillion USD by 2020!
The SEO for e-commerce brings results with more sustainability on medium and long-term by focusing on organic traffic – the most qualified and converting traffic for your business.
Our e-commerce SEO services include: keywords audit, find your target-audience, competitors analysis, Link building (Off-Page SEO), define a custom-made SEO strategy, recommendations and activity reports each month (including rankings and goal conversions).
Setting up your online business is quick and easy with the current technology and solutions available, however, being successful in retailing and selling online is not! The online retail market is very competitive and run by major brands and websites such as Amazon, Ebay, or Alibaba.
As the demand and the offer increases, search engines also increase their website’s ranking criteria to best attend user’s expectations. In addition, the rules are very rigid on this field and this is why a specialized SEO service for your e-commerce business is vital.
If you wish to have visibility online for your products and/or services, to appear on the main search queries leading to conversion and sales, it is necessary to optimize your e-commerce website and pages. Search Engine Optimisation is the right solution for your e-commerce business and will help you to increase your sales.
UniK SEO has a team of digital marketing experts focused and dedicated on e-commerce SEO for your website. We create custom built digital strategies for e-commerce stores including SEO and Pay Per Click services. We create SEO strategies for your e-commerce that get your website to the top of Google for the most relevant searched terms. Contact us today for a FREE SEO Audit on your website.
How can we help you drive more sales with ecommerce SEO?
The first necessary step of any successful ecommerce SEO strategy is to understand the keywords your target audience uses when they are searching for the products or services that you are selling.
We create, update and optimize your product or service pages so that they appeal to customers and search engines. Your ecommerce website content needs to feature relevant keywords related to what you are selling online.
How the pages on your online store are organized and structured — is an important SEO consideration because the average ecommerce site tends to have significantly more pages than an average website.
The most effective way to get authority and SEO ranking for your ecommerce store. We will build your backlink profile to increase your website popularity and achieve better rankings in order to get more traffic and sales.
We are here to help you discover the full potential of your online store and reach your business goals by using the power of E-commerce SEO!
|
from util import *
class TuringovStroj(types.SimpleNamespace):
    """A deterministic Turing machine (identifiers are Croatian:
    stanja=states, abeceda=alphabet, radna_abeceda=tape alphabet,
    praznina=blank, prijelaz=transition function, početno=start state,
    prihvat=accept state)."""

    @classmethod
    def iz_komponenti(klasa, stanja, abeceda, radna_abeceda, praznina,
                      prijelaz, početno, prihvat):
        """Construct a machine from its components, validating them:
        nonempty input alphabet contained in the tape alphabet, blank in
        the tape alphabet but not the input alphabet, start/accept states
        in the state set, and a total transition function
        (state minus accept) x tape-symbol -> state x tape-symbol x {-1, 1}.
        """
        assert abeceda
        assert abeceda <= radna_abeceda
        assert praznina in radna_abeceda - abeceda
        assert {početno, prihvat} <= stanja
        assert funkcija(prijelaz,
                        Kartezijev_produkt(stanja - {prihvat}, radna_abeceda),
                        Kartezijev_produkt(stanja, radna_abeceda, {-1, 1}))
        # vars() here is the dict of all the local names above, which match
        # the constructor's keyword arguments exactly.
        return klasa(**vars())

    @classmethod
    def iz_tablice(klasa, tablica):
        """Parse the tabular representation of a Turing machine.
        See util.parsiraj_tablicu_TS for details."""
        return klasa.iz_komponenti(*parsiraj_tablicu_TS(tablica))

    @property
    def komponente(stroj):
        """Relational definition - decomposition into a 7-tuple."""
        return (stroj.stanja, stroj.abeceda, stroj.radna_abeceda,
                stroj.praznina, stroj.prijelaz, stroj.početno, stroj.prihvat)

    def prihvaća(T, riječ):
        """Does the Turing machine T accept the given word?
        Semi-decidable: may loop forever when the word is not accepted."""
        return T.rezultat(riječ) is not None

    def izračunavanje(T, riječ):
        """Generator of the machine's computation on the given word:
        yields (state, head position, tape) configurations, starting from
        the initial configuration and stopping after the accept state."""
        assert set(riječ) <= T.abeceda
        stanje, pozicija, traka = T.početno, 0, list(riječ)
        yield stanje, pozicija, traka
        while stanje != T.prihvat:
            # Extend the tape lazily with blanks as the head moves right.
            if pozicija >= len(traka): traka.append(T.praznina)
            stanje, traka[pozicija], pomak = T.prijelaz[stanje, traka[pozicija]]
            # The head cannot move left of the tape's start.
            pozicija = max(pozicija + pomak, 0)
            yield stanje, pozicija, traka

    def rezultat(T, riječ):
        """Run T on the word and return the tape contents on acceptance
        (as a string when every cell is a single character, else a list).
        Returns None when a trivially diverging configuration is detected:
        the head at the tape's right end in a state that, on blank, stays
        put and keeps moving right forever.  May still loop on other
        non-accepting computations."""
        for stanje, pozicija, traka in T.izračunavanje(riječ):
            if stanje == T.prihvat: break
            if (T.prijelaz[stanje, T.praznina] == (stanje, T.praznina, 1) and
                pozicija == len(traka)): return
        # Strip trailing blanks from the result.
        while traka and traka[~0] == T.praznina: del traka[~0]
        join_ok = all(type(znak) is str and len(znak) == 1 for znak in traka)
        return ''.join(traka) if join_ok else traka
|
Add a Defibrillator to your purchase?
Sport Promote was established in 2007 and is now affiliated with Promote Medical.
Sport Promote provides specialised training for healthcare professionals.
Where SportPromote previously provided both equipment and education, Promote Medical has been set up to specifically focus on the design, delivery and implementation of a one stop service to provide you with all of your equipment needs. This allows SportPromote to focus purely on education and training and Promote Medical on equipment and services.
In recent years Sport Promote has supplied medical equipment to multiple sporting organisations throughout Europe, including the European football governing body UEFA, the Scottish FA, the Irish FA and the Scottish Institute of Sport. Promote Medical is building and expanding on this experience to provide a revolutionary model of emergency equipment design, supply and aftercare.
Our extensive association and accreditation to the medical sector and international sports world guarantees a quality approved service and supply solution for all of your pre-hospital immediate care requirements.
© Promote Medical 2019. All rights reserved. site by fatBuzz.
|
from netforce.controller import Controller
from netforce import database
from netforce.model import get_model,clear_cache
from netforce import template
class IssueRM(Controller):
    """Netforce controller for the raw-material issue page (/issue_rm):
    GET renders the form, POST validates a scanned component barcode and
    quantity.  Goods issue creation itself is still TODO."""
    _path="/issue_rm"

    def get(self):
        # Render the empty issue form.
        db=database.get_connection()
        try:
            data={}
            html=template.render("issue_rm",data)
            self.write(html)
            db.commit()
        except Exception as e:
            # NOTE(review): on failure nothing is written to the response;
            # the client gets an empty page and the error only reaches the
            # server log.
            db.rollback()
            import traceback
            traceback.print_exc()

    def post(self):
        # Validate the submitted barcode/qty; on any error re-render the
        # form with an error message.
        db=database.get_connection()
        data={}
        try:
            data["barcode"]=self.get_argument("barcode",None)
            data["qty"]=self.get_argument("qty",None)
            barcode=data["barcode"]
            if not barcode:
                raise Exception("Missing barcode!")
            # The barcode is expected to encode the component's database id.
            barcode=int(barcode)
            qty=data["qty"]
            if not qty:
                raise Exception("Missing qty!")
            qty=int(qty)
            res=get_model("production.component").search([["id","=",barcode]])
            if not res:
                raise Exception("Invalid barcode")
            comp_id=res[0]
            # TODO: create goods issue for that component
            db.commit()
            self.redirect("/issue_rm")
        except Exception as e:
            data["error"]="ERROR: %s"%e
            html=template.render("issue_rm",data)
            self.write(html)
            db.rollback()
            import traceback
            traceback.print_exc()

# Register the controller with the framework at import time.
IssueRM.register()
|
Chakla are traditional wall hangings of Gujarat. It exhibits a fine piece of embroidery work done with silk threads on cotton cloth. Chakla depicts the folk art and culture of the state.
The corners in a Chakla are often highlighted. The designs usually consist of geometrical shapes, flowers and animals. Blue, yellow and brown are the usual colors employed. These wall hangings may contain mirror-work and frames.
|
# -*- coding: utf-8 -*-
"""
gdown.modules.rapidu
~~~~~~~~~~~~~~~~~~~
This module contains handlers for rapidu.
"""
import json
import re
from datetime import datetime, timedelta

from ..module import browser, acc_info_template
from ..exceptions import ModuleError
def accInfo(username, passwd, proxy=False):
    """Return account info for a rapidu.net login.

    Logs in via the AJAX endpoint and classifies the account as
    'deleted', 'free', 'premium' (with expire_date and transfer) or
    'blocked'.  Raises ModuleError on an unrecognised response, dumping
    the raw response to gdown.log for inspection.
    """
    acc_info = acc_info_template()
    r = browser(proxy)
    r.verify = False  # NOTE(review): TLS verification deliberately disabled
    rc = r.post('https://rapidu.net/ajax.php?a=getUserLogin',
                {'login': username, 'pass': passwd, 'remember': 0, '_go': ''}).json()
    if rc['message'] == 'error':
        acc_info['status'] = 'deleted'
        return acc_info
    elif rc['message'] == 'success':
        page = r.get('https://rapidu.net').text
        if 'Account: <b>Free</b>' in page or 'Konto: <b>Free</b>' in page:
            acc_info['status'] = 'free'
            return acc_info
        else:
            # Fragile regex matching both English and Polish page variants;
            # the original author flagged it as wrong -- TODO rewrite against
            # real page samples.
            days = re.search('A?c?c?o?u?n?t?K?o?n?t?o?: <b>Premium \(([0-9]+) dz?i?e?ń?a?y?s?n?i?\)</b>', page).group(1)
            acc_info['status'] = 'premium'
            acc_info['expire_date'] = datetime.utcnow() + timedelta(days=int(days))
            acc_info['transfer'] = re.search('class="tipsyS"><b>(.+?)</b>', page).group(1)
            return acc_info
    elif rc['message'] == 'block':
        acc_info['status'] = 'blocked'
        return acc_info
    else:
        # Bug fixes: the dict must be serialized before writing (the old
        # code passed a dict to file.write), and the ModuleError was
        # instantiated but never raised.
        with open('gdown.log', 'w') as log_file:
            log_file.write(json.dumps(rc))
        raise ModuleError('Unknown error, full log in gdown.log')
|
After competing in his 2nd Olympics, The Snow Centre’s ambassador, Jamie Nicholls shares his thoughts on the 2018 winter games. PyeongChang 2018, South Korea took place over February, and saw team mate, Billy Morgan take a bronze in Big Air, and freestyle skier, Izzy Atkins also land a spot on the podium.
How does it feel to have competed at two Winter Olympic Games?
How do you feel PyeongChang 2018 went?
Your cousin and Big Air medal hope, snowboarder, Katie Ormerod had an accident during the Olympic practice which meant she had to leave the competition, what was that like?
What does your team mate, Billy Morgan's bronze medal win in Big Air mean for the future of British snowboarding?
What's next for Jamie Nicholls?
What would you say to any youngsters considering a career in snowboarding?
|
# Copyright 2008-2015 Canonical
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# For further info, check http://launchpad.net/filesync-server
"""Services for handling downloads."""
from backends.filesync.data import model, errors
from backends.filesync.data.gateway import SystemGateway
from backends.filesync.data.dbmanager import (
get_shard_ids,
retryable_transaction,
fsync_commit,
fsync_readonly,
)
# Download states exposed by this service layer.  UNKNOWN and
# DOWNLOADED_NOT_PRESENT are synthetic (computed here, not stored); the
# rest alias the model's status constants so callers can compare against
# either name.
UNKNOWN = "Unknown"
DOWNLOADED_NOT_PRESENT = "Downloaded But Not Present"
QUEUED = model.DOWNLOAD_STATUS_QUEUED
DOWNLOADING = model.DOWNLOAD_STATUS_DOWNLOADING
DOWNLOADED = model.DOWNLOAD_STATUS_COMPLETE
ERROR = model.DOWNLOAD_STATUS_ERROR
@retryable_transaction()
@fsync_commit
def download_start(user_id, download_id):
    """Mark the download as started (status DOWNLOADING)."""
    gw = SystemGateway()
    gw.update_download(user_id, download_id,
                       status=model.DOWNLOAD_STATUS_DOWNLOADING)
@retryable_transaction()
@fsync_commit
def download_error(user_id, download_id, message):
    """Mark the download as failed, recording *message* as the error."""
    gw = SystemGateway()
    return gw.update_download(user_id, download_id,
                              status=model.DOWNLOAD_STATUS_ERROR,
                              error_message=message)
@retryable_transaction()
@fsync_commit
def download_complete(user_id, download_id, hash, crc32, size,
                      deflated_size, mimetype, storage_key):
    """Record the download as complete with its content metadata."""
    return SystemGateway().download_complete(
        user_id, download_id, hash, crc32, size,
        deflated_size, mimetype, storage_key)
@retryable_transaction()
@fsync_commit
def get_or_make_download(user_id, volume_id, path, download_url, dl_key):
    """Get or make a download if it doesn't already exist."""
    gw = SystemGateway()
    lookup = (user_id, volume_id, path, download_url, dl_key)
    try:
        return gw.get_download(*lookup)
    except errors.DoesNotExist:
        return gw.make_download(*lookup)
@retryable_transaction()
@fsync_commit
def download_update(user_id, download_id, status=None,
                    node_id=None, error_message=None):
    """Update a download's fields directly.

    Typically this isn't used; prefer the specific helpers above.
    """
    return SystemGateway().update_download(
        user_id, download_id, status=status,
        node_id=node_id, error_message=error_message)
@fsync_readonly
def get_status_from_download(user_id, download):
    """Return the effective status of a download object.

    A COMPLETE download whose node no longer exists is reported as
    DOWNLOADED_NOT_PRESENT.
    """
    gw = SystemGateway()
    if download.status != model.DOWNLOAD_STATUS_COMPLETE:
        return download.status
    # Completed: verify the downloaded file is actually still present.
    user = gw.get_user(user_id)
    try:
        gw.get_node(download.node_id, user.shard_id)
    except errors.DoesNotExist:
        return DOWNLOADED_NOT_PRESENT
    return download.status
@fsync_readonly
def get_status(user_id, volume_id, path, download_url, dl_key):
    """Report the status of the download matching the given coordinates."""
    gateway = SystemGateway()
    try:
        record = gateway.get_download(
            user_id, volume_id, path, download_url, dl_key)
    except errors.DoesNotExist:
        return UNKNOWN
    return get_status_from_download(user_id, record)
@fsync_readonly
def get_status_by_id(user_id, dl_id):
    """Report the status of the download with the given id."""
    gateway = SystemGateway()
    try:
        record = gateway.get_download_by_id(user_id, dl_id)
    except errors.DoesNotExist:
        return UNKNOWN
    return get_status_from_download(user_id, record)
@retryable_transaction()
@fsync_commit
def make_download(user_id, udf_id, file_path, download_url, download_key=None):
    """Create and return a new download object."""
    return SystemGateway().make_download(
        user_id, udf_id, file_path, download_url, download_key)
@fsync_readonly
def get_download(user_id, udf_id, file_path, download_url, download_key=None):
    """Fetch a download by its UDF, file path, download URL and key."""
    return SystemGateway().get_download(
        user_id, udf_id, file_path, download_url, download_key)
@fsync_readonly
def get_download_by_id(user_id, download_id):
    """Fetch a download by its unique id."""
    return SystemGateway().get_download_by_id(user_id, download_id)
@fsync_readonly
def get_failed_downloads(start_date, end_date):
    """Collect failed downloads across all shards within the date range."""
    gateway = SystemGateway()
    return [record
            for shard_id in get_shard_ids()
            for record in gateway.get_failed_downloads(
                shard_id, start_date, end_date)]
|
We held a market makeover event on Sunday, November 20th at African Caribbean Food Market. This very unique market in City Heights sells different cultural products from Jamaica, East and West Africa. Our Live Well Community Market Team, together with volunteers from the neighborhood and UC San Diego’s Chi Omega Sorority, spent the morning putting in the necessary work to help transform this market into an even better version of itself! We did a full-scale reorganization by grouping and moving products to new aisles, installed new aisle marker signage and point-of-sale marketing materials, revamped old and unused produce displays, and gave the market a fresh cleaning. African Caribbean is now easier for customers to navigate, and unique specialty items are better highlighted. Market owner, Christian Bempong, is now excited to introduce more fruit and vegetable varieties to fill the newly available produce display space.
Previous Article: See You Soon, Michelle Obama!
Next Article: November 20th: Market Makeover at African Caribbean Food Market in City Heights!
|
"""
File: score_to_midi_converter.py
Purpose: Provides a means to convert a score to a midi file.
"""
from mido import MidiFile, MidiTrack, Message
from fractions import Fraction
from timemodel.tempo_event import TempoEvent
from timemodel.time_signature_event import TimeSignatureEvent
from mido.midifiles import MetaMessage
from structure.dynamics import Dynamics
from structure.tempo import Tempo
from structure.time_signature import TimeSignature
from structure.score import Score
from timemodel.duration import Duration
from timemodel.position import Position
from timemodel.offset import Offset
from instruments.instrument_catalog import InstrumentCatalog
from structure.instrument_voice import InstrumentVoice
import logging
from timemodel.dynamics_event import DynamicsEvent
from timemodel.dynamics_function_event import DynamicsFunctionEvent
from misc.utility import convert_to_numeric
from timemodel.time_conversion import TimeConversion
from timemodel.tempo_function_event import TempoFunctionEvent
from timemodel.tempo_event_sequence import TempoEventSequence
class ScoreToMidiConverter(object):
    """
    This class is used to convert a score to a midi file. The procedure is:
    1) Create a converter: smc = ScoreToMidiConverter(score)
    2) Create the output file: smc.create(filename)
    Note:
      All tempo messages are on channel 1 track 0
      All note messages are on channel 1 for other tracks.
    """

    # Number of MIDI ticks per quarter note.
    TICKS_PER_BEAT = 480
    DEFAULT_NOTE_CHANNEL = 1
    DEFAULT_VELOCITY = 64
    # number of ms between volume events for dynamic function events
    VOLUME_EVENT_DURATION_MS = 5
    # number of ms between tempo samples for tempo function events
    TEMPO_EVENT_DURATION_MS = 50
    # Fallback beat duration (quarter note) when an event supplies none.
    # The misspelled name is retained for backward compatibility;
    # DEFAULT_BEAT_DURATION is the preferred alias for new code.
    DEFAUTLT_BEAT_DURATION = Duration(1, 4)
    DEFAULT_BEAT_DURATION = DEFAUTLT_BEAT_DURATION

    def __init__(self, score):
        """
        Constructor. Set up the tick track map.

        Args:
            score: of Score class
        """
        self.__score = score
        self.__filename = ''
        self.mid = None
        self.inst_voice_channel = {}
        self.channel_assignment = 1
        self.fine_tempo_sequence = None
        self.time_conversion = None

    def create(self, filename):
        """
        Create a midi file from the score, with midi filename provided.

        Args:
            filename: String filename. Can include path, should have filetype '.mid'.
        """
        self.__filename = filename
        self.mid = MidiFile(type=1)
        self.mid.ticks_per_beat = ScoreToMidiConverter.TICKS_PER_BEAT

        # assign each instrument voice to a channel
        self.inst_voice_channel = {}
        # used for assigning channels to each voice.
        self.channel_assignment = 1

        (self.fine_tempo_sequence, self.time_conversion) = self._build_time_conversion()

        meta_track = MidiTrack()
        self.mid.tracks.append(meta_track)
        self._fill_meta_track(meta_track)

        self._assign_voices_tracks()

        self.mid.save(self.filename)

    @property
    def score(self):
        return self.__score

    @property
    def filename(self):
        return self.__filename

    @staticmethod
    def convert_score(score, filename):
        """
        Static method to convert a Score to a midi file.

        Args:
            score: Class Score object
            filename: The name of the midi file, should have filetype .mid
        """
        smc = ScoreToMidiConverter(score)
        smc.create(filename)

    @staticmethod
    def convert_line(line, filename, tempo=Tempo(60, Duration(1, 4)),
                     time_signature=TimeSignature(4, Duration(1, 4))):
        """
        Static method to convert a Line to a midi file.

        Args:
            line: Class Line object
            filename: The name of the midi file, should have filetype .mid
            tempo: Tempo for playback, default is 60 BPM tempo beat = quarter note
            time_signature: TimeSignature on playback, default is 4 quarter notes
        """
        score = Score()
        tempo_sequence = score.tempo_sequence
        tempo_sequence.add(TempoEvent(tempo, Position(0)))
        ts_sequence = score.time_signature_sequence
        ts_sequence.add(TimeSignatureEvent(time_signature, Position(0)))
        c = InstrumentCatalog.instance()
        piano = c.get_instrument("piano")
        piano_instrument_voice = InstrumentVoice(piano, 1)
        piano_voice = piano_instrument_voice.voice(0)
        piano_voice.pin(line, Offset(0))
        score.add_instrument_voice(piano_instrument_voice)
        ScoreToMidiConverter.convert_score(score, filename)

    def _assign_voices_tracks(self):
        # assign a channel to each instrument voice and emit its notes
        for inst_voice in self.score.instrument_voices:
            self.inst_voice_channel[inst_voice] = self._next_channel()
            self._add_notes(inst_voice, self.inst_voice_channel[inst_voice])

    def _next_channel(self):
        """
        Allocates channels starting at 1 through 15, skipping channel 9
        (reserved for drums). Raises exception beyond that.
        """
        if self.channel_assignment == 15:
            raise Exception('Ran out of channels.')
        self.channel_assignment += 1
        if self.channel_assignment == 9:  # drums
            return self._next_channel()
        return self.channel_assignment

    def _add_notes(self, inst_voice, channel):
        """Create one midi track per voice and emit its note on/off messages."""
        voice_note_map = inst_voice.get_all_notes()

        for voice, notes in voice_note_map.items():
            track = MidiTrack()
            track.name = inst_voice.instrument.name
            self.mid.tracks.append(track)

            # For each note, build a note on and off message, compute the
            # ticks of the message, and append both messages to the msgs list.
            velocity_msgs = self._gen_velocity_msgs(voice, channel)
            msgs = []
            for n in notes:
                # We do not need to set velocity outside of the default.
                # Crescendo and decrescendo are taken care of by channel change
                # messages only, which modify the constant velocity set per
                # note. If the velocity was set here, the channel change would
                # distort the setting.
                ticks = self._wnt_to_ticks(n.get_absolute_position())
                msg = NoteMessage('note_on', channel, n.diatonic_pitch.chromatic_distance + 12, ticks,
                                  ScoreToMidiConverter.DEFAULT_VELOCITY)
                msgs.append(msg)
                end_ticks = self._wnt_to_ticks(n.get_absolute_position() + n.duration)
                msg = NoteMessage('note_off', channel, n.diatonic_pitch.chromatic_distance + 12, end_ticks)
                msgs.append(msg)

            # Sort the msgs list by tick time, with note_off sorting before
            # note_on at the same time.
            msgs.extend(velocity_msgs)
            from functools import cmp_to_key
            msgs = sorted(msgs, key=cmp_to_key(lambda x, y: ScoreToMidiConverter.compare_note_msgs(x, y)))

            prior_tick = 0
            for m in msgs:
                logging.info('{0}'.format(m))
                ticks_value = int(m.abs_tick_time - prior_tick)
                # Append the midi message to the track, with ticks being
                # incremental over succeeding messages.
                track.append(m.to_midi_message(ticks_value))
                prior_tick = m.abs_tick_time

    def _gen_velocity_msgs(self, voice, channel):
        """
        Run through the dynamics sequence events, and generate channel change
        events to set velocity. For a DynamicsEvent the mapping is trivial;
        a DynamicsFunctionEvent is sampled every VOLUME_EVENT_DURATION_MS,
        providing a 'simulation' of the continuous velocity change dictated
        by the function behind the event.
        """
        msgs = []
        dyn_seq = voice.dynamics_sequence.sequence_list
        voice_len = voice.length()

        tc = self.time_conversion

        for event in dyn_seq:
            if event.time >= voice_len:
                break
            if isinstance(event, DynamicsEvent):
                velocity = event.velocity()
                ticks = self._wnt_to_ticks(event.time)
                msgs.append(ExpressionVelocityMessage(channel, ticks, velocity))
            elif isinstance(event, DynamicsFunctionEvent):
                t1 = tc.position_to_actual_time(event.time)
                next_event = voice.dynamics_sequence.successor(event)
                # BUGFIX: pass the successor's *time* (a Position), not the
                # event object itself, matching the conversion above.
                t2 = tc.position_to_actual_time(next_event.time if next_event is not None
                                                else Position(voice_len.duration))
                while t1 < t2:
                    wnt = tc.actual_time_to_position(t1)
                    ticks = self._wnt_to_ticks(wnt)
                    velocity = int(event.velocity(wnt, next_event.time if next_event is not None else
                                                  Position(voice_len.duration)))
                    msgs.append(ExpressionVelocityMessage(channel, ticks, velocity))
                    t1 += ScoreToMidiConverter.VOLUME_EVENT_DURATION_MS

        return msgs

    def _fill_meta_track(self, meta_track):
        """Emit set_tempo meta messages for all tempo events onto meta_track.

        A TempoEvent produces a single tempo change; a TempoFunctionEvent is
        approximated by a step function sampled every TEMPO_EVENT_DURATION_MS.
        """
        event_list = self.score.tempo_sequence.sequence_list
        score_len = self.score.length()

        # Loop over list; for every change in tempo, the tempo is reset.
        # Note that there may be tempo or ts changes that last for 0 duration - we skip those.
        last_tick_time = 0
        for tempo_event in event_list:
            if tempo_event.time >= score_len:
                break
            if isinstance(tempo_event, TempoEvent):
                current_tick_time = self._wnt_to_ticks(tempo_event.time)

                # If there is a ts and tempo event, effect a midi tempo change.
                # tempo_value = (60/BPM) * (ts_beat / tempo_beat), in microseconds
                beat_ratio = Fraction(1, 4) / tempo_event.object.beat_duration.duration
                tempo_value = int((60.0 / tempo_event.object.tempo) * beat_ratio * 1000000)

                ticks = int(current_tick_time - last_tick_time)
                msg = MetaMessage('set_tempo', tempo=tempo_value, time=ticks)
                meta_track.append(msg)
                last_tick_time = current_tick_time
            elif isinstance(tempo_event, TempoFunctionEvent):
                # Run over event range making a small step function effectively,
                # and setting the tempo every TEMPO_EVENT_DURATION_MS.
                t1 = tempo_event.time
                # BUGFIX: the condition was inverted ('is None'), which always
                # discarded the event's own beat duration.
                beat_duration = tempo_event.beat_duration if tempo_event.beat_duration is not None else \
                    ScoreToMidiConverter.DEFAUTLT_BEAT_DURATION
                next_event = self.score.tempo_sequence.successor(tempo_event)
                t2 = next_event.time if next_event is not None else Position(score_len.duration)
                while t1 < t2:
                    tempo = int(tempo_event.tempo(t1, next_event.time if next_event is not None else
                                                  Position(score_len)))
                    # Whole-note time covered by one TEMPO_EVENT_DURATION_MS step.
                    delta_wnt = (tempo * ScoreToMidiConverter.TEMPO_EVENT_DURATION_MS * beat_duration.duration) / \
                                (60.0 * 1000.0)

                    current_tick_time = self._wnt_to_ticks(t1)
                    ticks = int(current_tick_time - last_tick_time)

                    # tempo_value = (60/BPM) * (ts_beat / tempo_beat)
                    beat_ratio = Fraction(1, 4) / beat_duration.duration
                    tempo_value = int((60.0 / tempo) * beat_ratio * 1000000)

                    msg = MetaMessage('set_tempo', tempo=tempo_value, time=ticks)
                    meta_track.append(msg)

                    t1 += delta_wnt
                    last_tick_time = current_tick_time

    def _build_time_conversion(self):
        """Build a fine-grained tempo sequence (tempo function events sampled
        into discrete TempoEvents) and a TimeConversion over it."""
        event_list = self.score.tempo_sequence.sequence_list
        score_len = self.score.length()

        fine_tempo_sequence = TempoEventSequence()

        for event in event_list:
            if isinstance(event, TempoEvent):
                fine_tempo_sequence.add(TempoEvent(event.object, event.time))
            elif isinstance(event, TempoFunctionEvent):
                t1 = event.time
                # BUGFIX: the condition was inverted ('is None'), which always
                # discarded the event's own beat duration.
                beat_duration = event.beat_duration if event.beat_duration is not None else \
                    ScoreToMidiConverter.DEFAUTLT_BEAT_DURATION
                next_event = self.score.tempo_sequence.successor(event)
                t2 = next_event.time if next_event is not None else Position(score_len.duration)
                while t1 < t2:
                    tempo = int(event.tempo(t1, next_event.time if next_event is not None else Position(score_len)))
                    delta_wnt = (tempo * ScoreToMidiConverter.TEMPO_EVENT_DURATION_MS * beat_duration.duration) / \
                                (60.0 * 1000.0)
                    fine_tempo_sequence.add(TempoEvent(Tempo(tempo, beat_duration), t1))
                    t1 += delta_wnt

        tc = TimeConversion(fine_tempo_sequence, self.score.time_signature_sequence, Position(score_len))

        return fine_tempo_sequence, tc

    def _wnt_to_ticks(self, wnt):
        """Convert whole note time to midi ticks (one beat == quarter note)."""
        offset = convert_to_numeric(wnt)
        return int((offset / Fraction(1, 4)) * self.mid.ticks_per_beat)

    @staticmethod
    def compare_note_msgs(a, b):
        """Order messages by tick time; at equal times, note_off sorts before
        note_on. Expression messages compare by time only."""
        a_ticks = a.abs_tick_time
        b_ticks = b.abs_tick_time
        comp_value = -1 if a_ticks < b_ticks else 1 if a_ticks > b_ticks else 0
        if isinstance(a, ExpressionVelocityMessage) or isinstance(b, ExpressionVelocityMessage):
            return comp_value
        if comp_value != 0:
            return comp_value
        a_is_note_off = a.msg_type == 'note_off'
        b_is_note_off = b.msg_type == 'note_off'
        if a_is_note_off and not b_is_note_off:
            return -1
        if not a_is_note_off and b_is_note_off:
            return 1
        return 0
class MidiMessage(object):
    """Base class for messages scheduled at an absolute tick time.

    Subclasses override to_midi_message() to build the concrete mido
    message with a delta time relative to the prior message.
    """

    def __init__(self, msg_type, channel, abs_tick_time):
        self.__msg_type = msg_type
        self.__channel = channel
        self.__abs_tick_time = abs_tick_time

    @property
    def msg_type(self):
        return self.__msg_type

    @property
    def channel(self):
        return self.__channel

    @property
    def abs_tick_time(self):
        return self.__abs_tick_time

    def to_midi_message(self, prior_msg_ticks):
        """Subclass hook; the base implementation produces nothing."""
        return None
class NoteMessage(MidiMessage):
    """A note_on/note_off message carrying a pitch value and velocity."""

    def __init__(self, msg_type, channel, note_value, abs_tick_time, velocity=Dynamics.DEFAULT_DYNAMICS_VELOCITY):
        MidiMessage.__init__(self, msg_type, channel, abs_tick_time)
        self.__note_value = note_value
        self.__velocity = velocity

    @property
    def note_value(self):
        return self.__note_value

    @property
    def velocity(self):
        return self.__velocity

    def to_midi_message(self, ticks_from_prior_msg):
        """Build the concrete mido Message with a delta time in ticks."""
        return Message(self.msg_type, note=self.note_value,
                       velocity=self.velocity, time=ticks_from_prior_msg,
                       channel=self.channel)

    def __str__(self):
        return '{0} {1}/{2}({3}, {4})'.format(self.abs_tick_time, self.msg_type,
                                              self.channel, self.note_value,
                                              self.velocity)
class ExpressionVelocityMessage(MidiMessage):
    """A control_change message (controller 11, expression) carrying a velocity."""

    def __init__(self, channel, abs_tick_time, velocity=Dynamics.DEFAULT_DYNAMICS_VELOCITY):
        MidiMessage.__init__(self, 'control_change', channel, abs_tick_time)
        self.__velocity = velocity

    @property
    def velocity(self):
        return self.__velocity

    def to_midi_message(self, ticks_from_prior_msg):
        """Build the concrete mido control_change Message (controller 11)."""
        return Message(self.msg_type, control=11, value=self.velocity,
                       time=ticks_from_prior_msg, channel=self.channel)

    def __str__(self):
        return '{0} {1}/{2}({3})'.format(self.abs_tick_time, self.msg_type,
                                         self.channel, self.velocity)
|
The ideal cancer treatment is to create a plan that stays one step ahead of the tumor. However, every patient’s cancer changes over time and often changes more quickly in response to treatment. Physicians work to hit this moving target with the best therapies available but there are often several therapies to choose from. That’s why it’s important for your doctor to determine as quickly as possible when it’s time to change therapies from one that either never worked or stopped working to one that will better control your cancer.
However, using standard diagnostics, it can take as long as 3 months to find out whether the treatment is working or has failed – a time span that cancers can exploit, growing and becoming more difficult to treat.
Researchers at Jefferson’s Medical Oncology Associates at the Sidney Kimmel Cancer Center have pioneered and implemented new tools to monitor a patient’s cancer in real time through a simple blood test. Starting September 2014, Jefferson will offer patients this new diagnostic blood test, often called a “liquid biopsy” or circulating-tumor-cell (CTC) count, to detect cancer cells that have been shed or are actively moving from one site to another through the bloodstream. The test lets doctors check for changes in the tumor quickly and easily, without repeating additional painful surgical biopsies, which aren’t a good option for some patients, or scans, which can’t detect small increases in growth.
Is the test useful for any cancer?
Today, the test is FDA-approved for breast, colon, and prostate cancers that have metastasized, or begun to spread. But our researchers are working on expanding this to other cancers and have a number of clinical trials open (ask your doctor if you are interested in participating).
Can my doctor use CTCs only to track my cancer?
Whole body scans, and in some cases tissue biopsies, may remain important in tracking the spread and growth of your cancer, but the CTC test provides your oncologist with more information more frequently than is possible with other tests, and it is particularly important for predicting spread that occurs as a consequence of treatment failure. It is the only recognized test that can inform your doctor about the risk of cancer spreading to other organs.
All your doctors need is a small vial of your blood. Solid tumors (in other words, cancers that don’t originate in blood cells) will often release individual cancer cells into the bloodstream when the cancer is metastatic, in order to seed the cancer to new locations in the body. Researchers have discovered that when a blood sample contains a small number of these cells, the tumor is more likely to be stable, with its growth under control – the sign of a successful medication. When the number of circulating tumor cells, or CTCs, as they are called is high, the cancer is probably growing and may suggest that it’s time for a new course of action.
Today, the CTC test is available in only a handful of academic medical centers on the East Coast, and Jefferson is the only institution in Philadelphia that has it. In addition, Jefferson is home to one of the researchers who developed the technology, Dr. Massimo Cristofanilli, MD, providing our patients with the benefit of his expertise.
|
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
import platform
import sys
__author__ = 'Microsoft Corp. <ptvshelp@microsoft.com>'
__version__ = '2.1.0'
# UserAgent string sample: 'Azure-Storage/0.37.0-0.38.0 (Python CPython 3.4.2; Windows 8)'
# First version(0.37.0) is the common package, and the second version(0.38.0) is the service package
USER_AGENT_STRING_PREFIX = 'Azure-Storage/{}-'.format(__version__)
USER_AGENT_STRING_SUFFIX = '(Python {} {}; {} {})'.format(platform.python_implementation(),
                                                          platform.python_version(), platform.system(),
                                                          platform.release())
# default values for common package, in case it is used directly
# DEFAULT_X_MS_VERSION is the storage service REST API version sent when the
# caller does not specify one.
DEFAULT_X_MS_VERSION = '2019-02-02'
DEFAULT_USER_AGENT_STRING = '{}None {}'.format(USER_AGENT_STRING_PREFIX, USER_AGENT_STRING_SUFFIX)
# Live ServiceClient URLs
SERVICE_HOST_BASE = 'core.windows.net'
DEFAULT_PROTOCOL = 'https'
# Development ServiceClient URLs (local storage emulator endpoints)
DEV_BLOB_HOST = '127.0.0.1:10000'
DEV_QUEUE_HOST = '127.0.0.1:10001'
# Default credentials for Development Storage Service.
# NOTE: this is the well-known, publicly documented emulator account key,
# not a secret.
DEV_ACCOUNT_NAME = 'devstoreaccount1'
DEV_ACCOUNT_SECONDARY_NAME = 'devstoreaccount1-secondary'
DEV_ACCOUNT_KEY = 'Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw=='
# Socket timeout in seconds
DEFAULT_SOCKET_TIMEOUT = 20
# for python 3.5+, there was a change to the definition of the socket timeout (as far as socket.sendall is concerned)
# The socket timeout is now the maximum total duration to send all data.
if sys.version_info >= (3, 5):
    # the timeout to connect is 20 seconds, and the read timeout is 2000 seconds
    # the 2000 seconds was calculated with: 100MB (max block size)/ 50KB/s (an arbitrarily chosen minimum upload speed)
    DEFAULT_SOCKET_TIMEOUT = (20, 2000)
# Encryption constants
_ENCRYPTION_PROTOCOL_V1 = '1.0'
# Header names whose values are redacted from logs (see _REDACTED_VALUE).
_AUTHORIZATION_HEADER_NAME = 'Authorization'
_COPY_SOURCE_HEADER_NAME = 'x-ms-copy-source'
_REDACTED_VALUE = 'REDACTED'
_CLIENT_REQUEST_ID_HEADER_NAME = 'x-ms-client-request-id'
|
How Far is Himayam MNI Apartments?
Himayam MNI Apartments is one of the popular residential projects in Pallavaram, Chennai. Developed by Himayam Engineers and Builders, this project offers apartments and commercial shops with basic amenities for the comfort of residents. It is close to the market and many educational institutions.
|
'''
Created on 23/8/2014
@author: Alberto
'''
import pygame as py
from settings import SCREEN_WIDTH, SCREEN_HEIGHT
class ArrowsLauncher(object):
    """An arrow launcher fixed to one of four wall positions.

    The launcher asks its arrows manager to spawn arrows and enforces a
    cooldown (``delay`` milliseconds) between consecutive shots.
    """

    def __init__(self, arrowsManager, img, i):
        """
        Args:
            arrowsManager: manager that owns and spawns the arrows.
            img: pygame Surface used to draw the launcher.
            i: position index, 0-3 (see ConfigurePosition).
        """
        self.am = arrowsManager
        self.img = img
        self.ConfigurePosition(i)
        self.canShoot = True
        self.timer = 0      # tick count of the last cooldown reset
        self.delay = 1000   # cooldown between shots, in milliseconds

    def ConfigurePosition(self, i):
        """Compute self.rect for position index *i*.

        0: bottom-left, 1: upper-left, 2: bottom-right, 3: upper-right.

        Raises:
            ValueError: if *i* is not in 0-3. (The original mixed if/elif
            chain left x and y unbound for unknown indices, which surfaced
            later as a confusing NameError.)
        """
        floor = SCREEN_HEIGHT - 15 - self.img.get_height()
        self.i = i
        if i == 0:
            x = 0
            y = floor
        elif i == 1:
            x = 0
            y = floor - self.img.get_height() * 3
        elif i == 2:
            x = SCREEN_WIDTH - self.img.get_width()
            y = floor
        elif i == 3:
            x = SCREEN_WIDTH - self.img.get_width()
            y = floor - self.img.get_height() * 3
        else:
            raise ValueError('launcher position index must be 0-3, got %r' % (i,))
        self.rect = py.Rect(x, y, self.img.get_width(), self.img.get_height())

    def Update(self):
        # While on cooldown, keep checking whether the delay has elapsed.
        if not self.canShoot:
            self.Timer()

    def Render(self, screen):
        screen.blit(self.img, self.rect)

    def Timer(self):
        """Re-enable shooting once `delay` ms have passed since `timer`."""
        if py.time.get_ticks() - self.timer > self.delay:
            self.timer = py.time.get_ticks()
            self.canShoot = True

    def Shoot(self):
        """Fire an arrow through the manager if not on cooldown."""
        if self.canShoot:
            self.am.AddArrow(self)
            self.canShoot = False
|
Enjoy informal dining in the exquisite surroundings of our award-winning 16th Century timber barn. Thoughtfully renovated by skilled craftspeople, sublime contemporary styling effortlessly blends with the traditional thatched roof and oak beams. Nestled in the beautiful Essex countryside, yet close to Colchester, The Barn provides a delightful setting for family celebrations, a unique wedding venue, and a subtly impressive space for business meetings.
Treat yourself or someone special to an informal dining experience in the exquisite surroundings of the Barn Brasserie.
|
#!/usr/bin/python2.7
# QTVcp Widget
#
# Copyright (c) 2017 Chris Morley
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
###############################################################################
import os
from PyQt5 import QtCore, QtWidgets
from qtvcp.widgets.simple_widgets import ScaledLabel
from qtvcp.widgets.widget_baseclass import _HalWidgetBase
from qtvcp.core import Status
from qtvcp import logger
# Instantiate the libraries with global reference:
# STATUS gives us status messages from linuxcnc
# LOG is for running code logging
STATUS = Status()
LOG = logger.getLogger(__name__)
# Set the log level for this module
# LOG.setLevel(logger.INFO) # One of DEBUG, INFO, WARNING, ERROR, CRITICAL
class StateLabel(ScaledLabel, _HalWidgetBase):
    """Label that displays one of two text templates based on a STATUS flag.

    Exactly one of the mode flags (metric_mode, css_mode, fpr_mode,
    diameter_mode) should be active; the widget then tracks the matching
    STATUS signal and shows either the 'true' or 'false' text template.
    """

    def __init__(self, parent=None):
        super(StateLabel, self).__init__(parent)
        self._true_textTemplate = 'True'
        self._false_textTemplate = 'False'
        # Mode flags - only one should be True at a time
        # (enforced by _toggle_properties).
        self.metric_mode = True
        self.css_mode = False
        self.fpr_mode = False
        self.diameter_mode = False

    def _hal_init(self):
        """Connect the STATUS signal that matches the selected mode flag.

        Called by the qtvcp framework during HAL widget initialization.
        """
        def _f(data):
            self._set_text(data)
        if self.metric_mode:
            STATUS.connect('metric-mode-changed', lambda w, data: _f(data))
        elif self.css_mode:
            STATUS.connect('css-mode', lambda w, data: _f(data))
        elif self.fpr_mode:
            STATUS.connect('fpr-mode', lambda w, data: _f(data))
        elif self.diameter_mode:
            STATUS.connect('diameter-mode', lambda w, data: _f(data))

    def _set_text(self, data):
        """Show the 'true' or 'false' template depending on *data*."""
        if data:
            self.setText(self._true_textTemplate)
        else:
            self.setText(self._false_textTemplate)

    #########################################################################
    # This is how designer can interact with our widget properties.
    # designer will show the pyqtProperty properties in the editor
    # it will use the get set and reset calls to do those actions
    #
    # _toggle_properties makes it so we can only select one option
    ########################################################################

    def _toggle_properties(self, picked):
        """Clear every mode property except *picked*, keeping modes exclusive."""
        data = ('metric_mode', 'css_mode', 'fpr_mode', 'diameter_mode')
        for i in data:
            if not i == picked:
                self[i + '_status'] = False

    # property getter/setters
    def set_true_textTemplate(self, data):
        self._true_textTemplate = data
        try:
            self._set_text(True)
        except Exception as e:
            # BUGFIX: was self._textTemplate (nonexistent attribute), which
            # raised AttributeError inside this error handler.
            LOG.exception("textTemplate: {}, Data: {}".format(self._true_textTemplate, data), exc_info=e)
            self.setText('Error')

    def get_true_textTemplate(self):
        return self._true_textTemplate

    def reset_true_textTemplate(self):
        self._true_textTemplate = '%s'

    def set_false_textTemplate(self, data):
        self._false_textTemplate = data
        try:
            self._set_text(False)
        except Exception:
            # Narrowed from a bare `except:` so system-exit style exceptions
            # are not swallowed.
            self.setText('Error 2')

    def get_false_textTemplate(self):
        return self._false_textTemplate

    def reset_false_textTemplate(self):
        self._false_textTemplate = '%s'

    # metric mode status
    def set_metric_mode(self, data):
        self.metric_mode = data
        if data:
            self._toggle_properties('metric_mode')

    def get_metric_mode(self):
        return self.metric_mode

    def reset_metric_mode(self):
        self.metric_mode = True

    # css mode status
    def set_css_mode(self, data):
        self.css_mode = data
        if data:
            self._toggle_properties('css_mode')

    def get_css_mode(self):
        return self.css_mode

    def reset_css_mode(self):
        self.css_mode = True

    # fpr mode status
    def set_fpr_mode(self, data):
        self.fpr_mode = data
        if data:
            # BUGFIX: was 'fpr_modee' (typo), which caused _toggle_properties
            # to clear every mode flag including fpr_mode itself.
            self._toggle_properties('fpr_mode')

    def get_fpr_mode(self):
        return self.fpr_mode

    def reset_fpr_mode(self):
        self.fpr_mode = True

    # diameter mode status
    def set_diameter_mode(self, data):
        self.diameter_mode = data
        if data:
            self._toggle_properties('diameter_mode')

    def get_diameter_mode(self):
        return self.diameter_mode

    def reset_diameter_mode(self):
        self.diameter_mode = True

    # designer will show these properties in this order:
    # BOOL
    metric_mode_status = QtCore.pyqtProperty(bool, get_metric_mode, set_metric_mode, reset_metric_mode)
    css_mode_status = QtCore.pyqtProperty(bool, get_css_mode, set_css_mode, reset_css_mode)
    fpr_mode_status = QtCore.pyqtProperty(bool, get_fpr_mode, set_fpr_mode, reset_fpr_mode)
    diameter_mode_status = QtCore.pyqtProperty(bool, get_diameter_mode, set_diameter_mode, reset_diameter_mode)
    # Non BOOL
    true_textTemplate = QtCore.pyqtProperty(str, get_true_textTemplate,
                                            set_true_textTemplate, reset_true_textTemplate)
    false_textTemplate = QtCore.pyqtProperty(str, get_false_textTemplate,
                                             set_false_textTemplate, reset_false_textTemplate)

    # boiler code: allow dict-style attribute access (used by _toggle_properties)
    def __getitem__(self, item):
        return getattr(self, item)

    def __setitem__(self, item, value):
        return setattr(self, item, value)
if __name__ == "__main__":
    # Minimal manual smoke test: show the widget in a bare Qt application.
    import sys
    # BUGFIX: QApplication was referenced unqualified (never imported) and the
    # widget class was referenced by an obsolete name (Lcnc_STATUS_Bool_Label).
    app = QtWidgets.QApplication(sys.argv)
    label = StateLabel()
    label.show()
    sys.exit(app.exec_())
|
What does your skin tell you when you look in the mirror? If you’re like most people, glowing and radiant skin is the key to turning back the clock on your appearance.
Conroe Aesthetics and Wellness’ skin experts look forward to serving as your Houston skin care resource, recommending a specific treatment that’s just right for you. You’ll be amazed by the age-defying changes to your skin as part of your Houston anti-aging plan.
Your skin type is as unique as any other part of your body, so it’s essential to create an individualized skin care program that will produce the best possible results for a busy Houston lifestyle. Sun exposure, hormones, age, medical problems and medications can all affect your skin, and changes in any of these factors should prompt you to alter your Houston skin care habits.
As a renowned Houston wellness center, Conroe Aesthetics and Wellness offers a complete, complimentary skin analysis that helps guide you through various products and procedures. Trust an experienced Houston skin care specialist to help you reach your goals.
To complement our in-office skin care treatments for our clients, Conroe Aesthetics and Wellness offers the exclusive Obagi skin care system. Obagi’s aesthetic and therapeutic products are clinically proven to improve and prevent skin conditions caused by heredity, aging and the environment.
Conroe serves as one of Houston’s premier Obagi distributors for this superior line of products. This revolutionary skin care line treats your skin at the cellular level to actually boost skin cell function. Obagi products can only be purchased from a licensed physician, but are all available at Conroe Aesthetics and Wellness, a premier Houston wellness clinic.
The Obagi Nuderm® System penetrates below the skin’s surface with prescription-strength ingredients to repair damage and skin cell turnover caused by UVA and UVB rays. The result is healthy new cell growth while damaged, old cells shed away. Nu-Derm® enhances elastin and collagen as well, improving resiliency and repairing wrinkles, sagging and fine lines. This high-powered Obagi system also effectively balances your skin’s pH level to help reduce acne flare-ups. In just 12 weeks you will notice improvement in firmness, texture, hyper-pigmentation and wrinkles. When used in combination with one of our center’s injectables such as Botox or Juvederm, you’ll see even more dramatic and beautiful results.
Obagi’s ELASTIderm® eye cream is a powerful formulation that dramatically reduces the appearance of crow’s feet by significantly improving elasticity around the eyes. Only available through Houston Obagi skin care professionals, this product restores youthfulness through ingredients that truly penetrate your skin such as zinc, copper, malonic acid, and blueberry extract.
You can treat puffy eyes with Obagi’s new Elastiderm® Complete Complex Serum. This soothing, innovative product uses the same proven formulation as the Elastiderm® Eye, but with the added benefit of caffeine and rollerball technology to reduce under-eye puffiness.
Discover longer, thicker, darker eyelashes with ELASTILash® from Obagi®. Just one application on each eyelid every night will produce beautiful, dramatic eyelashes in weeks. Unlike other eyelash growth products, ELASTILash® gives no evidence of change or darkening in the eye’s iris or eyelid. In fact, clients with fair skin or light eye color may find this as an ideal solution. There is no prescription needed, but Conroe Aesthetics and Wellness is one of the few Houston Obagi dealers to stock this product.
Young Pharmaceuticals® is a physician-dispensed skin care line that allows physicians to customize products and chemical peels based on patients’ skin care needs. Young® products are formulated with ingredients that reduce hyper-pigmentation, redness, and acne while adding anti-oxidants and increasing moisture in the skin. This customized approach reduces potential for irritation and improves patient outcome as you seek skin care help in Houston.
Young Pharmaceuticals® products are used exclusively in our customized facials that target our clients skin care needs. Young® also has a wide range of effective chemical peels that brighten skin and smooth its texture, restoring a polished, radiant, more youthful appearance for skin care in Houston clients.
Please come talk to our experienced staff about these and other products that can be selected and tailored for your unique Houston skin care goals. Conroe looks forward to being your primary Houston skin care advisor.
|
#
from abc import ABCMeta, abstractmethod
class Controller (metaclass=ABCMeta):
    """Abstract base class for item controllers backed by a database.

    Concrete subclasses implement item creation/deletion and DB
    initialisation; this base class manages the in-memory cache
    (``itemList``) and keeps it in sync with pending updates reported
    by ``self.process``.

    NOTE(review): ``self.table_name`` is read by several methods but is
    never assigned here -- subclasses are assumed to define it; confirm.
    """
    #public:
    @abstractmethod
    def __init__(self, process, database):
        self.process = process
        self.database = database
        self.itemList = []  # Array: cached item objects
        self.indexIdx = 0   # int: number of cached items
        #self.initiateAllFromDB()

    @abstractmethod
    def createItem(self, **kwargs):
        """
        @**kwargs: all parameters used to create an item
        """
        raise NotImplementedError

    @abstractmethod
    def delItem(self, id_item):
        """
        @id_item:int
        """
        raise NotImplementedError

    @abstractmethod
    def initiateFromDB(self, id_item, validation):
        """
        @id_item:int
        """
        raise NotImplementedError

    @abstractmethod
    def initiateAllFromDB(self, validation):
        """Load every item of this controller's table from the database."""
        raise NotImplementedError

    def setDatabase(self, database):
        """Replace the database handle; always returns True."""
        self.database = database
        return True

    def addItem(self, item):
        """Append *item* to the cache unless its id is already cached.

        @item: object exposing ``getId()``
        Returns True when added, False for a duplicate.
        """
        if self.isItem(item.getId()):
            print("Item is a duplicate! (%s: id=%s)" % (type(item).__name__, item.getId()))
            return False
        self.itemList.append(item)
        self.indexIdx += 1
        return True

    def removeItem(self, id_item):
        """Remove the cached item whose id equals *id_item*.

        @id_item:int
        Returns True when an item was removed, False when nothing matched.
        """
        # BUG FIX: previously 'item' stayed unbound when no id matched,
        # raising NameError instead of returning False.
        found = None
        for candidate in self.itemList:
            if candidate.getId() == id_item:
                found = candidate
        if found is None:
            return False
        self.itemList.remove(found)
        self.indexIdx -= 1
        return True

    #private:
    def getItemById(self, id):
        """Return the cached item with this id, or False when unavailable.

        Synchronises with the process' update bookkeeping first: a stale
        id is re-read from the DB, and a negated id marks a removal.
        """
        validation = False
        # (Translated from the original German note:) isUpToDate returns
        # True when the id is NOT pending an update, False when it is.
        upToDate = self.process.isUpToDate(self.table_name, id)
        removed = not self.process.isUpToDate(self.table_name, -id)
        if not upToDate:
            # NOTE(review): initiateFromDB is declared with a 'validation'
            # parameter but is called here with only the id -- confirm
            # subclasses accept a single argument.
            self.initiateFromDB(id)
            self.process.setUpToDate(self.table_name, id)
        elif removed:
            self.removeItem(id)
            self.process.setUpToDate(self.table_name, -id)
            return False
        for item in self.itemList:
            if item.getId() == id:
                validation = item
        if not validation:
            # Cache miss: try a direct DB load, then search the cache again.
            if self.initiateFromDB(id):
                for item in self.itemList:
                    if item.getId() == id:
                        validation = item
        return validation

    def isItem(self, id):
        """Return True when an item with this id is cached."""
        return any(item.getId() == id for item in self.itemList)

    def getAllItems(self):
        """Apply pending updates, then return the full item cache.

        @itemList:Array
        """
        for update in self.process.getUpdates(self.table_name):
            # Negative ids encode removals; fetch with the positive id.
            if int(update) < 0:
                self.getItemById(-update)
            else:
                self.getItemById(update)
        return self.itemList

    #public:
    def getId(self, idx_permission):
        """Return the id of the cached item at index *idx_permission*.

        @idx_permission:int
        Returns the id, or False when the index is out of range.
        """
        if 0 <= idx_permission < self.indexIdx:
            return self.itemList[idx_permission].getId()
        return False

    def getTableName(self):
        """Return the subclass-defined table name.

        @table_name:str
        """
        return self.table_name
|
When I filed my major three-part report, Chile North to South in 2010, much of my focus was on Chile’s newer regions that could offer conditions suitable for growing Syrah, Sauvignon Blanc, Pinot Noir, Chardonnay, Riesling and other varieties that arguably find their ultimate expression in cooler climates. In Chile’s case this means areas at higher altitudes towards the foothills of the Andes, and it means new areas like Elqui and Limari to the north and Bio Bio to the south of the established central valleys. But most importantly of all perhaps, it means areas closer to Chile’s long Pacific coast, influenced by the cool waters fed from the Antarctic by the Humboldt current. Recently, Chilean producer Santa Rita brought a series of excellent tasting events to London for the wine press and trade that explored various themes, including Chile’s coolest coastal terroirs, from Huasco in the Atacama desert to the north, to Trehuaco towards Chile’s lush lake district in the south. Generously, each tasting included not only wines from their own estates, but examples of the country’s top wines from other producers.
Why is cool climate important, and is proximity to the sea an added advantage? Well, the varieties explored here can be classified as ‘aromatic’. Retaining acidity is one benefit of cooler growing conditions, where grapes ripen slowly so that phenolic ripeness (when pips and stems are fully ripe) develops before sugar is too high and acidity drops off. This freshness helps retain complex aromatics and flavours which are not dulled by over ripeness or excessive alcohol. Proximity to the sea often brings with it cool breezes, mist and fog, and it moderates the shifts between day and night temperatures.
This figure is multiplied by the number of days in the growing season to give the GDD total. So if the season lasts 100 days, the GDD for that territory is 1250. Note that for vine growers assessing GDD, temperatures above 30ºC are not counted, and the maximum is capped at 30ºC because vines ‘close down’ and fruit does not ripen above this. A chart of Chilean coastal territories shown by viticulturalist Sebastian Labbé was fascinating. It showed, for example, that Lo Abarca, just four kilometres from the coast, accumulates only 1000 degree days per growing season, whereas Casablanca, 30 kilometres from the coast, can claim 1380. Lo Abarca is truly ‘cool climate’, whilst Casablanca might be better described as moderate. By comparison Burgundy accumulates 1172 degree days, Bordeaux 1485. Another important factor is the diurnal range of temperature in any territory (the difference between day and night temperatures). Because fruit is not ripening below 10ºC or above 30ºC, vines in an area like Lo Abarca – with only 11ºC diurnal range – develop differently from those in Casablanca which sees a 16.5ºC diurnal range. Whilst Lo Abarca is cooler in absolute terms, it will see fewer peaks and troughs where growth is retarded.
Fifteen wines were presented for tasting, all coming from coastal vineyards that cover the length of Chile’s growing regions. Comparing the latitudes, altitudes, rainfall, temperatures and all important Growing Degree Days and diurnal ranges made for an extremely interesting tasting. It also included some of Chile’s most unusual wines that might well surprise those with a fixed image of the Chilean wine scene. Some of these wines are not available in the UK, but I have given worldwide wine-searcher links for those keen to track any of them down or see indicative prices.
Around 1200 GDD, with a diurnal range of 12.3ºC. 13.5% ABV. From granitic soils this is a shimmering, youthful green. Distinctly herbal, not so much grassy as showing asparagus and green bean notes, though a touch of fig and melon skins gives depth. On the palate a huge grapefruity tang of flavour and acidity, it is very dry, the pithy citrus finish relieved by a little oiliness to the texture. See UK stockists on wine-searcher.
Around 1100 GDD here, with a diurnal range of 11.6ºC. 12.5% ABV. Paradones is part of the Colchagua Valley, enjoying a cooler average temperature than Leyda and similar granite soils. This is pale, almost transparent yellow/green. It has an icy coolness, but less punchy nose showing some grassiness, but also a sense of minerality. It bursts with flavour on the palate, with a delicious and juicy apple and lemon bite. See UK stockists on wine-searcher.
Coolest region of all with 1000 GDD and a diurnal range of 11.3ºC. 14% ABV. Lying quite close to Leyda, but only four kilometres from the Pacific, this vineyard enjoys only 900 degree days per season. Shallow soils on granite. This aged Sauvignon Blanc (from 2003) has a golden hue with primrose yellow at the core. It has some honeyed development, rather oxidised, but nutty and with a certain minerality too. It is very lemony and lean on the palate, and having lost its vitality, is still nervy, steely and I rather enjoyed it (more than others in the room). See UK stockists on wine-searcher.
Around 1380 GDD here, with a diurnal range of 16.5ºC. 14.4% ABV. The first of the Chardonnays comes from sandy alluvial soils, where granite and clay make for good drainage but some water retention. It has a pale green-gold colour and gentle toast, hints of ripe fig and of brioche, with some Cox’s pippin nuttiness. On the palate it is dry, the mineral and stony character coming through, with lots of intensity and concentration, but good energy too. See UK stockists on wine-searcher.
One of the highest GDD at 1600, diurnal range of 16.1ºC. 14.1% ABV. Again from alluvial terraces with some chalk, and covered in rounded stones. Pale gold, still a hint of green. A bigger wine immediately, with lots of toast, giving a coffeeish character and a real fig and quince richness. Rounder and bigger than the Carmen on the palate, there is real fruit sweetness here, edging into the tropical spectrum, but tangy and juicy too, a real lemon and mineral, typical Limarí freshness in the finish. See UK stockists on wine-searcher.
From Huasco, 700 kilometres north of Santiago, with 1250 GDD and a diurnal range of 14.1ºC. 13% ABV. In this desert area there is only 20mm of rainfall per annum – as opposed to over 600mm in Colchagua for example. Who’d have thought that the big Ventisquero operation (Yali being just one of its supermarket labels) would also produce 400 bottles of this wine geek’s delight: foot trodden, aged in small steel barrels, and cloudy and yeasty in its presentation. It’s in the ‘natural wine’ spectrum, with clove and light medicinal notes from the yeastiness, leading onto a palate that is both nutty and dry, and very racy. There’s a limpid weight to the texture, and the finish is strikingly dry and uncompromising. Doesn’t appear to be currently available for sale.
High rainfall here (700mm) but still 1250 GDD and 15.3ºC diurnal range. 13% ABV. And to show Chile is definitely not all about playing it safe, is this Chile’s first ‘orange wine’? A natural wine made in 100-year-old clay amphora, it too is cloudy and yeasty, though the powerful Muscat character comes through strongly, the earthy, typically savoury and meaty aromas of natural wines in the background. Quite dry and steely on the palate, this has a hint of sweetness but finishes very dry and quite pithy. I rather liked this, though I confess more for the ‘interesting’ character rather than outright drinkability. Doesn’t appear to be currently available for sale.
From granite soils mixed with a little clay, 1200 GDD and a diurnal range of 12.3ºC. 14.1% ABV. This is made in open fermenters, with maturation in oak, one third new. It has a medium ruby/purple colour that is youthful and vivid. Spices and clove at first, quite full on the nose, with lots of juicy kirsch coming through. Lovely sweet fruit on the palate, a medium-bodied texture and again that cherry brightness to the fruit. It is smooth and silky, touched by espresso, with a nice freshness to the finish. A delicious Pinot and a good expression of the grape. See UK stockists on wine-searcher.
1380 GDD with a diurnal range of 16.5ºC. 14% ABV, this comes from further inland and a warmer climate, but quite similar granite hillside soils. A higher proportion of new oak is used. It is more earthy and vegetal, with real Pinot mushroom and truffle. It is big and quite grippy and structured stuff on the palate, spicy, with a firm grip to the tannins and that earthy, vegetal dryness. Quite fine tannins but dry, and a slightly less elegant but good expression of Pinot. See UK stockists on wine-searcher.
GDD 1550, diurnal range of 13.5ºC. 14% ABV. This comes from limestone soils with sandy loam, and a vineyard 12 kilometres from the coast that enjoy some cooling fogs, but quite warm as it edges closer to the Atacama. It spends 12 months in French oak. Lovely red fruit character, nice and ripe and open with a hint of strawberry. On the palate leaner in style than the Ocio from Casablanca certainly, more orange to the acidity and a little more angular overall. Enjoyable nevertheless. See UK stockists on wine-searcher.
One of the cooler areas of the day with around 1180 degree days here, with a diurnal range of 13.8ºC. 13.5% ABV. Clay and granite soils. Lots of flattering, creamy oak at first here in a deep crimson wine. There is a lovely pepper and game lift too though, and the fruit quite bright with a strawberry pulp character. Big, juicy fruitiness on the palate, loaded with sweet, slick fruit that is quite plush and velvety with ripe tannins and though plenty of dry extract, retains a juicy clarity. See UK stockists on wine-searcher.
From vineyards with a ‘big Pacific influence’, GDD 1600, diurnal range of 16.1ºC. 14% ABV. Vineyards planted on alluvial river soils, with clay and stones over chalk. Very dense, opaque crimson colour. Meaty aromatics are dense and savoury, not so aromatically lifted as the Errázuriz, but with a lovely savoury depth. Juicier on the palate, ripe and round but seems just a little too solid and foursquare, with a little leathery aspect in the finish. See UK stockists on wine-searcher.
16ºC diurnal range here, and though no degree days given, “slightly warmer than Leyda”. 14% ABV, from low fertility granite soils. Dramatically dark and saturated colour, and has some violet and pepper lift to its bright, kirsch-like fruit. Very juicy on the palate, but lots of grip too. Racy and savoury, there’s a hint of something herbal and green on the palate, and a big squirt of acidity keeps it racy and savoury into the finish. See UK stockists on wine-searcher.
Australian winemaker Brian Croser (who consults for Santa Rita) went on record during the Q and A to say that he thought Pumanque, in eastern Colchagua, would one day be Chile’s most famous terroir. The granite and clay soils see 1512 GDD and a diurnal range of 14.2ºC. 14.3% ABV. This very new area (this vineyard planted 2008) is harvested a month later than in the Alto Maipo for example, and soils are clay and granite. This has a saturated purple colour and such a lovely elegance with its blue/black fruit concentration, dry and Indian inky, but showing lovely glimpses of cherry and sappy character. On the palate deliciously sweet-fruited and fresh, that perfumed nose followed through with energy in the mouth. Perhaps the young vines will give a little more flesh in time, but delightful buoyant stuff. Doesn’t appear to be currently available for sale.
Also from Pumanque, 14.8% ABV. Planted in 2007, this is another very young vines wine with a massively deep, saturated colour. It too is inky and youthful on the nose, blue/black fruit again but great thrust and brightness, kirsch and purity to the fore. Racy and fine on the palate it is another delicious wine revelling in the upfront fruit and tannic grip of the young fruit, laden with blackberry crunch, but promising great things. Doesn’t appear to be currently available for sale.
|
#
# Sublime Text plugin to support Corona Editor
#
# Copyright (c) 2013 Corona Labs Inc. A mobile development software company. All rights reserved.
#
# MIT License - see https://raw.github.com/coronalabs/CoronaSDK-SublimeText/master/LICENSE
import sublime
import sublime_plugin
try:
from . import _corona_utils # P3
except:
import _corona_utils # P2
class ToggleBuildPanelCommand(sublime_plugin.WindowCommand):
    """Toggle visibility of the build ("exec") output panel."""

    def run(self):
        panel = self.window.get_output_panel("exec")
        # A panel attached to a window is currently visible.
        action = "hide_panel" if panel.window() else "show_panel"
        self.window.run_command(action, {"panel": "output.exec"})

    def description(self):
        """Menu caption reflecting the panel's current visibility."""
        panel = self.window.get_output_panel("exec")
        return "Hide Build Panel" if panel.window() else "Show Build Panel"
class RunProjectCommand(sublime_plugin.WindowCommand):
    """Launch the Corona Simulator for the project of the current Lua file."""

    # Find a main.lua file to start the Simulator with or, failing that, any
    # open Lua file we can use as a place to start looking for a main.lua.
    def findLuaFile(self):
        candidate = self.window.active_view().file_name()
        if not (candidate and candidate.endswith(".lua")):
            candidate = None
            # No current .lua file; fall back to the last open .lua view.
            for view in self.window.views():
                name = view.file_name()
                if name and name.endswith(".lua"):
                    candidate = name
        return candidate

    def is_enabled(self):
        """Only enabled when some .lua file is available."""
        return self.findLuaFile() is not None

    def run(self):
        filename = self.findLuaFile()
        if filename is None:
            sublime.error_message("Can't find an open '.lua' file to determine the location of 'main.lua'")
            return
        mainlua = _corona_utils.ResolveMainLua(filename)
        if mainlua is None:
            sublime.error_message("Can't locate 'main.lua' for this project (try opening it in an editor tab)")
            return
        simulator_path, simulator_flags = _corona_utils.GetSimulatorCmd(mainlua)
        cmd = [simulator_path] + simulator_flags + [mainlua]
        print(_corona_utils.PACKAGE_NAME + ": Running: " + str(cmd))
        # Save our changes before we run
        self.window.run_command("save")
        self.window.run_command('exec', {'cmd': cmd})
|
Stick it Down: February sketch reveals!
Jenny at Scrappin Happy Studio specializes in handmade scrapbook embellishments, such as epoxy flair. All of her products are handmade in her home studio in the beautiful state of Oregon!
One winner will receive a $20 store credit to the Scrappin Happy Studio!
One randomly selected winner will receive a prize from Scrappin Happy Studio! We'll also choose one winner from each sketch to be a guest designer!
Such wonderful works. All so inspiring.
|
# -*- coding: utf-8 -*-
'''
Created on 2014年7月18日
@author: SynTuner
'''
class CardCost: #have some problems handling Chinese with regular expressions on OCTGN. All these classes is necessary until we can handle it in a uniform way.
def __init__(self):
self.etype = -2
def WX01_001(self, card, etype): #太阳之巫女 玉依姬
mute()
global cost
global echoice
global specialcost
notify("done")
growcost = ["白","白", "白"]
effectcost2 = ["白"]
if etype == -2:
return False
elif etype == -1:
cost == []
elif etype == 0: #to grow
cost = growcost
for color in cost:
notify(color)
elif etype == 1: #to activate Arrival
pass
else:
cost = effectcost2
specialcost = {"Discard":{"color": "白", "ctype": "SIGNI", "qty": 1}}
def WX01_002(self, card, etype): #晓之巫女 玉依姬
mute()
global cost
global echoice
global specialcost
notify("done")
growcost = ["白","白", "红","红"]
effectcost2 = ["白","白","红"]
if etype == -2:
return False
elif etype == -1:
cost == []
elif etype == 0: #to grow
cost = growcost
for color in cost:
notify(color)
elif etype == 1: #to activate Arrival
pass
else:
cost = effectcost2
specialcost = {"Down":{"target":"self"}}
def WX01_003(self, card, etype): #百火缭乱 花代•肆
mute()
global cost
global echoice
global specialcost
notify("done")
growcost = ["红", "红","红"]
effectcost2 = ["红"]
if etype == -2:
return False
elif etype == -1:
cost == []
elif etype == 0: #to grow
cost = growcost
for color in cost:
notify(color)
elif etype == 1: #to activate Arrival
pass
else:
cost = effectcost2
specialcost = {"Discard":{"color": "红", "ctype": "SIGNI", "qty": 1}}
def WX01_004(self, card, etype): #轰炎 花代•贰改
mute()
global cost
global echoice
global specialcost
notify("done")
growcost = ["红", "红"]
effectcost2 = ["红", "红", "红"]
if etype == -2:
return False
elif etype == -1:
cost == []
elif etype == 0: #to grow
cost = growcost
for color in cost:
notify(color)
elif etype == 1: #to activate Arrival
pass
else:
cost = effectcost2
    def WX01_005(self, card, etype): # Code Piruluk Omega (代号 皮璐璐可•Ω)
        # Delegates cost setup to uniformCost: grow cost = 3x blue, and the
        # final special cost discards one blue SIGNI.
        # NOTE(review): uniformCost is defined elsewhere in the game scripts;
        # the positional argument meanings are assumed from usage here -- confirm
        # against its definition.
        mute()
        uniformCost(etype, ["蓝","蓝","蓝"], [], [], [], {"Discard":{"color": "蓝", "ctype": "SIGNI", "qty": 1}}, 2)
def WX01_006(self, card, etype): #四式战帝女 绿姬
mute()
global cost
global echoice
global specialcost
notify("done")
growcost = ["绿","绿","绿"]
effectcost2 = []
if etype == -2:
return False
elif etype == -1:
cost == []
elif etype == 0: #to grow
cost = growcost
for color in cost:
notify(color)
elif etype == 1: #to activate Arrival effect
pass
else:
cost = effectcost2
specialcost = {"Discard":{"color": "绿", "ctype": "SIGNI", "qty": 1}}
def WX01_007(self, card, etype): #月蚀之巫女 玉依姬
mute()
global cost
global echoice
global specialcost
notify("done")
growcost = ["白","白"]
effectcost1 = ["白"]
if etype == -2:
return False
elif etype == -1:
cost == []
elif etype == 0: #to grow
cost = growcost
for color in cost:
notify(color)
elif etype == 1: #to activate Arrival effect
cost = effectcost1
else:
pass
def WX01_008(self, card, etype): #流星之巫女 玉依姬
mute()
uniformCost(etype, ["白"], [], [], [], {}, 0)
def WX01_009(self, card, etype): #新星之巫女 玉依姬
mute()
uniformCost(etype, [], [], [], [], {"Discard":{"qty": 1}}, 1)
def WX01_010(self, card, etype): #杰诺之门
mute()
uniformCost(etype, [], [], [], ["白"], {}, 0)
def WX01_011(self, card, etype): #炽炎舞 花代•叁
mute()
uniformCost(etype, ["红", "红"], [], ["红"], [], {}, 0)
def WX01_012(self, card, etype): #刚炎 花代•贰
mute()
uniformCost(etype, ["红"], [], [], [], {}, 0)
def WX01_013(self, card, etype): #焰 花代•壹
mute()
uniformCost(etype, [], [], [], [], {"Discard":{"qty": 1}}, 1)
def WX01_014(self, card, etype): #烈霸一络
mute()
uniformCost(etype, [], [], [], ["红", "红", "红"], {}, 0)
def WX01_015(self, card, etype): #代号 皮璐璐可•Γ
mute()
uniformCost(etype, ["蓝", "蓝"], [], ["蓝"], [], {}, 0)
def WX01_016(self, card, etype): #代号 皮璐璐可•Β
mute()
uniformCost(etype, ["蓝"], [], [], [], {}, 0)
def WX01_017(self, card, etype): #代号 皮璐璐可•Α
mute()
uniformCost(etype, [], [], [], [], {"Discard":{"qty": 1}}, 1)
def WX01_018(self, card, etype): #魔法反制
mute()
uniformCost(etype, [], [], ["蓝"], ["蓝", "无"], {}, 0)
def WX01_019(self, card, etype): #四型皇艳娘 绿姬
mute()
uniformCost(etype, ["绿", "绿", "绿"], [], [], [], {}, 0)
def WX01_020(self, card, etype): #三型雌雌娘 绿姬
mute()
uniformCost(etype, ["绿", "绿"], [], [], [], {}, 0)
def WX01_021(self, card, etype): #二型斗婚娘 绿姬
mute()
uniformCost(etype, ["绿"], [], [], [], {}, 0)
def WX01_022(self, card, etype): #一型舞斗娘 绿姬
mute()
uniformCost(etype, [], [], [], [], {"Discard":{"qty": 1}}, 1)
def WX01_023(self, card, etype): #大器晚成
mute()
uniformCost(etype, [], [], [], ["绿", "绿", "绿", "绿", "绿", "无", "无", "无", "无", "无", "无", "无"], {}, 0)
def WX01_024(self, card, etype): #奇奇怪怪
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WX01_025(self, card, etype): #营救
mute()
uniformCost(etype, [], [], [], ["无"], {}, 0)
def WX01_026(self, card, etype): #充能
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WX01_027(self, card, etype): #原枪 源能枪
mute()
uniformCost(etype, [], [], ["白"], ["白", "白"], {}, 0)
def WX01_028(self, card, etype): #弧光圣气
mute()
uniformCost(etype, [], [], [], ["白", "白", "白", "白", "白"], {}, 0)
def WX01_029(self, card, etype): #罗辉石 金刚珠玉
mute()
uniformCost(etype, [], [], ["红"], ["红", "红"], {}, 0)
def WX01_030(self, card, etype): #赎罪之对火
mute()
uniformCost(etype, [], [], [], ["红", "红", "红"], {}, 0)
def WX01_031(self, card, etype): #核心代号 V•A•C
mute()
uniformCost(etype, [], [], ["蓝"], ["蓝", "蓝"], {}, 0)
def WX01_032(self, card, etype): #抢夺
mute()
uniformCost(etype, [], [], [], ["蓝", "蓝", "无"], {}, 0)
def WX01_033(self, card, etype): #幻兽神 御先狐
mute()
uniformCost(etype, [], [], ["绿"], ["绿", "绿"], {}, 0)
def WX01_034(self, card, etype): #修复
mute()
uniformCost(etype, [], [], [], ["绿", "绿", "绿"], {}, 0)
def WX01_035(self, card, etype): #祝福女神 雅典娜
mute()
uniformCost(etype, [], [], [], ["白"], {"Down":{"target":"self"}}, 2)
def WX01_036(self, card, etype): #巨弓 抛射弓
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WX01_037(self, card, etype): #无法忘却的幻想 瓦尔基里
mute()
uniformCost(etype, [], [], [], [], {"Down":{"target":"self"}}, 2)
def WX01_038(self, card, etype): #获得但他林
mute()
uniformCost(etype, [], [], [], ["白", "红"], {}, 0)
def WX01_039(self, card, etype): #弩炮 加农炮
mute()
uniformCost(etype, [], [], [], ["红"], {"Down":{"target":"self"}}, 2)
def WX01_040(self, card, etype): #罗石 山铜
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WX01_041(self, card, etype): #轰炮 法典炮
mute()
uniformCost(etype, [], [], [], [], {"Down":{"target":"self"}}, 2)
def WX01_042(self, card, etype): #断罪之轹断
mute()
uniformCost(etype, [], [], [], ["红", "红", "红"], {}, 0)
def WX01_043(self, card, etype): #幻水 雅莱娅尔
mute()
uniformCost(etype, [], [], [], ["蓝"], {"Down":{"target":"self"}}, 2)
def WX01_044(self, card, etype): #技艺代号 P•Z•L
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WX01_045(self, card, etype): #幻水 夏克兰丝
mute()
uniformCost(etype, [], [], [], [], {"Down":{"target":"self"}}, 2)
def WX01_046(self, card, etype): #情况糟糕
mute()
uniformCost(etype, [], [], [], ["蓝"], {}, 0)
def WX01_047(self, card, etype): #罗植 曼茶罗花
mute()
uniformCost(etype, [], [], [], [], {"Down":{"target":"self"}}, 2)
def WX01_048(self, card, etype): #幻兽 雪怪
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WX01_049(self, card, etype): #罗植 植生羊
mute()
uniformCost(etype, [], [], [], [], {"Down":{"target":"self"}}, 2)
def WX01_050(self, card, etype): #大化
mute()
uniformCost(etype, [], [], [], ["绿"], {}, 0)
def WX01_051(self, card, etype): #侍从Q
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WX01_052(self, card, etype): #包括的知识
mute()
uniformCost(etype, [], [], [], ["无", "无"], {}, 0)
def WX01_053(self, card, etype): #极剑 噬神剑
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WX01_054(self, card, etype): #极盾 埃奎斯盾
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WX01_055(self, card, etype): #大盾 镇暴盾
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WX01_056(self, card, etype): #中盾 方盾
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WX01_057(self, card, etype): #出弓 炽天弓
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WX01_058(self, card, etype): #重新开始的对话 米迦勒
mute()
uniformCost(etype, [], [], [], [], {"Discard":{"qty": 1}, "Down":{"target":"self"}}, 2)
def WX01_059(self, card, etype): #出弓 普弓
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WX01_060(self, card, etype): #小盾 圆盾
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WX01_061(self, card, etype): #探求的思想 汉尼尔
mute()
uniformCost(etype, [], [], [], [], {"Discard":{"qty": 1}, "Down":{"target":"self"}}, 2)
def WX01_062(self, card, etype): #将之开启
mute()
uniformCost(etype, [], [], [], ["白"], {}, 0)
def WX01_063(self, card, etype): #做好准备
mute()
uniformCost(etype, [], [], [], ["白"], {}, 0)
def WX01_064(self, card, etype): #罗石 金属
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WX01_065(self, card, etype): #罗石 绿宝石
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WX01_066(self, card, etype): #罗石 红宝石
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WX01_067(self, card, etype): #罗石 磷矿石
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WX01_068(self, card, etype): #罗石 琥珀
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WX01_069(self, card, etype): #爆炮 远射炮
mute()
uniformCost(etype, [], [], [], [], {"Discard":{"qty": 1}, "Down":{"target":"self"}}, 2)
def WX01_070(self, card, etype): #罗石 海人草
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WX01_071(self, card, etype): #罗石 蓝宝石
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WX01_072(self, card, etype): #小炮 德拉古诺夫枪
mute()
uniformCost(etype, [], [], [], [], {"Discard":{"qty": 1}, "Down":{"target":"self"}}, 2)
def WX01_073(self, card, etype): #落星炎球
mute()
uniformCost(etype, [], [], [], ["红", "红", "红"], {}, 0)
def WX01_074(self, card, etype): #棱晶火柱
mute()
uniformCost(etype, [], [], [], ["白", "红"], {}, 0)
def WX01_075(self, card, etype): #技艺代号 A•S•M
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WX01_076(self, card, etype): #技艺代号 I•D•O•L
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WX01_077(self, card, etype): #技艺代号 A•D•B
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WX01_078(self, card, etype): #技艺代号 S•T•G
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WX01_079(self, card, etype): #技艺代号 W•T•C
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WX01_080(self, card, etype): #幻水 夏可檀
mute()
uniformCost(etype, [], [], [], [], {"Discard":{"qty": 1}, "Down":{"target":"self"}}, 2)
def WX01_081(self, card, etype): #技艺代号 T•V
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WX01_082(self, card, etype): #技艺代号 F•A•N
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WX01_083(self, card, etype): #幻水 克马诺明
mute()
uniformCost(etype, [], [], [], [], {"Discard":{"qty": 1}, "Down":{"target":"self"}}, 2)
def WX01_084(self, card, etype): #事不过三
mute()
uniformCost(etype, [], [], [], ["蓝"], {}, 0)
def WX01_085(self, card, etype): #冰封
mute()
uniformCost(etype, [], [], [], ["蓝"], {}, 0)
def WX01_086(self, card, etype): #幻兽 飞鹰
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WX01_087(self, card, etype): #幻兽 猫妖精
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WX01_088(self, card, etype): #幻兽 猫头鹰
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WX01_089(self, card, etype): #幻兽 黑猫
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WX01_090(self, card, etype): #幻兽 麻雀
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WX01_091(self, card, etype): #幻兽 树袋熊
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WX01_092(self, card, etype): #幻兽 白猫
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WX01_093(self, card, etype): #罗植 蒲公英
mute()
uniformCost(etype, [], [], [], [], {"Discard":{"qty": 1}, "Down":{"target":"self"}}, 2)
def WX01_094(self, card, etype): #幻兽 燕子
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WX01_095(self, card, etype): #幻兽 大熊猫
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WX01_096(self, card, etype): #幻兽 三色猫
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WX01_097(self, card, etype): #罗植 鼠尾草
mute()
uniformCost(etype, [], [], [], [], {"Discard":{"qty": 1}, "Down":{"target":"self"}}, 2)
def WX01_098(self, card, etype): #芽生
mute()
uniformCost(etype, [], [], [], ["绿"], {}, 0)
def WX01_099(self, card, etype): #逆出
mute()
uniformCost(etype, [], [], [], ["绿"], {}, 0)
def WX01_100(self, card, etype): #侍从T
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WX01_101(self, card, etype): #侍从D
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WX01_102(self, card, etype): #侍从O
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WX01_103(self, card, etype): #喷流的知识
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WD01_001(self, card, etype): #满月之巫女 玉依姬
mute()
uniformCost(etype, ["白","白","白"], [], [], [], {}, 0)
def WD01_002(self, card, etype): #弦月之巫女 玉依姬
mute()
uniformCost(etype, ["白","白"], [], [], [], {}, 0)
def WD01_003(self, card, etype): #半月之巫女 玉依姬
mute()
uniformCost(etype, ["白"], [], [], [], {}, 0)
def WD01_004(self, card, etype): #三日月之巫女 玉依姬
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WD01_005(self, card, etype): #新月之巫女 玉依姬
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WD01_006(self, card, etype): #洛可可界线
mute()
uniformCost(etype, [], [], [], ["白", "白", "白", "无", "无"], {}, 0)
def WD01_007(self, card, etype): #艾本之书
mute()
uniformCost(etype, [], [], [], ["白", "白", "白"], {}, 0)
def WD01_008(self, card, etype): #巴洛克防御
mute()
uniformCost(etype, [], [], [], ["白", "白"], {}, 0)
def WD01_009(self, card, etype): #甲胄 皇家铠
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WD01_010(self, card, etype): #大剑 石中剑
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WD01_011(self, card, etype): #笼手 铁拳
mute()
uniformCost(etype, [], [], ["白"], [], {}, 0)
def WD01_012(self, card, etype): #中剑 焰形剑
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WD01_013(self, card, etype): #小剑 库克力弯刀
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WD01_014(self, card, etype): #小弓 箭矢
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WD01_015(self, card, etype): #获得圣经
mute()
uniformCost(etype, [], [], [], ["白"], {}, 0)
def WD02_001(self, card, etype): #花代•肆
mute()
uniformCost(etype, ["红", "红", "红"], [], [], [], {}, 0)
def WD02_002(self, card, etype): #花代•叁
mute()
uniformCost(etype, ["红", "红"], [], [], [], {}, 0)
def WD02_003(self, card, etype): #花代•贰
mute()
uniformCost(etype, ["红"], [], [], [], {}, 0)
def WD02_004(self, card, etype): #花代•壹
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WD02_005(self, card, etype): #花代•零
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WD02_006(self, card, etype): #飞火夏虫
mute()
uniformCost(etype, [], [], [], ["红", "红", "红"], {}, 0)
def WD02_007(self, card, etype): #背炎之阵
mute()
uniformCost(etype, [], [], [], ["红", "红"], {"Discard":{"qty": 3}}, 2)
def WD02_008(self, card, etype): #烧石炎
mute()
uniformCost(etype, [], [], [], ["红", "无"], {}, 0)
def WD02_009(self, card, etype): #罗石 火山石
mute()
uniformCost(etype, [], [], ["红", "红", "红"], [], {}, 0)
def WD02_010(self, card, etype): #罗石 白银
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WD02_011(self, card, etype): #罗石 石榴石
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WD02_012(self, card, etype): #罗石 铜
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WD02_013(self, card, etype): #罗石 铁
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WD02_014(self, card, etype): #罗石 紫水晶
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WD02_015(self, card, etype): #轰音火柱
mute()
uniformCost(etype, [], [], [], ["红"], {}, 0)
def WD03_001(self, card, etype): #代号•皮璐璐可•T
mute()
uniformCost(etype, ["蓝", "蓝", "蓝"], [], [], [], {}, 0)
def WD03_002(self, card, etype): #代号•皮璐璐可•G
mute()
uniformCost(etype, ["蓝", "蓝"], [], [], [], {}, 0)
def WD03_003(self, card, etype): #代号•皮璐璐可•M
mute()
uniformCost(etype, ["蓝"], [], [], [], {}, 0)
def WD03_004(self, card, etype): #代号•皮璐璐可•K
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WD03_005(self, card, etype): #代号•皮璐璐可
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WD03_006(self, card, etype): #窥视分析
mute()
uniformCost(etype, [], [], [], ["蓝", "蓝", "蓝"], {}, 0)
def WD03_007(self, card, etype): #不可行动
mute()
uniformCost(etype, [], [], [], ["蓝", "蓝", "蓝"], {}, 0)
def WD03_008(self, card, etype): #双重抽卡
mute()
uniformCost(etype, [], [], [], ["蓝", "无"], {}, 0)
def WD03_009(self, card, etype): #技艺代号 R•M•N
mute()
uniformCost(etype, [], [], ["蓝"], [], {}, 0)
def WD03_010(self, card, etype): #技艺代号 D•R•S
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WD03_011(self, card, etype): #技艺代号 S•M•P
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WD03_012(self, card, etype): #技艺代号 J•V
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WD03_013(self, card, etype): #技艺代号 S•C
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WD03_014(self, card, etype): #技艺代号 R•F•R
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WD03_015(self, card, etype): #真可惜
mute()
uniformCost(etype, [], [], [], ["蓝"], {}, 0)
def PR_017(self, card, etype): #中枪 古罗马长矛
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def PR_018(self, card, etype): #罗石 秘银
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def PR_019(self, card, etype): #珍宝
mute()
uniformCost(etype, [], [], [], ["蓝"], {}, 0) #special case , we treat TREASURE's special cost as its effect.
def PR_020(self, card, etype): #增援
# Two-mode spell: communicates its cost and the chosen mode through the
# module-level globals `cost` and `echoice` (same protocol as WX02_001).
mute()
effectcost2 = ["绿"]
choiceList = ["「从你的卡组里探寻1张力量10000以上的SIGNI卡,将其公开并加入加入手牌。之后将卡组洗切。」", \
"「从你的卡组顶将2张卡放置到能量区」"]
colorsList = ['#FF0000', '#FF0000']
global cost
global echoice
# NOTE(review): cost is set for every etype, not only the use-cost path —
# presumably intentional for a spell, but confirm against uniformCost's callers.
cost = effectcost2
if etype == 2:
echoice = askChoice("选择一个效果发动:", choiceList, colorsList)
def PR_040(self, card, etype): #多重
# "Multi" spell: the player picks two DIFFERENT effects out of four.
mute()
effectcost2 = ["白","白","蓝","蓝"]
choiceList = ["「对战对手的1只LRIG在这个回合中不能攻击。」", \
"「将对战对手的所有SIGNI冻结。」", \
"「将对战对手的1只SIGNI返回手牌。」", \
"「抽2张卡。」"]
colorsList = ['#FF0000', '#FF0000', '#FF0000', '#FF0000']
global cost
global echoice
cost = effectcost2
if etype == 2:
echoice1 = askChoice("选择第一个效果:", choiceList, colorsList)
# Remove the first pick so it cannot be chosen again (askChoice is 1-based).
del choiceList[echoice1 - 1]
del colorsList[echoice1 - 1]
inter = askChoice("选择第二个效果:", choiceList, colorsList)
# `inter` indexes the SHORTENED list; shift it back up to the original
# 4-entry numbering when it lands at or after the removed slot.
if echoice1 <= inter:
echoice2 = inter + 1
else:
echoice2 = inter
echoice = [echoice1, echoice2]
# Cost callbacks for the WD04 (green) and WD05 (black) starter decks.
# Trailing comments give each card's (Chinese) name; all delegate to uniformCost.
def WD04_001(self, card, etype): #四之娘 绿姬
mute()
uniformCost(etype, ["绿", "绿", "绿"], [], [], [], {}, 0)
def WD04_002(self, card, etype): #三之娘 绿姬
mute()
uniformCost(etype, ["绿", "绿"], [], [], [], {}, 0)
def WD04_003(self, card, etype): #二之娘 绿姬
mute()
uniformCost(etype, ["绿"], [], [], [], {}, 0)
def WD04_004(self, card, etype): #一之娘 绿姬
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WD04_005(self, card, etype): #斗娘 绿姬
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WD04_006(self, card, etype): #意气扬扬
mute()
uniformCost(etype, [], [], [], ["绿"], {}, 0)
def WD04_007(self, card, etype): #再三再四
mute()
uniformCost(etype, [], [], [], ["绿"], {}, 0)
def WD04_008(self, card, etype): #付和雷同
mute()
uniformCost(etype, [], [], [], ["绿", "绿", "绿"], {}, 0)
def WD04_009(self, card, etype): #幻兽 青龙
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WD04_010(self, card, etype): #幻兽 朱雀小姐
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WD04_013(self, card, etype): #幻兽 小玄武
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WD04_015(self, card, etype): #幻兽 白虎
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WD04_016(self, card, etype): #侍从 Q2
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WD04_017(self, card, etype): #侍从 O2
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WD04_018(self, card, etype): #堕络
mute()
uniformCost(etype, [], [], [], ["绿"], {}, 0)
def WD05_001(self, card, etype): #狱卒阎魔 乌莉丝
mute()
uniformCost(etype, ["黑", "黑", "黑"], [], [], [], {}, 0)
def WD05_002(self, card, etype): #阿鼻阎魔 乌莉丝
mute()
uniformCost(etype, ["黑", "黑"], [], [], [], {}, 0)
def WD05_003(self, card, etype): #众合阎魔 乌莉丝
mute()
uniformCost(etype, ["黑"], [], [], [], {}, 0)
def WD05_004(self, card, etype): #灼热阎魔 乌莉丝
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WD05_005(self, card, etype): #阎魔 乌莉丝
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WD05_006(self, card, etype): #处刑时刻
mute()
uniformCost(etype, [], [], [], ["黑", "黑"], {}, 0)
def WD05_007(self, card, etype): #永恒处刑
mute()
uniformCost(etype, [], [], [], ["黑", "黑", "黑"], {}, 0)
def WD05_008(self, card, etype): #出墓
mute()
uniformCost(etype, [], [], [], ["黑", "黑", "黑", "黑", "黑"], {}, 0)
def WD05_009(self, card, etype): #堕落炮女 缅茨姆
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WD05_010(self, card, etype): #废恶象征 别西卜
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WD05_011(self, card, etype): #堕落炮女 卡莉
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WD05_012(self, card, etype): #背德象征 科思莫
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WD05_013(self, card, etype): #小恶象征 小鬼
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WD05_014(self, card, etype): #堕落炮女 魅魔
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WD05_017(self, card, etype): #完全漆黑
mute()
uniformCost(etype, [], [], [], ["黑"], {}, 0)
def WD05_018(self, card, etype): #回想的祝福
mute()
uniformCost(etype, [], [], [], ["无", "无"], {}, 0)
def WX02_001(self, card, etype):  # 金木犀之巫女 玉依姬
    """Cost callback for this LRIG.

    etype: -2 = no cost path, -1 = free, 0 = grow, 1 = arrival effect,
    otherwise = activated effect (player picks one of two).
    Communicates via the module-level globals `cost` and `echoice`.
    """
    mute()
    growcost = ["白","红", "绿"]
    effectcost1 = ["白"]
    effectcost2 = ["白","红"]
    effectcost3 = ["白","绿","无"]
    choiceList = ["【起】白1+红1:将对战对手的1只力量7000以下的SIGNI驱逐。", "【起】白1+绿1+无1:将对战对手的1只力量10000以上的SIGNI驱逐。"]
    colorsList = ['#FF0000', '#FF0000']
    global cost
    global echoice
    if etype == -2:
        return False
    elif etype == -1:
        # BUGFIX: was `cost == []` (a no-op comparison); the stale global cost
        # from a previous card would have been charged. Assign, don't compare.
        cost = []
    elif etype == 0:  # to grow
        cost = growcost
        for color in cost:
            notify(color)
    elif etype == 1:  # to activate Arrival
        cost = effectcost1
    else:
        echoice = askChoice("选择一个效果发动:", choiceList, colorsList, customButtons = ["取消"])
        if echoice == 1:
            cost = effectcost2
        elif echoice == 2:
            cost = effectcost3
# Simple uniformCost delegations for WX02_002..WX02_006 (card names in trailing comments).
def WX02_002(self, card, etype): #火鸟风月 游月·肆
mute()
uniformCost(etype, ["红", "红", "绿", "绿"], [], [], [], {"Down":{"target":"self"}}, 2)
def WX02_003(self, card, etype): #艾尔德拉×Ⅳ式
mute()
uniformCost(etype, ["蓝", "蓝", "蓝"], [], [], [], {}, 0)
def WX02_004(self, card, etype): #无间阎魔 乌莉丝
mute()
uniformCost(etype, ["黑", "黑", "黑"], [], [], ["黑"], {"Discard":{"color": "黑", "ctype": "SIGNI", "qty": 1}}, 0)
def WX02_005(self, card, etype): #纯白希望
mute()
uniformCost(etype, [], [], [], ["白", "白", "红"], {}, 0)
def WX02_006(self, card, etype): #漆黑野望
mute()
uniformCost(etype, [], [], [], ["黑", "黑", "黑"], {}, 0)
def WX02_007(self, card, etype):  # 轰罪炎 游月·叁
    """Cost callback; same etype protocol as WX02_001.

    Communicates via the module-level globals `cost` and `echoice`.
    """
    mute()
    growcost = ["红", "绿"]
    # BUGFIX: `effectcost1` was referenced below but never defined (NameError on
    # etype == 1). This LRIG has no arrival cost — TODO confirm against card text.
    effectcost1 = []
    effectcost2 = ["红"]
    effectcost3 = ["绿"]
    choiceList = ["【起】红1:将对战对手的1只力量5000以下的SIGNI驱逐。", "【起】绿1:直到回合结束时为止,你所有的SIGNI的力量+5000。"]
    colorsList = ['#FF0000', '#FF0000']
    global cost
    global echoice
    if etype == -2:
        return False
    elif etype == -1:
        # BUGFIX: was `cost == []` (no-op comparison); assign the empty cost.
        cost = []
    elif etype == 0:  # to grow
        cost = growcost
    elif etype == 1:  # to activate Arrival
        cost = effectcost1
    else:
        echoice = askChoice("选择一个效果发动:", choiceList, colorsList, customButtons = ["取消"])
        if echoice == 1:
            cost = effectcost2
        elif echoice == 2:
            cost = effectcost3
# Simple uniformCost delegations for WX02_008..WX02_020 (card names in trailing comments).
def WX02_008(self, card, etype): #焰海 游月•贰
mute()
uniformCost(etype, ["红"], [], [], [], {}, 0)
def WX02_009(self, card, etype): #焰 游月•壹
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WX02_010(self, card, etype): #艾尔德拉×Ⅲ式
mute()
uniformCost(etype, ["蓝", "蓝"], [], [], [], {}, 0)
def WX02_011(self, card, etype): #艾尔德拉×Ⅱ式
mute()
uniformCost(etype, ["蓝"], [], [], [], {}, 0)
def WX02_012(self, card, etype): #艾尔德拉×Ⅰ式
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WX02_013(self, card, etype): #叫唤阎魔 乌莉丝
mute()
uniformCost(etype, ["黑", "黑"], [], [], [], {"Discard":{"qty": 1}}, 1)
def WX02_014(self, card, etype): #黑绳阎魔 乌莉丝
mute()
uniformCost(etype, ["黑"], [], [], [], {}, 0)
def WX02_015(self, card, etype): #等活阎魔 乌莉丝
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WX02_016(self, card, etype): #哥特界限
mute()
uniformCost(etype, [], [], [], ["白", "无", "无"], {}, 0)
def WX02_017(self, card, etype): #气炎万丈
mute()
uniformCost(etype, [], [], [], ["红", "绿"], {}, 0)
def WX02_018(self, card, etype): #火红柳绿
mute()
uniformCost(etype, [], [], [], ["红"], {}, 0)
def WX02_019(self, card, etype): #交织生命护甲
mute()
uniformCost(etype, [], [], [], ["蓝"], {}, 0)
def WX02_020(self, card, etype): #鲜血斩击
mute()
uniformCost(etype, [], [], [], ["黑", "黑"], {}, 0)
def WX02_021(self, card, etype):  # 先驱的大天使 大天使该隐
    """Set the global cost: two white for the arrival effect (etype == 1), free otherwise."""
    mute()
    global cost
    cost = ["白", "白"] if etype == 1 else []
# Simple uniformCost delegations for WX02_022..WX02_028 (card names in trailing comments).
def WX02_022(self, card, etype): #弩炮 狙击枪
mute()
uniformCost(etype, [], [], ["红"], [], {}, 0)
def WX02_023(self, card, etype): #幻水姬 丝派拉尔•卡米拉
mute()
uniformCost(etype, [], [], ["蓝"], [], {}, 0)
def WX02_024(self, card, etype): #罗植姬 戈休·雅格尼丝
mute()
uniformCost(etype, [], [], ["绿"], [], {"Down":{"target":"植物"}}, 0)
def WX02_025(self, card, etype): #恶魔姬 安娜•蜃影
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WX02_026(self, card, etype): #愿望危机
mute()
uniformCost(etype, [], [], [], ["白"], {}, 0)
def WX02_027(self, card, etype): #焦土的代价
mute()
uniformCost(etype, [], [], [], ["红", "红"], {}, 0)
def WX02_028(self, card, etype): #谜言暗气
mute()
uniformCost(etype, [], [], [], ["黑", "黑"], {}, 0)
def WX02_029(self, card, etype): #宝具 御剑
# This card charges no color cost; its price is discarding one 武装/武器 SIGNI,
# published through the global `specialcost` (only set on the arrival path —
# NOTE(review): other etype values leave the previous global value in place;
# confirm the caller resets `specialcost` between cards.
mute()
global cost
global echoice
global specialcost
if etype == -2:
return False
elif etype == -1: pass
elif etype == 0: pass
elif etype == 1:
specialcost = {"Discard":{"ctype": "SIGNI", "signiclass": ["武装", "武器"], "qty": 1}}
else:
pass
# Simple uniformCost delegations for the rest of WX02 (card names in trailing comments).
def WX02_030(self, card, etype): #宝具 御镜
mute()
uniformCost(etype, [], [], [], [], {"Down":{"target":"self"}}, 2)
def WX02_031(self, card, etype): #使其反跳
mute()
uniformCost(etype, [], [], [], ["白", "白"], {}, 0)
def WX02_032(self, card, etype): #罗石 蛋白石
mute()
uniformCost(etype, [], [], [], [], {"Discard":{"ctype": "SIGNI", "signiclass": ["矿石", "宝石"], "qty": 1}}, 1)
def WX02_033(self, card, etype): #罗石 红玉髓
mute()
uniformCost(etype, [], [], [], [], {"Down":{"target":"self"}}, 2)
def WX02_034(self, card, etype): #不希望的冲动
mute()
uniformCost(etype, [], [], [], ["红", "绿"], {}, 0)
def WX02_035(self, card, etype): #技艺代号 C·P·U
mute()
uniformCost(etype, [], [], [], [], {"Discard":{"ctype": "SIGNI", "signiclass": ["电机"], "qty": 1}}, 1)
def WX02_036(self, card, etype): #技艺代号 G•A•B
mute()
uniformCost(etype, [], [], [], [], {"Down":{"target":"self"}}, 2)
def WX02_037(self, card, etype): #飞溅
mute()
uniformCost(etype, [], [], [], ["蓝", "无"], {}, 0)
def WX02_038(self, card, etype): #幻兽 雉
mute()
uniformCost(etype, [], [], [], [], {"Discard":{"ctype": "SIGNI", "signiclass": ["空兽", "地兽"], "qty": 1}}, 1)
def WX02_039(self, card, etype): #幻兽 八犬
mute()
uniformCost(etype, [], [], [], [], {"Down":{"target":"self"}}, 2)
def WX02_040(self, card, etype): #着植
mute()
uniformCost(etype, [], [], [], ["绿", "绿", "无", "无", "无"], {}, 0)
def WX02_041(self, card, etype): #大损
mute()
uniformCost(etype, [], [], [], ["绿", "绿"], {}, 0)
def WX02_042(self, card, etype): #反制代号 巴勒贝克
mute()
uniformCost(etype, [], [], [], ["黑"], {"Down":{"target":"self"}}, 2)
def WX02_043(self, card, etype): #反制代号 基西拉
mute()
uniformCost(etype, [], [], [], [], {"Down":{"target":"self"}}, 2)
def WX02_044(self, card, etype): #大罪缘由 巴力
mute()
uniformCost(etype, [], [], [], [], {"Down":{"target":"self"}}, 2)
def WX02_045(self, card, etype): #献祭斩击
mute()
uniformCost(etype, [], [], [], ["黑", "黑", "黑"], {}, 0)
def WX02_046(self, card, etype): #牺牲的微笑 丘雅耶尔
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WX02_047(self, card, etype): #虚构的爱情 希耶尔
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WX02_048(self, card, etype): #宝具 勾玉
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WX02_049(self, card, etype): #博爱的聚集 萨尼耶尔
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WX02_050(self, card, etype): #刀剑本领
mute()
uniformCost(etype, [], [], [], ["白", "白"], {}, 0)
def WX02_051(self, card, etype): #轰炮 远射装置
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WX02_052(self, card, etype): #爆炮 MP5
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WX02_053(self, card, etype): #罗石 翡翠
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WX02_054(self, card, etype): #小炮 枪匠
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WX02_055(self, card, etype): #光欲宝剑
mute()
uniformCost(etype, [], [], [], ["红"], {}, 0)
def WX02_056(self, card, etype): #幻水 奥科特
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WX02_057(self, card, etype): #幻水 珍珠
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WX02_058(self, card, etype): #技艺代号 M•M•R
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WX02_059(self, card, etype): #幻水 科塞梅
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WX02_060(self, card, etype): #探寻者
mute()
uniformCost(etype, [], [], [], ["无"], {}, 0)
def WX02_061(self, card, etype): #蓝色收获
mute()
uniformCost(etype, [], [], [], ["蓝", "无"], {}, 0)
def WX02_062(self, card, etype): #罗植 葵小姐
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WX02_063(self, card, etype): #罗植 莲
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WX02_064(self, card, etype): #幻兽 猴
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WX02_065(self, card, etype): #罗植 虎尾兰
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WX02_066(self, card, etype): #丰润
mute()
uniformCost(etype, [], [], [], ["绿"], {}, 0)
def WX02_067(self, card, etype): #恶魔续发 莉莉丝
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WX02_068(self, card, etype): #恶魔勇武 摩莉甘
mute()
uniformCost(etype, [], [], [], [], {"Discard":{"ctype": "SIGNI", "signiclass": ["恶魔"], "qty": 1}}, 1)
def WX02_069(self, card, etype): #反制代号 星云
mute()
uniformCost(etype, [], [], [], ["黑", "黑"], {}, 0)
def WX02_070(self, card, etype): #真实死神 阿尼玛
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WX02_071(self, card, etype): #反制代号 德里
mute()
uniformCost(etype, [], [], [], ["黑", "无"], {}, 0)
def WX02_072(self, card, etype): #反制代号 马丘比
mute()
uniformCost(etype, [], [], [], [], {"Discard":{"qty": 1}, "Down":{"target":"self"}}, 2)
def WX02_073(self, card, etype): #反制代号 敌左反魔
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WX02_074(self, card, etype): #小恶忧郁 格里姆
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WX02_075(self, card, etype): #造墓者
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WX02_077(self, card, etype): #侍从 T2
mute()
uniformCost(etype, [], [], [], [], {}, 0)
def WX02_078(self, card, etype): #侍从 D2
mute()
uniformCost(etype, [], [], [], [], {}, 0)
|
We promote culture and distribute books that make a difference and contribute to U.S communities.
A fox kidnaps a hen and runs away, while a bear, a rabbit, and a rooster follow him day and night until they find his hideout, but... Follow their adventure and tell the story in your own words.
Cuenta este cuento con tus propias palabras, un libro bellamente ilustrado sin texto.
|
import json
from flask import Blueprint, Response
from flask.ext.cors import cross_origin
import copy
import StringIO
import uuid
from pgeo.error.custom_exceptions import PGeoException, errors
from pgeo.utils import log
from pgeorest.config.settings import settings
from pgeo.stats.raster import Stats
from pgeo.gis.raster_scatter import create_scatter
from pgeo.gis.raster_mapalgebra import filter_layers
from flask import request
from pgeo.manager.manager import Manager
# Blueprint for the raster-statistics REST endpoints.
app = Blueprint(__name__, __name__)
log = log.logger(__name__)
#TODO: Review the REST for also layers that are not published, but are on the filesystem
# default json_statistics
# Request templates deep-copied per request; "uid" is filled in with the
# "workspace:layername" identifier before being handed to Stats.
raster_statistics = {
"raster": {
"uid": None
},
"stats": {
"force": True
}
}
# Same as above, plus a default histogram bucket count of 256.
raster_histogram = {
"raster": {
"uid": None
},
"stats": {
"force": True,
"buckets": 256
}
}
@app.route('/')
def index():
    """Return the module's plain-text welcome message."""
    message = 'Welcome to the stats module!'
    return message
@app.route('/raster/<layer>/', methods=['GET'])
@app.route('/raster/<layer>', methods=['GET'])
def get_stats(layer):
    """
    Extracts all the statistics of a layer
    @param layer: workspace:layername
    @return: json with the raster statistics
    """
    try:
        if ":" not in layer:
            # BUGFIX: was `return PGeoException(...)` — returning an exception
            # instance is not a valid Flask response; raise it instead so the
            # except-clause / error handler produces the 500.
            raise PGeoException("Please Specify a workspace for " + str(layer), status_code=500)
        json_stats = copy.deepcopy(raster_statistics)
        json_stats["raster"]["uid"] = layer
        # Module to process statistics
        stats = Stats(settings)
        return Response(json.dumps(stats.get_stats(json_stats)), content_type='application/json; charset=utf-8')
    except PGeoException as e:
        # Re-raise with the original message/status so the registered handler formats it.
        raise PGeoException(e.get_message(), e.get_status_code())
@app.route('/raster/<layer>/hist/', methods=['GET'])
@app.route('/raster/<layer>/hist', methods=['GET'])
@cross_origin()
def get_histogram(layer):
    """
    Extracts histogram from a layer (default 256 buckets)
    @param layer: workspace:layername
    @return: json with the raster histogram
    """
    try:
        if ":" not in layer:
            # BUGFIX: raise instead of returning the exception object (invalid response).
            raise PGeoException("Please Specify a workspace for " + str(layer), status_code=500)
        json_stats = copy.deepcopy(raster_histogram)
        json_stats["raster"]["uid"] = layer
        # Module to process statistics
        stats = Stats(settings)
        return Response(json.dumps(stats.get_histogram(json_stats)), content_type='application/json; charset=utf-8')
    except PGeoException as e:
        raise PGeoException(e.get_message(), e.get_status_code())
@app.route('/raster/<layer>/hist/<buckets>/', methods=['GET'])
@app.route('/raster/<layer>/hist/<buckets>', methods=['GET'])
@cross_origin(origins='*')
def get_histogram_buckets(layer, buckets):
    """
    Extracts histogram from a layer with a caller-chosen bucket count
    @param layer: workspace:layername
    @param buckets: number of buckets i.e. 256
    @return: json with the raster histogram
    """
    try:
        if ":" not in layer:
            # BUGFIX: raise instead of returning the exception object (invalid response).
            raise PGeoException("Please Specify a workspace for " + str(layer), status_code=500)
        json_stats = copy.deepcopy(raster_histogram)
        json_stats["raster"]["uid"] = layer
        json_stats["stats"]["buckets"] = int(buckets)
        # Module to process statistics
        stats = Stats(settings)
        return Response(json.dumps(stats.get_histogram(json_stats)), content_type='application/json; charset=utf-8')
    except PGeoException as e:
        raise PGeoException(e.get_message(), e.get_status_code())
@app.route('/raster/<layer>/hist/buckets/<buckets>/min/<min>/max/<max>/', methods=['GET'])
@app.route('/raster/<layer>/hist/buckets/<buckets>/min/<min>/max/<max>', methods=['GET'])
@cross_origin(origins='*')
def get_histogram_buckets_min_max(layer, buckets, min, max):
    """
    Extracts histogram from a layer, restricted to a [min, max] value range.
    NOTE: the parameters `min`/`max` shadow the builtins but are bound to the
    route variables of the same name, so they are kept as-is.
    @param layer: workspace:layername
    @param buckets: number of buckets i.e. 256
    @return: json with the raster histogram
    """
    try:
        if ":" not in layer:
            # BUGFIX: raise instead of returning the exception object (invalid response).
            raise PGeoException("Please Specify a workspace for " + str(layer), status_code=500)
        json_stats = copy.deepcopy(raster_histogram)
        json_stats["raster"]["uid"] = layer
        json_stats["stats"]["buckets"] = int(buckets)
        json_stats["stats"]["min"] = float(min)
        json_stats["stats"]["max"] = float(max)
        # Module to process statistics
        stats = Stats(settings)
        return Response(json.dumps(stats.get_histogram(json_stats)), content_type='application/json; charset=utf-8')
    except PGeoException as e:
        raise PGeoException(e.get_message(), e.get_status_code())
@app.route('/rasters/<layers>/lat/<lat>/lon/<lon>/', methods=['GET'])
@app.route('/rasters/<layers>/lat/<lat>/lon/<lon>', methods=['GET'])
@cross_origin(origins='*')
def get_lat_lon(layers, lat, lon):
    """
    Get the value of each listed layer at a lat/lon position
    @param layers: comma-separated workspace:layername identifiers
    @param lat: latitude
    @param lon: longitude
    @return: json with the values at that location
    """
    try:
        if ":" not in layers:
            # BUGFIX: raise instead of returning the exception object (invalid response).
            raise PGeoException("Please Specify a workspace for " + str(layers), status_code=500)
        input_layers = layers.split(",")
        # Module to process statistics
        stats = Stats(settings)
        s = stats.get_location_values(input_layers, lat, lon)
        return Response(json.dumps(s), content_type='application/json; charset=utf-8')
    except PGeoException as e:
        raise PGeoException(e.get_message(), e.get_status_code())
@app.route('/raster/spatial_query/', methods=['POST'])
@app.route('/raster/spatial_query', methods=['POST'])
@cross_origin(origins='*', headers=['Content-Type'])
def get_stats_by_layer():
    """
    Get raster statistics filtered by a spatial query posted as JSON.
    TODO is it useful or should the multi-layer endpoint below be used? @Deprecated?
    :return: a json with the zonal statistics
    """
    try:
        payload = request.get_json()
        result = Stats(settings).zonal_stats(payload)
        return Response(json.dumps(result), content_type='application/json; charset=utf-8')
    except PGeoException as e:
        raise PGeoException(e.get_message(), e.get_status_code())
@app.route('/rasters/spatial_query/', methods=['POST'])
@app.route('/rasters/spatial_query', methods=['POST'])
@cross_origin(origins='*', headers=['Content-Type'])
def get_stats_by_layers():
    """
    Get zonal statistics for every uid listed in the posted spatial query.
    :return: a json list of {uid: zonal_stats} objects
    """
    try:
        stats = Stats(settings)
        payload = request.get_json()
        response = []
        for uid in payload["raster"]["uids"]:
            # Re-use the posted query for each layer, swapping in its uid.
            per_layer = copy.deepcopy(payload)
            per_layer["raster"]["uid"] = uid
            response.append({uid: stats.zonal_stats(per_layer)})
        return Response(json.dumps(response), content_type='application/json; charset=utf-8')
    except PGeoException as e:
        raise PGeoException(e.get_message(), e.get_status_code())
@app.route('/rasters/scatter_analysis/', methods=['POST'])
@app.route('/rasters/scatter_analysis', methods=['POST'])
@cross_origin(origins='*', headers=['Content-Type'])
def get_scatter_analysis():
# Run zonal stats for each posted uid and merge the results into a CSV.
# NOTE(review): the merge below uses response[0] and response[1] only, so this
# endpoint assumes exactly two uids are posted — confirm and validate input.
try:
# Module to process statistics
stats = Stats(settings)
user_json = request.get_json()
log.info(user_json)
response = []
for uid in user_json["raster"]["uids"]:
log.info(user_json)
json_stat = copy.deepcopy(user_json)
json_stat["raster"]["uid"] = uid
response.append(stats.zonal_stats(json_stat))
log.info(response[0])
log.info(response[1])
# io.BytesIO()
si = StringIO.StringIO()
result = stats.create_csv_merge(si, response[0], response[1])
log.info(result.getvalue())
return Response(result.getvalue())
except PGeoException, e:
raise PGeoException(e.get_message(), e.get_status_code())
@app.route('/rasters/scatter_plot/<layers>/', methods=['GET'])
@app.route('/rasters/scatter_plot/<layers>', methods=['GET'])
@cross_origin(origins='*', headers=['Content-Type'])
def get_scatter_plot(layers):
    """
    Create a scatter plot from two rasters of the same dimension
    @param layers: workspace:layername1,workspace:layername2
    @return: json with the scatter plot data
    """
    # (docstring moved out of the try-body where it was a dead string literal)
    try:
        if ":" not in layers:
            # BUGFIX: raise instead of returning the exception object (invalid response).
            raise PGeoException("Please Specify a workspace for " + str(layers), status_code=500)
        input_layers = layers.split(",")
        stats = Stats(settings)
        raster_path1 = stats.get_raster_path(input_layers[0])
        raster_path2 = stats.get_raster_path(input_layers[1])
        # creating scatter; 300 is presumably the sampling size — confirm against create_scatter
        response = create_scatter(raster_path1, raster_path2, 300)
        return Response(json.dumps(response), content_type='application/json; charset=utf-8')
    except PGeoException as e:
        raise PGeoException(e.get_message(), e.get_status_code())
# BUGFIX: the two decorators were byte-identical, registering the same rule
# twice; every other endpoint pairs a trailing-slash and a bare rule, so the
# first now carries the trailing slash.
@app.route('/rasters/mapalgebra/layers/<layers>/minmax/<minmax>/', methods=['GET'])
@app.route('/rasters/mapalgebra/layers/<layers>/minmax/<minmax>', methods=['GET'])
@cross_origin(origins='*', headers=['Content-Type'])
def get_filter_layers(layers, minmax):
    """
    Create a temporary mask layer using min-max of the layers
    @param layers: workspace:layername1,workspace:layername2
    @param minmax: min1,max1,min2,max2
    @return: json metadata of the published temporary layer
    """
    try:
        if ":" not in layers:
            # BUGFIX: raise instead of returning the exception object (invalid response).
            raise PGeoException("Please Specify a workspace for " + str(layers), status_code=500)
        input_layers = layers.split(",")
        minmax_values = minmax.split(",")
        stats = Stats(settings)
        # getting raster information
        raster_path1 = stats.get_raster_path(input_layers[0])
        raster_path2 = stats.get_raster_path(input_layers[1])
        # getting raster min max values
        min1 = float(minmax_values[0])
        max1 = float(minmax_values[1])
        min2 = float(minmax_values[2])
        max2 = float(minmax_values[3])
        # create the layer
        path = filter_layers(raster_path1, raster_path2, min1, max1, min2, max2)
        # creating metadata for the temporary masked layer
        uid = str(uuid.uuid4())
        metadata_def = {}
        metadata_def["uid"] = "tmp:" + uid
        metadata_def["title"] = {}
        metadata_def["title"]["EN"] = "masked_" + uid
        metadata_def["meSpatialRepresentation"] = {}
        # publish the new tmp layer
        # TODO: metadata? style to be applied?
        # TODO: how to handle a tmp workspace overhead?
        s = copy.deepcopy(settings)
        # this copies the geoserver_tmp dato to "geoserver" settings to be passed to the manager
        s["geoserver"] = s["geoserver_tmp"]
        manager = Manager(s)
        manager.publish_coverage(path, metadata_def, False, True, False)
        # adding the tmp geoserver WMS URL
        if "geoserver_wms" in s["geoserver"]:
            metadata_def["url_wms"] = s["geoserver"]["geoserver_wms"]
        return Response(json.dumps(metadata_def), content_type='application/json; charset=utf-8')
    except PGeoException as e:
        raise PGeoException(e.get_message(), e.get_status_code())
|
On Monday Joshua and I celebrated our 7th wedding anniversary.
We had a yummy dinner at The Melting Pot, and Joshua gifted me a gift card to my favorite spa, Hoshall’s. For him, I scheduled him a haircut and pedicure at a new place that opened up recently called Hammer & Nails. It’s basically a man-cave salon for dudes. He got to kick back in a leather chair while sipping whisky and watching tv while he got his services. Needless to say he plans on going back for his next haircut.
Throughout the rest of the week we were busy packing up our old house and doing stuff for the new house. Every day after work we’d pop on over to the new house to check the progress on the paint. Then we’d run errands, grab a bite to eat, come home, sleep, go to work and do it all over again. The good news is the painting is almost done, and we got a pool fence installed so the doggies won’t fall in.
We wrapped up the end of the week by getting to go over to Joshua’s parents’ house for dinner at their new house. Both them, us, and Joshua’s brother all went into escrow on our houses around the same time, so it’s been a pretty busy month for all of us.
This week a handful of us were assigned to read Donald Miller’s book Building a StoryBrand: Clarify Your Message So Customers Will Listen. I was very excited to read this book especially since I heard him talk about it on the Entreleadership podcast a couple years back. We’re using this book to help kind of redefine our brand, and I’m looking forward to the things we do as team in the coming year.
|
import pytest
from spectate import mvc
from .mock import model_events, Counter
def test_hold_events():
    """Events cached by ``mvc.hold`` can be inspected and edited before delivery."""
    counter, events = model_events(Counter)
    with mvc.hold(counter) as pending:
        counter.increment(1)
        assert pending == [{"old": 0, "new": 1}]
        counter.increment(1)
        assert pending == [{"old": 0, "new": 1}, {"old": 1, "new": 2}]
        # Drop the last cached event so notifiers never see it.
        pending.pop()
    assert events == [{"old": 0, "new": 1}]
def test_hold_uses_events_from_reducer():
    """A reducer passed to ``mvc.hold`` replaces the cached events wholesale."""
    counter, events = model_events(Counter)

    def replace_events(model, intercepted):
        # The reducer sees the raw cached events...
        assert intercepted == [{"old": 0, "new": 1}]
        # ...and yields the events that should be delivered instead.
        yield {"custom": "event-1"}
        yield {"custom": "event-2"}

    with mvc.hold(counter, reducer=replace_events):
        counter.increment(1)
    assert events == [{"custom": "event-1"}, {"custom": "event-2"}]
def test_rollback_events():
    """No events are delivered when the ``mvc.rollback`` body raises."""
    counter, events = model_events(Counter)
    try:
        with mvc.rollback(counter):
            counter.increment(1)
            raise ValueError()
    except ValueError:
        pass
    else:
        raise AssertionError("ValueError was not propagated")
    assert not events
def test_rollback_calls_undo_without_side_effects():
# Verifies that the undo callback receives the original error and the cached
# events, and that model changes made inside undo do not notify observers.
calls = []
counter, events = model_events(Counter)
def undo(model, events, error):
calls.append(1)
# `error_from_rollback` is bound later, before rollback invokes this
# closure — the late binding is intentional.
assert error is error_from_rollback
assert events == ({"old": 0, "new": 1},)
# this decrement should not notify
model.decrement(1)
with pytest.raises(ValueError):
with mvc.rollback(counter, undo=undo):
counter.increment(1)
error_from_rollback = ValueError()
raise error_from_rollback
assert calls
assert counter.value == 0
def test_mute_events():
    """Changes made inside ``mvc.mute`` produce no events at all."""
    counter, events = model_events(Counter)
    with mvc.mute(counter):
        for _ in range(2):
            counter.increment(1)
    assert events == []
|
Stadium Tours are available to book this winter, running on most Fridays until 21 December, beginning at 2pm and lasting approximately 90 minutes.
Since being brought back, tours of Easter Road Stadium have proven to be hugely popular with Hibernian supporters and those with an interest in football enjoying their afternoon at the home of the club.
Led by members of the Hibernian Historical Trust, the tour will recall the history and folklore of the club, stretching back to its humble beginnings in 1875 and right up to the modern-day era of Hibernian.
Supporters who join the tour will see rare artefacts up close, such as programmes, Hibernian kits, opposition jerseys, football boots and trophies, as well as having the opportunity to step inside the dressing rooms, stand in the tunnel, before exiting via the five steps up to the pitch to take your seat in the dugout.
It is a perfect afternoon of entertainment for football fans in general – especially supporters of Hibernian.
Click on the corresponding date you would like to attend to be taken to our e-ticketing site where you can purchase your tickets to attend the tour.
Tours are priced at £10 per adult, £7.50 per concession/student or £5 per child aged between 5 and 16. For children under 5 years of age entrance is free.
|
from flask import Flask, jsonify, abort, render_template, request, session, escape
import MySQLdb
from flask_limiter import Limiter
import time
import random
app = Flask(__name__)
#####################################
#### MAIN CONFIG ####
#####################################
# Set website limits
# NOTE(review): `global_limits` is the legacy Flask-Limiter keyword; newer
# releases call it `default_limits` — confirm the pinned version.
limiter = Limiter(app, global_limits=["2 per second"])
websiteTitle = "Python" # Website Title
websiteURL = "http://192.168.2.12" # Website address, no "/" needed
websitePort = 1313 # Website port number to use
MySQLHost = "localhost" # MySQL hostname
MySQLUser = "root" # MySQL username
MySQLPass = "" # MySQL pass
MySQLDB = "pythonurlshortner" # Database name
storeIP = True # Store IP Address of user?
urlLength = 6 # The length of your short URLS
enableHyphenAndUnderscore = True # Have a "-" and "_"
# (Hyphen/Dash and Underscore) in URLs?
enableNumbers = True # Have numbers in the short URL?
enableUppercase = True # Have upper case along with lowercase
enableRedirectTimeout = False # Have a redirect page time out
# To use this give it a seconds timeout
# To disable, set to "False"
##############################################################################################################################
#################################### DO NOT EDIT BELOW UNLESS YOU KNOW WHAT YOU ARE DOING ####################################
##############################################################################################################################
#####################################
#### TOOLS ####
#####################################
def genUrl():
    """Return a random slug of ``urlLength`` characters drawn from ``letterChoices``."""
    pool = list(letterChoices)
    return "".join(random.choice(pool) for _ in range(urlLength))
#####################################
#### SETUP URLS ####
#####################################
# Numbers and letters that look similar have been removed!
# (no 0/O, 1/l, etc. — avoids ambiguous slugs when read aloud or typed)
numbers = "123456789"
lowerCase = "abcdefghjkmnpqrstuvwxyz"
upperCase = lowerCase.upper() # Will take the lowercase variable
# and turn it into uppercase
# Build the slug alphabet according to the feature toggles above.
letterChoices = lowerCase
if enableHyphenAndUnderscore:
letterChoices += "-_"
if enableUppercase:
letterChoices += upperCase
if enableNumbers:
letterChoices += numbers
#####################################
#### HOME PAGE ####
#####################################
# The main page
@app.route('/', methods=["GET"])
@app.route('/<path:url>', methods=["GET"])
@limiter.limit("200 per minute")
def home_page(url=None):
    """Serve the landing page, or redirect a short slug to its stored URL.

    Unknown slugs redirect back to the landing page.
    """
    if not url:
        return render_template('index.html', websiteTitle=websiteTitle)
    db = MySQLdb.connect(MySQLHost, MySQLUser, MySQLPass, MySQLDB)
    try:
        cursor = db.cursor()
        # SECURITY FIX: the slug comes straight from the request path — use a
        # parameterized query instead of string interpolation (SQL injection).
        cursor.execute("SELECT longLink FROM short WHERE shortLink=%s", (str(escape(url)),))
        if cursor.rowcount > 0:
            foundURL = cursor.fetchone()[0]
            timeout = enableRedirectTimeout if enableRedirectTimeout else 0
            return render_template('redirect.html', redirectTimeout=timeout, url=foundURL)
        return render_template('redirect.html', redirectTimeout=0, url="/")
    finally:
        # BUGFIX: the original only closed the connection on the found path,
        # leaking a connection for every unknown slug.
        db.close()
#####################################
####          SAVE PAGE          ####
#####################################
@app.route('/saveURL', methods=["GET", "POST"])
def save_URL():
    """Create a short link for the POSTed URL and render the result page.

    NOTE(review): a plain GET falls through and returns None (a 500 in
    Flask) — presumably it should redirect to "/"; kept as-is to avoid
    changing routing behavior.
    """
    if request.method == "POST":
        url = str(escape(request.form["url"]))
        # One connection for the whole request; the original opened a new,
        # never-closed connection on every collision-check iteration.
        db = MySQLdb.connect(MySQLHost, MySQLUser, MySQLPass, MySQLDB)
        cursor = db.cursor()
        # Keep generating slugs until one is free.
        while True:
            custom = genUrl()
            # Parameterized queries close the SQL-injection hole left by
            # the old "...'%s'" % value string formatting.
            cursor.execute("SELECT shortLink FROM short WHERE shortLink=%s",
                           (custom,))
            if cursor.rowcount == 0:
                break
        cursor.execute(
            "INSERT INTO short (id, shortLink, longLink, time, ipAddress) "
            "VALUES (DEFAULT, %s, %s, %s, %s)",
            (custom, url, int(time.time()), str(request.remote_addr)))
        db.commit()
        db.close()
        return render_template('result.html', websiteTitle=websiteTitle, longURL=url, websiteURL=websiteURL + ":" + str(websitePort), shortURL=custom)
# Start the Flask development server, listening on all interfaces.
if __name__ == '__main__':
    app.run(debug=True, host='0.0.0.0', port=websitePort)
|
Android 4.4 KitKat - More For TVs than Phones?
According to ETNews, The Android 4.4 ‘KitKat’ update is emphasized more on connecting your device to your televisions. The publication cited sources familiar with the situation, who said that the update will improve the way TVs and Android-powered devices work together. The report didn’t offer any specifics, but it did mention an enhanced user interface.
Based on this, The Android 4.4 KitKat is shaping up to be a minor update with few overall changes to the platform that will most probably be focused around the improvement of UI than functionality. Still, Google could very well squeeze some added capabilities into KitKat that we just haven’t heard about yet.
There was also a rumor, backed by leaked screenshots a few weeks back, that the update could feature the ability to mirror your Android screen to a television via the Miracast protocol. Add the other rumor that Google TV is set to be re-branded as Android TV, plus Chromecast’s success, and we might have hit our first on-target rumor.
Tagged Android 4.4 KitKat, TV.
« Google: Banner ads on front page.
|
#!/usr/bin/python3
# -*- coding: utf-8 -*-
# quest.py
import random
# Static world data used by the (commented-out) search demo in main().
# Each object records where it spawns and the text shown to the player;
# $TREASURE$ looks like a placeholder substituted elsewhere — TODO confirm.
objects = [
    {'name':'rusty key',
     'desc':'an old rusty key',
     'location':'woods',
     'when_seen':'the key is old and rusty, but glitters slightly',
     'on_success':'the key slides into the lock with a firm click'},
    {'name':'chest',
     'desc':'a battered chest',
     'location':'ravine',
     'when_seen':'the chest is covered in dirt',
     'on_success':'the chest opens, revealing $TREASURE$'},
]
# Names of the map areas.
locations = [
    {'name':'home'},
    {'name':'woods'},
    {'name':'forest'},
    {'name':'hills'},
    {'name':'ravine'},
]
# Non-player characters.
characters = [
    {'name':'Zentar',
    }
]
def main():
    """Demo: build a tiny world with two NPCs and print their quest list."""
    # Map areas.
    hills = Location('hills', 'rolling hills', [4,8])
    forest = Location('forest', 'Dark forest near the hills', [4,7])
    woods = Location('woods', 'Woodland between mountains and river', [7,12])
    beach = Location('beach', 'Nice sandy beach', [2,1])
    # Collectable items and the areas where they spawn.
    wood = Item('wood', 'Wooden planks', [forest, hills, woods ])
    string = Item('string', 'Used for fishing rods and bows', [hills ])
    shells = Item('shells', 'Shells', [beach ])
    # The NPCs who hand out quests.
    jim = NPC('Jim', forest, 'Idle', [wood])
    sandy = NPC('Sandy', hills, 'hunting', [string, shells])
    # One quest per NPC, derived from what each of them needs.
    my_quests = [Quest().create_next_quest_via_npc_needs(npc)
                 for npc in (jim, sandy)]
    # Display game
    print('NPCs in this land:')
    print(jim)
    print(sandy)
    print('Your Quest List:')
    for quest in my_quests:
        print(quest)
class Location(object):
    """A named map area with a short description and grid coordinates."""

    def __init__(self, name, desc, coords):
        self.name = name      # area name, e.g. 'hills'
        self.desc = desc      # one-line description
        self.coords = coords  # coordinate pair, e.g. [4, 8]

    def __str__(self):
        # "<name> - <desc>[x, y]" — the coords render via str(list).
        return '{} - {}{}'.format(self.name, self.desc, self.coords)
class DataSet(object):
    """Base class holding raw text lines and the objects parsed from them."""

    def __init__(self):
        self.raw_data = []     # raw lines as read from the reference file
        self.object_list = []  # parsed objects (filled in by subclasses)

    def __str__(self):
        # Concatenation of the raw lines (each keeps its own newline).
        return ''.join(self.raw_data)

    def fill_from_csv(self, fname):
        """Append every line of *fname* to raw_data."""
        with open(fname, 'r') as fip:
            self.raw_data.extend(fip)
class Locations(DataSet):
    """Collection of Location objects parsed from a reference file."""

    def __init__(self):
        DataSet.__init__(self)

    def rebuild_list(self):
        """Re-parse raw_data (CSV: name,desc,coords) into object_list."""
        self.object_list = []  # clear the object list
        for raw_line in self.raw_data:
            cols = raw_line.split(',')
            print('LOCATION RAW = ', cols)
            # NOTE(review): cols[2] is still a string (with any trailing
            # newline), not a coordinate pair — confirm this is intended.
            self.object_list.append(Location(cols[0], cols[1], cols[2]))
class NPCs(DataSet):
    """Collection of NPC characters parsed from a reference file."""

    def __init__(self):
        DataSet.__init__(self)

    def rebuild_list(self):
        """Re-parse raw_data (CSV: name,location,status,item) into object_list."""
        self.object_list = []  # clear the object list
        for raw_line in self.raw_data:
            cols = raw_line.split(',')
            # The single item column is wrapped in a one-element list.
            self.object_list.append(NPC(cols[0], cols[1], cols[2], [cols[3]]))
class Items(DataSet):
    """Collection of Items loaded from a reference file."""

    def __init__(self):
        super().__init__()
class Item(object):
    """A collectable (or craftable) object that exists in the world."""

    def __init__(self, name, desc, spawns_at_locations):
        self.name = name
        self.desc = desc
        self.spawns_at_locations = spawns_at_locations  # list of Location

    def __str__(self):
        spawn_names = '|'.join(loc.name for loc in self.spawns_at_locations)
        return '{} - {} (Spawns at locations:{})\n'.format(
            self.name, self.desc, spawn_names)
class NPC(object):
    """A non-player character with a location, status and item wish-list."""

    def __init__(self, name, location, status, items_needed):
        self.name = name
        self.location = location          # Location object (str() is used on it)
        self.status = status              # free-text status, e.g. 'Idle'
        self.items_needed = items_needed  # list of Item objects this NPC wants

    def __str__(self):
        res = ''
        res += self.name + ' is at ' + str(self.location) + '. Status = ' + self.status
        if len(self.items_needed) > 0:
            # BUG FIX: the original tested "self.items_needed is list", which
            # compares the list *instance* against the list *type* and is
            # always False — so the needs line was never printed.
            res += '\nThis NPC needs : '
            res += ', '.join([i.name for i in self.items_needed])
        return res
class Quest(object):
    """A single fetch quest generated from an NPC's needs."""

    def __init__(self):
        # All attributes are filled in by create_next_quest_via_npc_needs().
        pass

    def __str__(self):
        border = '+------------------------------------------------------------\n'
        parts = [
            border,
            '| ***' + self.name + ' ***\n',
            '| ' + self.desc + '\n',
            '| Location = ' + str(self.location[0].name) + '\n',
            '| Status = ' + self.status + '\n',
            '| Reward = ' + self.reward + '\n',
            '| Return to ' + self.quest_giver.name + ' with '
            + str(self.quantity) + ' ' + self.items_required.name + '\n',
            border,
        ]
        return ''.join(parts)

    def create_next_quest_via_npc_needs(self, npc):
        """Fill this quest from *npc*'s first needed item and return self.

        Returns None when the NPC needs nothing (matching the original).
        """
        for needs in npc.items_needed:  # just the first one
            self.name = 'Collect ' + needs.name
            self.quest_giver = npc
            # Random rolls stay in the original order (quantity, then reward).
            self.quantity = random.choice([4,8,10,25])
            self.reward = random.choice(['fishing rod', 'hammer', '5 Gold', '10 Gold'])
            self.items_required = needs
            self.desc = npc.name + ' needs you to collect ' + needs.name
            self.status = 'Available'
            self.location = needs.spawns_at_locations
            return self
# Run the demo when executed as a script.
if __name__ == '__main__':
    main()
|
"[There is] no question that the classical and indie realms can share a border where music turns pensive, knotty and otherworldly."
Their fifth album, The Creatures in the Garden of Lady Walton (2010), and companion EP, Veil Waltz (2010), were released to great acclaim following their previous album, Lantern (2006). The Creatures is their first album of songs after four primarily instrumental releases; a song-cycle composed by Padma Newsome for the group with extensive vocal work from Shara Worden of My Brightest Diamond. Other guests include Sufjan Stevens, Aaron Dessner and Matt Berninger of The National and the Osso String Quartet.
Clogs have performed at the 2013 Adelaide Festival, the 2012 Halifax Jazz Festival, the 2013 and 2011 Ecstatic Music Festivals and 2011 Barbican Centre's Reverberations Festival. Other noteworthy performances by the quartet have included the 2010 Big Ears Festival, a collaboration with the Brooklyn Philharmonic in 2009 at the Howard Gilman Opera House at BAM, and appearances at the 2007 and 2008 Sydney Festivals. They have performed at such venues/events as the Bang on a Can Marathon, MusicNow Festival, Wexner Center for the Arts, MFA Boston, and the Warhol Museum.
Copyright ©2015-2019 Clogs. Site by stephen gilewski design.
|
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""TPU Distribution Strategy.
This is experimental. It's not ready for general use.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.distribute.python import cross_tower_ops as cross_tower_ops_lib
from tensorflow.contrib.distribute.python import one_device_strategy
from tensorflow.contrib.distribute.python import values
from tensorflow.contrib.tpu.python.ops import tpu_ops
from tensorflow.contrib.tpu.python.tpu import tpu
from tensorflow.contrib.tpu.python.tpu import tpu_system_metadata as tpu_system_metadata_lib
from tensorflow.contrib.tpu.python.tpu import training_loop
from tensorflow.python.eager import context
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import variable_scope as vs
from tensorflow.python.ops import variables as variables_lib
from tensorflow.python.training import device_util
from tensorflow.python.util import nest
def get_tpu_system_metadata(tpu_cluster_resolver):
  """Retrieves TPU system metadata given a TPUClusterResolver."""
  # Resolve the master address and optional cluster definition, then query
  # the TPU system metadata without a full topology scan.
  master = tpu_cluster_resolver.master()
  cluster_spec = tpu_cluster_resolver.cluster_spec()
  if cluster_spec:
    cluster_def = cluster_spec.as_cluster_def()
  else:
    cluster_def = None
  # pylint: disable=protected-access
  return tpu_system_metadata_lib._query_tpu_system_metadata(
      master,
      cluster_def=cluster_def,
      query_topology=False)
class TPUStrategy(one_device_strategy.OneDeviceStrategy):
  """Experimental TPU distribution strategy implementation."""

  def __init__(self, tpu_cluster_resolver, steps_per_run, num_cores=None):
    """Initializes the TPUStrategy object.

    Args:
      tpu_cluster_resolver: A tf.contrib.cluster_resolver.TPUClusterResolver,
          which provides information about the TPU cluster.
      steps_per_run: Number of steps to run on device before returning to the
          host. Note that this can have side-effects on performance, hooks,
          metrics, summaries etc.
          This parameter is only used when Distribution Strategy is used with
          estimator or keras.
      num_cores: Number of cores to use on the TPU. If None specified, then
          auto-detect the cores and topology of the TPU system.
    """
    # TODO(sourabhbajaj): OneDeviceStrategy should be initialized with the
    # master node fetched from the cluster resolver.
    super(TPUStrategy, self).__init__('/device:CPU:0')
    self._tpu_cluster_resolver = tpu_cluster_resolver
    self._tpu_metadata = get_tpu_system_metadata(self._tpu_cluster_resolver)
    # TODO(sourabhbajaj): Change this from num_cores to metadata_override
    self._num_cores_override = num_cores
    # TODO(sourabhbajaj): Remove this once performance of running one step
    # at a time is comparable to multiple steps.
    self.steps_per_run = steps_per_run

  def _get_enqueue_op_per_host(self, host_id, iterator, input_shapes,
                               iterations):
    """Create an enqueue op for a single host identified using host_id.

    The while_loop op returned will run `iterations` times and in each run
    enqueue batches for each shard.

    Args:
      host_id: integer, id of the host to run the enqueue ops on.
      iterator: `tf.data` iterator to read the input data.
      input_shapes: shape of inputs to be enqueue on the queue. This is same as
          the value of `nest.flatten(iterator.output_shapes)`.
      iterations: integer, number of iterations to be run; determines the
          number of batches to be enqueued.

    Returns:
      while_loop_op running `iterations` times; in each run we enqueue a batch
      on the infeed queue from the host with id `host_id` for each device shard.
    """
    host = self.get_host_cpu_device(host_id)

    def _infeed_enqueue_ops_fn():
      """Enqueue ops for one iteration."""
      control_deps = []
      sharded_inputs = []
      enqueue_ops = []
      with ops.device(host):
        for _ in range(self.num_towers_per_host):
          # Use control dependencies to ensure a deterministic ordering.
          with ops.control_dependencies(control_deps):
            inputs = nest.flatten(iterator.get_next())
            control_deps.extend(inputs)
            sharded_inputs.append(inputs)
      # One infeed enqueue per device shard on this host.
      for core_id, shard_input in enumerate(sharded_inputs):
        enqueue_ops.append(
            tpu_ops.infeed_enqueue_tuple(
                inputs=shard_input,
                shapes=input_shapes,
                device_ordinal=core_id))
      return enqueue_ops

    def enqueue_ops_loop_body(i):
      """Callable for the loop body of the while_loop instantiated below."""
      # The control dependency forces the enqueue ops to run every iteration.
      with ops.control_dependencies(_infeed_enqueue_ops_fn()):
        return i + 1

    with ops.device(host):
      enqueue_op_per_host = control_flow_ops.while_loop(
          lambda i: i < iterations,
          enqueue_ops_loop_body,
          [constant_op.constant(0)],
          parallel_iterations=1)
    return enqueue_op_per_host

  def distribute_dataset(self, dataset_fn):
    """Wraps `dataset_fn` for use with this strategy (no sharding here yet)."""
    # TODO(priyag): Perhaps distribute across cores here.
    return self._call_dataset_fn(dataset_fn)

  # TODO(priyag): Deal with OutOfRange errors once b/111349762 is fixed.
  # TODO(sourabhbajaj): Remove the initial_loop_values parameter when we have
  # a mechanism to infer the outputs of `fn`. Pending b/110550782.
  def _run_steps_on_dataset(self, fn, iterator, iterations,
                            initial_loop_values=None):
    """Runs `fn` for `iterations` steps on the TPU, fed from `iterator`.

    Builds per-host infeed enqueue loops plus a replicated on-device training
    loop, and returns a MultiStepContext holding the combined run op and the
    last step's outputs.
    """
    shapes = nest.flatten(iterator.output_shapes)
    # Infeed requires static shapes; fail early with a actionable message.
    if any([not s.is_fully_defined() for s in shapes]):
      raise ValueError(
          'TPU currently requires fully defined shapes. Either use '
          'set_shape() on the input tensors or use '
          'dataset.apply(map_and_batch(..., drop_remainder=True)).')
    types = nest.flatten(iterator.output_types)
    enqueue_ops = [
        self._get_enqueue_op_per_host(host_id, iterator, shapes, iterations)
        for host_id in range(self.num_hosts)]

    def dequeue_fn():
      # Dequeue one flattened batch on-device and restore its structure.
      dequeued = tpu_ops.infeed_dequeue_tuple(dtypes=types, shapes=shapes)
      return nest.pack_sequence_as(iterator.output_shapes, dequeued)

    # Wrap `fn` for repeat.
    if initial_loop_values is None:
      initial_loop_values = {}
    initial_loop_values = nest.flatten(initial_loop_values)
    ctx = values.MultiStepContext()
    def run_fn(*args, **kwargs):
      """Single step on the TPU device."""
      del args, kwargs
      fn_inputs = dequeue_fn()
      if not isinstance(fn_inputs, tuple):
        fn_inputs = (fn_inputs,)
      fn_result = fn(ctx, *fn_inputs)
      flat_last_step_outputs = nest.flatten(ctx.last_step_outputs)
      if flat_last_step_outputs:
        with ops.control_dependencies([fn_result]):
          return [array_ops.identity(f) for f in flat_last_step_outputs]
      else:
        return fn_result

    # TODO(sourabhbajaj): The input to while loop should be based on the output
    # type of the step_fn
    def iterate_on_tpu():
      return training_loop.repeat(iterations, run_fn, initial_loop_values)

    # We capture the control_flow_context at this point, before we run `fn`
    # inside a while_loop and TPU replicate context. This is useful in cases
    # where we might need to exit these contexts and get back to the outer
    # context to do some things, for e.g. create an op which should be
    # evaluated only once at the end of the loop on the host. One such usage
    # is in creating metrics' value op.
    self._outer_control_flow_context = (
        ops.get_default_graph()._get_control_flow_context())  # pylint: disable=protected-access
    replicate_inputs = [[]] * self.num_towers
    replicate_outputs = tpu.replicate(iterate_on_tpu, replicate_inputs)
    del self._outer_control_flow_context
    ctx.run_op = control_flow_ops.group(replicate_outputs, enqueue_ops)

    # Filter out any ops from the outputs, typically this would be the case
    # when there were no tensor outputs.
    last_step_tensor_outputs = [x for x in replicate_outputs
                                if not isinstance(x, ops.Operation)]

    # Outputs are currently of the structure (grouped by device)
    # [[output0_device0, output1_device0, output2_device0],
    #  [output0_device1, output1_device1, output2_device1]]
    # Convert this to the following structure instead: (grouped by output)
    # [[output0_device0, output0_device1],
    #  [output1_device0, output1_device1],
    #  [output2_device0, output2_device1]]
    last_step_tensor_outputs = [list(x) for x in zip(*last_step_tensor_outputs)]

    # Convert replicate_outputs to the original dict structure of
    # last_step_outputs.
    last_step_tensor_outputs_dict = nest.pack_sequence_as(
        ctx.last_step_outputs, last_step_tensor_outputs)
    for (name, aggregation) in ctx._last_step_outputs_aggregations.items():  # pylint: disable=protected-access
      output = last_step_tensor_outputs_dict[name]
      # For outputs that have already been aggregated, take the first value
      # from the list as each value should be the same. Else return the full
      # list of values.
      if aggregation is not variables_lib.VariableAggregation.NONE:
        # TODO(priyag): Should this return the element or a list with 1 element
        last_step_tensor_outputs_dict[name] = output[0]
    ctx._set_last_step_outputs(last_step_tensor_outputs_dict)  # pylint: disable=protected-access
    return ctx

  def _call_for_each_tower(self, fn, *args, **kwargs):
    """Runs `fn` once in a single-tower context (replication happens later
    via `tpu.replicate`)."""
    kwargs.pop('run_concurrently', None)
    with one_device_strategy._OneDeviceTowerContext(self):  # pylint: disable=protected-access
      return fn(*args, **kwargs)

  def initialize(self):
    """Returns ops that initialize the TPU system (graph mode only)."""
    if context.executing_eagerly():
      # TODO(priyag): Add appropriate call here when eager is supported for TPUs.
      raise NotImplementedError('Eager mode not supported in TPUStrategy.')
    else:
      return [tpu.initialize_system()]

  def finalize(self):
    """Returns ops that shut down the TPU system (graph mode only)."""
    if context.executing_eagerly():
      # TODO(priyag): Add appropriate call here when eager is supported for TPUs.
      raise NotImplementedError('Eager mode not supported in TPUStrategy.')
    else:
      return [tpu.shutdown_system()]

  def _reduce(self, aggregation, value, destinations):
    """Reduces `value` across towers.

    Inside a TPUReplicateContext this emits a cross_replica_sum; outside, it
    falls back to a host-side add_n over the per-tower values.
    """
    graph = ops.get_default_graph()
    cf_context = graph._get_control_flow_context()  # pylint: disable=protected-access
    # If we're inside the ReplicateContext, reduction should be done using
    # CrossReplicaSum while outside we can directly use an add_n op.
    while cf_context:
      if isinstance(cf_context, tpu.TPUReplicateContext):
        if aggregation == vs.VariableAggregation.MEAN:
          # TODO(jhseu): Revisit once we support model-parallelism.
          value *= (1. / self.num_towers)
        elif aggregation != vs.VariableAggregation.SUM:
          raise NotImplementedError(
              'Currently only support sum & mean in TPUStrategy.')
        return tpu_ops.cross_replica_sum(value)
      cf_context = cf_context.outer_context
    # Validate that the destination is same as the host device
    # Note we don't do this when in replicate context as the reduction is
    # performed on the TPU device itself.
    devices = cross_tower_ops_lib.get_devices_from(destinations)
    if len(devices) == 1:
      assert device_util.canonicalize(devices[0]) == device_util.canonicalize(
          self.get_host_cpu_device(0))
    else:
      raise ValueError('Multiple devices are not supported for TPUStrategy')
    if aggregation == vs.VariableAggregation.ONLY_FIRST_TOWER:
      return value[0]
    output = math_ops.add_n(value)
    if aggregation == vs.VariableAggregation.MEAN:
      return output * (1. / len(value))
    return output

  def _unwrap(self, value):
    """Returns `value` as a flat list of per-device values."""
    if isinstance(value, list):
      return value
    return [value]

  @property
  def num_towers(self):
    # An explicit num_cores override takes precedence over detection.
    return self._num_cores_override or self._tpu_metadata.num_cores

  @property
  def num_hosts(self):
    return self._tpu_metadata.num_hosts

  @property
  def num_towers_per_host(self):
    return self._tpu_metadata.num_of_cores_per_host

  def get_host_cpu_device(self, host_id):
    """Returns the device string for the CPU of host `host_id`."""
    if self._tpu_cluster_resolver.get_master() in ('', 'local'):
      return '/replica:0/task:0/device:CPU:0'
    job_name = self._tpu_cluster_resolver.get_job_name() or 'tpu_worker'
    return '/job:%s/task:%d/device:CPU:0' % (job_name, host_id)

  def configure(self,
                session_config=None,
                cluster_spec=None,
                task_type=None,
                task_id=None):
    """Configures the session: isolates state and attaches the TPU cluster."""
    del cluster_spec, task_type, task_id
    if session_config:
      session_config.isolate_session_state = True
      cluster_spec = self._tpu_cluster_resolver.cluster_spec()
      if cluster_spec:
        session_config.cluster_def.CopyFrom(cluster_spec.as_cluster_def())
|
Deadpool wasn’t just big this weekend, it was big in a way that could change assumptions about which movies get greenlit. But first, how big: Early estimates have Deadpool going $135 million for the three-day and $150 million for the four-day weekend (domestic gross), setting a number of records. It became the first R-rated movie ever to open bigger than $100 million, and smashed the previous biggest R-rated opening (Matrix Reloaded, in 2003) even adjusted for inflation. Not adjusted for inflation, it’s the 17th biggest opening weekend (for any rating) of all time.
All this on an insanely-low-for-a-superhero-movie budget of $58 million. Early expectations had it going $65 to $75 million, which gives you some idea of how shocking $135-150 million is.
As to why this is important, aside from paying for a lifetime cocaine supply for Fox execs, the conventional wisdom has long been that superhero movies were at least partly for kids, and that making one R-rated would be leaving money on the table. In fact, this was thought to be more or less true for all movies, leading to bizarre decisions like making a PG-13 Expendables movie. With Deadpool opening bigger than any X-Men movie (which is nothing short of shocking, frankly), all that goes out the window (at least, it should).
This was a movie that took 11 years for Fox to greenlight, and now it has an opening that couldn’t have been bigger. With Deadpool, not only did it go huge while being rated R, it went huge largely because it was rated R. The R rating proved to fans that it was serious, and while I don’t think what comic book superfans think matters nearly as much as studios think it does, Deadpool‘s R-rating helped differentiate it at a time when just being a superhero movie is no longer that big a deal. The kinds of movies that get greenlit from here on out will depend on whether people in charge see Deadpool‘s opening as anecdotal or as an example to learn from. At the very least, it should initiate a temporary moratorium on “Is Ryan Reynolds Really A Star” thinkpieces. And thank God, I’m so sick of people always picking on that poor, ridiculously handsome hunk of Canadian beef.
|
# Copyright 2016 Google Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from pyparsing import ParseResults
from protocall.proto import protocall_pb2
from grammar import expression, statement, assignment, call, return_, block, scope, define, while_expression, while_scope, if_expression, if_scope, elif_expression, elif_scope, elif_scopes, else_scope, conditional
from AST import Call, Assignment, ArrayAssignment, Integer, String, Boolean, Proto, Array, Identifier, Field, ArrayRef, While, ArithmeticOperator, ComparisonOperator, Conditional, Return, Define
def convert_field(field):
    """Build a protocall_pb2.Field from an AST Field's dotted components."""
    result = protocall_pb2.Field()
    for component in field.components:
        result.component.add().name = component.identifier
    return result
def convert_statement(statement):
    """Convert a parsed AST statement into a protocall_pb2.Statement.

    Dispatches on the concrete AST node type (Call, Assignment,
    ArrayAssignment, While, Conditional, Return, Define) and fills the
    corresponding field of the Statement proto.

    Raises:
      RuntimeError: if the statement type is not recognized.
    """
    s = protocall_pb2.Statement()
    if isinstance(statement.statement, Call):
        # Function call: dotted field plus each named argument.
        call = statement.statement
        field, args = call.field, call.args
        c = protocall_pb2.Call()
        c.field.CopyFrom(convert_field(field))
        for arg in args:
            a = c.argument.add()
            a.identifier.name = arg.identifier.identifier
            a.expression.CopyFrom(convert_expression(arg.expression.expression))
        s.call.CopyFrom(c)
    elif isinstance(statement.statement, Assignment):
        # Simple assignment: field = expression.
        assignment = statement.statement
        field, expression = assignment.field, assignment.expression
        a = protocall_pb2.Assignment()
        a.field.CopyFrom(convert_field(field))
        a.expression.CopyFrom(convert_expression(expression.expression))
        s.assignment.CopyFrom(a)
    elif isinstance(statement.statement, ArrayAssignment):
        # Indexed assignment: field[index] = expression.
        array_assignment = statement.statement
        array_ref, expression = array_assignment.array_ref, array_assignment.expression
        a = protocall_pb2.ArrayAssignment()
        a.array_ref.field.CopyFrom(convert_field(array_ref.field))
        a.array_ref.index.value = array_ref.index
        a.expression.CopyFrom(convert_expression(expression.expression))
        s.array_assignment.CopyFrom(a)
    elif isinstance(statement.statement, While):
        # While loop: condition expression plus body scope.
        while_expression = statement.statement
        expression, scope = while_expression.expression, while_expression.scope
        w = protocall_pb2.While()
        w.expression_scope.expression.CopyFrom(convert_expression(expression.expression))
        w.expression_scope.scope.CopyFrom(convert_scope(scope.scope))
        s.while_.CopyFrom(w)
    elif isinstance(statement.statement, Conditional):
        # if / elif* / optional else chain.
        conditional = statement.statement
        if_scope = conditional.if_scope
        elif_scopes = conditional.elif_scopes
        c = protocall_pb2.Conditional()
        c.if_scope.expression.CopyFrom(convert_expression(if_scope.expression.expression))
        c.if_scope.scope.CopyFrom(convert_scope(if_scope.scope.scope))
        for elif_scope in elif_scopes:
            es = c.elif_scope.add()
            es.expression.CopyFrom(convert_expression(elif_scope.expression.expression))
            es.scope.CopyFrom(convert_scope(elif_scope.scope.scope))
        else_scope = conditional.else_scope
        if else_scope:
            c.else_scope.CopyFrom(convert_scope(else_scope.scope.scope))
        s.conditional.CopyFrom(c)
    elif isinstance(statement.statement, Return):
        # Return statement with its value expression.
        return_ = statement.statement
        expression = return_.expression
        r = protocall_pb2.Return()
        r.expression.CopyFrom(convert_expression(expression.expression))
        s.return_.CopyFrom(r)
    elif isinstance(statement.statement, Define):
        # Function definition: name field plus body scope.
        define = statement.statement
        field = define.field
        scope = define.scope
        d = protocall_pb2.Define()
        d.field.CopyFrom(convert_field(field))
        d.scope.CopyFrom(convert_scope(scope.scope))
        s.define.CopyFrom(d)
    else:
        # Unknown node type: dump it for debugging, then fail loudly.
        print statement.statement
        raise RuntimeError
    return s
def convert_block(block):
    """Convert an AST block into a protocall_pb2.Block of statements."""
    result = protocall_pb2.Block()
    for stmt in block.block:
        result.statement.add().CopyFrom(convert_statement(stmt))
    return result
def convert_argument(argument):
    """Convert a named AST argument into a protocall_pb2.Argument."""
    result = protocall_pb2.Argument()
    result.identifier.name = argument.identifier.identifier
    result.expression.CopyFrom(convert_expression(argument.expression.expression))
    return result
def convert_scope(scope):
    """Convert an AST scope (a list of statements) into a protocall_pb2.Scope."""
    result = protocall_pb2.Scope()
    for stmt in scope.block:
        result.block.statement.add().CopyFrom(convert_statement(stmt))
    return result
def convert_arithmetic_operator(arithmetic_operator, e):
    """Fill `e.arithmetic_operator` from an AST ArithmeticOperator node.

    Args:
      arithmetic_operator: AST node with `operator`, `left` and `right`.
      e: protocall_pb2.Expression to populate (mutated in place).

    Raises:
      RuntimeError: for an unrecognized operator symbol.
    """
    # Map the operator symbol onto the proto enum value.
    symbol_to_op = {
        '*': "MULTIPLY",
        '/': "DIVIDE",
        '+': "PLUS",
        '-': "MINUS",
    }
    try:
        op = protocall_pb2.ArithmeticOperator.Op.Value(
            symbol_to_op[arithmetic_operator.operator])
    except KeyError:
        print(arithmetic_operator.operator)
        raise RuntimeError
    e.arithmetic_operator.operator = op
    # convert_expression always returns a protocall_pb2.Expression, so the
    # operands can be copied directly.  (The original copied each operand
    # twice: once in an isinstance branch and once unconditionally; the
    # Identifier branch was unreachable and the second copy redundant.)
    e.arithmetic_operator.left.CopyFrom(convert_expression(arithmetic_operator.left))
    e.arithmetic_operator.right.CopyFrom(convert_expression(arithmetic_operator.right))
def convert_comparison_operator(comparison_operator, e):
    """Fill `e.comparison_operator` from an AST ComparisonOperator node.

    Args:
      comparison_operator: AST node with `operator`, `left` and `right`.
      e: protocall_pb2.Expression to populate (mutated in place).

    Raises:
      RuntimeError: for an unrecognized operator symbol.
    """
    # Map the operator symbol onto the proto enum value.
    symbol_to_op = {
        '>': "GREATER_THAN",
        '<': "LESS_THAN",
        '==': "EQUALS",
    }
    try:
        op = protocall_pb2.ComparisonOperator.Op.Value(
            symbol_to_op[comparison_operator.operator])
    except KeyError:
        print(comparison_operator.operator)
        raise RuntimeError
    e.comparison_operator.operator = op
    # convert_expression always returns a protocall_pb2.Expression, so the
    # operands can be copied directly.  (The original copied each operand
    # twice: once in an isinstance branch and once unconditionally; the
    # Identifier branch was unreachable and the second copy redundant.)
    e.comparison_operator.left.CopyFrom(convert_expression(comparison_operator.left))
    e.comparison_operator.right.CopyFrom(convert_expression(comparison_operator.right))
def convert_expression(expression):
    """Convert an AST expression node into a protocall_pb2.Expression.

    Dispatches on the concrete AST node type (literals, field and array
    references, operators, and calls).  Always returns an Expression proto.

    Raises:
      RuntimeError: if the expression type is not recognized.
    """
    e = protocall_pb2.Expression()
    if isinstance(expression, Integer):
        e.atom.literal.integer.value = expression.value
    elif isinstance(expression, String):
        e.atom.literal.string.value = expression.value
    elif isinstance(expression, Boolean):
        e.atom.literal.boolean.value = expression.value
    elif isinstance(expression, Proto):
        # Proto literal: keep the raw text form plus its field path.
        e.atom.literal.proto.field.CopyFrom(convert_expression(expression.field).atom.field)
        e.atom.literal.proto.value = str(expression.proto)
    elif isinstance(expression, Field):
        e.atom.field.CopyFrom(convert_field(expression))
    elif isinstance(expression, Array):
        # Array literal: convert each element expression in order.
        array = e.atom.literal.array
        for item in expression.elements:
            element = array.element.add()
            element.CopyFrom(convert_expression(item.expression))
    elif isinstance(expression, ArrayRef):
        e.atom.array_ref.field.CopyFrom(convert_field(expression.field))
        e.atom.array_ref.index.value = expression.index
    elif isinstance(expression, ArithmeticOperator):
        # These two helpers mutate `e` in place.
        convert_arithmetic_operator(expression, e)
    elif isinstance(expression, ComparisonOperator):
        convert_comparison_operator(expression, e)
    elif isinstance(expression, Call):
        e.call.field.CopyFrom(convert_field(expression.field))
        for arg in expression.args:
            a = e.call.argument.add()
            a.CopyFrom(convert_argument(arg))
    else:
        # Unknown node type: dump it for debugging, then fail loudly.
        print expression.__class__
        raise RuntimeError
    return e
|
Gary Bowyer says he wouldn't like to be a defender marking either Kyle Vassell or Jamille Matt at the moment.
The pair, both on target against Notts County last weekend, have struck up a promising partnership in recent weeks and still have more to give.
Vassell has become the first Blackpool player to hit 10 goals by this stage of the season since Tom Ince in 2012, while Matt has two goals in his last two appearances and has been praised for his selfless displays.
"I don't know which one I'd want to mark and play against because they both present problems for defenders in different ways, and it's very difficult to combat that," Bowyer said.
"But the pair of them have still got a hell of a lot of improvement in them, and we've been working on that this week."
In terms of other options, the manager expects to have a fully fit squad available for the trip to Leyton Orient on Saturday.
"Danny Philliskirk had a bit of illness at the beginning of the week but we're at that time where we're going to get little bugs and illnesses like that. He's been back in training today so we're fully fit," he added.
|
# game.py (c) 2017 D.J.Whale 22/01/2017
# Star-Wars 'Use the Force, Luke' game
# Using many moving parts provided by Martin O'Hanlon
#----- CONFIGURATION ----------------------------------------------------------
DEATHSTAR_CENTRE_POS = (100,100,10)  # (x, y, z) centre of the deathstar model
TARGET_POS = (100,100,10)  # (x, y, z) the shot must hit -- same as centre here
IN_RANGE = ((100,100,10), (100,100,10))  # (min, max) corner pair counted as a hit
XWING_START_POS = (46,10,-61)  # where the player's xwing spawns
PLAY_TIME_SECS = 5 #(2*60)  # game length: 5s for testing, 2 minutes for real play
NUMBER_OF_TRIES = 3  # missile shots the player gets
FRAMES_PER_SEC = 10  # animation/update rate
#TODO: Mart's code animates the trench separately from deathstar
#so do we need to switch over to that animation at the right position?
#also is there a visual clue to where the trench is, in the deathstar model?
#TODO: xwing can turn or shift
#might make it turn if you tilt it left or right a long way
#in which case we need l,L and r,R for two ranges of left and right tilt
#----- LOAD ALL THE DEPENDENT PARTS -------------------------------------------

import sys

# This game is written for Python 2 (it uses raw_input below);
# refuse to start under any other major version.
if sys.version_info[0] != 2:
    print("Please run this game with Python version 2")
    sys.exit()

import time
import controller   # auto-connects to the controller
import starwars     # auto-connects to Minecraft
#----- GAME STATE -------------------------------------------------------------

deathstar = None        # deathstar model (not built yet)
xwing = None            # the player's x-wing MCObject, created by create_xwing()
missile = None          # in-flight missile, None when no missile is active
xwing_crashed = False   # round-ending flag: the x-wing hit something
missile_missed = False  # per-shot flag: the missile flew out of range
missile_hit = False     # round-ending flag: the missile reached the target
game_stop_time = 0      # wall-clock time (secs) at which the round times out
#----- BUILD THE GAME WORLD ---------------------------------------------------

def clear_space():
    """Clear the play area in the Minecraft world (stub)."""
    print("will clear_space")
    #TODO:
def build_deathstar():
    """Build the deathstar model in the world (stub)."""
    print("will build_deathstar")
    #TODO: build at DEATHSTAR_CENTRE_POS
def create_xwing():
    """(Re)create the x-wing model at XWING_START_POS.

    Any previously created x-wing is erased from the world first.
    """
    global xwing
    previous = xwing
    if previous is not None:
        previous.clear()  # remove the old model's blocks from the world
    xwing = starwars.MCObject(starwars.XWING_BLOCKS, XWING_START_POS)
    xwing.draw()
def setup_game():
    """Prepare the world for a new round: clear space, rebuild models, reset flags."""
    for step in (clear_space, build_deathstar, create_xwing, clear_flags):
        step()
def wait_for_start():
    """Block until the player asks to start (keyboard for now)."""
    print("will wait_for_start")
    raw_input("press RETURN to start")
    #TODO: wait for A button press on micro:bit
    #loop, read from micro:bit, until see 'A'
#----- GAME ACTIONS -----------------------------------------------------------

def fly_xwing():
    """Read the controller and advance the x-wing by one animation frame."""
    buttons = controller.get_command_flags()
    if buttons is not None:
        # Decode the individual button states from the command flags.
        left = 'L' in buttons
        right = 'R' in buttons
        up = 'U' in buttons
        down = 'D' in buttons
        fire = 'A' in buttons
        eject = 'B' in buttons
        # Steering: tilt changes yaw, up/down changes altitude.
        if left:
            xwing.rotate_by(yaw=-10)
            print("left")
        if right:
            xwing.rotate_by(yaw=+10)
            print("right")
        if up:
            xwing.move_by(y=+1)
            print("up")
        if down:
            xwing.move_by(y=-1)
            print("down")
        if fire:
            print("boom!!")
        if eject:
            print("yeehar!!")
        # The x-wing always keeps flying forward, one block per frame.
        xwing.fly()
        # TODO: if xwing crashes into any block: set_xwing_crashed()
        #if fire: start_missile()
        #if eject: ejector_seat()
def start_missile():
    """Launch a missile from in front of the x-wing (stub)."""
    print("will start_missile")
    #TODO:
    # create missile object in front of xwing
    # note we need to know what direction the xwing is flying in
    # we also need to know a range of positions to succeed from
def move_missile():
    """Advance the in-flight missile one step and set hit/miss flags (stub)."""
    print("will move_missile")
    #TODO:
    # if missile now out of range:
    #   set_missile_missed()
    # elif missile not yet hit target:
    #   move missile forward by 1
    # else must have hit
    #   set_missile_hit()
def ejector_seat():
    """Eject the pilot: play both animations, then flag the x-wing as crashed."""
    print("will ejector_seat")
    animate_eject()
    animate_xwing_crashed()
    set_xwing_crashed()
#------ GAME CONDITIONS -------------------------------------------------------
#
# End-of-round conditions live in module globals.  Action functions set them,
# the main loop polls them.  This avoids threading lots of variables through
# call signatures while keeping the global state confined to this section.

def clear_flags():
    """Reset every end-of-round condition flag to False."""
    global xwing_crashed, missile_missed, missile_hit
    xwing_crashed = missile_missed = missile_hit = False
def set_xwing_crashed():
    """Flag: the x-wing has crashed (ends the round)."""
    global xwing_crashed
    xwing_crashed = True

def set_missile_missed():
    """Flag: the current missile flew out of range (costs one try)."""
    global missile_missed
    missile_missed = True

def set_missile_hit():
    """Flag: a missile reached the target (player wins the round)."""
    global missile_hit
    missile_hit = True
#----- ANIMATIONS -------------------------------------------------------------

def animate_missile_missed():
    """Visual feedback for a missed missile (stub)."""
    print("will animate_missile_missed")
    #TODO:

def animate_missile_hit():
    """Visual feedback for a missile strike (stub)."""
    print("will animate_missile_hit")
    #TODO:

def animate_eject():
    """Visual feedback for the pilot ejecting (stub)."""
    print("will animate_eject")
    #TODO:

def animate_xwing_crashed():
    """Visual feedback for the x-wing crashing (stub)."""
    print("will xwing_crashed")
    #TODO:

def animate_blow_up_deathstar():
    """End-of-game animation: destroy the deathstar model (stub)."""
    print("will blow_up_deathstar")
    #TODO:
    # auto pilot the ship to a safe location
    # animate the deathstar blowing up
    # return when deathstar gone
#----- SPLASH SCREENS ---------------------------------------------------------

def splash_screen():
    """Show the game's title/splash screen (stub)."""
    print("will splash_screen")
    #TODO:

def game_over_failed():
    """Show the losing game-over screen (stub)."""
    print("will game_over_failed")
    #TODO:

def game_over_succeeded():
    """Show the winning game-over screen (stub)."""
    print("will game_over_succeeded")
    #TODO:
#----- GAME LOOP --------------------------------------------------------------

def start_game():
    """Begin a round: record when the round's play time will expire."""
    global game_stop_time
    print("will start_game")
    #TODO: move player to position on start (hides splash screen)
    game_stop_time = time.time() + PLAY_TIME_SECS
def run_out_of_time():
    """Return True once the wall clock has reached the round's stop time."""
    remaining = game_stop_time - time.time()
    return remaining <= 0
def play_game():
    """Run one round's frame loop; return True if the deathstar was destroyed."""
    missiles_left = NUMBER_OF_TRIES
    # Loop until timeout, crash, a hit, or the player runs out of missiles.
    while not run_out_of_time() and not xwing_crashed and not missile_hit and missiles_left > 0:
        time.sleep(1/float(FRAMES_PER_SEC))  # fixed frame rate
        fly_xwing()
        if missile is not None:
            move_missile()
        if missile_missed:
            animate_missile_missed()
            # NOTE(review): missile_missed is never reset after a miss, so this
            # branch (and the decrement) would repeat every frame once set --
            # confirm intended behaviour when move_missile() is implemented.
            missiles_left -= 1
        elif missile_hit:
            animate_missile_hit()
            animate_blow_up_deathstar()
    return missile_hit
def whereami():
    """Debug helper: print the player's current tile position."""
    import starwars.mcpi.minecraft as minecraft
    mc = minecraft.Minecraft.create()
    position = mc.player.getTilePos()
    x, y, z = position
    print(x, y, z)
#----- MAIN PROGRAM -----------------------------------------------------------
# The full game loop is not enabled yet; kept here for when the stubs above
# are implemented.
#if __name__ == "__main__":
#    while True:
#        setup_game()
#        splash_screen()
#        wait_for_start()
#        start_game()
#
#        success = play_game()
#
#        if success:
#            game_over_succeeded()
#        else:
#            game_over_failed()

#whereami()

# Temporary test harness: create the x-wing and fly it from the controller
# forever (runs at import/execution time, bypassing the main loop above).
create_xwing()
while True:
    print("fly")
    fly_xwing()
    time.sleep(0.1)

# END
|
Montreal First Church of the Nazarene invites Montrealers to share a meal with them every Wednesday from 11am to 2pm. Meals are served with love and compassion, so all are invited to sit down, take the load off your feet, and most of all enjoy a nutritious free meal. Located at 455 Spring Garden Road, Dollard-des-Ormeaux, H9B-1T2 (BUS 208 from Fairview Shopping Mall) 514-684-2220, montrealfirstchurch@hotmail.com. Come and be blessed.
Come enjoy a nice breakfast at Union United Church.
Bethlehem United Church invites you to make the first step “Walking In New Life” Galatians 5:16-26. From May 25th-28th join us as we make the first step together in worship and prayer. Your host for this four-day spiritual gathering will be Pastor and Evang. Myrie, Overseer, Bishop D.W. Thompson. Keynote speaker will be Elder Oniel Fisher. Elder Donovan Brown will minister on Friday. Minister Christopher Bryan will lead a Saturday leadership and ministerial seminar. Use Hashtag #BUCMontreal #BUCMtl. All are welcome to join us May 25 to 28 at 2255 Westhill Ave.
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'C:\Users\symonsbe\Downloads\WinPython-64bit-3.4.4.5Qt5\notebooks\MultiToolSettings.ui'
#
# Created by: PyQt5 UI code generator 5.5.1
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_Dialog(object):
    """Auto-generated UI class for the MultiTool settings dialog.

    Generated by pyuic5 from MultiToolSettings.ui -- regenerate from the
    .ui file rather than editing this class by hand (see file header).
    Layout: a settings tree on the left drives a stacked widget of pages
    on the right, with Save/Cancel buttons below.
    """

    def setupUi(self, Dialog):
        """Build the widget tree on *Dialog* and wire its signals."""
        Dialog.setObjectName("Dialog")
        Dialog.resize(559, 296)
        self.gridLayout = QtWidgets.QGridLayout(Dialog)
        self.gridLayout.setSizeConstraint(QtWidgets.QLayout.SetDefaultConstraint)
        self.gridLayout.setObjectName("gridLayout")
        # Left-hand navigation tree (no header, no scrollbars).
        self.treeWidget = QtWidgets.QTreeWidget(Dialog)
        self.treeWidget.setMinimumSize(QtCore.QSize(133, 0))
        self.treeWidget.setFrameShape(QtWidgets.QFrame.StyledPanel)
        self.treeWidget.setFrameShadow(QtWidgets.QFrame.Sunken)
        self.treeWidget.setMidLineWidth(0)
        self.treeWidget.setVerticalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff)
        self.treeWidget.setHorizontalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff)
        self.treeWidget.setAlternatingRowColors(False)
        self.treeWidget.setObjectName("treeWidget")
        item_0 = QtWidgets.QTreeWidgetItem(self.treeWidget)
        item_1 = QtWidgets.QTreeWidgetItem(item_0)
        self.treeWidget.header().setVisible(False)
        self.gridLayout.addWidget(self.treeWidget, 0, 0, 2, 1)
        # Right-hand stack of settings pages.
        self.stackedWidget = QtWidgets.QStackedWidget(Dialog)
        self.stackedWidget.setObjectName("stackedWidget")
        self.page = QtWidgets.QWidget()
        self.page.setObjectName("page")
        self.gridLayout_2 = QtWidgets.QGridLayout(self.page)
        self.gridLayout_2.setContentsMargins(0, 0, 0, 0)
        self.gridLayout_2.setSpacing(0)
        self.gridLayout_2.setObjectName("gridLayout_2")
        # "Centrepay Directories" group: CSV and PDF directory pickers,
        # each a line edit plus a browse ("...") tool button.
        self.groupBox = QtWidgets.QGroupBox(self.page)
        self.groupBox.setMinimumSize(QtCore.QSize(300, 0))
        self.groupBox.setFlat(False)
        self.groupBox.setObjectName("groupBox")
        self.formLayout = QtWidgets.QFormLayout(self.groupBox)
        self.formLayout.setObjectName("formLayout")
        self._lblCSVDirectory = QtWidgets.QLabel(self.groupBox)
        self._lblCSVDirectory.setObjectName("_lblCSVDirectory")
        self.formLayout.setWidget(2, QtWidgets.QFormLayout.LabelRole, self._lblCSVDirectory)
        self.horizontalLayout = QtWidgets.QHBoxLayout()
        self.horizontalLayout.setObjectName("horizontalLayout")
        self._lineCSVDirectory = QtWidgets.QLineEdit(self.groupBox)
        self._lineCSVDirectory.setObjectName("_lineCSVDirectory")
        self.horizontalLayout.addWidget(self._lineCSVDirectory)
        self._btnCSVDirectory = QtWidgets.QToolButton(self.groupBox)
        self._btnCSVDirectory.setObjectName("_btnCSVDirectory")
        self.horizontalLayout.addWidget(self._btnCSVDirectory)
        self.formLayout.setLayout(2, QtWidgets.QFormLayout.FieldRole, self.horizontalLayout)
        self._lblPDFDirectory = QtWidgets.QLabel(self.groupBox)
        self._lblPDFDirectory.setObjectName("_lblPDFDirectory")
        self.formLayout.setWidget(3, QtWidgets.QFormLayout.LabelRole, self._lblPDFDirectory)
        self.horizontalLayout_2 = QtWidgets.QHBoxLayout()
        self.horizontalLayout_2.setObjectName("horizontalLayout_2")
        self._linePDFDirectory = QtWidgets.QLineEdit(self.groupBox)
        self._linePDFDirectory.setObjectName("_linePDFDirectory")
        self.horizontalLayout_2.addWidget(self._linePDFDirectory)
        self._btnPDFDirectory = QtWidgets.QToolButton(self.groupBox)
        self._btnPDFDirectory.setObjectName("_btnPDFDirectory")
        self.horizontalLayout_2.addWidget(self._btnPDFDirectory)
        self.formLayout.setLayout(3, QtWidgets.QFormLayout.FieldRole, self.horizontalLayout_2)
        self.gridLayout_2.addWidget(self.groupBox, 0, 0, 1, 1)
        self.stackedWidget.addWidget(self.page)
        self.page_2 = QtWidgets.QWidget()
        self.page_2.setObjectName("page_2")
        self.stackedWidget.addWidget(self.page_2)
        self.gridLayout.addWidget(self.stackedWidget, 0, 1, 1, 1)
        # Save/Cancel button row.
        self.buttonBox = QtWidgets.QDialogButtonBox(Dialog)
        self.buttonBox.setLayoutDirection(QtCore.Qt.LeftToRight)
        self.buttonBox.setAutoFillBackground(False)
        self.buttonBox.setOrientation(QtCore.Qt.Horizontal)
        self.buttonBox.setStandardButtons(QtWidgets.QDialogButtonBox.Cancel|QtWidgets.QDialogButtonBox.Save)
        self.buttonBox.setCenterButtons(False)
        self.buttonBox.setObjectName("buttonBox")
        self.gridLayout.addWidget(self.buttonBox, 3, 1, 1, 1)
        self.gridLayout.setColumnStretch(0, 1)
        self.gridLayout.setColumnStretch(1, 3)

        self.retranslateUi(Dialog)
        self.stackedWidget.setCurrentIndex(0)
        self.buttonBox.accepted.connect(Dialog.accept)
        self.buttonBox.rejected.connect(Dialog.reject)
        QtCore.QMetaObject.connectSlotsByName(Dialog)

    def retranslateUi(self, Dialog):
        """Assign all user-visible strings (kept separate for translation)."""
        _translate = QtCore.QCoreApplication.translate
        Dialog.setWindowTitle(_translate("Dialog", "Settings"))
        self.treeWidget.headerItem().setText(0, _translate("Dialog", "Settings"))
        # Sorting is disabled while item texts are set, then restored.
        __sortingEnabled = self.treeWidget.isSortingEnabled()
        self.treeWidget.setSortingEnabled(False)
        self.treeWidget.topLevelItem(0).setText(0, _translate("Dialog", "Centrepay"))
        self.treeWidget.topLevelItem(0).child(0).setText(0, _translate("Dialog", "Directories"))
        self.treeWidget.setSortingEnabled(__sortingEnabled)
        self.groupBox.setTitle(_translate("Dialog", "Centrepay Directories"))
        self._lblCSVDirectory.setText(_translate("Dialog", "CSV Directory"))
        self._btnCSVDirectory.setText(_translate("Dialog", "..."))
        self._lblPDFDirectory.setText(_translate("Dialog", "PDF Directory"))
        self._btnPDFDirectory.setText(_translate("Dialog", "..."))
if __name__ == "__main__":
    # Manual smoke test: show the settings dialog standalone.
    import sys
    app = QtWidgets.QApplication(sys.argv)
    Dialog = QtWidgets.QDialog()
    ui = Ui_Dialog()
    ui.setupUi(Dialog)
    Dialog.show()
    sys.exit(app.exec_())
|
fleurs For My Fairy Sister ♥. fleurs For My Fairy Sister ♥. Wallpaper and background images in the rose du Yorkshire club tagged: yorkshire rose berni is love berni friends love image flowers.
|
# Copyright (C) 2014 Susanne Jauhiainen, Markku Kovanen, Ilari Paananen
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import bpy
from mathutils import Vector
import json
import sys
import os
# Three.js blender export module 'export_threejs.py'
# needs THREE_exportGeometry custom property to be defined like this:
bpy.types.Object.THREE_exportGeometry = bpy.props.BoolProperty(default = True)

# The module is assumed to be in the same folder with this file.
sys.path.append(os.path.dirname(__file__))
import export_threejs
def clear_scene(scene):
    """Delete every object in *scene*, then purge all leftover mesh datablocks."""
    for object in scene.objects:
        object.select = True
    bpy.ops.object.delete()
    # Iterate over a snapshot: removing an element from bpy.data.meshes while
    # iterating the live collection can skip entries / invalidate the iterator.
    for mesh in list(bpy.data.meshes):
        bpy.data.meshes.remove(mesh)
def get_data(shp_path, python_path):
    """Run the bundled shp2json.py converter on *shp_path* and return the parsed JSON.

    *python_path* is the Python interpreter used to run the converter.
    Raises subprocess.CalledProcessError if the converter exits non-zero.
    """
    import subprocess  # local import: keeps the file's top-level imports untouched
    script = os.path.join(os.path.dirname(__file__), "shp2json.py")
    # Pass an argument list (not a shell string built with +): this is safe for
    # paths containing spaces and avoids shell injection, unlike os.popen.
    output = subprocess.check_output([python_path, script, shp_path])
    return json.loads(output.decode("utf-8"))
def separate_regions(regions):
    """Split *regions* into outer rings and holes by winding direction.

    Each region is a sequence of (x, y) points.  The signed-area sum
    sum((x2-x1)*(y2+y1)) over all edges (including the closing edge) is
    negative for counter-clockwise rings; those are moved into the second
    list (holes to subtract), the rest stay in *regions*.

    Returns (regions, regions_sub).  *regions* is mutated in place.
    """
    regions_sub = []
    # Iterate over a snapshot: the original removed elements from `regions`
    # while iterating it, which silently skipped the region after each hole.
    for region in list(regions):
        last_point = len(region) - 1
        # Start with the closing edge (last point back to first), then the rest.
        prev = region[last_point]
        area2 = 0.0  # twice the (negated) signed area; `sum` would shadow the builtin
        for pt in region:
            area2 += (pt[0] - prev[0]) * (pt[1] + prev[1])
            prev = pt
        if area2 < 0:
            regions_sub.append(region)
            regions.remove(region)
    return (regions, regions_sub)
def build_mesh(mesh, regions, height):
    """Fill *mesh* with the region outlines extruded upwards by *height*.

    Each region becomes a closed loop of edges at z=0; the loops are then
    extruded to z=*height*, capped, and triangulated using Blender edit-mode
    operators (the caller must have made the owning object active).
    """
    extrude_vec = Vector((0.0, 0.0, height))
    verts = []
    edges = []
    for region in regions:
        first = len(verts)  # index of this region's first vertex
        for pt in region:
            index = len(verts)
            verts.append((pt[0], pt[1], 0.0))
            edges.append([index, index + 1])
        # Close the loop: redirect the last edge back to the region's first vertex.
        last = len(edges) - 1
        edges[last][1] = first
    mesh.from_pydata(verts, edges,[])
    bpy.ops.object.mode_set(mode = 'EDIT')
    bpy.ops.mesh.extrude_edges_move(TRANSFORM_OT_translate={"value":extrude_vec})
    bpy.ops.mesh.edge_face_add()
    bpy.ops.mesh.select_all(action='SELECT')
    # NOTE(review): normals are only recalculated for the taller (hole) meshes;
    # the "TODO: fix this" suggests this condition is a known workaround.
    if height > 1.0: # TODO: fix this
        bpy.ops.mesh.normals_make_consistent(inside=False)
    bpy.ops.mesh.quads_convert_to_tris()
    bpy.ops.object.mode_set(mode = 'OBJECT')
def boolean_substract(object, object_sub):
    """Carve *object_sub* out of *object* via a DIFFERENCE boolean modifier."""
    bpy.context.scene.objects.active = object
    bpy.ops.object.modifier_add(type='BOOLEAN')
    modifier = bpy.context.object.modifiers["Boolean"]
    modifier.operation = 'DIFFERENCE'
    modifier.object = object_sub
    bpy.ops.object.modifier_apply(apply_as='DATA', modifier="Boolean")
# Maps Blender object names back to country display names for export_scene().
id_name_dict = None

def create_scene(scene, data):
    """Create one extruded mesh object per country in *data*.

    *data* rows are (id, name, regions).  Counter-clockwise regions (holes)
    are built as a taller mesh and boolean-subtracted from the country mesh.
    Fills the module-level id_name_dict as a side effect.
    """
    global id_name_dict
    id_name_dict = {}
    for country in data:
        id = country[0]
        name = country[1]
        regions, regions_sub = separate_regions(country[2])
        mesh = bpy.data.meshes.new(id)
        object = bpy.data.objects.new(id, mesh)
        scene.objects.link(object)
        scene.objects.active = object  # build_mesh operates on the active object
        build_mesh(mesh, regions, 1.0)
        if len(regions_sub) > 0:
            # Holes are built taller (1.5) so the boolean cuts all the way through.
            mesh_sub = bpy.data.meshes.new(id + "_sub")
            object_sub = bpy.data.objects.new(id + "_sub", mesh_sub)
            scene.objects.link(object_sub)
            scene.objects.active = object_sub
            build_mesh(mesh_sub, regions_sub, 1.5)
            boolean_substract(object, object_sub)
            # Remove the helper object and its mesh once subtracted.
            bpy.ops.object.select_all(action='DESELECT')
            object_sub.select = True
            bpy.ops.object.delete()
            bpy.data.meshes.remove(mesh_sub)
        id_name_dict[object.name] = name
def export_scene(scene, path):
    """Export every object in *scene* as three.js geometry into one JSON file.

    Writes a list of (id, name, geometry) tuples to *path*, creating the
    output directory if needed.  Uses the names recorded by create_scene().
    """
    global id_name_dict
    data = []
    for object in scene.objects:
        id = object.name
        name = id_name_dict[id]
        file = id + ".js" # objects are not actually written in separate files
        # Positional flags for export_threejs; each is annotated with the
        # parameter it corresponds to.
        text = export_threejs.generate_mesh_string([object], scene,
            True,   # option_vertices
            False,  # option_vertices_truncate
            True,   # option_faces
            False,  # option_normals
            False,  # option_uv_coords
            False,  # option_materials
            False,  # option_colors
            False,  # option_bones
            False,  # option_skinning
            "None", # align_model
            True,   # flipyz
            1.0,    # option_scale
            True,   # export_single_model
            False,  # option_copy_textures
            file,   # filepath
            False,  # option_animation_morph
            False,  # option_animation_skeletal
            False,  # option_frame_index_as_time
            1)[0]   # option_frame_step
        data.append((id, name, json.loads(text)))
    dir, _ = os.path.split(path)
    if not os.path.isdir(dir):
        os.makedirs(dir)
    with open(path, "w") as f:
        # Compact separators keep the output file small.
        f.write(json.dumps(data, separators=(",", ":")))
def run(shp_file, out_file, python_path):
    """Convert *shp_file* into a three.js JSON scene written to *out_file*."""
    country_data = get_data(shp_file, python_path)
    active_scene = bpy.context.scene
    clear_scene(active_scene)
    create_scene(active_scene, country_data)
    export_scene(active_scene, out_file)
# Blender passes script arguments after a "--" separator; slice argv so the
# real arguments start at argv[1] (argv[0] becomes "--").  When no "--" is
# present (run outside Blender), the original argv is used unchanged.
argv = sys.argv
argc = len(argv)
try:
    argv = argv[argv.index("--"):]
    argc = len(argv)
except ValueError:
    pass

if argc < 2:
    print("Give .shp file as 1st argument")
elif argc < 3:
    print("Give output file as 2nd argument")
elif argc < 4:
    print("Give path to python as 3rd argument")
else:
    run(argv[1], argv[2], argv[3])
|
This video explains why New Yorkers should come together to fight the bailout for nuclear reactors that is happening in NYS right now and has been going on for the last year or so on a path to the tune of $7.6 billion dollars over 12 years.
NYS energy ratepayers — that’s nearly every household throughout NYS — are subsidizing these nuclear energy corporations on their monthly bills and will continue to do so for 11 more years unless they are stopped. This video represents one of the environmental groups that has launched a legal action to stop it, and this is their outreach to us.
When the Public Service Commission circumvented the law for proper public comment we took them to court. The New York State Supreme Court ruled in our favor in an Article 78 proceeding that our case to repeal the handouts could go forward because their decision was arbitrary and capricious. This was despite Exelon and Entergy’s big legal spending and New York State deceptively positioning nuclear power as part of the “Clean Energy Standard,” alongside solar, wind, geothermal and other renewable energy sources. There is nothing “clean” about nuclear power. From uranium mining to generating nuclear waste and every step in between, nuclear power is dirty, dangerous, expensive energy. That’s why they can’t make it in the free market that we have for electricity.
|
# -*- coding: utf-8 -*-
# MusicPlayer, https://github.com/albertz/music-player
# Copyright (c) 2012, Albert Zeyer, www.az2000.de
# All rights reserved.
# This code is under the 2-clause BSD license, see License.txt in the root directory of this project.
import utils
from utils import UserAttrib, Event, initBy
import Traits
# Note: I'm not too happy with all the complicated update handling here...
# In general, the design is ok. But it needs some more specification
# and then some drastic simplification. Most of it should be one-liners.
class SongEdit:
    """GUI-facing editor for the currently selected song.

    Exposes editable attributes (artist, title), a read-only metadata table,
    and an AcoustID fingerprint lookup/apply workflow.  Attributes decorated
    with @UserAttrib are picked up by the GUI layer and rendered as widgets;
    *_updateEvent members tell the GUI when to refresh.
    """

    # Fired whenever this editor wants the GUI to redraw its widgets.
    @initBy
    def _updateEvent(self): return Event()

    def __init__(self, ctx=None):
        # ctx is the GUI context; fall back to the global one when not given.
        if not ctx:
            import gui
            ctx = gui.ctx()
            assert ctx, "no gui context"
        self.ctx = ctx
        # Re-publish selection changes as our own update event so all
        # widgets refresh when the user selects a different song.
        self._updateHandler = lambda: self._updateEvent.push()
        ctx.curSelectedSong_updateEvent.register(self._updateHandler)

    @UserAttrib(type=Traits.Object)
    @property
    def song(self):
        # The song being edited is always the GUI's current selection.
        return self.ctx.curSelectedSong

    @UserAttrib(type=Traits.EditableText)
    def artist(self, updateText=None):
        # Read/write accessor: called with updateText to store a new value;
        # always returns the current value ("" when no song is selected).
        if self.song:
            if updateText:
                self.song.artist = updateText
            return self.song.artist
        return ""

    @UserAttrib(type=Traits.EditableText)
    def title(self, updateText=None):
        # Same read/write pattern as artist().
        if self.song:
            if updateText:
                self.song.title = updateText
            return self.song.title
        return ""

    @staticmethod
    def _convertTagsToText(tags):
        """Render a {tag: weight} dict as "tag1 tag2:0.5 ..." (weight >= 1 implicit)."""
        def txtForTag(tag):
            value = tags[tag]
            if value >= 1: return tag
            return tag + ":" + str(value)
        return " ".join(map(txtForTag, sorted(tags.keys())))

    @staticmethod
    def _convertTextToTags(txt):
        # Inverse of _convertTagsToText -- not implemented yet.
        pass

    # todo...
    #@UserAttrib(type=Traits.EditableText)
    def tags(self, updateText=None):
        # Read-only until _convertTextToTags exists (decorator stays disabled).
        if self.song:
            return self._convertTagsToText(self.song.tags)
        return ""

    @staticmethod
    def _formatGain(gain):
        # Display replay gain both in dB and as a linear amplitude factor.
        factor = 10.0 ** (gain / 20.0)
        return "%f dB (factor %f)" % (gain, factor)

    @UserAttrib(type=Traits.Table(keys=("key", "value")), variableHeight=True)
    @property
    def metadata(self):
        """Key/value rows for the metadata table widget.

        Merges the raw file metadata with a fixed set of song attributes,
        pretty-printing some values via the paired formatter functions.
        """
        d = dict(self.song.metadata)
        for (key,func) in (
            ("artist",None),
            ("title",None),
            ("album",None),
            ("duration",utils.formatTime),
            ("url",None),
            ("rating",None),
            ("tags",self._convertTagsToText),
            ("gain",self._formatGain),
            ("completedCount",None),
            ("skipCount",None),
            ("lastPlayedDate",utils.formatDate),
            ("id",repr),
            ):
            try: value = getattr(self.song, key)
            except AttributeError: pass
            else:
                if func: value = func(value)
                d[key] = unicode(value)
        l = []
        for key,value in sorted(d.items()):
            l += [{"key": key, "value": value}]
        return l

    # The metadata table refreshes whenever the song itself updates.
    @metadata.setUpdateEvent
    @property
    def metadata_updateEvent(self): return self.song._updateEvent

    def _queryAcoustId(self):
        """Blocking fingerprint lookup against the AcoustID web service."""
        fingerprint = self.song.get("fingerprint_AcoustId", timeout=None)[0]
        duration = self.song.get("duration", timeout=None, accuracy=0.5)[0]
        import base64
        fingerprint = base64.urlsafe_b64encode(fingerprint)
        api_url = "http://api.acoustid.org/v2/lookup"
        # "8XaBELgH" is the one from the web example from AcoustID.
        # "cSpUJKpD" is from the example from pyacoustid
        # get an own one here: http://acoustid.org/api-key
        client_api_key = "cSpUJKpD"
        params = {
            'format': 'json',
            'client': client_api_key,
            'duration': int(duration),
            'fingerprint': fingerprint,
            'meta': 'recordings recordingids releasegroups releases tracks compress',
        }
        import urllib
        body = urllib.urlencode(params)
        import urllib2
        req = urllib2.Request(api_url, body)
        import contextlib
        with contextlib.closing(urllib2.urlopen(req)) as f:
            data = f.read()
            headers = f.info()
        import json
        data = json.loads(data)
        return data

    def queryAcoustIdResults_selectionChangeHandler(self, selection):
        # Remember the table selection so apply() can use it later.
        self._queryAcoustId_selection = selection

    @UserAttrib(type=Traits.Table(keys=("artist", "title", "album", "track", "score")),
        selectionChangeHandler=queryAcoustIdResults_selectionChangeHandler)
    @property
    def queryAcoustIdResults(self):
        # Results are only valid for the song they were fetched for.
        if getattr(self, "_queryAcoustIdResults_songId", "") != getattr(self.song, "id", ""):
            return []
        return list(getattr(self, "_queryAcoustIdResults", []))

    @queryAcoustIdResults.setUpdateEvent
    @initBy
    def queryAcoustIdResults_updateEvent(self): return Event()

    @UserAttrib(type=Traits.Action, variableWidth=False)
    def queryAcoustId(self):
        """Fetch AcoustID matches for the current song and fill the results table."""
        data = self._queryAcoustId()
        self._queryAcoustIdResults_songId = self.song.id
        self._queryAcoustIdResults = []
        for result in data.get("results", []):
            for recording in result.get("recordings", []):
                for resGroup in recording.get("releasegroups", []):
                    # NOTE(review): only the first artist/release/medium/track
                    # of each release group is considered.
                    artist = resGroup["artists"][0]
                    release = resGroup["releases"][0]
                    medium = release["mediums"][0]
                    track = medium["tracks"][0]
                    if artist["name"] == "Various Artists":
                        artist = track["artists"][0]
                    entry = {
                        "id": result["id"],
                        "score": result["score"],
                        "recording-id": recording["id"],
                        "releasegroup-id": resGroup["id"],
                        "artist-id": artist["id"],
                        "artist": artist["name"],
                        "title": track["title"],
                        "album": resGroup["title"],
                        "track": "%i/%i" % (track["position"], medium["track_count"])
                    }
                    self._queryAcoustIdResults += [entry]
        if not self._queryAcoustIdResults:
            # Placeholder row so the table shows an explicit "no matches" state.
            self._queryAcoustIdResults += [{"artist":"- None found -","title":"","album":"","track":""}]
        self.queryAcoustIdResults_updateEvent.push()

    @UserAttrib(type=Traits.Action, variableWidth=False, alignRight=True)
    def apply(self):
        """Copy the selected AcoustID result into the song's own fields."""
        # Ignore stale results fetched for a previously selected song.
        if getattr(self, "_queryAcoustIdResults_songId", "") != getattr(self.song, "id", ""):
            return
        sel = getattr(self, "_queryAcoustId_selection", [])
        if not sel: return
        sel = sel[0]
        # Require at least artist and title before overwriting anything.
        for key in ("artist","title"):
            if not sel[key]: return
        for key in ("artist","title","album","track"):
            setattr(self.song, key, sel[key])
        self._updateEvent.push() # the song is updating itself - but the edit fields aren't atm...
|
McMansion - What Does It Mean?
What is a McMansion? What is the definition of the term McMansion?
The term "McMansion", which is thought to have originated in the '80s, was popularized in the late '90s.
A "McMansion" is a bland, generic, oversized home that is usually situated on a block with a number of other bland, generic, oversized homes. Hence the term "McMansion", as these houses are thought to be mass-produced and lacking any distinctive taste or qualities.
"McMansions" are popular with the upper middle-class, as those in the lower to middle class can't afford them, and those in the upper class will purchase something of a higher quality.
"McMansions" are usually a mishmash of different architectural styles, which is why many people find them so displeasing.
"McMansions" are usually far too big for their occupants. For instance, many double income couples will purchase and live in McMansions, despite the fact that they don't have kids. The homes are not functional for these types of people - instead, they are simply buying them for the status symbol.
Many "McMansions" are now in the hands of banks throughout the country, due to the fact that many of the occupants of these homes couldn't afford the mortgages in the first place. When the real estate market tanked, a McMansion suddenly became much less of a valuable commodity, and many people simply chose to walk away from their mortgages and give their keys back to the bank.
|
'''
Copyright (C) 2014 Jacques Lucke
mail@jlucke.com
Created by Jacques Lucke
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import importlib, sys, os
from fnmatch import fnmatch
# Blender addon metadata, read by Blender's addon manager.
bl_info = {
    "name": "Code Autocomplete",
    "description": "Improve the scripting experience in Blenders text editor.",
    "author": "Jacques Lucke",
    "version": (1,0, 0),
    "blender": (2, 7, 4),
    "location": "Text Editor",
    "category": "Development"
}

# import all modules in same/subdirectories
###########################################

currentPath = os.path.dirname(__file__)
module_name = "script_auto_complete"
# Alias this package under a fixed name so submodules can import it
# regardless of the folder the addon was installed into.
sys.modules[module_name] = sys.modules[__name__]
def getAllImportFiles():
    """Yield the dotted module path of every non-__init__ .py file below this addon."""
    def get_path(base):
        # Walk upwards until we reach this package's own directory,
        # collecting path components into a dotted-module prefix.
        head, tail = os.path.split(base)
        if __name__ == tail:
            return [module_name]
        return get_path(head) + [tail]

    for root, dirs, files in os.walk(currentPath):
        package = ".".join(get_path(root))
        for filename in files:
            if filename.endswith(".py"):
                stem = filename[:-3]
                if stem != "__init__":
                    yield package + "." + stem
# Import every addon module up front, keeping references for reloading.
auto_complete_modules = []
for name in getAllImportFiles():
    mod = importlib.import_module(name)
    auto_complete_modules.append(mod)

# True when this file is executed again within the same Blender session
# (bpy is already in our namespace from the previous run).
reload_event = "bpy" in locals()

import bpy

# Reload
# makes F8 reload actually reload the code
if reload_event:
    for module in auto_complete_modules:
        importlib.reload(module)
class AddonPreferences(bpy.types.AddonPreferences):
    """Addon preferences panel shown in Blender's user preferences."""
    bl_idname = module_name

    # Number of suggestion lines shown by the autocomplete box.
    line_amount = bpy.props.IntProperty(default = 8, min = 1, max = 20, name = "Lines")

    def draw(self, context):
        layout = self.layout
        row = layout.row(align = False)
        row.prop(self, "line_amount")
# register
##################################

def register():
    """Register all addon classes with Blender (best-effort)."""
    # Registration may fail if some classes are already registered; that is
    # tolerated.  Catch Exception rather than a bare except so SystemExit
    # and KeyboardInterrupt still propagate.
    try:
        bpy.utils.register_module(module_name)
    except Exception:
        pass
    print("Loaded Script Auto Completion with {} modules".format(len(auto_complete_modules)))
def unregister():
    """Unregister the addon's classes (best-effort)."""
    # Narrowed from a bare except: a bare except would also swallow
    # SystemExit/KeyboardInterrupt.
    try:
        bpy.utils.unregister_module(module_name)
    except Exception:
        pass

if __name__ == "__main__":
    register()
|
Dental Implants are a permanent solution for teeth needing replacement, and can last for years, or even a lifetime if cared for properly. Implants are the best solution if you are looking for long-term value. They look, feel, and are just as strong as natural teeth. At Winning Smiles Family Dentistry, we provide a calm and relaxed environment with exceptional care for our patients.
Compared to other dental solutions such as dentures or bridges, implants make it easier to speak and chew food. They are made of titanium, and are the strongest solution today for tooth replacement. Implants give patients confidence, and the smile they have always dreamed of.
Adequate bone is required to ensure the implants are successful. Our team at Winning Smiles Family Dentistry will evaluate your teeth to see if you are a good candidate. If additional bone is needed to support the implant area, a bone graft can be performed. The bone graft increases the life of the implants, and lessens the risk of any teeth becoming loose.
Dental implants are built to last, and are made of titanium making them very reliable.
Implants are anchored to the bone rather than the tooth’s roots. This allows the implants to be stronger, and give them the ability to withstand more pressure than natural teeth.
With proper maintenance and care, implants are built to last a lifetime, and have a 98% success rate. This does depend on the patient’s bone strength, and strength of their existing teeth.
Bone grafts can be performed if needed, to provide additional support for the implants. This varies for each patient.
Dental Implants are one of the safest procedures done today.
If you are tired of the way your teeth look, and want a natural, beautiful looking smile, call or email Winning Smiles Family Dentistry today. Let our team show you all the benefits of dental implants, and how they can work for you.
|
#!/usr/bin/env python
#
# =========================================================================
# This file is part of six.sicd-python
# =========================================================================
#
# (C) Copyright 2004 - 2015, MDA Information Systems LLC
#
# six.sicd-python is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; If not,
# see <http://www.gnu.org/licenses/>.
#
from pysix.six_sicd import *
from pysix.six_base import *
from coda.xml_lite import *
from coda.coda_io import *
from coda.coda_logging import *
from coda.coda_types import *
import os
import sys
import os.path
import filecmp
##############################################################
#
# roundTrip test
#
##############################################################
# This test loads up an XML file to a complex data object and
# writes it out again. Then it compares the two files to see if
# they match
def roundTrip(filename):
    """Parse SICD XML into ComplexData, write it back out, and re-parse.

    Prints "<filename> passed" when the re-parsed object equals the original,
    otherwise a failure message.  Requires SIX_SCHEMA_PATH to be set.
    """
    vs = VectorString()
    schemapath = os.environ['SIX_SCHEMA_PATH']
    vs.push_back(schemapath)
    # Parse the input XML document, preserving character data.
    fis = FileInputStream(filename)
    xmlparser = MinidomParser()
    xmlparser.preserveCharacterData(True)
    xmlparser.parse(fis)
    doc = xmlparser.getDocument()
    dt = DataType(DataType.COMPLEX)  # NOTE(review): unused here -- kept as-is
    # Convert XML -> ComplexData.
    xml_ctrl = ComplexXMLControl()
    data = xml_ctrl.fromXML(doc, vs)
    cmplx = asComplexData(data)
    # Convert ComplexData -> XML and pretty-print it to a new file.
    out_doc = xml_ctrl.toXML(cmplx, vs)
    out_filename = 'round_trip_' + os.path.basename(filename)
    fos = FileOutputStream(out_filename)
    root = out_doc.getRootElement()
    root.prettyPrint(fos)
    # Re-parse the written file and compare against the original object.
    round_tripped_cmplx = SixSicdUtilities_parseDataFromFile(out_filename,
                                                            vs, Logger())
    if cmplx == round_tripped_cmplx:
        print(filename + " passed")
    else:
        print("handling " + filename + " failed")
##############################################################
#
# loadSicd test
#
##############################################################
# This test loads a sicd file as a complex data object and writes
# it out as an xml file. Unfortunately there is no handy way to compare
# the resulting xml so it just prints out various information and assumes
# it worked. Also real sicds can be kind of big so there probably won't
# be any in the repository, so this is really more of an example.
def loadSicd(filename):
    """Load a SICD NITF, dump some of its metadata, and write it out as XML.

    There is no handy way to validate the result automatically, so this
    prints assorted ComplexData fields (via showInfo) and assumes success.
    Returns the ComplexData object.
    """
    schema_paths = VectorString()
    schema_paths.push_back(os.environ['SIX_SCHEMA_PATH'])

    complex_data = SixSicdUtilities.getComplexData(filename, schema_paths)
    showInfo(complex_data)

    # Write the metadata back out as pretty-printed XML.
    out_filename = 'from_sicd_' + os.path.basename(filename) + '.xml'
    controller = ComplexXMLControl()
    out_stream = FileOutputStream(out_filename)
    xml_doc = controller.toXML(complex_data, schema_paths)
    xml_doc.getRootElement().prettyPrint(out_stream)

    print(filename + " probably passed")
    return complex_data
##############################################################
#
# showInfo test
#
##############################################################
# This isn't a test per se but is useful to put some assorted
# ComplexData member information on the screen just to show
# that we can actually get to the data
def showInfo(cmplx):
    """Print assorted ComplexData fields to show the bindings can reach them.

    Not a test per se; dumps image size, polarization, geometry angles and
    the radar-collection parameter / valid-data vectors of *cmplx*.
    """
    print("file is " + str(cmplx.getNumRows()) + "x" + str(cmplx.getNumCols()))
    print(str(cmplx.getNumBytesPerPixel()) + " bytes per pixel")
    print( "tx polarization: " + cmplx.radarCollection.txPolarization.toString())
    print("image formation algorithm: " + cmplx.imageFormation.imageFormationAlgorithm.toString())
    print("graze angle: " + str(cmplx.scpcoa.grazeAngle))
    print("slant range: " + str(cmplx.scpcoa.slantRange))
    print("radar mode: " + cmplx.collectionInformation.radarMode.toString())
    # 'parameters' is a wrapped C++ vector, hence empty()/size()/[] access
    # rather than the usual Python container protocols.
    collectionParameters = cmplx.radarCollection.parameters
    print("radar collection parameters: ")
    if collectionParameters.empty():
        print(' (none)')
    else:
        for idx in range(0,collectionParameters.size()):
            print(" " + collectionParameters[idx].getName() + ": " + str(collectionParameters[idx]))
    # Valid-data polygon vertices, printed one (row,col) pair per line.
    print('image data \'validData\'')
    validData = cmplx.imageData.validData
    if validData.empty():
        print(' (none)')
    else:
        for idx in range(0,validData.size()):
            print(' (' + str(validData[idx].row) + ',' + str(validData[idx].col) + ')')
###############################################################################
###############################################################################
###############################################################################
###############################################################################
###############################################################################
# Simple CLI driver: each argument is either a SICD NITF (loadSicd) or a
# SICD XML file (roundTrip).  With no file arguments we only warn; the loop
# below then sees just argv[0] (the script name) and does nothing.
if 1 == len(sys.argv):
    print("please provide a sicd or XML file")
for arg in sys.argv:
    if (arg.endswith(".ntf") or arg.endswith(".nitf")):
        cmplx = loadSicd(arg)
    elif arg.endswith(".xml"):
        roundTrip(arg)
|
We are pleased that you are interested in staying at our house.
The following variants are available.
Please fill out the form for reservation request.
Füllen Sie bitte das Feld aus. Please enter a value.
Geben Sie bitte eine gültige E-Mail-Adresse ein. Please enter a valid email address.
For questions or concerns feel free to contact us by phone or email.
|
class PTree:
    """A parse-tree node with a label and either text or child content.

    Attributes:
        name: the node label (expected to be a whitespace-free string).
        content: either a string (leaf) or a list of child PTree nodes.
        height: 0 for leaves, otherwise 1 + the tallest child's height.
    """

    def __init__(self, name, content):
        if type(name) is str and ' ' not in name:
            self.name = name
        else:
            # BUG FIX: previously self.name was left unset on this branch, so
            # any later use (e.g. __str__) raised AttributeError.  Keep the
            # original warning but still record the name.
            print("Name is not a string")
            self.name = name
        self.content = content
        # Height is 0 for string (leaf) content, else one more than the
        # tallest child.
        self.height = 0
        if type(content) is list:
            for tree in content:
                if tree.height >= self.height:
                    self.height = tree.height + 1

    def __str__(self):
        """Render as an s-expression; nested children are indented one space."""
        if type(self.content) is str:
            return '\n(' + self.name + ' ' + self.content + ')'
        output = '\n(' + self.name
        for child in self.content:
            # Re-indent each child's rendering by one space per line.
            output = output + '\n '.join(str(child).split('\n'))
        return output + '\n)'
def MatchParen(text):
    """Recursively split a parenthesised string into nested token lists.

    Returns a pair (tokens, consumed): *tokens* mixes raw text runs with
    nested sub-lists (one per parenthesised group), and *consumed* tells a
    recursive caller how far to advance past this group.
    """
    tokens = []
    run = ''
    pos = 0
    while pos < len(text):
        ch = text[pos]
        if ch == '(':
            # Flush any pending text, then recurse into the nested group.
            if run not in [' ', '', '\t']:
                tokens.append(run)
            run = ''
            nested, consumed = MatchParen(text[pos + 1:])
            tokens.append(nested)
            pos = pos + consumed
        elif ch == ')':
            # End of this group: flush pending text and stop.
            if run not in [' ', '']:
                tokens.append(run)
            break
        else:
            run = run + ch
            pos = pos + 1
    return (tokens, pos + 2)
def ParseTree(x):
    """Build a PTree from the nested token lists produced by MatchParen.

    *x* is a list whose first element is usually the node label, followed by
    child token lists; a single 'label content' string denotes a leaf.
    """
    if len(x) > 1 or type(x[0]) is list:
        # Interior node: the first element is the label unless it is itself
        # a nested list (anonymous node).
        try:
            name = x[0].rstrip()
            start = 1
        except AttributeError:
            # BUG FIX: was a bare 'except:'.  Only a non-string first element
            # (which has no .rstrip) should fall through to the anonymous
            # node case; anything else should propagate.
            name = ''
            start = 0
        content = []
        for y in x[start:]:
            if type(y) is list:
                content.append(ParseTree(y))
            else:
                content.append(y)
    else:
        # Leaf: 'label content' packed into a single string.
        # NOTE(review): only the first token after the label is kept, so
        # multi-word leaf content is dropped -- confirm this is intended.
        y = x[0].split(' ')
        name = y[0]
        content = y[1]
    return PTree(name, content)
def ParseFiles(argvs, numtrees=1):
    """Scan treebank-style files for bracketed trees and parse them.

    Any argument whose extension is .ref/.psd/.out/.cod is read; runs of
    non-blank lines containing 'ID' are parsed into PTrees (comment blocks
    delimited by /* ... */ or /~* ... *~/ are skipped).  Returns a dict
    mapping each filename (sans extension) to its list of trees.
    (*numtrees* is kept for backward compatibility but is unused.)
    """
    toklist = {}
    for arg in argvs:
        if arg[-3:] not in ['ref', 'psd', 'out', 'cod']:
            continue
        print(arg)
        tokens = []
        token = ''
        storing = 1
        # BUG FIX: the file handle was never closed; use a context manager.
        with open(arg) as source:
            for line in source:
                if '/*' in line or '/~*' in line:
                    # A comment block starts: flush any pending tree first.
                    if token != '' and 'ID' in token:
                        tokens.append(ParseTree(MatchParen(token.lstrip().rstrip())[0][0]))
                        print('Tree found!')
                    token = ''
                    storing = 0
                elif '*/' in line or '*~/' in line:
                    # Comment block ends; resume accumulating tree text.
                    storing = 1
                elif line == '\n' and 'ID' in token:
                    # A blank line terminates the current tree.
                    tokens.append(ParseTree(MatchParen(token.lstrip().rstrip())[0][0]))
                    print('Tree found!')
                    token = ''
                elif line == '\n':
                    token = ''
                elif storing == 1:
                    token = token + line.rstrip().lstrip()
        toklist[arg[:-4]] = tokens
    return toklist
|
Salim Amin is co-founder and Chairman of Africa24 Media, Chairman of Camerapix, and founder and Chairman of The Mohamed Amin Foundation.
He has been in the media industry for 30 years as a photographer, cameraman, producer, presenter and trainer and has won numerous awards for his documentary work.
He is a Young Global Leader with the World Economic Forum, a Fellow of the African Leadership Initiative and a member of the Aspen Global Leadership Network. In December 2012, the New African Magazine listed him as one of the “100 Most Influential Africans” and “top 50 Under 50” Africans, and his weekly talk show “The Scoop” has a global reach of over 300 million people on television, radio and online.
|
import time
import xbmc
import xbmcgui
import xbmcaddon
if __name__ == '__main__':
    # Localized string 369 is the title used below to detect movie-set
    # listings.
    trans_title = xbmc.getLocalizedString(369)
    monitor = xbmc.Monitor()
    # NOTE(review): xbmc.LOGNOTICE and str.decode() below imply a Kodi 18 /
    # Python 2 environment; both were removed in Kodi 19 -- confirm target.
    xbmc.log("service.skin.viewswitcher - Start service", level=xbmc.LOGNOTICE)
    while not monitor.abortRequested():
        # Sleep/wait for abort for 0.5 seconds
        if monitor.waitForAbort(0.5):
            # Abort was requested while waiting. We should exit
            break
        # Only act when the skin has forced views enabled.
        if not xbmc.getCondVisibility("!Skin.HasSetting(ForcedViews.Enabled)") == 1:
            current_content = xbmc.getInfoLabel("Container.Content")
            path = xbmc.getInfoLabel("Container.FolderName")
            # Check if movie is part of a set
            if current_content == "movies":
                setname = xbmc.getInfoLabel("ListItem.Set")  # NOTE(review): unused -- confirm
                # Folder name differing from the localized "set(s)" title is
                # treated as a movie listing inside a set.
                if (str(trans_title) != str(path) and (str(trans_title)+'s' != str(path))):
                    #dlg = xbmcgui.Dialog()
                    #dlg.notification("Compare",str(path) + " - " + str(trans_title),xbmcgui.NOTIFICATION_INFO,1000)
                    current_content = "setmovies"
            # Check if content is part of addon - if yes disable forced view and let addon select view
            plugin = xbmc.getInfoLabel("Container.PluginName")
            if plugin != "":
                current_content = ""
            # Check if the content type is one of the defined views.
            # NOTE(review): this is a substring match against the pipe-joined
            # string, so e.g. "movie" would also pass -- confirm intended.
            if current_content in "movies|sets|setmovies|tvshows|seasons|episodes|albums|artists|songs|musicvideos|pictures|videos|files" and not current_content == "":
                # Get labels and force ascii for compare to be rock-stable for languages with special chars
                current_view_label = xbmc.getInfoLabel("Container.Viewmode").decode("utf-8").encode("ascii","ignore")
                dest_view_id = xbmc.getInfoLabel("Skin.String(SkinHelper.ForcedViews.%s)" % current_content).decode("utf-8").encode("ascii","ignore")
                dest_view_label = xbmc.getInfoLabel("Skin.String(SkinHelper.ForcedViews.%s.label)" % current_content).decode("utf-8").encode("ascii","ignore")
                # Set current view to forced one
                if (dest_view_id != ""):
                    if current_view_label != dest_view_label:
                        #dlg = xbmcgui.Dialog()
                        #dlg.notification("Set",str(path) + " - " + current_content,xbmcgui.NOTIFICATION_INFO,1000)
                        xbmc.executebuiltin("Container.SetViewMode(%s)" % dest_view_id)
                        xbmc.log("service.skin.viewswitcher - Cur label: " + current_view_label, level=xbmc.LOGNOTICE)
                        xbmc.log("service.skin.viewswitcher - Cur content: " + str(current_content), level=xbmc.LOGNOTICE)
                        xbmc.log("service.skin.viewswitcher - Switching to:", level=xbmc.LOGNOTICE)
                        xbmc.log("service.skin.viewswitcher - Dest label: " + str(dest_view_label), level=xbmc.LOGNOTICE)
                        xbmc.log("service.skin.viewswitcher - Dest id: " + str(dest_view_id), level=xbmc.LOGNOTICE)
                        # give kodi time to relax :-)
                        time.sleep(1)
|
BYOD refers to the policy by which employees can use their own personal mobile devices to access company networks and systems. We consider guidelines on creating such a policy. If your business is in the Sittingbourne area, we at Williams Giles can help you to create a Bring Your Own Device (BYOD) policy.
If your business is in the Sittingbourne area we are here to help you create a Bring your own device policy (BYOD). Please contact us at Williams Giles for further advice.
|
# -*- coding: utf8 -*-
#
# Copyright (C) 2015 NDP Systèmes (<http://www.ndp-systemes.fr>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
#
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Odoo addon manifest for 'purchase_delivery_tracking_colissimo'.
# Declares metadata and dependencies only; the tracking logic lives in the
# modules listed under 'depends'.
{
    'name': 'Purchase delivery tracking (Colissimo)',
    'version': '0.1',
    'author': 'NDP Systèmes',
    'maintainer': 'NDP Systèmes',
    'category': 'Purchase',
    # Extends the generic purchase tracking with the Colissimo carrier backend.
    'depends': ['purchase_delivery_tracking', 'base_delivery_tracking_colissimo'],
    'description': """
Purchase delivery tracking (Colissimo)
======================================
This module allows to track the Colissimo deliveries
""",
    'website': 'http://www.ndp-systemes.fr',
    # XML data loaded on install/update.
    'data': ['purchase_delivery_tracking_colissimo.xml'],
    'demo': [],
    'test': [],
    'installable': True,
    'auto_install': False,
    'license': 'AGPL-3',
    'application': False,
}
|
Rubber ceramic linings (wear-resistant rubber ceramic linings) with excellent quality at a very attractive price. 320*320*20mm, 320*320*30mm, 500*500*25mm... All sizes and thicknesses are customised according to clients' requirements.
2. Convenient construction: no clearance after installation. Rubber composite liner can be distorted and cut, suitable for the installation of all kinds of special equipment.
|
import re
from rest_framework import generics
from rest_framework.exceptions import NotFound, PermissionDenied, NotAuthenticated
from rest_framework import permissions as drf_permissions
from framework.auth.oauth_scopes import CoreScopes
from osf.models import ReviewAction, PreprintService
from osf.utils.requests import check_select_for_update
from api.actions.permissions import ReviewActionPermission
from api.actions.serializers import ReviewActionSerializer
from api.actions.views import get_review_actions_queryset
from api.base.exceptions import Conflict
from api.base.views import JSONAPIBaseView, WaterButlerMixin
from api.base.filters import ListFilterMixin, PreprintFilterMixin
from api.base.parsers import (
JSONAPIMultipleRelationshipsParser,
JSONAPIMultipleRelationshipsParserForRegularJSON,
)
from api.base.utils import absolute_reverse, get_user_auth
from api.base import permissions as base_permissions
from api.citations.utils import render_citation, preprint_csl
from api.preprints.serializers import (
PreprintSerializer,
PreprintCreateSerializer,
PreprintCitationSerializer,
)
from api.nodes.serializers import (
NodeCitationStyleSerializer,
)
from api.identifiers.views import IdentifierList
from api.identifiers.serializers import PreprintIdentifierSerializer
from api.nodes.views import NodeMixin, NodeContributorsList
from api.nodes.permissions import ContributorOrPublic
from api.preprints.permissions import PreprintPublishedOrAdmin
class PreprintMixin(NodeMixin):
    """Mixin for preprint views: resolves the preprint named by the URL kwarg."""
    serializer_class = PreprintSerializer
    preprint_lookup_url_kwarg = 'preprint_id'

    def get_preprint(self, check_object_permissions=True):
        """Return the PreprintService for the 'preprint_id' URL kwarg.

        Uses select_for_update() (row lock) when the request warrants it,
        otherwise eagerly loads the related node.  Raises NotFound for an
        unknown preprint or one whose node has been deleted; object
        permissions are checked unless the caller opts out.
        """
        qs = PreprintService.objects.filter(guids___id=self.kwargs[self.preprint_lookup_url_kwarg])
        try:
            preprint = qs.select_for_update().get() if check_select_for_update(self.request) else qs.select_related('node').get()
        except PreprintService.DoesNotExist:
            raise NotFound
        # A deleted backing node makes the preprint effectively gone too.
        if preprint.node.is_deleted:
            raise NotFound
        # May raise a permission denied
        if check_object_permissions:
            self.check_object_permissions(self.request, preprint)
        return preprint
class PreprintList(JSONAPIBaseView, generics.ListCreateAPIView, PreprintFilterMixin):
    """The documentation for this endpoint can be found [here](https://developer.osf.io/#operation/preprints_list).
    """
    # These permissions are not checked for the list of preprints, permissions handled by the query
    permission_classes = (
        drf_permissions.IsAuthenticatedOrReadOnly,
        base_permissions.TokenHasScope,
        ContributorOrPublic,
    )

    parser_classes = (JSONAPIMultipleRelationshipsParser, JSONAPIMultipleRelationshipsParserForRegularJSON,)

    required_read_scopes = [CoreScopes.NODE_PREPRINTS_READ]
    required_write_scopes = [CoreScopes.NODE_PREPRINTS_WRITE]

    serializer_class = PreprintSerializer

    # BUG FIX: ('-created') without a trailing comma was a plain string, not
    # a one-element tuple.  DRF happens to tolerate a string here, but the
    # sibling PreprintActionList uses a tuple and that is what was intended.
    ordering = ('-created',)
    ordering_fields = ('created', 'date_last_transitioned')

    view_category = 'preprints'
    view_name = 'preprint-list'

    def get_serializer_class(self):
        """Use the create serializer for POST so creation-time fields apply."""
        if self.request.method == 'POST':
            return PreprintCreateSerializer
        return PreprintSerializer

    def get_default_queryset(self):
        """All preprints visible to the requester; permissions live in the query."""
        auth = get_user_auth(self.request)
        auth_user = getattr(auth, 'user', None)
        # Permissions on the list objects are handled by the query
        return self.preprints_queryset(PreprintService.objects.all(), auth_user)

    # overrides ListAPIView
    def get_queryset(self):
        return self.get_queryset_from_request()
class PreprintDetail(JSONAPIBaseView, generics.RetrieveUpdateDestroyAPIView, PreprintMixin, WaterButlerMixin):
    """The documentation for this endpoint can be found [here](https://developer.osf.io/#operation/preprints_read).
    """
    permission_classes = (
        drf_permissions.IsAuthenticatedOrReadOnly,
        base_permissions.TokenHasScope,
        ContributorOrPublic,
        PreprintPublishedOrAdmin,
    )
    parser_classes = (
        JSONAPIMultipleRelationshipsParser,
        JSONAPIMultipleRelationshipsParserForRegularJSON,
    )
    required_read_scopes = [CoreScopes.NODE_PREPRINTS_READ]
    required_write_scopes = [CoreScopes.NODE_PREPRINTS_WRITE]

    serializer_class = PreprintSerializer

    view_category = 'preprints'
    view_name = 'preprint-detail'

    def get_object(self):
        # Lookup and object-permission checks both happen in get_preprint().
        return self.get_preprint()

    def perform_destroy(self, instance):
        """Delete an unpublished preprint; published ones may not be deleted."""
        if instance.is_published:
            raise Conflict('Published preprints cannot be deleted.')
        PreprintService.delete(instance)

    def get_parser_context(self, http_request):
        """
        Tells parser that type is required in request
        """
        res = super(PreprintDetail, self).get_parser_context(http_request)
        res['legacy_type_allowed'] = True
        return res
class PreprintCitationDetail(JSONAPIBaseView, generics.RetrieveAPIView, PreprintMixin):
    """The documentation for this endpoint can be found [here](https://developer.osf.io/#operation/preprints_citation_list).
    """
    permission_classes = (
        drf_permissions.IsAuthenticatedOrReadOnly,
        base_permissions.TokenHasScope,
    )

    required_read_scopes = [CoreScopes.NODE_CITATIONS_READ]
    required_write_scopes = [CoreScopes.NULL]

    serializer_class = PreprintCitationSerializer
    view_category = 'preprints'
    view_name = 'preprint-citation'

    def get_object(self):
        """Return CSL citation data when the requester may see the preprint."""
        preprint = self.get_preprint()
        auth = get_user_auth(self.request)

        if preprint.node.is_public or preprint.node.can_view(auth) or preprint.is_published:
            return preprint_csl(preprint, preprint.node)

        # Raising the class is fine: DRF instantiates bare exception classes.
        # 403 for authenticated requesters, 401 for anonymous ones.
        raise PermissionDenied if auth.user else NotAuthenticated
class PreprintCitationStyleDetail(JSONAPIBaseView, generics.RetrieveAPIView, PreprintMixin):
    """The documentation for this endpoint can be found [here](https://developer.osf.io/#operation/preprints_citation_read).
    """
    permission_classes = (
        drf_permissions.IsAuthenticatedOrReadOnly,
        base_permissions.TokenHasScope,
    )

    required_read_scopes = [CoreScopes.NODE_CITATIONS_READ]
    required_write_scopes = [CoreScopes.NULL]

    serializer_class = NodeCitationStyleSerializer

    # NOTE(review): sibling views use view_category = 'preprints' (plural);
    # left unchanged because it may be referenced in URL reversal -- confirm.
    view_category = 'preprint'
    view_name = 'preprint-citation'

    def get_object(self):
        """Render the preprint's citation in the CSL style from the URL."""
        preprint = self.get_preprint()
        auth = get_user_auth(self.request)
        style = self.kwargs.get('style_id')

        if preprint.node.is_public or preprint.node.can_view(auth) or preprint.is_published:
            try:
                citation = render_citation(node=preprint, style=style)
            except ValueError as err:  # style requested could not be found
                # BUG FIX: raw string for the regex (the '\.' escape), and
                # guard the previously unchecked [0] so a message without a
                # '<name>.csl' token yields 404 instead of an IndexError/500.
                # NOTE(review): err.message is Python-2 only -- confirm runtime.
                matches = re.findall(r'[a-zA-Z]+\.csl', err.message)
                csl_name = matches[0] if matches else style
                raise NotFound('{} is not a known style.'.format(csl_name))

            return {'citation': citation, 'id': style}

        # 403 for authenticated requesters, 401 for anonymous ones.
        raise PermissionDenied if auth.user else NotAuthenticated
class PreprintIdentifierList(IdentifierList, PreprintMixin):
    """List of identifiers for a specified preprint. *Read-only*.

    ##Identifier Attributes

    OSF Identifier entities have the "identifiers" `type`.

        name           type                   description
        ----------------------------------------------------------------------------
        category       string                 e.g. 'ark', 'doi'
        value          string                 the identifier value itself

    ##Links

        self: this identifier's detail page

    ##Relationships

    ###Referent

    The identifier refers to this preprint.

    ##Actions

    *None*.

    ##Query Params

    Identifiers may be filtered by their category.

    #This Request/Response

    """
    permission_classes = (
        PreprintPublishedOrAdmin,
        drf_permissions.IsAuthenticatedOrReadOnly,
        base_permissions.TokenHasScope,
    )

    serializer_class = PreprintIdentifierSerializer
    required_read_scopes = [CoreScopes.IDENTIFIERS_READ]
    required_write_scopes = [CoreScopes.NULL]

    preprint_lookup_url_kwarg = 'preprint_id'

    view_category = 'preprints'
    view_name = 'identifier-list'

    # overrides IdentifierList
    def get_object(self, check_object_permissions=True):
        # The "object" whose identifiers are listed is the preprint itself.
        return self.get_preprint(check_object_permissions=check_object_permissions)
class PreprintContributorsList(NodeContributorsList, PreprintMixin):
    """Contributor creation for a preprint, delegated to its backing node."""

    def create(self, request, *args, **kwargs):
        # Resolve the preprint without object-permission checks and route the
        # contributor creation to its underlying node.
        preprint = self.get_preprint(check_object_permissions=False)
        self.kwargs['node_id'] = preprint.node._id
        return super(PreprintContributorsList, self).create(request, *args, **kwargs)
class PreprintActionList(JSONAPIBaseView, generics.ListCreateAPIView, ListFilterMixin, PreprintMixin):
    """Action List *Read-only*

    Actions represent state changes and/or comments on a reviewable object (e.g. a preprint)

    ##Action Attributes

        name                            type                                description
        ====================================================================================
        date_created                    iso8601 timestamp                   timestamp that the action was created
        date_modified                   iso8601 timestamp                   timestamp that the action was last modified
        from_state                      string                              state of the reviewable before this action was created
        to_state                        string                              state of the reviewable after this action was created
        comment                         string                              comment explaining the state change
        trigger                         string                              name of the trigger for this action

    ##Relationships

    ###Target
    Link to the object (e.g. preprint) this action acts on

    ###Provider
    Link to detail for the target object's provider

    ###Creator
    Link to the user that created this action

    ##Links
    - `self` -- Detail page for the current action

    ##Query Params
    + `page=<Int>` -- page number of results to view, default 1
    + `filter[<fieldname>]=<Str>` -- fields and values to filter the search results on.

    Actions may be filtered by their `id`, `from_state`, `to_state`, `date_created`, `date_modified`, `creator`, `provider`, `target`
    """
    permission_classes = (
        drf_permissions.IsAuthenticatedOrReadOnly,
        base_permissions.TokenHasScope,
        ReviewActionPermission,
    )

    required_read_scopes = [CoreScopes.ACTIONS_READ]
    required_write_scopes = [CoreScopes.ACTIONS_WRITE]

    parser_classes = (JSONAPIMultipleRelationshipsParser, JSONAPIMultipleRelationshipsParserForRegularJSON,)
    serializer_class = ReviewActionSerializer
    model_class = ReviewAction

    ordering = ('-created',)
    view_category = 'preprints'
    view_name = 'preprint-review-action-list'

    # overrides ListCreateAPIView
    def perform_create(self, serializer):
        # The acted-on preprint comes from the validated payload, not the
        # URL, so object permissions must be re-checked against it.
        target = serializer.validated_data['target']
        self.check_object_permissions(self.request, target)

        # Review actions only make sense on providers with moderation set up.
        if not target.provider.is_reviewed:
            raise Conflict('{} is an unmoderated provider. If you are an admin, set up moderation by setting `reviews_workflow` at {}'.format(
                target.provider.name,
                absolute_reverse('preprint_providers:preprint_provider-detail', kwargs={
                    'provider_id': target.provider._id,
                    'version': self.request.parser_context['kwargs']['version']
                })
            ))

        serializer.save(user=self.request.user)

    # overrides ListFilterMixin
    def get_default_queryset(self):
        return get_review_actions_queryset().filter(target_id=self.get_preprint().id)

    # overrides ListAPIView
    def get_queryset(self):
        return self.get_queryset_from_request()
|
‘Reviews’ is taken from Tancred’s new album Nightstand. It’s out June 1, 2018, via Polyvinyl Records.
After the release of her breakthrough record Out of the Garden in 2016, indie/alternative singer/songwriter Tancred (Jess Abbott) is back with ‘Reviews’, the first track to be gleaned from her upcoming new album Nightstand. Formerly of Now, Now, Abbott is both relishing the chance to go solo and also apprehensive about the experience.
“After I became comfortable in this new skin, in truly being myself, I was immediately hit with loneliness,” she reveals. “I realized that human connection is really important to me.” To combat this, Abbott embarked on an inward journey combined with redoubled efforts with those around her. “I was reading a lot of books, learning a lot of new hobbies, meeting so many new people — just taking in as much information as possible to try and figure out what it really meant to me to be alive,” she recalls.
The result of all this enquiry is Nightstand, an album that is equally inward-looking and outward-focused. “I wanted the album to have a timeless feel to it, so you could hear my stories of love and loneliness and sense that these are themes that have existed for everyone forever,” she says.
|
# Twisted, the Framework of Your Internet
# Copyright (C) 2001 Matthew W. Lefkowitz
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of version 2.1 of the GNU Lesser General Public
# License as published by the Free Software Foundation.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
from twisted.trial import unittest
from twisted.spread import util
from twisted.words import service
from twisted.internet import app
from twisted.cred.authorizer import DefaultAuthorizer
class WordsTestCase(unittest.TestCase):
    """Exercises the twisted.words service: participants, contacts, transcripts."""

    def testWords(self):
        """A transcript records messages only between start and endTranscript()."""
        ap = app.Application("testwords")
        au = DefaultAuthorizer()
        s = service.Service('twisted.words', ap, au)
        s.createParticipant("glyph")
        s.createParticipant("sean")
        # XXX OBSOLETE: should be async getPerspectiveRequest
        glyph = s.getPerspectiveNamed("glyph")
        sean = s.getPerspectiveNamed("sean")
        glyph.addContact("sean")
        # Start transcribing before any messages flow.
        t = glyph.transcribeConversationWith('sean')
        glyph.attached(DummyWordsClient(), None)
        sean.attached(DummyWordsClient(), None)
        glyph.directMessage("sean", "ping")
        sean.directMessage("glyph", "pong")
        self.failUnlessEqual(len(t.chat), 2)
        t.endTranscript()
        # Messages sent after endTranscript() must not be recorded.
        glyph.directMessage("sean", "(DUP!)")
        self.failUnlessEqual(len(t.chat), 2)
class DummyWordsClient(util.LocalAsRemote):
    """A client to a perspective on the twisted.words service.

    I attach to that participant with Participant.attached(),
    and detach with Participant.detached().  All callbacks below are
    deliberate no-ops: the tests only need a client to be present.
    """
    def async_receiveContactList(self, contactList):
        """Receive a list of contacts and their status.

        The list is composed of 2-tuples, of the form
        (contactName, contactStatus)
        """
    def async_notifyStatusChanged(self, name, status):
        """Notify me of a change in status of one of my contacts.
        """
    def async_receiveGroupMembers(self, names, group):
        """Receive a list of members in a group.

        'names' is a list of participant names in the group named 'group'.
        """
    def async_setGroupMetadata(self, metadata, name):
        """Some metadata on a group has been set.

        XXX: Should this be receiveGroupMetadata(name, metadata)?
        """
    def async_receiveDirectMessage(self, sender, message, metadata=None):
        """Receive a message from someone named 'sender'.

        'metadata' is a dict of special flags. So far 'style': 'emote'
        is defined. Note that 'metadata' *must* be optional.
        """
    def async_receiveGroupMessage(self, sender, group, message, metadata=None):
        """Receive a message from 'sender' directed to a group.

        'metadata' is a dict of special flags. So far 'style': 'emote'
        is defined. Note that 'metadata' *must* be optional.
        """
    def async_memberJoined(self, member, group):
        """Tells me a member has joined a group.
        """
    def async_memberLeft(self, member, group):
        """Tells me a member has left a group.
        """
# Collected by Twisted trial's legacy test-discovery convention.
testCases = [WordsTestCase]
|
Are you an experienced Waste Management or manufacturing Operations Manager looking for a new challenge?
Due to continued success and growth, we are now looking to recruit an Operations Manager to join our team at Ford.
Joining our team, the Operations Manager will assume full management and responsibility for the Household Waste Recycling Centre and Transfer Station operations within West Sussex; ensuring at all times that they operate in accordance with statutory legislation, environmental obligations and Company procedures.
You will lead, direct and motivate the staff across all HWRC and Transfer Station sites ensuring excellent customer service for the client. The Operations Manager will plan, communicate and work with the West Sussex Logistic department and operational Materials Recycling Facility to ensure a proficient work practice across the contract.
Additionally, you will have responsibility for implementing and reviewing measures to improve recycling rates across all HWRC and Transfer Station sites; as well as maintaining procedures to ensure compliance with all legal requirements and ISO standards.
In return for your hard work as our Operations Manager, we can offer a competitive salary plus excellent benefits, including pension, healthcare and a range of flexible benefits.
|
# coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import os.path
from hashlib import sha1
import mock
from pants.base.exceptions import TargetDefinitionException
from pants.base.fingerprint_strategy import DefaultFingerprintStrategy
from pants.base.payload import Payload
from pants.base.payload_field import SetOfPrimitivesField
from pants.build_graph.address import Address
from pants.build_graph.target import Target
from pants.build_graph.target_scopes import Scopes
from pants.source.wrapped_globs import Globs
from pants_test.base_test import BaseTest
from pants_test.subsystem.subsystem_util import init_subsystem
class ImplicitSourcesTestingTarget(Target):
  """Test target type with a single implicit sources glob."""
  default_sources_globs = '*.foo'
class ImplicitSourcesTestingTargetMulti(Target):
  """Test target type with multiple implicit source globs and excludes."""
  default_sources_globs = ('*.foo', '*.bar')
  default_sources_exclude_globs = ('*.baz', '*.qux')
class SourcesTarget(Target):
  """Test target whose payload carries explicit sources plus an exports set."""

  def __init__(self, sources, address=None, exports=None, **kwargs):
    payload = Payload()
    payload.add_field('sources', self.create_sources_field(sources,
                                                           sources_rel_path=address.spec_path,
                                                           key_arg='sources'))
    payload.add_field('exports', SetOfPrimitivesField(exports))
    super(SourcesTarget, self).__init__(address=address, payload=payload, **kwargs)

  @property
  def export_specs(self):
    # Mirrors the exports payload field as a property, as real targets do.
    return self.payload.exports
class TargetTest(BaseTest):
def test_derived_from_chain(self):
# add concrete target
concrete = self.make_target('y:concrete', Target)
# add synthetic targets
syn_one = self.make_target('y:syn_one', Target, derived_from=concrete)
syn_two = self.make_target('y:syn_two', Target, derived_from=syn_one)
# validate
self.assertEquals(list(syn_two.derived_from_chain), [syn_one, concrete])
self.assertEquals(list(syn_one.derived_from_chain), [concrete])
self.assertEquals(list(concrete.derived_from_chain), [])
def test_is_synthetic(self):
# add concrete target
concrete = self.make_target('y:concrete', Target)
# add synthetic targets
syn_one = self.make_target('y:syn_one', Target, derived_from=concrete)
syn_two = self.make_target('y:syn_two', Target, derived_from=syn_one)
syn_three = self.make_target('y:syn_three', Target, synthetic=True)
self.assertFalse(concrete.is_synthetic)
self.assertTrue(syn_one.is_synthetic)
self.assertTrue(syn_two.is_synthetic)
self.assertTrue(syn_three.is_synthetic)
  def test_empty_traversable_properties(self):
    """A bare target contributes no dependency specs from its payload."""
    target = self.make_target(':foo', Target)
    self.assertSequenceEqual([], list(target.compute_dependency_specs(payload=target.payload)))
  def test_validate_target_representation_args_invalid_exactly_one(self):
    """Exactly one of kwargs/payload must be supplied: neither or both fails."""
    with self.assertRaises(AssertionError):
      Target._validate_target_representation_args(None, None)

    with self.assertRaises(AssertionError):
      Target._validate_target_representation_args({}, Payload())
  def test_validate_target_representation_args_invalid_type(self):
    """kwargs must be a dict and payload a Payload; swapped types fail."""
    with self.assertRaises(AssertionError):
      Target._validate_target_representation_args(kwargs=Payload(), payload=None)

    with self.assertRaises(AssertionError):
      Target._validate_target_representation_args(kwargs=None, payload={})
  def test_validate_target_representation_args_valid(self):
    """Either a dict of kwargs or a Payload alone is accepted."""
    Target._validate_target_representation_args(kwargs={}, payload=None)
    Target._validate_target_representation_args(kwargs=None, payload=Payload())
  def test_illegal_kwargs(self):
    """Unknown target kwargs raise UnknownArgumentError naming arg and address."""
    init_subsystem(Target.Arguments)
    with self.assertRaises(Target.Arguments.UnknownArgumentError) as cm:
      self.make_target('foo:bar', Target, foobar='barfoo')
    self.assertTrue('foobar = barfoo' in str(cm.exception))
    self.assertTrue('foo:bar' in str(cm.exception))
  def test_unknown_kwargs(self):
    """Kwargs on the subsystem's ignore list are dropped rather than rejected."""
    options = {Target.Arguments.options_scope: {'ignored': {'Target': ['foobar']}}}
    init_subsystem(Target.Arguments, options)
    target = self.make_target('foo:bar', Target, foobar='barfoo')
    self.assertFalse(hasattr(target, 'foobar'))
def test_target_id_long(self):
long_path = 'dummy'
for i in range(1,30):
long_path = os.path.join(long_path, 'dummy{}'.format(i))
long_target = self.make_target('{}:foo'.format(long_path), Target)
long_id = long_target.id
self.assertEqual(len(long_id), 200)
self.assertEqual(long_id,
'dummy.dummy1.dummy2.dummy3.dummy4.dummy5.dummy6.dummy7.dummy8.dummy9.dummy10.du.'
'c582ce0f60008b3dc8196ae9e6ff5e8c40096974.y20.dummy21.dummy22.dummy23.dummy24.dummy25.'
'dummy26.dummy27.dummy28.dummy29.foo')
def test_target_id_short(self):
short_path = 'dummy'
for i in range(1,10):
short_path = os.path.join(short_path, 'dummy{}'.format(i))
short_target = self.make_target('{}:foo'.format(short_path), Target)
short_id = short_target.id
self.assertEqual(short_id,
'dummy.dummy1.dummy2.dummy3.dummy4.dummy5.dummy6.dummy7.dummy8.dummy9.foo')
  def test_implicit_sources(self):
    """With implicit_sources on, sources default to the target type's globs."""
    options = {Target.Arguments.options_scope: {'implicit_sources': True}}
    init_subsystem(Target.Arguments, options)
    target = self.make_target(':a', ImplicitSourcesTestingTarget)
    # Note explicit key_arg.
    sources = target.create_sources_field(sources=None, sources_rel_path='src/foo/bar',
                                          key_arg='sources')
    self.assertEqual(sources.filespec, {'globs': ['src/foo/bar/*.foo']})

    target = self.make_target(':b', ImplicitSourcesTestingTargetMulti)
    # Note no explicit key_arg, which should behave just like key_arg='sources'.
    sources = target.create_sources_field(sources=None, sources_rel_path='src/foo/bar')
    self.assertEqual(sources.filespec, {
      'globs': ['src/foo/bar/*.foo', 'src/foo/bar/*.bar'],
      'exclude': [{'globs': ['src/foo/bar/*.baz', 'src/foo/bar/*.qux']}],
    })

    # Ensure that we don't use implicit sources when creating resources fields.
    resources = target.create_sources_field(sources=None, sources_rel_path='src/foo/bar',
                                            key_arg='resources')
    self.assertEqual(resources.filespec, {'globs': []})
def test_create_sources_field_with_string_fails(self):
    """A bare string for sources raises, and the error message names the
    offending field when key_arg is supplied."""
    target = self.make_target(':a-target', Target)
    # No key_arg.
    with self.assertRaises(TargetDefinitionException) as cm:
        target.create_sources_field(sources='a-string', sources_rel_path='')
    self.assertIn("Expected a glob, an address or a list, but was <type \'unicode\'>",
                  str(cm.exception))
    # With key_arg.
    with self.assertRaises(TargetDefinitionException) as cm:
        target.create_sources_field(sources='a-string', sources_rel_path='', key_arg='my_cool_field')
    self.assertIn("Expected 'my_cool_field' to be a glob, an address or a list, but was <type \'unicode\'>",
                  str(cm.exception))
    #could also test address case, but looks like nothing really uses it.
def test_max_recursion(self):
    """A dependency cycle surfaces as RecursiveDepthError rather than a
    stack overflow when hashing transitively."""
    target_a = self.make_target('a', Target)
    target_b = self.make_target('b', Target, dependencies=[target_a])
    self.make_target('c', Target, dependencies=[target_b])
    # Close the cycle a -> c -> b -> a after construction.
    target_a.inject_dependency(Address.parse('c'))
    with self.assertRaises(Target.RecursiveDepthError):
        target_a.transitive_invalidation_hash()
def test_transitive_invalidation_hash(self):
    """transitive_invalidation_hash is '<own hash>.<12-char sha1 of dep hashes>',
    chained bottom-up through the dependency graph."""
    target_a = self.make_target('a', Target)
    target_b = self.make_target('b', Target, dependencies=[target_a])
    target_c = self.make_target('c', Target, dependencies=[target_b])
    # Leaf target: the dep-hash component is the sha1 of no input at all.
    hasher = sha1()
    dep_hash = hasher.hexdigest()[:12]
    target_hash = target_a.invalidation_hash()
    hash_value = '{}.{}'.format(target_hash, dep_hash)
    self.assertEqual(hash_value, target_a.transitive_invalidation_hash())
    # b's dep-hash folds in a's full transitive hash value.
    hasher = sha1()
    hasher.update(hash_value)
    dep_hash = hasher.hexdigest()[:12]
    target_hash = target_b.invalidation_hash()
    hash_value = '{}.{}'.format(target_hash, dep_hash)
    self.assertEqual(hash_value, target_b.transitive_invalidation_hash())
    # ... and c's folds in b's, transitively.
    hasher = sha1()
    hasher.update(hash_value)
    dep_hash = hasher.hexdigest()[:12]
    target_hash = target_c.invalidation_hash()
    hash_value = '{}.{}'.format(target_hash, dep_hash)
    self.assertEqual(hash_value, target_c.transitive_invalidation_hash())
    # Check direct invalidation.
    class TestFingerprintStrategy(DefaultFingerprintStrategy):
        def direct(self, target):
            return True
    # With a "direct" strategy, c's dep-hash uses b's *own* invalidation hash
    # rather than b's transitive hash.
    fingerprint_strategy = TestFingerprintStrategy()
    hasher = sha1()
    hasher.update(target_b.invalidation_hash(fingerprint_strategy=fingerprint_strategy))
    dep_hash = hasher.hexdigest()[:12]
    target_hash = target_c.invalidation_hash(fingerprint_strategy=fingerprint_strategy)
    hash_value = '{}.{}'.format(target_hash, dep_hash)
    self.assertEqual(hash_value, target_c.transitive_invalidation_hash(fingerprint_strategy=fingerprint_strategy))
def test_has_sources(self):
    """has_sources() reflects whether the sources glob matched real files,
    optionally filtered by file extension."""
    def sources(rel_path, *args):
        return Globs.create_fileset_with_spec(rel_path, *args)
    # Only a .txt file exists on disk, so only the *.txt glob matches anything.
    self.create_file('foo/bar/a.txt', 'a_contents')
    txt_sources = self.make_target('foo/bar:txt',
                                   SourcesTarget,
                                   sources=sources('foo/bar', '*.txt'))
    self.assertTrue(txt_sources.has_sources())
    self.assertTrue(txt_sources.has_sources('.txt'))
    self.assertFalse(txt_sources.has_sources('.rs'))
    # A glob matching no files yields no sources at all.
    no_sources = self.make_target('foo/bar:none',
                                  SourcesTarget,
                                  sources=sources('foo/bar', '*.rs'))
    self.assertFalse(no_sources.has_sources())
    self.assertFalse(no_sources.has_sources('.txt'))
    self.assertFalse(no_sources.has_sources('.rs'))
def _generate_strict_dependencies(self):
    """Build the fixture graph for the strict-dependency tests.

    B depends on A and AA but exports only A; C depends on B and exports it;
    C_alias is a sources-less alias of C; D depends on (and exports) C_alias;
    F has RUNTIME scope; E depends on D and F.
    """
    init_subsystem(Target.Arguments)
    self.lib_aa = self.make_target(
        'com/foo:AA',
        target_type=SourcesTarget,
        sources=['com/foo/AA.scala'],
    )
    self.lib_a = self.make_target(
        'com/foo:A',
        target_type=SourcesTarget,
        sources=['com/foo/A.scala'],
    )
    self.lib_b = self.make_target(
        'com/foo:B',
        target_type=SourcesTarget,
        sources=['com/foo/B.scala'],
        dependencies=[self.lib_a, self.lib_aa],
        exports=[':A'],
    )
    self.lib_c = self.make_target(
        'com/foo:C',
        target_type=SourcesTarget,
        sources=['com/foo/C.scala'],
        dependencies=[self.lib_b],
        exports=[':B'],
    )
    # No sources: behaves as an alias target for C.
    self.lib_c_alias = self.make_target(
        'com/foo:C_alias',
        dependencies=[self.lib_c],
    )
    self.lib_d = self.make_target(
        'com/foo:D',
        target_type=SourcesTarget,
        sources=['com/foo/D.scala'],
        dependencies=[self.lib_c_alias],
        exports=[':C_alias'],
    )
    # RUNTIME scope: excluded from compile-time strict dependencies.
    self.lib_f = self.make_target(
        'com/foo:F',
        target_type=SourcesTarget,
        sources=['com/foo/E.scala'],
        scope=Scopes.RUNTIME
    )
    self.lib_e = self.make_target(
        'com/foo:E',
        target_type=SourcesTarget,
        sources=['com/foo/E.scala'],
        dependencies=[self.lib_d, self.lib_f],
    )
def test_strict_dependencies(self):
    """strict_dependencies = direct deps plus whatever those deps export
    (transitively), with alias targets expanded and RUNTIME-scoped deps excluded."""
    self._generate_strict_dependencies()
    dep_context = mock.Mock()
    dep_context.compiler_plugin_types = ()
    dep_context.codegen_types = ()
    # Sources-less targets of these types (e.g. C_alias) are treated as aliases.
    dep_context.alias_types = (Target,)
    dep_context.target_closure_kwargs = {'include_scopes': Scopes.JVM_COMPILE_SCOPES}
    self.assertEqual(set(self.lib_b.strict_dependencies(dep_context)), {self.lib_a, self.lib_aa})
    self.assertEqual(set(self.lib_c.strict_dependencies(dep_context)), {self.lib_b, self.lib_a})
    self.assertEqual(set(self.lib_c_alias.strict_dependencies(dep_context)), {self.lib_c, self.lib_b, self.lib_a})
    self.assertEqual(set(self.lib_d.strict_dependencies(dep_context)), {self.lib_c, self.lib_b, self.lib_a})
    # lib_f is RUNTIME-scoped, so it is filtered out of E's strict deps.
    self.assertEqual(set(self.lib_e.strict_dependencies(dep_context)), {self.lib_d, self.lib_c, self.lib_b, self.lib_a})
|
Below is our basic rate schedule, but for specific costs, give us a call and we'll discuss the details.
$70 per hour/$650 for 8-10 hour lockout.
$40 per hour/$350 for 8-10 hour lockout.
We accept Checks, Credit Cards or beads and furs.
|
import bounds
from py3D import Vector, Ray, Color, Body
from CheckPlane import CheckPlane
class ConcCircle(CheckPlane):
    """A checkered plane clipped to a disc of radius ``r`` around its origin,
    colored in alternating concentric rings of unit width."""

    def __init__(self,
                 r = 1.0,
                 normal = Vector(0.0,1.0,0.0),
                 origin = Vector(0.0,0.0,0.0),
                 orientation = Vector(1.0,0.0,0.0),
                 c1 = Color(0.01,0.01,0.01),
                 c2 = Color(0.99,0.99,0.99)):
        """Initializes plane geometry and the two ring colors."""
        CheckPlane.__init__(self, normal, origin, orientation, c1, c2)
        self.origin = origin
        self.set_orientation(orientation)
        self.r = r
        self.R = r ** 2.0  # squared radius, cached for the disc test

    def set_orientation(self, orientation):
        """Build an in-plane basis: oX toward `orientation`, oY perpendicular."""
        self.oX = (orientation - self._origin).norm()
        self.oY = self._normal.cross(self.oX).norm()
        return self

    def intersection(self, ray):
        """Ray/disc intersection distance; negative when the ray misses."""
        hit = CheckPlane.intersection(self, ray)
        if hit < 0.0:
            return hit
        # Offset of the hit point from the disc center, in plane coordinates.
        offset = ray.follow(hit).add(self.origin, -1.0)
        u = offset.dot(self.oX)
        v = offset.dot(self.oY)
        # Outside the disc counts as a miss.
        return -1.0 if u ** 2.0 + v ** 2.0 > self.R else hit

    def get_color(self, point):
        """Returns color of plane at the point (alternating unit-width rings)."""
        radial = point - self._origin
        ring = int(radial.dot(radial) ** 0.5) % 2
        return self.c1.dup() if ring == 0 else self.c2.dup()
|
The Ballyhaunis Multi Sports Network's primary aim is to bring fitness and health to the town of Ballyhaunis. In 2010, the Multi Sports Facility opened in the Friary Field. The Multi Sports Facility is the first of its kind in Ireland and was the brainchild of Peter Cunnane, who first saw a facility of this type in France. The facility is meant for many sports in one area: it can be used for soccer, basketball, tennis and volleyball. It is marked out for all these sports, and has goals for soccer, baskets for basketball and nets for tennis and volleyball. The facility also has the benefit of a roof, so it can be used in all weather, rain or shine. We have had a great response from the public about the facility and it is well used by all in the town, from young children to sporting clubs wanting to train. The facility is also very versatile: it does not have to be used as it is laid out, as one group proved by playing bocce inside it, and it was recently used as a marquee for the Abbey Pattern stalls. The ball court is free to use for all!
|
from flask import Flask, request,render_template, jsonify, Response
import random
import glob
import os
import uuid
import urllib2
import time
import httplib, urllib
import requests
import json
# Flask application instance; all routes below are registered on it.
# NOTE: this module is Python 2 only (urllib2/httplib imports, print statements).
app = Flask(__name__)
@app.route("/upload_phrase", methods=['POST'])
def classify_url():
if request.method == 'POST':
phrase = request.form['phrase']
phrase = parse_and_dump.get_language_text(phrase)
counter = len(phrase)
test_splice_text.wrapper_main(phrase)
# time.sleep(counter / 2)
print phrase
return jsonify({'phrase':phrase})
else:
#get 10 most similar and return
return
@app.route("/")
def index_main():
print "rendering website"
return render_template('index.html', name = "hahahahahahah")
@app.route("/add_activity",methods=["POST"])
def add_activity(req = None):
print "rendering post activity"
# print req
# print request.form["description"]
# print request
name = request.form['activity_name']
print name, " activity_name"
description = request.form['description']
print description , " desc"
try:
capacity = int(request.form['capacity'])
except:
capacity = 12
print capacity, "capacity"
location = request.form['location']
print location , "location"
x = request.form['loclat']
print x, "loclat"
y = request.form['loclong']
print y, "locLong"
point = str(x) + "," + str(y)
print point, "point"
start_time = request.form['start_time']
end_time = request.form['end_time']
owner = 555
category = request.form['category']
data_r = {
"name" : name,
"desc" : description,
"cap" : capacity,
"loc" : location,
"point" : point,
"start" : start_time,
"end" : end_time,
"owner" : owner,
"categ" : category
}
data_r_json = json.dumps(data_r)
r = requests.post("http://10.10.200.66:8080/activity", data= data_r_json)
print(r.status_code, r.reason)
return render_template('submit_form.html', name = "add_activity_meh")
def gen(camera):
    """Generator yielding an MJPEG multipart stream of frames from `camera`."""
    while True:
        frame = camera.get_frame()
        # Each chunk is one JPEG frame framed by the multipart boundary.
        yield (b'--frame\r\n'
               b'Content-Type: image/jpeg\r\n\r\n' + frame + b'\r\n\r\n')
@app.route('/video_feed')
def video_feed():
    """Stream MJPEG frames as a multipart HTTP response.

    NOTE(review): `VideoCamera` is not defined or imported in this file --
    this route raises NameError as written; confirm the missing import.
    """
    return Response(gen(VideoCamera()),
                    mimetype='multipart/x-mixed-replace; boundary=frame')
if __name__ == "__main__":
app.run(host = '0.0.0.0', debug = True)
|
Have a story to tell that you think would make a good episode? Don't be shy. We love to talk to strangers.
Sign up to receive all news & updates.
|
import datetime
from django.db import models
from tours.models import Category, Tour
from easy_thumbnails.fields import ThumbnailerImageField
from django.utils.translation import ugettext as _
from django.core.urlresolvers import reverse
class Article(models.Model):
    """News article with title/body in three languages (PT/EN/DE), optional
    links to a tour and a category, SEO metadata and a thumbnail image."""

    title_PT = models.CharField(_('Title PT'), max_length=100, blank=True, null=False)
    title_EN = models.CharField(_('Title EN'), max_length=100, blank=True, null=False)
    title_DE = models.CharField(_('Title DE'), max_length=100, blank=True, null=False)
    description_PT = models.TextField(_('Description PT'), max_length=5000, blank=True, null=False)
    description_EN = models.TextField(_('Description EN'), max_length=5000, blank=True, null=False)
    description_DE = models.TextField(_('Description DE'), max_length=5000, blank=True, null=False)
    category = models.ForeignKey(Category, default=None, blank=True, null=True)
    tour = models.ForeignKey(Tour, default=None, blank=True, null=True)
    link = models.URLField(max_length=100, blank=True, null=False)
    img = ThumbnailerImageField(_('Article thumbnail'), null=True, blank=True)
    keywords_SEO = models.TextField(_('Article keywords for SEO'), max_length=5000, blank=True, null=False)
    description_SEO = models.TextField(_('Article description for SEO'), max_length=5000, blank=True, null=False)
    # BUG FIX: was `default=datetime.datetime.now()`, which evaluated once at
    # class-definition (i.e. server start) time, stamping every new Article
    # with that same moment. Passing the callable defers evaluation until each
    # row is created.
    created_on = models.DateTimeField(default=datetime.datetime.now, blank=True)

    def get_absolute_url(self):
        """URL of the public detail view for this article."""
        return reverse('news:detail', args=[str(self.id)])

    def get_edit_url(self):
        """URL of the edit view for this article."""
        return reverse('news:edit', args=[str(self.id)])

    def get_delete_url(self):
        """URL of the delete view for this article."""
        return reverse('news:delete', args=[str(self.id)])

    def __str__(self):
        return self.title_EN

    def __unicode__(self):  # Python 2 compatibility
        return self.title_EN

    class Meta:
        # Newest articles first.
        ordering = ['-created_on']
|
The Canadian economist, John Kenneth Galbraith is credited with coining the phrase “The Conventional Wisdom”. This means concepts that are assumed to be true because so many people believe them. In general, we spend most of our lives happily conforming to a set of conventions that we rarely, if ever, question.
Here on the seafront, Velopi likes to ensure that the material it provides in courses, such as its Project Management Professional Exam Preparation offering, are based on sound principles. Because of this, every so often someone in the office asks what seems like a blindingly obvious question – but one that often fails to find an obvious answer. Today, as we welcome in 2016, we ponder over another of these philosophical questions: why do we plan projects?
Trained project managers will, at this point, roar with laughter – planning has become second nature. But is this just Conventional Wisdom, or have we genuinely good reasons for spending so much time on this activity? Well the Project Management Institute seems to think so: In its Guide to the Project Management Body of Knowledge (PMBOK© Guide), it lists no fewer than twenty-four planning processes.
But, given that all projects have some unique aspect and often projects have a research element, making accurate planning difficult, would we not be better off just getting stuck in and working things out as we go along? Certainly, this has been standard practice in many software development companies, where prototypes are often wrestled into commercial products after much debugging. They argue that the pace of change is so fast they just do not have time for planning. Interestingly, they seem to have all the time in the world for fixing these products when they do appear.
For project managers, what is lacking here is evidence. While the ad-hoc approach has led to amazing scientific discoveries, the results of such projects often are not what were expected. Without plans, we are unable to determine if we achieved what we set out to achieve. Another issue relates to expectations. If the boss sets a team off to develop a new product, are we supposed to come up with a prototype or proof of concept? Or do we need to go further and optimize the product for manufacture and easy maintenance? A clear scope statement, where the exact deliverables to be provided are explicitly listed can save a lot of argument at the end of the day.
Some customers are adept at exploiting vague requirements. They sign off on a set of requirements that are neither testable nor measurable and then, when the project is near completion, they look at what they are getting with surprise and disappointment: “But I assumed that this would include …” How many companies have seen their profit margin wiped out because they did not state clearly what was in and what was out of scope? The problem here is: if no one said something was out of scope, then we cannot argue the case at the end of the project – we have to meet the customer’s expectations. However, if we state at the outset – during the scope planning work – that something is not included, the customer has to ask for it explicitly. This gives us the opportunity to charge for the extra work – a much happier outcome.
The schedule is another area where we might question the need for planning. This is especially true when an inexperienced team embarks on a series of tasks that it has not attempted before. Realistically, how can they estimate how long this work will take? Again, project management shows how Project Time Management can help, even if we do not estimate. Our work breakdown structure has divided the project into more manageable work packages. Now we need to break these down further into schedule activities or tasks. This effort alone – determining exactly what needs to be done – can show up serious gaps in our thinking. However, the next step in the process – sequencing the activities – allows us to include mandatory and discretionary dependencies. A network diagram with no estimates can show the amount of parallelism in the project and where extra staff can help accelerate the work.
Relating the requirements to the work breakdown structure and then tracing them through the schedule allows us to show how requirements were met. And this is really the crucial element of planning: providing evidence. If there are any complaints at the end of the project – a customer reluctant to accept the product, service or result for instance, or a project sponsor who regards the project as a failure – a sound set of plans will allow the project manager to prove that the work was actually done. Even in a project where uncertainty is high, planning helps. Setting up an effective change control regime will facilitate change while still remaining in control.
Even without exploring all twenty-four planning processes, the case for project planning can be made. So it is safe to say that project planning does not come under the heading of conventional wisdom; it is wisdom, full stop!
|
import warnings
from abc import abstractmethod
from typing import Dict, Any
from jsonschema import validate
from pykechain.models.validators.validator_schemas import representation_jsonschema_stub
class BaseRepresentation(object):
    """
    Base class for all Representations.

    :cvar jsonschema: jsonschema to validate the json of the representation
    :type jsonschema: Dict
    :cvar rtype: type of representation
    :type rtype: Text
    """

    jsonschema = representation_jsonschema_stub
    # Subclasses override these: `rtype` is the type discriminator stored in the
    # json; `_config_value_key` is the key under `config` that holds the value.
    rtype = None
    _config_value_key = None

    def __init__(self, obj=None, json=None, value=None, prop=None):
        """
        Construct a base representation.

        :param obj: the object to which the representation is applied, such as a property.
        :type obj: Base
        :param json: representation json (usually part of the original object json)
        :type json: dict
        :param value: value of the representation, its options vary per representation type
        :type value: Any
        :param prop: deprecated keyword for obj
        :type prop: Property
        """
        if prop is not None:
            warnings.warn(
                "Keyword `prop` is deprecated in favor of `obj`.",
                PendingDeprecationWarning,
            )
            obj = prop
            del prop
        self._obj = obj
        self._json: dict = json or dict(rtype=self.rtype, config=dict())
        # `_config` deliberately aliases `_json["config"]`: mutating `_config`
        # (see the `value` setter) updates the json representation in place.
        self._config: dict = self._json.get("config", dict())
        self._json["config"] = self._config
        if value is not None:
            # Validate before storing so an invalid value never lands in the config.
            self.validate_representation(value)
            self._config[self._config_value_key] = value

    def __repr__(self):
        return "{} ({})".format(self.__class__.__name__, self.value)

    def as_json(self) -> Dict:
        """Parse the validator to a proper validator json."""
        return self._json

    def validate_json(self) -> Any:
        """Validate the json representation of the validator against the validator jsonschema."""
        return validate(self._json, self.jsonschema)

    @classmethod
    def parse(cls, obj: Any, json: Dict) -> "BaseRepresentation":
        """Parse a json dict and return the correct subclass of :class:`BaseRepresentation`.

        It uses the 'rtype' key to determine which :class:`BaseRepresentation` to instantiate.

        :param obj: object to which the `BaseRepresentation` belongs.
        :type: prop: Base
        :param json: dictionary containing the specific keys to parse into a :class:`BaseRepresentation`
        :type json: dict
        :returns: the instantiated subclass of :class:`BaseRepresentation`
        :rtype: :class:`BaseRepresentation` or subclass thereof
        """
        try:
            rtype = json["rtype"]
        except KeyError:
            raise ValueError(
                "Representation unknown, incorrect json: '{}'".format(json)
            )
        try:
            # Imported here rather than at module level -- presumably to avoid
            # a circular import with the `representations` module; confirm.
            from pykechain.models.representations import rtype_class_map

            repr_class: type(BaseRepresentation) = rtype_class_map[rtype]
        except KeyError:
            raise TypeError('Unknown rtype "{}" in json'.format(rtype))
        return repr_class(obj=obj, json=json)

    @property
    def value(self):
        """
        Retrieve current representation value.

        :return: value
        :rtype Any
        """
        # The base class defines no `_config_value_key`, so it reports None.
        return self._config[self._config_value_key] if self._config_value_key else None

    @value.setter
    def value(self, value):
        """
        Set a new representation value.

        :param value: the new value to be set
        :type value: Any
        :return: the value
        :rtype Any
        """
        self.validate_representation(value)
        self._config[self._config_value_key] = value
        # Update the property in-place: re-assigning `representations` triggers
        # the owner's setter so the mutated config is persisted upstream.
        if self._obj:
            self._obj.representations = self._obj.representations

    @abstractmethod
    def validate_representation(self, value: Any) -> None:
        """
        Validate whether the representation value can be set.

        :param value: representation value to set.
        :type value: Any
        :raises IllegalArgumentError
        :return: None
        """
        pass  # pragma: no cover
|
Today, I’m sharing a story with you of a remarkable young woman who got out of $30,000 worth of debt in 96 days.
Mary is a Financial Coach who runs her own business called Abundant Wealth Solutions where she helps people eliminate their student loan debt and achieve financial freedom.
Mary didn’t always have a strong grasp on her finances, though. She actually grew up in the projects in Mississippi where she says she was surrounded by a culture of consumption. It took her years to figure out a way to climb out of debt and build her own net worth.
To learn more about Mary, visit AbundantWealthSolutions.com and find her on Twitter @AbundantWS.
|
import os
import inspect
import json
from ruamel import yaml
from .models import Intent, Entity
class SchemaHandler(object):
    """Reads and writes the JSON schema files and YAML template files backing
    an assistant's intents/entities.

    Subclasses set ``object_type`` ('intents' or 'entities'), which names the
    JSON schema file kept under ``<app root>/schema/``.
    """

    def __init__(self, assist, object_type=None):
        self.assist = assist
        self.intents = []
        self.api = assist.api
        self.object_type = object_type

    # File set up
    def get_or_create_dir(self, dir_name):
        """Return the path of `dir_name` under the app root, creating it if needed."""
        d = os.path.join(self.assist.app.root_path, dir_name)
        if not os.path.isdir(d):
            os.mkdir(d)
        return d

    @property
    def schema_dir(self):
        return self.get_or_create_dir('schema')

    @property
    def json_file(self):
        """Path of '<object_type>.json' in the schema dir; touched into existence."""
        file_name = '{}.json'.format(self.object_type)
        f = os.path.join(self.schema_dir, file_name)
        if not os.path.isfile(f):
            open(f, 'w+').close()
        return f

    @property
    def saved_schema(self):
        """Previously dumped schema (a list), or [] when the file is empty/invalid."""
        with open(self.json_file, 'r') as f:
            try:
                return json.load(f)
            except ValueError:
                # Handles empty/corrupt files on Python 2 and 3 alike:
                # json.JSONDecodeError subclasses ValueError, so the former
                # separate `except json.decoder.JSONDecodeError` branch was
                # unreachable and has been removed.
                return []

    @property
    def registered(self):
        """Saved schema entries that already carry an API id (None if no schema)."""
        if self.saved_schema:
            return [i for i in self.saved_schema if i and i.get('id')]

    def dump_schema(self, schema):
        """Pretty-print `schema` (a list of dicts) to the JSON schema file."""
        print('Writing schema json to file')
        with open(self.json_file, 'w') as f:
            json.dump(schema, f, indent=4)

    # templates
    @property
    def template_dir(self):
        return self.get_or_create_dir('templates')

    def template_file(self, template_type):
        """Path of '<template_type>.yaml' in the template dir; touched into existence."""
        file_name = '{}.yaml'.format(template_type)
        f = os.path.join(self.template_dir, file_name)
        if not os.path.isfile(f):
            open(f, 'w+').close()
        return f

    @property
    def user_says_template(self):
        return self.template_file('user_says')

    @property
    def entity_template(self):
        return self.template_file('entities')

    def load_yaml(self, template_file):
        """Parse a YAML template file; returns [] (after logging) on parse errors."""
        with open(template_file) as f:
            try:
                return yaml.safe_load(f)
            except yaml.YAMLError as e:
                print(e)
                return []

    def user_says_yaml(self):
        return self.load_yaml(self.user_says_template)

    def entity_yaml(self):
        return self.load_yaml(self.entity_template)

    def grab_id(self, obj_name):
        """Return the saved API id for `obj_name`, or None if not registered."""
        if self.registered:
            for obj in self.registered:
                if obj['name'] == obj_name:
                    return obj['id']
class IntentGenerator(SchemaHandler):
    """Builds Intent objects from the assistant's registered action functions,
    merges in UserSays/Annotations/Events from the YAML templates, and syncs
    each intent with the agent API."""

    def __init__(self, assist):
        super(IntentGenerator, self).__init__(assist, object_type='intents')

    @property
    def app_intents(self):
        """Returns a list of Intent objects created from the assistant's action functions"""
        from_app = []
        for intent_name in self.assist._intent_action_funcs:
            intent = self.build_intent(intent_name)
            from_app.append(intent)
        return from_app

    def build_intent(self, intent_name):
        """Builds an Intent object of the given name"""
        # TODO: contexts
        is_fallback = self.assist._intent_fallbacks[intent_name]
        contexts = self.assist._required_contexts[intent_name]
        events = self.assist._intent_events[intent_name]
        new_intent = Intent(intent_name, fallback_intent=is_fallback, contexts=contexts, events=events)
        self.build_action(new_intent)
        self.build_user_says(new_intent)  # TODO
        return new_intent

    def build_action(self, intent):
        """Attach the view function's name and parsed parameters as the intent's action."""
        action_name = self.assist._intent_action_funcs[intent.name][0].__name__
        params = self.parse_params(intent.name)
        intent.add_action(action_name, parameters=params)

    def parse_params(self, intent_name):
        """Parses params from an intent's action decorator and view function.

        Returns a list of parameter field dicts to be included in the intent object's response field.
        """
        params = []
        action_func = self.assist._intent_action_funcs[intent_name][0]
        # FIX: inspect.getargspec was deprecated and removed in Python 3.11;
        # getfullargspec exposes the same .args/.defaults attributes used here.
        argspec = inspect.getfullargspec(action_func)
        param_entity_map = self.assist._intent_mappings.get(intent_name)

        args, defaults = argspec.args, argspec.defaults
        # Map each keyword argument to its default so we can mark it optional.
        default_map = {}
        if defaults:
            default_map = dict(zip(args[-len(defaults):], defaults))

        for arg in args:
            param_info = {}
            # An unmapped argument falls back to its own name as the entity.
            param_entity = param_entity_map.get(arg, arg)
            param_name = param_entity.replace('sys.', '')

            param_info['name'] = param_name
            param_info['value'] = '$' + param_name
            param_info['dataType'] = '@' + param_entity
            param_info['prompts'] = []  # TODO: fill in provided prompts
            param_info['required'] = arg not in default_map
            param_info['isList'] = isinstance(default_map.get(arg), list)
            if param_info['isList']:
                param_info['defaultValue'] = ''
            else:
                param_info['defaultValue'] = default_map.get(arg, '')

            params.append(param_info)
        return params

    def get_synonyms(self, annotation, entity):
        """Yield the synonyms declared for `annotation` under `entity` in entities.yaml."""
        raw_temp = self.entity_yaml()
        for temp_dict in [d for d in raw_temp if d == entity]:
            for entry in raw_temp.get(temp_dict):
                if isinstance(entry, dict):
                    for a, s in entry.items():
                        if a == annotation:
                            for synonym in s:
                                yield(synonym)

    def build_user_says(self, intent):
        """Add example phrases, annotation mappings and events from user_says.yaml."""
        raw = self.user_says_yaml()
        intent_data = raw.get(intent.name)
        if intent_data:
            phrases = intent_data.get('UserSays', [])
            annotations = intent_data.get('Annotations', [])
            events = intent_data.get('Events', [])

            # Build a text -> entity map covering annotated values and their synonyms.
            mapping = {}
            for a in [a for a in annotations if a]:
                for annotation, entity in a.items():
                    mapping.update({str(annotation): str(entity)})
                    for synonym in self.get_synonyms(annotation, entity):
                        mapping.update({str(synonym): str(entity)})

            for phrase in [p for p in phrases if p]:
                if phrase != '':
                    intent.add_example(phrase, templ_entity_map=mapping)

            for event in [e for e in events if e]:
                intent.add_event(event)

    def push_intent(self, intent):
        """Registers or updates an intent and returns the intent_json with an ID"""
        if intent.id:
            print('Updating {} intent'.format(intent.name))
            self.update(intent)
        else:
            print('Registering {} intent'.format(intent.name))
            intent = self.register(intent)
        return intent

    def register(self, intent):
        """Registers a new intent and returns the Intent object with an ID"""
        response = self.api.post_intent(intent.serialize)
        print(response)
        print()
        if response['status']['code'] == 200:
            intent.id = response['id']
        elif response['status']['code'] == 409:  # intent already exists
            # Recover the existing id from the agent and push an update instead.
            intent.id = next(i.id for i in self.api.agent_intents if i.name == intent.name)
            self.update(intent)
        return intent

    def update(self, intent):
        """PUT the serialized intent to the agent; returns the response on success."""
        response = self.api.put_intent(intent.id, intent.serialize)
        print(response)
        print()
        if response['status']['code'] == 200:
            return response

    def generate(self):
        """Build every app intent, sync it with the API and dump the schema JSON."""
        print('Generating intent schema...')
        schema = []
        for intent in self.app_intents:
            intent.id = self.grab_id(intent.name)
            intent = self.push_intent(intent)
            schema.append(intent.__dict__)
        self.dump_schema(schema)
class EntityGenerator(SchemaHandler):
    """Builds Entity objects from the entities.yaml template and syncs them
    with the agent API."""

    def __init__(self, assist):
        super(EntityGenerator, self).__init__(assist, object_type='entities')

    def build_entities(self):
        """Yield an Entity for every top-level name in the entities template."""
        raw_temp = self.entity_yaml()
        for entity_name in raw_temp:
            e = Entity(entity_name)
            self.build_entries(e, raw_temp)
            yield e

    def build_entries(self, entity, temp_dict):
        """Populate `entity` with entries from the raw template dict.

        A dict entry ({value: [synonyms]}) is a mapping entry; a bare value
        marks the whole entity as an enum, with the value as its own synonym.
        """
        entries = temp_dict.get(entity.name, [])
        for entry in entries:
            if isinstance(entry, dict):  # mapping
                (value, synyms), = entry.items()
            else:  # enum/composite
                entity.isEnum = True
                value = entry
                synyms = [entry]
            entity.add_entry(value, synyms)

    def register(self, entity):
        """Registers a new entity and returns the entity object with an ID"""
        response = self.api.post_entity(entity.serialize)
        print(response)
        print()
        if response['status']['code'] == 200:
            entity.id = response['id']
        # CONSISTENCY FIX: was a second independent `if`; 200 and 409 are
        # mutually exclusive and IntentGenerator.register uses elif.
        elif response['status']['code'] == 409:  # entity already exists
            entity.id = next(i.id for i in self.api.agent_entities if i.name == entity.name)
            self.update(entity)
        return entity

    def update(self, entity):
        """PUT the serialized entity to the agent; returns the response on success."""
        response = self.api.put_entity(entity.id, entity.serialize)
        print(response)
        print()
        if response['status']['code'] == 200:
            return response

    def push_entity(self, entity):
        """Registers or updates an entity and returns the entity_json with an ID"""
        if entity.id:
            print('Updating {} entity'.format(entity.name))
            self.update(entity)
        else:
            print('Registering {} entity'.format(entity.name))
            entity = self.register(entity)
        return entity

    def generate(self):
        """Build all template entities, sync each with the API and dump the schema."""
        print('Generating entity schema...')
        schema = []
        for entity in self.build_entities():
            entity.id = self.grab_id(entity.name)
            entity = self.push_entity(entity)
            schema.append(entity.__dict__)
        self.dump_schema(schema)
class TemplateCreator(SchemaHandler):
    """Creates skeleton YAML template files (user_says.yaml / entities.yaml)
    for the assistant's intents when none exist yet."""

    def __init__(self, assist):
        super(TemplateCreator, self).__init__(assist)
        self.assist = assist

    def generate(self):
        """Create each skeleton only when its YAML file is missing or empty."""
        if not self.user_says_yaml():
            self.create_user_says_skeleton()
        if not self.entity_yaml():
            self.create_entity_skeleton()

    def get_or_create_dir(self, dir_name):
        """Like the base version, but also supports blueprint-backed assistants."""
        try:
            root = self.assist.app.root_path
        except AttributeError:  # for blueprints
            root = self.assist.blueprint.root_path
        d = os.path.join(root, dir_name)
        if not os.path.isdir(d):
            os.mkdir(d)
        return d

    @property
    def template_dir(self):
        return self.get_or_create_dir('templates')

    @property
    def user_says_exists(self):
        # NOTE(review): `_user_says_exists` is never assigned anywhere in this
        # class, so accessing this property raises AttributeError -- it looks
        # like dead/unfinished code; confirm before relying on it.
        return self._user_says_exists

    def parse_annotations_from_action_mappings(self, intent_name):
        """Return [{param: entity}, ...] pairs from the intent's action mappings."""
        annotations = []
        entity_map = self.assist._intent_mappings.get(intent_name, {})
        for param in entity_map:
            annotations.append({param: entity_map[param]})
        return annotations

    def create(self, user_says=True, entities=True):
        """Unconditionally (re)create the selected skeleton templates."""
        if user_says:
            self.create_user_says_skeleton()
        if entities:
            self.create_entity_skeleton()

    def create_user_says_skeleton(self):
        """Append a commented how-to header plus one empty UserSays/Annotations/
        Events stanza per intent to templates/user_says.yaml."""
        template = os.path.join(self.template_dir, 'user_says.yaml')

        skeleton = {}
        for intent in self.assist._intent_action_funcs:
            d = yaml.compat.ordereddict()
            d['UserSays'] = [None, None]
            d['Annotations'] = [None, None]
            d['Events'] = [None]
            data = yaml.comments.CommentedMap(d)  # to preserve order w/o tags
            skeleton[intent] = data

        with open(template, 'a') as f:
            f.write('# Template for defining UserSays examples\n\n')
            f.write('# give-color-intent:\n\n')
            f.write('# UserSays:\n')
            f.write('# - My color is blue\n')
            f.write('# - red is my favorite color\n\n')
            f.write('# Annotations:\n')
            f.write('# - blue: sys.color # maps param value -> entity\n')
            f.write('# - red: sys.color\n\n')
            f.write('# Events:\n')
            f.write('# - event1 # adds a triggerable event named \'event1\' to the intent\n\n\n\n')
            yaml.dump(skeleton, f, default_flow_style=False, Dumper=yaml.RoundTripDumper)

    def create_entity_skeleton(self):
        """Write templates/entities.yaml with a how-to header and one empty
        entry per non-system parameter used by the app's action functions."""
        print('Creating Template for Entities')
        template = os.path.join(self.template_dir, 'entities.yaml')
        message = """# Template file for entities\n\n"""

        skeleton = {}
        for intent in self.assist._intent_action_funcs:
            entity_map = self.assist._intent_mappings.get(intent)
            action_func = self.assist._intent_action_funcs[intent][0]
            # FIX: inspect.getargspec was removed in Python 3.11;
            # getfullargspec is the drop-in replacement for `.args`.
            args = inspect.getfullargspec(action_func).args
            # dont add API 'sys' entities to the template
            if entity_map:
                args = [a for a in args if 'sys.' not in entity_map.get(a, [])]
            for param in [p for p in args if p not in skeleton]:
                skeleton[param] = [None, None]

        with open(template, 'w') as f:
            f.write(message)
            f.write('#Format as below\n\n')
            f.write("# entity_name:\n")
            f.write("# - entry1: list of synonyms \n")
            f.write("# - entry2: list of synonyms \n\n")
            f.write("#For example:\n\n")
            f.write("# drink:\n")
            f.write("# - water: ['aqua', 'h20'] \n")
            f.write("# - coffee: ['joe', 'caffeine', 'espresso', 'late'] \n")
            f.write("# - soda: ['pop', 'coke']\n\n\n\n")
            yaml.dump(skeleton, f, default_flow_style=False, Dumper=yaml.RoundTripDumper)
|
For the past several years, the United States has been debating freedom of speech. Some argue that encouraging dissenting points of view to be expressed, even the most hateful, ensures a healthy democracy. Others argue that speech that is offensive or hateful limits room in the public square for historically marginalized voices, while allowing it helps odious political views gain respectability.
These debates for and against free speech focus largely on what citizens and policymakers ought to do. They take a normative stance on the question of freedom of speech. In recently published research, however, we tried to see whether there is an empirical link between a climate of support for freedom of speech and indicators of the frequency of debate and dissent. In doing so, our goal is to test whether freedom of speech does in fact help to bolster democratic vitality.
We begin by measuring support for the principle of freedom of speech in American communities. We used public opinion data from the Freedom and Tolerance surveys, which are nationally representative surveys of the U.S. public, conducted every year between 2007 and 2011. The relevant survey questions focus on the broader concept of “political tolerance” — the degree to which citizens will support the extension of civil liberties to all, including those who advocate viewpoints that are highly disagreeable to respondents.
The first set of questions focuses on people’s support for civil liberties in general: whether citizens should have to carry around a national identity card at all times, whether high school teachers should be required to defend U.S. policies, whether the government should be allowed to monitor telephone calls and emails to prevent terrorist or criminal acts, and whether law enforcement officials should be allowed to use racial profiling and investigate non-violent protesters. The second set of questions asks if a group that the respondent strongly dislikes should be allowed to make a speech in the community, run for office, or hold a public rally. We combine responses to these eight questions into a single index of political tolerance.
Although our pooled survey data set is fairly large, with around 4,000 respondents, it is not large enough to allow us to split the data by respondents’ communities of residence. Instead, we apply a method known as multilevel regression with poststratification to the pooled survey data. This enables us to reliably measure support for freedom of speech and other civil liberties in 365 U.S. metropolitan areas.
As you can see below, the map shows some surprising findings. Major metropolitan areas, such as New York/Northern New Jersey/Long Island and Chicago/Naperville/Elgin, are not particularly supportive of free speech and civil liberties. The most tolerant metro areas are instead college towns such as Boulder, Colo.; Ithaca, N.Y.; and Corvallis, Ore. As previous research on tolerance in the states has found, regional patterns are also evident, with tolerance predominating in the upper Midwest and Mountain West, and intolerance in the South.
Next, to measure dissent in these metropolitan areas, we used data on protest incidence from the GDELT project, which uses software to scan news media for stories on political events. We counted the number of protests recorded by GDELT in each metropolitan area between 2007 and 2011. We divided this by the population of each metro area, and used it as a measure of publicly expressed dissent in that area.
More support for freedom of speech is associated with more public protests. But which comes first?
As you can see in the figure below, the rate of protest increases in step with the level of a metropolitan area's political tolerance.
But this doesn’t tell us why they are associated with one another. Which comes first? Does support for freedom of speech and other civil liberties lead to more protest, or does protesting increase political tolerance? Do they reinforce one another, or is there some other relationship between the two that makes the same area more likely to host both?
To help us find out, we use a regression model to adjust for other factors that might affect the rate of protest, such as the ideological leaning and proportion of students in the area. Even accounting for those factors, we found that tolerance and protest are closely related. We then examined whether this association between tolerance and protest holds when we take the rate of protest in the preceding years into account. It does. Ultimately, although our data do not allow us to firmly conclude that tolerant metropolitan areas cause increased rates of protest, higher political tolerance and higher rates of protest are closely linked.
These findings are consistent with earlier research showing that Americans living in politically tolerant neighborhoods feel freer to engage in public expression of politics. What’s especially interesting is that African Americans living in neighborhoods more tolerant of racist speech also feel greater freedom to express themselves politically.
What are the implications of these findings for democratic vitality?
These studies suggest that local support for freedom of speech emboldens citizens living in these areas to participate in political life. If this is true, it would support a classic argument for freedom of speech and political tolerance more generally: that it encourages all views to be expressed. Limiting the freedom to express unpleasant opinions, the argument goes, can create a creeping conformity that spreads, in a “spiral of silence,” as people learn that it is best to simply keep their mouths shut. While our findings do not suggest where exactly these limitations should lie, they do bolster the argument for putting up with a variety of unwelcome speech if one’s goal is to invigorate political engagement.
Christopher Claassen is a lecturer in politics at the University of Glasgow.
James L. Gibson is Sidney W. Souers Professor of Government at Washington University in St. Louis and Fellow, Centre for Comparative and International Politics and Professor Extraordinary in Political Science at Stellenbosch University (South Africa).
The data used in the reported research were funded by the National Science Foundation (SES 1228619) and the Weidenbaum Center on the Economy, Government, and Public Policy at Washington University in St. Louis.
|
# This file contains the base models that individual versioned models
# are based on. They shouldn't be directly used with Api objects.
# stdlib, alphabetical
import json
import logging
import os
import pprint
import re
import shutil
import urllib
# Core Django, alphabetical
from django.conf import settings
from django.conf.urls import url
from django.core.exceptions import ObjectDoesNotExist, MultipleObjectsReturned
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
from django.forms.models import model_to_dict
from django.utils.translation import ugettext as _
from django.utils import six
# Third party dependencies, alphabetical
import bagit
from tastypie.authentication import (
BasicAuthentication,
ApiKeyAuthentication,
MultiAuthentication,
SessionAuthentication,
)
from tastypie.authorization import DjangoAuthorization
import tastypie.exceptions
from tastypie import fields
from tastypie import http
from tastypie.resources import ModelResource, ALL, ALL_WITH_RELATIONS
from tastypie.validation import CleanedDataFormValidation
from tastypie.utils import trailing_slash, dict_strip_unicode_keys
# This project, alphabetical
from administration.models import Settings
from common import utils
from locations.api.sword import views as sword_views
from ..models import (
Callback,
CallbackError,
Event,
File,
Package,
Location,
LocationPipeline,
Space,
Pipeline,
StorageException,
Async,
PosixMoveUnsupportedError,
)
from ..forms import SpaceForm
from ..constants import PROTOCOL
from locations import signals
from ..models.async_manager import AsyncManager
# Module-level logger shared by every resource defined in this file.
LOGGER = logging.getLogger(__name__)
# FIXME ModelResources with ForeignKeys to another model don't work with
# validation = CleanedDataFormValidation On creation, it errors with:
# "Select a valid choice. That choice is not one of the available choices."
# This is because the ModelResource accepts a URI, but does not convert it to a
# primary key (in our case, UUID) before passing it to Django.
# See https://github.com/toastdriven/django-tastypie/issues/152 for details
def _custom_endpoint(expected_methods=("get",), required_fields=()):
    """
    Decorator factory for custom endpoints that handles boilerplate code.

    Checks that the HTTP method is allowed, that the request is authenticated
    and within throttle limits, looks the object up by its UUID, deserializes
    the request body, and can require fields in the body.

    The decorated endpoint must accept ``request`` and ``bundle``.

    :param expected_methods: iterable of HTTP method names allowed here.
    :param required_fields: iterable of keys that must appear in the body.
    """
    # NOTE: defaults are immutable tuples, not lists, so a shared mutable
    # default can never leak state between decorated endpoints.
    def decorator(func):
        """ The decorator applied to the endpoint """
        def wrapper(resource, request, **kwargs):
            """ Wrapper for custom endpoints with boilerplate code. """
            # Tastypie API checks
            resource.method_check(request, allowed=expected_methods)
            resource.is_authenticated(request)
            resource.throttle_check(request)
            # Get object
            try:
                obj = resource._meta.queryset.get(uuid=kwargs["uuid"])
            except ObjectDoesNotExist:
                return http.HttpNotFound(
                    _("Resource with UUID %(uuid)s does not exist")
                    % {"uuid": kwargs["uuid"]}
                )
            except MultipleObjectsReturned:
                return http.HttpMultipleChoices(
                    _("More than one resource is found at this URI.")
                )
            # Get body content
            try:
                deserialized = resource.deserialize(
                    request,
                    request.body,
                    format=request.META.get("CONTENT_TYPE", "application/json"),
                )
                deserialized = resource.alter_deserialized_detail_data(
                    request, deserialized
                )
            except Exception:
                # Trouble decoding request body - may not actually exist
                deserialized = []
            # Check required fields, if any
            if not all(k in deserialized for k in required_fields):
                # Don't have enough information to make the request - return error
                return http.HttpBadRequest(
                    _("All of these fields must be provided: %(fields)s")
                    % {"fields": ", ".join(required_fields)}
                )
            # Build bundle and return it
            bundle = resource.build_bundle(obj=obj, data=deserialized, request=request)
            bundle = resource.alter_detail_data_to_serialize(request, bundle)
            # Call the decorated method
            result = func(resource, request, bundle, **kwargs)
            resource.log_throttled_access(request)
            return result
        return wrapper
    return decorator
class PipelineResource(ModelResource):
    """REST resource exposing Archivematica pipelines.

    The list endpoint supports GET/POST; the detail endpoint is GET-only.
    API credentials are accepted on creation but never serialized back.
    """
    # Attributes used for POST, exclude from GET
    create_default_locations = fields.BooleanField(use_in=lambda x: False)
    shared_path = fields.CharField(use_in=lambda x: False)
    class Meta:
        queryset = Pipeline.active.all()
        authentication = MultiAuthentication(
            BasicAuthentication(), ApiKeyAuthentication(), SessionAuthentication()
        )
        authorization = DjangoAuthorization()
        # validation = CleanedDataFormValidation(form_class=PipelineForm)
        resource_name = "pipeline"
        fields = ["uuid", "description", "remote_name", "api_key", "api_username"]
        list_allowed_methods = ["get", "post"]
        detail_allowed_methods = ["get"]
        detail_uri_name = "uuid"
        always_return_data = True
        filtering = {"description": ALL, "uuid": ALL}
    def dehydrate(self, bundle):
        """Strip API credentials before returning a pipeline to the client."""
        # Don't return API username or key
        del bundle.data["api_username"]
        del bundle.data["api_key"]
        return bundle
    def obj_create(self, bundle, **kwargs):
        """Create a pipeline, honouring the ``pipelines_disabled`` setting.

        Falls back to the requester's IP for ``remote_name`` when not given,
        and forwards ``create_default_locations``/``shared_path`` to
        ``Pipeline.save`` so default locations can be set up on creation.
        """
        bundle = super(PipelineResource, self).obj_create(bundle, **kwargs)
        bundle.obj.enabled = not utils.get_setting("pipelines_disabled", False)
        create_default_locations = bundle.data.get("create_default_locations", False)
        # Try to guess Pipeline's IP if remote_name is undefined
        if bundle.data.get("remote_name") is None:
            # REMOTE_ADDR may be an empty string; normalise falsy values to None.
            ip = bundle.request.META.get("REMOTE_ADDR") or None
            bundle.obj.remote_name = ip
        shared_path = bundle.data.get("shared_path", None)
        bundle.obj.save(create_default_locations, shared_path)
        return bundle
class SpaceResource(ModelResource):
    """REST resource for storage Spaces.

    In addition to the generic ``Space`` fields, the dehydrate cycle merges
    in the fields of the protocol-specific child model (looked up through
    the ``PROTOCOL`` registry), and ``browse`` lists a space's contents.
    """
    class Meta:
        queryset = Space.objects.all()
        authentication = MultiAuthentication(
            BasicAuthentication(), ApiKeyAuthentication(), SessionAuthentication()
        )
        authorization = DjangoAuthorization()
        validation = CleanedDataFormValidation(form_class=SpaceForm)
        resource_name = "space"
        fields = [
            "access_protocol",
            "last_verified",
            "location_set",
            "path",
            "size",
            "used",
            "uuid",
            "verified",
        ]
        list_allowed_methods = ["get", "post"]
        detail_allowed_methods = ["get"]
        detail_uri_name = "uuid"
        always_return_data = True
        filtering = {
            "access_protocol": ALL,
            "path": ALL,
            "size": ALL,
            "used": ALL,
            "uuid": ALL,
            "verified": ALL,
        }
    def prepend_urls(self):
        """Register the custom ``browse`` endpoint for a space."""
        return [
            url(
                r"^(?P<resource_name>%s)/(?P<%s>\w[\w/-]*)/browse%s$"
                % (
                    self._meta.resource_name,
                    self._meta.detail_uri_name,
                    trailing_slash(),
                ),
                self.wrap_view("browse"),
                name="browse",
            )
        ]
    # Is there a better place to add protocol-specific space info?
    # alter_detail_data_to_serialize
    # alter_deserialized_detail_data
    def dehydrate(self, bundle):
        """ Add protocol specific fields to an entry. """
        bundle = super(SpaceResource, self).dehydrate(bundle)
        access_protocol = bundle.obj.access_protocol
        model = PROTOCOL[access_protocol]["model"]
        try:
            space = model.objects.get(space=bundle.obj.uuid)
        except model.DoesNotExist:
            # Missing child record: log and return the generic fields only.
            LOGGER.error("Space matching UUID %s does not exist", bundle.obj.uuid)
            # TODO this should assert later once creation/deletion stuff works
        else:
            keep_fields = PROTOCOL[access_protocol]["fields"]
            added_fields = model_to_dict(space, keep_fields)
            bundle.data.update(added_fields)
        return bundle
    def obj_create(self, bundle, **kwargs):
        """ Creates protocol specific class when creating a Space. """
        # TODO How to move this to the model?
        # Make dict of fields in model and values from bundle.data
        access_protocol = bundle.data["access_protocol"]
        keep_fields = PROTOCOL[access_protocol]["fields"]
        fields_dict = {key: bundle.data[key] for key in keep_fields}
        bundle = super(SpaceResource, self).obj_create(bundle, **kwargs)
        model = PROTOCOL[access_protocol]["model"]
        obj = model.objects.create(space=bundle.obj, **fields_dict)
        obj.save()
        return bundle
    def get_objects(self, space, path):
        """List entries at ``path``; implemented by versioned subclasses."""
        message = _("This method should be accessed via a versioned subclass")
        raise NotImplementedError(message)
    @_custom_endpoint(expected_methods=["get"])
    def browse(self, request, bundle, **kwargs):
        """ Returns all of the entries in a space, optionally at a subpath.
        Returns a dict with
        {'entries': [list of entries in the directory],
        'directories': [list of directories in the directory]}
        Directories is a subset of entries, all are just the name.
        If a path=<path> parameter is provided, will look in that path inside
        the Space. """
        space = bundle.obj
        path = request.GET.get("path", "")
        # Anchor relative paths at the space's root.
        if not path.startswith(space.path):
            path = os.path.join(space.path, path)
        objects = self.get_objects(space, path)
        return self.create_response(request, objects)
class LocationResource(ModelResource):
    """REST resource for Locations: purpose-specific paths within a Space.

    Detail POSTs move files into the location, either synchronously
    (``post_detail``) or as a background task (``post_detail_async``).
    Also exposes a default-location redirect, a ``browse`` listing, and the
    SWORD2 collection view for FEDORA deposit locations.
    """
    space = fields.ForeignKey(SpaceResource, "space")
    # Read-only absolute path (space path joined with relative path).
    path = fields.CharField(attribute="full_path", readonly=True)
    pipeline = fields.ToManyField(PipelineResource, "pipeline")
    class Meta:
        queryset = Location.active.all()
        authentication = MultiAuthentication(
            BasicAuthentication(), ApiKeyAuthentication(), SessionAuthentication()
        )
        authorization = DjangoAuthorization()
        # validation = CleanedDataFormValidation(form_class=LocationForm)
        resource_name = "location"
        fields = [
            "enabled",
            "relative_path",
            "purpose",
            "quota",
            "used",
            "uuid",
            "description",
        ]
        list_allowed_methods = ["get", "post"]
        detail_allowed_methods = ["get", "post"]
        detail_uri_name = "uuid"
        always_return_data = True
        filtering = {
            "relative_path": ALL,
            "pipeline": ALL_WITH_RELATIONS,
            "purpose": ALL,
            "quota": ALL,
            "space": ALL_WITH_RELATIONS,
            "used": ALL,
            "uuid": ALL,
            "description": ALL,
        }
    def prepend_urls(self):
        """Register custom endpoints: default-location redirect, browse,
        async file move, and the SWORD2 collection view."""
        return [
            url(
                r"^(?P<resource_name>%s)/default/(?P<purpose>[A-Z]{2})%s$"
                % (self._meta.resource_name, trailing_slash()),
                self.wrap_view("default"),
                name="default_location",
            ),
            url(
                r"^(?P<resource_name>%s)/(?P<%s>\w[\w/-]*)/browse%s$"
                % (
                    self._meta.resource_name,
                    self._meta.detail_uri_name,
                    trailing_slash(),
                ),
                self.wrap_view("browse"),
                name="browse",
            ),
            url(
                r"^(?P<resource_name>%s)/(?P<%s>\w[\w/-]*)/async%s$"
                % (
                    self._meta.resource_name,
                    self._meta.detail_uri_name,
                    trailing_slash(),
                ),
                self.wrap_view("post_detail_async"),
                name="post_detail_async",
            ),
            # FEDORA/SWORD2 endpoints
            url(
                r"^(?P<resource_name>%s)/(?P<%s>\w[\w/-]*)/sword/collection%s$"
                % (
                    self._meta.resource_name,
                    self._meta.detail_uri_name,
                    trailing_slash(),
                ),
                self.wrap_view("sword_collection"),
                name="sword_collection",
            ),
        ]
    def decode_path(self, path):
        """Hook for versioned subclasses to decode an encoded path; the base
        implementation returns the path unchanged."""
        return path
    def get_objects(self, space, path):
        """List entries at ``path``; implemented by versioned subclasses."""
        message = _("This method should be accessed via a versioned subclass")
        raise NotImplementedError(message)
    def default(self, request, **kwargs):
        """Redirects to the default location for the given purpose.
        This function is not using the `_custom_endpoint` decorator because it
        is not bound to an object.
        """
        # Tastypie API checks
        self.method_check(request, allowed=["get", "post"])
        self.is_authenticated(request)
        self.throttle_check(request)
        self.log_throttled_access(request)
        try:
            # Default locations live in Settings under names such as
            # "default_AS_location", keyed by the two-letter purpose code.
            name = "default_{}_location".format(kwargs["purpose"])
            uuid = Settings.objects.get(name=name).value
        except (Settings.DoesNotExist, KeyError):
            return http.HttpNotFound("Default location not defined for this purpose.")
        return HttpResponseRedirect(
            reverse(
                "api_dispatch_detail",
                kwargs={"api_name": "v2", "resource_name": "location", "uuid": uuid},
            )
        )
    def save_m2m(self, bundle):
        """Persist the pipeline M2M through the ``LocationPipeline`` join
        table instead of tastypie's default M2M handling."""
        for field_name, field_object in self.fields.items():
            if field_name != "pipeline":
                continue
            if not getattr(field_object, "is_m2m", False):
                continue
            if not field_object.attribute:
                continue
            pipelines = bundle.data["pipeline"]
            for item in pipelines:
                # get_or_create keeps repeated saves idempotent.
                LocationPipeline.objects.get_or_create(
                    pipeline=item.obj, location=bundle.obj
                )
    def obj_create(self, bundle, **kwargs):
        """Create a new location and make it the default when requested."""
        if "default" in bundle.data:
            # This is going to result in calling the `default` attribute setter
            # in the underlying model (Location).
            kwargs["default"] = bundle.data["default"]
        return super(LocationResource, self).obj_create(bundle, **kwargs)
    @_custom_endpoint(expected_methods=["get"])
    def browse(self, request, bundle, **kwargs):
        """ Returns all of the entries in a location, optionally at a subpath.
        Returns a dict with
        {'entries': [list of entries in the directory],
        'directories': [list of directories in the directory]}
        Directories is a subset of entries, all are just the name.
        If a path=<path> parameter is provided, will look in that path inside
        the Location. """
        location = bundle.obj
        path = request.GET.get("path", "")
        path = self.decode_path(path)
        location_path = location.full_path
        if isinstance(location_path, six.text_type):
            # The comparison/join below operates on bytes.
            location_path = location_path.encode("utf8")
        if not path.startswith(location_path):
            path = os.path.join(location_path, path)
        objects = self.get_objects(location.space, path)
        return self.create_response(request, objects)
    def _move_files_between_locations(
        self, files, origin_location, destination_location
    ):
        """
        Synchronously move files from one location to another. May be called from
        the request thread, or as an async task.
        """
        # For each file in files, call move to/from
        origin_space = origin_location.space
        destination_space = destination_location.space
        for sip_file in files:
            source_path = sip_file.get("source", None)
            destination_path = sip_file.get("destination", None)
            # make path relative to the location
            source_path = os.path.join(origin_location.relative_path, source_path)
            destination_path = os.path.join(
                destination_location.relative_path, destination_path
            )
            try:
                # Prefer a direct posix move when the origin allows it;
                # otherwise fall back to routing through the storage service.
                if not origin_location.is_move_allowed():
                    LOGGER.debug("Moving files from this location is not allowed")
                    raise PosixMoveUnsupportedError
                origin_space.posix_move(
                    source_path=source_path,
                    destination_path=destination_path,
                    destination_space=destination_space,
                    package=None,
                )
            except PosixMoveUnsupportedError:
                origin_space.move_to_storage_service(
                    source_path=source_path,
                    destination_path=destination_path,
                    destination_space=destination_space,
                )
                origin_space.post_move_to_storage_service()
                destination_space.move_from_storage_service(
                    source_path=destination_path,
                    destination_path=destination_path,
                    package=None,
                )
                destination_space.post_move_from_storage_service(
                    destination_path, destination_path
                )
    def _handle_location_file_move(self, move_files_fn, request, *args, **kwargs):
        """
        Handle a request to move files to this Location.
        Intended for use with creating Transfers, SIPs, etc and other cases
        where files need to be moved but not tracked by the storage service.
        POST body should contain a dict with elements:
        origin_location: URI of the Location the files should be moved from
        pipeline: URI of the Pipeline both Locations belong to
        files: List of dicts containing 'source' and 'destination', paths
        relative to their Location of the files to be moved.
        The actual work of moving the files is delegated to move_files_fn, which
        will be called with:
        * The list of files to move
        * The origin location
        * The destination location
        and should return a HttpResponse suitable for response to the
        client. This is parameterised in this way to give the caller the choice
        of copying synchronously (returning a HTTP 201 response) or
        asynchronously (returning a HTTP 202 + redirect).
        """
        data = self.deserialize(request, request.body)
        data = self.alter_deserialized_detail_data(request, data)
        # Get the object for this endpoint
        try:
            destination_location = Location.active.get(uuid=kwargs["uuid"])
        except Location.DoesNotExist:
            return http.HttpNotFound()
        # Check for require fields
        required_fields = ["origin_location", "pipeline", "files"]
        if not all((k in data) for k in required_fields):
            # Don't have enough information to make the request - return error
            return http.HttpBadRequest
        # Get the destination Location
        origin_uri = data["origin_location"]
        try:
            # splitting origin_uri on / results in:
            # ['', 'api', 'v1', '<resource_name>', '<uuid>', '']
            origin_uuid = origin_uri.split("/")[4]
            origin_location = Location.active.get(uuid=origin_uuid)
        except (IndexError, Location.DoesNotExist):
            return http.HttpNotFound(
                _("The URL provided '%(url)s' was not a link to a valid Location.")
                % {"url": origin_uri}
            )
        # For each file in files, call move to/from
        for sip_file in data["files"]:
            source_path = sip_file.get("source", None)
            destination_path = sip_file.get("destination", None)
            if not all([source_path, destination_path]):
                return http.HttpBadRequest
        return move_files_fn(data["files"], origin_location, destination_location)
    @_custom_endpoint(expected_methods=["post"])
    def post_detail_async(self, request, *args, **kwargs):
        """
        Moves files to this Location. Return an async response (202 code) on
        success.
        See _handle_location_file_move for a description of the expected request
        format.
        """
        def move_files(files, origin_location, destination_location):
            """Move our list of files in a background task, returning a HTTP Accepted response."""
            def task():
                self._move_files_between_locations(
                    files, origin_location, destination_location
                )
                return _("Files moved successfully")
            async_task = AsyncManager.run_task(task)
            response = http.HttpAccepted()
            # Point the client at the async resource so it can poll for completion.
            response["Location"] = reverse(
                "api_dispatch_detail",
                kwargs={
                    "api_name": "v2",
                    "resource_name": "async",
                    "id": async_task.id,
                },
            )
            return response
        return self._handle_location_file_move(move_files, request, *args, **kwargs)
    def post_detail(self, request, *args, **kwargs):
        """ Moves files to this Location.
        See _handle_location_file_move for a description of the expected request
        format.
        """
        def move_files(files, origin_location, destination_location):
            """Move our list of files synchronously, returning a HTTP Created response."""
            self._move_files_between_locations(
                files, origin_location, destination_location
            )
            response = {"error": None, "message": _("Files moved successfully")}
            return self.create_response(request, response)
        return self._handle_location_file_move(move_files, request, *args, **kwargs)
    def sword_collection(self, request, **kwargs):
        """Serve the SWORD2 collection view for FEDORA deposit locations."""
        try:
            location = Location.objects.get(uuid=kwargs["uuid"])
        except Location.DoesNotExist:
            location = None
        # Only SWORD deposit locations backed by a FEDORA space qualify.
        if location and (
            location.purpose != Location.SWORD_DEPOSIT
            or location.space.access_protocol != Space.FEDORA
        ):
            return http.HttpBadRequest(_("This is not a SWORD server space."))
        self.log_throttled_access(request)
        return sword_views.collection(request, location or kwargs["uuid"])
class PackageResource(ModelResource):
    """ Resource for managing Packages.
    List (api/v1/file/) supports:
    GET: List of files
    POST: Create new Package
    Detail (api/v1/file/<uuid>/) supports:
    GET: Get details on a specific file
    Download package (/api/v1/file/<uuid>/download/) supports:
    GET: Get package as download
    Extract file (/api/v1/file/<uuid>/extract_file/) supports:
    GET: Extract file from package (param "relative_path_to_file" specifies which file)
    api/v1/file/<uuid>/delete_aip/ supports:
    POST: Create a delete request for that AIP.
    Validate fixity (api/v1/file/<uuid>/check_fixity/) supports:
    GET: Scan package for fixity
    Compress package (api/v1/file/<uuid>/compress/) supports:
    PUT: Compress an existing Package
    """
    origin_pipeline = fields.ForeignKey(PipelineResource, "origin_pipeline")
    # POST-only inputs describing where the package comes from; never
    # serialized back to clients (use_in is always False).
    origin_location = fields.ForeignKey(LocationResource, None, use_in=lambda x: False)
    origin_path = fields.CharField(use_in=lambda x: False)
    current_location = fields.ForeignKey(LocationResource, "current_location")
    # Read-only absolute on-disk path of the package.
    current_full_path = fields.CharField(attribute="full_path", readonly=True)
    related_packages = fields.ManyToManyField("self", "related_packages", null=True)
    # Replication relationships are managed server-side, hence read-only.
    replicated_package = fields.ForeignKey(
        "self", "replicated_package", null=True, blank=True, readonly=True
    )
    replicas = fields.ManyToManyField(
        "self", "replicas", null=True, blank=True, readonly=True
    )
    # Matches default-location URIs such as /api/v2/location/default/AS/;
    # hydrate_current_location() resolves these to a concrete location URI.
    default_location_regex = re.compile(
        r"\/api\/v2\/location\/default\/(?P<purpose>[A-Z]{2})\/?"
    )
    class Meta:
        queryset = Package.objects.all()
        authentication = MultiAuthentication(
            BasicAuthentication(), ApiKeyAuthentication(), SessionAuthentication()
        )
        authorization = DjangoAuthorization()
        # validation = CleanedDataFormValidation(form_class=PackageForm)
        #
        # Note that this resource is exposed as 'file' to the API for
        # compatibility because the resource itself was originally under
        # that name.
        resource_name = "file"
        fields = [
            "current_path",
            "package_type",
            "size",
            "status",
            "uuid",
            "related_packages",
            "misc_attributes",
            "replicated_package",
            "replicas",
        ]
        list_allowed_methods = ["get", "post"]
        detail_allowed_methods = ["get", "put", "patch"]
        allowed_patch_fields = ["reingest"]  # for customized update_in_place
        detail_uri_name = "uuid"
        always_return_data = True
        filtering = {
            "current_location": ALL_WITH_RELATIONS,
            "package_type": ALL,
            "path": ALL,
            "uuid": ALL,
            "status": ALL,
            "related_packages": ALL_WITH_RELATIONS,
        }
    def prepend_urls(self):
        """Register the package resource's custom endpoints.

        These routes are matched before tastypie's defaults; each maps a URL
        under the ``file`` resource to one of the wrapped view methods
        (downloads, fixity, reindex, reingest, move, SWORD2, etc.).
        """
        return [
            url(
                r"^(?P<resource_name>%s)/async%s$"
                % (self._meta.resource_name, trailing_slash()),
                self.wrap_view("obj_create_async"),
                name="obj_create_async",
            ),
            url(
                r"^(?P<resource_name>%s)/(?P<%s>\w[\w/-]*)/delete_aip%s$"
                % (
                    self._meta.resource_name,
                    self._meta.detail_uri_name,
                    trailing_slash(),
                ),
                self.wrap_view("delete_aip_request"),
                name="delete_aip_request",
            ),
            url(
                r"^(?P<resource_name>%s)/(?P<%s>\w[\w/-]*)/recover_aip%s$"
                % (
                    self._meta.resource_name,
                    self._meta.detail_uri_name,
                    trailing_slash(),
                ),
                self.wrap_view("recover_aip_request"),
                name="recover_aip_request",
            ),
            url(
                r"^(?P<resource_name>%s)/(?P<%s>\w[\w/-]*)/extract_file%s$"
                % (
                    self._meta.resource_name,
                    self._meta.detail_uri_name,
                    trailing_slash(),
                ),
                self.wrap_view("extract_file_request"),
                name="extract_file_request",
            ),
            # Chunked (LOCKSS) download must precede the plain download route.
            url(
                r"^(?P<resource_name>%s)/(?P<%s>\w[\w/-]*)/download/(?P<chunk_number>\d+)%s$"
                % (
                    self._meta.resource_name,
                    self._meta.detail_uri_name,
                    trailing_slash(),
                ),
                self.wrap_view("download_request"),
                name="download_lockss",
            ),
            url(
                r"^(?P<resource_name>%s)/(?P<%s>\w[\w/-]*)/download%s$"
                % (
                    self._meta.resource_name,
                    self._meta.detail_uri_name,
                    trailing_slash(),
                ),
                self.wrap_view("download_request"),
                name="download_request",
            ),
            url(
                r"^(?P<resource_name>%s)/(?P<%s>\w[\w/-]*)/pointer_file%s$"
                % (
                    self._meta.resource_name,
                    self._meta.detail_uri_name,
                    trailing_slash(),
                ),
                self.wrap_view("pointer_file_request"),
                name="pointer_file_request",
            ),
            url(
                r"^(?P<resource_name>%s)/(?P<%s>\w[\w/-]*)/check_fixity%s$"
                % (
                    self._meta.resource_name,
                    self._meta.detail_uri_name,
                    trailing_slash(),
                ),
                self.wrap_view("check_fixity_request"),
                name="check_fixity_request",
            ),
            url(
                r"^(?P<resource_name>%s)/(?P<%s>\w[\w/-]*)/compress%s$"
                % (
                    self._meta.resource_name,
                    self._meta.detail_uri_name,
                    trailing_slash(),
                ),
                self.wrap_view("compress_request"),
                name="compress_request",
            ),
            url(
                r"^(?P<resource_name>%s)/(?P<%s>\w[\w/-]*)/send_callback/post_store%s$"
                % (
                    self._meta.resource_name,
                    self._meta.detail_uri_name,
                    trailing_slash(),
                ),
                self.wrap_view("aip_store_callback_request"),
                name="aip_store_callback_request",
            ),
            url(
                r"^(?P<resource_name>%s)/(?P<%s>\w[\w/-]*)/contents%s$"
                % (
                    self._meta.resource_name,
                    self._meta.detail_uri_name,
                    trailing_slash(),
                ),
                self.wrap_view("manage_contents"),
                name="manage_contents",
            ),
            url(
                r"^(?P<resource_name>%s)/metadata%s$"
                % (self._meta.resource_name, trailing_slash()),
                self.wrap_view("file_data"),
                name="file_data",
            ),
            url(
                r"^(?P<resource_name>%s)/(?P<%s>\w[\w/-]*)/reindex%s$"
                % (
                    self._meta.resource_name,
                    self._meta.detail_uri_name,
                    trailing_slash(),
                ),
                self.wrap_view("reindex_request"),
                name="reindex_request",
            ),
            # Reingest
            url(
                r"^(?P<resource_name>%s)/(?P<%s>\w[\w/-]*)/reingest%s$"
                % (
                    self._meta.resource_name,
                    self._meta.detail_uri_name,
                    trailing_slash(),
                ),
                self.wrap_view("reingest_request"),
                name="reingest_request",
            ),
            # Move
            url(
                r"^(?P<resource_name>%s)/(?P<%s>\w[\w/-]*)/move%s$"
                % (
                    self._meta.resource_name,
                    self._meta.detail_uri_name,
                    trailing_slash(),
                ),
                self.wrap_view("move_request"),
                name="move_request",
            ),
            # FEDORA/SWORD2 endpoints
            url(
                r"^(?P<resource_name>%s)/(?P<%s>\w[\w/-]*)/sword%s$"
                % (
                    self._meta.resource_name,
                    self._meta.detail_uri_name,
                    trailing_slash(),
                ),
                self.wrap_view("sword_deposit"),
                name="sword_deposit",
            ),
            url(
                r"^(?P<resource_name>%s)/(?P<%s>\w[\w/-]*)/sword/media%s$"
                % (
                    self._meta.resource_name,
                    self._meta.detail_uri_name,
                    trailing_slash(),
                ),
                self.wrap_view("sword_deposit_media"),
                name="sword_deposit_media",
            ),
            url(
                r"^(?P<resource_name>%s)/(?P<%s>\w[\w/-]*)/sword/state%s$"
                % (
                    self._meta.resource_name,
                    self._meta.detail_uri_name,
                    trailing_slash(),
                ),
                self.wrap_view("sword_deposit_state"),
                name="sword_deposit_state",
            ),
        ]
    def dehydrate_misc_attributes(self, bundle):
        """Customize serialization of misc_attributes.

        Returning the attribute directly lets tastypie emit it as JSON
        rather than the ``repr`` of the underlying dict.
        """
        # Serialize JSONField as dict, not as repr of a dict
        return bundle.obj.misc_attributes
def dehydrate(self, bundle):
"""Add an encrypted boolean key to the returned package indicating
whether it is encrypted.
"""
encrypted = False
space = bundle.obj.current_location.space
if space.access_protocol == Space.GPG:
encrypted = True
bundle.data["encrypted"] = encrypted
return bundle
    def hydrate_current_location(self, bundle):
        """Customize unserialization of current_location.
        If current_location uses the default location form (i.e. if matches the
        regular expression ``default_location_regex``), this method augments
        its value by converting it into the absolute path of the location being
        referenced, which is the expected form internally.
        This method is invoked in Tastypie's hydrate cycle.
        E.g.: ``/api/v2/location/default/DS/`` becomes:
        ``/api/v2/location/363f42ea-905d-40f5-a2e8-1b6b9c122629/`` or similar.
        """
        try:
            current_location = bundle.data["current_location"]
        except KeyError:
            # Nothing to translate if the client did not send the field.
            return bundle
        matches = self.default_location_regex.match(current_location)
        try:
            purpose = matches.group("purpose")
        except AttributeError:
            # ``matches`` is None: a concrete location URI was supplied as-is.
            LOGGER.debug(
                "`current_location` was not matched by `default_location_regex`"
            )
            return bundle
        try:
            # Default locations live in Settings, e.g. "default_DS_location".
            name = "default_{}_location".format(purpose)
            uuid = Settings.objects.get(name=name).value
        except (Settings.DoesNotExist, KeyError):
            LOGGER.debug(
                "`current_location` had the form of a default location (purpose %s) but the setting `%s` was not found",
                purpose,
                name,
            )
            return bundle
        location_path = reverse(
            "api_dispatch_detail",
            kwargs={"api_name": "v2", "resource_name": "location", "uuid": uuid},
        )
        LOGGER.info("`current_location` was augmented: `%s`", location_path)
        bundle.data["current_location"] = location_path
        return bundle
def _store_bundle(self, bundle):
    """
    Synchronously store a bundle. May be called from the request thread, or as
    an async task.
    """
    related_package_uuid = bundle.data.get("related_package_uuid")
    # IDEA add custom endpoints, instead of storing all AIPS that come in?
    origin_location_uri = bundle.data.get("origin_location")
    # Resolve the origin-location URI into its model object.
    origin_location = self.origin_location.build_related_resource(
        origin_location_uri, bundle.request
    ).obj
    origin_path = bundle.data.get("origin_path")
    if bundle.obj.package_type in (
        Package.AIP,
        Package.AIC,
        Package.DIP,
    ) and bundle.obj.current_location.purpose in (
        Location.AIP_STORAGE,
        Location.DIP_STORAGE,
    ):
        # Store AIP/AIC
        # Optional PREMIS metadata accompanying the package.
        events = bundle.data.get("events", [])
        agents = bundle.data.get("agents", [])
        aip_subtype = bundle.data.get("aip_subtype", None)
        bundle.obj.store_aip(
            origin_location,
            origin_path,
            related_package_uuid,
            premis_events=events,
            premis_agents=agents,
            aip_subtype=aip_subtype,
        )
    elif bundle.obj.package_type in (
        Package.TRANSFER,
    ) and bundle.obj.current_location.purpose in (Location.BACKLOG,):
        # Move transfer to backlog
        bundle.obj.backlog_transfer(origin_location, origin_path)
    # NOTE(review): any other package-type / location-purpose combination
    # falls through with no action taken.
def obj_create_async(self, request, **kwargs):
    """
    Create a new Package model instance. Called when a POST request is made
    to api/v2/file/async/.

    Returns a HTTP 202 response immediately, along with a redirect to a URL
    for polling for job completion.
    """
    try:
        # Standard Tastypie request checks (method, auth, throttling).
        self.method_check(request, allowed=["post"])
        self.is_authenticated(request)
        self.throttle_check(request)
        self.log_throttled_access(request)
        deserialized = self.deserialize(
            request,
            request.body,
            format=request.META.get("CONTENT_TYPE", "application/json"),
        )
        deserialized = self.alter_deserialized_detail_data(request, deserialized)
        bundle = self.build_bundle(
            data=dict_strip_unicode_keys(deserialized), request=request
        )
        # Create the Package record synchronously; only the (potentially
        # slow) storage step below runs on the async worker.
        bundle = super(PackageResource, self).obj_create(bundle, **kwargs)

        def task():
            # Runs in the background: store the package contents and
            # return the serialized package as the task result.
            self._store_bundle(bundle)
            new_bundle = self.full_dehydrate(bundle)
            new_bundle = self.alter_detail_data_to_serialize(request, new_bundle)
            return new_bundle.data

        async_task = AsyncManager.run_task(task)

        response = http.HttpAccepted()
        # Point the client at the async-task detail URL for polling.
        response["Location"] = reverse(
            "api_dispatch_detail",
            kwargs={
                "api_name": "v2",
                "resource_name": "async",
                "id": async_task.id,
            },
        )
        return response
    except Exception as e:
        # Fixed: use lazy %-style logging args instead of eager string
        # interpolation, and re-raise with a bare ``raise`` so the
        # original traceback is preserved unmodified.
        LOGGER.warning("Failure in obj_create_async: %s", e)
        raise
def obj_create(self, bundle, **kwargs):
    """Create and store a Package (POST to api/v2/file/).

    Delegates record creation to Tastypie, then synchronously stores the
    package contents.
    """
    created = super(PackageResource, self).obj_create(bundle, **kwargs)
    self._store_bundle(created)
    return created
def obj_update(self, bundle, skip_errors=False, **kwargs):
    """
    Modified version of the Django ORM implementation of obj_update.

    Identical to original function except obj_update_hook added between
    hydrating the data and saving the object.
    """
    # Locate the object being updated if the bundle doesn't carry it yet.
    if not bundle.obj or not self.get_bundle_detail_data(bundle):
        try:
            lookup_kwargs = self.lookup_kwargs_with_identifiers(bundle, kwargs)
        except Exception:
            # if there is trouble hydrating the data, fall back to just
            # using kwargs by itself (usually it only contains a "pk" key
            # and this will work fine.
            lookup_kwargs = kwargs
        try:
            bundle.obj = self.obj_get(bundle=bundle, **lookup_kwargs)
        except ObjectDoesNotExist:
            raise tastypie.exceptions.NotFound(
                _(
                    "A model instance matching the provided arguments could not be found."
                )
            )
    bundle = self.full_hydrate(bundle)
    # Project-specific extension point: adjust the Package / move files
    # after hydration but before the object is saved.
    bundle = self.obj_update_hook(bundle, **kwargs)
    return self.save(bundle, skip_errors=skip_errors)
def obj_update_hook(self, bundle, **kwargs):
    """
    Hook to update Package and move files around before package is saved.
    bundle.obj has been updated, but not yet saved.
    """
    # PATCH should be only for updating metadata, not actually moving files.
    # Don't do any additional processing.
    if bundle.request.method == "PATCH":
        # Update reingest - should only be notifications of done/failed
        if "reingest" in bundle.data:
            bundle.obj.misc_attributes.update({"reingest_pipeline": None})
        return bundle
    origin_location_uri = bundle.data.get("origin_location")
    origin_path = bundle.data.get("origin_path")
    # Optional PREMIS metadata accompanying the reingested package.
    events = bundle.data.get("events", [])
    agents = bundle.data.get("agents", [])
    aip_subtype = bundle.data.get("aip_subtype", None)
    if origin_location_uri and origin_path:
        # Sending origin information implies that the package should be copied from there
        origin_location = self.origin_location.build_related_resource(
            origin_location_uri, bundle.request
        ).obj
        if (
            bundle.obj.package_type in (Package.AIP, Package.AIC)
            # BUG FIX: the original wrote ``in (Location.AIP_STORAGE)``,
            # which is not a tuple — it performed a *substring* test
            # against the purpose string. Use a one-element tuple so this
            # is a genuine membership test.
            and bundle.obj.current_location.purpose in (Location.AIP_STORAGE,)
            and "reingest" in bundle.data
        ):
            # AIP Reingest
            # Reset the current Location & path to original values
            # Package.finish_reingest will update them if successful
            original_package = self._meta.queryset.get(uuid=bundle.obj.uuid)
            bundle.obj.current_path = original_package.current_path
            bundle.obj.current_location = original_package.current_location
            reingest_location = self.origin_location.build_related_resource(
                bundle.data["current_location"], bundle.request
            ).obj
            reingest_path = bundle.data["current_path"]
            bundle.obj.finish_reingest(
                origin_location,
                origin_path,
                reingest_location,
                reingest_path,
                premis_events=events,
                premis_agents=agents,
                aip_subtype=aip_subtype,
            )
    return bundle
def update_in_place(self, request, original_bundle, new_data):
    """
    Update the object in original_bundle in-place using new_data.

    Overridden to restrict what fields can be updated to only
    `allowed_patch_fields`.
    """
    # From http://stackoverflow.com/questions/13704344/tastypie-where-to-restrict-fields-that-may-be-updated-by-patch
    allowed = set(self._meta.allowed_patch_fields)
    if set(new_data.keys()) - allowed:
        raise tastypie.exceptions.BadRequest(
            _("PATCH only allowed on %(fields)s")
            % {"fields": ", ".join(self._meta.allowed_patch_fields)}
        )
    return super(PackageResource, self).update_in_place(
        request, original_bundle, new_data
    )
@_custom_endpoint(
    expected_methods=["post"],
    required_fields=("event_reason", "pipeline", "user_id", "user_email"),
)
def delete_aip_request(self, request, bundle, **kwargs):
    """Create a delete request for an AIP. Does not perform the deletion."""
    request_info = bundle.data
    package = bundle.obj
    if package.package_type not in Package.PACKAGE_TYPE_CAN_DELETE:
        # Can only request deletion on AIPs
        response = {"message": _("Deletes not allowed on this package type.")}
        response_json = json.dumps(response)
        return http.HttpMethodNotAllowed(
            response_json, content_type="application/json"
        )
    # 202 when a new deletion-request Event was created, 200 when one
    # already exists (see _attempt_package_request_event).
    (status_code, response) = self._attempt_package_request_event(
        package, request_info, Event.DELETE, Package.DEL_REQ
    )
    if status_code == 202:
        # This isn't configured by default
        site_url = getattr(settings, "SITE_BASE_URL", None)
        # Notify listeners of the new deletion request.
        signals.deletion_request.send(
            sender=self,
            url=site_url,
            uuid=package.uuid,
            location=package.full_path,
            pipeline=request_info["pipeline"],
        )
    else:
        response = {"message": _("A deletion request already exists for this AIP.")}
    self.log_throttled_access(request)
    response_json = json.dumps(response)
    return http.HttpResponse(
        status=status_code, content=response_json, content_type="application/json"
    )
@_custom_endpoint(
    expected_methods=["post"],
    required_fields=("event_reason", "pipeline", "user_id", "user_email"),
)
def recover_aip_request(self, request, bundle, **kwargs):
    """Create a recovery request for an AIP; the recovery itself runs later."""
    package = bundle.obj
    if package.package_type not in Package.PACKAGE_TYPE_CAN_RECOVER:
        # Recovery may only be requested for AIP-like package types.
        return http.HttpMethodNotAllowed(
            json.dumps(
                {"message": _("Recovery not allowed on this package type.")}
            ),
            content_type="application/json",
        )
    status_code, response = self._attempt_package_request_event(
        package, bundle.data, Event.RECOVER, Package.RECOVER_REQ
    )
    self.log_throttled_access(request)
    return http.HttpResponse(
        status=status_code,
        content=json.dumps(response),
        content_type="application/json",
    )
@_custom_endpoint(expected_methods=["get", "head"])
def extract_file_request(self, request, bundle, **kwargs):
    """Return a single file from the Package, extracting if necessary.

    Expects a ``relative_path_to_file`` GET parameter naming the file
    inside the package.
    """
    # NOTE this responds to HEAD because AtoM uses HEAD to check for the existence of a file. The storage service has no way to check if a file exists except by downloading and extracting this AIP
    # TODO this needs to be fixed so that HEAD is not identical to GET
    relative_path_to_file = request.GET.get("relative_path_to_file")
    if not relative_path_to_file:
        return http.HttpBadRequest(
            _("All of these fields must be provided: relative_path_to_file")
        )
    # NOTE(review): ``urllib.unquote`` is the Python 2 location of this
    # helper (``urllib.parse.unquote`` on Python 3) — confirm the
    # intended Python version / compatibility shim.
    relative_path_to_file = urllib.unquote(relative_path_to_file)
    temp_dir = extracted_file_path = ""
    # Get Package details
    package = bundle.obj
    # Handle package name duplication in path for compressed packages
    if not package.is_compressed:
        full_path = package.fetch_local_path()
        # The basename of the AIP may be included with the request, because
        # all AIPs contain a base directory. That directory may already be
        # inside the full path though, so remove the basename only if the
        # relative path begins with it.
        basename = os.path.join(os.path.basename(full_path), "")
        if relative_path_to_file.startswith(basename):
            relative_path_to_file = relative_path_to_file.replace(basename, "", 1)
    # Check if the package is in Arkivum and not actually there
    if package.current_location.space.access_protocol == Space.ARKIVUM:
        # Only request a retrieval email for GET, not HEAD.
        is_local = package.current_location.space.get_child_space().is_file_local(
            package,
            path=relative_path_to_file,
            email_nonlocal=request.method == "GET",
        )
        if is_local is False:
            # Need to fetch from tape, return 202
            return http.HttpAccepted(
                json.dumps(
                    {
                        "error": False,
                        "message": _(
                            "File is not locally available. Contact your storage administrator to fetch it."
                        ),
                    }
                )
            )
        if is_local is None:
            # Arkivum error, return 502
            return http.HttpResponse(
                json.dumps(
                    {
                        "error": True,
                        "message": _(
                            "Error checking if file in Arkivum in locally available."
                        ),
                    }
                ),
                content_type="application/json",
                status=502,
            )
    # If local file exists - return that
    if not package.is_compressed:
        extracted_file_path = os.path.join(full_path, relative_path_to_file)
        if not os.path.exists(extracted_file_path):
            return http.HttpResponse(
                status=404,
                content=_("Requested file, %(filename)s, not found in AIP")
                % {"filename": relative_path_to_file},
            )
    elif package.package_type in Package.PACKAGE_TYPE_CAN_EXTRACT:
        # If file doesn't exist, try to extract it
        (extracted_file_path, temp_dir) = package.extract_file(
            relative_path_to_file
        )
    else:
        # If the package is compressed and we can't extract it,
        return http.HttpResponse(
            status=501,
            content=_("Unable to extract package of type: %(typename)s")
            % {"typename": package.package_type},
        )
    # Stream the file back; temp_dir (possibly empty) is handed to the
    # helper — presumably for cleanup after streaming; TODO confirm.
    response = utils.download_file_stream(extracted_file_path, temp_dir)
    return response
@_custom_endpoint(expected_methods=["get", "head"])
def download_request(self, request, bundle, **kwargs):
    """Return the entire Package to be downloaded."""
    # NOTE this responds to HEAD because AtoM uses HEAD to check for the existence of a package. The storage service has no way to check if the package still exists except by downloading it
    # TODO this needs to be fixed so that HEAD is not identical to GET
    # Get AIP details
    package = bundle.obj
    # Check if the package is in Arkivum and not actually there
    if package.current_location.space.access_protocol == Space.ARKIVUM:
        # Only request a retrieval email for GET, not HEAD.
        is_local = package.current_location.space.get_child_space().is_file_local(
            package, email_nonlocal=request.method == "GET"
        )
        if is_local is False:
            # Need to fetch from tape, return 202
            return http.HttpAccepted(
                json.dumps(
                    {
                        "error": False,
                        "message": _(
                            "File is not locally available. Contact your storage administrator to fetch it."
                        ),
                    }
                )
            )
        if is_local is None:
            # Arkivum error, return 502
            return http.HttpResponse(
                json.dumps(
                    {
                        "error": True,
                        "message": _(
                            "Error checking if file in Arkivum in locally available."
                        ),
                    }
                ),
                content_type="application/json",
                status=502,
            )
    # Optional "chunk_number" URL kwarg, passed through to
    # get_download_path (named for LOCKSS archival units).
    lockss_au_number = kwargs.get("chunk_number")
    try:
        temp_dir = None
        full_path = package.get_download_path(lockss_au_number)
    except StorageException:
        # No directly downloadable artifact: build a tar of the package.
        full_path, temp_dir = package.compress_package(utils.COMPRESSION_TAR)
    response = utils.download_file_stream(full_path, temp_dir)
    return response
@_custom_endpoint(expected_methods=["get"])
def pointer_file_request(self, request, bundle, **kwargs):
    """Stream the AIP's pointer file, or 404 when the package has none."""
    pointer_path = bundle.obj.full_pointer_file_path
    if pointer_path:
        return utils.download_file_stream(pointer_path)
    return http.HttpNotFound(
        _("Resource with UUID %(uuid)s does not have a pointer file")
        % {"uuid": bundle.obj.uuid}
    )
@_custom_endpoint(expected_methods=["get"])
def check_fixity_request(self, request, bundle, **kwargs):
    """
    Check a package's bagit/fixity.

    :param force_local: GET parameter. If True ("True", "true" or "1"),
        will ignore any space-specific bagit checks and run it locally.
    """
    force_local = request.GET.get("force_local") in ("True", "true", "1")
    report_json, report_dict = bundle.obj.get_fixity_check_report_send_signals(
        force_local=force_local
    )
    return http.HttpResponse(report_json, content_type="application/json")
@_custom_endpoint(expected_methods=["put"])
def compress_request(self, request, bundle, **kwargs):
    """Compress an existing package.

    PUT /api/v1/file/<uuid>/compress/

    Currently acknowledges the request without performing compression.
    """
    # BUG FIX: the payload must be serialized with json.dumps; passing a
    # dict straight to HttpResponse would emit the Python repr of the
    # dict, which is not valid JSON despite the declared content type.
    return http.HttpResponse(
        json.dumps(
            {"response": "You want to compress package {}".format(bundle.obj.uuid)}
        ),
        content_type="application/json",
    )
@_custom_endpoint(expected_methods=["get"])
def aip_store_callback_request(self, request, bundle, **kwargs):
    """Run all enabled post-store callbacks against the package's files.

    Returns 204 when there is nothing to do or every callback succeeds;
    otherwise returns an application error listing the failure count.
    """
    package = bundle.obj
    callbacks = Callback.objects.filter(event="post_store", enabled=True)
    if len(callbacks) == 0:
        return http.HttpNoContent()
    fail = 0
    if package.is_compressed:
        # Don't extract the entire AIP, which could take forever;
        # instead, just extract bagit.txt and manifest-sha512.txt,
        # which is enough to get bag.entries with the
        # precalculated sha512 checksums
        try:
            basedir = package.get_base_directory()
        # Currently we only support this for local packages.
        except NotImplementedError:
            return http.HttpNoContent()
        __, tmpdir = package.extract_file(os.path.join(basedir, "bagit.txt"))
        package.extract_file(
            os.path.join(basedir, "manifest-sha512.txt"), extract_path=tmpdir
        )
        package_dir = os.path.join(tmpdir, basedir)
    else:
        package_dir = package.full_path()
        tmpdir = None
    # Files that legitimately lack an sha512 entry in the bag manifest.
    safe_files = ("bag-info.txt", "manifest-sha512.txt", "bagit.txt")
    bag = bagit.Bag(package_dir)
    for f, checksums in bag.entries.items():
        try:
            cksum = checksums["sha512"]
        except KeyError:
            # These files do not typically have an sha512 hash, so it's
            # fine for these to be missing that key; every other file should.
            if f not in safe_files:
                LOGGER.warning("Post-store callback: sha512 missing for file %s", f)
            continue
        # Match manifest checksums against not-yet-stored File records.
        files = File.objects.filter(checksum=cksum, stored=False)
        if len(files) > 1:
            LOGGER.warning("Multiple File entries found for sha512 %s", cksum)
        for file_ in files:
            for callback in callbacks:
                # Substitute the file's source id into the URI and body
                # templates before executing the callback.
                uri = callback.uri.replace("<source_id>", file_.source_id)
                body = callback.body.replace("<source_id>", file_.source_id)
                try:
                    callback.execute(uri, body)
                    file_.stored = True
                    file_.save()
                except CallbackError:
                    fail += 1
    if tmpdir is not None:
        shutil.rmtree(tmpdir)
    if fail > 0:
        response = {
            "message": _("Failed to POST %(count)d responses to callback URI")
            % {"count": fail},
            "failure_count": fail,
            "callback_uris": [c.uri for c in callbacks],
        }
        return http.HttpApplicationError(
            json.dumps(response), content_type="application/json"
        )
    else:
        return http.HttpNoContent()
@_custom_endpoint(expected_methods=["post"])
def reindex_request(self, request, bundle, **kwargs):
    """Index file data from the Package transfer METS file."""

    def _json_error(message, response_class):
        # Build an error response with a consistent JSON shape.
        return response_class(
            json.dumps({"error": True, "message": message}),
            content_type="application/json",
        )

    package = bundle.obj
    if package.package_type != Package.TRANSFER:
        return _json_error(
            _("This package is not a transfer."), http.HttpBadRequest
        )
    if package.current_location.purpose != Location.BACKLOG:
        return _json_error(
            _("This package is not in transfer backlog."), http.HttpBadRequest
        )
    try:
        # Create File entries for every file in the transfer
        package.index_file_data_from_transfer_mets()
    except Exception as e:
        LOGGER.warning(
            "An error occurred while reindexing the Transfer: %s",
            str(e),
            exc_info=True,
        )
        return _json_error(
            _("An error occurred while reindexing the Transfer."),
            http.HttpApplicationError,
        )
    count = File.objects.filter(package=package).count()
    payload = {
        "error": False,
        "message": _("Files indexed: %(count)d") % {"count": count},
    }
    return http.HttpResponse(
        content=json.dumps(payload), content_type="application/json"
    )
@_custom_endpoint(
    expected_methods=["post"], required_fields=("pipeline", "reingest_type")
)
def reingest_request(self, request, bundle, **kwargs):
    """Request to reingest an AIP."""
    pipeline_uuid = bundle.data["pipeline"]
    try:
        pipeline = Pipeline.objects.get(uuid=pipeline_uuid)
    except (Pipeline.DoesNotExist, Pipeline.MultipleObjectsReturned):
        # Unknown or ambiguous pipeline UUID: report a 400 with details.
        error = {
            "error": True,
            "message": _("Pipeline UUID %(uuid)s failed to return a pipeline")
            % {"uuid": pipeline_uuid},
        }
        return self.create_response(
            request, error, response_class=http.HttpBadRequest
        )
    response = bundle.obj.start_reingest(
        pipeline,
        bundle.data["reingest_type"],
        bundle.data.get("processing_config", "default"),
    )
    return self.create_response(
        request, response, status=response.get("status_code", 500)
    )
@_custom_endpoint(expected_methods=["post"])
def move_request(self, request, bundle, **kwargs):
    """Request to move a stored AIP.

    Called when a POST request is made to api/v2/file/UUID/move/ with a
    location_uuid parameter with the UUID of the location that the AIP
    should be moved to.
    """
    package = bundle.obj
    if package.status != Package.UPLOADED:
        # i18n fix: interpolate *after* translation so the msgid is a
        # stable template rather than a pre-formatted string that will
        # never match a catalog entry.
        response = {
            "error": True,
            "message": _(
                "The file must be in an %(expected)s state to be moved. "
                "Current state: %(current)s"
            )
            % {"expected": Package.UPLOADED, "current": package.status},
        }
        return self.create_response(
            request, response, response_class=http.HttpBadRequest
        )
    location_uuid = request.POST.get("location_uuid")
    if not location_uuid:
        return http.HttpBadRequest(
            _("All of these fields must be provided: " "location_uuid")
        )
    try:
        location = Location.objects.get(uuid=location_uuid)
    except (Location.DoesNotExist, Location.MultipleObjectsReturned):
        # Normalized the previously whitespace-garbled message (it
        # contained a backslash-continuation inside the string literal).
        response = {
            "error": True,
            "message": _("Location UUID %(uuid)s failed to return a location")
            % {"uuid": location_uuid},
        }
        return self.create_response(
            request, response, response_class=http.HttpBadRequest
        )
    if location == package.current_location:
        response = {
            "error": True,
            "message": _(
                "New location must be different " "to the current location"
            ),
        }
        return self.create_response(
            request, response, response_class=http.HttpBadRequest
        )
    if location.purpose != package.current_location.purpose:
        # i18n fix: same lazy-interpolation correction as above.
        response = {
            "error": True,
            "message": _(
                "New location must have the same purpose as "
                "the current location - %(purpose)s"
            )
            % {"purpose": package.current_location.purpose},
        }
        return self.create_response(
            request, response, response_class=http.HttpBadRequest
        )
    # Atomically flip UPLOADED -> MOVING so two concurrent move requests
    # cannot both proceed.
    number_matched = Package.objects.filter(
        id=package.id, status=Package.UPLOADED
    ).update(status=Package.MOVING)
    if number_matched == 1:
        package.refresh_from_db()
    else:
        response = {
            "error": True,
            "message": _(
                "The package must be in an %(expected)s state to be moved. "
                "Current state: %(current)s"
            )
            % {"expected": Package.UPLOADED, "current": package.status},
        }
        return self.create_response(
            request, response, response_class=http.HttpBadRequest
        )

    def task():
        # Background task: perform the move and restore UPLOADED status.
        package.move(location)
        package.status = Package.UPLOADED
        package.save()
        return _("Package moved successfully")

    async_task = AsyncManager.run_task(task)
    response = http.HttpAccepted()
    # Point the client at the async-task detail URL for polling.
    response["Location"] = reverse(
        "api_dispatch_detail",
        kwargs={"api_name": "v2", "resource_name": "async", "id": async_task.id},
    )
    return response
def sword_deposit(self, request, **kwargs):
    """SWORD deposit-edit endpoint; rejects non-deposit packages."""
    try:
        package = Package.objects.get(uuid=kwargs["uuid"])
    except Package.DoesNotExist:
        package = None
    if package is not None and package.package_type != Package.DEPOSIT:
        return http.HttpBadRequest(_("This is not a SWORD deposit location."))
    self.log_throttled_access(request)
    # Fall back to the raw UUID when no Package record exists yet.
    return sword_views.deposit_edit(request, package or kwargs["uuid"])
def sword_deposit_media(self, request, **kwargs):
    """SWORD deposit-media endpoint; rejects non-deposit packages."""
    try:
        package = Package.objects.get(uuid=kwargs["uuid"])
    except Package.DoesNotExist:
        package = None
    if package is not None and package.package_type != Package.DEPOSIT:
        return http.HttpBadRequest(_("This is not a SWORD deposit location."))
    self.log_throttled_access(request)
    # Fall back to the raw UUID when no Package record exists yet.
    return sword_views.deposit_media(request, package or kwargs["uuid"])
def sword_deposit_state(self, request, **kwargs):
    """SWORD deposit-state endpoint; rejects non-deposit packages."""
    try:
        package = Package.objects.get(uuid=kwargs["uuid"])
    except Package.DoesNotExist:
        package = None
    if package is not None and package.package_type != Package.DEPOSIT:
        return http.HttpBadRequest(_("This is not a SWORD deposit location."))
    self.log_throttled_access(request)
    # Fall back to the raw UUID when no Package record exists yet.
    return sword_views.deposit_state(request, package or kwargs["uuid"])
def _attempt_package_request_event(
    self, package, request_info, event_type, event_status
):
    """Generic package request handler, e.g. package recovery: RECOVER_REQ,
    or package deletion: DEL_REQ.

    Returns a ``(status_code, response_dict)`` pair: 202 when a new
    request Event was created, 200 when one is already pending.
    """
    # Lazy %-style logging args instead of eager str.format().
    LOGGER.info(
        "Package event: '%s' requested, with package status: '%s'",
        event_type,
        event_status,
    )
    LOGGER.debug(pprint.pformat(request_info))
    pipeline = Pipeline.objects.get(uuid=request_info["pipeline"])
    request_description = event_type.replace("_", " ").lower()
    # See if an event already exists; exists() avoids counting all rows.
    already_requested = Event.objects.filter(
        package=package, event_type=event_type, status=Event.SUBMITTED
    ).exists()
    if not already_requested:
        request_event = Event(
            package=package,
            event_type=event_type,
            status=Event.SUBMITTED,
            event_reason=request_info["event_reason"],
            pipeline=pipeline,
            user_id=request_info["user_id"],
            user_email=request_info["user_email"],
            store_data=package.status,
        )
        request_event.save()
        response = {
            "message": _("%(event_type)s request created successfully.")
            % {"event_type": request_description.title()},
            "id": request_event.id,
        }
        status_code = 202
    else:
        response = {
            "error_message": _(
                "A %(event_type)s request already exists for this AIP."
            )
            % {"event_type": request_description}
        }
        status_code = 200
    return (status_code, response)
@_custom_endpoint(expected_methods=["get", "put", "delete"])
def manage_contents(self, request, bundle, **kwargs):
    """Dispatch by HTTP verb: GET lists, PUT adds, DELETE clears files."""
    handlers = {
        "PUT": self._add_files_to_package,
        "DELETE": self._remove_files_from_package,
        "GET": self._package_contents,
    }
    handler = handlers.get(request.method)
    if handler is not None:
        return handler(request, bundle, **kwargs)
def _remove_files_from_package(self, request, bundle, **kwargs):
    """Delete every File record attached to this package; respond 204."""
    package_files = bundle.obj.file_set.all()
    package_files.delete()
    return http.HttpNoContent()
def _add_files_to_package(self, request, bundle, **kwargs):
    """
    Adds a set of files to a package.

    The PUT body must be a list of zero or more JavaScript objects in the
    following format:
    {
        "relative_path": "string",
        "fileuuid": "string",
        "accessionid", "string",
        "sipuuid": "string",
        "origin": "string"
    }
    """
    try:
        files_list = json.load(request)
    except ValueError:
        response = {
            "success": False,
            "error": _("No JSON object could be decoded from POST body."),
        }
        return http.HttpBadRequest(
            json.dumps(response), content_type="application/json"
        )
    if not isinstance(files_list, list):
        response = {
            "success": False,
            "error": _("JSON request must contain a list of objects."),
        }
        return http.HttpBadRequest(
            json.dumps(response), content_type="application/json"
        )
    # Map request-body keys onto File model field names.
    property_map = {
        "relative_path": "name",
        "fileuuid": "source_id",
        "accessionid": "accessionid",
        "sipuuid": "source_package",
        "origin": "origin",
    }
    if len(files_list) == 0:
        return http.HttpResponse()
    created_files = []
    for f in files_list:
        # Renamed from ``kwargs`` which shadowed the method parameter.
        file_kwargs = {"package": bundle.obj}
        for source, dest in property_map.items():
            try:
                file_kwargs[dest] = f[source]
            except KeyError:
                response = {
                    "success": False,
                    "error": _("File object was missing key: %(key)s")
                    % {"key": source},
                }
                # BUG FIX: content type was "application_json" (invalid).
                return http.HttpBadRequest(
                    json.dumps(response), content_type="application/json"
                )
        created_files.append(File(**file_kwargs))
    for f in created_files:
        f.save()
    response = {
        "success": True,
        "message": _("%(count)d files created in package %(uuid)s")
        % {"count": len(created_files), "uuid": bundle.obj.uuid},
    }
    # BUG FIX: content type was "application_json" (invalid).
    return http.HttpCreated(json.dumps(response), content_type="application/json")
def _package_contents(self, request, bundle, **kwargs):
    """
    Returns metadata about every file within a specified Storage Service
    package, specified via Storage Service UUID.

    The file properties provided are the properties of the
    ~:class:`~locations.models.event.File` class; see the class definition
    for more information.

    :returns: a JSON object in the following format:
    {
        "success": True,
        "package": "UUID (as string)",
        "files": [
            # array containing zero or more objects containing
            # all of the file's properties, in the format:
            {
                "source_id": "",
                # ...
            }
        ]
    }
    """
    attrs = (
        "source_id",
        "name",
        "source_package",
        "checksum",
        "accessionid",
        "origin",
    )
    file_dicts = [
        {attr: getattr(f, attr) for attr in attrs}
        for f in bundle.obj.file_set.all()
    ]
    payload = {"success": True, "package": bundle.obj.uuid, "files": file_dicts}
    return http.HttpResponse(
        status=200, content=json.dumps(payload), content_type="application/json"
    )
def file_data(self, request, **kwargs):
    """
    Returns file metadata as a JSON array of objects.

    This maps properties of the File class to the names of the
    Elasticsearch indices' Transferfile index, allowing this to directly
    substitute for Elasticsearch when looking up metadata on specific files.

    Acceptable parameters are:
    * relative_path (searches the `name` field)
    * fileuuid (searches the `source_id` field)
    * accessionid (searches the `accessionid` field)
    * sipuuid (searches the `source_package` field)

    :returns: an array of one or more objects. See the transferfile
        index for information on the return format.
        If no results are found for the specified query, returns 404.
        If no acceptable query parameters are found, returns 400.
    """
    # Tastypie API checks
    self.method_check(request, allowed=["get", "post"])
    self.is_authenticated(request)
    self.throttle_check(request)
    self.log_throttled_access(request)
    # Map public query-parameter names onto File model field names.
    property_map = {
        "relative_path": "name",
        "fileuuid": "source_id",
        "accessionid": "accessionid",
        "sipuuid": "source_package",
    }
    query = {
        field: request.GET[param]
        for param, field in property_map.items()
        if param in request.GET
    }
    if not query:
        error = {
            "success": False,
            "error": _("No supported query properties found!"),
        }
        return http.HttpBadRequest(
            content=json.dumps(error), content_type="application/json"
        )
    files = File.objects.filter(**query)
    if not files.exists():
        return http.HttpNotFound()
    results = [
        {
            "accessionid": f.accessionid,
            "file_extension": os.path.splitext(f.name)[1],
            "filename": os.path.basename(f.name),
            "relative_path": f.name,
            "fileuuid": f.source_id,
            "origin": f.origin,
            "sipuuid": f.source_package,
        }
        for f in files
    ]
    return http.HttpResponse(
        content=json.dumps(results), content_type="application/json"
    )
class AsyncResource(ModelResource):
    """
    Represents an async task that may or may not still be running.
    """

    class Meta:
        queryset = Async.objects.all()
        resource_name = "async"
        authentication = MultiAuthentication(
            BasicAuthentication(), ApiKeyAuthentication(), SessionAuthentication()
        )
        authorization = DjangoAuthorization()
        # Only bookkeeping fields are exposed directly; "error"/"result"
        # are added in dehydrate() so they go through the model accessors.
        fields = [
            "id",
            "completed",
            "was_error",
            "created_time",
            "updated_time",
            "completed_time",
        ]
        always_return_data = True
        # Read-only detail resource, addressed by integer id.
        detail_allowed_methods = ["get"]
        detail_uri_name = "id"

    def dehydrate(self, bundle):
        """Pull out errors and results using our accessors so they get unpickled."""
        if bundle.obj.completed:
            if bundle.obj.was_error:
                bundle.data["error"] = bundle.obj.error
            else:
                bundle.data["result"] = bundle.obj.result
        return bundle
|
This was a snapshot taken of my "Creation Station" at the Alpine Stake Relief Society Enrichment. If you go to their site (listed below) you will find all the photos of the event, as well as all the handout downloads from each Creation Station. Enjoy!
The theme for this Alpine, Utah Stake Enrichment is "Creating Happiness." They are having a program with Guest Speaker Nancy Murphy, and a special musical number "Fill the Well Within". Following this program the sisters will be dismissed to go into the cultural hall that is filled with "Creation Stations". The Tables are set up around the outside of the Cultural hall with different sisters presenting their ideas of Creating happiness in many ways. The Creation Stations include: Create an MTC at home (missionary prep for MTC experience), Add Life to your years (Planning a Church mission), Create a Learning Environment (BYU Literacy), Create a Simpler Life through Frugality : Less is more, Create peace Through Preparedness, Create Great Grandma Moments, Create Long Distance Grandmothering, Create Spectacular Imaginations, Create Comfort and Smiles with Volunteerism & community service, Create a house of order, Create Fabulous Family Reunions, Create Unity Through Family Traditions, Create More joy in the Golden years, Family Search, What an Adventure, Create Precious, Priceless Photographs, Creative Blogging (by yours truly), Create Scrapbooking Family History, Create Joy in Journaling, My big fat Family Record book.
Also going on is a Compassionate Service Project where the sisters will each make 15 meat pies to be contributed to the Food & Care Coalition in Provo, Utah. The theme for this is "Many Hands Make Light Work." Tables are set up with a production line fashion for making these.
In the center of the Cultural hall are 4 banquet tables with a huge beautiful floral Centerpiece in the middle. Refreshments will be served from these tables. There are about 6 round tables situated close by for sisters who want to sit down to enjoy their refreshments, or they can walk around with them while they enjoy learning about the different ways to create happiness, and make contributions to home, family, the church and society.
When photos are available, I will add some for you to see how it all turned out. This is a terrific idea. Please make sure to read President Uchtdorf's talk and that should bring to you the spirit of what this event is all about. Enjoy!
|
from zeit.cms.i18n import MessageFactory as _
import gocept.form.grouped
import zeit.cms.browser.interfaces
import zeit.cms.content.interfaces
import zeit.cms.workflow.interfaces
import zeit.content.cp.interfaces
import zeit.objectlog.interfaces
import zeit.workflow.browser.form
import zope.component
import zope.dublincore.interfaces
import zope.formlib.form
def is_published_and_has_permission(form, action):
    """True when the context is published *and* the user may retract it."""
    if not zeit.workflow.browser.form.is_published(form, action):
        return False
    return form.request.interaction.checkPermission(
        'zeit.content.cp.Retract', form.context)
class CenterPageWorkflowForm(zeit.workflow.browser.form.WorkflowForm):
    """Workflow form for centerpages."""

    # same as zeit.workflow.browser.form.ContentWorkflow, except for the
    # fields: we use ITimeBasedPublishing instead of IContentWorkflow
    zope.component.adapts(
        zeit.content.cp.interfaces.ICenterPage,
        zeit.cms.browser.interfaces.ICMSLayer)

    # Layout: status column on the left, remaining settings on the right,
    # and the object log spanning the full width below.
    field_groups = (
        gocept.form.grouped.Fields(
            _("Status"),
            zeit.workflow.browser.form.WorkflowForm.modified_fields,
            css_class='column-left'),
        gocept.form.grouped.RemainingFields(
            _("Settings"), css_class='column-right'),
        gocept.form.grouped.Fields(
            _("Log"), fields=('logs', ),
            css_class='full-width')
    )

    # ITimeBasedPublishing replaces IContentWorkflow (see class comment);
    # the DC 'created' timestamp is shown read-only.
    form_fields = (
        zope.formlib.form.FormFields(
            zeit.workflow.interfaces.ITimeBasedPublishing,
            zeit.objectlog.interfaces.ILog,
            zeit.cms.workflow.interfaces.IModified,
            zeit.cms.content.interfaces.ISemanticChange).omit(
            *zeit.workflow.browser.form.WorkflowForm.omit_fields) +
        zope.formlib.form.FormFields(
            zope.dublincore.interfaces.IDCTimes, for_display=True).select(
            'created'))

    @zope.formlib.form.action(_('Save state only'), name='save')
    def handle_save_state(self, action, data):
        """Duplicate action from base class, since we overwrite handle_retract.
        """
        super(CenterPageWorkflowForm, self).handle_save_state.success(data)

    @zope.formlib.form.action(_('Save state and publish now'), name='publish')
    def handle_publish(self, action, data):
        """Duplicate action from base class, since we overwrite handle_retract.
        """
        super(CenterPageWorkflowForm, self).handle_publish.success(data)

    @gocept.form.action.confirm(
        _('Save state and retract now'),
        name='retract',
        confirm_message=_('Really retract? This will remove the object from '
                          'all channels it is syndicated in and make it '
                          'unavailable to the public!'),
        condition=is_published_and_has_permission)
    def handle_retract(self, action, data):
        """Overwrite action to additionally test Retract permission."""
        super(CenterPageWorkflowForm, self).handle_retract.success(data)

    def get_error_message(self, mapping):
        # Centerpage-specific publish-failure message.
        return _('Could not publish ${id} since it has validation errors.',
                 mapping=mapping)
|
Samsung refrigerators that display OF OF need to be taken out of demo mode. This is a very simple thing to do. Read below to learn how.
Demo mode is built into the electronics for dealers. This allows the dealer to show potential customers all of the functions of the refrigerator on the floor with the exception of “Cooling Mode” this helps with energy consumption.
On Samsung refrigerators that don’t have a display, the lights will be scrolling up and down. This means the same thing. The refrigerator is in Demo/Floor model mode.
To get the refrigerator out of demo mode, you need to press several buttons. This button sequence is the same for display and non-display models. The only exception is for models that use the WiFi touchscreen.
For models with the display on the inside of the refrigerator and not on the freezer door you will need to press and hold “Power Freeze” & “Freezer” Holding those two buttons in on these models will take the refrigerator out of demo mode and send it back into cooling mode.
Did this article help you get your Samsung refrigerator out of demo mode?
If you need further Samsung refrigerator troubleshooting help, comment below and I’ll try my best to assist you. You can also visit our free DIY Samsung Refrigerator Repair Forum by clicking the link to the left or the link at the top of this page.
Thanks for taking the time to stop by the site. If you have any questions you can’t find by using the options available, please use the “Contact Us” page and we will try our best to assist.
Hi, that should have started the fans, lights, and compressor. If it didn’t then there is another issue. The problem would be one of the boards or the compressor.
You’re very welcome! I’m glad the post saved the day for you.
|
#!/usr/bin/env python
# @(#) $Id: TConsumer.py,v 1.4 2015/01/23 16:51:58 pcolomer Exp $
#*******************************************************************************
# ALMA - Atacama Large Millimiter Array
# (c) Associated Universities Inc., 2002
# (c) European Southern Observatory, 2002
# Copyright by ESO (in the framework of the ALMA collaboration)
# and Cosylab 2002, All rights reserved
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307 USA
#
#------------------------------------------------------------------------------
from Acspy.Nc.Consumer import Consumer
from time import sleep
import TEST_NS_STATISTICS_SERVICE
import sys
def dataHandler1(params):
print "Consumer1 - ", params.strVal
sleep(1)
return
def dataHandler2(params):
print "Consumer2 - ", params.strVal
sleep(1)
return
def dataHandler3(params):
print "Consumer3 - ", params.strVal
sleep(1)
return
def dataHandler4(params):
print "Consumer4 - ", params.strVal
sleep(1)
return
def main(argv):
consumers = []
wait_sec = int(argv[0])
for ch in argv[1:]:
ch = int(ch)
print "Creating channel %d" % (ch)
consumer = None
if ch == 1:
consumer = Consumer(TEST_NS_STATISTICS_SERVICE.CHANNEL_1)
consumer.addSubscription(TEST_NS_STATISTICS_SERVICE.Test1EventData,handler_function=dataHandler1)
elif ch == 2:
consumer = Consumer(TEST_NS_STATISTICS_SERVICE.CHANNEL_2)
consumer.addSubscription(TEST_NS_STATISTICS_SERVICE.Test1EventData,handler_function=dataHandler2)
elif ch == 3:
consumer = Consumer(TEST_NS_STATISTICS_SERVICE.CHANNEL_3)
consumer.addSubscription(TEST_NS_STATISTICS_SERVICE.Test1EventData,handler_function=dataHandler3)
elif ch == 4:
consumer = Consumer(TEST_NS_STATISTICS_SERVICE.CHANNEL_4)
consumer.addSubscription(TEST_NS_STATISTICS_SERVICE.Test1EventData,handler_function=dataHandler4)
if consumer is None:
raise BaseException("Unknown channel. Allowed values are from 1 to 4: %d"%(ch))
else:
consumers.append(consumer)
print "Enable %d consumers"%(len(consumers))
for consumer in consumers:
consumer.consumerReady()
if wait_sec > 0:
print "Wait %d seconds"%(wait_sec)
sleep(wait_sec)
# Disconnect consumers
print "Disconnect %d consumers"%(len(consumers))
for consumer in consumers:
consumer.disconnect()
if __name__ == "__main__":
# > TConsumer.py wait_sec ch_id_1 ch_id_2 ... ch_id_N
# Where ch_id can be 1, 2, 3, 4
main(sys.argv[1:])
|
There will be bleacher seating for easier conversation. And also rosé ice pops.
After a weird month of gray, rainy, cold days, summer is coming to Chicago at last. Sunshine and 80-degree temperatures are expected just in time for Memorial Day weekend. How perfect, then, that Bounce Sporting Club (324 W. Chicago, Near North) has decided to open its brand new rooftop bar this Thursday.
What does that actually mean? Well, the brand-new rooftop space—which tops off a three-story building with a great view of the skyline—boasts tons of seating and shade, as well as a cool new feature: bleacher-style seating facing the bar.
The bleachers came about because Benvenisti realized that, despite Bounce’s social setting as a sports bar and dance club, a lot of people don’t really interact at tables. “We wanted to make it comfortable for someone to come up and grab a drink at the bar and go sit at this bleacher area where you can talk to people, interact and engage,” he says.
Speaking of drinks, Bounce has a new special rooftop menu that includes plenty of summer-perfect frozen goodness, like the Story of OJ, a concoction of vodka, ginger, kaffir lime, and blood orange. It’s also serving something fairly unique—boozy ice pops. The Pretty in Pink combines rosé, lemon, sugar, and vodka; you can finally lick your rosé, as well as sip on it. The rest of the cocktails are playful and made for partying (I didn’t even know there was such a thing as caffeinated whiskey!) and there’s plenty of bubbly, rosé, and summer-friendly beers on draft.
Food options are a little lighter than those at the original Bounce. Dishes include crab toast with smashed peas and chiles, tuna poke tacos, head-on prawn skewers, and a summer flatbread made with ramp pesto, ’nduja, corn, asparagus, and roasted tomato. “We’re serving stuff that reminds me of the summer,” says Benvenisti. “Being from the East Coast, lobster rolls are synonymous with my summer.” On Bounce’s menu, the sandwich comes with a healthy dose of celery salt, arugula, and Old Bay potato chips.
Benvenisti expects the rooftop bar to be in full swing for the weekend festivities. “Having a good rooftop in Chicago is imperative to having a great summer,” he says. He’s not wrong.
|
import pandas as pd
import numpy as np
from functools import wraps
import re
import struct
import os
from glob import glob
from math import ceil
from toolz import curry, merge, partial
from itertools import count
import bcolz
from operator import getitem
from ..compatibility import StringIO
from ..utils import textblock
from .core import names, DataFrame, compute, concat, categorize_block
from .shuffle import set_partition
def _StringIO(data):
    """Wrap *data* in a StringIO, decoding bytes to text first so that
    pandas always receives a text buffer."""
    text = data.decode() if isinstance(data, bytes) else data
    return StringIO(text)
def file_size(fn, compression=None):
    """Size of a file on disk.

    If compressed then return the uncompressed file size.
    """
    if compression == 'gzip':
        # The gzip trailer stores the uncompressed payload size (modulo
        # 2**32) in the final four bytes; read just those.
        with open(fn, 'rb') as f:
            f.seek(-4, 2)
            (size,) = struct.unpack('I', f.read(4))
        return size
    return os.stat(fn).st_size
@wraps(pd.read_csv)
def read_csv(fn, *args, **kwargs):
    """Read a CSV file (or glob of files) into a dask DataFrame.

    The file is split into byte ranges of roughly ``chunkbytes`` bytes;
    the first chunk is parsed with the inferred/supplied header, all
    later chunks with ``header=None``.  Optionally categorizes object
    columns and/or sets a partitioned index (both require an extra full
    pass over the data).
    """
    if '*' in fn:
        return concat([read_csv(f, *args, **kwargs) for f in sorted(glob(fn))])

    categorize = kwargs.pop('categorize', None)
    index = kwargs.pop('index', None)
    if index and categorize is None:
        # Setting an index needs a full pass anyway, so categorize too.
        categorize = True

    compression = kwargs.pop('compression', None)

    # Chunk sizes and numbers
    chunkbytes = kwargs.pop('chunkbytes', 2**28)  # 256 MB
    total_bytes = file_size(fn, compression)
    nchunks = int(ceil(float(total_bytes) / chunkbytes))
    divisions = [None] * (nchunks - 1)

    # Let pandas infer dtypes and columns on the first 100 rows
    head = pd.read_csv(fn, *args, nrows=100, compression=compression, **kwargs)

    # BUG FIX: was ``if names not in kwargs`` which tested whether the
    # ``names`` generator imported from .core is a *key* of kwargs -- that
    # is always true, so user-supplied names were always overwritten.
    if 'names' not in kwargs:
        kwargs['names'] = csv_names(fn, compression=compression, **kwargs)
    if 'header' not in kwargs:
        header = infer_header(fn, compression=compression, **kwargs)
        if header is True:
            header = 0
    else:
        header = kwargs.pop('header')

    if 'parse_dates' not in kwargs:
        parse_dates = [col for col in head.dtypes.index
                       if np.issubdtype(head.dtypes[col], np.datetime64)]
        if parse_dates:
            kwargs['parse_dates'] = parse_dates
    else:
        parse_dates = kwargs.get('parse_dates')

    # BUG FIX: was ``if 'dtypes' in kwargs`` -- not a pandas keyword, so
    # an explicit user ``dtype`` was silently replaced by inferred dtypes.
    if 'dtype' in kwargs:
        dtype = kwargs['dtype']
    else:
        dtype = dict(head.dtypes)
        if parse_dates:
            # Columns parsed as dates must not get an explicit dtype.
            for col in parse_dates:
                del dtype[col]
    kwargs['dtype'] = dtype

    first_read_csv = curry(pd.read_csv, *args, header=header, **kwargs)
    rest_read_csv = curry(pd.read_csv, *args, header=None, **kwargs)

    # Create dask graph: one task per byte range of the file.
    name = next(names)
    dsk = dict(((name, i), (rest_read_csv, (_StringIO,
                            (textblock, fn,
                             i * chunkbytes, (i + 1) * chunkbytes,
                             compression))))
               for i in range(1, nchunks))
    dsk[(name, 0)] = (first_read_csv, (_StringIO,
                      (textblock, fn, 0, chunkbytes, compression)))

    result = DataFrame(dsk, name, head.columns, divisions)

    if categorize or index:  # requires a full pass over the data
        categories, quantiles = categories_and_quantiles(fn, args, kwargs,
                                                         index, categorize,
                                                         chunkbytes=chunkbytes)

    if categorize:
        func = partial(categorize_block, categories=categories)
        result = result.map_blocks(func, columns=result.columns)

    if index:
        result = set_partition(result, index, quantiles)

    return result
def infer_header(fn, encoding='utf-8', compression=None, **kwargs):
    """ Guess if csv file has a header or not

    This uses Pandas to read a sample of the file, then looks at the column
    names to see if they are all word-like.

    Returns True or False
    """
    # See read_csv docs for header for reasoning
    try:
        df = pd.read_csv(fn, encoding=encoding, compression=compression,
                         nrows=5)
    except StopIteration:
        # Fewer rows than the sample size: read the whole file instead.
        df = pd.read_csv(fn, encoding=encoding, compression=compression)
    # A real header yields word-like (non-numeric) column names and at
    # least one non-object dtype once the data rows are parsed.
    # FIX: raw string for the regex -- '\s'/'\D'/'\w' in a plain string
    # are invalid escape sequences (DeprecationWarning on Python 3.6+).
    return (len(df) > 0 and
            all(re.match(r'^\s*\D\w*\s*$', n) for n in df.columns) and
            not all(dt == 'O' for dt in df.dtypes))
def csv_names(fn, encoding='utf-8', compression=None, names=None,
              parse_dates=None, **kwargs):
    """Return the column names pandas would assign to this CSV file."""
    read = partial(pd.read_csv, fn, encoding=encoding,
                   compression=compression, names=names,
                   parse_dates=parse_dates)
    try:
        sample = read(nrows=5)
    except StopIteration:
        # File shorter than the sample size: read it whole.
        sample = read()
    return list(sample.columns)
def categories_and_quantiles(fn, args, kwargs, index=None, categorize=None,
                             chunkbytes=2**28):
    """
    Categories of Object columns and quantiles of index column for CSV

    Computes both of the following in a single pass

    1.  The categories for all object dtype columns
    2.  The quantiles of the index column

    Parameters
    ----------

    fn: string
        Filename of csv file
    args: tuple
        arguments to be passed in to read_csv function
    kwargs: dict
        keyword arguments to pass in to read_csv function
    index: string or None
        Name of column on which to compute quantiles
    categorize: bool
        Whether or not to compute categories of Object dtype columns

    Returns
    -------

    (categories, quantiles): dict mapping column name to its unique
    values, and the nchunks-1 index quantiles (None when no index given).
    """
    kwargs = kwargs.copy()

    compression = kwargs.get('compression', None)
    total_bytes = file_size(fn, compression)
    nchunks = int(ceil(float(total_bytes) / chunkbytes))

    if infer_header(fn, **kwargs):
        kwargs['header'] = 0

    # Small eager sample to discover which columns are object-typed.
    one_chunk = pd.read_csv(fn, *args, nrows=100, **kwargs)

    if categorize is not False:
        category_columns = [c for c in one_chunk.dtypes.index
                            if one_chunk.dtypes[c] == 'O'
                            and c not in kwargs.get('parse_dates', ())]
    else:
        category_columns = []
    cols = category_columns + ([index] if index else [])

    dtypes = dict((c, one_chunk.dtypes[c]) for c in cols)
    # Lazily re-read only the needed columns through dask's read_csv.
    d = read_csv(fn, *args, **merge(kwargs,
                                    dict(usecols=cols,
                                         parse_dates=None,
                                         dtype=dtypes)))
    categories = [d[c].drop_duplicates() for c in category_columns]

    # (A stray unused ``import dask`` was removed here.)
    if index:
        quantiles = d[index].quantiles(np.linspace(0, 100, nchunks + 1)[1:-1])
        result = compute(quantiles, *categories)
        quantiles, categories = result[0], result[1:]
    else:
        categories = compute(*categories)
        quantiles = None

    categories = dict(zip(category_columns, categories))

    return categories, quantiles
# Generator of unique graph names for array-backed frames.
from_array_names = ('from-array-%d' % i for i in count(1))


def from_array(x, chunksize=50000):
    """ Read dask Dataframe from any slicable array with record dtype

    Uses getitem syntax to pull slices out of the array.  The array need
    not be a NumPy array but must support slicing syntax

        x[50000:100000]

    and have a record dtype

        x.dtype == [('name', 'O'), ('balance', 'i8')]
    """
    columns = tuple(x.dtype.names)
    divisions = tuple(range(chunksize, len(x), chunksize))
    name = next(from_array_names)
    nblocks = int(ceil(float(len(x)) / chunksize))
    # One task per chunk: slice the array, wrap the records in a DataFrame.
    dsk = {}
    for blk in range(nblocks):
        block_slice = (slice(blk * chunksize, (blk + 1) * chunksize),)
        dsk[(name, blk)] = (pd.DataFrame, (getitem, x, block_slice))
    return DataFrame(dsk, name, columns, divisions)
from pframe.categories import reapply_categories
def from_bcolz(x, chunksize=None, categorize=True, index=None, **kwargs):
    """ Read dask Dataframe from bcolz.ctable

    Parameters
    ----------

    x : bcolz.ctable
        Input data
    chunksize : int (optional)
        The size of blocks to pull out from ctable.  Ideally as large as can
        comfortably fit in memory
    categorize : bool (defaults to True)
        Automatically categorize all string dtypes
    index : string (optional)
        Column to make the index

    See Also
    --------

    from_array: more generic function not optimized for bcolz
    """
    import dask.array as da
    # Default the block size to bcolz's native chunk length when that is
    # reasonably large.
    bc_chunklen = max(x[name].chunklen for name in x.names)
    if chunksize is None and bc_chunklen > 10000:
        chunksize = bc_chunklen
    # NOTE(review): if chunksize is None and bc_chunklen <= 10000,
    # chunksize stays None and the range()/slice arithmetic below will
    # fail -- confirm callers always pass chunksize in that case.

    categories = dict()
    if categorize:
        # Pre-compute the unique values of every string/object column.
        for name in x.names:
            if (np.issubdtype(x.dtype[name], np.string_) or
                np.issubdtype(x.dtype[name], np.unicode_) or
                np.issubdtype(x.dtype[name], np.object_)):
                a = da.from_array(x[name], chunks=(chunksize*len(x.names),))
                categories[name] = da.unique(a)

    columns = tuple(x.dtype.names)
    divisions = tuple(range(chunksize, len(x), chunksize))
    new_name = next(from_array_names)
    # One task per chunk; each slices the ctable and rebuilds a pandas
    # DataFrame, applying the pre-computed categories.
    dsk = dict(((new_name, i),
                (dataframe_from_ctable,
                 x,
                 (slice(i * chunksize, (i + 1) * chunksize),),
                 None, categories))
               for i in range(0, int(ceil(float(len(x)) / chunksize))))

    result = DataFrame(dsk, new_name, columns, divisions)

    if index:
        assert index in x.names
        a = da.from_array(x[index], chunks=(chunksize*len(x.names),))
        # Percentiles of the index column become the partition boundaries.
        q = np.linspace(1, 100, len(x) / chunksize + 2)[1:-1]
        divisions = da.percentile(a, q).compute()
        return set_partition(result, index, divisions, **kwargs)
    else:
        return result
def dataframe_from_ctable(x, slc, columns=None, categories=None):
    """ Get DataFrame from bcolz.ctable

    Parameters
    ----------

    x: bcolz.ctable
    slc: slice
    columns: list of column names or None; a single (string) name yields
        a Series instead of a DataFrame
    categories: dict mapping column name -> sorted unique values, used to
        rebuild pandas Categoricals without rescanning the data

    >>> x = bcolz.ctable([[1, 2, 3, 4], [10, 20, 30, 40]], names=['a', 'b'])
    >>> dataframe_from_ctable(x, slice(1, 3))
       a   b
    0  2  20
    1  3  30

    >>> dataframe_from_ctable(x, slice(1, 3), columns=['b'])
        b
    0  20
    1  30

    >>> dataframe_from_ctable(x, slice(1, 3), columns='b')
    0    20
    1    30
    Name: b, dtype: int64
    """
    if columns is not None:
        if isinstance(columns, tuple):
            columns = list(columns)
        # ctable column selection: a list gives a sub-ctable, a single
        # name gives a carray.
        x = x[columns]

    # FIX: removed unused ``name = next(names)`` (its value was never
    # used; it only advanced the shared name generator).
    if isinstance(x, bcolz.ctable):
        chunks = [x[name][slc] for name in x.names]
        if categories is not None:
            # Rebuild categoricals: searchsorted against the sorted
            # category values recovers the integer codes.
            chunks = [pd.Categorical.from_codes(
                          np.searchsorted(categories[name], chunk),
                          categories[name], True)
                      if name in categories else chunk
                      for name, chunk in zip(x.names, chunks)]
        return pd.DataFrame(dict(zip(x.names, chunks)))
    elif isinstance(x, bcolz.carray):
        chunk = x[slc]
        if categories is not None and columns and columns in categories:
            chunk = pd.Categorical.from_codes(
                np.searchsorted(categories[columns], chunk),
                categories[columns], True)
        return pd.Series(chunk, name=columns)
|
Tea Rose Home: Link Party No.46!
I wanted to share some of the birthday presents I received from my husband's cousin's wife (is there a special name for that?). Well, let me just say she is my friend.
We were supposed to meet back in December, but they got sick, then our family got sick and we didn't actually get together until a few weeks ago. She had special birthday surprises for me.
I look at them and think about what I am going to do with them. She told me that I can cut the lace to use it for whatever project I want. Nooooo! Seriously, I don't think I can do that. I am just going to look at it and admire the even stitches. The flowers, Rachel already had dibs on a couple of them. She wanted me to make a flower clip for her hair and most likely that will happen.
I thought they were very thoughtful and beautiful birthday gifts. Don't you think so too?
Go check out her etsy shop she makes awesome crocheted items.
Also, I posted something new on the Japan Disaster Relief page. My friend sent me this link. I watched it in Japanese first and she found the video in English. There are some mistakes in the translation, but it truly touched me.
1. Please post something YOU made. It doesn't matter if it is crochet, quilt, clothing, anything would be fine. I want to see it! If you use someone's idea or patterns please give them the rightful credit.
2. Link to your specific post, that way it is easier for everybody to find and read it.
3. Please grab the "Tea Rose Home" grab button, and link back from your post. If you can mention the party in your post or place the button on your side bar, that would be great! I want more people to have a chance to come and show off their creation.
Show us what you've done!!
I love the lace, it is so beautiful!
I don't normally participate in your link parties (either haven't made anything, or totally forget!), but today I can join in, and it's very fitting.
I've made your ruffled t-shirt. It's the second time I've done this, and I have to say, your tutorial is so easy to follow - I love it.
gorgeous goodies. you lucky thing!
I'm just stunned by what's happening in Japan over the past 10days. It was so touching to read your posts. Take care and stay strong.
Thanks for hosting the link party, too.
I don't normally comment, but just want to say that I have been following along with your posts on Japan. I am so sorry to hear what has happened and how it has affected your family. Praying for everyone there.
What gorgeous lace and the flowers are soooo cute!!
I'm so excited to finally be able to participate in one of your link parties! Love all the creative inspiration here!
Wow, I want to know how she crocheted the lace so that you could cut it w/o it coming out - that's awesome!
that lace is unbelievable! What a thoughtful gift.
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
from collections import namedtuple
from subprocess import CalledProcessError
from subprocess import PIPE
from subprocess import Popen
from subprocess import STDOUT
from .debug import debug
from .errors import Unsupervised
def svc(args):
    """Wrapper for daemontools svc cmd.

    Runs ``s6-svc`` with *args*; raises Unsupervised when the service's
    supervisor is not running, CalledProcessError on any other failure.
    """
    # svc never writes to stdout.
    cmd = ('s6-svc',) + tuple(args)
    debug('CMD: %s', cmd)
    process = Popen(cmd, stderr=PIPE)
    _, error = process.communicate()
    # NOTE(review): on Python 3 ``error`` is bytes while this literal is
    # text (unicode_literals) -- confirm the intended runtime is Python 2
    # or that stderr is decoded upstream.
    if error.startswith('s6-svc: fatal: unable to control '):
        raise Unsupervised(cmd, error)
    if process.returncode:  # pragma: no cover: there's no known way to hit this.
        import sys
        # Forward s6-svc's stderr before failing.
        sys.stderr.write(error)
        raise CalledProcessError(process.returncode, cmd)
class SvStat(
        namedtuple('SvStat', ['state', 'pid', 'exitcode', 'seconds', 'process'])
):
    """Parsed service status: state plus whichever details are known."""
    __slots__ = ()
    UNSUPERVISED = 'could not get status, supervisor is down'
    INVALID = 'no such service'

    def __repr__(self):
        # Assemble only the pieces that are actually present.
        parts = [self.state]
        if self.pid is not None:
            parts.append(' (pid {0})'.format(self.pid))
        if self.exitcode is not None:
            parts.append(' (exitcode {0})'.format(self.exitcode))
        if self.seconds is not None:
            parts.append(' {0} seconds'.format(self.seconds))
        if self.process is not None:
            parts.append(', {0}'.format(self.process))
        return ''.join(parts)
def svstat_string(service_path):
    """Wrapper for daemontools svstat cmd.

    Returns SvStat.UNSUPERVISED when no supervisor is attached to the
    service directory, otherwise the raw s6-svstat output line.
    """
    # svstat *always* exits with code zero...
    # ...so first ask s6-svok whether a supervisor is even running; it
    # communicates solely via its exit code.
    cmd = ('s6-svok', service_path)
    process = Popen(cmd, stdout=PIPE, stderr=STDOUT)
    status, _ = process.communicate()
    # s6-svok prints nothing; anything else would be unexpected.
    # NOTE(review): ``status`` is bytes on Python 3, so this comparison
    # with a text literal holds only on Python 2 -- confirm target runtime.
    assert status == ''
    if process.returncode != 0:
        return SvStat.UNSUPERVISED

    cmd = ('s6-svstat', service_path)
    process = Popen(cmd, stdout=PIPE, stderr=STDOUT)
    status, _ = process.communicate()
    #status is listed per line for each argument
    return status
def svstat_parse(svstat_string):
    r'''Parse one line of s6-svstat output into an SvStat value.

    Example raw line:

        up (pid 2557675) 172858 seconds, ready 172856 seconds\n

    >>> svstat_parse('up (pid 1202562) 100 seconds, ready 10 seconds\n')
    ready (pid 1202562) 10 seconds

    >>> svstat_parse('up (pid 1202562) 100 seconds\n')
    up (pid 1202562) 100 seconds

    >>> svstat_parse('down 4334 seconds, normally up, want up')
    down 4334 seconds, starting

    >>> svstat_parse('down (exitcode 0) 0 seconds, normally up, want up, ready 0 seconds')
    down (exitcode 0) 0 seconds, starting

    >>> svstat_parse('down 0 seconds, normally up')
    down 0 seconds

    >>> svstat_parse('up (pid 1202) 1 seconds, want down\n')
    up (pid 1202) 1 seconds, stopping

    >>> svstat_parse('down 0 seconds, normally up')
    down 0 seconds

    >>> svstat_parse('down 0 seconds, normally up')
    down 0 seconds

    >>> svstat_parse('s6-svstat: fatal: unable to read status for wat: No such file or directory')
    could not get status, supervisor is down

    >>> svstat_parse("s6-svstat: fatal: unable to read status for sweet: Broken pipe\n")
    could not get status, supervisor is down

    >>> svstat_parse('unable to chdir: file does not exist')
    no such service

    >>> svstat_parse('totally unpredictable error message')
    totally unpredictable error message
    '''
    # NOTE: the parameter shadows the module-level svstat_string() function.
    status = svstat_string.strip()
    debug('RAW : %s', status)
    state, status = __get_state(status)

    # Optional "(pid NNN) " detail.
    if status.startswith('(pid '):
        pid, status = status[5:].rsplit(') ', 1)
        pid = int(pid)
    else:
        pid = None

    # Optional "(exitcode N) " detail.
    if status.startswith('(exitcode '):
        exitcode, status = status[10:].rsplit(') ', 1)
        exitcode = int(exitcode)
    else:
        exitcode = None

    # "NNN seconds" since the last state change, when present.
    try:
        seconds, status = status.split(' seconds', 1)
        seconds = int(seconds)
    except ValueError:
        seconds = None

    # "want up"/"want down" mark a pending transition.
    if ', want up' in status:
        process = 'starting'
    elif ', want down' in status:
        process = 'stopping'
    else:
        process = None

    # A trailing ", ready NNN seconds" supersedes the basic up state.
    if status.startswith(', ready '):
        state = 'ready'
        status = status[8:]
        seconds, status = status.split(' seconds', 1)
        seconds = int(seconds)
        process = None

    return SvStat(state, pid, exitcode, seconds, process)
def __get_state(status):
    """Split a raw status line into (state, remainder-to-parse)."""
    first, rest = status.split(None, 1)
    if first in ('up', 'down'):
        return first, rest
    if status.startswith('unable to chdir:'):
        return SvStat.INVALID, rest
    known_fatal_suffixes = (
        ': No such file or directory',
        ': Broken pipe',
    )
    if (status.startswith('s6-svstat: fatal: unable to read status for ')
            and status.endswith(known_fatal_suffixes)):
        # the service has never been started before; it's down.
        return SvStat.UNSUPERVISED, ''
    # unknown errors: pass the raw message through as the state
    return status, ''
def svstat(path):
    """Return the parsed SvStat for the service at *path*."""
    raw = svstat_string(path)
    return svstat_parse(raw)
|
Is This The Real Reason Why Gigi Hadid And Zayn Malik Split?
Their break-up seems pretty amicable, but are the pair hiding something?
Was Tamara Ecclestone's Daughter's Birthday Party Too Extravagant?
|
import base64
from bottle import jinja2_template as template, request
from models.cmsmodels import Users
import admin.session as withsession
@withsession.app.app.route('/password', method=['GET', 'POST'])
@withsession.issessionactive()
def password():
    """Render the change-password form (GET) or apply a change (POST).

    POST expects ``oldPassword`` and ``newPassword`` form fields; the old
    password must match the stored one.
    """
    try:
        if request.method == 'GET':
            return template('admin/views/password.jinja2')
        # NOTE(review): base64 is an encoding, not a hash -- passwords
        # stored this way are trivially reversible.  Kept for
        # compatibility with already-stored credentials.
        user = Users.objects.get()
        old = base64.b64encode(bytes(request.forms.get('oldPassword'), 'UTF8'))
        if user and user.password == old:
            user.password = base64.b64encode(
                bytes(request.forms.get('newPassword'), 'UTF8'))
            user.save()
            return template('admin/views/password.jinja2', {"saved": True})
        return template('admin/views/password.jinja2',
                        {"saved": False, "errorMessage": "incorrect password"})
    except Exception:
        # FIX: was a bare ``except:``; narrowed so SystemExit and
        # KeyboardInterrupt are no longer swallowed.  Any DB/form failure
        # reports a generic error to the user.
        return template('admin/views/password.jinja2',
                        {"saved": False, "errorMessage": "DB error"})
def initialize():
    """Announce on stdout that this controller module has been loaded."""
    message = 'password controller initialized'
    print(message)
|
Grand Father: Shri. Virendra Singh Bhadauria, Settled at Gangapur City. Presently reside at Kota.
Professionally qualified girl with moral values.
|
##############################################################################
#
# Copyright (c) 2001, 2002 Zope Corporation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE
#
##############################################################################
import os
import errno
import logging
# Module-level logger; lock-acquisition failures are reported through it.
logger = logging.getLogger("zc.lockfile")


class LockError(Exception):
    """Couldn't get a lock.

    Raised when acquiring fails (lock held elsewhere) and, on Windows,
    when releasing fails.
    """
# Pick a platform-specific file-locking implementation: POSIX (fcntl),
# Windows (msvcrt), or a stub that always fails.
try:
    import fcntl
except ImportError:
    try:
        import msvcrt
    except ImportError:
        # Neither locking API exists on this platform: locking always fails.
        def _lock_file(file):
            raise TypeError('No file-locking support on this platform')
        def _unlock_file(file):
            raise TypeError('No file-locking support on this platform')
    else:
        # Windows
        def _lock_file(file):
            # Lock just the first byte
            try:
                msvcrt.locking(file.fileno(), msvcrt.LK_NBLCK, 1)
            except IOError:
                raise LockError("Couldn't lock %r" % file.name)

        def _unlock_file(file):
            try:
                # msvcrt unlocks by position: same first byte as _lock_file.
                file.seek(0)
                msvcrt.locking(file.fileno(), msvcrt.LK_UNLCK, 1)
            except IOError:
                raise LockError("Couldn't unlock %r" % file.name)
else:
    # Unix
    _flags = fcntl.LOCK_EX | fcntl.LOCK_NB  # exclusive, non-blocking

    def _lock_file(file):
        try:
            fcntl.flock(file.fileno(), _flags)
        except IOError:
            raise LockError("Couldn't lock %r" % file.name)

    def _unlock_file(file):
        # File is automatically unlocked on close
        pass
class LockFile:
    """A PID-stamped advisory lock on a single file path.

    The lock is acquired in __init__ (raising LockError if it is already
    held) and " <pid>\n" is written into the file for diagnostics.
    """

    # File object holding the lock; None once closed.
    _fp = None

    def __init__(self, path):
        self._path = path
        # NOTE(review): mode 'w+' truncates the file *before* the lock is
        # attempted, so the holder's recorded pid may already be erased
        # when we try to read it below -- confirm this is acceptable.
        fp = open(path, 'w+')
        try:
            _lock_file(fp)
        except:
            # Lock held elsewhere: report who holds it (the pid written
            # after the leading space), then re-raise the LockError.
            fp.seek(1)
            pid = fp.read().strip()[:20]
            fp.close()
            if not pid:
                pid = 'UNKNOWN'
            logger.exception("Error locking file %s; pid=%s", path, pid)
            raise
        self._fp = fp
        # Record our pid; the leading space keeps byte 0 free (that byte
        # is the one locked on Windows).
        fp.write(" %s\n" % os.getpid())
        fp.truncate()
        fp.flush()

    def close(self):
        # Release the lock and close the file; safe to call repeatedly.
        if self._fp is not None:
            _unlock_file(self._fp)
            self._fp.close()
            self._fp = None
|
There is no occasion where this gift wouldn’t be welcomed. These corporate crystal awards can be used for reaching company metrics, such as sales goals, or completing a major project. They also make unique teaching tools for the medical education industry and beyond. Or consider this custom crystal awards for your next event, like a golf tournament.
Crystal recognition awards are perfect for any and all occasions. Due to the nature of this customization process pricing may vary based on artwork complexity. Artwork shown is considered "standard complexity". Contact us to talk about the artwork you want to provide or collaborate with our graphic designers to create something new.
The 3D engraved award shown is 4-3/8 x 3 x 3.
Create your own design or ask for the artistic team at Hillary’s to help.
Talk to us about production-ready formats: 3D cad or 2D Illustrator; all text converted to outlines (curves).
Art can be sent by email to service@askhillarys.com. Include your company name and order number in the subject line.
Quantity pricing available for larger unit needs.
Call (800-742-6800) or email (service@askhillarys.com) to begin the conversation and customizing your 3D subsurface crystal today. Not exactly what you need, let us help you find just the right product.
|
#!/usr/bin/python
# setup.py
import glob
import os.path
from distutils.command.install import install
from distutils.core import setup
from exe.engine import version
# Files installed verbatim into /usr/share/exe, keyed by target directory;
# dataFiles() below extends this mapping recursively.
g_files = { '/usr/share/exe': ["README",
                               "COPYING",
                               "NEWS",
                               "ChangeLog",
                               "exe/webui/mr_x.gif"]}

# Source prefix stripped from collected paths, and the install prefix the
# files are re-rooted under (both reassigned between dataFiles() passes).
g_oldBase = "exe/webui"
g_newBase = "/usr/share/exe"
def dataFiles(dirs, excludes=()):
    """Recursively collect all files under *dirs* into the global g_files
    mapping.

    Each file's path below g_oldBase is re-rooted under g_newBase.
    Hidden entries (basename starting with '.') and files named in
    *excludes* are skipped.
    """
    for entry in dirs:
        base = os.path.basename(entry)
        # BUG FIX: previously tested os.path.basename(entry[0]) -- the
        # basename of the path's first *character* -- so hidden files
        # were never actually skipped.
        if base.startswith("."):
            continue
        if os.path.isfile(entry):
            # BUG FIX: excludes hold bare filenames while entry is a full
            # path, so compare the basename too, and propagate excludes
            # into the recursion (it was dropped before).
            if entry in excludes or base in excludes:
                continue
            relative = entry[len(g_oldBase)+1:]
            target = g_newBase + "/" + os.path.dirname(relative)
            if target in g_files:
                g_files[target].append(entry)
            else:
                g_files[target] = [entry]
        elif os.path.isdir(entry):
            dataFiles(glob.glob(entry + "/*"), excludes)
# Collect the webui resources under /usr/share/exe, skipping the bundled
# mimetex binaries for other platforms.
dataFiles(["exe/webui/style",
           "exe/webui/css",
           "exe/webui/docs",
           "exe/webui/images",
           "exe/webui/schemas",
           "exe/webui/scripts",
           "exe/webui/templates",
           "exe/webui/linux-profile",
           "exe/webui/firefox"],
          excludes = ["mimetex.64.cgi", "mimetex-darwin.cgi", "mimetex.exe"])

# Locale files are collected relative to "exe" rather than "exe/webui".
g_oldBase = "exe"
g_newBase = "/usr/share/exe"
dataFiles(["exe/locale"])

# XUL interface resources.
g_oldBase = "exe/xului"
g_newBase = "/usr/share/exe"
dataFiles(["exe/xului/scripts",
           "exe/xului/templates"])

# Extra options for the RPM build: PIL is a runtime dependency.
opts = {
    "bdist_rpm": {
        "requires": ["python-imaging",]
    }
}

setup(name = version.project,
      version = version.release,
      description = "eLearning XHTML editor",
      long_description = """\
The eXe project is an authoring environment to enable teachers to publish
web content without the need to become proficient in HTML or XML markup.
Content generated using eXe can be used by any Learning Management System.
""",
      url = "http://exelearning.org",
      author = "eXe Project",
      author_email = "exe@exelearning.org",
      license = "GPL",
      scripts = ["exe/exe"],
      packages = ["exe", "exe.webui", "exe.xului",
                  "exe.engine", "exe.export"],
      data_files = g_files.items(),
      options = opts
      )
|
Here comes the Bleu Cheese portion of the program.
Heavy D makes me sad.
A lot of talented people I’ve never heard of passed this year.
|
## COMPUTE AND PRODUCE TIMESCALE PLOTS
from __future__ import print_function
path = '/home/mkloewer/python/swm/'
import os; os.chdir(path) # change working directory
import numpy as np
import matplotlib.pyplot as plt
from netCDF4 import Dataset
def acf(x, l):
    """Autocorrelation function of vector x for lags 0..l-1.

    Lag 0 is 1 by definition; lag i is the Pearson correlation of the
    series against itself shifted by i.
    """
    coeffs = [1]
    for lag in range(1, l):
        coeffs.append(np.corrcoef(x[:-lag], x[lag:])[0, 1])
    return np.array(coeffs)
def findc(x,a):
    """Index of the element of x closest to the value a."""
    return np.abs(x - a).argmin()
# Runs to concatenate and the two probe locations (in metres).
runfolders = [7,8,9,10]
p1 = np.array([300,1920])*1e3    # first probe point [m]
p2 = np.array([2880,1920])*1e3   # second probe point [m]
# Nearest grid indices, filled in the read loop: (u,v,T) x (p1,p2) x (i,j).
# Fix: `np.int` was a deprecated alias removed in NumPy 1.24 -- use builtin int.
pi = np.zeros((3,2,2),dtype=int)
## read data
# Concatenate the probe-point time series of u, v, h across all runs.
for r,i in zip(runfolders,range(len(runfolders))):
    runpath = path+'data/run%04i' % r
    param = np.load(runpath+'/param.npy').all()
    # find p1 and p2 indices
    # (nearest grid point of p1/p2 on each of the u-, v- and T-grids)
    for ig,g in enumerate(['u','v','T']):
        for ip,p in enumerate([p1,p2]):
            for ij,xy in enumerate(['x','y']):
                pi[ig,ip,ij] = findc(param[xy+'_'+g],p[ij])
    ncu = Dataset(runpath+'/u.nc')
    ncv = Dataset(runpath+'/v.nc')
    nch = Dataset(runpath+'/h.nc')
    istart = 0
    if i == 0:
        # the trailing [:,[0,1],[0,1]] keeps only the diagonal (row,col)
        # pairs, i.e. one time series per probe point
        u = ncu.variables['u'][istart:,pi[0,:,0],pi[0,:,1]][:,[0,1],[0,1]]
        v = ncv.variables['v'][istart:,pi[1,:,0],pi[1,:,1]][:,[0,1],[0,1]]
        h = nch.variables['h'][istart:,pi[2,:,0],pi[2,:,1]][:,[0,1],[0,1]]
        t = nch.variables['t'][istart:]
    else:
        # follow-up runs skip record 0 -- presumably a restart duplicate of
        # the previous run's last record; TODO confirm against the model output
        u = np.concatenate((u,ncu.variables['u'][1:,pi[0,:,0],pi[0,:,1]][:,[0,1],[0,1]]))
        v = np.concatenate((v,ncv.variables['v'][1:,pi[1,:,0],pi[1,:,1]][:,[0,1],[0,1]]))
        h = np.concatenate((h,nch.variables['h'][1:,pi[2,:,0],pi[2,:,1]][:,[0,1],[0,1]]))
        t = np.hstack((t,nch.variables['t'][1:]))
    ncu.close()
    ncv.close()
    nch.close()
    print('run %i read.' % r)
## computation
# Autocorrelation of each variable (u, v, h) at each of the two probe points.
l = 200  # number of lags, in units of the output interval (1/4 days per comment above)
acfs = np.zeros((l,3,2))  # lag x (u,v,h) x (p1,p2)
for iv,var in enumerate([u,v,h]):
    for ip in range(2):
        acfs[:,iv,ip] = acf(var[:,ip],l)
dt = t[1]-t[0]                   # output interval [s]; assumes an equidistant time axis
time = np.arange(l)*dt/24/3600   # lag axis in days
## STORING
# Fix: build the export dict explicitly instead of exec()-ing variable names --
# same contents, no dynamic code execution.
dic = {'time': time, 'acfs': acfs, 'p1': p1, 'p2': p2}
# NOTE(review): runpath still points at the *last* run folder from the read
# loop above -- presumably intentional; confirm.
np.save(runpath+'/analysis/acfs.npy',dic)
print('Everything stored.')
|
"Yes please stop with emoticons. Complete waste of time an energy. Learn how to communicate without them mellinals!!"
I will stop when you learn how to spell...the word you were looking for is millennials. Also I will not stop, why do you care if there are more emojis if you are not using them? Must be a generational thing. I can't have fun, you can't have fun?
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""RMSProp optimizer."""
from __future__ import absolute_import
from ..ndarray import (zeros, clip, sqrt, square)
from ..ndarray import (rmsprop_update, rmspropalex_update)
from .optimizer import Optimizer, register
__all__ = ['RMSProp']
@register
class RMSProp(Optimizer):
    """The RMSProp optimizer.

    Two versions of RMSProp are implemented:

    If ``centered=False``, we follow
    http://www.cs.toronto.edu/~tijmen/csc321/slides/lecture_slides_lec6.pdf by
    Tieleman & Hinton, 2012.
    For details of the update algorithm see :class:`~mxnet.ndarray.rmsprop_update`.

    If ``centered=True``, we follow http://arxiv.org/pdf/1308.0850v5.pdf (38)-(45)
    by Alex Graves, 2013.
    For details of the update algorithm see :class:`~mxnet.ndarray.rmspropalex_update`.

    This optimizer accepts the following parameters in addition to those accepted
    by :class:`.Optimizer`.

    Parameters
    ----------
    learning_rate : float, default 0.001
        The initial learning rate. If None, the optimization will use the
        learning rate from ``lr_scheduler``. If not None, it will overwrite
        the learning rate in ``lr_scheduler``. If None and ``lr_scheduler``
        is also None, then it will be set to 0.01 by default.
    rho: float, default 0.9
        A decay factor of moving average over past squared gradient.
    momentum: float, default 0.9
        Heavy ball momentum factor. Only used if `centered`=``True``.
    epsilon : float, default 1e-8
        Small value to avoid division by 0.
    centered : bool, default False
        Flag to control which version of RMSProp to use.::

            True: will use Graves's version of `RMSProp`,
            False: will use Tieleman & Hinton's version of `RMSProp`.

    clip_weights : float, optional
        Clips weights into range ``[-clip_weights, clip_weights]``.
    use_fused_step : bool, default True
        Whether or not to use fused kernels for optimizer.
        When use_fused_step=False, step is called,
        otherwise, fused_step is called.
    """
    def __init__(self, learning_rate=0.001, rho=0.9, momentum=0.9,
                 epsilon=1e-8, centered=False, clip_weights=None,
                 use_fused_step=True, **kwargs):
        super(RMSProp, self).__init__(learning_rate=learning_rate,
                                      use_fused_step=use_fused_step,
                                      **kwargs)
        self.rho = rho
        self.momentum = momentum
        self.centered = centered
        self.epsilon = epsilon
        self.clip_weights = clip_weights

    def create_state(self, index, weight):
        """Create the state array(s) for one parameter.

        Returns a (mean, var, mom) triple for the centered (Graves) variant,
        or a single running-variance array for the Tieleman & Hinton variant.
        """
        if self.centered:
            return (
                zeros(weight.shape, weight.context, stype=weight.stype),  # mean
                zeros(weight.shape, weight.context, stype=weight.stype),  # var
                zeros(weight.shape, weight.context, stype=weight.stype))  # mom
        else:
            return zeros(weight.shape, weight.context, stype=weight.stype)  # var

    def step(self, indices, weights, grads, states):
        """Perform an optimization step using gradients and states.

        Parameters
        ----------
        indices : list of int
            List of unique indices of the parameters into the individual learning rates
            and weight decays. Learning rates and weight decay may be set via `set_lr_mult()`
            and `set_wd_mult()`, respectively.
        weights : list of NDArray
            List of parameters to be updated.
        grads : list of NDArray
            List of gradients of the objective with respect to this parameter.
        states : List of any obj
            List of state returned by `create_state()`.
        """
        for index, weight, grad, state in zip(indices, weights, grads, states):
            self._update_count(index)
            lr = self._get_lr(index)
            wd = self._get_wd(index)

            # preprocess grad: rescale, clip, then add weight decay
            grad *= self.rescale_grad
            if self.clip_gradient is not None:
                grad = clip(grad, - self.clip_gradient, self.clip_gradient)
            grad += wd * weight

            if not self.centered:
                # Tieleman & Hinton: EMA of squared gradient only
                var = state
                var[:] *= self.rho
                var[:] += (1 - self.rho) * square(grad)

                # update weight
                d = grad / (sqrt(var) + self.epsilon)
                weight[:] -= lr * d
            else:
                # Graves: also track the EMA of the gradient (mean) and use
                # the centered second moment var - mean^2 with momentum
                mean, var, mom = state
                mean[:] *= self.rho
                mean[:] += (1 - self.rho) * grad
                var[:] *= self.rho
                var[:] += (1 - self.rho) * square(grad)
                mom[:] *= self.momentum
                mom[:] -= lr * grad / sqrt(var - square(mean) + self.epsilon)

                # update weight
                weight[:] += mom

            if self.clip_weights:
                clip(weight, -self.clip_weights, self.clip_weights, out=weight)

    def fused_step(self, indices, weights, grads, states):
        """Perform a fused optimization step using gradients and states.
        Fused kernel is used for update.

        Parameters
        ----------
        indices : list of int
            List of unique indices of the parameters into the individual learning rates
            and weight decays. Learning rates and weight decay may be set via `set_lr_mult()`
            and `set_wd_mult()`, respectively.
        weights : list of NDArray
            List of parameters to be updated.
        grads : list of NDArray
            List of gradients of the objective with respect to this parameter.
        states : List of any obj
            List of state returned by `create_state()`.
        """
        for index, weight, grad, state in zip(indices, weights, grads, states):
            self._update_count(index)
            lr = self._get_lr(index)
            wd = self._get_wd(index)

            kwargs = {'rho': self.rho, 'epsilon': self.epsilon,
                      'rescale_grad': self.rescale_grad}
            if self.centered:
                kwargs['momentum'] = self.momentum
            # Fix: test `is not None` rather than truthiness so that an
            # explicit clip_gradient=0.0 is forwarded to the kernel, matching
            # the `is not None` check used in step() above.
            if self.clip_gradient is not None:
                kwargs['clip_gradient'] = self.clip_gradient
            if self.clip_weights:
                kwargs['clip_weights'] = self.clip_weights

            # update weight with fused kernel
            if not self.centered:
                var = state
                rmsprop_update(weight, grad, var, out=weight, lr=lr, wd=wd, **kwargs)
            else:
                mean, var, mom = state
                rmspropalex_update(weight, grad, mean, var, mom, out=weight,
                                   lr=lr, wd=wd, **kwargs)
|
Been a few days since I’ve updated. The weekend alternated between busy (company picnic in Virginia Beach, shopping trip) and lazy/malaisey (coining a word here–I’m a writer, I’m allowed).
Not much progress on the writing front, except that today I finally got past a scene that was driving me crazy. I’m very uncertain about it, and whether it’s a direction I want to take, so I’ve been stalling and procrastinating about it. Finally just decided to freakin’ do it already.
The responses I’m getting from agents have slowed a bit, so I might move beyond my current “five at a time” submission method.
Diana Peterfreund writes a handy primer for new writers: Part One, Part Two, and more to come tomorrow.
Agent Kristin Nelson describes a conference encounter that, dare I say, even beats the “manuscript-passed-under-bathroom-door” story/myth/legend.
A new blog by frequent commenter Tia, in which she reviews fantasy debuts with a fascinating “as-she-reads-it” style.
Ever wondered if your sunrise/sunset timing is right in your writing? Say, the scene takes place at five o’clock pm, and the sun is setting…but the scene takes place in summer? It’s a little inconsistency that drives us perfectionists NUTS. Here’s a site to keep you from pushing me those last few steps toward the asylum =).
A fabulous video blog entry at Brotherhood 2.0 (If you haven’t checked out John and Hank Green‘s yearlong video blog project, then your life is missing some awesomeness).
Am I the only one who never knew about the Poe Toaster? That’s seriously cool, although I think the new guy is kind of ruining it by making it political.
That’s all I’ve got. Enjoy.
Hey, thanks for the plug about my fantasy debut blog! I am definitely going to check out the sunrise/sunset thingy.
|
from flask import Flask, render_template, request, redirect, url_for
from flask_sqlalchemy import SQLAlchemy
from flask.ext.uploads import UploadSet, configure_uploads, IMAGES
from werkzeug.utils import secure_filename
from datetime import datetime
from random import choice
from functools import wraps
import os
app = Flask(__name__)
# Uploaded images are saved directly into static/ so templates can link them.
UPLOAD_FOLDER = 'static/'
# Extensions accepted by allowed_file(); NOTE(review): ALLOWED_EXTENSIONS is
# only checked in /addimage, not in /addpost -- confirm whether intentional.
ALLOWED_EXTENSIONS = set(['txt', 'pdf', 'png', 'jpg', 'jpeg', 'gif'])
app.config['UPLOAD_FOLDER'] = UPLOAD_FOLDER
# SQLite database file `test.db` in the instance/working directory.
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///test.db'
db = SQLAlchemy(app)
class Blogpost(db.Model):
    # A single blog entry.
    userid = db.Column(db.Integer, primary_key=True)  # primary key; used as post id in /post/<id>
    title = db.Column(db.String(50))      # post title (max 50 chars)
    author = db.Column(db.String(20))     # author's display name
    date_posted = db.Column(db.DateTime)  # set to datetime.now() when created
    content = db.Column(db.Text)          # post body
    image = db.Column(db.Text)            # uploaded image filename under static/, or None
class User:
    """Minimal in-memory user record with a per-user session dict."""

    def __init__(self, username, password):
        self.username = username
        self.password = password
        # per-user session state, e.g. session['logged_in'] = True
        self.session = {}
# The single hard-coded account used by the login/session logic.
# NOTE(review): credentials live in source code -- acceptable for a demo only.
dean = User('dean', 'bloggy')
@app.route('/')
def index():
    """Front page: all posts, newest first, over a randomly chosen background."""
    backgrounds = ['blog-bg1.jpg',
                   'blog-bg2.jpg',
                   'blog-bg3.jpg',
                   'blog-bg4.jpg',
                   'blog-bg5.jpg',]
    posts = Blogpost.query.order_by(Blogpost.date_posted.desc()).all()
    return render_template('index.html', posts=posts, filename=choice(backgrounds))
def login_required(test):
    """Decorator: redirect to the login page unless dean is logged in."""
    @wraps(test)
    def wrap(*args, **kwargs):
        if 'logged_in' not in dean.session:
            return redirect(url_for('login'))
        return test(*args, **kwargs)
    return wrap
@app.route("/signin", methods = ['GET', 'POST'])
def login():
    """Log in the single hard-coded user.

    GET renders the form; POST checks the submitted credentials and, on
    success, marks the session logged-in and redirects to the add-post page.
    """
    error = None
    if request.method == 'POST':
        # Fix: both credentials must match. The original used `or`, which
        # granted access with only a correct username OR only a correct
        # password.
        if request.form['username'] == dean.username and request.form['password'] == dean.password:
            dean.session['logged_in'] = True
            return redirect(url_for('add'))
        else:
            error = "Wrong username or password"
    return render_template("login.html", error=error)
@app.route('/add')
@login_required
def add():
    """New-post form (login required), shown over a random background."""
    background = choice(['add-bg1.jpg', 'add-bg2.jpg', 'add-bg3.jpg'])
    return render_template('add.html', filename=background)
@app.route('/post/<int:post_id>')
def post(post_id):
    """Show a single post; fall back to the default background if it has no image.

    Fix: removed the leftover debug print of the post fields.
    NOTE(review): .one() raises NoResultFound (a 500) for an unknown id --
    consider first_or_404() if Flask-SQLAlchemy's helper is acceptable.
    """
    post = Blogpost.query.filter_by(userid=post_id).one()
    filename = post.image if post.image else 'home-bg.jpg'
    return render_template('post.html', post=post, filename=filename)
def allowed_file(filename):
    """Return True if *filename* has an extension in ALLOWED_EXTENSIONS.

    Fix: compare the extension case-insensitively -- the original rejected
    upper-case extensions such as 'photo.JPG'.
    """
    return '.' in filename and \
        filename.rsplit('.', 1)[1].lower() in ALLOWED_EXTENSIONS
@app.route('/addpost', methods=['POST'])
def addpost():
    """Create a new blog post from the submitted form, with an optional image.

    Fixes: the route only accepts POST, so the original
    `if request.method == 'POST': ... else: img_filename=None` made the
    no-image branch unreachable and `request.files['image']` raised KeyError
    when the file field was missing. The upload is now optional and the
    extension is validated with allowed_file(), consistent with /addimage.
    """
    title = request.form['title']
    author = request.form['author']
    content = request.form['content']

    # save the image only if one was actually submitted and is allowed
    img_filename = None
    image = request.files.get('image')
    if image and image.filename and allowed_file(image.filename):
        img_filename = secure_filename(image.filename)
        image.save(os.path.join(app.config['UPLOAD_FOLDER'], img_filename))

    post = Blogpost(title=title, author=author, date_posted=datetime.now(),
                    content=content, image=img_filename)
    db.session.add(post)
    db.session.commit()
    return redirect(url_for('index'))
@app.route('/addimage', methods=['POST'])
def addimage():
    """Upload a standalone image into the upload folder.

    Fix: the original returned None, which makes Flask raise
    "view function did not return a valid response"; redirect to the
    front page instead. The dead `img_filename` bookkeeping is dropped.
    """
    if 'image' in request.files:
        image = request.files['image']
        if image and allowed_file(image.filename):
            saved_name = secure_filename(image.filename)
            image.save(os.path.join(app.config['UPLOAD_FOLDER'], saved_name))
    return redirect(url_for('index'))
@app.route('/about')
def about():
    """Static about page with its fixed background image."""
    return render_template('about.html', filename='about-bg.jpg')
if __name__ == '__main__':
    # Create any missing tables, then serve with the interactive debugger.
    # debug=True is for development only -- never enable it in production.
    db.create_all()
    app.run(debug=True)
|
You won’t find much sage advice or brilliant tips for how to clean your house/raise your kids/run a blog/get ahead in your career, mostly because I barely know how to do any of these things myself. I assure you any success on my part is primarily luck.
This is my place to be my ridiculous self, in all my geeked out, nerdy, judgmental glory. Equal parts sarcasm, silliness and just a tinge of abject failure, this is the deepest darkest inner workings of a middle-aged wife and mom who has her hands full staying employed, getting her kid to basketball practice and remembering to feed the dogs. (Sooooo many dogs.) This is also my place to share my work. If you have something you’d like to tell me, please comment. If you enjoy my site, please share. I’m glad you’re here. Grab a cup of coffee and stay awhile.
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.