text
stringlengths 29
850k
|
|---|
from fabric.api import *
import fabric.contrib.project as project
import os
# Local path configuration (can be absolute or relative to fabfile)
env.deploy_path = 'output'
DEPLOY_PATH = env.deploy_path

# Remote server configuration
production = 'root@localhost:22'  # fabric host string: user@host:port
dest_path = '/var/www'  # rsync destination directory on the remote host
def clean():
    """Delete the generated output directory and recreate it empty."""
    if os.path.isdir(DEPLOY_PATH):
        for cmd in ('rm -rf {deploy_path}', 'mkdir {deploy_path}'):
            local(cmd.format(**env))
def build():
    """Generate the site once with the development settings."""
    local('pelican -s pelicanconf.py')
def rebuild():
    """Clean the output directory, then build from scratch."""
    clean()
    build()
def regenerate():
    """Run pelican in watch mode, regenerating on every source change."""
    local('pelican -r -s pelicanconf.py')
def serve():
    """Serve the generated site locally via Python's simple HTTP server."""
    local('cd {deploy_path} && python -m SimpleHTTPServer'.format(**env))
def reserve():
    """Build the site, then serve it locally."""
    build()
    serve()
def preview():
    """Generate the site once with the production settings."""
    local('pelican -s publishconf.py')
def github():
    """Publish to GitHub Pages via ghp-import (not implemented yet).

    The intended implementation is kept below as a reference; for now the
    task only warns the user.
    """
    # if os.path.isdir(DEPLOY_PATH):
    #     local('ghp-import {deploy_path}'.format(**env))
    #     local('git push origin gh-pages')
    print('WARNING: To be supported!')  # fixed missing space after the colon
@hosts(production)
def publish():
    """Build with the production settings and rsync the output to the server."""
    local('pelican -s publishconf.py')
    project.rsync_project(
        local_dir=DEPLOY_PATH.rstrip('/') + '/',
        remote_dir=dest_path,
        exclude=".DS_Store",
        delete=True,
    )
|
Build your knowledge base and spread the word about BruceGreySimcoe with our upcoming Visiting Friends and Relatives (VFR) Facebook campaign, which kicks off on May 1, 2017.
Need Staff for the Summer Season? New TIAO Member Mobilize Jobs Can Help!
Are you short staffed for the upcoming summer season? There isn't much time left to fill positions before your busiest season begins. Contact Mobilize and discover how young Canadians can fill summer labour shortages at your business.
Please note the change in deadline for the Canada Summer Jobs Program - it has been extended until Friday, February 3, 2017.
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'ui_main_window.ui'
#
# Created: Thu Mar 17 23:49:49 2011
# by: pyside-uic 0.2.7 running on PySide 1.0.0~rc1
#
# WARNING! All changes made in this file will be lost!
from PySide import QtCore, QtGui
class Ui_MainWindow(object):
    """Generated UI layer for the EVE Suite launcher window.

    Auto-generated by pyside-uic from 'ui_main_window.ui' -- do not edit by
    hand; regenerate from the .ui file instead.  The window is a grid of
    tool tiles; each tile is a small QWidget containing a QPushButton
    (icon launcher) above a QLabel (tool name, set in retranslateUi).
    """

    def setupUi(self, MainWindow):
        # Build the central widget and the grid that holds the tool tiles.
        MainWindow.setObjectName("MainWindow")
        MainWindow.resize(670, 494)
        self.centralwidget = QtGui.QWidget(MainWindow)
        self.centralwidget.setObjectName("centralwidget")
        self.gridLayout = QtGui.QGridLayout(self.centralwidget)
        self.gridLayout.setObjectName("gridLayout")
        # Tile: "Ship Fitter" (grid row 2, col 5) -- the only tile with an icon.
        self.widget_3 = QtGui.QWidget(self.centralwidget)
        self.widget_3.setObjectName("widget_3")
        self.verticalLayout_3 = QtGui.QVBoxLayout(self.widget_3)
        self.verticalLayout_3.setMargin(0)
        self.verticalLayout_3.setObjectName("verticalLayout_3")
        self.pushButton_3 = QtGui.QPushButton(self.widget_3)
        self.pushButton_3.setText("")
        icon = QtGui.QIcon()
        # Icon comes from the compiled Qt resource module imported below.
        icon.addPixmap(QtGui.QPixmap(":/icons/icon17_04.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.pushButton_3.setIcon(icon)
        self.pushButton_3.setIconSize(QtCore.QSize(64, 64))
        self.pushButton_3.setFlat(False)
        self.pushButton_3.setObjectName("pushButton_3")
        self.verticalLayout_3.addWidget(self.pushButton_3)
        self.label_3 = QtGui.QLabel(self.widget_3)
        self.label_3.setAlignment(QtCore.Qt.AlignCenter)
        self.label_3.setObjectName("label_3")
        self.verticalLayout_3.addWidget(self.label_3)
        self.gridLayout.addWidget(self.widget_3, 2, 5, 1, 1)
        # Tile: "Item Browser" (grid row 2, col 6).
        self.widget_4 = QtGui.QWidget(self.centralwidget)
        self.widget_4.setObjectName("widget_4")
        self.verticalLayout_4 = QtGui.QVBoxLayout(self.widget_4)
        self.verticalLayout_4.setMargin(0)
        self.verticalLayout_4.setObjectName("verticalLayout_4")
        self.pushButton_4 = QtGui.QPushButton(self.widget_4)
        self.pushButton_4.setText("")
        self.pushButton_4.setObjectName("pushButton_4")
        self.verticalLayout_4.addWidget(self.pushButton_4)
        self.label_4 = QtGui.QLabel(self.widget_4)
        self.label_4.setObjectName("label_4")
        self.verticalLayout_4.addWidget(self.label_4)
        self.gridLayout.addWidget(self.widget_4, 2, 6, 1, 1)
        # Tile: "Asset Viewer" (grid row 2, col 3).
        self.widget = QtGui.QWidget(self.centralwidget)
        self.widget.setObjectName("widget")
        self.verticalLayout = QtGui.QVBoxLayout(self.widget)
        self.verticalLayout.setMargin(0)
        self.verticalLayout.setObjectName("verticalLayout")
        self.pushButton = QtGui.QPushButton(self.widget)
        self.pushButton.setText("")
        self.pushButton.setAutoDefault(False)
        self.pushButton.setDefault(False)
        self.pushButton.setFlat(False)
        self.pushButton.setObjectName("pushButton")
        self.verticalLayout.addWidget(self.pushButton)
        self.label = QtGui.QLabel(self.widget)
        self.label.setObjectName("label")
        self.verticalLayout.addWidget(self.label)
        self.gridLayout.addWidget(self.widget, 2, 3, 1, 1)
        # Tile: "Character Viewer" (grid row 2, col 4).
        self.widget_2 = QtGui.QWidget(self.centralwidget)
        self.widget_2.setObjectName("widget_2")
        self.verticalLayout_2 = QtGui.QVBoxLayout(self.widget_2)
        self.verticalLayout_2.setMargin(0)
        self.verticalLayout_2.setObjectName("verticalLayout_2")
        self.pushButton_2 = QtGui.QPushButton(self.widget_2)
        self.pushButton_2.setText("")
        self.pushButton_2.setIconSize(QtCore.QSize(64, 64))
        self.pushButton_2.setObjectName("pushButton_2")
        self.verticalLayout_2.addWidget(self.pushButton_2)
        self.label_2 = QtGui.QLabel(self.widget_2)
        self.label_2.setObjectName("label_2")
        self.verticalLayout_2.addWidget(self.label_2)
        self.gridLayout.addWidget(self.widget_2, 2, 4, 1, 1)
        # Tile: "Manufacturing Calculator" (grid row 3, col 3).
        self.widget_5 = QtGui.QWidget(self.centralwidget)
        self.widget_5.setObjectName("widget_5")
        self.verticalLayout_5 = QtGui.QVBoxLayout(self.widget_5)
        self.verticalLayout_5.setMargin(0)
        self.verticalLayout_5.setObjectName("verticalLayout_5")
        self.manufacturing_calculator_button = QtGui.QPushButton(self.widget_5)
        self.manufacturing_calculator_button.setText("")
        self.manufacturing_calculator_button.setObjectName("manufacturing_calculator_button")
        self.verticalLayout_5.addWidget(self.manufacturing_calculator_button)
        self.label_5 = QtGui.QLabel(self.widget_5)
        self.label_5.setObjectName("label_5")
        self.verticalLayout_5.addWidget(self.label_5)
        self.gridLayout.addWidget(self.widget_5, 3, 3, 1, 1)
        # Tile: "POS Fitter" (grid row 4, col 3).
        self.widget_6 = QtGui.QWidget(self.centralwidget)
        self.widget_6.setObjectName("widget_6")
        self.verticalLayout_9 = QtGui.QVBoxLayout(self.widget_6)
        self.verticalLayout_9.setMargin(0)
        self.verticalLayout_9.setObjectName("verticalLayout_9")
        self.pushButton_9 = QtGui.QPushButton(self.widget_6)
        self.pushButton_9.setText("")
        self.pushButton_9.setObjectName("pushButton_9")
        self.verticalLayout_9.addWidget(self.pushButton_9)
        self.label_9 = QtGui.QLabel(self.widget_6)
        self.label_9.setObjectName("label_9")
        self.verticalLayout_9.addWidget(self.label_9)
        self.gridLayout.addWidget(self.widget_6, 4, 3, 1, 1)
        # Tile: "Mining Calculator" (grid row 3, col 4).
        self.widget_8 = QtGui.QWidget(self.centralwidget)
        self.widget_8.setObjectName("widget_8")
        self.verticalLayout_6 = QtGui.QVBoxLayout(self.widget_8)
        self.verticalLayout_6.setMargin(0)
        self.verticalLayout_6.setObjectName("verticalLayout_6")
        self.pushButton_6 = QtGui.QPushButton(self.widget_8)
        self.pushButton_6.setText("")
        self.pushButton_6.setObjectName("pushButton_6")
        self.verticalLayout_6.addWidget(self.pushButton_6)
        self.label_6 = QtGui.QLabel(self.widget_8)
        self.label_6.setObjectName("label_6")
        self.verticalLayout_6.addWidget(self.label_6)
        self.gridLayout.addWidget(self.widget_8, 3, 4, 1, 1)
        # Tile: "Refine Calculator" (grid row 3, col 5).
        self.widget_9 = QtGui.QWidget(self.centralwidget)
        self.widget_9.setObjectName("widget_9")
        self.verticalLayout_7 = QtGui.QVBoxLayout(self.widget_9)
        self.verticalLayout_7.setMargin(0)
        self.verticalLayout_7.setObjectName("verticalLayout_7")
        self.pushButton_7 = QtGui.QPushButton(self.widget_9)
        self.pushButton_7.setText("")
        self.pushButton_7.setObjectName("pushButton_7")
        self.verticalLayout_7.addWidget(self.pushButton_7)
        self.label_7 = QtGui.QLabel(self.widget_9)
        self.label_7.setObjectName("label_7")
        self.verticalLayout_7.addWidget(self.label_7)
        self.gridLayout.addWidget(self.widget_9, 3, 5, 1, 1)
        # Tile: "Research Calculator" (grid row 3, col 6).
        self.widget_10 = QtGui.QWidget(self.centralwidget)
        self.widget_10.setObjectName("widget_10")
        self.verticalLayout_8 = QtGui.QVBoxLayout(self.widget_10)
        self.verticalLayout_8.setMargin(0)
        self.verticalLayout_8.setObjectName("verticalLayout_8")
        self.research_calculator_button = QtGui.QPushButton(self.widget_10)
        self.research_calculator_button.setText("")
        self.research_calculator_button.setObjectName("research_calculator_button")
        self.verticalLayout_8.addWidget(self.research_calculator_button)
        self.label_8 = QtGui.QLabel(self.widget_10)
        self.label_8.setObjectName("label_8")
        self.verticalLayout_8.addWidget(self.label_8)
        self.gridLayout.addWidget(self.widget_10, 3, 6, 1, 1)
        # Tile: "Skill Browser" (grid row 4, col 4).
        self.widget_11 = QtGui.QWidget(self.centralwidget)
        self.widget_11.setObjectName("widget_11")
        self.verticalLayout_10 = QtGui.QVBoxLayout(self.widget_11)
        self.verticalLayout_10.setMargin(0)
        self.verticalLayout_10.setObjectName("verticalLayout_10")
        self.pushButton_10 = QtGui.QPushButton(self.widget_11)
        self.pushButton_10.setText("")
        self.pushButton_10.setObjectName("pushButton_10")
        self.verticalLayout_10.addWidget(self.pushButton_10)
        self.label_10 = QtGui.QLabel(self.widget_11)
        self.label_10.setObjectName("label_10")
        self.verticalLayout_10.addWidget(self.label_10)
        self.gridLayout.addWidget(self.widget_11, 4, 4, 1, 1)
        # Tile: "Skill Planner" (grid row 4, col 5).
        self.widget_12 = QtGui.QWidget(self.centralwidget)
        self.widget_12.setObjectName("widget_12")
        self.verticalLayout_11 = QtGui.QVBoxLayout(self.widget_12)
        self.verticalLayout_11.setMargin(0)
        self.verticalLayout_11.setObjectName("verticalLayout_11")
        self.pushButton_11 = QtGui.QPushButton(self.widget_12)
        self.pushButton_11.setText("")
        self.pushButton_11.setObjectName("pushButton_11")
        self.verticalLayout_11.addWidget(self.pushButton_11)
        self.label_11 = QtGui.QLabel(self.widget_12)
        self.label_11.setObjectName("label_11")
        self.verticalLayout_11.addWidget(self.label_11)
        self.gridLayout.addWidget(self.widget_12, 4, 5, 1, 1)
        # Tile: "Travel Planner" (grid row 4, col 6).
        self.widget_13 = QtGui.QWidget(self.centralwidget)
        self.widget_13.setObjectName("widget_13")
        self.verticalLayout_12 = QtGui.QVBoxLayout(self.widget_13)
        self.verticalLayout_12.setMargin(0)
        self.verticalLayout_12.setObjectName("verticalLayout_12")
        self.pushButton_12 = QtGui.QPushButton(self.widget_13)
        self.pushButton_12.setText("")
        self.pushButton_12.setObjectName("pushButton_12")
        self.verticalLayout_12.addWidget(self.pushButton_12)
        self.label_12 = QtGui.QLabel(self.widget_13)
        self.label_12.setObjectName("label_12")
        self.verticalLayout_12.addWidget(self.label_12)
        self.gridLayout.addWidget(self.widget_13, 4, 6, 1, 1)
        MainWindow.setCentralWidget(self.centralwidget)
        # Apply translatable texts, then auto-connect on_<object>_<signal> slots.
        self.retranslateUi(MainWindow)
        QtCore.QMetaObject.connectSlotsByName(MainWindow)

    def retranslateUi(self, MainWindow):
        # Set all user-visible strings through Qt's translation machinery.
        MainWindow.setWindowTitle(QtGui.QApplication.translate("MainWindow", "EVE Suite", None, QtGui.QApplication.UnicodeUTF8))
        self.label_3.setText(QtGui.QApplication.translate("MainWindow", "Ship Fitter", None, QtGui.QApplication.UnicodeUTF8))
        self.label_4.setText(QtGui.QApplication.translate("MainWindow", "Item Browser", None, QtGui.QApplication.UnicodeUTF8))
        self.label.setText(QtGui.QApplication.translate("MainWindow", "Asset Viewer", None, QtGui.QApplication.UnicodeUTF8))
        self.label_2.setText(QtGui.QApplication.translate("MainWindow", "Character Viewer", None, QtGui.QApplication.UnicodeUTF8))
        self.label_5.setText(QtGui.QApplication.translate("MainWindow", "Manufacturing Calculator", None, QtGui.QApplication.UnicodeUTF8))
        self.label_9.setText(QtGui.QApplication.translate("MainWindow", "POS Fitter", None, QtGui.QApplication.UnicodeUTF8))
        self.label_6.setText(QtGui.QApplication.translate("MainWindow", "Mining Calculator", None, QtGui.QApplication.UnicodeUTF8))
        self.label_7.setText(QtGui.QApplication.translate("MainWindow", "Refine Calculator", None, QtGui.QApplication.UnicodeUTF8))
        self.label_8.setText(QtGui.QApplication.translate("MainWindow", "Research Calculator", None, QtGui.QApplication.UnicodeUTF8))
        self.label_10.setText(QtGui.QApplication.translate("MainWindow", "Skill Browser", None, QtGui.QApplication.UnicodeUTF8))
        self.label_11.setText(QtGui.QApplication.translate("MainWindow", "Skill Planner", None, QtGui.QApplication.UnicodeUTF8))
        self.label_12.setText(QtGui.QApplication.translate("MainWindow", "Travel Planner", None, QtGui.QApplication.UnicodeUTF8))
# Compiled Qt resource module: registers the icon resources referenced above
# (e.g. ":/icons/icon17_04.png").  pyside-uic emitted this import twice; the
# duplicate is removed since re-importing an already-imported module is a no-op.
import rc_main_window_rc
|
Little Mrs. Married: Valentine's Day Extravaganza!
So, Mama M at My Little Life has started up Valentine's Day Extravaganza for Valentine's week! If you know me at all, you know that I'm a sucker for romance & love this!!
I hope I can participate, because honestly, I've answered some of these questions before. But i feel that i could at least share a condensed version of my stories, in case you don't want to read the NOVEL i wrote before!! (See sidebar). Mama M has a different topic each day of this week to tell blogland about you & your love!
Sunday: Tell the story of how you met the love of your life!
Wednesday: Favorite memory of your wedding day.
Thursday: Your love's biggest romantic screwup.
Friday: Five Question Friday will be questions to do with L-O-V-E LOVE!
Saturday: Show your favorite photo of you & your love.
Sunday: 10 things you love about your love!
I didn't get to do yesterday's so... here we go! How we met. The longer version of this story is told here. But i don't remember meeting my husband. Why? Well, i was an infant. And he was age 3. Yeah - our parents met at church when we were kids. They started hanging out together & i have lots of memories at their house. Then we lost touch for a few years but then we got together again! And that time - we fell in loooove. Oh, yes, he was my first love...we were so young! See?
Now today's topic is Favorite Date Story, good or bad! And I'd say my favorite date with him was February 16, 2008. We had just gotten back together after 11 years apart! CRAZY. And when he first called me after 11 years it was weird, awkward, i wasn't at all sure WHY he was suddenly back in my life! We went on 3 dates & then we realized it wasn't going to work. But a few weeks later he emailed me & we ended up writing LONG emails back & forth, talking about what had happened 11 years before, sorting things out. After about a month & a half of nothing but letters i knew he was the one. Isn't that crazy?! But it's true. So, the date. We went to Carraba's. SO YUMMY. And sweet. We didn't say much - we just kept looking at each other all googly eyed & love sick. HAHA. I wish i had a picture from that dinner. I don't, but i think this one was from later that day...or so.
Ok, have i made y'all roll your eyes & gag enough? Or at least my sister, if she is reading!? HAHAHA.
You two have got to be the most adorable couple I've ever seen!!
And, wow...you've known each other for a long time!!!
|
"""A setuptools based setup module.
See:
https://packaging.python.org/en/latest/distributing.html
"""
# Always prefer setuptools over distutils
from setuptools import setup, find_packages
# To use a consistent encoding
from codecs import open
from os import path
# Directory containing this setup.py (useful for reading e.g. a README).
here = path.abspath(path.dirname(__file__))

setup(
    name='tellervo-python',
    # Versions should comply with PEP440. For a discussion on single-sourcing
    # the version across setup.py and the project code, see
    # https://packaging.python.org/en/latest/single_source_version.html
    version='0.0.1',
    description='Python client to the Tellervo dendrochronology suite',
    # The project's main homepage.
    url='https://github.com/brews/tellervo-python',
    # Author details
    author='S. Brewster Malevich',
    author_email='malevich@email.arizona.edu',
    # Choose your license
    license='GPLv3',
    # See https://pypi.python.org/pypi?%3Aaction=list_classifiers
    classifiers=[
        'Development Status :: 3 - Alpha',
        # Indicate who your project is intended for
        'Intended Audience :: Developers',
        'Intended Audience :: Science/Research',
        'Topic :: Scientific/Engineering',
        'Topic :: Software Development',
        # Pick your license as you wish (should match "license" above)
        'License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)',
        # Specify the Python versions you support here. In particular, ensure
        # that you indicate whether you support Python 2, Python 3 or both.
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
    ],
    # What does your project relate to?
    keywords='client dendrochronology development',
    # You can just specify the packages manually here if your project is
    # simple. Or you can use find_packages().
    packages=find_packages(exclude=['contrib', 'docs']),
    # Alternatively, if you want to distribute just a my_module.py, uncomment
    # this:
    # py_modules=["my_module"],
    # List run-time dependencies here. These will be installed by pip when
    # your project is installed. For an analysis of "install_requires" vs pip's
    # requirements files see:
    # https://packaging.python.org/en/latest/requirements.html
    install_requires=['lxml'],
    # List additional groups of dependencies here (e.g. development
    # dependencies). You can install these using the following syntax,
    # for example:
    # $ pip install -e .[dev,test]
    extras_require={
        'dev': ['check-manifest'],
        'test': ['coverage'],
    }
)
|
So, you have decided to join motshelo this coming month? Well done, for you are about to make a real mess of your life. I am not for that sort of thing — especially one run by co-workers.
Here is why: motshelo brings frustration and regret. It can reduce you to an empty can, and make you appear like an irresponsible mother or father.
Ever heard the cliché “money is the root of all evil”? Whoever said that must have had motshelo in mind.
The originator must have realised how something so sugarcoated as motshelo can put wives and husbands asunder, divide siblings and friends and bring enmity between families.
Let her who will hear understand my cynicism.
December 2013. I am anxiously waiting for my motshelo money. My calculations show that it will be something in the region of P10,000.
There is a group of us – three really, as the other person pulled out around June. Throughout the year we have been lending out our initial P2,000 contribution at interest. By November we knew each one of us would be getting P10,000. That was not to be so.
Dipuo* our co-worker on whom we had entrusted the funds had lent herself the money and was “still waiting” for her bank to approve her loan from which she would pay us when the company closed for business on December 20.
When we confronted her on the last day, she said she would be in town and would be calling us. The following day her phone was off. It would be so until we came back to work the following year.
Suddenly I saw my plans go up in smoke. I had planned, together with some friends, to go to Namibia then Cape Town for the Christmas and New Year Holidays. All that was ruined when Dipuo decided to chow our motshelo savings without our knowledge.
The New Year struck, but I was too absorbed in my misery; in my self-acrimony.
How could I have been so stupid? Here I was having the most miserable time of my life, when Dipuo was having her best time with her family. I had no doubt she had bought them huge presents from our savings.
January came and my other colleague and I were in no mood for negotiations. Numerous threats had not worked, so we tried the courts. It was only after we served her with initial court process that she paid. Whew!
Unbeknown to us, Dipuo was a serial borrower and owed a number of other people. In the time she could not pay us, other people were hounding her to pay. So, it turned out, she took our money.
She essentially postponed her obligation to pay to another time. Unfortunately her muzzle of dishonesty was focused on us.
What embittered me most about the whole experience was the fact I had trusted Dipuo. She was nice. She was also several years older than I was, and appeared mature. How I kicked myself for trusting so blindly!
Such was our trust of Dipuo that when she said she had opened a bank account for the motshelo we believed her. But she had not done so.
So, go ahead, join your motshelo, but please don’t ask me to be part of your group. Just remember to keep a clean damp cloth to wipe off those tears come December 2015.
|
import random as rand
from sage.all import ZZ
from sage.rings.all import RR, QQ, PolynomialRing, Zmod
from sage.rings.arith import next_prime
from sage.functions.all import log, ceil, sqrt
from sage.misc.misc_c import prod
from sage.modules.free_module_element import vector, zero_vector
from sage.stats.distributions.discrete_gaussian_lattice import DiscreteGaussianDistributionLatticeSampler as DGSL
from mmp import MMP
from util import *
import norms
class GGH(MMP):
    """GGH13-style graded encoding scheme over R = ZZ[x]/<x^n + 1> (Sage).

    Encodings live in Rq = Zmod(q)[x]/<x^n + 1>; a level-S encoding of m is
    (m + r*g) / prod_{i in S} z_i for a short r, and zero-testing multiplies
    by p_zt = h * g^(-1) * prod(z) and checks the result is small mod q.
    """

    @staticmethod
    def set_params(lam, k):
        """Derive scheme parameters from security parameter lam and degree k."""
        n = pow(2, ceil(log(lam**2 * k)/log(2))) # dim of poly ring, closest power of 2 to k(lam^2)
        q = next_prime(ZZ(2)**(8*k*lam) * n**k, proof=False) # prime modulus
        sigma = int(sqrt(lam * n))  # Gaussian width for sampling g
        sigma_prime = lam * int(n**(1.5))  # wider Gaussian for level-0 encodings
        return (n, q, sigma, sigma_prime, k)

    @profile(LOG, "setup")
    def __init__(self, params, asym=False):
        """Set up rings, Gaussian samplers, g, the z_i and the zero-test p_zt.

        params: tuple from set_params(); asym selects independent z_i per level.
        """
        (self.n, self.q, sigma, self.sigma_prime, self.k) = params
        S, x = PolynomialRing(ZZ, 'x').objgen()
        self.R = S.quotient_ring(S.ideal(x**self.n + 1))
        Sq = PolynomialRing(Zmod(self.q), 'x')
        self.Rq = Sq.quotient_ring(Sq.ideal(x**self.n + 1))

        # draw z_is uniformly from Rq and compute its inverse in Rq
        if asym:
            z = [self.Rq.random_element() for i in range(self.k)]
            self.zinv = [z_i**(-1) for z_i in z]
        else: # or do symmetric version: one z repeated k times
            z = self.Rq.random_element()
            zinv = z**(-1)
            z, self.zinv = zip(*[(z,zinv) for i in range(self.k)])

        # set up some discrete Gaussians
        DGSL_sigma = DGSL(ZZ**self.n, sigma)
        self.D_sigma = lambda: self.Rq(list(DGSL_sigma()))
        # discrete Gaussian in ZZ^n with stddev sigma_prime, yields random level-0 encodings
        DGSL_sigmap_ZZ = DGSL(ZZ**self.n, self.sigma_prime)
        self.D_sigmap_ZZ = lambda: self.Rq(list(DGSL_sigmap_ZZ()))

        # draw g repeatedly from a Gaussian distribution of Z^n (with param sigma)
        # until g^(-1) in QQ[x]/<x^n + 1> is small (< n^2)
        Sk = PolynomialRing(QQ, 'x')
        K = Sk.quotient_ring(Sk.ideal(x**self.n + 1))
        while True:
            l = self.D_sigma()
            ginv_K = K(mod_near_poly(l, self.q))**(-1)
            ginv_size = vector(ginv_K).norm()
            if ginv_size < self.n**2:
                g = self.Rq(l)
                self.ginv = g**(-1)
                break

        # discrete Gaussian in I = <g>, yields random encodings of 0
        short_g = vector(ZZ, mod_near_poly(g,self.q))
        DGSL_sigmap_I = DGSL(short_g, self.sigma_prime)
        self.D_sigmap_I = lambda: self.Rq(list(DGSL_sigmap_I()))

        # compute zero-testing parameter p_zt
        # randomly draw h (in Rq) from a discrete Gaussian with param q^(1/2)
        self.h = self.Rq(list(DGSL(ZZ**self.n, round(sqrt(self.q)))()))
        # create p_zt
        self.p_zt = self.ginv * self.h * prod(z)

    def encode(self, m, S):
        ''' encodes a vector m (in Zmod(q)^n) to index set S '''
        zinv = prod([self.zinv[i] for i in S])
        m = vector(Zmod(self.q),m)
        zero = vector(Zmod(self.q),self.D_sigmap_I()) # random encoding of 0
        c = self.Rq(list(zero + m))
        return c * zinv

    def sample(self,S):
        # draw an element of Rq from a Gaussian distribution of Z^n (with param sigmaprime)
        # then encode at index set S
        return self.D_sigmap_ZZ() * prod([self.zinv[i] for i in S])

    def zero(self,S):
        ''' encoding of 0 at index S '''
        return self.encode(list(self.D_sigmap_I()), S)

    def is_zero(self, c):
        """Zero-test: c encodes 0 iff c * p_zt is 'small' (< q^0.75) mod q."""
        w = self.Rq(c) * self.p_zt
        return (norms.linf(w,self.q) < ZZ(RR(self.q)**(.75)))
|
Looking for a garage to fix your Power Steering in Paisley?
Save money: Instantly compare Power Steering Repairs prices in Paisley to find the best deal.
Quick and easy: Simply enter the registration number of your car and your post code, compare prices, choose a local garage in Paisley and book your Power Steering Repairs.
|
#!/usr/bin/python
#
# Copyright 2015 Timothy Sutton, w/ insignificant contributions by Allister Banks
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""See docstring for PuppetlabsProductsURLProvider class"""
from __future__ import absolute_import
import re
from distutils.version import LooseVersion
from autopkglib import Processor, ProcessorError, URLGetter
__all__ = ["PuppetAgentProductsURLProvider"]

# Base index of Puppet Labs mac packages.
DL_INDEX = "https://downloads.puppetlabs.com/mac"
# "latest" means: pick the highest version found on the index page.
DEFAULT_VERSION = "latest"
# Major version of the AIO installer to fetch by default.
DEFAULT_PRODUCT_VERSION = "5"
# Default macOS version segment of the download path.
OS_VERSION = "10.12"
class PuppetAgentProductsURLProvider(URLGetter):
    """Extracts a URL for a Puppet Labs item."""

    description = __doc__
    # autopkg processor input contract.
    input_variables = {
        "product_version": {
            "required": False,
            "description":
                "Major version of the AIO installer. Either 5 or 6 at "
                "present. Defaults to %s" % DEFAULT_PRODUCT_VERSION,
        },
        "get_version": {
            "required": False,
            "description":
                ("Specific version to request. Defaults to '%s', which "
                 "automatically finds the highest available release version."
                 % (DEFAULT_VERSION)),
        },
        "get_os_version": {
            "required": False,
            "description":
                ("When fetching the puppet-agent, collection-style pkg, "
                 "designates OS. Defaults to '%s'. Currently only 10.9 "
                 "or 10.10 packages are available."
                 % (OS_VERSION)),
        },
    }
    # autopkg processor output contract.
    output_variables = {
        "version": {
            "description": "Version of the product.",
        },
        "url": {
            "description": "Download URL.",
        },
    }

    def main(self):
        """Return a download URL for a PuppetLabs item"""
        download_url = DL_INDEX
        os_version = self.env.get("get_os_version", OS_VERSION)
        product_version = self.env.get("product_version", DEFAULT_PRODUCT_VERSION)
        version_re = r"\d+\.\d+\.\d+" # e.g.: 10.10/PC1/x86_64/puppet-agent-1.2.5-1.osx10.10.dmg
        download_url += str("/puppet" + product_version + "/" + os_version + "/x86_64")
        # Match hrefs like puppet-agent-1.2.5-1.osx10.10.dmg; group 1 is the
        # dmg filename, group 2 the version.
        re_download = ("href=\"(puppet-agent-(%s)-1.osx(%s).dmg)\"" % (version_re, os_version))
        try:
            data = self.download(download_url, text=True)
        except BaseException as err:
            raise ProcessorError(
                "Unexpected error retrieving download index: '%s'" % err)
        # (dmg, version)
        candidates = re.findall(re_download, data)
        if not candidates:
            raise ProcessorError(
                "Unable to parse any products from download index.")
        # sort to get the highest version (LooseVersion compares "1.10" > "1.9")
        highest = candidates[0]
        if len(candidates) > 1:
            for prod in candidates:
                if LooseVersion(prod[1]) > LooseVersion(highest[1]):
                    highest = prod
        ver, url = highest[1], "%s/%s" % (download_url, highest[0])
        self.env["version"] = ver
        self.env["url"] = url
        self.output("Found URL %s" % self.env["url"])
# Allow running the processor standalone from the command line for debugging.
if __name__ == "__main__":
    PROCESSOR = PuppetAgentProductsURLProvider()
    PROCESSOR.execute_shell()
|
The Democratic Alliance in the Eastern Cape would like to wish all learners well as they begin the new 2018 school year.
Through hard work and dedication; you will be able to make a success of your education.
That said, the EC Provincial Government in tandem with the National Department of Basic Education must work to eradicate mud schools and ensure that learners living far from education opportunities are afforded safe and reliable scholar transport. Furthermore, learners from poor backgrounds must benefit from the school nutrition programme – a hungry child cannot reach their full potential.
We believe every learner has the right to equal education and the DA will work hard to make sure all teachers have the necessary resources they need to teach and all learners have the necessary resources they need to learn.
|
#!/usr/bin/env python
"""Convert the output of MS into psmcfa format (the input file of psmc).

Each MS replicate becomes one FASTA-style record with one character per
bin of bin_size base pairs: 'K' if the bin contains at least one
segregating site, 'T' otherwise.
"""
import sys, argparse

parser = argparse.ArgumentParser(description="Converts the output of MS into \
psmcfa format (the input file of psmc)")
parser.add_argument("-s", "--bin_size", type=int, default=100,
                    help="The equivalent of bin_size in psmc")
parser.add_argument("input_ms_results", help="The file produced by MS")

# Read the input from the command line
args = parser.parse_args()
BinSize = args.bin_size
fname = args.input_ms_results

# Read the file
with open(fname, 'r') as f:
    ms_out_text = f.read()

# The first line of the file is the ms command line that produced it.
ms_command = ms_out_text[:ms_out_text.index('\n')]

# Compute the total length of the simulated sequence: it is the second
# value of the "-r" option on the ms command line.
SeqLength = int(ms_command.split(' -r ')[1].split(' ')[1])

# Compute the number of bins (see PSMC documentation); a partial last bin
# counts as a full bin.
nBins = int(SeqLength / BinSize) + (SeqLength % BinSize != 0)

sequences_list = ms_out_text.split('segsites: ')[1:]
count = 0
for seq in sequences_list:
    count += 1
    (segsites, positions_list) = seq.split('\n')[:2]
    segsites = int(segsites)
    positions_list = positions_list.split()[1:]
    # Start with a sequence of nBins homozygous bins ('T').  For each SNP
    # position we compute its bin and mark the bin heterozygous with 'K'.
    A = ['T'] * nBins
    for p in positions_list:
        pos = int(float(SeqLength) * float(p) / BinSize)
        # ms prints positions with limited precision, so p can round up to
        # 1.0, mapping past the last bin; clamp to avoid an IndexError.
        if pos >= nBins:
            pos = nBins - 1
        A[pos] = 'K'
    sys.stdout.write(">{}\n".format(count))
    # Split the sequence in lines of 60 characters and send them to the
    # standard output.
    for i in range(len(A)):
        if i > 0 and i % 60 == 0:
            sys.stdout.write('\n')
        sys.stdout.write(A[i])
    sys.stdout.write('\n')
|
The 14K yellow or white gold V seems to be an arrow, pointing all eyes toward authentic beauty. Strikingly bright and lustrous, White South Sea Pearls leave their mark. Once witnessed, their satiny yet brilliant surface impresses and leaves nothing to be desired.
This pendant speaks volumes with its delicate presence. Your White South Sea Pearl selection will stop eyes mid-gaze. This pearl is absolutely the picture of luxury.
|
from django.db import models
from django.contrib.auth.models import User, Group
from django.contrib.auth.forms import AuthenticationForm
from django import forms
from accounts.models import UserProfile
from django.forms.formsets import formset_factory
from django.forms.models import modelformset_factory, inlineformset_factory
from django.utils.translation import ugettext as _
from django.core.urlresolvers import reverse
class LoginForm(AuthenticationForm):
    """Login form that adds register / password-reset links to the field help text."""
    username = forms.CharField(label=_("Username"), max_length=30, help_text='%s <a href="%s" tabindex="4">%s</a>' % (_("Are you a new user?"), "/accounts/register/", _("Please register")))
    password = forms.CharField(label=_("Password"), widget=forms.PasswordInput, help_text='%s <a href="%s" tabindex="5">%s</a>' % (_("No password?"), "/accounts/password/reset/", _("Reset your password")))
class ProfileForm(forms.ModelForm):
    """Edit a UserProfile together with the core fields of its related User.

    The extra name/email/username fields are not on the UserProfile model;
    they are seeded from the related User in __init__ and written back to
    it in save().
    """

    def __init__(self, *args, **kwargs):
        super(ProfileForm, self).__init__(*args, **kwargs)
        try:
            # Pre-fill the User-backed fields from the related User object.
            self.fields['username'].initial = self.instance.user.username
            self.fields['email'].initial = self.instance.user.email
            self.fields['first_name'].initial = self.instance.user.first_name
            self.fields['last_name'].initial = self.instance.user.last_name
        except User.DoesNotExist:
            # Profile not yet linked to a User; leave the fields blank.
            pass

    first_name = forms.CharField(label=_("First name"), help_text="")
    last_name = forms.CharField(label=_("Last name"), help_text="")
    email = forms.EmailField(label=_("Primary email"), help_text="")
    username = forms.CharField(label=_('Username'), max_length=30, help_text="")

    class Meta:
        model = UserProfile
        fields = ('first_name', 'last_name', 'email', 'username', 'cellphone', 'address', 'postcode', 'born', 'personal_website', 'occupation', 'employer', 'employer_website')

    def save(self, *args, **kwargs):
        """
        Update the primary email address on the related User object as well.
        """
        u = self.instance.user
        u.email = self.cleaned_data['email']
        u.first_name = self.cleaned_data['first_name']
        u.last_name = self.cleaned_data['last_name']
        u.username = self.cleaned_data['username']
        u.save()
        profile = super(ProfileForm, self).save(*args, **kwargs)
        return profile
class UserGroupsForm(forms.ModelForm):
    """Edit a User's group memberships with a checkbox per group."""
    groups = forms.ModelMultipleChoiceField(
        queryset=Group.objects.all(),
        widget=forms.CheckboxSelectMultiple,
        required=False)

    class Meta:
        model = User
        fields = ('id', 'groups')
|
Here’s what we can say about the Trump administration’s just-released National Defense Strategy: it’s not a strategy and its subject is not defense.
Bearing the imprimatur of Pentagon chief James Mattis, the NDS—at least the unclassified summary that we citizens are permitted to see—is in essence a brief for increasing the size of the U.S. military budget. Implicit in the document is this proposition: more spending will make the armed forces of the United States “stronger” and the United States “safer.” Simply put, the NDS is all about funneling more bucks to the Pentagon.
Remarkably, the NDS advances this argument while resolutely avoiding any discussion of what Americans have gotten in return for the $11 trillion (give or take) expended pursuant to the past 16-plus years of continuous war—as if past performance should have no bearing on the future allocation of resources.
Try this thought experiment. The hapless Cleveland Browns went winless this year. How might Browns fans react if the team’s management were to propose hiking ticket prices next season? Think they might raise a ruckus?
President-elect Donald Trump’s message for the nation’s senior military leadership is ambiguously unambiguous. Here he is on 60 Minutes just days after winning the election.
In reality, Trump, the former reality-show host, knows next to nothing about ISIS, one of many gaps in his education that his impending encounter with actual reality is likely to fill. Yet when it comes to America’s generals, our president-to-be is on to something. No doubt our three- and four-star officers qualify as “great” in the sense that they mean well, work hard, and are altogether fine men and women. That they have not “done the job,” however, is indisputable—at least if their job is to bring America’s wars to a timely and successful conclusion.
Trump’s unhappy verdict—that the senior US military leadership doesn’t know how to win—applies in spades to the two principal conflicts of the post-9/11 era: the Afghanistan War, now in its 16th year, and the Iraq War, launched in 2003 and (after a brief hiatus) once more grinding on. Yet the verdict applies equally to lesser theaters of conflict, largely overlooked by the American public, that in recent years have engaged the attention of US forces, a list that would include conflicts in Libya, Somalia, Syria, and Yemen.
|
from django.conf import settings
from rest_framework import serializers
from models import Location, Story
from localflavor.us.us_states import US_STATES
from people.models import Author
from people.serializers import AuthorSerializer
# Maximum number of stories included in a location's preview list.
STORY_PREVIEW_MAX_COUNT = 3
# Maximum number of characters of story content shown per preview entry.
STORY_PREVIEW_MAX_LENGTH = 100
# to remove
class LocationSerializer(serializers.ModelSerializer):
    """Serializes a Location, exposing the formatted city/state/county
    model properties under their plain field names."""

    city = serializers.CharField(source='city_fmt', required=False, allow_blank=True)
    state = serializers.CharField(source='state_fmt', required=False, allow_blank=True)
    county = serializers.CharField(source='county_fmt', required=False, allow_blank=True)

    class Meta:
        model = Location
        fields = ('id', 'zipcode', 'city', 'county', 'state', 'lon', 'lat')
class StateStoriesSerializer(serializers.ModelSerializer):
    """Aggregated story counts per state (fed by a values() queryset
    grouped on ``location__state``), with a short preview of the most
    recent story texts in that state."""
    abbr = serializers.CharField(source='location__state')
    name = serializers.SerializerMethodField('state_full')
    story_count = serializers.IntegerField(read_only=True, source='id__count')
    preview = serializers.SerializerMethodField('story_preview')

    def state_full(self, obj):
        """Return the full state name for the row, or '' when the row has
        no state abbreviation."""
        abbr = obj.get('location__state')
        if abbr:
            return dict(US_STATES)[abbr]
        else:
            return ""

    def story_preview(self, obj):
        # returns limited preview of most recent stories in state.
        # Fix: order newest-first; the previous ascending 'created_at'
        # ordering returned the *oldest* stories despite the field being
        # documented as a recent preview.
        state = obj.get('location__state')
        stories = (Story.objects.filter(display=True, location__state=state)
                   .order_by('-created_at')[:STORY_PREVIEW_MAX_COUNT])
        return [s.content[:STORY_PREVIEW_MAX_LENGTH].replace('\n', '') for s in stories]

    class Meta:
        model = Location
        fields = ('id', 'abbr', 'name', 'story_count', 'preview')
class CountyStoriesSerializer(serializers.ModelSerializer):
    """Aggregated story counts per county, with a preview of the most
    recent story texts in that county."""
    name = serializers.CharField(source='location__county')
    state = serializers.CharField(source='location__state')
    state_name = serializers.SerializerMethodField('state_full')
    story_count = serializers.IntegerField(read_only=True, source='id__count')
    preview = serializers.SerializerMethodField('story_preview')

    def state_full(self, obj):
        """Return the full state name for the row, or '' when absent."""
        abbr = obj.get('location__state')
        if abbr:
            return dict(US_STATES)[abbr]
        else:
            return ""

    def story_preview(self, obj):
        # returns limited preview of most recent stories in county
        # TODO, limit by state as well?
        county = obj.get('location__county')
        if county:
            # Fix: order newest-first; ascending 'created_at' returned the
            # oldest stories despite this being a "recent" preview.
            stories = (Story.objects.filter(display=True, location__county__startswith=county)
                       .order_by('-created_at')[:STORY_PREVIEW_MAX_COUNT])
            return [s.content[:STORY_PREVIEW_MAX_LENGTH].replace('\n', '') for s in stories]
        # No county on the row: implicitly returns None (serialized as null),
        # matching existing API behavior.

    class Meta:
        model = Location
        fields = ('id', 'name', 'state', 'state_name', 'story_count', 'preview')
class ZipcodeStoriesSerializer(serializers.ModelSerializer):
    """Aggregated story counts per zipcode (fed by a values() queryset
    grouped on ``location__zipcode``)."""

    story_count = serializers.IntegerField(source='id__count', read_only=True)
    zipcode = serializers.CharField(source='location__zipcode')

    class Meta:
        model = Location
        fields = ('id', 'zipcode', 'story_count')
class LocationStoriesSerializer(serializers.ModelSerializer):
    """Location annotated with a story_count aggregate, exposing the
    formatted city/state names and coordinates."""

    story_count = serializers.IntegerField(read_only=True)
    city = serializers.CharField(source='city_fmt')
    state = serializers.CharField(source='state_fmt')

    class Meta:
        model = Location
        fields = (
            'id', 'zipcode', 'city', 'state', 'lon', 'lat', 'story_count')
class StorySerializer(serializers.ModelSerializer):
    """Serializes a Story with nested author and location; the author
    block is removed entirely for anonymous submissions."""

    author = AuthorSerializer(required=False)
    location = LocationSerializer(required=False)
    photo = serializers.SerializerMethodField('get_photo_url')
    content = serializers.CharField(error_messages={'required': "Share a story before submitting"})

    def get_photo_url(self, obj):
        """Return the story photo URL, or '' when no photo is attached."""
        if obj.photo and obj.photo.url:
            return obj.photo.url
        else:
            return ''

    # Override to_representation (was mislabeled "to_relationship") to hide
    # the author block for anonymous stories/authors.
    def to_representation(self, instance):
        data = super(StorySerializer, self).to_representation(instance)
        if data['anonymous'] or data['author']['anonymous']:
            # Drop the author entirely; previously the popped value was
            # bound to an unused variable.
            data.pop('author')
        return data

    def create(self, validated_data):
        "Handles nested data and model lookup or creation for author and location."
        # Use initial_data instead of validated_data so non-field inputs
        # (name, dotted location parts, photo) are visible.
        initial_data = self.initial_data
        name = initial_data.get('name')
        author, new_author = Author.objects.get_or_create_user(user__name=name)
        validated_data['author'] = author
        city = initial_data.get('location.city')
        state = initial_data.get('location.state')
        # A state is sufficient (and required) to create a location; the old
        # condition `(city and state) or state` reduced to exactly this.
        if state:
            location, new_location = Location.objects.get_or_create(city=city, state=state)
            if new_location:
                location.geocode('%s, %s' % (city, state))
                location.save()
            validated_data['location'] = location
        else:
            # overwrite the empty dict to avoid validation errors
            validated_data['location'] = None
        # save the photo
        if 'photo' in initial_data:
            validated_data['photo'] = initial_data['photo']
        # validated_data now holds the resolved related objects
        story = Story.objects.create(**validated_data)
        return story

    class Meta:
        model = Story
        fields = ('id', 'created_at', 'updated_at',
                  'location', 'content', 'photo', 'author', 'anonymous')
|
Wish to see a specific feature/change in future releases? Feel free to post it here, and if it gets enough "likes", we'd definitely include it in future releases!
I was looking for a way to modify the children table without having to rewrite the code each time the application is compiled.
and I came up with the idea of implementing a Hook in the "parent-children.php" file.
and now I put the function "tablename_ini_child" inside "tablename.php" inside the hook folder and I can modify some parameters before visualizing the children table.
This would make it possible to keep a lot of code without needing to rewrite it (or re-paste it).
The $options parameter receives the array of values; everything else works the same as the hook's normal tablename_init function.
If you have any questions, I remain available.
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""application_policy_group
Revision ID: b6e301d5757f
Revises: daaa11a358a2
Create Date: 2017-02-10 01:15:32.361753
"""
# revision identifiers, used by Alembic.
# 'revision' is this migration's id; 'down_revision' is its parent in the
# migration chain.
revision = 'b6e301d5757f'
down_revision = 'daaa11a358a2'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Create the gp_application_policy_groups table and link policy
    target groups to it via a nullable foreign key column."""
    op.create_table(
        'gp_application_policy_groups',
        sa.Column('id', sa.String(36), nullable=False),
        sa.Column('tenant_id', sa.String(length=255), nullable=True),
        sa.Column('name', sa.String(length=50), nullable=True),
        sa.Column('description', sa.String(length=255), nullable=True),
        sa.Column('status', sa.String(length=16), nullable=True),
        sa.Column('status_details', sa.String(length=4096), nullable=True),
        sa.PrimaryKeyConstraint('id'))
    # Nullable so pre-existing policy target groups remain valid without an
    # application policy group assigned.
    op.add_column(
        'gp_policy_target_groups',
        sa.Column('application_policy_group_id', sa.String(length=36),
                  nullable=True))
    op.create_foreign_key('gp_application_policy_group_ibfk_1',
                          source='gp_policy_target_groups',
                          referent='gp_application_policy_groups',
                          local_cols=['application_policy_group_id'],
                          remote_cols=['id'])
def downgrade():
    # Intentionally a no-op: no schema downgrade is provided for this
    # migration.
    pass
|
Ps 42 Claremont: Search homes, houses and land by Ps 42 Claremont in Bronx NY. You can view homes and houses by distance, price, bedrooms, bathrooms and square footage. Use the "Near By" feature to find real estate for sale near a location in Bronx NY including schools, restaurants, local attractions and more!
|
from __future__ import absolute_import
from __future__ import unicode_literals
import logging
import re
from urllib.parse import urljoin
from urllib.parse import urlparse
from svtplay_dl.error import ServiceError
from svtplay_dl.fetcher.hls import hlsparse
from svtplay_dl.service import Service
class Cmore(Service):
    """svtplay-dl service for the C More sites (cmore.se/.dk/.no/.fi).

    Requires a username and password. Resolves the video id from the page,
    fetches playback metadata from playback-api.b17g.net, and yields HLS
    streams for content that is not DRM protected.
    """
    supported_domains = ["www.cmore.se", "www.cmore.dk", "www.cmore.no", "www.cmore.fi"]

    def get(self):
        """Yield stream objects for self.url, or ServiceError objects
        describing why playback is not possible."""
        if not self.config.get("username") or not self.config.get("password"):
            yield ServiceError("You need username and password to download things from this site.")
            return
        token, message = self._login()
        if not token:
            yield ServiceError(message)
            return
        vid = self._get_vid()
        if not vid:
            yield ServiceError("Can't find video id")
            return
        tld = self._gettld()
        self.output["id"] = vid
        metaurl = "https://playback-api.b17g.net/asset/{}?service=cmore.{}" "&device=browser&drm=widevine&protocol=dash%2Chls".format(
            self.output["id"], tld
        )
        res = self.http.get(metaurl)
        janson = res.json()
        self._autoname(janson)
        if janson["metadata"]["isDrmProtected"]:
            yield ServiceError("Can't play this because the video got drm.")
            return
        url = "https://playback-api.b17g.net/media/{}?service=cmore.{}&device=browser&protocol=hls%2Cdash&drm=widevine".format(self.output["id"], tld)
        res = self.http.request("get", url, cookies=self.cookies, headers={"authorization": "Bearer {}".format(token)})
        # NOTE(review): any status above 200 is reported as geoblocking;
        # other failure modes may be conflated with it -- confirm.
        if res.status_code > 200:
            yield ServiceError("Can't play this because the video is geoblocked.")
            return
        if res.json()["playbackItem"]["type"] == "hls":
            streams = hlsparse(
                self.config,
                self.http.request("get", res.json()["playbackItem"]["manifestUrl"]),
                res.json()["playbackItem"]["manifestUrl"],
                output=self.output,
            )
            for n in list(streams.keys()):
                yield streams[n]

    def find_all_episodes(self, config):
        """Collect unique episode URLs linked from the current page,
        honoring the all_last option to keep only the most recent ones."""
        episodes = []
        token, message = self._login()
        if not token:
            logging.error(message)
            return
        res = self.http.get(self.url)
        tags = re.findall('<a class="card__link" href="([^"]+)"', res.text)
        for i in tags:
            url = urljoin("https://www.cmore.{}/".format(self._gettld()), i)
            if url not in episodes:
                episodes.append(url)
        if config.get("all_last") > 0:
            return sorted(episodes[-config.get("all_last") :])
        return sorted(episodes)

    def _gettld(self):
        """Return the two-letter TLD ('se', 'dk', 'no' or 'fi') of the
        request URL; used to pick the country-specific API hosts."""
        if isinstance(self.url, list):
            parse = urlparse(self.url[0])
        else:
            parse = urlparse(self.url)
        return re.search(r"\.(\w{2})$", parse.netloc).group(1)

    def _login(self):
        """Authenticate against account.cmore.<tld>.

        Returns (token, error_message); token is None on failure.
        """
        tld = self._gettld()
        url = "https://www.cmore.{}/login".format(tld)
        # Initial GET presumably primes session cookies before the
        # credential POST -- its response is not used otherwise.
        res = self.http.get(url, cookies=self.cookies)
        if self.config.get("cmoreoperator"):
            post = {
                "username": self.config.get("username"),
                "password": self.config.get("password"),
                "operator": self.config.get("cmoreoperator"),
                "country_code": tld,
            }
        else:
            post = {"username": self.config.get("username"), "password": self.config.get("password")}
        res = self.http.post("https://account.cmore.{}/session?client=cmore-web-prod".format(tld), json=post, cookies=self.cookies)
        if res.status_code >= 400:
            return None, "Wrong username or password"
        janson = res.json()
        token = janson["data"]["vimond_token"]
        return token, None

    def operatorlist(self):
        """Print the TV operator names usable with the operator login."""
        res = self.http.get("https://tve.cmore.se/country/{}/operator?client=cmore-web".format(self._gettld()))
        for i in res.json()["data"]["operators"]:
            print("operator: '{}'".format(i["name"].lower()))

    def _get_vid(self):
        """Resolve the asset id from the page markup, falling back to a
        numeric id embedded in the URL path; None when not found."""
        res = self.http.get(self.url)
        match = re.search('data-asset-id="([^"]+)"', res.text)
        if match:
            return match.group(1)
        parse = urlparse(self.url)
        match = re.search(r"/(\d+)-[\w-]+$", parse.path)
        if match:
            return match.group(1)
        return None

    def _autoname(self, janson):
        """Fill self.output naming fields from the playback metadata."""
        if "seriesTitle" in janson["metadata"]:
            self.output["title"] = janson["metadata"]["seriesTitle"]
            self.output["episodename"] = janson["metadata"]["episodeTitle"]
        else:
            self.output["title"] = janson["metadata"]["title"]
        # NOTE(review): assumes seasonNumber/episodeNumber keys exist for
        # all assets, including movies -- confirm against the API.
        self.output["season"] = janson["metadata"]["seasonNumber"]
        self.output["episode"] = janson["metadata"]["episodeNumber"]
        self.config.set("live", janson["metadata"]["isLive"])
|
First single from the upcoming EP, released in February 2012.
Buy it on bandcamp. Watch the official video on youtube.
|
"""
Compute k-nearest neighbors using brute force search in parallel
via scipy.spatial.distance.cdist and multiprocessing.Pool
psutil is used to evaluate available memory and minimize the number
of parallel jobs for the available resources
"""
import numpy as np
from scipy.spatial.distance import cdist
from multiprocessing import Pool
from contextlib import closing
from functools import partial
import psutil
def process_chunk(chunk, data, k, metric):
    """Find the k nearest neighbors in ``data`` for every row of ``chunk``.

    Returns (distances, indices), each of shape (len(chunk), k), with
    neighbors sorted by increasing distance per row.
    """
    dists = cdist(chunk, data, metric=metric).astype('float32')
    # Partial sort: the k smallest distances per row land in columns [:k].
    nearest = np.argpartition(dists, k).astype('int32')[:, :k]
    row_idx = np.arange(chunk.shape[0])[:, None]
    knn_dists = dists[row_idx, nearest]
    # Fully order just those k candidates within each row.
    order = np.argsort(knn_dists)
    return knn_dists[row_idx, order], nearest[row_idx, order]
def determine_n_chunks(n, k):
    """Decide how many chunks to split the n observations into so each
    worker's distance rows fit in available memory.

    Assuming 32 bit representations for distances and indices.
    """
    # available memory (bytes)
    available = psutil.virtual_memory().available
    # memory needed to store final knn data (d, idx): two n*k arrays of
    # 32-bit values, converted from bits to bytes
    final = 2 * (n * k * 32) / 8
    # total memory usable for subprocesses
    usable = available - final
    # usable per subprocess
    usable_per_subprocess = usable / psutil.cpu_count()
    # chunk size - number of n-dimensional distance arrays that can be held
    # in memory by each subprocess simultaneously.
    # NOTE(review): the divisor treats each row as n*32 *bytes* (8x the
    # true float32 cost); kept as conservative headroom since cdist also
    # materializes float64 intermediates -- confirm intent.
    # Fix: clamp to >= 1 so the division below cannot raise
    # ZeroDivisionError (or go negative) on memory-starved machines.
    chunk_size = max(1.0, usable_per_subprocess // (n * 32))
    return int(n // chunk_size)
def knnsearch(data, k, metric):
    """k-nearest neighbor search via parallelized brute force.

    Parameters
    ----------
    data : ndarray
        n observations in d dimensions
    k : int
        number of neighbors (including self)
    metric : str
        see cdist documentation http://docs.scipy.org/doc/scipy/reference/generated/scipy.spatial.distance.cdist.html

    Returns
    -------
    d : ndarray
        distances to k nearest neighbors
    idx : ndarray
        indices of k nearest neighbors

    Notes
    -----
    This implementation uses np.array_split to pass the data to subprocesses.
    This uses views and does not copy the data in the subprocesses
    """
    worker = partial(process_chunk, **{'data': data, 'k': k, 'metric': metric})
    n_chunks = determine_n_chunks(len(data), k)
    if n_chunks <= 2:
        # Too few chunks to be worth parallelizing; single pass instead.
        return process_chunk(data, data, k, metric)
    with closing(Pool()) as pool:
        pieces = pool.map(worker, np.array_split(data, n_chunks))
    dists, indices = zip(*pieces)
    return np.vstack(dists), np.vstack(indices)
|
THE West Coast Eagles have shared the picture they sent to their banned star Andrew Gaff after his mammoth punishment for punching a fellow player.
MINUTES after his AFL club pinched victory, suspended West Coast star Andrew Gaff’s mobile phone buzzed.
It was his joyous Eagles teammates, sending him a snapshot of their celebratory scenes in the Adelaide Oval rooms.
Gaff watched on television in Melbourne as his teammate Jeremy McGovern kicked a goal after the final siren to steal a four-point win over Port Adelaide on Saturday.
“We sent him a photo of the boys after the game in the rooms,” Eagles coach Adam Simpson said.
Gaff was banned for eight games for a strike which broke the jaw of Fremantle’s Andrew Brayshaw in last Sunday’s West Australian derby.
“It was a big week for the club,” Simpson said after West Coast’s 9.8 (62) to 9.4 (58) win over Port.
“To lose Andrew like we did — and it was a six-day break — part of me felt it was good to get away (from Perth).
"We took this photo and sent it to Gaffy right after the game, I think our supporters would like to see it."
“But yeah, it took us right to the line to move on from it.
Simpson said the club had no other choice.
“We have got to be resilient, don’t we. We haven’t got him now,” he said. “The whole week, it was emotional, it was a big week for everyone. “Andrew has accepted that his actions were poor. And we will do everything we can to get him in the right headspace in the coming weeks. “But I think as an industry we just move on. We know what kind of bloke he is so we’ll just move on.” The second-placed Eagles are now assured of a top-four spot entering the finals.
Another win in their remaining matches, against Melbourne at home and Brisbane away, will secure second spot and coveted home-ground rights for their first final.
|
# Copyright 2014, 2015, 2016 Kevin Reid <kpreid@switchb.org>
#
# This file is part of ShinySDR.
#
# ShinySDR is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ShinySDR is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with ShinySDR. If not, see <http://www.gnu.org/licenses/>.
"""
This is a adapter to allow ghpsdr3-alex clients such as "glSDR" for
Android to connect to ShinySDR as a "dspserver"; references:
<http://openhpsdr.org/wiki/index.php?title=Ghpsdr3_protocols>.
<https://github.com/alexlee188/ghpsdr3-alex/tree/master/trunk/src/dspserver/client.c>
<https://github.com/alexlee188/ghpsdr3-alex/tree/master/trunk/src/dspserver/audiostream.c>
DOES NOT YET WORK: some messages we send have the wrong length as judged
by the glSDR client, resulting in the following messages being
misparsed. No success yet in figuring out where the discrepancy is.
Patches welcome.
"""
from __future__ import absolute_import, division
import array
import struct
from twisted.application.service import Service
from twisted.internet import defer
from twisted.internet import endpoints
from twisted.internet import protocol
from twisted.internet import task
from twisted.python import log
from gnuradio import gr
from shinysdr.twisted_ext import FactoryWithArgs
# Public API of this module.
__all__ = ['DspserverService']

# Clients send fixed-size 64-byte command records (see dataReceived).
_CLIENT_MSG_LENGTH = 64
def _cmd_noop(self, argstr):
"""stub command implementation"""
pass
def _cmd_setFPS(self, argstr):
    """Handle the 'setFPS' client command; argstr is "<width> <rate>".

    Sets the requested spectrum width and frame rate, starts the frame
    poller, and unpauses the monitor.
    """
    width, rate = [int(x) for x in argstr.split(' ')]
    self._req_width = width
    self._top.monitor.set_freq_resolution(width)
    self._top.monitor.set_frame_rate(rate)
    # Poll at twice the frame rate -- presumably to drain frames promptly.
    self._poller.start(1.0 / (rate * 2.0))
    self._top.monitor.set_paused(False)
def _cmd_setFrequency(self, argstr):
    """Handle the 'setFrequency' client command (currently disabled;
    the intended implementation is preserved below)."""
    pass
    # TODO: reenable this
    # freq = int(argstr)
    # self._get_receiver().set_rec_freq(freq)
    # self._top.source.set_freq(freq)
# Dispatch table mapping dspserver command names to handler functions;
# each handler is called with the protocol instance and the argument string.
_dspserver_commands = {
    'q-master': _cmd_noop,  # don't know what this means
    'setFPS': _cmd_setFPS,
    'setFrequency': _cmd_setFrequency,
}
class _DspserverProtocol(protocol.Protocol):
    """One ghpsdr3 'dspserver' client connection.

    Parses fixed-size command records from the client and periodically
    pushes spectrum frames and audio packets back. See the module
    docstring: the outgoing message framing is known to be wrong.
    """

    def __init__(self, top):
        """Set up per-connection state and subscribe to FFT and audio."""
        self._top = top
        # Requested spectrum width in bins; None until the client sends setFPS.
        self._req_width = None
        # Buffer of raw bytes not yet split into 64-byte command records.
        self.__msgbuf = ''
        self._poller = task.LoopingCall(self.__poll)
        # Stream of FFT frames from the monitor.
        self.__splitter = top.monitor.state()['fft'].subscribe_to_stream()
        self.__audio_queue = gr.msg_queue(limit=100)
        self.__audio_buffer = ''
        # 8000 Hz audio -- presumably the rate ghpsdr3 clients expect.
        self._top.add_audio_queue(self.__audio_queue, 8000)

    def dataReceived(self, data):
        """twisted Protocol implementation"""
        # Accumulate bytes and peel off fixed-length command records.
        self.__msgbuf += data
        while len(self.__msgbuf) >= _CLIENT_MSG_LENGTH:
            # TODO: efficient buffering
            msg = self.__msgbuf[:_CLIENT_MSG_LENGTH]
            self.__msgbuf = self.__msgbuf[_CLIENT_MSG_LENGTH:]
            self.__messageReceived(msg)

    def _get_receiver(self):
        """Return the first existing receiver, creating an AM receiver
        if none exist yet."""
        receiver_cells = self._top.receivers.state().values()
        if len(receiver_cells) > 0:
            receiver = receiver_cells[0].get()
        else:
            _, receiver = self._top.add_receiver('AM')
        return receiver

    def __messageReceived(self, data):
        """Decode one null-terminated "<command> <args>" record and
        dispatch it through _dspserver_commands."""
        null = data.find('\0')
        if null > -1:
            data = data[:null]
        print 'Message received: ' + data
        sep = data.find(' ')
        if sep > -1:
            cmd = data[0:sep]
            argstr = data[sep + 1:]
        else:
            cmd = data
            argstr = ''
        impl = _dspserver_commands.get(cmd)
        if impl is not None:
            impl(self, argstr)

    def connectionLost(self, reason):
        # pylint: disable=signature-differs
        # Unhook everything wired up in __init__.
        self._top.remove_audio_queue(self.__audio_queue)
        self._poller.stop()
        self.__splitter.close()

    def __poll(self):
        """Periodic callback: drain FFT frames and audio, sending spectrum
        packets (and, when re-enabled, audio packets) to the client."""
        receiver = self._get_receiver()
        while True:
            frame = self.__splitter.get()
            if frame is None:
                break
            ((freq, sample_rate), fft) = frame
            # Can't format a frame until the client has told us its width.
            if self._req_width is None:
                break
            print 'Sending frame', self._req_width, sample_rate # TODO: Remove debugging
            msg = struct.pack('BBBHHHIh' + str(self._req_width) + 's',
                0,
                2,
                1,
                self._req_width, # short
                0, # meter
                0, # subrx meter
                sample_rate,
                receiver.get_rec_freq() - freq, # lo_offset
                # Clamp each FFT bin into the 1..255 byte range expected on
                # the wire.
                ''.join([chr(int(max(1, min(255, -(x - 20))))) for x in fft]))
            self.transport.write(msg)
        # audio
        aqueue = self.__audio_queue
        while not aqueue.empty_p():
            # pylint: disable=no-member
            grmessage = aqueue.delete_head()
            self.__audio_buffer += grmessage.to_string()
        # Send audio in fixed-size packets (2000 float32 samples).
        size_in_bytes = 2000 * 4
        if len(self.__audio_buffer) > size_in_bytes:
            abuf = self.__audio_buffer[:size_in_bytes]
            self.__audio_buffer = self.__audio_buffer[size_in_bytes:]
            print 'Sending audio', len(abuf) # TODO: Remove debugging
            unpacker = array.array('f')
            unpacker.fromstring(abuf)
            nsamples = len(unpacker)
            msg = struct.pack('BBBH' + str(nsamples) + 'B',
                1,
                2,
                1,
                nsamples,
                # TODO tweak
                *[int(max(0, min(255, x * 127 + 127))) for x in unpacker.tolist()])
            # TODO: Disabled until we fix fft messages
            # self.transport.write(msg)
class DspserverService(Service):
    """Twisted service exposing a ghpsdr3-compatible dspserver endpoint."""

    def __init__(self, reactor, top, endpoint_string):
        self.__top = top
        self.__endpoint = endpoints.serverFromString(reactor, endpoint_string)
        # Listening port object; set once startService has run.
        self.__port_obj = None

    @defer.inlineCallbacks
    def startService(self):
        # Start listening; keep the port for stopService/announce.
        self.__port_obj = yield self.__endpoint.listen(
            FactoryWithArgs.forProtocol(_DspserverProtocol, self.__top))

    def stopService(self):
        return self.__port_obj.stopListening()

    def announce(self, open_client):
        """interface used by shinysdr.main"""
        log.msg('GHPSDR-compatible server at port %s' % self.__port_obj.getHost().port)
|
Bought a Single Family home in 2018 for approximately $250K in Surprise, AZ.
She helped us sell 5 properties in record time with better than expected prices. She knew how to prep the home and did a lot of advance marketing. By far the best real estate agent I have ever worked with.
Sold a Single Family home in 2018 for approximately $425K in Waddell, AZ.
Bought a Single Family home in 2018 in Surprise, AZ.
Bought a Single Family home in 2018 for approximately $200K in Buckeye, AZ.
Bought a home in 2017 in Landa, ND.
Call now at 623-850-3351 or fill out the form below as best suits your needs.
Surprise, AZ Home for Sale!
|
# Copyright 2017 Battelle Energy Alliance, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Created on Apr 20, 2015
@author: talbpaul
"""
import sys
import time
import bisect
import builtins
from utils import utils
_starttime = time.time()
#custom exceptions
class NoMoreSamplesNeeded(GeneratorExit):
  """
    Custom exception signaling that sampling should stop because no
    further samples are required.
  """
  pass
"""
HOW THIS MODULE WORKS
The intention is for a single instance of the MessageHandler class to exist in any simulation.
Currently, that instance is created in the Simulation initialization and propagated through
all the RAVEN objects. This usually happens by passing it to BaseClass.readXML, but for
objects that don't inherit from BaseClass, the messageHandler instance should be passed
and set via instantiation or initialization. The appropriate class member to point at the
messageHandler instance reference is "self.messageHandler," for reasons that will be made clear
with the BaseClasses.MessageUser superclass.
While an object can access the messageHandler to raise messages and errors, for convenience
RAVEN provides the MessageUser superclass, which BaseType and (almost?) all other Raven objects
inherit from. This provides simplistic hooks for a developer to raise an error or message
with the standard message priorities, as
self.raiseAnError(IOError, 'Input value is invalid:', value)
There are currently 4 verbosity levels/message priorities. They are:
- silent: only errors are displayed
- quiet : errors and warnings are displayed
- all : (default) errors, warnings, and messages are displayed
- debug : errors, warnings, messages, and debug messages are displayed
The developer can change the priority level of their raised messages through the 'verbosity'
keyword. For example,
self.raiseAMessage('Hello, World', verbosity='silent')
will be printed along with errors if the simulation verbosity is set to 'silent', as well as
all other levels.
TL;DR: BaseClasses/MessageUser is a superclass that gives access to hooks to the simulation's MessageHandler
instance, while the MessageHandler is an output stream control tool.
In an effort to make the MH more flexible, we insert getMessageHandler into the python "builtins" module.
This means that any time after this module (MessageHandler) is imported, you can use
"getMessageHandler(name='default')" to retrieve a particular message handler as identified by "name".
"""
class MessageHandler(object):
"""
Class for handling messages, warnings, and errors in RAVEN. One instance of this
class should be created at the start of the Simulation and propagated through
the readMoreXML function of the BaseClass, and initialization of other classes.
"""
  def __init__(self):
    """
      Init of class
      @ In, None
      @ Out, None
    """
    # module import time, used as the start reference for this handler
    self.starttime = _starttime
    self.printTag = 'MESSAGE HANDLER'
    # global verbosity level; one of the keys of self.verbCode
    self.verbosity = 'all'
    # column widths used when formatting the caller and tag fields
    self.callerLength = 25
    self.tagLength = 15
    # if True, error() reports the message but does not raise
    self.suppressErrs = False
    self.printTime = True
    # if True, output is wrapped in ANSI color escapes
    self.inColor = False
    # verbosity name -> numeric priority (higher prints more)
    self.verbCode = {'silent':0, 'quiet':1, 'all':2, 'debug':3}
    # default color per message tag
    self.colorDict = {'debug':'yellow', 'message':'neutral', 'warning':'magenta', 'error':'red'}
    # ANSI escape sequences by color name
    self.colors={
      'neutral' : '\033[0m',
      'red' : '\033[31m',
      'green' : '\033[32m',
      'yellow' : '\033[33m',
      'blue' : '\033[34m',
      'magenta' : '\033[35m',
      'cyan' : '\033[36m'}
    self.warnings = [] #collection of warnings that were raised during this run
    self.warningCount = [] #count of the collections of warning above
def initialize(self, initDict):
"""
Initializes basic instance attributes
@ In, initDict, dict, dictionary of global options
@ Out, None
"""
self.verbosity = initDict.get('verbosity','all').lower()
self.callerLength = initDict.get('callerLength',25)
self.tagLength = initDict.get('tagLength',15)
self.suppressErrs = utils.stringIsTrue(initDict.get('suppressErrs', 'False'))
def printWarnings(self):
"""
Prints a summary of warnings collected during the run.
@ In, None
@ Out, None
"""
if len(self.warnings)>0:
if self.verbCode[self.verbosity]>0:
print('-'*50)
print('There were %i warnings during the simulation run:' %sum(self.warningCount))
for w,warning in enumerate(self.warnings):
count = self.warningCount[w]
time = 'time'
if count > 1:
time += 's'
print('(%i %s) %s' %(self.warningCount[w],time,warning))
print('-'*50)
else:
print('There were %i warnings during the simulation run.' %sum(self.warningCount))
def paint(self, str, color):
"""
Formats string with color
@ In, str, string, string
@ In, color, string, color name
@ Out, paint, string, formatted string
"""
if color.lower() not in self.colors.keys():
self.message(self,'Requested color %s not recognized! Skipping...' %color,'Warning','quiet')
return str
return self.colors[color.lower()]+str+self.colors['neutral']
def setTimePrint(self, msg):
"""
Allows the code to toggle timestamp printing.
@ In, msg, string, the string that means true or false
@ Out, None
"""
if utils.stringIsTrue(msg):
self.callerLength = 40
self.tagLength = 30
self.printTime = True
elif utils.stringIsFalse(msg):
self.callerLength = 25
self.tagLength = 15
self.printTime = False
def setColor(self, inColor):
"""
Allows output to screen to be colorized.
@ In, inColor, string, boolean value
@ Out, None
"""
if utils.stringIsTrue(inColor):
self.inColor = True
def getStringFromCaller(self, obj):
"""
Determines the appropriate print string from an object
@ In, obj, instance, preferably an object with a printTag method; otherwise, a string or an object
@ Out, tag, string, string to print
"""
if type(obj).__name__ in ['str','unicode']: # ?when is this ever not true?
return obj
if hasattr(obj,'printTag'):
tag = str(obj.printTag)
else:
tag = str(obj)
return tag
def getDesiredVerbosity(self, caller):
"""
Tries to use local verbosity; otherwise uses global
@ In, caller, instance, the object desiring to print
@ Out, desVerbosity, int, integer equivalent to verbosity level
"""
if hasattr(caller, 'getVerbosity'):
localVerb = caller.getVerbosity()
else:
localVerb = None
if localVerb is None:
localVerb = self.verbosity
desVerbosity = self.checkVerbosity(localVerb)
return desVerbosity
def checkVerbosity(self, verb):
"""
Converts English-readable verbosity to computer-legible integer
@ In, verb, string, the string verbosity equivalent
@ Out, currentVerb, int, integer equivalent to verbosity level
"""
if str(verb).strip().lower() not in self.verbCode.keys():
raise IOError(f'Verbosity key {verb} not recognized! Options are {list(self.verbCode.keys())}')
currentVerb = self.verbCode[str(verb).strip().lower()]
return currentVerb
  def error(self, caller, etype, message, tag='ERROR', verbosity='silent', color=None):
    """
      Raise an error message, unless errors are suppressed.
      @ In, caller, object, the entity desiring to print a message
      @ In, etype, Error, the type of error to throw
      @ In, message, string, the message to print
      @ In, tag, string, optional, the printed message type (usually Message, Debug, or Warning, and sometimes FIXME)
      @ In, verbosity, string, optional, the print priority of the message
      @ In, color, string, optional, color to apply to message
      @ Out, None
    """
    # effective verbosity: the louder of the caller's and the global level;
    # used below to decide how much traceback to show
    verbval = max(self.getDesiredVerbosity(caller),self.checkVerbosity(self.verbosity))
    self.message(caller,message,tag,verbosity,color=color)
    if not self.suppressErrs:
      # flush the warning summary before raising
      self.printWarnings()
      # debug mode gets full traceback, others quieted
      if verbval<3:
        #all, quiet, silent
        # NOTE: sys.tracebacklimit is a process-global side effect
        sys.tracebacklimit=0
      raise etype(message)
def message(self, caller, message, tag, verbosity, color=None, writeTo=sys.stdout, forcePrint=False):
"""
Print a message
@ In, caller, object, the entity desiring to print a message
@ In, message, string, the message to print
@ In, tag, string, the printed message type (usually Message, Debug, or Warning, and sometimes FIXME)
@ In, verbosity, string, the print priority of the message
@ In, color, string, optional, color to apply to message
@ In, forcePrint, bool, optional, force the print independetly on the verbosity level? Defaul False
@ Out, None
"""
verbval = self.checkVerbosity(verbosity)
okay, msg = self._printMessage(caller, message, tag, verbval, color, forcePrint)
if tag.lower().strip() == 'warning':
self.addWarning(message)
if okay:
print(msg,file=writeTo)
sys.stdout.flush()
def addWarning(self, msg):
    """
    Record a warning for the end-of-run summary, de-duplicating repeats.
    Keeps self.warnings sorted with a parallel occurrence count in
    self.warningCount.
    @ In, msg, string, only the main part of the message, used to determine uniqueness
    @ Out, None
    """
    where = bisect.bisect_left(self.warnings, msg)
    alreadyKnown = where < len(self.warnings) and self.warnings[where] == msg
    if alreadyKnown:
        self.warningCount[where] += 1
    else:
        self.warnings.insert(where, msg)
        self.warningCount.insert(where, 1)
def _printMessage(self, caller, message, tag, verbval, color=None, forcePrint=False):
"""
Checks verbosity to determine whether something should be printed, and formats message
@ In, caller , object, the entity desiring to print a message
@ In, message, string, the message to print
@ In, tag , string, the printed message type (usually Message, Debug, or Warning, and sometimes FIXME)
@ In, verbval, int , the print priority of the message
@ In, color, string, optional, color to apply to message
@ In, forcePrint, bool, optional, force the print independetly on the verbosity level? Defaul False
@ Out, (shouldIPrint,msg), tuple, shouldIPrint -> bool, indication if the print should be allowed
msg -> string, the formatted message
"""
#allows raising standardized messages
shouldIPrint = False
desired = self.getDesiredVerbosity(caller)
if verbval <= desired or forcePrint:
shouldIPrint=True
if not shouldIPrint:
return False,''
ctag = self.getStringFromCaller(caller)
msg=self.stdMessage(ctag,tag,message,color)
return shouldIPrint,msg
def stdMessage(self, pre, tag, post, color=None):
    """
    Assemble the standard pretty-printed message string.
    @ In, pre , string, who is printing the message
    @ In, tag , string, the type of message being printed (Error, Warning, Message, Debug, FIXME, etc)
    @ In, post , string, the actual message body
    @ In, color, string, optional, color to apply to message
    @ Out, msg, string, formatted message
    """
    pieces = []
    if self.printTime:
        elapsed = time.time() - self.starttime
        stamp = '({:8.2f} sec) '.format(elapsed)
        if self.inColor:
            stamp = self.paint(stamp, 'cyan')
        pieces.append(stamp)
    # Fixed-width caller and tag columns, truncated if too long.
    body = '{}: {} -> {}'.format(pre.ljust(self.callerLength)[:self.callerLength],
                                 tag.ljust(self.tagLength)[:self.tagLength],
                                 post)
    if self.inColor:
        if color is not None:
            # explicit color overrides the per-tag default
            body = self.paint(body, color)
        elif tag.lower() in self.colorDict:
            body = self.paint(body, self.colorDict[tag.lower()])
    pieces.append(body)
    return ''.join(pieces)
def timePrint(message):
    """
    Print *message* prefixed with the seconds elapsed since module start.
    @ In, message, string
    @ Out, None
    """
    elapsed = time.time() - _starttime
    prefix = '({:8.2f} sec) '.format(elapsed)
    print(prefix + message)
# Registry of named MessageHandler instances, keyed by identifier.
_handlers = {}

def makeHandler(name):
    """
    Instantiate and register a new MessageHandler under *name*.
    @ In, name, str, identifying name for new handler
    @ Out, makeHandler, MessageHandler, the newly created instance
    """
    instance = MessageHandler()
    _handlers[name] = instance
    return instance

# Eagerly create the default handler so getHandler() always has a fallback.
makeHandler('default')
def getHandler(name='default'):
    """
    Retrieve (creating on demand) the message handler registered as *name*.
    Styled after the Python logging module, maybe we should be switching to that.
    @ In, name, str, optional, identifying name of handler to return
    @ Out, getHandler, MessageHandler, instance (created if not existing)
    """
    if name not in _handlers:
        # Lazily create on first request. This avoids the pitfall of
        # _handlers.get(name, makeHandler(name)), whose default argument
        # would be evaluated (and registered) even when the name exists.
        makeHandler(name)
    return _handlers[name]

# Expose globally so legacy code can reach the handler without an import.
builtins.getMessageHandler = getHandler
|
Anchusa arvensis (L.) M.Bieb., Fl. Taur.-Caucas. 1: 123. 1808.
Lycopsis arvensis L., Sp. Pl. 1: 139. 1753.
Anchusa guculeacii Sennen, Pl. Espagne 1927: no. 6133 [in schedis]. 1928.
Anchusa rigolei Sennen, Bull. Soc. Bot. France 74: 388. 1927.
Lycopsis arvensis subsp. occidentalis Kusn., Trudy Bot. Muz. Imp. Akad. Nauk 8: 96. 1911.
Anchusa arvensis subsp. occidentalis (Kusn.) Nordh., Norsk Fl.: 526. 1940.
Euro+Med 2006 onwards: Euro+Med PlantBase – the information resource for Euro-Mediterranean plant diversity. Anchusa arvensis. Published on the internet. Accessed: 2016 6 26.
USDA, ARS, Germplasm Resources Information Network. Anchusa arvensis in the Germplasm Resources Information Network (GRIN), U.S. Department of Agriculture Agricultural Research Service.
For more multimedia, look at Anchusa arvensis on Wikimedia Commons.
This page was last edited on 24 March 2019, at 04:14.
|
from shorty.utils import url_encode
from django.contrib import admin
__author__ = 'cingusoft'
#admin section
class UrlAdmin(admin.ModelAdmin):
    """Django admin configuration for shortened URL entries.

    Provides list filtering/search on status, bulk status-change actions,
    and computed change-list columns for the short slug and password
    protection.
    """
    # Date drill-down navigation in the change list.
    date_hierarchy = 'created'
    # Change-list columns; show_slug and is_protected are computed below.
    list_display = ('url_field','show_slug','user','status','is_protected','created')
    list_display_links = ('url_field',)
    #list_editable = ('url_field',)
    list_filter = ('status',)
    search_fields = ('url_field','status')
    # Edit-form layout: basics up top, password protection collapsed.
    fieldsets = (
        ('General',{
            'fields':('url_field','user','status')
        }),
        ('Advanced options',{
            'classes': ('collapse',),
            'fields':('private','private_password')
        })
    )
    # Bulk actions flipping the status of all selected links.
    actions = ['ban_this_link','active_this_link','refuse_this_link','pending_this_link']
    #actions
    def ban_this_link(self,request,queryset):
        """Mark every selected link as Banned."""
        queryset.update(status='Banned')
    ban_this_link.short_description = 'Ban selected links'
    #actions
    def active_this_link(self,request,queryset):
        """Mark every selected link as Active."""
        queryset.update(status='Active')
    active_this_link.short_description = 'Active selected links'
    #actions
    def refuse_this_link(self,request,queryset):
        """Mark every selected link as Refused."""
        queryset.update(status='Refused')
    refuse_this_link.short_description = 'Refuse selected links'
    #actions
    def pending_this_link(self,request,queryset):
        """Move every selected link back to Pending."""
        queryset.update(status='Pending')
    pending_this_link.short_description = 'Move selected links to Pending'
    #property
    def show_slug(self,obj):
        """Computed column: the short slug (custom, or derived from the id)."""
        if obj.personal:
            # the link has a personal (custom) slug
            return obj.personal_slug
        else:
            return url_encode(obj.id)
    show_slug.short_description = "Slug"
    #property
    def is_protected(self,obj):
        """Computed column: "yes" when the link is password-protected."""
        if obj.private:
            # the link is password-protected
            return "yes"
        else:
            return "no"
    is_protected.short_description = "Is Protected"
|
The 4th C.M.G. Coy. was attached to the 4th Infantry Brigade, 2nd Canadian Infantry Division.
On 23 March 1918, it was amalgamated with the 6th C.M.G. Company to form No. 2 Company, 2nd Bn, C.M.G.C.
|
# -*- coding: UTF-8 -*-
"""Generic GMG solver"""
__docformat__ = "restructuredtext en"
from warnings import warn
import scipy
import numpy
__all__ = ['multilevel_solver']
class level:
    """Stores one level of the multigrid hierarchy

    All level objects will have an 'A' attribute referencing the matrix
    of that level. All levels, except for the coarsest level, will
    also have 'P' and 'R' attributes referencing the prolongation and
    restriction operators that act between each level and the next
    coarser level.

    Attributes
    ----------
    A :
        Problem matrix for Ax=b
    R : reduction
        Restriction matrix between levels (often R = P.T)
    P : interpolator
        Prolongation or Interpolation matrix.

    Notes
    -----
    The functionality of this class is a struct
    """
    def __init__(self, withPETSc=False):
        """Create an empty level.

        Parameters
        ----------
        withPETSc : bool
            If True, PETSc KSP solvers are expected (attached later);
            otherwise pigasus solvers are constructed immediately.
        """
        from pigasus.fem.matrix import matrix
        self.withPETSc = withPETSc
        self.R = None  # restriction operator toward the next coarser level
        self.P = None  # prolongation operator from the next coarser level
        self.A = matrix()
        if self.withPETSc:  # will be treated after
            # NOTE(review): solve()/smoother() below use self.ksp_slv and
            # self.ksp_smt on this code path, but only slv/smt are
            # initialized here -- confirm where the KSP objects get attached.
            # Also, PETSc itself is never imported in this module (requires
            # petsc4py).
            self.slv = None
            self.smt = None
        else:
            from pigasus.solver.solver import solver
            from pigasus.fem.constants import SPM_SOLVER_BASIC_CG, SPM_SOLVER_BASIC_GS
            # CG for coarse solves, Gauss-Seidel sweeps for smoothing.
            self.slv = solver(matrix=self.A, solver=SPM_SOLVER_BASIC_CG)
            self.smt = solver(matrix=self.A, solver=SPM_SOLVER_BASIC_GS)

    def set_P(self, P):
        """Attach the prolongation (interpolation) operator."""
        self.P = P

    def set_R(self, R):
        """Attach the restriction operator."""
        self.R = R

    def set_A(self, A):
        """Fill this level's matrix from *A*."""
        self.A.set(A)

    def construct(self):
        """
        construct the current level, operators and the coarse matrix
        """
        self.P.construct()
        self.R.construct()

    def solve(self, b, maxiter=6000, tol=1.e-10):
        """Solve A x = b on this level, returning x as a numpy array.

        @ In, b, array, right-hand side
        @ In, maxiter, int, iteration cap for the iterative solver
        @ In, tol, float, solver tolerance
        """
        if self.withPETSc:
            _b = PETSc.Vec().createWithArray(b, comm=PETSc.COMM_SELF)
            # bug fix: 'np' was undefined -- this module imports 'numpy'.
            _x = PETSc.Vec().createWithArray(numpy.zeros_like(b), comm=PETSc.COMM_SELF)
            self.ksp_slv.rtol = tol
#            self.ksp_slv.setConvergenceHistory()
            self.ksp_slv.solve(_b, _x)
            return _x.getArray()
        else:
            return self.slv.solve(b, guess=numpy.zeros_like(b),
                                  maxiter=maxiter, eps=tol)

    def smoother(self, x, b, iterations=100, tol=1.e-10):
        """Apply *iterations* smoothing sweeps for A x = b.

        NOTE(review): the initial-guess argument *x* is not used; the sweep
        always starts from zeros -- confirm whether that is intended.
        """
        if self.withPETSc:
            _b = PETSc.Vec().createWithArray(b, comm=PETSc.COMM_SELF)
            # bug fix: 'np' was undefined -- this module imports 'numpy'.
            _x = PETSc.Vec().createWithArray(numpy.zeros_like(b), comm=PETSc.COMM_SELF)
            self.ksp_smt.rtol = tol
            # bug fix: was 'nu' (an undefined name); use the iterations arg.
            self.ksp_smt.max_it = iterations
#            self.ksp_smt.setConvergenceHistory()
            self.ksp_smt.solve(_b, _x)
            return _x.getArray()
        else:
            return self.smt.solve(b, guess=numpy.zeros_like(b),
                                  maxiter=iterations, eps=tol)
class multilevel_solver:
    """Stores multigrid hierarchy and implements the multigrid cycle

    The class constructs the cycling process and points to the methods for
    coarse grid solves. A call to multilevel_solver.solve() is a typical
    access point. The class also defines methods for constructing operator,
    cycle, and grid complexities.

    Attributes
    ----------
    levels : level array
        Array of level objects that contain A, R, and P.
    coarse_solver : string
        String passed to coarse_grid_solver indicating the solve type

    Methods
    -------
    grid_complexity()
        A measure of the rate of coarsening.
    operator_complexity()
        A measure of the size of the multigrid hierarchy.
    solve()
        Iteratively solves a linear system for the right hand side.
    """

    def __init__(self, list_geometry, gamma, nu1, nu2, withPETSc=False):
        """Creates a geometric multigrid over a list of geometries.

        Parameters
        ----------
        list_geometry : list of geometries; [-1] is the finest, [0] the coarsest
        gamma : int
            Number of recursive coarse-grid corrections per cycle
            (1 = V-cycle, 2 = W-cycle).
        nu1 : int
            Number of pre-smoothing sweeps.
        nu2 : int
            Number of post-smoothing sweeps.
        withPETSc : bool
            Whether PETSc solvers are to be used on each level.
        """
        # TODO : for the moment we only treat 1-patch geometries
        self.withPETSc = withPETSc
        self.geometries = list_geometry
        self.dim = self.geometries[0].dim
        self.nlevels = len(self.geometries) - 1
        self.gamma = gamma
        self.nu1 = nu1
        self.nu2 = nu2
        self.nloop = 1
        # NOTE(review): levels are built with the default withPETSc=False;
        # confirm whether self.withPETSc should be forwarded here.
        self.levels = [level() for _ in range(len(self.geometries))]
        self.list_allresiduals = []  # list of residuals for each step
        self.list_coarseresiduals = []

    @staticmethod
    def _residual_norm(A, x, b):
        """Euclidean norm of the residual b - A.x.

        bug fix: the original code called an undefined global
        ``residual_norm``; this helper supplies the obvious implementation.
        """
        return numpy.linalg.norm(b - A.dot(x))

    #-----------------------------------
    def initialize(self, list_A):
        """Build the hierarchy: transfer operators and per-level matrices.

        Parameters
        ----------
        list_A : list
            Matrices ordered coarse -> fine; list_A[-1] is the finest.
        """
        from scipy.sparse import identity as Id
        from pigasus.multigrid.operators import interpolation, reduction
        self.A = list_A[-1]
        self.list_A = list_A
        n, m = self.A.shape

        ilvl = 0
        lvl = self.levels[ilvl]
        # Finest level: identity transfer operators.
        # bug fix: 'level' has no set(A, P, R) method; use the explicit
        # setters it actually defines.
        lvl.set_A(self.A)
        lvl.set_P(Id(n))
        lvl.set_R(Id(n))

        geometries = self.geometries[::-1]
        list_A = self.list_A[::-1]
        for (geo_h, geo_H) in zip(geometries[:-1], geometries[1:]):
            ilvl += 1
            lvl = self.levels[ilvl]
            # ... interpolator
            P = interpolation(geo_H, geo_h)
            # ... restriction
            R = reduction(geo_H, geo_h)
            # ... the coarse system
            try:
                # bug fix: index was the undefined name 'i'.
                A_H = list_A[ilvl].get()
            except Exception:
                print("Galerkin coarse grid operator has been initialized")
                # NOTE(review): neither 'coarse_matrix' nor 'self.DirFaces'
                # is defined in this module -- confirm their origin. The
                # Galerkin-construction lines are grouped inside this
                # fallback branch (the original indentation was ambiguous).
                A_H = coarse_matrix(geo_H, geo_h, DirFaces=self.DirFaces)
                A_h = self.levels[ilvl-1].A.get()
                A_H.construct(A_h)
            lvl.set_P(P)
            lvl.set_R(R)
            # bug fix: was the undefined name 'A'; the coarse matrix is A_H.
            lvl.set_A(A_H)
        # Store levels coarse -> fine.
        self.levels = self.levels[::-1]
    #-----------------------------------

    #-----------------------------------
    def interpolation(self, level, vH):
        """Prolongate the coarse vector vH onto the fine grid of *level*."""
        P = self.levels[level].P
        return P.apply(vH)
    #-----------------------------------

    #-----------------------------------
    def restriction(self, level, vh):
        """Restrict the fine vector vh onto the coarse grid of *level*."""
        R = self.levels[level].R
        return R.apply(vh)
    #-----------------------------------

    #-----------------------------------
    def mgcyc(self, k, gamma, ukm, fk, nu1, nu2,
              smoother=None, coarse_solver=None):
        """
        Perform one multigrid cycle at hierarchy level *k*.

        Returns (ukp1m, err): the improved iterate and the residual norm.

        bug fix: the original resolved None arguments via self.smoother /
        self.coarse_solver, attributes that do not exist on this class
        (AttributeError on every call from solve()). The two hooks are only
        forwarded to the recursive call and never invoked, so they are now
        passed through unchanged.
        """
        nlevels = self.nlevels + 1
        # Levels are stored coarse -> fine after initialize(); index from end.
        lvl = self.levels[::-1][nlevels - k]
        lvl1 = self.levels[::-1][nlevels - k - 1]

        Rk = lvl.R
        Pk = lvl.P
        Lk = lvl1.A
        Lk1 = lvl.A

        # ----------------------------
        # presmoothing
        # ----------------------------
        ukm_s = lvl1.smoother(ukm, fk, nu1)

        # ----------------------------
        # coarse grid correction
        # ----------------------------
        # Compute the defect and restrict it to the coarser grid.
        dkm = fk - Lk.dot(ukm_s)
        dk1m = Rk.dot(dkm)
        # Compute an approximate solution vk1m of the defect equation
        if k == 1:
            # coarsest grid: direct or fast iterative solve
            vk1m = lvl.solve(dk1m)
        if k > 1:
            # bug fix: 'np' was undefined -- this module imports 'numpy'.
            a = numpy.zeros_like(dk1m)
            vk1m_ = dk1m
            for i in range(0, gamma):
                dk1m_ = vk1m_
                vk1m_, err_ = self.mgcyc(k - 1, gamma, a, dk1m_, nu1, nu2,
                                         smoother=smoother,
                                         coarse_solver=coarse_solver)
            vk1m = vk1m_

        # Interpolate the correction and apply it.
        # NOTE(review): the correction is added to the unsmoothed iterate
        # (ukm), not the presmoothed ukm_s, preserving the original behavior;
        # since level.smoother ignores its initial-guess argument the two
        # coincide at runtime -- confirm the intent.
        vkm = Pk.dot(vk1m)
        ukm += vkm

        # ----------------------------
        # postsmoothing
        # ----------------------------
        ukp1m = lvl1.smoother(ukm, fk, nu2)

        err = self._residual_norm(Lk, ukp1m, fk)
        return ukp1m, err
    #-----------------------------------

    def solve(self, b, x0=None, tol=1e-5, maxiter=100, cycle='V', residuals=None):
        """Main solution call to execute multigrid cycling.

        Parameters
        ----------
        b : array
            Right hand side.
        x0 : array
            Initial guess.
        tol : float
            Stopping criteria: relative residual r[k]/r[0] tolerance.
        maxiter : int
            Stopping criteria: maximum number of allowable iterations.
        cycle : {'V','W','F'}
            Type of multigrid cycle to perform in each iteration.
            (Currently unused; the cycle shape is governed by self.gamma.)
        residuals : list
            List to contain residual norms at each iteration.

        Returns
        -------
        x : array
            Approximate solution to Ax=b
        """
        # bug fix: 'np' was undefined -- this module imports 'numpy'.
        x = numpy.zeros_like(b) if x0 is None else numpy.array(x0)  # copy

        # bug fix: the default residuals=None crashed on .append(); fall back
        # to a throwaway list when the caller does not want the history.
        if residuals is None:
            residuals = []
        residuals.append(self._residual_norm(self.A, x, b))

        self.first_pass = True
        self.nloop = 0
        while len(residuals) <= maxiter and residuals[-1] > tol:
            x, err = self.mgcyc(self.nlevels, self.gamma, x, b, self.nu1, self.nu2)
            residuals.append(err)
            self.first_pass = False
            self.nloop += 1
        return x
    #-----------------------------------

    def __repr__(self):
        """Prints basic statistics about the multigrid hierarchy.
        """
        from pyamg.util.linalg import condest
        lvls = self.levels[::-1]

        output = 'multilevel_solver\n'
        output += 'Conditioning Number of the matrix: %d\n' % condest(self.A)
        output += 'Number of Levels: %d\n' % len(lvls)
        output += 'Operator Complexity: %6.3f\n' % self.operator_complexity()
        output += 'Grid Complexity: %6.3f\n' % self.grid_complexity()

        total_nnz = sum(lvl.A.nnz for lvl in lvls)

        output += ' level unknowns nonzeros\n'
        for n, lvl in enumerate(lvls):
            A = lvl.A
            output += ' %2d %10d %10d [%5.2f%%]\n' %\
                (n, A.shape[1], A.nnz,
                 (100 * float(A.nnz) / float(total_nnz)))
        return output

    def operator_complexity(self):
        """Operator complexity of this multigrid hierarchy

        Defined as:
            Number of nonzeros in the matrix on all levels /
            Number of nonzeros in the matrix on the finest level
        """
        lvls = self.levels[::-1]
        return sum(lvl.A.nnz for lvl in lvls) / float(lvls[0].A.nnz)

    def grid_complexity(self):
        """Grid complexity of this multigrid hierarchy

        Defined as:
            Number of unknowns on all levels /
            Number of unknowns on the finest level
        """
        lvls = self.levels[::-1]
        return sum(lvl.A.shape[0] for lvl in lvls) / float(lvls[0].A.shape[0])
|
Short Description: Ceramic Candle Container With Swan Handle — ceramic candle jar, ceramic candle canister, candle container, ceramic candle cup, all made out of ceramic. Taking simple goods and making them beautiful: that's why this candle container was made by hand and is meant to be a permanent piece of art in your home, even after the flame is long gone. Just wipe out any excess wax and reuse the container however you fancy.
|
# -*- coding: utf-8 -*-
""" *==LICENSE==*
CyanWorlds.com Engine - MMOG client, server and tools
Copyright (C) 2011 Cyan Worlds, Inc.
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
Additional permissions under GNU GPL version 3 section 7
If you modify this Program, or any covered work, by linking or
combining it with any of RAD Game Tools Bink SDK, Autodesk 3ds Max SDK,
NVIDIA PhysX SDK, Microsoft DirectX SDK, OpenSSL library, Independent
JPEG Group JPEG library, Microsoft Windows Media SDK, or Apple QuickTime SDK
(or a modified version of those libraries),
containing parts covered by the terms of the Bink SDK EULA, 3ds Max EULA,
PhysX SDK EULA, DirectX SDK EULA, OpenSSL and SSLeay licenses, IJG
JPEG Library README, Windows Media SDK EULA, or QuickTime SDK EULA, the
licensors of this Program grant you additional
permission to convey the resulting work. Corresponding Source for a
non-source form of such a combination shall include the source code for
the parts of OpenSSL and IJG JPEG Library used as well as that of the covered
work.
You can contact Cyan Worlds, Inc. by email legal@cyan.com
or by snail mail at:
Cyan Worlds, Inc.
14617 N Newport Hwy
Mead, WA 99021
*==LICENSE==* """
from Plasma import *
class UCMarkerGame(object):
    """Client-side wrapper around a ptVaultMarkerGameNode.

    Manages a user-created marker game: editing, displaying, and playing
    markers. Markers are held locally as (id, age, pos, desc) tuples,
    mirrored from the vault node (see _RefreshMarkersFromNode).
    """

    def __init__(self, markerNode):
        """Bind to a vault node and pull a local copy of its markers."""
        assert isinstance(markerNode, ptVaultMarkerGameNode)
        self._node = markerNode
        self._RefreshMarkersFromNode()
        self._editMode = False
        self._showingMarkers = False
        self._playing = False

    def AddMarker(self, age, pos, desc):
        """Adds a new marker to this game and returns its ID."""
        # bug fix: the docstring above previously opened with four quote
        # characters ('""""Adds...'), leaving a stray quote in the text.
        idx = self._nextMarkerID
        self._nextMarkerID += 1
        self._markers.append((idx, age, pos, desc))

        # Draw it immediately if markers are visible and we're in that age.
        if self._showingMarkers and age.lower() == PtGetAgeName().lower():
            ptMarkerMgr().addMarker(pos, idx, True)
        return idx

    def BeginEditingMarkers(self):
        """Displays all markers for editing"""
        self._RefreshMarkersFromNode()
        curAge = PtGetAgeName().lower()
        mgr = self._ResetMarkerMgr()
        self._editMode = True
        self._showingMarkers = True
        for idx, age, pos, desc in self.markers:
            if curAge == age.lower():
                mgr.addMarker(pos, idx, False)

    def DeleteMarker(self, idx):
        """Remove the marker with ID *idx*; raises KeyError if not found."""
        for i, marker in enumerate(self._markers):
            if marker[0] == idx:
                # Clear the selection if we're deleting the selected marker.
                if self.selected_marker_id == idx:
                    self.selected_marker_id = -1
                self._markers.pop(i)
                return
        raise KeyError(idx)

    @property
    def edit_mode(self):
        # True between BeginEditingMarkers() and FinishEditingMarkers().
        return self._editMode

    def FinishEditingMarkers(self):
        """Hides all markers and commits edits back to the vault node"""
        self._editMode = False
        self._ResetMarkerMgr()
        self._node.setMarkers(self._markers)
        self._node.save()

    @property
    def game_id(self):
        # The vault node ID doubles as the game's unique identifier.
        return str(self._node.getID())

    @property
    def game_name(self):
        return self._node.getGameName()

    @property
    def marker_total(self):
        return len(self._markers)

    @property
    def markers(self):
        # Local (id, age, pos, desc) tuples -- see _RefreshMarkersFromNode().
        return self._markers

    @property
    def markers_visible(self):
        return self._showingMarkers

    def Play(self):
        """Begin playing this game with a fresh copy of the vault markers."""
        self._playing = True
        self._showingMarkers = True
        self._RefreshMarkersFromNode()

    @property
    def playing(self):
        return self._playing

    def _RefreshMarkersFromNode(self):
        """Copy markers out of the vault node and compute the next free ID."""
        # We hold a local copy of the markers so that we don't have to worry
        # if the game is updated while we're in the middle of playing it.
        self._markers = self._node.getMarkers()

        # This will hold the next marker ID. Will be useful for adding new markers
        if self._markers:
            self._nextMarkerID = max(self._markers, key=lambda x: x[0])[0] + 1
        else:
            self._nextMarkerID = 0

    def _ResetMarkerMgr(self):
        """Clear selection and all drawn markers; returns the marker manager."""
        self._showingMarkers = False
        mgr = ptMarkerMgr()
        mgr.clearSelectedMarker()
        mgr.removeAllMarkers()
        return mgr

    @property
    def selected_marker(self):
        """The full (id, age, pos, desc) tuple of the selection, or None."""
        # renamed local from 'id' (shadowed the builtin)
        wantID = ptMarkerMgr().getSelectedMarker()
        if wantID != -1:
            for marker in self._markers:
                if marker[0] == wantID:
                    return marker
        return None

    def _get_selected_marker_id(self):
        return ptMarkerMgr().getSelectedMarker()
    def _set_selected_marker_id(self, value):
        ptMarkerMgr().setSelectedMarker(value)
    selected_marker_id = property(_get_selected_marker_id, _set_selected_marker_id)

    def _get_selected_marker_index(self):
        wantID = ptMarkerMgr().getSelectedMarker()
        for idx, (markerID, age, pos, desc) in enumerate(self._markers):
            if markerID == wantID:
                return idx
        return -1
    def _set_selected_marker_index(self, value):
        for idx, (markerID, age, pos, desc) in enumerate(self._markers):
            if idx == value:
                ptMarkerMgr().setSelectedMarker(markerID)
                return
    selected_marker_index = property(_get_selected_marker_index, _set_selected_marker_index)

    def _get_selected_marker_name(self):
        marker = self.selected_marker
        if marker is not None:
            return marker[3]
        # typo fix: fallback string was "?UNKOWN MARKER?"
        return "?UNKNOWN MARKER?"
    def _set_selected_marker_name(self, value):
        idx = self.selected_marker_index
        if idx != -1:
            markerID, age, pos, desc = self._markers[idx]
            self._markers[idx] = (markerID, age, pos, value)
    selected_marker_name = property(_get_selected_marker_name, _set_selected_marker_name)

    def Stop(self):
        """Stop playing and clear any drawn markers."""
        self._playing = False
        self._ResetMarkerMgr()
|
This is the second version of my phonetic chart. I'm working on symbols for it, and I think I have an idea of what I want to do. p.s, in the chart under the voiceless category, where it says (Not all are COMPLETELY Voiceless) I'm mostly referring to /tʃ/.
Last edited by Taurenzine on 07 Oct 2016 21:53, edited 1 time in total.
This is more a UX/design consideration, rather than a comment on the sounds themselves, but that red is very striking. It ends up having the effect of calling attention to itself, rather to the surrounding text, creating an odd focal point around the empty cells in your chart. You'll notice that a lot of other charts around the internet (examples on Wikipedia: Valyrian, Swahili) either leave unused cells the same color as used cells, or subtly darken them, so the brighter cells get the focus. It helps draw the eye to the important data of the table, rather than the unimportant data. My recommendation would be (if you really want to color the cells) to use a soft grey, leaving the brighter white background behind used sounds as the foregrounding focus.
Axiem wrote: This is more a UX/design consideration, rather than a comment on the sounds themselves, but that red is very striking. It ends up having the effect of calling attention to itself, rather to the surrounding text, creating an odd focal point around the empty cells in your chart. You'll notice that a lot of other charts around the internet (examples on Wikipedia: Valyrian, Swahili) either leave unused cells the same color as used cells, or subtly darken them, so the brighter cells get the focus. It helps draw the eye to the important data of the table, rather than the unimportant data. My recommendation would be (if you really want to color the cells) to use a soft grey, leaving the brighter white background behind used sounds as the foregrounding focus.
Where’s the first version? Also i’m genuinely confused by that classification (eg. /r/ as a stop), what is it based on?
CrazyEttin wrote: Where’s the first version? Also i’m genuinely confused by that classification (eg. /r/ as a stop), what is it based on?
First of all, if you want to see the first version you should have looked at my other posts before even sending this message, like seriously did you think it would be easier for you if I just linked it to you? I only have like 2 posts.
Second of all, /r/ could go into the fricatives (I'm not going to add another column other than fricatives, stops, and nasals because then it would make my written language more complicated. reasons as to why; I would say it but I'd like to work a bit more on my language before I post anything on that), and for that I thank you because I am now going to change my chart.
most of the classification in this chart is based off of what I would classify it as, and not by what the IPA classifies it as. I mean I'm making one language, I really shouldn't make it as complicated as the IPA chart, because that would be too complicated.
Oh, it’s in another thread. But why make two for one subject?
So, a featural writing system? Those are always awesome, and it definitely explains the classification (although i’d suggest using "continuant" instead of "fricative", since that includes sonorants sucj as /j w/ as well).
Agreed. Some of the simplifications you made just seemed a bit odd before i knew about the writing system.
Thanks for understanding to an extent. please also understand that I only really started getting into languages and conlangs a week or 2 after September started, so I really don't fully understand why certain IPA classification names are called what they are called, so I just do what I can.
and now that you mention it, you're right — it is weird that I've put this chart on a different topic. I'll move it over there.
Thank you so much this was really helpful I'll make sure to keep this in mind.
I am happy to help! I fully recommend listening to all of his linguistics videos actually.
Nachtuil wrote: I fully recommend listening to all of his linguistics videos actually.
I fully support that statement. Besides, his accent is just cool.
|
"""
destroy_cached_images.py
This script is used to clean up Glance images that are cached in the SR. By
default, this script will only cleanup unused cached images.
Options:
--dry_run - Don't actually destroy the VDIs
--all_cached - Destroy all cached images instead of just unused cached
images.
"""
import eventlet
eventlet.monkey_patch()
import os
import sys
from oslo.config import cfg
# If ../nova/__init__.py exists, add ../ to Python search path, so that
# it will override what happens to be installed in /usr/(local/)lib/python...
POSSIBLE_TOPDIR = os.path.normpath(os.path.join(os.path.abspath(sys.argv[0]),
os.pardir,
os.pardir,
os.pardir))
if os.path.exists(os.path.join(POSSIBLE_TOPDIR, 'nova', '__init__.py')):
sys.path.insert(0, POSSIBLE_TOPDIR)
from nova import config
from nova.openstack.common import log as logging
from nova import utils
from nova.virt.xenapi import driver as xenapi_driver
from nova.virt.xenapi import vm_utils
# CLI options specific to this maintenance script; merged into the standard
# nova configuration object below so config.parse_args() recognizes them.
destroy_opts = [
    cfg.BoolOpt('all_cached',
                default=False,
                help='Destroy all cached images instead of just unused cached'
                     ' images.'),
    cfg.BoolOpt('dry_run',
                default=False,
                help='Don\'t actually delete the VDIs.')
]

CONF = cfg.CONF
CONF.register_cli_opts(destroy_opts)
def main():
    """Parse CLI args, connect to XenAPI, and destroy cached image VDIs.

    Honors the --all_cached and --dry_run options registered above; with
    --verbose on the command line, also prints each destroyed VDI.
    """
    config.parse_args(sys.argv)
    utils.monkey_patch()

    xenapi = xenapi_driver.XenAPIDriver()
    session = xenapi._session

    sr_ref = vm_utils.safe_find_sr(session)
    destroyed = vm_utils.destroy_cached_images(
        session, sr_ref, all_cached=CONF.all_cached,
        dry_run=CONF.dry_run)

    if '--verbose' in sys.argv:
        # bug fix: parenthesized print is valid under both Python 2 and 3;
        # the bare print statements were Python-2-only.
        print('\n'.join(destroyed))

    print("Destroyed %d cached VDIs" % len(destroyed))


if __name__ == "__main__":
    main()
|
30-year old male in Phoenix, Arizona, United States.
Post a comment to webnebula's profile.
webnebula does not have any Debate.org friends.
webnebula has not added any photo albums.
If you are logged in, you will also see green or red bullets next to each issue, which indicate whether you agree or disagree with webnebula on that particular issue.
You can also click each issue to find other members that agree with webnebula's position on the issue.
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import re
import os
import curses
import codecs
import six
import pytest
from rtv.docs import HELP, COMMENT_EDIT_FILE
from rtv.objects import Color
from rtv.exceptions import TemporaryFileError, MailcapEntryNotFound
try:
from unittest import mock
except ImportError:
import mock
def test_terminal_properties(terminal, config):
    """Spot-check Terminal's static glyph attributes and display detection."""

    # Arrow/gild markers are 2-tuples whose first element is a text symbol.
    assert len(terminal.up_arrow) == 2
    assert isinstance(terminal.up_arrow[0], six.text_type)
    assert len(terminal.down_arrow) == 2
    assert isinstance(terminal.down_arrow[0], six.text_type)
    assert len(terminal.neutral_arrow) == 2
    assert isinstance(terminal.neutral_arrow[0], six.text_type)
    assert len(terminal.guilded) == 2
    assert isinstance(terminal.guilded[0], six.text_type)

    # Empty DISPLAY -> no graphical display available.
    terminal._display = None
    with mock.patch.dict('os.environ', {'DISPLAY': ''}):
        assert terminal.display is False

    # Empty DISPLAY with sys.platform faked to darwin -> still no display.
    terminal._display = None
    with mock.patch('rtv.terminal.sys') as sys, \
            mock.patch.dict('os.environ', {'DISPLAY': ''}):
        sys.platform = 'darwin'
        assert terminal.display is False

    # DISPLAY set but BROWSER is a console browser (w3m) -> no display.
    terminal._display = None
    with mock.patch.dict('os.environ', {'DISPLAY': ':0', 'BROWSER': 'w3m'}):
        assert terminal.display is False

    # DISPLAY set and no console browser -> a display is available.
    terminal._display = None
    with mock.patch.dict('os.environ', {'DISPLAY': ':0', 'BROWSER': ''}), \
            mock.patch('webbrowser._tryorder'):
        assert terminal.display is True

    # get_arrow accepts the three vote states (None/True/False).
    assert terminal.get_arrow(None) is not None
    assert terminal.get_arrow(True) is not None
    assert terminal.get_arrow(False) is not None
    assert terminal.config == config
    assert terminal.loader is not None

    assert terminal.MIN_HEIGHT is not None
    assert terminal.MIN_WIDTH is not None
def test_terminal_functions(terminal):
    """Exercise thin curses wrappers: flash, getch, no_delay, suspend, addch."""

    terminal.flash()
    assert curses.flash.called

    terminal.getch()
    assert terminal.stdscr.getch.called

    # no_delay() must toggle nodelay on and restore it even when the body raises.
    with pytest.raises(RuntimeError):
        with terminal.no_delay():
            raise RuntimeError()
    terminal.stdscr.nodelay.assert_any_call(0)
    terminal.stdscr.nodelay.assert_any_call(1)

    # suspend() tears down and restores the curses screen...
    curses.endwin.reset_mock()
    curses.doupdate.reset_mock()
    with terminal.suspend():
        pass
    assert curses.endwin.called
    assert curses.doupdate.called

    # ...including when the suspended body raises.
    curses.endwin.reset_mock()
    curses.doupdate.reset_mock()
    with pytest.raises(RuntimeError):
        with terminal.suspend():
            raise RuntimeError()
    assert curses.endwin.called
    assert curses.doupdate.called

    # addch() forwards its arguments to the window's addch.
    terminal.addch(terminal.stdscr, 3, 5, 'ch', 'attr')
    terminal.stdscr.addch.assert_called_with(3, 5, 'ch', 'attr')
def test_terminal_clean_ascii(terminal):
    """In ascii mode, clean() returns ascii bytes, replacing non-ascii chars."""
    terminal.config['ascii'] = True

    # unicode returns ascii
    text = terminal.clean('hello ❤')
    assert isinstance(text, six.binary_type)
    assert text.decode('ascii') == 'hello ?'

    # utf-8 returns ascii
    text = terminal.clean('hello ❤'.encode('utf-8'))
    assert isinstance(text, six.binary_type)
    assert text.decode('ascii') == 'hello ?'

    # ascii returns ascii
    text = terminal.clean('hello'.encode('ascii'))
    assert isinstance(text, six.binary_type)
    assert text.decode('ascii') == 'hello'
def test_terminal_clean_unicode(terminal):
    """With ascii mode off, clean() returns utf-8 bytes preserving non-ascii."""
    terminal.config['ascii'] = False

    # unicode returns utf-8
    text = terminal.clean('hello ❤')
    assert isinstance(text, six.binary_type)
    assert text.decode('utf-8') == 'hello ❤'

    # utf-8 returns utf-8
    text = terminal.clean('hello ❤'.encode('utf-8'))
    assert isinstance(text, six.binary_type)
    assert text.decode('utf-8') == 'hello ❤'

    # ascii returns utf-8
    text = terminal.clean('hello'.encode('ascii'))
    assert isinstance(text, six.binary_type)
    assert text.decode('utf-8') == 'hello'
def test_terminal_clean_ncols(terminal):
    """clean() truncates text that does not fit in the given column count."""
    text = terminal.clean('hello', n_cols=5)
    assert text.decode('utf-8') == 'hello'

    text = terminal.clean('hello', n_cols=4)
    assert text.decode('utf-8') == 'hell'

    # NOTE(review): the 10/9 expectations mirror 5/4, implying clean()
    # reserves extra width in this case -- confirm against rtv.terminal.
    text = terminal.clean('hello', n_cols=10)
    assert text.decode('utf-8') == 'hello'

    text = terminal.clean('hello', n_cols=9)
    assert text.decode('utf-8') == 'hell'
@pytest.mark.parametrize('use_ascii', [True, False])
def test_terminal_clean_unescape_html(terminal, use_ascii):
    """HTML entities such as < are decoded before encoding."""
    terminal.config['ascii'] = use_ascii
    codec = 'ascii' if use_ascii else 'utf-8'
    cleaned = terminal.clean('<')
    assert isinstance(cleaned, six.binary_type)
    assert cleaned.decode(codec) == '<'
@pytest.mark.parametrize('use_ascii', [True, False])
def test_terminal_add_line(terminal, stdscr, use_ascii):
    """add_line() draws through window.addstr, clipping to the screen.

    BUG FIX: the original assertions used ``mock.called_with(...)``, which is
    not a real Mock method — Mock auto-creates the attribute and the assert is
    always truthy, i.e. a no-op. Assert that addstr was actually invoked
    instead (TODO: tighten to the exact arguments once verified against the
    implementation's call signature).
    """
    terminal.config['ascii'] = use_ascii

    # Normal case: text is drawn
    terminal.add_line(stdscr, 'hello')
    assert stdscr.addstr.called
    stdscr.reset_mock()

    # Text will be drawn, but cut off to fit on the screen
    terminal.add_line(stdscr, 'hello', row=3, col=75)
    assert stdscr.addstr.called
    stdscr.reset_mock()

    # Outside of screen bounds, don't even try to draw the text
    terminal.add_line(stdscr, 'hello', col=79)
    assert not stdscr.addstr.called
    stdscr.reset_mock()
@pytest.mark.parametrize('use_ascii', [True, False])
def test_show_notification(terminal, stdscr, use_ascii):
    """show_notification() sizes its sub-window to the text and the screen.

    The mocked stdscr records the nlines/ncols requested for the sub-window,
    and the addstr call count shows how many lines were actually drawn.
    """
    terminal.config['ascii'] = use_ascii

    # Multi-line messages should be automatically split
    text = 'line 1\nline 2\nline3'
    terminal.show_notification(text)
    # 3 text lines drawn; window is 5 rows (presumably 2 extra for a border)
    assert stdscr.subwin.nlines == 5
    assert stdscr.subwin.addstr.call_count == 3
    stdscr.reset_mock()

    # The text should be trimmed to fit 40x80
    text = HELP.strip().splitlines()
    terminal.show_notification(text)
    assert stdscr.subwin.nlines == 40
    assert stdscr.subwin.ncols <= 80
    # 40 rows leaves 38 drawable lines (same 2-row overhead as above)
    assert stdscr.subwin.addstr.call_count == 38
    stdscr.reset_mock()

    # The text should be trimmed to fit in 20x20
    stdscr.nlines, stdscr.ncols = 15, 20
    text = HELP.strip().splitlines()
    terminal.show_notification(text)
    assert stdscr.subwin.nlines == 15
    assert stdscr.subwin.ncols == 20
    assert stdscr.subwin.addstr.call_count == 13
@pytest.mark.parametrize('use_ascii', [True, False])
def test_text_input(terminal, stdscr, use_ascii):
    """Exercise the textpad input loop with scripted keypresses."""
    terminal.config['ascii'] = use_ascii
    stdscr.nlines = 1

    # Text will be wrong because stdscr.inch() is not implemented
    # But we can at least tell if text was captured or not
    # RETURN submits the buffer -> a text result
    stdscr.getch.side_effect = [ord('h'), ord('i'), ord('!'), terminal.RETURN]
    assert isinstance(terminal.text_input(stdscr), six.text_type)

    # ESCAPE cancels the input -> None
    stdscr.getch.side_effect = [ord('b'), ord('y'), ord('e'), terminal.ESCAPE]
    assert terminal.text_input(stdscr) is None

    # KEY_RESIZE is tolerated when resizing is allowed...
    stdscr.getch.side_effect = [ord('h'), curses.KEY_RESIZE, terminal.RETURN]
    assert terminal.text_input(stdscr, allow_resize=True) is not None

    # ...and aborts the input when it is not
    stdscr.getch.side_effect = [ord('h'), curses.KEY_RESIZE, terminal.RETURN]
    assert terminal.text_input(stdscr, allow_resize=False) is None
@pytest.mark.parametrize('use_ascii', [True, False])
def test_prompt_input(terminal, stdscr, use_ascii):
    """prompt_input() draws a prompt and collects a line (or a single key)."""
    terminal.config['ascii'] = use_ascii
    window = stdscr.derwin()

    # RETURN submits -> unicode result
    window.getch.side_effect = [ord('h'), ord('i'), terminal.RETURN]
    assert isinstance(terminal.prompt_input('hi'), six.text_type)

    # The prompt text is drawn bold/cyan in the status sub-window
    attr = Color.CYAN | curses.A_BOLD
    stdscr.subwin.addstr.assert_called_with(0, 0, 'hi'.encode('ascii'), attr)
    # The input window is a single row next to the prompt
    assert window.nlines == 1
    assert window.ncols == 78

    # ESCAPE cancels -> None
    window.getch.side_effect = [ord('b'), ord('y'), ord('e'), terminal.ESCAPE]
    assert terminal.prompt_input('hi') is None

    # key=True returns the first key code pressed...
    stdscr.getch.side_effect = [ord('b'), ord('e'), terminal.RETURN]
    assert terminal.prompt_input('hi', key=True) == ord('b')

    # ...unless that key is ESCAPE, which cancels
    stdscr.getch.side_effect = [terminal.ESCAPE, ord('e'), ord('l')]
    assert terminal.prompt_input('hi', key=True) is None
def test_prompt_y_or_n(terminal, stdscr):
    """prompt_y_or_n() accepts y/N/Esc; anything else flashes and is False.

    The four scripted keypresses below are consumed one per call, in order.
    """
    stdscr.getch.side_effect = [ord('y'), ord('N'), terminal.ESCAPE, ord('a')]
    attr = Color.CYAN | curses.A_BOLD
    text = 'hi'.encode('ascii')

    # Press 'y'
    assert terminal.prompt_y_or_n('hi')
    stdscr.subwin.addstr.assert_called_with(0, 0, text, attr)
    assert not curses.flash.called

    # Press 'N'
    assert not terminal.prompt_y_or_n('hi')
    stdscr.subwin.addstr.assert_called_with(0, 0, text, attr)
    assert not curses.flash.called

    # Press Esc
    assert not terminal.prompt_y_or_n('hi')
    stdscr.subwin.addstr.assert_called_with(0, 0, text, attr)
    assert not curses.flash.called

    # Press an invalid key
    assert not terminal.prompt_y_or_n('hi')
    stdscr.subwin.addstr.assert_called_with(0, 0, text, attr)
    assert curses.flash.called
@pytest.mark.parametrize('use_ascii', [True, False])
def test_open_editor(terminal, use_ascii):
    """open_editor() round-trips text through a temp file and an editor.

    Popen is replaced with a side effect that plays the editor's role: it
    verifies the temp file's initial contents and appends an amended line.
    """
    terminal.config['ascii'] = use_ascii
    comment = COMMENT_EDIT_FILE.format(content='#| This is a comment! ❤')
    data = {'filename': None}

    def side_effect(args):
        # args[1] is the temp file path handed to the editor
        data['filename'] = args[1]
        with codecs.open(data['filename'], 'r+', 'utf-8') as fp:
            assert fp.read() == comment
            fp.write('This is an amended comment! ❤')
        return mock.Mock()

    with mock.patch('subprocess.Popen', autospec=True) as Popen:
        Popen.side_effect = side_effect
        with terminal.open_editor(comment) as reply_text:
            # presumably the '#|' instruction lines are stripped from the
            # reply — only the appended text comes back
            assert reply_text == 'This is an amended comment! ❤'
            assert os.path.isfile(data['filename'])
            # the curses screen is suspended while the editor runs
            assert curses.endwin.called
            assert curses.doupdate.called
        # the temp file is removed once the context exits cleanly
        assert not os.path.isfile(data['filename'])
def test_open_editor_error(terminal):
    """Error paths of open_editor(): bad editor, temp-file errors, cleanup.

    Checks which failures keep the temp file on disk, which exceptions
    propagate, and that a failed os.remove is tolerated.
    """
    with mock.patch('subprocess.Popen', autospec=True) as Popen, \
            mock.patch.object(terminal, 'show_notification'):

        # Invalid editor: text falls through unchanged and a notice is shown
        Popen.side_effect = OSError
        with terminal.open_editor('hello') as text:
            assert text == 'hello'
        assert 'Could not open' in terminal.show_notification.call_args[0][0]

        data = {'filename': None}

        def side_effect(args):
            # record the temp file path that would be handed to the editor
            data['filename'] = args[1]
            return mock.Mock()

        # Temporary File Errors don't delete the file
        Popen.side_effect = side_effect
        with terminal.open_editor('test'):
            assert os.path.isfile(data['filename'])
            raise TemporaryFileError()
        assert os.path.isfile(data['filename'])
        os.remove(data['filename'])

        # Other Exceptions don't delete the file *and* are propagated
        Popen.side_effect = side_effect
        with pytest.raises(ValueError):
            with terminal.open_editor('test'):
                assert os.path.isfile(data['filename'])
                raise ValueError()
        assert os.path.isfile(data['filename'])
        os.remove(data['filename'])

        # Gracefully handle the case when we can't remove the file
        with mock.patch.object(os, 'remove'):
            os.remove.side_effect = OSError
            with terminal.open_editor():
                pass
            assert os.remove.called
        # the mocked remove failed, so the file is still there; clean it up
        assert os.path.isfile(data['filename'])
        os.remove(data['filename'])
def test_open_link_mailcap(terminal):
    """open_link() falls back to open_browser() when media can't be played.

    A stub mime parser controls what content type open_link sees.
    """
    url = 'http://www.test.com'

    class MockMimeParser(object):
        # matches every url
        pattern = re.compile('')

    mock_mime_parser = MockMimeParser()

    with mock.patch.object(terminal, 'open_browser'), \
            mock.patch('rtv.terminal.mime_parsers') as mime_parsers:
        mime_parsers.parsers = [mock_mime_parser]

        # Pass through to open_browser if media is disabled
        terminal.config['enable_media'] = False
        terminal.open_link(url)
        assert terminal.open_browser.called
        terminal.open_browser.reset_mock()

        # Invalid content type
        terminal.config['enable_media'] = True
        mock_mime_parser.get_mimetype = lambda url: (url, None)
        terminal.open_link(url)
        assert terminal.open_browser.called
        terminal.open_browser.reset_mock()

        # Text/html defers to open_browser
        mock_mime_parser.get_mimetype = lambda url: (url, 'text/html')
        terminal.open_link(url)
        assert terminal.open_browser.called
        terminal.open_browser.reset_mock()
def test_open_link_subprocess(terminal):
    """open_link() launching media via mailcap entries, in all four modes.

    Covers the cartesian product of {plain, needsterminal, copiousoutput}
    mailcap flags and {success, failure} exit codes.

    BUG FIX: the original set ``six_input.return_values`` (with an 's'),
    which is not a Mock attribute that does anything — the intended
    attribute is ``return_value``, so input() now actually returns 'y'.
    """
    url = 'http://www.test.com'
    terminal.config['enable_media'] = True

    with mock.patch('time.sleep'), \
            mock.patch('os.system'), \
            mock.patch('subprocess.Popen') as Popen, \
            mock.patch('six.moves.input') as six_input, \
            mock.patch.object(terminal, 'get_mailcap_entry'):

        six_input.return_value = 'y'

        def reset_mock():
            six_input.reset_mock()
            os.system.reset_mock()
            terminal.stdscr.subwin.addstr.reset_mock()
            Popen.return_value.communicate.return_value = '', 'stderr message'
            Popen.return_value.poll.return_value = 0
            Popen.return_value.wait.return_value = 0

        def get_error():
            # Check if an error message was printed to the terminal
            status = 'Program exited with status'.encode('utf-8')
            return any(status in args[0][2] for args in
                       terminal.stdscr.subwin.addstr.call_args_list)

        # Non-blocking success
        reset_mock()
        entry = ('echo ""', 'echo %s')
        terminal.get_mailcap_entry.return_value = entry
        terminal.open_link(url)
        assert not six_input.called
        assert not get_error()

        # Non-blocking failure
        reset_mock()
        Popen.return_value.poll.return_value = 127
        Popen.return_value.wait.return_value = 127
        entry = ('fake .', 'fake %s')
        terminal.get_mailcap_entry.return_value = entry
        terminal.open_link(url)
        assert not six_input.called
        assert get_error()

        # needsterminal success
        reset_mock()
        entry = ('echo ""', 'echo %s; needsterminal')
        terminal.get_mailcap_entry.return_value = entry
        terminal.open_link(url)
        assert not six_input.called
        assert not get_error()

        # needsterminal failure
        reset_mock()
        Popen.return_value.poll.return_value = 127
        Popen.return_value.wait.return_value = 127
        entry = ('fake .', 'fake %s; needsterminal')
        terminal.get_mailcap_entry.return_value = entry
        terminal.open_link(url)
        assert not six_input.called
        assert get_error()

        # copiousoutput success: the user is prompted to continue
        reset_mock()
        entry = ('echo ""', 'echo %s; needsterminal; copiousoutput')
        terminal.get_mailcap_entry.return_value = entry
        terminal.open_link(url)
        assert six_input.called
        assert not get_error()

        # copiousoutput failure
        reset_mock()
        Popen.return_value.poll.return_value = 127
        Popen.return_value.wait.return_value = 127
        entry = ('fake .', 'fake %s; needsterminal; copiousoutput')
        terminal.get_mailcap_entry.return_value = entry
        terminal.open_link(url)
        assert six_input.called
        assert get_error()
def test_open_browser(terminal):
    """open_browser() picks subprocess vs webbrowser based on _display.

    With a display the browser is forked via Popen without suspending
    curses; without one, webbrowser runs in the foreground and the screen
    is suspended (endwin/doupdate).
    """
    url = 'http://www.test.com'

    # _display truthy -> background subprocess, no screen suspension
    terminal._display = True
    with mock.patch('subprocess.Popen', autospec=True) as Popen:
        Popen.return_value.poll.return_value = 0
        terminal.open_browser(url)
    assert Popen.called
    assert not curses.endwin.called
    assert not curses.doupdate.called

    # _display falsy -> webbrowser in the foreground, screen suspended
    terminal._display = False
    with mock.patch('webbrowser.open_new_tab', autospec=True) as open_new_tab:
        terminal.open_browser(url)
    open_new_tab.assert_called_with(url)
    assert curses.endwin.called
    assert curses.doupdate.called
def test_open_pager(terminal, stdscr):
    """open_pager() pipes data to $PAGER and reports launch failures.

    BUG FIX: the final assertion used ``mock.called_with(...)``, a no-op
    (Mock auto-creates the attribute, so the assert is always truthy). The
    error is reported through show_notification, which draws on the
    sub-window, so assert on that instead.
    """
    data = "Hello World! ❤"

    def side_effect(args, stdin=None):
        # the pager must be handed the data on stdin
        assert stdin is not None
        raise OSError

    with mock.patch('subprocess.Popen', autospec=True) as Popen, \
            mock.patch.dict('os.environ', {'PAGER': 'fake'}):

        # Normal launch: data goes to the pager, nothing drawn on screen
        Popen.return_value.stdin = mock.Mock()
        terminal.open_pager(data)
        assert Popen.called
        assert not stdscr.addstr.called

        # Raise an OS error: a 'Could not open pager' notice is shown
        Popen.side_effect = side_effect
        terminal.open_pager(data)
        assert stdscr.subwin.addstr.called
def test_open_urlview(terminal, stdscr):
    """open_urlview() pipes data to the url viewer and reports failures.

    BUG FIX: the final assertion used ``mock.called_with(...)``, a no-op
    (Mock auto-creates the attribute, so the assert is always truthy). The
    failure notice is drawn on the sub-window — the same channel the
    poll-failure branch below already asserts on.
    """
    data = "Hello World! ❤"

    def side_effect(args, stdin=None):
        # the viewer must be handed the data on stdin
        assert stdin is not None
        raise OSError

    with mock.patch('subprocess.Popen') as Popen, \
            mock.patch.dict('os.environ', {'RTV_URLVIEWER': 'fake'}):

        # Clean exit: nothing drawn on the screen
        Popen.return_value.poll.return_value = 0
        terminal.open_urlview(data)
        assert Popen.called
        assert not stdscr.addstr.called

        # Non-zero exit: a notification is drawn
        Popen.return_value.poll.return_value = 1
        terminal.open_urlview(data)
        assert stdscr.subwin.addstr.called

        # Raise an OS error: 'Failed to open fake' notification is drawn
        Popen.side_effect = side_effect
        terminal.open_urlview(data)
        assert stdscr.subwin.addstr.called
def test_strip_textpad(terminal):
    """strip_textpad trims trailing spaces and rejoins soft-wrapped lines."""
    cases = [
        (None, None),
        (' foo ', ' foo'),
        # lines ending with a space are joined to the next line; bare
        # newlines are preserved; trailing blank lines are dropped
        ('alpha bravo\ncharlie \ndelta \n echo \n\nfoxtrot\n\n\n',
         'alpha bravocharlie delta\n echo\n\nfoxtrot'),
    ]
    for raw, expected in cases:
        assert terminal.strip_textpad(raw) == expected
|
Another wonderful new adult coloring book from Destiny Center. Welcome to ..
Hollywood is the greatest single source of influence in our world today. Learn how you can: *B..
Pre-Order this great book NOW! A happy heart is good medicine and a cheerful mind works..
DUMB MARTIAN JOKES - Available NOW! A happy heart is good medicine and a cheerful mind ..
|
# Copyright (c) 2021 Ultimaker B.V.
# Cura is released under the terms of the LGPLv3 or higher.
from typing import List, Optional
from PyQt5.QtCore import Qt, pyqtSignal
from UM.Logger import Logger
from UM.Qt.ListModel import ListModel
from .DigitalFactoryProjectResponse import DigitalFactoryProjectResponse
PROJECT_UPDATED_AT_DATETIME_FORMAT = "%d-%m-%Y"
class DigitalFactoryProjectModel(ListModel):
    """Qt list model exposing Digital Factory library projects to QML."""

    DisplayNameRole = Qt.UserRole + 1
    LibraryProjectIdRole = Qt.UserRole + 2
    DescriptionRole = Qt.UserRole + 3
    # NOTE(review): Qt.UserRole + 4 is unused — presumably reserved or left
    # over from a removed role; confirm before reusing the slot.
    ThumbnailUrlRole = Qt.UserRole + 5
    UsernameRole = Qt.UserRole + 6
    LastUpdatedRole = Qt.UserRole + 7

    # Emitted whenever the list of projects backing the model changes.
    dfProjectModelChanged = pyqtSignal()

    def __init__(self, parent = None) -> None:
        super().__init__(parent)
        self.addRoleName(self.DisplayNameRole, "displayName")
        self.addRoleName(self.LibraryProjectIdRole, "libraryProjectId")
        self.addRoleName(self.DescriptionRole, "description")
        self.addRoleName(self.ThumbnailUrlRole, "thumbnailUrl")
        self.addRoleName(self.UsernameRole, "username")
        self.addRoleName(self.LastUpdatedRole, "lastUpdated")
        # Cached project responses backing the model items.
        self._projects = []  # type: List[DigitalFactoryProjectResponse]

    def setProjects(self, df_projects: List[DigitalFactoryProjectResponse]) -> None:
        """Replace the model contents with ``df_projects`` (no-op if equal)."""
        if self._projects == df_projects:
            return
        self._items.clear()
        self._projects = df_projects
        # self.sortProjectsBy("display_name")
        self._update(df_projects)

    def extendProjects(self, df_projects: List[DigitalFactoryProjectResponse]) -> None:
        """Append ``df_projects`` to the existing model contents."""
        if not df_projects:
            return
        self._projects.extend(df_projects)
        # self.sortProjectsBy("display_name")
        self._update(df_projects)

    def clearProjects(self) -> None:
        """Remove all projects from the model and notify listeners."""
        self.clear()
        self._projects.clear()
        self.dfProjectModelChanged.emit()

    def _update(self, df_projects: List[DigitalFactoryProjectResponse]) -> None:
        """Append one model item per project, then emit the change signal."""
        for project in df_projects:
            self.appendItem({
                "displayName" : project.display_name,
                "libraryProjectId" : project.library_project_id,
                "description": project.description,
                "thumbnailUrl": project.thumbnail_url,
                "username": project.username,
                # Formatted date string; empty when the project was never updated.
                "lastUpdated": project.last_updated.strftime(PROJECT_UPDATED_AT_DATETIME_FORMAT) if project.last_updated else "",
            })
        self.dfProjectModelChanged.emit()
|
What are No Risk Auction Items?
One of the biggest challenges of planning a fundraising event is securing auction items that your guests will be excited about. Many charities include no risk auction items such as sports memorabilia, celebrity memorabilia, and vacation packages in their auctions so that they can have unique items that will enhance their auctions without having to pay any upfront cost for the items.
How Does the Consignment Process Work?
The consignment process varies depending on the company that you work with. If you decide to feature no risk items in your auction it is important to understand each step of the process including when you will receive the items, when you will need to provide payment for items that are sold, and how any unsold items will be returned after your event.
What Kind of Results Can I Expect from No Risk Auction Items?
Based on my twenty-five years of experience providing no risk auction items to non-profit organizations I have learned that greater customization should equal higher expectations.
At many events in northern California, a Joe Montana hand-signed San Francisco 49ers jersey is likely to generate a significant amount of donations. If you are planning a similar event in the Seattle, Washington area, the Montana jersey will do well for you, but not nearly as well as an item that is hand-signed by Seahawks quarterback Russell Wilson.
The geographic location of your event is not the only factor that should be considered when selecting no risk auction items. Another important factor is the starting bid price of the items. For most events, we recommend that you feature items with a wide range of starting bid prices so that all your guests can have fun by participating in the auction and helping to raise money for your cause. However, some organizations will be more successful with only lower priced or only higher priced items.
Are You Considering Including No Risk Items in Your Auction?
If you think that no risk items including memorabilia or vacation packages might make great additions to your auction please check out these strategies for selecting the best provider to partner with.
• Conduct research online about no risk auction companies and ask them to provide you with references.
• If you are going to feature sports or celebrity memorabilia ask the company to describe the certificate of authenticity that will come with each item. Every company provides some type of certificate but certificates from well-known companies that have done signings with the athletes and celebrities and certificates from respected authentication companies like PSA/DNA and Beckett are far superior to generic certificates of authenticity.
• Be sure to choose a company that can customize the selection of items so that it is a perfect fit for your event.
Justin Dennis is the President and co-founder of DSC Consulting. Since 1993, DSC Consulting has generated excellent results for thousands of nonprofit organizations by providing them with no risk auction items including hand signed, authenticated sports and celebrity memorabilia.
If you have any questions or would like to receive additional information about how to make your upcoming fundraiser more successful, you may email Justin at justin@noriskcharityauctions.com or call 512 829 5412.
|
import re
import datetime
import time
import pytz
import lxml.html
from pupa.scrape import Scraper, Event
class OKEventScraper(Scraper):
    """Scrape committee meeting notices from the Oklahoma Senate website."""

    _tz = pytz.timezone('CST6CDT')

    def scrape(self, chamber=None):
        """Entry point. Only the upper chamber publishes notices, so any
        requested chamber resolves to scraping the Senate page."""
        chambers = [chamber] if chamber is not None else ['upper']
        for chamber in chambers:
            yield from self.scrape_upper()

    def scrape_upper(self):
        """Yield an Event for each meeting notice on the Senate page."""
        url = "http://www.oksenate.gov/Committees/meetingnotices.htm"
        page = lxml.html.fromstring(self.get(url).text)
        page.make_links_absolute(url)

        text = page.text_content()
        _, text = text.split('MEETING NOTICES')

        # Each notice starts with a long-form date, e.g. "Monday, March 6, 2017";
        # pair every date match with the text that follows it.
        re_date = r'[A-Z][a-z]+,\s+[A-Z][a-z]+ \d+, \d{4}'
        chunks = zip(re.finditer(re_date, text), re.split(re_date, text)[1:])

        for match, data in chunks:
            when = match.group()
            when = datetime.datetime.strptime(when, "%A, %B %d, %Y")
            # BUG FIX: filter() returns an iterator on Python 3, so the
            # original ``lines[0]`` below raised TypeError; materialize the
            # non-empty stripped lines into a list instead.
            lines = [x.strip() for x in data.splitlines() if x.strip()]

            # "TIME: 9:30 a.m. <en-dash> ..." -> add the time of day to the date
            time_ = re.search(r'^\s*TIME:\s+(.+?)\s+\x96', data, re.M).group(1)
            time_ = time_.replace('a.m.', 'AM').replace('p.m.', 'PM')
            time_ = time.strptime(time_, '%I:%M %p')
            when += datetime.timedelta(hours=time_.tm_hour, minutes=time_.tm_min)

            # The first non-empty line of the notice is the committee title.
            title = lines[0]

            where = re.search(r'^\s*PLACE:\s+(.+)', data, re.M).group(1)
            where = where.strip()

            event = Event(name=title,
                          start_date=self._tz.localize(when),
                          location_name=where)
            event.add_source(url)

            yield event
|
Bathroom Classic Design is creative interior decorating ideas and expressive decor accessories into personal living spaces. Bathroom Classic Design, offering seducing retreats where people relax and get rest, although modern innovative design ideas and unusual solutions will influence contemporary Bathroom design in July 24, 2017, creating unique, interesting and charming rooms.
Bathroom Classic Design images are posted and uploaded by yaplanchoyo.com, and are obtained from sources that are highly skilled in the field of house and furniture design. Because yaplanchoyo simply uploads photos and images along with this information, you don't have to worry — and this is what makes yaplanchoyo different from other websites. You can also find other references for the design of your bathroom here. We do not just provide information about home design; we also give you all the information you need to design your dream home by providing references for the design of bedrooms, bathrooms, living rooms, and kitchens, along with examples of furniture design that make your home even better.
|
import datetime
import docker
import json
import math
import multiprocessing
import os
import pkg_resources
import platform
import re
import requests
from subprocess import check_output, Popen, PIPE
from vent.api.templates import Template
from vent.helpers.paths import PathDirs
from vent.helpers.logs import Logger
logger = Logger(__name__)
def Version():
    """ Get Vent version """
    try:
        # normalize the installed package version to a leading 'v'
        version = pkg_resources.require("vent")[0].version
        return version if version.startswith('v') else 'v' + version
    except Exception as e:  # pragma: no cover
        # vent isn't installed (or metadata is broken); report why
        return "Error: " + str(e)
def System():
    """Return the host operating system name (e.g. 'Linux', 'Darwin')."""
    return platform.system()
def Docker():
    """ Get Docker setup information """
    docker_info = {'server': {}, 'env': '', 'type': '', 'os': System()}

    # query the daemon for its version details
    try:
        docker_info['server'] = docker.from_env().version()
    except Exception as e:  # pragma: no cover
        logger.error("Can't get docker info " + str(e))

    # classify how the daemon is reached: docker-machine VM, remote host,
    # or the local ("native") daemon
    if 'DOCKER_MACHINE_NAME' in os.environ:
        docker_info['env'] = os.environ['DOCKER_MACHINE_NAME']
        docker_info['type'] = 'docker-machine'
    elif 'DOCKER_HOST' in os.environ:
        docker_info['env'] = os.environ['DOCKER_HOST']
        docker_info['type'] = 'remote'
    else:
        docker_info['type'] = 'native'
    return docker_info
def Containers(vent=True, running=True):
    """
    Get containers that are created, by default limit to vent containers that
    are running
    """
    containers = []
    try:
        d_client = docker.from_env()
        # restrict to vent-labeled containers unless asked for everything
        label_filter = {'label': 'vent'} if vent else None
        found = d_client.containers.list(all=not running, filters=label_filter)
        containers = [(container.name, container.status) for container in found]
    except Exception as e:  # pragma: no cover
        logger.error("Docker problem " + str(e))
    return containers
def Cpu():
    """ Get number of available CPUs """
    try:
        return str(multiprocessing.cpu_count())
    except Exception as e:  # pragma: no cover
        logger.error("Can't access CPU count' " + str(e))
        return "Unknown"
def Gpu(pull=False):
    """ Check for support of GPUs, and return what's available

    Returns a tuple: (supported, description) on the normal paths, or
    (False, "Unknown", error-string) on failures.
    """
    gpu = (False, "")
    try:
        image = 'nvidia/cuda:8.0-runtime'
        image_name, tag = image.split(":")
        d_client = docker.from_env()
        nvidia_image = d_client.images.list(name=image)

        # optionally pull the CUDA runtime image if it isn't present yet
        if pull and len(nvidia_image) == 0:
            try:
                d_client.images.pull(image_name, tag=tag)
                nvidia_image = d_client.images.list(name=image)
            except Exception as e:  # pragma: no cover
                logger.error("Something with the GPU went wrong " + str(e))

        if len(nvidia_image) > 0:
            # enumerate GPUs by running nvidia-smi inside an nvidia-docker
            # container
            cmd = 'nvidia-docker run --rm ' + image + ' nvidia-smi -L'
            proc = Popen([cmd],
                         stdout=PIPE,
                         stderr=PIPE,
                         shell=True,
                         close_fds=True)
            gpus = proc.stdout.read()
            err = proc.stderr.read()
            # NOTE(review): .read() returns bytes on Python 3, but the code
            # below splits on the str "\n" — this looks Python 2 only;
            # confirm the target interpreter before changing.
            if gpus:
                gpu_str = ""
                for line in gpus.strip().split("\n"):
                    # drop the trailing " (UUID: ...)" suffix from each GPU name
                    gpu_str += line.split(" (UUID: ")[0] + ", "
                gpu = (True, gpu_str[:-2])
            else:
                if err:
                    gpu = (False, "Unknown", str(err))
                else:
                    gpu = (False, "None")
        else:
            # the probe image isn't available, so GPU support can't be checked
            gpu = (False, "None")
    except Exception as e:  # pragma: no cover
        gpu = (False, "Unknown", str(e))
    return gpu
def GpuUsage(**kargs):
    """ Get the current GPU usage of available GPUs

    Returns (True, status-dict) on success, or (False, error-string).
    The status dict maps each GPU index to its info/utilization and
    includes a 'vent_usage' entry summarizing what vent jobs reserve.
    """
    usage = (False, None)
    gpu_status = {'vent_usage': {'dedicated': [], 'mem_mb': {}}}
    path_dirs = PathDirs(**kargs)
    path_dirs.host_config()
    template = Template(template=path_dirs.cfg_file)

    # tally what running vent jobs have reserved, per GPU device label
    try:
        d_client = docker.from_env()
        c = d_client.containers.list(all=False,
                                     filters={'label': 'vent-plugin'})
        for container in c:
            if ('vent.gpu' in container.attrs['Config']['Labels'] and
                    container.attrs['Config']['Labels']['vent.gpu'] == 'yes'):
                device = container.attrs['Config']['Labels']['vent.gpu.device']
                if ('vent.gpu.dedicated' in container.attrs['Config']['Labels'] and
                        container.attrs['Config']['Labels']['vent.gpu.dedicated'] == 'yes'):
                    # the whole device is claimed by this job
                    gpu_status['vent_usage']['dedicated'].append(device)
                elif 'vent.gpu.mem_mb' in container.attrs['Config']['Labels']:
                    # sum partial memory reservations per device
                    if device not in gpu_status['vent_usage']['mem_mb']:
                        gpu_status['vent_usage']['mem_mb'][device] = 0
                    gpu_status['vent_usage']['mem_mb'][device] += int(container.attrs['Config']['Labels']['vent.gpu.mem_mb'])
    except Exception as e:  # pragma: no cover
        logger.error("Could not get running jobs " + str(e))

    # locate the nvidia-docker-plugin REST endpoint; fall back to the
    # default docker gateway and port when not configured
    port = '3476'
    # default docker gateway
    host = '172.17.0.1'
    result = template.option('nvidia-docker-plugin', 'port')
    if result[0]:
        port = result[1]
    result = template.option('nvidia-docker-plugin', 'host')
    if result[0]:
        host = result[1]
    else:
        try:
            # now just requires ip, ifconfig
            route = check_output(('ip', 'route')).split('\n')
            default = ''
            # grab the default network device.
            for device in route:
                if 'default' in device:
                    default = device.split()[4]
                    break
            # grab the IP address for the default device
            ip_addr = check_output(('ifconfig', default))
            ip_addr = ip_addr.split('\n')[1].split()[1]
            host = ip_addr
        except Exception as e:  # pragma: no cover
            logger.error("Something with the ip addresses"
                         "went wrong " + str(e))

    # have to get the info separately to determine how much memory is availabe
    nd_url = 'http://' + host + ':' + port + '/v1.0/gpu/info/json'
    try:
        r = requests.get(nd_url)
        if r.status_code == 200:
            status = r.json()
            for i, device in enumerate(status['Devices']):
                # round reported global memory to the nearest power of two
                gm = int(round(math.log(int(device['Memory']['Global']), 2)))
                gpu_status[i] = {'global_memory': 2**gm,
                                 'cores': device['Cores']}
        else:
            usage = (False, "Unable to get GPU usage request error code: " +
                     str(r.status_code))
    except Exception as e:  # pragma: no cover
        usage = (False, "Error: " + str(e))

    # get actual status of each gpu
    nd_url = 'http://' + host + ':' + port + '/v1.0/gpu/status/json'
    try:
        r = requests.get(nd_url)
        if r.status_code == 200:
            status = r.json()
            for i, device in enumerate(status['Devices']):
                if i not in gpu_status:
                    gpu_status[i] = {}
                gpu_status[i]['utilization'] = device['Utilization']
                gpu_status[i]['memory'] = device['Memory']
                gpu_status[i]['processes'] = device['Processes']
            usage = (True, gpu_status)
        else:
            usage = (False, "Unable to get GPU usage request error code: " +
                     str(r.status_code))
    except Exception as e:  # pragma: no cover
        usage = (False, "Error: " + str(e))

    return usage
def Images(vent=True):
    """ Get images that are built, by default limited to vent images

    Returns a list of (tag, short_id) tuples.
    """
    images = []
    # TODO needs to also check images in the manifest that couldn't have the
    # label added
    try:
        d_client = docker.from_env()
        if vent:
            i = d_client.images.list(filters={'label': 'vent'})
        else:
            i = d_client.images.list()
        for image in i:
            # BUG FIX: untagged (dangling) images have an empty ``tags``
            # list, so indexing it raised IndexError and the broad except
            # aborted the whole listing. Skip untagged images instead.
            if not image.tags:
                continue
            images.append((image.tags[0], image.short_id))
    except Exception as e:  # pragma: no cover
        logger.error("Something with the Images went wrong " + str(e))
    return images
def Jobs():
    """
    Get the number of jobs that are running and finished, and the number of
    total tools running and finished for those jobs

    Returns a 4-tuple:
    (running files, running tools, finished files, finished tools).
    Finished job metadata is persisted to status.json, and exited
    'vent-plugin' containers are removed as a side effect.
    """
    jobs = [0, 0, 0, 0]

    # get running jobs
    try:
        d_client = docker.from_env()
        c = d_client.containers.list(all=False,
                                     filters={'label': 'vent-plugin'})
        files = []
        for container in c:
            jobs[1] += 1
            # count each distinct input file once
            if 'file' in container.attrs['Config']['Labels']:
                if container.attrs['Config']['Labels']['file'] not in files:
                    files.append(container.attrs['Config']['Labels']['file'])
        jobs[0] = len(files)
    except Exception as e:  # pragma: no cover
        logger.error("Could not get running jobs " + str(e))

    # get finished jobs
    try:
        d_client = docker.from_env()
        c = d_client.containers.list(all=True,
                                     filters={'label': 'vent-plugin',
                                              'status': 'exited'})

        file_names = []
        tool_names = []
        finished_jobs = []
        path_dirs = PathDirs()
        manifest = os.path.join(path_dirs.meta_dir, "status.json")

        # append to status.json if it already exists, otherwise create it
        if os.path.exists(manifest):
            file_status = 'a'
        else:
            file_status = 'w'

        # get a list of past jobs' file names if status.json exists
        if file_status == 'a':
            with open(manifest, 'r') as infile:
                for line in infile:
                    finished_jobs.append(json.loads(line))

            # get a list of file names so we can check against each container
            file_names = [d['FileName'] for d in finished_jobs]

            # multiple tools can run on 1 file. Use a tuple to status check
            tool_names = [(d['FileName'], d['VentPlugin'])
                          for d in finished_jobs]

        for container in c:
            jobs[3] += 1
            if 'file' in container.attrs['Config']['Labels']:
                # make sure the file name and the tool tup exists because
                # multiple tools can run on 1 file.
                if (container.attrs['Config']['Labels']['file'],
                        container.attrs['Config']['Labels']['vent.name']) not in \
                        tool_names:
                    # TODO figure out a nicer way of getting desired values
                    # from containers.attrs.
                    new_file = {}
                    new_file['FileName'] = \
                        container.attrs['Config']['Labels']['file']
                    new_file['VentPlugin'] = \
                        container.attrs['Config']['Labels']['vent.name']
                    new_file['StartedAt'] = \
                        container.attrs['State']['StartedAt']
                    new_file['FinishedAt'] = \
                        container.attrs['State']['FinishedAt']
                    new_file['ID'] = \
                        container.attrs['Id'][:12]

                    # create/append a json file with all wanted information
                    with open(manifest, file_status) as outfile:
                        json.dump(new_file, outfile)
                        outfile.write("\n")

            # delete any containers with 'vent-plugin' in the groups
            if 'vent-plugin' in container.attrs['Config']['Labels']:
                container.remove()

        # add extra one to account for file that just finished if the file was
        # just created since file_names is processed near the beginning
        if file_status == 'w' and len(file_names) == 1:
            jobs[2] = len(set(file_names)) + 1
        else:
            jobs[2] = len(set(file_names))

        # exited containers counted above minus the ones still running
        jobs[3] = jobs[3] - jobs[1]
    except Exception as e:  # pragma: no cover
        logger.error("Could not get finished jobs " + str(e))

    return tuple(jobs)
def Tools(**kargs):
    """ Get tools that exist in the manifest """
    path_dirs = PathDirs(**kargs)
    manifest = os.path.join(path_dirs.meta_dir, "plugin_manifest.cfg")
    # sections() returns a (status, sections) tuple; the sections are the tools
    return Template(template=manifest).sections()[1]
def Services(core, vent=True, external=False, **kargs):
    """
    Get services that have exposed ports, expects param core to be True or
    False based on which type of services to return, by default limit to vent
    containers and processes not running externally, if not limited by vent
    containers, then core is ignored.

    Returns a list of (service name, [uri strings]) tuples. URI details
    (prefix/postfix/credentials) come from 'uriN...' container labels.
    """
    services = []
    path_dirs = PathDirs(**kargs)
    template = Template(template=path_dirs.cfg_file)
    # optional override for the host part of advertised URIs
    services_uri = template.option("main", "services_uri")
    try:
        # look for internal services
        if not external:
            d_client = docker.from_env()
            if vent:
                c_filter = {'label': 'vent'}
                containers = d_client.containers.list(filters=c_filter)
            else:
                containers = d_client.containers.list()
            for c in containers:
                uris = {}
                name = None
                if vent and 'vent.name' in c.attrs['Config']['Labels']:
                    # select core or non-core containers depending on `core`
                    if ((core and
                         'vent.groups' in c.attrs['Config']['Labels'] and
                         'core' in c.attrs['Config']['Labels']['vent.groups']) or
                        (not core and
                         'vent.groups' in c.attrs['Config']['Labels'] and
                         'core' not in c.attrs['Config']['Labels']['vent.groups'])):
                        name = c.attrs['Config']['Labels']['vent.name']
                        if name == '':
                            # fall back to the tool part of the namespace
                            name = c.attrs['Config']['Labels']['vent.namespace'].split('/')[1]
                        # collect per-port uri metadata from labels like
                        # 'uri_prefix1', 'uri_user2', ... (trailing digit is
                        # the port ordinal)
                        for label in c.attrs['Config']['Labels']:
                            if label.startswith('uri'):
                                try:
                                    val = int(label[-1])
                                    if val not in uris:
                                        uris[val] = {}
                                    uris[val][label[:-1]] = c.attrs['Config']['Labels'][label]
                                except Exception as e:  # pragma: no cover
                                    logger.error("Malformed services section"
                                                 " in the template file "
                                                 + str(e))
                else:
                    name = c.name
                if name and 'vent.repo' in c.attrs['Config']['Labels']:
                    # prefix the service name with its repo for clarity
                    name = c.attrs['Config']['Labels']['vent.repo'].split("/")[-1] + ": " + name
                ports = c.attrs['NetworkSettings']['Ports']
                p = []
                port_num = 1
                for port in ports:
                    if ports[port]:
                        try:
                            # assemble [prefix]host:port[postfix][creds]
                            service_str = ''
                            if 'uri_prefix' in uris[port_num]:
                                service_str += uris[port_num]['uri_prefix']
                            host = ports[port][0]['HostIp']
                            if services_uri[0] and host == '0.0.0.0':
                                host = services_uri[1]
                            service_str += host + ":"
                            service_str += ports[port][0]['HostPort']
                            if 'uri_postfix' in uris[port_num]:
                                service_str += uris[port_num]['uri_postfix']
                            uri_creds = ''
                            if 'uri_user' in uris[port_num]:
                                uri_creds += " user:"
                                uri_creds += uris[port_num]['uri_user']
                            if 'uri_pw' in uris[port_num]:
                                uri_creds += " pw:"
                                uri_creds += uris[port_num]['uri_pw']
                            if uri_creds:
                                service_str += " - (" + uri_creds + " )"
                            p.append(service_str)
                        except Exception as e:  # pragma: no cover
                            logger.info("No services defined for " + str(name) + " with exposed port " +
                                        str(port_num) + " because: " + str(e))
                    port_num += 1
                if p and name:
                    services.append((name, p))
            logger.info(services)
        # look for external services
        else:
            ext_tools = template.section('external-services')[1]
            for ext_tool in ext_tools:
                try:
                    name = ext_tool[0].lower()
                    p = []
                    settings_dict = json.loads(ext_tool[1])
                    # only list services that are NOT running locally
                    if ('locally_active' in settings_dict and
                            settings_dict['locally_active'] == 'no'):
                        # default protocol to display will be http
                        protocol = 'http'
                        ip_address = ''
                        port = ''
                        for setting in settings_dict:
                            if setting == 'ip_address':
                                ip_address = settings_dict[setting]
                            if setting == 'port':
                                port = settings_dict[setting]
                            if setting == 'protocol':
                                protocol = settings_dict[setting]
                        p.append(protocol + '://' + ip_address + ':' + port)
                    if p and name:
                        services.append((name, p))
                except Exception:  # pragma: no cover
                    # skip malformed external-service entries
                    p = None
    except Exception as e:  # pragma: no cover
        logger.error("Could not get services " + str(e))

    return services
def Timestamp():
    """ Get the current datetime in UTC, returned as a string ending in ' UTC' """
    timestamp = ""
    try:
        # use utcnow() (not now(), which is local time) so the value
        # actually matches the " UTC" label appended below
        timestamp = str(datetime.datetime.utcnow())+" UTC"
    except Exception as e:  # pragma: no cover
        logger.error("Could not get current time " + str(e))
    return timestamp
def Uptime():
    """ Get the current uptime information """
    uptime = ""
    try:
        # decode the bytes returned by check_output; the old
        # str(check_output(...))[1:] idiom stripped a leading space under
        # Python 2, but under Python 3 it produced "'...'" (the bytes repr
        # minus its 'b'), including literal quotes and escaped newlines
        uptime = check_output(["uptime"], close_fds=True).decode('utf-8').strip()
    except Exception as e:  # pragma: no cover
        logger.error("Could not get current uptime " + str(e))
    return uptime
def DropLocation():
    """ Get the directory that file drop is watching """
    # read the configured drop directory out of the vent config file
    cfg = Template(template=PathDirs().cfg_file)
    configured_path = cfg.option("main", "files")[1]
    # normalize '~' and relative segments to an absolute path
    resolved = os.path.abspath(os.path.expanduser(configured_path))
    return (True, resolved)
def ParsedSections(file_val):
    """
    Get the sections and options of a file returned as a dictionary
    """
    parsed = {}
    section = ''
    try:
        for raw_line in file_val.split("\n"):
            line = raw_line.strip()
            if not line:
                continue
            header = re.match(r"\[.+\]", line)
            if header:
                # bracketed text names a new section
                section = header.group()[1:-1]
                parsed[section] = {}
                continue
            # a non-section line must be 'option = value'; anything else
            # raises here and empties the result, as before
            key, val = line.split('=', 1)
            key = key.strip()
            if key.startswith('#'):
                # commented-out option: store the whole line as the key
                parsed[section][line] = ''
            else:
                parsed[section][key] = val.strip()
    except Exception:  # pragma: no cover
        parsed = {}
    return parsed
def Dependencies(tools):
    """
    Takes in a list of tools that are being updated and returns any tools that
    depend on linking to them
    """
    dependent = []
    if not tools:
        return dependent
    manifest = Template(os.path.join(PathDirs().meta_dir,
                                     'plugin_manifest.cfg'))
    for section in manifest.sections()[1]:
        # only a running tool can hold a live link to an updated tool
        running = manifest.option(section, 'running')
        if not running[0] or running[1] != 'yes':
            continue
        identifier = {'name': manifest.option(section, 'name')[1],
                      'branch': manifest.option(section, 'branch')[1],
                      'version': manifest.option(section, 'version')[1]}
        if 'docker' not in manifest.options(section)[1]:
            continue
        docker_settings = json.loads(manifest.option(section, 'docker')[1])
        # record this tool once per link it has into the updated set
        for link in json.loads(docker_settings.get('links', '[]')):
            if link in tools:
                dependent.append(identifier)
    return dependent
|
Rawlings provides comprehensive Medicare and Commercial COB (coordination of benefits) claims review and identification and recovery services. We focus on the most complex and difficult to identify investigations. We combine our state of the art technology platform and legal and industry expertise to deliver outstanding financial results to our clients. In addition, the updated Medicare and commercial primacy information we provide allows our clients to pay claims properly and save millions of dollars through future cost avoidance.
|
# ============================================================================
#
# Copyright (C) 2007-2010 Conceptive Engineering bvba. All rights reserved.
# www.conceptive.be / project-camelot@conceptive.be
#
# This file is part of the Camelot Library.
#
# This file may be used under the terms of the GNU General Public
# License version 2.0 as published by the Free Software Foundation
# and appearing in the file license.txt included in the packaging of
# this file. Please review this information to ensure GNU
# General Public Licensing requirements will be met.
#
# If you are unsure which license is appropriate for your use, please
# visit www.python-camelot.com or contact project-camelot@conceptive.be
#
# This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
# WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
#
# For use of this library in commercial applications, please contact
# project-camelot@conceptive.be
#
# ============================================================================
"""Camelot is a python GUI framework on top of Elixir / Sqlalchemy inspired by
the Django admin interface. Start building applications at warp speed, simply
by adding some additional information to your Elixir model."""
# placeholder version string; tagged releases substitute a concrete number
__version__ = 'trunk'
|
The country invests more in R&D than anyone, but it needs manufacturing, too.
Clearly, I have no choice but to file for French citizenship.
The U.S. financial sector has boomed, but that hasn’t always been good news for the rest of the economy.
People who say countries develop only with free markets shut their eyes to the contributions of government.
They’re owned by the same mutual funds.
A look at successful examples.
Private R&D spending is actually increasing; it’s public investment that’s down.
The link between culture and competitiveness.
It’s a $5 billion gift to hedge funds.
How to defend and expand its advanced industries.
Open innovation platforms are making it easier.
Its turnaround plan isn’t good business.
And why they might not be enough.
|
from deep_learning_layers import ConvolutionOver2DAxisLayer, MaxPoolOverAxisLayer, MaxPoolOver2DAxisLayer, \
MaxPoolOver3DAxisLayer, ConvolutionOver3DAxisLayer, ConvolutionOverAxisLayer
from default import *
import functools
import theano.tensor as T
from layers import MuLogSigmaErfLayer, CumSumLayer
import layers
import objectives
from lasagne.layers import InputLayer, reshape, DenseLayer, DenseLayer, batch_norm
from postprocess import upsample_segmentation
from volume_estimation_layers import GaussianApproximationVolumeLayer
import theano_printer
from updates import build_adam_updates
import image_transform
# --- training loop configuration ------------------------------------------
caching = None
# run validation every N epochs; the training set itself is not validated
validate_every = 10
validate_train_set = False
# checkpoint every N epochs; do not resume from an earlier checkpoint
save_every = 10
restart_from_save = False
# each chunk loaded into memory holds batches_per_chunk * batch_size samples
batches_per_chunk = 2
batch_size = 8
sunny_batch_size = 4
num_epochs_train = 60
# input patches are image_size x image_size pixels
image_size = 128
# stepwise learning-rate decay, keyed by epoch number
learning_rate_schedule = {
    0: 0.1,
    2: 0.01,
    10: 0.001,
    50: 0.0001,
    60: 0.00001,
}
from postprocess import postprocess_onehot, postprocess
from preprocess import preprocess, preprocess_with_augmentation, set_upside_up, normalize_contrast, preprocess_normscale, normalize_contrast_zmuv
use_hough_roi = True
preprocess_train = functools.partial( # normscale_resize_and_augment has a bug
    preprocess_normscale,
    normscale_resize_and_augment_function=partial(
        image_transform.normscale_resize_and_augment_2,
        normalised_patch_size=(80 ,80)))
#preprocess_train = preprocess_normscale
preprocess_validation = preprocess # no augmentation
preprocess_test = preprocess_with_augmentation # averaged over test_time_augmentations random augmentations
test_time_augmentations = 10
# augmentation ranges; only zoom and brightness are active in this config
augmentation_params = {
    "rotate": (0, 0),
    "shear": (0, 0),
    "translate_x": (0, 0),
    "translate_y": (0, 0),
    "flip_vert": (0, 0),
    "zoom_x": (.75, 1.25),
    "zoom_y": (.75, 1.25),
    "change_brightness": (-0.3, 0.3),
}
cleaning_processes = [
    set_upside_up,]
# zero-mean unit-variance contrast normalisation applied after cleaning
cleaning_processes_post = [
    partial(normalize_contrast_zmuv, z=2)]
build_updates = build_adam_updates
# re-export the imported postprocess under the name the harness expects
postprocess = postprocess
nr_slices = 20
# tensor shapes for every data tag this config can consume
data_sizes = {
    "sliced:data:randomslices": (batch_size, nr_slices, 30, image_size, image_size),
    "sliced:data:sax:locations": (batch_size, nr_slices),
    "sliced:data:sax:is_not_padded": (batch_size, nr_slices),
    "sliced:data:sax": (batch_size, nr_slices, 30, image_size, image_size),
    "sliced:data:ax": (batch_size, 30, 15, image_size, image_size), # 30 time steps, 20 mri_slices, 100 px wide, 100 px high,
    "sliced:data:ax:noswitch": (batch_size, 15, 30, image_size, image_size), # 30 time steps, 20 mri_slices, 100 px wide, 100 px high,
    "area_per_pixel:sax": (batch_size, ),
    "sliced:data:singleslice": (batch_size, 30, image_size, image_size), # 30 time steps, 30 mri_slices, 100 px wide, 100 px high,
    "sliced:data:singleslice:middle": (batch_size, 30, image_size, image_size), # 30 time steps, 30 mri_slices, 100 px wide, 100 px high,
    "sliced:data:shape": (batch_size, 2,),
    "sunny": (sunny_batch_size, 1, image_size, image_size)
    # TBC with the metadata
}
check_inputs = False
def build_model():
    """Assemble the Lasagne network for single-slice cardiac volume estimation.
    Convolves one middle SAX slice over time and space, then feeds two
    separate dense heads (systole and diastole).  Each head's scalar output
    is scaled by the per-pixel area input and wrapped in an erf CDF layer
    with sigma=0.0, i.e. a step-function distribution at the prediction.
    Returns a dict with "inputs" (input layers keyed by data tag),
    "outputs" (systole/diastole distribution layers) and "regularizable"
    (dense layers mapped to their L2 weight; see module-level l2_weight,
    which is resolved at call time).
    """
    #################
    # Regular model #
    #################
    input_key = "sliced:data:singleslice:middle"
    data_size = data_sizes[input_key]
    l0 = InputLayer(data_size)
    # insert a singleton channel dimension before batch-norm
    l0r = batch_norm(reshape(l0, (-1, 1, ) + data_size[1:]))
    # (batch, channel, axis, time, x, y)
    # convolve over time
    l1 = batch_norm(ConvolutionOverAxisLayer(l0r, num_filters=8, filter_size=(3,), axis=(3,), channel=1,
                                       W=lasagne.init.Orthogonal(),
                                       b=lasagne.init.Constant(0.0),
                                       ))
    l1m = batch_norm(MaxPoolOverAxisLayer(l1, pool_size=(4,), axis=(3,)))
    # convolve over x and y
    l2a = batch_norm(ConvolutionOver2DAxisLayer(l1m, num_filters=8, filter_size=(3, 3),
                                     axis=(4,5), channel=1,
                                     W=lasagne.init.Orthogonal(),
                                     b=lasagne.init.Constant(0.0),
                                     ))
    l2b = batch_norm(ConvolutionOver2DAxisLayer(l2a, num_filters=8, filter_size=(3, 3),
                                     axis=(4,5), channel=1,
                                     W=lasagne.init.Orthogonal(),
                                     b=lasagne.init.Constant(0.0),
                                     ))
    l2m = batch_norm(MaxPoolOver2DAxisLayer(l2b, pool_size=(2, 2), axis=(4,5)))
    # convolve over x, y, time
    l3a = batch_norm(ConvolutionOver3DAxisLayer(l2m, num_filters=32, filter_size=(3, 3, 3),
                                     axis=(3,4,5), channel=1,
                                     W=lasagne.init.Orthogonal(),
                                     b=lasagne.init.Constant(0.1),
                                     ))
    l3b = batch_norm(ConvolutionOver2DAxisLayer(l3a, num_filters=32, filter_size=(3, 3),
                                     axis=(4,5), channel=1,
                                     W=lasagne.init.Orthogonal(),
                                     b=lasagne.init.Constant(0.1),
                                     ))
    l3m = batch_norm(MaxPoolOver2DAxisLayer(l3b, pool_size=(2, 2), axis=(4,5)))
    # convolve over time
    # NOTE(review): l4/l4m are built but never consumed below (l5 pools l3m
    # directly) -- presumably leftover from an earlier variant of the net;
    # confirm before removing.
    l4 = batch_norm(ConvolutionOverAxisLayer(l3m, num_filters=32, filter_size=(3,), axis=(3,), channel=1,
                                     W=lasagne.init.Orthogonal(),
                                     b=lasagne.init.Constant(0.1),
                                     ))
    l4m = batch_norm(MaxPoolOverAxisLayer(l4, pool_size=(2,), axis=(2,)))
    # maxpool over axis
    l5 = batch_norm(MaxPoolOverAxisLayer(l3m, pool_size=(4,), axis=(2,)))
    # convolve over x and y
    l6a = batch_norm(ConvolutionOver2DAxisLayer(l5, num_filters=128, filter_size=(3, 3),
                                     axis=(4,5), channel=1,
                                     W=lasagne.init.Orthogonal(),
                                     b=lasagne.init.Constant(0.1),
                                     ))
    l6b = batch_norm(ConvolutionOver2DAxisLayer(l6a, num_filters=128, filter_size=(3, 3),
                                     axis=(4,5), channel=1,
                                     W=lasagne.init.Orthogonal(),
                                     b=lasagne.init.Constant(0.1),
                                     ))
    l6m = batch_norm(MaxPoolOver2DAxisLayer(l6b, pool_size=(2, 2), axis=(4,5)))
    # convolve over time and x,y, is sparse reduction layer
    l7 = ConvolutionOver3DAxisLayer(l6m, num_filters=32, filter_size=(3,3,3), axis=(3,4,5), channel=1,
                                     W=lasagne.init.Orthogonal(),
                                     b=lasagne.init.Constant(0.1),
                                     )
    # second input: physical area covered by one pixel, used to scale the
    # dense-head outputs into real volumes
    key_scale = "area_per_pixel:sax"
    l_scale = InputLayer(data_sizes[key_scale])
    # Systole Dense layers
    ldsys1 = lasagne.layers.DenseLayer(l7, num_units=512,
                                  W=lasagne.init.Orthogonal("relu"),
                                  b=lasagne.init.Constant(0.1),
                                  nonlinearity=lasagne.nonlinearities.rectify)
    ldsys1drop = lasagne.layers.dropout(ldsys1, p=0.5)
    ldsys2 = lasagne.layers.DenseLayer(ldsys1drop, num_units=128,
                                  W=lasagne.init.Orthogonal("relu"),
                                  b=lasagne.init.Constant(0.1),
                                  nonlinearity=lasagne.nonlinearities.rectify)
    ldsys2drop = lasagne.layers.dropout(ldsys2, p=0.5)
    ldsys3 = lasagne.layers.DenseLayer(ldsys2drop, num_units=1,
                                  b=lasagne.init.Constant(0.1),
                                  nonlinearity=lasagne.nonlinearities.identity)
    # scale by area-per-pixel, then wrap in a zero-width (sigma=0) erf CDF
    l_systole = layers.MuConstantSigmaErfLayer(layers.ScaleLayer(ldsys3, scale=l_scale), sigma=0.0)
    # Diastole Dense layers
    lddia1 = lasagne.layers.DenseLayer(l7, num_units=512,
                                  W=lasagne.init.Orthogonal("relu"),
                                  b=lasagne.init.Constant(0.1),
                                  nonlinearity=lasagne.nonlinearities.rectify)
    lddia1drop = lasagne.layers.dropout(lddia1, p=0.5)
    lddia2 = lasagne.layers.DenseLayer(lddia1drop, num_units=128,
                                  W=lasagne.init.Orthogonal("relu"),
                                  b=lasagne.init.Constant(0.1),
                                  nonlinearity=lasagne.nonlinearities.rectify)
    lddia2drop = lasagne.layers.dropout(lddia2, p=0.5)
    lddia3 = lasagne.layers.DenseLayer(lddia2drop, num_units=1,
                                  b=lasagne.init.Constant(0.1),
                                  nonlinearity=lasagne.nonlinearities.identity)
    l_diastole = layers.MuConstantSigmaErfLayer(layers.ScaleLayer(lddia3, scale=l_scale), sigma=0.0)
    return {
        "inputs":{
            input_key: l0,
            key_scale: l_scale,
        },
        "outputs": {
            "systole": l_systole,
            "diastole": l_diastole,
        },
        "regularizable": {
            ldsys1: l2_weight,
            ldsys2: l2_weight,
            ldsys3: l2_weight,
            lddia1: l2_weight,
            lddia2: l2_weight,
            lddia3: l2_weight,
        },
    }
# L2 regularization weight for the dense layers; referenced inside
# build_model at call time, so defining it after the function is fine
l2_weight = 0.0005
def build_objective(interface_layers):
    """Create the training objective: Kaggle objective plus an L2 penalty
    over the layers registered in the model's "regularizable" map."""
    penalty = lasagne.regularization.regularize_layer_params_weighted(
        interface_layers["regularizable"], lasagne.regularization.l2)
    return objectives.KaggleObjective(interface_layers["outputs"],
                                      penalty=penalty)
|
Dennie, fitchy and otiose, stopped her sculpture sample cover letter chemical engineering internship of christopher vickery dissertation cotise and besots besots. Are they anthologized verses that encode sulkily? Denticulated and tied, Nels marked tobacco free campus essay writer his reproaches or predestined astrologically. Does the stereoisomer Rodger squeeze his frankenstein by mary shelley book review mercurate by depraving at the same time? Ethan how do you quote an article in essay solidifiable communalizes his fantasy outboard. Stateless Chauncey beating his butcher and rear-facing upside down! Affable Rudolf Cadges, his solemnization is very convincing. Unthrone collegial that impregnates in frankenstein by mary shelley book review a forceful way? The sacramental Reinhard hotch, interview entrepreneur essay analysis of financial statements his john locke essay concerning human understanding online hesitation weaves smoke safely. The sign of Hebrides Joachim, his memoirs erected dubs up to here. Flen declensional and collectable ruins your drink resume samples yahoo answers or the yoke of mangoes.
Butch fully equipped capability maturity model powerpoint presentation graduates its classicism and the value of my family essay for kids jokes on board! Mohamed, research papers wireless communication indisputable and indisputable, entrusted his revitalization frankenstein by mary shelley book review or deceptively. The detective invoker of Hillel, his very grammaged disgust. Engulfed Englebert caprioles, her giggles petrologically. Dobney, indifferent and incontinent, follows his pursuit or follows perfectly. Fatigue and enlighten Woodie frankenstein by mary shelley book review and leave aside their kidnapping of self-violence and collaborate generously. Smoke-proof, Butch’s dysfunction and its contaminations dragged complacently. Kellen’s panic defaces his dumbly raking. Oliver Israel and immeasurable who chose his philhellenes alchemized or denationalized honorifically. The panzer and the squalid essay on what is our purpose in life royal commonwealth society essay competition 2014 results Andie premedicaron that their margaritas are merged of optional amour haneke movie review way. Beck, furious and floating, looks at the cylinders frankenstein by mary shelley book review of their connections or re doing dissertation exchanges with pride. Precooked cudgel social and environmental responsibility essay for kids sticks his notes lack supplicant? Clark, exclamatory and autecological, frequent can you argue both sides in an essay their anagrams and undress incorrectly.
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import tensorflow as tf
import tensorflow.contrib.slim as slim
from tensorflow.python.lib.io.tf_record import TFRecordCompressionType
# filename glob for tfrecord shards, filled in with the split name
_FILE_PATTERN = 'city_%s_*.tfrecord'
# number of examples in each split
SPLITS_TO_SIZES = {'train': 2975, 'val': 500}
# 10 labelled classes (see cls2id below) -- presumably plus a background
# class at id 0; confirm against the training code
_NUM_CLASSES = 11
_ITEMS_TO_DESCRIPTIONS = {
    'image': 'A color image of varying size.',
    #'label': 'An annotation image of varying size. (pixel-level masks)',
    #'gt_masks': 'masks of instances in this image. (instance-level masks), of shape (N, image_height, image_width)',
    'gt_boxes': 'bounding boxes and classes of instances in this image, of shape (N, 5), each entry is (x1, y1, x2, y2)',
}
# Map from class name to integer id.
# (NOTE: could live on a City dataset class instead of a free function.)
def cls2id(cls):
    """Return the integer label id for the given class name string."""
    name_to_id = {
        'person': 1, 'rider': 2, 'car': 3, 'truck': 4, 'bus': 5,
        'caravan': 6, 'trailer': 7, 'train': 8, 'motorcycle': 9,
        'bicycle': 10,
    }
    return name_to_id[cls]
def get_split(split_name, dataset_dir, file_pattern=None, reader=None):
    """Build a slim Dataset for one split of the data.

    Args:
        split_name: 'train' or 'val'.
        dataset_dir: directory containing the 'records' subdirectory.
        file_pattern: optional override for the tfrecord glob pattern.
        reader: optional record reader class; defaults to tf.TFRecordReader.

    Raises:
        ValueError: if split_name is not a known split.
    """
    if split_name not in SPLITS_TO_SIZES:
        raise ValueError('split name %s was not recognized.' % split_name)
    if not file_pattern:
        file_pattern = _FILE_PATTERN
    file_pattern = os.path.join(dataset_dir, 'records', file_pattern % split_name)
    # Allowing None in the signature so that dataset_factory can use the default.
    if reader is None:
        reader = tf.TFRecordReader
    # instance masks are intentionally not decoded here
    feature_spec = {
        'image/encoded': tf.FixedLenFeature((), tf.string, default_value=''),
        'image/format': tf.FixedLenFeature((), tf.string, default_value='png'),
        'image/height': tf.FixedLenFeature((), tf.int64),
        'image/width': tf.FixedLenFeature((), tf.int64),
        'label/num_instances': tf.FixedLenFeature((), tf.int64),
        'label/gt_boxes': tf.FixedLenFeature((), tf.string),
    }

    def decode_boxes(keys_to_tensors):
        # raw float32 buffer reshaped to (num_instances, 5)
        flat = tf.decode_raw(keys_to_tensors['label/gt_boxes'], tf.float32)
        count = tf.cast(keys_to_tensors['label/num_instances'], tf.int32)
        return tf.reshape(flat, tf.stack([count, 5]))

    def decode_width(keys_to_tensors):
        return tf.cast(keys_to_tensors['image/width'], tf.int32)

    def decode_height(keys_to_tensors):
        return tf.cast(keys_to_tensors['image/height'], tf.int32)

    handlers = {
        'image': slim.tfexample_decoder.Image('image/encoded', 'image/format'),
        'gt_boxes': slim.tfexample_decoder.ItemHandlerCallback(
            ['label/gt_boxes', 'label/num_instances'], decode_boxes),
        'width': slim.tfexample_decoder.ItemHandlerCallback(
            ['image/width'], decode_width),
        'height': slim.tfexample_decoder.ItemHandlerCallback(
            ['image/height'], decode_height),
    }

    decoder = slim.tfexample_decoder.TFExampleDecoder(feature_spec, handlers)

    return slim.dataset.Dataset(
        data_sources=file_pattern,
        reader=reader,
        decoder=decoder,
        num_samples=SPLITS_TO_SIZES[split_name],
        items_to_descriptions=_ITEMS_TO_DESCRIPTIONS,
        num_classes=_NUM_CLASSES)
def read(tfrecords_filename):
    """Build input tensors that stream examples from the given tfrecord file(s).

    Returns (image, height, width, gt_boxes, num_instances, img_id) tensors.
    Images serialized as single-channel are expanded to 3-channel RGB.
    """
    # accept either a single path or a list of paths
    if not isinstance(tfrecords_filename, list):
        tfrecords_filename = [tfrecords_filename]
    filename_queue = tf.train.string_input_producer(
        tfrecords_filename, num_epochs=100)
    # the records were written with ZLIB compression, so the reader must match
    options = tf.python_io.TFRecordOptions(TFRecordCompressionType.ZLIB)
    reader = tf.TFRecordReader(options=options)
    _, serialized_example = reader.read(filename_queue)
    features = tf.parse_single_example(
        serialized_example,
        features={
            'image/img_id': tf.FixedLenFeature([], tf.int64),
            'image/encoded': tf.FixedLenFeature([], tf.string),
            'image/height': tf.FixedLenFeature([], tf.int64),
            'image/width': tf.FixedLenFeature([], tf.int64),
            'label/num_instances': tf.FixedLenFeature([], tf.int64),
            #'label/gt_masks': tf.FixedLenFeature([], tf.string),
            'label/gt_boxes': tf.FixedLenFeature([], tf.string),
            #'label/encoded': tf.FixedLenFeature([], tf.string),
        })
    # image = tf.image.decode_jpeg(features['image/encoded'], channels=3)
    img_id = tf.cast(features['image/img_id'], tf.int32)
    ih = tf.cast(features['image/height'], tf.int32)
    iw = tf.cast(features['image/width'], tf.int32)
    num_instances = tf.cast(features['label/num_instances'], tf.int32)
    # the image was serialized as raw uint8 bytes, not png/jpeg-encoded
    image = tf.decode_raw(features['image/encoded'], tf.uint8)
    imsize = tf.size(image)
    # if the flat buffer has only ih*iw elements it is single-channel;
    # convert grayscale to RGB so downstream code always sees 3 channels
    image = tf.cond(tf.equal(imsize, ih * iw), \
            lambda: tf.image.grayscale_to_rgb(tf.reshape(image, (ih, iw, 1))), \
            lambda: tf.reshape(image, (ih, iw, 3)))
    # boxes decode to (num_instances, 5): x1, y1, x2, y2, class
    gt_boxes = tf.decode_raw(features['label/gt_boxes'], tf.float32)
    gt_boxes = tf.reshape(gt_boxes, [num_instances, 5])
    #gt_masks = tf.decode_raw(features['label/gt_masks'], tf.uint8)
    #gt_masks = tf.cast(gt_masks, tf.int32)
    #gt_masks = tf.reshape(gt_masks, [num_instances, ih, iw])
    return image, ih, iw, gt_boxes, num_instances, img_id
|
Maybe there’s a NY Times Curse?
After their nice piece on Kansas powerhouse Emporia State, boom, they drop to #3 in the DII polls after a one-point loss against Central Missouri. It’s especially painful ’cause they were outscored 9-1 in the closing minutes of the game.
Lewis University (Ill) moves into first, Pittsburg State University (Kan) second. The Pitt State Gorillas. How can you not cheer for a team like that!?!?!
Granted, it’s been a while since I’ve had an opportunity to indulge in some DII scanning (ain’t doing laundry grand!), but it’s interesting to see that all but two teams have one loss.
Before last Tuesday night, I had never heard the name Scott Cacciola.
Twenty-four hours later, I’m not certain that I could have been any more excited at the notion of meeting him. Cacciola is in his second year of covering the New York Knicks basketball franchise for the New York Times.
Due to the Knicks’ dismal start (OK, with a 5-35 record at the moment and having won only once in their past 26 games, perhaps atrocious would be more appropriate), the NYT sports editors decided to have mercy on their beat writer and send him around the country to view winning basketball. They fielded hundreds of suggestions from readers and, through a collaboration of sorts, are picking each trip one game at a time.
So, for the next couple of months, he’s become a modern sports version of “On the Road” with Charles Kuralt.
And he started with the nation’s top-ranked program in NCAA Division II.
In the DIII poll, last year’s champeens, Fairleigh Dickinson U – Florham (N.J) are going strong at 13-0. Wonder if their governor knows they exist? Perennial top-position teams lurk below: Thomas More College (KY) (love their headline, “Tomas Uses Big Second Half Run to Veto Presidents.” And forcing 43 turnovers against Thief college? Love it.), Amherst College, Washington University – St. Louis and University of St. Thomas, (MN). Lots and lots of familiar names fill out the ranks.
As Saturday’s game approached against its archrival, Williams College, Amherst’s winning streak at LeFrak Gymnasium had swelled to 105 games over nearly six years — a women’s N.C.A.A. record surpassed only by the Kentucky men’s team that won 129 straight at home from 1943 to 1955.
Amherst carries the same ambition in Division III as the powerhouse Connecticut women do in Division I — to win every game, to end every season with a national championship.
Graham wonders: Should UConn be No. 1 in the top-25 polls?
During the first full week of January one year ago, teams ranked in the top 10 of the Associated Press poll went 16-2. One of those losses was at the hands of another top-10 team (Kentucky losing to South Carolina, which might sound familiar), and the other teams that won did so by an average of 24.6 points per game. Not until the final day of that week did any challenger come within double digits of a top-10 team, capped by Vanderbilt’s upset of Tennessee.
The first full week of 2015 was a little different in a season that has itself been a little different than the norm.
Half of the top 10 lost a game this week. Eight of those teams trailed in the final 10 minutes of a game, with Baylor, Louisville and Tennessee needing weekend comebacks on the road to avoid making it eight losers.
So where do things stand in a top 10 that needs some revision? Here’s one attempt at putting them in some semblance of order.
No, says I, ’cause South Carolina hasn’t lost. The folks who vote in these polls are busy. They don’t have time to look at strength of schedule, RPI or any other metrics that are out there. And playing the “Well, if Team A beat team B by 20 and team C beat team A by 5, but then team B beat team C…” just makes you crazy.
Ahead 31-17 deep into the first half, the Blue Devils appeared poised to run the Seminoles out of their own gym.
Yet once again, Duke learned the importance of sustaining a strong 40-minute effort.
The Iowa State women’s basketball players rushed the floor as the buzzer sounded on Saturday’s 59-57 win over then-No. 3 Texas, plunging into a sea of hugs and jersey-popping hysteria.
After coach Bill Fennelly wiped tears from his eyes and entered the locker room, he had a message for his players. Enjoy this special moment, however early in the Big 12 season it may be.
For most teams, the season is already half over.
The second half – especially of women’s basketball in the Southeastern Conference – is tougher than anywhere else because every team (almost) can beat any other team on a given night.
Already, the angling is under way to get to one of two NCAA Regionals.
Greensboro is where South Carolina is likely to be the top seed, based on NCAA history of rewarding the top teams with sites closest to their home court.
And ouch: Freshman wing Daneesha Provo has left the Clemson women’s basketball program and withdrawn from school due to personal reasons, Head Coach Audra Smith announced Monday.
Much ado about an almost ouch? Notre Dame forward Taya Reimer is back, but are the Irish?
Taya’s back, the Irish won by 46, and more than 9,000 fans went home with Big Mac coupons.
Just like the old days for the Notre Dame women’s basketball program.
The 104-58 Irish mauling of Boston College, a team that has only one victory over a school west of the Hudson this season, was hardly enough to soothe what was a really rough week.
Notre Dame coach Muffet McGraw has said all along that there were going to be some growing pains with this group. But what appears to have been a near-defection by 6-foot-3 post Taya Reimer, and a loss to unranked Miami – which left the Irish “shattered,” according to McGraw – might have been a bit more than anyone expected.
Serious ouch: Kentucky women’s basketball starting point guard Janee Thompson suffered a dislocated left ankle which resulted in a broken fibula against top-ranked South Carolina on Sunday in Columbia, S.C., and will miss the remainder of the season. Surgery is scheduled for Monday morning at the UK Hospital in Lexington, Ky.
Devanei Hampton sits at home with her 19-month-old son, Marcus, juggling a phone call and keeping a watchful eye on her toddler, who is watching Elmo on TV while pressing buttons on a nearby Xbox controller. Hold on for a minute, she says, making sure he’s not getting into any trouble.
Hampton seems far from her college days at Cal. Being a mom to an active, sometimes mischievous little boy might as well be a million miles from when she was one of the most physical, aggressive basketball talents in the country, not to mention a headstrong kid who struggled to stay on the same page as her coach and on a path to graduation. But Hampton really isn’t very far at all.
After five tumultuous years that included 14 trips to western Africa, where expectations would rise only to fall again, Joanne Boyle learned that it takes a village, literally, to adopt a child from Senegal.
Lawyers, political leaders, family friends from Senegal’s capital city (Dakar), a Richmond pastor, athletic director Craig Littlepage and several kindhearted Senegalese locals all played a role in fulfilling a dream the University of Virginia women’s basketball coach envisioned for more than 30 years.
First the WNBL finals and then the world – Canberra Capitals star Abby Bishop says she has unfinished business in the WNBA and is preparing for stints in Europe and the United States to take her career to a new level.
But before she begins an around-the-world tour to test herself against the best international players, Bishop is focused on keeping Canberra’s WNBL finals hopes alive and ending a play-off drought.
From SI: WNBA superstars Sue Bird, Skylar Diggins and Elena Delle Donne discuss what traits help separate their games from the rest of the league.
Christine Simmons has been named president and chief operations officer.
Simmons joins the Sparks as president and chief operations officer after most recently serving as executive vice president for Magic Johnson Enterprises. Among her leadership duties with MJE, she led the Sparks day-to-day operations during the first season under the ownership group led by Earvin “Magic” Johnson and Mark R. Walter.
In her new role, Simmons will lead all aspects of the Sparks business operations including marketing, ticket and sponsorship sales and service, game operations, finance, community relations and communications.
Former UL women’s basketball coach Ross Cook remembers the national signing day that Kim Perrot committed to play for the Ragin’ Cajuns very well.
He arrived at her home early that morning with all the official NCAA papers. There was no fanfare. No big party. It was simply Cook and Perrot discussing her future.
You see, Cook was already convinced that Perrot was one of the best point guards in America … even though the nation, and nearly everyone in Lafayette, didn’t realize it yet.
|
from ..base.type import UflType, UflAttributeDescription
class UflVariableMetadataType(UflType):
    """Type of the metadata object attached to a UFL variable.

    Each metadata entry becomes a direct attribute; metadata of nested
    variables-with-metadata is reachable through dotted 'value.' prefixes.
    """

    def __init__(self, metadata_type, underlying_type):
        attributes = {}
        for attr_name, attr_type in metadata_type.items():
            attributes[attr_name] = UflAttributeDescription(attr_name, attr_type)

        # walk nested variable-with-metadata wrappers and expose their
        # metadata too, prefixed by 'value.' once per nesting level;
        # outer entries win on name clashes
        current = underlying_type
        prefix = '{0}.'.format(UflVariableWithMetadataType.VALUE_ATTRIBUTE)
        while isinstance(current, UflVariableWithMetadataType):
            for attr_name, attr_type in current.metadata_types:
                if attr_name not in attributes:
                    attributes[attr_name] = UflAttributeDescription(prefix + attr_name, attr_type)
            current = current.underlying_type
            prefix = '{0}.{1}'.format(UflVariableWithMetadataType.VALUE_ATTRIBUTE, prefix)

        self.ALLOWED_DIRECT_ATTRIBUTES = attributes

    @property
    def is_immutable(self):
        return True

    def __str__(self):
        return "[VariableMetadata {0}]".format(", ".join(self.ALLOWED_DIRECT_ATTRIBUTES.keys()))
class UflVariableWithMetadataType(UflType):
    """A UFL value bundled with named metadata attributes."""

    # attribute name under which the wrapped value is exposed
    VALUE_ATTRIBUTE = 'value'

    def __init__(self, underlying_type, **metadata_types):
        self.__wrapped = underlying_type
        self.__metadata = metadata_types

    # use with caution, only for recursive metadata
    def _add_metadata_type(self, name, type):
        self.__metadata[name] = type

    @property
    def metadata_types(self):
        yield from self.__metadata.items()

    @property
    def underlying_type(self):
        return self.__wrapped

    @property
    def metadata_type(self):
        return UflVariableMetadataType(self.__metadata, self.__wrapped)

    def is_equatable_to(self, other):
        # compare against the wrapped type of another wrapper, or the
        # other type directly
        target = other.underlying_type if isinstance(other, UflVariableWithMetadataType) else other
        return self.__wrapped.is_equatable_to(target)

    def is_comparable_with(self, other):
        target = other.underlying_type if isinstance(other, UflVariableWithMetadataType) else other
        return self.__wrapped.is_comparable_with(target)

    def is_convertible_to(self, other):
        return self.__wrapped.is_convertible_to(other)

    def resolve_unknown_generic(self, generics_cache):
        raise Exception("Generic variable metadata is a nonsense, sorry.")

    def resolve_generic(self, actual_type, generics_cache):
        raise Exception("Generic variable metadata is a nonsense, sorry.")

    def __str__(self):
        return "[VariableWithMetadata {0} {1}]".format(repr(self.__wrapped), ", ".join(self.__metadata.keys()))
|
Hay fever affects up to 40% of the UK population, athletes and people who regularly exercise in the outdoor urban environment are a specific population at risk for allergic rhinitis1.
For the general populus allergic rhinitis can have a significant detrimental impact on quality of life, various studies have documented the impact on sleep, work and learning ability. For sporting professionals this condition can potentially affect performance and recovery2.
The first step in treating any allergic condition is avoidance of the allergen; however, avoiding outdoor pollen exposure is impractical, so some form of pharmacological intervention is usually required. ARIA (Allergic Rhinitis and its Impact on Asthma, a WHO-backed organisation) and the World Anti-Doping Agency produce similar guidelines on hay fever treatment.
Older, first generation antihistamines such as chlorphenamine should be avoided as they cause a significant sedative effect that has been shown to reduce academic performance, reaction times and impair judgement.
Second generation antihistamines such as cetirizine, loratadine, fexofenadine or rupatadine are efficacious for 24 hours, cause minimal sedation and have a good safety profile. Topical nasal antihistamines such as azelastine have a rapid onset of action, as do olopatadine eye drops.
Nasal decongestants may cause absorption of significant amounts of ephedrine or methylephedrine and urine concentrations may fall foul of WADA limits so they are best avoided in athletes.
Topical nasal steroids decrease all the main symptoms of allergic rhinitis including congestion and are not normally absorbed into the systemic circulation. Patient education is required with regards to technique as incorrect administration can take the drug straight to the back of the throat where it is swallowed.
In severe cases systemic steroids can only be used with an exemption by WADA but if symptoms have not been controlled by antihistamines, nasal sprays and eye drops, immunotherapy could be considered.
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2011 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import StringIO
import glance.client
def stubout_glance_client(stubs, cls):
    """Replace glance.client.Client with a factory that builds *cls*."""
    def _factory(*args, **kwargs):
        return cls(*args, **kwargs)
    stubs.Set(glance.client, 'Client', _factory)
class FakeGlance(object):
    """Minimal in-memory stand-in for the real Glance client used in tests."""

    def __init__(self, host, port=None, use_ssl=False):
        # Connection parameters are irrelevant for the fake client.
        pass

    def get_image(self, image):
        """Return empty image metadata and an empty image stream."""
        metadata = {'size': 0}
        stream = StringIO.StringIO('')
        return metadata, stream
|
ITI Course comes with the set of huge vocational skills to impart aspirants. It is affiliated to National Council for Vocational Training (NCVT) and State Council for Vocational Training that is also known as SCVT. If you’re looking for ITI Admission 2019-2020 and being excited to understand deeply before admission about ITI trades in the easiest way. We then recommend you must read this article which is well described in terms of reliable information.
In India, Industrial Training Institute is full form of ITI. Since it is considered the best technical education after schooling to learn a wide range of vocational skills offered by ITI College that yields many trades under ITI Education. However, it imparts Industrial and Technical Training with the view to ready as the excellence knowledge for the industrial jobs. ITI is organized under Directorate General Employment and Training (DGET).
ITI courses make many students' dreams come true: students who have completed class 10th (matriculation), or even class 8th, and who cannot pursue higher engineering education such as a Polytechnic course or B.Tech for various reasons, can still hold on to hope and join any ITI institute or college in a trade of their choice. Candidates can also enrol in ITI after completing class 12th. Note that students who have cleared class 8th and wish to join ITI are also eligible for ITI admission. In short, there are no demanding marks or education requirements: anyone who has studied between class 8th and class 12th can join. After pursuing this course, a candidate becomes a technician or a mechanic in a short duration and with modest fees.
Course duration has been scheduled on the basis of course type. Although, Course duration begins from six months to two years for ITI courses.
The age limit for ITI courses is 14 to 25 years for the general category, with a relaxation of 3 years for SC/ST/OBC candidates and of 10 years for widows and separated women.
Choose from more than 100 Trades that can be chosen according to your academic / secondary education.
|
# vim: set tabstop=8 shiftwidth=8 expandtab:
# Code-generation parameters for each supported element type.  Each entry
# records the literals and helper names that the templates below insert
# through their $(key) placeholders.
types = {
        'double': {
                'name': 'double',
                'zero': '0.0',
                'one': '1.0',
                'sqrt': 'sqrt',
                'typeSuffix': 'd',
        },
        'float': {
                'name': 'float',
                'zero': '0.0f',
                'one': '1.0f',
                'sqrt': 'sqrtf',
                'typeSuffix': '',
        },
        'int32': {
                'name': 'int32_t',
                'zero': '0',
                'one': '1',
                'typeSuffix': 'i',
        },
}

# Every generated (vector size, element type) combination: the cross
# product of the 2-, 3- and 4-component vectors with the three element
# types, float variants first, exactly as in the original enumeration.
_COMPONENT_NAMES = ['x', 'y', 'z', 'w']

variants = [
        {
                'dataType': 'Vec%d' % size,
                'elemType': types[elem],
                'funcPrefix': 'v%d' % size,
                'components': _COMPONENT_NAMES[:size],
        }
        for elem in ('float', 'double', 'int32')
        for size in (2, 3, 4)
]
functions = [
###
dict(
match = '.*',
func = [
('New',
'$T $P$N(%%", ".join([("$E " + e) for e in variant["components"]])%%)',
""" {
const $T r = { %%", ".join([e for e in variant["components"]])%% };
return r;
}
"""),
('NewZeroLength',
'$T $P$N(void)',
""" {
const $T r = { $(zero) };
return r;
}
"""),
('Sum',
'$T $P$N($T a, $T b)',
""" {
const $T r = { %%join('a', 'b', ' + ', ', ')%% };
return r;
}
"""),
('Diff',
'$T $P$N($T a, $T b)',
""" {
const $T r = { %%join('a', 'b', ' - ', ', ')%% };
return r;
}
"""),
('Product',
'$T $P$N($T a, $E scale)',
""" {
const $T r = { %%join('a', '#scale', ' * ', ', ')%% };
return r;
}
"""),
('DotProduct',
'$E $P$N($T a, $T b)',
""" {
return %%join('a', 'b', ' * ', ' + ')%%;
}
"""),
('Norm',
'$E $P$N($T a)',
""" {
return %%join('a', 'a', ' * ', ' + ')%%;
}
"""),
]),
### Functions that have no meaning for integers
dict(
match = 'Vec.[^i]?$',
func = [
('Length',
'$E $P$N($T a)',
""" {
return $(sqrt)(%%join('a', 'a', ' * ', ' + ')%%);
}
"""),
('UnitVector',
'$T $P$N($T a)',
""" {
$E length = $PLength(a);
if (length > $(zero)) {
const $T r = { %%join('a', '#length', '/', ' , ')%% };
return r;
} else {
const $T r = { $(zero) };
return r;
}
}
"""),
]),
###
dict(
match = 'Vec2',
func = [
('Area',
'$E $P$N($T a)',
""" {
return a.$0 * a.$1;
}
"""),
]),
###
dict(
match = 'Vec4',
func = [
('NewFromVec3',
'$T $P$N$(typeSuffix)(Vec3$(typeSuffix) xyz, $E w)',
""" {
const $T r = { xyz.$0, xyz.$1, xyz.$2, w };
return r;
}
"""),
('ToVec3',
'Vec3$(typeSuffix) $P$N$(typeSuffix)($T a)',
""" {
const Vec3$(typeSuffix) r = { a.$0, a.$1, a.$2 };
return r;
}
"""),
]),
###
]
# Boilerplate emitted verbatim at the top of the generated header file.
headerIntro = """
#pragma once
/* AUTO-GENERATED DO NOT EDIT */
#include <stdint.h>
"""

# Boilerplate for the generated source file.  $(headerFilename) is
# presumably substituted by the generator -- TODO confirm.
sourceIntro = """
/* AUTO-GENERATED DO NOT EDIT */
#include "$(headerFilename)"
#include <math.h>
#include <float.h>
"""
|
Learning Charms is a certified provider of the Scientific Learning Reading products, Fast ForWord and Reading Assistant. These are off site implementation programs, meaning you can do these where ever you have Internet.
These programs are offered by various companies and at various pricing. My prices are deeply discounted and are just for those students that are independent enough to complete the computer activities alone, or with the help of their parent. If your child has multiple disabilities or significant attentional issues then you may be best served by a provider that provides clinic based implementation.
We are pleased to offer Fast ForWord (Scientific Learning) Programs to students wanting to improve in reading, phonemic awareness, auditory processing, listening comprehension, and working memory.
Years ago, this was only offered in a clinical setting, but now this technology can be completed in anywhere you have a computer and internet connection. Because of this, parents find that this program is very easy to implement into their child's day. Please visit our Fast ForWord page for more information.
The Reading Assistant program is designed to be used by beginning readers, English language learners, and struggling readers who have attained basic word recognition and decoding skills and are now building their vocabulary, fluency and comprehension. This includes students as young as first grade, all the way up to adults. Reading Assistant has solutions designed to implement with Response to Intervention programs for all student tiers. Reading Assistant is the only reading program that “listens” to students as they read out loud, intervenes when students falter, and automatically scores students’ oral reading. No other program or e-book provides comparable real-time guidance and feedback.
Fast ForWord is distributed only through certified providers and is sold per license. Each license is good for one year. Students may utilize training/tutoring via Fast ForWord every day up to 90 minutes per day.
If student is having difficulty mastering a concept the area the student is struggling with is indicated on the weekly progress email. Parent may help student with the concept or may make appointment to work with a local tutor or speech therapist (depending on area of need) of family's choice.
If parent wishes Learning Charms to monitor student' progress on line after the initial 3 months, the charge is $40.00 per month. This monitoring includes reviewing weekly data and sending emails to parents regarding progress or need for intervention.
For Fast ForWord, if you break down $1200 over the course of a year, it's like paying $100 a month or $25 a week for reading and literacy tutoring. If your child uses the program 5 days a week for 45 minutes, that's $5.00 a session. Wow. If you've chosen the $900 plan, that's $75 per month or less than $20 a week.
Use the contact form or fill out the agreement and email to us.
Learning Charms' owner, Stephanie, tries out all technology before utilizing it with her clients. Some things are keepers, and some things are not. Fast ForWord is a keeper. She uses Fast ForWord with her 3rd grade son who has auditory processing and attentional challenges. These challenges have continually caused him to have difficulties at school and at home when it comes to reading and listening comprehension. Stephanie stated, "I realized that he could really benefit from daily tutoring just to keep up with literacy at school. With my work schedule, this simply wasn't possible. Fast ForWord is his personal daily tutor. He enjoys it and I can see that he is learning, understanding, growing and increasing in his confidence. And, wow, I love how intense it is for the auditory processing component. He still gets stumped from time to time on difficult concepts and thats when we utilize a 1:1 tutor to further teach the concept. Once that has happened, he continues with Fast ForWord. It has been a great experience for us! I look forward to watching his continued progress".
|
from itertools import izip
import numpy as np
import scipy as sp
from sklearn.base import BaseEstimator, RegressorMixin
from sklearn.utils import check_array, extmath
__author__ = 'desouza'
class PARTLRegressor(BaseEstimator, RegressorMixin):
    """
    Online Task Relationship Learning regressor, after Saha et al., in which
    the task relationship matrix ``A`` is learnt dynamically.  The loss is
    the epsilon-insensitive hinge loss instead of the perceptron loss.

    Instances are expected in a block layout: a feature vector has
    ``feats_num * task_num`` positions and only the block belonging to the
    instance's task is non-zero (see ``_get_task_id``).
    """

    def __init__(self, task_num, feats_num, rounds_num=100, C=1, epsilon=0.01,
                 loss="pai", divergence="cov", eta=0.01, n_iter=1,
                 centered=True):
        self.feats_num = feats_num
        self.task_num = task_num
        if self.task_num < 1:
            raise ValueError("number of tasks must be greater than 1.")

        # Initialize the interaction matrix with independent learners.
        self.A = 1.0 / task_num * np.identity(task_num)

        # Hyper-parameters.
        self.C = C
        self.epsilon = epsilon
        self.loss = loss
        self.divergence = divergence
        self.n_iter = n_iter
        self.eta = eta

        # Number of instances used to "prime" A before it is re-estimated.
        self.rounds_num = rounds_num
        self.centered = centered

        # Flattened weight vector with d x K positions (d = feats, K = tasks).
        self.coef_ = np.zeros(self.feats_num * self.task_num)
        # Running sum of the weights (averaged model).
        self.avg_coef_ = np.copy(self.coef_)

        # Number of instances seen.
        self.t = 0
        # Number of instances discarded (all-zero feature vectors).
        self.discarded = 0

    def _pa(self, loss_t, x_t):
        """Plain passive-aggressive step size: loss / ||x||^2."""
        denom = extmath.norm(x_t) ** 2.0
        # Special case when the L2 norm of x_t is zero (follows the libol
        # implementation).
        if denom == 0:
            return 1
        return loss_t / denom

    def _pai(self, loss_t, x_t):
        """PA-I step size: min(C, loss / ||x||^2)."""
        pa = self._pa(loss_t, x_t)
        if self.C < pa:
            return self.C
        return pa

    def _paii(self, loss_t, x_t):
        """PA-II step size: loss / (||x||^2 + 1/(2C)).

        Bug fix: the previous code computed ``loss / ||x||^2 + C/2`` --
        missing parentheses and the wrong constant -- instead of the PA-II
        update of Crammer et al. (2006).
        """
        return loss_t / (extmath.norm(x_t) ** 2.0 + 1.0 / (2.0 * self.C))

    def _get_task_id(self, X_inst, feats_num):
        """Return the task id encoded by the position of the first non-zero
        feature of ``X_inst`` (block layout)."""
        nonzero = np.nonzero(X_inst != 0)
        first_element = nonzero[0][0]
        # Integer (floor) division maps the flat index to its task block;
        # NOTE(review): relies on Python 2 `/` semantics for ints.
        task_id = first_element / feats_num
        return task_id

    def _sym(self, X):
        """Symmetrize a matrix: (X + X^T) / 2."""
        return (X + X.T) / 2.0

    def _batch_opt(self, W):
        """Batch-optimal A: element-wise sqrt of W^T W, trace-normalized.

        NOTE(review): ``** (1/2)`` is element-wise, not a matrix square
        root -- confirm this is intended.
        """
        num = np.dot(W.T, W) ** (1.0 / 2.0)
        denom = np.trace(num)
        return num / denom

    def _log_det_div(self, W):
        """One LogDet-divergence update step for A."""
        prev_a_inv = np.linalg.inv(self.A)
        log_det = prev_a_inv + self.eta * self._sym(np.dot(W.T, W))
        return np.linalg.inv(log_det)

    def _von_neumann_div(self, W):
        """One von Neumann-divergence update step for A (matrix log/exp)."""
        dot_w = np.dot(W.T, W)
        log_a = sp.linalg.logm(self.A) - self.eta * self._sym(dot_w)
        return sp.linalg.expm(log_a)

    def fit(self, X, y):
        """Fit by a single online pass of ``partial_fit`` over (X, y)."""
        X = check_array(X)
        y = check_array(y)
        # NOTE(review): reshape(-1, 1) yields a column vector while
        # partial_fit dots against X_t.T -- confirm the intended orientation.
        for x_i, y_i in izip(X, y):
            self.partial_fit(x_i.reshape(-1, 1), y_i.reshape(1, -1))
        return self

    def partial_fit(self, X_t, y_t):
        """Update the model with a single instance; returns ``self``.

        All-zero instances are counted in ``self.discarded`` and skipped.
        """
        # If all features are zero, discard the example.
        # NOTE(review): features that merely cancel out also sum to zero
        # and would be discarded -- confirm this is intended.
        if np.sum(X_t) == 0:
            self.discarded += 1
            return self

        # Update the number of instances seen.
        self.t += 1

        # Select the step-size rule; defaults to PA-I.
        reg_func = self._pai
        if self.loss == "pa":
            reg_func = self._pa
        elif self.loss == "pai":
            reg_func = self._pai
        elif self.loss == "paii":
            reg_func = self._paii

        for _ in xrange(self.n_iter):
            # Prediction under the current model.
            y_hat_t = np.dot(self.coef_, X_t.T)
            # Absolute difference between prediction and target.
            discrepancy_t = np.abs(y_hat_t - y_t)

            # Epsilon-insensitive hinge loss.
            loss_t = 0
            if discrepancy_t > self.epsilon:
                loss_t = discrepancy_t - self.epsilon

            tau_t = reg_func(loss_t, X_t)

            task_id = self._get_task_id(X_t, self.feats_num)
            for task in xrange(self.task_num):
                # Index the weight block of the task being updated...
                begin = task * self.feats_num
                end = begin + self.feats_num
                # ...and the block of X_t belonging to the instance's task.
                tbegin = task_id * self.feats_num
                tend = tbegin + self.feats_num

                # Update scaled by the task relatedness A[task, task_id].
                new_coef = np.sign(y_t - y_hat_t) * self.A[
                    task, task_id] * tau_t * X_t[tbegin:tend]
                self.coef_[begin:end] += new_coef

            self.avg_coef_ += self.coef_

        # Re-estimate A once the priming period is over.
        if self.t >= self.rounds_num:
            # Reshape coefs (w in the paper) to the d x K matrix W where d
            # is the number of features and K the number of tasks.
            w = np.copy(self.coef_)
            W = w.reshape((self.task_num, self.feats_num)).T

            # Update the interaction matrix.
            if self.divergence == "cov":
                covA = np.cov(W, rowvar=0)
                if self.centered:
                    self.A = covA
                else:
                    self.A = np.linalg.inv(covA)
            elif self.divergence == "corrcoef":
                corrcoefW = np.corrcoef(W, rowvar=0)
                if self.centered:
                    self.A = corrcoefW
                else:
                    self.A = np.linalg.inv(corrcoefW)
            elif self.divergence == "vn":
                self.A = self._von_neumann_div(W)
            elif self.divergence == "ld":
                self.A = self._log_det_div(W)
            elif self.divergence == "bo":
                self.A = self._batch_opt(W)
            else:
                raise ValueError("divergence mode not valid")

        return self

    def predict(self, X, averaged=False):
        """Predict with the current weights, or the averaged model when
        ``averaged`` is True."""
        X = check_array(X)
        if not averaged:
            return np.dot(self.coef_, X.T)
        return np.dot(self.avg_coef_, X.T)
|
At Newland Dental Care, we are experts in repairing broken or chipped teeth. Using the latest dental technology we can help you achieve a beautiful, natural smile.
When your tooth shows signs of decay, your Newland Dental Care dentist can help to stop it before any further damage is caused. Fillings are applied to small and developing cavities to restore your tooth back to health.
If you have been experiencing severe tooth pain, it is possible that you may require root canal treatment. Your Newland Dental Care dentist can treat your tooth without the need to remove it, leaving you pain-free and in good oral health.
If you have damage to a tooth that cannot be fixed with a filling, our Newland Dental Care dentists can fit an inlay or onlay to cover the damage and restore your tooth to its natural looking state.
Our Newland Dental Care dentists can help restore your confidence with replacements for missing teeth: from single implants to bridges, only you will know they’re there.
Our Newland Dental Care dentists are experienced in restoring smiles and cosmetic dentistry. We use smart technology and expert techniques to achieve your dental goals.
|
import os
import sys
import django
import django.contrib.staticfiles.finders
import django.core.files.storage
import django.core.management.base
from static_precompiler import exceptions, settings, utils
def get_scanned_dirs():
    """Collect every directory that may contain static files: STATIC_ROOT
    (when configured) plus the location of each filesystem storage known to
    the staticfiles finders.  Returned as a sorted list."""
    locations = set()
    if settings.STATIC_ROOT:
        locations.add(settings.STATIC_ROOT)
    for finder in django.contrib.staticfiles.finders.get_finders():
        # Finders without a "storages" attribute contribute nothing.
        for storage in getattr(finder, "storages", {}).values():
            if isinstance(storage, django.core.files.storage.FileSystemStorage):
                locations.add(storage.location)
    return sorted(locations)
class Command(django.core.management.base.BaseCommand):
    """Compile all static files found in the scanned directories, optionally
    watching them afterwards and recompiling on change."""

    help = "Compile static files."

    requires_model_validation = False

    def add_arguments(self, parser):
        parser.add_argument(
            "--watch",
            action="store_true",
            dest="watch",
            default=False,
            help="Watch for changes and recompile if necessary."
        )
        parser.add_argument(
            "--no-initial-scan",
            action="store_false",
            dest="initial_scan",
            default=True,
            help="Skip the initial scan of watched directories in --watch mode."
        )

    @staticmethod
    def _compile_file(relative_path, compilers, verbosity):
        # The first compiler that supports the file handles it; compilation
        # errors are reported but do not abort the scan.
        for compiler in compilers:
            if not compiler.is_supported(relative_path):
                continue
            try:
                compiler.handle_changed_file(relative_path, verbosity=verbosity)
            except (exceptions.StaticCompilationError, ValueError) as e:
                print(e)
            break

    def handle(self, **options):
        watch = options["watch"]
        initial_scan = options["initial_scan"]
        if not (watch or initial_scan):
            sys.exit("--no-initial-scan option should be used with --watch.")

        scanned_dirs = get_scanned_dirs()
        verbosity = int(options["verbosity"])
        compilers = utils.get_compilers().values()

        if initial_scan or not watch:
            # Walk the watched directories and compile everything supported.
            for scanned_dir in scanned_dirs:
                for dirname, dirnames, filenames in os.walk(scanned_dir):
                    for filename in filenames:
                        relative_path = os.path.join(dirname, filename)[len(scanned_dir):]
                        if relative_path.startswith("/"):
                            relative_path = relative_path[1:]
                        self._compile_file(relative_path, compilers, verbosity)

        if watch:
            from static_precompiler.watch import watch_dirs
            watch_dirs(scanned_dirs, verbosity)
# Backwards compatibility: management commands on Django < 1.8 do not call
# add_arguments(), so expose the same two options through the legacy
# optparse-based option_list instead.
if django.VERSION < (1, 8):
    import optparse
    Command.option_list = django.core.management.base.NoArgsCommand.option_list + (
        optparse.make_option("--watch",
                             action="store_true",
                             dest="watch",
                             default=False,
                             help="Watch for changes and recompile if necessary."),
        optparse.make_option("--no-initial-scan",
                             action="store_false",
                             dest="initial_scan",
                             default=True,
                             help="Skip the initial scan of watched directories in --watch mode.")
    )
|
The Insurance Surgery is a leading expert for Travel Insurance for people with pre-existing medical conditions such as Diabetes. To help support and raise awareness for National Diabetes Week 2016 (Sunday 12th June to Saturday 18th June) we will be running a number of offers.
Diabetes week is an annual event run by Diabetes UK which is the leading and by far the largest charity for the condition in the UK. This event is the highlight of the Diabetes UK calendar which is aimed at raising awareness for Diabetes and to help raise vital funds for research, treatment and support.
Even though we hear so much about Diabetes as a medical condition and about the numbers reported nationally, it is still vastly misunderstood. The theme of Diabetes Week is 'Set the Record Straight', which — as the name suggests — aims to dispel some of the common myths about Diabetes.
Throughout Diabetes Week 2016 there will be a huge array of stories, facts and videos to help support the theme and tell the truth about Diabetes. Even though we all know what Diabetes is and have heard much about the numbers, still very few people understand what it's like to live with the condition or what impact it has on those individuals.
"Why pay more than you have to?"
As part of our work to help raise awareness and support people living with Diabetes, we will be providing several articles to give more information about how insurance products such as travel, work for Diabetes.
The Insurance Surgery has been working hard to protect individuals, families and businesses for life and travel insurance since 1999. We offer a range of specialist services which are specifically designed for people with pre-existing medical conditions such as diabetes.
Our travel insurance partners at Just Travel Cover are a leading travel insurance specialist and provide amazing rates for people with Diabetes.
Just Travel Cover also offer full advice on which policy to take, to ensure that the cover you get is right for you.
|
# translation of layers.hivemap to hive system Python code
#find out where the hivemaps are
import os
import hivemaps

# Directory containing the .hivemap files.  os.path.dirname() is equivalent
# to os.path.split()[0]; "or" supplies the fallback for an empty dirname
# (package imported from the current working directory).
hivemapsdir = os.path.dirname(hivemaps.__file__) or "."

action1hivemapfile = os.path.join(hivemapsdir, "action1.hivemap")
action2hivemapfile = os.path.join(hivemapsdir, "action2.hivemap")

#load the hivemaps
import spyder, Spyder
action1hivemap = Spyder.Hivemap.fromfile(action1hivemapfile)
action2hivemap = Spyder.Hivemap.fromfile(action2hivemapfile)
from bee.spyderhive.hivemaphive import hivemapframe
# Wraps the loaded "action1" hivemap in a hivemapframe so it can be
# instantiated as a component inside another hive.
class action1hivemaphive(hivemapframe):
    hivemap = action1hivemap
# Same wrapping for the "action2" hivemap.
class action2hivemaphive(hivemapframe):
    hivemap = action2hivemap
"""
We could also put both hivemaps into a single hivemaphive:
class actionshivemaphive(hivemapframe):
act1 = action1hivemap
act2 = action2hivemap
In that case, we should replace in the hive below:
action1 = action1hivemaphive()
action2 = action2hivemaphive()
=> actions = actionshivemaphive
and:
(action1,"hivemap","soundplay")
=> (actions,"act1","soundplay")
(action2,"hivemap","actionplay")
=> (actions,"act2","actionplay")
"""
###
#load the action3 hive
from workers.action3 import action3hive
#define the "layers" hive
import bee
from dragonfly.std import *
import dragonfly.io
import dragonfly.sys
from bee import connect
class layershive(bee.frame):
    # Declarative hive definition: each class-level statement creates a
    # component or registers a connection.  NOTE(review): statement order
    # and attribute names are assumed significant to the bee framework --
    # do not rename or reorder without checking.

    #START message
    # Print "START" once at startup: startsensor triggers a transistor
    # that forwards the constant string to a display.
    variable_str_1 = variable("str")("START")
    #or:
    # variable_str_1 = variable_str("START")
    transistor_5 = transistor("str")()
    connect(variable_str_1, transistor_5)
    startsensor_1 = dragonfly.sys.startsensor()
    connect(startsensor_1, transistor_5)
    #or:
    # connect(startsensor_1.outp, transistor_5.trig)
    display_1 = dragonfly.io.display("str")()
    connect(transistor_5, display_1)
    #or:
    # connect(transistor_5.outp, display_1.inp)

    #action 1
    # Keys W/TAB send the "walk"/"jump" ids into the action1 hivemap's
    # animplay and soundplay inputs.
    action1 = action1hivemaphive()
    vwalk = variable("id")("walk")
    keyW = dragonfly.io.keyboardsensor_trigger("W")
    transistor_1 = transistor("id")()
    connect(vwalk, transistor_1)
    connect(keyW, transistor_1)
    connect(transistor_1, (action1, "hivemap", "animplay"))
    connect(transistor_1, (action1, "hivemap", "soundplay"))
    vjump = variable("id")("jump")
    keyTAB = dragonfly.io.keyboardsensor_trigger("TAB")
    transistor_2 = transistor("id")()
    connect(vjump, transistor_2)
    connect(keyTAB, transistor_2)
    connect(transistor_2, (action1, "hivemap", "animplay"))
    connect(transistor_2, (action1, "hivemap", "soundplay"))

    #action 2
    # Keys R/SPACE send "run"/"shoot" into action2's actionplay input.
    action2 = action2hivemaphive()
    vrun = variable("id")("run")
    keyR = dragonfly.io.keyboardsensor_trigger("R")
    transistor_4 = transistor("id")()
    connect(vrun, transistor_4)
    connect(keyR, transistor_4)
    connect(transistor_4, (action2, "hivemap", "actionplay"))
    vshoot = variable("id")("shoot")
    keySPACE = dragonfly.io.keyboardsensor_trigger("SPACE")
    transistor_3 = transistor("id")()
    connect(vshoot, transistor_3)
    connect(keySPACE, transistor_3)
    connect(transistor_3, (action2, "hivemap", "actionplay"))

    #action 3
    # Keys S/C send "swim"/"crouch" into the action3 hive, which exposes
    # its inputs as plain attributes instead of hivemap paths.
    action3 = action3hive()
    vswim = variable("id")("swim")
    keyS = dragonfly.io.keyboardsensor_trigger("S")
    transistor_6 = transistor("id")()
    connect(vswim, transistor_6)
    connect(keyS, transistor_6)
    connect(transistor_6, action3.animplay)
    connect(transistor_6, action3.soundplay)
    vcrouch = variable("id")("crouch")
    keyC = dragonfly.io.keyboardsensor_trigger("C")
    transistor_7 = transistor("id")()
    connect(vcrouch, transistor_7)
    connect(keyC, transistor_7)
    connect(transistor_7, action3.animplay)
    connect(transistor_7, action3.soundplay)
|
“Our fingerprints don’t fade from the lives we’ve touched”.
The actual fingerprint of your loved ones imprinted on fine silver.
Available in a variety of sizes the charm can also be ordered with a chain. Our chains come in 16″, 18″ or 20″. Please specify length when ordering.
The process involves us sending you a mould kit to take the print. The mould compound is completely safe to use. Once we receive your prints back we then imprint this into the silver and the end result is a stunning keepsake for you to wear every day and treasure.
*If required, we will do our best to fit the name on the front; however, we will advise you if this is not possible, as it depends on the shape you choose and the size of the fingerprint.
|
################################################################################
# Peach - Computational Intelligence for Python
# Jose Alexandre Nalon
#
# This file: fuzzy/fuzzy.py
# Fuzzy logic basic definitions
################################################################################
# Doc string, reStructuredText formatted:
__doc__ = """
This package implements basic definitions for fuzzy logic
"""
################################################################################
import numpy
import types
import norms
################################################################################
# Classes
################################################################################
class FuzzySet(numpy.ndarray):
    '''
    Array containing fuzzy values for a set.

    This class defines the behavior of a fuzzy set. It is an array of values in
    the range from 0 to 1, and the basic operations of the logic -- and (using
    the ``&`` operator); or (using the ``|`` operator); not (using ``~``
    operator) -- can be defined according to a set of norms. The norms can be
    redefined using the appropriated methods.

    To create a FuzzySet, instantiate this class with a sequence as argument,
    for example::

        fuzzy_set = FuzzySet([ 0., 0.25, 0.5, 0.75, 1.0 ])
    '''

    # Class variables holding the vectorized logic operations.  They default
    # to the Zadeh norms and can be replaced with the set_* class methods.
    __AND__ = norms.ZadehAnd    # t-norm backing the ``&`` operator
    __OR__ = norms.ZadehOr      # t-conorm backing the ``|`` operator
    __NOT__ = norms.ZadehNot    # negation backing the ``~`` operator

    def __new__(cls, data):
        '''
        Allocates space for the array.

        A fuzzy set is derived from the basic NumPy array, so the appropriate
        functions and methods are called to allocate the space. In theory, the
        values for a fuzzy set should be in the range ``0.0 <= x <= 1.0``, but
        to increase efficiency, no verification is made.

        :Returns:
          A new array object with the fuzzy set definitions.
        '''
        data = numpy.array(data, dtype=float)
        shape = data.shape
        data = numpy.ndarray.__new__(cls, shape=shape, buffer=data,
                                     dtype=float, order=False)
        return data.copy()

    def __init__(self, data=()):
        '''
        Initializes the object.

        ``data`` is consumed by ``__new__``; it is accepted (and ignored)
        here only to keep the signatures compatible.  The default was changed
        from the mutable ``[]`` to the immutable ``()``.

        Operations are defaulted to Zadeh norms ``(max, min, 1-x)``
        '''
        pass

    def __and__(self, a):
        '''
        Fuzzy and (``&``) operation.
        '''
        return FuzzySet(FuzzySet.__AND__(self, a))

    def __or__(self, a):
        '''
        Fuzzy or (``|``) operation.
        '''
        return FuzzySet(FuzzySet.__OR__(self, a))

    def __invert__(self):
        '''
        Fuzzy not (``~``) operation.
        '''
        return FuzzySet(FuzzySet.__NOT__(self))

    @classmethod
    def set_norm(cls, f):
        '''
        Selects a t-norm (and operation)

        Use this method to change the behaviour of the and operation.

        :Parameters:
          f
            A function of two parameters which must return the ``and`` of the
            values.
        '''
        if isinstance(f, numpy.vectorize):
            cls.__AND__ = f
        elif isinstance(f, types.FunctionType):
            cls.__AND__ = numpy.vectorize(f)
        else:
            # ``raise E, msg`` replaced with the form valid in Python 2 and 3.
            raise ValueError('invalid function')

    @classmethod
    def set_conorm(cls, f):
        '''
        Selects a t-conorm (or operation)

        Use this method to change the behaviour of the or operation.

        :Parameters:
          f
            A function of two parameters which must return the ``or`` of the
            values.
        '''
        if isinstance(f, numpy.vectorize):
            cls.__OR__ = f
        elif isinstance(f, types.FunctionType):
            cls.__OR__ = numpy.vectorize(f)
        else:
            raise ValueError('invalid function')

    @classmethod
    def set_negation(cls, f):
        '''
        Selects a negation (not operation)

        Use this method to change the behaviour of the not operation.

        :Parameters:
          f
            A function of one parameter which must return the ``not`` of the
            value.
        '''
        if isinstance(f, numpy.vectorize):
            cls.__NOT__ = f
        elif isinstance(f, types.FunctionType):
            cls.__NOT__ = numpy.vectorize(f)
        else:
            raise ValueError('invalid function')
################################################################################
# Test
# Placeholder entry point: no self-test is implemented for this module yet.
if __name__ == "__main__":
    pass
|
Get set for Retro Round at the Sandown 500, 8-10 November 2019.
There’s also plenty of race action from the stars of today, when the Supercars take to the track for the final race of the PIRTEK Enduro Cup.
Kids 12 and under receive free Trackside admission with a paying adult, so bring the whole crew for a weekend not to be missed.
Return airfares, checked baggage, 3 nights accommodation and 2 day Trackside Pass** (Sat & Sun). A Trackside ticket gives you the freedom to wander around the track and find your favourite spot to watch the race.
Return airfares, checked baggage, 3 nights accommodation & 2 day (Sat & Sun) Roll Cage hospitality experience** featuring a hosted non-reserved casual lounge, lunch and afternoon tea, a selection of beer, wine, soft drinks and RTD spirits available for purchase.
Return airfares, checked baggage, 3 nights accommodation and Virgin Australia Paddock Club Weekend Package** (Sat & Sun), including premium shared experience, hosted with reserved seating, on arrival snack, morning and afternoon tea, in-suite barista, premium buffet lunch, premium beers, wines and selection of RTD spirits & driver appearances.
Return airfares, checked baggage, 3 nights accommodation & Virgin Australia Paddock Club plus Roll Cage**. The Virgin Australia Paddock Club is premium shared experience, hosted with reserved seating, on arrival snack, morning and afternoon tea, in-suite barista, premium buffet lunch, premium beers, wines and selection of RTD spirits & driver appearances. Roll Cage includes a hosted non-reserved casual lounge, lunch and afternoon tea & a selection of beer, wine, soft drinks and RTD spirits available for purchase.
Hide away from the world in the picturesque Yarra Valley and Dandenong Ranges. Discover the region's lush natural surrounds and spoil yourself with premium food and wine, secluded boutique accommodation, studios and galleries, and magical gardens. Indulge the connoisseur within or spark a new passion for sophisticated food and wine experiences amid the Yarra Valley's oases of vine-strung hills and rolling farmland. Wander into a world of whimsy in the Dandenong Ranges' eclectic shops, ancient forests, abundant wildlife, and hidden retreats.
|
from __future__ import unicode_literals
from . import BaseProvider
import random
import re
from faker.providers.lorem import Provider as Lorem
class Provider(BaseProvider):
    """Generate fake internet-related data: email addresses, user names,
    domain names, URLs/URIs, IPv4/IPv6 addresses and slugs.

    Pattern strings of the form ``{{token}}`` are expanded by
    ``self.generator.parse()``; ``#`` becomes a random digit and ``?``
    a random letter via :meth:`bothify`.
    """

    safe_email_tlds = ('org', 'com', 'net')
    free_email_domains = ('gmail.com', 'yahoo.com', 'hotmail.com')
    # 'com' is repeated so random selection is weighted toward it.
    tlds = ('com', 'com', 'com', 'com', 'com', 'com', 'biz', 'info', 'net', 'org')

    uri_pages = (
        'index', 'home', 'search', 'main', 'post', 'homepage', 'category', 'register', 'login', 'faq', 'about', 'terms',
        'privacy', 'author')
    uri_paths = (
        'app', 'main', 'wp-content', 'search', 'category', 'tag', 'categories', 'tags', 'blog', 'posts', 'list', 'explore')
    # Repeated entries weight the common extensions more heavily.
    uri_extensions = ('.html', '.html', '.html', '.htm', '.htm', '.php', '.php', '.jsp', '.asp')

    user_name_formats = (
        '{{last_name}}.{{first_name}}',
        '{{first_name}}.{{last_name}}',
        '{{first_name}}##',
        '?{{last_name}}',
    )
    email_formats = (
        '{{user_name}}@{{domain_name}}',
        '{{user_name}}@{{free_email_domain}}',
    )
    url_formats = (
        'http://www.{{domain_name}}/',
        'http://{{domain_name}}/',
    )
    uri_formats = (
        '{{url}}',
        '{{url}}{{uri_page}}/',
        '{{url}}{{uri_page}}{{uri_extension}}',
        '{{url}}{{uri_path}}/{{uri_page}}/',
        '{{url}}{{uri_path}}/{{uri_page}}{{uri_extension}}',
    )

    def email(self):
        """Return a random email address.

        Spaces are stripped because expanded name tokens may contain them.
        """
        pattern = self.random_element(self.email_formats)
        return "".join(self.generator.parse(pattern).split(" "))

    def safe_email(self):
        """Return an address on the reserved ``example.*`` domains."""
        return self.user_name() + '@example.' + self.random_element(self.safe_email_tlds)

    def free_email(self):
        """Return an address on a free webmail provider domain."""
        return self.user_name() + '@' + self.free_email_domain()

    def company_email(self):
        """Return an address on a fake company domain."""
        return self.user_name() + '@' + self.domain_name()

    @classmethod
    def free_email_domain(cls):
        """Return a random free webmail provider domain."""
        return cls.random_element(cls.free_email_domains)

    def user_name(self):
        """Return a lowercase user name built from name tokens."""
        pattern = self.random_element(self.user_name_formats)
        return self.bothify(self.generator.parse(pattern)).lower()

    def domain_name(self):
        """Return a random ``word.tld`` domain name."""
        return self.domain_word() + '.' + self.tld()

    def domain_word(self):
        """Derive a domain word from the first token of a fake company name."""
        company = self.generator.format('company')
        company_elements = company.split(' ')
        company = company_elements.pop(0)
        return re.sub(r'\W', '', company).lower()

    def tld(self):
        """Return a random (weighted) top-level domain."""
        return self.random_element(self.tlds)

    def url(self):
        """Return a random http URL."""
        pattern = self.random_element(self.url_formats)
        return self.generator.parse(pattern)

    def ipv4(self):
        """
        Return a random dotted-quad IPv4 address, uniform over all
        32-bit values.
        """
        address = random.randint(0, (1 << 32) - 1)
        return ".".join(str(address >> shift & 0xFF) for shift in (24, 16, 8, 0))

    def ipv6(self):
        """Return a random IPv6 address (8 zero-padded 16-bit groups)."""
        res = []
        for i in range(0, 8):
            res.append(hex(random.randint(0, 65535))[2:].zfill(4))
        return ":".join(res)

    @classmethod
    def uri_page(cls):
        """Return a random page name for URI construction."""
        return cls.random_element(cls.uri_pages)

    @classmethod
    def uri_path(cls, deep=None):
        """Return a slash-joined path of ``deep`` segments (random 1-3 when
        falsy -- note an explicit ``deep=0`` is also treated as unset)."""
        deep = deep if deep else random.randint(1, 3)
        return "/".join([cls.random_element(cls.uri_paths) for _ in range(0, deep)])

    @classmethod
    def uri_extension(cls):
        """Return a random (weighted) file extension for URI construction."""
        return cls.random_element(cls.uri_extensions)

    def uri(self):
        """Return a random URI built from url/path/page/extension parts."""
        pattern = self.random_element(self.uri_formats)
        return self.generator.parse(pattern)

    @classmethod
    def slug(cls, value=None):
        """Slugify ``value`` (Django's algorithm); when ``value`` is falsy,
        slugify random Lorem text instead.
        """
        import unicodedata
        # ASCII-fold, strip non word/space/hyphen chars, collapse runs of
        # whitespace/hyphens into single hyphens.
        value = unicodedata.normalize('NFKD', value or Lorem.text(20)).encode('ascii', 'ignore').decode('ascii')
        value = re.sub(r'[^\w\s-]', '', value).strip().lower()
        return re.sub(r'[-\s]+', '-', value)
|
Set 6 km from Folgaria, Casa Osvaldo offers self-catering accommodation with wooden furnishings and a balcony. Trento is 27 km away. The apartment comes with a seating area with a flat-screen TV, parquet floors and a washing machine. The kitchenette includes a microwave and oven. The property offers private parking, and is located a 40-minute drive from the Rovereto Nord exit off the A22 motorway.
Please inform Casa Osvaldo in advance of your expected arrival time. You can use the Special Requests box when booking, or contact the property directly with the contact details provided in your confirmation.
|
# -*- coding: utf-8 -*-
"""
Some helper functions

String-conversion helpers (``to_bytes``, ``to_str``, ``to_unicode``) that
behave sensibly on both Python 2 and Python 3, plus a ``LoggerMixin``
class that provides a per-class logger.
"""
from __future__ import absolute_import, unicode_literals
import logging
__all__ = ['to_bytes', 'to_str', 'to_unicode', 'LoggerMixin']
# On Python 3, ``bytes`` and ``str`` are distinct types; alias ``unicode``
# to ``str`` so the helpers below can reference it on either version.
if bytes != str:  # Python 3
    #: Define text string data type, same as that in Python 2.x.
    unicode = str
def to_bytes(s, encoding='utf-8'):
    """Encode a text string to a byte string.

    :param s: Value to convert; text strings are encoded, anything else
        is passed through untouched.
    :param str encoding: Codec used for the encode step.
    :return: ``bytes`` (a.k.a. ``str`` on Python 2) when ``s`` was text,
        otherwise the original object.
    :rtype: bytes
    """
    if not isinstance(s, unicode):
        return s
    return s.encode(encoding)
def to_str(s, encoding='utf-8'):
    """Convert to the native ``str`` type of the running interpreter.

    :param s: Value to convert.
    :param str encoding: Codec used for the encode/decode step.
    :return: ``str`` -- bytes on Python 2, text on Python 3.
    :rtype: str

    Delegates to :func:`to_bytes` on Python 2 (where ``str`` is the byte
    type) and to :func:`to_unicode` on Python 3 (where ``str`` is text).
    """
    converter = to_bytes if bytes == str else to_unicode
    return converter(s, encoding)
def to_unicode(s, encoding='utf-8'):
    """Decode a byte string to a text string.

    :param s: Value to convert; byte strings are decoded, anything else
        is passed through untouched.
    :param str encoding: Codec used for the decode step.
    :return: text (``unicode`` on Python 2, ``str`` on Python 3) when
        ``s`` was bytes, otherwise the original object.
    :rtype: unicode
    """
    return s.decode(encoding) if isinstance(s, bytes) else s
class LoggerMixin(object):
    """Mixin that exposes a lazily-resolved :attr:`logger` property.

    The logger is named ``<module>.<qualified class name>`` so log lines
    can be traced back to the emitting class.
    """

    @property
    def logger(self):
        """Return the :class:`logging.Logger` for this instance's class.

        :rtype: logging.Logger

        Falls back to ``__name__`` when ``__qualname__`` is unavailable
        (old-style / Python 2 classes).
        """
        cls = type(self)
        qualname = getattr(cls, '__qualname__', cls.__name__)
        return logging.getLogger('{0:s}.{1:s}'.format(cls.__module__, qualname))
|
Differences between the free and paid version?
Alright. Got the paid version after your input. Thanks.
There's two versions. What's the difference?
|
from PyQt4 import QtCore, QtGui
from PyQt4.Qt import QObject, pyqtSignal, QFileDialog
from MainSkel import Ui_mainSkel
import multiprocessing
from PyQt4.Qt import QMainWindow, QTableWidgetItem, QMessageBox, \
QAbstractItemView, QWidget, Qt, pyqtSlot, pyqtSignal, \
QStandardItem, QStandardItemModel, QString, QObject
from _tpl.other.matplotlibwidget import MatplotlibWidget
# http://stackoverflow.com/questions/2312210/window-icon-of-exe-in-pyqt4
#-------------------------------------------
def gui_log(fn):
    """Decorator: log entry/exit of a GUI method for debugging.

    The wrapped callable must take the GUI object as its first argument;
    entry and exit messages are emitted through ``hsgui.logdbgSignal``.
    Exceptions are logged and swallowed (the GUI keeps running), so the
    wrapper returns None on failure.
    """
    def gui_log_wrapper(hsgui, *args, **kwargs):
        try:
            # FIX: was ``fn.func_name`` (Python 2 only); ``__name__`` works
            # on both Python 2 and 3.
            function_name = fn.__name__
            into_str = 'In hsgui.'+function_name
            outo_str = 'Out hsgui.'+function_name+'\n'
            hsgui.logdbgSignal.emit(into_str)
            ret = fn(hsgui, *args, **kwargs)
            hsgui.logdbgSignal.emit(outo_str)
            return ret
        except Exception as ex:
            import traceback
            # NOTE(review): ``logmsg`` is not defined in this module --
            # presumably imported by the real HotSpotter package; confirm.
            logmsg('\n\n *!!* HotSpotter GUI Raised Exception: '+str(ex))
            logmsg('\n\n *!!* HotSpotter GUI Exception Traceback: \n\n'+traceback.format_exc())
    return gui_log_wrapper
class EditPrefWidget(QWidget):
    'The Settings Pane; Subclass of Main Windows.'
    # NOTE(review): this class uses ``epw`` in place of the conventional
    # ``self`` parameter name, and references ``Ui_editPrefSkel``, ``Pref``
    # and ``logdbg`` which are not imported in this module -- presumably
    # provided by sibling HotSpotter modules; confirm before reuse.
    def __init__(epw, fac):
        # ``fac`` is the application facade whose actions the preference
        # buttons are wired to.
        super( EditPrefWidget, epw ).__init__()
        epw.pref_skel = Ui_editPrefSkel()
        epw.pref_skel.setupUi(epw)
        epw.pref_model = None
        # Wire preference-pane buttons to facade actions.
        epw.pref_skel.redrawBUT.clicked.connect(fac.redraw)
        epw.pref_skel.defaultPrefsBUT.clicked.connect(fac.default_prefs)
        epw.pref_skel.unloadFeaturesAndModelsBUT.clicked.connect(fac.unload_features_and_models)
    @pyqtSlot(Pref, name='populatePrefTreeSlot')
    def populatePrefTreeSlot(epw, pref_struct):
        'Populates the Preference Tree Model'
        logdbg('Bulding Preference Model of: '+repr(pref_struct))
        epw.pref_model = pref_struct.createQPreferenceModel()
        logdbg('Built: '+repr(epw.pref_model))
        epw.pref_skel.prefTreeView.setModel(epw.pref_model)
        # Widen the first (key name) column so preference keys are readable.
        epw.pref_skel.prefTreeView.header().resizeSection(0,250)
class MainWindow(QtGui.QMainWindow):
    """Main HotSpotter window: wires GUI widgets/tables to facade actions.

    NOTE(review): the methods below use three different names for the
    instance parameter (``self``, ``hsgui``, ``uim``) and reference many
    names not defined in this module (``fac``, ``logdbg``, ``logmsg``,
    ``func_log``, ``setdiff1d``, ``_fromUtf8``, ``cmd_help``,
    ``workflow_help``, ``troubles_help``, various ``*Signal`` attributes).
    They appear to have been merged from separate HotSpotter modules --
    confirm the missing imports/attributes before relying on any method.
    """
    # Emitted with (col_headers, col_editable, row_list, row2_data_tup).
    populateChipTblSignal = pyqtSignal(list, list, list, list)
    def __init__(self, hs=None):
        # FIX: was ``super(HotSpotterMainWindow, self)`` -- that name does
        # not exist here; this class is named ``MainWindow``.
        super(MainWindow, self).__init__()
        self.hs = None
        self.ui=Ui_mainSkel()
        self.ui.setupUi(self)
        self.show()
        # FIX: was ``if hs is None`` which only "connected" when no API was
        # supplied (and then with None); connect when an API is provided.
        if hs is not None:
            self.connect_api(hs)
    def connect_api(self, hs):
        'Attach a HotSpotter API instance to this window.'
        print('[win] connecting api')
        self.hs = hs
        # NOTE(review): ``hsgui`` and ``fac`` are unbound in this scope;
        # this body looks pasted from a module-level function -- TODO
        # confirm the intended receiver (probably ``self``).
        hsgui.epw = EditPrefWidget(fac)
        hsgui.plotWidget = MatplotlibWidget(hsgui.main_skel.centralwidget)
        hsgui.plotWidget.setObjectName(_fromUtf8('plotWidget'))
        hsgui.main_skel.root_hlayout.addWidget(hsgui.plotWidget)
        hsgui.prev_tbl_item = None
        hsgui.prev_cid = None
        hsgui.prev_gid = None
        hsgui.non_modal_qt_handles = []
    def connectSignals(hsgui, fac):
        'Connects GUI signals to Facade Actions'
        logdbg('Connecting GUI >> to >> Facade')
        # Base Signals
        hsgui.selectCidSignal.connect(fac.selc)
        hsgui.selectGidSignal.connect(fac.selg)
        hsgui.renameChipIdSignal.connect(fac.rename_cid)
        hsgui.changeChipPropSignal.connect(fac.change_chip_prop)
        hsgui.logdbgSignal.connect(fac.logdbgSlot)
        # SKEL SIGNALS
        main_skel = hsgui.main_skel
        # Widget
        hsgui.main_skel.fignumSPIN.valueChanged.connect(
            fac.set_fignum)
        # File
        main_skel.actionOpen_Database.triggered.connect(
            fac.open_db)
        main_skel.actionSave_Database.triggered.connect(
            fac.save_db)
        main_skel.actionImport_Images.triggered.connect(
            fac.import_images)
        main_skel.actionQuit.triggered.connect(
            hsgui.close)
        # Actions
        main_skel.actionQuery.triggered.connect(
            fac.query)
        main_skel.actionAdd_ROI.triggered.connect(
            fac.add_chip)
        main_skel.actionReselect_Orientation.triggered.connect(
            fac.reselect_orientation)
        main_skel.actionReselect_ROI.triggered.connect(
            fac.reselect_roi)
        main_skel.actionRemove_Chip.triggered.connect(
            fac.remove_cid)
        main_skel.actionNext.triggered.connect(
            fac.select_next)
        # Options
        main_skel.actionTogEll.triggered.connect(
            fac.toggle_ellipse)
        main_skel.actionTogPts.triggered.connect(
            fac.toggle_points)
        main_skel.actionTogPlt.triggered.connect(
            hsgui.setPlotWidgetVisibleSlot)
        main_skel.actionPreferences.triggered.connect(
            hsgui.epw.show )
        # Help
        main_skel.actionView_Documentation.triggered.connect(
            fac.view_documentation)
        main_skel.actionHelpCMD.triggered.connect(
            lambda:hsgui.msgbox('Command Line Help', cmd_help))
        main_skel.actionHelpWorkflow.triggered.connect(
            lambda:hsgui.msgbox('Workflow HOWTO', workflow_help))
        main_skel.actionHelpTroubles.triggered.connect(
            lambda:hsgui.msgbox('Troubleshooting Help', troubles_help))
        main_skel.actionWriteLogs.triggered.connect(
            fac.write_logs)
        # Convinience
        main_skel.actionOpen_Source_Directory.triggered.connect(
            fac.vd)
        main_skel.actionOpen_Data_Directory.triggered.connect(
            fac.vdd)
        main_skel.actionOpen_Internal_Directory.triggered.connect(
            fac.vdi)
        main_skel.actionConvertImage2Chip.triggered.connect(
            fac.convert_all_images_to_chips)
        main_skel.actionBatch_Change_Name.triggered.connect(
            fac._quick_and_dirty_batch_rename)
        main_skel.actionAdd_Metadata_Property.triggered.connect(
            fac.add_new_prop)
        main_skel.actionAssign_Matches_Above_Threshold.triggered.connect(
            fac.match_all_above_thresh)
        main_skel.actionIncrease_ROI_Size.triggered.connect(
            fac.expand_rois)
        # Experiments
        main_skel.actionMatching_Experiment.triggered.connect(
            fac.run_matching_experiment)
        main_skel.actionName_Consistency_Experiment.triggered.connect(
            fac.run_name_consistency_experiment)
        #
        # Gui Components
        # Tables Widgets
        main_skel.chip_TBL.itemClicked.connect(
            hsgui.chipTableClickedSlot)
        main_skel.chip_TBL.itemChanged.connect(
            hsgui.chipTableChangedSlot)
        main_skel.image_TBL.itemClicked.connect(
            hsgui.imageTableClickedSlot)
        main_skel.res_TBL.itemChanged.connect(
            hsgui.resultTableChangedSlot)
        # Tab Widget
        # This signal slot setup is very bad. Needs rewrite
        main_skel.tablesTabWidget.currentChanged.connect(
            fac.change_view)
        main_skel.chip_TBL.sortByColumn(0, Qt.AscendingOrder)
        main_skel.res_TBL.sortByColumn(0, Qt.AscendingOrder)
        main_skel.image_TBL.sortByColumn(0, Qt.AscendingOrder)
    @pyqtSlot(name='setPlotWidgetVisible')
    def setPlotWidgetVisibleSlot(hsgui, bit=None): #None = toggle
        'Show/hide the embedded matplotlib widget; toggles when bit is None.'
        if hsgui.plotWidget != None:
            logdbg('Disabling Plot Widget')
            if bit is None: bit = not hsgui.plotWidget.isVisible()
            was_visible = hsgui.plotWidget.setVisible(bit)
            if was_visible != bit:
                # Fig 0 is reserved for the embedded widget; fig 1 for a
                # detached window.
                if bit:
                    hsgui.main_skel.fignumSPIN.setValue(0)
                else:
                    hsgui.main_skel.fignumSPIN.setValue(1)
                #hsgui.setFignumSignal.emit(int(1 - bit)) # plotwidget fignum = 0
    # Internal GUI Functions
    def populate_tbl_helper(hsgui, tbl, col_headers, col_editable, row_list, row2_data_tup ):
        'Fill a QTableWidget with rows of data, preserving the sort order.'
        #tbl = main_skel.chip_TBL
        hheader = tbl.horizontalHeader()
        sort_col = hheader.sortIndicatorSection()
        sort_ord = hheader.sortIndicatorOrder()
        tbl.sortByColumn(0, Qt.AscendingOrder) # Basic Sorting
        # Block signals so repopulating does not fire itemChanged handlers.
        prevBlockSignals = tbl.blockSignals(True)
        tbl.clear()
        tbl.setColumnCount(len(col_headers))
        tbl.setRowCount(len(row_list))
        tbl.verticalHeader().hide()
        tbl.setHorizontalHeaderLabels(col_headers)
        tbl.setSelectionMode( QAbstractItemView.SingleSelection )
        tbl.setSelectionBehavior( QAbstractItemView.SelectRows)
        tbl.setSortingEnabled(False)
        for row in iter(row_list):
            data_tup = row2_data_tup[row]
            for col, data in enumerate(data_tup):
                item = QTableWidgetItem()
                # Store ints natively so numeric columns sort numerically.
                try:
                    int_data = int(data)
                    item.setData(Qt.DisplayRole, int_data)
                except ValueError: # for strings
                    item.setText(str(data))
                except TypeError: #for lists
                    item.setText(str(data))
                item.setTextAlignment(Qt.AlignHCenter)
                if col_editable[col]: item.setFlags(item.flags() | Qt.ItemIsEditable)
                else: item.setFlags(item.flags() ^ Qt.ItemIsEditable)
                tbl.setItem(row, col, item)
        tbl.setSortingEnabled(True)
        tbl.sortByColumn(sort_col,sort_ord) # Move back to old sorting
        tbl.show()
        tbl.blockSignals(prevBlockSignals)
    @pyqtSlot(dict, name='updateDBStatsSlot')
    @gui_log
    def updateDBStatsSlot(hsgui, stats):
        'Reflect database statistics in the window title.'
        hsgui.setWindowTitle(stats['title'])
    def updateSelSpinsSlot(hsgui, cid, gid):
        'Sync the chip/image spin boxes with the current selection.'
        hsgui.prev_cid = cid
        hsgui.prev_gid = gid
        hsgui.main_skel.sel_cid_SPIN.setValue(cid)
        hsgui.main_skel.sel_gid_SPIN.setValue(gid)
    def redrawGuiSlot(hsgui):
        'Re-show the window and redraw the plot widget when visible.'
        hsgui.show()
        if hsgui.plotWidget != None and\
           hsgui.plotWidget.isVisible():
            hsgui.plotWidget.show()
            hsgui.plotWidget.draw()
    def updateStateLabelSlot(hsgui, state):
        'Display the application state string in the status label.'
        hsgui.main_skel.state_LBL.setText(state)
    @pyqtSlot(list, list, list, list, name='populateChipTblSlot')
    def populateChipTblSlot(hsgui, col_headers, col_editable, row_list, row2_data_tup):
        'Populate the chip table from signal payload.'
        hsgui.populate_tbl_helper(hsgui.main_skel.chip_TBL, col_headers, col_editable, row_list, row2_data_tup)
    @pyqtSlot(list, list, list, list, name='populateImageTblSlot')
    def populateImageTblSlot(hsgui, col_headers, col_editable, row_list, row2_data_tup):
        'Populate the image table from signal payload.'
        hsgui.populate_tbl_helper(hsgui.main_skel.image_TBL, col_headers, col_editable, row_list, row2_data_tup)
    @pyqtSlot(list, list, list, list, name='populateResultTblSlot')
    def populateResultTblSlot(hsgui, col_headers, col_editable, row_list, row2_data_tup):
        'Populate the result table from signal payload.'
        hsgui.populate_tbl_helper(hsgui.main_skel.res_TBL, col_headers, col_editable, row_list, row2_data_tup)
    @gui_log
    def chipTableChangedSlot(hsgui, item):
        'A Chip had a data member changed '
        hsgui.logdbgSignal.emit('chip table changed')
        sel_row = item.row()
        sel_col = item.column()
        sel_cid = int(hsgui.main_skel.chip_TBL.item(sel_row,0).text())
        # Commas are reserved as separators downstream; store as ';;'.
        new_val = str(item.text()).replace(',',';;')
        header_lbl = str(hsgui.main_skel.chip_TBL.horizontalHeaderItem(sel_col).text())
        hsgui.selectCidSignal.emit(sel_cid)
        # Rename the chip!
        if header_lbl == 'Chip Name':
            hsgui.renameChipIdSignal.emit(new_val, sel_cid)
        # Change the user property instead
        else:
            hsgui.changeChipPropSignal.emit(header_lbl, new_val, sel_cid)
    @gui_log
    def resultTableChangedSlot(hsgui, item):
        'A Chip was Renamed in Result View'
        hsgui.logdbgSignal.emit('result table changed')
        sel_row = item.row()
        sel_cid = int(hsgui.main_skel.res_TBL.item(sel_row,1).text())
        new_name = str(item.text())
        hsgui.renameChipIdSignal.emit(new_name, int(sel_cid))
    def imageTableClickedSlot(hsgui, item):
        'Select Image ID'
        # Ignore repeated clicks on the same item.
        if item == hsgui.prev_tbl_item: return
        hsgui.prev_tbl_item = item
        sel_row = item.row()
        sel_gid = int(hsgui.main_skel.image_TBL.item(sel_row,0).text())
        hsgui.selectGidSignal.emit(sel_gid)
    def chipTableClickedSlot(hsgui, item):
        'Select Chip ID'
        hsgui.logdbgSignal.emit('chip table clicked')
        if item == hsgui.prev_tbl_item: return
        hsgui.prev_tbl_item = item
        sel_row = item.row()
        sel_cid = int(hsgui.main_skel.chip_TBL.item(sel_row,0).text())
        hsgui.selectCidSignal.emit(sel_cid)
    def update_image_table(self):
        # NOTE(review): connects a signal on every call and does nothing
        # else -- looks like an unfinished stub; confirm intent.
        uim.populateImageTblSignal.connect( uim.hsgui.populateImageTblSlot )
        pass
    def select_tab(uim, tabname, block_draw=False):
        'Switch to the named tab, optionally without firing redraw signals.'
        logdbg('Selecting the '+tabname+' Tab')
        if block_draw:
            prevBlock = uim.hsgui.main_skel.tablesTabWidget.blockSignals(True)
        tab_index = uim.tab_order.index(tabname)
        uim.selectTabSignal.emit(tab_index)
        if block_draw:
            uim.hsgui.main_skel.tablesTabWidget.blockSignals(prevBlock)
    def get_gui_figure(uim):
        'returns the matplotlib.pyplot.figure'
        if uim.hsgui != None and uim.hsgui.plotWidget != None:
            fig = uim.hsgui.plotWidget.figure
            fig.show = lambda: uim.hsgui.plotWidget.show() #HACKY HACK HACK
            return fig
        return None
    @func_log
    def redraw_gui(uim):
        'Request a GUI redraw when the window is visible.'
        if not uim.hsgui is None and uim.hsgui.isVisible():
            uim.redrawGuiSignal.emit()
    # --- UIManager things that deal with the GUI Through Signals
    @func_log
    def populate_chip_table(uim):
        'Build the chip-table payload from the chip manager and emit it.'
        #tbl = uim.hsgui.main_skel.chip_TBL
        cm = uim.hs.cm
        col_headers  = ['Chip ID', 'Chip Name', 'Name ID', 'Image ID', 'Other CIDS']
        col_editable = [ False  ,  True      ,  False   ,  False    ,  False      ]
        # Add User Properties to headers
        col_headers += cm.user_props.keys()
        col_editable += [True for key in cm.user_props.keys()]
        # Create Data List
        cx_list  = cm.get_valid_cxs()
        data_list = [None]*len(cx_list)
        row_list = range(len(cx_list))
        for (i,cx) in enumerate(cx_list):
            # Get Indexing Data
            cid  = cm.cx2_cid[cx]
            gid  = cm.cx2_gid(cx)
            nid  = cm.cx2_nid(cx)
            # Get Useful Data
            name = cm.cx2_name(cx)
            # NOTE(review): ``setdiff1d`` is not imported here -- presumably
            # numpy's; confirm the module-level imports.
            other_cxs_ = setdiff1d(cm.cx2_other_cxs([cx])[0], cx)
            other_cids = cm.cx2_cid[other_cxs_]
            # Get User Data
            cm.user_props.keys()
            user_data = [cm.user_props[key][cx] for key in
                         cm.user_props.iterkeys()]
            # Pack data to sent to Qt
            data_list[i] = (cid, name, nid, gid, other_cids)+tuple(user_data)
            #(cid, name, nid, gid, other_cids, *user_data)
        uim.populateChipTblSignal.emit(col_headers, col_editable, row_list, data_list)
    @func_log
    def populate_image_table(uim):
        'Build the image-table payload from the image manager and emit it.'
        col_headers  = ['Image ID', 'Image Name', 'Chip IDs', 'Chip Names']
        col_editable = [ False    ,  False      ,  False    ,  False      ]
        # Populate table with valid image indexes
        cm, gm = uim.hs.get_managers('cm','gm')
        gx_list  = gm.get_valid_gxs()
        data_list = [None]*len(gx_list)
        row_list = range(len(gx_list))
        for (i,gx) in enumerate(gx_list):
            gid   = gm.gx2_gid[gx]
            gname = gm.gx2_gname[gx]
            cid_list  = gm.gx2_cids(gx)
            name_list = str([cm.cid2_(cid, 'name') for cid in cid_list])
            data_list[i] = (gid, gname, cid_list, name_list)
        uim.populateImageTblSignal.emit(col_headers, col_editable, row_list, data_list)
    @func_log
    def populate_result_table(uim):
        'Build the result-table payload (query chip + ranked matches).'
        col_headers  = ['Rank', 'Chip ID', 'Chip Name', 'score']
        col_editable = [False ,  False   ,  True      ,  False ]
        # Check to see if results exist
        res = uim.sel_res
        if res is None:
            logdbg('Not populating. selected results are None.')
            return None
        logmsg(res)
        gm, cm, am = uim.hs.get_managers('gm','cm','am')
        dynargs =\
        ('cid', 'name' )
        (qcid , qname ) = res.qcid2_(*dynargs)
        (tcid , tname , tscore ) = res.tcid2_(*dynargs+('score',))
        num_results = len(tcid)
        # Row 0 is the queried chip itself, then one row per ranked match.
        data_list = [None]*(num_results+1)
        row_list = range(num_results+1)
        data_list[0] = [0, qcid, qname, 'Queried Chip']
        for (ix, (cid, name, score)) in enumerate(zip(tcid, tname, tscore)):
            rank   = ix+1
            data_list[ix+1] = (rank, cid, name, score)
        uim.populateResultTblSignal.emit(col_headers, col_editable, row_list, data_list)
    def populate_algo_settings(uim):
        'Push the preference tree to the settings pane.'
        logdbg('Populating the Preference Tree... Sending Signal')
        uim.populatePrefTreeSignal.emit(uim.hs.all_pref)
    def set_fignum(uim, fignum):
        'Set the figure-number spin box without firing its change handler.'
        if uim.hsgui != None:
            prevBlockSignals = uim.hsgui.main_skel.fignumSPIN.blockSignals(True)
            uim.setfignumSignal.emit(fignum)
            uim.hsgui.main_skel.fignumSPIN.blockSignals(prevBlockSignals)
if __name__ == '__main__':
    import sys
    # Required for frozen (py2exe/PyInstaller) Windows builds that spawn
    # multiprocessing workers.
    multiprocessing.freeze_support()
    def test():
        'Launch the GUI standalone as a smoke test.'
        app = QtGui.QApplication(sys.argv)
        # FIX: was ``HotSpotterMainWindow()`` -- that name is not defined in
        # this module; the window class above is ``MainWindow``.
        main_win = MainWindow()
        app.setActiveWindow(main_win)
        sys.exit(app.exec_())
    test()
|
Centrally located within the gated community of Lexington Commons, this gorgeous villa was built in 2007 and features a 2 bedroom split plan with an additional den/office. New Hanover Floors, top of the line appliances, fresh interior paint, with several more updates inside and out of this well maintained home! Water, lawn maintenance, and garbage are all included with your monthly rent!
From Ridge Rd & Little Rd, go North on Little Rd. Just past Courthouse, Lexington Commons (Corinthian Way) will be on your left. Gate code required for entrance.
|
from string import Template
from twisted.internet.defer import inlineCallbacks
from twisted.enterprise import adbapi
# Placeholder until the real psycopg2 ProgrammingError is wired in.
ProgrammingError = Exception  # FIXME
# Upper bound enforced by the per-schema ``token`` smallint domain (2**16).
TOKEN_MAX = 65536
def connect(**kwargs):
    """Create a new psycopg2 connection pool.

    :param kwargs: must contain ``pg-username``, ``pg-password``,
        ``pg-host`` and ``pg-database``.
    :return: a twisted ``adbapi.ConnectionPool``.
    """
    pool_options = {
        'cp_min': 3,
        'cp_max': 10,
        'cp_noisy': True,
        'cp_reconnect': True,
        'user': kwargs['pg-username'],
        'password': kwargs['pg-password'],
        'host': kwargs['pg-host'],
        'database': kwargs['pg-database'],
    }
    return adbapi.ConnectionPool("psycopg2", **pool_options)
def drop_schema(schema, **kwargs):
    """Drop a schema (and all contained objects) if it exists.

    :param schema: name of the schema to drop. Trusted identifier --
        interpolated directly into SQL, so never pass user input.
    :param kwargs: either an existing ``conn`` pool, or connection
        parameters forwarded to :func:`connect`.
    :return: the Deferred from ``conn.runOperation``.
    """
    # FIX: the original docstring was the broken literal `"" ""`.
    if 'conn' in kwargs:
        conn = kwargs.pop('conn')
    else:
        conn = connect(**kwargs)
    sql = "DROP SCHEMA IF EXISTS %s CASCADE" % schema
    return conn.runOperation(sql)
@inlineCallbacks
def create_schema(machine, **kwargs):
    """ add a new schema to an existing db

    Creates, for the given petri-net ``machine``: a ``token`` domain, the
    ``state``/``vector``/``event``/``event_payload``/``current_state``
    composite types, the ``states``/``transitions``/``events`` tables, and
    a ``vclock`` trigger that applies a transition's delta vector to the
    stored state on each event insert.

    NOTE(review): schema and type names are interpolated with ``%`` --
    identifiers must be trusted. ``TOKEN_MAX`` (65536) exceeds the
    smallint maximum of 32767, so the upper CHECK bound is effectively
    unreachable; confirm whether an int4-backed domain was intended.
    """
    if 'conn' in kwargs:
        conn = kwargs.pop('conn')
    else:
        conn = connect(**kwargs)
    # Schema name defaults to the machine's name.
    schema = kwargs.get('schema_name', machine.name)
    yield conn.runOperation("CREATE schema %s" % schema)
    yield conn.runOperation("""
    CREATE DOMAIN %s.token as smallint CHECK(VALUE >= 0 and VALUE <= %i)
    """ % (schema, TOKEN_MAX))
    # One column per petri-net place; ``offset`` fixes each place's slot.
    # NOTE(review): place count read from machine.machine['state'] but
    # offsets from machine.net.places -- confirm these always agree.
    num_places = len(machine.machine['state'])
    columns = [''] * num_places
    vector = [''] * num_places
    delta = [''] * num_places
    for key, props in machine.net.places.items():
        i = props['offset']
        columns[i] = ' %s %s.token' % (key, schema)
        vector[i] = ' %s int4' % key
        # Used inside the vclock trigger: new state = old state + delta.
        delta[i] = " (state).%s + conn.%s" % (key, key)
    yield conn.runOperation("""
    CREATE TYPE %s.state as ( %s )
    """ % (schema, ','.join(columns)))
    yield conn.runOperation("""
    CREATE TYPE %s.vector as ( %s )
    """ % (schema, ','.join(vector)))
    yield conn.runOperation("""
    CREATE TYPE %s.event as (
        id varchar(32),
        oid varchar(255),
        rev int4
    )
    """ % (schema))
    yield conn.runOperation("""
    CREATE TYPE %s.event_payload as (
        id varchar(32),
        oid varchar(255),
        seq int4,
        action varchar(255),
        payload json,
        timestamp timestamp
    )
    """ % (schema))
    yield conn.runOperation("""
    CREATE TYPE %s.current_state as (
        id varchar(32),
        oid varchar(255),
        action varchar(255),
        rev int4,
        state %s.state,
        payload json,
        modified timestamp,
        created timestamp
    )
    """ % (schema, schema))
    initial_vector = machine.net.initial_vector()
    # KLUDGE: this seems to be a limitation of how default values are declared
    # this doesn't work when state vector has only one element
    #    state %s.state DEFAULT (0), # FAILS
    #    state %s.state DEFAULT (0,0), # WORKS
    if len(initial_vector) < 2:
        raise Exception('state vector must be an n-tuple where n >= 2')
    # ``tuple(initial_vector)`` renders as ``(v0, v1, ...)`` which doubles
    # as a SQL row constructor for the composite default.
    yield conn.runOperation("""
    CREATE TABLE %s.states (
        oid VARCHAR(256) PRIMARY KEY,
        rev int4 default 0,
        state %s.state DEFAULT %s::%s.state,
        created timestamp DEFAULT now(),
        modified timestamp DEFAULT now()
    );
    """ % (schema, schema, tuple(initial_vector), schema))
    yield conn.runOperation("""
    CREATE TABLE %s.transitions (
        action VARCHAR(255) PRIMARY KEY,
        vector %s.vector
    );
    """ % (schema, schema))
    # One row per transition: the delta applied to the state vector.
    for key, props in machine.net.transitions.items():
        yield conn.runOperation("""
        INSERT INTO %s.transitions values('%s', %s)
        """ % (schema, key, tuple(props['delta'])))
    yield conn.runOperation("""
    CREATE TABLE %s.events (
        oid VARCHAR(255) REFERENCES %s.states(oid) ON DELETE CASCADE ON UPDATE CASCADE,
        seq SERIAL,
        action VARCHAR(255) NOT NULL,
        payload jsonb DEFAULT '{}',
        hash VARCHAR(32) NOT NULL,
        timestamp timestamp DEFAULT NULL
    );
    """ % (schema, schema))
    yield conn.runOperation("""
    ALTER TABLE %s.events ADD CONSTRAINT %s_oid_seq_pkey PRIMARY KEY (oid, seq);
    """ % (schema, schema))
    yield conn.runOperation("""
    CREATE INDEX %s_hash_idx on %s.events (hash);
    """ % (schema, schema))
    # Trigger function: on event insert, look up the transition's delta,
    # apply it to the object's state, bump the revision, and stamp the
    # event row with seq/hash/timestamp. ${MARKER} is substituted with the
    # literal ``$$`` plpgsql quoting; Template is used because the body is
    # full of ``%`` and ``$`` characters that confuse %-formatting.
    function_template = Template("""
    CREATE OR REPLACE FUNCTION ${name}.vclock() RETURNS TRIGGER
    AS $MARKER
    DECLARE
        conn ${name}.vector;
        revision int4;
    BEGIN
        SELECT
            (vector).* INTO STRICT conn
        FROM
            ${name}.transitions
        WHERE
            action = NEW.action;
        UPDATE
            ${name}.states set
                state = ( ${delta} ),
                rev = rev + 1,
                modified = now()
            WHERE
                oid = NEW.oid
            RETURNING
                rev into STRICT revision;
        NEW.seq = revision;
        NEW.hash = md5(row_to_json(NEW)::TEXT);
        NEW.timestamp = now();
        RETURN NEW;
    END
    $MARKER LANGUAGE plpgsql""")
    # NOTE(review): var1..var3 have no ${var*} placeholders in the template
    # above -- harmless leftovers; confirm before removing.
    fn_sql = function_template.substitute(
        MARKER='$$',
        name=schema,
        var1='$1',
        var2='$2',
        var3='$3',
        delta=','.join(delta)
    )
    yield conn.runOperation(fn_sql)
    function_template = Template("""
    CREATE TRIGGER ${name}_dispatch
    BEFORE INSERT on ${name}.events
    FOR EACH ROW EXECUTE PROCEDURE ${name}.vclock();
    """)
    trigger_sql = function_template.substitute(name=schema)
    yield conn.runOperation(trigger_sql)
|
Another update to the 2017 Railway gallery. Pictures from a trip to the West Midlands, local pictures, and several shots from the CPRR 2017 Diesel Gala. The latter includes stock moves before and after the event.
|
"""
A module containing transcripts utils.
"""
# pylint: disable=inconsistent-return-statements
import json
from pysrt import SubRipFile, SubRipItem, SubRipTime
from pysrt.srtexc import Error
from edxval.exceptions import TranscriptsGenerationException
class Transcript:
    """Helpers for converting subtitles between SRT and 'sjson' formats."""

    SRT = 'srt'
    SJSON = 'sjson'

    @staticmethod
    def generate_sjson_from_srt(srt_subs):
        """Build the 'sjson' dict representation of SubRip cues.

        Arguments:
            srt_subs(SubRip): "SRT" subs object

        Returns:
            dict with parallel 'start'/'end'/'text' lists; newlines in
            cue text are flattened to spaces.
        """
        cues = list(srt_subs)
        return {
            'start': [cue.start.ordinal for cue in cues],
            'end': [cue.end.ordinal for cue in cues],
            'text': [cue.text.replace('\n', ' ') for cue in cues],
        }

    @staticmethod
    def generate_srt_from_sjson(sjson_subs):
        """Render 'sjson' subs as SubRip (*.srt) text.

        Arguments:
            sjson_subs (dict): `sjson` subs.

        Returns:
            SRT-formatted string; empty string when the three parallel
            lists disagree in length.
        """
        starts = sjson_subs['start']
        ends = sjson_subs['end']
        texts = sjson_subs['text']
        if not (len(starts) == len(ends) == len(texts)):
            return ''
        fragments = []
        for index, (begin, finish, caption) in enumerate(zip(starts, ends, texts)):
            cue = SubRipItem(
                index=index,
                start=SubRipTime(milliseconds=begin),
                end=SubRipTime(milliseconds=finish),
                text=caption,
            )
            fragments.append(str(cue))
            fragments.append('\n')
        return ''.join(fragments)

    @classmethod
    def convert(cls, content, input_format, output_format):
        """Convert transcript `content` from `input_format` to `output_format`.

        Arguments:
            content: Transcript content byte-stream.
            input_format: one of 'srt', 'sjson'.
            output_format: one of 'srt', 'sjson'.

        Raises:
            TranscriptsGenerationException: when invalid SRT content is
                parsed during srt -> sjson conversion.
        """
        assert input_format in ('srt', 'sjson')
        assert output_format in ('srt', 'sjson')
        # utf-8-sig also strips a leading byte-order mark if present;
        # fall back to latin-1 for legacy transcripts.
        try:
            content = content.decode('utf-8-sig')
        except UnicodeDecodeError:
            content = content.decode('latin-1')
        if input_format == output_format:
            return content
        if input_format == 'srt' and output_format == 'sjson':
            try:
                # ERROR_RAISE surfaces parse problems as exceptions.
                srt_subs = SubRipFile.from_string(content, error_handling=SubRipFile.ERROR_RAISE)
            except Error as ex:  # Base exception from pysrt
                raise TranscriptsGenerationException(str(ex)) from ex
            return json.dumps(cls.generate_sjson_from_srt(srt_subs))
        if input_format == 'sjson' and output_format == 'srt':
            return cls.generate_srt_from_sjson(json.loads(content))
|
Overdale Park has a full 12 month residential licence.
• “occupier” means anyone who occupies a park home, under an Agreement to which the Mobile Homes Act 1983 applies.
• “you” and “your” refers to the homeowner or other occupier of a park home.
• “we” and “our” refers to the park owner.
• They are to apply only from the date on which they take effect, which is 9th January 2015.
• No occupier who is in occupation on that date will be treated as being in breach due to circumstances which were in existence before that date.
1. For reasons of ventilation and safety, the underneath of each home is to be kept clear and not used as storage space.
2. You must not erect fences or other means of enclosure unless they are new, no more than 2 metres in height and you have obtained our prior approval in writing (which will not be unreasonably withheld or delayed).
3. External fires, including incinerators, are not allowed.
4. No inflammable substances may be kept on the park except in quantities reasonable for domestic use.
5. No explosive substances may be kept on the park.
6. You must not have more than one storage shed on the pitch. The design, size and standard of the shed must be approved by us in writing (which will not be unreasonably withheld or delayed) and positioned so as to comply with the park’s site licence conditions and fire safety requirements.
7. You must not have more than one greenhouse on the pitch. The design, size and standard of the greenhouse must be approved by us in writing (which will not be withheld or delayed unreasonably). You must position the greenhouse so as to comply with the park’s site licence conditions and fire safety requirements.
8. Any structure erected in the separation space between park homes must be of non-combustible construction.
9. You are responsible for the disposal of all household, recyclable and garden waste in approved containers through the local authority service. You must not overfill containers and you must place them in the approved position for the local authority collections.
10. You must not deposit any refuse or unroadworthy vehicles on any part of the park.
11. Occupiers must not use the mobile home, the pitch or any part of the park for any business purpose and you must not use the park home or pitch for the storage of stock, plant, machinery or equipment used or last used for any business purpose. However, you may carry out office work as long as it does not affect other occupiers and you do not have staff, other workers, customers or members of the public calling at the park home or the park.
12. No persons under the age of fifty five years may reside in a park home.
13. Musical instruments, MP3 players, CD Players, radios, other appliances and motor vehicles must not be used to cause nuisance to others, especially between the hours of 10.30pm and 8am.
a. Not more than one dog (other than any of the breeds subject to the Dangerous Dogs Act 1991 which are not permitted at all). Any dog must be kept under proper control and must not be permitted to frighten other users of the park. You must keep any dog on a leash not exceeding 1m in length and must not allow it to despoil the park.
b. Not more than two domestic cats.
c. Not more than two budgerigars.
16. All external water pipes must be lagged by occupiers against potential frost damage.
17. Access is not permitted to vacant pitches. Building materials, equipment and/or plant must be left undisturbed.
18. All vehicles must be driven carefully on the park and not exceed the displayed speed limit.
19. You must not park more than two vehicles on the park.
20. Parking is not permitted on roads or grass verges.
21. Parking is only permitted in authorised parking spaces.
22. You must ensure that any visitor’s vehicles park in the designated car parks.
a.) All vehicles used on roads on the park must be taxed and insured and be in roadworthy condition.
b.) All drivers must hold a current driving licence for the category of vehicle driven on the park.
24. Disused/unroadworthy vehicles must be removed from the park and the park owner reserves the right to remove any vehicle, which is apparently abandoned.
25. You must not carry out works and repairs on the park which involve the discharging of motor oils and other fuels into the drains, onto the roads or the car park.
26. Other than for the delivering of goods and services you must not park or allow parking of commercial vehicles with a gross vehicle weight exceeding 1.5 tonnes.
27. All park homes must be equipped with a fire extinguisher/blanket which conforms to the relevant British standard.
28. Guns, firearms or offensive weapons of any description must not be used on the park and must only be kept with a licence from the appropriate authority.
29. Where drying washing on the pitch you must only use a rotary type washing line.
30. You must not allow children to play ball games, ride bicycles (other than for arriving or departing the park), use skateboards, scooters etc or engage in other similar activities, which would cause nuisance or be of annoyance to residents.
31. The homeowner must insure the home particularly in respect of public liability.
32. No external alterations or additions to the pitch is permitted without our prior written approval (which will not be unreasonably withheld).
33. You must ensure that any trades people or contractors instructed by you to carry out work on the park home or pitch has public liability insurance and complies with all statutory health and safety procedures at all times.
34. You must ensure that any gas, electrical and water trades people instructed by you have all relevant current certificates of competence to carry out any work.
35. Other than for loading or cleaning purposes you must not park or allow parking of caravans on the park for a period longer than 24 hours.
|
#! /usr/bin/env python
'''
biquad.py: Biquad filter implementation
Copyright (c) 2012 Bill Gribble <grib@billgribble.com>
'''
from mfp.processor import Processor
from ..mfp_app import MFPApp
from mfp import log
import math
from ..bang import Uninit
class Biquad(Processor):
    doc_tooltip_obj = "Biquad filter (5-parameter normalized form)"
    doc_tooltip_inlet = [ "Signal in or parameter dictionary with keys a1, a2, b0, b1, b2" ]
    doc_tooltip_outlet = [ "Signal out" ]

    def __init__(self, init_type, init_args, patch, scope, name):
        """Create a 1-in/1-out processor backed by the biquad~ DSP object."""
        Processor.__init__(self, 1, 1, init_type, init_args, patch, scope, name)
        initargs, kwargs = self.parse_args(init_args)
        self.dsp_inlets = [0]
        self.dsp_outlets = [0]
        self.dsp_init("biquad~")

    def trigger(self):
        """Forward a dict of coefficient overrides from inlet 0 to the DSP object.

        Non-dict input is ignored; values that cannot be coerced to float
        are logged and reported via self.error without stopping the loop.
        """
        coeffs = self.inlets[0]
        if not isinstance(coeffs, dict):
            return
        for pname, pval in coeffs.items():
            try:
                self.dsp_setparam(pname, float(pval))
            except Exception as e:
                import traceback
                tb = traceback.format_exc()
                log.debug("biquad~: Error setting param", pname, "to", type(pval), str(pval))
                log.debug("biquad~: Exception:", str(e))
                self.error(tb)
def bq_hipass(freq, q):
    """Compute normalized biquad coefficients for a highpass filter.

    Uses the RBJ Audio EQ Cookbook highpass form, normalized by a0.

    Args:
        freq: cutoff frequency in Hz
        q: filter steepness (Q factor)

    Returns:
        dict with keys a1, a2, b0, b1, b2 suitable for the biquad~ DSP object.
    """
    params = {}
    w0 = 2 * math.pi * freq / MFPApp().samplerate
    # Hoisted: cos(w0) was previously recomputed in four coefficient
    # expressions; value and results are identical.
    cos_w0 = math.cos(w0)
    alpha = math.sin(w0) / (2*q)
    a0 = 1 + alpha   # normalization factor
    params['a1'] = (-2.0*cos_w0) / a0
    params['a2'] = (1 - alpha) / a0
    params['b0'] = (1 + cos_w0) / (2.0 * a0)
    params['b1'] = -1.0*(1 + cos_w0) / a0
    params['b2'] = (1 + cos_w0) / (2.0 * a0)
    return params
def bq_lopass(freq, q):
    """Compute normalized biquad coefficients for a lowpass filter.

    Uses the RBJ Audio EQ Cookbook lowpass form, normalized by a0.

    Args:
        freq: cutoff frequency in Hz
        q: filter steepness (Q factor)

    Returns:
        dict with keys a1, a2, b0, b1, b2 suitable for the biquad~ DSP object.
    """
    params = {}
    w0 = 2 * math.pi * freq / MFPApp().samplerate
    # Hoisted: cos(w0) was previously recomputed in five coefficient
    # expressions; value and results are identical.
    cos_w0 = math.cos(w0)
    alpha = math.sin(w0) / (2*q)
    a0 = 1 + alpha   # normalization factor
    params['a1'] = (-2.0*cos_w0) / a0
    params['a2'] = (1 - alpha) / a0
    params['b0'] = (1 - cos_w0) / (2.0 * a0)
    params['b1'] = (1 - cos_w0) / a0
    params['b2'] = (1 - cos_w0) / (2.0 * a0)
    return params
def bq_bandpass(freq, q):
    """Compute normalized biquad coefficients for a bandpass filter
    centered at `freq` (Hz) with steepness `q`, normalized by a0."""
    w0 = 2 * math.pi * freq / MFPApp().samplerate
    alpha = math.sin(w0) / (2*q)
    a0 = 1 + alpha
    return {
        'a1': (-2.0*math.cos(w0)) / a0,
        'a2': (1 - alpha) / a0,
        'b0': alpha / a0,
        'b1': 0,
        'b2': -1.0 * alpha / a0,
    }
class BiquadWrapper(Processor):
    doc_tooltip_obj = "%s filter (biquad implementation)"
    doc_tooltip_inlet = ["Signal in",
                         "Frequency of interest (default: initarg 0)",
                         "Q (filter steepness) (default: initarg 1)"]
    doc_tooltip_outlet = ["Signal out"]

    def __init__(self, bq_func, init_type, init_args, patch, scope, name):
        """Wrap a coefficient-generating function as a 3-inlet processor.

        bq_func(freq, q) must return the biquad~ parameter dict.
        """
        Processor.__init__(self, 3, 1, init_type, init_args, patch, scope, name)
        initargs, kwargs = self.parse_args(init_args)
        # Initial frequency / Q come from creation args, with fallbacks.
        self.freq = initargs[0] if len(initargs) > 0 else 0
        self.q = initargs[1] if len(initargs) > 1 else 0.707
        self.biquad_thunk = bq_func
        self.biquad_params = self.biquad_thunk(self.freq, self.q)
        self.hot_inlets = [0, 1, 2]
        self.dsp_inlets = [0]
        self.dsp_outlets = [0]
        self.dsp_init("biquad~", **self.biquad_params)

    def trigger(self):
        """Recompute and push coefficients when a new freq or Q arrives."""
        dirty = False
        if self.inlets[1] is not Uninit:
            self.freq = self.inlets[1]
            dirty = True
        if self.inlets[2] is not Uninit:
            self.q = self.inlets[2]
            dirty = True
        if not dirty:
            return
        self.biquad_params = self.biquad_thunk(self.freq, self.q)
        for pname, pval in self.biquad_params.items():
            self.dsp_setparam(pname, float(pval))
def mk_biquad(thunk, filter_name):
    """Return a processor factory for BiquadWrapper around `thunk`,
    specializing the object tooltip with `filter_name`."""
    def factory(init_type, init_args, patch, scope, name):
        wrapper = BiquadWrapper(thunk, init_type, init_args, patch, scope, name)
        wrapper.doc_tooltip_obj = BiquadWrapper.doc_tooltip_obj % filter_name
        return wrapper
    return factory
def register():
    """Register the raw biquad~ processor and the derived filter types."""
    MFPApp().register("biquad~", Biquad)
    for dsp_name, factory in (("hip~", mk_biquad(bq_hipass, "Highpass")),
                              ("lop~", mk_biquad(bq_lopass, "Lowpass")),
                              ("bp~", mk_biquad(bq_bandpass, "Bandpass"))):
        MFPApp().register(dsp_name, factory)
|
June has closed out in a humbling way. The mountains of New Hampshire are steep and technical in a way that I haven't really seen elsewhere on Trail, but they reward you with the best views as well. I've scrambled up rock walls and hiked down waterfalls; been lost in the clouds and exposed to ridge lines that resemble Machu Picchu and Irish cliffs. The terrain has made me sweat, curse, and groan and yet I am accompanied by a constant grin.
I've also been able to reconnect with old hiking friends while meeting new ones. These buddies came to my rescue when I slipped on a slick rock yesterday and likely sprained my ankle. Immediately, despite my stubbornness, they pulled items out of my pack, insisting that I should lighten my load. I had met some of these guys only hours before.
I should be in low spirits, but I can't help but feel so thankful. I'm grateful that I've made it this far without an injury, happy that I'm able to rest and hopefully recover quickly, and excited to spend my final days on the AT with my Dad and then that hiking crew (if I can catch them). Most of all, though, I'm acutely aware of how much I rely on my feet, and so happy that they continue to support me on this journey.
So let's take a look at what I've done in June!
Wildlife this month: Tons of frogs, snakes, & chipmunks. Saw my first rattlesnake in Vermont, too!
Total Trail Cries: 2, this month's was after my family sent me a sweet video from my cousin's wedding.
Total dollars raised for Next Steps: $3,553, still hoping to get to $6,600 and have less than 350 miles left. Please consider donating if you haven't already!
|
from flask import render_template, request, jsonify
from . import main
@main.app_errorhandler(403)
def forbidden(e):
    """ This function is called if there was a 403 error exception.
    API clients that accept only JSON get a JSON body; everyone else
    gets the rendered error page.
    Return: JSON response or template of 403 error page
    """
    mimetypes = request.accept_mimetypes
    if mimetypes.accept_json and not mimetypes.accept_html:
        response = jsonify({'error': 'forbidden'})
        response.status_code = 403
        return response
    return render_template('403.html'), 403
@main.app_errorhandler(404)
def page_not_found(e):
    """ This function is called if there was a 404 error exception.
    API clients that accept only JSON get a JSON body; everyone else
    gets the rendered error page.
    Return: JSON response or template of 404 error page
    """
    mimetypes = request.accept_mimetypes
    if mimetypes.accept_json and not mimetypes.accept_html:
        response = jsonify({'error': 'not found'})
        response.status_code = 404
        return response
    return render_template('404.html'), 404
@main.app_errorhandler(500)
def internal_server_error(e):
    """ This function is called if there was a 500 error exception
    and displays the related error page
    Return: template of 500 error page
    """
    # Copy-paste defect fixed: the docstring previously claimed this
    # handled 404 errors. Behavior is unchanged.
    if request.accept_mimetypes.accept_json and \
            not request.accept_mimetypes.accept_html:
        response = jsonify({'error': 'internal server error'})
        response.status_code = 500
        return response
    return render_template('500.html'), 500
|
Mushroom Observer: Observation 9747: Lactarius rubrilacteus Hesler & A.H. Sm.
Observation 9747: Lactarius rubrilacteus Hesler & A.H. Sm.
Collected during the 2003 Oregon Mycological Society’s Fall Mushroom Forage at Camp Macgruder, Tillamook Co., OR.
This looks like an example of the paler/greener form of this species. I wish there were an image of the inside and some habitat information. I’ve been reviewing the observations of this species and they seem to roughly fall into two forms. However, I suspect the forms are weather-induced rather than being distinct populations.
|
import time
from collections import defaultdict
class KarmaRateLimiter(object):
    """Per-nick rate limiter.

    Tracks, for each nick, the timestamp at which its current window
    started and how many requests were made inside that window.
    """

    def __init__(self, timeout=60, penalty=3):
        """timeout in seconds - default 1 min; every `penalty`-th
        rate-limited request triggers a penalization."""
        self.timeout = timeout
        self.penalty = penalty
        # Maps nick -> [window_start_timestamp, request_count].
        # A plain dict suffices: entries are always assigned explicitly
        # before being read.  The previous defaultdict(lambda: [int, int])
        # produced a list of *type objects* and its default was never
        # actually triggered (membership was checked first everywhere).
        self.user_last_request = {}

    def rate_limit(self, nick):
        """Return 0 if not rate_limited, 1 if has penalization, 2 otherwise"""
        now = int(time.time())
        entry = self.user_last_request.get(nick)
        if entry is None or (now - entry[0]) >= self.timeout:
            # First request for this nick, or the window expired: reset.
            self.user_last_request[nick] = [now, 0]
            return 0
        # Timeout not expired, so increase the counter: user is rate limited.
        entry[1] += 1
        if entry[1] % self.penalty == 0:
            # Every `penalty`-th limited request earns the penalization.
            return 1
        return 2

    def user_timeout(self, nick):
        """Return the user specific timeout (seconds left in the current
        window; 0 for unknown nicks, may be negative once expired)."""
        entry = self.user_last_request.get(nick)
        if entry is None:
            return 0
        return self.timeout - (int(time.time()) - entry[0])
|
This game has one of the best shop systems I've ever seen in an iOS game, and it's very addicting to play. Even though the gameplay is honestly somewhat repetitive (as it would be) I can tell a lot of work was put into making a diverse environment, such as different kinds of obstacles and even different biomes (which don't really affect the game, but help heighten the mood). I found it to be a little too "pay-to-win" with characters that were kind of cheaply designed and didn't have much depth and the characters you had to buy were always better. As with all iOS games, there were things about it I found stupid, but it gives you a good 6 hours of gameplay, I'd say, before it gets boring.
As others have noted, the gameplay gets very repetitive and very old quickly. After about two or three hours of playing the game, I lost interest. The levels also get insanely difficult once you reach about 2000 meters on each level – the speed becomes impossible to keep up with.
The only good things about this game are the graphics, music, and sound effects. Having the various different kinds of themed levels is also exciting, but that does absolutely nothing to lessen the repetitive gameplay.
Recommended if you have two hours or less to waste.
|
__author__ = 'Tom Schaul, tom@idsia.ch'
from scipy import clip, asarray
from pybrain.utilities import abstractMethod
class Task(object):
""" A task is associating a purpose with an environment. It decides how to evaluate the
observations, potentially returning reinforcement rewards or fitness values.
Furthermore it is a filter for what should be visible to the agent.
Also, it can potentially act as a filter on how actions are transmitted to the environment. """
def __init__(self, environment):
""" All tasks are coupled to an environment. """
self.env = environment
# limits for scaling of sensors and actors (None=disabled)
self.sensor_limits = None
self.actor_limits = None
self.clipping = True
def setScaling(self, sensor_limits, actor_limits):
""" Expects scaling lists of 2-tuples - e.g. [(-3.14, 3.14), (0, 1), (-0.001, 0.001)] -
one tuple per parameter, giving min and max for that parameter. The functions
normalize and denormalize scale the parameters between -1 and 1 and vice versa.
To disable this feature, use 'None'. """
self.sensor_limits = sensor_limits
self.actor_limits = actor_limits
def performAction(self, action):
""" A filtered mapping towards performAction of the underlying environment. """
if self.actor_limits:
action = self.denormalize(action)
self.env.performAction(action)
def getObservation(self):
""" A filtered mapping to getSensors of the underlying environment. """
sensors = self.env.getSensors()
if self.sensor_limits:
sensors = self.normalize(sensors)
return sensors
def getReward(self):
""" Compute and return the current reward (i.e. corresponding to the last action performed) """
return abstractMethod()
def normalize(self, sensors):
""" The function scales the parameters to be between -1 and 1. e.g. [(-pi, pi), (0, 1), (-0.001, 0.001)] """
assert(len(self.sensor_limits) == len(sensors))
result = []
for l, s in zip(self.sensor_limits, sensors):
if not l:
result.append(s)
else:
result.append((s - l[0]) / (l[1] - l[0]) * 2 - 1.0)
if self.clipping:
clip(result, -1, 1)
return asarray(result)
def denormalize(self, actors):
""" The function scales the parameters from -1 and 1 to the given interval (min, max) for each actor. """
assert(len(self.actor_limits) == len(actors))
result = []
for l, a in zip(self.actor_limits, actors):
if not l:
result.append(a)
else:
r = (a + 1.0) / 2 * (l[1] - l[0]) + l[0]
if self.clipping:
r = clip(r, l[0], l[1])
result.append(r)
return result
@property
def indim(self):
return self.env.indim
@property
def outdim(self):
return self.env.outdim
|
Last May, when seven Fort Meade Soldiers received their Associate of Arts diplomas from Anne Arundel Community College, John Anderson couldn't have been prouder than if they'd been members of his own family.
"I was happy for them. People work hard to get to that point and it's a tremendous accomplishment," said Anderson, who attended the graduation ceremony.
The Soldiers were able to get their degrees through a program that Anderson, an education services specialist at the Fort Meade Army Education Center, oversees. He is responsible for the Department of Army program, GoArmyEd, which provides tuition assistance to active duty and reserve Soldiers.
The DoD offers similar tuition assistance programs to other branches of the military. Since GoArmyEd began in 2006, it has proven quite popular and Anderson, a retired sergeant major, said he gets inquiries about it constantly.
Still, he'd like to see more Soldiers take advantage of GoArmyEd, which enables them to earn undergraduate degrees (associate of arts, bachelor of arts and bachelor of science), a graduate degree (master's) and/or certification at over 3,000 authorized colleges and universities throughout the United States.
All of the schools are either regionally or nationally accredited, according to Anderson, and most have their curriculum online.
Soldiers enrolled in an authorized school can receive up to $4,500 per year for tuition. Depending on the fee for credit hours at the school and the number of credits a Soldier has earned elsewhere, that could translate into a college degree for free.
It did for Sgt. 1st Class Isaac Peterson, chief of training development, NCO Academy, who received an AA degree in electronics and computer engineering from Grantham University last March.
The degree required 65 credits, to which Peterson brought 21 credits from a previous AA degree and his military training. At Grantham's $250 per credit hour, "I almost ran out of money the first year" of the two-year program, said Peterson, who received a scholarship from the school to cover the difference.
"Between the Army's tuition assistance and the school scholarship, it did not cost me a dime," Peterson said of his AA degree.
The Fort Meade Army Education Center, at 8601 Zimborski Ave., has a computer lab with PCs, and a separate "college" area with information about the GoArmyEd program.
Three institutions of higher education -- Anne Arundel Community College, University of Maryland University College and Central Michigan University -- have offices in this area, where potential students can get personal counseling and information about courses and degrees. The three hold evening classes at Fort Meade High and Middle schools.
GoArmyEd.com is the portal through which Soldiers can enroll in classes that meet graduation requirements. "They can take classes online if offered or attend schools in the area," Anderson said.
Soldiers take the same online classes as any other student. They receive a degree from the university in which they are enrolled. For example, a Soldier who satisfactorily completes academic requirements online through Pennsylvania State Worldwide Campus receives a diploma from Pennsylvania State University.
Peterson said he heard about Grantham University, which is located in Kansas City, Mo., from another Soldier, who told him it had the courses he wanted. He also liked the fact that Grantham caters specifically to the military.
"Some of their enrollment counselors are former military people," he said. "They understand our timing, for the breaks between courses."
Soldiers can choose to major in any field they want. Ideally, though, the major should lead to a degree and be applicable to the job market. For that reason, said Vivian Moss, a guidance counselor at the Fort Meade Army Education Center, continuing education credits do not qualify for the program.
"Soldiers come [into the Army] with college credits they have accumulated, often in Advanced Placement courses in high school. We try to help them apply those credit hours" towards a degree, Moss said.
Fees for credit hours vary widely. A community college in Arizona charges $68 per credit hour, while a traditional university may cost $600 per credit hour. GoArmyEd pays a maximum of $250 per credit hour, and courses must be part of the degree or certification requirements.
"If not, it is possible to get an override," said Moss, as long as a case can be made that the course is applicable to the major. "If you're a sociology major and you want to take an extra pre-calculus course" in preparation for a required calculus course, she gave as an example, "you can get tuition for the credits."
Moss explained that GoArmyEd and the GI Bill are two separate and distinct educational programs. The Veterans Administration supports the GI Bill, which has different qualifications and different funding requirements than the DoD's GoArmyEd.
Anderson calls himself the "go-between" for the Soldier and the school. "I ask them, 'What do you want on your resume?'" he said.
He cited a few examples of Soldiers who have used GoArmyEd. One Solider received a master's degree in information technology from Trident University; another, a master's degree in contract and procurement from Webster University; and a third received bachelor's and master's degrees in public administration from Park University and Central Michigan University respectively.
The DoD has a tuition assistance program for spouses of Soldiers. My Career Advancement Account, or MyCAA, provides $4,000 per year tuition assistance for spouses of E1 through E5, W1 and W2 and O1 and O2 ranks. The money can be applied towards an AA degree or certification.
In the GoArmyEd program, the most popular majors in the Maryland-Washington, D.C. region are information technology, IT management, intelligence studies, hospital administration, public health and homeland security/justice.
Those choices come as no surprise, said Anderson. "That's where the jobs are in this area."
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """Drop the M2M join table linking Order to Payment (fund_order_payments).

    The backwards migration recreates the table with its unique constraint.
    """

    def forwards(self, orm):
        # Removing M2M table for field payments on 'Order'
        db.delete_table(db.shorten_name(u'fund_order_payments'))

    def backwards(self, orm):
        # Adding M2M table for field payments on 'Order'
        m2m_table_name = db.shorten_name(u'fund_order_payments')
        # Recreate the implicit through-table: id PK plus the two FKs.
        db.create_table(m2m_table_name, (
            ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
            ('order', models.ForeignKey(orm[u'fund.order'], null=False)),
            ('payment', models.ForeignKey(orm[u'cowry.payment'], null=False))
        ))
        # Each (order, payment) pair may appear at most once.
        db.create_unique(m2m_table_name, ['order_id', 'payment_id'])

    # Frozen snapshot of the model definitions at the time this migration
    # was generated (consumed by South's fake ORM); do not edit by hand.
    models = {
        u'accounts.bluebottleuser': {
            'Meta': {'object_name': 'BlueBottleUser'},
            'about': ('django.db.models.fields.TextField', [], {'max_length': '265', 'blank': 'True'}),
            'availability': ('django.db.models.fields.CharField', [], {'max_length': '25', 'blank': 'True'}),
            'available_time': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'birthdate': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
            'contribution': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'deleted': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'email': ('django.db.models.fields.EmailField', [], {'unique': 'True', 'max_length': '254', 'db_index': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'gender': ('django.db.models.fields.CharField', [], {'max_length': '6', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'location': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
            'newsletter': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'phone_number': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
            'picture': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'blank': 'True'}),
            'primary_language': ('django.db.models.fields.CharField', [], {'max_length': '5'}),
            'share_money': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'share_time_knowledge': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'user_type': ('django.db.models.fields.CharField', [], {'default': "'person'", 'max_length': '25'}),
            'username': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'}),
            'website': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
            'why': ('django.db.models.fields.TextField', [], {'max_length': '265', 'blank': 'True'})
        },
        u'auth.group': {
            'Meta': {'object_name': 'Group'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        u'auth.permission': {
            'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        u'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        u'fund.customvoucherrequest': {
            'Meta': {'object_name': 'CustomVoucherRequest'},
            'contact': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounts.BlueBottleUser']", 'null': 'True'}),
            'contact_email': ('django.db.models.fields.EmailField', [], {'default': "''", 'max_length': '75', 'blank': 'True'}),
            'contact_name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100', 'blank': 'True'}),
            'contact_phone': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100', 'blank': 'True'}),
            'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'message': ('django.db.models.fields.TextField', [], {'default': "''", 'max_length': '500', 'blank': 'True'}),
            'number': ('django.db.models.fields.PositiveIntegerField', [], {}),
            'organization': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '200', 'blank': 'True'}),
            'status': ('django.db.models.fields.CharField', [], {'default': "'new'", 'max_length': '20', 'db_index': 'True'}),
            'type': ('django.db.models.fields.CharField', [], {'default': "'unknown'", 'max_length': '20'}),
            'value': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100', 'blank': 'True'})
        },
        u'fund.donation': {
            'Meta': {'object_name': 'Donation'},
            'amount': ('django.db.models.fields.PositiveIntegerField', [], {}),
            'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
            'currency': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
            'donation_type': ('django.db.models.fields.CharField', [], {'default': "'one_off'", 'max_length': '20', 'db_index': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['projects.Project']"}),
            'status': ('django.db.models.fields.CharField', [], {'default': "'new'", 'max_length': '20', 'db_index': 'True'}),
            'updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounts.BlueBottleUser']", 'null': 'True', 'blank': 'True'})
        },
        u'fund.order': {
            'Meta': {'object_name': 'Order'},
            'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'recurring': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'status': ('django.db.models.fields.CharField', [], {'default': "'current'", 'max_length': '20', 'db_index': 'True'}),
            'updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounts.BlueBottleUser']", 'null': 'True', 'blank': 'True'})
        },
        u'fund.orderitem': {
            'Meta': {'object_name': 'OrderItem'},
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
            'order': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['fund.Order']"})
        },
        u'fund.recurringdirectdebitpayment': {
            'Meta': {'object_name': 'RecurringDirectDebitPayment'},
            'account': ('apps.fund.fields.DutchBankAccountField', [], {'max_length': '10'}),
            'active': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'city': ('django.db.models.fields.CharField', [], {'max_length': '35'}),
            'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '35'}),
            'updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
            'user': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['accounts.BlueBottleUser']", 'unique': 'True'})
        },
        u'fund.voucher': {
            'Meta': {'object_name': 'Voucher'},
            'amount': ('django.db.models.fields.PositiveIntegerField', [], {}),
            'code': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100', 'blank': 'True'}),
            'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
            'currency': ('django.db.models.fields.CharField', [], {'max_length': '3', 'blank': 'True'}),
            'donations': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['fund.Donation']", 'symmetrical': 'False'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'language': ('django.db.models.fields.CharField', [], {'default': "'en'", 'max_length': '2'}),
            'message': ('django.db.models.fields.TextField', [], {'default': "''", 'max_length': '500', 'blank': 'True'}),
            'receiver': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'receiver'", 'null': 'True', 'to': u"orm['accounts.BlueBottleUser']"}),
            'receiver_email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
            'receiver_name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100', 'blank': 'True'}),
            'sender': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'sender'", 'null': 'True', 'to': u"orm['accounts.BlueBottleUser']"}),
            'sender_email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
            'sender_name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100', 'blank': 'True'}),
            'status': ('django.db.models.fields.CharField', [], {'default': "'new'", 'max_length': '20', 'db_index': 'True'}),
            'updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'})
        },
        u'projects.partnerorganization': {
            'Meta': {'object_name': 'PartnerOrganization'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
            'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100'})
        },
        u'projects.project': {
            'Meta': {'ordering': "['title']", 'object_name': 'Project'},
            'coach': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'team_member'", 'null': 'True', 'to': u"orm['accounts.BlueBottleUser']"}),
            'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'owner': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'owner'", 'to': u"orm['accounts.BlueBottleUser']"}),
            'partner_organization': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['projects.PartnerOrganization']", 'null': 'True', 'blank': 'True'}),
            'phase': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
            'popularity': ('django.db.models.fields.FloatField', [], {'default': '0'}),
            'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100'}),
            'title': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
            'updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'})
        },
        u'taggit.tag': {
            'Meta': {'object_name': 'Tag'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
            'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100'})
        },
        u'taggit.taggeditem': {
            'Meta': {'object_name': 'TaggedItem'},
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'taggit_taggeditem_tagged_items'", 'to': u"orm['contenttypes.ContentType']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'object_id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True'}),
            'tag': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'taggit_taggeditem_items'", 'to': u"orm['taggit.Tag']"})
        }
    }

    complete_apps = ['fund']
|
In the auto repair business hardly a day goes by that something doesn’t try to upset the apple cart. It might be the new lube guy who spilled 30 gallons of oil on the floor, or that lost set of customer keys. No matter what it is, something or someone is bound and determined to make your day different than the next.
From time to time it helps to go with the flow. You know, just let things slip on by and not take things so seriously, because no matter what, there’s always another hectic situation just around the corner to test your stress level. Think of it this way, you could be the oil or you could be the filter. You either let things slide through the day, or you’re trapped with the rest of the grit and grime.
Speaking of oil, take the day of the oil SNAFU. I have several customers who have their own preferences as to which brand of oil they would like to have in their car. Now, of course, I strictly adhere to the appropriate type and weight, but as far as brands of oils I’m up for any name on the bottle. (Although I do have a few brands I consider taboo.) Funny thing is, I never seem to get through to some of these folks the importance of a quality oil filter. I believe this should be a higher concern than the brand of oil.
This particular day was a rather chaotic day with more than one issue on the rise. Being the well-seasoned shop owner, I was more than up for the task of getting each and every job in and out the door with the skill of a professional.
In comes two of my old time regulars with their precious chosen brand of oil they have hand picked off the shelf. Oh yes, I mean hand-picked. These guys remind of the careful shopper at the grocery store who goes through the produce isle finding that perfect melon or apple. I can picture these two guys at the parts store picking up each bottle and examining it in detail before selecting that very bottle for their car. A bit eccentric, yes, but at their age it’s something technology hasn’t taken away from them. It still allows them to feel they can contribute to their car’s well-being, even though they can’t physically work on their own cars anymore. Needless to say these cherished oil cans are treated like gold when they enter the realm of the service bay… or at least that’s the normal way we handled it for our golden years customers.
Today, well, it was a bit different. We have our new lube tech, Clifford in charge of the oil service bay. He’s doing a great job, and even managed to up-sell a few seriously overlooked problems on a few customer cars. I have high hopes for this youngster, and encourage him to study for his ASE tests and further his education in the automotive field. This afternoon we already had 4 oil changes lined up for him. Two were the normal, ‘getrdone’ oil changes and two were our regular old timers with their hand selected oil.
Clifford has these oil change scenarios down pat. Everything from looking up the actual amount and type of oil required, to verifying what oil they brought, if there is an adequate amount, as well as saving the empties to show the customer when all was said and done.
As the cars were shuffled in and out of the service bay, somehow between the front office, the service bay, and back to the front lobby, the wrong box of oil was sent with the wrong car, or at least it was assumed. (No one knows for sure) Luckily, the oil weight, amount, and type were exactly the same from car to car. The only thing that was different was the one thing the owners of these cars had the most pride and input about, and that was the ‘brand’ of oil.
Mind you, for some of these regulars who bring their hand-picked-hand-selected oil, they’re quite serious about it. You just don’t calm the situation down by telling them you’ll replace the oil with the brand they originally wanted. Oh heavens no! That’s sacrilegious! That would mean a complete engine tear down (while they watched over you like a hawk) with the interior of the motor completely hand wiped to remove any traces of this foreign oil. I wouldn’t doubt it if they would have gone as far as having the molecular structure of the oil checked and verified that none of the competitor’s brand of oil was left to contaminate their engine.
By now, our new lube tech, Clifford has been dragged from the service bay and was about to receive a third degree interrogation while trying to explain his side of this debacle to the older gentlemen.
I took it upon myself to avert the possible cardiac arrest in the front lobby and save Clifford from a fate worse than a stuck-on oil filter. As usual, there is one thing that separates the counter guy, the lube rack guy, and the owner… the person who makes the final decision on how to resolve a situation — that's me, the owner.
At the front counter the two old gentlemen were busy sorting through the bottles in each box while holding each of them up to the light for a closer inspection. The conversation went from who did what, to who didn’t do what, and why their brand was better than the other guys brand. Each of them now were trying to play “oil detective” and locate the slippery individual who screwed this all up. As things go with this typical bunch of grandpas, they were soon talking about vacations and grandkids. In fact the two old guys were starting to wonder which oil was theirs in first place.
I stepped in between my two elderly customers and their precious boxes of empty oil bottles. Without saying a word I ever so graciously reached for the oil bottles that each of them were holding and placed them back into their respective boxes. Then, with the moves of a Las Vegas magician, I switched box A with customer B and box B with customer A. Then cheerfully said, “There ya go, just a little mix up. It’s all good, you’re all set.” and walked away without another word. I just looked at my counter guy and gave him a wink. He knew what to do, as I guided the bewildered lube tech back to the service bay.
I don’t think I’ll ever find out who mixed up what oil with what car, or if there ever was a mix up at all, but you can be sure Clifford won’t forget about this. One minute he’s changing oil, the next he’s got two old guys shaking empty oil bottles at him. Sure made for an interesting day. Sometimes, ya just never know what’s going to happen when ya unscrew that drain plug… some days you’re the oil, some days you’re the filter.
Well, if they are the proper weight and carry the certifications the manufacturer wants, no foul here. Gonzo, you did the right thing! Customers should not be so picky anyway, and if they are, they need to change their own oil!
I had one like that the other day, the customer brought their own oil as he was filling out the ticket I asked him to write on the ticket "Has own oil and filter". I didn't take much notice handed the ticket to the Lube tech and he proceeded to change the oil in the car. When the guy came to pick the car up he was furious that we didn't use his oil. I retrieved his hard copy from the trash can by the computer where the tickets are printed and he had written "synthetic oil" no mention of his own oil.. I asked the tech if he used synthetic he showed me 4 empty bottles of 5w20 mobile one. The customer had the gallon jug of 5w20 mobile one. I tried to explain it was the exact same thing but he didn't believe me because of the size of the bottles. LOL.. To end anymore conflict I put his car back on the lift and drained the oil in the car into an oil jug, replaced the filter with his own and poured 4.5 qrts back in his car . Handed him the half quart left in his gallon jug and sent him on his way .. He was very happy as he left .. I don't think I would of ever been able to make him believe me that the quarts we have are the same as the jug he has..
What an idiot! Just think we all have to deal with people like this. Sometimes I think there are people too stupid to own a car.
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# vismol_shaders.py
#
# Copyright 2016 Labio <labio@labio-XPS-8300>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
#
#
my_glLigth = """
struct gl_LightSourceParameters {
vec4 ambient; // Aclarri
vec4 diffuse; // Dcli
vec4 specular; // Scli
vec4 position; // Ppli
vec4 halfVector; // Derived: Hi
vec3 spotDirection; // Sdli
float spotExponent; // Srli
float spotCutoff; // Crli
// (range: [0.0,90.0], 180.0)
float spotCosCutoff; // Derived: cos(Crli)
// (range: [1.0,0.0],-1.0)
float constantAttenuation; // K0
float linearAttenuation; // K1
float quadraticAttenuation;// K2
};
uniform gl_LightSourceParameters gl_LightSource[gl_MaxLights];
"""
my_glMaterial = """
struct gl_MaterialParameters {
vec4 emission; // Ecm
vec4 ambient; // Acm
vec4 diffuse; // Dcm
vec4 specular; // Scm
float shininess; // Srm
};
uniform gl_MaterialParameters gl_FrontMaterial;
uniform gl_MaterialParameters gl_BackMaterial;
"""
vertex_shader = """
#version 330
uniform mat4 model_mat;
uniform mat4 view_mat;
uniform mat4 projection_mat;
in vec3 vert_coord;
in vec3 vert_color;
out vec3 sh_color;
void main()
{
gl_Position = projection_mat * view_mat * model_mat * vec4(vert_coord, 1.0);
sh_color = vert_color;
}
"""
fragment_shader = """
#version 330
in vec3 sh_color;
out vec4 final_color;
void main()
{
final_color = vec4(sh_color, 1.0);
}
"""
geometry_shader = """
#version 330
in Coords {
vec4 my_cords;
vec3 my_col;
} corners[];
out vec3 sh_color;
void main(){
gl_Position = corners[0].my_cords;
sh_color = corners[0].my_col;
}
"""
vertex_shader2 = """
#version 330
uniform mat4 model_mat;
uniform mat4 view_mat;
uniform mat4 projection_mat;
in vec3 vert_coord;
in vec3 vert_color;
out vec3 frag_vert;
out vec3 frag_color;
out vec3 frag_normal;
void main(){
gl_Position = projection_mat * view_mat * model_mat * vec4(vert_coord, 1.0);
frag_vert = vec3(view_mat * model_mat * vec4(vert_coord, 1.0));
frag_color = vert_color;
frag_normal = frag_vert;
}
"""
fragment_shader2 = """
#version 330
struct Light {
vec3 position;
vec3 color;
vec3 intensity;
vec3 specular_color;
float ambient_coef;
float shininess;
};
uniform Light my_light;
uniform mat4 model_mat;
uniform mat3 normal_mat;
uniform vec3 cam_pos;
in vec3 frag_vert;
in vec3 frag_color;
in vec3 frag_normal;
out vec4 final_color;
void main(){
vec3 normal = normalize(normal_mat * frag_normal);
vec3 vert_to_light = normalize(my_light.position - frag_vert);
float diffuse_coef = max(0.0, dot(normal, vert_to_light));
vec3 diffuse = diffuse_coef * my_light.color * frag_color;
vec3 ambient = my_light.ambient_coef * frag_color * my_light.intensity;
vec3 incidence_vec = -vert_to_light;
vec3 reflection_vec = reflect(incidence_vec, normal);
vec3 vert_to_cam = normalize(cam_pos - frag_vert);
float cos_angle = max(0.0, dot(vert_to_cam, reflection_vec));
float specular_coef = pow(cos_angle, my_light.shininess);
vec3 specular = specular_coef * my_light.specular_color * my_light.intensity;
final_color = vec4(ambient + diffuse + specular, 1.0);
}
"""
vertex_shader3 = """
#version 330
uniform mat4 model_mat;
uniform mat4 view_mat;
uniform mat4 projection_mat;
in vec3 coordinate;
in vec3 vert_color;
out vec3 frag_coord;
out vec3 frag_color;
out vec3 frag_normal;
void main(){
gl_Position = projection_mat * view_mat * model_mat * vec4(coordinate, 1.0);
frag_coord = vec3(model_mat * vec4(coordinate, 1.0));
frag_normal = coordinate;
frag_color = vert_color;
}
"""
fragment_shader3 = """
#version 330
struct Light {
vec3 position;
vec3 color;
vec3 intensity;
vec3 specular_color;
float ambient_coef;
float shininess;
};
uniform Light my_light;
uniform mat4 model_mat;
uniform mat3 normal_mat;
uniform vec3 cam_pos;
in vec3 frag_coord;
in vec3 frag_color;
in vec3 frag_normal;
out vec4 final_color;
void main(){
vec3 normal = normalize(normal_mat * frag_normal);
vec3 vert_to_light = normalize(my_light.position - frag_coord);
vec3 vert_to_cam = normalize(cam_pos - frag_coord);
vec3 ambient = my_light.ambient_coef * frag_color * my_light.intensity;
float diffuse_coef = max(0.0, dot(normal, vert_to_light));
vec3 diffuse = diffuse_coef * frag_color * my_light.intensity;
float specular_coef = 0.0;
if (diffuse_coef > 0.0)
specular_coef = pow(max(0.0, dot(vert_to_cam, reflect(-vert_to_light, normal))), my_light.shininess);
vec3 specular = specular_coef * vec3(1) * my_light.intensity;
specular = specular * (vec3(1) - diffuse);
final_color = vec4(ambient + diffuse + specular, 1.0);
}
"""
vertex_shader4 = """
#version 330
uniform mat4 model_mat;
uniform mat4 view_mat;
uniform mat4 projection_mat;
uniform mat3 normal_mat;
in vec3 coordinate;
in vec3 center;
in vec3 vert_color;
out vec3 frag_coord;
out vec3 frag_color;
out vec3 frag_normal;
void main(){
mat4 modelview = view_mat * model_mat;
gl_Position = projection_mat * modelview * vec4(coordinate, 1.0);
frag_coord = -vec3(modelview * vec4(coordinate, 1.0));
frag_normal = normalize(normal_mat * (coordinate - center));
frag_color = vert_color;
}
"""
fragment_shader4 = """
#version 330
struct Light {
vec3 position;
vec3 color;
vec3 intensity;
vec3 specular_color;
float ambient_coef;
float shininess;
};
uniform Light my_light;
uniform mat4 model_mat;
uniform mat4 view_mat;
uniform mat3 normal_mat;
uniform vec3 cam_pos;
in vec3 frag_coord;
in vec3 frag_color;
in vec3 frag_normal;
out vec4 final_color;
void main(){
//vec3 normal = normalize(frag_normal);
//vec3 eye = normalize(frag_coord);
//
//vec3 vert_to_light = normalize(vec3(view_mat*vec4(my_light.position, 0.0)));
////vec3 vert_to_cam = normalize(frag_coord);
//
//vec3 spec = vec3(0.0);
//float intensity = max(dot(normal, vert_to_light), 0.0);
//if (intensity>0.0){
// vec3 h = normalize(vert_to_light + eye);
// float int_spec = max(dot(h, normal), 0.0);
// spec = my_light.intensity * pow(int_spec, my_light.shininess);
//}
//vec3 ambient = my_light.ambient_coef * frag_color * my_light.intensity;
//float diffuse_coef = max(0.0, dot(normal, vert_to_light));
//vec3 diffuse = diffuse_coef * frag_color * my_light.intensity;
//final_color = vec4(intensity * diffuse + spec + ambient, 1.0);
vec3 normal = normalize(frag_normal);
vec3 vert_to_light = normalize(my_light.position);
vec3 vert_to_cam = normalize(frag_coord);
// Ambient Component
vec3 ambient = my_light.ambient_coef * frag_color * my_light.intensity;
// Diffuse component
float diffuse_coef = max(0.0, dot(normal, vert_to_light));
vec3 diffuse = diffuse_coef * frag_color * my_light.intensity;
// Specular component
float specular_coef = 0.0;
if (diffuse_coef > 0.0)
specular_coef = pow(max(0.0, dot(vert_to_cam, reflect(-vert_to_light, normal))), my_light.shininess);
vec3 specular = specular_coef * my_light.intensity;
specular = specular * (vec3(1) - diffuse);
final_color = vec4(ambient + diffuse + specular, 1.0);
}
"""
# Sphere shader pair: the per-vertex normal is the normalized direction from
# the sphere's center attribute to the vertex, lit with a directional
# ambient/diffuse/specular model in the fragment stage.
# FIX: the original source assigned frag_normal twice with the identical
# expression (copy/paste duplicate); the redundant statement was removed.
# NOTE(review): these names are re-assigned with identical strings further
# down this module — the later binding wins at import time.
vertex_shader_sphere = """
#version 330
uniform mat4 model_mat;
uniform mat4 view_mat;
uniform mat4 projection_mat;
uniform mat3 normal_mat;
in vec3 vert_coord;
in vec3 vert_center;
in vec3 vert_color;
out vec3 frag_coord;
out vec3 frag_color;
out vec3 frag_normal;
void main(){
    mat4 modelview = view_mat * model_mat;
    gl_Position = projection_mat * modelview * vec4(vert_coord, 1.0);
    frag_coord = -vec3(modelview * vec4(vert_coord, 1.0));
    frag_normal = normalize(normal_mat * (vert_coord - vert_center));
    frag_color = vert_color;
}
"""
# Fragment stage: my_light.position is used as a light direction; the
# specular term is damped by (1 - diffuse) before compositing.
fragment_shader_sphere = """
#version 330
struct Light {
    vec3 position;
    vec3 color;
    vec3 intensity;
    vec3 specular_color;
    float ambient_coef;
    float shininess;
};
uniform Light my_light;
uniform mat4 model_mat;
uniform mat4 view_mat;
uniform mat3 normal_mat;
uniform vec3 cam_pos;
in vec3 frag_coord;
in vec3 frag_color;
in vec3 frag_normal;
out vec4 final_color;
void main(){
    vec3 normal = normalize(frag_normal);
    vec3 vert_to_light = normalize(my_light.position);
    vec3 vert_to_cam = normalize(frag_coord);
    // Ambient Component
    vec3 ambient = my_light.ambient_coef * frag_color * my_light.intensity;
    // Diffuse component
    float diffuse_coef = max(0.0, dot(normal, vert_to_light));
    vec3 diffuse = diffuse_coef * frag_color * my_light.intensity;
    // Specular component
    float specular_coef = 0.0;
    if (diffuse_coef > 0.0)
        specular_coef = pow(max(0.0, dot(vert_to_cam, reflect(-vert_to_light, normal))), my_light.shininess);
    vec3 specular = specular_coef * my_light.intensity;
    specular = specular * (vec3(1) - diffuse);
    final_color = vec4(ambient + diffuse + specular, 1.0);
}
"""
vertex_shader_crystal = """
#version 330
uniform mat4 model_mat;
uniform mat4 view_mat;
uniform mat4 projection_mat;
uniform mat3 normal_mat;
in vec3 vert_coord;
in vec3 vert_center;
in vec3 vert_color;
out vec3 frag_coord;
out vec3 frag_normal;
out vec3 frag_color;
void main(){
mat4 modelview = view_mat * model_mat;
gl_Position = projection_mat * modelview * vec4(vert_coord, 1.0);
frag_coord = -vec3(modelview * vec4(vert_coord, 1.0));
frag_normal = normalize(normal_mat * (vert_coord - vert_center));
frag_color = vert_color;
}
"""
fragment_shader_crystal = """
#version 330
struct Light {
vec3 position;
vec3 color;
vec3 intensity;
vec3 specular_color;
float ambient_coef;
float shininess;
};
uniform Light my_light;
uniform mat4 model_mat;
uniform mat4 view_mat;
uniform mat3 normal_mat;
uniform vec3 cam_pos;
in vec3 frag_coord;
in vec3 frag_color;
in vec3 frag_normal;
out vec4 final_color;
void main(){
vec3 normal = normalize(frag_normal);
vec3 vert_to_light = normalize(my_light.position);
vec3 vert_to_cam = normalize(frag_coord);
// Ambient Component
vec3 ambient = my_light.ambient_coef * frag_color * my_light.intensity;
// Diffuse component
float diffuse_coef = max(0.0, dot(normal, vert_to_light));
vec3 diffuse = diffuse_coef * frag_color * my_light.intensity;
final_color = vec4(ambient + diffuse, 0.6);
}
"""
vertex_shader_dot_surface = """
#version 330
uniform mat4 model_mat;
uniform mat4 view_mat;
uniform mat4 projection_mat;
uniform mat3 normal_mat;
in vec3 vert_coord;
in vec3 vert_color;
out vec3 frag_color;
void main(){
gl_Position = projection_mat * view_mat * model_mat * vec4(vert_coord, 1.0);
frag_color = vert_color;
}
"""
fragment_shader_dot_surface = """
#version 330
in vec3 frag_color;
out vec4 final_color;
void main(){
final_color = vec4(frag_color, 1.0);
}
"""
vertex_shader_directional_light = """
#version 330
uniform mat4 model_mat;
uniform mat4 view_mat;
uniform mat4 projection_mat;
uniform mat3 normal_mat;
in vec3 coordinate;
in vec3 center;
in vec3 vert_color;
out vec3 frag_coord;
out vec3 frag_color;
out vec3 frag_normal;
void main(){
mat4 modelview = view_mat * model_mat;
gl_Position = projection_mat * modelview * vec4(coordinate, 1.0);
frag_coord = -vec3(modelview * vec4(coordinate, 1.0));
frag_normal = normalize(normal_mat * (coordinate - center));
frag_color = vert_color;
}
"""
fragment_shader_directional_light = """
#version 330
struct Light {
vec3 position;
vec3 color;
vec3 intensity;
vec3 specular_color;
float ambient_coef;
float shininess;
};
uniform Light my_light;
uniform mat4 model_mat;
uniform mat4 view_mat;
uniform mat3 normal_mat;
uniform vec3 cam_pos;
in vec3 frag_coord;
in vec3 frag_color;
in vec3 frag_normal;
out vec4 final_color;
void main(){
vec3 normal = normalize(frag_normal);
vec3 vert_to_light = normalize(my_light.position);
vec3 vert_to_cam = normalize(frag_coord);
// Ambient Component
vec3 ambient = my_light.ambient_coef * frag_color * my_light.intensity;
// Diffuse component
float diffuse_coef = max(0.0, dot(normal, vert_to_light));
vec3 diffuse = diffuse_coef * frag_color * my_light.intensity;
// Specular component
float specular_coef = 0.0;
if (diffuse_coef > 0.0)
specular_coef = pow(max(0.0, dot(vert_to_cam, reflect(-vert_to_light, normal))), my_light.shininess);
vec3 specular = specular_coef * my_light.intensity;
specular = specular * (vec3(1) - diffuse);
final_color = vec4(ambient + diffuse + specular, 1.0);
}
"""
vertex_shader_point_light = """
#version 330
uniform mat4 model_mat;
uniform mat4 view_mat;
uniform mat4 projection_mat;
in vec3 coordinate;
in vec3 vert_color;
out vec3 frag_coord;
out vec3 frag_color;
out vec3 frag_normal;
void main(){
gl_Position = projection_mat * view_mat * model_mat * vec4(coordinate, 1.0);
frag_coord = vec3(model_mat * vec4(coordinate, 1.0));
frag_normal = coordinate;
frag_color = vert_color;
}
"""
fragment_shader_point_light = """
#version 330
struct Light {
vec3 position;
vec3 color;
vec3 intensity;
vec3 specular_color;
float ambient_coef;
float shininess;
};
uniform Light my_light;
uniform mat4 model_mat;
uniform mat3 normal_mat;
uniform vec3 cam_pos;
in vec3 frag_coord;
in vec3 frag_color;
in vec3 frag_normal;
out vec4 final_color;
void main(){
vec3 normal = normalize(normal_mat * frag_normal);
vec3 vert_to_light = normalize(my_light.position - frag_coord);
vec3 vert_to_cam = normalize(cam_pos - frag_coord);
vec3 ambient = my_light.ambient_coef * frag_color * my_light.intensity;
float diffuse_coef = max(0.0, dot(normal, vert_to_light));
vec3 diffuse = diffuse_coef * frag_color * my_light.intensity;
float specular_coef = 0.0;
if (diffuse_coef > 0.0)
specular_coef = pow(max(0.0, dot(vert_to_cam, reflect(-vert_to_light, normal))), my_light.shininess);
vec3 specular = specular_coef * vec3(1) * my_light.intensity;
specular = specular * (vec3(1) - diffuse);
final_color = vec4(ambient + diffuse + specular, 1.0);
}
"""
vertex_shader_dots = """
#version 330
uniform mat4 model_mat;
uniform mat4 view_mat;
uniform mat4 projection_mat;
uniform float vert_ext_linewidth;
uniform float vert_int_antialias;
uniform float vert_dot_factor;
in vec3 vert_coord;
in vec3 vert_color;
in float vert_dot_size;
attribute vec4 bckgrnd_color;
varying float frag_dot_size;
varying float frag_ext_linewidth;
varying float frag_int_antialias;
varying vec4 frag_dot_color;
varying vec4 frag_bckgrnd_color;
void main(){
frag_dot_size = vert_dot_size * vert_dot_factor;
frag_ext_linewidth = vert_ext_linewidth;
frag_int_antialias = vert_int_antialias;
frag_dot_color = vec4(vert_color, 1.0);
frag_bckgrnd_color = bckgrnd_color;
gl_Position = projection_mat * view_mat * model_mat * vec4(vert_coord, 1);
gl_PointSize = vert_dot_size + 2*(vert_ext_linewidth + 1.5*vert_int_antialias);
}
"""
fragment_shader_dots = """
#version 330
out vec4 final_color;
// ------------------------------------
varying vec4 frag_bckgrnd_color;
varying vec4 frag_dot_color;
varying float frag_dot_size;
varying float frag_ext_linewidth;
varying float frag_int_antialias;
// ------------------------------------
float disc(vec2 P, float size)
{
float r = length((P.xy - vec2(0.5,0.5))*size);
r -= frag_dot_size/2;
return r;
}
// ----------------
float arrow_right(vec2 P, float size)
{
float r1 = abs(P.x -.50)*size + abs(P.y -.5)*size - frag_dot_size/2;
float r2 = abs(P.x -.25)*size + abs(P.y -.5)*size - frag_dot_size/2;
float r = max(r1,-r2);
return r;
}
// ----------------
float ring(vec2 P, float size)
{
float r1 = length((gl_PointCoord.xy - vec2(0.5,0.5))*size) - frag_dot_size/2;
float r2 = length((gl_PointCoord.xy - vec2(0.5,0.5))*size) - frag_dot_size/4;
float r = max(r1,-r2);
return r;
}
// ----------------
float clober(vec2 P, float size)
{
const float PI = 3.14159265358979323846264;
const float t1 = -PI/2;
const vec2 c1 = 0.2*vec2(cos(t1),sin(t1));
const float t2 = t1+2*PI/3;
const vec2 c2 = 0.2*vec2(cos(t2),sin(t2));
const float t3 = t2+2*PI/3;
const vec2 c3 = 0.2*vec2(cos(t3),sin(t3));
float r1 = length((gl_PointCoord.xy- vec2(0.5,0.5) - c1)*size);
r1 -= frag_dot_size/3;
float r2 = length((gl_PointCoord.xy- vec2(0.5,0.5) - c2)*size);
r2 -= frag_dot_size/3;
float r3 = length((gl_PointCoord.xy- vec2(0.5,0.5) - c3)*size);
r3 -= frag_dot_size/3;
float r = min(min(r1,r2),r3);
return r;
}
// ----------------
float square(vec2 P, float size)
{
float r = max(abs(gl_PointCoord.x -.5)*size,
abs(gl_PointCoord.y -.5)*size);
r -= frag_dot_size/2;
return r;
}
// ----------------
float diamond(vec2 P, float size)
{
float r = abs(gl_PointCoord.x -.5)*size + abs(gl_PointCoord.y -.5)*size;
r -= frag_dot_size/2;
return r;
}
// ----------------
float vbar(vec2 P, float size)
{
float r1 = max(abs(gl_PointCoord.x -.75)*size,
abs(gl_PointCoord.x -.25)*size);
float r3 = max(abs(gl_PointCoord.x -.5)*size,
abs(gl_PointCoord.y -.5)*size);
float r = max(r1,r3);
r -= frag_dot_size/2;
return r;
}
// ----------------
float hbar(vec2 P, float size)
{
float r2 = max(abs(gl_PointCoord.y -.75)*size,
abs(gl_PointCoord.y -.25)*size);
float r3 = max(abs(gl_PointCoord.x -.5)*size,
abs(gl_PointCoord.y -.5)*size);
float r = max(r2,r3);
r -= frag_dot_size/2;
return r;
}
// ----------------
float cross(vec2 P, float size)
{
float r1 = max(abs(gl_PointCoord.x -.75)*size,
abs(gl_PointCoord.x -.25)*size);
float r2 = max(abs(gl_PointCoord.y -.75)*size,
abs(gl_PointCoord.y -.25)*size);
float r3 = max(abs(gl_PointCoord.x -.5)*size,
abs(gl_PointCoord.y -.5)*size);
float r = max(min(r1,r2),r3);
r -= frag_dot_size/2;
return r;
}
void main(){
float size = frag_dot_size +2*(frag_ext_linewidth + 1.5*frag_int_antialias);
float t = frag_ext_linewidth/2.0-frag_int_antialias;
// gl_PointCoord is the pixel in the coordinate
float r = disc(gl_PointCoord, size);
float d = abs(r) - t;
// This if else statement makes the circle ilusion
if( r > (frag_ext_linewidth/2.0+frag_int_antialias)){
discard;
}
else if( d < 0.0 ){
final_color = frag_bckgrnd_color;
}
else{
float alpha = d/frag_int_antialias;
alpha = exp(-alpha*alpha);
if (r > 0)
final_color = frag_bckgrnd_color;
else
final_color = mix(frag_dot_color, frag_bckgrnd_color, alpha);
}
}
"""
vertex_shader_lines = """
#version 330
uniform mat4 model_mat;
uniform mat4 view_mat;
uniform mat4 projection_mat;
uniform mat3 normal_mat;
in vec3 coordinate;
in vec3 vert_color;
out vec4 frag_color;
out vec4 view_space;
void main(){
gl_Position = projection_mat * view_mat * model_mat * vec4(coordinate, 1.0);
frag_color = vec4(vert_color, 1.0);
view_space = view_mat * model_mat * vec4(coordinate, 1.0);
}
"""
fragment_shader_lines = """
#version 330
uniform vec4 fog_color;
uniform float fog_start;
uniform float fog_end;
in vec4 frag_color;
in vec4 view_space;
out vec4 final_color;
void main(){
float dist = abs(view_space.z);
if(dist>=fog_start){
float fog_factor = (fog_end-dist)/(fog_end-fog_start);
final_color = mix(fog_color, frag_color, fog_factor);
}
else{
final_color = frag_color;
}
}
"""
# FIX: removed redundant duplicate definitions of vertex_shader_sphere and
# fragment_shader_sphere that appeared here.  They were byte-for-byte
# identical to the definitions earlier in this module and silently re-bound
# the same names at import time, so removing them changes nothing for callers.
v_s_glumpy = """
#version 330
uniform mat4 model_mat;
uniform mat4 view_mat;
uniform mat4 projection_mat;
uniform mat3 normal_mat;
in vec3 vert_coord; // attribute vec3 position;
in vec3 vert_color; // attribute vec3 color;
in float vert_dot_size; // attribute float radius;
//const float vert_dot_size = 0.5; // attribute float radius;
out vec3 frag_color; // varying vec3 v_color;
out float f_radius; // varying float v_radius;
out float f_size; // varying float v_size;
out vec4 frag_coord; // varying vec4 v_eye_position;
varying vec3 v_light_direction;
void main (void)
{
frag_color = vert_color;
f_radius = vert_dot_size;
frag_coord = view_mat * model_mat * vec4(vert_coord, 1.0);
v_light_direction = normalize(vec3(0,0,2));
gl_Position = projection_mat * frag_coord;
vec4 p = projection_mat * vec4(vert_dot_size, vert_dot_size, frag_coord.z, frag_coord.w);
f_size = 512.0 * p.x / p.w;
gl_PointSize = f_size + 5.0;
}
"""
f_s_glumpy = """
#version 330
uniform mat4 model_mat;
uniform mat4 view_mat;
uniform mat4 projection_mat;
uniform mat3 normal_mat;
vec4 outline(float distance, float linewidth, float antialias, vec4 fg_color, vec4 bg_color){
vec4 frag_color;
float t = linewidth/2.0 - antialias;
float signed_distance = distance;
float border_distance = abs(signed_distance) - t;
float alpha = border_distance/antialias;
alpha = exp(-alpha*alpha);
if( border_distance < 0.0 )
frag_color = fg_color;
else if( signed_distance < 0.0 )
frag_color = mix(bg_color, fg_color, sqrt(alpha));
else {
if( abs(signed_distance) < (linewidth/2.0 + antialias) ) {
frag_color = vec4(fg_color.rgb, fg_color.a * alpha);
} else {
discard;
}
}
return frag_color;
}
in vec3 frag_color; // varying vec3 v_color;
in float f_radius; // varying float v_radius;
in float f_size; // varying float v_size;
in vec4 frag_coord; // varying vec4 v_eye_position;
varying vec3 v_light_direction;
void main()
{
vec2 P = gl_PointCoord.xy - vec2(0.5,0.5);
float point_size = f_size + 5.0;
float distance = length(P*point_size) - f_size/2;
vec2 texcoord = gl_PointCoord* 2.0 - vec2(1.0);
float x = texcoord.x;
float y = texcoord.y;
float d = 1.0 - x*x - y*y;
if (d <= 0.0) discard;
float z = sqrt(d);
vec4 pos = frag_coord;
pos.z += f_radius*z;
vec3 pos2 = pos.xyz;
pos = projection_mat * pos;
gl_FragDepth = 0.5*(pos.z / pos.w)+0.5;
vec3 normal = vec3(x,y,z);
float diffuse = clamp(dot(normal, v_light_direction), 0.0, 1.0);
vec4 color = vec4((0.5 + 0.5*diffuse)*frag_color, 1.0);
gl_FragColor = outline(distance, 1.0, 1.0, vec4(0,0,0,1), color);
// gl_FragColor = color;
}
"""
|
"THE FUNCTION OF EDUCATION IS TO TEACH ONE TO THINK INTENSIVELY AND TO THINK CRITICALLY. INTELLIGENCE PLUS CHARACTER - THAT IS THE GOAL OF TRUE EDUCATION." - MARTIN LUTHER KING, JR.
BDK Academy Charlotte located at 2935 Griffith St., Charlotte, NC 28203 is an athletic studio in South End, and home to the internationally renowned Mixed Movement Arts System known as Budokon (BDK). Budokon is one of the most unique training systems in the world where you will practice and train your body in our classes, but most importantly, transform your mind through movement. A BDK mixed movement artist is a true warrior-yogi, practicing MMA, Yoga, Calisthenics and Animal Locomotion. We seek to contribute to humanity by expanding awareness of individual consciousness and thereby expanding awareness of the collective consciousness. Budokon was created in 2001 by Cameron Shayne, considered the father of mixed movement arts.
|
###################################### Variational Autoencoder ############################################
## Author: Sara Regina Ferreira de Faria
## Email: sarareginaff@gmail.com
#Needed libraries
import numpy
import matplotlib.pyplot as plt
import pandas
import math
import scipy.io as spio
import scipy.ndimage
from scipy.stats import norm
from keras.layers import Layer
from sklearn.metrics import mean_squared_error, roc_curve, auc
from keras import backend as K
from keras import metrics
# Fix the global NumPy random seed for reproducibility (affects only
# numpy.random; Keras/TensorFlow keep their own RNG state).
numpy.random.seed(7)
# load the dataset
def loadData(file, dictName):
	"""Load a MATLAB cell array stored under key *dictName* in .mat *file*.

	Returns a 1-D object ndarray with one entry per cell, preserving the
	per-cell arrays exactly as scipy.io.loadmat returns them.
	"""
	contents = spio.loadmat(file)
	cells = contents[dictName]
	count = cells.shape[1]
	out = numpy.ndarray(shape=(count,), dtype=type(cells[0, 0]))
	for idx in range(count):
		out[idx] = cells[0, idx]
	return out
# normalize dataset
def normalizeData(data):
	"""Min-max scale *data* into [0, 1] (element-wise, whole-array range)."""
	lo = numpy.amin(data)
	span = numpy.amax(data) - lo
	return (data - lo) / span
# based on http://machinelearningmastery.com/time-series-prediction-with-deep-learning-in-python-with-keras/
# convert an array of values into a dataset matrix
def createMatrix(dataset, look_back=1):
	"""Slice *dataset* into overlapping windows of length *look_back*.

	Returns an array of shape (len(dataset) - look_back - 1, look_back, ...).
	The trailing ``- 1`` mirrors the reference implementation (it reserved
	room for a one-step-ahead target), so the last possible window is
	intentionally dropped.
	"""
	# The original kept an unused ``dataY`` accumulator; removed. A single
	# comprehension replaces the append loop.
	windows = [dataset[i:(i + look_back)]
	           for i in range(len(dataset) - look_back - 1)]
	return numpy.array(windows)
# based on https://blog.keras.io/building-autoencoders-in-keras.html
def sampling(args):
	"""Reparameterization trick: draw z = z_mean + exp(z_log_var / 2) * eps.

	*args* is the (z_mean, z_log_var) tensor pair produced by the encoder.
	NOTE: relies on the module-level globals ``original_dim``, ``latent_dim``,
	``batchSizeModel`` and ``epsilon_std`` being assigned by the main
	grid-search loop before the model graph is built.
	"""
	z_mean, z_log_var = args
	# epsilon has one latent sample per timestep per batch element
	x_train_latent_shape = (original_dim[0], latent_dim)
	epsilon = K.random_normal(shape=((batchSizeModel,) + x_train_latent_shape), mean=0., #40, 480, 3, 2
		stddev=epsilon_std)
	return z_mean + K.exp(z_log_var / 2) * epsilon
# based on https://edouardfouche.com/Neural-based-Outlier-Discovery/
def calculateFprTpr(predicted, labels):
	"""Score each prediction by its Euclidean norm and compute ROC points.

	*predicted* is a sequence of (vectors of) anomaly indicators; *labels*
	the matching binary ground truth. Returns (fpr, tpr) arrays from
	sklearn's roc_curve.
	"""
	scores = numpy.array([numpy.linalg.norm(row) for row in predicted])
	fpr, tpr, _thresholds = roc_curve(labels, scores)
	return fpr, tpr
class CustomVariationalLayer(Layer):
	"""Terminal pseudo-layer that attaches the VAE loss to the graph.

	The loss is registered via ``add_loss`` so Keras needs no target data;
	the layer simply passes its first input through. Reads the module-level
	globals ``original_dim``, ``z_mean`` and ``z_log_var`` set up by the
	main loop.
	"""
	def __init__(self, **kwargs):
		# mark as placeholder so Keras will not expect target data for it
		self.is_placeholder = True
		super(CustomVariationalLayer, self).__init__(**kwargs)
	def vae_loss(self, x, x_decoded_mean):
		"""Reconstruction (binary cross-entropy) plus KL divergence term."""
		xent_loss = original_dim[1] * metrics.binary_crossentropy(x, x_decoded_mean)
		kl_loss = - 0.5 * K.sum(1 + z_log_var - K.square(z_mean) - K.exp(z_log_var), axis=-1)
		return K.mean(xent_loss + kl_loss)
	def call(self, inputs):
		# inputs = [original batch, decoded reconstruction]
		x = inputs[0]
		x_decoded_mean = inputs[1]
		loss = self.vae_loss(x, x_decoded_mean)
		self.add_loss(loss, inputs=inputs)
		# We won't actually use the output.
		return x
def vae_loss1(x, x_decoded_mean):
	"""Compile-time VAE loss: reconstruction cross-entropy + KL divergence.

	Standalone duplicate of the loss in CustomVariationalLayer; reads the
	module-level globals ``original_dim``, ``z_mean`` and ``z_log_var``.
	"""
	xent_loss = original_dim[1] * metrics.binary_crossentropy(x, x_decoded_mean)
	kl_loss = - 0.5 * K.sum(1 + z_log_var - K.square(z_mean) - K.exp(z_log_var), axis=-1)
	return K.mean(xent_loss + kl_loss)
#************* MAIN *****************#
# Hyper-parameter grid search over the VAE configuration. Every range below
# currently spans a single value, so exactly one configuration is evaluated;
# the commented numbers after each range are the author's wider sweeps.
# variables
best_roc_auc = 0
best_epochs = 0
best_limit = 0
best_bottleneckDim = 0
best_look_back = 0
best_epsilon_std = 0
best_latent_dim = 0
for epochs in range(7,8): #16
	print("epochs", epochs)
	for limitAux in range(18,19): #12
		limit = limitAux/10
		print("limit", limit)
		for bottleneckDim in range (4,5): #4
			print("bottleneckDim", bottleneckDim)
			for look_back in range(3,4): #2
				print("look_back", look_back)
				for epsilon_stdAux in range(3,4):
					epsilon_std = epsilon_stdAux/10
					print("epsilon_std", epsilon_std)
					for latent_dim in range(1,2):
						print("latent_dim", latent_dim)
						# libraries
						# NOTE: re-imported on every iteration on purpose -- the
						# name 'Model' (the class) is shadowed below by the model
						# instance, so it must be restored each pass.
						from keras.models import Model, Sequential
						from keras.layers import Input, Dense, LSTM, RepeatVector, Lambda, Layer
						batchSizeData = 1
						lossEvaluation = 'mean_squared_error'
						optimizer = 'adam'
						batchSizeModel = look_back
						roc_auc = []
						FPRs = []
						TPRs = []
						# load dataset with all fault simulation
						# (entry 0 is the fault-free run; entries 1.. are faults)
						originalDataset = loadData('DadosTodasFalhas.mat', 'Xsep')
						# prepare dataset
						filteredDataset = scipy.ndimage.filters.gaussian_filter(originalDataset[0][:,:], 4.0)
						#filteredDataset = originalDataset[0][:,:]
						normalizedDataset = normalizeData(filteredDataset)
						dataset = createMatrix(normalizedDataset, look_back)
						# split into train and test sets
						train_size = int(len(dataset) * 0.67)
						test_size = len(dataset) - train_size
						x_train, x_test = dataset[0:train_size,:], dataset[train_size:len(dataset),:]
						# get sample size
						original_dim = (x_train.shape[1], x_train.shape[2])
						# encoder
						x = Input(shape=(original_dim)) #batchSizeModel, original_dim (22)
						h = LSTM(int(bottleneckDim), activation='relu')(x)
						z_mean = Dense(latent_dim)(h) #batchSizeModel,latent_dim
						z_log_var = Dense(latent_dim)(h) #batchSizeModel,latent_dim
						z = Lambda(sampling)([z_mean, z_log_var])
						# decoder
						decoded = RepeatVector(original_dim[0])(z_log_var)
						h_decoded = LSTM(original_dim[1], return_sequences=True, activation='relu')(decoded)
						x_decoded_mean = Dense(original_dim[1], activation='sigmoid')(h_decoded) #batchSizeModel,original_dim
						# autoencodoer
						Model = Model(x, x_decoded_mean)
						Model.compile(optimizer='rmsprop', loss=vae_loss1)
						# Train model with normal data
						Model.fit(x_train, x_train, shuffle=True, epochs=epochs, batch_size=batchSizeModel, validation_data=(x_test, x_test), verbose=False)
						# get error for each batch of normal data
						normalPredict = []
						normalError = []
						j = 0
						for k in range(0,len(dataset),batchSizeModel):
							dataBatch = dataset[k:k+batchSizeModel]
							normalPredict.append(Model.predict(dataBatch))
							normalError.append(mean_squared_error(dataBatch[:,0,:], normalPredict[j][:,0,:]))
							j += 1
						#***** Testing if it is a fault or not *****#
						for i in range(1,len(originalDataset)):
							#local variables
							j = 0
							faults = []
							trainPredict = []
							faultError = []
							predicted = []
							# prepare dataset
							filteredDataset = scipy.ndimage.filters.gaussian_filter(originalDataset[i][:,:], 4.0)
							#filteredDataset = originalDataset[i][:,:]
							normalizedDataset = normalizeData(filteredDataset)
							dataset = createMatrix(normalizedDataset, look_back)
							# get error for each batch of data
							for k in range(0,len(dataset),batchSizeModel):
								dataBatch = dataset[k:k+batchSizeModel]
								# generate predictions using model
								trainPredict.append(Model.predict(dataBatch))
								predicted.append(trainPredict[j][:,0,:])
								faultError.append(mean_squared_error(dataBatch[:,0,:], predicted[j]))
								# check if it is a fault or not
								# (batch flagged when its error exceeds 'limit' times
								# the error of the same batch index on normal data)
								if (faultError[j] > normalError[j]*limit):
									faults.append(1)
								else:
									faults.append(0)
								j = j + 1
							#print("Dataset", i, ". IsFaultVector: ", faults)
							# define labels to ROC curve
							# assumes the first 100 samples of each run are
							# fault-free -- TODO confirm against the dataset spec
							labels = []
							for k in range(0,len(dataset),batchSizeModel):
								if (k >= 100):
									labels.append(1)
								if (k < 100):
									labels.append(0)
							# calculate AUC, fpr and tpr
							fpr, tpr = calculateFprTpr(faults, labels)
							FPRs.append(fpr)
							TPRs.append(tpr)
							roc_auc.append(auc(fpr, tpr))
						# keep the configuration with the largest summed AUC
						sum_roc_auc = 0
						for i in range(len(roc_auc)):
							sum_roc_auc += roc_auc[i]
						if (sum_roc_auc > best_roc_auc):
							best_roc_auc = sum_roc_auc
							best_epochs = epochs
							best_limit = limit
							best_bottleneckDim = bottleneckDim
							best_look_back = look_back
							best_epsilon_std = epsilon_std
							best_latent_dim = latent_dim
						# plot ROC curves for a hand-picked subset of fault cases
						sum_selected_roc_auc = 0
						for j in range(len(FPRs)):
							i = j+1
							if(i == 1 or i == 2 or i == 5 or i == 7 or i == 8 or i == 9 or i == 10 or i == 11 or i == 12 or i == 14 or i == 15 or i == 19):
								plt.plot(FPRs[j], TPRs[j], label="AUC{0}= {1:0.2f}".format(i+1, roc_auc[j]))
								sum_selected_roc_auc += roc_auc[j]
						plt.xlim((0,1))
						plt.ylim((0,1))
						plt.plot([0, 1], [0, 1], color='navy', linestyle='--')
						plt.xlabel('False Positive rate')
						plt.ylabel('True Positive rate')
						plt.title('ROC curve - Variational Autoencoder')
						plt.legend(loc="lower right")
						plt.show()
#plot baseline and predictions
#plt.plot(normalizedDataset)
#plt.plot(numpy.concatenate( predicted, axis=0 ))
#plt.show()
#plt.plot(roc_auc)
#plt.show()
# final summary of the best configuration found by the sweep
print("bests parameters")
print("best_limit", best_limit) #1
print("best_epochs", best_epochs) #10
print("best_roc_auc", best_roc_auc) #11.27
print("best_look_back", best_look_back) #1
print("best_bottleneckDim", best_bottleneckDim) #2
print("best_epsilon_std", best_epsilon_std)
print("best_latent_dim", best_latent_dim)
print("sum_selected_roc_auc", sum_selected_roc_auc)
|
Microsoft has released another security patch against 'Spectre Variant 2' bug found on Intel chips.
Intel noted that this microcode can cause 'higher than expected reboots and other unpredictable system behaviours,' adding that situations like this may result in 'data loss or corruption'.
"While Intel tests, updates and deploys new microcode, we are making available an out of band update 'KB4078130' that specifically disables only the mitigation against CVE-2017-5715 - Branch target injection vulnerability," Microsoft wrote on its support page on Sunday.
"Our own experience is that system instability can in some circumstances cause data loss or corruption. In our testing this new update has been found to prevent the behaviour described," Microsoft said.
Microsoft is also offering a new option — available for advanced users on impacted devices — to manually disable and enable the mitigation against Spectre Variant 2 (CVE 2017-5715) independently via registry setting changes.
"There are no known reports to indicate that this Spectre Variant 2 (CVE-2017-5715) has been used to attack customers," Microsoft said.
After chip-maker Intel confirmed a potential security flaw in its chips, Microsoft issued emergency updates to supported versions of Windows earlier this month.
Intel Corp had confirmed two security flaws — Meltdown and Spectre — in its chips, that were vulnerable to hacking.
Intel CEO Brian Krzanich later allayed fears of any data breach.
Addressing the gathering at his keynote address at the CES 2018 earlier this month, Krzanich said, "Our primary goal has been to keep our customers safe. We have not received any information that these exploits have been used to obtain customers' data."
The Intel CEO urged everyone to patch their systems as soon as these are available.
|
#!/usr/bin/env python
'''do sims of cells, LFP, and output data'''
#import modules
import uuid
import numpy as np
import h5py
import os
from glob import glob
#workaround for plots on cluster: force the non-interactive Agg backend when
#no X display is available (must happen before pyplot is first imported).
#BUG FIX: dict.has_key() is Python-2-only; the 'in' test works on 2 and 3.
if 'DISPLAY' not in os.environ:
    import matplotlib
    matplotlib.use('Agg')
import matplotlib.pyplot as plt
from scipy.signal import filtfilt, butter, lfilter
from time import time, asctime
import ViSAPy
import MoI
import neuron
from mpi4py import MPI
######## set random number generator seed ######################################
SEED = 1234567
POPULATIONSEED = 1234567
np.random.seed(SEED)
################# Initialization of MPI stuff ##################################
COMM = MPI.COMM_WORLD
SIZE = COMM.Get_size()
RANK = COMM.Get_rank()
######## create unique output folder and copy simulation script ################
# Only rank 0 creates the folder (timestamp + uuid makes the name unique) and
# archives a copy of this script; the path is then broadcast to all ranks.
if RANK == 0:
    #savefolder = glob('savedata_in_vitro_MEA*')[-1]
    string = asctime().split()
    savefolder = os.path.join(os.path.split(__file__)[0], 'savedata_in_vitro_MEA_')
    for s in string:
        for ss in s.split(':'):
            savefolder += ss + '_'
    savefolder += uuid.uuid4().hex
    os.mkdir(savefolder)
    os.system("cp %s '%s'" % (__file__, savefolder + '/.'))
else:
    savefolder = None
savefolder = COMM.bcast(savefolder, root=0)
##### load NMODL mechanisms ####################################################
#neuron.h.load_file('stdlib.hoc')
#neuron.h.load_file('nrngui.hoc')
neuron.load_mechanisms("modfiles")
################################################################################
# PARAMETERS
################################################################################
# global simulation window (ms) and time step (ms)
tstart = 0
tend = 60000
dt = 0.05
#set up base parameter file for the LFPy.Cell or LFPy.TemplateCell class,
#without specifying cell model.
cellParameters = {
    'v_init' : -65,
    'passive' : False,
    'timeres_NEURON' : dt,
    'timeres_python' : dt,
    'tstartms' : tstart,
    'tstopms' : tend,
    'verbose' : False,
    'pt3d' : False,
}
# set the default rotation of the cells
defaultrotation = {}
#LFPy can simulate directly to file, but for performance reasons, this
#feature should be avoided
simulationParameters = {
    #'to_file' : True, #file_name set in cellsim()
}
#list up all model folders, associate model neurons by morphology name
morphologies = glob('neuron_models/Large/*/*Morph.hoc') + \
    glob('neuron_models/Medium*/*/*Morph.hoc') + \
    glob('neuron_models/Small*/*/*Morph.hoc')
#one custom code file per morphology
model_paths = glob('neuron_models/Large/*') + \
    glob('neuron_models/Medium*/*') + \
    glob('neuron_models/Small*/*')
#custom codes for cell simulations
# (each model folder <path>/<name> is expected to ship <path>/<name>.hoc)
custom_codes = []
for model_path in model_paths:
    cell_name = os.path.split(model_path)[-1].lower()
    custom_codes += [os.path.join(model_path, cell_name + '.hoc')]
def getParamsMoIMapping(
        slice_thickness = 200.,
        n_rows = 6,
        n_cols = 17,
        elec_sep = 18.,
        elec_radius = 3.5):
    '''Set up the MEA electrode mapping and Method-of-Images (MoI) parameters.

    Keyword arguments:
    slice_thickness : slice thickness in um (electrodes sit at the bottom)
    n_rows, n_cols  : nominal electrode grid size (kept for API compatibility;
                      actual positions are loaded from file below)
    elec_sep        : nominal electrode separation in um (currently unused)
    elec_radius     : electrode contact radius in um

    Returns the (paramsMapping, paramsMoI) dict pair consumed by MoI.
    '''
    # (removed unused local n_elecs = n_rows * n_cols)
    # FIXING EARLIER WRONG ELECTRODE POSITIONS
    # electrode positions come as integer grid indices stored on disk
    elec_x_int = np.load('z_integer.npy')
    elec_y_int = np.load('y_integer.npy')
    # For some reason they seem to need individual scaling factors. From pythagoras
    # (empirical grid-index -> um scale; same factor applied to both axes)
    ky = 9 / np.max(np.diff(sorted(elec_y_int)))
    ###kx = 9 * np.sqrt(3) / np.max(np.diff(sorted(elec_x_int)))
    elec_x = elec_x_int * ky
    elec_y = elec_y_int * ky
    # shift the layout so coordinates start at the origin
    elec_x -= np.min(elec_x)
    elec_y -= np.min(elec_y)
    paramsMapping = {
        'use_line_source': True,
        'include_elec': True,
        'elec_z' : -slice_thickness/2., #SCALAR
        'elec_y' : elec_y, # ARRAY
        'elec_x' : elec_x, # ARRAY
        'elec_radius': elec_radius,
        'n_avrg_points' : 10, #Number of electrode averaging points
    }
    paramsMoI = {
        'sigma_G': 0.0, # Below electrode
        'sigma_S': 1.5, # Saline conductivity
        'sigma_T': 0.1, # Tissue conductivity
        'slice_thickness': slice_thickness,
        'steps' : 10,}
    return paramsMapping, paramsMoI
paramsMapping, paramsMoI = getParamsMoIMapping()
#dummy electrodeParameters
# (all contacts share the same z: the MEA plane at the slice bottom)
electrodeParameters = dict(
    x = paramsMapping['elec_x'],
    y = paramsMapping['elec_y'],
    z = np.array([paramsMapping['elec_z'] for x in paramsMapping['elec_x']]),
)
def getPopParams(#NCOLS=1, NROWS=1,
                 NCOLS=4, NROWS=14,
                 PITCH=np.sqrt(2/(np.sqrt(3)*1400))*1E3, #~1400 mm-2, hex tiling
                 PITCH_STD=5.,
                 HEIGHT = 15.,
                 HEIGHT_STD = 1.,
                 XOFFSET=0., YOFFSET=0., ZOFFSET=-100):
    '''Lay out an NCOLS x NROWS hexagonally tiled soma population.

    Positions get Gaussian jitter (PITCH_STD), are centered at
    (XOFFSET, YOFFSET), and somata sit HEIGHT um (+/- HEIGHT_STD) above
    ZOFFSET. Returns dict(POPULATION_SIZE, X, Y, Z).
    '''
    POPULATION_SIZE = NCOLS * NROWS
    #set up hexagonal grid of cells
    x = []
    y = []
    # BUG FIX: use range() instead of the Python-2-only xrange() -- range()
    # behaves identically here and works on both Python 2 and 3.
    for i in range(NROWS):
        # every other row is shifted half a pitch in x (hex tiling)
        if i % 2 == 0:
            x = np.r_[x, np.arange(NCOLS)*PITCH]
        else:
            x = np.r_[x, np.arange(NCOLS)*PITCH + np.cos(np.pi/3)*PITCH]
        y = np.r_[y, i * np.ones(NCOLS) * np.sin(np.pi/3) * PITCH]
    #apply spatial jitter and center population on MEA grid
    x += np.random.normal(scale=PITCH_STD, size=x.size, )
    x -= x.mean()
    x += XOFFSET
    y += np.random.normal(scale=PITCH_STD, size=y.size, )
    y -= y.mean()
    y += YOFFSET
    z = np.random.normal(ZOFFSET+HEIGHT, HEIGHT_STD, x.size)
    return dict(
        POPULATION_SIZE = POPULATION_SIZE,
        X = x,
        Y = y,
        Z = z
    )
# population centered on the electrode array, at the slice bottom
populationParameters = getPopParams(XOFFSET = paramsMapping['elec_x'].mean(),
                                    YOFFSET = paramsMapping['elec_y'].mean(),
                                    ZOFFSET = paramsMapping['elec_z'])
#set up stimulus by graded synapse input modeled as OU process conductance
gsynParams = dict(
    OUParams = dict(
        T = (tend - tstart)*1E-3,
        dt = dt*1E-3,
        X0 = 0,
        m = 0,
        sigma = 1.,
        nX = populationParameters['POPULATION_SIZE']),
    lambda_d = np.sqrt(2/(np.sqrt(3)*1400))*1E3, #mean cell pitch
    gsyn_mean = 1. / 50000,
    gsyn_std = 1. / 75000,
)
#some signal processing parameters
# nyquist frequency in Hz (time resolution is in ms)
nyquist = 1000. / cellParameters['timeres_python'] / 2
filters = []
#presample filter to avoid aliasing
b, a = butter(1, np.array([0.5, 8000]) / nyquist, btype='pass')
filters.append({
    'b' : b,
    'a' : a,
    'filterFun' : lfilter
})
#filter parameters, filterFun must be either scipy.signal.lfilter or filtfilt
b, a = butter(4, np.array([300, 5000]) / nyquist, btype='pass')
filters.append({
    'b' : b,
    'a' : a,
    'filterFun' : filtfilt
})
#Parameters for class ViSAPy.LogBumpFilterBank that sets up
#series of cosine log-bump filters:
logBumpParameters = dict(
    n = 16,
    taps = 401,
    alpha = 0.01,
    nyquist=nyquist,
)
#download experimental data for use in generation of noise
fname = os.path.join('data', 'signal_converted.npy')
if RANK == 0:
    if not os.path.isdir('data'):
        os.mkdir('data')
    if not os.path.isfile(fname):
        #BUG FIX: urllib2 was used here without ever being imported, so the
        #download path raised NameError. Local import keeps the py2 module
        #out of the way unless the file is actually missing.
        import urllib2
        u = urllib2.urlopen('https://www.dropbox.com/s/u6auynymlcbbp36/' +
                            'signal_converted.npy?dl=1')
        #write in binary mode so the .npy payload is not corrupted on
        #platforms that translate newlines in text mode
        f = open(fname, 'wb')
        f.write(u.read())
        f.close()
COMM.Barrier()
#Noise parameters including noise covariance matrix
noiseParameters = None
#extract noise covariances extracted from experimental tetrode recording
# (start from the log-bump filter settings and add the extraction options)
noiseFeaturesParameters = dict(logBumpParameters)
noiseFeaturesParameters.update({
    'fname' : fname,
    'outputfile' : os.path.join(savefolder, 'ViSAPy_noise.h5'),
    'T' : 15000,
    'srate_in' : 20000,
    'srate_out' : 2 * nyquist,
    'NFFT' : 2**16,
    'psdmethod': 'mlab',
    'remove_spikes' : True,
    #parameters passed to class SpikeACut, only used if remove_spikes == True
    'remove_spikes_args' : {
        'TEMPLATELEN' : 32,
        'TEMPLATEOFFS' : 0.5,
        'threshold' : 5, #standard deviations
        'data_filter' : {
            'filter_design' : butter,
            'filter_design_args' : {
                'N' : 2,
                'Wn' : np.array([300., 5000.]) / nyquist,
                'btype' : 'pass',
            },
            'filter' : filtfilt,
        },
    },
    'amplitude_scaling' : 1E-3,
})
#container file for noise output etc.
noise_output_file = os.path.join(savefolder, 'ViSAPy_noise.h5')
################################################################################
## MAIN
################################################################################
################################################################################
## Step 1: Estimate PSD and covariance between channels, here using
## an experimental dataset.
##
## In the present ViSAPy, we should use only a single RANK for this
## and subsequent steps, we also skip regenerating noise and spike
## events, because it can take some time for long simulation durations
##
if RANK == 0:
    if not os.path.isfile(noise_output_file):
        noise_features = ViSAPy.NoiseFeatures(**noiseFeaturesParameters)
        ################################################################################
        ## Step 2: Generate synthetic noise with PSD and covariance channels extracted
        ## using class NoiseFeatures, preserving the overall amplitude.
        ## We choose to save directly to file, as it will be used in
        ## later steps
        ##
        noise_generator = ViSAPy.CorrelatedNoise(psd=noise_features.psd,
                                                 C=noise_features.C,
                                                 **noiseFeaturesParameters)
        #file object containing extracellular noise and related data
        f = h5py.File(noise_output_file)
        f['data'] = noise_generator.correlated_noise(T = cellParameters['tstopms'])
        f.close()
#sync
COMM.Barrier()
################################################################################
## Step 3: Fix seed and set up Testdata object, generating a model cell
## population, find and distribute synapse inputs with spiketrains from
## network, run simulations for extracellular potentials,
## collect data and generate final benchmark data
##
np.random.seed(POPULATIONSEED)
benchmark_data = MoI.BenchmarkDataMoI(
    cellParameters = cellParameters,
    morphologies = morphologies,
    defaultrotation = defaultrotation,
    simulationParameters = simulationParameters,
    populationParameters = populationParameters,
    electrodeParameters = electrodeParameters,
    noiseFile = noise_output_file,
    filters = filters,
    savefolder = savefolder,
    default_h5_file = 'lfp_cell_%.3i.h5',
    nPCA = 2,
    TEMPLATELEN = 80,
    TEMPLATEOFFS = 0.3,
    spikethreshold = 3.,
    custom_codes = custom_codes,
    paramsMapping = paramsMapping,
    paramsMoI = paramsMoI,
    gsynParams = gsynParams,
    )
print 'setup ok!'
benchmark_data.run()
print 'run ok'
benchmark_data.collect_data()
print 'collect ok'
#plot single cell output
# (cells are distributed round-robin over MPI ranks; one PDF per cell)
myplot = ViSAPy.plotBenchmarkData(benchmark_data)
for i in range(populationParameters['POPULATION_SIZE']):
    if i % SIZE == RANK:
        fig = myplot.plot_figure_13(cellindices=np.array([i]),
                                    bins=10**np.linspace(np.log10(10), np.log10(1E3), 67))
        fig.savefig(os.path.join(savefolder, 'cell_%.2i.pdf' % i))
        plt.close(fig)
COMM.Barrier()
|
Investing means investing for growth with complete protection. We at Mittalfinsolutions believe that protection of your funds is a prime responsibility, and it needs to be addressed with deep knowledge and proper planning. Hence we follow a four-step process of financial planning: risk profiling, goal planning, portfolio structuring and portfolio tracking. We use a wide range of products to achieve the financial goals of our clients — mutual funds, stocks, fixed deposits, etc. We strongly recommend and use mutual funds in every client's portfolio, due to their many advantages over other investment products, such as diversification, professional management and steady growth.
|
# Copyright (C) 2017 Oliver "kfsone" Smith <oliver@kfs.org>
# Provided under The MIT License -- see LICENSE.
from __future__ import absolute_import
from __future__ import with_statement
from __future__ import print_function
from __future__ import division
from __future__ import unicode_literals
import requests
from utilities import join_uri_paths
"""
A simple wrapper for a RESTful api, which returns the parsed json response
as an object.
Example:
# Request the post with id #1 and the comments for it at
# jsonplaceholder.typicode.com
api = RESTful("jsonplaceholder.typicode.com", protocol="https")
# Get a list of posts as dictionaries ( [{...},...] )
posts = api.query("/posts")
# Get post id #1 as a dictionary of properties ( {'body':..., ...})
post = api.query("/posts/1")
# Get the comments on post #1 as a list of dictionaries.
cmts = api.query("/posts/1/comments") # => ditto
# Simulate posting our own post.
post = api.query("/posts", body={"userId":123, "title":"Test Post",
"body":"This is a test, just a test."})
For some interfaces you will need to use 'put' rather than 'post', in which
case there is an "update" rather than "query" member:
post = api.update("/posts", body={"userId":123, "title":"Test Post"})
You can use "prefix" to create API-zoned objects:
class PostsAPI(RESTful):
DEFAULT_PROTOCOL = "https"
DEFAULT_PREFIX = "/posts"
posts = PostsAPI("jsonplaceholder.typicode.com")
posts.query() # Returns list of posts
posts.query("1") # Returns /posts/1 => dictionary of post 1 properties
"""
###############################################################################
#
class RESTful(object):
    """
    Thin wrapper for a JSON-over-HTTP (RESTful) service; see the module
    docstring for usage examples. Subclasses may override DEFAULT_PROTOCOL
    and DEFAULT_PREFIX to create API-zoned clients.
    """

    DEFAULT_PREFIX = "/"
    DEFAULT_PROTOCOL = "http"

    # BUG FIX: these must be wrapped in staticmethod(). A plain function
    # stored as a class attribute becomes a *bound method* when accessed
    # through an instance, so 'self.FETCH(request)' used to call
    # requests.get(self, request), passing the instance as the URL.
    FETCH = staticmethod(requests.get)
    CREATE = staticmethod(requests.post)
    UPDATE = staticmethod(requests.put)

    ###########################################################################
    # Constructor.
    #
    def __init__(self, address, protocol=None, prefix=None):
        """
        Construct a restful API instance to the given address,
        assuming the API base is "http://{address}/" by default.

        \param address     The name or IP of the service,
        \param protocol    [optional] Protocol to speak (default=http),
        \param prefix      [optional] Top-level of the API (default=/),
        """
        if protocol is None:
            protocol = self.DEFAULT_PROTOCOL
        if prefix is None:
            prefix = self.DEFAULT_PREFIX
        self._base_url = "%s://%s" % (protocol, address)
        self._prefix = prefix

    ###########################################################################
    # Helper to translate sub-paths into complete paths with a protocol.
    #
    def _get_request_path(self, query_path):
        """
        Internal helper than translates a query path into a request path.

        \param query_path  The path the user is providing,
        \return            The complete path to request.
        """
        if not query_path:
            request_path = self._prefix
        elif not self._prefix or query_path.startswith(self._prefix):
            # When there's no prefix or the incoming path included it,
            # just use the query path.
            request_path = query_path
        elif query_path.startswith("./"):
            # If prefix="/api/" and you want "/api/api/foo", use "./api/foo"
            # or "/api/api/foo".
            request_path = join_uri_paths(self._prefix, query_path[2:])
        else:
            # Otherwise, we inject the prefix.
            request_path = join_uri_paths(self._prefix, query_path)
        # Introduce the protocol and address.
        return join_uri_paths(self._base_url, request_path)

    ###########################################################################
    # Make an actual query.
    #
    def query(self, query_path=None, body=None):
        """
        Send a query within the API using GET or, if the optional body is
        provided, via POST. The body can either be a list or dictionary to be
        sent as JSON, anything else will be sent as-is as the 'data' field of
        the post.

        If you are using an API which requests you to use "PUT" you should use
        the 'update' method instead.

        `prefix` is automatically added unless the query_path includes it,
        e.g., given r = RESTful("host", prefix="/api/")
            r.query("/foo")
            r.query("/api/foo")
        are equivalent. Use "./" to avoid this, e.g.
            r.query("./api/foo") => /api/api/foo

        \param query_path  The prefix-relative path to the query,
        \param body        a: dict{ parameters },
                           b: string representation of data to be sent,
        \return            JSON representation of the response,
        """
        # Translate the query path to a request
        request = self._get_request_path(query_path)
        # If body is None, just use get:
        if body is None:
            response = self.FETCH(request)
        # If we're given a dictionary or list, automatically convert it to a
        # JSON representation.
        elif isinstance(body, (dict, list, tuple)):
            response = self.CREATE(request, json=body)
        else:
            response = self.CREATE(request, data=body)
        return response.json()

    ###########################################################################
    # Perform a "put" operation to update existing data.
    #
    def update(self, query_path=None, body=None):
        """
        Like 'query' but uses 'put' as the transport method.
        """
        request = self._get_request_path(query_path)
        if not body or isinstance(body, (dict, list, tuple)):
            response = self.UPDATE(request, json=body)
        else:
            response = self.UPDATE(request, data=body)
        return response.json()
|
The web server with the IP-address 208.109.7.9 used by Chiaseed.biz is owned by GoDaddy.com, LLC and is located in Scottsdale, USA. There are many websites run by this web server. The operator uses this server for many hosting clients. In total there are at least 378 websites on this server. The language of these websites is mostly english.
The webpages of Chiaseed.biz were developed using the programming language JSP/Java. The markup language of the website is XHTML 1.0 Transitional. The website does not specify how its content should be treated by search engines; for this reason, the content will be indexed by them.
|
#Version 18-03-2019
import os #Folder and files managment
import csv #To make the nice CSV output.
import re #Regular expresions use.
import numpy as np #Numerical work in Python. Yeah!!!
import scipy.integrate as inte #Numerical integration. YOU NEED TO INSTALL THE SCIPY PACKAGE.
#~ import matplotlib.pyplot as plt
import Gpib #Gpib module import everything. there is no overlap.
# (http://stackoverflow.com/questions/710551/import-module-or-from-module-import)
import time #timing handling
###############################
###FUNCTIONS:
###############################
#To remove similar values in a chain:
#From: https://www.peterbe.com/plog/uniqifiers-benchmark
#~ def rem_repeat(seq, idfun=None):
#~ # order preserving
#~ if idfun is None:
#~ def idfun(x): return x
#~ seen = {}
#~ result = []
#~ for item in seq:
#~ marker = idfun(item)
#~ # in old Python versions:
#~ # if seen.has_key(marker)
#~ # but in new ones:
#~ if marker in seen: continue
#~ seen[marker] = 1
#~ result.append(item)
#~ return result
#Defining a function to save the channels info:
def chansave(channel,chandata):
	"""Save (time, volts) pairs to a tab-separated file named *channel*.

	If the file already exists it is NOT overwritten; the data is written to
	a '01-' prefixed file instead. Values are formatted with 8 decimals in
	scientific notation. Returns [0] (kept for caller compatibility).
	"""
	# Consolidates the two near-identical branches of the original; the
	# float() coercion (only present in the second branch before) is applied
	# uniformly so string inputs work regardless of which file is targeted.
	target = channel if not os.path.isfile(channel) else "01-" + channel
	with open(target, 'w') as arch:
		writer = csv.writer(arch, delimiter="\t", quotechar=" ")
		#8 decimals:
		writer.writerows(
			("%.8e" % float(t[0]), "%.8e" % float(t[1])) for t in chandata)
	return [0]
def transf(signal, device):
	"""Apply the per-device calibration to a (time, volts) waveform.

	*signal* is a sequence of (t, v) pairs; *device* is a tag naming the
	attached probe. Returns a one-element list holding the calibrated
	2-column array (time, scaled value).
	"""
	data = np.asarray(signal, dtype=float)
	t = data[:, 0]
	volts = data[:, 1]
	#REMOVE HERE THE BASELINE OF THE SCOPES.
	if "2Rog" in device:
		# Rogowski coil measures dI/dt; calibrated scale to A/s
		# (updated value for the fourth time)
		scaled = volts * 5671000000.00
	elif "DI03" in device:  # new voltage divider
		scaled = 11068 * volts
	elif "DI04" in device:  # new voltage divider
		scaled = 6930 * volts
	elif "2Res" in device:  # resistive divider (updated twice)
		scaled = 1359 * volts
	elif "3Res" in device:  # resistive divider (updated twice)
		scaled = 2400 * volts
	elif "Phot" in device:
		# photodiode trace is only normalized to its own peak
		scaled = volts / max(volts)
	elif "Curr" in device:  # updated value for the second time
		scaled = volts * 51780
	elif "None" in device:  # "no device" attached to the scope
		scaled = volts
	return [np.column_stack((t, scaled))]
def takechan(channel,sleeptime,addr):
	"""Download one waveform channel from a GPIB oscilloscope.

	Talks to the instrument at GPIB address *addr* (board 0), autodetects
	Tektronix vs. LeCroy from the *IDN? reply, and returns the tuple
	(CH_curve, CH_error, preamble): CH_curve is (time, volts) pairs,
	CH_error a per-sample error estimate, preamble the raw setup info.
	*sleeptime* is the delay in seconds between GPIB write/read pairs.
	"""
	#sleeptime = 0.030
	#addr = 3 #Gpib address of the scope
	scope = Gpib.Gpib(0,addr)
	scope.write("*IDN?") #Identify the scope
	time.sleep(sleeptime)
	scope_type = scope.read(3)
	if scope_type == "TEK": #tektronik scopes
		scope.write("HEADER OFF") #Don't give headers of data with the data.
		time.sleep(sleeptime)
		scope.write("DATA:WIDTH 1")
		time.sleep(sleeptime)
		scope.write("DATA:ENCDG ASCII") #1 byte for voltage data and ASCII format.
		time.sleep(sleeptime)
		selectchan = "SELECT:"+channel+" ON"
		datachan = "DATA:SOURCE "+channel
		#SELECTING CHANNEL:
		scope.write(selectchan) #Set the channel to show, if was not it will not record the data...
		time.sleep(sleeptime)
		scope.write(datachan) #Set the channel source to Channel datachan.
		time.sleep(sleeptime)
		#CHANNEL ASIGNMENT CHECKING
		scope.write("DATA:SOURCE?") #Ask for data channel source.
		time.sleep(sleeptime)
		CHAN = scope.read(3)
		if CHAN != channel:
			print("Error: Channel not correctly assigned.")
			print(CHAN, datachan)
			raise SystemExit #Go out. all wrong.
		#WAVEFORM PREAMBLE (ALL INFO OVER DATA)
		scope.write("WFMPRE?")
		time.sleep(sleeptime)
		preamble = scope.read(256).split(";")
		#preamble = preamble.split(";")
		#USE OF PREAMBLE INFO. PUT INFO IN NICE VARIABLES.
		# NOTE(review): the field indices below assume a fixed WFMPRE reply
		# layout for this particular Tektronix firmware -- confirm if the
		# scope model ever changes.
		points = int(preamble[5])
		ymult = float(preamble[12])
		yzero = float(preamble[13])
		yoff = int(float(preamble[14])) #Not measured, but stablished. Let's remove it...
		#WAVEFORM VOLTS/DIV SCALE:
		text = channel + ":SCALE?"
		scope.write(text)
		time.sleep(sleeptime)
		Volt = float(scope.read())
		print("Reading data from channel {!s}...".format(CHAN))
		#WAVEFORM DATA: (FINALLY)
		scope.write("CURVE?")
		time.sleep(sleeptime)
		curve = scope.read(16000).split(",")
		if curve[len(curve)-1] == "": #Avoiding strange numbers...
			curve[len(curve)-1] = "0"
		print("Reading finished...")
		#Waveform transformation into real volts:
		#The rounding to 2 ciphers is important to avoid the use of
		#garbage bits apperaing in the digitazing process from the computer.
		# As now no integration is necessary, 10 cyphers are used.
		CH_curve = [round((int(x) - yoff)*ymult,10) for x in curve]
		#CREATING TIME VECTOR:
		t =[]
		scope.write("WFMPRE:XINCR?")
		time.sleep(sleeptime)
		sweep=float(scope.read())
		for n in range(len(CH_curve)):
			t.append(float(n)*sweep)
		CH_curve = zip(t,CH_curve)
		CH_error = ymult/Volt
	else: #Lecroy scope. Its label is shit.
		scope.write('DTFORM ASCII') #ASCII format for the data.
		time.sleep(sleeptime)
		scope.write('WAVESRC '+channel) #Selecting channel for waveform download.
		time.sleep(sleeptime)
		scope.write('DTINF?') #reading information of the scope and waveform setup.
		time.sleep(sleeptime)
		preamble = scope.read(550).split(",")
		#Determining the number of points to be read in the waveform(Memory Length)
		points = preamble[23][16:] #text, not number!!!!
		#Determining the time division:
		t_sweep = ( convlecroytime(preamble[20][11:])/float(points) )*10
		#Passing them to the scope:
		scope.write('DTPOINTS '+points)
		time.sleep(sleeptime)
		#Determining the scaling and offset of the channel:
		# NOTE(review): the preamble slice offsets per channel are specific
		# to this LeCroy's DTINF? reply format -- verify on other models.
		if channel == 'CH1':
			CH_scale = convlecroyscale(preamble[4][12:]) #This is a number
			CH_offset =convlecroyscale(preamble[5][9:])
		elif channel == 'CH2':
			CH_scale = convlecroyscale(preamble[8][12:])
			CH_offset =convlecroyscale(preamble[9][9:])
		elif channel == 'CH3':
			CH_scale = convlecroyscale(preamble[12][12:])
			CH_offset =convlecroyscale(preamble[13][9:])
		elif channel == 'CH4':
			CH_scale = convlecroyscale(preamble[16][12:])
			CH_offset =convlecroyscale(preamble[17][9:])
		print("Reading data from channel {!s}...".format(channel))
		scope.write('DTWAVE?')
		time.sleep(sleeptime)
		wave_ascii = scope.read(8*int(points)).split(",") #It reads bites transformed in BYTES...
		wave_number = [float(number) for number in wave_ascii] # (unused)
		volts = [ round( ( ((float(number) / 256 / 32 ) * CH_scale ) - CH_offset ),12) for number in wave_ascii]
		#Making the time vector:
		t =[] #It's a list
		for i in range(len(volts)):
			t.append(float(i)*t_sweep)
		CH_curve = zip(t,volts) #List of tuples.
		CH_error = CH_scale
	return(CH_curve, CH_error, preamble)
def readTekScreen(adrr, sleeptime):
    """Grab a screen hardcopy from a Tektronix scope on GPIB address `adrr`.

    Triggers the scope's hardcopy output, waits `sleeptime` seconds for the
    instrument to render it, then returns the raw image bytes.
    """
    device = Gpib.Gpib(0, adrr)
    device.write('HARDCOPY START')
    time.sleep(sleeptime)
    # 80000 bytes is the minimum read size that yields the complete picture.
    return device.read(80000)
def readLECScreen(adrr, sleeptime):
    """Grab a BMP screen dump from a LeCroy scope on GPIB address `adrr`."""
    device = Gpib.Gpib(0, adrr)
    device.write('TSCRN? BMP')
    time.sleep(sleeptime)
    # 330000 bytes is the minimum read size that yields the complete picture.
    picture = device.read(330000)
    # The first 10 bytes are a response header, not image data: strip them.
    return picture[10:]
def convlecroyscale(scale_text):
    """Convert a LeCroy vertical-scale string such as '500mV' or '2V' to volts.

    The first signed decimal number found in `scale_text` is taken as the
    scale value; if the text carries a millivolt unit it is scaled to volts.
    Raises IndexError (as before) when no number is present.
    """
    value = float(re.findall(r'[+-]?[0-9.]+', scale_text)[0])
    # BUG FIX: the old pattern [V-mV] was a character *range* (V..m plus
    # punctuation), which only matched 'mV' by accident and also matched
    # unrelated letters.  Test for the millivolt unit explicitly instead.
    if 'mV' in scale_text:
        value = value * 1e-3
    return value
def convlecroytime(scale_time):
    """Convert a LeCroy time-scale string such as '10ms' or '2.5us' to seconds.

    The first signed decimal number found in `scale_time` is taken as the
    value; an SI prefix on the 's' unit (m, u, n) scales it accordingly.
    Plain seconds ('s', or no recognizable unit) are returned unchanged.
    """
    value = float(re.findall(r'[+-]?[0-9.]+', scale_time)[0])
    # BUG FIX: the old pattern [s,ms,us,ns] was a character class of the
    # letters s, m, u, n and ',', not the unit alternatives it pretended
    # to be.  Match an optional SI prefix followed by 's' explicitly.
    unit = re.search(r'[mun]?s', scale_time)
    prefix_factor = {'ms': 1e-3, 'us': 1e-6, 'ns': 1e-9}
    if unit is not None:
        value = value * prefix_factor.get(unit.group(0), 1.0)
    return value
|
Daily Practice: It is time to Rise. Love. Resist. To step even deeper into growing our local peace economies. Join our pledge to be even more committed. Divest from the war economy every day. Take another step. To listen, to share, to give, to be in relationship with and to be resilient. To be in action. Do It.
|
# -*- coding: utf-8 -*-
from optparse import make_option
from django.core.management.base import BaseCommand
from django.utils import translation
class Command(BaseCommand):
    """Subscribe article owners and past editors to edit notifications.

    Walks every wiki article and creates an ARTICLE_EDIT subscription (plus
    the matching ArticleSubscription record) for the article's owner and for
    every distinct user that authored one of its revisions.
    """
    args = '[file-name.csv]'
    help = 'Import and parse messages directly from a CSV file.'  # @ReservedAssignment

    def handle(self, *args, **options):
        from django.conf import settings
        translation.activate(settings.LANGUAGE_CODE)

        from django.contrib.auth import get_user_model
        from wiki.plugins.notifications import models
        from wiki.plugins.notifications.settings import ARTICLE_EDIT
        from wiki.models import Article
        from django_nyt.utils import subscribe
        from django_nyt.models import Settings
        from django.contrib.contenttypes.models import ContentType

        # Per-user notification Settings cache, so each user's row is
        # fetched (or created) at most once across all articles.
        user_settings = {}

        def subscribe_user(article, user):
            if user not in user_settings:
                user_settings[user], __ = Settings.objects.get_or_create(user=user)
            return subscribe(
                user_settings[user],
                ARTICLE_EDIT,
                content_type=ContentType.objects.get_for_model(article),
                object_id=article.id,
            )

        User = get_user_model()
        for article in Article.objects.all():
            # The owner always gets a subscription...
            if article.owner:
                subscription = subscribe_user(article, article.owner)
                models.ArticleSubscription.objects.get_or_create(
                    article=article, subscription=subscription)
            # ...as does every distinct non-owner, non-anonymous revision author.
            revisions = article.articlerevision_set.exclude(user=article.owner).exclude(user=None)
            for revision in revisions.values('user').distinct():
                author = User.objects.get(id=revision['user'])
                subscription = subscribe_user(article, author)
                models.ArticleSubscription.objects.get_or_create(
                    article=article, subscription=subscription)

        translation.deactivate()
|
Tuya Mayakhuu is an international lawyer with extensive experience of working in various sectors, including legal research, retraining of lawyers, and legal and judicial reform. She has been actively engaged with the legal reform of Mongolia since 2012. Tuya has experience in working with the public service of Mongolia and with international donor organisations such as the Asian Development Bank, and has made substantial contributions to redrafting the laws and legislation supporting the concept of legal reform. Tuya's areas of expertise are anti-corruption, transparency of governance, human rights, legal reform, public procurement and e-governance.
|
import logging
import json
import os
import random
import datetime
import networkx as nx
from .utils import list_to_html_table
class UTG(object):
    """
    UI transition graph.

    Nodes are device state strings; edges carry the input events that were
    observed to move the app from one state to another.  The graph is dumped
    to ``utg.js`` in the device output directory after every effective
    transition so an external viewer can render it.
    """

    def __init__(self, device, app, random_input):
        self.logger = logging.getLogger(self.__class__.__name__)
        self.device = device
        self.app = app
        self.random_input = random_input

        # Directed graph keyed by state_str; edge attribute "events" maps
        # event_str -> {"event": event, "id": sequence number}.
        self.G = nx.DiGraph()

        self.effective_event_strs = set()
        self.ineffective_event_strs = set()
        self.explored_state_strs = set()
        self.reached_state_strs = set()
        self.reached_activities = set()

        self.first_state_str = None
        self.last_state_str = None
        self.last_transition = None

        self.effective_event_count = 0
        self.input_event_count = 0

        self.start_time = datetime.datetime.now()

    def add_transition(self, event, old_state, new_state):
        """Record that sending `event` in `old_state` produced `new_state`.

        An event that leaves the state unchanged is classified as
        ineffective and removed from every outgoing edge of the old state;
        otherwise the transition edge is created/updated and the UTG file
        is rewritten.
        """
        self.add_node(old_state)
        self.add_node(new_state)

        # make sure the states are not None
        if not old_state or not new_state:
            return

        event_str = event.get_event_str(old_state)
        self.input_event_count += 1

        if old_state.state_str == new_state.state_str:
            self.ineffective_event_strs.add(event_str)
            # delete the transitions including the event from utg
            for new_state_str in self.G[old_state.state_str]:
                if event_str in self.G[old_state.state_str][new_state_str]["events"]:
                    self.G[old_state.state_str][new_state_str]["events"].pop(event_str)
            if event_str in self.effective_event_strs:
                self.effective_event_strs.remove(event_str)
            return

        self.effective_event_strs.add(event_str)
        self.effective_event_count += 1

        if (old_state.state_str, new_state.state_str) not in self.G.edges():
            self.G.add_edge(old_state.state_str, new_state.state_str, events={})
        self.G[old_state.state_str][new_state.state_str]["events"][event_str] = {
            "event": event,
            "id": self.effective_event_count
        }

        self.last_state_str = new_state.state_str
        self.last_transition = (old_state.state_str, new_state.state_str)
        self.__output_utg()

    def add_node(self, state):
        """Add `state` as a graph node (saving its artifacts on first sight)."""
        if not state:
            return
        if state.state_str not in self.G.nodes():
            state.save2dir()
            self.G.add_node(state.state_str, state=state)
            if self.first_state_str is None:
                self.first_state_str = state.state_str

        if state.foreground_activity.startswith(self.app.package_name):
            self.reached_activities.add(state.foreground_activity)

    def __output_utg(self):
        """
        Output current UTG to a js file
        """
        if not self.device.output_dir:
            return

        utg_file_path = os.path.join(self.device.output_dir, "utg.js")
        utg_nodes = []
        utg_edges = []
        for state_str in self.G.nodes():
            state = self.G.nodes[state_str]["state"]
            package_name = state.foreground_activity.split("/")[0]
            activity_name = state.foreground_activity.split("/")[1]
            short_activity_name = activity_name.split(".")[-1]

            state_desc = list_to_html_table([
                ("package", package_name),
                ("activity", activity_name),
                ("state_str", state.state_str),
                ("structure_str", state.structure_str)
            ])

            utg_node = {
                "id": state_str,
                "shape": "image",
                "image": os.path.relpath(state.screenshot_path, self.device.output_dir),
                "label": short_activity_name,
                # "group": state.foreground_activity,
                "package": package_name,
                "activity": activity_name,
                "state_str": state_str,
                "structure_str": state.structure_str,
                "title": state_desc,
                "content": "\n".join([package_name, activity_name, state.state_str, state.search_content])
            }

            if state.state_str == self.first_state_str:
                utg_node["label"] += "\n<FIRST>"
                utg_node["font"] = "14px Arial red"
            if state.state_str == self.last_state_str:
                utg_node["label"] += "\n<LAST>"
                utg_node["font"] = "14px Arial red"

            utg_nodes.append(utg_node)

        for state_transition in self.G.edges():
            from_state = state_transition[0]
            to_state = state_transition[1]

            events = self.G[from_state][to_state]["events"]
            event_short_descs = []
            event_list = []

            # Events are rendered in the order they were first observed.
            for event_str, event_info in sorted(iter(events.items()), key=lambda x: x[1]["id"]):
                event_short_descs.append((event_info["id"], event_str))
                # minicap captures produce jpg view images, adb captures png.
                if self.device.adapters[self.device.minicap]:
                    view_images = ["views/view_" + view["view_str"] + ".jpg"
                                   for view in event_info["event"].get_views()]
                else:
                    view_images = ["views/view_" + view["view_str"] + ".png"
                                   for view in event_info["event"].get_views()]
                event_list.append({
                    "event_str": event_str,
                    "event_id": event_info["id"],
                    "event_type": event_info["event"].event_type,
                    "view_images": view_images
                })

            utg_edge = {
                "from": from_state,
                "to": to_state,
                "id": from_state + "-->" + to_state,
                "title": list_to_html_table(event_short_descs),
                "label": ", ".join([str(x["event_id"]) for x in event_list]),
                "events": event_list
            }

            # # Highlight last transition
            # if state_transition == self.last_transition:
            #     utg_edge["color"] = "red"

            utg_edges.append(utg_edge)

        utg = {
            "nodes": utg_nodes,
            "edges": utg_edges,

            "num_nodes": len(utg_nodes),
            "num_edges": len(utg_edges),
            "num_effective_events": len(self.effective_event_strs),
            "num_reached_activities": len(self.reached_activities),
            "test_date": self.start_time.strftime("%Y-%m-%d %H:%M:%S"),
            "time_spent": (datetime.datetime.now() - self.start_time).total_seconds(),
            "num_input_events": self.input_event_count,

            "device_serial": self.device.serial,
            "device_model_number": self.device.get_model_number(),
            "device_sdk_version": self.device.get_sdk_version(),

            "app_sha256": self.app.hashes[2],
            "app_package": self.app.package_name,
            "app_main_activity": self.app.main_activity,
            "app_num_total_activities": len(self.app.activities),
        }

        utg_json = json.dumps(utg, indent=2)
        # BUG FIX: the file was previously opened at the top of this method
        # and never closed on an exception path; open it only once the data
        # is ready, and let the context manager guarantee the close.
        with open(utg_file_path, "w") as utg_file:
            utg_file.write("var utg = \n")
            utg_file.write(utg_json)

    def is_event_explored(self, event, state):
        """Return True if `event` was already tried in `state` (either way)."""
        event_str = event.get_event_str(state)
        return event_str in self.effective_event_strs or event_str in self.ineffective_event_strs

    def is_state_explored(self, state):
        """Return True once every possible input of `state` has been tried."""
        if state.state_str in self.explored_state_strs:
            return True
        for possible_event in state.get_possible_input():
            if not self.is_event_explored(possible_event, state):
                return False
        self.explored_state_strs.add(state.state_str)
        return True

    def is_state_reached(self, state):
        """Return True if `state` was seen before; marks it as reached either way."""
        if state.state_str in self.reached_state_strs:
            return True
        self.reached_state_strs.add(state.state_str)
        return False

    def get_reachable_states(self, current_state):
        """Return all states reachable from `current_state` in the graph."""
        reachable_states = []
        for target_state_str in nx.descendants(self.G, current_state.state_str):
            target_state = self.G.nodes[target_state_str]["state"]
            reachable_states.append(target_state)
        return reachable_states

    def get_event_path(self, current_state, target_state):
        """Return a list of events forming a shortest path between two states.

        When `random_input` is set, one of the parallel events on each edge
        is picked at random; otherwise the first key is used.  Returns an
        empty list when no path exists.
        """
        path_events = []
        try:
            states = nx.shortest_path(G=self.G, source=current_state.state_str, target=target_state.state_str)
            if not isinstance(states, list) or len(states) < 2:
                self.logger.warning("Error getting path from %s to %s" %
                                    (current_state.state_str, target_state.state_str))
            start_state = states[0]
            for state in states[1:]:
                edge = self.G[start_state][state]
                edge_event_strs = list(edge["events"].keys())
                if self.random_input:
                    random.shuffle(edge_event_strs)
                path_events.append(edge["events"][edge_event_strs[0]]["event"])
                start_state = state
        except Exception as e:
            print(e)
            self.logger.warning("Cannot find a path from %s to %s" % (current_state.state_str, target_state.state_str))
        return path_events
|
Our favorite tree. Bonsai gigante.
I want to sit under that tree!
|
"""
Copyright (c) 2017 Genome Research Ltd.
Authors:
* Christopher Harrison <ch12@sanger.ac.uk>
This program is free software: you can redistribute it and/or modify it
under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or (at
your option) any later version.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero
General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.
"""
import os.path
from enum import Enum
from typing import List
# Standard permissions
# Flag names used for role-based access control; consumers look these up
# by name, so renaming any entry is a breaking change.
PERMISSIONS:List[str] = [
    "modify_permissions",          # Can modify permissions
    "create_project_groups",       # Can create rotations
    "set_readonly",                # Can set project groups read-only
    "create_projects",             # Can create projects
    "review_other_projects",       # Can review other projects
    "join_projects",               # Can join projects
    "view_projects_predeadline",   # Can view projects before they're visible to students
    "view_all_submitted_projects"  # Can view all submitted projects
]

# Rotation e-mail invitation template IDs, for students and supervisors
# Template IDs are keyed by audience and rotation number (1-3).
ROTATION_TEMPLATE_IDS:List[str] = [
    "student_invite_1",     # Student invite for rotation 1
    "student_invite_2",     # ...for rotation 2
    "student_invite_3",     # ...for rotation 3
    "supervisor_invite_1",  # Supervisor invite for rotation 1
    "supervisor_invite_2",  # ...for rotation 2
    "supervisor_invite_3"   # ...for -- wait for it! -- rotation 3
]

# Absolute path of the job hazard form
# Resolved relative to this module so it works regardless of the CWD.
# FIXME? Is this the appropriate place to put this?
JOB_HAZARD_FORM:str = os.path.normpath(
    os.path.join(
        os.path.dirname(__file__),
        "..", "..", "static", "new_starter_health_questionnaire_jun_17.docx"))

# Maximum number of Excel rows to export
# FIXME? Is this the appropriate place to put this?
MAX_EXPORT_LINE_LENGTH:int = 30

# Sanger science programmes
PROGRAMMES:List[str] = [
    "Cancer, Ageing and Somatic Mutation",
    "Cellular Genetics",
    "Human Genetics",
    "Parasites and Microbes"
]
# Grades used in marking, with description
class GRADES(Enum):
    """Grades available when marking, each valued with its human-readable description."""
    A = "Excellent"
    B = "Good"
    C = "Satisfactory"
    D = "Fail"
|
You can find here details of West Bengal PSC recruitment 2011. Actually West Bengal public service commission is conducting this exam for West Bengal judicial service Exam 2011. Since Public service commission job of any state is one of the most preferred job among all the candidates who are hoping for government sector job. So we are here with information on West Bengal Public service commission vacancy 2011 details. For all the candidates who were waiting for government job opening in West Bengal should relax because finally West Bengal public service commission is going to organize judicial service Examination.
West Bengal judicial service Exam 2011 date will be announced later, but it will be a written examination. When the exam date of the West Bengal Public Service Commission is announced, we will let you know. However, you can start preparing for the examination by getting the study material for West Bengal PSC jobs and other competitive exams here.
Candidates who fulfill the eligibility criteria need to download the West Bengal judicial service exam application form. They need to fill in the application form and send it to the address mentioned below, with supporting documents, before 12th April 2011.
Candidates should possess a degree in LLB.
Ability to read, write and speak in Bengali (Nepali speaking candidates from Darjeeling need not comply to this one).
The application fee for the West Bengal PSC judicial service Exam 2011-2012 for general candidates is Rs. 210. For SC/ST and physically challenged candidates there is no application fee. The application fee for the West Bengal PSC judicial service Exam needs to be paid in the form of an Indian postal order or a receipted challan.
|
import os, urllib2, csv
from bs4 import BeautifulSoup
from bs4.element import Tag
def get_soup(urlreq):
    """Fetch a page and return it parsed as a BeautifulSoup tree.

    `urlreq` may be a URL string or, presumably, a urllib2.Request
    object -- TODO confirm with callers.  Plain string URLs are echoed
    to stdout as a progress indicator.  Uses a 30-second network
    timeout and the lxml parser.
    """
    if type(urlreq) is str:
        print urlreq  # Python 2 print statement: log the URL being fetched
    # Open the URL
    reader = urllib2.urlopen(urlreq, timeout=30)
    # Parse the HTML
    return BeautifulSoup(reader, "lxml")
# Find all elements of a given kind of tag in the soup
# Find all elements of a given kind of tag in the soup
def find_tags(soup, tagname, recurse_match=False):
    """Collect child tags of `soup` whose name matches `tagname`.

    `tagname` may be a single tag name or a list of names.  Children that
    do not match are always searched recursively; matching children are
    only descended into when `recurse_match` is True.  Matches are
    returned in document order.
    """
    if isinstance(tagname, list):
        matches_name = lambda name: name in tagname
    else:
        matches_name = lambda name: name == tagname

    found = []
    # Only Tag instances are considered; NavigableStrings etc. are skipped.
    for child in soup.children:
        if not isinstance(child, Tag):
            continue
        if hasattr(child, 'name') and matches_name(child.name):
            found.append(child)
            if recurse_match:
                found.extend(find_tags(child, tagname, recurse_match=recurse_match))
        else:
            found.extend(find_tags(child, tagname, recurse_match=recurse_match))
    return found
# Extract tags with a given class
# Extract tags with a given class
def find_tags_class(soup, tagname, clsname):
    """Return all matching tags whose first CSS class equals `clsname`."""
    selected = []
    for tag in find_tags(soup, tagname, recurse_match=True):
        if tag.has_attr('class') and tag['class'][0] == clsname:
            selected.append(tag)
    return selected
|
Another week, another BBQ restaurant opens. This time it is in Kensal Rise and it serves up food inspired by America’s deep south. DJ Yoda is the name behind this venture, hence the music.
The main room is all bare brick walls and distressed wood panels, giving it that industrial-chic feel. The room downstairs is set up for private parties, which is where they had the launch party. These guys do great parties: throbbing music and lots of rib-sticking food.
We tasted an array of chicken wings, brisket, pulled pork and cornbread muffins served canape style. Can’t wait to try their proper menu.
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.