text
stringlengths 29
850k
|
|---|
#!/usr/bin/env python2
# vim:fileencoding=utf-8
from __future__ import (unicode_literals, division, absolute_import,
print_function)
__license__ = 'GPL v3'
__copyright__ = '2014, Kovid Goyal <kovid at kovidgoyal.net>'
from functools import partial
from PyQt5.Qt import QTimer, QProgressDialog, Qt
from calibre import force_unicode
from calibre.gui2 import gprefs
from calibre.gui2.actions import InterfaceAction
class EmbedAction(InterfaceAction):
    """GUI action that writes the library's metadata back into the actual
    book files of the selected books.

    Also accepts drag-and-drop of books from the calibre library view onto
    the action's toolbar button.
    """

    name = 'Embed Metadata'
    action_spec = (_('Embed metadata'), 'modified.png', _('Embed metadata into book files'), None)
    action_type = 'current'
    action_add_menu = True
    action_menu_clone_qaction = _('Embed metadata into book files')
    accepts_drops = True

    def accept_enter_event(self, event, mime_data):
        # Only accept drops originating from the calibre library view.
        if mime_data.hasFormat("application/calibre+from_library"):
            return True
        return False

    def accept_drag_move_event(self, event, mime_data):
        if mime_data.hasFormat("application/calibre+from_library"):
            return True
        return False

    def drop_event(self, event, mime_data):
        mime = 'application/calibre+from_library'
        if mime_data.hasFormat(mime):
            # Payload is a whitespace-separated list of book ids.
            self.dropped_ids = tuple(map(int, str(mime_data.data(mime)).split()))
            # Defer the actual work so the drop event can finish first.
            QTimer.singleShot(1, self.do_drop)
            return True
        return False

    def do_drop(self):
        book_ids = self.dropped_ids
        del self.dropped_ids
        if book_ids:
            self.do_embed(book_ids)

    def genesis(self):
        # Called once at startup: wire up the action, its menu and the
        # timer that drives the one-book-per-tick embed loop (see do_one).
        self.qaction.triggered.connect(self.embed)
        self.embed_menu = self.qaction.menu()
        m = partial(self.create_menu_action, self.embed_menu)
        m('embed-specific',
          _('Embed metadata into files of a specific format from selected books..'),
          triggered=self.embed_selected_formats)
        self.qaction.setMenu(self.embed_menu)
        self.pd_timer = t = QTimer()
        t.timeout.connect(self.do_one)

    def embed(self):
        # Reuse the Remove Books action's selection helper for a consistent
        # "nothing selected" error dialog.
        rb = self.gui.iactions['Remove Books']
        ids = rb._get_selected_ids(err_title=_('Cannot embed'))
        if not ids:
            return
        self.do_embed(ids)

    def embed_selected_formats(self):
        rb = self.gui.iactions['Remove Books']
        ids = rb._get_selected_ids(err_title=_('Cannot embed'))
        if not ids:
            return
        fmts = rb._get_selected_formats(
            _('Choose formats to be updated'), ids)
        if not fmts:
            return
        self.do_embed(ids, fmts)

    def do_embed(self, book_ids, only_fmts=None):
        # Set up the progress dialog and start the timer-driven loop; the
        # actual embedding happens one book at a time in do_one().
        pd = QProgressDialog(_('Embedding updated metadata into book files...'), _('&Stop'), 0, len(book_ids), self.gui)
        pd.setWindowTitle(_('Embedding metadata...'))
        pd.setWindowModality(Qt.WindowModal)
        errors = []
        # job_data = (next index, book ids, progress dialog, format filter, errors)
        self.job_data = (0, tuple(book_ids), pd, only_fmts, errors)
        self.pd_timer.start()

    def do_one(self):
        # Process a single book per timer tick so the UI stays responsive.
        try:
            i, book_ids, pd, only_fmts, errors = self.job_data
        except (TypeError, AttributeError):
            # job_data is None or unset: no job in progress.
            return
        if i >= len(book_ids) or pd.wasCanceled():
            # Finished (or cancelled): tear down and report results.
            pd.setValue(pd.maximum())
            pd.hide()
            self.pd_timer.stop()
            self.job_data = None
            self.gui.library_view.model().refresh_ids(book_ids)
            if i > 0:
                self.gui.status_bar.show_message(_('Embedded metadata in %d books') % i, 5000)
            if errors:
                det_msg = '\n\n'.join([_('The {0} format of {1}:\n\n{2}\n').format(
                    (fmt or '').upper(), force_unicode(mi.title), force_unicode(tb)) for mi, fmt, tb in errors])
                from calibre.gui2.dialogs.message_box import MessageBox
                title, msg = _('Failed for some files'), _(
                    'Failed to embed metadata into some book files. Click "Show details" for details.')
                d = MessageBox(MessageBox.WARNING, _('WARNING:')+ ' ' + title, msg, det_msg, parent=self.gui, show_copy_button=True)
                tc = d.toggle_checkbox
                tc.setVisible(True), tc.setText(_('Show the &failed books in the main book list'))
                tc.setChecked(gprefs.get('show-embed-failed-books', False))
                d.resize_needed.emit()
                d.exec_()
                # Persist the user's checkbox choice for next time.
                gprefs['show-embed-failed-books'] = tc.isChecked()
                if tc.isChecked():
                    # Mark the failed books and filter the view to show them.
                    failed_ids = {mi.book_id for mi, fmt, tb in errors}
                    db = self.gui.current_db
                    db.data.set_marked_ids(failed_ids)
                    self.gui.search.set_search_string('marked:true')
            return
        pd.setValue(i)
        db = self.gui.current_db.new_api
        book_id = book_ids[i]

        def report_error(mi, fmt, tb):
            # Tag the metadata object with the book id so failed books can
            # be marked in the library view afterwards.
            mi.book_id = book_id
            errors.append((mi, fmt, tb))
        db.embed_metadata((book_id,), only_fmts=only_fmts, report_error=report_error)
        self.job_data = (i + 1, book_ids, pd, only_fmts, errors)
|
21 Things for Students is a site created as part of a Michigan grant program to provide students with an educational-technology resource. As the name suggests there are 21 different resources. Each resource also includes tips and links for using it and project ideas. There is a Teacher Overview page that explains the alignment of the project with NETS and a Teacher Assessment Page that has information about the assessments and rubrics for each project.
The resource categories are: Staying Organized, Web Presence, Digital Citizenship, Be Legal and Fair, Productivity Suite, Project Collaboration, Digital Video, Information Literacy, Personal Networks, Self-Directed Learning, Online Learning, Interactive Tools, e-Commerce, Career Prep, Troubleshooting, Visual Organizers, Powerful Presentations, VAI-Casting, Virtual Programming, Data Quest, and Mobile Computing.
Projects include: Delicious, Google Docs, tips on web presence and copyright, Google Docs collaboration, Thinkfinity, Google Maps, Quizlet, how to be a better presenter, how to use mobile computing, and much, much more.
Each of the 21 Resources includes an app or website, project instructions, and tips.
There is also a page with Fun and Educational resources and games, as well as a Feedback Page, which the site's creators use to collect feedback and make changes based on suggestions.
This is an excellent resource for students. Please share it with your colleagues and students!
|
# This file is part of the Astrometry.net suite.
# Licensed under a 3-clause BSD style license - see LICENSE
from __future__ import print_function
import numpy
def write_pnm_to(img, f, maxval=255):
    """Write image *img* to the binary file-like object *f* in PNM format.

    A 2-D array is written as binary PGM (grayscale, magic P5); a 3-D array
    with 3 planes as binary PPM (color, magic P6). Pixel values are clipped
    to [0, maxval]; samples are 8-bit when maxval <= 255, else 16-bit.

    :param img: numpy array, shape (h, w) or (h, w, 3)
    :param f: binary-mode file-like object
    :param maxval: maximum sample value (<= 65535)
    :raises RuntimeError: on bad dimensionality, plane count, or maxval
    """
    if len(img.shape) == 1:
        raise RuntimeError('write_pnm: img is one-dimensional: must be 2 or 3.')
    elif len(img.shape) == 2:
        # Grayscale -> binary PGM
        pnmcode = 5
        (h, w) = img.shape
    elif len(img.shape) == 3:
        (h, w, planes) = img.shape
        # Color -> binary PPM
        pnmcode = 6
        if planes != 3:
            raise RuntimeError('write_pnm: img must have 3 planes, not %i' % planes)
    else:
        raise RuntimeError('write_pnm: img must have <= 3 dimensions.')
    maxval = int(maxval)
    if maxval > 65535:
        raise RuntimeError('write_pnm: maxval must be <= 65535')
    if maxval < 0:
        raise RuntimeError('write_pnm: maxval must be positive')
    if img.max() > maxval:
        print('write_pnm: Some pixel values are > maxval (%i): clipping them.' % maxval)
    if img.min() < 0:
        print('write_pnm: Some pixel values are < 0: clipping them.')
    clipped = img.clip(0, maxval)
    # The header is plain ASCII even though the samples are binary; encode it
    # so this works on a binary-mode stream (the original wrote a str, which
    # fails on Python 3 'wb' files).
    f.write(('P%i %i %i %i ' % (pnmcode, w, h, maxval)).encode('ascii'))
    # BUG FIX: the original wrote the unclipped `img` despite computing
    # `clipped` above; write the clipped data so out-of-range values are
    # actually clamped as the warnings promise.
    if maxval <= 255:
        f.write(clipped.astype(numpy.uint8).data)
    else:
        f.write(clipped.astype(numpy.uint16).data)
def write_pnm(img, filename, maxval=255):
    """Write *img* to *filename* as a binary PNM file (see write_pnm_to).

    :param img: numpy array, shape (h, w) or (h, w, 3)
    :param filename: destination path
    :param maxval: maximum sample value (<= 65535)
    """
    # Use a context manager so the file is closed even if write_pnm_to
    # raises (the original leaked the handle on error).
    with open(filename, 'wb') as f:
        write_pnm_to(img, f, maxval)
|
Episode 1-Your Will Shall Decide Your Destiny: Download!
Episode 2-I Am No Bird, and No Net Ensnares Me: Download!
Episode 3- But What Is So Headstrong as Youth: Download!
Episode 4-An Inward Treasure Born: Download!
Episode 5-Tightly Knotted to a Similar String: Download!
Episode 6-Remorse is the Poison of Life: Download!
Episode 7-Wherever You Are Is My Home: Download!
Episode 1-Youth is the Season of Hope: Download!
Episode 2-Signs are Small Measurable Things, but Interpretations are Illimitable: Download!
Episode 3-The True Seeing is Within: Download!
Episode 4-The Painful Eagerness of Unfed Hope: Download!
Episode 5-The Determining Acts of Her Life: Download!
Episode 6-I Protest Against Any Absolute Conclusion: Download!
Episode 7-Memory Has as Many Moods as The Temper: Download!
Episode 8-Struggling Against the Perception of Facts: Download!
Episode 9-What We Have Been Makes Us What We Are: Download!
Episode 10-The Growing Good of the World: Download!
Anne is © CBC. All of the screencaps provided under "Northern Winds" are believed to be within my rights under the US Copyright Fair Use Act [Title 17, US Code]. These screencaptures have been created for use in fan-work derivatives. In no way are these screencaptures being sold or in any way used to generate a profit. I am not responsible for what any downloader does with them.
|
from django.shortcuts import render
from django.contrib.auth.decorators import login_required
from gestion.models import ValorTipo,Producto
from django.contrib.auth.models import User
from django.http import HttpResponse
import json
from ventas.models import VentaMaestro,VentaDetalle,PagoVentaMaestro
from django.utils.dateformat import DateFormat
from django.db.models import Q
from django.utils.timezone import get_default_timezone
# Create your views here.
class Categoria:
    """Simple container grouping a category (ValorTipo) with its products."""

    def __init__(self):
        # Per-instance list. The original declared `listProductos = []` as a
        # class attribute, which is shared by every instance (mutable class
        # attribute pitfall); callers had to reset it manually after each
        # Categoria() — that workaround remains harmless with this fix.
        self.listProductos = []
@login_required
def venta_desktop(request,nroFactura=None):
    """Render the desktop sale screen (venta.html).

    Finds an existing sale either by table number (query string
    ``nroFact=m<n>``) or by invoice number (``nroFact=<n>``), computes the
    per-line totals, grand total and VAT total, and renders the template.
    """
    # The URL keyword argument is immediately overwritten: the invoice/table
    # identifier actually comes from the query string.
    nroFactura=request.GET.get('nroFact',None)
    print(get_default_timezone())
    ventaMaestro=None
    if nroFactura!=None :
        # A leading 'm' means the value is a table (mesa) number.
        if nroFactura[0]=='m':
            # Look up the order for that table.
            try:
                ventaMaestro=VentaMaestro.objects.get(mesa=nroFactura[1:])
            except VentaMaestro.DoesNotExist:
                ventaMaestro=None
        else:
            # Otherwise look up by invoice number.
            try:
                ventaMaestro=VentaMaestro.objects.get(numFactura=nroFactura)
            except VentaMaestro.DoesNotExist:
                ventaMaestro=None
    # Fetch the sale's line items.
    listDetalles=None
    if(ventaMaestro!=None):
        listDetalles=ventaMaestro.ventadetalle_set.all()
    else:
        # Create an (unsaved) VentaMaestro with default customer and seller.
        # NOTE(review): customer id 2 is hard-coded — presumably a generic
        # walk-in customer; confirm against the User table.
        ventaMaestro=VentaMaestro()
        ventaMaestro.cliente=User.objects.get(id=2)
        ventaMaestro.vendedor=request.user
    # Compute the total for each line item plus the grand and VAT totals.
    dictTotales={}
    granTotal=0
    ivaTotal=0
    ivaTmp=100
    if listDetalles!=None:
        for detalle in listDetalles:
            dictTotales[detalle.id]=(detalle.valorUni*detalle.cantidad)-detalle.descuento
            granTotal=granTotal+dictTotales[detalle.id]
            # NOTE(review): iva == 0 falls back to 100 (%), which charges
            # full VAT on zero-rated lines — confirm this is intended.
            if detalle.iva==0 :
                ivaTmp=100
            else:
                ivaTmp=detalle.iva
            ivaTotal=ivaTotal+(dictTotales[detalle.id]*ivaTmp)/100
    dictTotales['granTotal']=granTotal
    dictTotales['ivaTotal']=ivaTotal
    dictTotales['subTotal']=granTotal-ivaTotal
    context = {'message':'ok','ventaMaestro':ventaMaestro,'listDetalles':listDetalles,'dictTotales':dictTotales}
    return render(request, 'venta.html',context)
@login_required
def venta_mobile(request):
    """Render the mobile sale screen: products grouped by category."""
    # Maps category id -> Categoria (category + its products).
    list_cate_prod={}
    # NOTE(review): padre=1 presumably selects the "product category" rows
    # of the generic ValorTipo table — confirm against the data model.
    list_categories=ValorTipo.objects.filter(padre=1)
    list_productos=Producto.objects.all()
    for producto in list_productos:
        for categoria in list_categories:
            if categoria.id==producto.tipoCategoria.id :
                if categoria.id in list_cate_prod :
                    temp=list_cate_prod[categoria.id]
                else :
                    # New group: reset listProductos so instances don't share
                    # the mutable class-level default.
                    temp=Categoria()
                    temp.listProductos=[]
                    temp.tipoCategoria=categoria
                temp.listProductos.append(producto)
                list_cate_prod[categoria.id]=temp
    context = {'message':'ok','list_categories':list_cate_prod}
    return render(request, 'mobile/venta.html',context)
@login_required
def clientes(request):
    """Autocomplete endpoint: users whose first or last name contains ?q=."""
    query = request.GET.get('q','')
    result_list = []
    if query:
        matches = User.objects.filter(Q(last_name__icontains=query)|Q(first_name__icontains=query))
        result_list = [
            {'id':match.id,'nombre_completo':match.last_name+' '+match.first_name}
            for match in matches
        ]
    payload = json.dumps(result_list, separators=(',',':'))
    return HttpResponse(payload, content_type="application/json")
@login_required
def vendedores(request):
    """Autocomplete endpoint: sellers matched by first or last name (?q=).

    Identical in behavior to clientes(): both search the User table.
    """
    term = request.GET.get('q','')
    if not term:
        hits = []
    else:
        name_filter = Q(last_name__icontains=term)|Q(first_name__icontains=term)
        hits = [
            {'id':user.id,'nombre_completo':user.last_name+' '+user.first_name}
            for user in User.objects.filter(name_filter)
        ]
    return HttpResponse(json.dumps(hits, separators=(',',':')),
                        content_type="application/json")
@login_required
def codproducto(request):
    """Autocomplete endpoint: products whose code contains ?q=."""
    term = request.GET.get('q','')
    found = []
    if term:
        for prod in Producto.objects.filter(codigo__icontains=term):
            found.append({'id':prod.id,'nombre':prod.nombre,'codigo':prod.codigo,'valorVenta':prod.valorVenta,'iva':prod.ivaPorcentaje})
    return HttpResponse(json.dumps(found, separators=(',',':')),
                        content_type="application/json")
@login_required
def nomproducto(request):
    """Autocomplete endpoint: products whose name contains ?q=."""
    term = request.GET.get('q','')
    if term:
        hits = [
            {'id':prod.id,'nombre':prod.nombre,'codigo':prod.codigo,'valorVenta':prod.valorVenta,'iva':prod.ivaPorcentaje}
            for prod in Producto.objects.filter(nombre__icontains=term)
        ]
    else:
        hits = []
    return HttpResponse(json.dumps(hits, separators=(',',':')),
                        content_type="application/json")
@login_required
def savePedido(request,anyway=None):
    """Create or update a sale order (VentaMaestro) from POSTed form data.

    Returns JSON: ``{'code': '01'}`` when the table already has an order
    with a payment and ``anyway`` was not given; otherwise ``{'code': '00'}``
    plus the saved invoice id and formatted sale date/time.
    """
    if anyway==None:
        # Refuse to create a second order for a table that already has one.
        mesa = request.POST['mesa_p']
        idMaestroDetalle = buscarPedido(int(mesa if mesa != '' else '0'))
        if idMaestroDetalle:
            response_text = {'code':'01'}  # An order already exists for this table
            return HttpResponse(json.dumps(response_text), content_type="application/json")
    idFactura = None
    if request.POST['idFactura']!='':
        # Existing invoice: load and update it.
        idFactura=int(request.POST['idFactura'])
        factura=VentaMaestro.objects.get(id=idFactura)
    else:
        factura=VentaMaestro()
    # BUG FIX: the original used (int(x), 1)[x == ''], which evaluates
    # int('') eagerly and raises ValueError when the field is empty; guard
    # before converting. Default to user id 1 when no id was posted.
    idcliente = request.POST['idcliente_p']
    factura.cliente = User(id=int(idcliente) if idcliente != '' else 1)
    idvendedor = request.POST['idvendedor_p']
    factura.vendedor = User(id=int(idvendedor) if idvendedor != '' else 1)
    factura.cajero=request.user
    mesa = request.POST['mesa_p']
    factura.mesa = int(mesa if mesa != '' else '0')
    factura.save()
    df = DateFormat(factura.fechaVenta)
    response_text = {'code':'00','nroFactura':factura.id,'fechaVenta':df.format('d/M/Y'),'horaVenta':df.format('h:i A')}
    return HttpResponse(json.dumps(response_text), content_type="application/json")
def buscarPedido(nroMesa):
    """Return the number of orders for table *nroMesa* that have a payment.

    NOTE(review): callers treat a non-zero result as "this table already has
    an UNPAID order", but the filter matches orders WITH a payment
    (pagoventamaestro__valorPago > 0) — confirm whether the condition is
    inverted.
    """
    lista=VentaMaestro.objects.filter(mesa=nroMesa,pagoventamaestro__valorPago__gt=0)
    return len(lista)
@login_required
def saveDetalle(request):
    """Create or update a sale line item (VentaDetalle) from POSTed data.

    Returns JSON with the saved line item's id.
    """
    factura=None
    if request.POST['idFacturaD']!='':
        idFactura=int(request.POST['idFacturaD'])
        factura=VentaMaestro.objects.get(id=idFactura)
    if request.POST['iddetalle_p']!='':
        # Existing line item: update it in place.
        idDetalle=int(request.POST['iddetalle_p'])
        ventaDetalle=VentaDetalle.objects.get(id=idDetalle)
    else:
        ventaDetalle=VentaDetalle()
    idProducto=int(request.POST['idproducto_p'])
    ventaDetalle.ventaMaestro=factura
    # Unsaved Producto stub: only the foreign-key id is needed here.
    ventaDetalle.producto=Producto(id=idProducto)
    ventaDetalle.cantidad=int(request.POST['cantidad_p'])
    ventaDetalle.valorUni=int(request.POST['unitario_p'])
    ventaDetalle.iva=int(request.POST['valori_p'])
    ventaDetalle.descuento=int(request.POST['descuento_p'])
    ventaDetalle.save()
    response_text = {'idDetalle':ventaDetalle.id}
    return HttpResponse(json.dumps(response_text), content_type="application/json")
@login_required
def deleteDetalle(request,id):
    """Delete the sale line item with primary key *id*; respond with code 00."""
    detalle = VentaDetalle.objects.get(id=id)
    detalle.delete()
    payload = json.dumps({'code':'00'})
    return HttpResponse(payload, content_type="application/json")
@login_required
def pagarPedido(request):
    """Record a cash payment for an existing order and save tip/discount.

    Returns JSON: ``{'code': '01'}`` when no invoice id was posted,
    otherwise ``{'code': '00'}`` after saving the payment and the order.
    """
    idFactura = None
    print("id factura="+request.POST['idFactura'])
    if request.POST['idFactura']!='':
        idFactura=int(request.POST['idFactura'])
        factura=VentaMaestro.objects.get(id=idFactura)
    else:
        response_text = {'code':'01'}  # No order identifier was sent
        return HttpResponse(json.dumps(response_text), content_type="application/json")
    pago=PagoVentaMaestro()
    pago.ventaMaestro=factura
    # BUG FIX: the original used (int(x), 0)[x == ''], which evaluates
    # int('') eagerly and raises ValueError when the field is empty; guard
    # before converting.
    efectivo = request.POST['vlr-efectivo']
    pago.valorPago = int(efectivo) if efectivo != '' else 0
    # NOTE(review): ValorTipo id 1 presumably means "cash" — confirm.
    pago.tipoMedioPago=ValorTipo(id=1)
    pago.save()
    propina = request.POST['propina_p']
    factura.valorPropina = int(propina if propina != '' else '0')
    descuento = request.POST['descuento_p']
    factura.descuento = int(descuento if descuento != '' else '0')
    factura.save()
    response_text = {'code':'00'}
    return HttpResponse(json.dumps(response_text), content_type="application/json")
|
The previous owners of our house tiled around the toilet, rather than removing the toilet and placing it on top of the new tile.
Find a toilet that has the exact same size (or bigger) on the floor.
Put down the new toilet, and find something to put in the gap between the toilet and the tile.
Put something above the tile, but underneath the toilet.
What is generally the best solution here? For #1, is there an easy way of finding a toilet with the right size? Are there any common ways of doing #2 or #3, and what material should be used?
Pull up the old toilet, finish the tiling job, and drop in the new toilet. Anything else is going to leave you with a hack job that will eat away at your satisfaction in having your own home every time you turn around to flush.
Finishing the tiling job may turn out to be dead simple if matching tile is available. This is not likely, though, as anyone too lazy to pull the toilet when tiling a bathroom was probably also too cheap to buy tile from a reputable open stock.
So -- find some tile that co-ordinates with the existing floor. For example, if your tile has a pattern, pick up one of the pattern colors with a solid color. Or, you might like the effect of a solid color matching the new toilet.
Pull the old toilet. Enlarge the hole in the tile to a neat shape and fill it in with new tile. Make the replacement area just large enough to look like an expensive decorating consultant did it on purpose. Of course you might need a flange extender when installing the new toilet.
Now, to really finish off the look, find some other feature in the bathroom that can be highlighted in the same way. For example, you might re-color the area where you stand at the sink. Or, you could call out the door sweep area.
Save the removed tiles as well as any unused new tiles for future repair needs.
Is anyone in your family artistic and crafty? Use a combination of removed tiles and new tiles to decorate a tissue box.
Not the answer you're looking for? Browse other questions tagged tile toilet or ask your own question.
How do I tile over a leveled floor with a feathered edge?
How do I prep and plaster a bowed wall before tiling?
Can you tile on finished plaster wall in a shower?
How do I repair improperly backed shower tile?
How to put vinyl click-together flooring up against cove base tile?
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Ansible module metadata: review status, support tier and metadata version.
ANSIBLE_METADATA = {'status': ['preview'],
                    'supported_by': 'community',
                    'version': '1.0'}
DOCUMENTATION = '''
---
module: ipa_user
author: Thomas Krahn (@Nosmoht)
short_description: Manage FreeIPA users
description:
- Add, modify and delete user within IPA server
options:
displayname:
description: Display name
required: false
givenname:
description: First name
required: false
loginshell:
description: Login shell
required: false
mail:
description:
- List of mail addresses assigned to the user.
- If an empty list is passed all assigned email addresses will be deleted.
- If None is passed email addresses will not be checked or changed.
required: false
password:
description:
- Password
required: false
sn:
description: Surname
required: false
sshpubkey:
description:
- List of public SSH key.
- If an empty list is passed all assigned public keys will be deleted.
- If None is passed SSH public keys will not be checked or changed.
required: false
state:
description: State to ensure
required: false
default: "present"
choices: ["present", "absent", "enabled", "disabled"]
telephonenumber:
description:
- List of telephone numbers assigned to the user.
- If an empty list is passed all assigned telephone numbers will be deleted.
- If None is passed telephone numbers will not be checked or changed.
required: false
title:
description: Title
required: false
uid:
description: uid of the user
required: true
aliases: ["name"]
ipa_port:
description: Port of IPA server
required: false
default: 443
ipa_host:
description: IP or hostname of IPA server
required: false
default: "ipa.example.com"
ipa_user:
description: Administrative account used on IPA server
required: false
default: "admin"
ipa_pass:
description: Password of administrative user
required: true
ipa_prot:
description: Protocol used by IPA server
required: false
default: "https"
choices: ["http", "https"]
validate_certs:
description:
- This only applies if C(ipa_prot) is I(https).
- If set to C(no), the SSL certificates will not be validated.
- This should only set to C(no) used on personally controlled sites using self-signed certificates.
required: false
default: true
version_added: "2.3"
requirements:
- base64
- hashlib
'''
EXAMPLES = '''
# Ensure pinky is present
- ipa_user:
name: pinky
state: present
givenname: Pinky
sn: Acme
mail:
- pinky@acme.com
telephonenumber:
- '+555123456'
sshpubkeyfp:
- ssh-rsa ....
- ssh-dsa ....
ipa_host: ipa.example.com
ipa_user: admin
ipa_pass: topsecret
# Ensure brain is absent
- ipa_user:
name: brain
state: absent
ipa_host: ipa.example.com
ipa_user: admin
ipa_pass: topsecret
'''
RETURN = '''
user:
description: User as returned by IPA API
returned: always
type: dict
'''
import base64
import hashlib
from ansible.module_utils.ipa import IPAClient
class UserIPAClient(IPAClient):
    """Thin wrapper around IPAClient exposing FreeIPA's user_* JSON-RPC methods."""

    def __init__(self, module, host, port, protocol):
        super(UserIPAClient, self).__init__(module, host, port, protocol)

    def user_find(self, name):
        # For user_find the uid goes in the item dict, not the name slot;
        # 'all': True requests every attribute of matching entries.
        return self._post_json(method='user_find', name=None, item={'all': True, 'uid': name})

    def user_add(self, name, item):
        return self._post_json(method='user_add', name=name, item=item)

    def user_mod(self, name, item):
        return self._post_json(method='user_mod', name=name, item=item)

    def user_del(self, name):
        return self._post_json(method='user_del', name=name)

    def user_disable(self, name):
        return self._post_json(method='user_disable', name=name)

    def user_enable(self, name):
        return self._post_json(method='user_enable', name=name)
def get_user_dict(displayname=None, givenname=None, loginshell=None, mail=None, nsaccountlock=False, sn=None,
                  sshpubkey=None, telephonenumber=None, title=None, userpassword=None):
    """Build the user attribute dict sent to IPA, omitting fields left as None.

    Note that sshpubkey maps to the IPA attribute name 'ipasshpubkey' and
    the module's password maps to 'userpassword'.
    """
    candidates = {
        'displayname': displayname,
        'givenname': givenname,
        'loginshell': loginshell,
        'mail': mail,
        'sn': sn,
        'ipasshpubkey': sshpubkey,
        'telephonenumber': telephonenumber,
        'title': title,
        'userpassword': userpassword,
    }
    user = {attr: value for attr, value in candidates.items() if value is not None}
    # nsaccountlock is always included: it encodes the enabled/disabled state.
    user['nsaccountlock'] = nsaccountlock
    return user
def get_user_diff(ipa_user, module_user):
    """
    Return the keys whose values differ between the IPA record and the
    module's desired attributes.

    The IPA API returns everything as a list even when only a single value
    is possible, so scalar module values are wrapped in a list before
    comparison, and lists are compared order-insensitively (sorted).

    SSH keys are special: IPA returns fingerprints rather than the keys
    themselves, so the module's keys are converted to fingerprints for the
    comparison and the original 'ipasshpubkey' entry is restored afterwards.

    :param ipa_user: attribute dict as returned by IPA
    :param module_user: desired attribute dict (see get_user_dict)
    :return: list of differing attribute names
    """
    saved_pubkeys = None
    if 'ipasshpubkey' in module_user:
        saved_pubkeys = module_user.pop('ipasshpubkey')
        module_user['sshpubkeyfp'] = [get_ssh_key_fingerprint(pubkey) for pubkey in saved_pubkeys]
    changed = []
    for attr in module_user.keys():
        wanted = module_user.get(attr, None)
        actual = ipa_user.get(attr, None)
        if isinstance(actual, list):
            if not isinstance(wanted, list):
                wanted = [wanted]
            wanted = sorted(wanted)
            actual = sorted(actual)
        if wanted != actual:
            changed.append(attr)
    # Undo the fingerprint substitution so the caller's dict is unchanged.
    if saved_pubkeys is not None:
        del module_user['sshpubkeyfp']
        module_user['ipasshpubkey'] = saved_pubkeys
    return changed
def get_ssh_key_fingerprint(ssh_key):
    """
    Return the MD5 fingerprint of a given public SSH key
    in format "FB:0C:AC:0A:07:94:5B:CE:75:6E:63:32:13:AD:AD:D7 [user@host] (ssh-rsa)".

    Returns None when the key line does not contain at least a key type and
    a base64-encoded key body.

    :param ssh_key: public key line, e.g. "ssh-rsa AAAA... user@host"
    :return: fingerprint string, or None for malformed input
    """
    parts = ssh_key.strip().split()
    # BUG FIX: the original only guarded the empty string; a lone key type
    # with no base64 body crashed with IndexError on parts[1].
    if len(parts) < 2:
        return None
    key_type = parts[0]
    key = base64.b64decode(parts[1].encode('ascii'))
    fp_plain = hashlib.md5(key).hexdigest()
    # Pair up hex digits into the familiar colon-separated fingerprint form.
    key_fp = ':'.join(a + b for a, b in zip(fp_plain[::2], fp_plain[1::2])).upper()
    if len(parts) < 3:
        return "%s (%s)" % (key_fp, key_type)
    else:
        user_host = parts[2]
        return "%s %s (%s)" % (key_fp, user_host, key_type)
def ensure(module, client):
    """Drive the desired state: add, modify or delete the IPA user.

    'enabled'/'disabled' are handled as 'present' with the nsaccountlock
    attribute set accordingly. Honors check mode (no RPC writes).

    :param module: AnsibleModule with parsed parameters
    :param client: UserIPAClient already logged in
    :return: (changed, ipa_user) — ipa_user is the record as returned by IPA
    """
    state = module.params['state']
    name = module.params['name']
    # 'disabled' is modeled as the nsaccountlock attribute on the user.
    nsaccountlock = state == 'disabled'
    module_user = get_user_dict(displayname=module.params.get('displayname'),
                                givenname=module.params.get('givenname'),
                                loginshell=module.params['loginshell'],
                                mail=module.params['mail'], sn=module.params['sn'],
                                sshpubkey=module.params['sshpubkey'], nsaccountlock=nsaccountlock,
                                telephonenumber=module.params['telephonenumber'], title=module.params['title'],
                                userpassword=module.params['password'])
    ipa_user = client.user_find(name=name)
    changed = False
    if state in ['present', 'enabled', 'disabled']:
        if not ipa_user:
            # User does not exist yet: create it.
            changed = True
            if not module.check_mode:
                ipa_user = client.user_add(name=name, item=module_user)
        else:
            # User exists: modify only if some attribute differs.
            diff = get_user_diff(ipa_user, module_user)
            if len(diff) > 0:
                changed = True
                if not module.check_mode:
                    ipa_user = client.user_mod(name=name, item=module_user)
    else:
        # state == 'absent': delete the user if it exists.
        if ipa_user:
            changed = True
            if not module.check_mode:
                client.user_del(name)
    return changed, ipa_user
def main():
    """Module entry point: declare arguments, connect to IPA and apply state."""
    module = AnsibleModule(
        argument_spec=dict(
            displayname=dict(type='str', required=False),
            givenname=dict(type='str', required=False),
            loginshell=dict(type='str', required=False),
            mail=dict(type='list', required=False),
            sn=dict(type='str', required=False),
            uid=dict(type='str', required=True, aliases=['name']),
            password=dict(type='str', required=False, no_log=True),
            sshpubkey=dict(type='list', required=False),
            state=dict(type='str', required=False, default='present',
                       choices=['present', 'absent', 'enabled', 'disabled']),
            telephonenumber=dict(type='list', required=False),
            title=dict(type='str', required=False),
            ipa_prot=dict(type='str', required=False, default='https', choices=['http', 'https']),
            ipa_host=dict(type='str', required=False, default='ipa.example.com'),
            ipa_port=dict(type='int', required=False, default=443),
            ipa_user=dict(type='str', required=False, default='admin'),
            ipa_pass=dict(type='str', required=True, no_log=True),
            validate_certs=dict(type='bool', required=False, default=True),
        ),
        supports_check_mode=True,
    )

    client = UserIPAClient(module=module,
                           host=module.params['ipa_host'],
                           port=module.params['ipa_port'],
                           protocol=module.params['ipa_prot'])

    # If sshpubkey is defined as None then module.params['sshpubkey'] is [None].
    # IPA itself returns None (not a list), so replace a lone empty entry with
    # None; otherwise get_user_diff() would always report sshpubkey as changed.
    # BUG FIX: the original tested `sshpubkey[0] is ""` — identity comparison
    # with a string literal is unreliable (and a SyntaxWarning on modern
    # Pythons); use equality instead.
    if module.params['sshpubkey'] is not None:
        if len(module.params['sshpubkey']) == 1 and module.params['sshpubkey'][0] == '':
            module.params['sshpubkey'] = None

    try:
        client.login(username=module.params['ipa_user'],
                     password=module.params['ipa_pass'])
        changed, user = ensure(module, client)
        module.exit_json(changed=changed, user=user)
    except Exception:
        e = get_exception()
        module.fail_json(msg=str(e))
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.pycompat24 import get_exception
# Standard Ansible module entry-point guard.
if __name__ == '__main__':
    main()
|
64 Norman Ross Dr - Welcome Home!! This Fully Detached 5 Bedroom Home Sits On A Premium Sunny Corner Lot In One Of The Most Prestigious Neighbourhoods In Markham. Upgraded In All The Right Places; Including New Oak Stairs, Hardwood Flooring, Quartz Counter Tops, Freshly Painted And So Much More. This Corner Lot Is A Gardener's Dream!! Nurture The Beauty Of This Sun-Filled House With The Ideal Layout While Enjoying The Convenience Of Top-Ranked Schools, Transit, And Shopping. Hardwood Floors 25-Year Warranty (2017), 25-Yr Roof Shingles (2009), A/C & Furnace (2015), New SS Dishwasher And New SS Stove, 2 Fridges, Washer & Dryer, Central Vacuum And Attachments. New Attic Insulation!! Wood-Burning Fireplace!
|
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# automatically generated by the FlatBuffers compiler, do not modify
# namespace: tflite
import flatbuffers
from flatbuffers.compat import import_numpy
np = import_numpy()
class SignatureDef(object):
    """Flatbuffers read-only table accessor for a TFLite SignatureDef.

    Generated by the FlatBuffers compiler — do not hand-edit the logic.
    Fields are read lazily out of the serialized buffer via vtable offsets:
    inputs (slot 4) and outputs (slot 6) are vectors of TensorMap tables;
    method_name (slot 8) and key (slot 10) are strings.
    """
    __slots__ = ['_tab']

    @classmethod
    def GetRootAsSignatureDef(cls, buf, offset):
        # Read the root table offset and position an accessor over it.
        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
        x = SignatureDef()
        x.Init(buf, n + offset)
        return x

    @classmethod
    def SignatureDefBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
        # b"\x54\x46\x4C\x33" is ASCII "TFL3", the TFLite file identifier.
        return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed)

    # SignatureDef
    def Init(self, buf, pos):
        self._tab = flatbuffers.table.Table(buf, pos)

    # SignatureDef
    def Inputs(self, j):
        # Return the j-th input TensorMap, or None if the vector is absent.
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
        if o != 0:
            x = self._tab.Vector(o)
            x += flatbuffers.number_types.UOffsetTFlags.py_type(j) * 4
            x = self._tab.Indirect(x)
            from tflite.TensorMap import TensorMap
            obj = TensorMap()
            obj.Init(self._tab.Bytes, x)
            return obj
        return None

    # SignatureDef
    def InputsLength(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
        if o != 0:
            return self._tab.VectorLen(o)
        return 0

    # SignatureDef
    def InputsIsNone(self):
        # True when the inputs vector is absent from the buffer.
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
        return o == 0

    # SignatureDef
    def Outputs(self, j):
        # Return the j-th output TensorMap, or None if the vector is absent.
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
        if o != 0:
            x = self._tab.Vector(o)
            x += flatbuffers.number_types.UOffsetTFlags.py_type(j) * 4
            x = self._tab.Indirect(x)
            from tflite.TensorMap import TensorMap
            obj = TensorMap()
            obj.Init(self._tab.Bytes, x)
            return obj
        return None

    # SignatureDef
    def OutputsLength(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
        if o != 0:
            return self._tab.VectorLen(o)
        return 0

    # SignatureDef
    def OutputsIsNone(self):
        # True when the outputs vector is absent from the buffer.
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
        return o == 0

    # SignatureDef
    def MethodName(self):
        # String field; None when absent.
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8))
        if o != 0:
            return self._tab.String(o + self._tab.Pos)
        return None

    # SignatureDef
    def Key(self):
        # String field; None when absent.
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10))
        if o != 0:
            return self._tab.String(o + self._tab.Pos)
        return None
# Builder helpers generated by flatc for constructing a SignatureDef table.
def SignatureDefStart(builder): builder.StartObject(4) # begin a SignatureDef table with 4 fields
def SignatureDefAddInputs(builder, inputs): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(inputs), 0) # slot 0: inputs vector offset
def SignatureDefStartInputsVector(builder, numElems): return builder.StartVector(4, numElems, 4) # vector of 4-byte table offsets
def SignatureDefAddOutputs(builder, outputs): builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(outputs), 0) # slot 1: outputs vector offset
def SignatureDefStartOutputsVector(builder, numElems): return builder.StartVector(4, numElems, 4) # vector of 4-byte table offsets
def SignatureDefAddMethodName(builder, methodName): builder.PrependUOffsetTRelativeSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(methodName), 0) # slot 2: method_name string offset
def SignatureDefAddKey(builder, key): builder.PrependUOffsetTRelativeSlot(3, flatbuffers.number_types.UOffsetTFlags.py_type(key), 0) # slot 3: key string offset
def SignatureDefEnd(builder): return builder.EndObject() # finish the table; returns its offset
import tflite.TensorMap
try:
from typing import List
except:
pass
class SignatureDefT(object):
    """Mutable object-API counterpart of the SignatureDef flatbuffer table."""

    # SignatureDefT
    def __init__(self):
        self.inputs = None # type: List[tflite.TensorMap.TensorMapT]
        self.outputs = None # type: List[tflite.TensorMap.TensorMapT]
        self.methodName = None # type: str
        self.key = None # type: str

    @classmethod
    def InitFromBuf(cls, buf, pos):
        """Build a SignatureDefT directly from flatbuffer *buf* at *pos*."""
        signatureDef = SignatureDef()
        signatureDef.Init(buf, pos)
        return cls.InitFromObj(signatureDef)

    @classmethod
    def InitFromObj(cls, signatureDef):
        """Build a SignatureDefT from an existing SignatureDef accessor."""
        x = SignatureDefT()
        x._UnPack(signatureDef)
        return x

    # SignatureDefT
    def _UnPack(self, signatureDef):
        """Copy every field out of a SignatureDef accessor into this object."""
        if signatureDef is None:
            return
        # Absent vectors stay None; present-but-empty vectors become [].
        if not signatureDef.InputsIsNone():
            self.inputs = []
            for i in range(signatureDef.InputsLength()):
                if signatureDef.Inputs(i) is None:
                    self.inputs.append(None)
                else:
                    tensorMap_ = tflite.TensorMap.TensorMapT.InitFromObj(signatureDef.Inputs(i))
                    self.inputs.append(tensorMap_)
        if not signatureDef.OutputsIsNone():
            self.outputs = []
            for i in range(signatureDef.OutputsLength()):
                if signatureDef.Outputs(i) is None:
                    self.outputs.append(None)
                else:
                    tensorMap_ = tflite.TensorMap.TensorMapT.InitFromObj(signatureDef.Outputs(i))
                    self.outputs.append(tensorMap_)
        self.methodName = signatureDef.MethodName()
        self.key = signatureDef.Key()

    # SignatureDefT
    def Pack(self, builder):
        """Serialise this object into *builder* and return the table offset.

        Note: all vectors and strings are created BEFORE SignatureDefStart()
        is called, because flatbuffers forbids nested object construction --
        the statement order below is significant.
        """
        if self.inputs is not None:
            inputslist = []
            for i in range(len(self.inputs)):
                inputslist.append(self.inputs[i].Pack(builder))
            # Vectors are built back-to-front in flatbuffers.
            SignatureDefStartInputsVector(builder, len(self.inputs))
            for i in reversed(range(len(self.inputs))):
                builder.PrependUOffsetTRelative(inputslist[i])
            inputs = builder.EndVector(len(self.inputs))
        if self.outputs is not None:
            outputslist = []
            for i in range(len(self.outputs)):
                outputslist.append(self.outputs[i].Pack(builder))
            SignatureDefStartOutputsVector(builder, len(self.outputs))
            for i in reversed(range(len(self.outputs))):
                builder.PrependUOffsetTRelative(outputslist[i])
            outputs = builder.EndVector(len(self.outputs))
        if self.methodName is not None:
            methodName = builder.CreateString(self.methodName)
        if self.key is not None:
            key = builder.CreateString(self.key)
        SignatureDefStart(builder)
        if self.inputs is not None:
            SignatureDefAddInputs(builder, inputs)
        if self.outputs is not None:
            SignatureDefAddOutputs(builder, outputs)
        if self.methodName is not None:
            SignatureDefAddMethodName(builder, methodName)
        if self.key is not None:
            SignatureDefAddKey(builder, key)
        signatureDef = SignatureDefEnd(builder)
        return signatureDef
|
There are currently 6 homes available in Eagle Creek with an average price of $257,148. For availability or more information about homes for sale in Eagle Creek call Patsy Wilson at 904-501-9331 .
Eagle Creek is located in St Augustine FL, St. Johns County. There are currently 6 active homes for sale in the neighborhood, with an average price of $257,148. The number of homes for sale in Eagle Creek is always changing, so if you see a home you like and would like to schedule a time to take a look, please give me a call at 904-501-9331.
|
# -*- coding: utf-8 -*-
# Copyright (c) 2009 - Luca Invernizzi <invernizzi.l@gmail.com>
#
# This program is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program. If not, see <http://www.gnu.org/licenses/>.
import os
from gi.repository import Gtk
from threading import Timer
from GTG.tools.logger import Log
from GTG.tools.dates import Date
class pluginReaper:
    """GTG plugin that removes closed tasks older than a configurable age.

    Tasks can be purged manually through a "Delete old closed tasks" menu
    item, or automatically on a repeating daemon timer, depending on the
    user's preferences.
    """

    DEFAULT_PREFERENCES = {'max_days': 30,
                           'is_automatic': False,
                           'show_menu_item': True}
    PLUGIN_NAME = "task-reaper"
    # In case of automatic removing tasks, the time
    # between two runs of the cleaner function (seconds)
    TIME_BETWEEN_PURGES = 60 * 60

    def __init__(self):
        self.path = os.path.dirname(os.path.abspath(__file__))
        # GUI initialization
        self.builder = Gtk.Builder()
        # Build the .ui path from the plugin directory computed above.
        # (The previous code concatenated dirname(...) + "/reaper.ui" and then
        # passed the single string through os.path.join, which was a no-op.)
        self.builder.add_from_file(os.path.join(self.path, "reaper.ui"))
        self.preferences_dialog = self.builder.get_object("preferences_dialog")
        self.pref_chbox_show_menu_item = \
            self.builder.get_object("pref_chbox_show_menu_item")
        self.pref_chbox_is_automatic = \
            self.builder.get_object("pref_chbox_is_automatic")
        self.pref_spinbtn_max_days = \
            self.builder.get_object("pref_spinbtn_max_days")
        SIGNAL_CONNECTIONS_DIC = {
            "on_preferences_dialog_delete_event":
            self.on_preferences_cancel,
            "on_btn_preferences_cancel_clicked":
            self.on_preferences_cancel,
            "on_btn_preferences_ok_clicked":
            self.on_preferences_ok,
        }
        self.builder.connect_signals(SIGNAL_CONNECTIONS_DIC)
        self.menu_item = Gtk.MenuItem("Delete old closed tasks")
        self.menu_item.connect('activate', self.delete_old_closed_tasks)

    def activate(self, plugin_api):
        """Plugin entry point: load the preferences and apply them."""
        self.plugin_api = plugin_api
        # preferences initialization
        self.menu_item_is_shown = False
        self.is_automatic = False
        self.timer = None
        self.preferences_load()
        self.preferences_apply()

    def onTaskClosed(self, plugin_api):
        pass

    def onTaskOpened(self, plugin_api):
        pass

    def onQuit(self, plugin_api):
        """Stop any pending auto-purge timer on application exit."""
        if self.is_automatic is True:
            self.cancel_autopurge()

    def deactivate(self, plugin_api):
        """Undo everything activate()/preferences_apply() set up."""
        if self.is_automatic is True:
            self.cancel_autopurge()
        if self.menu_item_is_shown is True:
            plugin_api.remove_menu_item(self.menu_item)

    ## HELPER FUNCTIONS ###########################################################
    def __log(self, message):
        Log.debug(message)

    ## CORE FUNCTIONS #############################################################
    def schedule_autopurge(self):
        """Arm a daemon timer that purges old tasks after TIME_BETWEEN_PURGES."""
        self.timer = Timer(self.TIME_BETWEEN_PURGES,
                           self.delete_old_closed_tasks)
        # Attribute assignment instead of the deprecated setDaemon() call.
        self.timer.daemon = True
        self.timer.start()
        self.__log("Automatic deletion of old tasks scheduled")

    def cancel_autopurge(self):
        """Cancel the pending auto-purge timer, if one is armed."""
        if self.timer:
            self.__log("Automatic deletion of old tasks cancelled")
            self.timer.cancel()
            # Drop the reference so a cancelled timer is never reused.
            self.timer = None

    def delete_old_closed_tasks(self, widget=None):
        """Delete every closed task older than the max_days preference.

        Also used directly as the menu-item callback (hence *widget*).
        """
        self.__log("Starting deletion of old tasks")
        today = Date.today()
        max_days = self.preferences["max_days"]
        requester = self.plugin_api.get_requester()
        closed_tree = requester.get_tasks_tree(name='inactive')
        closed_tasks = [requester.get_task(tid) for tid in
                        closed_tree.get_all_nodes()]
        # NOTE(review): assumes every inactive task has a closed date --
        # confirm get_closed_date() can never return None here.
        to_remove = [t for t in closed_tasks
                     if (today - t.get_closed_date()).days > max_days]
        for task in to_remove:
            if requester.has_task(task.get_id()):
                requester.delete_task(task.get_id())
        # If automatic purging is on, schedule another run
        if self.is_automatic:
            self.schedule_autopurge()

    ## Preferences methods ########################################################
    def is_configurable(self):
        """A configurable plugin should have this method and return True"""
        return True

    def configure_dialog(self, manager_dialog):
        """Populate the preferences dialog from the stored values and show it."""
        self.preferences_load()
        self.preferences_dialog.set_transient_for(manager_dialog)
        self.pref_chbox_is_automatic.set_active(
            self.preferences["is_automatic"])
        self.pref_chbox_show_menu_item.set_active(
            self.preferences["show_menu_item"])
        self.pref_spinbtn_max_days.set_value(
            self.preferences["max_days"])
        self.preferences_dialog.show_all()

    def on_preferences_cancel(self, widget=None, data=None):
        """Hide the dialog without saving (also handles delete-event)."""
        self.preferences_dialog.hide()
        return True

    def on_preferences_ok(self, widget=None, data=None):
        """Read the widgets back into the preferences, apply and persist."""
        self.preferences["is_automatic"] = \
            self.pref_chbox_is_automatic.get_active()
        self.preferences["show_menu_item"] = \
            self.pref_chbox_show_menu_item.get_active()
        # get_value() returns a float; store an int to stay consistent with
        # the DEFAULT_PREFERENCES type.
        self.preferences["max_days"] = \
            int(self.pref_spinbtn_max_days.get_value())
        self.preferences_apply()
        self.preferences_store()
        self.preferences_dialog.hide()

    def preferences_load(self):
        """Load stored preferences, falling back to DEFAULT_PREFERENCES."""
        self.preferences = self.plugin_api.load_configuration_object(
            self.PLUGIN_NAME, "preferences",
            default_values=self.DEFAULT_PREFERENCES)

    def preferences_store(self):
        self.plugin_api.save_configuration_object(self.PLUGIN_NAME,
                                                  "preferences",
                                                  self.preferences)

    def preferences_apply(self):
        """Synchronise the menu item and the auto-purge timer with the
        current preferences."""
        # Showing the GUI
        if self.preferences['show_menu_item'] is True and \
                self.menu_item_is_shown is False:
            self.plugin_api.add_menu_item(self.menu_item)
            self.menu_item_is_shown = True
        elif self.preferences['show_menu_item'] is False and \
                self.menu_item_is_shown is True:
            self.plugin_api.remove_menu_item(self.menu_item)
            self.menu_item_is_shown = False
        # Auto-purge
        if self.preferences['is_automatic'] is True and \
                self.is_automatic is False:
            self.is_automatic = True
            # Run the first iteration immediately and schedule next iteration
            self.delete_old_closed_tasks()
        elif self.preferences['is_automatic'] is False and \
                self.is_automatic is True:
            self.cancel_autopurge()
            self.is_automatic = False
|
MHRP's acute cohorts provide insight into crucial stages of early HIV infection.
The Joint West Africa Research Group (JWARG) is a collaborative initiative to leverage existing research platforms and relationships to improve biopreparedness in the region.
The Walter Reed Army Institute of Research (WRAIR) is a leader in global efforts against the world’s most pervasive and high impact infectious diseases, such as malaria, HIV/AIDS, Ebola, and dengue.
Learn more about how MHRP is leading the battle against HIV.
For over a century, the military medical community has solved significant international health problems. Cutting edge vaccine development continues today in HIV, malaria, dengue and enteric diseases.
|
# -*- coding: utf-8 -*-
import re
import codecs
# Map from tone number to tone-marked vowels.  Index i of each string marks
# the vowel at the same index of the tone-0 string "aoeiuv\u00fc"; 'v' and
# '\u00fc' share one marked form, hence the duplicated final character.
PinyinToneMark = {
    0: u"aoeiuv\u00fc",
    1: u"\u0101\u014d\u0113\u012b\u016b\u01d6\u01d6",
    2: u"\u00e1\u00f3\u00e9\u00ed\u00fa\u01d8\u01d8",
    3: u"\u01ce\u01d2\u011b\u01d0\u01d4\u01da\u01da",
    4: u"\u00e0\u00f2\u00e8\u00ec\u00f9\u01dc\u01dc",
}


def decode(s):
    """
    Converts text in the numbering format of pinyin ("ni3hao3") to text with the
    appropriate tone marks ("nǐhǎo").

    Letters accumulate into the current syllable; "u:" is rewritten to ü; a
    digit 0-5 closes the syllable and places the tone mark (5 and 0 mean no
    mark).  Tone placement: a single vowel is marked directly; in clusters,
    a/o/e take the mark, otherwise the second vowel of "ui"/"iu".
    """
    # Compile once per call, outside the per-character loop.
    vowels = re.compile(u"[aoeiuv\u00fc]+")
    s = s.lower()
    r = ""
    t = ""
    for c in s:
        if c >= 'a' and c <= 'z':
            t += c
        elif c == ':':
            # "u:" is the ASCII spelling of u-umlaut.
            try:
                if t[-1] == 'u':
                    t = t[:-1] + u"\u00fc"
            except IndexError:
                # ':' with no pending syllable: ignore it.
                pass
        else:
            if c >= '0' and c <= '5':
                tone = int(c) % 5  # tone 5 (neutral) behaves like 0: no mark
                if tone != 0:
                    m = vowels.search(t)
                    if m is None:
                        # No vowel to mark; keep the digit literally.
                        t += c
                    elif len(m.group(0)) == 1:
                        # Single vowel: mark it directly.
                        t = t[:m.start(0)] + PinyinToneMark[tone][PinyinToneMark[0].index(m.group(0))] + t[m.end(0):]
                    else:
                        # Vowel cluster: a/o/e take the mark, otherwise the
                        # second vowel of "ui"/"iu".
                        if 'a' in t:
                            t = t.replace("a", PinyinToneMark[tone][0])
                        elif 'o' in t:
                            t = t.replace("o", PinyinToneMark[tone][1])
                        elif 'e' in t:
                            t = t.replace("e", PinyinToneMark[tone][2])
                        elif t.endswith("ui"):
                            t = t.replace("i", PinyinToneMark[tone][3])
                        elif t.endswith("iu"):
                            t = t.replace("u", PinyinToneMark[tone][4])
            else:
                # NOTE(review): any other character (space, punctuation) is
                # replaced by "!" -- presumably a deliberate invalid-input
                # marker; confirm before relying on round-tripping text.
                t += "!"
            r += t
            t = ""
    r += t
    return r
|
Our mission is to help you convince your customers with stunning visuals. To help start that process today, here are 5 free tips to help you improve your renders today!
To make a gorgeous render nothing is as important as detail. The more detail you are able to embed in your render, the more interesting and appealing your render will be. Small logos, markings and imperfections can help tremendously in creating beautiful visualisations.
To add realism to your render, fillets are the name of the game. This is due to the fact that truly sharp corners do not exist in the physical world. Look around you, every corner you can find is rounded to some degree. This makes fillets especially important in photo-realistic renders. Small slots and grooves are also features that can be found in many products. These features can be used to increase the realism in your renders.
Due to manufacturing processes, every product around you has a different texture. Adding textures to the products in your renders can increase the realism and appeal of the render. By combining textures on smaller and larger scales, very complex-looking textures can be created with relative ease.
By playing with lighting and reflections, one can create very aesthetic images that draw your attention. By introducing imperfections in your materials and product, the image will seem more life-like and be more convincing to your customers.
One of the benefits of renders is the fact that the object or product you want to show doesn’t necessarily have to exist. This enables you to perk up your render with 49 diamonds that you do not possess.
See our Portfolio to see these tips being applied to improve visuals for our customers. Or contact us to discuss how we can help you convince your customers with stunning visuals.
|
# -*- coding: utf-8 -*-
import threading
from hbconfig import Config
from ..background import schedule
from ..functions import FunctionRunner
from ..nlp.ner import NamedEntitiyRecognizer
from ..notifier.scheduler import Scheduler
from ..slack.resource import MsgResource
from ..slack.slackbot import SlackerAdapter
from ..utils.data_handler import DataHandler
from ..utils.logger import Logger
class Worker(object):
    """Schedules and runs the bot's background jobs.

    Jobs come from two sources: the user's personal profile (wake up /
    work start / work end / bedtime messages, feed polling, health checks)
    and the custom entries in ``schedule.json``.  A natural-language
    request held in ``self.input`` can also be turned into a scheduled job
    via :meth:`create`.
    """

    def __init__(self, text=None, slackbot=None):
        # Raw natural-language request; parsed by create() with NER.
        self.input = text
        self.data_handler = DataHandler()
        self.logger = Logger().get_logger()
        self.ner = NamedEntitiyRecognizer()
        self.function_runner = FunctionRunner().load_function
        if slackbot is None:
            self.slackbot = SlackerAdapter()
        else:
            self.slackbot = slackbot
        if Config.profile.personal:
            # Imported lazily: only personal deployments carry a profile.
            from ..utils.profile import Profile
            self.profile = Profile()
        else:
            self.profile = None

    def create(self):
        """Parse self.input with NER and register the described schedule."""
        ner_dict = {
            k: self.ner.parse(v, self.input) for k, v in self.ner.schedule.items()
        }
        # day_of_week and time_unit may match several tokens, so re-parse
        # them with get_all=True and overwrite the single-match entries.
        day_of_week = self.ner.parse(
            self.ner.schedule["day_of_week"], self.input, get_all=True
        )
        ner_dict["day_of_week"] = day_of_week
        time_unit = self.ner.parse(
            self.ner.schedule["time_unit"], self.input, get_all=True
        )
        ner_dict["time_unit"] = time_unit
        skill_keywords = {k: v["keyword"] for k, v in self.ner.skills.items()}
        func_name = self.ner.parse(skill_keywords, self.input)
        ner_dict["skills"] = func_name
        params = {k: self.ner.parse(v, self.input) for k, v in self.ner.params.items()}
        ner_dict["params"] = params
        Scheduler().create_with_ner(**ner_dict)

    def run(self, init=False):
        """Register all jobs and start the scheduler (no-op when running)."""
        if self.is_running():
            self.logger.info("Already running.")
            return
        self.set_schedules()
        schedule.run_continuously(interval=1)
        if not init:
            self.slackbot.send_message(text=MsgResource.WORKER_START)

    def is_running(self):
        """Return True when at least one job is registered."""
        return len(schedule.jobs) > 0

    def set_schedules(self):
        """Register profile-based jobs (if a profile exists) and custom jobs."""
        if self.profile:
            self.__set_profile_schedule()
        self.__set_custom_schedule()

    def __set_profile_schedule(self):
        """Register the fixed daily jobs derived from the user's profile."""
        self.__excute_profile_schedule(
            self.profile.get_schedule("WAKE_UP"),
            False,
            "good_morning",
            {},
            True,
        )
        self.__excute_profile_schedule(
            self.profile.get_schedule("WORK_START"),
            False,
            "send_message",
            {"text": MsgResource.PROFILE_WORK_START},
            True,
        )
        self.__excute_profile_schedule(
            self.profile.get_schedule("WORK_END"),
            False,
            "send_message",
            {"text": MsgResource.PROFILE_WORK_END},
            True,
        )
        self.__excute_profile_schedule(
            self.profile.get_schedule("GO_TO_BED"),
            False,
            "good_night",
            {},
            False,
        )
        # Toggl Tasks <-> Activity Tasks Sync
        self.__excute_profile_schedule(
            "23:55",
            False,
            "activity_task_sync",
            {},
            False,
        )
        # slack presence issue
        # self.__excute_profile_schedule(
        #     self.profile.get_schedule('CHECK_GO_TO_BED'), False,
        #     'check_go_to_bed', {}, False)
        interval = Config.profile.feed.INTERVAL
        self.__excute_feed_schedule(interval)
        self.__excute_health_check()

    def __excute_profile_schedule(self, time, repeat, func_name, params, not_holiday):
        """Register one daily job at *time* running *func_name* in a thread."""
        schedule.every().day.at(time).do(
            self.__run_threaded,
            self.function_runner,
            {
                "repeat": repeat,
                "func_name": func_name,
                "params": params,
                "day_of_week": [0],
                "not_holiday": not_holiday,
            },
        )

    def __excute_feed_schedule(self, interval):
        """Poll the feeds every *interval* minutes."""
        schedule.every(interval).minutes.do(
            self.__run_threaded,
            self.function_runner,
            {
                "repeat": True,
                "func_name": "feed_notify",
                "params": {},
                "day_of_week": [0],
                "not_holiday": False,
            },
        )

    def __excute_health_check(self):
        """Run the health check every 30 minutes."""
        schedule.every(30).minutes.do(
            self.__run_threaded,
            self.function_runner,
            {
                "repeat": True,
                "func_name": "health_check",
                "params": {},
                "day_of_week": [0],
                "not_holiday": False,
            },
        )

    def __set_custom_schedule(self):
        """Register the jobs described in schedule.json.

        "alarm" entries either fire once at a fixed time ("time") or repeat
        within a window defined in the "between" section ("between_id").
        """
        schedule_fname = "schedule.json"
        schedule_data = self.data_handler.read_file(schedule_fname)
        alarm_data = schedule_data.get("alarm", {})
        between_data = schedule_data.get("between", {})
        for _, v in alarm_data.items():
            if not isinstance(v, dict):
                # Skip scalar bookkeeping entries mixed into the section.
                continue
            day_of_week = v.get("day_of_week", [0])
            if "time" in v:
                time = v["time"]
                param = {
                    # Do only once
                    "repeat": False,
                    "func_name": v["f_name"],
                    "day_of_week": day_of_week,
                    "params": v.get("f_params", {}),
                }
                try:
                    schedule.every().day.at(time).do(
                        self.__run_threaded, self.function_runner, param
                    )
                except Exception as e:
                    print("Function Schedule Error: ", e)
                    self.slackbot.send_message(text=MsgResource.ERROR)
            if "between_id" in v:
                between = between_data[v["between_id"]]
                start_time, end_time = self.__time_interval2start_end(
                    between["time_interval"]
                )
                # Repeat
                period = v["period"].split(" ")
                number = int(period[0])
                datetime_unit = self.__replace_datetime_unit_ko2en(period[1])
                param = {
                    "start_time": start_time,
                    "end_time": end_time,
                    "repeat": True,
                    "day_of_week": day_of_week,
                    "func_name": v["f_name"],
                    "params": v.get("f_params", {}),
                }
                try:
                    getattr(schedule.every(number), datetime_unit).do(
                        self.__run_threaded, self.function_runner, param
                    )
                except Exception as e:
                    # BUG FIX: "Error: " + e raised TypeError (str + Exception);
                    # pass the exception as a separate print() argument instead.
                    print("Error: ", e)

    def __replace_datetime_unit_ko2en(self, datetime_unit):
        """Translate a Korean time-unit token to its schedule attribute name."""
        ko2en_dict = {"초": "seconds", "분": "minutes", "시": "hours", "시간": "hours"}
        if datetime_unit in ko2en_dict:
            return ko2en_dict[datetime_unit]
        return datetime_unit

    def __time_interval2start_end(self, time_interval):
        """Split "HH:MM~HH:MM" into ((h, m), (h, m)) integer tuples.

        A value without "~" is passed through unchanged with end_time None.
        """
        if "~" in time_interval:
            parts = time_interval.split("~")
            start_time = tuple(int(x) for x in parts[0].split(":"))
            end_time = tuple(int(x) for x in parts[1].split(":"))
        else:
            start_time = time_interval
            end_time = None
        return start_time, end_time

    def __run_threaded(self, job_func, param):
        """Run *job_func* in a daemon-less worker thread with **param."""
        job_thread = threading.Thread(target=job_func, kwargs=param)
        job_thread.start()

    def stop(self, init=False):
        """Clear every registered job and announce the stop."""
        schedule.clear()
        if not init:
            self.slackbot.send_message(text=MsgResource.WORKER_STOP)
|
Dmitry Leontiev, Ph.D., Dr. Sc., is Head of the International laboratory of positive psychology of motivation and personality at Higher School of Economics, Moscow, Russia, Professor of psychology, Lomonosov Moscow State University, the founder and the head of Institute of Existential Psychology and Life Enhancement (EXPLIEN) in Moscow. He strives to integrate the existentialist approach to human personality with cultural-historical activity theory approach and synergetic views on human self-regulation and self-organization. Author of numerous publications in psychology of personality and motivation, theory and history of psychology, psychology of art and empirical aesthetics. Both his Ph.D. and habilitation works were focused on the problem of personal meaning. Dmitry is a recipient of the Promotional Award of Viktor Frankl Foundation of the city of Vienna (2004), honorary member of Society for Logotherapy and Existential Analysis by Viktor Frankl Institute (Vienna).
Interested in more from Dr. Leontiev? Find out more here.
The idea of PP2 as an integrative framework (Wong, 2009; 2010) signifies the striving to bring together the highlights of humanistic, existential, and positive psychology, namely the inherently creative and developing human nature (HP), the role of human freedom, agency, and effort (EP), and the conditions of well-being and the life worth living (PP). However, as everyone engaged in the empirical studies in the field knows, all good things covariate, and the key problem of our conclusions is the one of discriminant validity. A developmental view (Leontiev, 2006) suggests that the key moderator of the processes of positive development toward psychological maturity is the investment of goal-directed efforts to one’s own development (“the labour of personality” (P. Janet) or “cultivation” (M. Csikszentmihalyi)).
The relevant domain can be called “height psychology.” This term has been coined independently by two great thinkers: Viktor Frankl in his papers of the late 1930s and Lev Vygotsky in his manuscripts of early 1930 published only in 1980s. Both wrote that what was badly needed is height psychology opposed to both surface (i.e. behavioristic – D.L.) psychology and depth psychology. Frankl identified height psychology with his existential analysis, and Vygotsky with his cultural-historical psychology of higher mental functions and deliberate actions. Both viewed human person in terms of multi-level organization, where lower levels are fully causally determined by uncontrollable physiological and psychological mechanisms, while by virtue of higher levels human person may master one’s own behavior.
The ideas of multilevel psychological structure of human self-regulation has recently found reincarnation in dual-system psychological models (“hot” and “cool” systems – Metcalfe, Mischel, 1999; “reflexive” and “reflective” systems – Carver a.o., 2008; “fast” and “slow” systems – Kahneman, 2011). It follows that the person’s developing capacity to take control over one’s own development and well-being and to invest oneself to these processes should be treated as the central issue of the emerging PP2.
Distinguishing involuntary and voluntary (self-controlled) processes and systems in human self-regulation.
Dr. Piers Worth, Ph.D. is a ‘Reader’ (Associate Professor) at Bucks New University. He is a Charted Psychologist and accredited psychotherapist. Piers’ PhD research focused on how creativity changes as we age, and how it may support positive ageing. Piers wrote and launched the University’s MSc Applied Positive Psychology programme which is now in its fourth year. He co-authored the ‘Second Wave Positive Psychology: Embracing the Dark Side of Life’ book that was published in November 2015. Piers is of the view that positive psychology and ‘second wave positive psychology’ perspectives combined with it represent hugely exciting opportunities for teaching and researching psychology.
His research and writing focus at the present time is on subjects and applications that may broaden the base of positive psychology, such as restorative justice, and the training of medical staff specialists in different disciplines. Piers’ teaching is exploring how theories of Carl Rogers support and even amplify some of the affects found in learning positive psychology; and how appreciative inquiry may be taught and used with positive psychology. Prior to this part of his career, Piers worked for over 35 years in industry and blue chip companies, and for 20 years as an organisation development consultant. He has work and project experience in 17 countries.
Interested in more from Dr. Worth? Find out more here.
The Bucks New University MSc Applied Positive Psychology programme works on the basis that our students are on a personal and professional journey which influences their study goals. We support them in choosing assignment subjects related to their goals. They explore existing theory and research, often questioning, challenging and extending current ideas in the discipline.
This presentation will summarise contributions from students over a two year period that relate to the thinking and development of ‘second wave’ positive psychology, and implicitly the psychology of meaning. The students offer us insightful and often moving experiences for our discipline that have the potential to add to existing thinking and practice. They add nuance in such areas as culture and gender to what we may anticipate from descriptions of research-based ‘positive psychology interventions’.
New perspectives on Happiness, well-being and flourishing gained from MAPP student work.
New perspectives on Strengths based theory and development gained from MAPP student work.
New perspectives on the psychology of hope and resilience gained from MAPP student work.
Implications or contributions to ‘positive psychology in practice’.
Considerable research has investigated how individuals develop and maintain an ego-identity that fosters positive character development. Many researchers assume that having a stable commitment to one’s identity is crucial for maturity and integrity. Less credence is paid to the idea that having a flexible and evolving identity is necessary for healthy adaptation to life’s challenges. Yet, individuals with flexible identities must possess particular ways of processing new information about themselves and the world that ultimately leads to greater self-knowledge and wisdom. In this presentation, I will argue that identity flexibility is possible with the use of a growth-oriented identity style, one that is oriented toward the growth of integrated/wise self-knowledge. In addition, I will present a working theory and supporting evidence that the growth-oriented style predicts wisdom byway of identity processes that are contemplative in nature.
This project describes the development of a 12-session manual for middle school students, aged 12-15 who attend McLean Hospital’s Program in Education, Afterschool & Resiliency (PEAR). The program is based on the Clover model, which focuses on four elements necessary for youth to thrive: Active Engagement, Assertiveness, Belonging and Reflections. This project details the creation of the new Reflection “leaf” of the Clover model.
The approach that this manual highlights will be especially useful for adolescents with well-developed skills in analysis, insight, observation and understanding, but who are at risk for rumination and over-analysis. The goal of this group curriculum is to amplify the participant’s well-honed ability for self-reflection and wed it with self-compassion. Positive psychology research indicates that this is likely to increase resiliency by strengthening protective factors such as optimism and social connection. Increasing one’s self-compassion will not only be used to cultivate self-reflection, but it is also expected to increase optimism and connectedness. This poster presentation outlines the overall context and specific steps of the manual, and includes useful exercises that are practical and applicable to clinical practice.
This presentation demonstrates that inclusion of negative emotions and dire circumstances in qualitative PP studies illumines practical paths to the “good life.” Perceiving the impact of happy childhood memories as a search for meaning, three exemplars portray positive-negative interactions that are paradoxical, symbiotic, and convoluted. Virtue-oriented activities finally culminate in self-transcendent happiness with collective benefits, appropriate for each person’s context, culture, and age. Abstract “lessons” also embellish exemplars’ meanings.
Post-traumatic growth (PTG) refers to the transformative process that can lead to positive changes after dealing with a traumatic event. Furthermore, different traumas have been found to lead to differing processes and outcomes from each other. This study is expanding on the Hefferon, Grealy and Mutrie (2009) study in which they discovered that the PTG experience among people with life threatening illnesses has unique, corporeal features. The experience of being severely injured has elements unique from other traumas due to its direct and substantial impact to the body. This investigation aims to advance the knowledge of the corporeal elements of PTG and PTG in general. This study reviewed and synthesised the qualitative data on the experience of post-traumatic growth in people with severe physical injury. Thirteen journal articles – published before 1st of September 2014 in PsychINFO, SPORTDiscus, CINAHLPlus and Academic Search Complete- were reviewed. Key themes included: Existential reflection; Humanity; Meaningful leisure engagement; and New abilities: awareness of physiological and psychological potential. Findings support that there are unique elements to the PTG experience in people with severe injuries. Additional research is needed to understand the trauma experiences among different injuries and the overall processes and outcomes of severe injury related trauma.
Cynthia L. S. Pury ,is a Professor of Psychology at Clemson University. She is the author of numerous papers on courage as well as the editor (with Shane Lopez) of the book The Psychology of Courage: Modern Research on an Ancient Virtue, published in 2010 by the American Psychological Association. She is also an Associate Editor for the Journal of Positive Psychology.
Although psychologists have frequently defined courage as “acting despite fear”, research into both the process of taking a courageous action and the labeling of an action taken by others as courageous argues otherwise. In this session, I will review the past decade of research into courage and argue that courage depends as much or more on the meaning of the goal being pursued as it does on the risk posed to the actor (and, parenthetically, that the subjective sense of risk to the actor matters more than the fear that they feel).
Session attendees should be able to identify the three main components of courage (intentional action, causing personal risk to the actor, in pursuit of a noble and worthwhile goal) as well as describe the ways that both the goals and risks of a courageous action relate to meaning.
Melissa Weinberg is Senior Research Fellow at the Young and Well CRC and an Honorary Fellow of the Australian Centre of Quality of Life at Deakin University. She completed her Ph.D., entitled “Subjective Wellbeing in Australian Families of Holocaust Survivors” in 2001 and became the Principal Research Fellow of the Australian Unity Wellbeing Index, a project that has monitored the wellbeing of Australians for over 15 years. She is a TEDx speaker and has appeared on various TV and radio programs where she discusses the science of happiness and what makes Australians happy.
The theory of subjective wellbeing (SWB) homeostasis proposes that life satisfaction is maintained within a healthy normal range by an inbuilt psychological control system. When adversity is encountered, the system is engaged to respond, and the availability of internal and external resources to defend against threat is critical to recovery. However, in the face of chronic and persistent exposure to challenge, the operation of this system, and regular maintenance of wellbeing is compromised (Cummins, 2010). This framework is grounded in rigorous empirical evidence, including 15 years of research in Australia, and estimation of individual set-points for wellbeing based on longitudinal monitoring (Cummins, Li, Wooden, & Stokes, 2014).
The growing focus on wellbeing research may be partially attributed to the field of positive psychology, but the history and science of SWB precedes this attention. Further, SWB homeostasis theory has implications for the measurement of resilience, which requires exposure to adversity and subsequent demonstration of effective coping. With reference to a series of recent empirical studies (e.g., Weinberg, Heath, & Tomyn, 2015; Tomyn, Weinberg, & Cummins, 2015; Weinberg & Tomyn, 2015), this presentation reveals why many positive psychology interventions that aim to improve resilience and wellbeing are inherently designed to fail. Considerations and implications are presented in the context of Positive Psychology 2.0, and will advocate for a dialectical approach to the theory and measurement of wellbeing.
Suffering is a universal and inevitable aspect of our human existence. We live in a problematic world where death, tragic events, and losses are sources of our inferior feelings and may threaten our wholeness, alienating us from the society and ourselves. Inherent in our suffering is a deep feeling of isolation and a crisis of meaning. Understanding about and relief from our pain and suffering call for the spiritual values that are traditionally not addressed in psychology. In psychology, we must ask such questions as: What are the causes of suffering? How could these conditions be eliminated? How do we differ in our responses to suffering? What are the therapeutic values of suffering? What could deliver us from vulnerability? What is the meaning of suffering and under what conditions can it make us more human?
|
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
import sys
import azure.cli.core.telemetry as telemetry
from knack.util import CLIError
from knack.log import get_logger
logger = get_logger(__name__)
# pylint: disable=unnecessary-pass
# Error types in AzureCLI are from different sources, and there are many general error types like CLIError, AzureError.
# Besides, many error types with different names are actually showing the same kind of error.
# For example, CloudError, CLIError and ValidationError all could be a resource-not-found error.
# Therefore, here we define the new error classes to map and categorize all of the error types from different sources.
# region: Base Layer
# Base class for all the AzureCLI defined error classes.
class AzCLIError(CLIError):
    """ Base class for all the AzureCLI defined error classes.
    DO NOT raise this error class in your codes. """
    def __init__(self, error_msg, recommendation=None):
        # error message (also passed to CLIError, so str(error) shows it)
        self.error_msg = error_msg
        # manual recommendations provided based on developers' knowledge
        self.recommendations = []
        self.set_recommendation(recommendation)
        # AI recommendations provided by Aladdin service, with tuple form: (recommendation, description)
        self.aladdin_recommendations = []
        # exception trace for the error; set via set_exception_trace()
        self.exception_trace = None
        super().__init__(error_msg)
    def set_recommendation(self, recommendation):
        """ Set manual recommendations for the error.
        Command module or extension authors could call this method to provide recommendations,
        the recommendations will be printed after the error message, one recommendation per line.
        Accepts a single string or a list of strings; any other type is silently ignored.
        """
        if isinstance(recommendation, str):
            self.recommendations.append(recommendation)
        elif isinstance(recommendation, list):
            self.recommendations.extend(recommendation)
    def set_aladdin_recommendation(self, recommendations):
        """ Set aladdin recommendations for the error.
        One item should be a tuple with the form: (recommendation, description)
        """
        self.aladdin_recommendations.extend(recommendations)
    def set_exception_trace(self, exception_trace):
        # Store the traceback; print_error() logs it, and ClientError.send_telemetry()
        # forwards it to telemetry.
        self.exception_trace = exception_trace
    def print_error(self):
        from azure.cli.core.azlogging import CommandLoggerContext
        from azure.cli.core.style import print_styled_text
        with CommandLoggerContext(logger):
            # print error message
            logger.error(self.error_msg)
            # print exception trace if there is
            if self.exception_trace:
                logger.exception(self.exception_trace)
            # print manual recommendations to stderr, one per line
            if self.recommendations:
                for recommendation in self.recommendations:
                    print(recommendation, file=sys.stderr)
            # Aladdin (AI) recommendations are printed with styling after a header line.
            if self.aladdin_recommendations:
                print('\nTRY THIS:', file=sys.stderr)
                for recommendation, description in self.aladdin_recommendations:
                    print_styled_text(recommendation, file=sys.stderr)
                    print_styled_text(description, file=sys.stderr)
    def send_telemetry(self):
        # Base behavior: record only the concrete error class name.
        # Subclasses additionally record severity (user fault vs. failure).
        telemetry.set_error_type(self.__class__.__name__)
# endregion
# region: Second Layer
# Main categories of the AzureCLI error types, used for Telemetry analysis
class UserFault(AzCLIError):
    """ Users should be responsible for the errors.
    DO NOT raise this error class in your codes. """
    def send_telemetry(self):
        # Record the class name (base behavior), then classify the record as a
        # user fault via telemetry.set_user_fault.
        super().send_telemetry()
        telemetry.set_user_fault(self.error_msg)
class ServiceError(AzCLIError):
    """ Azure Services should be responsible for the errors.
    DO NOT raise this error class in your codes. """
    def send_telemetry(self):
        # Classified as a failure (telemetry.set_failure), not a user fault.
        super().send_telemetry()
        telemetry.set_failure(self.error_msg)
class ClientError(AzCLIError):
    """ AzureCLI should be responsible for the errors.
    DO NOT raise this error class in your codes. """
    def send_telemetry(self):
        # Classified as a failure; additionally forwards the stored exception
        # trace (if any) so CLI-side bugs can be diagnosed from telemetry.
        super().send_telemetry()
        telemetry.set_failure(self.error_msg)
        if self.exception_trace:
            telemetry.set_exception(self.exception_trace, '')
class UnknownError(AzCLIError):
    """ Unclear errors, could not know who should be responsible for the errors.
    DO NOT raise this error class in your codes. """
    def send_telemetry(self):
        # Treated as a failure by default since responsibility is unclear.
        super().send_telemetry()
        telemetry.set_failure(self.error_msg)
# endregion
# region: Third Layer
# Specific categories of the AzureCLI error types
# Raise the error classes here in your codes. Avoid using fallback error classes unless you can not find a proper one.
# Command related error types
# (the `pass` bodies are kept deliberately; see the module-level
#  `# pylint: disable=unnecessary-pass` pragma)
class CommandNotFoundError(UserFault):
    """ Command is misspelled or not recognized by AzureCLI. """
    pass
# Argument related error types
class UnrecognizedArgumentError(UserFault):
    """ Argument is misspelled or not recognized by AzureCLI. """
    pass
class RequiredArgumentMissingError(UserFault):
    """ Required argument is not specified. """
    pass
class MutuallyExclusiveArgumentError(UserFault):
    """ Arguments can not be specified together. """
    pass
class InvalidArgumentValueError(UserFault):
    """ Argument value is not valid. """
    pass
class ArgumentUsageError(UserFault):
    """ Fallback of the argument usage related errors.
    Avoid using this class unless the error can not be classified
    into the Argument related specific error types. """
    pass
# Response related error types, mapped from the HTTP status code of the
# service response.
class BadRequestError(UserFault):
    """ Bad request from client: 400 error """
    pass
class UnauthorizedError(UserFault):
    """ Unauthorized request: 401 error """
    pass
class ForbiddenError(UserFault):
    """ Service refuse to response: 403 error """
    pass
class ResourceNotFoundError(UserFault):
    """ Can not find Azure resources: 404 error """
    pass
class AzureInternalError(ServiceError):
    """ Azure service internal error: 5xx error """
    pass
class AzureResponseError(UserFault):
    """ Fallback of the response related errors.
    Avoid using this class unless the error can not be classified
    into the Response related specific error types. """
    pass
# Request related error types (error occurs before any response is received)
class AzureConnectionError(UserFault):
    """ Connection issues like connection timeout, aborted or broken. """
    pass
class ClientRequestError(UserFault):
    """ Fallback of the request related errors. Error occurs while attempting
    to make a request to the service. No request is sent.
    Avoid using this class unless the error can not be classified
    into the Request related specific errors types. """
    pass
# File operation related error types
class FileOperationError(UserFault):
    """ For file or directory operation related errors. """
    pass
# Keyboard interrupt error type
class ManualInterrupt(UserFault):
    """ Keyboard interrupt. """
    pass
class NoTTYError(UserFault):
    """ No tty available for prompt. """
    pass
# ARM template related error types
class InvalidTemplateError(UserFault):
    """ ARM template validation fails. It could be caused by incorrect template files or parameters """
    pass
class DeploymentError(UserFault):
    """ ARM template deployment fails. Template file is valid, and error occurs in deployment. """
    pass
# Validation related error types
class ValidationError(UserFault):
    """ Fallback of the errors in validation functions.
    Avoid using this class unless the error can not be classified into
    the Argument, Request and Response related specific error types. """
    pass
class UnclassifiedUserFault(UserFault):
    """ Fallback of the UserFault related error types.
    Avoid using this class unless the error can not be classified into
    the UserFault related specific error types.
    """
    pass
# CLI internal error type
class CLIInternalError(ClientError):
    """ AzureCLI internal error """
    pass
# Client error for az next
class RecommendationError(ClientError):
    """ The client error raised by `az next`. It is needed in `az next` to skip error records. """
    pass
class AuthenticationError(ServiceError):
    """ Raised when AAD authentication fails. """
# endregion
|
Are you looking for the best prices on appliance repair in Felton, California? SortFix can help you save time and money on Felton appliance repair. When you use SortFix to hire an appliance repair contractor in Felton, you know that you can expect to pay between $157 and $221.
The average cost for appliance repair in Felton is $198.
When you hire a Felton appliance repair contractor from SortFix you know that you'll be working with a fully licensed and insured, highly rated contractor. SortFix makes it easy to compare appliance repair costs in Felton from a few of the top local contractors. Fill out the quote request to see how much you can save on Felton appliance repair.
|
import tensorflow as tf
from networks.network import Network
class caffenet(Network):
    """CaffeNet (AlexNet-style) backbone with an RoI-pooling head.

    Heads: 174-way subclass scores/probabilities, 4-way class
    scores/probabilities derived from the subclass scores, and 16
    bbox-regression outputs (4 classes x 4 coordinates).
    """
    def __init__(self, trainable=True):
        self.inputs = []
        # Input image batch: (N, H, W, 3), float32.
        self.data = tf.placeholder(tf.float32, shape=[None, None, None, 3])
        # Regions of interest: (R, 5). By Fast R-CNN convention each row is
        # [image_index, x1, y1, x2, y2] -- TODO confirm against roi_pool impl.
        self.rois = tf.placeholder(tf.float32, shape=[None, 5])
        # Dropout keep probability (scalar), fed at run time.
        self.keep_prob = tf.placeholder(tf.float32)
        self.layers = dict({'data': self.data, 'rois': self.rois})
        self.trainable = trainable
        self.setup()
    def setup(self):
        # Convolutional backbone (CaffeNet layout); conv1 weights are frozen.
        (self.feed('data')
             .conv(11, 11, 96, 4, 4, padding='VALID', name='conv1', trainable=False)
             .max_pool(3, 3, 2, 2, padding='VALID', name='pool1')
             .lrn(2, 2e-05, 0.75, name='norm1')
             .conv(5, 5, 256, 1, 1, group=2, name='conv2')
             .max_pool(3, 3, 2, 2, padding='VALID', name='pool2')
             .lrn(2, 2e-05, 0.75, name='norm2')
             .conv(3, 3, 384, 1, 1, name='conv3')
             .conv(3, 3, 384, 1, 1, group=2, name='conv4')
             .conv(3, 3, 256, 1, 1, group=2, name='conv5')
             .feature_extrapolating([1.0, 2.0, 3.0, 4.0], 4, 4, name='conv5_feature'))
        # NOTE(fix): removed an unfinished, dead chain that fed
        # ('conv5_feature', 'im_info') into a half-written `.conv(3, 3, )` call.
        # 'im_info' is never registered in self.layers, and the unclosed
        # parenthesis made Python call the chain's result with the next
        # parenthesized chain as its argument, corrupting graph construction.
        # RoI head: pool conv5 features per RoI, then two FC layers with dropout.
        (self.feed('conv5_feature', 'rois')
             .roi_pool(6, 6, 1.0/16, name='pool5')
             .fc(4096, name='fc6')
             .dropout(self.keep_prob, name='drop6')
             .fc(4096, name='fc7')
             .dropout(self.keep_prob, name='drop7')
             .fc(174, relu=False, name='subcls_score')
             .softmax(name='subcls_prob'))
        # Coarse 4-way class scores are predicted from the subclass scores.
        (self.feed('subcls_score')
             .fc(4, relu=False, name='cls_score')
             .softmax(name='cls_prob'))
        # Bounding-box regression head (4 classes x 4 deltas).
        (self.feed('subcls_score')
             .fc(16, relu=False, name='bbox_pred'))
|
WARNING: This Website contains explicit adult material. You may only enter this Website if you are at least 18 years of age, or at least the age of majority in the jurisdiction where you reside or from which you access this Website. If you do not meet these requirements, then you do not have permission to use the Website.
Please add a one-time donation to help fund our most important campaigns to shift public opinion and expand LGBTQ rights. Remember, 100% of your contribution fuels the fight for LGBTQ equality and makes you an active part of the Human Rights Campaign.
|
#!/usr/bin/env python
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from test.base import BaseTest
from activitystreams import parse as as_parser
from dino import api
from dino.config import ApiActions
from dino.utils import b64d
class ApiJoinTest(BaseTest):
    """Tests for api.on_join: room-join ACL enforcement and the attachments
    (acl, history, owner, user) carried in the join response activity.

    Fix: removed a leftover debug `pprint` of the history attachment from
    test_join_returns_correct_history (it printed on every test run and did
    a redundant second attachment lookup).
    """
    def setUp(self):
        super(ApiJoinTest, self).setUp()
        self.create_channel_and_room()
    def test_join_non_owner_no_acl(self):
        self.assert_join_succeeds()
    def test_join_owner_no_acl(self):
        self.set_owner()
        self.assert_join_succeeds()
    def test_join_non_owner_correct_country(self):
        self.remove_owner_channel()
        self.remove_owner()
        self.set_acl({ApiActions.JOIN: {'country': 'de,cn,dk'}})
        self.assert_join_succeeds()
    def test_join_non_owner_with_all_acls(self):
        self.remove_owner_channel()
        self.remove_owner()
        self.set_acl({ApiActions.JOIN: {
            'country': 'de,cn,dk',
            'city': 'Beijing,Shanghai,Berlin,Copenhagen',
            'age': '18:45',
            'gender': 'm,f',
            'membership': '0,1',
            'has_webcam': 'y',
            'fake_checked': 'y,n',
            'image': 'y'
        }})
        self.assert_join_succeeds()
    def test_join_owner_with_all_acls(self):
        self.set_owner()
        self.set_acl({ApiActions.JOIN: {
            'country': 'de,cn,dk',
            'city': 'Beijing,Shanghai,Berlin,Copenhagen',
            'age': '18:45',
            'gender': 'm,f',
            'membership': '0,1',
            'has_webcam': 'y',
            'fake_checked': 'y,n',
            'image': 'n'
        }})
        self.assert_join_succeeds()
    def test_join_returns_activity_with_4_attachments(self):
        # acl, history, owner and user attachments are always present
        act = self.activity_for_join()
        response = api.on_join(act, as_parser(act))
        self.assertEqual(4, len(response[1]['object']['attachments']))
    def test_join_returns_activity_with_acl_attachment(self):
        act = self.activity_for_join()
        response = api.on_join(act, as_parser(act))
        attachments = response[1]['object']['attachments']
        acls = self.get_attachment_for_key(attachments, 'acl')
        self.assertIsNotNone(acls)
    def test_join_returns_activity_with_history_attachment(self):
        act = self.activity_for_join()
        response = api.on_join(act, as_parser(act))
        attachments = response[1]['object']['attachments']
        history = self.get_attachment_for_key(attachments, 'history')
        self.assertIsNotNone(history)
    def test_join_returns_activity_with_owner_attachment(self):
        act = self.activity_for_join()
        response = api.on_join(act, as_parser(act))
        attachments = response[1]['object']['attachments']
        owners = self.get_attachment_for_key(attachments, 'owner')
        self.assertIsNotNone(owners)
    def test_join_returns_activity_with_users_attachment(self):
        act = self.activity_for_join()
        response = api.on_join(act, as_parser(act))
        attachments = response[1]['object']['attachments']
        users = self.get_attachment_for_key(attachments, 'user')
        self.assertIsNotNone(users)
    def test_join_returns_activity_with_empty_acl_attachment(self):
        act = self.activity_for_join()
        response = api.on_join(act, as_parser(act))
        attachments = response[1]['object']['attachments']
        self.assert_attachment_equals(attachments, 'acl', [])
    def test_join_returns_activity_with_empty_history_attachment(self):
        act = self.activity_for_join()
        response = api.on_join(act, as_parser(act))
        attachments = response[1]['object']['attachments']
        self.assert_attachment_equals(attachments, 'history', [])
    def test_join_returns_activity_with_empty_owner_attachment(self):
        act = self.activity_for_join()
        response = api.on_join(act, as_parser(act))
        attachments = response[1]['object']['attachments']
        self.assert_attachment_equals(attachments, 'owner', [])
    def test_join_returns_activity_with_one_user_as_attachment(self):
        # First join: room is empty, so no users are listed. Second join by
        # another user: the first user should now be listed.
        third_user_id = "9876"
        self.env.db.set_user_name(third_user_id, third_user_id)
        act = self.activity_for_join()
        response = api.on_join(act, as_parser(act))
        attachments = response[1]['object']['attachments']
        users = self.get_attachment_for_key(attachments, 'user')
        self.assertEqual(0, len(users))
        act = self.activity_for_join()
        act['actor']['id'] = third_user_id
        response = api.on_join(act, as_parser(act))
        attachments = response[1]['object']['attachments']
        users = self.get_attachment_for_key(attachments, 'user')
        self.assertEqual(1, len(users))
    def test_join_returns_activity_with_one_owner(self):
        self.set_owner()
        act = self.activity_for_join()
        response = api.on_join(act, as_parser(act))
        attachments = response[1]['object']['attachments']
        owners = self.get_attachment_for_key(attachments, 'owner')
        self.assertEqual(1, len(owners))
    def test_join_returns_activity_with_correct_owner(self):
        self.set_owner()
        act = self.activity_for_join()
        response = api.on_join(act, as_parser(act))
        attachments = response[1]['object']['attachments']
        owners = self.get_attachment_for_key(attachments, 'owner')
        # displayName is base64-encoded in the activity
        user_id, user_name = owners[0]['id'], owners[0]['displayName']
        self.assertEqual(ApiJoinTest.USER_ID, user_id)
        self.assertEqual(ApiJoinTest.USER_NAME, b64d(user_name))
    def test_join_returns_correct_nr_of_acls(self):
        correct_acls = {ApiActions.JOIN: {'country': 'de,cn,dk', 'city': 'Shanghai,Berlin,Copenhagen'}}
        self.set_acl(correct_acls)
        self.set_owner()
        act = self.activity_for_join()
        response = api.on_join(act, as_parser(act))
        attachments = response[1]['object']['attachments']
        returned_acls = self.get_attachment_for_key(attachments, 'acl')
        self.assertEqual(len(correct_acls.get(ApiActions.JOIN)), len(returned_acls))
    def test_join_returns_correct_acls(self):
        correct_acls = {ApiActions.JOIN: {'country': 'de,cn,dk', 'city': 'Shanghai,Berlin,Copenhagen'}}
        self.set_acl(correct_acls)
        self.set_owner()
        act = self.activity_for_join()
        response = api.on_join(act, as_parser(act))
        attachments = response[1]['object']['attachments']
        returned_acls = self.get_attachment_for_key(attachments, 'acl')
        # each returned acl is {'objectType': <acl key>, 'content': <acl value>}
        for acl in returned_acls:
            acl_key = acl['objectType']
            acl_value = acl['content']
            self.assertTrue(acl_key in correct_acls.get(ApiActions.JOIN))
            self.assertEqual(correct_acls.get(ApiActions.JOIN)[acl_key], acl_value)
    def test_join_returns_history(self):
        msg = 'this is a test message'
        self.set_owner()
        self.assert_join_succeeds()
        self.send_message(msg)
        self.assert_in_room(True)
        self.leave_room()
        self.assert_in_room(False)
        act = self.activity_for_join()
        response = api.on_join(act, as_parser(act))
        attachments = response[1]['object']['attachments']
        returned_history = self.get_attachment_for_key(attachments, 'history')
        self.assertEqual(1, len(returned_history))
    def test_join_returns_correct_history(self):
        msg = 'this is a test message'
        self.set_owner()
        self.assert_join_succeeds()
        msg_response = self.send_message(msg)[1]
        self.leave_room()
        act = self.activity_for_join()
        response = api.on_join(act, as_parser(act))
        attachments = response[1]['object']['attachments']
        all_history = self.get_attachment_for_key(attachments, 'history')
        self.assertEqual(1, len(all_history))
        # history entries carry the original message id, content (b64),
        # publish time and author display name (b64)
        history_obj = all_history[0]
        self.assertEqual(msg_response['id'], history_obj['id'])
        self.assertEqual(msg, b64d(history_obj['content']))
        self.assertEqual(msg_response['published'], history_obj['published'])
        self.assertEqual(ApiJoinTest.USER_NAME, b64d(history_obj['author']['displayName']))
    def assert_attachment_equals(self, attachments, key, value):
        """Assert that the attachment list stored under `key` equals `value`."""
        found = self.get_attachment_for_key(attachments, key)
        self.assertEqual(value, found)
    def get_attachment_for_key(self, attachments, key):
        """Return the nested attachment list whose objectType is `key`, or None."""
        for attachment in attachments:
            if attachment['objectType'] == key:
                return attachment['attachments']
        return None
|
Frequent Reference Question: How Many Federal Laws Are. securities and current coin of the United. hunt and the government pays you the value of the. Before You Board. Q. Waives the non-refundable cancellation provision of your cruise ticket contract and pays you the value of the unused portion of your prepaid.
Coinranking Crypto-X (CWX) price information, coin market cap and trade volume. Visit ESPN to get up-to-the-minute sports news coverage, scores, highlights and commentary for NFL, MLB, NBA, College Football, NCAA Basketball and more. June 2016: The Month in Ransomware. not many end users put that much value.
The huge number of projects and blockchains that can be built on the platform can only mean that the value of the native coin will.
At Jacksonville State University we have more than 49 academic programs that can provide you with the knowledge and skills to help you reach your maximum potential. The Good Samaritan Society and Good Samaritan Canada provide specialized care services for the elderly, disabled and those in need or at risk.
Leader-Member Exchange (LMX) is the foremost dyadic approach to leadership.
II and inscriptions that include the face value of the coin,.
AquaJet Express started with the purchase of a small coin-op self service car wash in the.Free custom technical stocks chart for CWX.TO. Latest real-time Bats price quote, charts, technicals and opinions.
You do realize while coin prices go up rest went down a lot so prices are same.The official website of the Colorado Rockies with the most up-to-date information on scores, schedule, stats, tickets,.HSBC Holdings PLC ADR stock price, stock quotes and financial overviews from MarketWatch.
The Mystic Coin bubble has really killed my motivation to make a legendary.
Web site for Tennessee Secretary of State, Business Division Online Services.
CasinoCoin (CSC) Buy CSC. CasinoCoin is worth what the market values it at and it can be bought,.
And the only way they can keep that up is to include the coins that most people want to. increased in value by 16% within just 24 hours as I. called Zimbra X.Manage your finances your way—the convenient way, the flexible way, the anytime way.
|
"""
Some parts are copied from rest_framework.exceptions, which is licensed under the BSD license:
*******************************************************************************
Copyright (c) 2011-2016, Tom Christie
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
Redistributions in binary form must reproduce the above copyright notice, this
list of conditions and the following disclaimer in the documentation and/or
other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*******************************************************************************
Handled exceptions raised by REST framework.
In addition Django's built in 403 and 404 exceptions are handled.
(`django.http.Http404` and `django.core.exceptions.PermissionDenied`)
"""
from __future__ import unicode_literals
import math
from django.utils.encoding import force_text
from django.utils.translation import ungettext, ugettext_lazy as _
from django.db import (
DatabaseError,
OperationalError as DatabaseOperationalError,
InterfaceError as DatabaseInterfaceError,
)
from redis.exceptions import (
TimeoutError as RedisTimeoutError,
ConnectionError as RedisConnectionError,
)
from kombu.exceptions import (
TimeoutError as RabbitTimeoutError,
ConnectionError as RabbitConnectionError,
)
from api import status
# List of operational errors that affect the application in a serious manner
# (e.g. callback tasks that fail because of this must be retried).
# Grouped into one tuple so callers can handle all transient infrastructure
# failures (database, RabbitMQ/kombu, redis) with a single
# `except OPERATIONAL_ERRORS:` clause.
OPERATIONAL_ERRORS = (
    DatabaseOperationalError,
    DatabaseInterfaceError,
    RabbitConnectionError,
    RabbitTimeoutError,
    RedisTimeoutError,
    RedisConnectionError,
)
class APIException(Exception):
    """
    Base class for REST framework exceptions.

    Subclasses should provide `.status_code` and `.default_detail` properties.
    The human-readable message is kept in `.detail` and is also what
    `str(exc)` returns.
    """
    status_code = status.HTTP_500_INTERNAL_SERVER_ERROR
    default_detail = _('A server error occurred.')

    def __init__(self, detail=None):
        # Fall back to the class-level default message when no detail is given.
        chosen = self.default_detail if detail is None else detail
        self.detail = force_text(chosen)

    def __str__(self):
        return self.detail
class TransactionError(DatabaseError):
    """Use this to break atomic transactions"""
    # NOTE(review): subclassing DatabaseError appears intentional so that
    # callers catching database errors also handle it -- confirm before
    # changing the base class.
    pass
class ObjectAPIException(APIException):
    """Inject object's name or model's verbose name into detail"""
    # Used when no explicit object_name/model is supplied.
    default_object_name = _('Object')
    default_model = None
    def __init__(self, detail=None, object_name=None, model=None, task_id=None):
        super(ObjectAPIException, self).__init__(detail=detail)
        self.task_id = task_id
        if not object_name:
            # Derive the display name from the model (explicit argument or
            # class-level default); otherwise use the generic default name.
            model = model or self.default_model
            if model:
                # noinspection PyProtectedMember
                object_name = model._meta.verbose_name_raw
            else:
                object_name = self.default_object_name
        # Subclasses' default_detail templates contain an `{object}` placeholder.
        self.detail = self.detail.format(object=object_name)
class BadRequest(APIException):
    """Generic 400 response for an invalid request."""
    status_code = status.HTTP_400_BAD_REQUEST
    default_detail = _('Bad request')
class ParseError(APIException):
    """400 response for a request body that could not be parsed."""
    status_code = status.HTTP_400_BAD_REQUEST
    default_detail = _('Malformed request')
class AuthenticationFailed(APIException):
    """401 response for invalid credentials."""
    status_code = status.HTTP_401_UNAUTHORIZED
    default_detail = _('Incorrect authentication credentials.')
class NotAuthenticated(APIException):
    """401 response for a request with no credentials at all."""
    status_code = status.HTTP_401_UNAUTHORIZED
    default_detail = _('Authentication credentials were not provided.')
class PermissionDenied(APIException):
    """403 response for an authenticated but unauthorized request."""
    status_code = status.HTTP_403_FORBIDDEN
    default_detail = _('You do not have permission to perform this action.')
class NotFound(APIException):
    """404 response."""
    status_code = status.HTTP_404_NOT_FOUND
    default_detail = _('Not found')
class MethodNotAllowed(APIException):
    """405 response; the offending HTTP method is named in the message."""
    status_code = status.HTTP_405_METHOD_NOT_ALLOWED
    default_detail = _('Method "{method}" not allowed.')

    def __init__(self, method, detail=None):
        # An explicit detail wins; otherwise substitute the HTTP method
        # into the default message template.
        if detail is not None:
            self.detail = force_text(detail)
        else:
            self.detail = force_text(self.default_detail).format(method=method)
class NotAcceptable(APIException):
    """406 response for an Accept header that no renderer can satisfy."""
    status_code = status.HTTP_406_NOT_ACCEPTABLE
    default_detail = _('Could not satisfy the request Accept header.')

    def __init__(self, detail=None, available_renderers=None):
        chosen = self.default_detail if detail is None else detail
        self.detail = force_text(chosen)
        # Renderers the view could have used; handlers may inspect this.
        self.available_renderers = available_renderers
class ObjectNotFound(ObjectAPIException):
    """404 response; `{object}` is replaced by the object/model name."""
    status_code = status.HTTP_404_NOT_FOUND
    default_detail = _('{object} not found')
class ObjectAlreadyExists(ObjectAPIException):
    """406 response for creating an object that already exists."""
    status_code = status.HTTP_406_NOT_ACCEPTABLE
    default_detail = _('{object} already exists')
class ObjectOutOfRange(ObjectAPIException):
    """406 response for an object value outside its allowed range."""
    status_code = status.HTTP_406_NOT_ACCEPTABLE
    default_detail = _('{object} out of range')
# "Item" variants: same behavior, with 'Item' as the default object name.
class ItemNotFound(ObjectNotFound):
    default_object_name = _('Item')
class ItemAlreadyExists(ObjectAlreadyExists):
    default_object_name = _('Item')
class ItemOutOfRange(ObjectOutOfRange):
    default_object_name = _('Item')
class InvalidInput(APIException):
    """412 response for input that fails validation."""
    status_code = status.HTTP_412_PRECONDITION_FAILED
    default_detail = _('Invalid input')
class UnsupportedMediaType(APIException):
    """415 response; the offending media type is named in the message."""
    status_code = status.HTTP_415_UNSUPPORTED_MEDIA_TYPE
    default_detail = _('Unsupported media type "{media_type}" in request.')

    def __init__(self, media_type, detail=None):
        # An explicit detail wins; otherwise substitute the media type
        # into the default message template.
        if detail is not None:
            self.detail = force_text(detail)
        else:
            self.detail = force_text(self.default_detail).format(media_type=media_type)
class NodeIsNotOperational(APIException):
    """423 response: the compute node cannot accept operations."""
    status_code = status.HTTP_423_LOCKED
    default_detail = _('Node is not operational')
class VmIsNotOperational(APIException):
    """423 response: the VM cannot accept operations."""
    status_code = status.HTTP_423_LOCKED
    default_detail = _('VM is not operational')
class VmIsLocked(APIException):
    """423 response: the VM is locked or has slave VMs attached."""
    status_code = status.HTTP_423_LOCKED
    default_detail = _('VM is locked or has slave VMs')
class TaskIsAlreadyRunning(APIException):
    """423 response: the requested task is already in progress."""
    status_code = status.HTTP_423_LOCKED
    default_detail = _('Task is already running')
class NodeHasPendingTasks(APIException):
    """409 response: node operations conflict with queued tasks."""
    status_code = status.HTTP_409_CONFLICT
    default_detail = _('Node has pending tasks')
class VmHasPendingTasks(APIException):
    """409 response: VM operations conflict with queued tasks."""
    status_code = status.HTTP_409_CONFLICT
    default_detail = _('VM has pending tasks')
class ExpectationFailed(APIException):
    """417 response."""
    status_code = status.HTTP_417_EXPECTATION_FAILED
    default_detail = _('Expectation Failed')
class PreconditionRequired(APIException):
    """428 response."""
    status_code = status.HTTP_428_PRECONDITION_REQUIRED
    default_detail = _('Precondition Required')
class FailedDependency(APIException):
    """424 response."""
    status_code = status.HTTP_424_FAILED_DEPENDENCY
    default_detail = _('Failed Dependency')
class Throttled(APIException):
    """429 response for a rate-limited request.

    When a retry delay is known, it is appended to the message
    ("Expected available in N second(s).") and exposed as `.wait`
    (rounded up to whole seconds).
    """
    status_code = status.HTTP_429_TOO_MANY_REQUESTS
    default_detail = _('Request was throttled.')
    extra_detail_singular = 'Expected available in {wait} second.'
    extra_detail_plural = 'Expected available in {wait} seconds.'
    def __init__(self, wait=None, detail=None):
        if detail is None:
            self.detail = force_text(self.default_detail)
        else:
            self.detail = force_text(detail)
        if wait is None:
            # No retry hint available; keep the bare detail message.
            # (Fix: previously the hint was appended unconditionally, which
            # produced "Expected available in None seconds." when wait=None.)
            self.wait = None
        else:
            self.wait = math.ceil(wait)
            self.detail += ' ' + force_text(ungettext(
                self.extra_detail_singular.format(wait=self.wait),
                self.extra_detail_plural.format(wait=self.wait),
                self.wait
            ))
class APIError(APIException):
    """500 response for an unexpected server-side error."""
    status_code = status.HTTP_500_INTERNAL_SERVER_ERROR
    default_detail = _('Internal Server Error')
class OperationNotSupported(APIException):
    """501 response for an unimplemented operation."""
    status_code = status.HTTP_501_NOT_IMPLEMENTED
    default_detail = _('Operation not supported')
class ServiceUnavailable(APIException):
    """503 response."""
    status_code = status.HTTP_503_SERVICE_UNAVAILABLE
    default_detail = _('Service Unavailable')
class GatewayTimeout(APIException):
    """504 response."""
    status_code = status.HTTP_504_GATEWAY_TIMEOUT
    default_detail = _('Gateway Timeout')
|
A wonderfully intimate and unique destination wedding in the Bahamas with a perfect couple.
The formal, yet sweet and endearing, wedding of Camila and Christiaan in Palm Beach, FL.
The Breakers served as the exquisite backdrop for Ainsley and Dom’s incredible wedding day.
An intimate wedding at the Ancient Spanish Monastery with beautiful bride and a handsome groom, followed by a romantic reception at the Ritz Bal Harbour.
The weather was perfect for April and Timothy’s outdoor wedding ceremony and reception at Villa Woodbine.
Hayley and Andrew’s waterside wedding at the Rusty Pelican in Tampa, Florida.
The beautiful Miami Beach sunrise wedding of Barbara and Jose Luis.
|
import tensorflow as tf
from .distance import DistanceRepresentation
class PointLineDistanceRepresentation:
    """Computes the perpendicular distance from a point to a line segment's
    supporting line, via the area of the triangle the three points form."""
    def __init__(self):
        # Helper providing pairwise euclidean distances between point tensors.
        self.distance = DistanceRepresentation()
    def __call__(self, p1s: tf.Tensor, p2s: tf.Tensor, p3s: tf.Tensor) -> tf.Tensor:
        """
        Distance between the point p1s to the line <p2s, p3s>
        :param p1s: tf.Tensor (Points, Batch, Len, Dims)
        :param p2s: tf.Tensor (Points, Batch, Len, Dims)
        :param p3s: tf.Tensor (Points, Batch, Len, Dims)
        :return: tf.Tensor (Points, Batch, Len)
        """
        # Following Heron's Formula https://en.wikipedia.org/wiki/Heron%27s_formula
        a = self.distance.distance(p1s, p2s)
        b = self.distance.distance(p2s, p3s)
        c = self.distance.distance(p1s, p3s)
        s: tf.Tensor = (a + b + c) / 2
        squared = s * (s - a) * (s - b) * (s - c)
        # Fix: clamp tiny negative values caused by floating-point
        # cancellation for (near-)degenerate triangles, so tf.sqrt never
        # produces NaN (which would also poison gradients).
        area = tf.sqrt(tf.maximum(squared, 0.0))
        # The distance is the triangle "height" over base b: 2 * area / |p2s - p3s|.
        # divide_no_nan yields 0 where b == 0, i.e. where p2s == p3s and the
        # line is degenerate.
        square_area: tf.Tensor = area * 2
        distance = tf.math.divide_no_nan(square_area, b)
        # TODO add .zero_filled()
        return distance
|
Raneri Web Design released a new free online service aimed at eBay users: eBalert can create RSS feeds from any eBay search, on the fly. eBalert is for people who are looking for something in particular: it saves users the tedious routine of checking eBay daily, running the same search over and over, to discover which new auctions have started.
You can do the search once (it works also with advanced filters, like geo-distance or the ordering by criteria), then copy and paste the URL of the results’ page to eBalert: it will instantly create your feed, updated every 12 hours and ready to be added to your preferred feedreader. The service doesn’t require any signup.
This entry was posted in Web Sites and tagged ebalert, ebay, feed, rss. Bookmark the permalink. Trackbacks are closed, but you can post a comment.
|
#! /usr/bin/python2.7
# -*- coding: utf-8 -*-
import numpy as np
import matplotlib.pyplot as plt
import errorvalues as ev # github.com/stefantkeller/errorvalues
from VECSELsetup.eval.varycolor import varycolor
from VECSELsetup.eval.gen_functions import extract, lut_from_calibfolder, lut_interp_from_calibfolder
def main():
    """Evaluate a reflectivity measurement run.

    Calibrates the raw pump/reflection/emission detector readings via
    look-up tables from the calibration folder, plots reflectivity vs.
    pump current for each heat-sink temperature, then fits the baseline
    reflectivity as a linear function of heat-sink temperature.
    """
    logfile = '20150204_sample21-1-d6/spot333um.csv'
    calib_folder = '20150204_calib_333um_s21-1-d6'
    # calibration
    #pump_lut, refl_lut, emis_lut = lut_from_calibfolder(calib_folder)
    emis_lut = lut_from_calibfolder(calib_folder,identifiers=['Laser'],ignore_error=False) # emission has constant value solely due to BS, no ND in front of detector etc.
    pump_lut, refl_lut = lut_interp_from_calibfolder(calib_folder,identifiers=['Pump','Refl'])
    #------------------------------------
    # load measurement; each returned mapping is keyed by temperature set point
    current_set, current, pump, refl, laser, meantemp = extract(logfile, identifiers=['Current','Pump','Refl','Laser','Temperature'])
    Temperatures = sorted(current_set.keys())
    absorbed, reflected, emitted, pumped, dissipated, relref = {}, {}, {}, {}, {}, {}
    for T in Temperatures:
        # convert raw detector readings to physical powers via the LUTs,
        # then derive the power balance and relative reflectivity (%)
        reflected[T] = refl_lut(refl[T])
        pumped[T] = pump_lut(pump[T])
        absorbed[T] = pumped[T] - reflected[T]
        emitted[T] = emis_lut(laser[T])
        dissipated[T] = absorbed[T] - emitted[T]
        relref[T] = reflected[T]/pumped[T]*100
    # one distinct color per temperature (3 colors are skipped per series)
    cols = varycolor(3*len(Temperatures))
    cnt = 0
    #plt.subplot(1,2,1)
    baserefl = ev.errvallist()
    for T in Temperatures:
        # plot
        # baseline window: average reflectivity between pstart and pend W pumped
        pstart, pend = 1, 9 # W pumped
        istart, iend = np.sum([pumped[T].v()<pstart]), np.sum([pumped[T].v()<pend])
        baserefl.append(ev.wmean(relref[T][istart:iend]))
        xplot = current
        xlabel = 'Pump current (A)'
        # NOTE(review): '$({0})^\circ$C' relies on '\c' not being an escape
        # sequence; a raw string literal would be safer.
        plt.errorbar(xplot[T].v(),relref[T].v(),
                     xerr=xplot[T].e(),yerr=relref[T].e(),
                     c=cols[cnt],linestyle=' ',
                     label='$({0})^\circ$C'.format(meantemp[T].round(2)))
        # horizontal black line marking the baseline mean over the window
        plt.plot(xplot[T][istart:iend].v(), (iend-istart)*[baserefl[-1].v()],color='k')
        cnt+=3
    plt.xlabel(xlabel)
    plt.ylabel('Reflectivity (%)')
    #plt.xlim([0, 20])
    reflylim = [25, 70]
    plt.ylim(reflylim)
    plt.legend(loc='best',prop={'size':12},labelspacing=-0.4)
    plt.grid('on')
    plt.show()
    ##
    #plt.subplot(1,2,2)
    # linear fit of baseline reflectivity vs. mean heat-sink temperature
    templist = [meantemp[T] for T in Temperatures]
    Temp = ev.errvallist(templist)
    q,m = ev.linreg(Temp.v(),baserefl.v(),baserefl.e())
    plt.errorbar(Temp.v(),baserefl.v(),
                 xerr=Temp.e(),yerr=baserefl.e(),
                 color='r',linestyle=' ')
    plt.plot(Temp.v(),q.v()+Temp.v()*m.v(),'k')
    # annotate the fit result: offset q and slope m
    plt.text((Temp[0].v()+Temp[1].v())/2.0,baserefl[0].v()+2,
             r'$({})+({})T_{{hs}}$'.format(q.round(2),m.round(2)))
    plt.ylim(reflylim)
    plt.xlabel('Heat sink temperature ($^\circ$C)')
    plt.ylabel('Reflectivity (%)')
    plt.grid('on')
    ##
    plt.show()
if __name__ == "__main__":
    main()
|
The quantity of people that need to undertake international journeys is rising day by day. Plus, by booking separately, I cut out the middle man, such as Travelocity, in case there is a problem. Guide your flight + hotel with our Flight + Lodge Package deal for cheaper charges. It is all very confusing, however the bottom line is it’ll take you endlessly to earn a free flight, so if you are going to use Expedia’s program, you’re in all probability better off redeeming for hotels.
Every journey firm would not need some of your reserving cash, they need all of it, and they’ll do their greatest to get you to guide every piece with them. Airbnb does not just wish to be a substitute for hotels; it’s growing into a full-service journey company. Save on Great Lodges in standard locations throughout the U.S. with Travelocity.
You may e-book your stay before buying your flight, add it to your itinerary later by Handle a booking and even make a reservation without reserving a flight. Even the information on flight availability comparability websites is very often out-of-date meaning that when you click on by to the page on which you count on to see low-cost flights on the time you need, you may be greeted with a “not available”.
While you’re on the lookout for multi city flights, you possibly can’t discover better deals anyplace than CheapOair. It is extremely hectic and tiresome for a disabled traveler to make use of connecting flights. Research revealed that the very best time to e-book air journey is Tuesday at three PM ET, the time when airways will release their discounted seat stock. Make the most of unique financial savings with as much as $300 off per booking on Delta Vacations worldwide.
Specializing in locations in Australia, Qantas lets users experience the attractive scenery earlier than they e-book their flights. Virtually each lodge on the Las Vegas Strip has a social media worker tweeting about their latest deals. From expertise, I additionally discover that Thursday and Saturday can supply the perfect value typically as well. The method of reserving flights is commonly painful, and the prices for tickets change always.
|
#!/usr/bin/python3
from email import encoders
from email.mime.base import MIMEBase
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
import psycopg2
import xlsxwriter
import os
import sys
import smtplib
# usage: python3 colum.py <report name> <to address> <sql file>
# Read the SQL to execute once, at import time. The 'with' block ensures
# the file handle is closed (it was previously leaked by open(...).read()).
with open(sys.argv[3], 'r') as _sql_file:
    SQL_Code = _sql_file.read()
#Connecting to PostgreSQL
def main():
    """Run SQL_Code against the database, dump the result set to an .xlsx
    file, and email that file as an attachment.

    Command line (see module usage comment):
      argv[1] -> report name (subject line and output file name)
      argv[2] -> recipient address
      argv[3] -> SQL file (read into SQL_Code at import time)
    """
    conn_string = "host='db' dbname='directski' user='pgsql' password=''"
    print("Connecting to database\n ->%s" % conn_string)
    conn = psycopg2.connect(conn_string)
    try:
        cursor = conn.cursor()
        print("Connected!\n")
        cursor.execute(SQL_Code)
        filename = sys.argv[1].replace(" ", "_").lower()
        workbook = xlsxwriter.Workbook(filename + ".xlsx", {'remove_timezone': True})
        worksheet = workbook.add_worksheet()
        data = cursor.fetchall()
        # Header row taken from the cursor description.
        for colidx, heading in enumerate(cursor.description):
            worksheet.write(0, colidx, heading[0])
        # Data rows, offset by one for the header.
        for rowid, row in enumerate(data):
            for colid, col in enumerate(row):
                worksheet.write(rowid + 1, colid, col)
        workbook.close()
    finally:
        # Previously the connection/cursor were never closed.
        conn.close()

    # Build the mail message with the workbook attached.
    fromaddr = "temp@temp.com"
    toaddr = sys.argv[2]
    msg = MIMEMultipart()
    msg['From'] = fromaddr
    msg['To'] = toaddr
    msg['Subject'] = sys.argv[1]
    msg.attach(MIMEText("", 'plain'))

    part = MIMEBase('application', 'octet-stream')
    # 'with' closes the attachment file handle (previously leaked).
    with open(filename + ".xlsx", "rb") as attachment:
        part.set_payload(attachment.read())
    encoders.encode_base64(part)
    # Parenthesized to make the intended string explicit; the original
    # relied on %-before-+ operator precedence.
    part.add_header('Content-Disposition', "attachment; filename= %s" % (filename + ".xlsx"))
    msg.attach(part)

    server = smtplib.SMTP('smtp.gmail.com', 587)
    try:
        server.starttls()
        # SECURITY: credentials are hard-coded in source; move them to an
        # environment variable or config file.
        server.login(fromaddr, "temp123")
        server.sendmail(fromaddr, toaddr, msg.as_string())
    finally:
        # Always close the SMTP session, even if sending fails.
        server.quit()


if __name__ == "__main__":
    main()
|
In October of last year, the Securities and Exchange Commission (“SEC”), adopted final rules (1) amending Rule 147, also known as Intrastate Crowdfunding and Rule 504 under the Securities Act of 1933, as amended (the “Securities Act”), and (2) establishing a new Securities Act exemption designated Rule 147A. Amended Rule 147 and new Rule 147A took effect on April 20, 2017, and amended Rule 504 took effect on January 20, 2017. As amended, Rule 147 facilitates offerings relying on intrastate crowdfunding exemptions under state securities laws. Further, Rule 147A further accommodates offers accessible to out-of-state residents and companies that are incorporated out-of-state.
On April 19, 2017, the SEC issued a new compliance and disclosure interpretation addressing intrastate crowdfunded offerings under new Rule 147A under the Securities Act. The new Intrastate Crowdfunding compliance and disclosure interpretation provides that under Rule 147A(g)(1), offers and sales made in reliance on Rule 147A will not be integrated with prior offers and sales of securities. The issuer must still comply with all applicable state securities law requirements.
Rule 147A, which took effect this month, provides an exemption from registration for issuers conducting an intrastate crowdfunded offering that satisfy certain conditions.
An integration safe harbor applies: it excludes any prior offers or sales of securities by the issuer, as well as certain subsequent offers or sales occurring after the completion of the offering.
Under the Rule 147 intrastate crowdfunding exemption, an issuer’s principal place of business is defined as the location where the officers, partners, or managers of the issuer primarily direct, control, and coordinate the issuer’s activities. An issuer may have a principal place of business only within a single state. Under Rule 147, if an Issuer changes its principal place of business after making sales in an intrastate offering, it will not be allowed to conduct an intrastate offering under Rule 147 in another state for a period of six months from the date of the last sale in the prior state.
A majority of the issuer’s employees must be located in the state or territory.
The issuer had a reasonable belief that the purchaser was a resident of the state or territory.
Evidence of the prospective investor’s home address on a recent bill, pay stub, state or federal tax return, or on any state-issued driver’s license or identification card.
The Rule 147 intrastate crowdfunding exemption requires issuers to obtain a written representation from each purchaser as to the purchaser’s residency. Obtaining a written representation from purchasers of in-state residency status will not, without more, be sufficient to establish a reasonable belief that those purchasers are in-state residents.
The residency of an investor that is a legal entity, such as a corporation, partnership, trust, or other form of business entity, is the location where, at the time of sale, the entity has its principal place of business. A legal entity’s principal place of business is defined as the location in which the officers, partners, or managers of the entity primarily direct, control, and coordinate the activities of the entity.
Rule 147(e) provides that resales of a security offered and sold in reliance on the Rule 147 intrastate crowdfunding exemption must be made only to residents of the state or territory in which the issuer was resident at the time of sale of the security by the issuer for a period of six months from the sale by the issuer to the original investor.
All sales to investors will be made only to residents of the same state or territory as the issuer.
For a period of six months from the date of sale by the issuer, any resale must be made only to residents in the same state or territory in which the issuer resided at the time that the initial investment was made.
Rule 147 also requires issuers to include a prominent legend on all securities sold under the exemption advising of the resale restrictions under Rule 147(e).
Prior offers or sales of securities.
made more than six months after the offering is complete.
Rule 147(h) provides that, where an issuer decides to register an offering after making offers in reliance on Rule 147 limited only to qualified institutional buyers (“QIB”) or institutional accredited investors (“IAI”), the offers will not be subject to integration with any subsequent registered offering. If the issuer makes offers in reliance on Rule 147 to persons other than QIBs or IAIs, the offers will not be subject to integration if the issuer waits at least 30 calendar days between the last offer made in reliance on Rule 147 and the filing of a registration statement under the Securities Act.
Additionally, Rule 147 offerings are not integrated with other exempt offerings made concurrently by the issuer as long as each offering complies with the requirements of the exemption that the issuer relies on for the specific offering. When the integration safe harbor is not available, whether subsequent offers and sales are integrated with any securities offered or sold under Rule 147 will depend on the particular facts and circumstances.
Issuers relying on Rule 147A may make offers available to out-of-state residents using general solicitation or general advertising on the internet or other means, so long as sales are made only to in-state residents while Rule 147 requires that issuers make offers and sales only to in-state residents.
For further information about rule 147A and this securities law blog post, please contact Brenda Hamilton, Securities Attorney at 101 Plaza Real S, Suite 202 N, Boca Raton, Florida, (561) 416-8956 or by email at [email protected]. This securities law blog post is provided as a general informational service to clients and friends of Hamilton & Associates Law Group and should not be construed as, and does not constitute, legal and compliance advice on any specific matter, nor does this message create an attorney-client relationship. Please note that the prior results discussed herein do not guarantee similar outcomes.
|
#!/usr/bin/env python3
# vim:fileencoding=utf-8
# License: GPL v3 Copyright: 2017, Kovid Goyal <kovid at kovidgoyal.net>
import re
from typing import Dict, Generator, Iterable, List, Optional, Tuple
from kitty.fast_data_types import coretext_all_fonts
from kitty.fonts import FontFeature
from kitty.options.types import Options
from kitty.typing import CoreTextFont
from kitty.utils import log_error
from . import ListedFont
# Maps a (bold, italic) flag pair to the name of the corresponding
# font-family attribute on the Options object.
attr_map = {(False, False): 'font_family',
            (True, False): 'bold_font',
            (False, True): 'italic_font',
            (True, True): 'bold_italic_font'}

# Lookup tables over all known fonts: keys are 'family_map', 'ps_map' and
# 'full_map', each mapping a lowercased name to the matching fonts.
FontMap = Dict[str, Dict[str, List[CoreTextFont]]]
def create_font_map(all_fonts: Iterable[CoreTextFont]) -> FontMap:
    """Index the given fonts by family name, postscript name and
    "family style" full name (all keys lowercased)."""
    family_map = {}
    ps_map = {}
    full_map = {}
    for font in all_fonts:
        family = (font['family'] or '').lower()
        style = (font['style'] or '').lower()
        postscript = (font['postscript_name'] or '').lower()
        family_map.setdefault(family, []).append(font)
        ps_map.setdefault(postscript, []).append(font)
        full_map.setdefault(family + ' ' + style, []).append(font)
    return {'family_map': family_map, 'ps_map': ps_map, 'full_map': full_map}
def all_fonts_map() -> FontMap:
    """Return the map of all system fonts, built lazily and cached as a
    function attribute so CoreText is queried only once."""
    cached: Optional[FontMap] = getattr(all_fonts_map, 'ans', None)
    if cached is not None:
        return cached
    cached = create_font_map(coretext_all_fonts())
    setattr(all_fonts_map, 'ans', cached)
    return cached
def list_fonts() -> Generator[ListedFont, None, None]:
    """Yield a ListedFont entry for every system font that has a family name."""
    for font in coretext_all_fonts():
        family = font['family']
        if not family:
            continue
        full_name = (family + ' ' + (font['style'] or '')).strip()
        yield {
            'family': family,
            'full_name': full_name,
            'postscript_name': font['postscript_name'] or '',
            'is_monospace': bool(font['monospace']),
        }
def find_font_features(postscript_name: str) -> Tuple[FontFeature, ...]:
    """Font-feature discovery is not implemented on this platform; always
    returns an empty tuple."""
    del postscript_name  # unused; kept for interface compatibility
    return ()
def find_best_match(family: str, bold: bool = False, italic: bool = False) -> CoreTextFont:
    """Return the system font that best matches the requested family and style.

    Exact postscript-name / full-name matches are preferred; otherwise the
    family map is consulted, falling back to Menlo when the family is
    unknown.
    """
    q = re.sub(r'\s+', ' ', family.lower())
    font_map = all_fonts_map()

    def score(candidate: CoreTextFont) -> Tuple[int, int, int, float]:
        style_match = 1 if candidate['bold'] == bold and candidate[
            'italic'
        ] == italic else 0
        monospace_match = 1 if candidate['monospace'] else 0
        is_regular_width = not candidate['expanded'] and not candidate['condensed']
        # prefer demi-bold to bold to heavy, less bold means less chance of
        # overflow
        weight_distance_from_medium = abs(candidate['weight'])
        return style_match, monospace_match, 1 if is_regular_width else 0, 1 - weight_distance_from_medium

    def best(candidates: List[CoreTextFont]) -> CoreTextFont:
        # Single O(n) pass instead of a full sort. Iterating in reverse
        # keeps the tie-break of the previous sorted(...)[-1] (stable sort):
        # among equally scored fonts, the last-listed one wins.
        return max(reversed(candidates), key=score)

    # First look for an exact match
    for selector in ('ps_map', 'full_map'):
        candidates = font_map[selector].get(q)
        if candidates:
            return best(candidates)

    # Let CoreText choose the font if the family exists, otherwise
    # fallback to Menlo
    if q not in font_map['family_map']:
        log_error('The font {} was not found, falling back to Menlo'.format(family))
        q = 'menlo'
    return best(font_map['family_map'][q])
def resolve_family(f: str, main_family: str, bold: bool = False, italic: bool = False) -> str:
    """Resolve the placeholder family names: 'auto' (for styled faces only)
    becomes the main family, and 'monospace' becomes Menlo."""
    resolved = main_family if (bold or italic) and f == 'auto' else f
    return 'Menlo' if resolved.lower() == 'monospace' else resolved
def get_font_files(opts: Options) -> Dict[str, CoreTextFont]:
    """Resolve the four configured faces to concrete fonts, keyed
    'medium', 'bold', 'italic' and 'bi'."""
    key_for = {(False, False): 'medium',
               (True, False): 'bold',
               (False, True): 'italic',
               (True, True): 'bi'}
    ans: Dict[str, CoreTextFont] = {}
    for (bold, italic), attr in attr_map.items():
        family = resolve_family(getattr(opts, attr), opts.font_family, bold, italic)
        face = find_best_match(family, bold, italic)
        key = key_for[(bold, italic)]
        ans[key] = face
        if key == 'medium':
            # Remember the resolved medium family for font_for_family().
            setattr(get_font_files, 'medium_family', face['family'])
    return ans
def font_for_family(family: str) -> Tuple[CoreTextFont, bool, bool]:
    """Find the best face for *family* and return it together with its
    bold and italic flags."""
    medium_family = getattr(get_font_files, 'medium_family')
    face = find_best_match(resolve_family(family, medium_family))
    return face, face['bold'], face['italic']
|
The chain is solid steel with gold-tone finish that looks and feels like fine jewelry. Its substantial weight, elegant look and solid feel, communicate luxury. The chain links make a soft/subtle and beautiful noise when moving. This is NOT aluminum or plastic chain, often sold by other retailers.
Available from 12" to 68" (inches) in length, in 4-inch increments, so you can choose the perfect length for your needs.
|
import os
import glob
import sys
import numpy as np
import matplotlib.pyplot as plt
if __name__ == "__main__":
    # Collect all sub-directories of the CWD; each one (except "graphs")
    # is treated as a dataset containing per-pipeline result CSVs.
    dirs = [ name for name in os.listdir(".") if os.path.isdir(os.path.join(".", name)) ]
    files = []
    for d in sorted(dirs):
        if d == "graphs":
            continue
        p = "." + "/" + d
        files = (sorted(glob.glob(p + "/*.csv")))
        csvs = dict()
        dataset = p.split("./")[1]
        # One aggregated results file per dataset directory.
        dataset_results = open(dataset + "_results.csv", 'w')
        # Load every CSV; the pipeline name is encoded in the file name
        # after the first underscore.
        for i in files:
            csv = np.genfromtxt(i, dtype='string' , delimiter=',')
            pipeline = i.split("/")[2].split("_")[1:]
            pipeline = (''.join(str(elem) + "_" for elem in pipeline)).replace(".csv_", "")
            csvs[pipeline] = csv
        # NOTE(review): 'csv' below is whichever file was loaded last;
        # this assumes all CSVs of a dataset share the same header row
        # and pipeline column -- confirm.
        stats = csv[0][:]
        pipelines = csv[1:,[0]]
        for i in range(1,len(stats)):
            if stats[i] == 'sorted_features':
                # Special column: dump per-pipeline feature/frequency files
                # into graphs/ instead of the aggregated results CSV.
                print dataset
                for j in sorted(csvs.keys()):
                    print "\t" + j
                    # NOTE(review): 'p' (the directory path above) is reused
                    # here as a row counter -- confirm this is intentional.
                    p = -1
                    for l in (csvs[j])[1:,[i]]:
                        # Strip numpy-array formatting from the cell text.
                        data = str(l[0])
                        data = data.replace("[","")
                        data = data.replace("]","")
                        data = data.replace("/","")
                        data = data.replace("'","")
                        p+=1
                        # Skip placeholder cells (starting with 'P' or 'n').
                        if data[0] == 'P' or data[0] == 'n':
                            continue
                        # print "\t\t", pipelines[p][0]
                        # print "\t\t\t", data.split(" ")
                        graph = open( "graphs/" + dataset + "_" + j + "_anova_frequencies.txt", "w")
                        feat_seq = 0;
                        bf = []
                        for feat in data.split(" "):
                            if feat.find("MFCC") != -1:
                                continue
                            else:
                                s = feat.split("_")
                                if len(s) <=4:
                                    continue
                                # One line per feature: abbreviated name,
                                # identifier, sequence number and frequency.
                                graph.write(s[0][0] + "_" + s[2][0] + "_" + s[3] + ", " + s[3] + ", " + str(feat_seq) + ", " + s[-1] + "\n" )
                                bf.append(s)
                                feat_seq+=1
                        graph.close()
                        # feats = [s[0][0] + "_" + s[2][0] + "_" + s[3] for s in bf]
                        # freqs = [max(5,int(s[-1])) for s in bf]
                        # feat_seqs = [fs for fs in range(len(bf))]
                        #
                        # print feats
                        # print freqs
                        #
                        # f = plt.figure(0)
                        # lefts = np.arange(len(bf))
                        # plt.bar(lefts, freqs, width=0.5)
                        # plt.show()
                continue
            # Regular stat column: write one CSV section per stat with one
            # row per pipeline configuration.
            dataset_results.write("\n" + stats[i] + "\n\n")
            dataset_results.write("configuration,")
            for k in csv[1:,[0]]:
                dataset_results.write(k[0] + ",")
            dataset_results.write("\n")
            for j in sorted(csvs.keys()):
                dataset_results.write(j + ",")
                for l in (csvs[j])[1:,[i]]:
                    dataset_results.write(str(l[0]) + ",")
                dataset_results.write("\n")
        dataset_results.close()
|
With Baselamp you can turn almost anything into a lamp. Whether it’s your favorite bottle of whiskey, can be bottle of your choice, whether it is wine bottle or liquor bottle like Whiskey/Whisky/Scotch one (any brands, Scottish or Irish), a glass incense burner, a vintage book, a flower-filled vase, a particularly imposing action figure, or even a fishbowl for that goldfish you won at a carnival back in 2003 – the uses for Baselamp are endless, and limited only by your imagination.
Baselamp was designed and refined over the past year and a half with a focus on versatility and ease of use – with an integral chip-on-board LED and a recessed touch-capacitive dimmer switch, we’ve created a lamp that is not only super bright (410 lumens!) but also doesn’t create too much heat or any UV light (a particular concern for the aforementioned whiskey application).
|
"""The WaveBlocks Project
Provides several computation routines for
handling time and timesteps.
@author: R. Bourquin
@copyright: Copyright (C) 2010, 2011, 2012, 2013, 2015, 2016 R. Bourquin
@license: Modified BSD License
"""
import math

from scipy import floor
# Public API of this module.
__all__ = ["TimeManager"]
class TimeManager(object):
    r"""This class performs several computations with time, timesteps and so forth.

    The important quantities here are:

    ============ ============== ======================================================
    Quantity     Parameter Name Description
    ============ ============== ======================================================
    :math:`T`    T              the fixed simulation end time
    :math:`\tau` dt             the size of the timestep
    :math:`N`    nsteps         the overall number of timesteps.
    :math:`t`                   an unspecified time in the interval :math:`[0, T]`
    :math:`n`                   an unspecified timestep in the interval :math:`[0, N]`
    ============ ============== ======================================================

    The important relations that hold are :math:`T = N \tau` and
    in analogy :math:`t = n \tau`. There are also conversion routines
    for :math:`t` and :math:`n`.

    The simulation parameters handed over to the constructor must contain at least
    two out of the three values :math:`T`, :math:`\tau` and :math:`N`. If all three
    are given, the user is responsible for compatible values.

    Additionally the class contains some routines for determining
    if and when some events (for example saving data) should occur.
    """

    def __init__(self, parameters):
        if parameters is None:
            parameters = {}

        # We need two out of three: T, dt and nsteps
        have_enough = 0

        if "T" in parameters:
            self._T = float(parameters["T"])
            have_enough += 1
        else:
            self._T = None

        if "dt" in parameters:
            self._dt = float(parameters["dt"])
            have_enough += 1
        else:
            self._dt = None

        if "nsteps" in parameters:
            self._nsteps = int(parameters["nsteps"])
            have_enough += 1
        else:
            self._nsteps = None

        if have_enough < 2:
            # Fixed message wording (was: "provide to little data").
            raise KeyError("Parameters provide too little data to construct a 'TimeManager'.")

        # Derive the missing third quantity from the two given ones.
        if self._T is None:
            self._T = self.compute_endtime()

        if self._dt is None:
            self._dt = self.compute_timestep_size()

        if self._nsteps is None:
            self._nsteps = self.compute_number_timesteps()

        # Interval for regular events
        self._interval = 1
        if "write_nth" in parameters:
            self.set_interval(int(parameters["write_nth"]))

        # List of timesteps of irregular events
        self._eventtimes = []
        if "save_at" in parameters:
            self.add_to_eventlist(parameters["save_at"])

    def __str__(self):
        s = "TimeManager configured with:\n"
        s += " Final time T: "+str(self._T)+"\n"
        s += " Timestep size dt: "+str(self._dt)+"\n"
        s += " Number of steps : "+str(self._nsteps)+"\n"
        return s

    def set_T(self, T):
        r"""Set the simulation endtime :math:`T`.

        :param T: The simulation end time.
        """
        self._T = float(T)

    def set_dt(self, dt):
        r"""Set the simulation timestep size :math:`\tau`.

        :param dt: The simulation timestep size.
        """
        self._dt = float(dt)

    def set_nsteps(self, nsteps):
        r"""Set the number of timesteps the simulation runs.

        :param nsteps: The number :math:`n` timesteps we do.
        """
        self._nsteps = int(nsteps)

    def get_T(self):
        r"""Get the simulation endtime :math:`T`.

        :returns: The endtime :math:`T`.
        """
        return self._T

    def get_dt(self):
        r"""Get the simulation timestep size :math:`\tau`.

        :returns: The timestep :math:`\tau`.
        """
        return self._dt

    def get_nsteps(self):
        r"""Get the number :math:`n` of timesteps the simulation runs.

        :returns: the number :math:`n` of timesteps.
        """
        return self._nsteps

    def compute_endtime(self):
        r"""Computes the simulation endtime :math:`T`.

        :returns: The endtime :math:`T`.
        """
        if self._T is not None:
            return self._T
        else:
            return float(self._nsteps * self._dt)

    def compute_timestep_size(self):
        r"""Computes the simulation timestep size :math:`\tau`.

        :returns: The timestep :math:`\tau`.
        """
        if self._dt is not None:
            return self._dt
        else:
            return self._T / float(self._nsteps)

    def compute_number_timesteps(self):
        r"""Computes the number :math:`n` of time steps we will perform.

        :returns: the number :math:`n` of timesteps.
        """
        if self._nsteps is not None:
            return self._nsteps
        else:
            # math.floor replaces the deprecated 'scipy.floor' alias;
            # behavior is identical for scalar floats.
            return int(math.floor(self._T / float(self._dt)))

    def compute_timestep(self, t):
        r"""Compute the timestep :math:`n` from a time :math:`t` such that
        :math:`t = n \tau` holds.

        :param t: The time t of which we want to find the timestep number.
        :returns: The corresponding timestep :math:`n`.

        Note that the user has to ensure that time :math:`t` is an integral
        multiple of :math:`\tau`.
        """
        stepo = t / self._dt
        step = round(stepo)

        if abs(stepo - step) > 1e-10:
            print("Warning: Questionable rounding for timestep computation!")

        return int(step)

    def compute_time(self, n):
        r"""Compute the time :math:`t` from a timestep :math:`n` such that
        :math:`t = n \tau` holds.

        :param n: The timestep n of which we want to find the corresponding time.
        :returns: The corresponding time :math:`t`.
        """
        return float(n * self._dt)

    def set_interval(self, interval):
        r"""Set the interval for regular events.

        :param interval: The interval at which regular events get triggered.

        Note that a value of ``0`` means there are no regular events.
        """
        self._interval = int(interval)

    def add_to_eventlist(self, alist):
        r"""Add a list of times and/or timesteps to the list of
        times when irregular events get triggered.

        :param alist: A list with integers (interpreted as timesteps)
                      and/or floats (interpreted as times)

        Note that the times and timesteps can be mixed and need not to be
        given in monotone order.
        """
        timesteps = []

        # If the list is empty (global default), shortcut
        if len(alist) == 0:
            return

        # Integers are interpreted as timesteps, floats are interpreted as
        # times (and converted to timesteps). Floats are tested first since
        # isinstance(True, int) holds for booleans as well.
        for item in alist:
            if isinstance(item, float):
                timesteps.append(self.compute_timestep(item))
            elif isinstance(item, int):
                timesteps.append(item)

        # Validate timesteps and check if n in [0,...,N]
        tmp = len(timesteps)
        timesteps = [i for i in timesteps if 0 <= i <= self._nsteps]
        if tmp != len(timesteps):
            print("Warning: Dropped %d timestep(s) due to invalidity!" % (tmp - len(timesteps)))

        # Assure unique elements (silently remove duplicates) and sort
        # in ascending order.
        times = sorted(set(self._eventtimes) | set(timesteps))

        # Write back
        self._eventtimes = times

    def compute_number_events(self):
        r"""Compute the number of events we will perform during the simulation.
        This can for example be used to determine how much space to allocate
        in the output files if the events are times at which simulation data
        is saved.

        :returns: The number of events.
        """
        # We do not save at regular intervals
        if self._interval == 0:
            # The number of saves resulting from saving at a regular interval is zero
            n_si = 0
            # Determine the number of saves resulting from the savelist
            n_sl = len(self._eventtimes)
        # We do save at regular intervals
        else:
            # Determine the number of saves resulting from saving at a regular interval
            n_si = 1 + self._nsteps // self._interval
            # Determine the number of saves resulting from the savelist and
            # exclude the timesteps which coincide with the regular intervals
            n_sl = len([i for i in self._eventtimes if i % self._interval != 0])

        # Total number of saves we will perform is given by the sum
        number_events = n_si + n_sl

        return number_events

    def is_event(self, n):
        r"""Determine if an event occurs right now.

        :param n: The current timestep in question.
        :returns: ``True`` or ``False``.
        """
        if self._interval == 1:
            # Save every timestep
            return True
        elif self._interval != 0 and n % self._interval == 0:
            # Save every k-th timestep specified by the interval
            return True
        elif n in self._eventtimes:
            # Save if the n is in the list of timesteps
            return True
        return False
|
''As - Is'' Sale. All original inside. House needs work and some TLC but has great potential. Very desirable block with wooded parkland across the street, with a children's playground on the corner (Drumgoole Tot Lot). Listing Agent is owner.
|
from JumpScale import j
import JumpScale.baselib.remote
import sys
# import importlib
import imp
try:
import ujson as json
except:
import json
import JumpScale.baselib.redis
import copy
import time
import JumpScale.baselib.webdis
from fabric.api import hide
import time
# Module-level Redis client on a non-default port (9999); presumably used
# for admin/coordination state -- confirm against the JumpScale setup.
redis=j.clients.redis.getRedisClient("127.0.0.1", 9999)
class Node():
def __init__(self,name,args={}):
self.model=j.core.admin.hrd.getDictFromPrefix("node.%s"%name)
self.ssh=None
self.args=args
def executeCmds(self,cmds,die=True,insandbox=False):
scriptRun=self.getScriptRun()
out=scriptRun.out
for line in cmds.split("\n"):
if line.strip()<>"" and line[0]<>"#":
self.log("execcmd",line)
if insandbox:
line2="source /opt/jsbox/activate;%s"%line
else:
line2=line
try:
out+="%s\n"%self.ssh.run(line2)
except BaseException,e:
if die:
self.raiseError("execcmd","error execute:%s"%line,e)
def killProcess(self,filterstr,die=True):
found=self.getPids(filterstr)
for item in found:
self.log("killprocess","kill:%s"%item)
try:
self.ssh.run("kill -9 %s"%item)
except Exception,e:
if die:
self.raiseError("killprocess","kill:%s"%item,e)
def getPids(self,filterstr,die=True):
self.log("getpids","")
with hide('output'):
try:
out=self.ssh.run("ps ax")
except Exception,e:
if die:
self.raiseError("getpids","ps ax",e)
found=[]
for line in out.split("\n"):
if line.strip()<>"":
if line.find(filterstr)<>-1:
line=line.strip()
found.append(int(line.split(" ")[0]))
return found
def deployssh(self):
self.connectSSH()
keyloc="/root/.ssh/id_dsa.pub"
if not j.system.fs.exists(path=keyloc):
if j.console.askYesNo("do you want to generate new local ssh key, if you have one please put it there manually!"):
do=j.system.process.executeWithoutPipe
do("ssh-keygen -t dsa")
else:
j.application.stop()
key=j.system.fs.fileGetContents(keyloc)
self.ssh.ssh_authorize("root",key)
def jpackageStop(self,name,filterstr,die=True):
self.log("jpackagestop","%s (%s)"%(name,filterstr))
try:
self.ssh.run("source /opt/jsbox/activate;jpackage stop -n %s"%name)
except Exception,e:
if die:
self.raiseError("jpackagestop","%s"%name,e)
found=self.getPids(filterstr)
if len(found)>0:
for item in found:
try:
self.ssh.run("kill -9 %s"%item)
except:
pass
def jpackageStart(self,name,filterstr,nrtimes=1,retry=1):
found=self.getPids(filterstr)
self.log("jpackagestart","%s (%s)"%(name,filterstr))
for i in range(retry):
if len(found)==nrtimes:
return
scriptRun=self.getScriptRun()
try:
self.ssh.run("source /opt/jsbox/activate;jpackage start -n %s"%name)
except Exception,e:
if die:
self.raiseError("jpackagestart","%s"%name,e)
time.sleep(1)
found=self.getPids(filterstr)
if len(found)<nrtimes:
self.raiseError("jpackagestart","could not jpackageStart %s"%name)
def serviceStop(self,name,filterstr):
self.log("servicestop","%s (%s)"%(name,filterstr))
try:
self.ssh.run("sudo stop %s"%name)
except:
pass
found=self.getPids(filterstr)
scriptRun=self.getScriptRun()
if len(found)>0:
for item in found:
try:
self.ssh.run("kill -9 %s"%item)
except:
pass
found=self.getPids(filterstr)
if len(found)>0:
self.raiseError("servicestop","could not serviceStop %s"%name)
def serviceStart(self,name,filterstr,die=True):
self.log("servicestart","%s (%s)"%(name,filterstr))
found=self.getPids(filterstr)
if len(found)==0:
try:
self.ssh.run("sudo start %s"%name)
except:
pass
found=self.getPids(filterstr)
if len(found)==0 and die:
self.raiseError("servicestart","could not serviceStart %s"%name)
def serviceReStart(self,name,filterstr):
self.serviceStop(name,filterstr)
self.serviceStart(name,filterstr)
def raiseError(self,action,msg,e=None):
scriptRun=self.getScriptRun()
scriptRun.state="ERROR"
if e<>None:
msg="Stack:\n%s\nError:\n%s\n"%(j.errorconditionhandler.parsePythonErrorObject(e),e)
scriptRun.state="ERROR"
scriptRun.error+=msg
for line in msg.split("\n"):
toadd="%-10s: %s\n" % (action,line)
scriptRun.error+=toadd
print "**ERROR** %-10s:%s"%(self.name,toadd)
self.lastcheck=0
j.admin.setNode(self)
j.admin.setNode(self)
raise RuntimeError("**ERROR**")
def log(self,action,msg):
out=""
for line in msg.split("\n"):
toadd="%-10s: %s\n" % (action,line)
print "%-10s:%s"%(self.name,toadd)
out+=toadd
def setpasswd(self,passwd):
#this will make sure new password is set
self.log("setpasswd","")
cl=j.tools.expect.new("sh")
if self.args.seedpasswd=="":
self.args.seedpasswd=self.findpasswd()
try:
cl.login(remote=self.name,passwd=passwd,seedpasswd=None)
except Exception,e:
self.raiseError("setpasswd","Could not set root passwd.")
def findpasswd(self):
self.log("findpasswd","find passwd for superadmin")
cl=j.tools.expect.new("sh")
for passwd in j.admin.rootpasswds:
try:
pass
cl.login(remote=self.name,passwd=passwd,seedpasswd=None)
except Exception,e:
self.raiseError("findpasswd","could not login using:%s"%passwd,e)
continue
self.passwd=passwd
j.admin.setNode(self)
return "unknown"
def check(self):
j.base.time.getTimeEpoch()
def connectSSH(self):
ip=self.model["ip"]
port=self.model["port"]
passwd=self.model["passwd"]
self.ssh=j.remote.cuisine.connect(ip,port,passwd)
# if j.system.net.pingMachine(self.args.remote,1):
# self.ip=self.args.remote
# else:
# j.events.opserror_critical("Could not ping node:'%s'"% self.args.remote)
return self.ssh
def uploadFromCfgDir(self,ttype,dest,additionalArgs={}):
dest=j.dirs.replaceTxtDirVars(dest)
cfgdir=j.system.fs.joinPaths(self._basepath, "cfgs/%s/%s"%(j.admin.args.cfgname,ttype))
additionalArgs["hostname"]=self.name
cuapi=self.ssh
if j.system.fs.exists(path=cfgdir):
self.log("uploadcfg","upload from %s to %s"%(ttype,dest))
tmpcfgdir=j.system.fs.getTmpDirPath()
j.system.fs.copyDirTree(cfgdir,tmpcfgdir)
j.dirs.replaceFilesDirVars(tmpcfgdir)
j.application.config.applyOnDir(tmpcfgdir,additionalArgs=additionalArgs)
items=j.system.fs.listFilesInDir(tmpcfgdir,True)
done=[]
for item in items:
partpath=j.system.fs.pathRemoveDirPart(item,tmpcfgdir)
partpathdir=j.system.fs.getDirName(partpath).rstrip("/")
if partpathdir not in done:
cuapi.dir_ensure("%s/%s"%(dest,partpathdir), True)
done.append(partpathdir)
try:
cuapi.file_upload("%s/%s"%(dest,partpath),item)#,True,True)
except Exception,e:
j.system.fs.removeDirTree(tmpcfgdir)
self.raiseError("uploadcfg","could not upload file %s to %s"%(ttype,dest))
j.system.fs.removeDirTree(tmpcfgdir)
def upload(self, source, dest):
    """Upload the file tree at local `source` to `dest` on this node over ssh.

    BUG FIX: the original loop referenced `items`, `cfgdir`, `cuapi` and
    `done` without ever defining them (and carried an unused
    `args = j.admin.args`), so it raised NameError on every call.  They are
    now initialized the same way uploadFromCfgDir() does, relative to
    `source`.
    """
    if not j.system.fs.exists(path=source):
        self.raiseError("upload", "could not find path:%s" % source)
    self.log("upload", "upload %s to %s" % (source, dest))
    cuapi = self.ssh
    items = j.system.fs.listFilesInDir(source, True)
    done = []  # remote dirs already ensured
    for item in items:
        partpath = j.system.fs.pathRemoveDirPart(item, source)
        partpathdir = j.system.fs.getDirName(partpath).rstrip("/")
        if partpathdir not in done:
            cuapi.dir_ensure("%s/%s" % (dest, partpathdir), True)
            done.append(partpathdir)
        cuapi.file_upload("%s/%s" % (dest, partpath), item)
def __repr__(self):
    """Fixed-width one-line summary: grid, name, roles, ip, host, enabled."""
    role_list = ",".join(self.roles)
    return "%-10s %-10s %-50s %-15s %-10s %s" % (
        self.gridname, self.name, role_list, self.ip, self.host, self.enabled)

__str__ = __repr__
|
Today we have in our hand is the not so new HyperX Predator RGB Memory Kit. While we will work through the main review slowly, we will leave you all some pictures to drool over since this is a drool-worthy kit. The kit in question comes with 2933MHz rated speed with Cas latency of 15. 2933 C15 while is not very high up in the enthusiast ladder, it is the overclocking potential that has got us hooked! Which is one more reason we are going through the review slowly.
Coming to the kit itself, we have the standard black PCB with Black heat spreaders on top with LEDS on top.
The memory has Infrared Sync technology, this technology creates a new way to experience RGB lighting. HyperX has designed IR communication channels onto each memory module, allowing multiple modules to sync LED lighting and produce an exceptional color and pattern display, allowing gamers and overclockers to further personalize their gaming visual experience and customize their PCs and system builds. It is compatible with lighting control software from a range of motherboard vendors including ASUS Aura Sync, Gigabyte RGB Fusion, and MSI Mystic Light Sync.
The HyperX Predator RGB is optimized for Intel platforms on Z170/Z270/X370 and X99/X299 as well as AM4 platforms, we did our test in AM4 with a Ryzen 5 1600 and B350 Motherboard.
The HyperX Predator RGB comes with the Hynix CJR chips and as such, we expected a fair bit of overclocking from it. So we fired up our test system and tried our hand at a little bit of overclocking without going too hard on the sticks. We kept the voltages on the sticks within acceptable limits and went on clocking as far as the Ryzen IMC allowed us.
From the outset the stick comes with two XMP settings of 2667 MHz and 2933 MHz. We took them as our pointers and went on clocking from there. Do note, the XMP settings won't work in Ryzen systems.
With a little tweak in voltages, we were able to bump up the speed to 3200Mhz 16-17-17-39 and finally to 3466Mhz 18-18-18-39 which is not too shabby for a Hynix kit on Ryzen first gen IMC. While this humble reviewer wanted to go beyond to the 3600Mhz territory, our board refused to cooperate. Till this point, our system was completely stable with normal synthetic workloads and some heavy overwatch gaming sessions.
We ran the kit through our short benchmark suite which included memory performance sensitive workloads like Aida64, SuperPi etc. The kit passed all the tests with flying colors as expected from a Kingston kit.
AIDA64 implements a set of 64-bit benchmarks to measure how fast the computer performs various data processing tasks and mathematical calculations. Multi-threaded memory and cache benchmarks are available to analyze system RAM bandwidth and latency. For our review we used the Memory subsystem benchmark.
wPrime uses a recursive call of Newton’s method for estimating functions, with f(x)=x2-k, where k is the number we’re sqrting, until Sgn(f(x)/f'(x)) does not equal that of the previous iteration, starting with an estimation of k/2. It then uses an iterative calling of the estimation method a set amount of times to increase the accuracy of the results. It then confirms that n(k)2=k to ensure the calculation was correct. It repeats this for all numbers from 1 to the requested maximum. System stability is the key with this benchmark so we decided to include it.
Rounding off, this kit left us with mixed feelings. On one hand, the kit performed beautifully with whatever we threw at it while looking damn good; on the other, given the fact this kit has the “HyperX” moniker, I personally was a little bit disappointed with the lack of B-die goodness. Overall, this is a damn good kit for someone who wants to dip toes into memory overclocking but doesn’t want to lose the RGB bling. Keep this kit in consideration while shopping for new memory and you will not be disappointed.
We feel that we weren’t able to reach the limit of this kit and we will come back to this with a new platform to see if things change but till then this is going to be our go-to kit.
The HyperX Predator RGB gets a Silver from me!
|
from pprint import pprint
import numpy
import random
import redis
import argparse
from sklearn import cluster
from sklearn import metrics
from sklearn.feature_extraction import DictVectorizer
from sklearn.metrics.pairwise import pairwise_distances
def vectorize_key(key):
    """Map a colon-separated redis key to a {position: segment} feature dict."""
    segments = key.split(':')
    return {index: segment for index, segment in enumerate(segments)}
def unvectorize_key(key):
    """Inverse of vectorize_key(): join the segments back with ':'.

    BUG FIX: the original joined `key.values()` in whatever order the dict
    happened to iterate, which is not guaranteed to be positional order on
    older interpreters; segments are now joined sorted by their index key.
    """
    return ':'.join(key[index] for index in sorted(key))
def clusterize_keys(keys_vector, dbname):
    # Vectorize the per-key segment dicts and cluster them: k-means with 10
    # clusters when dbname == 'kmeans', otherwise DBSCAN over a pairwise
    # cosine-distance matrix.  Returns the per-sample cluster labels.
    vectorizer = DictVectorizer()
    X = vectorizer.fit_transform(keys_vector)
    if dbname == 'kmeans':
        db = cluster.KMeans(n_clusters=10)
    else:
        X = pairwise_distances(X, metric='cosine')
        # NOTE(review): X is now a distance matrix but DBSCAN is not given
        # metric='precomputed', so it treats the rows as coordinates -- verify.
        db = cluster.DBSCAN(min_samples=1)
    print "Feature len: {}".format(len(vectorizer.get_feature_names()))
    db.fit(X)
    labels = db.labels_
    # label -1 marks noise points (DBSCAN convention); not counted as a cluster
    nb_clusters = len(set(labels)) - (1 if -1 in labels else 0)
    print 'Number of cluster found: {}'.format(nb_clusters)
    return labels
# --- command line, redis connection and key sampling --------------------------
parser = argparse.ArgumentParser(description="Configuration for redis stats")
parser.add_argument('-r', '--redis-host',
                    default='luke2.mapado.com', help='Redis hostname (default: localhost)')
parser.add_argument('-p', '--redis-port', type=int,
                    default=6379, help='Redis port (default: 6379)')
# NOTE(review): help text below says "Redis port" but the option actually caps
# the number of sampled keys.
parser.add_argument('--max-keys', type=int,
                    default=None, help='Redis port (default: None)')
args = parser.parse_args()
print args
# NOTE(review): this rebinds the name `redis`, shadowing the imported module.
redis = redis.StrictRedis(host=args.redis_host, port=args.redis_port)
keys = redis.keys()
print "Keys OK: {}".format(len(keys))
keys_vector = [vectorize_key(key) for key in keys]
# optionally down-sample to --max-keys random keys
if args.max_keys:
    random.shuffle(keys_vector)
    keys_vector = keys_vector[:args.max_keys]
# X = pairwise_distances(X, metric='cosine')
# db = cluster.DBSCAN()
# import ipdb; ipdb.set_trace()
# First pass: cluster the raw keys with k-means, then count members per
# cluster.  `keys_map` remembers the first key seen per label; `groups` maps
# that representative key -> member count.
labels =clusterize_keys(keys_vector, 'kmeans')
groups = {}
keys_map = {}
for index, c in enumerate(labels):
    # -1 marks noise (DBSCAN convention); skip it
    if c == -1:
        continue
    key = unvectorize_key(keys_vector[index])
    if not keys_map.get(c):
        keys_map[c] = key
        groups[key] = 1
    else:
        groups[keys_map[c]] += 1
pprint(groups)
# Second pass: cluster the representative keys themselves with DBSCAN and
# aggregate the member counts per final cluster.
second_keys = [vectorize_key(key) for key in groups.keys()]
labels = clusterize_keys(second_keys, 'dbscan')
out = {}
for index, c in enumerate(labels):
    key = unvectorize_key(second_keys[index])
    # BUG FIX: the original tested `groups.get(c)` -- a cluster label against
    # the key-string dict -- which is always falsy, so the else-branch never
    # ran and counts from clusters with several representatives were dropped.
    if c not in out:
        out[c] = {
            'example': key,
            'number': groups[key]
        }
    else:
        out[c]['number'] += groups[key]
pprint(out)
#Y = vectorizer.fit_transform(second_keys)
#Y = pairwise_distances(Y, metric='cosine')
#dby = cluster.DBSCAN()
#dby.fit(Y)
#
|
With the recent revelations that eBay was hacked and that customer’s data may have been compromised, we’re all reminded how important it is to do as much as we can to make sure our websites are as secure as possible. While no one can guarantee that your site will never get attacked, there are some definitive steps you can take to protect your website. Here are some important points to remember.
Always make sure your software is up to date. Experts agree that this is the most important step you can take to prevent a malicious attack on your site. Whether you’re using a CMS like WordPress or Joomla (or another type of application) as your website platform, it’s critical that you make sure to install the latest updates and security patches. Outdated software is probably the biggest culprit in website attacks—in the past, I’ve unfortunately paid the price for not updating software and have since learned my lesson!
Automated programs have become more advanced in aiding malicious attacks on websites by “guessing” your passwords. So, it isn’t enough anymore to just avoid using “PASSWORD”, “123”, or your name as your password. The stronger you can make your password, the better. In addition to using upper and lower case letters, numbers, and special characters, extra-long passwords are excellent in thwarting the malicious password guessing programs. Try adding a phrase or saying together with your letters, numbers, and special characters for maximum password protection.
I just saw an item in the news about an Australian web hosting company that lost their customer’s data! While this is an extreme and uncommon occurrence, it does point out the importance of having quality web hosting and a well-established, well regarded web host. Avoid free web hosting and ultra-cheap hosting deals from obscure or unfamiliar hosting companies. There are plenty of great deals on web hosting plans you can find from quality, top-notch web hosting companies.
Which leads us to the most important safeguard: Make sure you backup your website data yourself. Most web hosting companies will run backups of your site for free. Although most hosts will charge a fee to restore your data, you can find some that will do it for free. In either case, you should definitely make sure you regularly run a backup of your website and keep a copy of the data yourself. That way, you’re covered no matter what your web host’s policy is.
|
#!/usr/bin/python
# udp_interface.py
#
#
# Copyright (C) 2008-2018 Veselin Penev, https://bitdust.io
#
# This file (udp_interface.py) is part of BitDust Software.
#
# BitDust is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# BitDust Software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with BitDust Software. If not, see <http://www.gnu.org/licenses/>.
#
# Please contact us if you have any questions at bitdust.io@gmail.com
#
#
#
#
"""
..
module:: udp_interface
"""
#------------------------------------------------------------------------------
from __future__ import absolute_import
import six
#------------------------------------------------------------------------------
import os
import sys
#------------------------------------------------------------------------------
try:
from twisted.internet import reactor
except:
sys.exit('Error initializing twisted.internet.reactor in udp_interface.py')
from twisted.web import xmlrpc
from twisted.internet.defer import Deferred, succeed, fail
#------------------------------------------------------------------------------
from logs import lg
from lib import nameurl
#------------------------------------------------------------------------------
_Debug = True  # when True, emit verbose lg.out() tracing throughout this module
#------------------------------------------------------------------------------
_GateProxy = None  # gateway proxy (xmlrpc.Proxy or direct object), set by GateInterface.init()
#------------------------------------------------------------------------------
def proxy():
    """Return the gateway proxy registered by GateInterface.init(), or None."""
    return _GateProxy
#------------------------------------------------------------------------------
def idurl_to_id(idurl):
    """
    Convert an HTTP identity URL into a short 'user@host[:port]' id.
    """
    proto, host, port, filename = nameurl.UrlParse(idurl)
    assert proto == 'http'
    user_id = filename.replace('.xml', '') + '@' + host
    # port 80 is the http default and therefore left implicit
    if port and port not in ['80', 80, ]:
        user_id = '%s:%s' % (user_id, port)
    return user_id
def id_to_idurl(user_id):
    """
    Convert a short 'user@host' id back into an HTTP identity URL.

    Returns None when `user_id` does not contain exactly one '@'.

    IMPROVED: the bare `except:` (which silently swallowed every error,
    including typos inside the try block) is narrowed to the ValueError that
    a failed tuple-unpacking actually raises.
    """
    try:
        filename, host = user_id.split('@')
    except ValueError:
        return None
    return 'http://%s/%s.xml' % (host, filename)
#------------------------------------------------------------------------------
class GateInterface():
    """
    UDP transport plugin interface expected by the BitDust gateway.
    """

    def init(self, xml_rpc_url_or_object):
        """
        Remember the gateway proxy (an XML-RPC URL string or a direct object)
        and notify it that the udp transport is initialized.
        """
        global _GateProxy
        if _Debug:
            lg.out(4, 'udp_interface.init %s' % xml_rpc_url_or_object)
        if isinstance(xml_rpc_url_or_object, six.string_types):
            _GateProxy = xmlrpc.Proxy(xml_rpc_url_or_object, allowNone=True)
        else:
            _GateProxy = xml_rpc_url_or_object
        _GateProxy.callRemote('transport_initialized', 'udp')
        return True

    def shutdown(self):
        """
        Destroy the udp node automat and drop the gateway proxy reference.
        """
        from transport.udp import udp_node
        global _GateProxy
        if _Debug:
            lg.out(4, 'udp_interface.shutdown')
        udp_node.Destroy()
        if _GateProxy:
            _GateProxy = None
        return succeed(True)

    def connect(self, options):
        """
        Bring the udp node online with the given transport options.
        """
        from transport.udp import udp_node
        if _Debug:
            lg.out(8, 'udp_interface.connect %s' % str(options))
        udp_node.A('go-online', options)
        return True

    def disconnect(self):
        """
        Take the udp node offline.
        """
        from transport.udp import udp_node
        if _Debug:
            lg.out(4, 'udp_interface.disconnect')
        udp_node.A('go-offline')
        return succeed(True)

    def build_contacts(self, id_obj):
        """
        Build the list of 'udp://user@host' contact strings for an identity.
        """
        result = []
        result.append(
            'udp://%s@%s' %
            (id_obj.getIDName().lower(),
             id_obj.getIDHost()))
        if _Debug:
            lg.out(4, 'udp_interface.build_contacts : %s' % str(result))
        return result

    def verify_contacts(self, id_obj):
        """
        Return True when the identity still carries this node's udp contact.
        """
        udp_contact = 'udp://%s@%s' % (id_obj.getIDName().lower(),
                                       id_obj.getIDHost())
        if id_obj.getContactIndex(contact=udp_contact) < 0:
            if _Debug:
                lg.out(
                    4,
                    'udp_interface.verify_contacts returning False: udp contact not found or changed')
            return False
        if _Debug:
            lg.out(4, 'udp_interface.verify_contacts returning True')
        return True

    def send_file(self, remote_idurl, filename, host, description='', keep_alive=True):
        """
        Queue `filename` for sending to `host`; returns a Deferred fired when
        the transfer completes.

        GENERALIZED (backward-compatible): `keep_alive` was hard-coded to
        True in every queue call; it is now a parameter defaulting to True so
        send_file_single() can request single-shot delivery.
        """
        from transport.udp import udp_session
        from transport.udp import udp_node
        result_defer = Deferred()
        active_sessions = udp_session.get_by_peer_id(host)
        if active_sessions:
            # Identity/Ack packets jump to the front of the queue
            if description.startswith('Identity') or description.startswith('Ack'):
                active_sessions[0].file_queue.insert_outbox_file(
                    filename, description, result_defer, keep_alive=keep_alive)
            else:
                active_sessions[0].file_queue.append_outbox_file(
                    filename, description, result_defer, keep_alive=keep_alive)
        else:
            # no session with that peer yet: stash the file and start connecting
            udp_session.add_pending_outbox_file(
                filename, host, description, result_defer, keep_alive=keep_alive)
            udp_node.A('connect', host)
        return result_defer

    def send_file_single(self, remote_idurl, filename, host, description=''):
        """
        Send one file without keeping the session alive afterwards.

        BUG FIX: the original called self.send_file(self, ...) -- passing
        `self` twice and a `keep_alive` keyword send_file() did not accept --
        which raised TypeError on every call.
        """
        return self.send_file(remote_idurl, filename, host, description, keep_alive=False)

    def send_keep_alive(self, host):
        """
        Ask every active session with `host` to emit a keep-alive packet.
        """
        from transport.udp import udp_session
        for sess in udp_session.sessions_by_peer_id().get(host, []):
            sess.automat('send-keep-alive')

    def connect_to_host(self, host=None, idurl=None):
        """
        Open a session to `host` (derived from `idurl` when host is empty).
        """
        from transport.udp import udp_node
        if not host:
            host = idurl_to_id(idurl)
        if _Debug:
            lg.out(12, 'udp_interface.connect %s' % host)
        udp_node.A('connect', host)

    def disconnect_from_host(self, host):
        """
        Not implemented for the udp transport.
        """

    def cancel_outbox_file(self, host, filename):
        """
        Remove `filename` from the outbox queue of every session with `host`
        and from the pending list; returns True when something was removed.
        """
        from transport.udp import udp_session
        ok = False
        for sess in udp_session.sessions().values():
            if sess.peer_id != host:
                continue
            i = 0
            while i < len(sess.file_queue.outboxQueue):
                fn, descr, result_defer, keep_alive = sess.file_queue.outboxQueue[i]
                if fn == filename:
                    if _Debug:
                        lg.out(14, 'udp_interface.cancel_outbox_file removed %s in %s' % (os.path.basename(fn), sess))
                    sess.file_queue.outboxQueue.pop(i)
                    ok = True
                else:
                    i += 1
        udp_session.remove_pending_outbox_file(host, filename)
        return ok

    def cancel_file_sending(self, transferID):
        """
        Cancel the outgoing file with the given transfer id; True on success.
        """
        from transport.udp import udp_session
        for sess in udp_session.sessions().values():
            for out_file in sess.file_queue.outboxFiles.values():
                if out_file.transfer_id and out_file.transfer_id == transferID:
                    out_file.cancel()
                    return True
        return False

    def cancel_file_receiving(self, transferID):
        """
        Always False: the udp transport can not stop a single incoming file
        without closing the whole session, which is not what callers want.
        """
        return False

    def list_sessions(self):
        """
        Return the list of currently active udp sessions.
        """
        from transport.udp import udp_session
        return list(udp_session.sessions().values())

    def list_streams(self, sorted_by_time=True):
        """
        Return the list of stream consumers, oldest first by default.
        """
        from transport.udp import udp_stream
        result = []
        for stream in udp_stream.streams().values():
            result.append(stream.consumer)
        if sorted_by_time:
            result.sort(key=lambda stream: stream.started)
        return result

    def find_session(self, host):
        """
        Return the sessions opened with `host` (empty list when none).
        """
        from transport.udp import udp_session
        return udp_session.sessions_by_peer_id().get(host, [])

    def find_stream(self, stream_id=None, transfer_id=None):
        """
        Find a stream consumer by stream id or transfer id; None when absent.
        """
        from transport.udp import udp_stream
        for stream in udp_stream.streams().values():
            if stream_id and stream_id == stream.consumer.stream_id:
                return stream.consumer
            if transfer_id and transfer_id == stream.consumer.transfer_id:
                return stream.consumer
        return None
#------------------------------------------------------------------------------
def proxy_errback(x):
    """Errback for gateway calls: optionally trace the failure, then swallow it."""
    if _Debug:
        lg.out(6, 'udp_interface.proxy_errback ERROR %s' % x)
    return None
#------------------------------------------------------------------------------
def interface_transport_initialized():
    """
    Report to the gateway that the udp transport finished initialization.
    """
    p = proxy()
    if not p:
        lg.warn('transport_udp is not ready')
        return fail(Exception('transport_udp is not ready')).addErrback(proxy_errback)
    return p.callRemote('transport_initialized', 'udp').addErrback(proxy_errback)
def interface_receiving_started(host, new_options=None):
    """
    Tell the gateway that the udp transport started receiving on `host`.

    BUG FIX: `new_options` used a shared mutable default ({}); it now
    defaults to None and is normalized to a fresh dict, which is exactly
    what the remote call previously received.
    """
    if new_options is None:
        new_options = {}
    if proxy():
        return proxy().callRemote('receiving_started', 'udp', host, new_options).addErrback(proxy_errback)
    lg.warn('transport_udp is not ready')
    return fail(Exception('transport_udp is not ready')).addErrback(proxy_errback)
def interface_receiving_failed(error_code=None):
    """
    Tell the gateway that the udp transport failed to start receiving.
    """
    p = proxy()
    if not p:
        lg.warn('transport_udp is not ready')
        return fail(Exception('transport_udp is not ready')).addErrback(proxy_errback)
    return p.callRemote('receiving_failed', 'udp', error_code).addErrback(proxy_errback)
def interface_disconnected(result=None):
    """
    Tell the gateway that the udp transport was disconnected.

    Unlike the other notifications, a missing gateway proxy is not an error
    here: there is nobody left to notify, so the result succeeds unchanged.
    """
    p = proxy()
    if not p:
        lg.warn('transport_udp is not ready')
        return succeed(result)
    return p.callRemote('disconnected', 'udp', result).addErrback(proxy_errback)
def interface_register_file_sending(host, receiver_idurl, filename, size, description=''):
    """
    Register an outgoing file transfer with the gateway.
    """
    p = proxy()
    if not p:
        lg.warn('transport_udp is not ready')
        return fail(Exception('transport_udp is not ready')).addErrback(proxy_errback)
    return p.callRemote(
        'register_file_sending', 'udp', host, receiver_idurl,
        filename, size, description,
    ).addErrback(proxy_errback)
def interface_register_file_receiving(host, sender_idurl, filename, size):
    """
    Register an incoming file transfer with the gateway.
    """
    p = proxy()
    if not p:
        lg.warn('transport_udp is not ready')
        return fail(Exception('transport_udp is not ready')).addErrback(proxy_errback)
    return p.callRemote(
        'register_file_receiving', 'udp', host, sender_idurl,
        filename, size,
    ).addErrback(proxy_errback)
def interface_unregister_file_sending(transfer_id, status, bytes_sent, error_message=None):
    """
    Report the end of an outgoing transfer to the gateway.
    """
    p = proxy()
    if not p:
        lg.warn('transport_udp is not ready')
        return fail(Exception('transport_udp is not ready')).addErrback(proxy_errback)
    return p.callRemote(
        'unregister_file_sending', transfer_id, status,
        bytes_sent, error_message,
    ).addErrback(proxy_errback)
def interface_unregister_file_receiving(transfer_id, status, bytes_received, error_message=None):
    """
    Report the end of an incoming transfer to the gateway.
    """
    p = proxy()
    if not p:
        lg.warn('transport_udp is not ready')
        return fail(Exception('transport_udp is not ready')).addErrback(proxy_errback)
    return p.callRemote(
        'unregister_file_receiving', transfer_id, status,
        bytes_received, error_message,
    ).addErrback(proxy_errback)
def interface_cancelled_file_sending(host, filename, size, description=None, error_message=None):
    """
    Report a cancelled outgoing file to the gateway.
    """
    p = proxy()
    if not p:
        lg.warn('transport_udp is not ready')
        return fail(Exception('transport_udp is not ready')).addErrback(proxy_errback)
    return p.callRemote(
        'cancelled_file_sending', 'udp', host, filename,
        size, description, error_message,
    ).addErrback(proxy_errback)
def interface_cancelled_file_receiving(host, filename, size, error_message=None):
    """
    Report a cancelled incoming file to the gateway.
    """
    p = proxy()
    if not p:
        lg.warn('transport_udp is not ready')
        return fail(Exception('transport_udp is not ready')).addErrback(proxy_errback)
    return p.callRemote(
        'cancelled_file_receiving', 'udp', host, filename, size, error_message,
    ).addErrback(proxy_errback)
|
Hey hey! I'm over on Maggie's blog this week sharing a look at my sweet first grader! My how time flies!
|
import Sofa
import numpy as np
import math
from SofaPython import Quaternion as quat
# use numpy vectors directly (watch out, string conversion might be
# lossy)
# make str(array) yield space-separated values (the format the scene data
# fields below are fed with)
np.set_string_function( lambda x: ' '.join( map(str, x)),
                        repr=False )
def createScene(node):
    # Build a Compliant-plugin test scene with two rigid bodies under the
    # same constant force field: node "good" uses RigidMass, node "bad" uses
    # UniformMass with an explicit inertia matrix -- presumably to compare
    # the two mass implementations (the names suggest one is expected to
    # misbehave).  Indentation reconstructed; verify against VCS history.
    node.createObject('RequiredPlugin',
                      pluginName = 'Compliant')
    ode = node.createObject('CompliantImplicitSolver')
    num = node.createObject('SequentialSolver')
    # ode.debug = 1
    node.dt = 0.01
    # rigid state: 7 = 3 translation + 4 quaternion; 6 = spatial vel/force
    pos = np.zeros(7)
    vel = np.zeros(6)
    force = np.zeros(6)
    # initial orientation: rotation of pi/4 about the z axis
    alpha = math.pi / 4.0
    q = quat.exp([0, 0, alpha])
    pos[:3] = [-0.5, 0, 0]
    pos[3:] = q
    mass = 1.0
    # change this for more fun
    dim = np.array([1, 2, 1])
    # solid-box inertia: m/12 * (dy^2+dz^2, dz^2+dx^2, dx^2+dy^2)
    dim2 = dim * dim
    inertia = mass / 12.0 * (dim2[ [1, 2, 0] ] + dim2[ [2, 0, 1] ])
    volume = 1.0
    # last three components: presumably the angular part of the applied
    # wrench, set to the body-frame y axis rotated into world frame
    force[3:] = quat.rotate(q, [0, 1, 0])
    scene = node.createChild('scene')
    good = scene.createChild('good')
    dofs = good.createObject('MechanicalObject',
                             template = 'Rigid',
                             name = 'dofs',
                             position = pos,
                             velocity = vel,
                             showObject = 1)
    good.createObject('RigidMass',
                      template = 'Rigid',
                      name = 'mass',
                      mass = mass,
                      inertia = inertia)
    good.createObject('ConstantForceField',
                      template = 'Rigid',
                      name = 'ff',
                      forces = force)
    bad = scene.createChild('bad')
    # same body, shifted to the right, same force
    pos[:3] = [0.5, 0, 0]
    dofs = bad.createObject('MechanicalObject',
                            template = 'Rigid',
                            name = 'dofs',
                            position = pos,
                            velocity = vel,
                            showObject = 1)
    inertia_matrix = np.diag(inertia)
    # helpers to serialize values into the string format UniformMass expects
    def cat(x): return ' '.join( map(str, x))
    def print_matrix(x):
        return '[' + ','.join(map(str, x)) + ']'
    bad.createObject('UniformMass',
                     template = 'Rigid',
                     name = 'mass',
                     mass = cat([mass, volume, print_matrix(inertia_matrix / mass)]))
    bad.createObject('ConstantForceField',
                     template = 'Rigid',
                     name = 'ff',
                     forces = force)
    # gravity off so the constant force is the only load
    node.gravity = '0 0 0'
|
Gringo Tours always looks for ways to support the local communities and initiatives, which is why we are one of only a few companies that offer Mangrove Boat tours at the Barra de Santiago, which is much further away and more remote than the other well-known mangroves of Jiquilisco Bay. That said, we can offer a truly unique experience, either as a half-day add-on to another tour — for example, en route to El Imposible Park — or as a separate full-day experience from areas like San Salvador, Santa Ana, the Ruta de Flores, and Suchitoto. It is relatively close to the Ruta de Flores, but the roughly 3.5 hours from Suchitoto and 2.5 from San Salvador make for a long drive, which is why we recommend it as an en-route tour to another destination in western El Salvador.
Working with AMBAS, the local association that looks after the protected area, and in coordination with the Ministry of Environment and Natural Resources — where permits must be requested at least 15 days in advance to enter the protected mangrove reserve — makes our tours different from those that simply hire a boat to access the initial waterways within the area. On a good day caimans and crocodiles can be seen up close, not to mention the numerous types of local crabs and birds. We add a stop on a small island where the endangered Blue Crabs are managed as a small economic initiative of the island's inhabitants. We can also arrange tortilla classes and a wonderful fresh seafood lunch, trips to the Bocana where the estuary meets the ocean, and of course time for a walk on the beach or a swim in the ocean or estuary.
|
#!/usr/bin/env python
import sys, os.path, math, fnmatch
from glob import glob
import optparse
from popGraphUtil import plotFromCSV, plotFromAll
# Command line: input CSV of demographic samples, output chart file, plus
# figure-styling options forwarded to matplotlib below.
parser = optparse.OptionParser(" [options] csv-file chart-file")
parser.add_option("", "--xlim", dest="xlim", help="cut off X-axis at this point", default = None)
parser.add_option("", "--ylim", dest="ylim", help="cut off Y-axis at this point", default = None)
parser.add_option("", "--logy", dest="logy", action="store_true",
                  help="Log scale for Y axis", default = False)
parser.add_option("", "--yscale", dest="yscale", help="Y-axis scale factor", default = 1)
parser.add_option("", "--width", dest="width",
                  help="figure width. Integral value with units: 50mm 2cm 3 (inches)", default = None)
# parser.add_option("", "--ms", dest="ms", help="", default = None)
parser.add_option("", "--lw", dest="lw", help="Line width", default = None)
parser.add_option("", "--font", dest="font", help="name of font for figure text ", default = None)
parser.add_option("", "--fontsize", dest="fontsize", help="font size of figure text", default = None)
# parser.add_option("", "--axes", dest="axesSize", help="", default = None)
parser.add_option("", "--ticks", dest="ticklabelsize",
                  help="font size of ticks labels ", default = None)
parser.add_option("", "--nxticks", dest="nxticks",
                  help="number of X-axis ticks", default = None)
parser.add_option("", "--title", dest="title",
                  help="Figure title", default = None)
parser.add_option("", "--hist", dest="hist", action="store_true",help="", default = False)
parser.add_option("", "--alldemo", dest="alldfile",
                  help="plot all demographic functions in this file",
                  default = None)
# NOTE(review): "transparancy" typos in the two help strings below are
# user-visible; left untouched in this comment-only pass.
parser.add_option("-a", "--alphaout", dest="alpha", help="transparancy value of outline.", default = 1)
parser.add_option("", "--alpha", dest="alldalpha",
                  help="transparancy value to use when plotting all" +
                  " demographic. 1 - no transparancy, 0 fully transparent.", default = 0.1)
parser.add_option("", "--ratio", dest="ratio",
                  help="height/width ratio of figure.", default = 0.75)
options, args = parser.parse_args()
# Exactly two positionals required: the input CSV and the output chart file.
if len(args) != 2 :
    print >> sys.stderr, "usage:", sys.argv[0], "csv-file", "chart-file"
    sys.exit(1)
name = args[0]
trueDemo = None
# Options forwarded to popGraphUtil.plotFromCSV below.
plotOptionsDict = { 'alpha' : float(options.alpha),
                    'logy' : options.logy,
                    'doHist': options.hist }
if options.lw :
    plotOptionsDict['mainlw'] = float(options.lw)
    # HPD outline drawn at half the main line width
    plotOptionsDict['hpdOutline'] = float(options.lw)/2
labelsFont = None
if options.font :
    import matplotlib.font_manager
    labelsFont = matplotlib.font_manager.FontProperties(options.font)
    # FontProperties falls back silently; warn when the font was not matched
    if labelsFont.get_name() != options.font :
        print >> sys.stderr, "*warning:", labelsFont.get_name(),"!=",options.font
    if options.fontsize :
        labelsFont.set_size(float(options.fontsize))
# NOTE: pylab imported only after argument parsing/validation
import pylab
def convertToInches(w):
    """Convert a size string ('50mm', '2cm', or a plain inch count) to inches."""
    suffix = w[-2:]
    if suffix == 'mm':
        return int(w[:-2]) / 25.4
    if suffix == 'cm':
        return int(w[:-2]) / 2.54
    return int(w)
# Create the figure, sized from --width/--ratio (converted to inches) if given.
if options.width is None :
    fig = pylab.figure()
else :
    w = convertToInches(options.width)
    h = w * float(options.ratio)
    fig = pylab.figure(figsize=(w,h))
if labelsFont :
    labelFontDict = {'fontproperties': labelsFont}
    plotOptionsDict['labelProps'] = labelFontDict
if options.alldfile:
    # interactive drawing off while plotting many demographic functions
    pylab.ioff()
    plotFromAll(options.alldfile, yScale = float(options.yscale),
                logy = options.logy, alpha = float(options.alldalpha))
plotFromCSV(name, trueDemo, yScale = float(options.yscale), **plotOptionsDict)
# Apply the optional axis limits, title, legend and tick styling.
if options.xlim :
    pylab.xlim((0, float(options.xlim)))
if options.ylim :
    pylab.ylim((0, float(options.ylim)))
if options.title :
    pylab.title(options.title)
pylab.legend(loc='best')
if options.nxticks :
    from matplotlib.ticker import MaxNLocator
    pylab.gca().xaxis.set_major_locator(MaxNLocator(int(options.nxticks)))
if labelsFont :
    # apply the requested font to the legend entries as well
    ltext = pylab.gca().get_legend().get_texts()
    for l in ltext :
        pylab.setp(l, fontproperties = labelsFont)
if options.ticklabelsize :
    s = float(options.ticklabelsize)
    if labelsFont :
        fp = matplotlib.font_manager.FontProperties(labelsFont.get_name())
        fp.set_size(s)
        fp = {'fontproperties' : fp}
    else :
        fp = dict()
    for p in ('xticklabels', 'yticklabels') :
        l = pylab.getp(pylab.gca(), p)
        pylab.setp(l, fontsize=s, **fp)
if options.alldfile:
    pylab.ion()
pylab.savefig(args[1], dpi=300)
|
Cynthia Hill, 52, is a vocal champion of Neighborhood House.
The mother of four and grandmother of 12 is one of the most recent success stories of the community outreach, and gives them praise for her transformation.
Struggling in school when she was young, she dropped out and spent years battling poverty and addiction, until one day she decided to give the outreach’s GED program a try.
Along the way, staff and volunteers helped her overcome major hurdles, including abscessed teeth, a need for glasses and a drug addiction. She said Franciscan Sister Noreen Buttimer and Nikki Grimball, director, were with her every step of the way, never letting her quit.
Today, Hill is attending Trident Technical College for a degree in culinary arts and was on the dean’s list.
She is just one of a multitude that the outreach center has helped in its 100-year history, which will be celebrated in April.
Neighborhood House started through St. Francis Infirmary as a way to help immigrants arriving in the port city in 1915. The idea was conceived by Mary McKenna, superintendent of the infirmary, and approved by Bishop Henry P. Northrop, who asked the Sisters of Charity of Our Lady of Mercy to serve there. They accepted, and McKenna and Sister Michael Leary led the way, offering health-related programs, Bible school, and classes for women.
The first building was at 90½ Columbus St., now a residential area of old Charleston homes. In 1918, the sisters bought a house at 88 America St., in the block where Trident Tech sits now. The outreach operated there until 1953 when, unable to maintain the old building, it was sold and the sisters moved down the street into space at Our Lady of Mercy Church at the invitation of Bishop John J. Russell.
As the years passed, the center continually evolved to meet the needs of the community. In its century of work, the social service has seen many changes, but one thing that has never changed is the outpouring of community support. In the late ’20s and ’30s, local farmers helped feed the hungry during the Great Depression.
After World War II, the face of the surrounding community changed to predominantly African American. Healthcare and community classes continued to be part of the outreach, along with helping the unemployed find jobs.
In 1968, the diocesan office of Catholic Charities took over the outreach and established a soup kitchen, which remains the largest program to date. In 2013, they served over 47,000 meals and provided almost 4,000 people with groceries from the food pantry.
Grimball, current director of Neighborhood House, recalls the day he realized how important the service was to the wellbeing of the community. He was talking to people as they left the soup kitchen, telling them to have a good weekend and enjoy the four-day holiday. After everyone left, a man came up and said, “Ya know, for a lot of people, it’s not going to be a good weekend.” He pointed out that without the soup kitchen, many wouldn’t have a meal for four days. That marked the start of the weekend food program.
Today, the outreach offers four different food services, a clothing center, and about 14 education and assistance programs, including parenting and budgeting classes.
Grimball said the goal is not to provide handouts, but to teach people to help themselves.
And of course, new programs and services are always being added. For example, the outreach is in the final stages of obtaining a water fountain for the community, said Sister Bridget Sullivan, OLM.
Such a simple human need, but so hard to find when you’re poor or homeless on hot summer days, Sister Bridget said, noting that she doesn’t know of a single water fountain in the city.
Neighborhood House is also hoping to provide laundromat vouchers to those in need.
As for Hill, she encourages everyone she meets to go to the outreach for help in turning their lives around, saying that if she can do it, they can too.
|
# coding=utf-8
import logging
import threading
from functools import wraps
import autosubliminal
# Module-level logger for this module.
log = logging.getLogger(__name__)
# Guards all reads/writes of autosubliminal.WANTEDQUEUELOCK so that the
# check-and-set of the flag is atomic across threads.
_lock = threading.Lock()
def get_wanted_queue_lock():
    """Try to take the global wanted queue lock flag.

    Returns:
        bool: True when the flag was free and is now held by the caller,
        False when another worker already holds it.
    """
    with _lock:
        if not autosubliminal.WANTEDQUEUELOCK:
            log.debug('Getting wanted queue lock')
            autosubliminal.WANTEDQUEUELOCK = True
            return True
        log.debug('Cannot get wanted queue lock, skipping')
        return False
def release_wanted_queue_lock():
    """Release the global wanted queue lock flag, warning if it is not held."""
    with _lock:
        if not autosubliminal.WANTEDQUEUELOCK:
            log.warning('Trying to release a wanted queue lock while there is no lock')
            return
        log.debug('Releasing wanted queue lock')
        autosubliminal.WANTEDQUEUELOCK = False
def release_wanted_queue_lock_on_exception(func):
    """
    Decorator to force the release of the wanted queue lock on unexpected exceptions.
    This should be used on every place where we do a get_wanted_queue_lock to release it also on unexpected exceptions.
    """
    @wraps(func)
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except Exception:
            # Only release if the lock is actually held, mirroring
            # release_wanted_queue_lock(), but log with full traceback.
            with _lock:
                if autosubliminal.WANTEDQUEUELOCK:
                    log.exception('Releasing wanted queue lock with force due to exception')
                    autosubliminal.WANTEDQUEUELOCK = False
            # Bare raise preserves the original traceback; the previous
            # 'raise e' re-raised the exception with a truncated traceback.
            raise
    return wrapper
def count_wanted_queue_items(item_type=None):
    """Return the number of items in the wanted queue.

    Args:
        item_type: when truthy, only items whose ``type`` matches are counted;
            otherwise the full queue length is returned.
    """
    queue = autosubliminal.WANTEDQUEUE
    if not item_type:
        return len(queue)
    return sum(1 for item in queue if item.type == item_type)
|
He was traveling with his wife and family. The report of the 35th quorum, as printed in the Deseret News, may suggest that he was in the Valley prior to 1866. Further research is needed.
"Report of the 35th Quorum," Deseret News [Weekly], 13 Apr. 1854, .
|
# -*- coding: utf-8 -*-
import time
import random
from ..backoff import Backoff
from ..constants import INT_ERROR, POS_ERROR
class ExponentialBackOff(Backoff):
    """
    ExponentialBackOff is a backoff implementation that increases the backoff
    period for each retry attempt using a randomization function that grows
    exponentially.

    `next()` returned interval is calculated using the following formula:

        randomized interval = (
            interval * (random value in range [1 - factor, 1 + factor]))

    `next()` will range between the randomization factor percentage below
    and above the retry interval.

    For example, given the following parameters:

    - interval = 2
    - factor = 0.5
    - multiplier = 2

    the actual backoff period used in the next retry attempt will range
    between 1 and 3 seconds, multiplied by the exponential, that is, between
    2 and 6 seconds.

    Note: `max_interval` caps the `interval` and not the randomized interval.

    If the time elapsed since an `ExponentialBackOff` instance is created
    goes past the `max_elapsed` time, then the method `next()` starts
    returning `Backoff.STOP`.

    The elapsed time can be reset by calling `reset()`.

    Example: Given the following default arguments, for 10 tries the sequence
    will be, and assuming we go over the `max_elapsed` on the 10th try::

        Request #  RetryInterval (seconds)  Randomized Interval (seconds)
        1          0.5                      [0.25, 0.75]
        2          0.75                     [0.375, 1.125]
        3          1.125                    [0.562, 1.687]
        4          1.687                    [0.8435, 2.53]
        5          2.53                     [1.265, 3.795]
        6          3.795                    [1.897, 5.692]
        7          5.692                    [2.846, 8.538]
        8          8.538                    [4.269, 12.807]
        9          12.807                   [6.403, 19.210]
        10         19.210                   Backoff.STOP

    For the opposite backoff strategy, see `riprova.ConstantBackoff`.

    `ExponentialBackOff` is expected to run in a single-thread context.

    Arguments:
        interval (int|float): interval time in seconds.
            Defaults to `0.5`.
        factor (int|float): multiplier factor for exponential retries.
            Defaults to `0.5`. It should be between `0` and `1` number range.
        max_interval (int|float): max allowed interval in seconds.
            Defaults to `60`.
        max_elapsed (int|float): max elapsed total allowed time in seconds.
            Defaults to `15` minutes == `15 * 60` seconds.
        multiplier (int|float): exponential multiplier.
            Defaults to `1.5`.

    Raises:
        AssertionError: in case of invalid params.

    Usage::

        @riprova.retry(backoff=riprova.ExponentialBackOff(interval=100))
        def task(x):
            return x * x
    """

    def __init__(self,
                 interval=.5,
                 factor=0.5,
                 max_interval=60,
                 max_elapsed=15 * 60,
                 multiplier=1.5):
        # Validate params early: misconfiguration is a programming error.
        assert isinstance(interval, (int, float)), INT_ERROR.format('interval')
        assert isinstance(multiplier, (int, float)), INT_ERROR.format('multiplier')  # noqa
        assert isinstance(factor, (int, float)), INT_ERROR.format('factor')
        assert isinstance(max_elapsed, (int, float)), INT_ERROR.format('max_elapsed')  # noqa
        # Accept floats too (backward-compatible generalization; every other
        # time parameter already accepts int|float).
        assert isinstance(max_interval, (int, float)), INT_ERROR.format('max_interval')  # noqa
        assert interval >= 0, POS_ERROR.format('interval')
        assert multiplier >= 0, POS_ERROR.format('multiplier')

        self.started = None  # start timestamp in ms; set lazily by next()
        self.multiplier = multiplier
        # All durations are tracked internally in milliseconds.
        self.max_elapsed = int(max_elapsed * 1000)
        self.max_interval = int(max_interval * 1000)
        # Clamp the randomization factor into the documented [0, 1] range.
        self.factor = min(max(factor, 0), 1)
        self.interval = int(interval * 1000)
        self.current_interval = self.interval

    @property
    def elapsed(self):
        """
        Returns the elapsed time in milliseconds since an `ExponentialBackOff`
        instance is created and is reset when `reset()` is called.

        Returns `0` before the first call to `next()` (previously this
        raised a TypeError because `started` was still `None`).
        """
        if self.started is None:
            return 0
        return int(time.time() * 1000) - self.started

    def reset(self):
        """
        Reset the interval back to the initial retry interval and
        restarts the timer.
        """
        self.started = None
        self.current_interval = self.interval

    def next(self):
        """
        Returns the number of seconds to wait before the next try,
        otherwise returns `Backoff.STOP`, which indicates the max number
        of retry operations were reached.

        Returns:
            float: time to wait in seconds before the next try.
        """
        # Store start time lazily on the first call.
        if self.started is None:
            self.started = int(time.time() * 1000)
        # Make sure we have not gone over the maximum elapsed time.
        if self.max_elapsed != 0 and self.elapsed > self.max_elapsed:
            return Backoff.STOP
        # Get random exponential interval
        interval = self._get_random_value()
        # Grow the interval for the following attempt
        self._increment_interval()
        # Convert milliseconds back to seconds. Use a float divisor so the
        # result is correct under Python 2's integer division as well.
        return round(interval / 1000.0, 2)

    def _increment_interval(self):
        """
        Increments the current interval by multiplying it with the multiplier.
        """
        # Check for overflow, if overflow is detected set the current
        # interval to the max interval.
        if self.current_interval >= (self.max_interval / self.multiplier):
            self.current_interval = self.max_interval
        else:
            self.current_interval = self.current_interval * self.multiplier

    def _get_random_value(self):
        """
        Returns a random value from the interval:

            [current_interval - delta, current_interval + delta]

        where ``delta = factor * current_interval``, matching the class
        docstring formula ``interval * [1 - factor, 1 + factor]``.

        Returns:
            int: interval in milliseconds to wait before the next try.
        """
        rand = random.random()
        # Scale the jitter window by the current interval. The previous
        # implementation used ``delta = self.factor * rand`` which produced a
        # sub-millisecond window, effectively disabling randomization.
        delta = self.factor * self.current_interval
        min_interval = self.current_interval - delta
        max_interval = self.current_interval + delta
        # Get a random value from the range [min_interval, max_interval].
        # The formula used below has a +1 because if the min_interval is 1 and
        # the max_interval is 3 then we want a 33% chance for selecting either
        # 1, 2 or 3.
        return int(min_interval + (rand * (max_interval - min_interval + 1)))
|
Girl Scouts or Girl Guides?
The girls are out for a walk but where, when and who? Please let us have any help you can. Thanks.
These are Girl Guides. I don’t think we had Girl Scouts.
|
#! /usr/bin/env python
#
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module handling communication with gdb.
Users of this module probably want to use the Inferior class, as it provides a
clean interface for communicating with gdb and a couple of functions for
performing common tasks (e.g. listing threads, moving around the stack, etc.)
"""
# TODO: split this file in two, with GdbProxy in a separate file.
import collections
import errno
import functools
import json
import logging
import os
import re
import select
import signal
import subprocess
import tempfile
import time
# Setting these overrides the defaults. See _SymbolFilePath.
SYMBOL_FILE = None  # default: <PAYLOAD_DIR>/python2.7.debug
PAYLOAD_DIR = os.path.join(os.path.dirname(__file__), 'payload')

# Timeouts (in seconds) used when waiting for answers from gdb; None means
# "block forever". See GdbProxy._Recv.
TIMEOUT_DEFAULT = 3
TIMEOUT_FOREVER = None

# Scripts sourced by gdb at startup, resolved relative to PAYLOAD_DIR.
_GDB_STARTUP_FILES = [
    'importsetup.py',
    'gdb_service.py',
]
# Base gdb command line: no windows, no banner, batch mode without output.
_GDB_ARGS = ['gdb', '--nw', '--quiet', '--batch-silent']
def _SymbolFilePath():
  """Return the explicit symbol-file override, or the payload default."""
  if SYMBOL_FILE:
    return SYMBOL_FILE
  return os.path.join(PAYLOAD_DIR, 'python2.7.debug')
class Error(Exception):
  """Base class for all exceptions raised by this module."""
  pass


class ProxyError(Error):
  """A proxy for an exception that happened within gdb."""


class TimeoutError(Error):
  """Raised when gdb did not answer within the allotted time."""
  pass


class PositionError(Error):
  """Raised when a nonsensical debugger position is requested."""


class GdbProcessError(Error):
  """Thrown when attempting to start gdb when it's already running."""
### RPC protocol for gdb service ###
#
# In order to ensure compatibility with all versions of python JSON was
# chosen as the main data format for the communication protocol between
# the gdb-internal python process and the process using this module.
# RPC requests to GdbService ('the service') are JSON objects containing exactly
# two keys:
# * 'func' : the name of the function to be called in the service. RPCs for
# function names starting with _ will be rejected by the service.
# * 'args' : An array containing all the parameters for the function. Due to
# JSON's limitations, only positional arguments work. Most API
# functions require a 'position' argument which is required to be a
# 3-element array specifying the selected pid, python thread id and
# depth of the selected frame in the stack (where 0 is the outermost
# frame).
# The session is terminated upon sending an RPC request for the function
# '__kill__' (upon which args are ignored).
#
# RPC return values are not wrapped in JSON objects, but are bare JSON
# representations of return values.
# Python class instances (old and new-style) will also be serialized to JSON
# objects with keys '__pyringe_type_name__' and '__pyringe_address__', which
# carry the expected meaning. The remaining keys in these objects are simple
# JSON representations of the attributes visible in the instance (this means the
# object includes class-level attributes, but these are overshadowed by any
# instance attributes. (There is currently no recursion in this representation,
# only one level of object references is serialized in this way.)
# Should an exception be raised to the top level within the service, it will
# write a JSON-representation of the traceback string to stderr
# TODO: add message-id to the protocol to make sure that canceled operations
# that never had their output read don't end up supplying output for the wrong
# command
class ProxyObject(object):
  """Local stand-in for an object living in the inferior process.

  The attribute dict received from the gdb service becomes this instance's
  __dict__, so the remote object's attributes are reachable directly.
  """

  def __init__(self, attrdict):
    # Adopt the decoded attribute mapping wholesale.
    self.__dict__ = attrdict

  def __repr__(self):
    type_name = self.__pyringe_type_name__
    address = self.__pyringe_address__
    return '<proxy of %s object at remote 0x%x>' % (type_name, address)
class GdbProxy(object):
  """The gdb that is being run as a service for the inferior.

  Most of the logic of this service is actually run from within gdb, this being
  a stub which handles RPC for that service. Communication with that service
  is done by pushing around JSON encoded dicts specifying RPC requests and
  their results. Automatic respawning is not handled by this class and must be
  implemented on top of this if it is to be available.
  """

  # Class-level flag so the "old gdb" warning below is only logged once per
  # process, no matter how many proxies get created.
  firstrun = True

  def __init__(self, args=None, arch=None):
    """Spawns gdb and wires up the communication channels.

    Args:
      args: optional list of additional command line arguments for gdb.
      arch: optional target architecture string; passed to gdb via
          'set architecture' on its command line.
    """
    super(GdbProxy, self).__init__()
    gdb_version = GdbProxy.Version()
    if gdb_version < (7, 4, None) and GdbProxy.firstrun:
      # The user may have a custom-built version, so we only warn them
      logging.warning('Your version of gdb may be unsupported (< 7.4), '
                      'proceed with caution.')
      GdbProxy.firstrun = False
    arglist = _GDB_ARGS
    # Due to a design flaw in the C part of the gdb python API, setting the
    # target architecture from within a running script doesn't work, so we have
    # to do this with a command line flag.
    if arch:
      arglist = arglist + ['--eval-command', 'set architecture ' + arch]
    arglist = (arglist +
               ['--command=' + os.path.join(PAYLOAD_DIR, fname)
                for fname in _GDB_STARTUP_FILES])
    # Add version-specific args
    if gdb_version >= (7, 6, 1):
      # We want as little interference from user settings as possible,
      # but --nh was only introduced in 7.6.1
      arglist.append('--nh')
    if args:
      arglist.extend(args)
    # We use a temporary file for pushing IO between pyringe and gdb so we
    # don't have to worry about writes larger than the capacity of one pipe
    # buffer and handling partial writes/reads.
    # Since file position is automatically advanced by file writes (so writing
    # then reading from the same file will yield an 'empty' read), we need to
    # reopen the file to get different file offset. We can't use os.dup for
    # this because of the way os.dup is implemented.
    # (bufsize=1 requests line buffering; this is the Python 2 file API.)
    outfile_w = tempfile.NamedTemporaryFile(mode='w', bufsize=1)
    errfile_w = tempfile.NamedTemporaryFile(mode='w', bufsize=1)
    self._outfile_r = open(outfile_w.name)
    self._errfile_r = open(errfile_w.name)
    logging.debug('Starting new gdb process...')
    self._process = subprocess.Popen(
        bufsize=0,
        args=arglist,
        stdin=subprocess.PIPE,
        stdout=outfile_w.file,
        stderr=errfile_w.file,
        close_fds=True,
        preexec_fn=os.setpgrp,
        )
    # NOTE(review): closing the NamedTemporaryFile write handles unlinks the
    # files; presumably the reader fds opened above (and gdb's inherited fds)
    # keep the data reachable on POSIX — confirm before porting elsewhere.
    outfile_w.close()
    errfile_w.close()
    self._poller = select.poll()
    self._poller.register(self._outfile_r.fileno(),
                          select.POLLIN | select.POLLPRI)
    self._poller.register(self._errfile_r.fileno(),
                          select.POLLIN | select.POLLPRI)

  def __getattr__(self, name):
    """Handles transparent proxying to gdb subprocess.

    This returns a lambda which, when called, sends an RPC request to gdb

    Args:
      name: The method to call within GdbService
    Returns:
      The result of the RPC.
    """
    return lambda *args, **kwargs: self._Execute(name, *args, **kwargs)

  def Kill(self):
    """Send death pill to Gdb and forcefully kill it if that doesn't work."""
    try:
      if self.is_running:
        self.Detach()
      if self._Execute('__kill__') == '__kill_ack__':
        # acknowledged, let's give it some time to die in peace
        time.sleep(0.1)
    except (TimeoutError, ProxyError):
      logging.debug('Termination request not acknowledged, killing gdb.')
    if self.is_running:
      # death pill didn't seem to work. We don't want the inferior to get killed
      # the next time it hits a dangling breakpoint, so we send a SIGINT to gdb,
      # which makes it disable instruction breakpoints for the time being.
      os.kill(self._process.pid, signal.SIGINT)
      # Since SIGINT has higher priority (with signal number 2) than SIGTERM
      # (signal 15), SIGTERM cannot preempt the signal handler for SIGINT.
      self._process.terminate()
      self._process.wait()
    self._errfile_r.close()
    self._outfile_r.close()

  @property
  def is_running(self):
    # Popen.poll() returns None while the subprocess is still alive.
    return self._process.poll() is None

  @staticmethod
  def Version():
    """Gets the version of gdb as a 3-tuple.

    The gdb devs seem to think it's a good idea to make --version
    output multiple lines of welcome text instead of just the actual version,
    so we ignore everything it outputs after the first line.

    Returns:
      The installed version of gdb in the form
      (<major>, <minor or None>, <micro or None>)
      gdb 7.7 would hence show up as version (7,7)
    """
    output = subprocess.check_output(['gdb', '--version']).split('\n')[0]
    # Example output (Arch linux):
    # GNU gdb (GDB) 7.7
    # Example output (Debian sid):
    # GNU gdb (GDB) 7.6.2 (Debian 7.6.2-1)
    # Example output (Debian wheezy):
    # GNU gdb (GDB) 7.4.1-debian
    # Example output (centos 2.6.32):
    # GNU gdb (GDB) Red Hat Enterprise Linux (7.2-56.el6)
    # As we've seen in the examples above, versions may be named very liberally
    # So we assume every part of that string may be the "real" version string
    # and try to parse them all. This too isn't perfect (later strings will
    # overwrite information gathered from previous ones), but it should be
    # flexible enough for everything out there.
    major = None
    minor = None
    micro = None
    for potential_versionstring in output.split():
      version = re.split('[^0-9]', potential_versionstring)
      try:
        major = int(version[0])
      except (IndexError, ValueError):
        pass
      try:
        minor = int(version[1])
      except (IndexError, ValueError):
        pass
      try:
        micro = int(version[2])
      except (IndexError, ValueError):
        pass
    return (major, minor, micro)

  # On JSON handling:
  # The python2 json module ignores the difference between unicode and str
  # objects, emitting only unicode objects (as JSON is defined as
  # only having unicode strings). In most cases, this is the wrong
  # representation for data we were sent from the inferior, so we try to convert
  # the unicode objects to normal python strings to make debugger output more
  # readable and to make "real" unicode objects stand out.
  # Luckily, the json module just throws an exception when trying to serialize
  # binary data (that is, bytearray in py2, byte in py3).
  # The only piece of information deemed relevant that is lost is the type of
  # non-string dict keys, as these are not supported in JSON. {1: 1} in the
  # inferior will thus show up as {"1": 1} in the REPL.
  # Properly transmitting python objects would require either substantially
  # building on top of JSON or switching to another serialization scheme.

  def _TryStr(self, maybe_unicode):
    """Best-effort downcast of a unicode object to a plain str.

    Returns the input unchanged when it cannot be encoded, which keeps
    "real" (non-ASCII) unicode objects recognizable in debugger output.
    """
    try:
      return str(maybe_unicode)
    except UnicodeEncodeError:
      return maybe_unicode

  def _JsonDecodeList(self, data):
    """Recursively converts unicode items of a decoded JSON list to str."""
    rv = []
    for item in data:
      if isinstance(item, unicode):
        item = self._TryStr(item)
      elif isinstance(item, list):
        item = self._JsonDecodeList(item)
      rv.append(item)
    return rv

  def _JsonDecodeDict(self, data):
    """Json object decode hook that automatically converts unicode objects."""
    rv = {}
    for key, value in data.iteritems():
      if isinstance(key, unicode):
        key = self._TryStr(key)
      if isinstance(value, unicode):
        value = self._TryStr(value)
      elif isinstance(value, list):
        value = self._JsonDecodeList(value)
      rv[key] = value
    if '__pyringe_type_name__' in data:
      # We're looking at a proxyobject
      rv = ProxyObject(rv)
    return rv

  # There is a reason for this messy method signature, it's got to do with
  # python 2's handling of function arguments, how this class is expected to
  # behave and the responsibilities of __getattr__. Suffice it to say that if
  # this were python 3, we wouldn't have to do this.
  def _Execute(self, funcname, *args, **kwargs):
    """Send an RPC request to the gdb-internal python.

    Blocks for 3 seconds by default and returns any results.

    Args:
      funcname: the name of the function to call.
      *args: the function's arguments.
      **kwargs: Only the key 'wait_for_completion' is inspected, which decides
          whether to wait forever for completion or just 3 seconds.
    Returns:
      The result of the function call.
    """
    wait_for_completion = kwargs.get('wait_for_completion', False)
    rpc_dict = {'func': funcname, 'args': args}
    self._Send(json.dumps(rpc_dict))
    timeout = TIMEOUT_FOREVER if wait_for_completion else TIMEOUT_DEFAULT
    result_string = self._Recv(timeout)
    try:
      result = json.loads(result_string, object_hook=self._JsonDecodeDict)
      if isinstance(result, unicode):
        result = self._TryStr(result)
      elif isinstance(result, list):
        result = self._JsonDecodeList(result)
    except ValueError:
      raise ValueError('Response JSON invalid: ' + str(result_string))
    except TypeError:
      # e.g. result_string is None because _Recv failed.
      raise ValueError('Response JSON invalid: ' + str(result_string))
    return result

  def _Send(self, string):
    """Write a string of data to the gdb-internal python interpreter."""
    self._process.stdin.write(string + '\n')

  def _Recv(self, timeout):
    """Receive output from gdb.

    This reads gdb's stdout and stderr streams, returns a single line of gdb's
    stdout or rethrows any exceptions thrown from within gdb as well as it can.

    Args:
      timeout: floating point number of seconds after which to abort.
          A value of None or TIMEOUT_FOREVER means "there is no timeout", i.e.
          this might block forever.
    Raises:
      ProxyError: All exceptions received from the gdb service are generically
          reraised as this.
      TimeoutError: Raised if no answer is received from gdb in after the
          specified time.
    Returns:
      The current contents of gdb's stdout buffer, read until the next newline,
      or `None`, should the read fail or timeout.
    """
    buf = ''
    # The messiness of this stems from the "duck-typiness" of this function.
    # The timeout parameter of poll has different semantics depending on whether
    # it's <=0, >0, or None. Yay.
    wait_for_line = timeout is TIMEOUT_FOREVER
    deadline = time.time() + (timeout if not wait_for_line else 0)

    # Closure over `deadline`: rebinding `deadline` below (in the exception
    # branch) changes what TimeLeft() computes.
    def TimeLeft():
      return max(1000 * (deadline - time.time()), 0)

    continue_reading = True
    while continue_reading:
      poll_timeout = None if wait_for_line else TimeLeft()
      # Keep only fds that actually have readable data.
      fd_list = [event[0] for event in self._poller.poll(poll_timeout)
                 if event[1] & (select.POLLIN | select.POLLPRI)]
      if not wait_for_line and TimeLeft() == 0:
        continue_reading = False
      if self._outfile_r.fileno() in fd_list:
        buf += self._outfile_r.readline()
        if buf.endswith('\n'):
          return buf
      # GDB-internal exception passing
      if self._errfile_r.fileno() in fd_list:
        exc = self._errfile_r.readline()
        if exc:
          exc_text = '\n-----------------------------------\n'
          exc_text += 'Error occurred within GdbService:\n'
          try:
            exc_text += json.loads(exc)
          except ValueError:
            # whatever we got back wasn't valid JSON.
            # This usually means we've run into an exception before the special
            # exception handling was turned on. The first line we read up there
            # will have been "Traceback (most recent call last):". Obviously, we
            # want the rest, too, so we wait a bit and read it.
            deadline = time.time() + 0.5
            while self.is_running and TimeLeft() > 0:
              exc += self._errfile_r.read()
            try:
              exc_text += json.loads(exc)
            except ValueError:
              # Still not JSON; surface the raw traceback text instead.
              exc_text = exc
          raise ProxyError(exc_text)
    # timeout
    raise TimeoutError()
class Inferior(object):
  """Class modeling the inferior process.

  Defines the interface for communication with the inferior and handles
  debugging context and automatic respawning of the underlying gdb service.
  """

  # Lazily (re)created by the `gdb` property; None while no proxy exists.
  _gdb = None

  _Position = collections.namedtuple('Position', 'pid tid frame_depth')  # pylint: disable=invalid-name
  # tid is the thread ident as reported by threading.current_thread().ident
  # frame_depth is the 'depth' (as measured from the outermost frame) of the
  # requested frame. A value of -1 will hence mean the most recent frame.

  def __init__(self, pid, auto_symfile_loading=True, architecture='i386:x86-64'):
    """Initializes debugging state for the given pid.

    Args:
      pid: pid of the target process; falsy values mean "not attached yet".
      auto_symfile_loading: whether to try loading the symbol file when gdb
          is (re)started.
      architecture: target architecture string handed to gdb.
    """
    super(Inferior, self).__init__()
    self.position = self._Position(pid=pid, tid=None, frame_depth=-1)
    self._symbol_file = None
    self.arch = architecture
    self.auto_symfile_loading = auto_symfile_loading
    # Inferior objects are created before the user ever issues the 'attach'
    # command, but since this is used by `Reinit`, we call upon gdb to do this
    # for us.
    if pid:
      self.StartGdb()

  # Defined inside the class body so it can decorate the methods below; it is
  # a decorator, not an instance method.
  def needsattached(func):
    """Decorator to prevent commands from being used when not attached."""
    @functools.wraps(func)
    def wrap(self, *args, **kwargs):
      if not self.attached:
        raise PositionError('Not attached to any process.')
      return func(self, *args, **kwargs)
    return wrap

  @needsattached
  def Cancel(self):
    """Aborts the current operation by shutting down the gdb service."""
    self.ShutDownGdb()

  def Reinit(self, pid, auto_symfile_loading=True):
    """Reinitializes the object with a new pid.

    Since all modes might need access to this object at any time, this object
    needs to be long-lived. To make this clear in the API, this shorthand is
    supplied.

    Args:
      pid: the pid of the target process
      auto_symfile_loading: whether the symbol file should automatically be
        loaded by gdb.
    """
    self.ShutDownGdb()
    self.__init__(pid, auto_symfile_loading, architecture=self.arch)

  @property
  def gdb(self):
    # when requested, make sure we have a gdb session to return
    # (in case it crashed at some point)
    if not self._gdb or not self._gdb.is_running:
      self.StartGdb()
    return self._gdb

  def StartGdb(self):
    """Starts gdb and attempts to auto-load symbol file (unless turned off).

    Raises:
      GdbProcessError: if gdb is already running
    """
    if self.attached:
      raise GdbProcessError('Gdb is already running.')
    self._gdb = GdbProxy(arch=self.arch)
    self._gdb.Attach(self.position)
    if self.auto_symfile_loading:
      try:
        self.LoadSymbolFile()
      except (ProxyError, TimeoutError) as err:
        # Symbol loading failed; restart with a fresh proxy and re-attach.
        self._gdb = GdbProxy(arch=self.arch)
        self._gdb.Attach(self.position)
        if not self.gdb.IsSymbolFileSane(self.position):
          logging.warning('Failed to automatically load a sane symbol file, '
                          'most functionality will be unavailable until symbol'
                          'file is provided.')
          # NOTE: err.message is Python 2-specific (removed in Python 3).
          logging.debug(err.message)

  def ShutDownGdb(self):
    """Kills the gdb service (if any) and clears the proxy reference."""
    if self._gdb and self._gdb.is_running:
      self._gdb.Kill()
    self._gdb = None

  def LoadSymbolFile(self, path=None):
    # As automatic respawning of gdb may happen between calls to this, we have
    # to remember which symbol file we're supposed to load.
    if path:
      self._symbol_file = path
    s_path = self._symbol_file or _SymbolFilePath()
    logging.debug('Trying to load symbol file: %s' % s_path)
    if self.attached:
      self.gdb.LoadSymbolFile(self.position, s_path)
      if not self.gdb.IsSymbolFileSane(self.position):
        logging.warning('Symbol file failed sanity check, '
                        'proceed at your own risk')

  @needsattached
  def Backtrace(self):
    """Returns the backtrace of the currently selected thread/frame."""
    return self.gdb.BacktraceAt(self.position)

  @needsattached
  def Up(self):
    """Moves the selected frame one step towards the outermost frame."""
    depth = self.position.frame_depth
    if self.position.frame_depth < 0:
      # Resolve a relative (negative) depth to an absolute one first.
      depth = self.gdb.StackDepth(self.position) + self.position.frame_depth
    if not depth:
      raise PositionError('Already at outermost stack frame')
    self.position = self._Position(pid=self.position.pid,
                                   tid=self.position.tid,
                                   frame_depth=depth-1)

  @needsattached
  def Down(self):
    """Moves the selected frame one step towards the innermost frame."""
    if (self.position.frame_depth + 1 >= self.gdb.StackDepth(self.position)
        or self.position.frame_depth == -1):
      raise PositionError('Already at innermost stack frame')
    frame_depth = self.position.frame_depth + 1
    self.position = self._Position(pid=self.position.pid,
                                   tid=self.position.tid,
                                   frame_depth=frame_depth)

  @needsattached
  def Lookup(self, var_name):
    """Looks up a variable by name in the currently selected frame."""
    return self.gdb.LookupInFrame(self.position, var_name)

  @needsattached
  def InferiorLocals(self):
    """Returns the local variables of the selected frame."""
    return self.gdb.InferiorLocals(self.position)

  @needsattached
  def InferiorGlobals(self):
    """Returns the global variables of the selected frame."""
    return self.gdb.InferiorGlobals(self.position)

  @needsattached
  def InferiorBuiltins(self):
    """Returns the builtins visible from the selected frame."""
    return self.gdb.InferiorBuiltins(self.position)

  @property
  def is_running(self):
    if not self.position.pid:
      return False
    try:
      # sending a 0 signal to a process does nothing
      os.kill(self.position.pid, 0)
      return True
    except OSError as err:
      # We might (for whatever reason) simply not be permitted to do this.
      if err.errno == errno.EPERM:
        logging.debug('Reveived EPERM when trying to signal inferior.')
        return True
      return False

  @property
  def pid(self):
    return self.position.pid

  @property
  @needsattached
  def threads(self):
    # return array of python thread idents. Unfortunately, we can't easily
    # access the given thread names without taking the GIL.
    return self.gdb.ThreadIds(self.position)

  @property
  @needsattached
  def current_thread(self):
    threads = self.threads
    if not threads:
      # No python threads visible: clear the selection.
      self.position = self._Position(pid=self.position.pid, tid=None,
                                     frame_depth=-1)
      return None
    if not self.position.tid or self.position.tid not in threads:
      # Stale or missing selection: fall back to the first reported thread.
      self.position = self._Position(pid=self.position.pid, tid=self.threads[0],
                                     frame_depth=-1)
    return self.position.tid

  @needsattached
  def SelectThread(self, tid):
    """Selects the python thread with the given ident, if it exists."""
    if tid in self.gdb.ThreadIds(self.position):
      self.position = self._Position(self.position.pid, tid, frame_depth=-1)
    else:
      logging.error('Thread ' + str(tid) + ' does not exist')

  @needsattached
  def Continue(self):
    """Resumes execution of the inferior."""
    self.gdb.Continue(self.position)

  @needsattached
  def Interrupt(self):
    """Interrupts (pauses) the inferior."""
    return self.gdb.Interrupt(self.position)

  @property
  def attached(self):
    # Attached means: we have a pid, the process is alive, and our gdb
    # service is up.
    if (self.position.pid
        and self.is_running
        and self._gdb
        and self._gdb.is_running):
      return True
    return False
|
new advertised plans 4 GB unlimited calls and SMS.
I am told by TPG this can't be done as the promotional plan is for new customers only.
I also note that the fact is stated ( new customers only) in the advertising.
However I think this says a lot about TPG'S treatment of existing customers.
I need the bigger plan.
Existing customers can take advantage of this promotional offer when taking up a new TPG mobile service. The mobile promotion does not apply to existing TPG mobile service plan changes and/or where the promotional offer has already been applied to a mobile service.
TPG is excited to be building our own mobile network. Register now and we'll make sure you're one of the first to know when we're ready with our offers. https://www.tpg.com.au/register-mobile.
|
import immlib
import getopt, string
import immutils
import os
def usage(imm):
    """Print the command usage banner to the Immunity Debugger log."""
    message = "Usage: !scd -f FILETOCHECK"
    imm.Log(message)
def checkop(op):
    """Heuristically decide whether a disassembled op looks legitimate.

    Rejects instructions that are unlikely in typical shellcode (the
    blacklist below) and calls/jumps whose target lies above the 32-bit
    user-space address range, to reduce false positives.
    """
    blacklist = ("IN", "OUT", "LES", "FSUBR", "DAA",
                 "BOUND", "???", "AAM", "STD", "FIDIVR",
                 "FCMOVNE", "FADD", "LAHF", "SAHF", "CMC",
                 "FILD", "WAIT", "RETF", "SBB", "ADC",
                 "IRETD", "LOCK", "POP SS", "POP DS", "HLT",
                 "LEAVE", "ARPL", "AAS", "LDS", "SALC",
                 "FTST", "FIST", "PADD", "CALL FAR", "FSTP",
                 "AAA", "FIADD")
    mnemonic = op.getDisasm()
    # str.startswith accepts a tuple of prefixes: one pass over the blacklist.
    if mnemonic.startswith(blacklist):
        return False
    if (op.isCall() or op.isJmp()) and op.getJmpAddr() > 0x7FFFFFFF:
        return False
    return True
def main (args):
    """Entry point for the !scd shellcode-detection PyCommand.

    Scans a file for byte sequences that look like shellcode: it searches the
    buffer for relative jumps/calls whose targets land back inside the buffer
    and whose target instructions pass checkop()'s plausibility filter, and
    lists the hits in a debugger table.

    Args:
        args: command line arguments; '-f FILE' selects the file to scan.

    Returns:
        A status string for the debugger, or None on usage error.
    """
    imm = immlib.Debugger()
    scfile = None
    conditional = False  # NOTE(review): never read afterwards; dead variable?
    try:
        opts, argo = getopt.getopt(args, "f:")
    except getopt.GetoptError:
        usage(imm)
        return
    for o,a in opts:
        if o == "-f":
            # NOTE(review): a plain assignment cannot raise ValueError, so
            # this try/except is dead code.
            try:
                scfile = a
            except ValueError, msg:
                return "Invalid argument: %s" % a
    if scfile == None or not os.path.isfile(scfile):
        usage(imm)
        return
    # Get something going so the context is valid
    imm.openProcess("c:\\windows\\system32\\notepad.exe")
    # Read file contents
    buf = open(scfile, "rb").read()
    cb = len(buf)
    # Copy the contents to process memory
    mem = imm.remoteVirtualAlloc(cb)
    imm.writeMemory(mem, buf)
    # Clarify the start and end of the buffer
    start = mem
    end = mem + cb
    table = imm.createTable('Shell Code Detect',\
        ['Ofs', 'Abs', 'Op', 'Op2', 'Op3'])
    while start < end:
        # Disassemble the instruction
        d = imm.disasm(start)
        c = d.getSize()
        # Skip anything that isn't a jump/call
        if (not d.isCall()) and (not d.isJmp()):
            start += c
            continue
        # Get the destination address of the jump/call
        dest = d.getJmpAddr()
        # The destination must land within the shell code
        # buffer or else we've just located a false positive
        if dest < start or dest > end:
            start += c
            continue
        # Disassemble the first 3 ops at destination
        op2 = imm.disasm(dest)
        op3 = imm.disasm(dest+op2.getSize())
        op4 = imm.disasm(dest+op2.getSize()+op3.getSize())
        # Use a simple validity check to reduce fp's
        if checkop(op2) and checkop(op3) and checkop(op4):
            # Report offset within the file, absolute address, and the
            # jump/call plus the first two target instructions.
            table.add('', ['0x%x' % (start - mem),\
                '0x%x' % start,\
                '%s' % d.getDisasm(),\
                '%s' % op2.getDisasm(),\
                '%s' % op3.getDisasm()])
        start += c
    return "done"
|
Impacted by China’s economic slowdown and the tendency of the new normal of China’s economy, the total at-home beverage spending of China’s urban households declined for the first time in the recent years, comparing past 12 months to end of June 2016 vs. year ago. According to the study of Kantar Worldpanel, the total beverage volume also declined for the second year in a row. Frequency decline is the main reason behind. The frequency of urban households beverage purchase declined 7.6%, decreased 1.7 times per household. Under the weak performance of beverage market, however, the major manufacturers are still seeking the new opportunities through unremitting product innovations. After a rapid development in past 2 years, the light-flavor beverages, such as Haizhiyan from President and Shuidongle from Coca-Cola, are the biggest dark horses in the market now, and become consumers’ new love with their light and fresh taste. Its sales volume soared 44% in last year; almost 2/3 of the growth comes from the new light-flavor beverage brands.
According to the continuous research from Kantar Worldpanel that regards to FMCG purchase of China’s urban households, the findings pointed out that 20% of the urban China households bought the light-flavor beverage such as Haizhiyan or Shuidongle in last year; it was only 15% one year ago. The expanding buyer group is the key driver for light-flavor beverages. Nowadays, shoppers actually pay much more attention to the food health. The traditional high oil, high salt and high sugar food taste becomes lighter. For instance, the volume per household of cooking oil declined 4% in the past 12 months. The organic and natural concepts greatly influence people’s dinning and daily consumption. Light-flavor beverage manufacturers were aware of the intense demands from the consumers, and offered this healthier choice to the health-concerned buyers. Most of the light-flavor beverages contain less than 100 KJ per 100ML, some products like Qinningshui and Shuidongle only provide around 70 KJ per 100ML. Comparing to the traditional beverages that usually contain around 200 KJ per 100ML, light-flavor beverages bring us less energy intake but fresher taste and healthier life style.
Shoppers buy light-flavor beverages to replace some higher energy drinks. This trend helps the light-flavor beverage market to easily gain share over other categories. According to the research from Kantar Worldpanel, two-thirds of the light-flavor beverages growth was contributed by switching from the existing drink products. CSD and low concentration juice products were replaced mostly, and some packaged water was also replaced by the light-flavor beverages. Because of multiple benefits: not only the rehydrate function, light-flavor beverages also provide light fruity taste and some nutrition inside.
The healthy concept appeals to young people in Key and A tier cities who pursue more fashion diet style. This is another reason why light-flavor beverages become popular. Among the households who bought light-flavor beverage in recent one year, 60% of them come from young families and teenager families, 6% higher than total beverage; 43% come from the Key & A tier cities households, 10% higher than total beverage. According to the feature of the young generations, many manufacturers began to cater the young generation’s taste. For example, Mizone Shuiyue launched in JingDong first, use the online channel to attract young people who like online shopping. While, Shuiquduo launched in CVS first to attract daily commuters in major cities.
There are more and more light-flavor beverage choices to the consumers in the market now. However, many light-flavor beverages are quite similar in function and flavor. Thus, manufacturers have to pay more attention to the distinct packaging, such as appearance design and bottle shape, and more interactions on social media platforms that can motivate young consumers to try the new products. Actually, the light-flavor beverages do attract a lot of people who seek variety and are willing to try new products. Light-flavor beverage consumers usually buy 6.3 beverage categories each year. It is 0.9 categories more than the total beverage buyer average. Meanwhile, these consumers also select 18.6 beverage brands each year. It is 5.7 brands more than the total beverage buyer average. For the manufacturers launching new products, it is easier to recruit those buyers, but they also change their preferences quickly. For example, Haizhiyan’s volume increased 36% in the past year, but still lost 6% of its volume to other new light-flavor beverages in competition as well. With the fiercer competition, it will be a huge challenge to enhance consumers’ brand loyalty. Moreover, how to attract the consumers who do not buy that many different beverage categories and brands is another challenge.
Light-flavor beverage is not the only hot topic in today’s beverage market. Manufacturers are always seeking the possible growth opportunities in the market. For instance, coconut water is increasingly popular in China. Mango and blueberry juices are niche flavors but grow rapidly in market. Some dessert type beverages provide a heavy & rich taste to the market to attract the consumers who are looking for an indulging experience. Product innovation is definitely one of the most important ways for market growth. Light-flavor beverage manufacturers successfully use the product features to recruit shoppers in Key & A tier cities who care about healthy lifestyle and are willing to try new products. We believe that there will be other new beverage sub-categories to meet consumers’ special demands in the future. The beverage market will be more diversified and we will see more and more cross category innovations. For manufacturers, it is not only a severe challenge, but also a huge opportunity. Keeping product innovation and performance tracking will be the key to stay ahead of competition.
1. In this article, beverages include packaged water, sodas, juice, ready-to-drink tea, energy drinks, ready-to-drink coffee, yogurt drinks, and Asian traditional drinks, such as herbal tea, vegetable protein drinks, fruit vinegar, cereal protein drinks, and chrysanthemum tea, etc.
2. Light-flavor beverages include President Haizhiyan, Shuiquduo, SUNTORY Qin series, Master Kong Sea Crystal Lemon, Nongfu Spring Shui series, Coca-Cola Shuidongle, Pepsi Weidongli, Mizone Suiyue, Wantwant Qinshidai.
3. Light-flavor beverage is included in ‘juice and functional drinks’, not calculated individually.
|
'''
Created on 04-Feb-2014
@author: dgraja
'''
from sqlalchemy import create_engine
from sqlalchemy import Table
from sqlalchemy import Integer
from sqlalchemy import String
from sqlalchemy import MetaData
from sqlalchemy import Column
from sqlalchemy.exc import OperationalError
from sqlalchemy.ext.declarative import declarative_base
import os
#help ('sqlalchemy')
#dir ('sqlalchemy')
working_directory = 'D:\\Temp\\Python'
os.chdir(working_directory)
db_path = os.path.join("D:\\Temp\\Python", "test01.db")
print db_path
engine = create_engine('sqlite:///' +db_path)
metadata = MetaData()
users = Table(
'users', metadata,
Column('id', Integer, primary_key=True),
Column('username', String(512), nullable=False),
Column('email', String(512), nullable=True)
)
metadata.create_all(engine)
# Connect to the actual database
conn = engine.connect()
def record(i):
return ('User ' + str(i), "user" + str(i) + "@example.com")
for i in range(10) :
rec = record(i)
# Create an INSERT expression
insert_expression = users.insert().values(username=rec[0], email=rec[1])
print str(insert_expression)
# execute the insert query
result = conn.execute(insert_expression)
# print the result
print (result.inserted_primary_key)
|
The Royal Highland Show, one of Scotland’s most iconic and enduring annual events, is putting the finishing touches to its 175th Show, which runs from 18 -21 June 2015 at the Royal Highland Centre, Ingliston, Edinburgh.
Billed as the biggest and best celebration of farming, food and rural life, the ‘Royal Highland’ is one of Europe’s most impressive rural shop windows, showcasing handpicked livestock, the finest food & drink and rural living at its most vibrant.
The Royal Highland Show is the largest equestrian show in Scotland with almost 3,000 horses and ponies competing in classes for light and heavy horses, private driving, heavy horse turnouts, harness and grooming plus top class show jumping.
With the vast majority of agricultural manufacturers, dealers and specialists represented at the Show, farmers and others in the agricultural sector have a unique opportunity to review the latest releases and developments.
Education is also a key part of the Show, where The Royal Highland Education Trust (RHET) is providing free interactive activities for school groups and the public at their Education Centre.
The Show offers a unique ‘High Street’ experience for all visitors, offering everything from luxury fashions, arts, crafts and home and garden accessories. Visitors will have the chance to pick up the latest country styles or take home a piece of rural life.
The Show runs from Thursday, June 18 to Sunday, June 21.
|
__author__ = 'thor'
import re
import warnings

import matplotlib.pyplot as plt
import numpy as np
import pandas as pd

from ut.pplot.to import simple_plotly
class ParallelTimeSeriesDacc(object):
def __init__(self, data_source, date_var, index_var, ts_vars_name='vars', **kwargs):
if isinstance(data_source, pd.DataFrame):
self.df = data_source
elif isinstance(data_source, str):
if data_source == 'elasticsearch':
from ut.dacc.es.com import ElasticCom
es_kwargs = kwargs.get('es_kwargs', {})
if 'index' in list(kwargs.keys()):
es_kwargs['index'] = kwargs.pop('index')
if 'data_type' in list(kwargs.keys()):
es_kwargs['data_type'] = kwargs.pop('data_type')
ec = ElasticCom(**es_kwargs)
search_kwargs = kwargs.get('search_kwargs', {})
search_kwargs = dict({'_id': False}, **search_kwargs)
exclude_fields = search_kwargs.pop('exclude_fields', [])
self.df = ec.search_and_export_to_df(exclude_fields=exclude_fields, **search_kwargs)
else:
raise NotImplementedError("Unrecognized data_source: {}".format(data_source))
else:
raise NotImplementedError("Unrecognized data_source type: {}".format(type(data_source)))
assert set([date_var, index_var]).issubset(self.df.columns), \
"Both {} and {} must be columns of the data".format(date_var, index_var)
self.date_var = date_var
self.index_var = index_var
self.ts_vars_name = ts_vars_name
self.var_names = [x for x in self.df.columns if x not in [self.date_var, self.index_var]]
self.df.columns.set_names([self.ts_vars_name], inplace=True)
# pivoting data
original_length = len(self.df)
self.df.drop_duplicates(subset=[self.index_var, self.date_var], inplace=True)
if len(self.df) != original_length:
raise RuntimeWarning("There are duplicate ({},{}), so I'm deleting offending records"
.format(self.index_var, self.date_var))
self.df = self.df[~self.df[self.date_var].notnull()]
raise AssertionError("There are duplicate ({},{}), so I can't pivot the data"
.format(self.index_var, self.date_var))
self.df = self.df.pivot(index=self.date_var, columns=self.index_var)
self.df.sort_index(inplace=True)
def vars_list(self, df=None):
if df is None:
df = self.df
return np.unique(df.columns.get_level_values(level=0))
def indices_list(self, df=None):
if df is None:
df = self.df
return np.unique(df.columns.get_level_values(level=1))
@staticmethod
def drop_columns_with_insufficient_dates(d, min_num_of_dates):
"""
Drop columns that don't have a minimum number of non-NaN dates
"""
print(("original shape: {}".format(d.shape)))
num_of_dates = (~d.isnull()).sum()
num_of_dates = num_of_dates[num_of_dates > min_num_of_dates].sort(inplace=False, ascending=False)
d = d[num_of_dates.index.values].dropna(how='all')
print(("shape with at least {} dates: {}".format(min_num_of_dates, d.shape)))
return d
@staticmethod
def latest_full_shape_choices(d):
"""
Get a table describing the shapes of all
"""
shape_choices = list()
for i in range(1, len(d)):
this_shape = d.iloc[-i:].dropna(axis=1).shape
shape_choices.append({'i': i, 'rows': this_shape[0], 'cols': this_shape[1]})
shape_choices = pd.DataFrame(shape_choices).set_index('i')
shape_choices['pts'] = shape_choices['rows'] * shape_choices['cols']
return shape_choices
def print_percentages_of_xvar_more_than_yvar(self, xvar, yvar, min_y=0, df=None):
if df is None:
df = self.df.stack(self.index_var)
t = df[[xvar, yvar]].dropna()
t = t[t[yvar] >= min_y]
n_xvar_more_than_yvar = sum(t[xvar] > t[yvar])
print(("{:.2f}% ({}/{}) of '{}' > '{}'".format(100 * n_xvar_more_than_yvar / float(len(t)),
n_xvar_more_than_yvar, len(t),
xvar, yvar)))
def plot_time_series(self, d, title=None, y_labels=None,
width_factor=2, length=18, only_first_non_null=True, with_plotly=False):
# idSite = 349
if isinstance(d, tuple):
d = self.df.loc[:, d]
if only_first_non_null:
lidx = np.any(d.notnull(), axis=1)
d = d.iloc[lidx]
default_title, default_y_labels = _choose_title_and_y_label(d)
title = title or default_title
y_labels = y_labels or default_y_labels
last_ax = None
n = len(d.columns)
fig = plt.figure(figsize=(length, min(n, 50) * width_factor))
for i, tt in enumerate(d.items()):
plt.subplot(n, 1, i + 1)
tt[1].index = tt[1].index.map(pd.to_datetime)
tt[1].plot(sharex=last_ax)
ax = plt.gca()
if title == 'y_labels':
ax.set_title(y_labels[i])
else:
if i == 0:
ax.set_title(title)
if isinstance(y_labels[i], str):
plt.ylabel(y_labels[i].replace('_', '\n'))
else:
plt.ylabel(y_labels[i])
ax.yaxis.set_label_position("right")
if i + 1 < n:
plt.xlabel('')
last_ax = ax
if with_plotly:
return simple_plotly(fig)
def get_plotly_url(self, plotly_obj):
if hasattr(plotly_obj, 'embed_code'):
return re.compile('src="([^"]*)"').search(plotly_obj.embed_code).group(1)
def _choose_title_and_y_label(d):
col_vals = d.columns.values
try:
level_1_vals, level_2_vals = list(zip(*col_vals))
if len(np.unique(level_1_vals)) == 1:
return level_1_vals[1], level_2_vals
elif len(np.unique(level_2_vals)) == 1:
return level_2_vals[0], level_1_vals
else:
return " & ".join(d.columns.names), col_vals
except TypeError:
return " & ".join(d.columns.names), col_vals
|
Elegance was introduced in 2007 by Inge-Glas of Germany® as part of the 2007 Very Merry Christmas Collection and features the exclusive 5-Point Star Crown™.
This unique glass Christmas ornament has been lovingly handcrafted by the talented Inge-Glas® artisans in Neustadt near Coburg in Northern Bavaria using centuries-old glassblowing techniques. Elegance was mouth-blown with clear crystal glass, coated inside with a layer of fine sterling silver, and hand painted using only the finest lead-free paints, glitters, and decorative materials.
Elegance was retired in 2008 so availability is limited.
|
#!/usr/bin/python3
from multiprocessing import Queue
from queue import Empty
import logging
import time
from manager import Manager
import server
""" Main file for the stoplight daemon. """
def init_logging(logfile):
""" Initializes logging.
Args:
logfile: File that stuff will be logged to. """
root = logging.getLogger()
root.setLevel(logging.DEBUG)
file_handler = logging.FileHandler(logfile)
file_handler.setLevel(logging.DEBUG)
stream_handler = logging.StreamHandler()
stream_handler.setLevel(logging.DEBUG)
formatter = logging.Formatter("%(name)s@%(asctime)s: " +
"[%(levelname)s] %(message)s")
file_handler.setFormatter(formatter)
stream_handler.setFormatter(formatter)
root.addHandler(file_handler)
root.addHandler(stream_handler)
def main():
# Initialize logging.
init_logging("stoplightd.log")
# Start the server.
server_queue = Queue()
server.start(server_queue)
# Create and run the manager.
manager = Manager()
while True:
# Check for new jobs and add them.
try:
command = server_queue.get(block=False)
if command["type"] == "add_job":
# Add the job.
manager.add_job(command["job_dir"])
except Empty:
pass
manager.update()
time.sleep(5)
if __name__ == "__main__":
main()
|
Welcome to the W Facial Aesthetics Blog! Check this space regularly for news, updates and new content from Dr. Ivan Wayne and his staff!
Welcome to the W Facial Aesthetics Blog! Here you’ll find curated articles and content directly from Dr. Ivan Wayne and his team, as well as news on events and new products and services at our practice. Check this space often for new content!
What should I consider before having Facial Plastic Surgery?
There are three key reasons you would choose a facial plastic surgeon – superior training, background and focus. Facial plastic surgery is a unique field, separate from (general) plastic surgery.
|
#!/usr/bin/env py.test
"""Unit test for HarmonicSmoothing and ALE"""
# Copyright (C) 2013 Jan Blechta
#
# This file is part of DOLFIN.
#
# DOLFIN is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# DOLFIN is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with DOLFIN. If not, see <http://www.gnu.org/licenses/>.
from __future__ import print_function
import pytest
from dolfin import UnitSquareMesh, BoundaryMesh, Expression, \
CellFunction, SubMesh, Constant, MPI, MeshQuality,\
mpi_comm_world, ALE
from dolfin_utils.test import skip_in_parallel
def test_HarmonicSmoothing():
# Create some mesh and its boundary
mesh = UnitSquareMesh(10, 10)
boundary = BoundaryMesh(mesh, 'exterior')
# Move boundary
disp = Expression(("0.3*x[0]*x[1]", "0.5*(1.0-x[1])"), degree=2)
ALE.move(boundary, disp)
# Move mesh according to given boundary
ALE.move(mesh, boundary)
# Check that new boundary topology corresponds to given one
boundary_new = BoundaryMesh(mesh, 'exterior')
assert boundary.topology().hash() == boundary_new.topology().hash()
# Check that coordinates are almost equal
err = sum(sum(abs(boundary.coordinates() \
- boundary_new.coordinates()))) / mesh.num_vertices()
print("Current CG solver produced error in boundary coordinates", err)
assert round(err - 0.0, 5) == 0
# Check mesh quality
magic_number = 0.35
rmin = MeshQuality.radius_ratio_min_max(mesh)[0]
assert rmin > magic_number
@skip_in_parallel
def test_ale():
# Create some mesh
mesh = UnitSquareMesh(4, 5)
# Make some cell function
# FIXME: Initialization by array indexing is probably
# not a good way for parallel test
cellfunc = CellFunction('size_t', mesh)
cellfunc.array()[0:4] = 0
cellfunc.array()[4:] = 1
# Create submeshes - this does not work in parallel
submesh0 = SubMesh(mesh, cellfunc, 0)
submesh1 = SubMesh(mesh, cellfunc, 1)
# Move submesh0
disp = Constant(("0.1", "-0.1"))
ALE.move(submesh0, disp)
# Move and smooth submesh1 accordignly
ALE.move(submesh1, submesh0)
# Move mesh accordingly
parent_vertex_indices_0 = \
submesh0.data().array('parent_vertex_indices', 0)
parent_vertex_indices_1 = \
submesh1.data().array('parent_vertex_indices', 0)
mesh.coordinates()[parent_vertex_indices_0[:]] = \
submesh0.coordinates()[:]
mesh.coordinates()[parent_vertex_indices_1[:]] = \
submesh1.coordinates()[:]
# If test passes here then it is probably working
# Check for cell quality for sure
magic_number = 0.28
rmin = MeshQuality.radius_ratio_min_max(mesh)[0]
assert rmin > magic_number
|
The Indian Space Research Organization launched the IRNSS-1I as part of India's regional navigation system. The launch took the satellite to geosynchronous orbit. The previous satellite in this series, IRNSS-1H, was ultimately lost after its rocket's payload fairing failed to deploy.
The launch date was Wednesday, April 11, 2018 at 10:34 PM (UTC).
|
# Copyright 2015, 2016 OpenMarket Ltd
# Copyright 2018 New Vector Ltd
# Copyright 2020 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import enum
import threading
from typing import Callable, Generic, Iterable, MutableMapping, Optional, TypeVar, Union
from prometheus_client import Gauge
from twisted.internet import defer
from twisted.python import failure
from synapse.util.async_helpers import ObservableDeferred
from synapse.util.caches.lrucache import LruCache
from synapse.util.caches.treecache import TreeCache, iterate_tree_cache_entry
cache_pending_metric = Gauge(
"synapse_util_caches_cache_pending",
"Number of lookups currently pending for this cache",
["name"],
)
T = TypeVar("T")
KT = TypeVar("KT")
VT = TypeVar("VT")
class _Sentinel(enum.Enum):
# defining a sentinel in this way allows mypy to correctly handle the
# type of a dictionary lookup.
sentinel = object()
class DeferredCache(Generic[KT, VT]):
"""Wraps an LruCache, adding support for Deferred results.
It expects that each entry added with set() will be a Deferred; likewise get()
will return a Deferred.
"""
__slots__ = (
"cache",
"thread",
"_pending_deferred_cache",
)
def __init__(
self,
name: str,
max_entries: int = 1000,
tree: bool = False,
iterable: bool = False,
apply_cache_factor_from_config: bool = True,
):
"""
Args:
name: The name of the cache
max_entries: Maximum amount of entries that the cache will hold
keylen: The length of the tuple used as the cache key. Ignored unless
`tree` is True.
tree: Use a TreeCache instead of a dict as the underlying cache type
iterable: If True, count each item in the cached object as an entry,
rather than each cached object
apply_cache_factor_from_config: Whether cache factors specified in the
config file affect `max_entries`
"""
cache_type = TreeCache if tree else dict
# _pending_deferred_cache maps from the key value to a `CacheEntry` object.
self._pending_deferred_cache = (
cache_type()
) # type: Union[TreeCache, MutableMapping[KT, CacheEntry]]
def metrics_cb():
cache_pending_metric.labels(name).set(len(self._pending_deferred_cache))
# cache is used for completed results and maps to the result itself, rather than
# a Deferred.
self.cache = LruCache(
max_size=max_entries,
cache_name=name,
cache_type=cache_type,
size_callback=(lambda d: len(d) or 1) if iterable else None,
metrics_collection_callback=metrics_cb,
apply_cache_factor_from_config=apply_cache_factor_from_config,
) # type: LruCache[KT, VT]
self.thread = None # type: Optional[threading.Thread]
@property
def max_entries(self):
return self.cache.max_size
def check_thread(self):
expected_thread = self.thread
if expected_thread is None:
self.thread = threading.current_thread()
else:
if expected_thread is not threading.current_thread():
raise ValueError(
"Cache objects can only be accessed from the main thread"
)
def get(
self,
key: KT,
callback: Optional[Callable[[], None]] = None,
update_metrics: bool = True,
) -> defer.Deferred:
"""Looks the key up in the caches.
For symmetry with set(), this method does *not* follow the synapse logcontext
rules: the logcontext will not be cleared on return, and the Deferred will run
its callbacks in the sentinel context. In other words: wrap the result with
make_deferred_yieldable() before `await`ing it.
Args:
key:
callback: Gets called when the entry in the cache is invalidated
update_metrics (bool): whether to update the cache hit rate metrics
Returns:
A Deferred which completes with the result. Note that this may later fail
if there is an ongoing set() operation which later completes with a failure.
Raises:
KeyError if the key is not found in the cache
"""
callbacks = [callback] if callback else []
val = self._pending_deferred_cache.get(key, _Sentinel.sentinel)
if val is not _Sentinel.sentinel:
val.callbacks.update(callbacks)
if update_metrics:
m = self.cache.metrics
assert m # we always have a name, so should always have metrics
m.inc_hits()
return val.deferred.observe()
val2 = self.cache.get(
key, _Sentinel.sentinel, callbacks=callbacks, update_metrics=update_metrics
)
if val2 is _Sentinel.sentinel:
raise KeyError()
else:
return defer.succeed(val2)
def get_immediate(
self, key: KT, default: T, update_metrics: bool = True
) -> Union[VT, T]:
"""If we have a *completed* cached value, return it."""
return self.cache.get(key, default, update_metrics=update_metrics)
def set(
self,
key: KT,
value: defer.Deferred,
callback: Optional[Callable[[], None]] = None,
) -> defer.Deferred:
"""Adds a new entry to the cache (or updates an existing one).
The given `value` *must* be a Deferred.
First any existing entry for the same key is invalidated. Then a new entry
is added to the cache for the given key.
Until the `value` completes, calls to `get()` for the key will also result in an
incomplete Deferred, which will ultimately complete with the same result as
`value`.
If `value` completes successfully, subsequent calls to `get()` will then return
a completed deferred with the same result. If it *fails*, the cache is
invalidated and subequent calls to `get()` will raise a KeyError.
If another call to `set()` happens before `value` completes, then (a) any
invalidation callbacks registered in the interim will be called, (b) any
`get()`s in the interim will continue to complete with the result from the
*original* `value`, (c) any future calls to `get()` will complete with the
result from the *new* `value`.
It is expected that `value` does *not* follow the synapse logcontext rules - ie,
if it is incomplete, it runs its callbacks in the sentinel context.
Args:
key: Key to be set
value: a deferred which will complete with a result to add to the cache
callback: An optional callback to be called when the entry is invalidated
"""
if not isinstance(value, defer.Deferred):
raise TypeError("not a Deferred")
callbacks = [callback] if callback else []
self.check_thread()
existing_entry = self._pending_deferred_cache.pop(key, None)
if existing_entry:
existing_entry.invalidate()
# XXX: why don't we invalidate the entry in `self.cache` yet?
# we can save a whole load of effort if the deferred is ready.
if value.called:
result = value.result
if not isinstance(result, failure.Failure):
self.cache.set(key, result, callbacks)
return value
# otherwise, we'll add an entry to the _pending_deferred_cache for now,
# and add callbacks to add it to the cache properly later.
observable = ObservableDeferred(value, consumeErrors=True)
observer = observable.observe()
entry = CacheEntry(deferred=observable, callbacks=callbacks)
self._pending_deferred_cache[key] = entry
def compare_and_pop():
"""Check if our entry is still the one in _pending_deferred_cache, and
if so, pop it.
Returns true if the entries matched.
"""
existing_entry = self._pending_deferred_cache.pop(key, None)
if existing_entry is entry:
return True
# oops, the _pending_deferred_cache has been updated since
# we started our query, so we are out of date.
#
# Better put back whatever we took out. (We do it this way
# round, rather than peeking into the _pending_deferred_cache
# and then removing on a match, to make the common case faster)
if existing_entry is not None:
self._pending_deferred_cache[key] = existing_entry
return False
def cb(result):
if compare_and_pop():
self.cache.set(key, result, entry.callbacks)
else:
# we're not going to put this entry into the cache, so need
# to make sure that the invalidation callbacks are called.
# That was probably done when _pending_deferred_cache was
# updated, but it's possible that `set` was called without
# `invalidate` being previously called, in which case it may
# not have been. Either way, let's double-check now.
entry.invalidate()
def eb(_fail):
compare_and_pop()
entry.invalidate()
# once the deferred completes, we can move the entry from the
# _pending_deferred_cache to the real cache.
#
observer.addCallbacks(cb, eb)
# we return a new Deferred which will be called before any subsequent observers.
return observable.observe()
def prefill(
self, key: KT, value: VT, callback: Optional[Callable[[], None]] = None
):
callbacks = [callback] if callback else []
self.cache.set(key, value, callbacks=callbacks)
def invalidate(self, key):
"""Delete a key, or tree of entries
If the cache is backed by a regular dict, then "key" must be of
the right type for this cache
If the cache is backed by a TreeCache, then "key" must be a tuple, but
may be of lower cardinality than the TreeCache - in which case the whole
subtree is deleted.
"""
self.check_thread()
self.cache.del_multi(key)
# if we have a pending lookup for this key, remove it from the
# _pending_deferred_cache, which will (a) stop it being returned
# for future queries and (b) stop it being persisted as a proper entry
# in self.cache.
entry = self._pending_deferred_cache.pop(key, None)
# run the invalidation callbacks now, rather than waiting for the
# deferred to resolve.
if entry:
# _pending_deferred_cache.pop should either return a CacheEntry, or, in the
# case of a TreeCache, a dict of keys to cache entries. Either way calling
# iterate_tree_cache_entry on it will do the right thing.
for entry in iterate_tree_cache_entry(entry):
entry.invalidate()
def invalidate_all(self):
self.check_thread()
self.cache.clear()
for entry in self._pending_deferred_cache.values():
entry.invalidate()
self._pending_deferred_cache.clear()
class CacheEntry:
__slots__ = ["deferred", "callbacks", "invalidated"]
def __init__(
self, deferred: ObservableDeferred, callbacks: Iterable[Callable[[], None]]
):
self.deferred = deferred
self.callbacks = set(callbacks)
self.invalidated = False
def invalidate(self):
if not self.invalidated:
self.invalidated = True
for callback in self.callbacks:
callback()
self.callbacks.clear()
|
Are you stuck in a co-dependent relationship and retired?
When two people have a very close relationship, especially in our later years, it’s natural to depend on each other for certain things.
My guess is that it's normally the woman who becomes the Retirement Housekeeper, Appointment Maker, Peace Keeper, Partner Pleaser... but not necessarily so!
“Codependency” is defined as an unhealthy relationship where partners are overly reliant on one another.
My mother-in-law was overly reliant on her husband since marriage. When he passed away, 39 years before her, she was totally lost. She didn’t know how to shop, how to pay the bills, he did it all — and she was the Princess Homemaker. That was good while it lasted, but later, she was one lost woman.
Answer these questions to examine whether you might be involved in a codependent relationship. Use these questions to guide you in correcting your behaviors and emotional expressions in your loving relationship.
Are you afraid to express your feelings? If you notice you often hold back for fear of how he/she will react, that’s a sign the relationship is not as healthy as it could be. — OR — If you do express feelings honestly, do you then feel guilty? Perhaps you think “I shouldn’t have said anything... it just made matters worse”.
Are you afraid of asking for help? If you can’t seek assistance from your partner, it’s very frustrating. In a healthy relationship, partners freely and regularly ask for a hand. — OR — When you do ask for help, does he/she help? Hopefully, they are open and willing to help. If you’re codependent, you might not feel comfortable with asking… thus you just do the work.
Do you find yourself feeling hurt or angry because nobody notices your needs? Although you try to take care of everything, you feel disappointed that they don’t see what’s happening or how frustrated you are?
Do you have hobbies and activities to enjoy separate from your partner? To maintain a healthy identity, it’s important to enjoy your own hobbies and interests. You may enjoy doing some things together — but surely there are other interests that could be done outside the relationship?
Do you try to control things to make life better? If you feel like you are walking on eggshells, you don’t want to upset your partner. Therefore, you take steps to control situations however you can… but that’s simply adding stress to your own life. Stress isn’t good.
Are you a perfectionist? After all, if you get things perfect, then maybe your partner will be happier, more satisfied, and less angry, disappointed, or annoyed with you. If you feel this way, your relationship is likely codependent. Perfection is not a good way to enjoy life.
Would you describe your partner as needy, emotionally distant, or unreliable? These qualities often draw in partners who are seen as “caretakers.” Thus, the codependency begins… and if you’ve been doing this for years, it’s not easy to break either. Are you the caregiving type?
How is your health as it relates to stress? Often, people involved in codependent relationships experience health issues that might be related to stress like asthma, allergies, out-of-control eating, chest pain, and skin disorders.
The good news is that if you believe you’re in a codependent relationship, you can begin altering your behavior right away to gain back a healthy sense of individuality. Take it one baby step at a time… ask for help, go to lunch together and talk about how you feel, find new friends to balance out life.
If you feel you need help, seek out a professional trained in helping those with codependency. You’ll feel better and your relationship will be stronger when you can relate to each other in more positive ways.
|
# Copyright 2013 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import eventlet
from neutron.agent.linux import async_process
from neutron.agent.linux import utils
from neutron.tests import base
class AsyncProcessTestFramework(base.BaseTestCase):
    """Shared fixture: writes a small temp file with known lines of data."""

    def setUp(self):
        super(AsyncProcessTestFramework, self).setUp()
        self.test_file_path = self.get_temp_file_path('test_async_process.tmp')
        self.data = [str(x) for x in range(4)]
        contents = ''.join('%s\n' % item for item in self.data)
        with open(self.test_file_path, 'w') as data_file:
            data_file.write(contents)

    def _check_stdout(self, proc):
        """Block until the process has emitted exactly self.data on stdout."""
        seen = []
        while seen != self.data:
            chunk = list(proc.iter_stdout())
            if chunk:
                seen.extend(chunk)
            eventlet.sleep(0.01)
class TestAsyncProcess(AsyncProcessTestFramework):
    """Functional lifecycle tests for AsyncProcess (tailing a known file)."""

    def _safe_stop(self, proc):
        # Cleanup helper: stopping an already-dead process raises; ignore it.
        try:
            proc.stop()
        except async_process.AsyncProcessException:
            pass

    def test_stopping_async_process_lifecycle(self):
        tailer = async_process.AsyncProcess(['tail', '-f',
                                            self.test_file_path])
        self.addCleanup(self._safe_stop, tailer)
        tailer.start(block=True)
        self._check_stdout(tailer)
        tailer.stop(block=True)

        # The child must have been killed (SIGKILL => returncode -9) and all
        # of its watcher greenthreads must have finished.
        tailer._process.wait()
        self.assertEqual(tailer._process.returncode, -9)
        for watcher in tailer._watchers:
            watcher.wait()

    def test_async_process_respawns(self):
        tailer = async_process.AsyncProcess(['tail', '-f',
                                            self.test_file_path],
                                           respawn_interval=0)
        self.addCleanup(self._safe_stop, tailer)
        tailer.start()

        # The same output must be readable both before and after the child
        # is killed and automatically respawned.
        self._check_stdout(tailer)
        old_pid = tailer.pid
        utils.execute(['kill', '-9', old_pid])
        utils.wait_until_true(
            lambda: tailer.is_active() and old_pid != tailer.pid,
            timeout=5,
            sleep=0.01,
            exception=RuntimeError(_("Async process didn't respawn")))
        self._check_stdout(tailer)
|
After staying hushed for months, the inaugural edition of the Upstream Music Fest and Summit finally has a lineup. Taking place in Pioneer Square the weekend of May 11–13, Paul Allen's Northwest counter to SXSW will feature Flying Lotus, AlunaGeorge, Shabazz Palaces, and a ridiculously deep collection of local acts. The summit—more a gathering for the music industry—boasts talks from Macklemore, Quincy Jones, and more. While there's certainly plenty of terrific Seattle talent on the bill—Mike McCready, the Maldives, Shelby Earl, Dave B, Chris Staples, DoNormaal, and Tangerine to name just a few—without huge headliners (Flying Lotus and Shabazz Palaces are tremendous, but far from massive draws), it remains to be seen if people will actually drop over a $100 on an Upstream badge when they can see most of the acts around town for a fraction of the cost without the festival chaos.
Passes for Upstream Music Fest and Summit are on sale now at UpstreamMusicFest.com. Basic festival passes run $110–$135, ones that include the summit cost $225–$325, and a VIP option is available for $425.
|
# Copyright The IETF Trust 2010, All Rights Reserved
from django.shortcuts import render_to_response as render
from django.template import RequestContext
from django.conf import settings
import debug # pyflakes:ignore
def preferences(request, **kwargs):
    """Render the user-preference settings page and synchronise cookies.

    Preference values may be supplied via **kwargs (typically from the
    wrapper views below).  A value of None requests deletion of that cookie;
    invalid values are ignored.  Missing or malformed cookie values are
    reset to the defaults in settings.USER_PREFERENCE_DEFAULTS.
    """
    preferences = request.COOKIES.copy()
    new_cookies = {}
    del_cookies = []
    for key in settings.USER_PREFERENCE_DEFAULTS.keys():
        if key in kwargs:
            if kwargs[key] is None:
                del_cookies += [key]
            else:
                # Ignore bad kwargs: numeric preferences must be digits,
                # toggles must be 'on' or 'off'.
                if key in ['new_enough', 'expires_soon'] and not kwargs[key].isdigit():
                    pass
                elif key in ['full_draft', 'left_menu'] and kwargs[key] not in ['on', 'off']:
                    pass
                else:
                    preferences[key] = new_cookies[key] = kwargs[key]
        if key not in preferences or preferences[key] in [None, 'None', ''] or key in del_cookies:
            preferences[key] = settings.USER_PREFERENCE_DEFAULTS[key]
        # Reset bad cookie values to their defaults and schedule the bad
        # cookie for deletion from the client.
        if key in ['new_enough', 'expires_soon'] and not preferences[key].isdigit():
            preferences[key] = settings.USER_PREFERENCE_DEFAULTS[key]
            del_cookies += [key]
        elif key in ['full_draft', 'left_menu'] and preferences[key] not in ['on', 'off']:
            preferences[key] = settings.USER_PREFERENCE_DEFAULTS[key]
            del_cookies += [key]
    request.COOKIES.update(preferences)
    response = render("cookies/settings.html", preferences, context_instance=RequestContext(request))
    for key in new_cookies:
        response.set_cookie(key, new_cookies[key], settings.SESSION_COOKIE_AGE)
    for key in del_cookies:
        response.delete_cookie(key)
    return response
def new_enough(request, days=None):
    # Set/clear the 'new_enough' preference (day threshold for "new" items).
    return preferences(request, new_enough=days)


def expires_soon(request, days=None):
    # Set/clear the 'expires_soon' preference (day threshold for expiry warnings).
    return preferences(request, expires_soon=days)


def full_draft(request, enabled=None):
    # Toggle the 'full_draft' preference ('on'/'off'; None clears the cookie).
    return preferences(request, full_draft=enabled)


def left_menu(request, enabled=None):
    # Toggle the 'left_menu' preference ('on'/'off'; None clears the cookie).
    return preferences(request, left_menu=enabled)
|
What is the best DJ application for Android?
Good question but for me the answer is it depends on what you want to do!
There are several types of applications, pads, synths, or mixers.
To help you make the choices we have made a selection of the best Android DJ applications.
DJ Pad also called Launchpad or MIDI Launcher is a control pad to compose and manage your music intuitively. We designed our own DJ Pad Android offering sample music library and a control surface to trigger sounds and edit it during live session.
A synthesizer lets you create your own musical rhythms and sounds; a DJ does not necessarily need one to make music — it is only useful when he wants to craft a sound to his own taste. Once the sound is created, the DJ imports it into a pad to mix with other sounds.
DJ turntable Also known mixers, it is done to select and play existing music, the work of DJ is to create transitions between the sounds of each deck. You import sounds in the application and while playing you edit sound with different effects available on the software.
|
import logging
import math
import os
import click
import geojson as gj
import requests.utils
from sentinelsat import __version__ as sentinelsat_version
from sentinelsat.sentinel import SentinelAPI, geojson_to_wkt, read_geojson, placename_to_wkt
from sentinelsat.exceptions import InvalidKeyError
logger = logging.getLogger("sentinelsat")


def _set_logger_handler(level="INFO"):
    """Attach a plain-message StreamHandler to the sentinelsat logger."""
    logger.setLevel(level)
    handler = logging.StreamHandler()
    handler.setLevel(level)
    handler.setFormatter(logging.Formatter("%(message)s"))
    logger.addHandler(handler)
class CommaSeparatedString(click.ParamType):
    """Click parameter type that splits a comma-separated option value."""

    name = "comma-string"

    def convert(self, value, param, ctx):
        # Empty/None values pass through unchanged; anything else is split.
        if not value:
            return value
        return value.split(",")
# CLI entry point: the @click.option decorators below define every flag of
# the sentinelsat command; cli() itself performs the query and/or download.
@click.command(context_settings=dict(help_option_names=["-h", "--help"]))
@click.option(
    "--user",
    "-u",
    envvar="DHUS_USER",
    default=None,
    help="Username (or environment variable DHUS_USER is set)",
)
@click.option(
    "--password",
    "-p",
    envvar="DHUS_PASSWORD",
    default=None,
    help="Password (or environment variable DHUS_PASSWORD is set)",
)
@click.option(
    "--url",
    default="https://scihub.copernicus.eu/apihub/",
    envvar="DHUS_URL",
    help="""Define API URL. Default URL is
        'https://scihub.copernicus.eu/apihub/' (or environment variable DHUS_URL).
        """,
)
@click.option(
    "--start",
    "-s",
    default="NOW-1DAY",
    show_default=True,
    help="Start date of the query in the format YYYYMMDD.",
)
@click.option(
    "--end",
    "-e",
    default="NOW",
    show_default=True,
    help="End date of the query in the format YYYYMMDD.",
)
@click.option(
    "--geometry", "-g", type=click.Path(exists=True), help="Search area geometry as GeoJSON file."
)
@click.option(
    "--uuid",
    type=CommaSeparatedString(),
    default=None,
    help="Select a specific product UUID instead of a query. Multiple UUIDs can separated by comma.",
)
@click.option(
    "--name",
    type=CommaSeparatedString(),
    default=None,
    help="Select specific product(s) by filename. Supports wildcards.",
)
@click.option(
    "--sentinel",
    type=click.Choice(["1", "2", "3", "5"]),
    help="Limit search to a Sentinel satellite (constellation)",
)
@click.option(
    "--instrument",
    type=click.Choice(["MSI", "SAR-C SAR", "SLSTR", "OLCI", "SRAL"]),
    help="Limit search to a specific instrument on a Sentinel satellite.",
)
@click.option(
    "--producttype", type=str, default=None, help="Limit search to a Sentinel product type."
)
@click.option(
    "-c",
    "--cloud",
    type=int,
    help="Maximum cloud cover in percent. (requires --sentinel to be 2 or 3)",
)
@click.option(
    "-o",
    "--order-by",
    help="Comma-separated list of keywords to order the result by. "
    "Prefix keywords with '-' for descending order.",
)
@click.option(
    "-l", "--limit", type=int, help="Maximum number of results to return. Defaults to no limit."
)
@click.option("--download", "-d", is_flag=True, help="Download all results of the query.")
@click.option(
    "--path",
    type=click.Path(exists=True),
    default=".",
    help="Set the path where the files will be saved.",
)
@click.option(
    "--query",
    "-q",
    type=CommaSeparatedString(),
    default=None,
    help="""Extra search keywords you want to use in the query. Separate
        keywords with comma. Example: 'producttype=GRD,polarisationmode=HH'.
        """,
)
@click.option(
    "--location",
    type=str,
    help="Return only products overlapping with the bounding box of given location, "
    "e.g. 'Berlin', 'Germany' or '52.393974, 13.066955'.",
)
@click.option(
    "--footprints",
    is_flag=True,
    help="""Create a geojson file search_footprints.geojson with footprints
    and metadata of the returned products.
    """,
)
@click.option("--info", is_flag=True, is_eager=True, help="Displays the DHuS version used")
@click.version_option(version=sentinelsat_version, prog_name="sentinelsat")
def cli(
    user,
    password,
    geometry,
    start,
    end,
    uuid,
    name,
    download,
    sentinel,
    producttype,
    instrument,
    cloud,
    footprints,
    path,
    query,
    url,
    order_by,
    location,
    limit,
    info,
):
    """Search for Sentinel products and, optionally, download all the results
    and/or create a geojson file with the search result footprints.
    Beyond your Copernicus Open Access Hub user and password, you must pass a geojson file
    containing the geometry of the area you want to search for or the UUIDs of the products. If you
    don't specify the start and end dates, it will search in the last 24 hours.
    """
    _set_logger_handler()

    # Resolve credentials: CLI flags / env vars first, then fall back to
    # ~/.netrc (get_netrc_auth returns None -> TypeError on unpacking).
    if user is None or password is None:
        try:
            user, password = requests.utils.get_netrc_auth(url)
        except TypeError:
            pass

    if user is None or password is None:
        raise click.UsageError(
            "Missing --user and --password. Please see docs "
            "for environment variables and .netrc support."
        )

    api = SentinelAPI(user, password, url)

    # --info short-circuits: report the server version and exit.
    if info:
        ctx = click.get_current_context()
        click.echo("DHuS version: " + api.dhus_version)
        ctx.exit()

    # Build the OpenSearch keyword dict from the (mutually refining) options.
    search_kwargs = {}
    if sentinel and not (producttype or instrument):
        search_kwargs["platformname"] = "Sentinel-" + sentinel

    if instrument and not producttype:
        search_kwargs["instrumentshortname"] = instrument

    if producttype:
        search_kwargs["producttype"] = producttype

    if cloud:
        # NOTE(review): a --cloud value of 0 is falsy and silently ignored
        # here — confirm that is intended.
        if sentinel not in ["2", "3"]:
            logger.error("Cloud cover is only supported for Sentinel 2 and 3.")
            exit(1)
        search_kwargs["cloudcoverpercentage"] = (0, cloud)

    if query is not None:
        # Extra raw keywords, e.g. 'producttype=GRD,polarisationmode=HH'.
        search_kwargs.update((x.split("=") for x in query))

    if location is not None:
        # Geocode the placename and log the size of its bounding box.
        wkt, info = placename_to_wkt(location)
        minX, minY, maxX, maxY = info["bbox"]
        r = 6371  # average radius, km
        extent_east = r * math.radians(maxX - minX) * math.cos(math.radians((minY + maxY) / 2))
        extent_north = r * math.radians(maxY - minY)
        logger.info(
            "Querying location: '%s' with %.1f x %.1f km, %f, %f to %f, %f bounding box",
            info["display_name"],
            extent_north,
            extent_east,
            minY,
            minX,
            maxY,
            maxX,
        )
        search_kwargs["area"] = wkt

    if geometry is not None:
        search_kwargs["area"] = geojson_to_wkt(read_geojson(geometry))

    if uuid is not None:
        # Direct product lookup by UUID(s); no search query is issued.
        uuid_list = [x.strip() for x in uuid]
        products = {}
        for productid in uuid_list:
            try:
                products[productid] = api.get_product_odata(productid)
            except InvalidKeyError:
                logger.error("No product with ID '%s' exists on server", productid)
                exit(1)
    elif name is not None:
        # One name searches directly; several are OR-ed together.
        search_kwargs["identifier"] = name[0] if len(name) == 1 else "(" + " OR ".join(name) + ")"
        products = api.query(order_by=order_by, limit=limit, **search_kwargs)
    else:
        start = start or "19000101"
        end = end or "NOW"
        products = api.query(date=(start, end), order_by=order_by, limit=limit, **search_kwargs)

    if footprints is True:
        footprints_geojson = api.to_geojson(products)
        with open(os.path.join(path, "search_footprints.geojson"), "w") as outfile:
            outfile.write(gj.dumps(footprints_geojson))

    if download is True:
        product_infos, triggered, failed_downloads = api.download_all(products, path)
        if len(failed_downloads) > 0:
            # Record failures so the user can retry corrupt scenes later.
            with open(os.path.join(path, "corrupt_scenes.txt"), "w") as outfile:
                for failed_id in failed_downloads:
                    outfile.write("%s : %s\n" % (failed_id, products[failed_id]["title"]))
    else:
        # No download requested: log a one-line summary for each product.
        for product_id, props in products.items():
            if uuid is None:
                logger.info("Product %s - %s", product_id, props["summary"])
            else:  # querying uuids has no summary key
                logger.info(
                    "Product %s - %s - %s MB",
                    product_id,
                    props["title"],
                    round(int(props["size"]) / (1024.0 * 1024.0), 2),
                )
        if uuid is None:
            logger.info("---")
            logger.info(
                "%s scenes found with a total size of %.2f GB",
                len(products),
                api.get_products_size(products),
            )
|
Lately, I have been busy settling down after the return to my homeland. I am so glad that the whole jet lag thing is finally gone and I get luxurious quality sleep. In these two weeks, I have been pampered with food! On a side note, it is so great to be back to people I dearly missed.
#1. Trying out good food at The Red Bean Bag with the boy days after my arrival. Molten chocolate cake must be the soul mate of good coffee.
#2. IT IS OFFICIAL! God knew how badly I had wanted a Cath Kidston in my life. This is, by far, my favorite bag!
#3. Receiving this antique miniature from the boy months ago and I really really love it. He has the best taste and knows just what I want.
#4. Trying to restrain myself from drinking too much coffee, so light teas come as an alternative.
#5. Getting to play the piano anytime I want. I miss it so so much!
#6. It is such a happiness to hug this furry pumpkin again. As usual, he eats and sleeps around.
#7. I took this bunny home the other day and he was then called Billy.
#8. Apparels from London. H&M is my ultimate weakness!
This entry was posted on Wednesday, 26 June 2013 and is filed under Lately. You can follow any responses to this entry through the RSS 2.0. You can leave a response.
|
from apscheduler.schedulers.blocking import BlockingScheduler
import os
from pymongo import MongoClient
from client import TodoistClient
from todoist_app import send_tasks, send_FB_text
from todoist_app import MONGO_DB_TOKENS_ENDPOINT, MONGO_DB_TOKENS_PORT
from todoist_app import MONGO_DB_TOKENS_DATABASE
from apscheduler.schedulers import SchedulerAlreadyRunningError
from uuid import uuid4
DAY_OVERVIEW_TIME_HOUR = 6


def connect():
    """Open an authenticated handle to the bot's MongoDB database."""
    client = MongoClient(
        MONGO_DB_TOKENS_ENDPOINT,
        MONGO_DB_TOKENS_PORT
    )
    db_handle = client[MONGO_DB_TOKENS_DATABASE]
    db_handle.authenticate(
        os.environ['MONGO_DB_USERNAME'],
        os.environ['MONGO_DB_PWD']
    )
    return db_handle
# Module-level singletons: the blocking scheduler and the Mongo handle used
# by the scheduled jobs below.
scheduler = BlockingScheduler()
handle = connect()
def today_tasks(sender_id, tc):
    """Send the user's Todoist tasks for today, or a friendly no-task note."""
    tasks = tc.get_today_tasks()
    if not tasks:
        send_FB_text(
            sender_id,
            'You have no tasks today! Have a great day!'
        )
        return
    send_FB_text(
        sender_id,
        'Here are your tasks for today:'
    )
    send_tasks(
        sender_id,
        tasks,
        tc.tz_info['hours']
    )
@scheduler.scheduled_job('cron', hour=0)
def schedule_day_overview():
    # Runs daily at midnight (scheduler-local time): for every registered bot
    # user, schedule a per-user cron job delivering the day's task overview at
    # DAY_OVERVIEW_TIME_HOUR in the user's own timezone (tz_info['hours'] is
    # presumably the user's UTC offset in hours — TODO confirm).
    for entry in handle.bot_users.find():
        tc = TodoistClient(entry['access_token'])
        # Random id so repeated runs never collide with earlier jobs.
        job_id = uuid4().__str__()
        scheduler.add_job(
            today_tasks,
            args=[entry['sender_id'], tc],
            trigger='cron',
            hour=DAY_OVERVIEW_TIME_HOUR - tc.tz_info['hours'],
            id=job_id
        )
    # NOTE(review): BlockingScheduler.start() blocks and the scheduler is
    # already started at module level below, so only the
    # SchedulerAlreadyRunningError path should ever be taken here — confirm.
    try:
        scheduler.start()
    except SchedulerAlreadyRunningError:
        pass
# Start the blocking scheduler; this call does not return.
scheduler.start()
|
The Golden State of California is the place to find online singles from Match.com. We are a California personals site making your online dating experience the best. Subscribe now and start emailing singles in Toluca Terrace, California today! Select from thousands of Toluca Terrace date choices on the largest online dating site.
|
import os
from pbxproj import PBXGenericObject
class PBXFileReference(PBXGenericObject):
    """A PBX object that references a file on disk.

    Behavior relies on PBXGenericObject's mapping protocol: ``'key' in self``,
    ``del self['key']`` and attribute access are all backed by the parsed
    plist entries.
    """

    @classmethod
    def create(cls, path, tree='SOURCE_ROOT'):
        # Build a new file reference; 'name' is the file's basename.
        return cls().parse({
            '_id': cls._generate_id(),
            'isa': cls.__name__,
            'path': path,
            'name': os.path.split(path)[1],
            'sourceTree': tree
        })

    def set_explicit_file_type(self, file_type):
        # explicitFileType and lastKnownFileType are mutually exclusive.
        if 'lastKnownFileType' in self:
            del self['lastKnownFileType']
        self['explicitFileType'] = file_type

    def set_last_known_file_type(self, file_type):
        # explicitFileType and lastKnownFileType are mutually exclusive.
        if 'explicitFileType' in self:
            del self['explicitFileType']
        self['lastKnownFileType'] = file_type

    def get_file_type(self):
        # Prefer the explicit type when present.
        if 'explicitFileType' in self:
            return self.explicitFileType
        return self.lastKnownFileType

    def _print_object(self, indent_depth='', entry_separator='\n', object_start='\n',
                      indent_increment='\t'):
        # File references are always serialized on a single line.
        return super(PBXFileReference, self)._print_object('', entry_separator=' ', object_start='',
                                                           indent_increment='')

    def get_name(self):
        # 'name' may be absent; fall back to the path.
        if hasattr(self, 'name'):
            return self.name
        return self.path

    def remove(self, recursive=True):
        """Remove this file reference and every object that points at it."""
        parent = self.get_parent()
        # search on the BuildFiles if there is a build file to be removed, and remove it
        build_files = [build_file for build_file in parent.get_objects_in_section('PBXBuildFile')
                       if build_file.fileRef == self.get_id()]
        for build_file in build_files:
            build_file.remove(recursive)
        # search for each group that has a reference to the build file and remove it from it.
        for group in parent.get_objects_in_section('PBXGroup'):
            if self.get_id() in group.children:
                group.remove_child(self)
        # remove the file reference from its parent
        del parent[self.get_id()]
        return True
|
List any medical information we should know below.
It is highly important that you fill this out accurately now. Once you are in the ambulance is not the time to find out you have a condition that could have been handled beforehand.
Would you like to help in the preseason if needed?
Write any additional comments, concerns, or ideas below.
|
# encoding: utf-8
#
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Author: Kyle Lahnakoski (kyle@lahnakoski.com)
#
from __future__ import unicode_literals
from __future__ import division
from .. import struct
from .dimensions import Dimension
from .domains import Domain
from ..collections import AND, reverse
from ..env.logs import Log
from ..queries import MVEL, _normalize_select, INDEX_CACHE
from ..queries.filters import TRUE_FILTER, simplify
from ..struct import nvl, Struct, EmptyList, split_field, join_field, StructList, Null
from ..structs.wraps import wrap, unwrap, listwrap
class Query(object):
    """Normalized representation of a query, built from a JSON-friendly dict.

    Construction is idempotent: passing an existing Query returns it
    unchanged (see the __new__/__init__ interplay below).
    """

    def __new__(cls, query, schema=None):
        # Pass-through: Query(Query(...)) returns the same instance.
        if isinstance(query, Query):
            return query
        return object.__new__(cls)

    def __init__(self, query, schema=None):
        """
        NORMALIZE QUERY SO IT CAN STILL BE JSON
        """
        if isinstance(query, Query):
            # Already normalized (see __new__); do not re-initialize.
            return
        object.__init__(self)
        query = wrap(query)
        self.name = query.name
        select = query.select
        if isinstance(select, list):
            select = wrap([unwrap(_normalize_select(s, schema=schema)) for s in select])
        elif select:
            select = _normalize_select(select, schema=schema)
        else:
            select = StructList()
        self.select2index = {}  # MAP FROM NAME TO data INDEX
        for i, s in enumerate(listwrap(select)):
            self.select2index[s.name] = i
        self.select = select
        # Normalize the remaining clauses into canonical Struct form.
        self.edges = _normalize_edges(query.edges, schema=schema)
        self.frum = _normalize_from(query["from"], schema=schema)
        self.where = _normalize_where(query.where, schema=schema)
        self.window = [_normalize_window(w) for w in listwrap(query.window)]
        self.sort = _normalize_sort(query.sort)
        self.limit = query.limit
        self.isLean = query.isLean

    @property
    def columns(self):
        # All output columns: selected values plus grouping edges.
        return self.select + self.edges

    def __getitem__(self, item):
        # "from" is a Python keyword, so the clause is stored as self.frum.
        if item == "from":
            return self.frum
        return Struct.__getitem__(self, item)

    def copy(self):
        # Shallow copy without invoking __init__ (which would re-normalize).
        output = object.__new__(Query)
        source = object.__getattribute__(self, "__dict__")
        dest = object.__getattribute__(output, "__dict__")
        struct.set_default(dest, source)
        return output
def _normalize_selects(selects, schema=None):
    """Normalize one select clause, or a list of them, to canonical form."""
    if not isinstance(selects, list):
        return _normalize_select(selects, schema=schema)
    return wrap([_normalize_select(s, schema=schema) for s in selects])
def _normalize_edges(edges, schema=None):
    """Normalize each edge clause in *edges* (scalar or list)."""
    normalized = []
    for e in listwrap(edges):
        normalized.append(_normalize_edge(e, schema=schema))
    return normalized
def _normalize_edge(edge, schema=None):
    """Normalize an edge clause (bare field name or struct) to a Struct."""
    if not isinstance(edge, basestring):
        # Already a structured edge: fill defaults and normalize its domain.
        return Struct(
            name=nvl(edge.name, edge.value),
            value=edge.value,
            range=edge.range,
            allowNulls=edge.allowNulls is not False,
            domain=_normalize_domain(edge.domain, schema=schema)
        )
    # A bare field name: prefer a schema-declared dimension when available.
    if schema:
        dim = schema[edge]
        if dim:
            return Struct(
                name=edge,
                domain=dim.getDomain()
            )
    return Struct(
        name=edge,
        value=edge,
        domain=_normalize_domain(schema=schema)
    )
def _normalize_from(frum, schema=None):
    """Normalize a "from" clause: name string, nested query, or passthrough."""
    frum = wrap(frum)
    if isinstance(frum, basestring):
        return Struct(name=frum)
    if isinstance(frum, dict) and (frum["from"] or isinstance(frum["from"], (list, set))):
        # A nested query acts as the source.
        return Query(frum, schema=schema)
    return frum
def _normalize_domain(domain=None, schema=None):
    """Coerce *domain* into a Domain instance (default domain when missing)."""
    if not domain:
        return Domain(type="default")
    if isinstance(domain, Dimension):
        return domain.getDomain()
    if schema and isinstance(domain, basestring) and schema[domain]:
        return schema[domain].getDomain()
    if isinstance(domain, Domain):
        return domain
    # Plain struct description: ensure it has a name, then build the Domain.
    if not domain.name:
        domain = domain.copy()
        domain.name = domain.type
    return Domain(**struct.unwrap(domain))
def _normalize_window(window, schema=None):
    """Normalize a window clause: name, edges, sort, range and where."""
    normalized_edges = [_normalize_edge(e, schema) for e in listwrap(window.edges)]
    return Struct(
        name=nvl(window.name, window.value),
        value=window.value,
        edges=normalized_edges,
        sort=_normalize_sort(window.sort),
        aggregate=window.aggregate,
        range=_normalize_range(window.range),
        where=_normalize_where(window.where, schema=schema)
    )
def _normalize_range(range):
    """Reduce a range clause to just its min/max pair (None passes through)."""
    # NOTE: '== None' (not 'is None') is deliberate; Null compares equal to None.
    if range == None:
        return None
    return Struct(min=range.min, max=range.max)
def _normalize_where(where, schema=None):
    """Normalize a where clause; expand schema dimensions when one is given."""
    # NOTE: '== None' (not 'is None') is deliberate; Null compares equal to None.
    if where == None:
        return TRUE_FILTER
    if schema == None:
        return where
    return simplify(_where_terms(where, where, schema))
def _map_term_using_schema(master, path, term, schema_edges):
    """
    IF THE WHERE CLAUSE REFERS TO FIELDS IN THE SCHEMA, THEN EXPAND THEM
    """
    # NOTE: '== None' comparisons below are deliberate; Null compares equal
    # to None in this codebase.
    output = StructList()
    for k, v in term.items():
        dimension = schema_edges[k]
        if isinstance(dimension, Dimension):
            domain = dimension.getDomain()
            if dimension.fields:
                if isinstance(dimension.fields, dict):
                    # EXPECTING A TUPLE
                    for local_field, es_field in dimension.fields.items():
                        local_value = v[local_field]
                        if local_value == None:
                            output.append({"missing": {"field": es_field}})
                        else:
                            output.append({"term": {es_field: local_value}})
                    continue
                if len(dimension.fields) == 1 and MVEL.isKeyword(dimension.fields[0]):
                    # SIMPLE SINGLE-VALUED FIELD
                    if domain.getPartByKey(v) is domain.NULL:
                        output.append({"missing": {"field": dimension.fields[0]}})
                    else:
                        output.append({"term": {dimension.fields[0]: v}})
                    continue
                if AND(MVEL.isKeyword(f) for f in dimension.fields):
                    # EXPECTING A TUPLE
                    if not isinstance(v, tuple):
                        Log.error("expecing {{name}}={{value}} to be a tuple", {"name": k, "value": v})
                    for i, f in enumerate(dimension.fields):
                        vv = v[i]
                        if vv == None:
                            output.append({"missing": {"field": f}})
                        else:
                            output.append({"term": {f: vv}})
                    continue
            if len(dimension.fields) == 1 and MVEL.isKeyword(dimension.fields[0]):
                if domain.getPartByKey(v) is domain.NULL:
                    output.append({"missing": {"field": dimension.fields[0]}})
                else:
                    output.append({"term": {dimension.fields[0]: v}})
                continue
            if domain.partitions:
                # Map the value to its partition's pre-built ES filter.
                part = domain.getPartByKey(v)
                if part is domain.NULL or not part.esfilter:
                    Log.error("not expected to get NULL")
                output.append(part.esfilter)
                continue
            else:
                Log.error("not expected")
        elif isinstance(v, dict):
            # Recurse into nested term structures.
            sub = _map_term_using_schema(master, path + [k], v, schema_edges[k])
            output.append(sub)
            continue
        output.append({"term": {k: v}})
    return {"and": output}
def _move_nested_term(master, where, schema):
    """
    THE WHERE CLAUSE CAN CONTAIN NESTED PROPERTY REFERENCES, THESE MUST BE MOVED
    TO A NESTED FILTER
    """
    items = where.term.items()
    if len(items) != 1:
        Log.error("Expecting only one term")
    k, v = items[0]
    # Wrap the term in an ES "nested" filter when the field lives inside a
    # nested document (per _get_nested_path).
    nested_path = _get_nested_path(k, schema)
    if nested_path:
        return {"nested": {
            "path": nested_path,
            "query": {"filtered": {
                "query": {"match_all": {}},
                "filter": {"and": [
                    {"term": {k: v}}
                ]}
            }}
        }}
    return where
def _get_nested_path(field, schema):
    # Map a field name to its nested-document path using INDEX_CACHE.
    if MVEL.isKeyword(field):
        field = join_field([schema.es.alias]+split_field(field))
        # Walk the field's ancestor paths; the first one present in
        # INDEX_CACHE is the nesting point (alias prefix is stripped from
        # the returned path).
        for i, f in reverse(enumerate(split_field(field))):
            path = join_field(split_field(field)[0:i+1:])
            if path in INDEX_CACHE:
                return join_field(split_field(path)[1::])
    return None
def _where_terms(master, where, schema):
    """
    USE THE SCHEMA TO CONVERT DIMENSION NAMES TO ES FILTERS
    master - TOP LEVEL WHERE (FOR PLACING NESTED FILTERS)
    """
    # NOTE: Python 2 module ('except Exception, e' syntax, basestring).
    if isinstance(where, dict):
        if where.term:
            # MAP TERM
            try:
                output = _map_term_using_schema(master, [], where.term, schema.edges)
                return output
            except Exception, e:
                Log.error("programmer problem?", e)
        elif where.terms:
            # MAP TERM
            output = StructList()
            for k, v in where.terms.items():
                if not isinstance(v, (list, set)):
                    Log.error("terms filter expects list of values")
                edge = schema.edges[k]
                if not edge:
                    output.append({"terms": {k: v}})
                else:
                    if isinstance(edge, basestring):
                        # DIRECT FIELD REFERENCE
                        return {"terms": {edge: v}}
                    try:
                        domain = edge.getDomain()
                    except Exception, e:
                        Log.error("programmer error", e)
                    fields = domain.dimension.fields
                    if isinstance(fields, dict):
                        # Composite dimension: expand into an OR of ANDs over
                        # the component ES fields.
                        or_agg = []
                        for vv in v:
                            and_agg = []
                            for local_field, es_field in fields.items():
                                vvv = vv[local_field]
                                if vvv != None:
                                    and_agg.append({"term": {es_field: vvv}})
                            or_agg.append({"and": and_agg})
                        output.append({"or": or_agg})
                    elif isinstance(fields, list) and len(fields) == 1 and MVEL.isKeyword(fields[0]):
                        output.append({"terms": {fields[0]: v}})
                    elif domain.partitions:
                        output.append({"or": [domain.getPartByKey(vv).esfilter for vv in v]})
            return {"and": output}
        elif where["or"]:
            return {"or": [unwrap(_where_terms(master, vv, schema)) for vv in where["or"]]}
        elif where["and"]:
            return {"and": [unwrap(_where_terms(master, vv, schema)) for vv in where["and"]]}
        elif where["not"]:
            return {"not": unwrap(_where_terms(master, where["not"], schema))}
    return where
def _normalize_sort(sort=None):
    """Convert sort parameters into the canonical [{field, sort}] form."""
    if not sort:
        return EmptyList
    normalized = StructList()
    for clause in listwrap(sort):
        if isinstance(clause, basestring):
            # Bare field name: ascending by default.
            normalized.append({"field": clause, "sort": 1})
        else:
            normalized.append({"field": nvl(clause.field, clause.value), "sort": nvl(sort_direction[clause.sort], 1)})
    return wrap(normalized)
# Accepted sort-direction spellings mapped to canonical values:
# 1 = ascending, -1 = descending, 0 = no sort.  None/Null default to ascending.
sort_direction = {
    "asc": 1,
    "desc": -1,
    "none": 0,
    1: 1,
    0: 0,
    -1: -1,
    None: 1,
    Null: 1
}
|
concatenations, I will try it again.
Will ecosconfig complain? Well, I'll try it.
|
# -*- coding: utf-8 -*-
#
# Metadata plugin for Plex Media Server, which updates media's metadata
# using information stored in local info files.
#
# Copyright (C) 2015 Yevgeny Nyden
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# @author zhenya (Yevgeny Nyden)
# @version @PLUGIN.REVISION@
# @revision @REPOSITORY.REVISION@
import re, os, io, datetime
# Alias for the Plex framework's global Log object.
LOGGER = Log

# Encoding used when decoding file names coming from Plex.
ENCODING_PLEX = 'utf-8'

# Extension of the local metadata file that sits next to the media file.
INFO_FILE_EXTENSION = '.info'

# Matches a section header line like "[title]" and captures the tag name.
# NOTE(review): not a raw string; '\s' and '\[' happen to survive as-is,
# but r'...' would be safer — left unchanged here.
MATCHER_INFO_TAG = re.compile('^\s*\[(.*)\]\s*', re.UNICODE)

# Matches comment lines that start with "###".
MATCHER_COMMENT_LINE = re.compile('^\s*###')

# Separator used for tuple values within a line (e.g. "actor|role").
TUPLE_SPLIT_STRING = '|'
def Start():
    # Plex plugin entry point, called once when the agent is loaded.
    LOGGER.Info('***** START *****')
def ValidatePrefs():
    # Plex callback invoked when the user changes plugin preferences.
    LOGGER.Info('***** updating preferences...')
# Only use unicode if it's supported, which it is on Windows and OS X,
# but not Linux. This allows things to work with non-ASCII characters
# without having to go through a bunch of work to ensure the Linux
# filesystem is UTF-8 "clean".
#
def unicodize(s):
    """Best-effort decode of *s* to unicode (Python 2); returns *s* unchanged
    on failure or when the platform lacks unicode filename support."""
    filename = s
    if os.path.supports_unicode_filenames:
        # Bare except is deliberate best-effort: any decode error keeps the
        # original bytes.
        try: filename = unicode(s.decode(ENCODING_PLEX))
        except: pass
    return filename
def getAndTestInfoFilePath(media):
    """Return the path of the .info file next to the media file, or None."""
    part = media.items[0].parts[0]
    media_filename = unicodize(part.file)
    info_path = os.path.splitext(media_filename)[0] + INFO_FILE_EXTENSION
    if not os.path.exists(info_path):
        return None
    return info_path
def parsePipeSeparatedTuple(str):
    """Parses a tuple of values separated by '|' from the given string.

    Args:
      str - string to parse
    Returns:
      tuple of two stripped strings; the second is '' when there is no
      separator.
    """
    tokens = str.split(TUPLE_SPLIT_STRING)
    first = tokens[0].strip()
    if len(tokens) > 1:
        return first, tokens[1].strip()
    return first, ''
def parseStringValueFromText(lines):
    """Merge text lines into one string: words on consecutive lines are
    joined with a space, blank lines become newlines, and leading blank
    lines are dropped."""
    merged = ''
    for raw in lines:
        text = raw.strip()
        if not text:
            # Blank line: becomes a newline, unless nothing was seen yet.
            if merged:
                merged += '\n'
            continue
        if merged:
            merged += ' '
        merged += text
    return merged
def parseSingleValueFromText(lines):
    """Return the first line stripped of whitespace, or '' when empty."""
    first = next(iter(lines), None)
    if first is None:
        return ''
    return first.strip()
def parseAndAddActorsFromText(roles, lines):
    """Parses "actor|character" lines and adds them to the roles container.

    Args:
      roles: metadata roles container (roles.new() yields a new role object);
      lines: list of strings to parse.
    """
    for line in lines:
        actor_name, character = parsePipeSeparatedTuple(line)
        if actor_name:
            # BUG FIX: the original reused the name 'role' for both the parsed
            # character string and the new role object, so the character was
            # lost and role.role ended up pointing at the role object itself.
            new_role = roles.new()
            new_role.actor = actor_name
            new_role.role = character
def parseAndAddArrayValuesFromText(container, lines):
    """Parses text values and adds them to a metadata array container.

    Args:
      container: list where parsed values are added;
      lines: list of strings to parse.
    """
    for raw in lines:
        value = raw.strip()
        if not value:
            continue
        container.add(value)
def parseIntegerValueFromText(lines):
    """Merge the lines and parse the result as an int."""
    merged = parseStringValueFromText(lines)
    return int(merged)
def parseFloatValueFromText(lines):
    """Merge the lines and parse the result as a float."""
    merged = parseStringValueFromText(lines)
    return float(merged)
def parseDateValueFromText(lines):
    # Parse the first line as a date via the Plex framework's Datetime
    # helper; implicitly returns None when there are no lines.
    if lines:
        return Datetime.ParseDate(lines[0]).date()
def isCommentLine(line):
    # Truthy (a match object) when the line is a "###" comment line.
    return MATCHER_COMMENT_LINE.search(line)
def writeTagValueToMetadata(metadata, tagName, tagLines):
    """Parses and stores the passed tag data into metadata object.

    Args:
      metadata: Metadata - Plex metadata object.
      tagName: string - 'info' file tag name
      tagLines: list - lines as parsed from the file
    """
    # Dispatch on the (case-insensitive) tag name.  Parse failures for any
    # single tag are logged and skipped so one bad tag does not abort the
    # whole update (bare except is deliberate best-effort).
    try:
        if not tagName:
            return
        tagName = tagName.lower()
        # Title.
        if tagName == 'title':
            metadata.title = parseStringValueFromText(tagLines)
        elif tagName == 'original_title':
            metadata.original_title = parseStringValueFromText(tagLines)
        # Year.
        elif tagName == 'year':
            metadata.year = parseIntegerValueFromText(tagLines)
        # Runtime (stored in milliseconds; the file value is in seconds).
        elif tagName == 'duration' or tagName == 'runtime':
            metadata.duration = parseIntegerValueFromText(tagLines) * 1000
        # Genres.
        elif tagName == 'genres':
            parseAndAddArrayValuesFromText(metadata.genres, tagLines)
        # Directors.
        elif tagName == 'directors':
            parseAndAddArrayValuesFromText(metadata.directors, tagLines)
        # Writers.
        elif tagName == 'writers':
            parseAndAddArrayValuesFromText(metadata.writers, tagLines)
        # Actors.
        elif tagName == 'actors':
            parseAndAddActorsFromText(metadata.roles, tagLines)
        # Studio
        elif tagName == 'studio':
            metadata.studio = parseStringValueFromText(tagLines)
        # Tagline.
        elif tagName == 'tagline':
            metadata.tagline = parseStringValueFromText(tagLines)
        # Summary.
        elif tagName == 'summary':
            metadata.summary = parseStringValueFromText(tagLines)
        # Content rating.
        elif tagName == 'content_rating':
            metadata.content_rating = parseSingleValueFromText(tagLines)
        # Release date.
        elif tagName == 'original_date':
            metadata.originally_available_at = parseDateValueFromText(tagLines)
        # Country.
        elif tagName == 'countries':
            parseAndAddArrayValuesFromText(metadata.countries, tagLines)
        # Rating.
        elif tagName == 'rating':
            metadata.rating = parseFloatValueFromText(tagLines)
        # Collections.
        elif tagName == 'collections':
            parseAndAddArrayValuesFromText(metadata.collections, tagLines)
        # Poster/still tags are recognized but intentionally ignored here.
        elif tagName == 'poster':
            pass
        elif tagName == 'still':
            pass
    except:
        LOGGER.Error('Failed to parse tag "' + str(tagName) + '"')
class MyMediaAgent(Agent.Movies):
    """Plex movie agent that reads metadata from a local '.info' text file."""

    name = 'Local Metadata (Movies)'
    languages = [Locale.Language.NoLanguage]
    primary_provider = True
    fallback_agent = False
    accepts_from = ['com.plexapp.agents.localmedia', 'com.plexapp.agents.none']
    contributes_to = ['com.plexapp.agents.none']

    ##############################################################################
    ############################# S E A R C H ####################################
    ##############################################################################
    def search(self, results, media, lang, manual=False):
        """Searches local directory for the metadata .info file.

        Appends a single 100-score MetadataSearchResult when the file is
        found; the hash falls back to the first part's hash and the year to
        the media file's modification time when the media item lacks them.
        """
        LOGGER.Debug('SEARCH START <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<')
        mediaName = media.name
        mediaYear = media.year
        mediaHash = media.hash
        LOGGER.Debug('searching for name="%s", year="%s", guid="%s", hash="%s"...' %
                     (str(mediaName), str(mediaYear), str(media.guid), str(mediaHash)))
        # No .info file next to the media -> this agent has nothing to offer.
        infoFilepath = getAndTestInfoFilePath(media)
        if infoFilepath is None:
            return
        part = media.items[0].parts[0]
        if mediaHash is None:
            mediaHash = part.hash
        if mediaYear is None:
            # No year from Plex: derive one from the file's modification time.
            filename = unicodize(part.file)
            modificationTime = os.path.getmtime(filename)
            date = datetime.date.fromtimestamp(modificationTime)
            mediaYear = date.year
        results.Append(MetadataSearchResult(id=mediaHash, name=mediaName, year=mediaYear, score=100, lang=lang))
        LOGGER.Debug('SEARCH END <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<')

    ##############################################################################
    ############################# U P D A T E ####################################
    ##############################################################################
    def update(self, metadata, media, lang, force=False):
        """Updates the metadata on a given media item.

        Parses the .info file line by line: a line matching MATCHER_INFO_TAG
        starts a new tag; every other non-comment line is content belonging
        to the current tag.
        """
        LOGGER.Debug('UPDATE START <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<')
        infoFilepath = getAndTestInfoFilePath(media)
        if infoFilepath is None:
            return
        if force:
            # Full refresh requested: wipe previously stored fields first.
            resetMediaAllMetadata(metadata)
        currTagName = None
        currTagLines = []
        for infoLine in io.open(infoFilepath, 'rt'):
            match = MATCHER_INFO_TAG.search(infoLine)
            if match:
                # It's a tag: flush the previous tag before starting a new one.
                writeTagValueToMetadata(metadata, currTagName, currTagLines)
                currTagLines = []
                currTagName = match.groups()[0]
            elif not isCommentLine(infoLine):
                # Content.
                currTagLines.append(infoLine)
        # Write the last tag data (the loop only flushes on a *new* tag).
        writeTagValueToMetadata(metadata, currTagName, currTagLines)
        LOGGER.Debug('UPDATE END <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<')
def resetMediaAllMetadata(metadata):
    """Resets all relevant fields on a passed media metadata object."""
    # Multi-valued containers are emptied in place.
    for container in (metadata.genres, metadata.countries, metadata.directors,
                      metadata.writers, metadata.roles, metadata.collections):
        container.clear()
    # Free-text fields are reset to empty strings.
    for attr in ('studio', 'summary', 'title', 'original_title'):
        setattr(metadata, attr, '')
    # Scalar fields are cleared entirely.
    metadata.year = None
    metadata.originally_available_at = None
    metadata.duration = None
|
What’s the message? Jobs are plentiful? Unemployment is down?
I come here and always feel so dumb because I seldom get your stuff. I keep coming because I love the challenge though.
Bice – thank you for being honest with me. I can be obscure at times, so I appreciate hearing about what works and what doesn’t for people. I do many revisions after I post and often they are intended to improve clarity. Judging from your prior comments, I submit it only “feels” like “always” and “seldom”. Your comments seem consistently on target and I always enjoy their intelligence and humor.
The above poem was on the back of a deposit slip. I had jotted down the line while looking at all those hideous Help Wanted ads. I thought how the ads are enclosed in the boxes and so are the jobs. I may have murked it all up a bit by adding a title which plays with the notion of “filling” these positions because that is what you are to most of these companies – “fill”, preferably “clean”. Best to you.
fencer – thank you but I must respectfully decline. I am deeply flattered that you consider me in such regard. The reasons for my non-participation are myriad but I can single out one as most significant: I have 50 sites/blogs on the right which I read (lately, very infrequently). They all deserve more time and attention than I am able to give right now. To select out five as more deserving than the others would only add insult to injury. Again, thank you for the recognition. Best.
|
# vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:
# Copyright 2014-2016 Florian Bruhin (The Compiler) <mail@qutebrowser.org>
#
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser. If not, see <http://www.gnu.org/licenses/>.
"""pytest fixtures used by the whole testsuite.
See https://pytest.org/latest/fixture.html
"""
import sys
import collections
import itertools
import textwrap
import pytest
import helpers.stubs as stubsmod
from qutebrowser.config import config
from qutebrowser.utils import objreg
from PyQt5.QtCore import QEvent, QSize, Qt
from PyQt5.QtWidgets import QWidget, QHBoxLayout, QVBoxLayout
from PyQt5.QtNetwork import QNetworkCookieJar
class WinRegistryHelper:

    """Helper class for win_registry."""

    FakeWindow = collections.namedtuple('FakeWindow', ['registry'])

    def __init__(self):
        # win_ids this helper registered, so cleanup() can undo them all.
        self._ids = []

    def add_window(self, win_id):
        """Register a fake window with a fresh object registry."""
        assert win_id not in objreg.window_registry
        objreg.window_registry[win_id] = self.FakeWindow(
            objreg.ObjectRegistry())
        self._ids.append(win_id)

    def cleanup(self):
        """Remove every window previously added via add_window."""
        for win_id in self._ids:
            del objreg.window_registry[win_id]
class FakeStatusBar(QWidget):

    """Fake statusbar to test progressbar sizing."""

    def __init__(self, parent=None):
        super().__init__(parent)
        self.hbox = QHBoxLayout(self)
        # Stretch first so widgets added later get pushed to the right edge.
        self.hbox.addStretch()
        self.hbox.setContentsMargins(0, 0, 0, 0)

        # Paint a solid red background so the bar is clearly visible when
        # debugging tests interactively.
        self.setAttribute(Qt.WA_StyledBackground, True)
        self.setStyleSheet('background-color: red;')

    def minimumSizeHint(self):
        # Minimal width, exactly one line of text high.
        return QSize(1, self.fontMetrics().height())
@pytest.fixture
def fake_statusbar(qtbot):
    """Fixture providing a statusbar in a container window.

    The returned FakeStatusBar keeps a reference to its container so the
    container outlives the fixture call.
    """
    container = QWidget()
    qtbot.add_widget(container)
    vbox = QVBoxLayout(container)
    # The stretch above the statusbar pins it to the container's bottom.
    vbox.addStretch()

    statusbar = FakeStatusBar(container)
    # to make sure container isn't GCed
    # pylint: disable=attribute-defined-outside-init
    statusbar.container = container
    vbox.addWidget(statusbar)

    container.show()
    qtbot.waitForWindowShown(container)
    return statusbar
@pytest.yield_fixture
def win_registry():
    """Fixture providing a window registry with a fake window for win_id 0.

    (The old docstring claimed win_id 0 *and* 1, but only 0 is registered.)
    Yields the WinRegistryHelper so tests can add further windows; all
    registered windows are removed again on teardown.
    """
    helper = WinRegistryHelper()
    helper.add_window(0)
    yield helper
    helper.cleanup()
@pytest.yield_fixture
def tab_registry(win_registry):
    """Fixture providing a tab registry for win_id 0.

    Depends on win_registry so that window 0 exists; the registry is
    deleted again on teardown.
    """
    registry = objreg.ObjectRegistry()
    objreg.register('tab-registry', registry, scope='window', window=0)
    yield registry
    objreg.delete('tab-registry', scope='window', window=0)
def _generate_cmdline_tests():
"""Generate testcases for test_split_binding."""
# pylint: disable=invalid-name
TestCase = collections.namedtuple('TestCase', 'cmd, valid')
separators = [';;', ' ;; ', ';; ', ' ;;']
invalid = ['foo', '']
valid = ['leave-mode', 'hint all']
# Valid command only -> valid
for item in valid:
yield TestCase(''.join(item), True)
# Invalid command only -> invalid
for item in invalid:
yield TestCase(''.join(item), False)
# Invalid command combined with invalid command -> invalid
for item in itertools.product(invalid, separators, invalid):
yield TestCase(''.join(item), False)
# Valid command combined with valid command -> valid
for item in itertools.product(valid, separators, valid):
yield TestCase(''.join(item), True)
# Valid command combined with invalid command -> invalid
for item in itertools.product(valid, separators, invalid):
yield TestCase(''.join(item), False)
# Invalid command combined with valid command -> invalid
for item in itertools.product(invalid, separators, valid):
yield TestCase(''.join(item), False)
# Command with no_cmd_split combined with an "invalid" command -> valid
for item in itertools.product(['bind x open'], separators, invalid):
yield TestCase(''.join(item), True)
@pytest.fixture(params=_generate_cmdline_tests(), ids=lambda e: e.cmd)
def cmdline_test(request):
    """Fixture which generates tests for things validating commandlines.

    Parametrized over every TestCase from _generate_cmdline_tests(); each
    invocation yields one (cmd, valid) pair.
    """
    # Import qutebrowser.app so all cmdutils.register decorators get run.
    import qutebrowser.app
    return request.param
@pytest.yield_fixture
def config_stub(stubs):
    """Fixture which provides a fake config object.

    The stub is registered in objreg as 'config' and deleted on teardown.
    """
    stub = stubs.ConfigStub()
    objreg.register('config', stub)
    yield stub
    objreg.delete('config')
@pytest.yield_fixture
def default_config():
    """Fixture that provides and registers an empty default config object.

    Unlike config_stub, this is a real ConfigManager (no config files,
    relaxed validation), registered as 'config' and deleted on teardown.
    """
    config_obj = config.ConfigManager(configdir=None, fname=None, relaxed=True)
    objreg.register('config', config_obj)
    yield config_obj
    objreg.delete('config')
@pytest.yield_fixture
def key_config_stub(stubs):
    """Fixture which provides a fake key config object.

    Registered in objreg as 'key-config' and deleted on teardown.
    """
    stub = stubs.KeyConfigStub()
    objreg.register('key-config', stub)
    yield stub
    objreg.delete('key-config')
@pytest.yield_fixture
def host_blocker_stub(stubs):
    """Fixture which provides a fake host blocker object.

    Registered in objreg as 'host-blocker' and deleted on teardown.
    """
    stub = stubs.HostBlockerStub()
    objreg.register('host-blocker', stub)
    yield stub
    objreg.delete('host-blocker')
@pytest.fixture(scope='session')
def stubs():
    """Provide access to stub objects useful for testing.

    Session-scoped: simply exposes the helpers.stubs module.
    """
    return stubsmod
@pytest.fixture(scope='session')
def unicode_encode_err():
    """Provide a fake UnicodeEncodeError exception."""
    return UnicodeEncodeError('ascii',           # codec
                              '',                # object that failed to encode
                              0,                 # start of invalid range
                              2,                 # end of invalid range
                              'fake exception')  # reason
@pytest.fixture(scope='session')
def qnam(qapp):
    """Session-wide QNetworkAccessManager.

    Network access is set to NotAccessible so tests never touch the real
    network through this manager.
    """
    from PyQt5.QtNetwork import QNetworkAccessManager
    nam = QNetworkAccessManager()
    nam.setNetworkAccessible(QNetworkAccessManager.NotAccessible)
    return nam
@pytest.fixture
def webpage(qnam):
    """Get a new QWebPage object.

    The page's own network manager is replaced with the shared, offline
    qnam fixture; the old one is scheduled for deletion to avoid leaks.
    """
    from PyQt5.QtWebKitWidgets import QWebPage
    page = QWebPage()
    page.networkAccessManager().deleteLater()
    page.setNetworkAccessManager(qnam)
    return page
@pytest.fixture
def webview(qtbot, webpage):
    """Get a new QWebView object.

    Uses the shared webpage fixture as its page; the view's default page
    is scheduled for deletion before being swapped out.
    """
    from PyQt5.QtWebKitWidgets import QWebView
    view = QWebView()
    qtbot.add_widget(view)

    view.page().deleteLater()
    view.setPage(webpage)

    view.resize(640, 480)
    return view
@pytest.fixture
def webframe(webpage):
    """Convenience fixture to get a mainFrame of a QWebPage.

    Builds on the webpage fixture above.
    """
    return webpage.mainFrame()
@pytest.fixture
def fake_keyevent_factory():
    """Fixture that when called will return a mock instance of a QKeyEvent."""
    from unittest import mock
    from PyQt5.QtGui import QKeyEvent

    def fake_keyevent(key, modifiers=0, text='', typ=QEvent.KeyPress):
        """Generate a new fake QKeyPressEvent.

        The autospec'd mock answers key()/modifiers()/text()/type() with
        the given values and rejects attributes QKeyEvent doesn't have.
        """
        evtmock = mock.create_autospec(QKeyEvent, instance=True)
        evtmock.key.return_value = key
        evtmock.modifiers.return_value = modifiers
        evtmock.text.return_value = text
        evtmock.type.return_value = typ
        return evtmock

    return fake_keyevent
@pytest.yield_fixture
def cookiejar_and_cache(stubs):
    """Fixture providing a fake cookie jar and cache.

    Registers both in objreg ('cookie-jar' / 'cache') and removes them on
    teardown. Yields nothing: tests access them through objreg.
    """
    jar = QNetworkCookieJar()
    cache = stubs.FakeNetworkCache()
    objreg.register('cookie-jar', jar)
    objreg.register('cache', cache)
    yield
    objreg.delete('cookie-jar')
    objreg.delete('cache')
@pytest.fixture
def py_proc():
    """Get a python executable and args list which executes the given code."""
    if getattr(sys, 'frozen', False):
        # In a frozen build sys.executable is the bundled app, not a Python
        # interpreter, so "-c code" would not work.
        pytest.skip("Can't be run when frozen")

    def func(code):
        # Strip surrounding newlines and dedent so triple-quoted test code
        # can be written at any indentation level.
        return (sys.executable, ['-c', textwrap.dedent(code.strip('\n'))])

    return func
|
Are you worried about the compatibility of your IPEVO document camera as your school moves towards replacing computers with Chromebooks? Fret not, as IPEVO’s family of document cameras are all compatible with Chromebooks while in their USB mode. So, check out the following videos to see how you can use your IPEVO document camera with a Chromebook.
|
import os
os.environ['TF_CPP_MIN_LOG_LEVEL']='1'
from os import listdir
import sys
import time
import argparse
import tools.ops
import numpy as np
import tensorflow as tf
import scipy.misc as sm
from models.mfb_net_cross import *
from tools.utilities import *
from tools.ops import *
# Command-line interface: only the learning rate is exposed.
parser = argparse.ArgumentParser()
# NOTE: the default is the *string* '1e-4'; type=float converts it on parse.
parser.add_argument('-lr', dest='lr', type=float, default='1e-4', help='original learning rate')
args = parser.parse_args()

# Mirror the settings into tf.app.flags so the rest of the code reads FLAGS.
flags = tf.app.flags
flags.DEFINE_float('lr', args.lr, 'Original learning rate.')
flags.DEFINE_integer('batch_size', 5, 'Batch size.')
flags.DEFINE_integer('num_epochs', 1, 'Number of epochs.')  # ~13 min per epoch
flags.DEFINE_integer('num_gpus', 4, 'Number of GPUs.')
flags.DEFINE_integer('seq_length', 16, 'Length of each video clip.')
flags.DEFINE_integer('height', 128, 'Height of video frame.')
flags.DEFINE_integer('width', 128, 'Width of video frame.')
flags.DEFINE_integer('channel', 3, 'Number of channels for each frame.')
flags.DEFINE_integer('num_sample', 1240, 'Number of samples in this dataset.')
flags.DEFINE_float('wd', 0.001, 'Weight decay rate.')
FLAGS = flags.FLAGS

# Paths and behaviour switches.
prefix = 'mfb_cross'
model_save_dir = './ckpt/' + prefix
loss_save_dir = './loss'
val_list_path = './dataset/vallist.txt'
dataset_path = './dataset/UCF-101-tf-records'
use_pretrained_model = True
save_predictions = True
def run_validation():
    """Run a validation pass of the mfb_cross model.

    Builds one model tower per GPU over the validation clip list, restores
    the latest checkpoint from ``model_save_dir``, runs until the input
    queues are exhausted and appends the mean losses to a text file in
    ``loss_save_dir``.
    """
    # Make sure the checkpoint directory exists before looking for models.
    if not os.path.exists(model_save_dir):
        os.makedirs(model_save_dir)

    tower_ffg_losses, tower_fbg_losses, tower_lfg_losses, tower_feat_losses = [], [], [], []

    # Only needed so the restored checkpoint's step count can be reported.
    global_step = tf.get_variable(
        'global_step',
        [],
        initializer=tf.constant_initializer(0),
        trainable=False
    )

    # Create a session for running Ops on the Graph.
    config = tf.ConfigProto(allow_soft_placement=True)
    sess = tf.Session(config=config)

    # Read the validation list and turn each entry into a dataset path.
    with open(val_list_path, 'r') as val_list_file:
        val_list = val_list_file.read().splitlines()
    for i in range(len(val_list)):
        val_list[i] = os.path.join(dataset_path, val_list[i])
    assert(len(val_list) % FLAGS.num_gpus == 0)
    num_for_each_gpu = len(val_list) // FLAGS.num_gpus

    # One input pipeline per GPU, each fed a disjoint slice of the list.
    clips_list, img_masks_list, loss_masks_list = [], [], []
    with sess.as_default():
        for i in range(FLAGS.num_gpus):
            clips, img_masks, loss_masks = input_pipeline(
                val_list[i*num_for_each_gpu:(i+1)*num_for_each_gpu],
                FLAGS.batch_size, read_threads=1,
                num_epochs=FLAGS.num_epochs, is_training=False)
            clips_list.append(clips)
            img_masks_list.append(img_masks)
            loss_masks_list.append(loss_masks)

    # Build one model tower per GPU; all towers share variables.
    mfb_list = []
    with tf.variable_scope('vars') as var_scope:
        for gpu_index in range(FLAGS.num_gpus):
            with tf.device('/gpu:%d' % (gpu_index)):
                with tf.name_scope('%s_%d' % ('tower', gpu_index)) as scope:
                    # construct model
                    mfb = mfb_net(clips_list[gpu_index], FLAGS.height, FLAGS.width,
                                  FLAGS.seq_length, FLAGS.channel, FLAGS.batch_size,
                                  is_training=False)
                    mfb_list.append(mfb)
                    _, first_fg_loss, first_bg_loss, last_fg_loss, feat_loss, _ = \
                        tower_loss(scope, mfb, clips_list[gpu_index],
                                   img_masks_list[gpu_index], loss_masks_list[gpu_index])
                    var_scope.reuse_variables()
                    tower_ffg_losses.append(first_fg_loss)
                    tower_fbg_losses.append(first_bg_loss)
                    tower_lfg_losses.append(last_fg_loss)
                    tower_feat_losses.append(feat_loss)

    # Average the losses over all towers.
    ffg_loss_op = tf.reduce_mean(tower_ffg_losses)
    fbg_loss_op = tf.reduce_mean(tower_fbg_losses)
    lfg_loss_op = tf.reduce_mean(tower_lfg_losses)
    feat_loss_op = tf.reduce_mean(tower_feat_losses)

    # Saver for restoring checkpoints.
    saver = tf.train.Saver()
    sess.run(tf.initialize_all_variables())
    # Input pipelines built with num_epochs keep their epoch counter in
    # *local* variables, which initialize_all_variables() does not cover;
    # without this the queue runners fail with an uninitialized-value error.
    sess.run(tf.local_variables_initializer())

    if use_pretrained_model:
        print('[*] Loading checkpoint ...')
        model = tf.train.latest_checkpoint(model_save_dir)
        if model is not None:
            saver.restore(sess, model)
            print('[*] Loading success: %s!' % model)
        else:
            print('[*] Loading failed ...')

    # Create loss output folder
    if not os.path.exists(loss_save_dir):
        os.makedirs(loss_save_dir)
    loss_file = open(os.path.join(loss_save_dir, prefix + '_val.txt'), 'a+')

    # Start the queue runners feeding the input pipelines.
    coord = tf.train.Coordinator()
    threads = tf.train.start_queue_runners(sess=sess, coord=coord)

    # Read the step up front so the summary line below can always report it
    # (previously this was read inside the try block and a NameError was
    # possible if the loop failed before the first eval).
    with sess.as_default():
        step = global_step.eval()
    print('[step = %d]' % step)

    ffg_loss_list, fbg_loss_list, lfg_loss_list, feat_loss_list = [], [], [], []
    try:
        print('\n\n\n*********** start validating ***********\n\n\n')
        while not coord.should_stop():
            # Run inference steps
            ffg_loss, fbg_loss, lfg_loss, feat_loss = \
                sess.run([ffg_loss_op, fbg_loss_op, lfg_loss_op, feat_loss_op])
            ffg_loss_list.append(ffg_loss)
            fbg_loss_list.append(fbg_loss)
            lfg_loss_list.append(lfg_loss)
            feat_loss_list.append(feat_loss)
            print('ffg_loss=%.8f, fbg_loss=%.8f, lfg_loss=%.8f, feat_loss=%.8f'
                  % (ffg_loss, fbg_loss, lfg_loss, feat_loss))
    except tf.errors.OutOfRangeError:
        # The input queues raise OutOfRange once num_epochs is exhausted.
        print('Done validating -- epoch limit reached')
    finally:
        # When done, ask the threads to stop and wait for them to finish.
        coord.request_stop()
        coord.join(threads)
    sess.close()

    # Report and persist the mean losses over the whole validation run.
    mean_ffg = np.mean(np.asarray(ffg_loss_list))
    mean_fbg = np.mean(np.asarray(fbg_loss_list))
    mean_lfg = np.mean(np.asarray(lfg_loss_list))
    mean_feat = np.mean(np.asarray(feat_loss_list))
    line = '[step=%d] ffg_loss=%.8f, fbg_loss=%.8f, lfg_loss=%.8f, feat_loss=%.8f' \
        % (step, mean_ffg, mean_fbg, mean_lfg, mean_feat)
    print(line)
    loss_file.write(line + '\n')
    loss_file.close()
def main(_):
    """Entry point for tf.app.run; the unused argument is the parsed argv."""
    run_validation()


if __name__ == '__main__':
    tf.app.run()
|
LISCA Speaker System - Description. How do they perform? See our experiences Here.
LISCA stands for: LIne Source Ceiling Array and is an original concept developed by the late Mr. Vivian Capel of Bristol England and is described in detail by him below. There are a couple of books that describe the concept in detail. These are: 'Audio & Hi-Fi engineers pocket book' third edition ISBN 0-7506-2001-3 published by Newnes, and 'Public Address Loudspeaker Systems' ISBN 0-8593-4237-9 published by Babani. LISCA is a concealed line array (or series of arrays) of loudspeakers fitted to (or in) a ceiling which delivers greater speech clarity than traditional downwards facing ceiling speaker systems.
LISCA is a Line-Source Ceiling Array of public address loudspeakers and consists of two (or more) rows of units on angled baffles above the ceiling at an angle of 64° and is fitted across the whole width of the hall at 600mm intervals. The first array is located just off the edge of the platform, the second if required, about half way along the hall.
LISCA overcomes all the disadvantages of other systems and gives sound of unequalled coverage, clarity and naturalness. It can be installed with no structural alteration to a tiled suspended ceiling.
LISCA is not a new, untried system. It has been installed in many halls all over Great Britain from Scotland to the Channel Islands, Ireland to Norwich and has been in use for a number of years. From a few tens of people to audiences of a thousand and more, all benefit from the enhanced sound from LISCA systems.
Less Feedback The angle of the LISCA array reduces the sound directed back to the microphone, being some 3dB less than other systems, thus reducing feedback. It also minimises reflections from side and rear walls further reducing it. This also reduces hall reverberation and resonance.
Reduced Reverberation Reverberation and resonance produce a hollow, boomy effect that is not always noticeable, yet is always present to muddle and confuse speech syllables. The reduction of these is one factor contributing to LISCA's clear and distinct reproduction.
Appearance The arrays are possibly the most unobtrusive of any system. There are no visible housings as there are with most ordinary ceiling speakers. The units are mounted above the ceiling and all that is visible are surface grilles level with the rest of the ceiling.
Optimum Sound Levels Sound level is lower in the first rows where it is only required to reinforce natural sound from the platform. From there the level increases, then as it starts to diminish, the second array takes over to maintain an even volume to the back of the hall. The level is thus at an optimum everywhere with no blind spots. Apart from the intentional lower level at the front, sound levels are usually within 1dB over the rest of the hall, something not possible with any other system.
Greater Clarity This is its greatest advantage. The array behaves acoustically as a single-source from which the plane (flat-fronted) sound waves travel down the length of the hall like straight-fronted waves rolling along a flat beach. All other systems that use multiple ceiling speakers or columns, generate spherical wave-fronts which mutually interfere with each other like confused ripples expanding in a pond when several stones are thrown in at the same time.
LISCA produces in-phase sound everywhere, like a laser beam, whereas all other systems produce complex reinforcement and cancellation patterns. These produce dips and peaks throughout the frequency spectrum known as the comb-filter effect, which result in variations of clarity over the audience area.
This is worsened by the treble-beaming experienced with all loudspeakers. Above 3KHz the response can drop to a half (-6dB) at 20° off-axis. For a conventional speaker facing downwards from a 3m high ceiling, this means that a circle with only 76cm (2.5ft) radius beneath it is within the 20° angle and so has reasonable treble. With LISCA, ALL the audience except the first row is within 20° and most within 10°. (Only a 46cm (1.5ft) radius circle is within 10° with a conventional speaker).
The effect of all of this is like the difference between a sharp photograph and one in which the camera moved. The blurring caused by multiple sources and beaming effect confuses the very short parts of speech - the short consonants, b, ch, d, g, j, k, p, t. This corresponds to the fine detail lost in a blurred photograph.
These short speech sounds are vital for intelligibility and clarity which are much degraded if they are impaired. Older persons are most affected, because hearing sensitivity for these sounds declines with age. Thus while older persons can hear speech, they have difficulty in understanding it.
The totally in-phase sound generated by LISCA causes all speech sounds to be reproduced clearly without loss of any part.
Natural Sound Location With most other systems the perceived sound source does not coincide with the visual location. With columns it is to one side and with multiple ceiling units it is overhead and even behind in some positions. The sound image with LISCA is normally centre-front, but due to the effect of LISCA's phase coherence on the audio/visual correlation of the brain, a pseudo stereo effect is often obtained whereby the sound appears to be coming from that part of the platform where the speaker is, -- the ultimate in natural sound!
Ease Of Installation All the units are installed in rows instead of all over the ceiling as with the conventional arrangement. This greatly facilitates mounting and wiring. Furthermore there are no holes to be cut in the tiles because each unit replaces one tile. Installation merely consists of dropping the unit with its base into the vacant tile position and then wiring.
Cost A LISCA system costs less than a conventional out-moded ceiling system. Fewer loudspeakers are required, although more are needed for a corner platform layout. 16 are needed in the average hall (100 people) against 20-24 for the conventional system. The cost of the LISCA housings are less than most conventional quality ceiling units and the cost of one tile is saved for each. Most conventional ceiling systems use high-impedance operation with a transformer for each speaker. LISCA operates at low impedance and so saves the cost of many transformers.
Fire Risk Many local safety authorities demand stringent specifications whereby fire is retarded from passing from the auditorium to the above ceiling area. The fibreglass LISCA units are fire retardant.
Sound Propagation Levels The significant dimension affecting all calculations, is h, the height of the array above the seated audience which is about 1m (3.5ft) above floor level. Thus h is the floor-to-ceiling height H, minus 1m (3.5ft). The first row of seats under the first array, is at an angle of 64° off-axis from the loudspeakers, which is the angle the line-source is tilted. The sound level there is therefore cos 64° = 0.44 times that of the same distance along the on-axis line. This corresponds to an SPL of -7dB.
The on-axis line converges with the audience level at a distance from the array of: h/sin(90°-64°) =h/0.44 or 2.3h. As the propagation loss from a line-source is 3dB for a doubling of distance, the loss along the on-axis line to the point where it reaches the audience is -3.5dB.
So with a level of -7dB under the array at the first row and -3.5dB at the on-axis point, there is a theoretical difference of 3.5dB. In practice, reflections from the inside top of the LISCA housing increase the level beneath it, so the difference is somewhat less, about 2dB.
The first rows thereby receive 2dB less sound than elsewhere. Thus the volume there is just enough to reinforce the natural voice and due to the Haas effect, the sound appears to come from the platform rather than the overhead array, so giving a perfectly natural effect.
The floor distance from under the array to the on-axis point is h/tan (90°-64°) = h/0.49, or approximately 2h. So 2h is the floor distance from the first row to the on-axis point, over which the sound level increases to a theoretical maximum of 3.5dB.
Beyond this it declines as the line of propagation goes off-axis again and the distance increases. However, distance has less effect than may be expected. The SPL decreases with distance because the wave expands; with a line-source it drops 3dB for a doubling of distance.
With LISCA, the wave expands from the array until it fills the space between floor and ceiling and up to there obeys the normal attenuation law, but beyond this, no further expansion is possible as it is constrained by the floor, walls and ceiling. So the effect is like sound travelling along a tube and in theory there should be little further loss beyond this point.
There are though losses due to absorption by the audience, carpet, curtains and padded seating. The range limit beyond 2h will thus depend on the furnishings. At 4h the propagation angle is narrowed to the point where the recessed loudspeaker cones begin to be masked by the ceiling. Low and mid frequencies are diffracted around the obstruction, but high frequencies are not, so intelligibility may start to deteriorate beyond this point. Also, the off-axis angle increasingly reduces the level. So 4h can be considered the maximum range to give adequate sound level with highest intelligibility. If the length of the hall from first row to last is greater than 4h, a second line-source array will be needed and in most cases this is so. There is no interference with the first one if correctly located and so there is no loss of clarity.
The Second Line-Source The second line-source is located, not where the output from the first is tailing off at 4h, but where it is strongest at 2h or a little beyond at 2.5h. Here, the output from the second array which is immediately overhead, is low compared to that from the first. So, with most of the sound coming from the first, the perceived location is still forward and there is virtually no awareness of the second array overhead.
Beyond this point, the output from the first line-source starts to diminish while that from the second increases, thus maintaining an even sound level throughout. Furthermore, as the second array is now forward relative to the listener, the frontal natural source location is maintained.
In very long halls or those with a short h dimension, the distance from the proposed second line-source location to the last row of seats may exceed 4h. In this case, the distance of the second array from the first may have to be increased to 2.5-3h.
In this case there may then be some awareness of an overhead source immediately beneath it. The situation where the length of a hall from the first row of seats to the last exceeds 6h is unusual.
The two arrays do not produce the mutual interference with subsequent comb-filter effect as produced by multiple ceiling speakers. For this, the volume levels need to be similar and the sound path difference between each source and the listener small. These conditions are experienced with the ordinary ceiling system, but nowhere with LISCA. Where the sound levels are similar, the path difference is large and vice-versa. So no interference problems are created by the two arrays.
The significant factor is h, the ceiling height above a seated audience. The second array should be located at 2h distance from the first, or as near to it as possible. (See diagrams). For long halls or those with low ceilings in which the distance from the first array to the back of the hall exceeds 4h, the spacing between arrays may be 2.5-3h. Each array must have an even number of units for sound balance and impedance matching.
Corner-Platform Layouts In Square Halls The first array is standard. The second array should be positioned just before the side corners of the walls; this will in most cases be somewhat less than 2h from the first array. The width of the hall at its widest point is greater than the normal width of a rectangular hall, but the length from platform to the rear wall corner is less.
The second array must therefore be longer but does not need the normal range. Furthermore the required range is less at the ends of the array than in the middle. The units should therefore be mounted in alternate tile positions instead of in every one. The centre pair should be adjacent, then at alternate tile positions out to the walls.
As there is little sound propagation outside the ends of an array, two triangular sections at the sides where the walls diverge from the first array are left without coverage. These must be served by a single LISCA unit facing sideways toward the walls. Its location should be between the two arrays and in line with the end units of the first array. The reason for using a LISCA housing here, apart from visual uniformity, is that it will direct the sound sideways into the triangular areas and little if any will be propagated backward to cause interference with the main LISCA sound flow. A typical number of units needed for such a layout is 24, more than with a conventional system. Of course the actual number depends on the dimensions of the hall. (See Diagrams).
Installation Place the units in the required position in place of a ceiling tile and connect up. As only two rows are involved and no tile cutting is required, the installation is quicker than the old ceiling tile system.
There may be a reduction in sound level at seats adjacent to heavy side window curtains. Sound may be reinforced by putting an extra unit at the end of each array at the affected side, instead of the usual blank tile.
Connecting Up All speakers must be connected the same way round otherwise some will produce 'negative sound' that will cancel that of the others.
A series-parallel arrangement is usually best and depends upon the number of units used. All speakers in each bank are connected in series, positives (usually marked with a red spot) to negatives, just as the cells of a battery. The banks are then connected in parallel with the outermost positives and negatives wired together. (See Diagrams). The total impedance of the complete array is 8 ohms which perfectly matches a standard amplifier.
Frequency Equalising The bass response of a long line-source rises at 6dB per octave. The amplifier should have a bass cut facility to equalise this.
Hearing Aid Use The normal Inductive Loop enables hearing aid users to hear without the hollow, boxy sound due to reverberation caused by conventional speaker systems. With LISCA, reverberation is reduced and the hard-of-hearing can often get a better feeling of 'presence' by listening to the loudspeaker system through the deaf-aid microphone and not use the loop.
|
"""
Django settings for servant project.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.6/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
from django.core.exceptions import ImproperlyConfigured
# Repository root (two levels above this settings module); used to build
# filesystem paths such as the SQLite database location below.
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
def get_env_variable(var_name, default=None):
    """Return the value of the environment variable *var_name*.

    Args:
        var_name: name of the environment variable to read.
        default: value to return when the variable is unset; when left as
            ``None`` the variable is treated as required (backward
            compatible with the original single-argument form).

    Raises:
        ImproperlyConfigured: if the variable is unset and no default was
            supplied, so a misconfigured deployment fails fast at import.
    """
    try:
        return os.environ[var_name]
    except KeyError:
        if default is not None:
            return default
        error_msg = "Set the %s environment variable" % var_name
        raise ImproperlyConfigured(error_msg)
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.6/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = get_env_variable("RASPBERRYWHITE_SECRET_KEY")
# SECURITY WARNING: don't run with debug turned on in production!
# NOTE(review): os.environ values are strings, so DEBUG/TEMPLATE_DEBUG are
# truthy for ANY non-empty value, including "False" — confirm the deployment
# sets an empty string to disable debug, or coerce these to bool.
DEBUG = get_env_variable("RASPBERRYWHITE_DEBUG")
TEMPLATE_DEBUG = get_env_variable("RASPBERRYWHITE_TEMPLATE_DEBUG")
# Single host taken from the environment (a comma-separated list would
# need splitting before use).
ALLOWED_HOSTS = [get_env_variable("RASPBERRYWHITE_ALLOWED_HOSTS")]
# Absolute path of the directory one level above this settings package.
PROJECT_ROOT = os.path.abspath(
    os.path.join(os.path.dirname(__file__), ".."),
)
# Redis connection used by the server-sent-events queue.
REDIS_SSEQUEUE_CONNECTION_SETTINGS = {
    'location': '{0}:{1}'.format(get_env_variable("RASPBERRYWHITE_REDIS_HOST"),
                                 get_env_variable("RASPBERRYWHITE_REDIS_PORT")),
    'db': get_env_variable("RASPBERRYWHITE_REDIS_DB"),
}
# Application definition
# Django contrib apps plus third-party packages (python-social-auth,
# django-bower, gunicorn, django-filer/easy-thumbnails) and the project's
# own ``server`` app.
INSTALLED_APPS = (
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'social.apps.django_app.default',
    'server',
    'djangobower',
    'gunicorn',
    'filer',
    'easy_thumbnails',
)
# Bower finder added so collectstatic also picks up bower components.
STATICFILES_FINDERS = (
    'django.contrib.staticfiles.finders.FileSystemFinder',
    'django.contrib.staticfiles.finders.AppDirectoriesFinder',
    'djangobower.finders.BowerFinder',
)
MIDDLEWARE_CLASSES = (
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
# Default Django processors plus the social-auth ones so templates can see
# the available backends and the login redirect target.
TEMPLATE_CONTEXT_PROCESSORS = (
    'django.contrib.auth.context_processors.auth',
    'django.core.context_processors.debug',
    'django.core.context_processors.i18n',
    'django.core.context_processors.media',
    'django.core.context_processors.static',
    'django.core.context_processors.tz',
    'django.contrib.messages.context_processors.messages',
    'social.apps.django_app.context_processors.backends',
    'social.apps.django_app.context_processors.login_redirect',
)
# Social logins are tried first; the Django model backend is the fallback.
AUTHENTICATION_BACKENDS = (
    'social.backends.facebook.FacebookOAuth2',
    'social.backends.google.GoogleOAuth2',
    'social.backends.twitter.TwitterOAuth',
    'django.contrib.auth.backends.ModelBackend',
)
# Standard python-social-auth pipeline with a project-specific final step
# (``server.models.create_player_after_login``).
SOCIAL_AUTH_PIPELINE = (
    'social.pipeline.social_auth.social_details',
    'social.pipeline.social_auth.social_uid',
    'social.pipeline.social_auth.auth_allowed',
    'social.pipeline.social_auth.social_user',
    'social.pipeline.user.get_username',
    'social.pipeline.user.create_user',
    'social.pipeline.social_auth.associate_user',
    'social.pipeline.social_auth.load_extra_data',
    'social.pipeline.user.user_details',
    'server.models.create_player_after_login'
)
LOGIN_REDIRECT_URL = get_env_variable("RASPBERRYWHITE_LOGIN_REDIRECT_URL")
# OAuth credentials come from the environment, never from source control.
SOCIAL_AUTH_FACEBOOK_KEY = get_env_variable("RASPBERRYWHITE_SOCIAL_AUTH_FACEBOOK_KEY")
SOCIAL_AUTH_FACEBOOK_SECRET = get_env_variable("RASPBERRYWHITE_SOCIAL_AUTH_FACEBOOK_SECRET")
ROOT_URLCONF = 'servant.urls'
WSGI_APPLICATION = 'servant.wsgi.application'
# NOTE(review): this env-driven STATIC_URL is overridden by the hard-coded
# ``STATIC_URL = '/static/'`` near the end of this module — confirm which
# value is intended.
STATIC_URL = get_env_variable("RASPBERRYWHITE_STATIC_URL")
# Client-side packages fetched by django-bower into PROJECT_ROOT/components.
BOWER_COMPONENTS_ROOT = os.path.join(PROJECT_ROOT, 'components')
BOWER_INSTALLED_APPS = (
    'jquery#1.9',
    'underscore',
    'bootstrap',
    'jquery.cookie',
    'angular'
)
# Database
# https://docs.djangoproject.com/en/1.6/ref/settings/#databases
# SQLite file stored at the repository root.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}
# Internationalization
# https://docs.djangoproject.com/en/1.6/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.6/howto/static-files/
# NOTE(review): this overrides the env-driven STATIC_URL assigned earlier
# in this module — confirm which value is intended.
STATIC_URL = '/static/'
STATIC_ROOT = "static"
|
At a recent trip to Walmart I happened to stop by the automotive department and noticed that they are now carrying transponder keys. They didn’t appear to have a huge selection but I definitely saw Ford and Nissan keys which seemed to be priced at about $50 dollars. The keys they are using are clonable which means they will use a scanner to read the chip value of the original key, swap the keys and then write that value to the new key. A very simple procedure and much easier than using a scan tool.
This is a step in the right direction for retailers, especially since transponder keys have been on the market for over 10 years and it’s still such a pain to have them duplicated. Hopefully more companies will follow suit and it will continue to get easier to have transponder keys made. Not that I don’t recommend showing some love to your local locksmith (I do), but not all locksmiths have this capability. If you are in need of a new key you might want to check this out since it’s much cheaper than going to the dealer, as I’m sure many of you already know!
In this context (key fob), I don’t think FOB actually stands for anything. It’s just another word for remote.
I have removed your email from your comment to prevent you from being spammed.
If anyone would like to add to this discussion I will allow it here in the comments, or you may start a topic in the forum if you like.
A fob is a medallion or ornament attached to a key ring or pocket watch to assist in their handling or identifying ownership of the key or watch. There is no auto industry name or acronym given to FOB. It is easier then saying automotive keyless entry remote.
CheapFobs.com aims to help you get the best deal possible on a new keyless entry remote. We also offer free keyless programming instructions.
|
#!/usr/bin/env python3
"""Project Euler - Problem 67 Module"""
import os
def problem67(triangle_fileloc):
    """Problem 67 - Maximum path sum II

    Reads a triangle of integers from *triangle_fileloc* and returns the
    maximum top-to-bottom path sum.  Computed bottom-up with dynamic
    programming over the rows instead of an explicit tree of linked nodes.
    """
    with open(triangle_fileloc, 'r') as f:
        rows = [[int(token) for token in line.split()]
                for line in f if line.split()]
    # Fold upward from the last row: each entry becomes its own value plus
    # the better of the two partial sums directly beneath it.
    best = list(rows[-1])
    for row in reversed(rows[:-1]):
        best = [value + max(best[i], best[i + 1])
                for i, value in enumerate(row)]
    # After folding, best[0] holds the maximum path sum from the apex.
    return best[0]
# Triangle data file shipped alongside this script.
FILENAME = 'problem0067.txt'
# Absolute directory of this script, so the data file is found regardless
# of the current working directory.
__location__ = os.path.realpath(
    os.path.join(os.getcwd(), os.path.dirname(__file__)))
def run():
    """Run problem 67 on the bundled data file and return the result."""
    return problem67(os.path.join(__location__, FILENAME))
if __name__ == '__main__':
    print("Result: ", run())
|
How far can advertising be allowed to go?
Promotion is the main driving force behind the market economy. Last year alone spending on advertising reached $480 billion, but is it acceptable to use national symbols in advertising?
Do advertisers sometimes go too far in their bid to make a sale? Should there be restrictions from the government, or is it just a matter of self-censorship for advertisers?
John Tylee from the Campaign magazine in London, Alla Glinchikova from the Moscow-based Institute of Globalization and Social Movements, and Aleksey Sukhenko from the Council of the Russian Marketing Expert Guild shared their views with RT.
|
#!/usr/bin/python
"""
An example script for running an MPI grid job using the mig interface module.
"""
import miginterface as mig
import time, sys
def main():
    """
    Run an mpi job on a grid resource. To run in local mode please install mpi.

    Submits a compile+run command pair to the MiG server, polls the job
    status until it finishes, then prints the job's output.
    """
    # mig.debug_mode_on() # uncomment to enable debug print outs
    # mig.local_mode_on() # uncomment to enable local mode execution
    mig.test_connection() # Check if we can connect to the MiG server
    mpi_file = "example.c" # mpi program source file
    # The shell commands to execute on the grid resource using 4 processes.
    # We need it to compile on the resource first.
    cmds = ["mpicc -O2 example.c -o example", "$MPI_WRAP mpirun -np 4 ./example Hello"]
    # Specify that we require MPI as a runtime env and use the DIKU vgrid cluster.
    specifications = {"RUNTIMEENVIRONMENT":"MPI-WRAP-2.0", "VGRID":"DIKU"}
    # Create and submit the grid job
    job_id = mig.create_job(cmds, input_files=mpi_file, resource_specifications=specifications)
    print "\nJob (ID : %s) submitted. \n\n" % job_id
    # Wait for the job to finish while monitoring the status
    polling_frequency = 10 # seconds
    while not mig.job_finished(job_id):
        job_info = mig.job_info(job_id) # get an info dictionary
        print 'Grid job : %(ID)s \t %(STATUS)s ' % job_info
        time.sleep(polling_frequency) # wait a while before polling again
    print mig.job_output(job_id)
if __name__ == "__main__":
    # Command-line flags: -l forces local execution, -d enables debug output.
    if "-l" in sys.argv:
        mig.local_mode_on()
    if "-d" in sys.argv:
        mig.debug_mode_on()
    main()
|
Dyspepsia is a condition that causes pain and uncomfortable fullness in the upper part of the belly. There are many treatments which can be helpful for dyspepsia. No single therapy works in all patients with dyspepsia. It is common for patients to try several medications before finding one that works. If a person tries over the counter remedies and doesn’t improve or if they have worsening symptoms, vomiting, gastrointestinal bleeding or unexplained weight loss, they should have an informed discussion with their doctor to develop an effective treatment plan.
FDgard can help reduce abdominal pain in people with dyspepsia. It works by delivering caraway oil and peppermint oil directly to the upper belly (i.e., stomach and upper part of the small intestine). This helps to relax the stomach and upper intestines which can help with symptoms.
H. pylori is a bacteria that lives in the stomach and can cause dyspepsia symptoms. There are many different ways to treat and clear H. pylori from the stomach. The best treatment depends on many factors and people with H. pylori should have an informed discussion with their doctor.
Proton pump inhibitors (PPIs) strongly block acid production in your stomach and can help reduce dyspepsia symptoms. Available PPIs include Aciphex, Dexilant, Nexium, Prevacid, Prilosec, and Protonix.
|
# -*- coding: utf-8 -*-
"""
***************************************************************************
lasindexPro.py
---------------------
Date : October 2014 and May 2016
Copyright : (C) 2014 by Martin Isenburg
Email : martin near rapidlasso point com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
from future import standard_library
standard_library.install_aliases()
__author__ = 'Martin Isenburg'
__date__ = 'October 2014'
__copyright__ = '(C) 2014, Martin Isenburg'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import os
from .LAStoolsUtils import LAStoolsUtils
from .LAStoolsAlgorithm import LAStoolsAlgorithm
from processing.core.parameters import ParameterBoolean
class lasindexPro(LAStoolsAlgorithm):
    """Processing algorithm that spatially indexes whole folders of
    LAS/LAZ files by invoking the LAStools ``lasindex`` executable
    (run through Wine when available).
    """

    MOBILE_OR_TERRESTRIAL = "MOBILE_OR_TERRESTRIAL"
    APPEND_LAX = "APPEND_LAX"

    def defineCharacteristics(self):
        """Declare the algorithm's name, group and user-facing parameters."""
        self.name, self.i18n_name = self.trAlgorithm('lasindexPro')
        self.group, self.i18n_group = self.trAlgorithm('LAStools Production')
        self.addParametersPointInputFolderGUI()
        self.addParameter(ParameterBoolean(
            lasindexPro.APPEND_LAX,
            self.tr("append *.lax file to *.laz file"), False))
        self.addParameter(ParameterBoolean(
            lasindexPro.MOBILE_OR_TERRESTRIAL,
            self.tr("is mobile or terrestrial LiDAR (not airborne)"), False))
        self.addParametersAdditionalGUI()
        self.addParametersCoresGUI()
        self.addParametersVerboseGUI()

    def processAlgorithm(self, progress):
        """Assemble the lasindex command line and execute it."""
        executable = "lasindex.exe" if LAStoolsUtils.hasWine() else "lasindex"
        commands = [os.path.join(LAStoolsUtils.LAStoolsPath(), "bin", executable)]
        self.addParametersVerboseCommands(commands)
        self.addParametersPointInputFolderCommands(commands)
        if self.getParameterValue(lasindexPro.APPEND_LAX):
            commands.append("-append")
        if self.getParameterValue(lasindexPro.MOBILE_OR_TERRESTRIAL):
            # Small tiles and a negative "maximum" for ground-based scans.
            commands.extend(["-tile_size", "10", "-maximum", "-100"])
        self.addParametersAdditionalCommands(commands)
        self.addParametersCoresCommands(commands)
        LAStoolsUtils.runLAStools(commands, progress)
|
If it wasn't for my work commitment this evening, I would have been at P's place, entertaining and having a meal with J a retired staff.
P is a retired ifs and his wife was a stewardess. J, a great colleague of ours had suffered a stroke recently and is now wheelchair bound. He is lonely and depressed and in need of company.
This evening poolside gathering was organised for J. Only a few close friends were invited.
P and wife have been looking after the sick and lonely ex staff as well as the cabin crew. They often cook for them and would bring them out for a drive in their car. These two are really kind people and they are to me, the gems of cabin crew.
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
SRXをエミュレーションするモジュール
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Juniper SRXのpolicy追加をテストするための
モジュールです。
:copyright:
:license:
"""
import re
from collections.abc import Sequence
from ipaddress import IPv4Network, IPv4Address
# Policy match-field names.
SourceAddress = "source_address"
DestinationAddress = "destination_address"
# NOTE(review): this constant is shadowed later in this module by the
# ``Application`` class definition, so the string value is unreachable
# after import — consider renaming one of the two.
Application = "application"
# Wildcard name used for the implicit match-all address book/application.
ANY = "any"
# Policy action keywords.
Permit = "permit"
Deny = "deny"
Reject = "reject"
Log = "log"
Count = "count"
class PsuedoSRX(object):
    """PsuedoSRX

    Emulates a single SRX firewall instance: it owns named address books,
    applications (service definitions) and zones, plus an ordered policy
    list; ``packet_in`` evaluates packets against the policies in order
    and the first match wins.
    """
    def __init__(self, device_name):
        super(PsuedoSRX, self).__init__()
        self.device_name = device_name
        # Address books keyed by name; the implicit "any" book is created
        # up front (AddressBook maps it to 0.0.0.0/0).
        self.address_books = {}
        self.set_address_book(ANY)
        # Applications keyed by name; likewise the implicit "any" app.
        self.applications = {}
        self.set_applications(ANY)
        self.zones = {}
        # Ordered list: packet_in() returns the action of the first match.
        self.policies = []
    def set_address_book(self, book_name, nw_address=None):
        # Creates the book on first use; appends the network when given.
        book = self.__get_book(book_name)
        if nw_address:
            book.append(nw_address)
    def set_zone(self, zone_name):
        # Creates (or replaces) a zone with the given name.
        zone = Zone(zone_name)
        self.zones[zone.name] = zone
    def set_applications(self, app_name, protocol=None,
                         src_or_dst="destination-port", port=None):
        # Creates/updates an application; ports match the destination side
        # unless src_or_dst is "source-port".
        app = self.__get_app(app_name)
        if protocol:
            app.protocol = protocol
        if port:
            app.port = port
        if src_or_dst == "source-port":
            app.is_src = True
    def set_policy_with_addressbook(self, policy_name, src_or_dst, book_name):
        # Attaches an existing (or newly created) address book to the
        # policy's source or destination side.
        policy = self.__get_policy(policy_name)
        book = self.__get_book(book_name)
        policy.add_address_book(src_or_dst, book)
    def set_policy_with_application(self, policy_name, app_name):
        # Attaches an application to the policy's match criteria.
        policy = self.__get_policy(policy_name)
        app = self.__get_app(app_name)
        policy.add_application(app)
    def set_policy_with_action(self, policy_name, action):
        # Appends an action (permit/deny/...) to the policy.
        policy = self.__get_policy(policy_name)
        policy.add_action(action)
    def insert_policy(self, policy_name, insert_name, before_or_after="before"):
        # Moves policy_name immediately before (default) or after
        # insert_name in the evaluation order.
        policy = self.__get_policy(policy_name)
        self.policies.remove(policy)
        if before_or_after == "before":
            target = self.__get_policy(insert_name)
            i = self.policies.index(target)
        else:
            target = self.__get_policy(insert_name)
            i = self.policies.index(target) + 1
        self.policies.insert(i, policy)
    def packet_in(self, protocol, src_addr, src_port, dest_addr, dest_port):
        # Handle an incoming packet: the first matching policy decides;
        # the implicit default when nothing matches is deny.
        for policy in self.policies:
            if policy.lookup(protocol, src_addr, src_port, dest_addr, dest_port):
                return policy.action()
        return [Deny]
    def __get_policy(self, policy_name):
        # Find-or-create: a missing policy is created and appended to the
        # end of the evaluation order.
        policy = None
        for p in self.policies:
            if p.policy_name == policy_name:
                policy = p
                break
        if not policy:
            policy = Policy(policy_name)
            self.policies.append(policy)
        return policy
    def __get_book(self, book_name):
        # Find-or-create semantics, mirroring __get_policy.
        if book_name in self.address_books:
            return self.address_books[book_name]
        book = AddressBook(book_name)
        self.address_books[book_name] = book
        return book
    def __get_app(self, app_name):
        # Find-or-create semantics, mirroring __get_policy.
        if app_name in self.applications:
            return self.applications[app_name]
        app = Application(app_name)
        self.applications[app_name] = app
        return app
    def __repr__(self):
        """
        Eventually this should return JSON.
        """
        return "{0}:{1}".format(self.device_name, self.applications)
class Zone(object):
    """Zone

    Emulating real interfaces is difficult, so a zone is kept as a plain
    named container of attached address books rather than a full entity.
    """

    def __init__(self, name):
        super(Zone, self).__init__()
        self.name = name
        self.address_books = []

    def attach(self, address_book):
        """Associate an address book with this zone."""
        self.address_books.append(address_book)
class AddressBook(Sequence):
    """AddressBook

    Emulates a junos SRX address-book: a named collection of IPv4
    networks supporting sequence access and address containment tests.

    :param book_name: this address-book's name
    :param nw: the nw that this address-book has.
    """

    def __init__(self, book_name, nw=None):
        super(AddressBook, self).__init__()
        self.book_name = book_name
        # The special "any" book always covers the whole IPv4 space.
        if book_name == ANY:
            nw = '0.0.0.0/0'
        self.nws = [IPv4Network(nw)] if nw else []

    def append(self, nw):
        """Add another network (CIDR notation) to the book."""
        self.nws.append(IPv4Network(nw))

    def __getitem__(self, key):
        return self.nws[key]

    def __len__(self):
        return len(self.nws)

    def __contains__(self, item):
        # True when the given address falls inside any stored network.
        addr = IPv4Address(item)
        return any(addr in network for network in self.nws)

    def __repr__(self):
        parts = ['AddressBook {0}: '.format(self.book_name)]
        parts.extend('{0} '.format(network.exploded) for network in self.nws)
        return ''.join(parts)
class Application(object):
    """A junos SRX application (service) definition.

    Matches packets by protocol plus source or destination port.  The
    port specification accepts a numeric range ("1024-2048"), a single
    number ("22"), or a well-known junos service name ("junos-ssh").
    """
    # Raw strings avoid the invalid-escape-sequence deprecation; the
    # pattern text is byte-identical to before.
    port_pattern_range = re.compile(r'(\d+)-(\d+)')
    port_pattern_simple = re.compile(r'\d+')
    port_pattern_junos_name = re.compile(r'junos-[a-zA-Z]+')
    # Mapping from junos built-in application names to port numbers.
    junos_app_name_mapper = {'junos-ssh': 22, 'junos-http': 80}

    @classmethod
    def __to_number(cls, junos_app_name):
        """Translate a junos-* application name to its port number."""
        return cls.junos_app_name_mapper[junos_app_name]

    def __init__(self, app_name):
        super(Application, self).__init__()
        self.app_name = app_name
        self.is_any = False
        self.is_src = False
        if app_name == ANY:
            self.is_any = True
        self._protocol = ""
        self._port = None

    @property
    def protocol(self):
        return self._protocol

    @protocol.setter
    def protocol(self, val):
        self._protocol = val

    @property
    def port(self):
        return self._port

    @port.setter
    def port(self, ports):
        """Parse a port specification string; see the class docstring."""
        m = self.port_pattern_range.match(ports)
        if m:
            self._port = range(int(m.group(1)), int(m.group(2)) + 1)
            return
        m = self.port_pattern_simple.match(ports)
        if m:
            self._port = [int(m.group(0))]
            return
        if self.port_pattern_junos_name.match(ports):
            self._port = [self.__to_number(ports)]
            return
        # BUG FIX: this was a bare ``raise`` with no active exception,
        # which surfaced as a confusing RuntimeError; raise a descriptive
        # error for unrecognized specifications instead.
        raise ValueError("unrecognized port specification: %r" % (ports,))

    def match(self, proto, src_port=None, dest_port=None):
        """Return True when the protocol and either port match this app."""
        if self.is_any:
            return True
        if (proto == self._protocol) and self._port:
            if dest_port in self._port:
                return True
            if src_port in self._port:
                return True
        return False

    def __repr__(self):
        return "{0} {1} {2}".format(self.app_name, self._protocol, self._port)
class Policy(object):
    """A single security policy.

    Holds the source/destination address books, applications and actions
    of one policy; ``lookup`` decides whether a packet matches all three
    criteria.
    """

    def __init__(self, policy_name):
        super(Policy, self).__init__()
        self.policy_name = policy_name
        self.source_addresses = []
        self.destination_addresses = []
        self.applications = []
        self.actions = []

    def add_from_zone(self, from_zone):
        self.from_zone = from_zone

    def add_to_zone(self, to_zone):
        self.to_zone = to_zone

    def add_address_book(self, src_or_dst, book):
        """Register *book* on the source or destination side."""
        target = (self.source_addresses if src_or_dst == "source-address"
                  else self.destination_addresses)
        target.append(book)

    def add_application(self, app):
        self.applications.append(app)

    def add_action(self, action):
        self.actions.append(action)

    def lookup(self, protocol, src_addr, src_port, dest_addr, dest_port):
        """Return True when the packet matches this policy's address
        books and at least one of its applications."""
        return (self.__matches_source(src_addr)
                and self.__matches_destination(dest_addr)
                and self.__matches_application(protocol, src_port, dest_port))

    def action(self):
        """Return the configured action list (permit/deny/...)."""
        return self.actions

    def __matches_source(self, src_addr):
        return any(src_addr in book for book in self.source_addresses)

    def __matches_destination(self, dest_addr):
        return any(dest_addr in book for book in self.destination_addresses)

    def __matches_application(self, protocol, src_port, dest_port):
        return any(app.match(protocol, src_port, dest_port)
                   for app in self.applications)

    def __repr__(self):
        return self.policy_name
|
Are you looking to buy CBD oil in Corry? Finding the right CBD oil can be a daunting task, especially if you are a newbie. There are many ways of acquiring your Cannabinoid (CBD) oil. You can purchase the product online, from brick and mortar store, co-ops, dispensaries, and natural herbalists. Depending on your preferred mode of ingestion, the hemp-extracted oil is available in a range of applications including tinctures, topicals, vaporizers, vape pens, transdermal patches and as infused edibles – such as CBD gummies.
One of the essential question people will ask regarding CBD oil is whether it is legal to purchase in Corry. A vast majority of CBD oil in Pennsylvania is extracted from hemp which is a non-psychoactive compound that contains none or little concentration of THC, hence considered as entirely legal and separate from cannabis marijuana regulation and authority.
If you want to try buying CBD risk free in 16407, you can get a free trial bottle of CBD capsules here. Just pay for shipping and the bottle is yours 100% free!
You can get a free trial bottle of CBD oil to test in Corry, PA 16407 before spending a ton of money. Just pay for shipping to get the bottle delivered to your home!
You can purchase CBD and hemp oil in Corry, PA from specialty retail stores – over the counter (i.e., nutrition stores and smoke shops). Physical stores offer buyers the ability to see the products before they purchase as well as gives them the chance to actively engage with store associates by asking questions and learning about other products that they might be interested in.
It’s important to do your research so you purchase the right product in Corry, Pennsylvania. Always use personal discretion when making purchases, both in person and online. Further, don’t simply go for the cheapest products; instead, be willing to pay the price for a quality product. If you want to reap the benefits of excellent quality, you have no choice but to pay a substantial amount for it.
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import glob
import json
# Directory of this script, and the folder holding the per-album JSON docs.
PWD = os.path.dirname(__file__)
DATA_DIR = os.path.join(PWD, 'data')
def parse_doc(doc_path):
    """Parse one Spotify album JSON document.

    Args:
        doc_path: path to an ``<album_id>.json`` file.

    Returns:
        A dict with the album's id, name, images, release_date, uri and
        artists, or an empty dict when the document has no images (the
        caller filters out falsy results).

    Raises:
        AssertionError: when the document is not an album, or its ``id``
            / ``uri`` fields disagree with the file name.
    """
    # Renamed from ``id`` so the builtin is not shadowed.
    album_id = os.path.splitext(os.path.basename(doc_path))[0]
    # BUG FIX: the file handle was opened without ever being closed; a
    # context manager releases it promptly.
    with open(doc_path, 'r') as doc_file:
        data = json.load(doc_file)
    assert data['type'] == 'album'
    assert data['id'] == album_id
    name = data['name']
    artists = data['artists']
    images = data.get('images', [])
    if not images:
        # Parenthesized print works on both Python 2 and 3.
        print('images not found for "{}"'.format(name))
        # TODO make it work
        return {}
    release_date = data['release_date']
    uri = data['uri']
    assert uri == 'spotify:album:{}'.format(album_id)
    return {'id': album_id,
            'name': name,
            'images': images,
            'release_date': release_date,
            'uri': uri,
            'artists': artists}
def main():
    """Merge every album document under DATA_DIR into a single data.json
    map keyed by album id; documents skipped by parse_doc (falsy results)
    are dropped.
    """
    docs = glob.glob('{}/*.json'.format(DATA_DIR))
    # list() keeps this correct on Python 3, where filter() is lazy.
    data_list = list(filter(bool, map(parse_doc, docs)))
    # BUG FIX: the output handle was never closed; the context manager
    # guarantees the JSON is flushed to disk.
    with open(os.path.join(PWD, 'data.json'), 'w') as out:
        out.write(json.dumps({album['id']: album for album in data_list},
                             indent=2))
if __name__ == '__main__':
    main()
|
a way along which something runs.
a paved or cleared strip on which planes land and take off.
a similar strip on which cars, trucks, or the like may park, load, or enter the stream of traffic.
the beaten track or habitual path of deer or other wild animals.
a fairly large enclosure in which domestic animals may range about: a runway for dogs.
the bed of a stream.
a narrow platform or ramp extending from a stage into the orchestra pit or into an aisle, as in a theater.
I walked across the runway to the large hangar we were housed in.
Twice in the past two weeks, this has entailed meeting a plane on the runway to retrieve sick passengers who may be infected.
That was when I started getting couture, kind of from the other side of the runway.
Trying to do it in a reusable rocket, landing on a runway, that system has a very bad track record.
He sets no more snares across that runway, for the rabbits have had their alarm.
The runway or yard should be as large as our purse will permit.
Those steers just naturally follow along on up that runway and into the killing pens.
Bowers and his helpers were crowding the sheep up the runway into the last car when Kate rode up.
"customary track of an animal," especially a deer, 1833, American English, from run (v.) + way. Meaning "artificial sloping track" is attested from 1883; airfield sense is from 1923.
|
#!/usr/bin/env python3
"""关系型数据库"""
class sql_helper(object):
    """Tiny fluent SQL-string builder.

    Each method appends a clause fragment to an internal buffer and
    returns ``self`` so calls can be chained; ``get_sql()`` drains the
    buffer and returns the finished statement terminated with ``;``.

    WARNING: values are interpolated directly into the SQL text with no
    quoting or escaping, so this must never be fed untrusted input
    (SQL-injection risk) — use a parameterized DB-API for real queries.
    """

    # Fragment buffer and table-name prefix (instance copies are set in
    # __init__; these class-level defaults mirror the original layout).
    __data_list = None
    __prefix = ""

    def __init__(self, prefix):
        self.__data_list = []
        self.__prefix = prefix

    def select(self, seq):
        """Start a SELECT listing the given column names."""
        self.__data_list.append("SELECT %s" % ",".join(seq))
        return self

    def where(self, where):
        """Append a WHERE clause (raw condition string)."""
        self.__data_list.append(" WHERE %s" % where)
        return self

    def from_(self, table):
        """Append FROM with the configured table prefix applied."""
        self.__data_list.append(" FROM %s%s" % (self.__prefix, table,))
        return self

    def delete(self):
        """Start a DELETE statement."""
        self.__data_list.append("DELETE")
        return self

    def insert(self, table, **kwargs):
        """Start an INSERT INTO built from field=value keyword pairs."""
        self.__data_list += [
            "INSERT INTO ",
            "%s%s" % (self.__prefix, table),
        ]
        fields = list(kwargs.keys())
        values = list(kwargs.values())
        if fields:
            self.__data_list.append(" (%s)" % ",".join(fields))
        self.__data_list.append(
            " VALUES (%s)" % ",".join(str(v) for v in values)
        )
        return self

    def update(self, table, **kwargs):
        """Start an UPDATE ... SET built from field=value keyword pairs."""
        assignments = ",".join("%s=%s" % (k, v,) for k, v in kwargs.items())
        self.__data_list.append(
            "UPDATE %s%s SET %s" % (self.__prefix, table, assignments)
        )
        return self

    def get_sql(self):
        """Drain the buffer and return the statement ending in ';'."""
        fragments = self.__data_list[:]
        # Empty in place (same semantics as the original pop loop).
        del self.__data_list[:]
        return "".join(fragments) + ";"

    def append(self, sts):
        """Append an arbitrary raw fragment preceded by a space."""
        self.__data_list.append(" %s" % sts)
        return self

    def limit(self, limit):
        """Append a LIMIT clause."""
        self.__data_list.append(" LIMIT %s" % limit)
        return self

    def offset(self, offset):
        """Append an OFFSET clause."""
        self.__data_list.append(" OFFSET %s" % offset)
        return self
def build_value_map(field_seq, value_seq):
    """Python的DB驱动默认返回不带字段名的tuple；本函数生成 `字段->值` 映射。

    (DB drivers return plain tuples without column names by default; this
    builds the ``field -> value`` mapping for one row.  Indexing is driven
    by ``field_seq``'s length, matching the original behaviour when the
    sequences differ in length.)
    """
    return {field_seq[i]: value_seq[i] for i in range(len(field_seq))}
|
On October 3rd, 2011, SATO Corporation underwent corporate separation and changed to the structure of a pure holding company in order to both strengthen Japanese business and expand overseas business.
In order to achieve growth and continue to generate profit, we must revitalize our entire organization, both in Japan and overseas, and effectively meet the needs of the global market.
Through reorganizing the group into an aggregate of smaller businesses, we can strengthen an open and innovative corporate culture. Also, by allowing capable employees to take charge of certain parts of our business, we will educate and train the next generation of management at SATO.
To further raise our corporate value, we must be able to carry out swift and intense investment of the group's management resources into new and potentially profitable business fields.
|
import numpy as np
import matplotlib.pyplot as plt
from scipy.signal import hamming, triang, blackmanharris
import sys, os, functools, time
sys.path.append(os.path.join(os.path.dirname(os.path.realpath(__file__)), '../../../software/models/'))
import sineModel as SM
import utilFunctions as UF
# Read the input sound and keep only the first 50000 samples.
(fs, x) = UF.wavread(os.path.join(os.path.dirname(os.path.realpath(__file__)), '../../../sounds/bendir.wav'))
x1 = x[0:50000]
# Sinusoidal-analysis parameters: odd-length Blackman window, FFT size,
# analysis hop, and a -90 dB peak-detection threshold.
w = np.blackman(2001)
N = 2048
H = 500
t = -90
minSineDur = .01        # minimum sine-track duration (seconds)
maxnSines = 150         # maximum simultaneous sine tracks
freqDevOffset = 20
freqDevSlope = 0.02
# Synthesis FFT size and hop.
# NOTE(review): H is reassigned here, so the H = 500 above is dead code;
# also Ns/4 is a float on Python 3 — confirm the sineModel functions
# accept it, or use Ns//4.
Ns = 512
H = Ns/4
# Analyze the sound into sine tracks, then resynthesize from them.
tfreq, tmag, tphase = SM.sineModelAnal(x1, fs, w, N, H, t, maxnSines, minSineDur, freqDevOffset, freqDevSlope)
y = SM.sineModelSynth(tfreq, tmag, tphase, Ns, H, fs)
# Time axis for the analysis frames.
numFrames = int(tfreq[:,0].size)
frmTime = H*np.arange(numFrames)/float(fs)
maxplotfreq = 3000.0
# Plot: input waveform, sine-track frequencies, resynthesized output.
plt.figure(1, figsize=(9, 7))
plt.subplot(3,1,1)
plt.plot(np.arange(x1.size)/float(fs), x1, 'b', lw=1.5)
plt.axis([0,x1.size/float(fs),min(x1),max(x1)])
plt.title('x (bendir.wav)')
plt.subplot(3,1,2)
# Mask tracks above maxplotfreq and break lines at inactive frames.
tracks = tfreq*np.less(tfreq, maxplotfreq)
tracks[tracks<=0] = np.nan
plt.plot(frmTime, tracks, color='k', lw=1.5)
plt.autoscale(tight=True)
plt.title('f_t, sine frequencies')
plt.subplot(3,1,3)
plt.plot(np.arange(y.size)/float(fs), y, 'b', lw=1.5)
plt.axis([0,y.size/float(fs),min(y),max(y)])
plt.title('y')
plt.tight_layout()
UF.wavwrite(y, fs, 'bendir-sine-synthesis.wav')
plt.savefig('sineModel-anal-synth.png')
plt.show()
|
Featuring the work of George Szirtes, Jo Bell, Dean Pasch, Sonja Benskin Mesher, Dave Kirkwood, Michael Powell and me.
Original unframed/unmounted pigment prints (30cm x 40cm) on German Etching Fine Art paper.
Limited to the selection exhibited at StAnza Poetry Festival March 2015.
Each print includes the work of three artists.
A limited edition of 20 per print and each numbered.
>erasure prints as subject heading, please.
|
#!/usr/bin/env python
import numpy
import pyfits
import pylab
import os
import sys
from astLib.astWCS import WCS
import Tigger
from scipy.optimize import curve_fit
import argparse
#image = sys.argv[1]
#catalog = sys.argv[2]
def reshape_data(image,zoom=1):
    """ Reshape FITS data to (stokes,freq,npix_ra,npix_dec)

    Parameters:
        image: FITS filename.
        zoom: fraction (0-1] of each sky axis to keep; 1 returns the
            full field.

    Returns:
        (data, wcs): the (possibly reordered and cropped) pixel array and
        the astLib WCS built from the image header.
    """
    with pyfits.open(image) as hdu:
        data = hdu[0].data
        hdr = hdu[0].header
    shape = list(data.shape)
    ndim = len(shape)
    wcs = WCS(hdr,mode='pyfits')
    if ndim<2:
        raise ValueError('The FITS file needs at least two dimensions')
    # This is the shape I want the data in
    want = (
        ['STOKES',0],
        ['FREQ',1],
        ['RA',2],
        ['DEC',3],
    )
    # Assume RA,DEC is first (FITS) or last two (NUMPY)
    # NOTE(review): the loop appends each matching axis's numpy index onto
    # the ``want`` entries, then swaps the first two shape entries when
    # STOKES/FREQ appear transposed. The index arithmetic is intricate —
    # verify against a known 4-D cube before trusting the reordering.
    if ndim>3:
        for ctype,ind in want[:2]:
            for axis in range(1,ndim+1):
                if hdr['CTYPE%d'%axis].startswith(ctype):
                    want[ind].append(ndim-axis)
        if want[0][-1] == want[1][-2] and want[0][-2] == want[1][-1]:
            tmp = shape[0]
            shape[0] = shape[1]
            shape[1] = tmp
            data = numpy.reshape(data,shape)
    if ndim ==3:
        # Drop a leading non-frequency axis (e.g. a degenerate axis).
        if not hdr['CTYPE3'].startswith('FREQ'):
            data = data[0,...]
    elif ndim>4:
        raise ValueError('FITS file has more than 4 axes. Aborting')
    shape = data.shape
    # Crop a ``zoom`` fraction of the last two axes.
    # NOTE(review): the hx/hy arithmetic adds the full axis length rather
    # than the low offset (the zip pairs the names the other way round),
    # and lx/hx is applied to the last axis while it was computed from
    # shape[-2] — correct only for zoom=1; confirm before using zoom<1.
    imslice = [slice(None)]*len(shape)
    lx,ly = [ (x-int(x*zoom)) for x in shape[-2:] ]
    hx,hy = [ (low + int(x*zoom)) for x,low in zip([lx,ly],shape[-2:]) ]
    imslice[-1] = slice(lx,hx)
    imslice[-2] = slice(ly,hy)
    return data[imslice], wcs
def local_variance(data,catalog,wcs,step=20,averge_freq=True):
    """Calculate the local standard deviation around each catalog source.

    Args:
        data: image array; 3/4-D cubes are collapsed to 2-D first.
        catalog: Tigger sky-model filename with the source positions.
        wcs: astLib WCS used to convert sky coordinates to pixels.
        step: half-width in pixels of the box sampled around each source;
            an int or an (x, y) pair.
        averge_freq: unused; kept for backward compatibility [sic].

    Returns:
        List of standard deviations, one per source whose sampling box
        fits inside the image.
    """
    shape = data.shape
    ndim = len(shape)
    # Collapse any leading stokes/frequency axes down to a 2-D image.
    if ndim == 4:
        data = data[0, ...].sum(0)
    elif ndim == 3:
        data = data.sum(0)
    model = Tigger.load(catalog)
    # list(...) keeps this correct on Python 3, where map() is lazy.
    positions_sky = [list(map(numpy.rad2deg, (src.pos.ra, src.pos.dec)))
                     for src in model.sources]
    positions = [wcs.wcs2pix(*pos) for pos in positions_sky]
    if isinstance(step, (tuple, list, int)):
        if isinstance(step, int):
            step = [step, step]
        # Drop sources whose sampling box would fall (partly) outside the
        # image; iterate over a sorted copy so removal is safe.
        for pos in sorted(positions):
            x, y = pos
            # BUG FIX: was ``numpy.array(pos).any()<0`` which compares a
            # bool against 0 and is never True; test the coordinates for
            # negativity instead.
            if x > shape[-2] or y > shape[-1] or (numpy.array(pos) < 0).any():
                positions.remove(pos)
            if (y + step[1] > shape[-1]) or (y - step[1] < 0):
                if pos in positions:
                    positions.remove(pos)
            if (x + step[0] > shape[-2]) or (x - step[0] < 0):
                if pos in positions:
                    positions.remove(pos)
    _std = []
    for x, y in positions:
        subrgn = data[x - step[0]:x + step[0], y - step[1]:y + step[1]]
        _std.append(subrgn.std())
    return _std
def hist(data, nbins=100, func=None, save=None, show=False):
    """Fit a model distribution to the histogram of pixel amplitudes.

    Args:
        data: 1-D array of samples.
        nbins: number of histogram bins.
        func: model ``f(x, peak, mu, sigma)`` to fit; defaults to
            ``gaussian``.
        save: filename the plot is saved to (only when given).
        show: display the plot interactively.
    """
    # BUG FIX: the default was ``gauss``, a name that does not exist in
    # this module (the function is called ``gaussian``), so calling
    # hist() without ``func`` raised NameError.
    func = func or gaussian
    counts, bin_edges = numpy.histogram(data, bins=nbins)
    x_min = min(bin_edges)
    x_max = max(bin_edges)
    hh = x_max - x_min
    # Bin centres, shifted by half the range as in the original code.
    xx = numpy.linspace(x_min, x_max, nbins) + hh/2
    # Initial guess for (peak, mean, sigma).
    sigma = data.std()
    peak = counts.max()
    mean = data.mean() + hh/2
    parms, pcov = curve_fit(func, xx, counts, p0=[peak, mean, sigma])
    # 1-sigma parameter uncertainties from the fit covariance matrix.
    err = numpy.sqrt(numpy.diag(pcov))
    pylab.figure(figsize=(15, 10))
    pylab.plot(xx - hh/2, counts, '.')
    pylab.plot(xx - hh/2, func(xx, *parms))
    pylab.grid()
    # BUG FIX: ``func.func_name`` is Python-2 only; ``__name__`` works on
    # both Python 2 and 3.
    func_name = func.__name__
    func_name = func_name[0].upper() + func_name[1:]
    title_string = 'Fitted a %s function with best fit parameters:' % func_name
    title_string += ' \n Peak=%.4g $\pm$ %.4g, $\mu$=%.4g $\pm$ %.4g, $\sigma$=%.4g $\pm$ %.4g' % (parms[0], err[0], parms[1], err[1], parms[2], err[2])
    pylab.title(title_string)
    if show:
        pylab.show()
    if save:
        pylab.savefig(save or 'fidelity_stats.png')
    pylab.clf()
def estimate_noise(data):
    """Estimate the image noise as the standard deviation of the negative
    pixels mirrored about zero (assumes noise is symmetric around 0)."""
    neg = data[data < 0]
    mirrored = numpy.concatenate([neg, -neg])
    return mirrored.std()
def gaussian(x,a0,mu,sigma):
    """Gaussian profile: a0 * exp(-(x - mu)^2 / (2 sigma^2))."""
    z = (x - mu) / sigma
    return a0 * numpy.exp(-0.5 * z * z)
def laplace(x,a0,mu,sigma):
    """Laplace profile: a0 * exp(-|x - mu| / sigma)."""
    deviation = numpy.abs(x - mu)
    return a0 * numpy.exp(-deviation / sigma)
def cauchy(x,a0,mu,sigma):
    """Cauchy/Lorentzian profile: a0 * sigma^2 / ((x - mu)^2 + sigma^2)."""
    d = x - mu
    s2 = sigma ** 2
    return a0 * s2 / (d * d + s2)
def maxwell(x,a0,mu,sigma):
    """Maxwell-like profile: a0 * x^2 * exp(-(x - mu)^2 / (2 sigma^2))."""
    z = (x - mu) / sigma
    return a0 * x ** 2 * numpy.exp(-0.5 * z * z)
_FUNCS = dict(gaussian=gaussian,laplace=laplace,cauchy=cauchy,maxwell=maxwell)
if __name__=='__main__':
    # Work around argparse treating a leading "-<digit>" value (e.g. "-3")
    # as an option flag by prefixing it with a space.
    # NOTE(review): a bare "-" argument would raise IndexError on arg[1];
    # presumably never happens with this CLI — confirm.
    for i, arg in enumerate(sys.argv):
        if (arg[0] == '-') and arg[1].isdigit(): sys.argv[i] = ' ' + arg
    parser = argparse.ArgumentParser(description='Routines to measure image statistics')
    add = parser.add_argument
    add('image', help='Input FITS image')
    add('-cat', '--catlog', dest='catalog', help='Measure image stats on source locations.')
    # NOTE(review): opts.pix_dist is parsed but never read below; the
    # histogram fit runs unconditionally.
    add('-pad', '--pixel-amp-dist', dest='pix_dist', help='Fit a distribution to the pixel amplitute histogram')
    add('-fit', '--fit', dest='fit', help='Function to to the pixel amplitude histogram',default='gaussian',choices=_FUNCS)
    add('-s', '--show', dest='show', action='store_true', help='Show pixel amplitude fit')
    add('-S', '--save', dest='save', help='Filename for pixel amplitude distribution plots',
        default='fidelity_stats.png')
    add('-nb', '--nbins', dest='nbins', type=int, help='Show pixel amplitude fit', default=100)
    add('-n', '--noise', dest='noise', action="store_true", help='Returns noise estimate')
    add('-z', '--zoom', dest='zoom', type=float, default=1.0, help='Percentage of inner region to consider for analysis')
    opts = parser.parse_args()
    # Load (a zoomed central cut of) the image, then fit the selected
    # distribution to its pixel-amplitude histogram.
    data, wcs = reshape_data(opts.image, zoom=opts.zoom)
    hist(data=data, nbins=opts.nbins, func=_FUNCS[opts.fit], show=opts.show, save=opts.save)
    catalog = opts.catalog
    if catalog:
        # Plot per-source local standard deviation against the global
        # noise estimate for comparison.
        _std = local_variance(data=data, wcs=wcs, step=20, catalog=catalog)
        pylab.plot(_std, "-x")
        pylab.plot([estimate_noise(data)]*len(_std))
        pylab.show()
    if opts.noise:
        noise = estimate_noise(data)
        # Python 2 print statement; noise reported in mJy (input assumed Jy).
        print "Noise estimate is %.4g mJy"%(noise*1e3)
|
I used the 5" pattern; there are also 10" and 15" patterns.
I made the cushion back with a flap to cover the zipper for the first time. It was very easy. The only thing I would do differently is make sure my bobbin thread was the right colour before I sewed the flap down... I then bound the cushion just like a quilt with the same fabric as the backing.
Thanks to my friend Marilyn for the backing fabric. There was an auction while we were at the Pictou Retreat and Marilyn bid on a bundle of mostly neutral fabrics and got it. She gifted me with the brown leaf fabric as it wasn't to her taste. Thank you Marilyn!
You've created the perfect pillow for the season. Well done!!
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.