text
stringlengths
29
850k
'''
Copyright (C) 2015 Ryan M Cote.

This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.

This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

Author: Ryan M Cote <minervaconsole@gmail.com>
'''

import bson
import datetime

import pymongo


class dns(object):
    '''Mongo-backed accessors for DNS session records correlated to alerts.'''

    def __init__(self, minerva_core):
        '''Grab collection handles and the result-size cap from core config.'''
        db = minerva_core.get_db()
        self.alerts = db.alerts
        self.dns = db.dns
        # Hard cap on the number of session documents returned per query.
        self.sizeLimit = minerva_core.conf['Webserver']['events']['maxResults']

    def get_dns(self, IDs):
        '''Return DNS sessions seen within +/-300s of each alert in IDs.

        Each element of the returned list is a dict carrying the alert id,
        the matching session documents, the originating alert document and
        the total number of matches found.
        '''
        results_found = []
        for ID in IDs:
            orig_alert = self.alerts.find_one(
                {"_id": bson.objectid.ObjectId(ID)})
            timestamp = orig_alert['timestamp']
            start_time = timestamp - datetime.timedelta(seconds=300)
            stop_time = timestamp + datetime.timedelta(seconds=300)
            # Match the alert's 5-tuple inside the +/-5 minute window.
            dns_results = self.dns.find({"$and": [
                {
                    "src_ip": orig_alert['src_ip'],
                    "src_port": orig_alert['src_port'],
                    "dest_ip": orig_alert['dest_ip'],
                    "dest_port": orig_alert['dest_port'],
                    "proto": orig_alert['proto'],
                },
                {"$and": [
                    {"timestamp": {"$gt": start_time}},
                    {"timestamp": {"$lt": stop_time}},
                ]},
            ]}).sort([("_id", pymongo.ASCENDING)]).limit(self.sizeLimit)
            numFound = dns_results.count()
            # list() keeps this correct on Python 3, where map() is lazy.
            sessions = list(map(self.map_dns, dns_results))
            results_found.append({
                'id': ID,
                'sessions': sessions,
                'origin': orig_alert,
                'numFound': numFound,
            })
        return results_found

    def search_dns(self, request, orig_search=False):
        '''Search DNS records from a web-form dict or a saved search.

        When orig_search is falsy, `request` is a form dict of strings and
        an event_search query is built from its non-empty fields; otherwise
        `request` is a previously-built search whose start_time/stop_time
        are popped back out.  Returns (numFound, results_found,
        event_search) with the time window folded back into event_search so
        it can be re-submitted.
        '''
        if not orig_search:
            event_search = {}
            if len(request['src_ip']) > 0:
                event_search['src_ip'] = str(request['src_ip'])
            if len(request['src_port']) > 0:
                try:
                    event_search['src_port'] = int(request['src_port'])
                except ValueError:
                    # Non-numeric port input from the form is ignored.
                    pass
            if len(request['dest_ip']) > 0:
                event_search['dest_ip'] = str(request['dest_ip'])
            if len(request['dest_port']) > 0:
                try:
                    event_search['dest_port'] = int(request['dest_port'])
                except ValueError:
                    pass
            if len(request['sensor']) > 0:
                event_search['sensor'] = str(request['sensor'])
            event_search['proto'] = str(request['proto'])
            if len(request['query_type']) > 0:
                event_search['dns.type'] = str(request['query_type'])
            if len(request['rrtype']) > 0:
                event_search['dns.rrtype'] = str(request['rrtype'])
            if len(request['rcode']) > 0:
                event_search['dns.rcode'] = str(request['rcode'])
            if len(request['rrname']) > 0:
                event_search['dns.rrname'] = str(request['rrname'])
            if len(request['rdata']) > 0:
                event_search['dns.rdata'] = str(request['rdata'])
            # Default window: the last/next 10 minutes around "now".
            if len(request['start']) > 0:
                start_time = datetime.datetime.strptime(
                    request['start'], '%m-%d-%Y %H:%M:%S')
            else:
                start_time = (datetime.datetime.utcnow()
                              - datetime.timedelta(seconds=600))
            if len(request['stop']) > 0:
                stop_time = datetime.datetime.strptime(
                    request['stop'], '%m-%d-%Y %H:%M:%S')
            else:
                stop_time = (datetime.datetime.utcnow()
                             + datetime.timedelta(seconds=600))
        else:
            # Re-submitted search: pull the stored window back out.
            event_search = request
            stop_time = event_search.pop('stop_time')
            start_time = event_search.pop('start_time')
        results = self.dns.find({"$and": [
            event_search,
            {"$and": [
                {"timestamp": {"$gt": start_time}},
                {"timestamp": {"$lt": stop_time}},
            ]},
        ]}).sort([("_id", pymongo.ASCENDING)]).limit(self.sizeLimit)
        numFound = results.count()
        results_found = list(map(self.map_dns, results))
        event_search['start_time'] = start_time
        event_search['stop_time'] = stop_time
        return numFound, results_found, event_search

    def map_dns(self, item):
        '''Split a raw Mongo document into its ObjectId and payload.

        Originally this method was defined twice (identically) in the
        class; the duplicate definition has been removed.
        '''
        return {'ID': item.pop('_id'), 'document': item}
Madeline Lohman is a Senior Researcher with The Advocates for Human Rights. She conducts research, education, and legislative advocacy on immigrant rights and human trafficking in Minnesota. She published a report on labor trafficking and exploitation in 2016 and continues combating human trafficking through training, protocol development, and research.
#
# Daemon.py
#
# Documentation:
"""Disk And Execution MONitor (Daemon)

Default daemon behaviors (they can be modified):
   1.) Ignore SIGHUP signals.
   2.) Default current working directory to the "/" directory.
   3.) Set the current file creation mode mask to 0.
   4.) Close all open files (0 to [SC_OPEN_MAX or 256]).
   5.) Redirect standard I/O streams to "/dev/null".

Failed fork() calls will return a tuple: (errno, strerror).  This behavior
can be modified to meet your program's needs.

Resources:
   Advanced Programming in the Unix Environment: W. Richard Stevens
   Unix Network Programming (Volume 1): W. Richard Stevens
   http://www.erlenstar.demon.co.uk/unix/faq_2.html#SEC16

-----
Changes
08/16/04
    * Changed os.umask(0) to os.umask(022) for Postfix purposes.
-----
"""

#
# Imports
#
import os      # Miscellaneous OS interfaces.
import sys     # System-specific parameters and functions.
import signal  # Set handlers for asynchronous events.

#
# Constants.
#
__author__ = "Chad J. Schroeder"
__version__ = "$Id$"


#
# Methods.
#
def background():
    """Detach from the controlling terminal and run as a daemon.

    Returns 0 on success (in the daemonized grandchild); on a failed
    fork() returns the tuple (errno, strerror).  The original parent
    and the intermediate child both _exit(0).
    """
    try:
        # Fork a child process so the parent can exit.  This returns control
        # to the command line or shell and guarantees the child is not a
        # process-group leader (new PID, inherited PGID) — a precondition
        # for os.setsid() below.
        pid = os.fork()
    except OSError as e:
        # "except OSError, e" was Python-2-only syntax; "as" works on
        # Python 2.6+ and 3.x with identical behavior.
        return (e.errno, e.strerror)  # ERROR (return a tuple)

    if pid == 0:  # The first child.
        # Become the leader of a new session (and process group).  The new
        # session has no controlling terminal.  This can't fail, since the
        # child is guaranteed not to be a process-group leader.
        os.setsid()

        # When the first child terminates, all processes in the second
        # child are sent a SIGHUP; ignore it so the daemon survives.
        signal.signal(signal.SIGHUP, signal.SIG_IGN)

        try:
            # Fork a second child to prevent zombies and to ensure the
            # daemon is not a session leader, so it can never reacquire a
            # controlling terminal.
            pid = os.fork()
        except OSError as e:
            return (e.errno, e.strerror)  # ERROR (return a tuple)

        if pid == 0:  # The second child — the daemon proper.
            # Don't keep any directory in use; a held cwd could make a
            # filesystem unmountable.
            os.chdir("/")
            # Same value as the original octal 022 literal; 0o prefix is
            # the Python 2.6+/3.x spelling.
            os.umask(0o022)
        else:
            os._exit(0)  # Exit parent (the first child) of the second child.
    else:
        os._exit(0)  # Exit parent of the first child.

    # Close all open files.  Try the system configuration variable
    # SC_OPEN_MAX for the maximum number of open files; fall back to a
    # default if it doesn't exist.
    try:
        maxfd = os.sysconf("SC_OPEN_MAX")
    except (AttributeError, ValueError):
        maxfd = 256  # default maximum

    for fd in range(0, maxfd):
        try:
            os.close(fd)
        except OSError:
            pass  # fd wasn't open — ignore.

    # Redirect the standard file descriptors to /dev/null.
    os.open("/dev/null", os.O_RDONLY)  # standard input  (0)
    os.open("/dev/null", os.O_RDWR)    # standard output (1)
    os.open("/dev/null", os.O_RDWR)    # standard error  (2)

    return 0
Le Grande Belle is the arrangement for the auteur, the bold, those that break the mold. This incredible arrangement comes in a strong, glass vase wrapped with Curly Willow and stands over 3 feet tall. Featured in this design are dazzling Ecuadorian Long Stem Roses, elegant Bells of Ireland, fragrant Oriental Lilies, a full stem of Thai Orchids with many blooms, dancing Veronica or Snapdragons, fresh Antique Hydrangea, Hanging Amaranthus, wild Larkspur & seasonal flowers & greenery. Upgrade to Luxurious or Elite for that fuller, luscious look. Arrangement is approximately 38"x24".
#!/usr/bin/env python
'''Tests for pyglet.resource.Loader path resolution.

Fixture layout relative to this script's home (each file.txt contains
the marker shown, followed by a newline):

    .  (script home)
        file.txt            F1
        dir1/
            file.txt        F2
            dir1/
                file.txt    F3
            res.zip/
                file.txt    F7
                dir1/
                    file.txt    F8
                    dir1/
                        file.txt    F9
        dir2/
            file.txt        F6
'''

import os
import sys
import unittest

from pyglet import resource
from pyglet.compat import asbytes


class ResourceLoadingTestCase(unittest.TestCase):
    def setUp(self):
        self.script_home = os.path.dirname(__file__)

    def check(self, path, result):
        # Convenience: every fixture directory contains a 'file.txt'.
        self.check_file(path, 'file.txt', result)

    def check_file(self, path, file, result):
        # Assert that a loader on `path` resolves `file` to the marker.
        loader = resource.Loader(path, script_home=self.script_home)
        self.assertTrue(loader.file(file).read() == asbytes('%s\n' % result))

    def checkFail(self, path):
        # Assert that 'file.txt' cannot be found on `path`.
        loader = resource.Loader(path, script_home=self.script_home)
        self.assertRaises(resource.ResourceNotFoundException,
                          loader.file, 'file.txt')

    def test1(self):
        self.check(None, 'F1')

    def test2(self):
        self.check('', 'F1')

    def test2a(self):
        self.check('.', 'F1')

    def test2b(self):
        self.checkFail(())

    def test2c(self):
        self.checkFail('foo')

    def test2d(self):
        self.checkFail(['foo'])

    def test2e(self):
        self.check(['foo', '.'], 'F1')

    def test3(self):
        self.check(['.', 'dir1'], 'F1')

    def test4(self):
        self.check(['dir1'], 'F2')

    def test5(self):
        self.check(['dir1', '.'], 'F2')

    def test6(self):
        self.check(['dir1/dir1'], 'F3')

    def test7(self):
        self.check(['dir1', 'dir1/dir1'], 'F2')

    def test8(self):
        self.check(['dir1/dir1', 'dir1'], 'F3')

    def test9(self):
        self.check('dir1/res.zip', 'F7')

    def test9a(self):
        self.check('dir1/res.zip/', 'F7')

    def test10(self):
        self.check('dir1/res.zip/dir1', 'F8')

    def test10a(self):
        self.check('dir1/res.zip/dir1/', 'F8')

    def test11(self):
        self.check(['dir1/res.zip/dir1', 'dir1/res.zip'], 'F8')

    def test12(self):
        self.check(['dir1/res.zip', 'dir1/res.zip/dir1'], 'F7')

    def test12a(self):
        self.check(['dir1/res.zip', 'dir1/res.zip/dir1/dir1'], 'F7')

    def test12b(self):
        self.check(['dir1/res.zip/dir1/dir1/', 'dir1/res.zip/dir1'], 'F9')

    def test12c(self):
        self.check(['dir1/res.zip/dir1/dir1', 'dir1/res.zip/dir1'], 'F9')

    def test13(self):
        self.check(['dir1', 'dir2'], 'F2')

    def test14(self):
        self.check(['dir2', 'dir1'], 'F6')

    # path tests
    def test15(self):
        self.check_file([''], 'dir1/file.txt', 'F2')

    def test15a(self):
        self.check_file([''], 'dir1/dir1/file.txt', 'F3')

    def test15b(self):
        self.check_file(['dir1'], 'dir1/file.txt', 'F3')

    def test15c(self):
        self.check_file([''], 'dir2/file.txt', 'F6')

    def test15d(self):
        self.check_file(['.'], 'dir2/file.txt', 'F6')

    # zip path tests
    def test16(self):
        self.check_file(['dir1/res.zip'], 'dir1/file.txt', 'F8')

    def test16a(self):
        self.check_file(['dir1/res.zip/'], 'dir1/file.txt', 'F8')

    # BUG FIX: this method was also named test16a, silently shadowing the
    # definition above so the 'F8' check never ran.  Renamed so both run.
    def test16a2(self):
        self.check_file(['dir1/res.zip/'], 'dir1/dir1/file.txt', 'F9')

    def test16b(self):
        self.check_file(['dir1/res.zip/dir1'], 'dir1/file.txt', 'F9')

    def test16c(self):
        self.check_file(['dir1/res.zip/dir1/'], 'dir1/file.txt', 'F9')
Scientific theories are really scientific. Science supports GMO safety. A list of the most current published research on GMOs. Scientific consensus on GMO safety and climate change. Vaccines from GMO corn – science deniers everywhere faint. It’s always good to have an article that combines GMOs and vaccines. No more “debates” about GMO safety–thanks to trillions of data points. Journal retracts Séralini’s controversial “GMO causes cancer” article. Gilles-Eric Séralini hogwash article ends up on the waste heap of bad science, right next to the garbage article from Mr. Andrew Wakefield. Do GMO crops have a higher yield? It depends on the answer. Correlation does not imply causation. Except when it does or doesn’t. It’s an important concept in understanding the safety of GMOs. Stopping dengue fever with genetically modified mosquitoes. The attack of the Frankensquito. Cancer rates are increasing in the USA. Not really. And GMOs are not involved either way. Oh no, here comes Frankenfish. Actually not. Another anti-GMO paper retracted because the authors wanted to support their preordained conclusions that GMOs are unsafe. Séralini again. He goes to court to sue someone for calling his research fraudulent. He kind of wins. Doesn’t mean that his research is anything but bad, very bad, science. GMOs have been around for 10,000 years, give or take. So unless you want to eat inedible grains growing on the plains, enjoy those GMO foods. And another anti-GMO article is retracted because the data was essentially manufactured out of nothing. Genetically engineered crops – safe for humans and animals, according to the National Academy of Sciences. Almond milk – examining the science behind this food fad – not really about GMOs, just a silly food fad. GMO crop pesticide use in the USA soybeans and corn – just the facts – a newly published article seems to imply that glyphosate-resistant GMO crops cause resistant weeds. No, it doesn’t say that. 
GMO food safety – those genes do not transfer to humans – why do anti-GMO activists believe that GMO genes are somehow incorporated into the human genome? Because it doesn’t. Anti-GMO and anti-vaccine activists – they’ve converged – like climate change deniers, the anti-vaccine and anti-GMO zealots ignore real science to further their beliefs. GMO vs non-GMO foods – genetic modification techniques – we’ve been genetically modifying foods for 10,000 years, here’s how. GMO bananas – necessary to save the fruit from extinction – because most bananas shipped internationally are from one cultivar, to save the banana, we need a genetically modified version. GMO corn – safe and more productive according to new meta-review – another large systematic review that shows us GMO corn is safe and productive. The bad science checklist of GMO opponents. GMO opponents are almost the same as anti-science people who populate the global warming denier community. The anti-GMO bad science checklist. Examining some of the Red Flags of Science used by the GMO refusers. GMO opponents are the left’s version of global warming deniers. Neither climate change deniers nor GMO refusers want to believe this, but they both use junk science. GMO foods cause cancer–pseudoscience says it’s so. But real science says this is ridiculous. The myth of GMO gene transfer to humans. Impossible. GMO corn causes cancer–Myth vs. Science (recent news). Gilles-Eric Séralini writes bad science. Overhyped GMO corn study gets more scrutiny. Séralini again. Dr. Oz falls for the overhyped and debunked GMO corn study. Oz promotes Séralini. GMO opponents are the global warming denialists of the left. More information on how the left is wrong about GMOs (and the right is wrong about climate change). Refusers misuse GMO rice research data. Arguments that GMO opponents should delete from their brains. A list of arguments used (or misused) by the GMO refusers that should not be used anymore. GMO opponents fall for a hoax. 
Glyphosate causes cancer. No, not really. Regarding those mistakes made by science… They weren’t really mistakes. One hour of research on Google–obviously all science is wrong. No, many years of research tells us that GMOs are safe. Another myth – labeling GMO foods is not expensive. GMO labeling law – Senate thinks they’re smarter than scientists – US Senate pushes a compromise GMO labeling bill that could cause food prices to go up. Girl Scout cookies and GMO – subtly pushing an anti-science agenda – Some Girl Scout USA cookies are labeled GMO free. Why!?!?!?! Genetically modified gluten – delicious and tasty irony – Only a tiny number of people have real diagnosed gluten sensitivity. And for those people, GMO gluten may eliminate the nasty problems. High fructose corn syrup – it’s just sugar. Sometimes high fructose corn syrup is conflated with GMOs. Not only shouldn’t be conflated, but HFCS is nothing more than sugar. And then, Cheerios is now GMO-free – it’s not what it appears to be. GMO sugar vs natural sugar – can you tell the difference? – probably not, but the answers will be amusing. Richard Dawkins GMO position is positive. Renowned evolutionary biologist Dawkins takes on Prince Charles. Neil deGrasse Tyson tells GMO haters to chill out–liberals get angry. Everyone’s still mad that he removed Pluto from the list of planets. Neil deGrasse Tyson–unafraid of GMO’s. Developing and supporting a scientific consensus. This story is not just about GMOs but also about how to develop a consensus. The solid GMO scientific consensus. The consensus is similar to the ones for evolution, the Big Bang, vaccines, and climate change. Where’s the common sense in the GMO discussion?–A review of an article by Nina Fedoroff, a Penn State University faculty member, who actually studies biotechnology. 
Her scientific bona fides are public, including being past President of the American Association for the Advancement of Science (AAAS) and a member of the National Academy of Sciences. Anti-GMO activist changes his mind–what does it really mean? A former member of the anti-GMO forces switches sides. Could it be that Mark Lynas saw real science and changed his mind? Or did Monsanto brainwash him? Bill Gates, part 3 – despised by GMO refusers. Apparently, Bill Gates is hated by Apple users, antivaccine cultists, and the anti-GMO gang. That means he’s doing good things. GMO refusers and vaccine deniers cross–call Big Pharma and Monsanto. Anti-GMO activists and climate change deniers – no science. Using the tactics of the climate change deniers, anti-GMO gonzo journalists take emails from Dr. Kevin Folta, a renowned plant geneticist, in an attempt to destroy his reputation. They failed. Dr. Kevin Folta, Part 2. The non-apology apology. Dr. Kevin Folta, Part 3. Ad hominem personal attacks. Dr. Kevin Folta, Part 4. More fallout from personal attacks and threats. Editor’s note: This article was originally published in June 2015. It will be regularly updated as we publish new articles on GMO science facts.
# Twisted, the Framework of Your Internet
# Copyright (C) 2001 Matthew W. Lefkowitz
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of version 2.1 of the GNU Lesser General Public
# License as published by the Free Software Foundation.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

# Tests for Twisted's SSL transport support: re-running the plain TCP test
# cases over SSL, STARTTLS upgrades in both directions, and OpenSSL
# buffering behavior.
from __future__ import nested_scopes
from twisted.trial import unittest
from twisted.internet import protocol, reactor
from twisted.protocols import basic
from twisted.python import util
from OpenSSL import SSL
from twisted.internet import ssl
import os
import test_tcp

# Self-signed certificate (key + cert in one PEM) next to this module.
certPath = util.sibpath(__file__, "server.pem")


class StolenTCPTestCase(test_tcp.ProperlyCloseFilesTestCase,
                        test_tcp.WriteDataTestCase):
    # Reuses the plain-TCP test cases from test_tcp, but over listenSSL/
    # connectSSL transports instead of TCP ones.

    def setUp(self):
        f = protocol.ServerFactory()
        f.protocol = protocol.Protocol
        self.listener = reactor.listenSSL(
            0, f, ssl.DefaultOpenSSLContextFactory(certPath, certPath),
            interface="127.0.0.1",
        )
        f = protocol.ClientFactory()
        f.protocol = test_tcp.ConnectionLosingProtocol
        f.protocol.master = self
        L = []  # NOTE(review): unused local, kept as-is.

        def connector():
            # Old-style getHost() returns a tuple; index 2 is the port.
            p = self.listener.getHost()[2]
            ctx = ssl.ClientContextFactory()
            return reactor.connectSSL('127.0.0.1', p, f, ctx)
        self.connector = connector

        self.totalConnections = 0


class ClientTLSContext(ssl.ClientContextFactory):
    # Client-side context restricted to TLSv1.
    isClient = 1

    def getContext(self):
        return SSL.Context(ssl.SSL.TLSv1_METHOD)


class UnintelligentProtocol(basic.LineReceiver):
    # Sends `pretext` (ending with STARTTLS), waits for READY, upgrades the
    # transport to TLS, then sends `posttext` and disconnects.
    pretext = [
        "first line",
        "last thing before tls starts",
        "STARTTLS",
    ]

    posttext = [
        "first thing after tls started",
        "last thing ever",
    ]

    def connectionMade(self):
        for l in self.pretext:
            self.sendLine(l)

    def lineReceived(self, line):
        if line == "READY":
            self.transport.startTLS(ClientTLSContext(), self.factory.client)
            for l in self.posttext:
                self.sendLine(l)
            self.transport.loseConnection()


class ServerTLSContext(ssl.DefaultOpenSSLContextFactory):
    # Server-side context, forced to TLSv1 to match ClientTLSContext.
    isClient = 0

    def __init__(self, *args, **kw):
        kw['sslmethod'] = SSL.TLSv1_METHOD
        ssl.DefaultOpenSSLContextFactory.__init__(self, *args, **kw)


class LineCollector(basic.LineReceiver):
    # Records received lines on its factory.  On STARTTLS it replies READY
    # and either upgrades to TLS (doTLS) or drops to raw mode so the
    # still-encrypted bytes can be captured in factory.rawdata.

    def __init__(self, doTLS):
        self.doTLS = doTLS

    def connectionMade(self):
        self.factory.rawdata = ''
        self.factory.lines = []

    def lineReceived(self, line):
        self.factory.lines.append(line)
        if line == 'STARTTLS':
            self.sendLine('READY')
            if self.doTLS:
                ctx = ServerTLSContext(
                    privateKeyFileName=certPath,
                    certificateFileName=certPath,
                )
                self.transport.startTLS(ctx, self.factory.server)
            else:
                self.setRawMode()

    def rawDataReceived(self, data):
        self.factory.rawdata += data
        self.factory.done = 1

    def connectionLost(self, reason):
        self.factory.done = 1


class TLSTestCase(unittest.TestCase):
    def testTLS(self):
        # Client drives STARTTLS; server upgrades.  All lines (pre and
        # post upgrade) should arrive decrypted.
        cf = protocol.ClientFactory()
        cf.protocol = UnintelligentProtocol
        cf.client = 1

        sf = protocol.ServerFactory()
        sf.protocol = lambda: LineCollector(1)
        sf.done = 0
        sf.server = 1

        port = reactor.listenTCP(0, sf)
        portNo = port.getHost()[2]

        reactor.connectTCP('0.0.0.0', portNo, cf)

        # Spin the reactor manually until the server reports completion.
        i = 0
        while i < 5000 and not sf.done:
            reactor.iterate(0.01)
            i += 1
        self.failUnless(sf.done, "Never finished reading all lines")
        self.assertEquals(
            sf.lines,
            UnintelligentProtocol.pretext + UnintelligentProtocol.posttext
        )

    def testUnTLS(self):
        # Server does NOT upgrade: only the pretext arrives as lines and
        # the post-upgrade traffic shows up as raw (encrypted) bytes.
        cf = protocol.ClientFactory()
        cf.protocol = UnintelligentProtocol
        cf.client = 1

        sf = protocol.ServerFactory()
        sf.protocol = lambda: LineCollector(0)
        sf.done = 0
        sf.server = 1

        port = reactor.listenTCP(0, sf)
        portNo = port.getHost()[2]

        reactor.connectTCP('0.0.0.0', portNo, cf)

        i = 0
        while i < 5000 and not sf.done:
            reactor.iterate(0.01)
            i += 1
        self.failUnless(sf.done, "Never finished reading all lines")
        self.assertEquals(
            sf.lines,
            UnintelligentProtocol.pretext
        )
        self.failUnless(sf.rawdata, "No encrypted bytes received")

    def testBackwardsTLS(self):
        # Roles reversed: the server sends pretext/posttext and the client
        # is the collector that upgrades on STARTTLS.
        cf = protocol.ClientFactory()
        cf.protocol = lambda: LineCollector(1)
        cf.server = 0
        cf.done = 0

        sf = protocol.ServerFactory()
        sf.protocol = UnintelligentProtocol
        sf.client = 0

        port = reactor.listenTCP(0, sf)
        portNo = port.getHost()[2]

        reactor.connectTCP('0.0.0.0', portNo, cf)

        i = 0
        while i < 2000 and not cf.done:
            reactor.iterate(0.01)
            i += 1
        self.failUnless(cf.done, "Never finished reading all lines")
        self.assertEquals(
            cf.lines,
            UnintelligentProtocol.pretext + UnintelligentProtocol.posttext
        )


class SingleLineServerProtocol(protocol.Protocol):
    # Writes one POP3-style greeting line on connect.
    def connectionMade(self):
        self.transport.write("+OK <some crap>\r\n")


class RecordingClientProtocol(protocol.Protocol):
    def connectionMade(self):
        # NOTE(review): sets self.buffer, but dataReceived appends to
        # self.factory.buffer (set externally) — kept as-is.
        self.buffer = []

    def dataReceived(self, data):
        self.factory.buffer.append(data)


class BufferingTestCase(unittest.TestCase):
    def testOpenSSLBuffering(self):
        # A single server write over SSL should arrive as one chunk.
        server = protocol.ServerFactory()
        client = protocol.ClientFactory()
        server.protocol = SingleLineServerProtocol
        client.protocol = RecordingClientProtocol
        client.buffer = []

        from twisted.internet.ssl import DefaultOpenSSLContextFactory
        from twisted.internet.ssl import ClientContextFactory
        sCTX = DefaultOpenSSLContextFactory(certPath, certPath)
        cCTX = ClientContextFactory()

        port = reactor.listenSSL(0, server, sCTX, interface='127.0.0.1')
        reactor.connectSSL('127.0.0.1', port.getHost()[2], client, cCTX)

        for i in range(100):
            reactor.iterate()

        self.assertEquals(client.buffer, ["+OK <some crap>\r\n"])
    testOpenSSLBuffering.todo = "This wasn't working before anyway."
On January 24, 2019, His Holiness Patriarch Mor Ignatius Aphrem II presided over a thanksgiving prayer at Oum Al-Nour (Mother of Light) Church in Ankawa, Iraq. Their Eminences: Mor Nicodemus Daoud Sharaf, Archbishop of Mosul, Kirkuk, Kurdistan and Environs; Mor Timotheos Moussa Al-Shamani, Archbishop of the Archdiocese of Mor Mattai Monastery; and Mor Timotheos Matta Al-Khouri, Patriarchal Vicar for the Patriarchal Archdiocese of Damascus, along with the clergy and a crowd of faithful, attended the prayer. At the end, His Holiness blessed the faithful. A reception in honor of His Holiness was then organized by the parish.
from setuptools import setup, find_packages  # setuptools, not distutils
from codecs import open  # consistent text encoding on all platforms
from os import path

here = path.abspath(path.dirname(__file__))

# The README doubles as the PyPI long description.
with open(path.join(here, 'README.md'), encoding='utf-8') as readme:
    long_description = readme.read()

setup(
    name='py_versioning',

    # PEP 440-compliant version; single-sourced here.
    # See http://packaging.python.org/en/latest/tutorial.html#version
    version='1.0.0',

    description='Set of tools helping in versioning projects',
    long_description=long_description,

    # Project homepage.
    url='https://github.com/sim1234/Versioning',

    # Author information.
    author='Szymon Zmilczak & Jaroslaw Szymla',
    author_email='szymon.zmilczak@gmail.com',

    license='GPLv2',

    # Trove classifiers: https://pypi.python.org/pypi?%3Aaction=list_classifiers
    classifiers=[
        # Maturity: 3 - Alpha, 4 - Beta, 5 - Production/Stable.
        'Development Status :: 3 - Alpha',

        # Target audience.
        'Intended Audience :: Developers',

        # Must match the "license" field above.
        'License :: OSI Approved :: GNU General Public License v2 (GPLv2)',

        'Topic :: Software Development :: Version Control',
        'Topic :: System :: Filesystems',
        'Topic :: Database',

        # Supported Python versions (Python 2 only).
        'Programming Language :: Python :: 2 :: Only',
        'Programming Language :: Python :: 2.7',
    ],

    keywords='version control versioning py_versioning pyversioning',

    # Auto-discover packages, excluding non-package directories.
    packages=find_packages(exclude=['contrib', 'docs', 'tests*']),

    # Run-time dependencies installed by pip alongside the project.
    install_requires=['sqlalchemy'],

    # Optional dependency groups: pip install -e .[dev,test]
    extras_require={
        'dev': ['check-manifest'],
        'test': ['coverage'],
        'django': ['django'],
    },

    # In-package data files (none currently).
    package_data={
    },

    # Data files installed outside the package (none currently).
    data_files=[
    ],

    # Console entry point; preferred over the "scripts" keyword for
    # cross-platform executables.
    entry_points={
        'console_scripts': [
            'py_versioning=py_versioning:main',
        ],
    },
)
Nearly fell off my chair laughing! These are GREAT! Thanks for posting them.
from igraph import Graph from seed import generate_seed_graph from query import query_seed class EGraph(object): def __init__(self, path): self.path = path self._seed_graph = None self._g = None def query_node(self, name): raise NotImplementedError @property def origin_graph(self): if not self._g: g = Graph.Read_Ncol(self.path, directed=False) self._g = g.simplify() return self._g @property def seed_graph(self): raise NotImplementedError class FBEgoGraph(EGraph): name = 'public' def query_node(self, node_name, n_attribute): node = self.origin_graph.vs.find(name=node_name) result = [{'name': n['name'], 'degree': n.degree()} for n in node.neighbors()] return result @property def seed_graph(self): if not self._seed_graph: self._seed_graph = generate_seed_graph(self.origin_graph, 100) return self._seed_graph class RemoteGraph(EGraph): name = 'public' def query_node(self, node_name, n_attribute): node = self.origin_graph.vs.find(name=node_name) result = [{'name': n['name'], 'degree': n.degree()} for n in node.neighbors()] return result @property def seed_graph(self): if not self._seed_graph: self._seed_graph = query_seed()[0] return self._seed_graph
The European Parliament today adopted a report on gender equality and taxation. Tax policies still affect men and women differently. Although conservatives and liberals tried to water down the report and opposed important reforms of the current tax systems which would contribute to greater gender equality in wealth and on the labour market, its progressive and feminist policy proposals make the report a milestone in the S&D Group’s campaign to have fairer and more gender sensitive taxation.
from os import getpid

from dask.bag import from_sequence
import dask.config
from dask.distributed import Client

from eliot import log_call, to_file
from eliot.dask import compute_with_trace


@log_call
def multiply(x, y=7):
    """Return x * y (y defaults to 7); call is traced by Eliot."""
    return x * y


@log_call
def add(x, y):
    """Return x + y; call is traced by Eliot."""
    return x + y


@log_call
def main_computation():
    """Run a small Dask bag computation with Eliot tracing enabled."""
    bag = from_sequence([1, 2, 3])
    bag = bag.map(multiply).fold(add)
    return compute_with_trace(bag)[0]  # instead of dask.compute(bag)


def _start_logging():
    # Name log file based on PID, so different processes don't stomp on each
    # others' logfiles:
    to_file(open("{}.log".format(getpid()), "a"))


def main():
    """Start local Dask workers with Eliot logging and run the computation."""
    # Setup logging on the main process:
    _start_logging()
    # Start three worker processes on the local machine:
    client = Client(n_workers=3, threads_per_worker=1)
    # Setup Eliot logging on each worker process:
    client.run(_start_logging)
    # Run the Dask computation in the worker processes:
    result = main_computation()
    print("Result:", result)


if __name__ == '__main__':
    # Call main() directly. The previous code re-imported this module under
    # the hard-coded name "dask_eliot" and called dask_eliot.main(), which
    # executed the module body a second time and failed whenever the file
    # was saved under any other name.
    main()
The constant demands of caring for a chronically ill or impaired loved one can be overwhelming. The VNA Adult Day Services give families a break and offer added support. Our services are designed to meet the needs of adults 18 and over, with two locations in Erie and Niagara counties. Clients attend on a planned basis, ranging from one to five days, depending on their needs. A wide variety of services and activities help individuals maintain their independence and promote feelings of self worth. Programs receive partial funding from the Niagara County Office for the Aging, Erie County Department of Senior Services and the NYS Office for the Aging.
#!./pyenv/bin/python import sys, json, numpy as np from sklearn.preprocessing import LabelEncoder from sklearn.ensemble import RandomForestRegressor file_nos, x, y_oil, y_gas, y_water = [], [], [], [], [] files = {} idx = 0 f = open('predict_input.txt') lines = f.read().split('\n') #for line in sys.stdin: for line in lines: tokens = line.strip().lower().split('\t') if tokens[0] in files: continue else: files[tokens[0]] = True if len(tokens) == 1: continue #print 'Parsing well ' + tokens[0] y_oil.append(float(tokens[1])) #bbls_oil y_gas.append(float(tokens[2])) #mcf_gas y_water.append(float(tokens[3])) #bbls_water #footages, fieldname, producedpools, wellbore, welltype, ctb, perfs, spacing x.append((tokens[4], tokens[5], tokens[6], tokens[7], tokens[8], tokens[9], tokens[10], tokens[11])) file_nos.append(tokens[0]) #for token in tokens[12:]: #if token != '': x[idx].append(float(token)) idx += 1 X = np.asarray(x) #LabelEncode footages, fieldname, producedpools, wellbore, welltype, ctb, perfs, spacing for idx in xrange(0, 8): X[:,idx] = LabelEncoder().fit_transform(X[:,idx]) m_oil, m_gas, m_water = RandomForestRegressor(), RandomForestRegressor(), RandomForestRegressor() m_oil.fit(X,y_oil) m_gas.fit(X,y_gas) m_water.fit(X,y_water) for idx,x in enumerate(X): print '\t'.join([file_nos[idx], str(x[0]), str(m_oil.predict(x)[0]), str(x[1]), str(m_gas.predict(x)[0]), str(x[1]), str(m_water.predict(x)[0])])
On March 15, employees and Top Management representatives from Luxoft and Excelian met for the third year in a row at Luxoft's office in New York City to work with the “Bottomless Closet” Foundation and help a group of disadvantaged women transition from unemployment and public assistance to work. At Luxoft, increasing gender equality, helping more women get back into the job market, and encouraging women to join the IT industry are some of our biggest goals. Acting as entrepreneurs and mentors gives us a unique chance to share the skills and knowledge our guests are looking for, in order to build their own career paths. Our recent activities have promoted multiple opportunities for women, and we are very excited to share stories of how our leaders planned their own careers, their thoughts on what it takes to become a successful leader, the impact of their positions in our organization, and how their decisions contribute to the running of our business. This unique meeting featured powerful women within our organization acting as role models, giving them a chance to motivate, encourage, and mentor. Our guests had the chance to learn what a well-organized recruitment process looks like, confront their own fears and uncertainty regarding interviews, learn self-presentation methods, and familiarize themselves with the basic rules of the recruitment process. We are proud to be able to host this event together with “Bottomless Closet” and empower women to take charge of their future!
#-*- coding: utf-8 -*- ######################################################################## import logging import datetime from datetime import datetime import time import os import sys lis=0 class system(object): """""" #---------------------------------------------------------------------- def __init__(self): """Constructor""" def log(self): # filepath=raw_input(u'输入文件') if not os.path.isdir('c:\\TEST_log\\test\\'): os.makedirs('c:\\TEST_log\\test\\') logFileName = 'c:\\TEST_log\\test\\%s.log' %time.strftime("%m-%d-%H-%M-%S",time.localtime(time.time())) logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', filename=logFileName, level=logging.DEBUG) ################################################################################################# #定义一个StreamHandler,将INFO级别或更高的日志信息打印到标准错误,并将其添加到当前的日志处理对象# console = logging.StreamHandler() console.setLevel(logging.INFO) formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s') console.setFormatter(formatter) logging.getLogger('').addHandler(console) ################################################################################################# logging.info('####################') logging.info('# Begin Test #') logging.info('####################') def FileName(self): global lis lis=lis+1 filesave='c:\\screenshot\\' picname= '%s.png'%(datetime.utcnow().strftime('%m-%d-') + time.strftime('%H',time.localtime(time.time()))+datetime.utcnow().strftime('.%M.%S.')+datetime.utcnow().strftime('%f')[:3]) # print filesave # picname= '%s.png'%(datetime.utcnow().strftime('%m-%d-%H:%M:%S:') + datetime.utcnow().strftime('%f')[:3]) # print picname filename=filesave+picname # print filename return filename def strB2Q(self,ustring): """半角转全角""" rstring = "" for uchar in ustring: inside_code=ord(uchar) if inside_code == 32: #半角空格直接转化 inside_code = 12288 elif inside_code >= 32 and inside_code <= 126: #半角字符(除空格)根据关系转化 inside_code += 65248 rstring += unichr(inside_code) return rstring
The following is part 9 in our 10-part series entitled “10 Steps Towards Launch!” by Terrel Transtrum, President/Founder of ServiceQuest. If you are thinking about starting a new direct selling company, contact us today! The ninth of ten business segments that make up the foundation for a successful launch of your MLM business is MARKETING. MLM Marketing is often overlooked, yet it is at the core of the successful MLM. Here is a closer look at the most important MLM best practices for MLM Marketing. Product Pricing – We have a clear understanding and analysis of our product margins, the features and benefits that hold up under pricing scrutiny, and a keen understanding of the consumer’s value perception and how to sustain that perception. Back Office – We have an online back office that contains all of our support materials and has analytics to tell distributors / consultants every useful thing about their business, ranging from news and company information to volumes and computations of commissions.
from functools import partial
from glob import glob
from itertools import chain
from jsbuild.dependency import Dependency
from jsbuild.logging import logger
from jsbuild.manifest import Manifest
from jsbuild.maps import get_class_by_format
from jsbuild import templates
import os.path
import re

# Path helpers for handling '../' (back-directory) prefixes in dependency refs.
clean_backdir = lambda path: re.sub('^(\.\.\/?)+','',path)  # strip leading ../ run
count_backdir = lambda path: get_backdir(path).count('../')  # number of ../ steps
has_backdir = lambda path: re.match('^\.\.',path) and True or False  # starts with '..'?
join_path = lambda *args: os.path.normpath(os.path.join(*args))

def get_backdir(path):
    # Return the leading run of '../' components of *path* (normalized), or ''.
    search = re.search('((?:\.\.\/)+)',path)
    return os.path.normpath(search.groups()[0]) if search else ''

class Index(Dependency):
    """A dependency that is itself a manifest listing further dependencies."""

    def __init__(self,*args,**kwargs):
        super(Index,self).__init__(*args,**kwargs)
        # Lazily populated caches; None means "not loaded yet".
        self._buffer_ = None
        self._manifest_ = None
        self._dependencies_ = None
        self.to_call = []  # filenames for which a main-call stub is appended

    @property
    def buffer(self):
        # Raw file contents, read once and cached.
        if not self._buffer_:
            self._buffer_ = self.read()
        return self._buffer_

    @property
    def content(self):
        """Concatenated content of all dependencies, wrapped in templates."""
        # Walk up to the root index, which owns the package name.
        root = self
        while root.index:
            root = root.index
        name = root.manifest.name
        # Child indexes with their own 'filename' config are written to disk
        # via put() instead of inlined; put() returns None, hence "or ''".
        content = '\n'.join(map(lambda dep: dep.content if not isinstance(dep,Index) or not dep.get_config('filename',False) else dep.put() or '', self.dependencies))
        if not self.index:
            # Only the root index wraps everything in the package template.
            content = templates.package%{ "name":name, "content":content }
        for flname in self.to_call:
            content = '%s\n%s'%(content,templates.maincall%{ "index_name":root.manifest.name, "filename":flname})
        # Apply configured regex replacements over the final output.
        for rpl in self.get_config('replacements',[]):
            content = re.sub(rpl['pattern'],rpl['replacement']%self.get_config('dict',{}),content,flags=re.DOTALL)
        return content

    @property
    def dependencies(self):
        # Child dependencies, loaded from the manifest on first access.
        if self._dependencies_ == None:
            self.import_manifest()
        return self._dependencies_

    @property
    def manifest(self):
        # NOTE(review): parse() is declared below as parse(self, content) but
        # is called here with no argument -- subclasses presumably override it
        # with a compatible signature; confirm.
        if self._manifest_ == None:
            self._manifest_ = Manifest(self.parse())
        return self._manifest_

    def get_config(self,key,default=None):
        # NOTE(review): the and/or idiom returns *default* whenever the stored
        # value is falsy ([], 0, '', False), not only when the key is missing.
        return self.manifest.build.__contains__(key) and self.manifest['build'][key] or default

    @property
    def source_dir(self):
        # Absolute source directory: working_dir joined with the 'dir' config.
        return os.path.normpath(os.path.join(self.working_dir,self.get_config('dir','')))

    @property
    def path(self):
        """Client-side path of this index, resolved against parent indexes."""
        logger.debug('Trying to find client-side path of "%s" (:working_dir %s :source_dir %s)'%(self.src,self.working_dir,self.source_dir))
        if not self.index:
            return ''
        # Climb parents while the '../' reference still escapes the parent dir.
        parent = self.index
        parent_ref = get_backdir(self.src)
        while parent and has_backdir(parent_ref):
            parent_dir = join_path(os.path.dirname(parent.src) if parent.index else '',parent.get_config('dir',''))
            parent_dir_merged = join_path(clean_backdir(parent_dir),parent_ref)
            # Stop once the merged path no longer points above the parent.
            if len(parent_dir_merged)>0 and not parent_dir_merged=='.' and (not has_backdir(parent_dir_merged)):
                break
            parent_ref = join_path(parent_dir if parent.index and parent.index.index else clean_backdir(parent_dir),parent_ref)
            parent = parent.index
        path = join_path(parent.path if parent else '',clean_backdir(os.path.dirname(self.src)))
        return path if path!='.' else ''

    def import_manifest(self):
        """Populate self._dependencies_ from the manifest's 'files' globs."""
        logger.debug('Importing manifest document')
        self._dependencies_ = []
        sdir = self.source_dir
        files = [ el for el in map(partial(lambda path: os.path.join(sdir,path)),self.get_config('files',[])) ]
        for depinfo in chain(*map(glob,files)):
            # Store paths relative to source_dir (+1 strips the separator).
            src = depinfo if not self.source_dir else depinfo[len(self.source_dir)+1:]
            dp = get_class_by_format(src)(index=self)
            dp.src = src
            self.dependencies.append(dp)

    def parse(self,content):
        # Abstract: concrete index formats implement the actual parsing.
        raise Exception('Not Implemented')

    def put(self):
        """Write this index's rendered content to its configured filename."""
        filename = os.path.normpath(os.path.join(self.working_dir, self.get_config('filename')))
        with open('%s'%filename,'w',encoding='utf-8') as fl:
            fl.write(self.content)
        logger.info('Writing %s OK'%filename)
Take some time to relive the special memories from the 2015 dinner. Your favorite photos can be easily ordered with your credit card. To purchase photos from our 2015 celebration, visit Enjoy Photos and log in using the following information.
#!/usr/bin/env python """ Basic Setup Script This code is part of the Arc-flow Vector Packing Solver (VPSolver). Copyright (C) 2013-2015, Filipe Brandao Faculdade de Ciencias, Universidade do Porto Porto, Portugal. All rights reserved. E-mail: <fdabrandao@dcc.fc.up.pt>. This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. """ from os import system from setuptools import setup from setuptools.command.install import install class CustomInstallCommand(install): """ Custom Install Command """ def run(self): try: system('/bin/bash ./compile.sh') system('/bin/cp bin/* ' + self.install_scripts) except IOError: pass install.run(self) setup( name='VPSolver', version='1.1', description='Cutting and Packing Exact Solver Based on an Arc-Flow Formulation', author='', author_email='', packages=['pyvpsolver'], include_package_data=True, scripts=[ 'scripts/vpsolver_gurobi.sh', 'scripts/vpsolver_cplex.sh', 'scripts/vpsolver_coinor.sh', 'scripts/vpsolver_glpk.sh', 'scripts/vpsolver_lpsolve.sh', 'scripts/vpsolver_scip.sh', ], url='', license='LICENSE', long_description=open('README').read(), keywords='', classifiers=[ 'Development Status :: 1 - Planning', 'Intended Audience :: Science/Research', 'License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)', 'Topic :: Scientific/Engineering' ], cmdclass = { 'install' : CustomInstallCommand }, use_2to3 = True )
Mondoñedo Valley is the ideal place to spend a few days with friends or with your family. It allows you to get away from the daily routine with an incomparable landscape located in Spanish territories. It is specifically located in the heart of the mountains, between Lourenzá and A Toxiza, and the Enfesta hills. Mondoñedo Valley offers an historic tour through the medieval, modern and contemporary world in Galicia. In addition, it allows you to explore nature and it has an assorted and delicious gastronomy. In Mondoñedo Valley we offer to our visitors the opportunity to discover and enjoy tourism in an idyllic environment located in the province of Lugo. That way, we make available to our visitors an enterprising project focused on offering a different way of traveling and enjoying tourism in Galicia. Get to know Mondoñedo, its cultural, historic, gastronomic and artisan tradition through a series of immersive experiences. In Mondoñedo we took care of designing an exclusive and innovative activity program suitable for all audiences, with which it is possible to go in depth into the full potential of these Galician lands. You will be able to fully enjoy the natural and scenic geography of Lugo through our immersive technologies, which will be a unique trip to our visitors, including 360º videos, virtual reality or drones. If you dream of knowing an incredible territory where you can stroll and get lost through time and history, Mondoñedo Valley is your place. Come and discover this amazing millennial place!
from Globals import InitializeClass
# from AccessControl import ClassSecurityInfo
from Products.ZenRelations.RelSchema import *
from Products.ZenModel.DeviceComponent import DeviceComponent
from Products.ZenModel.ManagedEntity import ManagedEntity
from Products.ZenUtils.Utils import convToUnits
from Products.ZenModel.ZenossSecurity import ZEN_VIEW, ZEN_CHANGE_SETTINGS

# Shared keyword arguments for every entry in _properties below.
_kw = dict(mode='w')


class BladeChassisInterconnect(DeviceComponent, ManagedEntity):
    "Blade Chassis Interconnect Information"

    portal_type = meta_type = 'BladeChassisInterconnect'

    # Modeled attributes (populated by the modeler plugin).
    bciNumber = -1
    bciType = ""
    bciProductName = ""
    bciStatus = ""
    bciMgmtIp = ""
    bciSerialNum = ""
    bciPartNumber = ""
    bciSparePartNumber = ""

    _properties = (
        dict(id='bciNumber', type='int', **_kw),
        dict(id='bciType', type='string', **_kw),
        dict(id='bciProductName', type='string', **_kw),
        dict(id='bciStatus', type='string', **_kw),
        dict(id='bciMgmtIp', type='string', **_kw),
        dict(id='bciSerialNum', type='string', **_kw),
        dict(id='bciPartNumber', type='string', **_kw),
        dict(id='bciSparePartNumber', type='string', **_kw)
    )

    # Each interconnect is contained by exactly one blade chassis.
    _relations = (
        ('bladechassis', ToOne(ToManyCont,
            'ZenPacks.community.HPBladeChassis.BladeChassis',
            'bladechassisinterconnects')),
    )

    # Screen action bindings (and tab definitions)
    factory_type_information = (
        {
            'id'             : 'BladeChassisInterconnect',
            'meta_type'      : 'Blade Chassis Interconnect',
            'description'    : 'Blade Chassis Interconnect Description',
            'icon'           : 'Device_icon.gif',
            'product'        : 'BladeServers',
            'factory'        : 'manage_addBladeServer',
            'immediate_view' : 'bladeserverPerformance',
            'actions'        : (
                { 'id'          : 'perf'
                , 'name'        : 'perf'
                , 'action'      : 'bladeserverPerformance'
                , 'permissions' : (ZEN_VIEW, )
                },
                { 'id'          : 'templates'
                , 'name'        : 'Templates'
                , 'action'      : 'objTemplates'
                , 'permissions' : (ZEN_CHANGE_SETTINGS, )
                },
            )
        },
    )

    def device(self):
        """Return the device this component belongs to (its blade chassis)."""
        return self.bladechassis()

    def managedDeviceLink(self):
        """Return an HTML link to the monitored device matching this
        interconnect's product name, or None if no such device exists.

        Bug fix: this previously read ``self.bsProductName`` -- an attribute
        of the BladeServer component class, which does not exist on this
        class and raised AttributeError; this class's attribute is
        ``bciProductName``.
        """
        from Products.ZenModel.ZenModelRM import ZenModelRM
        d = self.getDmdRoot("Devices").findDevice(self.bciProductName)
        if d:
            return ZenModelRM.urlLink(d, 'link')
        return None

    def snmpIgnore(self):
        """Skip SNMP monitoring when the base class says so or when this
        component was never indexed (snmpindex < 0)."""
        return ManagedEntity.snmpIgnore(self) or self.snmpindex < 0


InitializeClass(BladeChassisInterconnect)
Having returned to Pitalito is the first step to victory for these families. They imagine a community different from the one they now have: a school, a health clinic, farms planted with cassava and corn. A different Pitalito—one with a future, full of life, full of hope. This is the Pitalito that the community wants.
##
## This file is part of the libsigrokdecode project.
##
## Copyright (C) 2015 Paul Evans <leonerd@leonerd.org.uk>
##
## This program is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation; either version 2 of the License, or
## (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
## GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with this program; if not, see <http://www.gnu.org/licenses/>.
##

import re
import sigrokdecode as srd

def _decode_intensity(val):
    """Map a 4-bit intensity value to 'min'/'max' or the raw level."""
    intensity = val & 0x0f
    if intensity == 0:
        return 'min'
    elif intensity == 15:
        return 'max'
    else:
        return intensity

# Control registers: address -> [name, value formatter].
registers = {
    0x00: ['No-op', lambda _: ''],
    0x09: ['Decode', lambda v: '0b{:08b}'.format(v)],
    0x0A: ['Intensity', _decode_intensity],
    0x0B: ['Scan limit', lambda v: 1 + v],
    0x0C: ['Shutdown', lambda v: 'off' if v else 'on'],
    0x0F: ['Display test', lambda v: 'on' if v else 'off']
}

ann_reg, ann_digit, ann_warning = range(3)

class Decoder(srd.Decoder):
    api_version = 3
    id = 'max7219'
    name = 'MAX7219'
    longname = 'Maxim MAX7219/MAX7221'
    desc = 'Maxim MAX72xx series 8-digit LED display driver.'
    license = 'gplv2+'
    inputs = ['spi']
    outputs = []
    tags = ['Display']
    annotations = (
        ('register', 'Register write'),
        ('digit', 'Digit displayed'),
        ('warning', 'Warning'),
    )
    annotation_rows = (
        ('commands', 'Commands', (ann_reg, ann_digit)),
        ('warnings', 'Warnings', (ann_warning,)),
    )

    def __init__(self):
        self.reset()

    def reset(self):
        # Initialize all per-session decoding state here. In particular,
        # cs_asserted must exist before the first CS-CHANGE packet arrives:
        # previously reset() was empty, so a DATA packet seen first raised
        # AttributeError on self.cs_asserted.
        self.pos = 0
        self.cs_start = 0
        self.cs_asserted = False

    def start(self):
        self.out_ann = self.register(srd.OUTPUT_ANN)
        self.pos = 0
        self.cs_start = 0

    def putreg(self, ss, es, reg, value):
        """Annotate a control-register write."""
        self.put(ss, es, self.out_ann, [ann_reg, ['%s: %s' % (reg, value)]])

    def putdigit(self, ss, es, digit, value):
        """Annotate a digit-register write."""
        self.put(ss, es, self.out_ann, [ann_digit, ['Digit %d: %02X' % (digit, value)]])

    def putwarn(self, ss, es, message):
        """Annotate a protocol warning."""
        self.put(ss, es, self.out_ann, [ann_warning, [message]])

    def decode(self, ss, es, data):
        ptype, mosi, _ = data

        if ptype == 'DATA':
            # Ignore data clocked in while CS# is not asserted.
            if not self.cs_asserted:
                return

            if self.pos == 0:
                # First byte of a write: the register address.
                self.addr = mosi
                self.addr_start = ss
            elif self.pos == 1:
                # Second byte: the register value. Addresses 1..8 are the
                # digit registers; others are control registers.
                if self.addr >= 1 and self.addr <= 8:
                    self.putdigit(self.addr_start, es, self.addr, mosi)
                elif self.addr in registers:
                    name, decoder = registers[self.addr]
                    self.putreg(self.addr_start, es, name, decoder(mosi))
                else:
                    self.putwarn(self.addr_start, es,
                        'Unknown register %02X' % (self.addr))

            self.pos += 1
        elif ptype == 'CS-CHANGE':
            self.cs_asserted = mosi
            if self.cs_asserted:
                # Start of a new write transaction.
                self.pos = 0
                self.cs_start = ss
            else:
                if self.pos == 1:
                    # Don't warn if pos=0 so that CS# glitches don't appear
                    # as spurious warnings.
                    self.putwarn(self.cs_start, es, 'Short write')
                elif self.pos > 2:
                    self.putwarn(self.cs_start, es, 'Overlong write')
In 1957, Remo Belli invented the first successful synthetic Mylar drumheads, today's industry standard. Six decades on, drummers the world over rely on the tone response and durability of Remo drumheads. The Clear Powerstroke III features a thin underlay at the outer edge of the head to subtly dampen unwanted overtones. This creates a near-perfect balance of response and tone control that has quickly made Powerstroke III one of Remo's most preferred drumheads. The clear drumhead is Remo's brightest and most open sounding head.
# -*- coding: utf-8 -*- from __future__ import absolute_import from __future__ import division from __future__ import print_function import gc import pytest from _pytest.main import EXIT_NOTESTSCOLLECTED def test_simple_unittest(testdir): testpath = testdir.makepyfile( """ import unittest class MyTestCase(unittest.TestCase): def testpassing(self): self.assertEqual('foo', 'foo') def test_failing(self): self.assertEqual('foo', 'bar') """ ) reprec = testdir.inline_run(testpath) assert reprec.matchreport("testpassing").passed assert reprec.matchreport("test_failing").failed def test_runTest_method(testdir): testdir.makepyfile( """ import unittest class MyTestCaseWithRunTest(unittest.TestCase): def runTest(self): self.assertEqual('foo', 'foo') class MyTestCaseWithoutRunTest(unittest.TestCase): def runTest(self): self.assertEqual('foo', 'foo') def test_something(self): pass """ ) result = testdir.runpytest("-v") result.stdout.fnmatch_lines( """ *MyTestCaseWithRunTest::runTest* *MyTestCaseWithoutRunTest::test_something* *2 passed* """ ) def test_isclasscheck_issue53(testdir): testpath = testdir.makepyfile( """ import unittest class _E(object): def __getattr__(self, tag): pass E = _E() """ ) result = testdir.runpytest(testpath) assert result.ret == EXIT_NOTESTSCOLLECTED def test_setup(testdir): testpath = testdir.makepyfile( """ import unittest class MyTestCase(unittest.TestCase): def setUp(self): self.foo = 1 def setup_method(self, method): self.foo2 = 1 def test_both(self): self.assertEqual(1, self.foo) assert self.foo2 == 1 def teardown_method(self, method): assert 0, "42" """ ) reprec = testdir.inline_run("-s", testpath) assert reprec.matchreport("test_both", when="call").passed rep = reprec.matchreport("test_both", when="teardown") assert rep.failed and "42" in str(rep.longrepr) def test_setUpModule(testdir): testpath = testdir.makepyfile( """ values = [] def setUpModule(): values.append(1) def tearDownModule(): del values[0] def test_hello(): assert values == [1] def 
test_world(): assert values == [1] """ ) result = testdir.runpytest(testpath) result.stdout.fnmatch_lines(["*2 passed*"]) def test_setUpModule_failing_no_teardown(testdir): testpath = testdir.makepyfile( """ values = [] def setUpModule(): 0/0 def tearDownModule(): values.append(1) def test_hello(): pass """ ) reprec = testdir.inline_run(testpath) reprec.assertoutcome(passed=0, failed=1) call = reprec.getcalls("pytest_runtest_setup")[0] assert not call.item.module.values def test_new_instances(testdir): testpath = testdir.makepyfile( """ import unittest class MyTestCase(unittest.TestCase): def test_func1(self): self.x = 2 def test_func2(self): assert not hasattr(self, 'x') """ ) reprec = testdir.inline_run(testpath) reprec.assertoutcome(passed=2) def test_function_item_obj_is_instance(testdir): """item.obj should be a bound method on unittest.TestCase function items (#5390).""" testdir.makeconftest( """ def pytest_runtest_makereport(item, call): if call.when == 'call': class_ = item.parent.obj assert isinstance(item.obj.__self__, class_) """ ) testdir.makepyfile( """ import unittest class Test(unittest.TestCase): def test_foo(self): pass """ ) result = testdir.runpytest_inprocess() result.stdout.fnmatch_lines(["* 1 passed in*"]) def test_teardown(testdir): testpath = testdir.makepyfile( """ import unittest class MyTestCase(unittest.TestCase): values = [] def test_one(self): pass def tearDown(self): self.values.append(None) class Second(unittest.TestCase): def test_check(self): self.assertEqual(MyTestCase.values, [None]) """ ) reprec = testdir.inline_run(testpath) passed, skipped, failed = reprec.countoutcomes() assert failed == 0, failed assert passed == 2 assert passed + skipped + failed == 2 def test_teardown_issue1649(testdir): """ Are TestCase objects cleaned up? Often unittest TestCase objects set attributes that are large and expensive during setUp. The TestCase will not be cleaned up if the test fails, because it would then exist in the stackframe. 
""" testpath = testdir.makepyfile( """ import unittest class TestCaseObjectsShouldBeCleanedUp(unittest.TestCase): def setUp(self): self.an_expensive_object = 1 def test_demo(self): pass """ ) testdir.inline_run("-s", testpath) gc.collect() for obj in gc.get_objects(): assert type(obj).__name__ != "TestCaseObjectsShouldBeCleanedUp" def test_unittest_skip_issue148(testdir): testpath = testdir.makepyfile( """ import unittest @unittest.skip("hello") class MyTestCase(unittest.TestCase): @classmethod def setUpClass(self): xxx def test_one(self): pass @classmethod def tearDownClass(self): xxx """ ) reprec = testdir.inline_run(testpath) reprec.assertoutcome(skipped=1) def test_method_and_teardown_failing_reporting(testdir): testdir.makepyfile( """ import unittest, pytest class TC(unittest.TestCase): def tearDown(self): assert 0, "down1" def test_method(self): assert False, "down2" """ ) result = testdir.runpytest("-s") assert result.ret == 1 result.stdout.fnmatch_lines( [ "*tearDown*", "*assert 0*", "*test_method*", "*assert False*", "*1 failed*1 error*", ] ) def test_setup_failure_is_shown(testdir): testdir.makepyfile( """ import unittest import pytest class TC(unittest.TestCase): def setUp(self): assert 0, "down1" def test_method(self): print("never42") xyz """ ) result = testdir.runpytest("-s") assert result.ret == 1 result.stdout.fnmatch_lines(["*setUp*", "*assert 0*down1*", "*1 failed*"]) assert "never42" not in result.stdout.str() def test_setup_setUpClass(testdir): testpath = testdir.makepyfile( """ import unittest import pytest class MyTestCase(unittest.TestCase): x = 0 @classmethod def setUpClass(cls): cls.x += 1 def test_func1(self): assert self.x == 1 def test_func2(self): assert self.x == 1 @classmethod def tearDownClass(cls): cls.x -= 1 def test_teareddown(): assert MyTestCase.x == 0 """ ) reprec = testdir.inline_run(testpath) reprec.assertoutcome(passed=3) def test_setup_class(testdir): testpath = testdir.makepyfile( """ import unittest import pytest class 
MyTestCase(unittest.TestCase): x = 0 def setup_class(cls): cls.x += 1 def test_func1(self): assert self.x == 1 def test_func2(self): assert self.x == 1 def teardown_class(cls): cls.x -= 1 def test_teareddown(): assert MyTestCase.x == 0 """ ) reprec = testdir.inline_run(testpath) reprec.assertoutcome(passed=3) @pytest.mark.parametrize("type", ["Error", "Failure"]) def test_testcase_adderrorandfailure_defers(testdir, type): testdir.makepyfile( """ from unittest import TestCase import pytest class MyTestCase(TestCase): def run(self, result): excinfo = pytest.raises(ZeroDivisionError, lambda: 0/0) try: result.add%s(self, excinfo._excinfo) except KeyboardInterrupt: raise except: pytest.fail("add%s should not raise") def test_hello(self): pass """ % (type, type) ) result = testdir.runpytest() assert "should not raise" not in result.stdout.str() @pytest.mark.parametrize("type", ["Error", "Failure"]) def test_testcase_custom_exception_info(testdir, type): testdir.makepyfile( """ from unittest import TestCase import py, pytest import _pytest._code class MyTestCase(TestCase): def run(self, result): excinfo = pytest.raises(ZeroDivisionError, lambda: 0/0) # we fake an incompatible exception info from _pytest.monkeypatch import MonkeyPatch mp = MonkeyPatch() def t(*args): mp.undo() raise TypeError() mp.setattr(_pytest._code, 'ExceptionInfo', t) try: excinfo = excinfo._excinfo result.add%(type)s(self, excinfo) finally: mp.undo() def test_hello(self): pass """ % locals() ) result = testdir.runpytest() result.stdout.fnmatch_lines( [ "NOTE: Incompatible Exception Representation*", "*ZeroDivisionError*", "*1 failed*", ] ) def test_testcase_totally_incompatible_exception_info(testdir): (item,) = testdir.getitems( """ from unittest import TestCase class MyTestCase(TestCase): def test_hello(self): pass """ ) item.addError(None, 42) excinfo = item._excinfo.pop(0) assert "ERROR: Unknown Incompatible" in str(excinfo.getrepr()) def test_module_level_pytestmark(testdir): testpath = 
testdir.makepyfile( """ import unittest import pytest pytestmark = pytest.mark.xfail class MyTestCase(unittest.TestCase): def test_func1(self): assert 0 """ ) reprec = testdir.inline_run(testpath, "-s") reprec.assertoutcome(skipped=1) class TestTrialUnittest(object): def setup_class(cls): cls.ut = pytest.importorskip("twisted.trial.unittest") # on windows trial uses a socket for a reactor and apparently doesn't close it properly # https://twistedmatrix.com/trac/ticket/9227 cls.ignore_unclosed_socket_warning = ("-W", "always") def test_trial_testcase_runtest_not_collected(self, testdir): testdir.makepyfile( """ from twisted.trial.unittest import TestCase class TC(TestCase): def test_hello(self): pass """ ) reprec = testdir.inline_run(*self.ignore_unclosed_socket_warning) reprec.assertoutcome(passed=1) testdir.makepyfile( """ from twisted.trial.unittest import TestCase class TC(TestCase): def runTest(self): pass """ ) reprec = testdir.inline_run(*self.ignore_unclosed_socket_warning) reprec.assertoutcome(passed=1) def test_trial_exceptions_with_skips(self, testdir): testdir.makepyfile( """ from twisted.trial import unittest import pytest class TC(unittest.TestCase): def test_hello(self): pytest.skip("skip_in_method") @pytest.mark.skipif("sys.version_info != 1") def test_hello2(self): pass @pytest.mark.xfail(reason="iwanto") def test_hello3(self): assert 0 def test_hello4(self): pytest.xfail("i2wanto") def test_trial_skip(self): pass test_trial_skip.skip = "trialselfskip" def test_trial_todo(self): assert 0 test_trial_todo.todo = "mytodo" def test_trial_todo_success(self): pass test_trial_todo_success.todo = "mytodo" class TC2(unittest.TestCase): def setup_class(cls): pytest.skip("skip_in_setup_class") def test_method(self): pass """ ) from _pytest.compat import _is_unittest_unexpected_success_a_failure should_fail = _is_unittest_unexpected_success_a_failure() result = testdir.runpytest("-rxs", *self.ignore_unclosed_socket_warning) result.stdout.fnmatch_lines_random( [ 
"*XFAIL*test_trial_todo*", "*trialselfskip*", "*skip_in_setup_class*", "*iwanto*", "*i2wanto*", "*sys.version_info*", "*skip_in_method*", "*1 failed*4 skipped*3 xfailed*" if should_fail else "*4 skipped*3 xfail*1 xpass*", ] ) assert result.ret == (1 if should_fail else 0) def test_trial_error(self, testdir): testdir.makepyfile( """ from twisted.trial.unittest import TestCase from twisted.internet.defer import Deferred from twisted.internet import reactor class TC(TestCase): def test_one(self): crash def test_two(self): def f(_): crash d = Deferred() d.addCallback(f) reactor.callLater(0.3, d.callback, None) return d def test_three(self): def f(): pass # will never get called reactor.callLater(0.3, f) # will crash at teardown def test_four(self): def f(_): reactor.callLater(0.3, f) crash d = Deferred() d.addCallback(f) reactor.callLater(0.3, d.callback, None) return d # will crash both at test time and at teardown """ ) result = testdir.runpytest() result.stdout.fnmatch_lines( [ "*ERRORS*", "*DelayedCalls*", "*test_four*", "*NameError*crash*", "*test_one*", "*NameError*crash*", "*test_three*", "*DelayedCalls*", "*test_two*", "*crash*", ] ) def test_trial_pdb(self, testdir): p = testdir.makepyfile( """ from twisted.trial import unittest import pytest class TC(unittest.TestCase): def test_hello(self): assert 0, "hellopdb" """ ) child = testdir.spawn_pytest(p) child.expect("hellopdb") child.sendeof() def test_trial_testcase_skip_property(self, testdir): testpath = testdir.makepyfile( """ from twisted.trial import unittest class MyTestCase(unittest.TestCase): skip = 'dont run' def test_func(self): pass """ ) reprec = testdir.inline_run(testpath, "-s") reprec.assertoutcome(skipped=1) def test_trial_testfunction_skip_property(self, testdir): testpath = testdir.makepyfile( """ from twisted.trial import unittest class MyTestCase(unittest.TestCase): def test_func(self): pass test_func.skip = 'dont run' """ ) reprec = testdir.inline_run(testpath, "-s") 
reprec.assertoutcome(skipped=1) def test_trial_testcase_todo_property(self, testdir): testpath = testdir.makepyfile( """ from twisted.trial import unittest class MyTestCase(unittest.TestCase): todo = 'dont run' def test_func(self): assert 0 """ ) reprec = testdir.inline_run(testpath, "-s") reprec.assertoutcome(skipped=1) def test_trial_testfunction_todo_property(self, testdir): testpath = testdir.makepyfile( """ from twisted.trial import unittest class MyTestCase(unittest.TestCase): def test_func(self): assert 0 test_func.todo = 'dont run' """ ) reprec = testdir.inline_run( testpath, "-s", *self.ignore_unclosed_socket_warning ) reprec.assertoutcome(skipped=1) def test_djangolike_testcase(testdir): # contributed from Morten Breekevold testdir.makepyfile( """ from unittest import TestCase, main class DjangoLikeTestCase(TestCase): def setUp(self): print("setUp()") def test_presetup_has_been_run(self): print("test_thing()") self.assertTrue(hasattr(self, 'was_presetup')) def tearDown(self): print("tearDown()") def __call__(self, result=None): try: self._pre_setup() except (KeyboardInterrupt, SystemExit): raise except Exception: import sys result.addError(self, sys.exc_info()) return super(DjangoLikeTestCase, self).__call__(result) try: self._post_teardown() except (KeyboardInterrupt, SystemExit): raise except Exception: import sys result.addError(self, sys.exc_info()) return def _pre_setup(self): print("_pre_setup()") self.was_presetup = True def _post_teardown(self): print("_post_teardown()") """ ) result = testdir.runpytest("-s") assert result.ret == 0 result.stdout.fnmatch_lines( [ "*_pre_setup()*", "*setUp()*", "*test_thing()*", "*tearDown()*", "*_post_teardown()*", ] ) def test_unittest_not_shown_in_traceback(testdir): testdir.makepyfile( """ import unittest class t(unittest.TestCase): def test_hello(self): x = 3 self.assertEqual(x, 4) """ ) res = testdir.runpytest() assert "failUnlessEqual" not in res.stdout.str() def test_unorderable_types(testdir): 
testdir.makepyfile( """ import unittest class TestJoinEmpty(unittest.TestCase): pass def make_test(): class Test(unittest.TestCase): pass Test.__name__ = "TestFoo" return Test TestFoo = make_test() """ ) result = testdir.runpytest() assert "TypeError" not in result.stdout.str() assert result.ret == EXIT_NOTESTSCOLLECTED def test_unittest_typerror_traceback(testdir): testdir.makepyfile( """ import unittest class TestJoinEmpty(unittest.TestCase): def test_hello(self, arg1): pass """ ) result = testdir.runpytest() assert "TypeError" in result.stdout.str() assert result.ret == 1 @pytest.mark.parametrize("runner", ["pytest", "unittest"]) def test_unittest_expected_failure_for_failing_test_is_xfail(testdir, runner): script = testdir.makepyfile( """ import unittest class MyTestCase(unittest.TestCase): @unittest.expectedFailure def test_failing_test_is_xfail(self): assert False if __name__ == '__main__': unittest.main() """ ) if runner == "pytest": result = testdir.runpytest("-rxX") result.stdout.fnmatch_lines( ["*XFAIL*MyTestCase*test_failing_test_is_xfail*", "*1 xfailed*"] ) else: result = testdir.runpython(script) result.stderr.fnmatch_lines(["*1 test in*", "*OK*(expected failures=1)*"]) assert result.ret == 0 @pytest.mark.parametrize("runner", ["pytest", "unittest"]) def test_unittest_expected_failure_for_passing_test_is_fail(testdir, runner): script = testdir.makepyfile( """ import unittest class MyTestCase(unittest.TestCase): @unittest.expectedFailure def test_passing_test_is_fail(self): assert True if __name__ == '__main__': unittest.main() """ ) from _pytest.compat import _is_unittest_unexpected_success_a_failure should_fail = _is_unittest_unexpected_success_a_failure() if runner == "pytest": result = testdir.runpytest("-rxX") result.stdout.fnmatch_lines( [ "*MyTestCase*test_passing_test_is_fail*", "*1 failed*" if should_fail else "*1 xpassed*", ] ) else: result = testdir.runpython(script) result.stderr.fnmatch_lines(["*1 test in*", "*(unexpected successes=1)*"]) 
assert result.ret == (1 if should_fail else 0) @pytest.mark.parametrize( "fix_type, stmt", [("fixture", "return"), ("yield_fixture", "yield")] ) def test_unittest_setup_interaction(testdir, fix_type, stmt): testdir.makepyfile( """ import unittest import pytest class MyTestCase(unittest.TestCase): @pytest.{fix_type}(scope="class", autouse=True) def perclass(self, request): request.cls.hello = "world" {stmt} @pytest.{fix_type}(scope="function", autouse=True) def perfunction(self, request): request.instance.funcname = request.function.__name__ {stmt} def test_method1(self): assert self.funcname == "test_method1" assert self.hello == "world" def test_method2(self): assert self.funcname == "test_method2" def test_classattr(self): assert self.__class__.hello == "world" """.format( fix_type=fix_type, stmt=stmt ) ) result = testdir.runpytest() result.stdout.fnmatch_lines(["*3 passed*"]) def test_non_unittest_no_setupclass_support(testdir): testpath = testdir.makepyfile( """ class TestFoo(object): x = 0 @classmethod def setUpClass(cls): cls.x = 1 def test_method1(self): assert self.x == 0 @classmethod def tearDownClass(cls): cls.x = 1 def test_not_teareddown(): assert TestFoo.x == 0 """ ) reprec = testdir.inline_run(testpath) reprec.assertoutcome(passed=2) def test_no_teardown_if_setupclass_failed(testdir): testpath = testdir.makepyfile( """ import unittest class MyTestCase(unittest.TestCase): x = 0 @classmethod def setUpClass(cls): cls.x = 1 assert False def test_func1(self): cls.x = 10 @classmethod def tearDownClass(cls): cls.x = 100 def test_notTornDown(): assert MyTestCase.x == 1 """ ) reprec = testdir.inline_run(testpath) reprec.assertoutcome(passed=1, failed=1) def test_issue333_result_clearing(testdir): testdir.makeconftest( """ import pytest @pytest.hookimpl(hookwrapper=True) def pytest_runtest_call(item): yield assert 0 """ ) testdir.makepyfile( """ import unittest class TestIt(unittest.TestCase): def test_func(self): 0/0 """ ) reprec = testdir.inline_run() 
reprec.assertoutcome(failed=1) def test_unittest_raise_skip_issue748(testdir): testdir.makepyfile( test_foo=""" import unittest class MyTestCase(unittest.TestCase): def test_one(self): raise unittest.SkipTest('skipping due to reasons') """ ) result = testdir.runpytest("-v", "-rs") result.stdout.fnmatch_lines( """ *SKIP*[1]*test_foo.py*skipping due to reasons* *1 skipped* """ ) def test_unittest_skip_issue1169(testdir): testdir.makepyfile( test_foo=""" import unittest class MyTestCase(unittest.TestCase): @unittest.skip("skipping due to reasons") def test_skip(self): self.fail() """ ) result = testdir.runpytest("-v", "-rs") result.stdout.fnmatch_lines( """ *SKIP*[1]*skipping due to reasons* *1 skipped* """ ) def test_class_method_containing_test_issue1558(testdir): testdir.makepyfile( test_foo=""" import unittest class MyTestCase(unittest.TestCase): def test_should_run(self): pass def test_should_not_run(self): pass test_should_not_run.__test__ = False """ ) reprec = testdir.inline_run() reprec.assertoutcome(passed=1) @pytest.mark.parametrize( "base", ["six.moves.builtins.object", "unittest.TestCase", "unittest2.TestCase"] ) def test_usefixtures_marker_on_unittest(base, testdir): """#3498""" module = base.rsplit(".", 1)[0] pytest.importorskip(module) testdir.makepyfile( conftest=""" import pytest @pytest.fixture(scope='function') def fixture1(request, monkeypatch): monkeypatch.setattr(request.instance, 'fixture1', True ) @pytest.fixture(scope='function') def fixture2(request, monkeypatch): monkeypatch.setattr(request.instance, 'fixture2', True ) def node_and_marks(item): print(item.nodeid) for mark in item.iter_markers(): print(" ", mark) @pytest.fixture(autouse=True) def my_marks(request): node_and_marks(request.node) def pytest_collection_modifyitems(items): for item in items: node_and_marks(item) """ ) testdir.makepyfile( """ import pytest import {module} class Tests({base}): fixture1 = False fixture2 = False @pytest.mark.usefixtures("fixture1") def 
test_one(self): assert self.fixture1 assert not self.fixture2 @pytest.mark.usefixtures("fixture1", "fixture2") def test_two(self): assert self.fixture1 assert self.fixture2 """.format( module=module, base=base ) ) result = testdir.runpytest("-s") result.assert_outcomes(passed=2) def test_testcase_handles_init_exceptions(testdir): """ Regression test to make sure exceptions in the __init__ method are bubbled up correctly. See https://github.com/pytest-dev/pytest/issues/3788 """ testdir.makepyfile( """ from unittest import TestCase import pytest class MyTestCase(TestCase): def __init__(self, *args, **kwargs): raise Exception("should raise this exception") def test_hello(self): pass """ ) result = testdir.runpytest() assert "should raise this exception" in result.stdout.str() assert "ERROR at teardown of MyTestCase.test_hello" not in result.stdout.str() def test_error_message_with_parametrized_fixtures(testdir): testdir.copy_example("unittest/test_parametrized_fixture_error_message.py") result = testdir.runpytest() result.stdout.fnmatch_lines( [ "*test_two does not support fixtures*", "*TestSomethingElse::test_two", "*Function type: TestCaseFunction", ] ) @pytest.mark.parametrize( "test_name, expected_outcome", [ ("test_setup_skip.py", "1 skipped"), ("test_setup_skip_class.py", "1 skipped"), ("test_setup_skip_module.py", "1 error"), ], ) def test_setup_inheritance_skipping(testdir, test_name, expected_outcome): """Issue #4700""" testdir.copy_example("unittest/{}".format(test_name)) result = testdir.runpytest() result.stdout.fnmatch_lines(["* {} in *".format(expected_outcome)])
Earthquaker pedals take a standard 9-volt DC power supply with a 2.1 mm negative-center barrel. We always recommend pedal-specific, transformer-isolated wall-wart power supplies or multiple isolated-output supplies. Pedals will make extra noise if the power is unclean or carries ripple. Switching-type power supplies, daisy chains and non-pedal-specific power supplies do not filter dirty power as well and let unwanted noise through. Do not run at higher voltages! Current draw is 100 mA.
""" Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ # Don't split all the model classes into seperate files (see The Zen of Python: "Flat is better than nested.") # If you are going to do it, adhere to the standards: # See: https://docs.djangoproject.com/en/2.0/topics/db/models/#organizing-models-in-a-package # See: https://www.python.org/dev/peps/pep-0008/#package-and-module-names import uuid from django.contrib.gis.db import models from django.contrib.auth.models import User from django.dispatch import receiver from django.db.models.signals import post_save from ..db_comments.model_mixins import DBComments from .common import * from .screen import * from .survey import * from .bulk import * # DATALOAD_USER: Use for ETL etc. DATALOAD_USER = 'DATALOAD_USER' # DE_DUPLICATE_USER: Use when running scripts that remove duplicates. 
# DE_DUPLICATE_USER: marker account name, paired with DATALOAD_USER above.
DE_DUPLICATE_USER = 'DE_DUPLICATE_USER'


class Profile(models.Model, DBComments):
    """
    Extended User Profile.

    One-to-one extension of Django's built-in auth user holding the extra
    fields this application needs.  Rows are created automatically by the
    ``create_user_profile`` post_save receiver defined below.
    """
    # Owning auth user; deleting the User cascades to the Profile.
    user = models.OneToOneField(
        User, on_delete=models.CASCADE, related_name="profile")
    # Stable GUID primary key, generated at creation and never editable.
    profile_guid = models.UUIDField(
        primary_key=True, default=uuid.uuid4, editable=False)
    username = models.CharField(max_length=100, blank=True, null=True)
    name = models.CharField(max_length=100, blank=True, null=True)

    # Table-level comment consumed by the DBComments mixin.
    db_table_comment = ('Additional user information that cannot be stored on the django auth_user table.')

    class Meta:
        db_table = 'profile'


class Border(models.Model):
    """
    Administrative-area border polygon.

    Field names mirror the columns of the source border dataset --
    presumably loaded by an external ETL process; TODO confirm against the
    importer.
    """
    se_a_c_flg = models.CharField(max_length=254)
    # NOTE(review): "obejctid" looks like a typo of "objectid", but renaming
    # the field would change the database column -- leave as-is unless a
    # migration is planned.
    obejctid = models.FloatField()
    shape = models.FloatField()
    length_m = models.FloatField()
    oic_number = models.CharField(max_length=7)
    area_sqm = models.FloatField()
    upt_date = models.CharField(max_length=20)
    upt_type = models.CharField(max_length=50)
    chng_org = models.CharField(max_length=30)
    aa_parent = models.CharField(max_length=100)
    aa_type = models.CharField(max_length=50)
    aa_id = models.BigIntegerField()
    aa_name = models.CharField(max_length=100)
    abrvn = models.CharField(max_length=40)
    bdy_type = models.CharField(max_length=20)
    oic_year = models.CharField(max_length=4)
    afctd_area = models.CharField(max_length=120)
    # Border geometry stored in geographic SRID 4269 (NAD83).
    geom = models.MultiPolygonField(srid=4269)


@receiver(post_save, sender=User)
def create_user_profile(sender, instance, created, **kwargs):
    """Create the matching Profile whenever a new User is first saved."""
    if created:
        Profile.objects.create(user=instance)


@receiver(post_save, sender=User)
def save_user_profile(sender, instance, **kwargs):
    """Persist the related Profile every time the User is saved."""
    instance.profile.save()
The Association of the Luxembourg Fund Industry (ALFI) released the 2014 version of its annual real estate investment fund (REIF) survey, showing the development of the Luxembourg-domiciled REIF market as at the end of 2013. According to a report published by the Association of the Luxembourg Fund Industry, carried out by Oliver Wyman, the introduction of the AIFMD has fuelled strong growth in European fund domiciles, with the number of alternative investment funds increasing by 10% since 2010, and assets under management increasing by 13%. This report looks at the impact of the financial crisis on European investor behaviour, and at the future direction of the asset management industry. The report “Beyond 10%: the case for enlarging the pool of retail investors in Europe’s investment funds,” by research company MackayWilliams, highlights the fact that there is a €4 trillion pool of unmanaged household assets in Europe, which is either losing value or failing to grasp growth opportunities that are available through long-term investment vehicles. This is a missed opportunity for asset managers and savers alike. Responsible investing (RI) funds are showing strong growth, with assets under management increasing since 2010 by 19%, from €199.9 billion to €237.9 billion, and the proportion of RI assets relative to the total assets in European funds increasing by 1.6%, according to ALFI’s second European Responsible Investing Fund Survey, carried out by KPMG on behalf of ALFI. The Association of the Luxembourg Fund Industry (ALFI) unveiled on 18 September 2012 a new study, “Trends in Cross Border Distribution”.
"""A simple example of how to access the Google Analytics API.""" import argparse from apiclient.discovery import build import httplib2 from oauth2client import client from oauth2client import file from oauth2client import tools def get_service(api_name, api_version, scope, client_secrets_path): """Get a service that communicates to a Google API. Args: api_name: string The name of the api to connect to. api_version: string The api version to connect to. scope: A list of strings representing the auth scopes to authorize for the connection. client_secrets_path: string A path to a valid client secrets file. Returns: A service that is connected to the specified API. """ # Parse command-line arguments. parser = argparse.ArgumentParser( formatter_class=argparse.RawDescriptionHelpFormatter, parents=[tools.argparser]) flags = parser.parse_args([]) # Set up a Flow object to be used if we need to authenticate. flow = client.flow_from_clientsecrets( client_secrets_path, scope=scope, message=tools.message_if_missing(client_secrets_path)) # Prepare credentials, and authorize HTTP object with them. # If the credentials don't exist or are invalid run through the native client # flow. The Storage object will ensure that if successful the good # credentials will get written back to a file. storage = file.Storage(api_name + '.dat') credentials = storage.get() if credentials is None or credentials.invalid: credentials = tools.run_flow(flow, storage, flags) http = credentials.authorize(http=httplib2.Http()) # Build the service object. service = build(api_name, api_version, http=http) return service def main(): # Define the auth scopes to request. scope = ['https://www.googleapis.com/auth/analytics.readonly'] # Authenticate and construct service. service = get_service('analytics', 'v3', scope, 'client_secrets.json') return service if __name__ == '__main__': main()
Happy new year from all of us at Salty Studios, hope you had a great holiday season! So here we are getting ready to jump straight back in to development and thought we would share a little bit about the transition from what was Project Light/Source to the now named Lawson’s Shadow. From there it began, we started formulating a plan of action and the team we needed to get it done. Over time the team changed as people came and went but the core of us stuck together to move forward with this idea of a stealth game that plays with the idea of light. It went through numerous incarnations but one thing we knew we wanted was top down; that bird's eye, Hotline Miami-esque design. We experimented with numerous themes for the game - at one point it was a wasteland style survival game, at another point it was a neo-metropolis game about corporate espionage. Whatever the changes, we all knew what we wanted from this game and put our hearts in to crafting it into what we needed it to be. But with change comes setbacks and numerous roadblocks in our first year of development. From missed deadlines to family commitments, for a while it felt like maybe we have bitten off more than we could chew. This very much came to a head with our first year of showing the game at conventions. It began with MAIcon in Launceston, a smaller convention but one run by a group of super dedicated people. It was a solid showing and people were getting the core concept and the feedback was huge but from some reactions we knew we had to go even bigger for PAX and that was only 1 month away at the time. Soon it was go time, the team implemented as many changes as they could to get the game up and running and the next thing you know, we were off to Melbourne to showcase Light/Source, the first big game from Salty Studios. It was a whirlwind trip filled with good memories, great company and even better food but it came at a cost; on day one of our trip we lost one of display monitors and computers. 
On it was our build and everything that we had been working on to get it up and running - now this is where the age old saying of save your work would come in handy but unfortunately, we had been working on it the night before with no internet so what we had was simply what we had. We did what we could to repair and even had our modeller working on things on a donated laptop to show off our progress. We salvaged what we could, took in the numerous amounts of criticism and revelled in the compliments given to us by other developers and built friendships with some incredible people but most of all we were driven to come back bigger and better. The final stop was AICon 2018, where we showed exactly what we had planned for PAX, had a super great weekend and enjoyed the time as a team but one thing happened when we got home that changed everything. People really wanted this game in first person. For so long the game had been in our minds as this unique twist on stealth and the top down genre but maybe we were wrong. We always had this mantra “we make games that fans want to play” and we used this a lot for our design and direction. At every major turn we had players input about what they would want to see in the game and we saw the occasional note and had the occasional comment. But from everything we read, it was crazy how often this suggestion for first person popped up, and as a team we took it on board and went to work. We ended up coming out with the idea for Lawson’s Shadow, a 90’s noir comic inspired stealth game that puts you in the role of a fixer, a criminal for hire in Chicago criminal underworld. But this change came with some setbacks; we lost more team members to other commitments. As for most of us this was our side project from our day to day lives. We lost people due to various reasons and in the end, it went back to the core group. We eventually met some other people who shared our vision and together built something we were really proud of. 
We scrapped nearly everything, from almost all the code to 90% of the assets, and we went back to basics. We used tools that we had never once thought about using and began creating characters and assets beyond what was in the original concept. From there we announced Lawson’s Shadow and began moving out of planning into solid development. We finally felt like we had the right people, the story was coming together, and the game had this knock-on effect in which we started going to places and meeting new people, and even though we had never met, they knew of Lawson’s Shadow and Salty Studios. We finally felt as if we were ready for PAX AUS 2018 and ready to show off Lawson’s Shadow. Next time I’ll discuss and break down our trip to PAX AUS 2018 and how from there we have begun to build the game we think will be something special to us and those who play it. Hi everyone and thank you for taking time out of your busy holiday lives to have a read of what’s going on in the development of Lawson’s Shadow. Starting today, we will endeavour to update the dev blog with weekly posts and updates about what is happening here at the studio. Expect to read interviews with the team, see photos of the development and sneak previews of what we have in the works. A common complaint was the lack of clearly defined lighting in the game. Ben and the team have gone all in, creating some absolutely amazing fixes to the game’s current lighting. As for character movement, the biggest thing really was that there was no model at the time of the demo. Brett has been working really hard to get the models to the standard he wants them to be, to make movement smoother and more responsive. Probably the biggest issue to come out of PAX was feedback on what the player was doing. From audio to onscreen, there just wasn’t enough information being relayed back to the player. We have addressed this by firstly increasing the audio in the game and secondly by building in more descriptive UI elements.
In the demo, the game required you to be very precise in what you do and how you interact with things. We have made changes to the game to make it so that when you want to interact with an item, the distance makes sense. Another common complaint was the bland colour palette which made the game hard to navigate. As you will be able to see below we have made changes to how colours appear in the game world. Overall it’s been a very productive couple of months since PAX, right now myself and Grady are finalising the story to begin the script and bring in the characters we have made for this world, whilst the rest of the team are working on various assets and in-game items. Thank you for taking the time to read our first dev blog and we will check in next week with some new information. Lawson’s Shadow is a first person 90’s noir comic book inspired stealth game that puts players in the role of Lawson, a fixer for hire who operates in the shadows taking on jobs from Chicago’s biggest criminals. It’s been a long time coming but we are finally ready to show it off and where better to begin but at none other than PAX Aus 2018. Players will have the chance to come mess around within the tutorial level of the game and give us feedback to continue development for hopeful release some time in the near future. The biggest change as you would have probably noticed is the swap from top down to first person; this is the biggest change as it really transforms how the game plays (obviously). The main reason for this change was player feedback from the surveys that we had at conventions. When it came to the question “What feature you would like to see the most?” the top 2 responses were “a first-person mode” or “multiplayer”. Whilst the second option is on the cards as a possibility, the team sat down to have a hard think and decide what do in regard to the player perspective. 
Now, we have always said the reason Light/Source was in so many people’s hands early was so that our fans and other players could help craft the world we had in our minds. So, with that, the decision was made, and we turned the game first person and began building the world from there. Can you get in and out? Thanks to the hard work of the team we are now ready to get Lawson’s Shadow in to the hands of fans and players to help us finish crafting the game WE wanted to make, and YOU want to play. So, from the bottom of our hearts, we thank you for all the support and for coming along this journey with us.
#! /usr/bin/env python """! ----------------------------------------------------------------------------- File Name : updateDonations.py Purpose: This module is used to read the data downloaded from the Australian electoral commission website and update the database accordingly Updated: Thu Mar 5 19:25:41 AEDT 2015 Created: 24-Feb-2015 21:32:45 AEDT ----------------------------------------------------------------------------- Revision History Wed Mar 11 15:36:29 AEDT 2015: Version 0.2 *File renamed *updateDonations class re-configured to inherit partyCoinDbase class 24-Feb-2015 21:32:45 AEDT: Version 0.1 *Configured to work with sql database only ----------------------------------------------------------------------------- S.D.G """ __author__ = 'Ben Johnston' __revision__ = '0.2' __date__ = 'Wed Mar 11 15:36:17 AEDT 2015' __license__ = 'MPL v2.0' ## LICENSE DETAILS############################################################ # This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
##IMPORTS#####################################################################
from dbConfig import partyCoinDbase
import re
import os
import pdb
##############################################################################
# Tables used in tracking the donations made by the political parties.
# New data is loaded into the staging table (SECONDARY_TABLE) and only
# swapped into MAIN_TABLE once a complete import has finished, so readers
# never see a partially-loaded table (see replace_old_data).
MAIN_TABLE = 'funds_tracker_donation'
SECONDARY_TABLE = 'new_funds'

# Regular expression search strings used to determine in which state a given
# political party resides.  Each pattern matches, in order: the bare state
# acronym without alphabet characters on either side, the punctuated acronym
# (e.g. "n-s-w"), or the full state name.
# NOTE(review): the party name is stripped of '.' and ',' before matching
# (see _party_state_for), so the "[\.,\-]" alternatives only catch the
# hyphenated acronym forms -- confirm this is intended.
STATES = {'nsw': r'([^A-Za-z]nsw([^A-Za-z]|$))|'
                 r'([^A-Za-z]n[\.,\-]s[\.,\-]w[^A-Za-z])|'
                 r'(new south wales)',
          # BUGFIX: the punctuated-acronym alternative was missing its
          # opening '[' ("(^A-Za-z]q...") and could never match.
          'qld': r'([^A-Za-z]qld([^A-Za-z]|$))|'
                 r'([^A-Za-z]q[\.,\-]l[\.,\-]d[^A-Za-z])|'
                 r'(queensland)',
          'vic': r'([^A-Za-z]vic([^A-Za-z]|$))|'
                 r'([^A-Za-z]v[\.,\-]i[\.,\-]c[^A-Za-z])|'
                 r'(victoria)',
          'sa': r'([^A-Za-z]sa([^A-Za-z]|$))|'
                r'([^A-Za-z]s[\.,\-]a[^A-Za-z])|'
                r'(south australia)',
          'nt': r'([^A-Za-z]nt([^A-Za-z]|$))|'
                r'([^A-Za-z]n[\.,\-]t[^A-Za-z])|'
                r'(northern territory)',
          'wa': r'([^A-Za-z]wa([^A-Za-z]|$))|'
                r'([^A-Za-z]w[\.,\-]a[^A-Za-z])|'
                r'(western australia)',
          # BUGFIX: "captial" -> "capital" so the full name can match.
          'act': r'([^A-Za-z]act([^A-Za-z]|$))|'
                 r'([^A-Za-z]a[\.,\-]c[\.,\-]t[^A-Za-z])|'
                 r'(australian capital territory)',
          'tas': r'([^A-Za-z]tas([^A-Za-z]|$))|'
                 r'([^A-Za-z]t[\.,\-]a[\.,\-]s[^A-Za-z])|'
                 r'(tasmania)',
          }

# FEDERAL is used to indicate that a political party is a country wide
# organisation (assigned when no state pattern matches).
FEDERAL = 'FED'


#CLASSES#######################################################################
class updateDonations(partyCoinDbase):
    """!
    This class is used to update the mysql database containing the
    political funding information.  The class possesses methods that
    enable reading of the data supplied by dataGetter.py

    Imported rows are staged in SECONDARY_TABLE and promoted to MAIN_TABLE
    by replace_old_data() once a complete import has succeeded.
    """

    def __init__(self, debug_level=0):
        """!
        The constructor for the object
        @param self The pointer for the object
        @param debug_level Verbosity level forwarded to partyCoinDbase
        """
        #Instantiate the parent class and open the database connection
        partyCoinDbase.__init__(self, debug_level=debug_level)
        self.connect_to_db()

    def _drop_table_if_exists(self, table):
        """!
        Drop the named table, ignoring only the 'Unknown table' error.
        @param table The name of the table to drop
        BUGFIX: the previous code inspected e[1], a Python-2-only idiom
        (exceptions are not indexable on Python 3); the error message is
        now taken from str(e), which works on both versions.
        """
        try:
            self.execute_command('DROP TABLE %s' % table)
        except Exception as e:
            if 'Unknown table' not in str(e):
                raise

    def prepare_for_new_data(self):
        """!
        Create a fresh, empty staging table (SECONDARY_TABLE) with the
        same schema as MAIN_TABLE, dropping any previous staging table.
        """
        self._drop_table_if_exists(SECONDARY_TABLE)
        self.execute_command('CREATE TABLE %s LIKE %s' %
                             (SECONDARY_TABLE, MAIN_TABLE))

    def add_funds_to_db(self, year=None, party=None, party_state=None,
                        donor=None, address=None, state=None, postcode=None,
                        don_type=None, amount=None):
        """!
        This method adds one donation record to the staging table
        @return False when any required field is missing, otherwise None
        """
        required = (year, party, party_state, donor, address, state,
                    postcode, don_type, amount)
        if any(value is None for value in required):
            return False

        #Strip single quotes so the values cannot terminate the SQL literal
        #NOTE(review): values are still interpolated directly into the SQL
        #string; switch to a parameterised query if execute_command (defined
        #in partyCoinDbase) supports one.
        def _clean(text):
            return text.replace("'", '')

        msg = 'INSERT INTO %s(year, party,'\
              'donor, address, state, postCode, donor_type, amount, '\
              'party_state)'\
              " VALUES('%s', '%s','%s','%s', '%s', '%s', '%s', %0.2f, '%s')" %\
              (SECONDARY_TABLE, _clean(year), _clean(party), _clean(donor),
               _clean(address), _clean(state), postcode, _clean(don_type),
               amount, _clean(party_state).upper())
        self.execute_command(msg)

    def replace_old_data(self):
        """!
        This method replaces the old data in the database with the
        recently collected data by renaming the staging table over
        MAIN_TABLE.
        """
        self._drop_table_if_exists(MAIN_TABLE)
        self.execute_command('ALTER TABLE %s RENAME %s' %
                             (SECONDARY_TABLE, MAIN_TABLE))

    def _party_state_for(self, party):
        """!
        Determine the home state of a political party from its name.
        @param party The party name as read from the log file
        @return A key of STATES, or FEDERAL when no state pattern matches
        """
        test_party = party.lower().replace('.', '').replace(',', '')
        for state, pattern in STATES.items():
            if re.search(pattern, test_party):
                return state
        #Default to a country-wide organisation
        return FEDERAL

    def _import_file(self, path):
        """!
        Parse one downloaded log file and stage every donation row.
        @param path Full path of the file to import

        Expected layout: row 0 holds the year, row 1 the party name,
        row 2 the column headers, and the remaining rows the donations.
        """
        #BUGFIX: the file handle was previously never closed
        with open(path, 'r') as f_handle:
            data = f_handle.read().split(',\r\n')
        #Ensure the file actually contains data
        if len(data) == 1:
            return
        year = None
        party = None
        party_state = None
        for row_counter, row in enumerate(data):
            if row_counter == 0:
                #The first row contains the year: keep the first four
                #digits of the final whitespace-separated token
                year = row.split(' ')[-1][:4]
            elif row_counter == 1:
                #The second row contains the party name
                party = row.split('data')[0].replace(',', '')
                party_state = self._party_state_for(party)
            elif row_counter == 2:
                #Ignore the third (column header) row
                pass
            elif row != '':
                #Handle data rows except for trailing blank lines
                fields = [field.replace('"', '').replace("'", '')
                          for field in row.split('","')]
                self.add_funds_to_db(year=year,
                                     party=party,
                                     party_state=party_state,
                                     donor=fields[0],
                                     address=fields[1],
                                     state=fields[3],
                                     postcode=fields[4],
                                     don_type=fields[6],
                                     amount=float(fields[5]))

    def import_data_from_dir(self, log_folder=None, file_extension=None):
        """!
        This method is used to import data from log files into the database
        @param self The pointer for the object
        @param log_folder The folder containing the log files to import
        @param file_extension The file type to be imported (e.g. '.csv')
        """
        #Check the inputs are valid
        if (log_folder is None) or (file_extension is None):
            self.info_logger.info(
                'log_folder and/or file_extension not supplied')
            return
        #Walk through the collected file list, importing matching files
        for f in os.listdir(log_folder):
            if os.path.splitext(f)[1] == file_extension:
                self.info_logger.info("Reading file %s" % f)
                self._import_file(os.path.join(log_folder, f))
        #Promote the staged data once every file has been read
        self.replace_old_data()
The Windows Form is essential to a vital component in the development of any Windows -based application. Forms essentially provide the windows that make up a Windows application, in . In fact , the terms window and form are often used interchangeably. Forms allow the Visual Basic developer to create windows and layout controls (such as buttons, labels etc) in those forms to provide the application's user interface. In the next chapter ([[Designing Forms in Visual Basic]]) we will look at how to lay out controls inside a form. Before we reach that stage, however, there are many changes that can be made to the form itself using Visual Basic. We will cover these topics in this chapter. == Creating In the next chapter ([[Designing Forms in Visual Basic]]) we will look at how to layout controls inside a New Form ==form. Before we reach that stage, however, there are a surprising number of ways in which the form itself can be modified and configured. We will cover these options in detail in this chapter. Throughout this chapter we will work with a form in a new project. Begin by starting Visual Studio and creating a new Windows Application project (see [[Creating a New Visual Basic Project]] for details of how to do this). Name the project ''VBforms''. All objects in a Visual Basic application need a name so that they can be referenced in the code. When a new object is added to an application in Visual Studio it is assigned a default name which usually consists of the object type and a number. For example the first form object in an application is named ''Form1'', the second ''Form2'', and so on. To change the name of a Form to something more meaningful simply click in any area of the Form in Visual Studio and change the ''(Name)'' value in the ''Properties'' panel. Each form represents an application window. The text displayed in the title bar for each window should be changed to display something meaningful. 
This should either be the name of the application, or a description of the form's function (for example ''Order Entry'' or ''Sales Report''). The value of the text to be displayed in the window title is defined by the form's ''Text'' property. To change the title of the form, therefore, select the ''Text'' value in the Properties panel and change it to a new value (for example, 'My Form Example').
from django import forms
from django.utils.translation import pgettext_lazy

from ...seo.models import SeoModel
from ..widgets import CharsLeftWidget

SEO_FIELD_HELP_TEXT = pgettext_lazy(
    'Form field help text',
    'If empty, the preview shows what will be autogenerated.')
# Recommended minimum lengths surfaced to the widget as data attributes.
MIN_DESCRIPTION_LENGTH = 120
MIN_TITLE_LENGTH = 25
# Hard limits mirror the model fields so form and model never disagree.
DESCRIPTION_MAX_LENGTH = SeoModel._meta.get_field('seo_description').max_length
TITLE_MAX_LENGTH = SeoModel._meta.get_field('seo_title').max_length


class SeoTitleField(forms.CharField):
    """Char field for a SEO title, capped at the model's max length.

    `extra_attrs` (dict or None) is merged into the widget attrs;
    `required` defaults to False since the value can be autogenerated.
    """

    widget = CharsLeftWidget(
        attrs={
            'data-min-recommended-length': MIN_TITLE_LENGTH,
            'maxlength': TITLE_MAX_LENGTH})

    def __init__(self, extra_attrs=None, required=False, *args, **kwargs):
        # Pass max_length through to CharField.__init__ so Django attaches a
        # MaxLengthValidator. The previous code assigned self.max_length
        # AFTER super().__init__(), which set the attribute but skipped the
        # validator, leaving the limit enforced only by the widget's
        # client-side `maxlength` attribute.
        kwargs['max_length'] = TITLE_MAX_LENGTH
        super().__init__(*args, **kwargs)
        if extra_attrs:
            self.widget.attrs.update(extra_attrs)
        self.required = required
        self.help_text = SEO_FIELD_HELP_TEXT
        self.label = pgettext_lazy(
            'A SEO friendly title', 'SEO Friendly Title')


class SeoDescriptionField(forms.CharField):
    """Char field for a SEO description, capped at the model's max length.

    Same contract as `SeoTitleField`: `extra_attrs` is merged into the
    widget attrs and the field is optional by default.
    """

    help_text = SEO_FIELD_HELP_TEXT
    widget = CharsLeftWidget(
        attrs={
            'help_text': SEO_FIELD_HELP_TEXT,
            'data-min-recommended-length': MIN_DESCRIPTION_LENGTH,
            'maxlength': DESCRIPTION_MAX_LENGTH})

    def __init__(self, extra_attrs=None, required=False, *args, **kwargs):
        # See SeoTitleField.__init__: max_length must reach the parent
        # constructor for server-side validation to be installed.
        kwargs['max_length'] = DESCRIPTION_MAX_LENGTH
        super().__init__(*args, **kwargs)
        if extra_attrs:
            self.widget.attrs.update(extra_attrs)
        self.required = required
        self.help_text = SEO_FIELD_HELP_TEXT
        self.label = pgettext_lazy(
            'A SEO friendly description', 'SEO Friendly Description')
Interview ERC grantee Natasa Przulj, Professor of Biomedical Data Science at UCL. The European Conference on Computational Biology took place from September 9 to 12 in Athens. The meeting gathered more than a thousand participants, allowing researchers from a variety of backgrounds - geneticists, molecular biologists, biochemists, computer scientists, statisticians - to come together at one of the most interdisciplinary gatherings in the Life Sciences.
from data_importers.management.commands import BaseXpressDemocracyClubCsvImporter


class Command(BaseXpressDemocracyClubCsvImporter):
    """Polling station/address importer for Slough (2019-12-12 parl election)."""

    council_id = "E06000039"
    addresses_name = (
        "parl.2019-12-12/Version 1/Democracy_Club__12December2019slough.CSV"
    )
    stations_name = "parl.2019-12-12/Version 1/Democracy_Club__12December2019slough.CSV"
    elections = ["parl.2019-12-12"]
    allow_station_point_from_postcode = False

    def station_record_to_dict(self, record):
        # Claycots School [Town Hall]: blank out its grid reference
        # (presumably the supplied point is wrong — confirm with source data).
        if record.polling_place_id == "1000":
            record = record._replace(
                polling_place_easting="0",
                polling_place_northing="0",
            )
        return super().station_record_to_dict(record)

    def address_record_to_dict(self, record):
        result = super().address_record_to_dict(record)
        uprn = record.property_urn.strip().lstrip("0")

        # Per-property overrides keyed on the normalised UPRN. The two
        # branches are mutually exclusive, so elif is equivalent to the
        # original pair of independent ifs.
        if uprn in ("100081042223", "10022917421"):
            result["accept_suggestion"] = False
        elif uprn == "100080321307":
            result["postcode"] = "SL6 0LG"

        return result
Today the Librarian of Congress named the 25 films that will comprise the National Film Registry’s entries for the year 2010. These are films that have cultural, historical or aesthetic significance that warrants their preservation for posterity. All in all, there are 550 films in the registry. Although there is great variety in this year’s list – which includes movies ranging from “Airplane!” and “All the President’s Men” to the documentary “Grey Gardens” (which spurred an HBO movie and a Broadway show) – one film will be of special interest to those in the Latino community. The 20-minute film “I Am Joaquin,” produced and directed in 1969 by filmmaker Luis Valdez (who later directed “Zoot Suit” and “La Bamba”), is a visual presentation of a watershed poem by the Hispanic activist Rodolfo “Corky” Gonzales. Gonzales, who died in his hometown of Denver in April 2005 at the age of 76, was an amateur and later a pro boxer, retiring in 1955 with a professional record of 65-9-1. He became active in politics later that year and in 1966 founded an organization known as The Crusade for Justice, which was at the leading edge of the Chicano movement. In 1967, Gonzales – soon to march in Washington leading the Southwestern states’ contingent of the 1968 Poor People’s Campaign — penned “I am Joaquin.” Opening as a cry from the present, it takes its listener back through ancient and more recent Mexican history; it speaks to the Mexican, Spanish, Indian, and American roots of U.S. Hispanics. The Library of Congress offers vast resources of interest to the U.S. Hispanic community and to people worldwide who seek research materials in Spanish. 
From the Kislak Collection of rare objects reflecting pre- and post-Columbian culture in the New World to the Treaty of Guadalupe Hidalgo to personal accounts housed in the Library’s Veterans History Project and a “Latinos in math and science: resources for kids, young adults and teachers” web page, there is much to experience, both in-person and online. The Library has appointed an acting Register of Copyrights (that is, head of the U.S. Copyright Office), Maria Pallante, beginning Jan. 1, 2011. Read more about the appointment here.
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2016, Numenta, Inc.  Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program.  If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------

"""
This is for running some very preliminary disjoint pooling experiments.
"""

# NOTE: Python 2 module (print statements, cPickle, long literals elsewhere).

import cPickle
from multiprocessing import Pool
import random
import time

import numpy
import matplotlib.pyplot as plt
import matplotlib as mpl

# Embed fonts as TrueType in saved PDFs (keeps text editable/selectable).
mpl.rcParams['pdf.fonttype'] = 42

from htmresearch.frameworks.layers.l2_l4_inference import L4L2Experiment
from htmresearch.frameworks.layers.object_machine_factory import (
  createObjectMachine
)


def printColumnPoolerDiagnostics(pooler):
  # Print per-cell averages of the pooler's proximal/distal synapse and
  # segment counts. `pooler` is expected to expose the numberOf* counters
  # and cellCount used below (a column pooler algorithm instance).
  print "sampleSizeProximal: ", pooler.sampleSizeProximal
  print "Average number of proximal synapses per cell:",
  print float(pooler.numberOfProximalSynapses()) / pooler.cellCount

  print "Average number of distal segments per cell:",
  print float(pooler.numberOfDistalSegments()) / pooler.cellCount

  print "Average number of connected distal synapses per cell:",
  print float(pooler.numberOfConnectedDistalSynapses()) / pooler.cellCount

  print "Average number of distal synapses per cell:",
  print float(pooler.numberOfDistalSynapses()) / pooler.cellCount


def runExperiment(args):
  """
  Run experiment.  args is a dict representing the parameters. We do it this
  way to support multiprocessing. The method returns the args dict updated with
  multiple additional keys representing accuracy metrics.
  """
  # Unpack parameters, with defaults. All keys are optional.
  numObjects = args.get("numObjects", 10)
  numLocations = args.get("numLocations", 10)
  numFeatures = args.get("numFeatures", 10)
  numColumns = args.get("numColumns", 2)
  sensorInputSize = args.get("sensorInputSize", 300)
  networkType = args.get("networkType", "MultipleL4L2Columns")
  longDistanceConnections = args.get("longDistanceConnections", 0)
  locationNoise = args.get("locationNoise", 0.0)
  featureNoise = args.get("featureNoise", 0.0)
  numPoints = args.get("numPoints", 10)
  trialNum = args.get("trialNum", 42)
  plotInferenceStats = args.get("plotInferenceStats", True)
  settlingTime = args.get("settlingTime", 3)
  includeRandomLocation = args.get("includeRandomLocation", False)
  enableFeedback = args.get("enableFeedback", True)
  numAmbiguousLocations = args.get("numAmbiguousLocations", 0)
  numInferenceRpts = args.get("numInferenceRpts", 1)
  numLearningRpts = args.get("numLearningRpts", 3)
  l2Params = args.get("l2Params", None)
  l4Params = args.get("l4Params", None)

  # NOTE(review): locationNoise, featureNoise, plotInferenceStats,
  # includeRandomLocation, numAmbiguousLocations and numInferenceRpts are
  # parsed above but never referenced again in this function — confirm
  # whether that is intentional (preliminary experiment) or an omission.

  # Create the objects. `trialNum` doubles as the RNG seed so each trial is
  # reproducible.
  objects = createObjectMachine(
    machineType="simple",
    numInputBits=20,
    sensorInputSize=sensorInputSize,
    externalInputSize=2400,
    numCorticalColumns=numColumns,
    numFeatures=numFeatures,
    numLocations=numLocations,
    seed=trialNum
  )
  objects.createRandomObjects(numObjects, numPoints=numPoints,
                                    numLocations=numLocations,
                                    numFeatures=numFeatures)

  r = objects.objectConfusion()
  print "Average common pairs in objects=", r[0],
  print ", locations=",r[1],", features=",r[2]

  # print "Total number of objects created:",len(objects.getObjects())
  # print "Objects are:"
  # for o in objects:
  #   pairs = objects[o]
  #   pairs.sort()
  #   print str(o) + ": " + str(pairs)

  # This object machine will simulate objects where each object is just one
  # unique feature/location pair. We will use this to pretrain L4/L2 with
  # individual pairs.
  pairObjects = createObjectMachine(
    machineType="simple",
    numInputBits=20,
    sensorInputSize=sensorInputSize,
    externalInputSize=2400,
    numCorticalColumns=numColumns,
    numFeatures=numFeatures,
    numLocations=numLocations,
    seed=trialNum
  )

  # Create "pair objects" consisting of all unique F/L pairs from our objects.
  # These pairs should have the same SDRs as the original objects, which is
  # why we share the locations/features pools rather than regenerating them.
  pairObjects.locations = objects.locations
  pairObjects.features = objects.features
  distinctPairs = objects.getDistinctPairs()
  print "Number of distinct feature/location pairs:",len(distinctPairs)
  for pairNumber,pair in enumerate(distinctPairs):
    pairObjects.addObject([pair], pairNumber)

  #####################################################
  #
  # Setup experiment and train the network
  name = "dp_O%03d_L%03d_F%03d_C%03d_T%03d" % (
    numObjects, numLocations, numFeatures, numColumns, trialNum
  )
  exp = L4L2Experiment(
    name,
    numCorticalColumns=numColumns,
    L2Overrides=l2Params,
    L4Overrides=l4Params,
    networkType = networkType,
    longDistanceConnections=longDistanceConnections,
    inputSize=sensorInputSize,
    externalInputSize=2400,
    numInputBits=20,
    seed=trialNum,
    enableFeedback=enableFeedback,
    numLearningPoints=numLearningRpts,
  )

  # Learn all FL pairs in each L4 and in each L2
  # Learning in L2 involves choosing a small random number of cells, growing
  # proximal synapses to L4 cells. Growing distal synapses to active cells in
  # each neighboring column. Each column gets its own distal segment.
  exp.learnObjects(pairObjects.provideObjectsToLearn())

  # Verify that all columns learned the pairs
  # numCorrectClassifications = 0
  # for pairId in pairObjects:
  #
  #   obj = pairObjects[pairId]
  #   objectSensations = {}
  #   for c in range(numColumns):
  #     objectSensations[c] = [obj[0]]*settlingTime
  #
  #   inferConfig = {
  #     "object": pairId,
  #     "numSteps": settlingTime,
  #     "pairs": objectSensations,
  #   }
  #
  #   inferenceSDRs = pairObjects.provideObjectToInfer(inferConfig)
  #
  #   exp.infer(inferenceSDRs, objectName=pairId, reset=False)
  #
  #   if exp.isObjectClassified(pairId, minOverlap=30):
  #     numCorrectClassifications += 1
  #
  #   exp.sendReset()
  #
  # print "Classification accuracy for pairs=",100.0*numCorrectClassifications/len(distinctPairs)

  ########################################################################
  #
  # Create "object representations" in L2 by simultaneously invoking the union
  # of all FL pairs in an object and doing some sort of spatial pooling to
  # create L2 representation.

  exp.resetStatistics()
  for objectId in objects:
    # Create one sensation per object consisting of the union of all features
    # and the union of locations.
    ul, uf = objects.getUniqueFeaturesLocationsInObject(objectId)
    print "Object",objectId,"Num unique features:",len(uf),"Num unique locations:",len(ul)
    objectSensations = {}
    for c in range(numColumns):
      # The same union sensation is repeated `settlingTime` times so the
      # network can settle on a stable representation.
      objectSensations[c] = [(tuple(ul), tuple(uf))]*settlingTime

    inferConfig = {
      "object": objectId,
      "numSteps": settlingTime,
      "pairs": objectSensations,
    }

    inferenceSDRs = objects.provideObjectToInfer(inferConfig)

    exp.infer(inferenceSDRs, objectName="Object "+str(objectId))

  # Compute confusion matrix between all objects as network settles.
  # Only cortical column 0 ("Full L2 SDR C0") is compared; entry (o1, o2) is
  # the SDR overlap between the two objects at that settling iteration.
  for iteration in range(settlingTime):
    confusion = numpy.zeros((numObjects, numObjects))
    for o1 in objects:
      for o2 in objects:
        confusion[o1, o2] = len(set(exp.statistics[o1]["Full L2 SDR C0"][iteration]) &
                                set(exp.statistics[o2]["Full L2 SDR C0"][iteration]) )

    # One heatmap PDF per settling iteration, written to the working dir.
    # NOTE(review): filename hardcodes "10L_5F" regardless of the actual
    # numLocations/numFeatures parameters — confirm before comparing runs.
    plt.figure()
    plt.imshow(confusion)
    plt.xlabel('Object #')
    plt.ylabel('Object #')
    plt.title("Object overlaps")
    plt.colorbar()
    plt.savefig("confusion_random_10L_5F_"+str(iteration)+".pdf")
    plt.close()

  for col in range(numColumns):
    print "Diagnostics for column",col
    printColumnPoolerDiagnostics(exp.getAlgorithmInstance(column=col))
    print

  # Returned unchanged here; the docstring's "accuracy metrics" keys are not
  # yet added in this preliminary version.
  return args


# Show average overlap as a function of number of shared FL pairs,
# shared locations, shared features
# Compute confusion matrix showing number of shared FL pairs
# Compute confusion matrix using our normal method


def runExperimentPool(numObjects,
                      numLocations,
                      numFeatures,
                      numColumns,
                      longDistanceConnectionsRange = [0.0],
                      numWorkers=7,
                      nTrials=1,
                      numPoints=10,
                      locationNoiseRange=[0.0],
                      featureNoiseRange=[0.0],
                      enableFeedback=[True],
                      ambiguousLocationsRange=[0],
                      numInferenceRpts=1,
                      settlingTime=3,
                      l2Params=None,
                      l4Params=None,
                      resultsName="convergence_results.pkl"):
  """
  Allows you to run a number of experiments using multiple processes.
  For each parameter except numWorkers, pass in a list containing valid values
  for that parameter. The cross product of everything is run, and each
  combination is run nTrials times.

  Returns a list of dict containing detailed results from each experiment.
  Also pickles and saves the results in resultsName for later analysis.

  Example:
    results = runExperimentPool(
                          numObjects=[10],
                          numLocations=[5],
                          numFeatures=[5],
                          numColumns=[2,3,4,5,6],
                          numWorkers=8,
                          nTrials=5)
  """
  # NOTE(review): the list-valued defaults ([0.0], [True], [0]) are mutable
  # default arguments; they are only iterated here, never mutated, so this
  # is safe in practice.

  # Create function arguments for every possibility (full cross product).
  args = []

  for c in reversed(numColumns):
    for o in reversed(numObjects):
      for l in numLocations:
        for f in numFeatures:
          for p in longDistanceConnectionsRange:
            for t in range(nTrials):
              for locationNoise in locationNoiseRange:
                for featureNoise in featureNoiseRange:
                  for ambiguousLocations in ambiguousLocationsRange:
                    for feedback in enableFeedback:
                      args.append(
                        {"numObjects": o,
                         "numLocations": l,
                         "numFeatures": f,
                         "numColumns": c,
                         "trialNum": t,
                         "numPoints": numPoints,
                         "longDistanceConnections" : p,
                         "plotInferenceStats": False,
                         "locationNoise": locationNoise,
                         "featureNoise": featureNoise,
                         "enableFeedback": feedback,
                         "numAmbiguousLocations": ambiguousLocations,
                         "numInferenceRpts": numInferenceRpts,
                         "l2Params": l2Params,
                         "l4Params": l4Params,
                         "settlingTime": settlingTime,
                         }
                      )
  numExperiments = len(args)
  print "{} experiments to run, {} workers".format(numExperiments, numWorkers)

  # Run the pool
  if numWorkers > 1:
    pool = Pool(processes=numWorkers)
    # chunksize=1 so each worker pulls one experiment at a time (experiments
    # have very uneven runtimes).
    rs = pool.map_async(runExperiment, args, chunksize=1)
    while not rs.ready():
      # _number_left is a private multiprocessing attribute, used here only
      # for progress reporting.
      remaining = rs._number_left
      pctDone = 100.0 - (100.0*remaining) / numExperiments
      print " =>", remaining, "experiments remaining, percent complete=",pctDone
      time.sleep(5)
    pool.close()  # No more work
    pool.join()
    result = rs.get()
  else:
    # Single-process fallback: run sequentially in this process.
    result = []
    for arg in args:
      result.append(runExperiment(arg))

  # print "Full results:"
  # pprint.pprint(result, width=150)

  # Pickle results for later use
  with open(resultsName,"wb") as f:
    cPickle.dump(result,f)

  return result


if __name__ == "__main__":

  # This is how you run a specific experiment in single process mode. Useful
  # for debugging, profiling, etc.
  results = runExperiment(
                {
                  "numObjects": 20,
                  "numPoints": 10,
                  "numLocations": 10,
                  "numFeatures": 5,
                  "numColumns": 1,
                  "trialNum": 4,
                  "settlingTime": 3,
                  "plotInferenceStats": False,  # Outputs detailed graphs
                }
          )
Let's learn the art of appreciating lame jokes. As someone who often cracks lame jokes, getting disapproving glares and sighs have become a norm. However, these jokes are not always as unbearable as they are known to be. THAT SENSE OF ACHIEVEMENT WHEN YOU GET A JOKE IN A SHORT TIME IS TO DIE FOR. The ability to appreciate and make puns should be celebrated, because it trains your mind to work fast. I won't call these jokes lame, I'd call them witty. Of course cracking lame jokes all the time won't make you a genius, but you would find yourself spending more time thinking and constructing clever word plays. INSTEAD OF FOLLOWING FRIENDS ON TWITTER, WE FOLLOW MORE OF THESE ACCOUNTS. Instead of staying in the box, cracking witty jokes and puns make you think outside of the box and create jokes that will never cross normal people's minds. It makes you have a whole new view on things, focusing on those which can be made into jokes later. As a result, you can laugh about almost everything and make a joke out of it. Of course, I would not encourage you to make a joke of other people's misery… unless you do not want friends anymore. Contrary to popular belief, you will still have friends after cracking all those jokes. Although they may sigh and shoot disapproving glares at you, they secretly appreciate your jokes. You just have to get used to getting ignored and having them roll their eyes. REALLY, WE ARE BEST FRIENDS. However, I would not recommend you to go crazy with all the jokes, because they might really not take you seriously one day. Then the joke's on you. Admit it, these lame jokes are funny. Most of the time, we only call them "lame" because we fail to appreciate them. Underneath the disapproval, we secretly applaud these people for having the ability to create puns and jokes for almost anything. Appreciating lame jokes makes you a less serious and rigid person. 
As we live in a fast-paced and stressful environment, we need to learn to take things slow and laugh at the little things in life. WHAT IT FEELS LIKE TO BE THE ONLY ONE WHO APPRECIATES LAME JOKES IN A GROUP OF FRIENDS. Catching someone off guard with your lame jokes may be a waste of their time, but it definitely is not a waste of yours. You get to appreciate the betrayed looks on their faces when they realise that you just fooled them for the past 10 seconds of their lives that they trusted you with. While there is no great benefit for wasting someone else's time, it gives you a sense of satisfaction that you are able to trick someone over and over, fooling them into thinking that you are about to say something serious. Let's try to understand and applaud our friends' efforts when they crack lame jokes. It makes everyone happier!
#!/usr/bin/python -tt
#
# Copyright (c) 2011 Intel, Inc.
#
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the Free
# Software Foundation; version 2 of the License
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
# or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
# for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc., 59
# Temple Place - Suite 330, Boston, MA 02111-1307, USA.

# NOTE: Python 2 module (long literals 1024L, bare except used for
# cleanup-then-reraise below).

import os
import shutil
import tempfile

from mic import chroot, msger, rt_util
from mic.utils import misc, fs_related, errors, cmdln
from mic.conf import configmgr
from mic.plugin import pluginmgr
from mic.imager.loop import LoopImageCreator, load_mountpoints
from mic.pluginbase import ImagerPlugin

class LoopPlugin(ImagerPlugin):
    """Imager plugin that creates, chroots into, and unpacks loop images."""

    name = 'loop'

    @classmethod
    @cmdln.option("--compress-disk-image", dest="compress_image",
                  type='choice', choices=("gz", "bz2", "lzo"),
                  default=None,
                  help="Same with --compress-image")
    # alias to compress-image for compatibility
    @cmdln.option("--compress-image", dest="compress_image",
                  type='choice', choices=("gz", "bz2", "lzo"),
                  default=None,
                  help="Compress all loop images with 'gz' or 'bz2' or 'lzo',"
                       "Note: if you want to use 'lzo', package 'lzop' is needed to"
                       "be installed manually.")
    @cmdln.option("--shrink", action='store_true', default=False,
                  help="Whether to shrink loop images to minimal size")
    def do_create(self, subcmd, opts, *args):
        """${cmd_name}: create loop image

        Usage:
            ${name} ${cmd_name} <ksfile> [OPTS]

        ${cmd_option_list}
        """
        # Exactly one positional argument: the kickstart file path.
        if len(args) != 1:
            raise errors.Usage("Extra arguments given")

        creatoropts = configmgr.create
        ksconf = args[0]

        # Decide running mode: re-exec inside a bootstrap environment, or run
        # natively (optionally after confirming mic-native is missing).
        if creatoropts['runtime'] == "bootstrap":
            configmgr._ksconf = ksconf
            rt_util.bootstrap_mic()
        elif not rt_util.inbootstrap():
            try:
                fs_related.find_binary_path('mic-native')
            except errors.CreatorError:
                if not msger.ask("Subpackage \"mic-native\" has not been "
                                 "installed in your host system, still "
                                 "continue with \"native\" running mode?",
                                 False):
                    raise errors.Abort("Abort because subpackage 'mic-native' "
                                       "has not been installed")

        # Package recording: a release build always records 'name' and 'vcs'.
        recording_pkgs = []
        if len(creatoropts['record_pkgs']) > 0:
            recording_pkgs = creatoropts['record_pkgs']

        if creatoropts['release'] is not None:
            if 'name' not in recording_pkgs:
                recording_pkgs.append('name')
            if 'vcs' not in recording_pkgs:
                recording_pkgs.append('vcs')

        configmgr._ksconf = ksconf

        # try to find the pkgmgr: 'auto' walks the preferred backend order,
        # otherwise match the explicitly requested backend by name.
        pkgmgr = None
        backends = pluginmgr.get_plugins('backend')
        if 'auto' == creatoropts['pkgmgr']:
            for key in configmgr.prefer_backends:
                if key in backends:
                    pkgmgr = backends[key]
                    break
        else:
            for key in backends.keys():
                if key == creatoropts['pkgmgr']:
                    pkgmgr = backends[key]
                    break

        if not pkgmgr:
            raise errors.CreatorError("Can't find backend: %s, "
                                      "available choices: %s" %
                                      (creatoropts['pkgmgr'],
                                       ','.join(backends.keys())))

        creator = LoopImageCreator(creatoropts,
                                   pkgmgr,
                                   opts.compress_image,
                                   opts.shrink)

        if len(recording_pkgs) > 0:
            creator._recording_pkgs = recording_pkgs

        # Refuse to overwrite existing output images for this release.
        image_names = [creator.name + ".img"]
        image_names.extend(creator.get_image_names())
        self.check_image_exists(creator.destdir,
                                creator.pack_to,
                                image_names,
                                creatoropts['release'])
        # Image build pipeline; cleanup() always runs, even on failure.
        try:
            creator.check_depend_tools()
            creator.mount(None, creatoropts["cachedir"])
            creator.install()
            creator.configure(creatoropts["repomd"])
            creator.copy_kernel()
            creator.unmount()
            creator.package(creatoropts["destdir"])
            creator.create_manifest()

            if creatoropts['release'] is not None:
                creator.release_output(ksconf,
                                       creatoropts['destdir'],
                                       creatoropts['release'])
            creator.print_outimage_info()
        except errors.CreatorError:
            raise
        finally:
            creator.cleanup()

        msger.info("Finished.")
        return 0

    @classmethod
    def _do_chroot_tar(cls, target, cmd=[]):
        # Chroot into a tar'd loop image: extract the tarball, mount each
        # filesystem listed in the companion <target>.xml mount-point file,
        # run `cmd` (or a shell) in the assembled tree, then tear down.
        # NOTE(review): cmd=[] is a mutable default, but it is only read
        # (len / join), never mutated, so it is safe here.
        mountfp_xml = os.path.splitext(target)[0] + '.xml'
        if not os.path.exists(mountfp_xml):
            raise errors.CreatorError("No mount point file found for this tar "
                                      "image, please check %s" % mountfp_xml)

        import tarfile
        tar = tarfile.open(target, 'r')
        tmpdir = misc.mkdtemp()
        tar.extractall(path=tmpdir)
        tar.close()

        mntdir = misc.mkdtemp()

        loops = []
        for (mp, label, name, size, fstype) in load_mountpoints(mountfp_xml):
            # Pick the mount helper for this filesystem type.
            if fstype in ("ext2", "ext3", "ext4"):
                myDiskMount = fs_related.ExtDiskMount
            elif fstype == "btrfs":
                myDiskMount = fs_related.BtrfsDiskMount
            elif fstype in ("vfat", "msdos"):
                myDiskMount = fs_related.VfatDiskMount
            else:
                raise errors.CreatorError("Cannot support fstype: %s" % fstype)

            name = os.path.join(tmpdir, name)
            # size from the XML appears to be in MB — converted to bytes here.
            size = size * 1024L * 1024L
            loop = myDiskMount(fs_related.SparseLoopbackDisk(name, size),
                               os.path.join(mntdir, mp.lstrip('/')),
                               fstype, size, label)

            try:
                msger.verbose("Mount %s to %s" % (mp, mntdir + mp))
                fs_related.makedirs(os.path.join(mntdir, mp.lstrip('/')))
                loop.mount()

            except:
                # On any mount failure: undo this loop, unmount the ones
                # already mounted (reverse order), drop the extraction dir,
                # and re-raise the original exception.
                loop.cleanup()
                for lp in reversed(loops):
                    chroot.cleanup_after_chroot("img", lp, None, mntdir)

                shutil.rmtree(tmpdir, ignore_errors=True)
                raise

            loops.append(loop)

        try:
            if len(cmd) != 0:
                cmdline = "/usr/bin/env HOME=/root " + ' '.join(cmd)
            else:
                cmdline = "/usr/bin/env HOME=/root /bin/bash"
            chroot.chroot(mntdir, None, cmdline)
        except:
            raise errors.CreatorError("Failed to chroot to %s." % target)
        finally:
            # Always unmount in reverse mount order and remove the temp tree.
            for loop in reversed(loops):
                chroot.cleanup_after_chroot("img", loop, None, mntdir)
            shutil.rmtree(tmpdir, ignore_errors=True)

    @classmethod
    def do_chroot(cls, target, cmd=[]):
        # Chroot into a loop image file; tarballs are delegated to
        # _do_chroot_tar above.
        if target.endswith('.tar'):
            import tarfile
            if tarfile.is_tarfile(target):
                LoopPlugin._do_chroot_tar(target, cmd)
                return
            else:
                raise errors.CreatorError("damaged tarball for loop images")

        img = target
        # get_file_size presumably returns MB — converted to bytes here
        # (confirm against mic.utils.misc).
        imgsize = misc.get_file_size(img) * 1024L * 1024L
        imgtype = misc.get_image_type(img)
        if imgtype == "btrfsimg":
            fstype = "btrfs"
            myDiskMount = fs_related.BtrfsDiskMount
        elif imgtype in ("ext3fsimg", "ext4fsimg"):
            # "ext3fsimg"[:4] -> "ext3", "ext4fsimg"[:4] -> "ext4"
            fstype = imgtype[:4]
            myDiskMount = fs_related.ExtDiskMount
        else:
            raise errors.CreatorError("Unsupported filesystem type: %s" \
                                      % imgtype)

        extmnt = misc.mkdtemp()
        extloop = myDiskMount(fs_related.SparseLoopbackDisk(img, imgsize),
                              extmnt,
                              fstype,
                              4096,
                              "%s label" % fstype)
        try:
            extloop.mount()

        except errors.MountError:
            extloop.cleanup()
            shutil.rmtree(extmnt, ignore_errors=True)
            raise

        try:
            if len(cmd) != 0:
                cmdline = ' '.join(cmd)
            else:
                cmdline = "/bin/bash"

            # Prefix with `env HOME=/root` only if env exists inside the image.
            envcmd = fs_related.find_binary_inchroot("env", extmnt)
            if envcmd:
                cmdline = "%s HOME=/root %s" % (envcmd, cmdline)

            chroot.chroot(extmnt, None, cmdline)
        except:
            raise errors.CreatorError("Failed to chroot to %s." % img)
        finally:
            chroot.cleanup_after_chroot("img", extloop, None, extmnt)

    @classmethod
    def do_unpack(cls, srcimg):
        # Copy the source image into a fresh temp dir under /var/tmp and
        # return the copy's path, leaving the original untouched.
        image = os.path.join(tempfile.mkdtemp(dir="/var/tmp", prefix="tmp"),
                             "target.img")
        msger.info("Copying file system ...")
        shutil.copyfile(srcimg, image)
        return image
2. Q: Where is your factory located? How can I visit it? A: Our factory is located in Shenzhen, Guangdong Province, China. 3. Q: What's the material of your products? A: We can use both polyester and nylon — different fabric, different quality; it depends on you. 6. Q: How does your factory handle quality control? A: Quality is our culture. 4 YRS Shenzhen Siezend Technology Co., Ltd. 2 YRS Ningbo Morning Rubber And Plastic Industrial & Trading Co., Ltd. 4. We don't use cheap material; we only use high quality for you to choose. 2. Sample order 3. We will reply to your inquiry within 24 hours. 4. After sending, we will track the products for you once every two days, until you get the products. 1 YRS Jiande Dayang Industry Co., Ltd. Q2. Could I use my own LOGO or design? A2: Yes, customized logos and designs are available. Q3. Can I choose different colours? A3: Yes, customized colours are available. Q7. May I visit your factory? A7: Sure, you are welcome any time. Polyester fabric with a soft texture: open the pocket and trot 10 m to fill it with air to about 80%, then top it up until the air inside reaches 100%, and close it using the bayonet lock on the sealing strip. Suitable places: glass, beach, indoors and so on. Alibaba.com offers 18,117 inflatable sleeping bag products. About 32% of these are sleeping bags, 1% are living room sofas, and 1% are inflatable animal toys. A wide variety of inflatable sleeping bag options are available to you, such as free samples and paid samples. There are 18,117 inflatable sleeping bag suppliers, mainly located in Asia. The top supplying country is China (Mainland), which supplies 100% of inflatable sleeping bags. Inflatable sleeping bag products are most popular in North America, Western Europe, and South America. You can ensure product safety by selecting from certified suppliers, including 2,515 with ISO9001, 2,069 with Other, and 1,074 with BSCI certification.
import logging
import re
import struct
from typing import Final, Optional

import aiohttp

from ..abc import MixinMeta
from ..cog_utils import CompositeMetaClass

log = logging.getLogger("red.cogs.Audio.cog.Utilities.Parsing")

STREAM_TITLE: Final[re.Pattern] = re.compile(br"StreamTitle='([^']*)';")


class ParsingUtilities(MixinMeta, metaclass=CompositeMetaClass):
    async def icyparser(self, url: str) -> Optional[str]:
        """Fetch the current track title from an ICY (SHOUTcast/Icecast) stream.

        Requests the stream with ``Icy-MetaData: 1`` and scans up to five
        metadata blocks for a non-empty ``StreamTitle`` field.  Returns the
        decoded title, or ``None`` if the stream has no usable metadata or
        the connection fails.
        """
        try:
            async with self.session.get(url, headers={"Icy-MetaData": "1"}) as resp:
                # Bytes of audio between successive metadata blocks.
                interval = int(resp.headers["icy-metaint"])
                for _attempt in range(5):
                    # Skip the audio payload preceding the metadata block.
                    await resp.content.readexactly(interval)
                    # One length byte, in units of 16 bytes.
                    (length_byte,) = struct.unpack(
                        "B", await resp.content.readexactly(1)
                    )
                    raw_meta = await resp.content.readexactly(length_byte * 16)
                    match = re.search(STREAM_TITLE, raw_meta.rstrip(b"\0"))
                    if not match:
                        # Metadata present but no StreamTitle field: give up.
                        return None
                    stream_title = match.group(1)
                    if stream_title:
                        return stream_title.decode("utf-8", errors="replace")
                    # Empty title — try the next metadata block.
        except (KeyError, aiohttp.ClientConnectionError, aiohttp.ClientResponseError):
            # Missing icy-metaint header or transport failure.
            return None
When the parameter PARALLEL_DEGREE_POLICY is set to AUTO, Oracle Database queues SQL statements that require parallel execution if the necessary number of parallel execution server processes are not available. After the necessary resources become available, the SQL statement is dequeued and allowed to execute. The default dequeue order is a simple first in, first out queue based on the time a statement was issued. If there are sufficient parallel execution servers available and there are no statements ahead in the queue waiting for the resources, the SQL statement is executed. If there are not sufficient parallel execution servers available, the SQL statement is queued based on specified conditions and dequeued from the front of the queue when specified conditions are met. This value is not the maximum number of parallel server processes allowed on the system, but the number available to run parallel statements before parallel statement queuing is used. It is set lower than the maximum number of parallel server processes allowed on the system (PARALLEL_MAX_SERVERS) to ensure each parallel statement gets all of the parallel server resources required and to prevent overloading the system with parallel server processes. Note all serial (nonparallel) statements execute immediately even if parallel statement queuing has been activated. For information about views for monitoring and analyzing parallel statement queuing, refer to "V$RSRC_SESSION_INFO" and "V$RSRCMGRMETRIC".
# Copyright (c) Facebook, Inc. and its affiliates.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import re
import subprocess

from .adb_executor import AdbExecutor


class DeviceNameCalculator:
    """Derives a descriptive name for the connected Android device by
    querying it over adb (API level, Play services, density, screen size,
    ABI and locale), e.g. ``API_28_GP_XHDPI_1080x1920_arm64-v8a_en-US``.
    """

    def __init__(self, executor=None):
        # FIX: the previous signature was ``executor=AdbExecutor()``, which
        # evaluates the default once at class-definition time and shares a
        # single executor across every instance (and runs AdbExecutor() even
        # when a custom executor is passed).  Construct it lazily instead.
        self.executor = executor if executor is not None else AdbExecutor()

    def name(self):
        """Return the composed device name.

        Raises:
            RuntimeError: if any of the device parameters could not be
                determined (should not happen in normal operation).
        """
        api_version_text = self._api_version_text()
        play_services_text = self._play_services_text()
        screen_density_text = self._screen_density_text()
        screen_size_text = self._screen_size_text()
        architecture_text = self._architecture_text()
        locale = self._locale()

        device_parameters = [
            api_version_text,
            play_services_text,
            screen_density_text,
            screen_size_text,
            architecture_text,
            locale,
        ]

        if None in device_parameters:
            raise RuntimeError(
                "ERROR: you shouldn't see this in normal operation,"
                "file a bug report please.\n\n "
                "One or more device params are None"
            )

        return "{0}_{1}_{2}_{3}_{4}_{5}".format(
            api_version_text,
            play_services_text,
            screen_density_text,
            screen_size_text,
            architecture_text,
            locale,
        )

    def _screen_density_text(self):
        """Map the reported dpi to an Android density bucket name."""
        density = int(self._screen_density())
        # Threshold comparisons replace the previous ``in range(...)``
        # membership tests; equivalent for the non-negative values the
        # regex in _screen_density() can produce.
        if density <= 120:
            return "LDPI"
        elif density <= 160:
            return "MDPI"
        elif density <= 240:
            return "HDPI"
        elif density <= 320:
            return "XHDPI"
        elif density <= 480:
            return "XXHDPI"
        return "XXXHDPI"

    def _screen_density(self):
        # `wm density` prints e.g. "Physical density: 420".
        result = self.executor.execute(["shell", "wm", "density"])
        density = re.search("[0-9]+", result)
        if density:
            return density.group(0)

    def _screen_size_text(self):
        # `wm size` prints e.g. "Physical size: 1080x1920".
        result = self.executor.execute(["shell", "wm", "size"])
        size = re.search("[0-9]+x[0-9]+", result)
        if size:
            return size.group(0)

    def _has_play_services(self):
        """True when the Google Play services package is installed."""
        try:
            output = self.executor.execute(
                ["shell", "pm", "path", "com.google.android.gms"]
            )
            return bool(output)
        except subprocess.CalledProcessError:
            # `pm path` exits non-zero when the package is absent.
            return False

    def _play_services_text(self):
        return "GP" if self._has_play_services() else "NO_GP"

    def _api_version(self):
        return self.executor.execute(["shell", "getprop", "ro.build.version.sdk"])

    def _api_version_text(self):
        return "API_{0}".format(int(self._api_version()))

    def _architecture_text(self):
        architecture = self.executor.execute(["shell", "getprop", "ro.product.cpu.abi"])
        return architecture.rstrip()

    def _locale(self):
        # Prefer the user-set locale; fall back to the build default.
        persist_locale = self.executor.execute(
            ["shell", "getprop", "persist.sys.locale"]
        )
        product_locale = self.executor.execute(
            ["shell", "getprop", "ro.product.locale"]
        )
        return persist_locale.rstrip() if persist_locale else product_locale.rstrip()
The euro zone's struggle to avoid another recession will take center stage in the coming week in the absence of major US data, as investors mull whether the ECB's new asset-buying plan is a prelude to even more radical steps. While data from China may give clarity on a pattern of uneven growth there, it is in Europe that the prospects for the economy are most uncertain, although a ceasefire in Ukraine could lift the mood and avoid new EU sanctions on Russia this week, Reuters reported. The euro zone's fragile economic recovery came to a halt in the second quarter, in marked contrast to the United States, where the economy grew robustly. Like many of its neighbors struggling to rebound from the debt crisis, Italy slipped into recession for the third time since 2008. EU finance ministers and European Central Bank President Mario Draghi will convene on Friday in Milan, where the ECB's latest move to help the economy and avoid deflation will be at the forefront of discussions. The ECB stunned markets last week by cutting interest rates and announcing a plan to buy asset-backed securities from October, which Barclays described "as a clear first step into quantitative easing" - a US-style bond-buying program that could help the economy but divides the central bank. Draghi said his aim was to expand the bank's balance sheet back to the heights reached in early 2012, which equates to a rise of around 50 percent or 1 trillion euros in new assets. "There's now a 50-50 chance that the ECB will go further and announce a sovereign bond-buying program by year-end, or the beginning of 2015," he said. Under its statutes, the ECB is banned from buying bonds directly from governments but can find ways to purchase them from banks, for example, on the secondary market. An inflation rate of just 0.3 percent, coupled with the lack of economic growth, has given new urgency to the bloc's search for growth. 
The ECB is urging governments to also do their part and enact ambitious structural reforms. In Asia, the central banks of South Korea, Indonesia and the Philippines hold monetary policy meetings this week. The People's Bank of China has so far refrained from cutting interest rates, preferring instead to ease liquidity for some banks to free funds for lending. Beijing in turn has tried to ease conditions in the property market. Activity in China's vast factory sector cooled in August as foreign and domestic demand slowed, spurring new calls for more policy easing to prevent the economy from stumbling once more. But China's services sector rebounded in August after a drop in July, offsetting factory-sector weakness and letting the government stick with its policy stance. "The economic expansion is quite uneven, as exports accelerate, investment slows, and the real estate correction intensifies, but on balance, headline real GDP growth is probably a bit faster to the third quarter," said Bill Adams, an economist at PNC Financial Services Group.
"""Tests for high-level polynomials manipulation functions."""

import pytest

from diofant import (ComputationFailed, MultivariatePolynomialError, horner,
                     interpolate, symbols, symmetrize, viete)
from diofant.abc import a, b, c, d, e, x, y, z

__all__ = ()


def test_symmetrize():
    # symmetrize(f, *gens) returns (sym, rem): `sym` rewritten in terms of
    # the elementary symmetric polynomials of the generators, `rem` the
    # non-symmetric remainder; formal=True appends the substitution list.
    assert symmetrize(0, x, y, z) == (0, 0)
    assert symmetrize(1, x, y, z) == (1, 0)

    # Elementary symmetric polynomials in x, y, z.
    s1 = x + y + z
    s2 = x*y + x*z + y*z

    assert symmetrize(1) == (1, 0)
    assert symmetrize(1, formal=True) == (1, 0, [])

    assert symmetrize(x) == (x, 0)
    assert symmetrize(x + 1) == (x + 1, 0)

    assert symmetrize(x, x, y) == (x + y, -y)
    assert symmetrize(x + 1, x, y) == (x + y + 1, -y)

    assert symmetrize(x, x, y, z) == (s1, -y - z)
    assert symmetrize(x + 1, x, y, z) == (s1 + 1, -y - z)

    assert symmetrize(x**2, x, y, z) == (s1**2 - 2*s2, -y**2 - z**2)

    assert symmetrize(x**2 + y**2) == (-2*x*y + (x + y)**2, 0)
    assert symmetrize(x**2 - y**2) == (-2*x*y + (x + y)**2, -2*y**2)

    assert symmetrize(x**3 + y**2 + a*x**2 + b*y**3, x, y) == \
        (-3*x*y*(x + y) - 2*a*x*y + a*(x + y)**2 + (x + y)**3,
         y**2*(1 - a) + y**3*(b - 1))

    # Custom symbols u0, u1, u2 stand in for the elementary symmetric polys.
    U = [u0, u1, u2] = symbols('u:3')

    assert symmetrize(x + 1, x, y, z, formal=True, symbols=U) == \
        (u0 + 1, -y - z, [(u0, x + y + z), (u1, x*y + x*z + y*z), (u2, x*y*z)])

    # Lists of expressions are symmetrized element-wise.
    assert symmetrize([1, 2, 3]) == [(1, 0), (2, 0), (3, 0)]
    assert symmetrize([1, 2, 3], formal=True) == ([(1, 0), (2, 0), (3, 0)], [])

    assert symmetrize([x + y, x - y]) == [(x + y, 0), (x + y, -2*y)]


def test_horner():
    # horner() rewrites polynomials in nested (Horner) form; `wrt` selects
    # the outermost variable for multivariate input.
    assert horner(0) == 0
    assert horner(1) == 1
    assert horner(x) == x

    assert horner(x + 1) == x + 1
    assert horner(x**2 + 1) == x**2 + 1
    assert horner(x**2 + x) == (x + 1)*x
    assert horner(x**2 + x + 1) == (x + 1)*x + 1

    assert horner(
        9*x**4 + 8*x**3 + 7*x**2 + 6*x + 5) == (((9*x + 8)*x + 7)*x + 6)*x + 5
    assert horner(
        a*x**4 + b*x**3 + c*x**2 + d*x + e) == (((a*x + b)*x + c)*x + d)*x + e

    assert horner(4*x**2*y**2 + 2*x**2*y + 2*x*y**2 + x*y, wrt=x) == ((
        4*y + 2)*x*y + (2*y + 1)*y)*x
    assert horner(4*x**2*y**2 + 2*x**2*y + 2*x*y**2 + x*y, wrt=y) == ((
        4*x + 2)*y*x + (2*x + 1)*x)*y


def test_interpolate():
    # Plain list: values at points 1..n; pairs/dict: explicit (point, value).
    assert interpolate([1, 4, 9, 16], x) == x**2
    assert interpolate([(1, 1), (2, 4), (3, 9)], x) == x**2
    assert interpolate([(1, 2), (2, 5), (3, 10)], x) == 1 + x**2
    assert interpolate({1: 2, 2: 5, 3: 10}, x) == 1 + x**2


def test_viete():
    # Viete's formulas relate a polynomial's coefficients to symmetric
    # functions of its roots.
    r1, r2 = symbols('r1, r2')

    assert viete(
        a*x**2 + b*x + c, [r1, r2], x) == [(r1 + r2, -b/a), (r1*r2, c/a)]
    assert viete(
        a*x**2 + b*x + c, None, x) == [(r1 + r2, -b/a), (r1*r2, c/a)]

    pytest.raises(ValueError, lambda: viete(1, [], x))
    pytest.raises(ValueError, lambda: viete(x**2 + 1, [r1]))

    pytest.raises(MultivariatePolynomialError, lambda: viete(x + y, [r1]))

    pytest.raises(ComputationFailed, lambda: viete(1))
Using a portable electric space heater is one of the most expensive ways to heat your home. At today’s prices, electric heat costs five times as much per BTU Beware of claims too good to be true All portable electric heaters are the same efficiency, no matter how fancy the cabinet. The truth about electric space heaters If it seems too good to be true, it probably is Space heating poses a much higher risk of fire, • Select a space heater with a guard around the heating element. Running An Electric Space Heater or Water Heater on Inverter Power September 1997 space heaters and/or water heaters, so a good three-stage regulator such as an InCharge or Link 2000R is also recommended. Portable Space Heater Usage Request Responsible Office: Physical Plant; Ensure the space heater is kept in good working condition. 4. EH&S will conduct a needs assessment to confirm the space is conducive to use of a portable space heater, and contact the requestor. Unvented Crawl Spaces Whose idea was that? Gary Nordeen WSU Extension Energy Program September 25, 2008 (which also cools the crawl space), warm, humid air enters the crawlspace through the vents and condenses on the cold framing members.
#!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
#
# Copyright (C) 2013-Today Carlos Eduardo Vercelino - CLVsol
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
###############################################################################

from __future__ import print_function

import sqlite3

# NOTE(review): throughout this module SQLite table names are spliced into
# SQL via string concatenation (identifiers cannot be parameterized in
# sqlite3).  Callers must only pass trusted table names.


def hr_department_create(client, department_name):
    """Create an hr.department named *department_name* if none exists yet.

    *client* is an Odoo RPC client exposing ``model()``.
    """
    hr_department_model = client.model('hr.department')
    hr_department_browse = hr_department_model.browse([('name', '=', department_name), ])
    # browse(...).id is an empty list when no record matched.
    if hr_department_browse.id == []:
        values = {
            'name': department_name,
        }
        hr_department_model.create(values)


def hr_department_export_sqlite(client, args, db_path, table_name):
    """Dump hr.department records matching *args* into SQLite *table_name*.

    The table is dropped and recreated; ``new_id`` is left NULL for the
    matching import step to fill in on the destination server.
    """
    conn = sqlite3.connect(db_path)
    conn.text_factory = str
    cursor = conn.cursor()
    # Drop any previous export; failure (table absent) is only reported.
    try:
        cursor.execute('''DROP TABLE ''' + table_name + ''';''')
    except Exception as e:
        print('------->', e)
    cursor.execute(
        '''
        CREATE TABLE ''' + table_name + ''' (
            id INTEGER NOT NULL PRIMARY KEY,
            name,
            new_id INTEGER
            );
        '''
    )
    department_model = client.model('hr.department')
    department_browse = department_model.browse(args)
    department_count = 0
    for department_reg in department_browse:
        department_count += 1
        print(department_count, department_reg.id, department_reg.name.encode("utf-8"))
        cursor.execute('''
            INSERT INTO ''' + table_name + '''(
                id,
                name
                )
            VALUES(?,?)
            ''', (department_reg.id,
                  department_reg.name,
                  )
        )
    conn.commit()
    conn.close()
    print()
    print('--> department_count: ', department_count)
    print()


def hr_department_import_sqlite(client, args, db_path, table_name):
    """Create hr.department records from SQLite *table_name* rows.

    For each row whose name is not yet present on the server, a department
    is created and its server id is written back into the row's ``new_id``
    column (used later to remap foreign keys).
    """
    hr_department_model = client.model('hr.department')
    conn = sqlite3.connect(db_path)
    # conn.text_factory = str
    conn.row_factory = sqlite3.Row
    cursor = conn.cursor()
    cursor2 = conn.cursor()
    data = cursor.execute('''
        SELECT id, name, new_id
        FROM ''' + table_name + ''';
        ''')
    print(data)
    print([field[0] for field in cursor.description])
    hr_department_count = 0
    for row in cursor:
        hr_department_count += 1
        print(
            hr_department_count,
            row['id'],
            row['name'],
        )
        hr_department_browse = hr_department_model.browse([('name', '=', row['name']), ])
        # NOTE(review): when the department already exists, new_id is never
        # filled in for this row — presumably intentional; confirm before
        # relying on new_id for pre-existing departments.
        if hr_department_browse.id == []:
            values = {
                'name': row['name'],
            }
            hr_department_id = hr_department_model.create(values).id
            cursor2.execute(
                ''' UPDATE ''' + table_name + ''' SET new_id = ? WHERE id = ?;''',
                (hr_department_id,
                 row['id']
                 )
            )
    conn.commit()
    conn.close()
    print()
    print('--> hr_department_count: ', hr_department_count)


def employee_create_from_user(client, user_login, job_title, department_name):
    """Create an hr.employee mirroring the res.users record *user_login*.

    Links the employee to the job *job_title* and department
    *department_name* when those exist; skips creation if an employee with
    the user's name already exists.
    """
    print('Configuring employee "' + user_login + '"...')

    employee_model = client.model('res.users')
    hr_employee_model = client.model('hr.employee')
    hr_job_model = client.model('hr.job')
    hr_department_model = client.model('hr.department')

    employee_browse = employee_model.browse([('login', '=', user_login), ])
    user_ids = employee_browse.id
    if user_ids == []:
        print('--> User "' + user_login + '"does not exist!')
    else:
        user = employee_browse[0]
        hr_employee_browse = hr_employee_model.browse([('name', '=', user.name), ])
        employee_ids = hr_employee_browse.id
        if employee_ids != []:
            print('--> Employee "' + user.name + '"already exists!')
        else:
            # Resolve optional job / department links by name; False means
            # "no link" on the Odoo side.
            job_id = False
            hr_job_browse = hr_job_model.browse([('name', '=', job_title), ])
            if hr_job_browse.id != []:
                job_id = hr_job_browse[0].id
            department_id = False
            hr_department_browse = hr_department_model.browse([('name', '=', department_name), ])
            if hr_department_browse.id != []:
                department_id = hr_department_browse[0].id
            values = {
                'name': user.name,
                'address_id': user.partner_id.id,
                'work_email': user.partner_id.email,
                'job_id': job_id,
                'department_id': department_id,
                'user_id': user.id,
            }
            hr_employee_model.create(values)
    print()
    print('--> Done')
    print()


def hr_employee_export_sqlite(client, args, db_path, table_name):
    """Dump hr.employee records matching *args* into SQLite *table_name*.

    Foreign keys (department, job, address, user) are stored as the source
    server's ids; the import step remaps them via the companion tables.
    """
    conn = sqlite3.connect(db_path)
    conn.text_factory = str
    cursor = conn.cursor()
    try:
        cursor.execute('''DROP TABLE ''' + table_name + ''';''')
    except Exception as e:
        print('------->', e)
    cursor.execute(
        '''
        CREATE TABLE ''' + table_name + ''' (
            id INTEGER NOT NULL PRIMARY KEY,
            resource_id,
            name,
            code,
            work_email,
            department_id,
            address_id,
            job_id,
            user_id,
            image,
            new_id INTEGER
            );
        '''
    )
    employee_model = client.model('hr.employee')
    employee_browse = employee_model.browse(args)
    employee_count = 0
    for employee_reg in employee_browse:
        employee_count += 1
        print(employee_count, employee_reg.id, employee_reg.name.encode("utf-8"))
        # Optional relations: export NULL when unset.
        department_id = None
        if employee_reg.department_id:
            department_id = employee_reg.department_id.id
        job_id = None
        if employee_reg.job_id:
            job_id = employee_reg.job_id.id
        # address_id = None
        # if employee_reg.address_id:
        #     address_id = employee_reg.address_id.id
        # user_id = None
        # if employee_reg.user_id:
        #     user_id = employee_reg.user_id.id
        image = None
        if employee_reg.image:
            image = employee_reg.image
        cursor.execute('''
            INSERT INTO ''' + table_name + '''(
                id,
                resource_id,
                name,
                code,
                work_email,
                department_id,
                address_id,
                job_id,
                user_id,
                image
                )
            VALUES(?,?,?,?,?,?,?,?,?,?)
            ''', (employee_reg.id,
                  employee_reg.resource_id.id,
                  employee_reg.name,
                  employee_reg.code,
                  employee_reg.work_email,
                  department_id,
                  employee_reg.address_id.id,
                  job_id,
                  employee_reg.user_id.id,
                  image,
                  )
        )
    conn.commit()
    conn.close()
    print()
    print('--> employee_count: ', employee_count)
    print()


def hr_employee_import_sqlite(
        client, args, db_path, table_name,
        hr_department_table_name, res_partner_table_name, res_users_table_name
):
    """Create hr.employee records from SQLite *table_name* rows.

    Remaps exported department/address/user ids to the destination server's
    ids using the ``new_id`` columns of the companion mapping tables, then
    records each created employee's id back into this table's ``new_id``.
    """
    hr_employee_model = client.model('hr.employee')
    conn = sqlite3.connect(db_path)
    # conn.text_factory = str
    conn.row_factory = sqlite3.Row
    cursor = conn.cursor()
    cursor2 = conn.cursor()
    data = cursor.execute('''
        SELECT id, resource_id, name, code, work_email, department_id,
               address_id, job_id, user_id, image, new_id
        FROM ''' + table_name + ''';
        ''')
    print(data)
    print([field[0] for field in cursor.description])
    hr_employee_count = 0
    for row in cursor:
        hr_employee_count += 1
        print(
            hr_employee_count,
            row['id'],
            row['name'],
            row['code'],
        )
        hr_employee_browse = hr_employee_model.browse([('name', '=', row['name']), ])
        if hr_employee_browse.id == []:
            # Remap each foreign key through its mapping table's new_id.
            department_id = row['department_id']
            new_department_id = False
            if department_id is not None:
                cursor2.execute(
                    ''' SELECT new_id FROM ''' + hr_department_table_name + ''' WHERE id = ?;''',
                    (department_id,
                     )
                )
                new_department_id = cursor2.fetchone()[0]
            address_id = row['address_id']
            new_address_id = False
            if address_id is not None:
                cursor2.execute(
                    ''' SELECT new_id FROM ''' + res_partner_table_name + ''' WHERE id = ?;''',
                    (address_id,
                     )
                )
                new_address_id = cursor2.fetchone()[0]
            user_id = row['user_id']
            new_user_id = False
            if user_id is not None:
                cursor2.execute(
                    ''' SELECT new_id FROM ''' + res_users_table_name + ''' WHERE id = ?;''',
                    (user_id,
                     )
                )
                new_user_id = cursor2.fetchone()[0]
            # NOTE(review): job_id is NOT remapped — the source server's id
            # is used as-is; confirm job ids match across servers.
            values = {
                'name': row['name'],
                'code': row['code'],
                'address_id': new_address_id,
                'work_email': row['work_email'],
                'job_id': row['job_id'],
                'department_id': new_department_id,
                'user_id': new_user_id,
                'image': row['image'],
            }
            hr_employee_id = hr_employee_model.create(values).id
            cursor2.execute(
                ''' UPDATE ''' + table_name + ''' SET new_id = ? WHERE id = ?;''',
                (hr_employee_id,
                 row['id']
                 )
            )
    conn.commit()
    conn.close()
    print()
    print('--> hr_employee_count: ', hr_employee_count)
There was once a time when students in our schools learned about our American History. Students, even in their younger years, knew many of the facts, figures, personalities, and major events that comprise the American Experience. They knew about the famous explorers that came to this continent from across the Atlantic Ocean. They understood the reasons that many came to settle in this undeveloped region, whether or not it was to leave religious persecution, find a new life of opportunity, or simply seeking the ability to own land. From the late 1500's on, they came from all over Europe. There were colonies established by England, Germany, Spain, France, the Dutch, and even from the Scandinavian Regions. They understood what America was. Much has changed in just a generation. Many people, unsatisfied with the historical record, seek to alter history by a combination of patent untruths, willful omissions, and overly broad generalizations. They seek to re-create American History on the basis of a political or social agenda. In doing so, they do a great disservice to our heritage. In this section we will expose some revisionist lies, and how they attempt to alter the historical record.
#!/usr/bin/env python3
#
# Sming hardware configuration tool
#

import argparse
import os
import sys

import common
import partition
from common import *
from config import Config
from config import schema as config_schema


def openOutput(path):
    """Return a writable binary stream for *path*; '-' selects stdout."""
    if path == '-':
        try:
            stdout_binary = sys.stdout.buffer  # Python 3
        except AttributeError:
            stdout_binary = sys.stdout
        return stdout_binary
    status("Writing to '%s'" % path)
    output_dir = os.path.abspath(os.path.dirname(path))
    os.makedirs(output_dir, exist_ok=True)
    return open(path, 'wb')


def handle_validate(args, config, part):
    """Validate the resulting hardware configuration against the JSON schema.

    Exits with status 3 on validation errors; prints a warning (but does
    not fail) when jsonschema is not installed.
    """
    try:
        from jsonschema import Draft7Validator
        inst = json_loads(config.to_json())
        v = Draft7Validator(config_schema)
        errors = sorted(v.iter_errors(inst), key=lambda e: e.path)
        if errors != []:
            for e in errors:
                critical("%s @ %s" % (e.message, e.path))
            sys.exit(3)
    except ImportError as err:
        # FIX: closing backtick was missing from the original message.
        critical("\n** WARNING! %s: Cannot validate '%s', please run `make python-requirements` **\n\n"
                 % (str(err), args.input))


def handle_flashcheck(args, config, part):
    """Check a list of "address=file" chunks against the partition map.

    Each chunk must start exactly at a partition boundary and its file must
    fit within that partition; raises InputError otherwise.
    """
    # Expect list of chunks, such as
    # "0x100000=/out/Esp8266/debug/firmware/spiff_rom.bin 0x200000=custom.bin"
    chunks = args.expr.split()  # renamed from `list` (shadowed the builtin)
    if len(chunks) == 0:
        raise InputError("No chunks to flash!")
    for chunk in chunks:
        addr, filename = chunk.split('=')
        addr = int(addr, 0)
        # Use a local name so the `part` parameter is not clobbered.
        entry = config.partitions.find_by_address(config.devices[0], addr)
        if entry is None:
            raise InputError("No partition contains address 0x%08x" % addr)
        if entry.address != addr:
            raise InputError("Address 0x%08x is within partition '%s', not at start (0x%08x)"
                             % (addr, entry.name, entry.address))
        filesize = os.path.getsize(filename)
        if filesize > entry.size:
            raise InputError("File '%s' is 0x%08x bytes, too big for partition '%s' (0x%08x bytes)"
                             % (os.path.basename(filename), filesize, entry.name, entry.size))


def handle_partgen(args, config, part):
    """Generate the partition table binary (verifying it first unless
    --no-verify was given)."""
    if not args.no_verify:
        status("Verifying partition table...")
        config.verify(args.secure)
    return config.partitions.to_binary(config.devices)


def handle_expr(args, config, part):
    """Evaluate a Python expression against the configuration data.

    NOTE(review): uses eval() on the user-supplied expression — acceptable
    for a local build tool, never expose to untrusted input.
    """
    return str(eval(args.expr)).encode()


def main():
    parser = argparse.ArgumentParser(description='Sming hardware configuration utility')

    parser.add_argument('--no-verify', help="Don't verify partition table fields", action='store_true')
    parser.add_argument('--quiet', '-q', help="Don't print non-critical status messages to stderr", action='store_true')
    parser.add_argument('--secure', help="Require app partitions to be suitable for secure boot", action='store_true')
    parser.add_argument('--part', help="Name of partition to operate on")
    parser.add_argument('command', help='Action to perform', choices=['partgen', 'expr', 'validate', 'flashcheck'])
    parser.add_argument('input', help='Name of hardware configuration or path to binary partition table')
    parser.add_argument('output', help='Path to output file. Will use stdout if omitted.', nargs='?', default='-')
    parser.add_argument('expr', help='Expression to evaluate', nargs='?', default=None)

    args = parser.parse_args()
    common.quiet = args.quiet

    output = None
    input_is_binary = False
    if os.path.exists(args.input):
        # The input is a file: accept only a binary partition table.
        with open(args.input, "rb") as f:
            inputData = f.read()
        input_is_binary = inputData[0:2] == partition.Entry.MAGIC_BYTES
        if input_is_binary:
            config = Config.from_binary(inputData)
        else:
            raise InputError("File '%s' not recognised as partition table" % args.input)
    else:
        # Otherwise treat the input as a named hardware configuration.
        config = Config.from_name(args.input)

    partitions = config.partitions

    # Locate any supplied partition by name
    part = None
    if args.part is not None:
        part = partitions.find_by_name(args.part)
        if part is None:
            # find_by_name reports the missing name; nothing more to do.
            return

    output = globals()['handle_' + args.command](args, config, part)
    if output is not None:
        openOutput(args.output).write(output)


if __name__ == '__main__':
    try:
        main()
    except InputError as e:
        print("** ERROR! %s" % e, file=sys.stderr)
        sys.exit(2)
You have several peanuts inside a bag. Before you start eating some of them, you decide that you will eat exactly t peanuts in total. Repeatedly, you will take a peanut at random from the bag, and eat it. However, it happens that some of the peanuts are not complete, but just a half-peanut. Therefore, it is possible that you will not eat exactly t peanuts. For instance, suppose that the bag has c = 1 complete peanut, h = 2 half-peanuts, and that you want to eat exactly one peanut (that is, t = 1). In this case, with probability 1/3 you will eat the complete peanut, and stop. Otherwise, after eating a half-peanut, you will eat another peanut, which can be the remaining half-peanut (this would be a success, since you would have eaten 1/2 + 1/2 = t peanuts) or the complete peanut (this would be a failure, because you would have eaten 1/2 + 1 > t peanuts). Altogether, the probability of success is 1/3 + (2/3) · (1/2) = 2/3. Given c, h and t, can you compute the probability of success? Input consists of several cases, each one with only the integer numbers c, h and t. Assume 0 ≤ c ≤ 1000, 0 ≤ h ≤ 2000, and 0 ≤ t ≤ c + ⌊ h/2 ⌋. For every case, print, with four digits after the decimal point, the probability of eating exactly t peanuts when you are given a bag with c complete peanuts and h half-peanuts. The expected solution has cost O(t). The given bounds for c, h and t are rather small, in order to reduce the magnitude of numerical errors. Even so, use the type long double and try hard to avoid underflows and overflows. Good luck!
# -*- coding: utf-8 -*-
"""
   etc.adapter
   ~~~~~~~~~~~

   The interface for etcd adapters.  A subclass of :class:`Adapter` will be
   injected verification code automatically.

"""
from __future__ import absolute_import

import functools

import six

__all__ = ['Adapter']


def with_verifier(verify, func):
    """Wrap method *func* so that *verify* runs on its arguments first.

    *verify* receives the same arguments as *func* minus ``self`` and is
    expected to raise (ValueError/TypeError) on invalid input.
    """
    @functools.wraps(func)
    def wrapped(self, *args, **kwargs):
        verify(*args, **kwargs)
        return func(self, *args, **kwargs)
    return wrapped


class AdapterMeta(type):
    """Metaclass that injects argument verification into adapter methods.

    Whenever a subclass defines ``set`` or ``append``, the method is wrapped
    with the matching ``verify_*`` static method below, so every concrete
    adapter gets the same input validation for free.
    """

    def __new__(meta, name, bases, attrs):
        for attr, verify in [('set', meta.verify_set),
                             ('append', meta.verify_append)]:
            try:
                func = attrs[attr]
            except KeyError:
                # The class being created does not define this method;
                # nothing to wrap.
                continue
            attrs[attr] = with_verifier(verify, func)
        return super(AdapterMeta, meta).__new__(meta, name, bases, attrs)

    @staticmethod
    def verify_set(key, value=None, dir=False, ttl=None, refresh=False,
                   prev_value=None, prev_index=None, prev_exist=None,
                   timeout=None):
        """Validate arguments for ``set``.

        Exactly one of *value* or *dir* must be supplied, except when
        *refresh* is true (a TTL refresh keeps the existing value).
        String values must be text (``unicode`` on Python 2).
        """
        if not refresh and (value is None) == (not dir):
            raise ValueError('Set value or make as directory')
        if value is not None and not isinstance(value, six.text_type):
            raise TypeError('Set %s value' % six.text_type.__name__)

    @staticmethod
    def verify_append(key, value=None, dir=False, ttl=None, timeout=None):
        """Validate arguments for ``append``; same rules as ``verify_set``
        but without the refresh escape hatch."""
        if (value is None) == (not dir):
            raise ValueError('Set value or make as directory')
        if value is not None and not isinstance(value, six.text_type):
            raise TypeError('Set %s value' % six.text_type.__name__)


class Adapter(six.with_metaclass(AdapterMeta)):
    """An interface to implement several essential raw methods of etcd."""

    def __init__(self, url):
        # Base URL of the etcd endpoint this adapter talks to.
        self.url = url

    def clear(self):
        # Optional hook; concrete adapters may reset internal state here.
        pass

    def get(self, key, recursive=False, sorted=False, quorum=False,
            wait=False, wait_index=None, timeout=None):
        """Read *key* (optionally recursively / as a watch)."""
        raise NotImplementedError

    def set(self, key, value=None, dir=False, ttl=None, refresh=False,
            prev_value=None, prev_index=None, prev_exist=None, timeout=None):
        """Write *key*; arguments are pre-validated by ``verify_set``."""
        raise NotImplementedError

    def append(self, key, value=None, dir=False, ttl=None, timeout=None):
        """Append an in-order value under *key*; pre-validated by
        ``verify_append``."""
        raise NotImplementedError

    def delete(self, key, dir=False, recursive=False, prev_value=None,
               prev_index=None, timeout=None):
        """Delete *key* (optionally as a directory / recursively)."""
        raise NotImplementedError
I will be writing a series of articles documenting my journeys for Nothing But Hope and Passion. Read the first one here below…. In the last year I have played shows on the rooftops of Nairobi, under the dystopian skyline of Chongqing, upon the endless white sands of Jambiani, before the Soviet architecture of Vladivostok, and in the deepest slums of Kampala. It has been a time of learning, of challenging myself, of eye balling fear, and rediscovering the great power of music to bring joy…. Yes, working on that – will be soon!
# See edu.ku.brc.helpers.Encryption and rfc2898 from itertools import islice from hashlib import md5 from Crypto.Cipher import DES from Crypto.Random.random import randint ITERATION_COUNT = 1000 def decrypt(text: str, password: str) -> str: key = password.encode('utf-8') fromhex = bytes.fromhex(text) salt, ciphertext = fromhex[:8], fromhex[8:] derivedkey = generate_derivedkey(key, salt) deskey, iv = derivedkey[:8], derivedkey[8:] des = DES.new(deskey, DES.MODE_CBC, iv) padded = des.decrypt(ciphertext) paddinglen = padded[-1] return padded[:-paddinglen].decode('utf-8') def encrypt(text: str, password: str) -> str: text_encoded = text.encode('utf-8') paddinglen = 8 - len(text_encoded) % 8 padded = text_encoded + bytes([paddinglen]) * paddinglen key = password.encode('utf-8') salt = make_salt() derivedkey = generate_derivedkey(key, salt) deskey, iv = derivedkey[:8], derivedkey[8:] des = DES.new(deskey, DES.MODE_CBC, iv) ciphertext = des.encrypt(padded) return (salt + ciphertext).hex().upper() def rand_byte() -> int: return randint(0, 0xff) def make_salt() -> bytes: return bytes(islice(iter(rand_byte, None), 8)) def generate_derivedkey(key: bytes, salt: bytes, iterations: int = ITERATION_COUNT) -> bytes: out = key + salt for i in range(iterations): md = md5() md.update(out) out = md.digest() return out
Mike F.: How do you play the game? Ok Experts I Need Help. Thanks to Brian & Co. Is it this easy ? Does straight = slight pull? Above the TSP (Turned Shoulder Plane) for Tour Pros? Eliminate the left side; Why?? Off the tee: Work the ball vs 1 ball flight? Can I catch my buddies? More angle = more speed? Never Slice Again! 2.0 (web version) Now Available! Curtis Strange: Swings around his spine? Fred Couples Swing Analysis by Brian Manzella, PGA, G.S.E.D. FINALLY....Proof that FEEL IS SOUND! PP#3: When do you feel it? Can someone take a look at these swings? What makes the shaft flatten? What makes some golfers "lose it"? "You can have a steady head with a shoulder centre pivot." Not golf, but really, really cool! What constitutes a "Full Swing"? Getting rid of hands wobble at top.? My lesson with Brian - is it REALLY this simple?? what parts of swing really matter? "Jack would be hitting it..." can tgm principles also be single plane? Pivot assisted by arm / hand lift???? Brian what would you do with Goosen?? Ben Hogan 1955 Swing Analysis by Brian Manzella, PGA, G.S.E.D. What is up with Woods??? Dr. Cary Middlecoff, AUDIO Swing Analysis by Brian Manzella, PGA, G.S.E.D. What makes a Good Golfer GOOD! Welcome to the BEST Staff in Golf! How to read your divots? This is what a FLAT Left Wrist looks like! The power of not trying?! Learning to play....back in the day. How to avoid over the top effectively? Winter Golf and training aids.
import re
from cStringIO import StringIO as BytesIO
from datetime import timedelta
from functools import update_wrapper
from flask import make_response, current_app, request

# Matches any run of whitespace (captured so sub() sees the full run).
_ws_re = re.compile(r'(\s+)')


def chop_tail(base, tail):
    # Remove `tail` from the end of `base` if present.
    # Returns (resulting string, whether the tail was actually removed).
    if not base.endswith(tail):
        return base, False
    return base[:-len(tail)], True


def normalize_text(text):
    # Collapse whitespace runs: two or more newlines become a paragraph
    # break, a single newline is kept, anything else becomes one space.
    # Leading/trailing newlines are stripped from the result.
    def _handle_match(match):
        ws = match.group()
        nl = ws.count('\n')
        if nl >= 2:
            return u'\n\n'
        elif nl == 1:
            return u'\n'
        return u' '
    return _ws_re.sub(_handle_match, text).strip('\n')


def cors(origin=None, methods=None, headers=None, max_age=21600,
         attach_to_all=True, automatic_options=True):
    # Decorator factory that adds CORS headers to Flask view responses.
    #   origin: allowed origin(s); defaults to '*' when not given.
    #   methods/headers: iterables serialized once here, at decoration time.
    #   max_age: seconds (or timedelta) for Access-Control-Max-Age.
    #   attach_to_all: add headers to every response, not just OPTIONS.
    #   automatic_options: answer OPTIONS preflights ourselves.
    if methods is not None:
        methods = ', '.join(sorted(x.upper() for x in methods))
    if headers is not None and not isinstance(headers, basestring):
        headers = ', '.join(x.upper() for x in headers)
    if not isinstance(origin, basestring):
        origin = ', '.join(origin or ('*',))
    if isinstance(max_age, timedelta):
        max_age = max_age.total_seconds()

    def get_methods():
        if methods is not None:
            return methods
        # Fall back to whatever Flask would answer for an OPTIONS request.
        options_resp = current_app.make_default_options_response()
        return options_resp.headers['allow']

    def decorator(f):
        def wrapped_function(*args, **kwargs):
            if automatic_options and request.method == 'OPTIONS':
                resp = current_app.make_default_options_response()
            else:
                resp = make_response(f(*args, **kwargs))
            if not attach_to_all and request.method != 'OPTIONS':
                return resp

            h = resp.headers
            h['Access-Control-Allow-Origin'] = origin
            h['Access-Control-Allow-Methods'] = get_methods()
            h['Access-Control-Max-Age'] = str(max_age)
            if headers is not None:
                h['Access-Control-Allow-Headers'] = headers
            return resp

        # We answer OPTIONS ourselves, so disable Flask's automatic handler.
        f.provide_automatic_options = False
        return update_wrapper(wrapped_function, f)
    return decorator


def release_file(request, name):
    # Detach the uploaded file stream `name` from the request so it can
    # outlive request teardown; the request object gets a fresh empty
    # buffer in its place.
    f = request.files[name]
    rv = f.stream
    f.stream = BytesIO()
    return rv
Products - Euroceram Plus S.A. Euroceram is now ten years old. Our activities include many aspects of interior design. At Euroceram, you find specialists in the tile industry; a professional staff is available to share their expertise with you. “Plus”, you will find a large variety of products and personalized service, unique in Haiti. FLOORING: Always looking to follow the newest European trends, Euroceram offers you a wide variety of beautiful tiles and laminated flooring. We can guarantee the best quality at the best price; our products are distributed exclusively and are in stock. We have recently geared ourselves towards China, Peru and Brazil, to increase our product line to include a more affordable one. These countries have the newest technology available to create beautiful designs that we have carefully selected to have the European look and feel, at inexpensive prices. KITCHENS: Functional and elegant, we offer you tiles, sinks, faucets and beautiful granite countertops with their vast array of choices and colors. BATHROOMS: Sanitary ware, Jacuzzis, vanities, shower units, faucets, bathroom accessories, and of course, our specialty: beautiful wall tiles to meet your personal style. Always looking for new and innovative products, Euroceram is proud to introduce our new line of marble and stone sinks, made locally by our artisans. These beautiful sinks will seduce you with their unique design and color. FOR THE REST OF THE HOUSE: Euroceram suggests glass blocks, doors and accessories, 800 colors of high quality paint, made to withstand our unpredictable tropical climate. Euroceram supplies and installs granite countertops for kitchens and bathrooms. Please stop by our Pétion-Ville store to see our wide range of choices. Click on the image below to download a PDF version of MANUEL DU PARFAIT PETIT CARRELEUR, a reference for all tile works.
from zeit.cms.i18n import MessageFactory as _
import zc.form.field
import zc.form.interfaces
import zeit.cms.content.field
import zeit.cms.content.sources
import zeit.cms.interfaces
import zeit.cms.repository.interfaces
import zeit.cms.tagging.interfaces
import zope.component.interfaces
import zope.interface
import zope.interface.common.sequence
import zope.interface.interfaces
import zope.schema
import zope.schema.interfaces

# NOTE(review): zope.location.interfaces and zope.app.container.interfaces
# are referenced below but not imported here — presumably made available
# transitively by one of the imports above; confirm before relying on it.

# XXX There is too much, too unordered in here, clean this up.

# prevent circular import
from zeit.cms.content.contentsource import ICMSContentSource
from zeit.cms.content.contentsource import INamedCMSContentSource
from zeit.cms.content.contentsource import IAutocompleteSource


class IAuthorType(zeit.cms.interfaces.ICMSContentType):
    """Interface type for authors."""


class AuthorSource(zeit.cms.content.contentsource.CMSContentSource):
    """Source of all author objects, with autocomplete support."""

    zope.interface.implements(
        zeit.cms.content.contentsource.IAutocompleteSource)

    check_interfaces = IAuthorType
    name = 'authors'

authorSource = AuthorSource()


class IChannelField(zc.form.interfaces.ICombinationField):
    """Marker interface so we can register a specialized widget for this
    field."""


class ReferenceField(zope.schema.Choice):
    """Choice field whose value is an IReference; validates the reference
    *target* against the vocabulary instead of the reference itself."""

    def _validate(self, value):
        if self._init_field:
            return
        # skip immediate superclass, since that's what we want to change
        super(zope.schema.Choice, self)._validate(value)
        if value.target not in self.vocabulary:
            raise zope.schema.interfaces.ConstraintNotSatisfied(value)


class ICommonMetadata(zope.interface.Interface):
    """Metadata shared by (nearly) all CMS content types."""

    year = zope.schema.Int(
        title=_("Year"),
        min=1900,
        max=2100)

    volume = zope.schema.Int(
        title=_("Volume"),
        min=1,
        max=53,
        required=False)

    page = zope.schema.Int(
        title=_("Page"),
        readonly=True,
        required=False)

    ressort = zope.schema.Choice(
        title=_("Ressort"),
        source=zeit.cms.content.sources.RessortSource())

    sub_ressort = zope.schema.Choice(
        title=_('Sub ressort'),
        source=zeit.cms.content.sources.SubRessortSource(),
        required=False)

    # (channel, optional subchannel) pairs; the combination value_type is
    # marked with IChannelField so a specialized widget can be registered.
    channels = zope.schema.Tuple(
        title=_('Channels'),
        value_type=zc.form.field.Combination(
            (zope.schema.Choice(
                title=_('Channel'),
                source=zeit.cms.content.sources.ChannelSource()),
             zope.schema.Choice(
                 title=_('Subchannel'),
                 source=zeit.cms.content.sources.SubChannelSource(),
                 required=False))
        ),
        default=(),
        required=False)
    zope.interface.alsoProvides(channels.value_type, IChannelField)

    lead_candidate = zope.schema.Bool(
        title=_('Lead candidate'),
        default=True,
        required=False)

    printRessort = zope.schema.TextLine(
        title=_("Print ressort"),
        readonly=True,
        required=False,
        default=u'n/a')

    # not required since e.g. Agenturmeldungen don't have an author, only
    # a copyright notice
    authorships = zope.schema.Tuple(
        title=_("Authors"),
        value_type=ReferenceField(source=authorSource),
        default=(),
        required=False)
    authorships.value_type.setTaggedValue(
        'zeit.cms.addform.contextfree', 'zeit.content.author.add_contextfree')

    # DEPRECATED, use authorships instead
    # (still used by zeit.vgwort for querying)
    authors = zope.schema.Tuple(
        title=_("Authors (freetext)"),
        value_type=zope.schema.TextLine(),
        required=False,
        default=(u'',),
        description=_(u'overwritten if any non-freetext authors are set'))

    access = zope.schema.Choice(
        title=_('Access'),
        default=u'free',
        source=zeit.cms.content.sources.ACCESS_SOURCE)

    keywords = zeit.cms.tagging.interfaces.Keywords(
        required=False,
        default=())

    serie = zope.schema.Choice(
        title=_("Serie"),
        source=zeit.cms.content.sources.SerieSource(),
        required=False)

    copyrights = zope.schema.TextLine(
        title=_("Copyright (c)"),
        description=_("Do not enter (c)."),
        required=False)

    supertitle = zope.schema.TextLine(
        title=_("Kicker"),
        description=_("Please take care of capitalisation."),
        required=False,
        max_length=70)

    # DEPRECATED, use authorships instead (still used by
    # k4import/exporter.zeit.de to transmit author information *into* vivi,
    # so Producing can manually convert it to authorships)
    byline = zope.schema.TextLine(
        title=_("By line"),
        readonly=True,
        required=False)

    title = zope.schema.Text(
        title=_("Title"),
        missing_value=u'')
    title.setTaggedValue('zeit.cms.charlimit', 70)

    subtitle = zope.schema.Text(
        title=_("Subtitle"),
        missing_value=u'',
        required=False)
    subtitle.setTaggedValue('zeit.cms.charlimit', 170)

    teaserTitle = zope.schema.TextLine(
        title=_("Teaser title"),
        required=False,
        max_length=70)

    teaserText = zope.schema.Text(
        title=_("Teaser text"),
        required=False,
        max_length=170)

    teaserSupertitle = zope.schema.TextLine(
        title=_(u'Teaser kicker'),
        description=_(u'Please take care of capitalisation.'),
        required=False,
        max_length=70)

    vg_wort_id = zope.schema.TextLine(
        title=_('VG Wort Id'),
        required=False)

    dailyNewsletter = zope.schema.Bool(
        title=_("Daily newsletter"),
        description=_(
            "Should this article be listed in the daily newsletter?"),
        required=False,
        default=True)

    commentsPremoderate = zope.schema.Bool(
        title=_("Comments premoderate"),
        required=False,
        default=False)

    commentsAllowed = zope.schema.Bool(
        title=_("Comments allowed"),
        required=False,
        default=True)

    commentsAPIv2 = zope.schema.Bool(
        title=_("Use Comments APIv2"),
        required=False,
        default=False)

    commentSectionEnable = zope.schema.Bool(
        title=_("Show commentthread"),
        required=False,
        default=True)

    banner = zope.schema.Bool(
        title=_("Banner"),
        required=False,
        default=True)

    banner_content = zope.schema.Bool(
        title=_("Banner in Content"),
        required=False,
        default=True)

    banner_outer = zope.schema.Bool(
        title=_("Banner Mainad"),
        required=False,
        default=True)

    banner_id = zope.schema.TextLine(
        title=_('Banner id'),
        required=False)

    hide_adblocker_notification = zope.schema.Bool(
        title=_('Hide AdBlocker notification'),
        default=False,
        required=False)

    product = zope.schema.Choice(
        title=_('Product id'),
        # XXX kludgy, we expect a product with this ID to be present in the XML
        # file. We only need to set an ID here, since to read the product we'll
        # ask the source anyway.
        default=zeit.cms.content.sources.Product(u'ZEDE'),
        source=zeit.cms.content.sources.PRODUCT_SOURCE)

    overscrolling = zope.schema.Bool(
        title=_('Overscrolling'),
        required=False,
        default=True)

    cap_title = zope.schema.TextLine(
        title=_('CAP title'),
        required=False)

    deeplink_url = zope.schema.URI(
        title=_('Deeplink URL'),
        required=False,
        default=None)

    tldr_title = zope.schema.TextLine(
        title=_("tldr title"),
        required=False,
        max_length=70)

    tldr_text = zope.schema.Text(
        title=_("tldr text"),
        required=False,
        max_length=450)

    tldr_milestone = zope.schema.Bool(
        title=_("tldr milestone"),
        required=False,
        default=False)

    tldr_date = zope.schema.Datetime(
        title=_("tldr date"),
        required=False)

    storystreams = zope.schema.Tuple(
        title=_("Storystreams"),
        value_type=zope.schema.Choice(
            source=zeit.cms.content.sources.StorystreamSource()),
        default=(),
        required=False)

    advertisement_title = zope.schema.TextLine(
        title=_("Advertisement title"),
        required=False)

    advertisement_text = zope.schema.Text(
        title=_("Advertisement text"),
        required=False)


class IProduct(zope.interface.Interface):
    """A publication product"""

    id = zope.interface.Attribute('id')
    title = zope.interface.Attribute('title')
    vgwortcode = zope.interface.Attribute('VGWort code, optional')
    href = zope.interface.Attribute('URL for the "homepage" of this product')
    target = zope.interface.Attribute('Optional link target (e.g. _blank)')
    show = zope.interface.Attribute(
        'Flag what to display in frontend byline. {issue,link,source}')
    volume = zope.interface.Attribute('Boolean: has print volumes')
    location = zope.interface.Attribute(
        'uniqueId template of the IVolumes of this product, '
        'e.g. http://xml.zeit.de/{year}/{name}/ausgabe')
    centerpage = zope.interface.Attribute(
        'uniqueId template for the public-facing CP of this product, '
        'e.g. http://xml.zeit.de/{year}/{name}/index')
    cp_template = zope.interface.Attribute(
        'uniqueId of a zeit.content.text.interfaces.IPythonScript, which is '
        'used to create the public-facing CP of this product')
    autochannel = zope.interface.Attribute(
        'Set false to suppress setting channel on ressort changes')
    relates_to = zope.interface.Attribute(
        'Product-ID of another Product we belong to')
    dependent_products = zope.interface.Attribute(
        'List of products whose relates_to points to us')


class ISerie(zope.interface.Interface):
    """A series an article can belong to."""

    id = zope.interface.Attribute('')
    title = zope.interface.Attribute('')
    serienname = zope.interface.Attribute('')
    url = zope.interface.Attribute('')
    encoded = zope.interface.Attribute('')
    column = zope.interface.Attribute('')
    video = zope.interface.Attribute('')


class IStorystreamReference(zope.interface.Interface):
    """Reference to a storystream centerpage."""

    id = zope.interface.Attribute('')
    title = zope.interface.Attribute('')
    references = zope.interface.Attribute('')


def hex_literal(value):
    """Constraint function: accept only strings parseable as base-16 ints."""
    try:
        int(value, base=16)
    except ValueError:
        raise zeit.cms.interfaces.ValidationError(_("Invalid hex literal"))
    else:
        return True


# Sentinels describing when a DAV property may be written.
WRITEABLE_ON_CHECKIN = object()
WRITEABLE_LIVE = object()
WRITEABLE_ALWAYS = object()


class IDAVPropertyConverter(zope.interface.Interface):
    """Parse a unicode string from a DAV property to a value and vice versa."""

    def fromProperty(value):
        """Convert property value to python value.

        returns python object represented by value.

        raises ValueError if the value could not be converted.
        raises zope.schema.ValidationError if the value could be converted,
        but does not satisfy the constraints.

        """

    def toProperty(value):
        """Convert python value to DAV property value.

        returns unicode

        """


class IGenericDAVPropertyConverter(IDAVPropertyConverter):
    """A dav property converter which converts in a generic way.

    This interface is a marker if some code wants to know if a generic
    converter or a specialised is doing the work.

    """


class IDAVToken(zope.interface.Interface):
    """A string representing a token that uniquely identifies a value."""


class IDAVPropertyChangedEvent(zope.component.interfaces.IObjectEvent):
    """A dav property has been changed."""

    old_value = zope.interface.Attribute("The value before the change.")
    new_value = zope.interface.Attribute("The value after the change.")
    property_namespace = zope.interface.Attribute("Webdav property namespace.")
    property_name = zope.interface.Attribute("Webdav property name.")
    field = zope.interface.Attribute(
        "zope.schema field the property was changed for.")


class DAVPropertyChangedEvent(zope.component.interfaces.ObjectEvent):
    """Default IDAVPropertyChangedEvent implementation."""

    zope.interface.implements(IDAVPropertyChangedEvent)

    def __init__(self, object, property_namespace, property_name,
                 old_value, new_value, field):
        self.object = object
        self.property_namespace = property_namespace
        self.property_name = property_name
        self.old_value = old_value
        self.new_value = new_value
        self.field = field


class ITextContent(zope.interface.Interface):
    """Representing text content XXX"""

    data = zope.schema.Text(title=u"Document content")


class IXMLRepresentation(zope.interface.Interface):
    """Objects with an XML representation."""

    xml = zeit.cms.content.field.XMLTree(
        title=_("XML Source"))


class IXMLReference(zope.interface.Interface):
    """XML representation of an object reference.

    How the object references is serialized is dependent on both the target
    object and the type of reference. For instance, a feed might usually
    use an <xi:include> tag, while an image uses <img>. And then there
    might be references inside the <head> that always use a <reference>
    tag. (NOTE: These are just examples, not actual zeit.cms policy!)

    Adapting to IXMLReference yields an lxml.objectify tree::

      node = zope.component.getAdapter(
          content, zeit.cms.content.interfaces.IXMLReference, name='image')

    The target uniqueId is always stored in the ``href`` attribute of the
    node.

    """


class IXMLReferenceUpdater(zope.interface.Interface):
    """Objects that update metadata etc on XML references."""

    def update(xml_node, suppress_errors=False):
        """Update xml_node with data from the content object.

        xml_node: lxml.objectify'ed element

        """


class IReference(IXMLRepresentation, zeit.cms.interfaces.ICMSContent,
                 zope.location.interfaces.ILocation):
    """Reference to an ICMSContent object (optionally with properties of its
    own).

    To deserialize an IXMLReference, adapt the source ICMSContent and the
    XML node to IReference (using the same adapter name that was used to
    create the IXMLReference)::

      reference = zope.component.getMultiAdapter(
          (source, node), zeit.cms.content.interfaces.IReference,
          name='image')

    For widget support (DropObjectWidget/ObjectSequenceWidget), IReference
    can be resolved as ICMSContent, using a uniqueId built from
    "source uniqueId, source attribute name, target uniqueId".

    """

    target = zope.interface.Attribute('The referenced ICMSContent object')
    target_unique_id = zope.interface.Attribute(
        'uniqueId of the referenced ICMSContent object')
    attribute = zope.interface.Attribute(
        'Attribute name of reference property on source')

    def create(target, suppress_errors=False):
        """Create a new references from our source to the given target
        (either an ICMSContent or a uniqueId)."""

    def get(target, default=None):
        """If our source has a reference to the given target (ICMSContent
        or uniqueId), return that, else return default."""

    def update_metadata(suppress_errors=False):
        """Run XMLReferenceUpdater on our XML node."""


class IReferences(zope.interface.common.sequence.IReadSequence):
    """Collection of IReference objects on a source content object."""

    def __iter__(self):
        # XXX not declared by IReadSequence,
        # dear zope.interface are you serious?!
        pass

    def create(target):
        """Returns a new IReference to the given ICMSContent object."""

    def get(target, default=None):
        """Returns IReference to the given target (uniqueId or ICMSContent)
        if one exists."""


class IXMLSource(zope.interface.Interface):
    """str representing the xml of an object."""


class IXMLContent(zeit.cms.repository.interfaces.IDAVContent,
                  IXMLRepresentation):
    """Content with an XML representation."""


class ITemplateManagerContainer(zope.app.container.interfaces.IReadContainer):
    """Container which holds all template managers."""


class ITemplateManager(zope.app.container.interfaces.IReadContainer):
    """Manages templates for a content type."""


class ITemplate(IXMLRepresentation):
    """A template for xml content types."""

    title = zope.schema.TextLine(title=_('Title'))


class IDAVPropertiesInXML(zope.interface.Interface):
    """Marker interface for objects which store their webdav properties in
    xml.

    It is common for articles and CPs to store their webdav properties in
    the xml, too. That is in addition to the Metadata stored as webdav
    properties.

    """


class IDAVPropertyXMLSynchroniser(zope.interface.Interface):
    """Synchronises dav properties to XML."""

    def set(namespace, name):
        """Set value for the DAV property (name, namespace)."""

    def sync():
        """Synchronise all properties."""


class ISynchronisingDAVPropertyToXMLEvent(zope.interface.Interface):
    """Event fired while a DAV property is copied into the XML; subscribers
    may veto the copy."""

    namespace = zope.interface.Attribute("DAV property namespace")
    name = zope.interface.Attribute("DAV property name")
    value = zope.interface.Attribute("DAV property value")

    vetoed = zope.schema.Bool(
        title=u"True if sync was vetoed.",
        readonly=True,
        default=False)

    def veto():
        """Called by subscribers to veto the property being added to xml."""


class IAccessCounter(zope.interface.Interface):
    """Give information about how many times an object was accessed."""

    hits = zope.schema.Int(
        title=_('Hits today'),
        description=_('Indicates how many times a page viewed today.'),
        required=False,
        default=None)

    total_hits = zope.schema.Int(
        title=_('Total hits'),
        description=_('Indicates how many times a page was viewed in total, '
                      'i.e. during its entire life time.'),
        required=False,
        default=None)

    detail_url = zope.schema.URI(
        title=_('URI to the access counting details'),
        required=False,
        default=None)


class IContentSortKey(zope.interface.Interface):
    """Content objects can be adapted to this interface to get a sort key.

    The sort key usually is a tuple of (weight, lowercased-name)

    """


class ILivePropertyManager(zope.interface.Interface):
    """Manages live properties."""

    def register_live_property(name, namespace):
        """Register property as live property."""

    def unregister_live_property(name, namespace):
        """Unregister property as live property."""

    def is_live_property(name, namespace):
        """Return (bool) whether the property is a live property."""


class ISemanticChange(zope.interface.Interface):
    """Indicates when the content last changed meaningfully, as opposed to
    small corrections like fixed typos.

    This might be shown to the reader, e.g. as "Aktualisiert am" on article
    pages.

    """

    last_semantic_change = zope.schema.Datetime(
        title=_('Last semantic change'),
        required=False,
        readonly=True,
        default=None)

    has_semantic_change = zope.schema.Bool(
        title=_('Update last semantic change'),
        required=False,
        default=False)

    def update():
        """Set last semantic change to last modified."""


class IUUID(zope.interface.Interface):
    """Accessing the uuid of a content object."""

    id = zope.schema.ASCIILine(
        title=u"The uuid of the content object.",
        default=None,
        required=False)

    shortened = zope.schema.ASCIILine(
        title=u"id without `{urn:uuid:}` prefix",
        readonly=True,
        required=False,
        default=None)


class IMemo(zope.interface.Interface):
    """Provide a memo for additional remarks on a content object."""

    memo = zope.schema.Text(
        title=_('Memo'),
        required=False)


class IContentAdder(zope.interface.Interface):
    """Form schema for the "add content" menu."""

    type_ = zope.schema.Choice(
        title=_("Type"),
        source=zeit.cms.content.sources.AddableCMSContentTypeSource())

    ressort = zope.schema.Choice(
        title=_("Ressort"),
        source=zeit.cms.content.sources.RessortSource(),
        required=False)

    sub_ressort = zope.schema.Choice(
        title=_('Sub ressort'),
        source=zeit.cms.content.sources.SubRessortSource(),
        required=False)

    year = zope.schema.Int(
        title=_("Year"),
        min=1900,
        max=2100)

    month = zope.schema.Int(
        title=_("Month"),
        min=1,
        max=12)


class IAddLocation(zope.interface.Interface):
    """Marker interface that adapts a content type to a context object on
    which the add form should be displayed.

    Register this adapter for (content_type, IContentAdder), where
    content_type is an interface like ICMSContent or IImageGroup.

    """


class IAddableContent(zope.interface.interfaces.IInterface):
    """Interface type to register additional addable entries that are *not*
    ICMSContentTypes.

    """


class ISkipDefaultChannel(zope.interface.Interface):
    """Marker interface to opt out of setting default
    ICommonMetadata.channels according to ressort/sub_ressort."""
A stunning traditional design pairs with some modern flair to make for a beautiful sofa collection that can handle being the centerpiece of your living room. Wooden trim details, round bun feet and carved motifs make for an elevated elegance that adds to the luxury of your room. The seat features a distinct back with split deep tufts and beautifully-patterned fabric. Please note that the base price only includes (1) Sofa. Please add the additional loveseat to customize this set to your liking.
from astar import astar
import copy

# Jump directions on the 5x5 board: name -> (row delta, col delta).
Moves = {
    'W': (0, -1),
    'S': (1, 0),
    'N': (-1, 0),
    'E': (0, 1)
}


def t2l(t):
    # Copy a 5x5 board (any indexable-of-indexables) into a list of lists.
    l = []
    for x in range(5):
        i = []
        for y in range(5):
            i.append(t[x][y])
        l.append(i)
    return l


def print_state(s):
    # Print the 5x5 board, one row per line (Python 2 print statement).
    for x in range(5):
        print s[x][0], s[x][1], s[x][2], s[x][3], s[x][4]


# Global counter of heuristic evaluations, used only for progress logging.
guard = 0


def h(node, instance):
    # A* heuristic: twice the number of stones remaining on the board.
    # Every 1000th evaluation prints the current stone count as progress.
    global guard
    guard += 1
    count = 0
    for x in range(5):
        for y in range(5):
            if node[x][y]:
                count += 1
    if guard % 1000 == 0:
        print count
    return count * 2


def stones_left(board):
    # Count the stones currently on the board.
    count = 0
    for x in range(5):
        for y in range(5):
            if board[x][y]:
                count += 1
    return count


class solitare:
    # 5x5 peg-solitaire problem instance for the generic astar() solver.
    # A state is a 5x5 list of booleans (True = stone present).

    def __init__(self):
        # Start: every cell occupied except (row 3, col 2).
        self.start_state = [[True, True, True, True, True],
                            [True, True, True, True, True],
                            [True, True, True, True, True],
                            [True, True, False, True, True],
                            [True, True, True, True, True],
                            ]

    def is_target_state(self, s):
        # Goal: at most one stone left; bail out as soon as a second
        # stone is seen.
        count = 0
        for x in range(5):
            for y in range(5):
                if s[x][y]:
                    count += 1
                    if count == 2:
                        return False
        #print count
        return True

    def get_start_state(self):
        return self.start_state

    def is_valid_move(self, move, state):
        # move = (x, y, direction). Valid iff there is a stone at (x, y),
        # a stone on the adjacent cell in `direction`, and an empty cell
        # directly behind it — all within the board.
        x, y, dir = move
        if state[x][y] == False:
            return False
        dx, dy = Moves[dir]
        newx, newy = x + dx, y + dy
        if newx < 0 or newx >= 5 or newy < 0 or newy >= 5:
            return False
        if state[newx][newy] == False:
            return False
        newx += dx
        newy += dy
        if newx < 0 or newx >= 5 or newy < 0 or newy >= 5:
            return False
        return state[newx][newy] == False

    def get_after_move(self, state, move):
        # Apply a jump on a deep copy: source and jumped-over cells become
        # empty, the landing cell becomes occupied. The assert checks that
        # exactly one stone was removed.
        start = stones_left(state)
        x, y, dir = move
        dx, dy = Moves[dir]
        middlex, middley = x + dx, y + dy
        lastx, lasty = middlex + dx, middley + dy
        s2 = copy.deepcopy(state)
        s2[x][y] = False
        s2[middlex][middley] = False
        s2[lastx][lasty] = True
        stop = stones_left(s2)
        assert start - 1 == stop
        return s2

    def get_children(self, parent):
        # Enumerate all (successor state, (move, x, y), cost=1) triples
        # reachable from `parent` by one legal jump.
        successors = []
        for x in range(5):
            for y in range(5):
                if parent[x][y]:
                    for m in Moves:
                        d = Moves[m]  # (unused)
                        if not self.is_valid_move((x, y, m), parent):
                            continue
                        child = self.get_after_move(parent, (x, y, m)), (m, x, y), 1
                        successors.append(child)
        #print 'legal moves', len(successors), 'left', h(parent, self)
        return successors


# Script entry: build the puzzle and run the external A* solver on it.
l = solitare()
s = l.get_start_state()
print s
#print_state(s)
c = l.get_children(s)
#print l.get_children(s)
res = astar(l, h)
print res
Last week, a casual listener of The Bearded Man asked somewhat jokingly: “Does The Bearded Man ever get a case of the Mondays?” While obviously an attempt at getting underneath the thickest skin there is, there’s more to the question than first meets the eye. See, the word “Monday” originally meant “day of the moon”, and this is something The Bearded Man knows better than most. It’s true that in his more contemplative moments, The Bearded Man requires deep introspection and time to reflect on the lusciousness of his beard. When better to do this than while basking in the glow of the night? Our hero celebrates the day of the moon and remembers to look up towards ‘The Sky’. There are more things in heaven and earth than are dreamt of in your philosophy, so just take a cue from Jyra and know that all will soon be well.
# BitBake PR (package revision) service: a small XML-RPC daemon backed by an
# SQLite database that hands out monotonically increasing PR values, plus the
# client-side connection wrapper and start/stop helpers used by BitBake.
import os,sys,logging
import signal, time, atexit, threading
from SimpleXMLRPCServer import SimpleXMLRPCServer, SimpleXMLRPCRequestHandler
import xmlrpclib
import threading
import Queue

try:
    import sqlite3
except ImportError:
    from pysqlite2 import dbapi2 as sqlite3

import bb.server.xmlrpc
import prserv
import prserv.db
import errno

logger = logging.getLogger("BitBake.PRserv")

if sys.hexversion < 0x020600F0:
    print("Sorry, python 2.6 or later is required.")
    sys.exit(1)

class Handler(SimpleXMLRPCRequestHandler):
    """Request handler that dispatches straight to the server's registered
    functions and logs a traceback before re-raising on any failure."""
    def _dispatch(self,method,params):
        try:
            value=self.server.funcs[method](*params)
        except:
            # Log the full traceback for diagnosis, then re-raise so the
            # XML-RPC layer reports a fault to the caller.
            import traceback
            traceback.print_exc()
            raise
        return value

# Pidfile path template, filled in with (host, port).
PIDPREFIX = "/tmp/PRServer_%s_%s.pid"
# Module-level handle on the single locally auto-started server, if any.
singleton = None

class PRServer(SimpleXMLRPCServer):
    """XML-RPC server exposing getPR/quit/ping/export/importone over an
    SQLite-backed PR table.  Requests are queued and processed on a
    dedicated worker thread (see process_request/process_request_thread)."""
    def __init__(self, dbfile, logfile, interface, daemon=True):
        ''' Open the PR database and register the RPC entry points.
            interface is a (host, port) pair; port 0 picks a free port. '''
        SimpleXMLRPCServer.__init__(self, interface,
                                    logRequests=False, allow_none=True)
        self.dbfile=dbfile
        self.daemon=daemon
        self.logfile=logfile
        self.working_thread=None
        # getsockname() resolves the actual bound address (port 0 -> real port).
        self.host, self.port = self.socket.getsockname()
        self.pidfile=PIDPREFIX % (self.host, self.port)

        self.register_function(self.getPR, "getPR")
        self.register_function(self.quit, "quit")
        self.register_function(self.ping, "ping")
        self.register_function(self.export, "export")
        self.register_function(self.importone, "importone")
        self.register_introspection_functions()

        self.db = prserv.db.PRData(self.dbfile)
        self.table = self.db["PRMAIN"]

        # Incoming requests are handed to a single worker thread through
        # this queue, serialising all database access.
        self.requestqueue = Queue.Queue()
        self.handlerthread = threading.Thread(target = self.process_request_thread)
        self.handlerthread.daemon = False

    def process_request_thread(self):
        """Worker-thread loop: pull queued (request, client_address) pairs,
        service them as SocketServer would, and sync the PR table to disk
        after each request.  Exceptions are handled here instead of in the
        accept loop.
        """
        while True:
            (request, client_address) = self.requestqueue.get()
            try:
                self.finish_request(request, client_address)
                self.shutdown_request(request)
            except:
                self.handle_error(request, client_address)
                self.shutdown_request(request)
            self.table.sync()

    def process_request(self, request, client_address):
        # Override: defer the request to the worker thread instead of
        # handling it on the accept loop.
        self.requestqueue.put((request, client_address))

    def export(self, version=None, pkgarch=None, checksum=None, colinfo=True):
        """RPC: dump (a filtered view of) the PR table; None on DB error."""
        try:
            return self.table.export(version, pkgarch, checksum, colinfo)
        except sqlite3.Error as exc:
            logger.error(str(exc))
            return None

    def importone(self, version, pkgarch, checksum, value):
        """RPC: import a single PR value into the table."""
        return self.table.importone(version, pkgarch, checksum, value)

    def ping(self):
        # NOTE(review): self.quit is the bound method until work_forever()
        # rebinds it to False, so ping() is only meaningful once the serve
        # loop has started -- confirm callers never ping before then.
        return not self.quit

    def getinfo(self):
        """Return the (host, port) this server is bound to."""
        return (self.host, self.port)

    def getPR(self, version, pkgarch, checksum):
        """RPC: look up (or allocate) the PR value for the given key;
        None when the key is unknown or the database errors."""
        try:
            return self.table.getValue(version, pkgarch, checksum)
        except prserv.NotFoundError:
            logger.error("can not find value for (%s, %s)",version, checksum)
            return None
        except sqlite3.Error as exc:
            logger.error(str(exc))
            return None

    def quit(self):
        # RPC: ask the serve loop to exit.  Deliberately rebinds the
        # 'quit' attribute from this method to the boolean flag that
        # work_forever() polls.
        self.quit=True
        return

    def work_forever(self,):
        """Serve requests until quit() is called, then sync and close."""
        self.quit = False
        # 0.5s timeout keeps handle_request() from blocking forever so the
        # quit flag is re-checked regularly.
        self.timeout = 0.5

        logger.info("Started PRServer with DBfile: %s, IP: %s, PORT: %s, PID: %s" %
                     (self.dbfile, self.host, self.port, str(os.getpid())))

        self.handlerthread.start()
        while not self.quit:
            self.handle_request()

        self.table.sync()
        logger.info("PRServer: stopping...")
        self.server_close()
        return

    def start(self):
        """Daemonize; the child runs the serve loop, the parent returns."""
        pid = self.daemonize()
        # Ensure both the parent sees this and the child from the work_forever log entry above
        logger.info("Started PRServer with DBfile: %s, IP: %s, PORT: %s, PID: %s" %
                     (self.dbfile, self.host, self.port, str(pid)))

    def delpid(self):
        """Remove this server's pidfile."""
        os.remove(self.pidfile)

    def daemonize(self):
        """Classic double-fork daemonization.
        See Advanced Programming in the UNIX, Sec 13.3
        """
        try:
            pid = os.fork()
            if pid > 0:
                # Reap the intermediate child so it does not become a zombie.
                os.waitpid(pid, 0)
                #parent return instead of exit to give control 
                return pid
        except OSError as e:
            raise Exception("%s [%d]" % (e.strerror, e.errno))

        os.setsid()
        """
        fork again to make sure the daemon is not session leader, 
        which prevents it from acquiring controlling terminal
        """
        try:
            pid = os.fork()
            if pid > 0: #parent
                os._exit(0)
        except OSError as e:
            raise Exception("%s [%d]" % (e.strerror, e.errno))

        os.umask(0)
        os.chdir("/")

        # Detach stdio: stdin from /dev/null, stdout/stderr into the logfile.
        sys.stdout.flush()
        sys.stderr.flush()
        si = file('/dev/null', 'r')
        so = file(self.logfile, 'a+')
        se = so
        os.dup2(si.fileno(),sys.stdin.fileno())
        os.dup2(so.fileno(),sys.stdout.fileno())
        os.dup2(se.fileno(),sys.stderr.fileno())

        # Clear out all log handlers prior to the fork() to avoid calling
        # event handlers not part of the PRserver
        # NOTE(review): this actually runs in the child, after both forks --
        # confirm the comment, not the placement, is what is stale here.
        for logger_iter in logging.Logger.manager.loggerDict.keys():
            logging.getLogger(logger_iter).handlers = []

        # Ensure logging makes it to the logfile
        streamhandler = logging.StreamHandler()
        streamhandler.setLevel(logging.DEBUG)
        formatter = bb.msg.BBLogFormatter("%(levelname)s: %(message)s")
        streamhandler.setFormatter(formatter)
        logger.addHandler(streamhandler)

        # write pidfile
        pid = str(os.getpid())
        pf = file(self.pidfile, 'w')
        pf.write("%s\n" % pid)
        pf.close()

        # Serve until quit, then clean up the pidfile and exit the daemon.
        self.work_forever()
        self.delpid()
        os._exit(0)

class PRServSingleton(object):
    """Lazy holder for a locally auto-started PRServer; records the actual
    (host, port) once the server has bound its socket."""
    def __init__(self, dbfile, logfile, interface):
        self.dbfile = dbfile
        self.logfile = logfile
        self.interface = interface
        self.host = None
        self.port = None

    def start(self):
        """Create and daemonize the server, then capture its address."""
        self.prserv = PRServer(self.dbfile, self.logfile, self.interface)
        self.prserv.start()
        self.host, self.port = self.prserv.getinfo()

    def getinfo(self):
        """Return the (host, port) of the started server."""
        return (self.host, self.port)

class PRServerConnection(object):
    """Client-side XML-RPC proxy to a running PRServer."""
    def __init__(self, host, port):
        # 'localhost:0' (or empty port) means "use the auto-started
        # singleton"; resolve to its real address.
        if is_local_special(host, port):
            host, port = singleton.getinfo()
        self.host = host
        self.port = port
        self.connection, self.transport = bb.server.xmlrpc._create_server(self.host, self.port)

    def terminate(self):
        """Ask the remote server to shut down; failures are only reported."""
        try:
            logger.info("Terminating PRServer...")
            self.connection.quit()
        except Exception as exc:
            sys.stderr.write("%s\n" % str(exc))

    def getPR(self, version, pkgarch, checksum):
        return self.connection.getPR(version, pkgarch, checksum)

    def ping(self):
        return self.connection.ping()

    def export(self,version=None, pkgarch=None, checksum=None, colinfo=True):
        return self.connection.export(version, pkgarch, checksum, colinfo)

    def importone(self, version, pkgarch, checksum, value):
        return self.connection.importone(version, pkgarch, checksum, value)

    def getinfo(self):
        return self.host, self.port

def start_daemon(dbfile, host, port, logfile):
    """Start a PRServer daemon on host:port unless a pidfile says one is
    already running.  Returns 0 on success, 1 if already running."""
    pidfile = PIDPREFIX % (host, port)
    try:
        pf = file(pidfile,'r')
        pid = int(pf.readline().strip())
        pf.close()
    except IOError:
        pid = None

    if pid:
        sys.stderr.write("pidfile %s already exist. Daemon already running?\n"
                            % pidfile)
        return 1

    server = PRServer(os.path.abspath(dbfile), os.path.abspath(logfile), (host,port))
    server.start()
    return 0

def stop_daemon(host, port):
    """Stop the PRServer on host:port: ask it to quit over RPC, remove the
    pidfile, and escalate to SIGTERM if it is still running."""
    pidfile = PIDPREFIX % (host, port)
    try:
        pf = file(pidfile,'r')
        pid = int(pf.readline().strip())
        pf.close()
    except IOError:
        pid = None

    if not pid:
        # NOTE(review): no early return here, so the RPC terminate below is
        # still attempted even without a pidfile -- confirm that is the
        # intended best-effort behaviour.
        sys.stderr.write("pidfile %s does not exist. Daemon not running?\n"
                        % pidfile)

    try:
        PRServerConnection(host, port).terminate()
    except:
        logger.critical("Stop PRService %s:%d failed" % (host,port))

    time.sleep(0.5)

    try:
        if pid:
            if os.path.exists(pidfile):
                os.remove(pidfile)

            # Give the daemon up to ~5s to exit gracefully before SIGTERM.
            wait_timeout = 0
            while is_running(pid) and wait_timeout < 10:
                print("Waiting for pr-server to exit.")
                time.sleep(0.5)
                wait_timeout += 1

            if is_running(pid):
                print("Sending SIGTERM to pr-server.")
                os.kill(pid,signal.SIGTERM)
                time.sleep(0.1)

    except OSError as e:
        # Ignore only "No such process"; anything else is re-raised.
        err = str(e)
        if err.find("No such process") <= 0:
            raise e

    return 0

def is_running(pid):
    """True if a process with this pid exists (signal 0 probe)."""
    try:
        os.kill(pid, 0)
    except OSError as err:
        if err.errno == errno.ESRCH:
            return False
    return True

def is_local_special(host, port):
    """True for the special 'localhost' + falsy-port combination that
    designates the auto-started singleton server."""
    if host.strip().upper() == 'localhost'.upper() and (not port):
        return True
    else:
        return False

class PRServiceConfigError(Exception):
    """Raised when PRSERV_HOST or its cache directory is misconfigured."""
    pass

def auto_start(d):
    """Start (if needed) and verify the PR service configured via
    PRSERV_HOST; returns 'host:port' of the live service, None when no
    service is configured, or raises PRServiceConfigError."""
    global singleton
    host_params = filter(None, (d.getVar('PRSERV_HOST', True) or '').split(':'))
    if not host_params:
        return None

    if len(host_params) != 2:
        logger.critical('\n'.join(['PRSERV_HOST: incorrect format',
                'Usage: PRSERV_HOST = "<hostname>:<port>"']))
        raise PRServiceConfigError

    # 'localhost:0' means run our own local server (once per process).
    if is_local_special(host_params[0], int(host_params[1])) and not singleton:
        import bb.utils
        cachedir = (d.getVar("PERSISTENT_DIR", True) or d.getVar("CACHE", True))
        if not cachedir:
            logger.critical("Please set the 'PERSISTENT_DIR' or 'CACHE' variable")
            raise PRServiceConfigError
        bb.utils.mkdirhier(cachedir)
        dbfile = os.path.join(cachedir, "prserv.sqlite3")
        logfile = os.path.join(cachedir, "prserv.log")
        singleton = PRServSingleton(os.path.abspath(dbfile), os.path.abspath(logfile), ("localhost",0))
        singleton.start()
    if singleton:
        host, port = singleton.getinfo()
    else:
        host = host_params[0]
        port = int(host_params[1])

    try:
        # Probe the service before reporting it as usable.
        connection = PRServerConnection(host,port)
        connection.ping()
        realhost, realport = connection.getinfo()
        return str(realhost) + ":" + str(realport)
    except Exception:
        logger.critical("PRservice %s:%d not available" % (host, port))
        raise PRServiceConfigError

def auto_shutdown(d=None):
    """Terminate the auto-started singleton server, if any."""
    global singleton
    if singleton:
        host, port = singleton.getinfo()
        try:
            PRServerConnection(host, port).terminate()
        except:
            logger.critical("Stop PRService %s:%d failed" % (host,port))
        singleton = None

def ping(host, port):
    """Ping the PR service at host:port; returns the remote ping() result."""
    conn=PRServerConnection(host, port)
    return conn.ping()
I dissabled Clique, assign keyboard keymaps and all is working! Create a new Mapped Key in the “Click Bar Healing Config” Key Map. Call this “Show Hide Healing Bars”. This mapped key is actually used to keep the Click Bars visible when switching characters. Set a Hotkey for this Mapped Key. I’ve used F10. Create a New Step for this Mapped Key so it has 2 steps. Main Healer Bar ON should be Window:Current not Window:All w/ Current? The previous section for Set Healing Bars on Switch, mean that as you switch to each slot, it will "read" the status of the healing bars, and ONLY show them on the active slot. This is because 1. it seems pointless to use them from another slot, and 2. if you were to videofx into the other windows, you can't set a source on the clickbars. For the most part, I expected you would only ever use them from the current active slot. Set this up on my two man in LOTRO and it works perfectly if I am on my minstrel. However, my guardian is the character I run as the main and when I mouseover and click the bars the heal does not go through to the healer. What am I doing wrong? The Targets here should be the appropriate ATG that will execute the action. E.g. in the Main Healing Bar, it should be MainHealer ("Group (all of):MainHealer" in the drop down), in Alt Healing Bar, it should be AltHealer, and in Buff Bar, it should be Buffer. Thank you that fixed it. Now I just have to figure out why follow isn't working and all run when I with the press of keyboard keys. While a different topic, disable broadcasting when trying to move about. Press your Follow Hotkey (ALT+F). It presses the variable keystroke. So, provided your in game keybind for Follow is set to ALT+G, it should work when you press ALT+F. Hello. I'm new to ISBoxer have been following this extremely well documented and laid out configuration guide. However, I am running into a couple of issues and have some questions. Earlier today, I think I almost had everything working. 
The mouseover overlay was working on the warrior. If I was mousing over the main healer buttons I watched the cleric target slot1 -slot 3 PCs. When mousing over the alt healing bars the shaman was changing targets as well. The first problem was that when I clicked key 1 or 2 while moused over the warrior is that not only was the cleric casting a hot the warrior was trying to kick. The same thing was occurring when I clicked key 2 (cleric medium heal ; warrior taunt). I tried to fix it. Regretfully, my mucking around has now broken the click bars. They are no longer executing keystrokes on the cleric/shaman windows. The targetting is still working great, but I am unable to figure why nothing is executing and still don't know the answer to my original problem. Why keystrokes were being executed on not only the primary account (warrior), but also on the clerics. Please advise. Any comments or suggestions will be greatly appreciated. Update - I managed to get mapped keys to execute heals and no longer activating keys on the warrior. I set target to Window: All w/o Current. However, now I'm still having issues with both the cleric and shaman executing keystrokes when using a mapped key. I have a HoT on hotkey bar slot 1 (cleric) and melee attack hotkey bar slot 1 (shaman) and both are getting pressed when I have the mouseover target as my warrior and pressing 1. Last edited by bob on Sat Jan 19, 2019 7:40 pm, edited 1 time in total. You don't really have any mapped keys setup for healing as far as I can tell. The Main/Alt/Buff key maps are pretty much empty. When you are pressing the 1 key, it is executing the mapped keys in ISB42 - Combat Broadcast Hotkeys. If you want to use Hotkeys to trigger things when moused over the bars, then you will need to setup some mapped keys to do something, and disable the keymaps which have the other mapped keys with hotkeys that you want to press. 
Or, you could hook up your new mapped keys to the actual buttons so you can click on them and they do something. I removed the default ISB42 keymaps that contained references to the number keys and exported to inner space. I'm still seeing the same thing. I really like the mouseover targetting provided by the heal bar, but still unable to execute a keystroke on 1 account only. On the Main Healing Bar I have 3 keys mapped (for testing) which is a HoT, Medium Heal, and CH and on the Alt Healing Bar I have a HoT and Medium Heal mapped. Basically what I'm looking for is to be able to target using the Click Bar then press a keystroke which is mapped to an in-game macro, but ensure that it does it on a specified account. Cleric heals executed when targetting using the Main Healing Bar and Shammy heals executed when targetting using the Alt Healing Bar. Will this solution work from what I'm trying to do or should I try something else? Sorry, my lack of knowledge when it comes to explaining this.
# -*- coding: utf-8 -*- from __future__ import unicode_literals import re import time import xml.etree.ElementTree as etree from .common import InfoExtractor from ..utils import ( unescapeHTML, urlencode_postdata, unified_timestamp, ) class AdobePassIE(InfoExtractor): _SERVICE_PROVIDER_TEMPLATE = 'https://sp.auth.adobe.com/adobe-services/%s' _USER_AGENT = 'Mozilla/5.0 (X11; Linux i686; rv:47.0) Gecko/20100101 Firefox/47.0' @staticmethod def _get_mvpd_resource(provider_id, title, guid, rating): channel = etree.Element('channel') channel_title = etree.SubElement(channel, 'title') channel_title.text = provider_id item = etree.SubElement(channel, 'item') resource_title = etree.SubElement(item, 'title') resource_title.text = title resource_guid = etree.SubElement(item, 'guid') resource_guid.text = guid resource_rating = etree.SubElement(item, 'media:rating') resource_rating.attrib = {'scheme': 'urn:v-chip'} resource_rating.text = rating return '<rss version="2.0" xmlns:media="http://search.yahoo.com/mrss/">' + etree.tostring(channel).decode() + '</rss>' def _extract_mvpd_auth(self, url, video_id, requestor_id, resource): def xml_text(xml_str, tag): return self._search_regex( '<%s>(.+?)</%s>' % (tag, tag), xml_str, tag) def is_expired(token, date_ele): token_expires = unified_timestamp(re.sub(r'[_ ]GMT', '', xml_text(token, date_ele))) return token_expires and token_expires <= int(time.time()) mvpd_headers = { 'ap_42': 'anonymous', 'ap_11': 'Linux i686', 'ap_z': self._USER_AGENT, 'User-Agent': self._USER_AGENT, } guid = xml_text(resource, 'guid') requestor_info = self._downloader.cache.load('mvpd', requestor_id) or {} authn_token = requestor_info.get('authn_token') if authn_token and is_expired(authn_token, 'simpleTokenExpires'): authn_token = None if not authn_token: # TODO add support for other TV Providers mso_id = 'DTV' username, password = self._get_netrc_login_info(mso_id) if not username or not password: return '' def post_form(form_page, note, data={}): post_url 
= self._html_search_regex(r'<form[^>]+action=(["\'])(?P<url>.+?)\1', form_page, 'post url', group='url') return self._download_webpage( post_url, video_id, note, data=urlencode_postdata(data or self._hidden_inputs(form_page)), headers={ 'Content-Type': 'application/x-www-form-urlencoded', }) provider_redirect_page = self._download_webpage( self._SERVICE_PROVIDER_TEMPLATE % 'authenticate/saml', video_id, 'Downloading Provider Redirect Page', query={ 'noflash': 'true', 'mso_id': mso_id, 'requestor_id': requestor_id, 'no_iframe': 'false', 'domain_name': 'adobe.com', 'redirect_url': url, }) provider_login_page = post_form( provider_redirect_page, 'Downloading Provider Login Page') mvpd_confirm_page = post_form(provider_login_page, 'Logging in', { 'username': username, 'password': password, }) post_form(mvpd_confirm_page, 'Confirming Login') session = self._download_webpage( self._SERVICE_PROVIDER_TEMPLATE % 'session', video_id, 'Retrieving Session', data=urlencode_postdata({ '_method': 'GET', 'requestor_id': requestor_id, }), headers=mvpd_headers) if '<pendingLogout' in session: self._downloader.cache.store('mvpd', requestor_id, {}) return self._extract_mvpd_auth(url, video_id, requestor_id, resource) authn_token = unescapeHTML(xml_text(session, 'authnToken')) requestor_info['authn_token'] = authn_token self._downloader.cache.store('mvpd', requestor_id, requestor_info) authz_token = requestor_info.get(guid) if authz_token and is_expired(authz_token, 'simpleTokenTTL'): authz_token = None if not authz_token: authorize = self._download_webpage( self._SERVICE_PROVIDER_TEMPLATE % 'authorize', video_id, 'Retrieving Authorization Token', data=urlencode_postdata({ 'resource_id': resource, 'requestor_id': requestor_id, 'authentication_token': authn_token, 'mso_id': xml_text(authn_token, 'simpleTokenMsoID'), 'userMeta': '1', }), headers=mvpd_headers) if '<pendingLogout' in authorize: self._downloader.cache.store('mvpd', requestor_id, {}) return self._extract_mvpd_auth(url, 
video_id, requestor_id, resource) authz_token = unescapeHTML(xml_text(authorize, 'authzToken')) requestor_info[guid] = authz_token self._downloader.cache.store('mvpd', requestor_id, requestor_info) mvpd_headers.update({ 'ap_19': xml_text(authn_token, 'simpleSamlNameID'), 'ap_23': xml_text(authn_token, 'simpleSamlSessionIndex'), }) short_authorize = self._download_webpage( self._SERVICE_PROVIDER_TEMPLATE % 'shortAuthorize', video_id, 'Retrieving Media Token', data=urlencode_postdata({ 'authz_token': authz_token, 'requestor_id': requestor_id, 'session_guid': xml_text(authn_token, 'simpleTokenAuthenticationGuid'), 'hashed_guid': 'false', }), headers=mvpd_headers) if '<pendingLogout' in short_authorize: self._downloader.cache.store('mvpd', requestor_id, {}) return self._extract_mvpd_auth(url, video_id, requestor_id, resource) return short_authorize
The quality of resources for website monetization is important for making money online. In order to facilitate your task of choosing website monetization resources, we have compiled a list that, in your opinion, includes the best resources and tools. When selecting resources to monetize website it is necessary to make some experiments, which will allow you ultimately to find the best combination of used resources and monetization strategies. Sign up to the marketplaces relevant to your business topic. Make a list of marketplaces for your business case and familiarize with the possibilities to get business website ads from each marketplace. Typically, marketers use several marketplaces in order most fully cover the market of products and services of their business topic. So, carefully study each marketplace in order achieve the fullest use of its capabilities. You have to learn how to choose the most profitable products to advertise on your business website. Also, learn the ability to automate your advertising, many marketplaces often offer their free WP plugins and widgets. In this section the best online marketplaces for a wide range of products are presented. For the big number of online business intentions this list of marketplaces may be enough, as the number of products in these markets is simply huge, and can satisfy almost all tastes for business website ads. The Amazon today is widely recognized as one of the biggest e-commerce platforms worldwide. Its main competitor is eBay. It was originally planned to trade books, but today Amazon turned into a giant supermarket, where you can buy everything from baby clothes to television, from plumbing to jewelry. Amazon will allow you to find and pick up any item and place an order in a matter of minutes. It, also, has a great affiliate program. The eBay is the next largest marketplace in the world (with over 230 million users). 
It is a fantastic place where you can buy anything you like, smart businesses realize that eBay can be a valuable component of a variety of e-commerce strategies and the perfect place where to get the business website ads. The eBay Partner Network is eBay’s in-house affiliate program. They provide publishers with resources for website monetization, social pages, mobile apps and other online properties by driving high-quality traffic to eBay. In this section the best online marketplaces for the digital products is presented. The sale of digital products is very popular and profitable. All listed below marketplaces have affiliate programs, so join them and get the best ads for website. The ClickBank is the largest and most well-known online marketplace for digital products. It provides an opportunity to its customers to open a free account and accept credit card payments. ClickBank has more than 10,000 items of products that you can buy, or you can sell them as an affiliate and get commission of near of 50% and even more. Its products are selling well and by the price usually are not bigger than $100. In addition, it offers a money back guarantee for 60 days at the request of the buyer. This circumstance attracts more customers, vendors and affiliates. The Commission Junction is an online promoting service (company) working in the affiliate marketing sphere. The company is the biggest among the affiliate networks in North America and runs their service worldwide. Among the TOP 500 merchants which are using 3-rd party affiliate marketing application, 62% are managing by Commission Junction platform. Commission Junction is more a community than a network and it is very rich in best ads for website of different business profile. The JVZoo is an e-commerce platform that makes it possible for sellers to create a quick affiliate program for their products and services. You can sell your products anywhere – website, blog, forums, others. 
The JVZoo resources for website monetization are lower in price In comparison with other marketplaces, for example, ClickBank charges $49 to list one product, WarriorPlus charges $19 per product. The ClickSure is a digital products marketplace which offers similar products to ClickBank and is its direct competitor. It seems that in fact they gradually are taking many products away from ClickBank. Over 120,000 affiliate marketers are using ClickSure affiliate network. For today they only accept payments through credit cards and do not use PayPal. They also have a long time to wait for product approvals, but in many cases it is useful, because it encourages quality and you can get best ads for website. Their website is well designed and it is easy to use. The Warrior+Plus is a help and affiliate marketing website of Warrior Forum and it is a marketplace for WSO (Warrior Special Offers). This is a great system to get interesting digital products and to promote them on your business website. It is very easy to use ant it has an affiliate program. The affiliate program allows you to sell other people WSOs and to receive commission. Once you have selected the product you should ask permission from the WSO author to promote it, and as it is approved you get you promotion URL. The PayDotCom is one of the world’s biggest and best growing online marketplaces with the catalogs of thousands of items and services (both digital and physical products). Today, they join over 25,000 vendors, who currently sell products from this platform, and have over 600,000 affiliates, that promote products now. The affiliates can earn sales commissions up to 80% by linking their consumers. As an affiliate you’ll obtain needed information znd get all links and the promotion resources from the Vendor’s affiliate tools. The ProductPay is a marketplace only for digital products. Website is very well designed and easy to use by vendors and affiliates. 
It can be attributed to the leading sites of this type. The products are very easy to find on categories and they provide a separate short list for the most popular products, it is very convenient if you are looking for best ads for website. They use Paypal to get and make payments. So, you should have an active Paypal account to use their services and affiliate offers. The DigiResults is a relatively new e-commerce platform which becomes a great new marketplace for both vendors and affiliates. The main advantages of this platform include: the ease of use, wide range of products and prompt payment of services performed. They use the member’s PayPal account to pay affiliate commissions. Ordinarily, the vendor pays 50%, the vendor and affiliate both get 50% from the sale paid out immediately into their PayPal accounts, right at the moment of sale. This is a very good place to get best ads for your website. The RAP Bank (Rapid Action Profits) is the instant commission marketplace with extremely exclusive features. It allows you to promote 100’s of products, submit your products, and obtain exclusive reports, features and affiliate opportunities. The affiliate center will help you track all listed products, allowing you to manage your online business. They provide you with exclusive reports that give you the “what’s hot” at RAP Bank. This will let you to get the best resources for website monetization. Here we present some of the WP plugins that can help you automate the process of selecting and placing your business website ads. There is a big set of plugins of this type, but we recommend only the most reliable and easy to use. The Ad Injection from reviewmylife injects any kind of advert or other content (e.g. Google AdSense, Amazon Associates, ClickBank, TradeDoubler, etc) into the existing content of your WordPress posts and pages. 
You can control the number of adverts based on the post length, and it can restrict who sees adverts by post age, visitor referrer and IP address. Adverts can be configured in the post (random, top, and bottom positions) or in any widget/sidebar area. The WP Robot is a WordPress plugin used to supplement content for WP websites and WP blogs. In other words it is an auto posting on WordPress website software. It allows users to create targeted posts on any topic without the need to prepare (write) content yourself. Content can be taken from very many sources, including Amazon, eBay, ClickBank, and YouTube. You can once insert your affiliate data for Amazon, eBay, ClickBank, YouTube etc., and your affiliate links (URLs) will be automatically transferred to all your posts. The Easy Azon is an excellent tool to integrate your Amazon affiliate links directly into your web pages in an automated style. It is extremely easy to understand and use. There are some configurations that need to be tuned only for the first time and you’ll never have to feel it once again. Based upon on your visitor’s analytics, and also the products you’re promoting, this might recover a good amount of product sales for you right off the ground. If you plan to use Amazon ads on your business website the WP Easy Azon plagin will be a very good solution. This program easy to set up and there is no need for hosting as CB Ad Rotator hosting is included in the price. Really, all you need to do is login to your control panel, select your targeted key words and you are ready to go. CB Ad Rotator will continually update with targeted ads from ClickBank’s marketplace, there is no need to change as new ClickBank products are even updated to your ads for you, it’s that simple. It provides an excellent and profitable alternative to AdSense, which will benefit you and your visitors for years to come. The ASINspector application is made to assist you make highest profits upon Amazon products with small effort. 
It is a very simple yet powerful tool you may use to make money with Amazon, eBay or Shopify. With this useful tool, it is simple to know the best-seller of any kind of product, that help you make a decision if you possibly could beat the competition and find the huge stack of the money by defeating your competitors to dust. You will be able to track down every single move of the competitors, and you know what this may mean to your business. And it works in multiple countries. In this section you’ll find the most popular resources for the solution of the problem of placing ads of the third-party on your website. Check out these resources and you’ll know how to get ads for website automatically and free. AdSense is the most popular resource for third-party ads placement on a website. However, it has very strict rules for the users. If you wish to use this resource effectively, you should first of all concentrate on the content of your web pages and not to place on them other advertising in big amounts, especially affiliate links of ClickBank, Amazon, etc. Also it is desirable to use a quality tutorial to learn how to build the profitable advertising campaigns to get the best ads for website. This is a secure website and it has not many of bad comments on the Internet. So, use it to find business website ads. For many reasons your AdSense account may be suspended. If you happened you can try alternatives to AdSense. But in each case they should be tested, as it depends on the content of your website. The MediaNet is one of the most effective alternatives for AdSense third party ads. It is a contextual ad network, operated by Yahoo and Bing. It makes it possible for publishers to earn money from advertising revenue, and offers custom-made ad units that fit with your website look. The minimal payment is $100 (PayPal/Wire). The RevenueHits offer Banner ads, Sliders, Pop-unders, button, etc. 
Their technology permits them to scan millions of online ads and analyze their performance over time. The network was designed to help publishers monetize online assets, including toolbars, websites, search, widgets, IM applications and more. The minimal payment is $20 (PayPal/Payoneer), $500 (Wire). The Bidvertiser is a pay per click advertising site that could be a great AdSense alternative if you didn’t get Adsense approval or got banned. The BidVertiser will always display the highest bidders’ ads on web pages, assuring the maximum revenue possible at any given time. The Bidvertiser offers Text ads, Banner ads, Mobile Ads, Slider ads and others. They pay monthly and the minimal payment is $10 for PayPal/Payza and $50 for the check. The Infolinks is a bit unique from other advertising networks. Mainly for the reason that it displays ads differently. It doesn’t take too many spots on your website to show ads. It shows in text ads or pop-up ads. Infolinks offers 4 types of ads – InFold, InText, InTag, and InFrame. All of them are designed to overcome banner blindness. They pay monthly and the minimal payment is $50 for PayPal or $100 for Bank wire. The Adversal is the most effective Adsense alternative in terms of serving ads. It has a wonderful CTR and operates with several languages, but you need to have monthly page views of 50,000 to apply this network. To use the Adversal your website must: have a minimum of 50,000 page views per month, not host content that delivers malware or browser hijacks/redirects, not host or link to copyright infringing (pirated/illegal) material, not contain or link to adult content. They pay monthly and the minimal payment is $20 (PayPal, ACH, Check, or Wire). The Wiglink is totally different from other Ad networks and third party advertising services. It converts normal links into affiliate links. If someone makes a purchase by using your link, you’ll earn a commission out of it. 
The Viglink allows you to easily create monetized links to incorporate into your social media posts. The dashboard also provides you with insights into the content and products that resonate with your readers. The minimal payment is – No Minimum. You need not get approval for using Chitika ads on your website. That means you can monetize your low traffic site with Chitika. Just create an account and put ads on your website. It provides 3 types of ads – Search Targeted Ads, Local Ads, and Mobile Ads. Also, Chitika has a new mobile advertising platform. The minimal payment is – $10 (PayPal), $50 (Check).
""" This module intends to work as glue between the gamemodel and the gamewidget taking care of stuff that is neither very offscreen nor very onscreen like bringing up dialogs and """ import math import gtk from pychess.Utils.Offer import Offer #from pychess.Utils.GameModel import GameModel #from pychess.Utils.TimeModel import TimeModel from pychess.Utils.const import * import pychess.ic.ICGameModel from pychess.Utils.repr import * from pychess.System import conf from pychess.System import glock from pychess.widgets import preferencesDialog from gamewidget import getWidgets, key2gmwidg, isDesignGWShown from gamewidget import MENU_ITEMS, ACTION_MENU_ITEMS from pychess.ic.ICGameModel import ICGameModel def nurseGame (gmwidg, gamemodel): """ Call this function when gmwidget is just created """ gmwidg.connect("infront", on_gmwidg_infront) gmwidg.connect("closed", on_gmwidg_closed) gmwidg.connect("title_changed", on_gmwidg_title_changed) # Because of the async loading of games, the game might already be started, # when the glock is ready and nurseGame is called. # Thus we support both cases. 
if gamemodel.status == WAITING_TO_START: gamemodel.connect("game_started", on_game_started, gmwidg) gamemodel.connect("game_loaded", game_loaded, gmwidg) else: if gamemodel.uri: game_loaded(gamemodel, gamemodel.uri, gmwidg) on_game_started(gamemodel, gmwidg) gamemodel.connect("game_saved", game_saved, gmwidg) gamemodel.connect("game_ended", game_ended, gmwidg) gamemodel.connect("game_unended", game_unended, gmwidg) gamemodel.connect("game_resumed", game_unended, gmwidg) #=============================================================================== # Gamewidget signals #=============================================================================== def on_gmwidg_infront (gmwidg): # Set right sensitivity states in menubar, when tab is switched auto = gmwidg.gamemodel.players[0].__type__ != LOCAL and \ gmwidg.gamemodel.players[1].__type__ != LOCAL for item in ACTION_MENU_ITEMS: getWidgets()[item].props.sensitive = not auto for widget in MENU_ITEMS: sensitive = False if widget == 'abort': if isinstance(gmwidg.gamemodel, pychess.ic.ICGameModel.ICGameModel): sensitive = True elif widget == 'adjourn': if isinstance(gmwidg.gamemodel, pychess.ic.ICGameModel.ICGameModel): sensitive = True elif widget == 'hint_mode': if gmwidg.gamemodel.hintEngineSupportsVariant and conf.get("analyzer_check", True): sensitive = True elif widget == 'spy_mode': if gmwidg.gamemodel.spyEngineSupportsVariant and conf.get("inv_analyzer_check", True): sensitive = True elif widget == 'show_sidepanels': if not isDesignGWShown(): sensitive = True else: sensitive = True getWidgets()[widget].set_property('sensitive', sensitive) # Change window title getWidgets()['window1'].set_title('%s - PyChess' % gmwidg.getTabText()) def on_gmwidg_closed (gmwidg): if len(key2gmwidg) == 1: getWidgets()['window1'].set_title('%s - PyChess' % _('Welcome')) def on_gmwidg_title_changed (gmwidg): if gmwidg.isInFront(): getWidgets()['window1'].set_title('%s - PyChess' % gmwidg.getTabText()) 
#===============================================================================
# Gamemodel signals
#===============================================================================

# Connect game_loaded, game_saved and game_ended to statusbar

def game_loaded (gamemodel, uri, gmwidg):
    """ Show a 'Loaded game' message (with the uri, when it is a string)
        in the widget's statusbar. """
    if type(uri) in (str, unicode):
        s = "%s: %s" % (_("Loaded game"), str(uri))
    else:
        s = _("Loaded game")

    # Statusbar updates touch GTK, so they must happen under the gdk lock
    glock.acquire()
    try:
        gmwidg.status(s)
    finally:
        glock.release()

def game_saved (gamemodel, uri, gmwidg):
    """ Show a 'Saved game: <uri>' message in the widget's statusbar. """
    glock.acquire()
    try:
        gmwidg.status("%s: %s" % (_("Saved game"), str(uri)))
    finally:
        glock.release()

def game_ended (gamemodel, reason, gmwidg):
    """ Pop up the end-of-game dialog (result + reason), offering rematch
        and undo where applicable, and update the statusbar.

        reason is one of the game-end reason constants; it is also checked
        for engine deaths so the engine-died dialog can be shown. """
    # Names used to fill in the translated result/reason format strings
    nameDic = {"white": gamemodel.players[WHITE],
               "black": gamemodel.players[BLACK],
               "mover": gamemodel.curplayer}
    if gamemodel.status == WHITEWON:
        nameDic["winner"] = gamemodel.players[WHITE]
        nameDic["loser"] = gamemodel.players[BLACK]
    elif gamemodel.status == BLACKWON:
        nameDic["winner"] = gamemodel.players[BLACK]
        nameDic["loser"] = gamemodel.players[WHITE]
    m1 = reprResult_long[gamemodel.status] % nameDic
    m2 = reprReason_long[reason] % nameDic

    md = gtk.MessageDialog()
    md.set_markup("<b><big>%s</big></b>" % m1)
    md.format_secondary_markup(m2)

    # Rematch/undo buttons are only offered when a local human took part.
    # Response ids: 0 = offer rematch (remote opponent), 1 = play rematch
    # (local/engine opponent), 2 = undo move(s).
    if gamemodel.players[0].__type__ == LOCAL or gamemodel.players[1].__type__ == LOCAL:
        if gamemodel.players[0].__type__ == REMOTE or gamemodel.players[1].__type__ == REMOTE:
            md.add_button(_("Offer Rematch"), 0)
        else:
            md.add_button(_("Play Rematch"), 1)
            if gamemodel.ply > 1:
                md.add_button(_("Undo two moves"), 2)
            elif gamemodel.ply == 1:
                md.add_button(_("Undo one move"), 2)

        def cb (messageDialog, responseId):
            # Dispatch on the response ids added above
            if responseId == 0:
                # Send the rematch offer through whichever player is remote
                if gamemodel.players[0].__type__ == REMOTE:
                    gamemodel.players[0].offerRematch()
                else:
                    gamemodel.players[1].offerRematch()
            elif responseId == 1:
                # Import here to avoid a module-load cycle with the dialog
                from pychess.widgets.newGameDialog import createRematch
                createRematch(gamemodel)
            elif responseId == 2:
                # Take back two plies when it is the local player's turn
                # (undo the opponent's reply as well), otherwise one ply
                if gamemodel.curplayer.__type__ == LOCAL and gamemodel.ply > 1:
                    offer = Offer(TAKEBACK_OFFER, gamemodel.ply-2)
                else:
                    offer = Offer(TAKEBACK_OFFER, gamemodel.ply-1)
                # Emit the takeback offer from a local player
                if gamemodel.players[0].__type__ == LOCAL:
                    gamemodel.players[0].emit("offer", offer)
                else:
                    gamemodel.players[1].emit("offer", offer)
        md.connect("response", cb)

    glock.acquire()
    try:
        gmwidg.showMessage(md)
        # Statusbar line: "<result> <reason-with-lowercased-first-letter>."
        gmwidg.status("%s %s." % (m1,m2[0].lower()+m2[1:]))

        if reason == WHITE_ENGINE_DIED:
            engineDead(gamemodel.players[0], gmwidg)
        elif reason == BLACK_ENGINE_DIED:
            engineDead(gamemodel.players[1], gmwidg)
    finally:
        glock.release()

def game_unended (gamemodel, gmwidg):
    """ Clear the end-of-game message and statusbar when a finished game
        is resumed or un-ended. """
    glock.acquire()
    try:
        print "sending hideMessage"
        gmwidg.hideMessage()
        gmwidg.status("")
    finally:
        glock.release()

def on_game_started (gamemodel, gmwidg):
    """ One-time setup when a game actually starts: menu sensitivity,
        board orientation, start sound, offer hooks and analyzers. """
    on_gmwidg_infront(gmwidg)  # setup menu items sensitivity

    # Rotate to human player: if only black is local, or auto-rotate is on
    # and it is local black's move, flip the board
    boardview = gmwidg.board.view
    if gamemodel.players[1].__type__ == LOCAL:
        if gamemodel.players[0].__type__ != LOCAL:
            boardview.rotation = math.pi
        elif conf.get("autoRotate", True) and \
                gamemodel.curplayer == gamemodel.players[1]:
            boardview.rotation = math.pi

    # Play set-up sound
    preferencesDialog.SoundTab.playAction("gameIsSetup")

    # Connect player offers to statusbar (local players only)
    for player in gamemodel.players:
        if player.__type__ == LOCAL:
            player.connect("offer", offer_callback, gamemodel, gmwidg)

    # Start analyzers if any, according to the current menu toggles
    setAnalyzerEnabled(gmwidg, HINT, getWidgets()["hint_mode"].get_active())
    setAnalyzerEnabled(gmwidg, SPY, getWidgets()["spy_mode"].get_active())

#===============================================================================
# Player signals
#===============================================================================

def offer_callback (player, offer, gamemodel, gmwidg):
    """ Reflect a local player's outgoing offer in the statusbar.
        Currently only draw offers are reported. """
    if offer.type == DRAW_OFFER:
        if gamemodel.status != RUNNING:
            # If the offer has already been handled by Gamemodel and the
            # game was drawn, we need to do nothing
            return
        glock.acquire()
        try:
            gmwidg.status(_("You sent a draw offer"))
        finally:
            glock.release()

#===============================================================================
# Subfunctions
#===============================================================================

def engineDead (engine, gmwidg):
    """ Tell the user that an engine process has died and that the game
        therefore cannot continue. Called from game_ended when the reason
        is WHITE_ENGINE_DIED or BLACK_ENGINE_DIED. """
    gmwidg.bringToFront()
    d = gtk.MessageDialog(type=gtk.MESSAGE_ERROR, buttons=gtk.BUTTONS_OK)
    d.set_markup(_("<big><b>Engine, %s, has died</b></big>") % repr(engine))
    d.format_secondary_text(_("PyChess has lost connection to the engine, probably because it has died.\n\nYou can try to start a new game with the engine, or try to play against another one."))
    d.connect("response", lambda d,r: d.hide())
    d.show_all()

def setAnalyzerEnabled (gmwidg, analyzerType, enabled):
    """ Turn the hint (HINT) or spy (SPY) analyzer arrows on or off for a
        game widget.

        When enabling, immediately draws the current best-move arrow (if
        any analysis exists) and connects the analyzer's "analyze" signal
        plus the gamemodel's change/undo signals so the arrow tracks the
        game. When disabling, disconnects everything and clears the arrow.
    """
    # Analyzers live in the gamemodel's spectators dict; nothing to do if
    # this analyzer type was never started
    if not analyzerType in gmwidg.gamemodel.spectactors:
        return
    analyzer = gmwidg.gamemodel.spectactors[analyzerType]

    # HINT draws a green arrow (our best move), SPY a red one
    if analyzerType == HINT:
        arrow = gmwidg.board.view._set_greenarrow
    else:
        arrow = gmwidg.board.view._set_redarrow
    # Defer the actual drawing until the board view is ready
    set_arrow = lambda x: gmwidg.board.view.runWhenReady(arrow, x)

    if enabled:
        if len(analyzer.getAnalysis()) >= 1:
            # Only show arrows when a local human is to move, or when we
            # are merely observing a remote-vs-remote game
            if gmwidg.gamemodel.curplayer.__type__ == LOCAL or \
               [player.__type__ for player in gmwidg.gamemodel.players] == [REMOTE, REMOTE]:
                set_arrow (analyzer.getAnalysis()[0].cords)
            else:
                set_arrow (None)

        # This is a kludge using python's ability to assign attributes to
        # an object, even if those attributes are nowhere mentioned in the
        # object's class. So don't go looking for it ;)
        # The attributes store our signal connection ids, enabling us to
        # later disconnect in the "disabled" branch below.
        if not hasattr (gmwidg.gamemodel, "anacons"):
            gmwidg.gamemodel.anacons = {HINT:[], SPY:[]}
        if not hasattr (gmwidg.gamemodel, "chacons"):
            gmwidg.gamemodel.chacons = []

        def on_analyze (analyzer, moves, score):
            # Redraw (or clear) the arrow each time new analysis arrives
            if moves and (gmwidg.gamemodel.curplayer.__type__ == LOCAL or \
               [player.__type__ for player in gmwidg.gamemodel.players] == [REMOTE, REMOTE]):
                set_arrow (moves[0].cords)
            else:
                set_arrow (None)

        def on_game_change (gamemodel):
            # A new position makes the old analysis arrow stale
            set_arrow (None)

        gmwidg.gamemodel.anacons[analyzerType].append(
                analyzer.connect("analyze", on_analyze))
        gmwidg.gamemodel.chacons.append(
                gmwidg.gamemodel.connect("game_changed", on_game_change))
        gmwidg.gamemodel.chacons.append(
                gmwidg.gamemodel.connect("moves_undoing",
                                         lambda model, moves: on_game_change(model)))
    else:
        # Disconnect everything we connected above and clear the arrow
        if hasattr (gmwidg.gamemodel, "anacons"):
            for conid in gmwidg.gamemodel.anacons[analyzerType]:
                analyzer.disconnect(conid)
            del gmwidg.gamemodel.anacons[analyzerType][:]
        if hasattr (gmwidg.gamemodel, "chacons"):
            for conid in gmwidg.gamemodel.chacons:
                gmwidg.gamemodel.disconnect(conid)
            del gmwidg.gamemodel.chacons[:]
        set_arrow (None)
Another record auction has been recorded, this time not for diamond, but for an antique Chinese bowl. The sale broke the record for Chinese ceramics, auction house Sotheby’s said. The small piece — which dates from 960-1127 — stole the previous record of $36.05 million set in 2014 for a Ming Dynasty wine cup which was snapped up by a Shanghai tycoon famous for making eye-watering bids. The person behind Tuesday’s winning offer wished to remain anonymous, Sotheby’s said, with the auction house declining to say whether the buyer hailed from the Chinese mainland or not. “It’s a totally new benchmark for Chinese ceramics and we’ve made history with this piece today,” Nicolas Chow, deputy chairman of Sotheby’s Asia, told reporters. Bidding started at around US$10.2 million with the suspense-filled auction lasting some 20 minutes as a handful of phone bidders and one person in the room itself competed with each other. The winning offer eventually came from one of the phone bidders and was received by a round of applause. The bowl — originally designed to wash brushes — is an example of extremely rare Chinese porcelain from the imperial court of the Northern Song Dynasty and one of only four such pieces in private hands, according to Sotheby’s. Measuring 13cm in diameter, the dish features a luminous blue glaze. The price tag exceeds the earlier record made by a tiny white piece known as the “Chicken Cup”, decorated with a color painting of a rooster and a hen tending to their chicks, and created during the reign of the Chenghua Emperor between 1465 and 1487. That cup sold in 2014 to taxi-driver-turned-financier Liu Yiqian, one of China’s wealthiest people and among a new class of Chinese super-rich scouring the globe for artwork and antiquities.
#!/usr/bin/python
#
# Problem: Random Route
# Language: Python
# Author: KirarinSnow
# Usage: python thisfile.py <input.in >output.out
#
# Reads a weighted directed graph and a source vertex from stdin, finds
# all shortest paths from the source, and for each edge prints the
# probability that it lies on a shortest route to a uniformly chosen
# reachable destination (routes to a destination chosen uniformly).

import heapq

# Effectively "infinite" distance for Dijkstra initialisation
MAX = 100000000000

def compute():
    """ Solve one test case read from stdin; return the edge
        probabilities formatted to 7 decimals, space-separated. """

    def countroutes(v, x, s):
        # num of shortest routes from source s INTO edge x, heading
        # toward destination v; memoised in counts[x][v].
        if v in counts[x]:
            return
        if x[0] == s:
            counts[x][v] = 1
        elif x[0] not in pred:
            counts[x][v] = 0
        else:
            # Sum over all shortest-path edges entering x's tail vertex
            pp = pred[x[0]]
            num = 0
            for i in pp:
                pv = i
                countroutes(v, pv, s)
                num += counts[pv][v]
            counts[x][v] = num

    def countr2(v, x, s):
        # num of shortest routes OUT of edge x toward destination v;
        # memoised in scounts[x][v].
        if v in scounts[x]:
            return
        if x[1] not in succ:
            scounts[x][v] = 0
        else:
            pp = succ[x[1]]
            num = 0
            for i in pp:
                countr2(v, i, s)
                num += scounts[i][v]
            scounts[x][v] = num

    # ---- Input: "<num_edges> <source>" then one "a b cost" per edge ----
    line = raw_input().split()
    nr = int(line[0])
    source = line[1]
    edges = []
    vertices = dict()
    pred = dict()   # vertex -> list of shortest-path edges entering it
    succ = dict()   # vertex -> list of shortest-path edges leaving it
    for i in xrange(nr):
        a, b, c = raw_input().split()
        c = int(c)
        # Edge tuple: (tail, head, cost, original index)
        edges.append((a,b,c,i))
        if a not in vertices:
            # Vertex record: [d, inedge->{v->num}, outedges, v->total]
            vertices[a] = [MAX,dict(),[],dict()]
        if b not in vertices:
            vertices[b] = [MAX,dict(),[],dict()]
        vertices[a][2].append(edges[i])
        vertices[b][1][edges[i]] = dict()
    vertices[source][0] = 0  # d[s] = 0

    # ---- Dijkstra from source, building the shortest-path DAG (pred) ----
    q = map(lambda k: (vertices[k][0], k), vertices.keys())
    heapq.heapify(q)
    s = set()
    while q:
        node = heapq.heappop(q)
        if node[0] == vertices[node[1]][0]:  # valid key (not a stale entry)
            s.add(node)
            u = node[1]
            for adj in vertices[u][2]:
                w = adj[2]
                v = adj[1]
                if vertices[v][0] > vertices[u][0] + w:
                    # Strictly better path: reset predecessor list
                    vertices[v][0] = vertices[u][0] + w
                    heapq.heappush(q,(vertices[v][0],v))  # replace key
                    pred[v] = [adj]
                elif vertices[v][0] == vertices[u][0] + w:
                    # Equally short path: this edge is also on a shortest route
                    pred[v].append(adj)

    # Invert pred into succ (shortest-path edges leaving each vertex)
    for v in vertices:
        if v in pred:
            for e in pred[v]:
                u = e[0]
                if u not in succ:
                    succ[u] = []
                succ[u].append(e)

    # Number of reachable destinations (vertices with a shortest path)
    nv = len(pred)

    counts = dict()   # edge -> {dest -> #routes source..edge}
    scounts = dict()  # edge -> {dest -> #routes edge..dest}
    for e in edges:
        counts[e] = dict()
        scounts[e] = dict()
    totals = dict()   # dest -> total number of shortest routes to it
    # Edges entering the destination itself terminate exactly one route
    for v in vertices:
        if v in pred:
            for kk in pred[v]:
                scounts[kk][v] = 1
    for v in vertices:
        totals[v] = 0.0
        if v in pred:
            for e in pred[v]:
                countroutes(v, e, source)
                totals[v] += counts[e][v]
        if source in succ:
            for e in succ[source]:
                countr2(v, e, source)

    # ---- Combine: P(edge) = sum over dests of
    #      (#routes through edge / #routes to dest) / #dests ----
    edgecounts = len(edges)*[0]
    for e in edges:
        edgecounts[e[3]] = 0.0
    for v in vertices:
        for e in edges:
            i = e[3]
            u = e[0]
            if v in counts[e] and v in scounts[e]:
                edgecounts[i]+=(0.0+counts[e][v]*scounts[e][v])/totals[v]/nv
    return ' '.join(map(lambda x: '%0.7f' % x, edgecounts))

# First stdin line is the number of test cases
for i in range(input()):
    print "Case #%d: %s" % (i+1, compute())
Veľká Franková is situated in the valley of Frankovský potok (Frankovský Creek) in the Zamagurie region. It was founded during the first phase of colonisation (allowed by the Gorgey family) in the 13th century. During the so-called Wallachian colonisation in the 16th century it was re-colonised, and until the 19th century it was a vassal village of the Red Monastery. Locals were first of all shepherds, but they also worked in a sawmill and in a mill. A lot of people moved out of Veľká Franková between 1890 and 1910. Today most of the inhabitants work as farmers, and in winter as woodcutters, masons or builders (also outside of the village). You can find there a Roman Catholic church from the 18th century and a wooden belfry.
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# papr.py
#
# Copyright 2014 Balint Seeber <balint256@gmail.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
#
# Computes and plots the peak-to-average power ratio (PAPR) of a sampled
# signal read from a raw file: peak power comes from a decimating max,
# average power from a moving-average of the squared magnitude.

import sys, math
import numpy
import matplotlib.pyplot as pyplot
from optparse import OptionParser

def moving_average(values, window):
    """ Simple moving average of `values` over `window` samples,
        computed via convolution with a box kernel.

        Uses numpy.convolve in 'same' mode, so the output has the same
        length as the input (edges are averaged against implicit zeros).
    """
    weights = numpy.repeat(1.0, window) / window
    sma = numpy.convolve(values, weights, 'same') # 'valid'
    #print len(values), len(sma)
    return sma

def main():
    """ Parse options, load the capture file, compute the PAPR trace and
        show it with matplotlib (optionally preceded by a magnitude plot).
    """
    parser = OptionParser(usage="%prog: [options] <input file>")
    parser.add_option("-t", "--type", type="string", default="c8", help="data type [default=%default]")
    parser.add_option("-T", "--trim", type="int", default=None, help="max # of samples to use [default=%default]")
    parser.add_option("-d", "--decim", type="int", default=None, help="decimation [default=%default]")
    parser.add_option("-l", "--length", type="int", default="2048", help="target window length [default=%default]")
    parser.add_option("-a", "--average", type="int", default="128", help="moving average window length [default=%default]")
    parser.add_option("-m", "--max-length", type="int", default="1024", help="max window length [default=%default]")
    parser.add_option("-D", "--max-decim", type="int", default=None, help="max decimation [default=%default]")
    parser.add_option("-L", "--log", action="store_true", default=False, help="log scale [default=%default]")
    parser.add_option("-M", "--show-mag", action="store_true", default=False, help="show magnitude plot [default=%default]")
    #parser.add_option("-T", "--mag-trim", type="int", default=None, help="max # of samples to show in mag plot [default=%default]")

    (options, args) = parser.parse_args()

    if len(args) < 1:
        print "Supply input file"
        return

    # Load raw samples with the user-supplied numpy dtype string
    input_file = args[0]
    dtype = numpy.dtype(options.type)
    print "Opening", input_file, "as", dtype
    data = numpy.fromfile(input_file, dtype)
    print "File samples:", len(data)

    if options.trim is not None:
        print "Trimming to", options.trim
        data = data[:options.trim]

    print "Min,mean,max:", data.min(), data.mean(), data.max()

    # Choose decimation so that the plotted trace has options.length points
    if options.decim is not None:
        decim = options.decim # FIXME: Validate
    else:
        decim = len(data) / options.length
    print "Decim:", decim

    # Truncate so the data reshapes cleanly into (length, decim)
    new_length = decim * options.length
    print "New length:", new_length, ", skipping:", (len(data) - new_length)
    data = data[:new_length]

    # Magnitude statistics (dB values only when the magnitude is non-zero,
    # to avoid log10(0))
    data_mag = numpy.abs(data)
    data_mag_min = data_mag.min()
    if data_mag_min == 0.0:
        print "Mag min: %f" % (data_mag_min)
    else:
        print "Mag min: %f (%f dB)" % (data_mag_min, 10.0*math.log10(data_mag_min))
    data_mag_mean = data_mag.mean()
    print "Mag mean: %f (%f dB)" % (data_mag_mean, 10.0*math.log10(data_mag_mean))
    data_mag_max = data_mag.max()
    print "Mag max: %f (%f dB)" % (data_mag_max, 10.0*math.log10(data_mag_max))

    # Average power via moving average of |x|^2
    data_mag_squared = data_mag ** 2.0
    mean_rms = math.sqrt(data_mag_squared.mean())
    print "Mean RMS:", mean_rms, "(%f dB)" % (10.0*math.log10(mean_rms))

    print "Moving average window length:", options.average
    data_mag_squared_ma = moving_average(data_mag_squared, options.average)
    #print len(data_mag_ma)
    #len_diff = new_length - len(data_mag_ma)
    #if options.decim is not None:
    #	decim = len(data_mag_ma) / options.length
    #else:
    #	decim = len(data) / options.length
    #print "Decim:", decim
    #new_length = decim * options.length
    #print "New length:", new_length
    #data_mag_ma = data_mag_ma[:new_length]
    #print "Moving average decim:", decim
    #new_length = decim * options.length
    #data_mag_ma = data_mag_ma[:new_length]
    #print "New length:", len(data_mag_ma)

    # Decimate the averaged power by taking the mean of each decim-sized bin
    if decim > 1:
        data_mag_ma_mat = data_mag_squared_ma.reshape(-1, decim)
        data_mag_ma_mean = data_mag_ma_mat.mean(axis=1)
    else:
        data_mag_ma_mean = data_mag_squared_ma
    print "Mean moving-average data length:", len(data_mag_ma_mean)
    print "Min,mean,max: %f, %f, %f" % (data_mag_ma_mean.min(), data_mag_ma_mean.mean(), data_mag_ma_mean.max())

    # Peak trace: max-decimate the raw magnitude
    if options.max_decim is None:
        assert((new_length % options.max_length) == 0)
        decim_max = new_length / options.max_length
    else:
        decim_max = options.max_decim
    print "Max decim:", decim_max
    #new_length_max = decim_max * options.max_length
    #data_mag_decim = data_mag[new_length/2:]
    #len_diff = len(data_mag_decim) - new_length
    #data_mag_decim = data_mag_decim[:-len_diff+1]
    data_mag_decim = data_mag
    if decim_max > 1:
        data_mag_decim_mat = data_mag_decim.reshape(-1, decim_max)
        data_mag_decim_max = data_mag_decim_mat.max(axis=1)
    else:
        data_mag_decim_max = data_mag_decim

    # Repeat the peak trace so it lines up sample-for-sample with the
    # mean trace (they were decimated by different factors)
    repeat = options.length / options.max_length
    print "Max repeat:", repeat
    if repeat > 1:
        data_mag_decim_max = numpy.repeat(data_mag_decim_max, repeat)
    print "Min,mean,max: %f, %f, %f" % (data_mag_decim_max.min(), data_mag_decim_max.mean(), data_mag_decim_max.max())

    # PAPR = peak power / average power, filtered for NaN/Inf (which occur
    # where the average power was zero)
    data_mag_decim_max_squared = data_mag_decim_max ** 2.0
    ratio = data_mag_decim_max_squared / data_mag_ma_mean
    ratio_filtered = ratio[~numpy.isnan(ratio)]
    print "NaNs:", (len(ratio) - len(ratio_filtered))
    ratio_filtered2 = ratio_filtered[~numpy.isinf(ratio_filtered)]
    print "Infs:", (len(ratio_filtered) - len(ratio_filtered2))
    print "Min,mean,max: %f, %f, %f" % (ratio_filtered2.min(), ratio_filtered2.mean(), ratio_filtered2.max())
    #print ratio
    #print ratio_filtered
    orig_ratio_len = len(ratio)
    #trim = options.average - 1
    #ratio = ratio[trim:-trim]
    trim = 0
    x = numpy.linspace(trim, trim + len(ratio), len(ratio), False)
    #print len(x), len(ratio)
    mean_ratio = ratio_filtered2.mean()
    print "Mean ratio:", mean_ratio, "(%f dB)" % (10.0*math.log10(mean_ratio))

    ratio_db = 10.0 * numpy.log10(ratio)
    ratio_filtered_db = 10.0 * numpy.log10(ratio_filtered2)
    print "Min,mean,max ratio (dB): %f, %f, %f" % (ratio_filtered_db.min(), ratio_filtered_db.mean(), ratio_filtered_db.max())

    # Optional magnitude overview plot (raw mag, RMS traces, peak trace)
    if options.show_mag:
        subplot = pyplot.subplot(111)
        subplot.grid(True)
        print "Showing magnitude plot..."
        #subplot.set_ylim(ymin=0.0)
        #subplot.plot(data)
        subplot.plot(data_mag)
        data_mag_rms_ma = data_mag_squared_ma ** 0.5
        subplot.plot(data_mag_rms_ma)
        data_mag_rms_ma_mean = numpy.repeat(data_mag_ma_mean, decim) ** 0.5
        subplot.plot(data_mag_rms_ma_mean)
        data_mag_decim_max_repeat = numpy.repeat(data_mag_decim_max, decim)
        subplot.plot(data_mag_decim_max_repeat)
        pyplot.show()

    # Main PAPR plot (dB scale, optionally logarithmic y axis)
    subplot = pyplot.subplot(111)
    if options.log:
        subplot.set_yscale('log')
    subplot.grid(True)
    #subplot.set_ylim(ymin=(10.0**-18.), ymax=(10.0**-8.))
    #plot, = subplot.plot(data_mag_mean)
    #plot, = subplot.plot(data_mag_decim_max)
    print "Showing PAPR plot..."
    subplot.set_ylim(ymin=0.0, ymax=ratio_filtered_db.max())
    subplot.set_xlim(xmax=orig_ratio_len)
    plot, = subplot.plot(x, ratio_db)
    pyplot.show()

    return 0

if __name__ == '__main__':
    main()
Hank recalls lying on the floor with his dad at age 5, listening to the stereo, with "music coming from speakers on his left and his right". He learned to play trumpet, french horn, and piano, and to conduct choral groups. As a vocal major at Samford University, he made several records with his choral group, and realized that recording was a blending of the things he loved. When he heard about a new program at Belmont University in Nashville that focused on the music industry, he was on his way. There he met many of the industry leaders that he would work amongst later in life, and finished his degree in the very first graduating class from that program. Hank's first job after school was at Soundwest Studios in Calgary, Alberta, Canada. He returned to Nashville and soon was working at Woodland Sound Studios, where he spent the next seven years honing his mastering skills. In 1983, Hank "escaped" and became MasterMix's first employee. It was the first facility of its type, with a dedicated mixing room, along with a mastering room. Since that time, Hank has served as both mastering engineer and managing partner. He has worked with countless artists, producers, and record labels, primarily in the country and Christian genres, and on everything from indie to platinum-selling artists. Hank mastered the 2004 Grammy-winning Best Country Album "Songs of the Louvin Bros." on Universal Records, the Country Song of the Year "It's Five O'Clock Somewhere" by Alan Jackson and Jimmy Buffett, and the Best Contemporary Blues Album, Etta James' "Let's Roll".
"""Tool for calculating RDFs """ from __future__ import print_function import numpy as np from MDAnalysis.lib.distances import distance_array from analysisbase import AnalysisBase, blocks_of class InterRDF(AnalysisBase): """Analysis object for calculating intermolecular RDF. See the init method for arguments and keywords. Run the analysis with method *run* Results are stored in the following attributes: rdf The pair distribution function, normalised. edges The boundaries of each rdf bin. bins The center of each rdf bin. """ def __init__(self, *args, **kwargs): """InterRDF(g1, g2, nbins=75, range=(0.0, 15.0)) :Arguments: *g1* First AtomGroup *g2* Second AtomGroup :Keywords: *nbins* Number of bins in the histogram [75] *range* The size of the RDF [0.0, 15.0] *exclusion_block* A tuple representing the tile to exclude from the distance array. [None] *start* The frame to start at [0] *stop* The frame to end analysis at. [-1] *step* The step size through the trajectory in frames [0] Keyword *exclusion_block* allows same molecule contributions to be excluded from the rdf calculation. 
""" self.g1 = args[0] self.g2 = args[1] self.u = self.g1.universe kwargs.update({'traj': self.u.trajectory}) self._setup_frames(**kwargs) nbins = kwargs.pop('nbins', 75) hrange = kwargs.pop('range', (0.0, 15.0)) self.rdf_settings = {'bins':nbins, 'range':hrange} # Empty histogram to store the RDF count, edges = np.histogram([-1], **self.rdf_settings) count *= 0.0 self.count = count self.edges = edges self.bins = 0.5 * (edges[:-1] + edges[1:]) # Need to know average volume self.volume = 0.0 # Allocate a results array which we will reuse self._result = np.zeros((len(self.g1), len(self.g2)), dtype=np.float64) # If provided exclusions, create a mask of _result which # lets us take these out exclusion_block = kwargs.pop('exclusion_block', None) if not exclusion_block is None: self._exclusion_block = exclusion_block self._exclusion_mask = blocks_of(self._result, *exclusion_block) self._maxrange = hrange[1] + 1.0 else: self._exclusion_block = None self._exclusion_mask = None def _singleframe(self): distance_array(self.g1.positions, self.g2.positions, box=self.u.dimensions, result=self._result) # Maybe exclude same molecule distances if not self._exclusion_mask is None: self._exclusion_mask[:] = self._maxrange count = np.histogram(self._result, **self.rdf_settings)[0] self.count += count self.volume += self._ts.volume def _normalise(self): # Number of each selection nA = len(self.g1) nB = len(self.g2) N = nA * nB # If we had exclusions, take these into account if self._exclusion_block: xA, xB = self._exclusion_block nblocks = nA / xA N -= xA * xB * nblocks # Volume in each radial shell vol = np.power(self.edges[1:], 3) - np.power(self.edges[:-1], 3) vol *= 4/3.0 * np.pi # Number of frames nframes = len(self.frames) # Average number density box_vol = self.volume / nframes density = N / box_vol rdf = self.count / (density * vol * nframes) self.rdf = rdf
Weavile is a character that's all about speed and momentum. Having a dominant walk speed as well as mobility options both on the ground and in the air allows Weavile to approach and dance around the opponent with ease. Signature moves such as Agility giving projectile invincibility makes zoners re-evaluate their gameplan. And Icicle Crash which can be used for air stall, as an angled double jump, or something to chuck at the opponent while also creating space. Weavile's got a ton of counter pierces. Many leaving Weavile in an advantageous state when blocked, giving the character a solid oki game. Adding onto all of this with instances such as jab pressure and an active Burst Mode, Weavile really tests the opponent's defense. In comparison though to those that have similar health, Weavile lacks long-ranged options. There's always Icicle Crash, but 9 chip damage won't be able to cut it. This forces Weavile to get up close to be able to deal relevant damage. This becomes more of problem when Weavile is faced against the bulkier side of the cast that can withstand multiple exchanges, and be able to balance it all out with one correct play. With the use of knockback and spacing, the opponent never has to worry about retaliation because of the short range Weavile's normals have. To be able to excel with Weavile, one must fully commit to the tools the character has to offer. Hesitation leads to "Should've, Would've, Could've" thoughts. With the potential to maneuver around Counter Attacks and Agility through Burst Attacks, Its alot about having confident play and keeping up with the pace Weavile brings. High Stance: Recharges shield health. Field Phase Grab: Increases Support Gauge. Weavile is put into a counter armor state from frame 1-32 (52 if charged). Weavile fastfalls to the ground. Duel Phase Nosedive recovers faster than Field Phase Nosedive. Weavile conjures a tornado and spins upwards into the air. Counter armor on frame 1, and becomes invincible on frame 9. 
Reduces the opponent's Synergy Gauge by 12.5CC on hit. The hitbox increases in size horizontally over time. Hitbox is no longer active when Weavile starts to descend. Throws out three snowballs in a horizontal range. Holding Y allows Weavile to move before throwing. Movement is not affected by Speed buffs / debuffs. Holding Y for the full duration launches seven snowballs. Lays down three icicle traps on the ground. Freezes the opponent if they come in contact. Holding sY throws the traps further. In Burst Mode, sY throws out five icicle traps. Throws out a shadow projectile that follows up into a claw strike. Can be angled left or right. Similar to fY, but instead the shadow projectile and claw strike go upwards. Lobs a snowball projectile that explodes on contact with the ground, the opponent, or a projectile. Can be angled to throw to the side. In Burst Mode, three snowball projectiles will be thrown out. Weavile homes in on the opponent by skating in the air. Can press X again to follow up immediately with the divekick portion. Weavile homes in on the opponent and does three swipes. Following up from Homing Attack 1, Weavile strikes upwards with Its claw. A low claw swipe to the ground. Only combos into 2YY from a critical hit. A quick claw jab that has two variations. 4Y comes out faster and throw crushes, but has more scaling. 5Y advances Weavile forward while being more advantageous on hit and block. * Can cancel both hits with Night Slash. Weavile lunges forward with Its claw. Weavile hops in the air with an upwards claw swipe. Weavile slides on the ground straightforward. Weavile crouches, then throws out a vertical claw attack that has two hits. Can be charged to counter pierce as well as launch. Charging 5X also extends the High invincibility duration. Weavile advances forward with a blast of ice. Can be charged to counter pierce as well as making it safer on block. * A charged 6X from 5Y / 5YY / 5YYY deals 70 damage and has less scaling. 
Weavile jumps forward and strikes with both claws. Inputting 7X decreases the distance of the jump. Inputting 9X increases the distance of the jump. If blocked, Weavile bounces off the opponent's shield. Weavile goes into a counter armor state from frame 1-41. Coming in contact with an opponent's attack causes Weavile to lunge backwards, prepping to attack. During this state, Weavile is invincible. The invincibility time can be extended by holding the input. Letting go of the input causes Weavile to lunge forward with a launching claw attack. Counter pierces and reduces the opponent's Synergy Gauge by 20CC on hit. The attack is similar to Night Slash (launches forwards instead of backwards). Can be canceled with R (invincible when canceled). Weavile lunges backwards and preps to attack. Can hold the input to stay in this backed position for a set amount of time. Weavile dashes forward and becomes invincible against projectiles on frame 1. In Field Phase, Weavile can also Agility left or right. Can only be used from Agility. Weavile does a multitude of claw swipes. Can cancel into 5AY Fake Out. Weavile hops and claps on the opponent. Charging Fake Out causes it to counter pierce. Weavile leaps ahead with an arc-shaped claw strike. Invincible against Lows on frame 1, and Mid Lows on frame 9. An uppercut motion that spawns an ice pillar. The hitbox lingers, but disappears if Weavile is hit. Weavile fires an advancing forward punch of ice. Charging Ice Punch causes it to counter pierce. The charged version has five hits instead of one. Also has a larger hitbox. Weavile spawns an icicle to stand on in the air. Can only be hit by the icicle once, afterwards the hitbox is no longer active. In Burst Mode, Icicle Crash can be used twice. Able to jump off of the Icicle in multiple directions. Able to cancel into any aerial move afterwards. Weavile slashes the icicle, charging Its Support Gauge. Can be done a second time to gain half the charge of the first charge. 
The icicle's hitbox disappears once the input is pressed. Weavile lands on the ground once complete. Sends the icicle downwards, exploding in a radius. * -22 against Aegislash, Decidueye, Gardevoir, and Suicune, -8 for everyone else. Any character not listed requires a Speed buff. This page was last modified on 25 January 2019, at 22:50.
import datetime
import re


## give final date and time after parsing by changing current date-time
def change_datetime(c="0", y=0, mt=0, w=0, d=0, h=0, m=0, s=0):
    """Shift the current date/time by the given offsets.

    c selects the result: "date" -> a date object, "time" -> a time object;
    any other value (including the "0" default) returns None, matching the
    original behaviour.  y/mt/w/d/h/m/s are year/month/week/day/hour/minute/
    second offsets applied to datetime.now().
    """
    # dateutil is only needed by this one function; importing it lazily keeps
    # the rest of the module importable when python-dateutil is absent.
    from dateutil.relativedelta import relativedelta
    now = datetime.datetime.now()
    # The original wrote "years =+ y" etc.; that parses as years=(+y), i.e.
    # plain keyword assignment, which is what is written explicitly here.
    change = relativedelta(years=y, months=mt, weeks=w, days=d,
                           hours=h, minutes=m, seconds=s)
    if c == "date":
        return (now + change).date()
    elif c == "time":
        return (now + change).time()


## give final date and time after parsing by setting date-time
def set_datetime(y=0, mt=0, d=0, h=0, m=0, s=0, c="0"):
    """Format explicit date/time components as a "d/mt/y h:m:s c" string.

    Components left at 0 are omitted entirely.  ``c`` (e.g. an am/pm marker)
    is appended after a space unless it is the "0" sentinel default.
    """
    result = ""
    if d != 0:
        result += str(d) + "/"
    if mt != 0:
        result += str(mt) + "/"
    if y != 0:
        result += str(y)
    if h != 0:
        result += str(h) + ":"
    if m != 0:
        result += str(m) + ":"
    if s != 0:
        result += str(s)
    if c != "0":
        result += " " + str(c)
    return result


def get_disease(string):
    """Guess a disease from free text by word-matching against dataset.txt.

    ``dataset.txt`` is expected to hold name lines (starting with 'n')
    followed by definition lines (starting with 'd') — TODO confirm the
    exact dataset format against the file itself.

    Returns a list: [best-matching name, match score, its definition,
    followed by each input word found in that definition].
    """
    with open("dataset.txt") as f:
        content = f.readlines()

    names = []
    definitions = []
    values = []       # per-definition match counts, parallel to definitions
    check = 1         # 1 -> the next 'n' line starts a new entry
    for line in content:
        if line[0] == 'n':
            # Consecutive 'n' lines collapse into the last one seen before
            # the next definition (same net effect as the original
            # append/pop sequence).
            if check == 1:
                names.append(line)
                check = 0
            else:
                names[-1] = line
        if line[0] == 'd':
            definitions.append(line)
            check = 1
            values.append(0)

    words = string.split(" ")
    for word in words:
        for idx, definition in enumerate(definitions):
            # BUG FIX: str.replace returns a new string; the original
            # discarded the result, so '. ' and ', ' were never stripped.
            cleaned = definition.replace('. ', ' ').replace(', ', ' ')
            if word in cleaned.split(" "):
                values[idx] += 1

    # First index of the highest score wins (ties keep the earlier entry,
    # as before).
    highest = 0
    index_of_highest = 0
    for idx, value in enumerate(values):
        if value > highest:
            highest = value
            index_of_highest = idx

    answer = [names[index_of_highest], highest, definitions[index_of_highest]]

    # Append each input word that occurs in the winning definition.
    best_cleaned = (definitions[index_of_highest]
                    .replace('. ', ' ')
                    .replace(', ', ' '))
    best_words = best_cleaned.split(" ")
    for word in words:
        if word in best_words:
            # BUG FIX: the original also incremented a ``values`` entry here
            # through a stale loop variable ('defintion' left over from the
            # scoring loop); that side effect was meaningless and is removed.
            answer.append(word)
    return answer


def get_sentences(str):
    """Split *str* into sentences and classify each one.

    Returns a dict mapping each sentence to "Assertive" ('.'),
    "Interrogative" ('?') or "Exclamatory" ('!').

    NOTE: the parameter is (unfortunately) named ``str``, shadowing the
    builtin; it is kept so existing keyword callers do not break.
    """
    words = str.split(" ")
    abbreviations = ['Mr.', 'mr.', 'Mrs.', 'mrs.', 'Dr.', 'dr.', 'Er.',
                     'er.', 'Prof.', 'prof.', 'Br.', 'br.', 'Fr.', 'fr.',
                     'Sr.', 'sr.', 'Jr.', 'jr.']
    sentence_types = []

    # Strip the trailing dot from known abbreviations so they do not end a
    # sentence.
    for abbr in abbreviations:
        if abbr in words:
            str = str.replace(abbr, abbr.replace(abbr[len(abbr) - 1], ""))

    for word in words:
        # Acronyms such as "U.S.A." lose their dots entirely.
        if re.findall(r'\.(.)+\.', word):
            str = str.replace(word, word.replace('.', ''))
        # An interior dot (e.g. "example.com") is masked so it cannot split
        # the sentence.  (The slice deliberately ignores the last two chars.)
        if '.' in word[0:len(word) - 2]:
            str = str.replace(word, word.replace('.', '[dot]'))

    for letter in str:
        if letter == '.':
            sentence_types.append("Assertive")
        if letter == '?':
            sentence_types.append("Interrogative")
        # BUG FIX: the original also tested letter == '!!', which can never
        # be true for a single character; dropped as dead code.
        if letter == '!':
            sentence_types.append('Exclamatory')

    # BUG FIX: the original pattern was [ ]*[.|?|!|!!]+[ ]* — inside a
    # character class '|' is a literal, so text containing '|' was split
    # incorrectly, and '!!' is redundant.  [.?!] is the intended class.
    sentences = re.split(r"[ ]*[.?!]+[ ]*", str)
    # Drop the empty trailing element produced when the text ends with a
    # terminator.  The ``str and`` guard fixes an IndexError on empty input.
    if str and str[len(str) - 1] in ('.', '?', '!'):
        sentences.pop()
    return dict(zip(sentences, sentence_types))

## TODO: extend the abbreviation list; restore dots inside sentences;
## handle imperative/compound/complex sentence types; ensure sentences are
## returned sequentially (dict preserves insertion order on Python 3.7+).


def get_tokens(str):
    """Tokenise on single spaces.  (Parameter name kept for compatibility
    with existing keyword callers, despite shadowing the builtin.)"""
    words = str.split(" ")
    return words
Included below you will find our Brass Quintet arrangements of In The Bleak Midwinter. The individual instrumental parts are included in the purchase of the full score of In The Bleak Midwinter. The full score and the individual parts can be previewed for your convenience.
""" This file is part of the TheLMA (THe Laboratory Management Application) project. See LICENSE.txt for licensing, CONTRIBUTORS.txt for contributor information. Worklist series experiment design table. """ from sqlalchemy import Column from sqlalchemy import ForeignKey from sqlalchemy import Integer from sqlalchemy import PrimaryKeyConstraint from sqlalchemy import Table __docformat__ = "reStructuredText en" __all__ = ['create_table'] def create_table(metadata, experiment_design_tbl, worklist_series_tbl): "Table factory." tbl = Table('worklist_series_experiment_design', metadata, Column('experiment_design_id', Integer, ForeignKey(experiment_design_tbl.c.experiment_design_id, ondelete='CASCADE', onupdate='CASCADE'), nullable=False, unique=True), Column('worklist_series_id', Integer, ForeignKey(worklist_series_tbl.c.worklist_series_id, ondelete='CASCADE', onupdate='CASCADE'), nullable=False) ) PrimaryKeyConstraint(tbl.c.experiment_design_id, tbl.c.worklist_series_id) return tbl
Medical issues can also be dealt with using hypnosis, but it is recommended that it is undertaken in conjunction with a medical plan formed with your GP. It may seem illogical that something like hypnotherapy, a non-physical treatment, could deal with medical issues, but the premise is based on the ability of the mind not only to influence how we mentally feel but also to act as a healing agent for the body. There is considerable evidence that hypnotherapy can support medical issues. The use of hypnotherapy is supported by the NICE NHS guidelines for Pain Management (including childbirth) and Irritable Bowel Syndrome. In addition, hypnotherapy is used for Migraines, Skin Conditions, Pain Management, Dental Phobia, Bruxism (Teeth Grinding), Chronic Fatigue Syndrome, Stuttering and Sleep Disorders including Insomnia.
# Copyright 2015 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Module for building TensorBoard servers. This is its own module so it can be used in both actual code and test code. """ from __future__ import absolute_import from __future__ import division from __future__ import print_function import functools import os import threading import time import six from six.moves import BaseHTTPServer from six.moves import socketserver from tensorflow.python.platform import logging from tensorflow.python.summary import event_accumulator from tensorflow.tensorboard.backend import handler # How many elements to store per tag, by tag type TENSORBOARD_SIZE_GUIDANCE = { event_accumulator.COMPRESSED_HISTOGRAMS: 500, event_accumulator.IMAGES: 4, event_accumulator.SCALARS: 1000, event_accumulator.HISTOGRAMS: 1, } # How often to reload new data after the latest load (secs) LOAD_INTERVAL = 60 def ParseEventFilesSpec(logdir): """Parses `logdir` into a map from paths to run group names. The events files flag format is a comma-separated list of path specifications. A path specification either looks like 'group_name:/path/to/directory' or '/path/to/directory'; in the latter case, the group is unnamed. Group names cannot start with a forward slash: /foo:bar/baz will be interpreted as a spec with no name and path '/foo:bar/baz'. Globs are not supported. 
Args: logdir: A comma-separated list of run specifications. Returns: A dict mapping directory paths to names like {'/path/to/directory': 'name'}. Groups without an explicit name are named after their path. If logdir is None, returns an empty dict, which is helpful for testing things that don't require any valid runs. """ files = {} if logdir is None: return files for specification in logdir.split(','): # If the spec looks like /foo:bar/baz, then we assume it's a path with a # colon. if ':' in specification and specification[0] != '/': # We split at most once so run_name:/path:with/a/colon will work. run_name, path = specification.split(':', 1) else: run_name = None path = specification if not os.path.isabs(path): # Create absolute path out of relative one. path = os.path.join(os.path.realpath('.'), path) files[path] = run_name return files def ReloadMultiplexer(multiplexer, path_to_run): """Loads all runs into the multiplexer. Args: multiplexer: The `EventMultiplexer` to add runs to and reload. path_to_run: A dict mapping from paths to run names, where `None` as the run name is interpreted as a run name equal to the path. """ start = time.time() for (path, name) in six.iteritems(path_to_run): multiplexer.AddRunsFromDirectory(path, name) multiplexer.Reload() duration = time.time() - start logging.info('Multiplexer done loading. Load took %0.1f secs', duration) def StartMultiplexerReloadingThread(multiplexer, path_to_run, load_interval=LOAD_INTERVAL): """Starts a thread to automatically reload the given multiplexer. The thread will reload the multiplexer by calling `ReloadMultiplexer` every `load_interval` seconds, starting immediately. Args: multiplexer: The `EventMultiplexer` to add runs to and reload. path_to_run: A dict mapping from paths to run names, where `None` as the run name is interpreted as a run name equal to the path. load_interval: How many seconds to wait after one load before starting the next load. 
Returns: A started `threading.Thread` that reloads the multiplexer. """ # Ensure the Multiplexer initializes in a loaded state before it adds runs # So it can handle HTTP requests while runs are loading multiplexer.Reload() def _ReloadForever(): while True: ReloadMultiplexer(multiplexer, path_to_run) time.sleep(load_interval) thread = threading.Thread(target=_ReloadForever) thread.daemon = True thread.start() return thread class ThreadedHTTPServer(socketserver.ThreadingMixIn, BaseHTTPServer.HTTPServer): """A threaded HTTP server.""" daemon = True def BuildServer(multiplexer, host, port): """Sets up an HTTP server for running TensorBoard. Args: multiplexer: An `EventMultiplexer` that the server will query for information about events. host: The host name. port: The port number to bind to, or 0 to pick one automatically. Returns: A `BaseHTTPServer.HTTPServer`. """ factory = functools.partial(handler.TensorboardHandler, multiplexer) return ThreadedHTTPServer((host, port), factory)
Matteo and Jlo discuss the final episode ever of LOST and what we will do now that its all over. Matt flips about this (Broken) statement we are all just supposed to be OK with. Lost episode Matteo and Jlo Joe talk about the latest episode Across the Sea and then dive into their own 20 questions. Hugo finds Libby in the alternate time line. Thank goodness right!?! note, One scene is very explicit, 40 minutes into the epsiode. Who do you trust? Sayid bad? seriously! 6 Years later we are starting season 1 over again, and this time Jack has a kid!!! In this episode, we go into detail about locke and the anti locke. IS anti Locke really locke? We Re-Cap all seasons of LOST except the past season. Thursday we are going to discuss liveeee! multiple characters of the show. Awesome clips of Jack being a B.A. Hope you all enjoy. Were going to rock the live podcast again! This time we just wanna talk about your crazy theories! This is an explicit podcast! we don't usually do an explicit podcast, but today we did! So please, don't listen if you are not supposed to. Matteo and Jlo Joe go over the smoke monster and decipher some of the possibilities. We discuss the craziness of the episode and the newest character "MARK" What junk .. do we really need this week off? Dead is Dead Baby .. or is it? Great episode all together, Love the way the characters unfolded in this episode. Almost like a season 3 episode it was soo good. Today we discuss the Lost Episode in review, and the upcoming episode. What we think, what we know, and how the show will roll. ... and remember.. The first rule of Sayid, is Dont talk about Sayid. The second rule of Sayid, is Dont talk about Sayid! We discuss the episode, and who is more evil Ben or Widmore? we also ask which 5 lost women you would rather be stuck with forever? Today Matt and Jlo Joe discuss some new theories, and questions of our own! Have fun and come to the website to join in! Matt and Joe discuss possible outcomes of next episodes! 
We also discuss the parts of the episode we didn't particularly love. What's Happnin?! The writers are on stike! Whaaa? Why? We try to comprehend it. Afterwards, we read a listener theory explaining their thoughts on "Jacob". Very interesting! Also, we go through the rest of season 2 quotes. Stay tuned for episode 12! We will begin our season 3 quotes. Enjoy! PS. All new episodes will be located on www.mklost.com from here on out. Continuing on with season 2... Enjoy! In this episode we discuss more notable quotes from season 1! Hope you enjoy!! Babble on about quotes from the first half of season 1! Fun Fun!! Hello LOST fans!! In this episode we discuss Watership Down. Don't know what that is? You remember, Sawyer was reading it! Also, was there an audition leaked? Fact or fiction? Help us figure it out! Oh, and come talk LOST with us in our forums on www.mklost.com. Enjoy the show!! A mistake! Show was schelduled but we were unable to make it :( This episode takes it's place to keep numerical order. Us and a couple friends chat about LOST and then craziness strikes!! Ahhhhhh!! Random jabber about The most recent LOST episode! Enjoy!
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
#
# Py2neo documentation build configuration file, created by
# sphinx-quickstart on Fri Oct 17 16:03:15 2014.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.

import sys
import os

# Make the bundled theme package (alabaster) importable before importing it.
sys.path.insert(0, os.path.abspath('_themes'))
import alabaster

# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('..'))

# -- General configuration ------------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    'sphinx.ext.autodoc',
    'sphinx.ext.coverage',
    'sphinx.ext.ifconfig',
    'sphinx.ext.viewcode',
    'alabaster',
]

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']

# The suffix of source filenames.
source_suffix = '.rst'

# The encoding of source files.
source_encoding = 'utf-8-sig'

# The master toctree document.
master_doc = 'index'

# General information about the project.
project = 'Py2neo'
# NOTE: shadowing the ``copyright`` builtin is the standard Sphinx conf.py
# convention for this setting.
copyright = '2011-2014, Nigel Small'

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# sys.path.insert(0, os.path.abspath('..'))
# This mid-file import works because '..' was added to sys.path above.
from py2neo import __version__
# The short X.Y version.
version = ".".join(__version__.split(".")[0:2])
# The full version, including alpha/beta/rc tags.
release = __version__

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None

# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']

# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None

# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True

# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True

# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'

# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []

# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False

# -- Options for HTML output ----------------------------------------------

# The theme to use for HTML and HTML Help pages.  See the documentation for
# a list of builtin themes.
html_theme = 'alabaster'

# Theme options are theme-specific and customize the look and feel of a theme
# further.  For a list of options available for each theme, see the
# documentation.
html_theme_options = {
    'logo': 'py2neo-2.0.200x260.png',
    'logo_align': 'left',
    'github_user': 'nigelsmall',
    'github_repo': 'py2neo',
    # Tracks the current release branch so the GitHub links stay in sync.
    'github_branch': 'release/' + release,
    'travis_button': True,
}

# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = [alabaster.get_path()]

# The name for this set of Sphinx documents.  If None, it defaults to
# "<project> v<release> documentation".
#html_title = None

# A shorter title for the navigation bar.  Default is the same as html_title.
#html_short_title = None

# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None

# The name of an image file (within the static path) to use as favicon of the
# docs.  This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']

# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []

# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'

# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True

# Custom sidebar templates, maps document names to template names.
html_sidebars = {
    '**': [
        'about.html',
        'navigation.html',
        'searchbox.html',
        'donate.html',
    ]
}

# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}

# If false, no module index is generated.
#html_domain_indices = True

# If false, no index is generated.
#html_use_index = True

# If true, the index is split into individual pages for each letter.
#html_split_index = False

# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True

# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True

# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True

# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it.  The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''

# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None

# Output file base name for HTML help builder.
htmlhelp_basename = 'Py2neodoc'

# -- Options for LaTeX output ---------------------------------------------

latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    #'papersize': 'letterpaper',

    # The font size ('10pt', '11pt' or '12pt').
    #'pointsize': '10pt',

    # Additional stuff for the LaTeX preamble.
    #'preamble': '',
}

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
#  author, documentclass [howto, manual, or own class]).
latex_documents = [
    ('index', 'Py2neo.tex', 'Py2neo Documentation',
     'Nigel Small', 'manual'),
]

# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None

# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False

# If true, show page references after internal links.
#latex_show_pagerefs = False

# If true, show URL addresses after external links.
#latex_show_urls = False

# Documents to append as an appendix to all manuals.
#latex_appendices = []

# If false, no module index is generated.
#latex_domain_indices = True

# -- Options for manual page output ---------------------------------------

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    ('index', 'py2neo', 'Py2neo Documentation',
     ['Nigel Small'], 1)
]

# If true, show URL addresses after external links.
#man_show_urls = False

# -- Options for Texinfo output -------------------------------------------

# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
    ('index', 'Py2neo', 'Py2neo Documentation',
     'Nigel Small', 'Py2neo', 'One line description of project.',
     'Miscellaneous'),
]

# Documents to append as an appendix to all manuals.
#texinfo_appendices = []

# If false, no module index is generated.
#texinfo_domain_indices = True

# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'

# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False

# -- Options for Epub output ----------------------------------------------

# Bibliographic Dublin Core info.
epub_title = 'Py2neo'
epub_author = 'Nigel Small'
epub_publisher = 'Nigel Small'
epub_copyright = '2014, Nigel Small'

# The basename for the epub file. It defaults to the project name.
#epub_basename = 'Py2neo'

# The HTML theme for the epub output. Since the default themes are not
# optimized for small screen space, using the same theme for HTML and epub
# output is usually not wise. This defaults to 'epub', a theme designed to
# save visual space.
#epub_theme = 'epub'

# The language of the text. It defaults to the language option
# or en if the language is not set.
#epub_language = ''

# The scheme of the identifier. Typical schemes are ISBN or URL.
#epub_scheme = ''

# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#epub_identifier = ''

# A unique identification for the text.
#epub_uid = ''

# A tuple containing the cover image and cover page html template filenames.
#epub_cover = ()

# A sequence of (type, uri, title) tuples for the guide element of content.opf.
#epub_guide = ()

# HTML files that should be inserted before the pages created by sphinx.
# The format is a list of tuples containing the path and title. #epub_pre_files = [] # HTML files shat should be inserted after the pages created by sphinx. # The format is a list of tuples containing the path and title. #epub_post_files = [] # A list of files that should not be packed into the epub file. epub_exclude_files = ['search.html'] # The depth of the table of contents in toc.ncx. #epub_tocdepth = 3 # Allow duplicate toc entries. #epub_tocdup = True # Choose between 'default' and 'includehidden'. #epub_tocscope = 'default' # Fix unsupported image types using the PIL. #epub_fix_images = False # Scale large images. #epub_max_image_width = 0 # How to display URL addresses: 'footnote', 'no', or 'inline'. #epub_show_urls = 'inline' # If false, no index is generated. #epub_use_index = True def get_class_name(full_module_name): """ Pull out the class name from the full_module_name """ #split the full_module_name by "."'s return full_module_name.split('.')[1] def process_docstring(app, what, name, obj, options, lines): names = name.split(".") module_name = names[0] try: class_name = names[1] except IndexError: class_name = None try: attr_name = names[2] except IndexError: attr_name = None for i, line in enumerate(lines): lines[i] = (line .replace('«class»', class_name) .replace('«class.lower»', class_name.lower())) def setup(app): app.connect('autodoc-process-docstring', process_docstring)
I'm assuming your circle has a diameter of 1/4 inch. To find square inches, just find the area, since the area is what square inches measure. … One half squared equals one fourth, because when you multiply one half by one half you get one fourth. This is the equivalent of saying that half of one half is one fourth. These signs are the same as saying "3 squared, 4 squared, and x squared." This is also called a superscript, or the power of the number. A number to the "power of 2" is the same as the number "squared" or the "square" of the number. Algebra Examples. Popular Problems. Algebra. Evaluate $\sqrt{1/4}$. Rewrite $\sqrt{1/4}$ as $\frac{\sqrt{1}}{\sqrt{4}}$. Any root of $1$ is $1$. Simplify the denominator: $\sqrt{4} = 2$, assuming positive real numbers. The result can be shown in both exact and decimal forms: $\frac{1}{2} = 0.5$.
""" This uses pandas.io.data.DataReader, all kwargs get passed to that. start and end are optional, but must be of the form 'YYYY-MM-DD'. Will default to since the beginning of available data, and run through "today". data_column is required to be specified as well. """ stype = 'PyDataDataReaderST' renew = True class Source(object): def __init__(self, ses, **kwargs): import pandas.io.data as pydata import datetime as dt self.pydata = pydata self.dt = dt def getseries(self, ses, **kwargs): fmt = "%Y-%m-%d" if 'start' in kwargs: kwargs['start'] = self.dt.datetime.strptime(kwargs['start'], fmt) if 'end' in kwargs: if kwargs['end'] == 'now': kwargs['end'] = self.dt.datetime.now() else: kwargs['end'] = self.dt.datetime.strptime(kwargs['end'], fmt) col = kwargs['data_column'] del kwargs['data_column'] adf = self.pydata.DataReader(**kwargs) data = adf[col] return data
If you’re buying furniture for the first time, keep away from distinctive or fashionable pieces. You may uncover that the style itself does not stand the check of time and is tough to pair with different items. Just remember to determine on a method that fits your model and may work nicely with different items. Stick to neutral colours to your important furniture items. You may love a selected bright coloration or pattern, however will you continue to like it 10 years from now? As a substitute, decide impartial colours for the principle items in your room and use accent decor to usher in coloration. You will be glad of your decision the time comes to redecorate. When trying to find the best deal on furniture, search for furniture stores who’re providing deep reductions on furniture. Many furniture shops supply a a reimbursement guarantee. If a bit of furniture is returned, it cannot be bought as new. Instead, they discount it and promote it as used. Avoid high upkeep furniture unless you may have the time to take good care of it. If you do not need to continually clean your furniture, look for items with a top quality finish. Don’t hesitate to buy some material safety so you shouldn’t have to fret about stains in your sofa. Take the time to keep furniture polished and clear recurrently. This little bit of upkeep helps retains your furniture trying like new and also helps to get rid of allergens like dust and pet dander. Letting dust and other allergens acquire, particularly on completed furniture, can harm it over time, so it’s essential to maintain it on a regular basis. When buying furniture with curiosity-free bank cards, it’s important that you simply pay off the furniture prior to the time period’s expiration. Failure to repay the stability might trigger you to incur interest over the entire period. Just be sure you perceive all advantageous print earlier than you make your purchase. 
Don’t enable your self to really feel pressured into shopping for a chunk of furniture that you are not certain about. Furniture gross sales folks typically work off of fee. Their need to make a sale, can sometimes go away you feeling pressured into shopping for something you do not really want. Be polite, however stand your ground and wait until you find that good piece. When buying furniture, high quality is essential. Customized furniture makers will not use nails and glue to hitch wood corners and ends together. They will use a course of often called wooden joinery. This produces a top quality joint that shall be sturdier and can take more weight. Nails and glue construction is not going to ship a strong high quality product. Measure the house in your home very fastidiously earlier than buying furniture. Model named furniture shouldn’t be all the time the easiest way to go. Usually, furniture and not using a brand name is just pretty much as good as brand name furniture. The one distinction is the worth; brand names imply more cash. It doesn’t matter what form of furniture you resolve to purchase, quality should be your number one precedence. If you end up searching for a mattress, ask the gross sales rep to see for those who can take a look at it for 30 days at no danger. The one way which you can really tell if an expensive mattress is best for you is by sleeping on it. Simply maintain the the plastic masking on the mattress to preserve its cleanliness. In the case of choosing a palette, keep on with conservative selections. You won’t need to choose trending objects because the model will soon leave and you will be caught having to exchange the furniture. As a substitute, add coloration and trendiness with pillows, wall colors, plants, ceramics and different ornamental items. Though it could not look good, it might be sensible to purchase a sofa cover once you buy a new sofa. 
Let’s face it, sofas may be quite pricy and it might be a disgrace to have it ruined by stains. You should purchase a neutral colour to match with every thing else. If you’re searching for out previous furniture, make sure you look at its underside so that you simply guarantee stability. Oftentimes, furniture could appear to be it’s in nice condition; however, it is in dangerous situation. Older furniture may be affected by dry rot and rust. Measure your space earlier than looking for furniture. An “eyeballed” measurement will not lower it right here. You’ll want to be exact as even just a few inches can be a large deal for a way your home design will work out. You will be a way more assured shopper when you know the precise dimensions that you just want. Since you will retaining and treasuring that merchandise, find new furniture that compliments it. For those who buy a brand new merchandise that does not go together with it, it is going to clash and look out of place.
#! /usr/bin/env python
# -*- coding: utf-8 -*-

##################### A U T H O R ##############################
#                                                              #
# Copyright 2010 Jack Desert                                   #
# <jackdesert@gmail.com>                                       #
# http://TwoMoreLines.com                                      #
#                                                              #
###################### L I C E N S E ###########################
#                                                              #
# This file is part of LyXBlogger.                             #
#                                                              #
# LyXBlogger is free software: you can redistribute it and/or  #
# modify it under the terms of the GNU General Public License  #
# as published by the Free Software Foundation, either         #
# version 3 of the License, or (at your option) any later      #
# version.                                                     #
#                                                              #
# LyXBlogger is distributed in the hope that it will be        #
# useful, but WITHOUT ANY WARRANTY; without even the implied   #
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR      #
# PURPOSE. See the GNU General Public License for more         #
# details.                                                     #
#                                                              #
# You should have received a copy of the GNU General Public    #
# License along with LyXBlogger. If not, see                   #
# <http://www.gnu.org/licenses>.                               #
#                                                              #
################################################################

import os, sys
import re
import wordpresslib
from misc import pr3
from misc import get_format
from misc import trim_cut_material


def get_html(input_file, CUT_FLAG):
    """Read exported HTML, extract the post title, and clean the body.

    Returns a ``(html, blog_title, ELYXER_ENGINE)`` tuple where ``html``
    is the post body ready for upload, ``blog_title`` is the title found
    in the document (or typed in by the user when only a default title is
    present), and ``ELYXER_ENGINE`` is the export-engine flag reported by
    ``get_format()``.
    """
    # Read data from file.  A context manager guarantees the handle is
    # closed even if read() raises (the original left the file open on
    # error).
    with open(input_file, 'r') as f:
        html = f.read()

    pr3("FORMAT")
    ELYXER_ENGINE = get_format(html)

    # Trim designated cut material from bottom of post
    html = trim_cut_material(html, CUT_FLAG, ELYXER_ENGINE)

    # RECORD TITLE FROM HEADER TO USE AS POST
    tit_exp = re.compile('''
    <title>    # Start of the <title> tag
    ..{1,}?    # Anything in the middle (non-greedy)
    </title>   # Closing </title> tag
    ''', re.VERBOSE)  # VERBOSE allows ''' '''
    tit_obj = tit_exp.search(html)

    # eLyXer uses 'Converted document' as the default title in the head
    # and body.  LyXHTML uses 'LyX Document' as the default, but only
    # puts it in the head.  The following code detects these default
    # titles and asks for a real title.
    TITLE_EXPECTED_IN_BODY, TITLE_PROMPT = False, True
    pr3("\nTITLE")
    if tit_obj:
        TITLE_EXPECTED_IN_BODY = True
        TITLE_PROMPT = False
        full_title_tag = tit_obj.group()
        blog_title = full_title_tag[7:-8]  # Strip <title></title> tags off
        if blog_title == 'Converted document':  # eLyXer's default (head and body)
            TITLE_PROMPT = True
        if blog_title == 'LyX Document':  # LyXHTML's default (only in head)
            TITLE_PROMPT = True
            TITLE_EXPECTED_IN_BODY = False
    if TITLE_PROMPT:
        pr3('No title found in document.')
        pr3('Please enter a title now')
        blog_title = sys.stdin.readline().replace('\n', '')
        pr3('Using title: ' + blog_title)

    # REMOVING TITLE FROM BODY
    # Typical body title using ENGINE_INTERNAL:
    #   <h1 class="title"><a id='magicparlabel-309' />
    #   Example Article Title</h1>
    # Typical body title using ELYXER_ENGINE using optional sizing:
    #   <h1 class="title">
    #   <span class="footnotesize">Hi Brian</span>
    #   </h1>
    exp = re.compile('''
    <h1\ # Beginning of tag with space
    class="title"> # The rest of the tag
    ..{1,}? # Anything (non-greedy)
    </h1> # Closing tag
    ''', re.VERBOSE | re.DOTALL)  # .. can include linebreaks
    bt_obj = exp.search(html)
    if bt_obj:
        entire_bt_tag = bt_obj.group()
        html = html.replace(entire_bt_tag, '')
    elif TITLE_EXPECTED_IN_BODY:
        # A title tag was found in the head, but no matching <h1> title
        # was found in the body, so there is nothing to remove here.
        pass
        #~ pr3 ('\nWARNING! The title of your entry may appear twice.')
        #~ pr3 ('Please notify the author at jackdesert@gmail.com to')
        #~ pr3 ('have this bug squashed.\n\n  Press Enter to continue uploading.')
        #~ sys.stdin.readline()

    # Eliminate everything outside the <body></body> tags
    START_TAG = '<body>'
    END_TAG = '</body>'
    if START_TAG in html:
        html = html.partition(START_TAG)[2]
        html = html.partition(END_TAG)[0]

    # Reinvoke <code> and </code> tags from their escape sequence counterparts
    html = html.replace('&lt;code&gt;', '<code>')
    html = html.replace('&lt;/code&gt;', '</code>')

    # Remove Arrows from footnotes and margin notes
    html = html.replace('[→', '[')
    html = html.replace('→]', ']')

    # Change the elyxer-generated id to a class, since wordpresslib appears
    # to strip out all ids upon upload
    html = html.replace("<div class=\"footer\" id=\"generated-by\">",
                        "<div class=\"footer generated-by-elyxer\">")
    return html, blog_title, ELYXER_ENGINE
Rocknob Grey Slick Rock. Made out of 100% rock! These are put together very well and feel great as a shifter. Has a smooth shiny surface. Our shift knobs use a universal mounting system that will not only fit any manual transmission shift lever, but will make installation easy and done in a matter of minutes. A perfect way to personalize your Jeep, Truck, Toyota, Car or Hot Rod with Rocknob’s Slick Rock! HIGH QUALITY! FAST SHIPPING!
# -*- coding: utf-8 -*-
"""\
This file contains functions to profile natsorted with different inputs
and different settings.
"""
from __future__ import print_function
import cProfile
import random
import sys
# Make sure the in-tree natsort package wins over any installed copy.
sys.path.insert(0, '.')
from natsort import natsorted, index_natsorted
from natsort.compat.py23 import py23_range

# Sample lists to sort:
#   nums - plain integers
#   nstr - integers rendered as strings
#   astr - strings with an embedded number ('a123num')
#   tstr - nested (list-of-list) string inputs
#   cstr - strings where '-' may parse as a sign ("unorderable" case)
nums = random.sample(py23_range(10000), 1000)
nstr = list(map(str, random.sample(py23_range(10000), 1000)))
astr = ['a'+x+'num' for x in map(str, random.sample(py23_range(10000), 1000))]
tstr = [['a'+x, 'a-'+x] for x in map(str, random.sample(py23_range(10000), 1000))]
cstr = ['a'+x+'-'+x for x in map(str, random.sample(py23_range(10000), 1000))]

# Each prof_* function sorts its input 1000 times; the cProfile.run call
# immediately after it executes and prints the profile, so importing this
# module runs the whole benchmark suite.


def prof_nums(a):
    # Baseline: sorting real numbers.
    print('*** Basic Call, Numbers ***')
    for _ in py23_range(1000):
        natsorted(a)
cProfile.run('prof_nums(nums)', sort='time')


def prof_num_str(a):
    # Numbers represented as strings exercise the parsing fast path.
    print('*** Basic Call, Numbers as Strings ***')
    for _ in py23_range(1000):
        natsorted(a)
cProfile.run('prof_num_str(nstr)', sort='time')


def prof_str(a):
    # Mixed alpha/numeric strings: the common natsort workload.
    print('*** Basic Call, Strings ***')
    for _ in py23_range(1000):
        natsorted(a)
cProfile.run('prof_str(astr)', sort='time')


def prof_str_index(a):
    # index_natsorted returns sort indices rather than sorted values.
    print('*** Basic Index Call ***')
    for _ in py23_range(1000):
        index_natsorted(a)
cProfile.run('prof_str_index(astr)', sort='time')


def prof_nested(a):
    # Nested sequences are sorted element-wise.
    print('*** Basic Call, Nested Strings ***')
    for _ in py23_range(1000):
        natsorted(a)
cProfile.run('prof_nested(tstr)', sort='time')


# NOTE(review): the `exp`, `signed`, and `number_type` keyword options used
# below are the legacy natsort (<4.0) API — confirm the pinned natsort
# version still accepts them.

def prof_str_noexp(a):
    # Disable exponent ('1e5') recognition.
    print('*** No-Exp Call ***')
    for _ in py23_range(1000):
        natsorted(a, exp=False)
cProfile.run('prof_str_noexp(astr)', sort='time')


def prof_str_unsigned(a):
    # Treat '-' / '+' as literal characters, not signs.
    print('*** Unsigned Call ***')
    for _ in py23_range(1000):
        natsorted(a, signed=False)
cProfile.run('prof_str_unsigned(astr)', sort='time')


def prof_str_unsigned_noexp(a):
    # Combination of the two options above.
    print('*** Unsigned No-Exp Call ***')
    for _ in py23_range(1000):
        natsorted(a, signed=False, exp=False)
cProfile.run('prof_str_unsigned_noexp(astr)', sort='time')


def prof_str_asint(a):
    # Parse embedded numbers as ints instead of floats.
    print('*** Int Call ***')
    for _ in py23_range(1000):
        natsorted(a, number_type=int)
cProfile.run('prof_str_asint(astr)', sort='time')


def prof_str_asint_unsigned(a):
    # Unsigned-int mode, i.e. version-number sorting.
    print('*** Unsigned Int (Versions) Call ***')
    for _ in py23_range(1000):
        natsorted(a, number_type=int, signed=False)
cProfile.run('prof_str_asint_unsigned(astr)', sort='time')


def prof_str_key(a):
    # Measure the overhead of a user-supplied key function.
    print('*** Basic Call With Key ***')
    for _ in py23_range(1000):
        natsorted(a, key=lambda x: x.upper())
cProfile.run('prof_str_key(astr)', sort='time')


def prof_str_index_key(a):
    # Key function combined with index output.
    print('*** Basic Index Call With Key ***')
    for _ in py23_range(1000):
        index_natsorted(a, key=lambda x: x.upper())
cProfile.run('prof_str_index_key(astr)', sort='time')


def prof_str_unorderable(a):
    # Inputs whose parsed keys mix types ("unorderable" on Python 3).
    print('*** Basic Index Call, "Unorderable" ***')
    for _ in py23_range(1000):
        natsorted(a)
cProfile.run('prof_str_unorderable(cstr)', sort='time')
In this day and age of the "throwaway society," so much of our precious resources find their way into the garbage. After the amounts of money that we have spent on so many items, that seems a shame. And then, when we want to decorate our homes and buy more accessories, we spend yet more money and waste even more of the world's resources. But with a little creativity, you can save your money and recycle some of the things you would normally throw out into cute ornaments and accessories for your home. Let's look at a few selected examples of items that you could recycle and use to decorate your living room. What about some cute and unusual wall art? That's easy enough to make. Buy some second-hand frames and fill them with your favorite magazine pictures. Or maybe you would like to create your own forms of abstract art by adding pressed flowers, seashells, and ticket stubs to give you an everlasting memento of your recent vacation. This can actually produce some sophisticated designs, and the beauty of creating them yourself is that you can put your own personality into what you make. No one else will ever have exactly the same design as you. You can also choose the exact themes and colors that you want in your home. How about making your own faux oil lamp? Put the glass globe upside down and then place the mayonnaise jar lid on top. Atop of this should be the pickle jar lid. All of these should be glued together. Once these parts have been securely glued together, spray paint them. This will be the base of your lamp. Then put the candle inside the lid of the spray paint. This makes into a makeshift candle holder, with tin foil making it secure. Put this candle and its holder on top of the holder once it is dry. The glass lamp shade should then be put over the candle once it has been lit. The faux oil lamp looks very realistic and sophisticated once it has been made. Its ingredients are relatively easy to come by — you don't even have to buy most of it. 
You could also recycle a used glass jar into a pretty vase. Remove the labels and wash well before painting on it the design or picture of your own personal choice. This is a pretty decoration that will look good in any room.
from app import create_app
from flask import (render_template, request, redirect, url_for, flash,
                   make_response, Response)
from flaskext.uploads import (UploadSet, configure_uploads, ARCHIVES,
                              UploadConfiguration)
from flask.ext.pymongo import PyMongo
import hashlib
import subprocess
import json
import os
import zipfile
from functools import wraps

app = create_app()

# All uploads and extracted data live under static/data next to this file.
UPLOAD_DEST = os.path.join(os.path.abspath(os.path.dirname(__file__)),
                           'static/data')

uploaded_files = UploadSet('tracks', ARCHIVES,
                           default_dest=lambda app: app.instance_path)
configure_uploads(app, uploaded_files)
# Force uploads into UPLOAD_DEST instead of the instance path configured
# above, then re-register the set so the override takes effect.
uploaded_files._config = UploadConfiguration(UPLOAD_DEST)
configure_uploads(app, uploaded_files)

mongo = PyMongo(app)


def check_auth(username, password):
    """This function is called to check if a username / password
    combination is valid.

    Returns the username on success, None otherwise.  All users share the
    single passkey stored in app.config['SECRET'][0].
    """
    users = app.config.get('USERS')
    passkey = app.config.get('SECRET')[0]
    if username in users and passkey == password:
        return username


def authenticate():
    """Sends a 401 response that enables basic auth"""
    return Response(
        'Could not verify your access level for that URL.\n'
        'You have to login with proper credentials', 401,
        {'WWW-Authenticate': 'Basic realm="Login Required"'})


def requires_auth(f):
    """View decorator enforcing HTTP basic auth via check_auth()."""
    @wraps(f)
    def decorated(*args, **kwargs):
        auth = request.authorization
        if not auth or not check_auth(auth.username, auth.password):
            return authenticate()
        return f(*args, **kwargs)
    return decorated


@app.route('/')
def index():
    """Render the track listing."""
    return render_template('index.html', tracks=get_all_tracks())


@app.route('/upload', methods=['POST', 'GET'])
#@requires_auth
def upload():
    """Accept a zip upload, reject duplicates by MD5, and extract it."""
    if request.method == "POST":
        filename = uploaded_files.save(request.files.get('track'))
        # Hash the archive in binary mode and close the handle
        # deterministically.  (The original opened the file in text mode
        # and never closed it, which yields a wrong digest on Windows and
        # leaks the file handle; it also shadowed the builtin `hash`.)
        digest = hashlib.md5()
        with open(os.path.join(UPLOAD_DEST, filename), 'rb') as archive:
            for chunk in iter(lambda: archive.read(4096), b''):
                digest.update(chunk)
        if mongo.db.tracks.find_one({'checksum': digest.hexdigest()}):
            flash('Duplicate file!!')
            return redirect(url_for('index'))
        extract_file(filename, digest.hexdigest())
        flash('Your upload was successful.')
        return redirect(url_for('index'))
    return render_template('upload.html')


def extract_file(name, checksum):
    """Extract the zip and save the contents of the zip into a directory
    organized by username in the config file.

    TODO: Insert assertions for error handling — a zip with no .json
    config file leaves ``configFilePath`` unset (NameError), and a zip
    with no .gpx file leaves 'track-path' unset.
    """
    with zipfile.ZipFile(os.path.join(UPLOAD_DEST, name)) as zipF:
        for fileName in zipF.infolist():
            if fileName.filename.endswith('.json'):
                configFilePath = fileName.filename
                break
        # NOTE(review): str.find() returns -1 (truthy) when '/' is absent,
        # so this branch also runs for bare filenames — confirm intended.
        if configFilePath.find('/'):
            configDirName = configFilePath.split('/')[0]
        with zipF.open(configFilePath) as f:
            config = json.load(f)
        # Layout: static/data/extracted_data/<device>/<user>/...
        zipF.extractall(os.path.join(UPLOAD_DEST, 'extracted_data',
                                     config.get('Device ID'),
                                     config.get('User')))
        for member in zipF.infolist():
            if member.filename.endswith(".gpx"):
                url = url_for('static',
                              filename=os.path.join('data', 'extracted_data',
                                                    config.get('Device ID'),
                                                    config.get('User'),
                                                    member.filename))
                config['track-path'] = url
                # Drop the '.gpx' suffix exactly once.  (The original used
                # rstrip('.gpx'), which strips any trailing run of the
                # characters '.', 'g', 'p', 'x' and mangles names such as
                # 'tripx.gpx'.)
                config['track-name'] = member.filename[:-len('.gpx')].split(
                    '/')[-1]
    try:
        dirPath = configDirName
    except NameError:
        dirPath = ''
    # Fire-and-forget conversion of the extracted directory.
    subprocess.Popen(['bash', os.path.abspath(os.path.join(
        os.path.dirname(__file__), os.pardir, 'scripts', 'convert.sh')),
        os.path.join(UPLOAD_DEST, 'extracted_data',
                     config.get('Device ID'), config.get('User'), dirPath)])
    config['data-path'] = config.get('track-path').rsplit('/', 1)[0]
    config['checksum'] = checksum
    mongo.db.tracks.save(config)
    return True


def get_all_tracks():
    """Return all stored tracks with template-unfriendly keys renamed."""
    tracks = list(mongo.db.tracks.find())
    for track in tracks:
        track['id'] = str(track['_id'])
        track['device_ID'] = track['Device ID']
        track['track_name'] = track['track-name']
        del track['_id']
        del track['Device ID']
        del track['track-name']
    return tracks


@app.route('/track/<ObjectId:id>', methods=["POST"])
def upload_track(id):
    """Attach posted GeoJSON track data to an existing track document."""
    mongo.db.tracks.update({'_id': id}, {'$set': {
        'track': json.loads(request.form.get('track'))}})
    response = make_response()
    return response
This particular item won’t come as news to anyone here, since I started with a review of an old-but-good piece, but I’m contributing reviews of IF to Greg Costikyan’s new indie and alternative game blog. There’s some other cool and fun stuff to see over at PlayThisThing too. As it does pretty much everything but casual games, I’m hoping it’ll be a great game-a-day pairing with JayIsGames, where I get my casual fix. This entry was posted in interactive fiction, video games and tagged Greg Costikyan, PlayThisThing by Emily Short. Bookmark the permalink.
""" Base classes for writing management commands (named commands which can be executed through ``tipi.py``). """ import os import sys from ConfigParser import ConfigParser from optparse import make_option, OptionParser from virtualenv import resolve_interpreter class CommandError(Exception): """ Exception class indicating a problem while executing a management command. If this exception is raised during the execution of a management command, it will be caught and turned into a nicely-printed error message to the appropriate output stream (i.e., stderr); as a result, raising this exception (with a sensible description of the error) is the preferred way to indicate that something has gone wrong in the execution of a command. """ pass class BaseCommand(object): """ The base class from which all management commands ultimately derive. Use this class if you want access to all of the mechanisms which parse the command-line arguments and work out what code to call in response; if you don't need to change any of that behavior, consider using one of the subclasses defined in this file. If you are interested in overriding/customizing various aspects of the command-parsing and -execution behavior, the normal flow works as follows: 1. ``tipi.py`` loads the command class and calls its ``run_from_argv()`` method. 2. The ``run_from_argv()`` method calls ``create_parser()`` to get an ``OptionParser`` for the arguments, parses them, performs any environment changes requested by options like ``pythonpath``, and then calls the ``execute()`` method, passing the parsed arguments. 3. The ``execute()`` method attempts to carry out the command by calling the ``handle()`` method with the parsed arguments; any output produced by ``handle()`` will be printed to standard output. 4. If ``handle()`` raised a ``CommandError``, ``execute()`` will instead print an error message to ``stderr``. 
Thus, the ``handle()`` method is typically the starting point for subclasses; many built-in commands and command types either place all of their logic in ``handle()``, or perform some additional parsing work in ``handle()`` and then delegate from it to more specialized methods as needed. Several attributes affect behavior at various steps along the way: ``args`` A string listing the arguments accepted by the command, suitable for use in help messages; e.g., a command which takes a list of application names might set this to '<appname appname ...>'. ``help`` A short description of the command, which will be printed in help messages. ``option_list`` This is the list of ``optparse`` options which will be fed into the command's ``OptionParser`` for parsing arguments. """ # Metadata about this command. option_list = ( make_option('-v', '--verbose', action='store', dest='verbose', default='1', type='choice', choices=['0', '1', '2'], help='Verbosity level; 0=minimal output, 1=normal output, 2=all output'), make_option('-p', '--python', help='The Python interpreter to use, e.g., --python=python2.5 will use the python2.5 ' 'interpreter to create the new environment. The default is the interpreter that ' 'virtualenv was installed with (%s)' % sys.executable), make_option('--traceback', action='store_true', help='Print traceback on exception'), ) help = '' args = '' #TODO syntax coloring support #def __init__(self): # #self.style = color_style() # try: # home = os.getenv('USERPROFILE') or os.getenv('HOME') # config = ConfigParser(open(os.path.join(home, '.tipirc'))) # except IOError: # pass # except: # pass # # self._interpreter = resolve_interpreter('python') # #@property #def python_interpreter(self): # return self._interpreter def get_version(self): """ Return the Django version, which should be correct for all built-in Django commands. User-supplied commands should override this method. 
""" #TODO placeholder return (0, 1, 0,) def usage(self, subcommand): """ Return a brief description of how to use this command, by default from the attribute ``self.help``. """ usage = '%%prog %s [options] %s' % (subcommand, self.args) if self.help: return '%s\n\n%s' % (usage, self.help) else: return usage def create_parser(self, prog_name, subcommand): """ Create and return the ``OptionParser`` which will be used to parse the arguments to this command. """ return OptionParser(prog=prog_name, usage=self.usage(subcommand), version=str(self.get_version()), option_list=self.option_list) def print_help(self, prog_name, subcommand): """ Print the help message for this command, derived from ``self.usage()``. """ parser = self.create_parser(prog_name, subcommand) parser.print_help() def run_from_argv(self, argv): """ Set up any environment changes requested, then run this command. """ parser = self.create_parser(argv[0], argv[1]) options, args = parser.parse_args(argv[2:]) self.execute(*args, **options.__dict__) def execute(self, *args, **options): """ Try to execute this command. If the command raises a ``CommandError``, intercept it and print it sensibly to stderr. """ try: #output = self.handle(*args, **options) print self.handle(*args, **options) #if output: # print output except CommandError, e: #sys.stderr.write(self.style.ERROR(str('Error: %s\n' % e))) sys.stderr.write(str('Error: %s\n' % e)) sys.exit(1) def handle(self, *args, **options): """ The actual logic of the command. Subclasses must implement this method. """ raise NotImplementedError() #class AppCommand(BaseCommand): # """ # A management command which takes one or more installed application # names as arguments, and does something with each of them. # # Rather than implementing ``handle()``, subclasses must implement # ``handle_app()``, which will be called once for each application. 
# # """ # args = '<appname appname ...>' # # def handle(self, *app_labels, **options): # from django.db import models # if not app_labels: # raise CommandError('Enter at least one appname.') # try: # app_list = [models.get_app(app_label) for app_label in app_labels] # except (ImproperlyConfigured, ImportError), e: # raise CommandError("%s. Are you sure your INSTALLED_APPS setting is correct?" % e) # output = [] # for app in app_list: # app_output = self.handle_app(app, **options) # if app_output: # output.append(app_output) # return '\n'.join(output) # # def handle_app(self, app, **options): # """ # Perform the command's actions for ``app``, which will be the # Python module corresponding to an application name given on # the command line. # # """ # raise NotImplementedError() class LabelCommand(BaseCommand): """ A management command which takes one or more arbitrary arguments (labels) on the command line, and does something with each of them. Rather than implementing ``handle()``, subclasses must implement ``handle_label()``, which will be called once for each label. If the arguments should be names of installed applications, use ``AppCommand`` instead. """ args = '<label label ...>' label = 'label' def handle(self, *labels, **options): if not labels: raise CommandError('Enter at least one %s.' % self.label) output = [] for label in labels: label_output = self.handle_label(label, **options) if label_output: output.append(label_output) return '\n'.join(output) def handle_label(self, label, **options): """ Perform the command's actions for ``label``, which will be the string as given on the command line. """ raise NotImplementedError() #class NoArgsCommand(BaseCommand): # """ # A command which takes no arguments on the command line. # # Rather than implementing ``handle()``, subclasses must implement # ``handle_noargs()``; ``handle()`` itself is overridden to ensure # no arguments are passed to the command. 
# # Attempting to pass arguments will raise ``CommandError``. # # """ # args = '' # # def handle(self, *args, **options): # if args: # raise CommandError("Command doesn't accept any arguments") # return self.handle_noargs(**options) # # def handle_noargs(self, **options): # """ # Perform this command's actions. # # """ # raise NotImplementedError() #def copy_helper(style, app_or_project, name, directory, other_name=''): # """ # Copies either a Django application layout template or a Django project # layout template into the specified directory. # # """ # # style -- A color style object (see django.core.management.color). # # app_or_project -- The string 'app' or 'project'. # # name -- The name of the application or project. # # directory -- The directory to which the layout template should be copied. # # other_name -- When copying an application layout, this should be the name # # of the project. # import re # import shutil # other = {'project': 'app', 'app': 'project'}[app_or_project] # if not re.search(r'^[_a-zA-Z]\w*$', name): # If it's not a valid directory name. # # Provide a smart error message, depending on the error. # if not re.search(r'^[_a-zA-Z]', name): # message = 'make sure the name begins with a letter or underscore' # else: # message = 'use only numbers, letters and underscores' # raise CommandError("%r is not a valid %s name. Please %s." % (name, app_or_project, message)) # top_dir = os.path.join(directory, name) # try: # os.mkdir(top_dir) # except OSError, e: # raise CommandError(e) # # # Determine where the app or project templates are. Use # # django.__path__[0] because we don't know into which directory # # django has been installed. 
# template_dir = os.path.join(django.__path__[0], 'conf', '%s_template' % app_or_project) # # for d, subdirs, files in os.walk(template_dir): # relative_dir = d[len(template_dir)+1:].replace('%s_name' % app_or_project, name) # if relative_dir: # os.mkdir(os.path.join(top_dir, relative_dir)) # for i, subdir in enumerate(subdirs): # if subdir.startswith('.'): # del subdirs[i] # for f in files: # if not f.endswith('.py'): # # Ignore .pyc, .pyo, .py.class etc, as they cause various # # breakages. # continue # path_old = os.path.join(d, f) # path_new = os.path.join(top_dir, relative_dir, f.replace('%s_name' % app_or_project, name)) # fp_old = open(path_old, 'r') # fp_new = open(path_new, 'w') # fp_new.write(fp_old.read().replace('{{ %s_name }}' % app_or_project, name).replace('{{ %s_name }}' % other, other_name)) # fp_old.close() # fp_new.close() # try: # shutil.copymode(path_old, path_new) # _make_writeable(path_new) # except OSError: # sys.stderr.write(style.NOTICE("Notice: Couldn't set permission bits on %s. You're probably using an uncommon filesystem setup. No problem.\n" % path_new)) # #def _make_writeable(filename): # """ # Make sure that the file is writeable. Useful if our source is # read-only. # # """ # import stat # if sys.platform.startswith('java'): # # On Jython there is no os.access() # return # if not os.access(filename, os.W_OK): # st = os.stat(filename) # new_permissions = stat.S_IMODE(st.st_mode) | stat.S_IWUSR # os.chmod(filename, new_permissions)
Last edited by harleyrider 330; 09-07-2017 at 11:14 PM. If you still have the stealth box, you have a deal. I can send you the money via PayPal if you want. Email me at cdjohnson803@gmail ASAP, please. Do you still have the headlights? How much shipped to 33166? Thx. I'm interested in the cluster, the VSM, and the steering wheel.
#!/usr/bin/env python3
#
# Copyright 2017+ Jakub Kolasa <jkolczasty@gmail.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
#
# __author__ = 'Jakub Kolasa <jkolczasty@gmail.com'>
#

from appletree.gui.qt import Qt, QtCore, QtGui, FontDB, loadQImageFix
from appletree.helpers import T, messageDialog, getIconImage
import requests
import html
import re
import logging
from weakref import ref
import base64

# Matches file/http/https/ftp URLs embedded in pasted plain text.
RE_URL = re.compile(r'((file|http|ftp|https)://([\w_-]+(?:(?:\.[\w_-]+)+))([\w.,@?^=%&:/~+#-]*[\w@?^=%&/~+#-])?)')


class ImageResizeDialog(Qt.QDialog):
    """Modal dialog asking for new image dimensions, optionally keeping
    the original aspect ratio.  ``exec_()`` returns True on OK."""

    def __init__(self, win, title, name, w, h):
        super(ImageResizeDialog, self).__init__(win)
        self.w = w
        self.h = h
        self.keepaspect = True
        self.aspect = float(w) / float(h)
        self.result = False

        self.setWindowTitle(title)
        self.vbox = Qt.QVBoxLayout(self)
        self.vbox.addWidget(Qt.QLabel(T(name)))
        self.box = Qt.QGroupBox(self)
        self.form = Qt.QFormLayout(self.box)

        buttonbox = Qt.QDialogButtonBox()
        buttonbox.setGeometry(Qt.QRect(150, 250, 341, 32))
        buttonbox.setOrientation(QtCore.Qt.Horizontal)
        buttonbox.setStandardButtons(Qt.QDialogButtonBox.Cancel | Qt.QDialogButtonBox.Ok)
        buttonbox.setWindowTitle(title)
        self.vbox.addWidget(self.box)
        self.vbox.addWidget(buttonbox)
        self.vbox.setStretch(2, 0)
        self.setAttribute(QtCore.Qt.WA_DeleteOnClose)
        #
        # FIX: the original passed QtCore.QCoreApplication (a type, not a
        # modality value) to setWindowModality(); use the application-modal
        # enum, which is also what setModal(True) below implies.
        self.setWindowModality(QtCore.Qt.ApplicationModal)
        self.setModal(True)

        self.ww = Qt.QSpinBox()
        self.ww.setMinimum(16)
        self.ww.setMaximum(0xffff)
        self.ww.setValue(self.w)
        self.ww.valueChanged.connect(self.on_changed_width)
        self.form.addRow(T("Width"), self.ww)
        self.ww.setFocus()

        self.wh = Qt.QSpinBox()
        # FIX: copy-paste bug — the original called self.ww.setMinimum(16)
        # a second time here and never constrained the height spinbox.
        self.wh.setMinimum(16)
        self.wh.setMaximum(0xffff)
        self.wh.setValue(self.h)
        self.wh.valueChanged.connect(self.on_changed_height)
        self.form.addRow(T("Height"), self.wh)

        widget = Qt.QCheckBox()
        widget.setChecked(True)
        widget.stateChanged.connect(self.on_changed_aspect)
        self.form.addRow(T("Keep aspect"), widget)

        buttonbox.accepted.connect(self.on_accept)
        buttonbox.rejected.connect(self.on_reject)
        # QtCore.QMetaObject.connectSlotsByName(Dialog)
        self.adjustSize()
        self.setMinimumWidth(600)
        self.setSizePolicy(Qt.QSizePolicy.MinimumExpanding, Qt.QSizePolicy.MinimumExpanding)

    def exec_(self):
        """Run the dialog; return True if the user accepted it."""
        super(ImageResizeDialog, self).exec_()
        # del self.fields
        return self.result

    def on_accept(self):
        self.result = True
        self.close()

    def on_reject(self):
        self.result = False
        self.close()

    def on_changed_width(self, w):
        """Width spinbox changed: sync height when keeping aspect."""
        self.w = w
        if not self.keepaspect:
            return
        # Temporarily drop the flag so the height spinbox's valueChanged
        # callback does not bounce the update back to the width box.
        self.keepaspect = False
        h = float(w) / self.aspect
        self.wh.setValue(int(h))
        self.keepaspect = True

    def on_changed_height(self, h):
        """Height spinbox changed: sync width when keeping aspect."""
        self.h = h
        if not self.keepaspect:
            return
        self.keepaspect = False
        w = float(h) * self.aspect
        self.ww.setValue(int(w))
        self.keepaspect = True

    def on_changed_aspect(self, newvalue):
        # Qt passes the check state (0/2); any non-zero value is "keep".
        self.keepaspect = newvalue


class ImageViewDialog(Qt.QDialog):
    """Scrollable preview dialog for a single image (QImage or QPixmap)."""

    def __init__(self, win, title, image):
        super(ImageViewDialog, self).__init__(win)
        self.setWindowTitle(title)
        vbox = Qt.QVBoxLayout(self)
        scrollarea = Qt.QScrollArea(self)
        scrollarea.setWidgetResizable(True)
        label = Qt.QLabel(self)
        label.setAlignment(QtCore.Qt.AlignVCenter | QtCore.Qt.AlignHCenter)
        if image.__class__.__name__ == 'QImage':
            # FIX: QPixmap.fromImage() is a static factory returning a new
            # pixmap; the original called it on an empty instance and
            # discarded the result, so QImage inputs rendered blank.
            pixmap = Qt.QPixmap.fromImage(image)
        else:
            pixmap = image
        label.setPixmap(pixmap)
        scrollarea.setWidget(label)
        vbox.addWidget(scrollarea)
        self.setSizePolicy(Qt.QSizePolicy.MinimumExpanding, Qt.QSizePolicy.MinimumExpanding)


class QTextEdit(Qt.QTextEdit):
    """Rich-text editor widget with link handling, paste-time URL
    detection, and basic formatting shortcuts."""

    contextMenuEventSingal = Qt.pyqtSignal(object)
    linkClicked = Qt.pyqtSignal(object)
    # Anchor href under the cursor at mouse-press time, if any.
    clickedAnchor = None

    def __init__(self, *args, **kwargs):
        super(QTextEdit, self).__init__()
        # NOTE(review): requires a 'parent' kwarg; ref(None) would raise —
        # confirm all call sites pass parent=.
        self.win = ref(kwargs.get('parent'))
        # self.contextMenuEventSingal = Qt.pyqtSignal(object)
        flags = self.textInteractionFlags()
        flags = QtCore.Qt.TextInteractionFlags(flags)
        flags |= QtCore.Qt.LinksAccessibleByMouse
        flags |= QtCore.Qt.LinksAccessibleByKeyboard
        self.setTextInteractionFlags(flags)
        self.setAcceptRichText(True)
        self.setAutoFormatting(QTextEdit.AutoAll)
        self.addShortcut('CTRL+B', self.on_bold)
        self.addShortcut('CTRL+I', self.on_italic)
        self.addShortcut('CTRL+U', self.on_underline)
        self.addShortcut('CTRL+T', self.on_test)

    def addShortcut(self, shortcut, callback):
        """Bind a keyboard shortcut to a callback via a QAction."""
        action = Qt.QAction(self)
        action.setShortcut(shortcut)
        action.triggered.connect(callback)
        self.addAction(action)

    def mousePressEvent(self, event):
        # Remember the anchor under the press position; mouseReleaseEvent
        # decides whether the click counts as a link activation.
        pos = event.pos()
        self.clickedAnchor = self.anchorAt(pos)
        return super(QTextEdit, self).mousePressEvent(event)

    def mouseReleaseEvent(self, event):
        # Ctrl+LeftClick on an anchor activates the link.
        if self.clickedAnchor and (event.button() & QtCore.Qt.LeftButton) and (
                event.modifiers() & QtCore.Qt.ControlModifier):
            pos = event.pos()
            clickedAnchor = self.anchorAt(pos)
            # NOTE(review): looks like debug leftover — every link click
            # pops a dialog; confirm before shipping.
            messageDialog("Link clicked", "Link you clicked: {0}".format(clickedAnchor), details=clickedAnchor)
            self.linkClicked.emit(event)
            self.clickedAnchor = None
            return
        return super(QTextEdit, self).mouseReleaseEvent(event)

    def contextMenuEvent(self, event):
        # Delegate the context menu to whoever listens on the signal.
        self.contextMenuEventSingal.emit(event)

    def insertLink(self, url, cursor=None, addSpace=True):
        """Insert *url* as a blue, underlined anchor at the cursor."""
        if not cursor:
            cursor = self.textCursor()
        cursor = Qt.QTextCursor(cursor)
        _cformat = cursor.charFormat()
        font = _cformat.font()
        _format = Qt.QTextCharFormat()
        _format.setFont(font)
        _format.setUnderlineStyle(1)  # 1 == single underline
        _format.setForeground(QtCore.Qt.blue)
        _format.setAnchor(True)
        _format.setAnchorHref(url)
        cursor.insertText(url, _format)
        if addSpace:
            # Follow with a plainly-formatted space so typing after the
            # link does not extend the anchor.
            _format = Qt.QTextCharFormat()
            _format.setFont(font)
            cursor.insertText(" ", _format)

    def insertText(self, s, cursor=None):
        """Insert *s* using the current font but no other formatting."""
        if not cursor:
            cursor = self.textCursor()
        cursor = Qt.QTextCursor(cursor)
        _cformat = cursor.charFormat()
        font = _cformat.font()
        _format = Qt.QTextCharFormat()
        _format.setFont(font)
        cursor.insertText(s, _format)

    def insertFromMimeData(self, mime):
        """Paste handler: for plain-text pastes, HTML-escape the text and
        turn embedded URLs into clickable anchors."""
        if mime.hasText() and not mime.hasHtml():
            s = mime.text()
            # replace links
            s = html.escape(s, quote=False)
            index = 0
            c = 0
            # Safety cap: convert at most 1000 links per paste.
            while c < 1000:
                m = RE_URL.search(s, index)
                if not m:
                    s2 = s[index:]
                    # Drop the single separator space left after a link.
                    if c and s2.startswith(" "):
                        s2 = s2[1:]
                    self.insertText(s2)
                    break
                pos = m.start()
                s2 = s[index:pos]
                if c and s2.startswith(" "):
                    s2 = s2[1:]
                self.insertText(s2)
                index2 = m.end()
                self.insertLink(m.group(1))
                c += 1
                index = index2
            return
        return super(QTextEdit, self).insertFromMimeData(mime)

    def on_bold(self):
        # Toggle bold for the current selection/cursor.
        if self.fontWeight() == QtGui.QFont.Bold:
            self.setFontWeight(QtGui.QFont.Normal)
        else:
            self.setFontWeight(QtGui.QFont.Bold)

    def on_italic(self):
        self.setFontItalic(not self.fontItalic())

    def on_underline(self):
        self.setFontUnderline(not self.fontUnderline())

    def on_strikeout(self):
        # not implemented
        font = self.currentFont()
        font.setStrikeOut(not font.strikeOut())
        self.setCurrentFont(font)
        self.setFont(font)

    def on_test(self):
        pass


class RTDocument(Qt.QTextDocument):
    """QTextDocument that resolves image resources from the project
    store, local files, or the network, caching them on the editor's
    document and substituting a placeholder when loading fails."""

    def __init__(self, editor, docid, *args, **kwargs):
        super(RTDocument, self).__init__(*args, **kwargs)
        self.log = logging.getLogger("at.document." + docid)
        self.editor = editor
        self.docid = docid

    def loadResourceRemote(self, url):
        """Fetch an image over HTTP(S); return a QPixmap or None."""
        # TODO: show wait/progress dialog/info
        try:
            ret = requests.get(url)
            if ret.status_code not in (200,):
                return None
            data = Qt.QByteArray(ret.content)
            image = Qt.QPixmap()
            image.loadFromData(data)
            data.clear()
            return image
        except Exception as e:
            self.log.error("Failed to retrive remote image: %s: %s", e.__class__.__name__, e)

    def loadResourceMissing(self, _qurl):
        """Cache and return the bundled 'noimage' placeholder for _qurl."""
        image = getIconImage("noimage")
        self.editor.doc.addResource(Qt.QTextDocument.ImageResource, _qurl, image)
        return image

    def loadResource(self, p_int, _qurl):
        """Resolve a document resource request.

        Lookup order: inline data URLs -> project image store -> remote
        http(s) -> local file -> Qt's default loader -> placeholder.
        Successful loads are cached via addResource() so Qt does not ask
        again.
        """
        url = _qurl.toString()
        if url.startswith('data:image/'):
            # Inline base64 images: let Qt decode them itself.
            return super(RTDocument, self).loadResource(p_int, _qurl)

        self.editor.log.info("loadResource(): %s", url)
        scheme = _qurl.scheme()
        image = self.editor.project.doc.getImage(self.docid, url)
        if image:
            self.editor.doc.addResource(Qt.QTextDocument.ImageResource, _qurl, image)
            return image

        if scheme:
            if scheme in ('http', 'https'):
                self.editor.log.info("Trying retrive remote image: %s", url)
                # remote image get it from network
                image = self.loadResourceRemote(url)
                if image:
                    self.editor.doc.addResource(Qt.QTextDocument.ImageResource, _qurl, image)
                    return image
            if scheme == 'file':
                try:
                    filename = Qt.QDir.toNativeSeparators(_qurl.toLocalFile())
                    self.editor.log.info("Trying retrive local image: %s", filename)
                    f = Qt.QFile(filename)
                    if not f.open(Qt.QFile.ReadOnly):
                        self.log.error("loadResource(): could not open file: %s", url)
                        return self.loadResourceMissing(_qurl)
                    data = f.readAll()
                    f.close()
                    del f
                    image = Qt.QPixmap()
                    image.loadFromData(data)
                    data.clear()
                    del data
                    if image:
                        self.editor.doc.addResource(Qt.QTextDocument.ImageResource, _qurl, image)
                        return image
                except Exception as e:
                    self.log.error("Failed to load image: %s: %s", e.__class__.__name__, e)

        res = super(RTDocument, self).loadResource(p_int, _qurl)
        if res:
            return res
        return self.loadResourceMissing(_qurl)
Synopsis: A look at a small, Los Angeles church with a choir that’s filled with talented singers. It’s also a choir filled with interesting personalities and relationships. A single mom, a “new” Christian, an attention seeker, a prayer warrior, the non-singer, and of course the director! Synopsis: Could a poker game bet that forces two heathens to attend a hip, Bible-teaching church be considered a divine act of God? Synopsis: The rules of the dating game can get a little mixed up when matters of the heart come into play. Synopsis: A story about a hustler who gets played, trying to get over on a church girl. Synopsis: A youth-focused story about young life, young love, and the social challenges that come with peer pressure. Synopsis: A story about three church choir girls who want the fame, fortune, and success so much that they compromise their commitment to God. Synopsis: A comedic look at a variety of lifestyles, all thinking they’re on their way to heaven.
# -*- coding: utf-8 -*-
## src/network_manager_listener.py
##
## Copyright (C) 2006 Jeffrey C. Ollie <jeff AT ocjtech.us>
##                    Nikos Kouremenos <kourem AT gmail.com>
##                    Stefan Bethge <stefan AT lanpartei.de>
## Copyright (C) 2006-2014 Yann Leboulanger <asterix AT lagaule.org>
##
## This file is part of Gajim.
##
## Gajim is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published
## by the Free Software Foundation; version 3 only.
##
## Gajim is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
## GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Gajim. If not, see <http://www.gnu.org/licenses/>.
##

# Listens on D-Bus for NetworkManager connectivity changes and reconnects or
# disconnects Gajim accounts that opted in via 'listen_to_network_manager'.

import sys

from common import gajim


def device_now_active(self, *args):
    """
    For Network Manager 0.6

    Signal receiver for 'DeviceNowActive': the network came up, so reconnect
    every opted-in account that is waiting to reconnect.
    (The 'self' parameter receives the first D-Bus signal argument and is
    unused — kept for the registered receiver signature.)
    """
    for connection in gajim.connections.values():
        if gajim.config.get_per('accounts', connection.name,
        'listen_to_network_manager') and connection.time_to_reconnect:
            connection._reconnect()


def device_no_longer_active(self, *args):
    """
    For Network Manager 0.6

    Signal receiver for 'DeviceNoLongerActive': the network went down, so
    drop every opted-in account that is currently connected
    (connection.connected > 1 means an established connection).
    """
    for connection in gajim.connections.values():
        if gajim.config.get_per('accounts', connection.name,
        'listen_to_network_manager') and connection.connected > 1:
            connection._disconnectedReconnCB()


def state_changed(state):
    """
    For Network Manager 0.7 - 0.9

    Signal receiver for 'StateChanged'. The passed 'state' argument is
    ignored; the authoritative value is re-read from the module-level
    'props' D-Bus properties proxy (set up at import time below).
    State 3 (NM 0.7/0.8) and 70 (NM 0.9, NM_STATE_CONNECTED_GLOBAL) both
    mean "connected"; anything else is treated as offline.
    """
    nm_state = props.Get("org.freedesktop.NetworkManager", "State")
    if nm_state == 3 or nm_state == 70:
        for connection in gajim.connections.values():
            if gajim.config.get_per('accounts', connection.name,
            'listen_to_network_manager') and connection.time_to_reconnect:
                connection._reconnect()
    else:
        for connection in gajim.connections.values():
            if gajim.config.get_per('accounts', connection.name,
            'listen_to_network_manager') and connection.connected > 1:
                connection._disconnectedReconnCB()


# Import-time wiring: 'supported' ends up True iff a NetworkManager service
# was found on the system bus and a signal receiver was registered.
supported = False

from common import dbus_support
if dbus_support.supported:
    import dbus
    try:
        from common.dbus_support import system_bus
        bus = system_bus.bus()
        if 'org.freedesktop.NetworkManager' in bus.list_names():
            # NM >= 0.7: subscribe to StateChanged and keep a Properties
            # proxy ('props') for state_changed() to query.
            nm_object = bus.get_object('org.freedesktop.NetworkManager',
                '/org/freedesktop/NetworkManager')
            props = dbus.Interface(nm_object, "org.freedesktop.DBus.Properties")
            bus.add_signal_receiver(state_changed,
                'StateChanged',
                'org.freedesktop.NetworkManager',
                'org.freedesktop.NetworkManager',
                '/org/freedesktop/NetworkManager')
            supported = True
    except dbus.DBusException:
        # Fall back to the NM 0.6 signal names.
        try:
            if 'org.freedesktop.NetworkManager' in bus.list_names():
                supported = True
                bus.add_signal_receiver(device_no_longer_active,
                    'DeviceNoLongerActive',
                    'org.freedesktop.NetworkManager',
                    'org.freedesktop.NetworkManager',
                    '/org/freedesktop/NetworkManager')
                bus.add_signal_receiver(device_now_active,
                    'DeviceNowActive',
                    'org.freedesktop.NetworkManager',
                    'org.freedesktop.NetworkManager',
                    '/org/freedesktop/NetworkManager')
        except Exception:
            # No NetworkManager at all (or bus unavailable): stay unsupported.
            pass
Abstract: Fine-scale habitat preferences of three co-occurring mycophagous mammals were examined in a tropical wet sclerophyll forest community in north-eastern Australia. Two of the three mammal species responded to fine-scale variation in vegetation and landform around individual trap locations. At a broad scale, the northern bettong (Bettongia tropica), an endangered marsupial endemic to the Australian wet tropics region, showed a preference for ridges over mid-slopes and gullies, irrespective of forest type. In contrast, the northern brown bandicoot (Isoodon macrourus), a widespread marsupial, displayed a preference for Eucalyptus woodland over adjacent Allocasuarina forest, irrespective of topographic category. The giant white-tailed rat (Uromys caudimaculatus), a rodent endemic to the wet tropics, showed no particular preference for either forest type or topographic category. A multiple regression model of mammal capture success against three principal habitat gradients constructed from 21 habitat variables using principal component analysis indicated strong species-specific preferences for fine-scale vegetation assemblages. Bettongs preferred areas of Eucalyptus woodland with sparse ground cover, low densities of certain grass species, high density of tree stems and few pig diggings. Bandicoots, in contrast, favoured areas in both forest types with dense ground cover, fewer tree stems and greater numbers of pig diggings; that is, characteristics least favoured by bettongs. The striking differences in fine-scale habitat preferences of these two mammals of similar body size and broad habitat requirements suggest a high degree of fine-scale habitat partitioning. White-tailed rats did not show preference for any of the habitat gradients examined.
""" ******************************************************************************* * Ledger Blue * (c) 2016 Ledger * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. ******************************************************************************** """ DEFAULT_ALIGNMENT = 1024 PAGE_ALIGNMENT = 64 import argparse import os NOCRC=False if "NOCRC" in os.environ and len(os.environ["NOCRC"]) != 0: NOCRC=os.environ["NOCRC"] def get_argparser(): parser = argparse.ArgumentParser(description="Load an app onto the device from a hex file.") parser.add_argument("--targetId", help="The device's target ID (default is Ledger Blue)", type=auto_int) parser.add_argument("--targetVersion", help="Set the chip target version") parser.add_argument("--fileName", help="The application hex file to be loaded onto the device") parser.add_argument("--icon", help="The icon content to use (hex encoded)") parser.add_argument("--curve", help="""A curve on which BIP 32 derivation is locked ("secp256k1", "prime256r1", "ed25519" or "bls12381g1"), can be repeated""", action='append') parser.add_argument("--path", help="""A BIP 32 path to which derivation is locked (format decimal a'/b'/c), can be repeated""", action='append') parser.add_argument("--path_slip21", help="""A SLIP 21 path to which derivation is locked""", action='append') parser.add_argument("--appName", help="The name to give the application after loading it") parser.add_argument("--signature", help="A signature of the application (hex encoded)") 
parser.add_argument("--signApp", help="Sign application with provided signPrivateKey", action='store_true') parser.add_argument("--appFlags", help="The application flags", type=auto_int) parser.add_argument("--bootAddr", help="The application's boot address", type=auto_int) parser.add_argument("--rootPrivateKey", help="""The Signer private key used to establish a Secure Channel (otherwise a random one will be generated)""") parser.add_argument("--signPrivateKey", help="Set the private key used to sign the loaded app") parser.add_argument("--apdu", help="Display APDU log", action='store_true') parser.add_argument("--deployLegacy", help="Use legacy deployment API", action='store_true') parser.add_argument("--apilevel", help="Use given API level when interacting with the device", type=auto_int) parser.add_argument("--delete", help="Delete the app with the same name before loading the provided one", action='store_true') parser.add_argument("--params", help="Store icon and install parameters in a parameter section before the code", action='store_true') parser.add_argument("--tlv", help="Use install parameters for all variable length parameters", action='store_true') parser.add_argument("--dataSize", help="The code section's size in the provided hex file (to separate data from code, if not provided the whole allocated NVRAM section for the application will remain readonly.", type=auto_int) parser.add_argument("--appVersion", help="The application version (as a string)") parser.add_argument("--offline", help="Request to only output application load APDUs into given filename") parser.add_argument("--offlineText", help="Request to only output application load APDUs into given filename in text mode", action='store_true') parser.add_argument("--installparamsSize", help="The loaded install parameters section size (when parameters are already included within the .hex file.", type=auto_int) parser.add_argument("--tlvraw", help="Add a custom install param with the hextag:hexvalue 
encoding", action='append') parser.add_argument("--dep", help="Add a dependency over an appname[:appversion]", action='append') parser.add_argument("--nocrc", help="Skip CRC generation when loading", action='store_true') return parser def auto_int(x): return int(x, 0) def parse_bip32_path(path, apilevel): import struct if len(path) == 0: return b"" result = b"" elements = path.split('/') if apilevel >= 5: result = result + struct.pack('>B', len(elements)) for pathElement in elements: element = pathElement.split('\'') if len(element) == 1: result = result + struct.pack(">I", int(element[0])) else: result = result + struct.pack(">I", 0x80000000 | int(element[0])) return result def parse_slip21_path(path): import struct result = struct.pack('>B', 0x80 | (len(path) + 1)) result = result + b'\x00' + string_to_bytes(path) return result def string_to_bytes(x): import sys if sys.version_info.major == 3: return bytes(x, 'ascii') else: return bytes(x) if __name__ == '__main__': from .ecWrapper import PrivateKey from .comm import getDongle from .hexParser import IntelHexParser, IntelHexPrinter from .hexLoader import HexLoader from .hexLoader import * from .deployed import getDeployedSecretV1, getDeployedSecretV2 import struct import binascii import sys args = get_argparser().parse_args() if args.apilevel == None: args.apilevel = 10 if args.targetId == None: args.targetId = 0x31000002 if args.fileName == None: raise Exception("Missing fileName") if args.appName == None: raise Exception("Missing appName") if args.path_slip21 != None and args.apilevel < 10: raise Exception("SLIP 21 path not supported using this API level") if args.appFlags == None: args.appFlags = 0 if args.rootPrivateKey == None: privateKey = PrivateKey() publicKey = binascii.hexlify(privateKey.pubkey.serialize(compressed=False)) print("Generated random root public key : %s" % publicKey) args.rootPrivateKey = privateKey.serialize() args.appName = string_to_bytes(args.appName) parser = 
IntelHexParser(args.fileName) if args.bootAddr == None: args.bootAddr = parser.getBootAddr() path = b"" curveMask = 0xff if args.curve != None: curveMask = 0x00 for curve in args.curve: if curve == 'secp256k1': curveMask |= 0x01 elif curve == 'prime256r1': curveMask |= 0x02 elif curve == 'ed25519': curveMask |= 0x04 elif curve == 'bls12381g1': curveMask |= 0x10 else: raise Exception("Unknown curve " + curve) if args.apilevel >= 5: if (args.path_slip21 != None): curveMask |= 0x08 path += struct.pack('>B',curveMask) if args.path != None: for item in args.path: if len(item) != 0: path += parse_bip32_path(item, args.apilevel) if args.path_slip21 != None: for item in args.path_slip21: if len(item) != 0: path += parse_slip21_path(item) if (args.path == None) or ((len(args.path) == 1) and (len(args.path[0]) == 0)): path += struct.pack('>B', 0) # Unrestricted, authorize all paths for regular derivation else: if args.curve != None: print("Curve not supported using this API level, ignoring") if args.path != None: if len(args.path) > 1: print("Multiple path levels not supported using this API level, ignoring") else: path = parse_bip32_path(args.path[0], args.apilevel) if not args.icon is None: args.icon = bytearray.fromhex(args.icon) signature = None if not args.signature is None: signature = bytearray.fromhex(args.signature) #prepend app's data with the icon content (could also add other various install parameters) printer = IntelHexPrinter(parser) # Use of Nested Encryption Key within the SCP protocol is mandartory for upgrades cleardata_block_len=None if args.appFlags & 2: # Not true for scp < 3 # if signature is None: # raise BaseException('Upgrades must be signed') # ensure data can be decoded with code decryption key without troubles. 
cleardata_block_len = 16 dongle = None secret = None if not args.offline: dongle = getDongle(args.apdu) if args.deployLegacy: secret = getDeployedSecretV1(dongle, bytearray.fromhex(args.rootPrivateKey), args.targetId) else: secret = getDeployedSecretV2(dongle, bytearray.fromhex(args.rootPrivateKey), args.targetId) else: fileTarget = open(args.offline, "wb") class FileCard(): def __init__(self, target): self.target = target def exchange(self, apdu): if (args.apdu): print(binascii.hexlify(apdu)) apdu = binascii.hexlify(apdu) if sys.version_info.major == 2: self.target.write(str(apdu) + '\n') else: self.target.write(apdu + '\n'.encode()) return bytearray([]) def apduMaxDataSize(self): # ensure to allow for encryption of those apdu afterward return 240 dongle = FileCard(fileTarget) loader = HexLoader(dongle, 0xe0, not(args.offline), secret, cleardata_block_len=cleardata_block_len) #tlv mode does not support explicit by name removal, would require a list app before to identify the hash to be removed if (not (args.appFlags & 2)) and args.delete: loader.deleteApp(args.appName) if (args.tlv): #if code length is not provided, then consider the whole provided hex file is the code and no data section is split code_length = printer.maxAddr() - printer.minAddr() if not args.dataSize is None: code_length -= args.dataSize else: args.dataSize = 0 installparams = b"" # express dependency if (args.dep): for dep in args.dep: appname = dep appversion = None # split if version is specified if (dep.find(":") != -1): (appname,appversion) = dep.split(":") depvalue = encodelv(string_to_bytes(appname)) if(appversion): depvalue += encodelv(string_to_bytes(appversion)) installparams += encodetlv(BOLOS_TAG_DEPENDENCY, depvalue) #add raw install parameters as requested if (args.tlvraw): for tlvraw in args.tlvraw: (hextag,hexvalue) = tlvraw.split(":") installparams += encodetlv(int(hextag, 16), binascii.unhexlify(hexvalue)) if (not (args.appFlags & 2)) and ( args.installparamsSize is None or 
args.installparamsSize == 0 ): #build install parameters #mandatory app name installparams += encodetlv(BOLOS_TAG_APPNAME, args.appName) if not args.appVersion is None: installparams += encodetlv(BOLOS_TAG_APPVERSION, string_to_bytes(args.appVersion)) if not args.icon is None: installparams += encodetlv(BOLOS_TAG_ICON, bytes(args.icon)) if len(path) > 0: installparams += encodetlv(BOLOS_TAG_DERIVEPATH, path) # append install parameters to the loaded file param_start = printer.maxAddr()+(PAGE_ALIGNMENT-(args.dataSize%PAGE_ALIGNMENT))%PAGE_ALIGNMENT # only append install param section when not an upgrade as it has already been computed in the encrypted and signed chunk printer.addArea(param_start, installparams) paramsSize = len(installparams) else: paramsSize = args.installparamsSize # split code and install params in the code code_length -= args.installparamsSize # create app #ensure the boot address is an offset if args.bootAddr > printer.minAddr(): args.bootAddr -= printer.minAddr() loader.createApp(code_length, args.dataSize, paramsSize, args.appFlags, args.bootAddr|1) elif (args.params): paramsSectionContent = [] if not args.icon is None: paramsSectionContent = args.icon #take care of aligning the parameters sections to avoid possible invalid dereference of aligned words in the program nvram. #also use the default MPU alignment param_start = printer.minAddr()-len(paramsSectionContent)-(DEFAULT_ALIGNMENT-(len(paramsSectionContent)%DEFAULT_ALIGNMENT)) printer.addArea(param_start, paramsSectionContent) # account for added regions (install parameters, icon ...) appLength = printer.maxAddr() - printer.minAddr() loader.createAppNoInstallParams(args.appFlags, appLength, args.appName, None, path, 0, len(paramsSectionContent), string_to_bytes(args.appVersion)) else: # account for added regions (install parameters, icon ...) 
appLength = printer.maxAddr() - printer.minAddr() loader.createAppNoInstallParams(args.appFlags, appLength, args.appName, args.icon, path, None, None, string_to_bytes(args.appVersion)) hash = loader.load(0x0, 0xF0, printer, targetId=args.targetId, targetVersion=args.targetVersion, doCRC=not (args.nocrc or NOCRC)) print("Application full hash : " + hash) if (signature == None and args.signApp): masterPrivate = PrivateKey(bytes(bytearray.fromhex(args.signPrivateKey))) signature = masterPrivate.ecdsa_serialize(masterPrivate.ecdsa_sign(bytes(binascii.unhexlify(hash)), raw=True)) print("Application signature: " + str(binascii.hexlify(signature))) if (args.tlv): loader.commit(signature) else: loader.run(args.bootAddr-printer.minAddr(), signature)
suitedjaxx69 (2176) plays Dr_King_Schultz (2233) in a rated Correspondence game of chess. Game is still being played after 16 moves. Click to replay, analyse, and discuss the game!
""" Created on Feb 28, 2015 @author: StarlitGhost """ import re from twisted.plugin import IPlugin from twisted.words.protocols.irc import assembleFormattedText as colour, attributes as A from zope.interface import implementer from desertbot.message import IRCMessage from desertbot.moduleinterface import IModule from desertbot.modules.commandinterface import BotCommand from desertbot.response import IRCResponse from desertbot.utils import dictutils class UnbalancedBracesException(Exception): def __init__(self, message, column): # Call the base exception constructor with the params it needs super(UnbalancedBracesException, self).__init__(message) # Store the message self.message = message # Store the column position of the unbalanced brace self.column = column class DictMergeError(Exception): pass @implementer(IPlugin, IModule) class Sub(BotCommand): def triggers(self): return ['sub'] def help(self, query): return [ "sub <text> - " "executes nested commands in <text> and replaces the commands with their output", "syntax: text {command params} more text {command {command params} {command params}}", "example: .sub Some {rainbow magical} {flip topsy-turvy} text"] def execute(self, message: IRCMessage): subString = self._mangleEscapes(message.parameters) try: segments = list(self._parseSubcommandTree(subString)) except UnbalancedBracesException as e: red = colour(A.bold[A.fg.lightRed['']]) normal = colour(A.normal['']) error = (subString[:e.column] + red + subString[e.column] + normal + subString[e.column+1:]) error = self._unmangleEscapes(error, False) return [ IRCResponse("Sub Error: {} (column {})".format(e.message, e.column), message.replyTo), IRCResponse(error, message.replyTo)] prevLevel = -1 responseStack = [] metadata = message.metadata if 'tracking' in metadata: metadata['tracking'].add('Sub') else: metadata['tracking'] = set('Sub') for segment in segments: (level, command, start, end) = segment # grab the text replace var dict from the metadata, if present if 
'var' in metadata: replaceVars = metadata['var'] else: replaceVars = {} # We've finished executing subcommands at the previous depth, # so replace subcommands with their output at the current depth if level < prevLevel: command = self._substituteResponses(command, responseStack, level, replaceVars, start) # Replace any replaceVars in the command for var, value in replaceVars.items(): command = re.sub(r'\$\b{}\b'.format(re.escape(var)), '{}'.format(value), command) # Build a new message out of this segment inputMessage = IRCMessage(message.type, message.user, message.channel, self.bot.commandChar + command.lstrip(), self.bot, metadata=metadata) # Execute the constructed message if inputMessage.command.lower() in self.bot.moduleHandler.mappedTriggers: module = self.bot.moduleHandler.mappedTriggers[inputMessage.command.lower()] response = module.execute(inputMessage) """@type : IRCResponse""" else: return IRCResponse("'{}' is not a recognized command trigger" .format(inputMessage.command), message.replyTo) # Push the response onto the stack responseStack.append((level, response.response, start, end)) # merge response metadata back into our sub-global dict metadata = dictutils.recursiveMerge(metadata, response.Metadata) # update the replaceVars in case this is the outermost segment # (and therefore we won't be looping again to pick them up) if 'var' in metadata: replaceVars = metadata['var'] prevLevel = level responseString = self._substituteResponses(subString, responseStack, -1, replaceVars, -1) responseString = self._unmangleEscapes(responseString) return IRCResponse(responseString, message.replyTo, metadata=metadata) @staticmethod def _parseSubcommandTree(string): """Parse braced segments in string as tuples (level, contents, start index, end index).""" stack = [] for i, c in enumerate(string): if c == '{': stack.append(i) elif c == '}': if stack: start = stack.pop() yield len(stack), string[start + 1: i], start, i else: raise UnbalancedBracesException("unbalanced 
closing brace", i) if stack: start = stack.pop() raise UnbalancedBracesException("unbalanced opening brace", start) @staticmethod def _substituteResponses(command, responseStack, commandLevel, replaceVars, start): # Pop responses off the stack and replace the subcommand that generated them while len(responseStack) > 0: level, responseString, rStart, rEnd = responseStack.pop() if level <= commandLevel: responseStack.append((level, responseString, rStart, rEnd)) break cStart = rStart - start - 1 cEnd = rEnd - start # Replace the subcommand with its output command = command[:cStart] + responseString + command[cEnd:] # Replace any replaceVars generated by functions for var, value in replaceVars.items(): command = re.sub(r'\$\b{}\b'.format(re.escape(var)), '{}'.format(value), command) return command @staticmethod def _mangleEscapes(string): # Replace escaped left and right braces with something # that should never show up in messages/responses string = re.sub(r'(?<!\\)\\\{', '@LB@', string) string = re.sub(r'(?<!\\)\\\}', '@RB@', string) return string @staticmethod def _unmangleEscapes(string, unescape=True): if unescape: # Replace the mangled escaped braces with unescaped braces string = string.replace('@LB@', '{') string = string.replace('@RB@', '}') else: # Just unmangle them, ie, keep the escapes string = string.replace('@LB@', '\\{') string = string.replace('@RB@', '\\}') return string sub = Sub()
I lay down on my back on my yoga mat and my dog Penny flopped down next to me for a belly rub. My neighbor’s dog chose to crawl on top of her for kisses. I’m not sure what else was going on around the room, but I heard rustling and giggling. Louisville’s first Doga class came together after Competitive Edge’s owner, Terry Vanhook, told Comp about an article she’d read some eight years ago. She’d always thought the concept was fascinating, and she and Comp thought the newly opened Competitive Edge facility was ready to offer the class. Comp began researching, getting input from her own yoga instructors and mentorship from Suzi Teitelman, the creator of Doga. We moved slowly through a series of simple yoga poses and stretches. In some, our dogs would position themselves under our outstretched hands for a pat. In others, they would lie down next to us. At times, they would just sit and watch. Partner Yoga with a dog is a very organic experience. Comp incorporates massage into her Doga class, as well, drawing on her background as a licensed massage therapist. When we each massaged our dogs, most of us skipped a body part, a paw here, an ear there, trusting our dogs to let us know what made them feel uncomfortable instead of more relaxed and connected. Both Comp and Vanhook say they have been surprised at how relaxed the dogs in the class have remained. For the most part, the treats standing at the ready have not been needed to keep the dogs’ attention; they love the one-on-one interaction with their humans, especially the dogs who don’t come from one-dog homes. Comp’s partner for this class, Lacey the Chihuahua, is one of 12 dogs at Comp’s house, and was basking in the extra attention. The Doga class is offered on Saturday mornings at Competitive Edge’s facility at 4600 Shepherdsville Road. Call 502-599-7450 for more information.