text
stringlengths
29
850k
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) from spack import * import glob import os class Superlu(Package): """SuperLU is a general purpose library for the direct solution of large, sparse, nonsymmetric systems of linear equations on high performance machines. SuperLU is designed for sequential machines.""" homepage = "http://crd-legacy.lbl.gov/~xiaoye/SuperLU/#superlu" url = "http://crd-legacy.lbl.gov/~xiaoye/SuperLU/superlu_5.2.1.tar.gz" version('5.2.1', sha256='28fb66d6107ee66248d5cf508c79de03d0621852a0ddeba7301801d3d859f463') version('4.3', sha256='169920322eb9b9c6a334674231479d04df72440257c17870aaa0139d74416781') variant('pic', default=True, description='Build with position independent code') depends_on('cmake', when='@5.2.1:', type='build') depends_on('blas') # CMake installation method def install(self, spec, prefix): cmake_args = [ '-Denable_blaslib=OFF', '-DBLAS_blas_LIBRARY={0}'.format(spec['blas'].libs.joined()) ] if '+pic' in spec: cmake_args.extend([ '-DCMAKE_POSITION_INDEPENDENT_CODE=ON' ]) cmake_args.extend(std_cmake_args) with working_dir('spack-build', create=True): cmake('..', *cmake_args) make() make('install') # Pre-cmake installation method @when('@4.3') def install(self, spec, prefix): config = [] # Define make.inc file config.extend([ 'PLAT = _x86_64', 'SuperLUroot = %s' % self.stage.source_path, # 'SUPERLULIB = $(SuperLUroot)/lib/libsuperlu$(PLAT).a', 'SUPERLULIB = $(SuperLUroot)/lib/libsuperlu_{0}.a' \ .format(self.spec.version), 'BLASDEF = -DUSE_VENDOR_BLAS', 'BLASLIB = {0}'.format(spec['blas'].libs.ld_flags), # or BLASLIB = -L/usr/lib64 -lblas 'TMGLIB = libtmglib.a', 'LIBS = $(SUPERLULIB) $(BLASLIB)', 'ARCH = ar', 'ARCHFLAGS = cr', 'RANLIB = {0}'.format('ranlib' if which('ranlib') else 'echo'), 'CC = {0}'.format(os.environ['CC']), 'FORTRAN = {0}'.format(os.environ['FC']), 'LOADER = 
{0}'.format(os.environ['CC']), 'CDEFS = -DAdd_' ]) if '+pic' in spec: config.extend([ # Use these lines instead when pic_flag capability arrives 'CFLAGS = -O3 {0}'.format(self.compiler.cc_pic_flag), 'NOOPTS = {0}'.format(self.compiler.cc_pic_flag), 'FFLAGS = -O2 {0}'.format(self.compiler.f77_pic_flag), 'LOADOPTS = {0}'.format(self.compiler.cc_pic_flag) ]) else: config.extend([ 'CFLAGS = -O3', 'NOOPTS = ', 'FFLAGS = -O2', 'LOADOPTS = ' ]) # Write configuration options to make.inc file with open('make.inc', 'w') as inc: for option in config: inc.write('{0}\n'.format(option)) make(parallel=False) # Install manually install_tree('lib', prefix.lib) headers = glob.glob(join_path('SRC', '*.h')) mkdir(prefix.include) for h in headers: install(h, prefix.include)
If you want to check or change what types of cookies you accept, this can usually be altered within your browser settings. You can block cookies at any time by activating the setting on your browser that allows you to refuse the setting of all or some cookies. We suggest consulting the Help section of your browser or taking a look at the About Cookies website which offers guidance for all modern browsers. However, if you use your browser settings to block all cookies (including essential cookies) you may not be able to access all or parts of our site.
# -*- coding: utf-8 -*- # Copyright 2017 The Chromium OS Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Test chrome_on_cros_bisector module.""" from __future__ import print_function import copy import itertools import os import mock from chromite.cli import flash from chromite.cros_bisect import common from chromite.cros_bisect import builder as builder_module from chromite.cros_bisect import evaluator as evaluator_module from chromite.cros_bisect import chrome_on_cros_bisector from chromite.cros_bisect import git_bisector_unittest from chromite.lib import commandline from chromite.lib import cros_test_lib from chromite.lib import gs from chromite.lib import gs_unittest from chromite.lib import partial_mock class DummyEvaluator(evaluator_module.Evaluator): """Evaluator which just return empty score.""" # pylint: disable=unused-argument def Evaluate(self, remote, build_label, repeat): return common.Score() def CheckLastEvaluate(self, build_label, repeat=1): return common.Score() class TestChromeOnCrosBisector(cros_test_lib.MockTempDirTestCase): """Tests ChromeOnCrosBisector class.""" BOARD = 'samus' TEST_NAME = 'graphics_WebGLAquarium' METRIC = 'avg_fps_1000_fishes/summary/value' REPORT_FILE = 'reports.json' DUT_ADDR = '192.168.1.1' DUT = commandline.DeviceParser(commandline.DEVICE_SCHEME_SSH)(DUT_ADDR) # Be aware that GOOD_COMMIT_INFO and BAD_COMMIT_INFO should be assigned via # copy.deepcopy() as their users are likely to change the content. 
GOOD_COMMIT_SHA1 = '44af5c9a5505' GOOD_COMMIT_TIMESTAMP = 1486526594 GOOD_COMMIT_SCORE = common.Score([100]) GOOD_COMMIT_INFO = common.CommitInfo( sha1=GOOD_COMMIT_SHA1, timestamp=GOOD_COMMIT_TIMESTAMP, title='good', label='last-known-good ', score=GOOD_COMMIT_SCORE) BAD_COMMIT_SHA1 = '6a163bb66c3e' BAD_COMMIT_TIMESTAMP = 1486530021 BAD_COMMIT_SCORE = common.Score([80]) BAD_COMMIT_INFO = common.CommitInfo( sha1=BAD_COMMIT_SHA1, timestamp=BAD_COMMIT_TIMESTAMP, title='bad', label='last-known-bad ', score=BAD_COMMIT_SCORE) GOOD_CROS_VERSION = 'R60-9592.50.0' BAD_CROS_VERSION = 'R60-9592.51.0' CULPRIT_COMMIT_SHA1 = '12345abcde' CULPRIT_COMMIT_TIMESTAMP = 1486530000 CULPRIT_COMMIT_SCORE = common.Score([81]) CULPRIT_COMMIT_INFO = common.CommitInfo( sha1=CULPRIT_COMMIT_SHA1, timestamp=CULPRIT_COMMIT_TIMESTAMP, title='bad', score=CULPRIT_COMMIT_SCORE) THRESHOLD_SPLITTER = 95 # Score between good and bad, closer to good side. THRESHOLD = 5 # Distance between good score and splitter. REPEAT = 3 GOOD_METADATA_CONTENT = '\n'.join([ '{', ' "metadata-version": "2",', ' "toolchain-url": "2017/05/%(target)s-2017.05.25.101355.tar.xz",', ' "suite_scheduling": true,', ' "build_id": 1644146,', ' "version": {', ' "full": "R60-9592.50.0",', ' "android-branch": "git_mnc-dr-arc-m60",', ' "chrome": "60.0.3112.53",', ' "platform": "9592.50.0",', ' "milestone": "60",', ' "android": "4150402"', ' }', '}']) def setUp(self): """Sets up test case.""" self.options = cros_test_lib.EasyAttr( base_dir=self.tempdir, board=self.BOARD, reuse_repo=True, good=self.GOOD_COMMIT_SHA1, bad=self.BAD_COMMIT_SHA1, remote=self.DUT, eval_repeat=self.REPEAT, auto_threshold=False, reuse_eval=False, cros_flash_sleep=0.01, cros_flash_retry=3, cros_flash_backoff=1, eval_raise_on_error=False, skip_failed_commit=False) self.repo_dir = os.path.join(self.tempdir, builder_module.Builder.DEFAULT_REPO_DIR) self.SetUpBisector() def SetUpBisector(self): """Instantiates self.bisector using self.options.""" self.evaluator = 
DummyEvaluator(self.options) self.builder = builder_module.Builder(self.options) self.bisector = chrome_on_cros_bisector.ChromeOnCrosBisector( self.options, self.builder, self.evaluator) def SetUpBisectorWithCrosVersion(self): """Instantiates self.bisector using CrOS version as good and bad options.""" self.options.good = self.GOOD_CROS_VERSION self.options.bad = self.BAD_CROS_VERSION self.SetUpBisector() def SetDefaultCommitInfo(self): """Sets up default commit info.""" self.bisector.good_commit_info = copy.deepcopy(self.GOOD_COMMIT_INFO) self.bisector.bad_commit_info = copy.deepcopy(self.BAD_COMMIT_INFO) def testInit(self): """Tests __init__() with SHA1 as good and bad options.""" self.assertEqual(self.GOOD_COMMIT_SHA1, self.bisector.good_commit) self.assertIsNone(self.bisector.good_cros_version) self.assertEqual(self.BAD_COMMIT_SHA1, self.bisector.bad_commit) self.assertIsNone(self.bisector.bad_cros_version) self.assertFalse(self.bisector.bisect_between_cros_version) self.assertEqual(self.DUT_ADDR, self.bisector.remote.raw) self.assertEqual(self.REPEAT, self.bisector.eval_repeat) self.assertEqual(self.builder, self.bisector.builder) self.assertEqual(self.repo_dir, self.bisector.repo_dir) self.assertIsNone(self.bisector.good_commit_info) self.assertIsNone(self.bisector.bad_commit_info) self.assertEqual(0, len(self.bisector.bisect_log)) self.assertIsNone(self.bisector.threshold) self.assertTrue(not self.bisector.current_commit) def testInitCrosVersion(self): """Tests __init__() with CrOS version as good and bad options.""" self.SetUpBisectorWithCrosVersion() self.assertEqual(self.GOOD_CROS_VERSION, self.bisector.good_cros_version) self.assertIsNone(self.bisector.good_commit) self.assertEqual(self.BAD_CROS_VERSION, self.bisector.bad_cros_version) self.assertIsNone(self.bisector.bad_commit) self.assertTrue(self.bisector.bisect_between_cros_version) self.assertEqual(self.DUT_ADDR, self.bisector.remote.raw) self.assertEqual(self.REPEAT, self.bisector.eval_repeat) 
self.assertEqual(self.builder, self.bisector.builder) self.assertEqual(self.repo_dir, self.bisector.repo_dir) self.assertIsNone(self.bisector.good_commit_info) self.assertIsNone(self.bisector.bad_commit_info) self.assertEqual(0, len(self.bisector.bisect_log)) self.assertIsNone(self.bisector.threshold) self.assertTrue(not self.bisector.current_commit) def testInitMissingRequiredArgs(self): """Tests that ChromeOnCrosBisector raises for missing required argument.""" options = cros_test_lib.EasyAttr() with self.assertRaises(common.MissingRequiredOptionsException) as cm: chrome_on_cros_bisector.ChromeOnCrosBisector(options, self.builder, self.evaluator) exception_message = str(cm.exception) self.assertIn('Missing command line', exception_message) self.assertIn('ChromeOnCrosBisector', exception_message) for arg in chrome_on_cros_bisector.ChromeOnCrosBisector.REQUIRED_ARGS: self.assertIn(arg, exception_message) def testCheckCommitFormat(self): """Tests CheckCommitFormat().""" CheckCommitFormat = ( chrome_on_cros_bisector.ChromeOnCrosBisector.CheckCommitFormat) self.assertEqual(self.GOOD_COMMIT_SHA1, CheckCommitFormat(self.GOOD_COMMIT_SHA1)) self.assertEqual(self.GOOD_CROS_VERSION, CheckCommitFormat(self.GOOD_CROS_VERSION)) self.assertEqual('R60-9592.50.0', CheckCommitFormat('60.9592.50.0')) invalid = 'bad_sha1' self.assertIsNone(CheckCommitFormat(invalid)) def testObtainBisectBoundaryScoreImpl(self): """Tests ObtainBisectBoundaryScoreImpl().""" git_mock = self.StartPatcher(git_bisector_unittest.GitMock(self.repo_dir)) git_mock.AddRunGitResult(['checkout', self.GOOD_COMMIT_SHA1]) git_mock.AddRunGitResult(['checkout', self.BAD_COMMIT_SHA1]) build_deploy_eval_mock = self.PatchObject( chrome_on_cros_bisector.ChromeOnCrosBisector, 'BuildDeployEval') build_deploy_eval_mock.side_effect = [self.GOOD_COMMIT_SCORE, self.BAD_COMMIT_SCORE] self.assertEqual(self.GOOD_COMMIT_SCORE, self.bisector.ObtainBisectBoundaryScoreImpl(True)) self.assertEqual(self.BAD_COMMIT_SCORE, 
self.bisector.ObtainBisectBoundaryScoreImpl(False)) self.assertEqual( [mock.call(customize_build_deploy=None, eval_label=None), mock.call(customize_build_deploy=None, eval_label=None)], build_deploy_eval_mock.call_args_list) def testObtainBisectBoundaryScoreImplCrosVersion(self): """Tests ObtainBisectBoundaryScoreImpl() with CrOS version.""" self.SetUpBisectorWithCrosVersion() # Inject good_commit and bad_commit as if # bisector.ResolveChromeBisectRangeFromCrosVersion() being run. self.bisector.good_commit = self.GOOD_COMMIT_SHA1 self.bisector.bad_commit = self.BAD_COMMIT_SHA1 git_mock = self.StartPatcher(git_bisector_unittest.GitMock(self.repo_dir)) git_mock.AddRunGitResult(['checkout', self.GOOD_COMMIT_SHA1]) git_mock.AddRunGitResult(['checkout', self.BAD_COMMIT_SHA1]) self.PatchObject(chrome_on_cros_bisector.ChromeOnCrosBisector, 'UpdateCurrentCommit') evaluate_mock = self.PatchObject(DummyEvaluator, 'Evaluate') # Mock FlashCrosImage() to verify that customize_build_deploy is assigned # as expected. 
flash_cros_image_mock = self.PatchObject( chrome_on_cros_bisector.ChromeOnCrosBisector, 'FlashCrosImage') evaluate_mock.return_value = self.GOOD_COMMIT_SCORE self.assertEqual(self.GOOD_COMMIT_SCORE, self.bisector.ObtainBisectBoundaryScoreImpl(True)) flash_cros_image_mock.assert_called_with( self.bisector.GetCrosXbuddyPath(self.GOOD_CROS_VERSION)) evaluate_mock.assert_called_with( self.DUT, 'cros_%s' % self.GOOD_CROS_VERSION, self.REPEAT) evaluate_mock.return_value = self.BAD_COMMIT_SCORE self.assertEqual(self.BAD_COMMIT_SCORE, self.bisector.ObtainBisectBoundaryScoreImpl(False)) flash_cros_image_mock.assert_called_with( self.bisector.GetCrosXbuddyPath(self.BAD_CROS_VERSION)) evaluate_mock.assert_called_with( self.DUT, 'cros_%s' % self.BAD_CROS_VERSION, self.REPEAT) def testObtainBisectBoundaryScoreImplCrosVersionFlashError(self): """Tests ObtainBisectBoundaryScoreImpl() with CrOS version.""" self.SetUpBisectorWithCrosVersion() # Inject good_commit and bad_commit as if # bisector.ResolveChromeBisectRangeFromCrosVersion() being run. self.bisector.good_commit = self.GOOD_COMMIT_SHA1 self.bisector.bad_commit = self.BAD_COMMIT_SHA1 git_mock = self.StartPatcher(git_bisector_unittest.GitMock(self.repo_dir)) git_mock.AddRunGitResult(['checkout', self.GOOD_COMMIT_SHA1]) git_mock.AddRunGitResult(['checkout', self.BAD_COMMIT_SHA1]) self.PatchObject(chrome_on_cros_bisector.ChromeOnCrosBisector, 'UpdateCurrentCommit') evaluate_mock = self.PatchObject(DummyEvaluator, 'Evaluate') # Mock FlashCrosImage() to verify that customize_build_deploy is assigned # as expected. 
flash_cros_image_mock = self.PatchObject( chrome_on_cros_bisector.ChromeOnCrosBisector, 'FlashCrosImage') flash_cros_image_mock.side_effect = flash.FlashError('Flash failed.') with self.assertRaises(flash.FlashError): self.bisector.ObtainBisectBoundaryScoreImpl(True) flash_cros_image_mock.assert_called_with( self.bisector.GetCrosXbuddyPath(self.GOOD_CROS_VERSION)) evaluate_mock.assert_not_called() with self.assertRaises(flash.FlashError): self.bisector.ObtainBisectBoundaryScoreImpl(False) flash_cros_image_mock.assert_called_with( self.bisector.GetCrosXbuddyPath(self.BAD_CROS_VERSION)) evaluate_mock.assert_not_called() def testGetCrosXbuddyPath(self): """Tests GetCrosXbuddyPath().""" self.assertEqual( 'xbuddy://remote/%s/%s/test' % (self.BOARD, self.GOOD_CROS_VERSION), self.bisector.GetCrosXbuddyPath(self.GOOD_CROS_VERSION)) def testExchangeChromeSanityCheck(self): """Tests the flow of exchanging Chrome between good and bad CrOS.""" self.SetUpBisectorWithCrosVersion() # Inject good_commit and bad_commit as if # bisector.ResolveChromeBisectRangeFromCrosVersion() has been run. self.bisector.good_commit = self.GOOD_COMMIT_SHA1 self.bisector.bad_commit = self.BAD_COMMIT_SHA1 # Inject commit_info and threshold as if # bisector.ObtainBisectBoundaryScore() and bisector.GetThresholdFromUser() # has been run. self.SetDefaultCommitInfo() self.bisector.threshold = self.THRESHOLD # Try bad Chrome first. 
git_mock = self.StartPatcher(git_bisector_unittest.GitMock(self.repo_dir)) git_mock.AddRunGitResult(['checkout', self.BAD_COMMIT_SHA1]) git_mock.AddRunGitResult(['checkout', self.GOOD_COMMIT_SHA1]) self.PatchObject(chrome_on_cros_bisector.ChromeOnCrosBisector, 'UpdateCurrentCommit') evaluate_mock = self.PatchObject(DummyEvaluator, 'Evaluate') expected_evaluate_calls = [ mock.call(self.DUT, x, self.REPEAT) for x in [ 'cros_%s_cr_%s' % (self.GOOD_CROS_VERSION, self.BAD_COMMIT_SHA1), 'cros_%s_cr_%s' % (self.BAD_CROS_VERSION, self.GOOD_COMMIT_SHA1)]] # Mock FlashCrosImage() to verify that customize_build_deploy is assigned # as expected. flash_cros_image_mock = self.PatchObject( chrome_on_cros_bisector.ChromeOnCrosBisector, 'FlashCrosImage') expected_flash_cros_calls = [ mock.call(self.bisector.GetCrosXbuddyPath(self.GOOD_CROS_VERSION)), mock.call(self.bisector.GetCrosXbuddyPath(self.BAD_CROS_VERSION))] # Make sure bisector.BuildDeploy() is also called. build_deploy_mock = self.PatchObject( chrome_on_cros_bisector.ChromeOnCrosBisector, 'BuildDeploy') # Assume culprit commit is in Chrome side, i.e. first score is bad. evaluate_mock.side_effect = [self.BAD_COMMIT_SCORE, self.GOOD_COMMIT_SCORE] self.assertTrue(self.bisector.ExchangeChromeSanityCheck()) flash_cros_image_mock.assert_has_calls(expected_flash_cros_calls) evaluate_mock.assert_has_calls(expected_evaluate_calls) self.assertEqual(2, build_deploy_mock.call_count) flash_cros_image_mock.reset_mock() evaluate_mock.reset_mock() build_deploy_mock.reset_mock() # Assume culprit commit is not in Chrome side, i.e. first score is good. 
evaluate_mock.side_effect = [self.GOOD_COMMIT_SCORE, self.BAD_COMMIT_SCORE] self.assertFalse(self.bisector.ExchangeChromeSanityCheck()) flash_cros_image_mock.assert_has_calls(expected_flash_cros_calls) evaluate_mock.assert_has_calls(expected_evaluate_calls) self.assertEqual(2, build_deploy_mock.call_count) def testExchangeChromeSanityCheckFlashError(self): """Tests the flow of exchanging Chrome between good and bad CrOS.""" self.SetUpBisectorWithCrosVersion() # Inject good_commit and bad_commit as if # bisector.ResolveChromeBisectRangeFromCrosVersion() has been run. self.bisector.good_commit = self.GOOD_COMMIT_SHA1 self.bisector.bad_commit = self.BAD_COMMIT_SHA1 # Inject commit_info and threshold as if # bisector.ObtainBisectBoundaryScore() and bisector.GetThresholdFromUser() # has been run. self.SetDefaultCommitInfo() self.bisector.threshold = self.THRESHOLD # Try bad Chrome first. git_mock = self.StartPatcher(git_bisector_unittest.GitMock(self.repo_dir)) git_mock.AddRunGitResult(['checkout', self.BAD_COMMIT_SHA1]) git_mock.AddRunGitResult(['checkout', self.GOOD_COMMIT_SHA1]) self.PatchObject(chrome_on_cros_bisector.ChromeOnCrosBisector, 'UpdateCurrentCommit') evaluate_mock = self.PatchObject(DummyEvaluator, 'Evaluate') # Mock FlashCrosImage() to verify that customize_build_deploy is assigned # as expected. 
flash_cros_image_mock = self.PatchObject( chrome_on_cros_bisector.ChromeOnCrosBisector, 'FlashCrosImage', side_effect=flash.FlashError('Flash failed.')) build_deploy_mock = self.PatchObject( chrome_on_cros_bisector.ChromeOnCrosBisector, 'BuildDeploy') with self.assertRaises(flash.FlashError): self.bisector.ExchangeChromeSanityCheck() evaluate_mock.assert_not_called() flash_cros_image_mock.assert_called() build_deploy_mock.assert_not_called() def testFlashImage(self): """Tests FlashImage().""" flash_mock = self.PatchObject(flash, 'Flash') xbuddy_path = self.bisector.GetCrosXbuddyPath(self.GOOD_CROS_VERSION) self.bisector.FlashCrosImage(xbuddy_path) flash_mock.assert_called_with( self.DUT, xbuddy_path, board=self.BOARD, clobber_stateful=True, disable_rootfs_verification=True) def testFlashImageRetry(self): """Tests FlashImage() with retry success.""" flash_mock_call_counter = itertools.count() def flash_mock_return(*unused_args, **unused_kwargs): nth_call = next(flash_mock_call_counter) if nth_call < 3: raise flash.FlashError('Flash failed.') flash_mock = self.PatchObject(flash, 'Flash') flash_mock.side_effect = flash_mock_return xbuddy_path = self.bisector.GetCrosXbuddyPath(self.GOOD_CROS_VERSION) self.bisector.FlashCrosImage(xbuddy_path) flash_mock.assert_called_with( self.DUT, xbuddy_path, board=self.BOARD, clobber_stateful=True, disable_rootfs_verification=True) def testFlashImageRetryFailed(self): """Tests FlashImage() with retry failed.""" flash_mock = self.PatchObject(flash, 'Flash') flash_mock.side_effect = flash.FlashError('Flash failed.') xbuddy_path = self.bisector.GetCrosXbuddyPath(self.GOOD_CROS_VERSION) with self.assertRaises(flash.FlashError): self.bisector.FlashCrosImage(xbuddy_path) flash_mock.assert_called_with( self.DUT, xbuddy_path, board=self.BOARD, clobber_stateful=True, disable_rootfs_verification=True) def testCrosVersionToChromeCommit(self): """Tests CrosVersionToChromeCommit().""" metadata_url = ( 
'gs://chromeos-image-archive/%s-release/%s/partial-metadata.json' % (self.BOARD, self.GOOD_CROS_VERSION)) gs_mock = self.StartPatcher(gs_unittest.GSContextMock()) gs_mock.AddCmdResult(['cat', metadata_url], output=self.GOOD_METADATA_CONTENT) git_log_content = '\n'.join([ '8967dd66ad72 (tag: 60.0.3112.53) Publish DEPS for Chromium ' '60.0.3112.53', '27ed0cc0c2f4 Incrementing VERSION to 60.0.3112.53']) git_mock = self.StartPatcher(git_bisector_unittest.GitMock(self.repo_dir)) git_mock.AddRunGitResult(['log', '--oneline', '-n', '2', '60.0.3112.53'], output=git_log_content) self.bisector.gs_ctx = gs.GSContext() self.assertEqual( '27ed0cc0c2f4', self.bisector.CrosVersionToChromeCommit(self.GOOD_CROS_VERSION)) def testCrosVersionToChromeCommitFail(self): """Tests failure case of CrosVersionToChromeCommit().""" metadata_url = ( 'gs://chromeos-image-archive/%s-release/%s/partial-metadata.json' % (self.BOARD, self.GOOD_CROS_VERSION)) gs_mock = self.StartPatcher(gs_unittest.GSContextMock()) gs_mock.AddCmdResult(['cat', metadata_url], returncode=1) self.bisector.gs_ctx = gs.GSContext() self.assertIsNone( self.bisector.CrosVersionToChromeCommit(self.GOOD_CROS_VERSION)) metadata_content = 'not_a_json' gs_mock.AddCmdResult(['cat', metadata_url], output=metadata_content) self.assertIsNone( self.bisector.CrosVersionToChromeCommit(self.GOOD_CROS_VERSION)) metadata_content = '\n'.join([ '{', ' "metadata-version": "2",', ' "toolchain-url": "2017/05/%(target)s-2017.05.25.101355.tar.xz",', ' "suite_scheduling": true,', ' "build_id": 1644146,', ' "version": {}', '}']) gs_mock.AddCmdResult(['cat', metadata_url], output=metadata_content) self.assertIsNone( self.bisector.CrosVersionToChromeCommit(self.GOOD_CROS_VERSION)) gs_mock.AddCmdResult(['cat', metadata_url], output=self.GOOD_METADATA_CONTENT) git_mock = self.StartPatcher(git_bisector_unittest.GitMock(self.repo_dir)) git_mock.AddRunGitResult(['log', '--oneline', '-n', '2', '60.0.3112.53'], returncode=128) self.assertIsNone( 
self.bisector.CrosVersionToChromeCommit(self.GOOD_CROS_VERSION)) def testResolveChromeBisectRangeFromCrosVersion(self): """Tests ResolveChromeBisectRangeFromCrosVersion().""" self.SetUpBisectorWithCrosVersion() cros_to_chrome_mock = self.PatchObject( chrome_on_cros_bisector.ChromeOnCrosBisector, 'CrosVersionToChromeCommit') cros_to_chrome_mock.side_effect = [self.GOOD_COMMIT_SHA1, self.BAD_COMMIT_SHA1] self.assertTrue(self.bisector.ResolveChromeBisectRangeFromCrosVersion()) self.assertTrue(self.GOOD_COMMIT_SHA1, self.bisector.good_commit) self.assertTrue(self.BAD_COMMIT_SHA1, self.bisector.bad_commit) cros_to_chrome_mock.assert_has_calls([mock.call(self.GOOD_CROS_VERSION), mock.call(self.BAD_CROS_VERSION)]) cros_to_chrome_mock.reset_mock() cros_to_chrome_mock.side_effect = [None] self.assertFalse(self.bisector.ResolveChromeBisectRangeFromCrosVersion()) cros_to_chrome_mock.assert_called_with(self.GOOD_CROS_VERSION) cros_to_chrome_mock.reset_mock() cros_to_chrome_mock.side_effect = [self.GOOD_COMMIT_SHA1, None] self.assertFalse(self.bisector.ResolveChromeBisectRangeFromCrosVersion()) cros_to_chrome_mock.assert_has_calls([mock.call(self.GOOD_CROS_VERSION), mock.call(self.BAD_CROS_VERSION)]) def testPrepareBisect(self): """Tests PrepareBisect().""" # Pass SanityCheck(). git_mock = self.StartPatcher(git_bisector_unittest.GitMock(self.repo_dir)) git_mock.AddRunGitResult( partial_mock.InOrder(['rev-list', self.GOOD_COMMIT_SHA1])) git_mock.AddRunGitResult( partial_mock.InOrder(['rev-list', self.BAD_COMMIT_SHA1])) git_mock.AddRunGitResult( partial_mock.InOrder(['show', self.GOOD_COMMIT_SHA1]), output=str(self.GOOD_COMMIT_TIMESTAMP)) git_mock.AddRunGitResult( partial_mock.InOrder(['show', self.BAD_COMMIT_SHA1]), output=str(self.BAD_COMMIT_TIMESTAMP)) # Inject score for both side. 
git_mock.AddRunGitResult(['checkout', self.GOOD_COMMIT_SHA1]) git_mock.AddRunGitResult(['checkout', self.BAD_COMMIT_SHA1]) build_deploy_eval_mock = self.PatchObject( chrome_on_cros_bisector.ChromeOnCrosBisector, 'BuildDeployEval') build_deploy_eval_mock.side_effect = [self.GOOD_COMMIT_SCORE, self.BAD_COMMIT_SCORE] # Set auto_threshold. self.bisector.auto_threshold = True self.assertTrue(self.bisector.PrepareBisect()) def testPrepareBisectCrosVersion(self): """Tests PrepareBisect() with CrOS version.""" self.SetUpBisectorWithCrosVersion() self.StartPatcher(gs_unittest.GSContextMock()) self.PatchObject(builder_module.Builder, 'SyncToHead') self.PatchObject( chrome_on_cros_bisector.ChromeOnCrosBisector, 'ResolveChromeBisectRangeFromCrosVersion').return_value = True self.PatchObject( chrome_on_cros_bisector.ChromeOnCrosBisector, 'SanityCheck').return_value = True self.PatchObject( chrome_on_cros_bisector.ChromeOnCrosBisector, 'ObtainBisectBoundaryScore').return_value = True self.PatchObject( chrome_on_cros_bisector.ChromeOnCrosBisector, 'GetThresholdFromUser').return_value = True self.PatchObject( chrome_on_cros_bisector.ChromeOnCrosBisector, 'ExchangeChromeSanityCheck').return_value = True self.assertTrue(self.bisector.PrepareBisect())
For best health benefits, medical experts advise people to perform a minimum of half an hour of cardio train at the very least 2 to 3 occasions in per week. Daily bodily activity will help forestall coronary heart disease and stroke by strengthening your heart muscle, lowering your blood strain, elevating your high-density lipoprotein (HDL) ranges (good ldl cholesterol) and reducing low-density lipoprotein (LDL) ranges (unhealthy cholesterol), bettering blood stream, and growing your heart’s working capacity. To consider the problems/implications/benefits of train participation in relation to unhealthy people and particular populations just about professional pointers. You can choose to review both modern problems with food and vitamin or extreme environments and ergogenic aids. On this page well will explore the significance of sports diet and the way a sports activities nutritionist can assist your training regime. Develop a complicated crucial understanding of train and health behaviour theories and their application. The module considers the function of train and physical activity as a prescription remedy to medical ailments and develops the scientific expertise to observe and assess health, health and performance. Discuss and critically evaluate the position of vitamin in the aetiology and administration of chosen problems. Consistent train and proper nutrition play a vital role in your potential to maintain your general well being and quality of life. Timing of amino acid-carbohydrate ingestion alters anabolic response of muscle to resistance exercise. Her work seems on various websites and covers numerous topics together with neuroscience, physiology, nutrition and fitness. If you exercise vigorously for 30 minutes a day, you might discover you sleep much more soundly at night. Children ought to get 60 minutes of moderately intense physical exercise day-after-day to realize maximum well being advantages. 
In May 2016 the new function-constructed Rowett Institute of Nutrition and Health constructing on the University’s Foresterhill campus opened. Case Western Reserve University notes that diet and train are vital to optimal athletic efficiency. Many scientific disciplines contribute to train and health sciences, together with physiology, biochemistry, vitamin, anatomy and biomechanics. If you are looking to improve your weight loss plan to enrich your coaching plan, use our superior search device to discover a sports activities vitamin professional in your area that may assist. These signs let us know that the exercise has depleted the muscle’s gas resources, caused some minor harm, and that the muscle is in want of replenishment and restore. In addition, since muscle protein is degraded during exercise, the addition of a relatively great amount of protein to your put up exercise meal is critical to help rebuild the structural elements of the muscle.
# -*- coding: utf-8 -*- # Generated by Django 1.11.3 on 2017-07-27 06:29 from __future__ import unicode_literals from django.conf import settings from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('api', '0004_auto_20170725_1332'), ] operations = [ migrations.AlterField( model_name='cace', name='sales_main_staff', field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='sales_main_staff', to=settings.AUTH_USER_MODEL), ), migrations.AlterField( model_name='cace', name='sales_sub_staff', field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='sales_sub_staff', to=settings.AUTH_USER_MODEL), ), migrations.AlterField( model_name='cace', name='technical_main_staff', field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='technical_main_staff', to=settings.AUTH_USER_MODEL), ), migrations.AlterField( model_name='cace', name='technical_sub_staff', field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='technical_sub_staff', to=settings.AUTH_USER_MODEL), ), ]
Living shorelines or natural infrastructure can help lessen storm surge and flooding while also providing habitat, water filtration and beautiful places to enjoy. Despite their diminutive appearance, scientists discovered that mangroves provide value by storing huge amounts of carbon. What's the Value of Nature? Collaboration between Dow and The Nature Conservancy demonstrates that valuing nature can be profitable for business. Read more about What's the Value of Nature?
# encoding: utf-8 # # This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this file, # You can obtain one at http://mozilla.org/MPL/2.0/. # # Author: Kyle Lahnakoski (kyle@lahnakoski.com) # from __future__ import unicode_literals from __future__ import division from dzAlerts.util.thread.threads import Queue from ..env.logs import Log class worker(object): def __init__(func, inbound, outbound, logging): logger = Log_usingInterProcessQueue(logging) class Log_usingInterProcessQueue(Log): def __init__(self, outbound): self.outbound = outbound def write(self, template, params): self.outbound.put({"template": template, "param": params}) class Multiprocess(object): # THE COMPLICATION HERE IS CONNECTING THE DISPARATE LOGGING TO # A CENTRAL POINT def __init__(self, functions): self.outbound = Queue() self.inbound = Queue() self.inbound = Queue() # MAKE # MAKE THREADS self.threads = [] for t, f in enumerate(functions): thread = worker( "worker " + unicode(t), f, self.inbound, self.outbound, ) self.threads.append(thread) def __enter__(self): return self # WAIT FOR ALL QUEUED WORK TO BE DONE BEFORE RETURNING def __exit__(self, a, b, c): try: self.inbound.close() # SEND STOPS TO WAKE UP THE WORKERS WAITING ON inbound.pop() except Exception, e: Log.warning("Problem adding to inbound", e) self.join() # IF YOU SENT A stop(), OR STOP, YOU MAY WAIT FOR SHUTDOWN def join(self): try: # WAIT FOR FINISH for t in self.threads: t.join() except (KeyboardInterrupt, SystemExit): Log.note("Shutdow Started, please be patient") except Exception, e: Log.error("Unusual shutdown!", e) finally: for t in self.threads: t.keep_running = False for t in self.threads: t.join() self.inbound.close() self.outbound.close() # RETURN A GENERATOR THAT HAS len(parameters) RESULTS (ANY ORDER) def execute(self, parameters): # FILL QUEUE WITH WORK self.inbound.extend(parameters) num = len(parameters) def output(): for i in xrange(num): 
result = self.outbound.pop() yield result return output() # EXTERNAL COMMAND THAT RETURNS IMMEDIATELY def stop(self): self.inbound.close() # SEND STOPS TO WAKE UP THE WORKERS WAITING ON inbound.pop() for t in self.threads: t.keep_running = False
IF WE are going to survive, that means that we have to decide who “we” are. That’s so basic, so obvious, so essential that it’s amazing how few people give much thought to it. The Blacks have very little trouble deciding who they are. They know at a glance. They know by instinct. And, primitive as they may be, they are racially-minded to a degree that White folks would envy, if White folks had an ounce of sense. What do you think Ferguson was all about? What do you think bloc voting is all about? Race comes first for them — and they feel it in their bones.
# Copyright 2015 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Dummy benchmarks for the bisect FYI integration tests. The number they produce aren't meant to represent any actual performance data of the browser. For more information about these dummy benchmarks, see: https://goo.gl/WvZiiW """ import random from telemetry import benchmark from telemetry.page import legacy_page_test from core import perf_benchmark from page_sets import dummy_story_set class _DummyTest(legacy_page_test.LegacyPageTest): def __init__(self, avg, std): super(_DummyTest, self).__init__() self._avg = avg self._std = std def ValidateAndMeasurePage(self, page, tab, results): del tab # unused value = random.gauss(self._avg, self._std) results.AddMeasurement('gaussian-value', 'ms', value) class _DummyBenchmark(perf_benchmark.PerfBenchmark): page_set = dummy_story_set.DummyStorySet @benchmark.Info(emails=['crouleau@chromium.org'], component='Test>Telemetry') class DummyBenchmarkOne(_DummyBenchmark): """A low noise benchmark with mean=100 & std=1.""" def CreatePageTest(self, options): return _DummyTest(168, 1) @classmethod def Name(cls): return 'dummy_benchmark.stable_benchmark_1' @benchmark.Info(emails=['crouleau@chromium.org'], component='Test>Telemetry') class DummyBenchmarkTwo(_DummyBenchmark): """A noisy benchmark with mean=50 & std=20.""" def CreatePageTest(self, options): return _DummyTest(50, 20) @classmethod def Name(cls): return 'dummy_benchmark.noisy_benchmark_1'
Tiny silver hooked tag with a circular undecorated plate c. 10mm in diameter. There are two small circular attachment holes close to each other near the top; these have raised edges around them on the reverse, suggesting that they were bored or drilled through from the front. The hook at the bottom is thicker, with a distinct step on the front but a smooth junction on the reverse. It bends backwards but the tip is missing. 16.0mm long, 10.25mm wide, plate 0.5mm thick. Weight: 0.4g. Early-medieval hooked tags range in date from the 7th to the 11th centuries, but those with circular plates do not occur in the earliest contexts, 7th-century graves (Geake 1997, 66). An 8th- to 11th-century date is therefore likely. As the object is made of more than 10% precious metal and is over 300 years old, it constitutes potential Treasure under the Treasure Act 1996.
#!/usr/bin/python
"""Simple agglomerative (hierarchical) clustering of scalar values.

Adjacent clusters (in ascending order of their means) are repeatedly merged
while the gap between their means stays within a distance threshold.
"""
import sys


def mergeClosestClusters(CandidateCluster, distanceThreshold):
    """Merge, in place, the closest pair of adjacent clusters.

    Args:
        CandidateCluster: list of clusters (lists of floats), ordered so that
            cluster means are ascending.
        distanceThreshold: maximum allowed gap between adjacent cluster means
            for a merge to happen.

    Returns:
        True if a pair was merged, False if no adjacent pair is within the
        threshold (clustering has converged).
    """
    bestFirst = -1
    bestSecond = -1
    minDistance = None
    for i in range(len(CandidateCluster) - 1):
        averageFirst = sum(CandidateCluster[i]) / float(len(CandidateCluster[i]))
        averageSecond = sum(CandidateCluster[i + 1]) / float(len(CandidateCluster[i + 1]))
        if averageFirst > averageSecond:
            # Input invariant violated: clusters must be in ascending order.
            # FIX: the original printed to stdout and exited with status 0
            # (success); sys.exit(str) writes to stderr and exits with 1.
            sys.exit("wrong rank!")
        currentDistance = averageSecond - averageFirst
        if currentDistance <= distanceThreshold:
            # Track the closest qualifying pair seen so far.
            if minDistance is None or currentDistance < minDistance:
                minDistance = currentDistance
                bestFirst = i
                bestSecond = i + 1

    if bestFirst != -1 and bestSecond != -1:
        # Merge the winning pair and splice the result back in place.
        mergedCluster = CandidateCluster[bestFirst] + CandidateCluster[bestSecond]
        del CandidateCluster[bestFirst]
        del CandidateCluster[bestFirst]
        CandidateCluster.insert(bestFirst, mergedCluster)
        return True
    return False


def hierarchicalClustering(ldCandidatePct, distanceThreshold):
    """Cluster scalar values by repeated adjacent merging.

    Args:
        ldCandidatePct: list of numbers; sorted in place as a side effect.
        distanceThreshold: see mergeClosestClusters.

    Returns:
        List of clusters (each a list of values); empty for empty input.
    """
    ldCandidatePct.sort()
    # Start with one singleton cluster per value, then merge until no
    # adjacent pair is within the threshold.
    CandidateCluster = [[value] for value in ldCandidatePct]
    while CandidateCluster and mergeClosestClusters(CandidateCluster,
                                                    distanceThreshold):
        pass
    return CandidateCluster
Sharon Van Etten’s heart was broken; now, it seems, it’s fixed. Remind Me Tomorrow, the fifth album from the New Jersey-born singer-songwriter, is more settled than any she’s made before. Where she spent the last decade lacing complaints about love with irony and pith, she’s recently spoken of finding motherhood and, with it, a measure of peace.
#
# This file is part of the onema.io evee Package.
# For the full copyright and license information,
# please view the LICENSE file that was distributed
# with this source code.
#
# @author Juan Manuel Torres <software@onema.io>
#
# FIX: MutableMapping moved to collections.abc in Python 3.3 and the
# deprecated alias was removed from `collections` in Python 3.10.
from collections.abc import MutableMapping

from evee.event import Event


class GenericEvent(Event, MutableMapping):
    """Event carrying an arbitrary mapping of named arguments.

    Implements the full MutableMapping protocol, so arguments can be read
    and written with normal dict syntax directly on the event object.
    """

    def __init__(self, subject=None, arguments: dict = None):
        """Create an event.

        Args:
            subject: optional object this event relates to.
            arguments: optional dict of initial arguments; stored as-is
                (not copied), matching the original behavior.
        """
        super().__init__()
        self._arguments = arguments if arguments else {}
        self.__subject = subject

    def get_subject(self):
        """Return the subject this event was created for (may be None)."""
        return self.__subject

    def get_argument(self, key):
        """Return the argument stored under *key*.

        Raises:
            KeyError: with a descriptive message when *key* is absent.
        """
        try:
            return self[key]
        except KeyError:
            raise KeyError('Argument "{}" not found.'.format(key))

    def set_argument(self, key, value):
        """Store *value* under *key*."""
        self[key] = value

    def get_arguments(self):
        """Return a shallow copy of all arguments."""
        return dict(self._arguments)

    def set_arguments(self, args: dict = None):
        """Replace all arguments when *args* is truthy; return self (fluent)."""
        if args:
            self._arguments = args
        return self

    def has_argument(self, key):
        """Return True when *key* is present."""
        return key in self

    # --- MutableMapping protocol ------------------------------------------

    def __delitem__(self, key):
        del self._arguments[key]

    def __setitem__(self, key, value):
        self._arguments[key] = value

    def __iter__(self):
        return iter(self._arguments)

    def __getitem__(self, key):
        return self._arguments[key]

    def __len__(self):
        return len(self._arguments)
Are you looking for a day trip that combines beautiful vistas, history, excitement, and wild horses? Visit the Cape Lookout National Seashore in North Carolina! This beautiful preserve stretches over 56 miles of undeveloped beach. During your visit, you’ll find horses, an antique lighthouse, and a fascinating historic village! You won’t want to miss this slice of paradise! Cape Lookout National Seashore is just one of the many amazing attractions in the area. For a full list, download our free Emerald Isle Vacation Guide! Ditch the outdated travel books and get local recommendations from a source you can trust. We’ve listed the best restaurants, historic sites, museums, and family-friendly attractions on the Crystal Coast. Planning your vacation is easy when you use our handy Vacation Guide! Getting to Cape Lookout National Seashore can be a little tricky for first-time visitors. Let us help break it down for you! There are several choices for transportation, and it’s important to decide which is best for you before heading out. The most popular way to get to the park is by taking an authorized Cape Lookout National Seashore ferry. There are several ferry services available that are authorized. If you’d like to take an ATV or 4WD, there are vehicle ferries available. While there is no entry fee to the Cape, you will have to purchase a ticket to the ferry. If you’re hoping to get some exercise before visiting the Cape, you may kayak or take your boat from one of the barrier islands. There are also plenty of privately operated tours available to get on the island. These are a great way to learn fun Cape Lookout facts and get an incredible view of the shore! Once you get to Cape Lookout National Seashore, you’re free to roam! The beaches are one of the highlights of the park, with plenty of great swimming, shelling, and fishing available. The Horse Sense and Survival Tour is one of the most popular activities in the park. 
With the guidance of a ranger, you’ll travel to Shackleford Banks to get a first-hand look at wild horse behaviors and the ins and outs of managing a herd. If you’re traveling with your family, this tour is a must. Kids love being able to see the horses. Who knows: you might even have a future equestrian on your hands! If you’re interested in the history of the area, check out Portsmouth Village. This historic site was once a busy seaport. Inside, you can see exhibits on how the residents used to live. There are tours of the site available seasonally, as well as a self-guided audio tour. If you’re visiting the park from mid-May to mid-September, be sure to climb to the top of Cape Lookout Lighthouse! It was built in 1859 and features an antique, third-order Fresnel lens. Self-guided tours of the lighthouse run from Wednesday to Sunday, starting at 9:45 a.m. with the last tour beginning at 4:15 p.m. Tickets for the lighthouse tour are $8 for adults and $4 for children 12 and younger. After you’ve enjoyed a day at Cape Lookout National Seashore, return to a stunning rental home from Spinnaker’s Reach! You won’t find a better selection of properties on Emerald Isle. Whether you’re looking for rentals with a private pool or pet-friendly accommodations, you’ll find it in our inventory! Consider renting Hale Kena Kai, a spectacular eight-bedroom, seven-bathroom home that features a private book, fully furnished covered and uncovered decks, and a hot tub! Inside, there is a recreation room with a pool and a ping-pong table, separate televisions for gaming and movie-watching, and plenty of space to stretch out! The fully equipped kitchen is the perfect place to cook a feast for the family. Look at the rest of our rentals and book today! We look forward to having you.
# Python 3.4 on Mac OS X
"""Extract 'Auto req ID', 'Job Title' and 'Job Status' fields from
tab-separated job-description text files and append them to a CSV."""
import glob
import re
import csv


class Jd:
    """Parses job-description files and accumulates field values in self.ls."""

    # Maps parse() tags to the regex matching the corresponding source line.
    _PATTERNS = {
        'id': r'Auto\sreq\sID.*\n',
        'title': r'Job\sTitle.*\n',
        'status': r'Job\sStatus.*\n',
    }

    def __init__(self):
        # Values parsed so far; flushed (and cleared) by export_csv().
        self.ls = []

    def file_list(self, path):
        # NOTE: deliberately shadows this method with the resulting list,
        # preserving the original design -- callers iterate `jd.file_list`
        # as an attribute afterwards.
        self.file_list = glob.glob(path)

    def contents(self, f_name):
        """Load the whole file as one string (regexes need a string,
        not the list that readlines() would give)."""
        with open(f_name) as f:
            self.f_c = f.read()

    def parse(self, tag):
        """Find the line for *tag* in the loaded contents and append the
        text after the first tab to self.ls.

        Raises:
            ValueError: for an unknown tag (was a NameError in the original).
        """
        try:
            pattern = self._PATTERNS[tag]
        except KeyError:
            raise ValueError('unknown tag: {}'.format(tag))
        result = ''.join(re.findall(pattern, self.f_c))
        # FIX: maxsplit=1 tolerates additional tabs inside the field value;
        # the original unpacking required exactly one tab per line.
        _, value = re.split('\t', result, maxsplit=1)
        self.ls.append(value)

    def export_csv(self, path, rows):
        """Append *rows* as one CSV record and reset the accumulator."""
        with open(path, 'a') as f:
            writer = csv.writer(f)
            writer.writerow(rows)
        self.ls = []


if __name__ == '__main__':
    tags = ['id', 'title', 'status']
    path = '/Users/Sail/Desktop/*.txt'
    jd = Jd()
    jd.file_list(path)
    jd.export_csv('jd_modified.csv', tags)  # header row
    for f in jd.file_list:
        jd.contents(f)
        for tag in tags:
            jd.parse(tag)
        jd.export_csv('jd_modified.csv', jd.ls)
    print('Bingo')
Abbey College (Ramsey) have not yet added detail to the description of their organisation. If they become members and update their 'Organisation Description' it will automatically replace this default statement. Abbey College (Ramsey) does not have any Social Media links recorded in our database!
import re

from plone import api
from plone.app.layout.viewlets.common import ViewletBase
from plone.app.layout.navigation.root import getNavigationRoot
#from ftw.subsite.interfaces import ISubsite
from Products.Five.browser.pagetemplatefile import ViewPageTemplateFile

from ..behaviors import ISubSite
from ..utils import all_dexterity_fieldnames

# Splits CamelCase names into their words, e.g. 'BackgroundColor' ->
# ['Background', 'Color']. NOTE(review): a leading lowercase run is dropped
# by this pattern -- names are presumably always CamelCase; confirm.
decamel_regxp = re.compile('[A-Z][^A-Z]+')


class CSSViewlet(ViewletBase):
    """Viewlet that renders per-subsite CSS generated from specially named
    dexterity fields (prefix ``css_``) on the navigation root."""

    template = ViewPageTemplateFile('cssviewlet.pt')

    def __init__(self, context, request, view, manager=None):
        super(CSSViewlet, self).__init__(context, request, view, manager)
        # Set by render(); None when the context is not inside a subsite.
        self.subsite = None

    def render(self):
        """Render the CSS template only when the navigation root is a subsite."""
        nav_root = api.portal.get_navigation_root(context=self.context)
        if ISubSite.providedBy(nav_root):
            self.subsite = nav_root
            return self.template()
        else:
            self.subsite = None
            return ''

    def parse_fieldname(self, name):
        "parse css_tag[tagname]_id[id]_class[classname]_attr[attrname] format"
        # Returns [tag, id, class, cssattr]; any spec not present stays None.
        parsethese = ("tag", "id", "class", "cssattr")
        rawspecs = name.split('_')
        parsed_spec = [None, None, None, None]
        for rawspec in rawspecs:
            for idx, parsed in enumerate(parsethese):
                # NOTE(review): `in` is a substring test, not a prefix test,
                # so e.g. a spec containing "id" anywhere matches the "id"
                # slot -- confirm field names never contain these words
                # except as prefixes.
                value = rawspec[len(parsed):] if parsed in rawspec else None
                if value:
                    parsed_spec[idx] = value
        return parsed_spec

    def get_css(self):
        "generate css from specially formatted fields"
        if not self.subsite:
            return ''
        # Only fields whose name starts with the css_ marker participate.
        css_fields = [n for n in all_dexterity_fieldnames(self.subsite)
                      if n.startswith("css_")]
        styles = []
        for css_fn in css_fields:
            tag, id, klass, cssattr = self.parse_fieldname(css_fn[4:])  # strip prefix
            # NOTE(review): if the field name carries no tag spec, `selector`
            # starts as None and the += below raises TypeError -- presumably
            # every css_ field includes a tag; confirm.
            selector = tag
            # CamelCase segments become dash-separated lowercase identifiers.
            selector += '#' + '-'.join(decamel_regxp.findall(id)).lower() if id else ''
            selector += '.' + '-'.join(decamel_regxp.findall(klass)).lower() if klass else ''
            # convert BackgroundColor to background-color
            cssattr = '-'.join(decamel_regxp.findall(cssattr)).lower() if cssattr else ''
            # if cssatr, field value has just css attr value, othewise full 'attr: val' css"
            field_value = getattr(self.context, css_fn)
            if not field_value:
                field_value = ''
            attr_plus_val = cssattr + ": " + field_value if cssattr else field_value
            style = selector + " {\n %s;\n}" % attr_plus_val
            styles.append(style)
        return '\n'.join(styles)
Who do you think you are? Qbert? Jumping all over the place like a schizophrenic mad man stuck in blow up jump room. Yes, y’all know what I’m talkin’ about, the crazaziness that is the pogo monster! Well, for those of you who don’t know anything about anything, a pogo stick is a toy or for those of you who find offense to that, it’s an exercise equipment. The pogo stick is basically two pegs which you stand on with a spring in the middle that recoils and springs back when hitting the ground and a handle bar that’s attached to the pegs and springs. wha? what? A Pogo stick’s gonna make ya, JUMP JUMP, The springs on the stick’s gonna make ya JUMP JUMP, a MacDaddy’s gonna make ya JUMP JUMP, Clothes worn in the 80’s gonna make ya, JUMP JUMP! Supposedly, the Pogo stick was invented by two German guys, Max Pohlig and Ernst Gottschall back in 1920. The name of Pogo was also thought to be the combination of Po from Pohlig and Go from Gottschall, maybe. A two handed version of the Pogo stick was also patented but by a guy named George B. Hansburg in 1957. George was inspired by some story about a father making a Pogo like device for his daughter so she can travel to the local temple for prayers. I guess people can’t just walk or run to the temple anymore, they have to jump around and hurt themselves just to say some prayers. To each his own I guess. Over the years, the Pogo stick have been greatly improved upon. Pogo sticks seems almost indestructible nowadays. People can jump off of tall staircases and the only thing they would hurt is themselves, yeehaw! Anyway, since I was a little kid growing up in the 80’s, I’ve always wanted to be able to ride a Pogo stick but I just was never balanced enough for such contraptions. They actually look like a ton of fun, sort of like a trampoline that’s attached to your feet that can potentially kill you. Any of you guys out there ride Pogo sticks? Have had any bad experiences? Wanna share? Too embarrassed? Sissies! 
Here’s a video of some crazy modern Xtreme-Pogo Sticking action. It’s pretty wild!
import logging
import re
import time

from vtdb import dbexceptions
from vtdb import vtdb_logger
from vtproto import vtrpc_pb2

# Retry tuning knobs for exponential_backoff_retry (all times in ms).
INITIAL_DELAY_MS = 5
NUM_RETRIES = 3
MAX_DELAY_MS = 100
BACKOFF_MULTIPLIER = 2


def log_exception(exc, keyspace=None, tablet_type=None):
  """This method logs the exception.

  IntegrityErrors are routed to the dedicated integrity_error() hook;
  everything else goes through vtclient_exception().

  Args:
    exc: exception raised by calling code
    keyspace: keyspace for the exception
    tablet_type: tablet_type for the exception
  """
  logger_object = vtdb_logger.get_logger()
  # The shard is never known at this level, so it is always logged as None.
  shard_name = None
  if isinstance(exc, dbexceptions.IntegrityError):
    logger_object.integrity_error(exc)
  else:
    logger_object.vtclient_exception(keyspace, shard_name, tablet_type, exc)


def exponential_backoff_retry(
    retry_exceptions,
    initial_delay_ms=INITIAL_DELAY_MS,
    num_retries=NUM_RETRIES,
    backoff_multiplier=BACKOFF_MULTIPLIER,
    max_delay_ms=MAX_DELAY_MS):
  """Decorator for exponential backoff retry.

  Log and raise exception if unsuccessful.
  Do not retry while in a session.

  Args:
    retry_exceptions: tuple of exceptions to check.
    initial_delay_ms: initial delay between retries in ms.
    num_retries: number max number of retries.
    backoff_multiplier: multiplier for each retry e.g. 2 will double the
      retry delay.
    max_delay_ms: upper bound on retry delay.

  Returns:
    A decorator method that returns wrapped method.
  """
  def decorator(method):
    def wrapper(self, *args, **kwargs):
      attempt = 0
      delay = initial_delay_ms

      while True:
        try:
          return method(self, *args, **kwargs)
        except retry_exceptions as e:
          attempt += 1
          # Stop retrying after the budget is exhausted, and never retry
          # while a session is open (self.session).
          if attempt > num_retries or self.session:
            # In this case it is hard to discern keyspace
            # and tablet_type from exception.
            log_exception(e)
            # NOTE(review): bare `raise e` resets the traceback on Python 2;
            # a plain `raise` would preserve it.
            raise e
          logging.error(
              "retryable error: %s, retrying in %d ms, attempt %d of %d",
              e, delay, attempt, num_retries)
          time.sleep(delay/1000.0)
          # Exponential growth, capped at max_delay_ms.
          delay *= backoff_multiplier
          delay = min(max_delay_ms, delay)

    return wrapper
  return decorator


class VitessError(Exception):
  """VitessError is raised by an RPC with a server-side application error.

  VitessErrors have an error code and message.
  """

  # Matches the "(errno NNNN)" marker that MySQL embeds in error messages.
  _errno_pattern = re.compile(r'\(errno (\d+)\)')

  def __init__(self, method_name, error=None):
    """Initializes a VitessError with appropriate defaults from an error dict.

    Args:
      method_name: RPC method name, as a string, that was called.
      error: error dict returned by an RPC call.
    """
    if error is None or not isinstance(error, dict):
      error = {}
    self.method_name = method_name
    self.code = error.get('Code', vtrpc_pb2.UNKNOWN_ERROR)
    self.message = error.get('Message', 'Missing error message')
    # Make self.args reflect the error components
    super(VitessError, self).__init__(self.message, method_name, self.code)

  def __str__(self):
    """Print the error nicely, converting the proto error enum to its name"""
    return '%s returned %s with message: %s' % (
        self.method_name, vtrpc_pb2.ErrorCode.Name(self.code), self.message)

  def convert_to_dbexception(self, args):
    """Converts from a VitessError to the appropriate dbexceptions class.

    Args:
      args: argument tuple to use to create the new exception.

    Returns:
      An exception from dbexceptions.
    """
    if self.code == vtrpc_pb2.TRANSIENT_ERROR:
      return dbexceptions.TransientError(args)
    if self.code == vtrpc_pb2.INTEGRITY_ERROR:
      # Prune the error message to truncate after the mysql errno, since
      # the error message may contain the query string with bind variables.
      msg = self.message.lower()
      parts = self._errno_pattern.split(msg)
      # NOTE(review): assumes the "(errno NNNN)" marker is present; when the
      # pattern does not match, split returns a single element and parts[2]
      # raises IndexError -- confirm upstream always includes the marker.
      pruned_msg = msg[:msg.find(parts[2])]
      new_args = (pruned_msg,) + tuple(args[1:])
      return dbexceptions.IntegrityError(new_args)
    return dbexceptions.DatabaseError(args)


def extract_rpc_error(method_name, response):
  """Extracts any app error that's embedded in an RPC response.

  Args:
    method_name: RPC name, as a string.
    response: response from an RPC.

  Raises:
    VitessError if there is an app error embedded in the reply
  """
  reply = response.reply
  if not reply or not isinstance(reply, dict):
    # NOTE(review): this early exit returns the response object while the
    # normal path falls through and returns None -- callers should not rely
    # on the return value; confirm intended.
    return response

  # Handle the case of new client => old server
  err = reply.get('Err', None)
  if err:
    raise VitessError(method_name, err)
BOGOTÁ, Dec 13 2013 (IPS) - The ousted left-wing mayor of the Colombian capital, Gustavo Petro, is a casualty of the battle over the introduction of a Zero Garbage programme, which had included thousands of informal recyclers in the waste disposal business. “His removal was arbitrary,” said Nelson Rojas, one of the workers in the city government’s Basura Cero (Zero Garbage) programme. “We don’t know what is going to happen now,” he told IPS in Plaza de Bolívar, where tens of thousands of people have demonstrated every day in front of city hall in support of the mayor since he was sacked on Monday Dec. 9. Petro was fired and barred from holding public office for 15 years due to three “extremely serious infringements,” according to the inspector general, Alejandro Ordóñez, who has the authority to investigate and dismiss public officials. The measure against Petro appeared to be final. But legal experts have said they found an article in the constitution establishing that the mayor of Bogotá can only be removed by the president, at the inspector general’s request. According to the ultra-conservative Ordóñez, the mayor’s decision to put 63 percent of the lucrative waste disposal business in public hands violated the principle of free competition. At the time, the business was run by four private contractors. The inspector general charged Petro with handing garbage collection over to public companies that supposedly lacked experience and that used garbage dumpsters instead of trucks for six months, which caused the death of one worker. “There’s a lot of money in waste disposal,” said Rojas, wearing a green Basura Cero uniform. “The private companies are opposed because they got rich off the collection of garbage. “The inspector general is an ally of the rich and they are against the mayor’s policies,” he said, as people rallied in Plaza de Bolívar, where Petro had urged people to come out to protest his removal. 
“The private companies don’t give work to women or to older people,” he said. Three women and a man wearing the same green coveralls agreed. “In Basura Cero, 60 percent of the workers are women. And it is mainly women who are employed to sweep the streets in Bogotá,” said another one of the protesters. “More than 3,000 families will be left without a livelihood…we’re going to keep working in Basura Cero, we’re going to protest in shifts,” he added. Jorge Estrada, 37, also wearing a green coverall, held up a sign with the reasons the mayor was fired: “For giving the recyclers decent working conditions”; “For taking the garbage business out of the hands of the Bogotá mafia”. This city of eight million people is run as an autonomous capital district made up of 20 municipalities. Over the past year, garbage collection in 12 of them – 63 percent of the waste disposal in the city – has been in the hands of Aguas de Bogotá, a subsidiary of the state-run Empresa de Acueducto y Alcantarillado de Bogotá water and sewage company. In the rest of the municipalities waste disposal is still carried out by three of the original four private consortiums. Dec. 18, 2012 was the deadline for the city government to fulfil a constitutional court order for all organised garbage pickers to be included in the waste disposal business nationwide. The aim was to create equal conditions for those who make a living scavenging for and reselling recyclable materials. Petro’s predecessors failed to fulfil a similar sentence in 2003, instead extending the contracts held by the private companies, which are the exclusive owners of the rubbish in their areas. In practice, waste pickers, who go through the bags of unseparated trash that residents of Bogotá leave out on the sidewalk, made a tiny dent in the private companies’ monopoly. The contractors are paid per ton of garbage trucked to the Doña Juana dump on the south side of Bogotá – a system that discourages recycling. 
After taking office in January 2012 it took Petro six months to win city council approval for his development plan, which included a new rubbish collection system. The idea was to move towards the goal of zero garbage by reducing the amount of waste dumped in landfills by separating garbage at source and recycling. A city government census found that there were some 15,000 garbage pickers in Bogotá. The Zero Garbage programme hired 3,000 of them, and the rest are paid to transport recyclable waste to warehouses, instead of only paying the private contractors. The new system extends the life of the city dump, and incorporates a vulnerable segment of the population in the business of trash collection. But the private companies, who wanted to bid for new seven-year contracts, were not pleased when Petro tried to temporarily extend their contracts as he worked out the details of the new system. In heated negotiations, Petro talked about putting the entire system into public hands. That is what inspector general Ordóñez cited when he argued that Petro was violating the principles of free enterprise and free competition. When the city government saw no agreement was going to be reached, it prepared a district company to collect the garbage after the Dec. 18 deadline. In just over two months it reconverted Aguas de Bogotá, which cleaned up sludge and garbage from sewers in dumpsters that were specially conditioned to transport leachates. But the Petro administration suffered a severe backlash. The contractors did not agree to return the garbage trucks to the city. There weren’t enough dumpsters and the city government faced legal limits that kept it from acquiring trucks or adopting other measures before Dec. 18 because officially there was no emergency yet. Three days before that date, the influx of garbage to the landfill dropped, according to measurements by the Special Administrative Unit of Public Services of Bogotá. 
The night before, mountains of garbage had begun to appear on the streets, which the city government garbage collection service was unable to pick up because it would have violated the terms of the private companies’ contracts, which gave them exclusive control over the waste. The city government was careful not to allow the contractors’ trucks into the landfill after Dec. 18, because it would have meant an automatic extension of the contracts. The Petro administration used the dumpsters for leachate and rented used garbage trucks from the city of New York. Although it took the trucks weeks to arrive, the system was working again within three to eight days after the Dec. 18 deadline, depending on the municipality. In the negotiations, Petro finally agreed to allow three consortiums to continue operating in eight of the municipalities. But in the view of the inspector general, it was not necessary to put garbage collection into public hands in order to live up to the constitutional court order. The inclusion of thousands of garbage pickers in the system has involved carrying out a census, issuing special cards, and helping people open savings accounts – a process that has not yet been completed. Justice Minister Alfonso Gómez Méndez announced that the government would propose a constitutional reform to modify the post of inspector general, who is named by Congress, has practically absolute power, and has 30,000 public employees under him. But in the meantime, the constitution allows the inspector general to sack publicly elected officials, whose only recourse is to appeal to the inspector general’s office itself. Only if the constitutional article that would leave the case in the hands of the president, Juan Manuel Santos, prevails will Petro be able to return to the mayor’s office.
#! /usr/bin/env python
"""Integration test client: sends a fixed square path to the /follow_path
action server and waits for the follower to finish. Python 2 / ROS."""
import roslib
roslib.load_manifest('path_follower')
import rospy

# Brings in the SimpleActionClient
import actionlib

# Brings in the messages used by the FollowPath action, including the
# goal message and the result message.
import path_follower.msg

import math
from tf.transformations import quaternion_from_euler
from geometry_msgs.msg import Quaternion
from geometry_msgs.msg import PoseStamped


# Convert from given degree of yaw rotation to geometry_msgs.msg.Quaternion
def quaternion_from_yaw_degree(yaw_degree):
    q = quaternion_from_euler(0, 0, math.radians(yaw_degree))
    return Quaternion(*q)


# Define a fixed example path: a 2m x 2m square that starts and ends at the
# origin. Every pose faces yaw 0 (along +x).
def fixed_path():
    p00 = PoseStamped()
    p00.pose.position.x = 0.0
    p00.pose.orientation = quaternion_from_yaw_degree(0)

    p0 = PoseStamped()
    p0.pose.position.x = 2.0
    p0.pose.orientation = quaternion_from_yaw_degree(0)

    p1 = PoseStamped()
    p1.pose.position.x = 2.0
    p1.pose.position.y = 2.0
    p1.pose.orientation = quaternion_from_yaw_degree(0)

    p2 = PoseStamped()
    p2.pose.position.x = 0.0
    p2.pose.position.y = 2.0
    p2.pose.orientation = quaternion_from_yaw_degree(0)

    p3 = PoseStamped()
    p3.pose.position.x = 0.0
    p3.pose.position.y = 0.0
    p3.pose.orientation = quaternion_from_yaw_degree(0)

    return [p00,p0, p1, p2, p3]


def test():
    """Send the fixed path as a FollowPath goal and block until the result."""
    # Creates the SimpleActionClient, passing the type of the action to the
    # constructor.
    client = actionlib.SimpleActionClient('/follow_path',
                                          path_follower.msg.FollowPathAction)

    # Waits until the action server has started up and started
    # listening for goals.
    client.wait_for_server()

    # Creates a goal to send to the action server.
    goal = path_follower.msg.FollowPathGoal()
    path = goal.path
    path.header.stamp = rospy.Time.now()
    # NOTE(review): the path is stamped in the 'odom' frame -- confirm this
    # matches the frame the follower expects.
    path.header.frame_id = 'odom'
    path.poses = fixed_path()

    # Sends the goal to the action server.
    client.send_goal(goal)

    # Waits for the server to finish performing the action.
    client.wait_for_result()
    print client.get_state(), client.get_goal_status_text()

    # Prints out the result of executing the action
    return client.get_result()  # final pose


if __name__ == '__main__':
    try:
        # Initializes a rospy node so that the SimpleActionClient can
        # publish and subscribe over ROS.
        rospy.init_node('path_follower_test')
        result = test()
        print "Result:", result
    except rospy.ROSInterruptException:
        print "program interrupted before completion"
‘I don’t think we are a charity. We are a successful, multi-national educational institution,’ explained the public school head to me. And he was right. As it happens, he was a highly progressive head committed to using the wealth and resource he enjoyed to collaborate with an under-performing local academy. For the first time, their partner school now had maths graduates teaching physics and a new range of language options. But he had no doubt that he should not be in receipt of charitable status and tax relief. Which is why one of the few components of Theresa May’s school reforms I can support is the Prime Minister’s determination to introduce ‘stronger, more demanding public benefit tests for independent schools.’ But the reforms which they are consulting on are ill thought through and illiberal. There is a smart way and a stupid way to promote richer and deeper collaboration between the fee-paying and state sector. We set it all out in the 2015 Labour Party manifesto when – surprise, surprise – the Tories accused us of ‘class war’ and ‘dumbing down.’ Now, it is their flagship policy. The starting point is that collaboration between and amongst schools of all types work. When selection, peer group and class size are removed from the equation (three big omissions), the quality of teaching in the state sector is better than the private. According to Professor Dylan Williams, teachers in the state sector deliver the same quality of teaching in classes of 25 that their private school counterparts deliver in classes of 13. Similarly, on tracking pupil attainment, on Special Educational Needs, on teacher training, there is much that the public schools could benefit from with better partnership. But we also know that when it comes to playing fields and boat houses, art blocks and theatres, Oxbridge-interview prepping and career guidance, confidence building and network stretching, the public schools continue to have a profound advantage over their state school peers. 
The scandal, of course, is that these schools benefit from £140 million a year state subsidy thanks to their charitable status and accompanying business rate relief. Whilst Stoke-on-Trent 6th Form College has to pay VAT, some of the country’s wealthiest boarding schools get off scot-free – despite the state having trained their teachers and headteachers. Labour’s 2015 plan was progressive and workable: we would allow each private school in receipt of business rate relief to think about its own area of excellence (sports training; history teaching; pottery; lab work) and seek out a collaborative relationship with a nearby school. We were not asking them all to sponsor an academy (which they often were not very good at) or indulge in some patronising noblesse oblige. It was to be a hard partnership from which both schools benefited, and its quality would be judged by the rigorous Independent Schools Inspectorate. If the public schools failed to deliver – or thought opening their gardens; dodgy bursaries; or putting art on the walls was enough – then their business rate relief would be annulled. By contrast, the government seem to be choosing a far more complicated and illiberal route, which would end up with Ministers instructing the Charity Commission which institution should or should not be a charity on purely political grounds. This is a dangerous assault on civil society which could, in the long run, see politically awkward charities like Oxfam or Greenpeace struck off by government fiat. At the same time, they are asking the Charity Commission to adjudicate on the quality of educational partnership between state and private, which they do not have the skills to do. I think we are now heading towards a political consensus that the state subsidy of private schools in their current form is coming to an end. The Labour party should support that part of the package. Because looking at the reception Justine Greening got in the Commons, they might need the votes.
Are Tristram Hunt’s plans for private schools a class war or just a bit pointless?
#!/usr/bin/python
# -*- coding: utf-8 -*-
##
# circuit.py: Modeling for stabilizer circuits.
##
# © 2012 Christopher E. Granade (cgranade@gmail.com) and
# Ben Criger (bcriger@gmail.com).
# This file is a part of the QuaEC project.
# Licensed under the AGPL version 3.
##
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
##

## IMPORTS ##

from sys import version_info
if version_info[0] == 3:
    PY3 = True
    from importlib import reload
elif version_info[0] == 2:
    PY3 = False
else:
    raise EnvironmentError("sys.version_info refers to a version of "
                           "Python neither 2 nor 3. This is not permitted. "
                           "sys.version_info = {}".format(version_info))

from copy import copy
from operator import add, mul
import itertools as it
from functools import reduce

if PY3:
    from . import PauliClass as pc
    from . import CliffordClass as cc
    from . import utils as u
else:
    import PauliClass as pc
    import CliffordClass as cc
    import utils as u

## ALL ##

__all__ = [
    'Location', 'Circuit',
    'ensure_loc', 'propagate_fault', 'possible_output_faults',
    'possible_faults'
]

## INTERNAL FUNCTIONS ##

def qubits_str(qubits, qubit_names=None):
    """
    Renders a sequence of qubit indices as a space-separated string,
    using either the default one-based labels ("q1 q2 ...") or
    caller-supplied aliases.
    """
    if qubit_names is None:
        return ' '.join('q{}'.format(idx + 1) for idx in qubits)
    else:
        return ' '.join(qubit_names[idx] for idx in qubits)

## CLASSES ##

class Location(object):
    """
    Represents a gate, wait, measurement or preparation location in a
    circuit.

    Note that currently, only gate locations are implemented.

    :param kind: The kind of location to be created. Each kind is an
        abbreviation drawn from ``Location.KIND_NAMES``, or is the index in
        ``Location.KIND_NAMES`` corresponding to the desired location kind.
    :type kind: int or str
    :param qubits: Indicies of the qubits on which this location acts.
    :type qubits: tuple of ints.
    """

    ## PRIVATE CLASS CONSTANTS ##

    # Gate kinds whose action can be converted to a qecc.Clifford.
    _CLIFFORD_GATE_KINDS = [
        'I', 'X', 'Y', 'Z', 'H', 'R_pi4', 'CNOT', 'CZ', 'SWAP'
    ]

    # Maps each Clifford kind to a factory (nq, *qubits) -> qecc.Clifford.
    _CLIFFORD_GATE_FUNCS = {
        'I': lambda nq, idx: cc.eye_c(nq),
        'X': lambda nq, idx: pc.elem_gen(nq, idx, 'X').as_clifford(),
        'Y': lambda nq, idx: pc.elem_gen(nq, idx, 'Y').as_clifford(),
        'Z': lambda nq, idx: pc.elem_gen(nq, idx, 'Z').as_clifford(),
        'H': cc.hadamard,
        'R_pi4': cc.phase,
        'CNOT': cc.cnot,
        'CZ': cc.cz,
        'SWAP': cc.swap
    }

    # Maps QuaEC kind names onto the gate names used by QCViewer.
    _QCVIEWER_NAMES = {
        'I': 'I',  # This one is implemented by a gate definition
                   # included by Circuit.as_qcviewer().
        'X': 'X', 'Y': 'Y', 'Z': 'Z',
        'H': 'H',
        'R_pi4': 'P',
        'CNOT': 'tof',
        'CZ': 'Z',
        'SWAP': 'swap'
    }

    ## PUBLIC CLASS CONSTANTS ##

    #: Names of the kinds of locations used by QuaEC.
    KIND_NAMES = sum([
        _CLIFFORD_GATE_KINDS
    ], [])

    ## INITIALIZER ##

    def __init__(self, kind, *qubits):
        if isinstance(kind, int):
            self._kind = kind
        elif isinstance(kind, str):
            self._kind = self.KIND_NAMES.index(kind)
        else:
            raise TypeError("Location kind must be an int or str.")

        try:
            self._qubits = tuple(map(int, qubits))
        except TypeError as e:
            # Chain the original error so the offending value is visible.
            raise TypeError('Qubit integers must be int-like.') from e

        self._is_clifford = bool(self.kind in self._CLIFFORD_GATE_KINDS)

    ## REPRESENTATION METHODS ##

    def __str__(self):
        return " {:<4} {}".format(self.kind, ' '.join(map(str, self.qubits)))

    def __repr__(self):
        return "<{} Location on qubits {}>".format(self.kind, self.qubits)

    def __hash__(self):
        return hash((self._kind,) + self.qubits)

    ## IMPORT METHODS ##

    @staticmethod
    def from_quasm(source):
        """
        Returns a :class:`qecc.Location` initialized from a QuASM-formatted
        line.

        :type str source: A line of QuASM code specifying a location.
        :rtype: :class:`qecc.Location`
        :returns: The location represented by the given QuASM source.
        """
        parts = source.split()
        return Location(parts[0], *list(map(int, parts[1:])))

    ## PROPERTIES ##

    @property
    def kind(self):
        """
        Returns a string defining which kind of location this instance
        represents. Guaranteed to be a string that is an element of
        ``Location.KIND_NAMES``.
        """
        return self.KIND_NAMES[self._kind]

    @property
    def qubits(self):
        """
        Returns a tuple of ints describing which qubits this location acts
        upon.
        """
        return self._qubits

    @property
    def nq(self):
        """
        Returns the number of qubits in the smallest circuit that can
        contain this location without relabeling qubits. For a
        :class:`qecc.Location` ``loc``, this property is defined as
        ``1 + max(loc.qubits)``.
        """
        return 1 + max(self.qubits)

    @property
    def is_clifford(self):
        """
        Returns ``True`` if and only if this location represents a gate drawn
        from the Clifford group.
        """
        return self._is_clifford

    @property
    def wt(self):
        """
        Returns the number of qubits on which this location acts.
        """
        return len(self.qubits)

    ## SIMULATION METHODS ##

    def as_clifford(self, nq=None):
        """
        If this location represents a Clifford gate, returns the action of
        that gate. Otherwise, a :obj:`RuntimeError` is raised.

        :param int nq: Specifies how many qubits to represent this location as
            acting upon. If not specified, defaults to the value of the ``nq``
            property.
        :rtype: :class:`qecc.Clifford`
        """
        if not self.is_clifford:
            raise RuntimeError("Location must be a Clifford gate.")
        else:
            if nq is None:
                nq = self.nq
            elif nq < self.nq:
                raise ValueError('nq must be greater than or equal to the nq property.')
            return self._CLIFFORD_GATE_FUNCS[self.kind](nq, *self.qubits)

    ## EXPORT METHODS ##

    def as_qcviewer(self, qubit_names=None):
        """
        Returns a representation of this location in a format suitable for
        inclusion in a QCViewer file.

        :param qubit_names: If specified, the given aliases will be used for
            the qubits involved in this location when exporting to QCViewer.
            Defaults to "q1", "q2", etc.
        :rtype: str

        Note that the identity (or "wait") location requires the following to
        be added to QCViewer's ``gateLib``::

            NAME wait
            DRAWNAME "1"
            SYMBOL I
            1 , 0
            0 , 1
        """
        # FIXME: link to QCViewer in the docstring here.
        return ' {gatename} {gatespec}\n'.format(
            gatename=self._QCVIEWER_NAMES[self.kind],
            gatespec=qubits_str(self.qubits, qubit_names),
        )

    ## OTHER METHODS ##

    def relabel_qubits(self, relabel_dict):
        """
        Returns a new location related to this one by a relabeling of the
        qubits. The relabelings are to be indicated by a dictionary that
        specifies what each qubit index is to be mapped to.

        >>> import qecc as q
        >>> loc = q.Location('CNOT', 0, 1)
        >>> print(loc)
          CNOT 0 1
        >>> print(loc.relabel_qubits({1: 2}))
          CNOT 0 2

        :param dict relabel_dict: If `i` is a key of `relabel_dict`, then
            qubit `i` will be replaced by `relabel_dict[i]` in the returned
            location.
        :rtype: :class:`qecc.Location`
        :returns: A new location with the qubits relabeled as specified by
            `relabel_dict`.
        """
        return Location(self.kind, *tuple(
            relabel_dict[i] if i in relabel_dict else i for i in self.qubits
        ))


def ensure_loc(loc):
    """
    Coerces a tuple such as ``('CNOT', 0, 2)`` into a
    :class:`qecc.Location`; passes existing Location instances through
    unchanged.
    """
    if isinstance(loc, tuple):
        loc = Location(*loc)
    elif not isinstance(loc, Location):
        raise TypeError('Locations must be specified either as Location instances or as tuples.')
    return loc


class Circuit(list):
    """
    Represents a quantum circuit as a list of :class:`qecc.Location`
    instances; all list mutators coerce their inputs through
    :func:`ensure_loc`.
    """

    def __init__(self, *locs):
        # Circuit(('CNOT', 0, 2), ('H', 1)) works, but
        # Circuit('CNOT', 0, 2) doesn't work.
        list.__init__(self, list(map(ensure_loc, locs)))

    ## SEQUENCE PROTOCOL ##

    def append(self, newval):
        super(Circuit, self).append(ensure_loc(newval))
    append.__doc__ = list.append.__doc__

    def insert(self, at, newval):
        super(Circuit, self).insert(at, ensure_loc(newval))
    insert.__doc__ = list.insert.__doc__

    def __getitem__(self, *args):
        item = super(Circuit, self).__getitem__(*args)
        if not isinstance(item, list):
            return item
        else:
            # Slices come back as plain lists; re-wrap them as Circuits.
            return Circuit(*item)

    def __getslice__(self, *args):
        # Python 2 only; under Python 3 slicing goes through __getitem__.
        return Circuit(*super(Circuit, self).__getslice__(*args))

    def __add__(self, other):
        if not isinstance(other, Circuit):
            other = Circuit(*other)
        return Circuit(*super(Circuit, self).__add__(other))

    def __iadd__(self, other):
        if not isinstance(other, Circuit):
            other = Circuit(*other)
        return Circuit(*super(Circuit, self).__iadd__(other))

    ## PROPERTIES ##

    @property
    def nq(self):
        """
        Returns the number of qubits on which this circuit acts.
        """
        return max(loc.nq for loc in self) if self else 0

    @property
    def size(self):
        """
        Returns the number of locations in this circuit. Note that this
        property is synonymous with :obj:`len`, in that ``len(circ) ==
        circ.size`` for all :class:`qecc.Circuit` instances.
        """
        return len(self)

    @property
    def depth(self):
        """
        Returns the minimum number of timesteps required to implement exactly
        this circuit in parallel.
        """
        return len(list(self.group_by_time()))

    ## IMPORT CLASS METHODS ##

    @staticmethod
    def from_quasm(source):
        """Returns a :class:`qecc.Circuit` object from a QuASM-formatted
        file, producing one location per line."""
        if not isinstance(source, str):
            # Assume source is a file-like, so that iter(source) returns lines
            # in the file.
            line_iter = iter(source)
        else:
            line_iter = iter(source.split('\n'))

        return Circuit(*list(map(Location.from_quasm, line_iter)))

    ## PRETTY PRINTING ##

    def __repr__(self):
        return "Circuit({})".format(", ".join(map(repr, self)))

    def __str__(self):
        return "\n".join(map(str, self))

    def as_quasm(self):
        """
        Returns a representation of the circuit in an assmembler-like format.
        In this format, each location is represented by a single line where
        the first field indicates the kind of location and the remaining
        fields indicate the qubits upon which the location acts.

        >>> import qecc as q
        >>> circ = q.Circuit(('CNOT', 0, 2), ('H', 2), ('SWAP', 1, 2), ('I', 0))
        >>> print(circ.as_quasm())
          CNOT 0 2
          H    2
          SWAP 1 2
          I    0
        """
        return str(self)

    def as_qcviewer(self, inputs=(0,), outputs=(0,), qubit_names=None):
        """
        Returns a string representing this circuit in the format recognized
        by `QCViewer`_.

        :param tuple inputs: Specifies which qubits should be marked as inputs
            in the exported QCViewer circuit.
        :param tuple outputs: Specifies which qubits should be marked as
            outputs in the exported QCViewer circuit.
        :param qubit_names: Names to be used for each qubit when exporting to
            QCViewer.

        .. _QCViewer: http://qcirc.iqc.uwaterloo.ca/index.php?n=Projects.QCViewer
        """
        header = '.v ' + qubits_str(list(range(self.nq)), qubit_names) + '\n'
        header += '.i ' + qubits_str(inputs, qubit_names) + '\n'
        header += '.o ' + qubits_str(outputs, qubit_names) + '\n'

        circ_text = 'BEGIN\n'
        for loc in self:
            circ_text += loc.as_qcviewer(qubit_names)
        circ_text += 'END\n'

        return header + circ_text

    def as_qcircuit(self, C=None, R=None):
        r"""
        Typesets this circuit using the `Qcircuit`_ package for
        :math:`\text{\LaTeX}`.

        :param float C: Width (in ems) of each column.
        :param float R: Height (in ems) of each column.
        :rtype: :obj:`str`
        :returns: A string containing :math:`\text{\LaTeX}` source code for
            use with `Qcircuit`_.

        .. _Qcircuit: http://www.cquic.org/Qcircuit/
        """
        trans_cells = []
        for timestep in self.group_by_time():
            col = [r'\qw'] * self.nq  # If nothing else, place a \qw.
            hidden_qubits = set()
            for loc in timestep:
                if any(qubit in hidden_qubits for qubit in range(min(loc.qubits), max(loc.qubits)+1)):
                    # A qubit is hidden, so append and reset.
                    trans_cells.append(col)
                    col = [r'\qw'] * self.nq  # If nothing else, place a \qw.
                    hidden_qubits = set()
                if loc.wt == 1:
                    col[loc.qubits[0]] = r"\gate{{{0}}}".format(loc.kind if loc.kind != "I" else r"\id")
                elif loc.kind == 'CNOT':
                    col[loc.qubits[0]] = r'\ctrl{{{0}}}'.format(loc.qubits[1] - loc.qubits[0])
                    col[loc.qubits[1]] = r'\targ'
                else:
                    raise NotImplementedError("Location kind {0.kind} not supported by this method.".format(loc))
                hidden_qubits.update(list(range(min(loc.qubits), max(loc.qubits)+1)))
            trans_cells.append(col)

        cells = u.transpose([[''] * self.nq] + trans_cells + [[r'\qw'] * self.nq])

        return r"""
\Qcircuit {C} {R} {{
    {0}
}}
""".format(u.latex_array_contents(cells),
            C="@C{}em".format(C) if C is not None else "",
            R="@R{}em".format(R) if R is not None else ""
        )

    ## CIRCUIT SIMULATION METHODS ##

    def as_clifford(self):
        """
        If this circuit is composed entirely of Clifford operators, converts
        it to a :class:`qecc.Clifford` instance representing the action of the
        entire circuit. If the circuit is not entirely Clifford gates, this
        method raises a :obj:`RuntimeError`.
        """
        if not all(loc.is_clifford for loc in self):
            raise RuntimeError('All locations must be Clifford gates in order to represent a circuit as a Clifford operator.')

        nq = self.nq
        # Locations are applied left-to-right, so compose right-to-left.
        return reduce(mul, (loc.as_clifford(nq) for loc in reversed(self)), cc.eye_c(nq))

    ## CIRCUIT SIMPLIFICATION METHODS ##

    def cancel_selfinv_gates(self, start_at=0):
        """
        Transforms the circuit, removing any self-inverse gates from the
        circuit if possible. Note that not all self-inverse gates are
        currently supported by this method.

        :param int start_at: Specifies which location to consider first. Any
            locations before ``start_at`` are not considered for cancelation
            by this method.
        """
        SELFINV_GATES = ['H', 'X', 'Y', 'Z', 'CNOT']

        if start_at == len(self):
            return self

        loc = self[start_at]
        if loc.kind in SELFINV_GATES:
            if len(loc.qubits) == 1:
                # TODO: add two-qubit gates.
                q = loc.qubits[0]

                for idx_future in range(start_at + 1, len(self)):
                    if q in self[idx_future].qubits:
                        # Check that the kind matches.
                        if self[idx_future].kind == loc.kind:
                            self.pop(idx_future)
                            self.pop(start_at)
                            return self.cancel_selfinv_gates(start_at=start_at)
                        else:
                            # Go on to the next gate, since there's another
                            # gate between here.
                            return self.cancel_selfinv_gates(start_at=start_at+1)

        return self.cancel_selfinv_gates(start_at=start_at+1)

    def replace_cz_by_cnot(self):
        """
        Changes all controlled-:math:`Z` gates in this circuit to
        controlled-NOT gates, adding Hadamard locations as required.
        """
        # FIXME: this is inefficient as hell right now.
        try:
            idx = next((idx for idx in range(len(self)) if self[idx].kind == 'CZ'))
            q = self[idx].qubits
            self[idx] = Location('CNOT', *q)
            self.insert(idx + 1, ('H', q[1]))
            self.insert(idx, ('H', q[1]))
            return self.replace_cz_by_cnot()
        except StopIteration:
            return self

    def group_by_time(self, pad_with_waits=False):
        """
        Returns an iterator onto subcircuits of this circuit, each of depth
        1.

        :param bool pad_with_waits: If ``True``, each subcircuit will have
            wait locations added such that every qubit is acted upon in every
            subcircuit.

        :yields: each depth-1 subcircuit, corresponding to time steps of the
            circuit
        """
        nq = self.nq

        found = [False] * nq
        group_acc = Circuit()

        for loc in self:
            if any(found[qubit] for qubit in loc.qubits):
                # A qubit in this location was already used in this timestep,
                # so flush the accumulated group and start a new one.
                if pad_with_waits:
                    group_acc += [('I', qubit) for qubit in range(nq) if not found[qubit]]
                yield group_acc
                found = [False] * nq
                group_acc = Circuit()

            for qubit in loc.qubits:
                found[qubit] = True

            group_acc.append(loc)

        if pad_with_waits:
            group_acc += [('I', qubit) for qubit in range(nq) if not found[qubit]]
        yield group_acc

    def pad_with_waits(self):
        """
        Returns a copy of the :class:`qecc.Circuit` ``self``, which contains
        explicit wait locations.
        """
        return sum(self.group_by_time(pad_with_waits=True), Circuit())

    ## OTHER METHODS ##

    def relabel_qubits(self, relabel_dict):
        """
        Returns a new circuit related to this one by a relabeling of the
        qubits. The relabelings are to be indicated by a dictionary that
        specifies what each qubit index is to be mapped to.

        >>> import qecc as q
        >>> loc = q.Location('CNOT', 0, 1)
        >>> print(loc)
          CNOT 0 1
        >>> print(loc.relabel_qubits({1: 2}))
          CNOT 0 2

        :param dict relabel_dict: If `i` is a key of `relabel_dict`, then
            qubit `i` will be replaced by `relabel_dict[i]` in the returned
            circuit.
        :rtype: :class:`qecc.Circuit`
        :returns: A new circuit with the qubits relabeled as specified by
            `relabel_dict`.
        """
        return Circuit(*[
            loc.relabel_qubits(relabel_dict) for loc in self
        ])

## FUNCTIONS ##

def propagate_fault(circuitlist, fault):
    """
    Given a list of circuits representing a list of timesteps (see
    :meth:`qecc.Circuit.group_by_time`) and a Pauli fault, propagates that
    fault through the remainder of the time-sliced circuit.

    :param list circuitlist: A list of :class:`qecc.Circuit` instances
        representing the timesteps of a larger circuit.
    :param qecc.Pauli fault: A Pauli fault to occur immediately before the
        first timestep in ``circuitlist``.
    :rtype: :class:`qecc.Pauli`
    :returns: The effective fault after propagating ``fault`` through the
        remainder of ``circuitlist``.
    """
    fault_out = fault
    for step in circuitlist:
        fault_out = step.as_clifford().conjugate_pauli(fault_out)
    return fault_out

def possible_faults(circuit):
    """
    Takes a sub-circuit which has been padded with waits, and returns an
    iterator onto Paulis which may occur as faults after this sub-circuit.

    :param qecc.Circuit circuit: Subcircuit to in which faults are to be
        considered.
    """
    return it.chain.from_iterable(
        pc.restricted_pauli_group(loc.qubits, circuit.nq)
        for loc in circuit
    )

def possible_output_faults(circuitlist):
    """
    Gives an iterator onto all possible effective faults due to 1-fault paths
    occuring within ``circuitlist``, assuming it has been padded with waits.

    :param list circuitlist: A list of :class:`qecc.Circuit` instances
        representing timesteps in a larger circuit. See
        :meth:`qecc.Circuit.group_by_time`.
    :yields: :class:`qecc.Pauli` instances representing possible effective
        faults due to 1-fault paths within the circuit represented by
        ``circuitlist``.
    """
    # NOTE: the previous implementation used ``it.imap``, which only exists
    # under Python 2; this version works on both supported Python versions.
    for timestep_idx, timestep in enumerate(circuitlist):
        remainder = circuitlist[timestep_idx + 1:]
        for fault in possible_faults(timestep):
            yield propagate_fault(remainder, fault)
This week, we embark on a topic that has been buzzing all over the web this past month: “Welcome to your tape.” Netflix released a 13-part series titled “13 Reasons Why,” based off the book of the same title by Jay Asher, released in 2007. Please be warned: this blog post and subsequent podcast episode deals with certain triggers regarding teen suicide and mental health. Wilx read the book when it was released, and Netflix picking up the series brought it once again to the forefront of current conversations. The show deals with the suicide of teen Hannah Baker, who recorded and delivered 13 tapes dedicated to people she felt contributed to her reasons for committing suicide. The story is narrated from the perspective of her close friend and romantic interest Clay Jensen. Though the show was intended to bring to light the topic of teen suicide and mental health (an intention also expressed by singer and actress Selena Gomez, Executive Producer), we can’t help but wonder if this might have been a misguided attempt that causes more harm than good. We grapple with the sentiment that it brings it to light, yet does so in a twisted sort of revenge fantasy that doesn’t succinctly depict that Hannah Baker, or anyone who commits suicide with revenge in mind, will never get the comeuppance intended. Does 13 Reasons Why glamorize teen suicide? Ultimately, it’s still using it as a form of entertainment and ultimately, profit. What do you think? Did you read the book and/or watch the series? What redeeming qualities can be found in a show of this kind? Sound off below! As always, please don’t forget to rate and review. You have no idea how much it truly means to us! We can all help prevent suicide. The Lifeline provides 24/7, free and confidential support for people in distress, prevention and crisis resources for you or your loved ones, and best practices for professionals. Please call 1-800-273-8255.
#!/usr/bin/env python
"""Setuptools packaging script for the diff_cover distribution."""
import os

from setuptools import setup

from diff_cover import VERSION, DESCRIPTION

# Read the pinned requirements with a context manager so the file handle is
# closed deterministically (the previous bare open() leaked it).
with open(os.path.join("requirements", "requirements.txt")) as requirements_file:
    REQUIREMENTS = [line.strip() for line in requirements_file.readlines()]

setup(
    name='diff_cover',
    version=VERSION,
    author='Matt Bachmann',
    url='https://github.com/Bachmann1234/diff-cover',
    test_suite='nose.collector',
    description=DESCRIPTION,
    license='Apache 2.0',
    python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*',
    classifiers=['Development Status :: 5 - Production/Stable',
                 'Environment :: Console',
                 'Intended Audience :: Developers',
                 'License :: OSI Approved :: Apache Software License',
                 'Operating System :: OS Independent',
                 'Programming Language :: Python',
                 'Programming Language :: Python :: 2',
                 'Programming Language :: Python :: 2.7',
                 'Programming Language :: Python :: 3',
                 'Programming Language :: Python :: 3.4',
                 'Programming Language :: Python :: 3.5',
                 'Programming Language :: Python :: 3.6',
                 'Programming Language :: Python :: 3.7',
                 'Programming Language :: Python :: Implementation :: CPython',
                 'Programming Language :: Python :: Implementation :: PyPy',
                 'Topic :: Software Development :: Testing',
                 'Topic :: Software Development :: Quality Assurance'],
    packages=['diff_cover', 'diff_cover/violationsreporters'],
    package_data={'diff_cover': ['templates/*.txt', 'templates/*.html',
                                 'templates/*.css']},
    install_requires=REQUIREMENTS,
    entry_points={
        'console_scripts': ['diff-cover = diff_cover.diff_cover_tool:main',
                            'diff-quality = diff_cover.diff_quality_tool:main']
    }
)
Keeping your employees motivated is challenging for any business owner or manager. Here are 6 secrets that will help! Instilling motivation isn’t easy, but it’s necessary if you want your employees to grow and stay satisfied with their jobs. It’s the driving factor that leads people to work harder, meaning more productivity for your organization, and the most important contributing factor to overall satisfaction, which leads to higher employee retention. That being said, there isn’t any single strategy that can magically motivate all your employees at once and keep them motivated throughout their employment. Everyone is unique, with unique values and ideas, and if you want to be successful in instilling company-wide motivation, you have to find multiple strategies to reach each individual. Here are six motivation secrets that can help you keep your work force happy and driven to succeed. While teamwork is an important element of company success, and grouping your employees together has advantages in building that “team” mentality, nothing beats individual attention when it comes to individual motivation. In large corporations, this is especially true, since employees can feel isolated or unrecognized in a vast sea of workers. Taking a moment to speak to an individual alone and personally can make him or her feel truly appreciated. The best way to go about this is to offer direct praise when an individual exceeds performance goals or does some exemplary work. Not only does this make the employee feel recognized and appreciated, it also reinforces the positive behavior for the entire workforce. But the importance of individual attention extends beyond simple praise. If someone is underperforming, or is overwhelmed by a specific duty, take him/her aside for some personal coaching or one-on-one talks that can help that employee work through his/her problems. 
This type of individual attention demonstrates that you care about the individual behind the work as much as the work itself, and that you’re willing to take extra steps to make the individual feel comfortable. 2. Advancement opportunities are enticing. People tend to feel stifled when their job becomes repetitive or stagnant. Going too long in the same position, with no changes or hope for change, will eventually demotivate even the most ambitious employees. However, if you offer opportunities for advancement and improvement, your employees will be motivated to work harder. As a simple example, promoting from within rather than hiring outside experts can have a profound effect on your company’s overall morale. But advancement doesn’t always have to come with a raise and a new job title. Offering new training or education opportunities for your employees is also motivating, as is offering new responsibilities to those willing to take them on. Help your employees grow and change in their own ways, and they’ll be far more excited about working for you. 3. Leaders set the example. As a leader within your organization, people are going to look to you to set an example for the rest of the group. You’re going to be setting a tone, a work ethic, and a set of values for the company whether you mean to directly or not, and setting the right example can have a meaningful effect on the mentality of your group. For example, if you work hard and stay optimistic about everything, even in the face of enormous challenges, your employees will be likely to do the same. If you set an example of positivity and understanding, your workers will mirror you, and the entire culture of the work environment will become more motivating. In larger organizations, it’s important to convey this idea to all the leaders who work individually with others, especially bosses and supervisors. Having consistent good examples across the board can dramatically alter the landscape of your workplace. 4. 
Environmental motivators can make or break you. How you shape your work environment has a major effect on your team’s mentality. There’s no right or wrong way to go about this, since every company is going to have a different culture, but it is important to include both opportunities to “get away” from the traditional work environment and pieces of color or flair that make the office interesting. For example, some companies have torn down their cubicle walls in an effort to make a more open, team-based workspace. If this is too extreme for your company culture, implementing something simple like a decked-out break room could be just as effective. Stereotypical motivational posters aren’t going to instantly motivate your team every day, but including pictures, quotes, and artwork on the walls of the office can inspire creativity and make the office feel like a much more human, organic place to work. It’s much easier to become and stay motivated when you feel comfortable in your workplace. If you’re ever concerned about the effectiveness of your workplace, ask around. Chances are, your employees will tell you directly if they feel like your office is dull or uninspiring. 5. Socialization makes people more committed. Most people try to separate their personal and professional lives, and it’s usually for the best. Trying to make everyone in the office best friends is a bad idea for a number of reasons, but that doesn’t mean they shouldn’t have meaningful conversations outside of a typical work environment. Being friendly with your work force builds bonds and a collective sense of teamwork, and makes work seem less machine-like and more like an organic team effort. You can prompt people to socialize with each other more by holding team-based events. They can be outside gatherings, like parties or group activities, or something simpler like group lunches at which people are encouraged to let their hair down and talk casually to one another. 6. 
Transparency is the key to communication. Creating an environment of transparency, where you speak openly about your business to your employees and they feel comfortable coming to you with anything that’s on their minds can do wonders for the collective motivation of your workplace. That’s because transparency builds trust; when people understand that you aren’t hiding anything, and that you’ll listen to anybody, they’re far more likely to respect you as an authority and appreciate you as a leader. It also opens inter-departmental channels, giving employees and supervisors greater clarity and more opportunities to openly communicate. Employees are more comfortable bringing up what they like and don’t like, and there are more chances to nip potential problems in the bud by calling them out. Something as simple as an “open door policy” will, over time, make people feel more appreciated, more heard, and more valued. It also opens new lines of communication, and can improve your performance as a team. People are unique and unpredictable, with individual desires and complicated ideals. No matter how perfectly it all plays out in your head, no single strategy can ever hope to please all your workers all at once. As you work to find the best motivators for your team, remember that you’re going to need to make adjustments and changes as you discover what works and what doesn’t. Perfect your motivation strategy as you get to know the individuals in your company, and they’ll reward you with greater dedication and a confident vision for the future. ¡Gracias por ser parte de nuestra comunidad! ¡Comparte Viral Motivation con tus amigos e invitalos a construir una mentalidad de éxito! ¡A tus amigos de seguro les va encantar este video!
import sonnet as snt
import tensorflow as tf

from sonnet.python.modules.conv import Conv2D
from tensorflow.contrib.layers.python.layers import utils

from luminoth.models.base import BaseNetwork

# Base architectures we know how to truncate and extend for SSD.
VALID_SSD_ARCHITECTURES = set([
    'truncated_vgg_16',
])


class SSDFeatureExtractor(BaseNetwork):
    """Builds the multi-scale feature maps consumed by an SSD detector.

    Runs the configured base network (currently only a truncated VGG-16)
    and appends the extra convolutional layers described in the SSD paper,
    accumulating the selected intermediate outputs in the 'FEATURE_MAPS'
    graph collection.
    """

    def __init__(self, config, parent_name=None, name='ssd_feature_extractor',
                 **kwargs):
        """
        Args:
            config: feature-extractor configuration (consumed by BaseNetwork,
                which also sets `self._architecture`).
            parent_name: scope name of the enclosing module, used to resolve
                the base network's endpoint names.
            name: sonnet module name.
        """
        super(SSDFeatureExtractor, self).__init__(config, name=name, **kwargs)

        if self._architecture not in VALID_SSD_ARCHITECTURES:
            raise ValueError('Invalid architecture "{}"'.format(
                self._architecture
            ))

        self.parent_name = parent_name
        self.activation_fn = tf.nn.relu

    def _init_vgg16_extra_layers(self):
        # Extra feature layers (conv6..conv11_2) as specified in the SSD
        # paper. conv6 is a dilated (atrous) 3x3 convolution with rate 6;
        # conv8_2/conv9_2 downsample with stride 2; conv10_2/conv11_2 shrink
        # spatially via VALID padding.
        self.conv6 = Conv2D(1024, [3, 3], rate=6, name='conv6')
        self.conv7 = Conv2D(1024, [1, 1], name='conv7')
        self.conv8_1 = Conv2D(256, [1, 1], name='conv8_1')
        self.conv8_2 = Conv2D(512, [3, 3], stride=2, name='conv8_2')
        self.conv9_1 = Conv2D(128, [1, 1], name='conv9_1')
        self.conv9_2 = Conv2D(256, [3, 3], stride=2, name='conv9_2')
        self.conv10_1 = Conv2D(128, [1, 1], name='conv10_1')
        self.conv10_2 = Conv2D(256, [3, 3], padding='VALID', name='conv10_2')
        self.conv11_1 = Conv2D(128, [1, 1], name='conv11_1')
        self.conv11_2 = Conv2D(256, [3, 3], padding='VALID', name='conv11_2')

    def _build(self, inputs, is_training=True):
        """
        Args:
            inputs: A Tensor of shape `(batch_size, height, width, channels)`.

        Returns:
            A dict of feature maps to be consumed by an SSD network
        """
        # TODO: Is there a better way to manage scoping in these cases?
        scope = self.module_name
        if self.parent_name:
            scope = self.parent_name + '/' + scope

        base_net_endpoints = super(SSDFeatureExtractor, self)._build(
            inputs, is_training=is_training)['end_points']

        if self.truncated_vgg_16_type:
            # As it is pointed out in SSD and ParseNet papers, `conv4_3` has
            # a different features scale compared to other layers, to adjust
            # it we need to add a spatial normalization before adding the
            # predictors.
            vgg_conv4_3 = base_net_endpoints[scope + '/vgg_16/conv4/conv4_3']
            tf.summary.histogram('conv4_3_hist', vgg_conv4_3)
            with tf.variable_scope('conv_4_3_norm'):
                # Normalize through channels dimension (dim=3)
                vgg_conv4_3_norm = tf.nn.l2_normalize(
                    vgg_conv4_3, 3, epsilon=1e-12
                )
                # Learnable per-channel scale, initialized to 20.0 as in the
                # SSD paper.
                scale_initializer = tf.ones(
                    [1, 1, 1, vgg_conv4_3.shape[3]]
                ) * 20.0
                scale = tf.get_variable(
                    'gamma',
                    dtype=vgg_conv4_3.dtype.base_dtype,
                    initializer=scale_initializer
                )
                vgg_conv4_3_norm = tf.multiply(vgg_conv4_3_norm, scale)
            # BUG FIX: log the *normalized* tensor here; the original code
            # logged the raw conv4_3 output under the "normalized" name.
            tf.summary.histogram('conv4_3_normalized_hist', vgg_conv4_3_norm)
            tf.add_to_collection('FEATURE_MAPS', vgg_conv4_3_norm)

            # The original SSD paper uses a modified version of the vgg16
            # network, which we'll modify here
            vgg_network_truncation_endpoint = base_net_endpoints[
                scope + '/vgg_16/conv5/conv5_3']
            tf.summary.histogram(
                'conv5_3_hist', vgg_network_truncation_endpoint
            )

            # Extra layers for vgg16 as detailed in paper
            with tf.variable_scope('extra_feature_layers'):
                self._init_vgg16_extra_layers()
                # pool5 keeps the spatial resolution (3x3 window, stride 1),
                # per the SSD modification of VGG-16.
                net = tf.nn.max_pool(
                    vgg_network_truncation_endpoint, [1, 3, 3, 1],
                    padding='SAME', strides=[1, 1, 1, 1], name='pool5'
                )
                net = self.conv6(net)
                net = self.activation_fn(net)
                net = self.conv7(net)
                net = self.activation_fn(net)
                tf.summary.histogram('conv7_hist', net)
                tf.add_to_collection('FEATURE_MAPS', net)
                net = self.conv8_1(net)
                net = self.activation_fn(net)
                net = self.conv8_2(net)
                net = self.activation_fn(net)
                tf.summary.histogram('conv8_hist', net)
                tf.add_to_collection('FEATURE_MAPS', net)
                net = self.conv9_1(net)
                net = self.activation_fn(net)
                net = self.conv9_2(net)
                net = self.activation_fn(net)
                tf.summary.histogram('conv9_hist', net)
                tf.add_to_collection('FEATURE_MAPS', net)
                net = self.conv10_1(net)
                net = self.activation_fn(net)
                net = self.conv10_2(net)
                net = self.activation_fn(net)
                tf.summary.histogram('conv10_hist', net)
                tf.add_to_collection('FEATURE_MAPS', net)
                net = self.conv11_1(net)
                net = self.activation_fn(net)
                net = self.conv11_2(net)
                net = self.activation_fn(net)
                tf.summary.histogram('conv11_hist', net)
                tf.add_to_collection('FEATURE_MAPS', net)

            # This parameter determines onto which variables we try to load
            # the pretrained weights
            self.pretrained_weights_scope = scope + '/vgg_16'

        # It's actually an ordered dict
        return utils.convert_collection_to_dict('FEATURE_MAPS')

    def get_trainable_vars(self):
        """
        Returns a list of the variables that are trainable.

        Returns:
            trainable_variables: a tuple of `tf.Variable`.
        """
        return snt.get_variables_in_module(self)
Originally I was going to just buy one, but I received a large number of offers ranging from $10–$20, so I thought I'd just allow everyone to showcase their talents! I have no real requirements as long as FansHut is visible and the page looks sleek but professional. After this, I need 2 more Twitter pages completed: one for my personal account and one for another of my websites. The winner will also get these jobs.
import os

import furl
from mako.lookup import TemplateLookup

from mfr.core import extension
from mfr.extensions.image import settings
from mfr.extensions.utils import munge_url_for_localdev, escape_url_for_template


class ImageRenderer(extension.BaseRenderer):
    """Renders an image file, optionally through the export (resize/convert)
    service when the settings request a maximum size and/or target type."""

    TEMPLATE = TemplateLookup(
        directories=[
            os.path.join(os.path.dirname(__file__), 'templates')
        ]).get_template('viewer.mako')

    def render(self):
        """Return the rendered viewer template.

        Serves the raw download URL when the extension is excluded from
        export or when no export format is configured; otherwise points the
        viewer at the export URL with the appropriate ``format`` argument.
        """
        self.metrics.add('needs_export', False)

        export_format = self._export_format()
        if self.metadata.ext in settings.EXPORT_EXCLUSIONS \
                or export_format is None:
            # Serve the original file directly (no export round-trip).
            return self._render_direct()

        exported_url = furl.furl(self.export_url)
        exported_url.args['format'] = export_format

        self.metrics.add('needs_export', True)
        safe_url = escape_url_for_template(exported_url.url)
        return self.TEMPLATE.render(base=self.assets_url, url=safe_url)

    def _render_direct(self):
        # Shared fallback path: render the viewer against the (possibly
        # localdev-munged) raw download URL. Was duplicated in two branches.
        download_url = munge_url_for_localdev(self.url)
        safe_url = escape_url_for_template(download_url.geturl())
        return self.TEMPLATE.render(base=self.assets_url, url=safe_url)

    @staticmethod
    def _export_format():
        """Return the ``format`` query value for the export service, or
        ``None`` when exporting is disabled by settings."""
        if settings.EXPORT_MAXIMUM_SIZE and settings.EXPORT_TYPE:
            return '{}.{}'.format(settings.EXPORT_MAXIMUM_SIZE,
                                  settings.EXPORT_TYPE)
        if settings.EXPORT_TYPE:
            return settings.EXPORT_TYPE
        return None

    @property
    def file_required(self):
        # The renderer only builds URLs; it never reads the file contents.
        return False

    @property
    def cache_result(self):
        return False
Well, Cathie Black's been let out of her cage after a good two weeks of Sarah Palinizing and what insights has she gleaned in the hour or two she spent in public schools? Looks like she's fixing to fire teachers. There's no better way, apparently, to help city children than by firing their teachers. Arne Duncan and Bill Gates have determined larger class sizes are the way to go, and Cathie is gonna help them get their wish. Now when Cathie fires teachers, which she has already determined to do, the way she wants to do it is Mike Bloomberg's way, however the hell she feels like it. That's why she wants to get rid of last in first out. You see, teachers with no experience have a lot of enthusiasm and she doesn't want to lose them. Perhaps, in that case, it would be better not to fire them. But Cathie doesn't think like that. Since she's already determined to fire people, why not go after the older and higher-paid teachers? That would put a bigger dent in the bottom line. And then she wouldn't have to bother with any of that nasty due process in that inconvenient tenure law. Oh, she wants to get rid of that, too. Perish forbid any American worker should have job security. We'd all be better off if people like Cathie Black could fire us whenever the mood struck them, for any reason whatsoever or for no reason at all. I don't know about you, but it seems to me that job security becomes more important as you get older. When I started teaching I wasn't married, didn't have kids, and lived in a hovel of a rented apartment. I'm not complaining, but it was a lot easier to lose my job under those circumstances. If they want to get rid of me now, it ought to be for a reason other than the inconvenience of paying my salary. There is a process for getting rid of teachers, and it's not my fault Joel Klein spent years trying to get rid of teachers who gave away watches or used DOE fax machines, making a mockery of the system and failing miserably in his efforts. 
But Cathie Black agrees with everything Joel Klein did. That's fine with me. Let her go after teachers for nonsense. There's a reason teachers need tenure, and that's to protect us from demagogues like Cathie Black, who get into education for two weeks and have the audacity to behave as though they're experts. I wouldn't want her teaching my kid. Fortunately, she isn't licensed to teach, and they aren't yet handing out waivers for that.
""" Juju helpers """ import os import sys from concurrent import futures from functools import partial, wraps from subprocess import DEVNULL, PIPE, CalledProcessError, Popen, TimeoutExpired import yaml import macumba from bundleplacer.charmstore_api import CharmStoreID from conjureup import async from conjureup.app_config import app from conjureup.utils import juju_path, run from macumba.v2 import JujuClient JUJU_ASYNC_QUEUE = "juju-async-queue" this = sys.modules[__name__] # vars this.IS_AUTHENTICATED = False this.CLIENT = None this.USER_TAG = None # login decorator def requires_login(f): def _decorator(*args, **kwargs): if not this.IS_AUTHENTICATED: login(force=True) return f(*args, **kwargs) return wraps(f)(_decorator) def read_config(name): """ Reads a juju config file Arguments: name: filename without extension (ext defaults to yaml) Returns: dictionary of yaml object """ abs_path = os.path.join(juju_path(), "{}.yaml".format(name)) if not os.path.isfile(abs_path): raise Exception("Cannot load {}".format(abs_path)) return yaml.safe_load(open(abs_path)) def get_current_controller(): """ Grabs the current default controller """ try: return get_controllers()['current-controller'] except KeyError: return None def get_controller(id): """ Return specific controller Arguments: id: controller id """ if 'controllers' in get_controllers() \ and id in get_controllers()['controllers']: return get_controllers()['controllers'][id] return None def get_controller_in_cloud(cloud): """ Returns a controller that is bootstrapped on the named cloud Arguments: cloud: cloud to check for Returns: available controller or None if nothing available """ controllers = get_controllers()['controllers'].items() for controller_name, controller in controllers: if cloud == controller['cloud']: return controller_name return None def login(force=False): """ Login to Juju API server """ if this.IS_AUTHENTICATED is True and not force: return if app.current_controller is None: raise Exception("Unable 
to determine current controller") if app.current_model is None: raise Exception("Tried to login with no current model set.") env = get_controller(app.current_controller) account = get_account(app.current_controller) uuid = get_model(app.current_controller, app.current_model)['model-uuid'] server = env['api-endpoints'][0] this.USER_TAG = "user-{}".format(account['user'].split("@")[0]) url = os.path.join('wss://', server, 'model', uuid, 'api') this.CLIENT = JujuClient( user=this.USER_TAG, url=url, password=account['password']) try: this.CLIENT.login() except macumba.errors.LoginError as e: raise e this.IS_AUTHENTICATED = True # noqa def bootstrap(controller, cloud, series="xenial", credential=None): """ Performs juju bootstrap If not LXD pass along the newly defined credentials Arguments: controller: name of your controller cloud: name of local or public cloud to deploy to series: define the bootstrap series defaults to xenial log: application logger credential: credentials key """ cmd = "juju-2.0 bootstrap {} {} " \ "--config image-stream=daily ".format( controller, cloud) cmd += "--config enable-os-upgrade=false " cmd += "--default-model conjure-up " if app.argv.http_proxy: cmd += "--config http-proxy={} ".format(app.argv.http_proxy) if app.argv.https_proxy: cmd += "--config https-proxy={} ".format(app.argv.https_proxy) if app.argv.apt_http_proxy: cmd += "--config apt-http-proxy={} ".format(app.argv.apt_http_proxy) if app.argv.apt_https_proxy: cmd += "--config apt-https-proxy={} ".format(app.argv.apt_https_proxy) if app.argv.no_proxy: cmd += "--config no-proxy={} ".format(app.argv.no_proxy) if app.argv.bootstrap_timeout: cmd += "--config bootstrap-timeout={} ".format( app.argv.bootstrap_timeout) if app.argv.bootstrap_to: cmd += "--to {} ".format(app.argv.bootstrap_to) cmd += "--bootstrap-series={} ".format(series) if cloud != "localhost": cmd += "--credential {}".format(credential) app.log.debug("bootstrap cmd: {}".format(cmd)) try: pathbase = 
os.path.join(app.config['spell-dir'], '{}-bootstrap').format(app.current_controller) with open(pathbase + ".out", 'w') as outf: with open(pathbase + ".err", 'w') as errf: p = Popen(cmd, shell=True, stdout=outf, stderr=errf) while p.poll() is None: async.sleep_until(2) return p except CalledProcessError: raise Exception("Unable to bootstrap.") except async.ThreadCancelledException: p.terminate() try: p.wait(timeout=2) except TimeoutExpired: p.kill() p.wait() return p except Exception as e: raise e def bootstrap_async(controller, cloud, credential=None, exc_cb=None): """ Performs a bootstrap asynchronously """ return async.submit(partial(bootstrap, controller=controller, cloud=cloud, credential=credential), exc_cb, queue_name=JUJU_ASYNC_QUEUE) def model_available(): """ Checks if juju is available Returns: True/False if juju status was successful and a working model is found """ try: run('juju-2.0 status', shell=True, check=True, stderr=DEVNULL, stdout=DEVNULL) except CalledProcessError: return False return True def autoload_credentials(): """ Automatically checks known places for cloud credentials """ try: run('juju-2.0 autoload-credentials', shell=True, check=True) except CalledProcessError: return False return True def get_credential(cloud, user): """ Get credentials for user Arguments: cloud: cloud applicable to user credentials user: user listed in the credentials """ creds = get_credentials() if cloud in creds.keys(): if user in creds[cloud].keys(): return creds[cloud][user] raise Exception( "Unable to locate credentials for: {}".format(user)) def get_credentials(secrets=True): """ List credentials This will fallback to reading the credentials file directly Arguments: secrets: True/False whether to show secrets (ie password) Returns: List of credentials """ cmd = 'juju list-credentials --format yaml' if secrets: cmd += ' --show-secrets' sh = run(cmd, shell=True, stdout=PIPE, stderr=PIPE) if sh.returncode > 0: try: env = read_config('credentials') return 
env['credentials'] except: raise Exception( "Unable to list credentials: {}".format( sh.stderr.decode('utf8'))) env = yaml.safe_load(sh.stdout.decode('utf8')) return env['credentials'] def get_clouds(): """ List available clouds Returns: Dictionary of all known clouds including newly created MAAS/Local """ sh = run('juju-2.0 list-clouds --format yaml', shell=True, stdout=PIPE, stderr=PIPE) if sh.returncode > 0: raise Exception( "Unable to list clouds: {}".format(sh.stderr.decode('utf8')) ) return yaml.safe_load(sh.stdout.decode('utf8')) def get_cloud(name): """ Return specific cloud information Arguments: name: name of cloud to query, ie. aws, lxd, local:provider Returns: Dictionary of cloud attributes """ if name in get_clouds().keys(): return get_clouds()[name] raise LookupError("Unable to locate cloud: {}".format(name)) def deploy(bundle): """ Juju deploy bundle Arguments: bundle: Name of bundle to deploy, can be a path to local bundle file or charmstore path. """ try: return run('juju-2.0 deploy {}'.format(bundle), shell=True, stdout=DEVNULL, stderr=PIPE) except CalledProcessError as e: raise e def add_machines(machines, msg_cb=None, exc_cb=None): """Add machines to model Arguments: machines: list of dictionaries of machine attributes. 
The key 'series' is required, and 'constraints' is the only other supported key """ def _prepare_constraints(constraints): new_constraints = {} if not isinstance(constraints, str): app.log.debug( "Invalid constraints: {}, skipping".format( constraints)) return new_constraints list_constraints = constraints.split(' ') for c in list_constraints: try: constraint, constraint_value = c.split('=') new_constraints[constraint] = constraint_value except ValueError as e: app.log.debug("Skipping constraint: {} ({})".format(c, e)) return new_constraints @requires_login def _add_machines_async(): machine_params = [{"series": m['series'], "constraints": _prepare_constraints( m.get('constraints', "")), "jobs": ["JobHostUnits"]} for m in machines] app.log.debug("AddMachines: {}".format(machine_params)) try: machine_response = this.CLIENT.Client( request="AddMachines", params={"params": machine_params}) app.log.debug("AddMachines returned {}".format(machine_response)) except Exception as e: if exc_cb: exc_cb(e) return if msg_cb: msg_cb("Added machines: {}".format(machine_response)) return machine_response return async.submit(_add_machines_async, exc_cb, queue_name=JUJU_ASYNC_QUEUE) def deploy_service(service, default_series, msg_cb=None, exc_cb=None): """Juju deploy service. If the service's charm ID doesn't have a revno, will query charm store to get latest revno for the charm. If the service's charm ID has a series, use that, otherwise use the provided default series. 
Arguments: service: Service to deploy msg_cb: message callback exc_cb: exception handler callback Returns a future that will be completed after the deploy has been submitted to juju """ @requires_login def _deploy_async(): if service.csid.series == "": service.csid.series = default_series if service.csid.rev == "": id_no_rev = service.csid.as_str_without_rev() mc = app.metadata_controller futures.wait([mc.metadata_future]) info = mc.get_charm_info(id_no_rev, lambda _: None) service.csid = CharmStoreID(info["Id"]) # Add charm to Juju app.log.debug("Adding Charm {}".format(service.csid.as_str())) rv = this.CLIENT.Client(request="AddCharm", params={"url": service.csid.as_str()}) app.log.debug("AddCharm returned {}".format(rv)) # We must load any resources prior to deploying resources = app.metadata_controller.get_resources( service.csid.as_str_without_rev()) app.log.debug("Resources: {}".format(resources)) if resources: params = {"tag": "application-{}".format(service.csid.name), "url": service.csid.as_str(), "resources": resources} app.log.debug("AddPendingResources: {}".format(params)) resource_ids = this.CLIENT.Resources( request="AddPendingResources", params=params) app.log.debug("AddPendingResources returned: {}".format( resource_ids)) application_to_resource_map = {} for idx, resource in enumerate(resources): pid = resource_ids['pending-ids'][idx] application_to_resource_map[resource['Name']] = pid service.resources = application_to_resource_map deploy_args = service.as_deployargs() deploy_args['series'] = service.csid.series app_params = {"applications": [deploy_args]} app.log.debug("Deploying {}: {}".format(service, app_params)) deploy_message = "Deploying {}... 
".format( service.service_name) if msg_cb: msg_cb("{}".format(deploy_message)) rv = this.CLIENT.Application(request="Deploy", params=app_params) app.log.debug("Deploy returned {}".format(rv)) if msg_cb: msg_cb("{} deployed.".format(service.service_name)) return async.submit(_deploy_async, exc_cb, queue_name=JUJU_ASYNC_QUEUE) def set_relations(services, msg_cb=None, exc_cb=None): """ Juju set relations Arguments: services: list of services with relations to set msg_cb: message callback exc_cb: exception handler callback """ relations = set() for service in services: for a, b in service.relations: if (a, b) not in relations and (b, a) not in relations: relations.add((a, b)) @requires_login def do_add_all(): if msg_cb: msg_cb("Setting application relations") for a, b in list(relations): params = {"Endpoints": [a, b]} try: app.log.debug("AddRelation: {}".format(params)) rv = this.CLIENT.Application(request="AddRelation", params=params) app.log.debug("AddRelation returned: {}".format(rv)) except Exception as e: if exc_cb: exc_cb(e) return if msg_cb: msg_cb("Completed setting application relations") return async.submit(do_add_all, exc_cb, queue_name=JUJU_ASYNC_QUEUE) def get_controller_info(name=None): """ Returns information on current controller Arguments: name: if set shows info controller, otherwise displays current. 
""" cmd = 'juju-2.0 show-controller --format yaml' if name is not None: cmd += ' {}'.format(name) sh = run(cmd, shell=True, stdout=PIPE, stderr=PIPE) if sh.returncode > 0: raise Exception( "Unable to determine controller: {}".format( sh.stderr.decode('utf8'))) out = yaml.safe_load(sh.stdout.decode('utf8')) try: return next(iter(out.values())) except: return out def get_controllers(): """ List available controllers Returns: List of known controllers """ sh = run('juju-2.0 list-controllers --format yaml', shell=True, stdout=PIPE, stderr=PIPE) if sh.returncode > 0: raise LookupError( "Unable to list controllers: {}".format(sh.stderr.decode('utf8'))) env = yaml.safe_load(sh.stdout.decode('utf8')) return env def get_account(controller): """ List account information for controller Arguments: controller: controller id Returns: Dictionary containing list of accounts for controller and the current account in use. """ return get_accounts().get(controller, {}) def get_accounts(): """ List available accounts Returns: List of known accounts """ env = os.path.join(juju_path(), 'accounts.yaml') if not os.path.isfile(env): raise Exception( "Unable to find: {}".format(env)) with open(env, 'r') as c: env = yaml.load(c) return env['controllers'] raise Exception("Unable to find accounts") def get_model(controller, name): """ List information for model Arguments: name: model name controller: name of controller to work in Returns: Dictionary of model information """ models = get_models(controller)['models'] for m in models: if m['name'] == name: return m raise LookupError( "Unable to find model: {}".format(name)) def add_model(name, controller): """ Adds a model to current controller Arguments: controller: controller to add model in """ sh = run('juju-2.0 add-model {} -c {}'.format(name, controller), shell=True, stdout=DEVNULL, stderr=PIPE) if sh.returncode > 0: raise Exception( "Unable to create model: {}".format(sh.stderr.decode('utf8'))) def get_models(controller): """ List available 
models Arguments: controller: existing controller to get models for Returns: List of known models """ sh = run('juju-2.0 list-models --format yaml -c {}'.format(controller), shell=True, stdout=PIPE, stderr=PIPE) if sh.returncode > 0: raise LookupError( "Unable to list models: {}".format(sh.stderr.decode('utf8'))) out = yaml.safe_load(sh.stdout.decode('utf8')) return out def get_current_model(): try: return get_models()['current-model'] except: return None def version(): """ Returns version of Juju """ sh = run('juju-2.0 version', shell=True, stdout=PIPE, stderr=PIPE) if sh.returncode > 0: raise Exception( "Unable to get Juju Version".format(sh.stderr.decode('utf8'))) out = sh.stdout.decode('utf8') if isinstance(out, list): return out.pop() else: return out
Water: The best choice for your wallet and your body! Water is a great drink choice for kids. Make sure that a pitcher of water is available every day at the dinner table. Kids should also drink plenty of water when they are playing and being active. Jazz up water with these simple tricks!
# -*- coding: utf-8 -*-
from ws4py.messaging import TextMessage, BinaryMessage, CloseControlMessage,\
    PingControlMessage, PongControlMessage
from ws4py.framing import Frame, OPCODE_CONTINUATION, OPCODE_TEXT, \
    OPCODE_BINARY, OPCODE_CLOSE, OPCODE_PING, OPCODE_PONG
from ws4py.exc import FrameTooLargeException, ProtocolException, InvalidBytesError,\
    TextFrameEncodingException, UnsupportedFrameTypeException, StreamClosed


class Stream(object):
    def __init__(self):
        """
        Represents a websocket stream of bytes flowing in and out.

        The stream doesn't know about the data provider itself and
        doesn't even know about sockets. Instead the stream simply
        yields for more bytes whenever it requires them. The stream owner
        is responsible to provide the stream with those bytes until a
        frame can be interpreted.

        >>> s = Stream()
        >>> s.parser.send(BYTES)
        >>> s.has_message
        False
        >>> s.parser.send(MORE_BYTES)
        >>> s.has_message
        True
        >>> s.message
        <TextMessage ... >
        """
        self.message = None
        """
        Parsed test or binary messages. Whenever the parser
        reads more bytes from a fragment message, those bytes
        are appended to the most recent message.
        """

        self.pings = []
        """
        Parsed ping control messages. They are instances of
        messaging.PingControlMessage
        """

        self.pongs = []
        """
        Parsed pong control messages. They are instances of
        messaging.PongControlMessage
        """

        self.closing = None
        """
        Parsed close control messsage. Instance of
        messaging.CloseControlMessage
        """

        self.errors = []
        """
        Detected errors while parsing.
        Instances of messaging.CloseControlMessage
        """

        self.parser = self.receiver()
        """
        Parser in charge to process bytes it is fed with.
        """

        # Python generators must be initialized once: advance the parser
        # to its first `yield` so it is ready to accept `send()` calls.
        self.parser.next()

    def text_message(self, text):
        """
        Returns a messaging.TextMessage instance
        ready to be built. Convenience method so
        that the caller doesn't need to import the
        TextMessage class itself.

        @param text: data to be carried by the message
        """
        return TextMessage(text=text)

    def binary_message(self, bytes):
        """
        Returns a messaging.BinaryMessage instance
        ready to be built. Convenience method so
        that the caller doesn't need to import the
        BinaryMessage class itself.

        @param bytes: data to be carried by the message
        """
        return BinaryMessage(bytes)

    @property
    def has_message(self):
        """
        Checks if the stream has received any message
        which, if fragmented, is completed.
        """
        if self.message is not None:
            return self.message.completed

        return False

    def close(self, code=1000, reason=''):
        """
        Returns a close control message built from
        a messaging.CloseControlMessage instance.

        @param code: closing status code
        @param reason: status message
        @return: bytes representing a close control single framed message
        """
        return CloseControlMessage(code=code, reason=reason).single()

    def ping(self, data=''):
        """
        Returns a ping control message built from
        a messaging.PingControlMessage instance.

        @param data: ping data
        @return: bytes representing a ping single framed message
        """
        return PingControlMessage(data).single()

    def pong(self, data=''):
        """
        Returns a ping control message built from
        a messaging.PongControlMessage instance.

        @param data: pong data
        @return: bytes representing a pong single framed message
        """
        return PongControlMessage(data).single()

    def receiver(self):
        """
        Parser that keeps trying to interpret bytes it is fed with as
        incoming frames part of a message.

        Control message are single frames only while data messages, like text
        and binary, may be fragmented accross frames.

        The way it works is by instanciating a framing.Frame object,
        then running its parser generator which yields how much bytes
        it requires to performs its task. The stream parser yields this value
        to its caller and feeds the frame parser.

        When the frame parser raises StopIteration, the stream parser
        tries to make sense of the parsed frame. It dispatches the frame's bytes
        to the most appropriate message type based on the frame's opcode.

        Overall this makes the stream parser totally agonstic to
        the data provider.
        """
        running = True
        while running:
            # One Frame object per wire frame; its own generator-based
            # parser tells us (via yield) how many bytes it still needs.
            frame = Frame()
            while True:
                try:
                    bytes = (yield frame.parser.next())
                    if bytes is None:
                        raise InvalidBytesError()

                    frame.parser.send(bytes)
                except StopIteration:
                    # The frame parser is done: the frame is fully read.
                    bytes = frame.body or ''
                    if frame.masking_key and bytes:
                        bytes = frame.unmask(bytes)

                    if frame.opcode == OPCODE_TEXT:
                        if self.message and not self.message.completed:
                            # We got a text frame before we completed the previous one
                            raise ProtocolException()
                        try:
                            m = TextMessage(bytes.decode("utf-8", "replace"))
                            m.completed = (frame.fin == 1)
                            self.message = m
                        except UnicodeDecodeError:
                            # 1007: invalid payload data (bad UTF-8)
                            self.errors.append(CloseControlMessage(code=1007))
                            break

                    elif frame.opcode == OPCODE_BINARY:
                        m = BinaryMessage(bytes)
                        m.completed = (frame.fin == 1)
                        self.message = m

                    elif frame.opcode == OPCODE_CONTINUATION:
                        # Continuation extends the in-flight fragmented message.
                        m = self.message
                        if m is None:
                            raise ProtocolException()
                        m.completed = (frame.fin == 1)
                        if m.opcode == OPCODE_TEXT:
                            try:
                                m.extend(bytes.decode("utf-8", "replace"))
                            except UnicodeDecodeError:
                                self.errors.append(CloseControlMessage(code=1007))
                                break
                        else:
                            m.extend(bytes)

                    elif frame.opcode == OPCODE_CLOSE:
                        self.closing = CloseControlMessage(reason=bytes.decode("utf-8", "replace"))

                    elif frame.opcode == OPCODE_PING:
                        self.pings.append(PingControlMessage(bytes))

                    elif frame.opcode == OPCODE_PONG:
                        self.pongs.append(PongControlMessage(bytes))

                    else:
                        # 1003: unsupported data (unknown opcode)
                        self.errors.append(CloseControlMessage(code=1003))

                    # When the frame's payload is empty, we must yield
                    # once more so that the caller is properly aligned
                    if not bytes:
                        yield 0

                    break
                except ProtocolException:
                    self.errors.append(CloseControlMessage(code=1002))
                except FrameTooLargeException:
                    self.errors.append(CloseControlMessage(code=1004))
                except StreamClosed:
                    running = False
                    break

            frame.parser.close()
Mexico lacks the spectacular growth rates of China and India, but it has one thing going for it: proximity to the United States. That means big business for maquiladoras: factories that import parts and machinery duty-free, then assemble, manufacture or package them and ship them back to the country of origin or to a third country. Mexico's government has been a big supporter of maquiladoras since 1965, when the end of the U.S. guest worker program sent thousands of well-paid Mexican workers home. "It's a way to transfer technology to Mexico, upgrade skills and increase the demand for Mexican goods," says Peter Linneman, chief economist for NAI Global, a New Jersey real estate services firm. U.S. companies like investing in maquiladoras because they offer lower transportation costs than factories in Asia, while wages and utilities are a fraction of what they would be in Texas or California. As more multinational companies have taken advantage of Mexico's cheap land and skilled workforce, construction standards have improved, putting them on par with those in the United States. While demand for office space is greatest in Mexico City and Monterrey, hotels are filling every inch along the coasts that doesn't have an ancient ruin on it, especially around Los Cabos and the Mayan Riviera. But the real prize will go to investors who capture the nascent growth in consumer spending. The unprecedented political and economic stability Mexico has experienced since the end of one-party rule in 2000 has produced a thriving middle class that has more disposable income, but only 2,500 malls in the entire country in which to spend it. The United States, with three times Mexico's population, has more than 90,000 malls. "A high-end resort in Cabo is obvious, so it's a competitive market," says Héctor Klerian, executive vice-president of Chicago-based Jones Lang LaSalle Inc.'s Mexico branch. 
"But there's a huge opportunity to build shopping centers or business hotels in mid-sized cities" like Zapopan, Mexicali and Hermosillo. One Chicago company taking aim at this market is Walton Street Capital LLC, which has been investing in Mexican malls and movie theaters since 1993. The company is so gung-ho on Mexico it's raising $350 million for a fund exclusively for property there. Overall, Mexico has attracted more than $17 billion of foreign direct investment annually since 2000. The pro-business government of President Felipe Calderón is busy building airports, seaports and roads to attract even more investment. And a provision of the North American Free Trade Agreement gives U.S. and Canadian investors most-favored nation trading status, meaning they're treated the same as local developers. That doesn't mean anybody can walk across the border and snap up cheap land. Many maquiladoras are built by the manufacturers themselves, leaving fewer opportunities for real estate developers. Patience, too, is a virtue, not so much because of a mañana attitude but because there is more bureaucracy in real estate deals. There's also one big cultural difference: Mexican business relationships are more personal. They're built less on experience and reputation than on trust and compatibility. For that reason, experts say it's critical to choose a local partner who's simpatico.
############################################################################### # # Tests for XlsxWriter. # # Copyright (c), 2013-2015, John McNamara, jmcnamara@cpan.org # from ..excel_comparsion_test import ExcelComparisonTest from ...workbook import Workbook class TestCompareXLSXFiles(ExcelComparisonTest): """ Test file created by XlsxWriter against a file created by Excel. """ def setUp(self): self.maxDiff = None filename = 'types05.xlsx' test_dir = 'xlsxwriter/test/comparison/' self.got_filename = test_dir + '_test_' + filename self.exp_filename = test_dir + 'xlsx_files/' + filename self.ignore_files = ['xl/calcChain.xml', '[Content_Types].xml', 'xl/_rels/workbook.xml.rels'] self.ignore_elements = {} def test_write_formula_default(self): """Test writing formulas with strings_to_formulas on.""" workbook = Workbook(self.got_filename) worksheet = workbook.add_worksheet() worksheet.write(0, 0, '=1+1', None, 2) worksheet.write_string(1, 0, '=1+1') workbook.close() self.assertExcelEqual() def test_write_formula_implicit(self): """Test writing formulas with strings_to_formulas on.""" workbook = Workbook(self.got_filename, {'strings_to_formulas': True}) worksheet = workbook.add_worksheet() worksheet.write(0, 0, '=1+1', None, 2) worksheet.write_string(1, 0, '=1+1') workbook.close() self.assertExcelEqual() def test_write_formula_explicit(self): """Test writing formulas with strings_to_formulas off.""" workbook = Workbook(self.got_filename, {'strings_to_formulas': False}) worksheet = workbook.add_worksheet() worksheet.write_formula(0, 0, '=1+1', None, 2) worksheet.write(1, 0, '=1+1') workbook.close() self.assertExcelEqual()
With a sticker price of $179 and the simple to use Chrome OS making the transition from Windows easier, expect the little Asus Chromebox to make big waves. What is the Asus Chromebox? Asus, a world-leader in technology manufacturing, has recently announced the upcoming release of a new Chrome-based desktop mini-PC, the Asus Chromebox. The new product is a fully functional PC with all the specs and inner-hardware of a Chromebook, but in a compact box nearly small enough to fit in your hand. The Asus Chromebox is a mini-PC designed around the same hardware of the recently released Chromebooks, like the Acer C720, but in a package that is meant for use on a desk with an accompanying monitor, mouse, and keyboard. The power behind the Chrome-based systems is the lightweight operating system, Google's Chrome OS, which provides access to Google's web-based ecosystem through the browser based interface. Users are meant to access the web, videos, and productivity services all through the Chrome browser and Google account. This will allow access to Google services such as YouTube, Google Docs, Google +, Gmail, Google Drive, and more. The two main features of the Asus Chromebox that really stand out are its size and price point. First, Asus is listing the price of the Chromebox at $179, which brings the new PC in at a substantially lower cost than the Chromebook. Then comes the overall size, the Asus Chromebox is said to measure 4.88" by 4.88" by 1.65", which is easily small enough to fit on almost every desk. The Chromebox will be available in higher-end variants with a 1.7GHz Core i3 and a 2.1GHz Core i7 and up to 4GB of RAM. Unfortunately, the Core i7 versions are not planned for release in American markets. At this time, no word has been given as to the scheduled release date of the new product.
# -*- coding: utf-8 -*- """ Dependency: `git-remote-bzr` from https://github.com/felipec/git-remote-bzr must be in the `$PATH`. """ from __future__ import absolute_import, print_function import argparse import os import subprocess from contextlib import contextmanager from pkg_resources import resource_string import yaml @contextmanager def cd(path): cwd = os.getcwd() os.chdir(path) yield os.chdir(cwd) class Migrate(object): def __init__(self, path, push=False, mapping=None): self.path = path self.push = push self.mapping = mapping def _init_git(self, project): # we keep the serie's name so we can handle both projects: # lp:banking-addons/7.0 # lp:banking-addons/bank-statement-reconcile-7.0 name = project.replace('/', '-') repo = os.path.join(self.path, name) print('Working on', repo) if not os.path.exists(repo): os.mkdir(repo) with cd(repo): print(' git init', name) subprocess.check_output(['git', 'init']) return repo def _add_remote(self, repo, name, remote): with cd(repo): remotes = subprocess.check_output(['git', 'remote']) remotes = remotes.split('\n') if name not in remotes: print(' git remote add', name, remote) subprocess.check_output(['git', 'remote', 'add', name, remote]) def _add_bzr_branch(self, repo, bzr_branch, gh_branch): with cd(repo): self._add_remote(repo, gh_branch, "bzr::%s" % bzr_branch) print(' git fetch', gh_branch, 'from', bzr_branch) subprocess.check_output(['git', 'fetch', gh_branch]) def _push_to_github(self, repo, refs): with cd(repo): print(' git push github', refs) if self.push: subprocess.check_output( ['git', 'push', 'github', refs]) def _push_tags_to_github(self, repo): with cd(repo): print(' git push github --tags') if self.push: subprocess.check_output( ['git', 'push', 'github', '--tags']) def _parse_mapping(self): if self.mapping: projects = open(self.mapping, 'r') else: projects = resource_string(__name__, 'branches.yaml') projects = yaml.load(projects) return projects def copy_branches(self, only_projects=None): projects = 
self._parse_mapping() for project in projects['projects']: gh_url = project['github'] gh_name = gh_url[15:-4] if only_projects: if gh_name not in only_projects: continue repo = self._init_git(gh_name) self._add_remote(repo, 'github', gh_url) for source, gh_branch in project['branches']: self._add_bzr_branch(repo, source, gh_branch) refs = ('refs/remotes/{branch}/master:' 'refs/heads/{branch}'.format(branch=gh_branch)) self._push_to_github(repo, refs) self._push_tags_to_github(repo) def main(): parser = argparse.ArgumentParser() parser.add_argument("path", help="Branches directory") parser.add_argument("--no-push", dest="push", action='store_false') parser.add_argument("--push", dest="push", action='store_true') parser.add_argument("--mapping", help="File that contains the declaration of the " "mapping.") parser.add_argument("--projects", nargs='*', help="Name of the Github projects that you want to " "migrate.") parser.set_defaults(push=False) args = parser.parse_args() if not os.path.exists(args.path): exit("Path %s does not exist" % args.path) if args.mapping and not os.path.exists(args.mapping): exit("File %s does not exist" % args.mapping) migration = Migrate(os.path.abspath(args.path), push=args.push, mapping=args.mapping) migration.copy_branches(only_projects=args.projects) if __name__ == '__main__': main()
Oh, the K-Stew/R-Patz saga could all have been so different. Turns out that all our lives could well have been VERY different, because Jennifer Lawrence was this close to being cast in the major role of Bella Swan in Twilight. As we all know, the part ended up going to Kristen Stewart, but just THINK of all the differences there could have been, had our favorite funny girl got the job. That sullen look Bella became famous for, thanks to Kristen Stewart's often less-than-upbeat looking face? It might never have existed. Bella could have been well-known for being "the chirpy one" in the Twilight series, for all we know. And let's not even THINK about the impact it might have had on their personal lives. Gone would have been the relationship between Jennifer and Nicholas Hoult, as she instead might have dated another young British actor, Robert Pattinson. R-Patz himself might not have had to go through the breakup trauma when Kristen Stewart later cheated on him with director Rupert Sanders. And he might never have ended up in a happy relationship with FKA Twigs. "I remember when the first movie came out, seeing Kristen Stewart on the red carpet and getting papped wherever she went. "I'd had no idea Twilight would be such a big deal. For me, and assuming for her, it was just another audition. Then it turned into this whole other thing." But then she starred in The Hunger Games, X-Men, Silver Linings Playbook, and loads of other great films. And then she became best friends with the equally funny Amy Schumer. So really, all is well. This story originally appeared on Cosmopolitan.co.uk. Minor edits have been made by the Cosmo.ph editors.
""" NBS14 test for allantools (https://github.com/aewallin/allantools) nbs14 datasets are from http://www.ieee-uffc.org/frequency-control/learning-riley.asp Stable32 was used to calculate the deviations we compare against. The small dataset and deviations are from http://www.ieee-uffc.org/frequency-control/learning-riley.asp http://www.wriley.com/paper1ht.htm see also: NIST Special Publication 1065 Handbook of Frequency Stability Analysis http://tf.nist.gov/general/pdf/2220.pdf around page 107 """ import math import time import sys import pytest import numpy import allantools as allan # 1000 point deviations from: # http://www.ieee-uffc.org/frequency-control/learning-riley.asp Table III # http://www.wriley.com/paper1ht.htm # http://tf.nist.gov/general/pdf/2220.pdf page 108 nbs14_1000_devs = [ [2.922319e-01, 9.965736e-02, 3.897804e-02], # 0 ADEV 1, 10, 100 [2.922319e-01, 9.159953e-02, 3.241343e-02], # 1 OADEV [2.922319e-01, 6.172376e-02, 2.170921e-02], # 2 MDEV #[2.922319e-01, 9.172131e-02, 3.501795e-02], # TOTDEV, http://www.ieee-uffc.org/frequency-control/learning-riley.asp # "Calculated using bias-corrected reflected method from endpoint-matched phase data" [2.922319e-01, 9.134743e-02, 3.406530e-02], # 3 TOTDEV, http://tf.nist.gov/general/pdf/2220.pdf page 108 # "Calculated using doubly reflected TOTVAR method" [2.943883e-01, 1.052754e-01, 3.910860e-02], # 4 HDEV [1.687202e-01, 3.563623e-01, 1.253382e-00], # 5 TDEV [2.943883e-01, 9.581083e-02, 3.237638e-02], # 6 OHDEV [2.884664e-01, 9.296352e-02, 3.206656e-02], # 7 standard deviation, sample (not population) [2.943883e-01, 9.614787e-02, 3.058103e-02], # 8 HTOTDEV #[2.418528e-01, 6.499161e-02, 2.287774e-02], # 9 MTOTDEV (from published table, WITH bias correction) [2.0664e-01, 5.5529e-02, 1.9547e-02], # MTOTDEV (from Stable32 v1.60 decade run, NO bias correction) #[1.396338e-01, 3.752293e-01, 1.320847e-00], # 10 TTOTDEV (from published table, WITH bias correction) [1.1930e-01, 3.2060e-01, 1.1285e+00 ], # 10 
TTOTDEV (from Stable 32 v1.60 decade run, NO bias correction) [1.0757e-01, 3.1789e-02, 5.0524e-03 ], ] # 11 THEO1 (tau= 10,100,1000, from Stable32, NO bias correction # this generates the nbs14 1000 point frequency dataset. # random number generator described in # http://www.ieee-uffc.org/frequency-control/learning-riley.asp # http://tf.nist.gov/general/pdf/2220.pdf page 107 # http://www.wriley.com/tst_suit.dat def nbs14_1000(): """ 1000-point test dataset. data is fractional frequency """ n = [0]*1000 n[0] = 1234567890 for i in range(999): n[i+1] = (16807*n[i]) % 2147483647 # the first three numbers are given in the paper, so check them: assert( n[1] == 395529916 and n[2] == 1209410747 and n[3] == 633705974 ) n = [x/float(2147483647) for x in n] # normalize so that n is in [0, 1] return n nbs14_f = nbs14_1000() nbs14_phase = allan.frequency2phase(nbs14_f, 1.0) def check_dev(name, tau, a, b): print(name," tau=",tau, " ", a ," == ", b) assert( numpy.isclose( a, b) ) def test_oadev_rt_nbs14_1k(): oadev_rt = allan.realtime.oadev_realtime(afs=[1,10,100],tau0=1.0) for x in nbs14_phase: oadev_rt.add_phase(x) for n in range(3): check_dev('OADEV', oadev_rt.taus()[n], oadev_rt.dev[n], nbs14_1000_devs[1][n]) def test_ohdev_rt_nbs14_1k(): dev_rt = allan.realtime.ohdev_realtime(afs=[1,10,100],tau0=1.0) for x in nbs14_phase: dev_rt.add_phase(x) for n in range(3): check_dev('OHDEV', dev_rt.taus()[n], dev_rt.dev[n], nbs14_1000_devs[6][n]) def test_tdev_rt_nbs14_1k(): dev_rt = allan.realtime.tdev_realtime(afs=[1,10,100],tau0=1.0) for x in nbs14_phase: dev_rt.add_phase(x) for n in range(3): check_dev('TDEV', dev_rt.taus()[n], dev_rt.dev[n], nbs14_1000_devs[5][n]) if __name__ == "__main__": test_oadev_rt_nbs14_1k() test_ohdev_rt_nbs14_1k() test_tdev_rt_nbs14_1k()
Empleosbac.net is the unavailable:th largest website within the world. The website is created in 14/03/2005, currently located in Virgin Islands, British and is running on IP 208.91.197.128 registered by Network Solutions, LLC network. This analysis about top100.lt has been displayed 588 times. Proven and positioned, and custom website traffic analysis! With years of experience, and a custom developed platform, WeNotify.net is your go to source for web analysis. Charlotte.de is a website that uses as title "Schmuckmanufaktur EHINGER SCHWARZ 1876 Onlineshop für Schmuck" and tries to rank for the keywords "Schmuck", "Schmuck Online Shop", "Schmuckmanufaktur" and "Wandelbarer Schmuck". Yesterday Charlotte.de ranked on position 2,060,459 worldwide. The server is located in , . This analysis about fasching-onlineshop.de has been displayed 238 times. Proven and positioned, and custom website traffic analysis! With years of experience, and a custom developed platform, WeNotify.net is your go to source for web analysis. Analyze page for Mp3plus.ru - Mp3plus including statistics, performance, general information and density value. Analyze page for Mybes.com - Mybes including statistics, performance, general information and density value. Analyze page for Sahabattauhiid.com - Sahabattauhiid including statistics, performance, general information and density value. Analyze page for Headstartmt.org - Headstartmt including statistics, performance, general information and density value. Analyze page for Vahperd.org - Vahperd including statistics, performance, general information and density value. Analyze page for Ourfootyteam.com - Ourfootyteam including statistics, performance, general information and density value. Empleosbac.net - Empleosbac . Empleosbac.net.pandastats.net Empleosbac.net is the unavailable:th largest website within the world. 
The website was created on 14/03/2005, is currently located in the British Virgin Islands, and runs on IP 208.91.197.128, registered on the Network Solutions, LLC network.
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import logging
import sys
import traceback

from pylib.base import base_test_result
from pylib.linker import test_case
from pylib.local.device import local_device_environment
from pylib.local.device import local_device_test_run


class LinkerExceptionTestResult(base_test_result.BaseTestResult):
  """Test result corresponding to a python exception in a host-custom test."""

  def __init__(self, test_name, exc_info):
    """Constructs a LinkerExceptionTestResult object.

    Args:
      test_name: name of the test which raised an exception.
      exc_info: exception info, ostensibly from sys.exc_info().
    """
    exc_type, exc_value, exc_traceback = exc_info
    trace_info = ''.join(traceback.format_exception(exc_type, exc_value,
                                                    exc_traceback))
    log_msg = 'Exception:\n' + trace_info

    super(LinkerExceptionTestResult, self).__init__(
        test_name,
        base_test_result.ResultType.FAIL,
        log="%s %s" % (exc_type, log_msg))


class LocalDeviceLinkerTestRun(local_device_test_run.LocalDeviceTestRun):
  """Runs linker test cases on locally attached devices."""

  def _CreateShards(self, tests):
    # Each test is its own shard.
    return tests

  def _GetTests(self):
    # Restrict the test list to what the oldest attached device supports.
    min_device_sdk = min(d.build_version_sdk for d in self._env.devices)
    return self._test_instance.GetTests(min_device_sdk)

  def _GetUniqueTestName(self, test):
    return test.qualified_name

  def _RunTest(self, device, test):
    """Runs a single linker test case, converting python exceptions raised by
    the test harness itself into a FAIL result instead of aborting the run."""
    assert isinstance(test, test_case.LinkerTestCaseBase)

    try:
      result = test.Run(device)
    except Exception:  # pylint: disable=broad-except
      # Use lazy %-formatting so the message is only built when logged.
      logging.exception('Caught exception while trying to run test: %s',
                        test.tagged_name)
      exc_info = sys.exc_info()
      result = LinkerExceptionTestResult(test.tagged_name, exc_info)

    return result, None

  def SetUp(self):
    @local_device_environment.handle_shard_failures_with(
        on_failure=self._env.BlacklistDevice)
    def individual_device_set_up(dev):
      dev.Install(self._test_instance.test_apk)

    self._env.parallel_devices.pMap(individual_device_set_up)

  def _ShouldShard(self):
    return True

  def TearDown(self):
    pass

  def TestPackage(self):
    pass
Deena told the Daily Mirror that she’d found the death scenes to be an “anti-climax” and seemed angry at how Viv’s death was portrayed on the show. However, while on This Morning, Deena explained the real story behind a headline that read “Viv’s Fury”. She said, “Substitute that for ‘sad and disappointed’. I’m not furious or angry at all. “I’ve loved my 18 years [at Emmerdale]. All good things have to come to an end and I’ve had such support from the viewers and everything. She said, “In the last year or so, [Viv’s] character had slightly changed and taken more of a backseat route to it all. “I was expecting my contract not to be renewed, so I wasn’t surprised. But I was a little bit shocked at how soon – they brought it forward to November. “I thought maybe I’d have a couple of months in the New Year – that was a surprise. “But I was told that I’d have a really big storyline, so when I read the script and I didn’t actually have anything to say, that was a surprise as well. Did you think Viv’s death could’ve been a tad more dramatic? I certainly did, especially as we didn’t see how Terry tried to save her! The scenes ended with him going into the burning shop, so I do think they could’ve expanded on that, but let us know your thoughts!
import re import string class HRMException(Exception): pass class TileError(HRMException): def __init__(self, data): super().__init__( "Bad tile address! " "Tile with address {} does not exist! " "Where do you think you're going?".format(data) ) class OutOfBoundsError(HRMException): def __init__(self): super().__init__( "Overflow! " "Each data unit is restricted to values between -999 and 999. " "That should be enough for anybody." ) class OperandsError(HRMException): def __init__(self, operator): super().__init__( "You can't {0} with mixed operands! " "{0}'ing between one letter and one number is invalid. " "Only nice respectable pairs of two letters or two numbers are allowed.! ".format( operator ) ) class HRMType: letters = set() def get(self, *_): return self.data class Empty(HRMType): def __init__(self, data): self.data = data class Number(HRMType): letters = set(string.digits) def __init__(self, data): self.data = int(data) class Word(HRMType): letters = set(string.ascii_letters) def __init__(self, data): self.data = str(data) class Pointer: letters = set("[]") def __init__(self, other): self.other = other self.letters |= other.letters self.pointer = False self.data = None def __call__(self, data): data = str(data) self.pointer = False if data[0] == "[": if data[-1] != "]": raise HRMException("Mismatched parenths") self.pointer = True data = data[1:-1] self.data = self.other(data).get() return self def get(self, hrm): if self.pointer: d = hrm[self.data] return d.data if isinstance(d, HRMBox) else d return self.data class HRMBox: def __init__(self, data): if isinstance(data, HRMBox): self.word = data.word self.data = data.data return self.word = False data = str(data) if set(data) <= set(string.digits + "-"): data = int(data) elif not len(data): raise ValueError("HRMBox needs to be at least a size of one.") elif set(data) <= set(string.ascii_letters): self.word = True data = ord(data[0].upper()) - 64 else: raise ValueError("HRMBox can only be numbers and 
digits.") self.data = data @property def data(self): return self._data @data.setter def data(self, value): if value >= 1000 or value <= -1000: raise OutOfBoundsError() self._data = value @property def item(self): if self.word: return chr(self.data + 64) return self.data def __int__(self): if self.word: pass return self.data def __index__(self): return self.__int__() def __repr__(self): return "HRMBox({})".format(self.item) def __sub__(self, other): if not isinstance(other, HRMBox): other = HRMBox(other) if self.word is not other.word: raise OperandsError("") return HRMBox(self.data - other.data) def __add__(self, other): if not isinstance(other, HRMBox): other = HRMBox(other) if self.word is not other.word: raise OperandsError("") return HRMBox(self.data + other.data) def __eq__(self, other): if not isinstance(other, HRMBox): other = HRMBox(other) return self.data == other.data def __lt__(self, other): if not isinstance(other, HRMBox): other = HRMBox(other) return self.data < other.data COMMANDS = {} def hrm_fn(*types): def wrap(fn): def call(self, *args): def data(): fn(self, *[t(a).get(self) for t, a in zip(types, args)]) return data call.letters = [t.letters for t in types] COMMANDS[fn.__name__.upper()[1:]] = call return call return wrap class HRM: def __init__(self, program, tiles=0, tile_defaults=None): if tile_defaults is None: tile_defaults = {} self.tokens = list(remove_invalid_tokens(tokenise(program))) self.labels = { places[0]: i for i, (command, places) in enumerate(self.tokens) if command == "LABEL" } self.tiles = [None for _ in range(tiles)] for tile, value in tile_defaults.items(): self.tiles[tile] = HRMBox(value) self.hand = None @property def hand(self): return self._hand @hand.setter def hand(self, value): if value is None: self._hand = HRMBox(value) self._hand = value def __getitem__(self, index): try: return self.tiles[index] except IndexError: raise MemoryError(index) def __setitem__(self, index, value): try: self.tiles[index] = HRMBox(value) 
except IndexError: raise MemoryError(index) def __call__(self, input): self.input = iter(input) self.output = [] self.command = 0 self.hand = None commands = [COMMANDS[command](self, *value) for command, value in self.tokens] while True: try: commands[self.command]() except IndexError: # No more commands break except StopIteration: # No more input break self.command += 1 return self.output @hrm_fn(Empty) def _inbox(self): self.hand = HRMBox(next(self.input)) @hrm_fn(Empty) def _outbox(self): self.output.append(self.hand.item) self.hand = None @hrm_fn(Pointer(Number)) def _copyfrom(self, index): self.hand = self[index] @hrm_fn(Pointer(Number)) def _copyto(self, index): self[index] = self.hand @hrm_fn(Pointer(Number)) def _add(self, index): self.hand += self[index] @hrm_fn(Pointer(Number)) def _sub(self, index): self.hand -= self[index] @hrm_fn(Pointer(Number)) def _bumpup(self, index): self[index] += 1 self.hand = self[index] @hrm_fn(Pointer(Number)) def _bumpdn(self, index): self[index] -= 1 self.hand = self[index] @hrm_fn(Word) def _jump(self, label): self.command = self.labels[label] @hrm_fn(Word) def _jumpz(self, label): if self.hand == 0: self.command = self.labels[label] @hrm_fn(Word) def _jumpn(self, label): if self.hand < 0: self.command = self.labels[label] @hrm_fn(Number) def _comment(self, comment): pass @hrm_fn(Word) def _label(self, label): pass COMMAND_TYPES = {command: fn.letters for command, fn in COMMANDS.items()} def tokenise(hrm_string): for line in hrm_string.split("\n"): line = line.strip() if re.match("--", line) is not None: continue label = re.match("(\w+):", line) if label is not None: yield "LABEL", label.group(1) continue expression = line.split() if expression and all(re.match("\w+|\[\w+\]$", e) for e in expression): yield expression continue def remove_invalid_tokens(tokens): for command, *values in tokens: command = command.upper() command_types = COMMAND_TYPES.get(command, None) if command_types is not None and all( set(v) <= c for c, 
v in zip(command_types, values) ): yield command, values
Texas is admitted to the Union as the 28th state. A local shortage of sugar has forced a limit of five pounds per consumer by San Antonio retailers. Mayor John Tobin and Park Commissioner Ray Lambert announced proposed plans for the beautification of San Pedro Creek from Travis to N. Flores. Baylor set school bowl records for points in a quarter, half and a game. The 43 combined third-quarter points set a Valero Alamo Bowl record. Baylor’s 67 points are the most scored by a team in FBS bowl history (previous was 66 by Nebraska in 2000 Alamo Bowl). Baylor and Washington established a FBS bowl record for most combined points (123) in a regulation bowl game (previous was 2001 GMAC Bowl; Marshall and East Carolina were tied 51-51 before heading into overtime). Baylor and Washington established a FBS bowl record for most combined yards of total offense with 1,397, shattering the previous mark of 1,211 total yards set in the 2005 Insight Bowl. Baylor established a FBS bowl record for most yards of total offense with 777, shattering the previous mark of 718 total yards set by Arizona State in the 1972 Fiesta Bowl. Posted on December 29, 2011, in Texana and tagged 1845, 1917, 1923, 2011. Bookmark the permalink. Leave a comment.
# Copyright (c) 2014 Sean Vig
# Copyright (c) 2014 zordsdavini
# Copyright (c) 2014 Alexandr Kriptonov
# Copyright (c) 2014 Tycho Andersen
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.

from libqtile.log_utils import logger
from . import base

import imaplib
import re


class GmailChecker(base.ThreadedPollText):
    """A simple gmail checker.

    Polls the Gmail IMAP server every `update_interval` seconds and shows
    the total/unseen message counts for `email_path` using `fmt`.
    """
    orientations = base.ORIENTATION_HORIZONTAL
    defaults = [
        ("update_interval", 30, "Update time in seconds."),
        ("username", None, "username"),
        ("password", None, "password"),
        ("email_path", "INBOX", "email_path"),
        ("fmt", "inbox[%s],unseen[%s]", "fmt"),
        ("status_only_unseen", False, "Only show unseen messages"),
    ]

    def __init__(self, **config):
        base._TextBox.__init__(self, **config)
        self.add_defaults(GmailChecker.defaults)

    def poll(self):
        """Fetch the message counts and return the formatted widget text."""
        # A fresh connection is opened on every poll; log out in a finally
        # block so sessions do not pile up on the server (previously the
        # connection was simply leaked once per interval).
        self.gmail = imaplib.IMAP4_SSL('imap.gmail.com')
        try:
            self.gmail.login(self.username, self.password)
            answer, raw_data = self.gmail.status(self.email_path,
                                                 '(MESSAGES UNSEEN)')
        finally:
            try:
                self.gmail.logout()
            except Exception:
                # Best-effort cleanup; a failed logout must not mask the
                # original error or break the poll result.
                pass
        if answer == "OK":
            dec = raw_data[0].decode()
            messages = int(re.search(r'MESSAGES\s+(\d+)', dec).group(1))
            unseen = int(re.search(r'UNSEEN\s+(\d+)', dec).group(1))
            if(self.status_only_unseen):
                return self.fmt % unseen
            else:
                return self.fmt % (messages, unseen)
        else:
            # logger.error, not logger.exception: there is no active
            # exception here, so .exception() would log a bogus traceback.
            logger.error(
                'GmailChecker UNKNOWN error, answer: %s, raw_data: %s',
                answer, raw_data)
            return "UNKNOWN ERROR"
a-Flied out for Molina in the 5th. b-Grounded out for Howard in the 7th. c-Popped out for Ethier in the 7th. a-Struck out for Cabrera, M in the 6th. b-Struck out for Guerrero in the 6th. c-Struck out for Wigginton in the 7th. 1-Ran for Jeter in the 6th. 2-Ran for Hamilton, J in the 6th. 2B: McCann, B (1, Thornton). TB: Molina; Wright, D 2; Ethier; McCann, B 2; Rolen; Holliday. RBI: McCann, B 3 (3). 2-out RBI: McCann, B 3. Runners left in scoring position, 2 out: McCann, B; Phillips. SB: Wright, D (1, 2nd base off Verlander/Mauer). Outfield assists: Byrd (Ortiz, D at 2nd base). DP: 2 (Jimenez, U-Ramirez, H-Pujols, McCann, B-Phillips). 2B: Longoria (1, Jimenez, U), Buck (1, Wainwright). TB: Cabrera, M; Hamilton, J; Longoria 2; Ortiz, D; Jeter; Buck 2. Runners left in scoring position, 2 out: Cano; Hunter, To 2. SB: Crawford, C (1, 2nd base off Bell/McCann, B). CS: Andrus (1, 2nd base by Halladay/McCann, B). Game Scores: Jimenez, U 52, Price 55. Pitches-strikes: Jimenez, U 25-15, Johnson, Jo 29-18, Kuo 18-10, Bell 3-1, Halladay 17-12, Capps 5-3, Wainwright 17-10, Wilson, Br 10-8, Broxton 11-8, Price 23-16, Pettitte 9-8, Lee, C 6-5, Verlander 20-12, Lester 18-11, Hughes, P 13-9, Thornton 13-8, Bailey, A 12-6, Soriano, R 8-5, Valverde 15-11. Groundouts-flyouts: Jimenez, U 2-1, Johnson, Jo 1-1, Kuo 2-1, Bell 0-1, Halladay 0-0, Capps 0-0, Wainwright 1-0, Wilson, Br 2-0, Broxton 0-2, Price 3-0, Pettitte 1-0, Lee, C 2-0, Verlander 0-1, Lester 2-0, Hughes, P 1-0, Thornton 0-0, Bailey, A 0-0, Soriano, R 0-3, Valverde 0-0. Batters faced: Jimenez, U 8, Johnson, Jo 6, Kuo 4, Bell 1, Halladay 3, Capps 1, Wainwright 5, Wilson, Br 3, Broxton 4, Price 6, Pettitte 4, Lee, C 3, Verlander 5, Lester 3, Hughes, P 3, Thornton 3, Bailey, A 2, Soriano, R 3, Valverde 3. Inherited runners-scored: Bell 1-0, Capps 1-0, Thornton 2-2, Bailey, A 1-0. Umpires: HP: Mike Reilly. 1B: Mike Winters. 2B: Brian O'Nora. 3B: Laz Diaz. LF: Bruce Dreckman. RF: Jim Wolf.
# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Unit tests for ``paddle.fluid.layers.while_loop`` (static-graph mode)."""

from __future__ import print_function

import numpy as np
import unittest

import paddle
import paddle.fluid as fluid
import paddle.fluid.core as core
import paddle.fluid.layers as layers
import paddle.fluid.framework as framework
from paddle.fluid.executor import Executor
from paddle.fluid.framework import Program, program_guard
from paddle.fluid.backward import append_backward

# All tests below build static graphs, so static mode must be on.
paddle.enable_static()


class TestApiWhileLoop(unittest.TestCase):
    """Basic forward-only while_loop tests with tuple/list/dict loop vars."""

    def test_var_tuple(self):
        # Loop vars passed as a tuple; counts 0 -> 10 by adding `one`.
        def cond(i):
            return layers.less_than(i, ten)

        def body(i):
            return layers.elementwise_add(x=i, y=one)

        main_program = Program()
        startup_program = Program()
        with program_guard(main_program, startup_program):
            i = layers.fill_constant(shape=[1], dtype='int64', value=0)
            one = layers.fill_constant(shape=[1], dtype='int64', value=1)
            ten = layers.fill_constant(shape=[1], dtype='int64', value=10)
            out = layers.while_loop(cond, body, (i, ))

        place = fluid.CUDAPlace(0) if core.is_compiled_with_cuda(
        ) else fluid.CPUPlace()
        exe = fluid.Executor(place)
        res = exe.run(main_program, fetch_list=out)
        self.assertTrue(
            np.allclose(np.asarray(res[0]), np.full((1), 10, np.int64)))

    def test_var_list(self):
        # Loop vars passed as a list; `mem` accumulates `one` ten times.
        def cond(i, mem):
            return layers.less_than(i, ten)

        def body(i, mem):
            mem = layers.elementwise_add(x=mem, y=one)
            i = layers.increment(i)
            return [i, mem]

        main_program = Program()
        startup_program = Program()
        with program_guard(main_program, startup_program):
            i = layers.zeros(shape=[1], dtype='int64')
            ten = layers.fill_constant(shape=[1], dtype='int64', value=10)
            mem = fluid.data(name='mem', shape=[10], dtype='float32')
            one = layers.fill_constant(shape=[10], dtype='float32', value=1)
            out = layers.while_loop(cond, body, [i, mem])

            data = np.random.rand(10).astype('float32')
            data_one = np.ones(10).astype('float32')

        place = fluid.CUDAPlace(0) if core.is_compiled_with_cuda(
        ) else fluid.CPUPlace()
        exe = fluid.Executor(place)
        res = exe.run(main_program, feed={'mem': data}, fetch_list=out)

        # Reference: add ones to the fed data ten times on the numpy side.
        for i in range(10):
            data = np.add(data, data_one)
        self.assertTrue(np.allclose(np.asarray(res[1]), data))

    def test_var_dict(self):
        # Loop vars may mix tensors with (nested) dicts and lists of tensors.
        def cond(i, ten, test_dict, test_list, test_list_dict):
            return layers.less_than(i, ten)

        def body(i, ten, test_dict, test_list, test_list_dict):
            test_dict["test_key"] = i
            test_dict["test_key"] += 1

            test_list[0] = fluid.layers.reshape(test_list[0], [2, -1]) + 1

            test_list_dict[0]["test_key"] += 1
            test_list_dict[0]["test_key"] = fluid.layers.relu(
                test_list_dict[0]["test_key"])

            i = layers.increment(i)
            return [i, ten, test_dict, test_list, test_list_dict]

        main_program = Program()
        startup_program = Program()
        with program_guard(main_program, startup_program):
            i = layers.zeros(shape=[1], dtype='int64')
            ten = layers.fill_constant(shape=[1], dtype='int64', value=10)
            test_data = layers.fill_constant(shape=[1], dtype='int64', value=0)
            test_dict = {"test_key": test_data}
            test_list = [
                layers.fill_constant(
                    shape=[1, 2], dtype='int64', value=0)
            ]
            test_list_dict = [{
                "test_key": layers.fill_constant(
                    shape=[1], dtype='float32', value=0)
            }]

            i, ten, test_dict, test_list, test_list_dict = layers.while_loop(
                cond, body, [i, ten, test_dict, test_list, test_list_dict])

        place = fluid.CUDAPlace(0) if core.is_compiled_with_cuda(
        ) else fluid.CPUPlace()
        exe = fluid.Executor(place)
        res = exe.run(main_program,
                      fetch_list=[
                          test_dict["test_key"], test_list[0],
                          test_list_dict[0]["test_key"]
                      ])
        # After 10 iterations each container entry has been incremented to 10.
        self.assertTrue(
            np.allclose(
                np.asarray(res[0]),
                np.full(
                    shape=(1), fill_value=10, dtype=np.int64)))
        self.assertTrue(
            np.allclose(
                np.asarray(res[1]),
                np.full(
                    shape=(2, 1), fill_value=10, dtype=np.int64)))
        self.assertTrue(
            np.allclose(
                np.asarray(res[2]),
                np.full(
                    shape=(1), fill_value=10, dtype=np.float32)))


class TestApiWhileLoop_Nested(unittest.TestCase):
    """A while_loop whose body itself runs an inner while_loop."""

    def test_nested_net(self):
        def external_cond(i, j, init, sums):
            return layers.less_than(i, loop_len1)

        def external_body(i, j, init, sums):
            def internal_cond(j, init, sums):
                return layers.less_than(j, loop_len2)

            def internal_body(j, init, sums):
                init = layers.elementwise_add(x=init, y=ones)
                sums = layers.elementwise_add(x=init, y=sums)
                j = layers.increment(j)
                return [j, init, sums]

            result = layers.while_loop(internal_cond, internal_body,
                                       [j, init, sums])
            j = result[0]
            init = result[1]
            sums = result[2]
            sums = layers.elementwise_add(x=init, y=sums)
            i = layers.increment(i)
            return [i, j, init, sums]

        main_program = Program()
        startup_program = Program()
        with program_guard(main_program, startup_program):
            i = layers.zeros(shape=[1], dtype='int64')
            j = layers.zeros(shape=[1], dtype='int64')
            init = fluid.data(name='init', shape=[3, 3], dtype='float32')
            sums = fluid.data(name='sums', shape=[3, 3], dtype='float32')
            loop_len1 = layers.fill_constant(shape=[1], dtype='int64', value=2)
            loop_len2 = layers.fill_constant(shape=[1], dtype='int64', value=3)
            ones = layers.fill_constant(shape=[3, 3], dtype='float32', value=1)

            out = layers.while_loop(external_cond, external_body,
                                    [i, j, init, sums])

            data = np.random.rand(3, 3).astype('float32')
            data_sums = np.zeros([3, 3]).astype('float32')

        place = fluid.CUDAPlace(0) if core.is_compiled_with_cuda(
        ) else fluid.CPUPlace()
        exe = fluid.Executor(place)
        res = exe.run(main_program,
                      feed={'init': data,
                            'sums': data_sums},
                      fetch_list=out)
        # Numpy reference mirroring the nested accumulation above.
        # NOTE(review): the inner `j` loop runs only on the first outer
        # iteration in this reference (j is not reset in the graph) —
        # confirm against while_loop semantics before modifying.
        for i in range(3):
            data = np.add(data, 1)
            data_sums = np.add(data, data_sums)
        for j in range(2):
            data_sums = np.add(data, data_sums)
        self.assertTrue(np.allclose(np.asarray(res[3]), data_sums))


class TestApiWhileLoop_Backward(unittest.TestCase):
    """Gradient propagation through while_loop via append_backward."""

    def test_while_loop_backward(self):
        def cond(i, x):
            return layers.less_than(i, eleven)

        def body(i, x):
            # x is overwritten with i*i each iteration; only the last
            # iteration's value survives.
            x = layers.elementwise_mul(x=i, y=i)
            i = layers.increment(i)
            return [i, x]

        main_program = Program()
        startup_program = Program()
        with fluid.program_guard(main_program, startup_program):
            i = fluid.data(name='i', shape=[1], dtype='float32')
            i.stop_gradient = False
            eleven = layers.fill_constant(shape=[1], dtype='float32', value=11)
            one = layers.fill_constant(shape=[1], dtype='float32', value=1)
            x = fluid.data(name='x', shape=[1], dtype='float32')
            x.stop_gradient = False

            out = layers.while_loop(cond, body, [i, x])
            mean = layers.mean(out[1])
            append_backward(mean)

        place = fluid.CUDAPlace(0) if core.is_compiled_with_cuda(
        ) else fluid.CPUPlace()
        exe = fluid.Executor(place)

        feed_i = np.ones(1).astype('float32')
        feed_x = np.ones(1).astype('float32')
        data = np.asarray([100]).astype('float32')
        i_grad = np.asarray([110]).astype('float32')

        res = exe.run(main_program,
                      feed={'i': feed_i,
                            'x': feed_x},
                      fetch_list=[mean.name, i.grad_name])
        self.assertTrue(np.allclose(np.asarray(res[0]), data))
        self.assertTrue(
            np.allclose(np.asarray(res[1]), i_grad),
            msg=" \nres = \n{} \n\n ans = \n{}".format(res[1], i_grad))

    def test_while_loop_backward2(self):
        # Same idea but with Python operators (<, *, +) in cond/body,
        # exercising operator overloading on Variables.
        def cond(i, x):
            return i < 3

        def body(i, x):
            x = x * i
            i = i + 1
            return [i, x]

        main_program = Program()
        startup_program = Program()
        with fluid.program_guard(main_program, startup_program):
            i = fluid.data(name='i', shape=[1], dtype='float32')
            i.stop_gradient = False
            x = fluid.data(name='x', shape=[1], dtype='float32')
            x.stop_gradient = False

            out = layers.while_loop(cond, body, [i, x])
            mean = layers.mean(out[1])
            append_backward(mean)

        place = fluid.CUDAPlace(0) if core.is_compiled_with_cuda(
        ) else fluid.CPUPlace()
        exe = fluid.Executor(place)

        feed_i = np.ones(1).astype('float32')
        feed_x = np.ones(1).astype('float32')
        data = np.asarray([2]).astype('float32')
        i_grad = np.asarray([3]).astype('float32')
        x_grad = np.asarray([2]).astype('float32')

        res = exe.run(main_program,
                      feed={'i': feed_i,
                            'x': feed_x},
                      fetch_list=[mean.name, i.grad_name, x.grad_name])
        self.assertTrue(np.allclose(np.asarray(res[0]), data))
        self.assertTrue(
            np.allclose(np.asarray(res[1]), i_grad),
            msg=" \nres = \n{} \n\n ans = \n{}".format(res[1], i_grad))
        self.assertTrue(
            np.allclose(np.asarray(res[2]), x_grad),
            msg=" \nres = \n{} \n\n ans = \n{}".format(res[2], x_grad))


class TestApiWhileLoop_NestedWithBackwardAndLoDTensorArray(unittest.TestCase):
    """Nested while_loop reading/writing LoDTensorArrays, with backward."""

    def test_nested_net_with_backward_and_lodtensor(self):
        def external_cond(i, j, x, mem_array):
            return layers.less_than(i, array_len)

        def external_body(i, j, x, mem_array):
            def internal_cond(j, x, mem_array):
                return layers.less_than(j, array_len2)

            def internal_body(j, x, mem_array):
                inner_data = layers.array_read(array=data_array, i=j)
                inner_prev = layers.array_read(array=mem_array, i=j)
                inner_sum_0 = layers.elementwise_add(x=inner_data, y=inner_prev)
                inner_sum_1 = layers.elementwise_add(x=x, y=inner_sum_0)
                j = layers.increment(x=j, in_place=True)
                layers.array_write(inner_sum_1, i=j, array=mem_array)
                return [j, x, mem_array]

            outer_data = layers.array_read(array=data_array, i=i)
            outer_prev = layers.array_read(array=mem_array, i=i)
            outer_sum_0 = layers.elementwise_add(x=outer_data, y=outer_prev)
            outer_sum_1 = layers.elementwise_add(x=x, y=outer_sum_0)
            i = layers.increment(x=i, in_place=True)
            layers.array_write(outer_sum_1, i=i, array=mem_array)
            j, x, mem_array = layers.while_loop(internal_cond, internal_body,
                                                [j, x, mem_array])
            return [i, j, x, mem_array]

        main_program = Program()
        startup_program = Program()
        with fluid.program_guard(main_program, startup_program):
            d0 = fluid.data(name='d0', shape=[10], dtype='float32')
            d1 = fluid.data(name='d1', shape=[10], dtype='float32')
            d2 = fluid.data(name='d2', shape=[10], dtype='float32')
            x = fluid.data(name='x', shape=[10], dtype='float32')
            x.stop_gradient = False
            i = layers.zeros(shape=[1], dtype='int64')
            i.stop_gradient = True
            init = layers.zeros(shape=[10], dtype='float32')
            # data_array holds [d0, d1, d2]; mem_array accumulates sums.
            mem_array = layers.array_write(x=init, i=i)
            data_array = layers.array_write(x=d0, i=i)
            i = layers.increment(i)
            layers.array_write(d1, i, array=data_array)
            i = layers.increment(i)
            layers.array_write(d2, i, array=data_array)
            # Reset i for the loop itself.
            i = layers.zeros(shape=[1], dtype='int64')
            i.stop_gradient = True
            array_len = layers.fill_constant(shape=[1], dtype='int64', value=1)
            j = layers.fill_constant(shape=[1], dtype='int64', value=1)
            j.stop_gradient = True
            array_len2 = layers.fill_constant(shape=[1], dtype='int64', value=3)

            out = layers.while_loop(external_cond, external_body,
                                    [i, j, x, mem_array])

            sum_result = layers.array_read(array=mem_array, i=j)
            mean = layers.mean(sum_result)
            append_backward(mean)

        place = fluid.CUDAPlace(0) if core.is_compiled_with_cuda(
        ) else fluid.CPUPlace()
        exe = fluid.Executor(place)

        d = []
        for i in range(3):
            d.append(np.random.random(size=[10]).astype('float32'))
        feed_x = np.ones(10).astype('float32')
        data_sum = d[0] + d[1] + d[2] + 3 * feed_x
        # mean over 10 elements, x added 3 times -> d(mean)/dx = 3/10 each.
        x_grad = [0.3] * 10
        res = exe.run(
            main_program,
            feed={'d0': d[0],
                  'd1': d[1],
                  'd2': d[2],
                  'x': feed_x},
            fetch_list=[sum_result.name, x.grad_name])
        self.assertTrue(np.allclose(res[0], data_sum))
        self.assertTrue(np.allclose(res[1], x_grad))


class TestApiWhileLoopWithSwitchCase(unittest.TestCase):
    """while_loop whose body dispatches through layers.switch_case."""

    def test_with_switch_case(self):
        def cond(i):
            return layers.less_than(i, ten)

        def body(i):
            def fn_add_three():
                data_add_three = layers.elementwise_add(x=i, y=three)
                return data_add_three

            def fn_square():
                data_mul_data = layers.elementwise_mul(x=i, y=i)
                return data_mul_data

            def fn_add_one():
                data_add_one = layers.elementwise_add(x=i, y=one)
                return data_add_one

            return layers.switch_case(
                branch_index=i,
                branch_fns={2: fn_add_three,
                            5: fn_square},
                default=fn_add_one)

        main_program = Program()
        startup_program = Program()
        with fluid.program_guard(main_program, startup_program):
            i = layers.fill_constant(shape=[1], dtype='int64', value=1)
            ten = layers.fill_constant(shape=[1], dtype='int64', value=10)
            three = layers.fill_constant(shape=[1], dtype='int64', value=3)
            one = layers.fill_constant(shape=[1], dtype='int64', value=1)
            out = layers.while_loop(cond, body, [i])

        place = fluid.CUDAPlace(0) if core.is_compiled_with_cuda(
        ) else fluid.CPUPlace()
        exe = fluid.Executor(place)

        res = exe.run(main_program, fetch_list=out)
        # Trace: 1 ->(+1) 2 ->(+3) 5 ->(^2) 25 >= 10, loop exits.
        data = np.asarray([25]).astype('int64')
        self.assertTrue(np.allclose(np.asarray(res[0]), data))


class TestApiWhileLoop_Error(unittest.TestCase):
    """Argument-validation errors raised by layers.while_loop."""

    def test_error(self):
        def cond_returns_constant(i):
            return 1

        def cond_returns_not_bool_tensor(i):
            return layers.increment(i)

        def cond_returns_bool_tensor(i):
            return layers.less_than(i, ten)

        def cond_returns_2d_tensor(i):
            return layers.less_than(i, ten_2d)

        def cond_receives_two_args(i, ten):
            return layers.less_than(i, ten)

        def body(i):
            return layers.increment(i)

        def body_returns_error_length(i):
            i = layers.increment(i)
            return [i, i]

        def body_returns_error_type(i, ten):
            return layers.increment(i)

        def cond_returns_with_mutable_dict(i, test_dict):
            return i > 0

        def body_returns_with_mutable_dict(i, test_dict):
            test_dict['new_key'] = layers.fill_constant(
                shape=[1], dtype='int64', value=1)
            return layers.increment(i), test_dict

        def cond_returns_with_mutable_list(i, test_list):
            return i > 0

        def body_returns_with_mutable_list(i, test_list):
            test_list.append(
                layers.fill_constant(
                    shape=[1], dtype='int64', value=1))
            return layers.increment(i), test_list

        main_program = Program()
        startup_program = Program()
        with program_guard(main_program, startup_program):
            data = layers.fill_constant(shape=[1], dtype='int64', value=1)
            data_1d = layers.fill_constant(shape=[1], dtype='int64', value=1)
            data_2d = layers.fill_constant(shape=[2, 2], dtype='int64', value=1)
            ten = layers.fill_constant(shape=[1], dtype='int64', value=10)
            ten_2d = layers.fill_constant(shape=[2, 2], dtype='int64', value=10)

            # The type of `cond` in Op(while_loop) must be callable
            def type_error_cond():
                out = layers.while_loop(data, body, [data_1d])

            self.assertRaises(TypeError, type_error_cond)

            # The type of `body` in Op(while_loop) must be callable
            def type_error_body():
                out = layers.while_loop(cond_returns_bool_tensor, data,
                                        [data_1d])

            self.assertRaises(TypeError, type_error_body)

            # The type of `loop_vars` in Op(while_loop) must be list or tuple
            def type_error_loop_vars():
                out = layers.while_loop(cond_returns_bool_tensor, body, data_1d)

            self.assertRaises(TypeError, type_error_loop_vars)

            # The value of `loop_vars` is empty
            def value_error_loop_vars():
                out = layers.while_loop(cond_returns_bool_tensor, body, [])

            self.assertRaises(ValueError, value_error_loop_vars)

            # The type of `cond` returns in Op(while_loop) must be Variable
            def type_error_cond_returns_not_variable():
                out = layers.while_loop(cond_returns_constant, body, [data_1d])

            self.assertRaises(TypeError, type_error_cond_returns_not_variable)

            # The type of `cond` returns in Op(while_loop) must be a boolean variable
            def type_error_cond_returns_not_boolean():
                out = layers.while_loop(cond_returns_not_bool_tensor, body,
                                        [data_1d])

            self.assertRaises(TypeError, type_error_cond_returns_not_boolean)

            # The shape of `cond` returns in Op(while_loop) must be 1
            def type_error_shape_cond_returns_2d():
                out = layers.while_loop(cond_returns_2d_tensor, body, [data_2d])

            self.assertRaises(TypeError, type_error_shape_cond_returns_2d)

            # The length of `body` returns in Op(while_loop) must be same as `loop_vars`
            def value_error_body_returns_error_length():
                out = layers.while_loop(cond_returns_bool_tensor,
                                        body_returns_error_length, [data])

            self.assertRaises(ValueError, value_error_body_returns_error_length)

            # The type of `body` returns in Op(while_loop) must be same as `loop_vars`
            def value_error_body_returns_error_type():
                out = layers.while_loop(cond_receives_two_args,
                                        body_returns_error_type, [data, ten])

            self.assertRaises(ValueError, value_error_body_returns_error_type)

            # The length of `output_vars` with mutable value should keep same with `loop_vars`
            def value_error_body_returns_with_mutable_dict():
                test_dict = {
                    "int_constant": layers.fill_constant(
                        shape=[2, 2], dtype='int64', value=1)
                }
                out = layers.while_loop(cond_returns_with_mutable_dict,
                                        body_returns_with_mutable_dict,
                                        [data, test_dict])

            self.assertRaises(ValueError,
                              value_error_body_returns_with_mutable_dict)

            def value_error_body_returns_with_mutable_list():
                test_list = [
                    layers.fill_constant(
                        shape=[2, 2], dtype='int64', value=1)
                ]
                out = layers.while_loop(cond_returns_with_mutable_list,
                                        body_returns_with_mutable_list,
                                        [data, test_list])

            self.assertRaises(ValueError,
                              value_error_body_returns_with_mutable_list)


class TestApiWhileLoopSliceInBody(unittest.TestCase):
    """Indexing (slicing) a fed tensor inside the loop body."""

    def test_var_slice(self):
        def cond(z, i):
            return i + 1 <= x_shape[0]

        def body(z, i):
            z = z + x[i]
            i += 1
            return z, i

        main_program = Program()
        startup_program = Program()
        with program_guard(main_program, startup_program):
            x = fluid.layers.data(name='x', shape=[5], dtype='int32')
            z = fluid.layers.fill_constant([1], 'int32', 0)
            x_shape = fluid.layers.shape(x)
            i = fluid.layers.fill_constant([1], 'int32', 0)
            z, _ = fluid.layers.while_loop(cond, body, [z, i])

        place = fluid.CUDAPlace(0) if core.is_compiled_with_cuda(
        ) else fluid.CPUPlace()
        exe = fluid.Executor(place)

        np_x = np.array([1, 2, 3, 4, 5], dtype='int32')
        res = exe.run(main_program, feed={'x': np_x}, fetch_list=[z])
        self.assertTrue(np.array_equal(res[0], [np.sum(np_x)]))


if __name__ == '__main__':
    unittest.main()
find amazing places to take photos, anywhere in the world. placerville, ca hangtown's gold bug park gold stamp mill. 2635 gold bug lane placerville, gold bug mine and stamp mill in placerville california . photos, maps, description for 16515 stamp mill loop road, jamestown ca. search homes for sale, get school district and neighborhood info for jamestown, ca . california stamp mills – explore real california gold . puritan mine stamp mill. a standing 5 stamp mill in la county, . it is one of the few stamp mills left and recalls the feather river history as a rich gold . the amgen tour of california will be traveling through el dorado county for stage 6 of the tour de france style cycling road race created and presented by aeg.
''' New Integration test for image replication. @author: Legion ''' import os import time import random import zstackwoodpecker.test_util as test_util import zstackwoodpecker.test_lib as test_lib image_name = 'iso-image-replication-test-' + time.strftime('%y%m%d%H%M%S', time.localtime()) test_stub = test_lib.lib_get_test_stub() img_repl = test_stub.ImageReplication() def test(): os.environ['ZSTACK_BUILT_IN_HTTP_SERVER_IP'] = os.getenv('zstackHaVip') bs_list = img_repl.get_bs_list() bs = random.choice(bs_list) img_repl.add_image(image_name, bs_uuid=bs.uuid, img_format='iso') img_repl.create_iso_vm() img_repl.wait_for_image_replicated(image_name) img_repl.check_image_data(image_name) test_util.test_pass('ISO Image Replication Test Success') img_repl.clean_on_expunge() def env_recover(): img_repl.delete_image() img_repl.expunge_image() img_repl.reclaim_space_from_bs() try: img_repl.vm.destroy() except: pass #Will be called only if exception happens in test(). def error_cleanup(): try: img_repl.delete_image() img_repl.expunge_image() img_repl.reclaim_space_from_bs() img_repl.vm.destroy() except: pass
After last year’s hugely successful competition, we’ve decided to give away another set of 1×2 weekend tickets (incl. camping) for Dour Festival! The Belgian festival’s 30th edition takes place from 11-15 July, with the likes of alt-J, Tyler the Creator and Little Dragon set to appear. And you can be there! But how? 4. Tell us why you should be there! Note: travel is NOT included in this opportunity. You will have to make your own way to Dour Festival if you win. This competition will close on Wednesday, 14 March at 10am, which means that you have two weeks to participate. If you have any questions, please send an email to info@allthingsloud.com with Dour in the subject. If you’ve entered via Instagram then you need to redirect yourself to this Facebook post. Furthermore, please be aware of the fact that we give away tickets so that people can attend an event they otherwise wouldn’t be able to. Good luck!
# -*- coding: utf-8 -*- ############################################################################## # # Author: Nicolas Bessi. Copyright Camptocamp SA # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## { 'name': 'Partner first name, last name', 'description': """ This module splits first name and last name for non company partners ==================================================================== The field 'name' becomes a stored function field concatenating lastname and firstname Note: in version 7.0, installing this module invalidates a yaml test in the 'edi' module Contributors ============ Jonathan Nemry <jonathan.nemry@acsone.eu> Olivier Laurent <olivier.laurent@acsone.eu> """, 'version': '1.2', 'author': 'Camptocamp', 'maintainer': 'Camptocamp, Acsone', 'category': 'Extra Tools', 'website': 'http://www.camptocamp.com, http://www.acsone.eu', 'depends': ['base'], 'data': [ 'partner_view.xml', 'res_user_view.xml', ], 'demo': [], 'test': [], 'auto_install': False, 'installable': True, 'images': [] }
Any kind of research into how to change IP addresses will lead you immediately to proxies and VPNs. The details of each of those can get fairly complicated, but in a nutshell, the two are just ways of re routing your traffic through a computer server somewhere outside the house of your current network. There are millions of forums that analyze the technical information on these two products, but I believe for users who are just getting started, it’s important to really know what they can do, and not how they work. Consequently, proxies will only mask a browsing activity anonymously. You will discover “open proxies” which are IP addresses and slot numbers you can get into your browser (Firefox, Chrome, Opera, IE) to reroute your traffic through a proxy server located someone else in the world. What this means is you avoid accessing websites directly – a proxy server will do the work for you – offering you stored data or making requests for you. Its’ a great way to bypass firewalls at the office, school, and open public networks. There is also “software” that’s available. What this type of software usually does, though it can vary from product to product, is to help you organize these open proxies, and perhaps even give you a browser to use them in. Because these products are a “one-time fee”, they cannot offer you private servers – only a way to find and organize the large numbers of open proxy computers that are located about the world. There are server proxy services out there – services that actually own their own servers, maintain them, and offer support. This kind of service, however, is going to require a monthly payment or fee for maintenance and support. They’ll probably cost you about 5 dollars or more a month. Most of the time these services will take the form of a web-based proxy server, and will not require installing any software – they are going to choose one of their servers randomly for you. There are also virtual private networks. 
These kinds of services work differently from proxies: they are software that allows you to change your Internet protocol address when connected to the private network, and your data is encrypted and filtered through that network. After this, you run your entire machine (phone/computer) through the private network, which means that any other programs and apps you run will also use the anonymous IP address. This is an improved solution for users who are serious about total anonymity online.
from __future__ import absolute_import from __future__ import print_function from django.core.management.base import BaseCommand from zerver.lib.actions import do_create_stream from zerver.models import Realm, get_realm import sys class Command(BaseCommand): help = """Create a stream, and subscribe all active users (excluding bots). This should be used for TESTING only, unless you understand the limitations of the command.""" def add_arguments(self, parser): parser.add_argument('domain', metavar='<domain>', type=str, help='domain in which to create the stream') parser.add_argument('stream_name', metavar='<stream name>', type=str, help='name of stream to create') def handle(self, *args, **options): domain = options['domain'] stream_name = options['stream_name'] encoding = sys.getfilesystemencoding() try: realm = get_realm(domain) except Realm.DoesNotExist: print("Unknown domain %s" % (domain,)) exit(1) do_create_stream(realm, stream_name.decode(encoding))
Without The Cure music would not be what it is today. They were the pioneers of post-punk, alternative rock, and the ever popular early 2000s Gothic Rock with their only static member being their sole songwriter Robert Smith. Now, though, after an insanely long period of just The Cure reboots and old unheard of tracks, Pitchfork has release that The Cure will, in fact, be working on new demos. Recently, in an interview with the United Kingdom’s BBC 6 Music, Robert Smith confirmed that The Cure has, indeed, booked studio time for May of 2018. Smith states that while curating for his music festival Meltdown, he had discovered a vast breadth of new music that inspired him to begin writing new songs again. He has said that without his job to curate this new lineup there would be no “catalyst” for which to spark his songwriting creativity. When curating this year’s Meltdown bill, Smith has decided a large and beautiful set which includes the likes of Death Cab for Cutie, Frightened Rabbit, Low, Suzanne Vega, the Twilight Sad, and the Joy Formidable amongst others. Check out the full lineup here. Lastly, in celebration of The Cure’s 40th year, Smith has released a teaser in which there is a hint at their first concert which will take place in Hyde Park this summer. In his BBC 6 interview he has said that he will not use Meltdown as another Cure reunion show, but will use that stage to debut new music as well as new interpretations to the classics he is known for. This year and the next seem to be an exciting year that will fill our gothic hearts and minds with the music that has become synonymous with the 1980s.
# SP2 #44 Aquadag incandescence calibration: fits quadratic mass-vs-peak-height
# curves for the high-gain (HG) and low-gain (LG) channels, applies the 0.7
# Aquadag correction factor, and plots both calibrations.
#
# FIXES: removed a stray line of bare numbers (a duplicate of the HG peak-height
# column) that was a syntax error; converted Python-2 print statements to
# print() calls; moved plotting (and the matplotlib imports, plus the
# Windows-specific os.chdir) under a __main__ guard so the fit results can be
# imported without side effects.
import sys
import os
import datetime
import pickle
import numpy as np
from pprint import pprint
import sqlite3
import calendar
from datetime import datetime
from datetime import timedelta
import math
import numpy.polynomial.polynomial as poly

# Calibration tables: [uncorrected rBC mass (fg), incandescence peak height (a.u.)]
#mass fg, pk_ht, UNCORR
AL_HG_incand_calib = [
    [0.11144, 986.22],
    [0.22994, 725.78],
    [0.41189, 1567.4],
    [0.67707, 2941],
    [1.04293, 5127.9],
    [1.52461, 8463.7],
    [2.13496, 13283],
    [2.88464, 18566],
    [3.78215, 24234],
    [6.04449, 37216],
    [8.95095, 52182],
]

AL_LG_incand_calib = [
    [0.67707, 276.81],
    [1.04293, 494.74],
    [1.52461, 843],
    [2.13496, 1325.2],
    [2.88464, 1869.5],
    [3.78215, 2448],
    [6.04449, 3801.7],
    [8.95095, 5368.3],
]

# High-gain channel: quadratic fit of Aquadag-corrected mass (mass / 0.7)
# against incandescence peak height.
HG_pkht = np.array([row[1] for row in AL_HG_incand_calib])
HG_mass = np.array([row[0] for row in AL_HG_incand_calib])
HG_mass_corr = np.array([row[0] / 0.7 for row in AL_HG_incand_calib])
HG_fit = poly.polyfit(HG_pkht, HG_mass_corr, 2)
print('HG AD corr fit', HG_fit)

# Append the fitted (corrected) mass as a third column on each HG row.
for line in AL_HG_incand_calib:
    incand_pk_ht = line[1]
    uncorr_mass_fg = line[0]
    AD_corr_fit = HG_fit[0] + HG_fit[1] * incand_pk_ht + HG_fit[2] * incand_pk_ht * incand_pk_ht
    line.append(AD_corr_fit)

# Low-gain channel: same procedure.
LG_pkht = np.array([row[1] for row in AL_LG_incand_calib])
LG_mass = np.array([row[0] for row in AL_LG_incand_calib])
LG_mass_corr = np.array([row[0] / 0.7 for row in AL_LG_incand_calib])
LG_fit = poly.polyfit(LG_pkht, LG_mass_corr, 2)
print('LG AD corr fit', LG_fit)

for line in AL_LG_incand_calib:
    incand_pk_ht = line[1]
    uncorr_mass_fg = line[0]
    AD_corr_fit = LG_fit[0] + LG_fit[1] * incand_pk_ht + LG_fit[2] * incand_pk_ht * incand_pk_ht
    line.append(AD_corr_fit)

# Series used for plotting; the "uncorrected fit" multiplies the corrected
# fit back by 0.7.
HG_pk_ht = [row[1] for row in AL_HG_incand_calib]
HG_uncorr_mass = [row[0] for row in AL_HG_incand_calib]
HG_uncorr_fit = [row[2] * 0.7 for row in AL_HG_incand_calib]
HG_ADcorr_fit = [row[2] for row in AL_HG_incand_calib]

LG_pk_ht = [row[1] for row in AL_LG_incand_calib]
LG_uncorr_mass = [row[0] for row in AL_LG_incand_calib]
LG_uncorr_fit = [row[2] * 0.7 for row in AL_LG_incand_calib]
LG_ADcorr_fit = [row[2] for row in AL_LG_incand_calib]


def main():
    """Plot both calibration curves and save the figure (requires matplotlib)."""
    import matplotlib.pyplot as plt
    import matplotlib.cm as cm          # kept from the original imports
    import matplotlib.colors as colors  # kept from the original imports

    fig = plt.figure(figsize=(12, 10))
    ax = fig.add_subplot(111)
    ax.scatter(HG_pk_ht, HG_uncorr_mass, color='r', label='HG uncorrected calibration')
    ax.plot(HG_pk_ht, HG_ADcorr_fit, '--r', label='HG Aquadag correction applied')
    ax.plot(HG_pk_ht, HG_uncorr_fit, '-r')
    ax.scatter(LG_pk_ht, LG_uncorr_mass, color='blue', label='LG uncorrected calibration')
    ax.plot(LG_pk_ht, LG_ADcorr_fit, '--b', label='LG Aquadag correction applied')
    ax.plot(LG_pk_ht, LG_uncorr_fit, '-b')
    plt.xlabel('Incandescent pk height (a.u.)')
    plt.ylabel('rBC mass (fg)')
    plt.text(9600, 8, 'Aquadag corrected fit:\nrBC mass = 0.26887 + 1.9552E-4*HG_pkht + 8.31906E-10*HG_pkht^2')
    plt.text(5900, 12, 'Aquadag corrected fit:\nrBC mass = 0.56062 + 1.7402E-3*LG_pkht + 1.0009E-7*LG_pkht^2')
    #plt.axhspan(1.8,12.8, color='g', alpha=0.25, lw=0)
    #plt.axhspan(0,1.8, color='c', alpha=0.25, lw=0)
    #plt.axhspan(12.8,41, color='y', alpha=0.25, lw=0)
    ax.set_ylim(0, 16)
    ax.set_xlim(0, 55000)
    plt.legend()
    os.chdir('C:/Users/Sarah Hanna/Documents/Data/Alert Data/SP2 Calibrations/')
    plt.savefig('Alert SP2#44 Aquadag calibration curves.png', bbox_inches='tight')
    plt.show()


if __name__ == "__main__":
    main()
KNOCK DOWN: Cr Michael Scott would like to see the two storey Waikanae Library replaced by a new single level library. Kapiti Coast District Council councillor Michael Scott has proposed a radical rethink for the Waikanae town centre upgrade. He's developed a new strategy which included a land swap with a purpose-built supermarket with underground parking on the large council-owned carpark next to a realigned Marae Lane. It would enable the current Countdown supermarket site to be opened up for other uses as well as creation of a new single level Waikanae Library next to it. Council needed to lead the way in the land swap and all the major players especially council, Progressive Enterprises and Manchester Unity needed to work together for the future good of the town centre, he said. Other commercial players would need to have a role in the process too. "We need to open up the northern end of Mahara Place. "It's a wonderful opportunity to have a light, open green space introduced into the town centre, rather than a patch of grass inside an enclosed Mahara Place. "It will be much cheaper and far more time effective to knock down the old library [a former post office]. "Just imagine a library all on the ground floor, flooded with natural light, with no need for ramps and stairs and expensive lifts to maintain. "Share community areas for user groups to meet would be part of the new concept, and importantly all within the town centres and other council budgets." He noted a memorandum of understanding between council and Mahara Gallery to develop the gallery and library jointly expired in June. The gallery had to raise $5.3 million for the gallery component. So far, council and the Waikanae Community Board had pledged $1.7 million but, as of February 29, the gallery had raised about $933,000. "I think they're [the gallery] acknowledging now that they're not going to get to where they need to be. 
"So we need to think of some different things and a land swap, new supermarket and place for the library is, I think, quite exciting." Cr Scott expected council chief executive Patrick Dougherty to brief council "to see whether this vision can come into place". "If it can, and I think you'll probably find it will, it will happen within the budget and within the timeframe. "There's $1.2 million in the town centre development budget for Waikanae this year [2016/17]. "This is about an $8 million project over the next few years, but we want to do it now, and then they can fix up Paraparaumu as best they can. "It's big thinking but this will be one that gets over the line I think." He said council's current proposals were "simply papering over fundamental cracks". Council's current priorities for the Waikanae town centre upgrade include projects related to turning SH1 into a local road, improving pedestrian crossing from the railway station to Ngaio Rd, and improving access to the Waikanae River via Te Moana Rd/SH1 intersection.
"""Steam service""" import json import os from gettext import gettext as _ from gi.repository import Gio from lutris import settings from lutris.config import LutrisConfig, write_game_config from lutris.database.games import add_game, get_game_by_field, get_games from lutris.database.services import ServiceGameCollection from lutris.game import Game from lutris.installer.installer_file import InstallerFile from lutris.services.base import BaseService from lutris.services.service_game import ServiceGame from lutris.services.service_media import ServiceMedia from lutris.util.log import logger from lutris.util.steam.appmanifest import AppManifest, get_appmanifests from lutris.util.steam.config import get_steam_library, get_steamapps_paths, get_user_steam_id from lutris.util.strings import slugify class SteamBanner(ServiceMedia): service = "steam" size = (184, 69) dest_path = os.path.join(settings.CACHE_DIR, "steam/banners") file_pattern = "%s.jpg" api_field = "appid" url_pattern = "http://cdn.akamai.steamstatic.com/steam/apps/%s/capsule_184x69.jpg" class SteamCover(ServiceMedia): service = "steam" size = (200, 300) dest_path = os.path.join(settings.CACHE_DIR, "steam/covers") file_pattern = "%s.jpg" api_field = "appid" url_pattern = "http://cdn.steamstatic.com/steam/apps/%s/library_600x900.jpg" class SteamBannerLarge(ServiceMedia): service = "steam" size = (460, 215) dest_path = os.path.join(settings.CACHE_DIR, "steam/header") file_pattern = "%s.jpg" api_field = "appid" url_pattern = "https://cdn.cloudflare.steamstatic.com/steam/apps/%s/header.jpg" class SteamGame(ServiceGame): """ServiceGame for Steam games""" service = "steam" installer_slug = "steam" runner = "steam" @classmethod def new_from_steam_game(cls, steam_game, game_id=None): """Return a Steam game instance from an AppManifest""" game = cls() game.appid = steam_game["appid"] game.game_id = steam_game["appid"] game.name = steam_game["name"] game.slug = slugify(steam_game["name"]) game.runner = cls.runner 
game.details = json.dumps(steam_game) return game class SteamService(BaseService): id = "steam" name = _("Steam") icon = "steam-client" medias = { "banner": SteamBanner, "banner_large": SteamBannerLarge, "cover": SteamCover, } default_format = "banner" is_loading = False runner = "steam" excluded_appids = [ "221410", # Steam for Linux "228980", # Steamworks Common Redistributables "1070560", # Steam Linux Runtime ] game_class = SteamGame def load(self): """Return importable Steam games""" if self.is_loading: logger.warning("Steam games are already loading") return self.is_loading = True steamid = get_user_steam_id() if not steamid: logger.error("Unable to find SteamID from Steam config") return steam_games = get_steam_library(steamid) if not steam_games: raise RuntimeError(_("Failed to load games. Check that your profile is set to public during the sync.")) for steam_game in steam_games: if steam_game["appid"] in self.excluded_appids: continue game = self.game_class.new_from_steam_game(steam_game) game.save() self.match_games() self.is_loading = False return steam_games def get_installer_files(self, installer, installer_file_id): steam_uri = "$WINESTEAM:%s:." if installer.runner == "winesteam" else "$STEAM:%s:." 
appid = str(installer.script["game"]["appid"]) return [ InstallerFile(installer.game_slug, "steam_game", { "url": steam_uri % appid, "filename": appid }) ] def install_from_steam(self, manifest): """Create a new Lutris game based on an existing Steam install""" if not manifest.is_installed(): return appid = manifest.steamid if appid in self.excluded_appids: return service_game = ServiceGameCollection.get_game(self.id, appid) if not service_game: return lutris_game_id = "%s-%s" % (self.id, appid) existing_game = get_game_by_field(lutris_game_id, "slug") if existing_game: return game_config = LutrisConfig().game_level game_config["game"]["appid"] = appid configpath = write_game_config(lutris_game_id, game_config) game_id = add_game( name=service_game["name"], runner="steam", slug=slugify(service_game["name"]), installed=1, installer_slug=lutris_game_id, configpath=configpath, platform="Linux", service=self.id, service_id=appid, ) return game_id def add_installed_games(self): games = [] steamapps_paths = get_steamapps_paths() for steamapps_path in steamapps_paths: for appmanifest_file in get_appmanifests(steamapps_path): app_manifest_path = os.path.join(steamapps_path, appmanifest_file) self.install_from_steam(AppManifest(app_manifest_path)) return games def generate_installer(self, db_game): """Generate a basic Steam installer""" return { "name": db_game["name"], "version": self.name, "slug": slugify(db_game["name"]) + "-" + self.id, "game_slug": slugify(db_game["name"]), "runner": self.runner, "appid": db_game["appid"], "script": { "game": {"appid": db_game["appid"]} } } def install(self, db_game): appid = db_game["appid"] db_games = get_games(filters={"service_id": appid, "installed": "1", "service": self.id}) existing_game = self.match_existing_game(db_games, appid) if existing_game: logger.debug("Found steam game: %s", existing_game) game = Game(existing_game.id) game.save() return service_installers = self.get_installers_from_api(appid) if not 
service_installers: service_installers = [self.generate_installer(db_game)] application = Gio.Application.get_default() application.show_installer_window(service_installers, service=self, appid=appid)
Masterchef Australia is the best cooking competition programme out there, so pleased it's on UK TV! More than prawns on the barbie! We have loved this series of MCA so much and have really enjoyed being on the journey with all the contestants. Will be sad when it finishes! When is MA back on Really? It hasn't been on UK TV for ages. So much better than the UK version. I really miss it. We've had enough of Homes Under the Hammer! When will this series be aired on UK TV? It's so much better than the UK version. It's starting on 28 September. I checked with Watch TV. You've seen the rest - now watch the best!
#!/usr/bin/env python
##print __name__

# process_rawreads.py: ddRADseq raw-read processing pipeline (clone filter,
# UMI trim, barcode parsing, quality trim) built on Stacks, FastX and
# Trimmomatic. All heavy lifting is shelled out via os.system.
#
# FIXES: the broken-pair "cat" command in PE_sample_rename used absolute
# /parsed/ paths (missing the leading "."); SE_quality_trim ran sed on a
# .S2.qtrim file that single-end mode never creates; prints use the
# parenthesized form (valid on both Python 2 and 3); argv parsing and main()
# are now guarded so importing the module has no side effects.

import re
import sys
import os
import optparse
import subprocess

usage_line = """
process_rawreads.py

Version 2.0 (2 December, 2014)
License: GNU GPLv2
To report bugs or errors, please contact Daren Card (dcard@uta.edu).
This script is provided as-is, with no support and no guarantee of proper or desirable functioning.

Script that process raw RADseq reads that are generated using the Peterson et al. 2012 ddRADseq \
protocol. The script filters out PCR clones, trims away the 8bp unique molecular identifiers at \
the beginning of each read, parses by combinatorial barcodes (an inline barcode and standard Illumina \
index), and quality trims using either Stacks or Trimmomatic. The script will handle either single-end \
or paired-end reads appropriately. User must input the raw reads (unzipped fastq format) and a sample \
sheet (example is included as part of this repository). The script also includes the flexibility of \
running certain portions of the pipeline, which is useful if one doesn't need to repeat a specific step. \
The pipeline steps are as follows (with numbers corresponding to those passed using the -x flag):
1. Setup the environment - creates necessary directories.
2. Clone filtering - filters out PCR clones
3. Lead trimming - trims away 8bp UMI sequences
4. Sample parsing - parses sample sheet, creates barcode input, parses processed reads down to sample \
using this information, and renames the files logically using the sample sheet.
5. Quality filtering - quality filters the reads using Trimmomatic. If user specifies read quality \
filtering in Stacks, this filtering takes place simulteneously with read parsing (step 4).

Dependencies include the Stacks pipeline (v. 1.10 - 1.19), the FastX toolkit, and Trimmomatic v. 0.32 \
(if desired), and all need to be installed in the users path.

python process_rawreads.py -t <#threads> -s <samplesheet.txt> [-p -r] -c/-q -1 <single-end.fastq> \
[-2 <paired-end.fastq>] --renz1 <RE_1> --renz2 <RE_2> --bar_loc <inline/index> [-x [1,2,3,4,5]
"""

#################################################
###           Parse command options           ###
#################################################

usage = usage_line
parser = optparse.OptionParser(usage=usage)
parser.add_option("-t", action="store", type="string", dest="threads", help="threads")
parser.add_option("-s", action="store", type="string", dest="sheet", help="Sample sheet file (see sample)")
parser.add_option("-p", action="store_true", dest="paired", help="paired reads flag")
parser.add_option("-c", action="store_true", dest="clean", help="quality trim reads using Stacks")
parser.add_option("-q", action="store_true", dest="quality", help="quality trim reads using Trimmomatic")
parser.add_option("-r", action="store_true", dest="rescue", help="rescue barcodes/restriction sites in Stacks (default settings)")
parser.add_option("-1", action="store", type="string", dest="read1", help="single end read")
parser.add_option("-2", action="store", type="string", dest="read2", help="paired end read")
parser.add_option("--renz1", action="store", type="string", dest="renz1", help="restriction enzyme 1 (common cutter)")
parser.add_option("--renz2", action="store", type="string", dest="renz2", help="restriction enzyme 2 (rare cutter)")
parser.add_option("--bar_loc", action="store", type="string", dest="bar_loc", help="location of barcode & index (per process_radtags documentation)")
parser.add_option("-x", action="store", type="string", dest="run", help="processes to run, separated by commas (e.g., 1,2,...,5) [1,2,3,4,5]", default="1,2,3,4,5")


#################################################
###         Setup the environment             ###
#################################################

def setup(r1nm):
    """Create the output directory tree (step 1)."""
    print("\n***Setting up the command environment***\n")
    ### Create output directories ###
    os.system("mkdir clone_filtered")
    os.system("mkdir lead_trimmed")
    os.system("mkdir parsed")
    os.system("mkdir cleaned")
    os.system("mkdir ./parsed/" + r1nm)


#################################################
###           Clone filter reads              ###
#################################################

def PE_clone_filter():
    """Remove PCR duplicates from a paired-end library with Stacks clone_filter."""
    print("\n***Filtering PCR duplicates***\n")
    os.system("clone_filter -1 " + options.read1 + " -2 " + options.read2 + " -o ./clone_filtered/ 2>&1 | tee ./clone_filtered/" + options.read1 + ".clonefilter.log")


def SE_clone_filter():
    """Remove PCR duplicates from a single-end library.

    clone_filter requires two inputs, so the same read file is passed twice
    and the duplicated second output is deleted afterwards.
    """
    print("\n***Filtering PCR duplicates***\n")
    os.system("clone_filter -1 " + options.read1 + " -2 " + options.read1 + " -o ./clone_filtered/ 2>&1 | tee ./clone_filtered/" + options.read1 + ".clonefilter.log")
    os.system("rm -f ./clone_filtered/*.fil.fq_2")


#################################################
###          Trim leading 8bp UMI             ###
#################################################

def PE_lead_trim(r1nm, r2nm):
    """Trim the leading 8bp UMI from both reads of each pair (fastx_trimmer -f 9)."""
    print("\n***Trimming away leading 8bp unique molecular identifiers***\n")
    os.system("fastx_trimmer -Q 33 -f 9 -i ./clone_filtered/" + r1nm + ".fil.fq_1 -o ./lead_trimmed/" + r1nm + ".1.clone.trim.fastq")
    os.system("fastx_trimmer -Q 33 -f 9 -i ./clone_filtered/" + r2nm + ".fil.fq_2 -o ./lead_trimmed/" + r2nm + ".2.clone.trim.fastq")


def SE_lead_trim(r1nm):
    """Trim the leading 8bp UMI from single-end reads."""
    print("\n***Trimming away leading 8bp unique molecular identifiers***\n")
    os.system("fastx_trimmer -Q 33 -f 9 -i ./clone_filtered/" + r1nm + ".fil.fq_1 -o ./lead_trimmed/" + r1nm + ".1.clone.trim.fastq")


#################################################
###              Parse samples                ###
#################################################

def parse_sample_sheet():
    """Build barcodes.txt from the sample sheet (columns 4[+5] = barcode[/index])."""
    print("\n***Parsing reads by sample***\n")
    ### Parse the sample sheet to create barcodes file ###
    barcodes = open("barcodes.txt", "w")
    for line in open(options.sheet, "r"):
        if not line.strip().startswith("#"):
            bar = line.rstrip().split("\t")
            if options.paired == True:
                # Paired data: inline barcode + Illumina index per sample.
                out = bar[3] + "\t" + bar[4] + "\n"
                barcodes.write(out)
            else:
                out = bar[3] + "\n"
                barcodes.write(out)
    barcodes.close()


### process_radtags subroutine ###
def PE_sample_parser(r1nm, r2nm):
    """Demultiplex paired-end reads with process_radtags (-r rescue, -c -q Stacks QC)."""
    if options.rescue == True:
        if options.clean == True:
            alert = open("./cleaned/ATTENTION", "w")
            line = "You elected to quality-trim your reads using Stacks. This trimming was done simultaneously with parsing. See the 'parsed' folder for your trimmed reads."
            alert.write(line)
            alert.close()
            os.system("process_radtags -r -c -q -b barcodes.txt -o ./parsed/" + str(r1nm) + " --inline_index --renz_1 " + str(options.renz1) + " --renz_2 " + str(options.renz2) + " -1 ./lead_trimmed/" + str(r1nm) + ".1.clone.trim.fastq -2 ./lead_trimmed/" + r2nm + ".2.clone.trim.fastq 2>&1 | tee ./parsed/" + str(r1nm) + "/" + str(r1nm) + ".parse.log")
            print("\n***Quality-trimming reads using Stacks***\n")
        else:
            os.system("process_radtags -r -b barcodes.txt -o ./parsed/" + str(r1nm) + " --inline_index --renz_1 " + str(options.renz1) + " --renz_2 " + str(options.renz2) + " -1 ./lead_trimmed/" + str(r1nm) + ".1.clone.trim.fastq -2 ./lead_trimmed/" + r2nm + ".2.clone.trim.fastq 2>&1 | tee ./parsed/" + str(r1nm) + "/" + str(r1nm) + ".parse.log")
            print("\n***Quality-trimming reads using Trimmomatic***\n")
    else:
        if options.clean == True:
            os.system("process_radtags -c -q -b barcodes.txt -o ./parsed/" + str(r1nm) + " --inline_index --renz_1 " + str(options.renz1) + " --renz_2 " + str(options.renz2) + " -1 ./lead_trimmed/" + str(r1nm) + ".1.clone.trim.fastq -2 ./lead_trimmed/" + r2nm + ".2.clone.trim.fastq 2>&1 | tee ./parsed/" + str(r1nm) + "/" + str(r1nm) + ".parse.log")
            print("\n***Quality-trimming reads using Stacks***\n")
            alert = open("./cleaned/ATTENTION", "w")
            line = "You elected to quality-trim your reads using Stacks. This trimming was done simultaneously with parsing. See the 'parsed' folder for your trimmed reads."
            alert.write(line)
            alert.close()
        else:
            os.system("process_radtags -b barcodes.txt -o ./parsed/" + str(r1nm) + " --inline_index --renz_1 " + str(options.renz1) + " --renz_2 " + str(options.renz2) + " -1 ./lead_trimmed/" + str(r1nm) + ".1.clone.trim.fastq -2 ./lead_trimmed/" + r2nm + ".2.clone.trim.fastq 2>&1 | tee ./parsed/" + str(r1nm) + "/" + str(r1nm) + ".parse.log")
            print("\n***Quality-trimming reads using Trimmomatic***\n")


def SE_sample_parser(r1nm):
    """Demultiplex single-end reads with process_radtags (--inline_null)."""
    if options.rescue == True:
        if options.clean == True:
            alert = open("./cleaned/ATTENTION", "w")
            line = "You elected to quality-trim your reads using Stacks. This trimming was done simultaneously with parsing. See the 'parsed' folder for your trimmed reads."
            alert.write(line)
            alert.close()
            os.system("process_radtags -r -c -q -b barcodes.txt -o ./parsed/" + str(r1nm) + " --inline_null --renz_1 " + str(options.renz1) + " --renz_2 " + str(options.renz2) + " -f ./lead_trimmed/" + str(r1nm) + ".1.clone.trim.fastq 2>&1 | tee ./parsed/" + str(r1nm) + "/" + str(r1nm) + ".parse.log")
            print("\n***Quality-trimming reads using Stacks***\n")
        else:
            os.system("process_radtags -r -b barcodes.txt -o ./parsed/" + str(r1nm) + " --inline_null --renz_1 " + str(options.renz1) + " --renz_2 " + str(options.renz2) + " -f ./lead_trimmed/" + str(r1nm) + ".1.clone.trim.fastq 2>&1 | tee ./parsed/" + str(r1nm) + "/" + str(r1nm) + ".parse.log")
            print("\n***Quality-trimming reads using Trimmomatic***\n")
    else:
        if options.clean == True:
            alert = open("./cleaned/ATTENTION", "w")
            line = "You elected to quality-trim your reads using Stacks. This trimming was done simultaneously with parsing. See the 'parsed' folder for your trimmed reads."
            alert.write(line)
            alert.close()
            os.system("process_radtags -c -q -b barcodes.txt -o ./parsed/" + str(r1nm) + " --inline_null --renz_1 " + str(options.renz1) + " --renz_2 " + str(options.renz2) + " -f ./lead_trimmed/" + str(r1nm) + ".1.clone.trim.fastq 2>&1 | tee ./parsed/" + str(r1nm) + "/" + str(r1nm) + ".parse.log")
            print("\n***Quality-trimming reads using Stacks***\n")
        else:
            os.system("process_radtags -b barcodes.txt -o ./parsed/" + str(r1nm) + " --inline_null --renz_1 " + str(options.renz1) + " --renz_2 " + str(options.renz2) + " -f ./lead_trimmed/" + str(r1nm) + ".1.clone.trim.fastq 2>&1 | tee ./parsed/" + str(r1nm) + "/" + str(r1nm) + ".parse.log")
            print("\n***Quality-trimming reads using Trimmomatic***\n")


### file renaming subroutine ###
def PE_sample_rename(r1nm):
    """Rename parsed paired-end outputs per sample and concatenate broken pairs.

    NOTE(review): unlike parse_sample_sheet, this loop does not skip
    '#'-commented sheet lines — confirm sheets never carry comments.
    """
    for foo in open(options.sheet).read().splitlines():
        bar = foo.split()
        parsep1_rename = "mv ./parsed/" + str(r1nm) + "/sample_" + bar[3] + "-" + bar[4] + ".1.fq ./parsed/" + str(r1nm) + "/" + bar[0] + "_" + bar[3] + "-" + bar[4] + ".P1.fq"
        parsep2_rename = "mv ./parsed/" + str(r1nm) + "/sample_" + bar[3] + "-" + bar[4] + ".2.fq ./parsed/" + str(r1nm) + "/" + bar[0] + "_" + bar[3] + "-" + bar[4] + ".P2.fq"
        remp1_rename = "mv ./parsed/" + str(r1nm) + "/sample_" + bar[3] + "-" + bar[4] + ".rem.1.fq ./parsed/" + str(r1nm) + "/" + bar[0] + "_" + bar[3] + "-" + bar[4] + ".rem.P1.fq"
        remp2_rename = "mv ./parsed/" + str(r1nm) + "/sample_" + bar[3] + "-" + bar[4] + ".rem.2.fq ./parsed/" + str(r1nm) + "/" + bar[0] + "_" + bar[3] + "-" + bar[4] + ".rem.P2.fq"
        # FIX: the second input and the output previously used absolute
        # "/parsed/..." paths (missing the leading "."), so the cat always failed.
        combine_broken = "cat ./parsed/" + str(r1nm) + "/" + bar[0] + "_" + bar[3] + "-" + bar[4] + ".rem.P1.fq ./parsed/" + str(r1nm) + "/" + bar[0] + "_" + bar[3] + "-" + bar[4] + ".rem.P2.fq > ./parsed/" + str(r1nm) + "/" + bar[0] + "_" + bar[3] + "-" + bar[4] + ".rem.cat.fq"
        os.system(parsep1_rename)
        os.system(parsep2_rename)
        os.system(remp1_rename)
        os.system(remp2_rename)
        os.system(combine_broken)
        ### Place restriction site trimming routine here ###


def SE_sample_rename(r1nm):
    """Rename parsed single-end outputs per sample."""
    for foo in open(options.sheet).read().splitlines():
        bar = foo.split()
        parse_single = "mv ./parsed/" + str(r1nm) + "/sample_" + bar[3] + ".fq ./parsed/" + str(r1nm) + "/" + bar[0] + "_" + bar[3] + ".S1.fq"
        os.system(parse_single)
        ### Place restriction site trimming routine here ###


#################################################
###          Quality-trim samples             ###
#################################################

def PE_quality_trim(r1nm):
    """Quality-trim paired-end samples with Trimmomatic (only when -q given)."""
    if options.quality == True:
        for foo in open(options.sheet).read().splitlines():
            bar = foo.split()
            handle = bar[0] + "_" + bar[3] + "-" + bar[4]
            threads = options.threads
            PEclean = "trimmomatic-0.35.jar PE -threads " + threads + " -trimlog ./cleaned/" + handle + "_paired.qtrim.log ./parsed/" + str(r1nm) + "/" + handle + ".P1.fq ./parsed/" + str(r1nm) + "/" + handle + ".P2.fq ./cleaned/" + handle + ".P1.qtrim ./cleaned/" + handle + ".S1.qtrim ./cleaned/" + handle + ".P2.qtrim ./cleaned/" + handle + ".S2.qtrim LEADING:10 TRAILING:10 SLIDINGWINDOW:4:15 MINLEN:36 TOPHRED33 2>&1 | tee ./cleaned/" + handle + "_paired.qtrim.summary.log"
            broken_clean = "trimmomatic-0.35.jar SE -threads " + threads + " -trimlog ./cleaned/" + handle + "_broken.qtrim.log ./parsed/" + str(r1nm) + "/" + handle + ".rem.cat.fq ./cleaned/" + handle + ".broken.qtrim LEADING:10 TRAILING:10 SLIDINGWINDOW:4:15 MINLEN:36 TOPHRED33 2>&1 | tee ./cleaned/" + handle + ".broken.qtrim.summary.log"
            os.system(str(PEclean))
            os.system(str(broken_clean))
            # Restore "<read> 1"/"<read> 2" style headers mangled by parsing.
            os.system("sed -i 's/\_1$/\ 1/g' ./cleaned/" + handle + ".P1.qtrim")
            os.system("sed -i 's/\_2$/\ 2/g' ./cleaned/" + handle + ".P2.qtrim")
            os.system("sed -i 's/\_1$/\ 1/g' ./cleaned/" + handle + ".S1.qtrim")
            os.system("sed -i 's/\_2$/\ 2/g' ./cleaned/" + handle + ".S2.qtrim")
            ### Put command to trim away restriction site here and below else for Trimmomatic option ###


def SE_quality_trim(r1nm):
    """Quality-trim single-end samples with Trimmomatic (only when -q given)."""
    if options.quality == True:
        for foo in open(options.sheet).read().splitlines():
            bar = foo.split()
            handle = bar[0] + "_" + bar[3]
            threads = options.threads
            SEclean = "trimmomatic-0.35.jar SE -threads " + threads + " -trimlog ./cleaned/" + handle + ".qtrim.log ./parsed/" + str(r1nm) + "/" + handle + ".S1.fq ./cleaned/" + handle + ".S1.qtrim LEADING:10 TRAILING:10 SLIDINGWINDOW:4:15 MINLEN:36 TOPHRED33 2>&1 | tee ./cleaned/" + handle + ".qtrim.summary.log"
            os.system(str(SEclean))
            os.system("sed -i 's/\_1$/\ 1/g' ./cleaned/" + handle + ".S1.qtrim")
            # FIX: removed a sed on <handle>.S2.qtrim — single-end mode never
            # produces that file, so the command always failed.
            ### Put command to trim away restriction site here and below else for Trimmomatic option ###


#################################################
###            Specify processes              ###
#################################################

def main():
    """Dispatch the requested pipeline steps (-x) for PE or SE data."""
    if options.paired == True:
        r1nm, r1ext = os.path.splitext(options.read1)
        r2nm, r2ext = os.path.splitext(options.read2)
        if "1" in options.run:
            setup(r1nm)
        if "2" in options.run:
            PE_clone_filter()
        if "3" in options.run:
            PE_lead_trim(r1nm, r2nm)
        if "4" in options.run:
            parse_sample_sheet()
            PE_sample_parser(r1nm, r2nm)
            PE_sample_rename(r1nm)
        if "5" in options.run:
            PE_quality_trim(r1nm)
    else:
        r1nm, r1ext = os.path.splitext(options.read1)
        if "1" in options.run:
            setup(r1nm)
        if "2" in options.run:
            SE_clone_filter()
        if "3" in options.run:
            SE_lead_trim(r1nm)
        if "4" in options.run:
            parse_sample_sheet()
            SE_sample_parser(r1nm)
            SE_sample_rename(r1nm)
        if "5" in options.run:
            SE_quality_trim(r1nm)


if __name__ == "__main__":
    # Parse argv only when run as a script so the module imports cleanly.
    options, args = parser.parse_args()
    main()
As we get closer to the 20th Anniversary celebrations on April 22nd, Disney continues to debut new merchandise for the event. We’ve already covered two waves of merchandise releases at the park (here and here), but today we’re back with some good news for those of you who like to collect pressed pennies. Disney’s Animal Kingdom has new pressed pennies featuring Simba, Timon, and Pumbaa, which are now available in the park to celebrate their 20th Anniversary!
class intSet(object):
    """An intSet is a set of integers

    The value is represented by a list of ints, self.vals.
    Each int in the set occurs in self.vals exactly once."""

    def __init__(self):
        """Create an empty set of integers"""
        self.vals = []

    def insert(self, e):
        """Assumes e is an integer and inserts e into self"""
        if e not in self.vals:
            self.vals.append(e)

    def member(self, e):
        """Assumes e is an integer
        Returns True if e is in self, and False otherwise"""
        return e in self.vals

    def remove(self, e):
        """Assumes e is an integer and removes e from self
        Raises ValueError if e is not in self"""
        try:
            self.vals.remove(e)
        # FIX: narrowed from a bare except, which would have masked any
        # unrelated error (e.g. KeyboardInterrupt) as a missing element.
        except ValueError:
            raise ValueError(str(e) + ' not found')

    def __str__(self):
        """Returns a string representation of self"""
        # Sorting in place (as the original did) keeps output deterministic.
        self.vals.sort()
        return '{' + ','.join([str(e) for e in self.vals]) + '}'

    def intersect(self, other):
        '''returns a new intSet containing elements that appear in both sets'''
        intersect_set = intSet()
        for i in other.vals:
            if self.member(i):
                intersect_set.insert(i)
        return intersect_set

    def __len__(self):
        """Returns the number of elements in self"""
        return len(self.vals)


# L11 Problem 6
class Queue(object):
    """A simple FIFO queue backed by a list (O(n) removal from the front)."""

    def __init__(self):
        self.queue = []

    def insert(self, e):
        """Append e at the back of the queue."""
        self.queue.append(e)

    def remove(self):
        """Pop and return the front element; raises ValueError when empty."""
        if not self.queue:
            raise ValueError('remove from empty Queue')
        return self.queue.pop(0)
A "very chaotic" scene developed early Sunday as "multiple people" were shot during an all-night art event in New Jersey's capital city. Shooting broke out at an all-night art festival in Trenton, New Jersey, early Sunday, sending people stampeding from the scene and leaving one suspect dead and 22 people injured, a local prosecutor said. WATCH: Police say at a press conference 1 person was killed — identified as one of the suspected gunmen — and 20 others were treated for a variety of injuries. WATCH: Witnesses react to New Jersey art festival shooting.
import random


def attack(attack_power, percent_to_hit, percent_to_critical=0.01):
    """Calculates the damage done based on attack power and percent to hit.
    Also calculates critical strike.

    A single random roll decides both the hit and (when low enough) the
    critical; a critical adds attack_power on top of the rolled damage.

    Parameters:
    attack_power - attack power
    percent_to_hit - percent to hit

    Optional:
    percent_to_critical - percent to critical strike [default: 0.01]

    Returns:
    Returns damage (0 on a miss)
    """
    damage_value = 0

    # Calculate if creature was hit: roll once in [0, 1).
    chance_to_hit = random.random()
    creature_was_hit = chance_to_hit <= percent_to_hit

    # Calculate final damage value; the same roll doubles as the crit check.
    if creature_was_hit:
        damage_value = random.randint(1, attack_power)
        if chance_to_hit <= percent_to_critical:
            damage_value = attack_power + damage_value

    return damage_value


def main():
    """Interactive loop: prompt for stats and roll attacks until the user quits."""
    attack_power = raw_input("What is the attack power? ")
    percent_to_hit = raw_input("What is the percent to hit? ")
    percent_to_critical = raw_input("What is the percent to critical? ")

    attack_power = int(attack_power)
    percent_to_hit = float(percent_to_hit)
    percent_to_critical = float(percent_to_critical)

    player_wants_to_continue = True
    while player_wants_to_continue:
        print(attack(attack_power, percent_to_hit, percent_to_critical))
        answer = raw_input("Continue ([Y]/n)? ")
        if answer == "n":
            player_wants_to_continue = False


# FIX: the prompts and loop previously ran at module level, making the
# module impossible to import; they are now guarded.
if __name__ == "__main__":
    main()
Elevate your stylish collection with this blue dress from Gerua. Made of rayon, this regular fit dress is comfortable to wear. Featuring a round neck, short sleeves and striped pattern, this dress boasts of stunning magnetism. Team this dress with contrast colored ballerinas for a chic look.
import math
from decimal import Decimal, ROUND_HALF_UP  # explicit imports instead of wildcard
from fractions import Fraction


def cube_root(operand):
    """Return the real cube root of operand.

    math.pow raises ValueError for a negative base with a fractional
    exponent, so we take the root of the magnitude and restore the
    original sign with math.copysign (fixes cube_root(-8)).
    """
    power = 1 / 3.0
    result = math.copysign(math.pow(abs(operand), power), operand)
    return result


def sci_notation(operand1, operand2):
    """Scientific notation: for numbers x and y, return x * 10**y."""
    multiplier = math.pow(10, operand2)
    result = operand1 * multiplier
    return result


def invert_sign(operand):
    """Invert the sign of operand."""
    return -operand


def decimal_places(operand1, operand2):
    """Round operand1 to operand2 decimal places."""
    precision = int(operand2)
    result = round(operand1, precision)
    return result


def rnd(operand1, operand2):
    """Round operand1 to operand2 significant digits.

    Chooses between Decimal quantization (enough room after the dot),
    integer quantization (digits before the dot equal the precision),
    and scientific-notation formatting otherwise.
    """
    num_before_dot = len(str(operand1).split('.')[0])
    operand1 = float(operand1)
    operand2 = int(operand2)
    if num_before_dot + 1 < operand2:
        # Quantize to (operand2 - num_before_dot) places after the dot.
        num_after_dot = operand2 - num_before_dot
        round_str = '0.' + '0' * (num_after_dot - 1) + '1'
        result = float(Decimal(operand1).quantize(Decimal(round_str),
                                                  rounding=ROUND_HALF_UP))
    elif num_before_dot == operand2:
        result = int(Decimal(operand1).quantize(Decimal('1'),
                                                rounding=ROUND_HALF_UP))
    else:
        # Fall back to scientific-notation formatting at the requested
        # number of significant digits.
        a = "%.*e" % (operand2 - 1, operand1)
        if num_before_dot < operand2:
            result = float(a)
        else:
            result = int(float(a))
    return result


def fract(operand):
    """Return a fractional approximation to a floating point number."""
    return Fraction(operand).limit_denominator()


def ncr(operand1, operand2):
    """n choose r, computed with exact integer arithmetic.

    The original used float true-division (/), which silently loses
    precision for large factorials; floor division keeps it exact.
    """
    result = (math.factorial(operand1)
              // math.factorial(operand2)
              // math.factorial(operand1 - operand2))
    return result
The problem is the newly named company has been hit with some of the same old violations. During roadside inspections, its trucks were pulled out of service nearly 32 percent of the time, and inspectors put the brakes on Elite drivers almost 10 percent of the time. Both numbers are well above the national average of 20.7 percent for vehicles and 5.5 percent for drivers. replica ray bans Nearly 24 million people attended more than 15,000 events in Global Spectrum venues last year. Based in Philadelphia, PA, Global Spectrum is part of Comcast Spectacor (http://www.fashionrayban.com/), one of the world's largest sports and entertainment companies. Comcast Spectacor, which operates in 48 of the 50 United States, also owns the Philadelphia Flyers of the National Hockey League, Ovations Food Services, a food and beverage services provider, Paciolan, the leading provider of venue establishment ticketing, fundraising and marketing technology solutions, Front Row Marketing Services, a commercial rights sales company, and Flyers Skate Zone, a series of community ice skating rinks replica ray bans.
# Copyright 2005-2017 ECMWF.
#
# This software is licensed under the terms of the Apache Licence Version 2.0
# which can be obtained at http://www.apache.org/licenses/LICENSE-2.0.
#
# In applying this licence, ECMWF does not waive the privileges and immunities
# granted to it by virtue of its status as an intergovernmental organisation
# nor does it submit to any jurisdiction.
#
# Python implementation: bufr_read_synop
#
# Description: how to read data values from BUFR messages.
#
# Please note that SYNOP reports can be encoded in various ways in BUFR.
# Therefore the code below might not work directly for other types of SYNOP
# messages than the one used in the example. It is advised to use bufr_dump to
# understand the structure of the messages.

import traceback
import sys

from eccodes import *

INPUT = '../../data/bufr/syno_multi.bufr'
VERBOSE = 1  # verbose error reporting


def example():
    """Iterate over the BUFR messages in INPUT and print selected keys."""
    # define the keys to be printed
    keys = [
        'blockNumber',
        'stationNumber',
        'latitude',
        'longitude',
        'airTemperatureAt2M',
        'dewpointTemperatureAt2M',
        'windSpeedAt10M',
        'windDirectionAt10M',
        '#1#cloudAmount',  # cloud amount (low and mid level)
        '#1#heightOfBaseOfCloud',
        '#1#cloudType',  # cloud type (low clouds)
        '#2#cloudType',  # cloud type (middle clouds)
        '#3#cloudType'   # cloud type (high clouds)
    ]

    # The cloud information is stored in several blocks in the
    # SYNOP message and the same key means a different thing in different
    # parts of the message. In this example we will read the first
    # cloud block introduced by the key
    # verticalSignificanceSurfaceObservations=1.
    # We know that this is the first occurrence of the keys we want to
    # read so in the list above we used the # (occurrence) operator
    # accordingly.

    cnt = 0

    # BUFR is a binary format, so the file must be opened in binary mode
    # ('rb'); the context manager also guarantees the file is closed even
    # if decoding raises.
    with open(INPUT, 'rb') as f:
        # loop for the messages in the file
        while True:
            # get handle for message
            bufr = codes_bufr_new_from_file(f)
            if bufr is None:
                break

            print("message: %s" % cnt)

            # we need to instruct ecCodes to expand all the descriptors
            # i.e. unpack the data values
            codes_set(bufr, 'unpack', 1)

            # print the values for the selected keys from the message
            for key in keys:
                try:
                    print(' %s: %s' % (key, codes_get(bufr, key)))
                except CodesInternalError as err:
                    print('Error with key="%s" : %s' % (key, err.msg))

            cnt += 1

            # delete handle
            codes_release(bufr)


def main():
    """Entry point: run the example and report ecCodes errors."""
    try:
        example()
    except CodesInternalError as err:
        if VERBOSE:
            traceback.print_exc(file=sys.stderr)
        else:
            sys.stderr.write(err.msg + '\n')
        return 1


if __name__ == "__main__":
    sys.exit(main())
This service is ideal for smaller growers, city farms, smallholders and allotment associations. You can order trays of plants to be delivered in the week you require them. You will receive a discount for multiple trays purchased on any one delivery week. Though we are well known for our organic plants, we grow conventional plants as well. We produce up to 40 million plants a year including glasshouse crops (tomatoes, etc.), leeks, onions, celery, brassicas, salads and herbs. These can be blocks, modules, plugs or pots. We grow almost anything from seed. The main part of our business is our wholesale side where you can order pallets of plants to be delivered on lorries throughout the year. All of our wholesale orders are grown to order and quoted on request. The minimum order quantity for wholesale is 4,000 plants with a minimum of 1,000 plants per variety. If you would like a quote for wholesale please send an email with your company details and what you would like to be quoted for to jordan@delfland.co.uk.
""" Date: 01-06-15 Authors: R.L. Brouwer """ import logging import numpy as np from nifty.functionTemplates.NumericalFunctionWrapper import NumericalFunctionWrapper import nifty as ny from zetaFunctionUncoupled import zetaFunctionUncoupled from src.util.diagnostics import KnownError class HydroLead: # Variables logger = logging.getLogger(__name__) # Methods def __init__(self, input): self.input = input return def run(self): """Run function to initiate the calculation of the leading order water level and velocities Returns: Dictionary with results. At least contains the variables listed as output in the registry """ self.logger.info('Running module HydroLead') # Initiate variables self.OMEGA = self.input.v('OMEGA') self.G = self.input.v('G') self.L = self.input.v('L') self.x = self.input.v('grid', 'axis', 'x') * self.input.v('L') jmax = self.input.v('grid', 'maxIndex', 'x') kmax = self.input.v('grid', 'maxIndex', 'z') fmax = self.input.v('grid', 'maxIndex', 'f') self.z = self.input.v('grid', 'axis', 'z', 0, range(0, kmax+1)) self.zarr = ny.dimensionalAxis(self.input.slice('grid'), 'z')[:, :, 0]-self.input.v('R', x=self.x/self.L).reshape((len(self.x), 1)) #YMD 22-8-17 includes reference level; note that we take a reference frame z=[-H-R, 0] self.bca = ny.amp_phase_input(self.input.v('A0'), self.input.v('phase0'), (2,))[1] # Prepare output d = dict() d['zeta0'] = {} d['u0'] = {} d['w0'] = {} zeta = np.zeros((jmax+1, 1, fmax+1), dtype=complex) zetax = np.zeros((jmax+1, 1, fmax+1), dtype=complex) zetaxx = np.zeros((jmax+1, 1, fmax+1), dtype=complex) # Run computations zeta[:, 0, 1], zetax[:, 0, 1], zetaxx[:, 0, 1] = self.waterlevel() u, w = self.velocity(zeta[:, 0, 1], zetax[:, 0, 1], zetaxx[:, 0, 1]) # Save water level results nf = NumericalFunctionWrapper(zeta, self.input.slice('grid')) nf.addDerivative(zetax, 'x') nf.addDerivative(zetaxx, 'xx') d['zeta0']['tide'] = nf.function # Save velocity results nfu = NumericalFunctionWrapper(u[0], self.input.slice('grid')) 
nfu.addDerivative(u[1], 'x') nfu.addDerivative(u[2], 'z') nfu.addDerivative(u[3], 'zz') nfu.addDerivative(u[4], 'zzx') d['u0']['tide'] = nfu.function nfw = NumericalFunctionWrapper(w[0], self.input.slice('grid')) nfw.addDerivative(w[1], 'z') d['w0']['tide'] = nfw.function return d def rf(self, x): """Calculate the root r = \sqrt(i\sigma / Av) of the characteristic equation and its derivatives wrt x. Parameters: x - x-coordinate Returns: r - root of the characteristic equation of the leading order horizontal velocity """ Av = self.input.v('Av', x=x/self.L, z=0, f=0) Avx = self.input.d('Av', x=x/self.L, z=0, f=0, dim='x') Avxx = self.input.d('Av', x=x/self.L, z=0, f=0, dim='xx') r = np.sqrt(1j * self.OMEGA / Av) rx = -np.sqrt(1j * self.OMEGA) * Avx / (2. * Av**(3./2.)) rxx = np.sqrt(1j * self.OMEGA) * (3. * Avx**2 - 2. * Av * Avxx) / (4. * Av**(5./2.)) return r, rx, rxx def af(self, x, r, rx, rxx): """Calculate the coefficient alpha that appears in the solution for the leading order horizontal velocity. Parameters: x - x-coordinatemm Returns: a - coefficient alpha """ H = self.input.v('H', x=x/self.L) + self.input.v('R', x=x/self.L) Hx = self.input.d('H', x=x/self.L, dim='x') + self.input.d('R', x=x/self.L, dim='x'), Hxx = self.input.d('H', x=x/self.L, dim='xx') + self.input.d('R', x=x/self.L, dim='xx') # YMD 15-08-17 Reference level Av = self.input.v('Av', x=x/self.L, z=0, f=0) Avx = self.input.d('Av', x=x/self.L, z=0, f=0, dim='x') Avxx = self.input.d('Av', x=x/self.L, z=0, f=0, dim='xx') sf = self.input.v('Roughness', x=x/self.L, z=0, f=0) sfx = self.input.d('Roughness', x=x/self.L, z=0, f=0, dim='x') sfxx = self.input.d('Roughness', x=x/self.L, z=0, f=0, dim='xx') # sf = sf[:, 0] # BUG (?) 23-02-2018 # Define trigonometric values for ease of reference sinhrh = np.sinh(r * H) coshrh = np.cosh(r * H) cothrh = coshrh / sinhrh cschrh = 1 / sinhrh # Define parameters and their (second) derivative wrt x E = rx * H + r * Hx Ex = rxx * H + 2. 
* rx * Hx + r * Hxx F = rx + r * E * cothrh Fx = rxx + r * Ex * cothrh + E * (rx * cothrh - r * E**2 * cschrh**2) K = r * Avx + Av * F + sfx * cothrh + sf * E Kx = (r * Avxx + rx * Avx + Avx * F + Av * Fx + sfxx * cothrh - sfx * E * cschrh**2 + sfx * E + sf * Ex) G = r * Av * sinhrh + sf * coshrh Gx = sinhrh * K Gxx = E * K * coshrh + Kx * sinhrh # Calculate coefficient alpha a = sf / G # a ax = sfx / G - sf * Gx / G**2 # a_x axx = (sfxx-(2.*sfx*Gx + sf*Gxx)/G + 2.*sf*Gx**2/G**2) / G # YMD bug corrected 27-2-2018 return a, ax, axx def waterlevel(self): """Solves the boundary value problem for the water level Returns: zeta - water level and its first and second derivative w.r.t. x """ jmax = self.input.v('grid', 'maxIndex', 'x') fmax = self.input.v('grid', 'maxIndex', 'f') r, rx, rxx = self.rf(self.x) a, ax, axx = self.af(self.x, r, rx, rxx) H = self.input.v('H', x=self.x / self.L) + self.input.v('R', x=self.x / self.L) M = ((a * np.sinh(r * H) / r) - H) * self.input.v('B', x=self.x / self.L) * (self.G / (1j * self.OMEGA)) F = np.zeros((jmax+1, 1), dtype=complex) # Forcing term shape (x, number of right-hand sides) Fopen = np.zeros((1, 1), dtype=complex) # Forcing term shape (1, number of right-hand sides) Fclosed = np.zeros((1, 1), dtype=complex) # Forcing term shape (1, number of right-hand sides) Fopen[0,0] = self.bca Z, Zx, _ = zetaFunctionUncoupled(1, M, F, Fopen, Fclosed, self.input, hasMatrix = False) zeta = Z[:, 0] zeta_x = Zx[:, 0] zeta_xx = np.gradient(Zx[:, 0], self.x[1], edge_order=2) return zeta, zeta_x, zeta_xx def velocity(self, zeta0, zetax, zetaxx): """Calculates the horizontal and vertical flow velocities based on the water level zeta Parameters: zeta - water level and its first and second derivative w.r.t. x Returns: u - horizontal flow velocity and several derivatives w.r.t. x and z w - vertical flow velocity and its derivative w.r.t. 
z """ # Initiate variables u = np.zeros((5, len(self.x), len(self.z), 3), dtype=complex) w = np.zeros((2, len(self.x), len(self.z), 3), dtype=complex) # Extract parameters alpha and r and B r, rx, rxx = self.rf(self.x) a, ax, axx = self.af(self.x, r, rx, rxx) r = r.reshape(len(self.x), 1) rx = rx.reshape(len(self.x), 1) a = a.reshape(len(self.x), 1) ax = ax.reshape(len(self.x), 1) B = self.input.v('B', x=self.x/self.L).reshape(len(self.x), 1) Bx = self.input.d('B', x=self.x/self.L, dim='x').reshape(len(self.x), 1) # reshape (derivatives of) zeta zeta0 = zeta0.reshape(len(self.x), 1) zetax = zetax.reshape(len(self.x), 1) zetaxx = zetaxx.reshape(len(self.x), 1) # Calculate velocities and derivatives c = self.G / (1j * self.OMEGA) sinhrz = np.sinh(r * self.zarr) coshrz = np.cosh(r * self.zarr) var1 = c * zetax var2 = (a * coshrz - 1.) var3 = a * rx * self.zarr * sinhrz # u u[0, :, :, 1] = var1 * var2 # u_x u[1, :, :, 1] = c * zetaxx * var2 + var1 * (ax * coshrz + var3) # u_z u[2, :, :, 1] = var1 * (a * r * sinhrz) # u_zz u[3, :, :, 1] = var1 * (a * r**2 * coshrz) # u_zz_x u[4, :, :, 1] = c * (zetaxx * a * r**2 * coshrz + zetax * (ax * r**2 * coshrz + 2. * a * r * rx * coshrz + r**2 * var3)) # w w[0, :, :, 1] = c * ((zetaxx + (Bx / B) * zetax) * (self.zarr - (a / r) * sinhrz) - (1 / r) * zetax * (sinhrz * ax + a * rx * (self.zarr * coshrz - (sinhrz / r))) - self.OMEGA**2 * zeta0 / self.G) # w_z w[1, :, :, 1] = -c * (var2 * (zetaxx + (Bx / B) * zetax) + zetax * (ax * coshrz + var3)) return u, w
On Saturday, March 9, 1,521 Bedford voters went to the polls for the 2019 Town Election. After accompanying my family to the polls earlier in the day, I returned by myself a few minutes before the doors were locked at 6 pm in order to see the results as quickly as possible. When I arrived, there was a nervous energy in the hall as people chatted while waiting for the results to be posted. This year was certainly more interesting than most, with three candidates running in a contested election for two seats as Selectmen. Congratulations and a large thank you to Town Clerk Doreen Tremblay, Election Warden Patricia Crew, and the dozens of Election Workers who toiled tirelessly to make this election a smooth process. Bedford’s 2019 Town Election, the most energetic contest the town has seen in several years, drew 1,521 voters and produced a new, or newly re-elected, team of town officers. Click the link in the story to see the official results posted by Town Clerk Doreen Tremblay. It’s not an extravagant turnout for a town election but entirely respectable: roughly 800 votes by midafternoon. Election Warden Patricia Crew opened the door to Bedford’s polling place at 8 o’clock sharp on Saturday morning, with just one voter waiting in line. There were reports of enthusiastic early voting, and the polls will be open until 6 pm, but the hordes were not out early. The Bedford Police Department reported a spate of missing campaign yard signs early Friday afternoon, March 8. When and Where to VoteThe polls will open at 8 am on Saturday, March 3 and will remain open until 6 pm. All four of Bedford’s precincts vote in the gymnasium at the John Glenn Middle School, 99 McMahon Road. In continuation of my debut to Bedford’s governmental and political landscape, I had the privilege of attending the League of Women Voters’ Candidate Forum on Sunday, March 3 at the John Glenn Middle School. 
The event, which also aired on Bedford TV, aimed to educate the Bedford voters about the various candidates for local office prior to the town election on Saturday, March 9. The candidates for the majority of offices, which were uncontested, were limited to only reading short campaign statements. However, the candidates for contested seats on the Library Board of Trustees and Selectmen were questioned more thoroughly by the attendees. 2019 League of Women Voters Town Election Candidate Forum – Watch the Tape! With thanks to Ralph Hammond and his intrepid JGMS volunteers, the Bedford League of Women Voters Candidate Forum is on the air!
# -*- coding: utf-8 -*- from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals import numpy as np from .tools.math import gradient try: from .lib.direct import _cabel_direct_integral cython_ext = True except (ImportError, UnicodeDecodeError): cython_ext = False ########################################################################### # direct - calculation of forward and inverse Abel transforms by direct # numerical integration # # Roman Yurchak - Laboratoire LULI, Ecole Polytechnique/CNRS/CEA, France # 07.2018: DH fixed the correction for the case where r[0] = 0 # 03.2018: DH changed the default grid from 0.5, 1.5 ... to 0, 1, 2. # 01.2018: DH dhanged the integration method to trapz # 12.2015: RY Added a pure python implementation # 11.2015: RY moved to PyAbel, added more unit tests, reorganized code base # 2012: RY first implementation in hedp.math.abel ########################################################################### def _construct_r_grid(n, dr=None, r=None): """ Internal function to construct a 1D spatial grid """ if dr is None and r is None: # default value, we don't care about the scaling # since the mesh size was not provided dr = 1.0 if dr is not None and r is not None: raise ValueError('Both r and dr input parameters cannot be specified \ at the same time') elif dr is None and r is not None: if r.ndim != 1 or r.shape[0] != n: raise ValueError('The input parameter r should be a 1D array' 'of shape = ({},), got shape = {}'.format( n, r.shape)) # not so sure about this, needs verification -RY dr = np.gradient(r) else: if isinstance(dr, np.ndarray): raise NotImplementedError r = (np.arange(n))*dr return r, dr def direct_transform(fr, dr=None, r=None, direction='inverse', derivative=gradient, int_func=np.trapz, correction=True, backend='C', **kwargs): """ This algorithm performs a direct computation of the Abel transform integrals. 
When correction=False, the pixel at the lower bound of the integral (where y=r) is skipped, which causes a systematic error in the Abel transform. However, if correction=True is used, then an analytical transform transform is applied to this pixel, which makes the approximation that the function is linear across this pixel. With correction=True, the Direct method produces reasonable results. The Direct method is implemented in both Python and, if Cython is available during PyAbel's installation, a compiled C version, which is much faster. The implementation can be selected using the backend argument. By default, integration at all other pixels is performed using the Trapezoidal rule. Parameters ---------- fr : 1d or 2d numpy array input array to which direct/inverse Abel transform will be applied. For a 2d array, the first dimension is assumed to be the z axis and the second the r axis. dr : float spatial mesh resolution (optional, default to 1.0) r : 1D ndarray the spatial mesh (optional). Unusually, direct_transform should, in principle, be able to handle non-uniform data. However, this has not been regorously tested. direction : string Determines if a forward or inverse Abel transform will be applied. can be 'forward' or 'inverse'. derivative : callable a function that can return the derivative of the fr array with respect to r. (only used in the inverse Abel transform). int_func : function This function is used to complete the integration. It should resemble np.trapz, in that it must be callable using axis=, x=, and dx= keyword arguments. correction : boolean If False the pixel where the weighting function has a singular value (where r==y) is simply skipped, causing a systematic under-estimation of the Abel transform. If True, integration near the singular value is performed analytically, by assuming that the data is linear across that pixel. The accuracy of this approximation will depend on how the data is sampled. 
backend : string There are currently two implementations of the Direct transform, one in pure Python and one in Cython. The backend paremeter selects which method is used. The Cython code is converted to C and compiled, so this is faster. Can be 'C' or 'python' (case insensitive). 'C' is the default, but 'python' will be used if the C-library is not available. Returns ------- out : 1d or 2d numpy array of the same shape as fr with either the direct or the inverse abel transform. """ backend = backend.lower() if backend not in ['c', 'python']: raise ValueError f = np.atleast_2d(fr.copy()) r, dr = _construct_r_grid(f.shape[1], dr=dr, r=r) if direction == "inverse": f = derivative(f)/dr f *= - 1./np.pi else: f *= 2*r[None, :] if backend == 'c': if not cython_ext: print('Warning: Cython extensions were not built, \ the C backend is not available!') print('Falling back to a pure Python backend...') backend = 'python' elif not is_uniform_sampling(r): print('Warning: non uniform sampling is currently not \ supported by the C backend!') print('Falling back to a pure Python backend...') backend = 'python' f = np.asarray(f, order='C', dtype='float64') if backend == 'c': out = _cabel_direct_integral(f, r, int(correction)) else: out = _pyabel_direct_integral(f, r, int(correction), int_func) if f.shape[0] == 1: return out[0] else: return out def _pyabel_direct_integral(f, r, correction, int_func=np.trapz): """ Calculation of the integral used in Abel transform (both direct and inverse). 
∞ ⌠ ⎮ f(r) ⎮ ────────────── dr ⎮ ___________ ⎮ ╱ 2 2 ⎮ ╲╱ y - r ⌡ y Returns: -------- np.array: of the same shape as f with the integral evaluated at r """ if correction not in [0, 1]: raise ValueError if is_uniform_sampling(r): int_opts = {'dx': abs(r[1] - r[0])} else: int_opts = {'x': r} out = np.zeros(f.shape) R, Y = np.meshgrid(r, r, indexing='ij') i_vect = np.arange(len(r), dtype=int) II, JJ = np.meshgrid(i_vect, i_vect, indexing='ij') mask = (II < JJ) I_sqrt = np.zeros(R.shape) I_sqrt[mask] = np.sqrt((Y**2 - R**2)[mask]) I_isqrt = np.zeros(R.shape) I_isqrt[mask] = 1./I_sqrt[mask] # create a mask that just shows the first two points of the integral mask2 = ((II > JJ-2) & (II < JJ+1)) for i, row in enumerate(f): # loop over rows (z) P = row[None, :] * I_isqrt # set up the integral out[i, :] = int_func(P, axis=1, **int_opts) # take the integral # correct for the extra triangle at the start of the integral out[i, :] = out[i, :] - 0.5*int_func(P*mask2, axis=1, **int_opts) """ Compute the correction. Here we apply an analytical integration of the cell with the singular value, assuming a piecewise linear behaviour of the data. The analytical abel transform for this trapezoid is: c0*acosh(r1/y) - c_r*y*acosh(r1/y) + c_r*sqrt(r1**2 - y**2) see: https://github.com/luli/hedp/blob/master/hedp/math/abel.py#L87-L104 """ if correction == 1: # precompute a few variables outside the loop: f_r = (f[:, 1:] - f[:, :-1])/np.diff(r)[None, :] isqrt = I_sqrt[II+1 == JJ] if r[0] < r[1]*1e-8: # special case for r[0] = 0 ratio = np.append(np.cosh(1), r[2:]/r[1:-1]) else: ratio = r[1:]/r[:-1] acr = np.arccosh(ratio) for i, row in enumerate(f): # loop over rows (z) out[i, :-1] += isqrt*f_r[i] + acr*(row[:-1] - f_r[i]*r[:-1]) return out def is_uniform_sampling(r): """ Returns True if the array is uniformly spaced to within 1e-13. Otherwise False. """ dr = np.diff(r) ddr = np.diff(dr) return np.allclose(ddr, 0, atol=1e-13)
Using RFID, companies can triple their order accuracy and reduce chargebacks. According to new research from the Auburn University RFID Lab and GS1 US, brand owners and retailers using Electronic Product Code (EPC)-enabled radio frequency identification (RFID) to optimize inventory management and reconcile product shipments are capable of achieving 99.9 percent order accuracy. Hi Jim - We are working on some cutting-edge stuff in India for retailers, and many of them have shown keen interest in RTLS and cycle counting. Are you considering partners in the Indian subcontinent, including India, Bhutan, Sri Lanka and Nepal?
# SensorTile Pocket watch
# by shaoziyang 2017
# http://www.micropython.org.cn
# https://github.com/shaoziyang/SensorTilePocketWatch
#
# MicroPython firmware script for the ST SensorTile board: shows a clock
# plus pressure/accelerometer/gyro pages on an SSD1306 OLED, with a
# battery indicator and an idle power-save mode.

import pyb
from st import SensorTile
from pyb import Timer, Pin, ExtInt, RTC
from micropython import const
import baticon  # bitmap font data for the battery icon

SLEEPCNT = const(18)   # seconds of inactivity before the display powers off
SW_PIN = 'PG11'        # user push-button pin
VUSB_PIN = 'PG10'      # USB-power detect pin

st = SensorTile()

from machine import I2C
# NOTE(review): the name `machine` is used below although only I2C was
# imported from it — presumably this MicroPython build exposes `machine`
# implicitly; confirm on the target firmware.
i2c=machine.I2C(-1, sda=machine.Pin("C1"), scl=machine.Pin("C0"), freq=400000)
from ssd1306 import SSD1306_I2C
oled = SSD1306_I2C(128, 64, i2c)

# Splash screen
oled.framebuf.rect(0,0,127,63,1)
oled.msg('Pocket',40,8)
oled.msg('Watch',44,28)
oled.text('MPY SensorTile', 8, 48)
oled.show()
pyb.delay(1000)
oled.fill(0)
oled.show()

flag = 1              # set by the 1 Hz timer ISR to trigger a display refresh
sleepcnt = SLEEPCNT   # countdown to power save
keypressed = 0        # set by the button ISR
keycnt = 0            # consecutive seconds the button has been held
page = 0              # current display page (0..4)

def rtcisr(t):
    # RTC wakeup callback: blink LED 1 as a heartbeat.
    pyb.LED(1).toggle()
    return

rtc=RTC()
#rtc.init()
rtc.wakeup(1000, rtcisr)

def tmisr(t):
    # 1 Hz timer callback: request a screen refresh in the main loop.
    global flag
    flag = 1

tm = Timer(1, freq=1, callback=tmisr)

def show_bat():
    # Show battery voltage and the sleep countdown on the bottom line.
    oled.puts('%4.2fV'%st.BatVolt(), 16, 56)
    oled.puts('%2d'%sleepcnt, 112, 56)
    oled.show()

def show_press(page):
    # Pressure readout: small font on page 1, large font (+temp) on page 2.
    if(page==1):
        oled.puts('%8.3f'%st.P(), 64, 0)
    elif(page==2):
        oled.msg('%8.3f'%st.P(), 48, 20)
        oled.msg("%5.1fC"%st.T(), 72, 36)

def show_temp():
    # Temperature readout on the bottom line.
    oled.puts("%5.1fC"%st.T(), 64, 56)

def show_accel(page):
    # Accelerometer X/Y/Z: small font on page 1, large font on page 3.
    if(page==1):
        oled.puts("%7.2f"%st.AX(), 64, 8)
        oled.puts("%7.2f"%st.AY(), 64, 16)
        oled.puts("%7.2f"%st.AZ(), 64, 24)
    elif(page==3):
        oled.msg("%7.2f"%st.AX(), 56, 0)
        oled.msg("%7.2f"%st.AY(), 56, 16)
        oled.msg("%7.2f"%st.AZ(), 56, 32)

def show_gyro(page):
    # Gyroscope X/Y/Z: small font on page 1, large font on page 4.
    if(page==1):
        oled.puts("%7.2f"%st.GX(), 64, 32)
        oled.puts("%7.2f"%st.GY(), 64, 40)
        oled.puts("%7.2f"%st.GZ(), 64, 48)
    elif(page==4):
        oled.msg("%7.2f"%st.GX(), 56, 0)
        oled.msg("%7.2f"%st.GY(), 56, 16)
        oled.msg("%7.2f"%st.GZ(), 56, 32)

def show_title(page):
    # Clear the display and draw the static labels for the given page.
    oled.fill(0) # clear screen
    if(page==1):
        oled.puts("Press:", 0, 0)
        oled.puts("Accel:", 0, 8)
        oled.puts("Gyro:", 0, 32)
    elif(page==2):
        oled.msg("Press", 0, 0)
    elif(page==3):
        oled.msg("Accel", 0, 0)
    elif(page==4):
        oled.msg("Gyro", 0, 0)

def show_time():
    # Clock page (page 0): large date + time; other pages: small time only.
    d = rtc.datetime()
    if(page==0):
        s = "%04d"%d[0]+"-"+"%02d"%d[1]+"-"+"%02d"%d[2]
        oled.msg(s, 16, 4)
        s = "%02d"%d[4]+":"+"%02d"%d[5]+":"+"%02d"%d[6]
        oled.msg(s, 16, 28)
        oled.puts("%8.1fC"%st.T(), 64, 56)
    else:
        s = "%02d"%d[4]+":"+"%02d"%d[5]+":"+"%02d"%d[6]
        oled.puts(s, 64, 56)

def swisr(t):
    # Button falling-edge ISR: flag the press for the main loop.
    global keypressed
    keypressed = 1
    #print('.')

def showbaticon(n, x, y):
    # Draw battery level n (clamped to 0..10) from the baticon bitmap
    # font at position (x, y); each glyph is 16x8 pixels.
    if(n > 10):
        n = 10
    if(n < 0):
        n = 0
    for i in range(16):
        d = baticon.font[n*16+i]
        for j in range(8):
            oled.pixel(x+i, y+7-j, d&(1<<j))

sw = pyb.ExtInt(SW_PIN, pyb.ExtInt.IRQ_FALLING, pyb.Pin.PULL_UP, callback=swisr)
btn = pyb.Pin(SW_PIN, pyb.Pin.IN, pull=pyb.Pin.PULL_UP)
vusb = pyb.Pin(VUSB_PIN, pyb.Pin.IN, pull=pyb.Pin.PULL_NONE)

batc = st.Bat()

def showbat():
    # Update the battery icon; while on USB power, animate a charging
    # sweep by incrementing the displayed level until it wraps.
    global batc
    if(vusb()):
        batc = batc + 1
        if(batc > 10):
            batc = st.Bat()
    else:
        batc = st.Bat()
    showbaticon(batc, 0, 56)
    oled.puts('%4.2fV'%st.BatVolt(), 16, 56)

show_title(page)

# Main loop: runs once per timer tick (flag set at 1 Hz).
while True:
    if(flag):
        flag = 0
        # keypressed: advance to the next display page
        if(keypressed):
            keypressed = 0
            sleepcnt = SLEEPCNT
            page = (page + 1)%5
            show_title(page)
        # key long pressed (held > 3 ticks): reset the board
        if(btn()==0):
            keycnt = keycnt + 1
            if(keycnt > 3):
                machine.soft_reset()
        else:
            keycnt = 0
        #show sensor
        show_press(page)
        show_accel(page)
        show_gyro(page)
        #show battery
        showbat()
        show_time()
        #power save: count down while on battery, then blank the display
        # and idle until the button is pressed again
        if(vusb()==0):
            if(sleepcnt>0):
                sleepcnt = sleepcnt - 1
            else:
                oled.poweroff()
                while True:
                    machine.idle()
                    #machine.sleep()
                    if(btn()==0):
                        break;
                keypressed = 0
                oled.poweron()
                sleepcnt = SLEEPCNT
            oled.puts('%d'%sleepcnt, 120, 48)
        else:
            oled.puts(' ', 120, 48)
        oled.show()
Are you looking for some content marketing ideas? Here you go! We have brought some excellent techniques to make content livelier, more attractive and engaging. Every content marketer needs to be acutely aware of ongoing topics and ideas. In this fast, pacing internet generation, this is important to switch contents quickly. At first, you need to know whom you are going to sell the content. Justifying the audiences is, of course, the priority. Try to involve all the existing group of the internet. The more traffic you can grow, the marketing will be much easier. The only source of gathering content ideas is the internet. So, you need to be updated along with the audiences. For your convenience, we have found 11 (+Bonus) content marketing ideas that can change the look of your contents and your audiences will fall in love with your posts. You need to prove your seriousness to the customers. The best way to do so is posting long blog posts. That should be informative and have proper citations. A certain amount of audiences loves to see something credible and useful. A 1000+ word nicely written blog content easily attracts people. But you need to be conscious of making it fascinating. Otherwise, audiences will not stick to the last of the post. It is possible to lose some readers for impatience too. So, make it right, turn it into an interesting, lengthy blog post. Go through your post twice or thrice. Now, find out the unclear parts of the your post. Then, make a list of the issues that may arise in your reader’s mind. This is quite an innovative content marketing idea. You can follow it to make a bridge between you and the audiences. When they finish reading the article, FAQs will satisfy them. And thus, they will LOVE it! You may feel the absence of your audience, though they read your posts. Let them connect with the subject more by participating in a poll. Ask their opinion by selecting one of the options available on the poll. 
This is an excellent way to interact and make the audience a part of your content. You can also add a feature of asking email address after participating there. Spread your marketing newsletter through those collected email addresses. Your audiences want credibility. This is why do not forget to provide the necessary facts and statistical data in your post. But that should not violate the essence of the central theme of your article. When the audience feels your seriousness and the necessity of the blog post, marketing will not be as difficult as before. In your content, add some merits and demerits. Your targeted audiences may want to see the upcoming advantageous and disadvantageous sides of a particular feature. Be honest and grab the attention! The post will be more user-friendly, and your audience can go through the whole subject at a glance. Again, the audience can easily remember the topic of your post. Another way to attract audiences with content is creating informative posts. People wants to get engaged and challenged with tasks. You can find a lot of Do-it-Yourself content over the internet. Ask them to participate in your product. Post How-to’s more and see the increasing traffic of your audiences. Yes! That can be a good idea to develop your content. Though some people like serious contents. You should add some fantastic, relevant quotes along with the posts. Some memorable lines of a poem can also interact with your targeted audiences. They must feel you as one of them. You should write what they want to read. This is the Mantra of becoming a legendary internet marketer. Do not just stick with writing. The picture is an excellent way to communicate with people. It is useful in making a universal content. If your audience does not understand the subject matter, the image will make it clear to him/her. Language will not be an obstacle for your content marketing. But you should remain sophisticated and stylize the picture according to your branding. 
Don’t create unnecessarily and irrelevant visual content. It is a good way to represent all the information about your content. You can also get a double advantage. Make a video of the content and publish it on YouTube. Then, add the link to your content. You will get more traffic on your website as well as more views on your YouTube channel. A lot of internet marketers follow this strategy. If you want to make content for social networks, do not make them lengthy. It is applicable only for websites or blogs. A micro-blogging site will not allow you to post long content. Add images, videos and links in a short social site content. Make it subjective and short. Let the audience comment, share and like your posts. Do not lose temper for negative comments. Work on those matters to improve the quality of contents. You can deliver better content in later posts. Follow a routine and create stories. People love storytelling! None likes a monotonous content. You can quickly switch a content into an amazing story. Firstly, introduce the subject. After that start to decorate the content with examples, facts, bars and ongoing affairs. Include all the positive sides and negative sides in the content. Let the audience be a part of your writing. The best part of the story should be your description. You should read a lot of attractive contents to improve this skill. You must have a good knowledge about your targeted audiences. Follow their links, profiles, and reaction. Your involvement in content making and marketing can lead you to a future legendary writer. These content marketing ideas are something that tons of folks are doing right now. So, try to innovate and decorate. Let the content speak for you! Enjoy! Subscribe now to get more free stuffs like this directly into your inbox next time. Which of these content marketing ideas you liked? Do you have some other suggestions? If so, please don’t hesitate in suggesting us by commenting below or contact us personally. 
I appreciate this informative blog post. I totally agree with your Technique #1. Writing longer blog posts can be a daunting task but it’s one way to outperform other pieces of content out there. I mean, there are so many blogs competing for your audience’s attention. So you have to make every effort count. I also suggest running an online contest to generate the best content marketing ideas. Basically what you’ll do is provide an incentive for every online participant who contributes a blog post idea and shares your campaign through a unique invite link. That way, you get brand exposure as well! Thanks for your vote of agreement. Building longer, useful and in-depth content for your blog is the foundation of any content marketing ideas. Also, online contest could be one of the best content marketing ideas as well. Appreciate your suggestion. Sometimes, it’s hard to find the right content for your blog. Writing the long posts and using the lists can be useful. Inviting your readers to participate in a poll is a great idea. It’s always about engaging your readers in a conversation to have the positive output. Yeah, sure, writing long posts and using the lists is always beneficial. And, agreed with you, it’s all about to engage your readers. And for that, what could be better than inviting your readers to participate in the poll to increase engagement rate. Thanks for your amazing feedback. Writing longer blog post will help by providing much more amount of information for readers to enjoy. But hopefully, not too many bloggers want to take this the wrong by simply adding more words to article. Yes, the main purpose of writing long blog post is to provide your visitors with most comprehensive informative. I am glad that you dropped by. It’s a good idea to write a long post. great !! Yes, no doubt about it. I am glad that you stopped by. Whoah, this weblog is excellent. I really like studying your posts. I am glad you stopped by. Thanks. Amazing Post! 
It's very tough to get blog post ideas. You shared some very informative points here. Good job! Keep it up. Love the way you format your post. That’s an excellent article, thanks Shakir! Glad you liked it, Nikhil. Glad you found this blog post about content marketing ideas helpful. Sorry for the late reply! I generally use a voice recorder to record all my thoughts, and then writing becomes much easier!
#!/usr/bin/env python3
"""Tiny combinational-logic simulator: gates, connectors and demo circuits."""


class LogicGate:
    """Base class for all gates: a label plus an on-demand output value."""

    def __init__(self, label):
        self.name = label
        self.output = None

    def getName(self):
        """Return this gate's label."""
        return self.name

    def getOutput(self):
        """Compute, cache and return the gate's output (0 or 1)."""
        self.output = self.calcLogicOutput()
        return self.output


class BinaryGate(LogicGate):
    """Gate with two input pins; an unconnected pin prompts the user."""

    def __init__(self, label):
        LogicGate.__init__(self, label)
        self.pinA = None
        self.pinB = None

    def getPinA(self):
        """Value of pin A: upstream gate's output, or interactive input."""
        if self.pinA is None:
            usr_pin_a = input("Enter Pin A value for " + self.name + ": ")
            # Any integer >= 1 counts as logic high.
            return int(usr_pin_a) >= 1
        return self.pinA.getFrom().getOutput()

    def getPinB(self):
        """Value of pin B: upstream gate's output, or interactive input."""
        if self.pinB is None:
            usr_pin_b = input("Enter Pin B value for " + self.name + ": ")
            return int(usr_pin_b) >= 1
        return self.pinB.getFrom().getOutput()

    def SetSrcPin(self, source):
        """Attach *source* (a Connector) to the first free pin (A, then B)."""
        if self.pinA is None:
            self.pinA = source
        elif self.pinB is None:
            self.pinB = source
        else:
            print("Source pins are already occupied")


class AndGate(BinaryGate):
    """Output 1 only when both pins are high."""

    def __init__(self, label):
        BinaryGate.__init__(self, label)

    def calcLogicOutput(self):
        # Read both pins unconditionally so interactive prompts (if any)
        # always occur for both inputs.
        pin_a = self.getPinA()
        pin_b = self.getPinB()
        return int(pin_a == 1 and pin_b == 1)


class OrGate(BinaryGate):
    """Output 1 when at least one pin is high."""

    def __init__(self, label):
        BinaryGate.__init__(self, label)

    def calcLogicOutput(self):
        pin_a = self.getPinA()
        pin_b = self.getPinB()
        return int(pin_a == 1 or pin_b == 1)


class NorGate(BinaryGate):
    """Output 1 only when both pins are low (negated OR)."""

    def __init__(self, label):
        BinaryGate.__init__(self, label)

    def calcLogicOutput(self):
        pin_a = self.getPinA()
        pin_b = self.getPinB()
        return int(not (pin_a == 1 or pin_b == 1))


class NandGate(BinaryGate):
    """Output 0 only when both pins are high (negated AND)."""

    def __init__(self, label):
        BinaryGate.__init__(self, label)

    def calcLogicOutput(self):
        pin_a = self.getPinA()
        pin_b = self.getPinB()
        return int(not (pin_a == 1 and pin_b == 1))


class XorGate(BinaryGate):
    """Output 1 when exactly one pin is high."""

    def __init__(self, label):
        BinaryGate.__init__(self, label)

    def calcLogicOutput(self):
        pin_a = self.getPinA()
        pin_b = self.getPinB()
        return int(pin_a != pin_b)


class UnaryGate(LogicGate):
    """Gate with a single input pin."""

    def __init__(self, label):
        LogicGate.__init__(self, label)
        self.pin = None

    def getPin(self):
        """Value of the pin: upstream gate's output, or interactive input."""
        if self.pin is None:
            usr_pin = input("Enter Pin value for " + self.name + ": ")
            return int(usr_pin) >= 1
        return self.pin.getFrom().getOutput()

    def SetSrcPin(self, source):
        """Attach *source* (a Connector) to the single pin."""
        if self.pin is None:
            self.pin = source
        else:
            print("Source pins are already occupied")


class NotGate(UnaryGate):
    """Inverter: output is the logical negation of the input."""

    def __init__(self, label):
        UnaryGate.__init__(self, label)

    def calcLogicOutput(self):
        return int(not self.getPin())


class CommonInput(LogicGate):
    """A shared input value that can feed several gates; prompts only once
    and caches the entered value in ``self.pin``."""

    def __init__(self, label):
        LogicGate.__init__(self, label)
        self.pin = None

    def calcLogicOutput(self):
        if self.pin is None:
            usr_pin = input("Enter Pin value for " + self.name + ": ")
            self.pin = int(usr_pin) >= 1
        return self.pin


class Connector:
    """Directed wire between two gates; registers itself on the
    destination gate's first free source pin."""

    def __init__(self, fromGate, toGate):
        self.fromGate = fromGate
        self.toGate = toGate
        toGate.SetSrcPin(self)

    def getFrom(self):
        return self.fromGate

    def getTo(self):
        return self.toGate


def HalfAdder():
    """Demo: 1-bit half adder; prints sum then carry."""
    g1 = CommonInput("A")
    g2 = CommonInput("B")
    g3 = XorGate("Sum")
    g4 = AndGate("Carrier")
    c1 = Connector(g1, g3)
    c2 = Connector(g2, g3)
    c3 = Connector(g1, g4)
    c4 = Connector(g2, g4)
    print(g3.getOutput())
    print(g4.getOutput())


def Test1():
    """Demo: NOT((G1 AND) OR (G2 AND)) with interactive pin values."""
    g1 = AndGate("G1")
    g2 = AndGate("G2")
    g3 = OrGate("G3")
    g4 = NotGate("G4")
    c1 = Connector(g1, g3)
    c2 = Connector(g2, g3)
    c3 = Connector(g3, g4)
    print(g4.getOutput())


def Test2():
    """Demo: NAND-of-NANDs style circuit with interactive pin values."""
    g1 = AndGate("G1")
    g2 = AndGate("G2")
    g3 = NotGate("G3")
    g4 = NotGate("G4")
    g5 = AndGate("G5")
    c1 = Connector(g1, g3)
    c2 = Connector(g2, g4)
    c3 = Connector(g3, g5)
    c4 = Connector(g4, g5)
    print(g5.getOutput())


if __name__ == "__main__":
    # g1 = NandGate("l1")
    # print(g1.calcLogicOutput())
    # g2 = NorGate("12")
    # print(g2.calcLogicOutput())
    # g1 = XorGate("xor")   # moved under the guard: previously this ran at
    # print(g1.getOutput()) # import time and prompted for input
    # Test1()
    HalfAdder()
FeeFreeTicket is your best and last stop for Bob Dylan Birthday Celebration: Big Brass Bed tickets without fees. For over 10 years we have been in beautiful New England selling tickets nationwide for Bob Dylan Birthday Celebration: Big Brass Bed. We offer without a doubt the largest inventory online for all Bob Dylan Birthday Celebration: Big Brass Bed tickets at prices that others resellers cannot compete with. Unlike many other ticket sites or ticket brokers, FeeFreeTicket.com will never add hidden fees or charges to your order and will guarantee your Bob Dylan Birthday Celebration: Big Brass Bed ticket purchase with our No Worry Guarantee and No Service Fee Promise! If you are looking for more information about Bob Dylan Birthday Celebration: Big Brass Bed you should check the wiki, current news and videoes for Bob Dylan Birthday Celebration: Big Brass Bed that we have listed below. Bob Dylan Birthday Celebration: Big Brass Bed News, videos and wiki pages are here for your convenience and to help when searching for Bob Dylan Birthday Celebration: Big Brass Bed tickets. When you need Bob Dylan Birthday Celebration: Big Brass Bed tickets we are here!
import psycopg2 as dbapi2
from flask import current_app
from message import Message
from flask_login import current_user


class MessageList:
    """Database-backed access layer for the MESSAGES table.

    Fixes over the previous revision: connections are managed with
    ``with`` blocks instead of being leaked, the DELETE statement's
    mismatched quoting is repaired, and user IDs fetched via
    ``fetchone()`` are unpacked to scalars before being used as SQL
    parameters (previously the whole row tuple was passed).
    """

    def __init__(self):
        # Legacy in-memory fields kept for backward compatibility;
        # all real storage happens in the database.
        self.messages = {}
        self.last_key = 0

    def add_message(self, message):
        """Insert *message*, sent by the logged-in user to message.reciever."""
        with dbapi2.connect(current_app.config['dsn']) as connection:
            cursor = connection.cursor()
            cursor.execute("""SELECT ID FROM USERS WHERE USERNAME=%s""",
                           (message.reciever,))
            # fetchone() returns a row tuple; take the ID scalar.
            recieverid = cursor.fetchone()[0]
            cursor.execute("""SELECT ID FROM USERS WHERE USERNAME=%s""",
                           (current_user.username,))
            senderid = cursor.fetchone()[0]
            cursor.execute("""INSERT INTO MESSAGES
                              (SENDERID, RECIEVERID, CONTENT, SENT)
                              VALUES (%s, %s, %s, %s)""",
                           (senderid, recieverid, message.content,
                            message.sent))
            connection.commit()

    def delete_message(self, messageid):
        """Delete the message row with the given MESSAGEID."""
        with dbapi2.connect(current_app.config['dsn']) as connection:
            cursor = connection.cursor()
            cursor.execute("""DELETE FROM MESSAGES WHERE MESSAGEID = %s""",
                           (messageid,))
            connection.commit()

    def get_message(self):
        """Return the first message joined with its sender's nickname."""
        with dbapi2.connect(current_app.config['dsn']) as connection:
            cursor = connection.cursor()
            query = ("SELECT MESSAGES.SENDERID, MESSAGES.RECIEVERID, "
                     "MESSAGES.CONTENT, USERPROFILE.NICKNAME "
                     "FROM MESSAGES INNER JOIN USERPROFILE "
                     "ON MESSAGES.SENDERID = USERPROFILE.ID")
            cursor.execute(query)
            senderid, recieverid, content, nickname = cursor.fetchone()
            return Message(nickname, recieverid, content)

    def get_messages(self):
        """Return [(messageid, Message)] for every message the logged-in
        user sent or received, with sender/receiver nicknames resolved."""
        with dbapi2.connect(current_app.config['dsn']) as connection:
            cursor = connection.cursor()
            cursor.execute("""SELECT ID FROM USERS WHERE USERNAME=%s""",
                           (current_user.username,))
            userid = cursor.fetchone()[0]
            cursor.execute(
                """SELECT T1.MESSAGEID, T1.SENDERID, T1.RECIEVERID,
                          T1.CONTENT, T2.NICKNAME AS SENDERNICK,
                          T3.NICKNAME AS RECIEVERNICK
                   FROM MESSAGES AS T1
                   INNER JOIN USERPROFILE AS T2 ON T1.SENDERID = T2.ID
                   INNER JOIN USERPROFILE AS T3 ON T1.RECIEVERID = T3.ID
                   WHERE SENDERID = %s OR RECIEVERID = %s""",
                (userid, userid))
            return [(key, Message(sendernick, recievernick, content))
                    for key, sender, reciever, content,
                    sendernick, recievernick in cursor]
No prefixing of logging output is done generically. Some output is unprefixed and should remain unprefixed. Compilation units that do not want output-message prefixing can opt out; that opt-out could be removed if the default is changed. This script should be run after this patch series is applied.
# ==================================================================================================
# Copyright 2011 Twitter, Inc.
# --------------------------------------------------------------------------------------------------
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this work except in compliance with the License.
# You may obtain a copy of the License in the LICENSE file, or at:
#
#  http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==================================================================================================

from __future__ import print_function

__author__ = 'John Sirois'

from . import Command

from twitter.pants.base import BuildFile, Target


class Filemap(Command):
  """Outputs a mapping from source file to the target that owns the source file."""

  __command__ = 'filemap'

  def setup_parser(self, parser, args):
    # No options: the command simply dumps the whole mapping.
    parser.set_usage("%prog filemap")
    parser.epilog = """Outputs a mapping from source file to the target that owns the source file. The mapping is output in 2 columns."""

  def __init__(self, root_dir, parser, argv):
    Command.__init__(self, root_dir, parser, argv)
    if self.args:
      self.error("The filemap subcommand accepts no arguments.")

  def execute(self):
    """Walk every BUILD file and print one 'sourcefile address' line per
    source file owned by a target."""
    for buildfile in BuildFile.scan_buildfiles(self.root_dir):
      for address in Target.get_all_addresses(buildfile):
        target = Target.get(address)
        # Targets without a populated 'sources' attribute own no files.
        owned_sources = getattr(target, 'sources', None)
        if owned_sources is None:
          continue
        for owned_source in owned_sources:
          print(owned_source, address)
Identify the threats that are most likely to happen in your situation, and then do enough research to give you an idea of how to go about planning for that particular risk. An example might be if you’re in an area known for very strong storms. How often does this happen? What’s the chance of another disastrous storm in the near future? What are the typical consequences that you must expect – breakdown of ordinary services such as electricity, communications, etc.? What about the possibility of looting? Once you have identified the major risks you may face, you will determine the length of time your supplies will need to last. One threat of nature may require enough supplies for a week or so, but a man made widespread societal disruption could go on for a long time. Once you have amassed all this data, the next step is to list the resources and supplies you will need for each eventuality. Don’t forget to think through the skills you will need for each eventuality, and list them under “resources”. When considering supplies and equipment, it is wise to keep in mind that the time may come when barter will become a means of exchange and certain items will hold particular value in such a case. Even if you have a well-stocked pantry and arsenal and your home is as defensible as possible, there may very well be a situation in which the most sensible plan is to abandon your safe haven, if even only for a short time. You should prepare an emergency plan and at least one alternate plan for each such contingency, so that when any of these unexpected events occur, you will be able to implement the appropriate plan immediately. When a hurricane is predicted to hit, or if you’ve been alerted to a high probability of a major terrorist incident that is imminent, are times when such pre-planning may save your life. This entry was posted in Featured, Survival Skills and tagged featured, Risks Assessment, Survival Skills, Threat Analysis.
#-------------------------------------------------------------------------------
#
# Copyright (c) 2013
# IMB, RWTH Aachen University,
# ISM, Brno University of Technology
# All rights reserved.
#
# This software is provided without warranty under the terms of the BSD
# license included in the AramisCDT top directory "license.txt" and may be
# redistributed only under the conditions described in the aforementioned
# license.
#
# Thanks for using Simvisage open source!
#
#-------------------------------------------------------------------------------

from traits.api import \
    HasTraits, Property, cached_property, Int, Array, Instance, Tuple, \
    Button, List, Float

from traitsui.api import View, UItem

import numpy as np
import os
import re
import platform
import time

# Pick a wall-clock timer per platform (time.clock was the high-resolution
# timer on old Windows/Python 2); only used for timing log messages.
if platform.system() == 'Linux':
    sysclock = time.time
elif platform.system() == 'Windows':
    sysclock = time.clock

from aramis_cdt.aramis_info import AramisInfo


class AramisNPyGen(HasTraits):
    '''Class providing tools for preparation (generation) *.npy data files
    from Aramis *.txt files for Aramis Crack Detection Tool (AramisCDT).
    *.npy files enable faster loading from disk.
    '''

    aramis_info = Instance(AramisInfo)

    # TODO: add to UI and multiply force
    force_t_mult_coef = Float(100.0)
    '''Multiplication coefficient to obtain force_t from AD channel value
    '''

    #===========================================================================
    # Undeformed state data
    #===========================================================================
    X = Property(Array, depends_on='aramis_info.data_dir')
    '''Array of values for undeformed state in the first step.
    '''
    @cached_property
    def _get_X(self):
        '''Load data (undeformed coordinates) for the first step from *.txt
        and save as *.npy. Also initializes the facet-grid extents
        (x_idx_min_0 ... nj) as a side effect.
        '''
        fname = self.aramis_info.undeformed_coords_filename
        print 'loading', fname, '...'

        start_t = sysclock()
        dir_npy = self.aramis_info.npy_dir
        if os.path.exists(dir_npy) == False:
            os.mkdir(dir_npy)
        fname_npy = os.path.join(dir_npy, fname + '.npy')
        fname_txt = os.path.join(self.aramis_info.data_dir, fname + '.txt')
        data_arr = np.loadtxt(fname_txt,
                              # skiprows=14,  # not necessary
                              usecols=[0, 1, 2, 3, 4])
        # Columns 0 and 1 hold the facet x/y indices; their min/max define
        # the (ni x nj) facet grid all later steps are mapped onto.
        self.x_idx_min_0 = int(np.min(data_arr[:, 0]))
        self.y_idx_min_0 = int(np.min(data_arr[:, 1]))
        self.x_idx_max_0 = int(np.max(data_arr[:, 0]))
        self.y_idx_max_0 = int(np.max(data_arr[:, 1]))
        self.ni = int(self.x_idx_max_0 - self.x_idx_min_0 + 1)
        self.nj = int(self.y_idx_max_0 - self.y_idx_min_0 + 1)
        data_arr = self._prepare_data_structure(data_arr)

        np.save(fname_npy, data_arr)
        print 'loading time =', sysclock() - start_t
        print 'number of missing facets is', \
            np.sum(np.isnan(data_arr).astype(int))
        return data_arr

    x_idx_min_0 = Int
    '''Minimum value of the indices in the first column of the undeformed state.
    '''

    y_idx_min_0 = Int
    '''Minimum value of the indices in the second column of the undeformed state.
    '''

    x_idx_max_0 = Int
    '''Maximum value of the indices in the first column of the undeformed state.
    '''

    y_idx_max_0 = Int
    '''Maximum value of the indices in the second column of the undeformed state.
    '''

    ni = Int
    '''Number of facets in x-direction
    '''

    nj = Int
    '''Number of facets in y-direction
    '''

    ad_channels_lst = List
    '''List of tuples (undeformed, deformed) obtained from AD channels in
    aramis file header
    '''

    x_0_shape = Property(Tuple, depends_on='X')
    '''Shape of undeformed data array.
    '''
    @cached_property
    def _get_x_0_shape(self):
        # 3 value columns (loadtxt usecols 2..4) reshaped onto the facet grid.
        return (3, self.nj, self.ni)

    i = Property(Int, depends_on='X')
    '''Indices in the first column of the undeformed state starting with zero.
    '''
    @cached_property
    def _get_i(self):
        return (np.arange(self.ni)[np.newaxis, :] *
                np.ones(self.nj)[:, np.newaxis]).astype(int)

    j = Property(Int, depends_on='X')
    '''Indices in the second column of the undeformed state starting with zero.
    '''
    @cached_property
    def _get_j(self):
        # Transposed so it aligns element-wise with ``i`` (shape nj x ni).
        return (np.arange(self.nj)[np.newaxis, :] *
                np.ones(self.ni)[:, np.newaxis]).T

    ad_channels_arr = Property(Array)
    def _get_ad_channels_arr(self):
        data = np.array(self.ad_channels_lst, dtype=float)
        return data

    generate_npy = Button
    '''Generate npy files from Aramis *.txt data
    '''
    def _generate_npy_fired(self):
        # Convert every step's *.txt file to *.npy and collect the
        # per-step (time, force) header values into ad_channels.npy.
        self.ad_channels_lst = []
        for step_idx in self.aramis_info.step_list:
            self._load_step_data(step_idx)
            self.__decompile_ad_channels(step_idx)
        np.save(os.path.join(self.aramis_info.npy_dir, 'ad_channels.npy'),
                self.ad_channels_arr)

    #===========================================================================
    # Data preparation methods
    #===========================================================================
    def _load_step_data(self, step_idx):
        '''Load data for the specified step from *.npy file. If file *.npy
        does not exist the data is load from *.txt and saved as *.npy.
        (improve speed of loading)
        '''
        fname = '%s%d' % (self.aramis_info.displacements_basename,
                          self.aramis_info.aramis_stage_list[step_idx])
        print 'loading', fname, '...'

        start_t = sysclock()
        dir_npy = self.aramis_info.npy_dir
        if os.path.exists(dir_npy) == False:
            os.mkdir(dir_npy)
        fname_npy = os.path.join(dir_npy, fname + '.npy')
        fname_txt = os.path.join(self.aramis_info.data_dir, fname + '.txt')
        # NOTE(review): the npy cache below is disabled, so the *.txt file
        # is always re-parsed and the *.npy regenerated — confirm intended.
        # if os.path.exists(fname_npy):
        #     data_arr = np.load(fname_npy)
        # else:
        data_arr = np.loadtxt(fname_txt,
                              # skiprows=14,  # not necessary
                              usecols=[0, 1, 2, 3, 4])
        data_arr = self._prepare_data_structure(data_arr)

        np.save(fname_npy, data_arr)
        print 'loading time =', sysclock() - start_t
        print 'number of missing facets is', \
            np.sum(np.isnan(data_arr).astype(int))
        return data_arr

    def _prepare_data_structure(self, input_arr):
        # Map the (possibly incomplete) facet rows of ``input_arr`` onto the
        # full (nj, ni) grid of the undeformed state; missing facets -> NaN.
        if self.ni == 0:
            # Accessing X lazily initializes ni/nj and the index extents.
            self.X
        data_arr = np.empty((self.ni * self.nj,
                             input_arr.shape[1] - 2), dtype=float)
        data_arr.fill(np.nan)

        # input indices (columns 1 and 2), shifted so the grid starts at 0
        in_indices = input_arr[:, :2].astype(int)
        in_indices[:, 0] -= self.x_idx_min_0
        in_indices[:, 1] -= self.y_idx_min_0
        # View as structured rows so whole (x, y) pairs compare at once.
        in_indices = in_indices.view([('', in_indices.dtype)] *
                                     in_indices.shape[1])

        # undeformed state indices
        un_indices = np.hstack((self.i.ravel()[:, np.newaxis],
                                self.j.ravel()[:, np.newaxis])).astype(int)
        un_indices = un_indices.view([('', un_indices.dtype)] *
                                     un_indices.shape[1])

        # data for higher steps have the same order of rows as
        # undeformed one but missing values
        mask = np.in1d(un_indices, in_indices, assume_unique=True)
        data_arr[mask] = input_arr[:, 2:]
        print data_arr.shape, self.x_0_shape
        data_arr = data_arr.T.reshape(self.x_0_shape)
        return data_arr

    def __decompile_ad_channels(self, step_idx):
        # Scan the first 20 header lines of the step's *.txt file for the
        # AD-0 force value and the 'deformt' time stamp.
        fname = '%s%d' % (self.aramis_info.displacements_basename,
                          self.aramis_info.aramis_stage_list[step_idx])
        with open(os.path.join(self.aramis_info.data_dir,
                               fname + '.txt')) as infile:
            for i in range(20):
                line = infile.readline()
                m = re.match(r'#\s+AD-0:\s+[-+]?\d+\.\d+\s+(?P<force>[-+]?\d+\.\d+)',
                             line)
                if m:
                    # m.groups('force')[0] picks the first group; the
                    # argument is only a default for unmatched groups.
                    force = float(m.groups('force')[0])
                else:
                    force = 0
                m = re.match(r'#\s+deformt:\s+(?P<time>[-+]?\d+\.\d+)', line)
                if m:
                    time = float(m.groups('time')[0])
        # NOTE(review): if no 'deformt' line matches in the header, 'time'
        # here is the shadowed ``time`` module, not a float — confirm the
        # header always contains a deformt line.
        self.ad_channels_lst.append([time, force])

    # Minimal UI: a single button that triggers the conversion.
    view = View(
                UItem('generate_npy'),
                )


if __name__ == '__main__':
    ns = ['TTb-1C-3cm-0-3300EP-V3_B1-Aramis2d-sideview-Xf15s13-Yf15s13']
    data_dir = '/media/raid/Aachen/simdb_large_txt/'
    for n in ns:
        AI = AramisInfo(data_dir=data_dir + n)
        print AI.step_list
        AG = AramisNPyGen(aramis_info=AI)
        # AG.configure_traits()
        AG.generate_npy = True
I was fortunate enough to present as the keynote speaker for HouSecCon 4. The first part of my presentation focused on the parallels between information security today and the dawn of the space age in the late 1950s. The second section dove into internet-wide measurement and details about Project Sonar. Since it may be a while before the video of the presentation is online, I wanted to share the content for those who may be interested and could not attend the event. A summary of the first section is below and the full presentation is attached to the end of this post. In 1957 the Soviet Union launched Sputnik, the first artificial satellite. The US public, used to being first in most things scientific, panicked at being beaten to space. This led to unprecedented levels of funding for science, math, and technology programs, the creation of NASA, and the first iteration of DARPA, known then as ARPA. Sputnik also set the precedent for the “freedom of space”. Although Sputnik was the first salvo in the space race, it was quickly left in the dust by increasingly powerful spy satellites. The Cold War accelerated technology by providing both funding and a focus for new development. Limited visibility meant that both sides were required to overestimate the capabilities of the other, driving both reconnaissance and weapon technology to new heights. The space age changed how we looked at the world. The internet was born from ARPA's attack-resistant computer network. Military technology was downleveled for civil use. GPS and public satellite imagery shrank the physical world. Public visibility led to accountability for despots and companies around the world. Global imagery shone a light on some of the darkest corners of the planet. Technology developed in the paranoid shadows of the Cold War radically changed how we live our lives today. The proverbial swords turned into plowshares faster than we could imagine.
The differences between military and consumer capabilities are shrinking every year. Crypto export control is useless in the face of strong implementations in the public domain. Alternatives to GPS are launching soon and location awareness has gone well beyond satellite triangulation. High-resolution thermal imaging systems are available off-the-shelf from international suppliers. So what does all of this have to do with information security? There are direct parallels between the start of the space age and the last decade of information security. Public fear over being out-gunned and out-innovated triggered a demand for improved security. Consumers and businesses are becoming aware of the real-world impact that a security failure can have. The more we move online, the more is at risk. Technology is been pushing forward at a phenomenal pace. Network neutrality draws similarities with the concept of “freedom in space”. Out of this environment, predators have emerged, first opportunistic criminals, and now organized crime, law enforcement, and intelligence agencies. The Snowden leaks have painted a detailed picture of how the US and its allies monitor and infiltrate computer networks around the globe. Although most of the security community assumed this kind of intelligence gathering went on, having it confirmed and brought into the limelight has been something else. Even the tin foil hat crowd didn't appear to be paranoid enough. Indeed, claims against China and Russia look weak in comparison to what we now know about US intelligence activities. To me, the most surprising thing is the lack of “cutting edge” techniques that have been exposed. Most of the methods and tools that have been leaked are not much different from what the security community is actively discussing at conferences like this. In fact, many of the tools and processes used by both intelligence and military groups are based on work by the security community. 
Snort, Nmap, Metasploit, and dozens of other open source security tools are mainstays of government-funded security operations, both defensive and offensive. Governments of every major power are pouring money into “cyber”, but the overlap between “secret” and “this talk I saw at defcon” is larger than ever. The biggest difference is where and how the techniques or tools are being used. Operationalized offense and defense processes are the dividing line between the defense industrial base and everyone else. It doesn't take a lot of skill or resources to break into most internet-facing systems. If the specific target is well-secured, the attacker can shift focus to another system nearby or even a system upstream. The number of vulnerable embedded devices on the internet is simply mind boggling. The Snowden leaks also confirmed that routers and switches are often preferred targets for intelligence operatives for this reason. My research efforts over the last few years have uncovered tens of millions of easily compromised devices on the internet. The number doesn't get any smaller. More and more vulnerable equipment continues to pile up. IBM, Symantec, SANS, and SecureWorks all provide internet “Threat Levels”. Dozens of commercial firms offer “threat intelligence” services. What actionable data are you getting from these firms? How do you know whether what they are providing is even accurate? Case in point. During 2012, an unknown researcher compromised 1.2 million nodes, using telnet and one of three passwords. 420,000 nodes were then used to conduct a scan of over 700 TCP and UDP ports across the entire internet. The same nodes were also used to send icmp probes and traceroutes to every addressable IPv4 address. Not a single “threat intelligence” vendor noticed the telnet exposure, its mass-compromise, or detected the scanning activity. 
In fact, nobody noticed the activity, and the internet became aware only after the researcher published a 9Tb data dump and extensive documentation and statistics from the project. The graphic you are seeing now is a 24 hour cycle of active public internet IPS (via ICMP), from this project. We can't improve things unless we can measure them. We cant defend our networks without knowing all of the weak links. We are starved for real information about internet threats. Not the activities of mindless bots and political activists, but the vulnerabilities that will be used against us in the future. Without this, we can't make good decisions, and we cant place pressure on negligent organizations. So, lets measure it. It is time for better visibility. It is time for accelerated improvement. It is time for a security space age.
# -*- coding: utf-8 -*-
"""
==========================
Bipartite Graph Algorithms
==========================
"""
# Copyright (C) 2013-2019 by
#   Aric Hagberg <hagberg@lanl.gov>
#   Dan Schult <dschult@colgate.edu>
#   Pieter Swart <swart@lanl.gov>
# All rights reserved.
# BSD license.
import networkx as nx
from networkx.algorithms.components import connected_components

__author__ = """\n""".join(['Jordi Torrents <jtorrents@milnou.net>',
                            'Aric Hagberg <aric.hagberg@gmail.com>'])
__all__ = ['is_bipartite', 'is_bipartite_node_set',
           'color', 'sets', 'density', 'degrees']


def color(G):
    """Returns a two-coloring of the graph.

    Raises an exception if the graph is not bipartite.

    Parameters
    ----------
    G : NetworkX graph

    Returns
    -------
    color : dictionary
       A dictionary keyed by node with a 1 or 0 as data for each node color.

    Raises
    ------
    exc:`NetworkXError` if the graph is not two-colorable.

    Examples
    --------
    >>> from networkx.algorithms import bipartite
    >>> G = nx.path_graph(4)
    >>> c = bipartite.color(G)
    >>> print(c)
    {0: 1, 1: 0, 2: 1, 3: 0}

    You can use this to set a node attribute indicating the biparite set:

    >>> nx.set_node_attributes(G, c, 'bipartite')
    >>> print(G.nodes[0]['bipartite'])
    1
    >>> print(G.nodes[1]['bipartite'])
    0
    """
    if G.is_directed():
        import itertools

        # For directed graphs, two-colorability ignores edge direction:
        # a node's relevant neighbors are its predecessors AND successors.
        def neighbors(v):
            return itertools.chain.from_iterable([G.predecessors(v),
                                                  G.successors(v)])
    else:
        neighbors = G.neighbors

    color = {}
    for n in G:  # handle disconnected graphs
        if n in color or len(G[n]) == 0:  # skip isolates
            continue
        queue = [n]
        color[n] = 1  # nodes seen with color (1 or 0)
        while queue:
            # list.pop() takes the last element, so this is a depth-first
            # traversal; traversal order does not affect the 2-coloring.
            v = queue.pop()
            c = 1 - color[v]  # opposite color of node v
            for w in neighbors(v):
                if w in color:
                    if color[w] == color[v]:
                        raise nx.NetworkXError("Graph is not bipartite.")
                else:
                    color[w] = c
                    queue.append(w)
    # color isolates with 0
    color.update(dict.fromkeys(nx.isolates(G), 0))
    return color


def is_bipartite(G):
    """ Returns True if graph G is bipartite, False if not.

    Parameters
    ----------
    G : NetworkX graph

    Examples
    --------
    >>> from networkx.algorithms import bipartite
    >>> G = nx.path_graph(4)
    >>> print(bipartite.is_bipartite(G))
    True

    See Also
    --------
    color, is_bipartite_node_set
    """
    # Delegates to color(): a graph is bipartite iff it is two-colorable.
    try:
        color(G)
        return True
    except nx.NetworkXError:
        return False


def is_bipartite_node_set(G, nodes):
    """Returns True if nodes and G/nodes are a bipartition of G.

    Parameters
    ----------
    G : NetworkX graph

    nodes: list or container
      Check if nodes are a one of a bipartite set.

    Examples
    --------
    >>> from networkx.algorithms import bipartite
    >>> G = nx.path_graph(4)
    >>> X = set([1,3])
    >>> bipartite.is_bipartite_node_set(G,X)
    True

    Notes
    -----
    For connected graphs the bipartite sets are unique.  This function handles
    disconnected graphs.
    """
    S = set(nodes)
    # Check each connected component independently: within a component the
    # bipartition is unique, so S must contain exactly one side of it.
    for CC in (G.subgraph(c).copy() for c in connected_components(G)):
        X, Y = sets(CC)
        if not ((X.issubset(S) and Y.isdisjoint(S)) or
                (Y.issubset(S) and X.isdisjoint(S))):
            return False
    return True


def sets(G, top_nodes=None):
    """Returns bipartite node sets of graph G.

    Raises an exception if the graph is not bipartite or if the input
    graph is disconnected and thus more than one valid solution exists.
    See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
    for further details on how bipartite graphs are handled in NetworkX.

    Parameters
    ----------
    G : NetworkX graph

    top_nodes : container, optional
      Container with all nodes in one bipartite node set. If not supplied
      it will be computed. But if more than one solution exists an exception
      will be raised.

    Returns
    -------
    X : set
      Nodes from one side of the bipartite graph.
    Y : set
      Nodes from the other side.

    Raises
    ------
    AmbiguousSolution
      Raised if the input bipartite graph is disconnected and no container
      with all nodes in one bipartite set is provided. When determining
      the nodes in each bipartite set more than one valid solution is
      possible if the input graph is disconnected.

    NetworkXError
      Raised if the input graph is not bipartite.

    Examples
    --------
    >>> from networkx.algorithms import bipartite
    >>> G = nx.path_graph(4)
    >>> X, Y = bipartite.sets(G)
    >>> list(X)
    [0, 2]
    >>> list(Y)
    [1, 3]

    See Also
    --------
    color
    """
    if G.is_directed():
        is_connected = nx.is_weakly_connected
    else:
        is_connected = nx.is_connected
    if top_nodes is not None:
        # Caller fixed one side explicitly; the other side is the rest.
        X = set(top_nodes)
        Y = set(G) - X
    else:
        if not is_connected(G):
            msg = 'Disconnected graph: Ambiguous solution for bipartite sets.'
            raise nx.AmbiguousSolution(msg)
        c = color(G)
        X = {n for n, is_top in c.items() if is_top}
        Y = {n for n, is_top in c.items() if not is_top}
    return (X, Y)


def density(B, nodes):
    """Returns density of bipartite graph B.

    Parameters
    ----------
    G : NetworkX graph

    nodes: list or container
      Nodes in one node set of the bipartite graph.

    Returns
    -------
    d : float
       The bipartite density

    Examples
    --------
    >>> from networkx.algorithms import bipartite
    >>> G = nx.complete_bipartite_graph(3,2)
    >>> X=set([0,1,2])
    >>> bipartite.density(G,X)
    1.0
    >>> Y=set([3,4])
    >>> bipartite.density(G,Y)
    1.0

    Notes
    -----
    The container of nodes passed as argument must contain all nodes
    in one of the two bipartite node sets to avoid ambiguity in the
    case of disconnected graphs.
    See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
    for further details on how bipartite graphs are handled in NetworkX.

    See Also
    --------
    color
    """
    n = len(B)
    m = nx.number_of_edges(B)
    nb = len(nodes)
    nt = n - nb
    if m == 0:  # includes cases n==0 and n==1
        d = 0.0
    else:
        if B.is_directed():
            # Directed graphs can have edges in both directions between the
            # sets, so the maximum possible edge count is 2 * nb * nt.
            d = m / (2.0 * float(nb * nt))
        else:
            d = m / float(nb * nt)
    return d


def degrees(B, nodes, weight=None):
    """Returns the degrees of the two node sets in the bipartite graph B.

    Parameters
    ----------
    G : NetworkX graph

    nodes: list or container
      Nodes in one node set of the bipartite graph.

    weight : string or None, optional (default=None)
       The edge attribute that holds the numerical value used as a weight.
       If None, then each edge has weight 1.
       The degree is the sum of the edge weights adjacent to the node.

    Returns
    -------
    (degX,degY) : tuple of dictionaries
       The degrees of the two bipartite sets as dictionaries keyed by node.

    Examples
    --------
    >>> from networkx.algorithms import bipartite
    >>> G = nx.complete_bipartite_graph(3,2)
    >>> Y=set([3,4])
    >>> degX,degY=bipartite.degrees(G,Y)
    >>> dict(degX)
    {0: 2, 1: 2, 2: 2}

    Notes
    -----
    The container of nodes passed as argument must contain all nodes
    in one of the two bipartite node sets to avoid ambiguity in the
    case of disconnected graphs.
    See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
    for further details on how bipartite graphs are handled in NetworkX.

    See Also
    --------
    color, density
    """
    bottom = set(nodes)
    top = set(B) - bottom
    return (B.degree(top, weight), B.degree(bottom, weight))
Torque Security Bangor supply and install the latest in CCTV systems for homes and businesses. Every security setup is different and that’s why our security expert will customise a CCTV solution to suit your home or business, ensuring your business, home and assets are protected. CCTV systems prevent stock loss and malicious damage in Bangor. They let you oversee workplace activity, inactivity and workplace safety in Bangor. They can help with marketing in Bangor. They also provide the ability to monitor your premises off site for maximum peace of mind in Bangor.
#!/usr/bin/python
"""PMS5003 particulate-matter sensor logger with optional Domoticz reporting.

Reads 32-byte data frames from a PMS5003 over a serial port, optionally
controlling the sensor's SET (enable) and RESET lines via GPIO, and can
push readings to a Domoticz server through its JSON HTTP API.
"""
import sys
import os
import time
import signal
import struct
import logging
import argparse
from collections import namedtuple
from threading import Event

from periphery import GPIO, Serial

import urllib
import urllib2

logging.basicConfig(level=logging.DEBUG, stream=sys.stderr,
                    format='%(asctime)-15s %(levelname)-8s %(message)s')
log = logging.getLogger()

parser = argparse.ArgumentParser(description='PMS5003 data logger')
parser.add_argument(
    "-p", "--serial-port", type=str, default="/dev/ttyS1",
    help="Serial port connected to the PMS5003 sensor")
parser.add_argument(
    "--reset-pin", type=int, default=None,
    help="GPIO number connected to the RESET signal")
parser.add_argument(
    "--enable-pin", type=int, default=None,
    help="GPIO number connected to the SET (enable) signal")
parser.add_argument(
    "--warmup-time", type=int, default=30,
    help="Seconds to wait before reading data")
subparsers = parser.add_subparsers(dest="cmd")

cmd_monitor_parser = subparsers.add_parser("monitor")
cmd_monitor_parser.add_argument(
    "--measure-period", type=int, default=60 * 5,
    help="Seconds between measurements")

cmd_oneshot_parser = subparsers.add_parser("oneshot")

cmd_domoticz_parser = subparsers.add_parser("domoticz")
cmd_domoticz_parser.add_argument(
    "-ip", "--domoticz-ip", required=True,
    help="IP address of domoticz server")
cmd_domoticz_parser.add_argument(
    "-p", "--domoticz-port", default=8080,
    help="Port of domoticz server")
cmd_domoticz_parser.add_argument(
    "-m", "--mode", default='oneshot', choices=['oneshot', 'monitor'],
    help="Monitor or oneshot mode")
cmd_domoticz_parser.add_argument(
    "--measure-period", type=int, default=60 * 5,
    help="Seconds between measurements")
cmd_domoticz_parser.add_argument(
    "--pm_1_idx",
    help="IDX of PM1 - if empty nothing will be reported to domoticz")
cmd_domoticz_parser.add_argument(
    "--pm_25_idx",
    help="IDX of PM2.5 - if empty nothing will be reported to domoticz")
cmd_domoticz_parser.add_argument(
    "--pm_10_idx",
    help="IDX of PM10 - if empty nothing will be reported to domoticz")
cmd_domoticz_parser.add_argument(
    "--pm_1_percent_idx",
    help="IDX of PM1 percent (100%% is 25 ug/m3) - if empty nothing will be reported to domoticz")
cmd_domoticz_parser.add_argument(
    "--pm_25_percent_idx",
    help="IDX of PM2.5 percent (100%% is 25 ug/m3) - if empty nothing will be reported to domoticz")
cmd_domoticz_parser.add_argument(
    "--pm_10_percent_idx",
    help="IDX of PM10 percent (100%% is 50 ug/m3) - if empty nothing will be reported to domoticz")

# One decoded PMS5003 frame: standard and atmospheric PM concentrations
# plus particle counts per size bin.
Packet = namedtuple('Packet', [
    'pm1_std', 'pm25_std', 'pm10_std',
    'pm01_atm', 'pm2_5_atm', 'pm10_atm',
    'count_03um', 'count_05um', 'count_1um',
    'count_2_5um', 'count_5um', 'count_10um'])


class PMS5003(object):
    """Driver for a PMS5003 sensor on a serial port with optional GPIOs."""

    def __init__(self, port, enable_pin=None, reset_pin=None):
        self.port = Serial(port, 9600)
        self.gpio_enable = None
        self.gpio_reset = None
        self.stop = Event()
        # suspend sensor by default
        if enable_pin:
            self.gpio_enable = GPIO(enable_pin, "low")
        if reset_pin:
            self.gpio_reset = GPIO(reset_pin, "high")

    def reset(self):
        """Pulse the RESET line low (no-op without a reset GPIO)."""
        if self.gpio_reset is None:
            return
        self.gpio_reset.write(False)
        self.enable()
        time.sleep(.1)
        self.gpio_reset.write(True)

    def enable(self):
        """Drive the SET line high to wake the sensor."""
        if not self.gpio_enable:
            return
        log.info("Enable sensor (via gpio %s)", self.gpio_enable.pin)
        self.gpio_enable.write(True)

    def disable(self):
        """Drive the SET line low to put the sensor to sleep."""
        if not self.gpio_enable:
            return
        log.info("Disable sensor (via gpio %s)", self.gpio_enable.pin)
        self.gpio_enable.write(False)

    def discard_input(self):
        """Drain any stale bytes from the serial input buffer."""
        while self.port.input_waiting():
            self.port.read(4096, 0)

    def warmup(self, seconds):
        """Wait for the sensor to stabilize, then drop buffered readings."""
        log.info("Warming up for %s seconds", seconds)
        self.stop.wait(seconds)
        self.discard_input()

    @staticmethod
    def packet_from_data(data):
        """Decode a full 32-byte frame; return a Packet or None on bad checksum.

        The checksum transmitted in the last 16-bit word must equal the sum
        of every preceding byte in the frame.
        """
        numbers = struct.unpack('>16H', data)
        csum = sum(data[:-2])
        if csum != numbers[-1]:
            log.warning("Bad packet data: %s / %s", data, csum)
            return
        # Skip the start-marker and frame-length words, and the checksum.
        return Packet(*numbers[2:-2])

    def receive_one(self):
        """Block until one valid frame arrives and return its Packet.

        Returns None if the stop event is set before a frame is received.
        """
        while not self.stop.is_set():
            # Hunt for the 0x42 0x4d start-of-frame marker.
            c = self.port.read(1)
            if not c or c != '\x42':
                continue
            c = self.port.read(1, .1)
            if not c or c != '\x4d':
                continue
            data = bytearray((0x42, 0x4d,))
            data += self.port.read(30, .1)
            if len(data) != 32:
                continue
            p = self.packet_from_data(data)
            if p:
                return p


def run_monitor(sensor, args):
    """Read packets forever, sleeping (sensor disabled) between measurements."""
    start_at = time.time()
    sleep_period = args.measure_period - args.warmup_time
    if args.enable_pin:
        sensor.enable()
    if args.warmup_time:
        sensor.warmup(args.warmup_time)
    try:
        while not sensor.stop.is_set():
            packet = sensor.receive_one()
            if not packet:
                break
            packet_at = time.time()
            log.info("@{: 6.2f}\t{}".format((packet_at - start_at), packet))
            if args.cmd == "domoticz":
                report_to_domoticz(packet, args)
            if sleep_period > 0:
                # Power the sensor down between measurements and re-warm it
                # before the next read.
                sensor.disable()
                sensor.stop.wait(sleep_period)
                if sensor.stop.is_set():
                    break
                sensor.reset()
                sensor.enable()
                sensor.warmup(args.warmup_time)
            else:
                sensor.stop.wait(args.measure_period)
    except KeyboardInterrupt:
        log.info("Bye bye.")
    finally:
        sensor.disable()


def run_oneshot(sensor, args):
    """Take a single measurement, report it, and shut the sensor down."""
    if args.enable_pin:
        sensor.enable()
    if args.warmup_time:
        sensor.warmup(args.warmup_time)
    try:
        packet = sensor.receive_one()
        log.info("{}".format(packet))
        if args.cmd == "domoticz":
            report_to_domoticz(packet, args)
    except KeyboardInterrupt:
        log.info("Bye bye.")
    finally:
        # BUGFIX: disable() was previously called a second time after the
        # finally block; once is sufficient.
        sensor.disable()


def install_signal_handlers(sensor):
    """Make SIGINT/SIGTERM request a clean stop via the sensor's Event."""
    def _sighandler(signum, frame):
        log.info("Got %s", signum)
        sensor.stop.set()
    signal.signal(signal.SIGINT, _sighandler)
    signal.signal(signal.SIGTERM, _sighandler)


def report_to_domoticz(packet, args):
    """Push each configured IDX to Domoticz.

    The *_percent values scale the atmospheric readings so that 100%%
    corresponds to 25 ug/m3 (PM1/PM2.5) or 50 ug/m3 (PM10).
    """
    if args.pm_1_idx:
        send_http_request_to_domoticz(ip=args.domoticz_ip,
                                      port=args.domoticz_port,
                                      idx=args.pm_1_idx,
                                      idx_value=packet.pm01_atm)
    if args.pm_25_idx:
        send_http_request_to_domoticz(ip=args.domoticz_ip,
                                      port=args.domoticz_port,
                                      idx=args.pm_25_idx,
                                      idx_value=packet.pm2_5_atm)
    if args.pm_10_idx:
        send_http_request_to_domoticz(ip=args.domoticz_ip,
                                      port=args.domoticz_port,
                                      idx=args.pm_10_idx,
                                      idx_value=packet.pm10_atm)
    if args.pm_1_percent_idx:
        send_http_request_to_domoticz(ip=args.domoticz_ip,
                                      port=args.domoticz_port,
                                      idx=args.pm_1_percent_idx,
                                      idx_value=packet.pm01_atm * 4)
    if args.pm_25_percent_idx:
        send_http_request_to_domoticz(ip=args.domoticz_ip,
                                      port=args.domoticz_port,
                                      idx=args.pm_25_percent_idx,
                                      idx_value=packet.pm2_5_atm * 4)
    if args.pm_10_percent_idx:
        send_http_request_to_domoticz(ip=args.domoticz_ip,
                                      port=args.domoticz_port,
                                      idx=args.pm_10_percent_idx,
                                      idx_value=packet.pm10_atm * 2)


def send_http_request_to_domoticz(ip, port, idx, idx_value):
    """Send one 'udevice' update to Domoticz; errors are logged, not raised."""
    # BUGFIX: port defaults to the int 8080, so it must be converted with
    # str() before concatenation (previously raised TypeError).
    url = ("http://" + ip + ":" + str(port) +
           "/json.htm?type=command&param=udevice&nvalue=0&idx=" + str(idx) +
           "&svalue=" + str(idx_value))
    request = urllib2.Request(url)
    try:
        urllib2.urlopen(request)
    except urllib2.HTTPError as e:
        log.info('HTTPError = ' + str(e.code))
    except urllib2.URLError as e:
        log.info('URLError = ' + str(e.reason))
    except Exception:
        import traceback
        log.info('generic exception: ' + traceback.format_exc())


def main():
    args = parser.parse_args()
    sensor = PMS5003(args.serial_port, args.enable_pin, args.reset_pin)
    sensor.reset()
    install_signal_handlers(sensor)
    if args.cmd == "monitor":
        run_monitor(sensor, args)
    elif args.cmd == "oneshot":
        run_oneshot(sensor, args)
    elif args.cmd == "domoticz":
        if args.mode == "monitor":
            run_monitor(sensor, args)
        elif args.mode == "oneshot":
            run_oneshot(sensor, args)


if __name__ == "__main__":
    main()
The soft hue of 14K yellow gold highlights the sparkle of the round diamonds in this enchanting engagement ring. The center features a lovely sun-inspired design as the main attraction, with delicate milgrain to complement. The ring has a total diamond weight of 1 carat. Diamond Total Carat Weight may range from .95 - 1.11 carats.
# Copyright 2017 NEC Corporation.  All rights reserved.
#
#    Licensed under the Apache License, Version 2.0 (the "License"); you may
#    not use this file except in compliance with the License. You may obtain
#    a copy of the License at
#
#         http://www.apache.org/licenses/LICENSE-2.0
#
#    Unless required by applicable law or agreed to in writing, software
#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
#    License for the specific language governing permissions and limitations
#    under the License.

import os
import time

from oslo_serialization import jsonutils as json

from tempest.lib.api_schema.response.volume import versions as schema
from tempest.lib.common import rest_client
from tempest.lib.services.volume import base_client


class VersionsClient(base_client.BaseClient):
    """Client for the volume API version discovery endpoints."""

    def list_versions(self):
        """List API versions

        For a full list of available parameters, please refer to the official
        API reference:
        https://docs.openstack.org/api-ref/block-storage/v3/#list-all-api-versions
        """
        version_url = self._get_base_version_url()

        # The version listing endpoint requires no authentication, so bypass
        # the usual request() machinery with raw_request() and reproduce the
        # timing/logging/error-checking that request() would have done.
        started = time.time()
        resp, body = self.raw_request(version_url, 'GET')
        finished = time.time()
        self._log_request('GET', version_url, resp,
                          secs=(finished - started), resp_body=body)
        self._error_checker(resp, body)

        decoded = json.loads(body)
        self.validate_response(schema.list_versions, resp, decoded)
        return rest_client.ResponseBody(resp, decoded)

    def show_version(self, version):
        """Show API version details

        For a full list of available parameters, please refer to the official
        API reference:
        https://docs.openstack.org/api-ref/block-storage/v3/#show-api-v3-details
        """
        version_url = os.path.join(self._get_base_version_url(), version)
        resp, body = self.get(version_url)
        decoded = json.loads(body)
        self.validate_response(schema.volume_api_version_details, resp,
                               decoded)
        return rest_client.ResponseBody(resp, decoded)
Autodesk, Inc. is a leader in 3D design, engineering and entertainment software. Customers across manufacturing, architecture, building, construction, and media and entertainment industries-including the last 17 Academy Award winners for Best Visual Effects-use Autodesk software to design, visualize, and simulate their ideas before they’re ever built or created. From blockbuster visual effects and buildings that create their own energy to electric cars and the batteries that power them, the work of our 3D software customers is everywhere you look. Through our apps for iPhone, iPad, iPod, and Android, we’re also making design technology accessible to professional designers as well as amateur designers, homeowners, students, and casual creators. Whether it’s a kid looking to build a new contraption, a seasoned pro sketching out a great new idea, or someone who just wants to amp up their creative output, we’re taking technology originally built for movie studios, automakers, and architectural firms, and making it available to anyone who wants to create and share ideas with the world. Since the introduction of AutoCAD software in 1982, Autodesk continues to develop the broadest portfolio of state-of-the-art 3D software for global markets.
import os.path
import subprocess
from codecs import open

from setuptools.command.sdist import sdist
from setuptools import setup

import motioneye

here = os.path.abspath(os.path.dirname(__file__))

name = 'motioneye'
version = motioneye.VERSION

with open(os.path.join(here, 'README.rst'), encoding='utf-8') as f:
    long_description = f.read()

# update the version according to git
# BUGFIX: the null device must be opened for *writing* to serve as stderr
# (it was previously opened in the default read mode); os.devnull is also
# portable, unlike the hard-coded '/dev/null'.
git_version = subprocess.Popen(
    'git describe --tags',
    stdout=subprocess.PIPE,
    stderr=open(os.devnull, 'w'),
    shell=True).communicate()[0].strip()
if git_version:
    print('detected git version %s' % git_version)
    version = git_version
else:
    print('using found version %s' % version)


class custom_sdist(sdist):
    """sdist that bakes the git-derived version into the package source."""

    def run(self):
        if git_version:
            subprocess.Popen(
                "sed -ri 's/VERSION = (.+)/VERSION = \"%s\"/' %s/__init__.py" %
                (git_version, name),
                shell=True).communicate()
        sdist.run(self)


setup(
    name=name,
    version=version,

    description='motionEye server',
    long_description=long_description,

    url='https://bitbucket.org/ccrisan/motioneye/',

    author='Calin Crisan',
    author_email='ccrisan@gmail.com',

    license='GPLv3',

    classifiers=[
        'Development Status :: 4 - Beta',
        'Intended Audience :: End Users/Desktop',
        'Topic :: Multimedia :: Video',
        'License :: OSI Approved :: GNU Lesser General Public License v3 (LGPLv3)',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7'
    ],

    keywords='motion video surveillance frontend',

    packages=['motioneye'],

    install_requires=['tornado>=3.1', 'jinja2', 'pillow', 'pycurl'],

    package_data={
        'motioneye': [
            'static/*.*',
            'static/*/*',
            'templates/*'
        ]
    },

    data_files=[
        (os.path.join('share/%s' % name, root),
         [os.path.join(root, f) for f in files])
        for (root, dirs, files) in os.walk('extra')
    ],

    entry_points={
        'console_scripts': [
            'meyectl=motioneye.meyectl:main',
        ],
    },

    cmdclass={
        'sdist': custom_sdist
    }
)
Helix M12 Wheel Stud Kits make wheel changes quicker and easier. Instead of having to line up the wheel lug bolt holes with the hub holes while holding the wheel and threading the lug bolts in, you can now just put the wheel on the hub and thread on the 19mm lugnut. This is great for changing wheels at the autocross or track. Or if you have a trailer queen that has to be so clean that you actually remove the wheels to wash them. Quick, easy install. Comes in silver and includes 16 studs and nuts as shown. Note: In late 2006, MINIs went to M14 wheel lug bolts. Please check your wheel lug bolt size before ordering. Note: These studs, while longer than stock, will not work with spacers. Look at the WMW Studs for that option. As advertised, shiny studs and shiny steel nuts, works great and comes with cool decals. EXCELLENT to be able to "hang" the wheel while changing wheels. REMEMBER that the OE Mini lug wrench is 17mm, so put a 19mm wrench in the car! Also, your Department Store "X" lug wrench isn't typically deep enough to loosen these lug nuts on the studs. I threw an 18" breaker bar and a 19mm deep well socket in the car, total cost under $20 from (insert name of discount tool shop).
__author__ = 'jcastro'

from tournament import Tournament
import abc


class Game(object):
    """Abstract scoring rule for one kind of tournament game."""

    __metaclass__ = abc.ABCMeta

    @abc.abstractmethod
    def calculate_score(self, score):
        """Returns final score for player"""


class Basketball(Game):
    # Weights applied to the three raw score columns, per player position.
    point_position = {
        'G': [2, 3, 1],
        'F': [2, 2, 2],
        'C': [2, 1, 3]
    }

    def calculate_score(self, game):
        """Weight each raw score column by the player's position weights."""
        raw_scores = game['score']
        position = game['position']
        total = 0
        if position in self.point_position:
            weights = self.point_position[position]
            for column, raw in enumerate(raw_scores):
                total += int(raw) * int(weights[column])
        return total


# player 1;nick1;4;Team A;G;10;2;7
# player 2;nick2;8;Team A;F;0;10;0
# player 3;nick3;15;Team A;C;15;10;4
# player 4;nick4;16;Team B;G;20;0;0
# player 5;nick5;23;Team B;F;4;7;7
# player 6;nick6;42;Team B;C;8;10;0

my_tournament = Tournament()

my_tournament.create_game("basketball", Basketball())
strPlayer = "player 1;nick1;4;Team A;G;10;2;7"
strPlayer2 = "player 2;nick2;8;Team A;F;0;10;0"
strPlayer3 = "player 4;nick4;16;Team B;G;20;0;0"
my_tournament.parser("basketball", strPlayer)
my_tournament.parser("basketball", strPlayer2)
my_tournament.parser("basketball", strPlayer3)

my_tournament.create_game("basketballHARD", Basketball())
strPlayer = "player 1;nick1;4;Team A;G;10;2;7"
strPlayer2 = "player 2;nick2;8;Team A;F;0;10;0"
strPlayer3 = "player 4;nick4;16;Team B;G;230;0;0"
my_tournament.parser("basketballHARD", strPlayer)
my_tournament.parser("basketballHARD", strPlayer2)
my_tournament.parser("basketballHARD", strPlayer3)

print(my_tournament.mvp())
A DoneAction with id 12, freeing the enclosing synth when the UGen is finished. If the successor node is a group, calls deepFree on that group. If the successor node is a synth, frees that synth. The identifier which is recognized by UGens such as Done.
""" Harvest a detailed list of clients seen by online routers. """ import itertools import pickle import pkg_resources from . import base class List(base.ECMCommand): """ Show the currently connected clients on a router. The router must be connected to ECM for this to work. """ # XXX Broken when len(clients) > page_size name = 'ls' wifi_bw_modes = { 0: "20", 1: "40", 2: "80" } wifi_modes = { 0: "802.11b", 1: "802.11g", 2: "802.11n", 3: "802.11n-only", 4: "802.11ac" } wifi_bands = { 0: "2.4", 1: "5" } def setup_args(self, parser): self.add_router_argument('idents', nargs='*') self.add_argument('-v', '--verbose', action="store_true") self.inject_table_factory() @property def mac_db(self): try: return self._mac_db except AttributeError: mac_db = pkg_resources.resource_stream('ecmcli', 'mac.db') self._mac_db = pickle.load(mac_db) return self._mac_db def mac_lookup_short(self, info): return self.mac_lookup(info, 0) def mac_lookup_long(self, info): return self.mac_lookup(info, 1) def mac_lookup(self, info, idx): mac = int(''.join(info['mac'].split(':')[:3]), 16) localadmin = mac & 0x20000 # This really only pertains to cradlepoint devices. 
if localadmin and mac not in self.mac_db: mac &= 0xffff return self.mac_db.get(mac, [None, None])[idx] def make_dns_getter(self, ids): dns = {} for leases in self.api.get_pager('remote', 'status/dhcpd/leases', id__in=','.join(ids)): if not leases['success'] or not leases['data']: continue dns.update(dict((x['mac'], x['hostname']) for x in leases['data'])) return lambda x: dns.get(x['mac'], '') def make_wifi_getter(self, ids): wifi = {} radios = {} for x in self.api.get_pager('remote', 'config/wlan/radio', id__in=','.join(ids)): if x['success']: radios[x['id']] = x['data'] for x in self.api.get_pager('remote', 'status/wlan/clients', id__in=','.join(ids)): if not x['success'] or not x['data']: continue for client in x['data']: client['radio_info'] = radios[x['id']][client['radio']] wifi[client['mac']] = client return lambda x: wifi.get(x['mac'], {}) def wifi_status_acc(self, client, default): """ Accessor for WiFi RSSI, txrate and mode. """ if not client: return default status = [ self.get_wifi_rssi(client), '%d Mbps' % client['txrate'], self.wifi_modes[client['mode']], ] return ', '.join(status) def get_wifi_rssi(self, wifi_info): rssi_vals = [] for i in itertools.count(0): try: rssi_vals.append(wifi_info['rssi%d' % i]) except KeyError: break rssi = sum(rssi_vals) / len(rssi_vals) if rssi > -40: fmt = '<b><green>%.0f</green></b>' elif rssi > -55: fmt = '<green>%.0f</green>' elif rssi > -65: fmt = '<yellow>%.0f</yellow>' elif rssi > -80: fmt = '<red>%.0f</red>' else: fmt = '<b><red>%.0f</red></b>' return fmt % rssi + ' dBm' def wifi_bss_acc(self, client, default): """ Accessor for WiFi access point. 
""" if not client: return default radio = client['radio_info'] bss = radio['bss'][client['bss']] band = self.wifi_bands[client['radio_info']['wifi_band']] return '%s (%s Ghz)' % (bss['ssid'], band) def run(self, args): if args.idents: routers = [self.api.get_by_id_or_name('routers', x) for x in args.idents] else: routers = self.api.get_pager('routers', state='online', product__series=3) ids = dict((x['id'], x['name']) for x in routers) if not ids: raise SystemExit("No online routers found") data = [] for clients in self.api.get_pager('remote', 'status/lan/clients', id__in=','.join(ids)): if not clients['success']: continue by_mac = {} for x in clients['data']: x['router'] = ids[str(clients['id'])] if x['mac'] in by_mac: by_mac[x['mac']]['ip_addresses'].append(x['ip_address']) else: x['ip_addresses'] = [x['ip_address']] by_mac[x['mac']] = x data.extend(by_mac.values()) dns_getter = self.make_dns_getter(ids) ip_getter = lambda x: ', '.join(sorted(x['ip_addresses'], key=len)) headers = ['Router', 'IP Addresses', 'Hostname', 'MAC', 'Hardware'] accessors = ['router', ip_getter, dns_getter, 'mac'] if not args.verbose: accessors.append(self.mac_lookup_short) else: wifi_getter = self.make_wifi_getter(ids) headers.extend(['WiFi Status', 'WiFi AP']) na = '' accessors.extend([ self.mac_lookup_long, lambda x: self.wifi_status_acc(wifi_getter(x), na), lambda x: self.wifi_bss_acc(wifi_getter(x), na) ]) with self.make_table(headers=headers, accessors=accessors) as t: t.print(data) class Clients(base.ECMCommand): name = 'clients' def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.add_subcommand(List, default=True) command_classes = [Clients]
There are a variety of problems that can happen to the display monitor of an Acer laptop. The most common problem is the screen fading or becoming too hard to see. This can be corrected by adjusting the brightness levels. Another common problem involves incorrect color, which can be resolved by adjusting the display color settings. Yet another big problem is the display freezing or appearing jumpy, which can be resolved by updating the display monitor. Tap the right-arrow key to brighten the display screen. Tap the left-arrow key to darken the display screen. Release the "Fn" key to save your display settings. Right-click on the desktop and click "Graphic Properties." Adjust the screen resolution by clicking "Display Settings" and choosing a resolution setting from the drop-down menu. Click "Apply" to check the resolution of the display. Click "OK" to save the resolution settings. Click "OK" to exit the Graphic Properties application. Right-click on the desktop. Click "Personalize," "Display Settings" and "Advanced Settings." Click the "Intel® Graphics" tab and choose "Monitor Properties." Click "Driver." Click "Update Driver" and choose "Search Automatically." Click "Install" to install the updated monitor driver. After the driver has been installed, click "OK" to restart the laptop when prompted.
# Copyright (C) 2018 Collin Capano # This program is free software; you can redistribute it and/or modify it # under the terms of the GNU General Public License as published by the # Free Software Foundation; either version 3 of the License, or (at your # self.option) any later version. # # This program is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License for more details. # # You should have received a copy of the GNU General Public License along # with this program; if not, write to the Free Software Foundation, Inc., # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. """Provides I/O support for emcee_pt. """ from __future__ import absolute_import import numpy from .base_sampler import BaseSamplerFile from .base_mcmc import EnsembleMCMCMetadataIO from .base_multitemper import (CommonMultiTemperedMetadataIO, write_samples, ensemble_read_raw_samples) class EmceePTFile(EnsembleMCMCMetadataIO, CommonMultiTemperedMetadataIO, BaseSamplerFile): """Class to handle file IO for the ``emcee`` sampler.""" name = 'emcee_pt_file' @property def betas(self): """The betas that were used.""" return self[self.sampler_group].attrs["betas"] def write_samples(self, samples, **kwargs): r"""Writes samples to the given file. Calls :py:func:`base_multitemper.write_samples`. See that function for details. Parameters ---------- samples : dict The samples to write. Each array in the dictionary should have shape ntemps x nwalkers x niterations. \**kwargs : All other keyword arguments are passed to :py:func:`base_multitemper.write_samples`. """ write_samples(self, samples, **kwargs) def read_raw_samples(self, fields, **kwargs): r"""Base function for reading samples. Calls :py:func:`base_multitemper.ensemble_read_raw_samples`. See that function for details. Parameters ----------- fields : list The list of field names to retrieve. 
\**kwargs : All other keyword arguments are passed to :py:func:`base_multitemper.ensemble_read_raw_samples`. Returns ------- dict A dictionary of field name -> numpy array pairs. """ return ensemble_read_raw_samples(self, fields, **kwargs) def write_sampler_metadata(self, sampler): """Adds writing betas to MultiTemperedMCMCIO. """ super(EmceePTFile, self).write_sampler_metadata(sampler) self[self.sampler_group].attrs["betas"] = sampler.betas def read_acceptance_fraction(self, temps=None, walkers=None): """Reads the acceptance fraction. Parameters ----------- temps : (list of) int, optional The temperature index (or a list of indices) to retrieve. If None, acfs from all temperatures and all walkers will be retrieved. walkers : (list of) int, optional The walker index (or a list of indices) to retrieve. If None, samples from all walkers will be obtained. Returns ------- array Array of acceptance fractions with shape (requested temps, requested walkers). """ group = self.sampler_group + '/acceptance_fraction' if walkers is None: wmask = numpy.ones(self.nwalkers, dtype=bool) else: wmask = numpy.zeros(self.nwalkers, dtype=bool) wmask[walkers] = True if temps is None: tmask = numpy.ones(self.ntemps, dtype=bool) else: tmask = numpy.zeros(self.ntemps, dtype=bool) tmask[temps] = True return self[group][:][numpy.ix_(tmask, wmask)] def write_acceptance_fraction(self, acceptance_fraction): """Write acceptance_fraction data to file. Results are written to ``[sampler_group]/acceptance_fraction``; the resulting dataset has shape (ntemps, nwalkers). Parameters ----------- acceptance_fraction : numpy.ndarray Array of acceptance fractions to write. Must have shape ntemps x nwalkers. 
""" # check assert acceptance_fraction.shape == (self.ntemps, self.nwalkers), ( "acceptance fraction must have shape ntemps x nwalker") group = self.sampler_group + '/acceptance_fraction' try: self[group][:] = acceptance_fraction except KeyError: # dataset doesn't exist yet, create it self[group] = acceptance_fraction
If you are serious about computers and gaming then you will have to spend some time alone with the web site I'm about to show you. Seriously. I've passed on these links to a few of my geek friends and they went into immediate shock. A few even had to assume the fetal position in the corner because it just overloaded their mind and made their new custom built system look like a cheap Fisher Price Toy. It was really that bad. Once they came out of the shock their first reaction was to call Visa for a credit limit increase or immediately plan to knock over a bank. In other words, please don't continue to read this if you recently spent all your money on a new computer or if you have less than $5000 in the bank that you can spend. What you see here will keep you up at night trying to figure out ways to buy it! This, my friend, is the L Mach 3.8. To start, it features 3.8GHz Accelerated Hyper-Threading Intel® Pentium 4 Extreme Edition CPU with 2MB L3 Cache, a 512K L2 Cache, and a 950MHz System Bus. If that's not enough you can order systems with as much as 2 Terabytes of storage and up to 16GB Total RAM with optional RamDrives or up to 4GB PC4200 533MHz Dual-Channel Performance DDR RAM! In addition it has everything else you can imagine. LCD multi-information displays on the case, 6 in 1 Flash Memory Card Readers, digital VU meters, USB 2.0, and Firewire 800, the best in video graphics cards, and even the cooling system itself is nothing less than AMAZING! If their desktops weren't enough to send you into shock then the 17" Hollywood Laptop will. This system is faster and more robust than most desktops sold in stores or by direct retailers! As the name suggests it has a 17" widescreen display which is WXGA 1440-by-900 pixels and is optimized for high speed graphics such as gaming.
Since it was designed to directly compete with desktops it features at least a 3.20GHz Hyper-Threading Intel® Pentium 4 Extreme Edition CPU w/ 2MB L3 Cache, 512K L2 Cache, 800MHz System Bus, up to 2GB of 433MHz Dual-Channel DDR Memory, up to 80GB of storage using a 7200rpm HD, ATI Radeon™ 9600 PRO AGP 128MB DDR Memory, Integrated CCD Video camera with full-motion video at 30 frames per second in 24-bit color, and IEEE 1394 FireWire & USB 2.0 connectivity. You also have a choice of an optical drive: CD-ROM, DVD-ROM, CD-RW, COMBO (DVD-ROM/CD-RW), or high-speed 2x DVD-Burner (DVD-R/-RW.) To round it out it has an Internal 10/100 LAN, Internal 56K V90/92 Modem, Full Bandwidth 32-bit PC CardBus Slot, internal Floppy Drive, 6-in-1 card reader, Internal Sub-Woofer with surround sound, a TV-Tuner with Remote, and an optional internal wireless LAN 802.11b/g and/or Bluetooth multi-Channel. Imagine bringing just this baby to a LAN party -- oh and you better not let it leave your sight because it will walk away on its own! Now that you've been totally tempted to get one of their desktops and laptops you need to be careful to avoid looking at their Monitors. I won't even go into describing them. They simply must be seen to be believed. Enough about L's products. I have some stock to sell and an armed robbery to plan. HA! Just look at the web site for the laptop. Can you say Apple Rip-off? I knew you could.
#!/usr/bin/env python

""" Script to generate a PDF of desired sightline
Requires specdb for the spectral data
"""
import pdb


def parser(options=None):
    """Build the command-line parser and parse *options* (or sys.argv)."""
    import argparse

    argp = argparse.ArgumentParser(
        description='Analyze the desired sightline and generate a PDF (v1.0)')
    argp.add_argument("plate", type=int, help="Plate")
    argp.add_argument("fiber", type=int, help="Fiber")
    argp.add_argument("survey", type=str, help="SDSS_DR7, DESI_MOCK")

    if options is None:
        return argp.parse_args()
    return argp.parse_args(options)


def main(args=None):
    """Run the CNN model on the requested sightline and emit a PDF."""
    from pkg_resources import resource_filename
    from dla_cnn.data_model.sdss_dr7 import process_catalog_dr7
    from dla_cnn.data_model.desi_mocks import process_catalog_desi_mock

    pargs = parser() if args is None else args

    default_model = resource_filename('dla_cnn',
                                      "models/model_gensample_v7.1")
    if pargs.survey == 'SDSS_DR7':
        process_catalog_dr7(kernel_size=400,
                            model_checkpoint=default_model,
                            output_dir="./",
                            pfiber=(pargs.plate, pargs.fiber),
                            make_pdf=True)
    elif pargs.survey == 'DESI_MOCK':
        process_catalog_desi_mock(kernel_size=400,
                                  model_checkpoint=default_model,
                                  output_dir="./",
                                  pfiber=(pargs.plate, pargs.fiber),
                                  make_pdf=True)
    # print("See predictions.json file for outputs")


# Command line execution
if __name__ == '__main__':
    args = parser()
    main(args)
i was able to fill the cupcakes using the large cookie scoop that my mom picked up for me a few weeks ago. i’m really loving this thing! line your cupcake pans with paper liners, place an oreo half in each liner, cream side up. in a large bowl, combine the butter and sugar, beating at med-high speed for 2 min. until light and fluffy. blend in the egg whites, followed by the vanilla. on low speed, beat in half of the dry ingredients, until just incorporated. add milk and beat until just combined and then mix in the remaining dry ingredients. fold the chopped oreos in with a rubber spatula, just until evenly distributed. using a large cookie scoop, evenly divide the batter between the prepared cupcake liners. bake for 18-20 min., or until a toothpick comes out clean. cool in the pan for 5 min. and then transfer to a wire rack to cool completely. while the cupcakes are baking, make the frosting. combine the cream cheese and butter in the bowl of an electric mixer (this is important, a hand mixer won’t cut it.) beat on med-high speed for 1 min. blend in vanilla. beat in powdered sugar until smooth, 1-2 min. add heavy cream to the bowl, beat on med-low speed until just incorporated. crank it up to med-high and whip for 4 min. until light and fluffy, scraping down the sides of the bowl as needed. after cupcakes are cool, frost. coat with oreo crumbs and top with oreo halves.
# -*- coding: utf-8 -*-
"""Search-field configuration for treemap instances.

Defines the default search/detail field layouts and the helpers that
filter those fields by user visibility, attach display labels, and
expose collection-UDF ("udfc") search metadata.

NOTE(review): this module calls dict.iteritems()/itervalues(), so it is
Python 2 only as written.
"""
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division

from django.utils.encoding import force_text
from django.utils.translation import ugettext_lazy as _
from django.utils.translation import ugettext_noop

import copy
import re

from treemap.DotDict import DotDict
from treemap.lib.object_caches import udf_defs

# Default mobile search layout: 'standard' fields get a concrete search
# widget (assigned in mobile_search_fields); 'missing' fields drive
# "show missing data" searches.
DEFAULT_MOBILE_SEARCH_FIELDS = DotDict({
    'standard': [
        {'identifier': 'species.id'},
        {'identifier': 'tree.diameter'},
        {'identifier': 'tree.height'}
    ],
    'missing': [
        {'identifier': 'species.id'},
        {'identifier': 'tree.diameter'},
        {'identifier': 'mapFeaturePhoto.id'}
    ]
})

# Default web (advanced) search layout, keyed by category; model-named
# categories ('Plot', 'Tree') hold model-specific fields.
DEFAULT_SEARCH_FIELDS = DotDict({
    'general': [
        {'identifier': 'mapFeature.updated_at'},
        {'identifier': 'mapFeature.updated_by'}
    ],
    'missing': [
        {'identifier': 'species.id'},
        {'identifier': 'tree.diameter'},
        {'identifier': 'plot.owner_orig_id'},
        {'identifier': 'mapFeaturePhoto.id'}
    ],
    'Plot': [
        {'identifier': 'plot.owner_orig_id'}
    ],
    'Tree': [
        {'identifier': 'tree.diameter'},
        {'identifier': 'tree.date_planted'}
    ]
})

# Field groups shown in the mobile API detail view.  Headers are wrapped in
# ugettext_noop so they are marked for translation but translated at render
# time rather than import time.
DEFAULT_MOBILE_API_FIELDS = (
    {'header': ugettext_noop('Tree Information'),
     'model': 'tree',
     'field_keys': ['tree.species', 'tree.diameter',
                    'tree.height', 'tree.date_planted']},
    {'header': ugettext_noop('Planting Site Information'),
     'model': 'plot',
     'field_keys': ['plot.width', 'plot.length']},
    {'header': ugettext_noop('Stewardship'),
     'collection_udf_keys': ['plot.udf:Stewardship', 'tree.udf:Stewardship'],
     'sort_key': 'Date'}
)

# Field groups shown on the web detail page.
DEFAULT_WEB_DETAIL_FIELDS = (
    {'header': ugettext_noop('Tree Information'),
     'model': 'tree',
     'field_keys': ['tree.id', 'tree.species', 'tree.diameter',
                    'tree.height', 'tree.canopy_height',
                    'tree.date_planted', 'tree.date_removed'],
     'collection_udf_keys': ['tree.udf:Stewardship']},
    {'header': ugettext_noop('Planting Site Information'),
     'model': 'plot',
     'field_keys': ['plot.width', 'plot.length', 'plot.address_street',
                    'plot.address_city', 'plot.address_zip',
                    'plot.owner_orig_id'],
     'collection_udf_keys': ['plot.udf:Stewardship']},
)

# Validation error messages for user-customized field-group configuration.
INSTANCE_FIELD_ERRORS = {
    'no_field_groups': _('Must be a non-empty list'),

    'group_has_no_header': _(
        'Every mobile field group must have a non-empty header'),

    'group_has_no_keys': _(
        'All mobile field groups must have either a "field_keys" or '
        '"collection_udf_keys" containing a non-empty list'),

    'group_has_both_keys': _(
        'Mobile field groups cannot contain both "field_keys" and '
        '"collection_udf_keys" properties'),

    'group_has_no_sort_key': _(
        'Collection field groups must have a non-empty "sort_key" property '
        'defined'),

    'group_has_missing_cudf': _(
        'Collection field groups can only contain existing custom collection '
        'fields'),

    'group_has_invalid_sort_key': _(
        'The "sort_key" property of a collection field group must be the name '
        'of a field on present on every collection field in the group'),

    'duplicate_fields': _('Fields cannot be specified more than once'),

    'group_missing_model': _(
        'Normal field groups need a model property of either "tree" or "plot"'
    ),

    'group_invalid_model': _(
        'Normal field groups can only have keys that match their "model"'
    ),

    'missing_field': _(
        'Normal field groups may only contain existing fields. If you specify '
        'a custom field, it cannot be a collection field'),
}

# Matches alert identifiers of the form 'udf:<model>:<udf-def-pk>.<field>',
# capturing the model name and the UserDefinedFieldDefinition pk.
ALERT_IDENTIFIER_PATTERN = re.compile(r'udf:(tree|plot):(\d+)\..+')


def advanced_search_fields(instance, user):
    """Return the advanced-search config for ``instance`` as seen by ``user``.

    Starts from a deep copy of ``instance.search_config``, drops fields the
    user may not see, labels 'missing' fields as ISNULL searches, adds a
    'display' category of show/hide model filters, assigns a unique DOM id
    to every field, and moves non-Plot map-feature categories into 'more'.
    """
    from treemap.models import Tree, MapFeature  # prevent circular import

    def make_display_filter(feature_name):
        # 'Plot' is displayed as "empty planting sites" and filtered via the
        # pseudo-model name 'EmptyPlot'.
        if feature_name == 'Plot':
            plural = _('empty planting sites')
            feature_name = 'EmptyPlot'
        else:
            plural = get_plural_feature_name(feature_name)

        return {
            'label': _('Show %(models)s') % {'models': plural.lower()},
            'model': feature_name
        }

    def get_plural_feature_name(feature_name):
        if feature_name == 'Tree':
            Feature = Tree
        else:
            Feature = MapFeature.get_subclass(feature_name)
        return Feature.terminology(instance)['plural']

    def get_visible_fields(field_infos, user):
        # Keep only fields whose model says they are visible to this user.
        visible_fields = []
        for field_info in field_infos:
            model, field_name = _parse_field_info(instance, field_info)
            if model.field_is_visible(user, field_name):
                visible_fields.append(field_info)
        return visible_fields

    fields = copy.deepcopy(instance.search_config)
    # NOTE(review): iteritems/itervalues below are Python 2 only.
    fields = {category: get_visible_fields(field_infos, user)
              for category, field_infos in fields.iteritems()}

    # 'missing' fields become "is null" searches with a "Show Missing X" label.
    for field_info in fields.get('missing', []):
        _set_missing_search_label(instance, field_info)
        field_info['search_type'] = 'ISNULL'
        field_info['value'] = 'true'

    # Display filters: Tree and Plot first, then the remaining map feature
    # types in sorted order.
    fields['display'] = [make_display_filter('Tree'),
                         make_display_filter('Plot')]
    fields['display'] += [
        make_display_filter(feature)
        for feature in sorted(instance.map_feature_types)
        if feature != 'Plot']

    num = 0
    for filters in fields.itervalues():
        for field in filters:
            # It makes styling easier if every field has an identifier
            id = "%s_%s" % (field.get('identifier', ''), num)
            id = id.replace(' ', '_')
            field['id'] = id
            num += 1

    # Move each non-Plot feature-specific category into the 'more' section.
    more = []
    for feature_name in sorted(instance.map_feature_types):
        if feature_name in fields and feature_name != 'Plot':
            filters = fields.pop(feature_name)
            # Re-filtering is redundant (already filtered above) but harmless.
            filters = get_visible_fields(filters, user)
            if len(filters) > 0:
                more.append({
                    'name': feature_name,
                    'title': get_plural_feature_name(feature_name),
                    'fields': filters
                })
    fields['more'] = more

    return fields


def mobile_search_fields(instance):
    """Return ``instance.mobile_search_fields`` annotated for the mobile app.

    Each 'standard' field gets a label and a 'search_type' derived from its
    form-extras field type (alert identifiers are handled specially); each
    'missing' field gets a "Show Missing X" label.
    """
    from treemap.templatetags.form_extras import (field_type_label_choices,
                                                  ADD_BLANK_NEVER)
    search_fields = copy.deepcopy(instance.mobile_search_fields)
    for field in search_fields['standard']:
        identifier = field['identifier']
        # Alert identifiers ('udf:model:pk.field') carry their own config.
        alert_info = get_alert_field_info(identifier, instance)
        if alert_info is not None:
            field.update(alert_info)
            continue
        Model, field_name = _parse_field_info(instance, field)
        set_search_field_label(instance, field)
        field_type, __, __, choices = field_type_label_choices(
            Model, field_name, add_blank=ADD_BLANK_NEVER)

        # Map the form field type onto the mobile search widget type.
        if identifier == 'species.id':
            field['search_type'] = 'SPECIES'
        elif field_type in {'int', 'float'}:
            field['search_type'] = 'RANGE'
        elif field_type in {'date', 'datetime'}:
            field['search_type'] = 'DATERANGE'
        elif field_type == 'string':
            field['search_type'] = 'STRING'
        elif field_type == 'bool':
            field['search_type'] = 'BOOL'
        elif field_type == 'choice':
            field['search_type'] = 'CHOICE'
        elif field_type == 'multichoice':
            field['search_type'] = 'MULTICHOICE'

        if choices:
            field['choices'] = choices

    for field in search_fields['missing']:
        _set_missing_search_label(instance, field)

    return search_fields


def _set_missing_search_label(instance, field_info):
    # Mutates field_info, setting a "Show Missing <field>" label.
    label = get_search_field_label(instance, field_info)
    field_info['label'] = _('Show Missing %(field)s') % {'field': label}


def set_search_field_label(instance, field_info):
    """Set a default label on ``field_info`` unless one is already present."""
    if 'label' not in field_info:
        field_info['label'] = get_search_field_label(instance, field_info)
    return field_info


def get_search_field_label(instance, field_info):
    """
    Searches for missing data are controlled by fields, and those fields need
    labels. Two wrinkles:
    1) Fields like species.id and mapFeaturePhoto.id need special handling.
    2) Fields from all models are shown in the "Missing Data" category, so
       prefix the field name with the model name.
    """
    from treemap.templatetags.form_extras import field_type_label_choices
    Model, field_name = _parse_field_info(instance, field_info)
    if field_name == 'id':
        # 'id' fields are labeled with the model's plural name.
        if hasattr(Model, 'terminology'):
            label = Model.terminology(instance)['plural']
        else:
            label = Model._meta.verbose_name_plural
    else:
        __, label, __, __ = field_type_label_choices(Model, field_name, '')
        # Prefix with the model's singular name unless already present.
        if hasattr(Model, 'terminology'):
            prefix = force_text(Model.terminology(instance)['singular'])
        else:
            prefix = force_text(Model._meta.verbose_name)
        label = force_text(label)
        if not label.startswith(prefix):
            label = "%s %s" % (prefix, label)
    return label


def _parse_field_info(instance, field_info):
    """Split a 'model.field' identifier into (Model class, field name)."""
    from treemap.util import get_model_for_instance
    # NOTE(review): maxsplit=2 would yield 3 parts for a 2-dot identifier and
    # break unpacking; identifiers seen here appear to have one dot — verify.
    model_name, field_name = field_info['identifier'].split('.', 2)
    Model = get_model_for_instance(model_name, instance)
    return Model, field_name


def get_udfc_search_fields(instance, user):
    """Build the collection-UDF ("udfc") search metadata for ``user``.

    Returns a DotDict with 'models' (the UDF model classes involved) and a
    nested 'udfc' mapping of udf-name -> ids/models/action metadata, limited
    to collection UDFs the user can at least read.
    """
    from treemap.models import InstanceUser
    from treemap.udf import UDFModel
    from treemap.util import to_object_name, leaf_models_of_class
    from treemap.lib.perms import udf_write_level, READ, WRITE

    try:
        iu = instance.instanceuser_set.get(user__pk=user.pk)
    except InstanceUser.DoesNotExist:
        # Anonymous / non-member: permission check below receives None.
        iu = None

    data = DotDict({'models': set(), 'udfc': {}})

    for clz in leaf_models_of_class(UDFModel):
        model_name = clz.__name__
        # Only Tree and the instance's enabled map feature types participate.
        if model_name not in ['Tree'] + instance.map_feature_types:
            continue
        for k, v in clz.collection_udf_settings.items():
            udfds = (u for u in udf_defs(instance, model_name) if u.name == k)
            for udfd in udfds:
                if udf_write_level(iu, udfd) in (READ, WRITE):
                    # Record the UDF definition pk and its choice list under
                    # udfc.<udf>.ids/models.<model>.
                    _base_nest_path = 'udfc.%s.' % (to_object_name(k))
                    ids_nest_path = ('%sids.%s'
                                     % (_base_nest_path,
                                        to_object_name(model_name)))
                    models_nest_path = ('%smodels.%s' %
                                        (_base_nest_path,
                                         to_object_name(model_name)))
                    data[ids_nest_path] = udfd.pk
                    data[models_nest_path] = {
                        'udfd': udfd,
                        'fields': udfd.datatype_dict[0]['choices']
                    }
            # Per-UDF action metadata from the model's settings.
            p = 'udfc.%s.' % to_object_name(k)
            data[p + 'action_verb'] = v['action_verb']
            data[p + 'range_field_key'] = v['range_field_key']
            data[p + 'action_field_key'] = v['action_field_key']

        data['models'] |= {clz}

    return data


def get_alert_field_info(identifier, instance):
    """Return search-field config for an alert identifier, or None.

    ``identifier`` must match ALERT_IDENTIFIER_PATTERN
    ('udf:<model>:<pk>.<field>'); otherwise None is returned.
    """
    from treemap.util import get_model_for_instance

    alert_match = ALERT_IDENTIFIER_PATTERN.match(identifier)
    if alert_match:
        model_name, pk = alert_match.groups()
        Model = get_model_for_instance(model_name, instance)
        # Look up the UDF definition by pk across all of the instance's UDFs.
        udf_def = next(udf for udf in udf_defs(instance) if udf.pk == int(pk))
        display_name = force_text(Model.terminology(instance)['singular'])

        return {
            'identifier': identifier,
            'search_type': 'DEFAULT',
            'default_identifier': udf_def.full_name,
            'label': 'Open %(model)s Alerts' % {'model': display_name},
        }
    return None
Pine Island Marketplace is located at the corner of Pine Island Rd. (SR 78) and Nicholas Pkwy. in Cape Coral. It is a power center that opened in 2006 as an open-air shopping mall. It covers an area of 200,442 sq ft and has about 15 stores.