text
stringlengths
29
850k
# -*- coding: utf-8 -*-

# Copyright 2008-2015 Canonical
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
# For further info, check http://launchpad.net/filesync-server

"""Test the capabilities decorator."""

from twisted.trial.unittest import TestCase
from twisted.trial.reporter import TestResult
from twisted.internet import defer

from ubuntuone.storage.server.testing.caps_helpers import required_caps
from ubuntuone.storage.server.testing.aq_helpers import (
    TestWithDatabase,
    failure_expected,
)
from ubuntuone.storage.server import server as server_module
from ubuntuone import syncdaemon


class RequiredCapsDecoratorTests(TestCase):
    """Tests for the required_caps decorator."""

    # Saved at class-definition time so tearDown can restore the global.
    _original_required_caps = syncdaemon.REQUIRED_CAPS

    @defer.inlineCallbacks
    def tearDown(self):
        """Restore the original REQUIRED_CAPS global."""
        syncdaemon.REQUIRED_CAPS = self._original_required_caps
        yield super(RequiredCapsDecoratorTests, self).tearDown()

    def test_mismatch(self):
        """Test that a test is correctly skipped on a caps mismatch."""
        result = TestResult()
        syncdaemon.REQUIRED_CAPS = set(['supercalifragilistico'])

        class FakeTest(TestCase):
            """Testcase to test the decorator."""

            @required_caps([], validate=False)
            def test_method(innerself):
                """Test method that always fails."""
                innerself.fail()

        FakeTest('test_method').run(result)
        self.assertEqual(1, len(result.skips))

    def test_match(self):
        """Check that a test is executed when the caps match."""
        result = TestResult()
        syncdaemon.REQUIRED_CAPS = server_module.MIN_CAP

        class FakeTest(TestCase):
            """Testcase to test the decorator."""

            @required_caps(server_module.MIN_CAP)
            def test_method(innerself):
                """Test method that always pass."""
                innerself.assertTrue(True)

        FakeTest('test_method').run(result)
        self.assertEqual(0, len(result.skips))
        self.assertEqual(1, result.successes)

    def test_not_validate(self):
        """Test that a test is executed when the supported_caps_set don't
        match the server SUPPORTED_CAPS and validate=False.
        """
        result = TestResult()
        syncdaemon.REQUIRED_CAPS = set(['supercalifragilistico'])

        class FakeTest(TestCase):
            """Testcase to test the decorator."""

            @required_caps(['supercalifragilistico'], validate=False)
            def test_method(innerself):
                """Test method that always pass."""
                innerself.assertTrue(True)

        FakeTest('test_method').run(result)
        self.assertEqual(0, len(result.skips))
        self.assertEqual(1, result.successes)

    def test_validate(self):
        """Test that a test fails when the supported_caps_set don't
        match the server SUPPORTED_CAPS and validate=True.
        """
        result = TestResult()

        class FakeTest(TestCase):
            """Testcase to test the decorator."""

            @required_caps([], ['supercalifragilistico', 'foo'], ['foo'])
            def test_method(innerself):
                """Test method that always pass."""
                innerself.assertTrue(True)

        the_test = FakeTest('test_method')
        the_test.run(result)
        self.assertEqual(0, len(result.skips))
        self.assertEqual(1, len(result.failures))
        self.assertEqual(the_test, result.failures[0][0])


class TestClientCapabilities(TestWithDatabase):
    """Test the client side of query/set capabilities."""

    client = None

    # just to restore original values
    _original_supported_caps = server_module.SUPPORTED_CAPS
    _original_required_caps = syncdaemon.REQUIRED_CAPS

    def tearDown(self):
        """Cleanup the mess: restore globals and drop the connection."""
        server_module.SUPPORTED_CAPS = self._original_supported_caps
        syncdaemon.REQUIRED_CAPS = self._original_required_caps
        if self.aq.connector is not None:
            self.aq.disconnect()
        return super(TestClientCapabilities, self).tearDown()

    def assertInQ(self, deferred, containee, msg=None):
        """Deferredly assert that the containee is in the event queue.

        containee can be callable, in which case it's called before
        asserting.
        """
        def check_queue(_):
            "the check itself"
            ce = containee() if callable(containee) else containee
            self.assertIn(ce, self.listener.q, msg)
        deferred.addCallback(check_queue)

    def connect(self):
        """Connect the client."""
        d = self.wait_for('SYS_CONNECTION_MADE')
        self.eq.push('SYS_INIT_DONE')
        self.eq.push('SYS_LOCAL_RESCAN_DONE')
        self.eq.push('SYS_USER_CONNECT',
                     access_token=self.access_tokens['jack'])
        self.eq.push('SYS_NET_CONNECTED')
        return d

    def test_query_set_capabilities(self):
        """After connecting the server uses the caps specified by client."""
        needed_event = self.wait_for('SYS_SET_CAPABILITIES_OK')
        d = self.connect()
        d.addCallback(lambda _: needed_event)
        return d

    @failure_expected("The server doesn't have the requested capabilities")
    def test_query_bad_capabilities(self):
        """Test how the client handles trying to set capabilities that the
        server don't have.
        """
        syncdaemon.REQUIRED_CAPS = frozenset(['foo'])
        needed_event = self.wait_for('SYS_SET_CAPABILITIES_ERROR')
        d = self.connect()
        d.addCallback(lambda _: needed_event)
        self.assertInQ(d, ('SYS_SET_CAPABILITIES_ERROR',
                           {'error': "The server doesn't have the requested "
                                     "capabilities"}))
        return d
Posted on Sunday, April 4, 2010, by Eric - TV Geek Army "Revered Leader" Quick Take: Parks and Recreation, "Woman of the Year" Review: Parks and Recreation, "Woman of the Year" (S0217) Okay, I have to start this off by saying how much I love this show, how quick the ramp up to love came about for me, and how it can do nearly no wrong for me right now. I'm in full swoon. The whole cast is amazing, the writing is quirky and funny and on point, but I have to get to my man Aziz Ansari, whom I have a near man crush on right now, as the wannabe-a-playa but so not Tom Haverford. When Tom strutted into the Snakehole Lounge this week and announced, "Yo Freddie, good news! Your liquor board license got got, yo!" like something out of The Wire channeled via Super Nerd, that hits an ultrasweet spot of comedy for me right there. And the payoff is even better as Freddie has no idea what Tom is talking about, and Tom must shift quickly back into earnest local bureaucrat-speak. Meanwhile, the woman of the year award storyline provides some nice interchanges for Ron and Leslie (Amy Poehler) and proves that there's a lot of character depth to match the comedy on Parks and Rec. "Every child has a right to play, no matter how boring the sport." – That sums up Leslie's character just about perfectly right there: innocent-to-a-fault honest, sweet, and playing in an area just over the left field line. I absolutely love the Parks and Recreation theme song. It's bright and perky and offbeat and traditional, somehow all at once. It works, as does the show. "Well, it's about time." – Ron Swanson (Nick Offerman), in reaction to winning Pawnee's Woman of the Year award. It took me a minute before I realized that club owner Freddie is Andy Milder, who plays Dean Hodes on Weeds. Tom dropping a Snooki reference (from MTV's Jersey Shore) during his "investment opportunity" presentation is simply priceless. 
Video: Parks and Recreation, "Woman of the Year" Recap: Parks and Recreation, "Woman of the Year" Leslie is outraged when Ron wins a "Woman of the Year" award for a project she started. Meanwhile, Tom is desperate to find some money to invest in a local Pawnee club. From Around the Web: Parks and Recreation, "Woman of the Year" TV with Alan Sepinwall: Tom in turn got to get his d-bag on - and then surround himself with the spectacularly douchey return of Ben Schwartz as Jean Ralphio (last seen auditioning to be Ron's secretary). Cultural Learnings: Leslie and Ron are, without question, the best platonic relationship ongoing on television right now. The relationship is really the core of that office dynamic in that Ron fully understands that Leslie does great work but doesn’t quite share her passion for anything but breakfast food, and he’s probably not so keen on her lack of wall-hanging proof of that passion.
""" Copyright (C) 2015 Quinn D Granfor <spootdev@gmail.com> This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License version 2, as published by the Free Software Foundation. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License version 2 for more details. You should have received a copy of the GNU General Public License version 2 along with this program; if not, write to the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. """ import uuid def db_metathesportsdb_select_guid(self, guid): """ # select """ self.db_cursor.execute('select mm_metadata_sports_json' ' from mm_metadata_sports' ' where mm_metadata_sports_guid = %s', (guid,)) try: return self.db_cursor.fetchone()['mm_metadata_sports_json'] except: return None def db_metathesportsdb_insert(self, series_id_json, event_name, show_detail, image_json): """ # insert """ new_guid = uuid.uuid4() self.db_cursor.execute('insert into mm_metadata_sports (mm_metadata_sports_guid,' ' mm_metadata_media_sports_id,' ' mm_metadata_sports_name,' ' mm_metadata_sports_json,' ' mm_metadata_sports_image_json)' ' values (%s,%s,%s,%s,%s)', (new_guid, series_id_json, event_name, show_detail, image_json)) self.db_commit() return new_guid def db_metathesports_update(self, series_id_json, event_name, show_detail, sportsdb_id): """ # updated """ self.db_cursor.execute('update mm_metadata_sports' ' set mm_metadata_media_sports_id = %s,' ' mm_metadata_sports_name = %s,' ' mm_metadata_sports_json = %s' ' where mm_metadata_media_sports_id->\'thesportsdb\' ? %s', (series_id_json, event_name, show_detail, sportsdb_id)) self.db_commit()
The Bitnami OXID eShop Stack provides a one-click install solution for OXID eShop. Download installers and virtual machines, or run your own OXID eShop server in the cloud. OXID eShop is a free open source ecommerce and shopping cart solution. Build a robust online, mobile-ready store, then expand it with the dozens of available modules for marketing, payment processing, order fulfillment and more. Why use the Bitnami OXID eShop Stack?
""" Phase vocoder. The phase vocoder is a digital signal processing technique of potentially great musical significance. It can be used to perform very high fidelity time scaling, pitch transposition, and myriad other modifications of sounds. """ """ Copyright 2009-2015 Olivier Belanger This file is part of pyo, a python module to help digital signal processing script creation. pyo is free software: you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. pyo is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. You should have received a copy of the GNU Lesser General Public License along with pyo. If not, see <http://www.gnu.org/licenses/>. """ from _core import * from _maps import * from _widgets import createSpectrumWindow from pattern import Pattern class PVAnal(PyoPVObject): """ Phase Vocoder analysis object. PVAnal takes an input sound and performs the phase vocoder analysis on it. This results in two streams, one for the bin's magnitudes and the other for the bin's true frequencies. These two streams are used by the PVxxx object family to transform the input signal using spectral domain algorithms. The last object in the phase vocoder chain must be a PVSynth to perform the spectral to time domain conversion. :Parent: :py:class:`PyoPVObject` :Args: input : PyoObject Input signal to process. size : int {pow-of-two > 4}, optional FFT size. Must be a power of two greater than 4. Defaults to 1024. The FFT size is the number of samples used in each analysis frame. overlaps : int, optional The number of overlaped analysis block. Must be a power of two. Defaults to 4. 
More overlaps can greatly improved sound quality synthesis but it is also more CPU expensive. wintype : int, optional Shape of the envelope used to filter each input frame. Possible shapes are: 0. rectangular (no windowing) 1. Hamming 2. Hanning (default) 3. Bartlett (triangular) 4. Blackman 3-term 5. Blackman-Harris 4-term 6. Blackman-Harris 7-term 7. Tuckey (alpha = 0.66) 8. Sine (half-sine window) >>> s = Server().boot() >>> s.start() >>> a = SfPlayer(SNDS_PATH+"/transparent.aif", loop=True, mul=0.7) >>> pva = PVAnal(a, size=1024, overlaps=4, wintype=2) >>> pvs = PVSynth(pva).mix(2).out() """ def __init__(self, input, size=1024, overlaps=4, wintype=2): pyoArgsAssert(self, "oiii", input, size, overlaps, wintype) PyoPVObject.__init__(self) self._input = input self._size = size self._overlaps = overlaps self._wintype = wintype self._in_fader = InputFader(input) in_fader, size, overlaps, wintype, lmax = convertArgsToLists(self._in_fader, size, overlaps, wintype) self._base_objs = [PVAnal_base(wrap(in_fader,i), wrap(size,i), wrap(overlaps,i), wrap(wintype,i)) for i in range(lmax)] def setInput(self, x, fadetime=0.05): """ Replace the `input` attribute. :Args: x : PyoObject New signal to process. fadetime : float, optional Crossfade time between old and new input. Default to 0.05. """ pyoArgsAssert(self, "oN", x, fadetime) self._input = x self._in_fader.setInput(x, fadetime) def setSize(self, x): """ Replace the `size` attribute. :Args: x : int new `size` attribute. """ pyoArgsAssert(self, "i", x) self._size = x x, lmax = convertArgsToLists(x) [obj.setSize(wrap(x,i)) for i, obj in enumerate(self._base_objs)] def setOverlaps(self, x): """ Replace the `overlaps` attribute. :Args: x : int new `overlaps` attribute. """ pyoArgsAssert(self, "i", x) self._overlaps = x x, lmax = convertArgsToLists(x) [obj.setOverlaps(wrap(x,i)) for i, obj in enumerate(self._base_objs)] def setWinType(self, x): """ Replace the `wintype` attribute. :Args: x : int new `wintype` attribute. 
""" self._wintype = x x, lmax = convertArgsToLists(x) [obj.setWinType(wrap(x,i)) for i, obj in enumerate(self._base_objs)] @property def input(self): """PyoObject. Input signal to process.""" return self._input @input.setter def input(self, x): self.setInput(x) @property def size(self): """int. FFT size.""" return self._size @size.setter def size(self, x): self.setSize(x) @property def overlaps(self): """int. FFT overlap factor.""" return self._overlaps @overlaps.setter def overlaps(self, x): self.setOverlaps(x) @property def wintype(self): """int. Windowing method.""" return self._wintype @wintype.setter def wintype(self, x): self.setWinType(x) class PVSynth(PyoObject): """ Phase Vocoder synthesis object. PVSynth takes a PyoPVObject as its input and performed the spectral to time domain conversion on it. This step converts phase vocoder magnitude and true frequency's streams back to a real signal. :Parent: :py:class:`PyoObject` :Args: input : PyoPVObject Phase vocoder streaming object to process. wintype : int, optional Shape of the envelope used to filter each input frame. Possible shapes are: 0. rectangular (no windowing) 1. Hamming 2. Hanning (default) 3. Bartlett (triangular) 4. Blackman 3-term 5. Blackman-Harris 4-term 6. Blackman-Harris 7-term 7. Tuckey (alpha = 0.66) 8. Sine (half-sine window) >>> s = Server().boot() >>> s.start() >>> a = SfPlayer(SNDS_PATH+"/transparent.aif", loop=True, mul=0.7) >>> pva = PVAnal(a, size=1024, overlaps=4, wintype=2) >>> pvs = PVSynth(pva).mix(2).out() """ def __init__(self, input, wintype=2, mul=1, add=0): pyoArgsAssert(self, "piOO", input, wintype, mul, add) PyoObject.__init__(self, mul, add) self._input = input self._wintype = wintype input, wintype, mul, add, lmax = convertArgsToLists(self._input, wintype, mul, add) self._base_objs = [PVSynth_base(wrap(input,i), wrap(wintype,i), wrap(mul,i), wrap(add,i)) for i in range(lmax)] def setInput(self, x): """ Replace the `input` attribute. 
:Args: x : PyoPVObject New signal to process. """ pyoArgsAssert(self, "p", x) self._input = x x, lmax = convertArgsToLists(x) [obj.setInput(wrap(x,i)) for i, obj in enumerate(self._base_objs)] def setWinType(self, x): """ Replace the `wintype` attribute. :Args: x : int new `wintype` attribute. """ pyoArgsAssert(self, "i", x) self._wintype = x x, lmax = convertArgsToLists(x) [obj.setWinType(wrap(x,i)) for i, obj in enumerate(self._base_objs)] def ctrl(self, map_list=None, title=None, wxnoserver=False): self._map_list = [SLMapMul(self._mul)] PyoObject.ctrl(self, map_list, title, wxnoserver) @property def input(self): """PyoPVObject. Input signal to process.""" return self._input @input.setter def input(self, x): self.setInput(x) @property def wintype(self): """int. Windowing method.""" return self._wintype @wintype.setter def wintype(self, x): self.setWinType(x) class PVAddSynth(PyoObject): """ Phase Vocoder additive synthesis object. PVAddSynth takes a PyoPVObject as its input and resynthesize the real signal using the magnitude and true frequency's streams to control amplitude and frequency envelopes of an oscillator bank. :Parent: :py:class:`PyoObject` :Args: input : PyoPVObject Phase vocoder streaming object to process. pitch : float or PyoObject, optional Transposition factor. Defaults to 1. num : int, optional Number of oscillators used to synthesize the output sound. Defaults to 100. first : int, optional The first bin to synthesize, starting from 0. Defaults to 0. inc : int, optional Starting from bin `first`, resynthesize bins `inc` apart. Defaults to 1. 
>>> s = Server().boot() >>> s.start() >>> a = SfPlayer(SNDS_PATH+"/transparent.aif", loop=True, mul=0.7) >>> pva = PVAnal(a, size=1024, overlaps=4, wintype=2) >>> pvs = PVAddSynth(pva, pitch=1.25, num=100, first=0, inc=2).out() """ def __init__(self, input, pitch=1, num=100, first=0, inc=1, mul=1, add=0): pyoArgsAssert(self, "pOiiiOO", input, pitch, num, first, inc, mul, add) PyoObject.__init__(self, mul, add) self._input = input self._pitch = pitch self._num = num self._first = first self._inc = inc input, pitch, num, first, inc, mul, add, lmax = convertArgsToLists(self._input, pitch, num, first, inc, mul, add) self._base_objs = [PVAddSynth_base(wrap(input,i), wrap(pitch,i), wrap(num,i), wrap(first,i), wrap(inc,i), wrap(mul,i), wrap(add,i)) for i in range(lmax)] def setInput(self, x): """ Replace the `input` attribute. :Args: x : PyoPVObject New signal to process. """ pyoArgsAssert(self, "p", x) self._input = x x, lmax = convertArgsToLists(x) [obj.setInput(wrap(x,i)) for i, obj in enumerate(self._base_objs)] def setPitch(self, x): """ Replace the `pitch` attribute. :Args: x : float or PyoObject new `pitch` attribute. """ pyoArgsAssert(self, "O", x) self._pitch = x x, lmax = convertArgsToLists(x) [obj.setPitch(wrap(x,i)) for i, obj in enumerate(self._base_objs)] def setNum(self, x): """ Replace the `num` attribute. :Args: x : int new `num` attribute. """ pyoArgsAssert(self, "i", x) self._num = x x, lmax = convertArgsToLists(x) [obj.setNum(wrap(x,i)) for i, obj in enumerate(self._base_objs)] def setFirst(self, x): """ Replace the `first` attribute. :Args: x : int new `first` attribute. """ pyoArgsAssert(self, "i", x) self._first = x x, lmax = convertArgsToLists(x) [obj.setFirst(wrap(x,i)) for i, obj in enumerate(self._base_objs)] def setInc(self, x): """ Replace the `inc` attribute. :Args: x : int new `inc` attribute. 
""" pyoArgsAssert(self, "i", x) self._inc = x x, lmax = convertArgsToLists(x) [obj.setInc(wrap(x,i)) for i, obj in enumerate(self._base_objs)] def ctrl(self, map_list=None, title=None, wxnoserver=False): self._map_list = [SLMap(0.25, 4, "lin", "pitch", self._pitch), SLMapMul(self._mul)] PyoObject.ctrl(self, map_list, title, wxnoserver) @property def input(self): """PyoPVObject. Input signal to process.""" return self._input @input.setter def input(self, x): self.setInput(x) @property def pitch(self): """float or PyoObject. Transposition factor.""" return self._pitch @pitch.setter def pitch(self, x): self.setPitch(x) @property def num(self): """int. Number of oscillators.""" return self._num @num.setter def num(self, x): self.setNum(x) @property def first(self): """int. First bin to synthesize.""" return self._first @first.setter def first(self, x): self.setFirst(x) @property def inc(self): """int. Synthesized bin increment.""" return self._inc @inc.setter def inc(self, x): self.setInc(x) class PVTranspose(PyoPVObject): """ Transpose the frequency components of a pv stream. :Parent: :py:class:`PyoPVObject` :Args: input : PyoPVObject Phase vocoder streaming object to process. transpo : float or PyoObject, optional Transposition factor. Defaults to 1. >>> s = Server().boot() >>> s.start() >>> sf = SfPlayer(SNDS_PATH+"/transparent.aif", loop=True, mul=.7) >>> pva = PVAnal(sf, size=1024) >>> pvt = PVTranspose(pva, transpo=1.5) >>> pvs = PVSynth(pvt).out() >>> dry = Delay(sf, delay=1024./s.getSamplingRate(), mul=.7).out(1) """ def __init__(self, input, transpo=1): pyoArgsAssert(self, "pO", input, transpo) PyoPVObject.__init__(self) self._input = input self._transpo = transpo input, transpo, lmax = convertArgsToLists(self._input, transpo) self._base_objs = [PVTranspose_base(wrap(input,i), wrap(transpo,i)) for i in range(lmax)] def setInput(self, x): """ Replace the `input` attribute. :Args: x : PyoPVObject New signal to process. 
""" pyoArgsAssert(self, "p", x) self._input = x x, lmax = convertArgsToLists(x) [obj.setInput(wrap(x,i)) for i, obj in enumerate(self._base_objs)] def setTranspo(self, x): """ Replace the `transpo` attribute. :Args: x : int new `transpo` attribute. """ pyoArgsAssert(self, "O", x) self._transpo = x x, lmax = convertArgsToLists(x) [obj.setTranspo(wrap(x,i)) for i, obj in enumerate(self._base_objs)] def ctrl(self, map_list=None, title=None, wxnoserver=False): self._map_list = [SLMap(0.25, 4, "lin", "transpo", self._transpo)] PyoPVObject.ctrl(self, map_list, title, wxnoserver) @property def input(self): """PyoPVObject. Input signal to process.""" return self._input @input.setter def input(self, x): self.setInput(x) @property def transpo(self): """float or PyoObject. Transposition factor.""" return self._transpo @transpo.setter def transpo(self, x): self.setTranspo(x) class PVVerb(PyoPVObject): """ Spectral domain reverberation. :Parent: :py:class:`PyoPVObject` :Args: input : PyoPVObject Phase vocoder streaming object to process. revtime : float or PyoObject, optional Reverberation factor, between 0 and 1. Defaults to 0.75. damp : float or PyoObject, optional High frequency damping factor, between 0 and 1. 1 means no damping and 0 is the most damping. Defaults to 0.75. 
>>> s = Server().boot() >>> s.start() >>> sf = SfPlayer(SNDS_PATH+"/transparent.aif", loop=True, mul=.5) >>> pva = PVAnal(sf, size=2048) >>> pvg = PVGate(pva, thresh=-36, damp=0) >>> pvv = PVVerb(pvg, revtime=0.95, damp=0.95) >>> pvs = PVSynth(pvv).mix(2).out() >>> dry = Delay(sf, delay=2048./s.getSamplingRate(), mul=.4).mix(2).out() """ def __init__(self, input, revtime=0.75, damp=0.75): pyoArgsAssert(self, "pOO", input, revtime, damp) PyoPVObject.__init__(self) self._input = input self._revtime = revtime self._damp = damp input, revtime, damp, lmax = convertArgsToLists(self._input, revtime, damp) self._base_objs = [PVVerb_base(wrap(input,i), wrap(revtime,i), wrap(damp,i)) for i in range(lmax)] def setInput(self, x): """ Replace the `input` attribute. :Args: x : PyoPVObject New signal to process. """ pyoArgsAssert(self, "p", x) self._input = x x, lmax = convertArgsToLists(x) [obj.setInput(wrap(x,i)) for i, obj in enumerate(self._base_objs)] def setRevtime(self, x): """ Replace the `revtime` attribute. :Args: x : int new `revtime` attribute. """ pyoArgsAssert(self, "O", x) self._revtime = x x, lmax = convertArgsToLists(x) [obj.setRevtime(wrap(x,i)) for i, obj in enumerate(self._base_objs)] def setDamp(self, x): """ Replace the `damp` attribute. :Args: x : int new `damp` attribute. """ pyoArgsAssert(self, "O", x) self._damp = x x, lmax = convertArgsToLists(x) [obj.setDamp(wrap(x,i)) for i, obj in enumerate(self._base_objs)] def ctrl(self, map_list=None, title=None, wxnoserver=False): self._map_list = [SLMap(0, 1, "lin", "revtime", self._revtime), SLMap(0, 1, "lin", "damp", self._damp)] PyoPVObject.ctrl(self, map_list, title, wxnoserver) @property def input(self): """PyoPVObject. Input signal to process.""" return self._input @input.setter def input(self, x): self.setInput(x) @property def revtime(self): """float or PyoObject. 
Reverberation factor.""" return self._revtime @revtime.setter def revtime(self, x): self.setRevtime(x) @property def damp(self): """float or PyoObject. High frequency damping factor.""" return self._damp @damp.setter def damp(self, x): self.setDamp(x) class PVGate(PyoPVObject): """ Spectral gate. :Parent: :py:class:`PyoObject` :Args: input : PyoPVObject Phase vocoder streaming object to process. thresh : float or PyoObject, optional Threshold factor in dB. Bins below that threshold will be scaled by `damp` factor. Defaults to -20. damp : float or PyoObject, optional Damping factor for low amplitude bins. Defaults to 0. >>> s = Server().boot() >>> s.start() >>> sf = SfPlayer(SNDS_PATH+"/transparent.aif", loop=True, mul=.5) >>> pva = PVAnal(sf, size=2048) >>> pvg = PVGate(pva, thresh=-50, damp=0) >>> pvs = PVSynth(pvg).mix(2).out() """ def __init__(self, input, thresh=-20, damp=0.): pyoArgsAssert(self, "pOO", input, thresh, damp) PyoPVObject.__init__(self) self._input = input self._thresh = thresh self._damp = damp input, thresh, damp, lmax = convertArgsToLists(self._input, thresh, damp) self._base_objs = [PVGate_base(wrap(input,i), wrap(thresh,i), wrap(damp,i)) for i in range(lmax)] def setInput(self, x): """ Replace the `input` attribute. :Args: x : PyoPVObject New signal to process. """ pyoArgsAssert(self, "p", x) self._input = x x, lmax = convertArgsToLists(x) [obj.setInput(wrap(x,i)) for i, obj in enumerate(self._base_objs)] def setThresh(self, x): """ Replace the `thresh` attribute. :Args: x : int new `thresh` attribute. """ pyoArgsAssert(self, "O", x) self._thresh = x x, lmax = convertArgsToLists(x) [obj.setThresh(wrap(x,i)) for i, obj in enumerate(self._base_objs)] def setDamp(self, x): """ Replace the `damp` attribute. :Args: x : int new `damp` attribute. 
""" pyoArgsAssert(self, "O", x) self._damp = x x, lmax = convertArgsToLists(x) [obj.setDamp(wrap(x,i)) for i, obj in enumerate(self._base_objs)] def ctrl(self, map_list=None, title=None, wxnoserver=False): self._map_list = [SLMap(-120, 18, "lin", "thresh", self._thresh), SLMap(0, 2, "lin", "damp", self._damp)] PyoPVObject.ctrl(self, map_list, title, wxnoserver) @property def input(self): """PyoPVObject. Input signal to process.""" return self._input @input.setter def input(self, x): self.setInput(x) @property def thresh(self): """float or PyoObject. Threshold factor.""" return self._thresh @thresh.setter def thresh(self, x): self.setThresh(x) @property def damp(self): """float or PyoObject. Damping factor for low amplitude bins.""" return self._damp @damp.setter def damp(self, x): self.setDamp(x) class PVCross(PyoPVObject): """ Performs cross-synthesis between two phase vocoder streaming object. The amplitudes from `input` and `input2` (scaled by `fade` argument) are applied to the frequencies of `input`. :Parent: :py:class:`PyoPVObject` :Args: input : PyoPVObject Phase vocoder streaming object to process. Frequencies from this pv stream are used to compute the output signal. input2 : PyoPVObject Phase vocoder streaming object which gives the second set of magnitudes. Frequencies from this pv stream are not used. fade : float or PyoObject, optional Scaling factor for the output amplitudes, between 0 and 1. 0 means amplitudes from `input` and 1 means amplitudes from `input2`. Defaults to 1. .. note:: The two input pv stream must have the same size and overlaps. It is the responsibility of the user to be sure they are consistent. To change the size (or the overlaps) of the phase vocoder process, one must write a function to change both at the same time (see the example below). Another possibility is to use channel expansion to analyse both sounds with the same PVAnal object. 
>>> s = Server().boot() >>> s.start() >>> sf = SineLoop(freq=[80,81], feedback=0.07, mul=.5) >>> sf2 = SfPlayer(SNDS_PATH+"/transparent.aif", loop=True, mul=.5) >>> pva = PVAnal(sf) >>> pva2 = PVAnal(sf2) >>> pvc = PVCross(pva, pva2, fade=1) >>> pvs = PVSynth(pvc).out() >>> def size(x): ... pva.size = x ... pva2.size = x >>> def olaps(x): ... pva.overlaps = x ... pva2.overlaps = x """ def __init__(self, input, input2, fade=1): pyoArgsAssert(self, "ppO", input, input2, fade) PyoPVObject.__init__(self) self._input = input self._input2 = input2 self._fade = fade input, input2, fade, lmax = convertArgsToLists(self._input, self._input2, fade) self._base_objs = [PVCross_base(wrap(input,i), wrap(input2,i), wrap(fade,i)) for i in range(lmax)] def setInput(self, x): """ Replace the `input` attribute. :Args: x : PyoPVObject New signal to process. """ pyoArgsAssert(self, "p", x) self._input = x x, lmax = convertArgsToLists(x) [obj.setInput(wrap(x,i)) for i, obj in enumerate(self._base_objs)] def setInput2(self, x): """ Replace the `input2` attribute. :Args: x : PyoPVObject New signal to process. """ pyoArgsAssert(self, "p", x) self._input2 = x x, lmax = convertArgsToLists(x) [obj.setInput2(wrap(x,i)) for i, obj in enumerate(self._base_objs)] def setFade(self, x): """ Replace the `fade` attribute. :Args: x : float or PyoObject new `fade` attribute. """ pyoArgsAssert(self, "O", x) self._fade = x x, lmax = convertArgsToLists(x) [obj.setFade(wrap(x,i)) for i, obj in enumerate(self._base_objs)] def ctrl(self, map_list=None, title=None, wxnoserver=False): self._map_list = [SLMap(0, 1, "lin", "fade", self._fade)] PyoPVObject.ctrl(self, map_list, title, wxnoserver) @property def input(self): """PyoPVObject. Input signal to process.""" return self._input @input.setter def input(self, x): self.setInput(x) @property def input2(self): """PyoPVObject. 
Second set of amplitudes."""
        return self._input2
    @input2.setter
    def input2(self, x): self.setInput2(x)

    @property
    def fade(self):
        """float or PyoObject. Scaling factor."""
        return self._fade
    @fade.setter
    def fade(self, x): self.setFade(x)

class PVMult(PyoPVObject):
    """
    Multiply magnitudes from two phase vocoder streaming objects.

    :Parent: :py:class:`PyoPVObject`

    :Args:

        input : PyoPVObject
            Phase vocoder streaming object to process. Frequencies from
            this pv stream are used to compute the output signal.
        input2 : PyoPVObject
            Phase vocoder streaming object which gives the second set of
            magnitudes. Frequencies from this pv stream are not used.

    .. note::

        The two input pv streams must have the same size and overlaps. It is
        the responsibility of the user to be sure they are consistent. To
        change the size (or the overlaps) of the phase vocoder process,
        one must write a function to change both at the same time (see
        the example below). Another possibility is to use channel expansion
        to analyse both sounds with the same PVAnal object.

    >>> s = Server().boot()
    >>> s.start()
    >>> sf = FM(carrier=[100,150], ratio=[.999,.5005], index=20, mul=.4)
    >>> sf2 = SfPlayer(SNDS_PATH+"/transparent.aif", loop=True, mul=.5)
    >>> pva = PVAnal(sf)
    >>> pva2 = PVAnal(sf2)
    >>> pvc = PVMult(pva, pva2)
    >>> pvs = PVSynth(pvc).out()
    >>> def size(x):
    ...     pva.size = x
    ...     pva2.size = x
    >>> def olaps(x):
    ...     pva.overlaps = x
    ...     pva2.overlaps = x

    """
    def __init__(self, input, input2):
        pyoArgsAssert(self, "pp", input, input2)
        PyoPVObject.__init__(self)
        self._input = input
        self._input2 = input2
        # Expand args to lists so channel-expanded (multi-stream) inputs
        # each drive their own C-level PVMult_base object; lmax is the
        # number of parallel streams.
        input, input2, lmax = convertArgsToLists(self._input, self._input2)
        self._base_objs = [PVMult_base(wrap(input,i), wrap(input2,i)) for i in range(lmax)]

    def setInput(self, x):
        """
        Replace the `input` attribute.

        :Args:

            x : PyoPVObject
                New signal to process.
>>> s = Server().boot() >>> s.start() >>> sf = SineLoop(freq=[100,101], feedback=0.12, mul=.5) >>> sf2 = SfPlayer(SNDS_PATH+"/transparent.aif", loop=True, mul=.5) >>> pva = PVAnal(sf) >>> pva2 = PVAnal(sf2) >>> pvc = PVMorph(pva, pva2, fade=0.5) >>> pvs = PVSynth(pvc).out() >>> def size(x): ... pva.size = x ... pva2.size = x >>> def olaps(x): ... pva.overlaps = x ... pva2.overlaps = x """ def __init__(self, input, input2, fade=0.5): pyoArgsAssert(self, "ppO", input, input2, fade) PyoPVObject.__init__(self) self._input = input self._input2 = input2 self._fade = fade input, input2, fade, lmax = convertArgsToLists(self._input, self._input2, fade) self._base_objs = [PVMorph_base(wrap(input,i), wrap(input2,i), wrap(fade,i)) for i in range(lmax)] def setInput(self, x): """ Replace the `input` attribute. :Args: x : PyoPVObject New signal to process. """ pyoArgsAssert(self, "p", x) self._input = x x, lmax = convertArgsToLists(x) [obj.setInput(wrap(x,i)) for i, obj in enumerate(self._base_objs)] def setInput2(self, x): """ Replace the `input2` attribute. :Args: x : PyoPVObject New signal to process. """ pyoArgsAssert(self, "p", x) self._input2 = x x, lmax = convertArgsToLists(x) [obj.setInput2(wrap(x,i)) for i, obj in enumerate(self._base_objs)] def setFade(self, x): """ Replace the `fade` attribute. :Args: x : float or PyoObject new `fade` attribute. """ pyoArgsAssert(self, "O", x) self._fade = x x, lmax = convertArgsToLists(x) [obj.setFade(wrap(x,i)) for i, obj in enumerate(self._base_objs)] def ctrl(self, map_list=None, title=None, wxnoserver=False): self._map_list = [SLMap(0, 1, "lin", "fade", self._fade)] PyoPVObject.ctrl(self, map_list, title, wxnoserver) @property def input(self): """PyoPVObject. First input signal.""" return self._input @input.setter def input(self, x): self.setInput(x) @property def input2(self): """PyoPVObject. 
Second input signal.""" return self._input2 @input2.setter def input2(self, x): self.setInput2(x) @property def fade(self): """float or PyoObject. Scaling factor.""" return self._fade @fade.setter def fade(self, x): self.setFade(x) class PVFilter(PyoPVObject): """ Spectral filter. PVFilter filters frequency components of a pv stream according to the shape drawn in the table given in argument. :Parent: :py:class:`PyoPVObject` :Args: input : PyoPVObject Phase vocoder streaming object to process. table : PyoTableObject Table containing the filter shape. If the table length is smaller than fftsize/2, remaining bins will be set to 0. gain : float or PyoObject, optional Gain of the filter applied to the input spectrum. Defaults to 1. mode : int, optional Table scanning mode. Defaults to 0. If 0, bin indexes outside table size are set to 0. If 1, bin indexes are scaled over table length. >>> s = Server().boot() >>> s.start() >>> t = ExpTable([(0,1),(61,1),(71,0),(131,1),(171,0),(511,0)], size=512) >>> src = Noise(.4) >>> pva = PVAnal(src, size=1024) >>> pvf = PVFilter(pva, t) >>> pvs = PVSynth(pvf).out() """ def __init__(self, input, table, gain=1, mode=0): pyoArgsAssert(self, "ptOi", input, table, gain, mode) PyoPVObject.__init__(self) self._input = input self._table = table self._gain = gain self._mode = mode input, table, gain, mode, lmax = convertArgsToLists(self._input, table, gain, mode) self._base_objs = [PVFilter_base(wrap(input,i), wrap(table,i), wrap(gain,i), wrap(mode,i)) for i in range(lmax)] def setInput(self, x): """ Replace the `input` attribute. :Args: x : PyoPVObject New signal to process. """ pyoArgsAssert(self, "p", x) self._input = x x, lmax = convertArgsToLists(x) [obj.setInput(wrap(x,i)) for i, obj in enumerate(self._base_objs)] def setTable(self, x): """ Replace the `table` attribute. :Args: x : PyoTableObject new `table` attribute. 
""" pyoArgsAssert(self, "t", x) self._table = x x, lmax = convertArgsToLists(x) [obj.setTable(wrap(x,i)) for i, obj in enumerate(self._base_objs)] def setGain(self, x): """ Replace the `gain` attribute. :Args: x : float or PyoObject new `gain` attribute. """ pyoArgsAssert(self, "O", x) self._gain = x x, lmax = convertArgsToLists(x) [obj.setGain(wrap(x,i)) for i, obj in enumerate(self._base_objs)] def setMode(self, x): """ Replace the `mode` attribute. :Args: x : int new `mode` attribute. """ pyoArgsAssert(self, "i", x) self._mode = x x, lmax = convertArgsToLists(x) [obj.setMode(wrap(x,i)) for i, obj in enumerate(self._base_objs)] def ctrl(self, map_list=None, title=None, wxnoserver=False): self._map_list = [SLMap(0, 1, "lin", "gain", self._gain)] PyoPVObject.ctrl(self, map_list, title, wxnoserver) @property def input(self): """PyoPVObject. Input signal to process.""" return self._input @input.setter def input(self, x): self.setInput(x) @property def table(self): """PyoTableObject. Table containing the filter shape.""" return self._table @table.setter def table(self, x): self.setTable(x) @property def gain(self): """float or PyoObject. Gain of the filter.""" return self._gain @gain.setter def gain(self, x): self.setGain(x) @property def mode(self): """int. Table scanning mode.""" return self._mode @mode.setter def mode(self, x): self.setMode(x) class PVDelay(PyoPVObject): """ Spectral delays. PVDelay applies different delay times and feedbacks for each bin of a phase vocoder analysis. Delay times and feedbacks are specified with PyoTableObjects. :Parent: :py:class:`PyoPVObject` :Args: input : PyoPVObject Phase vocoder streaming object to process. deltable : PyoTableObject Table containing delay times, as integer multipliers of the FFT hopsize (fftsize / overlaps). If the table length is smaller than fftsize/2, remaining bins will be set to 0. feedtable : PyoTableObject Table containing feedback values, between -1 and 1. 
If the table length is smaller than fftsize/2, remaining bins will be set to 0. maxdelay : float, optional Maximum delay time in seconds. Available at initialization time only. Defaults to 1.0. mode : int, optional Tables scanning mode. Defaults to 0. If 0, bin indexes outside table size are set to 0. If 1, bin indexes are scaled over table length. >>> s = Server().boot() >>> s.start() >>> SIZE = 1024 >>> SIZE2 = SIZE / 2 >>> OLAPS = 4 >>> MAXDEL = 2.0 # two seconds delay memories >>> FRAMES = int(MAXDEL * s.getSamplingRate() / (SIZE / OLAPS)) >>> # Edit tables with the graph() method. yrange=(0, FRAMES) for delays table >>> dt = DataTable(size=SIZE2, init=[i / float(SIZE2) * FRAMES for i in range(SIZE2)]) >>> ft = DataTable(size=SIZE2, init=[0.5]*SIZE2) >>> src = SfPlayer(SNDS_PATH+"/transparent.aif", loop=True, mul=0.5) >>> pva = PVAnal(src, size=SIZE, overlaps=OLAPS) >>> pvd = PVDelay(pva, dt, ft, maxdelay=MAXDEL) >>> pvs = PVSynth(pvd).out() """ def __init__(self, input, deltable, feedtable, maxdelay=1.0, mode=0): pyoArgsAssert(self, "pttni", input, deltable, feedtable, maxdelay, mode) PyoPVObject.__init__(self) self._input = input self._deltable = deltable self._feedtable = feedtable self._maxdelay = maxdelay self._mode = mode input, deltable, feedtable, maxdelay, mode, lmax = convertArgsToLists(self._input, deltable, feedtable, maxdelay, mode) self._base_objs = [PVDelay_base(wrap(input,i), wrap(deltable,i), wrap(feedtable,i), wrap(maxdelay,i), wrap(mode,i)) for i in range(lmax)] def setInput(self, x): """ Replace the `input` attribute. :Args: x : PyoPVObject New signal to process. """ pyoArgsAssert(self, "p", x) self._input = x x, lmax = convertArgsToLists(x) [obj.setInput(wrap(x,i)) for i, obj in enumerate(self._base_objs)] def setDeltable(self, x): """ Replace the `deltable` attribute. :Args: x : PyoTableObject new `deltable` attribute. 
""" pyoArgsAssert(self, "t", x) self._deltable = x x, lmax = convertArgsToLists(x) [obj.setDeltable(wrap(x,i)) for i, obj in enumerate(self._base_objs)] def setFeedtable(self, x): """ Replace the `feedtable` attribute. :Args: x : PyoTableObject new `feedtable` attribute. """ pyoArgsAssert(self, "t", x) self._feedtable = x x, lmax = convertArgsToLists(x) [obj.setFeedtable(wrap(x,i)) for i, obj in enumerate(self._base_objs)] def setMode(self, x): """ Replace the `mode` attribute. :Args: x : int new `mode` attribute. """ pyoArgsAssert(self, "i", x) self._mode = x x, lmax = convertArgsToLists(x) [obj.setMode(wrap(x,i)) for i, obj in enumerate(self._base_objs)] @property def input(self): """PyoPVObject. Input signal to process.""" return self._input @input.setter def input(self, x): self.setInput(x) @property def deltable(self): """PyoTableObject. Table containing the delay times.""" return self._deltable @deltable.setter def deltable(self, x): self.setDeltable(x) @property def feedtable(self): """PyoTableObject. Table containing feedback values.""" return self._feedtable @feedtable.setter def feedtable(self, x): self.setFeedtable(x) @property def mode(self): """int. Table scanning mode.""" return self._mode @mode.setter def mode(self, x): self.setMode(x) class PVBuffer(PyoPVObject): """ Phase vocoder buffer and playback with transposition. PVBuffer keeps `length` seconds of pv analysis in memory and gives control on playback position and transposition. :Parent: :py:class:`PyoPVObject` :Args: input : PyoPVObject Phase vocoder streaming object to process. index : PyoObject Playback position, as audio stream, normalized between 0 and 1. pitch : float or PyoObject, optional Transposition factor. Defaults to 1. length : float, optional Memory length in seconds. Available at initialization time only. Defaults to 1.0. .. note:: The play() method can be called to start a new recording of the current pv input. 
>>> s = Server().boot() >>> s.start() >>> f = SNDS_PATH+'/transparent.aif' >>> f_len = sndinfo(f)[1] >>> src = SfPlayer(f, mul=0.5) >>> index = Phasor(freq=1.0/f_len*0.25, phase=0.9) >>> pva = PVAnal(src, size=1024, overlaps=8) >>> pvb = PVBuffer(pva, index, pitch=1.25, length=f_len) >>> pvs = PVSynth(pvb).out() """ def __init__(self, input, index, pitch=1.0, length=1.0): pyoArgsAssert(self, "poOn", input, index, pitch, length) PyoPVObject.__init__(self) self._input = input self._index = index self._pitch = pitch self._length = length input, index, pitch, length, lmax = convertArgsToLists(self._input, index, pitch, length) self._base_objs = [PVBuffer_base(wrap(input,i), wrap(index,i), wrap(pitch,i), wrap(length,i)) for i in range(lmax)] def setInput(self, x): """ Replace the `input` attribute. :Args: x : PyoPVObject New signal to process. """ pyoArgsAssert(self, "p", x) self._input = x x, lmax = convertArgsToLists(x) [obj.setInput(wrap(x,i)) for i, obj in enumerate(self._base_objs)] def setIndex(self, x): """ Replace the `index` attribute. :Args: x : PyoObject new `index` attribute. """ pyoArgsAssert(self, "o", x) self._index = x x, lmax = convertArgsToLists(x) [obj.setIndex(wrap(x,i)) for i, obj in enumerate(self._base_objs)] def setPitch(self, x): """ Replace the `pitch` attribute. :Args: x : float or PyoObject new `pitch` attribute. """ pyoArgsAssert(self, "O", x) self._pitch = x x, lmax = convertArgsToLists(x) [obj.setPitch(wrap(x,i)) for i, obj in enumerate(self._base_objs)] def ctrl(self, map_list=None, title=None, wxnoserver=False): self._map_list = [SLMap(0.25, 4, "lin", "pitch", self._pitch)] PyoPVObject.ctrl(self, map_list, title, wxnoserver) @property def input(self): """PyoPVObject. Input signal to process.""" return self._input @input.setter def input(self, x): self.setInput(x) @property def index(self): """PyoObject. 
Reader's normalized position.""" return self._index @index.setter def index(self, x): self.setIndex(x) @property def pitch(self): """float or PyoObject. Transposition factor.""" return self._pitch @pitch.setter def pitch(self, x): self.setPitch(x) class PVShift(PyoPVObject): """ Spectral domain frequency shifter. PVShift linearly moves the analysis bins by the amount, in Hertz, specified by the the `shift` argument. :Parent: :py:class:`PyoPVObject` :Args: input : PyoPVObject Phase vocoder streaming object to process. shift : float or PyoObject, optional Frequency shift factor. Defaults to 0. >>> s = Server().boot() >>> s.start() >>> sf = SfPlayer(SNDS_PATH+"/transparent.aif", loop=True, mul=.7) >>> pva = PVAnal(sf, size=1024) >>> pvt = PVShift(pva, shift=500) >>> pvs = PVSynth(pvt).out() """ def __init__(self, input, shift=0): pyoArgsAssert(self, "pO", input, shift) PyoPVObject.__init__(self) self._input = input self._shift = shift input, shift, lmax = convertArgsToLists(self._input, shift) self._base_objs = [PVShift_base(wrap(input,i), wrap(shift,i)) for i in range(lmax)] def setInput(self, x): """ Replace the `input` attribute. :Args: x : PyoPVObject New signal to process. """ pyoArgsAssert(self, "p", x) self._input = x x, lmax = convertArgsToLists(x) [obj.setInput(wrap(x,i)) for i, obj in enumerate(self._base_objs)] def setShift(self, x): """ Replace the `shift` attribute. :Args: x : float or PyoObject new `shift` attribute. """ pyoArgsAssert(self, "O", x) self._shift = x x, lmax = convertArgsToLists(x) [obj.setShift(wrap(x,i)) for i, obj in enumerate(self._base_objs)] def ctrl(self, map_list=None, title=None, wxnoserver=False): self._map_list = [SLMap(-5000, 5000, "lin", "shift", self._shift)] PyoPVObject.ctrl(self, map_list, title, wxnoserver) @property def input(self): """PyoPVObject. Input signal to process.""" return self._input @input.setter def input(self, x): self.setInput(x) @property def shift(self): """float or PyoObject. 
Frequency shift factor.""" return self._shift @shift.setter def shift(self, x): self.setShift(x) class PVAmpMod(PyoPVObject): """ Performs frequency independent amplitude modulations. PVAmpMod modulates the magnitude of each bin of a pv stream with an independent oscillator. `basefreq` and `spread` are used to derive the frequency of each modulating oscillator. Internally, the following operations are applied to derive oscillator frequencies (`i` is the bin number): spread = spread * 0.001 + 1.0 f_i = basefreq * pow(spread, i) :Parent: :py:class:`PyoPVObject` :Args: input : PyoPVObject Phase vocoder streaming object to process. basefreq : float or PyoObject, optional Base modulation frequency, in Hertz. Defaults to 1. spread : float or PyoObject, optional Spreading factor for oscillator frequencies, between -1 and 1. 0 means every oscillator has the same frequency. >>> s = Server().boot() >>> s.start() >>> src = PinkNoise(.3) >>> pva = PVAnal(src, size=1024, overlaps=4) >>> pvm = PVAmpMod(pva, basefreq=4, spread=0.5) >>> pvs = PVSynth(pvm).out() """ def __init__(self, input, basefreq=1, spread=0): pyoArgsAssert(self, "pOO", input, basefreq, spread) PyoPVObject.__init__(self) self._input = input self._basefreq = basefreq self._spread = spread input, basefreq, spread, lmax = convertArgsToLists(self._input, basefreq, spread) self._base_objs = [PVAmpMod_base(wrap(input,i), wrap(basefreq,i), wrap(spread,i)) for i in range(lmax)] def setInput(self, x): """ Replace the `input` attribute. :Args: x : PyoPVObject New signal to process. """ pyoArgsAssert(self, "p", x) self._input = x x, lmax = convertArgsToLists(x) [obj.setInput(wrap(x,i)) for i, obj in enumerate(self._base_objs)] def setBasefreq(self, x): """ Replace the `basefreq` attribute. :Args: x : float or PyoObject new `basefreq` attribute. 
""" pyoArgsAssert(self, "O", x) self._basefreq = x x, lmax = convertArgsToLists(x) [obj.setBasefreq(wrap(x,i)) for i, obj in enumerate(self._base_objs)] def setSpread(self, x): """ Replace the `spread` attribute. :Args: x : float or PyoObject new `spread` attribute. """ pyoArgsAssert(self, "O", x) self._spread = x x, lmax = convertArgsToLists(x) [obj.setSpread(wrap(x,i)) for i, obj in enumerate(self._base_objs)] def reset(self): """ Resets modulation pointers to 0. """ [obj.reset() for obj in self._base_objs] def ctrl(self, map_list=None, title=None, wxnoserver=False): self._map_list = [SLMap(0.1, 20, "log", "basefreq", self._basefreq), SLMap(-1, 1, "lin", "spread", self._spread)] PyoPVObject.ctrl(self, map_list, title, wxnoserver) @property def input(self): """PyoPVObject. Input signal to process.""" return self._input @input.setter def input(self, x): self.setInput(x) @property def basefreq(self): """float or PyoObject. Modulator's base frequency.""" return self._basefreq @basefreq.setter def basefreq(self, x): self.setBasefreq(x) @property def spread(self): """float or PyoObject. Modulator's frequency spreading factor.""" return self._spread @spread.setter def spread(self, x): self.setSpread(x) class PVFreqMod(PyoPVObject): """ Performs frequency independent frequency modulations. PVFreqMod modulates the frequency of each bin of a pv stream with an independent oscillator. `basefreq` and `spread` are used to derive the frequency of each modulating oscillator. Internally, the following operations are applied to derive oscillator frequencies (`i` is the bin number): spread = spread * 0.001 + 1.0 f_i = basefreq * pow(spread, i) :Parent: :py:class:`PyoPVObject` :Args: input : PyoPVObject Phase vocoder streaming object to process. basefreq : float or PyoObject, optional Base modulation frequency, in Hertz. Defaults to 1. spread : float or PyoObject, optional Spreading factor for oscillator frequencies, between -1 and 1. 0 means every oscillator has the same frequency. 
depth : float or PyoObject, optional Amplitude of the modulating oscillators, between 0 and 1. Defaults to 0.1. >>> s = Server().boot() >>> s.start() >>> src = SfPlayer(SNDS_PATH+"/accord.aif", loop=True, mul=0.5) >>> pva = PVAnal(src, size=1024, overlaps=4) >>> pvm = PVFreqMod(pva, basefreq=8, spread=0.75, depth=0.05) >>> pvs = PVSynth(pvm).out() """ def __init__(self, input, basefreq=1, spread=0, depth=0.1): pyoArgsAssert(self, "pOOO", input, basefreq, spread, depth) PyoPVObject.__init__(self) self._input = input self._basefreq = basefreq self._spread = spread self._depth = depth input, basefreq, spread, depth, lmax = convertArgsToLists(self._input, basefreq, spread, depth) self._base_objs = [PVFreqMod_base(wrap(input,i), wrap(basefreq,i), wrap(spread,i), wrap(depth,i)) for i in range(lmax)] def setInput(self, x): """ Replace the `input` attribute. :Args: x : PyoPVObject New signal to process. """ pyoArgsAssert(self, "p", x) self._input = x x, lmax = convertArgsToLists(x) [obj.setInput(wrap(x,i)) for i, obj in enumerate(self._base_objs)] def setBasefreq(self, x): """ Replace the `basefreq` attribute. :Args: x : float or PyoObject new `basefreq` attribute. """ pyoArgsAssert(self, "O", x) self._basefreq = x x, lmax = convertArgsToLists(x) [obj.setBasefreq(wrap(x,i)) for i, obj in enumerate(self._base_objs)] def setSpread(self, x): """ Replace the `spread` attribute. :Args: x : float or PyoObject new `spread` attribute. """ pyoArgsAssert(self, "O", x) self._spread = x x, lmax = convertArgsToLists(x) [obj.setSpread(wrap(x,i)) for i, obj in enumerate(self._base_objs)] def setDepth(self, x): """ Replace the `depth` attribute. :Args: x : float or PyoObject new `depth` attribute. """ pyoArgsAssert(self, "O", x) self._depth = x x, lmax = convertArgsToLists(x) [obj.setDepth(wrap(x,i)) for i, obj in enumerate(self._base_objs)] def reset(self): """ Resets modulation pointers to 0. 
""" [obj.reset() for obj in self._base_objs] def ctrl(self, map_list=None, title=None, wxnoserver=False): self._map_list = [SLMap(0.1, 20, "log", "basefreq", self._basefreq), SLMap(-1, 1, "lin", "spread", self._spread), SLMap(0, 1, "lin", "depth", self._depth)] PyoPVObject.ctrl(self, map_list, title, wxnoserver) @property def input(self): """PyoPVObject. Input signal to process.""" return self._input @input.setter def input(self, x): self.setInput(x) @property def basefreq(self): """float or PyoObject. Modulator's base frequency.""" return self._basefreq @basefreq.setter def basefreq(self, x): self.setBasefreq(x) @property def spread(self): """float or PyoObject. Modulator's frequencies spreading factor.""" return self._spread @spread.setter def spread(self, x): self.setSpread(x) @property def depth(self): """float or PyoObject. Amplitude of the modulators.""" return self._depth @depth.setter def depth(self, x): self.setDepth(x) class PVBufLoops(PyoPVObject): """ Phase vocoder buffer with bin independent speed playback. PVBufLoops keeps `length` seconds of pv analysis in memory and gives control on playback position independently for every frequency bin. :Parent: :py:class:`PyoPVObject` :Args: input : PyoPVObject Phase vocoder streaming object to process. low : float or PyoObject, optional Lowest bin speed factor. Defaults to 1.0. high : float or PyoObject, optional Highest bin speed factor. Defaults to 1.0. mode : int, optional Speed distribution algorithm. Available algorithms are: 0. linear, line between `low` and `high` (default) 1. exponential, exponential line between `low` and `high` 2. logarithmic, logarithmic line between `low` and `high` 3. random, uniform random between `low` and `high` 4. rand expon min, exponential random from `low` to `high` 5. rand expon max, exponential random from `high` to `low` 6. rand bi-expon, bipolar exponential random between `low` and `high` length : float, optional Memory length in seconds. 
Available at initialization time only. Defaults to 1.0. .. note:: The play() method can be called to start a new recording of the current pv input. >>> s = Server().boot() >>> s.start() >>> f = SNDS_PATH+'/transparent.aif' >>> f_len = sndinfo(f)[1] >>> src = SfPlayer(f, mul=0.5) >>> pva = PVAnal(src, size=1024, overlaps=8) >>> pvb = PVBufLoops(pva, low=0.9, high=1.1, mode=3, length=f_len) >>> pvs = PVSynth(pvb).out() """ def __init__(self, input, low=1.0, high=1.0, mode=0, length=1.0): pyoArgsAssert(self, "pOOin", input, low, high, mode, length) PyoPVObject.__init__(self) self._input = input self._low = low self._high = high self._mode = mode self._length = length input, low, high, mode, length, lmax = convertArgsToLists(self._input, low, high, mode, length) self._base_objs = [PVBufLoops_base(wrap(input,i), wrap(low,i), wrap(high,i), wrap(mode,i), wrap(length,i)) for i in range(lmax)] def setInput(self, x): """ Replace the `input` attribute. :Args: x : PyoPVObject New signal to process. """ pyoArgsAssert(self, "p", x) self._input = x x, lmax = convertArgsToLists(x) [obj.setInput(wrap(x,i)) for i, obj in enumerate(self._base_objs)] def setLow(self, x): """ Replace the `low` attribute. :Args: x : float or PyoObject new `low` attribute. """ pyoArgsAssert(self, "O", x) self._low = x x, lmax = convertArgsToLists(x) [obj.setLow(wrap(x,i)) for i, obj in enumerate(self._base_objs)] def setHigh(self, x): """ Replace the `high` attribute. :Args: x : float or PyoObject new `high` attribute. """ pyoArgsAssert(self, "O", x) self._high = x x, lmax = convertArgsToLists(x) [obj.setHigh(wrap(x,i)) for i, obj in enumerate(self._base_objs)] def setMode(self, x): """ Replace the `mode` attribute. :Args: x : int new `mode` attribute. """ pyoArgsAssert(self, "i", x) self._mode = x x, lmax = convertArgsToLists(x) [obj.setMode(wrap(x,i)) for i, obj in enumerate(self._base_objs)] def reset(self): """ Reset pointer positions to 0. 
""" [obj.reset() for obj in self._base_objs] def ctrl(self, map_list=None, title=None, wxnoserver=False): self._map_list = [SLMap(-4, 4, "lin", "low", self._low), SLMap(-4, 4, "lin", "high", self._high)] PyoPVObject.ctrl(self, map_list, title, wxnoserver) @property def input(self): """PyoPVObject. Input signal to process.""" return self._input @input.setter def input(self, x): self.setInput(x) @property def low(self): """float or PyoObject. Lowest bin speed factor.""" return self._low @low.setter def low(self, x): self.setLow(x) @property def high(self): """float or PyoObject. Highest bin speed factor.""" return self._high @high.setter def high(self, x): self.setHigh(x) @property def mode(self): """int. Speed distribution algorithm.""" return self._mode @mode.setter def mode(self, x): self.setMode(x) class PVBufTabLoops(PyoPVObject): """ Phase vocoder buffer with bin independent speed playback. PVBufTabLoops keeps `length` seconds of pv analysis in memory and gives control on playback position, using a PyoTableObject, independently for every frequency bin. :Parent: :py:class:`PyoPVObject` :Args: input : PyoPVObject Phase vocoder streaming object to process. speed : PyoTableObject Table which specify the speed of bin playback readers. length : float, optional Memory length in seconds. Available at initialization time only. Defaults to 1.0. .. note:: The play() method can be called to start a new recording of the current pv input. 
>>> s = Server().boot()
    >>> s.start()
    >>> f = SNDS_PATH+'/transparent.aif'
    >>> f_len = sndinfo(f)[1]
    >>> src = SfPlayer(f, mul=0.5)
    >>> spd = ExpTable([(0,1), (512,0.5)], exp=6, size=512)
    >>> pva = PVAnal(src, size=1024, overlaps=8)
    >>> pvb = PVBufTabLoops(pva, spd, length=f_len)
    >>> pvs = PVSynth(pvb).out()

    """
    def __init__(self, input, speed, length=1.0):
        pyoArgsAssert(self, "ptn", input, speed, length)
        PyoPVObject.__init__(self)
        self._input = input
        self._speed = speed
        self._length = length
        # Expand args to lists so channel-expanded (multi-stream) inputs
        # each get their own C-level base object.
        input, speed, length, lmax = convertArgsToLists(self._input, speed, length)
        self._base_objs = [PVBufTabLoops_base(wrap(input,i), wrap(speed,i), wrap(length,i)) for i in range(lmax)]

    def setInput(self, x):
        """
        Replace the `input` attribute.

        :Args:

            x : PyoPVObject
                New signal to process.

        """
        pyoArgsAssert(self, "p", x)
        self._input = x
        x, lmax = convertArgsToLists(x)
        # List comprehension used for its side effect on every base object.
        [obj.setInput(wrap(x,i)) for i, obj in enumerate(self._base_objs)]

    def setSpeed(self, x):
        """
        Replace the `speed` attribute.

        :Args:

            x : PyoTableObject
                new `speed` attribute.

        """
        pyoArgsAssert(self, "t", x)
        self._speed = x
        x, lmax = convertArgsToLists(x)
        [obj.setSpeed(wrap(x,i)) for i, obj in enumerate(self._base_objs)]

    def reset(self):
        """
        Reset pointer positions to 0.

        """
        [obj.reset() for obj in self._base_objs]

    @property
    def input(self):
        """PyoPVObject. Input signal to process."""
        return self._input
    @input.setter
    def input(self, x): self.setInput(x)

    @property
    def speed(self):
        """PyoTableObject. Table which specify the speed of bin playback readers."""
        return self._speed
    @speed.setter
    def speed(self, x): self.setSpeed(x)

class PVMix(PyoPVObject):
    """
    Mix the most prominent components from two phase vocoder streaming objects.

    :Parent: :py:class:`PyoPVObject`

    :Args:

        input : PyoPVObject
            Phase vocoder streaming object 1.
        input2 : PyoPVObject
            Phase vocoder streaming object 2.

    .. note::

        The two input pv streams must have the same size and overlaps. It is
        the responsibility of the user to be sure they are consistent. To
        change the size (or the overlaps) of the phase vocoder process,
        one must write a function to change both at the same time (see
        the example below). Another possibility is to use channel expansion
        to analyse both sounds with the same PVAnal object.

    >>> s = Server().boot()
    >>> s.start()
    >>> sf = SfPlayer(SNDS_PATH+"/transparent.aif", loop=True, mul=.5)
    >>> sf2 = SfPlayer(SNDS_PATH+"/accord.aif", loop=True, mul=.5)
    >>> pva = PVAnal(sf)
    >>> pva2 = PVAnal(sf2)
    >>> pvm = PVMix(pva, pva2)
    >>> pvs = PVSynth(pvm).out()
    >>> def size(x):
    ...     pva.size = x
    ...     pva2.size = x
    >>> def olaps(x):
    ...     pva.overlaps = x
    ...     pva2.overlaps = x

    """
    def __init__(self, input, input2):
        pyoArgsAssert(self, "pp", input, input2)
        PyoPVObject.__init__(self)
        self._input = input
        self._input2 = input2
        # Expand args to lists so channel-expanded (multi-stream) inputs
        # each get their own C-level PVMix_base object.
        input, input2, lmax = convertArgsToLists(self._input, self._input2)
        self._base_objs = [PVMix_base(wrap(input,i), wrap(input2,i)) for i in range(lmax)]

    def setInput(self, x):
        """
        Replace the `input` attribute.

        :Args:

            x : PyoPVObject
                New signal to process.

        """
        pyoArgsAssert(self, "p", x)
        self._input = x
        x, lmax = convertArgsToLists(x)
        [obj.setInput(wrap(x,i)) for i, obj in enumerate(self._base_objs)]

    def setInput2(self, x):
        """
        Replace the `input2` attribute.

        :Args:

            x : PyoPVObject
                New signal to process.

        """
        pyoArgsAssert(self, "p", x)
        self._input2 = x
        x, lmax = convertArgsToLists(x)
        [obj.setInput2(wrap(x,i)) for i, obj in enumerate(self._base_objs)]

    @property
    def input(self):
        """PyoPVObject. Phase vocoder streaming object 1."""
        return self._input
    @input.setter
    def input(self, x): self.setInput(x)

    @property
    def input2(self):
        """PyoPVObject. Phase vocoder streaming object 2."""
        return self._input2
    @input2.setter
    def input2(self, x): self.setInput2(x)
Messenger Destination allows advertisers to create ads that send people into a conversation within Facebook Messenger. It’s important to understand that Messenger Destination is not a placement. When creating an ad, you’ll have the option of choosing your destination. BEWARE: a Facebook Messenger scam tricks users into clicking on viruses. It looks like a regular message from a friend, but it can destroy your computer or phone. Facebook Messenger is the most popular messaging platform in the world (closely tied with WhatsApp), which makes it one of the best tools for getting in touch with people fast and for free. Facebook Messenger is a free messaging service that lets Facebook users chat with friends both on mobile as well as on the main website. Facebook is an extremely valuable tool to use along with your website, not only to maintain your current users and clients but also to gain new potential ones. If you're running the Facebook app on a smartphone, you can access the link to your profile by going to your profile page and selecting the "More" button. Choose "Copy Link to Profile" from the menu. You can now go to email or other social media apps and paste the link into any text field. Facebook Messenger is the most popular messaging platform in the world (closely tied with WhatsApp), which makes it one of the best tools for getting in touch with people fast and for free. Or use the web link to create a post in Facebook. Then you can style the post exactly how you'd like, post it on your Timeline, and share the post directly with friends in Messenger using Facebook. Before searching for a Facebook Messenger customer service number, users of Facebook Messenger must understand that Facebook Messenger does not have a customer support number for technical solutions. Once enabled, Messenger can handle all your SMS and Facebook conversations. Facebook also notes that the company doesn’t store or save any of your SMS conversations on its servers.
This is the simplest method: you just need to add the Messenger link as a hyperlink, making sure you have used your profile/page username in the Messenger link. Clicking on the link will take you to the Messenger interface to send a message. You can use Messenger on your desktop or make in-app payments from a mobile device. How to Make a Payment: sending money with Facebook is about as easy as sending a private message. Facebook Messenger is a free messaging service that lets Facebook users chat with friends both on mobile as well as on the main website. Facebook is an extremely valuable tool to use along with your website, not only to maintain your current users and clients but also to gain new potential ones.
from indigox.exception import IndigoSearchError


class _Element(object):
    """A single chemical element and its tabulated properties.

    Instances are value objects built once at module load time and looked up
    through the module-level ``PeriodicTable`` singleton below.
    """

    def __init__(self, name, symbol, group, period, number, mass, atomic_radii,
                 covalent_radii, vdw_radii, chi, hyper=None):
        # Tabulated properties. Units are not recorded here; the radii are
        # presumably angstroms and mass is presumably g/mol -- TODO confirm.
        self.name = name
        self.symbol = symbol
        self.group = group
        self.period = period
        self.number = number
        self.mass = mass
        self.atomic_radii = atomic_radii
        self.covalent_radii = covalent_radii
        self.vdw_radii = vdw_radii
        self.chi = chi              # electronegativity (Pauling scale, judging by the values)
        # Number of valence electrons derived from the group: groups 1-12 use
        # the group number directly, the p-block uses group - 10, and helium
        # (group 18 in this table) is special-cased to 2.
        if group < 13:
            self.valence = group
        elif group == 18 and symbol == 'He':
            self.valence = 2
        else:
            self.valence = group - 10
        # "octet" is the target electron count used for octet bookkeeping:
        # duet for H/He and group 1, then 4 / 6 / 8 for groups 2 / 13 / rest.
        # NOTE(review): presumably consumed by bonding code elsewhere in the
        # package -- confirm with callers.
        if group == 1 or symbol == 'He':
            self.octet = 2
        elif group == 2:
            self.octet = 4
        elif group == 13:
            self.octet = 6
        else:
            self.octet = 8
        # Hypervalent limit: defaults to the normal octet unless the table row
        # supplies an expanded value (only Br, P and S do below).
        if hyper is None:
            self.hyper = self.octet
        else:
            self.hyper = hyper

    def __str__(self):
        # Print as the element symbol, e.g. "C" or "Fe".
        return self.symbol

    def __eq__(self, c):
        # An element compares equal to its symbol, atomic number or name,
        # so e.g. element == 'C', element == 6 and element == 'Carbon' all
        # hold for carbon; anything else falls back to identity comparison.
        # NOTE(review): defining __eq__ without __hash__ makes instances
        # unhashable on Python 3 -- fine while they are only used as dict
        # values here, but worth confirming no caller uses them as keys.
        if self.symbol == c or self.number == c or self.name == c:
            return True
        return object.__eq__(self, c)


class _PeriodicTable(object):
    """Lookup table mapping name / symbol / atomic number to an _Element.

    Supports dict-style access (``table['C']``, ``table[6]``, ``table['Carbon']``)
    and attribute access (``table.C``) via __getattr__ delegation.
    """

    def __init__(self, elements):
        # Three parallel indexes over the same _Element instances.
        self.elements_number = dict()
        self.elements_name = dict()
        self.elements_symbol = dict()
        for e in elements:
            self.elements_number[e.number] = e
            self.elements_name[e.name] = e
            self.elements_symbol[e.symbol] = e

    def __getattr__(self, name):
        # Attribute access falls through to item lookup, so PeriodicTable.C
        # works. NOTE(review): as a side effect, *any* missing attribute
        # raises IndigoSearchError instead of AttributeError, which can
        # confuse hasattr()/copy()/pickle -- verify this is intended.
        return self[name]

    def __getitem__(self, name):
        # Decide whether the key looks like an atomic number ("intable") or a
        # name/symbol string, then probe the matching index.
        try:
            int(name)
        except ValueError:
            intable = False
        else:
            intable = True
        if not intable and name.title() in self.elements_name:
            return self.elements_name[name.title()]
        elif not intable and name.title() in self.elements_symbol:
            return self.elements_symbol[name.title()]
        elif intable and int(name) in self.elements_number:
            return self.elements_number[int(name)]
        else:
            raise IndigoSearchError('Unknown element type: {}'
                                    .format(name.title()))


# Element data table. Group 0 appears to be used for f-block / unassigned
# elements. NOTE(review): several transactinide rows use the pre-1997
# provisional IUPAC names and their atomic numbers look shuffled relative to
# the modern assignments (Db listed at 104, Jl at 105, Rf at 106, Hn at 108;
# today Rf=104, Db=105, Sg=106, Hs=108). The Plutonium row has group=7,
# period=0, which looks transposed (expected group 0, period 7). Confirm
# against the upstream data source before "fixing" any of these.
_elements = [
    # Name            Symbol Group Period AtomicNumber Mass    Radius Cvradius VdWradius chi  [hyper]
    _Element("Nullium", "X", 18, 9, 0, 0.0000, 0.00, 0.0, 0.0, 0.0),
    _Element("Actinium", "Ac", 3, 7, 89, 227.0278, 1.88, 0.0, 0.0, 1.3),
    _Element("Aluminum", "Al", 13, 3, 13, 26.981539, 1.43, 1.25, 2.05, 1.61),
    _Element("Americium", "Am", 0, 7, 95, 243.0614, 1.73, 0.0, 0.0, 1.3),
    _Element("Antimony", "Sb", 15, 5, 51, 121.76, 1.82, 1.41, 2.2, 2.05),
    _Element("Argon", "Ar", 18, 3, 18, 39.948, 1.74, 0.0, 1.91, 0.0),
    _Element("Arsenic", "As", 15, 4, 33, 74.92159, 1.25, 1.21, 2.0, 2.18),
    _Element("Astatine", "At", 17, 6, 85, 209.9871, 0.0, 0.0, 0.0, 1.96),
    _Element("Barium", "Ba", 2, 6, 56, 137.327, 2.17, 1.98, 0.0, 0.89),
    _Element("Berkelium", "Bk", 0, 7, 97, 247.0703, 1.70, 0.0, 0.0, 1.3),
    _Element("Beryllium", "Be", 2, 2, 4, 9.012182, 1.13, 0.89, 0.0, 1.57),
    _Element("Bismuth", "Bi", 15, 6, 83, 208.98037, 1.55, 1.52, 2.4, 2.0),
    _Element("Bohrium", "Bh", 7, 7, 107, 262.12, 0.0, 0.0, 0.0, 0.0),
    _Element("Boron", "B", 13, 2, 5, 10.811, 0.83, 0.88, 2.08, 2.04),
    # Bromine, phosphorus and sulfur carry an explicit hypervalent limit.
    _Element("Bromine", "Br", 17, 4, 35, 79.904, 0.0, 1.14, 1.95, 2.96, 12),
    _Element("Cadmium", "Cd", 12, 5, 48, 112.411, 1.49, 1.41, 0.0, 1.69),
    _Element("Caesium", "Cs", 1, 6, 55, 132.90543, 2.654, 2.35, 2.62, 0.79),
    _Element("Calcium", "Ca", 2, 4, 20, 40.078, 1.97, 1.74, 0.0, 1.0),
    _Element("Californium", "Cf", 0, 7, 98, 251.0796, 1.69, 0.0, 0.0, 1.3),
    _Element("Carbon", "C", 14, 2, 6, 12.011, 0.77, 0.77, 1.85, 2.55),
    _Element("Cerium", "Ce", 0, 6, 58, 140.115, 1.825, 1.65, 0.0, 1.12),
    _Element("Chlorine", "Cl", 17, 3, 17, 35.4527, 0.0, 0.99, 1.81, 3.16),
    _Element("Chromium", "Cr", 6, 4, 24, 51.9961, 1.25, 0.0, 0.0, 1.66),
    _Element("Cobalt", "Co", 9, 4, 27, 58.9332, 1.25, 1.16, 0.0, 1.88),
    _Element("Copper", "Cu", 11, 4, 29, 63.546, 1.28, 1.17, 0.0, 1.9),
    _Element("Curium", "Cm", 0, 7, 96, 247.0703, 1.74, 0.0, 0.0, 1.3),
    _Element("Dubnium", "Db", 4, 7, 104, 261.11, 0.0, 0.0, 0.0, 0.0),
    _Element("Dysprosium", "Dy", 0, 6, 66, 162.5, 1.77, 1.59, 0.0, 1.23),
    _Element("Einsteinium", "Es", 0, 7, 99, 252.083, 2.03, 0.0, 0.0, 1.3),
    _Element("Erbium", "Er", 0, 6, 68, 167.26, 1.76, 1.57, 0.0, 1.25),
    _Element("Europium", "Eu", 0, 6, 63, 151.965, 2.04, 1.85, 0.0, 1.2),
    _Element("Fermium", "Fm", 0, 7, 100, 257.0951, 0.0, 0.0, 0.0, 1.3),
    _Element("Fluorine", "F", 17, 2, 9, 18.9984032, 0.709, 0.58, 1.35, 3.98),
    _Element("Francium", "Fr", 1, 7, 87, 223.0197, 2.7, 0.0, 0.0, 0.7),
    _Element("Gadolinium", "Gd", 0, 6, 64, 157.25, 1.8, 1.61, 0.0, 0.94),
    _Element("Gallium", "Ga", 13, 4, 31, 69.723, 1.22, 1.25, 0.0, 1.81),
    _Element("Germanium", "Ge", 14, 4, 32, 72.61, 1.23, 1.22, 0.0, 2.01),
    _Element("Gold", "Au", 11, 6, 79, 196.96654, 1.44, 1.34, 0.0, 2.0),
    _Element("Hafnium", "Hf", 4, 6, 72, 178.49, 1.56, 1.44, 0.0, 1.5),
    _Element("Hahnium", "Hn", 8, 7, 108, 0.0, 0.0, 0.0, 0.0, 0.0),
    _Element("Helium", "He", 18, 1, 2, 4.002602, 1.28, 0.0, 1.22, 0.0),
    _Element("Holmium", "Ho", 0, 6, 67, 164.93032, 1.77, 1.58, 0.0, 1.24),
    _Element("Hydrogen", "H", 1, 1, 1, 1.00797, 0.78, 0.3, 1.2, 2.2),
    _Element("Indium", "In", 13, 5, 49, 114.818, 1.63, 1.5, 0.0, 1.78),
    _Element("Iodine", "I", 17, 5, 53, 126.90447, 0.0, 1.33, 2.15, 2.66),
    _Element("Iridium", "Ir", 9, 6, 77, 192.217, 1.36, 1.26, 0.0, 2.28),
    _Element("Iron", "Fe", 8, 4, 26, 55.845, 1.24, 1.16, 0.0, 1.83),
    _Element("Joliotium", "Jl", 5, 7, 105, 262.114, 0.0, 0.0, 0.0, 0.0),
    _Element("Krypton", "Kr", 18, 4, 36, 83.80, 0.0, 1.89, 1.98, 0.0),
    _Element("Lanthanum", "La", 3, 6, 57, 138.9055, 1.88, 1.69, 0.0, 1.1),
    _Element("Lawrencium", "Lr", 3, 7, 103, 262.11, 0.0, 0.0, 0.0, 0.0),
    _Element("Lead", "Pb", 14, 6, 82, 207.2, 1.75, 1.54, 0.0, 2.02),
    _Element("Lithium", "Li", 1, 2, 3, 6.941, 1.52, 1.23, 0.0, 0.98),
    _Element("Lutetium", "Lu", 3, 6, 71, 174.967, 1.72, 1.56, 0.0, 1.3),
    _Element("Magnesium", "Mg", 2, 3, 12, 24.30506, 1.6, 1.36, 0.0, 1.31),
    _Element("Manganese", "Mn", 7, 4, 25, 54.93805, 1.24, 1.77, 0.0, 1.55),
    _Element("Meitnerium", "Mt", 9, 7, 109, 0.0, 0.0, 0.0, 0.0, 0.0),
    _Element("Mendelevium", "Md", 0, 7, 101, 258.1, 0.0, 0.0, 0.0, 1.3),
    _Element("Mercury", "Hg", 12, 6, 80, 200.59, 1.60, 1.44, 0.0, 1.8),
    _Element("Molybdenum", "Mo", 6, 5, 42, 95.94, 1.36, 1.29, 0.0, 2.16),
    _Element("Neodymium", "Nd", 0, 6, 60, 144.24, 1.82, 1.64, 0.0, 1.14),
    _Element("Neon", "Ne", 18, 2, 10, 20.1797, 0.0, 0.0, 1.6, 0.0),
    _Element("Neptunium", "Np", 0, 7, 93, 237.0482, 1.5, 0.0, 0.0, 1.28),
    _Element("Nickel", "Ni", 10, 4, 28, 58.6934, 1.25, 1.15, 0.0, 1.91),
    _Element("Niobium", "Nb", 5, 5, 41, 92.90638, 1.43, 1.34, 0.0, 1.6),
    _Element("Nitrogen", "N", 15, 2, 7, 14.00674, 0.71, 0.7, 1.54, 3.04),
    _Element("Nobelium", "No", 0, 7, 102, 259.1009, 0.0, 0.0, 0.0, 0.0),
    _Element("Osmium", "Os", 8, 6, 76, 190.23, 1.35, 1.26, 0.0, 2.2),
    _Element("Oxygen", "O", 16, 2, 8, 15.9994, 0.6, 0.66, 1.4, 3.44),
    _Element("Palladium", "Pd", 10, 5, 46, 106.42, 1.38, 1.28, 0.0, 2.2),
    _Element("Phosphorus", "P", 15, 3, 15, 30.973762, 1.15, 1.10, 1.9, 2.19, 10),
    _Element("Platinum", "Pt", 10, 6, 78, 195.08, 1.38, 1.29, 0.0, 2.54),
    # NOTE(review): group=7, period=0 looks transposed; expected (0, 7).
    _Element("Plutonium", "Pu", 7, 0, 94, 244.0642, 0.0, 0.0, 0.0, 1.3),
    _Element("Polonium", "Po", 16, 6, 84, 208.9824, 1.67, 1.53, 0.0, 2.2),
    _Element("Potassium", "K", 1, 4, 19, 39.0983, 2.27, 2.03, 2.31, 0.82),
    _Element("Praseodymium", "Pr", 0, 6, 59, 140.90765, 1.83, 1.65, 0.0, 1.13),
    _Element("Promethium", "Pm", 0, 6, 61, 144.9127, 1.81, 0.0, 0.0, 0.94),
    _Element("Protactinium", "Pa", 0, 7, 91, 231.03588, 1.61, 0.0, 0.0, 1.38),
    _Element("Radium", "Ra", 2, 7, 88, 226.0254, 2.23, 0.0, 0.0, 0.89),
    _Element("Radon", "Rn", 18, 6, 86, 222.0176, 0.0, 0.0, 0.0, 0.7),
    _Element("Rhenium", "Re", 7, 6, 75, 186.207, 1.37, 1.28, 0.0, 2.2),
    _Element("Rhodium", "Rh", 9, 5, 45, 102.9055, 1.34, 1.25, 0.0, 2.28),
    _Element("Rubidium", "Rb", 1, 5, 37, 85.4678, 1.475, 0.0, 2.44, 0.82),
    _Element("Ruthenium", "Ru", 8, 5, 44, 101.07, 1.34, 1.24, 0.0, 2.2),
    _Element("Rutherfordium", "Rf", 6, 7, 106, 263.118, 0.0, 0.0, 0.0, 0.0),
    _Element("Samarium", "Sm", 0, 6, 62, 150.36, 1.8, 1.66, 0.0, 1.17),
    _Element("Scandium", "Sc", 3, 4, 21, 44.95591, 1.61, 1.44, 0.0, 1.36),
    _Element("Selenium", "Se", 16, 4, 34, 78.96, 2.15, 1.17, 2.0, 2.55),
    _Element("Silicon", "Si", 14, 3, 14, 28.0855, 1.17, 1.17, 2.0, 1.9),
    _Element("Silver", "Ag", 11, 5, 47, 107.8682, 1.44, 1.34, 0.0, 1.93),
    _Element("Sodium", "Na", 1, 3, 11, 22.989768, 1.54, 0.0, 2.31, 0.93),
    _Element("Strontium", "Sr", 2, 5, 38, 87.62, 2.15, 1.92, 0.0, 0.95),
    _Element("Sulfur", "S", 16, 3, 16, 32.066, 1.04, 1.04, 1.85, 2.58, 12),
    _Element("Tantalum", "Ta", 5, 6, 73, 180.9479, 1.43, 1.34, 0.0, 2.36),
    _Element("Technetium", "Tc", 7, 5, 43, 98.9072, 1.36, 0.0, 0.0, 1.9),
    _Element("Tellurium", "Te", 16, 5, 52, 127.6, 1.43, 1.37, 2.2, 2.1),
    _Element("Terbium", "Tb", 0, 6, 65, 158.92534, 1.78, 1.59, 0.0, 1.22),
    _Element("Thallium", "Tl", 13, 6, 81, 204.3833, 1.7, 1.55, 0.0, 2.33),
    _Element("Thorium", "Th", 0, 7, 90, 232.0381, 1.80, 0.0, 0.0, 0.0),
    _Element("Thulium", "Tm", 0, 6, 69, 168.93421, 1.75, 1.56, 0.0, 0.96),
    _Element("Tin", "Sn", 14, 5, 50, 118.71, 1.41, 1.4, 2.0, 1.96),
    _Element("Titanium", "Ti", 4, 4, 22, 47.867, 1.45, 1.32, 0.0, 1.54),
    _Element("Tungsten", "W", 6, 6, 74, 183.84, 1.37, 1.3, 0.0, 1.9),
    _Element("Uranium", "U", 0, 7, 92, 238.0289, 1.54, 0.0, 0.0, 1.26),
    _Element("Vanadium", "V", 5, 4, 23, 50.9415, 1.32, 0.0, 0.0, 1.63),
    _Element("Xenon", "Xe", 18, 5, 54, 131.29, 2.18, 2.09, 2.16, 2.6),
    _Element("Ytterbium", "Yb", 0, 6, 70, 173.04, 1.94, 1.7, 0.0, 1.27),
    _Element("Yttrium", "Y", 3, 5, 39, 88.90585, 1.81, 1.62, 0.0, 1.22),
    _Element("Zinc", "Zn", 12, 4, 30, 65.39, 1.33, 1.25, 0.0, 1.65),
    _Element("Zirconium", "Zr", 4, 5, 40, 91.224, 1.6, 1.45, 0.0, 1.3)
]

# Module-level singleton through which all element lookups should go.
PeriodicTable = _PeriodicTable(_elements)
You’re Engaged? Great: Start Pre-Marital Counseling! An oldie but goodie because the information is simply timeless. Since this post was initially shared, we discovered TherapyforBlackGirls.com. Check them out. Another therapist resource to have on your radar is RobinStone.com. Congratulations! Your beloved popped the question or maybe you popped the question — hey, it is 2018 — and now you’re ready to begin planning your wedding…right? Not so fast. Before your mama prepares an engagement announcement for your hometown newspaper, before you begin creating a Pinterest board of your favorite wedding gowns, before you go crazy posting a gazillion pics of your engagement ring bling on Facebook and Instagram, Triple B highly suggests you do one thing. Marriage Matters Monday – Wanda & Marvin Celebrate 47 Years of Marriage! Talented photographer Amber Robinson of Images by Amber shared these images of her parents last week on social media and they are going viral — as they should. Even more beautiful than the pics are her accompanying caption. Amber also shared some fun facts exclusively with Triple B about her parents like the fact that her mother’s sequins gown was scored at…Wait for it: Amazon! This entire feature is an example of the power of love and family. Enjoy! The Road to Mrs. – I’s Married Now! Our resident bride-to-be Keisa is officially a Mrs.! Married for nine years, actors Tia Mowry and Cory Hardict are one of the couples featured in OWN’s Black Love docuseries. The Road to Mrs. – Keisa’s Wedding Day Countdown: I’m Getting Married in 8 days! The Road to Mrs. – Keisa’s Wedding Day Countdown: I’m Getting Married In Two Weeks! Rocking all white when I’m feeling Godly! The Road to Mrs. – RSVP for FOREVER! Resident bride-to-be Keisa’s parents have been married 40 years! I absolutely love being natural. Natural hair is all kinds of amazing and extremely versatile. 
Since making the transition almost 10 years ago, I vowed to never return to the creamy crack…even on those days when my natural curls seem most unruly or when my twist outs are just, well, “out”, I swear I will never revert back to old my old ways! The Road to Mrs. – Showered With Love At My Bridal Shower! Triple B’s resident bride-to-be flanked by loved ones during her bridal shower. The Road to Mrs. – The Name Game: What to Do With My Maiden Name? Introducing Mahogany Essence Wedding Brooms! Black Bridal Bliss Presents Mahogany Essence Brooms for Your Big Day! The Road to Mrs. – 100 Days to Go! The union of two people, two independent individuals with their own set of values, morals, and ideas about finances, marriage, and children, can lead to an extremely successful union or one that is destined to fail — sometimes very early on. Understanding that a marriage continues beyond the wedding day, we knew early on that we would seek premarital counseling.
#!/usr/bin/python2.7
"""Minimal TCP listener (Python 2) that answers every connection with a fixed
reply and, rate-limited, invokes external handler scripts with a JSON payload
describing the connecting host."""

import socket, sys, json, os, subprocess, datetime, time
from thread import *


# Resolve a config value with the following priority chain:
#   environment variable (upper-cased key) > conf.json file > built-in default.
# "handlers" is special-cased: an env var value is a comma-separated list.
def read_config(conf_var):
    try:
        conf_value = os.environ[conf_var.upper()]
        if conf_var == "handlers":
            conf_value_list = conf_value.split(",")
            return conf_value_list
    # NOTE(review): bare excepts hide everything, including typos; they should
    # be KeyError. Left as-is here since the fallthrough order is the contract.
    except:
        try:
            conf_value = conf_file[conf_var]
        except:
            try:
                conf_value = default_conf[conf_var]
            except:
                print "critical - missing config value for " + conf_var
                sys.exit(2)
    return conf_value


# Per-connection thread: send the configured canned reply and hang up.
# Reads the module-level `reply` set during startup.
def client_thread(conn):
    # Sending message to connected client
    conn.send(reply)
    conn.close()


# Run every configured handler script with a JSON blob of {hostname, ip, time},
# but at most once per `timeout` seconds. Returns the timestamp of the last
# run (unchanged if this call was suppressed by the rate limit), which the
# caller stores back into `last_run_unix_time`.
# NOTE(review): builds a shell command with shell=True; offender_ip comes from
# accept() so it is a plain address string, but handler/handler_exec come from
# config -- confirm those are trusted before reuse.
def run_handlers(handlers):
    now = datetime.datetime.now()
    current_run_unix_time = time.time()
    if current_run_unix_time > float(last_run_unix_time + timeout):
        for handler in handlers:
            json_reply = str(json.dumps({ "hostname": hostname, "ip": offender_ip, "time": now.isoformat() }))
            subprocess.call([handler_exec + " " + handler + " '" + json_reply + "'"], stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True, cwd="handlers")
        return current_run_unix_time
    else:
        return last_run_unix_time


# Built-in fallback configuration; note "handlers" has no default, so it must
# come from the env var HANDLERS or from conf.json.
default_conf = {"port": 8888, "interface": "", "reply": "", "timeout": 300, "handler_exec": "/usr/bin/python2.7"}

try:
    conf_file = json.load(open("conf.json"))
    print "loaded conf.json file"
except:
    # Missing/invalid conf.json is non-fatal: read_config falls back to
    # env vars and defaults.
    print "Warning - unable to load correctly phrased json config file"

try:
    port = int(read_config('port'))
    interface = str(read_config('interface'))
    timeout = int(read_config('timeout'))
    reply = str(read_config('reply'))
    handlers = read_config('handlers')
    handler_exec = read_config('handler_exec')
    hostname = socket.gethostname()
    last_run_unix_time = 0
except:
    print "critical - problem setting config variable"
    sys.exit(2)

# Bind the listening socket and start accepting connections.
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
print 'Socket created'

try:
    s.bind((interface, port))
except socket.error as msg:
    print 'Bind failed. Error Code : ' + str(msg[0]) + ' Message ' + msg[1]
    sys.exit(2)
print 'Socket bind complete'

s.listen(5)
print 'Socket now listening'

# Accept loop: record the peer address, fire the (rate-limited) handlers,
# then answer the client on a detached thread.
# NOTE(review): offender_ip is a module-level global also read by
# run_handlers; with overlapping connections a later accept can overwrite it
# before handlers run -- confirm this race is acceptable for this tool.
while 1:
    conn, addr = s.accept()
    offender_ip = addr[0]
    print "attempted connection from " + offender_ip
    last_run_unix_time = run_handlers(handlers)
    start_new_thread(client_thread, (conn,))

# Unreachable while the loop above runs forever; kept for symmetry.
s.close()
"Gingerbread House & Landscape Inhaler" A building from the 1950’s, originating in the style of an old picturesque farmhouse, should be adapted to the desires of the new inhabitants and be extended. Since the property is located in the country scape (Freiland) of Graz, we were allowed to exactly double the existing living space in m2 from the old house. Therefore, to the romantic introverted farmhouse - the so called Gingerbread House - an extroverted Landscape Inhaler was added , Two very different living conditions could be created.
""" Run with 'python -m horsephrase._regen_words > horsephrase/words.txt' - Stop allowing words less than 3 characters; if we have the possibility of words that short, it's trivially possible to attack the password as letters rather than as selections from this list. - Add words (both sourced from https://norvig.com/ngrams/) from a list of correctly-spelled words (the YAWL) in the order of word frequency (count_1w.txt) until we reach a desirable count - The oldest recorded living human -- courtesy of this list https://en.wikipedia.org/wiki/List_of_oldest_living_people - is presently 116 years and 74 days old. Determine the desirable count from the number of guesses that will exceed that time with a 5-word passphrase assuming a trillion guesses per second. - Remove words that are offensive, triggering or otherwise in poor taste. Horsephrases should be communicable to people over phone lines without being embarassing, offensive or unprofessional. If generating a horsephrase generates something offensive, add the sha256 of the offending word to _removed_words, run the command at the start of this module, and open a PR with both changes. """ if __name__ != "__main__": raise ImportError("module is not importable") import hashlib import itertools import requests # There's a whole bit about the oldest # living human or something. NUM_WORDS = 23600 # Removed words are specified by their hash, # as we do not want to offend people who read the source. 
# Words banned from the generated list, identified only by their SHA-256
# digests so the offensive words themselves never appear in the source.
_removed_words = {
    '31506a8448a761a448a08aa69d9116ea8a6cb1c6b3f4244b3043051f69c9cc3c',
    'e9b6438440bf1991a49cfc2032b47e4bde26b7d7a6bf7594ec6f308ca1f5797c',
}


def get_words(session):
    """Yield correctly-spelled words in descending frequency order.

    The YAWL word list supplies the spelling check; count_1w.txt supplies
    the frequency ordering.
    """
    spelled = set(session.get("https://norvig.com/ngrams/word.list").text.split())
    frequency_data = session.get("https://norvig.com/ngrams/count_1w.txt").text
    for entry in frequency_data.splitlines():
        word, _count = entry.split()
        if word in spelled:
            yield word


def valid_words(words):
    """Yield only words long enough to resist letter-wise attack and whose
    SHA-256 digest is not on the removal list."""
    for word in words:
        if len(word) > 3:
            fingerprint = hashlib.sha256(word.encode('ascii')).hexdigest()
            if fingerprint not in _removed_words:
                yield word


# Take the first NUM_WORDS acceptable words and emit them alphabetically.
chosen = itertools.islice(valid_words(get_words(requests.Session())), NUM_WORDS)
for word in sorted(chosen):
    print(word)
Just before you complete an application for law school, make certain that it's a choice you have to produce. You're guaranteed of free plagiarism account for virtually any mission completed by our writing service team. All our writers pass a considerable procedure to have a check at their abilities. In addition, search closely in how many payment methods that the provider provides. Through direct communication, customers have the ability to make some changes which may result and therefore making sure your newspapers are of high caliber. Just then a firm goes to be thought of a booming business and bring new clients. You may consistently reach from a writer to furnish additional information or ask advice regarding the sequence's progress. The fantastic issue is that assistance is easily available online. When you should be using a web site to get a source, assess perhaps the page was revised recently. As a quantity of bathrooms will be supplied, the property gets workable for most families with enterprise owners. Thus, you might rest assured your termpaper service goes to be sent with a pro. Even being ready to finish the paper that you don't know what to start out with. with only a couple clicks. Are you really seeking an entirely free argumentative essay on tech topic. So in the event that you'd like to avoid your self by getting conned, you ought to learn testimonials of those respective writers around our site and create your selection. Writers' services create your life a little less complicated, as you do not will need to devote hours paper writing. So, have a glimpse at the business social networking profiles as well as think about moving right through independent review platforms in the event that you've got a opportunity. Further essay authors are prepared to manage missions of the maximum amount of issue. Some times it's sensible to cover slightly more and be certain the informative article will likely probably be outstanding. 
It needs to develop into illustrative. Affordable customized article writing is currently permitted by our adaptive authors, that compose various varieties of essays in line with needing the purchaser. Choosing online essay writers isn't a nightmare ahead. Keep Composing You aren't likely to secure improved should you not ever write. Our authors are likely to be thrilled to finish any kind of re vision within the specified period. All the difficult job is going to be achieved by the certified professionals. On handling the unwanted Being an dilemma of reality all of my tips concentrate. Research newspaper topics on illegal immigration can learn more about the subject from a collection of perspectives. After the investigation is done, you have to have a check in the data and draw conclusions concerning the result. Composing essays on the internet is a more efficacious means to earn revenue. Freelance Writing Isn't for everyone In the event you're not the type of individual who relishes the aforementioned mentioned scenarios and suggestions on the ideal method to deal with that, then you definitely have beenn't appropriate to your essay writer on-line way of life. It's so popular, as you don't really want to accomplish whatever because a composition writer will finish your mission fromscratch. Just examining the firm's web site to understand whether the essay author can deal with your paper well is inadequate. Someone may stay in contact a writer and control the method of accomplishment. One of the primary options that have a great producing service is the panel of authors. There really are a couple unique solutions our article writers perform for the customers. Like a way to make sure jurisdiction, attempt to learn the name of the au thor because an anonymous article isn't believed dependable. When you purchase the aid of on-line composition writer, it's clear which you desire to come across the cheapest value quote potential. 
You can also communicate with your writer, and he will be happy to explain where you have made mistakes and how to make the paper look better.
"""This module containts the abstract class Player and some implementations.""" from random import shuffle from card import Suit, Rank, Card, Deck from rules import is_card_valid class Player: """ Abstract class defining the interface of a Computer Player. """ def pass_cards(self, hand): """Must return a list of three cards from the given hand.""" return NotImplemented def play_card(self, hand, trick, trick_nr, are_hearts_broken): """ Must return a card from the given hand. trick is a list of cards played so far. trick can thus have 0, 1, 2, or 3 elements. are_hearts_broken is a boolean indicating whether the hearts are broken yet. trick_nr is an integer indicating the current trick number, starting with 0. """ return NotImplemented def see_played_trick(self, trick, trick_nr): """ Allows the player to have a look at all four cards in the trick being played. """ pass class StupidPlayer(Player): """ Most simple player you can think of. It just plays random valid cards. """ def pass_cards(self, hand): return hand[:3] def play_card(self, hand, trick, trick_nr, are_hearts_broken): # Play first card that is valid for card in hand: if is_card_valid(hand, trick, card, trick_nr, are_hearts_broken): return card raise AssertionError( 'Apparently there is no valid card that can be played. This should not happen.' ) class SimplePlayer(Player): """ This player has a notion of a card being undesirable. It will try to get rid of the most undesirable cards while trying not to win a trick. 
""" def __init__(self, verbose=False): self.verbose = verbose if verbose: deck = Deck() deck.cards.sort(key=self.undesirability) self.say('Card undesirability: ') for card in deck.cards: self.say('{}: {}', card, self.undesirability(card)) def say(self, message, *formatargs): if self.verbose: print(message.format(*formatargs)) def undesirability(self, card): return ( card.rank.value + (10 if card.suit == Suit.spades and card.rank >= Rank.queen else 0) ) def pass_cards(self, hand): hand.sort(key=self.undesirability, reverse=True) return hand[:3] def play_card(self, hand, trick, trick_nr, are_hearts_broken): # Lead with a low card if not trick: hand.sort(key=lambda card: 100 if not are_hearts_broken and card.suit == Suit.hearts else card.rank.value) return hand[0] hand.sort(key=self.undesirability, reverse=True) self.say('Hand: {}', hand) self.say('Trick so far: {}', trick) # Safe cards are cards which will not result in winning the trick leading_suit = trick[0].suit max_rank_in_leading_suit = max([card.rank for card in trick if card.suit == leading_suit]) valid_cards = [card for card in hand if is_card_valid(hand, trick, card, trick_nr, are_hearts_broken)] safe_cards = [card for card in valid_cards if card.suit != leading_suit or card.rank <= max_rank_in_leading_suit] self.say('Valid cards: {}', valid_cards) self.say('Safe cards: {}', safe_cards) try: return safe_cards[0] except IndexError: queen_of_spades = Card(Suit.spades, Rank.queen) # Don't try to take a trick by laying the queen of spades if valid_cards[0] == queen_of_spades and len(valid_cards) > 1: return valid_cards[1] else: return valid_cards[0]
A hugely versatile modular sofa with a casual aspect and a soft, yielding, malleable comfort. Consisting of simple block units which can be placed in a geometrical configuration but with irregular contours. A multi-density polyurethane foam core on a double panelled poplar frame with a quilted goose down envelope, fully removable covers in a range of fabrics and leathers. Adjustable black feet. NB: images show several units in conjunction - this base unit is 94cm wide x 94cm deep x 35cm high.
#!/usr/bin/python2.4
#
# Copyright 2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ========================================================================

"""A Hammer-specific wrapper for generate_group_policy_template."""

from omaha.enterprise import generate_group_policy_template


def BuildGroupPolicyTemplate(env, target, apps, apps_file_path=None):
  """Registers a build-time step that emits a Group Policy ADM template.

  Defers WriteGroupPolicyTemplate() to the build phase (rather than the
  processing stage) and wires up dependencies so the ADM file is rebuilt
  when its generator script or apps definition changes.

  Args:
    env: The environment.
    target: ADM output file.
    apps: A list of tuples containing information about each app.  See
        generate_group_policy_template for details.
    apps_file_path: Optional path to the file that defines apps.  Used to
        enforce dependencies.
  """

  def _generate_adm(target, source, env):
    """Build-phase action: render the ADM template into target[0]."""
    del source  # Unused; SCons actions must accept (target, source, env).
    output_path = env.File(target[0]).abspath
    generate_group_policy_template.WriteGroupPolicyTemplate(
        output_path,
        env['public_apps'])
    return 0

  # The apps data rides along on the node's environment under 'public_apps'.
  adm_node = env.Command(
      target=target,
      source=[],
      action=_generate_adm,
      public_apps=apps
  )

  # Rebuild the ADM file whenever the generator script (or, when given, the
  # apps definition file) changes.
  extra_dependencies = ['$MAIN_DIR/enterprise/generate_group_policy_template.py']
  if apps_file_path:
    extra_dependencies.append(apps_file_path)
  env.Depends(adm_node, extra_dependencies)
BMW’s new inline-6 gasoline engine. Beginning with model year 2011 production, several BMW models (135i, 335i, 535i, 535i Gran Turismo, X5 xDrive35i, and X6 xDrive35i) will switch to the new, single twin-scroll turbocharged inline-6 “N55” engine. The newest BMW inline-6 engine is the first BMW inline-6 to combine turbocharging, High Precision direct fuel injection, and Valvetronic variable intake technology. It features a single, mid-sized turbocharger with a “twin-scroll” housing to boost performance and minimize the response lag. Together, this combination produces the same at 300hp and 300 lb-ft of the previous N54, but the fuel efficiency improves and the emissions are reduced. A further advantage of turbocharging is that this is the most weight-efficient method to boost engine power and performance. The N55 turbocharged inline-6 weighs approximately 150 lbs. less than an equally powerful eight-cylinder engine displacing 4.0 liters. This lower weight means a significant advantage not only in fuel economy, but also in balancing the car’s weight distribution. Using Valvetronic for the first time on a turbocharged inline-6 allows the engine to “inhale” air for combustion with virtually no delay and with reduced pumping losses. As a result, the engine makes power more quickly than ever before, and this is proven by the N55’s ability to reach peak torque at 1200rpm, 200rpm earlier than its twin-turbo predecessor [N54]. Packaging is better for the new engine, as the BMW Digital Motor Electronics (DME) control unit is now mounted directly to the engine, shortening the required wiring harness length and providing more efficient cooling to the DME through the use of intake air. Furthermore, using a single turbocharger means only a single exhaust stream must find its way to the catalytic converter, so only a single catalytic converter is used on the new engine. 
The single catalytic converter can be activated more quickly at cold start than the preceding twin-catalyst configuration, so the new engine has a cleaner cold start emissions signature than its predecessor. The new “N55” engine will be installed in the 135i Coupe and Convertible, the 335i Sedan, Coupe and Convertible (excludes 335is models), the new 535i Sedan when it arrives this summer, the 535i Gran Turismo arriving this spring and finally, the X5 xDrive35i and X6 xDrive35i Sports Activity Vehicles® starting with April production. 2011 BMW 1 Series Coupe & Convertible. In the 1 Series, BMW introduced a modern and authentic performance coupe that draws inspiration from the iconic BMW 2002 model of some 40 years ago. By combining sporty rear wheel drive dynamics, agile handling, powerful engines, and seating for four, the 1 Series Coupe rekindles the flames that the BMW 2002 first ignited. The 2011 model year 1 Series Coupe and Convertible went on sale in spring and are available in two versions: the 128i and the 135i. For model year 2011, the BMW 135i receives the all-new “N55” inline-6 engine (see Page 1), and the sporty 7-speed Dual Clutch Transmission from the Z4 Roadster replaces the 6-speed Steptronic automatic transmission. The 2011 128i Coupe and Convertible continue to be powered by the 3.0-liter, 230 horsepower inline 6-cylinder engine that generates 200 lb-ft of torque. The engine features Valvetronic throttle-less intake technology and magnesium /aluminum lightweight construction. An M Sport Package is now available on all 1 Series models, with sporty components perfectly matched to each vehicle. Last year, the 1 Series received interior enhancements which include the 4th generation iDrive system when equipped with optional BMW Navigation. It features new menus, new direct-select keys as well as Programmable Memory Keys. 
With access to GoogleMaps database search capability, subscribers to the optional BMW Assist Convenience Plan can access BMW Search to find a point of interest with a keyword. 2011 BMW 3 Series Sedan & Sports Wagon. The 2011 3 Series Sedan and Sports Wagon are poised to maintain their legendary market leadership with the latest exterior and interior styling, state-of-the-art technologies, and superb ergonomics. The 2011 335i Sedan receives BMW’s remarkable new turbocharged inline-6 engine with Valvetronic throttle-less intake technology and High Precision direct fuel injection, coupled with a single twin-scroll turbocharger. Output is 300 horsepower. The celebrated 6-speed manual transmission remains standard and the 6-speed Steptronic automatic is optional. 328i models continue to feature the 3.0-liter, 230 horsepower inline-6 engine with magnesium / aluminum lightweight construction and Valvetronic. With a subtle freshening in 2009, the benchmark 3 Series received updates to exterior design features, a more dynamic profile, and a wider stance. The 4th generation iDrive controller and high-resolution display are now included when equipped with the Navigation system option. Popular on 3 Series models is an M Sport Package, with sporty components perfectly matched to each vehicle. Beyond the popular Sport Package, the M Sport Package includes an Anthracite headliner, an exterior aerodynamic package with BMW Individual Shadowline trim, and M-specific wheels, door sills, footrest, gearshift lever, and steering wheel. The M exterior color LeMans Blue Metallic is also available on 3 Series models with the M Sport Package. BMW has announced substantial enhancements to the 2011 3 Series Coupe and Convertible, including outstanding style updates for 328i and 335i models and a new engine for 335i models. A new model, the high-performance 335is, also becomes available for the 2011 model year (see next section). 
All 2011 3 Series Coupe and Convertible models went on sale in Spring 2010. All-new is the engine for the 335i model (see Page 1). The 6-speed manual gearbox, beloved by North American enthusiast drivers, remains standard, and the well-known 6-speed Steptronic automatic is optional, now available with “pull-style” shift paddles on the multifunction steering wheel. The profiles of the BMW 3 Series Coupe and Convertible are instantly recognizable at first glance. Beyond the basic shape of the cars and their classic lines, the new design of the headlights, the enhanced depth and detail of the taillights, the interior, and even the exterior mirrors were specifically created for the Coupe and Convertible. These unique design elements reflect the sporty character of the car. The driver who chooses a two-door vehicle desires a car with dynamic looks as well as driving performance. With this in mind, BMW engineers and designers set out to create a unique, unmistakable blend of design features and most importantly, driving pleasure. The hot new BMW 335is Coupe and Convertible are each destined to take a place in history among the line of desirable, often collectible, BMW “s” models. Based on the newly-enhanced BMW 3 Series Coupe and Convertible (see previous page), the new BMW 335is features the award-winning, twin-turbocharged BMW inline-6 engine, specially tuned and equipped to produce 320 horsepower and 332 lb-ft of torque from its 3.0-liters. For the first time on a BMW 3 Series in the US, the 7-speed Double Clutch Transmission (DCT) with Launch Control will be available as an option, in place of the standard 6-speed manual transmission. Equipped with the DCT, the 335is Coupe is capable of 0-60mph acceleration in 5 seconds flat. The “s” was first seen by North Americans on the original BMW 3 Series, which was imported from 1977-1983. 
The 1980 BMW 320is included the most desirable 3 Series equipment, including Recaro sport seats, sport steering wheel, halogen high-beams, sunroof, dual color-keyed wing mirrors, anti-sway bars, exterior aerodynamic enhancements, and cross-spoke light-alloy wheels. Later in the1980s, the BMW 3 Series and 5 Series were each available as “s” models, including the BMW 325es, 325is, and 535is. Again, sticking with a proven formula, BMW gave the “s” models enhanced aerodynamics, sport suspension, on-board computer (OBC) and Check Control, sport leather seats, and other features. As the newest “s” model, therefore, the 2011 BMW 335is follows its lineage by offering stunning looks, more power, and a raft of functional, performance-inspired equipment tailored to suit the enthusiast driver. The world debut of the new BMW 5 Series Sedan marks the epitome of modern design and driving pleasure in the world of premium midsize sedans. Through its athletic and executive looks, driving dynamics typical of BMW, and innovative comfort and safety features, the sixth generation of BMW’s executive express accurately reflects the standards upheld by the world’s most successful manufacturer of premium cars. With the longest wheelbase in the segment, a long and sleek hood, short overhangs, and a coupe-like roofline, the new BMW 5 Series Sedan stands out clearly from the competition. Overall, the aesthetic looks of the car are achieved through balanced proportions. Design features emblematic of BMW are to be found at the front with its kidney grille inclined slightly forward, in the stylish and elegant side view of the car, and at the muscular rear. Within the interior, precise functionality and a fresh ambience come together in perfect harmony, unmistakable style, and clear orientation to the driver. Highly advanced drivetrain and suspension technologies combine agile handling for the driver and a high standard of riding comfort for passengers. 
When equipped with the Sport Package, the new BMW 5 Series Sedan includes Adaptive Drive, which encompasses Driving Dynamics Control, Electronic Damper Control, and Active Roll Stabilization. Optional Integral Active Steering will virtually extend or shorten the vehicle’s wheelbase for improved stability at high speeds and enhanced agility at lower speeds. The new BMW 5 Series Sedan is currently available with V-8 and inline-6 gasoline engines. The top-of-the-range BMW 550i is equipped with BMW’s “reverse-flow” V-8 engine featuring twin turbochargers and High Precision direct injection for maximum output of 400 hp. The new inline-6 of the BMW 535i (see Page 1) features a single twin-scroll turbocharger, High Precision direct injection, and, combined for the first time with turbocharging, Valvetronic throttle-less intake technology. It delivers a maximum output of 300 hp and 300 lb-ft. The 535i and 550i are available with a choice of 6-speed manual transmission, and BMW’s new 8-speed automatic transmission, or “Sport Automatic” 8-speed. The third variant of the 5 Series lineup is the 528i, featuring a 240 hp inline-6 with lightweight magnesium-aluminum construction and Valvetronic throttle-less intake technology for efficiency and unparalleled responsiveness. The 8-speed Steptronic automatic transmission is standard on the 528i. MSRP for the 528i is $45,425, including $875 Destination & Handling. Beginning with September production, xDrive will be available on the top two 5 Series models, with pricing for the 550i xDrive and 535i xDrive at $52,775 and $62,875 respectively. The M Sport Package returns to the 5 Series delivering performance and style for the most enthusiastic of BMW drivers. The package adds a number of M style visual cues to the Dynamic Handling Package. 
The M Aerodynamic Kit gives the 5 series a sportier and more aggressive look, while the M double-spoke light alloy wheels and performance run-flat tires (size 18” on the 528i and 19” on the 535i, 550i, 535i xDrive, and 550i xDrive) back up the look with impressive performance and handling. The M Steering Wheel adds to this package by providing the most ergonomic grip for confidence during dynamic maneuvers. To accentuate the M Sport package, the interior is fit with Aluminum Hexagon trim (wood available). BMW EfficientDynamics technologies are featured in appropriate combinations on each model, including features such as Brake Energy Regeneration, Electric Power Steering, a gearshift point indicator, active cooling air flaps, and on-demand operation of engine accessory drives. Lightweight materials are used intelligently to balance the vehicle. Components such as doors, hood, front fenders, and suspension assemblies are made of aluminum. The driver assistance systems offered with the new 5 Series include an all-new Parking Assistant, Top View cameras, and Frontal Collision Warning with application of the brakes when ordered with Active Cruise Control plus Stop & Go. Other driver assistance features available include Blind Spot Warning, Lane Departure Warning, a Head-Up Display, and BMW Night Vision with Pedestrian Detection. Entering its second model year, the 2011 BMW 5 Series Gran Turismo adds new facets to BMW’s Ultimate Driving Machine tradition and refines the concept of first-class travel. For the very first time, the attributes of an elegant sedan, a contemporary Sports Activity Vehicle and a classic Gran Turismo are masterfully melded into a single, harmonious vehicle concept. New for 2011 are the xDrive all-wheel drive equipped 5 Series Gran Turismo models. The 535i xDrive Gran Turismo will be priced from $59,175 and the 550i xDrive Gran Turismo will be priced from $67,075, in each case a $2300 premium over the traditional rear-wheel drive models. 
As a result of final confirmatory fuel economy testing, the highway and combined values for the 550i Gran Turismo have been increased by one MPG. This eliminates the gas guzzler tax on all 2011 5 Series Gran Turismo models. The traditional European term Gran Turismo (Grand Touring) was first applied by Italian carmakers in the 1950s when they combined the performance, handling, and style of a 2-seat sports car with luxurious amenities and luggage room for each occupant. BMW’s new Gran Turismo nurtures this tradition in its own contemporary way, providing exceptional space, style, luxury, and comfort for four passengers, plus the performance and driving pleasure of a BMW. Starting with September 2010 production, the 535i Gran Turismo will be available with xDrive, preserving the Gran Turismo’s high dynamic ability in all four seasons. This unique combination gives BMW’s newest 5 Series Gran Turismo a new level of performance and efficiency in the true spirit of BMW EfficientDynamics. BMW’s signature corona rings feature LED technology and take on several functions: as parking lights, as an identifying element when the headlights are on, and as daytime running lights. The BMW 5 Series Gran Turismo interior combines space, luxury and stylish materials to create a premium ambiance. In the rear seats as well as up front, there’s a sense of generous space and comprehensive amenities, promoting relaxed and accommodating travel. In standard form, the rear seating accommodates three passengers, with a 40-20-40 split of the backrest allowing for selective fold-down. In addition, the entire seat cushion can be adjusted 3.9-in. fore and aft and the individual backrest sections can be adjusted over a range of 15°-33°. Optional as part of the Luxury Rear Seating Package are individual, multi-adjustable power seats, separated by a center console with cupholders and a storage compartment. 
the sensor for Active Cruise Control with Stop & Go will be a smaller, less obvious sensor unit first introduced on the new 5 Series. The placement of the sensor on the 7 Series will remain to the right of kidney grille. The BMW 760Li Sedan went on sale in Fall of 2009 featuring the latest chapter in BMW’s decades-long history of harmonious V12 engines. This newest BMW V12 produces 535 horsepower and 550 pound-feet of torque with help from twin turbochargers and High Precision Direct Injection. It also marked the first use of BMW’s new 8-speed automatic transmission. The outcome is a 0-60 mph time of 4.5 seconds in a stylish, luxurious package. The long list of standard features and amenities ensures the comfort of each occupant. Also, in the continuing development of BMW’s flagship 7 Series, BMW’s intelligent xDrive all-wheel drive system has become available on the 750i and 750Li Sedans. In the 7 Series application, xDrive has been refined and enhanced to ensure a new benchmark is established for handling characteristics among all-wheel drive sedans. xDrive limits understeer by shifting power rearward (up to 80% to the rear axle) while preserving the stability for which BMW has been so widely recognized. Also, the system can now apply a precise combination of throttle and individual rear-wheel braking to maintain neutral handling in a corner, regardless of road surface conditions. These key enhancements to xDrive ensure that the 750i xDrive and 750Li xDrive are two of the most nimble all-wheel drive sedans available in any vehicle segment. For 2011, BMW announced the North American return of the 7 Series with an inline-6 engine. The new BMW 740i and BMW 740Li achieve a remarkable balance of power, efficiency, and sporty driving dynamics. Both models went on sale in the United States as 2011 models in Spring 2010. In 1977, the original BMW 7 Series was launched exclusively with inline-6 propulsion. 
The United States first met the BMW 7 Series in the 1978 model year as the 733i Sedan. The 733i featured a 3.2-liter inline-6 engine rated at 197 horsepower. The 733i remained on sale in America until it was replaced in 1985 by the BMW 735i Sedan. The 735i, which featured an updated inline-6 engine producing 218 horsepower from 3.4 liters, enjoyed a production run that lasted through the end of the 1992 model year. The 735i was joined by the extended-wheelbase 735iL in May of 1988. The 2011 model 740i and 740Li feature BMW’s internationally acclaimed twin-turbocharged 3.0-liter inline-6 engine with output of 315 horsepower. All-aluminum engine construction, High Precision direct fuel injection, VANOS variable camshaft technology, and Brake Energy Regeneration are a few of the technologies used under the BMW EfficientDynamics engineering philosophy to place the 740i and 740Li among the most powerful six-cylinder luxury sedans in the world. Delivering power to the rear wheels is BMW’s 6-speed automatic transmission, well-known for fast, smooth gearshifts and an ability to intelligently adapt to the driver. Both models are available with the full complement of 7 Series options and packages, including the M Sport Package, Driver Assistance Package, Luxury Seating Packages, Rear Entertainment Package, and even the BMW Individual Composition Package. The BMW ActiveHybrid 7 arrived on American shores late in the second quarter of 2010 as a 2011 model. Based on the 5th-generation BMW 7 Series, this unique car sets new standards for performance and efficiency in the premium luxury class. Symbiosis of a twin-turbocharged V-8 engine with a 3-phase synchronous electric motor gives the BMW ActiveHybrid 7 a combined output of 455 hp and maximum torque of 515 lb-ft from 2000 – 3000rpm. Power is transmitted via an 8-speed automatic transmission with the electric motor positioned between the engine and the torque converter. 
The electric motor receives its supply of energy from a lithium-ion battery developed specifically for this automobile. The BMW ActiveHybrid 7 accelerates to 60 mph in 4.7 seconds, while delivering better fuel efficiency than the 750i and 750Li. Auto Start Stop maximizes efficiency by avoiding engine idling when stopped. The ActiveHybrid 7 is distinguished by aerodynamically-tuned ten-spoke 19-inch Aero wheels, and is available in Bluewater Metallic exterior paintwork. The BMW ActiveHybrid 7L has a wheelbase increased by 5.5″, all of which is devoted to the enhanced comfort of rear-seat passengers. Both models are available with a wide range of options from the portfolio of equipment offered on the BMW 7 Series. The BMW ALPINA B7 represents the second generation of ALPINA automobiles based on the BMW 7 Series to be offered in the US. The first BMW ALPINA B7 was offered as a limited edition for the 2007 and 2008 model years. Newly interpreted and based on the current BMW 7 Series platform, the B7 again sets the benchmark in terms of performance, refinement, technology, luxury and exclusivity. The BMW ALPINA B7 is offered with xDrive intelligent all-wheel drive, and in both standard- and long-wheelbase editions. All B7 sedans are equipped with BMW’s twin-turbocharged 4.4 liter, all-aluminum, High Precision Direct Injection V-8 engine, enhanced by ALPINA to produce 500 (373kW) horsepower and 516 lb-ft (700Nm) of torque. This rear-wheel drive performance/luxury sedan achieves 0-60 mph in 4.5 seconds (4.6 seconds with xDrive). With maximum torque available across an unusually broad engine range, from 3,000 to 4,750 rpm, the B7 accelerates effortlessly from virtually any speed. The B7 sedan is finished with ALPINA aerodynamic developments, including front and rear spoilers which not only enhance the appearance but also improve stability and performance at high speeds. 
In true “form-follows-function” fashion, the ALPINA front integrates the fresh-air requirements of the transmission and engine oil coolers, which are separated out of the main cooler module for improved efficiency. The spoilers reduce lift at the front by 30% and rear by15%. The exhaust system’s two double tailpipes are seamlessly integrated into the rear bumper. The newest evolution of the traditional 20-spoke ALPINA Classic wheels gives the B7 a distinctive appearance and powerful stance. The interior of the B7 sedan features ALPINA blue illuminated door sill trims, the Switch-tronic steering wheel in hand-stitched Lavalina leather, and the Black-Panel LCD screen instrument cluster branded with ALPINA. The modern BMW 6 Series revived the essential attributes of its landmark 1976-89 predecessor: sporting, yet luxurious accommodations in the 2+2 format; sporty and gorgeous exterior design; strong, refined performance; an extra measure of sporty driving pleasure; and extensive standard equipment, encompassing outstanding luxury and convenience features. Summer of 2010 marks the end of production of the current generation 6 Series and M6 Coupe and Convertible. Look for a new 6 Series to be released in due time. For 2011, the BMW X5 models are enhanced with new powertrains and updated design. Innovative new design elements and powertrains are the result of over 4,000 new parts created by BMW designers and engineers for the 2011 BMW X5. Two of the X5’s powertrains are all-new for 2011, creating the new X5 xDrive50i ($59,275) and X5 xDrive35i ($46,675), in addition to the familiar BMW Advanced Diesel X5 xDrive35d ($52,175). All models receive a design update that enhances the brilliant focus and aesthetic appearance of the X5 brand. The gasoline models receive BMW’s new 8-speed automatic transmission for better acceleration and fuel efficiency. 
The X5 xDrive50i features BMW’s 4.4-liter “reverse flow” V-8 engine with 400 horsepower and 450 lb-ft of torque, coupled to the new 8-speed automatic transmission. The X5 xDrive35i features BMW’s newest inline-6 engine (see Page 1), with 300 horsepower and 300 lb-ft, also coupled to the 8-speed automatic. These two models are capable of 0-60mph acceleration in 5.3 seconds and 6.4 seconds, respectively, while achieving EPA highway fuel efficiency ratings of 20 and 23 miles per gallon, respectively. The updated body design of the 2011 BMW X5 is characterized by well-balanced proportions that accentuate the vehicle’s powerful presence and agility. A long wheelbase, short front and rear body overhangs, and large light-alloy wheels provide a visual balance between the front and rear of the vehicle. Square-shaped, carefully flared wheel arches hint at the X5’s all-wheel drive traction and other-roads capability. The body lines sweep upwards from the front and rear wheels, communicating BMW’s near-perfect 50-50 front-rear weight distribution which is so essential to the X5’s legendary driving dynamics. With its distinctively contoured hood, large BMW kidney grille and dual round headlights, the new BMW X5 boasts an expressive front end that symbolizes power and presence. The newest interpretation of the X5’s look is led by the redesign of the front fascia and position of the standard fog lamps. Due to an increase in the number of elements painted body color, the front end gives the impression of being lower. At the same time, the increased size of both the central and outer air intakes hint at the boosted engine output. The black trim at the bottom edge of the body is now considerably narrower. A matte-finished silver protection plate extends across the entire width of the central air intake, emphasizing the muscular features of the SAV, and accentuates the new X5’s character. 
An M Sport Package is now available on most X5 models, with sporty components perfectly matched to each vehicle. Last year showcased the introduction of the BMW Advanced Diesel-powered X5 xDrive35d. It features a 3.0-liter sequential twin-turbo inline-6 diesel engine with 265 horsepower and 425 lb-ft of torque. The X5 xDrive 35d offers class-leading performance and fuel efficiency: 0 – 60 mph in 6.9 seconds and an equally impressive 19 mpg city/26 mpg highway EPA rating. What if you could have the other-roads capability of BMW’s celebrated X5, combined into one vehicle with the presence and remarkable driving dynamics of a BMW Coupe? The BMW X6 Sports Activity Coupe is equally at home taking the family skiing, or taking a few laps around the famous Nϋrburgring. The superb, even remarkable driving dynamics of the X6 are attributable to another BMW innovation: Dynamic Performance Control. DPC distributes torque as required in all situations, feeding a varying degree of power and torque to the two rear wheels. DPC is the only system of its kind able to provide its stabilizing effect both under acceleration and deceleration. The X6 is available with a choice of twin-turbocharged engines. The 2011 X6 xDrive35i ($57.375) features BMW’s all-new 3.0-liter twin-scroll turbo inline-6 with 300 horsepower and 300 lb-ft of torque. The 2011 X6 xDrive50i ($68,075) features BMW’s 4.4-liter twin-turbo V-8 with 400 horsepower and 450 lb-ft of torque. Both models feature the new 8-speed Sport Automatic transmission. The BMW ActiveHybrid X6 ($89,775) made its world premiere at the Frankfurt Motor Show (IAA) in September 2009 and went on sale at US BMW Centers in December 2009. BMW ActiveHybrid technology provides outstanding driving performance with equally impressive efficiency. It melds both of these qualities to offer a truly unique driving experience. 
The drive system featured in the BMW ActiveHybrid X6 consists of a 400 hp twin-turbocharged V-8 gasoline engine and two electric synchronous motors delivering 91 hp and 86 hp, respectively. Maximum system output is 480 hp, and peak torque reaches 575 lb-ft. ActiveHybrid technology offers a boost to performance and simultaneously improves fuel efficiency by approximately 20 percent versus a comparable vehicle powered by a combustion engine alone. The result is an enjoyable driving experience and an increase of four miles per gallon to 17 mpg on the City drive cycle (EPA). Precisely controlled interaction of the gasoline engine and electric motors contributes to the efficiency of the BMW ActiveHybrid X6 at all speeds. 0-60 mph takes 5.4 seconds. Top speed is limited electronically to 130 mph. These performance figures are made possible by utilizing a two-mode active transmission. The ideal combination of the two power modes can be controlled for enhanced efficiency and dynamic performance in any driving condition. BMW’s first full hybrid model is able to run exclusively on electric power – entirely free of CO2 – up to a speed of 37 mph for up to 1.6 miles. The all-new BMW X3 Sports Activity Vehicle will be introduced late in 2010 as a 2011 model, and will go on sale early in the first quarter of 2011. In the United States, the X3 xDrive28i and X3 xDrive35i will be available. Read the complete press release at bmwusanews.com. The classic roadster is back – more powerful and more stylish than ever before. It is the BMW Z4, the only car in its segment that combines classic roadster proportions with a seating position near the rear axle, rear-wheel drive, and a retractable hardtop. Nothing matches the BMW Z4’s ability to offer all the driving pleasure of a BMW Roadster with particularly refined and stylish flair. Roof down, this two-seater offers a refreshing experience of sunshine wind. Drive it with the hardtop closed and it provides all the comfort of a sports coupe. 
The design of the new Z4 comes out through classic details interpreted in new, up-to-date style. This outstanding two-seater offers a unique combination of elegance, agility, and comfort. The aluminum shells of the two-piece lightweight hardtop come to rest in the roof compartment, saving maximum space in the process. Even with the roof closed, the new Z4 retains the proportions typical of a genuine roadster. The new BMW Z4 is offered in the US with two well-known and acclaimed inline-6 engines, both displacing 3.0 liters: 300 hp in the BMW Z4 sDrive35i and 255 hp in the BMW Z4 sDrive30i ensure truly outstanding acceleration and response at all times. Comprehensive use of BMW’s EfficientDynamics technologies serves to provide an unparalleled balance of driving pleasure and fuel efficiency. While both models feature a manual 6-speed gearbox, the Z4 sDrive35i is available with the 7-speed Double Clutch Transmission (DCT). The 6-speed sport automatic is optional on the Z4 sDrive30i. An all-new exclusive Citrus Yellow package will be available as of September 2010 production on all Z4 models. Unique to this package are a number of premium features. First, Sport seats with an exclusive black Alcantara/leather combination cradles passengers in comfort and luxury. The center of the seats and backrest panels are black Alcantara with yellow accents, matching the unique color scheme of the car. The armrests and center console are draped in black Nappa leather with door inserts in yellow Alcantara featuring a double lapped seam. The center of the passenger-side dashboard is also fit with black Alcantara, while the lower dashboard is colored in Citrus Yellow. Unique for this package is the Individual Piano Black wood trim, anthracite colored headliner and exclusive Atacama Yellow exterior. The new BMW Z4 sDrive35is ($61,925) is one of BMW’s fastest and best-handling Roadsters ever. 
The turbocharger system developed for the engine of the new BMW Z4 sDrive35is maintains a high output throughout the entire engine speed range. Nominal peak torque is increased to 332 lb-ft from 1,400 rpm to 4,500 rpm. Maximum output of 335 hp is delivered at 5,800 rpm. The BMW Z4 sDrive35is will complete the sprint from a standstill to 60 mph in 4.7 seconds (preliminary). Acoustic tuning of the exhaust system of the BMW Z4 sDrive35is gives the car a very unique sound across the speed range. The sound is characterized by a deep rumble with a focus on the low frequency sound range. The BMW Z4 sDrive35is includes new M Sport upgrades such as M Aerodynamics and adaptive M Suspension. The adaptive M Suspension combines a ride-height reduction of 10 millimeters (almost 0.4”) with electronically controlled shock absorbers to improve the vehicle’s agility without compromising ride comfort. The M Sport upgrades emphasize the Z4’s athletic character through aerodynamically optimized exterior features and customized interior components. Special 18-inch light-alloy wheels round off the sporty character of the Z4 sDrive35is. 19-inch light-alloy wheels are optional. In addition, the new model is further distinguished by a striking front bumper with a matte aluminum trim bar in each outer air intake, a rear bumper with accentuated surrounds on the tailpipes, and a rear diffuser finished at top in body color and black textured surface underneath. Inside, a full complement of M Sport details augments the new Aluminum Carbon interior trim and available new Kansas Walnut leather. When Atacama Yellow is ordered on a Z4 sDrive35is (see Citrus Yellow package, Page 11), the mirror caps will be yellow rather than the oxide silver as is standard on other 35is models. The all-new Exclusive Citrus Yellow package will be available in combination with the M Sport Package. BMW raised the performance benchmark once again with the introduction of the 4th generation M3. 
The latest M3 is again available, as was the 2nd generation, as a Sedan ($56,275), Coupe ($59,275) or Convertible ($67,925). All share the unique, high-revving, 414-horsepower V-8 engine and choice of 6-speed manual or 7-speed M Dual Clutch Transmission with Drivelogic. For the M3’s 2011 model year, which started in March 2010, buyers of the M3 Sedan and M3 Coupe can choose a new Competition Package which reduces vehicle ride height by 10mm and includes unique 19” wheels with wider offset for enhanced stability, standard Electronic Damping Control with enhanced programming, and higher-threshold programming for the M Dynamic Mode (if equipped). Activate the “second level” of Dynamic Stability Control known as M Dynamic Mode (MDM). Activate the Sport setting of the Servotronic variable power steering system. Arm the shift lights (when equipped with M-DCT). Select the pre-stored setting for Electronic Damping Control (EDC), if equipped. Select the pre-stored setting for Drivelogic (when equipped with M-DCT). Finally, BMW’s Auto Stop-Start feature becomes standard on the M3 with September production. Auto Start-Stop is a BMW EfficientDynamics fuel saving measure previously unavailable in the US except on ActiveHybrid models. Auto Stop-Start will automatically stop the engine when the transmission is placed in Neutral and the clutch is released. The engine will immediately restart as soon as the clutch is depressed. The system is driver selectable and will only function once the engine is up to operating temperature and the ambient temperature is between 37 Degrees F and 85 Degrees F. The new Manual M Sport Seats (M3 Sedan, Coupe) offer the same adjustments as the previous electric seats but with manual adjustment in order to save weight (approximately 5kg per seat) and expense when power memory seats are not required. Lumber support and seatback side bolters remain electrically adjustable. Power Seats remain standard in the M3 Convertible. 
The X5 M and X6 M are unchanged for the 2011 model year. Finding new ways to expand the boundaries of what is possible with existing BMW models has always been the singular purpose of the craftsmen at BMW M. The BMW X5 M and the BMW X6 M are the first all-wheel-drive models to offer the remarkable performance, dynamic driving experience, athletic design, and premium quality of a BMW M product. Both models are powered by a new-concept BMW M engine that departs purposefully from the established ultra-high-revving V-8 and V-10 M engines powering the M3, M5 and M6 models. For power, torque, and response characteristics, BMW M has adapted twin turbocharging, High Precision direct fuel injection and accompanying new technologies that achieve the ever-consistent BMW M goal of the highest levels of performance and refinement. Thanks to a special, M-tuned version of BMW’s Integrated Chassis Management, on-board systems including xDrive intelligent all-wheel drive and Dynamic Performance Control torque-vectoring system are maximized for handling while braking and accelerating under the most extreme circumstances. As a result, the X5 M and X6 M are able to maximize traction while precisely following the steering inputs of the driver. The X5 M and X6 M are equipped with the M Drive system, which allows a driver to pre-configure the vehicle for maximum performance and feedback at a touch of the steering wheel-mounted M Button. The Intelligent Way to Reduce Fuel Consumption and Emissions. BMW’s commitment to reduce harmful emissions and help protect the environment is unique in the automobile industry. BMW applies its EfficientDynamics principles to each model by using lightweight components in key areas, new technologies for improved powertrain efficiency and more importantly, planned sustainability for the long term reduction of fuel consumption and emissions output. 
A recent Environmental Defense report shows that BMW’s fleet average for CO2 emissions was reduced by 12% from 1990 – 2005 and its fuel economy improved by 14%, despite the introduction of many new passenger cars and light-duty trucks. This not only demonstrates our long-standing dedication to help the environment, but our ability to do so with every vehicle that wears the BMW roundel. In this period of time, BMW has introduced many industry-leading technologies across its entire model range, such as Valvetronic throttle-less engine control, on-demand coolant and hydraulic pumps for reduced parasitic losses, and High-Precision Direct Injection. In 2009, BMW added two 50-state BMW Advanced Diesel models for an unprecedented combination of performance and low emissions. In some 2010 and 2011 models, BMW introduced Brake Energy Regeneration, Auto Start-Stop technology and two BMW ActiveHybrid models. It is our unending goal to develop technologies and vehicles designed to combine the efficiency our customers desire with dynamics and performance our customers expect. Previous post E30 M3 vs. 1///M: Why The Comparison?
# -*- coding: utf-8 -*-
# Generated by Django 1.9.1 on 2016-01-16 17:14
from __future__ import unicode_literals

from django.db import migrations, models
import django.db.models.deletion
import smart_selects.db_fields


class Migration(migrations.Migration):
    """Initial schema for ``test_app``.

    Creates Book, Continent, Country, Location, Publication and Writer.
    Two fields use smart_selects chained relations:

    * ``Location.country`` is a ``ChainedForeignKey`` whose choice list is
      filtered by the ``Location.continent`` selection.
    * ``Book.writer`` is a ``ChainedManyToManyField`` filtered by the
      ``Book.publication`` selection.

    NOTE: auto-generated file — code deliberately left untouched so the
    migration state Django computes from it does not change.  The
    ``b'...'`` bytes kwargs are Python 2 era artifacts preserved as-is.
    """

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Book',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=255)),
            ],
        ),
        migrations.CreateModel(
            name='Continent',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=255)),
            ],
        ),
        migrations.CreateModel(
            name='Country',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=255)),
                # Every country belongs to one continent.
                ('continent', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='test_app.Continent')),
            ],
        ),
        migrations.CreateModel(
            name='Location',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('city', models.CharField(max_length=50)),
                ('street', models.CharField(max_length=100)),
                ('continent', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='test_app.Continent')),
                # Country choices are chained to the continent selected above.
                ('country', smart_selects.db_fields.ChainedForeignKey(auto_choose=True, chained_field=b'continent', chained_model_field=b'continent', on_delete=django.db.models.deletion.CASCADE, to='test_app.Country')),
            ],
        ),
        migrations.CreateModel(
            name='Publication',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=255)),
            ],
        ),
        migrations.CreateModel(
            name='Writer',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=255)),
                ('publications', models.ManyToManyField(blank=True, to='test_app.Publication')),
            ],
        ),
        migrations.AddField(
            model_name='book',
            name='publication',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='test_app.Publication'),
        ),
        migrations.AddField(
            model_name='book',
            name='writer',
            # Writer choices are chained to the book's selected publication.
            field=smart_selects.db_fields.ChainedManyToManyField(chained_field=b'publication', chained_model_field=b'publications', to='test_app.Writer'),
        ),
    ]
I have the sentence below; please help. I am not sure whether the use of "walking" is grammatically correct — or should it be "walked" or "walk"?
class Resource: """Information about player resource. Include Ore, which is unused, but it can be enabled. Attributes: food (int): food wood (int): wood gold (int): gold stone (int): stone ore (int): ore """ def __init__(self, food=0, wood=0, gold=0, stone=0, ore=0): """Create Resource container for a player Args: food (int): starting food wood (int): starting wood gold (int): starting gold stone (int): starting stone ore (int): starting ore. unused """ self.food = food self.wood = wood self.gold = gold self.stone = stone self.ore = ore def __repr__(self): name = "Player resources: \n" res1 = "\tWood: {}\n\tFood: {}\n".format(self.wood, self.food) res2 = "\tGold: {}\n\tStone: {}\n".format(self.gold, self.stone) res3 = "\tOre*: {}".format(self.ore) return name + res1 + res2 + res3 def toJSON(self): """return JSON""" data = dict() data['food'] = self.food data['wood'] = self.wood data['stone'] = self.stone data['gold'] = self.gold return data def setAll(self, value): """Set value to all resources Args: value (int): a value set to all resources """ self.food = value self.wood = value self.gold = value self.stone = value self.ore = value def getAll(self): """get all resource Return: (tuple): resource values """ return (self.food, self.wood, self.gold, self.stone, self.ore)
Sirromet Winery at Mount Cotton is the ultimate winery experience for foodies and wine lovers. Take in the stunning scenery and unique architecture, visit the cellar door or dine at the onsite 5-star restaurant. Tours and tastings can also be arranged. Sirromet Winery also hosts Day on The Green concerts, which are a fabulous day out on the beautiful grassed area of the winery. Sirromet Winery is located about a 20-minute drive from The Glen Hotel & Suites, at 850-938 Mount Cotton Road, Mount Cotton, Queensland.
from vsg.token import variable_assignment_statement as token from vsg.vhdlFile.classify import conditional_variable_assignment from vsg.vhdlFile.classify import selected_variable_assignment from vsg.vhdlFile.classify import simple_variable_assignment from vsg.vhdlFile import utils def detect(iToken, lObjects): ''' variable_assignment_statement ::= [ label : ] simple_variable_assignment | [ label : ] conditional_variable_assignment | [ label : ] selected_variable_assignment ''' iCurrent = iToken if selected_variable_assignment.detect(iToken, lObjects): iCurrent = utils.tokenize_label(iCurrent, lObjects, token.label, token.label_colon) iCurrent = selected_variable_assignment.classify(iCurrent, lObjects) elif conditional_variable_assignment.detect(iToken, lObjects): iCurrent = utils.tokenize_label(iCurrent, lObjects, token.label, token.label_colon) iCurrent = conditional_variable_assignment.classify(iCurrent, lObjects) elif simple_variable_assignment.detect(iToken, lObjects): iCurrent = utils.tokenize_label(iCurrent, lObjects, token.label, token.label_colon) iCurrent = simple_variable_assignment.classify(iCurrent, lObjects) return iCurrent
Ethereum Meta (ETHM) is a cryptocurrency token and operates on the Ethereum platform. Ethereum Meta has a current supply of 96,571,598 ETHM with 87,164,175 ETHM in circulation. The last known price of Ethereum Meta is 0.001227 USD and is down 43.53% over the last 24 hours. It is currently trading on 3 active market(s) with 12 USD traded over the last 24 hours. More information can be found at https://ethermeta.com/.
"""Node action implementations""" import logging import six from cliff import command from cliff import lister from cliff import show class CreateNode(show.ShowOne): """Create compute Node command""" log = logging.getLogger(__name__ + ".create_node") def get_parser(self, prog_name): parser = super(CreateNode, self).get_parser(prog_name) parser.add_argument( "name", metavar="<name>", help="Name of the Node") parser.add_argument( "size", metavar="<size>", help="The size of the resource allocated") parser.add_argument( "image", metavar="<image>", help="OS image to boot on") parser.add_argument( "auth", metavar="<auth>", help="Initial authentication information") parser.add_argument( "location", metavar="<location>", help="which data center to create node") return parser def take_action(self, parsed_args): self.log.debug("take_action(%s)" % parsed_args) #compute_client = self.app.client_manager.compute args = ( parsed_args.name, parsed_args.size, parsed_args.image, parsed_args.auth, parsed_args.location ) #Node = compute_client.Node.create(*args)._info.copy() #return zip(*sorted(six.iteritems(Node))) return "its works!" 
class DeleteNode(command.Command): """Delete compute agent command""" log = logging.getLogger(__name__ + ".DeleteAgent") def get_parser(self, prog_name): parser = super(DeleteNode, self).get_parser(prog_name) parser.add_argument( "id", metavar="<id>", help="ID of agent to delete") return parser def take_action(self, parsed_args): self.log.debug("take_action(%s)" % parsed_args) print "node deleted" return class SetNode(show.ShowOne): """Set compute Node command""" log = logging.getLogger(__name__ + ".SetAgent") def get_parser(self, prog_name): parser = super(SetNode, self).get_parser(prog_name) parser.add_argument( "id", metavar="<id>", help="ID of the agent") parser.add_argument( "version", metavar="<version>", help="Version of the agent") parser.add_argument( "url", metavar="<url>", help="URL") parser.add_argument( "md5hash", metavar="<md5hash>", help="MD5 hash") return parser def take_action(self, parsed_args): self.log.debug("take_action(%s)" % parsed_args) args = ( parsed_args.id, parsed_args.version, parsed_args.url, parsed_args.md5hash ) print "node set" return
Office Sign Company offers a variety of mounting options (most at no additional charge) to best suit your needs and mounting surfaces. Most options have very specific applications, so please refer to the appropriate section below for the mounting option you have selected for installation instructions. Please note that we include enough of each selected mounting option to accommodate the size and weight of your signs. Office Sign Company does not accept responsibility for damage resulting from improper installation. Customers are responsible for determining the suitability of each mounting option. Testing is recommended prior to each use. Consult your local building codes/regulations for requirements regarding mounting height. Unfortunately, our enclosed metal frames do not have screw holes, but you can drill into the Masonite backer boards in order to install by screw. Office Sign Company will not be held responsible for any damage caused to our products by customers during mounting. Our two-part Velcro mounting system includes both hooks and loops to mount your signs to most hard surfaces. The hooks and loops are designed to work together, so Office Sign Company cannot guarantee that your signs will be securely mounted to your surface if you elect to exclude either portion. Our two-part magnetic mounting system includes both metal foil and magnetic strips to mount your signs to most hard surfaces. The foil and magnets are designed to work together, so Office Sign Company cannot guarantee that your signs will be securely mounted to your surface if you elect to exclude either portion. We have included a link below to some step-by-step instructions for easy installation of your custom vinyl, along with a demonstrational video that takes you through the entire process. We recommend reading these instructions and watching this video prior to mounting your vinyl product. If you have any questions, please contact us prior to installation for troubleshooting. 
You may reach us via e-mail at service@officesigncompany.com or call us at (701) 526-3835. We have tested these 3M Command Picture Hanging Strips on all of our products and offer them as a damage-free mounting option on the items they will support. Each pair of strips holds approximately 3 lbs, and the recommended amount of strips will be included on your sign. Just one "click" lets you know these strips are locked into place. If you need to remove your sign, the process is pain free. Just hold onto the bottom corners of your sign and gently peel it away from the wall. Once you have removed the sign, slowly pull the strips straight down at least 12 inches and they will release, leaving your wall intact. Stand offs are a decorative way to screw mount your sign. They include one portion with a hole in each end (the barrel or base), one portion with a hole on one side (the cap), and a threaded stud. Place the screw through the small hole in the barrel and drill into the wall. Next, twist the threaded stud into the base portion of the stand off and place the screw hole in your sign over the stud so the stud extends through the hole. Screw the cap of the stand off onto the stud until tight so the base and the cap are gripping your sign. Caution: do not excessively tighten or this may cause your sign to crack. For further recommendation on what mounting method is best for your particular sign, or to purchase these or any other custom mounting accessories, please feel free to contact our friendly staff by emailing service@officesigncompany.com or calling 701-526-3835. We would love to answer your questions on sign mounting! As an added resource, we have also put together some helpful guides that demonstrate How To Install Your Office Sign. Click the link to receive step-by-step instructions on how to mount your new office sign yourself. Thank you for choosing to shop with Office Sign Company!
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Pelican site configuration for PILOSA.EU (development settings;
# publishconf.py overrides the production values).
from __future__ import unicode_literals
from fontawesome_markdown import FontAwesomeExtension
import os

# --- Site identity ---------------------------------------------------------
AUTHOR = u"Erwin Sterrenburg"
SITENAME = u"PILOSA.EU"
TAGLINE = 'Scribblings of World\'s Most Curious Anteater'
TIMEZONE = 'Europe/Amsterdam'
LOCALE='en_US.utf8'
DEFAULT_LANG = u'en'
DEFAULT_PAGINATION = 5

# By default we enable pretty highlighing in markdown:
MD_EXTENSIONS = [FontAwesomeExtension(), 'codehilite(css_class=highlight,linenums=False)', 'extra', 'toc', 'typogrify']

# Leave this blank for local development, publishconf.py has the "real" value:
SITEURL = 'http://localhost:8000/'
RELATIVE_URLS = True

# Feed generation is usually not desired when developing;
# only the combined RSS feed is produced.
FEED_ALL_RSS = 'feed.xml'
CATEGORY_FEED_RSS = None
TRANSLATION_FEED_RSS = None
FEED_ALL_ATOM = None
CATEGORY_FEED_ATOM = None
TRANSLATION_FEED_ATOM = None
FEED_DOMAIN = SITEURL

# --- Navigation and social links -------------------------------------------
MENUITEMS = [('Archive', 'archives.html'),
             ('About', 'about.html'),]

# (icon-name, url) pairs; icon names are Font Awesome identifiers.
SOCIAL = (
    ('envelope-o', 'mailto:ewsterrenburg@gmail.com'),
    ('github', 'https://github.com/ewsterrenburg/'),
    ('linkedin-square', 'https://www.linkedin.com/in/ewsterrenburg/'),
    ('rss', 'http://pilosa.eu/feed.xml'),
)

# Static files
# Uncomment and set to the filename of your favicon:
FAVICON_FILENAME = 'favicon.ico'

# Any extra files should be added here
#STATIC_PATHS = ['images', 'extra/CNAME']
STATIC_PATHS = [
    'images',
    os.path.join('extras','robots.txt'),
    'extras/CNAME',
    'extras/favicon.ico',
    'extras/apple-touch-icon.png'
    ]

# Here's a sample EXTRA_PATH_METADATA that adds the favicon, an iOS touch icon and a GPG key:
# NOTE(review): this loop appends every file under content/extras to
# STATIC_PATHS, duplicating the explicit 'extras/...' entries listed
# above — confirm whether the explicit entries can be dropped.
# It also runs at import time, so the config fails if content/extras
# is missing.
EXTRA_PATH_METADATA = dict()
for f in os.listdir('content/extras'):
    STATIC_PATHS.append('extras' + os.sep + '{0}'.format(f))
    EXTRA_PATH_METADATA['extras' + os.sep + '{0}'.format(f)]={'path': f}

#Theme
THEME = os.path.join(os.getcwd(), "themes", "pure-single-master")
COVER_IMG_URL = "/images/bananas.jpeg"

# Sole author and don't use categories ... disable these features
SINGLE_AUTHOR = True
AUTHOR_SAVE_AS = False
AUTHORS_SAVE_AS = False
DISPLAY_CATEGORIES_ON_MENU = False

DEFAULT_DATE_FORMAT = ('%b %d %Y')
TYPOGRIFY = True

# Cleaner page links
PAGE_URL = '{slug}.html'
PAGE_SAVE_AS = '{slug}.html'
PAGE_LANG_URL = '{slug}-{lang}.html'
PAGE_LANG_SAVE_AS = '{slug}-{lang}.html'

# Cleaner Articles
ARTICLE_URL = 'blog/{date:%Y}/{date:%m}/{date:%d}/{slug}/'
ARTICLE_SAVE_AS = 'blog/{date:%Y}/{date:%m}/{date:%d}/{slug}/index.html'

# Plugins live in a sibling checkout of pelican-plugins.
PLUGIN_PATHS = [os.path.join(os.getcwd(), "..", "pelican-plugins")]
#PLUGINS = ['render_math', 'extended_sitemap', 'better_codeblock_line_numbering']
PLUGINS = ['extended_sitemap', 'better_codeblock_line_numbering']

TAG_CLOUD_STEPS = 4

# Setting for the better_figures_and_images plugin
RESPONSIVE_IMAGES = True
Workers at an underground test freeze, in 2012. All photos via Kevin O'Reilly. The Northern Canadian territories often get written off by all of us as a frigid tundra of nothingness. But it’s probably about time we start paying attention to the situation up there. Yellowknife is currently sitting on 237,000 tons of arsenic—enough to kill the entire human population of our planet a few times over. The local mine's arsenic production (averaging 22,000 pounds a day) was left unregulated until 1951, when a child from the Yellowknives Dene First Nation died of poisoning from eating snow in the area. It became apparent that the government needed to do something about it. But instead of shutting down operations, they thought the best course of action was to collect the massive amounts of poisonous dust in chambers underground, presumably hoping that the arsenic trioxide fairy would eventually come and take all of it away. They handed the child’s family $750 and decided to wait it out. However, with a crumbling infrastructure and increasing concern about leakage into the local water supply, the time has come to do something about it. Now that it is 2014 and the Canadian government is older and wiser, they have finally come up with what they consider a reasonable solution to our minor poison problem: They are going to freeze the 237,000 tons of arsenic trioxide underground—for all of eternity. The government had originally hoped that permafrost would creep its way back into the area and freeze the arsenic naturally, despite warnings from engineers that came as early as the 1950s that this would not be the case. After a decade of fruitless waiting, the government’s plan to achieve this “frozen block” solution is to mimic the way an ice rink is kept frozen. That is: by continuously pumping coolant into the ground, they can theoretically keep the arsenic frozen.
The freezing as of now is expected to cost at least $1 billion initially, and then an additional $2 million every subsequent year. An aerial view of the mines. “My own thoughts are that we probably should bring the stuff up above ground and process it into a less toxic form of arsenic and put it at the bottom of the mine,” says Kevin. But this solution is seen as too costly to implement. The government’s remediation plan of icing out the arsenic was initially met with uniform opposition from every group involved with Giant Mine. The Mackenzie Valley Review Board, an independent tribunal which aims to give the surrounding aboriginal peoples a greater say in the management of the area, proposed an environmental assessment, which will hopefully be approved by the minister in Ottawa any day now. The assessment lays out numerous amendments that include “forever” being reduced to only 100 years, as well as putting funding towards research that would seek to find a more attainable solution for the mine. “The project went from having uniform opposition from every group involved to having support from many groups after environmental assessment,” says Alan Ehrlich, a member of the Mackenzie Valley Review Board. The City of Yellowknife, as well as the Yellowknives Dene First Nation, unanimously passed a motion to accept the Review Board’s proposed amendments, as they would bring the people a slightly more hopeful future for their area. This is especially important when you put into context how much Giant Mine has affected the Dene people’s lands, upon which the mine is located. The carelessness with which the mine was operated in the first few years of production has had profound repercussions on the Dene people. With no pollution control, everything inside the mine was going up the stack. Even towards the end of the mine’s life in 2004, there was still around 60 pounds of arsenic being diffused into the air every day. 
The end result was complete contamination of the Dene’s land. It's no surprise that they would be concerned about how the proposed cleanup will affect them further. Numerous contaminated buildings will have to be exhumed and destroyed in an attempt to decontaminate the area. Soil will have to be removed, which has the potential to create toxic dust. The Dene people have raised concerns of how this will affect them physically and culturally, as many of their key cultural practices are closely tied to the land.
#!/usr/bin/env python
"""
A PyQt5 (client) interface to an INDI server.

This will only work in the context of a PyQt application.
"""
from xml.etree import ElementTree

from PyQt5 import QtCore, QtNetwork

import snippets.indi_new.indi_xml as indiXML


class QtINDIClientException(Exception):
    # Client-level INDI error type; currently not raised in this module.
    pass


class QtINDIClient(QtCore.QObject):
    """Qt TCP client that speaks the INDI XML protocol.

    Connects to an INDI server over a QTcpSocket and emits each parsed
    INDI message through the ``received`` signal.
    """

    # Received messages as INDI Python objects.
    received = QtCore.pyqtSignal(object)

    def __init__(self, host='192.168.2.164', port=7624, verbose=True, **kwds):
        """Set up the socket and wire its signal handlers.

        Args:
            host (str): address of the INDI server.
            port (int): server port (7624 is the INDI default).
            verbose (bool): accepted but not used by this class.
            **kwds: forwarded to QObject.
        """
        super().__init__(**kwds)
        self.device = None        # optional device-name filter (see setDevice)
        self.message_string = ""  # accumulates partial XML between reads
        self.host = host
        self.port = port
        self.connected = False

        # Create socket.
        self.socket = QtNetwork.QTcpSocket()
        self.socket.disconnected.connect(self.handleDisconnect)
        self.socket.readyRead.connect(self.handleReadyRead)
        self.socket.hostFound.connect(self.handleHostFound)
        self.socket.connected.connect(self.handleConnected)
        self.socket.stateChanged.connect(self.handleStateChanged)
        self.socket.error.connect(self.handleError)

        # if not self.socket.waitForConnected():
        #     print("Cannot connect to indiserver at " + address + ", port " + str(port))

    def handleDisconnect(self):
        """Socket dropped: record the state and close our side too."""
        print('handleDisconnect')
        self.connected = False
        self.socket.disconnectFromHost()

    def handleHostFound(self):
        # DNS lookup succeeded; purely informational.
        print('handleHostFound')

    def handleConnected(self):
        """TCP connection established; record the connected state."""
        print('handleConnected')
        print("Connect to indiserver at " + self.host + ", port " + str(self.port))
        self.connected = True

    def handleError(self, socketError):
        """Log socket errors; both branches are currently no-ops."""
        print("The following error occurred: {0}".format(self.socket.errorString()))
        if socketError == QtNetwork.QAbstractSocket.RemoteHostClosedError:
            pass
        else:
            pass

    def handleStateChanged(self):
        """Log socket state transitions; both branches are currently no-ops."""
        print('State changed: {0}'.format(self.socket.state()))
        if self.socket.state() == QtNetwork.QAbstractSocket.ConnectedState:
            pass
        else:
            pass

    def handleReadyRead(self):
        """Read available bytes and emit any complete INDI messages.

        The INDI stream has no single root element, so the buffer is
        wrapped in a synthetic <data>...</data> element before parsing.
        """
        # Add starting tag if this is new message.
        if (len(self.message_string) == 0):
            self.message_string = "<data>"

        # Get message from socket.
        while self.socket.bytesAvailable():
            # FIXME: This does not work with Python2.
            tmp = str(self.socket.read(1000000), "ascii")
            self.message_string += tmp

        # Add closing tag.
        self.message_string += "</data>"

        # Try and parse the message.
        try:
            messages = ElementTree.fromstring(self.message_string)
            self.message_string = ""
            for message in messages:
                xml_message = indiXML.parseETree(message)

                # Filter by device name if self.device is not None.
                if self.device is not None:
                    if (self.device == xml_message.getAttr("device")):
                        self.received.emit(xml_message)

                # Otherwise just send them all.
                else:
                    self.received.emit(xml_message)

        # Message is incomplete: strip the closing </data> (7 characters)
        # and wait for more bytes.
        except ElementTree.ParseError:
            self.message_string = self.message_string[:-7]

    def setDevice(self, device=None):
        """Restrict emitted messages to *device* (None = no filtering)."""
        self.device = device

    def sendMessage(self, indi_command):
        """Serialize *indi_command* to XML and write it to the socket."""
        if (self.socket.state() == QtNetwork.QAbstractSocket.ConnectedState):
            self.socket.write(indi_command.toXML() + b'\n')
        else:
            print("Socket is not connected.")


if (__name__ == "__main__"):

    import sys
    import time
    from PyQt5 import QtWidgets

    class Widget(QtWidgets.QWidget):
        """Minimal widget that owns a client and prints received messages."""

        def __init__(self):
            QtWidgets.QWidget.__init__(self)
            self.client = QtINDIClient()
            self.client.received.connect(self.handleReceived)

        def handleReceived(self, message):
            print(message)

        def send(self, message):
            self.client.sendMessage(message)

    app = QtWidgets.QApplication(sys.argv)
    widget = Widget()
    widget.show()

    # Get a list of devices.
    # widget.send(indiXML.clientGetProperties(indi_attr={"version": "1.0"}))

    # Connect to the CCD simulator.
    # widget.send(indiXML.newSwitchVector([indiXML.oneSwitch("On", indi_attr={"name": "CONNECT"})], indi_attr={"name": "CONNECTION", "device": "CCD Simulator"}))

    # NOTE(review): this manual poll loop never exits, so the
    # sys.exit(app.exec_()) below is unreachable — confirm intent.
    while True:
        time.sleep(1)
        QtWidgets.QApplication.processEvents()
        if not widget.client.connected and widget.client.socket.state() == 0:
            print('try to connect to', widget.client.host)
            widget.client.socket.connectToHost(widget.client.host, widget.client.port)

        # Enable BLOB mode.
        # widget.send(indiXML.enableBLOB("Also", indi_attr={"device": "CCD Simulator"}))

        # Request image.
        # widget.send(indiXML.newNumberVector([indiXML.oneNumber(1, indi_attr={"name": "CCD_EXPOSURE_VALUE"})], indi_attr={"name": "CCD_EXPOSURE", "device": "CCD Simulator"}))

    sys.exit(app.exec_())
Bus Éireann is registered with An Garda Síochána National Vetting Bureau to conduct vetting. This process is carried out in a highly sensitive and confidential manner. Why do we conduct Garda Vetting? Under the National Vetting Bureau (Children and Vulnerable Persons) Acts 2012 to 2016 vetting of personnel who work with children and vulnerable adults is required by law. The Act states that it is an offence for a person to engage in any work or activity with children and vulnerable adults, unless that person has been vetted by the National Vetting Bureau. Penalties will be applied to those found in breach of this Act. The Act applies to any person who is carrying out any work or activity, a necessary and regular part of which consists mainly of the person having access to, or contact with, children and vulnerable adults. Bus Éireann does not conduct vetting for individuals on a personal basis. (Bus Éireann assumes that the email provided is personal to you. We will not be liable if personal details are inadvertently disclosed through the email provided). Bus Éireann or a Bus Éireann Approved Contractor will invite an applicant to participate in the online vetting process. Following completion of the Bus Eireann Online Invitation Form, the applicant will receive an email from An Garda Síochána National Vetting Bureau (evetting.donotreply@garda.ie) requesting them to complete an online application. To access this application form an applicant will need their Email Address and Date of Birth. A list of all previous addresses since birth. The following information is required in relation to any criminal record that may be held by the applicant: Court Date, Court Name, Offence, Court Outcome. If the applicant does not access his/her invitation within 30 days, it will expire. Once expired the applicant will have to go through the Bus Éireann invitation process again from the beginning. 
The vetting itself is carried out by An Garda Síochána National Vetting Bureau and refers to an individual’s record of convictions in the Republic of Ireland or elsewhere. This includes all convictions and/or prosecutions, successful or not, pending or completed. The vetting process may also disclose information in relation to specified information such as pertinent information concerning a bona fide concern that there is a finding or allegation of harm to another person received by the National Vetting Bureau from An Garda Síochána or a Scheduled Organisation. The outcome of the vetting process is assessed by Bus Éireann. Each case is treated individually and gives due regard to the circumstances notified. Bus Éireann acknowledges that, in certain circumstances, in order to safeguard our standard of services, it may not be appropriate for an individual with convictions or prosecutions (completed or pending, successful or not) to provide services on its behalf. Bus Éireann reserves the right at its sole and absolute discretion to object at any time to any person nominated as a driver of a school transport service. Where can I get more Information on Garda vetting? IMPORTANT: Please ensure you have the correct documentation before you press the click here button below. (i.e. School Bus Contractor or Bus Éireann Department), MUST BE completed, signed and dated before uploading.
# Copyright 2010 OpenStack Foundation # Copyright 2012 University Of Minho # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import os import mock from os_brick.initiator import connector from oslo_concurrency import processutils from oslo_config import cfg from nova import exception from nova import test from nova.tests.unit.virt.libvirt import fakelibvirt from nova import utils from nova.virt.libvirt import host from nova.virt.libvirt import utils as libvirt_utils from nova.virt.libvirt.volume import volume CONF = cfg.CONF SECRET_UUID = '2a0a0d6c-babf-454d-b93e-9ac9957b95e0' class FakeSecret(object): def __init__(self): self.uuid = SECRET_UUID def getUUIDString(self): return self.uuid def UUIDString(self): return self.uuid def setValue(self, value): self.value = value return 0 def getValue(self, value): return self.value def undefine(self): self.value = None return 0 class LibvirtVolumeBaseTestCase(test.NoDBTestCase): """Contains common setup and helper methods for libvirt volume tests.""" def setUp(self): super(LibvirtVolumeBaseTestCase, self).setUp() self.executes = [] def fake_execute(*cmd, **kwargs): self.executes.append(cmd) return None, None self.stubs.Set(utils, 'execute', fake_execute) self.useFixture(fakelibvirt.FakeLibvirtFixture()) class FakeLibvirtDriver(object): def __init__(self): self._host = host.Host("qemu:///system") def _get_all_block_devices(self): return [] self.fake_conn = FakeLibvirtDriver() self.connr = { 'ip': '127.0.0.1', 'initiator': 'fake_initiator', 
'host': 'fake_host' } self.disk_info = { "bus": "virtio", "dev": "vde", "type": "disk", } self.name = 'volume-00000001' self.location = '10.0.2.15:3260' self.iqn = 'iqn.2010-10.org.openstack:%s' % self.name self.vol = {'id': 1, 'name': self.name} self.uuid = '875a8070-d0b9-4949-8b31-104d125c9a64' self.user = 'foo' def _assertFileTypeEquals(self, tree, file_path): self.assertEqual(tree.get('type'), 'file') self.assertEqual(tree.find('./source').get('file'), file_path) class LibvirtVolumeTestCase(LibvirtVolumeBaseTestCase): def _assertNetworkAndProtocolEquals(self, tree): self.assertEqual(tree.get('type'), 'network') self.assertEqual(tree.find('./source').get('protocol'), 'rbd') rbd_name = '%s/%s' % ('rbd', self.name) self.assertEqual(tree.find('./source').get('name'), rbd_name) def _assertISCSINetworkAndProtocolEquals(self, tree): self.assertEqual(tree.get('type'), 'network') self.assertEqual(tree.find('./source').get('protocol'), 'iscsi') iscsi_name = '%s/%s' % (self.iqn, self.vol['id']) self.assertEqual(tree.find('./source').get('name'), iscsi_name) def _assertDiskInfoEquals(self, tree, disk_info): self.assertEqual(tree.get('device'), disk_info['type']) self.assertEqual(tree.find('./target').get('bus'), disk_info['bus']) self.assertEqual(tree.find('./target').get('dev'), disk_info['dev']) def _test_libvirt_volume_driver_disk_info(self): libvirt_driver = volume.LibvirtVolumeDriver(self.fake_conn) connection_info = { 'driver_volume_type': 'fake', 'data': { 'device_path': '/foo', }, 'serial': 'fake_serial', } conf = libvirt_driver.get_config(connection_info, self.disk_info) tree = conf.format_dom() self._assertDiskInfoEquals(tree, self.disk_info) def test_libvirt_volume_disk_info_type(self): self.disk_info['type'] = 'cdrom' self._test_libvirt_volume_driver_disk_info() def test_libvirt_volume_disk_info_dev(self): self.disk_info['dev'] = 'hdc' self._test_libvirt_volume_driver_disk_info() def test_libvirt_volume_disk_info_bus(self): self.disk_info['bus'] = 'scsi' 
self._test_libvirt_volume_driver_disk_info() def test_libvirt_volume_driver_serial(self): libvirt_driver = volume.LibvirtVolumeDriver(self.fake_conn) connection_info = { 'driver_volume_type': 'fake', 'data': { 'device_path': '/foo', }, 'serial': 'fake_serial', } conf = libvirt_driver.get_config(connection_info, self.disk_info) tree = conf.format_dom() self.assertEqual('block', tree.get('type')) self.assertEqual('fake_serial', tree.find('./serial').text) self.assertIsNone(tree.find('./blockio')) def test_libvirt_volume_driver_blockio(self): libvirt_driver = volume.LibvirtVolumeDriver(self.fake_conn) connection_info = { 'driver_volume_type': 'fake', 'data': { 'device_path': '/foo', 'logical_block_size': '4096', 'physical_block_size': '4096', }, 'serial': 'fake_serial', } disk_info = { "bus": "virtio", "dev": "vde", "type": "disk", } conf = libvirt_driver.get_config(connection_info, disk_info) tree = conf.format_dom() blockio = tree.find('./blockio') self.assertEqual('4096', blockio.get('logical_block_size')) self.assertEqual('4096', blockio.get('physical_block_size')) def test_libvirt_volume_driver_iotune(self): libvirt_driver = volume.LibvirtVolumeDriver(self.fake_conn) connection_info = { 'driver_volume_type': 'fake', 'data': { "device_path": "/foo", 'qos_specs': 'bar', }, } disk_info = { "bus": "virtio", "dev": "vde", "type": "disk", } conf = libvirt_driver.get_config(connection_info, disk_info) tree = conf.format_dom() iotune = tree.find('./iotune') # ensure invalid qos_specs is ignored self.assertIsNone(iotune) specs = { 'total_bytes_sec': '102400', 'read_bytes_sec': '51200', 'write_bytes_sec': '0', 'total_iops_sec': '0', 'read_iops_sec': '200', 'write_iops_sec': '200', } del connection_info['data']['qos_specs'] connection_info['data'].update(dict(qos_specs=specs)) conf = libvirt_driver.get_config(connection_info, disk_info) tree = conf.format_dom() self.assertEqual('102400', tree.find('./iotune/total_bytes_sec').text) self.assertEqual('51200', 
# --- tail of an iotune assertion truncated at the chunk boundary ---
        tree.find('./iotune/read_bytes_sec').text)
        self.assertEqual('0', tree.find('./iotune/write_bytes_sec').text)
        self.assertEqual('0', tree.find('./iotune/total_iops_sec').text)
        self.assertEqual('200', tree.find('./iotune/read_iops_sec').text)
        self.assertEqual('200', tree.find('./iotune/write_iops_sec').text)

    def test_libvirt_volume_driver_readonly(self):
        """<readonly/> is emitted only for access_mode 'ro'; bad modes raise."""
        libvirt_driver = volume.LibvirtVolumeDriver(self.fake_conn)
        connection_info = {
            'driver_volume_type': 'fake',
            'data': {
                "device_path": "/foo",
                'access_mode': 'bar',
            },
        }
        disk_info = {
            "bus": "virtio",
            "dev": "vde",
            "type": "disk",
        }
        # 'bar' is not a recognized access mode.
        # NOTE(review): this call passes self.disk_info while the rest of the
        # test uses the local disk_info -- confirm which was intended.
        self.assertRaises(exception.InvalidVolumeAccessMode,
                          libvirt_driver.get_config,
                          connection_info, self.disk_info)

        # 'rw' must NOT emit a <readonly/> element.
        connection_info['data']['access_mode'] = 'rw'
        conf = libvirt_driver.get_config(connection_info, disk_info)
        tree = conf.format_dom()
        readonly = tree.find('./readonly')
        self.assertIsNone(readonly)

        # 'ro' must emit a <readonly/> element.
        connection_info['data']['access_mode'] = 'ro'
        conf = libvirt_driver.get_config(connection_info, disk_info)
        tree = conf.format_dom()
        readonly = tree.find('./readonly')
        self.assertIsNotNone(readonly)

    def iscsi_connection(self, volume, location, iqn, auth=False,
                         transport=None):
        """Build a fake iSCSI connection_info dict for the given target."""
        dev_name = 'ip-%s-iscsi-%s-lun-1' % (location, iqn)
        if transport is not None:
            dev_name = 'pci-0000:00:00.0-' + dev_name
        dev_path = '/dev/disk/by-path/%s' % (dev_name)
        ret = {
            'driver_volume_type': 'iscsi',
            'data': {
                'volume_id': volume['id'],
                'target_portal': location,
                'target_iqn': iqn,
                'target_lun': 1,
                'device_path': dev_path,
                'qos_specs': {
                    'total_bytes_sec': '102400',
                    'read_iops_sec': '200',
                }
            }
        }
        if auth:
            # Optional CHAP credentials for auth-enabled tests.
            ret['data']['auth_method'] = 'CHAP'
            ret['data']['auth_username'] = 'foo'
            ret['data']['auth_password'] = 'bar'
        return ret

    def iscsi_connection_discovery_chap_enable(self, volume, location, iqn):
        """Fake iSCSI connection_info with discovery-phase CHAP credentials."""
        dev_name = 'ip-%s-iscsi-%s-lun-1' % (location, iqn)
        dev_path = '/dev/disk/by-path/%s' % (dev_name)
        return {
            'driver_volume_type': 'iscsi',
            'data': {
                'volume_id': volume['id'],
                'target_portal': location,
                'target_iqn': iqn,
                'target_lun': 1,
                'device_path': dev_path,
                'discovery_auth_method': 'CHAP',
                'discovery_auth_username': "testuser",
                'discovery_auth_password': '123456',
                'qos_specs': {
                    'total_bytes_sec': '102400',
                    'read_iops_sec': '200',
                }
            }
        }

    def generate_device(self, transport=None, lun=1, short=False):
        """Return the expected by-path device name (short) or full path."""
        dev_format = "ip-%s-iscsi-%s-lun-%s" % (self.location, self.iqn, lun)
        if transport:
            dev_format = "pci-0000:00:00.0-" + dev_format
        if short:
            return dev_format
        fake_dev_path = "/dev/disk/by-path/" + dev_format
        return fake_dev_path

    def test_iscsiadm_discover_parsing(self):
        # Ensure that parsing iscsiadm discover ignores cruft.

        targets = [
            ["192.168.204.82:3260,1",
             ("iqn.2010-10.org.openstack:volume-"
              "f9b12623-6ce3-4dac-a71f-09ad4249bdd3")],
            ["192.168.204.82:3261,1",
             ("iqn.2010-10.org.openstack:volume-"
              "f9b12623-6ce3-4dac-a71f-09ad4249bdd4")]]

        # This slight wonkiness brought to you by pep8, as the actual
        # example output runs about 97 chars wide.
        sample_input = """Loading iscsi modules: done
Starting iSCSI initiator service: done
Setting up iSCSI targets: unused
%s %s
%s %s
""" % (targets[0][0], targets[0][1], targets[1][0], targets[1][1])
        driver = volume.LibvirtISCSIVolumeDriver("none")
        out = driver.connector._get_target_portals_from_iscsiadm_output(
            sample_input)
        self.assertEqual(out, targets)

    def test_libvirt_iscsi_driver(self, transport=None):
        # The driver must delegate to an os-brick ISCSIConnector.
        libvirt_driver = volume.LibvirtISCSIVolumeDriver(self.fake_conn)
        self.assertIsInstance(libvirt_driver.connector,
                              connector.ISCSIConnector)

    def test_sanitize_log_run_iscsiadm(self):
        # Tests that the parameters to the os-brick connector's
        # _run_iscsiadm function are sanitized for passwords when logged.
        def fake_debug(*args, **kwargs):
            # The logged command mentions the password *option* but must not
            # contain the password *value*.
            self.assertIn('node.session.auth.password', args[0])
            self.assertNotIn('scrubme', args[0])

        def fake_execute(*args, **kwargs):
            return (None, None)

        libvirt_driver = volume.LibvirtISCSIVolumeDriver(self.fake_conn)
        libvirt_driver.connector.set_execute(fake_execute)
        connection_info = self.iscsi_connection(self.vol, self.location,
                                                self.iqn)
        iscsi_properties = connection_info['data']
        with mock.patch.object(connector.LOG, 'debug',
                               side_effect=fake_debug) as debug_mock:
            libvirt_driver.connector._iscsiadm_update(
                iscsi_properties, 'node.session.auth.password', 'scrubme')
            # we don't care what the log message is, we just want to make sure
            # our stub method is called which asserts the password is scrubbed
            self.assertTrue(debug_mock.called)

    def iser_connection(self, volume, location, iqn):
        """Build a fake iSER connection_info dict."""
        return {
            'driver_volume_type': 'iser',
            'data': {
                'volume_id': volume['id'],
                'target_portal': location,
                'target_iqn': iqn,
                'target_lun': 1,
            }
        }

    def sheepdog_connection(self, volume):
        """Build a fake sheepdog connection_info dict."""
        return {
            'driver_volume_type': 'sheepdog',
            'data': {
                'name': volume['name']
            }
        }

    def test_libvirt_sheepdog_driver(self):
        # Sheepdog volumes are exposed as network disks.
        libvirt_driver = volume.LibvirtNetVolumeDriver(self.fake_conn)
        connection_info = self.sheepdog_connection(self.vol)
        conf = libvirt_driver.get_config(connection_info, self.disk_info)
        tree = conf.format_dom()
        self.assertEqual(tree.get('type'), 'network')
        self.assertEqual(tree.find('./source').get('protocol'), 'sheepdog')
        self.assertEqual(tree.find('./source').get('name'), self.name)
        libvirt_driver.disconnect_volume(connection_info, "vde")

    def rbd_connection(self, volume):
        """Build a fake RBD connection_info dict seeded from CONF.libvirt."""
        return {
            'driver_volume_type': 'rbd',
            'data': {
                'name': '%s/%s' % ('rbd', volume['name']),
                'auth_enabled': CONF.libvirt.rbd_secret_uuid is not None,
                'auth_username': CONF.libvirt.rbd_user,
                'secret_type': 'ceph',
                'secret_uuid': CONF.libvirt.rbd_secret_uuid,
                'qos_specs': {
                    'total_bytes_sec': '1048576',
                    'read_iops_sec': '500',
                }
            }
        }

    def test_libvirt_rbd_driver(self):
        # Default RBD config: no auth element, qos mapped to <iotune>.
        libvirt_driver = volume.LibvirtNetVolumeDriver(self.fake_conn)
        connection_info = self.rbd_connection(self.vol)
        conf = libvirt_driver.get_config(connection_info, self.disk_info)
        tree = conf.format_dom()
        self._assertNetworkAndProtocolEquals(tree)
        self.assertIsNone(tree.find('./source/auth'))
        self.assertEqual('1048576', tree.find('./iotune/total_bytes_sec').text)
        self.assertEqual('500', tree.find('./iotune/read_iops_sec').text)
        libvirt_driver.disconnect_volume(connection_info, "vde")

    def test_libvirt_rbd_driver_hosts(self):
        # Monitor hosts/ports from connection_info become <host> elements,
        # preserving order and a possibly-None port.
        libvirt_driver = volume.LibvirtNetVolumeDriver(self.fake_conn)
        connection_info = self.rbd_connection(self.vol)
        hosts = ['example.com', '1.2.3.4', '::1']
        ports = [None, '6790', '6791']
        connection_info['data']['hosts'] = hosts
        connection_info['data']['ports'] = ports
        conf = libvirt_driver.get_config(connection_info, self.disk_info)
        tree = conf.format_dom()
        self._assertNetworkAndProtocolEquals(tree)
        self.assertIsNone(tree.find('./source/auth'))
        found_hosts = tree.findall('./source/host')
        self.assertEqual([host.get('name') for host in found_hosts], hosts)
        self.assertEqual([host.get('port') for host in found_hosts], ports)
        libvirt_driver.disconnect_volume(connection_info, "vde")

    def test_libvirt_rbd_driver_auth_enabled(self):
        # auth_enabled=True in connection_info produces <auth>/<secret>.
        libvirt_driver = volume.LibvirtNetVolumeDriver(self.fake_conn)
        connection_info = self.rbd_connection(self.vol)
        secret_type = 'ceph'
        connection_info['data']['auth_enabled'] = True
        connection_info['data']['auth_username'] = self.user
        connection_info['data']['secret_type'] = secret_type
        connection_info['data']['secret_uuid'] = self.uuid
        conf = libvirt_driver.get_config(connection_info, self.disk_info)
        tree = conf.format_dom()
        self._assertNetworkAndProtocolEquals(tree)
        self.assertEqual(tree.find('./auth').get('username'), self.user)
        self.assertEqual(tree.find('./auth/secret').get('type'), secret_type)
        self.assertEqual(tree.find('./auth/secret').get('uuid'), self.uuid)
        libvirt_driver.disconnect_volume(connection_info, "vde")

    def test_libvirt_rbd_driver_auth_enabled_flags_override(self):
        # nova-side rbd_user/rbd_secret_uuid flags override what cinder sent.
        libvirt_driver = volume.LibvirtNetVolumeDriver(self.fake_conn)
        connection_info = self.rbd_connection(self.vol)
        secret_type = 'ceph'
        connection_info['data']['auth_enabled'] = True
        connection_info['data']['auth_username'] = self.user
        connection_info['data']['secret_type'] = secret_type
        connection_info['data']['secret_uuid'] = self.uuid

        flags_uuid = '37152720-1785-11e2-a740-af0c1d8b8e4b'
        flags_user = 'bar'
        self.flags(rbd_user=flags_user,
                   rbd_secret_uuid=flags_uuid,
                   group='libvirt')

        conf = libvirt_driver.get_config(connection_info, self.disk_info)
        tree = conf.format_dom()
        self._assertNetworkAndProtocolEquals(tree)
        self.assertEqual(tree.find('./auth').get('username'), flags_user)
        self.assertEqual(tree.find('./auth/secret').get('type'), secret_type)
        self.assertEqual(tree.find('./auth/secret').get('uuid'), flags_uuid)
        libvirt_driver.disconnect_volume(connection_info, "vde")

    def test_libvirt_rbd_driver_auth_disabled(self):
        # auth_enabled=False suppresses <auth> even when creds are supplied.
        libvirt_driver = volume.LibvirtNetVolumeDriver(self.fake_conn)
        connection_info = self.rbd_connection(self.vol)
        secret_type = 'ceph'
        connection_info['data']['auth_enabled'] = False
        connection_info['data']['auth_username'] = self.user
        connection_info['data']['secret_type'] = secret_type
        connection_info['data']['secret_uuid'] = self.uuid
        conf = libvirt_driver.get_config(connection_info, self.disk_info)
        tree = conf.format_dom()
        self._assertNetworkAndProtocolEquals(tree)
        self.assertIsNone(tree.find('./auth'))
        libvirt_driver.disconnect_volume(connection_info, "vde")

    def test_libvirt_rbd_driver_auth_disabled_flags_override(self):
        libvirt_driver = volume.LibvirtNetVolumeDriver(self.fake_conn)
        connection_info = self.rbd_connection(self.vol)
        secret_type = 'ceph'
        connection_info['data']['auth_enabled'] = False
        connection_info['data']['auth_username'] = self.user
        connection_info['data']['secret_type'] = secret_type
        connection_info['data']['secret_uuid'] = self.uuid

        # NOTE: Supplying the rbd_secret_uuid will enable authentication
        # locally in nova-compute even if not enabled in nova-volume/cinder
        flags_uuid = '37152720-1785-11e2-a740-af0c1d8b8e4b'
        flags_user = 'bar'
        self.flags(rbd_user=flags_user,
                   rbd_secret_uuid=flags_uuid,
                   group='libvirt')

        conf = libvirt_driver.get_config(connection_info, self.disk_info)
        tree = conf.format_dom()
        self._assertNetworkAndProtocolEquals(tree)
        self.assertEqual(tree.find('./auth').get('username'), flags_user)
        self.assertEqual(tree.find('./auth/secret').get('type'), secret_type)
        self.assertEqual(tree.find('./auth/secret').get('uuid'), flags_uuid)
        libvirt_driver.disconnect_volume(connection_info, "vde")

    @mock.patch.object(host.Host, 'find_secret')
    @mock.patch.object(host.Host, 'create_secret')
    @mock.patch.object(host.Host, 'delete_secret')
    def test_libvirt_iscsi_net_driver(self, mock_delete, mock_create,
                                      mock_find):
        # CHAP-authenticated iSCSI via the network driver uses a libvirt
        # secret whose uuid is surfaced in the generated XML.
        mock_find.return_value = FakeSecret()
        mock_create.return_value = FakeSecret()
        libvirt_driver = volume.LibvirtNetVolumeDriver(self.fake_conn)
        connection_info = self.iscsi_connection(self.vol, self.location,
                                                self.iqn, auth=True)
        secret_type = 'iscsi'
        flags_user = connection_info['data']['auth_username']
        conf = libvirt_driver.get_config(connection_info, self.disk_info)
        tree = conf.format_dom()
        self._assertISCSINetworkAndProtocolEquals(tree)
        self.assertEqual(tree.find('./auth').get('username'), flags_user)
        self.assertEqual(tree.find('./auth/secret').get('type'), secret_type)
        self.assertEqual(tree.find('./auth/secret').get('uuid'), SECRET_UUID)
        libvirt_driver.disconnect_volume(connection_info, 'vde')

    def test_libvirt_nfs_driver(self):
        # NOTE(vish) exists is to make driver assume connecting worked
        mnt_base = '/mnt'
        self.flags(nfs_mount_point_base=mnt_base, group='libvirt')

        libvirt_driver = volume.LibvirtNFSVolumeDriver(self.fake_conn)
        self.stubs.Set(libvirt_utils, 'is_mounted', lambda x, d: False)

        export_string = '192.168.1.1:/nfs/share1'
        export_mnt_base = os.path.join(mnt_base,
                                       utils.get_hash_str(export_string))

        connection_info = {'data': {'export': export_string,
                                    'name': self.name}}
        libvirt_driver.connect_volume(connection_info, self.disk_info)
        libvirt_driver.disconnect_volume(connection_info, "vde")

        # connect_volume must publish the resolved device_path back into
        # connection_info.
        device_path = os.path.join(export_mnt_base,
                                   connection_info['data']['name'])
        self.assertEqual(device_path, connection_info['data']['device_path'])
        expected_commands = [
            ('mkdir', '-p', export_mnt_base),
            ('mount', '-t', 'nfs', export_string, export_mnt_base),
            ('umount', export_mnt_base)]
        self.assertEqual(expected_commands, self.executes)

    @mock.patch.object(volume.utils, 'execute')
    @mock.patch.object(volume.LOG, 'debug')
    @mock.patch.object(volume.LOG, 'exception')
    def test_libvirt_nfs_driver_umount_error(self, mock_LOG_exception,
                                        mock_LOG_debug, mock_utils_exe):
        # "busy" umount failures are logged at debug; anything else is an
        # exception-level log. None of them propagate.
        export_string = '192.168.1.1:/nfs/share1'
        connection_info = {'data': {'export': export_string,
                                    'name': self.name}}
        libvirt_driver = volume.LibvirtNFSVolumeDriver(self.fake_conn)
        mock_utils_exe.side_effect = processutils.ProcessExecutionError(
            None, None, None, 'umount', 'umount: device is busy.')
        libvirt_driver.disconnect_volume(connection_info, "vde")
        self.assertTrue(mock_LOG_debug.called)
        mock_utils_exe.side_effect = processutils.ProcessExecutionError(
            None, None, None, 'umount', 'umount: target is busy.')
        libvirt_driver.disconnect_volume(connection_info, "vde")
        self.assertTrue(mock_LOG_debug.called)
        mock_utils_exe.side_effect = processutils.ProcessExecutionError(
            None, None, None, 'umount', 'umount: Other error.')
        libvirt_driver.disconnect_volume(connection_info, "vde")
        self.assertTrue(mock_LOG_exception.called)

    def test_libvirt_nfs_driver_get_config(self):
        libvirt_driver = volume.LibvirtNFSVolumeDriver(self.fake_conn)
        mnt_base = '/mnt'
        self.flags(nfs_mount_point_base=mnt_base, group='libvirt')
        export_string = '192.168.1.1:/nfs/share1'
        export_mnt_base = os.path.join(mnt_base,
                                       utils.get_hash_str(export_string))
        file_path = os.path.join(export_mnt_base, self.name)
        connection_info = {'data': {'export': export_string,
                                    'name': self.name,
                                    'device_path': file_path}}
        conf = libvirt_driver.get_config(connection_info, self.disk_info)
        tree = conf.format_dom()
        self._assertFileTypeEquals(tree, file_path)
        self.assertEqual('raw', tree.find('./driver').get('type'))

    def test_libvirt_nfs_driver_already_mounted(self):
        # NOTE(vish) exists is to make driver assume connecting worked
        mnt_base = '/mnt'
        self.flags(nfs_mount_point_base=mnt_base, group='libvirt')

        libvirt_driver = volume.LibvirtNFSVolumeDriver(self.fake_conn)

        export_string = '192.168.1.1:/nfs/share1'
        export_mnt_base = os.path.join(mnt_base,
                                       utils.get_hash_str(export_string))

        connection_info = {'data': {'export': export_string,
                                    'name': self.name}}
        libvirt_driver.connect_volume(connection_info, self.disk_info)
        libvirt_driver.disconnect_volume(connection_info, "vde")

        # Already mounted: the driver only probes with findmnt and skips
        # mkdir/mount.
        expected_commands = [
            ('findmnt', '--target', export_mnt_base,
             '--source', export_string),
            ('umount', export_mnt_base)]
        self.assertEqual(self.executes, expected_commands)

    def test_libvirt_nfs_driver_with_opts(self):
        mnt_base = '/mnt'
        self.flags(nfs_mount_point_base=mnt_base, group='libvirt')

        libvirt_driver = volume.LibvirtNFSVolumeDriver(self.fake_conn)
        self.stubs.Set(libvirt_utils, 'is_mounted', lambda x, d: False)
        export_string = '192.168.1.1:/nfs/share1'
        options = '-o intr,nfsvers=3'
        export_mnt_base = os.path.join(mnt_base,
                                       utils.get_hash_str(export_string))

        connection_info = {'data': {'export': export_string,
                                    'name': self.name,
                                    'options': options}}
        libvirt_driver.connect_volume(connection_info, self.disk_info)
        libvirt_driver.disconnect_volume(connection_info, "vde")

        # Caller-supplied '-o ...' mount options are forwarded to mount.
        expected_commands = [
            ('mkdir', '-p', export_mnt_base),
            ('mount', '-t', 'nfs', '-o', 'intr,nfsvers=3',
             export_string, export_mnt_base),
            ('umount', export_mnt_base),
        ]
        self.assertEqual(expected_commands, self.executes)

    @mock.patch.object(libvirt_utils, 'is_mounted')
    def test_libvirt_smbfs_driver(self, mock_is_mounted):
        mnt_base = '/mnt'
        self.flags(smbfs_mount_point_base=mnt_base, group='libvirt')
        mock_is_mounted.return_value = False

        libvirt_driver = volume.LibvirtSMBFSVolumeDriver(self.fake_conn)
        export_string = '//192.168.1.1/volumes'
        export_mnt_base = os.path.join(mnt_base,
                                       utils.get_hash_str(export_string))
        connection_info = {'data': {'export': export_string,
                                    'name': self.name,
                                    'options': None}}
        libvirt_driver.connect_volume(connection_info, self.disk_info)
        libvirt_driver.disconnect_volume(connection_info, "vde")

        # With no explicit options the driver defaults to username=guest.
        expected_commands = [
            ('mkdir', '-p', export_mnt_base),
            ('mount', '-t', 'cifs', '-o', 'username=guest',
             export_string, export_mnt_base),
            ('umount', export_mnt_base)]
        self.assertEqual(expected_commands, self.executes)

    def test_libvirt_smbfs_driver_already_mounted(self):
        mnt_base = '/mnt'
        self.flags(smbfs_mount_point_base=mnt_base, group='libvirt')

        libvirt_driver = volume.LibvirtSMBFSVolumeDriver(self.fake_conn)
        export_string = '//192.168.1.1/volumes'
        export_mnt_base = os.path.join(mnt_base,
                                       utils.get_hash_str(export_string))
        connection_info = {'data': {'export': export_string,
                                    'name': self.name}}

        libvirt_driver.connect_volume(connection_info, self.disk_info)
        libvirt_driver.disconnect_volume(connection_info, "vde")

        expected_commands = [
            ('findmnt', '--target', export_mnt_base,
             '--source', export_string),
            ('umount', export_mnt_base)]
        self.assertEqual(expected_commands, self.executes)

    def test_libvirt_smbfs_driver_get_config(self):
        mnt_base = '/mnt'
        self.flags(smbfs_mount_point_base=mnt_base, group='libvirt')
        libvirt_driver = volume.LibvirtSMBFSVolumeDriver(self.fake_conn)
        export_string = '//192.168.1.1/volumes'
        export_mnt_base = os.path.join(mnt_base,
                                       utils.get_hash_str(export_string))
        file_path = os.path.join(export_mnt_base, self.name)
        connection_info = {'data': {'export': export_string,
                                    'name': self.name,
                                    'device_path': file_path}}
        conf = libvirt_driver.get_config(connection_info, self.disk_info)
        tree = conf.format_dom()
        self._assertFileTypeEquals(tree, file_path)

    @mock.patch.object(libvirt_utils, 'is_mounted')
    def test_libvirt_smbfs_driver_with_opts(self, mock_is_mounted):
        mnt_base = '/mnt'
        self.flags(smbfs_mount_point_base=mnt_base, group='libvirt')
        mock_is_mounted.return_value = False

        libvirt_driver = volume.LibvirtSMBFSVolumeDriver(self.fake_conn)
        export_string = '//192.168.1.1/volumes'
        options = '-o user=guest,uid=107,gid=105'
        export_mnt_base = os.path.join(mnt_base,
                                       utils.get_hash_str(export_string))
        connection_info = {'data': {'export': export_string,
                                    'name': self.name,
                                    'options': options}}

        libvirt_driver.connect_volume(connection_info, self.disk_info)
        libvirt_driver.disconnect_volume(connection_info, "vde")

        # Explicit options replace the username=guest default.
        expected_commands = [
            ('mkdir', '-p', export_mnt_base),
            ('mount', '-t', 'cifs', '-o', 'user=guest,uid=107,gid=105',
             export_string, export_mnt_base),
            ('umount', export_mnt_base)]
        self.assertEqual(expected_commands, self.executes)
A short write-up on the Appenzeller Sennenhund dog. Pedigree dogs are a category of dogs produced by the planned mating of canines within a single breed, whose parentage (dam and sire) is known. Such breeding dogs are recorded in the studbook and carry a valid official document of origin – a pedigree.
from __future__ import absolute_import import logging from jsonschema import Draft7Validator from requests.exceptions import Timeout, ConnectionError from sentry.utils.sentryappwebhookrequests import SentryAppWebhookRequestsBuffer from sentry.http import safe_urlopen from sentry.models.sentryapp import track_response_code logger = logging.getLogger(__name__) SELECT_OPTIONS_SCHEMA = { "type": "array", "definitions": { "select-option": { "type": "object", "properties": {"label": {"type": "string"}, "value": {"type": "string"}}, "required": ["label", "value"], } }, "properties": {"type": "array", "items": {"$ref": "#definitions/select-option"}}, } ISSUE_LINKER_SCHEMA = { "type": "object", "properties": { "webUrl": {"type": "string"}, "identifier": {"type": "string"}, "project": {"type": "string"}, }, "required": ["webUrl", "identifier", "project"], } SCHEMA_LIST = {"select": SELECT_OPTIONS_SCHEMA, "issue_link": ISSUE_LINKER_SCHEMA} def validate(instance, schema_type): schema = SCHEMA_LIST[schema_type] v = Draft7Validator(schema) if not v.is_valid(instance): return False return True def send_and_save_sentry_app_request(url, sentry_app, org_id, event, **kwargs): """ Send a webhook request, and save the request into the Redis buffer for the app dashboard request log Returns the response of the request kwargs ends up being the arguments passed into safe_urlopen """ buffer = SentryAppWebhookRequestsBuffer(sentry_app) slug = sentry_app.slug_for_metrics try: resp = safe_urlopen(url=url, **kwargs) except (Timeout, ConnectionError) as e: error_type = e.__class__.__name__.lower() logger.info( "send_and_save_sentry_app_request.timeout", extra={ "error_type": error_type, "organization_id": org_id, "integration_slug": sentry_app.slug, }, ) track_response_code(error_type, slug, event) # Response code of 0 represents timeout buffer.add_request(response_code=0, org_id=org_id, event=event, url=url) # Re-raise the exception because some of these tasks might retry on the exception raise 
else: track_response_code(resp.status_code, slug, event) buffer.add_request( response_code=resp.status_code, org_id=org_id, event=event, url=url, error_id=resp.headers.get("Sentry-Hook-Error"), project_id=resp.headers.get("Sentry-Hook-Project"), ) resp.raise_for_status() return resp
Finally, Loop Capital set a $90.00 price objective on Walgreens Boots Alliance and gave the company a "buy" rating in a research report on Thursday, October 26th. The share price has moved backward from its 20-day moving average, trading at a distance of -0.43%, and stays 2.53% away from its 50-day moving average. The transaction was disclosed in a filing with the SEC, which is available at this link. COPYRIGHT VIOLATION NOTICE: "Alta Capital Management LLC Purchases 76,684 Shares of Walgreens Boots Alliance Inc (WBA)" was originally reported by Week Herald and is the property of Week Herald. "This purported deal wouldn't be shocking ... but we're now struggling to see the strategic allure of the combination for Walgreens", Baird analyst Eric Coldwell said. He later pushed for the partnership between Alliance Boots and Walgreens, which eventually bought the rest of Alliance in 2014 and made Pessina chief executive officer. Annex Advisory Services LLC increased its position in shares of Walgreens Boots Alliance by 39.9% during the 2nd quarter. (NASDAQ:WBA)'s stock on Monday opened at a price of $68.62 and, when the trading day ended, closed little changed at $68.46. Annual EPS growth over the past 5 years is 9.40%. Walgreens Boots Alliance Inc has a 12-month low of $63.82 and a 12-month high of $88.00. Likewise, the performance for the quarter was recorded as -5.34% and for the year was N/A. Analysts' mean recommendation for the stock is 3.10 (a rating of less than 2 means buy, "hold" within the 3 range, "sell" within the 4 range, and "strong sell" within the 5 range). The latest exchange of 0.74 million shares is below its average trading activity of 1.5 million shares. Amerisource. Amerisource had a market cap of approximately $19.6 billion, of which Walgreens owns about $5.1 billion. Price-earnings (P/E) ratio analysis of Walgreens Boots Alliance, Inc.
Company fiscal year is ending in August and analysts' consensus recommendation is Outperform for WBA and estimated EPS for next quarter is 1.48. Stock repurchase programs are usually an indication that the company's board believes its stock is undervalued. Reportedly, Walgreens may fund a major part of the deal through debt and is forecast to accrue substantial returns. The company also recently announced a quarterly dividend, which will be paid on Monday, March 12th. The ex-dividend date is Wednesday, February 14th. This represents a $1.60 dividend on an annualized basis and a yield of 2.34%. Moving average convergence divergence (MACD) shows that the stock is on a PRICE RELATIVITY trend. The firm has current ratio of 1.00 that indicates firm has capacity of total assets to cover its short-term and long term obligations. If you are accessing this story on another domain, it was stolen and republished in violation of United States & worldwide copyright & trademark laws. The correct version of this piece of content can be read at https://www.dispatchtribunal.com/2018/02/14/walgreens-boots-alliance-inc-wba-shares-sold-by-oppenheimer-co-inc.html. Walgreens already owns 26% of the company, which has analysts confused about how the deal would benefit the pharmacy chain. The Company is a global pharmacy-led, health and wellbeing enterprise. The Company's products are marketed under a number of brands, which include No7, the Botanics range, Almus (generic medicines), Boots Pharmaceuticals and Soap & Glory (bathing and beauty brand). The person who posted the story is able to see who viewed it, but not the specific number of times one person may have. Another workaround is for users to download Stories directly from the web version of Instagram. The program dubbed " America's Harvest Box " is reported to save America $129 billion over the next 10 years. They feared it would upend a much-needed benefit for more than 80% of those in the program . 
Ivanka is also expected to attend some of the sporting events that U.S. athletes are competing in. He'll also be visiting Japan and South Korea.
# Author: Jason Lu
"""Add a book ("跟我学开车") linked to the author Alex via the ORM."""

import orm_createTable
from sqlalchemy.orm import sessionmaker, relationship

# sessionmaker() returns a session *class* bound to the engine, not an
# instance; instantiate it to get a working session.
Session_class = sessionmaker(bind=orm_createTable.engine)
Session = Session_class()  # session instance

# BUG FIX: the original assigned `b3.authors = [a1]` while the definition of
# `a1` was commented out, so the script crashed with NameError. Reuse the
# existing "Alex" author row when present, otherwise create it.
a1 = (Session.query(orm_createTable.Author)
      .filter(orm_createTable.Author.name == "Alex")
      .first())
if a1 is None:
    a1 = orm_createTable.Author(name="Alex")

b3 = orm_createTable.Book(name="跟我学开车", pub_date='2017-10-02')
b3.authors = [a1]

Session.add_all([b3])
Session.commit()

# Example queries kept for reference:
# data = Session.query(orm_createTable.Author).filter(
#     orm_createTable.Author.name == 'Alex').first()
# print(data.book)
# book_obj = Session.query(orm_createTable.Book).filter(
#     orm_createTable.Book.id == 2).first()
# print(book_obj.authors)
31 Nouveau Terrasse Nid D Abeille is a free HD wallpaper. It was uploaded on October 13, 2017 by Maria Matthews in the terrasse category. This high-definition wallpaper measures 800x800 pixels. You can use 31 Nouveau Terrasse Nid D Abeille as the background for your desktop, tablet, Android or iPhone, or any other smartphone device, for free. To download the 31 Nouveau Terrasse Nid D Abeille images, click the download button below to get multiple high-resolution versions.
import os

from setuptools import find_packages, setup

# Read the long description from the README next to this file.
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
    README = readme.read()

# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))

setup(
    name='django-smssync',
    version='0.3',
    packages=find_packages(),
    include_package_data=True,
    license='GNU GPLv3',
    description='A simple Django app to integrate with SMSSync, an SMS gateway for Android.',
    long_description=README,
    url='https://github.com/rodrigopitanga/django-smssync/',
    author='Rodrigo Pitanga',
    author_email='pitanga@members.fsf.org',
    install_requires=[
        'django-phonenumber-field',
    ],
    classifiers=[
        'Environment :: Web Environment',
        'Framework :: Django',
        'Framework :: Django :: 1.9',
        'Intended Audience :: Developers',
        'Development Status :: 3 - Alpha',
        # BUG FIX: 'License :: OSI Approved :: GNU GPLv3' is not a valid
        # trove classifier; PyPI rejects uploads carrying unknown classifiers.
        'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Topic :: Internet :: WWW/HTTP',
        'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
    ],
)
Compared to sales in the first quarter of 2018, Ford Motor Co. (NYSE: F) sold 23,700 fewer vehicles in the first quarter of this year. The 6.2% decline was considerably below the 2.2% drop for the auto industry as a whole in the 20 countries that Ford considers its traditional European markets. Ford’s first-quarter sales totaled 359,400 vehicles in its Euro 20 countries.
# Django settings for retinaburner project.
# NOTE(review): ROOT_URLCONF/WSGI_APPLICATION below point at
# "federal_spending", not "retinaburner" -- confirm which project name is
# correct.
import os.path

DEBUG = True
TEMPLATE_DEBUG = DEBUG

# Absolute path of the directory containing this settings module.
PROJECT_ROOT = os.path.dirname(os.path.realpath(__file__))

ADMINS = (
    ('Kaitlin Devine', 'kdevine@sunlightfoundation.com'),
)

MANAGERS = ADMINS

# Database settings are intentionally blank here; real values are expected to
# come from local_settings (imported at the bottom of this file).
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.',  # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
        'NAME': '',  # Or path to database file if using sqlite3.
        # The following settings are not used with sqlite3:
        'USER': '',
        'PASSWORD': '',
        'HOST': '',  # Empty for localhost through domain sockets or '127.0.0.1' for localhost through TCP.
        'PORT': '',  # Set to empty string for default.
    }
}

# Hosts/domain names that are valid for this site; required if DEBUG is False
# See https://docs.djangoproject.com/en/1.5/ref/settings/#allowed-hosts
ALLOWED_HOSTS = []

# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# In a Windows environment this must be set to your system time zone.
TIME_ZONE = 'America/Chicago'

# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'

SITE_ID = 1

# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True

# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = True

# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = True

# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/var/www/example.com/media/"
MEDIA_ROOT = ''

# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://example.com/media/", "http://media.example.com/"
MEDIA_URL = ''

# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/var/www/example.com/static/"
STATIC_ROOT = ''

# URL prefix for static files.
# Example: "http://example.com/static/", "http://static.example.com/"
STATIC_URL = '/static/'

# Additional locations of static files
STATICFILES_DIRS = (
    # Put strings here, like "/home/html/static" or "C:/www/django/static".
    # Always use forward slashes, even on Windows.
    # Don't forget to use absolute paths, not relative paths.
)

# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
    'django.contrib.staticfiles.finders.FileSystemFinder',
    'django.contrib.staticfiles.finders.AppDirectoriesFinder',
    # 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)

# Make this unique, and don't share it with anybody.
# NOTE(review): a hard-coded SECRET_KEY committed to version control is a
# security risk -- rotate it and load it from the environment or
# local_settings instead.
SECRET_KEY = 'd-%xtxi759=renuz$l@@pav@+-_fqm+=j7wcmnk_z@bc&j8pzk'

# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
    'django.template.loaders.filesystem.Loader',
    'django.template.loaders.app_directories.Loader',
    # 'django.template.loaders.eggs.Loader',
)

MIDDLEWARE_CLASSES = (
    'django.middleware.common.CommonMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    # Uncomment the next line for simple clickjacking protection:
    # 'django.middleware.clickjacking.XFrameOptionsMiddleware',
)

ROOT_URLCONF = 'federal_spending.urls'

# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'federal_spending.wsgi.application'

TEMPLATE_DIRS = (
    # Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
    # Always use forward slashes, even on Windows.
    # Don't forget to use absolute paths, not relative paths.
)

INSTALLED_APPS = (
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.sites',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'federal_spending.usaspending',
    'federal_spending.fbo',
    # Uncomment the next line to enable the admin:
    # 'django.contrib.admin',
    # Uncomment the next line to enable admin documentation:
    # 'django.contrib.admindocs',
)

SESSION_SERIALIZER = 'django.contrib.sessions.serializers.JSONSerializer'

# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'filters': {
        'require_debug_false': {
            '()': 'django.utils.log.RequireDebugFalse'
        }
    },
    'handlers': {
        'mail_admins': {
            'level': 'ERROR',
            'filters': ['require_debug_false'],
            'class': 'django.utils.log.AdminEmailHandler'
        }
    },
    'loggers': {
        'django.request': {
            'handlers': ['mail_admins'],
            'level': 'ERROR',
            'propagate': True,
        },
    }
}

# Project-local paths and data-load configuration.
TMP_DIRECTORY = PROJECT_ROOT + '/tmp'
CSV_PATH = PROJECT_ROOT + '/usaspending/downloads/csvs/'
LOGGING_DIRECTORY = PROJECT_ROOT + '/usaspending/logs'
FISCAL_YEARS = [2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
                2010, 2011, 2012, 2013, 2014]
UPDATE_YEARS = [2012, 2013, 2014]

# Machine-specific overrides; kept last so local settings win.
from local_settings import *
Is there a step-by-step guide to configuring jam detection on Repetier, or does anyone have any suggestions to help, please? I am using Repetier Host v2.0.0, Server v0.80.3 and firmware v0.92.9 and have added a Hall effect filament jam detector that pulses onto pin D40. Am I doing something obviously wrong? Is there any way to display the pulses or steps that the jam detector is reporting? And as a secondary point, is there an optimum length per pulse for the detector? I am able to add more magnets to increase the pulses per turn so can easily amend the current setting. I think I've answered some of my questions. I found the M602 command and have been using that to debug and to see the pulses, and am now tuning it to work effectively. I'd still like to know if there is an optimum pulse/mm or pulse/extruder-steps setting or design parameter so that I can optimise that. Optimum would be a Jam_Slowdown_Steps bigger than anything you experience in normal working situations. With debug you can already see what it really measures, then add some 30% on it as a start. Error should be again 30-50% on top, I'd say. If you have a real error you will trigger any value anyway, but on the other side you also want to trigger early. Thank you for your response, Repetier! Oh, and one further question - is it possible to edit the Jam settings anywhere other than by recompiling the firmware and uploading it? Is it available in the EEPROM or elsewhere accessible from the Repetier Server or Host? I've looked in both but not been able to find it yet. And yes, you understood me correctly regarding steps. Thanks again - much appreciated! I'll be able to play with the values and then lock them in to the firmware when I've got them sorted.
#!/usr/bin/env python # -*- coding: utf-8 -*- import time import pypot.primitive import pypot.robot import random from pypot.primitive.move import MoveRecorder, Move, MovePlayer from speak import Speak class Mime(pypot.primitive.Primitive): def __init__(self, robot): pypot.primitive.Primitive.__init__(self, robot) self._speak = Speak(robot) self._move = "../src/moveRecorded/mime.move" self._movem1 = "../src/moveRecorded/singe.move" self._movem2 = "../src/moveRecorded/poulet.move" self._movem3 = "../src/moveRecorded/nager.move" self._movem4 = "../src/moveRecorded/manger.move" #self._moveo = "../src/moveRecorded/faux.move" #self._moven = "../src/moveRecorded/vrai4.move" def run(self): poppy = self.robot for m in poppy.motors: m.compliant = False num1 = random.randint(1,4) if num1 == 1 : text1 = "singe" mouvement = self._movem1 elif num1 == 2 : text1 = "poulet" mouvement = self._movem2 elif num1 == 3 : text1 = "nager" mouvement = self._movem3 elif num1 == 4: text1 = "manger" mouvement = self._movem4 else: text1 = "singe" mouvement = self._movem1 print (text1) print "lancer vrai4 si le patient répond bien, faux si il se trompe." text = "Devine ce que je mime." with open(self._move) as f : m = Move.load(f) move_player = MovePlayer(self.robot, m) move_player.start() time.sleep(0.5) self._speak.start(text) time.sleep(3) with open(mouvement) as f : m = Move.load(f) move_player = MovePlayer(self.robot, m) move_player.start() # while True : # rep = raw_input() # #if appuie sur n # if rep == "n" : # with open(self._moven) as f : # m = Move.load(f) # move_player = MovePlayer(self.robot, m) # move_player.start() # time.sleep(1.5) # self._speak.start("Non, ce n'est pas ça. Essaye encore!") # elif rep == "o" : # with open(self._moveo) as f : # m = Move.load(f) # move_player = MovePlayer(self.robot, m) # move_player.start() # time.sleep(1.5) # self._speak.start("Bravo! Tu as vu comme je suis bon acteur?") # break
Dermask Water Jet Soothing Hydra Solutionâ„¢ is made with fine cellulose fiber to help active ingredients provide ultimate hydration. It contains aquaxyl and xylitol to enhance moisture retention and support the skin's barrier by preventing water loss. Aloe vera and phytoncides provide cooling relief to soothe redness, blemishes, and sunburns while the formula removes impurities to keep skin looking and feeling calm and refreshed. This product is free from 10 harmful ingredients, including artificial fragrance and color, denatured alcohol, triethanolamine, sulfate-based surfactants, triclosan, mineral oil, propylene glycol, and DEA, which can cause skin irritation. It is hypoallergenic and suited for all skin types including sensitive skin.
#!/usr/bin/env python
"""Generate example C# value snippets for swagger/OpenAPI schema types."""
import random
from collections import defaultdict

from giant.giant_base.giant_base import GiantError


def raise_(ex):
    """Raise *ex*; lets a lambda raise (``raise`` is a statement)."""
    raise ex


# swagger type -> (format -> callable(enum)). Each callable picks a value
# from the schema's ``enum`` list and renders it as a C# literal statement
# ending in ';'. The defaultdict default handles unknown/absent formats.
swagger_to_csharp_enum_example_map = {
    'string': defaultdict(lambda: lambda enum: '"' + random.choice(enum) + '";',
        {
            'guid': lambda enum: 'new Guid(' + random.choice(enum) + ');',
            'date': lambda enum: 'DateTime.parse(' + random.choice(enum) + ');',
            'date-time': lambda enum: 'DateTime.parse(' + random.choice(enum) + ');',
            'byte': lambda enum: raise_(GiantError('Shiver me timbers, I can\'t parse a enum byte type. Implement it yerself!')),
            'binary': lambda enum: raise_(GiantError('Shiver me timbers, I can\'t parse a enum binary type. Implement it yerself!')),
            # FIX: quote and ';'-terminate like every other string format;
            # previously this returned the bare enum value.
            'password': lambda enum: '"' + random.choice(enum) + '";'
        }
    ),
    'integer': defaultdict(lambda: lambda enum: str(random.choice(enum)) + ';',
        {
            'int32': lambda enum: str(random.choice(enum)) + ';',
            'int64': lambda enum: str(random.choice(enum)) + ';'
        }
    ),
    'number': defaultdict(lambda: lambda enum: str(random.choice(enum)) + ';',
        {
            'float': lambda enum: str(random.choice(enum)) + ';',
            'double': lambda enum: str(random.choice(enum)) + ';'
        }
    ),
    # FIX: the inner lambda previously took no parameter and referenced an
    # undefined global ``enum``, so invoking it the way the other entries
    # are invoked (handler(enum)) raised a TypeError.
    'boolean': defaultdict(lambda: lambda enum: str(random.choice(enum)) + ';')
}


def example_integer(schema):
    """Return an example int honouring minimum/maximum/multipleOf.

    Swagger's ``maximum`` is inclusive, so the candidate range includes it.
    (The previous ``range(minimum, maximum, multiple)`` excluded the maximum
    and raised IndexError on the empty range when minimum == maximum.)

    :param schema: swagger schema dict; ``minimum`` defaults to 1,
        ``maximum`` to minimum + 100, ``multipleOf`` to 1.
    """
    minimum = schema.get('minimum', 1)
    maximum = schema.get('maximum', minimum + 100)
    multiple = schema.get('multipleOf', 1)
    return random.choice(range(minimum, maximum + 1, multiple))


def example_float(schema):
    """Return an example float rendered as a string.

    Honours ``minimum``/``maximum``/``multipleOf`` (defaults 0.0 / 100.0 /
    0.01). NOTE: unlike example_integer this returns ``str``; callers wrap
    it in str() again, which is a no-op on an existing string.
    """
    minimum = schema.get('minimum', 0.0)
    maximum = schema.get('maximum', 100.0)
    multiple = schema.get('multipleOf', 0.01)
    return str(round(random.uniform(minimum, maximum) / multiple) * multiple)


# swagger type -> (format -> callable(schema)) producing a C# example
# expression for a non-enum schema.
swagger_to_csharp_example_map = {
    'string': defaultdict(lambda: lambda schema: '"ExampleString";',
        {
            'guid': lambda schema: 'new Guid();',
            'date': lambda schema: 'new DateTime();',
            'date-time': lambda schema: 'new DateTime();',
            'byte': lambda schema: 'new byte[10];',
            'binary': lambda schema: 'new byte[10];',
            # FIX: terminate with ';' like every other generated snippet.
            'password': lambda schema: '"thepasswordispassword";'
        }
    ),
    'integer': defaultdict(lambda: lambda schema: str(example_integer(schema)) + ';',
        {
            'int32': lambda schema: str(example_integer(schema)) + ';',
            'int64': lambda schema: str(example_integer(schema)) + ';'
        }
    ),
    'number': defaultdict(lambda: lambda schema: str(example_float(schema)) + ';',
        {
            'float': lambda schema: str(example_float(schema)) + ';',
            'double': lambda schema: str(example_float(schema)) + ';'
        }
    ),
    'boolean': defaultdict(lambda: lambda schema: random.choice(('true;', 'false;')))
}
The American art of the bar had some pretty dodgy decades in the twentieth century. Prohibition put a lot of the older master bartenders out of business in the US, while around the world, bartenders were still getting comfortable with all the various cocktails, fizzes, sours, coolers and whatnot that made up the classic repertoire. Many of the bartenders who were driving that process of familiarization had experience in New York, Chicago, San Francisco, Boston and New Orleans joints. But the greatest mixologist of the mid-twentieth century had not only never worked in the US, but also, as far as we can tell, visited just once. Constante Ribalaigua Vert was born outside of Barcelona in 1888. By 1900, his family emigrated to Havana, where his father tended bar at the venerable cafe Piña de Plata. When Ribalaigua was 16, as he told the American author Thomas Sugrue in 1935, his father “asked him if he wished to learn barkeeping.” He said yes. By then, Cuba was swarming with Americans, and the establishment had been turned into an American-style bar called “La Florida.” In 1918, he had earned enough to assume ownership of the “Floridita”—the “Little Florida”—as it was universally known. Apparently, when Ribalaigua told Sugrue that his “only hobby is his work,” he wasn’t kidding. He didn’t even drink. What he did do—besides inventing a new cocktail practically every day—was to make sure that his customers got the best drinks and the best service, whether they were Ernest Hemingway, Spencer Tracy or the couple visiting from Des Moines. He was still pulling shifts when he died, in 1952. What made his tipples so special? Ribalaigua always used quality ingredients, of course. He kept up with technology and created imaginative combinations of flavors (though not too imaginative). But most of all, he was a master of proportion. I’ve tried just about every recipe included in the little pamphlet he gave out, and they have been perfect, requiring no adjustments. 
The Longines Cocktail is a fine example of his work: unusual, but not weird, using ingredients that blend together to form a harmonious whole. Add the tea and sugar to a shaker and stir. Add the remaining ingredients and fill with ice. Shake well and strain into a chilled cocktail glass. *Pour half a cup of boiling water over a black-tea teabag and let stand for 5 minutes. Remove the teabag and chill the tea until cold. Don't lose track of Masters of Mixology: Constante Ribalaigua Vert!
import argparse

from stl.core import Core
from stl import __version__


class Cli:
    """
    Singleton that handles the user input, inits the whole machinery, and
    takes care of exiting the programme.
    """

    def __init__(self):
        """
        Constructor. Inits the argparse parser and then all the subparsers
        through the _init_* methods.

        Each of the latter defines a function that takes a Core instance and
        the argparse args as arguments, which function will be called if the
        respective command is called.
        """
        usage = 'stl [-v] [--dir DIR] subcommand'
        desc = (
            'stl is a simple time logger that enables you to '
            'keep tally of how many hours you have worked on this or that'
        )

        self.parser = argparse.ArgumentParser(usage=usage, description=desc)
        self.parser.add_argument('--version', action='version', version=__version__)
        self.parser.add_argument('-v', '--verbose', action='store_true',
                help='print debug info')
        self.parser.add_argument('--dir', help=(
            'set the directory where the data will be saved; '
            'defaults to ~/.config/stl or ~/.stl'))

        # `dest='command'` makes the chosen subcommand name available as
        # args.command (None when no subcommand was given; see run()).
        self.subparsers = self.parser.add_subparsers(dest='command',
                title='subcommands')

        self._init_start()
        self._init_stop()
        self._init_switch()
        self._init_status()
        self._init_add()
        self._init_edit()

    def _init_start(self):
        """
        Inits the subparser that handles the start command.
        """
        def start(core, args):
            # args.task is a (possibly empty) list of REMAINDER tokens.
            task = ' '.join(args.task) if args.task else ''
            return core.start(task=task)

        usage = 'stl start [task]'
        desc = (
            'make a log that you are starting to work'
        )

        subp = self.subparsers.add_parser('start', usage=usage,
                description=desc, help=desc)
        subp.add_argument('task', nargs=argparse.REMAINDER,
                help='the task that you are about to start working on')
        subp.set_defaults(func=start)

    def _init_stop(self):
        """
        Inits the subparser that handles the stop command.
        """
        def stop(core, args):
            return core.stop()

        usage = 'stl stop'
        desc = (
            'make a log that you just stopped working'
        )

        subp = self.subparsers.add_parser('stop', usage=usage,
                description=desc, help=desc)
        subp.set_defaults(func=stop)

    def _init_switch(self):
        """
        Inits the subparser that handles the switch command.
        """
        def switch(core, args):
            task = ' '.join(args.task) if args.task else ''
            return core.switch(task=task)

        usage = 'stl switch [task]'
        desc = (
            'shortcut for stl stop && stl start; '
            'stop the current task and immediately start another one'
        )

        # help shows only the part of desc before the ';'.
        subp = self.subparsers.add_parser('switch', usage=usage,
                description=desc, help=desc[:desc.find(';')])
        subp.add_argument('task', nargs=argparse.REMAINDER,
                help='the task that you are about to start working on')
        subp.set_defaults(func=switch)

    def _init_status(self):
        """
        Inits the subparser that handles the status/show command.
        """
        def status(core, args):
            # Exactly one of these mutually exclusive options may be set;
            # the first non-None one is forwarded to Core.status as a
            # (key, joined-value) pair.
            extra = None
            for key in ['day', 'week', 'month', 'year', 'span', 'task']:
                if getattr(args, key) is not None:
                    extra = (key, ' '.join(getattr(args, key)))
                    break
            return core.status(extra=extra)

        usage = (
            'stl (status|show) '
            '[-d ... | -w ... | -m ... | -y ... | -s ... | -t ...]'
        )
        desc = (
            'show a status report; '
            'when called without further arguments, '
            'it will tell you what you are doing now'
        )

        subp = self.subparsers.add_parser('status', aliases=['show'],
                usage=usage, description=desc,
                help=desc[:desc.find(';')])

        group = subp.add_mutually_exclusive_group()
        group.add_argument('-d', '--day', nargs=argparse.REMAINDER, help=(
            'report for the given day, '
            'e.g. 15 oct, 2016-10-15, today, yesterday; '
            'empty string defaults to today'))
        group.add_argument('-w', '--week', nargs=argparse.REMAINDER, help=(
            'report for the given week, '
            'possible values are this and last; '
            'empty string defaults to this week'))
        group.add_argument('-m', '--month', nargs=argparse.REMAINDER, help=(
            'report for the given month, '
            'e.g. oct, 10, 10 2016, this, last; '
            'empty string defaults to this month'))
        group.add_argument('-y', '--year', nargs=argparse.REMAINDER, help=(
            'report for the given year, '
            'e.g. 2016, this, last; '
            'empty string defaults to this year'))
        group.add_argument('-s', '--span', nargs=argparse.REMAINDER, help=(
            'report for the time span between two dates (inclusive), '
            'e.g. 15 25 oct, 15 sep 2016 25 oct 2016, 15 sep 25 oct; '
            'if you specify only one date, the second will be set to today; '
            'some restrictions: '
            'the second date (if such) cannot be less specific than the first '
            'and months cannot be numbers'))
        group.add_argument('-t', '--task', nargs=argparse.REMAINDER,
                help='report for the given task')

        subp.set_defaults(func=status)

    def _init_add(self):
        """
        Inits the subparser that handles the add command.
        """
        def add(core, args):
            return core.add(args.start, args.stop, args.task)

        usage = 'stl add start stop [task]'
        desc = (
            'directly add a log entry; '
            'you can also do this from python, take a look at '
            'stl.core.Core.add()'
        )

        subp = self.subparsers.add_parser('add', usage=usage,
                description=desc, help=desc[:desc.find(';')])
        # %% is doubled so argparse's help formatter prints a literal %.
        subp.add_argument('start',
                help='when work on the task started; use %%Y-%%m-%%dT%%H:%%M')
        subp.add_argument('stop',
                help='when work on the task stopped; use %%Y-%%m-%%dT%%H:%%M')
        subp.add_argument('task', nargs='?', default='',
                help='the task being worked on; optional')
        subp.set_defaults(func=add)

    def _init_edit(self):
        """
        Inits the subparser that handles the edit command.
        """
        def edit(core, args):
            month = ' '.join(getattr(args, 'month', []))
            core.edit(month)

        usage = 'stl edit [month]'
        desc = (
            'lets you vim the right file'
        )

        subp = self.subparsers.add_parser('edit', usage=usage,
                description=desc, help=desc)
        subp.add_argument('month', nargs=argparse.REMAINDER,
                help='the month you want to edit, e.g. oct 2016')
        subp.set_defaults(func=edit)

    def run(self, raw_args=None):
        """
        Parses the given arguments (or, except for in unit testing, sys.argv),
        inits the Core instance and transfers to that. Note that if raw_args
        is None, then argparse's parser defaults to reading sys.argv.

        Returns a human-readable string to be printed to the user.
        """
        args = self.parser.parse_args(raw_args)

        # No subcommand given: show the help text instead of erroring out.
        if args.command is None:
            return self.parser.format_help()

        core = Core(dir_path=args.dir, verbose=args.verbose)

        # Deliberately broad catch at the CLI boundary: any error from the
        # core is reported to the user as its message rather than a traceback.
        try:
            res = args.func(core, args)
        except Exception as err:
            return str(err)

        return res


def main():
    """
    The (only) entry point for the command-line interface as registered in
    setup.py. Inits a Cli instance, runs it with sys.argv, and prints the
    output to stdout.
    """
    cli = Cli()
    res = cli.run()
    if res:
        print(res.strip())
WAS $115 Knit tee shirt with cute, lacy pockets that add a fun touch. This makes a great summer top in a sage green in color, and is comfy and versatile with short sleeves, scoop neck, and not-too-long length. Color is a sage or mineral green, easily coordinating with blues and gray, or brighten it up with a colored bottom piece! Hand knit with a comfortable seamless construction, in a cool cotton and merino yarn to span three seasons. Designed to fit a womens medium, actual bust 37-38", worn with 2-3" of ease. Can also fit a size small as a slouchy top, or on a large as a more fitted top.
#!/usr/bin/python
"""
  Date of Creation: 19th Dec 2014

  Description:   Functions for loading DAVID gene ontology results

  Copyright (C) 2010-2014 Philip J. Uren,

  Authors: Philip J. Uren

  This program is free software: you can redistribute it and/or modify
  it under the terms of the GNU General Public License as published by
  the Free Software Foundation, either version 3 of the License, or
  (at your option) any later version.

  This program is distributed in the hope that it will be useful,
  but WITHOUT ANY WARRANTY; without even the implied warranty of
  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
  GNU General Public License for more details.

  You should have received a copy of the GNU General Public License
  along with this program. If not, see <http://www.gnu.org/licenses/>.
"""

# pyokit imports
from pyokit.datastruct.geneOntology import GeneOntologyEnrichmentResult


###############################################################################
#                            MODULE-LEVEL CONSTANTS                           #
###############################################################################

# A DAVID record is a tab-separated line with exactly this many fields.
NUM_FIELDS_IN_DAVID_RECORD = 13
# Index of the field used as the significance score. This is the
# Benjamini-corrected p-value column, not the raw "PValue" column (field 4).
PVAL_FIELD_NUM = 11


###############################################################################
#                                  ITERATORS                                  #
###############################################################################

def david_results_iterator(fn, verbose=False):
  """
  Iterate over a DAVID result set and yield GeneOntologyEnrichmentResult
  objects representing each of the terms reported.

  The expected format for a DAVID result file is tab-separated format.
  The following fields should be present:

  ===  ===============  ==========  ====================================
  Num  Field            Type        Example
  ===  ===============  ==========  ====================================
  0    Category         string      GOTERM_BP_FAT
  1    Term             string      GO:0046907~intracellular transport
  2    Count            int         43
  3    Percent          float       11.345646437994723
  4    PValue           float       1.3232857694449546E-9
  5    Genes            string      ARSB, KPNA6, GNAS
  6    List Total       int         310
  7    Pop Hits         int         657
  8    Pop Total        int         13528
  9    Fold Enrichment  float       2.8561103746256196
  10   Bonferroni       float       2.6293654579179204E-6
  11   Benjamini        float       2.6293654579179204E-6
  12   FDR              float       2.2734203852792234E-6
  ===  ===============  ==========  ====================================

  The first line is a header giving the field names -- this is ignored
  though, and we expect them in the order given above.

  Most of the fields are ignored at present; we take fields 0, 1, and 11
  (as the significance/p-value). When parsing the term field, we try to
  extract a term ID by splitting on tilde, but if we can't then this is
  set to None.

  :param fn:      the file to parse
  :param verbose: accepted for API compatibility; progress output is not
                  currently implemented.
  :raise IOError: if a line does not contain the expected number of
                  tab-separated fields, or the significance field cannot be
                  parsed as a float.
  """
  first = True
  # FIX: use a context manager so the file handle is closed when iteration
  # finishes or an exception propagates (previously open(fn) was leaked).
  with open(fn) as fh:
    for line in fh:
      line = line.strip()
      if line == "":
        continue
      if first:
        # skip the header line.
        first = False
        continue

      parts = line.split("\t")
      if len(parts) != NUM_FIELDS_IN_DAVID_RECORD:
        raise IOError("failed to parse " + fn + " as DAVID result file. " +
                      "Expected " + str(NUM_FIELDS_IN_DAVID_RECORD) + " " +
                      "tab-separated fields, but found " + str(len(parts)) +
                      " instead")

      # Term field looks like "GO:0046907~intracellular transport"; when
      # there is no tilde we have a name but no identifier.
      n_parts = parts[1].split("~")
      name = n_parts[-1].strip()
      identifier = n_parts[0] if len(n_parts) > 1 else None
      category = parts[0].strip()
      try:
        p_val = float(parts[PVAL_FIELD_NUM])
      except ValueError:
        raise IOError("Failed to parse " + fn + " as DAVID result file. " +
                      "Expected field " + str(PVAL_FIELD_NUM) + " " +
                      "to contain a floating point number " +
                      "(Benjamini), found this instead: " +
                      str(parts[PVAL_FIELD_NUM]))
      yield GeneOntologyEnrichmentResult(name, p_val, identifier, category)


###############################################################################
#                           BULK LOADING FUNCTIONS                            #
###############################################################################

def david_results_load_file(fn, verbose=False):
  """
  Load a set of DAVID gene ontology results as a list of
  GeneOntologyEnrichmentResult objects.

  :param fn:      the file to parse
  :param verbose: passed through to david_results_iterator.
  """
  return list(david_results_iterator(fn, verbose))
The following table shows the complete astrometric record for 08AQ118. The first three columns show the date of observation. The next six columns are RA and DEC. The next column (when provided) is the observed magnitude and filter. The next column is the object name (08AQ118) followed by the observatory code and reference code for the source of the astrometry.
# Perforce source for convert extension.
#
# Copyright 2009, Frank Kingswood <frank@kingswood-consulting.co.uk>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
#
# NOTE: Python 2 module (relies on list-returning dict.keys() with in-place
# sort in _parse, and on Mercurial-internal helpers from `util`/`common`).

from mercurial import util
from mercurial.i18n import _
from common import commit, converter_source, checktool, NoRepo

import marshal
import re

def loaditer(f):
    "Yield the dictionary objects generated by p4"
    # p4 -G writes a stream of marshalled dicts; stop on EOF or an
    # empty/falsy object.
    try:
        while True:
            d = marshal.load(f)
            if not d:
                break
            yield d
    except EOFError:
        pass

class p4_source(converter_source):
    """Convert-extension source that reads history from a Perforce depot
    or client spec by driving the `p4 -G` (marshalled output) interface."""

    def __init__(self, ui, path, rev=None):
        super(p4_source, self).__init__(ui, path, rev=rev)

        # Accept either a depot path ("//depot/...") or a client name;
        # anything else containing "/" is not a P4 repository spec.
        if "/" in path and not path.startswith('//'):
            raise NoRepo(_('%s does not look like a P4 repository') % path)

        checktool('p4', abort=False)

        self.p4changes = {}
        self.heads = {}
        self.changeset = {}
        self.files = {}
        self.tags = {}
        self.lastbranch = {}
        self.parent = {}
        self.encoding = "latin_1"
        self.depotname = {}           # mapping from local name to depot name
        # Matches p4 file types such as "ktext", "binary+x", "unicode".
        self.re_type = re.compile(
            "([a-z]+)?(text|binary|symlink|apple|resource|unicode|utf\d+)"
            "(\+\w+)?$")
        # RCS keyword expansions to collapse back to bare "$Keyword$".
        self.re_keywords = re.compile(
            r"\$(Id|Header|Date|DateTime|Change|File|Revision|Author)"
            r":[^$\n]*\$")
        self.re_keywords_old = re.compile("\$(Id|Header):[^$\n]*\$")

        self._parse(ui, path)

    def _parse_view(self, path):
        "Read changes affecting the path"
        cmd = 'p4 -G changes -s submitted %s' % util.shellquote(path)
        stdout = util.popen(cmd, mode='rb')
        for d in loaditer(stdout):
            c = d.get("change", None)
            if c:
                self.p4changes[c] = True

    def _parse(self, ui, path):
        "Prepare list of P4 filenames and revisions to import"
        ui.status(_('reading p4 views\n'))

        # read client spec or view
        if "/" in path:
            # Depot path given directly; map "//depot/dir/..." to "".
            self._parse_view(path)
            if path.startswith("//") and path.endswith("/..."):
                views = {path[:-3]:""}
            else:
                views = {"//": ""}
        else:
            # Client name given: read its spec and walk the View lines.
            cmd = 'p4 -G client -o %s' % util.shellquote(path)
            clientspec = marshal.load(util.popen(cmd, mode='rb'))

            views = {}
            for client in clientspec:
                if client.startswith("View"):
                    sview, cview = clientspec[client].split()
                    self._parse_view(sview)
                    if sview.endswith("...") and cview.endswith("..."):
                        sview = sview[:-3]
                        cview = cview[:-3]
                    # Strip the leading "//client/" from the client side.
                    cview = cview[2:]
                    cview = cview[cview.find("/") + 1:]
                    views[sview] = cview

        # list of changes that affect our source files
        self.p4changes = self.p4changes.keys()
        self.p4changes.sort(key=int)

        # list with depot pathnames, longest first (so the most specific
        # view prefix wins in the mapping loop below)
        vieworder = views.keys()
        vieworder.sort(key=len, reverse=True)

        # handle revision limiting
        startrev = self.ui.config('convert', 'p4.startrev', default=0)
        self.p4changes = [x for x in self.p4changes
                          if ((not startrev or int(x) >= int(startrev)) and
                              (not self.rev or int(x) <= int(self.rev)))]

        # now read the full changelists to get the list of file revisions
        ui.status(_('collecting p4 changelists\n'))
        lastid = None
        for change in self.p4changes:
            cmd = "p4 -G describe -s %s" % change
            stdout = util.popen(cmd, mode='rb')
            d = marshal.load(stdout)

            desc = self.recode(d["desc"])
            shortdesc = desc.split("\n", 1)[0]
            # repr()[1:-1] escapes control characters without the quotes.
            t = '%s %s' % (d["change"], repr(shortdesc)[1:-1])
            ui.status(util.ellipsis(t, 80) + '\n')

            # Changelists form a single linear branch: each commit's parent
            # is the previously imported changelist.
            if lastid:
                parents = [lastid]
            else:
                parents = []

            date = (int(d["time"]), 0)     # timezone not set
            c = commit(author=self.recode(d["user"]),
                       date=util.datestr(date, '%Y-%m-%d %H:%M:%S %1%2'),
                       parents=parents, desc=desc, branch='',
                       extra={"p4": change})

            # describe reports files as depotFile0/rev0, depotFile1/rev1, ...
            files = []
            i = 0
            while ("depotFile%d" % i) in d and ("rev%d" % i) in d:
                oldname = d["depotFile%d" % i]
                filename = None
                for v in vieworder:
                    if oldname.startswith(v):
                        filename = views[v] + oldname[len(v):]
                        break
                if filename:
                    files.append((filename, d["rev%d" % i]))
                    self.depotname[filename] = oldname
                i += 1
            self.changeset[change] = c
            self.files[change] = files
            lastid = change

        if lastid:
            self.heads = [lastid]

    def getheads(self):
        return self.heads

    def getfile(self, name, rev):
        """Fetch one file revision via `p4 print`; returns (contents, mode)
        where mode is "" (plain), "x" (executable) or "l" (symlink)."""
        cmd = 'p4 -G print %s' \
            % util.shellquote("%s#%s" % (self.depotname[name], rev))
        stdout = util.popen(cmd, mode='rb')

        mode = None
        contents = ""
        keywords = None

        for d in loaditer(stdout):
            code = d["code"]
            data = d.get("data")

            if code == "error":
                raise IOError(d["generic"], data)

            elif code == "stat":
                p4type = self.re_type.match(d["type"])
                if p4type:
                    mode = ""
                    flags = (p4type.group(1) or "") + (p4type.group(3) or "")
                    if "x" in flags:
                        mode = "x"
                    if p4type.group(2) == "symlink":
                        mode = "l"
                    # "ko" (old-style) takes precedence over plain "k".
                    if "ko" in flags:
                        keywords = self.re_keywords_old
                    elif "k" in flags:
                        keywords = self.re_keywords

            elif code == "text" or code == "binary":
                # File data can arrive in several chunks; concatenate them.
                contents += data

        # Without a "stat" record we cannot determine the file type.
        if mode is None:
            raise IOError(0, "bad stat")

        # Collapse expanded RCS keywords back to their unexpanded form.
        if keywords:
            contents = keywords.sub("$\\1$", contents)

        # Symlink targets are stored with a trailing newline; strip it.
        if mode == "l" and contents.endswith("\n"):
            contents = contents[:-1]

        return contents, mode

    def getchanges(self, rev):
        # No copy/rename tracking: the second element (copies) is empty.
        return self.files[rev], {}

    def getcommit(self, rev):
        return self.changeset[rev]

    def gettags(self):
        # Tags are never populated by this source.
        return self.tags

    def getchangedfiles(self, rev, i):
        return sorted([x[0] for x in self.files[rev]])
What do you do here at North? When did you begin working at North? What is your favorite thing to do in Indianapolis? What is your favorite Bible verse or passage?
#!/usr/bin/python
#
# Copyright 2013 Zynga Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Daily xhprof profile aggregation for zperfmon: unpacks the last day's
# half-hourly xhprof tarballs, aggregates per-page profiles and manifests,
# and re-packs the result.
#
# NOTE: Python 2 script (`print` statements, `except Exception, e`, the
# `commands` module).

import sys,os,re,syslog,traceback,time
from os import path
from glob import glob
import commands
from itertools import groupby
import shutil
import json

server_config_file = "/etc/zperfmon/server.cfg"
daily_raw_dir = "_raw"

# holder class for putting in config parameters
class CFG:
    def set_option(self, option, value):
        # Config keys become attributes on the instance.
        setattr(self, option, value)
        pass

def debug_print(*args):
    # Debug output is disabled; re-enable by removing the early return.
    return
    #print(args)

#
# Read the server config file which is php code that creates a map.
#
def get_server_config(config_file):
    config_content = open(config_file).read()
    cfg = CFG()
    # Pick up lines of the form: "key" => "value"
    for m in re.finditer("^[\t ]*\"([^\"]+)\"\s*=>\s*\"([^\"]+)\"",
                         config_content, re.MULTILINE):
        cfg.set_option(m.group(1), m.group(2))
    return cfg

# Extracted profile paths look like .../_raw/<slot>/<runid>.<page>.xhprof
page_re = re.compile('.*_raw/[0-9]+/(?P<runid>[0-9]*)\.(?P<filename>.*)\.xhprof')

def pagename(k):
    # Returns the page name embedded in an xhprof file path, or None when
    # the path does not match page_re.
    m = page_re.match(k)
    if(m):
        return m.group('filename')

def collect_profiles(cfg, rawpath):
    """Group the extracted .xhprof files under rawpath by page name."""
    xhprof_files = glob("%s/*/*.xhprof" % rawpath)
    groups = {}
    for f in xhprof_files:
        k = pagename(f)
        groups.setdefault(k,[])
        groups[k].append(f)
    return groups

#
# Find all manifest.json files one level under 'source' and combine them. Dump
# result as json 'target'/manifest.json. Manifests are loaded with an eval()
# since the pure python json[encode|decode] (for 2.4) is very slow.
#
def aggregate_manifests(source, target):
    # NOTE(review): json.read/json.write below are the legacy python-json
    # module's API, not the stdlib json module's — confirm the deployment
    # environment ships that module.
    aggregate = {}
    for manifest in glob(path.join(source, "*", "manifest.json")):
        try:
            m = json.read(open(manifest).read())
            #
            # Very simplistic, we could use collections and sets and all
            # that. Not enough gain here to justify the trouble.
            #
            for page, count in [[k, v[1]] for k,v in m.items()]:
                if not aggregate.has_key(page):
                    aggregate[page] = [page, 0]
                aggregate[page][1] += count
        except Exception, e:
            # A bad manifest is logged to syslog and skipped.
            info = sys.exc_info()
            syslog.syslog(str(info[0]))
            syslog.syslog(traceback.format_exc())

    agg_file = path.join(target, "manifest.json")
    open(agg_file, "w").write(json.write(aggregate))

    return agg_file

# look backwards from timestamp's half hour to num elements back
# num is 48 by default, because it's in 1/2 hour slots
# root_upload_dir=/db/zperfmon/<game_name>/timeslots/
#
def extract_profiles(cfg, root_upload_dir, timestamp, num=48):
    # Slot ids are half-hour buckets (epoch seconds / 1800).
    # NOTE(review): the window is hard-coded to 48 below; the `num`
    # parameter is accepted but not used.
    end = int(timestamp / 1800)
    start = end - 48
    slots = range(start, end)
    files = map(lambda x: path.join(root_upload_dir,str(x),"xhprof",cfg.xhprof_tbz_name), slots);

    aggregate_dir = path.normpath(path.join(root_upload_dir,'..','xhprof.daily', str(end), cfg.blob_dir))
    rawpath = path.normpath(path.join(root_upload_dir,'..','xhprof.daily', str(end), daily_raw_dir))
    if(not path.exists(rawpath)):
        os.makedirs(rawpath)
    if(not path.exists(aggregate_dir)):
        os.makedirs(aggregate_dir)

    # Unpack each half-hourly tarball into its own numbered directory;
    # failures are reported and ignored (best effort).
    count = 0
    for f in files:
        os.makedirs("%s/%d" % (rawpath, count))
        cmd = "tar --strip-components 1 -xjf %s -C %s/%d" % (f, rawpath, count)
        result = commands.getstatusoutput(cmd)
        if(result[0]):
            print "Command failed: %s" % cmd
            print "Ignoring error and continuing"
        count += 1

    aggregate_manifests(rawpath, aggregate_dir)

    return (aggregate_dir, end, collect_profiles(cfg, rawpath))

def aggregate_runs(cfg, name, aggregate_dir, xhprofs):
    # Delegates to the configured external aggregation command.
    cmd = "%s %s %s %s %s" % (cfg.profile_aggregation_command, cfg.game_name,
                              name, aggregate_dir, " ".join(xhprofs))
    result = commands.getstatusoutput(cmd)
    if(result[0]):
        print "Command failed: %s" % cmd

def extract_functions(cfg, name, aggregate_dir, xhprofs):
    # Delegates to the configured external function-extraction command.
    cmd = "%s %s %s %s %s" % (cfg.profile_extraction_command, cfg.game_name,
                              name, aggregate_dir, " ".join(xhprofs))
    result = commands.getstatusoutput(cmd)
    if(result[0]):
        print "Command failed: %s" % cmd

def cleanup_and_bzip(server_cfg, exec_dir):
    # create one tbz for inserting
    cwd = os.getcwd()
    os.chdir(exec_dir)

    #
    # Remove the raw directory
    #
    shutil.rmtree(daily_raw_dir)

    #
    # bzip to insert
    #
    cmd = "tar jcf %s %s/" % (server_cfg.xhprof_tbz_name, server_cfg.blob_dir)
    print cmd
    result = commands.getstatusoutput(cmd)
    debug_print(cmd)

    os.chdir(cwd)

    # ignore failures, recovery and sanity is not worth the returns
    if result[0]:
        return None

def usage():
    print "error !"

def main(cfg):
    # argv: <game_name> <root_upload_dir> [timestamp]
    args = sys.argv[1:]
    if(len(args) < 2 or len(args) > 3):
        usage()
        return

    game_name = args[0]
    # xhprof_dir = args[1]
    root_upload_dir = args[1]
    if(len(args) == 3):
        timestamp = int(args[2])
    else:
        timestamp = int(time.time())

    cfg.set_option("game_name", game_name)

    # (aggregate_dir, day, profile_slots) = extract_profiles(cfg, xhprof_dir, timestamp)
    (aggregate_dir, end, profile_slots) = extract_profiles(cfg, root_upload_dir, timestamp)

    # Aggregate and extract per page; slot names are "<day-slot>.<page>".
    for name in profile_slots.keys():
        aggregate_runs(cfg, "%s.%s" % (end,name), aggregate_dir, profile_slots[name])
        # TODO: optimize this to generate off the aggregate file
        extract_functions(cfg, "%s.%s" % (end, name), aggregate_dir, profile_slots[name])

    cleanup_and_bzip(cfg, path.normpath(path.join(aggregate_dir, "..")))

if __name__ == "__main__":
    # Exit statuses: 37 = never ran, 38 = uncaught exception, 0 = success
    # (main() returns None on success, and sys.exit(None) exits with 0).
    status = 37
    try:
        server_cfg = get_server_config(server_config_file)
        status = main(server_cfg)
    except:
        info = sys.exc_info()
        syslog.syslog(str(info[0]))
        syslog.syslog(traceback.format_exc())
        status = 38
        print traceback.format_exc()

    sys.exit(status)
Hello there! My name is Mrs. Rebecca Dvorak! This is my 7th year teaching Kindergarten at Bennet Elementary. Before I came to Bennet I was the Director of the Student Child . . . Hello there! My name is Mrs. Rebecca Dvorak! This is my fifth year teaching Kindergarten at Bennet Elementary. Before I came to Bennet I was the Director of the Student Child Learning Center at Lincoln . . .
# schema.py # Copyright (C) 2005, 2006, 2007 Michael Bayer mike_mp@zzzcomputing.com # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php """The schema module provides the building blocks for database metadata. This means all the entities within a SQL database that we might want to look at, modify, or create and delete are described by these objects, in a database-agnostic way. A structure of SchemaItems also provides a *visitor* interface which is the primary method by which other methods operate upon the schema. The SQL package extends this structure with its own clause-specific objects as well as the visitor interface, so that the schema package *plugs in* to the SQL package. """ import re, inspect from sqlalchemy import types, exceptions, util, databases from sqlalchemy.sql import expression, visitors import sqlalchemy URL = None __all__ = ['SchemaItem', 'Table', 'Column', 'ForeignKey', 'Sequence', 'Index', 'ForeignKeyConstraint', 'PrimaryKeyConstraint', 'CheckConstraint', 'UniqueConstraint', 'DefaultGenerator', 'Constraint', 'MetaData', 'ThreadLocalMetaData', 'SchemaVisitor', 'PassiveDefault', 'ColumnDefault'] class SchemaItem(object): """Base class for items that define a database schema.""" __metaclass__ = expression._FigureVisitName def _init_items(self, *args): """Initialize the list of child items for this SchemaItem.""" for item in args: if item is not None: item._set_parent(self) def _get_parent(self): raise NotImplementedError() def _set_parent(self, parent): """Associate with this SchemaItem's parent object.""" raise NotImplementedError() def get_children(self, **kwargs): """used to allow SchemaVisitor access""" return [] def __repr__(self): return "%s()" % self.__class__.__name__ def _get_bind(self, raiseerr=False): """Return the engine or None if no engine.""" if raiseerr: m = self.metadata e = m and m.bind or None if e is None: raise exceptions.InvalidRequestError("This 
def _get_table_key(name, schema):
    """Return the key under which a table is registered in a ``MetaData``.

    Tables in the default schema are keyed by bare name; otherwise the key
    is ``"<schema>.<name>"``.
    """
    if schema is None:
        return name
    return schema + "." + name


class _TableSingleton(expression._FigureVisitName):
    """Metaclass providing per-``MetaData`` singleton behavior for ``Table``."""

    def __call__(self, name, metadata, *args, **kwargs):
        schema = kwargs.get('schema', None)
        autoload = kwargs.pop('autoload', False)
        autoload_with = kwargs.pop('autoload_with', False)
        mustexist = kwargs.pop('mustexist', False)
        useexisting = kwargs.pop('useexisting', False)
        include_columns = kwargs.pop('include_columns', None)
        key = _get_table_key(name, schema)
        try:
            table = metadata.tables[key]
            if args and not useexisting:
                raise exceptions.ArgumentError("Table '%s' is already defined for this MetaData instance." % key)
            return table
        except KeyError:
            if mustexist:
                raise exceptions.ArgumentError("Table '%s' not defined" % (key))
            table = type.__call__(self, name, metadata, **kwargs)
            table._set_parent(metadata)
            # Reflect column definitions from the database when 'autoload' is
            # set.  This happens after the table is registered in the singleton
            # dictionary so that circular foreign keys can resolve.
            if autoload:
                try:
                    if autoload_with:
                        autoload_with.reflecttable(
                            table, include_columns=include_columns)
                    else:
                        metadata._get_bind(raiseerr=True).reflecttable(
                            table, include_columns=include_columns)
                except exceptions.NoSuchTableError:
                    del metadata.tables[key]
                    raise
            # Initialize the column/constraint objects last, after reflection,
            # so that user-specified items override reflected ones.
            table._init_items(*args)
            return table


class Table(SchemaItem, expression.TableClause):
    """Represent a relational database table bound to a ``MetaData``.

    Whereas ``TableClause`` models a table as it is used inside a SQL
    expression, ``Table`` models the table as it exists in a database
    schema.  When the owning ``MetaData`` is bound to an engine, the table
    can reflect itself (``autoload=True``), issue ``create()``/``drop()``
    without an explicit connectable, and propagate the engine to generated
    SQL constructs for implicit execution.
    """

    __metaclass__ = _TableSingleton

    def __init__(self, name, metadata, **kwargs):
        """Construct a Table.

        Instantiation is routed through the ``_TableSingleton`` metaclass:
        constructing the same name/schema pair against the same ``MetaData``
        returns the existing instance.

        Positional column/constraint objects are consumed by the metaclass
        via ``_init_items``; recognized keywords are ``schema``, ``quote``,
        ``quote_schema`` and ``owner``.  Remaining keywords must be
        dialect-prefixed options (e.g. ``mysql_engine``) and are stored in
        ``self.kwargs``.
        """
        super(Table, self).__init__(name)
        self.metadata = metadata
        self.schema = kwargs.pop('schema', None)
        self.indexes = util.Set()
        self.constraints = util.Set()
        self._columns = expression.ColumnCollection()
        self.primary_key = PrimaryKeyConstraint()
        self._foreign_keys = util.OrderedSet()
        self.quote = kwargs.pop('quote', False)
        self.quote_schema = kwargs.pop('quote_schema', False)
        if self.schema is not None:
            self.fullname = "%s.%s" % (self.schema, self.name)
        else:
            self.fullname = self.name
        self.owner = kwargs.pop('owner', None)
        # Anything left over must carry a known dialect prefix; reject
        # unrecognized keyword names outright.
        unrecognized = [k for k in kwargs
                        if not re.match(r'^(?:%s)_' % '|'.join(databases.__all__), k)]
        if len(unrecognized):
            raise TypeError("Invalid argument(s) for Table: %s" % repr(kwargs.keys()))
        # store extra kwargs, which should only contain db-specific options
        self.kwargs = kwargs

    key = property(lambda self: _get_table_key(self.name, self.schema))

    def _export_columns(self, columns=None):
        # FromClause's column-export logic is implemented differently by
        # TableClause and Table; disable the inherited behavior here.
        pass

    def _set_primary_key(self, pk):
        if getattr(self, '_primary_key', None) in self.constraints:
            self.constraints.remove(self._primary_key)
        self._primary_key = pk
        self.constraints.add(pk)

    primary_key = property(lambda s: s._primary_key, _set_primary_key)

    def __repr__(self):
        return "Table(%s)" % ', '.join(
            [repr(self.name)] + [repr(self.metadata)] +
            [repr(x) for x in self.columns] +
            ["%s=%s" % (k, repr(getattr(self, k))) for k in ['schema']])

    def __str__(self):
        return _get_table_key(self.encodedname, self.schema)

    def append_column(self, column):
        """Append a ``Column`` to this ``Table``."""
        column._set_parent(self)

    def append_constraint(self, constraint):
        """Append a ``Constraint`` to this ``Table``."""
        constraint._set_parent(self)

    def _get_parent(self):
        return self.metadata

    def _set_parent(self, metadata):
        metadata.tables[_get_table_key(self.name, self.schema)] = self
        self.metadata = metadata

    def get_children(self, column_collections=True, schema_visitor=False, **kwargs):
        if not schema_visitor:
            return expression.TableClause.get_children(
                self, column_collections=column_collections, **kwargs)
        if column_collections:
            return [c for c in self.columns]
        return []

    def exists(self, bind=None):
        """Return True if this table exists."""
        if bind is None:
            bind = self._get_bind(raiseerr=True)

        def do(conn):
            return conn.dialect.has_table(conn, self.name, schema=self.schema)
        return bind.run_callable(do)

    def create(self, bind=None, checkfirst=False):
        """Issue a ``CREATE`` statement for this table.

        See also ``metadata.create_all()``.
        """
        self.metadata.create_all(bind=bind, checkfirst=checkfirst, tables=[self])

    def drop(self, bind=None, checkfirst=False):
        """Issue a ``DROP`` statement for this table.

        See also ``metadata.drop_all()``.
        """
        self.metadata.drop_all(bind=bind, checkfirst=checkfirst, tables=[self])

    def tometadata(self, metadata, schema=None):
        """Return a copy of this ``Table`` associated with a different ``MetaData``."""
        try:
            if schema is None:
                schema = self.schema
            key = _get_table_key(self.name, schema)
            return metadata.tables[key]
        except KeyError:
            items = [c.copy() for c in self.columns] + \
                    [c.copy() for c in self.constraints]
            return Table(self.name, metadata, schema=schema, *items)
class Column(SchemaItem, expression._ColumnClause):
    """Represent a column in a database table.

    A subclass of ``expression._ColumnClause`` representing an actual
    column of an existing table, in the same way that ``Table`` extends
    ``TableClause``.
    """

    def __init__(self, name, type_, *args, **kwargs):
        """Construct a new ``Column`` object.

        name
          The column name exactly as it appears (or will appear) in the
          database.

        type\_
          A ``TypeEngine`` describing the column's type.  May be None when
          the column carries a ``ForeignKey``, in which case the referenced
          column's type is adopted.

        \*args
          ``Constraint``, ``ForeignKey``, ``ColumnDefault`` and ``Sequence``
          objects.

        \**kwargs
          Recognized keywords: ``key``, ``primary_key``, ``nullable``,
          ``default``, ``_is_oid``, ``index``, ``unique``, ``quote``,
          ``onupdate``, ``autoincrement``.
        """
        super(Column, self).__init__(name, None, type_)
        self.args = args
        self.key = kwargs.pop('key', name)
        self._primary_key = kwargs.pop('primary_key', False)
        # nullable defaults to False for primary key columns.
        self.nullable = kwargs.pop('nullable', not self.primary_key)
        self._is_oid = kwargs.pop('_is_oid', False)
        self.default = kwargs.pop('default', None)
        self.index = kwargs.pop('index', None)
        self.unique = kwargs.pop('unique', None)
        self.quote = kwargs.pop('quote', False)
        self.onupdate = kwargs.pop('onupdate', None)
        self.autoincrement = kwargs.pop('autoincrement', True)
        self.constraints = util.Set()
        self.__originating_column = self
        self._foreign_keys = util.OrderedSet()
        if kwargs:
            raise exceptions.ArgumentError("Unknown arguments passed to Column: " + repr(kwargs.keys()))

    primary_key = util.SimpleProperty('_primary_key')
    foreign_keys = util.SimpleProperty('_foreign_keys')
    columns = property(lambda self: [self])

    def __str__(self):
        if self.table is not None:
            if self.table.named_with_column():
                return self.table.encodedname + "." + self.encodedname
            return self.encodedname
        return self.encodedname

    def _get_bind(self):
        return self.table.bind

    def references(self, column):
        """Return True if this column references the given column via foreign key."""
        for fk in self.foreign_keys:
            if fk.column is column:
                return True
        return False

    def append_foreign_key(self, fk):
        fk._set_parent(self)

    def __repr__(self):
        kwarg = []
        if self.key != self.name:
            kwarg.append('key')
        if self._primary_key:
            kwarg.append('primary_key')
        if not self.nullable:
            kwarg.append('nullable')
        if self.onupdate:
            kwarg.append('onupdate')
        if self.default:
            kwarg.append('default')
        return "Column(%s)" % ', '.join(
            [repr(self.name)] + [repr(self.type)] +
            [repr(x) for x in self.foreign_keys if x is not None] +
            [repr(x) for x in self.constraints] +
            ["%s=%s" % (k, repr(getattr(self, k))) for k in kwarg])

    def _get_parent(self):
        return self.table

    def _set_parent(self, table):
        self.metadata = table.metadata
        if getattr(self, 'table', None) is not None:
            raise exceptions.ArgumentError("this Column already has a table!")
        if not self._is_oid:
            self._pre_existing_column = table._columns.get(self.key)
            table._columns.add(self)
        else:
            self._pre_existing_column = None
        if self.primary_key:
            table.primary_key.add(self)
        elif self.key in table.primary_key:
            raise exceptions.ArgumentError("Trying to redefine primary-key column '%s' as a non-primary-key column on table '%s'" % (self.key, table.fullname))
            # if we think this should not raise an error, we'd instead do this:
            #table.primary_key.remove(self)
        self.table = table
        if self.index:
            if isinstance(self.index, basestring):
                raise exceptions.ArgumentError("The 'index' keyword argument on Column is boolean only. To create indexes with a specific name, create an explicit Index object external to the Table.")
            Index('ix_%s' % self._label, self, unique=self.unique)
        elif self.unique:
            if isinstance(self.unique, basestring):
                raise exceptions.ArgumentError("The 'unique' keyword argument on Column is boolean only. To create unique constraints or indexes with a specific name, append an explicit UniqueConstraint to the Table's list of elements, or create an explicit Index object external to the Table.")
            table.append_constraint(UniqueConstraint(self.key))
        to_initialize = list(self.args)
        if self.default is not None:
            to_initialize.append(ColumnDefault(self.default))
        if self.onupdate is not None:
            to_initialize.append(ColumnDefault(self.onupdate, for_update=True))
        self._init_items(*to_initialize)
        self.args = None

    def copy(self):
        """Create an uninitialized copy of this ``Column``.

        Used by ``Table.tometadata``.
        """
        return Column(self.name, self.type, self.default,
                      key=self.key,
                      primary_key=self.primary_key,
                      nullable=self.nullable,
                      _is_oid=self._is_oid,
                      quote=self.quote,
                      index=self.index,
                      *[c.copy() for c in self.constraints])

    def _make_proxy(self, selectable, name=None):
        """Create a *proxy* for this column.

        The proxy is a copy of this ``Column`` referenced by a different
        parent, such as an alias or select statement.
        """
        fk = [ForeignKey(f._colspec) for f in self.foreign_keys]
        c = Column(name or self.name, self.type, self.default,
                   key=name or self.key,
                   primary_key=self.primary_key,
                   nullable=self.nullable,
                   _is_oid=self._is_oid,
                   quote=self.quote,
                   *fk)
        c.table = selectable
        c.orig_set = self.orig_set
        c.__originating_column = self.__originating_column
        c._distance = self._distance + 1
        c._pre_existing_column = self._pre_existing_column
        if not c._is_oid:
            selectable.columns.add(c)
            if self.primary_key:
                selectable.primary_key.add(c)
        [c._init_items(f) for f in fk]
        return c

    def get_children(self, schema_visitor=False, **kwargs):
        if schema_visitor:
            return [x for x in (self.default, self.onupdate) if x is not None] + \
                list(self.foreign_keys) + list(self.constraints)
        return expression._ColumnClause.get_children(self, **kwargs)
""" self._colspec = column self._column = None self.constraint = constraint self.use_alter = use_alter self.name = name self.onupdate = onupdate self.ondelete = ondelete def __repr__(self): return "ForeignKey(%s)" % repr(self._get_colspec()) def copy(self): """Produce a copy of this ForeignKey object.""" return ForeignKey(self._get_colspec()) def _get_colspec(self): if isinstance(self._colspec, basestring): return self._colspec elif self._colspec.table.schema is not None: return "%s.%s.%s" % (self._colspec.table.schema, self._colspec.table.name, self._colspec.key) else: return "%s.%s" % (self._colspec.table.name, self._colspec.key) def references(self, table): """Return True if the given table is referenced by this ``ForeignKey``.""" return table.corresponding_column(self.column, False) is not None def _init_column(self): # ForeignKey inits its remote column as late as possible, so tables can # be defined without dependencies if self._column is None: if isinstance(self._colspec, basestring): # locate the parent table this foreign key is attached to. 
# we use the "original" column which our parent column represents # (its a list of columns/other ColumnElements if the parent table is a UNION) for c in self.parent.orig_set: if isinstance(c, Column): parenttable = c.table break else: raise exceptions.ArgumentError("Parent column '%s' does not descend from a table-attached Column" % str(self.parent)) m = re.match(r"^(.+?)(?:\.(.+?))?(?:\.(.+?))?$", self._colspec, re.UNICODE) if m is None: raise exceptions.ArgumentError("Invalid foreign key column specification: " + self._colspec) if m.group(3) is None: (tname, colname) = m.group(1, 2) schema = None else: (schema,tname,colname) = m.group(1,2,3) if _get_table_key(tname, schema) not in parenttable.metadata: raise exceptions.InvalidRequestError("Could not find table '%s' with which to generate a foreign key" % tname) table = Table(tname, parenttable.metadata, mustexist=True, schema=schema) try: if colname is None: # colname is None in the case that ForeignKey argument was specified # as table name only, in which case we match the column name to the same # column on the parent. 
key = self.parent self._column = table.c[self.parent.key] else: self._column = table.c[colname] except KeyError, e: raise exceptions.ArgumentError("Could not create ForeignKey '%s' on table '%s': table '%s' has no column named '%s'" % (self._colspec, parenttable.name, table.name, str(e))) else: self._column = self._colspec # propigate TypeEngine to parent if it didnt have one if isinstance(self.parent.type, types.NullType): self.parent.type = self._column.type return self._column column = property(lambda s: s._init_column()) def _get_parent(self): return self.parent def _set_parent(self, column): self.parent = column if self.parent._pre_existing_column is not None: # remove existing FK which matches us for fk in self.parent._pre_existing_column.foreign_keys: if fk._colspec == self._colspec: self.parent.table.foreign_keys.remove(fk) self.parent.table.constraints.remove(fk.constraint) if self.constraint is None and isinstance(self.parent.table, Table): self.constraint = ForeignKeyConstraint([],[], use_alter=self.use_alter, name=self.name, onupdate=self.onupdate, ondelete=self.ondelete) self.parent.table.append_constraint(self.constraint) self.constraint._append_fk(self) self.parent.foreign_keys.add(self) self.parent.table.foreign_keys.add(self) class DefaultGenerator(SchemaItem): """Base class for column *default* values.""" def __init__(self, for_update=False, metadata=None): self.for_update = for_update self.metadata = util.assert_arg_type(metadata, (MetaData, type(None)), 'metadata') def _get_parent(self): return getattr(self, 'column', None) def _set_parent(self, column): self.column = column self.metadata = self.column.table.metadata if self.for_update: self.column.onupdate = self else: self.column.default = self def execute(self, bind=None, **kwargs): if bind is None: bind = self._get_bind(raiseerr=True) return bind._execute_default(self, **kwargs) def __repr__(self): return "DefaultGenerator()" class PassiveDefault(DefaultGenerator): """A default that takes 
class PassiveDefault(DefaultGenerator):
    """A default that takes effect on the database side."""

    def __init__(self, arg, **kwargs):
        super(PassiveDefault, self).__init__(**kwargs)
        self.arg = arg

    def __repr__(self):
        return "PassiveDefault(%s)" % repr(self.arg)


class ColumnDefault(DefaultGenerator):
    """A plain default value on a column.

    The value may be a constant, a Python callable, or a SQL clause.
    """

    def __init__(self, arg, **kwargs):
        super(ColumnDefault, self).__init__(**kwargs)
        if callable(arg):
            if not inspect.isfunction(arg):
                # Builtin or callable object: wrap so it is invoked with no
                # arguments, ignoring the execution context.
                self.arg = lambda ctx: arg()
            else:
                spec = inspect.getargspec(arg)
                if len(spec[0]) == 0:
                    # zero-argument function: wrap to discard the context.
                    self.arg = lambda ctx: arg()
                else:
                    if spec[3] is not None:
                        defaulted = len(spec[3])
                    else:
                        defaulted = 0
                    # At most one required positional argument (the context).
                    if len(spec[0]) - defaulted > 1:
                        raise exceptions.ArgumentError(
                            "ColumnDefault Python function takes zero or one positional arguments")
                    else:
                        self.arg = arg
        else:
            self.arg = arg

    def _visit_name(self):
        if self.for_update:
            return "column_onupdate"
        return "column_default"
    __visit_name__ = property(_visit_name)

    def __repr__(self):
        return "ColumnDefault(%s)" % repr(self.arg)


class Sequence(DefaultGenerator):
    """Represents a named sequence."""

    def __init__(self, name, start=None, increment=None, schema=None,
                 optional=False, quote=False, **kwargs):
        super(Sequence, self).__init__(**kwargs)
        self.name = name
        self.start = start
        self.increment = increment
        self.optional = optional
        self.quote = quote
        self.schema = schema
        self.kwargs = kwargs

    def __repr__(self):
        return "Sequence(%s)" % ', '.join(
            [repr(self.name)] +
            ["%s=%s" % (k, repr(getattr(self, k))) for k in ['start', 'increment', 'optional']])

    def _set_parent(self, column):
        super(Sequence, self)._set_parent(column)
        column.sequence = self

    def create(self, bind=None, checkfirst=True):
        """Creates this sequence in the database."""
        if bind is None:
            bind = self._get_bind(raiseerr=True)
        bind.create(self, checkfirst=checkfirst)

    def drop(self, bind=None, checkfirst=True):
        """Drops this sequence from the database."""
        if bind is None:
            bind = self._get_bind(raiseerr=True)
        bind.drop(self, checkfirst=checkfirst)


class Constraint(SchemaItem):
    """Represent a table-level ``Constraint`` such as a composite primary
    key, foreign key, or unique constraint.

    Implements a hybrid of dict/setlike behavior with regards to the list
    of underlying columns.
    """

    def __init__(self, name=None):
        self.name = name
        self.columns = expression.ColumnCollection()

    def __contains__(self, x):
        return self.columns.contains_column(x)

    def keys(self):
        return self.columns.keys()

    def __add__(self, other):
        return self.columns + other

    def __iter__(self):
        return iter(self.columns)

    def __len__(self):
        return len(self.columns)

    def copy(self):
        raise NotImplementedError()

    def _get_parent(self):
        return getattr(self, 'table', None)


class CheckConstraint(Constraint):
    """A SQL CHECK constraint, attachable to a table or a single column."""

    def __init__(self, sqltext, name=None):
        super(CheckConstraint, self).__init__(name)
        self.sqltext = sqltext

    def _visit_name(self):
        # visitor name depends on whether the parent is a table or column.
        if isinstance(self.parent, Table):
            return "check_constraint"
        return "column_check_constraint"
    __visit_name__ = property(_visit_name)

    def _set_parent(self, parent):
        self.parent = parent
        parent.constraints.add(self)

    def copy(self):
        return CheckConstraint(self.sqltext, name=self.name)


class ForeignKeyConstraint(Constraint):
    """Table-level foreign key constraint, represents a collection of
    ``ForeignKey`` objects."""

    def __init__(self, columns, refcolumns, name=None, onupdate=None,
                 ondelete=None, use_alter=False):
        super(ForeignKeyConstraint, self).__init__(name)
        self.__colnames = columns
        self.__refcolnames = refcolumns
        self.elements = util.OrderedSet()
        self.onupdate = onupdate
        self.ondelete = ondelete
        if self.name is None and use_alter:
            raise exceptions.ArgumentError("Alterable ForeignKey/ForeignKeyConstraint requires a name")
        self.use_alter = use_alter

    def _set_parent(self, table):
        self.table = table
        table.constraints.add(self)
        for (c, r) in zip(self.__colnames, self.__refcolnames):
            self.append_element(c, r)

    def append_element(self, col, refcol):
        fk = ForeignKey(refcol, constraint=self, name=self.name,
                        onupdate=self.onupdate, ondelete=self.ondelete,
                        use_alter=self.use_alter)
        fk._set_parent(self.table.c[col])
        self._append_fk(fk)

    def _append_fk(self, fk):
        self.columns.add(self.table.c[fk.parent.key])
        self.elements.add(fk)

    def copy(self):
        return ForeignKeyConstraint(
            [x.parent.name for x in self.elements],
            [x._get_colspec() for x in self.elements],
            name=self.name, onupdate=self.onupdate,
            ondelete=self.ondelete, use_alter=self.use_alter)


class PrimaryKeyConstraint(Constraint):
    """A table-level PRIMARY KEY constraint over one or more columns."""

    def __init__(self, *columns, **kwargs):
        super(PrimaryKeyConstraint, self).__init__(name=kwargs.pop('name', None))
        self.__colnames = list(columns)

    def _set_parent(self, table):
        self.table = table
        table.primary_key = self
        for c in self.__colnames:
            self.append_column(table.c[c])

    def add(self, col):
        self.append_column(col)

    def remove(self, col):
        col.primary_key = False
        del self.columns[col.key]

    def append_column(self, col):
        self.columns.add(col)
        col.primary_key = True

    def copy(self):
        return PrimaryKeyConstraint(name=self.name, *[c.key for c in self])

    def __eq__(self, other):
        return self.columns == other


class UniqueConstraint(Constraint):
    """A table-level UNIQUE constraint over one or more columns."""

    def __init__(self, *columns, **kwargs):
        super(UniqueConstraint, self).__init__(name=kwargs.pop('name', None))
        self.__colnames = list(columns)

    def _set_parent(self, table):
        self.table = table
        table.constraints.add(self)
        for c in self.__colnames:
            self.append_column(table.c[c])

    def append_column(self, col):
        self.columns.add(col)

    def copy(self):
        return UniqueConstraint(name=self.name, *self.__colnames)
class Index(SchemaItem):
    """Represent an index of columns from a database table."""

    def __init__(self, name, *columns, **kwargs):
        """Construct an index object.

        name
          The name of the index.

        \*columns
          Columns to include in the index.  All columns must belong to the
          same table, and no column may appear more than once.

        \**kwargs
          ``unique`` (defaults to False: create a unique index) and
          dialect-specific options such as ``postgres_where`` (a partial
          index predicate for PostgreSQL).
        """
        self.name = name
        self.columns = []
        self.table = None
        self.unique = kwargs.pop('unique', False)
        self.kwargs = kwargs
        self._init_items(*columns)

    def _init_items(self, *args):
        for column in args:
            self.append_column(column)

    def _get_parent(self):
        return self.table

    def _set_parent(self, table):
        self.table = table
        self.metadata = table.metadata
        table.indexes.add(self)

    def append_column(self, column):
        # make sure all columns are from the same table
        # and no column is repeated
        if self.table is None:
            self._set_parent(column.table)
        elif column.table != self.table:
            # all columns muse be from same table
            raise exceptions.ArgumentError("All index columns must be from same table. "
                                           "%s is from %s not %s" % (column, column.table, self.table))
        elif column.name in [c.name for c in self.columns]:
            raise exceptions.ArgumentError("A column may not appear twice in the "
                                           "same index (%s already has column %s)" % (self.name, column))
        self.columns.append(column)

    def create(self, bind=None):
        if bind is not None:
            bind.create(self)
        else:
            self._get_bind(raiseerr=True).create(self)
        return self

    def drop(self, bind=None):
        if bind is not None:
            bind.drop(self)
        else:
            self._get_bind(raiseerr=True).drop(self)

    def __str__(self):
        return repr(self)

    def __repr__(self):
        return 'Index("%s", %s%s)' % (self.name,
                                      ', '.join([repr(c) for c in self.columns]),
                                      (self.unique and ', unique=True') or '')


class MetaData(SchemaItem):
    """A collection of Tables and their associated schema constructs.

    Holds a collection of Tables and an optional binding to an ``Engine``
    or ``Connection``.  If bound, the [sqlalchemy.schema#Table] objects in
    the collection and their columns may participate in implicit SQL
    execution.

    The ``bind`` property may be assigned dynamically; a common pattern is
    to start unbound and bind later, once an engine is available.

    MetaData is a thread-safe object after tables have been explicitly
    defined or loaded via reflection.
    """

    __visit_name__ = 'metadata'

    def __init__(self, bind=None, reflect=False):
        """Create a new MetaData object.

        bind
          An Engine or Connection to bind to.  May also be a string or URL
          instance; these are passed to create_engine() and this MetaData
          will be bound to the resulting engine.

        reflect
          Optional, automatically load all tables from the bound database.
          Defaults to False.  ``bind`` is required when this option is set.
          For finer control over loaded tables, use the ``reflect`` method
          of ``MetaData``.
        """
        self.tables = {}
        self.bind = bind
        self.metadata = self
        if reflect:
            if not bind:
                raise exceptions.ArgumentError(
                    "A bind must be supplied in conjunction with reflect=True")
            self.reflect()

    def __repr__(self):
        return 'MetaData(%r)' % self.bind

    def __contains__(self, key):
        return key in self.tables

    def __getstate__(self):
        # the bind is not picklable; persist only the table collection.
        return {'tables': self.tables}

    def __setstate__(self, state):
        self.tables = state['tables']
        self._bind = None

    def is_bound(self):
        """True if this MetaData is bound to an Engine or Connection."""
        return self._bind is not None

    # @deprecated
    def connect(self, bind, **kwargs):
        """Bind this MetaData to an Engine.

        Deprecated; use ``metadata.bind = <engine>`` or
        ``metadata.bind = <url>``.

        bind
          A string, ``URL``, ``Engine`` or ``Connection`` instance.  If a
          string or ``URL``, will be passed to ``create_engine()`` along
          with ``\**kwargs`` to produce the engine which to connect to.
          Otherwise connects directly to the given ``Engine``.
        """
        global URL
        if URL is None:
            from sqlalchemy.engine.url import URL
        if isinstance(bind, (basestring, URL)):
            self._bind = sqlalchemy.create_engine(bind, **kwargs)
        else:
            self._bind = bind
    connect = util.deprecated(connect)

    def _bind_to(self, bind):
        """Bind this MetaData to an Engine, Connection, string or URL."""
        global URL
        if URL is None:
            from sqlalchemy.engine.url import URL
        if isinstance(bind, (basestring, URL)):
            self._bind = sqlalchemy.create_engine(bind)
        else:
            self._bind = bind

    bind = property(lambda self: self._bind, _bind_to, doc=
                    """An Engine or Connection to which this MetaData is bound.

                    This property may be assigned an ``Engine`` or ``Connection``,
                    or assigned a string or URL to automatically create a basic
                    ``Engine`` for this bind with ``create_engine()``.""")

    def clear(self):
        self.tables.clear()

    def remove(self, table):
        # TODO: scan all other tables and remove FK _column
        del self.tables[table.key]

    def table_iterator(self, reverse=True, tables=None):
        from sqlalchemy.sql import util as sql_util
        if tables is None:
            tables = self.tables.values()
        else:
            tables = util.Set(tables).intersection(self.tables.values())
        sorter = sql_util.TableCollection(list(tables))
        return iter(sorter.sort(reverse=reverse))

    def _get_parent(self):
        return None

    def reflect(self, bind=None, schema=None, only=None):
        """Load all available table definitions from the database.

        Automatically creates ``Table`` entries in this ``MetaData`` for
        any table available in the database but not yet present in the
        ``MetaData``.  May be called multiple times to pick up tables
        recently added to the database; no special action is taken if a
        table in this ``MetaData`` no longer exists in the database.

        bind
          A ``Connectable`` used to access the database; if None, uses the
          existing bind on this ``MetaData``, if any.

        schema
          Optional, query and reflect tables from an alterate schema.

        only
          Optional.  Load only a sub-set of available named tables.  May be
          specified as a sequence of names or a callable predicate
          ``only(name, metadata)``.
        """
        reflect_opts = {'autoload': True}
        if bind is None:
            bind = self._get_bind(raiseerr=True)
            conn = None
        else:
            reflect_opts['autoload_with'] = bind
            conn = bind.contextual_connect()
        if schema is not None:
            reflect_opts['schema'] = schema
        available = util.OrderedSet(bind.engine.table_names(schema, connection=conn))
        current = util.Set(self.tables.keys())
        if only is None:
            load = [name for name in available if name not in current]
        elif callable(only):
            load = [name for name in available
                    if name not in current and only(name, self)]
        else:
            missing = [name for name in only if name not in available]
            if missing:
                s = schema and (" schema '%s'" % schema) or ''
                raise exceptions.InvalidRequestError(
                    'Could not reflect: requested table(s) not available '
                    'in %s%s: (%s)' % (bind.engine.url, s, ', '.join(missing)))
            load = [name for name in only if name not in current]
        for name in load:
            Table(name, self, **reflect_opts)

    def create_all(self, bind=None, tables=None, checkfirst=True):
        """Create all tables stored in this metadata.

        Conditionally creates tables depending on if they do not yet exist
        in the database.

        bind
          A ``Connectable`` used to access the database; if None, uses the
          existing bind on this ``MetaData``, if any.

        tables
          Optional list of ``Table`` objects, which is a subset of the
          total tables in the ``MetaData`` (others are ignored).
        """
        if bind is None:
            bind = self._get_bind(raiseerr=True)
        bind.create(self, checkfirst=checkfirst, tables=tables)

    def drop_all(self, bind=None, tables=None, checkfirst=True):
        """Drop all tables stored in this metadata.

        Conditionally drops tables depending on if they currently exist in
        the database.

        bind
          A ``Connectable`` used to access the database; if None, uses the
          existing bind on this ``MetaData``, if any.

        tables
          Optional list of ``Table`` objects, which is a subset of the
          total tables in the ``MetaData`` (others are ignored).
        """
        if bind is None:
            bind = self._get_bind(raiseerr=True)
        bind.drop(self, checkfirst=checkfirst, tables=tables)

    def _get_bind(self, raiseerr=False):
        if not self.is_bound():
            if raiseerr:
                raise exceptions.InvalidRequestError("This SchemaItem is not connected to any Engine or Connection.")
            else:
                return None
        return self._bind
If a string or ``URL``, will be passed to ``create_engine()`` along with ``\**kwargs`` to produce the engine which to connect to. Otherwise connects directly to the given ``Engine``. """ global URL if URL is None: from sqlalchemy.engine.url import URL if isinstance(bind, (basestring, URL)): try: engine = self.__engines[bind] except KeyError: engine = sqlalchemy.create_engine(bind, **kwargs) bind = engine self._bind_to(bind) connect = util.deprecated(connect) def _get_bind(self, raiseerr=False): """The bound ``Engine`` or ``Connectable`` for this thread.""" if hasattr(self.context, '_engine'): return self.context._engine else: if raiseerr: raise exceptions.InvalidRequestError( "This ThreadLocalMetaData is not bound to any Engine or " "Connection.") else: return None def _bind_to(self, bind): """Bind to a Connectable in the caller's thread.""" global URL if URL is None: from sqlalchemy.engine.url import URL if isinstance(bind, (basestring, URL)): try: self.context._engine = self.__engines[bind] except KeyError: e = sqlalchemy.create_engine(bind) self.__engines[bind] = e self.context._engine = e else: # TODO: this is squirrely. we shouldnt have to hold onto engines # in a case like this if bind not in self.__engines: self.__engines[bind] = bind self.context._engine = bind bind = property(_get_bind, _bind_to, doc= """The bound Engine or Connection for this thread. 
This property may be assigned an Engine or Connection, or assigned a string or URL to automatically create a basic Engine for this bind with ``create_engine()``.""") def is_bound(self): """True if there is a bind for this thread.""" return (hasattr(self.context, '_engine') and self.context._engine is not None) def dispose(self): """Dispose any and all ``Engines`` to which this ``ThreadLocalMetaData`` has been connected.""" for e in self.__engines.values(): if hasattr(e, 'dispose'): e.dispose() class SchemaVisitor(visitors.ClauseVisitor): """Define the visiting for ``SchemaItem`` objects.""" __traverse_options__ = {'schema_visitor':True}
Email marketers and web developers, from both businesses and agencies. A real-time email verification and batch email validation solution for keeping your email lists clean. By using our service you can be sure that you will reduce your bounce rate, improve your KPIs and, most importantly, achieve a higher ROI. Our email validation service detects and highlights known spam traps, temporary addresses, role-based emails and catch-all domains. Verifalia can accurately verify all your contacts, even those using non-Latin alphabets, such as Chinese, Arabic or Cyrillic.
""" Data objects in group "Performance Curves" """ from collections import OrderedDict import logging from pyidf.helper import DataObject logger = logging.getLogger("pyidf") logger.addHandler(logging.NullHandler()) class CurveLinear(DataObject): """ Corresponds to IDD object `Curve:Linear` Linear curve with one independent variable. Input for the linear curve consists of a curve name, the two coefficients, and the maximum and minimum valid independent variable values. Optional inputs for curve minimum and maximum may be used to limit the output of the performance curve. curve = C1 + C2*x """ _schema = {'extensible-fields': OrderedDict(), 'fields': OrderedDict([(u'name', {'name': u'Name', 'pyname': u'name', 'required-field': True, 'autosizable': False, 'autocalculatable': False, 'type': u'alpha'}), (u'coefficient1 constant', {'name': u'Coefficient1 Constant', 'pyname': u'coefficient1_constant', 'required-field': True, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'coefficient2 x', {'name': u'Coefficient2 x', 'pyname': u'coefficient2_x', 'required-field': True, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'minimum value of x', {'name': u'Minimum Value of x', 'pyname': u'minimum_value_of_x', 'required-field': True, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'maximum value of x', {'name': u'Maximum Value of x', 'pyname': u'maximum_value_of_x', 'required-field': True, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'minimum curve output', {'name': u'Minimum Curve Output', 'pyname': u'minimum_curve_output', 'required-field': False, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'maximum curve output', {'name': u'Maximum Curve Output', 'pyname': u'maximum_curve_output', 'required-field': False, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'input unit type for x', {'name': u'Input Unit Type for X', 'pyname': 
u'input_unit_type_for_x', 'default': u'Dimensionless', 'required-field': False, 'autosizable': False, 'accepted-values': [u'Dimensionless', u'Temperature', u'VolumetricFlow', u'Pressure', u'MassFlow', u'Power', u'Distance'], 'autocalculatable': False, 'type': 'alpha'}), (u'output unit type', {'name': u'Output Unit Type', 'pyname': u'output_unit_type', 'default': u'Dimensionless', 'required-field': False, 'autosizable': False, 'accepted-values': [u'Dimensionless', u'Capacity', u'Power'], 'autocalculatable': False, 'type': 'alpha'})]), 'format': None, 'group': u'Performance Curves', 'min-fields': 0, 'name': u'Curve:Linear', 'pyname': u'CurveLinear', 'required-object': False, 'unique-object': False} @property def name(self): """field `Name` Args: value (str): value for IDD Field `Name` Raises: ValueError: if `value` is not a valid value Returns: str: the value of `name` or None if not set """ return self["Name"] @name.setter def name(self, value=None): """Corresponds to IDD field `Name`""" self["Name"] = value @property def coefficient1_constant(self): """field `Coefficient1 Constant` Args: value (float): value for IDD Field `Coefficient1 Constant` Raises: ValueError: if `value` is not a valid value Returns: float: the value of `coefficient1_constant` or None if not set """ return self["Coefficient1 Constant"] @coefficient1_constant.setter def coefficient1_constant(self, value=None): """Corresponds to IDD field `Coefficient1 Constant`""" self["Coefficient1 Constant"] = value @property def coefficient2_x(self): """field `Coefficient2 x` Args: value (float): value for IDD Field `Coefficient2 x` Raises: ValueError: if `value` is not a valid value Returns: float: the value of `coefficient2_x` or None if not set """ return self["Coefficient2 x"] @coefficient2_x.setter def coefficient2_x(self, value=None): """Corresponds to IDD field `Coefficient2 x`""" self["Coefficient2 x"] = value @property def minimum_value_of_x(self): """field `Minimum Value of x` | Units are based on 
field `A2` Args: value (float): value for IDD Field `Minimum Value of x` Raises: ValueError: if `value` is not a valid value Returns: float: the value of `minimum_value_of_x` or None if not set """ return self["Minimum Value of x"] @minimum_value_of_x.setter def minimum_value_of_x(self, value=None): """Corresponds to IDD field `Minimum Value of x`""" self["Minimum Value of x"] = value @property def maximum_value_of_x(self): """field `Maximum Value of x` | Units are based on field `A2` Args: value (float): value for IDD Field `Maximum Value of x` Raises: ValueError: if `value` is not a valid value Returns: float: the value of `maximum_value_of_x` or None if not set """ return self["Maximum Value of x"] @maximum_value_of_x.setter def maximum_value_of_x(self, value=None): """Corresponds to IDD field `Maximum Value of x`""" self["Maximum Value of x"] = value @property def minimum_curve_output(self): """field `Minimum Curve Output` | Specify the minimum value calculated by this curve object | Units are based on field `A3` Args: value (float): value for IDD Field `Minimum Curve Output` Raises: ValueError: if `value` is not a valid value Returns: float: the value of `minimum_curve_output` or None if not set """ return self["Minimum Curve Output"] @minimum_curve_output.setter def minimum_curve_output(self, value=None): """Corresponds to IDD field `Minimum Curve Output`""" self["Minimum Curve Output"] = value @property def maximum_curve_output(self): """field `Maximum Curve Output` | Specify the maximum value calculated by this curve object | Units are based on field `A3` Args: value (float): value for IDD Field `Maximum Curve Output` Raises: ValueError: if `value` is not a valid value Returns: float: the value of `maximum_curve_output` or None if not set """ return self["Maximum Curve Output"] @maximum_curve_output.setter def maximum_curve_output(self, value=None): """Corresponds to IDD field `Maximum Curve Output`""" self["Maximum Curve Output"] = value @property def 
input_unit_type_for_x(self): """field `Input Unit Type for X` | Default value: Dimensionless Args: value (str): value for IDD Field `Input Unit Type for X` Raises: ValueError: if `value` is not a valid value Returns: str: the value of `input_unit_type_for_x` or None if not set """ return self["Input Unit Type for X"] @input_unit_type_for_x.setter def input_unit_type_for_x(self, value="Dimensionless"): """Corresponds to IDD field `Input Unit Type for X`""" self["Input Unit Type for X"] = value @property def output_unit_type(self): """field `Output Unit Type` | Default value: Dimensionless Args: value (str): value for IDD Field `Output Unit Type` Raises: ValueError: if `value` is not a valid value Returns: str: the value of `output_unit_type` or None if not set """ return self["Output Unit Type"] @output_unit_type.setter def output_unit_type(self, value="Dimensionless"): """Corresponds to IDD field `Output Unit Type`""" self["Output Unit Type"] = value class CurveQuadLinear(DataObject): """ Corresponds to IDD object `Curve:QuadLinear` Linear curve with four independent variables. Input for the linear curve consists of a curve name, the two coefficients, and the maximum and minimum valid independent variable values. Optional inputs for curve minimum and maximum may be used to limit the output of the performance curve. 
curve = C1 + C2*w + C3*x + C4*y + C5*z """ _schema = {'extensible-fields': OrderedDict(), 'fields': OrderedDict([(u'name', {'name': u'Name', 'pyname': u'name', 'required-field': True, 'autosizable': False, 'autocalculatable': False, 'type': u'alpha'}), (u'coefficient1 constant', {'name': u'Coefficient1 Constant', 'pyname': u'coefficient1_constant', 'required-field': True, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'coefficient2 w', {'name': u'Coefficient2 w', 'pyname': u'coefficient2_w', 'required-field': True, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'coefficient3 x', {'name': u'Coefficient3 x', 'pyname': u'coefficient3_x', 'required-field': True, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'coefficient4 y', {'name': u'Coefficient4 y', 'pyname': u'coefficient4_y', 'required-field': True, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'coefficient5 z', {'name': u'Coefficient5 z', 'pyname': u'coefficient5_z', 'required-field': True, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'minimum value of w', {'name': u'Minimum Value of w', 'pyname': u'minimum_value_of_w', 'required-field': True, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'maximum value of w', {'name': u'Maximum Value of w', 'pyname': u'maximum_value_of_w', 'required-field': True, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'minimum value of x', {'name': u'Minimum Value of x', 'pyname': u'minimum_value_of_x', 'required-field': True, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'maximum value of x', {'name': u'Maximum Value of x', 'pyname': u'maximum_value_of_x', 'required-field': True, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'minimum value of y', {'name': u'Minimum Value of y', 'pyname': u'minimum_value_of_y', 'required-field': True, 'autosizable': False, 'autocalculatable': 
False, 'type': u'real'}), (u'maximum value of y', {'name': u'Maximum Value of y', 'pyname': u'maximum_value_of_y', 'required-field': True, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'minimum value of z', {'name': u'Minimum Value of z', 'pyname': u'minimum_value_of_z', 'required-field': True, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'maximum value of z', {'name': u'Maximum Value of z', 'pyname': u'maximum_value_of_z', 'required-field': True, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'minimum curve output', {'name': u'Minimum Curve Output', 'pyname': u'minimum_curve_output', 'required-field': False, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'maximum curve output', {'name': u'Maximum Curve Output', 'pyname': u'maximum_curve_output', 'required-field': False, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'input unit type for w', {'name': u'Input Unit Type for w', 'pyname': u'input_unit_type_for_w', 'default': u'Dimensionless', 'required-field': False, 'autosizable': False, 'accepted-values': [u'Dimensionless', u'Temperature', u'VolumetricFlow', u'MassFlow', u'Power', u'Distance', u'VolumetricFlowPerPower'], 'autocalculatable': False, 'type': 'alpha'}), (u'input unit type for x', {'name': u'Input Unit Type for x', 'pyname': u'input_unit_type_for_x', 'default': u'Dimensionless', 'required-field': False, 'autosizable': False, 'accepted-values': [u'Dimensionless', u'Temperature', u'VolumetricFlow', u'MassFlow', u'Power', u'Distance', u'VolumetricFlowPerPower'], 'autocalculatable': False, 'type': 'alpha'}), (u'input unit type for y', {'name': u'Input Unit Type for y', 'pyname': u'input_unit_type_for_y', 'default': u'Dimensionless', 'required-field': False, 'autosizable': False, 'accepted-values': [u'Dimensionless', u'Temperature', u'VolumetricFlow', u'MassFlow', u'Power', u'Distance', u'VolumetricFlowPerPower'], 'autocalculatable': False, 
'type': 'alpha'}), (u'input unit type for z', {'name': u'Input Unit Type for z', 'pyname': u'input_unit_type_for_z', 'default': u'Dimensionless', 'required-field': False, 'autosizable': False, 'accepted-values': [u'Dimensionless', u'Temperature', u'VolumetricFlow', u'MassFlow', u'Power', u'Distance', u'VolumetricFlowPerPower'], 'autocalculatable': False, 'type': 'alpha'})]), 'format': None, 'group': u'Performance Curves', 'min-fields': 0, 'name': u'Curve:QuadLinear', 'pyname': u'CurveQuadLinear', 'required-object': False, 'unique-object': False} @property def name(self): """field `Name` Args: value (str): value for IDD Field `Name` Raises: ValueError: if `value` is not a valid value Returns: str: the value of `name` or None if not set """ return self["Name"] @name.setter def name(self, value=None): """Corresponds to IDD field `Name`""" self["Name"] = value @property def coefficient1_constant(self): """field `Coefficient1 Constant` Args: value (float): value for IDD Field `Coefficient1 Constant` Raises: ValueError: if `value` is not a valid value Returns: float: the value of `coefficient1_constant` or None if not set """ return self["Coefficient1 Constant"] @coefficient1_constant.setter def coefficient1_constant(self, value=None): """Corresponds to IDD field `Coefficient1 Constant`""" self["Coefficient1 Constant"] = value @property def coefficient2_w(self): """field `Coefficient2 w` Args: value (float): value for IDD Field `Coefficient2 w` Raises: ValueError: if `value` is not a valid value Returns: float: the value of `coefficient2_w` or None if not set """ return self["Coefficient2 w"] @coefficient2_w.setter def coefficient2_w(self, value=None): """Corresponds to IDD field `Coefficient2 w`""" self["Coefficient2 w"] = value @property def coefficient3_x(self): """field `Coefficient3 x` Args: value (float): value for IDD Field `Coefficient3 x` Raises: ValueError: if `value` is not a valid value Returns: float: the value of `coefficient3_x` or None if not set """ 
return self["Coefficient3 x"] @coefficient3_x.setter def coefficient3_x(self, value=None): """Corresponds to IDD field `Coefficient3 x`""" self["Coefficient3 x"] = value @property def coefficient4_y(self): """field `Coefficient4 y` Args: value (float): value for IDD Field `Coefficient4 y` Raises: ValueError: if `value` is not a valid value Returns: float: the value of `coefficient4_y` or None if not set """ return self["Coefficient4 y"] @coefficient4_y.setter def coefficient4_y(self, value=None): """Corresponds to IDD field `Coefficient4 y`""" self["Coefficient4 y"] = value @property def coefficient5_z(self): """field `Coefficient5 z` Args: value (float): value for IDD Field `Coefficient5 z` Raises: ValueError: if `value` is not a valid value Returns: float: the value of `coefficient5_z` or None if not set """ return self["Coefficient5 z"] @coefficient5_z.setter def coefficient5_z(self, value=None): """Corresponds to IDD field `Coefficient5 z`""" self["Coefficient5 z"] = value @property def minimum_value_of_w(self): """field `Minimum Value of w` | Units are based on field `A2` Args: value (float): value for IDD Field `Minimum Value of w` Raises: ValueError: if `value` is not a valid value Returns: float: the value of `minimum_value_of_w` or None if not set """ return self["Minimum Value of w"] @minimum_value_of_w.setter def minimum_value_of_w(self, value=None): """Corresponds to IDD field `Minimum Value of w`""" self["Minimum Value of w"] = value @property def maximum_value_of_w(self): """field `Maximum Value of w` | Units are based on field `A2` Args: value (float): value for IDD Field `Maximum Value of w` Raises: ValueError: if `value` is not a valid value Returns: float: the value of `maximum_value_of_w` or None if not set """ return self["Maximum Value of w"] @maximum_value_of_w.setter def maximum_value_of_w(self, value=None): """Corresponds to IDD field `Maximum Value of w`""" self["Maximum Value of w"] = value @property def minimum_value_of_x(self): """field 
`Minimum Value of x` | Units are based on field `A3` Args: value (float): value for IDD Field `Minimum Value of x` Raises: ValueError: if `value` is not a valid value Returns: float: the value of `minimum_value_of_x` or None if not set """ return self["Minimum Value of x"] @minimum_value_of_x.setter def minimum_value_of_x(self, value=None): """Corresponds to IDD field `Minimum Value of x`""" self["Minimum Value of x"] = value @property def maximum_value_of_x(self): """field `Maximum Value of x` | Units are based on field `A3` Args: value (float): value for IDD Field `Maximum Value of x` Raises: ValueError: if `value` is not a valid value Returns: float: the value of `maximum_value_of_x` or None if not set """ return self["Maximum Value of x"] @maximum_value_of_x.setter def maximum_value_of_x(self, value=None): """Corresponds to IDD field `Maximum Value of x`""" self["Maximum Value of x"] = value @property def minimum_value_of_y(self): """field `Minimum Value of y` | Units are based on field `A4` Args: value (float): value for IDD Field `Minimum Value of y` Raises: ValueError: if `value` is not a valid value Returns: float: the value of `minimum_value_of_y` or None if not set """ return self["Minimum Value of y"] @minimum_value_of_y.setter def minimum_value_of_y(self, value=None): """Corresponds to IDD field `Minimum Value of y`""" self["Minimum Value of y"] = value @property def maximum_value_of_y(self): """field `Maximum Value of y` | Units are based on field `A4` Args: value (float): value for IDD Field `Maximum Value of y` Raises: ValueError: if `value` is not a valid value Returns: float: the value of `maximum_value_of_y` or None if not set """ return self["Maximum Value of y"] @maximum_value_of_y.setter def maximum_value_of_y(self, value=None): """Corresponds to IDD field `Maximum Value of y`""" self["Maximum Value of y"] = value @property def minimum_value_of_z(self): """field `Minimum Value of z` | Units are based on field `A5` Args: value (float): value for 
IDD Field `Minimum Value of z` Raises: ValueError: if `value` is not a valid value Returns: float: the value of `minimum_value_of_z` or None if not set """ return self["Minimum Value of z"] @minimum_value_of_z.setter def minimum_value_of_z(self, value=None): """Corresponds to IDD field `Minimum Value of z`""" self["Minimum Value of z"] = value @property def maximum_value_of_z(self): """field `Maximum Value of z` | Units are based on field `A5` Args: value (float): value for IDD Field `Maximum Value of z` Raises: ValueError: if `value` is not a valid value Returns: float: the value of `maximum_value_of_z` or None if not set """ return self["Maximum Value of z"] @maximum_value_of_z.setter def maximum_value_of_z(self, value=None): """Corresponds to IDD field `Maximum Value of z`""" self["Maximum Value of z"] = value @property def minimum_curve_output(self): """field `Minimum Curve Output` | Specify the minimum value calculated by this curve object | Units are based on field `A4` Args: value (float): value for IDD Field `Minimum Curve Output` Raises: ValueError: if `value` is not a valid value Returns: float: the value of `minimum_curve_output` or None if not set """ return self["Minimum Curve Output"] @minimum_curve_output.setter def minimum_curve_output(self, value=None): """Corresponds to IDD field `Minimum Curve Output`""" self["Minimum Curve Output"] = value @property def maximum_curve_output(self): """field `Maximum Curve Output` | Specify the maximum value calculated by this curve object | Units are based on field `A4` Args: value (float): value for IDD Field `Maximum Curve Output` Raises: ValueError: if `value` is not a valid value Returns: float: the value of `maximum_curve_output` or None if not set """ return self["Maximum Curve Output"] @maximum_curve_output.setter def maximum_curve_output(self, value=None): """Corresponds to IDD field `Maximum Curve Output`""" self["Maximum Curve Output"] = value @property def input_unit_type_for_w(self): """field `Input 
Unit Type for w` | Default value: Dimensionless Args: value (str): value for IDD Field `Input Unit Type for w` Raises: ValueError: if `value` is not a valid value Returns: str: the value of `input_unit_type_for_w` or None if not set """ return self["Input Unit Type for w"] @input_unit_type_for_w.setter def input_unit_type_for_w(self, value="Dimensionless"): """Corresponds to IDD field `Input Unit Type for w`""" self["Input Unit Type for w"] = value @property def input_unit_type_for_x(self): """field `Input Unit Type for x` | Default value: Dimensionless Args: value (str): value for IDD Field `Input Unit Type for x` Raises: ValueError: if `value` is not a valid value Returns: str: the value of `input_unit_type_for_x` or None if not set """ return self["Input Unit Type for x"] @input_unit_type_for_x.setter def input_unit_type_for_x(self, value="Dimensionless"): """Corresponds to IDD field `Input Unit Type for x`""" self["Input Unit Type for x"] = value @property def input_unit_type_for_y(self): """field `Input Unit Type for y` | Default value: Dimensionless Args: value (str): value for IDD Field `Input Unit Type for y` Raises: ValueError: if `value` is not a valid value Returns: str: the value of `input_unit_type_for_y` or None if not set """ return self["Input Unit Type for y"] @input_unit_type_for_y.setter def input_unit_type_for_y(self, value="Dimensionless"): """Corresponds to IDD field `Input Unit Type for y`""" self["Input Unit Type for y"] = value @property def input_unit_type_for_z(self): """field `Input Unit Type for z` | Default value: Dimensionless Args: value (str): value for IDD Field `Input Unit Type for z` Raises: ValueError: if `value` is not a valid value Returns: str: the value of `input_unit_type_for_z` or None if not set """ return self["Input Unit Type for z"] @input_unit_type_for_z.setter def input_unit_type_for_z(self, value="Dimensionless"): """Corresponds to IDD field `Input Unit Type for z`""" self["Input Unit Type for z"] = value class 
CurveQuadratic(DataObject): """ Corresponds to IDD object `Curve:Quadratic` Quadratic curve with one independent variable. Input for a quadratic curve consists of the curve name, the three coefficients, and the maximum and minimum valid independent variable values. Optional inputs for curve minimum and maximum may be used to limit the output of the performance curve. curve = C1 + C2*x + C3*x**2 """ _schema = {'extensible-fields': OrderedDict(), 'fields': OrderedDict([(u'name', {'name': u'Name', 'pyname': u'name', 'required-field': True, 'autosizable': False, 'autocalculatable': False, 'type': u'alpha'}), (u'coefficient1 constant', {'name': u'Coefficient1 Constant', 'pyname': u'coefficient1_constant', 'required-field': True, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'coefficient2 x', {'name': u'Coefficient2 x', 'pyname': u'coefficient2_x', 'required-field': True, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'coefficient3 x**2', {'name': u'Coefficient3 x**2', 'pyname': u'coefficient3_x2', 'required-field': True, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'minimum value of x', {'name': u'Minimum Value of x', 'pyname': u'minimum_value_of_x', 'required-field': True, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'maximum value of x', {'name': u'Maximum Value of x', 'pyname': u'maximum_value_of_x', 'required-field': True, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'minimum curve output', {'name': u'Minimum Curve Output', 'pyname': u'minimum_curve_output', 'required-field': False, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'maximum curve output', {'name': u'Maximum Curve Output', 'pyname': u'maximum_curve_output', 'required-field': False, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'input unit type for x', {'name': u'Input Unit Type for X', 'pyname': u'input_unit_type_for_x', 'default': 
u'Dimensionless', 'required-field': False, 'autosizable': False, 'accepted-values': [u'Dimensionless', u'Temperature', u'VolumetricFlow', u'MassFlow', u'Power', u'Distance'], 'autocalculatable': False, 'type': 'alpha'}), (u'output unit type', {'name': u'Output Unit Type', 'pyname': u'output_unit_type', 'default': u'Dimensionless', 'required-field': False, 'autosizable': False, 'accepted-values': [u'Dimensionless', u'Capacity', u'Power'], 'autocalculatable': False, 'type': 'alpha'})]), 'format': None, 'group': u'Performance Curves', 'min-fields': 0, 'name': u'Curve:Quadratic', 'pyname': u'CurveQuadratic', 'required-object': False, 'unique-object': False} @property def name(self): """field `Name` Args: value (str): value for IDD Field `Name` Raises: ValueError: if `value` is not a valid value Returns: str: the value of `name` or None if not set """ return self["Name"] @name.setter def name(self, value=None): """Corresponds to IDD field `Name`""" self["Name"] = value @property def coefficient1_constant(self): """field `Coefficient1 Constant` Args: value (float): value for IDD Field `Coefficient1 Constant` Raises: ValueError: if `value` is not a valid value Returns: float: the value of `coefficient1_constant` or None if not set """ return self["Coefficient1 Constant"] @coefficient1_constant.setter def coefficient1_constant(self, value=None): """Corresponds to IDD field `Coefficient1 Constant`""" self["Coefficient1 Constant"] = value @property def coefficient2_x(self): """field `Coefficient2 x` Args: value (float): value for IDD Field `Coefficient2 x` Raises: ValueError: if `value` is not a valid value Returns: float: the value of `coefficient2_x` or None if not set """ return self["Coefficient2 x"] @coefficient2_x.setter def coefficient2_x(self, value=None): """Corresponds to IDD field `Coefficient2 x`""" self["Coefficient2 x"] = value @property def coefficient3_x2(self): """field `Coefficient3 x**2` Args: value (float): value for IDD Field `Coefficient3 x**2` Raises: 
ValueError: if `value` is not a valid value Returns: float: the value of `coefficient3_x2` or None if not set """ return self["Coefficient3 x**2"] @coefficient3_x2.setter def coefficient3_x2(self, value=None): """ Corresponds to IDD field `Coefficient3 x**2` """ self["Coefficient3 x**2"] = value @property def minimum_value_of_x(self): """field `Minimum Value of x` | Units are based on field `A2` Args: value (float): value for IDD Field `Minimum Value of x` Raises: ValueError: if `value` is not a valid value Returns: float: the value of `minimum_value_of_x` or None if not set """ return self["Minimum Value of x"] @minimum_value_of_x.setter def minimum_value_of_x(self, value=None): """Corresponds to IDD field `Minimum Value of x`""" self["Minimum Value of x"] = value @property def maximum_value_of_x(self): """field `Maximum Value of x` | Units are based on field `A2` Args: value (float): value for IDD Field `Maximum Value of x` Raises: ValueError: if `value` is not a valid value Returns: float: the value of `maximum_value_of_x` or None if not set """ return self["Maximum Value of x"] @maximum_value_of_x.setter def maximum_value_of_x(self, value=None): """Corresponds to IDD field `Maximum Value of x`""" self["Maximum Value of x"] = value @property def minimum_curve_output(self): """field `Minimum Curve Output` | Specify the minimum value calculated by this curve object | Units are based on field `A3` Args: value (float): value for IDD Field `Minimum Curve Output` Raises: ValueError: if `value` is not a valid value Returns: float: the value of `minimum_curve_output` or None if not set """ return self["Minimum Curve Output"] @minimum_curve_output.setter def minimum_curve_output(self, value=None): """Corresponds to IDD field `Minimum Curve Output`""" self["Minimum Curve Output"] = value @property def maximum_curve_output(self): """field `Maximum Curve Output` | Specify the maximum value calculated by this curve object | Units are based on field `A3` Args: value (float): 
value for IDD Field `Maximum Curve Output` Raises: ValueError: if `value` is not a valid value Returns: float: the value of `maximum_curve_output` or None if not set """ return self["Maximum Curve Output"] @maximum_curve_output.setter def maximum_curve_output(self, value=None): """Corresponds to IDD field `Maximum Curve Output`""" self["Maximum Curve Output"] = value @property def input_unit_type_for_x(self): """field `Input Unit Type for X` | Default value: Dimensionless Args: value (str): value for IDD Field `Input Unit Type for X` Raises: ValueError: if `value` is not a valid value Returns: str: the value of `input_unit_type_for_x` or None if not set """ return self["Input Unit Type for X"] @input_unit_type_for_x.setter def input_unit_type_for_x(self, value="Dimensionless"): """Corresponds to IDD field `Input Unit Type for X`""" self["Input Unit Type for X"] = value @property def output_unit_type(self): """field `Output Unit Type` | Default value: Dimensionless Args: value (str): value for IDD Field `Output Unit Type` Raises: ValueError: if `value` is not a valid value Returns: str: the value of `output_unit_type` or None if not set """ return self["Output Unit Type"] @output_unit_type.setter def output_unit_type(self, value="Dimensionless"): """Corresponds to IDD field `Output Unit Type`""" self["Output Unit Type"] = value class CurveCubic(DataObject): """ Corresponds to IDD object `Curve:Cubic` Cubic curve with one independent variable. Input for a cubic curve consists of the curve name, the 4 coefficients, and the maximum and minimum valid independent variable values. Optional inputs for curve minimum and maximum may be used to limit the output of the performance curve. 
curve = C1 + C2*x + C3*x**2 + C4*x**3 """ _schema = {'extensible-fields': OrderedDict(), 'fields': OrderedDict([(u'name', {'name': u'Name', 'pyname': u'name', 'required-field': True, 'autosizable': False, 'autocalculatable': False, 'type': u'alpha'}), (u'coefficient1 constant', {'name': u'Coefficient1 Constant', 'pyname': u'coefficient1_constant', 'required-field': True, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'coefficient2 x', {'name': u'Coefficient2 x', 'pyname': u'coefficient2_x', 'required-field': True, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'coefficient3 x**2', {'name': u'Coefficient3 x**2', 'pyname': u'coefficient3_x2', 'required-field': True, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'coefficient4 x**3', {'name': u'Coefficient4 x**3', 'pyname': u'coefficient4_x3', 'required-field': True, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'minimum value of x', {'name': u'Minimum Value of x', 'pyname': u'minimum_value_of_x', 'required-field': True, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'maximum value of x', {'name': u'Maximum Value of x', 'pyname': u'maximum_value_of_x', 'required-field': True, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'minimum curve output', {'name': u'Minimum Curve Output', 'pyname': u'minimum_curve_output', 'required-field': False, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'maximum curve output', {'name': u'Maximum Curve Output', 'pyname': u'maximum_curve_output', 'required-field': False, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'input unit type for x', {'name': u'Input Unit Type for X', 'pyname': u'input_unit_type_for_x', 'default': u'Dimensionless', 'required-field': False, 'autosizable': False, 'accepted-values': [u'Dimensionless', u'Temperature', u'VolumetricFlow', u'MassFlow', u'Power', u'Distance'], 
'autocalculatable': False, 'type': 'alpha'}), (u'output unit type', {'name': u'Output Unit Type', 'pyname': u'output_unit_type', 'default': u'Dimensionless', 'required-field': False, 'autosizable': False, 'accepted-values': [u'Dimensionless', u'Capacity', u'Power'], 'autocalculatable': False, 'type': 'alpha'})]), 'format': None, 'group': u'Performance Curves', 'min-fields': 0, 'name': u'Curve:Cubic', 'pyname': u'CurveCubic', 'required-object': False, 'unique-object': False} @property def name(self): """field `Name` Args: value (str): value for IDD Field `Name` Raises: ValueError: if `value` is not a valid value Returns: str: the value of `name` or None if not set """ return self["Name"] @name.setter def name(self, value=None): """Corresponds to IDD field `Name`""" self["Name"] = value @property def coefficient1_constant(self): """field `Coefficient1 Constant` Args: value (float): value for IDD Field `Coefficient1 Constant` Raises: ValueError: if `value` is not a valid value Returns: float: the value of `coefficient1_constant` or None if not set """ return self["Coefficient1 Constant"] @coefficient1_constant.setter def coefficient1_constant(self, value=None): """Corresponds to IDD field `Coefficient1 Constant`""" self["Coefficient1 Constant"] = value @property def coefficient2_x(self): """field `Coefficient2 x` Args: value (float): value for IDD Field `Coefficient2 x` Raises: ValueError: if `value` is not a valid value Returns: float: the value of `coefficient2_x` or None if not set """ return self["Coefficient2 x"] @coefficient2_x.setter def coefficient2_x(self, value=None): """Corresponds to IDD field `Coefficient2 x`""" self["Coefficient2 x"] = value @property def coefficient3_x2(self): """field `Coefficient3 x**2` Args: value (float): value for IDD Field `Coefficient3 x**2` Raises: ValueError: if `value` is not a valid value Returns: float: the value of `coefficient3_x2` or None if not set """ return self["Coefficient3 x**2"] @coefficient3_x2.setter def 
coefficient3_x2(self, value=None): """ Corresponds to IDD field `Coefficient3 x**2` """ self["Coefficient3 x**2"] = value @property def coefficient4_x3(self): """field `Coefficient4 x**3` Args: value (float): value for IDD Field `Coefficient4 x**3` Raises: ValueError: if `value` is not a valid value Returns: float: the value of `coefficient4_x3` or None if not set """ return self["Coefficient4 x**3"] @coefficient4_x3.setter def coefficient4_x3(self, value=None): """ Corresponds to IDD field `Coefficient4 x**3` """ self["Coefficient4 x**3"] = value @property def minimum_value_of_x(self): """field `Minimum Value of x` | Units are based on field `A2` Args: value (float): value for IDD Field `Minimum Value of x` Raises: ValueError: if `value` is not a valid value Returns: float: the value of `minimum_value_of_x` or None if not set """ return self["Minimum Value of x"] @minimum_value_of_x.setter def minimum_value_of_x(self, value=None): """Corresponds to IDD field `Minimum Value of x`""" self["Minimum Value of x"] = value @property def maximum_value_of_x(self): """field `Maximum Value of x` | Units are based on field `A2` Args: value (float): value for IDD Field `Maximum Value of x` Raises: ValueError: if `value` is not a valid value Returns: float: the value of `maximum_value_of_x` or None if not set """ return self["Maximum Value of x"] @maximum_value_of_x.setter def maximum_value_of_x(self, value=None): """Corresponds to IDD field `Maximum Value of x`""" self["Maximum Value of x"] = value @property def minimum_curve_output(self): """field `Minimum Curve Output` | Specify the minimum value calculated by this curve object | Units are based on field `A3` Args: value (float): value for IDD Field `Minimum Curve Output` Raises: ValueError: if `value` is not a valid value Returns: float: the value of `minimum_curve_output` or None if not set """ return self["Minimum Curve Output"] @minimum_curve_output.setter def minimum_curve_output(self, value=None): """Corresponds to IDD 
field `Minimum Curve Output`""" self["Minimum Curve Output"] = value @property def maximum_curve_output(self): """field `Maximum Curve Output` | Specify the maximum value calculated by this curve object | Units are based on field `A3` Args: value (float): value for IDD Field `Maximum Curve Output` Raises: ValueError: if `value` is not a valid value Returns: float: the value of `maximum_curve_output` or None if not set """ return self["Maximum Curve Output"] @maximum_curve_output.setter def maximum_curve_output(self, value=None): """Corresponds to IDD field `Maximum Curve Output`""" self["Maximum Curve Output"] = value @property def input_unit_type_for_x(self): """field `Input Unit Type for X` | Default value: Dimensionless Args: value (str): value for IDD Field `Input Unit Type for X` Raises: ValueError: if `value` is not a valid value Returns: str: the value of `input_unit_type_for_x` or None if not set """ return self["Input Unit Type for X"] @input_unit_type_for_x.setter def input_unit_type_for_x(self, value="Dimensionless"): """Corresponds to IDD field `Input Unit Type for X`""" self["Input Unit Type for X"] = value @property def output_unit_type(self): """field `Output Unit Type` | Default value: Dimensionless Args: value (str): value for IDD Field `Output Unit Type` Raises: ValueError: if `value` is not a valid value Returns: str: the value of `output_unit_type` or None if not set """ return self["Output Unit Type"] @output_unit_type.setter def output_unit_type(self, value="Dimensionless"): """Corresponds to IDD field `Output Unit Type`""" self["Output Unit Type"] = value class CurveQuartic(DataObject): """ Corresponds to IDD object `Curve:Quartic` Quartic (fourth order polynomial) curve with one independent variable. Input for a Quartic curve consists of the curve name, the five coefficients, and the maximum and minimum valid independent variable values. Optional inputs for curve minimum and maximum may be used to limit the output of the performance curve. 
curve = C1 + C2*x + C3*x**2 + C4*x**3 + C5*x**4 """ _schema = {'extensible-fields': OrderedDict(), 'fields': OrderedDict([(u'name', {'name': u'Name', 'pyname': u'name', 'required-field': True, 'autosizable': False, 'autocalculatable': False, 'type': u'alpha'}), (u'coefficient1 constant', {'name': u'Coefficient1 Constant', 'pyname': u'coefficient1_constant', 'required-field': True, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'coefficient2 x', {'name': u'Coefficient2 x', 'pyname': u'coefficient2_x', 'required-field': True, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'coefficient3 x**2', {'name': u'Coefficient3 x**2', 'pyname': u'coefficient3_x2', 'required-field': True, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'coefficient4 x**3', {'name': u'Coefficient4 x**3', 'pyname': u'coefficient4_x3', 'required-field': True, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'coefficient5 x**4', {'name': u'Coefficient5 x**4', 'pyname': u'coefficient5_x4', 'required-field': True, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'minimum value of x', {'name': u'Minimum Value of x', 'pyname': u'minimum_value_of_x', 'required-field': True, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'maximum value of x', {'name': u'Maximum Value of x', 'pyname': u'maximum_value_of_x', 'required-field': True, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'minimum curve output', {'name': u'Minimum Curve Output', 'pyname': u'minimum_curve_output', 'required-field': False, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'maximum curve output', {'name': u'Maximum Curve Output', 'pyname': u'maximum_curve_output', 'required-field': False, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'input unit type for x', {'name': u'Input Unit Type for X', 'pyname': u'input_unit_type_for_x', 'default': 
u'Dimensionless', 'required-field': False, 'autosizable': False, 'accepted-values': [u'Dimensionless', u'Temperature', u'VolumetricFlow', u'MassFlow', u'Power', u'Distance'], 'autocalculatable': False, 'type': 'alpha'}), (u'output unit type', {'name': u'Output Unit Type', 'pyname': u'output_unit_type', 'default': u'Dimensionless', 'required-field': False, 'autosizable': False, 'accepted-values': [u'Dimensionless', u'Capacity', u'Power'], 'autocalculatable': False, 'type': 'alpha'})]), 'format': None, 'group': u'Performance Curves', 'min-fields': 0, 'name': u'Curve:Quartic', 'pyname': u'CurveQuartic', 'required-object': False, 'unique-object': False} @property def name(self): """field `Name` Args: value (str): value for IDD Field `Name` Raises: ValueError: if `value` is not a valid value Returns: str: the value of `name` or None if not set """ return self["Name"] @name.setter def name(self, value=None): """Corresponds to IDD field `Name`""" self["Name"] = value @property def coefficient1_constant(self): """field `Coefficient1 Constant` Args: value (float): value for IDD Field `Coefficient1 Constant` Raises: ValueError: if `value` is not a valid value Returns: float: the value of `coefficient1_constant` or None if not set """ return self["Coefficient1 Constant"] @coefficient1_constant.setter def coefficient1_constant(self, value=None): """Corresponds to IDD field `Coefficient1 Constant`""" self["Coefficient1 Constant"] = value @property def coefficient2_x(self): """field `Coefficient2 x` Args: value (float): value for IDD Field `Coefficient2 x` Raises: ValueError: if `value` is not a valid value Returns: float: the value of `coefficient2_x` or None if not set """ return self["Coefficient2 x"] @coefficient2_x.setter def coefficient2_x(self, value=None): """Corresponds to IDD field `Coefficient2 x`""" self["Coefficient2 x"] = value @property def coefficient3_x2(self): """field `Coefficient3 x**2` Args: value (float): value for IDD Field `Coefficient3 x**2` Raises: 
ValueError: if `value` is not a valid value Returns: float: the value of `coefficient3_x2` or None if not set """ return self["Coefficient3 x**2"] @coefficient3_x2.setter def coefficient3_x2(self, value=None): """ Corresponds to IDD field `Coefficient3 x**2` """ self["Coefficient3 x**2"] = value @property def coefficient4_x3(self): """field `Coefficient4 x**3` Args: value (float): value for IDD Field `Coefficient4 x**3` Raises: ValueError: if `value` is not a valid value Returns: float: the value of `coefficient4_x3` or None if not set """ return self["Coefficient4 x**3"] @coefficient4_x3.setter def coefficient4_x3(self, value=None): """ Corresponds to IDD field `Coefficient4 x**3` """ self["Coefficient4 x**3"] = value @property def coefficient5_x4(self): """field `Coefficient5 x**4` Args: value (float): value for IDD Field `Coefficient5 x**4` Raises: ValueError: if `value` is not a valid value Returns: float: the value of `coefficient5_x4` or None if not set """ return self["Coefficient5 x**4"] @coefficient5_x4.setter def coefficient5_x4(self, value=None): """ Corresponds to IDD field `Coefficient5 x**4` """ self["Coefficient5 x**4"] = value @property def minimum_value_of_x(self): """field `Minimum Value of x` | Units are based on field `A2` Args: value (float): value for IDD Field `Minimum Value of x` Raises: ValueError: if `value` is not a valid value Returns: float: the value of `minimum_value_of_x` or None if not set """ return self["Minimum Value of x"] @minimum_value_of_x.setter def minimum_value_of_x(self, value=None): """Corresponds to IDD field `Minimum Value of x`""" self["Minimum Value of x"] = value @property def maximum_value_of_x(self): """field `Maximum Value of x` | Units are based on field `A2` Args: value (float): value for IDD Field `Maximum Value of x` Raises: ValueError: if `value` is not a valid value Returns: float: the value of `maximum_value_of_x` or None if not set """ return self["Maximum Value of x"] @maximum_value_of_x.setter def 
maximum_value_of_x(self, value=None): """Corresponds to IDD field `Maximum Value of x`""" self["Maximum Value of x"] = value @property def minimum_curve_output(self): """field `Minimum Curve Output` | Specify the minimum value calculated by this curve object | Units are based on field `A3` Args: value (float): value for IDD Field `Minimum Curve Output` Raises: ValueError: if `value` is not a valid value Returns: float: the value of `minimum_curve_output` or None if not set """ return self["Minimum Curve Output"] @minimum_curve_output.setter def minimum_curve_output(self, value=None): """Corresponds to IDD field `Minimum Curve Output`""" self["Minimum Curve Output"] = value @property def maximum_curve_output(self): """field `Maximum Curve Output` | Specify the maximum value calculated by this curve object | Units are based on field `A3` Args: value (float): value for IDD Field `Maximum Curve Output` Raises: ValueError: if `value` is not a valid value Returns: float: the value of `maximum_curve_output` or None if not set """ return self["Maximum Curve Output"] @maximum_curve_output.setter def maximum_curve_output(self, value=None): """Corresponds to IDD field `Maximum Curve Output`""" self["Maximum Curve Output"] = value @property def input_unit_type_for_x(self): """field `Input Unit Type for X` | Default value: Dimensionless Args: value (str): value for IDD Field `Input Unit Type for X` Raises: ValueError: if `value` is not a valid value Returns: str: the value of `input_unit_type_for_x` or None if not set """ return self["Input Unit Type for X"] @input_unit_type_for_x.setter def input_unit_type_for_x(self, value="Dimensionless"): """Corresponds to IDD field `Input Unit Type for X`""" self["Input Unit Type for X"] = value @property def output_unit_type(self): """field `Output Unit Type` | Default value: Dimensionless Args: value (str): value for IDD Field `Output Unit Type` Raises: ValueError: if `value` is not a valid value Returns: str: the value of 
`output_unit_type` or None if not set """ return self["Output Unit Type"] @output_unit_type.setter def output_unit_type(self, value="Dimensionless"): """Corresponds to IDD field `Output Unit Type`""" self["Output Unit Type"] = value class CurveExponent(DataObject): """ Corresponds to IDD object `Curve:Exponent` Exponent curve with one independent variable. Input for a exponent curve consists of the curve name, the 3 coefficients, and the maximum and minimum valid independent variable values. Optional inputs for curve minimum and maximum may be used to limit the output of the performance curve. curve = C1 + C2*x**C3 The independent variable x is raised to the C3 power, multiplied by C2, and C1 is added to the result. """ _schema = {'extensible-fields': OrderedDict(), 'fields': OrderedDict([(u'name', {'name': u'Name', 'pyname': u'name', 'required-field': True, 'autosizable': False, 'autocalculatable': False, 'type': u'alpha'}), (u'coefficient1 constant', {'name': u'Coefficient1 Constant', 'pyname': u'coefficient1_constant', 'required-field': True, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'coefficient2 constant', {'name': u'Coefficient2 Constant', 'pyname': u'coefficient2_constant', 'required-field': True, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'coefficient3 constant', {'name': u'Coefficient3 Constant', 'pyname': u'coefficient3_constant', 'required-field': True, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'minimum value of x', {'name': u'Minimum Value of x', 'pyname': u'minimum_value_of_x', 'required-field': True, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'maximum value of x', {'name': u'Maximum Value of x', 'pyname': u'maximum_value_of_x', 'required-field': True, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'minimum curve output', {'name': u'Minimum Curve Output', 'pyname': u'minimum_curve_output', 'required-field': False, 
'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'maximum curve output', {'name': u'Maximum Curve Output', 'pyname': u'maximum_curve_output', 'required-field': False, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'input unit type for x', {'name': u'Input Unit Type for X', 'pyname': u'input_unit_type_for_x', 'default': u'Dimensionless', 'required-field': False, 'autosizable': False, 'accepted-values': [u'Dimensionless', u'Temperature', u'VolumetricFlow', u'MassFlow', u'Power', u'Distance'], 'autocalculatable': False, 'type': 'alpha'}), (u'output unit type', {'name': u'Output Unit Type', 'pyname': u'output_unit_type', 'default': u'Dimensionless', 'required-field': False, 'autosizable': False, 'accepted-values': [u'Dimensionless', u'Capacity', u'Power'], 'autocalculatable': False, 'type': 'alpha'})]), 'format': None, 'group': u'Performance Curves', 'min-fields': 6, 'name': u'Curve:Exponent', 'pyname': u'CurveExponent', 'required-object': False, 'unique-object': False} @property def name(self): """field `Name` Args: value (str): value for IDD Field `Name` Raises: ValueError: if `value` is not a valid value Returns: str: the value of `name` or None if not set """ return self["Name"] @name.setter def name(self, value=None): """Corresponds to IDD field `Name`""" self["Name"] = value @property def coefficient1_constant(self): """field `Coefficient1 Constant` Args: value (float): value for IDD Field `Coefficient1 Constant` Raises: ValueError: if `value` is not a valid value Returns: float: the value of `coefficient1_constant` or None if not set """ return self["Coefficient1 Constant"] @coefficient1_constant.setter def coefficient1_constant(self, value=None): """Corresponds to IDD field `Coefficient1 Constant`""" self["Coefficient1 Constant"] = value @property def coefficient2_constant(self): """field `Coefficient2 Constant` Args: value (float): value for IDD Field `Coefficient2 Constant` Raises: ValueError: if `value` is not a 
valid value Returns: float: the value of `coefficient2_constant` or None if not set """ return self["Coefficient2 Constant"] @coefficient2_constant.setter def coefficient2_constant(self, value=None): """Corresponds to IDD field `Coefficient2 Constant`""" self["Coefficient2 Constant"] = value @property def coefficient3_constant(self): """field `Coefficient3 Constant` Args: value (float): value for IDD Field `Coefficient3 Constant` Raises: ValueError: if `value` is not a valid value Returns: float: the value of `coefficient3_constant` or None if not set """ return self["Coefficient3 Constant"] @coefficient3_constant.setter def coefficient3_constant(self, value=None): """Corresponds to IDD field `Coefficient3 Constant`""" self["Coefficient3 Constant"] = value @property def minimum_value_of_x(self): """field `Minimum Value of x` | Specify the minimum value of the independent variable x allowed | Units are based on field `A2` Args: value (float): value for IDD Field `Minimum Value of x` Raises: ValueError: if `value` is not a valid value Returns: float: the value of `minimum_value_of_x` or None if not set """ return self["Minimum Value of x"] @minimum_value_of_x.setter def minimum_value_of_x(self, value=None): """Corresponds to IDD field `Minimum Value of x`""" self["Minimum Value of x"] = value @property def maximum_value_of_x(self): """field `Maximum Value of x` | Specify the maximum value of the independent variable x allowed | Units are based on field `A2` Args: value (float): value for IDD Field `Maximum Value of x` Raises: ValueError: if `value` is not a valid value Returns: float: the value of `maximum_value_of_x` or None if not set """ return self["Maximum Value of x"] @maximum_value_of_x.setter def maximum_value_of_x(self, value=None): """Corresponds to IDD field `Maximum Value of x`""" self["Maximum Value of x"] = value @property def minimum_curve_output(self): """field `Minimum Curve Output` | Specify the minimum value calculated by this curve object | Units 
are based on field `A3` Args: value (float): value for IDD Field `Minimum Curve Output` Raises: ValueError: if `value` is not a valid value Returns: float: the value of `minimum_curve_output` or None if not set """ return self["Minimum Curve Output"] @minimum_curve_output.setter def minimum_curve_output(self, value=None): """Corresponds to IDD field `Minimum Curve Output`""" self["Minimum Curve Output"] = value @property def maximum_curve_output(self): """field `Maximum Curve Output` | Specify the maximum value calculated by this curve object | Units are based on field `A3` Args: value (float): value for IDD Field `Maximum Curve Output` Raises: ValueError: if `value` is not a valid value Returns: float: the value of `maximum_curve_output` or None if not set """ return self["Maximum Curve Output"] @maximum_curve_output.setter def maximum_curve_output(self, value=None): """Corresponds to IDD field `Maximum Curve Output`""" self["Maximum Curve Output"] = value @property def input_unit_type_for_x(self): """field `Input Unit Type for X` | Default value: Dimensionless Args: value (str): value for IDD Field `Input Unit Type for X` Raises: ValueError: if `value` is not a valid value Returns: str: the value of `input_unit_type_for_x` or None if not set """ return self["Input Unit Type for X"] @input_unit_type_for_x.setter def input_unit_type_for_x(self, value="Dimensionless"): """Corresponds to IDD field `Input Unit Type for X`""" self["Input Unit Type for X"] = value @property def output_unit_type(self): """field `Output Unit Type` | Default value: Dimensionless Args: value (str): value for IDD Field `Output Unit Type` Raises: ValueError: if `value` is not a valid value Returns: str: the value of `output_unit_type` or None if not set """ return self["Output Unit Type"] @output_unit_type.setter def output_unit_type(self, value="Dimensionless"): """Corresponds to IDD field `Output Unit Type`""" self["Output Unit Type"] = value class CurveBicubic(DataObject): """ Corresponds 
to IDD object `Curve:Bicubic` Cubic curve with two independent variables. Input consists of the curve name, the ten coefficients, and the minimum and maximum values for each of the independent variables. Optional inputs for curve minimum and maximum may be used to limit the output of the performance curve. curve = C1 + C2*x + C3*x**2 + C4*y + C5*y**2 + C6*x*y + C7*x**3 + C8*y**3 + C9*x**2*y + C10*x*y**2 """ _schema = {'extensible-fields': OrderedDict(), 'fields': OrderedDict([(u'name', {'name': u'Name', 'pyname': u'name', 'required-field': True, 'autosizable': False, 'autocalculatable': False, 'type': u'alpha'}), (u'coefficient1 constant', {'name': u'Coefficient1 Constant', 'pyname': u'coefficient1_constant', 'required-field': True, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'coefficient2 x', {'name': u'Coefficient2 x', 'pyname': u'coefficient2_x', 'required-field': True, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'coefficient3 x**2', {'name': u'Coefficient3 x**2', 'pyname': u'coefficient3_x2', 'required-field': True, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'coefficient4 y', {'name': u'Coefficient4 y', 'pyname': u'coefficient4_y', 'required-field': True, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'coefficient5 y**2', {'name': u'Coefficient5 y**2', 'pyname': u'coefficient5_y2', 'required-field': True, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'coefficient6 x*y', {'name': u'Coefficient6 x*y', 'pyname': u'coefficient6_xy', 'required-field': True, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'coefficient7 x**3', {'name': u'Coefficient7 x**3', 'pyname': u'coefficient7_x3', 'required-field': True, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'coefficient8 y**3', {'name': u'Coefficient8 y**3', 'pyname': u'coefficient8_y3', 'required-field': True, 'autosizable': False, 
'autocalculatable': False, 'type': u'real'}), (u'coefficient9 x**2*y', {'name': u'Coefficient9 x**2*y', 'pyname': u'coefficient9_x2y', 'required-field': True, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'coefficient10 x*y**2', {'name': u'Coefficient10 x*y**2', 'pyname': u'coefficient10_xy2', 'required-field': True, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'minimum value of x', {'name': u'Minimum Value of x', 'pyname': u'minimum_value_of_x', 'required-field': True, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'maximum value of x', {'name': u'Maximum Value of x', 'pyname': u'maximum_value_of_x', 'required-field': True, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'minimum value of y', {'name': u'Minimum Value of y', 'pyname': u'minimum_value_of_y', 'required-field': True, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'maximum value of y', {'name': u'Maximum Value of y', 'pyname': u'maximum_value_of_y', 'required-field': True, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'minimum curve output', {'name': u'Minimum Curve Output', 'pyname': u'minimum_curve_output', 'required-field': False, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'maximum curve output', {'name': u'Maximum Curve Output', 'pyname': u'maximum_curve_output', 'required-field': False, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'input unit type for x', {'name': u'Input Unit Type for X', 'pyname': u'input_unit_type_for_x', 'default': u'Dimensionless', 'required-field': False, 'autosizable': False, 'accepted-values': [u'Dimensionless', u'Temperature', u'VolumetricFlow', u'MassFlow', u'Power', u'Distance'], 'autocalculatable': False, 'type': 'alpha'}), (u'input unit type for y', {'name': u'Input Unit Type for Y', 'pyname': u'input_unit_type_for_y', 'default': u'Dimensionless', 'required-field': False, 
'autosizable': False, 'accepted-values': [u'Dimensionless', u'Temperature', u'VolumetricFlow', u'MassFlow', u'Power', u'Distance'], 'autocalculatable': False, 'type': 'alpha'}), (u'output unit type', {'name': u'Output Unit Type', 'pyname': u'output_unit_type', 'default': u'Dimensionless', 'required-field': False, 'autosizable': False, 'accepted-values': [u'Dimensionless', u'Capacity', u'Power'], 'autocalculatable': False, 'type': 'alpha'})]), 'format': None, 'group': u'Performance Curves', 'min-fields': 0, 'name': u'Curve:Bicubic', 'pyname': u'CurveBicubic', 'required-object': False, 'unique-object': False} @property def name(self): """field `Name` Args: value (str): value for IDD Field `Name` Raises: ValueError: if `value` is not a valid value Returns: str: the value of `name` or None if not set """ return self["Name"] @name.setter def name(self, value=None): """Corresponds to IDD field `Name`""" self["Name"] = value @property def coefficient1_constant(self): """field `Coefficient1 Constant` Args: value (float): value for IDD Field `Coefficient1 Constant` Raises: ValueError: if `value` is not a valid value Returns: float: the value of `coefficient1_constant` or None if not set """ return self["Coefficient1 Constant"] @coefficient1_constant.setter def coefficient1_constant(self, value=None): """Corresponds to IDD field `Coefficient1 Constant`""" self["Coefficient1 Constant"] = value @property def coefficient2_x(self): """field `Coefficient2 x` Args: value (float): value for IDD Field `Coefficient2 x` Raises: ValueError: if `value` is not a valid value Returns: float: the value of `coefficient2_x` or None if not set """ return self["Coefficient2 x"] @coefficient2_x.setter def coefficient2_x(self, value=None): """Corresponds to IDD field `Coefficient2 x`""" self["Coefficient2 x"] = value @property def coefficient3_x2(self): """field `Coefficient3 x**2` Args: value (float): value for IDD Field `Coefficient3 x**2` Raises: ValueError: if `value` is not a valid value 
Returns: float: the value of `coefficient3_x2` or None if not set """ return self["Coefficient3 x**2"] @coefficient3_x2.setter def coefficient3_x2(self, value=None): """ Corresponds to IDD field `Coefficient3 x**2` """ self["Coefficient3 x**2"] = value @property def coefficient4_y(self): """field `Coefficient4 y` Args: value (float): value for IDD Field `Coefficient4 y` Raises: ValueError: if `value` is not a valid value Returns: float: the value of `coefficient4_y` or None if not set """ return self["Coefficient4 y"] @coefficient4_y.setter def coefficient4_y(self, value=None): """Corresponds to IDD field `Coefficient4 y`""" self["Coefficient4 y"] = value @property def coefficient5_y2(self): """field `Coefficient5 y**2` Args: value (float): value for IDD Field `Coefficient5 y**2` Raises: ValueError: if `value` is not a valid value Returns: float: the value of `coefficient5_y2` or None if not set """ return self["Coefficient5 y**2"] @coefficient5_y2.setter def coefficient5_y2(self, value=None): """ Corresponds to IDD field `Coefficient5 y**2` """ self["Coefficient5 y**2"] = value @property def coefficient6_xy(self): """field `Coefficient6 x*y` Args: value (float): value for IDD Field `Coefficient6 x*y` Raises: ValueError: if `value` is not a valid value Returns: float: the value of `coefficient6_xy` or None if not set """ return self["Coefficient6 x*y"] @coefficient6_xy.setter def coefficient6_xy(self, value=None): """ Corresponds to IDD field `Coefficient6 x*y` """ self["Coefficient6 x*y"] = value @property def coefficient7_x3(self): """field `Coefficient7 x**3` Args: value (float): value for IDD Field `Coefficient7 x**3` Raises: ValueError: if `value` is not a valid value Returns: float: the value of `coefficient7_x3` or None if not set """ return self["Coefficient7 x**3"] @coefficient7_x3.setter def coefficient7_x3(self, value=None): """ Corresponds to IDD field `Coefficient7 x**3` """ self["Coefficient7 x**3"] = value @property def coefficient8_y3(self): 
"""field `Coefficient8 y**3` Args: value (float): value for IDD Field `Coefficient8 y**3` Raises: ValueError: if `value` is not a valid value Returns: float: the value of `coefficient8_y3` or None if not set """ return self["Coefficient8 y**3"] @coefficient8_y3.setter def coefficient8_y3(self, value=None): """ Corresponds to IDD field `Coefficient8 y**3` """ self["Coefficient8 y**3"] = value @property def coefficient9_x2y(self): """field `Coefficient9 x**2*y` Args: value (float): value for IDD Field `Coefficient9 x**2*y` Raises: ValueError: if `value` is not a valid value Returns: float: the value of `coefficient9_x2y` or None if not set """ return self["Coefficient9 x**2*y"] @coefficient9_x2y.setter def coefficient9_x2y(self, value=None): """ Corresponds to IDD field `Coefficient9 x**2*y` """ self["Coefficient9 x**2*y"] = value @property def coefficient10_xy2(self): """field `Coefficient10 x*y**2` Args: value (float): value for IDD Field `Coefficient10 x*y**2` Raises: ValueError: if `value` is not a valid value Returns: float: the value of `coefficient10_xy2` or None if not set """ return self["Coefficient10 x*y**2"] @coefficient10_xy2.setter def coefficient10_xy2(self, value=None): """ Corresponds to IDD field `Coefficient10 x*y**2` """ self["Coefficient10 x*y**2"] = value @property def minimum_value_of_x(self): """field `Minimum Value of x` | Units are based on field `A2` Args: value (float): value for IDD Field `Minimum Value of x` Raises: ValueError: if `value` is not a valid value Returns: float: the value of `minimum_value_of_x` or None if not set """ return self["Minimum Value of x"] @minimum_value_of_x.setter def minimum_value_of_x(self, value=None): """Corresponds to IDD field `Minimum Value of x`""" self["Minimum Value of x"] = value @property def maximum_value_of_x(self): """field `Maximum Value of x` | Units are based on field `A2` Args: value (float): value for IDD Field `Maximum Value of x` Raises: ValueError: if `value` is not a valid value Returns: 
float: the value of `maximum_value_of_x` or None if not set """ return self["Maximum Value of x"] @maximum_value_of_x.setter def maximum_value_of_x(self, value=None): """Corresponds to IDD field `Maximum Value of x`""" self["Maximum Value of x"] = value @property def minimum_value_of_y(self): """field `Minimum Value of y` | Units are based on field `A3` Args: value (float): value for IDD Field `Minimum Value of y` Raises: ValueError: if `value` is not a valid value Returns: float: the value of `minimum_value_of_y` or None if not set """ return self["Minimum Value of y"] @minimum_value_of_y.setter def minimum_value_of_y(self, value=None): """Corresponds to IDD field `Minimum Value of y`""" self["Minimum Value of y"] = value @property def maximum_value_of_y(self): """field `Maximum Value of y` | Units are based on field `A3` Args: value (float): value for IDD Field `Maximum Value of y` Raises: ValueError: if `value` is not a valid value Returns: float: the value of `maximum_value_of_y` or None if not set """ return self["Maximum Value of y"] @maximum_value_of_y.setter def maximum_value_of_y(self, value=None): """Corresponds to IDD field `Maximum Value of y`""" self["Maximum Value of y"] = value @property def minimum_curve_output(self): """field `Minimum Curve Output` | Specify the minimum value calculated by this curve object | Units are based on field `A4` Args: value (float): value for IDD Field `Minimum Curve Output` Raises: ValueError: if `value` is not a valid value Returns: float: the value of `minimum_curve_output` or None if not set """ return self["Minimum Curve Output"] @minimum_curve_output.setter def minimum_curve_output(self, value=None): """Corresponds to IDD field `Minimum Curve Output`""" self["Minimum Curve Output"] = value @property def maximum_curve_output(self): """field `Maximum Curve Output` | Specify the maximum value calculated by this curve object | Units are based on field `A4` Args: value (float): value for IDD Field `Maximum Curve Output` 
Raises: ValueError: if `value` is not a valid value Returns: float: the value of `maximum_curve_output` or None if not set """ return self["Maximum Curve Output"] @maximum_curve_output.setter def maximum_curve_output(self, value=None): """Corresponds to IDD field `Maximum Curve Output`""" self["Maximum Curve Output"] = value @property def input_unit_type_for_x(self): """field `Input Unit Type for X` | Default value: Dimensionless Args: value (str): value for IDD Field `Input Unit Type for X` Raises: ValueError: if `value` is not a valid value Returns: str: the value of `input_unit_type_for_x` or None if not set """ return self["Input Unit Type for X"] @input_unit_type_for_x.setter def input_unit_type_for_x(self, value="Dimensionless"): """Corresponds to IDD field `Input Unit Type for X`""" self["Input Unit Type for X"] = value @property def input_unit_type_for_y(self): """field `Input Unit Type for Y` | Default value: Dimensionless Args: value (str): value for IDD Field `Input Unit Type for Y` Raises: ValueError: if `value` is not a valid value Returns: str: the value of `input_unit_type_for_y` or None if not set """ return self["Input Unit Type for Y"] @input_unit_type_for_y.setter def input_unit_type_for_y(self, value="Dimensionless"): """Corresponds to IDD field `Input Unit Type for Y`""" self["Input Unit Type for Y"] = value @property def output_unit_type(self): """field `Output Unit Type` | Default value: Dimensionless Args: value (str): value for IDD Field `Output Unit Type` Raises: ValueError: if `value` is not a valid value Returns: str: the value of `output_unit_type` or None if not set """ return self["Output Unit Type"] @output_unit_type.setter def output_unit_type(self, value="Dimensionless"): """Corresponds to IDD field `Output Unit Type`""" self["Output Unit Type"] = value class CurveBiquadratic(DataObject): """ Corresponds to IDD object `Curve:Biquadratic` Quadratic curve with two independent variables. 
Input consists of the curve name, the six
        coefficients, and min and max values for each of the independent
        variables. Optional inputs for curve minimum and maximum may be used
        to limit the output of the performance curve.
        curve = C1 + C2*x + C3*x**2 + C4*y + C5*y**2 + C6*x*y
    """

    # NOTE(review): this class appears to be auto-generated from the
    # EnergyPlus IDD -- prefer regenerating over hand-editing (TODO confirm).
    # Schema metadata consumed by the DataObject base class for field
    # validation, defaults, and name lookup.
    _schema = {'extensible-fields': OrderedDict(),
               'fields': OrderedDict([(u'name',
                                       {'name': u'Name',
                                        'pyname': u'name',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'alpha'}),
                                      (u'coefficient1 constant',
                                       {'name': u'Coefficient1 Constant',
                                        'pyname': u'coefficient1_constant',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real'}),
                                      (u'coefficient2 x',
                                       {'name': u'Coefficient2 x',
                                        'pyname': u'coefficient2_x',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real'}),
                                      (u'coefficient3 x**2',
                                       {'name': u'Coefficient3 x**2',
                                        'pyname': u'coefficient3_x2',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real'}),
                                      (u'coefficient4 y',
                                       {'name': u'Coefficient4 y',
                                        'pyname': u'coefficient4_y',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real'}),
                                      (u'coefficient5 y**2',
                                       {'name': u'Coefficient5 y**2',
                                        'pyname': u'coefficient5_y2',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real'}),
                                      (u'coefficient6 x*y',
                                       {'name': u'Coefficient6 x*y',
                                        'pyname': u'coefficient6_xy',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real'}),
                                      (u'minimum value of x',
                                       {'name': u'Minimum Value of x',
                                        'pyname': u'minimum_value_of_x',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real'}),
                                      (u'maximum value of x',
                                       {'name': u'Maximum Value of x',
                                        'pyname': u'maximum_value_of_x',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real'}),
                                      (u'minimum value of y',
                                       {'name': u'Minimum Value of y',
                                        'pyname': u'minimum_value_of_y',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real'}),
                                      (u'maximum value of y',
                                       {'name': u'Maximum Value of y',
                                        'pyname': u'maximum_value_of_y',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real'}),
                                      (u'minimum curve output',
                                       {'name': u'Minimum Curve Output',
                                        'pyname': u'minimum_curve_output',
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real'}),
                                      (u'maximum curve output',
                                       {'name': u'Maximum Curve Output',
                                        'pyname': u'maximum_curve_output',
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real'}),
                                      (u'input unit type for x',
                                       {'name': u'Input Unit Type for X',
                                        'pyname': u'input_unit_type_for_x',
                                        'default': u'Dimensionless',
                                        'required-field': False,
                                        'autosizable': False,
                                        'accepted-values': [u'Dimensionless',
                                                            u'Temperature',
                                                            u'VolumetricFlow',
                                                            u'MassFlow',
                                                            u'Power',
                                                            u'Distance'],
                                        'autocalculatable': False,
                                        'type': 'alpha'}),
                                      (u'input unit type for y',
                                       {'name': u'Input Unit Type for Y',
                                        'pyname': u'input_unit_type_for_y',
                                        'default': u'Dimensionless',
                                        'required-field': False,
                                        'autosizable': False,
                                        'accepted-values': [u'Dimensionless',
                                                            u'Temperature',
                                                            u'VolumetricFlow',
                                                            u'MassFlow',
                                                            u'Power',
                                                            u'Distance'],
                                        'autocalculatable': False,
                                        'type': 'alpha'}),
                                      (u'output unit type',
                                       {'name': u'Output Unit Type',
                                        'pyname': u'output_unit_type',
                                        'default': u'Dimensionless',
                                        'required-field': False,
                                        'autosizable': False,
                                        'accepted-values': [u'Dimensionless',
                                                            u'Capacity',
                                                            u'Power'],
                                        'autocalculatable': False,
                                        'type': 'alpha'})]),
               'format': None,
               'group': u'Performance Curves',
               'min-fields': 0,
               'name': u'Curve:Biquadratic',
               'pyname': u'CurveBiquadratic',
               'required-object': False,
               'unique-object': False}

    @property
    def name(self):
        """field `Name`

        Args:
            value (str): value for IDD Field `Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `name` or None if not set

        """
        return self["Name"]

    @name.setter
    def name(self, value=None):
        """Corresponds to IDD field `Name`"""
        self["Name"] = value

    @property
    def coefficient1_constant(self):
        """field `Coefficient1 Constant`

        Args:
            value (float): value for IDD Field `Coefficient1 Constant`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `coefficient1_constant` or None if not set

        """
        return self["Coefficient1 Constant"]

    @coefficient1_constant.setter
    def coefficient1_constant(self, value=None):
        """Corresponds to IDD field `Coefficient1 Constant`"""
        self["Coefficient1 Constant"] = value

    @property
    def coefficient2_x(self):
        """field `Coefficient2 x`

        Args:
            value (float): value for IDD Field `Coefficient2 x`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `coefficient2_x` or None if not set

        """
        return self["Coefficient2 x"]

    @coefficient2_x.setter
    def coefficient2_x(self, value=None):
        """Corresponds to IDD field `Coefficient2 x`"""
        self["Coefficient2 x"] = value

    @property
    def coefficient3_x2(self):
        """field `Coefficient3 x**2`

        Args:
            value (float): value for IDD Field `Coefficient3 x**2`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `coefficient3_x2` or None if not set

        """
        return self["Coefficient3 x**2"]

    @coefficient3_x2.setter
    def coefficient3_x2(self, value=None):
        """Corresponds to IDD field `Coefficient3 x**2`"""
        self["Coefficient3 x**2"] = value

    @property
    def coefficient4_y(self):
        """field `Coefficient4 y`

        Args:
            value (float): value for IDD Field `Coefficient4 y`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `coefficient4_y` or None if not set

        """
        return self["Coefficient4 y"]

    @coefficient4_y.setter
    def coefficient4_y(self, value=None):
        """Corresponds to IDD field `Coefficient4 y`"""
        self["Coefficient4 y"] = value

    @property
    def coefficient5_y2(self):
        """field `Coefficient5 y**2`

        Args:
            value (float): value for IDD Field `Coefficient5 y**2`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `coefficient5_y2` or None if not set

        """
        return self["Coefficient5 y**2"]

    @coefficient5_y2.setter
    def coefficient5_y2(self, value=None):
        """Corresponds to IDD field `Coefficient5 y**2`"""
        self["Coefficient5 y**2"] = value

    @property
    def coefficient6_xy(self):
        """field `Coefficient6 x*y`

        Args:
            value (float): value for IDD Field `Coefficient6 x*y`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `coefficient6_xy` or None if not set

        """
        return self["Coefficient6 x*y"]

    @coefficient6_xy.setter
    def coefficient6_xy(self, value=None):
        """Corresponds to IDD field `Coefficient6 x*y`"""
        self["Coefficient6 x*y"] = value

    @property
    def minimum_value_of_x(self):
        """field `Minimum Value of x`

        |  Units are based on field `A2`

        Args:
            value (float): value for IDD Field `Minimum Value of x`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `minimum_value_of_x` or None if not set

        """
        return self["Minimum Value of x"]

    @minimum_value_of_x.setter
    def minimum_value_of_x(self, value=None):
        """Corresponds to IDD field `Minimum Value of x`"""
        self["Minimum Value of x"] = value

    @property
    def maximum_value_of_x(self):
        """field `Maximum Value of x`

        |  Units are based on field `A2`

        Args:
            value (float): value for IDD Field `Maximum Value of x`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `maximum_value_of_x` or None if not set

        """
        return self["Maximum Value of x"]

    @maximum_value_of_x.setter
    def maximum_value_of_x(self, value=None):
        """Corresponds to IDD field `Maximum Value of x`"""
        self["Maximum Value of x"] = value

    @property
    def minimum_value_of_y(self):
        """field `Minimum Value of y`

        |  Units are based on field `A3`

        Args:
            value (float): value for IDD Field `Minimum Value of y`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `minimum_value_of_y` or None if not set

        """
        return self["Minimum Value of y"]

    @minimum_value_of_y.setter
    def minimum_value_of_y(self, value=None):
        """Corresponds to IDD field `Minimum Value of y`"""
        self["Minimum Value of y"] = value

    @property
    def maximum_value_of_y(self):
        """field `Maximum Value of y`

        |  Units are based on field `A3`

        Args:
            value (float): value for IDD Field `Maximum Value of y`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `maximum_value_of_y` or None if not set

        """
        return self["Maximum Value of y"]

    @maximum_value_of_y.setter
    def maximum_value_of_y(self, value=None):
        """Corresponds to IDD field `Maximum Value of y`"""
        self["Maximum Value of y"] = value

    @property
    def minimum_curve_output(self):
        """field `Minimum Curve Output`

        |  Specify the minimum value calculated by this curve object
        |  Units are based on field `A4`

        Args:
            value (float): value for IDD Field `Minimum Curve Output`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `minimum_curve_output` or None if not set

        """
        return self["Minimum Curve Output"]

    @minimum_curve_output.setter
    def minimum_curve_output(self, value=None):
        """Corresponds to IDD field `Minimum Curve Output`"""
        self["Minimum Curve Output"] = value

    @property
    def maximum_curve_output(self):
        """field `Maximum Curve Output`

        |  Specify the maximum value calculated by this curve object
        |  Units are based on field `A4`

        Args:
            value (float): value for IDD Field `Maximum Curve Output`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `maximum_curve_output` or None if not set

        """
        return self["Maximum Curve Output"]

    @maximum_curve_output.setter
    def maximum_curve_output(self, value=None):
        """Corresponds to IDD field `Maximum Curve Output`"""
        self["Maximum Curve Output"] = value

    @property
    def input_unit_type_for_x(self):
        """field `Input Unit Type for X`

        |  Default value: Dimensionless

        Args:
            value (str): value for IDD Field `Input Unit Type for X`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `input_unit_type_for_x` or None if not set

        """
        return self["Input Unit Type for X"]

    @input_unit_type_for_x.setter
    def input_unit_type_for_x(self, value="Dimensionless"):
        """Corresponds to IDD field `Input Unit Type for X`"""
        self["Input Unit Type for X"] = value

    @property
    def input_unit_type_for_y(self):
        """field `Input Unit Type for Y`

        |  Default value: Dimensionless

        Args:
            value (str): value for IDD Field `Input Unit Type for Y`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `input_unit_type_for_y` or None if not set

        """
        return self["Input Unit Type for Y"]

    @input_unit_type_for_y.setter
    def input_unit_type_for_y(self, value="Dimensionless"):
        """Corresponds to IDD field `Input Unit Type for Y`"""
        self["Input Unit Type for Y"] = value

    @property
    def output_unit_type(self):
        """field `Output Unit Type`

        |  Default value: Dimensionless

        Args:
            value (str): value for IDD Field `Output Unit Type`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `output_unit_type` or None if not set

        """
        return self["Output Unit Type"]

    @output_unit_type.setter
    def output_unit_type(self, value="Dimensionless"):
        """Corresponds to IDD field `Output Unit Type`"""
        self["Output Unit Type"] = value


class CurveQuadraticLinear(DataObject):

    """ Corresponds to IDD object `Curve:QuadraticLinear`
        Quadratic-linear curve with two independent variables. Input consists
        of the curve name, the six coefficients, and min and max values for
        each of the independent variables. Optional inputs for curve minimum
        and maximum may be used to limit the output of the performance curve.
curve = (C1 + C2*x + C3*x**2) + (C4 + C5*x + C6*x**2)*y
    """

    # NOTE(review): this class appears to be auto-generated from the
    # EnergyPlus IDD -- prefer regenerating over hand-editing (TODO confirm).
    # Schema metadata consumed by the DataObject base class for field
    # validation, defaults, and name lookup.
    _schema = {'extensible-fields': OrderedDict(),
               'fields': OrderedDict([(u'name',
                                       {'name': u'Name',
                                        'pyname': u'name',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'alpha'}),
                                      (u'coefficient1 constant',
                                       {'name': u'Coefficient1 Constant',
                                        'pyname': u'coefficient1_constant',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real'}),
                                      (u'coefficient2 x',
                                       {'name': u'Coefficient2 x',
                                        'pyname': u'coefficient2_x',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real'}),
                                      (u'coefficient3 x**2',
                                       {'name': u'Coefficient3 x**2',
                                        'pyname': u'coefficient3_x2',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real'}),
                                      (u'coefficient4 y',
                                       {'name': u'Coefficient4 y',
                                        'pyname': u'coefficient4_y',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real'}),
                                      (u'coefficient5 x*y',
                                       {'name': u'Coefficient5 x*y',
                                        'pyname': u'coefficient5_xy',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real'}),
                                      (u'coefficient6 x**2*y',
                                       {'name': u'Coefficient6 x**2*y',
                                        'pyname': u'coefficient6_x2y',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real'}),
                                      (u'minimum value of x',
                                       {'name': u'Minimum Value of x',
                                        'pyname': u'minimum_value_of_x',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real'}),
                                      (u'maximum value of x',
                                       {'name': u'Maximum Value of x',
                                        'pyname': u'maximum_value_of_x',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real'}),
                                      (u'minimum value of y',
                                       {'name': u'Minimum Value of y',
                                        'pyname': u'minimum_value_of_y',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real'}),
                                      (u'maximum value of y',
                                       {'name': u'Maximum Value of y',
                                        'pyname': u'maximum_value_of_y',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real'}),
                                      (u'minimum curve output',
                                       {'name': u'Minimum Curve Output',
                                        'pyname': u'minimum_curve_output',
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real'}),
                                      (u'maximum curve output',
                                       {'name': u'Maximum Curve Output',
                                        'pyname': u'maximum_curve_output',
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real'}),
                                      (u'input unit type for x',
                                       {'name': u'Input Unit Type for X',
                                        'pyname': u'input_unit_type_for_x',
                                        'default': u'Dimensionless',
                                        'required-field': False,
                                        'autosizable': False,
                                        'accepted-values': [u'Dimensionless',
                                                            u'Temperature',
                                                            u'VolumetricFlow',
                                                            u'MassFlow',
                                                            u'Power',
                                                            u'Distance'],
                                        'autocalculatable': False,
                                        'type': 'alpha'}),
                                      (u'input unit type for y',
                                       {'name': u'Input Unit Type for Y',
                                        'pyname': u'input_unit_type_for_y',
                                        'default': u'Dimensionless',
                                        'required-field': False,
                                        'autosizable': False,
                                        'accepted-values': [u'Dimensionless',
                                                            u'Temperature',
                                                            u'VolumetricFlow',
                                                            u'MassFlow',
                                                            u'Power',
                                                            u'Distance'],
                                        'autocalculatable': False,
                                        'type': 'alpha'}),
                                      (u'output unit type',
                                       {'name': u'Output Unit Type',
                                        'pyname': u'output_unit_type',
                                        'default': u'Dimensionless',
                                        'required-field': False,
                                        'autosizable': False,
                                        'accepted-values': [u'Dimensionless',
                                                            u'Capacity',
                                                            u'Power'],
                                        'autocalculatable': False,
                                        'type': 'alpha'})]),
               'format': None,
               'group': u'Performance Curves',
               'min-fields': 0,
               'name': u'Curve:QuadraticLinear',
               'pyname': u'CurveQuadraticLinear',
               'required-object': False,
               'unique-object': False}

    @property
    def name(self):
        """field `Name`

        Args:
            value (str): value for IDD Field `Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `name` or None if not set

        """
        return self["Name"]

    @name.setter
    def name(self, value=None):
        """Corresponds to IDD field `Name`"""
        self["Name"] = value

    @property
    def coefficient1_constant(self):
        """field `Coefficient1 Constant`

        Args:
            value (float): value for IDD Field `Coefficient1 Constant`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `coefficient1_constant` or None if not set

        """
        return self["Coefficient1 Constant"]

    @coefficient1_constant.setter
    def coefficient1_constant(self, value=None):
        """Corresponds to IDD field `Coefficient1 Constant`"""
        self["Coefficient1 Constant"] = value

    @property
    def coefficient2_x(self):
        """field `Coefficient2 x`

        Args:
            value (float): value for IDD Field `Coefficient2 x`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `coefficient2_x` or None if not set

        """
        return self["Coefficient2 x"]

    @coefficient2_x.setter
    def coefficient2_x(self, value=None):
        """Corresponds to IDD field `Coefficient2 x`"""
        self["Coefficient2 x"] = value

    @property
    def coefficient3_x2(self):
        """field `Coefficient3 x**2`

        Args:
            value (float): value for IDD Field `Coefficient3 x**2`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `coefficient3_x2` or None if not set

        """
        return self["Coefficient3 x**2"]

    @coefficient3_x2.setter
    def coefficient3_x2(self, value=None):
        """Corresponds to IDD field `Coefficient3 x**2`"""
        self["Coefficient3 x**2"] = value

    @property
    def coefficient4_y(self):
        """field `Coefficient4 y`

        Args:
            value (float): value for IDD Field `Coefficient4 y`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `coefficient4_y` or None if not set

        """
        return self["Coefficient4 y"]

    @coefficient4_y.setter
    def coefficient4_y(self, value=None):
        """Corresponds to IDD field `Coefficient4 y`"""
        self["Coefficient4 y"] = value

    @property
    def coefficient5_xy(self):
        """field `Coefficient5 x*y`

        Args:
            value (float): value for IDD Field `Coefficient5 x*y`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `coefficient5_xy` or None if not set

        """
        return self["Coefficient5 x*y"]

    @coefficient5_xy.setter
    def coefficient5_xy(self, value=None):
        """Corresponds to IDD field `Coefficient5 x*y`"""
        self["Coefficient5 x*y"] = value

    @property
    def coefficient6_x2y(self):
        """field `Coefficient6 x**2*y`

        Args:
            value (float): value for IDD Field `Coefficient6 x**2*y`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `coefficient6_x2y` or None if not set

        """
        return self["Coefficient6 x**2*y"]

    @coefficient6_x2y.setter
    def coefficient6_x2y(self, value=None):
        """Corresponds to IDD field `Coefficient6 x**2*y`"""
        self["Coefficient6 x**2*y"] = value

    @property
    def minimum_value_of_x(self):
        """field `Minimum Value of x`

        |  Units are based on field `A2`

        Args:
            value (float): value for IDD Field `Minimum Value of x`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `minimum_value_of_x` or None if not set

        """
        return self["Minimum Value of x"]

    @minimum_value_of_x.setter
    def minimum_value_of_x(self, value=None):
        """Corresponds to IDD field `Minimum Value of x`"""
        self["Minimum Value of x"] = value

    @property
    def maximum_value_of_x(self):
        """field `Maximum Value of x`

        |  Units are based on field `A2`

        Args:
            value (float): value for IDD Field `Maximum Value of x`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `maximum_value_of_x` or None if not set

        """
        return self["Maximum Value of x"]

    @maximum_value_of_x.setter
    def maximum_value_of_x(self, value=None):
        """Corresponds to IDD field `Maximum Value of x`"""
        self["Maximum Value of x"] = value

    @property
    def minimum_value_of_y(self):
        """field `Minimum Value of y`

        |  Units are based on field `A3`

        Args:
            value (float): value for IDD Field `Minimum Value of y`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `minimum_value_of_y` or None if not set

        """
        return self["Minimum Value of y"]

    @minimum_value_of_y.setter
    def minimum_value_of_y(self, value=None):
        """Corresponds to IDD field `Minimum Value of y`"""
        self["Minimum Value of y"] = value

    @property
    def maximum_value_of_y(self):
        """field `Maximum Value of y`

        |  Units are based on field `A3`

        Args:
            value (float): value for IDD Field `Maximum Value of y`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `maximum_value_of_y` or None if not set

        """
        return self["Maximum Value of y"]

    @maximum_value_of_y.setter
    def maximum_value_of_y(self, value=None):
        """Corresponds to IDD field `Maximum Value of y`"""
        self["Maximum Value of y"] = value

    @property
    def minimum_curve_output(self):
        """field `Minimum Curve Output`

        |  Specify the minimum value calculated by this curve object
        |  Units are based on field `A4`

        Args:
            value (float): value for IDD Field `Minimum Curve Output`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `minimum_curve_output` or None if not set

        """
        return self["Minimum Curve Output"]

    @minimum_curve_output.setter
    def minimum_curve_output(self, value=None):
        """Corresponds to IDD field `Minimum Curve Output`"""
        self["Minimum Curve Output"] = value

    @property
    def maximum_curve_output(self):
        """field `Maximum Curve Output`

        |  Specify the maximum value calculated by this curve object
        |  Units are based on field `A4`

        Args:
            value (float): value for IDD Field `Maximum Curve Output`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `maximum_curve_output` or None if not set

        """
        return self["Maximum Curve Output"]

    @maximum_curve_output.setter
    def maximum_curve_output(self, value=None):
        """Corresponds to IDD field `Maximum Curve Output`"""
        self["Maximum Curve Output"] = value

    @property
    def input_unit_type_for_x(self):
        """field `Input Unit Type for X`

        |  Default value: Dimensionless

        Args:
            value (str): value for IDD Field `Input Unit Type for X`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `input_unit_type_for_x` or None if not set

        """
        return self["Input Unit Type for X"]

    @input_unit_type_for_x.setter
    def input_unit_type_for_x(self, value="Dimensionless"):
        """Corresponds to IDD field `Input Unit Type for X`"""
        self["Input Unit Type for X"] = value

    @property
    def input_unit_type_for_y(self):
        """field `Input Unit Type for Y`

        |  Default value: Dimensionless

        Args:
            value (str): value for IDD Field `Input Unit Type for Y`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `input_unit_type_for_y` or None if not set

        """
        return self["Input Unit Type for Y"]

    @input_unit_type_for_y.setter
    def input_unit_type_for_y(self, value="Dimensionless"):
        """Corresponds to IDD field `Input Unit Type for Y`"""
        self["Input Unit Type for Y"] = value

    @property
    def output_unit_type(self):
        """field `Output Unit Type`

        |  Default value: Dimensionless

        Args:
            value (str): value for IDD Field `Output Unit Type`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `output_unit_type` or None if not set

        """
        return self["Output Unit Type"]

    @output_unit_type.setter
    def output_unit_type(self, value="Dimensionless"):
        """Corresponds to IDD field `Output Unit Type`"""
        self["Output Unit Type"] = value


class CurveCubicLinear(DataObject):

    """ Corresponds to IDD object `Curve:CubicLinear`
        Cubic-linear curve with two independent variables. Input consists of
        the curve name, the six coefficients, and min and max values for each
        of the independent variables. Optional inputs for curve minimum and
        maximum may be used to limit the output of the performance curve.
curve = (C1 + C2*x + C3*x**2 + C4*x**3) + (C5 + C6*x)*y """ _schema = {'extensible-fields': OrderedDict(), 'fields': OrderedDict([(u'name', {'name': u'Name', 'pyname': u'name', 'required-field': True, 'autosizable': False, 'autocalculatable': False, 'type': u'alpha'}), (u'coefficient1 constant', {'name': u'Coefficient1 Constant', 'pyname': u'coefficient1_constant', 'required-field': True, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'coefficient2 x', {'name': u'Coefficient2 x', 'pyname': u'coefficient2_x', 'required-field': True, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'coefficient3 x**2', {'name': u'Coefficient3 x**2', 'pyname': u'coefficient3_x2', 'required-field': True, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'coefficient4 x**3', {'name': u'Coefficient4 x**3', 'pyname': u'coefficient4_x3', 'required-field': True, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'coefficient5 y', {'name': u'Coefficient5 y', 'pyname': u'coefficient5_y', 'required-field': True, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'coefficient6 x*y', {'name': u'Coefficient6 x*y', 'pyname': u'coefficient6_xy', 'required-field': True, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'minimum value of x', {'name': u'Minimum Value of x', 'pyname': u'minimum_value_of_x', 'required-field': True, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'maximum value of x', {'name': u'Maximum Value of x', 'pyname': u'maximum_value_of_x', 'required-field': True, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'minimum value of y', {'name': u'Minimum Value of y', 'pyname': u'minimum_value_of_y', 'required-field': True, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'maximum value of y', {'name': u'Maximum Value of y', 'pyname': u'maximum_value_of_y', 'required-field': True, 'autosizable': 
False, 'autocalculatable': False, 'type': u'real'}), (u'minimum curve output', {'name': u'Minimum Curve Output', 'pyname': u'minimum_curve_output', 'required-field': False, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'maximum curve output', {'name': u'Maximum Curve Output', 'pyname': u'maximum_curve_output', 'required-field': False, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'input unit type for x', {'name': u'Input Unit Type for X', 'pyname': u'input_unit_type_for_x', 'default': u'Dimensionless', 'required-field': False, 'autosizable': False, 'accepted-values': [u'Dimensionless'], 'autocalculatable': False, 'type': 'alpha'}), (u'input unit type for y', {'name': u'Input Unit Type for Y', 'pyname': u'input_unit_type_for_y', 'default': u'Dimensionless', 'required-field': False, 'autosizable': False, 'accepted-values': [u'Dimensionless'], 'autocalculatable': False, 'type': 'alpha'}), (u'output unit type', {'name': u'Output Unit Type', 'pyname': u'output_unit_type', 'default': u'Dimensionless', 'required-field': False, 'autosizable': False, 'accepted-values': [u'Dimensionless'], 'autocalculatable': False, 'type': 'alpha'})]), 'format': None, 'group': u'Performance Curves', 'min-fields': 0, 'name': u'Curve:CubicLinear', 'pyname': u'CurveCubicLinear', 'required-object': False, 'unique-object': False} @property def name(self): """field `Name` Args: value (str): value for IDD Field `Name` Raises: ValueError: if `value` is not a valid value Returns: str: the value of `name` or None if not set """ return self["Name"] @name.setter def name(self, value=None): """Corresponds to IDD field `Name`""" self["Name"] = value @property def coefficient1_constant(self): """field `Coefficient1 Constant` Args: value (float): value for IDD Field `Coefficient1 Constant` Raises: ValueError: if `value` is not a valid value Returns: float: the value of `coefficient1_constant` or None if not set """ return self["Coefficient1 Constant"] 
@coefficient1_constant.setter def coefficient1_constant(self, value=None): """Corresponds to IDD field `Coefficient1 Constant`""" self["Coefficient1 Constant"] = value @property def coefficient2_x(self): """field `Coefficient2 x` Args: value (float): value for IDD Field `Coefficient2 x` Raises: ValueError: if `value` is not a valid value Returns: float: the value of `coefficient2_x` or None if not set """ return self["Coefficient2 x"] @coefficient2_x.setter def coefficient2_x(self, value=None): """Corresponds to IDD field `Coefficient2 x`""" self["Coefficient2 x"] = value @property def coefficient3_x2(self): """field `Coefficient3 x**2` Args: value (float): value for IDD Field `Coefficient3 x**2` Raises: ValueError: if `value` is not a valid value Returns: float: the value of `coefficient3_x2` or None if not set """ return self["Coefficient3 x**2"] @coefficient3_x2.setter def coefficient3_x2(self, value=None): """ Corresponds to IDD field `Coefficient3 x**2` """ self["Coefficient3 x**2"] = value @property def coefficient4_x3(self): """field `Coefficient4 x**3` Args: value (float): value for IDD Field `Coefficient4 x**3` Raises: ValueError: if `value` is not a valid value Returns: float: the value of `coefficient4_x3` or None if not set """ return self["Coefficient4 x**3"] @coefficient4_x3.setter def coefficient4_x3(self, value=None): """ Corresponds to IDD field `Coefficient4 x**3` """ self["Coefficient4 x**3"] = value @property def coefficient5_y(self): """field `Coefficient5 y` Args: value (float): value for IDD Field `Coefficient5 y` Raises: ValueError: if `value` is not a valid value Returns: float: the value of `coefficient5_y` or None if not set """ return self["Coefficient5 y"] @coefficient5_y.setter def coefficient5_y(self, value=None): """Corresponds to IDD field `Coefficient5 y`""" self["Coefficient5 y"] = value @property def coefficient6_xy(self): """field `Coefficient6 x*y` Args: value (float): value for IDD Field `Coefficient6 x*y` Raises: ValueError: 
if `value` is not a valid value Returns: float: the value of `coefficient6_xy` or None if not set """ return self["Coefficient6 x*y"] @coefficient6_xy.setter def coefficient6_xy(self, value=None): """ Corresponds to IDD field `Coefficient6 x*y` """ self["Coefficient6 x*y"] = value @property def minimum_value_of_x(self): """field `Minimum Value of x` | Units are based on field `A2` Args: value (float): value for IDD Field `Minimum Value of x` Raises: ValueError: if `value` is not a valid value Returns: float: the value of `minimum_value_of_x` or None if not set """ return self["Minimum Value of x"] @minimum_value_of_x.setter def minimum_value_of_x(self, value=None): """Corresponds to IDD field `Minimum Value of x`""" self["Minimum Value of x"] = value @property def maximum_value_of_x(self): """field `Maximum Value of x` | Units are based on field `A2` Args: value (float): value for IDD Field `Maximum Value of x` Raises: ValueError: if `value` is not a valid value Returns: float: the value of `maximum_value_of_x` or None if not set """ return self["Maximum Value of x"] @maximum_value_of_x.setter def maximum_value_of_x(self, value=None): """Corresponds to IDD field `Maximum Value of x`""" self["Maximum Value of x"] = value @property def minimum_value_of_y(self): """field `Minimum Value of y` | Units are based on field `A3` Args: value (float): value for IDD Field `Minimum Value of y` Raises: ValueError: if `value` is not a valid value Returns: float: the value of `minimum_value_of_y` or None if not set """ return self["Minimum Value of y"] @minimum_value_of_y.setter def minimum_value_of_y(self, value=None): """Corresponds to IDD field `Minimum Value of y`""" self["Minimum Value of y"] = value @property def maximum_value_of_y(self): """field `Maximum Value of y` | Units are based on field `A3` Args: value (float): value for IDD Field `Maximum Value of y` Raises: ValueError: if `value` is not a valid value Returns: float: the value of `maximum_value_of_y` or None if not 
set """ return self["Maximum Value of y"] @maximum_value_of_y.setter def maximum_value_of_y(self, value=None): """Corresponds to IDD field `Maximum Value of y`""" self["Maximum Value of y"] = value @property def minimum_curve_output(self): """field `Minimum Curve Output` | Specify the minimum value calculated by this curve object | Units are based on field `A4` Args: value (float): value for IDD Field `Minimum Curve Output` Raises: ValueError: if `value` is not a valid value Returns: float: the value of `minimum_curve_output` or None if not set """ return self["Minimum Curve Output"] @minimum_curve_output.setter def minimum_curve_output(self, value=None): """Corresponds to IDD field `Minimum Curve Output`""" self["Minimum Curve Output"] = value @property def maximum_curve_output(self): """field `Maximum Curve Output` | Specify the maximum value calculated by this curve object | Units are based on field `A4` Args: value (float): value for IDD Field `Maximum Curve Output` Raises: ValueError: if `value` is not a valid value Returns: float: the value of `maximum_curve_output` or None if not set """ return self["Maximum Curve Output"] @maximum_curve_output.setter def maximum_curve_output(self, value=None): """Corresponds to IDD field `Maximum Curve Output`""" self["Maximum Curve Output"] = value @property def input_unit_type_for_x(self): """field `Input Unit Type for X` | Default value: Dimensionless Args: value (str): value for IDD Field `Input Unit Type for X` Raises: ValueError: if `value` is not a valid value Returns: str: the value of `input_unit_type_for_x` or None if not set """ return self["Input Unit Type for X"] @input_unit_type_for_x.setter def input_unit_type_for_x(self, value="Dimensionless"): """Corresponds to IDD field `Input Unit Type for X`""" self["Input Unit Type for X"] = value @property def input_unit_type_for_y(self): """field `Input Unit Type for Y` | Default value: Dimensionless Args: value (str): value for IDD Field `Input Unit Type for Y` 
Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `input_unit_type_for_y` or None if not set
        """
        return self["Input Unit Type for Y"]

    @input_unit_type_for_y.setter
    def input_unit_type_for_y(self, value="Dimensionless"):
        """Corresponds to IDD field `Input Unit Type for Y`."""
        self["Input Unit Type for Y"] = value

    @property
    def output_unit_type(self):
        """field `Output Unit Type`

        |  Default value: Dimensionless

        Args:
            value (str): value for IDD Field `Output Unit Type`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            str: the value of `output_unit_type` or None if not set
        """
        return self["Output Unit Type"]

    @output_unit_type.setter
    def output_unit_type(self, value="Dimensionless"):
        """Corresponds to IDD field `Output Unit Type`."""
        self["Output Unit Type"] = value


class CurveTriquadratic(DataObject):

    """Corresponds to IDD object `Curve:Triquadratic`.

    Quadratic curve with three independent variables. Input consists of the
    curve name, the twenty seven coefficients, and min and max values for
    each of the independent variables. Optional inputs for curve minimum and
    maximum may be used to limit the output of the performance curve.

    curve = a0 + a1*x**2 + a2*x + a3*y**2 + a4*y + a5*z**2 + a6*z
            + a7*x**2*y**2 + a8*x*y + a9*x*y**2 + a10*x**2*y
            + a11*x**2*z**2 + a12*x*z + a13*x*z**2 + a14*x**2*z
            + a15*y**2*z**2 + a16*y*z + a17*y*z**2 + a18*y**2*z
            + a19*x**2*y**2*z**2 + a20*x**2*y**2*z + a21*x**2*y*z**2
            + a22*x*y**2*z**2 + a23*x**2*y*z + a24*x*y**2*z
            + a25*x*y*z**2 +a26*x*y*z
    """

    # IDD metadata table (appears machine-generated from the EnergyPlus IDD).
    # It drives field lookup/validation in the DataObject base class
    # (defined elsewhere — presumably via the self["Field Name"] item
    # protocol used by the properties below; confirm against the base).
    # Do not edit by hand: field order here is the IDF field order.
    _schema = {
        'extensible-fields': OrderedDict(),
        'fields': OrderedDict([
            (u'name',
             {'name': u'Name', 'pyname': u'name',
              'required-field': True, 'autosizable': False,
              'autocalculatable': False, 'type': u'alpha'}),
            (u'coefficient1 constant',
             {'name': u'Coefficient1 Constant',
              'pyname': u'coefficient1_constant',
              'required-field': False, 'autosizable': False,
              'autocalculatable': False, 'type': u'real'}),
            (u'coefficient2 x**2',
             {'name': u'Coefficient2 x**2', 'pyname': u'coefficient2_x2',
              'required-field': False, 'autosizable': False,
              'autocalculatable': False, 'type': u'real'}),
            (u'coefficient3 x',
             {'name': u'Coefficient3 x', 'pyname': u'coefficient3_x',
              'required-field': False, 'autosizable': False,
              'autocalculatable': False, 'type': u'real'}),
            (u'coefficient4 y**2',
             {'name': u'Coefficient4 y**2', 'pyname': u'coefficient4_y2',
              'required-field': False, 'autosizable': False,
              'autocalculatable': False, 'type': u'real'}),
            (u'coefficient5 y',
             {'name': u'Coefficient5 y', 'pyname': u'coefficient5_y',
              'required-field': False, 'autosizable': False,
              'autocalculatable': False, 'type': u'real'}),
            (u'coefficient6 z**2',
             {'name': u'Coefficient6 z**2', 'pyname': u'coefficient6_z2',
              'required-field': False, 'autosizable': False,
              'autocalculatable': False, 'type': u'real'}),
            (u'coefficient7 z',
             {'name': u'Coefficient7 z', 'pyname': u'coefficient7_z',
              'required-field': False, 'autosizable': False,
              'autocalculatable': False, 'type': u'real'}),
            (u'coefficient8 x**2*y**2',
             {'name': u'Coefficient8 x**2*y**2',
              'pyname': u'coefficient8_x2y2',
              'required-field': False, 'autosizable': False,
              'autocalculatable': False, 'type': u'real'}),
            (u'coefficient9 x*y',
             {'name': u'Coefficient9 x*y', 'pyname': u'coefficient9_xy',
              'required-field': False, 'autosizable': False,
              'autocalculatable': False, 'type': u'real'}),
            (u'coefficient10 x*y**2',
             {'name': u'Coefficient10 x*y**2', 'pyname': u'coefficient10_xy2',
              'required-field': False, 'autosizable': False,
              'autocalculatable': False, 'type': u'real'}),
            (u'coefficient11 x**2*y',
             {'name': u'Coefficient11 x**2*y', 'pyname': u'coefficient11_x2y',
              'required-field': False, 'autosizable': False,
              'autocalculatable': False, 'type': u'real'}),
            (u'coefficient12 x**2*z**2',
             {'name': u'Coefficient12 x**2*z**2',
              'pyname': u'coefficient12_x2z2',
              'required-field': False, 'autosizable': False,
              'autocalculatable': False, 'type': u'real'}),
            (u'coefficient13 x*z',
             {'name': u'Coefficient13 x*z', 'pyname': u'coefficient13_xz',
              'required-field': False, 'autosizable': False,
              'autocalculatable': False, 'type': u'real'}),
            (u'coefficient14 x*z**2',
             {'name': u'Coefficient14 x*z**2', 'pyname': u'coefficient14_xz2',
              'required-field': False, 'autosizable': False,
              'autocalculatable': False, 'type': u'real'}),
            (u'coefficient15 x**2*z',
             {'name': u'Coefficient15 x**2*z', 'pyname': u'coefficient15_x2z',
              'required-field': False, 'autosizable': False,
              'autocalculatable': False, 'type': u'real'}),
            (u'coefficient16 y**2*z**2',
             {'name': u'Coefficient16 y**2*z**2',
              'pyname': u'coefficient16_y2z2',
              'required-field': False, 'autosizable': False,
              'autocalculatable': False, 'type': u'real'}),
            (u'coefficient17 y*z',
             {'name': u'Coefficient17 y*z', 'pyname': u'coefficient17_yz',
              'required-field': False, 'autosizable': False,
              'autocalculatable': False, 'type': u'real'}),
            (u'coefficient18 y*z**2',
             {'name': u'Coefficient18 y*z**2', 'pyname': u'coefficient18_yz2',
              'required-field': False, 'autosizable': False,
              'autocalculatable': False, 'type': u'real'}),
            (u'coefficient19 y**2*z',
             {'name': u'Coefficient19 y**2*z', 'pyname': u'coefficient19_y2z',
              'required-field': False, 'autosizable': False,
              'autocalculatable': False, 'type': u'real'}),
            (u'coefficient20 x**2*y**2*z**2',
             {'name': u'Coefficient20 x**2*y**2*z**2',
              'pyname': u'coefficient20_x2y2z2',
              'required-field': False, 'autosizable': False,
              'autocalculatable': False, 'type': u'real'}),
            (u'coefficient21 x**2*y**2*z',
             {'name': u'Coefficient21 x**2*y**2*z',
              'pyname': u'coefficient21_x2y2z',
              'required-field': False, 'autosizable': False,
              'autocalculatable': False, 'type': u'real'}),
            (u'coefficient22 x**2*y*z**2',
             {'name': u'Coefficient22 x**2*y*z**2',
              'pyname': u'coefficient22_x2yz2',
              'required-field': False, 'autosizable': False,
              'autocalculatable': False, 'type': u'real'}),
            (u'coefficient23 x*y**2*z**2',
             {'name': u'Coefficient23 x*y**2*z**2',
              'pyname': u'coefficient23_xy2z2',
              'required-field': False, 'autosizable': False,
              'autocalculatable': False, 'type': u'real'}),
            (u'coefficient24 x**2*y*z',
             {'name': u'Coefficient24 x**2*y*z',
              'pyname': u'coefficient24_x2yz',
              'required-field': False, 'autosizable': False,
              'autocalculatable': False, 'type': u'real'}),
            (u'coefficient25 x*y**2*z',
             {'name': u'Coefficient25 x*y**2*z',
              'pyname': u'coefficient25_xy2z',
              'required-field': False, 'autosizable': False,
              'autocalculatable': False, 'type': u'real'}),
            (u'coefficient26 x*y*z**2',
             {'name': u'Coefficient26 x*y*z**2',
              'pyname': u'coefficient26_xyz2',
              'required-field': False, 'autosizable': False,
              'autocalculatable': False, 'type': u'real'}),
            (u'coefficient27 x*y*z',
             {'name': u'Coefficient27 x*y*z', 'pyname': u'coefficient27_xyz',
              'required-field': False, 'autosizable': False,
              'autocalculatable': False, 'type': u'real'}),
            (u'minimum value of x',
             {'name': u'Minimum Value of x', 'pyname': u'minimum_value_of_x',
              'required-field': False, 'autosizable': False,
              'autocalculatable': False, 'type': u'real'}),
            (u'maximum value of x',
             {'name': u'Maximum Value of x', 'pyname': u'maximum_value_of_x',
              'required-field': False, 'autosizable': False,
              'autocalculatable': False, 'type': u'real'}),
            (u'minimum value of y',
             {'name': u'Minimum Value of y', 'pyname': u'minimum_value_of_y',
              'required-field': False, 'autosizable': False,
              'autocalculatable': False, 'type': u'real'}),
            (u'maximum value of y',
             {'name': u'Maximum Value of y', 'pyname': u'maximum_value_of_y',
              'required-field': False, 'autosizable': False,
              'autocalculatable': False, 'type': u'real'}),
            (u'minimum value of z',
             {'name': u'Minimum Value of z', 'pyname': u'minimum_value_of_z',
              'required-field': False, 'autosizable': False,
              'autocalculatable': False, 'type': u'real'}),
            (u'maximum value of z',
             {'name': u'Maximum Value of z', 'pyname': u'maximum_value_of_z',
              'required-field': False, 'autosizable': False,
              'autocalculatable': False, 'type': u'real'}),
            (u'minimum curve output',
             {'name': u'Minimum Curve Output',
              'pyname': u'minimum_curve_output',
              'required-field': False, 'autosizable': False,
              'autocalculatable': False, 'type': u'real'}),
            (u'maximum curve output',
             {'name': u'Maximum Curve Output',
              'pyname': u'maximum_curve_output',
              'required-field': False, 'autosizable': False,
              'autocalculatable': False, 'type': u'real'}),
            (u'input unit type for x',
             {'name': u'Input Unit Type for X',
              'pyname': u'input_unit_type_for_x',
              'default': u'Dimensionless', 'required-field': False,
              'autosizable': False,
              'accepted-values': [u'Dimensionless', u'Temperature',
                                  u'VolumetricFlow', u'MassFlow',
                                  u'Power', u'Distance'],
              'autocalculatable': False, 'type': 'alpha'}),
            (u'input unit type for y',
             {'name': u'Input Unit Type for Y',
              'pyname': u'input_unit_type_for_y',
              'default': u'Dimensionless', 'required-field': False,
              'autosizable': False,
              'accepted-values': [u'Dimensionless', u'Temperature',
                                  u'VolumetricFlow', u'MassFlow',
                                  u'Power', u'Distance'],
              'autocalculatable': False, 'type': 'alpha'}),
            (u'input unit type for z',
             {'name': u'Input Unit Type for Z',
              'pyname': u'input_unit_type_for_z',
              'default': u'Dimensionless', 'required-field': False,
              'autosizable': False,
              'accepted-values': [u'Dimensionless', u'Temperature',
                                  u'VolumetricFlow', u'MassFlow',
                                  u'Power', u'Distance'],
              'autocalculatable': False, 'type': 'alpha'}),
            (u'output unit type',
             {'name': u'Output Unit Type', 'pyname': u'output_unit_type',
              'default': u'Dimensionless', 'required-field': False,
              'autosizable': False,
              'accepted-values': [u'Dimensionless', u'Capacity', u'Power'],
              'autocalculatable': False, 'type': 'alpha'})]),
        'format': None,
        'group': u'Performance Curves',
        'min-fields': 0,
        'name': u'Curve:Triquadratic',
        'pyname': u'CurveTriquadratic',
        'required-object': False,
        'unique-object': False}

    @property
    def name(self):
        """field `Name`

        Args:
            value (str): value for IDD Field `Name`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            str: the value of `name` or None if not set
        """
        return self["Name"]

    @name.setter
    def name(self, value=None):
        """Corresponds to IDD field `Name`."""
        self["Name"] = value

    # --- polynomial coefficients a0..a26 of the triquadratic expression ---

    @property
    def coefficient1_constant(self):
        """field `Coefficient1 Constant`

        Args:
            value (float): value for IDD Field `Coefficient1 Constant`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            float: the value of `coefficient1_constant` or None if not set
        """
        return self["Coefficient1 Constant"]

    @coefficient1_constant.setter
    def coefficient1_constant(self, value=None):
        """Corresponds to IDD field `Coefficient1 Constant`."""
        self["Coefficient1 Constant"] = value

    @property
    def coefficient2_x2(self):
        """field `Coefficient2 x**2`

        Args:
            value (float): value for IDD Field `Coefficient2 x**2`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            float: the value of `coefficient2_x2` or None if not set
        """
        return self["Coefficient2 x**2"]

    @coefficient2_x2.setter
    def coefficient2_x2(self, value=None):
        """Corresponds to IDD field `Coefficient2 x**2`."""
        self["Coefficient2 x**2"] = value

    @property
    def coefficient3_x(self):
        """field `Coefficient3 x`

        Args:
            value (float): value for IDD Field `Coefficient3 x`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            float: the value of `coefficient3_x` or None if not set
        """
        return self["Coefficient3 x"]
    @coefficient3_x.setter
    def coefficient3_x(self, value=None):
        """Corresponds to IDD field `Coefficient3 x`."""
        self["Coefficient3 x"] = value

    @property
    def coefficient4_y2(self):
        """field `Coefficient4 y**2`

        Args:
            value (float): value for IDD Field `Coefficient4 y**2`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            float: the value of `coefficient4_y2` or None if not set
        """
        return self["Coefficient4 y**2"]

    @coefficient4_y2.setter
    def coefficient4_y2(self, value=None):
        """Corresponds to IDD field `Coefficient4 y**2`."""
        self["Coefficient4 y**2"] = value

    @property
    def coefficient5_y(self):
        """field `Coefficient5 y`

        Args:
            value (float): value for IDD Field `Coefficient5 y`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            float: the value of `coefficient5_y` or None if not set
        """
        return self["Coefficient5 y"]

    @coefficient5_y.setter
    def coefficient5_y(self, value=None):
        """Corresponds to IDD field `Coefficient5 y`."""
        self["Coefficient5 y"] = value

    @property
    def coefficient6_z2(self):
        """field `Coefficient6 z**2`

        Args:
            value (float): value for IDD Field `Coefficient6 z**2`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            float: the value of `coefficient6_z2` or None if not set
        """
        return self["Coefficient6 z**2"]

    @coefficient6_z2.setter
    def coefficient6_z2(self, value=None):
        """Corresponds to IDD field `Coefficient6 z**2`."""
        self["Coefficient6 z**2"] = value

    @property
    def coefficient7_z(self):
        """field `Coefficient7 z`

        Args:
            value (float): value for IDD Field `Coefficient7 z`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            float: the value of `coefficient7_z` or None if not set
        """
        return self["Coefficient7 z"]

    @coefficient7_z.setter
    def coefficient7_z(self, value=None):
        """Corresponds to IDD field `Coefficient7 z`."""
        self["Coefficient7 z"] = value

    @property
    def coefficient8_x2y2(self):
        """field `Coefficient8 x**2*y**2`

        Args:
            value (float): value for IDD Field `Coefficient8 x**2*y**2`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            float: the value of `coefficient8_x2y2` or None if not set
        """
        return self["Coefficient8 x**2*y**2"]

    @coefficient8_x2y2.setter
    def coefficient8_x2y2(self, value=None):
        """Corresponds to IDD field `Coefficient8 x**2*y**2`."""
        self["Coefficient8 x**2*y**2"] = value

    @property
    def coefficient9_xy(self):
        """field `Coefficient9 x*y`

        Args:
            value (float): value for IDD Field `Coefficient9 x*y`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            float: the value of `coefficient9_xy` or None if not set
        """
        return self["Coefficient9 x*y"]

    @coefficient9_xy.setter
    def coefficient9_xy(self, value=None):
        """Corresponds to IDD field `Coefficient9 x*y`."""
        self["Coefficient9 x*y"] = value

    @property
    def coefficient10_xy2(self):
        """field `Coefficient10 x*y**2`

        Args:
            value (float): value for IDD Field `Coefficient10 x*y**2`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            float: the value of `coefficient10_xy2` or None if not set
        """
        return self["Coefficient10 x*y**2"]

    @coefficient10_xy2.setter
    def coefficient10_xy2(self, value=None):
        """Corresponds to IDD field `Coefficient10 x*y**2`."""
        self["Coefficient10 x*y**2"] = value

    @property
    def coefficient11_x2y(self):
        """field `Coefficient11 x**2*y`

        Args:
            value (float): value for IDD Field `Coefficient11 x**2*y`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            float: the value of `coefficient11_x2y` or None if not set
        """
        return self["Coefficient11 x**2*y"]

    @coefficient11_x2y.setter
    def coefficient11_x2y(self, value=None):
        """Corresponds to IDD field `Coefficient11 x**2*y`."""
        self["Coefficient11 x**2*y"] = value

    @property
    def coefficient12_x2z2(self):
        """field `Coefficient12 x**2*z**2`

        Args:
            value (float): value for IDD Field `Coefficient12 x**2*z**2`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            float: the value of `coefficient12_x2z2` or None if not set
        """
        return self["Coefficient12 x**2*z**2"]

    @coefficient12_x2z2.setter
    def coefficient12_x2z2(self, value=None):
        """Corresponds to IDD field `Coefficient12 x**2*z**2`."""
        self["Coefficient12 x**2*z**2"] = value

    @property
    def coefficient13_xz(self):
        """field `Coefficient13 x*z`

        Args:
            value (float): value for IDD Field `Coefficient13 x*z`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            float: the value of `coefficient13_xz` or None if not set
        """
        return self["Coefficient13 x*z"]

    @coefficient13_xz.setter
    def coefficient13_xz(self, value=None):
        """Corresponds to IDD field `Coefficient13 x*z`."""
        self["Coefficient13 x*z"] = value

    @property
    def coefficient14_xz2(self):
        """field `Coefficient14 x*z**2`

        Args:
            value (float): value for IDD Field `Coefficient14 x*z**2`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            float: the value of `coefficient14_xz2` or None if not set
        """
        return self["Coefficient14 x*z**2"]

    @coefficient14_xz2.setter
    def coefficient14_xz2(self, value=None):
        """Corresponds to IDD field `Coefficient14 x*z**2`."""
        self["Coefficient14 x*z**2"] = value

    @property
    def coefficient15_x2z(self):
        """field `Coefficient15 x**2*z`

        Args:
            value (float): value for IDD Field `Coefficient15 x**2*z`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            float: the value of `coefficient15_x2z` or None if not set
        """
        return self["Coefficient15 x**2*z"]

    @coefficient15_x2z.setter
    def coefficient15_x2z(self, value=None):
        """Corresponds to IDD field `Coefficient15 x**2*z`."""
        self["Coefficient15 x**2*z"] = value

    @property
    def coefficient16_y2z2(self):
        """field `Coefficient16 y**2*z**2`

        Args:
            value (float): value for IDD Field `Coefficient16 y**2*z**2`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            float: the value of `coefficient16_y2z2` or None if not set
        """
        return self["Coefficient16 y**2*z**2"]

    @coefficient16_y2z2.setter
    def coefficient16_y2z2(self, value=None):
        """Corresponds to IDD field `Coefficient16 y**2*z**2`."""
        self["Coefficient16 y**2*z**2"] = value

    @property
    def coefficient17_yz(self):
        """field `Coefficient17 y*z`

        Args:
            value (float): value for IDD Field `Coefficient17 y*z`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            float: the value of `coefficient17_yz` or None if not set
        """
        return self["Coefficient17 y*z"]

    @coefficient17_yz.setter
    def coefficient17_yz(self, value=None):
        """Corresponds to IDD field `Coefficient17 y*z`."""
        self["Coefficient17 y*z"] = value

    @property
    def coefficient18_yz2(self):
        """field `Coefficient18 y*z**2`

        Args:
            value (float): value for IDD Field `Coefficient18 y*z**2`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            float: the value of `coefficient18_yz2` or None if not set
        """
        return self["Coefficient18 y*z**2"]

    @coefficient18_yz2.setter
    def coefficient18_yz2(self, value=None):
        """Corresponds to IDD field `Coefficient18 y*z**2`."""
        self["Coefficient18 y*z**2"] = value

    @property
    def coefficient19_y2z(self):
        """field `Coefficient19 y**2*z`

        Args:
            value (float): value for IDD Field `Coefficient19 y**2*z`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            float: the value of `coefficient19_y2z` or None if not set
        """
        return self["Coefficient19 y**2*z"]

    @coefficient19_y2z.setter
    def coefficient19_y2z(self, value=None):
        """Corresponds to IDD field `Coefficient19 y**2*z`."""
        self["Coefficient19 y**2*z"] = value

    @property
    def coefficient20_x2y2z2(self):
        """field `Coefficient20 x**2*y**2*z**2`

        Args:
            value (float): value for IDD Field `Coefficient20 x**2*y**2*z**2`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            float: the value of `coefficient20_x2y2z2` or None if not set
        """
        return self["Coefficient20 x**2*y**2*z**2"]

    @coefficient20_x2y2z2.setter
    def coefficient20_x2y2z2(self, value=None):
        """Corresponds to IDD field `Coefficient20 x**2*y**2*z**2`."""
        self["Coefficient20 x**2*y**2*z**2"] = value

    @property
    def coefficient21_x2y2z(self):
        """field `Coefficient21 x**2*y**2*z`

        Args:
            value (float): value for IDD Field `Coefficient21 x**2*y**2*z`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            float: the value of `coefficient21_x2y2z` or None if not set
        """
        return self["Coefficient21 x**2*y**2*z"]

    @coefficient21_x2y2z.setter
    def coefficient21_x2y2z(self, value=None):
        """Corresponds to IDD field `Coefficient21 x**2*y**2*z`."""
        self["Coefficient21 x**2*y**2*z"] = value

    @property
    def coefficient22_x2yz2(self):
        """field `Coefficient22 x**2*y*z**2`

        Args:
            value (float): value for IDD Field `Coefficient22 x**2*y*z**2`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            float: the value of `coefficient22_x2yz2` or None if not set
        """
        return self["Coefficient22 x**2*y*z**2"]

    @coefficient22_x2yz2.setter
    def coefficient22_x2yz2(self, value=None):
        """Corresponds to IDD field `Coefficient22 x**2*y*z**2`."""
        self["Coefficient22 x**2*y*z**2"] = value

    @property
    def coefficient23_xy2z2(self):
        """field `Coefficient23 x*y**2*z**2`

        Args:
            value (float): value for IDD Field `Coefficient23 x*y**2*z**2`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            float: the value of `coefficient23_xy2z2` or None if not set
        """
        return self["Coefficient23 x*y**2*z**2"]

    @coefficient23_xy2z2.setter
    def coefficient23_xy2z2(self, value=None):
        """Corresponds to IDD field `Coefficient23 x*y**2*z**2`."""
        self["Coefficient23 x*y**2*z**2"] = value

    @property
    def coefficient24_x2yz(self):
        """field `Coefficient24 x**2*y*z`

        Args:
            value (float): value for IDD Field `Coefficient24 x**2*y*z`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            float: the value of `coefficient24_x2yz` or None if not set
        """
        return self["Coefficient24 x**2*y*z"]

    @coefficient24_x2yz.setter
    def coefficient24_x2yz(self, value=None):
        """Corresponds to IDD field `Coefficient24 x**2*y*z`."""
        self["Coefficient24 x**2*y*z"] = value

    @property
    def coefficient25_xy2z(self):
        """field `Coefficient25 x*y**2*z`

        Args:
            value (float): value for IDD Field `Coefficient25 x*y**2*z`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            float: the value of `coefficient25_xy2z` or None if not set
        """
        return self["Coefficient25 x*y**2*z"]

    @coefficient25_xy2z.setter
    def coefficient25_xy2z(self, value=None):
        """Corresponds to IDD field `Coefficient25 x*y**2*z`."""
        self["Coefficient25 x*y**2*z"] = value

    @property
    def coefficient26_xyz2(self):
        """field `Coefficient26 x*y*z**2`

        Args:
            value (float): value for IDD Field `Coefficient26 x*y*z**2`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            float: the value of `coefficient26_xyz2` or None if not set
        """
        return self["Coefficient26 x*y*z**2"]

    @coefficient26_xyz2.setter
    def coefficient26_xyz2(self, value=None):
        """Corresponds to IDD field `Coefficient26 x*y*z**2`."""
        self["Coefficient26 x*y*z**2"] = value

    @property
    def coefficient27_xyz(self):
        """field `Coefficient27 x*y*z`

        Args:
            value (float): value for IDD Field `Coefficient27 x*y*z`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            float: the value of `coefficient27_xyz` or None if not set
        """
        return self["Coefficient27 x*y*z"]

    @coefficient27_xyz.setter
    def coefficient27_xyz(self, value=None):
        """Corresponds to IDD field `Coefficient27 x*y*z`."""
        self["Coefficient27 x*y*z"] = value

    # --- valid ranges of the independent variables x, y, z ---

    @property
    def minimum_value_of_x(self):
        """field `Minimum Value of x`

        |  Units are based on field `A2`

        Args:
            value (float): value for IDD Field `Minimum Value of x`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            float: the value of `minimum_value_of_x` or None if not set
        """
        return self["Minimum Value of x"]

    @minimum_value_of_x.setter
    def minimum_value_of_x(self, value=None):
        """Corresponds to IDD field `Minimum Value of x`."""
        self["Minimum Value of x"] = value

    @property
    def maximum_value_of_x(self):
        """field `Maximum Value of x`

        |  Units are based on field `A2`

        Args:
            value (float): value for IDD Field `Maximum Value of x`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            float: the value of `maximum_value_of_x` or None if not set
        """
        return self["Maximum Value of x"]

    @maximum_value_of_x.setter
    def maximum_value_of_x(self, value=None):
        """Corresponds to IDD field `Maximum Value of x`."""
        self["Maximum Value of x"] = value

    @property
    def minimum_value_of_y(self):
        """field `Minimum Value of y`

        |  Units are based on field `A3`

        Args:
            value (float): value for IDD Field `Minimum Value of y`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            float: the value of `minimum_value_of_y` or None if not set
        """
        return self["Minimum Value of y"]

    @minimum_value_of_y.setter
    def minimum_value_of_y(self, value=None):
        """Corresponds to IDD field `Minimum Value of y`."""
        self["Minimum Value of y"] = value

    @property
    def maximum_value_of_y(self):
        """field `Maximum Value of y`

        |  Units are based on field `A3`

        Args:
            value (float): value for IDD Field `Maximum Value of y`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            float: the value of `maximum_value_of_y` or None if not set
        """
        return self["Maximum Value of y"]

    @maximum_value_of_y.setter
    def maximum_value_of_y(self, value=None):
        """Corresponds to IDD field `Maximum Value of y`."""
        self["Maximum Value of y"] = value

    @property
    def minimum_value_of_z(self):
        """field `Minimum Value of z`

        |  Units are based on field `A4`

        Args:
            value (float): value for IDD Field `Minimum Value of z`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            float: the value of `minimum_value_of_z` or None if not set
        """
        return self["Minimum Value of z"]

    @minimum_value_of_z.setter
    def minimum_value_of_z(self, value=None):
        """Corresponds to IDD field `Minimum Value of z`."""
        self["Minimum Value of z"] = value

    @property
    def maximum_value_of_z(self):
        """field `Maximum Value of z`

        |  Units are based on field `A4`

        Args:
            value (float): value for IDD Field `Maximum Value of z`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            float: the value of `maximum_value_of_z` or None if not set
        """
        return self["Maximum Value of z"]

    @maximum_value_of_z.setter
    def maximum_value_of_z(self, value=None):
        """Corresponds to IDD field `Maximum Value of z`."""
        self["Maximum Value of z"] = value

    # --- optional clamps on the curve output and unit-type hints ---

    @property
    def minimum_curve_output(self):
        """field `Minimum Curve Output`

        |  Specify the minimum value calculated by this curve object
        |  Units are based on field `A5`

        Args:
            value (float): value for IDD Field `Minimum Curve Output`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            float: the value of `minimum_curve_output` or None if not set
        """
        return self["Minimum Curve Output"]

    @minimum_curve_output.setter
    def minimum_curve_output(self, value=None):
        """Corresponds to IDD field `Minimum Curve Output`."""
        self["Minimum Curve Output"] = value

    @property
    def maximum_curve_output(self):
        """field `Maximum Curve Output`

        |  Specify the maximum value calculated by this curve object
        |  Units are based on field `A5`

        Args:
            value (float): value for IDD Field `Maximum Curve Output`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            float: the value of `maximum_curve_output` or None if not set
        """
        return self["Maximum Curve Output"]

    @maximum_curve_output.setter
    def maximum_curve_output(self, value=None):
        """Corresponds to IDD field `Maximum Curve Output`."""
        self["Maximum Curve Output"] = value

    @property
    def input_unit_type_for_x(self):
        """field `Input Unit Type for X`

        |  Default value: Dimensionless

        Args:
            value (str): value for IDD Field `Input Unit Type for X`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            str: the value of `input_unit_type_for_x` or None if not set
        """
        return self["Input Unit Type for X"]

    @input_unit_type_for_x.setter
    def input_unit_type_for_x(self, value="Dimensionless"):
        """Corresponds to IDD field `Input Unit Type for X`."""
        self["Input Unit Type for X"] = value

    @property
    def input_unit_type_for_y(self):
        """field `Input Unit Type for Y`

        |  Default value: Dimensionless

        Args:
            value (str): value for IDD Field `Input Unit Type for Y`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            str: the value of `input_unit_type_for_y` or None if not set
        """
        return self["Input Unit Type for Y"]

    @input_unit_type_for_y.setter
    def input_unit_type_for_y(self, value="Dimensionless"):
        """Corresponds to IDD field `Input Unit Type for Y`."""
        self["Input Unit Type for Y"] = value

    @property
    def input_unit_type_for_z(self):
        """field `Input Unit Type for Z`

        |  Default value: Dimensionless

        Args:
            value (str): value for IDD Field `Input Unit Type for Z`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            str: the value of `input_unit_type_for_z` or None if not set
        """
        return self["Input Unit Type for Z"]

    @input_unit_type_for_z.setter
    def input_unit_type_for_z(self, value="Dimensionless"):
        """Corresponds to IDD field `Input Unit Type for Z`."""
        self["Input Unit Type for Z"] = value

    @property
    def output_unit_type(self):
        """field `Output Unit Type`

        |  Default value: Dimensionless

        Args:
            value (str): value for IDD Field `Output Unit Type`
        Raises:
            ValueError: if `value` is not a valid value
        Returns:
            str: the value of `output_unit_type` or None if not set
        """
        return self["Output Unit Type"]

    @output_unit_type.setter
    def output_unit_type(self, value="Dimensionless"):
        """Corresponds to IDD field `Output Unit Type`."""
        self["Output Unit Type"] = value


class CurveFunctionalPressureDrop(DataObject):

    """Corresponds to IDD object `Curve:Functional:PressureDrop`.

    Sets up curve information for minor loss and/or friction calculations
    in plant pressure simulations.
    Expression: DeltaP = {K + f*(L/D)} * (rho * V^2) / 2
    """

    # IDD metadata table; the dict literal continues beyond this chunk of
    # the file, so its tail is intentionally left open here.
    _schema = {
        'extensible-fields': OrderedDict(),
        'fields': OrderedDict([
            (u'name',
             {'name': u'Name', 'pyname': u'name', 'required-field': True,
              'autosizable': False, 'autocalculatable': False,
              'type': u'alpha'}),
            (u'diameter',
             {'name': u'Diameter', 'pyname': u'diameter', 'minimum>': 0.0,
              'required-field': True, 'autosizable': False,
              'autocalculatable': False, 'type': u'real', 'unit': u'm'}),
            (u'minor loss coefficient',
             {'name': u'Minor Loss Coefficient',
              'pyname': u'minor_loss_coefficient', 'minimum>': 0.0,
              'required-field': False, 'autosizable': False,
              'autocalculatable': False, 'type': u'real', 'unit':
                                       u'dimensionless'}),
                                      (u'length',
                                       {'name': u'Length',
                                        'pyname': u'length',
                                        'minimum>': 0.0,
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'm'}),
                                      (u'roughness',
                                       {'name': u'Roughness',
                                        'pyname': u'roughness',
                                        'minimum>': 0.0,
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'm'}),
                                      (u'fixed friction factor',
                                       {'name': u'Fixed Friction Factor',
                                        'pyname': u'fixed_friction_factor',
                                        'minimum>': 0.0,
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': 'real'})]),
               'format': None,
               'group': u'Performance Curves',
               'min-fields': 5,
               'name': u'Curve:Functional:PressureDrop',
               'pyname': u'CurveFunctionalPressureDrop',
               'required-object': False,
               'unique-object': False}

    # Each property below is a thin accessor over the DataObject field
    # mapping; getters read and setters write the named IDD field.
    @property
    def name(self):
        """field `Name`

        Args:
            value (str): value for IDD Field `Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `name` or None if not set

        """
        return self["Name"]

    @name.setter
    def name(self, value=None):
        """Corresponds to IDD field `Name`"""
        self["Name"] = value

    @property
    def diameter(self):
        """field `Diameter`

        |  "D" in above expression, used to also calculate local velocity
        |  Units: m

        Args:
            value (float): value for IDD Field `Diameter`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `diameter` or None if not set

        """
        return self["Diameter"]

    @diameter.setter
    def diameter(self, value=None):
        """Corresponds to IDD field `Diameter`"""
        self["Diameter"] = value

    @property
    def minor_loss_coefficient(self):
        """field `Minor Loss Coefficient`

        |  "K" in above expression
        |  Units: dimensionless

        Args:
            value (float): value for IDD Field `Minor Loss Coefficient`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `minor_loss_coefficient` or None if not set

        """
        return self["Minor Loss Coefficient"]

    @minor_loss_coefficient.setter
    def minor_loss_coefficient(self, value=None):
        """Corresponds to IDD field `Minor Loss Coefficient`"""
        self["Minor Loss Coefficient"] = value

    @property
    def length(self):
        """field `Length`

        |  "L" in above expression
        |  Units: m

        Args:
            value (float): value for IDD Field `Length`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `length` or None if not set

        """
        return self["Length"]

    @length.setter
    def length(self, value=None):
        """Corresponds to IDD field `Length`"""
        self["Length"] = value

    @property
    def roughness(self):
        """field `Roughness`

        |  This will be used to calculate "f" from Moody-chart approximations
        |  Units: m

        Args:
            value (float): value for IDD Field `Roughness`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `roughness` or None if not set

        """
        return self["Roughness"]

    @roughness.setter
    def roughness(self, value=None):
        """Corresponds to IDD field `Roughness`"""
        self["Roughness"] = value

    @property
    def fixed_friction_factor(self):
        """field `Fixed Friction Factor`

        |  Optional way to set a constant value for "f", instead of using
        |  internal Moody-chart approximations

        Args:
            value (float): value for IDD Field `Fixed Friction Factor`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `fixed_friction_factor` or None if not set

        """
        return self["Fixed Friction Factor"]

    @fixed_friction_factor.setter
    def fixed_friction_factor(self, value=None):
        """Corresponds to IDD field `Fixed Friction Factor`"""
        self["Fixed Friction Factor"] = value




class CurveFanPressureRise(DataObject):

    """ Corresponds to IDD object `Curve:FanPressureRise`
        Special curve type with two independent variables.
        Input for the fan total pressure rise curve consists of the curve
        name, the four coefficients, and the maximum and minimum valid
        independent variable values. Optional inputs for the curve minimum
        and maximum may be used to limit the output of the performance curve.
        curve = C1*Qfan**2+C2*Qfan+C3*Qfan*(Psm-Po)**0.5+C4*(Psm-Po)
        Po assumed to be zero
        See Input Output Reference for curve details
    """
    # Auto-generated field schema describing the IDD object layout.
    _schema = {'extensible-fields': OrderedDict(),
               'fields': OrderedDict([(u'name',
                                       {'name': u'Name',
                                        'pyname': u'name',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'alpha'}),
                                      (u'coefficient1 c1',
                                       {'name': u'Coefficient1 C1',
                                        'pyname': u'coefficient1_c1',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real'}),
                                      (u'coefficient2 c2',
                                       {'name': u'Coefficient2 C2',
                                        'pyname': u'coefficient2_c2',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real'}),
                                      (u'coefficient3 c3',
                                       {'name': u'Coefficient3 C3',
                                        'pyname': u'coefficient3_c3',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real'}),
                                      (u'coefficient4 c4',
                                       {'name': u'Coefficient4 C4',
                                        'pyname': u'coefficient4_c4',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real'}),
                                      (u'minimum value of qfan',
                                       {'name': u'Minimum Value of Qfan',
                                        'pyname': u'minimum_value_of_qfan',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'm3/s'}),
                                      (u'maximum value of qfan',
                                       {'name': u'Maximum Value of Qfan',
                                        'pyname': u'maximum_value_of_qfan',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'm3/s'}),
                                      (u'minimum value of psm',
                                       {'name': u'Minimum Value of Psm',
                                        'pyname': u'minimum_value_of_psm',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'Pa'}),
                                      (u'maximum value of psm',
                                       {'name': u'Maximum Value of Psm',
                                        'pyname': u'maximum_value_of_psm',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'Pa'}),
                                      (u'minimum curve output',
                                       {'name': u'Minimum Curve Output',
                                        'pyname': u'minimum_curve_output',
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'Pa'}),
                                      (u'maximum curve output',
                                       {'name': u'Maximum Curve Output',
                                        'pyname': u'maximum_curve_output',
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'Pa'})]),
               'format': None,
               'group': u'Performance Curves',
               'min-fields': 0,
               'name': u'Curve:FanPressureRise',
               'pyname': u'CurveFanPressureRise',
               'required-object': False,
               'unique-object': False}

    @property
    def name(self):
        """field `Name`

        Args:
            value (str): value for IDD Field `Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `name` or None if not set

        """
        return self["Name"]

    @name.setter
    def name(self, value=None):
        """Corresponds to IDD field `Name`"""
        self["Name"] = value

    @property
    def coefficient1_c1(self):
        """field `Coefficient1 C1`

        Args:
            value (float): value for IDD Field `Coefficient1 C1`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `coefficient1_c1` or None if not set

        """
        return self["Coefficient1 C1"]

    @coefficient1_c1.setter
    def coefficient1_c1(self, value=None):
        """Corresponds to IDD field `Coefficient1 C1`"""
        self["Coefficient1 C1"] = value

    @property
    def coefficient2_c2(self):
        """field `Coefficient2 C2`

        Args:
            value (float): value for IDD Field `Coefficient2 C2`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `coefficient2_c2` or None if not set

        """
        return self["Coefficient2 C2"]

    @coefficient2_c2.setter
    def coefficient2_c2(self, value=None):
        """Corresponds to IDD field `Coefficient2 C2`"""
        self["Coefficient2 C2"] = value

    @property
    def coefficient3_c3(self):
        """field `Coefficient3 C3`

        Args:
            value (float): value for IDD Field `Coefficient3 C3`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `coefficient3_c3` or None if not set

        """
        return self["Coefficient3 C3"]

    @coefficient3_c3.setter
    def coefficient3_c3(self, value=None):
        """Corresponds to IDD field `Coefficient3 C3`"""
        self["Coefficient3 C3"] = value

    @property
    def coefficient4_c4(self):
        """field `Coefficient4 C4`

        Args:
            value (float): value for IDD Field `Coefficient4 C4`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `coefficient4_c4` or None if not set

        """
        return self["Coefficient4 C4"]

    @coefficient4_c4.setter
    def coefficient4_c4(self, value=None):
        """Corresponds to IDD field `Coefficient4 C4`"""
        self["Coefficient4 C4"] = value

    @property
    def minimum_value_of_qfan(self):
        """field `Minimum Value of Qfan`

        |  Units: m3/s

        Args:
            value (float): value for IDD Field `Minimum Value of Qfan`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `minimum_value_of_qfan` or None if not set

        """
        return self["Minimum Value of Qfan"]

    @minimum_value_of_qfan.setter
    def minimum_value_of_qfan(self, value=None):
        """Corresponds to IDD field `Minimum Value of Qfan`"""
        self["Minimum Value of Qfan"] = value

    @property
    def maximum_value_of_qfan(self):
        """field `Maximum Value of Qfan`

        |  Units: m3/s

        Args:
            value (float): value for IDD Field `Maximum Value of Qfan`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `maximum_value_of_qfan` or None if not set

        """
        return self["Maximum Value of Qfan"]

    @maximum_value_of_qfan.setter
    def maximum_value_of_qfan(self, value=None):
        """Corresponds to IDD field `Maximum Value of Qfan`"""
        self["Maximum Value of Qfan"] = value

    @property
    def minimum_value_of_psm(self):
        """field `Minimum Value of Psm`

        |  Units: Pa
        |  IP-Units: Pa

        Args:
            value (float): value for IDD Field `Minimum Value of Psm`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `minimum_value_of_psm` or None if not set

        """
        return self["Minimum Value of Psm"]

    @minimum_value_of_psm.setter
    def minimum_value_of_psm(self, value=None):
        """Corresponds to IDD field `Minimum Value of Psm`"""
        self["Minimum Value of Psm"] = value

    @property
    def maximum_value_of_psm(self):
        """field `Maximum Value of Psm`

        |  Units: Pa
        |  IP-Units: Pa

        Args:
            value (float): value for IDD Field `Maximum Value of Psm`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `maximum_value_of_psm` or None if not set

        """
        return self["Maximum Value of Psm"]

    @maximum_value_of_psm.setter
    def maximum_value_of_psm(self, value=None):
        """Corresponds to IDD field `Maximum Value of Psm`"""
        self["Maximum Value of Psm"] = value

    @property
    def minimum_curve_output(self):
        """field `Minimum Curve Output`

        |  Specify the minimum value calculated by this curve object
        |  Units: Pa
        |  IP-Units: Pa

        Args:
            value (float): value for IDD Field `Minimum Curve Output`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `minimum_curve_output` or None if not set

        """
        return self["Minimum Curve Output"]

    @minimum_curve_output.setter
    def minimum_curve_output(self, value=None):
        """Corresponds to IDD field `Minimum Curve Output`"""
        self["Minimum Curve Output"] = value

    @property
    def maximum_curve_output(self):
        """field `Maximum Curve Output`

        |  Specify the maximum value calculated by this curve object
        |  Units: Pa
        |  IP-Units: Pa

        Args:
            value (float): value for IDD Field `Maximum Curve Output`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `maximum_curve_output` or None if not set

        """
        return self["Maximum Curve Output"]

    @maximum_curve_output.setter
    def maximum_curve_output(self, value=None):
        """Corresponds to IDD field `Maximum Curve Output`"""
        self["Maximum Curve Output"] = value




class CurveExponentialSkewNormal(DataObject):

    """ Corresponds to IDD object `Curve:ExponentialSkewNormal`
        Exponential-modified skew normal curve with one independent variable.
        Input consists of the curve name, the four coefficients, and the
        maximum and minimum valid independent variable values. Optional
        inputs for the curve minimum and maximum may be used to limit the
        output of the performance curve.
        curve = see Input Output Reference
    """
    # Auto-generated field schema describing the IDD object layout.
    _schema = {'extensible-fields': OrderedDict(),
               'fields': OrderedDict([(u'name',
                                       {'name': u'Name',
                                        'pyname': u'name',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'alpha'}),
                                      (u'coefficient1 c1',
                                       {'name': u'Coefficient1 C1',
                                        'pyname': u'coefficient1_c1',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real'}),
                                      (u'coefficient2 c2',
                                       {'name': u'Coefficient2 C2',
                                        'pyname': u'coefficient2_c2',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real'}),
                                      (u'coefficient3 c3',
                                       {'name': u'Coefficient3 C3',
                                        'pyname': u'coefficient3_c3',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real'}),
                                      (u'coefficient4 c4',
                                       {'name': u'Coefficient4 C4',
                                        'pyname': u'coefficient4_c4',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real'}),
                                      (u'minimum value of x',
                                       {'name': u'Minimum Value of x',
                                        'pyname': u'minimum_value_of_x',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real'}),
                                      (u'maximum value of x',
                                       {'name': u'Maximum Value of x',
                                        'pyname': u'maximum_value_of_x',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real'}),
                                      (u'minimum curve output',
                                       {'name': u'Minimum Curve Output',
                                        'pyname': u'minimum_curve_output',
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real'}),
                                      (u'maximum curve output',
                                       {'name': u'Maximum Curve Output',
                                        'pyname': u'maximum_curve_output',
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real'}),
                                      (u'input unit type for x',
                                       {'name': u'Input Unit Type for x',
                                        'pyname': u'input_unit_type_for_x',
                                        'default': u'Dimensionless',
                                        'required-field': False,
                                        'autosizable': False,
                                        'accepted-values': [u'Dimensionless'],
                                        'autocalculatable': False,
                                        'type': 'alpha'}),
                                      (u'output unit type',
                                       {'name': u'Output Unit Type',
                                        'pyname': u'output_unit_type',
                                        'default': u'Dimensionless',
                                        'required-field': False,
                                        'autosizable': False,
                                        'accepted-values': [u'Dimensionless'],
                                        'autocalculatable': False,
                                        'type': 'alpha'})]),
               'format': None,
               'group': u'Performance Curves',
               'min-fields': 0,
               'name': u'Curve:ExponentialSkewNormal',
               'pyname': u'CurveExponentialSkewNormal',
               'required-object': False,
               'unique-object': False}

    @property
    def name(self):
        """field `Name`

        |  See Input Output Reference for curve description

        Args:
            value (str): value for IDD Field `Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `name` or None if not set

        """
        return self["Name"]

    @name.setter
    def name(self, value=None):
        """Corresponds to IDD field `Name`"""
        self["Name"] = value

    @property
    def coefficient1_c1(self):
        """field `Coefficient1 C1`

        Args:
            value (float): value for IDD Field `Coefficient1 C1`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `coefficient1_c1` or None if not set

        """
        return self["Coefficient1 C1"]

    @coefficient1_c1.setter
    def coefficient1_c1(self, value=None):
        """Corresponds to IDD field `Coefficient1 C1`"""
        self["Coefficient1 C1"] = value

    @property
    def coefficient2_c2(self):
        """field `Coefficient2 C2`

        Args:
            value (float): value for IDD Field `Coefficient2 C2`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `coefficient2_c2` or None if not set

        """
        return self["Coefficient2 C2"]

    @coefficient2_c2.setter
    def coefficient2_c2(self, value=None):
        """Corresponds to IDD field `Coefficient2 C2`"""
        self["Coefficient2 C2"] = value

    @property
    def coefficient3_c3(self):
        """field `Coefficient3 C3`

        Args:
            value (float): value for IDD Field `Coefficient3 C3`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `coefficient3_c3` or None if not set

        """
        return self["Coefficient3 C3"]

    @coefficient3_c3.setter
    def coefficient3_c3(self, value=None):
        """Corresponds to IDD field `Coefficient3 C3`"""
        self["Coefficient3 C3"] = value

    @property
    def coefficient4_c4(self):
        """field `Coefficient4 C4`

        Args:
            value (float): value for IDD Field `Coefficient4 C4`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `coefficient4_c4` or None if not set

        """
        return self["Coefficient4 C4"]

    @coefficient4_c4.setter
    def coefficient4_c4(self, value=None):
        """Corresponds to IDD field `Coefficient4 C4`"""
        self["Coefficient4 C4"] = value

    @property
    def minimum_value_of_x(self):
        """field `Minimum Value of x`

        |  Units are based on field `A2`

        Args:
            value (float): value for IDD Field `Minimum Value of x`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `minimum_value_of_x` or None if not set

        """
        return self["Minimum Value of x"]

    @minimum_value_of_x.setter
    def minimum_value_of_x(self, value=None):
        """Corresponds to IDD field `Minimum Value of x`"""
        self["Minimum Value of x"] = value

    @property
    def maximum_value_of_x(self):
        """field `Maximum Value of x`

        |  Units are based on field `A2`

        Args:
            value (float): value for IDD Field `Maximum Value of x`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `maximum_value_of_x` or None if not set

        """
        return self["Maximum Value of x"]

    @maximum_value_of_x.setter
    def maximum_value_of_x(self, value=None):
        """Corresponds to IDD field `Maximum Value of x`"""
        self["Maximum Value of x"] = value

    @property
    def minimum_curve_output(self):
        """field `Minimum Curve Output`

        |  Specify the minimum value calculated by this curve object
        |  Units are based on field `A3`

        Args:
            value (float): value for IDD Field `Minimum Curve Output`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `minimum_curve_output` or None if not set

        """
        return self["Minimum Curve Output"]

    @minimum_curve_output.setter
    def minimum_curve_output(self, value=None):
        """Corresponds to IDD field `Minimum Curve Output`"""
        self["Minimum Curve Output"] = value

    @property
    def maximum_curve_output(self):
        """field `Maximum Curve Output`

        |  Specify the maximum value calculated by this curve object
        |  Units are based on field `A3`

        Args:
            value (float): value for IDD Field `Maximum Curve Output`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `maximum_curve_output` or None if not set

        """
        return self["Maximum Curve Output"]

    @maximum_curve_output.setter
    def maximum_curve_output(self, value=None):
        """Corresponds to IDD field `Maximum Curve Output`"""
        self["Maximum Curve Output"] = value

    @property
    def input_unit_type_for_x(self):
        """field `Input Unit Type for x`

        |  Default value: Dimensionless

        Args:
            value (str): value for IDD Field `Input Unit Type for x`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `input_unit_type_for_x` or None if not set

        """
        return self["Input Unit Type for x"]

    @input_unit_type_for_x.setter
    def input_unit_type_for_x(self, value="Dimensionless"):
        """Corresponds to IDD field `Input Unit Type for x`"""
        self["Input Unit Type for x"] = value

    @property
    def output_unit_type(self):
        """field `Output Unit Type`

        |  Default value: Dimensionless

        Args:
            value (str): value for IDD Field `Output Unit Type`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `output_unit_type` or None if not set

        """
        return self["Output Unit Type"]

    @output_unit_type.setter
    def output_unit_type(self, value="Dimensionless"):
        """Corresponds to IDD field `Output Unit Type`"""
        self["Output Unit Type"] = value




class CurveSigmoid(DataObject):

    """ Corresponds to IDD object `Curve:Sigmoid`
        Sigmoid curve with one independent variable.
        Input consists of the curve name, the five coefficients, and the
        maximum and minimum valid independent variable values. Optional
        inputs for the curve minimum and maximum may be used to limit the
        output of the performance curve.
        curve = C1+C2/[1+exp((C3-x)/C4)]**C5
    """
    # Auto-generated field schema describing the IDD object layout.
    _schema = {'extensible-fields': OrderedDict(),
               'fields': OrderedDict([(u'name',
                                       {'name': u'Name',
                                        'pyname': u'name',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'alpha'}),
                                      (u'coefficient1 c1',
                                       {'name': u'Coefficient1 C1',
                                        'pyname': u'coefficient1_c1',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real'}),
                                      (u'coefficient2 c2',
                                       {'name': u'Coefficient2 C2',
                                        'pyname': u'coefficient2_c2',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real'}),
                                      (u'coefficient3 c3',
                                       {'name': u'Coefficient3 C3',
                                        'pyname': u'coefficient3_c3',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real'}),
                                      (u'coefficient4 c4',
                                       {'name': u'Coefficient4 C4',
                                        'pyname': u'coefficient4_c4',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real'}),
                                      (u'coefficient5 c5',
                                       {'name': u'Coefficient5 C5',
                                        'pyname': u'coefficient5_c5',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real'}),
                                      (u'minimum value of x',
                                       {'name': u'Minimum Value of x',
                                        'pyname': u'minimum_value_of_x',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real'}),
                                      (u'maximum value of x',
                                       {'name': u'Maximum Value of x',
                                        'pyname': u'maximum_value_of_x',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real'}),
                                      (u'minimum curve output',
                                       {'name': u'Minimum Curve Output',
                                        'pyname': u'minimum_curve_output',
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real'}),
                                      (u'maximum curve output',
                                       {'name': u'Maximum Curve Output',
                                        'pyname': u'maximum_curve_output',
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real'}),
                                      (u'input unit type for x',
                                       {'name': u'Input Unit Type for x',
                                        'pyname': u'input_unit_type_for_x',
                                        'default': u'Dimensionless',
                                        'required-field': False,
                                        'autosizable': False,
                                        'accepted-values': [u'Dimensionless'],
                                        'autocalculatable': False,
                                        'type': 'alpha'}),
                                      (u'output unit type',
                                       {'name': u'Output Unit Type',
                                        'pyname': u'output_unit_type',
                                        'default': u'Dimensionless',
                                        'required-field': False,
                                        'autosizable': False,
                                        'accepted-values': [u'Dimensionless'],
                                        'autocalculatable': False,
                                        'type': 'alpha'})]),
               'format': None,
               'group': u'Performance Curves',
               'min-fields': 0,
               'name': u'Curve:Sigmoid',
               'pyname': u'CurveSigmoid',
               'required-object': False,
               'unique-object': False}

    @property
    def name(self):
        """field `Name`

        |  See Input Output Reference for curve description

        Args:
            value (str): value for IDD Field `Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `name` or None if not set

        """
        return self["Name"]

    @name.setter
    def name(self, value=None):
        """Corresponds to IDD field `Name`"""
        self["Name"] = value

    @property
    def coefficient1_c1(self):
        """field `Coefficient1 C1`

        Args:
            value (float): value for IDD Field `Coefficient1 C1`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `coefficient1_c1` or None if not set

        """
        return self["Coefficient1 C1"]

    @coefficient1_c1.setter
    def coefficient1_c1(self, value=None):
        """Corresponds to IDD field `Coefficient1 C1`"""
        self["Coefficient1 C1"] = value

    @property
    def coefficient2_c2(self):
        """field `Coefficient2 C2`

        Args:
            value (float): value for IDD Field `Coefficient2 C2`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `coefficient2_c2` or None if not set

        """
        return self["Coefficient2 C2"]

    @coefficient2_c2.setter
    def coefficient2_c2(self, value=None):
        """Corresponds to IDD field `Coefficient2 C2`"""
        self["Coefficient2 C2"] = value

    @property
    def coefficient3_c3(self):
        """field `Coefficient3 C3`

        Args:
            value (float): value for IDD Field `Coefficient3 C3`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `coefficient3_c3` or None if not set

        """
        return self["Coefficient3 C3"]

    @coefficient3_c3.setter
    def coefficient3_c3(self, value=None):
        """Corresponds to IDD field `Coefficient3 C3`"""
        self["Coefficient3 C3"] = value

    @property
    def coefficient4_c4(self):
        """field `Coefficient4 C4`

        Args:
            value (float): value for IDD Field `Coefficient4 C4`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `coefficient4_c4` or None if not set

        """
        return self["Coefficient4 C4"]

    @coefficient4_c4.setter
    def coefficient4_c4(self, value=None):
        """Corresponds to IDD field `Coefficient4 C4`"""
        self["Coefficient4 C4"] = value

    @property
    def coefficient5_c5(self):
        """field `Coefficient5 C5`

        Args:
            value (float): value for IDD Field `Coefficient5 C5`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `coefficient5_c5` or None if not set

        """
        return self["Coefficient5 C5"]

    @coefficient5_c5.setter
    def coefficient5_c5(self, value=None):
        """Corresponds to IDD field `Coefficient5 C5`"""
        self["Coefficient5 C5"] = value

    @property
    def minimum_value_of_x(self):
        """field `Minimum Value of x`

        |  Units are based on field `A2`

        Args:
            value (float): value for IDD Field `Minimum Value of x`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `minimum_value_of_x` or None if not set

        """
        return self["Minimum Value of x"]

    @minimum_value_of_x.setter
    def minimum_value_of_x(self, value=None):
        """Corresponds to IDD field `Minimum Value of x`"""
        self["Minimum Value of x"] = value

    @property
    def maximum_value_of_x(self):
        """field `Maximum Value of x`

        |  Units are based on field `A2`

        Args:
            value (float): value for IDD Field `Maximum Value of x`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `maximum_value_of_x` or None if not set

        """
        return self["Maximum Value of x"]

    @maximum_value_of_x.setter
    def maximum_value_of_x(self, value=None):
        """Corresponds to IDD field `Maximum Value of x`"""
        self["Maximum Value of x"] = value

    @property
    def minimum_curve_output(self):
        """field `Minimum Curve Output`

        |  Specify the minimum value calculated by this curve object
        |  Units are based on field `A3`

        Args:
            value (float): value for IDD Field `Minimum Curve Output`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `minimum_curve_output` or None if not set

        """
        return self["Minimum Curve Output"]

    @minimum_curve_output.setter
    def minimum_curve_output(self, value=None):
        """Corresponds to IDD field `Minimum Curve Output`"""
        self["Minimum Curve Output"] = value

    @property
    def maximum_curve_output(self):
        """field `Maximum Curve Output`

        |  Specify the maximum value calculated by this curve object
        |  Units are based on field `A3`

        Args:
            value (float): value for IDD Field `Maximum Curve Output`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `maximum_curve_output` or None if not set

        """
        return self["Maximum Curve Output"]

    @maximum_curve_output.setter
    def maximum_curve_output(self, value=None):
        """Corresponds to IDD field `Maximum Curve Output`"""
        self["Maximum Curve Output"] = value

    @property
    def input_unit_type_for_x(self):
        """field `Input Unit Type for x`

        |  Default value: Dimensionless

        Args:
            value (str): value for IDD Field `Input Unit Type for x`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `input_unit_type_for_x` or None if not set

        """
        return self["Input Unit Type for x"]

    @input_unit_type_for_x.setter
    def input_unit_type_for_x(self, value="Dimensionless"):
        """Corresponds to IDD field `Input Unit Type for x`"""
        self["Input Unit Type for x"] = value

    @property
    def output_unit_type(self):
        """field `Output Unit Type`

        |  Default value: Dimensionless

        Args:
            value (str): value for IDD Field `Output Unit Type`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `output_unit_type` or None if not set

        """
        return self["Output Unit Type"]

    @output_unit_type.setter
    def output_unit_type(self, value="Dimensionless"):
        """Corresponds to IDD field `Output Unit Type`"""
        self["Output Unit Type"] = value




class CurveRectangularHyperbola1(DataObject):

    """ Corresponds to IDD object `Curve:RectangularHyperbola1`
        Rectangular hyperbola type 1 curve with one independent variable.
        Input consists of the curve name, the three coefficients, and the
        maximum and minimum valid independent variable values. Optional
        inputs for the curve minimum and maximum may be used to limit the
        output of the performance curve.
        curve = ((C1*x)/(C2+x))+C3
    """
    # Auto-generated field schema describing the IDD object layout.
    _schema = {'extensible-fields': OrderedDict(),
               'fields': OrderedDict([(u'name',
                                       {'name': u'Name',
                                        'pyname': u'name',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'alpha'}),
                                      (u'coefficient1 c1',
                                       {'name': u'Coefficient1 C1',
                                        'pyname': u'coefficient1_c1',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real'}),
                                      (u'coefficient2 c2',
                                       {'name': u'Coefficient2 C2',
                                        'pyname': u'coefficient2_c2',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real'}),
                                      (u'coefficient3 c3',
                                       {'name': u'Coefficient3 C3',
                                        'pyname': u'coefficient3_c3',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real'}),
                                      (u'minimum value of x',
                                       {'name': u'Minimum Value of x',
                                        'pyname': u'minimum_value_of_x',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real'}),
                                      (u'maximum value of x',
                                       {'name': u'Maximum Value of x',
                                        'pyname': u'maximum_value_of_x',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real'}),
                                      (u'minimum curve output',
                                       {'name': u'Minimum Curve Output',
                                        'pyname': u'minimum_curve_output',
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real'}),
                                      (u'maximum curve output',
                                       {'name': u'Maximum Curve Output',
                                        'pyname': u'maximum_curve_output',
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real'}),
                                      (u'input unit type for x',
                                       {'name': u'Input Unit Type for x',
                                        'pyname': u'input_unit_type_for_x',
                                        'default': u'Dimensionless',
                                        'required-field': False,
                                        'autosizable': False,
                                        'accepted-values': [u'Dimensionless'],
                                        'autocalculatable': False,
                                        'type': 'alpha'}),
                                      (u'output unit type',
                                       {'name': u'Output Unit Type',
                                        'pyname': u'output_unit_type',
                                        'default': u'Dimensionless',
                                        'required-field': False,
                                        'autosizable': False,
                                        'accepted-values': [u'Dimensionless'],
                                        'autocalculatable': False,
                                        'type': 'alpha'})]),
               'format': None,
               'group': u'Performance Curves',
               'min-fields': 0,
               'name': u'Curve:RectangularHyperbola1',
               'pyname': u'CurveRectangularHyperbola1',
               'required-object': False,
               'unique-object': False}

    @property
    def name(self):
        """field `Name`

        Args:
            value (str): value for IDD Field `Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `name` or None if not set

        """
        return self["Name"]

    @name.setter
    def name(self, value=None):
        """Corresponds to IDD field `Name`"""
        self["Name"] = value

    @property
    def coefficient1_c1(self):
        """field `Coefficient1 C1`

        Args:
            value (float): value for IDD Field `Coefficient1 C1`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `coefficient1_c1` or None if not set

        """
        return self["Coefficient1 C1"]

    @coefficient1_c1.setter
    def coefficient1_c1(self, value=None):
        """Corresponds to IDD field `Coefficient1 C1`"""
        self["Coefficient1 C1"] = value

    @property
    def coefficient2_c2(self):
        """field `Coefficient2 C2`

        Args:
            value (float): value for IDD Field `Coefficient2 C2`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `coefficient2_c2` or None if not set

        """
        return self["Coefficient2 C2"]

    @coefficient2_c2.setter
    def coefficient2_c2(self, value=None):
        """Corresponds to IDD field `Coefficient2 C2`"""
        self["Coefficient2 C2"] = value

    @property
    def coefficient3_c3(self):
        """field `Coefficient3 C3`

        Args:
            value (float): value for IDD Field `Coefficient3 C3`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `coefficient3_c3` or None if not set

        """
        return self["Coefficient3 C3"]

    @coefficient3_c3.setter
    def coefficient3_c3(self, value=None):
        """Corresponds to IDD field `Coefficient3 C3`"""
        self["Coefficient3 C3"] = value

    @property
    def minimum_value_of_x(self):
        """field `Minimum Value of x`

        |  Units are based on field `A2`

        Args:
            value (float): value for IDD Field `Minimum Value of x`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `minimum_value_of_x` or None if not set

        """
        return self["Minimum Value of x"]

    @minimum_value_of_x.setter
    def minimum_value_of_x(self, value=None):
        """Corresponds to IDD field `Minimum Value of x`"""
        self["Minimum Value of x"] = value

    @property
    def maximum_value_of_x(self):
        """field `Maximum Value of x`

        |  Units are based on field `A2`

        Args:
            value (float): value for IDD Field `Maximum Value of x`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `maximum_value_of_x` or None if not set

        """
        return self["Maximum Value of x"]

    @maximum_value_of_x.setter
    def maximum_value_of_x(self, value=None):
        """Corresponds to IDD field `Maximum Value of x`"""
        self["Maximum Value of x"] = value

    @property
    def minimum_curve_output(self):
        """field `Minimum Curve Output`

        |  Specify the minimum value calculated by this curve object
        |  Units are based on field `A3`

        Args:
            value (float): value for IDD Field `Minimum Curve Output`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `minimum_curve_output` or None if not set

        """
        return self["Minimum Curve Output"]

    @minimum_curve_output.setter
    def minimum_curve_output(self, value=None):
        """Corresponds to IDD field `Minimum Curve Output`"""
        self["Minimum Curve Output"] = value

    @property
    def maximum_curve_output(self):
        """field `Maximum Curve Output`

        |  Specify the maximum value calculated by this curve object
        |  Units are based on field `A3`

        Args:
            value (float): value for IDD Field `Maximum Curve Output`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `maximum_curve_output` or None if not set

        """
        return self["Maximum Curve Output"]

    @maximum_curve_output.setter
    def maximum_curve_output(self, value=None):
        """Corresponds to IDD field `Maximum Curve Output`"""
        self["Maximum Curve Output"] = value

    @property
    def input_unit_type_for_x(self):
        """field `Input Unit Type for x`

        |  Default value: Dimensionless

        Args:
            value (str): value for IDD Field `Input Unit Type for x`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `input_unit_type_for_x` or None if not set

        """
        return self["Input Unit Type for x"]

    @input_unit_type_for_x.setter
    def input_unit_type_for_x(self, value="Dimensionless"):
        """Corresponds to IDD field `Input Unit Type for x`"""
        self["Input Unit Type for x"] = value

    @property
    def output_unit_type(self):
        """field `Output Unit Type`

        |  Default value: Dimensionless

        Args:
            value (str): value for IDD Field `Output Unit Type`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `output_unit_type` or None if not set

        """
        return self["Output Unit Type"]

    @output_unit_type.setter
    def output_unit_type(self, value="Dimensionless"):
        """Corresponds to IDD field `Output Unit Type`"""
        self["Output Unit Type"] = value




class CurveRectangularHyperbola2(DataObject):

    """ Corresponds to IDD object `Curve:RectangularHyperbola2`
        Rectangular hyperbola type 2 curve with one independent variable.
        Input consists of the curve name, the three coefficients, and the
        maximum and minimum valid independent variable values. Optional
        inputs for the curve minimum and maximum may be used to limit the
        output of the performance curve.
curve = ((C1*x)/(C2+x))+(C3*x) """ _schema = {'extensible-fields': OrderedDict(), 'fields': OrderedDict([(u'name', {'name': u'Name', 'pyname': u'name', 'required-field': True, 'autosizable': False, 'autocalculatable': False, 'type': u'alpha'}), (u'coefficient1 c1', {'name': u'Coefficient1 C1', 'pyname': u'coefficient1_c1', 'required-field': True, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'coefficient2 c2', {'name': u'Coefficient2 C2', 'pyname': u'coefficient2_c2', 'required-field': True, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'coefficient3 c3', {'name': u'Coefficient3 C3', 'pyname': u'coefficient3_c3', 'required-field': True, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'minimum value of x', {'name': u'Minimum Value of x', 'pyname': u'minimum_value_of_x', 'required-field': True, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'maximum value of x', {'name': u'Maximum Value of x', 'pyname': u'maximum_value_of_x', 'required-field': True, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'minimum curve output', {'name': u'Minimum Curve Output', 'pyname': u'minimum_curve_output', 'required-field': False, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'maximum curve output', {'name': u'Maximum Curve Output', 'pyname': u'maximum_curve_output', 'required-field': False, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'input unit type for x', {'name': u'Input Unit Type for x', 'pyname': u'input_unit_type_for_x', 'default': u'Dimensionless', 'required-field': False, 'autosizable': False, 'accepted-values': [u'Dimensionless'], 'autocalculatable': False, 'type': 'alpha'}), (u'output unit type', {'name': u'Output Unit Type', 'pyname': u'output_unit_type', 'default': u'Dimensionless', 'required-field': False, 'autosizable': False, 'accepted-values': [u'Dimensionless'], 'autocalculatable': False, 'type': 
                          'type':
                          'alpha'})]),
               'format': None,
               'group': u'Performance Curves',
               'min-fields': 0,
               'name': u'Curve:RectangularHyperbola2',
               'pyname': u'CurveRectangularHyperbola2',
               'required-object': False,
               'unique-object': False}

    @property
    def name(self):
        """str: value of IDD field `Name`, or None if not set."""
        return self["Name"]

    @name.setter
    def name(self, value=None):
        """Corresponds to IDD field `Name`"""
        self["Name"] = value

    @property
    def coefficient1_c1(self):
        """float: value of IDD field `Coefficient1 C1`, or None if not set."""
        return self["Coefficient1 C1"]

    @coefficient1_c1.setter
    def coefficient1_c1(self, value=None):
        """Corresponds to IDD field `Coefficient1 C1`"""
        self["Coefficient1 C1"] = value

    @property
    def coefficient2_c2(self):
        """float: value of IDD field `Coefficient2 C2`, or None if not set."""
        return self["Coefficient2 C2"]

    @coefficient2_c2.setter
    def coefficient2_c2(self, value=None):
        """Corresponds to IDD field `Coefficient2 C2`"""
        self["Coefficient2 C2"] = value

    @property
    def coefficient3_c3(self):
        """float: value of IDD field `Coefficient3 C3`, or None if not set."""
        return self["Coefficient3 C3"]

    @coefficient3_c3.setter
    def coefficient3_c3(self, value=None):
        """Corresponds to IDD field `Coefficient3 C3`"""
        self["Coefficient3 C3"] = value

    @property
    def minimum_value_of_x(self):
        """float: value of IDD field `Minimum Value of x`, or None if not set.

        Units are based on field `A2`.
        """
        return self["Minimum Value of x"]

    @minimum_value_of_x.setter
    def minimum_value_of_x(self, value=None):
        """Corresponds to IDD field `Minimum Value of x`"""
        self["Minimum Value of x"] = value

    @property
    def maximum_value_of_x(self):
        """float: value of IDD field `Maximum Value of x`, or None if not set.

        Units are based on field `A2`.
        """
        return self["Maximum Value of x"]

    @maximum_value_of_x.setter
    def maximum_value_of_x(self, value=None):
        """Corresponds to IDD field `Maximum Value of x`"""
        self["Maximum Value of x"] = value

    @property
    def minimum_curve_output(self):
        """float: value of IDD field `Minimum Curve Output`, or None if not set.

        Specify the minimum value calculated by this curve object.
        Units are based on field `A3`.
        """
        return self["Minimum Curve Output"]

    @minimum_curve_output.setter
    def minimum_curve_output(self, value=None):
        """Corresponds to IDD field `Minimum Curve Output`"""
        self["Minimum Curve Output"] = value

    @property
    def maximum_curve_output(self):
        """float: value of IDD field `Maximum Curve Output`, or None if not set.

        Specify the maximum value calculated by this curve object.
        Units are based on field `A3`.
        """
        return self["Maximum Curve Output"]

    @maximum_curve_output.setter
    def maximum_curve_output(self, value=None):
        """Corresponds to IDD field `Maximum Curve Output`"""
        self["Maximum Curve Output"] = value

    @property
    def input_unit_type_for_x(self):
        """str: value of IDD field `Input Unit Type for x`, or None if not set.

        Default value: Dimensionless
        """
        return self["Input Unit Type for x"]

    @input_unit_type_for_x.setter
    def input_unit_type_for_x(self, value="Dimensionless"):
        """Corresponds to IDD field `Input Unit Type for x`"""
        self["Input Unit Type for x"] = value

    @property
    def output_unit_type(self):
        """str: value of IDD field `Output Unit Type`, or None if not set.

        Default value: Dimensionless
        """
        return self["Output Unit Type"]

    @output_unit_type.setter
    def output_unit_type(self, value="Dimensionless"):
        """Corresponds to IDD field `Output Unit Type`"""
        self["Output Unit Type"] = value




class CurveExponentialDecay(DataObject):

    """ Corresponds to IDD object `Curve:ExponentialDecay`
        Exponential decay curve with one independent variable.
        Input consists of the curve name, the three coefficients, and the maximum
        and minimum valid independent variable values. Optional inputs for the curve minimum
        and maximum may be used to limit the output of the performance curve.
        curve = C1+C2*exp(C3*x)
    """
    # Schema metadata generated from the EnergyPlus IDD; keep entries in sync
    # with the IDD rather than editing them by hand.
    _schema = {'extensible-fields': OrderedDict(),
               'fields': OrderedDict([(u'name',
                                       {'name': u'Name',
                                        'pyname': u'name',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'alpha'}),
                                      (u'coefficient1 c1',
                                       {'name': u'Coefficient1 C1',
                                        'pyname': u'coefficient1_c1',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real'}),
                                      (u'coefficient2 c2',
                                       {'name': u'Coefficient2 C2',
                                        'pyname': u'coefficient2_c2',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real'}),
                                      (u'coefficient3 c3',
                                       {'name': u'Coefficient3 C3',
                                        'pyname': u'coefficient3_c3',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real'}),
                                      (u'minimum value of x',
                                       {'name': u'Minimum Value of x',
                                        'pyname': u'minimum_value_of_x',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real'}),
                                      (u'maximum value of x',
                                       {'name': u'Maximum Value of x',
                                        'pyname': u'maximum_value_of_x',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real'}),
                                      (u'minimum curve output',
                                       {'name': u'Minimum Curve Output',
                                        'pyname': u'minimum_curve_output',
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real'}),
                                      (u'maximum curve output',
                                       {'name': u'Maximum Curve Output',
                                        'pyname': u'maximum_curve_output',
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real'}),
                                      (u'input unit type for x',
                                       {'name': u'Input Unit Type for x',
                                        'pyname': u'input_unit_type_for_x',
                                        'default': u'Dimensionless',
                                        'required-field': False,
                                        'autosizable': False,
                                        'accepted-values': [u'Dimensionless'],
                                        'autocalculatable': False,
                                        'type': 'alpha'}),
                                      (u'output unit type',
                                       {'name': u'Output Unit Type',
                                        'pyname': u'output_unit_type',
                                        'default': u'Dimensionless',
                                        'required-field': False,
                                        'autosizable': False,
                                        'accepted-values': [u'Dimensionless'],
                                        'autocalculatable': False,
                                        'type': 'alpha'})]),
               'format': None,
               'group': u'Performance Curves',
               'min-fields': 0,
               'name': u'Curve:ExponentialDecay',
               'pyname': u'CurveExponentialDecay',
               'required-object': False,
               'unique-object': False}

    @property
    def name(self):
        """str: value of IDD field `Name`, or None if not set."""
        return self["Name"]

    @name.setter
    def name(self, value=None):
        """Corresponds to IDD field `Name`"""
        self["Name"] = value

    @property
    def coefficient1_c1(self):
        """float: value of IDD field `Coefficient1 C1`, or None if not set."""
        return self["Coefficient1 C1"]

    @coefficient1_c1.setter
    def coefficient1_c1(self, value=None):
        """Corresponds to IDD field `Coefficient1 C1`"""
        self["Coefficient1 C1"] = value

    @property
    def coefficient2_c2(self):
        """float: value of IDD field `Coefficient2 C2`, or None if not set."""
        return self["Coefficient2 C2"]

    @coefficient2_c2.setter
    def coefficient2_c2(self, value=None):
        """Corresponds to IDD field `Coefficient2 C2`"""
        self["Coefficient2 C2"] = value

    @property
    def coefficient3_c3(self):
        """float: value of IDD field `Coefficient3 C3`, or None if not set."""
        return self["Coefficient3 C3"]

    @coefficient3_c3.setter
    def coefficient3_c3(self, value=None):
        """Corresponds to IDD field `Coefficient3 C3`"""
        self["Coefficient3 C3"] = value

    @property
    def minimum_value_of_x(self):
        """float: value of IDD field `Minimum Value of x`, or None if not set.

        Units are based on field `A2`.
        """
        return self["Minimum Value of x"]

    @minimum_value_of_x.setter
    def minimum_value_of_x(self, value=None):
        """Corresponds to IDD field `Minimum Value of x`"""
        self["Minimum Value of x"] = value

    @property
    def maximum_value_of_x(self):
        """float: value of IDD field `Maximum Value of x`, or None if not set.

        Units are based on field `A2`.
        """
        return self["Maximum Value of x"]

    @maximum_value_of_x.setter
    def maximum_value_of_x(self, value=None):
        """Corresponds to IDD field `Maximum Value of x`"""
        self["Maximum Value of x"] = value

    @property
    def minimum_curve_output(self):
        """float: value of IDD field `Minimum Curve Output`, or None if not set.

        Specify the minimum value calculated by this curve object.
        Units are based on field `A3`.
        """
        return self["Minimum Curve Output"]

    @minimum_curve_output.setter
    def minimum_curve_output(self, value=None):
        """Corresponds to IDD field `Minimum Curve Output`"""
        self["Minimum Curve Output"] = value

    @property
    def maximum_curve_output(self):
        """float: value of IDD field `Maximum Curve Output`, or None if not set.

        Specify the maximum value calculated by this curve object.
        Units are based on field `A3`.
        """
        return self["Maximum Curve Output"]

    @maximum_curve_output.setter
    def maximum_curve_output(self, value=None):
        """Corresponds to IDD field `Maximum Curve Output`"""
        self["Maximum Curve Output"] = value

    @property
    def input_unit_type_for_x(self):
        """str: value of IDD field `Input Unit Type for x`, or None if not set.

        Default value: Dimensionless
        """
        return self["Input Unit Type for x"]

    @input_unit_type_for_x.setter
    def input_unit_type_for_x(self, value="Dimensionless"):
        """Corresponds to IDD field `Input Unit Type for x`"""
        self["Input Unit Type for x"] = value

    @property
    def output_unit_type(self):
        """str: value of IDD field `Output Unit Type`, or None if not set.

        Default value: Dimensionless
        """
        return self["Output Unit Type"]

    @output_unit_type.setter
    def output_unit_type(self, value="Dimensionless"):
        """Corresponds to IDD field `Output Unit Type`"""
        self["Output Unit Type"] = value




class CurveDoubleExponentialDecay(DataObject):

    """ Corresponds to IDD object `Curve:DoubleExponentialDecay`
        Double exponential decay curve with one independent variable.
        Input consists of the curve name, the five coefficients, and the maximum
        and minimum valid independent variable values. Optional inputs for the curve minimum
        and maximum may be used to limit the output of the performance curve.
        curve = C1+C2*exp(C3*x)+C4*exp(C5*x)
    """
    # Schema metadata generated from the EnergyPlus IDD; keep entries in sync
    # with the IDD rather than editing them by hand.
    # NOTE(review): the C4/C5 fields are registered as `Coefficient3 C4` /
    # `Coefficient3 C5` — this mirrors the field names in the EnergyPlus IDD
    # itself, so do not "correct" them here without confirming against the IDD.
    _schema = {'extensible-fields': OrderedDict(),
               'fields': OrderedDict([(u'name',
                                       {'name': u'Name',
                                        'pyname': u'name',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'alpha'}),
                                      (u'coefficient1 c1',
                                       {'name': u'Coefficient1 C1',
                                        'pyname': u'coefficient1_c1',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real'}),
                                      (u'coefficient2 c2',
                                       {'name': u'Coefficient2 C2',
                                        'pyname': u'coefficient2_c2',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real'}),
                                      (u'coefficient3 c3',
                                       {'name': u'Coefficient3 C3',
                                        'pyname': u'coefficient3_c3',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real'}),
                                      (u'coefficient3 c4',
                                       {'name': u'Coefficient3 C4',
                                        'pyname': u'coefficient3_c4',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real'}),
                                      (u'coefficient3 c5',
                                       {'name': u'Coefficient3 C5',
                                        'pyname': u'coefficient3_c5',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real'}),
                                      (u'minimum value of x',
                                       {'name': u'Minimum Value of x',
                                        'pyname': u'minimum_value_of_x',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real'}),
                                      (u'maximum value of x',
                                       {'name': u'Maximum Value of x',
                                        'pyname': u'maximum_value_of_x',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real'}),
                                      (u'minimum curve output',
                                       {'name': u'Minimum Curve Output',
                                        'pyname': u'minimum_curve_output',
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real'}),
                                      (u'maximum curve output',
                                       {'name': u'Maximum Curve Output',
                                        'pyname': u'maximum_curve_output',
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real'}),
                                      (u'input unit type for x',
                                       {'name': u'Input Unit Type for x',
                                        'pyname': u'input_unit_type_for_x',
                                        'default': u'Dimensionless',
                                        'required-field':
                                        False,
                                        'autosizable': False,
                                        'accepted-values': [u'Dimensionless'],
                                        'autocalculatable': False,
                                        'type': 'alpha'}),
                                      (u'output unit type',
                                       {'name': u'Output Unit Type',
                                        'pyname': u'output_unit_type',
                                        'default': u'Dimensionless',
                                        'required-field': False,
                                        'autosizable': False,
                                        'accepted-values': [u'Dimensionless'],
                                        'autocalculatable': False,
                                        'type': 'alpha'})]),
               'format': None,
               'group': u'Performance Curves',
               'min-fields': 0,
               'name': u'Curve:DoubleExponentialDecay',
               'pyname': u'CurveDoubleExponentialDecay',
               'required-object': False,
               'unique-object': False}

    @property
    def name(self):
        """str: value of IDD field `Name`, or None if not set."""
        return self["Name"]

    @name.setter
    def name(self, value=None):
        """Corresponds to IDD field `Name`"""
        self["Name"] = value

    @property
    def coefficient1_c1(self):
        """float: value of IDD field `Coefficient1 C1`, or None if not set."""
        return self["Coefficient1 C1"]

    @coefficient1_c1.setter
    def coefficient1_c1(self, value=None):
        """Corresponds to IDD field `Coefficient1 C1`"""
        self["Coefficient1 C1"] = value

    @property
    def coefficient2_c2(self):
        """float: value of IDD field `Coefficient2 C2`, or None if not set."""
        return self["Coefficient2 C2"]

    @coefficient2_c2.setter
    def coefficient2_c2(self, value=None):
        """Corresponds to IDD field `Coefficient2 C2`"""
        self["Coefficient2 C2"] = value

    @property
    def coefficient3_c3(self):
        """float: value of IDD field `Coefficient3 C3`, or None if not set."""
        return self["Coefficient3 C3"]

    @coefficient3_c3.setter
    def coefficient3_c3(self, value=None):
        """Corresponds to IDD field `Coefficient3 C3`"""
        self["Coefficient3 C3"] = value

    @property
    def coefficient3_c4(self):
        """float: value of IDD field `Coefficient3 C4`, or None if not set.

        The IDD names this field `Coefficient3 C4` (not `Coefficient4 C4`).
        """
        return self["Coefficient3 C4"]

    @coefficient3_c4.setter
    def coefficient3_c4(self, value=None):
        """Corresponds to IDD field `Coefficient3 C4`"""
        self["Coefficient3 C4"] = value

    @property
    def coefficient3_c5(self):
        """float: value of IDD field `Coefficient3 C5`, or None if not set.

        The IDD names this field `Coefficient3 C5` (not `Coefficient5 C5`).
        """
        return self["Coefficient3 C5"]

    @coefficient3_c5.setter
    def coefficient3_c5(self, value=None):
        """Corresponds to IDD field `Coefficient3 C5`"""
        self["Coefficient3 C5"] = value

    @property
    def minimum_value_of_x(self):
        """float: value of IDD field `Minimum Value of x`, or None if not set.

        Units are based on field `A2`.
        """
        return self["Minimum Value of x"]

    @minimum_value_of_x.setter
    def minimum_value_of_x(self, value=None):
        """Corresponds to IDD field `Minimum Value of x`"""
        self["Minimum Value of x"] = value

    @property
    def maximum_value_of_x(self):
        """float: value of IDD field `Maximum Value of x`, or None if not set.

        Units are based on field `A2`.
        """
        return self["Maximum Value of x"]

    @maximum_value_of_x.setter
    def maximum_value_of_x(self, value=None):
        """Corresponds to IDD field `Maximum Value of x`"""
        self["Maximum Value of x"] = value

    @property
    def minimum_curve_output(self):
        """float: value of IDD field `Minimum Curve Output`, or None if not set.

        Specify the minimum value calculated by this curve object.
        Units are based on field `A3`.
        """
        return self["Minimum Curve Output"]

    @minimum_curve_output.setter
    def minimum_curve_output(self, value=None):
        """Corresponds to IDD field `Minimum Curve Output`"""
        self["Minimum Curve Output"] = value

    @property
    def maximum_curve_output(self):
        """float: value of IDD field `Maximum Curve Output`, or None if not set.

        Specify the maximum value calculated by this curve object.
        Units are based on field `A3`.
        """
        return self["Maximum Curve Output"]

    @maximum_curve_output.setter
    def maximum_curve_output(self, value=None):
        """Corresponds to IDD field `Maximum Curve Output`"""
        self["Maximum Curve Output"] = value

    @property
    def input_unit_type_for_x(self):
        """str: value of IDD field `Input Unit Type for x`, or None if not set.

        Default value: Dimensionless
        """
        return self["Input Unit Type for x"]

    @input_unit_type_for_x.setter
    def input_unit_type_for_x(self, value="Dimensionless"):
        """Corresponds to IDD field `Input Unit Type for x`"""
        self["Input Unit Type for x"] = value

    @property
    def output_unit_type(self):
        """str: value of IDD field `Output Unit Type`, or None if not set.

        Default value: Dimensionless
        """
        return self["Output Unit Type"]

    @output_unit_type.setter
    def output_unit_type(self, value="Dimensionless"):
        """Corresponds to IDD field `Output Unit Type`"""
        self["Output Unit Type"] = value




class CurveChillerPartLoadWithLift(DataObject):

    """ Corresponds to IDD object `Curve:ChillerPartLoadWithLift`
        This chiller part-load performance curve has three independent variables.
        Input consists of the curve name, the twelve coefficients, and the maximum
        and minimum valid independent variable values. Optional inputs for the curve minimum
        and maximum may be used to limit the output of the performance curve.
        curve = C1 + C2*x + C3*x**2 + C4*y + C5*y**2 + C6*x*y + C7*x**3 + C8*y**3 + C9*x**2*y
        + C10*x*y**2 + C11*x**2*y**2 + C12*z*y**3
        x = dT* = normalized fractional Lift = dT / dTref
        y = PLR = part load ratio (cooling load/steady state capacity)
        z = Tdev* = normalized Tdev = Tdev / dTref
        Where:
        dT = Lift = Leaving Condenser Water Temperature - Leaving Chilled Water Temperature
        dTref = dT at the reference condition
        Tdev = Leaving Chilled Water Temperature - Reference Chilled Water Temperature
    """
    # Schema metadata generated from the EnergyPlus IDD; keep entries in sync
    # with the IDD rather than editing them by hand.
    _schema = {'extensible-fields': OrderedDict(),
               'fields': OrderedDict([(u'name',
                                       {'name': u'Name',
                                        'pyname': u'name',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'alpha'}),
                                      (u'coefficient1 c1',
                                       {'name': u'Coefficient1 C1',
                                        'pyname': u'coefficient1_c1',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real'}),
                                      (u'coefficient2 c2',
                                       {'name': u'Coefficient2 C2',
                                        'pyname': u'coefficient2_c2',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real'}),
                                      (u'coefficient3 c3',
                                       {'name': u'Coefficient3 C3',
                                        'pyname': u'coefficient3_c3',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real'}),
                                      (u'coefficient4 c4',
                                       {'name': u'Coefficient4 C4',
                                        'pyname': u'coefficient4_c4',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real'}),
                                      (u'coefficient5 c5',
                                       {'name': u'Coefficient5 C5',
                                        'pyname': u'coefficient5_c5',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable':
False, 'type': u'real'}), (u'coefficient6 c6', {'name': u'Coefficient6 C6', 'pyname': u'coefficient6_c6', 'required-field': True, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'coefficient7 c7', {'name': u'Coefficient7 C7', 'pyname': u'coefficient7_c7', 'required-field': True, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'coefficient8 c8', {'name': u'Coefficient8 C8', 'pyname': u'coefficient8_c8', 'required-field': True, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'coefficient9 c9', {'name': u'Coefficient9 C9', 'pyname': u'coefficient9_c9', 'required-field': True, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'coefficient10 c10', {'name': u'Coefficient10 C10', 'pyname': u'coefficient10_c10', 'required-field': True, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'coefficient11 c11', {'name': u'Coefficient11 C11', 'pyname': u'coefficient11_c11', 'required-field': True, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'coefficient12 c12', {'name': u'Coefficient12 C12', 'pyname': u'coefficient12_c12', 'required-field': True, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'minimum value of x', {'name': u'Minimum Value of x', 'pyname': u'minimum_value_of_x', 'required-field': True, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'maximum value of x', {'name': u'Maximum Value of x', 'pyname': u'maximum_value_of_x', 'required-field': True, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'minimum value of y', {'name': u'Minimum Value of y', 'pyname': u'minimum_value_of_y', 'required-field': True, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'maximum value of y', {'name': u'Maximum Value of y', 'pyname': u'maximum_value_of_y', 'required-field': True, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'minimum value of z', 
{'name': u'Minimum Value of z', 'pyname': u'minimum_value_of_z', 'required-field': True, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'maximum value of z', {'name': u'Maximum Value of z', 'pyname': u'maximum_value_of_z', 'required-field': True, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'minimum curve output', {'name': u'Minimum Curve Output', 'pyname': u'minimum_curve_output', 'required-field': False, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'maximum curve output', {'name': u'Maximum Curve Output', 'pyname': u'maximum_curve_output', 'required-field': False, 'autosizable': False, 'autocalculatable': False, 'type': u'real'}), (u'input unit type for x', {'name': u'Input Unit Type for x', 'pyname': u'input_unit_type_for_x', 'default': u'Dimensionless', 'required-field': False, 'autosizable': False, 'accepted-values': [u'Dimensionless'], 'autocalculatable': False, 'type': 'alpha'}), (u'input unit type for y', {'name': u'Input Unit Type for y', 'pyname': u'input_unit_type_for_y', 'default': u'Dimensionless', 'required-field': False, 'autosizable': False, 'accepted-values': [u'Dimensionless'], 'autocalculatable': False, 'type': 'alpha'}), (u'input unit type for z', {'name': u'Input Unit Type for z', 'pyname': u'input_unit_type_for_z', 'default': u'Dimensionless', 'required-field': False, 'autosizable': False, 'accepted-values': [u'Dimensionless'], 'autocalculatable': False, 'type': 'alpha'}), (u'output unit type', {'name': u'Output Unit Type', 'pyname': u'output_unit_type', 'default': u'Dimensionless', 'required-field': False, 'autosizable': False, 'accepted-values': [u'Dimensionless'], 'autocalculatable': False, 'type': 'alpha'})]), 'format': None, 'group': u'Performance Curves', 'min-fields': 0, 'name': u'Curve:ChillerPartLoadWithLift', 'pyname': u'CurveChillerPartLoadWithLift', 'required-object': False, 'unique-object': False} @property def name(self): """field `Name` Args: value (str): 
value for IDD Field `Name` Raises: ValueError: if `value` is not a valid value Returns: str: the value of `name` or None if not set """ return self["Name"] @name.setter def name(self, value=None): """Corresponds to IDD field `Name`""" self["Name"] = value @property def coefficient1_c1(self): """field `Coefficient1 C1` Args: value (float): value for IDD Field `Coefficient1 C1` Raises: ValueError: if `value` is not a valid value Returns: float: the value of `coefficient1_c1` or None if not set """ return self["Coefficient1 C1"] @coefficient1_c1.setter def coefficient1_c1(self, value=None): """Corresponds to IDD field `Coefficient1 C1`""" self["Coefficient1 C1"] = value @property def coefficient2_c2(self): """field `Coefficient2 C2` Args: value (float): value for IDD Field `Coefficient2 C2` Raises: ValueError: if `value` is not a valid value Returns: float: the value of `coefficient2_c2` or None if not set """ return self["Coefficient2 C2"] @coefficient2_c2.setter def coefficient2_c2(self, value=None): """Corresponds to IDD field `Coefficient2 C2`""" self["Coefficient2 C2"] = value @property def coefficient3_c3(self): """field `Coefficient3 C3` Args: value (float): value for IDD Field `Coefficient3 C3` Raises: ValueError: if `value` is not a valid value Returns: float: the value of `coefficient3_c3` or None if not set """ return self["Coefficient3 C3"] @coefficient3_c3.setter def coefficient3_c3(self, value=None): """Corresponds to IDD field `Coefficient3 C3`""" self["Coefficient3 C3"] = value @property def coefficient4_c4(self): """field `Coefficient4 C4` Args: value (float): value for IDD Field `Coefficient4 C4` Raises: ValueError: if `value` is not a valid value Returns: float: the value of `coefficient4_c4` or None if not set """ return self["Coefficient4 C4"] @coefficient4_c4.setter def coefficient4_c4(self, value=None): """Corresponds to IDD field `Coefficient4 C4`""" self["Coefficient4 C4"] = value @property def coefficient5_c5(self): """field `Coefficient5 C5` 
Args: value (float): value for IDD Field `Coefficient5 C5` Raises: ValueError: if `value` is not a valid value Returns: float: the value of `coefficient5_c5` or None if not set """ return self["Coefficient5 C5"] @coefficient5_c5.setter def coefficient5_c5(self, value=None): """Corresponds to IDD field `Coefficient5 C5`""" self["Coefficient5 C5"] = value @property def coefficient6_c6(self): """field `Coefficient6 C6` Args: value (float): value for IDD Field `Coefficient6 C6` Raises: ValueError: if `value` is not a valid value Returns: float: the value of `coefficient6_c6` or None if not set """ return self["Coefficient6 C6"] @coefficient6_c6.setter def coefficient6_c6(self, value=None): """Corresponds to IDD field `Coefficient6 C6`""" self["Coefficient6 C6"] = value @property def coefficient7_c7(self): """field `Coefficient7 C7` Args: value (float): value for IDD Field `Coefficient7 C7` Raises: ValueError: if `value` is not a valid value Returns: float: the value of `coefficient7_c7` or None if not set """ return self["Coefficient7 C7"] @coefficient7_c7.setter def coefficient7_c7(self, value=None): """Corresponds to IDD field `Coefficient7 C7`""" self["Coefficient7 C7"] = value @property def coefficient8_c8(self): """field `Coefficient8 C8` Args: value (float): value for IDD Field `Coefficient8 C8` Raises: ValueError: if `value` is not a valid value Returns: float: the value of `coefficient8_c8` or None if not set """ return self["Coefficient8 C8"] @coefficient8_c8.setter def coefficient8_c8(self, value=None): """Corresponds to IDD field `Coefficient8 C8`""" self["Coefficient8 C8"] = value @property def coefficient9_c9(self): """field `Coefficient9 C9` Args: value (float): value for IDD Field `Coefficient9 C9` Raises: ValueError: if `value` is not a valid value Returns: float: the value of `coefficient9_c9` or None if not set """ return self["Coefficient9 C9"] @coefficient9_c9.setter def coefficient9_c9(self, value=None): """Corresponds to IDD field `Coefficient9 
C9`""" self["Coefficient9 C9"] = value @property def coefficient10_c10(self): """field `Coefficient10 C10` Args: value (float): value for IDD Field `Coefficient10 C10` Raises: ValueError: if `value` is not a valid value Returns: float: the value of `coefficient10_c10` or None if not set """ return self["Coefficient10 C10"] @coefficient10_c10.setter def coefficient10_c10(self, value=None): """Corresponds to IDD field `Coefficient10 C10`""" self["Coefficient10 C10"] = value @property def coefficient11_c11(self): """field `Coefficient11 C11` Args: value (float): value for IDD Field `Coefficient11 C11` Raises: ValueError: if `value` is not a valid value Returns: float: the value of `coefficient11_c11` or None if not set """ return self["Coefficient11 C11"] @coefficient11_c11.setter def coefficient11_c11(self, value=None): """Corresponds to IDD field `Coefficient11 C11`""" self["Coefficient11 C11"] = value @property def coefficient12_c12(self): """field `Coefficient12 C12` Args: value (float): value for IDD Field `Coefficient12 C12` Raises: ValueError: if `value` is not a valid value Returns: float: the value of `coefficient12_c12` or None if not set """ return self["Coefficient12 C12"] @coefficient12_c12.setter def coefficient12_c12(self, value=None): """Corresponds to IDD field `Coefficient12 C12`""" self["Coefficient12 C12"] = value @property def minimum_value_of_x(self): """field `Minimum Value of x` | Units are based on field `A2` Args: value (float): value for IDD Field `Minimum Value of x` Raises: ValueError: if `value` is not a valid value Returns: float: the value of `minimum_value_of_x` or None if not set """ return self["Minimum Value of x"] @minimum_value_of_x.setter def minimum_value_of_x(self, value=None): """Corresponds to IDD field `Minimum Value of x`""" self["Minimum Value of x"] = value @property def maximum_value_of_x(self): """field `Maximum Value of x` | Units are based on field `A2` Args: value (float): value for IDD Field `Maximum Value of x` 
Raises: ValueError: if `value` is not a valid value Returns: float: the value of `maximum_value_of_x` or None if not set """ return self["Maximum Value of x"] @maximum_value_of_x.setter def maximum_value_of_x(self, value=None): """Corresponds to IDD field `Maximum Value of x`""" self["Maximum Value of x"] = value @property def minimum_value_of_y(self): """field `Minimum Value of y` | Units are based on field `A3` Args: value (float): value for IDD Field `Minimum Value of y` Raises: ValueError: if `value` is not a valid value Returns: float: the value of `minimum_value_of_y` or None if not set """ return self["Minimum Value of y"] @minimum_value_of_y.setter def minimum_value_of_y(self, value=None): """Corresponds to IDD field `Minimum Value of y`""" self["Minimum Value of y"] = value @property def maximum_value_of_y(self): """field `Maximum Value of y` | Units are based on field `A3` Args: value (float): value for IDD Field `Maximum Value of y` Raises: ValueError: if `value` is not a valid value Returns: float: the value of `maximum_value_of_y` or None if not set """ return self["Maximum Value of y"] @maximum_value_of_y.setter def maximum_value_of_y(self, value=None): """Corresponds to IDD field `Maximum Value of y`""" self["Maximum Value of y"] = value @property def minimum_value_of_z(self): """field `Minimum Value of z` | Units are based on field `A4` Args: value (float): value for IDD Field `Minimum Value of z` Raises: ValueError: if `value` is not a valid value Returns: float: the value of `minimum_value_of_z` or None if not set """ return self["Minimum Value of z"] @minimum_value_of_z.setter def minimum_value_of_z(self, value=None): """Corresponds to IDD field `Minimum Value of z`""" self["Minimum Value of z"] = value @property def maximum_value_of_z(self): """field `Maximum Value of z` | Units are based on field `A4` Args: value (float): value for IDD Field `Maximum Value of z` Raises: ValueError: if `value` is not a valid value Returns: float: the value of 
`maximum_value_of_z` or None if not set """ return self["Maximum Value of z"] @maximum_value_of_z.setter def maximum_value_of_z(self, value=None): """Corresponds to IDD field `Maximum Value of z`""" self["Maximum Value of z"] = value @property def minimum_curve_output(self): """field `Minimum Curve Output` | Specify the minimum value calculated by this curve object | Units are based on field `A5` Args: value (float): value for IDD Field `Minimum Curve Output` Raises: ValueError: if `value` is not a valid value Returns: float: the value of `minimum_curve_output` or None if not set """ return self["Minimum Curve Output"] @minimum_curve_output.setter def minimum_curve_output(self, value=None): """Corresponds to IDD field `Minimum Curve Output`""" self["Minimum Curve Output"] = value @property def maximum_curve_output(self): """field `Maximum Curve Output` | Specify the maximum value calculated by this curve object | Units are based on field `A5` Args: value (float): value for IDD Field `Maximum Curve Output` Raises: ValueError: if `value` is not a valid value Returns: float: the value of `maximum_curve_output` or None if not set """ return self["Maximum Curve Output"] @maximum_curve_output.setter def maximum_curve_output(self, value=None): """Corresponds to IDD field `Maximum Curve Output`""" self["Maximum Curve Output"] = value @property def input_unit_type_for_x(self): """field `Input Unit Type for x` | Default value: Dimensionless Args: value (str): value for IDD Field `Input Unit Type for x` Raises: ValueError: if `value` is not a valid value Returns: str: the value of `input_unit_type_for_x` or None if not set """ return self["Input Unit Type for x"] @input_unit_type_for_x.setter def input_unit_type_for_x(self, value="Dimensionless"): """Corresponds to IDD field `Input Unit Type for x`""" self["Input Unit Type for x"] = value @property def input_unit_type_for_y(self): """field `Input Unit Type for y` | Default value: Dimensionless Args: value (str): value for IDD 
Field `Input Unit Type for y` Raises: ValueError: if `value` is not a valid value Returns: str: the value of `input_unit_type_for_y` or None if not set """ return self["Input Unit Type for y"] @input_unit_type_for_y.setter def input_unit_type_for_y(self, value="Dimensionless"): """Corresponds to IDD field `Input Unit Type for y`""" self["Input Unit Type for y"] = value @property def input_unit_type_for_z(self): """field `Input Unit Type for z` | Default value: Dimensionless Args: value (str): value for IDD Field `Input Unit Type for z` Raises: ValueError: if `value` is not a valid value Returns: str: the value of `input_unit_type_for_z` or None if not set """ return self["Input Unit Type for z"] @input_unit_type_for_z.setter def input_unit_type_for_z(self, value="Dimensionless"): """Corresponds to IDD field `Input Unit Type for z`""" self["Input Unit Type for z"] = value @property def output_unit_type(self): """field `Output Unit Type` | Default value: Dimensionless Args: value (str): value for IDD Field `Output Unit Type` Raises: ValueError: if `value` is not a valid value Returns: str: the value of `output_unit_type` or None if not set """ return self["Output Unit Type"] @output_unit_type.setter def output_unit_type(self, value="Dimensionless"): """Corresponds to IDD field `Output Unit Type`""" self["Output Unit Type"] = value
Pick up the essentials of the Japanese language with this easy-to-use audio introduction. Covering everything from finding your way to talking about yourself, this Collins 40-minute audio can help you learn short and simple phrases quickly by just listening and repeating. Starting Out in Japanese, Part 1: Meeting People and Basic Expressions contains essential vocabulary, structure, and pronunciation in short lessons that are easy to master. In these introductory lessons, you'll learn how to count, how to talk about people and family members, how to talk about your home, and other essential expressions. Unlike most Japanese language books that focus on formal and polite use, this introductory guide will help you to speak and understand Japanese as it is used in everyday life. If you are studying, working, or just travelling to Japan, this book will help you to break down the language and culture into the real and everyday essentials. Use this guide to speak Japanese from day one, while also laying down a solid foundation for more advanced study and an appreciation of Japanese culture. Easy Learning German teaches you key words for the most important situations and lets you expand your language through a variety of activities. You choose whether to focus on learning the essentials or to progress to some more challenging activities – so whether you’re a beginner, or you want to refresh and build upon your existing knowledge, you can become confident in German without even having to pick up a pen. Everything you need to go from absolutely nothing to actually speaking and understanding simple Japanese. This audiobook is perfect for absolute beginners, students preparing for the JLPT 4/5 tests, or just people wanting to improve their beginner Japanese. Do you want to learn Japanese the fast, fun and easy way? Then this huge audiobook compilation is for you. Learn Japanese for Everyday Life is for people who want to finally speak and use real-life Japanese. 
You learn the must-know conversational phrases, questions, answers, social media phrases, and how to talk about your everyday life in Japanese. All words and phrases were hand-picked by our team of Japanese teachers and experts. Easy Learning Japanese teaches you key words for the most important situations and lets you expand your language through a variety of activities. You choose whether to focus on learning the essentials or to progress to some more challenging activities – so whether you’re a beginner, or you want to refresh and build upon your existing knowledge, you can become confident in Japanese without even having to pick up a pen. Unit 6: Going places (1): where? Unit 7: Going places (2): when? Unit 8: Going places (3): how? I cannot listen to this twice. I feel like it is condescending, maybe it's perfect for simpletons. Corn itchy wow? So irritating.
import socket
import logging

from django.core.cache import cache
from django.conf import settings

logger = logging.getLogger(__name__)


def hostname_ip(hostname):
    """Resolve *hostname* to an IPv4 address string.

    Args:
        hostname: host name to resolve.

    Returns:
        The dotted-quad IP string, or None if resolution fails.
    """
    try:
        host_ip = socket.gethostbyname(hostname)
    except OSError:
        # socket.gaierror is an OSError subclass; the previous bare
        # `except:` also swallowed unrelated errors (e.g. KeyboardInterrupt).
        logger.debug("Unable to get IP for %s", hostname)
        return None
    logger.debug("Hostname : %s, IP: %s", hostname, host_ip)
    return host_ip


def ip_yours(request):
    """Return the client IP for *request*, honouring proxy headers.

    Checks X-Forwarded-For first (taking the left-most, i.e. original,
    client entry), then X-Real-IP, then REMOTE_ADDR.
    NOTE(review): logging the full request.META may leak sensitive
    headers into debug logs — confirm this is acceptable.
    """
    logger.debug(request.META)
    x_forwarded_for = request.META.get('HTTP_X_FORWARDED_FOR')
    if x_forwarded_for:
        logger.debug('HTTP_X_FORWARDED_FOR: %s', x_forwarded_for)
        ip = x_forwarded_for.split(',')[0].strip()
    elif request.META.get('HTTP_X_REAL_IP'):
        ip = request.META.get('HTTP_X_REAL_IP')
        logger.debug('HTTP_X_REAL_IP: %s', ip)
    else:
        ip = request.META.get('REMOTE_ADDR')
        logger.debug('REMOTE_ADDR: %s', ip)
    return ip


def set_discourse_ip_cache():
    """Resolve the Discourse host and cache its IP.

    Returns:
        The resolved IP (or None if resolution failed; None is cached too,
        preserving the original behavior).
    """
    discourse_ip = hostname_ip(settings.DISCOURSE_HOSTNAME)
    cache.set(settings.DISCOURSE_IP_CACHE_KEY,
              discourse_ip,
              settings.DISCOURSE_IP_CACHE_TTL)
    return discourse_ip
Locking fuel caps. Aft divider with mirror on forward side. LH aft belted flushing toilet. 6-place passenger interior with club arrangement and dual fold-down tables. Snow white, with gray, maroon, and cinnamon trim. Always hangared, no damage history. Additional equipment: Woodward engine synchronizer. Aft divider with mirror on forward side. 6-place passenger club arrangement with dual fold-down tables.
#import GlobalConfig as conf
import glob
import re
import sqlite3
import string
import datetime
import time
from random import randint  # TODO can be deleted
from random import choice

VERBOSE = True


class Question(object):
    """A single quiz question: text, answer, tips, and an answer regex."""

    def __init__(self, cfg):
        """
        Set up a new question object.

        @type cfg: dict
        @param cfg: Config data (uses 'max_tips' and 'tip_freq')
        """
        self.cfg = cfg
        self.data = {}
        # Lifecycle: 'new' before being asked, an int (number of tips given)
        # while active, 'finished' once the answer has been revealed.
        self.status = 'new'
        self.regex = None
        self.qid = 1  # TODO
        self.active = False  # TODO control if question is active

    def getStatus(self):
        """
        @return: Status in question loop
        """
        return self.status

    def getID(self):
        """
        @return: Question id (placeholder, always 1 for now).
        """
        return 1

    def reset(self):
        """Resets status for question."""
        self.status = 'new'

    def __str__(self):
        """
        @return: Question string
        """
        return self.data['Question']

    def askQuestion(self, c, n):
        """
        Returns formatted question for channel and activates the question.

        @type c: string
        @param c: channel
        @type n: number
        @param n: Question number
        """
        self.status = 0
        return [(0, c, "Question {}: {}".format(n, self))]

    def tip_n(self):
        """
        @return: Number of tips that may be given, capped by cfg['max_tips'].
        """
        # Was: len() computed twice with a hand-rolled comparison.
        return min(len(self.data['Tip']), self.cfg['max_tips'])

    def giveTip(self):
        """
        @return: Next tip if one is left, else the correct answer
                 (marking the question finished).
        """
        if VERBOSE:
            print("Len-tip: {}".format(len(self.data['Tip'])))
            print("give tip ... {}".format(self.status))
        # NOTE(review): askQuestion() must be called first; before that,
        # self.status is the string 'new' and the addition below raises
        # TypeError. Confirm the caller guarantees the ordering.
        if self.tip_n() > self.status + 1:
            self.status += 1
            return self.data['Tip'][self.status - 1]
        self.status = 'finished'
        return self.data['Answer']

    def stringToQuestion(self, qstring, source):
        """
        Creates question from string.

        @type qstring: string
        @param qstring: Question formatted as "Key: value" lines; lines
                        beginning with 'Tip' are collected as tips.
        @type source: string
        @param source: Source name of string
        """
        lines = qstring.splitlines()
        # Non-tip "Key: value" lines populate the data dict.
        self.data = dict(
            [part.strip() for part in line.split(':', 1)]
            for line in lines
            if line != '' and line[0:3] != 'Tip')
        self.data['Tip'] = [line[4:].strip() for line in lines
                            if line[0:3] == 'Tip']
        if len(self.data['Tip']) == 0:
            self.data['Tip'] = self.createTip(self.data['Answer'])
        self.data['Source'] = source
        # NOTE(review): the answer text is compiled as a regex fallback;
        # answers containing regex metacharacters may need re.escape().
        if 'Regexp' in self.data:
            self.regex = re.compile(self.data['Regexp'], re.IGNORECASE)
        else:
            self.regex = re.compile(self.data['Answer'], re.IGNORECASE)

    def createTip(self, qstring):
        """
        Creates tips by revealing every cfg['tip_freq']-th character of the
        answer, shifting the revealed offset by one per tip.
        TODO: Improve tips - ignore whitespace.

        @return: list of tips.
        """
        tips = []
        for offset in range(min(len(qstring), self.cfg['tip_freq'])):
            tips.append(''.join(
                c if (j - offset) % self.cfg['tip_freq'] == 0 or c == ' '
                else '.'
                for j, c in enumerate(qstring)))
        # The final generated tip is dropped (preserves original behavior).
        return tips[:-1]
Marble bathroom floor bathroom ideas: marble bathroom floor and wall tiles. Marble bathroom floor: patterned geometric Carrara marble bathroom floor designs. Marble bathroom floor: gorgeous white marble bathrooms and marble bathroom floor tile ideas.
#!/usr/bin/env python
"""
Plot histograms of the HW0 self-assessment results.
"""

import yaml
import numpy as np
import matplotlib.pyplot as plt


def gather_scores(recs, name):
    """Gather a set of 1-5 scores from student response records

    Args:
        recs: list of student responses (dict-like records)
        name: name of self-assessment score to consider

    Returns:
        length-5 histogram of how many self-assessed as levels 1-5
    """
    scores = np.zeros(5)
    for rec in recs:
        if name in rec:
            # Scores are 1-5; bin index is score-1.
            scores[int(rec[name] - 1)] += 1
    return scores


def score_plot(recs, name):
    """Produce a histogram plot file for a HW0 score.

    Writes hw0-<name>.pdf in the current directory.

    Args:
        recs: list of student responses
        name: name of self-assessment score to consider
    """
    ind = np.arange(5) + 1
    scores = gather_scores(recs, name)
    plt.figure()
    plt.bar(ind - 0.4, scores, 0.8)
    plt.title(name)
    plt.savefig("hw0-{0}.pdf".format(name))
    # Release the figure; the original leaked one figure per attribute.
    plt.close()


if __name__ == "__main__":
    attrs = ['git', 'shell', 'c', 'python', 'architecture',
             'concurrency', 'numerics']
    with open("hw0.yml", "r") as f:
        # safe_load: plain data only; yaml.load without a Loader is
        # deprecated and can execute arbitrary constructors.
        recs = yaml.safe_load(f)
    for attr in attrs:
        score_plot(recs, attr)
Read more about What triggers arthrofibrosis? A very popular and comprehensive coverage of the topic of arthrofibrosis (internal scarring in the knee joint) from Dr Frank Noyes of the Cincinnati Sportsmedicine and Orthopaedic Center. A list maintained by bulletin board member 'missmyknee' of surgeons with a special interest in arthrofibrosis of the knee. Read more about Who's Who in arthrofibrosis surgery? Mr Dirk Kokmeyer, physiotherapist at the Steadman Hawkins Clinic in Vail, Colorado presents a set of specialist tutorials on rehabilitation of knee patients with arthrofibrosis. An 'interpretation' of an article from 2007 discussing problems that interfere with joint function after knee replacement. Dr Peter Millett chats about his experience with infrapatellar contracture syndrome and the importance of recognising it early before the shortening of the patellar tendon becomes irreversible. Dr Peter Millett discusses why knee surgeons need to be aware that arthrofibrotic scarring can occur in this area because of the position of their arthroscopic portals (cuts).
"""Render a pgcs schema diff as an element tree (built via the tags module).

Python 2 module (uses iteritems/xrange/unicode).
"""

import difflib

import pgcs.core.data
import pgcs.core.diff
core = pgcs.core

from . import tags

# Set by generate(): the list of compared database objects, used for the
# per-column headings.  NOTE(review): module-global state makes generate()
# non-reentrant.
database_objects = None


def get_colors(values):
    """Order distinct group ids in *values* by decreasing frequency.

    *values* is a sequence of (value, group) pairs; group is -1 exactly when
    the value is missing.  Returns a list of group ids whose index is the
    group's "color" number (the most common group gets color 0).
    """
    groups = {}

    for value, group in values:
        if value is not None:
            assert group >= 0
            count = groups.get(group, 0)
            groups[group] = count + 1
        else:
            # Missing values carry no group and are not counted.
            assert group == -1

    def get_sorting(item):
        # Sort key: negated count, so highest count sorts first.
        group, count = item
        return -count

    def get_group(item):
        group, count = item
        return group

    return [get_group(i) for i in sorted(groups.iteritems(), key=get_sorting)]


def gen_columns(parent, diff):
    """Emit one span per compared database showing presence and color."""
    colors = get_colors(diff.values)
    span = parent.span["columns"]

    for column, (value, group) in enumerate(diff.values):
        classes = ["column-%d" % column]
        content = ""

        if value is None:
            classes.append("miss")
        else:
            classes.append("have")
            color = colors.index(group)
            classes.append("color-%d" % color)

            # Tables additionally show whether they contain rows.
            if isinstance(value, core.data.Table):
                if value.has_content is core.data.unknown:
                    content = "?"
                elif value.has_content:
                    content = "1"
                else:
                    content = "0"

        span.span[classes].div[:] = content


def gen_named_object_list(parent, diff, name=None):
    """Emit an expandable list of named diff entries under *parent*.

    Each entry dispatches to its kind-specific generator via diff_types.
    """
    if diff:
        element = parent.div["list"]
        if name:
            element.div["head"].span["name"][:] = name

        for entry in diff.entries:
            kind, func = diff_types[type(entry.diff)]
            div = element.div["entry"]

            if entry.value:
                div.div["expander"][:] = "+"

            div.span["type"][:] = kind
            div.span["name"][:] = entry.name

            gen_columns(div, entry.value)

            if entry.value:
                children = div.div["children"]
                func(children, entry.diff)


def gen_value(parent, diff, name, is_list=False):
    """Emit a table comparing one attribute (or list attribute) per group.

    One column per color group; the header row names the databases in each
    group.  Rendering of the body depends on the diff's flavor: column
    lists, dotted object names, or plain unicode values.
    """
    if diff:
        cls = "value"
        if is_list:
            cls = "list"

        element = parent.div[cls]
        head = element.div["head"]
        head.span["name"][:] = name

        table = element.table

        obis_by_group = []
        dbis_by_group = []

        # NOTE(review): obis and dbis are filled identically here — confirm
        # whether they were ever meant to diverge.
        for group in xrange(diff.groups):
            obis = []
            obis_by_group.append(obis)

            dbis = []
            dbis_by_group.append(dbis)

            for i, (o, g) in enumerate(diff.values):
                if g == group:
                    obis.append(i)
                    dbis.append(i)

        colors = get_colors(diff.values)

        # Header row: the databases belonging to each color group.
        tr = table.tr
        for color, group in enumerate(colors):
            dbis = dbis_by_group[group]
            dbns = [database_objects[i].get_name() for i in dbis]
            tr.th["color-%d" % color].div[:] = " ".join(dbns)

        def listlen(l):
            # len() that treats None as the empty list.
            if l is None:
                return 0
            else:
                return len(l)

        if is_list:
            if len(colors) == 2:
                # Exactly two variants: show an aligned side-by-side diff.
                lists = [diff.lists[obis_by_group[g][0]] for g in colors]
                gen_2column(table, *lists)
            else:
                # Otherwise one row per index, one cell per group.
                for i in xrange(max([listlen(l) for l in diff.lists])):
                    tr = table.tr
                    for group in colors:
                        obi = obis_by_group[group][0]
                        lis = diff.lists[obi]
                        td = tr.td
                        if i < len(lis):
                            td.div[:] = dump_column(lis[i])
        elif isinstance(diff, core.diff.ObjectValue):
            # Object-valued attribute: render the dotted qualified name.
            tr = table.tr
            for group in colors:
                obi = obis_by_group[group][0]
                obj = diff.objects[obi]
                tr.td.div[:] = ".".join(obj.flatten())
        else:
            # Scalar attribute: best-effort unicode rendering.
            tr = table.tr
            for group in colors:
                obi = obis_by_group[group][0]
                val, grp = diff.values[obi]
                try:
                    content = unicode(val)
                except:
                    content = "?"
                tr.td.div[:] = content


def gen_ordered_object_list(parent, diff, name):
    """Emit an order-sensitive object list (delegates to gen_value)."""
    gen_value(parent, diff, name, True)


def dump_column(obj):
    """One-line textual summary of a column: name, type, flags."""
    s = "%s %s" % (obj.name, obj.type.name)
    if obj.notnull:
        s += " notnull"
    if obj.default:
        s += " %s" % obj.default
    return s


class NamedHash(object):
    """Wrapper making objects hash/compare by their .name only,
    so SequenceMatcher aligns columns by name."""

    def __init__(self, object):
        self.object = object

    def __hash__(self):
        return hash(self.object.name)

    def __eq__(self, other):
        return self.object.name == other.object.name


def gen_2column(table, seq1, seq2):
    """Emit a two-column aligned diff of two column sequences."""
    hash1 = [NamedHash(o) for o in seq1]
    hash2 = [NamedHash(o) for o in seq2]

    match = difflib.SequenceMatcher(a=hash1, b=hash2)

    for tag, i1, i2, j1, j2 in match.get_opcodes():
        if tag == "delete":
            # Present only on the left: fill left cell, leave right empty.
            for obj in seq1[i1:i2]:
                tr = table.tr
                tr.td.div[:] = dump_column(obj)
                tr.td
        elif tag == "insert":
            # Present only on the right.
            for obj in seq2[j1:j2]:
                tr = table.tr
                tr.td
                tr.td.div[:] = dump_column(obj)
        elif tag in ("replace", "equal"):
            # NOTE(review): the loop runs i2-i1 times, so for a "replace"
            # where the right side is longer (j2-j1 > i2-i1) the surplus
            # right-hand entries are never rendered — likely should be
            # xrange(max(i2 - i1, j2 - j1)). Preserved as-is.
            for n in xrange(i2 - i1):
                tr = table.tr

                if i1 + n < i2:
                    obj1 = seq1[i1 + n]
                    tr.td.div[:] = dump_column(obj1)
                else:
                    tr.td

                if j1 + n < j2:
                    obj2 = seq2[j1 + n]
                    tr.td.div[:] = dump_column(obj2)
                else:
                    tr.td

# Database

def gen_database(tree, diff):
    """Top-level entry: emit the whole database comparison."""
    div = tree.div["database"]
    gen_database_head(div, diff)
    gen_database_body(div, diff)

def gen_database_head(parent, diff):
    """Header row naming each compared database."""
    span = parent.div["head"].span["columns"]
    for column, obj in enumerate(diff.objects):
        span.span[("column-%d" % column)][:] = obj.get_name()

def gen_database_body(parent, diff):
    """Body: languages and namespaces, pre-expanded."""
    body = parent.div["body"]
    body.div["expander"][:] = "+"
    div = body.div["children"]
    gen_named_object_list(div, diff.languages)
    gen_named_object_list(div, diff.namespaces)

# Language

def gen_language(div, diff):
    gen_value(div, diff.owner, "owner")

# Namespace

def gen_namespace(div, diff):
    """All object kinds contained in a schema/namespace."""
    gen_value(div, diff.owner, "owner")
    gen_named_object_list(div, diff.types)
    gen_named_object_list(div, diff.composites)
    gen_named_object_list(div, diff.indexes)
    gen_named_object_list(div, diff.tables)
    gen_named_object_list(div, diff.views)
    gen_named_object_list(div, diff.sequences)
    gen_named_object_list(div, diff.functions)
    gen_named_object_list(div, diff.operators)
    gen_named_object_list(div, diff.opclasses)

# Type

def gen_type(div, diff):
    gen_value(div, diff.owner, "owner")
    gen_value(div, diff.notnull, "notnull")
    gen_value(div, diff.default, "default")

def gen_domain(div, diff):
    # A domain is a type plus a base type and constraints.
    gen_type(div, diff)
    gen_value(div, diff.basetype, "basetype")
    gen_named_object_list(div, diff.constraints, "constraints")

# Function

def gen_function(div, diff):
    gen_value(div, diff.owner, "owner")
    gen_value(div, diff.language, "language")
    gen_value(div, diff.rettype, "rettype")
    gen_value(div, diff.argtypes, "argtypes")
    gen_value(div, diff.source1, "source1")
    gen_value(div, diff.source2, "source2")

# Relation

def gen_relation(div, diff):
    gen_value(div, diff.owner, "owner")
    gen_ordered_object_list(div, diff.columns, "columns")

def gen_rule_relation(div, diff):
    # A relation that may carry rewrite rules (tables, views).
    gen_relation(div, diff)
    gen_named_object_list(div, diff.rules, "rules")

def gen_table(div, diff):
    gen_rule_relation(div, diff)
    gen_named_object_list(div, diff.triggers, "triggers")
    gen_named_object_list(div, diff.constraints, "constraints")

# Sequence

def gen_sequence(div, diff):
    gen_value(div, diff.owner, "owner")
    gen_value(div, diff.increment, "increment")
    gen_value(div, diff.minimum, "minimum")
    gen_value(div, diff.maximum, "maximum")

# Column

def gen_column(div, diff):
    gen_value(div, diff.type, "type")
    gen_value(div, diff.notnull, "notnull")
    gen_value(div, diff.default, "default")

# Constraint

def gen_constraint(div, diff):
    gen_value(div, diff.definition, "definition")

def gen_column_constraint(div, diff):
    # A constraint bound to specific columns (primary/unique/check).
    gen_constraint(div, diff)
    gen_ordered_object_list(div, diff.columns, "columns")

def gen_foreign_key(div, diff):
    gen_column_constraint(div, diff)
    gen_value(div, diff.foreign_table, "foreign-table")
    gen_ordered_object_list(div, diff.foreign_columns, "foreign-columns")

# Trigger

def gen_trigger(div, diff):
    gen_value(div, diff.function, "function")
    gen_value(div, diff.description, "description")

# Rule

def gen_rule(div, diff):
    gen_value(div, diff.definition, "definition")

# Operator

def gen_operator(div, diff):
    gen_value(div, diff.owner, "owner")

def gen_operator_class(div, diff):
    gen_value(div, diff.owner, "owner")
    gen_value(div, diff.intype, "intype")
    gen_value(div, diff.default, "default")
    gen_value(div, diff.keytype, "keytype")

# Dispatch table: diff node type -> (CSS kind string, generator function).
diff_types = {
    core.diff.CheckColumnConstraint: ("check-column-constraint", gen_column_constraint),
    core.diff.CheckConstraint: ("check-constraint", gen_constraint),
    core.diff.Column: ("column", gen_column),
    core.diff.Composite: ("composite", gen_relation),
    core.diff.Domain: ("domain", gen_domain),
    core.diff.ForeignKey: ("foreign-key", gen_foreign_key),
    core.diff.Function: ("function", gen_function),
    core.diff.Index: ("index", gen_relation),
    core.diff.Language: ("language", gen_language),
    core.diff.Namespace: ("namespace", gen_namespace),
    core.diff.Operator: ("operator", gen_operator),
    core.diff.OperatorClass: ("operator-class", gen_operator_class),
    core.diff.PrimaryKey: ("primary-key", gen_column_constraint),
    core.diff.Rule: ("rule", gen_rule),
    core.diff.Sequence: ("sequence", gen_sequence),
    core.diff.Table: ("table", gen_table),
    core.diff.Trigger: ("trigger", gen_trigger),
    core.diff.Type: ("type", gen_type),
    core.diff.UniqueColumnConstraint: ("unique-column-constraint", gen_column_constraint),
    core.diff.UniqueConstraint: ("unique-constraint", gen_constraint),
    core.diff.View: ("view", gen_rule_relation),
}

def generate(diff):
    """Build and return the element tree for a whole database diff."""
    global database_objects
    database_objects = diff.objects

    tree = tags.TagTree()
    gen_database(tree, diff)
    return tree.get_element_tree()
Whistleblowers in B.C. say they believe money laundering in government regulated casinos was deliberately being allowed and not stopped. We’re estimating $2-billion of dirty cash has flowed through lottery corp casinos and calls for a public inquiry from British Columbians are getting louder. Here’s a timeline of how we got here.
import os
from datetime import datetime
from urllib.parse import urljoin

from django.conf import settings
from django.core.exceptions import SuspiciousFileOperation
from django.core.files import File, locks
from django.core.files.move import file_move_safe
from django.core.signals import setting_changed
from django.utils import timezone
from django.utils._os import safe_join
from django.utils.crypto import get_random_string
from django.utils.deconstruct import deconstructible
from django.utils.encoding import filepath_to_uri
from django.utils.functional import LazyObject, cached_property
from django.utils.module_loading import import_string
from django.utils.text import get_valid_filename

# Public API of this module.
__all__ = (
    'Storage', 'FileSystemStorage', 'DefaultStorage', 'default_storage',
    'get_storage_class',
)


class Storage:
    """
    A base storage class, providing some default behaviors that all other
    storage systems can inherit or override, as necessary.
    """

    # The following methods represent a public interface to private methods.
    # These shouldn't be overridden by subclasses unless absolutely necessary.

    def open(self, name, mode='rb'):
        """Retrieve the specified file from storage."""
        return self._open(name, mode)

    def save(self, name, content, max_length=None):
        """
        Save new content to the file specified by name. The content should be
        a proper File object or any Python file-like object, ready to be read
        from the beginning.
        """
        # Get the proper name for the file, as it will actually be saved.
        if name is None:
            name = content.name

        # Wrap plain file-like objects so _save() can rely on .chunks().
        if not hasattr(content, 'chunks'):
            content = File(content, name)

        name = self.get_available_name(name, max_length=max_length)
        return self._save(name, content)

    # These methods are part of the public API, with default implementations.

    def get_valid_name(self, name):
        """
        Return a filename, based on the provided filename, that's suitable for
        use in the target storage system.
        """
        return get_valid_filename(name)

    def get_alternative_name(self, file_root, file_ext):
        """
        Return an alternative filename, by adding an underscore and a random
        7 character alphanumeric string (before the file extension, if one
        exists) to the filename.
        """
        return '%s_%s%s' % (file_root, get_random_string(7), file_ext)

    def get_available_name(self, name, max_length=None):
        """
        Return a filename that's free on the target storage system and
        available for new content to be written to.
        """
        dir_name, file_name = os.path.split(name)
        file_root, file_ext = os.path.splitext(file_name)
        # If the filename already exists, generate an alternative filename
        # until it doesn't exist.
        # Truncate original name if required, so the new filename does not
        # exceed the max_length.
        while self.exists(name) or (max_length and len(name) > max_length):
            # file_ext includes the dot.
            name = os.path.join(dir_name, self.get_alternative_name(file_root, file_ext))
            if max_length is None:
                continue
            # Truncate file_root if max_length exceeded.
            truncation = len(name) - max_length
            if truncation > 0:
                file_root = file_root[:-truncation]
                # Entire file_root was truncated in attempt to find an
                # available filename.
                if not file_root:
                    raise SuspiciousFileOperation(
                        'Storage can not find an available filename for "%s". '
                        'Please make sure that the corresponding file field '
                        'allows sufficient "max_length".' % name
                    )
                name = os.path.join(dir_name, self.get_alternative_name(file_root, file_ext))
        return name

    def generate_filename(self, filename):
        """
        Validate the filename by calling get_valid_name() and return a
        filename to be passed to the save() method.
        """
        # `filename` may include a path as returned by FileField.upload_to.
        dirname, filename = os.path.split(filename)
        return os.path.normpath(os.path.join(dirname, self.get_valid_name(filename)))

    def path(self, name):
        """
        Return a local filesystem path where the file can be retrieved using
        Python's built-in open() function. Storage systems that can't be
        accessed using open() should *not* implement this method.
        """
        raise NotImplementedError("This backend doesn't support absolute paths.")

    # The following methods form the public API for storage systems, but with
    # no default implementations. Subclasses must implement *all* of these.

    def delete(self, name):
        """
        Delete the specified file from the storage system.
        """
        raise NotImplementedError('subclasses of Storage must provide a delete() method')

    def exists(self, name):
        """
        Return True if a file referenced by the given name already exists in
        the storage system, or False if the name is available for a new file.
        """
        raise NotImplementedError('subclasses of Storage must provide an exists() method')

    def listdir(self, path):
        """
        List the contents of the specified path. Return a 2-tuple of lists:
        the first item being directories, the second item being files.
        """
        raise NotImplementedError('subclasses of Storage must provide a listdir() method')

    def size(self, name):
        """
        Return the total size, in bytes, of the file specified by name.
        """
        raise NotImplementedError('subclasses of Storage must provide a size() method')

    def url(self, name):
        """
        Return an absolute URL where the file's contents can be accessed
        directly by a Web browser.
        """
        raise NotImplementedError('subclasses of Storage must provide a url() method')

    def get_accessed_time(self, name):
        """
        Return the last accessed time (as a datetime) of the file specified by
        name. The datetime will be timezone-aware if USE_TZ=True.
        """
        raise NotImplementedError('subclasses of Storage must provide a get_accessed_time() method')

    def get_created_time(self, name):
        """
        Return the creation time (as a datetime) of the file specified by
        name. The datetime will be timezone-aware if USE_TZ=True.
        """
        raise NotImplementedError('subclasses of Storage must provide a get_created_time() method')

    def get_modified_time(self, name):
        """
        Return the last modified time (as a datetime) of the file specified by
        name. The datetime will be timezone-aware if USE_TZ=True.
        """
        raise NotImplementedError('subclasses of Storage must provide a get_modified_time() method')


@deconstructible
class FileSystemStorage(Storage):
    """
    Standard filesystem storage
    """
    # The combination of O_CREAT and O_EXCL makes os.open() raise OSError if
    # the file already exists before it's opened.
    OS_OPEN_FLAGS = os.O_WRONLY | os.O_CREAT | os.O_EXCL | getattr(os, 'O_BINARY', 0)

    def __init__(self, location=None, base_url=None, file_permissions_mode=None,
                 directory_permissions_mode=None):
        # Each constructor argument of None means "fall back to the matching
        # Django setting" (see _value_or_setting below).
        self._location = location
        self._base_url = base_url
        self._file_permissions_mode = file_permissions_mode
        self._directory_permissions_mode = directory_permissions_mode
        setting_changed.connect(self._clear_cached_properties)

    def _clear_cached_properties(self, setting, **kwargs):
        """Reset setting based property values."""
        # Popping from __dict__ invalidates the corresponding cached_property
        # so the next access re-reads the (changed) setting.
        if setting == 'MEDIA_ROOT':
            self.__dict__.pop('base_location', None)
            self.__dict__.pop('location', None)
        elif setting == 'MEDIA_URL':
            self.__dict__.pop('base_url', None)
        elif setting == 'FILE_UPLOAD_PERMISSIONS':
            self.__dict__.pop('file_permissions_mode', None)
        elif setting == 'FILE_UPLOAD_DIRECTORY_PERMISSIONS':
            self.__dict__.pop('directory_permissions_mode', None)

    def _value_or_setting(self, value, setting):
        # Explicit constructor arguments win over the settings default.
        return setting if value is None else value

    @cached_property
    def base_location(self):
        return self._value_or_setting(self._location, settings.MEDIA_ROOT)

    @cached_property
    def location(self):
        return os.path.abspath(self.base_location)

    @cached_property
    def base_url(self):
        # Normalize an explicit base URL to always end with a slash so that
        # urljoin() in url() appends rather than replaces the last segment.
        if self._base_url is not None and not self._base_url.endswith('/'):
            self._base_url += '/'
        return self._value_or_setting(self._base_url, settings.MEDIA_URL)

    @cached_property
    def file_permissions_mode(self):
        return self._value_or_setting(self._file_permissions_mode, settings.FILE_UPLOAD_PERMISSIONS)

    @cached_property
    def directory_permissions_mode(self):
        return self._value_or_setting(self._directory_permissions_mode, settings.FILE_UPLOAD_DIRECTORY_PERMISSIONS)

    def _open(self, name, mode='rb'):
        return File(open(self.path(name), mode))

    def _save(self, name, content):
        full_path = self.path(name)

        # Create any intermediate directories that do not exist.
        directory = os.path.dirname(full_path)
        try:
            if self.directory_permissions_mode is not None:
                # os.makedirs applies the global umask, so we reset it,
                # for consistency with file_permissions_mode behavior.
                old_umask = os.umask(0)
                try:
                    os.makedirs(directory, self.directory_permissions_mode, exist_ok=True)
                finally:
                    os.umask(old_umask)
            else:
                os.makedirs(directory, exist_ok=True)
        except FileExistsError:
            # The path exists but is a file, not a directory.
            raise FileExistsError('%s exists and is not a directory.' % directory)

        # There's a potential race condition between get_available_name and
        # saving the file; it's possible that two threads might return the
        # same name, at which point all sorts of fun happens. So we need to
        # try to create the file, but if it already exists we have to go back
        # to get_available_name() and try again.

        while True:
            try:
                # This file has a file path that we can move.
                if hasattr(content, 'temporary_file_path'):
                    file_move_safe(content.temporary_file_path(), full_path)

                # This is a normal uploadedfile that we can stream.
                else:
                    # The current umask value is masked out by os.open!
                    fd = os.open(full_path, self.OS_OPEN_FLAGS, 0o666)
                    _file = None
                    try:
                        locks.lock(fd, locks.LOCK_EX)
                        for chunk in content.chunks():
                            if _file is None:
                                # Text vs binary mode is decided lazily from
                                # the first chunk's type.
                                mode = 'wb' if isinstance(chunk, bytes) else 'wt'
                                _file = os.fdopen(fd, mode)
                            _file.write(chunk)
                    finally:
                        locks.unlock(fd)
                        # os.fdopen() took ownership of fd, so close whichever
                        # handle is live.
                        if _file is not None:
                            _file.close()
                        else:
                            os.close(fd)
            except FileExistsError:
                # A new name is needed if the file exists.
                name = self.get_available_name(name)
                full_path = self.path(name)
            else:
                # OK, the file save worked. Break out of the loop.
                break

        if self.file_permissions_mode is not None:
            os.chmod(full_path, self.file_permissions_mode)

        # Store filenames with forward slashes, even on Windows.
        return str(name).replace('\\', '/')

    def delete(self, name):
        assert name, "The name argument is not allowed to be empty."
        name = self.path(name)
        # If the file or directory exists, delete it from the filesystem.
        try:
            if os.path.isdir(name):
                os.rmdir(name)
            else:
                os.remove(name)
        except FileNotFoundError:
            # FileNotFoundError is raised if the file or directory was removed
            # concurrently.
            pass

    def exists(self, name):
        return os.path.exists(self.path(name))

    def listdir(self, path):
        path = self.path(path)
        directories, files = [], []
        for entry in os.scandir(path):
            if entry.is_dir():
                directories.append(entry.name)
            else:
                files.append(entry.name)
        return directories, files

    def path(self, name):
        # safe_join() prevents path traversal outside self.location.
        return safe_join(self.location, name)

    def size(self, name):
        return os.path.getsize(self.path(name))

    def url(self, name):
        if self.base_url is None:
            raise ValueError("This file is not accessible via a URL.")
        url = filepath_to_uri(name)
        if url is not None:
            # Strip a leading slash so urljoin() keeps base_url's path prefix.
            url = url.lstrip('/')
        return urljoin(self.base_url, url)

    def _datetime_from_timestamp(self, ts):
        """
        If timezone support is enabled, make an aware datetime object in UTC;
        otherwise make a naive one in the local timezone.
        """
        if settings.USE_TZ:
            # Safe to use .replace() because UTC doesn't have DST
            return datetime.utcfromtimestamp(ts).replace(tzinfo=timezone.utc)
        else:
            return datetime.fromtimestamp(ts)

    def get_accessed_time(self, name):
        return self._datetime_from_timestamp(os.path.getatime(self.path(name)))

    def get_created_time(self, name):
        return self._datetime_from_timestamp(os.path.getctime(self.path(name)))

    def get_modified_time(self, name):
        return self._datetime_from_timestamp(os.path.getmtime(self.path(name)))


def get_storage_class(import_path=None):
    """Import and return the storage class at *import_path* (or the default)."""
    return import_string(import_path or settings.DEFAULT_FILE_STORAGE)


class DefaultStorage(LazyObject):
    """Lazy proxy that instantiates the default storage backend on first use."""
    def _setup(self):
        self._wrapped = get_storage_class()()


# Module-level singleton used throughout Django.
default_storage = DefaultStorage()
Birds of Paradise sitting atop gorgeous orange or yellow roses, nestled in a lovely glass vase. Make someone gasp with surprise at this unusual arrangement. Send these flowers for a housewarming or congratulations of any kind. When you send flowers in Mumbai, send something with a special style. This design was inspired by a sparrow trying her best to build a cute nest on my Mumbai window sill. Sparrows do not build their nests in trees — too many predators — and they are losing their typical urban habitat as buildings become skyscrapers in Mumbai. You can help by hanging sparrow shelters in your balcony or making small spaces for them to nest. Once I noticed a sparrow picking up the soft petals of a yellow rose that were spilled just outside my florist shop in Mumbai. I have sparrow shelters, and these are used by the cute sparrows. The softness of the petals must have gone to line the little baby sparrow's bed — now I'm dreaming :) — but it inspired me. So I got myself into floral-designer mode and came up with Birds in a Nest!
# exam1.py
import sqlite3
import uuid

from flask import Flask, request, jsonify

DATABASE = 'exam.db'

app = Flask(__name__)
app.config.from_object(__name__)


@app.route('/users', methods=['GET'])
def list_all_users():
    """HTTP GET /users: list all users.

    Response: JSON list of {"id": <int, up to 10 digits>,
    "name": <str>, "salary": <int>} objects.
    """
    # Make sure the table exists before querying it.
    _check_table()
    return _select_all_users()


@app.route('/users/<int:user_id>', methods=['GET'])
def list_user(user_id=None):
    """HTTP GET /users/<id>: return one user as JSON."""
    _check_table()
    return _select_user(user_id)


@app.route('/users', methods=['POST'])
def create_users():
    """HTTP POST /users: insert a new user.

    Request body: {"name": <str>, "salary": <int>}
    Response: {"id": <generated integer id>}
    """
    _check_table()
    return _insert_users(request.get_json())


# BUG FIX: this route previously used a plain string converter (<user_id>),
# so "id=?" bound a TEXT value against the INTEGER id column, which never
# compares equal in SQLite.  Use <int:user_id>, consistent with the GET route.
@app.route('/users/<int:user_id>', methods=['PUT'])
def modify_user(user_id=None):
    """HTTP PUT /users/<id>: update a user's name and salary."""
    _check_table()
    return _update_user(user_id, request.get_json())


@app.route('/users/<int:user_id>', methods=['DELETE'])
def remove_user(user_id=None):
    """HTTP DELETE /users/<id>: delete a user."""
    # BUG FIX: the table check was announced by a comment but never performed.
    _check_table()
    return _delete_user(user_id)


def _check_table():
    """Create the ``user`` table on first use if it does not exist yet."""
    conn = sqlite3.connect(DATABASE)
    cur = conn.cursor()
    cur.execute(
        "SELECT name FROM sqlite_master WHERE type='table' AND name='user';")
    if not cur.fetchall():
        _create_table()
    cur.close()
    conn.close()


def _create_table():
    """Create the ``user`` table."""
    conn = sqlite3.connect(DATABASE)
    cur = conn.cursor()
    cur.execute(
        "CREATE TABLE IF NOT EXISTS "
        "user(id int PRIMARY KEY, name text, salary int);")
    conn.commit()
    cur.close()
    conn.close()
    # BUG FIX: was a Python 2 print statement (a SyntaxError on Python 3).
    print("CREATE TABLE")


def _rows_as_dicts(cur):
    """Return all fetched rows of *cur* as a list of column->value dicts."""
    columns = [desc[0] for desc in cur.description]
    return [dict(zip(columns, row)) for row in cur.fetchall()]


def _select_all_users():
    """Return every user as a JSON list, or a status object when empty."""
    conn = sqlite3.connect(DATABASE)
    cur = conn.cursor()
    cur.execute("SELECT * FROM user;")
    rows = _rows_as_dicts(cur)
    cur.close()
    conn.close()
    if rows:
        return jsonify(rows)
    # Empty table: keep the original sentinel payload.
    return jsonify({"HTTP": "GET", "status": "all_empty"})


def _select_user(user_id):
    """Return the user with *user_id* as JSON, or a status object if absent."""
    conn = sqlite3.connect(DATABASE)
    cur = conn.cursor()
    cur.execute("SELECT * FROM user WHERE id=?;", (user_id,))
    rows = _rows_as_dicts(cur)
    cur.close()
    conn.close()
    if rows:
        return jsonify(rows)
    return jsonify({"HTTP": "GET", "status": "empty"})


def _insert_users(reqdata):
    """Insert a new user from *reqdata* and return its generated id."""
    # If the request body is empty.
    if reqdata is None:
        return jsonify({"HTTP": "POST", "status": "empty"})
    conn = sqlite3.connect(DATABASE)
    cur = conn.cursor()
    # Random 32-bit id derived from a UUID4 (at most 10 decimal digits).
    int_uuid = uuid.uuid4().int & ((1 << 32) - 1)
    cur.execute(
        "INSERT INTO user VALUES (?,?,?);",
        (int_uuid, reqdata['name'], reqdata['salary']))
    conn.commit()
    cur.close()
    conn.close()
    return jsonify({"id": int_uuid})


def _update_user(user_id, reqdata):
    """Update name/salary of *user_id* and echo the id back."""
    # Robustness: mirror _insert_users' guard instead of raising KeyError.
    if reqdata is None:
        return jsonify({"HTTP": "PUT", "status": "empty"})
    conn = sqlite3.connect(DATABASE)
    cur = conn.cursor()
    # BUG FIX: the statement was previously split across a physical line
    # break inside a plain string literal (a syntax error).
    cur.execute(
        "UPDATE user SET name=?, salary=? WHERE id=?;",
        (reqdata['name'], reqdata['salary'], user_id))
    conn.commit()
    cur.close()
    conn.close()
    return jsonify({"id": user_id})


def _delete_user(user_id):
    """Delete *user_id* and echo the id back."""
    conn = sqlite3.connect(DATABASE)
    cur = conn.cursor()
    cur.execute("DELETE FROM user WHERE id=?;", (user_id,))
    conn.commit()
    cur.close()
    conn.close()
    return jsonify({"id": user_id})


@app.route('/reset')
def _drop_table():
    """Drop the table: only for testing."""
    conn = sqlite3.connect(DATABASE)
    cur = conn.cursor()
    cur.execute("DROP TABLE user;")
    conn.commit()
    cur.close()
    conn.close()
    return "DROP TABLE"


# Flask app running on 0.0.0.0:8000.
if __name__ == "__main__":
    app.run(host='0.0.0.0', port=8000)
First, I want to thank Happy Schools Blog for all the help they rendered to me. But I was denied a visa. I had my visa interview on 26th July. CO: Why are you going to the US? Me: To pursue my higher education in a bachelor's program in Chemistry. CO: Which universities did you apply to? CO: Which university accepted you? Me: SMSU and UF accepted me. Me: SMSU has the same in-state tuition for all international students, which means that I will be paying the same tuition as a Minnesota resident. They also have… (he interrupted). CO: What will you do with your degree? Me: I will come back to Nigeria to work with the National Agency for Food and Drug Administration and Control (NAFDAC). CO: Why don't you want to do chemistry here in Nigeria? Me: Because getting an international degree will provide more job opportunities for me; many employers prefer an international degree, especially this government agency, NAFDAC. CO: I am sorry, I can't issue a visa to you because of 214(b). Maybe you can try next time; I am sorry. I thanked him and left, but I still don't know what I did wrong — he didn't see any of my documents except my I-20. I saw him rejecting everybody, but I thought that mine would be different. Please, what can I do? I have a job offer from a government agency here in Nigeria, but the CO didn't ask for any of the documents. A visa agent advised me to document the job offer letter and mail it to the consulate office to show that I have good proof that I will come back, so that they will go through it and then give me a call to come back for another interview. But I had never heard of that option before. Please, is it advisable for me to do that? Or do I simply have to schedule another appointment? Hi everyone! It's nice to be here and share experiences. My visa was rejected on 3 August this year in Yaoundé under section 214(b). I am a Cameroonian.
I got admitted into Columbia College to study Biology, but due to my low score on the listening section of the TOEFL, I had to do an English for Academic Purposes program once I got to the school, before starting the biology major I chose. When I received my I-20 form, instead of seeing Biology as my program of study, I saw English. I didn't bother; I thought it was OK if the school put it that way. So when my turn came for the interview, there was this officer who rejected almost everyone. Unfortunately, he interviewed me. He took my documents and asked me what I was going to study. I got a little confused — was it English or Biology? So I said that I was going to start by studying English. Then he asked why. I said it was due to my low score on the listening section of the TOEFL. Then he asked me if I was a native speaker of English. I didn't understand "native speaker", so I said yes, since I have been schooling in English all my life, though born into a francophone family. He denied the visa and said it was a waste of time going to study English if I already knew it. That's my story. I told the admissions counselor about it and she asked me to take a new TOEFL; she was going to make me a new I-20 form. Please, I need your help to ace the next interview. Hello guys, my interview is 10 days away at the Abuja consulate. I only applied to one university in the USA. Should I just tell the VO that I only applied to one university, or should I make up two other universities and claim I wasn't given admission? …Please reapply for another interview ASAP, as long as your I-20 is still valid, and most importantly pay a lot of attention — while preparing and at the interview itself — to your responses to the VO; they just have to be convinced, that is all! Express your passion about studying a particular discipline, and draw the VO's attention to the kind of research at the school that made you opt for it as a choice of university, rather than to the cost. You should pull through this time.
Be confident, not arrogant, and maintain steady eye contact. Good luck!! Nana Kwasi Kwateng is my name, a citizen of the Republic of Ghana. Words could indeed not express my excitement and ecstasy when I came across your useful website a couple of days ago. Sir/Madam, the fact of the matter is that on the 7th of January 2011 I was denied an F-1 visa without any reason made known to me by the visa officer. But I was given a refusal sheet, which stated in some part of it that an appeal can be made only when my previous situation has changed. On the grounds of this, in order not to be refused an F-1 visa a second time, I would plead with you, for Christ's sake, to help me with any useful tips pertaining to the successful acquisition of an F-1 visa after being rejected the first time. I hope to hear from you soon; I count on your usual cooperation. Also, I heard a rumor that once a student has been rejected an F-1 visa on the first attempt, the chance of getting it the second time is very slim. If so, please, how is that possible? I would like to know the truth in this rumor. First of all, chill and don't get upset. Book another visa interview appointment. 2) It helps your case strongly when you apply to a university with a good reputation. However, reputation alone isn't going to suffice; you need to be sure yourself as to why the chosen university is the one you want to go to. Fees could be a reason. But mentioning that as a primary reason could make the VO wonder why you are choosing a very average university with low fees — is it to somehow make it to the US? You have to convince the VO that the university is offering you good opportunities for research, learning, etc., from faculty who are renowned and/or are researching areas you are interested in. UF as in the University of Florida, right? You got admission into UF, and you chose SMSU, for chemistry. Let's clear some things up. UF is a top public institution with a very good chemistry department.
Instead of going to the best university you got admitted into, you opted for a very low-ranked regional university, and the reason you gave was the tuition fees. A good student doesn't do that, ever. You'll get rejected straight away if the best reason you have for picking a university is its low tuition fees — and furthermore when you already have admission into another university which is leagues ahead of the one you opted for. If you can, I'd advise you to get an I-20 from UF, if you don't have it already, and opt to go there, because frankly speaking, if you go through this same interview and say the same things again, you'll get rejected. If they ask you again why SMSU, and you have nothing else to say other than the low-tuition-fee reason, you'll get rejected again. Schedule another appointment, and this time carry your offer letter with you! When asked "Why this university?", you must answer in a subject-oriented way: say that you are interested in the research work going on over there and that you particularly selected this university because the coursework matches the subjects you are interested in. Tell him about the research work going on and try explaining it to him. I think he'll be impressed by hearing this and may think that you have gathered enough information regarding the university!
from math import sqrt, pi, sin, cos, tan, atan2 as arctan2

def grid_to_longlat(E,N):
    """Convert OSGB36 British National Grid coordinates to WGS84 lat/lon.

    E, N -- eastings and northings in metres.
    Returns (latitude, longitude) in decimal degrees.

    The inverse transverse Mercator projection is evaluated on the
    Airy 1830 ellipsoid, then a Helmert transform moves the result onto
    the GRS80/WGS84 ellipsoid.
    """
    #E, N are the British national grid coordinates - eastings and northings
    a, b = 6377563.396, 6356256.909  #The Airy 1830 semi-major and semi-minor axes used for OSGB36 (m)
    F0 = 0.9996012717  #scale factor on the central meridian
    lat0 = 49*pi/180  #Latitude of true origin (radians)
    lon0 = -2*pi/180  #Longitude of true origin and central meridian (radians)
    N0, E0 = -100000, 400000  #Northing & easting of true origin (m)
    e2 = 1 - (b*b)/(a*a)  #eccentricity squared
    n = (a-b)/(a+b)

    #Initialise the iterative variables
    lat,M = lat0, 0
    while N-N0-M >= 0.00001:  #Accurate to 0.01mm
        lat = (N-N0-M)/(a*F0) + lat;
        M1 = (1 + n + (5./4)*n**2 + (5./4)*n**3) * (lat-lat0)
        M2 = (3*n + 3*n**2 + (21./8)*n**3) * sin(lat-lat0) * cos(lat+lat0)
        M3 = ((15./8)*n**2 + (15./8)*n**3) * sin(2*(lat-lat0)) * cos(2*(lat+lat0))
        M4 = (35./24)*n**3 * sin(3*(lat-lat0)) * cos(3*(lat+lat0))
        #meridional arc
        M = b * F0 * (M1 - M2 + M3 - M4)

    #transverse radius of curvature
    nu = a*F0/sqrt(1-e2*sin(lat)**2)

    #meridional radius of curvature
    rho = a*F0*(1-e2)*(1-e2*sin(lat)**2)**(-1.5)
    eta2 = nu/rho-1

    #Series coefficients for the inverse projection (OS transverse Mercator).
    secLat = 1./cos(lat)
    VII = tan(lat)/(2*rho*nu)
    VIII = tan(lat)/(24*rho*nu**3)*(5+3*tan(lat)**2+eta2-9*tan(lat)**2*eta2)
    IX = tan(lat)/(720*rho*nu**5)*(61+90*tan(lat)**2+45*tan(lat)**4)
    X = secLat/nu
    XI = secLat/(6*nu**3)*(nu/rho+2*tan(lat)**2)
    XII = secLat/(120*nu**5)*(5+28*tan(lat)**2+24*tan(lat)**4)
    XIIA = secLat/(5040*nu**7)*(61+662*tan(lat)**2+1320*tan(lat)**4+720*tan(lat)**6)
    dE = E-E0

    #These are on the wrong ellipsoid currently: Airy1830. (Denoted by _1)
    lat_1 = lat - VII*dE**2 + VIII*dE**4 - IX*dE**6
    lon_1 = lon0 + X*dE - XI*dE**3 + XII*dE**5 - XIIA*dE**7

    #Want to convert to the GRS80 ellipsoid.
    #First convert to cartesian from spherical polar coordinates
    H = 0  #Third spherical coord.
    x_1 = (nu/F0 + H)*cos(lat_1)*cos(lon_1)
    y_1 = (nu/F0+ H)*cos(lat_1)*sin(lon_1)
    z_1 = ((1-e2)*nu/F0 +H)*sin(lat_1)

    #Perform Helmert transform (to go between Airy 1830 (_1) and GRS80 (_2))
    s = -20.4894*10**-6  #The scale factor -1
    tx, ty, tz = 446.448, -125.157, + 542.060  #The translations along x,y,z axes respectively
    rxs,rys,rzs = 0.1502, 0.2470, 0.8421  #The rotations along x,y,z respectively, in seconds
    rx, ry, rz = rxs*pi/(180*3600.), rys*pi/(180*3600.), rzs*pi/(180*3600.)  #In radians
    x_2 = tx + (1+s)*x_1 + (-rz)*y_1 + (ry)*z_1
    y_2 = ty + (rz)*x_1 + (1+s)*y_1 + (-rx)*z_1
    z_2 = tz + (-ry)*x_1 + (rx)*y_1 + (1+s)*z_1

    #Back to spherical polar coordinates from cartesian
    #Need some of the characteristics of the new ellipsoid
    a_2, b_2 =6378137.000, 6356752.3141  #The GRS80 semi-major and semi-minor axes used for WGS84 (m)
    e2_2 = 1- (b_2*b_2)/(a_2*a_2)  #The eccentricity of the GRS80 ellipsoid
    p = sqrt(x_2**2 + y_2**2)

    #Lat is obtained by an iterative procedure:
    lat = arctan2(z_2,(p*(1-e2_2)))  #Initial value
    latold = 2*pi
    while abs(lat - latold)>10**-16:
        lat, latold = latold, lat
        nu_2 = a_2/sqrt(1-e2_2*sin(latold)**2)
        lat = arctan2(z_2+e2_2*nu_2*sin(latold), p)

    #Lon and height are then pretty easy
    lon = arctan2(y_2,x_2)
    H = p/cos(lat) - nu_2

    #Uncomment this line if you want to print the residuals:
    #print([(lat-lat_1)*180/pi, (lon - lon_1)*180/pi])

    #Convert to degrees
    lat = lat*180/pi
    lon = lon*180/pi

    #Job's a good'n.
    return lat, lon
::Finding Nemo:: is getting me lately too — Marlin, Nemo’s dad, is ruled by fear, and rightfully so after the way the story begins, losing his wife and all of his children but one, but he sacrifices his safety (and really everything about who he’d become) again and again to try to find Nemo — the scene when Nemo learns his dad has become a legend in the ocean because of all he’s done to try to find him is just so powerful. ::Inside Out:: (lots of people said this one… such a good example — Bing Bong!) I actually totally relate to having some examples that are embarrassing to admit. I mentioned in prayer the other week that, watching How to Train Your Dragon 2 with my son — which has a character sacrifice themselves for another character — I totally got teary; it’s not even Pixar, it’s a perfectly good story, but by no means the best, and still it gets me. Self-sacrificial love is incredible. There is no more powerful storyline. There is something about it that taps into a deep place in us — where it just feels right; it feels like the purest, most vivid version of love. Well, as you may know, this is a good time of year to think about self-sacrificial love — we’re in the middle of the season of Lent, the 40 days every year in the church calendar that lead up to Good Friday and Easter Sunday, when we mark Jesus’ death and resurrection — the God of the universe choosing self-sacrificial love to save humanity. And here at BLV this Lent, we’ve talked about some different perspectives on: what exactly does humanity need saving from? Why was the self-sacrificial love of God — ::Jesus’ death on the cross:: — so necessary? Is it that, in some way, we actually need saving from God? From God’s anger at our sin, which needs to be appeased via some sort of punishment? For many people over the last hundred or so years of history this is a perspective that has felt important. But, just to speak for me personally, this perspective feels like it has some real challenges.
Honestly, it makes me think God has a ::good-cop / bad-cop split personality::. Like we need saving from the angry, bad-cop father god who demands punishment, but the good-cop son Jesus takes dad’s beating for us? And that’s what it means that we are saved? Saved by God, (PAUSE) from God? So is God the Angry Father or the Loving and Selfless Son? I don’t think I am alone in feeling this - I wonder if you have ever struggled with this split personality view of God? And so because of this we’ve been looking at some other perspectives on the power and meaning of ::Jesus’ death on the Cross::. We’ve talked about how: perhaps we need saving from ourselves. From the evil that is so often in us, human beings. We humans are the ones who so often demand blood and demand “us vs them” judgments, not God! — as a result of our resentments and unforgiveness and desires for revenge and needs to feel justified or important or secure. That’s all in all of us. But God is so full of love and compassion for us that, in Jesus, he enters into our brokenness, and sacrifices himself — To become himself the victim of our bloodthirsty-ness, become himself the “them” to our “us”, so that no one else has to be victimized, excluded, or judged, or hurt at human hands. and to stop us when we are making others our victims. This perspective on Jesus’ death on the cross speaks to me. It feels like self-sacrificial love, it calls to that deep place in us I’m talking about. There is no greater love, because when we experience this or witness this, it is self-evidently inspiring and incredible and captivating like nothing else. It tugs at something inside of every heart that naturally stands up to take notice — Oh, wow! I may not be able to express all I’m feeling, but I kinda want to cry right now. Or I just want my life to be about something bigger right now. 
Because there’s just nothing that grabs me, moves me, speaks to me more than self-sacrificial love, and because it does feel like there are actually quite a lot of wonderful portrayals of self-sacrificial love in our world, I think there is huge opportunity for you and I and anyone to find Jesus showing up in our lives constantly. I’ve gotten to the point now in life that every time I see self-sacrificial love in a story, I feel Jesus, I feel the presence of God, I can feel God close to me. Keziah and I often end our evenings watching something on Netflix or Amazon Prime or whatever, and we like character-driven dramas anything from Stranger Things to Call The Midwife (or Lost back in the day), or we like heady comedies like the Good Place, or we like every Pixar movie, or fantasy epics like Harry Potter or Lord of the Rings, and again, what is a key important storyline in every one of these? Self-sacrificial love. And honestly, my experience now whenever we come to such a storyline is so much more than just appreciating a powerful or well-done story, it is spiritual — I feel God close to me. I suddenly feel like: it’s so simple — love, laying down one’s self for a friend, that is what life takes! All of the complicated mess of my life and everyone’s life who I care about and our world is, yes, so friggin’ complicated, but in this moment it feels overcome-able with the simplicity of self-sacrificial love — with Jesus. ::St. Paul wrote that all he knows, all he needs to know, or all anyone needs to know is “Christ Crucified”.:: Like, what if at the center of all things, of all of life, is self-sacrificial love — what Christ Crucified shows us — and, if we look for it, life can point us back to that center everyday, constantly. What if we could see Christ Crucified everywhere? 
And, again, when I say that, I don’t mean that we see a guilt trip everywhere, or that we feel emotionally manipulated constantly… I mean: what if we could see everywhere constant reminders that at the center of all things, of all of life, is self-sacrificial love? — the self-sacrificial love of God. I recently felt powerfully drawn, in a way I never have before, to the Bible’s accounts of ::Jesus in Gethsemane::, when Jesus comes to terms with his destiny of self sacrifice. And the reason was because I had just re-read J.K. Rowling’s “The Forest Again” chapter in the last Harry Potter book, where Harry comes to terms with his destiny that he has to sacrifice himself for his friends, and then humbly and courageously walks into it. The humanity of that account, Harry’s fear yet resolve that we’re taken into — it just helped me access so much more the “love“ in Jesus’ self-sacrificial act. This is self sacrificial love we’re talking about. Not a self sacrificial calculation, not a cold, unfeeling balancing of a cosmic equation. But love — that one has for another. Seriously, if you know Harry Potter, read that chapter, and then read the Gospel accounts of Jesus in Gethsemane — so powerful. An idea can have a huge impact on me and stay with me, for sure, but a relationship is another level of impact. That’s a spiritual impact. Our relationships are the things in our lives that shape us the most. So how might it change us to have more than just an appreciation for self-sacrificial love, but to have a relationship with Self-Sacrificial Love in personal form? How might that inject meaning and purpose and perspective into your everyday? How might that bring unexplainable hope to you as you think about the world, instead of the crippling fear and worry we’re all so used to? How much less lonely and isolated could you feel? How much more loved and settled and confident in your own shoes could you feel on a daily basis? 
How might that help shake you out of the apathy of the Modern American consumer/producer conveyor belt? or every time we see someone stand for a victim of stereotyping or scapegoating by calling out racist or prejudice behavior. What if every one of those moments is a spiritual opportunity that we can take? What if having a life shot-through with a feeling of connection to God is actually not as far away from any of us as we might think? And what if that starts with something we all are already naturally captivated by? Self-sacrificial love! It doesn’t start with feeling guilty, it doesn’t start with believing you deserve to be punished. It starts with feeling loved by God to the point that, if push came to shove, God would want you to live over himself. That feels backwards. That feels like it shouldn’t be that way. But this is, I think, what the message of the 40 days of Lent is. When St. Paul wrote about knowing only Christ Crucified, he acknowledged that such a message will feel like foolishness to most of the world, but that even God’s foolishness is wiser than human wisdom. I love that. Every one of our lives, and the whole world, moves forward through self-sacrificial love. Whether in the high stakes of life, like epic stories like Harry Potter try to pull us into. Or in the low stakes of life, through simple choices of self-sacrificial love made by all of us as spouses, parents, children, students, employees, employers, voters, consumers, neighbors, citizens of the world. But then don’t stop at just the nice feeling you get. Try to take what you see as a spiritual opportunity. Pray when this happens, try to turn your awareness to Jesus when you see self-sacrificial love. Try to take a moment, ask Jesus to speak to you when you see it. Or maybe God is encouraging us to try to do the same? Just let what you witnessed affect you for a minute so it can change you? So God can change you. 
Make you feel more ready to choose self-sacrifice the next time life presents an opportunity to you. Stand with me, and let’s pray.
from flask_jwt_extended import jwt_required
from flask_restful import reqparse

from zou.app.models.project import Project
from zou.app.models.project_status import ProjectStatus
from zou.app.services import (
    deletion_service,
    projects_service,
    shots_service,
    user_service,
)
from zou.app.utils import permissions, fields

from .base import BaseModelResource, BaseModelsResource


class ProjectsResource(BaseModelsResource):
    """Collection resource for projects (listing and creation)."""

    def __init__(self):
        BaseModelsResource.__init__(self, Project)

    def add_project_permission_filter(self, query):
        """Admins see every project; other users only see the projects
        they are related to."""
        if permissions.has_admin_permissions():
            return query
        else:
            return query.filter(user_service.build_related_projects_filter())

    def check_read_permissions(self):
        # Listing is open to everyone; visibility is narrowed per
        # project by add_project_permission_filter.
        return True

    def update_data(self, data):
        """Default the project status to the "open" status when the
        caller did not provide one."""
        open_status = projects_service.get_or_create_open_status()
        if "project_status_id" not in data:
            data["project_status_id"] = open_status["id"]
        return data

    def post_creation(self, project):
        """After creation, bootstrap TV shows with a first episode and
        clear caches so the new project is visible immediately."""
        project_dict = project.serialize()
        if project.production_type == "tvshow":
            episode = shots_service.create_episode(project.id, "E01")
            project_dict["first_episode_id"] = fields.serialize_value(
                episode["id"]
            )
        user_service.clear_project_cache()
        projects_service.clear_project_cache("")
        return project_dict


class ProjectResource(BaseModelResource):
    """Single-project resource (read, update, delete)."""

    def __init__(self):
        BaseModelResource.__init__(self, Project)
        # The team field is managed through dedicated routes, never via
        # a plain PUT on the project.
        self.protected_fields.append("team")

    def check_read_permissions(self, project):
        user_service.check_project_access(project["id"])

    def post_update(self, project_dict):
        """Ensure TV shows always expose a first episode after an
        update, then invalidate the cached project data."""
        if project_dict["production_type"] == "tvshow":
            episode = shots_service.get_or_create_first_episode(
                project_dict["id"]
            )
            project_dict["first_episode_id"] = fields.serialize_value(
                episode["id"]
            )
        projects_service.clear_project_cache(project_dict["id"])
        return project_dict

    def clean_get_result(self, data):
        """Add the human readable status name to the returned payload."""
        project_status = ProjectStatus.get(data["project_status_id"])
        data["project_status_name"] = project_status.name
        return data

    def post_delete(self, project_dict):
        projects_service.clear_project_cache(project_dict["id"])

    @jwt_required
    def delete(self, instance_id):
        """Delete a project.

        Only closed projects can be deleted (400 otherwise).  With the
        ``force`` query argument set, the project and all of its related
        data are removed permanently; otherwise only the project row is
        deleted.
        """
        parser = reqparse.RequestParser()
        parser.add_argument("force", default=False, type=bool)
        args = parser.parse_args()

        project = self.get_model_or_404(instance_id)
        project_dict = project.serialize()
        if projects_service.is_open(project_dict):
            return {
                "error": True,
                "message": "Only closed projects can be deleted",
            }, 400

        self.check_delete_permissions(project_dict)
        # Idiom fix: was `args["force"] == True` (non-idiomatic
        # comparison against a literal boolean, PEP 8 / E712).
        if args["force"]:
            deletion_service.remove_project(instance_id)
        else:
            project.delete()
        self.post_delete(project_dict)
        return "", 204
Finally thirty days, it absolutely was discovered that pony animal meat was in fact present in burgers for sale in the uk and Ireland. Another great strategy to fight anemia-related signs would be to carefully oxygenate the body through physical exercise and deep breathing. Nowadays you'll want to begin the right diet. This is just what we should dependence on a busy day and rapidly changing times. Discussions later verified my personal suspicions. It is distinguished that using tobacco is just not advantageous to health and physical fitness. The co mingling of resources at the level should generate automatic regulating and congressional oversight. > While purchasing boxing mouthguard be certain that you're comfy putting on it. It used to be there deals had been the special right of the rich and famous, and just in far-flung luxury hotels. All these activities call for great correctness so because of this the excellence of Tissot business might dependable for high dependability. Many of us are constantly reminded of this each time we turn on the news. If you don't make eye contact and talk clearly during social encounters, everyone is not attending elevates seriously. Once you understand what you yourself are doing try to weight lift no less than 2 times every week. Obtained since amended that issue and as opposed to becoming 12-0, obtained their own five losings. Maybe you're hoping to instruct your child some duty by providing them an animal of their own. It is a Pinoy social pertinent question everyone in the Philippines can relate genuinely to. This is basically the a very important factor i truly want from Santa. It acted as an official timekeeper in a number of major sports activities such as for example Davis Cup (1957) and Switzerland down hill snowboarding (1938). 
It really Mauro Luiz Soares Zamprogno - pt.wikipedia.org - is little wonder then they increased into obstacle of making a special class allow friends and loved ones to deliver their condolences in the loss in these a beloved 'family member'. Understanding the complimentary product build to suit your personality increases the energy and immediate harm. At this stage, unconditional really love is required to empower and enable boys making use of the confidence to take risks, engage with globally, and purpose precisely in community. If you are not a specialist user, buy Velcro gloves. It needs to be able the actual limitations of its overall performance including going after suspects at significantly more than 100 mph, be able to deal with crude landscapes and being rammed into or being used as a battering ram. As soon as you reach your target weight you're going to want to resume typical eating patterns. Thus remember these useful tips before distributing your pr release news. The organization is actually targeting growth in silver production to 9.0 million ounces annually within five years when both these project come onstream, it stated. Among the many problems with this will be you could find a lot better approaches to approach and conserve to suit your pension alone.
#-------------------------------------------------------------------------------------------------------------------
#Name      : Fixed Size Hash Table
#Purpose   : Fixed size Hash Table implementation in python using open addressing method for educational purpose
#Author    : Atul Kumar
#Created   : 07/07/2016
#License   : GPL V3
#Copyright : (c) 2016 Atul Kumar (www.facebook.com/atul.kr.007)
#Any corrections and suggestions for optimization are welcome :)
#-------------------------------------------------------------------------------------------------------------------

class HashTable:
    """Fixed-size hash table using open addressing with linear probing.

    Keys may be integers or alphanumeric strings.  Looking up a missing
    key returns None.  Inserting a NEW key into a full table raises.
    """

    def __init__(self):
        # size must be a prime number for the collision resolution
        # algorithm (linear probing) to visit every slot.
        self.size = 11
        self.slot = [None] * self.size   # stored keys
        self.data = [None] * self.size   # stored values (parallel to slot)
        self.emptyCount = self.size      # counts empty slots left in the table

    def hash_function(self, key, size):
        """Map ``key`` (int, or alphanumeric string) to a slot index.

        Strings are folded to an int by summing digits as their numeric
        value and other characters as their ordinal.
        """
        try:
            if key.isalnum():  # key is an alphanumeric string
                total = 0
                for ch in key:
                    if ch.isdigit():
                        total += int(ch)
                    else:
                        total += ord(ch)
                key = total
        except AttributeError:
            # key is an integer (no .isalnum) -- use it directly.
            # Narrowed from a bare `except:` which hid real errors.
            pass
        return key % size

    def rehash(self, old_hash, size):
        """Collision resolution with linear probing."""
        return (old_hash + 1) % size

    def put(self, key, data):
        """Insert ``data`` under ``key``, replacing any previous value.

        Raises Exception only when the table is full AND ``key`` is not
        already present.
        """
        hash_value = self.hash_function(key, len(self.slot))
        if self.slot[hash_value] is None:
            self.slot[hash_value] = key
            self.data[hash_value] = data
            self.emptyCount -= 1
        elif self.slot[hash_value] == key:
            self.data[hash_value] = data  # replace
        else:
            # Probe for the key or a free slot.  Stop after one full
            # cycle so a completely full table cannot loop forever.
            # Bug fix: the old code refused to probe at all when the
            # table was full, so UPDATING an existing key in a full
            # table incorrectly raised "Hash table is full".
            next_slot = self.rehash(hash_value, len(self.slot))
            while (self.slot[next_slot] is not None
                   and self.slot[next_slot] != key
                   and next_slot != hash_value):
                next_slot = self.rehash(next_slot, len(self.slot))
            if self.slot[next_slot] is None:
                self.slot[next_slot] = key
                self.data[next_slot] = data
                self.emptyCount -= 1
            elif self.slot[next_slot] == key:
                self.data[next_slot] = data  # replace
            else:
                raise Exception("Hash table is full")

    def get(self, key):
        """Return the value stored under ``key``, or None if absent."""
        hash_value = self.hash_function(key, len(self.slot))
        data = None
        found = False
        stop = False
        pos = hash_value
        while self.slot[pos] is not None and not found and not stop:
            if self.slot[pos] == key:
                found = True
                data = self.data[pos]
            else:
                pos = self.rehash(pos, len(self.slot))
                if pos == hash_value:
                    # Probed every slot once without finding the key.
                    stop = True
        return data

    def isAnyEmpty(self):
        # NOTE: despite the name, this returns True when the table is
        # FULL (no empty slot left).  Kept for backward compatibility.
        return self.emptyCount == 0

    def __getitem__(self, key):
        return self.get(key)

    def __setitem__(self, key, data):
        self.put(key, data)
Born in 1965, Dr Suwailem is a prominent expert in the field of Islamic Finance, a senior economist, Islamic researcher and author of several specialised books. Dr Suwailem holds an MA in Economics from Southern Illinois University and a PhD in Economics from Washington University, St Louis, Missouri. He was the Deputy Director at IRTI from 2007 to 2009. Dr Suwailem is currently a lecturer with The Knowledge International University (KIU), teaching "The Fiqh of Finance". He regularly contributes to the online Distance Learning lectures delivered by IRTI to MIHE’s Islamic Finance students.
import copy
import dataclasses
from dataclasses import dataclass
from typing import Dict, Tuple, Iterator

from randovania.game_description.area_location import AreaLocation
from randovania.game_description.assignment import PickupAssignment, GateAssignment, PickupTarget
from randovania.game_description.dock import DockWeakness, DockConnection
from randovania.game_description.echoes_game_specific import EchoesGameSpecific
from randovania.game_description.hint import Hint
from randovania.game_description.resources.logbook_asset import LogbookAsset
from randovania.game_description.resources.pickup_index import PickupIndex
from randovania.game_description.resources.resource_info import CurrentResources
from randovania.game_description.resources.resource_type import ResourceType


@dataclass(frozen=True)
class GamePatches:
    """Determines patches that are made to the game's data.

    Immutable: every `assign_*` method returns a modified copy of self.
    Currently we support:
    * Swapping pickup locations
    """
    player_index: int
    pickup_assignment: PickupAssignment
    elevator_connection: Dict[int, AreaLocation]
    dock_connection: Dict[Tuple[int, int], DockConnection]
    dock_weakness: Dict[Tuple[int, int], DockWeakness]
    translator_gates: GateAssignment
    starting_items: CurrentResources
    starting_location: AreaLocation
    hints: Dict[LogbookAsset, Hint]
    game_specific: EchoesGameSpecific

    def assign_new_pickups(self, assignments: Iterator[Tuple[PickupIndex, PickupTarget]]) -> "GamePatches":
        """Return a copy of self with the given pickups added.

        Each index must not already have a pickup assigned.
        """
        updated_assignment = copy.copy(self.pickup_assignment)

        for pickup_index, target in assignments:
            assert pickup_index not in updated_assignment
            updated_assignment[pickup_index] = target

        return dataclasses.replace(self, pickup_assignment=updated_assignment)

    def assign_pickup_assignment(self, assignment: PickupAssignment) -> "GamePatches":
        """Return a copy of self with every entry of `assignment` added."""
        return self.assign_new_pickups(assignment.items())

    def assign_gate_assignment(self, assignment: GateAssignment) -> "GamePatches":
        """Return a copy of self with the given translator gates configured.

        Each gate must be a GATE_INDEX resource not yet assigned.
        """
        updated_gates = copy.copy(self.translator_gates)

        for gate_resource, translator in assignment.items():
            assert gate_resource not in updated_gates
            assert gate_resource.resource_type == ResourceType.GATE_INDEX
            updated_gates[gate_resource] = translator

        return dataclasses.replace(self, translator_gates=updated_gates)

    def assign_starting_location(self, location: AreaLocation) -> "GamePatches":
        """Return a copy of self that spawns at `location`."""
        return dataclasses.replace(self, starting_location=location)

    def assign_extra_initial_items(self, new_resources: CurrentResources) -> "GamePatches":
        """Return a copy of self with `new_resources` merged into the
        starting items.  Only ITEM resources are accepted."""
        updated_items = copy.copy(self.starting_items)

        for resource, quantity in new_resources.items():
            if resource.resource_type != ResourceType.ITEM:
                raise ValueError("Only ITEM is supported as extra initial items, got {}".format(resource.resource_type))
            updated_items[resource] = updated_items.get(resource, 0) + quantity

        return dataclasses.replace(self, starting_items=updated_items)

    def assign_hint(self, logbook: LogbookAsset, hint: Hint) -> "GamePatches":
        """Return a copy of self with `hint` attached to `logbook`."""
        updated_hints = copy.copy(self.hints)
        updated_hints[logbook] = hint
        return dataclasses.replace(self, hints=updated_hints)
We get our fruit from Komati, a funky shop in Obz. They aren’t completely on the same page as us with our desire to source things as ethically as possible BUT they do make an effort to have locally produced items, they do have some certified organic products, their quality is good, and they are a very nice small company to work with. According to Leanne at Komati all of their products are either organic (labelled organic) or non-GMO. See www.komatifoods.co.za for more info.
#
# Copyright (c) 2004 Conectiva, Inc.
#
# Written by Gustavo Niemeyer <niemeyer@conectiva.com>
#
# This file is part of Smart Package Manager.
#
# Smart Package Manager is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as published
# by the Free Software Foundation; either version 2 of the License, or (at
# your option) any later version.
#
# Smart Package Manager is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Smart Package Manager; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
from smart.interfaces.qt.interface import QtInterface
from smart.interfaces.qt import getPixmap, centerWindow
from smart import *
import time
import qt


class QtCommandInterface(QtInterface):
    # Command-driven Qt interface: runs a command while a small
    # transient status dialog keeps the user informed.  The event loop
    # is pumped manually after UI changes so the dialog paints even
    # though the command runs on the same thread.

    def __init__(self, ctrl, argv=None):
        QtInterface.__init__(self, ctrl, argv)
        self._status = QtStatus()

    def showStatus(self, msg):
        # Show the status dialog, then drain pending Qt events so the
        # new message is actually rendered before work continues.
        self._status.show(msg)
        while qt.QApplication.eventLoop().hasPendingEvents():
            qt.QApplication.eventLoop().processEvents(qt.QEventLoop.AllEvents)

    def hideStatus(self):
        self._status.hide()
        while qt.QApplication.eventLoop().hasPendingEvents():
            qt.QApplication.eventLoop().processEvents(qt.QEventLoop.AllEvents)

    def run(self, command=None, argv=None):
        # Run the command, then keep the UI responsive until the status
        # dialog's minimum display time has elapsed and the log window
        # (self._log comes from QtInterface) has been closed by the user.
        result = QtInterface.run(self, command, argv)
        self._status.wait()
        while self._log.isVisible():
            time.sleep(0.1)
            while qt.QApplication.eventLoop().hasPendingEvents():
                qt.QApplication.eventLoop().processEvents(qt.QEventLoop.AllEvents)
        return result


class QtStatus(object):
    # Small modal dialog that shows a one-line status message.

    def __init__(self):
        self._window = qt.QDialog()
        self._window.setIcon(getPixmap("smart"))
        self._window.setCaption(_("Status"))
        self._window.setModal(True)
        self._vbox = qt.QVBox(self._window)
        self._vbox.setMargin(20)
        self._label = qt.QLabel(self._vbox)
        self._label.show()
        self._lastshown = 0  # time.time() of the last show() call

    def show(self, msg):
        # Update the label, resize the dialog to fit, center it, and
        # pump the event loop so the change is visible immediately.
        self._label.setText(msg)
        self._vbox.adjustSize()
        self._window.adjustSize()
        self._window.show()
        centerWindow(self._window)
        self._lastshown = time.time()
        while qt.QApplication.eventLoop().hasPendingEvents():
            qt.QApplication.eventLoop().processEvents(qt.QEventLoop.AllEvents)

    def hide(self):
        self._window.hide()

    def isVisible(self):
        return self._window.isVisible()

    def wait(self):
        # Keep the dialog on screen for at least 3 seconds after the
        # last show() so fast commands do not just flash the window.
        while self.isVisible() and self._lastshown+3 > time.time():
            time.sleep(0.3)
            while qt.QApplication.eventLoop().hasPendingEvents():
                qt.QApplication.eventLoop().processEvents(qt.QEventLoop.AllEvents)

# vim:ts=4:sw=4:et
Research areas: program verification, static analysis, logic. 6 Should the question about structure of resolution proofs be closed? 15 What could be some potentially useful mathematical databases?
# Copyright 2019 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Efficient ImageNet input pipeline using tf.data.Dataset.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import abc import collections import functools import os from absl import logging import six import tensorflow.compat.v1 as tf import preprocessing def build_image_serving_input_fn(image_size, batch_size=None, resize_method=None): """Builds a serving input fn for raw images.""" def _image_serving_input_fn(): """Serving input fn for raw images.""" def _preprocess_image(image_bytes): """Preprocess a single raw image.""" image = preprocessing.preprocess_image( image_bytes=image_bytes, is_training=False, image_size=image_size, resize_method=resize_method) return image image_bytes_list = tf.placeholder( shape=[batch_size], dtype=tf.string, ) images = tf.map_fn( _preprocess_image, image_bytes_list, back_prop=False, dtype=tf.float32) return tf.estimator.export.ServingInputReceiver( images, {'image_bytes': image_bytes_list}) return _image_serving_input_fn class ImageNetTFExampleInput(six.with_metaclass(abc.ABCMeta, object)): """Base class for ImageNet input_fn generator.""" def __init__(self, is_training, use_bfloat16, num_cores=8, image_size=224, transpose_input=False, num_label_classes=1000, include_background_label=False, 
augment_name=None, mixup_alpha=0.0, randaug_num_layers=None, randaug_magnitude=None, resize_method=None): """Constructor. Args: is_training: `bool` for whether the input is for training use_bfloat16: If True, use bfloat16 precision; else use float32. num_cores: `int` for the number of TPU cores image_size: `int` for image size (both width and height). transpose_input: 'bool' for whether to use the double transpose trick num_label_classes: number of label classes. Default to 1000 for ImageNet. include_background_label: If true, label #0 is reserved for background. augment_name: `string` that is the name of the augmentation method to apply to the image. `autoaugment` if AutoAugment is to be used or `randaugment` if RandAugment is to be used. If the value is `None` no no augmentation method will be applied applied. See autoaugment.py for more details. mixup_alpha: float to control the strength of Mixup regularization, set to 0.0 to disable. randaug_num_layers: 'int', if RandAug is used, what should the number of layers be. See autoaugment.py for detailed description. randaug_magnitude: 'int', if RandAug is used, what should the magnitude be. See autoaugment.py for detailed description. resize_method: If None, use bicubic in default. 
""" self.image_preprocessing_fn = preprocessing.preprocess_image self.is_training = is_training self.use_bfloat16 = use_bfloat16 self.num_cores = num_cores self.transpose_input = transpose_input self.image_size = image_size self.include_background_label = include_background_label self.num_label_classes = num_label_classes if include_background_label: self.num_label_classes += 1 self.augment_name = augment_name self.mixup_alpha = mixup_alpha self.randaug_num_layers = randaug_num_layers self.randaug_magnitude = randaug_magnitude self.resize_method = resize_method def set_shapes(self, batch_size, images, labels): """Statically set the batch_size dimension.""" if self.transpose_input: images.set_shape(images.get_shape().merge_with( tf.TensorShape([None, None, None, batch_size]))) labels.set_shape(labels.get_shape().merge_with( tf.TensorShape([batch_size, None]))) # Convert to R1 tensors for fast transfer to device. images = tf.reshape(images, [-1]) else: images.set_shape(images.get_shape().merge_with( tf.TensorShape([batch_size, None, None, None]))) labels.set_shape(labels.get_shape().merge_with( tf.TensorShape([batch_size, None]))) return images, labels def mixup(self, batch_size, alpha, images, labels): """Applies Mixup regularization to a batch of images and labels. [1] Hongyi Zhang, Moustapha Cisse, Yann N. Dauphin, David Lopez-Paz Mixup: Beyond Empirical Risk Minimization. ICLR'18, https://arxiv.org/abs/1710.09412 Arguments: batch_size: The input batch size for images and labels. alpha: Float that controls the strength of Mixup regularization. images: A batch of images of shape [batch_size, ...] labels: A batch of labels of shape [batch_size, num_classes] Returns: A tuple of (images, labels) with the same dimensions as the input with Mixup regularization applied. """ mix_weight = tf.distributions.Beta(alpha, alpha).sample([batch_size, 1]) mix_weight = tf.maximum(mix_weight, 1. 
- mix_weight) images_mix_weight = tf.cast( tf.reshape(mix_weight, [batch_size, 1, 1, 1]), images.dtype) # Mixup on a single batch is implemented by taking a weighted sum with the # same batch in reverse. images_mix = ( images * images_mix_weight + images[::-1] * (1. - images_mix_weight)) labels_mix = labels * mix_weight + labels[::-1] * (1. - mix_weight) return images_mix, labels_mix def dataset_parser(self, value): """Parses an image and its label from a serialized ResNet-50 TFExample. Args: value: serialized string containing an ImageNet TFExample. Returns: Returns a tuple of (image, label) from the TFExample. """ keys_to_features = { 'image/encoded': tf.FixedLenFeature((), tf.string, ''), 'image/class/label': tf.FixedLenFeature([], tf.int64, -1), } parsed = tf.parse_single_example(value, keys_to_features) image_bytes = tf.reshape(parsed['image/encoded'], shape=[]) image = self.image_preprocessing_fn( image_bytes=image_bytes, is_training=self.is_training, image_size=self.image_size, use_bfloat16=self.use_bfloat16, augment_name=self.augment_name, randaug_num_layers=self.randaug_num_layers, randaug_magnitude=self.randaug_magnitude, resize_method=self.resize_method) # The labels will be in range [1,1000], 0 is reserved for background label = tf.cast( tf.reshape(parsed['image/class/label'], shape=[]), dtype=tf.int32) if not self.include_background_label: # Subtract 1 if the background label is discarded. label -= 1 onehot_label = tf.one_hot(label, self.num_label_classes) return image, onehot_label @abc.abstractmethod def make_source_dataset(self, index, num_hosts): """Makes dataset of serialized TFExamples. The returned dataset will contain `tf.string` tensors, but these strings are serialized `TFExample` records that will be parsed by `dataset_parser`. If self.is_training, the dataset should be infinite. Args: index: current host index. num_hosts: total number of hosts. Returns: A `tf.data.Dataset` object. 
""" return def input_fn(self, params): """Input function which provides a single batch for train or eval. Args: params: `dict` of parameters passed from the `TPUEstimator`. `params['batch_size']` is always provided and should be used as the effective batch size. Returns: A `tf.data.Dataset` object. """ # Retrieves the batch size for the current shard. The # of shards is # computed according to the input pipeline deployment. See # tf.estimator.tpu.RunConfig for details. batch_size = params['batch_size'] if 'context' in params: current_host = params['context'].current_input_fn_deployment()[1] num_hosts = params['context'].num_hosts else: current_host = 0 num_hosts = 1 dataset = self.make_source_dataset(current_host, num_hosts) # Use the fused map-and-batch operation. # # For XLA, we must used fixed shapes. Because we repeat the source training # dataset indefinitely, we can use `drop_remainder=True` to get fixed-size # batches without dropping any training examples. # # When evaluating, `drop_remainder=True` prevents accidentally evaluating # the same image twice by dropping the final batch if it is less than a full # batch size. As long as this validation is done with consistent batch size, # exactly the same images will be used. 
dataset = dataset.map(self.dataset_parser, 64).batch(batch_size, True) # Apply Mixup if self.is_training and self.mixup_alpha > 0.0: dataset = dataset.map( functools.partial(self.mixup, batch_size, self.mixup_alpha), num_parallel_calls=64) # Transpose for performance on TPU if self.transpose_input: dataset = dataset.map( lambda images, labels: (tf.transpose(images, [1, 2, 3, 0]), labels), num_parallel_calls=64) # Assign static batch size dimension dataset = dataset.map(functools.partial(self.set_shapes, batch_size), 64) # Prefetch overlaps in-feed with training dataset = dataset.prefetch(tf.data.experimental.AUTOTUNE) options = tf.data.Options() options.experimental_deterministic = False options.experimental_threading.max_intra_op_parallelism = 1 options.experimental_threading.private_threadpool_size = 48 dataset = dataset.with_options(options) return dataset class ImageNetInput(ImageNetTFExampleInput): """Generates ImageNet input_fn from a series of TFRecord files. The training data is assumed to be in TFRecord format with keys as specified in the dataset_parser below, sharded across 1024 files, named sequentially: train-00000-of-01024 train-00001-of-01024 ... train-01023-of-01024 The validation data is in the same format but sharded in 128 files. The format of the data required is created by the script at: https://github.com/tensorflow/tpu/blob/master/tools/datasets/imagenet_to_gcs.py """ def __init__(self, is_training, use_bfloat16, transpose_input, data_dir, image_size=224, num_parallel_calls=64, cache=False, num_label_classes=1000, include_background_label=False, augment_name=None, mixup_alpha=0.0, randaug_num_layers=None, randaug_magnitude=None, resize_method=None, holdout_shards=None): """Create an input from TFRecord files. Args: is_training: `bool` for whether the input is for training use_bfloat16: If True, use bfloat16 precision; else use float32. 
      transpose_input: 'bool' for whether to use the double transpose trick
      data_dir: `str` for the directory of the training and validation data;
        if 'null' (the literal string 'null') or implicitly False then
        construct a null pipeline, consisting of empty images and blank
        labels.
      image_size: `int` for image size (both width and height).
      num_parallel_calls: concurrency level to use when reading data from
        disk.
      cache: if true, fill the dataset by repeating from its cache.
      num_label_classes: number of label classes. Default to 1000 for
        ImageNet.
      include_background_label: if true, label #0 is reserved for background.
      augment_name: `string` that is the name of the augmentation method to
        apply to the image. `autoaugment` if AutoAugment is to be used or
        `randaugment` if RandAugment is to be used. If the value is `None`
        no augmentation method will be applied. See autoaugment.py for more
        details.
      mixup_alpha: float to control the strength of Mixup regularization,
        set to 0.0 to disable.
      randaug_num_layers: 'int', if RandAug is used, what should the number
        of layers be. See autoaugment.py for detailed description.
      randaug_magnitude: 'int', if RandAug is used, what should the
        magnitude be. See autoaugment.py for detailed description.
      resize_method: If None, use bicubic in default.
      holdout_shards: number of holdout training shards for validation.
    """
    # NOTE(review): `resize_method` is documented above but is not forwarded
    # to the base-class constructor here (ImageNetBigtableInput below does
    # forward it) -- confirm whether that is intentional.
    super(ImageNetInput, self).__init__(
        is_training=is_training,
        image_size=image_size,
        use_bfloat16=use_bfloat16,
        transpose_input=transpose_input,
        num_label_classes=num_label_classes,
        include_background_label=include_background_label,
        augment_name=augment_name,
        mixup_alpha=mixup_alpha,
        randaug_num_layers=randaug_num_layers,
        randaug_magnitude=randaug_magnitude)
    self.data_dir = data_dir
    # The literal string 'null' (e.g. coming from a config file) and any
    # falsy value both select the null-input pipeline.
    if self.data_dir == 'null' or not self.data_dir:
      self.data_dir = None
    self.num_parallel_calls = num_parallel_calls
    self.cache = cache
    self.holdout_shards = holdout_shards

  def _get_null_input(self, data):
    """Returns a null image (all black pixels).

    Args:
      data: element of a dataset, ignored in this method, since it produces
        the same null image regardless of the element.

    Returns:
      a tensor representing a null image.
    """
    del data  # Unused since output is constant regardless of input
    return tf.zeros([self.image_size, self.image_size, 3],
                    tf.bfloat16 if self.use_bfloat16 else tf.float32)

  def dataset_parser(self, value):
    """See base class."""
    if not self.data_dir:
      # Null pipeline: pair the synthetic image with an all-zero label
      # vector over 1000 classes.
      return value, tf.constant(0., tf.float32, (1000,))
    return super(ImageNetInput, self).dataset_parser(value)

  def make_source_dataset(self, index, num_hosts):
    """See base class."""
    if not self.data_dir:
      logging.info('Undefined data_dir implies null input')
      return tf.data.Dataset.range(1).repeat().map(self._get_null_input)

    if self.holdout_shards:
      # Split the 1024 training shards: the first `holdout_shards` files
      # serve as the validation set, the remainder as the training set.
      if self.is_training:
        filenames = [
            os.path.join(self.data_dir, 'train-%05d-of-01024' % i)
            for i in range(self.holdout_shards, 1024)
        ]
      else:
        filenames = [
            os.path.join(self.data_dir, 'train-%05d-of-01024' % i)
            for i in range(0, self.holdout_shards)
        ]
      # Log only a sample of the file list to keep logs readable.
      for f in filenames[:10]:
        logging.info('datafiles: %s', f)
      dataset = tf.data.Dataset.from_tensor_slices(filenames)
    else:
      file_pattern = os.path.join(
          self.data_dir, 'train-*' if self.is_training else 'validation-*')
      logging.info('datafiles: %s', file_pattern)
      dataset = tf.data.Dataset.list_files(file_pattern, shuffle=False)

    # For multi-host training, we want each hosts to always process the same
    # subset of files.  Each host only sees a subset of the entire dataset,
    # allowing us to cache larger datasets in memory.
    dataset = dataset.shard(num_hosts, index)

    # When caching, repetition happens after the cache fill (below) instead.
    if self.is_training and not self.cache:
      dataset = dataset.repeat()

    def fetch_dataset(filename):
      # Reads one TFRecord shard with a large read buffer.
      buffer_size = 8 * 1024 * 1024  # 8 MiB per file
      dataset = tf.data.TFRecordDataset(filename, buffer_size=buffer_size)
      return dataset

    # Read the data from disk in parallel
    dataset = dataset.interleave(
        fetch_dataset,
        cycle_length=self.num_parallel_calls,
        num_parallel_calls=self.num_parallel_calls,
        deterministic=False)

    if self.cache:
      dataset = dataset.cache().shuffle(1024 * 16).repeat()
    else:
      dataset = dataset.shuffle(1024)
    return dataset


# Defines a selection of data from a Cloud Bigtable.
BigtableSelection = collections.namedtuple('BigtableSelection', [
    'project', 'instance', 'table', 'prefix', 'column_family',
    'column_qualifier'
])


class ImageNetBigtableInput(ImageNetTFExampleInput):
  """Generates ImageNet input_fn from a Bigtable for training or evaluation.
  """

  def __init__(self,
               is_training,
               use_bfloat16,
               transpose_input,
               selection,
               augment_name=None,
               num_label_classes=1000,
               include_background_label=False,
               mixup_alpha=0.0,
               randaug_num_layers=None,
               randaug_magnitude=None,
               resize_method=None):
    """Constructs an ImageNet input from a BigtableSelection.

    Args:
      is_training: `bool` for whether the input is for training
      use_bfloat16: If True, use bfloat16 precision; else use float32.
      transpose_input: 'bool' for whether to use the double transpose trick
      selection: a BigtableSelection specifying a part of a Bigtable.
      augment_name: `string` that is the name of the augmentation method to
        apply to the image. `autoaugment` if AutoAugment is to be used or
        `randaugment` if RandAugment is to be used. If the value is `None`
        no augmentation method will be applied. See autoaugment.py for more
        details.
      num_label_classes: number of label classes. Default to 1000 for
        ImageNet.
      include_background_label: if true, label #0 is reserved for background.
      mixup_alpha: float to control the strength of Mixup regularization,
        set to 0.0 to disable.
      randaug_num_layers: 'int', if RandAug is used, what should the number
        of layers be. See autoaugment.py for detailed description.
      randaug_magnitude: 'int', if RandAug is used, what should the
        magnitude be. See autoaugment.py for detailed description.
      resize_method: if None, use bicubic.
    """
    super(ImageNetBigtableInput, self).__init__(
        is_training=is_training,
        use_bfloat16=use_bfloat16,
        transpose_input=transpose_input,
        num_label_classes=num_label_classes,
        include_background_label=include_background_label,
        augment_name=augment_name,
        mixup_alpha=mixup_alpha,
        randaug_num_layers=randaug_num_layers,
        randaug_magnitude=randaug_magnitude,
        resize_method=resize_method)
    self.selection = selection

  def make_source_dataset(self, index, num_hosts):
    """See base class."""
    # tf.contrib only exists in TensorFlow 1.x; fail loudly on 2.x.
    try:
      from tensorflow.contrib.cloud import BigtableClient  # pylint: disable=g-import-not-at-top
    except ImportError as e:
      logging.exception('Bigtable is not supported in TensorFlow 2.x.')
      raise e

    data = self.selection
    client = BigtableClient(data.project, data.instance)
    table = client.table(data.table)
    ds = table.parallel_scan_prefix(data.prefix,
                                    columns=[(data.column_family,
                                              data.column_qualifier)])
    # The Bigtable datasets will have the shape (row_key, data)
    ds_data = ds.map(lambda index, data: data)

    if self.is_training:
      ds_data = ds_data.repeat()

    return ds_data
These cute wall lights look like Mickey Mouse, so they are sure to be popular with children; this kind of wall sconce is therefore most suitable for a kids' bedroom. The main material of the colorful wall lights is iron, and the iron has a unique finish. This retro wall lamp has a unique look. It is made of wrought iron and hemp rope; the iron has strong anti-corrosion and oxidation resistance, so the quality of the lamp body is very reliable. Hemp rope is also used, and it makes the overall look more fashionable.
#!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# Copyright (C) 2008-2010 Francisco José Rodríguez Bogado                     #
# <frbogado@novaweb.es>                                                       #
#                                                                             #
# This program is free software: you can redistribute it and/or modify        #
# it under the terms of the GNU General Public License as published by        #
# the Free Software Foundation, either version 3 of the License, or           #
# (at your option) any later version.                                         #
#                                                                             #
# This program is distributed in the hope that it will be useful,             #
# but WITHOUT ANY WARRANTY; without even the implied warranty of              #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the               #
# GNU General Public License for more details.                                #
#                                                                             #
# You should have received a copy of the GNU General Public License           #
# along with this program.  If not, see <http://www.gnu.org/licenses/>.       #
###############################################################################

'''
Created on 24/02/2011

@author: bogado

Requests ("peticiones") without an assigned lab technician ("laborante").
From this window technicians can be assigned to sample-collection requests
and a route sheet can be printed for each of them.
'''

import pygtk
pygtk.require('2.0')
import gtk
import sys, os, datetime
# Allow running the module directly from the "formularios" subdirectory.
if os.path.realpath(os.path.curdir).split(os.path.sep)[-1] == "formularios":
    os.chdir("..")
sys.path.append(".")
from framework import pclases
from ventana_consulta import VentanaConsulta
from ventana_generica import _abrir_en_ventana_nueva as abrir, GALACTUS
import utils, utils.mapa


class PeticionesSinAsignar(VentanaConsulta):
    """Window to assign lab technicians to sample-collection requests."""

    def __init__(self, objeto = None, usuario = None, run = True,
                 fecha = None):
        """
        Constructor.

        objeto: optional pclases object to open the window with (instead of
            the first record of the table, which is shown by default).
        usuario: user running the window (used for permission checks in
            child windows).
        run: if True, enter the gtk main loop at the end of the constructor.
        fecha: date to preselect in the calendar. ``None`` (default) means
            "today", resolved at call time. (The previous
            ``fecha=datetime.date.today()`` default was evaluated once at
            import time, so a long-running process would keep a stale date.)
        """
        if fecha is None:
            fecha = datetime.date.today()
        self.nombre_fichero_ventana = os.path.split(__file__)[-1]
        __clase = pclases.Peticion
        self.__usuario = usuario
        if objeto:
            # BUGFIX: this used to read ``self.objeto.fechaRecogida`` before
            # the base-class constructor had set ``self.objeto``, which
            # raised AttributeError. Use the parameter directly.
            fecha = objeto.fechaRecogida
        VentanaConsulta.__init__(self,
                                 usuario = usuario,
                                 clase = __clase,
                                 run = False,
                                 ventana_marco="peticiones_sin_asignar.glade")
        self.build_tabla_laborantes()
        self.build_tabla_peticiones_sin_asignar()
        self.build_tabla_peticiones_asignadas()
        self.wids['b_asignar'].connect("clicked", self.asignar)
        # ``marcar_dias_pendientes`` is module-level; gtk passes the calendar
        # widget itself as the callback argument.
        self.wids['calendario'].connect('month-changed',
                                        marcar_dias_pendientes)
        self.actualizar_ventana()
        self.wids['calendario'].connect('day-selected',
                                        self.actualizar_ventana)
        # gtk.Calendar months are 0-based.
        self.wids['calendario'].select_month(fecha.month - 1, fecha.year)
        self.wids['calendario'].select_day(fecha.day)
        self.mapa = utils.mapa.Mapa()
        self.mapa.put_mapa(self.wids["vpaned1"])
        sel = self.wids['tv_sin_asignar'].get_selection()
        sel.connect("changed", self.actualizar_mapa)
        # For already-assigned requests, center the map without tracking.
        sel = self.wids['tv_asignadas'].get_selection()
        sel.connect("changed", self.actualizar_mapa, False)
        if run:
            gtk.main()

    def actualizar_mapa(self, sel, track = True, flag = True):
        """Center the map on the address of each selected request."""
        model, paths = sel.get_selected_rows()
        for path in paths:
            puid = model[path][-1]
            peticion = pclases.getObjetoPUID(puid)
            d = peticion.direccion
            if not d:
                # Fall back to the address of the construction site (obra).
                d = peticion.obra.direccion
            try:
                self.mapa.centrar_mapa(d.lat, d.lon, zoom = 12,
                                       track = track, flag = flag)
            except AttributeError:
                # The obra/peticion has no address assigned; nothing to show.
                pass

    def build_tabla_laborantes(self):
        """Prepare the tree of technicians with their assigned pickups."""
        cols = (("Nombre", "gobject.TYPE_STRING", False, True, True, None),
                ("Recogidas asignadas", "gobject.TYPE_STRING",
                 False, True, False, None),
                ("PUID", "gobject.TYPE_STRING", False, False, False, None))
        utils.ui.preparar_treeview(self.wids['tv_laborantes'], cols)
        self.wids['tv_laborantes'].connect("row-activated",
                                           self._abrir_en_ventana_nueva,
                                           self.__usuario,
                                           GALACTUS,
                                           None,
                                           pclases.Empleado)

    def build_tabla_peticiones_sin_asignar(self):
        """Prepare the list of requests without an assigned technician.

        NOTE: renamed -- this method used to be called
        ``build_tabla_peticiones_asignadas`` even though it configures
        ``tv_sin_asignar``; the two names were swapped.
        """
        cols = (("Obra", "gobject.TYPE_STRING", False, True, True, None),
                ("Dirección", "gobject.TYPE_STRING",
                 False, True, False, None),
                ("Material", "gobject.TYPE_STRING", False, True, False, None),
                ("PUID", "gobject.TYPE_STRING", False, False, False, None))
        # Multi-select so several requests can be assigned in one go.
        utils.ui.preparar_listview(self.wids['tv_sin_asignar'], cols,
                                   multi = True)
        self.wids['tv_sin_asignar'].connect("row-activated",
                                            self._abrir_en_ventana_nueva,
                                            self.__usuario,
                                            GALACTUS,
                                            None,
                                            pclases.Peticion)

    def build_tabla_peticiones_asignadas(self):
        """Prepare the list of requests that already have a technician.

        NOTE: renamed -- this method used to be called
        ``build_tabla_peticiones_sin_asignar`` even though it configures
        ``tv_asignadas``; the two names were swapped.
        """
        cols = (("Obra", "gobject.TYPE_STRING", False, True, True, None),
                ("Dirección", "gobject.TYPE_STRING",
                 False, True, False, None),
                ("Material", "gobject.TYPE_STRING", False, True, False, None),
                ("Laborante", "gobject.TYPE_STRING",
                 False, True, False, None),
                ("PUID", "gobject.TYPE_STRING", False, False, False, None))
        utils.ui.preparar_listview(self.wids['tv_asignadas'], cols)
        self.wids['tv_asignadas'].connect("row-activated",
                                          self._abrir_en_ventana_nueva,
                                          self.__usuario,
                                          GALACTUS,
                                          None,
                                          pclases.Peticion)

    def _abrir_en_ventana_nueva(self, *args, **kw):
        """Open the activated record in a new window and then refresh."""
        abrir(*args, **kw)
        self.actualizar_ventana()

    def rellenar_widgets(self):
        """Refresh every data-bound widget of the window."""
        self.rellenar_tabla_laborantes()
        self.rellenar_tablas_peticiones()

    def rellenar_tabla_laborantes(self):
        """Fill the technicians tree with their pickups for the day."""
        model = self.wids['tv_laborantes'].get_model()
        model.clear()
        padres = {}
        for e in pclases.Empleado.buscar_laborantes():
            padres[e] = model.append(None, (e.get_info(), "", e.get_puid()))
        fecha_seleccionada = self.get_fecha_seleccionada()
        for p in pclases.Peticion.selectBy(
                fechaRecogida = fecha_seleccionada):
            laborante = p.empleado
            try:
                padre = padres[laborante]
            except KeyError:
                # The technician no longer is one (or the request has no
                # technician at all), so it is not listed here.
                pass
            else:
                model.append(padre, ("", p.get_info(), p.get_puid()))
                # Keep a running per-technician count in the second column.
                try:
                    model[padre][1] = utils.numero.float2str(
                        utils.numero._float(model[padre][1]) + 1,
                        precision=0)
                except (TypeError, ValueError):
                    model[padre][1] = "1"

    def get_fecha_seleccionada(self):
        """
        Return the gtk.Calendar date as a datetime.date.
        """
        y, m, d = self.wids['calendario'].get_date()
        fecha = datetime.date(y, m+1, d)  # Months start at 0 in gtk.Calendar
        return fecha

    def rellenar_tablas_peticiones(self):
        """Distribute the day's requests between the two request lists."""
        fecha_seleccionada = self.get_fecha_seleccionada()
        self.wids['tv_sin_asignar'].get_model().clear()
        self.wids['tv_asignadas'].get_model().clear()
        for p in pclases.Peticion.selectBy(
                fechaRecogida = fecha_seleccionada):
            fila = ((p.obra and p.obra.get_info() or "",
                     p.direccion and p.direccion.get_direccion_completa()
                        or "",
                     p.material and p.material.get_info() or ""))
            if not p.empleado:   # Not assigned yet
                model = self.wids['tv_sin_asignar'].get_model()
            else:
                model = self.wids['tv_asignadas'].get_model()
                fila += (p.empleado.get_nombre_completo(), )
            fila += (p.get_puid(), )
            model.append(fila)

    def asignar(self, boton):
        """Assign the selected requests to the selected technician."""
        model, iter = self.wids['tv_laborantes'].get_selection(
                ).get_selected()
        if not iter:
            utils.ui.dialogo_info(titulo = "SELECCIONE UN LABORANTE",
                    texto = "Debe seleccionar un laborante al que asignar las "
                            "peticiones de recogida de material.",
                    padre = self.wids['ventana'])
        else:
            empleado = pclases.getObjetoPUID(model[iter][-1])
            sel = self.wids['tv_sin_asignar'].get_selection()
            sel.selected_foreach(self._asignar_peticion, empleado)
            self.actualizar_ventana()

    def _asignar_peticion(self, treemodel, path, iter, laborante):
        """Per-row callback: assign one request to *laborante*.

        (Renamed from the typo ``asiganda``; internal use only.)
        """
        p = pclases.getObjetoPUID(treemodel[iter][-1])
        p.empleado = laborante
        p.sync()

    def imprimir(self, boton):
        """
        Print one route sheet per technician. If one is selected, only
        that technician's route sheet is printed.
        """
        model, iter = self.wids['tv_laborantes'].get_selection(
                ).get_selected()
        if not iter:
            # Print for everybody:
            laborantes = []
            for fila in model:
                puid = fila[-1]
                laborante = pclases.getObjetoPUID(puid)
                laborantes.append(laborante)
        else:
            puid = model[iter][-1]
            laborante = pclases.getObjetoPUID(puid)
            laborantes = [laborante]
        dia = self.get_fecha_seleccionada()
        for laborante in laborantes:
            abrir_hoja_de_ruta(laborante, dia)


def abrir_hoja_de_ruta(laborante, dia):
    """
    Generate and open a PDF with the technician's route sheet for the
    given day.
    """
    from reports import hoja_de_ruta
    from utils.informes import abrir_pdf
    peticiones = laborante.get_peticiones(dia)
    pdf_hoja_ruta = hoja_de_ruta.hoja_ruta(laborante, peticiones)
    abrir_pdf(pdf_hoja_ruta)


def marcar_dias_pendientes(calendario):
    """
    Highlight the days of the active month that still have requests
    pending assignment.
    """
    calendario.clear_marks()
    fecha_actual = calendario.get_date()
    # gtk.Calendar months are 0-based; build [first of month, first of next).
    uno_del_mes = datetime.date(fecha_actual[0], fecha_actual[1] + 1, 1)
    mes_siguiente = fecha_actual[1] + 2
    if mes_siguiente > 12:
        anno = fecha_actual[0] + 1
        mes_siguiente = mes_siguiente % 12
    else:
        anno = fecha_actual[0]
    uno_del_siguiente = datetime.date(anno, mes_siguiente, 1)
    for p in pclases.Peticion.select(pclases.AND(
            pclases.Peticion.q.empleadoID==None,
            pclases.Peticion.q.fechaRecogida >= uno_del_mes,
            pclases.Peticion.q.fechaRecogida < uno_del_siguiente)):
        calendario.mark_day(p.fechaRecogida.day)


def main():
    from formularios.options_ventana import parse_options
    params, opt_params = parse_options()
    ventana = PeticionesSinAsignar(*params, **opt_params)


if __name__ == "__main__":
    main()
Plan how you are going to spend your time in the car, the games you'll play, the places of interest you'll stop at, the snacks you'll have and the things to take with you in the car - to make your trip a FUN trip. Here are some useful general checklists (children might like to make their own list). Bring lots of snacks and surprise packages. You can prepare healthy snack-packs (dried fruit, nuts, cheese sticks, choc-bits, tasty biscuits, pretzels, fruit bars, dry cereal) at home. For extra fun, use up your old Xmas or birthday paper and wrap up some snacks and 50 cent toys – simple things such as these may help to save your sanity in trying moments. Small drinks with resealable lids are better than large (although cheaper) bottles that can leak. Travel lollies are great as prizes in games and for general "bribing" purposes. It's worth taking a damp cloth while on the move to wipe little hands. A wet cloth kept in a plastic bag will stay damp for many hours. Resealable containers and bottles are good to store leftover food and fruit. It's handy to have a couple of plastic shopping bags or similar to use as rubbish or motion sickness bags. If you can, try to avoid bringing food that is likely to melt, squash or go "off" in the heat. Glass containers and fragile toys or trinkets are best left at home, or safely packed in the boot. Stationery that is sharp, like scissors or Stanley knives, should be avoided - as should textas or crayons that may melt on the back shelf of the car. Don't forget the driver. Take along some special delight for the driver - especially if you are the driver. Remember to stop every 2 hours to take a break and stretch your legs. If the driver gets tired, swap drivers or pull over and have a sleep. Once you've shut the front door of your home your holiday has begun. So relax! Have a good trip and have a fun trip. Make the most of the journey because it's part of your holiday too. When you take time to enjoy your trip, the time will fly.
Sometimes, reading in the car, or travelling on the long and winding road, can bring on motion sickness. To combat this, get some fresh air, close your eyes and, if possible, sit in the front seat of the car. Children need help in entertaining themselves, lots of little snacks and surprises and, most importantly, they need lots of chances to get out and have a run around. Calculate the time it would take a group of adults to get to where you are going - and then double it! Leave yourself time to be able to get out of the car and have a look at the old bridge by the river - or play on those cool swings in the park. Each new place is a new adventure and often the unexpected things that happen on a trip are the most remembered. When the troops start to mutiny, it might be time to pull over and have a break. The kids will really appreciate a few minutes to kick the ball around or have a good look at the horses in the paddock. You'll notice the difference in the atmosphere when everyone gets back in the car. Your travelling pet will also enjoy a drink of water and a sniff around the trees. Rearrange the car blanket your pet has been lying on and give that dog a new tennis ball to chew on. A warning - if the little ones are asleep in the back you may need to make the most of the moment and keep going while they doze. A small pillow might be useful for people who like to rest their head or arms while they travel. Every time you are within range of a toilet - at the park, the petrol station or the lunchtime picnic-stop, check out the toilets! Keep a roll of toilet paper and a small garden trowel in the boot, for those times when there isn't a convenience stop in sight. Take along those nifty hand-held computer games, walkmans, knitting, packs of cards and travel-case games. The kids will know what they like to do in the car so let them choose. Books and magazines might give them motion sickness on those hairpin bends! Bring along those tapes of songs and stories.
It may drive you crazy but the kids just love it. For maximum fun, before you go on your trip ask children to record themselves reading favourite short stories, jokes, poems or performing musical recitals. They'll play it again and again and again and again… The driver will need to take a break every 2 hours. Keep a ball, frisbee or skipping rope in the front to help the kids get rid of some of that eternal supply of energy when you stop. Take plenty of 'snack packs' of dried fruit and other assorted goodies; pre-packed drinks and a big bottle of water are lifesavers when the morale really starts to flag. A washcloth can help with sticky fingers.
from cl.visualizations.models import SCOTUSMap, JSONVersion, Referer
from django.contrib import admin


class JSONVersionInline(admin.StackedInline):
    """Edit the JSON snapshots of a map inline on the map's admin page."""

    model = JSONVersion
    extra = 1


class RefererInline(admin.StackedInline):
    """Edit the referring pages of a map inline on the map's admin page."""

    model = Referer
    extra = 1


class JSONVersionAdmin(admin.ModelAdmin):
    """Stand-alone admin for JSON versions of a map."""

    raw_id_fields = ('map',)
    readonly_fields = ('date_created', 'date_modified')


class RefererAdmin(admin.ModelAdmin):
    """Stand-alone admin for pages that referred to a map."""

    raw_id_fields = ('map',)
    readonly_fields = ('date_created', 'date_modified')
    list_display = ('__unicode__', 'display', 'date_created', 'date_modified')
    list_filter = ('display',)
    search_fields = ('id', 'url', 'page_title')


class SCOTUSMapAdmin(admin.ModelAdmin):
    """Admin for visualization maps, with versions and referers inline."""

    inlines = (JSONVersionInline, RefererInline)
    raw_id_fields = ('clusters', 'cluster_start', 'cluster_end')
    readonly_fields = ('date_created', 'date_modified', 'generation_time')
    list_display = ('__unicode__', 'user_id', 'date_created', 'date_modified',
                    'view_count', 'published', 'deleted')
    list_filter = ('published', 'deleted')
    search_fields = ('id', 'title')


admin.site.register(SCOTUSMap, SCOTUSMapAdmin)
admin.site.register(JSONVersion, JSONVersionAdmin)
admin.site.register(Referer, RefererAdmin)
Can color heal the nervous system? Do different hues have the ability to bring health to the body through the gateway of the eyes? The answer to these questions is, amazingly, yes. Optometric phototherapy, which heavily relies upon the use of color, has been proven to deliver results to patients. This means that color is not only beautiful, but is also medically useful. What Is Optometric Phototherapy? How Does It Work? To understand syntonics, you must first understand color. Color is not just something lovely to look at; it actually emits physical frequencies, or vibrations. Different colors emit different frequencies. Researchers in the field of vision therapy have discovered what color frequencies, when received through the visual system, treat specific health problems. The combination of color and light bring about results that are extraordinary. What Health Issues Can Syntonics Successfully Treat? Colored light therapy can heal visual problems, but it doesn’t stop there. Because, according to Brain World, syntonics causes balance to be “…restored in the regulatory centers of the body’s nervous system, specifically the sympathetic and parasympathetic nervous systems,” it has the ability to bring peace to a troubled mind. Colored light therapy has been known to be an especially effective treatment for those having an unbalanced nervous system. Do you suffer with any of the health problems mentioned in this post? Colored light therapy may be a viable, safe, and artful treatment that could be useful in your pursuit of better health. Just like sound, which I recently shared about in my post “The Power of Sound ~ The Power of Your Song” -https://taniamarieartist.wordpress.com/2015/04/21/the-power-of-sound-the-power-of-your-song/, color and light therapy also emits frequencies, or vibrations, that can have many healing benefits. Read on to learn a little about color therapy and its effects.
from __future__ import unicode_literals

from .common import InfoExtractor
from ..utils import ExtractorError


class ChaturbateIE(InfoExtractor):
    """Extractor for live Chaturbate rooms (HLS streams)."""

    _VALID_URL = r'https?://(?:[^/]+\.)?chaturbate\.com/(?P<id>[^/?#]+)'
    _TESTS = [{
        'url': 'https://www.chaturbate.com/siswet19/',
        'info_dict': {
            'id': 'siswet19',
            'ext': 'mp4',
            'title': 're:^siswet19 [0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}$',
            'age_limit': 18,
            'is_live': True,
        },
        'params': {
            'skip_download': True,
        }
    }, {
        'url': 'https://en.chaturbate.com/siswet19/',
        'only_matching': True,
    }]

    def _real_extract(self, url):
        room_id = self._match_id(url)
        page = self._download_webpage(url, room_id)

        # The room page embeds the HLS playlist URL directly; when the room
        # is offline/banned there is an error message instead.
        playlist_url = self._search_regex(
            r'src=(["\'])(?P<url>http.+?\.m3u8.*?)\1', page, 'playlist',
            default=None, group='url')
        if playlist_url is None:
            message = self._search_regex(
                r'<span[^>]+class=(["\'])desc_span\1[^>]*>(?P<error>[^<]+)</span>',
                page, 'error', group='error')
            raise ExtractorError(message, expected=True)

        return {
            'id': room_id,
            'title': self._live_title(room_id),
            'thumbnail': 'https://cdn-s.highwebmedia.com/uHK3McUtGCG3SMFcd4ZJsRv8/roomimage/%s.jpg' % room_id,
            'age_limit': self._rta_search(page),
            'is_live': True,
            'formats': self._extract_m3u8_formats(
                playlist_url, room_id, ext='mp4'),
        }
1. You Are a Co-Creator: Create the body you desire. We don’t need to resign ourselves to illness and disease or poor health outcomes, much less our genetic fate. Deeply engrained social or cultural beliefs may have taught us otherwise, but the truth about human design from a quantum perspective, is that we are actually designers of the physical reality we create for ourselves, including what happens in our bodies. We have a built-in capacity to manifest real changes in the body, right down to the cellular and genetic level, directly through our consciousness, even without physical intervention or treatments. We are more likely to be unhealthy if we remain stuck in our beliefs that have convinced us that many health problems are related to faulty genetics, are inherited, or that illness and disease occurs as a result of unfortunate events over which we have had no control. There is no need to believe we do not deserve perfection in our own bodies. Realization of this important truth is our starting point as long as we remind ourselves, if there was no imperfection, there would be no impetus to create! 2. You Are Your Body: Re-pattern your negative thoughts and emotions. The subtle energy body (personal quantum field) that surrounds our physical body contains energetic patterns, which reflect our thoughts and emotions. These patterns are broadcasting electromagnetic signals that are received by the non-encoded part of our DNA, which functions in a quantum way and operates like antennae. The negative or highly charged signals are received by our quantum DNA as frequency information that is “MIS-information”. These signals are then communicated to, and decoded by our physical DNA (the encoded part), in order to provide the instructions which govern all cellular and biological functioning in the body. 
When the frequency information provided is incorrect or of poor quality and the quantum and physical parts of our DNA aren’t communicating very well, our body is responding to sub-standard instructions, which ultimately can make us sick. By paying attention to what we allow ourselves to think and feel, we can create more positive and neutral energy in our fields. When we exercise conscious choice about how we carry ourselves through life, we can literally change the frequency information being transmitted to our physical DNA, which in turn re-encodes our gene sequences and provides better instructions to the body. 3. Your Challenges Create Opportunity: Develop a sincere desire and intent to heal. In order to realize the benefits of working with our consciousness to re-encode our DNA, we must work from the inside out to effect the changes we’d like to see in our bodies. It is actually “the self” that initiates and allows the healing. It doesn’t occur as a direct result of a practitioner’s intervention on the patient’s behalf, without their conscious involvement and intent. We need to be aware of the truth of who we are in the moment and be able to see how these aspects of ourselves might have held us back from achieving better health. When we accept these as opportunities instead of impediments, we can unlock the hidden potential that lies within our quantum DNA to bring our bodies back into balance. No amount of willpower can overcome the aspects of our own selves that lead to the underpinnings of illness and disease. We cannot just decide to be different. We need to surrender to ourselves and to the healing process unconditionally, and in the absence of ego, so that we can honor ourselves in a non-judgemental and caring way. 4. Your Body is Wise: Trust in the wisdom of your “smart-body”. Our DNA interacts with our consciousness through an inseparable aspect of us, known as the Innate Self or “smart body”.
It forms the essential bridge between our cellular structure, our DNA, and our consciousness and is responsible for mysterious recoveries, spontaneous remissions as well as the unexplained disappearances of incurable diseases. It represents a deeper intelligence that knows everything that’s going on chemically in us and exactly what we need in order to stay balanced and healthy. It knows what we are allergic to, what food or supplements might be supportive to us, and which ones might be harmful. We are communicating with it when we use techniques such as muscle testing or forms of body work such as osteopathy or body talk. Our job is to help things along by listening carefully and trusting in what our sensory perception, as a communicator for the smart-body, is telling us about what our bodies need. 5. Your DNA and Cells Are Listening: Consciously override your default mode. DNA doesn’t just evolve or become activated on its own, regardless of what influences might be exerted on it. It waits for us to direct it. Our DNA rests in autopilot and plays a default set of instructions to our cells through our DNA, based on the frequency information it is provided, unless we tell it to do something different. We can communicate with our cells and DNA through our minds (the voice of our consciousness) while holding an intention (the vehicle of our consciousness) in order to manifest the changes in our DNA and cells which we desire. Our subconscious mind and soul unconsciously and unconditionally take note of all of our experiences, both positive and negative, including those which are associated with trauma, extreme stress or injury, as well as emotionally charged events. They’re recorded as frequency information as our Akashic Records, which are held within the quantum part of our DNA. Without even realizing it, we store erroneous and self-limiting perceptions about these experiences that make up our belief system. 
These include attitudes and convictions we have about ourselves and others, desires that predispose us to unnecessary suffering or self-punishment, our genetic inheritance, body image and about our ability to heal. As a form of memory, this frequency information pervades our consciousness and drives what we manifest in our bodies. Through conscious intent, we can mine our Akash, intentionally substituting these negative attributes for more positive ones, which promote health such as strength, tolerance, self-acceptance and self-love. Our Akash is recalibrated as a result of the swap-out, improving the quality of frequency information our quantum DNA contains and communicates, which is then de-coded by our physical DNA, in order to create a more effective response in the body. We are vulnerable to poor health because, at a very deep level our cells have forgotten how to repair and regenerate themselves. When our DNA communication isn’t very effective, much of the encoding portion on our physical DNA isn’t working. Its access to the memory held within our quantum DNA is limited, so it can’t provide enough frequency information to the cells to help them remember how to operate efficiently. Unless we remind them otherwise, our cells simply respond to the instructions they were first imprinted with at birth. By talking directly to them, with conscious intent, we can remind them of their ability to change, to re-activate and take in new quantum information provided to them through the thoughts, feelings and beliefs our consciousness interprets. We can purposefully give them permission to void any instructions they currently have that create dysfunction in our bodies. We don’t need to give them specifics, as long as our intent is focused on expressing our desire for change. Accompanying this internal cellular communication with visualizations focused on how that change might be realized for us in our bodies can yield incredibly profound results.
""" byceps.services.board.aggregation_service ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ :Copyright: 2006-2020 Jochen Kupperschmidt :License: Modified BSD, see LICENSE for details. """ from ...database import db from .models.category import Category as DbCategory from .models.posting import Posting as DbPosting from .models.topic import Topic as DbTopic def aggregate_category(category: DbCategory) -> None: """Update the category's count and latest fields.""" topic_count = DbTopic.query.for_category(category.id).without_hidden().count() posting_query = DbPosting.query \ .without_hidden() \ .join(DbTopic) \ .filter_by(category=category) posting_count = posting_query.count() latest_posting = posting_query \ .filter(DbTopic.hidden == False) \ .latest_to_earliest() \ .first() category.topic_count = topic_count category.posting_count = posting_count category.last_posting_updated_at = latest_posting.created_at \ if latest_posting else None category.last_posting_updated_by_id = latest_posting.creator_id \ if latest_posting else None db.session.commit() def aggregate_topic(topic: DbTopic) -> None: """Update the topic's count and latest fields.""" posting_query = DbPosting.query.for_topic(topic.id).without_hidden() posting_count = posting_query.count() latest_posting = posting_query.latest_to_earliest().first() topic.posting_count = posting_count if latest_posting: topic.last_updated_at = latest_posting.created_at topic.last_updated_by_id = latest_posting.creator_id db.session.commit() aggregate_category(topic.category)
Washington -- As a provider of a start-up Internet Protocol video service, AT&T has been exempt from paying regulatory fees used to help fund the Federal Communications Commission's $313 million budget, much to the displeasure of incumbent cable operators. In a concession Monday, AT&T said it should begin to pay video-based fees -- not necessarily at the same rate as cable incumbents but more likely at a level that represents the actual burdens imposed by its nascent video service on FCC staff resources. "AT&T fully agrees that, as a [pay-TV] provider, it should pay an equitable share of the [FCC's] regulatory costs," the company said in an Oct. 27 FCC filing. On Oct. 22, AT&T reported 781,000 U-verse TV subscribers, a gain of 232,000 in the third quarter. The company expects to serve 1 million IPTV customers by the end of the year. With so few pay-TV subscribers, AT&T doesn't have much at risk in the regulatory fee dispute at the FCC. Nevertheless, it did step into the middle of a fierce debate between the incumbent cable operators and Verizon on the one side and DirecTV and Dish Network on the other about how much cable and satellite carriers should pay in FCC regulatory fees. AT&T's exemption has only added to the intensity of a fight that has been going on for several years. The National Cable & Telecommunications Association, the American Cable Association and Verizon insist that cable operators are not being treated fairly, while DirecTV and Dish assert that cable MSOs pay at a rate commensurate with the level of regulatory activity they generate at the FCC. In an attempt to document the disparity, Verizon told the FCC in September that, based on current policy, it will need to pay more in regulatory fees than Dish Network in 2008, even though Dish has 20 times the number of Verizon subscribers. Under current FCC rules, cable operators need to pay 80 cents per subscriber, contributing $51.8 million total.
Direct broadcast satellite carriers pay $119,000 per-satellite basis, putting their FCC tab at $2.3 million. If DBS paid at cable&apos;s 80-cent per-subscriber rate, it would owe the FCC about $24.5 million, 10 times what is it paying now. ACA noted that in 2006 cable paid 77 cents per-subscriber, while satellite TV providers paid an effective rate of 6.7 cents. "There is no rational policy or any other justification for imposing regulatory fees on cable operators that are more than 11 times than those paid by the two national DBS providers. The [FCC] should impose per-subscriber fees on all [pay-TV providers], including DBS," ACA said in an Oct. 27 FCC filing. DirecTV and Dish Network want the FCC to keep the status quo, saying they do not require FCC oversight on as broad a scale as cable operators. "Unlike DirecTV and Dish Network, most cable operators are the dominant incumbent video providers in their service areas, and as such are subject to rules and regulatory proceedings that apply only to them," the DBS providers told the FCC on Oct. 27. AT&T didn&apos;t volunteer how much IPTV providers should pay in regulatory fees. It did say, however, that the FCC should turn to the IPTV question only after it had settled the cable-DBS dispute. "As this dispute makes clear, there is substantial controversy over whether the present assessment methodology is equitable," AT&T said.
""" Copyright 2008-2011 Free Software Foundation, Inc. This file is part of GNU Radio GNU Radio Companion is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. GNU Radio Companion is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA """ from collections import defaultdict from .. base.Block import Block as _Block from .. gui.Block import Block as _GUIBlock from . FlowGraph import _variable_matcher import extract_docs class Block(_Block, _GUIBlock): def is_virtual_sink(self): return self.get_key() == 'virtual_sink' def is_virtual_source(self): return self.get_key() == 'virtual_source' ##for make source to keep track of indexes _source_count = 0 ##for make sink to keep track of indexes _sink_count = 0 def __init__(self, flow_graph, n): """ Make a new block from nested data. 
Args: flow: graph the parent element n: the nested odict Returns: block a new block """ #grab the data self._doc = n.find('doc') or '' self._imports = map(lambda i: i.strip(), n.findall('import')) self._make = n.find('make') self._var_make = n.find('var_make') self._checks = n.findall('check') self._callbacks = n.findall('callback') self._throttle = n.find('throttle') or '' self._bus_structure_source = n.find('bus_structure_source') or '' self._bus_structure_sink = n.find('bus_structure_sink') or '' #build the block _Block.__init__( self, flow_graph=flow_graph, n=n, ) _GUIBlock.__init__(self) def get_bus_structure(self, direction): if direction == 'source': bus_structure = self._bus_structure_source; else: bus_structure = self._bus_structure_sink; bus_structure = self.resolve_dependencies(bus_structure); if not bus_structure: return '' try: clean_bus_structure = self.get_parent().evaluate(bus_structure) return clean_bus_structure except: return '' def throttle(self): return bool(self._throttle) def validate(self): """ Validate this block. Call the base class validate. Evaluate the checks: each check must evaluate to True. 
""" _Block.validate(self) #evaluate the checks for check in self._checks: check_res = self.resolve_dependencies(check) try: if not self.get_parent().evaluate(check_res): self.add_error_message('Check "%s" failed.'%check) except: self.add_error_message('Check "%s" did not evaluate.'%check) # for variables check the value (only if var_value is used if _variable_matcher.match(self.get_key()) and self._var_value != '$value': value = self._var_value try: value = self.get_var_value() self.get_parent().evaluate(value) except Exception as err: self.add_error_message('Value "%s" cannot be evaluated:\n%s' % (value, err)) # check if this is a GUI block and matches the selected generate option current_generate_option = self.get_parent().get_option('generate_options') for label, option in (('WX GUI', 'wx_gui'), ('QT GUI', 'qt_gui')): if self.get_name().startswith(label) and current_generate_option != option: self.add_error_message("Can't generate this block in mode " + repr(option)) def rewrite(self): """ Add and remove ports to adjust for the nports. 
""" _Block.rewrite(self) # adjust nports for ports in (self.get_sources(), self.get_sinks()): for i, master_port in enumerate(ports): nports = master_port.get_nports() or 1 num_ports = 1 + len(master_port.get_clones()) if not nports and num_ports == 1: # not a master port and no left-over clones continue # remove excess cloned ports for port in master_port.get_clones()[nports-1:]: # remove excess connections for connection in port.get_connections(): self.get_parent().remove_element(connection) master_port.remove_clone(port) ports.remove(port) # add more cloned ports for i in range(num_ports, nports): port = master_port.add_clone() ports.insert(ports.index(master_port) + i, port) self.back_ofthe_bus(ports) # renumber non-message/-msg ports domain_specific_port_index = defaultdict(int) for port in filter(lambda p: p.get_key().isdigit(), ports): domain = port.get_domain() port._key = str(domain_specific_port_index[domain]) domain_specific_port_index[domain] += 1 def port_controller_modify(self, direction): """ Change the port controller. Args: direction: +1 or -1 Returns: true for change """ changed = False #concat the nports string from the private nports settings of all ports nports_str = ' '.join([port._nports for port in self.get_ports()]) #modify all params whose keys appear in the nports string for param in self.get_params(): if param.is_enum() or param.get_key() not in nports_str: continue #try to increment the port controller by direction try: value = param.get_evaluated() value = value + direction if 0 < value: param.set_value(value) changed = True except: pass return changed def get_doc(self): doc = self._doc.strip('\n').replace('\\\n', '') #merge custom doc with doxygen docs return '\n'.join([doc, extract_docs.extract(self.get_key())]).strip('\n') def get_category(self): return _Block.get_category(self) def get_imports(self): """ Resolve all import statements. Split each import statement at newlines. Combine all import statments into a list. 
Filter empty imports. Returns: a list of import statements """ return filter(lambda i: i, sum(map(lambda i: self.resolve_dependencies(i).split('\n'), self._imports), [])) def get_make(self): return self.resolve_dependencies(self._make) def get_var_make(self): return self.resolve_dependencies(self._var_make) def get_var_value(self): return self.resolve_dependencies(self._var_value) def get_callbacks(self): """ Get a list of function callbacks for this block. Returns: a list of strings """ def make_callback(callback): callback = self.resolve_dependencies(callback) if 'self.' in callback: return callback return 'self.%s.%s'%(self.get_id(), callback) return map(make_callback, self._callbacks)
Once upon a time, there lived two octopuses in a well-groomed fish tank. The purple and the orange octopuses were named Chun Yue and Xian Yao respectively. They were siblings and loved each other dearly. One fateful day, the worker from the factory took Chun Yue away from Xian Yao and he was heartbroken. He felt as if a thousand arrows had been shot through his heart and the pain in his heart was excruciating. <iframe width="640" height="360" src="http://v.koobits.com/1kd8u6q1z74p" frameborder="0" allowfullscreen></iframe><br /><a href="http://ebooks.koobits.com">FREE Children Ebook</a> by KooBits.
#!/usr/bin/env python
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------

"""
An example to show receiving events from an Event Hub with checkpoint store doing checkpoint
by every fixed time interval.
In the `receive` method of `EventHubConsumerClient`:
If no partition id is specified, the checkpoint_store are used for load-balance and checkpoint.
If partition id is specified, the checkpoint_store can only be used for checkpoint.
"""

import os
import time
from azure.eventhub import EventHubConsumerClient
from azure.eventhub.extensions.checkpointstoreblob import BlobCheckpointStore

CONNECTION_STR = os.environ["EVENT_HUB_CONN_STR"]
EVENTHUB_NAME = os.environ['EVENT_HUB_NAME']
STORAGE_CONNECTION_STR = os.environ["AZURE_STORAGE_CONN_STR"]
BLOB_CONTAINER_NAME = "your-blob-container-name"  # Please make sure the blob container resource exists.

# Wall-clock time (per partition id) at which the last checkpoint was written.
partition_last_checkpoint_time = dict()
# Minimum number of seconds between two checkpoints for the same partition.
checkpoint_time_interval = 15


def on_event(partition_context, event):
    """Handle one received event; checkpoint at most once per interval.

    Called by EventHubConsumerClient.receive for every event.  Writes a
    checkpoint only if `checkpoint_time_interval` seconds have elapsed since
    the last checkpoint for this partition (or none was written yet).
    """
    # Put your code here.
    # Avoid time-consuming operations.
    p_id = partition_context.partition_id
    print("Received event from partition: {}".format(p_id))
    # Fixed: `p_id` was previously assigned a second, redundant time here.
    now_time = time.time()
    last_checkpoint_time = partition_last_checkpoint_time.get(p_id)
    if last_checkpoint_time is None or (now_time - last_checkpoint_time) >= checkpoint_time_interval:
        partition_context.update_checkpoint(event)
        partition_last_checkpoint_time[p_id] = now_time


if __name__ == '__main__':
    checkpoint_store = BlobCheckpointStore.from_connection_string(STORAGE_CONNECTION_STR, BLOB_CONTAINER_NAME)
    consumer_client = EventHubConsumerClient.from_connection_string(
        conn_str=CONNECTION_STR,
        consumer_group='$Default',
        eventhub_name=EVENTHUB_NAME,
        checkpoint_store=checkpoint_store,  # For load-balancing and checkpoint. Leave None for no load-balancing.
    )

    try:
        with consumer_client:
            """
            Without specified partition_id, the receive will try to receive events from all partitions and
            if provided with a checkpoint store, the client will load-balance partition assignment with other
            EventHubConsumerClient instances which also try to receive events from all partitions and use the
            same storage resource.
            """
            consumer_client.receive(
                on_event=on_event,
                starting_position="-1",  # "-1" is from the beginning of the partition.
            )
            # With specified partition_id, load-balance will be disabled, for example:
            # client.receive(on_event=on_event, partition_id='0')
    except KeyboardInterrupt:
        print('Stopped receiving.')
Can't wait to see a trailer for the new Terminator 5 movie? Well, neither can we! UGO.com is proud to announce "Not Fake Trailers": the new place for Hollywood's biggest [fake] movie trailers, all premiering YEARS before the film hits the big screen. At a recent press junket interview in London, Arnold Schwarzenegger revealed that he will indeed "be back" for Terminator 5, which is leaving fans in a Terminator tizzy. Will Arnold be back to portray the Terminator 101 again? And if so, how will they explain how old and tired he looks? I thought steel don't peel. Then again, that skin he's wearing IS organic. Maybe it ages. Will Terminator 5 take place in the present day or will it continue the story of whiny adult John Connor as he battles robot motorcycles in a post-apocalyptic future? Will Christian Bale be back and will he finally drop the Batman voice he uses in every movie since Batman Begins? And what's James Cameron doing? You think he'll go see Terminator 5 or will he stay at home to add more ponytails to Avatar 2? Find out this and more in this UGO exclusive Terminator 5 trailer! What's Black and White and Frank Miller all over? Check back next week for a new installment of "Not Fake Trailers" to find out. Is there an upcoming movie you'd like to see a trailer for RIGHT NOW!? Tweet at us @ugodotcom with the hashtag #NotFakeTrailers to let us know!
""" Generic thread pool class. Modeled after Java's ThreadPoolExecutor. Please note that this ThreadPool does *not* fully implement the PEP 3148 ThreadPool! """ from threading import Thread, Lock, currentThread from weakref import ref import logging import atexit try: from queue import Queue, Empty except ImportError: from Queue import Queue, Empty logger = logging.getLogger(__name__) _threadpools = set() # Worker threads are daemonic in order to let the interpreter exit without # an explicit shutdown of the thread pool. The following trick is necessary # to allow worker threads to finish cleanly. def _shutdown_all(): for pool_ref in tuple(_threadpools): pool = pool_ref() if pool: pool.shutdown() atexit.register(_shutdown_all) class ThreadPool(object): def __init__(self, core_threads=0, max_threads=20, keepalive=1): """ :param core_threads: maximum number of persistent threads in the pool :param max_threads: maximum number of total threads in the pool :param thread_class: callable that creates a Thread object :param keepalive: seconds to keep non-core worker threads waiting for new tasks """ self.core_threads = core_threads self.max_threads = max(max_threads, core_threads, 1) self.keepalive = keepalive self._queue = Queue() self._threads_lock = Lock() self._threads = set() self._shutdown = False _threadpools.add(ref(self)) logger.info('Started thread pool with %d core threads and %s maximum ' 'threads', core_threads, max_threads or 'unlimited') def _adjust_threadcount(self): self._threads_lock.acquire() try: if self.num_threads < self.max_threads: self._add_thread(self.num_threads < self.core_threads) finally: self._threads_lock.release() def _add_thread(self, core): t = Thread(target=self._run_jobs, args=(core,)) t.setDaemon(True) t.start() self._threads.add(t) def _run_jobs(self, core): logger.debug('Started worker thread') block = True timeout = None if not core: block = self.keepalive > 0 timeout = self.keepalive while True: try: func, args, kwargs = 
self._queue.get(block, timeout) except Empty: break if self._shutdown: break try: func(*args, **kwargs) except: logger.exception('Error in worker thread') self._threads_lock.acquire() self._threads.remove(currentThread()) self._threads_lock.release() logger.debug('Exiting worker thread') @property def num_threads(self): return len(self._threads) def submit(self, func, *args, **kwargs): if self._shutdown: raise RuntimeError('Cannot schedule new tasks after shutdown') self._queue.put((func, args, kwargs)) self._adjust_threadcount() def shutdown(self, wait=True): if self._shutdown: return logging.info('Shutting down thread pool') self._shutdown = True _threadpools.remove(ref(self)) self._threads_lock.acquire() for _ in range(self.num_threads): self._queue.put((None, None, None)) self._threads_lock.release() if wait: self._threads_lock.acquire() threads = tuple(self._threads) self._threads_lock.release() for thread in threads: thread.join() def __repr__(self): if self.max_threads: threadcount = '%d/%d' % (self.num_threads, self.max_threads) else: threadcount = '%d' % self.num_threads return '<ThreadPool at %x; threads=%s>' % (id(self), threadcount)
Pues, happy birthday! That’s great and is a sign of their affection for you. Revel in it. You’ll find that people’s lives get bound up in your own and they will become part of the weaving that you call life… Crap. That made me cry a little.
#!/usr/bin/python3
"""randProgram generates random simpleStack programs, and evaluates them.

This is basically a really simple fuzzer for testing that no programs "go
wrong", where "go wrong" means "throw an exception".

Allows one exception to be thrown: MemoryError if the stack grows beyond
10,000 elements. These programs just tend to be randomly generated
fork-bombs, which while technically valid, aren't interesting, and don't
violate the spirit of simpleStack.

Assumes that a 100 line program will reach its final state in 10,000 steps,
which is a completely invalid assumption.
"""
import random
import string

import simpleStack

_DEBUG = False

_SYMBOLS = ["PRINT", "DUP", "INV", "--", "++", "SUB", "MUL", "MOD", "SWP",
            "JNZ", "GET", "PUT"]


def gen_program(min_size, max_size):
    """gen_program generates a random program.

    Returns a list of tokens whose length is in [min_size, max_size); each
    token is either a simpleStack opcode or a random alphanumeric word of
    length 0-9.
    """
    size = random.randrange(min_size, max_size)
    prog = []
    for _ in range(size):
        # Randomly pick if we add a program symbol or random word.
        if random.choice([True, False]):
            prog.append(random.choice(_SYMBOLS))
        else:
            wordlen = random.randrange(0, 10)
            prog.append(''.join(
                random.choice(string.ascii_letters + string.digits)
                for _ in range(wordlen)))
    return prog


def _fake_print(_, end=None):
    """Fake print that does nothing.

    Hoisted to module level: it was previously re-defined on every iteration
    of the main loop.
    """
    del end


if __name__ == "__main__":
    # Generate 10000 programs, or 1 in debug mode
    mem_errors = 0
    num_runs = 1 if _DEBUG else 10000
    for _ in range(num_runs):
        prog = gen_program(10, 100)
        # Run for 10,000 steps (the comment previously said 100,000, which
        # disagreed with max_steps below).  If in debug mode, actually
        # print, otherwise don't.
        if _DEBUG:
            print("\n".join(prog))
        try:
            simpleStack.run_simple_stack(
                prog, max_steps=10000,
                printFun=print if _DEBUG else _fake_print)
        except MemoryError:
            mem_errors += 1
    print("Ran {} runs, with {} memory errors".format(num_runs, mem_errors))
The Mirage 2000 crash in Bengaluru and the tragic fatalities of the two test pilots must make Indian policy-makers recognise and take steps to urgently address the conditions in which the Hindustan Aeronautics Limited (HAL) is run. HAL's below par performance standards - from HF24 to the Gnats, MiG 21s, 23/27s and the Sukhois and now even the Mirages - is an old story. In fact, the end of the financial year is witness to an annual frenzy at the establishment with regard to its slipshod work and slippages in the quality of overhaul and timeframes, servicing, and quality control across the board, for aircraft and equipment alike. The lack of competition within the country and non-participation of the civilian sector have virtually sounded the death knell for this behemoth white elephant public sector undertaking (PSU). HAL held great promise in the 1960s - indeed, at the time, it was considered to have the makings of a 'Navratna' PSU, particularly when the German aircraft designer, Kurt Tank, stepped in to help with the HF-24 Marut. The initial stages of the Light Combat Aircraft's (LCA) conception were world class in terms of contemporary technology and tradecraft. However, the past few decades have seen this premier aircraft manufacturer slip down the abyss and being unable to provide even basic value for money. In today’s competitive era of high-end aviation technology, any venture not able to meet high quality safety and efficiency standards is bound to fall out of reckoning. HAL's recent track record unfortunately points to a sorry state of affairs, devoid of any accountability. The nature of leadership is also a concern. A venture with such a precarious national security charter must display dynamic leadership. Past attempts at bringing in IAF functionaries were later systematically scuttled by the techno-bureaucratic war machine, leaving promotees and political appointees as CEOs. 
The government needs to consider bringing in serving IAF functionaries at higher management levels to enhance accountability. Further, a combination of the public-private partnership (PPP) model and outsourcing to renowned foreign players (such as Israeli Aerospace Industries Limited, Lockheed Martin Corporation or British Aerospace plc) must be considered, and meritocracy brought in from top to bottom to ensure value for money spent.
import json

from django.http import HttpResponse
from django.shortcuts import get_object_or_404
from django.utils.decorators import method_decorator
from django.views.decorators.csrf import ensure_csrf_cookie
from django.views.generic import TemplateView, View

from rsvp.models import Invitation, Guest, Meal, ReplyError


class RSVPView(TemplateView):
    """Render the RSVP page with all meal choices embedded as JSON."""

    template_name = 'rsvp/rsvp_page.html'

    def get_context_data(self, **kwargs):
        # Fixed: accept and forward **kwargs.  TemplateView.get() calls
        # get_context_data(**kwargs) with the URLconf keyword arguments, so
        # a bare (self) signature raises TypeError for parameterized URLs.
        context = super().get_context_data(**kwargs)
        context['meal_choices'] = json.dumps(
            [meal.toJSON() for meal in Meal.objects.all()])
        return context


class InvitationView(View):
    """JSON endpoint to fetch and reply to an invitation by its code."""

    @method_decorator(ensure_csrf_cookie)
    def get(self, request, invite_code):
        """Return the invitation identified by invite_code as JSON (404 if unknown)."""
        invite = get_object_or_404(Invitation, code=invite_code)
        data = invite.toJSON()
        return HttpResponse(json.dumps(data), content_type='application/json')

    @method_decorator(ensure_csrf_cookie)
    def post(self, request, invite_code):
        """Apply a JSON reply payload to the invitation.

        Returns 400 with the error details as JSON when the reply is
        invalid, otherwise an empty 200 response.
        """
        invite = get_object_or_404(Invitation, code=invite_code)
        body_unicode = request.body.decode('utf-8')
        data = json.loads(body_unicode)
        try:
            invite.handle_reply(data)
        except ReplyError as e:
            data = json.dumps(e.errors)
            return HttpResponse(data, status=400, content_type='application/json')
        else:
            return HttpResponse()
Tobermory RNLI hosted three Senior Maritime Operations Officers from Stornoway Coastguard Operations Centre on Saturday 1 September 2018 and everyone was able to put faces to voices. In the event of an incident at sea, it is almost always the staff at Stornoway Coastguard Operations Centre (CGOC) who alert Tobermory RNLI’s launch authorities and request the assistance of Tobermory RNLI’s Severn class lifeboat, Elizabeth Fairlie Ramsey. The launch authorities and boat’s officers are in regular contact with staff at Stornoway CGOC by telephone and VHF not only during ‘shouts’ but also during routine exercises. The three visiting Senior Maritime Operations Officers were treated to tea and cakes in the station and met some of the volunteer crew and launch authorities before having a tour of Tobermory’s all weather lifeboat.
from .invalid_auth import InvalidAuth
from .token_builder import VerifyToken, ExtractToken
from flask import current_app as app, request, jsonify
from functools import wraps


def authenticate(error):
    """Build a 401 JSON response from an InvalidAuth error."""
    resp = jsonify({'code': error.code, 'description': error.description})
    resp.status_code = 401
    return resp


class AuthRouteDecorator:
    """ Helper to provide a decorator to require authorization for a route """

    def __init__(self, UserCls):
        """ Initialize with the UserProxy Class to use """
        self.UserCls = UserCls

    def findUser(self):
        """ Find the User for the current request.

        Raises InvalidAuth when the Authorization header is missing or the
        token fails verification.

        Fixed: this previously caught InvalidAuth and *returned* the 401
        response object, so requires_auth's except clause could never fire
        and the wrapped view received a response object as its `user`
        argument.  The exception is now allowed to propagate.
        """
        auth = request.headers.get('Authorization', None)
        token = ExtractToken(auth)
        data = VerifyToken(token, app.config['SECRET_KEY'])
        # NOTE(review): query.get() returns None for an unknown id — confirm
        # whether views are expected to handle user=None.
        user = self.UserCls.query.get(data['id'])
        return user

    def requires_auth(self, f):
        """Decorator: inject the authenticated user as the `user` kwarg,
        or return a 401 response when authentication fails."""
        @wraps(f)
        def decorated(*args, **kwargs):
            try:
                user = self.findUser()
                kwargs['user'] = user
                return f(*args, **kwargs)
            except InvalidAuth as e:
                return authenticate(e)
        return decorated
What next for the Scottish Tories? | Centre on Constitutional Change l Researching the issues. Informing the debate. Home » Blogs » What next for the Scottish Tories? The Scottish Conservatives have exceeded expectations by winning 13 seats in Scotland. This is the party’s best result in Scotland since 1983. It surpasses their previous tally of 11 MPs in 1992. Like in the Scottish Parliament and local elections, they fought a disciplined campaign on their key messages and were particularly adept at highlighting problems in the SNP’s domestic record. The party’s new group of MPs presents a great opportunity. However, the party’s MPs will want to reflect on their predecessors’ experience. One of the most toxic legacies of the 1990s was the impression among some Scots that Scottish Conservative MPs put the Government’s interest before the Scottish interest. New Scottish Conservative MPs need to be seen to be standing up for Scotland. They are in a good position to influence government policy and the party already appears to be throwing its weight around. However, tougher tests will come. One of the problems for the territorial branches of statewide parties is that they end up having to take the flak for decisions they did not make (see, for instance, the recent Scottish Conservative troubles in the Scottish Parliament about UK Government welfare changes). The 13 Scottish Conservatives will need to decide what issues to prioritise and what they will need to compromise on in order to sustain Theresa May’s majority. Second, the party’s new MPs have an opportunity to think about the Union. The intellectual case for the Union took a battering at the Supreme Court last year when the UK Government’s lawyer reasserted a Diceyan interpretation of Westminster’s constitutional position. Most notably, he argued that the Sewel Convention could be ignored at will. 
The Scottish Conservatives now have a chance to make the case for a more plural interpretation of the constitution and to push the UK Government to engage more constructively with last year’s House of Lords Constitution Committee report on the territorial constitution. Intergovernmental relations, for instance, are ripe for reform and new MPs might usefully promote some incremental ideas in this area (for instance, from their own Adam Tomkins). What does the Union look like when Scottish Conservatives get the chance to shape it? If the answer is ‘no different from now’, then they might find it much harder to defend in future. Finally, the idea of a separate party in Scotland need not necessarily be dismissed following this result. Scottish conservatism already looks different from English conservatism and this divergence may increase over the coming years as Scottish Conservatives turn their thoughts to how they might govern Scotland. Ruth Davidson has already implemented most of Murdo Fraser’s ideas from his 2011 leadership campaign and it would not be out of place to effect this final change. If the Scottish Conservatives are going to be pursuing a distinctively Scottish agenda at Westminster, then they might as well take full credit for it. In doing so, they might also demonstrate how a looser Union can still work in everyone’s favour. Conservatives can enjoy this success, but they must also be mindful of how things went wrong the last time the party had more than 11 MPs. Independence may be off the agenda for the moment, but the issue has not gone away. Scottish Conservatives may therefore regret not using this opportunity to fix the Union’s roof while the weather is at least overcast.
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2016 Red Hat, Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#

ANSIBLE_METADATA = {'metadata_version': '1.0',
                    'status': ['preview'],
                    'supported_by': 'community'}

DOCUMENTATION = '''
---
module: ovirt_nics
short_description: Module to manage network interfaces of Virtual Machines in oVirt
version_added: "2.3"
author: "Ondra Machacek (@machacekondra)"
description:
    - "Module to manage network interfaces of Virtual Machines in oVirt."
options:
    name:
        description:
            - "Name of the network interface to manage."
        required: true
    vm:
        description:
            - "Name of the Virtual Machine to manage."
        required: true
    state:
        description:
            - "Should the Virtual Machine NIC be present/absent/plugged/unplugged."
        choices: ['present', 'absent', 'plugged', 'unplugged']
        default: present
    network:
        description:
            - "Logical network to which the VM network interface should use,
               by default Empty network is used if network is not specified."
    profile:
        description:
            - "Virtual network interface profile to be attached to VM network interface."
    interface:
        description:
            - "Type of the network interface."
        choices: ['virtio', 'e1000', 'rtl8139', 'pci_passthrough', 'rtl8139_virtio', 'spapr_vlan']
        default: 'virtio'
    mac_address:
        description:
            - "Custom MAC address of the network interface, by default it's obtained from MAC pool."
extends_documentation_fragment: ovirt
'''

EXAMPLES = '''
# Examples don't contain auth parameter for simplicity,
# look at ovirt_auth module to see how to reuse authentication:

# Add NIC to VM
- ovirt_nics:
    state: present
    vm: myvm
    name: mynic
    interface: e1000
    mac_address: 00:1a:4a:16:01:56
    profile: ovirtmgmt
    network: ovirtmgmt

# Plug NIC to VM
- ovirt_nics:
    state: plugged
    vm: myvm
    name: mynic

# Unplug NIC from VM
- ovirt_nics:
    state: unplugged
    vm: myvm
    name: mynic

# Remove NIC from VM
- ovirt_nics:
    state: absent
    vm: myvm
    name: mynic
'''

RETURN = '''
id:
    description: ID of the network interface which is managed
    returned: On success if network interface is found.
    type: str
    sample: 7de90f31-222c-436c-a1ca-7e655bd5b60c
nic:
    description: "Dictionary of all the network interface attributes. Network interface attributes can be found on your oVirt instance
                  at following url: https://ovirt.example.com/ovirt-engine/api/model#types/nic."
    returned: On success if network interface is found.
'''

try:
    import ovirtsdk4.types as otypes
except ImportError:
    # Absence of the SDK is reported by check_sdk() with a proper message.
    pass

import traceback

from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ovirt import (
    BaseModule,
    check_sdk,
    create_connection,
    equal,
    get_link_name,
    ovirt_full_argument_spec,
    search_by_name,
)


class VmNicsModule(BaseModule):
    """BaseModule subclass: builds the Nic entity and compares it against
    the existing one to decide whether an update is needed."""

    def __init__(self, *args, **kwargs):
        super(VmNicsModule, self).__init__(*args, **kwargs)
        # Resolved vNIC profile id; set by main() when profile+network match.
        self.vnic_id = None

    @property
    def vnic_id(self):
        return self._vnic_id

    @vnic_id.setter
    def vnic_id(self, vnic_id):
        self._vnic_id = vnic_id

    def build_entity(self):
        """Build the otypes.Nic from module parameters (None fields omitted)."""
        return otypes.Nic(
            name=self._module.params.get('name'),
            interface=otypes.NicInterface(
                self._module.params.get('interface')
            ) if self._module.params.get('interface') else None,
            vnic_profile=otypes.VnicProfile(
                id=self.vnic_id,
            ) if self.vnic_id else None,
            mac=otypes.Mac(
                address=self._module.params.get('mac_address')
            ) if self._module.params.get('mac_address') else None,
        )

    def update_check(self, entity):
        """Return True when the existing NIC already matches the parameters."""
        return (
            equal(self._module.params.get('interface'), str(entity.interface)) and
            equal(self._module.params.get('profile'), get_link_name(self._connection, entity.vnic_profile)) and
            equal(self._module.params.get('mac_address'), entity.mac.address)
        )


def main():
    """Entry point: parse parameters and create/remove/(un)plug the VM NIC."""
    argument_spec = ovirt_full_argument_spec(
        state=dict(
            choices=['present', 'absent', 'plugged', 'unplugged'],
            default='present'
        ),
        vm=dict(required=True),
        name=dict(required=True),
        interface=dict(default=None),
        profile=dict(default=None),
        network=dict(default=None),
        mac_address=dict(default=None),
    )
    module = AnsibleModule(
        argument_spec=argument_spec,
        supports_check_mode=True,
    )
    check_sdk(module)

    # Fixed: `auth` and `connection` were previously first assigned inside
    # the try block, so an early failure (e.g. in create_connection) made
    # the finally clause raise NameError and mask the original error.
    auth = module.params.pop('auth')
    connection = None
    try:
        # Locate the service that manages the virtual machines and use it to
        # search for the NIC:
        connection = create_connection(auth)
        vms_service = connection.system_service().vms_service()

        # Locate the VM, where we will manage NICs:
        vm_name = module.params.get('vm')
        vm = search_by_name(vms_service, vm_name)
        if vm is None:
            raise Exception("VM '%s' was not found." % vm_name)

        # Locate the service that manages the virtual machines NICs:
        vm_service = vms_service.vm_service(vm.id)
        nics_service = vm_service.nics_service()
        vmnics_module = VmNicsModule(
            connection=connection,
            module=module,
            service=nics_service,
        )

        # Find vNIC id of the network interface (if any):
        profile = module.params.get('profile')
        if profile and module.params['network']:
            cluster_name = get_link_name(connection, vm.cluster)
            dcs_service = connection.system_service().data_centers_service()
            dc = dcs_service.list(search='Clusters.name=%s' % cluster_name)[0]
            networks_service = dcs_service.service(dc.id).networks_service()
            network = next(
                (n for n in networks_service.list()
                 if n.name == module.params['network']),
                None
            )
            if network is None:
                raise Exception(
                    "Network '%s' was not found in datacenter '%s'." % (
                        module.params['network'],
                        dc.name
                    )
                )
            for vnic in connection.system_service().vnic_profiles_service().list():
                if vnic.name == profile and vnic.network.id == network.id:
                    vmnics_module.vnic_id = vnic.id

        # Handle appropriate action:
        state = module.params['state']
        if state == 'present':
            ret = vmnics_module.create()
        elif state == 'absent':
            ret = vmnics_module.remove()
        elif state == 'plugged':
            vmnics_module.create()
            ret = vmnics_module.action(
                action='activate',
                action_condition=lambda nic: not nic.plugged,
                wait_condition=lambda nic: nic.plugged,
            )
        elif state == 'unplugged':
            vmnics_module.create()
            ret = vmnics_module.action(
                action='deactivate',
                action_condition=lambda nic: nic.plugged,
                wait_condition=lambda nic: not nic.plugged,
            )

        module.exit_json(**ret)
    except Exception as e:
        module.fail_json(msg=str(e), exception=traceback.format_exc())
    finally:
        # Only close when the connection was actually established.
        if connection is not None:
            connection.close(logout=auth.get('token') is None)


if __name__ == "__main__":
    main()
It’s good to be home. My fellow Americans, Michelle and I have been so touched by all the well-wishes we’ve received over the past few weeks. But tonight it’s my turn to say thanks. Whether we’ve seen eye-to-eye or rarely agreed at all, my conversations with you, the American people – in living rooms and schools; at farms and on factory floors; at diners and on distant outposts – are what have kept me honest, kept me inspired, and kept me going. Every day, I learned from you. You made me a better president, and you made me a better man. I first came to Chicago when I was in my early 20s, still trying to figure out who I was; still searching for a purpose to my life. It was in neighborhoods not far from here where I began working with church groups in the shadows of closed steel mills. It was on these streets where I witnessed the power of faith, and the quiet dignity of working people in the face of struggle and loss. This is where I learned that change only happens when ordinary people get involved, get engaged, and come together to demand it. After eight years as your president, I still believe that. And it’s not just my belief. It’s the beating heart of our American idea – our bold experiment in self-government. It’s the conviction that we are all created equal, endowed by our creator with certain unalienable rights, among them life, liberty, and the pursuit of happiness. It’s the insistence that these rights, while self-evident, have never been self-executing; that we, the people, through the instrument of our democracy, can form a more perfect union. This is the great gift our Founders gave us. The freedom to chase our individual dreams through our sweat, toil, and imagination – and the imperative to strive together as well, to achieve a greater good. For 240 years, our nation’s call to citizenship has given work and purpose to each new generation. 
It’s what led patriots to choose republic over tyranny, pioneers to trek west, slaves to brave that makeshift railroad to freedom. It’s what pulled immigrants and refugees across oceans and the Rio Grande, pushed women to reach for the ballot, powered workers to organize. It’s why GIs gave their lives at Omaha Beach and Iwo Jima; Iraq and Afghanistan – and why men and women from Selma to Stonewall were prepared to give theirs as well. So that’s what we mean when we say America is exceptional. Not that our nation has been flawless from the start, but that we have shown the capacity to change, and make life better for those who follow. If I had told you eight years ago that America would reverse a great recession, reboot our auto industry, and unleash the longest stretch of job creation in our history…if I had told you that we would open up a new chapter with the Cuban people, shut down Iran’s nuclear weapons program without firing a shot, and take out the mastermind of 9/11…if I had told you that we would win marriage equality, and secure the right to health insurance for another 20 million of our fellow citizens – you might have said our sights were set a little too high. But that’s what we did. That’s what you did. You were the change. You answered people’s hopes, and because of you, by almost every measure, America is a better, stronger place than it was when we started. In 10 days, the world will witness a hallmark of our democracy: the peaceful transfer of power from one freely elected president to the next. I committed to President-elect Trump that my administration would ensure the smoothest possible transition, just as President Bush did for me. Because it’s up to all of us to make sure our government can help us meet the many challenges we still face. That’s what I want to focus on tonight – the state of our democracy. Understand, democracy does not require uniformity. Our founders quarreled and compromised, and expected us to do the same. 
But they knew that democracy does require a basic sense of solidarity – the idea that for all our outward differences, we are all in this together; that we rise or fall as one. There have been moments throughout our history that threatened to rupture that solidarity. The beginning of this century has been one of those times. A shrinking world, growing inequality; demographic change and the specter of terrorism – these forces haven’t just tested our security and prosperity, but our democracy as well. And how we meet these challenges to our democracy will determine our ability to educate our kids, and create good jobs, and protect our homeland. Our democracy won’t work without a sense that everyone has economic opportunity. Today, the economy is growing again; wages, incomes, home values, and retirement accounts are rising again; poverty is falling again. The wealthy are paying a fairer share of taxes even as the stock market shatters records. The unemployment rate is near a 10-year low. The uninsured rate has never, ever been lower. Healthcare costs are rising at the slowest rate in 50 years. And if anyone can put together a plan that is demonstrably better than the improvements we’ve made to our healthcare system – that covers as many people at less cost – I will publicly support it. That, after all, is why we serve – to make people’s lives better, not worse. But for all the real progress we’ve made, we know it’s not enough. Our economy doesn’t work as well or grow as fast when a few prosper at the expense of a growing middle class. But stark inequality is also corrosive to our democratic principles. 
While the top 1% has amassed a bigger share of wealth and income, too many families, in inner cities and rural counties, have been left behind – the laid-off factory worker; the waitress and healthcare worker who struggle to pay the bills – convinced that the game is fixed against them, that their government only serves the interests of the powerful – a recipe for more cynicism and polarization in our politics. There are no quick fixes to this long-term trend. I agree that our trade should be fair and not just free. But the next wave of economic dislocation won’t come from overseas. It will come from the relentless pace of automation that makes many good, middle-class jobs obsolete. And so we must forge a new social compact – to guarantee all our kids the education they need; to give workers the power to unionize for better wages; to update the social safety net to reflect the way we live now and make more reforms to the tax code so corporations and individuals who reap the most from the new economy don’t avoid their obligations to the country that’s made their success possible. We can argue about how to best achieve these goals. But we can’t be complacent about the goals themselves. For if we don’t create opportunity for all people, the disaffection and division that has stalled our progress will only sharpen in years to come. There’s a second threat to our democracy – one as old as our nation itself. After my election, there was talk of a post-racial America. Such a vision, however well-intended, was never realistic. For race remains a potent and often divisive force in our society. I’ve lived long enough to know that race relations are better than they were 10, or 20, or 30 years ago – you can see it not just in statistics, but in the attitudes of young Americans across the political spectrum. But we’re not where we need to be. All of us have more work to do. 
After all, if every economic issue is framed as a struggle between a hard-working white middle class and undeserving minorities, then workers of all shades will be left fighting for scraps while the wealthy withdraw further into their private enclaves. If we decline to invest in the children of immigrants, just because they don’t look like us, we diminish the prospects of our own children – because those brown kids will represent a larger share of America’s workforce. And our economy doesn’t have to be a zero-sum game. Last year, incomes rose for all races, all age groups, for men and for women. For blacks and other minorities, it means tying our own struggles for justice to the challenges that a lot of people in this country face – the refugee, the immigrant, the rural poor, the transgender American, and also the middle-aged white man who from the outside may seem like he’s got all the advantages, but who’s seen his world upended by economic, cultural, and technological change. For white Americans, it means acknowledging that the effects of slavery and Jim Crow didn’t suddenly vanish in the ‘60s; that when minority groups voice discontent, they’re not just engaging in reverse racism or practicing political correctness; that when they wage peaceful protest, they’re not demanding special treatment, but the equal treatment our Founders promised. For native-born Americans, it means reminding ourselves that the stereotypes about immigrants today were said, almost word for word, about the Irish, Italians, and Poles. America wasn’t weakened by the presence of these newcomers; they embraced this nation’s creed, and it was strengthened. None of this is easy. For too many of us, it’s become safer to retreat into our own bubbles, whether in our neighborhoods or college campuses or places of worship or our social media feeds, surrounded by people who look like us and share the same political outlook and never challenge our assumptions. 
The rise of naked partisanship, increasing economic and regional stratification, the splintering of our media into a channel for every taste – all this makes this great sorting seem natural, even inevitable. And increasingly, we become so secure in our bubbles that we accept only information, whether true or not, that fits our opinions, instead of basing our opinions on the evidence that’s out there. This trend represents a third threat to our democracy. Politics is a battle of ideas; in the course of a healthy debate, we’ll prioritize different goals, and the different means of reaching them. But without some common baseline of facts; without a willingness to admit new information, and concede that your opponent is making a fair point, and that science and reason matter, we’ll keep talking past each other, making common ground and compromise impossible. Isn’t that part of what makes politics so dispiriting? How can elected officials rage about deficits when we propose to spend money on preschool for kids, but not when we’re cutting taxes for corporations? How do we excuse ethical lapses in our own party, but pounce when the other party does the same thing? It’s not just dishonest, this selective sorting of the facts; it’s self-defeating. Because as my mother used to tell me, reality has a way of catching up with you. Take the challenge of climate change. In just eight years, we’ve halved our dependence on foreign oil, doubled our renewable energy, and led the world to an agreement that has the promise to save this planet. But without bolder action, our children won’t have time to debate the existence of climate change; they’ll be busy dealing with its effects: environmental disasters, economic disruptions, and waves of climate refugees seeking sanctuary. 
It’s that spirit, born of the Enlightenment, that made us an economic powerhouse – the spirit that took flight at Kitty Hawk and Cape Canaveral; the spirit that cures disease and put a computer in every pocket. It’s that spirit – a faith in reason, and enterprise, and the primacy of right over might, that allowed us to resist the lure of fascism and tyranny during the Great Depression, and build a post-World War II order with other democracies, an order based not just on military power or national affiliations but on principles – the rule of law, human rights, freedoms of religion, speech, assembly, and an independent press. That order is now being challenged – first by violent fanatics who claim to speak for Islam; more recently by autocrats in foreign capitals who see free markets, open democracies, and civil society itself as a threat to their power. The peril each poses to our democracy is more far-reaching than a car bomb or a missile. It represents the fear of change; the fear of people who look or speak or pray differently; a contempt for the rule of law that holds leaders accountable; an intolerance of dissent and free thought; a belief that the sword or the gun or the bomb or propaganda machine is the ultimate arbiter of what’s true and what’s right. Because of the extraordinary courage of our men and women in uniform, and the intelligence officers, law enforcement, and diplomats who support them, no foreign terrorist organization has successfully planned and executed an attack on our homeland these past eight years; and although Boston and Orlando remind us of how dangerous radicalization can be, our law enforcement agencies are more effective and vigilant than ever. We’ve taken out tens of thousands of terrorists – including Osama bin Laden. The global coalition we’re leading against ISIL has taken out their leaders, and taken away about half their territory. ISIL will be destroyed, and no one who threatens America will ever be safe. 
To all who serve, it has been the honor of my lifetime to be your Commander-in-Chief. But protecting our way of life requires more than our military. Democracy can buckle when we give in to fear. So just as we, as citizens, must remain vigilant against external aggression, we must guard against a weakening of the values that make us who we are. That’s why, for the past eight years, I’ve worked to put the fight against terrorism on a firm legal footing. That’s why we’ve ended torture, worked to close Gitmo, and reform our laws governing surveillance to protect privacy and civil liberties. That’s why I reject discrimination against Muslim Americans. That’s why we cannot withdraw from global fights – to expand democracy, and human rights, women’s rights, and LGBT rights – no matter how imperfect our efforts, no matter how expedient ignoring such values may seem. For the fight against extremism and intolerance and sectarianism are of a piece with the fight against authoritarianism and nationalist aggression. If the scope of freedom and respect for the rule of law shrinks around the world, the likelihood of war within and between nations increases, and our own freedoms will eventually be threatened. So let’s be vigilant, but not afraid. ISIL will try to kill innocent people. But they cannot defeat America unless we betray our Constitution and our principles in the fight. Rivals like Russia or China cannot match our influence around the world – unless we give up what we stand for, and turn ourselves into just another big country that bullies smaller neighbors. Which brings me to my final point – our democracy is threatened whenever we take it for granted. All of us, regardless of party, should throw ourselves into the task of rebuilding our democratic institutions. When voting rates are some of the lowest among advanced democracies, we should make it easier, not harder, to vote. 
When trust in our institutions is low, we should reduce the corrosive influence of money in our politics, and insist on the principles of transparency and ethics in public service. When Congress is dysfunctional, we should draw our districts to encourage politicians to cater to common sense and not rigid extremes. Our Constitution is a remarkable, beautiful gift. But it’s really just a piece of parchment. It has no power on its own. We, the people, give it power – with our participation, and the choices we make. Whether or not we stand up for our freedoms. Whether or not we respect and enforce the rule of law. America is no fragile thing. But the gains of our long journey to freedom are not assured. In his own farewell address, George Washington wrote that self-government is the underpinning of our safety, prosperity, and liberty, but “from different causes and from different quarters much pains will be taken…to weaken in your minds the conviction of this truth;” that we should preserve it with “jealous anxiety;” that we should reject “the first dawning of every attempt to alienate any portion of our country from the rest or to enfeeble the sacred ties” that make us one. Ultimately, that’s what our democracy demands. It needs you. Not just when there’s an election, not just when your own narrow interest is at stake, but over the full span of a lifetime. If you’re tired of arguing with strangers on the Internet, try to talk with one in real life. If something needs fixing, lace up your shoes and do some organizing. If you’re disappointed by your elected officials, grab a clipboard, get some signatures, and run for office yourself. Show up. Dive in. Persevere. Sometimes you’ll win. Sometimes you’ll lose. Presuming a reservoir of goodness in others can be a risk, and there will be times when the process disappoints you. But for those of us fortunate enough to have been a part of this work, to see it up close, let me tell you, it can energize and inspire. 
And more often than not, your faith in America – and in Americans – will be confirmed. Mine sure has been. Over the course of these eight years, I’ve seen the hopeful faces of young graduates and our newest military officers. I’ve mourned with grieving families searching for answers, and found grace in a Charleston church. I’ve seen our scientists help a paralyzed man regain his sense of touch, and our wounded warriors walk again. I’ve seen our doctors and volunteers rebuild after earthquakes and stop pandemics in their tracks. I’ve seen the youngest of children remind us of our obligations to care for refugees, to work in peace, and above all to look out for each other. That faith I placed all those years ago, not far from here, in the power of ordinary Americans to bring about change – that faith has been rewarded in ways I couldn’t possibly have imagined. I hope yours has, too. Some of you here tonight or watching at home were there with us in 2004, in 2008, in 2012 – and maybe you still can’t believe we pulled this whole thing off. You’re not the only ones. Michelle – for the past 25 years, you’ve been not only my wife and mother of my children, but my best friend. You took on a role you didn’t ask for and made it your own with grace and grit and style and good humor. You made the White House a place that belongs to everybody. And a new generation sets its sights higher because it has you as a role model. You’ve made me proud. You’ve made the country proud. Malia and Sasha, under the strangest of circumstances, you have become two amazing young women, smart and beautiful, but more importantly, kind and thoughtful and full of passion. You wore the burden of years in the spotlight so easily. Of all that I’ve done in my life, I’m most proud to be your dad. To Joe Biden, the scrappy kid from Scranton who became Delaware’s favorite son: You were the first choice I made as a nominee, and the best. 
Not just because you have been a great vice president, but because in the bargain, I gained a brother. We love you and Jill like family, and your friendship has been one of the great joys of our life. To my remarkable staff: For eight years – and for some of you, a whole lot more – I’ve drawn from your energy, and tried to reflect back what you displayed every day: heart, and character, and idealism. I’ve watched you grow up, get married, have kids, and start incredible new journeys of your own. Even when times got tough and frustrating, you never let Washington get the better of you. The only thing that makes me prouder than all the good we’ve done is the thought of all the remarkable things you’ll achieve from here. And to all of you out there – every organizer who moved to an unfamiliar town and every kind family who welcomed them in, every volunteer who knocked on doors, every young person who cast a ballot for the first time, every American who lived and breathed the hard work of change – you are the best supporters and organizers anyone could hope for, and I will forever be grateful. Because, yes, you changed the world. That’s why I leave this stage tonight even more optimistic about this country than I was when we started. Because I know our work has not only helped so many Americans; it has inspired so many Americans – especially so many young people out there – to believe you can make a difference; to hitch your wagon to something bigger than yourselves. This generation coming up – unselfish, altruistic, creative, patriotic – I’ve seen you in every corner of the country. You believe in a fair, just, inclusive America; you know that constant change has been America’s hallmark, something not to fear but to embrace, and you are willing to carry this hard work of democracy forward. You’ll soon outnumber any of us, and I believe as a result that the future is in good hands. 
Unedited transcript of President Obama’s prepared remarks during his farewell address in Chicago, as provided by the White House. [Editor’s note: It’s Omaha Beach, not Obama Beach — thanks for your message, Genevieve!]