# -*- coding: utf-8 -*-
# © 2012-2016 Camptocamp SA
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{'name': 'Switzerland - Payment Slip (BVR/ESR)',
 'summary': 'Print ESR/BVR payment slip with your invoices',
 'version': '10.0.2.1.1',
 'author': "Camptocamp,Odoo Community Association (OCA)",
 'category': 'Localization',
 'website': 'http://www.camptocamp.com',
 'license': 'AGPL-3',
 'depends': [
     'base',
     'account',
     'report',
     'l10n_ch_base_bank',
     'base_transaction_id',  # OCA/bank-statement-reconcile
 ],
 'data': [
     "views/company.xml",
     "views/bank.xml",
     "views/account_invoice.xml",
     "wizard/bvr_import_view.xml",
     "report/report_declaration.xml",
     "security/ir.model.access.csv",
 ],
 'demo': [],
 'test': [],
 'auto_install': False,
 'installable': True,
 'images': [],
 }
info_system = 'http://webpac.lib.nthu.edu.tw/F/'
top_circulations = 'http://www.lib.nthu.edu.tw/guide/topcirculations/index.htm'
top_circulations_bc2007 = 'http://www.lib.nthu.edu.tw/guide/topcirculations/bc2007.htm'
rss_recent_books = 'http://webpac.lib.nthu.edu.tw:8080/nbr/reader/rbn_rss.jsp'
lost_found_url = 'http://adage.lib.nthu.edu.tw/find/search_it.php'
import pyglet
from pyglet.window import key

window = pyglet.window.Window()


@window.event
def on_key_press(symbol, modifiers):
    print('A key was pressed')
    if symbol == key.A:
        print('The "A" key was pressed.')
    elif symbol == key.LEFT:
        print('The left arrow key was pressed.')
    elif symbol == key.ENTER:
        print('The enter key was pressed.')


@window.event
def on_draw():
    window.clear()


pyglet.app.run()
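# A hedged aside (not part of the original snippet): the second argument to
# on_key_press is a bitmask, so individual modifiers can be tested the same
# way, for example:
#
#   if modifiers & key.MOD_SHIFT:
#       print('Shift was held down.')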
        problems = Problems(changes=[])
        review = Review(self.repo, self.pr)
        sha = 'abc123'
        review.publish(problems, sha)

        assert self.pr.create_comment.called, 'Should create a comment'
        msg = ('Could not review pull request. '
               'It may be too large, or contain no reviewable changes.')
        self.pr.create_comment.assert_called_with(msg)

    def test_publish_empty_comment_add_ok_label(self):
        problems = Problems(changes=[])
        config = {'OK_LABEL': 'No lint'}
        review = Review(self.repo, self.pr, config)
        sha = 'abc123'
        review.publish(problems, sha)

        assert self.pr.create_comment.called, 'ok comment should be added.'
        assert self.pr.remove_label.called, 'label should be removed.'
        self.pr.remove_label.assert_called_with(config['OK_LABEL'])
        msg = ('Could not review pull request. '
               'It may be too large, or contain no reviewable changes.')
        self.pr.create_comment.assert_called_with(msg)

    def test_publish_empty_comment_with_comment_status(self):
        config = {
            'PULLREQUEST_STATUS': True,
        }
        problems = Problems(changes=[])
        review = Review(self.repo, self.pr, config)
        sha = 'abc123'
        review.publish(problems, sha)

        assert self.pr.create_comment.called, 'Should create a comment'
        msg = ('Could not review pull request. '
               'It may be too large, or contain no reviewable changes.')
        self.repo.create_status.assert_called_with(
            self.pr.head,
            'error',
            msg)
        self.pr.create_comment.assert_called_with(msg)

    def test_publish_comment_threshold_checks(self):
        fixture = load_fixture('comments_current.json')
        self.pr.review_comments.return_value = map(
            lambda f: GhIssueComment(f),
            json.loads(fixture))

        problems = Problems()
        filename_1 = 'Console/Command/Task/AssetBuildTask.php'
        errors = (
            (filename_1, 117, 'Something bad'),
            (filename_1, 119, 'Something bad'),
        )
        problems.add_many(errors)
        problems.set_changes([1])
        sha = 'abc123'

        review = Review(self.repo, self.pr)
        review.publish_summary = Mock()
        review.publish(problems, sha, 1)

        assert review.publish_summary.called, 'Should have been called.'

    def test_publish_summary(self):
        problems = Problems()
        filename_1 = 'Console/Command/Task/AssetBuildTask.php'
        errors = (
            (filename_1, 117, 'Something bad'),
            (filename_1, 119, 'Something bad'),
        )
        problems.add_many(errors)
        problems.set_changes([1])

        review = Review(self.repo, self.pr)
        review.publish_summary(problems)

        assert self.pr.create_comment.called
        eq_(1, self.pr.create_comment.call_count)

        msg = """There are 2 errors:

* Console/Command/Task/AssetBuildTask.php, line 117 - Something bad
* Console/Command/Task/AssetBuildTask.php, line 119 - Something bad
"""
        self.pr.create_comment.assert_called_with(msg)


class TestProblems(TestCase):

    two_files_json = load_fixture('two_file_pull_request.json')

    # Block offset so lines don't match offsets
    block_offset = load_fixture('pull_request_line_offset.json')

    def setUp(self):
        self.problems = Problems()

    def test_add(self):
        self.problems.add('file.py', 10, 'Not good')
        for item in self.problems:
            print item
        eq_(1, len(self.problems))

        self.problems.add('file.py', 11, 'Not good')
        eq_(2, len(self.problems))
        eq_(2, len(self.problems.all()))
        eq_(2, len(self.problems.all('file.py')))
        eq_(0, len(self.problems.all('not there')))

    def test_add__duplicate_is_ignored(self):
        self.problems.add('file.py', 10, 'Not good')
        eq_(1, len(self.problems))

        self.problems.add('file.py', 10, 'Not good')
        eq_(1, len(self.problems))

    def test_add__same_line_combines(self):
        self.problems.add('file.py', 10, 'Tabs bad')
        self.problems.add('file.py', 10, 'Spaces are good')
        eq_(1, len(self.problems))

        result = self.problems.all()
        expected = 'Tabs bad\nSpaces are good'
        eq_(expected, result[0].body)

    def test_add__same_line_ignores_duplicates(self):
        self.problems.add('file.py', 10, 'Tabs bad')
        self.problems.add('file.py', 10, 'Tabs bad')
        eq_(1, len(self.problems))

        result = self.problems.all()
        expected = 'Tabs bad'
        eq_(expected, result[0].body)

    def test_add__with_base_path(self):
        problems = Problems('/some/path/')
        problems.add('/some/path/file.py', 10, 'Not good')
        eq_([], problems.all('/some/path/file.py'))
        eq_(1, len(problems.all('file.py')))
        eq_(1, len(problems))

    def test_add__with_base_path_no_trailing_slash(self):
        problems = Problems('/some/path')
        problems.add('/some/path/file.py', 10, 'Not good')
        eq_([], problems.all('/some/path/file.py'))
        eq_(1, len(problems.all('file.py')))
        eq_(1, len(problems))

    def test_add__with_diff_containing_block_offset(self):
        res = map(lambda f: PullFile(f), json.loads(self.block_offset))
        changes = DiffCollection(res)

        problems = Problems(changes=changes)
        line_num = 32
        problems.add('somefile.py', line_num, 'Not good')
        eq_(1, len(problems))

        result = problems.all('somefile.py')
        eq_(changes.line_position('somefile.py', line_num),
            result[0].position,
            'Offset should be transformed to match value in changes')

    def test_add_many(self):
        errors = [
            ('some/file.py', 10, 'Thing is wrong'),
            ('some/file.py', 12, 'Not good'),
        ]
        self.problems.add_many(errors)
        result = self.problems.all('some/file.py')
        eq_(2, len(result))
        expected = [
            Comment(errors[0][0], errors[0][1], errors[0][1], errors[0][2]),
            Comment(errors[1][0], errors[1][1], errors[1][1], errors[1][2]),
        ]
        eq_(expected, result)

    def test_limit_to_changes__remove_problems(self):
        res = map(lambda f: PullFile(f), json.loads(self.two_files_json))
        changes = DiffCollection(res)

        # Setup some fake problems.
        filename_1 = 'Console/Command/Task/AssetBuildTask.php'
        errors = (
            (None, None, 'This is a general comment'),
            (filename_1, 117, 'Something bad'),
            (filename_1, 119, 'Something else bad'),
            (filename_1, 130, 'Filtered out, as line is not changed'),
        )
        self.problems.add_many(errors)

        filename_2 = 'Test/test_files/View/Parse/single.ctp'
        errors = (
            (filename_2, 2, 'Filtered out'),
            (filename_2, 3, 'Something bad'),
            (filename_2, 7, 'Filtered out'),
        )
        self.problems.add_many(errors)
        self.problems.set_changes(changes)
        self.problems.limit_to_changes()

        result = self.problems.all(filename_1)
        eq_(2, len(result))
        expected = [
            (None, None, 'This is a general comment'),
            (filename_1, 117, 'Something bad'),
            (filename_1, 119, 'Something else bad')]
        eq_(result.sort(), expected.sort())

        result = self.problems.all(filename_2)
        eq_(1, len(result))
        expected = [
            Comment(filename_2, 3, 3, 'Something bad')
        ]
        eq_(result, expected)

    def test_has_changes(self):
        problems = Problems(changes=None)
        self.assertFalse(problems.has_changes())

        problems = Problems(changes=[1])
        assert problems.has_changes()


def assert_review_comments_created(call_args, errors, sha):
    """ Check that the review comments match the error list. """
    eq_(len(call_args), len(errors), 'Errors and comment counts are off')
    for i, err in enumerate(errors):
        expected = call( c
from typing import TypeVar, Dict, Iterable, Any

T = TypeVar("T")


def foo(values: Dict[T, Iterable[Any]]):
    for e in []:
        values.setdefault(e, undefined)
# -*- coding: utf-8 -*-

# Copyright (C) 2013 Michael Hogg

# This file is part of bonemapy - See LICENSE.txt for information on usage and redistribution

import bonemapy
from distutils.core import setup

setup(
    name = 'bonemapy',
    version = bonemapy.__version__,
    description = 'An ABAQUS plug-in to map bone properties from CT scans to 3D finite element bone/implant models',
    license = 'MIT license',
    keywords = ["ABAQUS", "plug-in", "CT", "finite", "element", "bone", "properties", "python"],
    author = 'Michael Hogg',
    author_email = 'michael.christopher.hogg@gmail.com',
    url = "https://github.com/mhogg/bonemapy",
    download_url = "https://github.com/mhogg/bonemapy/releases",
    classifiers = [
        "Programming Language :: Python",
        "Programming Language :: Python :: 2",
        "Programming Language :: Python :: 2.6",
        "Development Status :: 4 - Beta",
        "Environment :: Other Environment",
        "Environment :: Plugins",
        "Intended Audience :: Healthcare Industry",
        "Intended Audience :: Science/Research",
        "License :: OSI Approved :: MIT License",
        "Operating System :: OS Independent",
        "Topic :: Scientific/Engineering :: Medical Science Apps.",
        "Topic :: Scientific/Engineering :: Visualization",
    ],
    long_description = """
bonemapy is an ABAQUS plug-in that is used to extract bone density, or Hounsfield
Unit (HU) values, from CT scans. The bone density can then be used to setup
heterogeneous material properties for a 3D finite element bone/implant model.

The HU values are extracted at the element integration points. Tri-linear
interpolation is used to calculate the HU values at the location of the
integration points.

bonemapy produces a text file containing the HU values that is formatted so that
it can easily be read using ABAQUS user subroutines that are required to apply
the bone properties. An ABAQUS odb file is also created containing a fieldoutput
representing HU so that the user can quickly visualise the mapped HU values.
""",
)
import logbook

import show_off_web_app.infrastructure.static_cache as static_cache
import pyramid.httpexceptions as exc
from show_off_web_app.infrastructure.supressor import suppress
import show_off_web_app.infrastructure.cookie_auth as cookie_auth
from show_off_web_app.services.account_service import AccountService


class BaseController:
    def __init__(self, request):
        self.request = request
        self.build_cache_id = static_cache.build_cache_id

        log_name = 'Ctrls/' + type(self).__name__.replace("Controller", "")
        self.log = logbook.Logger(log_name)

    @property
    def is_logged_in(self):
        return cookie_auth.get_user_id_via_auth_cookie(self.request) is not None

    # noinspection PyMethodMayBeStatic
    @suppress()
    def redirect(self, to_url, permanent=False):
        if permanent:
            raise exc.HTTPMovedPermanently(to_url)

        raise exc.HTTPFound(to_url)

    @property
    def merged_dicts(self):
        data = dict()
        data.update(self.request.GET)
        data.update(self.request.POST)
        data.update(self.request.matchdict)

        return data

    @property
    def logged_in_user_id(self):
        user_id = cookie_auth.get_user_id_via_auth_cookie(self.request)
        return user_id

    @property
    def logged_in_user(self):
        uid = self.logged_in_user_id
        if not uid:
            return None

        return AccountService.find_account_by_id(uid)
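# Usage sketch: a concrete controller subclasses BaseController and reads
# request data through merged_dicts ('HomeController' and the URL below are
# hypothetical, not part of the original module).
class HomeController(BaseController):
    def index(self):
        if not self.is_logged_in:
            self.redirect('/account/signin')  # raises HTTPFound internally
        name = self.merged_dicts.get('name', 'guest')
        return {'user': self.logged_in_user, 'name': name}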
#!/usr/bin/python
# -*- coding: utf-8 -*-
#-----------------------------------------------------------------
#-- Servo class
#-- Juan Gonzalez-Gomez (obijuan). May-2013
#-----------------------------------------------------------------
#-- Controlling the position of servos from the PC
#-- The Arduino / skymega or another arduino compatible board
#-- should have the firmware FingerServer uploaded
#-----------------------------------------------------------------

import time


class IncorrectAngle(Exception):
    pass


class Servo(object):
    """Servo class. For accessing to all the Servos"""

    def __init__(self, sp, dir=0):
        """Arguments: serial port and servo number"""
        self.sp = sp      #-- Serial device
        self.dir = dir    #-- Servo number
        self._pos = 0     #-- Current pos

    def __str__(self):
        str1 = "Servo: {0}\n".format(self.dir)
        str2 = "Serial port: {0}".format(self.sp.name)
        return str1 + str2

    def set_pos(self, pos):
        """Set the angular servo pos. The pos is an integer number
           in the range [-90, 90]
        """

        #-- Check that the pos is in the range [-90, 90]
        if not (-90 <= pos <= 90):
            raise IncorrectAngle()

        #-- Convert the pos to an integer value
        pos = int(round(pos))

        #-- Build the frame (the servo number is converted to a string
        #-- so it can be concatenated with the position)
        frame = str(self.dir) + str(pos) + "\r"

        #-- Debug
        print(frame)

        #-- Send the frame
        self.sp.write(frame)

        #-- Store the current servo pos
        self._pos = pos

    @property
    def pos(self):
        """Read the current servo pos"""
        return self._pos

    @pos.setter
    def pos(self, value):
        """Set the servo pos"""
        self.set_pos(value)
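#-- Usage sketch (assumption: pyserial is installed; the port name and baud
#-- rate below are hypothetical, and the board must run the FingerServer
#-- firmware).
if __name__ == "__main__":
    import serial

    sp = serial.Serial("/dev/ttyUSB0", 19200)
    servo = Servo(sp, dir=1)
    print(servo)
    servo.pos = 45     #-- Goes through the pos setter -> set_pos()
    time.sleep(1)
    servo.pos = -45
    sp.close()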
import zipfile

try:
    import zlib
    COMPRESSION = zipfile.ZIP_DEFLATED
except:
    COMPRESSION = zipfile.ZIP_STORED

try:
    from cStringIO import StringIO
except ImportError:
    from StringIO import StringIO

from django.core.files.uploadedfile import SimpleUploadedFile


class NotACompressedFile(Exception):
    pass


class CompressedFile(object):
    def __init__(self, file_input=None):
        if file_input:
            self._open(file_input)
        else:
            self._create()

    def _create(self):
        self.descriptor = StringIO()
        self.zf = zipfile.ZipFile(self.descriptor, mode='w')

    def _open(self, file_input):
        try:
            # Is it a file like object?
            file_input.seek(0)
        except AttributeError:
            # If not, try open it.
            self.descriptor = open(file_input, 'r+b')
        else:
            self.descriptor = file_input

        try:
            test = zipfile.ZipFile(self.descriptor, mode='r')
        except zipfile.BadZipfile:
            raise NotACompressedFile
        else:
            test.close()

        self.descriptor.seek(0)
        self.zf = zipfile.ZipFile(self.descriptor, mode='a')

    def add_file(self, file_input, arcname=None):
        try:
            # Is it a file like object?
            file_input.seek(0)
        except AttributeError:
            # If not, keep it
            self.zf.write(file_input, arcname=arcname, compress_type=COMPRESSION)
        else:
            self.zf.writestr(arcname, file_input.read())

    def contents(self):
        return [filename for filename in self.zf.namelist()
                if not filename.endswith('/')]

    def get_content(self, filename):
        return self.zf.read(filename)

    def write(self, filename=None):
        # fix for Linux zip files read in Windows
        for file in self.zf.filelist:
            file.create_system = 0

        self.descriptor.seek(0)

        if filename:
            descriptor = open(filename, 'w')
            descriptor.write(self.descriptor.read())
        else:
            return self.descriptor

    def as_file(self, filename):
        return SimpleUploadedFile(name=filename, content=self.write().read())

    def close(self):
        self.zf.close()
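# Usage sketch (Python 2, matching the StringIO imports above; the file names
# are hypothetical). Closing the archive first flushes the zip central
# directory into the in-memory descriptor before it is copied to disk:
if __name__ == '__main__':
    cf = CompressedFile()                  # no input -> create an empty archive
    cf.add_file('notes.txt', arcname='notes.txt')
    print cf.contents()
    cf.close()
    cf.write('backup.zip')                 # copies the in-memory zip to disk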
import frappe


def execute():
    frappe.reload_doc("core", "doctype", "todo")
    try:
        frappe.db.sql("""update tabToDo set status = if(ifnull(checked,0)=0, 'Open', 'Closed')""")
    except:
        pass
__version__ = "2.0"
#!/usr/bin/env python

# Calder Phillips-Grafflin - WPI/ARC Lab

import rospy
import math
import tf
from tf.transformations import *
from visualization_msgs.msg import *
from geometry_msgs.msg import *


class RobotMarkerPublisher:

    def __init__(self, root_frame, rate):
        self.root_frame = root_frame
        self.rate = rate
        self.marker_pub = rospy.Publisher("robot_markers_debug", Marker)
        rate = rospy.Rate(self.rate)
        while not rospy.is_shutdown():
            self.display_table()
            rate.sleep()

    def display_table(self):
        # Make table top
        marker_msg = Marker()
        marker_msg.type = Marker.CUBE_LIST
        marker_msg.ns = "robot"
        marker_msg.id = 1
        marker_msg.action = Marker.ADD
        marker_msg.lifetime = rospy.Duration(0.0)
        marker_msg.header.stamp = rospy.Time.now()
        marker_msg.header.frame_id = self.root_frame
        marker_msg.scale.x = 0.04
        marker_msg.scale.y = 0.04
        marker_msg.scale.z = 0.02
        marker_msg.color.a = 1.0
        marker_msg.color.r = 1.0
        marker_msg.color.b = 0.0
        marker_msg.color.g = 1.0
        marker_msg.pose.position.x = 0.0
        marker_msg.pose.position.y = 0.0
        marker_msg.pose.position.z = 0.0
        marker_msg.pose.orientation.x = 0.0
        marker_msg.pose.orientation.y = 0.0
        marker_msg.pose.orientation.z = 0.0
        marker_msg.pose.orientation.w = 1.0
        # Make the individual points
        p1 = Point()
        p1.x = 0.0025
        p1.y = 0.0025
        p1.z = -0.01
        p2 = Point()
        p2.x = p1.x
        p2.y = p1.y + 0.04
        p2.z = p1.z
        p3 = Point()
        p3.x = p1.x - 0.04
        p3.y = p1.y
        p3.z = p1.z
        marker_msg.points = [p1, p2, p3]
        marker_msg.colors = [marker_msg.color, marker_msg.color, marker_msg.color]
        self.marker_pub.publish(marker_msg)


if __name__ == "__main__":
    rospy.init_node("robot_marker_debug_publisher")
    rospy.loginfo("Starting the robot marker broadcaster...")
    # Get the parameters from the server
    root_frame = rospy.get_param("~root_frame", "test_robot_frame")
    rate = rospy.get_param("~rate", 10.0)
    RobotMarkerPublisher(root_frame, rate)
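# Run sketch (ROS CLI; the package name "my_markers" is hypothetical, the
# private-parameter remapping syntax is standard rosrun usage):
#   rosrun my_markers robot_marker_publisher.py _root_frame:=base_link _rate:=10.0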
# -*- coding: utf-8 -*-
# Copyright (C) 2013-Today Carlos Eduardo Vercelino - CLVsol
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).

{
    'name': 'Summary',
    'summary': 'Summary Module used by CLVsol Solutions.',
    'version': '12.0.4.0',
    'author': 'Carlos Eduardo Vercelino - CLVsol',
    'category': 'CLVsol Solutions',
    'license': 'AGPL-3',
    'website': 'https://github.com/CLVsol',
    'images': [],
    'depends': [
        'clv_base',
        'clv_global_log',
    ],
    'data': [
        'security/summary_security.xml',
        'security/ir.model.access.csv',
        'views/summary_template_view.xml',
        'views/summary_view.xml',
        'views/summary_log_view.xml',
        'views/file_system_view.xml',
    ],
    'demo': [],
    'test': [],
    'init_xml': [],
    'update_xml': [],
    'installable': True,
    'application': False,
    'active': False,
    'css': [],
}
      ate the `min_diff_loss`. This should only be set if neither
      `sensitive_group_dataset` or `nonsensitive_group_dataset` is passed in.
      Furthermore, the `x` component for every batch should have the same
      structure as that of the `original_dataset` batches' `x` components.

  This function should be used to create the dataset that will be passed to
  `min_diff.keras.MinDiffModel` during training and, optionally, during
  evaluation.

  The inputs should either have both `sensitive_group_dataset` and
  `nonsensitive_group_dataset` passed in and `min_diff_dataset` left unset or
  vice versa. In the case of the former, `min_diff_data` will be built using
  `utils.build_min_diff_dataset`.

  Warning: All input datasets should be batched **before** being passed in.

  Each input dataset must output a tuple in the format used in
  `tf.keras.Model.fit`. Specifically the output must be a tuple of length 1, 2
  or 3 in the form `(x, y, sample_weight)`.

  This output will be parsed internally in the following way:

  ```
  batch = ...  # Batch from any one of the input datasets.
  x, y, sample_weight = tf.keras.utils.unpack_x_y_sample_weight(batch)
  ```

  Every batch from the returned `tf.data.Dataset` will contain one batch from
  each of the input datasets. Each returned batch will be a tuple of
  `(packed_inputs, original_y, original_sample_weight)` matching the length of
  `original_dataset` batches where:

  - `packed_inputs`: is an instance of `utils.MinDiffPackedInputs` containing:

    - `original_inputs`: `x` component taken directly from the
      `original_dataset` batch.
    - `min_diff_data`: batch of data formed from `sensitive_group_dataset` and
      `nonsensitive_group_dataset` (as described in
      `utils.build_min_diff_dataset`) or taken directly from
      `min_diff_dataset`.

  - `original_y`: is the `y` component taken directly from the
    `original_dataset` batch.
  - `original_sample_weight`: is the `sample_weight` component taken directly
    from the `original_dataset` batch.

  `min_diff_data` will be used in `min_diff.keras.MinDiffModel` when
  calculating the `min_diff_loss`. It is a tuple or structure (matching the
  structure of the inputs) of `(min_diff_x, min_diff_membership,
  min_diff_sample_weight)`.

  Caution: If you are passing in `min_diff_dataset` make sure that each
  `min_diff_data` batch contains about the same number of sensitive and
  nonsensitive examples as indicated by `min_diff_membership` (when passing in
  `sensitive_group_dataset` and `nonsensitive_group_dataset` this is
  determined by their batch sizes).

  Returns:
    A `tf.data.Dataset` whose output is a tuple of (`packed_inputs`,
    `original_y`, `original_sample_weight`) matching the output length of
    `original_dataset`.
  """
  # pyformat: enable
  # Either sensitive_group_dataset and nonsensitive_group_dataset are both set
  # and min_diff_dataset is not or vice versa.
  min_diff_dataset_present = min_diff_dataset is not None
  sensitive_dataset_present = sensitive_group_dataset is not None
  nonsensitive_dataset_present = nonsensitive_group_dataset is not None

  # Case where min_diff_dataset is set and the others are not.
  set_to_use_min_diff_dataset = (
      min_diff_dataset_present and
      not (sensitive_dataset_present or nonsensitive_dataset_present))
  # Case where sensitive_group_dataset and nonsensitive_group_dataset are both
  # set and min_diff_dataset is not.
  set_to_construct_min_diff_dataset = (
      (sensitive_dataset_present and nonsensitive_dataset_present) and
      not min_diff_dataset_present)

  if not (set_to_use_min_diff_dataset or set_to_construct_min_diff_dataset):
    raise ValueError(
        "Invalid arguments: You must either pass in only the `min_diff_dataset`"
        " (and leave `sensitive_group_dataset` and `nonsensitive_group_dataset`"
        " as None) or set both `sensitive_group_dataset` and "
        "`nonsensitive_group_dataset` (and leave `min_diff_dataset` as None), "
        "given: \n"
        "\n`sensitive_group_dataset`: {}"
        "\n`nonsensitive_group_dataset`: {}"
        "\n`min_diff_dataset`: {}".format(sensitive_group_dataset,
                                          nonsensitive_group_dataset,
                                          min_diff_dataset))

  # First construct the min_diff_dataset if need be.
  if set_to_construct_min_diff_dataset:
    min_diff_dataset = build_min_diff_dataset(sensitive_group_dataset,
                                              nonsensitive_group_dataset)
  else:
    # validate min_diff_dataset since it was passed in.
    structure_utils.validate_min_diff_structure(
        min_diff_dataset,
        struct_name="min_diff_dataset",
        element_type=tf.data.Dataset)

  dataset = tf.data.Dataset.zip((original_dataset, min_diff_dataset))

  def _map_fn(original_batch, min_diff_batch):
    # Unpack original batch.
    original_x, original_y, original_sample_weight = (
        tf.keras.utils.unpack_x_y_sample_weight(original_batch))

    # Assert that all min_diff_xs have the same structure as original_x.
    # TODO: Should we assert that Tensor shapes are the same (other
    # than number of examples).
    min_diff_xs = [
        tf.keras.utils.unpack_x_y_sample_weight(batch)[0]  # First element is x.
        for batch in structure_utils._flatten_min_diff_structure(min_diff_batch)
    ]
    for min_diff_x in min_diff_xs:
      try:
        tf.nest.assert_same_structure(original_x, min_diff_x)
      except Exception as e:
        raise type(e)(
            "The x component structure of (one of) the `min_diff_dataset`(s) "
            "does not match that of the original x structure (original shown "
            "first): {}".format(e))

    # pack min_diff_batch with original_x
    return _pack_as_original(
        original_batch,
        MinDiffPackedInputs(
            original_inputs=original_x, min_diff_data=min_diff_batch),
        original_y, original_sample_weight)

  # Reshape dataset output.
  return dataset.map(_map_fn)


def _pack_as_original(original_batch, x, y, w):
  """Packs x, y, w while conserving the shape of the original batch."""
  if not isinstance(original_batch, tuple):
    return x
  length = len(original_batch)
  return (x, y, w)[:length]


def _tensor_concat(t1, t2):
  """Concatenates (sparse or dense) tensors."""
  if isinstance(t1, tf.SparseTensor):
    # Ensure SparseTensors have the same non-batch dim before concatenating.
    max_shape = tf.math.maximum(t1.dense_shape[1], t2.dense_shape[1])
    t1 = tf.sparse.reset_shape(t1, [t1.dense_shape[0], max_shape])
    t2 = tf.sparse.reset_shape(t2, [t2.dense_shape[0], max_shape])
    return tf.sparse.concat(axis=0, sp_inputs=[t1, t2])
  else:
    return tf.concat([t1, t2], axis=0)


def build_min_diff_dataset(sensitive_group_dataset,
                           nonsensitive_group_dataset) -> tf.data.Dataset:
  # pyformat: disable
  """Build MinDiff dataset from sensitive and nonsensitive datasets.

  Arguments:
    sensitive_group_dataset: `tf.data.Dataset` or valid MinDiff structure
      (unnested dict) of `tf.data.Dataset`s containing only examples that
      belong to the sensitive group.
    nonsensitive_group_dataset: `tf.data.Dataset` or valid MinDiff structure
      (unnested dict) of `tf.data.Dataset`s containing only examples that do
      **not** belong to the sensitive group.

  This function builds a `tf.data.Dataset` containing examples that are meant
  to only be used when calculating a `min_diff_loss`. This resulting dataset
  will need to be packed with the original dataset used for the original task
  of the model which can be done by calling `utils.pack_min_diff_data`.

  Warning: All input datasets should be batched **before** being passed in.

  Each input dataset must output a tuple in the format used in
  `tf.keras.Model.fit`. Specifically the output must be a tuple of length 1, 2
  or 3 in the form `(x, y, sample_weight)`.

  This output will be parsed internally in the following way:

  ```
  batch = ...  # Batch from any of th
  ("castle_battlement_stairs_a",0,"castle_battlement_stairs_a","bo_castle_battlement_stairs_a", []),
  ("castle_battlement_stairs_b",0,"castle_battlement_stairs_b","bo_castle_battlement_stairs_b", []),
  ("castle_gate_house_a",0,"castle_gate_house_a","bo_castle_gate_house_a", []),
  ("castle_round_tower_a",0,"castle_round_tower_a","bo_castle_round_tower_a", []),
  ("castle_square_keep_a",0,"castle_square_keep_a","bo_castle_square_keep_a", []),
  ("castle_stairs_a",sokf_type_ladder,"castle_stairs_a","bo_castle_stairs_a", []),

  ("castle_drawbridge_open",0,"castle_drawbridges_open","bo_castle_drawbridges_open", []),
  ("castle_drawbridge_closed",0,"castle_drawbridges_closed","bo_castle_drawbridges_closed", []),

  ("spike_group_a",0,"spike_group_a","bo_spike_group_a", []),
  ("spike_a",0,"spike_a","bo_spike_a", []),

  ("belfry_a",sokf_moveable,"belfry_a","bo_belfry_a", []),
  ("belfry_b",sokf_moveable,"belfry_b","bo_belfry_b", []),
  ("belfry_b_platform_a",sokf_moveable,"belfry_b_platform_a","bo_belfry_b_platform_a", []),
  ("belfry_old",0,"belfry_a","bo_belfry_a", []),
  ("belfry_platform_a",sokf_moveable,"belfry_platform_a","bo_belfry_platform_a", []),
  ("belfry_platform_b",sokf_moveable,"belfry_platform_b","bo_belfry_platform_b", []),
  ("belfry_platform_old",0,"belfry_platform_b","bo_belfry_platform_b", []),
  ("belfry_wheel",sokf_moveable,"belfry_wheel",0, []),
  ("belfry_wheel_old",0,"belfry_wheel",0, []),

  ("mangonel",0,"mangonel","bo_mangonel", []),
  ("trebuchet_old",0,"trebuchet_old","bo_trebuchet_old", []),
  ("trebuchet_new",0,"trebuchet_new","bo_trebuchet_old", []),

  ("trebuchet_destructible",sokf_moveable|sokf_show_hit_point_bar|sokf_destructible,"trebuchet_new","bo_trebuchet_old", [
    (ti_on_init_scene_prop,
     [
       (store_trigger_param_1, ":instance_no"),
       (scene_prop_set_hit_points, ":instance_no", 2400),
     ]),
    (ti_on_scene_prop_destroy,
     [
       (play_sound, "snd_dummy_destroyed"),
       (try_begin),
         (this_or_next|multiplayer_is_server),
         (neg|game_in_multiplayer_mode),
         (store_trigger_param_1, ":instance_no"),
         (prop_instance_get_position, pos1, ":instance_no"),
         (particle_system_burst, "psys_dummy_smoke_big", pos1, 100),
         (particle_system_burst, "psys_dummy_straw_big", pos1, 100),
         (position_move_z, pos1, -500),
         (position_rotate_x, pos1, 90),
         (prop_instance_animate_to_position, ":instance_no", pos1, 300), #animate to 6 meters below in 6 second
         (try_begin),
           (eq, "$g_round_ended", 0),
           (scene_prop_get_team, ":scene_prop_team_no", ":instance_no"),
           (try_begin),
             (eq, ":scene_prop_team_no", 0),
             (assign, ":scene_prop_team_no_multiplier", -1),
           (else_try),
             (assign, ":scene_prop_team_no_multiplier", 1),
           (try_end),
           (try_begin),
             (eq, "$g_number_of_targets_destroyed", 0),
             (store_mul, ":target_no_mul_scene_prop_team", ":scene_prop_team_no_multiplier", 2), #2 means destroyed object is a trebuchet
             #for only server itself-----------------------------------------------------------------------------------------------
             (call_script, "script_show_multiplayer_message", multiplayer_message_type_target_destroyed, ":target_no_mul_scene_prop_team"),
             #for only server itself-----------------------------------------------------------------------------------------------
             (get_max_players, ":num_players"),
             (try_for_range, ":player_no", 1, ":num_players"),
               (player_is_active, ":player_no"),
               (multiplayer_send_2_int_to_player, ":player_no", multiplayer_event_show_multiplayer_message, multiplayer_message_type_target_destroyed, ":target_no_mul_scene_prop_team"),
             (try_end),
             (val_add, "$g_number_of_targets_destroyed", 1),
           (else_try),
             (store_mul, ":target_no_mul_scene_prop_team", ":scene_prop_team_no_multiplier", 9), #9 means attackers destroyed all targets
             #for only server itself-----------------------------------------------------------------------------------------------
             (call_script, "script_show_multiplayer_message", multiplayer_message_type_target_destroyed, ":target_no_mul_scene_prop_team"),
             #for only server itself-----------------------------------------------------------------------------------------------
             (get_max_players, ":num_players"),
             (try_for_range, ":player_no", 1, ":num_players"),
               (player_is_active, ":player_no"),
               (multiplayer_send_2_int_to_player, ":player_no", multiplayer_event_show_multiplayer_message, multiplayer_message_type_target_destroyed, ":target_no_mul_scene_prop_team"),
             (try_end),
             (val_add, "$g_number_of_targets_destroyed", 1),
           (try_end),
         (try_end),
         #giving gold for destroying target (for trebuchet)
         #step-1 calculating total damage given to that scene prop
         (assign, ":total_damage_given", 0),
         (get_max_players, ":num_players"),
         (try_for_range, ":player_no", 0, ":num_players"),
           (player_is_active, ":player_no"),
           (try_begin),
             (eq, "spr_trebuchet_destructible", "$g_destructible_target_1"),
             (player_get_slot, ":damage_given", ":player_no", slot_player_damage_given_to_target_1),
           (else_try),
             (player_get_slot, ":damage_given", ":player_no", slot_player_damage_given_to_target_2),
           (try_end),
           (val_add, ":total_damage_given", ":damage_given"),
         (try_end),
         #step-2 sharing 1000 gold (if num active players < 20 then 50 * num active players) to players which gave damage with the damage amounts.
         #(scene_prop_get_max_hit_points, ":max_hit_points", ":instance_no"),
         (assign, ":destroy_money_addition", 0),
         (get_max_players, ":num_players"),
         (try_for_range, ":player_no", 0, ":num_players"),
           (player_is_active, ":player_no"),
           (val_add, ":destroy_money_addition", 50),
         (try_end),
         (try_begin),
           (ge, ":destroy_money_addition", multi_destroy_target_money_add),
           (assign, ":destroy_money_addition", multi_destroy_target_money_add),
         (try_end),
         (val_mul, ":destroy_money_addition", "$g_multiplayer_battle_earnings_multiplier"),
         (val_div, ":destroy_money_addition", 100),
         (get_max_players, ":num_players"),
         (try_for_range, ":player_no", 0, ":num_players"),
           (player_is_active, ":player_no"),
           (try_begin),
             (eq, "spr_trebuchet_destructible", "$g_destructible_target_1"),
             (player_get_slot, ":damage_given", ":player_no", slot_player_damage_given_to_target_1),
           (else_try),
             (player_get_slot, ":damage_given", ":player_no", slot_player_damage_given_to_target_2),
           (try_end),
           (player_get_gold, ":player_gold", ":player_no"),
           #give money to player which helped flag to be owned by new_flag_owner team
           (val_mul, ":damage_given", ":destroy_money_addition"),
           (store_div, ":gold_earned", ":damage_given", ":total_damage_given"),
           (val_add, ":player_gold", ":gold_earned"),
           (player_set_gold, ":player_no", ":player_gold", multi_max_gold_that_can_be_stored),
         (try_end),
       (try_end),
     ]),
    (ti_on_scene_prop_hit,
     [
       (store_trigger_param_1, ":instance_no"),
       (store_trigger_param_2, ":damage"),
       (try_begin),
         (scene_prop_get_hit_points, ":hit_points", ":instance_no"),
         (val_sub, ":hit_points", ":damage"),
         (gt, ":hit_points", 0),
         (play_sound, "snd_dummy_hit"),
       (else_try),
         (neg|multiplayer_is_server),
         (play_sound, "snd_dummy_destroyed"),
       (try_end),
       (try_begin),
         (this_or_next|multiplayer_is_server),
         (neg|game_in_multiplayer_mode),
         (particle_system_burst, "psys_dummy_smoke", pos1, 3),
         (particle_system_burst, "psys_dummy_straw", pos1, 10),
         (set_fixed_point_multiplier, 1),
         (position_get_x, ":attacker_agent_id", pos2),
         (try_begin),
           (ge, ":attacker_agent_id", 0),
           (agent_is_alive, ":attacker_agent_id"),
           (agent_is_human,
import urwid
import logging


class UserInput(object):
    def __init__(self):
        self._viewMap = None
        self._mainLoop = None

    def setMap(self, **viewMap):
        self._viewMap = viewMap

    def setLoop(self, loop):
        self._mainLoop = loop

    def __call__(self, keypress):
        logging.debug('keypress={}'.format(keypress))
        if keypress in ('q', 'Q'):
            raise urwid.ExitMainLoop()
        if type(keypress) is not str:
            return
        if keypress.upper() not in self._viewMap:
            return
        view = self._viewMap[keypress.upper()]
        self._mainLoop.widget = view.widget()
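if __name__ == '__main__':
    # Minimal demo of the handler above (a sketch; the TextView stub is
    # hypothetical -- any object exposing a widget() method works as a view).
    class TextView(object):
        def __init__(self, text):
            self._widget = urwid.Filler(urwid.Text(text))

        def widget(self):
            return self._widget

    main_view = TextView('main view (press H for help, Q to quit)')
    help_view = TextView('help view (press M for main, Q to quit)')

    user_input = UserInput()
    user_input.setMap(M=main_view, H=help_view)
    loop = urwid.MainLoop(main_view.widget(), unhandled_input=user_input)
    user_input.setLoop(loop)
    loop.run()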
#
# SVC (SVM Multi classifier)
#
# @ author becxer
# @ e-mail becxer87@gmail.com
#

import numpy as np

from pytrain.SVM import SVM
from pytrain.lib import convert
from pytrain.lib import ptmath


class SVC:

    def __init__(self, mat_data, label_data):
        self.x = np.mat(convert.list2npfloat(mat_data))
        self.ys = np.mat(np.sign(convert.list2npfloat(label_data) - 0.5))
        self.outbit = self.ys.shape[1]
        self.svm4bit = []
        for i in range(self.outbit):
            self.svm4bit.append(SVM(self.x, self.ys[:, i]))

    def fit(self, C, toler, epoch, kernel='Linear', kernel_params={}):
        for i in range(self.outbit):
            self.svm4bit[i].fit(C, toler, epoch, kernel, kernel_params)

    def predict(self, array_input):
        array_input = np.mat(convert.list2npfloat(array_input))
        output = []
        for i in range(self.outbit):
            output.append(self.svm4bit[i].predict(array_input))
        return list(np.sign(np.array(output) + 1))
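# Usage sketch (assumption: pytrain's SVM/convert behave as used above; the
# hyperparameter values are illustrative only). Each output bit gets its own
# one-vs-rest SVM, so a two-bit label encodes two binary targets:
if __name__ == "__main__":
    x = [[0, 0], [0, 1], [1, 0], [1, 1]]
    # Two output bits per sample: [AND(a, b), OR(a, b)]
    y = [[0, 0], [0, 1], [0, 1], [1, 1]]
    clf = SVC(x, y)
    clf.fit(C=1.0, toler=0.001, epoch=40)
    print(clf.predict([1, 1]))  # expected: [1.0, 1.0]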
import numpy as np
from sklearn.preprocessing import LabelEncoder
from sklearn.preprocessing.label import _check_numpy_unicode_bug
from sklearn.utils import column_or_1d

from ..base import SparkBroadcasterMixin, SparkTransformerMixin


class SparkLabelEncoder(LabelEncoder, SparkTransformerMixin,
                        SparkBroadcasterMixin):

    """Encode labels with value between 0 and n_classes-1.

    Read more in the :ref:`User Guide <preprocessing_targets>`.

    Attributes
    ----------
    classes_ : array of shape (n_class,)
        Holds the label for each class.

    Examples
    --------
    `SparkLabelEncoder` can be used to normalize labels.

    >>> from splearn.preprocessing import SparkLabelEncoder
    >>> from splearn import BlockRDD
    >>>
    >>> data = ["paris", "paris", "tokyo", "amsterdam"]
    >>> y = BlockRDD(sc.parallelize(data))
    >>>
    >>> le = SparkLabelEncoder()
    >>> le.fit(y)
    >>> le.classes_
    array(['amsterdam', 'paris', 'tokyo'], dtype='|S9')
    >>>
    >>> test = ["tokyo", "tokyo", "paris"]
    >>> y_test = BlockRDD(sc.parallelize(test))
    >>>
    >>> le.transform(y_test).toarray()
    array([2, 2, 1])
    >>>
    >>> test = [2, 2, 1]
    >>> y_test = BlockRDD(sc.parallelize(test))
    >>>
    >>> le.inverse_transform(y_test).toarray()
    array(['tokyo', 'tokyo', 'paris'], dtype='|S9')
    """

    __transient__ = ['classes_']

    def fit(self, y):
        """Fit label encoder

        Parameters
        ----------
        y : ArrayRDD (n_samples,)
            Target values.

        Returns
        -------
        self : returns an instance of self.
        """
        def mapper(y):
            y = column_or_1d(y, warn=True)
            _check_numpy_unicode_bug(y)
            return np.unique(y)

        def reducer(a, b):
            return np.unique(np.concatenate((a, b)))

        self.classes_ = y.map(mapper).reduce(reducer)
        return self

    def fit_transform(self, y):
        """Fit label encoder and return encoded labels

        Parameters
        ----------
        y : ArrayRDD [n_samples]
            Target values.

        Returns
        -------
        y : ArrayRDD [n_samples]
        """
        return self.fit(y).transform(y)

    def transform(self, y):
        """Transform labels to normalized encoding.

        Parameters
        ----------
        y : ArrayRDD [n_samples]
            Target values.

        Returns
        -------
        y : ArrayRDD [n_samples]
        """
        mapper = super(SparkLabelEncoder, self).transform
        mapper = self.broadcast(mapper, y.context)
        return y.transform(mapper)

    def inverse_transform(self, y):
        """Transform labels back to original encoding.

        Parameters
        ----------
        y : numpy array of shape [n_samples]
            Target values.

        Returns
        -------
        y : ArrayRDD [n_samples]
        """
        mapper = super(SparkLabelEncoder, self).inverse_transform
        mapper = self.broadcast(mapper, y.context)
        return y.transform(mapper)
"""grace Revision ID: 3d30c324ed4 Revises: 8c78a916f1 Create Date: 2015-09-07 08:51:46.375707 """ # revision identifiers, used by Alembic. revision = '3d30c324ed4' down_revision = '8c78a916f1' branch_labels = None depends_on = None from alembic import op import sqlalchemy as sa def upgrade(): ### commands auto generated by Alembic - please adjust! ### pass #
## end Alembic commands ### def downgrade(): ### commands auto generated by Alembic - please adjust! ### pass ### end Alembic com
mands ###
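# Usage sketch (Alembic CLI): apply this (empty) revision with
#   alembic upgrade 3d30c324ed4
# and revert it with
#   alembic downgrade 8c78a916f1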
#!/usr/bin/env python network_device = { 'ip_addr' : '81.1.1.3', 'username' : 'user1', 'passwd' : 'pass
123', 'vendor' : 'cisco', 'model' : '3940', } for k,v in network_device.items(): print k,v network_device['passwd']='newpass' netwo
rk_device['secret']='enable' for k,v in network_device.items(): print k,v try: print network_device['device_type'] except KeyError: print "Device type not found\n"
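# A hedged alternative to the try/except above: dict.get() returns a default
# instead of raising KeyError.
print network_device.get('device_type', 'Device type not found')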
from OpenGLCffi.GLES2 import params


@params(api='gles2', prms=['n', 'ids'])
def glGenQueriesEXT(n, ids):
    pass


@params(api='gles2', prms=['n', 'ids'])
def glDeleteQueriesEXT(n, ids):
    pass


@params(api='gles2', prms=['id'])
def glIsQueryEXT(id):
    pass


@params(api='gles2', prms=['target', 'id'])
def glBeginQueryEXT(target, id):
    pass


@params(api='gles2', prms=['target'])
def glEndQueryEXT(target):
    pass


@params(api='gles2', prms=['target', 'pname', 'params'])
def glGetQueryivEXT(target, pname):
    pass


@params(api='gles2', prms=['id', 'pname', 'params'])
def glGetQueryObjectuivEXT(id, pname):
    pass
            # Create the directory to send data to/from gem5 system
            self.logger.info("Creating temporary directory for interaction "
                             " with gem5 via virtIO: {}"
                             .format(self.gem5_interact_dir))
            os.mkdir(self.gem5_interact_dir)

            # Create the directory for gem5 output (stats files etc)
            if not os.path.exists(self.stats_directory):
                os.mkdir(self.stats_directory)
            if os.path.exists(self.gem5_out_dir):
                raise TargetStableError("The gem5 stats directory {} already "
                                        "exists.".format(self.gem5_out_dir))
            else:
                os.mkdir(self.gem5_out_dir)

            # We need to redirect the standard output and standard error for the
            # gem5 process to a file so that we can debug when things go wrong.
            f = os.path.join(self.gem5_out_dir, 'stdout')
            self.stdout_file = open(f, 'w')
            f = os.path.join(self.gem5_out_dir, 'stderr')
            self.stderr_file = open(f, 'w')
            # We need to keep this so we can check which port to use for the
            # telnet connection.
            self.stderr_filename = f

            # Start gem5 simulation
            self.logger.info("Starting the gem5 simulator")

            command_line = "{} --outdir={} {} {}".format(self.gem5args_binary,
                                                         quote(self.gem5_out_dir),
                                                         self.gem5args_args,
                                                         self.gem5args_virtio)
            self.logger.debug("gem5 command line: {}".format(command_line))
            self.gem5 = subprocess.Popen(shlex.split(command_line),
                                         stdout=self.stdout_file,
                                         stderr=self.stderr_file)
        else:
            # The simulation should already be running
            # Need to dig up the (1) gem5 simulation in question (2) its input
            # and output directories (3) virtio setting
            self._intercept_existing_gem5()

        # As the gem5 simulation is running now or was already running
        # we now need to find out which telnet port it uses
        self._intercept_telnet_port()

    def _intercept_existing_gem5(self):
        """
        Intercept the information about a running gem5 simulation
        e.g. pid, input directory etc
        """
        self.logger.error("This functionality is not yet implemented")
        raise TargetStableError()

    def _intercept_telnet_port(self):
        """
        Intercept the telnet port of a running gem5 simulation
        """

        if self.gem5 is None:
            raise TargetStableError('The platform has no gem5 simulation! '
                                    'Something went wrong')
        while self.gem5_port is None:
            # Check that gem5 is running!
            if self.gem5.poll():
                message = "The gem5 process has crashed with error code {}!\n\tPlease see {} for details."
                raise TargetStableError(message.format(self.gem5.poll(),
                                                       self.stderr_file.name))

            # Open the stderr file
            with open(self.stderr_filename, 'r') as f:
                for line in f:
                    # Look for two different strings, exact wording depends on
                    # version of gem5
                    m = re.search(r"Listening for system connection on port (?P<port>\d+)", line)
                    if not m:
                        m = re.search(r"Listening for connections on port (?P<port>\d+)", line)
                    if m:
                        port = int(m.group('port'))
                        if port >= 3456 and port < 5900:
                            self.gem5_port = port
                            break
                    # Check if the sockets are not disabled
                    m = re.search(r"Sockets disabled, not accepting terminal connections", line)
                    if m:
                        raise TargetStableError("The sockets have been disabled!"
                                                "Pass --listener-mode=on to gem5")
                else:
                    time.sleep(1)

    def init_target_connection(self, target):
        """
        Update the type of connection in the target from here
        """
        if target.os == 'linux':
            target.conn_cls = LinuxGem5Connection
        else:
            target.conn_cls = AndroidGem5Connection

    def setup(self, target):
        """
        Deploy m5 if not yet installed
        """
        m5_path = self._deploy_m5(target)
        target.conn.m5_path = m5_path

        # Set the terminal settings for the connection to gem5
        self._resize_shell(target)

    def update_from_target(self, target):
        """
        Set the m5 path and if not yet installed, deploy m5
        Overwrite certain methods in the target that either can be done
        more efficiently by gem5 or don't exist in gem5
        """
        m5_path = target.get_installed('m5')
        if m5_path is None:
            m5_path = self._deploy_m5(target)
        target.conn.m5_path = m5_path

        # Overwrite the following methods (monkey-patching)
        self.logger.debug("Overwriting the 'capture_screen' method in target")
        # Housekeeping to prevent recursion
        setattr(target, 'target_impl_capture_screen', target.capture_screen)
        target.capture_screen = types.MethodType(_overwritten_capture_screen, target)
        self.logger.debug("Overwriting the 'reset' method in target")
        target.reset = types.MethodType(_overwritten_reset, target)
        self.logger.debug("Overwriting the 'reboot' method in target")
        target.reboot = types.MethodType(_overwritten_reboot, target)

        # Call the general update_from_target implementation
        super(Gem5SimulationPlatform, self).update_from_target(target)

    def gem5_capture_screen(self, filepath):
        file_list = os.listdir(self.gem5_out_dir)
        screen_caps = []
        for f in file_list:
            if '.bmp' in f:
                screen_caps.append(f)
        if '{ts}' in filepath:
            cmd = '{} date -u -Iseconds'
            # pylint: disable=no-member
            ts = self.target.execute(cmd.format(self.target.busybox)).strip()
            filepath = filepath.format(ts=ts)

        successful_capture = False
        if len(screen_caps) == 1:
            # Bail out if we do not have image, and resort to the slower, built
            # in method.
            try:
                import Image
                gem5_image = os.path.join(self.gem5_out_dir, screen_caps[0])
                temp_image = os.path.join(self.gem5_out_dir, "file.png")
                im = Image.open(gem5_image)
                im.save(temp_image, "PNG")
                shutil.copy(temp_image, filepath)
                os.remove(temp_image)
                # pylint: disable=undefined-variable
                gem5_logger.info("capture_screen: using gem5 screencap")
                successful_capture = True
            except (shutil.Error, ImportError, IOError):
                pass

        return successful_capture

    # pylint: disable=no-self-use
    def _deploy_m5(self, target):
        # m5 is not yet installed so install it
        host_executable = os.path.join(PACKAGE_BIN_DIRECTORY,
                                       target.abi, 'm5')
        return target.install(host_executable)

    # pylint: disable=no-self-use
    def _resize_shell(self, target):
        """
        Resize the shell to avoid line wrapping issues.
        """
        # Try and avoid line wrapping as much as possible.
        target.execute('{} stty columns 1024'.format(target.busybox))
        target.execute('reset', check_exit_code=False)


# Methods that will be monkey-patched onto the target
def _overwritten_reset(self):  # pylint: disable=unused-argument
    raise TargetStableError('Resetting is not allowed on gem5 platforms!')


def _overwritten_reboot(self):  # pylin
import pytest, sys, os
sys.path.append(os.path.dirname(os.path.realpath(__file__)) + "/../")

from unittest import TestCase

from pylogic.case import Case


class TestBaseOperand(TestCase):

    def test_eq_case(self):
        case1 = Case("parent", "homer", "bart")
        case2 = Case("parent", "homer", "bart")
        assert case1 == case2

    def test_not_eq_case1(self):
        case1 = Case("parent", "homer", "bart")
        case2 = Case("parent", "homer", "lisa")
        assert case1 != case2

    def test_not_eq_case2(self):
        case1 = Case("parent", "homer", "bart")
        case2 = Case("brother", "homer", "lisa")
        assert case1 != case2
import unittest

from prtgcli.cli import main


class TestQuery(unittest.TestCase):

    def setUp(self):
        pass

    def test_list_devices(self):
        pass

    def test_list_sensors(self):
        pass

    def test_status(self):
        pass

    def test_update(self):
        pass
# -*- coding: utf-8 -*-
# Generated by Django 1.9.4 on 2016-04-26 16:53
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('pppcemr', '0122_auto_20160425_1327'),
    ]

    operations = [
        migrations.AddField(
            model_name='treatment',
            name='height_cm',
            field=models.FloatField(blank=True, help_text='cm', null=True),
        ),
        migrations.AlterField(
            model_name='treatment',
            name='weight_kg',
            field=models.FloatField(blank=True, help_text='kg', null=True),
        ),
    ]
# TempConv.py

# Celsius to Fahrenheit
def Fahreinheit(temp):
    temp = float(temp)
    temp = (temp * 9 / 5) + 32
    return temp


# Fahrenheit to Celsius
def Celcius(temp):
    temp = float(temp)
    temp = (temp - 32) * 5 / 9
    return temp
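# Quick sanity checks for the two conversions (the function names keep the
# module's original spelling):
if __name__ == "__main__":
    print(Fahreinheit(100))  # 212.0 (boiling point of water)
    print(Celcius(32))       # 0.0 (freezing point of water)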
################################################################################
#
# FDAction class and related functions
#
################################################################################


class FDAction(FDReady):
    """
    A task that yields an instance of this class will be suspended
    until an I/O operation on a specified file descriptor is complete.
    """

    def __init__(self, fd, func, args=(), kwargs={},
                 read=False, write=False, exc=False):
        """
        Resume the yielding task when fd is ready for reading, writing,
        and/or "exceptional" condition handling.  fd can be any object
        accepted by select.select() (meaning an integer or an object
        with a fileno() method that returns an integer).  Any exception
        raised by select() due to fd will be re-raised in the yielding
        task.

        The value of the yield expression will be the result of calling
        func with the specified args and kwargs (which presumably
        performs a read, write, or other I/O operation on fd).  If func
        raises an exception, it will be re-raised in the yielding task.
        Thus, FDAction is really just a convenient subclass of FDReady
        that requests that the task manager perform an I/O operation on
        the calling task's behalf.

        If kwargs contains a timeout argument that is not None, a
        Timeout exception will be raised in the yielding task if fd is
        not ready after timeout seconds have elapsed.
        """

        timeout = kwargs.pop('timeout', None)
        super(FDAction, self).__init__(fd, read, write, exc, timeout)
        self.func = func
        self.args = args
        self.kwargs = kwargs

    def _eval(self):
        return self.func(*(self.args), **(self.kwargs))


def read(fd, *args, **kwargs):
    """
    A task that yields the result of this function will be resumed
    when fd is readable, and the value of the yield expression will
    be the result of reading from fd.  If a timeout keyword is given
    and is not None, a Timeout exception will be raised in the
    yielding task if fd is not readable after timeout seconds have
    elapsed.  Other arguments will be passed to the read function
    (os.read() if fd is an integer, fd.read() otherwise).

    For example:

        try:
            data = (yield read(fd, 1024, timeout=5))
        except Timeout:
            # No data after 5 seconds
    """

    func = (partial(os.read, fd) if _is_file_descriptor(fd) else fd.read)
    return FDAction(fd, func, args, kwargs, read=True)


def readline(fd, *args, **kwargs):
    """
    A task that yields the result of this function will be resumed
    when fd is readable, and the value of the yield expression will
    be the result of reading a line from fd.  If a timeout keyword is
    given and is not None, a Timeout exception will be raised in the
    yielding task if fd is not readable after timeout seconds have
    elapsed.  Other arguments will be passed to fd.readline().

    For example:

        try:
            data = (yield readline(fd, timeout=5))
        except Timeout:
            # No data after 5 seconds
    """

    return FDAction(fd, fd.readline, args, kwargs, read=True)


def write(fd, *args, **kwargs):
    """
    A task that yields the result of this function will be resumed
    when fd is writable, and the value of the yield expression will
    be the result of writing to fd.  If a timeout keyword is given
    and is not None, a Timeout exception will be raised in the
    yielding task if fd is not writable after timeout seconds have
    elapsed.  Other arguments will be passed to the write function
    (os.write() if fd is an integer, fd.write() otherwise).

    For example:

        try:
            nbytes = (yield write(fd, data, timeout=5))
        except Timeout:
            # Can't write after 5 seconds
    """

    func = (partial(os.write, fd) if _is_file_descriptor(fd) else fd.write)
    return FDAction(fd, func, args, kwargs, write=True)


def accept(sock, *args, **kwargs):
    """
    A task that yields the result of this function will be resumed
    when sock is readable, and the value of the yield expression will
    be the result of accepting a new connection on sock.  If a
    timeout keyword is given and is not None, a Timeout exception
    will be raised in the yielding task if sock is not readable after
    timeout seconds have elapsed.  Other arguments will be passed to
    sock.accept().

    For example:

        try:
            conn, address = (yield accept(sock, timeout=5))
        except Timeout:
            # No connections after 5 seconds
    """

    return FDAction(sock, sock.accept, args, kwargs, read=True)


def recv(sock, *args, **kwargs):
    """
    A task that yields the result of this function will be resumed
    when sock is readable, and the value of the yield expression will
    be the result of receiving from sock.  If a timeout keyword is
    given and is not None, a Timeout exception will be raised in the
    yielding task if sock is not readable after timeout seconds have
    elapsed.  Other arguments will be passed to sock.recv().

    For example:

        try:
            data = (yield recv(sock, 1024, timeout=5))
        except Timeout:
            # No data after 5 seconds
    """

    return FDAction(sock, sock.recv, args, kwargs, read=True)


def recvfrom(sock, *args, **kwargs):
    """
    A task that yields the result of this function will be resumed
    when sock is readable, and the value of the yield expression will
    be the result of receiving from sock.  If a timeout keyword is
    given and is not None, a Timeout exception will be raised in the
    yielding task if sock is not readable after timeout seconds have
    elapsed.  Other arguments will be passed to sock.recvfrom().

    For example:

        try:
            data, address = (yield recvfrom(sock, 1024, timeout=5))
        except Timeout:
            # No data after 5 seconds
    """

    return FDAction(sock, sock.recvfrom, args, kwargs, read=True)


def send(sock, *args, **kwargs):
    """
    A task that yields the result of this function will be resumed
    when sock is writable, and the value of the yield expression will
    be the result of sending to sock.  If a timeout keyword is given
    and is not None, a Timeout exception will be raised in the
    yielding task if sock is not writable after timeout seconds have
    elapsed.  Other arguments will be passed to the sock.send().

    For example:

        try:
            nsent = (yield send(sock, data, timeout=5))
        except Timeout:
            # Can't send after 5 seconds
    """

    return FDAction(sock, sock.send, args, kwargs, write=True)


def sendto(sock, *args, **kwargs):
    """
    A task that yields the result of this function will be resumed
    when sock is writable, and the value of the yield expression will
    be the result of sending to sock.  If a timeout keyword is given
    and is not None, a Timeout exception will be raised in the
    yielding task if sock is not writable after timeout seconds have
    elapsed.  Other arguments will be passed to the sock.sendto().

    For example:

        try:
            nsent = (yield sendto(sock, data, address, timeout=5))
        except Timeout:
            # Can't send after 5 seconds
    """

    return FDAction(sock, sock.sendto, args, kwargs, write=True)


################################################################################
#
# Queue and _QueueAction classes
#
################################################################################


class Queue(object):
    """
    A multi-producer, multi-consumer FIFO queue (similar to
    Queue.Queue) that can be used for exchanging data between tasks
    """

    def __init__(self, contents=(), maxsize=0):
        """
        Create a new Queue instance.  contents is a sequence (empty by
        default) containing the initial contents of the queue.  If
        maxsize is greater than 0, the queue will hold a maximum of
        maxsize items, and put() will block until space is available in
        the queue.
        """

        self.maxsize = int(maxsize)
        self._queue = collections.deque(contents)
    except Exception as e:
        pass

    # Start printing stuff again.
    settings["verbose"] = True
    os.chdir(cwd)
    return _d


def noaa(D="", path="", wds_url="", lpd_url="", version=""):
    """
    Convert between NOAA and LiPD files

    | Example: LiPD to NOAA converter
    | 1: L = lipd.readLipd()
    | 2: lipd.noaa(L, "/Users/someuser/Desktop", "https://www1.ncdc.noaa.gov/pub/data/paleo/pages2k/NAm2kHydro-2017/noaa-templates/data-version-1.0.0", "https://www1.ncdc.noaa.gov/pub/data/paleo/pages2k/NAm2kHydro-2017/data-version-1.0.0", "v1-1.0.0")

    | Example: NOAA to LiPD converter
    | 1: lipd.readNoaa()
    | 2: lipd.noaa()

    :param dict D: Metadata
    :param str path: Path where output files will be written to
    :param str wds_url: WDSPaleoUrl, where NOAA template file will be stored on NOAA's FTP server
    :param str lpd_url: URL where LiPD file will be stored on NOAA's FTP server
    :param str version: Version of the dataset
    :return none:
    """
    global files, cwd

    try:
        # When going from NOAA to LPD, use the global "files" variable.
        # When going from LPD to NOAA, use the data from the LiPD Library.
        # Choose the mode
        _mode = noaa_prompt()

        # LiPD mode: Convert LiPD files to NOAA files
        if _mode == "1":
            # _project, _version = noaa_prompt_1()
            if not version or not lpd_url:
                print("Missing parameters: Please try again and provide all parameters.")
                return
            if not D:
                print("Error: LiPD data must be provided for LiPD -> NOAA conversions")
            else:
                try:
                    os.mkdir("noaa_files")
                except FileExistsError:
                    pass
                if "paleoData" in D:
                    _d = copy.deepcopy(D)
                    D = lpd_to_noaa(_d, wds_url, lpd_url, version, path)
                else:
                    # For each LiPD file in the LiPD Library
                    for dsn, dat in D.items():
                        _d = copy.deepcopy(dat)
                        # Process this data through the converter
                        _d = lpd_to_noaa(_d, wds_url, lpd_url, version, path)
                        # Overwrite the data in the LiPD object with our new data.
                        D[dsn] = _d
                # If no wds url is provided, then remove instances from jsonld metadata
                if not wds_url:
                    D = rm_wds_url(D)
                # Write out the new LiPD files, since they now contain the new NOAA URL data
                if path:
                    writeLipd(D, path)
                else:
                    print("Path not provided. Writing to CWD...")
                    writeLipd(D, cwd)

        # NOAA mode: Convert NOAA files to LiPD files
        elif _mode == "2":
            # Pass through the global files list. Use NOAA files directly on disk.
            noaa_to_lpd(files)

        else:
            print("Invalid input. Try again.")

    except Exception as e:
        pass
        # Placeholder to catch errors so we can always chdir back to cwd

    os.chdir(cwd)
    return


def doi(D, force=False):
    """
    Use the DOI id stored in the LiPD publication data to fetch new information
    from the DOI.org using their API. Merge the results with the existing data.
    This process will open the LiPD files on your computer, and overwrite them
    when done. This will not affect LiPD data currently loaded into memory.

    | Example
    | 1: D = lipd.readLipd()
    | 2: D = lipd.doi(D)
    |
    | DOI location : D["pub"][0]["doi"]

    :param dict D: Metadata, either a single dataset or multiple datasets sorted by dataset name.
    :param bool force: Force DOIs to update even if they have previously been processed. Default is False.
    :return dict D: Metadata, with all publication data updated where possible
    """
    global cwd
    try:
        D = doi_main(D, force)
    except Exception as e:
        pass
    os.chdir(cwd)
    return D


def fetchDoiWithCsv(csv_source, write_file=True):
    """
    Retrieve DOI publication data for a list of DOI IDs that are stored in a
    CSV file. No LiPD files needed. This process uses the DOI.org API for data.

    :param str csv_source: The path to the CSV file stored on your computer
    :param bool write_file: Write the results to a JSON file (default) or print the results to the console.
    :return none:
    """
    global cwd
    try:
        update_dois(csv_source, write_file)
    except Exception as e:
        pass
        # Placeholder to catch errors so we can always chdir back to cwd
    os.chdir(cwd)
    return


def validate(D, detailed=True):
    """
    Use the Validator API for lipd.net to validate all LiPD files in the LiPD
    Library. Display the PASS/FAIL results. Display detailed results if the
    option is chosen.

    :param dict D: Metadata (single or multiple datasets)
    :param bool detailed: Show or hide the detailed results of each LiPD file. Shows warnings and errors.
    :return none:
    """
    print("\n")
    # Fetch new results by calling lipd.net/api/validator (costly, may take a while)
    print("Fetching results from validator at lipd.net/validator... this may take a few moments.\n")
    try:
        results = []
        # Get the validator-formatted data for each dataset.
        if "paleoData" in D:
            # A list of lists of LiPD-content metadata
            _api_data = get_validator_format(D)
            results.append(call_validator_api(D["dataSetName"], _api_data))
        else:
            for dsn, dat in D.items():
                # A list of lists of LiPD-content metadata
                _api_data = get_validator_format(dat)
                results.append(call_validator_api(dsn, _api_data))
        display_results(results, detailed)
    except Exception as e:
        print("Error: validate: {}".format(e))
    __move_to_cwd()
    return


# def viewLipd(D):
#
#     try:
#         # Move to py package dir, so we can relative reference json_viewer.py
#         _go_to_package()
#         # Open viewer in subprocess, so it's contained and closed in a new py process
#         subprocess.call(('python', 'json_viewer.py', json.dumps(D)))
#     except Exception as e:
#         pass
#     # Placeholder to catch errors so we can always chdir back to cwd
#     # __move_to_cwd()
#     return


# PUT

# def addEnsemble(D, dsn, ensemble):
#     """
#     Create ensemble entry and then add it to the specified LiPD dataset.
#
#     :param dict D: LiPD data
#     :param str dsn: Dataset name
#     :param list ensemble: Nested numpy array of ensemble column data.
#     :return dict D: LiPD data
#     """
#
#     # Check that the given filename exists in the library
#     if dsn in D:
#         meta = D[dsn]
#         # Create an ensemble dictionary entry
#         ens = create_ensemble(ensemble)
#         # If everything above worked, then there should be formatted ensemble data now.
#         if ens:
#             # Insert the formatted ensemble data into the master lipd library
#             meta = insert_ensemble(meta, ens)
#             # Set meta into lipd object
#             D[dsn] = meta
#     else:
#         print("Error: This dataset was not found in your LiPD data: {}".format(dsn))
#     return D


# DATA FRAMES

def ensToDf(ensemble):
    """
    Create an ensemble data frame from some given nested numpy arrays

    :param list ensemble: Ensemble data
    :return obj df: Pandas dataframe
    """
    try:
        df = create_dataframe(ensemble)
    except Exception as e:
        pass
    __move_to_cwd()
    return df


# TODO Not adapted to objectless utilities. Does it need an update?
# def lipdToDf(D, dsn):
#     """
#     Get LiPD data frames from LiPD object
#
#     :param dict D: LiPD data
#     :param str dsn: Dataset name
#     :return dict dfs: Pandas dataframes
#     """
#     try:
#         dfs = lipd_lib.get_dfs(dsn)
#     except KeyError:
#         print("Error: Unable to find LiPD file")
#         logger_start.warn("lipd_to_df: KeyError: missing lipd
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) from spack import * class Httpie(PythonPackage): """Modern, user-friendly command-line HTTP client for the API era.""" homepage = 'https://httpie.io/' pypi = 'httpie/httpie-2.6.0.tar.gz' maintainers = ['jakubroztocil'] version('2.6.0', sha256='ef929317b239bbf0a5bb7159b4c5d2edbfc55f8a0bcf9cd24ce597daec2afca5') version('2.5.0', sha256='fe6a8bc50fb0635a84ebe1296a732e39357c3e1354541bf51a7057b4877e47f9') # TODO: Remove both versions for HTTPie 2.7.0. version('0.9.9', sha256='f1202e6fa60367e2265284a53f35bfa5917119592c2ab08277efc7fffd744fcb', deprecated=True) version('0.9.8', sha256='515870b15231530f56fe2164190581748e8799b66ef0fe36ec9da3396f0df6e1', deprecated=True) dep
ends_on('python@3.6:', when='@2.5:', type=('build', 'run')) depends_on('py-setuptools', type=('build', 'run')) depends_on('py-charset-normalizer@2:', when='@2.6:', type=('build', 'run')) depends_on('py-defusedxml@0.6:', when='@2.5:', type=('build', 'run')) depends_on('py-
pygments@2.1.3:', type=('build', 'run')) depends_on('py-pygments@2.5.2:', when='@2.5:', type=('build', 'run')) depends_on('py-requests@2.11:', type=('build', 'run')) depends_on('py-requests@2.22:+socks', when='@2.5:', type=('build', 'run')) depends_on('py-requests-toolbelt@0.9.1:', when='@2.5:', type=('build', 'run')) # TODO: Remove completely py-argparse for HTTPie 2.7.0. # Concretization problem breaks this. Unconditional for now... # https://github.com/spack/spack/issues/3628 # depends_on('py-argparse@1.2.1:', type=('build', 'run'), # when='^python@:2.6,3.0:3.1') depends_on('py-argparse@1.2.1:', type=('build', 'run'), when='^python@:2.6')
fro
m vmware.models import VM, VMwareHost from rest_framework import serializers class VMSerializer(serializers.ModelSerializer): class Meta: model = VM fields = ('name', 'moid', 'vcenter', 'host', 'instance_uuid', 'os_type', 'ad
ded_time', 'is_template', 'state') class VMWareHostSerializer(serializers.ModelSerializer): baremetal = serializers.HyperlinkedRelatedField(many=False, view_name='baremetal-detail', read_only=True) class Meta: model = VMwareHost fields = ('name', 'ip_address', 'vcenter', 'baremetal', 'state')
import os
import stat
import time
from inaugurator import sh


class TargetDevice:
    _found = None

    @classmethod
    def device(cls, candidates):
        if cls._found is None:
            cls._found = cls._find(candidates)
        return cls._found

    @classmethod
    def _find(cls, candidates):
        RETRIES = 5
        for retry in xrange(RETRIES):
            for device in candidates:
                if not os.path.exists(device):
                    continue
                if not stat.S_ISBLK(os.stat(device).st_mode):
                    continue
                try:
                    # Check the label of the first partition (e.g. /dev/sda -> /dev/sda1)
                    output = sh.run("dosfslabel", device + "1")
                except:
                    output = ""
                if output.strip() == "STRATODOK":
                    # Keep the raise outside the try block so it is not swallowed
                    raise Exception(
                        "DOK was found on SDA. cannot continue: it's likely that "
                        "the HD driver was not loaded correctly")
                print "Found target device %s" % device
                return device
            print "didn't find target device, sleeping before retry %d" % retry
            time.sleep(1)
            os.system("/usr/sbin/busybox mdev -s")
        raise Exception("Failed finding target device")
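
# A minimal usage sketch (not part of the original module); the candidate list
# below is an illustrative assumption -- real callers supply their own device paths.
if __name__ == "__main__":
    print TargetDevice.device(["/dev/sda", "/dev/vda"])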
from django.db import models
from jsonfield import JSONField
from collections import OrderedDict


class BaseObject(models.Model):
    """
    The base model from which all apps inherit
    """
    # Type represents the app that uses it. Assets, Persons, Orgs, etc
    type = models.CharField(max_length=256)
    # Related-to represents the relation of this object with other objects (of any type)
    related_to = models.ManyToManyField("self", blank=True)
    created_on = models.DateTimeField(auto_now_add=True)
    # auto_now (without auto_now_add) so this updates on every save
    updated_on = models.DateTimeField(auto_now=True)
    # Store all attributes/properties of the object as dictionary
    attributes = JSONField(load_kwargs={'object_pairs_hook': OrderedDict}, blank=True)

    def __init__(self, *args, **kwargs):
        super(BaseObject, self).__init__(*args, **kwargs)
        if not self.pk and not self.type:
            self.type = self.TYPE


class BasePropertyManager(models.Manager):
    def create_attributes(self, baseobject, **attributes):
        """
        Given a set of key-value attributes for a given object, create the
        attribute-set in table
        """
        property_set = []
        for attr, value in attributes.items():
            property_set.append(BaseProperty(baseobject=baseobject, key=attr, value=value))
        self.bulk_create(property_set)


class BaseProperty(models.Model):
    """
    Key-Value attributes of objects are stored here.
    """
    baseobject = models.ForeignKey(BaseObject)
    key = models.CharField(max_length=256)
    value = models.CharField(max_length=256)

    objects = BasePropertyManager()

    def __unicode__(self):
        """Representation of field"""
        return u"%s: %s=%s" % (self.baseobject.id, self.key, self.value)


class ProxyObject(BaseObject):
    class Meta:
        proxy = True
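
# A minimal usage sketch of create_attributes (not from the original file);
# the TYPE value and the attribute names are illustrative assumptions:
#
#   obj = ProxyObject(type="asset")
#   obj.save()
#   BaseProperty.objects.create_attributes(obj, vendor="Acme", serial="A-1001")
#   # -> bulk-creates one BaseProperty row per keyword argument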
""" Default settings for the ``mezzanine.generic`` app. Each of these can be overridden in your project's settings module, just like regular Django settings. The ``editable`` argument for each controls whether the setting is editable via Django's admin. Thought should be given to how a setting is actually used before making it editable, as it may be inappropriate - for example settings that are only read during startup shouldn't be editable, since changing them would require an application reload. """ from django.conf import settings from django.utils.translation import ugettext_lazy as _ from mezzanine.conf import register_setting generic_comments = getattr(settings, "COMMENTS_APP", "") == "mezzanine.generic" if generic_comments: register_setting( name="COMMENTS_ACCOUNT_REQUIRED", label=_("Accounts required for commenting"), description=_("If ``True``, users must log in to comment."), editable=True, default=False, ) register_setting( name="COMMENTS_DISQUS_SHORTNAME", label=_("Disqus shortname"), description=_("Shortname for the http://disqus.com comments " "service."), editable=True, default="", ) register_setting( name="COMMENTS_DISQUS_API_PUBLIC_KEY", label=_("Disqus public key"), description=_("Public key for http://disqus.com developer API"), editable=True, default="", ) register_setting( name="COMMENTS_DISQUS_API_SECRET_KEY", label=_("Disqus secret key"), description=_("Secret key for http://disqus.com developer API"), editable=True, default="", ) register_setting( name="COMMENTS_DEFAULT_APPROVED", label=_("Auto-approve comments"), description=_("If ``True``, built-in comments are approved by " "default."), editable=True, default=True, ) register_setting( name="COMMENT_FILTER", description=_("Dotted path to the function to call on a comment's " "value before it is rendered to the template."), editable=False, default=None, ) register_setting( name="COMMENTS_NOTIFICATION_EMAILS", label=_("Comment notification email addresses"), description=_("A comma separated list of email addresses that "
"will receive an email notification each time a " "new comment is posted on the site."), editable=True, default="", ) register_setting( name="COMMENTS_NUM_LATEST", label=_("Admin comments"), description=_("Number of latest comments shown in the admin " "dashboard."), ed
itable=True, default=5, ) register_setting( name="COMMENTS_UNAPPROVED_VISIBLE", label=_("Show unapproved comments"), description=_("If ``True``, comments that have ``is_public`` " "unchecked will still be displayed, but replaced with a " "``waiting to be approved`` message."), editable=True, default=True, ) register_setting( name="COMMENTS_REMOVED_VISIBLE", label=_("Show removed comments"), description=_("If ``True``, comments that have ``removed`` " "checked will still be displayed, but replaced " "with a ``removed`` message."), editable=True, default=True, ) register_setting( name="COMMENTS_USE_RATINGS", description=_("If ``True``, comments can be rated."), editable=False, default=True, ) register_setting( name="RATINGS_ACCOUNT_REQUIRED", label=_("Accounts required for rating"), description=_("If ``True``, users must log in to rate content " "such as blog posts and comments."), editable=True, default=False, ) register_setting( name="RATINGS_RANGE", description=_("A sequence of integers that are valid ratings."), editable=False, default=range(getattr(settings, "RATINGS_MIN", 1), getattr(settings, "RATINGS_MAX", 5) + 1), )
# -*- coding: utf-8 -*- ######################################
######################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). # # This program is free software: you can red
istribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## from openerp import tools from openerp.osv import fields, osv class sale_report(osv.osv): _name = "sale.report" _description = "Sales Orders Statistics" _auto = False _rec_name = 'date' _columns = { 'date': fields.datetime('Date Order', readonly=True), 'date_confirm': fields.date('Date Confirm', readonly=True), 'product_id': fields.many2one('product.product', 'Product', readonly=True), 'product_uom': fields.many2one('product.uom', 'Unit of Measure', readonly=True), 'product_uom_qty': fields.float('# of Qty', readonly=True), 'partner_id': fields.many2one('res.partner', 'Partner', readonly=True), 'company_id': fields.many2one('res.company', 'Company', readonly=True), 'user_id': fields.many2one('res.users', 'Salesperson', readonly=True), 'price_total': fields.float('Total Price', readonly=True), 'delay': fields.float('Commitment Delay', digits=(16,2), readonly=True), 'categ_id': fields.many2one('product.category','Category of Product', readonly=True), 'nbr': fields.integer('# of Lines', readonly=True), 'state': fields.selection([ ('draft', 'Quotation'), ('waiting_date', 'Waiting Schedule'), ('manual', 'Manual In Progress'), ('progress', 'In Progress'), ('invoice_except', 'Invoice Exception'), ('done', 'Done'), ('cancel', 'Cancelled') ], 'Order Status', readonly=True), 'pricelist_id': fields.many2one('product.pricelist', 'Pricelist', readonly=True), 'analytic_account_id': fields.many2one('account.analytic.account', 'Analytic Account', readonly=True), 'section_id': fields.many2one('crm.case.section', 'Sales Team'), } _order = 'date desc' def _select(self): select_str = """ SELECT min(l.id) as id, l.product_id as product_id, t.uom_id as product_uom, sum(l.product_uom_qty / u.factor * u2.factor) as product_uom_qty, sum(l.product_uom_qty * l.price_unit * (100.0-l.discount) / 100.0) as price_total, count(*) as nbr, s.date_order as date, s.date_confirm as date_confirm, s.partner_id as partner_id, s.user_id as user_id, s.company_id as company_id, extract(epoch from avg(date_trunc('day',s.date_confirm)-date_trunc('day',s.create_date)))/(24*60*60)::decimal(16,2) as delay, s.state, t.categ_id as categ_id, s.pricelist_id as pricelist_id, s.project_id as analytic_account_id, s.section_id as section_id """ return select_str def _from(self): from_str = """ sale_order_line l join sale_order s on (l.order_id=s.id) left join product_product p on (l.product_id=p.id) left join product_template t on (p.product_tmpl_id=t.id) left join product_uom u on (u.id=l.product_uom) left join product_uom u2 on (u2.id=t.uom_id) """ return from_str def _group_by(self): group_by_str = """ GROUP BY l.product_id, l.order_id, t.uom_id, t.categ_id, s.date_order, s.date_confirm, s.partner_id, s.user_id, s.company_id, s.state, s.pricelist_id, s.project_id, s.section_id """ return group_by_str def init(self, cr): # self._table = sale_report 
tools.drop_view_if_exists(cr, self._table) cr.execute("""CREATE or REPLACE VIEW %s as ( %s FROM ( %s ) %s )""" % (self._table, self._select(), self._from(), self._group_by())) # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
#!/usr/bin/env python3
"""
Lazy 'tox' to quickly check if branch is up to PR standards.

This is NOT a tox replacement, only a quick check during development.
"""
import os
import asyncio
import sys
import re
import shlex
from collections import namedtuple

try:
    from colorlog.escape_codes import escape_codes
except ImportError:
    escape_codes = None


RE_ASCII = re.compile(r"\033\[[^m]*m")
Error = namedtuple("Error", ["file", "line", "col", "msg", "skip"])

PASS = "green"
FAIL = "bold_red"


def printc(the_color, *args):
    """Color print helper."""
    msg = " ".join(args)
    if not escape_codes:
        print(msg)
        return
    try:
        print(escape_codes[the_color] + msg + escape_codes["reset"])
    except KeyError:
        print(msg)
        raise ValueError("Invalid color {}".format(the_color))


def validate_requirements_ok():
    """Validate requirements, returns True if ok."""
    from gen_requirements_all import main as req_main

    return req_main(True) == 0


async def read_stream(stream, display):
    """Read from stream line by line until EOF, display, and capture lines."""
    output = []
    while True:
        line = await stream.readline()
        if not line:
            break
        output.append(line)
        display(line.decode())  # assume it doesn't block
    return b"".join(output)


async def async_exec(*args, display=False):
    """Execute, return code & log."""
    argsp = []
    for arg in args:
        if os.path.isfile(arg):
            argsp.append("\\\n  {}".format(shlex.quote(arg)))
        else:
            argsp.append(shlex.quote(arg))
    printc("cyan", *argsp)
    try:
        kwargs = {
            "loop": LOOP,
            "stdout": asyncio.subprocess.PIPE,
            "stderr": asyncio.subprocess.STDOUT,
        }
        if display:
            kwargs["stderr"] = asyncio.subprocess.PIPE
        proc = await asyncio.create_subprocess_exec(*args, **kwargs)
    except FileNotFoundError as err:
        printc(
            FAIL,
            "Could not execute {}. Did you install test requirements?".format(args[0]),
        )
        raise err

    if not display:
        # Read stdout into log
        stdout, _ = await proc.communicate()
    else:
        # read child's stdout/stderr concurrently (capture and display)
        stdout, _ = await asyncio.gather(
            read_stream(proc.stdout, sys.stdout.write),
            read_stream(proc.stderr, sys.stderr.write),
        )
    exit_code = await proc.wait()
    stdout = stdout.decode("utf-8")
    return exit_code, stdout


async def git():
    """Exec git."""
    if len(sys.argv) > 2 and sys.argv[1] == "--":
        return sys.argv[2:]
    _, log = await async_exec("git", "merge-base", "upstream/dev", "HEAD")
    merge_base = log.splitlines()[0]
    _, log = await async_exec("git", "diff", merge_base, "--name-only")
    return log.splitlines()


async def pylint(files):
    """Exec pylint."""
    _, log = await async_exec("pylint", "-f", "parseable", "--persistent=n", *files)
    res = []
    for line in log.splitlines():
        line = line.split(":")
        if len(line) < 3:
            continue
        _fn = line[0].replace("\\", "/")
        res.append(Error(_fn, line[1], "", line[2].strip(), _fn.startswith("tests/")))
    return res


async def flake8(files):
    """Exec flake8."""
    _, log = await async_exec("flake8", "--doctests", *files)
    res = []
    for line in log.splitlines():
        line = line.split(":")
        if len(line) < 4:
            continue
        _fn = line[0].replace("\\", "/")
        res.append(Error(_fn, line[1], line[2], line[3].strip(), False))
    return res


async def lint(files):
    """Perform lint."""
    files = [file for file in files if os.path.isfile(file)]
    fres, pres = await asyncio.gather(flake8(files), pylint(files))

    res = fres + pres
    res.sort(key=lambda item: item.file)
    if res:
        print("Pylint & Flake8 errors:")
    else:
        printc(PASS, "Pylint and Flake8 passed")

    lint_ok = True
    for err in res:
        err_msg = "{} {}:{} {}".format(err.file, err.line, err.col, err.msg)

        # tests/* does not have to pass lint
        if err.skip:
            print(err_msg)
else: printc(FAIL, err_msg) lint_ok = False return lint_ok async def main(): """Run the main loop.""" # Ensure we are in the homeassistant root os.chdir(os.path.dirname(os.path.dirname(os.path.realpath(__file
__)))) files = await git() if not files: print( "No changed files found. Please ensure you have added your "
"changes with git add & git commit" ) return pyfile = re.compile(r".+\.py$") pyfiles = [file for file in files if pyfile.match(file)] print("=============================") printc("bold", "CHANGED FILES:\n", "\n ".join(pyfiles)) print("=============================") skip_lint = len(sys.argv) > 1 and sys.argv[1] == "--skiplint" if skip_lint: printc(FAIL, "LINT DISABLED") elif not await lint(pyfiles): printc(FAIL, "Please fix your lint issues before continuing") return test_files = set() gen_req = False for fname in pyfiles: if fname.startswith("homeassistant/components/"): gen_req = True # requirements script for components # Find test files... if fname.startswith("tests/"): if "/test_" in fname and os.path.isfile(fname): # All test helpers should be excluded test_files.add(fname) else: parts = fname.split("/") parts[0] = "tests" if parts[-1] == "__init__.py": parts[-1] = "test_init.py" elif parts[-1] == "__main__.py": parts[-1] = "test_main.py" else: parts[-1] = "test_" + parts[-1] fname = "/".join(parts) if os.path.isfile(fname): test_files.add(fname) if gen_req: print("=============================") if validate_requirements_ok(): printc(PASS, "script/gen_requirements.py passed") else: printc(FAIL, "Please run script/gen_requirements.py") return print("=============================") if not test_files: print("No test files identified, ideally you should run tox") return code, _ = await async_exec( "pytest", "-vv", "--force-sugar", "--", *test_files, display=True ) print("=============================") if code == 0: printc(PASS, "Yay! This will most likely pass tox") else: printc(FAIL, "Tests not passing") if skip_lint: printc(FAIL, "LINT DISABLED") if __name__ == "__main__": LOOP = ( asyncio.ProactorEventLoop() if sys.platform == "win32" else asyncio.get_event_loop() ) try: LOOP.run_until_complete(main()) except (FileNotFoundError, KeyboardInterrupt): pass finally: LOOP.close()
#!/usr/bin/env python """shuffle a dataset""" import random import sys def sol_shuffle(filena
me, out_filename): try: file = open(filename, 'rb') lines = file.readlines() if len(lines) == 0: print 'empty file' file.close() sys.exit() if lines[-1][-1] != '\n': lines[-1]+='\n' random.shuffle(lines) wfile = open(out_filename, 'wb') wfile.writelines(lines) wfile.close() except IOError as e: print "I/O error ({0}): {1}".format(e.errno, e.strerror) sys.exit() else: file.close()
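
# Hedged entry point (not in the original script): wires sol_shuffle to the
# command line so `python sol_shuffle.py in.txt out.txt` works as expected.
if __name__ == '__main__':
    if len(sys.argv) != 3:
        print 'Usage: %s <input_file> <output_file>' % sys.argv[0]
        sys.exit(1)
    sol_shuffle(sys.argv[1], sys.argv[2])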
import abc


class RuleLearner:
    """2D 2-person board game rule learner base class

    TODO
    """

    # Python 2 syntax: required so that @abc.abstractmethod is actually enforced
    __metaclass__ = abc.ABCMeta

    def __init__(self, board_height, board_width):
        """Initialize the rule learner

        Subclasses should call this constructor.

        :type board_height: positive integer
        :param board_height: the height (number of rows) of the board
        :type board_width: positive integer
        :param board_width: the width (number of columns) of the board
        """
        self._board_height = board_height
        self._board_width = board_width

    @abc.abstractmethod
    def get_valid_moves(self, board):
        """Get the valid moves for the board.

        :type board: Boards.Board
        :param board: the board for which to determine the valid moves
        :returns: a 2D Numpy array with the same dimensions as the board,
            with the cells where moves are valid set to 1 and the rest set
            to 0
        """
        pass
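
# A minimal concrete subclass sketch (not from the original file). The
# "empty top-row cell" rule and indexing the board like a plain 2D array are
# illustrative assumptions; the real Boards.Board interface is not shown here.
import numpy


class TopRowRuleLearner(RuleLearner):
    """Toy learner: a move is valid in any top-row cell that is empty (0)."""

    def get_valid_moves(self, board):
        valid = numpy.zeros((self._board_height, self._board_width))
        for col in range(self._board_width):
            if board[0][col] == 0:
                valid[0][col] = 1
        return valid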
#!/usr/bin/python2.7
# -*- coding: utf-8 -*-
# vim:ts=4:sw=4:softtabstop=4:smarttab:expandtab
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#     http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""
The devices file.
"""

class Devices:
    def __init__(self):
        fo = open("/proc/devices")
        self._charmap = {}
        self._blockmap = {}
        curmap = None  # guard against entries appearing before a section header
        for line in fo.readlines():
            if line.startswith("Character"):
                curmap = self._charmap
                continue
            elif line.startswith("Block"):
                curmap = self._blockmap
                continue
            elif curmap is not None and len(line) > 4:
                [num, fmt] = line.split()
                num = int(num)
                curmap[num] = fmt
        fo.close()

    def __str__(self):
        s = ["Character devices:"]
        for num, fmt in self._charmap.items():
            s.append("%3d %s" % (num, fmt))
        s.append("\nBlock devices:")
        for num, fmt in self._blockmap.items():
            s.append("%3d %s" % (num, fmt))
        return "\n".join(s)

    def get_device(self, dtype, major, minor):
        pass


def _test(argv):
    d = Devices()
    print d

if __name__ == "__main__":
    import sys
    _test(sys.argv)
import glob def handle(userToken, _
): # Get usertoken data userID = userToken.userID # Make sure the match exists matchID = userToken.matchID if mat
chID not in glob.matches.matches:
        return
    match = glob.matches.matches[matchID]

    # Get our slotID and change ready status
    slotID = match.getUserSlotID(userID)
    if slotID is not None:
        match.toggleSlotReady(slotID)
### # # W A R N I N G # # This recipe is obsolete! # # When you are looking for copying and pickling functionality for generators # implemented in pure Python download the # # generator_tools # # package at the cheeseshop or at www.fiber-space.de # ### import new import copy import types import sys from opcode import* def copy_generator(f_gen): ''' Function used to copy a generator object. @param f_gen: generator object. @return: pair (g_gen, g) where g_gen is a new generator object and g a generator function g producing g_gen. The function g is created from f_gen.gi_frame. Usage: function copies a running generator. def inc(start, step = 1): i = start while True: yield i i+= step >>> inc_gen = inc(3) >>> inc_gen.next() 3 >>> inc_gen.next() 4 >>> inc_gen_c, inc_c = copy_generator(inc_gen) >>> inc_gen_c.next() == inc_gen.next() True >>> inc_gen_c.next() 6 Implementation strategy: Inspecting the frame of a running generator object f provides following important information about the state of the generator: - the values of bound locals inside the generator object - the last bytecode being executed This state information of f is restored in a new function generator g in the following way: - the signature of g is defined by the locals of f ( co_varnames of f ). So we can pass the locals to g inspected from the current frame of running f. Yet unbound locals are assigned to None. All locals will be deepcopied. If one of the locals is a generator object it will be copied using copy_generator. If a local is not copyable it will be assigned directly. Shared state is therefore possible. - bytecode hack. A JUMP_ABSOLUTE bytecode instruction is prepended to the bytecode of f with an offset pointing to the next unevaluated bytecode instruction of f. Corner cases: - an unstarted generator ( last instruction = -1 ) will be just cloned. - if a generator has been already closed ( gi_frame = None ) a ValueError exception is raised. ''' if not f_gen.gi_frame: raise ValueError("Can't copy closed generator") f_co
de = f_gen.gi_frame.f_code offset = f_gen.gi_frame.f_lasti locals = f_gen.gi_frame.f_l
ocals
    if offset == -1:     # clone the generator
        argcount = f_code.co_argcount
        # nothing has been executed yet: reuse the bytecode unmodified
        modified_code = f_code.co_code
    else:                # bytecode hack - insert jump to current offset
        # the offset depends on the version of the Python interpreter
        if sys.version_info[:2] == (2,4):
            offset += 4
        elif sys.version_info[:2] == (2,5):
            offset += 5
        start_sequence = (opmap["JUMP_ABSOLUTE"],)+divmod(offset, 256)[::-1]
        modified_code = "".join([chr(op) for op in start_sequence])+f_code.co_code
        argcount = f_code.co_nlocals
    varnames = list(f_code.co_varnames)
    for i, name in enumerate(varnames):
        loc = locals.get(name)
        if isinstance(loc, types.GeneratorType):
            varnames[i] = copy_generator(loc)[0]
        else:
            try:
                varnames[i] = copy.deepcopy(loc)
            except TypeError:
                varnames[i] = loc
    new_code = new.code(argcount,
                        f_code.co_nlocals,
                        f_code.co_stacksize,
                        f_code.co_flags,
                        modified_code,
                        f_code.co_consts,
                        f_code.co_names,
                        f_code.co_varnames,
                        f_code.co_filename,
                        f_code.co_name,
                        f_code.co_firstlineno,
                        f_code.co_lnotab)
    g = new.function(new_code, globals(),)
    g_gen = g(*varnames)
    return g_gen, g
# coding: utf-8
#
# Copyright © 2017 weirdgiraffe <giraffe@cyberzoo.xyz>
#
# Distributed under terms of the MIT license.
#
import sys

try:  # real kodi
    import xbmc
    import xbmcaddon
    import xbmcgui
    import xbmcplugin
except ImportError:  # mocked kodi
    from mock_kodi import xbmc
    from mock_kodi import xbmcaddon
    from mock_kodi import xbmcgui
    from mock_kodi import xbmcplugin

try:  # python2
    from urllib import urlencode
    from urlparse import urlparse, parse_qs
except ImportError:  # python3
    from urllib.parse import urlparse, parse_qs, urlencode


class logger:
    @staticmethod
    def debug(s):
        xbmc.log(s, xbmc.LOGDEBUG)

    @staticmethod
    def info(s):
        xbmc.log(s, xbmc.LOGNOTICE)

    @staticmethod
    def error(s):
        s += '\n\taddon arguments:\n\t{0}'.format('\n\t'.join(sys.argv[1:]))
        xbmc.log(s, xbmc.LOGERROR)


def list_item(name, thumb):
    li = xbmcgui.ListItem(name)
    if thumb is not None:
        li.setArt(thumb)
        # it is said that both of these methods are deprecated
        # see: http://kodi.wiki/view/Jarvis_API_changes
        # but only these methods actually work with Jarvis
        li.setIconImage(thumb)
        li.setThumbnailImage(thumb)
    return li


class Plugin:
    def __init__(self, *args):
        self._addon = xbmcaddon.Addon()
        self._url = args[0]
        self._handler = int(args[1], base=10)
        # addon url has format:
        # plugin://plugin.hello.blah?arg1=xxx&arg2=xxx
        # where args are urlencoded
        o = urlparse(args[2])
        self._args = dict()
        for k, v in parse_qs(o.query).items():
            if len(v) == 1:
                self._args[k] = v[0]
            else:
                self._args[k] = v

    @property
    def icon(self):
        return self._addon.getAddonInfo('icon')

    @property
    def args(self):
        return self._args

    def read_input(self, header):
        # use the caller-supplied header as the keyboard prompt
        keyboard = xbmc.Keyboard('', header, False)
        keyboard.doModal()
        if keyboard.isConfirmed():
            return keyboard.getText()

    def play(self, url):
        li = xbmcgui.ListItem(path=url)
        xbmcplugin.setResolvedUrl(self._handler, True, li)

    def add_screen_item(self, name, url, **kwargs):
        thumb = kwargs.get('thumb')
        li = list_item(name, thumb)
        li.setProperty('IsPlayable', 'true')
        ret = xbmcplugin.addDirectoryItem(self._handler, url, li, False)
        if not ret:
            logger.error('failed to add {0} playable item'.format(name))

    def add_screen_directory(self, name, url, **kwargs):
        thumb = kwargs.get('thumb')
        li = list_item(name, thumb)
        args = [self._handler, url, li, True]
        items_count = kwargs.get('items_count')
        if items_count:
            args += [items_count]
        ret = xbmcplugin.addDirectoryItem(*args)
        if not ret:
            logger.error('failed to add {0} directory item'.format(name))

    def publish_screen(self, ok, refresh=False):
        xbmcplugin.endOfDirectory(self._handler, ok, refresh)

    def make_url(self, argv):
        return '{0}?{1}'.format(self._url, urlencode(argv))

    def settings_value(self, setting_id):
        return self._addon.getSetting(setting_id)

    def show_notification(self, title, message):
        # rough reading-time heuristic: 2 seconds per 10 characters
        timeout = len(message) // 10 * 2000
        title = title.replace('"', '\\"')
        message = message.replace('"', '\\"')
        xbmc.executebuiltin('Notification("{0}","{1}","{2}","{3}")'.format(
            title.encode('ascii', 'ignore'),
            message.encode('ascii', 'ignore'),
            timeout,
            self.icon))
uiltin_recipe_collection = get_builtin_recipe_collection() self.scheduler_config = SchedulerConfig() try: with zipfile.ZipFile(P('builtin_recipes.zip', allow_user_override=False), 'r') as zf: self.favicons = dict([(x.filename, x) for x in zf.infolist() if x.filename.endswith('.png')]) except: self.favicons = {} self.do_refresh() def get_builtin_recipe(self, urn, download=True): if download: try: return download_builtin_recipe(urn) except: import traceback traceback.print_exc() return get_builtin_recipe(urn) def get_recipe(self, urn, download=True): coll = self.custom_recipe_collection if urn.startswith('custom:') \ else self.builtin_recipe_collection for recipe in coll: if recipe.get('id', False) == urn: if coll is self.builtin_recipe_collection: return self.get_builtin_recipe(urn[8:], download=download) return get_custom_recipe(int(urn[len('custom:'):])) def update_custom_recipe(self, urn, title, script): id_ = int(urn[len('custom:'):]) update_custom_recipe(id_, title, script) self.custom_recipe_collection = get_custom_recipe_collection() def update_custom_recipes(self, script_urn_map): script_ids = [] for urn, title_script in script_urn_map.iteritems(): id_ = int(urn[len('custom:'):]) (title, script) = title_script script_ids.append((id_, title, script)) update_custom_recipes(script_ids) self.custom_recipe_collection = get_custom_recipe_collection() def add_custom_recipe(self, title, script): add_custom_recipe(title, script) self.custom_recipe_collection = get_custom_recipe_collection() def add_custom_recipes(self, scriptmap): add_custom_recipes(scriptmap) self.custom_recipe_collection = get_custom_recipe_collection() def remove_custom_recipes(self, urns): ids = [int(x[len('custom:'):]) for x in urns] for id_ in ids: remove_custom_recipe(id_) self.custom_recipe_collection = get_custom_recipe_collection() def do_refresh(self, restrict_to_urns=set([])): self.custom_recipe_collection = get_custom_recipe_collection() zf = P('builtin_recipes.zip', allow_user_override=False) def factory(cls, parent, *args): args = list(args) if cls is NewsItem: args.extend([self.default_icon, self.custom_icon, self.favicons, zf]) args += [self.builtin_recipe_collection, self.custom_recipe_collection, self.scheduler_config, parent] return cls(*args) def ok(urn): if restrict_to_urns is None: return False return not restrict_to_urns or urn in restrict_to_urns new_root = factory(NewsTreeItem, None) scheduled = factory(NewsCategory, new_root, _('Scheduled')) custom = factory(NewsCategory, new_root, _('Custom')) lang_map = {} self.all_urns = set([]) self.showing_count = 0 self.builtin_count = 0 for x in self.custom_recipe_collection: urn = x.get('id') self.all_urns.add(urn) if ok(urn): factory(NewsItem, custom, urn, x.get('title')) self.showing_count += 1 for x in self.builtin_recipe_collection: urn = x.get('id') self.all_urns.add(urn) if ok(urn): lang = x.get('langua
ge', 'und') if lang: lang = lang.replace('-', '_') if lang not in lang_map: lang_map[lang] = factory(NewsCategory, new_root, lang)
factory(NewsItem, lang_map[lang], urn, x.get('title')) self.showing_count += 1 self.builtin_count += 1 for x in self.scheduler_config.iter_recipes(): urn = x.get('id') if urn not in self.all_urns: self.scheduler_config.un_schedule_recipe(urn) continue if ok(urn): factory(NewsItem, scheduled, urn, x.get('title')) new_root.prune() new_root.sort() self.root = new_root self.reset() def reset(self): self.beginResetModel(), self.endResetModel() def recipe_from_urn(self, urn): coll = self.custom_recipe_collection if 'custom:' in urn else \ self.builtin_recipe_collection for x in coll: if x.get('id', None) == urn: return copy.deepcopy(x) def schedule_info_from_urn(self, urn): return self.scheduler_config.get_schedule_info(urn) def account_info_from_urn(self, urn): return self.scheduler_config.get_account_info(urn) def universal_set(self): return self.all_urns def get_customize_info(self, urn): return self.scheduler_config.get_customize_info(urn) def get_matches(self, location, query): query = query.strip().lower() if not query: return self.universal_set() results = set([]) for urn in self.universal_set(): recipe = self.recipe_from_urn(urn) if query in recipe.get('title', '').lower() or \ query in recipe.get('description', '').lower(): results.add(urn) return results def search(self, query): results = [] try: query = unicode(query).strip() if query: results = self.parse(query) if not results: results = None except ParseException: results = [] self.do_refresh(restrict_to_urns=results) self.searched.emit(True) def columnCount(self, parent): return 1 def data(self, index, role): if not index.isValid(): return None item = index.internalPointer() return item.data(role) def headerData(self, *args): return None def flags(self, index): if not index.isValid(): return Qt.ItemIsEnabled|Qt.ItemIsSelectable item = index.internalPointer() return item.flags() def resort(self): self.do_refresh() def index(self, row, column, parent): if not self.hasIndex(row, column, parent): return QModelIndex() if not parent.isValid(): parent_item = self.root else: parent_item = parent.internalPointer() try: child_item = parent_item.children[row] except IndexError: return QModelIndex() ans = self.createIndex(row, column, child_item) return ans def parent(self, index): if not index.isValid(): return QModelIndex() child_item = index.internalPointer() parent_item = child_item.parent if parent_item is self.root or parent_item is None: return QModelIndex() ans = self.createIndex(parent_item.row(), 0, parent_item) return ans def rowCount(self, parent): if parent.column() > 0: return 0 if not parent.isValid(): parent_item = self.root else: parent_item = parent.internalPointer() return len(parent_item.children) def update_recipe_schedule(self, urn, schedule_type, schedule, add_title_tag=True, custom_tags=[]): recipe = self.recipe_from_urn(urn) self.scheduler_config.schedule_recipe(recipe, schedule_type, schedule, add_title_tag=add_title_tag, custom_tags=custom_tags) def update_last_downloaded(self, urn): self.scheduler_config.update_last_downloaded(urn) def set_account_info(self, urn, un, pw): self.scheduler_config.set_account_info(urn, un, pw) def clea
############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2011 NovaPoint Group LLC (<http://www.novapointgroup.com>) # Copyright (C) 2004-2010 OpenERP SA (<http://www.openerp.com>) # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/> # ############################################################################## from openerp.osv import fields, osv from openerp.tools.translate import _ from . import api class stock_packages(osv.osv): _inherit = "stock.packages" def cancel_postage(self, cr, uid, ids, context=None): for package in self.browse(cr, uid, ids, context=context): if package.shipping_company_name.lower() != "usps": continue usps_config = api.v1.get_config(cr, uid, sale=package.pick_id.sale_id, context=context) test = package.pick_id.logis_company.test_mode if hasattr(package, "tracking_no") and package.tracking_no: try: response = api.v1.cancel_shipping(usps_config, package, shipper=None, test=test) except Exception, e: self.pool.get('stock.packages').write(cr, uid, package.id, {'ship_message': str(e)}, context=context) return { 'type': 'ir.actions.client', 'tag': 'action_warn', 'name': _('Exception'), 'params': {'title': _('Exception'), 'text': str(e), 'sticky': True} } if hasattr(response, "error") or not response.refunds[0].refunded: err = response.error if hasattr(response, "error") else response.refunds[0].message self.pool.get('stock.packages').write(cr, uid, package.id, {'ship_message': err}, context=context) return {
'type': 'ir.actions.client', 'tag': 'action_warn', 'name': _('Failure'), 'params': { 'title': _('Package #%s Cancellation Failed') % package.packge_no, 'text': err, 'sticky': True } } else: self.pool.get('stock.packages').write(cr, uid, package.id, {
'ship_message' : 'Shipment Cancelled', 'tracking_no': '' }, context=context) return super(stock_packages, self).cancel_postage(cr, uid, ids, context=context) stock_packages() # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
# -*- coding: utf-8 -*- # Copyright 2012 splinter authors. All rights reserved. # Use of this source code is governed by a BSD-style # license that can be found in the LICENSE file. class InheritedDocs(type): def __new__(mcs, class_name, bases, dict): items_to_patch = [ (k, v) for k, v in dict.items() if not k.startswith("__") and not v.__doc__ ] for name, obj in items_to_patch: doc = None for base in bases: if hasattr(base,
name): doc = getattr(base, name).__doc__ if doc: if isinstance(obj, property) and not obj.fset: obj.fget.__doc__ = doc dict[name] = property(fget=obj.fget) else: obj.__doc__ = doc break return type.__new__(mcs, class_name,
bases, dict)
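
# A minimal sketch of how this metaclass is meant to be used (the class names
# below are illustrative, not from splinter itself): an undocumented override
# inherits __doc__ from the first base that documents the same attribute.
class _Base(object):
    def visit(self, url):
        """Visit the given URL."""


class _Driver(_Base):
    __metaclass__ = InheritedDocs  # Python 2 syntax, matching this code base

    def visit(self, url):  # no docstring: copied from _Base.visit
        pass


assert _Driver.visit.__doc__ == "Visit the given URL."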
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors # License: GNU General Public License v3. See license.txt from __future__ import unicode_literals import frappe from frappe.utils import
cint, cstr, flt, nowdate, comma_and, date_diff fro
m frappe import msgprint, _
from frappe.model.document import Document

class LeaveControlPanel(Document):
    def get_employees(self):
        conditions, values = [], []
        for field in ["employment_type", "branch", "designation", "department"]:
            if self.get(field):
                conditions.append("{0}=%s".format(field))
                values.append(self.get(field))

        condition_str = " and " + " and ".join(conditions) if len(conditions) else ""

        e = frappe.db.sql("select name from tabEmployee where status='Active' {condition}"
            .format(condition=condition_str), tuple(values))
        return e

    def validate_values(self):
        for f in ["from_date", "to_date", "leave_type", "no_of_days"]:
            if not self.get(f):
                frappe.throw(_("{0} is required").format(self.meta.get_label(f)))

    def to_date_validation(self):
        if date_diff(self.to_date, self.from_date) <= 0:
            return "Invalid period"

    def allocate_leave(self):
        self.validate_values()
        leave_allocated_for = []
        employees = self.get_employees()
        if not employees:
            frappe.throw(_("No employee found"))

        # reuse the employee list fetched above instead of querying again
        for d in employees:
            try:
                la = frappe.new_doc('Leave Allocation')
                la.set("__islocal", 1)
                la.employee = cstr(d[0])
                la.employee_name = frappe.db.get_value('Employee', cstr(d[0]), 'employee_name')
                la.leave_type = self.leave_type
                la.from_date = self.from_date
                la.to_date = self.to_date
                la.carry_forward = cint(self.carry_forward)
                la.new_leaves_allocated = flt(self.no_of_days)
                la.docstatus = 1
                la.save()
                leave_allocated_for.append(d[0])
            except:
                pass
        if leave_allocated_for:
            msgprint(_("Leaves Allocated Successfully for {0}").format(comma_and(leave_allocated_for)))
#!/usr/bin/env python # -*- coding: utf-8 -*- import latin_noun import latin_pronoun import latin_adj import latin_conj import latin_prep import latin_verb_reg import latin_verb_irreg import util class LatinDic: dic = {} auto_macron_mode = False def flatten(text): ret
urn text.replace(u'ā',u'a').replace(u'ē',u'e').replace(u'ī',u'i').replace(u'ō',u'o').replace(u'ū',u'u').replace(u'ȳ',u'y').lower() def register(surface, info): if not info.has_key('pos'): return if LatinDic.auto_macron_mode: surface = flatten(surface) if LatinDic.dic.has_key(surface): LatinDic.dic[surface].append(info)
else: LatinDic.dic[surface] = [info] def register_items(items): for item in items: register(item['surface'], item) def lookup(word): return LatinDic.dic.get(word, None) def dump(): for k, v in LatinDic.dic.items(): print util.render2(k, v) def load_def(file, tags={}): items = [] with open(file, 'r') as fp: for line in fp: if len(line) == 0: continue if line[0] == '#': continue fs = line.rstrip().split('\t') if len(fs) < 3: continue surface = fs[0].decode('utf-8') pos = fs[1] ja = fs[2] items.append(util.aggregate_dicts({'surface':surface, 'pos':pos, 'ja':ja}, tags)) return items def load(auto_macron_mode=False): LatinDic.auto_macron_mode = auto_macron_mode items = [] items += latin_noun.load() items += latin_pronoun.load() items += latin_adj.load() items += latin_conj.load() items += latin_prep.load() items += latin_verb_reg.load() items += latin_verb_irreg.load() items += load_def('words/adv.def', {'pos':'adv'}) items += load_def('words/other.def') register_items(items) # return ld if __name__ == '__main__': # for k, v in dic.items(): # print util.render(k), util.render(v) pass
from django.core.exceptions import ValidationError from django.core.urlresolvers import reverse from django.db import IntegrityError from django.shortcuts import render, redirect from django.contrib import messages from django import forms as django_forms from django.views.decorators.cache import cache_page from django.utils.translation import ugettext_lazy as _ from core.utils.decorators import log from . import forms from . import logic @log @cache_page(60 * 3) def index(request, template='user/blog/index.html', context={}): blog_logic = logic.BlogLogic(request) context['pages'] = blog_logic.pages() context['posts'] = blog_logic.posts() # context['beeps'] = blog_logic.beeps() return render(request, template, context) ''' Pages ''' @log def pages(request, template='user/blog/pages.html', context={}): blog_logic = logic.BlogLogic(request) context['pages'] = blog_logic.pages() return render(request, templ
ate, context) @log @cache_page(60 * 3) def page(request, page_slug, template='user/blog/page.html', context={}): blog_logic = logic.BlogLogic(request) context['page'] = blog_logic.page(page_slug) return render(request, tem
plate, context)


''' Posts '''


@log
def posts(request, template='user/blog/posts.html', context={}):
    blog_logic = logic.BlogLogic(request)
    context['posts'] = blog_logic.posts()
    return render(request, template, context)


@log
@cache_page(60 * 3)
def post(request, post_id, post_slug, template='user/blog/post.html', context={}):
    blog_logic = logic.BlogLogic(request)
    context['post'] = blog_logic.post(post_id, post_slug)
    return render(request, template, context)


''' Others '''


@log
def contact(request, template="user/blog/contact.html", context={}):
    contact_form = forms.ContactForm(request.POST or None)
    if request.method == 'POST':
        if contact_form.is_valid():
            contact_form.save()
            messages.add_message(request, messages.SUCCESS,
                                 _('Your message was successfully submitted.'))
            return redirect(reverse('blog_contact'))
        else:
            messages.add_message(request, messages.ERROR,
                                 _('Please fix the errors below.'))
    context['contact_form'] = contact_form
    context['document_form'] = forms.DocumentForm()
    return render(request, template, context)


@log
def document(request, template="user/blog/contact.html", context={}):
    document_form = forms.DocumentForm(request.POST or None, request.FILES or None)
    if request.method == 'POST':
        if document_form.is_valid():
            document_form.save()
            messages.add_message(request, messages.SUCCESS,
                                 _('Your application was successfully submitted.'))
            return redirect(reverse('blog_contact'))
        else:
            messages.add_message(request, messages.ERROR,
                                 _('Please fix the errors below.'))
    context['contact_form'] = forms.ContactForm()
    context['document_form'] = document_form
    return render(request, template, context)


@log
def search(request, template='user/blog/search.html', context={}):
    blog_logic = logic.BlogLogic(request)
    term = blog_logic.get_param("term")
    search_result = blog_logic.search(term)
    context['term'] = term
    context['pages'] = search_result.pages
    context['posts'] = search_result.posts
    return render(request, template, context)


@log
def subscribe(request):
    blog_logic = logic.BlogLogic(request)
    name = blog_logic.get_param("name")
    email = blog_logic.get_param("email")
    if not name or not email:
        messages.add_message(request, messages.ERROR,
                             _('Please enter your name and email.'))
    else:
        try:
            django_forms.EmailField().clean(email)
            blog_logic.new_subscription(name, email)
            messages.add_message(request, messages.SUCCESS,
                                 _('You have successfully subscribed.'))
        except ValidationError:
            messages.add_message(request, messages.ERROR,
                                 _('Please enter a valid email.'))
        except IntegrityError:
            messages.add_message(request, messages.WARNING,
                                 _('You have already been subscribed.'))
    return redirect(request.META.get('HTTP_REFERER'))
__all__ = [ 'fixed_value', 'coalesce', ] try: from itertools import ifilter as filter except ImportError: pass class _FixedValue(object):
def __init__(self, value): self._value = value def __call__(self, *args, **kwargs): return self._value def fixed_value(value):
return _FixedValue(value) class _Coalesce(object): def _filter(self, x): return x is not None def __init__(self, callbacks, else_=None): self._callbacks = callbacks self._else = else_ def __call__(self, invoice): results = ( callback(invoice) for callback in self._callbacks ) try: return next(filter( self._filter, results )) except StopIteration: return self._else def coalesce(callbacks, else_=None): return _Coalesce(callbacks, else_=else_)
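
# A minimal usage sketch (the invoice dicts are illustrative assumptions):
# coalesce() tries each callback in order and returns the first non-None
# result, falling back to `else_`.
if __name__ == '__main__':
    pick_total = coalesce(
        [fixed_value(None), lambda invoice: invoice.get('total')],
        else_=0,
    )
    assert pick_total({'total': 42}) == 42  # second callback wins
    assert pick_total({}) == 0              # nothing matched -> else_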
#MenuTitle: Set Preferred Names (Name IDs 16 a
nd 17) for Width Variants # -*- coding: utf-8 -*- __doc__=""" Sets Preferred Names custom pa
rameters (Name IDs 16 and 17) for all instances, so that width variants will appear in separate menus in Adobe apps. """ thisFont = Glyphs.font # frontmost font widths = ( "Narrow", "Seminarrow", "Semi Narrow", "Extranarrow", "Extra Narrow", "Ultranarrow", "Ultra Narrow", "Condensed", "Semicondensed", "Semi Condensed", "Extracondensed", "Extra Condensed", "Ultracondensed", "Ultra Condensed", "Compressed", "Semicompressed", "Semi Compressed", "Extracompressed", "Extra Compressed", "Ultracompressed", "Ultra Compressed", "Extended", "Semiextended", "Semi Extended", "Extraextended", "Extra Extended", "Ultraextended", "Ultra Extended", "Expanded", "Semiexpanded", "Semi Expanded", "Extraexpanded", "Extra Expanded", "Ultraexpanded", "Ultra Expanded", "Wide", "Semiwide", "Semi Wide", "Extrawide", "Extra Wide", "Ultrawide", "Ultra Wide", ) for thisInstance in thisFont.instances: print "Processing Instance:", thisInstance.name familyName = thisFont.familyName if thisInstance.customParameters["familyName"]: familyName = thisInstance.customParameters["familyName"] widthVariant = None for width in widths: if width in thisInstance.name: widthVariant = width elif " " in width: width = width.replace(" ","") if width in thisInstance.name: widthVariant = width if widthVariant: preferredFamilyName = "%s %s" % ( thisFont.familyName.strip(), widthVariant.strip() ) preferredStyleName = thisInstance.name.replace(widthVariant,"").strip() if not preferredStyleName: preferredStyleName = "Regular" thisInstance.customParameters["preferredFamilyName"] = preferredFamilyName thisInstance.customParameters["preferredSubfamilyName"] = preferredStyleName print " preferredFamilyName:", preferredFamilyName print " preferredSubfamilyName:", preferredStyleName
import os import yaml DEFAULT_DIR = '../etc/' class BaseConfig(object): __config = {} __default_dir = None @classmethod def load(cls, filename, default_path=DEFAULT_DI
R): """ Setup configuration """ path = "%s/%s.yaml" % (default_path, filename) cls.__default_dir = default_path
if os.path.exists(path): with open(path, 'rt') as filehandle: cls.__config = dict(yaml.load(filehandle.read()).items() + \ cls.__config.items()) else: raise OSError("Config doesn't exists: %s" % path) @classmethod def get_default_path(cls): return cls.__default_dir @classmethod def get(cls, key, value=None): if key in cls.__config: return cls.__config.get(key, value) return cls.__config.get(key.upper(), value) @classmethod def get_url(cls, method): url = cls.__config.get('urls', {}).get(method) if not url: raise ValueError("Could not find url for method: %s" % method) return Config.get('api_host') + url Config = BaseConfig()
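
# A minimal usage sketch; 'myservice' and the keys below are illustrative
# assumptions, not files shipped with this module:
#
#   Config.load('myservice')            # reads ../etc/myservice.yaml
#   host = Config.get('api_host')       # falls back to the upper-cased key
#   url = Config.get_url('create')      # api_host + urls['create']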
"""Check for partitioning errors.""" FR = self.states.FR FL = self.states.FL FS = self.states.FS FO = self.states.FO checksum = FR+(FL+FS+FO)*(1.-FR) - 1. if abs(checksum) >= 0.0001: msg = ("Error in partitioning!\n") msg += ("Checksum: %f, FR: %5.3f, FL: %5.3f, FS: %5.3f, FO: %5.3f\n" \ % (checksum, FR, FL, FS, FO)) self.logger.error(msg) warn(msg) # raise exc.PartitioningError(msg) @prepare_states def integrate(self, day, delt=1.0): """Update partitioning factors based on development stage (DVS)""" params = self.params DVS = self.kiosk["DVS"] self.states.FR = params.FRTB(DVS) self.states.FL = params.FLTB(DVS) self.states.FS = params.FSTB(DVS) self.states.FO = params.FOTB(DVS) # Pack partitioning factors into tuple self.states.PF = PartioningFactors(self.states.FR, self.states.FL, self.states.FS, self.states.FO) self._check_partitioning() def calc_rates(self, day, drv): """ Return partitioning factors based on current DVS. """ # rate calculation does nothing for partioning as it is a derived # state return self.states.PF class DVS_Partitioning_NPK(SimulationObject): """Class for assimilate partitioning based on development stage (`DVS`) with influence of NPK stress. `DVS_Partitioning_NPK` calculates the partitioning of the assimilates to roots, stems, leaves and storage organs using fixed partitioning tables as a function of crop development stage. The only different with the normal partitioning class is the effect of nitrogen stress on partitioning to leaves (parameter NPART). The available assimilates are first split into below-ground and aboveground using the values in FRTB. In a second stage they are split into leaves (`FLTB`), stems (`FSTB`) and storage organs (`FOTB`). Since the partitioning fractions are derived from the state variable `DVS` they are regarded state variables as well. **Simulation parameters** (To be provided in cropdata dictionary): ======= ============================================= ======= ============ Name Description Type Unit ======= ============================================= ======= ============ FRTB Partitioning to roots as a function of TCr - development stage. FSTB Partitioning to stems as a function of TCr - development stage. FLTB Partitioning to leaves as a function of TCr - development stage. FOTB Partitioning to starge organs as a function TCr - of development stage. NPART Coefficient for the effect of N stress on SCR - leaf biomass allocation ======= ============================================= ======= ============ **State variables** ======= ================================================= ==== ============ Name Description Pbl Unit ======= ================================================= ==== ============ FR Fraction partitioned to roots. Y - FS Fraction partitioned to stems. Y - FL Fraction partitioned to leaves. 
Y - FO Fraction partitioned to storage orgains Y - ======= ================================================= ==== ============ **Rate variables** None **Signals send or handled** None **External dependencies:** ======= =================================== ================= ============ Name Description Provided by Unit ======= =================================== ================= ============ DVS Crop development stage DVS_Phenology - TRA Actual transpiration Simple_Evapotranspiration mm d-1 TRAMX Maximum transpiration Simple_Evapotranspiration mm d-1 NNI Nitrogen nutrition index npk_dynamics - ======= =================================== ================= ============ *Exceptions raised* A PartitioningError is raised if the partitioning coefficients to leaves, stems and storage organs on a given day do not add up to '1'. """ class Parameters(ParamTemplate): FRTB = AfgenTrait() FLTB = AfgenTrait() FSTB = AfgenTrait() FOTB = AfgenTrait() NPART = Float(-99.) # coefficient for the effect of N stress on leaf allocation class StateVariables(StatesTemplate): FR = Float(-99.) FL = Float(-99.) FS = Float(-99.) FO = Float(-99.) PF = Instance(PartioningFactors) def initialize(self, day, kiosk, cropdata): """ :param day: start date of the simulation :param kiosk: variable kiosk of this PCSE instance :param cropdata: dictionary with WOFOST cropdata key/value pairs """ self.params = self.Parameters(cropdata) self.kiosk = kiosk # initial partioning factors (pf) DVS = self.kiosk["DVS"] FR = self.params.FRTB(DVS) FL = self.params.FLTB(DVS) FS = self.params.FSTB(DVS) FO = self.params.FOTB(DVS) # Pack partitioning factors into tuple PF = PartioningFactors(FR, FL, FS, FO) # Initial states self.states = self.StateVariables(kiosk, publish=["FR","FL","FS","FO"], FR=FR, FL=FL, FS=FS, FO=FO, PF=PF) self._check_partitioning() def _check_partitioning(self): """Check for partitioning errors.""" FR = self.states.FR FL = self.states.FL FS = self.states.FS FO = self.states.FO checksum = FR+(FL+FS+FO)*(1.-FR) - 1. if abs(checksum) >= 0.0001: msg = ("Error in partitioning!\n") msg += ("Checksum: %f, FR: %5.3f, FL: %5.3f, FS: %5.3f, FO: %5.3f\n" \ % (checksum, FR, FL, FS, FO)) self.logger.error(msg) raise exc.PartitioningError(msg) @prepare_states def integrate(self, day, delt=1.0): """ Update partitioning factors based on development stage (DVS) and the Nitrogen nutrition Index (NNI) """ params = self.params states = self.states DVS = self.kiosk["DVS"] TRA = self.kiosk["TRA"] TRAMX = self.kiosk["TRAMX"] NNI = self.kiosk["NNI"] TRANRF = TRA/TRAMX if TRANRF < NNI: # Water stress is more severe than nitrogen stress and the # partitioning follows the original LINTUL2 assumptions # Note: we use specifically nitrogen stress not nutrient stress!!! FRTMOD = max( 1., 1./(TRANRF+0.5)) states.FR = min(0.6, params.FRTB(DVS) * FRTMOD) states.FL = params.FLTB(DVS) states.FS = params.FSTB(DVS) states.FO = params.FOTB(DVS)
else: # Nitrogen stress is more severe than water stress resulting in # less partitioning to leaves and more to stems FLVMOD = exp(-params.NPART * (1.0-NNI)) states.FL = params.FLTB(DVS) * FLVMOD states.FS = params.FSTB(DVS) + params.FLTB(DVS) - states.FL states.FR = params.FRTB(DVS) states.FO = p
arams.FOTB(DVS)

        # Pack partitioning factors into tuple
        states.PF = PartioningFactors(states.FR, states.FL,
                                      states.FS, states.FO)

        self._check_partitioning()

    def calc_rates(self, day, drv):
        """ Return partitioning factors based on current DVS. """
        # rate calculation does nothing for partitioning as it is a derived
        # state
        return self.states.PF
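
# A minimal standalone sketch (not part of PCSE) of the stress switch used in
# DVS_Partitioning_NPK.integrate above: when relative transpiration (TRANRF)
# drops below the nitrogen nutrition index (NNI), water stress dominates and
# root partitioning is boosted; otherwise leaf allocation is scaled down by
# FLVMOD = exp(-NPART * (1 - NNI)). All parameter values here are illustrative.
from math import exp

def stress_modifiers(TRANRF, NNI, NPART=1.0):
    if TRANRF < NNI:
        # water stress dominates: boost the root fraction
        FRTMOD = max(1., 1. / (TRANRF + 0.5))
        return {"FRTMOD": FRTMOD, "FLVMOD": 1.0}
    # nitrogen stress dominates: shift assimilates away from leaves
    return {"FRTMOD": 1.0, "FLVMOD": exp(-NPART * (1.0 - NNI))}

# e.g. stress_modifiers(0.9, 0.6) -> {'FRTMOD': 1.0, 'FLVMOD': exp(-0.4) ~ 0.67}
# and  stress_modifiers(0.4, 0.8) -> {'FRTMOD': 1/0.9 ~ 1.11, 'FLVMOD': 1.0}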
from temboo.Library.Amazon.SNS.AddPermission import AddPermission, AddPermissionInputSet, AddPermissionResultSet, AddPermissionChoreographyExecution from temboo.Library.Amazon.SNS.ConfirmSubscripti
on import ConfirmSubscription, ConfirmSubscriptionInputSet, ConfirmSubscriptionResultSet, ConfirmSubscriptionChoreographyExecution from temboo.Library.Amazon.SNS.CreateTopic import CreateTopic, CreateTopicInputSet, Cre
ateTopicResultSet, CreateTopicChoreographyExecution from temboo.Library.Amazon.SNS.DeleteTopic import DeleteTopic, DeleteTopicInputSet, DeleteTopicResultSet, DeleteTopicChoreographyExecution from temboo.Library.Amazon.SNS.GetTopicAttributes import GetTopicAttributes, GetTopicAttributesInputSet, GetTopicAttributesResultSet, GetTopicAttributesChoreographyExecution from temboo.Library.Amazon.SNS.ListSubscriptions import ListSubscriptions, ListSubscriptionsInputSet, ListSubscriptionsResultSet, ListSubscriptionsChoreographyExecution from temboo.Library.Amazon.SNS.ListSubscriptionsByTopic import ListSubscriptionsByTopic, ListSubscriptionsByTopicInputSet, ListSubscriptionsByTopicResultSet, ListSubscriptionsByTopicChoreographyExecution from temboo.Library.Amazon.SNS.ListTopics import ListTopics, ListTopicsInputSet, ListTopicsResultSet, ListTopicsChoreographyExecution from temboo.Library.Amazon.SNS.Publish import Publish, PublishInputSet, PublishResultSet, PublishChoreographyExecution from temboo.Library.Amazon.SNS.RemovePermission import RemovePermission, RemovePermissionInputSet, RemovePermissionResultSet, RemovePermissionChoreographyExecution from temboo.Library.Amazon.SNS.SetTopicAttributes import SetTopicAttributes, SetTopicAttributesInputSet, SetTopicAttributesResultSet, SetTopicAttributesChoreographyExecution from temboo.Library.Amazon.SNS.Subscribe import Subscribe, SubscribeInputSet, SubscribeResultSet, SubscribeChoreographyExecution from temboo.Library.Amazon.SNS.Unsubscribe import Unsubscribe, UnsubscribeInputSet, UnsubscribeResultSet, UnsubscribeChoreographyExecution
eeGroup, Subsession as OtreeSubsession, Constants
import json
import channels
import logging
from otree import constants_internal
import django.test
from otree.common_internal import (get_admin_secret_code)

client = django.test.Client()
ADMIN_SECRET_CODE = get_admin_secret_code()

# For automatic inactive pushing
#??? from .models import LiveManagementThread, LivePusherThread
from threading import Event
import time
# End-For automatic inactive pushing

from .pages import PresenterView

#############################################
#############################################
# Connected to websocket.connect
def ws_winnerpage_connect(message):
    print("*********CONNECTWINNERPAGE************")
    channels.Group("WINNERPAGE").add(message.reply_channel)

# Connected to websocket.receive
def ws_winnerpage_message(message):
    print("*********RECEIVEWINNERPAGE************")

# Connected to websocket.disconnect
def ws_winnerpage_disconnect(message):
    print("*********DISCONNECTWINNERPAGE************")
    channels.Group("WINNERPAGE").discard(message.reply_channel)

#############################################
#############################################
# Connected to websocket.connect
def ws_connect(message):
    print("*********CONNECT************")
    channels.Group("adminreport").add(message.reply_channel)

# Connected to websocket.receive
def ws_message(message):
    print("*********RECEIVE************")
    # Decode the url: No info in the url in this app
    # Decode the received message
    jsonmessage = json.loads(message.content['text'])
    subsession_pk = jsonmessage['subsession_pk']
    mysubsession = OtreeSubsession.objects.get(pk=subsession_pk)
    if 'order' in jsonmessage:
        order = jsonmessage['order']
        # Manage the synchronisation page between the 2 parts
        if order == "No Jump 2 Next":
            mysubsession.jump_2_next = False
            mysubsession.save()
            mysubsession.session.vars['running_part_2'] = "False"
            mysubsession.session.save()
        elif order == "Jump 2 Next":
            mysubsession.jump_2_next = True
            mysubsession.save()
            mysubsession.session.vars['running_part_2'] = "True"
            mysubsession.session.save()
        elif order == "push_all_players_on_page":
            page_name = jsonmessage['page_name']
            round_nb = jsonmessage['round_nb']
            for p in mysubsession.get_players():
                if ((str(p.participant._current_page_name) == page_name) &
                        (p.participant._round_number == round_nb)):
                    # This player is one of those who needs to be advanced
                    try:
                        if p.participant._current_form_page_url:
                            resp = client.post(
                                p.participant._current_form_page_url,
                                data={
                                    constants_internal.timeout_happened: True,
                                    constants_internal.admin_secret_code: ADMIN_SECRET_CODE
                                },
                                follow=True
                            )
                        else:
                            resp = client.get(p.participant._start_url(), follow=True)
                    except:
                        logging.exception("Failed to advance participant.")
                        raise
                    assert resp.status_code < 400
                    p.participant.vars['participant_was_pushed'] = 'True'
                    p.participant.save()
                    channels.Group(
                        'auto-advance-{}'.format(p.participant.code)
                    ).send(
                        {'text': json.dumps(
                            {'auto_advanced': True})}
                    )
        elif order == "push_active_players_on_page":
            group_pk = jsonmessage['group_pk']
            mygroup = OtreeGroup.objects.get(pk=group_pk)
            page_name = jsonmessage['page_name']
            round_nb = jsonmessage['round_nb']
            for p in mygroup.get_players():
                if ((str(p.participant._current_page_name) == page_name) &
                        (p.participant._round_number == round_nb) &
                        (p.participant.vars['active_flag'] != 'inactive')):
                    # This player is one of those who needs to be advanced
                    try:
                        if p.participant._current_form_page_url:
                            resp = clie
nt.post( p.participant._current_form_page_url, data={ constants_internal.timeout_happened: True,
constants_internal.admin_secret_code: ADMIN_SECRET_CODE }, follow=True ) else: resp = client.get(p.participant._start_url(), follow=True) except: logging.exception("Failed to advance participant.") raise assert resp.status_code < 400 p.participant.vars['participant_was_pushed'] = 'True' p.participant.save() channels.Group( 'auto-advance-{}'.format(p.participant.code) ).send( {'text': json.dumps( {'auto_advanced': True})} ) elif order == "push_inactive_players_on_page": group_pk = jsonmessage['group_pk'] mygroup = OtreeGroup.objects.get(pk=group_pk) page_name = jsonmessage['page_name'] round_nb = jsonmessage['round_nb'] for p in mygroup.get_players(): if ((str(p.participant._current_page_name) == page_name) & (p.participant._round_number == round_nb) & (p.participant.vars['active_flag'] == 'inactive')): # This player is one of those who needs to be advanced try: if p.participant._current_form_page_url: resp = client.post( p.participant._current_form_page_url, data={ constants_internal.timeout_happened: True, constants_internal.admin_secret_code: ADMIN_SECRET_CODE }, follow=True ) else: resp = client.get(p.participant._start_url(), follow=True) except: logging.exception("Failed to advance participant.") raise assert resp.status_code < 400 p.participant.vars['participant_was_pushed'] = 'True' p.participant.save() channels.Group( 'auto-advance-{}'.format(p.participant.code) ).send( {'text': json.dumps( {'auto_advanced': True})} ) elif order == "deactivate_all_group_on_page": group_pk = jsonmessage['group_pk'] mygroup = OtreeGroup.objects.get(pk=group_pk) page_name = jsonmessage['page_name'] round_nb = jsonmessage['round_nb'] for p in mygroup.get_players(): if ((str(p.participant._current_page_name) == page_name) & (p.participant._round_number == round_nb)): p.participant.vars['active_flag'] = 'inactive' p.participant.save() elif order == "reactivate_all_group_on_page": group_pk = jsonmessage['group_pk'] mygroup = OtreeGroup.objects.get(pk=group_pk) pa
# i2c_esp.py Test program for asi2c.py # Tests Responder on ESP8266 # The MIT License (MIT) # # Copyright (c) 2018 Peter Hinch # # Permission is hereby granted, free of charge, to any per
son obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the S
oftware. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. # pyb esp8266 # scl X9 - 0 # sda X10 - 2 # sync X11 - 5 # ack Y8 - 4 # gnd - gnd import uasyncio as asyncio from machine import Pin, I2C import asi2c import ujson i2c = I2C(scl=Pin(0),sda=Pin(2)) # software I2C syn = Pin(5) ack = Pin(4) chan = asi2c.Responder(i2c, syn, ack) async def receiver(): sreader = asyncio.StreamReader(chan) await chan.ready() print('started') for _ in range(5): # Test flow control res = await sreader.readline() print('Received', ujson.loads(res)) await asyncio.sleep(4) while True: res = await sreader.readline() print('Received', ujson.loads(res)) async def sender(): swriter = asyncio.StreamWriter(chan, {}) txdata = [0, 0] while True: await swriter.awrite(''.join((ujson.dumps(txdata), '\n'))) txdata[1] += 1 await asyncio.sleep_ms(1500) loop = asyncio.get_event_loop() loop.create_task(receiver()) loop.create_task(sender()) try: loop.run_forever() finally: chan.close() # for subsequent runs
# Borrowed and modified from xbmcswift
import logging

import xbmc

from pulsar.addon import ADDON_ID


class XBMCHandler(logging.StreamHandler):
    # Map Python logging level names to XBMC log levels
    xbmc_levels = {
        'DEBUG': 0,
        'INFO': 2,
        'WARNING': 3,
        'ERROR': 4,
        'CRITICAL': 5,
    }

    def emit(self, record):
        # record.levelname is e.g. 'CRITICAL' (not 'LOGCRITICAL');
        # fall back to level 2 for any unmapped level name.
        xbmc_level = self.xbmc_levels.get(record.levelname, 2)
        xbmc.log(self.format(record), xbmc_level)


def _get_logger():
    logger = logging.getLogger(ADDON_ID)
    logger.setLevel(logging.DEBUG)
    handler = XBMCHandler()
    handler.setFormatter(logging.Formatter('[%(name)s] %(message)s'))
    logger.addHandler(handler)
    return logger


log = _get_logger()
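# Hedged usage sketch (not from the original file; the import path is an
# assumption for illustration): any module importing `log` from here gets
# its messages routed to the Kodi/XBMC log via XBMCHandler.emit, e.g.
#
#     from pulsar.logger import log   # hypothetical module path
#     log.debug('probing trackers')   # -> xbmc.log('[<ADDON_ID>] probing trackers', 0)
#     log.error('request failed')     # -> xbmc.log('[<ADDON_ID>] request failed', 4)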
options.listhosts or options.listtasks or options.listtags or options.syntax: self._tqm = None else: self._tqm = TaskQueueManager(inventory=inventory, variable_manager=variable_manager, loader=loader, options=options, passwords=self.passwords) def run(self): ''' Run the given playbook, based on the settings in the play which may limit the runs to serialized groups, etc. ''' result = 0 entrylist = [] entry = {} try: for playbook_path in self._playbooks: pb = Playbook.load(playbook_path, variable_manager=self._variable_manager, loader=self._loader) self._inventory.set_playbook_basedir(os.path.dirname(playbook_path)) if self._tqm is None: # we are doi
ng a listing entry = {'playbook': playbook_path} entry['plays'] = [] else: # make sure the tqm has callbacks loaded self._tqm.load_callbacks() self._tqm.send_callback('v2_playbook_on_start', pb) i = 1 plays = pb.get_plays() display.vv(u'%d plays in %s' % (len(plays), to_un
icode(playbook_path))) for play in plays: if play._included_path is not None: self._loader.set_basedir(play._included_path) else: self._loader.set_basedir(pb._basedir) # clear any filters which may have been applied to the inventory self._inventory.remove_restriction() if play.vars_prompt: for var in play.vars_prompt: vname = var['name'] prompt = var.get("prompt", vname) default = var.get("default", None) private = var.get("private", True) confirm = var.get("confirm", False) encrypt = var.get("encrypt", None) salt_size = var.get("salt_size", None) salt = var.get("salt", None) if vname not in self._variable_manager.extra_vars: if self._tqm: self._tqm.send_callback('v2_playbook_on_vars_prompt', vname, private, prompt, encrypt, confirm, salt_size, salt, default) play.vars[vname] = display.do_var_prompt(vname, private, prompt, encrypt, confirm, salt_size, salt, default) else: # we are either in --list-<option> or syntax check play.vars[vname] = default # Create a temporary copy of the play here, so we can run post_validate # on it without the templating changes affecting the original object. all_vars = self._variable_manager.get_vars(loader=self._loader, play=play) templar = Templar(loader=self._loader, variables=all_vars) new_play = play.copy() new_play.post_validate(templar) if self._options.syntax: continue if self._tqm is None: # we are just doing a listing entry['plays'].append(new_play) else: self._tqm._unreachable_hosts.update(self._unreachable_hosts) # we are actually running plays for batch in self._get_serialized_batches(new_play): if len(batch) == 0: self._tqm.send_callback('v2_playbook_on_play_start', new_play) self._tqm.send_callback('v2_playbook_on_no_hosts_matched') break # restrict the inventory to the hosts in the serialized batch self._inventory.restrict_to_hosts(batch) # and run it... result = self._tqm.run(play=play) # check the number of failures here, to see if they're above the maximum # failure percentage allowed, or if any errors are fatal. 
If either of those # conditions are met, we break out, otherwise we only break out if the entire # batch failed failed_hosts_count = len(self._tqm._failed_hosts) + len(self._tqm._unreachable_hosts) if new_play.max_fail_percentage is not None and \ int((new_play.max_fail_percentage)/100.0 * len(batch)) > int((len(batch) - failed_hosts_count) / len(batch) * 100.0): break elif len(batch) == failed_hosts_count: break # clear the failed hosts dictionaires in the TQM for the next batch self._unreachable_hosts.update(self._tqm._unreachable_hosts) self._tqm.clear_failed_hosts() # if the last result wasn't zero or 3 (some hosts were unreachable), # break out of the serial batch loop if result not in (0, 3): break i = i + 1 # per play if entry: entrylist.append(entry) # per playbook # send the stats callback for this playbook if self._tqm is not None: if C.RETRY_FILES_ENABLED: retries = set(self._tqm._failed_hosts.keys()) retries.update(self._tqm._unreachable_hosts.keys()) retries = sorted(retries) if len(retries) > 0: if C.RETRY_FILES_SAVE_PATH: basedir = C.shell_expand(C.RETRY_FILES_SAVE_PATH) else: basedir = os.path.dirname(playbook_path) (retry_name, _) = os.path.splitext(os.path.basename(playbook_path)) filename = os.path.join(basedir, "%s.retry" % retry_name) if self._generate_retry_inventory(filename, retries): display.display("\tto retry, use: --limit @%s\n" % filename) self._tqm.send_callback('v2_playbook_on_stats', self._tqm._stats) # if the last result wasn't zero, break out of the playbook file name loop if result != 0: break if entrylist: return entrylist finally: if self._tqm is not None: self._tqm.cleanup() if self._options.syntax: display.display("No issues encountered") return result return result def _get_serialized_batches(self, play): ''' Returns a list of hosts, subdivided into batches based on the serial size specified in the play. ''' # make sure we have a unique list of hosts all_hosts = self._inventory.get_hosts(play.hosts) # check to see if the serial number was specified as a percentage, # and convert it to an integer value based on the number of hosts if isinstance(play.serial, string_types) and play.serial.endswith('%'): serial_pct = int(play.serial.replace("%","")) serial = int((serial_pct/100.0) * len(all_hosts)) or 1 else: if play.serial is None: serial = -1 else: serial = int(play.serial) # if the serial count was not specified or is invalid, default to # a list of all
#!/usr/bin/python
from pisi.actionsapi import shelltools, get, cmaketools, pisitools


def setup():
    cmaketools.configure()


def build():
    cmaketools.make()


def install():
    cmaketools.install()
    pisitools.dodoc("AUTHORS", "ChangeLog", "COPYING")
import os from setuptools import setup, find_packages here = os.path.abspath(os.path.dirname(__file__)) README = open(os.path.join(here, 'README.txt')).read() CHANGES = open(os.path.join(here, 'CHANGES.txt')).read() requires = [ 'pyramid', 'pyramid_debugtoolbar', 'waitress', ] setup(name='tilecost', version='0.0', description='tilecost', long_description=README + '\n\n' + CHANGES, classifiers=[ "Programming Language :: Python", "Framework :: Pyramid", "Topic :: Internet :: WWW/HTTP", "Topic :: Internet :: WWW/HTTP :: WSGI :: Application", ], author='', author_email='', url='', keywords='web pyramid pylons', packages=find_packages(), include_package_data=True, zip
_safe=False, install_requires=requires, tests_require=requires, test_suite="tilecost", entry_points="""\ [paste.app_factory] main = tilecost:main """,
)
__author__ = 'mwagner' from PyQt4.Qt import Qt from PyQt4.QtGui import QDialog, QIcon from ..view.Ui_VertexDialog import Ui_VertexDialog from ..model.VertexToolsError import * class VertexDialog(QDialog, Ui_VertexDialog): def __init__(self, plugin, parent=None): super(VertexDialog, self).__init__(parent) self.setAttribute(Qt.WA_DeleteOnClose) self.plugin = plugin self.setupUi(self) self.helpButton.setIcon(self.plugin.get_icon("
help.gif")) self.se
tWindowIcon(QIcon(":beninCad/info.png"))
# -*- coding: utf-8 -*- # Open Source Initiative OSI - The MIT License (MIT):Licensing # # The MIT License (MIT) # Copyright (c) 2012 DotCloud Inc (opensource@dotcloud.com) # # Permission is hereby granted, free of charge, to any person obtaining a copy of # this software and associated documentation files (the "Software"), to deal in # the Software without restriction, including without limitation the rights to # use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies # of the Software, and to permit persons to whom the Software is furnished to do # so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in all # copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. import gevent import zerorpc from testutils import teardown, random_ipc_endpoint def test_client_connect(): endpoint = random_ipc_endpoint() class MySrv(zerorpc.Server): def lolita(self): return 42 srv = MySrv() srv.bind(endpoint) gevent.spawn(srv.run) client = zerorpc.Client() client.connect(endpoint) assert client.lolita() == 42 def test_client_quick_connect(): endpoint = random_ipc_endpoint() class MySrv(zerorpc.Server): def lol
ita(self): return 42 srv = MySrv() srv.bind(endpoint) gevent.spawn(sr
v.run) client = zerorpc.Client(endpoint) assert client.lolita() == 42
import json
import requests
import key

API_key = key.getAPIkey()


# load all champion pictures
def load_champion_pictures(champion_json):
    print len(champion_json['data'])
    version = champion_json['version']
    print "version: " + version
    for champion in champion_json['data']:
        print champion
        r = requests.get('http://ddragon.leagueoflegends.com/cdn/' + version +
                         '/img/champion/' + champion + '.png')
        if r.status_code == 200:
            img = r.content
            # image data is binary, so write in binary mode
            with open('static/images/champions/' +
                      champion_json['data'][champion]['name'] + '.png', 'wb') as f:
                f.write(img)
            print "img created"
        else:
            print "pictures: something went wrong"


# load champion json
# converts to python dict using json() and json.dump() for error checking
def load_champion_json():
    try:
        r = requests.get('https://global.api.pvp.net/api/lol/static-data/na/v1.2/champion?&api_key=' + API_key)
        champion_json = r.json()
        if 'status' in champion_json:
            print champion_json['status']['message']
            return
        load_champion_pictures(champion_json)
        # quick fix to change MonkeyKing to Wukong so that sort_keys sorts it properly
        champion_json['data']['Wukong'] = champion_json['data']['MonkeyKing']
        del champion_json['data']['MonkeyKing']
    except ValueError as e:
        print e.message
        return
    with open('static/json/champion.json', 'w') as f:
        json.dump(champion_json, f, sort_keys=True)


load_champion_json()
# Standard import os import sys # Third Party import numpy as np import matplotlib as mpl mpl.use('Agg') import matplotlib.pyplot as plt import seaborn as sns import pyfilm as pf from skimage.measure import label from skimage import filters plt.rcParams.update({'figure.autolayout': True}) mpl.rcParams['axes.unicode_minus'] = False #local from run import Run import plot_style plot_style.white() pal = sns.color_palette('deep') def structure_analysis(run, perc_thresh, create_film=False): """ Calculates the number of structures as a function of time for a given percentile cut-off. Writes results and plots to an appropriate directory. Parameters ---------- run : object Run object calculated by the Run class. perc_thresh : int Percentile threshold at which to cut off fluctuations. create_film : bool Determines whether a film of the labelled structures is produced. """ run.read_ntot() make_results_dir(run, perc_thresh) labelled_image, nlabels = label_structures(run, perc_thresh) no_structures = count_structures(run, labelled_image, nlabels) plot_no_structures(run, no_structures, perc_thresh) save_results(run, no_structures, perc_thresh) if create_film: make_film(run, no_structures, labelled_image, perc_thresh) def make_results_dir(run, perc_thresh): os.system('mkdir -p ' + run.run_dir + 'analysis/structures_' + str(perc_thresh)) def label_structures(r
un, perc_thresh): nlabels = np.empty(run.nt, dtype=int) labelled_image = np.empty([run.nt, run.nx, run.ny], dtype=int) for it in range(run.nt): tmp = run.ntot_i[it,:,:].copy() # Apply Gaussian filter tmp = filters.gaussian(tmp, sigma=1) thresh = np.percentile(tmp, perc_thresh, interpolation='nearest') tmp_max = np.max(tmp) tmp_thresh = thresh/tmp_max tmp /= tmp_max tmp[tmp <= tmp
_thresh] = 0 tmp[tmp > tmp_thresh] = 1 # Label the resulting structures labelled_image[it,:,:], nlabels[it] = label(tmp, return_num=True, background=0) return(labelled_image, nlabels) def count_structures(run, labelled_image, nlabels): """ Remove any structures which are too small and count structures. """ nblobs = np.empty(run.nt, dtype=int) for it in range(run.nt): hist = np.histogram(np.ravel(labelled_image[it]), bins=range(1,nlabels[it]+1))[0] smallest_struc = np.mean(hist)*0.1 hist = hist[hist > smallest_struc] nblobs[it] = len(hist) return(nblobs) def plot_no_structures(run, no_structures, perc_thresh): """ Plot number of structures as a function of time. """ plt.clf() plt.plot(no_structures) plt.xlabel('Time index') plt.ylabel('Number of structures') plt.ylim(0) plt.savefig(run.run_dir + 'analysis/structures_' + str(perc_thresh) + '/nblobs.pdf') def save_results(run, no_structures, perc_thresh): """ Save the number of structures as a function of time in a file. """ np.savetxt(run.run_dir + 'analysis/structures_' + str(perc_thresh) + '/nblobs.csv', np.transpose((range(run.nt), no_structures)), delimiter=',', fmt='%d', header='t_index,nblobs') def make_film(run, no_structures, labelled_image, perc_thresh): titles = [] for it in range(run.nt): titles.append('No. of structures = {}'.format(no_structures[it])) plot_options = {'cmap':'gist_rainbow', 'levels':np.arange(-1,np.max(labelled_image)) } options = {'file_name':'structures', 'film_dir':run.run_dir + 'analysis/structures_' + str(perc_thresh) , 'frame_dir':run.run_dir + 'analysis/structures_' + str(perc_thresh) + '/film_frames', 'nprocs':None, 'aspect':'equal', 'xlabel':r'$x$ (m)', 'ylabel':r'$y$ (m)', 'cbar_ticks':np.arange(-1,np.max(labelled_image),2), 'cbar_label':r'Label', 'fps':10, 'bbox_inches':'tight', 'title':titles } pf.make_film_2d(run.r, run.z, labelled_image, plot_options=plot_options, options=options) if __name__ == '__main__': run = Run(sys.argv[1]) structure_analysis(run, 75, create_film=False) structure_analysis(run, 95, create_film=False)
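# Minimal sketch (not part of the original script) of the threshold-and-label
# step performed by label_structures above, on a toy array; values are
# illustrative and only NumPy/scikit-image are assumed:
#
#     import numpy as np
#     from skimage.measure import label
#
#     field = np.array([[0.1, 0.9, 0.0],
#                       [0.0, 0.8, 0.0],
#                       [0.7, 0.0, 0.6]])
#     binary = (field > np.percentile(field, 75)).astype(int)
#     labelled, nlabels = label(binary, return_num=True, background=0)
#     # labelled tags each connected region with a distinct integer; here
#     # the two adjacent cells above the threshold form a single region.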
#!/usr/bin/env python # This file mainly exists to allow python setup.py test to work. # # You can test all the variations of
tests by running: # # ./manage.py test && python runtests.py && ./setup.py test && echo OK # import os, sys os.environ['DJANGO_SETTINGS_MODULE'] = 'test_settings' from django.core.management im
port call_command def runtests(): # use the call_command approach so that we are as similar to running # './manage.py test' as possible. Notably we need the South migrations to be # run. call_command('test', verbosity=2) sys.exit(0) if __name__ == '__main__': runtests()
marcxml, req=req, **argd)) else: _ = gettext_set_language(argd['ln']) req.write(template.tmpl_page(top=_('Unknown type: %s') % argd['type'], **argd)) def record_get_keywords(record, main_field=bconfig.CFG_MAIN_FIELD, others=bconfig.CFG_OTHER_FIELDS): """Returns a dictionary of keywordToken objects from the marc record. Weight is set to (0,0) if no weight can be found. This will load keywords from the field 653 and 695__a (which are the old 'DESY' keywords) @var record: int or marc record, if int - marc record is loaded from the database. If you pass record instance, keywords are extracted from it @return: tuple (found, keywords, marcxml) found - int indicating how many main_field keywords were found the other fields are not counted keywords - standard dictionary of keywordToken objects marcrec - marc record object loaded with data """ keywords = {} if isinstance(main_field, basestring): main_field = [main_field] if isinstance(others, basestring): others = [others] if isinstance(record, int): rec = get_record(record) else: rec = record found = 0 for m_field in main_field: tag, ind1, ind2 = bibclassify_engine._parse_marc_code(m_field) for field in rec.get(tag, []): keyword = '' weight = 0 type = '' for subfield in field[0]: if subfield[0] == 'a': keyword = subfield[1] elif subfield[0] == 'n': weight = int(subfield[1]) elif subfield[0] == '9': type = subfield[1] if keyword: found += 1 keywords[bor.KeywordToken(keyword, type=type)] = [[(0,0) for x in range(weight)]] if others: for field_no in others: tag, ind1, ind2 = bibclassify_engine._parse_marc_code(field_no) type = 'f%s' % field_no for field in rec.get(tag, []): keyword = '' for subfield in field[0]: if subfield[0] == 'a': keyword = subfield[1] keywords[bor.KeywordToken(keyword, type=type)] = [[(0,0)]] break return found, keywords, rec def generate_keywords(req, recid, argd): """Extracts keywords from the fulltexts (if found) for the given recid. It first checks whether the keywords are not already stored in the temp file (maybe from the previous run). 
@var req: req object @var recid: record id @var argd: arguments passed from web @keyword store_keywords: boolean, whether to save records in the file @return: standard dictionary of kw objects or {} """ ln = argd['ln'] _ = gettext_set_language(ln) keywords = {} # check the files were not already generated abs_path = bibclassify_engine.get_tmp_file(recid) if os.path.exists(abs_path): try: # Try to load the data from the tmp file recs = bibupload.xml_marc_to_records(bibupload.open_marc_file(abs_path)) return record_get_keywords(recs[0]) except: pass # check it is allowed (for this user) to generate pages (exit_stat, msg) = acce.acc_authorize_action(req, 'runbibclassify') if exit_stat != 0: log.info('Access denied: ' + msg) msg = _("The site settings do not allow automatic keyword extraction") req.write(template.tmpl_page_msg(msg=msg)) return 0, keywords, None # register generation bibdocfiles = BibRecDocs(recid).list_latest_files() if bibdocfiles: # User arrived at a page, but no keywords are available inprogress, msg = _doc_already_submitted(recid) if argd['generate'] != 'yes': # Display a form and give them possibility to generate keywords if inprogress: req.write(template.tmpl_page_msg(msg='<div class="warningbox">%s</div>' % _(msg))) else: req.write(template.tmpl_page_generate_keywords(req=req, **argd)) return 0, keywords, None else: # after user clicked on "generate" button if inprogress: req.write(template.tmpl_page_msg(msg='<div class="warningbox">%s</div>' % _(msg) )) else: schedule_extraction(recid, taxonomy=bconfig.CFG_EXTRACTION_TAXONOMY) req.write(template.tmpl_page_msg(msg='<div class="warningbox">%s</div>' % _('We have registered your request, the automated' 'keyword extraction will run after some time. Please return back in a while.'))) else: req.write(template.tmpl_page_msg(msg='<div class="warningbox">%s</div>' % _("Unfortunately, we don't have a PDF fulltext for this record in the storage, \ keywords cannot be generated using an automated process."))) return 0, keywords, None def upload_keywords(filename, mode='correct', recids=None): """Stores the extracted keywords in the database @var filename: fullpath to the file with marc record @keyword mode: correct|replace|add|delete use correct to add fields if they are different replace all fi
elds with fields from th
e file add - add (even duplicate) fields delete - delete fields which are inside the file @keyword recids: list of record ids, this arg comes from the bibclassify daemon and it is used when the recids contains one entry (recid) - ie. one individual document was processed. We use it to mark the job title so that it is possible to query database if the bibclassify was run over that document (in case of collections with many recids, we simply construct a general title) """ if mode == 'correct': m = '-c' elif mode == 'replace': m = '-r' elif mode == 'add': m = '-a' elif mode == 'delete': m = '-d' else: raise Exception('Unknown mode') # let's use the user column to store the information, cause no better alternative in sight... user_title = 'bibclassify.upload' if recids and len(recids) == 1: user_title = 'extract:%d' % recids[0] bibtask.task_low_level_submission('bibupload', user_title, '-n', m, filename) def schedule_extraction(recid, taxonomy): bibtask.task_low_level_submission('bibclassify', 'extract:%s' % recid, '-k', taxonomy, '-i', '%s' % recid) def _doc_already_submitted(recid): # check extraction was already registered sql = "SELECT COUNT(proc) FROM schTASK WHERE proc = %s AND user = %s\ AND (status='WAITING' OR status='RUNNING')" if dbquery.run_sql(sql, ('bibclassify','extract:%s' % recid))[0][0] > 0: return (True, "The automated keyword extraction \ for this document has been already scheduled. Please return back in a while.") # check the upload is inside the scheduled tasks sql = "SELECT COUNT(proc) FROM schTASK WHERE proc = %s AND user = %s\ AND (status='WAITING' OR status='RUNNING')" if dbquery.run_sql(sql, ('bibupload','extract:%s' % recid))[0][0] > 0: return (True, 'The document was already processed, ' 'it will take a while for it to be ingested.') # or the task was run and is already archived sql = "SELECT COUNT(proc) FROM hstTASK WHERE proc = %s AND user = %s" if dbquery.run_sql(sql, ('bibupload','extract:%s' % recid))[0][0] > 0: return (True, 'The document was already processed, ' 'at this moment, the automated extraction is not available.') # or the task was already ran sql = "SELECT COUNT(proc) FROM schTASK WHERE proc = %s AND user = %s\ AND (status='DONE')" if dbquery.run_sql(sql, ('bibclassify','extra
#!/usr/bin/env python import mredis import time ports = [6379, 6380] servers = [] for port in ports: servers.append({'host': 'localhost', 'port': port, 'db': 0}) mr = mredis.MRedis(servers) # Destructive test of the database #
print mr.flushall() #print mr.flushdb() print mr.ping() # Build a set of keys for operations keys = set() for x in xrange(0, 100): key = 'key:%.8f' % time.time() keys.add(key) for key in keys: mr.set(key, time.time()) fetched = mr.keys('key:*') results = [] for server in fetched: for key
in fetched[server]: results.append('%s->%s' % (key, mr.get(key))) print '%i keys fetched' % len(results) for key in keys: mr.delete(key) print mr.bgrewriteaof() print mr.dbsize() print mr.lastsave() #print mr.info() print mr.randomkey()
#!/usr/bin/python3 import argparse, random, textwrap from datetime import datetime from urllib import request from xml.etree import ElementTree labels = { "clouds": "%", "humidity": "%", "precipitation": "%", "temp": "°F", "wind-direction": "°", "wind-speed": " mph", } parser = argparse.ArgumentParser(description = "display weather using data from weather.gov") parser.add_argument("latitude", help = "latitude of location", type = float) parser.add_argument("longitude", help = "longitude of location", type = float) args = parser.parse_args() def print_weather(latitude, longitude): # weather.gov provides two xml files: digitalDWML and dwml. # digitalDWML includes detailed, 24-hour forecast data for the next 7 days. # dwml includes simple data for the current day as well as text and icons. # in this script, digitalDWML is referred to as "detailed" and dwml is # referred to as "simple". weather_detailed_xml = request.urlopen("http://forecast.weather.gov/MapClick.php?lat=" + str(latitude) + "&lon=" + str(longitude) + "&FcstType=digitalDWML").read() weather_simple_xml = request.urlopen("http://forecast.weather.gov/MapClick.php?lat=" + str(latitude) + "&lon=" + str(longitude) + "&FcstType=dwml").read() # these variables and functions refer to digitalDWML root = ElementTree.fromstring(weather_detailed_xml) parameters = root.find("data").find("parameters") def temperature(type): for node in parameters.iter("temperature"): if node.get("type") == type: return node wrapped_description = "\n".join( textwrap.wrap( ElementTree.fromstring(weather_simple_xml).\ find("data").find("parameters").find("weather").\ find("weather-conditions").attrib["weather-summary"], width = 30, break_long_words = False)) print("Weather Forecast for " + root.find("data").find("location").find("city").text + ":\n" + wrapped_description + "\n" ) print("Updated: " # %z is defective so the timezone is cropped from the date string + datetime.strptime( root.find("data").find("time-layout").find("start-valid-time").text[:-6], "%Y-%m-%dT%H:%M:%S").strftime("%d %B %Y @ %I:%M %p") ) print("Temperature: " + temperature("hourly")[0].text + labels["temp"] ) print("Cloud Cover: " + parameters.find("cloud-amount")[0].text + labels["clouds"] ) print("Sustained Wind: " + parameters.find("wind-speed")[0].text + labels["wind-speed"] + " @ " + parameters.find("direction")[0].text + labels["wind-direction"] ) print("Humidity: " + parameters.find("humidity")[0].text + labels["humidity"] ) print("Precipitation: " + parameters.find("probability-of-precipitation")[0].text + labels["precipitation"] ) try: print_weather(args.latitude, args.longitude) except Exception as error: if type(error) == ElementTree.ParseError: print("error: invalid coordinates given or weather.gov's xml format has changed.") else: print
("erro
r: " + error)
# -*- coding: utf-8 -*- from __future__ import unicode_literals import errno import hashlib import os from django.conf import settings from django.core.files import File from django.core.files.storage import FileSystemStorage from django.utils.encoding import force_unicode __all__ = ["HashedFileSystemStorage"] __author__ = "pmeier82" class ContentExists(Exception): pass class HashedFileSystemStorage(FileSystemStorage): """`FileSystemStorage` subclass that manages file names by content hashes""" def get_available_name(self, name): raise ContentExists() def _get_content_name(self, name, content, chunk_size=None): dir_name = os.path.split(name)[0] file_name = self._generate_hash(content=content, chunk_size=chunk_size) return os.path.join(dir_name, file_name) def _generate_hash(self, content, chunk_size=None): if chunk_size is None: chunk_size = getattr(content, "DEFAULT_CHUNK_SIZE", File.DEFAULT_CHUNK_SIZE) hash_gen = hashlib.sha1() cursor = content.tell() content.seek(0) try: while True: data = content.read(chunk_size) if not data: break hash_gen.update(data) return hash_gen.hexdigest() finally:
content.seek(cursor) def save(self, name, content):
if getattr(settings, "DEBUG", None) is True: print "{}::save({})".format(self.__class__.__name__, name) if name is None: name = content.name name = self._get_content_name(name, content) name = self._save(name, content) return force_unicode(name.replace('\\', '/')) def _save(self, name, content): new_name = self._get_content_name(name=name, content=content) try: return super(HashedFileSystemStorage, self)._save(new_name, content) except ContentExists: pass except OSError, e: if e.errno == errno.EEXIST: pass else: raise return new_name def delete(self, name): if getattr(settings, "DEBUG", None) is True: print "{}::delete({})".format(self.__class__.__name__, name) return super(HashedFileSystemStorage, self).delete(name) if __name__ == "__main__": pass
"""Extensions which provide a block segments.""" from __future__
import division from __future__ import a
bsolute_import from __future__ import print_function from __future__ import unicode_literals
from django.shortcuts import render, get_object_or_404
from django.views import generic
from django.http import HttpResponse, HttpResponseRedirect
from django.template import loader
from rest_framework import reverse
from druidapi.query.models import QueryModel
from models import Result
from forms import SearchForm

import requests
import json


class IndexView(generic.View):
    """ The view for the main page, where the search form is """

    def get(self, request):
        # Instantiate the form before handing it to the template
        form = SearchForm()
        return render(request, 'index.html', {'form': form})

    def post(self, request):
        form = SearchForm(request.POST)
        if form.is_valid():
            # Little bit of cheating, ideally the html would handle this
            # but, I felt like building the webapp in django...
            # alternatively, I could just reach over and build this.
            start = form.cleaned_data['start'].isoformat()
            end = form.cleaned_data['end'].isoformat()
            # POST the query and return the pk, so we can look it up later
            r = requests.post('http://localhost:9000/api/query/',
                              data={'start_date': start, 'end_date': end})
            result = Result.objects.create(key=r.json()["pk"])
            result.save()
            # To the results!
            return HttpResponseRedirect("/{0}/".format(r.json()["pk"]))
        else:
            return render(request, 'index.html', {'form': form})


class ResultsView(generic.View):
    """ When the search is executed, it needs to display the results... """

    def get(self, request, pk):
        result = Result.objects.get(key=pk)
        # GET the results for the key we're given
        r = requests.get("http://localhost:9000/api/query/{0}/execute/".format(pk))
        result.data = r.json()
        return render(request, 'results.html', {'result': result})
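# Hedged sketch of the URL wiring these views appear to assume (the POST
# handler above redirects to "/<pk>/"); pattern names are illustrative,
# not taken from the project's actual urls.py:
#
#     from django.conf.urls import url
#
#     urlpatterns = [
#         url(r'^$', IndexView.as_view(), name='index'),
#         url(r'^(?P<pk>\d+)/$', ResultsView.as_view(), name='results'),
#     ]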
import threading
import unittest
from decimal import Decimal
from importlib import import_module
from io import BytesIO

from ijson import common
from ijson.backends.python import basic_parse, Lexer
from ijson.compat import IS_PY2

JSON = b'''
{
    "docs": [
        {
            "null": null,
            "boolean": false,
            "true": true,
            "integer": 0,
            "double": 0.5,
            "exponent": 1.0e+2,
            "long": 10000000000,
            "string": "\\u0441\\u0442\\u0440\\u043e\\u043a\\u0430 - \xd1\x82\xd0\xb5\xd1\x81\xd1\x82"
        },
        {
            "meta": [[1], {}]
        },
        {
            "meta": {"key": "value"}
        },
        {
            "meta": null
        }
    ]
}
'''

JSON_EVENTS = [
    ('start_map', None),
    ('map_key', 'docs'),
    ('start_array', None),
    ('start_map', None),
    ('map_key', 'null'),
('null', None), ('map_key', 'boolean'), ('boolean', False), ('m
ap_key', 'true'), ('boolean', True), ('map_key', 'integer'), ('number', 0), ('map_key', 'double'), ('number', Decimal('0.5')), ('map_key', 'exponent'), ('number', 100), ('map_key', 'long'), ('number', 10000000000), ('map_key', 'string'), ('string', 'строка - тест'), ('end_map', None), ('start_map', None), ('map_key', 'meta'), ('start_array', None), ('start_array', None), ('number', 1), ('end_array', None), ('start_map', None), ('end_map', None), ('end_array', None), ('end_map', None), ('start_map', None), ('map_key', 'meta'), ('start_map', None), ('map_key', 'key'), ('string', 'value'), ('end_map', None), ('end_map', None), ('start_map', None), ('map_key', 'meta'), ('null', None), ('end_map', None), ('end_array', None), ('end_map', None), ] SCALAR_JSON = b'0' INVALID_JSONS = [ b'["key", "value",]', # trailing comma b'["key" "value"]', # no comma b'{"key": "value",}', # trailing comma b'{"key": "value" "key"}', # no comma b'{"key" "value"}', # no colon b'invalid', # unknown lexeme b'[1, 2] dangling junk' # dangling junk ] YAJL1_PASSING_INVALID = INVALID_JSONS[6] INCOMPLETE_JSONS = [ b'', b'"test', b'[', b'[1', b'[1,', b'{', b'{"key"', b'{"key":', b'{"key": "value"', b'{"key": "value",', ] STRINGS_JSON = br''' { "str1": "", "str2": "\"", "str3": "\\", "str4": "\\\\", "special\t": "\b\f\n\r\t" } ''' NUMBERS_JSON = b'[1, 1.0, 1E2]' SURROGATE_PAIRS_JSON = b'"\uD83D\uDCA9"' class Parse(object): ''' Base class for parsing tests that is used to create test cases for each available backends. ''' def test_basic_parse(self): events = list(self.backend.basic_parse(BytesIO(JSON))) self.assertEqual(events, JSON_EVENTS) def test_basic_parse_threaded(self): thread = threading.Thread(target=self.test_basic_parse) thread.start() thread.join() def test_scalar(self): events = list(self.backend.basic_parse(BytesIO(SCALAR_JSON))) self.assertEqual(events, [('number', 0)]) def test_strings(self): events = list(self.backend.basic_parse(BytesIO(STRINGS_JSON))) strings = [value for event, value in events if event == 'string'] self.assertEqual(strings, ['', '"', '\\', '\\\\', '\b\f\n\r\t']) self.assertTrue(('map_key', 'special\t') in events) def test_surrogate_pairs(self): event = next(self.backend.basic_parse(BytesIO(SURROGATE_PAIRS_JSON))) parsed_string = event[1] self.assertEqual(parsed_string, '💩') def test_numbers(self): events = list(self.backend.basic_parse(BytesIO(NUMBERS_JSON))) types = [type(value) for event, value in events if event == 'number'] self.assertEqual(types, [int, Decimal, Decimal]) def test_invalid(self): for json in INVALID_JSONS: # Yajl1 doesn't complain about additional data after the end # of a parsed object. Skipping this test. 
if self.__class__.__name__ == 'YajlParse' and json == YAJL1_PASSING_INVALID: continue with self.assertRaises(common.JSONError) as cm: list(self.backend.basic_parse(BytesIO(json))) def test_incomplete(self): for json in INCOMPLETE_JSONS: with self.assertRaises(common.IncompleteJSONError): list(self.backend.basic_parse(BytesIO(json))) def test_utf8_split(self): buf_size = JSON.index(b'\xd1') + 1 try: events = list(self.backend.basic_parse(BytesIO(JSON), buf_size=buf_size)) except UnicodeDecodeError: self.fail('UnicodeDecodeError raised') def test_lazy(self): # shouldn't fail since iterator is not exhausted self.backend.basic_parse(BytesIO(INVALID_JSONS[0])) self.assertTrue(True) def test_boundary_lexeme(self): buf_size = JSON.index(b'false') + 1 events = list(self.backend.basic_parse(BytesIO(JSON), buf_size=buf_size)) self.assertEqual(events, JSON_EVENTS) def test_boundary_whitespace(self): buf_size = JSON.index(b' ') + 1 events = list(self.backend.basic_parse(BytesIO(JSON), buf_size=buf_size)) self.assertEqual(events, JSON_EVENTS) def test_api(self): self.assertTrue(list(self.backend.items(BytesIO(JSON), ''))) self.assertTrue(list(self.backend.parse(BytesIO(JSON)))) # Generating real TestCase classes for each importable backend for name in ['python', 'yajl', 'yajl2', 'yajl2_cffi']: try: classname = '%sParse' % ''.join(p.capitalize() for p in name.split('_')) if IS_PY2: classname = classname.encode('ascii') locals()[classname] = type( classname, (unittest.TestCase, Parse), {'backend': import_module('ijson.backends.%s' % name)}, ) except ImportError: pass class Common(unittest.TestCase): ''' Backend independent tests. They all use basic_parse imported explicitly from the python backend to generate parsing events. ''' def test_object_builder(self): builder = common.ObjectBuilder() for event, value in basic_parse(BytesIO(JSON)): builder.event(event, value) self.assertEqual(builder.value, { 'docs': [ { 'string': 'строка - тест', 'null': None, 'boolean': False, 'true': True, 'integer': 0, 'double': Decimal('0.5'), 'exponent': 100, 'long': 10000000000, }, { 'meta': [[1], {}], }, { 'meta': {'key': 'value'}, }, { 'meta': None, }, ], }) def test_scalar_builder(self): builder = common.ObjectBuilder() for event, value in basic_parse(BytesIO(SCALAR_JSON)): builder.event(event, value) self.assertEqual(builder.value, 0) def test_parse(self): events = common.parse(basic_parse(BytesIO(JSON))) events = [value for prefix, event, value in events if prefix == 'docs.item.meta.item.item' ] self.assertEqual(events, [1]) def test_items(self): events = basic_parse(BytesIO(JSON)) meta = list(common.items(common.parse(events), 'docs.item.meta')) self.assertEqual(meta, [ [[1], {}], {'key': 'value'}, None, ]) class Stream(unittest.TestCase): def test_bytes(self
import os, random

rfilename = random.choice(os.listdir("/storage/pictures"))
rextension = os.path.splitext(rfilename)[1]
picturespath = '/storage/pictures/'

# TODO Probably don't need a for loop, can possibly do random*
# TODO What if the directory is empty?
for filename in os.listdir(picturespath):
    if filename.startswith("random"):
        extension = os.path.splitext(filename)[1]
        # rename the existing random wallpaper to something random
        newname = picturespath + str(random.random()).rsplit('.', 1)[1] + extension
        filename = picturespath + filename
        os.rename(filename, newname)

# now rename the newly found random file to be random
rfilename = picturespath + rfilename
os.rename(rfilename, picturespath + 'random' + rextension)
# -*- coding: utf-8 -*- # Generated by Django 1.10 on 2017-11-14 21:43 from __future__ import unicode_literals from django.db import migrations class Migr
ation(migrations.Migration): dependencies = [ ('recipe', '0010_auto_20171114_1443'), ] operations = [ migrations.RemoveField( model_name='direction', name='recipe', ), migrations.DeleteModel( name='Direction',
), ]
from __future__ import division from __future__ import print_function import os import sys import functools # Update path root = os.path.join(os.getcwd().split('proj1')[0], 'proj1') if root not in sys.path: sys.path.append(root) import numpy as np import pandas as pd import multiprocessing from pdb import set_trace from Simulator import simulate from Utils.PlotsUtils import line, line2 from Utils.RandomUtil import Random from Utils.MisclUtils import TimeUtil rand = Random() timer = TimeUtil() # Set seed rand.set_seed(seed_val=12458) def customer_loss_rate(customers): served = np.sum([customer.serviced for customer in customers]) total = len(customers) return served / total def plot_runtime(x=None, y=None): line(x, y, x_label=r"$\rho$", y_label=r"Run Times", the_title=r"$\mathrm{Run\ Times\ in\ }\mu\mathrm{s\ vs.\ }\rho$") def plot_runtime_vs_avg(x, y, y_1): line2(x, y, x, y_1, label_1="Actual Runtimes", label_2="Expected value of $\rho$", x_label=r"$\rho$", y_label=r"Run Times", the_title=r"$\mathrm{Run\ Times\ in\ }\mu\mathrm{s\ vs.\ }\rho$") def task_5(): rho_list = np.arange(0.05, 1, 0.1) C = 1e5 elapsed = [] for rho in rho_list: start_time = timer.current_time() serviced = simulate(l = rho, server_lim = 40, max_serviced=C, L=1, verbose=False) end_time = timer.current_time() elapsed.append(end_time-start_time) data = pd.DataFrame([[a,b] for a, b in zip(rho_list, elapsed)], columns=["Rho", "Seconds"])
data.to_csv(os.path.abspath(os.path.join(root,"tasks/task5.csv"))) def task5_plot(): data = pd.read_csv(os.path.abspath("tasks/task5.csv")) plot_runtime(data["Rho"], data["Seconds"]) set_trace() def compare_plot(): rho_list = np.arange(0.05, 1, 0.1) average_rho = [np.mean([rand.exponential(lam=p) for _ in xrange(10000)]) for p in rho_list] data = pd.read_csv(os.path.abspath("tasks/task5.csv"))
    plot_runtime_vs_avg(data["Rho"], data["Seconds"], average_rho)


if __name__ == "__main__":
    task_5()
    task5_plot()
    compare_plot()
# Copyright (C) 2016 Intel Corporation
# Released under the MIT license (see COPYING.MIT)

from oeqa.core.exception import OEQAMissingVariable

from . import OETestDecorator, registerDecorator


def has_feature(td, feature):
    """
    Checks for feature in DISTRO_FEATURES or IMAGE_FEATURES.
    """

    if (feature in td.get('DISTRO_FEATURES', '') or
            feature in td.get('IMAGE_FEATURES', '')):
        return True
    return False


@registerDecorator
class skipIfDataVar(OETestDecorator):
    """
    Skip test based on value of a data store's variable.

    It will get the value of var from the data store and check it
    against value; if they are equal it will skip the test with
    msg as the reason.
    """

    attrs = ('var', 'value', 'msg')

    def setUpDecorator(self):
        msg = ('Checking if %r value is %r to skip test' %
               (self.var, self.value))
        self.logger.debug(msg)
        if self.case.td.get(self.var) == self.value:
            self.case.skipTest(self.msg)


@registerDecorator
class skipIfNotDataVar(OETestDecorator):
    """
    Skip test based on value of a data store's variable.

    It will get the value of var from the data store and check it
    against value; if they are not equal it will skip the test with
    msg as the reason.
    """

    attrs = ('var', 'value', 'msg')

    def setUpDecorator(self):
        msg = ('Checking if %r value is not %r to skip test' %
               (self.var, self.value))
        self.logger.debug(msg)
        if self.case.td.get(self.var) != self.value:
            self.case.skipTest(self.msg)


@registerDecorator
class skipIfNotInDataVar(OETestDecorator):
    """
    Skip test if value is not in data store's variable.
    """

    attrs = ('var', 'value', 'msg')

    def setUpDecorator(self):
        msg = ('Checking if %r value is in %r to run '
               'the test' % (self.var, self.value))
        self.logger.debug(msg)
        if self.value not in self.case.td.get(self.var):
            self.case.skipTest(self.msg)


@registerDecorator
class OETestDataDepends(OETestDecorator):
    attrs = ('td_depends',)

    def setUpDecorator(self):
        for v in self.td_depends:
            try:
                value = self.case.td[v]
            except KeyError:
                raise OEQAMissingVariable("Test case needs %s variable but "
                                          "it isn't in td" % v)


@registerDecorator
class skipIfNotFeature(OETestDecorator):
    """
    Skip test based on DISTRO_FEATURES.

    value must be in distro features or it will skip the test
    with msg as the reason.
    """

    attrs = ('value', 'msg')

    def setUpDecorator(self):
        msg = ('Checking if %s is in DISTRO_FEATURES '
               'or IMAGE_FEATURES' % (self.value))
        self.logger.debug(msg)
        if not has_feature(self.case.td, self.value):
            self.case.skipTest(self.msg)
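# Hedged usage sketch (test names illustrative, not from this file): the
# decorators above are applied to OEQA test methods, e.g.
#
#     @skipIfNotFeature('systemd', 'Test requires systemd in DISTRO_FEATURES')
#     def test_systemd_unit(self):
#         ...
#
#     @skipIfDataVar('MACHINE', 'qemuarm', 'Not applicable on qemuarm')
#     def test_machine_specific(self):
#         ...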
# -*- coding: utf-8 -*-

# Copyright (c) 2010-2017 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
#   list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
#   this list of conditions and the following disclaimer in the documentation
#   and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
#   may be used to endorse or promote products derived from this software
#   without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.

"""Package containing the various signals.

A signal is a very simple class, similar to an exception in that it
allows messages to be transmitted and pauses execution while the
message is being passed. However, once the signal has been received,
execution can resume.

"""

from secondaires.navigation.equipage.signaux.base import Signal
from secondaires.navigation.equipage.signaux.attendre import SignalAttendre
from secondaires.navigation.equipage.signaux.abandonne import SignalAbandonne
from secondaires.navigation.equipage.signaux.inutile import SignalInutile
from secondaires.navigation.equipage.signaux.relais import SignalRelais
from secondaires.navigation.equipage.signaux.repete import SignalRepete
from secondaires.navigation.equipage.signaux.termine import SignalTermine
task: res = self.interface.add_cleaning_network(task) add_ports_mock.assert_called_once_with( task, CONF.neutron.cleaning_network, security_groups=CONF.neutron.cleaning_network_security_groups) rollback_mock.assert_called_once_with( task, CONF.neutron.cleaning_network) self.assertEqual(res, add_ports_mock.return_value) self.port.refresh() self.assertEqual(self.neutron_port['id'], self.port.internal_info['cleaning_vif_port_id']) @mock.patch.object(neutron_common, 'validate_network', side_effect=lambda n, t: n) @mock.patch.object(neutron_common, 'remove_ports_from_network') def test_remove_cleaning_network(self, remove_ports_mock, validate_mock): self.port.internal_info = {'cleaning_vif_port_id': 'vif-port-id'} self.port.save() with task_manager.acquire(self.context, self.node.id) as task: self.interface.remove_cleaning_network(task) remove_ports_mock.assert_called_once_with( task, CONF.neutron.cleaning_network) validate_mock.assert_called_once_with( CONF.neutron.cleaning_network, 'cleaning network') self.port.refresh() self.assertNotIn('cleaning_vif_port_id', self.port.internal_info) @mock.patch.object(neutron_common, 'unbind_neutron_port') def test_unconfigure_tenant_networks(self, mock_unbind_port): with task_manager.acquire(self.context, self.node.id) as task: self.interface.unconfigure_tenant_networks(task) mock_unbind_port.assert_called_once_with( self.port.extra['vif_port_id']) def test_configure_tenant_networks_no_ports_for_node(self): n = utils.create_test_node(self.context, network_interface='neutron', uuid=uuidutils.generate_uuid()) with task_manager.acquire(self.context, n.id) as task: self.assertRaisesRegexp( exception.NetworkError, 'No ports are associated', self.interface.configure_tenant_networks, task) @mock.patch.object(neutron_common, 'get_client') @mock.patch.object(neutron, 'LOG') def test_configure_tenant_networks_no_vif_id(self, log_mock, client_mock): self.port.extra = {} self.port.save() upd_mock = mock.Mock() client
_mock.return_value.update_port = upd_mock with task_manager.acquire(self.cont
ext, self.node.id) as task: self.assertRaisesRegex(exception.NetworkError, 'No neutron ports or portgroups are ' 'associated with node', self.interface.configure_tenant_networks, task) client_mock.assert_called_once_with() upd_mock.assert_not_called() self.assertIn('No neutron ports or portgroups are associated with', log_mock.error.call_args[0][0]) @mock.patch.object(neutron_common, 'get_client') @mock.patch.object(neutron, 'LOG') def test_configure_tenant_networks_multiple_ports_one_vif_id( self, log_mock, client_mock): expected_body = { 'port': { 'binding:vnic_type': 'baremetal', 'binding:host_id': self.node.uuid, 'binding:profile': {'local_link_information': [self.port.local_link_connection]} } } utils.create_test_port(self.context, node_id=self.node.id, address='52:54:00:cf:2d:33', extra={}, uuid=uuidutils.generate_uuid()) upd_mock = mock.Mock() client_mock.return_value.update_port = upd_mock with task_manager.acquire(self.context, self.node.id) as task: self.interface.configure_tenant_networks(task) client_mock.assert_called_once_with() upd_mock.assert_called_once_with(self.port.extra['vif_port_id'], expected_body) @mock.patch.object(neutron_common, 'get_client') def test_configure_tenant_networks_update_fail(self, client_mock): client = client_mock.return_value client.update_port.side_effect = neutron_exceptions.ConnectionFailed( reason='meow') with task_manager.acquire(self.context, self.node.id) as task: self.assertRaisesRegexp( exception.NetworkError, 'Could not add', self.interface.configure_tenant_networks, task) client_mock.assert_called_once_with() @mock.patch.object(neutron_common, 'get_client') def _test_configure_tenant_networks(self, client_mock, is_client_id=False, vif_int_info=False): upd_mock = mock.Mock() client_mock.return_value.update_port = upd_mock if vif_int_info: kwargs = {'internal_info': { 'tenant_vif_port_id': uuidutils.generate_uuid()}} self.port.internal_info = { 'tenant_vif_port_id': self.port.extra['vif_port_id']} self.port.extra = {} else: kwargs = {'extra': {'vif_port_id': uuidutils.generate_uuid()}} second_port = utils.create_test_port( self.context, node_id=self.node.id, address='52:54:00:cf:2d:33', uuid=uuidutils.generate_uuid(), local_link_connection={'switch_id': '0a:1b:2c:3d:4e:ff', 'port_id': 'Ethernet1/1', 'switch_info': 'switch2'}, **kwargs ) if is_client_id: client_ids = (CLIENT_ID1, CLIENT_ID2) ports = (self.port, second_port) for port, client_id in zip(ports, client_ids): extra = port.extra extra['client-id'] = client_id port.extra = extra port.save() expected_body = { 'port': { 'binding:vnic_type': 'baremetal', 'binding:host_id': self.node.uuid, } } port1_body = copy.deepcopy(expected_body) port1_body['port']['binding:profile'] = { 'local_link_information': [self.port.local_link_connection] } port2_body = copy.deepcopy(expected_body) port2_body['port']['binding:profile'] = { 'local_link_information': [second_port.local_link_connection] } if is_client_id: port1_body['port']['extra_dhcp_opts'] = ( [{'opt_name': 'client-id', 'opt_value': client_ids[0]}]) port2_body['port']['extra_dhcp_opts'] = ( [{'opt_name': 'client-id', 'opt_value': client_ids[1]}]) with task_manager.acquire(self.context, self.node.id) as task: self.interface.configure_tenant_networks(task) client_mock.assert_called_once_with() if vif_int_info: portid1 = self.port.internal_info['tenant_vif_port_id'] portid2 = second_port.internal_info['tenant_vif_port_id'] else: portid1 = self.port.extra['vif_port_id'] portid2 = second_port.extra['vif_port_id'] upd_mock.assert_has_calls( 
[mock.call(portid1, port1_body), mock.call(portid2, port2_body)], any_order=True ) def test_configure_tenant_networks_vif_extra(self): self.node.instance_uuid = uuidutils.generate_uuid() self.node.save() self._test_configure_tenant_networks() def test_configure_tenant_networks_vif_int_info(self): self.node.instance_uuid = uuidutils.generate_uuid() self.node.save() self._test_configure_tenant_networks(vif_int_info=True) def test_configure_tenant_networks_no_instance_uuid(self): self._test_configure_tenant_networks() def test_configure_tenant_networks_with_client_id(self):
F ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import logging from django.core.urlresolvers import reverse from django.template import defaultfilters as filters from django.utils.translation import pgettext_lazy from django.utils.translation import ugettext_lazy as _ from django.utils.translation import ungettext_lazy from horizon import exceptions from horizon import tables from openstack_dashboard import api from openstack_dashboard import policy LOG = log
ging.getLogger(__name__) class AddRuleLink(tables.LinkAction): name = "addrule" verbose_name = _("Add Rule") url = "horizon:project:firewalls:addrule" classes = ("ajax-modal",) icon = "plus" policy_rules = (("network", "cre
ate_firewall_rule"),) class AddPolicyLink(tables.LinkAction): name = "addpolicy" verbose_name = _("Add Policy") url = "horizon:project:firewalls:addpolicy" classes = ("ajax-modal", "btn-addpolicy",) icon = "plus" policy_rules = (("network", "create_firewall_policy"),) class AddFirewallLink(tables.LinkAction): name = "addfirewall" verbose_name = _("Create Firewall") url = "horizon:project:firewalls:addfirewall" classes = ("ajax-modal",) icon = "plus" policy_rules = (("network", "create_firewall"),) class DeleteRuleLink(policy.PolicyTargetMixin, tables.DeleteAction): name = "deleterule" policy_rules = (("network", "delete_firewall_rule"),) @staticmethod def action_present(count): return ungettext_lazy( u"Delete Rule", u"Delete Rules", count ) @staticmethod def action_past(count): return ungettext_lazy( u"Scheduled deletion of Rule", u"Scheduled deletion of Rules", count ) def allowed(self, request, datum=None): if datum and datum.policy: return False return True def delete(self, request, obj_id): try: api.fwaas.rule_delete(request, obj_id) except Exception as e: exceptions.handle(request, _('Unable to delete rule. %s') % e) class DeletePolicyLink(policy.PolicyTargetMixin, tables.DeleteAction): name = "deletepolicy" policy_rules = (("network", "delete_firewall_policy"),) @staticmethod def action_present(count): return ungettext_lazy( u"Delete Policy", u"Delete Policies", count ) @staticmethod def action_past(count): return ungettext_lazy( u"Scheduled deletion of Policy", u"Scheduled deletion of Policies", count ) def delete(self, request, obj_id): try: api.fwaas.policy_delete(request, obj_id) except Exception as e: exceptions.handle(request, _('Unable to delete policy. %s') % e) class DeleteFirewallLink(policy.PolicyTargetMixin, tables.DeleteAction): name = "deletefirewall" policy_rules = (("network", "delete_firewall"),) @staticmethod def action_present(count): return ungettext_lazy( u"Delete Firewall", u"Delete Firewalls", count ) @staticmethod def action_past(count): return ungettext_lazy( u"Scheduled deletion of Firewall", u"Scheduled deletion of Firewalls", count ) def delete(self, request, obj_id): try: api.fwaas.firewall_delete(request, obj_id) except Exception as e: exceptions.handle(request, _('Unable to delete firewall. 
%s') % e) class UpdateRuleLink(policy.PolicyTargetMixin, tables.LinkAction): name = "updaterule" verbose_name = _("Edit Rule") classes = ("ajax-modal", "btn-update",) policy_rules = (("network", "update_firewall_rule"),) def get_link_url(self, rule): base_url = reverse("horizon:project:firewalls:updaterule", kwargs={'rule_id': rule.id}) return base_url class UpdatePolicyLink(policy.PolicyTargetMixin, tables.LinkAction): name = "updatepolicy" verbose_name = _("Edit Policy") classes = ("ajax-modal", "btn-update",) policy_rules = (("network", "update_firewall_policy"),) def get_link_url(self, policy): base_url = reverse("horizon:project:firewalls:updatepolicy", kwargs={'policy_id': policy.id}) return base_url class UpdateFirewallLink(policy.PolicyTargetMixin, tables.LinkAction): name = "updatefirewall" verbose_name = _("Edit Firewall") classes = ("ajax-modal", "btn-update",) policy_rules = (("network", "update_firewall"),) def get_link_url(self, firewall): base_url = reverse("horizon:project:firewalls:updatefirewall", kwargs={'firewall_id': firewall.id}) return base_url def allowed(self, request, firewall): if firewall.status in ("PENDING_CREATE", "PENDING_UPDATE", "PENDING_DELETE"): return False return True class InsertRuleToPolicyLink(policy.PolicyTargetMixin, tables.LinkAction): name = "insertrule" verbose_name = _("Insert Rule") classes = ("ajax-modal", "btn-update",) policy_rules = (("network", "get_firewall_policy"), ("network", "insert_rule"),) def get_link_url(self, policy): base_url = reverse("horizon:project:firewalls:insertrule", kwargs={'policy_id': policy.id}) return base_url class RemoveRuleFromPolicyLink(policy.PolicyTargetMixin, tables.LinkAction): name = "removerule" verbose_name = _("Remove Rule") classes = ("ajax-modal", "btn-danger",) policy_rules = (("network", "get_firewall_policy"), ("network", "remove_rule"),) def get_link_url(self, policy): base_url = reverse("horizon:project:firewalls:removerule", kwargs={'policy_id': policy.id}) return base_url def allowed(self, request, policy): if len(policy.rules) > 0: return True return False class AddRouterToFirewallLink(policy.PolicyTargetMixin, tables.LinkAction): name = "addrouter" verbose_name = _("Add Router") classes = ("ajax-modal", "btn-update",) policy_rules = (("network", "get_firewall"), ("network", "add_router"),) def get_link_url(self, firewall): base_url = reverse("horizon:project:firewalls:addrouter", kwargs={'firewall_id': firewall.id}) return base_url def allowed(self, request, firewall): if not api.neutron.is_extension_supported(request, 'fwaasrouterinsertion'): return False tenant_id = firewall['tenant_id'] available_routers = api.fwaas.firewall_unassociated_routers_list( request, tenant_id) return bool(available_routers) class RemoveRouterFromFirewallLink(policy.PolicyTargetMixin, tables.LinkAction): name = "removerouter" verbose_name = _("Remove Router") classes = ("ajax-modal", "btn-update",) policy_rules = (("network", "get_firewall"), ("network", "remove_router"),) def get_link_url(self, firewall): base_url = reverse("horizon:project:firewalls:removerouter", kwargs={'firewall_id': firewall.id}) return base_url def allowed(self, request, firewall): if not api.neutron.is_extension_supported(request, 'fwaasrouterinsertion'): return False return bool(firewall['router_ids']) def get_rules_name(datum): return ', '.join([rule.name or rule.id[:13] for rule in datum.rules]) def get_routers_name(fir
import os import numpy as np from scipy.optimize import curve_fit def gauss(x, A, mu, sigma): return A * np.exp(-(x - mu)**2 / (2. * sigma**2)) scriptmode = True SDM_name = 'test' # The prefix to use for all output files # SDM_name = '13A-213.sb20685305.eb20706999.56398.113012800924' # Set up some useful variables (these will be altered later on) msfile = SDM_name + '.ms' hisplitms = SDM_name + '.hi.ms' splitms = SDM_name + '.hi.src.split.ms' contsubms = SDM_name + '.hi.src.split.ms.contsub' rawcleanms = SDM_name + '.hi.src.split.ms.contsub.rawcleanimg' cleanms = SDM_name + '.hi.src.split.ms.contsub.cleanimg' pathname = os.environ.get('CASAPATH').split()[0] pipepath = '/home/dcolombo/pipe_scripts/' # pipepath = '/home/dario/pipe_scripts/' source = 'SextansA' # VOS stuff vos_dir = '../vos/' vos_proc = './'
vos_link = '../vos_link/' # %&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%%&%&%&%&%&%&%%&% # Find the 21cm spw and check if the obs # is single pointing or mosaic # %&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%%&%&%&%&%&%&%%&% print "Fin
d HI spw..." # But first find the spw corresponding to it tb.open(vos_dir + msfile + '/SPECTRAL_WINDOW') freqs = tb.getcol('REF_FREQUENCY') nchans = tb.getcol('NUM_CHAN') tb.close() spws = range(0, len(freqs)) # Select the 21cm sel = np.where((freqs > 1.40 * 10**9) & (freqs < 1.43 * 10**9)) hispw = str(spws[sel[0][0]]) freq = freqs[sel[0][0]] nchan = nchans[sel[0][0]] print "Selected spw ", hispw, "with frequency ", freq, "and ", nchan, " channels" print "Starting to split the HI line" # Mosaic or single pointing? tb.open(vos_dir + msfile + '/FIELD') names = tb.getcol('NAME') tb.close() moscount = 0 for name in names: chsrc = name.find(source) if chsrc != -1: moscount = moscount + 1 if moscount > 1: imagermode = "mosaic" else: imagermode = "csclean" # %&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% # Split the corrected source data from the rest # %&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% print "Starting source split..." os.system('rm -rf ' + vos_proc + splitms) default('split') vis = vos_dir + hisplitms outputvis = vos_proc + splitms field = source spw = '' datacolumn = 'corrected' keepflags = False split() print "Created split source .ms " + splitms # %&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% # UV continuum subtraction # %&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% # 1) Estimate the mean visibility amplitude per # channel with visstat, to locate the channels # that contain line emission print "Estimating channels with signal..." real_amps = [] imag_amps = [] default('visstat') vis = vos_proc + splitms field = '0' datacolumn = 'data' selectdata = True useflags = False for nc in range(nchan): spw = '0:' + str(nc) axis = 'real' pdata = visstat() real_amps.append(pdata['DATA']['mean']) axis = 'imag' pdata = visstat() imag_amps.append(pdata['DATA']['mean']) real_amps = np.asarray(real_amps) imag_amps = np.asarray(imag_amps) amps = np.sqrt(real_amps**2 + imag_amps**2) chans = np.arange(nchan) + 1 # Initial guesses for the Gaussian fit A = max(amps) mu = chans[amps.tolist().index(A)] hm = chans[amps > A / 2] sigma = float(hm[-1] - hm[0]) / 2.35 opar, _ = curve_fit(gauss, chans, amps, p0=[A, mu, sigma]) # Exclude the channels within 3.5 sigma of the line # centre from the continuum fit chan1 = int(mu - 3.5 * opar[2]) chan2 = int(mu + 3.5 * opar[2]) fitspws = str(chan1) + '~' + str(chan2) print "Signal within channels " + fitspws print "Starting contsub..." # Run the routine os.system('rm -rf ' + vos_proc + contsubms) default('uvcontsub') vis = vos_proc + splitms fitspw = '0:' + fitspws excludechans = True solint = 0.0 fitorder = 0 fitmode = 'subtract' splitdata = True uvcontsub() print "Created continuum-subtracted .ms " + contsubms # %&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% # CLEANing # %&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% print "Starting CLEANing..." 
os.system('rm -rf ' + vos_proc + rawcleanms + '*') # First generate a 0-iterations # image to estimate the noise level # (threshold) # Get max baseline and dish size bline_max = au.getBaselineExtrema(vos_proc + splitms)[0] tb.open(vos_proc + splitms + '/ANTENNA') dishs = tb.getcol('DISH_DIAMETER') dish_min = min(dishs) tb.close() # Find the beam hi_lambda = 299792458.0 / (freq) min_lambda = 299792458.0 / (min(freqs)) syn_beam = (hi_lambda / bline_max) * 180 / np.pi * 3600 prim_beam = (min_lambda / dish_min) * 180 / np.pi * 3600 # Setting CLEANing parameters sel_cell = str(round(syn_beam / 5)) + 'arcsec' sel_imsize = int(round(prim_beam / (syn_beam / 5))) # Increase sel_imsize by a couple of beams # to be safe dx = int(round(syn_beam / prim_beam * sel_imsize)) sel_imsize = sel_imsize + 1 * dx # The image size should be a multiple of # 2, 3 and 5 to work well with clean, so: sel_imsize = sel_imsize - 1 pnum = 1 * sel_imsize while pnum != 1: sel_imsize = sel_imsize + 1 pnum = 1 * sel_imsize while pnum % 2 == 0: pnum = pnum / 2 while pnum % 3 == 0: pnum = pnum / 3 while pnum % 5 == 0: pnum = pnum / 5 print "Image size:", sel_imsize print "Cell size:", sel_cell default('clean') vis = vos_proc + contsubms imagename = vos_proc + rawcleanms cell = [sel_cell, sel_cell] imsize = [sel_imsize, sel_imsize] imagermode = imagermode mode = "channel" nchan = 4 start = chan1 - 5 width = 1 field = '0' spw = '0' interactive = False pbcor = False minpb = 0.25 restfreq = '1.420405752GHz' niter = 0 clean() print "Estimating sigma..." default('imstat') imagename = vos_proc + rawcleanms + '.image' chans = '0~3' rawclean_stat = imstat() rms = rawclean_stat['sigma'][0] * 1000 rms = round(rms) rms = str(int(rms)) + 'mJy' print "Sigma=", rms, ". Now the real CLEANing..." # Now run the real cleaning os.system('rm -rf ' + cleanms + '*') default('clean') vis = vos_proc + contsubms imagename = vos_proc + cleanms cell = [sel_cell, sel_cell] imsize = [sel_imsize, sel_imsize] imagermode = imagermode mode = "channel" start = chan1 nchan = chan2 - chan1 width = 1 field = '' spw = '' interactive = False restfreq = '1.420405752GHz' outframe = 'LSRK' niter = 10000 threshold = rms usescratch = True clean() # %&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% # Moment maps 0,1,2 # %&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% default("immoments") imagename = vos_proc + cleanms + '.image' moments = [0, 1, 2] outfile = vos_proc + cleanms immoments() # %&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% # Convert everything to fits file # %&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&% print "Exporting the image fits..." default('exportfits') imagename = vos_proc + cleanms + '.image' fitsimage = vos_proc + source + '_21cm.fits' velocity = True optical = False overwrite = True dropstokes = True exportfits() print "Exporting moment maps..." default('exportfits') # Moment 0 imagename = vos_proc + cleanms + '.integrated' fitsimage = vos_proc + source + '_21cm_mom0.fits' velocity = True optical = False overwrite = True dropstokes = True exportfits() default('exportfits') # Moment 1 imagename = vos_proc + cleanms + '.weighted_coord' fitsimage = vos_proc + source + '_21cm_mom1.fits' velocity = True optical = False overwrite = True dropstokes = True exportfits() default('exportfits') # Moment 2 imagename = vos_proc + cleanms + '.weighted_dispersion_coord' fitsimage = vos_proc + source + '_21cm_mom2.fits' velocity = True optical = False overwrite = True dropstokes = True exportfits()
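# The image-size search above amounts to: find the smallest integer >= the
# initial estimate whose only prime factors are 2, 3 and 5, since CLEAN's FFTs
# are fastest on such sizes. A standalone sketch of the same logic (editor's
# illustration; the helper name is hypothetical, not part of this pipeline):
#
# def next_235_smooth(n):
#     candidate = n
#     while True:
#         m = candidate
#         for p in (2, 3, 5):
#             while m % p == 0:
#                 m //= p
#         if m == 1:
#             return candidate
#         candidate += 1
#
# assert next_235_smooth(97) == 100  # 100 = 2**2 * 5**2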
'False'}), 'forward_attributes_from_push_sources': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'map_attributes_from_push_sources': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'map_attributes_of_filtered_attributes': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}), 'output_name_format': ('django.db.models.fields.CharField', [], {'default': "('urn:oasis:names:tc:SAML:2.0:attrname-format:basic', 'SAMLv2 BASIC')", 'max_length': '100'}), 'output_namespace': ('django.db.models.fields.CharField', [], {'default': "('Default', 'Default')", 'max_length': '100'}), 'send_error_and_no_attrs_if_missing_required_attrs': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'source_filter_for_sso_from_push_sources': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'filter attributes of push sources with sources'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['attribute_aggregator.AttributeSource']"}) }, 'saml.authorizationattributemap': { 'Meta': {'object_name': 'AuthorizationAttributeMap'}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '40'}) }, 'saml.authorizationattributemapping': { 'Meta': {'object_name': 'AuthorizationAttributeMapping'}, 'attribute_name': ('django.db.models.fields.CharField', [], {'max_length': '40'}), 'attribute_value': ('django.db.models.fields.CharField', [], {'max_length': '40'}), 'attribute_value_format': ('django.db.models.fields.CharField', [], {'max_length': '40', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'map': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['saml.AuthorizationAttributeMap']"}), 'source_attribute_name': ('django.db.models.fields.CharField', [], {'max_length': '40', 'blank': 'True'}) }, 'saml.authorizationsppolicy': { 'Meta': {'object_name': 'AuthorizationSPPolicy'}, 'attribute_map': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'authorization_attributes'", 'null': 'True', 'to': "orm['saml.AuthorizationAttributeMap']"}), 'default_denial_message': ('django.db.models.fields.CharField', [], {'default': "u'You are not authorized to access the service.'", 'max_length': '80'}), 'enabled': ('django.db.models.fields.BooleanField', [], {'default': '
False'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}) }, 'saml.idpoptio
nssppolicy': { 'Meta': {'object_name': 'IdPOptionsSPPolicy'}, 'allow_create': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'binding_for_sso_response': ('django.db.models.fields.CharField', [], {'default': "'urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Artifact'", 'max_length': '60'}), 'enable_binding_for_sso_response': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'enable_http_method_for_defederation_request': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'enable_http_method_for_slo_request': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'enabled': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'http_method_for_defederation_request': ('django.db.models.fields.IntegerField', [], {'default': '5', 'max_length': '60'}), 'http_method_for_slo_request': ('django.db.models.fields.IntegerField', [], {'default': '4', 'max_length': '60'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}), 'no_nameid_policy': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'requested_name_id_format': ('django.db.models.fields.CharField', [], {'default': "'none'", 'max_length': '20'}), 'transient_is_persistent': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'user_consent': ('django.db.models.fields.CharField', [], {'default': "'urn:oasis:names:tc:SAML:2.0:consent:current-implicit'", 'max_length': '60'}), 'want_authn_request_signed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'want_force_authn_request': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'want_is_passive_authn_request': ('django.db.models.fields.BooleanField', [], {'default': 'False'}) }, 'saml.keyvalue': { 'Meta': {'object_name': 'KeyValue'}, 'key': ('django.db.models.fields.CharField', [], {'max_length': '40', 'primary_key': 'True'}), 'value': ('authentic2.saml.fields.PickledObjectField', [], {}) }, 'saml.libertyartifact': { 'Meta': {'object_name': 'LibertyArtifact'}, 'artifact': ('django.db.models.fields.CharField', [], {'max_length': '40', 'primary_key': 'True'}), 'content': ('django.db.models.fields.TextField', [], {}), 'creation': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}), 'django_session_key': ('django.db.models.fields.CharField', [], {'max_length': '40'}), 'provider_id': ('django.db.models.fields.CharField', [], {'max_length': '80'}) }, 'saml.libertyassertion': { 'Meta': {'object_name': 'LibertyAssertion'}, 'assertion': ('django.db.models.fields.TextField', [], {}), 'assertion_id': ('django.db.models.fields.CharField', [], {'max_length': '50'}), 'creation': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'provider_id': ('django.db.models.fields.CharField', [], {'max_length': '80'}), 'session_index': ('django.db.models.fields.CharField', [], {'max_length': '80'}) }, 'saml.libertyfederation': { 'Meta': {'unique_together': "(('name_id_qualifier', 'name_id_format', 'name_id_content', 'name_id_sp_name_qualifier'),)", 'object_name': 'LibertyFederation'}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'idp_id': ('django.db.models.fields.CharField', [], {'max_length': '80'}), 'name_id_content': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 
'name_id_format': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}), 'name_id_qualifier': ('django.db.models.fields.CharField', [], {'max_length': '150', 'null': 'True', 'blank': 'True'}), 'name_id_sp_name_qualifier': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}), 'name_id_sp_provided_id': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}), 'sp_id': ('django.db.models.fields.CharField', [], {'max_length': '80'}), 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}) }, 'saml.libertyidentitydump': { 'Meta': {'object_name': 'LibertyIdentityDump'}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'identity_
from
django.apps import AppConfig class DataimportConfig(AppConfig): name = "intranet.apps.dataimport"
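# Usage note (illustrative; standard Django convention rather than anything
# repo-specific): the AppConfig above takes effect once the app is registered
# in settings, e.g.
#
# INSTALLED_APPS = [
#     # ...
#     "intranet.apps.dataimport",
# ]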
#!/usr/bin/env python import numpy as np from horton import * # specify the even tempered basis set alpha_low = 5e-3 alpha_high = 5e2 nbasis = 30 lnratio = (np.log(alpha_high) - np.log(alpha_low))/(nbasis-1) # build a list of "contractions". These aren't real contractions as every # contraction only co
ntains one basis function. bcs = [] for ibasis in xrange(nbasis): alpha = alpha_low * np.exp(ibasis * lnratio) # arguments of GOBasisContraction: # shell_type, li
st of exponents, list of contraction coefficients bcs.append(GOBasisContraction(0, np.array([alpha]), np.array([1.0]))) # Finish setting up the basis set: ba = GOBasisAtom(bcs) obasis = get_gobasis(np.array([[0.0, 0.0, 0.0]]), np.array([3]), default=ba)
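# Sanity check on the even-tempered construction above (editor's sketch using
# only numpy and the variables already defined): the exponents form a
# geometric series from alpha_low to alpha_high with constant ratio
# exp(lnratio).
alphas = alpha_low * np.exp(lnratio * np.arange(nbasis))
assert np.isclose(alphas[0], alpha_low)
assert np.isclose(alphas[-1], alpha_high)
assert np.allclose(alphas[1:] / alphas[:-1], np.exp(lnratio))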
"""SCons.Variables.PathVariable This file defines an option type for SCons implementing path settings. To be used whenever a a user-sp
ecified path override should be allowed. Arguments to PathVariable are: option-name = name of this option on the command line (e.g. "prefix") option-help = help string for option option-dflt = default value for this option validator = [optional] validator for option value. Predefined validators are: PathAccept -- accepts any path setting; no validation PathIsDir -- path must be an existing directory Pat
hIsDirCreate -- path must be a dir; will create PathIsFile -- path must be a file PathExists -- path must exist (any type) [default] The validator is a function that is called and which should return True or False to indicate if the path is valid. The arguments to the validator function are: (key, val, env). The key is the name of the option, the val is the path specified for the option, and the env is the env to which the Options have been added. Usage example: prefix=/usr/local opts = Variables() opts.Add(PathVariable('qtdir', 'where the root of Qt is installed', qtdir, PathIsDir)) opts.Add(PathVariable('qt_includes', 'where the Qt includes are installed', '$qtdir/includes', PathIsDirCreate)) opts.Add(PathVariable('qt_libraries', 'where the Qt library is installed', '$qtdir/lib')) """ # # Copyright (c) 2001 - 2014 The SCons Foundation # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, # distribute, sublicense, and/or sell copies of the Software, and to # permit persons to whom the Software is furnished to do so, subject to # the following conditions: # # The above copyright notice and this permission notice shall be included # in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY # KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE # WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. # __revision__ = "src/engine/SCons/Variables/PathVariable.py 2014/08/24 12:12:31 garyo" __all__ = ['PathVariable',] import os import os.path import SCons.Errors import SCons.Util class _PathVariableClass(object): def PathAccept(self, key, val, env): """Accepts any path, no checking done.""" pass def PathIsDir(self, key, val, env): """Validator to check if Path is a directory.""" if not os.path.isdir(val): if os.path.isfile(val): m = 'Directory path for option %s is a file: %s' else: m = 'Directory path for option %s does not exist: %s' raise SCons.Errors.UserError(m % (key, val)) def PathIsDirCreate(self, key, val, env): """Validator to check if Path is a directory, creating it if it does not exist.""" if os.path.isfile(val): m = 'Path for option %s is a file, not a directory: %s' raise SCons.Errors.UserError(m % (key, val)) if not os.path.isdir(val): os.makedirs(val) def PathIsFile(self, key, val, env): """Validator to check if Path is a file.""" if not os.path.isfile(val): if os.path.isdir(val): m = 'File path for option %s is a directory: %s' else: m = 'File path for option %s does not exist: %s' raise SCons.Errors.UserError(m % (key, val)) def PathExists(self, key, val, env): """Validator to check if Path exists.""" if not os.path.exists(val): m = 'Path for option %s does not exist: %s' raise SCons.Errors.UserError(m % (key, val)) def __call__(self, key, help, default, validator=None): # NB: searchfunc is currently undocumented and unsupported """ The input parameters describe a 'path list' option, thus they are returned with the correct converter and validator appended. 
The result is usable for input to opts.Add() . The 'default' option specifies the default path to use if the user does not specify an override with this option. validator is a validator, see this file for examples """ if validator is None: validator = self.PathExists if SCons.Util.is_List(key) or SCons.Util.is_Tuple(key): return (key, '%s ( /path/to/%s )' % (help, key[0]), default, validator, None) else: return (key, '%s ( /path/to/%s )' % (help, key), default, validator, None) PathVariable = _PathVariableClass() # Local Variables: # tab-width:4 # indent-tabs-mode:nil # End: # vim: set expandtab tabstop=4 shiftwidth=4:
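# Example of a user-defined validator (illustrative sketch, not part of the
# SCons sources): any callable with the (key, val, env) signature documented
# above can be passed where the predefined PathAccept/PathIsDir/... validators
# are used.
#
# def PathIsExecutable(key, val, env):
#     """Validate that the path names an executable file."""
#     if not (os.path.isfile(val) and os.access(val, os.X_OK)):
#         raise SCons.Errors.UserError(
#             'Path for option %s is not an executable file: %s' % (key, val))
#
# opts = Variables()
# opts.Add(PathVariable('cc_path', 'path to the C compiler',
#                       '/usr/bin/cc', PathIsExecutable))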
mote products # derived from this software without specific prior written # permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE # COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, # BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT # LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN # ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # ---------------------------------------------------------------------------- # Based on pygxinput originally by Andrew D. Straw # http://code.astraw.com/projects/motmot/wiki/pygxinput import ctypes import pyglet from pyglet.window.xlib import xlib import lib_xinput as xi class XInputDevice: def __init__(self, display, device_info): self._x_display = display._display self._device_id = device_info.id self.name = device_info.name self._open_device = None # TODO: retrieve inputclassinfo from device_info and expose / save # for valuator axes etc. def open(self): if self._open_device: return self._open_device = xi.XOpenDevice(self._x_display, self._device_id) if not self._open_device: raise Exception('Cannot open device') def close(self): if not self._open_device: return xi.XCloseDevice(self._x_display, self._open_device) def attach(self, window): assert window._x_display == self._x_display return XInputDeviceInstance(self, window) class XInputDeviceInstance(pyglet.event.EventDispatcher): def __init__(self, device, window): """Create an opened instance of a device on the given window. :Parameters: `device` : XInputDevice Device to open `window` : Window Window to open device on """ assert device._x_display == window._x_display assert device._open_device self.device = device self.window = window self._events = list() try:
dispatcher = window.__xinput_window_event_dispatcher except AttributeError: dispatcher = window.__xinput_window_event_dispatcher = \ XInputWindowEventDispatcher() dispatcher.add_instance(self) device = device._open_device.contents if not device.num_classes: return # Bind matching extended window events to bound instance methods # on this object. # # Thi
s is inspired by test.c of xinput package by Frederic # Lepied available at x.org. # # In C, this stuff is normally handled by the macro DeviceKeyPress and # friends. Since we don't have access to those macros here, we do it # this way. for i in range(device.num_classes): class_info = device.classes[i] if class_info.input_class == xi.KeyClass: self._add(class_info, xi._deviceKeyPress, dispatcher._event_xinput_key_press) self._add(class_info, xi._deviceKeyRelease, dispatcher._event_xinput_key_release) elif class_info.input_class == xi.ButtonClass: self._add(class_info, xi._deviceButtonPress, dispatcher._event_xinput_button_press) self._add(class_info, xi._deviceButtonRelease, dispatcher._event_xinput_button_release) elif class_info.input_class == xi.ValuatorClass: self._add(class_info, xi._deviceMotionNotify, dispatcher._event_xinput_motion) elif class_info.input_class == xi.ProximityClass: self._add(class_info, xi._proximityIn, dispatcher._event_xinput_proximity_in) self._add(class_info, xi._proximityOut, dispatcher._event_xinput_proximity_out) elif class_info.input_class == xi.FeedbackClass: pass elif class_info.input_class == xi.FocusClass: pass elif class_info.input_class == xi.OtherClass: pass array = (xi.XEventClass * len(self._events))(*self._events) xi.XSelectExtensionEvent(window._x_display, window._window, array, len(array)) def _add(self, class_info, event, handler): _type = class_info.event_type_base + event _class = self.device._device_id << 8 | _type self._events.append(_class) self.window._event_handlers[_type] = handler XInputDeviceInstance.register_event_type('on_button_press') XInputDeviceInstance.register_event_type('on_button_release') XInputDeviceInstance.register_event_type('on_motion') XInputDeviceInstance.register_event_type('on_proximity_in') XInputDeviceInstance.register_event_type('on_proximity_out') class XInputWindowEventDispatcher: def __init__(self): self._instances = dict() def add_instance(self, instance): self._instances[instance.device._device_id] = instance def remove_instance(self, instance): del self._instances[instance.device._device_id] def dispatch_instance_event(self, e, *args): try: instance = self._instances[e.deviceid] except KeyError: return instance.dispatch_event(*args) @pyglet.window.xlib.XlibEventHandler(0) def _event_xinput_key_press(self, ev): raise NotImplementedError('TODO') @pyglet.window.xlib.XlibEventHandler(0) def _event_xinput_key_release(self, ev): raise NotImplementedError('TODO') @pyglet.window.xlib.XlibEventHandler(0) def _event_xinput_button_press(self, ev): e = ctypes.cast(ctypes.byref(ev), ctypes.POINTER(xi.XDeviceButtonEvent)).contents self.dispatch_instance_event(e, 'on_button_press', e.button) @pyglet.window.xlib.XlibEventHandler(0) def _event_xinput_button_release(self, ev): e = ctypes.cast(ctypes.byref(ev), ctypes.POINTER(xi.XDeviceButtonEvent)).contents self.dispatch_instance_event(e, 'on_button_release', e.button) @pyglet.window.xlib.XlibEventHandler(0) def _event_xinput_motion(self, ev): e = ctypes.cast(ctypes.byref(ev), ctypes.POINTER(xi.XDeviceMotionEvent)).contents axis_data = list() for i in range(e.axes_count): axis_data.append(e.axis_data[i]) self.dispatch_instance_event(e, 'on_motion', axis_data, e.x, e.y) @pyglet.window.xlib.XlibEventHandler(0) def _event_xinput_proximity_in(self, ev): e = ctypes.cast(ctypes.byref(ev), ctypes.POINTER(xi.XProximityNotifyEvent)).contents self.dispatch_instance_event(e, 'on_proximity_in') @pyglet.window.xlib.XlibEventHandler(-1) def _event_xinput_proximity_out(self, ev): e = 
ctypes.cast(ctypes.byref(ev), ctypes.POINTER(xi.XProximityNotifyEvent)).contents self.dispatch_instance_event(e, 'on_proximity_out') def _check_extension(display): major_opcode = ctypes.c_int() first_event = ctypes.c_int() first_error = ctypes.c_int() xlib.XQueryExtension(display._display, 'XInputExtension', ctypes.byref(major_opcode), ctypes.byref(first_event), ctypes.byref(first_error)) if not major_opcode.value: raise Exception('XInput extension not available') def ge
# Author: Peter Prettenhofer <peter.prettenhofer@gmail.com> # Lars Buitinck <L.J.Buitinck@uva.nl> # License: Simplified BSD from sklearn.datasets import fetch_20newsgroups from sklearn.feature_extraction.text import TfidfVectorizer from sklearn import metrics from sklearn.cluster import KMeans, MiniBatchKMeans import logging from optparse import OptionParser import sys from time import time import numpy as np # Display progress logs on stdout logging.basicConfig(level=logging.INFO, format='%(asctime)s %(levelname)s %(message)s') # parse commandline arguments op = OptionParser() op.add_option("--no-minibatch", action="store_false", dest="minibatch", default=True, help="Use ordinary k-means algorithm.") print __doc__ op.print_help() (opts, args) = op.parse_args() if len(args) > 0: op.error("this script takes no arguments.") sys.exit(1) ############################################################################### # Load some categories from the training set categories = [ 'alt.atheism', 'talk.religion.misc', 'comp.graphics', 'sci.space', ] # Uncomment the following to do the analysis on all the categories #categories = None print "Loading 20 newsgroups dataset for categories:" print categories dataset = fetch_20newsgroups(subset='all', categories=categories, shuffle=True, random_state=42) print "%d documents" % len(dataset.data) print "%d categories" % len(dataset.target_names) print labels = dataset.target true_k = np.unique(labels).shape[0] print "Extracting features from the training dataset using a sparse vectorizer" t0 = time() vectorizer = TfidfVectorizer(max_df=0.5, max_features=10000, stop_words='english') X = vectorizer.fit_transform(dataset.data) print "done in %fs" % (time() - t0) print "n_samples: %d, n_features: %d" % X.shape print ###################################
############################################ # Do the actual clustering if opts.minibatch: km = MiniBatchKMeans(n_clusters=true_k, init='k-means++', n_init=1, init_size=1000, batch_size=1000, verbose=1) else: km = KMeans(n_clusters=true_k, init='random', max_iter=100, n_init=1, verbose=1) print "Clustering sparse data with %s" % km t0 = time() km.fit(X)
print "done in %0.3fs" % (time() - t0) print print "Homogeneity: %0.3f" % metrics.homogeneity_score(labels, km.labels_) print "Completeness: %0.3f" % metrics.completeness_score(labels, km.labels_) print "V-measure: %0.3f" % metrics.v_measure_score(labels, km.labels_) print "Adjusted Rand-Index: %.3f" % \ metrics.adjusted_rand_score(labels, km.labels_) print "Silhouette Coefficient: %0.3f" % metrics.silhouette_score( X, labels, sample_size=1000) print
# Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from typing import Any, Dict from unittest.mock import Mock def get_column_mock(params: Dict[str, Any]) -> Mock: mock = Mock() mock.id = params["id"] mock.column_name = params["column_name"] mock.verbose_name = params["verbose_name"] mock.description = params["description"] mock.expression = params["expression"] mock.filterable = params["filterable"] mock.groupby = params["groupby"] mock.is_dttm = params["is_dttm"] mock.type = params["type"] return mock def get_metric_mock(params: Dict[str, Any]) -> Mock: mock = Mock() mock.id = params["id"] mock.metric_name = params["metric_name"] mock.verbose_name = params["verbose_name"] mock.description = params["description"] mock.expression = params["expression"] mock.warning_text = params["warning_text"] mock.d3format = params["d3format"] return mock def get_dataset_mock() -> Mock: mock = Mock() mock.id = None mock.column_formats = {"ratio": ".2%"} mock.database = {"id": 1} mock.description = "Adding a DESCRip" mock.default_endpoint = "" mock.filter_select_enabled = True mock.name = "birth_names" mock.table_name = "birth_names" mock.datasource_name = "birth_names" mock.type = "table" mock.schema = None mock.offset = 66 mock.cache_timeout = 55 mock.sql = "" mock.columns = [ get_column_mock( { "id": 504, "column_name": "ds", "verbose_name": "", "description": None, "expression": "", "filterable": True, "groupby": True, "is_dttm": True, "type": "DATETIME", } ), get_column_mock( { "id": 505, "column_name": "gender", "verbose_name": None, "description": None, "expression": "", "filterable": True, "groupby": True, "is_dttm": False, "type": "VARCHAR(16)", } ), get_column_mock( { "id": 506, "column_name": "name", "verbose_name": None, "description": None, "expression": None, "filterable": True, "groupby": True, "is_dttm": None, "type": "VARCHAR(255)", } ), get_column_mock( { "id": 508, "column_name": "state", "verbose_name": None, "description": None, "expression": None, "filterable": True, "groupby": True, "is_dttm": None, "type": "VARCHAR(10)", } ), get_column_mock( { "id": 509, "column_name": "num_boys", "verbose_name": None, "description": None, "expression": None, "filterable": True, "groupby": True, "is_dttm": None, "type": "BIGINT(20)", } ), get_column_mock( { "id": 510, "column_name": "num_girls", "verbose_name": None, "description": None, "expression": "", "filterable": False, "groupby": False, "is_dttm": False, "type": "BIGINT(20)", } ), get_column_mock( { "id": 532, "column_name": "num", "verbose_name": None, "description": None, "expression": None, "filterable": True, "groupby": True, "is_dttm": None, "type": "BIGINT(20)", } ), get_column_mock( { "id": 522, "column_name": "num_california", "verbose_name": None, "description": None, "expression": "CASE WHEN state = 'CA' THEN num ELSE 
0 END", "filterable": False, "groupby": False, "is_dttm": False, "type": "NUMBER", } ), ] mock.metrics = [ get_metric_mock( { "id": 824, "metric_name": "sum__num", "verbose_name": "Babies", "description": "", "expression": "SUM(num)", "warning_text": "", "d3format": "", } ), get_metric_mock( { "id": 836, "metric_name": "count", "verbose_name": "", "description": None, "expression": "count(1)", "warning_text"
: None, "d3format": None, } ), get_metric_mock( { "id": 843, "metric_name": "ratio", "verbose_name": "Ratio Boys/Girls", "description": "This represents
the ratio of boys/girls", "expression": "sum(num_boys) / sum(num_girls)", "warning_text": "no warning", "d3format": ".2%", } ), ] return mock
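# Usage sketch (hypothetical test, not from the Superset suite): the mock
# stands in for a dataset object when exercising code that iterates over its
# columns and metrics.
#
# dataset = get_dataset_mock()
# dttm_columns = [col.column_name for col in dataset.columns if col.is_dttm]
# assert dttm_columns == ["ds"]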
fr
om celery.task import Task import requests class Stra
cksFlushTask(Task): def run(self, url, data): requests.post(url + "/", data=data)
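# Usage sketch (assumes a configured Celery broker and a running worker; the
# URL and payload are placeholders): class-based tasks are dispatched with
# .delay(), which queues run(url, data) on a worker instead of calling it
# inline. Note run() appends "/" itself, so pass the URL without a trailing
# slash.
#
# StracksFlushTask.delay("http://stracks.example.org", {"event": "flush"})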
#!/usr/bin/env python # -*- coding: utf-8 -*- # # spaceClustering.py # # Copyright 2014 Carlos "casep" Sepulveda <carlos.sepulveda@gmail.com> # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, # MA 02110-1301, USA. # # # Performs basic clustering based on the size of the RF import sys, os sys.path.append(os.path.join(os.path.dirname(__file__), '../..','LIB')) import rfestimationLib as rfe import argparse # argument parsing import numpy as np # Numpy import densityPeaks as dp import matplotlib matplotlib.use('Agg') from matplotlib import pyplot as plt from sklearn import mixture clustersColours = ['blue', 'red', 'green', 'orange', 'black','yellow', \ '#ff006f','#00e8ff','#fcfa00', '#ff0000', '#820c2c', \ '#ff006f', '#af00ff','#0200ff','#008dff','#00e8ff', \ '#0c820e','#28ea04','#ea8404','#c8628f','#6283ff', \ '#5b6756','#0c8248','k','#820cff','#932c11', \ '#002c11','#829ca7'] def main(): parser = argparse.ArgumentParser(prog='spaceClustering.py', description='Performs basic clustering based on the size of the RF', formatter_class=argparse.ArgumentDefaultsHelpFormatter) parser.add_argument('--sourceFolder', help='Source folder', type=str, required=True) parser.add_argument('--outputFolder', help='Output folder', type=str, required=True) parser.add_argument('--percentage', help='Percentage used to calculate the distance', type=float, default='2', required=False) parser.add_argument('--xSize', help='X size of the stimuli', type=int, default='31', required=False) parser.add_argument('--ySize', help='Y size of the stimuli', type=int, default='31', required=False) args = parser.parse_args() #Source folder of the files with the timestamps sourceFolder = rfe.fixPath(args.sourceFolder) if not os.path.exists(sourceFolder): print '' print 'Source folder does not exist ' + sourceFolder print '' sys.exit() #Output folder for the graphics outputFolder = rfe.fixPath(args.outputFolder) if not os.path.exists(outputFolder): try: os.makedirs(outputFolder) except: print '' print 'Unable to create folder ' + outputFolder print '' sys.exit() units = [] dataCluster = np.zeros((1,7)) for unitFile in sorted(os.listdir(sourceFolder)): if os.path.isdir(sourceFolder+unitFile): unitName = unitFile.rsplit('_', 1)[0] fitResult = rfe.loadFitMatrix(sourceFolder,unitFile) dataCluster = np.vstack((dataCluster,[fitResult[0][2],\ fitResult[0][3],fitResult[0][1],fitResult[0][4],\ fitResult[0][5],fitResult[0][2]*fitResult[0][3]*3,\ (fitResult[0][2]+fitResult[0][3])/2])) units.append(unitName) # remove the first row of zeroes dataCluster = dataCluster[1:,:] percentage = args.percentage #exploratory, '...for large data sets, the results of the analysis are robust with respect to the choice of d_c' # Area instead of Radius #clustersNumber, labels = dp.predict(dataCluster[:,0:2], percentage) clustersNumber, labels = dp.predict(dataCluster[:,5:7], percentage) gmix = mixture.GMM(n_components=clustersNumber, 
covariance_type='spherical') gmix.fit(dataCluster[:,5:7]) labels = gmix.predict(dataCluster[:,5:7]) for clusterId in range(clustersNumber): clusterFile = open(outputFolder+'cluster_'+str(clusterId)+'.csv', "w") for unit in range(labels.size): if labels[unit] == clusterId: clusterFile.write(units[un
it]+'\n') clusterFile.close() xSize = args.xSize ySize = args.ySize # generate graphics of all ellipses for clusterId in range(clustersNumber): dataGrilla = np.zeros((1,7)) for unitId in range(dataCluster.shape[0]): i
f labels[unitId] == clusterId: datos=np.zeros((1,7)) datos[0]=dataCluster[unitId,:] dataGrilla = np.append(dataGrilla,datos, axis=0) ## remove the first row of zeroes dataGrilla = dataGrilla[1:,:] rfe.graficaGrilla(dataGrilla, outputFolder+'Grilla_'+str(clusterId)+'.png', 0, clustersColours[clusterId], xSize, ySize) return 0 if __name__ == '__main__': main()
le. For other cases, see the M(copy) or M(template) modules. version_added: "0.7" options: path: description: - The file to modify. - Before 2.3 this option was only usable as I(dest), I(destfile) and I(name). aliases: [ dest, destfile, name ] required: true regexp: aliases: [ 'regex' ] description: - The regular expression to look for in every line of the file. For C(state=present), the pattern to replace if found. Only the last line found will be replaced. For C(state=absent), the pattern of the line(s) to remove. Uses Python regular expressions. See U(http://docs.python.org/2/library/re.html). version_added: '1.7' state: description: - Whether the line should be there or not. choices: [ absent, present ] default: present line: description: - Required for C(state=present). The line to insert/replace into the file. If C(backrefs) is set, may contain backreferences that will get expanded with the C(regexp) capture groups if the regexp matches. backrefs: description: - Used with C(state=present). If set, C(line) can contain backreferences (both positional and named) that will get populated if the C(regexp) matches. This flag changes the operation of the module slightly; C(insertbefore) and C(insertafter) will be ignored, and if the C(regexp) doesn't match anywhere in the file, the file will be left unchanged. If the C(regexp) does match, the last matching line will be replaced by the expanded line parameter. type: bool default: 'no' version_added: "1.1" insertafter: description: - Used with C(state=present). If specified, the line will be inserted after the last match of the specified regular expression. If the first match is required, use C(firstmatch=yes). A special value is available; C(EOF) for inserting the line at the end of the file. If the specified regular expression has no matches, EOF will be used instead. If regular expressions are passed to both C(regexp) and C(insertafter), C(insertafter) is only honored if no match for C(regexp) is found. May not be used with C(backrefs). choices: [ EOF, '*regex*' ] default: EOF insertbefore: description: - Used with C(state=present). If specified, the line will be inserted before the last match of the specified regular expression. If the first match is required, use C(firstmatch=yes). A special value is available; C(BOF) for inserting the line at the beginning of the file. If the specified regular expression has no matches, the line will be inserted at the end of the file. If regular expressions are passed to both C(regexp) and C(insertbefore), C(insertbefore) is only honored if no match for C(regexp) is found. May not be used with C(backrefs). choices: [ BOF, '*regex*' ] version_added: "1.1" create: description: - Used with C(state=present). If specified, the file will be created if it does not already exist. By default it will fail if the file is missing. type: bool default: 'no' backup: description: - Create a backup file including the timestamp information so you can get the original file back if you somehow clobbered it incorrectly. type: bool default: 'no' firstmatch: description: - Used with C(insertafter) or C(insertbefore). If set, C(insertafter) and C(insertbefore) find the first line that matches the given regular expression. type: bool default: 'no' version_added: "2.5" others: description: - All arguments accepted by the M(file) module also work here. notes: - As of Ansible 2.3, the I(dest) option has been changed to I(path) as default, but I(dest) still works as well. 
""" EXAMPLES = r""" # Before 2.3, option 'dest', 'destfile' or 'name' was used instead of 'path' - lineinfile: path: /etc/selinux/config regexp: '^SELINUX=' line: 'SELINUX=enforcing' - lineinfile: path: /etc/sudoers state: absent regexp: '^%wheel' # Searches for a line that begins with 127.0.0.1 and replaces it with the value of the 'line' parameter - lineinfile: path: /etc/hosts regexp: '^127\.0\.0\.1' line: '127.0.0.1 localhost' owner: root group: root mode: 0644 - lineinfile: path: /etc/httpd/conf/httpd.conf regexp: '^Listen ' insertafter: '^#Listen ' line: 'Listen 8080' - lineinfile: path: /etc/services regexp: '^# port for http' insertbefore: '^www.*80/tcp' line: '# port for http by default' # Add a line to a file if the file does not exist, without passing regexp - lineinfile: path: /tmp/testfile line: '192.168.1.99 foo.lab.net foo' create: yes # Fully quoted because of the ': ' on the line. See the Gotchas in the YAML docs. - lineinfile: path: /etc/sudoers state: present regexp: '^%wheel\s' line: '%wheel ALL=(ALL) NOPASSWD: ALL' # Yaml requires escaping backslashes in double quotes but not in single quotes - lineinfile: path: /opt/jboss-as/bin/standalone.conf regexp: '^(.*)Xms(\\d+)m(.*)$' line: '\1Xms${xms}m\3' backrefs: yes # Validate the sudoers file before saving - lineinfile: path: /etc/sudoers state: present regexp: '^%ADMIN ALL=' line: '%ADMIN ALL=(ALL) NOPASSWD: ALL' validate: '/usr/sbin/visudo -cf %s' """ import os import re import tempfile # import module snippets from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.six import b from ansibl
e.module_utils._text import to_bytes, to_native def write_changes(modu
le, b_lines, dest): tmpfd, tmpfile = tempfile.mkstemp() with open(tmpfile, 'wb') as f: f.writelines(b_lines) validate = module.params.get('validate', None) valid = not validate if validate: if "%s" not in validate: module.fail_json(msg="validate must contain %%s: %s" % (validate)) (rc, out, err) = module.run_command(to_bytes(validate % tmpfile, errors='surrogate_or_strict')) valid = rc == 0 if rc != 0: module.fail_json(msg='failed to validate: ' 'rc:%s error:%s' % (rc, err)) if valid: module.atomic_move(tmpfile, to_native(os.path.realpath(to_bytes(dest, errors='surrogate_or_strict')), errors='surrogate_or_strict'), unsafe_writes=module.params['unsafe_writes']) def check_file_attrs(module, changed, message, diff): file_args = module.load_file_common_arguments(module.params) if module.set_fs_attributes_if_different(file_args, False, diff=diff): if changed: message += " and " changed = True message += "ownership, perms or SE linux context changed" return message, changed def present(module, dest, regexp, line, insertafter, insertbefore, create, backup, backrefs, firstmatch): diff = {'before': '', 'after': '', 'before_header': '%s (content)' % dest, 'after_header': '%s (content)' % dest} b_dest = to_bytes(dest, errors='surrogate_or_strict') if not os.path.exists(b_dest): if not create: module.fail_json(rc=257, msg='Destination %s does not exist !' % dest) b_destpath = os.path.dirname(b_dest) if not os.path.exists(b_destpath) and not module.check_mode: try: os.makedirs(b_destpath) except Exception as e: module.fail_json(msg='Error creating %s Error code: %s Error description: %s' % (b_destpath, e[0], e[1])) b_lines = [] else: with open(b_dest, 'rb') as f: b_lines = f.readlines() if module._diff: diff['before'] = to_native(b('').join(b_lines)) if regexp is not None: bre_m = re.compile(to_bytes(regexp, errors='surrogate_or_strict')) if insertafter not in (None, 'BOF', 'EOF'): bre_ins = re.compile(to_bytes(insertafter, er
e): self.url = self.url.del_query_param(value) def handle_toggle_query(self, value): query_to_toggle = self.prepare_value(value) if isinstance(query_to_toggle, str): query_to_toggle = QueryString(query_to_toggle).dict current_query = self.url.query.dict for key, value in query_to_toggle.items(): if isinstance(value, str): value = value.split(",") first, second = value if key in current_query and first in current_query[key]: self.url = self.url.set_query_param(key, second) else: self.url = self.url.set_query_param(key, first) def handle_trigger_query(self, value): query_to_trigger = self.prepare_value(value) if isinstance(query_to_trigger, str): query_to_trigger = QueryString(query_to_trigger).dict current_query = self.url.query.dict for key, value in query_to_trigger.items(): if isinstance(value, str): value = value if key in current_query and value in current_query[key]: # unset self.url = self.url.del_query_param(key) else: # set self.url = self.url.set_query_param(key, value) def handle_trigger_mquery(self, value): query_to_trigger = self.prepare_value(value) if isinstance(query_to_trigger, str): query_to_trigger = QueryString(query_to_trigger).dict current_query = self.url.query.dict for key, value in query_to_trigger.items(): # exact match of query -> unset it if key in current_query and query_to_trigger[key] == current_query[key]: self.url = self.url.del_query_param(key) return # check if current query has multiple items try: ext = current_query[key] ext = ext.split(",") except Exception as e: ext = None if ext and len(ext) > 1: if key in current_query and value in ext:
# we have a key-match, so remove it from the string ext
= [x for x in ext if x != value] else: # no key match, so add it to the string ext.append(value) ext.sort() self.url = self.url.set_query_param(key, ",".join(ext)) elif ext and len(ext) == 1: # param already here > append ext.append(value) ext.sort() ext = list(set(ext)) self.url = self.url.set_query_param(key, ",".join(ext)) else: if isinstance(value, str): value = value if key in current_query and value in current_query[key]: # unset pass # self.url = self.url.del_query_param(key) else: # set self.url = self.url.set_query_param(key, value) def handle_active_mquery(self, value): active = None query_to_trigger = self.prepare_value(value) if isinstance(query_to_trigger, str): query_to_trigger = QueryString(query_to_trigger).dict current_query = self.url.query.dict for key, value in query_to_trigger.items(): # exact match of query -> unset it if key in current_query and query_to_trigger[key] == current_query[key]: active = True # check if current query has multiple items try: ext = current_query[key] ext = ext.split(",") except Exception as e: ext = None if ext and len(ext) > 1: if key in current_query and value in ext: active = True self.url = active def handle_scheme(self, value): self.url = self.url.with_scheme(value) def handle_scheme_from(self, value): url = URLObject(value) self.url = self.url.with_scheme(url.scheme) def handle_host(self, value): host = self.prepare_value(value) self.url = self.url.with_hostname(host) def handle_host_from(self, value): url = URLObject(value) self.url = self.url.with_hostname(url.hostname) def handle_path(self, value): path = self.prepare_value(value) self.url = self.url.with_path(path) def handle_path_from(self, value): url = URLObject(value) self.url = self.url.with_path(url.path) def handle_add_path(self, value): path_to_add = self.prepare_value(value) self.url = self.url.add_path(path_to_add) def handle_add_path_from(self, value): url = URLObject(value) path_to_add = url.path if path_to_add.startswith("/"): path_to_add = path_to_add[1:] self.url = self.url.add_path(path_to_add) def handle_fragment(self, value): fragment = self.prepare_value(value) self.url = self.url.with_fragment(fragment) def handle_fragment_from(self, value): url = URLObject(value) self.url = self.url.with_fragment(url.fragment) def handle_port(self, value): self.url = self.url.with_port(int(value)) def handle_port_from(self, value): url = URLObject(value) self.url = self.url.with_port(url.port) def handle_autoescape(self, value): self.autoescape = convert_to_boolean(value) def set_sensible_defaults(self): if self.url.hostname and not self.url.scheme: self.url = self.url.with_scheme("http") def prepare_value(self, value): """Prepare a value by unescaping embedded template tags and rendering through Django's template system""" if isinstance(value, str): value = self.unescape_tags(value) value = self.render_template(value) return value def unescape_tags(self, template_string): """Spurl allows the use of templatetags inside templatetags, if the inner templatetags are escaped - {\% and %\}""" return template_string.replace("{\%", "{%").replace("%\}", "%}") def compile_string(self, template_string, origin): """Re-implementation of django.template.base.compile_string that takes into account the tags and filter of the parser that rendered the parent template""" if TEMPLATE_DEBUG: from django.template.debug import DebugLexer, DebugParser lexer_class, parser_class = DebugLexer, DebugParser else: lexer_class, parser_class = Lexer, Parser # TODO: investigate. 
in django 1.9 `Lexer` only takes one argument try: lexer = lexer_class(template_string, origin) except TypeError: lexer = lexer_class(template_string) parser = parser_class(lexer.tokenize()) # Attach the tags and filters from the parent parser parser.tags = self.tags parser.filters = self.filters return parser.parse() def render_template(self, template_string): """Used to render an "inner" template, ie one which is passed as an argument to spurl""" original_autoescape = self.context.autoescape self.context.autoescape = False template = Template("") if TEMPLATE_DEBUG: origin = StringOrigin(template_string) else: origin = None template.nodelist = self.compile_string(template_string, origin) rendered = template.render(self.context) self.context.autoescape = original_autoescape return rendered class SpurlNode(Node): def __init__(self, args, tags, filters, asvar=None): self.args = args self.asvar = asvar self.tags = tags self.filters = filters def render(self, context): builder = SpurlURLBuilder(self.args, context, self.tags, self.filters)
from sympy import AccumBounds, Symbol, floor, nan, oo, E, symbols, ceiling, pi, \ Rational, Float, I, sin, exp, log, factorial, frac from sympy.utilities.pytest import XFAIL x = Symbol('x') i = Symbol('i', imaginary=True) y = Symbol('y', real=True) k, n = symbols('k,n', integer=True) def test_floor(): assert floor(nan) == nan assert floor(oo) == oo assert floor(-oo) == -oo assert floor(0) == 0 assert floor(1) == 1 assert floor(-1) == -1 assert floor(E) == 2 assert floor(-E) == -3 assert floor(2*E) == 5 assert floor(-2*E) == -6 assert floor(pi) == 3 assert floor(-pi) == -4 assert floor(Rational(1, 2)) == 0 assert floor(-Rational(1, 2)) == -1 assert floor(Rational(7, 3)) == 2 assert floor(-Rational(7, 3)) == -3 assert floor
(Float(17.0)) == 17 assert floor(-Float(17.0)) == -17 assert floor(Float(7.69)) == 7 assert floor(-Float(7.69)) == -8 assert floor(I) == I as
sert floor(-I) == -I e = floor(i) assert e.func is floor and e.args[0] == i assert floor(oo*I) == oo*I assert floor(-oo*I) == -oo*I assert floor(2*I) == 2*I assert floor(-2*I) == -2*I assert floor(I/2) == 0 assert floor(-I/2) == -I assert floor(E + 17) == 19 assert floor(pi + 2) == 5 assert floor(E + pi) == floor(E + pi) assert floor(I + pi) == floor(I + pi) assert floor(floor(pi)) == 3 assert floor(floor(y)) == floor(y) assert floor(floor(x)) == floor(floor(x)) assert floor(x) == floor(x) assert floor(2*x) == floor(2*x) assert floor(k*x) == floor(k*x) assert floor(k) == k assert floor(2*k) == 2*k assert floor(k*n) == k*n assert floor(k/2) == floor(k/2) assert floor(x + y) == floor(x + y) assert floor(x + 3) == floor(x + 3) assert floor(x + k) == floor(x + k) assert floor(y + 3) == floor(y) + 3 assert floor(y + k) == floor(y) + k assert floor(3 + I*y + pi) == 6 + floor(y)*I assert floor(k + n) == k + n assert floor(x*I) == floor(x*I) assert floor(k*I) == k*I assert floor(Rational(23, 10) - E*I) == 2 - 3*I assert floor(sin(1)) == 0 assert floor(sin(-1)) == -1 assert floor(exp(2)) == 7 assert floor(log(8)/log(2)) != 2 assert int(floor(log(8)/log(2)).evalf(chop=True)) == 3 assert floor(factorial(50)/exp(1)) == \ 11188719610782480504630258070757734324011354208865721592720336800 assert (floor(y) <= y) == True assert (floor(y) > y) == False assert (floor(x) <= x).is_Relational # x could be non-real assert (floor(x) > x).is_Relational assert (floor(x) <= y).is_Relational # arg is not same as rhs assert (floor(x) > y).is_Relational def test_ceiling(): assert ceiling(nan) == nan assert ceiling(oo) == oo assert ceiling(-oo) == -oo assert ceiling(0) == 0 assert ceiling(1) == 1 assert ceiling(-1) == -1 assert ceiling(E) == 3 assert ceiling(-E) == -2 assert ceiling(2*E) == 6 assert ceiling(-2*E) == -5 assert ceiling(pi) == 4 assert ceiling(-pi) == -3 assert ceiling(Rational(1, 2)) == 1 assert ceiling(-Rational(1, 2)) == 0 assert ceiling(Rational(7, 3)) == 3 assert ceiling(-Rational(7, 3)) == -2 assert ceiling(Float(17.0)) == 17 assert ceiling(-Float(17.0)) == -17 assert ceiling(Float(7.69)) == 8 assert ceiling(-Float(7.69)) == -7 assert ceiling(I) == I assert ceiling(-I) == -I e = ceiling(i) assert e.func is ceiling and e.args[0] == i assert ceiling(oo*I) == oo*I assert ceiling(-oo*I) == -oo*I assert ceiling(2*I) == 2*I assert ceiling(-2*I) == -2*I assert ceiling(I/2) == I assert ceiling(-I/2) == 0 assert ceiling(E + 17) == 20 assert ceiling(pi + 2) == 6 assert ceiling(E + pi) == ceiling(E + pi) assert ceiling(I + pi) == ceiling(I + pi) assert ceiling(ceiling(pi)) == 4 assert ceiling(ceiling(y)) == ceiling(y) assert ceiling(ceiling(x)) == ceiling(ceiling(x)) assert ceiling(x) == ceiling(x) assert ceiling(2*x) == ceiling(2*x) assert ceiling(k*x) == ceiling(k*x) assert ceiling(k) == k assert ceiling(2*k) == 2*k assert ceiling(k*n) == k*n assert ceiling(k/2) == ceiling(k/2) assert ceiling(x + y) == ceiling(x + y) assert ceiling(x + 3) == ceiling(x + 3) assert ceiling(x + k) == ceiling(x + k) assert ceiling(y + 3) == ceiling(y) + 3 assert ceiling(y + k) == ceiling(y) + k assert ceiling(3 + pi + y*I) == 7 + ceiling(y)*I assert ceiling(k + n) == k + n assert ceiling(x*I) == ceiling(x*I) assert ceiling(k*I) == k*I assert ceiling(Rational(23, 10) - E*I) == 3 - 2*I assert ceiling(sin(1)) == 1 assert ceiling(sin(-1)) == 0 assert ceiling(exp(2)) == 8 assert ceiling(-log(8)/log(2)) != -2 assert int(ceiling(-log(8)/log(2)).evalf(chop=True)) == -3 assert ceiling(factorial(50)/exp(1)) == \ 
11188719610782480504630258070757734324011354208865721592720336801 assert (ceiling(y) >= y) == True assert (ceiling(y) < y) == False assert (ceiling(x) >= x).is_Relational # x could be non-real assert (ceiling(x) < x).is_Relational assert (ceiling(x) >= y).is_Relational # arg is not same as rhs assert (ceiling(x) < y).is_Relational def test_frac(): assert isinstance(frac(x), frac) assert frac(oo) == AccumBounds(0, 1) assert frac(-oo) == AccumBounds(0, 1) assert frac(n) == 0 assert frac(nan) == nan assert frac(Rational(4, 3)) == Rational(1, 3) assert frac(-Rational(4, 3)) == Rational(2, 3) r = Symbol('r', real=True) assert frac(I*r) == I*frac(r) assert frac(1 + I*r) == I*frac(r) assert frac(0.5 + I*r) == 0.5 + I*frac(r) assert frac(n + I*r) == I*frac(r) assert frac(n + I*k) == 0 assert frac(x + I*x) == frac(x + I*x) assert frac(x + I*n) == frac(x) assert frac(x).rewrite(floor) == x - floor(x) def test_series(): x, y = symbols('x,y') assert floor(x).nseries(x, y, 100) == floor(y) assert ceiling(x).nseries(x, y, 100) == ceiling(y) assert floor(x).nseries(x, pi, 100) == 3 assert ceiling(x).nseries(x, pi, 100) == 4 assert floor(x).nseries(x, 0, 100) == 0 assert ceiling(x).nseries(x, 0, 100) == 1 assert floor(-x).nseries(x, 0, 100) == -1 assert ceiling(-x).nseries(x, 0, 100) == 0 @XFAIL def test_issue_4149(): assert floor(3 + pi*I + y*I) == 3 + floor(pi + y)*I assert floor(3*I + pi*I + y*I) == floor(3 + pi + y)*I assert floor(3 + E + pi*I + y*I) == 5 + floor(pi + y)*I def test_issue_11207(): assert floor(floor(x)) == floor(x) assert floor(ceiling(x)) == ceiling(x) assert ceiling(floor(x)) == floor(x) assert ceiling(ceiling(x)) == ceiling(x)
1)

    def logMsg(self, msg, lvl=1):

        self.className = self.__class__.__name__
        utils.logMsg("%s %s" % (self.addonName, self.className), msg, lvl)

    def onScanStarted(self, library):

        self.logMsg("Kodi library scan %s running." % library, 2)
        if library == "video":
            utils.window('emby_kodiScan', value="true")

    def onScanFinished(self, library):

        self.logMsg("Kodi library scan %s finished." % library, 2)
        if library == "video":
            utils.window('emby_kodiScan', clear=True)

    def onSettingsChanged(self):
        # Monitor emby settings
        # Review reset setting at a later time, needs to be adjusted to
        # account for initial setup changes.
        '''currentPath = utils.settings('useDirectPaths')
        if utils.window('emby_pluginpath') != currentPath:
            # Plugin path value changed. Offer to reset
            self.logMsg("Change to playback mode detected", 1)
            utils.window('emby_pluginpath', value=currentPath)
            resp = xbmcgui.Dialog().yesno(
                heading="Playback mode change detected",
                line1=(
                    "Detected the playback mode has changed. The database "
                    "needs to be recreated for the change to be applied. "
                    "Proceed?"))
            if resp:
                utils.reset()'''

        currentLog = utils.settings('logLevel')
        if utils.window('emby_logLevel') != currentLog:
            # The log level changed, set new prop
            self.logMsg("New log level: %s" % currentLog, 1)
            utils.window('emby_logLevel', value=currentLog)

    def onNotification(self, sender, method, data):

        doUtils = self.doUtils
        # Membership test needs a tuple; a bare string would do a (fragile)
        # substring check instead.
        if method not in ("Playlist.OnAdd",):
            self.logMsg("Method: %s Data: %s" % (method, data), 1)

        if data:
            data = json.loads(data, 'utf-8')

        if method == "Player.OnPlay":
            # Set up report progress for emby playback
            item = data.get('item')
            try:
                kodiid = item['id']
                type = item['type']
            except (KeyError, TypeError):
                self.logMsg("Item is invalid for playstate update.", 1)
            else:
                if ((utils.settings('useDirectPaths') == "1" and not type == "song") or
                        (type == "song" and utils.settings('enableMusic') == "true")):
                    # Set up properties for player
                    embyconn = utils.kodiSQL('emby')
                    embycursor = embyconn.cursor()
                    emby_db = embydb.Embydb_Functions(embycursor)
                    emby_dbitem = emby_db.getItem_byKodiId(kodiid, type)
                    try:
                        itemid = emby_dbitem[0]
                    except TypeError:
                        self.logMsg("No kodiid returned.", 1)
                    else:
                        url = "{server}/emby/Users/{UserId}/Items/%s?format=json" % itemid
                        result = doUtils.downloadUrl(url)
                        self.logMsg("Item: %s" % result, 2)

                        playurl = None
                        count = 0
                        while not playurl and count < 2:
                            try:
                                playurl = xbmc.Player().getPlayingFile()
                            except RuntimeError:
                                count += 1
                                xbmc.sleep(200)
                            else:
                                listItem = xbmcgui.ListItem()
                                playback = pbutils.PlaybackUtils(result)

                                if type == "song" and utils.settings('streamMusic') == "true":
                                    utils.window('emby_%s.playmethod' % playurl,
                                                 value="DirectStream")
                                else:
                                    utils.window('emby_%s.playmethod' % playurl,
                                                 value="DirectPlay")
                                # Set properties for player.py
                                playback.setProperties(playurl, listItem)
                    finally:
                        embycursor.close()

        elif method == "VideoLibrary.OnUpdate":
            # Manually marking as watched/unwatched
            playcount = data.get('playcount')
            item = data.get('item')
            try:
                kodiid = item['id']
                type = item['type']
            except (KeyError, TypeError):
                self.logMsg("Item is invalid for playstate update.", 1)
            else:
                # Send notification to the server.
                embyconn = utils.kodiSQL('emby')
                embycursor = embyconn.cursor()
                emby_db = embydb.Embydb_Functions(embycursor)
                emby_dbitem = emby_db.getItem_byKodiId(kodiid, type)
                try:
                    itemid = emby_dbitem[0]
                except TypeError:
                    self.logMsg("Could not find itemid in emby database.", 1)
                else:
                    # Stop from manually marking as watched/unwatched during
                    # actual playback.
                    if utils.window('emby_skipWatched%s' % itemid) == "true":
                        # property is set in player.py
                        utils.window('emby_skipWatched%s' % itemid, clear=True)
                    else:
                        # notify the server
                        url = "{server}/emby/Users/{UserId}/PlayedItems/%s?format=json" % itemid
                        if playcount != 0:
                            doUtils.downloadUrl(url, type="POST")
                            self.logMsg("Mark as watched for itemid: %s" % itemid, 1)
                        else:
                            doUtils.downloadUrl(url, type="DELETE")
                            self.logMsg("Mark as unwatched for itemid: %s" % itemid, 1)
                finally:
                    embycursor.close()

        elif method == "VideoLibrary.OnRemove":
            # Function removed, because with plugin paths + clean library it
            # would wipe the entire library if the user has permissions.
            # Instead, use the emby context menu available in Isengard and
            # higher versions.
            pass
            '''try:
                kodiid = data['id']
                type = data['type']
            except (KeyError, TypeError):
                self.logMsg("Item is invalid for emby deletion.", 1)
            else:
                # Send the delete action to the server.
                embyconn = utils.kodiSQL('emby')
                embycursor = embyconn.cursor()
                emby_db = embydb.Embydb_Functions(embycursor)
                emby_dbitem = emby_db.getItem_byKodiId(kodiid, type)
                try:
                    itemid = emby_dbitem[0]
                except TypeError:
                    self.logMsg("Could not find itemid in emby database.", 1)
                else:
                    if utils.settings('skipContextMenu') != "true":
                        resp = xbmcgui.Dialog().yesno(
                            heading="Confirm delete",
                            line1="Delete file on Emby Server?")
                        if not resp:
                            self.logMsg("User skipped deletion.", 1)
                            embycursor.close()
                            return

                    url = "{server}/emby/Items/%s?format=json" % itemid
                    self.logMsg("Deleting request: %s" % itemid)
                    doUtils.downloadUrl(url, type="DELETE")
                finally:
                    embycursor.close()'''

        elif method == "System.OnWake":
            # Allow network to wake up
            xbmc.sleep(10000)
            utils.window('emby_onWake', value="true")

        elif method == "Playlis
#
# core.py
#
# Copyright (C) 2014 dredkin <dmitry.redkin@gmail.com>
#
# Basic plugin template created by:
# Copyright (C) 2008 Martijn Voncken <mvoncken@gmail.com>
# Copyright (C) 2007-2009 Andrew Resch <andrewresch@gmail.com>
# Copyright (C) 2009 Damien Churchill <damoxc@gmail.com>
#
# Deluge is free software.
#
# You may redistribute it and/or modify it under the terms of the
# GNU General Public License, as published by the Free Software
# Foundation; either version 3 of the License, or (at your option)
# any later version.
#
# deluge is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with deluge. If not, write to:
#     The Free Software Foundation, Inc.,
#     51 Franklin Street, Fifth Floor
#     Boston, MA 02110-1301, USA.
#
# In addition, as a special exception, the copyright holders give
# permission to link the code of portions of this program with the OpenSSL
# library.
# You must obey the GNU General Public License in all respects for all of
# the code used other than OpenSSL. If you modify file(s) with this
# exception, you may extend this exception to your version of the file(s),
# but you are not obligated to do so. If you do not wish to do so, delete
# this exception statement from your version. If you delete this exception
# statement from all source files in the program, then also delete it here.
#

from deluge.log import LOG as log
from deluge.plugins.pluginbase import CorePluginBase
import deluge.component as component
import deluge.configmanager
from deluge.core.rpcserver import export

import os
import locale
import pkg_resources
import gettext


def windows():
    return os.name == "nt"

if windows():
    import win32api

DEFAULT_PREFS = {
    # Default to empty to have no specified root dir.
    "RootDirPath": "",
    "DisableTraversal": "false"
}

UTF8 = 'UTF-8'
CURRENT_LOCALE = locale.getdefaultlocale()[1]
if CURRENT_LOCALE is None:
    CURRENT_LOCALE = UTF8


class Core(CorePluginBase):

    def enable(self):
        self.config = deluge.configmanager.ConfigManager("browsebutton.conf", DEFAULT_PREFS)

    def disable(self):
        #self.config.save()
        pass

    def update(self):
        pass

    def drives_list(self):
        if windows():
            drives = win32api.GetLogicalDriveStrings()
            return drives.split('\000')[:-1]
        else:
            # Return a list so callers can treat both platforms uniformly
            return ["/"]

    def subfolders_list(self, absolutepath):
        subfolders = []
        try:
            entries = os.listdir(absolutepath)
        except OSError:
            entries = []
        for f in entries:
            if os.path.isdir(os.path.join(absolutepath, f)):
                f2 = f.decode(CURRENT_LOCALE).encode(UTF8)
                subfolders.append(f2)
        return subfolders

    def is_root_folder(self, folder):
        return os.path.dirname(folder) == folder

    @export
    def save_config(self):
        """Saves the config"""
        self.config.save()
        log.debug("RBB: config saved")

    @export
    def set_config(self, config):
        """Sets the config dictionary"""
        log.debug("RBB: set_config")
        for key in config.keys():
            self.config[key] = config[key]
            log.debug("RBB: added history " + str(key) + "->" + str(config[key]))
        self.save_config()

    @export
    def get_config(self):
        """Returns the config dictionary"""
        log.debug("RBB: config assigned")
        return self.config.config

    @export
    def serverlog(self, line):
        log.debug(line)

    @export
    def get_folder_list(self, folder, subfolder):
        """Returns the list of subfolders for specified folder on server"""
        error = ""
        if folder == "":
            folder = os.path.expanduser("~")
        else:
            folder = folder.encode(CURRENT_LOCALE)
        log.debug("RBB: native folder " + folder)
        log.debug("RBB: orig subfolder " + subfolder)
        subfolder = subfolder.encode(CURRENT_LOCALE)
        newfolder = os.path.join(folder, subfolder)
        absolutepath = os.path.normpath(newfolder)
        if not os.path.isdir(absolutepath):
            log.info("RBB: NOT A FOLDER!: " + absolutepath + " (normalized from " + newfolder + ")")
            error = "Cannot List Contents of " + absolutepath
            absolutepath = os.path.expanduser("~")
        if windows():
            isroot = self.is_root_folder(folder) and (subfolder == "..")
        else:
            isroot = self.is_root_folder(absolutepath)
        if windows() and isroot:
            subfolders = self.drives_list()
            absolutepath = ""
        else:
            subfolders = self.subfolders_list(absolutepath)
        return [absolutepath.decode(CURRENT_LOCALE).encode(UTF8), isroot, subfolders, error]
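# ---------------------------------------------------------------------------
# Illustrative usage sketch (not part of the original plugin): how a client
# might walk the folder tree through the exported `get_folder_list` method.
# The direct instantiation below is an assumption for illustration only; in a
# real deployment these calls arrive through Deluge's RPC layer and `Core`
# is managed by the plugin framework.
#
#   core = Core("browsebutton")
#   core.enable()
#   # Start at the server's home directory, then step up one level:
#   path, isroot, subfolders, error = core.get_folder_list("", "")
#   path, isroot, subfolders, error = core.get_folder_list(path, "..")
# ---------------------------------------------------------------------------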
import random

from common import generalUtils
from common.log import logUtils as log
from constants import clientPackets
from constants import matchModModes
from constants import matchTeamTypes
from constants import matchTeams
from constants import slotStatuses
from objects import glob


def handle(userToken, packetData):
    # Read new settings
    packetData = clientPackets.changeMatchSettings(packetData)

    # Get match ID
    matchID = userToken.matchID

    # Make sure the match exists
    if matchID not in glob.matches.matches:
        return

    # Host check
    with glob.matches.matches[matchID] as match:
        if userToken.userID != match.hostUserID:
            return

        # Some dank memes easter egg
        memeTitles = [
            "RWC 2020",
            "Fokabot is a duck",
            "Dank memes",
            "1337ms Ping",
            "Iscriviti a Xenotoze",
            "...e i marò?",
            "Superman dies",
            "The brace is on fire",
            "print_foot()",
            "#FREEZEBARKEZ",
            "Ripple devs are actually cats",
            "Thank Mr Shaural",
            "NEVER GIVE UP",
            "T I E D W I T H U N I T E D",
            "HIGHEST HDHR LOBBY OF ALL TIME",
            "This is gasoline and I set myself on fire",
            "Everyone is cheating apparently",
            "Kurwa mac",
            "TATOE",
            "This is not your drama landfill.",
            "I like cheese",
            "NYO IS NOT A CAT HE IS A DO(N)G",
            "Datingu startuato"
        ]

        # Set match name
        match.matchName = packetData["matchName"] if packetData["matchName"] != "meme" else random.choice(memeTitles)

        # Update match settings
        match.inProgress = packetData["inProgress"]
        if packetData["matchPassword"] != "":
            match.matchPassword = generalUtils.stringMd5(packetData["matchPassword"])
        else:
            match.matchPassword = ""
        match.beatmapName = packetData["beatmapName"]
        match.beatmapID = packetData["beatmapID"]
        match.hostUserID = packetData["hostUserID"]
        match.gameMode = packetData["gameMode"]

        oldBeatmapMD5 = match.beatmapMD5
        oldMods = match.mods
        oldMatchTeamType = match.matchTeamType

        match.mods = packetData["mods"]
        match.beatmapMD5 = packetData["beatmapMD5"]
        match.matchScoringType = packetData["scoringType"]
        match.matchTeamType = packetData["teamType"]
        match.matchModMode = packetData["freeMods"]

        # Reset ready if needed
        if oldMods != match.mods or oldBeatmapMD5 != match.beatmapMD5:
            match.resetReady()

        # Reset mods if needed
        if match.matchModMode == matchModModes.NORMAL:
            # Reset slot mods if not freeMods
            match.resetMods()
        else:
            # Reset match mods if freemod
            match.mods = 0

        # Initialize teams if team type changed
        if match.matchTeamType != oldMatchTeamType:
            match.initializeTeams()

        # Force no freemods if tag coop
        if match.matchTeamType == matchTeamTypes.TAG_COOP or match.matchTeamType == matchTeamTypes.TAG_TEAM_VS:
            match.matchModMode = matchModModes.NORMAL

        # Send updated settings
        match.sendUpdates()

        # Console output
        log.info("MPROOM{}: Updated room settings".format(match.matchID))
from ..rerequest import TemplateRequest

init_req = TemplateRequest(
    re = r'(http://)?(www\.)?(?P<domain>ur(play)?)\.se/(?P<req_url>.+)',
    encode_vars = lambda v: {
        'req_url': 'http://%(domain)s.se/%(req_url)s' % v
    }
)

hls = {
    'title': 'UR-play',
    'url': 'http://urplay.se/',
    'feed_url': 'http://urplay.se/rss',
    'items': [init_req, TemplateRequest(
        re = r'file_html5":\s?"(?P<final_url>[^"]+)".*?"subtitles":\s?"(?P<subtitles>[^",]*)',
        encode_vars = lambda v: {
            'final_url': ('http://130.242.59.75/%(final_url)s/playlist.m3u8' % v).replace('\\', ''),
            'suffix-hint': 'mp4',
            'subtitles': v.get('subtitles', '').replace('\\', '') % v
        }
    )]
}

rtmp = {
    'items': [init_req, TemplateRequest(
        re = r'file_flash":\s?"(?P<final_url>[^"]+\.(?P<ext>mp[34]))".*?"subtitles":\s?"(?P<subtitles>[^",]*)',
        encode_vars = lambda v: {
            'final_url': ('rtmp://130.242.59.75/ondemand playpath=%(ext)s:/%(final_url)s app=ondemand' % v).replace('\\', ''),
            'suffix-hint': 'flv',
            'rtmpdump-realtime': True,
            'subtitles': v.get('subtitles', '').replace('\\', '') % v
        }
    )]
}

services = [hls, rtmp]
# Copyright (C) 2010 Linaro Limited
#
# Author: Zygmunt Krynicki <zygmunt.krynicki@linaro.org>
#
# This file is part of Launch Control.
#
# Launch Control is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License version 3
# as published by the Free Software Foundation
#
# Launch Control is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Launch Control. If not, see <http://www.gnu.org/licenses/>.

"""
Tests for the Attachment model
"""

from django.contrib.contenttypes import generic
from django.core.files.base import ContentFile
from django.db import models
from django.test import TestCase

from dashboard_app.models import Attachment


class ModelWithAttachments(models.Model):
    """
    Test model that uses attachments
    """
    attachments = generic.GenericRelation(Attachment)

    class Meta:
        # This requires a bit of explanation. Traditionally we could add new
        # models inside test modules and they would be picked up by django and
        # synchronized (created in the test database) as a part of the test
        # provisioning process.
        # Since we started using south, synchronization is no longer occurring
        # for the 'dashboard_app' application. This caused some test failures
        # such as any tests that depended on the existence of this model.
        # As a workaround we artificially "stick" this model into the only
        # application that we can count on to exist _and_ not use south as well
        # -- that is south itself.
        # This way the test model gets synchronized when south is synchronized
        # and all the test code below works as expected.
        app_label = "south"


class AttachmentTestCase(TestCase):
    _CONTENT = "text"
    _FILENAME = "filename"

    def setUp(self):
        self.obj = ModelWithAttachments.objects.create()

    def test_attachment_can_be_added_to_models(self):
        attachment = self.obj.attachments.create(
            content_filename=self._FILENAME, content=None)
        self.assertEqual(attachment.content_object, self.obj)

    def test_attachment_can_be_accessed_via_model(self):
        self.obj.attachments.create(
            content_filename=self._FILENAME, content=None)
        self.assertEqual(self.obj.attachments.count(), 1)
        retrieved_attachment = self.obj.attachments.all()[0]
        self.assertEqual(retrieved_attachment.content_object, self.obj)

    def test_attachment_stores_data(self):
        attachment = self.obj.attachments.create(
            content_filename=self._FILENAME, content=None)
        attachment.content.save(
            self._FILENAME,
            ContentFile(self._CONTENT))
        self.assertEqual(attachment.content_filename, self._FILENAME)
        attachment.content.open()
        try:
            self.assertEqual(attachment.content.read(), self._CONTENT)
        finally:
            attachment.content.close()
            attachment.content.delete(save=False)

    def test_unicode(self):
        obj = Attachment(content_filename="test.json")
        self.assertEqual(unicode(obj), "test.json")
#!/usr/bin/env python3

import unittest

from tests import testfunctions
from dftintegrate.fourier import vaspdata


class TestExtractingVASPDataToDatFiles(unittest.TestCase, testfunctions.TestFunctions):

    def setUp(self):
        print('Testing extracting VASP data to .dat files...')
        self.cases = [str(x) for x in range(1, 3)]
        self.root = './tests/fourier/extractvaspdata/'

    def test_runtestcases(self):
        for case in self.cases:
            print('  Testing case ' + case + '...')
            vaspdata.VASPData(self.root + 'tocheck/test' + case)

            kpts_eigenvals_ans = self.readfile(case, 'answer', 'kpts_eigenvals')
            kpts_eigenvals_tocheck = self.readfile(case, 'tocheck', 'kpts_eigenvals')
            self.assertEqual(kpts_eigenvals_ans, kpts_eigenvals_tocheck,
                             msg='kpts_eigenvals case ' + case)

            symops_trans_ans = self.readfile(case, 'answer', 'symops_trans')
            symops_trans_tocheck = self.readfile(case, 'tocheck', 'symops_trans')
            self.assertEqual(symops_trans_ans, symops_trans_tocheck,
                             msg='symops_trans case ' + case)

            kmax_ans = self.readfile(case, 'answer', 'kmax')
            kmax_tocheck = self.readfile(case, 'tocheck', 'kmax')
            self.assertEqual(kmax_ans, kmax_tocheck, msg='kmax case ' + case)
"""Solve the Project Euler problems using functional Python. https://projecteuler.net/archives """ from importlib import import_module from os import listdir from os.path import abspath, dirname from re import match SOLVED = set(
int(m.group(1)) for f in listdir(abspath(dirname(__file__))) for m in (match(r"^p(\d{3})\.py$", f),) if m ) def compute(problem: int): """Compute the a
nswer to problem `problem`.""" assert problem in SOLVED, "Problem currently unsolved." module = import_module("euler.p{:03d}".format(problem)) return module.compute()
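# Illustrative usage sketch (an assumption, not part of the source): calling
# `compute` for a problem whose solution module exists, e.g. a hypothetical
# euler/p001.py that provides its own `compute()`:
#
#   >>> from euler import compute
#   >>> compute(1)  # imports euler.p001 and returns p001.compute()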
import random

import pymel.core as pm

from impress import models, register


def randomTransform(
        translate=False, translateAmount=1.0, translateAxis=(False, False, False),
        rotate=False, rotateAmount=1.0, rotateAxis=(False, False, False),
        scale=False, scaleAmount=1.0, scaleAxis=(False, False, False)):
    """
    Transforms selected objects with random values.
    """
    objects = pm.ls(selection=True, type='transform')
    assert len(objects), 'randomTransform requires at least 1 selected transform object.'

    for object in objects:
        if translate:
            offset = map(lambda axis: random.uniform(-translateAmount, translateAmount) * float(axis), translateAxis)
            object.setTranslation(offset, relative=True)
        if rotate:
            offset = map(lambda axis: random.uniform(-rotateAmount, rotateAmount) * float(axis), rotateAxis)
            object.setRotation(offset, relative=True)
        if scale:
            offset = map(lambda axis: 1 + (random.uniform(-scaleAmount, scaleAmount) * float(axis)), scaleAxis)
            object.setScale(offset)

    print '# Results: %i objects randomized. #' % len(objects)


class RandomTransformOptions(models.OptionModel):
    translate = models.CheckBox(default=1, ann='about the checkbox')
    translateAmount = models.FloatSlider(default=1, precision=3, requires=(translate, 1))
    translateAxis = models.CheckBox(labels=['X', 'Y', 'Z'], default=[1, 1, 1], requires=(translate, 1))
    sep1 = models.Separator(style='in', height=14)
    rotate = models.CheckBox(default=1, ann='about the checkbox')
    rotateAmount = models.FloatSlider(default=1, precision=3, requires=(rotate, 1))
    rotateAxis = models.CheckBox(labels=['X', 'Y', 'Z'], default=[1, 1, 1], requires=(rotate, 1))
    sep2 = models.Separator(style='in', height=14)
    scale = models.CheckBox(default=1, ann='about the checkbox')
    scaleAmount = models.FloatSlider(default=1, precision=3, requires=(scale, 1))
    scaleAxis = models.CheckBox(labels=['X', 'Y', 'Z'], default=[1, 1, 1], requires=(scale, 1))

    class Meta:
        button_label = 'Randomize'


performRandomTransform = register.PerformCommand(randomTransform, RandomTransformOptions)

performRandomTransform(1)
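# Illustrative usage sketch (not in the original file): calling the function
# directly inside a Maya session with pymel loaded. The cube and the argument
# values below are assumptions for demonstration.
#
#   pm.polyCube()  # create (and select) something to randomize
#   randomTransform(translate=True, translateAmount=2.0,
#                   translateAxis=(True, False, True))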
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import absolute_import, division, print_function

import six

from cryptography import utils


def generate_parameters(key_size, backend):
    return backend.generate_dsa_parameters(key_size)


def generate_private_key(key_size, backend):
    return backend.generate_dsa_private_key_and_parameters(key_size)


def _check_dsa_parameters(parameters):
    if utils.bit_length(parameters.p) not in [1024, 2048, 3072]:
        raise ValueError("p must be exactly 1024, 2048, or 3072 bits long")
    if utils.bit_length(parameters.q) not in [160, 256]:
        raise ValueError("q must be exactly 160 or 256 bits long")

    if not (1 < parameters.g < parameters.p):
        raise ValueError("g, p don't satisfy 1 < g < p.")


def _check_dsa_private_numbers(numbers):
    parameters = numbers.public_numbers.parameter_numbers
    _check_dsa_parameters(parameters)

    if numbers.x <= 0 or numbers.x >= parameters.q:
        raise ValueError("x must be > 0 and < q.")

    if numbers.public_numbers.y != pow(parameters.g, numbers.x, parameters.p):
        raise ValueError("y must be equal to (g ** x % p).")


class DSAParameterNumbers(object):
    def __init__(self, p, q, g):
        if (
            not isinstance(p, six.integer_types) or
            not isinstance(q, six.integer_types) or
            not isinstance(g, six.integer_types)
        ):
            raise TypeError(
                "DSAParameterNumbers p, q, and g arguments must be integers."
            )

        self._p = p
        self._q = q
        self._g = g

    p = utils.read_only_property("_p")
    q = utils.read_only_property("_q")
    g = utils.read_only_property("_g")

    def parameters(self, backend):
        return backend.load_dsa_parameter_numbers(self)


class DSAPublicNumbers(object):
    def __init__(self, y, parameter_numbers):
        if not isinstance(y, six.integer_types):
            raise TypeError("DSAPublicNumbers y argument must be an integer.")

        if not isinstance(parameter_numbers, DSAParameterNumbers):
            raise TypeError(
                "parameter_numbers must be a DSAParameterNumbers instance."
            )

        self._y = y
        self._parameter_numbers = parameter_numbers

    y = utils.read_only_property("_y")
    parameter_numbers = utils.read_only_property("_parameter_numbers")

    def public_key(self, backend):
        return backend.load_dsa_public_numbers(self)


class DSAPrivateNumbers(object):
    def __init__(self, x, public_numbers):
        if not isinstance(x, six.integer_types):
            raise TypeError("DSAPrivateNumbers x argument must be an integer.")

        if not isinstance(public_numbers, DSAPublicNumbers):
            raise TypeError(
                "public_numbers must be a DSAPublicNumbers instance."
            )
        self._public_numbers = public_numbers
        self._x = x

    x = utils.read_only_property("_x")
    public_numbers = utils.read_only_property("_public_numbers")

    def private_key(self, backend):
        return backend.load_dsa_private_numbers(self)
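# Illustrative sketch (not part of the module): how the numbers classes above
# compose. The integers are tiny toys, NOT valid DSA parameters (they would
# fail _check_dsa_parameters); `backend` is assumed to be a cryptography
# backend instance supplied by the caller.
#
#   params = DSAParameterNumbers(p=23, q=11, g=4)
#   pub = DSAPublicNumbers(y=8, parameter_numbers=params)
#   priv = DSAPrivateNumbers(x=6, public_numbers=pub)
#   key = priv.private_key(backend)  # requires a real backend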
###############################################################################
# Copyright 2016 - Climate Research Division
#                  Environment and Climate Change Canada
#
# This file is part of the "EC-CAS diags" package.
#
# "EC-CAS diags" is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# "EC-CAS diags" is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with "EC-CAS diags". If not, see <http://www.gnu.org/licenses/>.
###############################################################################


from .zonalmean import ZonalMean as Zonal
from .vinterp import VInterp
from . import TimeVaryingDiagnostic


class ZonalMean(Zonal, VInterp, TimeVaryingDiagnostic):
    """
    Zonal mean (or standard deviation) of a field, animated in time.
    """

    def __str__(self):
        return 'zonal' + self.typestat + '_' + self.zaxis

    def do(self, inputs):
        from .movie import ZonalMovie

        prefix = '_'.join(inp.name for inp in inputs) + '_zonal' + self.typestat + '_' + self.fieldname + '_on_' + self.zaxis + self.suffix + self.end_suffix
        title = 'Zonal %s %s (in %s)' % (self.typestat, self.fieldname, self.units)
        aspect_ratio = 1.0
        shape = (1, len(inputs))

        subtitles = [inp.title for inp in inputs]
        fields = [inp.datasets[0].vars[0] for inp in inputs]
        cmaps = [inp.cmap for inp in inputs]
        cap_extremes = [getattr(inp, 'cap_extremes', False) for inp in inputs]

        movie = ZonalMovie(fields, title=title, subtitles=subtitles,
                           shape=shape, aspect_ratio=aspect_ratio,
                           cmaps=cmaps, cap_extremes=cap_extremes)

        movie.save(outdir=self.outdir, prefix=prefix)


from . import table
table['zonal-movie'] = ZonalMean