from django.conf.urls import url

from . import views

app_name = 'repo'
urlpatterns = [
    url(r'^$', views.home, name='home'),
    url(r'^home/$', views.home, name='home'),
    url(r'^library/$', views.library, name='library'),
    url(r'^login/$', views.login, name='login'),
    url(r'^register/$', views.register, name='register'),
    # Fixed: the named group needs enclosing parentheses to capture 'form'.
    url(r'^results/(?P<form>[A-Za-z]+)/$', views.results, name='results'),
    url(r'^(?P<sn>[-\/\d\w]{5,100})/borrow/$', views.borrow, name='borrow'),
    #url(r'^(?P<sn>[.\D\d.]+)/borrow/$', views.borrow, name='borrow'),
]
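# Because the URLconf above sets app_name = 'repo' and names every pattern,
# views and templates can reverse these routes instead of hard-coding paths.
# A hedged usage sketch; the view function and the 'Books' value for the
# <form> group are illustrative, not from the original code.
from django.shortcuts import redirect
from django.urls import reverse


def after_search(request):
    # Builds '/results/Books/' from the named, namespaced pattern.
    results_url = reverse('repo:results', kwargs={'form': 'Books'})
    return redirect(results_url)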
self.description = "Backup file relocation" lp1 = pmpkg("bash") lp1.files = ["etc/profile*"] lp1.backup = ["etc/profile"] self.addpkg2db("local", lp1) p1 = pmpkg("bash", "1.0-2") self.addpkg(p1) lp2 = pmpkg("filesystem
") self.addpkg2db("local", lp2) p2 = pmpkg("filesystem", "1.0-2") p2.files = ["etc/profile**"] p2.backup = ["etc/profile"] p
2.depends = [ "bash" ] self.addpkg(p2) self.args = "-U %s" % " ".join([p.filename() for p in (p1, p2)]) self.filesystem = ["etc/profile"] self.addrule("PACMAN_RETCODE=0") self.addrule("PKG_VERSION=bash|1.0-2") self.addrule("PKG_VERSION=filesystem|1.0-2") self.addrule("!FILE_PACSAVE=etc/profile") self.addrule("FILE_PACNEW=etc/profile") self.addrule("FILE_EXIST=etc/profile")
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
import logging

from odoo import api, SUPERUSER_ID

_logger = logging.getLogger(__name__)


def post_init_hook(cr, registry):
    """Create a payment group for every existing payment."""
    env = api.Environment(cr, SUPERUSER_ID, {})
    # payments = env['account.payment'].search(
    #     [('payment_type', '!=', 'transfer')])
    # On v10, when reconciling from statements, if no partner is chosen then
    # a payment is created with no partner. We still make partners mandatory
    # on payment groups, so we don't create payment groups for payments
    # without partner_id.
    payments = env['account.payment'].search(
        [('partner_id', '!=', False)])
    for payment in payments:
        _logger.info('creating payment group for payment %s' % payment.id)
        _state = payment.state in ['sent', 'reconciled'] and 'posted' or payment.state
        _state = _state if _state != 'cancelled' else 'cancel'
        env['account.payment.group'].create({
            'company_id': payment.company_id.id,
            'partner_type': payment.partner_type,
            'partner_id': payment.partner_id.id,
            'payment_date': payment.date,
            'communication': payment.ref,
            'payment_ids': [(4, payment.id, False)],
            'state': _state,
        })
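# For context: Odoo only runs a post_init_hook if the module manifest points
# at it by name. A minimal hedged sketch of the relevant __manifest__.py
# entry; the module name and dependency list are assumptions, not taken from
# the original module.
{
    'name': 'Account Payment Group',
    'depends': ['account'],
    'post_init_hook': 'post_init_hook',
}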
    fill_state = FillState.objects.filter(property=stat.property).first()
    if fill_state is None:
        currently_filled = installation_epoch()
        fill_state = FillState.objects.create(property=stat.property,
                                              end_time=currently_filled,
                                              state=FillState.DONE)
        logger.info("INITIALIZED %s %s", stat.property, currently_filled)
    elif fill_state.state == FillState.STARTED:
        logger.info("UNDO START %s %s", stat.property, fill_state.end_time)
        do_delete_counts_at_hour(stat, fill_state.end_time)
        currently_filled = fill_state.end_time - time_increment
        do_update_fill_state(fill_state, currently_filled, FillState.DONE)
        logger.info("UNDO DONE %s", stat.property)
    elif fill_state.state == FillState.DONE:
        currently_filled = fill_state.end_time
    else:
        raise AssertionError(f"Unknown value for FillState.state: {fill_state.state}.")

    if isinstance(stat, DependentCountStat):
        for dependency in stat.dependencies:
            dependency_fill_time = last_successful_fill(dependency)
            if dependency_fill_time is None:
                logger.warning("DependentCountStat %s run before dependency %s.",
                               stat.property, dependency)
                return
            fill_to_time = min(fill_to_time, dependency_fill_time)

    currently_filled = currently_filled + time_increment
    while currently_filled <= fill_to_time:
        logger.info("START %s %s", stat.property, currently_filled)
        start = time.time()
        do_update_fill_state(fill_state, currently_filled, FillState.STARTED)
        do_fill_count_stat_at_hour(stat, currently_filled, realm)
        do_update_fill_state(fill_state, currently_filled, FillState.DONE)
        end = time.time()
        currently_filled = currently_filled + time_increment
        logger.info("DONE %s (%dms)", stat.property, (end - start) * 1000)

def do_update_fill_state(fill_state: FillState, end_time: datetime, state: int) -> None:
    fill_state.end_time = end_time
    fill_state.state = state
    fill_state.save()

# We assume end_time is valid (e.g. is on a day or hour boundary as appropriate)
# and is timezone aware. It is the caller's responsibility to enforce this!
def do_fill_count_stat_at_hour(stat: CountStat, end_time: datetime,
                               realm: Optional[Realm]=None) -> None:
    start_time = end_time - stat.interval
    if not isinstance(stat, LoggingCountStat):
        timer = time.time()
        assert(stat.data_collector.pull_function is not None)
        rows_added = stat.data_collector.pull_function(stat.property, start_time, end_time, realm)
        logger.info("%s run pull_function (%dms/%sr)",
                    stat.property, (time.time() - timer) * 1000, rows_added)
    do_aggregate_to_summary_table(stat, end_time, realm)

def do_delete_counts_at_hour(stat: CountStat, end_time: datetime) -> None:
    if isinstance(stat, LoggingCountStat):
        InstallationCount.objects.filter(property=stat.property, end_time=end_time).delete()
        if stat.data_collector.output_table in [UserCount, StreamCount]:
            RealmCount.objects.filter(property=stat.property, end_time=end_time).delete()
    else:
        UserCount.objects.filter(property=stat.property, end_time=end_time).delete()
        StreamCount.objects.filter(property=stat.property, end_time=end_time).delete()
        RealmCount.objects.filter(property=stat.property, end_time=end_time).delete()
        InstallationCount.objects.filter(property=stat.property, end_time=end_time).delete()

def do_aggregate_to_summary_table(stat: CountStat, end_time: datetime,
                                  realm: Optional[Realm]=None) -> None:
    cursor = connection.cursor()

    # Aggregate into RealmCount
    output_table = stat.data_collector.output_table
    if realm is not None:
        realm_clause = SQL("AND zerver_realm.id = {}").format(Literal(realm.id))
    else:
        realm_clause = SQL("")

    if output_table in (UserCount, StreamCount):
        realmcount_query = SQL("""
            INSERT INTO analytics_realmcount
                (realm_id, value, property, subgroup, end_time)
            SELECT
                zerver_realm.id, COALESCE(sum({output_table}.value), 0), %(property)s,
                {output_table}.subgroup, %(end_time)s
            FROM zerver_realm
            JOIN {output_table}
            ON
                zerver_realm.id = {output_table}.realm_id
            WHERE
                {output_table}.property = %(property)s AND
                {output_table}.end_time = %(end_time)s
                {realm_clause}
            GROUP BY zerver_realm.id, {output_table}.subgroup
        """).format(
            output_table=Identifier(output_table._meta.db_table),
            realm_clause=realm_clause,
        )
        start = time.time()
        cursor.execute(realmcount_query, {
            'property': stat.property,
            'end_time': end_time,
        })
        end = time.time()
        logger.info(
            "%s RealmCount aggregation (%dms/%sr)",
            stat.property, (end - start) * 1000, cursor.rowcount,
        )

    if realm is None:
        # Aggregate into InstallationCount.  Only run if we just
        # processed counts for all realms.
        #
        # TODO: Add support for updating installation data after
        # changing an individual realm's values.
        installationcount_query = SQL("""
            INSERT INTO analytics_installationcount
                (value, property, subgroup, end_time)
            SELECT
                sum(value), %(property)s, analytics_realmcount.subgroup, %(end_time)s
            FROM analytics_realmcount
            WHERE
                property = %(property)s AND
                end_time = %(end_time)s
            GROUP BY analytics_realmcount.subgroup
        """)
        start = time.time()
        cursor.execute(installationcount_query, {
            'property': stat.property,
            'end_time': end_time,
        })
        end = time.time()
        logger.info(
            "%s InstallationCount aggregation (%dms/%sr)",
            stat.property, (end - start) * 1000, cursor.rowcount,
        )

    cursor.close()

## Utility functions called from outside counts.py ##

# called from zerver/lib/actions.py; should not throw any errors
def do_increment_logging_stat(zerver_object: Union[Realm, UserProfile, Stream],
                              stat: CountStat, subgroup: Optional[Union[str, int, bool]],
                              event_time: datetime, increment: int=1) -> None:
    if not increment:
        return

    table = stat.data_collector.output_table
    if table == RealmCount:
        id_args = {'realm': zerver_object}
    elif table == UserCount:
        id_args = {'realm': zerver_object.realm, 'user': zerver_object}
    else:  # StreamCount
        id_args = {'realm': zerver_object.realm, 'stream': zerver_object}

    if stat.frequency == CountStat.DAY:
        end_time = ceiling_to_day(event_time)
    else:  # CountStat.HOUR
        end_time = ceiling_to_hour(event_time)

    row, created = table.objects.get_or_create(
        property=stat.property, subgroup=subgroup, end_time=end_time,
        defaults={'value': increment}, **id_args)
    if not created:
        row.value = F('value') + increment
        row.save(update_fields=['value'])

def do_drop_all_analytics_tables() -> None:
    UserCount.objects.all().delete()
    StreamCount.objects.all().delete()
    RealmCount.objects.all().delete()
    InstallationCount.objects.all().delete()
    FillState.objects.all().delete()

def do_drop_single_stat(property: str) -> None:
    UserCount.objects.filter(property=property).delete()
    StreamCount.objects.filter(property=property).delete()
    RealmCount.objects.filter(property=property).delete()
    InstallationCount.objects.filter(property=property).delete()
    FillState.objects.filter(property=property).delete()

## DataCollector-level operations ##

QueryFn = Callable[[Dict[str, Composable]], Composable]

def do_pull_by_sql_query(
    property: str,
    start_time: datetime,
    super_user = "bigboss"
    super_pw = hashlib.sha256("ultimatepw").hexdigest()
    admin_user = "antti.admin"
    admin_pw = hashlib.sha256("qwerty1234").hexdigest()
    basic_user = "testuser"
    basic_pw = hashlib.sha256("testuser").hexdigest()
    wrong_pw = "wrong-pw"

    test_course_template_1 = {"template": {
        "data": [
            {"name": "archiveId", "value": 1},
            {"name": "courseCode", "value": "810136P"},
            {"name": "name", "value": "Johdatus tietojenk\u00e4sittelytieteisiin"},
            {"name": "description", "value": "Lorem ipsum"},
            {"name": "inLanguage", "value": "fi"},
            {"name": "creditPoints", "value": 4},
            {"name": "teacherId", "value": 1}]
        }
    }
    test_course_template_2 = {"template": {
        "data": [
            {"name": "archiveId", "value": 1},
            {"name": "courseCode", "value": "810137P"},
            {"name": "name", "value": "Introduction to Information Processing Sciences"},
            {"name": "description", "value": "Aaa Bbbb"},
            {"name": "inLanguage", "value": "en"},
            {"name": "creditPoints", "value": 5},
            {"name": "teacherId", "value": 2}]
        }
    }
    course_resource_url = '/exam_archive/api/archives/1/courses/1/'
    course_resource_not_allowed_url = '/exam_archive/api/archives/2/courses/1/'
    courselist_resource_url = '/exam_archive/api/archives/1/courses/'

    # Set a ready header for authorized admin user
    header_auth = {'Authorization': 'Basic ' +
                   base64.b64encode(super_user + ":" + super_pw)}

    # Define a list of the sample contents of the database, so we can later
    # compare it to the test results

    @classmethod
    def setUpClass(cls):
        print "Testing ", cls.__name__

    def test_user_not_authorized(self):
        '''
        Check that the user is not able to get the course list without
        authenticating.
        '''
        print '(' + self.test_user_not_authorized.__name__ + ')', \
            self.test_user_not_authorized.__doc__
        # Test CourseList/GET
        rv = self.app.get(self.courselist_resource_url)
        self.assertEquals(rv.status_code, 401)
        self.assertEquals(PROBLEMJSON, rv.mimetype)
        # Test CourseList/POST
        rv = self.app.post(self.courselist_resource_url)
        self.assertEquals(rv.status_code, 401)
        self.assertEquals(PROBLEMJSON, rv.mimetype)
        # Test Course/GET
        rv = self.app.get(self.course_resource_url)
        self.assertEquals(rv.status_code, 401)
        self.assertEquals(PROBLEMJSON, rv.mimetype)
        # Test Course/PUT
        rv = self.app.put(self.course_resource_url)
        self.assertEquals(rv.status_code, 401)
        self.assertEquals(PROBLEMJSON, rv.mimetype)
        # Test Course/DELETE (fixed: was issuing a second PUT instead of DELETE)
        rv = self.app.delete(self.course_resource_url)
        self.assertEquals(rv.status_code, 401)
        self.assertEquals(PROBLEMJSON, rv.mimetype)
        # Try to Course/POST when not admin or super user
        rv = self.app.post(self.courselist_resource_url,
                           headers={'Authorization': 'Basic ' +
                                    base64.b64encode(self.basic_user + ":" + self.basic_pw)})
        self.assertEquals(rv.status_code, 403)
        self.assertEquals(PROBLEMJSON, rv.mimetype)
        # Try to delete course, when not admin or super user
        rv = self.app.delete(self.course_resource_url,
                             headers={'Authorization': 'Basic ' +
                                      base64.b64encode(self.basic_user + ":" + self.basic_pw)})
        self.assertEquals(rv.status_code, 403)
        self.assertEquals(PROBLEMJSON, rv.mimetype)
        # Try to get Course list as basic user from unallowed archive
        rv = self.app.get(self.course_resource_not_allowed_url,
                          headers={'Authorization': 'Basic ' +
                                   base64.b64encode(self.basic_user + ":" + self.basic_pw)})
        self.assertEquals(rv.status_code, 403)
        self.assertEquals(PROBLEMJSON, rv.mimetype)
        # Try to get Course list as super user with wrong password
        rv = self.app.get(self.courselist_resource_url,
                          headers={'Authorization': 'Basic ' +
                                   base64.b64encode(self.super_user + ":" + self.wrong_pw)})
        self.assertEquals(rv.status_code, 401)
        self.assertEquals(PROBLEMJSON, rv.mimetype)

    def test_user_authorized(self):
        '''
        Check that an authenticated user is able to get the course list.
        '''
        print '(' + self.test_user_authorized.__name__ + ')', \
            self.test_user_authorized.__doc__
        # Try to get Course list as basic user from the correct archive
        rv = self.app.get(self.course_resource_url,
                          headers={'Authorization': 'Basic ' +
                                   base64.b64encode(self.basic_user + ":" + self.basic_pw)})
        self.assertEquals(rv.status_code, 200)
        self.assertEquals(COLLECTIONJSON + ";" + COURSE_PROFILE, rv.content_type)
        # User authorized as super user
        rv = self.app.get(self.courselist_resource_url,
                          headers={'Authorization': 'Basic ' +
                                   base64.b64encode(self.super_user + ":" + self.super_pw)})
        self.assertEquals(rv.status_code, 200)
        self.assertEquals(COLLECTIONJSON + ";" + COURSE_PROFILE, rv.content_type)

    def test_course_get(self):
        '''
        Check data consistency of Course/GET and CourseList/GET.
        '''
        print '(' + self.test_course_get.__name__ + ')', \
            self.test_course_get.__doc__
        # Test CourseList/GET
        self._course_get(self.courselist_resource_url)
        # Test single course Course/GET
        self._course_get(self.course_resource_url)

    def _course_get(self, resource_url):
        '''
        Check data consistency of CourseList/GET.
        '''
        # Get all the courses from database
        courses = db.browse_courses(1)
        # Get all the courses from API
        rv = self.app.get(resource_url, headers=self.header_auth)
        self.assertEquals(rv.status_code, 200)
        self.assertEquals(COLLECTIONJSON + ";" + COURSE_PROFILE, rv.content_type)
        input = json.loads(rv.data)
        assert input
        # Go through the data
        data = input['collection']
        items = data['items']
        self.assertEquals(data['href'], resource_url)
        self.assertEquals(data['version'], API_VERSION)
        for item in items:
            obj = self._create_dict(item['data'])
            course = db.get_course(obj['courseId'])
            assert self._isIdentical(obj, course)

    def test_course_post(self):
        '''
        Check that a new course can be created.
        '''
        print '(' + self.test_course_post.__name__ + ')', \
            self.test_course_post.__doc__
        resource_url = self.courselist_resource_url
        new_course = self.test_course_template_1.copy()
        # Test CourseList/POST
        rv = self.app.post(resource_url, headers=self.header_auth,
                           data=json.dumps(new_course))
        self.assertEquals(rv.status_code, 201)
        # Post returns the address of newly created resource URL in header,
        # in 'location'. Get the identifier of the just created item, fetch
        # it from database and compare.
        location = rv.location
        location_match = re.match('.*courses/([^/]+)/', location)
        self.assertIsNotNone(location_match)
        new_id = location_match.group(1)
        # Fetch the item from database and set it to course_id_db, and
        # convert the filled post template data above to similar format by
        # replacing the keys with post data attributes.
        course_in_db = db.get_course(new_id)
        course_posted = self._convert(new
import time

seen = set()
import_order = []
elapsed_times = {}
level = 0
parent = None
children = {}

def new_import(name, globals={}, locals={}, fromlist=[]):
    global level, parent
    if name in seen:
        return old_import(name, globals, locals, fromlist)
    seen.add(name)
    import_order.append((name, level, parent))
    t1 = time.time()
    old_parent = parent
    parent = name
    level += 1
    module = old_import(name, globals, locals, fromlist)
    level -= 1
    parent = old_parent
    t2 = time.time()
    elapsed_times[name] = t2 - t1
    return module

old_import = __builtins__.__import__
__builtins__.__import__ = new_import

from sympy import *

parents = {}
is_parent = {}
for name, level, parent in import_order:
    parents[name] = parent
    is_parent[parent] = True

print "== Tree =="
for name, level, parent in import_order:
    print "%s%s: %.3f (%s)" % (" " * level, name, elapsed_times.get(name, 0), parent)
print "\n"
print "== Slowest (including children) =="
slowest = sorted((t, name) for (name, t) in elapsed_times.items())[-50:]
for elapsed_time, name in slowest[::-1]:
    print "%.3f %s (%s)" % (elapsed_time, name, parents[name])
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults


class DummyOperator(BaseOperator):
    """
    Operator that does literally nothing. It can be used to group tasks in a
    DAG.
    """

    ui_color = '#e8f7e4'

    @apply_defaults
    def __init__(self, *args, **kwargs) -> None:
        super().__init__(*args, **kwargs)

    def execute(self, context):
        pass
#!/usr/bin/python
'''
Example of zmq client. Can be used to record test data on remote PC
Nacho Mas January-2017
'''
import sys
import zmq
import time
import json
from config import *

# Socket to talk to server
context = zmq.Context()
socket = context.socket(zmq.SUB)
#socket.setsockopt(zmq.CONFLATE, 1)
socket.connect("tcp://cronostamper:%s" % zmqShutterPort)
topicfilter = ShutterFlange
socket.setsockopt(zmq.SUBSCRIBE, topicfilter)

# Process
while True:
    topic, msg = demogrify(socket.recv())
    print "%f" % msg['unixUTC']
    #time.sleep(5)
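# demogrify() arrives via 'from config import *' and is not shown here. A
# common pyzmq pattern is to prefix a JSON payload with the topic string; a
# hedged sketch of what such a helper might look like, inferred only from the
# call site above (the name, framing and separator are assumptions):
def demogrify(raw):
    '''Split a "topic {json}" message into (topic, dict).'''
    topic, _, payload = raw.partition(' ')
    return topic, json.loads(payload)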
import asyncio
import logging
import os
import stat
import sys
from functools import partial
from pathlib import Path
from platform import system
from shutil import rmtree, which
from subprocess import CalledProcessError
from sys import version_info
from tempfile import TemporaryDirectory
from typing import (
    Any,
    Callable,
    Dict,
    List,
    NamedTuple,
    Optional,
    Sequence,
    Tuple,
    Union,
)
from urllib.parse import urlparse

import click


TEN_MINUTES_SECONDS = 600
WINDOWS = system() == "Windows"
BLACK_BINARY = "black.exe" if WINDOWS else "black"
GIT_BINARY = "git.exe" if WINDOWS else "git"
LOG = logging.getLogger(__name__)


# Windows needs a ProactorEventLoop if you want to exec subprocesses
# Starting with 3.8 this is the default - can remove when Black >= 3.8
# mypy only respects sys.platform if directly in the evaluation
# https://mypy.readthedocs.io/en/latest/common_issues.html#python-version-and-system-platform-checks  # noqa: B950
if sys.platform == "win32":
    asyncio.set_event_loop(asyncio.ProactorEventLoop())


class Results(NamedTuple):
    stats: Dict[str, int] = {}
    failed_projects: Dict[str, CalledProcessError] = {}


async def _gen_check_output(
    cmd: Sequence[str],
    timeout: float = TEN_MINUTES_SECONDS,
    env: Optional[Dict[str, str]] = None,
    cwd: Optional[Path] = None,
    stdin: Optional[bytes] = None,
) -> Tuple[bytes, bytes]:
    process = await asyncio.create_subprocess_exec(
        *cmd,
        stdin=asyncio.subprocess.PIPE,
        stdout=asyncio.subprocess.PIPE,
        stderr=asyncio.subprocess.STDOUT,
        env=env,
        cwd=cwd,
    )
    try:
        (stdout, stderr) = await asyncio.wait_for(process.communicate(stdin), timeout)
    except asyncio.TimeoutError:
        process.kill()
        await process.wait()
        raise

    # A non-optional timeout was supplied to asyncio.wait_for, guaranteeing
    # a timeout or completed process.  A terminated Python process will have
    # a non-empty returncode value.
    assert process.returncode is not None

    if process.returncode != 0:
        cmd_str = " ".join(cmd)
        raise CalledProcessError(
            process.returncode, cmd_str, output=stdout, stderr=stderr
        )

    return (stdout, stderr)


def analyze_results(project_count: int, results: Results) -> int:
    failed_pct = round(((results.stats["failed"] / project_count) * 100), 2)
    success_pct = round(((results.stats["success"] / project_count) * 100), 2)

    if results.failed_projects:
        click.secho("\nFailed projects:\n", bold=True)

    for project_name, project_cpe in results.failed_projects.items():
        print(f"## {project_name}:")
        print(f" - Returned {project_cpe.returncode}")
        if project_cpe.stderr:
            print(f" - stderr:\n{project_cpe.stderr.decode('utf8')}")
        if project_cpe.stdout:
            print(f" - stdout:\n{project_cpe.stdout.decode('utf8')}")
        print("")

    click.secho("-- primer results 📊 --\n", bold=True)
    click.secho(
        f"{results.stats['success']} / {project_count} succeeded ({success_pct}%) ✅",
        bold=True,
        fg="green",
    )
    click.secho(
        f"{results.stats['failed']} / {project_count} FAILED ({failed_pct}%) 💩",
        bold=bool(results.stats["failed"]),
        fg="red",
    )
    s = "" if results.stats["disabled"] == 1 else "s"
    click.echo(f" - {results.stats['disabled']} project{s} disabled by config")
    s = "" if results.stats["wrong_py_ver"] == 1 else "s"
    click.echo(
        f" - {results.stats['wrong_py_ver']} project{s} skipped due to Python version"
    )
    click.echo(
        f" - {results.stats['skipped_long_checkout']} skipped due to long checkout"
    )

    if results.failed_projects:
        failed = ", ".join(results.failed_projects.keys())
        click.secho(f"\nFailed projects: {failed}\n", bold=True)

    return results.stats["failed"]


def _flatten_cli_args(cli_args: List[Union[Sequence[str], str]]) -> List[str]:
    """Allow a user to put long arguments into a list of strs
    to make the JSON human readable"""
    flat_args = []
    for arg in cli_args:
        if isinstance(arg, str):
            flat_args.append(arg)
            continue

        args_as_str = "".join(arg)
        flat_args.append(args_as_str)

    return flat_args


async def black_run(
    project_name: str,
    repo_path: Optional[Path],
    project_config: Dict[str, Any],
    results: Results,
    no_diff: bool = False,
) -> None:
    """Run Black and record failures"""
    if not repo_path:
        results.stats["failed"] += 1
        results.failed_projects[project_name] = CalledProcessError(
            69, [], f"{project_name} has no repo_path: {repo_path}".encode(), b""
        )
        return

    stdin_test = project_name.upper() == "STDIN"
    cmd = [str(which(BLACK_BINARY))]
    if "cli_arguments" in project_config and project_config["cli_arguments"]:
        cmd.extend(_flatten_cli_args(project_config["cli_arguments"]))
    cmd.append("--check")
    if not no_diff:
        cmd.append("--diff")

    # Workout if we should read in a python file or search from cwd
    stdin = None
    if stdin_test:
        cmd.append("-")
        stdin = repo_path.read_bytes()
    elif "base_path" in project_config:
        cmd.append(project_config["base_path"])
    else:
        cmd.append(".")

    timeout = (
        project_config["timeout_seconds"]
        if "timeout_seconds" in project_config
        else TEN_MINUTES_SECONDS
    )
    with TemporaryDirectory() as tmp_path:
        # Prevent reading top-level user configs by manipulating environment variables
        env = {
            **os.environ,
            "XDG_CONFIG_HOME": tmp_path,  # Unix-like
            "USERPROFILE": tmp_path,  # Windows (changes `Path.home()` output)
        }

        cwd_path = repo_path.parent if stdin_test else repo_path
        try:
            LOG.debug(f"Running black for {project_name}: {' '.join(cmd)}")
            _stdout, _stderr = await _gen_check_output(
                cmd, cwd=cwd_path, env=env, stdin=stdin, timeout=timeout
            )
        except asyncio.TimeoutError:
            results.stats["failed"] += 1
            LOG.error(f"Running black for {repo_path} timed out ({cmd})")
        except CalledProcessError as cpe:
            # TODO: Tune for smarter for higher signal
            # If any other return value than 1 we raise - can disable project in config
            if cpe.returncode == 1:
                if not project_config["expect_formatting_changes"]:
                    results.stats["failed"] += 1
                    results.failed_projects[repo_path.name] = cpe
                else:
                    results.stats["success"] += 1
                return
            elif cpe.returncode > 1:
                results.stats["failed"] += 1
                results.failed_projects[repo_path.name] = cpe
                return

            LOG.error(f"Unknown error with {repo_path}")
            raise

    # If we get here and expect formatting changes something is up
    if project_config["expect_formatting_changes"]:
        results.stats["failed"] += 1
        results.failed_projects[repo_path.name] = CalledProcessError(
            0, cmd, b"Expected formatting changes but didn't get any!", b""
        )
        return

    results.stats["success"] += 1


async def git_checkout_or_rebase(
    work_path: Path,
    project_config: Dict[str, Any],
    rebase: bool = False,
    *,
    depth: int = 1,
) -> Optional[Path]:
    """git Clone project or rebase"""
    git_bin = str(which(GIT_BINARY))
    if not git_bin:
        LOG.error("No git binary found")
        return None

    repo_url_parts = urlparse(project_config["git_clone_url"])
    path_parts = repo_url_parts.path[1:].split("/", maxsplit=1)

    repo_path: Path = work_path / path_parts[1].replace(".git", "")
    cmd = [git_bin, "clone", "--depth", str(depth), project_config["git_clone_url"]]
    cwd = work_path
    if repo_path.exists() and rebase:
        cmd = [git_bin, "pull", "--rebase"]
        cwd = repo_path
    elif repo_path.exists():
        return repo_path

    try:
        _stdou
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Utilities for testing `OperatorPDBase` and related classes."""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import abc

import numpy as np
import six
import tensorflow as tf


@six.add_metaclass(abc.ABCMeta)  # pylint: disable=no-init
class OperatorPDDerivedClassTest(tf.test.TestCase):
  """Tests for derived classes.

  Subclasses should implement every abstractmethod, and this will enable all
  test methods to work.
  """

  def setUp(self):
    self._rng = np.random.RandomState(42)

  def _compare_results(self, expected, actual, static_shapes=True, atol=1e-5):
    """Compare expected value (array) to the actual value (Tensor)."""
    if static_shapes:
      self.assertEqual(expected.shape, actual.get_shape())
    self.assertAllClose(expected, actual.eval(), atol=atol)

  @abc.abstractmethod
  def _build_operator_and_mat(self, batch_shape, k, dtype=np.float64):
    """Build a batch matrix and an Operator that should have similar behavior.

    Every operator represents a (batch) matrix.  This method returns both
    together, and is used e.g. by tests.

    Args:
      batch_shape: List-like of Python integers giving batch shape of
        operator.
      k: Python integer, the event size.
      dtype: Numpy dtype.  Data type of returned array/operator.

    Returns:
      operator: `OperatorPDBase` subclass.
      mat: numpy array representing a (batch) matrix.
    """
    # Create a matrix as a numpy array.  Shape = batch_shape + [k, k].
    # Create an OperatorPDDiag that should have the same behavior as the
    # matrix.  All arguments are convertable to numpy arrays.
    # batch_shape = list(batch_shape)
    # mat_shape = batch_shape + [k, k]
    # return operator, mat
    raise NotImplementedError("Not implemented yet.")

  def testToDense(self):
    with self.test_session():
      for batch_shape in [(), (2, 3,)]:
        for k in [1, 4]:
          for dtype in [np.float32, np.float64]:
            operator, mat = self._build_operator_and_mat(
                batch_shape, k, dtype=dtype)
            self._compare_results(expected=mat, actual=operator.to_dense())

  def testSqrtToDense(self):
    with self.test_session():
      for batch_shape in [(), (2, 3,)]:
        for k in [1, 4]:
          operator, mat = self._build_operator_and_mat(batch_shape, k)
          sqrt = operator.sqrt_to_dense()
          self.assertEqual(mat.shape, sqrt.get_shape())
          # Square roots are not unique, but SS^T should equal mat.  In this
          # case however, we should have S = S^T.
          self._compare_results(
              expected=mat, actual=tf.batch_matmul(sqrt, sqrt))

  def testDeterminants(self):
    with self.test_session():
      for batch_shape in [(), (2, 3,)]:
        for k in [1, 4]:
          operator, mat = self._build_operator_and_mat(batch_shape, k)
          expected_det = tf.matrix_determinant(mat).eval()

          self._compare_results(expected_det, operator.det())
          self._compare_results(np.log(expected_det), operator.log_det())

  def testMatmul(self):
    with self.test_session():
      for batch_shape in [(), (2, 3,)]:
        for k in [1, 4]:
          operator, mat = self._build_operator_and_mat(batch_shape, k)
          # Work with 5 simultaneous systems.  5 is arbitrary.
          x = self._rng.randn(*(batch_shape + (k, 5)))
          self._compare_results(
              expected=tf.batch_matmul(mat, x).eval(),
              actual=operator.matmul(x))

  def testSqrtMatmul(self):
    # Square roots are not unique, but we should have SS^T x = Ax, and in our
    # case, we should have S = S^T, so SSx = Ax.
    with self.test_session():
      for batch_shape in [(), (2, 3,)]:
        for k in [1, 4]:
          operator, mat = self._build_operator_and_mat(batch_shape, k)
          # Work with 5 simultaneous systems.  5 is arbitrary.
          x = self._rng.randn(*(batch_shape + (k, 5)))
          self._compare_results(
              expected=tf.batch_matmul(mat, x).eval(),
              actual=operator.sqrt_matmul(operator.sqrt_matmul(x)))

  def testSolve(self):
    with self.test_session():
      for batch_shape in [(), (2, 3,)]:
        for k in [1, 4]:
          operator, mat = self._build_operator_and_mat(batch_shape, k)
          # Work with 5 simultaneous systems.  5 is arbitrary.
          x = self._rng.randn(*(batch_shape + (k, 5)))
          self._compare_results(
              expected=tf.matrix_solve(mat, x).eval(),
              actual=operator.solve(x))

  def testSqrtSolve(self):
    # Square roots are not unique, but we should still have
    # S^{-T} S^{-1} x = A^{-1} x.
    # In our case, we should have S = S^T, so then S^{-1} S^{-1} x = A^{-1} x.
    with self.test_session():
      for batch_shape in [(), (2, 3,)]:
        for k in [1, 4]:
          operator, mat = self._build_operator_and_mat(batch_shape, k)
          # Work with 5 simultaneous systems.  5 is arbitrary.
          x = self._rng.randn(*(batch_shape + (k, 5)))
          self._compare_results(
              expected=tf.matrix_solve(mat, x).eval(),
              actual=operator.sqrt_solve(operator.sqrt_solve(x)))

  def testAddToTensor(self):
    with self.test_session():
      for batch_shape in [(), (2, 3,)]:
        for k in [1, 4]:
          operator, mat = self._build_operator_and_mat(batch_shape, k)
          tensor = tf.ones_like(mat)

          self._compare_results(
              expected=(mat + tensor).eval(),
              actual=operator.add_to_tensor(tensor))
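# A hedged sketch of a concrete subclass, assuming a diagonal operator class;
# the operator_pd_diag import path and OperatorPDDiag name follow the
# tf.contrib layout of that era and are assumptions, not from this file.
class OperatorPDDiagDerivedClassTest(OperatorPDDerivedClassTest):

  def _build_operator_and_mat(self, batch_shape, k, dtype=np.float64):
    from tensorflow.contrib.distributions.python.ops import operator_pd_diag
    batch_shape = list(batch_shape)
    # Random positive diagonal => the (batch) matrix is positive definite.
    diag = self._rng.rand(*(batch_shape + [k])).astype(dtype) + 1.0
    # Dense matrix with `diag` on the diagonal: shape batch_shape + [k, k].
    mat = diag[..., np.newaxis] * np.eye(k, dtype=dtype)
    operator = operator_pd_diag.OperatorPDDiag(diag)
    return operator, mat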
#!/F3/core/tweet_model.py

# A class for representing a tweet.
# Author : Ismail Sunni/@ismailsunni
# Created : 2012-03-30

from db_control import db_conn
from datetime import datetime, timedelta
import preprocess as pp


class tweet_model:
    '''A class for representing a tweet.'''

    def __init__(self, id, time, text, sentiment=0, negation=0):
        '''Standard __init__ function'''
        self.id = id
        self.time = time
        self.text = text
        self.negation = negation
        self.sentiment = sentiment
        self.parsed_word = []
        self.parsed = False
        self.post_parsed_word = []
        # This attribute indicates that parsed_word has been preprocessed
        # again.
        self.post_parsed = False

    def print_tweet(self):
        '''Print procedure'''
        import unicodedata
        print unicodedata.normalize('NFKD', self.text.decode('latin-1')).encode('ascii', 'ignore'), self.sentiment

    def get_normal_text(self):
        '''Return content of the tweet in normal form.'''
        import unicodedata
        return unicodedata.normalize('NFKD', self.text.decode('latin-1')).encode('ascii', 'ignore')

    def preprocess(self, dict_param=None):
        '''Preprocess a tweet and save the result in parsed_word and negation.'''
        self.negation, preprocesssed_text = pp.preprocess_tweet(self.text, dict_param)
        self.parsed_word = preprocesssed_text.split(' ')
        self.parsed = True
        temp_post_parsed_word = pp.postparsed_text(preprocesssed_text)
        self.post_parsed_word = temp_post_parsed_word.split(' ')
        self.post_parsed = True


# public function
def get_dev_data():
    '''Retrieve data from database for training and test as list of tweet object.'''
    db = db_conn()
    tweets = []
    query = "SELECT * FROM " + db.test_table + " WHERE `dev_tweet` = 1"
    retval = db.read(query)
    for row in retval:
        id = row[0]
        time = row[2]
        text = row[1]
        sentiment = row[3]
        negation = row[4]
        tweets.append(tweet_model(id, time, text, sentiment, negation))
    return tweets


def get_test_data(keyword="", start_time=None, end_time=None):
    '''Retrieve data from database for training and test as list of tweet object.'''
    db = db_conn()
    tweets = []
    query = "SELECT * FROM " + db.test_table
    where = " WHERE `tweet_text` LIKE '%" + keyword + "%' AND `dev_tweet` != 1"
    if start_time != None:
        where += " AND `created_at` >= '" + start_time.__str__() + "'"
    if end_time != None:
        where += " AND `created_at` <= '" + end_time.__str__() + "'"
    order = " ORDER BY `created_at` ASC"
    retval = db.read(query + where)
    for row in retval:
        id = row[0]
        time = row[2]
        text = row[1]
        sentiment = row[3]
        negation = row[4]
        tweets.append(tweet_model(id, time, text, sentiment, negation))
    return tweets


def get_test_data_by_duration(keyword="", start_time=None, end_time=None, duration_hour=1):
    '''Return test data divided by duration.'''
    duration_second = duration_hour * 3600
    delta_duration = timedelta(0, duration_second)
    cur_time = start_time
    retval = []
    dur_times = []
    while (cur_time + delta_duration < end_time):
        retval.append(get_test_data(keyword, cur_time, cur_time + delta_duration))
        dur_times.append(cur_time)
        cur_time += delta_duration
    if (cur_time < end_time):
        dur_times.append(cur_time)
        retval.append(get_test_data(keyword, cur_time, end_time))
    return retval, dur_times


# main function for testing only
if __name__ == '__main__':
    keyword = "foke"
    start_time = datetime.strptime("10-4-2012 18:00:00", '%d-%m-%Y %H:%M:%S')
    end_time = datetime.strptime("18-4-2012 12:00:00", '%d-%m-%Y %H:%M:%S')
    duration_hour = 6
    retval, dur_times = get_test_data_by_duration(keyword, start_time, end_time, duration_hour)
    num_tweet = 0
    for ret in retval:
        print len(ret)
        num_tweet += len(ret)
    print num_tweet

    # write in excel
    from xlwt import Workbook
    from tempfile import TemporaryFile
    import util

    book = Workbook()
    try:
        sheet_idx = 1
        for list_tweet in retval:
            activeSheet = book.add_sheet(str(sheet_idx))
            activeSheet.write(0, 0, dur_times[sheet_idx - 1].__str__())
            i = 1
            activeSheet.write(i, 0, 'No')
            activeSheet.write(i, 1, 'Tweet Id')
            activeSheet.write(i, 2, 'Created')
            activeSheet.write(i, 3, 'Text')
            i += 1
            for tweet in list_tweet:
                activeSheet.write(i, 0, str(i - 1))
                activeSheet.write(i, 1, str(tweet.id))
                activeSheet.write(i, 2, tweet.time.__str__())
                activeSheet.write(i, 3, pp.normalize_character(tweet.text))
                i += 1
            sheet_idx += 1
        book.save('output.xls')
        book.save(TemporaryFile())
    except Exception, e:
        util.debug(str(e))
    print 'fin'
# -*- coding: utf-8 -*-

from orator.orm import Factory, Model, belongs_to, has_many
from orator.connections import SQLiteConnection
from orator.connectors import SQLiteConnector

from .. import OratorTestCase, mock


class FactoryTestCase(OratorTestCase):
    @classmethod
    def setUpClass(cls):
        Model.set_connection_resolver(DatabaseConnectionResolver())

    @classmethod
    def tearDownClass(cls):
        Model.unset_connection_resolver()

    def connection(self):
        return Model.get_connection_resolver().connection()

    def schema(self):
        return self.connection().get_schema_builder()

    def setUp(self):
        with self.schema().create("users") as table:
            table.increments("id")
            table.string("name").unique()
            table.string("email").unique()
            table.boolean("admin").default(True)
            table.timestamps()

        with self.schema().create("posts") as table:
            table.increments("id")
            table.integer("user_id")
            table.string("title").unique()
            table.text("content").unique()
            table.timestamps()

            table.foreign("user_id").references("id").on("users")

        self.factory = Factory()

        @self.factory.define(User)
        def users_factory(faker):
            return {"name": faker.name(), "email": faker.email(), "admin": False}

        @self.factory.define(User, "admin")
        def users_factory(faker):
            attributes = self.factory.raw(User)
            attributes.update({"admin": True})
            return attributes

        @self.factory.define(Post)
        def posts_factory(faker):
            return {"title": faker.sentence(), "content": faker.text()}

    def tearDown(self):
        self.schema().drop("posts")
        self.schema().drop("users")

    def test_factory_make(self):
        user = self.factory.make(User)

        self.assertIsInstance(user, User)
        self.assertIsNotNone(user.name)
        self.assertIsNotNone(user.email)
        self.assertIsNone(User.where("name", user.name).first())

    def test_factory_create(self):
        user = self.factory.create(User)

        self.assertIsInstance(user, User)
        self.assertIsNotNone(user.name)
        self.assertIsNotNone(user.email)
        self.assertIsNotNone(User.where("name", user.name).first())

    def test_factory_create_with_attributes(self):
        user = self.factory.create(User, name="foo", email="foo@bar.com")

        self.assertIsInstance(user, User)
        self.assertEqual("foo", user.name)
        self.assertEqual("foo@bar.com", user.email)
        self.assertIsNotNone(User.where("name", user.name).first())

    def test_factory_create_with_relations(self):
        users = self.factory.build(User, 3)
        users = users.create().each(lambda u: u.posts().save(self.factory.make(Post)))

        self.assertEqual(3, len(users))
        self.assertIsInstance(users[0], User)
        self.assertEqual(3, User.count())
        self.assertEqual(3, Post.count())

    def test_factory_call(self):
        user = self.factory(User).create()
        self.assertFalse(user.admin)

        users = self.factory(User, 3).create()
        self.assertEqual(3, len(users))
        self.assertFalse(users[0].admin)

        admin = self.factory(User, "admin").create()
        self.assertTrue(admin.admin)

        admins = self.factory(User, "admin", 3).create()
        self.assertEqual(3, len(admins))
        self.assertTrue(admins[0].admin)


class User(Model):

    __guarded__ = ["id"]

    @has_many("user_id")
    def posts(self):
        return Post


class Post(Model):

    __guarded__ = []

    @belongs_to("user_id")
    def user(self):
        return User


class DatabaseConnectionResolver(object):

    _connection = None

    def connection(self, name=None):
        if self._connection:
            return self._connection

        self._connection = SQLiteConnection(
            SQLiteConnector().connect({"database": ":memory:"})
        )

        return self._connection

    def get_default_connection(self):
        return "default"

    def set_default_connection(self, name):
        pass
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#

import mock

from oscdebug.tests import base
from oscdebug.v1 import auth


class TestAuthTypeShow(base.TestCommand):

    def setUp(self):
        super(TestAuthTypeShow, self).setUp()

        # Get the command object to test
        self.cmd = auth.ShowAuthType(self.app, None)

    def test_auth_type_show(self):
        arglist = [
            'password',
        ]
        verifylist = [
            ('auth_type', 'password'),
        ]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)

        # DisplayCommandBase.take_action() returns two tuples
        columns, data = self.cmd.take_action(parsed_args)

        collist = ('name', 'options')
        self.assertEqual(collist, columns)
        datalist = (
            'password',
            mock.ANY,
        )
        self.assertEqual(datalist, data)
from UM.Scene.SceneNodeDecorator import SceneNodeDecorator


class GCodeListDecorator(SceneNodeDecorator):
    def __init__(self):
        super().__init__()
        self._gcode_list = []

    def getGCodeList(self):
        return self._gcode_list

    def setGCodeList(self, list):
        self._gcode_list = list
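# A hedged usage sketch of the decorator pattern this class plugs into:
# Uranium scene nodes accept decorators via addDecorator(), after which the
# decorated methods are reachable through callDecoration(). The node setup
# and g-code lines below are illustrative, not from the original file.
from UM.Scene.SceneNode import SceneNode

node = SceneNode()
node.addDecorator(GCodeListDecorator())
node.callDecoration("setGCodeList", ["G28 ; home", "G1 X10 Y10"])
print(node.callDecoration("getGCodeList"))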
from .ica import *
# from .ica_gpu import ica_gpu
        # Add project owner
        self.subproject.project.owners.add(self.second_user())
        notify_merge_failure(
            self.subproject,
            'Failed merge',
            'Error\nstatus'
        )

        # Check mail (second one is for admin)
        self.assertEqual(len(mail.outbox), 5)

    def test_notify_new_string(self):
        notify_new_string(self.get_translation())

        # Check mail
        self.assertEqual(len(mail.outbox), 1)
        self.assertEqual(
            mail.outbox[0].subject,
            '[Weblate] New string to translate in Test/Test - Czech'
        )

    def test_notify_new_translation(self):
        unit = self.get_unit()
        unit2 = self.get_translation().unit_set.get(
            source='Thank you for using Weblate.'
        )
        notify_new_translation(
            unit,
            unit2,
            self.second_user()
        )

        # Check mail
        self.assertEqual(len(mail.outbox), 1)
        self.assertEqual(
            mail.outbox[0].subject,
            '[Weblate] New translation in Test/Test - Czech'
        )

    def test_notify_new_language(self):
        second_user = self.second_user()
        notify_new_language(
            self.subproject,
            Language.objects.filter(code='de'),
            second_user
        )

        # Check mail (second one is for admin)
        self.assertEqual(len(mail.outbox), 2)
        self.assertEqual(
            mail.outbox[0].subject,
            '[Weblate] New language request in Test/Test'
        )

        # Add project owner
        self.subproject.project.owners.add(second_user)
        notify_new_language(
            self.subproject,
            Language.objects.filter(code='de'),
            second_user,
        )

        # Check mail (second one is for admin)
        self.assertEqual(len(mail.outbox), 5)

    def test_notify_new_contributor(self):
        unit = self.get_unit()
        notify_new_contributor(
            unit,
            self.second_user()
        )

        # Check mail
        self.assertEqual(len(mail.outbox), 1)
        self.assertEqual(
            mail.outbox[0].subject,
            '[Weblate] New contributor in Test/Test - Czech'
        )

    def test_notify_new_suggestion(self):
        unit = self.get_unit()
        notify_new_suggestion(
            unit,
            Suggestion.objects.create(
                contentsum=unit.contentsum,
                project=unit.translation.subproject.project,
                language=unit.translation.language,
                target='Foo'
            ),
            self.second_user()
        )

        # Check mail
        self.assertEqual(len(mail.outbox), 1)
        self.assertEqual(
            mail.outbox[0].subject,
            '[Weblate] New suggestion in Test/Test - Czech'
        )

    def test_notify_new_comment(self):
        unit = self.get_unit()
        notify_new_comment(
            unit,
            Comment.objects.create(
                contentsum=unit.contentsum,
                project=unit.translation.subproject.project,
                language=unit.translation.language,
                comment='Foo'
            ),
            self.second_user(),
            ''
        )

        # Check mail
        self.assertEqual(len(mail.outbox), 1)
        self.assertEqual(
            mail.outbox[0].subject,
            '[Weblate] New comment in Test/Test'
        )

    def test_notify_new_comment_report(self):
        unit = self.get_unit()
        notify_new_comment(
            unit,
            Comment.objects.create(
                contentsum=unit.contentsum,
                project=unit.translation.subproject.project,
                language=None,
                comment='Foo'
            ),
            self.second_user(),
            'noreply@weblate.org'
        )

        # Check mail
        self.assertEqual(len(mail.outbox), 2)
        self.assertEqual(
            mail.outbox[0].subject,
            '[Weblate] New comment in Test/Test'
        )
        self.assertEqual(
            mail.outbox[1].subject,
            '[Weblate] New comment in Test/Test'
        )


class CaptchaTest(UnitTestCase):
    def test_decode(self):
        question = '1 + 1'
        timestamp = 1000
        hashed = hash_question(question, timestamp)
        self.assertEqual(
            (question, timestamp),
            unhash_question(hashed)
        )

    def test_tamper(self):
        hashed = hash_question('', 0) + '00'
        self.assertRaises(
            ValueError,
            unhash_question,
            hashed
        )

    def test_invalid(self):
        self.assertRaises(
            ValueError,
            unhash_question,
            ''
        )

    def test_object(self):
        captcha = MathCaptcha('1 * 2')
        self.assertFalse(
            captcha.validate(1)
        )
        self.assertTrue(
            captcha.validate(2)
        )
        restored = MathCaptcha.from_hash(captcha.hashed)
        self.assertEqual(
            captcha.question,
            restored.question
        )
        self.assertRaises(
            ValueError,
            MathCaptcha.from_hash,
            captcha.hashed[:40]
        )

    def test_generate(self):
        '''
        Test captcha generation for every operator.
        '''
        captcha = MathCaptcha()
        for operator in MathCaptcha.operators:
            captcha.operators = (operator,)
            self.assertIn(operator, captcha.generate_question())


class MiddlewareTest(TestCase):
    def view_method(self):
        return 'VIEW'

    def test_disabled(self):
        middleware = RequireLoginMiddleware()
        request = HttpRequest()
        self.assertIsNone(
            middleware.process_view(request, self.view_method, (), {})
        )

    @override_settings(LOGIN_REQUIRED_URLS=(r'/project/(.*)$',))
    def test_protect_project(self):
        middleware = RequireLoginMiddleware()
        request = HttpRequest()
        request.user = User()
        request.META['SERVER_NAME'] = 'server'
        request.META['SERVER_PORT'] = '80'
        # No protection for not protected path
        self.assertIsNone(
            middleware.process_view(request, self.view_method, (), {})
        )
        request.path = '/project/foo/'
        # No protection for protected path and logged in user
        self.assertIsNone(
            middleware.process_view(request, self.view_method, (), {})
        )
        # Protection for protected path and not logged in user
        request.user = AnonymousUser()
        self.assertIsInstance(
            middleware.process_view(request, self.view_method, (), {}),
            HttpResponseRedirect
        )
        # No protection for login and not logged in user
        request.path = '/accounts/login/'
        self.assertIsNone(
            middleware.process_view(request, self.view_method, (), {})
        )


class AvatarTest(ViewTestCase):
    def setUp(self):
        super(AvatarTest, self).setUp()
        self.user.email = 'test@example.com'
        self.user.save()

    def assert_url(self):
        url = avatar.avatar_for_email(self.user.email)
        self.assertEqual(
            'https://seccdn.libravatar.org/avatar/'
            '55502f40dc8b7c769880b10874abc9d0',
            url.split('?')[0]
        )

    def test_avatar_for_email_own(self):
        backup = avatar.HAS_LIBRAVATAR
        try:
            avatar.HAS_LIBRAVATAR = False
            self.assert_url()
        finally:
            avatar.HAS_LIBRAVATAR = backup

    def test_avatar_for_email_libravatar(self):
        if not avatar.HAS_LIBRAVATAR:
            raise SkipTest('Libravatar not installed')
        self.assert_url()

    def test_avatar(self):
        # Real user
        response = self.client.get(
            reverse(
                'user_avatar',
                kwargs={'user': self.user.username, 'size': 32}
            )
        )
        self.assertPNG(response)
        # Test caching
        response = self.client.get(
            reverse(
                'user_avatar',
                kwargs={'user': self.user.username, 's
    s.axes_manager[0].offset = -5
    s.axes_manager[0].scale = 0.01
    poly = hs.model.components1D.Polynomial(order=2, legacy=True)
    poly.coefficients.value = [1, 2, 3]
    poly.coefficients._bounds = ((None, None), (10, 0.0), (None, None))
    poly_dict = poly.as_dictionary(True)
    poly2_dict = convert_to_polynomial(poly_dict)
    poly2 = hs.model.components1D.Polynomial(order=2, legacy=False)
    _ = poly2._load_dictionary(poly2_dict)
    assert poly2.a2.value == 1
    assert poly2.a2._bounds == (None, None)
    assert poly2.a1.value == 2
    assert poly2.a1._bounds == (10, 0.0)
    assert poly2.a0.value == 3


class TestPolynomial:

    def setup_method(self, method):
        s = hs.signals.Signal1D(np.zeros(1024))
        s.axes_manager[0].offset = -5
        s.axes_manager[0].scale = 0.01
        m = s.create_model()
        m.append(hs.model.components1D.Polynomial(order=2, legacy=False))
        coeff_values = (0.5, 2, 3)
        self.m = m
        s_2d = hs.signals.Signal1D(np.arange(1000).reshape(10, 100))
        self.m_2d = s_2d.create_model()
        self.m_2d.append(hs.model.components1D.Polynomial(order=2, legacy=False))
        s_3d = hs.signals.Signal1D(np.arange(1000).reshape(2, 5, 100))
        self.m_3d = s_3d.create_model()
        self.m_3d.append(hs.model.components1D.Polynomial(order=2, legacy=False))
        data = 50 * np.ones(100)
        s_offset = hs.signals.Signal1D(data)
        self.m_offset = s_offset.create_model()
        # If the same component is passed, axes_managers get mixed up and
        # tests sometimes randomly fail.
        for _m in [self.m, self.m_2d, self.m_3d]:
            _m[0].a2.value = coeff_values[0]
            _m[0].a1.value = coeff_values[1]
            _m[0].a0.value = coeff_values[2]

    def test_gradient(self):
        poly = self.m[0]
        assert poly.a2.grad(1) == 1
        assert poly.a1.grad(1) == 1
        assert poly.a0.grad(1) == 1
        assert poly.a2.grad(np.arange(10)).shape == (10,)

    @pytest.mark.parametrize(("only_current", "binned"), TRUE_FALSE_2_TUPLE)
    def test_estimate_parameters(self, only_current, binned):
        self.m.signal.metadata.Signal.binned = binned
        s = self.m.as_signal(parallel=False)
        s.metadata.Signal.binned = binned
        p = hs.model.components1D.Polynomial(order=2, legacy=False)
        p.estimate_parameters(s, None, None, only_current=only_current)
        assert_allclose(p.a2.value, 0.5)
        assert_allclose(p.a1.value, 2)
        assert_allclose(p.a0.value, 3)

    def test_zero_order(self):
        m = self.m_offset
        with pytest.raises(ValueError):
            m.append(hs.model.components1D.Polynomial(order=0, legacy=False))

    def test_2d_signal(self):
        # This code should run smoothly, any exceptions should trigger failure
        s = self.m_2d.as_signal(parallel=False)
        model = Model1D(s)
        p = hs.model.components1D.Polynomial(order=2, legacy=False)
        model.append(p)
        p.estimate_parameters(s, 0, 100, only_current=False)
        np.testing.assert_allclose(p.a2.map['values'], 0.5)
        np.testing.assert_allclose(p.a1.map['values'], 2)
        np.testing.assert_allclose(p.a0.map['values'], 3)

    def test_3d_signal(self):
        # This code should run smoothly, any exceptions should trigger failure
        s = self.m_3d.as_signal(parallel=False)
        model = Model1D(s)
        p = hs.model.components1D.Polynomial(order=2, legacy=False)
        model.append(p)
        p.estimate_parameters(s, 0, 100, only_current=False)
        np.testing.assert_allclose(p.a2.map['values'], 0.5)
        np.testing.assert_allclose(p.a1.map['values'], 2)
        np.testing.assert_allclose(p.a0.map['values'], 3)

    def test_function_nd(self):
        s = self.m.as_signal(parallel=False)
        s = hs.stack([s] * 2)
        p = hs.model.components1D.Polynomial(order=2, legacy=False)
        p.estimate_parameters(s, None, None, only_current=False)
        axis = s.axes_manager.signal_axes[0]
        assert_allclose(p.function_nd(axis.axis), s.data)


class TestGaussian:

    def setup_method(self, method):
        s = hs.signals.Signal1D(np.zeros(1024))
        s.axes_manager[0].offset = -5
        s.axes_manager[0].scale = 0.01
        m = s.create_model()
        m.append(hs.model.components1D.Gaussian())
        m[0].sigma.value = 0.5
        m[0].centre.value = 1
        m[0].A.value = 2
        self.m = m

    @pytest.mark.parametrize(("only_current", "binned"), TRUE_FALSE_2_TUPLE)
    def test_estimate_parameters_binned(self, only_current, binned):
        self.m.signal.metadata.Signal.binned = binned
        s = self.m.as_signal(parallel=False)
        assert s.metadata.Signal.binned == binned
        g = hs.model.components1D.Gaussian()
        g.estimate_parameters(s, None, None, only_current=only_current)
        assert_allclose(g.sigma.value, 0.5)
        assert_allclose(g.A.value, 2)
        assert_allclose(g.centre.value, 1)

    @pytest.mark.parametrize("binned", (True, False))
    def test_function_nd(self, binned):
        self.m.signal.metadata.Signal.binned = binned
        s = self.m.as_signal(parallel=False)
        s2 = hs.stack([s] * 2)
        g = hs.model.components1D.Gaussian()
        g.estimate_parameters(s2, None, None, only_current=False)
        assert g.binned == binned
        axis = s.axes_manager.signal_axes[0]
        factor = axis.scale if binned else 1
        assert_allclose(g.function_nd(axis.axis) * factor, s2.data)


class TestExpression:

    def setup_method(self, method):
        self.g = hs.model.components1D.Expression(
            expression="height * exp(-(x - x0) ** 2 * 4 * log(2)/ fwhm ** 2)",
            name="Gaussian",
            position="x0",
            height=1,
            fwhm=1,
            x0=0,
            module="numpy")

    def test_name(self):
        assert self.g.name == "Gaussian"

    def test_position(self):
        assert self.g._position is self.g.x0

    def test_f(self):
        assert self.g.function(0) == 1

    def test_grad_height(self):
        assert_allclose(
            self.g.grad_height(2),
            1.5258789062500007e-05)

    def test_grad_x0(self):
        assert_allclose(
            self.g.grad_x0(2),
            0.00016922538587889289)

    def test_grad_fwhm(self):
        assert_allclose(
            self.g.grad_fwhm(2),
            0.00033845077175778578)

    def test_function_nd(self):
        assert self.g.function_nd(0) == 1


def test_expression_symbols():
    with pytest.raises(ValueError):
        hs.model.components1D.Expression(expression="10.0", name="offset")
    with pytest.raises(ValueError):
        hs.model.components1D.Expression(expression="10", name="offset")
    with pytest.raises(ValueError):
        hs.model.components1D.Expression(expression="10*offset", name="Offset")


def test_expression_substitution():
    expr = 'A / B; A = x+2; B = x-c'
    comp = hs.model.components1D.Expression(expr, name='testcomp',
                                            autodoc=True, c=2)
    assert ''.join(p.name for p in comp.parameters) == 'c'
    assert comp.function(1) == -3


class TestScalableFixedPattern:

    def setup_method(self, method):
        s = hs.signals.Signal1D(np.linspace(0., 100., 10))
        s1 = hs.signals.Signal1D(np.linspace(0., 1., 10))
        s.axes_manager[0].scale = 0.1
        s1.axes_manager[0].scale = 0.1
        self.s = s
        self.pattern = s1

    def test_both_unbinned(self):
        s = self.s
        s1 = self.pattern
        s.metadata.Signal.binned = False
        s1.metadata.Signal.binned = False
        m = s.create_model()
        fp = hs.model.components1D.ScalableFixedPattern(s1)
        m.append(fp)
        with ignore_warning(message="invalid value encountered in sqrt",
                            category=RuntimeWarning):
            m.fit()
        assert abs(fp.yscale.value - 100) <= 0.1

    def test_both_binned(self):
        s = self.s
        s1 = self.pattern
        s.metadata.Signal.binned = True
""" Models a GC-MS experiment represented by a list of signal peaks """ ############################################################################# # # # PyMS software for processing of metabolomic mass-spectrometry data # # Copyright (C) 2005-2012 Vladimir Likic # #
# # This program is free software; you
can redistribute it and/or modify # # it under the terms of the GNU General Public License version 2 as # # published by the Free Software Foundation. # # # # This program is distributed in the hope that it will be useful, # # but WITHOUT ANY WARRANTY; without even the implied warranty of # # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # # GNU General Public License for more details. # # # # You should have received a copy of the GNU General Public License # # along with this program; if not, write to the Free Software # # Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA. # # # ############################################################################# from pyms.Utils.Error import error from pyms.Utils.Utils import is_str from pyms.Peak.Class import Peak from pyms.Peak.List.Utils import is_peak_list, sele_peaks_by_rt class Experiment: """ @summary: Models an experiment object @author: Vladimir Likic @author: Andrew Isaac """ def __init__(self, expr_code, peak_list): """ @summary: Models an experiment @param expr_code: Unique identifier for the experiment @type expr_code: StringType @param peak_list: A list of peak objects @type peak_list: ListType """ if not is_str(expr_code): error("'expr_code' must be a string") if not is_peak_list(peak_list): error("'peak_list' must be a list of Peak objects") self.__expr_code = expr_code self.__peak_list = peak_list def get_expr_code(self): """ @summary: Returns the expr_code of the experiment @return: The expr_code of the experiment @rtype: StringType """ return self.__expr_code def get_peak_list(self): """ @summary: Returns the peak list @return: A list of peak objects @rtype: ListType """ return self.__peak_list def sele_rt_range(self, rt_range): """ @summary: Discards all peaks which have the retention time outside the specified range @param rt_range: Min, max retention time given as a list [rt_min,rt_max] @type rt_range: ListType @return: none @rtype: NoneType """ peaks_sele = sele_peaks_by_rt(self.__peak_list, rt_range) self.__peak_list = peaks_sele
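# A hedged usage sketch of the Experiment class above. The Peak constructor
# arguments and the rt_range units are assumptions (pyms.Peak.Class.Peak may
# expect a different signature); this is illustration only, not the library's
# documented API.
peaks = [Peak(rt=60.0 * m) for m in (1, 2, 3)]  # rt kwarg is an assumption
expr = Experiment("run-01", peaks)
expr.sele_rt_range([60.0, 150.0])  # keep peaks between the two retention times
print expr.get_expr_code()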
from django.apps import AppConfig


class PlayersConfig(AppConfig):
    name = 'players'
#!/usr/bin/python
#
# Copyright Friday Film Club. All Rights Reserved.

"""League unit tests."""

__author__ = 'adamjmcgrath@gmail.com (Adam McGrath)'

import unittest

import base
import helpers
import models


class LeagueTestCase(base.TestCase):

  def testPostPutHook(self):
    league_owner = helpers.user()
    league_member_1 = helpers.user()
    league_member_2 = helpers.user()
    league = models.League(name='Foo',
                           owner=league_owner.put(),
                           users=[league_member_1.put(), league_member_2.put()])
    league_key = league.put()

    self.assertListEqual(league_owner.leagues, [league_key])
    self.assertListEqual(league_member_1.leagues, [league_key])
    self.assertListEqual(league_member_2.leagues, [league_key])

    league.users = [league_member_2.key]
    league.put()

    self.assertListEqual(league_member_1.leagues, [])
    self.assertListEqual(league_member_2.leagues, [league_key])

  def testPostDeleteHook(self):
    league_owner = helpers.user()
    league_member_1 = helpers.user()
    league_member_2 = helpers.user()
    league = models.League(name='Foo',
                           owner=league_owner.put(),
                           users=[league_member_1.put(), league_member_2.put()])
    league_key = league.put()

    self.assertListEqual(league_owner.leagues, [league_key])
    self.assertListEqual(league_member_1.leagues, [league_key])
    self.assertListEqual(league_member_2.leagues, [league_key])

    league.key.delete()

    self.assertListEqual(league_owner.leagues, [])
    self.assertListEqual(league_member_1.leagues, [])
    self.assertListEqual(league_member_2.leagues, [])

  def testGetByName(self):
    league = models.League(name='Foo', owner=helpers.user().put())
    league.put()
    self.assertEqual(models.League.get_by_name('foo'), league)


if __name__ == '__main__':
  unittest.main()
import ctypes import os import types from platform_utils import paths def load_library(libname): if path
s.is_frozen(): libfile = os.path.join(paths.embedded_data_path(), 'accessible_output2', 'lib', libname) else: libfile = os.path.join(paths.module_path(), 'lib', libname) return ctypes.windll[libfile] def get_output_classes(): import outputs module_type = types.ModuleType classes = [m.output_class for m in outputs.__dict__.itervalues() if type(m) == module_type and hasattr(m, 'output_class')] return sorted(classes, key=lambda c: c.priority) def find_dat
afiles(): import os import platform from glob import glob import accessible_output2 if platform.system() != 'Windows': return [] path = os.path.join(accessible_output2.__path__[0], 'lib', '*.dll') results = glob(path) dest_dir = os.path.join('accessible_output2', 'lib') return [(dest_dir, results)]
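# Usage sketch: find_datafiles() yields a distutils-style data_files list, so
# a frozen-build setup script can consume it directly. The setup() fragment
# below is hypothetical (the package name is invented):
#
#   from distutils.core import setup
#   import accessible_output2
#
#   setup(name='myapp',
#         data_files=accessible_output2.find_datafiles())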
""", (self.study_id, self.feature_id, self.uid) ) except Exception, e: context.logger.error('Location: %s (%s)', self.uid, e) class Study (canary.context.Cacheable, DTable): TABLE_NAME = 'studies' # FIXME: does this only belong here or on loader.QueuedRecord? # A Study has only one STATUS_TYPE STATUS_TYPES = { 'unclaimed' : 0, 'claimed' : 1, 'curated' : 2, } # A Study has only one ARTICLE_TYPE ARTICLE_TYPES = { 'unknown' : 0, 'irrelevant' : 1, 'traditional' : 2, 'general' : 3, 'review' : 4, 'outcomes only' : 5, 'exposures only' : 6, 'curated' : 7, 'duplicate' : 8, } # For dynamic iteration over related tables TABLES = { 'methodologies' : Methodology, 'exposures': Exposure, 'risk_factors': RiskFactor, 'outcomes': Outcome, 'species': Species, 'locations': Location, } CACHE_KEY = 'study' def __init__ (self, context=None, uid=-1, record_id=-1): try: if self.record_id >= 0: return except AttributeError: pass self.uid = uid self.record_id = -1 self.status = self.STATUS_TYPES['unclaimed'] self.article_type = self.ARTICLE_TYPES['unknown'] self.curator_user_id = '' self.has_outcomes = False self.has_exposures = False self.has_relationships = False self.has_interspecies = False self.has_exposure_linkage = False self.has_outcome_linkage = False self.has_genomic = False self.comments = '' self.methodologies = [] self.exposures = [] self.risk_factors = [] self.outcomes = [] self.species = [] self.locations = [] self.date_modified = None self.date_entered = None self.date_curated = None self.history = {} def __str__ (self): out = [] out.append('<Study uid=%s record_id=%s' % (self.uid, self.record_id)) out.append('\tstatus=%s' % self.get_text_value(self.STATUS_TYPES, self.status)) out.append('\tcurator_user_id=%s' % self.curator_user_id) out.append('\tarticle_type=%s' % self.get_text_value(self.ARTICLE_TYPES, self.article_type)) out.append('\thas_outcomes=%s' % self.has_outcomes) out.append('\thas_exposures=%s' % self.has_exposures) out.append('\thas_relationships=%s' % self.has_relationships) out.append('\thas_interspecies=%s' % self.has_interspecies) out.append('\thas_exposure_linkage=%s' % self.has_exposure_linkage) out.append('\thas_outcome_linkage=%s' % self.has_outcome_linkage) out.append('\thas_genomic=%s' % self.has_genomic) # What are you wanting here? TYPES is not like OUTCOMES, is it? #for table_name in self.TABLES: # if len(getattr(self, table_name)) > 0: # out.append('\t%s=' % table_name + \ # ','.join(getattr(self, 'get_' + table_name)(text=True))) #if len(self.types) > 0: # out.append('\ttypes=' + ','.join(self.get_types(text=True))) out.append('\tcomments=%s' % self.comments or '') out.append('/>') return '\n'.join(out) def get_text_value (self, lookup_table, value): for k, v in lookup_table.iteritems(): if v == value: return k return '' """Simple accessors for basic study parameters.""" # FIXME: some of these could be parameterized. 
def set_status (self, value): if value in self.STATUS_TYPES.keys(): self.status = self.STATUS_TYPES[value] def get_status (self, text=False): if text: return self.get_text_value(self.STATUS_TYPES, self.status) else: return self.status def set_article_type (self, value): try: if str(value) in self.ARTICLE_TYPES.keys(): self.article_type = self.ARTICLE_TYPES[str(value)] except: # FIXME: proper error here pass def get_article_type (self, text=False): if text: return self.get_text_value(self.ARTICLE_TYPES, self.article_type) else: return self.article_type def get_concept_from_concept (self, concept): """ For use in matching searches for exposure/species/outcome against summary data. NOTE: not checking 'risk_factor', but that should be refactored in with a broader concept code refactoring. """ for concept_type in ('exposures', 'outcomes', 'species'): for c in getattr(self, concept_type): if c.concept_id == concept.uid: # Eliminate trailing 's' if concept_type in ('exposures', 'outcomes'): concept_type = concept_type[:-1] return c, concept_type return None, None def add_methodology (self, methodology): for meth in self.methodologies: if meth.uid == methodology.uid: return methodology.study_id = self.uid self.methodologies.append(methodology) def delete_methodology (self, context, methodology): for meth in self.methodologies: if meth.uid == methodology.uid: self.methodologies.remove(meth) meth.delete(context) def get_methodology (self, id): for methodology in self.methodologies: if methodology.uid == id: return methodology return None def has_exposure (self, exposure): """ Returns True if this exposure has already been added to this Study. Note that has_exposure may be used before exposure is added, hence it does not check exposure.uid. """ for exp in self.exposures: if exp.concept_id == exposure.concept_id: return True return False def add_exposure (self, exposure): if not self.has_exposure(exposure): exposure.study_id = self.uid self.exposures.append(exposure) def delete_exposure (self, context, exposure): for exp in self.exposures: if exp.concept_id == exposure.concept_id: self.exposures.remove(exp) exp.delete(context) def get_exposu
re (self, id): """ Return the matching exposure, if added. Note that get_exposure is for use in matching or deleting exposures, i.e., only after an exposure has been added to the Study, so uid matching is required. """ f
or exp in self.exposures: if exp.uid == id: return exp return None def get_exposure_from_exposure (self, exposure): for exp in self.exposures: if exp.concept_id == exposure.concept_id: return exp return None def has_risk_factor (self, risk_factor): """ Returns True if this risk_factor has already been added to this Study. Note that has_risk_factor may be used before risk_factor is added, hence it does not check risk_factor.uid. """ for rf in self.risk_factors: if rf.concept_id == risk_factor.concept_id: return True return False def add_risk_factor (self, risk_factor): if not self.has_risk_factor(risk_factor): risk_factor.study_id = self.uid self.risk_factors.append(risk_factor) def delete_risk_factor (self, context, risk_factor): for rf in self.risk_factors: if rf.concept_id == risk_factor.concept_id: self.risk_factors.remove(rf) rf.delete(context) def get_risk_factor (self, id):
#!/usr/bin/env python
#! -*- coding: utf-8 -*-
###
# Copyright (c) Rice University 2012-13
# This software is subject to
# the provisions of the GNU Affero General
# Public License version 3 (AGPLv3).
# See LICENCE.txt for details.
###
"""
This exists solely to provide less typing for a "leaf node" in a simple
relational schema (1:M and 1:M-N:1) when used with SQLAlchemy

SA does not support class based inheritance in the normal Python way for
objects inheriting from Base. Thus we have those objects perform multiple
inheritance...
"""
import json

import sqlalchemy.types
import datetime


class CNXBase():

    def from_dict(self, userprofile_dict):
        """
        Should test for schema validity etc.
        """
        d = userprofile_dict
        for k in d:
            setattr(self, k, d[k])

    def to_dict(self):
        """Return self as a dict, suitable for jsonifying """
        d = {}
        for col in self.__table__.columns:
            d[col.name] = self.safe_type_out(col)
        return d

    def jsonify(self):
        """Helper function that returns simple json repr """
        selfd = self.to_dict()
        jsonstr = json.dumps(selfd)  # here use the JSON encoder???
        return jsonstr

    def safe_type_out(self, col):
        """return the value of a column field safely as something
        that json can use

        This is essentially a JSONEncoder subclass inside this object.
        """
        # Bug fix: test the column's type instance directly;
        # isinstance(type(col.type), ...) was always False.
        if isinstance(col.type, sqlalchemy.types.DateTime):
            outstr = getattr(self, col.name).isoformat()
        else:
            outstr = getattr(self, col.name)
        return outstr
from d
jango.conf.urls import url from django.views.generic import TemplateView urlpatterns = [ url(r'^$', TemplateView.as_view(template_name='homepage.html')), url(r'^remote.html$', TemplateView.as_view(template_
name='remote.html'), name="remote.html"), ]
eed(1) tf.reset_default_graph() def testMetricsCollection(self): my_collection_name = '__metrics__' mean, _ = metrics.streaming_auc( predictions=tf.ones((10, 1)), labels=tf.ones((10, 1)), metrics_collections=[my_collection_name]) self.assertListEqual(tf.get_collection(my_collection_name), [mean]) def testUpdatesCollection(self): my_collection_name = '__updates__' _, update_op = metrics.streaming_auc( predictions=tf.ones((10, 1)), labels=tf.ones((10, 1)), updates_collections=[my_collection_name]) self.assertListEqual(tf.get_collection(my_collection_name), [update_op]) def testValueTensorIsIdempotent(self): predictions = tf.random_uniform((10, 3), maxval=1, dtype=tf.float32, seed=1) labels = tf.random_uniform((10, 3), maxval=1, dtype=tf.int64, seed=1) auc, update_op = metrics.streaming_auc( predictions, labels) with self.test_session() as sess: sess.run(tf.local_variables_initializer()) # Run several updates. for _ in range(10): sess.run(update_op) # Then verify idempotency. initial_auc = auc.eval() for _ in range(10): self.assertAlmostEqual(initial_auc, auc.eval(), 5) def testAllCorrect(self): self.allCorrectAsExpected('ROC') def allCorrectAsExpected(self, curve): inputs = np.random.randint(0, 2, size=(100, 1)) with self.test_session() as sess: predictions = tf.constant(inputs, dtype=tf.float32) labels = tf.constant(inputs) auc, update_op = metrics.streaming_auc(predictions, labels, curve=curve) sess.run(tf.local_variables_initializer()) self.assertEqual(1, sess.run(update_op)) self.assertEqual(1, auc.eval()) def testSomeCorrect(self): with self.test_session() as sess: predictions = tf.constant([1, 0, 1, 0], shape=(1, 4), dtype=tf.float32) labels = tf.constant([0, 1, 1, 0], shape=(1, 4)) auc, update_op = metrics.streaming_auc(predictions, labels) sess.run(tf.local_variables_initializer()) self.assertAlmostEqual(0.5, sess.run(update_op)) self.assertAlmostEqual(0.5, auc.eval()) def testWeighted1d(self): with self.test_session() as sess:
predictions = tf.constant([1, 0, 1, 0], shape=(1, 4), dtype=tf.float32) labels = tf.constant([0, 1, 1, 0], shape=(1, 4))
      weights = tf.constant([2], shape=(1, 1))
      auc, update_op = metrics.streaming_auc(predictions,
                                             labels,
                                             weights=weights)

      sess.run(tf.local_variables_initializer())
      self.assertAlmostEqual(0.5, sess.run(update_op), 5)
      self.assertAlmostEqual(0.5, auc.eval(), 5)

  def testWeighted2d(self):
    with self.test_session() as sess:
      predictions = tf.constant([1, 0, 1, 0], shape=(1, 4), dtype=tf.float32)
      labels = tf.constant([0, 1, 1, 0], shape=(1, 4))
      weights = tf.constant([1, 2, 3, 4], shape=(1, 4))
      auc, update_op = metrics.streaming_auc(predictions,
                                             labels,
                                             weights=weights)

      sess.run(tf.local_variables_initializer())
      self.assertAlmostEqual(0.7, sess.run(update_op), 5)
      self.assertAlmostEqual(0.7, auc.eval(), 5)

  def testAUCPRSpecialCase(self):
    with self.test_session() as sess:
      predictions = tf.constant([0.1, 0.4, 0.35, 0.8],
                                shape=(1, 4), dtype=tf.float32)
      labels = tf.constant([0, 0, 1, 1], shape=(1, 4))
      auc, update_op = metrics.streaming_auc(predictions, labels, curve='PR')

      sess.run(tf.local_variables_initializer())
      self.assertAlmostEqual(0.79166, sess.run(update_op), delta=1e-3)
      self.assertAlmostEqual(0.79166, auc.eval(), delta=1e-3)

  def testAnotherAUCPRSpecialCase(self):
    with self.test_session() as sess:
      predictions = tf.constant([0.1, 0.4, 0.35, 0.8, 0.1, 0.135, 0.81],
                                shape=(1, 7), dtype=tf.float32)
      labels = tf.constant([0, 0, 1, 0, 1, 0, 1], shape=(1, 7))
      auc, update_op = metrics.streaming_auc(predictions, labels, curve='PR')

      sess.run(tf.local_variables_initializer())
      self.assertAlmostEqual(0.610317, sess.run(update_op), delta=1e-3)
      self.assertAlmostEqual(0.610317, auc.eval(), delta=1e-3)

  def testThirdAUCPRSpecialCase(self):
    with self.test_session() as sess:
      predictions = tf.constant([0.0, 0.1, 0.2, 0.33, 0.3, 0.4, 0.5],
                                shape=(1, 7), dtype=tf.float32)
      labels = tf.constant([0, 0, 0, 0, 1, 1, 1], shape=(1, 7))
      auc, update_op = metrics.streaming_auc(predictions, labels, curve='PR')

      sess.run(tf.local_variables_initializer())
      self.assertAlmostEqual(0.90277, sess.run(update_op), delta=1e-3)
      self.assertAlmostEqual(0.90277, auc.eval(), delta=1e-3)

  def testAllIncorrect(self):
    inputs = np.random.randint(0, 2, size=(100, 1))

    with self.test_session() as sess:
      predictions = tf.constant(inputs, dtype=tf.float32)
      labels = tf.constant(1 - inputs, dtype=tf.float32)
      auc, update_op = metrics.streaming_auc(predictions, labels)

      sess.run(tf.local_variables_initializer())
      self.assertAlmostEqual(0, sess.run(update_op))
      self.assertAlmostEqual(0, auc.eval())

  def testZeroTruePositivesAndFalseNegativesGivesOneAUC(self):
    with self.test_session() as sess:
      predictions = tf.zeros([4], dtype=tf.float32)
      labels = tf.zeros([4])
      auc, update_op = metrics.streaming_auc(predictions, labels)

      sess.run(tf.local_variables_initializer())
      self.assertAlmostEqual(1, sess.run(update_op), 6)
      self.assertAlmostEqual(1, auc.eval(), 6)

  def testRecallOneAndPrecisionOneGivesOnePRAUC(self):
    with self.test_session() as sess:
      predictions = tf.ones([4], dtype=tf.float32)
      labels = tf.ones([4])
      auc, update_op = metrics.streaming_auc(predictions, labels, curve='PR')

      sess.run(tf.local_variables_initializer())
      self.assertAlmostEqual(1, sess.run(update_op), 6)
      self.assertAlmostEqual(1, auc.eval(), 6)

  def np_auc(self, predictions, labels, weights):
    """Computes the AUC explicitly using Numpy.

    Args:
      predictions: an ndarray with shape [N].
      labels: an ndarray with shape [N].
      weights: an ndarray with shape [N].

    Returns:
      the area under the ROC curve.
""" if weights is None: weights = np.ones(np.size(predictions)) is_positive = labels > 0 num_positives = np.sum(weights[is_positive]) num_negatives = np.sum(weights[~is_positive]) # Sort descending: inds = np.argsort(-predictions) sorted_labels = labels[inds] sorted_weights = weights[inds] is_positive = sorted_labels > 0 tp = np.cumsum(sorted_weights * is_positive) / num_positives return np.sum((sorted_weights * tp)[~is_positive]) / num_negatives def testWithMultipleUpdates(self): num_samples = 1000 batch_size = 10 num_batches = int(num_samples / batch_size) # Create the labels and data. labels = np.random.randint(0, 2, size=num_samples) noise = np.random.normal(0.0, scale=0.2, size=num_samples) predictions = 0.4 + 0.2 * labels + noise predictions[predictions > 1] = 1 predictions[predictions < 0] = 0 def _enqueue_as_batches(x, enqueue_ops): x_batches = x.astype(np.float32).reshape((num_batches, batch_size)) x_queue = tf.FIFOQueue(num_batches, dtypes=tf.float32, shapes=(batch_size,)) for i in range(num_batches): enqueue_ops[i].append(x_queue.enqueue(x_batches[i, :])) return x_queue.dequeue() for weights in (None, np.ones(num_samples), np.random.exponential(scale=1.0, size=num_samples)): expected_auc = self.np_auc(predictions, labels, weights) with self.test_session() as sess: enqueue_ops = [[] for i in range(num_batches)] tf_predictions = _enqueue_as_batches(predictions, enqueue_ops) tf_labels = _enqueue_as_batches(labels, enqueue_o
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.

from openerp import api, fields, models


class CrmActivity(models.Model):
    ''' CrmActivity is a model introduced in Odoo v9 that models activities
    performed in CRM, like phonecalls, sending emails, making demonstrations, ...
    Users are able to configure their custom activities.

    Each activity has up to three next activities. This allows modeling light
    custom workflows. This way sales managers can configure the CRM workflow
    that salespersons will use in their daily job.

    CrmActivity inherits from mail.message.subtype. This allows users to follow
    some activities through subtypes. Each activity will generate messages with
    the matching subtypes, allowing reporting and statistics computation based
    on mail.message.subtype model. '''
    _name = 'crm.activity'
    _description = 'CRM Activity'
    _inherits = {'mail.message.subtype': 'subtype_id'}
    _rec_name = 'name'
    _order = "sequence"

    days = fields.Integer(
        'Number of days', default=0,
        help='Number of days before fulfilling the action, allowing to plan the action date.')
sequence = fields.Integer('Sequence', default=0) team_id = fields.Many2one('crm.team', string='Sales Team') subtype_id = fields.Many2one('mail.message.subtype', string='Message Subtype', required=True, ondelete='cascade') activity_1_id = fields.Many2one('crm.activity', string="Next Activity 1") activi
ty_2_id = fields.Many2one('crm.activity', string="Next Activity 2") activity_3_id = fields.Many2one('crm.activity', string="Next Activity 3") @api.model def create(self, values): ''' Override to set the res_model of inherited subtype to crm.lead. This cannot be achieved using a default on res_model field because of the inherits. Indeed a new field would be created. However the field on the subtype would still exist. Being void, the subtype will be present for every model in Odoo. That's quite an issue. ''' if not values.get('res_model') and 'default_res_model' not in self._context: values['res_model'] = 'crm.lead' if 'internal' not in values and 'default_internal' not in self._context: values['internal'] = True return super(CrmActivity, self).create(values)
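# Usage sketch (values invented; assumes a standard Odoo environment `env`).
# The create() override above fills in res_model='crm.lead' and internal=True:
#
#   activity = env['crm.activity'].create({
#       'name': 'Demonstration call',
#       'days': 3,
#   })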
# coding: utf-8
'''
Created on 2012-8-30

@author: shanfeng
'''
import smtplib
from email.mime.text import MIMEText
from email.header import Header  # needed by sendMail() below
import urllib

import web

# NOTE (assumption): sendMail() below reads these module-level settings; they
# were undefined in the original file, so placeholders (reusing the values
# hard-coded in sendfindpass) are added here.
host = 'smtp.163.com'
user = 'wukong10086@163.com'
password = '831112'
fromMail = 'wukong10086@163.com'
_mailFrom = 'liulin.info'

class XWJemail:
    '''
    classdocs
    '''

    def __init__(self, params):
        '''
        Constructor
        '''
        pass

    @staticmethod
    def sendfindpass(user,hash):
        link = "%s/account/newpass?%s" %(web.ctx.sitehost,urllib.urlencode({'email':user.u_email,"v":hash}))
        mail_body = """
        <html>
        <head></head>
        <body>
        <h4>%s,你好</h4>
        您刚才在 liulin.info 申请了找回密码。<br>
        请点击下面的链接来重置密码:<br>
        <a href="%s">%s</a><br>
        如果无法点击上面的链接,您可以复制该地址,并粘帖在浏览器的地址栏中访问。<br>
        </body>
        </html>
        """ % (web.utf8(user.u_name),link,link)
        #mail_body = web.utf8(mail_body)
        if isinstance(mail_body,unicode):
            mail_body = str(mail_body)

        mail_from = "liulin.info<wukong10086@163.com>"
        mail_to = user.u_email
        mail_subject = 'liulin.info重置密码邮件'

        msg = MIMEText(mail_body,'html','utf-8')
        #msg=MIMEText(mail_body,'html')
        if not isinstance(mail_subject,unicode):
            mail_subject = unicode(mail_subject)
        msg['Subject']= mail_subject
        msg['From']=mail_from
        msg['To'] = mail_to
        msg["Accept-Language"]="zh-CN"
        msg["Accept-Charset"]="ISO-8859-1,utf-8"

        smtp=smtplib.SMTP()
        smtp.connect('smtp.163.com')
        smtp.login('wukong10086@163.com','831112')
        smtp.sendmail(mail_from,mail_to,msg.as_string())
        smtp.quit()

def sendMail(mailto,subject,body,format='plain'):
    if isinstance(body,unicode):
        body = str(body)

    me = ("%s<"+fromMail+">") % (Header(_mailFrom,'utf-8'),)
    msg = MIMEText(body,format,'utf-8')
    if not isinstance(subject,unicode):
        subject = unicode(subject)
    msg['Subject'] = subject
    msg['From'] = me
    msg['To'] = mailto
    msg["Accept-Language"]="zh-CN"
    msg["Accept-Charset"]="ISO-8859-1,utf-8"

    try:
        s = smtplib.SMTP()
        s.connect(host)
        s.login(user,password)
        s.sendmail(me, mailto, msg.as_string())
        s.close()
        return True
    except Exception, e:
        print str(e)
        return False
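# Usage sketch (invented address; requires a reachable SMTP server and the
# module-level settings above to be valid):
#
#   ok = sendMail('someone@example.com', u'Test subject', u'Hello there')
#   print 'sent' if ok else 'failed'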
#!/usr/bin/python

# This script reads through a genotype likelihood file and the respective mean genotype likelihood file. It writes a nexus file for all individuals and the given genotypes, with '0' for ref homozygote, '1' for heterozygote, and '2' for alt homozygote.
# Usage: ~/vcf2nex012.py pubRetStriUG_unlnkd.gl pntest_pubRetStriUG_unlnkd.txt

from sys import argv

# read genotype likelihood file to get scaffold:bp (which is not in the same order as the vcf file, resulting from vcf2gl.py)
with open(argv[1], 'rb') as gl_file:
    scafPos_gl = list()
    for line in gl_file:
        if line.split(' ')[0] == '65':
            continue
        elif line.split(' ')[0] == 'CR1043':
            ind_id = line.split(' ')
            ind_id[len(ind_id)-1] = ind_id[len(ind_id)-1].split('\n')[0]
        else:
            scafPos_gl.append(line.split(' ')[0])

# read the file with mean genotypes
with open(argv[2], 'rb') as mean_gt_file:
    ind_dict = dict()
    for line in mean_gt_file:
        gt_line = line.split(' ')
        for i, ind in enumerate(ind_id):
            if ind not in ind_dict:
                # (a stray no-op expression statement, `gt_line[i]`, was removed here)
                ind_dict[ind] = [float(gt_line[i])]
            else:
                ind_dict[ind].append(float(gt_line[i]))

# parse the mean genotypes and write the proper bases
for key, value in ind_dict.iteritems():
    newline = list()
    for i, pos in enumerate(scafPos_gl):
        if round(float(value[i])) == 0:
            newline.append(str(0))
        elif round(float(value[i])) == 1:
            newline.append(str(1))
        elif round(float(value[i])) == 2:
            newline.append(str(2))
        else:
            continue
    print str(key + '\t' + ''.join(newline))

#print scafPos_gl
#for key, value in iter(refp_dict.iteritems()):
#    print key, ''.join(value)
""" ListCompToMap transforms list comprehension into intrinsics. """ from pythran.analyses import OptimizableComprehension from pythran.passmanager import Transformation from pythran.transformations import NormalizeTuples import ast class ListCompToMap(Transformation): ''' Transforms list comprehension into intrinsics. >>> import ast >>> from pythran import passmanager, backend >>> node = ast.parse("[x*x for x in range(10)]") >>> pm = passmanager.PassManager("test") >>> _, node = pm.apply(ListCompToMap, node) >>> print pm.dump(backend.Python, node) __builtin__.map((lambda x: (x * x)), range(10)) ''' def __init__(self): Transformation.__init__(self, NormalizeTuples, OptimizableComprehension) def make_Iterator(self, gen): if gen.ifs: ldFilter = ast.Lambda( ast.arguments([ast.Name(gen.target.id, ast.Param())], None, None, []), ast.BoolOp(ast.And(), gen.ifs)) ifilterName = ast.Attribute( value=ast.Name(id='itertools', ctx=ast.Load()), attr='ifilter', ctx=ast.Load()) return ast.Call(ifilterName, [ldFilter, gen.iter], [], None, None) else: return gen.iter def visit_ListComp(self, node): if node in self.optimizable_comprehension: self.update = True self.generic_visit(node) iterList = [] varList = [] for gen in node.generators: iterList.append(self.make_Iterator(gen)) varList.append(ast.Name(gen.target.id, ast.Param())) # If dim = 1, product is useless if len(iterList) == 1: iterAST = iterList[0] varAST = ast.arguments([varList[0]], None, None, []) else: prodName = ast.Attribute( value=ast.Name(id='itertools', ctx=ast.Load()), attr='product', ctx=ast.Load()) iterAST = ast.Call(prodName, iterList, [], None, None) varAST = ast.arguments([ast.Tuple(varList, ast.Store())],
None, None, []) mapName = ast.Attribute( value=ast.Name(id='__builtin__', ctx=ast.Load()), attr='map', ctx=ast.Load()) ldBodymap = node.elt ldmap = ast.Lambda(varAST, ldBodymap) return ast.Call(mapName, [ldmap, iterAST], [], None, None) else: r
eturn self.generic_visit(node)
from recursive function calls."""

    def __init__(self):
        Object.__init__(self)
        # XXX: Do cycles need an id?
        self.functions = set()

    def add_function(self, function):
        assert function not in self.functions
        self.functions.add(function)
        # XXX: Aggregate events?
        if function.cycle is not None:
            for other in function.cycle.functions:
                # Bug fix: test the member being merged in, not the function
                # just added (the original `function not in self.functions`
                # was always False here, making the merge a no-op).
                if other not in self.functions:
                    self.add_function(other)
        function.cycle = self


class Profile(Object):
    """The whole profile."""

    def __init__(self):
        Object.__init__(self)
        self.functions = {}
        self.cycles = []

    def add_function(self, function):
        if function.id in self.functions:
            sys.stderr.write('warning: overwriting function %s (id %s)\n' % (function.name, str(function.id)))
        self.functions[function.id] = function

    def add_cycle(self, cycle):
        self.cycles.append(cycle)

    def validate(self):
        """Validate the edges."""
        for function in self.functions.itervalues():
            for callee_id in function.calls.keys():
                assert function.calls[callee_id].callee_id == callee_id
                if callee_id not in self.functions:
                    sys.stderr.write('warning: call to undefined function %s from function %s\n' % (str(callee_id), function.name))
                    del function.calls[callee_id]

    def find_cycles(self):
        """Find cycles using Tarjan's strongly connected components algorithm."""

        # Apply Tarjan's algorithm successively until all functions are visited
        visited = set()
        for function in self.functions.itervalues():
            if function not in visited:
                self._tarjan(function, 0, [], {}, {}, visited)
        cycles = []
        for function in self.functions.itervalues():
            if function.cycle is not None and function.cycle not in cycles:
                cycles.append(function.cycle)
        self.cycles = cycles
        if 0:
            for cycle in cycles:
                sys.stderr.write("Cycle:\n")
                for member in cycle.functions:
                    sys.stderr.write("\t%s\n" % member.name)

    def _tarjan(self, function, order, stack, orders, lowlinks, visited):
        """Tarjan's strongly connected components algorithm.

        See also:
        - http://en.wikipedia.org/wiki/Tarjan's_strongly_connected_components_algorithm
        """

        visited.add(function)
        orders[function] = order
        lowlinks[function] = order
        order += 1
        pos = len(stack)
        stack.append(function)
        for call in function.calls.itervalues():
            callee = self.functions[call.callee_id]
            # TODO: use a set to optimize lookup
            if callee not in orders:
                order = self._tarjan(callee, order, stack, orders, lowlinks, visited)
                lowlinks[function] = min(lowlinks[function], lowlinks[callee])
            elif callee in stack:
                lowlinks[function] = min(lowlinks[function], orders[callee])
        if lowlinks[function] == orders[function]:
            # Strongly connected component found
            members = stack[pos:]
            del stack[pos:]
            if len(members) > 1:
                cycle = Cycle()
                for member in members:
                    cycle.add_function(member)
        return order

    def call_ratios(self, event):
        # Aggregate for incoming calls
        cycle_totals = {}
        for cycle in self.cycles:
            cycle_totals[cycle] = 0.0
        function_totals = {}
        for function in self.functions.itervalues():
            function_totals[function] = 0.0
        for function in self.functions.itervalues():
            for call in function.calls.itervalues():
                if call.callee_id != function.id:
                    callee = self.functions[call.callee_id]
                    function_totals[callee] += call[event]
                    if callee.cycle is not None and callee.cycle is not function.cycle:
                        cycle_totals[callee.cycle] += call[event]

        # Compute the ratios
        for function in self.functions.itervalues():
            for call in function.calls.itervalues():
                assert CALL_RATIO not in call
                if call.callee_id != function.id:
                    callee = self.functions[call.callee_id]
                    if callee.cycle is not None and callee.cycle is not function.cycle:
                        total = cycle_totals[callee.cycle]
                    else:
                        total = function_totals[callee]
                    call[CALL_RATIO] = ratio(call[event], total)

    def integrate(self, outevent, inevent):
        """Propagate function time ratio along the function calls.

        Must be called after finding the cycles.
See also: - http://citeseer.ist.psu.edu/graham82gprof.html """ # Sanity checking assert outevent not in self for function in self.functions.itervalues(): assert outevent not in function assert inevent in function for call in function.calls.itervalues(): assert outevent not in call if call.callee_id != function.id: assert CALL_RATIO in call # Aggregate the input for each cycle for cycle in self.cycles: total = inevent.null() for function in self.functions.itervalues(): total = inevent.aggregate(total, function[inevent]) self[inevent] = total # Integrate along the edges total = inevent.null() for function in self.functions.itervalues(): total = inevent.aggregate(total, function[inevent]) self._integrate_function(function, outevent, inevent) self[outevent] = total def _integrate_function(self, function, outevent, inevent): if function.cycle is not None: return self._integrate_cycle(function.cycle, outevent, inevent) else: if outevent not in function: total = function[inevent] for call in function.calls.itervalues(): if call.callee_id != function.id: total += self._integrate_call(call, outevent, inevent) function[outevent] = total return function[outevent] def _integrate_call(self, call, outevent, inevent): assert outevent not in call assert CALL_RATIO in call callee = self.functions[call.callee_id] subtotal = call[CALL_RATIO]*self._integrate_function(callee, outevent, inevent) call[outevent] = subtotal return subtotal def _integrate_cycle(self, cycle, outevent, inevent): if outevent not in cycle: total = inevent.null() for member in cycle.functions: subtotal = member[inevent] for call in member.calls.itervalues(): callee = self.functions[call.callee_id] if callee.cycle is not cycle: subtotal += self._integrate_call(call, outevent, inevent) total += subtotal cycle[outevent] = total callees = {} for function in self.functions.itervalues(): if function.cycle is not cycle: for call in function.calls.itervalues(): callee = self.functions[call.callee_id] if callee.cycle is cycle: try: callees[callee] += call[CALL_RATIO] except KeyError: callees[callee] = call[CALL_RATIO] for callee, call_ratio in callees.iteritems(): ranks = {} call_ratios = {} partials = {} self._rank_cycle_function(cycle, callee, 0, ranks) s
from HSM_Reactions import *

########## RIGHT-HAND SIDES OF THE ODEs, rewritten with only 10 equations to isolate those that are independent ##############

def f10eqs(t, y, ksetDict, TparamSet, REACparamSet, DirectControlnuPp, IC_PplusPp, IC_SplusSs):

    #P = y[0]
    Ph = y[0]
    #S = y[2]
    Ss = y[1]
    F = y[2]
    Fs = y[3]
    G = y[4]
    FsG = y[5]
    FG = y[6]
    RF = y[7]
    RHP = y[8]
    HP = y[9]

    kP0 = ksetDict["kP0"]
    kP0p = ksetDict["kP0p"]
    kS = ksetDict["kS"]
    kSp0 = ksetDict["kSp0"]
    kFp0 = ksetDict["kFp0"]
    kF0 = ksetDict["kF0"]
    kFpi0 = ksetDict["kFpi0"]
    kFGp = ksetDict["kFGp"]
    kFG = ksetDict["kFG"]
    ketaF = ksetDict["ketaF"]
    kFsG = ksetDict["kFsG"]
    kFsGp = ksetDict["kFsGp"]
    kFsp = ksetDict["kFsp"]
    kFs = ksetDict["kFs"]
    kpiRF = ksetDict["kpiRF"]
    kpiRH = ksetDict["kpiRH"]
    kpiHP = ksetDict["kpiHP"]
    ketaHP = ksetDict["ketaHP"]
    ketaRF = ksetDict["ketaRF"]
    ketaRHP = ksetDict["ketaRHP"]

    n1 = REACparamSet["n1"]
    n2 = REACparamSet["n2"]
    P0const = REACparamSet["P0const"]
    I = REACparamSet["I"]
    T0const = REACparamSet["T0const"]
    piRFconst = REACparamSet["piRFconst"]
    piRHPconst = REACparamSet["piRHPconst"]

    PplusPpCONST = IC_PplusPp  # (microM) Initial Condition protein P
    SplusSsCONST = IC_SplusSs  # (microM) Initial Condition stress kinase S

    system = [
        #nuP(Ph, HP, kP0) - nuPp(P, t, kP0p, n1, T0const, TparamSet, DirectControlnuPp),  # P
        - nuP(Ph, HP, kP0) + nuPp(PplusPpCONST - Ph, t, kP0p, n1, T0const, TparamSet, DirectControlnuPp),  # Ph
        #nuS(Ss, kS) - nuSp(S, Ph, kSp0, n2, P0const),  # S
        - nuS(Ss, kS) + nuSp(SplusSsCONST - Ss, Ph, kSp0, n2, P0const),  # Ss
        nuF(I, Fs, kF0) + piF(RF, kFpi0) + nuFGp(FG, kFGp) - nuFG(G, F, kFG) - nuFp(F, Ss, kFp0) - etaF(F, ketaF),  # F
        - nuF(I, Fs, kF0) + nuFp(F, Ss, kFp0) + nuFsGp(FsG, kFsGp) - nuFsG(G, Fs, kFsG),  # Fs
        nuFsGp(FsG, kFsGp) + nuFGp(FG, kFGp) - nuFG(G, F, kFG) - nuFsG(G, Fs, kFsG),  # G
        nuFsG(G, Fs, kFsG) + nuFs(FG, kFs) - nuFsp(FsG, I, kFsp) - nuFsGp(FsG, kFsGp),  # FsG
        nuFsp(FsG, I, kFsp) + nuFG(G, F, kFG) - nuFGp(FG, kFGp) - nuFs(FG, kFs),  # FG
        piRF(FsG, kpiRF) + piRFAddConst(piRFconst) - etaRF(RF, ketaRF),  # RF  Added const to Alex model
        piRHP(FsG, kpiRH) + piRHPAddConst(piRHPconst) - etaRHP(RHP, ketaRHP),  # RHP  Added const to Alex model
        piHP(RHP, kpiHP) - etaHP(HP, ketaHP)]  # HP
    # Note the presence of nuFG() in the F line, of nuFsG() in the Fs line, and the absence of pi in the FsG line.

    return system

########## RIGHT-HAND SIDES OF THE ODEs, rewritten with only 9 equations to isolate those that are independent ##############

def f9eqs(t, y, ksetDict, TparamSet, REACparamSet, DirectControlnuPp, IC_PplusPp, IC_SplusSs, IC_GplusFsGplusFG):

    #P = y[0]
    Ph = y[0]
    #S = y[2]
    Ss = y[1]
    F = y[2]
    Fs = y[3]
    #G = y[4]
    FsG = y[4]
    FG = y[5]
    RF = y[6]
    RHP = y[7]
    HP = y[8]

    kP0 = ksetDict["kP0"]
    kP0p = ksetDict["kP0p"]
    kS = ksetDict["kS"]
    kSp0 = ksetDict["kSp0"]
    kFp0 = ksetDict["kFp0"]
    kF0 = ksetDict["kF0"]
    kFpi0 = ksetDict["kFpi0"]
    kFGp = ksetDict["kFGp"]
    kFG = ksetDict["kFG"]
    ketaF = ksetDict["ketaF"]
    kFsG = ksetDict["kFsG"]
    kFsGp = ksetDict["kFsGp"]
    kFsp = ksetDict["kFsp"]
    kFs = ksetDict["kFs"]
    kpiRF = ksetDict["kpiRF"]
    kpiRH = ksetDict["kpiRH"]
    kpiHP = ksetDict["kpiHP"]
    ketaHP = ksetDict["ketaHP"]
    ketaRF = ksetDict["ketaRF"]
    ketaRHP = ksetDict["ketaRHP"]

    n1 = REACparamSet["n1"]
    n2 = REACparamSet["n2"]
    P0const = REACparamSet["P0const"]
    I = REACparamSet["I"]
    T0const = REACparamSet["T0const"]
    piRFconst = REACparamSet["piRFconst"]
    piRHPconst = REACparamSet["piRHPconst"]

    PplusPpCONST = IC_PplusPp  # (microM) Initial Condition protein P
    SplusSsCONST = IC_SplusSs  # (microM) Initial Condition stress kinase S
    GplusFsGplusFG = IC_GplusFsGplusFG  # (microM) Initial Condition gene G

    G = GplusFsGplusFG - FsG - FG

    system = [
        #nuP(Ph, HP, kP0) - nuPp(P, t, kP0p, n1, T0const, TparamSet, DirectControlnuPp),  # P
        - nuP(Ph, HP, kP0) + nuPp(PplusPpCONST - Ph, t, kP0p, n1, T0const, TparamSet, DirectControlnuPp),  # Ph
        #nuS(Ss, kS) - nuSp(S, Ph, kSp0, n2, P0const),  # S
        - nuS(Ss, kS) + nuSp(SplusSsCONST - Ss, Ph, kSp0, n2, P0const),  # Ss
        nuF(I, Fs, kF0) + piF(RF, kFpi0) + nuFGp(FG, kFGp) - nuFG(G, F, kFG) - nuFp(F, Ss, kFp0) - etaF(F, ketaF),  # F
        - nuF(I, Fs, kF0) + nuFp(F, Ss, kFp0) + nuFsGp(FsG, kFsGp) - nuFsG(G, Fs, kFsG),  # Fs
        #nuFsGp(FsG, kFsGp) + nuFGp(FG, kFGp) - nuFG(G, F, kFG) - nuFsG(G, Fs, kFsG),  # G
        nuFsG(G, Fs, kFsG) + nuFs(FG, kFs) - nuFsp(FsG, I, kFsp) - nuFsGp(FsG, kFsGp),  # FsG
        nuFsp(FsG, I, kFsp) + nuFG(G, F, kFG) - nuFGp(FG, kFGp) - nuFs(FG, kFs),  # FG
        piRF(FsG, kpiRF) + piRFAddConst(piRFconst) - etaRF(RF, ketaRF),  # RF  Added const to Alex model
        piRHP(FsG, kpiRH) + piRHPAddConst(piRHPconst) - etaRHP(RHP, ketaRHP),  # RHP  Added const to Alex model
        piHP(RHP, kpiHP) - etaHP(HP, ketaHP)]  # HP
    # Note the presence of nuFG() in the F line, of nuFsG() in the Fs line, and the absence of pi in the FsG line.

    return system
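# Usage sketch: wiring f10eqs into a SciPy integrator. The time span, the
# zero initial conditions and the parameter dictionaries below are
# placeholders; real values come from the model configuration:
#
#   from scipy.integrate import solve_ivp
#
#   y0 = [0.0] * 10  # [Ph, Ss, F, Fs, G, FsG, FG, RF, RHP, HP]
#   rhs = lambda t, y: f10eqs(t, y, ksetDict, TparamSet, REACparamSet,
#                             DirectControlnuPp, IC_PplusPp, IC_SplusSs)
#   sol = solve_ivp(rhs, (0.0, 3600.0), y0, method='LSODA')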
#!/usr/bin/env python3
#########################################################################
# File Name: mthreading.py
# Author: ly
# Created Time: Wed 05 Jul 2017 08:46:57 PM CST
# Description:
#########################################################################
# -*- coding: utf-8 -*-

import time
import threading

def play(name,count):
    for i in range(1,count):
        print('%s %d in %d' %(name, i, count))
        time.sleep(1)
    return

if __name__=='__main__':
    t1=threading.Thread(target=play, args=('t1',10))
    # Mark as a daemon thread
    t1.setDaemon(True)
    t1.start()
    print("main")
    # Wait for the child thread to finish
    t1.join()
    exit(1)
# coding=utf-8 """TV base class.""" from __future__ import unicode_literals import threading from builtins import object from medusa.indexers.config import INDEXER_TVDBV2 class Identifier(object): """Base identifier class.""" def __bool__(self): """Magic method.""" raise NotImplementedError def __ne__(self, other): """Magic method.""" return not self == other class TV(object): """Base class for Series and Episode.""" def __init__(self, indexe
r, indexerid, ignored_properties): """Initialize class. :param indexer: :type indexer: int :param indexerid: :type indexerid: int :param ignored_properties: :type ignored_properties: set(str) """ self.__dirty = True self.__ignored_properties = ignored_properties |
{'lock'} self.indexer = int(indexer) self.indexerid = int(indexerid) self.lock = threading.Lock() @property def series_id(self): """To make a clear distinction between an indexer and the id for the series. You can now also use series_id.""" return self.indexerid def __setattr__(self, key, value): """Set the corresponding attribute and use the dirty flag if the new value is different from the old value. :param key: :type key: str :param value: """ if key == '_location' or (not key.startswith('_') and key not in self.__ignored_properties): self.__dirty |= self.__dict__.get(key) != value super(TV, self).__setattr__(key, value) @property def dirty(self): """Return the dirty flag. :return: :rtype: bool """ return self.__dirty def reset_dirty(self): """Reset the dirty flag.""" self.__dirty = False @property def tvdb_id(self): """Get the item's tvdb_id.""" if self.indexerid and self.indexer == INDEXER_TVDBV2: return self.indexerid def __getstate__(self): """Make object serializable.""" d = dict(self.__dict__) del d['lock'] return d def __setstate__(self, d): """Un-serialize the object.""" d['lock'] = threading.Lock() self.__dict__.update(d)
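# Usage sketch of the dirty-flag behaviour implemented by __setattr__ above
# (the Series subclass is invented for illustration):
#
#   class Series(TV):
#       def __init__(self, indexer, indexerid):
#           super(Series, self).__init__(indexer, indexerid, {'name'})
#           self.name = ''
#
#   s = Series(INDEXER_TVDBV2, 12345)
#   s.reset_dirty()
#   s.name = 'Foo'        # 'name' is in ignored_properties, dirty stays False
#   s.indexerid = 67890   # tracked attribute changed, dirty becomes True
#   assert s.dirty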
sky, swidth=source.apcor.swidth, apcor=source.apcor.apcor, zmag=source.zmag, maxcount=30000, extno=0) return observations def match_planted(fk_candidate_observations, match_filename, bright_limit=BRIGHT_LIMIT, object_planted=OBJECT_PLANTED, minimum_bright_detections=MINIMUM_BRIGHT_DETECTIONS, bright_fraction=MINIMUM_BRIGHT_FRACTION): """ Using the fk_candidate_observations as input get the Object.planted file from VOSpace and match planted sources with found sources. The Object.planted list is pulled from VOSpace based on the standard file-layout and name of the first exposure as read from the .astrom file. :param fk_candidate_observations: name of the fk*reals.astrom file to check against Object.planted :param match_filename: a file that will contain a list of all planted sources and the matched found source @param minimum_bright_detections: if there are too few bright detections we raise an error. """ found_pos = [] detections = fk_candidate_observations.get_sources() for detection in detections: reading = detection.get_reading(0) # create a list of positions, to be used later by match_lists found_pos.append([reading.x, reading.y]) # Now get the Object.planted file, either from the local FS or from VOSpace. objects_planted_uri = object_planted if not os.access(objects_planted_uri, os.F_OK): objects_planted_uri = fk_candidate_observations.observations[0].get_object_planted_uri() try: lines = storage.open_vos_or_local(objects_planted_uri) lines = lines.read().decode('utf-8') except Exception as ex: logging.critical(f'{ex}') print(lines) raise ex # we are changing the format of the Object.planted header to be compatible with astropy.io.ascii but # there are some old Object.planted files out there so we do these string/replace calls to reset those. new_lines = lines.replace("pix rate", "pix_rate") new_lines = new_lines.replace("""''/h rate""", "sky_rate") planted_objects_table = ascii.read(new_lines, header_start=-1, data_start=0) planted_objects_table.meta = None # The match_list method expects a list that contains a position, not an x and a y vector, so we transpose. planted_pos = numpy.transpose([planted_objects_table['x'].data, planted_objects_table['y'].data]) # match_idx is an order list. The list is in the order of the first list of positions and each entry # is the index of the matching position from the second list. (match_idx, match_fnd) = util.match_lists(numpy.array(planted_pos), numpy.array(found_pos)) assert isinstance(match_idx, numpy.ma.MaskedArray) assert isinstance(match_fnd, numpy.ma.MaskedArray) false_positives_table = Table() # Once we've matched the two lists we'll need some new columns to store the information in. # these are masked columns so that object.planted entries that have no detected match are left 'blank'. 
new_columns = [MaskedColumn(name="measure_x", length=len(planted_objects_table), mask=True), MaskedColumn(name="measure_y", length=len(planted_objects_table), mask=True), MaskedColumn(name="measure_rate", length=len(planted_objects_table), mask=True), MaskedColumn(name="measure_angle", length=len(planted_objects_table), mask=True), MaskedColumn(name="measure_mag1", length=len(planted_objects_table), mask=True), MaskedColumn(name="measure_merr1", length=len(planted_objects_table), mask=True), MaskedColumn(name="measure_mag2", length=len(planted_objects_table), mask=True), MaskedColumn(name="measure_merr2", length=len(planted_objects_table), mask=True), MaskedColumn(name="measure_mag3", length=len(planted_objects_table), mask=True), MaskedColumn(name="measure_merr3", length=len(planted_objects_table), mask=True)] planted_objects_table.add_columns(new_columns) tlength = 0 new_columns = [MaskedColumn(name="measure_x", length=tlength, mask=True), MaskedColumn(name="measure_y", length=tlength, mask=True), MaskedColumn(name="measure_rate", length=0, mask=True), MaskedColumn(name="measure_angle", length=0, mask=T
rue), MaskedColumn(name="measure_mag1", length=0, mask=True), MaskedColumn(name="measure_merr1", length=0, mask=True), MaskedColumn(name="measure_mag2", length=0, mask=True), MaskedColumn(name="measure_merr2", length=0, mask=True), MaskedColumn(name="measure_mag3", length=tlength, mask=True), MaskedColumn(name="measure_merr3", length=tlength, mask=True)] false_positives_table.add_c
olumns(new_columns) # We do some 'checks' on the Object.planted match to diagnose pipeline issues. Those checks are made using just # those planted sources we should have detected. bright = planted_objects_table['mag'] < bright_limit n_bright_planted = numpy.count_nonzero(planted_objects_table['mag'][bright]) measures = [] idxs = [] for idx in range(len(match_idx)): # The match_idx value is False if nothing was found. if not match_idx.mask[idx]: # Each 'source' has multiple 'readings' measures.append(detections[match_idx[idx]].get_readings()) idxs.append(idx) observations = measure_mags(measures) for oidx in range(len(measures)): idx = idxs[oidx] readings = measures[oidx] start_jd = Time(readings[0].obs.header['MJD_OBS_CENTER'], format='mpc', scale='utc').jd end_jd = Time(readings[-1].obs.header['MJD_OBS_CENTER'], format='mpc', scale='utc').jd rate = math.sqrt((readings[-1].x - readings[0].x) ** 2 + (readings[-1].y - readings[0].y) ** 2) / ( 24 * (end_jd - start_jd)) rate = int(rate * 100) / 100.0 angle = math.degrees(math.atan2(readings[-1].y - readings[0].y, readings[-1].x - readings[0].x)) angle = int(angle * 100) / 100.0 planted_objects_table[idx]['measure_rate'] = rate planted_objects_table[idx]['measure_angle'] = angle planted_objects_table[idx]['measure_x'] = observations[readings[0].obs]['mags']["XCENTER"][oidx] planted_objects_table[idx]['measure_y'] = observations[readings[0].obs]['mags']["YCENTER"][oidx] for ridx in range(len(readings)): reading = readings[ridx] mags = observations[reading.obs]['mags'] planted_objects_table[idx]['measure_mag{}'.format(ridx+1)] = mags["MAG"][oidx] planted_objects_table[idx]['measure_merr{}'.format(ridx+1)] = mags["MERR"][oidx] # for idx in range(len(match_fnd)): # if match_fnd.mask[idx]: # measures = detections[idx].get_readings() # false_positives_table.add_row() # false_positives_table[-1] = measure_mags(measures, false_positives_table[-1]) # Count an object as detected if it has a measured magnitude in the first frame of the triplet. n_bright_found = numpy.count_nonzero(planted_objects_table['measure_mag1'][bright]) # Also compute the offset and standard deviation of the measured magnitude from that planted ones. offset = numpy.mean(planted_objects_table['mag'][bright] - planted_objects_table['measure_mag1'][bright]) try: offset = "{:5.2f}".format(offset) except: offset = "indef" std = numpy.std(planted_objects_table['mag'][bright] - planted_objects_table['measure_mag1'][bright]) try: std = "{:5.2f}".format(std) except: std = "indef" if os.access(match_filename, os.R_OK): fout = open(match_filename, 'a') else:
= self.IOBinding(self) io.set_filename_change_hook(self.filename_change_hook) # Create the recent files submenu self.recent_files_menu = Menu(self.menubar) self.menudict['file'].insert_cascade(3, label='Recent Files', underline=0, menu=self.recent_files_menu) self.update_recent_files_list() self.color = None # initialized below in self.ResetColorizer if filename: if o
s.path.exists(filename) and not os.path.isdir(filename):
io.loadfile(filename) else: io.set_filename(filename) self.ResetColorizer() self.saved_change_hook() self.set_indentation_params(self.ispythonsource(filename)) self.load_extensions() menu = self.menudict.get('windows') if menu: end = menu.index("end") if end is None: end = -1 if end >= 0: menu.add_separator() end = end + 1 self.wmenu_end = end WindowList.register_callback(self.postwindowsmenu) # Some abstractions so IDLE extensions are cross-IDE self.askyesno = tkMessageBox.askyesno self.askinteger = tkSimpleDialog.askinteger self.showerror = tkMessageBox.showerror self._highlight_workaround() # Fix selection tags on Windows def _highlight_workaround(self): # On Windows, Tk removes painting of the selection # tags which is different behavior than on Linux and Mac. # See issue14146 for more information. if not sys.platform.startswith('win'): return text = self.text text.event_add("<<Highlight-FocusOut>>", "<FocusOut>") text.event_add("<<Highlight-FocusIn>>", "<FocusIn>") def highlight_fix(focus): sel_range = text.tag_ranges("sel") if sel_range: if focus == 'out': HILITE_CONFIG = idleConf.GetHighlight( idleConf.CurrentTheme(), 'hilite') text.tag_config("sel_fix", HILITE_CONFIG) text.tag_raise("sel_fix") text.tag_add("sel_fix", *sel_range) elif focus == 'in': text.tag_remove("sel_fix", "1.0", "end") text.bind("<<Highlight-FocusOut>>", lambda ev: highlight_fix("out")) text.bind("<<Highlight-FocusIn>>", lambda ev: highlight_fix("in")) def _filename_to_unicode(self, filename): """convert filename to unicode in order to display it in Tk""" if isinstance(filename, unicode) or not filename: return filename else: try: return filename.decode(self.filesystemencoding) except UnicodeDecodeError: # XXX try: return filename.decode(self.encoding) except UnicodeDecodeError: # byte-to-byte conversion return filename.decode('iso8859-1') def new_callback(self, event): dirname, basename = self.io.defaultfilename() self.flist.new(dirname) return "break" def home_callback(self, event): if (event.state & 4) != 0 and event.keysym == "Home": # state&4==Control. If <Control-Home>, use the Tk binding. 
return if self.text.index("iomark") and \ self.text.compare("iomark", "<=", "insert lineend") and \ self.text.compare("insert linestart", "<=", "iomark"): # In Shell on input line, go to just after prompt insertpt = int(self.text.index("iomark").split(".")[1]) else: line = self.text.get("insert linestart", "insert lineend") for insertpt in xrange(len(line)): if line[insertpt] not in (' ','\t'): break else: insertpt=len(line) lineat = int(self.text.index("insert").split('.')[1]) if insertpt == lineat: insertpt = 0 dest = "insert linestart+"+str(insertpt)+"c" if (event.state&1) == 0: # shift was not pressed self.text.tag_remove("sel", "1.0", "end") else: if not self.text.index("sel.first"): self.text.mark_set("my_anchor", "insert") # there was no previous selection else: if self.text.compare(self.text.index("sel.first"), "<", self.text.index("insert")): self.text.mark_set("my_anchor", "sel.first") # extend back else: self.text.mark_set("my_anchor", "sel.last") # extend forward first = self.text.index(dest) last = self.text.index("my_anchor") if self.text.compare(first,">",last): first,last = last,first self.text.tag_remove("sel", "1.0", "end") self.text.tag_add("sel", first, last) self.text.mark_set("insert", dest) self.text.see("insert") return "break" def set_status_bar(self): self.status_bar = self.MultiStatusBar(self.top) if sys.platform == "darwin": # Insert some padding to avoid obscuring some of the statusbar # by the resize widget. self.status_bar.set_label('_padding1', ' ', side=RIGHT) self.status_bar.set_label('column', 'Col: ?', side=RIGHT) self.status_bar.set_label('line', 'Ln: ?', side=RIGHT) self.status_bar.pack(side=BOTTOM, fill=X) self.text.bind("<<set-line-and-column>>", self.set_line_and_column) self.text.event_add("<<set-line-and-column>>", "<KeyRelease>", "<ButtonRelease>") self.text.after_idle(self.set_line_and_column) def set_line_and_column(self, event=None): line, column = self.text.index(INSERT).split('.') self.status_bar.set_label('column', 'Col: %s' % column) self.status_bar.set_label('line', 'Ln: %s' % line) menu_specs = [ ("file", "_File"), ("edit", "_Edit"), ("format", "F_ormat"), ("run", "_Run"), ("options", "_Options"), ("windows", "_Windows"), ("help", "_Help"), ] if sys.platform == "darwin": menu_specs[-2] = ("windows", "_Window") def createmenubar(self): mbar = self.menubar self.menudict = menudict = {} for name, label in self.menu_specs: underline, label = prepstr(label) menudict[name] = menu = Menu(mbar, name=name) mbar.add_cascade(label=label, menu=menu, underline=underline) if macosxSupport.isCarbonTk(): # Insert the application menu menudict['application'] = menu = Menu(mbar, name='apple') mbar.add_cascade(label='IDLE', menu=menu) self.fill_menus() self.base_helpmenu_length = self.menudict['help'].index(END) self.reset_help_menu_entries() def postwindowsmenu(self): # Only called when Windows menu exists menu = self.menudict['windows'] end = menu.index("end") if end is None: end = -1 if end > self.wmenu_end: menu.delete(self.wmenu_end+1, end) WindowList.add_windows_to_menu(menu) rmenu = None def right_menu_event(self, event): self.text.mark_set("insert", "@%d,%d" % (event.x, event.y)) if not self.rmenu: self.make_rmenu() rmenu = self.rmenu self.event = event iswin = sys.platform[:3] == 'win' if iswin: self.text.config(cursor="arrow") for item in self.rmenu_specs: try: label, eventname, verify_state = item except ValueError: # see issue1207589 continue if verify_state is None: continue state = getattr(self, verify_state)() rmenu.entryconfigure(label, 
state=state)
"""Implements a HD44780 character LCD connected via PCF8574 on I2C. This was tested with: https://www.wemos.cc/product/d1-mini.html""" from time import sleep_ms, ticks_ms from machine import I2C, Pin from esp8266_i2c_lcd import I2cLcd # The PCF8574 has a jumper selectable address: 0x20 - 0x27 DEFAULT_I2C_ADDR = 0x27 def test_
main(): """Test function for verifying basic functionality.""" print("Running test_main") i2c = I2C(scl=Pin(5), sda=Pin(4), freq=100000) lcd = I2cLcd(i2c, DEFAULT_I2C_ADDR, 2, 16) lcd.putstr("It Works!\nSecond Line") sleep_m
s(3000) lcd.clear() count = 0 while True: lcd.move_to(0, 0) lcd.putstr("%7d" % (ticks_ms() // 1000)) sleep_ms(1000) count += 1 if count % 10 == 3: print("Turning backlight off") lcd.backlight_off() if count % 10 == 4: print("Turning backlight on") lcd.backlight_on() if count % 10 == 5: print("Turning display off") lcd.display_off() if count % 10 == 6: print("Turning display on") lcd.display_on() if count % 10 == 7: print("Turning display & backlight off") lcd.backlight_off() lcd.display_off() if count % 10 == 8: print("Turning display & backlight on") lcd.backlight_on() lcd.display_on() #if __name__ == "__main__": test_main()
from jinja2 import Markup class momentjs(object): def __init__(self, timestamp): self.timestamp = timestamp def render(self, format): return Markup("<script>\ndocument.write(moment(\"%s\").%s);\n</s
cript>" % (self.timestamp.strftime("%Y-%m-%dT%H:%M:%S Z"), format)) def format(self, fmt): return self.render("format(\"%s\")" % fmt) def calendar(self): return self.render("calendar()") def fro
mNow(self): return self.render("fromNow()")
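# Usage sketch: typical Flask/Jinja2 wiring for this helper (the `app` object
# is an assumption, and moment.js must be loaded on the page for the emitted
# document.write(moment(...)) call to work):
#
#   app.jinja_env.globals['momentjs'] = momentjs
#
#   # in a template:
#   #   {{ momentjs(post.timestamp).calendar() }}
#   #   {{ momentjs(post.timestamp).fromNow() }}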
port sessions from plaso.engine import knowledge_base from plaso.formatters import manager as formatters_manager from plaso.formatters import mediator as formatters_mediator from plaso.parsers import interface from plaso.parsers import mediator from plaso.storage import fake_storage from tests import test_lib as shared_test_lib class _EventsHeap(object): """Events heap.""" def __init__(self): """Initializes an events heap.""" super(_EventsHeap, self).__init__() self._heap = [] def PopEvent(self): """Pops an event from the heap. Returns: EventObject: event. """ try: _, _, _, event = heapq.heappop(self._heap) return event except IndexError: return None def PopEvents(self): """Pops events from the heap. Yields: EventObject: event. """ event = self.PopEvent() while event: yield event event = self.PopEvent() def PushEvent(self, event): """Pushes an event onto the heap. Args: event (EventObject): event. """ # TODO: replace this work-around for an event "comparable". event_values = event.CopyToDict() attributes = [] for attribute_name, attribute_value in sorted(event_values.items()): if isinstance(attribute_value, dict): attribute_value = sorted(attribute_value.items()) comparable = u'{0:s}: {1!s}'.format(attribute_name, attribute_value) attributes.append(comparable) comparable = u', '.join(attributes) event_values = sorted(event.CopyToDict().items()) heap_values = (event.timestamp, event.timestamp_desc, comparable, event) heapq.heappush(self._heap, heap_values) def PushEvents(self, events): """Pushes events onto the heap. Args: events list[EventObject]: events. """ for event in events: self.PushEvent(event) class ParserTestCase(shared_test_lib.BaseTestCase): """Parser test case.""" def _CreateParserMediator( self, storage_writer, file_entry=None, knowledge_base_values=None, parser_chain=None, timezone=u'UTC'): """Creates a parser mediator. Args: storage_writer (StorageWriter): storage writer. file_entry (Optional[dfvfs.FileEntry]): file entry object being parsed. knowledge_base_values (Optional[dict]): knowledge base values. parser_chain (Optional[str]): parsing chain up to this point. timezone (str): timezone. Returns: ParserMediator: parser mediator. """ knowledge_base_object = knowledge_base.KnowledgeBase() if knowledge_base_values: for identifier, value
in iter(knowledge_base_values.items()): knowledge_base_object.SetValue(identifier, value) knowledge_base_object.SetTimezon
e(timezone)

    parser_mediator = mediator.ParserMediator(
        storage_writer, knowledge_base_object)

    if file_entry:
      parser_mediator.SetFileEntry(file_entry)

    if parser_chain:
      parser_mediator.parser_chain = parser_chain

    return parser_mediator

  def _CreateStorageWriter(self):
    """Creates a storage writer object.

    Returns:
      FakeStorageWriter: storage writer.
    """
    session = sessions.Session()
    storage_writer = fake_storage.FakeStorageWriter(session)
    storage_writer.Open()
    return storage_writer

  def _GetSortedEvents(self, events):
    """Retrieves events sorted in a deterministic order.

    Args:
      events (list[EventObject]): events.

    Returns:
      list[EventObject]: sorted events.
    """
    events_heap = _EventsHeap()
    events_heap.PushEvents(events)
    return list(events_heap.PopEvents())

  def _GetShortMessage(self, message_string):
    """Shortens a message string to a maximum of 80 character width.

    Args:
      message_string (str): message string.

    Returns:
      str: short message string; if it is longer than 80 characters it will
          be shortened to its first 77 characters followed by a "...".
    """
    if len(message_string) > 80:
      return u'{0:s}...'.format(message_string[0:77])

    return message_string

  def _ParseFile(
      self, path_segments, parser, knowledge_base_values=None,
      timezone=u'UTC'):
    """Parses a file with a parser and writes results to a storage writer.

    Args:
      path_segments (list[str]): path segments inside the test data directory.
      parser (BaseParser): parser.
      knowledge_base_values (Optional[dict]): knowledge base values.
      timezone (str): timezone.

    Returns:
      FakeStorageWriter: storage writer.
    """
    path = self._GetTestFilePath(path_segments)
    path_spec = path_spec_factory.Factory.NewPathSpec(
        dfvfs_definitions.TYPE_INDICATOR_OS, location=path)
    return self._ParseFileByPathSpec(
        path_spec, parser, knowledge_base_values=knowledge_base_values,
        timezone=timezone)

  def _ParseFileByPathSpec(
      self, path_spec, parser, knowledge_base_values=None, timezone=u'UTC'):
    """Parses a file with a parser and writes results to a storage writer.

    Args:
      path_spec (dfvfs.PathSpec): path specification.
      parser (BaseParser): parser.
      knowledge_base_values (Optional[dict]): knowledge base values.
      timezone (str): timezone.

    Returns:
      FakeStorageWriter: storage writer.
    """
    storage_writer = self._CreateStorageWriter()
    file_entry = path_spec_resolver.Resolver.OpenFileEntry(path_spec)
    parser_mediator = self._CreateParserMediator(
        storage_writer, file_entry=file_entry,
        knowledge_base_values=knowledge_base_values, timezone=timezone)

    if isinstance(parser, interface.FileEntryParser):
      parser.Parse(parser_mediator)

    elif isinstance(parser, interface.FileObjectParser):
      file_object = file_entry.GetFileObject()
      try:
        parser.Parse(parser_mediator, file_object)
      finally:
        file_object.close()

    else:
      self.fail(u'Got unsupported parser type: {0:s}'.format(type(parser)))

    return storage_writer

  def _TestGetMessageStrings(
      self, event, expected_message, expected_message_short):
    """Tests the formatting of the message strings.

    This function invokes the GetMessageStrings function of the event
    formatter on the event object and compares the resulting message
    strings with those expected.

    Args:
      event (EventObject): event.
      expected_message (str): expected message string.
      expected_message_short (str): expected short message string.
    """
    formatter_mediator = formatters_mediator.FormatterMediator(
        data_location=self._DATA_PATH)

    message, message_short = (
        formatters_manager.FormattersManager.GetMessageStrings(
            formatter_mediator, event))
    self.assertEqual(message, expected_message)
    self.assertEqual(message_short, expected_message_short)

  def _TestGetSourceStrings(
      self, event, expected_source, expected_source_short):
    """Tests the formatting of the source strings.

    This function invokes the GetSourceStrings function of the event
    formatter on the event object and compares the resulting source
    strings with those expected.

    Args:
      event (EventObject): event.
      expected_source (str): expected source string.
      expected_source_short (str): expected short source string.
    """
    # TODO: change this to return the long variant first so it is consistent
    # with GetMessageStrings.
    source_short, source = (
        formatters_manager.FormattersManager.GetSourceStrings(event))
    self.assertEqual(source, expected_source)
    self.assertEqual(source_short, expected_source_short)

  def assertDictContains(self, received, expected):
    """Asserts that a dictionary contains every expected key-value pair.

    Received can contain new keys. If any value is a dict, this function is
    called recursively.

    Args:
      received (dict): received dictionary.
      expected (dict): expected dictionary.
    """
    for key, value in expected.items():
      self.assertIn(key, received)
      if isinstance(value, dict):
        self.assertDictEqual(received[key], expected[key])
      else:
        self.assertEqual(v
from setuptools import setup, find_packages setup(name='MO
DEL1201230000', version='20140916', description='MODEL1201230000 from BioModels', url='http://www.ebi.ac.uk/biomodels-main/MODEL1201230000', maintainer='Stanley Gu', maintainer_email='stanleygu@gmail.com', packages=find_packages(), package_data={
'': ['*.xml', 'README.md']}, )
bTopMarginF', 'lgAutoManage', 'lgBottomMarginF', 'lgBoxBackground', 'lgBoxLineColor', 'lgBoxLineDashPattern', 'lgBoxLineDashSegLenF', 'lgBoxLineThicknessF', 'lgBoxLinesOn', 'lgBoxMajorExtentF', 'lgBoxMinorExtentF', 'lgDashIndex', 'lgDashIndexes', 'lgItemCount', 'lgItemOrder', 'lgItemPlacement', 'lgItemPositions', 'lgItemType', 'lgItemTypes', 'lgJustification', 'lgLabelAlignment', 'lgLabelAngleF', 'lgLabelAutoStride', 'lgLabelConstantSpacingF', 'lgLabelDirection', 'lgLabelFont', 'lgLabelFontAspectF', 'lgLabelFontColor', 'lgLabelFontHeightF', 'lgLabelFontQuality', 'lgLabelFontThicknessF', 'lgLabelFuncCode', 'lgLabelJust', 'lgLabelOffsetF', 'lgLabelPosition', 'lgLabelStride', 'lgLabelStrings', 'lgLabelsOn', 'lgLeftMarginF', 'lgLegendOn', 'lgLineColor', 'lgLineColors', 'lgLineDashSegLenF', 'lgLineDashSegLens', 'lgLineLabelConstantSpacingF', 'lgLineLabelFont', 'lgLineLabelFontAspectF', 'lgLineLabelFontColor', 'lgLineLabelFontColors', 'lgLineLabelFontHeightF', 'lgLineLabelFontHeights', 'lgLineLabelFontQuality', 'lgLineLabelFontThicknessF', 'lgLineLabelFuncCode', 'lgLineLabelStrings', 'lgLineLabelsOn', 'lgLineThicknessF', 'lgLineThicknesses', 'lgMarkerColor', 'lgMarkerColors', 'lgMarkerIndex', 'lgMarkerIndexes', 'lgMarkerSizeF', 'lgMarkerSizes', 'lgMarkerThicknessF', 'lgMarkerThicknesses', 'lgMonoDashIndex', 'lgMonoItemType', 'lgMonoLineColor', 'lgMonoLineDashSegLen', 'lgMonoLineLabelFontColor', 'lgMonoLineLabelFontHeight', 'lgMonoLineThickness', 'lgMonoMarkerColor', 'lgMonoMarkerIndex', 'lgMonoMarkerSize', 'lgMonoMarkerThickness', 'lgOrientation', 'lgPerimColor', 'lgPerimDashPattern', 'lgPerimDashSegLenF', 'lgPerimFill', 'lgPerimFillColor', 'lgPerimOn', 'lgPerimThicknessF', 'lgRightMarginF', 'lgTitleAngleF', 'lgTitleConstantSpacingF', 'lgTitleDirection', 'lgTitleExtentF', 'lgTitleFont', 'lgTitleFontAspectF', 'lgTitleFontColor', 'lgTitleFontHeightF', 'lgTitleFontQuality', 'lgTitleFontThicknessF', 'lgTitleFuncCode', 'lgTitleJust', 'lgTitleOffsetF', 'lgTitleOn', 'lgTitlePosition', 'lgTitleString', 'lgTopMarginF', 'mpAreaGroupCount', 'mpAreaMaskingOn', 'mpAreaNames', 'mpAreaTypes', 'mpBottomAngleF', 'mpBottomMapPosF', 'mpBottomNDCF', 'mpBottomNPCF', 'mpBottomPointLatF', 'mpBottomPointLonF', 'mpBottomWindowF', 'mpCenterLatF', 'mpCenterLonF', 'mpCenterRotF', 'mpCountyLineColor', 'mpCountyLineDashPattern', 'mpCountyLineDashSegLenF', 'mpCountyLineThicknessF', 'mpDataBaseVersion', 'mpDataResolution', 'mpDataSetName', 'mpDefaultFillColor', 'mpDefaultFillPattern', 'mpDefaultFillScaleF', 'mpDynamicAreaGroups', 'mpEllipticalBoundary', 'mpFillAreaSpecifiers', 'mpFillBoundarySets', 'mpFillColor', 'mpFillColors', 'mpFillColors-default', 'mpFillDotSizeF', 'mpFillDrawOrder', 'mpFillOn', 'mpFillPatternBackground', 'mpFillPattern', 'mpFillPatterns', 'mpFillPatterns-default', 'mpFillScaleF', 'mpFillScales', 'mpFillScales-default', 'mpFixedAreaGroups', 'mpGeophysicalLineColor', 'mpGeophysicalLineDashPattern', 'mpGeophysicalLineDashSegLenF', 'mpGeophysicalLineThicknessF', 'mpGreatCircleLinesOn', 'mpGridAndLimbDrawOrder', 'mpGridAndLimbOn', 'mpGridLatSpacingF', 'mpGridLineColor', 'mpGridLineDashPattern', 'mpGridLineDashSegLenF', 'mpGridLineThicknessF', 'mpGridLonSpacingF', 'mpGridMaskMode', 'mpGridMaxLatF', 'mpGridPolarLonSpacingF', 'mpGridSpacingF', 'mpInlandWaterFillColor', 'mpInlandWaterFillPattern', 'mpInlandWaterFillScaleF', 'mpLabelDrawOrder', 'mpLabelFontColor', 'mpLabelFontHeightF', 'mpLabelsOn', 'mpLambertMeridianF', 'mpLambertParallel1F', 'mpLambertParallel2F', 'mpLandFillColor', 'mpLandFillPattern', 
'mpLandFillScaleF', 'mpLeftAngleF', 'mpLeftCornerLatF', 'mpLeftCornerLonF', 'mpLeftMapPosF', 'mpLeftNDCF', 'mpLeftNPCF', 'mpLeftPointLatF', 'mpLeftPointLonF', 'mpLeftWindowF', 'mpLimbLineColor', 'mpLimbLineDashPattern', 'mpLimbLineDashSegLenF', 'mpLimbLineThicknessF', 'mpLimitMode', 'mpMaskAreaSpecifiers', 'mpMaskOutlineSpecifiers', 'mpMaxLatF', 'mpMaxLonF', 'mpMinLatF', 'mpMinLonF', 'mpMonoFillColor', 'mpMonoFillPattern', 'mpMonoFillScale', 'mpNationalLineColor', 'mpNationalLineDashPattern', 'mpNationalLineThicknessF', 'mpOceanFillColor', 'mpOceanFillPattern', 'mpOceanFillScaleF', 'mpOutlineBoundarySets', 'mpOutlineDrawOrder', 'mpOutlineMaskingOn', 'mpOutlineOn', 'mpOutlineSpecifiers', 'mpPerimDrawOrder', 'mpPerimLineColor', 'mpPerimLineDashPattern', 'mpPerimLineDashSegLenF', 'mpPerimLineThicknessF', 'mpPerimOn', 'mpPolyMode', 'mpProjection', 'mpProvincialLineColor', 'mpProvincialLineDashPattern', 'mpProvincialLineDashSegLenF', 'mpProvincialLineThicknessF', 'mpRelativeCenterLat', 'mpRelativeCenterLon', 'mpRightAngleF', 'mpRightCornerLatF', 'mpRightCornerLonF', 'mpRightMapPosF', 'mpRightNDCF', 'mpRightNPCF', 'mpRightPointLatF', 'mpRightPointLonF', 'mpRightWindowF', 'mpSatelliteAngle1F', 'mpSatelliteAngle2F', 'mpSatelliteDistF', 'mpShapeMode', 'mpSpecifiedFillColors', 'mpSpecifiedFillDirectIndexing', 'mpSpecifiedFillPatterns', 'mpSpecifiedFillPriority', 'mpSpecifiedFillScales', 'mpTopAngleF', 'mpTopMapPosF', 'mpTopNDCF', 'mpTopNPCF', 'mpTopPointLatF', 'mpTopPointLonF', 'mpTopWindowF', 'mpUSStateLineColor', 'mpUSStateLineDashPattern', 'mpUSStateLineDashSegLenF', 'mpUSStateLineThicknessF', 'pmAnnoManagers', 'pmAnnoViews', 'pmLabelBarDisplayMode', 'pmLabelBarHeightF', 'pmLabelBarKeepAspect', 'pmLabelBarOrthogonalPosF', 'pmLabelBarParallelPosF', 'pmLabelBarSide', 'pmLabelBarWidthF', 'pmLabelBarZone', 'pmLegendDisplayMode', 'pmLegendHeightF', 'pmLegendKeepAspect', 'pmLegendOrthogonalPosF', 'pmLegendParallelPosF', 'pmLegendSide', 'pmLegendWidthF', 'pmLegendZone', 'pmOverlaySequenceIds', 'pmTickMarkDisplayMode', 'pmTickMarkZone', 'pmTitleDisplayMode', 'pmTitleZone', 'prGraphicStyle', 'prPolyType', 'prXArray', 'prYArray', 'sfCopyData', 'sfDataArray', 'sfDataMaxV', 'sfDataMinV', 'sfElementNodes', 'sfExchangeDimensions', 'sfFirstNodeIndex',
'sfMissingValueV', 'sfXArray', 'sfXCActualEndF', 'sfXCActualStartF', 'sfXCEndIndex', 'sfXCEndSubsetV', 'sfXCEndV', 'sfXCStartIndex', 'sfXCStartSubsetV', 'sfXCStartV', 'sfXCStride', 'sfXCellBounds', 'sfYArray', 'sfYCActualEndF', 'sfYCActualStartF', 'sfYCEndIndex', 'sfYCEndSubsetV', 'sfYCEndV', 'sfYCStartIndex', 'sfYCStartSubsetV',
'sfYCStartV', 'sfYCStride', 'sfYCellBounds', 'stArrowLengthF', 'stArrowStride', 'stCrossoverCheckCount', 'stExplicitLabelBarLabelsOn', 'stLabelBarEndLabelsOn', 'stLabelFormat', 'stLengthCheckCount', 'stLevelColors', 'stLevelCount', 'stLevelPalette', 'stLevelSelectionMode', 'stLevelSpacingF', 'stLevels', 'stLineColor',
same name; this helps avoid confusion by being flexible). """ # pep-8 naming exception -- this is a decorator class def __init__(self, exc): self._exc = exc self._ctx = None def __call__(self, func): @functools.wraps(func) def run_raises_test(*args, **kwargs): pytest.raises(self._exc, func, *args, **kwargs) return run_raises_test def __enter__(self): self._ctx = pytest.raises(self._exc) return self._ctx.__enter__() def __exit__(self, *exc_info): return self._ctx.__exit__(*exc_info) _deprecations_as_exceptions = False _include_astropy_deprecations = True _modules_to_ignore_on_import = set([ 'compiler', # A deprecated stdlib module used by py.test 'scipy', 'pygments', 'ipykernel', 'setuptools']) _warnings_to_ignore_entire_module = set([]) _warnings_to_ignore_by_pyver = { (3, 4): set([ # py.test reads files with the 'U' flag, which is now # deprecated in Python 3.4. r"'U' mode is deprecated", # BeautifulSoup4 triggers a warning in stdlib's html module. r"The strict argument and mode are deprecated\.", r"The value of convert_charrefs will become True in 3\.5\. " r"You are encouraged to set the value explicitly\."]), (3, 5): set([ # py.test raised this warning in inspect on Python 3.5. # See https://github.com/pytest-dev/pytest/pull/1009 # Keeping it since e.g. lxml as of 3.8.0 is still calling getargspec() r"inspect\.getargspec\(\) is deprecated, use " r"inspect\.signature\(\) instead"]), (3, 6): set([ # inspect raises this slightly different warning on Python 3.6. # Keeping it since e.g. lxml as of 3.8.0 is still calling getargspec() r"inspect\.getargspec\(\) is deprecated, use " r"inspect\.signature\(\) or inspect\.getfullargspec\(\)"])} def enable_deprecations_as_exceptions(include_astropy_deprecations=True, modules_to_ignore_on_import=[], warnings_to_ignore_entire_module=[], warnings_to_ignore_by_pyver={}): """ Turn on the feature that turns deprecations into exceptions. Parameters ---------- include_astropy_deprecations : bool If set to `True`, ``AstropyDeprecationWarning`` and ``AstropyPendingDeprecationWarning`` are also turned into exceptions. modules_to_ignore_on_import : list of str List of additional modules that generate deprecation warnings on import, which are to be ignored. By default, these are already included: ``compiler``, ``scipy``, ``pygments``, ``ipykernel``, and ``setuptools``. warnings_to_ignore_entire_module : list of str List of modules with deprecation warnings to ignore completely, not just during import. If ``include_astropy_deprecations=True`` is given, ``AstropyDeprecationWarning`` and ``AstropyPendingDeprecationWarning`` are also ignored for the modules. warnings_to_ignore_by_pyver : dict Dictionary mapping tuple of ``(major, minor)`` Python version to a list of deprecation warning messages to ignore. This is in addition to those already ignored by default (see ``_warnings_to_ignore_by_pyver``
values). """ global _deprecations_as_exceptions _deprecations_as_exceptions = True global _include_astropy_deprecations _include_astropy_deprecations = include_astropy_deprecations global _modules_to_ignore_on_import _modules_to_ignore_on_import.update(modules_to_ignore_on_import)
global _warnings_to_ignore_entire_module _warnings_to_ignore_entire_module.update(warnings_to_ignore_entire_module) global _warnings_to_ignore_by_pyver for key, val in six.iteritems(warnings_to_ignore_by_pyver): if key in _warnings_to_ignore_by_pyver: _warnings_to_ignore_by_pyver[key].update(val) else: _warnings_to_ignore_by_pyver[key] = set(val) def treat_deprecations_as_exceptions(): """ Turn all DeprecationWarnings (which indicate deprecated uses of Python itself or Numpy, but not within Astropy, where we use our own deprecation warning class) into exceptions so that we find out about them early. This completely resets the warning filters and any "already seen" warning state. """ # First, totally reset the warning state. The modules may change during # this iteration thus we copy the original state to a list to iterate # on. See https://github.com/astropy/astropy/pull/5513. for module in list(six.itervalues(sys.modules)): # We don't want to deal with six.MovedModules, only "real" # modules. if (isinstance(module, types.ModuleType) and hasattr(module, '__warningregistry__')): del module.__warningregistry__ if not _deprecations_as_exceptions: return warnings.resetwarnings() # Hide the next couple of DeprecationWarnings warnings.simplefilter('ignore', DeprecationWarning) # Here's the wrinkle: a couple of our third-party dependencies # (py.test and scipy) are still using deprecated features # themselves, and we'd like to ignore those. Fortunately, those # show up only at import time, so if we import those things *now*, # before we turn the warnings into exceptions, we're golden. for m in _modules_to_ignore_on_import: try: __import__(m) except ImportError: pass # Now, start over again with the warning filters warnings.resetwarnings() # Now, turn DeprecationWarnings into exceptions _all_warns = [DeprecationWarning] # Only turn astropy deprecation warnings into exceptions if requested if _include_astropy_deprecations: _all_warns += [AstropyDeprecationWarning, AstropyPendingDeprecationWarning] for w in _all_warns: warnings.filterwarnings("error", ".*", w) # This ignores all deprecation warnings from given module(s), # not just on import, for use of Astropy affiliated packages. for m in _warnings_to_ignore_entire_module: for w in _all_warns: warnings.filterwarnings('ignore', category=w, module=m) for v in _warnings_to_ignore_by_pyver: if sys.version_info[:2] >= v: for s in _warnings_to_ignore_by_pyver[v]: warnings.filterwarnings("ignore", s, DeprecationWarning) class catch_warnings(warnings.catch_warnings): """ A high-powered version of warnings.catch_warnings to use for testing and to make sure that there is no dependence on the order in which the tests are run. This completely blitzes any memory of any warnings that have appeared before so that all warnings will be caught and displayed. ``*args`` is a set of warning classes to collect. If no arguments are provided, all warnings are collected. 
Use as follows:: with catch_warnings(MyCustomWarning) as w: do.something.bad() assert len(w) > 0 """ def __init__(self, *classes): super(catch_warnings, self).__init__(record=True) self.classes = classes def __enter__(self): warning_list = super(catch_warnings, self).__enter__() treat_deprecations_as_exceptions() if len(self.classes) == 0: warnings.simplefilter('always') else: warnings.simplefilter('ignore') for cls in self.classes: warnings.simplefilter('always', cls) return warning_list def __exit__(self, type, value, traceback): treat_deprecations_as_exceptions() class ignore_warnings(catch_warnings): """ This can be used either as a context manager or function decorator to ignore all warnings that occur within a function or block of code. An optional category option can be supplied to only ignore warnings of a certain category or categories (if a list is provided). """
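# A minimal usage sketch for ignore_warnings, following its docstring (the
# decorated function and the warning class are illustrative placeholders;
# the optional category argument is assumed from the docstring's wording):
#
#   @ignore_warnings(DeprecationWarning)
#   def test_calls_deprecated_api():
#       some_deprecated_function()
#
#   with ignore_warnings(DeprecationWarning):
#       some_deprecated_function()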
import sqlalchemy metadata = sqlalchemy.MetaData() log_table = sqlalchemy.Table('log', metadata,
sqlalchemy.Column('id', sqlalchemy.Integer, primary_key=True), sqlalchemy.Column('filename', sqlalchemy.Unicode), sqlalchemy.Column('digest', sqlalchemy.Unicode), sqlalchemy.Column('comment', sqlalchemy.Unicode), sqlalchemy.Column('user_agent', sqlalchemy.Unicode), sqlalchemy.Column('traceback', sqlalchemy.Uni
code)) def init(engine): metadata.create_all(bind=engine)
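# A minimal, hypothetical usage sketch for this logging table (the database
# URL and the row values below are placeholders):
#
#   engine = sqlalchemy.create_engine('sqlite:///log.db')
#   init(engine)
#   with engine.connect() as conn:
#       conn.execute(log_table.insert().values(
#           filename=u'upload.bin', digest=u'deadbeef', comment=u'test row'))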
# -*- coding: utf-8 -*- # Generated by Django 1.10.2 on 2016-10-30 12:53 from __futu
re__ import unicode_literals from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('core', '0001_initial'), ] operations = [ migrations.RemoveField( model_name='user', name='added', ), migrations.RemoveField( model_name='user', name='changed
', ), ]
input = """ g(1). g(2). g(3). f(a,b). f(A,B):- g(A), g(B). f(a,a). """ o
utput = """ {f(1,1), f(1,2), f(1,3), f(2,1), f(2,2), f(2,3), f(3,1), f(3,2), f(3,3), f(a,a), f(a,b), g(1), g(2)
, g(3)} """
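# The expected answer set above follows from grounding the rule
# f(A,B) :- g(A), g(B) over the domain {1, 2, 3} given by the g/1 facts
# (nine f(i,j) atoms), plus the explicit facts f(a,a), f(a,b) and g(1..3).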
from velox_d
eploy import *
# (c) 2012, Jan-Piet Mens <jpmens(at)gmail.com> # (c) 2017 Ansible Project # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import (absolute_import, division, print_function) __metaclass__ = type DOCUMENTATION = """ lookup: redis author: - Jan-Piet Mens (@jpmens) <jpmens(at)gmail.com> - Ansible Core version_added: "2.5" short_description: fetch data from Redis description: - This lookup returns a list of results from a Redis DB corresponding to a list of items given to it requirements: - redis (python library https://github.com/andymccurdy/redis-py/) options: _terms: description: list of keys to query host: description: location of Redis host default: '127.0.0.1' env: - name: ANSIBLE_REDIS_HOST ini: - section: lookup_redis key: host port: description: port on which Redis is listening default: 6379 type: int env: - name: ANSIBLE_REDIS_PORT ini: - section: lookup_redis key: port socket: description: path to socket on which to query Redis, this option overrides host and port options when set. type: path env: - name: ANSIBLE_REDIS_SOCKET ini: - section: lookup_redis key: socket """ EXAMPLES = """ - name: query redis for somekey (default or configured settings used) debug: msg="{{ lookup('redis', 'somekey') }}" - name: query redis for list of keys and non-default host and port debug: msg="{{ lookup('redis', item, host='myredis.internal.com', port=2121) }}" loop: '{{list_of_redis_keys}}' - name: use list directly debug: msg="{{ lookup('redis', 'key1', 'key2', 'key3') }}" - name: use list directly with a socket debug: msg="{{ lookup('redis', 'key1', 'key2', socket='/var/tmp/redis.sock') }}" """ RETURN = """ _raw: description: value(s) stored in Redis """ import os HAVE_REDIS = False try: import redis HAVE_REDIS = True except ImportError: pass from ansible.errors import AnsibleError from ansible.plugins.lookup import LookupBase class LookupModule(LookupBase): def run(self, terms, variables, **kwargs): if not HAVE_REDIS: raise AnsibleError("Can't LOOKUP(redis_kv): module redis is not installed") # get options self.set_options(direct=kwargs) # setup connection host = self.get_option('host') port = self.get_option('port') socket = self.get_option('socket') if socket is N
one: conn = redis.Redis(host=host, port=port) else: conn = redis.Redis(unix_socket_path=socket) ret = [] for term in
terms: try: res = conn.get(term) if res is None: res = "" ret.append(res) except Exception: ret.append("") # connection failed or key not found return ret
from __future__ import absolute_import ########################################################################### # (C) Vrije Universiteit, Amsterdam (the Netherlands) #
# # # This file i
s part of AmCAT - The Amsterdam Content Analysis Toolkit # # # # AmCAT is free software: you can redistribute it and/or modify it under # # the terms of the GNU Affero General Public License as published by the # # Free Software Foundation, either version 3 of the License, or (at your # # option) any later version. # # # # AmCAT is distributed in the hope that it will be useful, but WITHOUT # # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or # # FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public # # License for more details. # # # # You should have received a copy of the GNU Affero General Public # # License along with AmCAT. If not, see <http://www.gnu.org/licenses/>. # ########################################################################### """ Module for running scrapers """ import logging;log = logging.getLogger(__name__) from collections import namedtuple from amcat.models import Article, Project ScrapeError = namedtuple("ScrapeError", ["i", "unit", "error"]) class Controller(object): def __init__(self): self.errors = [] self.articles = [] def run(self, scraper): try: units = list(scraper._get_units()) except Exception as e: self.errors.append(ScrapeError(None,None,e)) log.exception("scraper._get_units failed") return self.articles for i, unit in enumerate(units): try: articles = list(scraper._scrape_unit(unit)) except Exception as e: log.exception("scraper._scrape_unit failed") self.errors.append(ScrapeError(i,unit,e)) continue self.articles += articles for article in self.articles: _set_default(article, 'project', scraper.project) try: articles, errors = Article.create_articles(self.articles, scraper.articleset) self.saved_article_ids = {getattr(a, "duplicate_of", a.id) for a in self.articles} for e in errors: self.errors.append(ScrapeError(None,None,e)) except Exception as e: self.saved_article_ids = set() self.errors.append(ScrapeError(None,None,e)) log.exception("Article.create_articles failed") return self.saved_article_ids def _set_default(obj, attr, val): try: if getattr(obj, attr, None) is not None: return except Project.DoesNotExist: pass # django throws DNE on x.y if y is not set and not nullable setattr(obj, attr, val)
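# A minimal, hypothetical usage sketch for Controller (assumes a scraper
# object implementing _get_units()/_scrape_unit() and carrying `project`
# and `articleset` attributes, as Controller.run expects):
#
#   controller = Controller()
#   saved_ids = controller.run(my_scraper)
#   for err in controller.errors:
#       log.warning("unit %r failed: %s", err.unit, err.error)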
e.__name__, ] ) def test_urlconf_overridden_with_null(self): """ Overriding request.urlconf with None will fall back to the default URLconf. """ response = self.client.get('/test/me/') self.assertEqual(response.status_code, 200) self.assertEqual(response.content, b'outer:/test/me/,inner:/inner_urlconf/second_test/') response = self.client.get('/inner_urlconf/second_test/') self.assertEqual(response.status_code, 200) response = self.client.get('/second_test/') self.assertEqual(response.status_code, 404) @override_settings( MIDDLEWARE=[ '%s.ChangeURLconfMiddleware' % middleware.__name__, '%s.ReverseInnerInResponseMiddleware' % middleware.__name__, ] ) def test_reverse_inner_in_response_middleware(self): """ Test reversing an URL from the *overridden* URLconf from inside a response middleware. """ response = self.client.get('/second_test/') self.assertEqual(response.status_code, 200) self.assertEqual(response.content, b'/second_test/') @override_settings( MIDDLEWARE=[ '%s.ChangeURLconfMiddleware' % middleware.__name__, '%s.ReverseOuterInResponseMiddleware' % middleware.__name__, ] ) def test_reverse_outer_in_response_middleware(self): """ Test reversing an URL from the *default* URLconf from inside a response middleware. """ message = "Reverse for 'outer' with arguments '()' and keyword arguments '{}' not found." with self.assertRaisesMessage(NoReverseMatch, message): self.client.get('/second_test/') @override_settings( MIDDLEWARE=[ '%s.ChangeURLconfMiddleware' % middleware.__name__, '%s.ReverseInnerInStreaming' % middleware.__name__, ] ) def test_reverse_inner_in_streaming(self): """ Test reversing an URL from the *overridden* URLconf from inside a streaming response. """ response = self.client.get('/second_test/') self.assertEqual(response.status_code, 200) self.assertEqual(b''.join(response), b'/second_test/') @override_settings( MIDDLEWARE=[ '%s.ChangeURLconfMiddleware' % middleware.__name__, '%s.ReverseOuterInStreaming' % middleware.__name__, ] ) def test_reverse_outer_in_streaming(self): """ Test reversing an URL from the *default* URLconf from inside a streaming response. """ message = "Reverse for 'outer' with arguments '()' and keyword arguments '{}' not found." with self.assertRaisesMessage(NoReverseMatch, message): self.client.get('/second_test/') b''.join(self.client.get('/second_test/')) class ErrorHandlerResolutionTests(SimpleTestCase): """Tests for handler400, handler404 and handler500"""
def setUp(self): urlconf = 'urlpatterns_reverse.urls_error_handlers' urlconf_callables = 'urlpatterns_reverse.urls_error_handlers_callables' self.resolver = RegexURLResolver(r'^$', urlconf) self.callable_resolver = RegexURLResolver(r'^$', urlconf_callables) def test_named_handlers(self): handler = (empty_view, {}) self.assertEqual(self.resolver.resolve_error_handler(400), handler) self.assertEqual(sel
f.resolver.resolve_error_handler(404), handler) self.assertEqual(self.resolver.resolve_error_handler(500), handler) def test_callable_handlers(self): handler = (empty_view, {}) self.assertEqual(self.callable_resolver.resolve_error_handler(400), handler) self.assertEqual(self.callable_resolver.resolve_error_handler(404), handler) self.assertEqual(self.callable_resolver.resolve_error_handler(500), handler) @override_settings(ROOT_URLCONF='urlpatterns_reverse.urls_without_full_import') class DefaultErrorHandlerTests(SimpleTestCase): def test_default_handler(self): "If the urls.py doesn't specify handlers, the defaults are used" response = self.client.get('/test/') self.assertEqual(response.status_code, 404) with self.assertRaisesMessage(ValueError, "I don't think I'm getting good"): self.client.get('/bad_view/') @override_settings(ROOT_URLCONF=None) class NoRootUrlConfTests(SimpleTestCase): """Tests for handler404 and handler500 if ROOT_URLCONF is None""" def test_no_handler_exception(self): with self.assertRaises(ImproperlyConfigured): self.client.get('/test/me/') @override_settings(ROOT_URLCONF='urlpatterns_reverse.namespace_urls') class ResolverMatchTests(SimpleTestCase): @ignore_warnings(category=RemovedInDjango20Warning) def test_urlpattern_resolve(self): for path, url_name, app_name, namespace, view_name, func, args, kwargs in resolve_test_data: # Test legacy support for extracting "function, args, kwargs" match_func, match_args, match_kwargs = resolve(path) self.assertEqual(match_func, func) self.assertEqual(match_args, args) self.assertEqual(match_kwargs, kwargs) # Test ResolverMatch capabilities. match = resolve(path) self.assertEqual(match.__class__, ResolverMatch) self.assertEqual(match.url_name, url_name) self.assertEqual(match.app_name, app_name) self.assertEqual(match.namespace, namespace) self.assertEqual(match.view_name, view_name) self.assertEqual(match.func, func) self.assertEqual(match.args, args) self.assertEqual(match.kwargs, kwargs) # ... and for legacy purposes: self.assertEqual(match[0], func) self.assertEqual(match[1], args) self.assertEqual(match[2], kwargs) @ignore_warnings(category=RemovedInDjango20Warning) def test_resolver_match_on_request(self): response = self.client.get('/resolver_match/') resolver_match = response.resolver_match self.assertEqual(resolver_match.url_name, 'test-resolver-match') def test_resolver_match_on_request_before_resolution(self): request = HttpRequest() self.assertIsNone(request.resolver_match) @override_settings(ROOT_URLCONF='urlpatterns_reverse.erroneous_urls') class ErroneousViewTests(SimpleTestCase): def test_noncallable_view(self): # View is not a callable (explicit import; arbitrary Python object) with self.assertRaisesMessage(TypeError, 'view must be a callable'): url(r'uncallable-object/$', views.uncallable) def test_invalid_regex(self): # Regex contains an error (refs #6170) msg = '(regex_error/$" is not a valid regular expression' with self.assertRaisesMessage(ImproperlyConfigured, msg): reverse(views.empty_view) class ViewLoadingTests(SimpleTestCase): def test_view_loading(self): self.assertEqual(get_callable('urlpatterns_reverse.views.empty_view'), empty_view) # passing a callable should return the callable self.assertEqual(get_callable(empty_view), empty_view) def test_exceptions(self): # A missing view (identified by an AttributeError) should raise # ViewDoesNotExist, ... with self.assertRaisesMessage(ViewDoesNotExist, "View does not exist in"): get_callable('urlpatterns_reverse.views.i_should_not_exist') # ... 
but if the AttributeError is caused by something else don't # swallow it. with self.assertRaises(AttributeError): get_callable('urlpatterns_reverse.views_broken.i_am_broken') class IncludeTests(SimpleTestCase): url_patterns = [ url(r'^inner/$', views.empty_view, name='urlobject-view'), url(r'^inner/(?P<arg1>[0-9]+)/(?P<arg2>[0-9]+)/$', views.empty_view, name='urlobject-view'), url(r'^inner/\+\\\$\*/$', views.empty_view, name='urlobject-special-view'), ] app_urls = URLObject('inc-app') def test_i
from worldengine.simulations.basic import * import random from worldengine.views.basic import color_prop from PyQt4 import QtGui class WatermapView(object): def is_applicable(self, world): return world.has_watermap() def draw(self, world, canvas): width = world.width height = world.height th = world.watermap['thresholds']['river'] for y in range(0, height): for x in range(0, width): if world.is_ocean((x, y)): r = g = 0 b = 255 else
: w = world.watermap['data'][y][x] if w > th: r = g = 0 b = 255 else
: r = g = b = 0 col = QtGui.QColor(r, g, b) canvas.setPixel(x, y, col.rgb())
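# A minimal, hypothetical rendering sketch (assumes a `world` object exposing
# width/height/is_ocean()/watermap as used by draw() above; a QImage works as
# the canvas since it provides setPixel()):
#
#   canvas = QtGui.QImage(world.width, world.height, QtGui.QImage.Format_RGB32)
#   view = WatermapView()
#   if view.is_applicable(world):
#       view.draw(world, canvas)
#       canvas.save('watermap.png')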
"""NuGridPy p
ackage version""" __version__ = '0.7.6
'
#!/usr/bin/env python # -*- encoding: utf-8 -*- # -*- mode: python -*- # vi: set ft=python : import os from setuptools import setup, find_package
s README_PATH = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'README') DESCRIPTION = 'Easy image thumbnails in Django.' if os.path.exists(README_PATH): LONG_DESCRIPTION = open(README_PATH).read() else
: LONG_DESCRIPTION = DESCRIPTION setup( name='django-thumbs', version='1.0.4', install_requires=['django'], description=DESCRIPTION, long_description=LONG_DESCRIPTION, author='Matt Pegler', author_email='matt@pegler.co', url='https://github.com/pegler/django-thumbs/', packages=['thumbs'], )
# -*- coding: utf-8 -*- """ lets.transparentlet ~~~~~~~~~~~~~~~~~~~ Deprecated. gevent-1.1
preserves tracebacks exactly. If you just want to prevent the hub from printing an exception, use :mod:`lets.quietlet` instead. :copyright: (c) 2013-2018 by Heungsub Lee :license: BSD, see LICENSE for more details. """ from __future__ import absolute_import from gevent.pool import Group as TransparentGroup from lets.quietlet import quiet as no_error_handling from lets.quietlet import Quietlet as Transparentlet __all
__ = ['Transparentlet', 'TransparentGroup', 'no_error_handling']
template = "Exception {0} in uve stream proc. Arguments:\n{1!r}" messag = template.format(type(ex).__name__, ex.args) self._logger.error("%s : traceback %s" % \ (messag, traceback.format_exc())) lredis = None if pb is not None: pb.close() pb = None gevent.sleep(2) return None class UveStreamer(gevent.Greenlet): def __init__(self, logger, q, rfile, agp_cb, partitions, rpass): gevent.Greenlet.__init__(self) self._logger = logger self._q = q self._rfile = rfile self._agp_cb = agp_cb self._agp = {} self._parts = {} self._partitions = partitions self._rpass = rpass def _run(self): inputs = [ self._rfile ] outputs = [ ] msg = {'event': 'init', 'data':\ json.dumps({'partitions':self._partitions})} self._q.put(sse_pack(msg)) while True: readable, writable, exceptional = select.select(inputs, outputs, inputs, 1) if (readable or writable or exceptional): break newagp = self._agp_cb() set_new, set_old = set(newagp.keys()), set(self._agp.keys()) intersect = set_new.intersection(set_old) # deleted parts for elem in set_old - intersect: self.partition_stop(elem) # new parts for elem in set_new - intersect: self.partition_start(elem, newagp[elem]) # changed parts for elem in intersect: if self._agp[elem] != newagp[elem]: self.partition_stop(elem) self.partition_start(elem, newagp[elem]) self._agp = newagp for part, pi in self._agp.iteritems(): self.partition_stop(part) def partition_start(self, partno, pi): self._logger.error("Starting agguve part %d using %s" %( partno, pi)) msg = {'event': 'clear', 'data':\ json.dumps({'partition':partno, 'acq_time':pi.acq_time})} self._q.put(sse_pack(msg)) self._parts[partno] = UveStreamPart(partno, self._logger, self._q, pi, self._rpass) self._parts[partno].start() def partition_stop(self, partno): self._logger.error("Stopping agguve part %d" % partno) self._parts[partno].kill() self._parts[partno].get() del self._parts[partno] class PartitionHandler(gevent.Greenlet): def __init__(self, brokers, group, topic, logger, limit): gevent.Greenlet.__init__(self) self._brokers = brokers self._group = group self._topic = topic self._logger = logger self._limit = limit self._uvedb = {} self._partoffset = 0 self._kfk = None def msg_handler(self, mlist): self._logger.info("%s Reading %s" % (self._topic, str(mlist))) return True def _run(self): pcount = 0 while True: try: self._logger.error("New KafkaClient
%s" % self._topic) self._kfk = KafkaClient(self._brokers , "kc-" + self._topic) try: consumer = SimpleConsumer(self._kfk, self._group, self._topic, buffer_size = 4096*4, max_buffer_size=4096*32) #except: except Exception as ex:
template = "Consumer Failure {0} occured. Arguments:\n{1!r}" messag = template.format(type(ex).__name__, ex.args) self._logger.info("%s" % messag) raise RuntimeError(messag) self._logger.error("Starting %s" % self._topic) # Find the offset of the last message that has been queued consumer.seek(-1,2) try: mi = consumer.get_message(timeout=0.1) consumer.commit() except common.OffsetOutOfRangeError: mi = None #import pdb; pdb.set_trace() self._logger.info("Last Queued for %s is %s" % \ (self._topic,str(mi))) # start reading from last previously processed message if mi != None: consumer.seek(0,1) else: consumer.seek(0,0) if self._limit: raise gevent.GreenletExit while True: try: mlist = consumer.get_messages(10,timeout=0.5) if not self.msg_handler(mlist): raise gevent.GreenletExit consumer.commit() pcount += len(mlist) except TypeError as ex: self._logger.error("Type Error: %s trace %s" % \ (str(ex.args), traceback.format_exc())) gevent.sleep(0.1) except common.FailedPayloadsError as ex: self._logger.error("Payload Error: %s" % str(ex.args)) gevent.sleep(0.1) except gevent.GreenletExit: break except AssertionError as ex: self._partoffset = ex break except Exception as ex: template = "An exception of type {0} occured. Arguments:\n{1!r}" messag = template.format(type(ex).__name__, ex.args) self._logger.error("%s : traceback %s" % \ (messag, traceback.format_exc())) self.stop_partition() gevent.sleep(2) self._logger.error("Stopping %s pcount %d" % (self._topic, pcount)) partdb = self.stop_partition() return self._partoffset, partdb class UveStreamProc(PartitionHandler): # Arguments: # # brokers : broker list for kafka bootstrap # partition : partition number # uve_topic : Topic to consume # logger : logging object to use # callback : Callback function for reporting the set of the UVEs # that may have changed for a given notification # rsc : Callback function to check on collector status # and get sync contents for new collectors # aginst : instance_id of alarmgen # rport : redis server port # disc : discovery client to publish to def __init__(self, brokers, partition, uve_topic, logger, callback, host_ip, rsc, aginst, rport, disc = None): super(UveStreamProc, self).__init__(brokers, "workers", uve_topic, logger, False) self._uvedb = {} self._uvein = {} self._uveout = {} self._callback = callback self._partno = partition self._host_ip = host_ip self._ip_code, = struct.unpack('>I', socket.inet_pton( socket.AF_INET, host_ip)) self.disc_rset = set() self._resource_cb = rsc self._aginst = aginst self._disc = disc self._acq_time = UTCTimestampUsec() self._rport = rport def acq_time(self): return self._acq_time def resource_check(self, msgs): ''' This function compares the known collectors with the list from discovery, and syncs UVE keys accordingly ''' newset , coll_delete, chg_res = self._resource_cb(self._partno, self.disc_rset, msgs) for coll in coll_delete: self._logger.error("Part %d lost collector %s" % (self._partno, coll)) self.stop_partition(coll) if len(chg_res): self.start_partition(chg_res) self.disc_rset = newset if self._disc: data = { 'instance-id' : self._aginst, 'partition' : str(self._partno), 'ip-address': self._host_ip, 'acq-time': str(self._acq_time)
icense, or # (at your option) any later version. # # This woob module is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this woob module. If not, see <http://www.gnu.org/licenses/>. from woob.browser.pages import JsonPage, pagination, HTMLPage from woob.browser.elements import ItemElement, DictElement, method from woob.browser.filters.json import Dict from woob.browser.filters.html import XPath from woob.browser.filters.standard import (CleanText, CleanDecimal, Currency, Env, Regexp, Field, BrowserURL) from woob.capabilities.base import NotAvailable, NotLoaded from woob.capabilities.housing import (Housing, HousingPhoto, City, UTILITIES, ENERGY_CLASS, POSTS_TYPES, ADVERT_TYPES) from woob.capabilities.address import PostalAddress from woob.tools.capabilities.housing.housing import PricePerMeterFilter from woob.tools.json import json from woob.ex
ceptions import ActionNeeded from .constants import TYPES, RET import codecs import decimal class ErrorPage(HTMLPage): def on_load(self): raise ActionNeeded("Please resolve the captcha") class CitiesPage(JsonPage): @method class iter_cities(DictElement): ignore_duplicate = True class item(I
temElement): klass = City obj_id = Dict('Params/ci') obj_name = Dict('Display') class SearchResultsPage(HTMLPage): def __init__(self, *args, **kwargs): HTMLPage.__init__(self, *args, **kwargs) json_content = Regexp(CleanText('//script'), r"window\[\"initialData\"\] = JSON.parse\(\"({.*})\"\);window\[\"tags\"\]")(self.doc) json_content = codecs.unicode_escape_decode(json_content)[0] json_content = json_content.encode('utf-8', 'surrogatepass').decode('utf-8') self.doc = json.loads(json_content) @pagination @method class iter_housings(DictElement): item_xpath = 'cards/list' # Prevent DataError on same ids ignore_duplicate = True def next_page(self): page_nb = Dict('navigation/pagination/page')(self) max_results = Dict('navigation/counts/count')(self) results_per_page = Dict('navigation/pagination/resultsPerPage')(self) if int(max_results) / int(results_per_page) > int(page_nb): return BrowserURL('search', query=Env('query'), page_number=int(page_nb) + 1)(self) # TODO handle bellesdemeures class item(ItemElement): klass = Housing def condition(self): return ( Dict('cardType')(self) not in ['advertising', 'ali', 'localExpert'] and Dict('id', default=False)(self) and Dict('classifiedURL', default=False)(self) ) obj_id = Dict('id') def obj_type(self): idType = int(Env('query_type')(self)) type = next(k for k, v in TYPES.items() if v == idType) if type == POSTS_TYPES.FURNISHED_RENT: # SeLoger does not let us discriminate between furnished and not furnished. return POSTS_TYPES.RENT return type def obj_title(self): return "{} - {} - {}".format(Dict('estateType')(self), " / ".join(Dict('tags')(self)), Field('location')(self)) def obj_advert_type(self): is_agency = Dict('contact/agencyId', default=False)(self) if is_agency: return ADVERT_TYPES.PROFESSIONAL else: return ADVERT_TYPES.PERSONAL obj_utilities = UTILITIES.EXCLUDED def obj_photos(self): photos = [] for photo in Dict('photos')(self): photos.append(HousingPhoto(photo)) return photos def obj_location(self): quartier = Dict('districtLabel')(self) quartier = quartier if quartier else '' ville = Dict('cityLabel')(self) ville = ville if ville else '' cp = Dict('zipCode')(self) cp = cp if cp else '' return u'%s %s (%s)' % (quartier, ville, cp) obj_url = Dict('classifiedURL') obj_text = Dict('description') obj_cost = CleanDecimal(Dict('pricing/price', default=NotLoaded), default=NotLoaded) obj_currency = Currency(Dict('pricing/price', default=NotLoaded), default=NotLoaded) obj_price_per_meter = CleanDecimal(Dict('pricing/squareMeterPrice'), default=PricePerMeterFilter) class HousingPage(HTMLPage): def __init__(self, *args, **kwargs): HTMLPage.__init__(self, *args, **kwargs) json_content = Regexp( CleanText('//script'), r"window\[\"initialData\"\] = JSON.parse\(\"({.*})\"\);" )(self.doc) json_content = codecs.unicode_escape_decode(json_content)[0] json_content = json_content.encode('utf-8', 'surrogatepass').decode('utf-8') self.doc = { "advert": json.loads(json_content).get('advert', {}).get('mainAdvert', {}), "agency": json.loads(json_content).get('agency', {}) } @method class get_housing(ItemElement): klass = Housing def parse(self, el): self.agency_doc = el['agency'] self.el = el['advert'] obj_id = Dict('id') def obj_house_type(self): naturebien = Dict('propertyNatureId')(self) try: return next(k for k, v in RET.items() if v == naturebien) except StopIteration: return NotLoaded def obj_type(self): idType = Dict('idTransactionType')(self) try: type = next(k for k, v in TYPES.items() if v == idType) if type == POSTS_TYPES.FURNISHED_RENT: # SeLoger 
does not let us discriminate between furnished and not furnished. return POSTS_TYPES.RENT return type except StopIteration: return NotAvailable def obj_advert_type(self): if 'Agences' in self.agency_doc['type']: return ADVERT_TYPES.PROFESSIONAL else: return ADVERT_TYPES.PERSONAL def obj_photos(self): photos = [] for photo in Dict('photoList')(self): photos.append(HousingPhoto(photo['fullscreenUrl'])) return photos obj_title = Dict('title') def obj_location(self): address = Dict('address')(self) return u'%s %s (%s)' % (address['neighbourhood'], address['city'], address['zipCode']) def obj_address(self): address = Dict('address')(self) p = PostalAddress() p.street = address['street'] p.postal_code = address['zipCode'] p.city = address['city'] p.full_address = Field('location')(self) return p obj_text = Dict('description') def obj_cost(self): propertyPrice = Dict('propertyPrice')(self) return decimal.Decimal(propertyPrice['prix']) def obj_currency(self): propertyPrice = Dict('propertyPrice')(self) return propertyPrice['priceUnit'] obj_price_per_meter = PricePerMeterFilter() obj_area = CleanDecimal(Dict('surface')) def obj_url(self): return self.page.url def obj_phone(self): return self.agency_doc.get('agen
# perf trace event handlers, generated by perf trace -g python # (c) 2010, Tom Zanussi <tzanussi@gmail.com> # Licensed under the terms of the GNU GPL License version 2 # # This script tests basic functionality such as flag and symbol # strings, common_xxx() calls back into perf, begin, end, unhandled # events, etc. Basically, if this script runs successfully and # displays expected results, Python scripting support should be ok. import os import sys sys.path.append(os.environ['PERF_EXEC_PATH'] + \ '/scripts/python/Perf-Trace-Util/lib/Perf/Trace') from Core import * from perf_trace_context import * unhandled = autodict() def trace_begin(): print "trace_begin" pass def trace_end(): print_unhandled() def irq__softirq_entry(event_name, context, common_cpu, common_secs, common_nsecs, common_pid, common_comm, vec): print_header(event_name, common_cpu, common_secs, common_nsecs, common_pid, common_comm) print_uncommon(context) print "vec=%s\n" % \ (symbol_str("irq__softirq_entry", "vec", vec)), def kmem__kmalloc(event_name, context, common_cpu, common_secs, common_nsecs, common_pid, common_comm, call_site, ptr, bytes_req, bytes_alloc, gfp_flags): print_header(event_name, common_cpu, common_secs, common_nsecs, common_pid, common
_comm) print_uncommon(context) print "call_site=%u, ptr=%u, bytes_req=%u, " \ "bytes_alloc=%u, gfp_flags=%s\n" % \ (call_site, ptr, bytes_req, bytes_alloc, flag_str("kmem__kmalloc", "gfp_flags", gfp_flags)), def trace_unhandled(event_name, context, event_fields_dict): try: unhandled[event_name] += 1 except TypeError:
unhandled[event_name] = 1 def print_header(event_name, cpu, secs, nsecs, pid, comm): print "%-20s %5u %05u.%09u %8u %-20s " % \ (event_name, cpu, secs, nsecs, pid, comm), # print trace fields not included in handler args def print_uncommon(context): print "common_preempt_count=%d, common_flags=%s, common_lock_depth=%d, " \ % (common_pc(context), trace_flag_str(common_flags(context)), \ common_lock_depth(context)) def print_unhandled(): keys = unhandled.keys() if not keys: return print "\nunhandled events:\n\n", print "%-40s %10s\n" % ("event", "count"), print "%-40s %10s\n" % ("----------------------------------------", \ "-----------"), for event_name in keys: print "%-40s %10d\n" % (event_name, unhandled[event_name])
# flake8: noqa import sys import toml import log from .uploader import DropboxUploader from .file_manager import DirectoryPoller, VolumePoller SECT = 'flysight-manager' class ConfigError(Exception): pass class FlysightConfig(object): pass class DropboxConfig(object): pass class VimeoConfig(object): pass class YoutubeConfig(object): pass class SendgridConfig(object): pass class PushoverConfig(object): pass class CameraConfig(object): def __init__(self, name, cfg): self._name = name self._mountpoint = cfg["mountpoint"] self._uuid = cfg["uuid"] @property def mountpoint(self): return self._mountpoint @property def uuid(self): return self._uuid class GoProConfig(object): def __init__(self): self._cameras = {} def add_camera(self, name, config): self._cameras[name] = CameraConfig(name, config) def cameras(self): return self._cameras class GswoopConfig(object): pass def get_poller(ty): if ty == 'flysight': get_sect = lambda cfg: cfg.flysight_cfg elif ty == 'gopro': get_sect = lambda cfg: cfg else: raise ConfigError("Unknown ty: %s" % repr(ty)) platform = sys.platform if platform.startswith('linux'): return lambda name, cfg: VolumePoller(name, get_sect(cfg).uuid, ty) elif platform == 'darwin': return lambda name, cfg: DirectoryPoller(name, get_sect(cfg).mountpoint, ty) else: raise ConfigError('Unknown platform: %s' % repr(platform)) @log.make_loggable class Configuration(object): """Stub class to be replaced by a real configuration system""" CONFIG_FILE = 'flysight-manager.ini' def __init__(self): self.flysight_enabled = False self.gopro_enabled = False self.gswoop_enabled = False self.vimeo_enabled = False self.youtube_enabled = False self.sendgrid_enabled = False self.noop = False self.preserve = False self.processors = [] self.info("Loading config from %s" % self.CONFIG_FILE) cfg = toml.load(open(self.CONFIG_FILE, 'rb')) self.load_config(cfg) self._uploader = None if self.gswoop_enabled: self.info("Enabling gswoop processor") self.processors.append("gswoop") def load_config(self, cfg): """Validate the configuration""" get = lambda x: cfg[SECT][x] # TODO: Confirm how this handles bools enabled = lambda x: cfg[x]["enabled"] backend = get('storage_backend') if backend == 'dropbox': self.storage_backend = 'dropbox' self.dropbox_cfg = self.load_dropbox_opts(cfg) else: raise ConfigError("Unknown storage_backend: %s" % backend) if enabled("flysight"): self.flysight_enabled = True self.flysight_cfg = self.load_flysight_opts(cfg) if enabled("gopro"): self.gopro_enabled = True self.gopro_cfg = self.load_gopro_opts(cfg) if enabled("gswoop"): self.gswoop_enabled = True self.gswoop_cfg = self.load_gswoop_opts(cfg) if enabled("vimeo"): self.vimeo_enabled = True self.vimeo_cfg = self.load_vimeo_opts(cfg) if enabled("youtube"): self.youtube_enabled = True self.youtube_cfg = self.load_youtube_opts(cfg) if enabled("sendgrid"): self.sendgrid_enabled = True self.sendgrid_cfg = self.load_sendgrid_opts(cfg) if enabled("pushover"): self.pushover_enabled = True self.pushover_cfg = self.load_pushover_opts(cfg) def load_dropbox_opts(self, cfg): get = lambda x: cfg["dropbox"][x] _cfg = DropboxConfig() _cfg.token = get("token") return _cfg def load_vimeo_opts(self, cfg): get = lambda x: cfg["vimeo"][x] _cfg = VimeoConfig() _cfg.token = get("token") return _cfg def load_sendgrid_opts(self, cfg): get = lambda x: cfg["sendgrid"][x] _cfg = SendgridConfig() _cfg.token = get("toke
n") _cfg.from_addr = get("from") _cfg.to_addr = get("to") _cfg.subject = get("subject") return _cfg def load_pushover_opts(self, cfg): get = lambda x: cfg["pushover"][x] _cfg = PushoverConfig() _cfg.token = get("token") _cfg.user = get("
user") return _cfg def load_youtube_opts(self, cfg): get = lambda x: cfg["youtube"][x] _cfg = YoutubeConfig() _cfg.access_token = get("access_token") _cfg.client_id = get("client_id") _cfg.client_secret = get("client_secret") _cfg.refresh_token = get("refresh_token") _cfg.token_uri = get("token_uri") return _cfg def load_gopro_opts(self, cfg): _cfg = GoProConfig() # Extract the enabled key, then pray that anything else is a camera for k, v in cfg["gopro"].items(): if isinstance(v, dict): _cfg.add_camera(k, v) return _cfg def load_flysight_opts(self, cfg): get = lambda x: cfg["flysight"][x] _cfg = FlysightConfig() _cfg.mountpoint = get("mountpoint") _cfg.uuid = get("uuid") return _cfg def load_gswoop_opts(self, cfg): get = lambda x: cfg["gswoop"][x] _cfg = GswoopConfig() _cfg.binary = get("binary") return _cfg @property def uploader(self): if not self._uploader: if self.storage_backend == 'dropbox': self._uploader = DropboxUploader(self.dropbox_cfg.token, self.noop) else: raise ConfigError('Unknown storage backend: %s' % self.storage_backend) return self._uploader def update_with_args(self, args): if args.noop: self.debug("Setting noop flag") self.noop = args.noop if args.preserve: self.debug("Setting preserve flag") self.preserve = args.preserve
# -*- coding: utf-8 -*- # Copyright (C) 2014-2022 Daniele Simonetti # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA. from PyQt5 import QtCore, QtGui, QtWidgets import l5r.widgets as widgets import l5r.api as api import l5r.api.character.rankadv class NextRankDlg(QtWidgets.QDialog): def __init__(self, pc, parent=None): super(NextRankDlg, self).__init__(parent) self.pc = pc self.build_ui() self.connect_signals() # self.setWindowFlags(QtCore.Qt.Tool) self.setWindowTitle(self.tr("L5R: CM - Advance Rank")) def build_ui(self): vbox = QtWidgets.QVBoxLayout(self) vbox.addWidget(QtWidgets.QLabel(self.tr("""\ You can now advance your Rank, what would you want to do? """))) self.bt_go_on = QtWidgets.QPushButton( self.tr("Advance in my current school") ) self.bt_new_school = QtWidgets.QPushButton( self.tr("Join a new school")) for bt in [self.bt_go_on, self.bt_new_school]: bt.setMinimumSize(QtCore.QSize(0, 38)) vbox.addWidget(self.bt_go_on) vbox.add
Widget(self.bt_new_school) vbox.setSpacing(12) is_path = api.data.schools.is_path( api.character.schools.get_current() ) former_school_adv = api.character.rankadv.get_former_school() former_school = api.data.schools.get(former_school_adv.school) if former_school_adv
else None # check if the PC is following an alternate path if is_path: # offer to go back if former_school: self.bt_go_on.setText(self.tr("Continue ") + former_school.name) else: self.bt_go_on.setText(self.tr("Go back to your old school")) self.bt_go_on.setEnabled(former_school is not None) def connect_signals(self): self.bt_go_on.clicked.connect(self.simply_go_on) self.bt_new_school.clicked.connect(self.join_new_school) def join_new_school(self): dlg = widgets.SchoolChooserDialog(self) if dlg.exec_() == QtWidgets.QDialog.Rejected: return self.accept() def simply_go_on(self): is_path = api.data.schools.is_path( api.character.schools.get_current() ) # check if the PC is following an alternate path if is_path: # the PC want to go back to the old school. # find the first school that is not a path api.character.rankadv.leave_path() else: api.character.rankadv.advance_rank() self.accept() def test(): import sys app = QtWidgets.QApplication(sys.argv) dlg = NextRankDlg(None, None) dlg.show() sys.exit(app.exec_()) if __name__ == '__main__': test()
fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('expression', models.CharField(max_length=255)), ], options={ }, bases=(models.Model,), ), migrations.CreateModel( name='DataStoreBase', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ], options={ }, bases=(models.Model,), ), migrations.CreateModel( name='LayerObj', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('name', models.CharField(max_length=255)), ('layer_type', models.SmallIntegerField(choices=[(3, b'raster'), (2, b'vector polygon'), (1, b'vector line'), (0, b'vector point')])), ('projection', models.CharField(default=b'init=epsg:4326', help_text=b'PROJ4 definition of the layer projection', max_length=255)), ('data', models.CharField(help_text=b'Full filename of the spatial data to process.', max_length=255)), ('class_item', models.CharField(help_text=b'Item name in attribute table to use for class lookups.', max_length=255, blank=True)), ('ows_abstract', models.TextField(blank=True)), ('ows_enable_request', models.CharField(default=b'*', max_length=255)), ('ows_include_items', models.CharField(default=b'all', max_length=50, blank=True)), ('gml_include_items', models.CharField(default=b'all', max_length=50, blank=True)), ('ows_opaque', models.SmallIntegerField(null=True, blank=True)), ], options={ }, bases=(models.Model,), ), migrations.CreateModel( name='MapLayer', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('status', models.SmallIntegerField(choices=[(0, b'off'), (1, b'on'), (2, b'default')])), ('layer_obj', models.ForeignKey(to='djangomapserver.LayerObj')), ], options={ }, bases=(models.Model,), ), migrations.CreateModel( name='MapObj', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('name', models.CharField(help_text=b'Unique identifier.', max_length=255)), ('status', models.SmallIntegerField(choices=[(0, b'off'), (1, b'on'), (2, b'default')])), ('proj
ection', models.CharField(default=b'init=epsg:4326', help_text=b'PROJ4 definition of the map projection', max_length=255)), ('units', models.SmallIntegerField(blank=True, choices=[(5, b'Decimal degrees')])), ('size', models.CommaSeparatedIntegerField(help_text=b'Map size in pixel units', max_length=10)), ('cell_size', models.FloatField(help_text=b'Pixel size in map units.', null=True, blank=True)), ('imag
e_type', models.CharField(max_length=10, choices=[(b'png', b'png')])), ('ows_sld_enabled', models.BooleanField(default=True)), ('ows_abstract', models.TextField(blank=True)), ('ows_enable_request', models.CharField(default=b'*', max_length=255)), ('ows_encoding', models.CharField(default=b'utf-8', max_length=20)), ], options={ }, bases=(models.Model,), ), migrations.CreateModel( name='MapServerColor', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('red', models.IntegerField(null=True, blank=True)), ('green', models.IntegerField(null=True, blank=True)), ('blue', models.IntegerField(null=True, blank=True)), ('hex_string', models.CharField(max_length=9, blank=True)), ('attribute', models.CharField(max_length=255, blank=True)), ], options={ }, bases=(models.Model,), ), migrations.CreateModel( name='RectObj', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('max_x', models.FloatField()), ('max_y', models.FloatField()), ('min_x', models.FloatField()), ('min_y', models.FloatField()), ], options={ }, bases=(models.Model,), ), migrations.CreateModel( name='ShapefileDataStore', fields=[ ('datastorebase_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='djangomapserver.DataStoreBase')), ('path', models.CharField(help_text=b'Path to the directory holding shapefiles.', max_length=255)), ], options={ }, bases=('djangomapserver.datastorebase',), ), migrations.CreateModel( name='SpatialiteDataStore', fields=[ ('datastorebase_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='djangomapserver.DataStoreBase')), ('path', models.CharField(help_text=b'Path to the Spatialite database file.', max_length=255)), ], options={ }, bases=('djangomapserver.datastorebase',), ), migrations.CreateModel( name='StyleObj', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('class_obj', models.ForeignKey(to='djangomapserver.ClassObj')), ('color', models.ForeignKey(to='djangomapserver.MapServerColor')), ], options={ }, bases=(models.Model,), ), migrations.AddField( model_name='mapobj', name='extent', field=models.ForeignKey(help_text=b"Map's spatial extent.", to='djangomapserver.RectObj'), preserve_default=True, ), migrations.AddField( model_name='mapobj', name='image_color', field=models.ForeignKey(blank=True, to='djangomapserver.MapServerColor', help_text=b'Initial map background color.', null=True), preserve_default=True, ), migrations.AddField( model_name='mapobj', name='layers', field=models.ManyToManyField(to='djangomapserver.LayerObj', null=True, through='djangomapserver.MapLayer', blank=True), preserve_default=True, ), migrations.AddField( model_name='maplayer', name='map_obj', field=models.ForeignKey(to='djangomapserver.MapObj'), preserve_default=True, ), migrations.AddField( model_name='maplayer', name='style', field=models.ForeignKey(blank=True, to='djangomapserver.StyleObj', null=True), preserve_default=True, ), migrations.AddField( model_name='layerobj', name='data_store', field=models.ForeignKey(to='djangomapserver.DataStoreBase'), preserve_default=True, ), migrations.AddField( model_name='layerobj', name='extent', field=models.ForeignKey(help_text=b"Layer's spatial extent.", to='djangomapserver.RectObj'), preserve_default=True, ), migrations.AddField( model_name='classobj', name='layer_obj', field=
import pyaf.Bench.TS_datasets as tsds import tests.artificial.process_artificial_
dataset as art art.process_dataset(N = 32 , FREQ = 'D', seed = 0, trendtype = "MovingAverage", cycle_length = 30, transform = "
None", sigma = 0.0, exog_count = 20, ar_order = 12);
# -*- coding: utf-8 -*- # Copyright (c) 2009-2014, Erkan Ozgur Yilmaz # # This module is part of oyProjectManager and is released under the BSD 2 # License: http://www.opensource.org/licenses/BSD-2-Clause """ Database Module =============== This is where all the magic happens. .. versionadded:: 0.2.0 SQLite3 Database: To hold the information about all the data created :class:`~oyProjectManager.models.project.Project`\ s, :class:`~oyProjectManager.models.sequence.Sequence`\ s, :class:`~oyProjectManager.models.shot.Shot`\ s, :class:`~oyProjectManager.models.asset.Asset`\ s and :class:`~oyProjectManager.models.version.VersionType`\ s , there is a ".metadata.db" file in the repository root. This SQLite3 database has all the information about everything. With this new extension it is much faster to query any data needed. Querying data is very simple and fun. To get any kind of data from the database, just call the ``db.setup()`` and then use ``db.query`` to get the data. For a simple example, lets get all the shots for a Sequence called "TEST_SEQ" in the "TEST_PROJECT":: from oyProjectManager import db from oyProjectManager import Project, Sequence, Shot # setup the database session db.setup() all_shots = Shot.query().join(Sequence).\ filter(Sequence.project.name="TEST_PROJECT").\ filter(Shot.sequence.name=="TEST_SEQ").all() that's it. """ import os import logging import sqlalchemy import oyProjectManager from oyProjectManager.db.declarative import Base # SQLAlchemy database engine engine = None # SQLAlchemy session manager session = None query = None # SQLAlchemy metadata metadata = None database_url = None # create a logger logger = logging.getLogger(__name__) #logger.setLevel(logging.WARNING) logger.setLevel(logging.DEBUG) def setup(database_url_in=None): """Utility function that helps to connect the system to the given database. Returns the created session :param database_url_in: The database address, default is None. If the database_url is skipped or given as None, the default database url from the :mod:`oyProjectManager.config` will be used. This is good, just call ``db.setup()`` and then use ``db.session`` and ``db.query`` to get th
e data. :returns: sqlalchemy.orm.session """
global engine global session global query global metadata global database_url # create engine # TODO: create tests for this if database_url_in is None: logger.debug("using the default database_url from the config file") # use the default database conf = oyProjectManager.conf database_url_in = conf.database_url # expand user and env variables if any # TODO: because the dialect part and the address part are now coming from # from one source, it is not possible to expand any variables in the path, # try to use SQLAlchemy to separate the dialect and the address part and # expand any data and then merge it again #database_url_in = os.path.expanduser( # os.path.expandvars( # os.path.expandvars( # database_url_in # ) # ) #) while "$" in database_url_in or "~" in database_url_in: database_url_in = os.path.expanduser( os.path.expandvars( database_url_in ) ) database_url = database_url_in logger.debug("setting up database in %s" % database_url) engine = sqlalchemy.create_engine(database_url, echo=False) # create the tables metadata = Base.metadata metadata.create_all(engine) # create the Session class Session = sqlalchemy.orm.sessionmaker(bind=engine) # create and save session object to session session = Session() query = session.query # initialize the db __init_db__() # TODO: create a test to check if the returned session is session return session def __init_db__(): """initializes the just setup database It adds: - Users - VersionTypes to the database. """ logger.debug("db is newly created, initializing the db") global query global session # get the users from the config from oyProjectManager import conf # ------------------------------------------------------ # create the users from oyProjectManager.models.auth import User # get all users from db users_from_db = query(User).all() for user_data in conf.users_data: name = user_data.get("name") initials = user_data.get("initials") email = user_data.get("email") user_from_config = User(name, initials, email) if user_from_config not in users_from_db: session.add(user_from_config) # ------------------------------------------------------ # add the VersionTypes from oyProjectManager.models.version import VersionType version_types_from_db = query(VersionType).all() for version_type in conf.version_types: version_type_from_conf = VersionType(**version_type) if version_type_from_conf not in version_types_from_db: session.add(version_type_from_conf) session.commit() logger.debug("finished initialization of the db")
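# A minimal usage sketch, not part of the original module (the project name
# below is a placeholder):
#
#   from oyProjectManager import db
#   from oyProjectManager.models.project import Project
#
#   db.setup()  # connect using the default database_url from the config
#   project = db.query(Project).filter(Project.name == "TEST_PROJECT").first()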
By: # # Andrea Gavana, @ 16 Aug 2007 # Latest Revision: 14 Apr 2010, 12.00 GMT # # # TODO List # # 1. Find A Way To Reduce Flickering On The 2 ColourPanels; # # 2. See Why wx.GCDC Doesn't Work As I Thought (!). It Looks Slow As A Turtle, # But Probably I Am Doing Something Wrong While Painting The Alpha Textures. # # # For All Kind Of Problems, Requests Of Enhancements And Bug Reports, Please # Write To Me At: # # andrea.gavana@gmail.com # gavana@kpo.kz # # Or, Obviously, To The wxPython Mailing List!!! # # # End Of Comments # --------------------------------------------------------------------------- # """ CubeColourDialog is an alternative implementation of `wx.ColourDialog`. Description =========== The CubeColourDialog is an alternative implementation of `wx.ColourDialog`, and it offers different functionalities with respect to the default wxPython one. It can be used as a replacement of `wx.ColourDialog` with exactly the same syntax and methods. Some features: - RGB components may be controlled using spin controls or with mouse gestures on a 3D RGB cube, with the 3 components laying on the X, Y, Z axes; - HSB components may be controlled using spin controls or with mouse gestures on a 2D colour wheel; - Brightness has its own vertical slider to play with; - The colour alpha channel can be controlled using another vertical slider, or via spin control; - The colour alpha channel controls can be completely hidden at startup or the choice to use the alpha channel can be left to the user while playing with the dialog, via a simple `wx.CheckBox`; - The "old colour" and "new colour" are displayed in two small custom panel, which support alpha transparency and texture; - CubeColourDialog displays also the HTML colour code in hexadeci
mal format; - When available, a corresponding "Web Safe" colour is generated using a 500 web colours "database" (a dictionary inside the widget source code). Web Safe colours are re
cognized by all the browsers; - When available, a corresponding "HTML name" for the selected colour is displayed, by using the same 500 web colours "database"; - When available, a corresponding "Microsoft Access Code" for the selected colour is displayed, by using the same 500 web colours "database". And much more. Window Styles ============= This class supports the following window styles: ================== =========== ================================================== Window Styles Hex Value Description ================== =========== ================================================== ``CCD_SHOW_ALPHA`` 0x1 Show the widget used to control colour alpha channels in `CubeColourDialog`. ================== =========== ================================================== Events Processing ================= `No custom events are available for this class.` License And Version =================== CubeColourDialog is distributed under the wxPython license. Latest Revision: Andrea Gavana @ 14 Apr 2010, 12.00 GMT Version 0.3. """ __docformat__ = "epytext" #---------------------------------------------------------------------- # Beginning Of CUBECOLOURDIALOG wxPython Code #---------------------------------------------------------------------- import wx import colorsys from math import pi, sin, cos, sqrt, atan2 from wx.lib.embeddedimage import PyEmbeddedImage # Define a translation string _ = wx.GetTranslation # Show the alpha control in the dialog CCD_SHOW_ALPHA = 1 """ Show the widget used to control colour alpha channels in `CubeColourDialog`. """ # Radius of the HSB colour wheel RADIUS = 100 """ Radius of the HSB colour wheel. """ # Width of the mouse-controlled colour pointer RECT_WIDTH = 5 """ Width of the mouse-controlled colour pointer. """ # Dictionary keys for the RGB colour cube RED, GREEN, BLUE = 0, 1, 2 """ Dictionary keys for the RGB colour cube. 
""" Vertex = wx.Point(95, 109) Top = wx.Point(95, 10) Left = wx.Point(16, 148) Right = wx.Point(174, 148) colourAttributes = ["r", "g", "b", "h", "s", "v"] colourMaxValues = [255, 255, 255, 359, 255, 255] checkColour = wx.Colour(200, 200, 200) HTMLCodes = {'#B0171F': ['Indian red', '2037680', ''], '#DC143C': ['Crimson', '3937500', '#CC0033'], '#FFB6C1': ['Lightpink', '12695295', '#FFCCCC'], '#FFAEB9': ['Lightpink 1', '12168959', ''], '#EEA2AD': ['Lightpink 2', '11379438', ''], '#CD8C95': ['Lightpink 3', '9800909', ''], '#8B5F65': ['Lightpink 4', '6643595', ''], '#FFC0CB': ['Pink', '13353215', '#FFCCCC'], '#FFB5C5': ['Pink 1', '12957183', ''], '#EEA9B8': ['Pink 2', '12102126', ''], '#CD919E': ['Pink 3', '10392013', ''], '#8B636C': ['Pink 4', '7103371', ''], '#DB7093': ['Palevioletred', '9662683', '#CC6699'], '#FF82AB': ['Palevioletred 1', '11240191', ''], '#EE799F': ['Palevioletred 2', '10451438', ''], '#CD6889': ['Palevioletred 3', '9005261', ''], '#8B475D': ['Palevioletred 4', '6113163', ''], '#FFF0F5': ['Lavenderblush 1 (lavenderblush)', '16118015', '#FFFFFF'], '#EEE0E5': ['Lavenderblush 2', '15065326', ''], '#CDC1C5': ['Lavenderblush 3', '12960205', ''], '#8B8386': ['Lavenderblush 4', '8815499', ''], '#FF3E96': ['Violetred 1', '9846527', ''], '#EE3A8C': ['Violetred 2', '9190126', ''], '#CD3278': ['Violetred 3', '7877325', ''], '#8B2252': ['Violetred 4', '5382795', ''], '#FF69B4': ['Hotpink', '11823615', '#FF66CC'], '#FF6EB4': ['Hotpink 1', '11824895', ''], '#EE6AA7': ['Hotpink 2', '10971886', ''], '#CD6090': ['Hotpink 3', '9461965', ''], '#8B3A62': ['Hotpink 4', '6437515', ''], '#872657': ['Raspberry', '5711495', ''], '#FF1493': ['Deeppink 1 (deeppink)', '9639167', '#FF0099'], '#EE1289': ['Deeppink 2', '8983278', ''], '#CD1076': ['Deeppink 3', '7737549', ''], '#8B0A50': ['Deeppink 4', '5245579', ''], '#FF34B3': ['Maroon 1', '11744511', ''], '#EE30A7': ['Maroon 2', '10957038', ''], '#CD2990': ['Maroon 3', '9447885', ''], '#8B1C62': ['Maroon 4', '6429835', ''], '#C71585': ['Mediumvioletred', '8721863', '#CC0066'], '#D02090': ['Violetred', '9445584', ''], '#DA70D6': ['Orchid', '14053594', '#CC66CC'], '#FF83FA': ['Orchid 1', '16417791', ''], '#EE7AE9': ['Orchid 2', '15301358', ''], '#CD69C9': ['Orchid 3', '13199821', ''], '#8B4789': ['Orchid 4', '8996747', ''], '#D8BFD8': ['Thistle', '14204888', '#CCCCCC'], '#FFE1FF': ['Thistle 1', '16769535', ''], '#EED2EE': ['Thistle 2', '15651566', ''], '#CDB5CD': ['Thistle 3', '13481421', ''], '#8B7B8B': ['Thistle 4', '9141131', ''], '#FFBBFF': ['Plum 1', '16759807', ''], '#EEAEEE': ['Plum 2', '15642350', ''], '#CD96CD': ['Plum 3', '13473485', ''], '#8B668B': ['Plum 4', '9135755', ''], '#DDA0DD': ['Plum', '14524637', '#CC99CC'], '#EE82EE': ['Violet', '15631086', '#FF99FF'], '#FF00FF': ['Magenta (fuchsia)', '16711935', '#FF00FF'], '#EE00EE': ['Magenta 2', '15597806', ''], '#CD00CD': ['Magenta 3', '13435085', ''], '#8B008B': ['Magenta 4 (darkmagenta)', '9109643', '#990099'], '#800080': ['Purple', '8388736', '#990099'], '#BA55D3': ['Mediumorchid', '13850042', '#CC66CC'], '#E066FF': ['Mediumorchid 1', '16738016', ''], '#D15FEE': ['Mediumorchid 2', '15622097', ''], '#B452CD': ['Mediumorchid 3', '13456052', ''], '#7A378B': ['Mediumorchid 4', '9123706', ''], '#9400D3': ['Darkviolet', '13828244', '#9900CC'], '#9932CC': ['Darkorchid', '13382297', '#9933CC'], '#BF3EFF': ['Darkorchid 1', '16727743',
amedtuple import gimp from . import pgpath from . import objectfilter #=============================================================================== pdb = gimp.pdb #========================================
======================================= class ItemData(object): """ This class i
s an interface to store all items (and item groups) of a certain type (e.g. layers, channels or paths) of a GIMP image in an ordered dictionary, allowing to access the items via their names and get various custom attributes derived from the existing item attributes. Use one of the subclasses for items of a certain type: * `LayerData` for layers, * `ChannelData` for channels, * `PathData` for paths (vectors). For custom item attributes, see the documentation for the `_ItemDataElement` class. `_ItemDataElement` is common for all `ItemData` subclasses. Attributes: * `image` - GIMP image to get item data from. * `is_filtered` - If True, ignore items that do not match the filter (`ObjectFilter`) in this object when iterating. * `filter` (read-only) - `ObjectFilter` instance where you can add or remove filter rules or subfilters to filter items. """ __metaclass__ = abc.ABCMeta def __init__(self, image, is_filtered=False, filter_match_type=objectfilter.ObjectFilter.MATCH_ALL): self.image = image self.is_filtered = is_filtered # Filters applied to all items in self._itemdata self._filter = objectfilter.ObjectFilter(filter_match_type) # Contains all items (including item groups) in the item tree. # key: `_ItemDataElement.orig_name` (derived from `gimp.Item.name`, which is unique) # value: `_ItemDataElement` object self._itemdata = OrderedDict() # key `_ItemDataElement` object (parent) or None (root of the item tree) # value: set of `_ItemDataElement` objects self._uniquified_itemdata = {} self._fill_item_data() @property def filter(self): return self._filter def __getitem__(self, name): """ Access an `_ItemDataElement` object by its `orig_name` attribute. """ return self._itemdata[name] def __contains__(self, name): """ Return True if an `_ItemDataElement` object, specified by its `orig_name` attribute, is in the item data. Otherwise return False. """ return name in self._itemdata def __len__(self): """ Return the number of all item data elements - that is, all immediate children of the image and all nested children. """ return len([item_elem for item_elem in self]) def __iter__(self): """ If `is_filtered` is False, iterate over all items. If `is_filtered` is True, iterate only over items that match the filter in this object. Yields: * `item_elem` - The current `_ItemDataElement` object. """ if not self.is_filtered: for item_elem in self._itemdata.values(): yield item_elem else: for item_elem in self._itemdata.values(): if self._filter.is_match(item_elem): yield item_elem def _items(self): """ Yield current (`gimp.Item.name`, `_ItemDataElement` object) tuple. """ if not self.is_filtered: for name, item_elem in self._itemdata.items(): yield name, item_elem else: for name, item_elem in self._itemdata.items(): if self._filter.is_match(item_elem): yield name, item_elem def uniquify_name(self, item_elem, include_item_path=True, uniquifier_position=None, uniquifier_position_parents=None): """ Make the `name` attribute in the specified `_ItemDataElement` object unique among all other, already uniquified `_ItemDataElement` objects. To achieve uniquification, a string ("uniquifier") in the form of " (<number>)" is inserted at the end of the item names. Parameters: * `item_elem` - `_ItemDataElement` object whose `name` attribute will be uniquified. * `include_item_path` - If True, take the item path into account when uniquifying. * `uniquifier_position` - Position (index) where the uniquifier is inserted into the current item. If the position is None, insert the uniquifier at the end of the item name (i.e. 
append it). * `uniquifier_position_parents` - Position (index) where the uniquifier is inserted into the parents of the current item. If the position is None, insert the uniquifier at the end of the name of each parent. This parameter has no effect if `include_item_path` is False. """ if include_item_path: for elem in item_elem.parents + [item_elem]: parent = elem.parent if parent not in self._uniquified_itemdata: self._uniquified_itemdata[parent] = set() if elem not in self._uniquified_itemdata[parent]: item_names = set([elem_.name for elem_ in self._uniquified_itemdata[parent]]) if elem.name not in item_names: self._uniquified_itemdata[parent].add(elem) else: if elem == item_elem: position = uniquifier_position else: position = uniquifier_position_parents elem.name = pgpath.uniquify_string(elem.name, item_names, position) self._uniquified_itemdata[parent].add(elem) else: # Use None as the root of the item tree. parent = None if parent not in self._uniquified_itemdata: self._uniquified_itemdata[parent] = set() item_elem.name = pgpath.uniquify_string( item_elem.name, self._uniquified_itemdata[parent], uniquifier_position) self._uniquified_itemdata[parent].add(item_elem.name) def _fill_item_data(self): """ Fill the _itemdata dictionary, containing <gimp.Item.name, _ItemDataElement> pairs. """ _ItemTreeNode = namedtuple('_ItemTreeNode', ['children', 'parents']) item_tree = [_ItemTreeNode(self._get_children_from_image(self.image), [])] while item_tree: node = item_tree.pop(0) index = 0 for item in node.children: parents = list(node.parents) item_elem = _ItemDataElement(item, parents) if pdb.gimp_item_is_group(item): item_tree.insert(index, _ItemTreeNode(self._get_children_from_item(item), parents + [item_elem])) index += 1 self._itemdata[item_elem.orig_name] = item_elem @abc.abstractmethod def _get_children_from_image(self, image): """ Return a list of immediate child items from the specified image. If no child items exist, return an empty list. """ pass @abc.abstractmethod def _get_children_from_item(self, item): """ Return a list of immediate child items from the specified item. If no child items exist, return an empty list. """ pass class LayerData(ItemData): def _get_children_from_image(self, image): return image.layers def _get_children_from_item(self, item): return item.layers class ChannelData(ItemData): def _get_children_from_image(self, image): return image.channels def _get_children_from_item(self, item): return item.children class PathData(ItemData): def _get_children_from_image(self, image): return image.vectors def _get_children_from_item(self, item): return item.children #=============================================================================== class _ItemDataElement(object): """ This class wraps a `gimp.Item` object and defines custom item attributes. Note that the attributes will not be up to date if changes were made to the original `gimp.Item` object. Attributes: * `item` (read-only) - `gimp.Item` object. * `parents` (read-only) - List of `_ItemDataElement` parents for this item, sorted from the topmost parent to the bottommost (immediate) parent. * `level` (read-only) - Integer indicating which level in the it
#!/usr/bin/env python
# coding: utf-8
from module import Module
import numpy as np
try:
    from im2col_cyt import im2col_cython, col2im_cython
except ImportError:
    print('Installation broken, please reinstall PyFunt')
from numpy.lib.stride_tricks import as_strided


def tile_array(a, b1, b2):
    # Expand a 2-D array by repeating every element b1 times along rows and
    # b2 times along columns, using a zero-copy strided view plus a reshape.
    r, c = a.shape
    rs, cs = a.strides
    x = as_strided(a, (r, b1, c, b2), (rs, 0, cs, 0))
    return x.reshape(r*b1, c*b2)


class SpatialUpSamplingNearest(Module):

    def __init__(self, scale):
        super(SpatialUpSamplingNearest, self).__init__()
        self.scale_factor = scale
        if self.scale_factor < 1:
            raise Exception('scale_factor must be at least 1')
        if np.floor(self.scale_factor) != self.scale_factor:
            raise Exception('scale_factor must be integer')

    def update_output(self, x):
        # x.shape is an immutable tuple, so build the output size as a list
        # before scaling the two spatial dimensions.
        out_size = list(x.shape)
        out_size[x.ndim - 1] *= self.scale_factor
        out_size[x.ndim - 2] *= self.scale_factor
        N, C, H, W = out_size
        stride = self.scale_factor
        pool_height = pool_width = stride
        x_reshaped = x.transpose(2, 3, 0, 1).flatten()
        # Broadcast every input pixel to all stride x stride positions of its
        # patch (one row of the 2-D column matrix per patch position); col2im
        # then scatters the patches into the (N, C, H, W) output.
        out_cols = np.zeros((pool_height * pool_width, x_reshaped.size))
        out_cols[:, np.arange(out_cols.shape[1])] = x_reshaped
        out = col2im_cython(out_cols, N * C, 1, H, W, pool_height,
                            pool_width, padding=0, stride=stride)
        self.output = out.reshape(out_size)
        return self.output

    def update_grad_input(self, x, grad_output, scale=1):
        N, C, H, W = grad_output.shape
        pool_height = pool_width = self.scale_factor
        stride = self.scale_factor
        out_height = (H - pool_height) // stride + 1
        out_width = (W - pool_width) // stride + 1
        grad_output_split = grad_output.reshape(N * C, 1, H, W)
        grad_output_cols = im2col_cython(
            grad_output_split, pool_height, pool_width, padding=0,
            stride=stride)
        # Only the first entry of each patch is propagated back; summing the
        # rows would be the exact adjoint of the nearest-neighbour repeat.
        grad_input_cols = grad_output_cols[0, np.arange(grad_output_cols.shape[1])]
        grad_input = grad_input_cols.reshape(
            out_height, out_width, N, C).transpose(2, 3, 0, 1)
        self.grad_input = grad_input
        return self.grad_input
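# A small self-check, not part of the original module: tile_array performs
# the same nearest-neighbour expansion as repeating along both axes with
# numpy.repeat, which is the 2-D building block of this layer.
if __name__ == '__main__':
    a = np.array([[1., 2.], [3., 4.]])
    assert np.array_equal(tile_array(a, 2, 2),
                          a.repeat(2, axis=0).repeat(2, axis=1))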
Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import logging import re import six from oslo_policy import _checks from oslo_policy._i18n import _LE LOG = logging.getLogger(__name__) def reducer(*tokens): """Decorator for reduction methods. Arguments are a sequence of tokens, in order, which should trigger running this reduction method. """ def decorator(func): # Make sure we have a list of reducer sequences if not hasattr(func, 'reducers'): func.reducers = [] # Add the tokens to the list of reducer sequences func.reducers.append(list(tokens)) return func return decorator class ParseStateMeta(type): """Metaclass for the :class:`.ParseState` class. Facilitates identifying reduction methods. """ def __new__(mcs, name, bases, cls_dict): """Create the class. Injects the 'reducers' list, a list of tuples matching token sequences to the names of the corresponding reduction methods. """ reducers = [] for key, value in cls_dict.items(): if not hasattr(value, 'reducers'): continue for reduction in value.reducers: reducers.append((reduction, key)) cls_dict['reducers'] = reducers return super(ParseStateMeta, mcs).__new__(mcs, name, bases, cls_dict) @six.add_metaclass(ParseStateMeta) class ParseState(object): """Implement the core of parsing the policy language. Uses a greedy reduction algorithm to reduce a sequence of tokens into a single terminal, the value of which will be the root of the :class:`Check` tree. .. note:: Error reporting is rather lacking. The best we can get with this parser formulation is an overall "parse failed" error. Fortunately, the policy language is simple enough that this shouldn't be that big a problem. """ def __init__(self): """Initialize the ParseState.""" self.tokens = [] self.values = [] def reduce(self): """Perform a greedy reduction of the token stream. If a reducer method matches, it will be executed, then the :meth:`reduce` method will be called recursively to search for any more possible reductions. """ for reduction, methname in self.reducers: if (len(self.tokens) >= len(reduction) and self.tokens[-len(reduction):] == reduction): # Get the reduction method meth = getattr(self, methname) # Reduce the token stream results = meth(*self.values[-len(reduction):]) # Update the tokens and values self.tokens[-len(reduction):] = [r[0] for r in results] self.values[-len(reduction):] = [r[1] for r in results] # Check for any more reductions return self.reduce() def shift(self, tok, value): """Adds one more token to the state. Calls :meth:`reduce`. """ self.tokens.append(tok) self.values.append(value) # Do a greedy reduce... self.reduce() @property def result(self): """Obtain the final result of the parse. :raises ValueError: If the parse failed to reduce to a single result. 
""" if len(self.values) != 1: raise ValueError('Could not parse rule') return self.values[0] @reducer('(', 'check', ')') @reducer('(', 'and_expr', ')') @reducer('(', 'or_expr', ')') def _wrap_check(self, _p1, check, _p2): """Turn parenthesized expressions into a 'check' token.""" return [('check', check)] @reducer('check', 'and', 'check') def _make_and_expr(self, check1, _and, check2): """Create an 'and_expr'. Join two checks by the 'and' operator. """ return [('and_expr', _checks.AndCheck([check1, check2]))] @reducer('and_expr', 'and', 'check') def _extend_and_expr(self, and_expr, _and, check): """Extend an 'and_expr' by adding one more check.""" return [('and_expr', and_expr.add_check(check))] @reducer('check', 'or', 'check') def _make_or_expr(self, check1, _or, check2): """Create an 'or_expr'. Join two checks by the 'or' operator. """ return [('or_expr', _checks.OrCheck([check1, check2]))] @reducer('or_expr', 'or', 'check') def _extend_or_expr(self, or_expr, _or, check): """Extend an 'or_expr' by adding one more check.""" return [('or_expr', or_expr.add_check(check))] @reducer('not', 'check') def _make_not_expr(self, _not, check): """Invert the result of another check.""" return [('check', _checks.NotCheck(check))] def _parse_check(rule): """Parse a single base check rule into an appropriate Check object.""" # Handle the special checks if rule == '!': return _checks.FalseCheck() elif rule == '@': return _checks.TrueCheck() try: kind, match = rule.split(':', 1) except Exception: LOG.exception(_LE('Failed to understand rule %s') % rule) # If the rule is invalid, we'll fail closed return _checks.FalseCheck() # Find what implements the check if kind in _checks.registered_checks: return _checks.registered_checks[kind](kind, match) elif None in _checks.registered_checks: return _checks.registered_checks[None](kind, match) else: LOG.error(_LE('No handler for matches of kind %s') % kind) return _checks.FalseCheck() # Used for tokenizing the policy language _tokenize_re = re.compile(r'\s+') def _parse_tokenize(rule): """Tokenizer for the policy language. Most of the single-character tokens are specified in the _tokenize_re; however, parentheses need to be handled specially, because they can appear inside a check string. Thankfully, those parentheses that appear inside a check string can never occur at the very beginning or end ("%(variable)s" is the correct syntax). """ for tok in _tokenize_re.split(rule): # Skip empty tokens if not tok or tok.isspace(): continue # Handle leading parens on the token clean = tok.lstrip('(') for i in range(len(tok) - len(clean)): yield '(', '(' # If it was only parentheses, continue if not clean: continue else: tok = clean # Handle trailing parens on the token clean = tok.rstrip(')') trail = len(tok) - len(clean) # Yield the cleaned token lowered = clean.lower() if lowered in ('and', 'or', 'not'): # Special tokens yield lowered, clean elif clean: # Not a special to
ken, but not composed solely of ')' if len(tok) >= 2 and ((tok[0], tok[-1]) in [('"', '"'), ("'", "'")]): # It's a quoted string yield 'string', tok[1:-1] else: yield 'check', _parse_check(clean) # Yield the trailing parens for i in range(trail): yield '
)', ')'


def parse_rule(rule):
    """Parses policy to the tree.

    Translates a policy written in the policy language into a tree of
    Check objects.
    """

    # Empty rule means always accept
    if not rule:
        return _checks.TrueCheck()

    # Parse the token stream
    state = ParseState()
    for tok, value in _parse_tokenize(rule):
        state.shift(tok, value)

    try:
        return state.result
    except ValueError:
        # Couldn't parse the rule
        LOG.exception(_LE('Failed to understand rule %s') % rule)

        # Fail closed
        return _checks.FalseCheck()
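# A minimal usage sketch, not part of the original module: parse_rule()
# turns a policy-language string into a tree of Check objects, here an
# OrCheck over a role check and a generic check.
#
#   check = parse_rule('role:admin or is_admin:True')
#   str(check)   # -> '(role:admin or is_admin:True)'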
#!/usr/bin/env python
import telnetlib
import time
import socket
import sys
import getpass

TELNET_PORT = 23
TELNET_TIMEOUT = 6


def send_command(remote_conn, cmd):
    '''Send a command down the telnet channel and return the response.'''
    cmd = cmd.rstrip()
    remote_conn.write(cmd + '\n')
    time.sleep(1)
    return remote_conn.read_very_eager()


def login(remote_conn, username, password):
    '''Login to pynet-rtr1.'''
    output = remote_conn.read_until("sername:", TELNET_TIMEOUT)
    remote_conn.write(username + '\n')
    output += remote_conn.read_until("ssword:", TELNET_TIMEOUT)
    remote_conn.write(password + '\n')
    return output


def no_more(remote_conn, paging_cmd='terminal length 0'):
    '''Disable paging of output.'''
    return send_command(remote_conn, paging_cmd)


def telnet_connect(ip_addr):
    '''Establish the Telnet connection.'''
    try:
        return telnetlib.Telnet(ip_addr, TELNET_PORT, TELNET_TIMEOUT)
    except socket.timeout:
        sys.exit("Connection timed-out")


def main():
    '''Connect to pynet-rtr1, login, and issue 'show ip int brief'.'''
    ip_addr = raw_input("IP address: ")
    ip_addr = ip_addr.strip()

    username = 'pyclass'
    password = getpass.getpass()

    remote_conn = telnet_connect(ip_addr)
    output = login(remote_conn, username, password)

    time.sleep(1)
    remote_conn.read_very_eager()
    no_more(remote_conn)

    output = send_command(remote_conn, 'show ip int brief')

    print "\n\n"
    print output
    print "\n\n"

    remote_conn.close()


if __name__ == "__main__":
    main()
from django.shortcuts import render, render_to_response from django.shortcuts import redirect from django.template import RequestContext from django.http import HttpResponseRedirect from django.core.urlresolvers import reverse from django.conf import settings from manage.forms import * from manage.models import * from tasks.models import * import os import csv from django.http import HttpResponse, HttpRequest # Views def login(request): if request.method == "POST": username = request.POST.get('username') password = request.POST.get('password') if (username == settings.MANAGE_USERNAME and password == settings.MANAGE_PASS): return redirect('manage.views.main') return render(request, 'manage/login.html', {}) def main(request): # Make sure no direct access to main page try: referer = request.META['HTTP_REFERER'] except: return redirect('manage.views.login') if referer.startswith('http://colcat.calit2.uci.edu:8003'): return render(request, 'manage/main.html', {}) return redirect('manage.views.login') # LANGUAGES def new_language(request): if request.method == "POST": form = LanguageForm(request.POST) if form.is_valid(): post = form.save(commit=False) post.save() return HttpResponseRedirect(reverse('manage.views.view_languages')) else: form = LanguageForm() return render(request, 'manage/new-language.html', {'form': form}) def view_languages(request): language_list = Language.objects.all() context_dict = {'languages': language_list} return render(request, 'manage/view-languages.html', context_dict) # IMAGES def new_image(request): # Handle file upload if request.method == 'POST': form = ImageForm(request.POST, request.FILES) if form.is_valid(): print request.FILES['image_filepath'].name print request.FILES['image_filepath'] image_name = os.path.splitext(request.FILES['image_filepath'].name)[0] newimg = Image_Data(image_filepath = request.FILES['image_filepath'], image_id = image_name, language_name = request.POST.get('language_name'), task_type_id = request.POST.get('task_type_id')) newimg.save() # Redirect to the document list after POST return HttpResponseRedirect(reverse('manage.vie
ws.view_images')) else: form = ImageForm() # A empty, unbound form return render(request, 'manage/new-image.html', {'form': form}) def view_images(request): image_list = Image_Data.objects.all() context_dict = {'images': image_list} return render(request, 'manage/view-images.html', context_dict) # DATA MODELS def new_data_model(request): if request.method == "POST": form = DataModelForm(request.POST) if form
.is_valid(): post = form.save(commit=False) post.save() return HttpResponseRedirect(reverse('manage.views.view_data_models')) else: form = DataModelForm() return render(request, 'manage/new-data-model.html', {'form': form}) def view_data_models(request): model_list = Data_Model.objects.all() context_dict = {'models': model_list} return render(request, 'manage/view-data-models.html', context_dict) # TASKS def new_task(request): if request.method == "POST": form = TaskForm(request.POST) if form.is_valid(): post = form.save(commit=False) post.language_id = request.POST.get('language_id') post.task_type_id = request.POST.get('task_type_id') post.image_id = request.POST.get('image_id') post.task_name = request.POST.get('language_id') + '_' + request.POST.get('task_type_id') + '_' + request.POST.get('image_id') post.task_url = '/tasks/'+request.POST.get('language_id')+'/'+request.POST.get('task_type_id') + '/'+request.POST.get('image_id') post.save() return HttpResponseRedirect(reverse('manage.views.view_tasks')) else: form = TaskForm() return render(request, 'manage/new-task.html', {'form': form}) def view_tasks(request): if request.method == "POST": if 'create_batch_file' in request.POST: print "Creating batch file..." task_choices = request.POST.getlist('task_choices') response = HttpResponse(content_type='text/csv') response['Content-Disposition'] = 'attachment; filename="batch.csv"' writer = csv.writer(response) headers = ['task_language_id', 'task_type_id', 'task_img_id'] writer.writerow(headers) for tid in task_choices: task = Task.objects.get(task_id=tid) task_info = [task.language_id, task.task_type_id, task.image_id] writer.writerow(task_info) print 'Finished writing batch file' return response elif 'mark_tasks_complete' in request.POST: print "Marking tasks complete..." 
tasks_complete = request.POST.getlist('tasks_complete') print tasks_complete for tid in tasks_complete: task = Task.objects.get(task_id=tid) task.complete = True task.save() task_list = Task.objects.all() context_dict = {'tasks': task_list} return render(request, 'manage/view-tasks.html', context_dict) def new_task_type(request): if request.method == "POST": form = TaskTypeForm(request.POST) if form.is_valid(): post = form.save(commit=False) post.save() return HttpResponseRedirect(reverse('manage.views.view_task_types')) else: form = TaskTypeForm() return render(request, 'manage/new-task-type.html', {'form': form}) def view_task_types(request): task_type_list = Task_Type.objects.all() context_dict = {'task_types': task_type_list} return render(request, 'manage/view-task-types.html', context_dict) def new_task_template(request): if request.method == "POST": form = TaskTemplateForm(request.POST) if form.is_valid(): post = form.save(commit=False) post.save() return HttpResponseRedirect(reverse('manage.views.view_task_templates')) else: form = TaskTemplateForm() return render(request, 'manage/new-task-template.html', {'form': form}) def view_task_templates(request): template_list = Task_Template.objects.all() context_dict = {'templates': template_list} return render(request, 'manage/view-task-templates.html', context_dict) # RESPONSES def download_responses(request): response_lists = [] # Add objects for each response type try: response_list_foci_001 = Task_Foci_001.objects.all() response_lists.append(response_list_foci_001) except: pass try: response_list_naming_001 = Task_Naming_001.objects.all() response_lists.append(response_list_naming_001) except: pass context_dict = {'response_lists': [r.model.__name__ for r in response_lists]} for rlist in response_lists: write_responses_to_csv(rlist, 'uploads/responses/'+rlist.model.__name__+'.csv') return render(request, 'manage/download-responses.html', context_dict) import csv from django.db.models.loading import get_model def write_responses_to_csv(qs, outfile_path): model = qs.model writer = csv.writer(open(outfile_path, 'w')) headers = [] for field in model._meta.fields: headers.append(field.name) writer.writerow(headers) for obj in qs: row = [] for field in headers: val = getattr(obj, field) if callable(val): val = val() if type(val) == unicode: val = val.encode("utf-8") row.append(val) writer.writerow(row)
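# A minimal usage sketch for the CSV export above (any model's queryset
# works, since the headers are derived from model._meta.fields):
#
#   qs = Task_Foci_001.objects.all()
#   write_responses_to_csv(qs, 'uploads/responses/Task_Foci_001.csv')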
import numpy as np
from scipy.stats import sem
import scipy.constants as const
from uncertainties import ufloat
import uncertainties.unumpy as unp
from uncertainties.unumpy import (nominal_values as noms, std_devs as stds)
import matplotlib.pyplot as plt
from scipy.optimize import curve_fit
from PIL import Image
import scipy.misc
from pint import UnitRegistry

u = UnitRegistry()
Q_ = u.Quantity

## Wavelengths in nm
lambda_b = Q_(480.0, 'nanometer')
n_b = 1.4635

h = Q_(const.h, 'joule * second')
e_0 = Q_(const.e, 'coulomb')
mu_bohr = Q_(const.physical_constants['Bohr magneton'][0], 'joule/tesla')
c = Q_(const.c, 'meter / second')
d = Q_(4, 'millimeter')

dispsgebiet_b = lambda_b**2 / (2 * d) * np.sqrt(1 / (n_b**2 - 1))

## Hysteresis, B in mT
def poly(x, a, b, c, d):
    return a * x**3 + b * x**2 + c * x + d

B_auf = np.array([4, 87, 112, 174, 230, 290, 352, 419, 476, 540, 600, 662,
                  714, 775, 823, 872, 916, 959, 987, 1015, 1046, 1072])
B_ab = np.array([7, 57, 120, 180, 251, 306, 361, 428, 480, 550, 612, 654,
                 715, 780, 830, 878, 924, 962, 993, 1020, 1050, 1072])
I = np.linspace(0, 21, 22)

params_B_auf, covariance_B_auf = curve_fit(poly, I, B_auf)
params_B_ab, covariance_B_ab = curve_fit(poly, I, B_ab)

### BLUE ###
## Image one, timestamp 10:33
## Image two: I = 5.6 A, Pol = +-1
## Distances from one line to the two neighbouring
## lines, measured +-> |*| |*| (this is how 1 was measured)
## two contains the distances between the peaks of a split line
## Pixel widths of the 3 + 13 lines
pixel_01_b = np.array([(1405 + 1244) / 2, (1690 + 1541) / 2, (1952 + 1852) / 2,
                       (2170 + 2055) / 2, (2399 + 2278) / 2, (2596 + 2481) / 2,
                       (2781 + 2673) / 2, (2961 + 2861) / 2, (3130 + 3033) / 2,
                       (3294 + 3202) / 2])
pixel_02_b_1 = np.array([(1419 + 1060) / 2, (1728 + 1419) / 2, (1973 + 1728) / 2,
                         (1973 + 1728) / 2, (2215 + 1973) / 2, (2435 + 2215) / 2,
                         (2638 + 2435) / 2, (2816 + 2638) / 2, (3013 + 2816) / 2,
                         (3176 + 3010) / 2, (3342 + 3176) / 2])
pixel_02_b_2 = np.array([(1494 - 1339), (1776 - 1657), (2035 - 1910),
                         (2273 - 2154), (2478 - 2377), (2677 - 2582),
                         (2873 - 2769), (3045 - 2959), 3217 - 3135,
                         3383 - 3303])

delta_S_b = np.zeros(len(pixel_01_b) - 1)
for i in range(0, len(pixel_01_b) - 1, 1):
    delta_S_b[i] = pixel_01_b[i + 1] - pixel_01_b[i]
#print(delta_S_b)

del_S_b = pixel_02_b_2[1:10]  #np.zeros(9)
#for i in range(0, len(pixel_02_b_2) - 1, 1):
#    del_S_b[i] = pixel_02_b_2[i + 1] - pixel_02_b_2[i]

del_lambda_b = (1 / 2 * dispsgebiet_b * del_S_b / delta_S_b)
delta_E_b = (h * c / lambda_b**2 * del_lambda_b).to('eV')
g_b = (delta_E_b / (mu_bohr * Q_(poly(5.6, *params_B_auf), 'millitesla'))).to('dimensionless')
g_b_best = ufloat(np.mean(g_b), np.std(g_b, ddof=1))
print(g_b, '##', g_b_best)
print(del_S_b, '##', delta_S_b)
print('Hysteresis 5.6 A', poly(5.6, *params_B_auf))
print((2 + 3/2) / 2)
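# Summary of the evaluation above (added comment): the wavelength shift is
# del_lambda = (1/2) * dispsgebiet_b * (del_S / delta_S), the energy shift
# is delta_E = h * c / lambda**2 * del_lambda, and the Lande factor follows
# as g = delta_E / (mu_B * B), with B taken from the hysteresis fit at
# I = 5.6 A.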
from mutant_django.generator import DjangoBase


def register(app):
    app.extend_generator('django', django_json_field)


def django_json_field(gen):
    gen.field_generators['JSON'] = JSONField


class JSONField(DjangoBase):
    DJANGO_FIELD = 'JSONField'

    def render_imports(self):
        return ['from jsonfield import JSONField']
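# A usage sketch (assumed mutant workflow, for illustration only): once
# register(app) has extended the 'django' generator, a schema attribute of
# type 'JSON' is rendered as a jsonfield.JSONField on the generated model,
# and 'from jsonfield import JSONField' is emitted with the module imports.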
self.is_bdb_compiled(): # Make a legacy wallet and check it is BDB self.nodes[0].createwallet(wallet_name="legacy1", descriptors=False) wallet_info = self.nodes[0].getwalletinfo() assert_equal(wallet_info['format'], 'bdb') self.nodes[0].unloadwallet("legacy1") else: self.log.warning("Skipping BDB test") # Make a descriptor wallet self.log.info("Making a descriptor wallet") self.nodes[0].createwallet(wallet_name="desc1", descriptors=True) # A descriptor wallet should have 100 addresses * 4 types = 400 keys self.log.info("Checking wallet info") wallet_info = self.nodes[0].getwalletinfo() assert_equal(wallet_info['format'], 'sqlite') assert_equal(wallet_info['keypoolsize'], 400) assert_equal(wallet_info['keypoolsize_hd_internal'], 400) assert 'keypoololdest' not in wallet_info # Check that getnewaddress works self.log.info("Test that getnewaddress and getrawchangeaddress work") addr = self.nodes[0].getnewaddress("", "legacy") addr_info = self.nodes[0].getaddressinfo(addr) assert addr_info['desc'].startswith('pkh(') assert_equal(addr_info['hdkeypath'], 'm/44\'/1\'/0\'/0/0') addr = self.nodes[0].getnewaddress("", "p2sh-segwit") addr_info = self.nodes[0].getaddressinfo(addr) assert addr_info['desc'].startswith('sh(wpkh(') assert_equal(addr_info['hdkeypath'], 'm/49\'/1\'/0\'/0/0') addr = self.nodes[0].getnewaddress("", "bech32") addr_info = self.nodes[0].getaddressinfo(addr) assert addr_info['desc'].startswith('wpkh(') assert_equal(addr_info['hdkeypath'], 'm/84\'/1\'/0\'/0/0') # Check that getrawchangeaddress works addr = self.nodes[0].getrawchangeaddress("legacy") addr_info = self.nodes[0].getaddressinfo(addr) assert addr_info['desc'].startswith('pkh(') assert_equal(addr_info['hdkeypath'], 'm/44\'/1\'/0\'/1/0') addr = self.nodes[0].getrawchangeaddress("p2sh-segwit") addr_info = self.nodes[0].getaddressinfo(addr) assert addr_info['desc'].startswith('sh(wpkh(') assert_equal(addr_info['hdkeypath'], 'm/49\'/1\'/0\'/1/0') addr = self.nodes[0].getrawchangeaddress("bech32") addr_info = self.nodes[0].getaddressinfo(addr) assert addr_info['desc'].startswith('wpkh(') assert_equal(addr_info['hdkeypath'], 'm/84\'/1\'/0\'/1/0') # Make a wallet to receive coins at self.nodes[0].createwallet(wallet_name="desc2", descriptors=True) recv_wrpc = self.nodes[0].get_wallet_rpc("desc2") send_wrpc = self.nodes[0].get_wallet_rpc("desc1") # Generate some coins self.generatetoaddress(self.nodes[0], COINBASE_MATURITY + 1, send_wrpc.getnewaddress()) # Make transactions self.log.info("Test sending and receiving") addr = recv_wrpc.getnewaddress() send_wrpc.sendtoaddress(addr, 10) # Make sure things are disabled self.log.info("Test disabled RPCs") assert_raises_rpc_error(-4, "This type of wallet does not support this command", recv_wrpc.rpc.importprivkey, "cVpF924EspNh8KjYsfhgY96mmxvT6DgdWiTYMtMjuM74hJaU5psW") assert_raises_rpc_error(-4, "This type of wallet does not support this command", recv_wrpc.rpc.importpubkey, send_wrpc.getaddressinfo(send_wrpc.getnewaddress())) assert_raises_rpc_error(-4, "This type of wallet does not support this command", recv_wrpc.rpc.importaddress, recv_wrpc.getnewaddress()) assert_raises_rpc_error(-4, "This type of wallet does not support this command", recv_wrpc.rpc.importmulti, []) assert_raises_rpc_error(-4, "This type of wallet does not support this command", recv_wrpc.rpc.addmultisigaddress, 1, [recv_wrpc.getnewaddress()]) assert_raises_rpc_error(-4, "This type of wallet does not support this command", recv_wrpc.rpc.dumpprivkey, recv_wrpc.getnewaddress()) assert_raises_rpc_error(-4, "This 
type of wallet does not support this command", recv_wrpc.rpc.dumpwallet, 'wallet.dump') assert_raises_rpc_error(-4, "This type of wallet does not support this command", recv_wrpc.rpc.importwallet, 'wallet.dump') assert_raises_rpc_error(-4, "This type of wallet does not support this command", recv_wrpc.rpc.sethdseed) self.log.info("Test encryption") # Get the master fingerprint before encrypt info1 = send_wrpc.getaddressinfo(send_wrpc.getnewaddress()) # Encrypt wallet 0 send_wrpc.encryptwallet('pass') send_wrpc.walletpassphrase('pass', 10) addr = send_wrpc.getnewaddress() info2 = send_wrpc.getaddressinfo(addr) assert info1['hdmasterfingerprint'] != info2['hdmasterfingerprint'] send_wrpc.walletlock() assert 'hdmasterfingerprint' in send_wrpc.getaddressinfo(send_wrpc.getnewaddress()) in
fo3 = send_wrpc.getaddressinfo(addr) assert_equal(info2['desc'], info3['desc']) s
elf.log.info("Test that getnewaddress still works after keypool is exhausted in an encrypted wallet") for _ in range(500): send_wrpc.getnewaddress() self.log.info("Test that unlock is needed when deriving only hardened keys in an encrypted wallet") send_wrpc.walletpassphrase('pass', 10) send_wrpc.importdescriptors([{ "desc": "wpkh(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0h/*h)#y4dfsj7n", "timestamp": "now", "range": [0,10], "active": True }]) send_wrpc.walletlock() # Exhaust keypool of 100 for _ in range(100): #send_wrpc.getnewaddress(address_type='bech32') send_wrpc.getnewaddress('', 'bech32') # This should now error assert_raises_rpc_error(-12, "Keypool ran out, please call keypoolrefill first", send_wrpc.getnewaddress, '', 'bech32') self.log.info("Test born encrypted wallets") self.nodes[0].createwallet('desc_enc', False, False, 'pass', False, True) enc_rpc = self.nodes[0].get_wallet_rpc('desc_enc') enc_rpc.getnewaddress() # Makes sure that we can get a new address from a born encrypted wallet self.log.info("Test blank descriptor wallets") self.nodes[0].createwallet(wallet_name='desc_blank', blank=True, descriptors=True) blank_rpc = self.nodes[0].get_wallet_rpc('desc_blank') assert_raises_rpc_error(-4, 'This wallet has no available keys', blank_rpc.getnewaddress) self.log.info("Test descriptor wallet with disabled private keys") self.nodes[0].createwallet(wallet_name='desc_no_priv', disable_private_keys=True, descriptors=True) nopriv_rpc = self.nodes[0].get_wallet_rpc('desc_no_priv') assert_raises_rpc_error(-4, 'This wallet has no available keys', nopriv_rpc.getnewaddress) self.log.info("Test descriptor exports") self.nodes[0].createwallet(wallet_name='desc_export', descriptors=True) exp_rpc = self.nodes[0].get_wallet_rpc('desc_export') self.nodes[0].createwallet(wallet_name='desc_import', disable_private_keys=True, descriptors=True) imp_rpc = self.nodes[0].get_wallet_rpc('desc_import') addr_types = [('legacy', False, 'pkh(', '44\'/1\'/0\'', -13), ('p2sh-segwit', False, 'sh(wpkh(', '49\'/1\'/0\'', -14), ('bech32', False, 'wpkh(', '84\'/1\'/0\'', -13), ('legacy', True, 'pkh(', '44\'/1\'/0\'', -13), ('p2sh-segwit', True, 'sh(wpkh(', '49\'/1\'/0\'', -14), ('bech32', True, 'wpkh(', '84\'/1\'/0\'', -13)] for addr_type, internal, desc_prefix, deriv_path, int_idx in addr_types: int_str = 'internal' if internal else 'external' self.log.info("Testing descriptor address type for {} {}".format(addr_type, int_str)) if internal: addr = exp_rpc.getrawchangeaddress(address_type=addr_type)
dimensions(8, 2)
wall((0, 2), (8, 2))
wall((1, 1.5), (1.5, 1.5))
wall((2, 1.6), (2.8, 1.6))
wall((3.1, 1.4), (3.5, 1.4))
initialRobotLoc(1.0, 1.0)
import web

urls = (
    '/hello', 'Index'
)

app = web.application(urls, globals())
render = web.template.render('/usr/local/LPTHW/ex51/gothonweb/templates/', base="layout")


class Index(object):

    def GET(self):
        return render.hello_form()

    def POST(self):
        form = web.input(name="Nobody", greet="Hello")
        greeting = "%s,%s" % (form.greet, form.name)
        return render.index(greeting=greeting)


if __name__ == '__main__':
    app.run()
import os, sys, re
import ConfigParser
import optparse
import shutil
import subprocess
import difflib
import collections
#import numpy as np

# Alberto Meseguer file; 18/11/2016
# Modified by Quim Aguirre; 13/03/2017
# This file is the master coordinator of the DIANA project. It is used to
# run multiple DIANA commands in parallel in the cluster.

#-------------#
#  Functions  #
#-------------#

#-------------#
#   Options   #
#-------------#

def parse_options():
    '''
    This function parses the command line arguments and returns an optparse object.
    '''
    parser = optparse.OptionParser("pddi.py [--dummy=DUMMY_DIR] -i INPUT_FILE [-o OUTPUT_DIR] [-v]")

    # Directory arguments
    parser.add_option("-i", action="store", type="string", dest="input_file",
                      help="Input crossings file", metavar="INPUT_FILE")
    parser.add_option("-s", action="store", type="string", dest="sif_file",
                      help="Input SIF file")
    parser.add_option("-t", action="store", type="string", dest="type_of_analysis",
                      help="Type of analysis: 'profile_creation' or 'comparison'")
    parser.add_option("--dummy_dir", default="dummy/", action="store", type="string",
                      dest="dummy_dir", help="Dummy directory (default = dummy/)",
                      metavar="DUMMY_DIR")
    parser.add_option("-w", "--workspace", dest="workspace", action="store",
                      default=os.path.join(os.path.dirname(__file__), 'workspace'),
                      help="""Define the workspace directory where the data directory and the results directory will be created""")

    (options, args) = parser.parse_args()

    if options.input_file is None or options.sif_file is None or options.type_of_analysis is None:
        parser.error("missing arguments: type option \"-h\" for help")

    return options

#-------------#
#    Main     #
#-------------#

# Add "." to sys.path #
src_path = os.path.abspath(os.path.dirname(__file__))
sys.path.append(src_path)

# Read configuration file #
config = ConfigParser.ConfigParser()
config_file = os.path.join(src_path, "config_marvin.ini")
config.read(config_file)

# Imports my functions #
import functions

# Define which python to be used #
python = os.path.join(config.get("Paths", "python_path"), "python")

# Arguments & Options #
options = parse_options()

# Directory arguments
input_file = os.path.abspath(options.input_file)
dummy_dir = os.path.abspath(options.dummy_dir)

# Workspace layout (assumed from the --workspace help text): the data and
# results directories live inside the workspace directory.
data_dir = os.path.join(options.workspace, 'data')
res_dir = os.path.join(options.workspace, 'results')

# Create directories if necessary
logs_dir = src_path + "/logs"
if not os.path.exists(logs_dir):
    os.mkdir(logs_dir)

f = open(input_file, "r")

# Depending on the type of analysis, we will submit different commands
if options.type_of_analysis == 'profile_creation':
    analysis = '-prof'
    all_drugs = set()
    for line in f:
        (drug1, drug2) = line.strip().split('---')
        all_drugs.add(drug1)
        all_drugs.add(drug2)
    f.close()
    for drug in all_drugs:
        # Check if the p-value file is already created. If so, skip
        pvalue_file = data_dir + "/" + drug + "/guild_results_using_sif/output_scores.sif.netcombo.pval"
        if os.path.exists(pvalue_file):
            continue
        guild_path = '/gpfs42/robbyfs/homes/users/qaguirre/guild/scoreN'
        command = 'python {}/diana_cluster/scripts/generate_profiles.py -d {} -pt geneid -sif {} -gu {}'.format(src_path, drug, options.sif_file, guild_path)
        print(command)
        # python /home/quim/project/diana_cluster/scripts/generate_profiles.py -d 'DCC0303' -pt 'geneid' -sif /home/quim/project/diana_cluster/workspace/sif/human_eAFF_geneid_2017.sif -gu /home/quim/project/diana_cluster/diana/toolbox/scoreN
        # To run the command at the local machine
        #os.system(command)
        # To run in the cluster submitting files to queues
        functions.submit_command_to_queue(command, max_jobs_in_queue=int(config.get("Cluster", "max_jobs_in_queue")), queue_file="command_queues_marvin.txt", dummy_dir=dummy_dir)

elif options.type_of_analysis == 'comparison':
    analysis = '-comp'
    for line in f:
        (drug1, drug2) = line.strip().split('---')
        # Check if the results are already done
        comp_results_dir = res_dir + "/results_" + drug1 + "_" + drug2
        table_file = comp_results_dir + '/table_results_' + drug1 + '_' + drug2 + '.txt'
        if os.path.exists(table_file):
            continue
        command = 'python {}/diana_cluster/scripts/compare_profiles.py -d1 {} -d2 {} -pt geneid'.format(src_path, drug1, drug2)
        print(command)
        # python /home/quim/project/diana_cluster/scripts/compare_profiles.py -d1 'DCC0303' -d2 'DCC1743' -pt 'geneid'
        # To run the command at the local machine
        #os.system(command)
        # To run in the cluster submitting files to queues
        functions.submit_command_to_queue(command, max_jobs_in_queue=int(config.get("Cluster", "max_jobs_in_queue")), queue_file="command_queues_marvin.txt", dummy_dir=dummy_dir)
    f.close()

else:
    print('The type of analysis has been wrongly defined. Introduce \'profile_creation\' or \'comparison\'')
    sys.exit(10)
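# Expected format of the input crossings file (inferred from the '---'
# split above): one pair of drug identifiers per line, e.g.
#
#   DCC0303---DCC1743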
for x in self._validModuleTargets: if x.name == name: return x return None def validModuleTargets(self): return self._validModuleTargets def invalidModuleTargets(self): return self._invalidModuleTargets # --- PARAMETERS -------------------------------------------------------------- def listParameters(self): path = self.getParametersRoot() dirs = listDirectories(path, fullpath=True) tmp = [] for x in dirs: if Parameters.check(x): tmp.append(x) if tmp is not None: tmp.sort() return tmp def openParameters(self): list = self.listParameters() self._validParameters = [] self._invalidParameters = [] for x in list: m = Parameters() if m.open(x): self._validParameters.append(m) else: self._invalidParameters.append(m) return self._validParameters def getParameterByName(self, name): if name is None: raise CoreError("CoreContainer::getModule() name is None") for x in self._validParameters: if x.name == name: return x return None def validParameters(self): return self._validParameters def invalidParameters(self): return self._invalidParameters def listParametersTargets(self): path = self.getParametersTargetsRoot() dirs = listDirectories(path, fullpath=True) if dirs is not None: dirs.sort() tmp = [] for d in dirs: files = listFilesByAndStripExtension(os.path.join(path, d), "json") if files is not None: files.sort() for f in files: tmp.append([d, f]) return tmp def openParametersTargets(self): list = self.listParametersTargets() self._validParametersTargets = [] self._invalidParametersTargets = [] for x in list: m = ParametersTarget() if m.open(x[0], x[1]): self._validParametersTargets.append(m) else: self._invalidParametersTargets.append(m) return self._validParametersTargets def validParameterTargets(self): return self._validParametersTargets def invalidParameterTargets(self): return self._invalidParametersTargets class Workspace(CoreWorkspaceBase): def __init__(self): self.root = None self.sources = None self.generated = None self.build = None self.valid = False self.reason = "" self.requiredModules = [] self.requiredPackages = [] self.core = Core() self.coreWorkspace = CoreWorkspace() self.packagesCoreDependencies = [] self.packagesWorkspaceDependencies = [] self.packagesNoneDependencies = [] self.modulesWorkspaceDependencies = [] self.modulesCoreDependencies = [] self.modulesNoneDependencies = [] def open(self, coreRoot=None, workspaceRoot=None): self.__init__() if not self.core.open(coreRoot): self.reason = self.core.reason return False else: if not self.coreWorkspace.open(self.getRoot(workspaceRoot)): self.reason = self.coreWorkspace.reason return False return True def isValid(self): return self.core.valid and self.coreWorkspace.valid def clean(self, force): root = self.getRoot() if root is not None: if not force: print("OK: " + root) else: print("!!!!") def validModuleTargets(self): return self.coreWorkspace.validModuleTargets() def invalidModuleTargets(self): return self.coreWorkspace.invalidModuleTargets() def validParameters(self): return self.coreWorkspace.validParameters() def invalidParameters(self): return self.coreWorkspace.invalidParameters() def validParameterTargets(self): return self.coreWorkspace.validParameterTargets() def invalidParameterTargets(self): return self.coreWorkspace.invalidParameterTargets() def getParameters(self, name) -> Parameters: for x in self.validParameters(): if x.name == name: return x return None def getCoreConfiguration(self, package, name): p = self.getCorePackage(package) tmp = None if p is not None: tmp = CoreConfiguration() tmp.open(name, p) return tmp 
def getCoreMessage(self, package, name): p = self.getCorePackage(package) tmp = None if p is not None: tmp = CoreMessage() tmp.open(name, p) return tm
p def getCorePackage(self, name): tmpW = self.c
oreWorkspace.getPackageByName(name) tmpC = self.core.getPackageByName(name) if tmpW is not None: return tmpW else: if tmpC is not None: return tmpC return None def getCoreModule(self, name): tmpW = self.coreWorkspace.getModuleByName(name) tmpC = self.core.getModuleByName(name) if tmpW is not None: return tmpW else: if tmpC is not None: return tmpC return None def getRequiredModules(self): tmp = [] for x in self.validModuleTargets(): tmp.append(x.module) self.requiredModules = list(set(tmp)) return self.requiredModules def getRequiredPackages(self): tmp = [] for x in self.validModuleTargets(): for y in x.requiredPackages: tmp.append(y) m = self.getCoreModule(x.module) if m is not None: for y in m.requiredPackages: tmp.append(y) for x in self.validParameterTargets(): p = self.getParameters(x.parameters) if p is not None: for y in p.requiredPackages(): tmp.append(y) self.requiredPackages = list(set(tmp)) self.requiredPackages.sort() return self.requiredPackages def checkPackagesDependencies(self): self.packagesWorkspaceDependencies = [] self.packagesCoreDependencies = [] self.packagesNoneDependencies = [] isOk = True for x in self.getRequiredPackages(): tmpW = self.coreWorkspace.getPackageByName(x) tmpC = self.core.getPackageByName(x) if tmpW is not None: self.packagesWorkspaceDependencies.append(tmpW) else: if tmpC is not None: self.packagesCoreDependencies.append(tmpC) else: self.packagesNoneDependencies.append(x) isOk = False return isOk def getPackagesDependenciesSummary(self): table = [] for x in self.getRequiredPackages(): tmpW = self.coreWorkspace.getPackageByName(x) tmpC = self.core.getPackageByName(x) l = CoreConsole.highlight(x) s = "" n = "" if tmpW is not None: if tmpC is None: s = "Workspace" else: s = "Workspace" n = "Shadows Core" else: if tmpC is not None: s = "Core" else: n = CoreConsole.error("Not found") table.append([l, s, n]) return table @staticmethod def getPackagesDependenciesSummaryFields(): return ["Package", "Source", "Notes"] def checkModulesDependencies(self): self.modulesWorkspaceDependencies = [] self.modulesCoreDependencies = [] self.modulesNoneDependencies = [] isOk = True for x in self.getRequiredModules():
es = self.get_agent_images(docker_client()) def get_agent_images(self, client): images = client.images(filters={'label': SYSTEM_LABEL}) system_images = {} for i in images: try: label_val = i['Labels'][SYSTEM_LABEL] for l in i['RepoTags']: system_images[l] = label_val
if l.endswith(':latest'): alias = l[:-7] system_images[alias] = label_val except KeyError: pass return system_images @staticmethod def get_container_by(client, func): containers = client.container
s(all=True, trunc=False) containers = filter(func, containers) if len(containers) > 0: return containers[0] return None @staticmethod def find_first(containers, func): containers = filter(func, containers) if len(containers) > 0: return containers[0] return None def on_ping(self, ping, pong): if not DockerConfig.docker_enabled(): return self._add_resources(ping, pong) self._add_instances(ping, pong) def _add_instances(self, ping, pong): if not utils.ping_include_instances(ping): return utils.ping_add_resources(pong, { 'type': 'hostUuid', 'uuid': DockerConfig.docker_uuid() }) containers = [] running, nonrunning = self._get_all_containers_by_state() for key, container in running.iteritems(): self.add_container('running', container, containers) for key, container in nonrunning.iteritems(): self.add_container('stopped', container, containers) utils.ping_add_resources(pong, *containers) utils.ping_set_option(pong, 'instances', True) def add_container(self, state, container, containers): try: labels = container['Labels'] except KeyError: labels = [] container_data = { 'type': 'instance', 'uuid': self._get_uuid(container), 'state': state, 'systemContainer': self._get_sys_container(container), 'dockerId': container['Id'], 'image': container['Image'], 'labels': labels, 'created': container['Created'], } containers.append(container_data) def _get_all_containers_by_state(self): client = docker_client(timeout=2) nonrunning_containers = {} for c in client.containers(all=True): # Blank status only wait to distinguish created from stopped if c['Status'] != '' and c['Status'] != 'Created': nonrunning_containers[c['Id']] = c running_containers = {} for c in client.containers(all=False): running_containers[c['Id']] = c del nonrunning_containers[c['Id']] return running_containers, nonrunning_containers def _get_sys_container(self, container): try: image = container['Image'] if image in self.system_images: return self.system_images[image] except (TypeError, KeyError): pass try: return container['Labels']['io.rancher.container.system'] except (TypeError, KeyError): pass def _get_uuid(self, container): try: uuid = container['Labels'][UUID_LABEL] if uuid: return uuid except (TypeError, KeyError): pass names = container['Names'] if not names: # No name?? Make one up return 'no-uuid-%s' % container['Id'] if names[0].startswith('/'): return names[0][1:] else: return names[0] def _determine_state(self, container): status = container['Status'] if status == '' or (status is not None and status.lower() == 'created'): return 'created' elif 'Up ' in status: return 'running' elif 'Exited ' in status: return 'stopped' else: # Unknown. Assume running and state should sync up eventually. 
return 'running' def _get_host_labels(self): try: return self.host_info.host_labels() except: log.exception("Error getting host labels") return {} def _get_host_create_labels(self): labels = Config.labels() if labels: return labels return {} def _add_resources(self, ping, pong): if not utils.ping_include_resources(ping): return stats = None if utils.ping_include_stats(ping): try: stats = self.host_info.collect_data() except: log.exception("Error getting host info stats") physical_host = Config.physical_host() compute = { 'type': 'host', 'kind': 'docker', 'hostname': Config.hostname(), 'createLabels': self._get_host_create_labels(), 'labels': self._get_host_labels(), 'physicalHostUuid': physical_host['uuid'], 'uuid': DockerConfig.docker_uuid(), 'info': stats } pool = { 'type': 'storagePool', 'kind': 'docker', 'name': compute['hostname'] + ' Storage Pool', 'hostUuid': compute['uuid'], 'uuid': compute['uuid'] + '-pool' } resolved_ip = socket.gethostbyname(DockerConfig.docker_host_ip()) ip = { 'type': 'ipAddress', 'uuid': resolved_ip, 'address': resolved_ip, 'hostUuid': compute['uuid'], } proxy = Config.host_proxy() if proxy is not None: compute['apiProxy'] = proxy utils.ping_add_resources(pong, physical_host, compute, pool, ip) def inspect(self, container): return docker_client().inspect_container(container) @staticmethod def _name_filter(name, container): names = container.get('Names') if names is None: return False found = False for n in names: if n.endswith(name): found = True break return found @staticmethod def _id_filter(id, container): container_id = container.get('Id') return id == container_id @staticmethod def _agent_id_filter(id, container): try: return container['Labels']['io.rancher.container.agent_id'] == id except (TypeError, KeyError, AttributeError): pass def get_container(self, client, instance, by_agent=False): if instance is None: return None # First look for UUID label directly labeled_containers = client.containers(all=True, trunc=False, filters={ 'label': '{}={}'.format(UUID_LABEL, instance.uuid)}) if len(labeled_containers) > 0: return labeled_containers[0] # Next look by UUID using fallback method container_list = client.containers(all=True, trunc=False) container = self.find_first(container_list, lambda x: self._get_uuid(x) == instance.uuid) if container: return container if hasattr(instance, 'externalId') and instance.externalId: container = self.find_first(container_list, lambda x: self._id_filter( instance.externalId, x)) if container: return container if by_agent and hasattr(instance, 'agentId') and instance.agentId: container = self.find_first(container_list, lambda x: self._agent_id_filter( str(instance.agentId), x)) return container
#!/usr/bin/env python __author__ = 'Jamie Diprose' import rospy from sensor_msgs.msg import JointState from ros_pololu_servo.msg import servo_pololu import math class EinsteinController(): def __init__(self): rospy.init_node('einstein_controller') rospy.Subscriber("joint_angles", JointState, self.handle_joint_angles, queue_size=10) self.pololu_pub =
rospy.Publisher("cmd_pololu", servo_pololu, queue_size=10)
        self.joint_ids = {'neck_yaw': 23, 'neck_roll': 2, 'neck_pitch': 3}

    def handle_joint_angles(self, msg):
        rospy.logdebug("Received a joint angle target")
        for i, joint_name in enumerate(msg.name):
            servo_msg = servo_pololu()
            servo_msg.id = self.joint_ids[joint_name]
            servo_msg.angle = msg.position[i]
            servo_msg.speed = msg.velocity[i] * 255.0
            servo_msg.acceleration = msg.effort[i]  # TODO: check this
            self.pololu_pub.publish(servo_msg)
        # TODO: enforce joint angles


if __name__ == '__main__':
    rospy.loginfo("Starting einstein_controller...")
    controller = EinsteinController()
    rospy.loginfo("einstein_controller started")
    rospy.spin()
    rospy.loginfo("einstein_controller stopped")
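
# A minimal sketch (not part of the original node) of a publisher driving the
# controller above. It illustrates the contract handle_joint_angles() relies
# on: name, position, velocity and effort must be index-aligned, and names
# must match the joint_ids keys. Topic name and values here are illustrative.
def publish_example_target():
    pub = rospy.Publisher("joint_angles", JointState, queue_size=10)
    msg = JointState()
    msg.name = ['neck_yaw', 'neck_roll', 'neck_pitch']
    msg.position = [0.1, 0.0, -0.2]   # target angles (radians)
    msg.velocity = [0.5, 0.5, 0.5]    # scaled by 255.0 into servo speed
    msg.effort = [10.0, 10.0, 10.0]   # forwarded as servo acceleration
    pub.publish(msg)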
# -*- coding: utf-8
-*- # Generated by Django 1.9.7 on 2016-08-19 21:08 from __future__ import unicode_literals import django.contrib.gis.db.models.fields from django.db import migrations, models class Migration(migrations.Migration): initial = True dependencies = [ ] operations = [ migrations.CreateModel( name='Subway', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('coordinates', django.contrib.gis.db.models.fields.PointField(null=True, srid=4326)), ('name', models.CharField(max_length=64)), ], options={ 'abstract': False, }, ), ]
# Copyright 2015 Internap. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import logging import re from twisted.internet.protocol import Protocol from lxml import etree from fake_switches.netconf import dict_2_etree, NS_BASE_1_0, normalize_operation_name, SimpleDatastore, \ Response, OperationNotSupported, NetconfError from fake_switches.netconf.capabilities import Base1_0 class NetconfProtocol(Protocol): def __init__(self, datastore=None, capabilities=None, additionnal_namespaces=None, logger=None): self.logger = logger or logging.getLogger("fake_switches.netconf") self.input_buffer = "" self.session_count = 0 self.been_greeted = False self.datastore = datastore or SimpleDatastore() caps_class_list = capabilities or [] caps_class_list.insert(0, Base1_0) self.capabilities = [cap(self.datastore) for cap in caps_class_list] self.additionnal_namespaces = additionnal_namespaces or {} def __call__(self, *args, **kwargs): return self def connectionMade(self): self.logger.info("Connected, sending <hello>") self.session_count += 1 self.say(dict_2_etree({ "hello": [ {"session-id": str(self.session_count)}, {"capabilities": [{"capability": cap.get_url()} for cap in self.capabilities]} ] })) def dataReceived(self, data): self.logger.info("Received : %s" % repr(data)) self.input_buffer += data if self.input_buffer.rstrip().endswith("]]>]]>"): self.process(self.input_buffer.rstrip()[0:-6]) self.input_buffer = "" def process(self, data): if not self.been_greeted: self.logger.info("Client's greeting received") self.been_greeted = True return xml_request_root = remove_namespaces(etree.fromstring(data)) message_id = xml_request_root.get("message-id") operation = xml_request_root[0] self.logger.info("Operation requested %s" % repr(operation.tag)) handled = False operation_name = normalize_operation_name(operation) for capability in self.capabilities: if hasattr(capability, operation_name): try: self.reply(message_id, getattr(capability, operation_name)(operation)) except NetconfError as e:
self.reply(message_id, error_to_response(e)) handled = True if not handled: self.reply(message_id, error_to_response(OperationNotSupported(operation_name))) def reply(self, message_id, response): reply = etree.Element("rpc-reply",
xmlns=NS_BASE_1_0, nsmap=self.additionnal_namespaces) reply.attrib["message-id"] = message_id reply.append(response.etree) self.say(reply) if response.require_disconnect: self.logger.info("Disconnecting") self.transport.loseConnection() def say(self, etree_root): self.logger.info("Saying : %s" % repr(etree.tostring(etree_root))) self.transport.write(etree.tostring(etree_root, pretty_print=True) + "]]>]]>\n") def error_to_response(error): error_specs = { "error-message": error.message } if error.type: error_specs["error-type"] = error.type if error.tag: error_specs["error-tag"] = error.tag if error.severity: error_specs["error-severity"] = error.severity if error.info: error_specs["error-info"] = error.info return Response(dict_2_etree({"rpc-error": error_specs})) def remove_namespaces(xml_root): xml_root.tag = unqualify(xml_root.tag) for child in xml_root: remove_namespaces(child) return xml_root def unqualify(tag): return re.sub("\{[^\}]*\}", "", tag)
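
# Illustrative note on framing: NETCONF 1.0 terminates each message with the
# "]]>]]>" delimiter, which is exactly what dataReceived() buffers for. A
# hypothetical exchange against this protocol object might look like:
#
#   proto.dataReceived('<hello/>]]>]]>')               # client greeting, recorded
#   proto.dataReceived('<rpc message-id="1"><get/>')   # buffered, no delimiter yet
#   proto.dataReceived('</rpc>]]>]]>')                 # delimiter seen -> process()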
import os BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) DEBUG = False ALLOWED_HOSTS = ['localhost', '127.0.0.1'] SECRET_KEY = 'my-key' ROOT_URLCONF = 'tests.urls' INSTALLED_APPS = [ 'tests', 'cloudinary_storage', # 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', ] DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'), } } TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', ], }, }, ] MIDDLEWARE_CLASSES = [ 'django.middleware.security.SecurityMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.auth.middleware.SessionAuthenti
cationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', ] STATIC_URL = '/static/' STATICFILES_STORAGE = 'cloudinary_storage.storage.StaticHashedCloudinaryStorage' MEDIA_URL = '/media/' DEFAULT_FILE_STORAGE = 'cloudinary_storage.storage.MediaCloudinaryStorage' CLOUDINARY_STORAGE = { 'CLOUD_NAME': os.getenv('CLOUDINARY_CLOUD_NA
ME', 'my-cloud-name'), 'API_KEY': os.getenv('CLOUDINARY_API_KEY', 'my-api-key'), 'API_SECRET': os.getenv('CLOUDINARY_API_SECRET', 'my-api-secret') } LOGGING = { 'version': 1, 'disable_existing_loggers': False, 'handlers': { 'console': { 'class': 'logging.StreamHandler', }, }, 'loggers': { 'django': { 'handlers': ['console'], 'level': os.getenv('DJANGO_LOG_LEVEL', 'INFO'), }, }, }
import unittest from isbn_verifier import is_valid # Tests adapted from `problem-specifications//canonical-data.json` class IsbnVerifierTest(unittest.TestCase): def test_valid_isbn(self): self.assertIs(is_valid("3-598-21508-8"), True) def test_invalid_isbn_check_digit(self): self.assertIs(is_valid("3-598-21508-9"), False) def test_valid_isbn_with_a_check_digit_of_10(self): self.assertIs(is_valid("3-598-21507-X"), True) def test_check_digit_is_a_character_other_than_x(self): self.assertIs(is_valid("3-598-21507-A"), False) def test_invalid_character_in_isbn(self): self.assertIs(is_valid("3-598-P1581-X"), False)
def test_x_is_only_valid_as_a_check_digit(self):
self.assertIs(is_valid("3-598-2X507-9"), False) def test_valid_isbn_without_separating_dashes(self): self.assertIs(is_valid("3598215088"), True) def test_isbn_without_separating_dashes_and_x_as_check_digit(self): self.assertIs(is_valid("359821507X"), True) def test_isbn_without_check_digit_and_dashes(self): self.assertIs(is_valid("359821507"), False) def test_too_long_isbn_and_no_dashes(self): self.assertIs(is_valid("3598215078X"), False) def test_too_short_isbn(self): self.assertIs(is_valid("00"), False) def test_isbn_without_check_digit(self): self.assertIs(is_valid("3-598-21507"), False) def test_check_digit_of_x_should_not_be_used_for_0(self): self.assertIs(is_valid("3-598-21515-X"), False) def test_empty_isbn(self): self.assertIs(is_valid(""), False) def test_input_is_9_characters(self): self.assertIs(is_valid("134456729"), False) def test_invalid_characters_are_not_ignored(self): self.assertIs(is_valid("3132P34035"), False) def test_input_is_too_long_but_contains_a_valid_isbn(self): self.assertIs(is_valid("98245726788"), False) if __name__ == "__main__": unittest.main()
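
# A minimal ISBN-10 checker consistent with the tests above. This is a
# hypothetical reference sketch of what the isbn_verifier module (not shown
# here) might contain: strip dashes, require exactly ten characters, and
# verify the weighted mod-11 checksum.
def is_valid(isbn):
    isbn = isbn.replace('-', '')
    if len(isbn) != 10:
        return False
    total = 0
    for i, char in enumerate(isbn):
        if char == 'X' and i == 9:
            digit = 10          # 'X' stands for 10, but only as the check digit
        elif char.isdigit():
            digit = int(char)
        else:
            return False
        total += digit * (10 - i)
    return total % 11 == 0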
from datetime import datetime import mock from nose.tools import eq_ import mkt import mkt.site.tests from mkt.account.serializers import (AccountSerializer, AccountInfoSerializer, TOSSerializer) from mkt.users.models import UserProfile class TestAccountSerializer(mkt.site.tests.TestCase): def setUp(self): self.account = UserProfile() def serializer(self): return AccountSerializer(instance=self.account) def test_display_name_returns_name(self): with mock.patch.object(UserProfile, 'name', 'Account name'): eq_(self.serializer().data['display_name'], 'Account name') def test_recommendations(self): # Test default. eq_(self.serializer().data['enable_recommendations'], True) self.account.enable_recommendations = False eq_(self.serializer().data['enable_recommendations'], False) class TestAccountInfoSerializer(mkt.site.tests.TestCase): UNKNOWN = mkt.LOGIN_SOURCE_LOOKUP[mkt.LOGIN_SOURCE_UNKNOWN] FIREFOX_ACCOUNTS = mkt.LOGIN_SOURCE_LOOKUP[mkt.LOGIN_SOURCE_FXA] PERSONA = mkt.LOGIN_SOURCE_LOOKUP[mkt.LOGIN_
SOURCE_BROWSERID] def setUp(self): self.account = UserProfile() self.account.pk = 25 def serializer(self): return AccountInfoSerializer(instance=self.account) def test_source_is_a_slug_default(self): eq_(self.serializer().data['source'], self.PERSONA) def test_source_is_unknown(self): self.account.source = mkt.LOGIN_SOURCE_UNK
NOWN eq_(self.serializer().data['source'], self.PERSONA) def test_source_is_fxa(self): self.account.source = mkt.LOGIN_SOURCE_FXA eq_(self.serializer().data['source'], self.FIREFOX_ACCOUNTS) def test_source_is_invalid(self): self.account.source = -1 eq_(self.serializer().data['source'], self.PERSONA) def test_source_is_unrelated(self): self.account.source = mkt.LOGIN_SOURCE_BROWSERID eq_(self.serializer().data['source'], self.PERSONA) def test_account_has_no_pk(self): self.account.source = mkt.LOGIN_SOURCE_FXA self.account.pk = None eq_(self.serializer().data['source'], self.UNKNOWN) def test_source_is_read_only(self): serializer = AccountInfoSerializer( instance=None, data={'source': mkt.LOGIN_SOURCE_FXA, 'display_name': 'Hey!'}, partial=True) eq_(serializer.is_valid(), True) # This works because the model field is `editable=False`. eq_(serializer.save().source, mkt.LOGIN_SOURCE_UNKNOWN) def test_not_verified(self): self.account.is_verified = False eq_(self.serializer().data['verified'], False) def test_verified(self): self.account.is_verified = True eq_(self.serializer().data['verified'], True) class TestTOSSerializer(mkt.site.tests.TestCase): def setUp(self): self.account = UserProfile() def serializer(self): context = { 'request': mkt.site.tests.req_factory_factory('') } context['request'].user = self.account return TOSSerializer(instance=self.account, context=context) def test_has_signed(self): eq_(self.serializer().data['has_signed'], False) self.account.read_dev_agreement = datetime.now() eq_(self.serializer().data['has_signed'], True)
('sv_gq'), dp=gt_args.get('sv_dp'), max_dp=gt_args.get('sv_max_dp'), het_ab=gt_args.get('sv_het_ab'), hom_ab=gt_args.get('sv_hom_ab'), del_dhffc=gt_args.get('del_dhffc'), dup_dhbfc=gt_args.get('dup_dhbfc')) self._sv_gt_fields = set(self.sv_gt_filter.fields) self.sv_con_gt_filter = SvGtFilter( family_filter.vcf, gq=gt_args.get('sv_min_control_gq'), dp=gt_args.get('sv_min_control_dp'), max_dp=gt_args.get('sv_max_control_dp'), het_ab=gt_args.get('sv_control_het_ab'), hom_ab=gt_args.get('sv_control_hom_ab'), ref_ab_filter=gt_args.get('sv_con_ref_ab'), del_dhffc=gt_args.get('control_del_dhffc'), dup_dhbfc=gt_args.get('control_dup_dhbfc')) self._sv_gt_fields.update(self.sv_con_gt_filter.fields) self._prev_coordinate = (None, None) # to ensure records are processed self._processed_contigs = set() # in coordinate order if snpeff_mode: try: self._csq_header = self.family_filter.vcf.header.ann_fields except KeyError: self._csq_header = None # only required for report file self.csq_attribute = 'ANN' self.feature_label = 'Feature_ID' else: try: self._csq_header = self.family_filter.vcf.header.csq_fields except KeyError: self._csq_header = None # only required for report file self.csq_attribute = 'CSQ' self.feature_label = 'Feature' if self.report_file: self._write_report_header() def get_header_fields(self): ''' Return dict of dicts with INFO header field names as keys and dicts of features as values. These are suitable for handing to VcfHeader class's add_header_field() method. Each INFO field must be defined in self.header_fields in the child class, which should be a list of tuples where each tuple consists of the name anddescription of the field. ''' hf = dict() for f in self.header_fields: hf[f[0]] = {'Number': 'A', 'Type': 'String', 'Description': f[1]} return hf def confirm_heterozygous(self, record, samples): for s in samples: if len(set(record.samples[s]['GT'])) != 2: return False return True def _get_allele_counts(self, allele, rec): a_counts = dict() gt_filter_args = dict() if rec.IS_SV: gt_filter = self.sv_gt_filter control_filter = self.sv_con_gt_filter gt_filter_args['svtype'] = rec.record.info.get('SVTYPE', '') else: gt_filter = self.gt_filter control_filter = self.con_gt_filter for samp in self.unaffected: if control_filter.gt_is_ok(rec.record.samples, samp, allele, **gt_filter_args): a_counts[samp] = rec.record.samples[samp]['GT'].count(allele) else: a_counts[samp] = None if (rec.record.samples[samp]['GT'] == (0, 0) and control_filter.ad_over_threshold is not None): if control_filter.ad_over_threshold(rec.record.samples, samp, allele): a_counts[samp] = 1 for samp in self.affected: if gt_filter.gt_is_ok(rec.record.samples, samp, allele, **gt_filter_args): a_counts[samp] = rec.record.samples[samp]['GT'].count(allele) else: a_counts[samp] = None return a_counts def _check_sorted(self, record): if self._prev_coordinate[0] != record.chrom: if record.chrom in self._processed_contigs: raise RuntimeError("Input must be sorted by chromosome and " + "position for recessive filtering. " + "Contig '{}' " .format(record.chrom) + "encountered before and after contig " + "'{}'." .format(self._prev_coordinate[0])) if self._prev_coordinate[0] is not None: self._processed_contigs.add(self._prev_coordinate[0]) elif record.pos < self._prev_coordinate[1]: raise RuntimeError("Input must be sorted by chromosome and " + "position for inheritance filtering. 
" + "Encountered position {}:{} after {}:{}" .format(record.chrom, record.pos, self._prev_coordinate[0], self._prev_coordinate[1])) self._prev_coordinate = (record.chrom, record.pos) def process_record(self, record): '''Return True if record should be printed/kept''' return NotImplementedError("process_record method should be " + "overriden by child class!") def _write_report_header(self): if self._csq_header is not None: header = str.join("\t", (x for x in self._csq_header if x != 'Allele')) header += "\tALT_No.\t" + str.join("\t", self.annot_fields) header += "\tCHROM\tPOS\tID\tREF\tALT\tALLELE\tQUAL\tFILTER" self.report_file.write(header + "\n") def check_g2p(self, record, ignore_csq, inheritance, csqs=None): if self.f
amily_filter.g2p: if csqs is None: csqs = getattr(record, self.csq_attribute) if self.family_filter.check_g2p_consequence: fail = (not x for x in self.family_filter.g2p.csq_and_allelic_
requirement_met( csqs, inheritance)) else: fail = (not x for x in self.family_filter.g2p.allelic_requirement_met( csqs, inheritance)) if ignore_csq: ignore_csq = [x or y for x, y in zip(ignore_csq, fail)] else: ignore_csq = list(fail) return ignore_csq class RecessiveFilter(InheritanceFilter): ''' This class assumes that each family has a shared biallelic genetic cause of disease. It will not cope with phenocopies, pseudodominance or other more complicated inheritance patterns. ''' def __init__(self, family_filter, gt_args, min_families=1, snpeff_mode=False, strict=False, exclude_denovo=False, report_file=None): ''' Args: family_filter: FamilyFilter object gt_args: A dict of arguments to use for filtering genotypes. These should all correspond to arguments to provide to SampleFilter objects. min_families: Require at least this many families to have a qualifying biallelic combination of alleles in a feature before outputting. Default=1. snpeff_mode: Use SnpEff annotations instead of VEP annotations from input VCF. strict: If True, for any affected sample with parents, require confirmation of parental genotypes. If ei
t, image_width, 1] assert blue.get_shape().as_list()[1:] == [image_height, image_width, 1] bgr = tf.concat([ blue - VGG_MEAN[0], green - VGG_MEAN[1], red - VGG_MEAN[2], ],3) assert bgr.get_shape().as_list()[1:] == [image_height, image_width, 3] # Conv layer 1 self.conv1_1 = self.conv_layer_const(bgr, 'conv1_1') self.conv1_2 = self.conv_layer_const(self.conv1_1, 'conv1_2') self.pool1 = self.max_pool(self.conv1_2, 'pool1') # Conv layer 2 self.conv2_1 = self.conv_layer_const(self.pool1, 'conv2_1') self.conv2_2 = self.conv_layer_const(self.conv2_1, 'conv2_2') self.pool2 = self.max_pool(self.conv2_2, 'pool2') # Conv layer 3 self.conv3_1, conv3_1_wd = self.conv_layer(self.pool2, 'conv3_1') self.conv3_2, conv3_2_wd = self.conv_layer(self.conv3_1, 'conv3_2') self.conv3_3, conv3_3_wd = self.conv_layer(self.conv3_2, 'conv3_3') self.weight_dacay = conv3_1_wd + conv3_2_wd + conv3_3_wd self.pool3 = self.max_pool(self.conv3_3, 'pool3') # Conv layer 4 self.conv4_1, conv4_1_wd = self.conv_layer(self.pool3, 'conv4_1') self.conv4_2, conv4_2_wd = self.conv_layer(self.conv4_1, 'conv4_2') self.conv4_3, conv4_3_wd = self.conv_layer(self.conv4_2, 'conv4_3') self.weight_dacay += conv4_1_wd + conv4_2_wd + conv4_3_wd self.pool4 = self.max_pool(self.conv4_3, 'pool4') # Conv layer 5 self.conv5_1, conv5_1_wd = self.conv_layer(self.pool4, 'conv5_1') self.conv5_2, conv5_2_wd = self.conv_layer(self.conv5_1, 'conv5_2') self.conv5_3, conv5_3_wd = self.conv_layer(self.conv5_2, 'conv5_3') self.weight_dacay += conv5_1_wd + conv5_2_wd + conv5_3_wd # RPN_TEST_6(>=7) normalization_factor = tf.sqrt(tf.reduce_mean(tf.square(self.conv5_3))) self.gamma3 = tf.Variable(np.sqrt(2), dtype=tf.float32, name='gamma3') self.gamma4 = tf.Variable(1.0, dtype=tf.float32, name='gamma4') # Pooling to the same size self.pool3_p = tf.nn.max_pool(self.pool3, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1], padding='SAME', name='pool3_proposal') # L2 Normalization self.pool3_p = self.pool3_p / ( tf.sqrt(tf.reduce_mean(tf.square(self.pool3_p))) / normalization_factor) * self.gamma3 self.pool4_p = self.pool4 / ( tf.sqrt(tf.reduce_mean(tf.square(self.pool4))) / normalization_factor) * self.gamma4 # Proposal Convolution self.conv_proposal_3, conv_proposal_3_wd = self.conv_layer_new(self.pool3_p, 'conv_proposal_3', kernel_size=[5, 2], out_channel=256, stddev=0.01) self.relu_proposal_3 = tf.nn.relu(self.conv_proposal_3) self.conv_proposal_4, conv_proposal_4_wd = self.conv_layer_new(self.pool4_p, 'conv_proposal_4', kernel_size=[5, 2], out_channel=512, stddev=0.01) self.relu_proposal_4 = tf.nn.relu(self.conv_proposal_4) self.conv_proposal_5, conv_proposal_5_wd = self.conv_layer_new(self.conv5_3, 'conv_proposal_5', kernel_size=[5, 2], out_channel=512, stddev=0.01) self.relu_proposal_5 = tf.nn.relu(self.conv_proposal_5) self.weight_dacay += conv_proposal_3_wd + conv_proposal_4_wd + conv_proposal_5_wd # Concatrate self.relu_proposal_all = tf.concat( [self.relu_proposal_3, self.relu_proposal_4, self.relu_proposal_5],3) # RPN_TEST_6(>=7) self.c
onv_cls_score, conv_cls_wd = self.conv_layer_new(self.relu_proposal_all, 'conv_cls_score', kernel_size=[1, 1], out_channel=18, stddev=0.01) self.conv_bbox_pred, conv_bbox_wd = self.conv_layer_n
ew(self.relu_proposal_all, 'conv_bbox_pred', kernel_size=[1, 1], out_channel=36, stddev=0.01) self.weight_dacay += conv_cls_wd + conv_bbox_wd assert self.conv_cls_score.get_shape().as_list()[1:] == [feature_height, feature_width, 18] assert self.conv_bbox_pred.get_shape().as_list()[1:] == [feature_height, feature_width, 36] self.cls_score = tf.reshape(self.conv_cls_score, [-1, 2]) self.bbox_pred = tf.reshape(self.conv_bbox_pred, [-1, 4]) self.prob = tf.nn.softmax(self.cls_score, name="prob") self.cross_entropy = tf.reduce_sum( tf.nn.softmax_cross_entropy_with_logits(labels=label, logits=self.cls_score) * label_weight) / tf.reduce_sum(label_weight) bbox_error = tf.abs(self.bbox_pred - bbox_target) bbox_loss = 0.5 * bbox_error * bbox_error * tf.cast(bbox_error < 1, tf.float32) + (bbox_error - 0.5) * tf.cast( bbox_error >= 1, tf.float32) self.bb_loss = tf.reduce_sum( tf.reduce_sum(bbox_loss, reduction_indices=[1]) * bbox_loss_weight) / tf.reduce_sum(bbox_loss_weight) self.loss = self.cross_entropy + 0.0005 * self.weight_dacay + 0.5 * self.bb_loss self.train_step = tf.train.MomentumOptimizer(learning_rate, 0.9).minimize(self.loss) self.data_dict = None print('build model finished: %ds' % (time.time() - start_time)) def avg_pool(self, bottom, name): return tf.nn.avg_pool(bottom, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1], padding='SAME', name=name) def max_pool(self, bottom, name): return tf.nn.max_pool(bottom, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1], padding='SAME', name=name) def conv_layer(self, bottom, name): with tf.variable_scope(name): filt = self.get_conv_filter(name) conv = tf.nn.conv2d(bottom, filt, [1, 1, 1, 1], padding='SAME') conv_biases = self.get_bias(name) bias = tf.nn.bias_add(conv, conv_biases) relu = tf.nn.relu(bias) weight_dacay = tf.nn.l2_loss(filt, name='weight_dacay') return relu, weight_dacay def conv_layer_const(self, bottom, name): with tf.variable_scope(name): filt = self.get_conv_filter_const(name) conv = tf.nn.conv2d(bottom, filt, [1, 1, 1, 1], padding='SAME') conv_biases = self.get_bias_const(name) bias = tf.nn.bias_add(conv, conv_biases) relu = tf.nn.relu(bias) return relu def conv_layer_new(self, bottom, name, kernel_size=[3, 3], out_channel=512, stddev=0.01): with tf.variable_scope(name): shape = bottom.get_shape().as_list()[-1] filt = tf.Variable( tf.random_normal([kernel_size[0], kernel_size[1], shape, out_channel], mean=0.0, stddev=stddev), name='filter') conv_biases = tf.Variable(tf.zeros([out_channel]), name='biases') conv = tf.nn.conv2d(bottom, filt, [1, 1, 1, 1], padding='SAME') bias = tf.nn.bias_add(conv, conv_biases) weight_dacay = tf.nn.l2_loss(filt, name='weight_dacay') return bias, weight_dacay def get_conv_filter(self, name): return tf.Variable(self.data_dict[name][0], name='filter') def get_bias(self, name): return tf.Variable(self.data_dict[name][1], name='biases') def get_conv_filter_const(self, name): return tf.constant(self.data_dict[name][0], name='filter') def get_bias_const(self, name): return tf.constant(self.data_dict[name][1], name='biases') def save(self, save_dir, step=None): params = {} for var in tf.trainable_variables(): param_name = var.name.split('/') if param_name[1] in params.keys(): params[param_name[1]].append(sess.run(var)) else: params[param_name[1]] = [sess.run(var)] if step == None: step = 100000 np.save(save_dir + 'params_' + str(step) + '.npy', params) def checkFile(file
import sys
import os
import re
import shutil

from setuptools import setup

name = 'django-skivvy'
package = 'skivvy'
description = ('Write faster integration tests for Django views – with less '
               'code.')
url = 'https://github.com/oliverroick/django-skivvy'
author = 'Oliver Roick'
author_email = 'oliver.roick@gmail.com'
license = 'AGPL'

readme_file = os.path.join(os.path.dirname(__file__), 'README.rst')
with open(readme_file, 'r') as f:
    long_description = f.read().strip()


def get_version(package):
    """
    Return package version as listed in `__version__` in `__init__.py`.
    """
    init_py = open(os.path.join(package, '__init__.py')).read()
    return re.search("^__version__ = ['\"]([^'\"]+)['\"]",
                     init_py, re.MULTILINE).group(1)


def get_packages(package):
    """
    Return root package and all sub-packages.
    """
    return [dirpath
            for dirpath, dirnames, filenames in os.walk(package)
            if os.path.exists(os.path.join(dirpath, '__init__.py'))]


def get_package_data(package):
    """
    Return all files under the root package, that are not in a package
    themselves.
    """
    walk = [(dirpath.replace(package + os.sep, '', 1), filenames)
            for dirpath, dirnames, filenames in os.walk(package)
            if not os.path.exists(os.path.join(dirpath, '__init__.py'))]

    filepaths = []
    for base, filenames in walk:
        filepaths.extend([os.path.join(base, filename)
                          for filename in filenames])
    return {package: filepaths}


version =
get_version(package) if sys.argv[-1] == 'publish': if os.system("pip freeze | grep twine"): print("twine not installed.\nUse `pip install twine`.\nExiting.") sys.exit() shutil.rmtree('dist', ignore_errors=True) shutil.rmtree('build', ignore_errors=True) os.system("python setup.py sdist") os.system("python setup.py bdist_wheel") os.system("twine upload dist/*
") print("You probably want to also tag the version now:") print(" git tag -a {0} -m 'version {0}'".format(version)) print(" git push --tags") sys.exit() setup( name=name, version=version, url=url, license=license, description=description, long_description=long_description, author=author, author_email=author_email, packages=get_packages(package), package_data=get_package_data(package), install_requires=[], classifiers=[ 'Development Status :: 5 - Production/Stable', 'Environment :: Web Environment', 'Framework :: Django :: 1.11', 'Framework :: Django :: 2.1', 'Framework :: Django :: 2.2', 'Intended Audience :: Developers', 'License :: OSI Approved :: GNU Affero General Public License v3', 'Operating System :: OS Independent', 'Natural Language :: English', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', 'Topic :: Software Development :: Testing', 'Topic :: Software Development :: Testing :: Mocking', ] )
a new primary partition for LVM - parted: device: /dev/sdb number: 2 flags: [ lvm ] state: present part_start: 1GiB # Read device information (always use unit when probing) - parted: device=/dev/sdb unit=MiB register: sdb_info # Remove all partitions from disk - parted: device: /dev/sdb number: "{{ item.num }}" state: absent with_items: - "{{ sdb_info.partitions }}" """ from ansible.module_utils.basic import AnsibleModule import math import re import os # Reference prefixes (International System of Units and IEC) units_si = ['B', 'KB', 'MB', 'GB', 'TB'] units_iec = ['B', 'KiB', 'MiB', 'GiB', 'TiB'] parted_units = units_si + units_iec + ['s', '%', 'cyl', 'chs', 'compact'] def parse_unit(size_str, unit=''): """ Parses a string containing a size of information """ matches = re.search(r'^([\d.]+)([\w%]+)?$', size_str) if matches is None: # "<cylinder>,<head>,<sector>" format matches = re.search(r'^(\d+),(\d+),(\d+)$', size_str) if matches is None: module.fail_json( msg="Error interpreting parted size output: '%s'" % size_str ) size = { 'cylinder': int(matches.group(1)), 'head': int(matches.group(2)), 'sector': int(matches.group(3)) } unit = 'chs' else: # Normal format: "<number>[<unit>]" if matches.group(2) is not None: unit = matches.group(2) size = float(matches.group(1)) return size, unit def parse_partition_info(parted_output, unit): """ Parses the output of parted and transforms the data into a dictionary. Parted Machine Parseable Output: See: https://lists.alioth.debian.org/pipermail/parted-devel/2006-December/00 0573.html - All lines end with a semicolon (;) - The first line indicates the units in which the output is expressed. CHS, CYL and BYT stands for CHS, Cylinder and Bytes respectively. - The second line is made of disk information in the following format: "path":"size":"transport-type":"logical-sector-size":"physical-sector-siz e":"partition-table-type":"model-name"; - If the first line was either CYL or CHS, the next line will contain information on no. of cylinders, heads, sectors and cylinder size. - Partition information begins from the next line. This is of the format: (for BYT) "number":"begin":"end":"size":"filesystem-type":"partition-name":"flags-s et"; (for CHS/CYL) "number":"begin":"end":"filesystem-type":"partition-name":"flags-set"; """ lines = [x for x in parted_output.split('\n') if x.strip() != ''] # Generic device info generic_params = lines[1].rstrip(';').split(':') # The unit is read once, because parted always returns the same unit size, unit = parse_unit(generic_params[1], unit) generic = { 'dev': generic_params[0], 'size': size, 'unit': unit.lower(), 'table': generic_params[5], 'model': generic_params[6], 'logical_block': int(generic_params[3]), 'physical_block': int(generic_params[4]) } # CYL and CHS have an additional line in the output if unit in ['cyl', 'chs']: chs_info = lines[2].rstrip(';').split(':') cyl_size, cyl_unit = parse_unit(chs_info[3]) generic['chs_info'] = { 'cylinders': int(chs_info[0]), 'heads': int(chs_info[1]), 'sectors': int(chs_info[2]), 'cyl_size': cyl_size, 'cyl_size_unit': cyl_unit.lower() } lines = lines[1:] parts = [] for line in lines[2:]: part_params = line.rstrip(';').split(':') # CHS use a different format than BYT, but contrary to what stated by # the author, CYL is the same as BYT. I've tested this undocumented # behaviour down to parted version 1.8.3, which is the first version # that supports the machine parseable output. 
if unit != 'chs': size = parse_unit(part_params[3])[0] fstype = part_params[4] name = part_params[5] flags = part_params[6] else: size = "" fstype = part_params[3] name = part_params[4] flags = part_params[5] parts.append({ 'num': int(part_params[0]), 'begin': parse_unit(part_params[1])[0], 'end': parse_unit(part_params[2])[0], 'size': size, 'fstype': fstype, 'name': name, 'flags': [f.strip() for f in flags.split(', ') if f != ''], 'unit': unit.lower(), }) return {'generic': generic, 'partitions': parts}
def format_disk_size(size_bytes, unit): """ Formats a size in bytes into a different unit, like parted does. It doesn't manage CYL and CHS formats, though. This function has been adapted from https://github.com/Distrotech/parted/blo b/279d9d869ff472c52b9ec2e180d5
68f0c99e30b0/libparted/unit.c
    """
    global units_si, units_iec

    unit = unit.lower()

    # Shortcut
    if size_bytes == 0:
        return 0.0, 'b'

    # Cases where we default to 'compact'
    if unit in ['', 'compact', 'cyl', 'chs']:
        index = max(0, int(
            (math.log10(size_bytes) - 1.0) / 3.0
        ))
        unit = 'b'
        if index < len(units_si):
            unit = units_si[index]

    # Find the appropriate multiplier
    multiplier = 1.0
    if unit in units_si:
        multiplier = 1000.0 ** units_si.index(unit)
    elif unit in units_iec:
        multiplier = 1024.0 ** units_iec.index(unit)

    output = size_bytes / multiplier * (1 + 1E-16)

    # Corrections to round up as per IEEE754 standard
    if output < 10:
        w = output + 0.005
    elif output < 100:
        w = output + 0.05
    else:
        w = output + 0.5

    if w < 10:
        precision = 2
    elif w < 100:
        precision = 1
    else:
        precision = 0

    # Round and return
    return round(output, precision), unit


def get_unlabeled_device_info(device, unit):
    """
    Fetches device information directly from the kernel and it is used when
    parted cannot work because of a missing label. It always returns an
    'unknown' label.
    """
    device_name = os.path.basename(device)
    base = "/sys/block/%s" % device_name

    vendor = read_record(base + "/device/vendor", "Unknown")
    model = read_record(base + "/device/model", "model")
    logic_block = int(read_record(base + "/queue/logical_block_size", 0))
    phys_block = int(read_record(base + "/queue/physical_block_size", 0))
    size_bytes = int(read_record(base + "/size", 0)) * logic_block

    size, unit = format_disk_size(size_bytes, unit)

    return {
        'generic': {
            'dev': device,
            'table': "unknown",
            'size': size,
            'unit': unit,
            'logical_block': logic_block,
            'physical_block': phys_block,
            'model': "%s %s" % (vendor, model),
        },
        'partitions': []
    }


def get_device_info(device, unit):
    """
    Fetches information about a disk and its partitions and it returns a
    dictionary.
    """
    global module, parted_exec

    # If parted complains about missing labels, it means there are no partitions.
    # In this case only, use a custom function to fetch information and emulate
    # parted formats for the unit.
    label_needed = check_parted_label(device)
    if label_needed:
        return get_unlabeled_device_info(device, unit)

    command = "%s -s -m %s -- unit '%s' print" % (parted_exec, device, unit)
    rc, out, err = module.run_command(command)
    if rc != 0 and 'unrecognised disk label' not in err:
        module.fail_json(msg=(
            "Error while getting device information with parted "
            "script: '%s'" % command),
            rc=rc, out=out, err=err
        )

    return parse_partition_info(out, unit)


def check_parted_label(device):
    """
    Determines if parted needs a label to complete its duties. Versions prior t
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright (C) 2013 Radim Rehurek <me@radimrehurek.com> # Licensed under the GNU LGPL v2.1 - http://www.gnu.org/licenses/lgpl.html import os from smart_open import smart_open try: import cPickle as _pickle except ImportError: import pickle as _pickle from gensim.models.doc2vec import Doc2Vec from gensim.models.word2vec import Word2Vec try: from annoy import AnnoyIndex except ImportError: raise ImportError("Annoy has not been installed, if you wish to use the annoy indexer, please run `pip install annoy`") class AnnoyIndexer(object): def __init__(self, model=None, num_trees=None): self.index = None self.labels = None self.model = model self.num_trees = num_trees if model and num_trees: if isinstance(self.model, Doc2Vec): self.build_from_doc2vec() elif isinstance(self.model, Word2Vec): self.build_from_word2vec() else: raise ValueError("Only a Word2Vec or Doc2Vec instance can be used") def save(self, fname, protocol
=2): fname_dict = fname + '.d' self.index.save(fname) d = {'f': self.model.vector_size, 'num_trees': self.num_trees, 'labels': self.labels} with smart_open(fname_dict, 'wb') as fout: _pickle.dump(d, fout, protocol=protocol) def load(self, fname): fname_dict = fname+'.d' if not (os.path.exists(fname) and os.path.exists(fname_dict)): raise IOError( "Can't find index files '%s' and '%s' - Unable to
restore AnnoyIndexer state." % (fname, fname_dict)) else: with smart_open(fname_dict) as f: d = _pickle.loads(f.read()) self.num_trees = d['num_trees'] self.index = AnnoyIndex(d['f']) self.index.load(fname) self.labels = d['labels'] def build_from_word2vec(self): """Build an Annoy index using word vectors from a Word2Vec model""" self.model.init_sims() return self._build_from_model(self.model.wv.syn0norm, self.model.index2word , self.model.vector_size) def build_from_doc2vec(self): """Build an Annoy index using document vectors from a Doc2Vec model""" docvecs = self.model.docvecs docvecs.init_sims() labels = [docvecs.index_to_doctag(i) for i in range(0, docvecs.count)] return self._build_from_model(docvecs.doctag_syn0norm, labels, self.model.vector_size) def _build_from_model(self, vectors, labels, num_features): index = AnnoyIndex(num_features) for vector_num, vector in enumerate(vectors): index.add_item(vector_num, vector) index.build(self.num_trees) self.index = index self.labels = labels def most_similar(self, vector, num_neighbors): """Find the top-N most similar items""" ids, distances = self.index.get_nns_by_vector( vector, num_neighbors, include_distances=True) return [(self.labels[ids[i]], 1 - distances[i] / 2) for i in range(len(ids))]
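

if __name__ == '__main__':
    # Usage sketch (illustrative, not part of the original module). Assumes
    # the gensim Word2Vec API of this era, where `size` sets the vector
    # dimensionality; the corpus and paths below are made up.
    sentences = [['human', 'interface', 'computer'],
                 ['survey', 'user', 'computer', 'system']]
    model = Word2Vec(sentences, min_count=1, size=10)
    indexer = AnnoyIndexer(model, num_trees=2)
    print(indexer.most_similar(model.wv['computer'], 2))  # top-2 neighbours
    indexer.save('/tmp/example_index')  # writes the index plus a '.d' pickle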
#! /usr/bin/env python # -*- coding: utf-8 -*- # Copyright (C) 2011 Deepin, Inc. # 2011 Wang Yong # 2012 Reza Faiz A # # Author: Wang Yong <lazycat.manatee@gmail.com> # Maintainer: Wang Yong <lazycat.manatee@gmail.com> # Reza Faiz A <ylpmiskrad@gmail.com> # Remixed : Reza Faiz A <ylpmiskrad@gmail.com> # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. from appItem import * from draw import * from lang import __, getDefaultLanguage import gtk import updateView import utils class UpdatePage(object): '''Interface for update page.''' def __init__(self, repoCache, switchStatus, downloadQueue, entryDetailCallback, sendVoteCallback, fetchVoteCallback, upgradeSelectedPkgsCallback, addIgnorePkgCallback, showIgnorePageCallback): '''Init for update page.''' # Init. self.repoCache = repoCache self.box = gtk.VBox() self.updateView = updateView.UpdateView( repoCache, switchStatus, downloadQueue, entryDetailCallback, sendVoteCallback, fetchVoteCallback, addIgnorePkgCallback, ) self.topbar = Topbar(self.repoCache, self.updateView.selectAllPkg, self.updateView.unselectAllPkg, self.updateView.getSelectList, upgradeSelectedPkgsCallback, showIgnorePageCallback) # Connect components. self.box.pack_start(self.topbar.eventbox, False, False) self.box.pack_start(self.updateView.scrolledwindow) self.box.show_all() class Topbar(object): '''Top bar.''' def __init__(self, repoCache, selectAllPkgCallback, unselectAllPkgCallback, getSelectListCallback, upgradeSelectedPkgsCallback, showIgnorePageCallback): '''Init for top bar.''' # Init. self.repoCache = repoCache self.paddingX = 5 self.selectAllPkgCallback = selectAllPkgCallback self.unselectAllPkgCallback = unselectAllPkgCallback self.showIgnorePageCallback = showIgnorePageCallback self.box = gtk.HBox() self.boxAlign = gtk.Alignment() self.boxAlign.set(0.0, 0.5, 1.0, 1.0) self.boxAlign.set_padding(0, 0, TOPBAR_PADDING_LEFT, TOPBAR_PADDING_UPDATE_RIGHT) self.boxAlign.add(self.box) self.eventbox = gtk.EventBox() drawTopbar(self.eventbox) upgradeBox = gtk.HBox()
upgradeAlign = gtk.Alignment() upgradeAlign.set(1.0, 0.0, 0.0, 1.0) upgradeAlign.add(upgradeBox) self.numLabel = gtk.Label() sel
f.ignoreNumBox = gtk.HBox() self.ignoreNumAlign = gtk.Alignment() self.ignoreNumAlign.set(0.0, 0.5, 0.0, 0.0) self.ignoreNumAlign.add(self.ignoreNumBox) self.selectAllId = "selectAll" self.unselectAllId = "unselectAll" self.labelId = self.selectAllId (self.selectAllBox, self.selectAllEventBox) = setDefaultRadioButton( __("Select All"), self.selectAllId, self.setLabelId, self.getLabelId, self.selectAllPkgStatus ) upgradeBox.pack_start(self.selectAllBox, False, False, self.paddingX) (self.unselectAllBox, self.unselectAllEventBox) = setDefaultRadioButton( __("Unselect All"), self.unselectAllId, self.setLabelId, self.getLabelId, self.unselectAllPkgStatus ) upgradeBox.pack_start(self.unselectAllBox, False, False, self.paddingX) (self.upgradeButton, upgradeButtonAlign) = newActionButton( "search", 0.0, 0.5, "cell", False, __("Action Update"), BUTTON_FONT_SIZE_MEDIUM, "bigButtonFont") upgradeBox.pack_start(upgradeButtonAlign, False, False, 26) self.upgradeButton.connect("button-press-event", lambda w, e: upgradeSelectedPkgsCallback(getSelectListCallback())) # Connect. self.updateNum(self.repoCache.getUpgradableNum()) self.numLabel.set_alignment(0.0, 0.5) self.box.pack_start(self.numLabel, False, False, self.paddingX) self.box.pack_start(self.ignoreNumAlign, True, True, self.paddingX) self.box.pack_start(upgradeAlign, True, True, self.paddingX) self.eventbox.add(self.boxAlign) self.updateIgnoreNum(self.repoCache.getIgnoreNum()) def selectAllPkgStatus(self): '''Select all pkg status.''' self.selectAllEventBox.queue_draw() self.unselectAllEventBox.queue_draw() self.selectAllPkgCallback() def unselectAllPkgStatus(self): '''Select all pkg status.''' self.selectAllEventBox.queue_draw() self.unselectAllEventBox.queue_draw() self.unselectAllPkgCallback() def setLabelId(self, lId): '''Set label id.''' self.labelId = lId def getLabelId(self): '''Get label id.''' return self.labelId def updateIgnoreNum(self, ignoreNum): '''Update ignore number label.''' utils.containerRemoveAll(self.ignoreNumBox) if ignoreNum > 0: (ignoreLabel, ignoreEventBox) = setDefaultClickableDynamicLabel( __("No Notify UpdatePage") % (ignoreNum), "topbarButton", ) ignoreEventBox.connect("button-press-event", lambda w, e: self.showIgnorePageCallback()) self.ignoreNumBox.add(ignoreEventBox) self.ignoreNumBox.show_all() def updateNum(self, upgradeNum): '''Update number.''' if upgradeNum == 0: markup = "" else: markup = (__("Topbar UpdatePage") % (LABEL_FONT_SIZE, appTheme.getDynamicColor("topbarNum").getColor(), LABEL_FONT_SIZE, str(upgradeNum), LABEL_FONT_SIZE)) self.numLabel.set_markup(markup) # LocalWords: efe
# coding=utf8 from __future__ import absolute_import from __future__ import print_function from __future__ import unicode_literals from __future__ import division import os from os.path import join import tempfile import shutil from six.moves import configparser import pytest from tests import setenv, test_doc0 from knowhow.index import Index import knowhow.util as util @pytest.fixture def tmpd(request): tempdir = tempfile.mkdtemp() request.addfinalizer(lambda: shutil.rmtree(tempdir)) return tempdir @pytest.fixture def conf(): try: c = configparser.SafeConfigParser() except AttributeError: c = configparser.ConfigParser() c.add_section("main") c.set("main", "data", util.decode("/app/data")) return c @pytest.fixture def conf_path(conf, tmpd): path = join(tmpd, "knowhow.ini") with open(path, "w") as f: conf.write(f) return path @pytest.fixture def tmp_app_index_dir_paths(tmpd): app_dir = join(tmpd, "app") index_dir = join(tmpd, "index") return tmpd,
app_dir, index
_dir @pytest.fixture def tmp_app_index_dirs(tmp_app_index_dir_paths): tmpd, appd, indexd = tmp_app_index_dir_paths os.mkdir(appd) os.mkdir(indexd) return tmpd, appd, indexd @pytest.fixture def index_empty(request, tmp_app_index_dirs): _, app_dir, index_dir = tmp_app_index_dirs orig_home = os.environ.get("KNOWHOW_HOME") orig_data = os.environ.get("KNOWHOW_DATA") def restore(): setenv("KNOWHOW_HOME", orig_home) setenv("KNOWHOW_DATA", orig_data) request.addfinalizer(restore) os.environ["KNOWHOW_HOME"] = app_dir os.environ["KNOWHOW_DATA"] = index_dir index = Index(app_dir=app_dir, index_dir=index_dir) index.open(clear=True) return index @pytest.fixture def index_one(index_empty): index_empty.add(**test_doc0) return index_empty
# -*- coding: utf-8 -*- # Define here the models for your scraped items # # See documentation in: # http://doc.scrapy.org/en/latest/topics/items.html import scrapy class SexyItem(scrapy.Item): # define the fields for your item here like: name = scrapy.Field()
dirname = scrapy.Fiel
d() file_urls = scrapy.Field() files = scrapy.Field()
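
# Usage note (illustrative): with scrapy's FilesPipeline enabled, a spider
# yields SexyItem instances whose 'file_urls' list the pipeline downloads,
# filling 'files' with the download results, e.g.:
#
#   item = SexyItem(name='demo', dirname='demo',
#                   file_urls=['http://example.com/a.jpg'])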
class Printer(object): """ """ def __init__(self): self._depth = -1 self._str = str self.emptyPrinter = str def doprint(self, e
xpr): """Returns the pretty representation for expr (as
a string)""" return self._str(self._print(expr)) def _print(self, expr): self._depth += 1 # See if the class of expr is known, or if one of its super # classes is known, and use that pretty function res = None for cls in expr.__class__.__mro__: if hasattr(self, '_print_'+cls.__name__): res = getattr(self, '_print_'+cls.__name__)(expr) break # Unknown object, just use its string representation if res is None: res = self.emptyPrinter(expr) self._depth -= 1 return res
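

# An illustrative subclass (names made up) showing how _print dispatches on
# the expression's MRO: ints are handled by _print_int, anything without a
# matching handler falls back to emptyPrinter.
class IntPrinter(Printer):
    def _print_int(self, expr):
        return "int<%d>" % expr


if __name__ == '__main__':
    p = IntPrinter()
    print(p.doprint(42))      # -> int<42>
    print(p.doprint("spam"))  # -> spam (fallback to emptyPrinter)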
# -*- coding: utf-8 -*- # Generated by Django 1.11.29 on 2020-11-02 10:04 from __future__ import unicode_literals from django.db import migrations def update_version_queues(apps, schema_editor): VersionQueue = apps.get_model('repository', 'VersionQueue') for
queue in VersionQueue.objects.all(): queue.title = queue.preprint.title queue.abstract = queue.preprint.abstract queue.save()
class Migration(migrations.Migration): dependencies = [ ('repository', '0019_auto_20201030_1423'), ] operations = [ migrations.RunPython( update_version_queues, reverse_code=migrations.RunPython.noop, ) ]
# Copyright 2021 The Chromium OS Authors. All rights reserved. # Use of this source code is gove
rned by a BSD-style license that can be # found in the
LICENSE file. register_host_test("ec_app")
###########################
##### # These variables are overwritten by Zenoss when the ZenPac
k is exported # or saved. Do not modify them directly here. # NB: PACKAGES is deprecated NAME = "ZenPacks.community.SquidMon" VERSION = "1.0" AUTHOR = "Josh Baird" LICENSE = "GPLv2" NAMESPACE_PACKAGES = ['ZenPacks', 'ZenPacks.community'] PACKAGES = ['ZenPacks', 'ZenPacks.community', 'ZenPacks.community.SquidMon'] INSTALL_REQUIRES = [] COMPAT_ZENOSS_VERS = '>=2.4' PREV_ZENPACK_NAME = "" # STOP_REPLACEMENTS ################################ # Zenoss will not overwrite any changes you make below here. from setuptools import setup, find_packages setup( # This ZenPack metadata should usually be edited with the Zenoss # ZenPack edit page. Whenever the edit page is submitted it will # overwrite the values below (the ones it knows about) with new values. name = NAME, version = VERSION, author = AUTHOR, license = LICENSE, # This is the version spec which indicates what versions of Zenoss # this ZenPack is compatible with compatZenossVers = COMPAT_ZENOSS_VERS, # previousZenPackName is a facility for telling Zenoss that the name # of this ZenPack has changed. If no ZenPack with the current name is # installed then a zenpack of this name if installed will be upgraded. prevZenPackName = PREV_ZENPACK_NAME, # Indicate to setuptools which namespace packages the zenpack # participates in namespace_packages = NAMESPACE_PACKAGES, # Tell setuptools what packages this zenpack provides. packages = find_packages(), # Tell setuptools to figure out for itself which files to include # in the binary egg when it is built. include_package_data = True, # The MANIFEST.in file is the recommended way of including additional files # in your ZenPack. package_data is another. #package_data = {} # Indicate dependencies on other python modules or ZenPacks. This line # is modified by zenoss when the ZenPack edit page is submitted. Zenoss # tries to put add/delete the names it manages at the beginning of this # list, so any manual additions should be added to the end. Things will # go poorly if this line is broken into multiple lines or modified to # dramatically. install_requires = INSTALL_REQUIRES, # Every ZenPack egg must define exactly one zenoss.zenpacks entry point # of this form. entry_points = { 'zenoss.zenpacks': '%s = %s' % (NAME, NAME), }, # All ZenPack eggs must be installed in unzipped form. zip_safe = False, )
#!/usr/bin
/python
# -*- coding: utf-8 -*-
import sys

from .tinytag import TinyTag, StringWalker, ID3, Ogg, Wave, Flac

__version__ = '0.9.1'

if __name__ == '__main__'
: print(TinyTag.get(sys.argv[1]))
#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2015 Monk-ee (magic.monkee.magic@gmail.
com). # """__init__.py: Init for unit testing this module.""" __author__ = "monkee" __maintainer__ = "monk-ee" __email__ = "magic.monkee.magic@gmail.com" __status__ = "Development" import unittest from PuppetDBClientTestCaseV2 import PuppetDBClientTestCaseV2 from PuppetDBClientTestCaseV3 import PuppetDBClientTestCaseV3 def all_tests(): suite = unittest.TestSuite() suite.addTest(unittest.makeSuite(PuppetDBClientTestCaseV2)) suite.addTest(unittest.makeSuit
e(PuppetDBClientTestCaseV3)) return suite
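
# Typical invocation (illustrative): run the combined suite with the
# standard library's text runner.
if __name__ == '__main__':
    unittest.TextTestRunner(verbosity=2).run(all_tests())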
# -*- coding: utf-8 -*- # Copyright(C) 2013 Romain Bignon # # This file is part of weboob. # # weboob is free software: you can redistribute it and/or modify # it under the terms o
# f the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.


import re
from decimal import Decimal
from datetime import time, datetime, timedelta

from weboob.tools.browser import BasePage
from weboob.tools.json import json
from weboob.tools.mech import ClientForm
from weboob.capabilities.base import UserError, Currency


__all__ = ['CitiesPage', 'SearchPage', 'SearchErrorPage',
           'SearchInProgressPage', 'ResultsPage', 'ForeignPage']


class ForeignPage(BasePage):
    def on_loaded(self):
        raise UserError('Your IP address is located in a country not supported '
                        'by this module (%s). Currently only the French '
                        'website is supported.' % self.group_dict['country'])


class CitiesPage(BasePage):
    def get_stations(self):
        result = json.loads(self.document[self.document.find('{'):-2])
        return result['CITIES']


class SearchPage(BasePage):
    def search(self, departure, arrival, date, age, card, comfort_class):
        self.browser.select_form(name='saisie')
        self.browser['ORIGIN_CITY'] = departure.encode(self.browser.ENCODING)
        self.browser['DESTINATION_CITY'] = arrival.encode(self.browser.ENCODING)

        if date is None:
            date = datetime.now() + timedelta(hours=1)
        elif date < datetime.now():
            raise UserError("You cannot look for older departures")

        self.browser['OUTWARD_DATE'] = date.strftime('%d/%m/%y')
        self.browser['OUTWARD_TIME'] = [str(date.hour)]
        self.browser['PASSENGER_1'] = [age]
        self.browser['PASSENGER_1_CARD'] = [card]
        self.browser['COMFORT_CLASS'] = [str(comfort_class)]
        self.browser.controls.append(ClientForm.TextControl(
            'text', 'nbAnimalsForTravel', {'value': ''}))
        self.browser['nbAnimalsForTravel'] = '0'
        self.browser.submit()


class SearchErrorPage(BasePage):
    def on_loaded(self):
        p = self.document.getroot().cssselect('div.messagesError p')
        if len(p) > 0:
            message = p[0].text.strip()
            raise UserError(message)


class SearchInProgressPage(BasePage):
    def on_loaded(self):
        link = self.document.xpath('//a[@id="url_redirect_proposals"]')[0]
        self.browser.location(link.attrib['href'])


class ResultsPage(BasePage):
    def get_value(self, div, name, last=False):
        i = -1 if last else 0
        p = div.cssselect(name)[i]
        sub = p.find('p')
        if sub is not None:
            txt = sub.tail.strip()
            if txt == '':
                p.remove(sub)
            else:
                return unicode(txt)
        return unicode(self.parser.tocleanstring(p))

    def parse_hour(self, div, name, last=False):
        txt = self.get_value(div, name, last)
        hour, minute = map(int, txt.split('h'))
        return time(hour, minute)

    def iter_results(self):
        for div in self.document.getroot().cssselect('div.train_info'):
            info = None
            price = None
            currency = None
            for td in div.cssselect('td.price'):
                txt = self.parser.tocleanstring(td)
                p = Decimal(re.sub(r'([^\d\.]+)', '', txt))
                if price is None or p < price:
                    info = list(div.cssselect('strong.price_label')[0]
                                .itertext())[-1].strip().strip(':')
                    price = p
                    currency = Currency.get_currency(txt)

            yield {'type': self.get_value(div, 'div.transporteur-txt'),
                   'time': self.parse_hour(div, 'div.departure div.hour'),
                   'departure': self.get_value(div, 'div.departure div.station'),
                   'arrival': self.get_value(div, 'div.arrival div.station', last=True),
                   'arrival_time': self.parse_hour(div, 'div.arrival div.hour', last=True),
                   'price': price,
                   'currency': currency,
                   'price_info': info,
                   }
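# A minimal self-check of the two parsing conventions used above (illustrative
# only, not part of the original module; the sample strings "18h35" and
# "45.00 EUR" are assumed shapes of the scraped markup):
if __name__ == '__main__':
    # parse_hour splits "HHhMM" on the literal 'h'
    assert time(*map(int, '18h35'.split('h'))) == time(18, 35)
    # iter_results strips everything but digits and dots before Decimal()
    assert Decimal(re.sub(r'([^\d\.]+)', '', '45.00 EUR')) == Decimal('45.00')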
from pymander.exceptions import CantParseLine
from pymander.handlers import LineHandler, RegexLineHandler, ArgparseLineHandler
from pymander.contexts import StandardPrompt
from pymander.commander import Commander
from pymander.decorators import bind_command


class DeeperLineHandler(LineHandler):
    def try_execute(self, line):
        if line.strip() == 'deeper':
            deeper_context = self.context.clone()
            deeper_context.name = '{0} / ctx {1}'.format(self.context.name, id(deeper_context))
            self.context.write('Going deeper!\nNow in: {0}\n'.format(deeper_context))
            return deeper_context
        raise CantParseLine(line)


class RaynorLineHandler(LineHandler):
    def try_execute(self, line):
        if line.strip() == 'kerrigan':
            self.context.write('Oh, Sarah...\n')
            return
        raise CantParseLine(line)


class BerryLineHandler(RegexLineHandler):
    @bind_command(r'pick a (?P<berry_kind>\w+)')
    def pick_berry(self, berry_kind):
        self.context.write('Picked a {0}\n'.format(berry_kind))

    @bind_command(r'make (?P<berry_kind>\w+) jam')
    def make_jam(self, berry_kind):
        self.context.write('Made some {0} jam\n'.format(berry_kind))


class GameLineHandler(ArgparseLineHandler):
    @bind_command('play', [
        ['game', {'type': str, 'default': 'nothing'}],
        ['--well', {'action': 'store_true'}],
    ])
    def play(self, game, well):
        self.context.write('I play {0}{1}\n'.format(game, ' very well' if well else ''))

    @bind_command('win')
    def win(self):
        self.context.write('I just won!\n')


def main():
    com = Commander(
        StandardPrompt([
            DeeperLineHandler(),
            BerryLineHandler(),
            GameLineHandler(),
            RaynorLineHandler(),
        ])
    )
    com.mainloop()


if __name__ == '__main__':
    main()
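# Hedged example session with the Commander above (inputs and outputs follow
# the handlers as written; the exact prompt rendering comes from
# StandardPrompt and is not shown here):
#
#   pick a strawberry     ->  Picked a strawberry
#   make strawberry jam   ->  Made some strawberry jam
#   play chess --well     ->  I play chess very well
#   kerrigan              ->  Oh, Sarah...
#   deeper                ->  Going deeper! (and switches to a cloned context)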
from django.conf.urls import include, url
from django.contrib import admin
from rest_framework.routers import DefaultRouter

from sk_map.api.map import MapViewSet, WallViewSet, BoxViewSet, PointViewSet, MenViewSet,\
    WallListViewSet, BoxListViewSet, PointListViewSet, MenListViewSet, MapListViewSet
from sk_auth.api.auth import RegisterView, AuthAPIView
from sk_game.api.game import GameViewSet
from sk_skins.api.skins import SkinView

action = {'get': 'retrieve', 'put': 'update', 'delete': 'destroy'}
action_with_patch = {'get': 'retrieve', 'put': 'update', 'delete': 'destroy', 'patch': 'partial_update'}
action_no_pk = {'get': 'list', 'post': 'create'}

router = DefaultRouter()
router.register(r'skins', SkinView)
router.register(r'auth/register', RegisterView)
urlpatterns = router.urls

urlpatterns_game = [
    url(r'^game/(?P<map>\d+)/$', GameViewSet.as_view({'get': 'retrieve', 'patch': 'partial_update'})),
    url(r'^game/$', GameViewSet.as_view({'get': 'retrieve', 'put': 'update', 'delete': 'destroy', 'post': 'create'})),
]

# This must be a list, not a set: URL resolution order matters.
urlpatterns_map = [
    url(r'^map/(?P<pk>\d+)/$', MapViewSet.as_view(action_with_patch)),
    url(r'^map/$', MapListViewSet.as_view(action_no_pk)),
]

urlpatterns_map_obj = [
    url(r'^wall/(?P<pk>\d+)/$', WallViewSet.as_view(action)),
    url(r'^wall/$', WallListViewSet.as_view(action_no_pk)),
    url(r'^box/(?P<pk>\d+)/$', BoxViewSet.as_view(action)),
    url(r'^box/$', BoxListViewSet.as_view(action_no_pk)),
    url(r'^point/(?P<pk>\d+)/$', PointViewSet.as_view(action)),
    url(r'^point/$', PointListViewSet.as_view(action_no_pk)),
    url(r'^men/(?P<pk>\d+)/$', MenViewSet.as_view(action)),
    url(r'^men/$', MenListViewSet.as_view(action_no_pk)),
]

urlpatterns_admin = [
    url(r'^admin/', include(admin.site.urls)),
]

urlpatterns_auth = [
    url(r'^auth/', AuthAPIView.as_view(), name='login_view')
]

patterns_swagger = [
    url(r'^docs/', include('rest_framework_swagger.urls')),
]

urlpatterns += urlpatterns_admin
urlpatterns += urlpatterns_auth
urlpatterns += patterns_swagger
urlpatterns += urlpatterns_map_obj
urlpatterns += urlpatterns_game
urlpatterns += urlpatterns_map
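# With the action maps above, requests resolve as follows (paths are relative
# to the site root; pk values are passed as strings by the URL resolver):
#   GET    /map/3/   -> MapViewSet.retrieve(pk='3')
#   PATCH  /map/3/   -> MapViewSet.partial_update(pk='3')
#   GET    /map/     -> MapListViewSet.list()
#   POST   /map/     -> MapListViewSet.create()
#   GET    /wall/5/  -> WallViewSet.retrieve(pk='5')  # no 'patch' in `action`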
# nd the most expansive ones (adv. stargate) ~ 6 days.
# We are using log10() as it's quicker than log()
_magicBase = 1.0 / (turnsPerDay * 2)
_repairMagicBase = math.log10(480 * structDefaultCpCosts) ** 2 * _magicBase
repairRatioFunc = lambda x: _repairMagicBase / math.log10(x) ** 2
# building decay ratio, capped for anything at or below the 480 CP repair reference
decayRatioFunc = lambda x: min(_magicBase, repairRatioFunc(x))
decayProdQueue = 0.02

## Environment
envInterval = 1000
envAutoMod = 10.0
envMax = 200
envSelfUpgradeChance = {"H": 5, "C": 1, "B": 500, "m": 100, "r": 100, "p": 100, "e": 100} # in ten thousandths (10 000)

planetSpec = {}
planetSpec[u'A'] = makeIDataHolder(
    minBio = 0,
    maxBio = 0,
    upgradeTo = None,
    downgradeTo = None,
)
planetSpec[u'G'] = makeIDataHolder(
    minBio = 0,
    maxBio = 0,
    upgradeTo = None,
    downgradeTo = None,
)
planetSpec[u'C'] = makeIDataHolder(
    minBio = 0,
    maxBio = 6,
    upgradeTo = u'D',
    upgradeEnReqs = (5, 180),
    downgradeTo = None,
)
planetSpec[u'R'] = makeIDataHolder(
    minBio = 0,
    maxBio = 6,
    upgradeTo = u'D',
    upgradeEnReqs = (5, 180),
    downgradeTo = None,
)
planetSpec[u'D'] = makeIDataHolder(
    minBio = 6,
    maxBio = 12,
    upgradeTo = u'H',
    upgradeEnReqs = (25, 150),
    downgradeTo = u'R',
)
planetSpec[u'H'] = makeIDataHolder(
    minBio = 12,
    maxBio = 25,
    upgradeTo = u'M',
    upgradeEnReqs = (50, 125),
    downgradeTo = u'D',
)
planetSpec[u'M'] = makeIDataHolder(
    minBio = 25,
    maxBio = 75,
    upgradeTo = u'E',
    upgradeEnReqs = (50, 100),
    downgradeTo = u'H',
)
planetSpec[u'E'] = makeIDataHolder(
    minBio = 75,
    maxBio = 125,
    upgradeTo = u"I",
    upgradeEnReqs = (50, 100),
    downgradeTo = u'M',
)
planetSpec[u"I"] = makeIDataHolder( # gaia
    minBio = 125,
    maxBio = 200,
    upgradeTo = None,
    downgradeTo = u"E",
)

## New colony settings
colonyMinBio = 600
colonyMinMin = 600
colonyMinEn = 600

## Storage
popPerSlot = 0
bioPerSlot = 0
minPerSlot = 0
enPerSlot = 0
popBaseStor = 4800
bioBaseStor = 4800
minBaseStor = 4800
enBaseStor = 4800
autoMinStorTurns = 2
tlPopReserve = 100

## Resources
stratResRate = turnsPerDay * 6
stratResAmountBig = 10
stratResAmountSmall = 1

## Population
popGrowthRate = 0.02
popMinGrowthRate = int(5000 * popGrowthRate) # increases the minimum population growth from 20 to 100 per turn
popDieRate = 0.1
popMinDieRate = 100
popKillMod = 0.25
popSlotKillMod = 5 # how many people get killed per 1 DMG when a slot is hit
popSlotHP = 100 # HP of habitable structures on a slot (where people live)

## Research
maxRsrchQueueLen = 10
techBaseImprovement = 1
techMaxImprovement = 5
techImprCostMod = {1: 480, 2: 480, 3: 720, 4: 960, 5: 1200, 6: 1440, 7: 1680} # per level
sciPtsPerCitizen = {1: 0, 2: 0.00075, 3: 0.00150, 4: 0.00175, 5: 0.00200, 6: 0.002125, 7: 0.00225, 99: 0} # per level
techImprEff = {1: 0.750, 2: 0.875, 3: 1.000, 4: 1.125, 5: 1.250} # per sublevel
#maxSciPtsTL = {1:100, 2:200, 3:300, 4:400, 5:500, 6:600, 7:700}
#sciPtsStepFraction = 0.25

## Scanner
maxSignature = 100
scannerMinPwr = 1
scannerMaxPwr = 150
level1InfoScanPwr = 1000
level2InfoScanPwr = 1200
level3InfoScanPwr = 1400
level4InfoScanPwr = 1600
maxScanPwr = 200000
mapForgetScanPwr = 0.94
partnerScanPwr = 300000

## Fleets
maxCmdQueueLen = 10
signatureBase = 1.10
operProdRatio = 0.001
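# Worked example for the repair/decay ratios defined at the top of this
# section (turnsPerDay and structDefaultCpCosts are set earlier in this file;
# 24 and 120 are assumed here purely for illustration):
#   _magicBase       = 1.0 / (24 * 2)              ~= 0.0208
#   _repairMagicBase = log10(480 * 120) ** 2 / 48  ~= 0.472
#   repairRatioFunc(480 * 120) == _magicBase        (by construction)
#   repairRatioFunc(10 ** 6)   ~= 0.472 / 36       ~= 0.0131
# so decayRatioFunc() caps cheap structures at _magicBase, while structures
# costlier than the 480 CP reference decay proportionally more slowly.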
combatRetreatWait = 3
starGateDamage = 0.2 # damage for 100% speed boost (double for 200%, etc.)
shipDecayRatio = 0.04
maxDamageAbsorb = 5 # max absorbed damage for the tech "damageAbsorb" property

# max seq_mod equipment items per equipType; anything not in the list is unlimited
maxEquipType = {
    'ECM': 1,              # +Missile DEF
    'Combat Bonuses': 1,   # +%ATT, +%DEF
    'Combat Modifiers': 1, # +ATT, +DEF
    'Shields': 1,          # not hardshields
    'Stealth': 1,
    'Auto Repair': 1,
}

## Buildings
plShieldRegen = 0.05 # regen rate of the planetary shield

## Diplomacy
baseRelationChange = -5
relLostWhenAttacked = -1000000
defaultRelation = Const.REL_NEUTRAL
contactTimeout = 6 * turnsPerDay
voteForImpAnnounceOffset = 2 * turnsPerDay
voteForImpPeriod = 6 * turnsPerDay
ratioNeededForImp = 0.6666

pactDescrs = {}
pactDescrs[Const.PACT_ALLOW_CIVILIAN_SHIPS] = makeIDataHolder(
    targetRel = 500,
    relChng = 10,
    validityInterval = (0, 10000),
)
pactDescrs[Const.PACT_ALLOW_MILITARY_SHIPS] = makeIDataHolder(
    targetRel = 750,
    relChng = 8,
    validityInterval = (0, 10000),
)
pactDescrs[Const.PACT_ALLOW_TANKING] = makeIDataHolder(
    targetRel = 750,
    relChng = 7,
    validityInterval = (0, 10000),
)
pactDescrs[Const.PACT_MINOR_CP_COOP] = makeIDataHolder(
    targetRel = 1000,
    relChng = 6,
    effectivity = 0.05,
    validityInterval = (625, 10000),
)
pactDescrs[Const.PACT_MAJOR_CP_COOP] = makeIDataHolder(
    targetRel = 1000,
    relChng = 1,
    effectivity = 0.05,
    validityInterval = (875, 10000),
)
pactDescrs[Const.PACT_SHARE_SCANNER] = makeIDataHolder(
    targetRel = 1000,
    relChng = 1,
    validityInterval = (625, 10000),
)
pactDescrs[Const.PACT_MINOR_SCI_COOP] = makeIDataHolder(
    targetRel = 750,
    relChng = 1,
    effectivity = 0.05,
    validityInterval = (625, 10000),
)
pactDescrs[Const.PACT_MAJOR_SCI_COOP] = makeIDataHolder(
    targetRel = 1000,
    relChng = 1,
    effectivity = 0.05,
    validityInterval = (875, 10000),
)

## Morale
baseGovPwr = 50000
maxMorale = 100.0
minMoraleTrgt = 30.0
revoltThr = 25.0
moraleChngPerc = 0.03
moraleHighPopPenalty = 2.0
moraleBasePop = 10000
moraleLowPop = 5000
moraleLowPopBonus = 40.0
moraleLostWhenSurrender = 0.0
moraleLostNoFood = 1.0
moraleModPlHit = 96.0 # how many morale points per 1 percent of damage
moralePerPointChance = 5.0 # for every point below revoltThr, % chance of revolt
moraleProdStep = 10
moraleProdBonus = [-0.875, -0.75, -0.625, -0.50, -0.375, -0.25, -0.125, 0.0, 0.0, 0.125, 0.25]
# We expect the pop reserve from TL (tlPopReserve * TL1) to end up unemployed.
# If we get no reserve, there is a morale hit; if we get at least the reserve,
# it's a bonus; linear in between.
unemployedMoraleLow = -20
unemployedMoraleHigh = 10

## Revolt
revoltDestrBio = 0.05
revoltDestrMin = 0.05
revoltDestrEn = 0.05
revoltPenalty = 0.75

## Messages
messageMaxAge = turnsPerDay * 3

## Projects
projECOINIT3PlBio = 1

## Ships
shipImprovementMod = 1.05
shipMaxImprovements = 5
shipMaxDesigns = 40
shipExpToLevel = {0: 1, 1: 2, 2: 2, 3: 3, 4: 3, 5: 3, 6: 3, 7: 4, 8: 4, 9: 4, 10: 4, 11: 4, 12: 4, 13: 4, 15: 5}
shipDefLevel = 5
shipLevelEff = {1: 0.50, 2: 0.75, 3: 1.00, 4: 1.25, 5: 1.50}
shipBaseExpMod = 20
shipBaseExp = {0: 10, 1: 20, 2: 40, 3: 80, 4: 160}
shipTargetPerc = [25, 50, 90, 100]
shipMinUpgrade = 120
shipUpgradeMod = 1.375
shipUpgradePts = [1, 3, 10]
weaponDmgDegrade = [1.0, 0.5, 0.25, 0.125]

## EMR
emrMinDuration = 36
emrMaxDuration = 60
emrPeriod = 576
emrSeasons = [None, None, None, None]
emrSeasons[0] = makeIDataHolder(
    name = "spring",
    startTime = 0,
    endTime = 143,
    emrLevelMin = 0.75,
    emrLevelMax = 1.25,
)
emrSeasons[1] = makeIDataHolder(
    name = "summer",
    startTime = 144,
    endTime = 287,
    emrLevelMin = 0.50,
    emrLevelMax = 1.00,
)
emrSeasons[2] = makeIDataHolder(
    name = "fall",
    startTime = 288,
    endTime = 431,
    emrLevelMin = 0.50,
    emrLevelMax = 1.50,
)
emrSeasons[3] = makeIDataHolder(
    name = "winter",
    startTime = 432,
    endTime = 575,
    emrLevelMin = 1.00,
    emrLevelMax = 1.50,
)

## Pirates
## General
pirateInfluenceRange = 7.5 # in parsecs
pirateGovPwr = int(500000 * 1.25)

## Fame
pirateGainFamePropability = lambda d: 2 - d * 0.2
pirateLoseFameProbability = lambda d: 1 - (15 - d) * 0.2
pirateCaptureInRangeFame = 1
pirateSurvivalFame = 1
pirateCaptureOutOfRangeFame = -1

## Colonization
pirateColonyCostMod = 1.5 # base multiplier - all other multipliers are multiplied by this
pirateTL3StratResColonyCostMod = 0.25
piratePlayerZoneCostMod = 1.25
pirateColonyFameZoneCost = lambda d: min(d * 0.1 + pirateTL3StratResColonyCostMod, 1)
pirateColonyPlayerZoneCost = lambda d: piratePlayerZoneCostMod + (d - 15) * 0.01 * piratePlayerZoneCostMod

## Techs
pirateCanStealImprovements = 3
pirateGrantHSE = 60 * 24 * 3600 # 60 days; AI only
pira
"""__Main__.""" import sys import os import logging import argparse import traceback import shelve from datetime import datetime from CONSTANTS import CONSTANTS from settings.settings import load_config, load_core, load_remote, load_email from settings.settings import load_html, load_sms from core import read_structure, readStructureFromFile, updateStructure from core import clean_video_db, syncDirTree, transferLongVersions from core import executeToDoFile, build_html_report, umount from core import check_and_correct_videos_errors, clean_remote from core import get_new_file_ids_from_structure, mount, check_mkv_videos from notifications import send_sms_notification, send_mail_report, send_mail_log def get_args(): """Get args.""" parser = argparse.ArgumentParser(description='pyHomeVM') parser.add_argument('-c', '--config_file_path', action='store', default='settings/dev_config.cfg', help='path to config file that is to be used.') parser.add_argument('-s', '--sms', help='Enables sms notifications', action='store_true') parser.add_argument('-l', '--log', help='Enables log sending by e-mail', action='store_true') parser.add_argument('-r', '--report', help='Enables html report sending by e-mail', action='store_true') parser.add_argument('-rem', '--remote', help='Enables transfer of long versions to remote storage', action='store_true') parser.add_argument('-b', '--backup', help='Enables backup of first videos', action='store_true') parser.add_argument('-stats', help='Gets you statistics about your videos', action='store_true') args = parser.parse_args() return args def load_logger(): """Load logger.""" logger = logging.getLogger(__name__) logger.setLevel(logging.DEBUG) handler = logging.FileHandler(CONSTANTS['log_file_path']) formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s') handler.setFormatter(formatter) logger.addHandler(handler) return logger def main(argv=None): """Run main.""" start_time = datetime.now() args = get_args() # Get args logger = load_logger() # Set logger logger.info('PROGRAM STARTED') pid = str(os.getpid()) pidfile = "/tmp/pyHomeVM.pid" config = load_config(args.config_file_path) # load config file if os.path.isfile(pidfile): logger.info('Program already running') html = load_html(config) email = load_email(config) send_ma
il_log(CONSTANTS['log_file_path'], email, html) sys.exit() file(pidfile, 'w').write(pid) (ffmpeg, local) = load_core(config) # load core configs remote = load_remote(config) html = load_html(config) sms = load_sms(con
fig) email = load_email(config) if(args.log): email = load_email(config) if(args.report): html = load_html(config) if(args.remote): remote = load_remote(config) if(args.sms): sms = load_sms(config) video_db = shelve.open(CONSTANTS['video_db_path'], writeback=True) try: if not os.path.exists(CONSTANTS['structure_file_path']): raise Exception("Directory structure definition file not found.") past_structure = readStructureFromFile(CONSTANTS) except Exception: logger.info(traceback.format_exc()) logger.info('{} not found'.format(CONSTANTS['structure_file_path'])) past_structure = {} # Start as new new_structure = read_structure(local) video_ids = get_new_file_ids_from_structure(new_structure, video_db) check_and_correct_videos_errors(video_ids, video_db, local, ffmpeg) logger.info('Checked for errors and corrupted') html_data = updateStructure( past_structure, read_structure(local), local, ffmpeg, remote, video_db) sms_sent_file = os.path.join(CONSTANTS['script_root_dir'], 'sms_sent') if(mount(remote)): logger.info('Mount succesfull') syncDirTree(local, remote) transferLongVersions(local, remote, video_db) if(os.path.isfile(CONSTANTS['todo_file_path'])): executeToDoFile(CONSTANTS['todo_file_path'], local, CONSTANTS) if(os.path.exists(sms_sent_file)): os.remove(sms_sent_file) logger.info('sms_sent file has been deleted') clean_remote(remote) umount(remote) else: logger.info('Mount unssuccesfull') if(not os.path.exists(sms_sent_file) and args.sms): send_sms_notification(sms) logger.info('Sms sent') with open(sms_sent_file, 'w') as sms_not: msg = 'SMS has been sent {}'.format(CONSTANTS['TODAY']) sms_not.write(msg) logger.info(msg) if(args.report and ( html_data['new'] != '' or html_data['modified'] != '' or html_data['deleted'] != '' or html_data['moved'] != '')): html_report = build_html_report(html_data, CONSTANTS, html) send_mail_report(html_report, email) logger.info('Mail report sent') if(args.log): send_mail_log(CONSTANTS['log_file_path'], email, html) logger.info('log file sent') clean_video_db(video_db) check_mkv_videos(local, video_db) logger.info('DB cleaned') video_db.close() logger.info('Script ran in {}'.format(datetime.now() - start_time)) os.unlink(pidfile) if __name__ == "__main__": sys.exit(main())
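# Hedged usage examples (flag names come from get_args() above; the script
# filename and the config path are illustrative):
#   python pyHomeVM.py                              # uses settings/dev_config.cfg
#   python pyHomeVM.py -c settings/prod.cfg -s -r   # SMS notifications + HTML report
#   python pyHomeVM.py -rem -l                      # remote transfer + e-mail the log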
# -*-coding:Utf-8 -*

# Copyright (c) 2013 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
#   list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
#   this list of conditions and the following disclaimer in the documentation
#   and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
#   may be used to endorse or promote products derived from this software
#   without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.

"""Module containing the 'recruter' parameter of the 'matelot' command."""

from primaires.interpreteur.masque.parametre import Parametre


class PrmRecruter(Parametre):

    """The 'matelot recruter' command."""

    def __init__(self):
        """Parameter constructor."""
        Parametre.__init__(self, "recruter", "recruit")
        self.schema = "(<nombre> <personnage_present>)"
        self.tronquer = True
        self.aide_courte = "recrute un matelot"
        self.aide_longue = \
            "Cette commande permet de recruter un matelot présent " \
            "dans la même salle que vous. Deux cas sont à distinguer " \
            ": si vous êtes à terre (si vous êtes dans un bureau de " \
            "recrutement par exemple), vous pouvez demander aux matelots " \
            "récemment recrutés de rejoindre votre bord. Si vous êtes " \
            "sur un navire (que vous venez d'aborder, par exemple), vous " \
            "pouvez demander à un matelot de rejoindre votre navire si " \
            "celui-ci est assez proche. Cette commande prend deux " \
            "arguments : le numéro correspondant à votre navire. Vous " \
            "pouvez entrer la commande sans paramètre pour le connaître, " \
            "les navires que vous possédez (et qui peuvent être utilisés " \
            "pour le recrutement) seront affichés. Le second paramètre " \
            "est un fragment du nom du personnage que vous souhaitez " \
            "recruter. Si la commande réussit, le matelot recruté " \
            "rejoindra le navire ciblé d'ici quelques instants. Veillez " \
            "à rester accosté si vous êtes dans un port, sans quoi les " \
            "matelots ne pourront pas vous rejoindre."
    def interpreter(self, personnage, dic_masques):
        """Interpret the parameter."""
        salle = personnage.salle
        navires = importeur.navigation.get_navires_possedes(personnage)
        navire = getattr(salle, "navire", None)
        if dic_masques["nombre"] and dic_masques["personnage_present"]:
            nombre = dic_masques["nombre"].nombre
            cible = dic_masques["personnage_present"].personnage
            cle = getattr(cible, "cle", None)
            try:
                fiche = importeur.navigation.fiches[cle]
            except KeyError:
                personnage.envoyer("|err|Vous ne pouvez recruter {}.|ff|",
                        cible)
                return

            try:
                n_cible = navires[nombre - 1]
            except IndexError:
                personnage << "|err|Ce navire n'est pas visible.|ff|"
                return

            if cible.etats:
                personnage.envoyer("{} est occupé.", cible)
                return

            # Pretend to leave
            if navire is None:
                sortie = [s for s in salle.sorties][0]
                salle.envoyer("{{}} s'en va vers {}.".format(
                        sortie.nom_complet), cible)
            else:
                salle.envoyer("{} saute à l'eau.", cible)
                matelot = navire.equipage.get_matelot_depuis_personnage(
                        cible)
                if matelot:
                    navire.equipage.supprimer_matelot(matelot.nom)

            cible.salle = None
            nom = "matelot_" + cible.identifiant
            importeur.diffact.ajouter_action(nom, 15, fiche.recruter,
                    cible, n_cible)
            personnage.envoyer("Vous recrutez {{}} sur {}.".format(
                    n_cible.desc_survol), cible)
        else:
            if navires:
                msg = "Navires que vous possédez :\n"
                for i, navire in enumerate(navires):
                    msg += "\n  |ent|{}|ff| - {}".format(i + 1,
                            navire.desc_survol)
            else:
                msg = "|att|Vous ne possédez aucun navire " \
                        "pouvant servir au recrutement.|ff|"

            personnage << msg