code
stringlengths
3
1.05M
repo_name
stringlengths
5
104
path
stringlengths
4
251
language
stringclasses
1 value
license
stringclasses
15 values
size
int64
3
1.05M
# coding=utf-8 from core.redditrover import RedditRover if __name__ == "__main__": rr = RedditRover()
DarkMio/RedditRover
main.py
Python
gpl-2.0
107
def makefastqdumpScript(sraFileNameListFileName, outputDirectory, scriptFileName): # Make a script that will run fastq-dump on each of a list of SRA files sraFileNameListFile = open(sraFileNameListFileName) scriptFile = open(scriptFileName, 'w+') for line in sraFileNameListFile: # Iterate through the SRA files and make a line in the script for each scriptFile.write("fastq-dump --split-3 --outdir " + outputDirectory + " --gzip " + line.strip() + "\n") sraFileNameListFile.close() scriptFile.close() if __name__=="__main__": import sys sraFileNameListFileName = sys.argv[1] # Contents should end with .sra outputDirectory = sys.argv[2] scriptFileName = sys.argv[3] makefastqdumpScript(sraFileNameListFileName, outputDirectory, scriptFileName)
imk1/IMKTFBindingCode
makefastqdumpScript.py
Python
mit
788
# Copyright 2013 # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import netaddr from django import template from django.template.loader import render_to_string from django.utils.translation import ugettext_lazy as _ from horizon import tabs from openstack_dashboard import api from openstack_dashboard.dashboards.project.routers.extensions.routerrules\ import rulemanager from openstack_dashboard.dashboards.project.routers.extensions.routerrules\ import tables as rrtbl class RouterRulesTab(tabs.TableTab): table_classes = (rrtbl.RouterRulesTable,) name = _("Router Rules") slug = "routerrules" template_name = "horizon/common/_detail_table.html" def allowed(self, request): try: getattr(self.tab_group.kwargs['router'], 'router_rules') return True except Exception: return False def get_routerrules_data(self): try: routerrules = getattr(self.tab_group.kwargs['router'], 'router_rules') except Exception: routerrules = [] return [rulemanager.RuleObject(r) for r in routerrules] def post(self, request, *args, **kwargs): if request.POST['action'] == 'routerrules__resetrules': kwargs['reset_rules'] = True rulemanager.remove_rules(request, [], **kwargs) self.tab_group.kwargs['router'] = \ api.neutron.router_get(request, kwargs['router_id']) class RulesGridTab(tabs.Tab): name = _("Router Rules Grid") slug = "rulesgrid" template_name = ("project/routers/extensions/routerrules/grid.html") def allowed(self, request): try: getattr(self.tab_group.kwargs['router'], 'router_rules') return True except Exception: return False def render(self): context = template.RequestContext(self.request) return render_to_string(self.get_template_name(self.request), self.data, context_instance=context) def get_context_data(self, request, **kwargs): data = {'router': {'id': self.tab_group.kwargs['router_id']}} self.request = request rules, supported = self.get_routerrules_data(checksupport=True) if supported: data["rulesmatrix"] = self.get_routerrulesgrid_data(rules) return data def get_routerrulesgrid_data(self, rules): ports = self.tab_group.kwargs['ports'] networks = api.neutron.network_list_for_tenant( self.request, self.request.user.tenant_id) netnamemap = {} subnetmap = {} for n in networks: netnamemap[n['id']] = n.name_or_id for s in n.subnets: subnetmap[s.id] = {'name': s.name, 'cidr': s.cidr} matrix = [] subnets = [] for port in ports: for ip in port['fixed_ips']: if ip['subnet_id'] not in subnetmap: continue sub = {'ip': ip['ip_address'], 'subnetid': ip['subnet_id'], 'subnetname': subnetmap[ip['subnet_id']]['name'], 'networkid': port['network_id'], 'networkname': netnamemap[port['network_id']], 'cidr': subnetmap[ip['subnet_id']]['cidr']} subnets.append(sub) subnets.append({'ip': '0.0.0.0', 'subnetid': 'external', 'subnetname': '', 'networkname': 'external', 'networkid': 'external', 'cidr': '0.0.0.0/0'}) subnets.append({'ip': '0.0.0.0', 'subnetid': 'any', 'subnetname': '', 'networkname': 'any', 'networkid': 'any', 'cidr': '0.0.0.0/0'}) for source in subnets: row = {'source': dict(source), 'targets': []} for target in subnets: target.update(self._get_subnet_connectivity( source, target, rules)) row['targets'].append(dict(target)) matrix.append(row) return matrix def _get_subnet_connectivity(self, src_sub, dst_sub, rules): v4_any_words = ['external', 'any'] connectivity = {'reachable': '', 'inverse_rule': {}, 'rule_to_delete': False} src = src_sub['cidr'] dst = dst_sub['cidr'] # differentiate between external and any src_rulename = src_sub['subnetid'] if src == '0.0.0.0/0' else src dst_rulename = dst_sub['subnetid'] if dst == '0.0.0.0/0' else dst if str(src) == str(dst): connectivity['reachable'] = 'full' return connectivity matchingrules = [] for rule in rules: rd = rule['destination'] if rule['destination'] in v4_any_words: rd = '0.0.0.0/0' rs = rule['source'] if rule['source'] in v4_any_words: rs = '0.0.0.0/0' rs = netaddr.IPNetwork(rs) src = netaddr.IPNetwork(src) rd = netaddr.IPNetwork(rd) dst = netaddr.IPNetwork(dst) # check if cidrs are affected by rule first if (int(dst.network) >= int(rd[-1]) or int(dst[-1]) <= int(rd.network) or int(src.network) >= int(rs[-1]) or int(src[-1]) <= int(rs.network)): continue # skip matching rules for 'any' and 'external' networks if (str(dst) == '0.0.0.0/0' and str(rd) != '0.0.0.0/0'): continue if (str(src) == '0.0.0.0/0' and str(rs) != '0.0.0.0/0'): continue # external network rules only affect external traffic if (rule['source'] == 'external' and src_rulename not in v4_any_words): continue if (rule['destination'] == 'external' and dst_rulename not in v4_any_words): continue match = {'bitsinsrc': rs.prefixlen, 'bitsindst': rd.prefixlen, 'rule': rule} matchingrules.append(match) if not matchingrules: connectivity['reachable'] = 'none' connectivity['inverse_rule'] = {'source': src_rulename, 'destination': dst_rulename, 'action': 'permit'} return connectivity sortedrules = sorted(matchingrules, key=lambda k: (k['bitsinsrc'], k['bitsindst']), reverse=True) match = sortedrules[0] if (match['bitsinsrc'] > src.prefixlen or match['bitsindst'] > dst.prefixlen): connectivity['reachable'] = 'partial' connectivity['conflicting_rule'] = match['rule'] return connectivity if (match['rule']['source'] == src_rulename and match['rule']['destination'] == dst_rulename): connectivity['rule_to_delete'] = match['rule'] if match['rule']['action'] == 'permit': connectivity['reachable'] = 'full' inverseaction = 'deny' else: connectivity['reachable'] = 'none' inverseaction = 'permit' connectivity['inverse_rule'] = {'source': src_rulename, 'destination': dst_rulename, 'action': inverseaction} return connectivity def get_routerrules_data(self, checksupport=False): try: routerrules = getattr(self.tab_group.kwargs['router'], 'router_rules') supported = True except Exception: routerrules = [] supported = False if checksupport: return routerrules, supported return routerrules
FNST-OpenStack/horizon
openstack_dashboard/dashboards/project/routers/extensions/routerrules/tabs.py
Python
apache-2.0
8,803
# -*- coding: utf-8 -*- # Copyright (C) 2013-2014 Avencall # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/> import re class DialplanParsingError(Exception): pass class DialplanParser(object): def parse(self, fobj): parse_result = self._do_parse(fobj) parse_result.filename = '<fobj>' return parse_result def parse_file(self, filename): with open(filename) as fobj: parse_result = self._do_parse(fobj) parse_result.filename = filename return parse_result def _do_parse(self, fobj): parse_result = _DialplanParseResult() line_parser = _DialplanLineParser() for dialplan_line in fobj: line_parser.parse_next_line(dialplan_line) line = _DialplanLineParseResult(dialplan_line.rstrip('\n'), line_parser.is_executable) if line_parser.is_executable: line.context = line_parser.context line.extension = line_parser.extension line.priority = line_parser.priority parse_result._add_extension(line_parser.context, line_parser.extension) parse_result.lines.append(line) return parse_result class _DialplanLineParser(object): def __init__(self): self.context = None self.extension = None self.priority = None self.is_executable = False _CONTEXT_REGEX = re.compile(r'^\[([a-zA-Z0-9_\-]+)\]') _EXTEN_REGEX = re.compile(r'^exten\s*=\s*([^,]+),(n|\d+)') _SAME_REGEX = re.compile(r'^same\s*=\s*(n|\d+)') def parse_next_line(self, line): stripped_line = line.strip() if not stripped_line: # blank line self.is_executable = False elif stripped_line.startswith(';'): # comment line self.is_executable = False elif self._try_parse_context_line(stripped_line): self.is_executable = False elif self._try_parse_extension_line(stripped_line): self.is_executable = True elif self._try_parse_same_line(stripped_line): self.is_executable = True else: # unknown line, probably an include or an #exec self.is_executable = False def _try_parse_context_line(self, stripped_line): mo = self._CONTEXT_REGEX.match(stripped_line) if mo: self.context = mo.group(1) self.extension = None self.priority = None return True def _try_parse_extension_line(self, stripped_line): mo = self._EXTEN_REGEX.match(stripped_line) if mo: if self.context is None: raise DialplanParsingError('extension defined outside a context') self.extension = mo.group(1) new_priority = mo.group(2) self._adjust_priority(new_priority) return True def _adjust_priority(self, new_priority): if new_priority == 'n': if self.priority is None: raise DialplanParsingError('n priority used before explicit priority') self.priority += 1 else: self.priority = int(new_priority) def _try_parse_same_line(self, stripped_line): mo = self._SAME_REGEX.match(stripped_line) if mo: if self.context is None: raise DialplanParsingError('extension defined outside a context') if self.extension is None: raise DialplanParsingError('same extension before explicit extension') new_priority = mo.group(1) self._adjust_priority(new_priority) return True class _DialplanParseResult(object): def __init__(self): self.lines = [] self._contexts = {} def _add_extension(self, context, extension): self._contexts.setdefault(context, set()).add(extension) def has_extension(self, context, extension): return extension in self._contexts.get(context, ()) class _DialplanLineParseResult(object): def __init__(self, content, is_executable): self.content = content self.is_executable = is_executable
wazo-pbx/xivo-tools
visualplan/src/visualplan/dialplan.py
Python
gpl-3.0
4,796
import dbus as _dbus from razer.client.device import RazerDeviceFactory as _RazerDeviceFactory from razer.client import constants __version__ = '1.1.13' class DaemonNotFound(Exception): pass class DeviceManager(object): """ DeviceManager Class """ def __init__(self): # Load up the DBus session_bus = _dbus.SessionBus() try: self._dbus = session_bus.get_object("org.razer", "/org/razer") except _dbus.DBusException: raise DaemonNotFound("Could not connect to daemon") # Get interface for daemon methods self._dbus_daemon = _dbus.Interface(self._dbus, "razer.daemon") # Get interface for devices methods self._dbus_devices = _dbus.Interface(self._dbus, "razer.devices") self._device_serials = self._dbus_devices.getDevices() self._devices = [] self._daemon_version = self._dbus_daemon.version() for serial in self._device_serials: device = _RazerDeviceFactory.get_device(serial) self._devices.append(device) def stop_daemon(self): """ Stops the Daemon via a DBus call """ self._dbus_daemon.stop() @property def turn_off_on_screensaver(self): return self._dbus_devices.getOffOnScreensaver() @turn_off_on_screensaver.setter def turn_off_on_screensaver(self, enable): """ Enable or Disable the logic to turn off the devices whilst the screensaver is active If True, when the screensaver is active the devices' brightness will be set to 0. When the screensaver is inactive the devices' brightness will be restored :param enable: True to enable screensaver disable :type enable: bool :raises ValueError: If enable isnt a bool """ if not isinstance(enable, bool): raise ValueError("Enable must be a boolean") self._dbus_devices.enableTurnOffOnScreensaver(enable) @property def sync_effects(self): return self._dbus_devices.getSyncEffects() @sync_effects.setter def sync_effects(self, sync): """ Enable or disable the syncing of effects between devices If sync is enabled, whenever an effect is set then it will be set on all other devices if the effect is available or a similar effect if it is not. :param sync: Sync effects :type sync: bool :raises ValueError: If sync isnt a bool """ if not isinstance(sync, bool): raise ValueError("Sync must be a boolean") self._dbus_devices.syncEffects(sync) @property def devices(self): """ A list of Razer devices :return: List of devices :rtype: list[razer.client.devices.RazerDevice] """ return self._devices @property def version(self): """ Python library version :return: Version :rtype: str """ return __version__ @property def daemon_version(self): """ Daemon version :return: Daemon version :rtype: str """ return str(self._daemon_version) if __name__ == '__main__': a = DeviceManager() b = a.devices[0] print()
z3ntu/razer-drivers
pylib/razer/client/__init__.py
Python
gpl-2.0
3,290
""" 2D-Fourier Magnitude Coefficients --------------------------------- .. autosummary:: :toctree: generated/ Segmenter """ from .config import * from .segmenter import *
urinieto/msaf
msaf/algorithms/fmc2d/__init__.py
Python
mit
180
from MapCombiner import MapCombiner
hiuwo/acq4
acq4/analysis/modules/MapCombiner/__init__.py
Python
mit
36
# Copyright (C) 2007-2014 CEA/DEN, EDF R&D, OPEN CASCADE # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA # # See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com # # --- # File : GEOM_example7.py # Author : Vadim SANDLER, Open CASCADE S.A.S. (vadim.sandler@opencascade.com) # --- # import os import salome salome.salome_init() import GEOM from salome.geom import geomBuilder geompy = geomBuilder.New(salome.myStudy) data_dir = os.getenv('DATA_DIR') if data_dir: texture_1 = geompy.LoadTexture(os.path.join(data_dir, "Textures", "texture1.dat")) texture_2 = geompy.LoadTexture(os.path.join(data_dir, "Textures", "texture2.dat")) texture_3 = geompy.LoadTexture(os.path.join(data_dir, "Textures", "texture3.dat")) Vertex_1 = geompy.MakeVertex(0, 0, 0) Vertex_2 = geompy.MakeVertex(100, 0, 0) Vertex_3 = geompy.MakeVertex(0, 100, 0) Vertex_4 = geompy.MakeVertex(0, 0, 100) Vertex_5 = geompy.MakeVertex(100, 0, 100) Vertex_1.SetMarkerTexture(texture_1); Vertex_2.SetMarkerTexture(texture_2); Vertex_3.SetMarkerTexture(texture_3); Vertex_4.SetMarkerStd(GEOM.MT_O_PLUS, GEOM.MS_25); Vertex_5.SetMarkerStd(GEOM.MT_BALL, GEOM.MS_40); geompy.addToStudy( Vertex_1, "Vertex_1" ) geompy.addToStudy( Vertex_2, "Vertex_2" ) geompy.addToStudy( Vertex_3, "Vertex_3" ) geompy.addToStudy( Vertex_4, "Vertex_4" ) geompy.addToStudy( Vertex_5, "Vertex_5" ) pass
FedoraScientific/salome-geom
src/GEOM_SWIG/GEOM_example7.py
Python
lgpl-2.1
2,133
# -*- coding: utf-8 -*- import os import re from froide.settings import Base, ThemeBase, German rec = lambda x: re.compile(x, re.I | re.U) class FragDenStaatBase(German, ThemeBase, Base): gettext = lambda s: s LANGUAGES = ( ('de', gettext('German')), ) FROIDE_THEME = 'fragdenstaat_de.theme' @property def INSTALLED_APPS(self): installed = super(FragDenStaatBase, self).INSTALLED_APPS installed += [ 'celery_haystack', 'djcelery_email', 'django.contrib.redirects', 'tinymce' ] return installed @property def GEOIP_PATH(self): return os.path.join(super(FragDenStaatBase, self).PROJECT_ROOT, '..', 'data') TINYMCE_DEFAULT_CONFIG = { 'plugins': "table,spellchecker,paste,searchreplace", 'theme': "advanced", 'cleanup_on_startup': False } MIDDLEWARE_CLASSES = [ 'django.contrib.sessions.middleware.SessionMiddleware', 'fragdenstaat_de.theme.ilf_middleware.CsrfViewIlfMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.common.CommonMiddleware', 'django.contrib.flatpages.middleware.FlatpageFallbackMiddleware', 'django.contrib.redirects.middleware.RedirectFallbackMiddleware', 'froide.account.middleware.AcceptNewTermsMiddleware', ] CACHES = { 'default': { 'LOCATION': 'unique-snowflake', 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache' } } # ######## Celery Haystack ######## # Experimental feature to update index after 60s CELERY_HAYSTACK_COUNTDOWN = 60 ########## Debug ########### HAYSTACK_CONNECTIONS = { 'default': { 'ENGINE': 'haystack.backends.solr_backend.SolrEngine', 'URL': 'http://127.0.0.1:8983/solr/fragdenstaat' } } HAYSTACK_SIGNAL_PROCESSOR = 'celery_haystack.signals.CelerySignalProcessor' SITE_NAME = "FragDenStaat.de" SITE_EMAIL = "info@fragdenstaat.de" SITE_URL = 'http://localhost:8000' SECRET_URLS = { "admin": "admin", } DEFAULT_FROM_EMAIL = 'info@fragdenstaat.de' EMAIL_SUBJECT_PREFIX = '[AdminFragDenStaat] ' EMAIL_BACKEND = 'djcelery_email.backends.CeleryEmailBackend' CELERY_EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend' CELERY_EMAIL_TASK_CONFIG = { 'max_retries': None, 'ignore_result': False, 'acks_late': True, 'store_errors_even_if_ignored': True } # Fig broker setup if 'BROKER_1_PORT' in os.environ: BROKER_URL = 'amqp://guest:**@%s/' % os.environ['BROKER_1_PORT'].replace('tcp://', '') @property def FROIDE_CONFIG(self): config = super(FragDenStaatBase, self).FROIDE_CONFIG config.update(dict( create_new_publicbody=False, publicbody_empty=True, user_can_hide_web=True, public_body_officials_public=True, public_body_officials_email_public=False, default_law=2, doc_conversion_binary="/usr/bin/libreoffice", dryrun=False, dryrun_domain="fragdenstaat.stefanwehrmeyer.com", allow_pseudonym=True, api_activated=True, search_engine_query='http://www.google.de/search?as_q=%(query)s&as_epq=&as_oq=&as_eq=&hl=en&lr=&cr=&as_ft=i&as_filetype=&as_qdr=all&as_occt=any&as_dt=i&as_sitesearch=%(domain)s&as_rights=&safe=images', show_public_body_employee_name=False, greetings=[rec(u"Sehr geehrt(er? (?:Herr|Frau)(?: ?Dr\.?)?(?: ?Prof\.?)? .*)")], closings=[rec(u"[Mm]it( den)? (freundlichen|vielen|besten) Gr\xfc\xdfen,?"), rec("Hochachtungsvoll,?"), rec('i\. ?A\.'), rec('[iI]m Auftrag')] )) return config class Dev(FragDenStaatBase): pass try: from .local_settings import * # noqa except ImportError: pass
catcosmo/fragdenstaat_de
fragdenstaat_de/settings.py
Python
mit
4,056
#~~~~~~~GLOBAL IMPORTS~~~~~~~# # Standard library packages import import gzip from os import close, remove, path from multiprocessing import cpu_count from time import time from tempfile import mkstemp # Local library packages from pyDNA.Utilities import run_command, file_basename, fgunzip from BlastHit import BlastHit #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~# class Aligner(object): """ @class Aligner @brief Perform de blastn of a DNA query against a blast database. If hits are found, a list of BlastHit objects is returned. Blast+ 2.8+ needs to be install and eventually added to the path. """ #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~# #~~~~~~~FONDAMENTAL METHODS~~~~~~~# def __repr__(self): msg = "BLASTN WRAPPER\n" msg += "Blastn path : {}\n".format(self.blastn) msg += "Options : {}\n".format(self.blastn_opt) msg += repr(self.Blastdb) return msg def __str__(self): return "<Instance of {} from {} >\n".format(self.__class__.__name__, self.__module__) def __init__ (self, Blastdb, blastn_opt="", blastn="blastn", num_threads=1): """ Initialize the object and index the reference genome if necessary @param Blastdb Blast database object NewDB or ExistingDB @param blastn_opt Blastn command line options as a string @param blastn Path ot the bwa executable. If bwa if already added to your system path do not change the default value """ # Creating object variables self.blastn = blastn self.Blastdb = Blastdb self.num_threads = num_threads # init an option dict and attribute defaut options # if num_threads == 0 use all cores on the node if self.num_threads == 0: self.num_threads = cpu_count() self.blastn_opt = "{} -num_threads {} -task {} -outfmt {} -dust {} -db {}".format( blastn_opt, self.num_threads, "blastn", 6, "no", self.Blastdb.db_path) #~~~~~~~PUBLIC METHODS~~~~~~~# def align (self, query): """ Blast query against a subject database and return a list of BlastHit object @param query Path to a fasta file containing the query sequences @param evalue Cutoff used in blast to select valid hits @return A list of BlastHit objects if at least one hit was found @exception (SystemError,OSerror) May be returned by run_command in case of invalid command line """ # Build the command line string query_name = file_basename(query) # If the fasta file is compressed = extract the file in a temporary file if query[-2:].lower() == "gz": print ("Extracting the compressed fasta in a temporary file") fd, tmp_path = mkstemp() fgunzip (in_path=query, out_path=tmp_path) blastn_opt = self.blastn_opt + " -query {}".format(tmp_path) hits_list = self._align(query_name, blastn_opt) close(fd) remove(tmp_path) return hits_list # Else just proceed by using the fasta reference else: blastn_opt = self.blastn_opt + " -query {}".format(query) return self._align(query_name, blastn_opt) def _align (self, query_name, blastn_opt): print ("Blast {} against {} database with blastn".format(query_name, file_basename (self.Blastdb.db_path))), # Build the command line string cmd = "{} {}".format(self.blastn, blastn_opt) # Run the command line without stdin and asking only stdout blast_lines = run_command(cmd, stdin=None, ret_stderr=False, ret_stdout=True).splitlines() for line in blast_lines: # Parse each result lines and create a BlastHit object h = line.split() BlastHit(h[0], h[1] , h[2], h[3], h[4], h[5], h[6], h[7], h[8], h[9], h[10], h[11]) # Sumarize the hit count in the different references print ("\t{} hits found".format(BlastHit.count_total())) # Get the list of hits from BlastHit class and reset the class list. hits_list = BlastHit.get() BlastHit.reset_list() return hits_list
a-slide/pyDNA
Blast/BlastnWrapper.py
Python
gpl-2.0
4,335
import sys secs_per_year = (60 * 60 * 24 * 365) secs_per_day = (60 * 60 * 24) secs_per_hour = (60 * 60) secs_per_minute = 60 # Takes a number of seconds and returns a list in the following format: # [seconds, minutes, hours, days, years] # Length of the returned list is variable - if the function is passed '65', [5, 1] will be returned. def split_seconds(seconds_total): seconds = int(float(seconds_total)) split_time = [] years = seconds / secs_per_year if years > 0: split_time.insert(0, years) seconds = seconds - (years * secs_per_year) days = seconds / secs_per_day if days > 0: split_time.insert(0, days) seconds = seconds - (days * secs_per_day) hours = seconds / secs_per_hour if hours > 0: split_time.insert(0, hours) seconds = seconds - (hours * secs_per_hour) minutes = seconds / secs_per_minute if minutes > 0: split_time.insert(0, minutes) seconds = seconds - (minutes * secs_per_minute) split_time.insert(0, seconds) return split_time
davidfic/linux-dash-flask
app/utility.py
Python
mit
1,073
import datetime import json import mock import pytest from django.core.urlresolvers import reverse from django.conf import settings from django.utils import timezone from crashstats.crashstats.tests.test_views import BaseTestViews, Response from crashstats.crashstats.models import CrontabberState from crashstats.supersearch.models import SuperSearch from crashstats.monitoring.views import assert_supersearch_no_errors class TestViews(BaseTestViews): def test_index(self): url = reverse('monitoring:index') response = self.client.get(url) assert response.status_code == 200 assert reverse('monitoring:crontabber_status') in response.content class TestCrontabberStatusViews(BaseTestViews): def test_crontabber_status_ok(self): def mocked_get(**options): recently = timezone.now() return { 'state': { 'job1': { 'error_count': 0, 'depends_on': [], 'last_run': recently, } } } CrontabberState.implementation().get.side_effect = mocked_get url = reverse('monitoring:crontabber_status') response = self.client.get(url) assert response.status_code == 200 assert json.loads(response.content) == {'status': 'ALLGOOD'} def test_crontabber_status_trouble(self): def mocked_get(**options): recently = timezone.now() return { 'state': { 'job1': { 'error_count': 1, 'depends_on': [], 'last_run': recently, }, 'job2': { 'error_count': 0, 'depends_on': ['job1'], 'last_run': recently, }, 'job3': { 'error_count': 0, 'depends_on': ['job2'], 'last_run': recently, }, 'job1b': { 'error_count': 0, 'depends_on': [], 'last_run': recently, }, } } CrontabberState.implementation().get.side_effect = mocked_get url = reverse('monitoring:crontabber_status') response = self.client.get(url) assert response.status_code == 200 data = json.loads(response.content) assert data['status'] == 'Broken' assert data['broken'] == ['job1'] assert data['blocked'] == ['job2', 'job3'] def test_crontabber_status_not_run_for_a_while(self): some_time_ago = ( timezone.now() - datetime.timedelta( minutes=settings.CRONTABBER_STALE_MINUTES ) ) def mocked_get(**options): return { 'state': { 'job1': { 'error_count': 0, 'depends_on': [], 'last_run': some_time_ago, }, 'job2': { 'error_count': 0, 'depends_on': ['job1'], 'last_run': some_time_ago, }, } } CrontabberState.implementation().get.side_effect = mocked_get url = reverse('monitoring:crontabber_status') response = self.client.get(url) assert response.status_code == 200 data = json.loads(response.content) assert data['status'] == 'Stale' assert data['last_run'] == some_time_ago.isoformat() def test_crontabber_status_never_run(self): def mocked_get(**options): return { 'state': {} } CrontabberState.implementation().get.side_effect = mocked_get url = reverse('monitoring:crontabber_status') response = self.client.get(url) assert response.status_code == 200 data = json.loads(response.content) assert data['status'] == 'Stale' class TestHealthcheckViews(BaseTestViews): def test_healthcheck_elb(self): url = reverse('monitoring:healthcheck') response = self.client.get(url, {'elb': 'true'}) assert response.status_code == 200 assert json.loads(response.content)['ok'] is True # This time, ignoring the results, make sure that running # this does not cause an DB queries. self.assertNumQueries( 0, self.client.get, url, {'elb': 'true'} ) @mock.patch('requests.get') @mock.patch('crashstats.monitoring.views.elasticsearch') def test_healthcheck(self, mocked_elasticsearch, rget): searches = [] def mocked_supersearch_get(**params): searches.append(params) assert params['product'] == [settings.DEFAULT_PRODUCT] assert params['_results_number'] == 1 assert params['_columns'] == ['uuid'] return { 'hits': [ {'uuid': '12345'}, ], 'facets': [], 'total': 30002, 'errors': [], } SuperSearch.implementation().get.side_effect = ( mocked_supersearch_get ) def mocked_requests_get(url, **params): return Response(True) rget.side_effect = mocked_requests_get url = reverse('monitoring:healthcheck') response = self.client.get(url) assert response.status_code == 200 assert json.loads(response.content)['ok'] is True assert len(searches) == 1 def test_assert_supersearch_errors(self): searches = [] def mocked_supersearch_get(**params): searches.append(params) assert params['product'] == [settings.DEFAULT_PRODUCT] assert params['_results_number'] == 1 assert params['_columns'] == ['uuid'] return { 'hits': [ {'uuid': '12345'}, ], 'facets': [], 'total': 320, 'errors': ['bad'], } SuperSearch.implementation().get.side_effect = ( mocked_supersearch_get ) with pytest.raises(AssertionError): assert_supersearch_no_errors() assert len(searches) == 1
Tayamarn/socorro
webapp-django/crashstats/monitoring/tests/test_views.py
Python
mpl-2.0
6,622
__author__ = 'mark' # StarbaseMini Staribus/Starinet Client for the British Astronomical Association Staribus Protocol # Copyright (C) 2015 Mark Horn # # This file is part of StarbaseMini. # # StarbaseMini is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 2 of the License, or # (at your option) any later version. # # StarbaseMini is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with StarbaseMini. If not, see <http://www.gnu.org/licenses/>. import re def check_staribus_address(address): ''' Checks Staribus address is within 001 - 253 :param address: staribus address string. :return: True or False Notes: 000 is reserved for virtual instruments such as Starinet instrument 254 is reserved for discovery 255 is reserved for address not assigned. All of which will return False. ''' if address is not str: address = str(address) if re.match('^0*([1-9][0-9]?|1[0-9]{2}|2[0-4][0-9]|25[0-3])$', address): return True else: return False
mhorn71/StarbaseMini
utilities/staribus_address.py
Python
gpl-2.0
1,396
# -*- coding: utf-8 -*- # Copyright (C) 2014-present Taiga Agile LLC # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. from django.views.debug import ExceptionReporter from django.utils.log import AdminEmailHandler from django.conf import settings from django import template from copy import copy class CustomAdminEmailHandler(AdminEmailHandler): def emit(self,record): try: request = record.request subject = '%s (%s IP): %s' % ( record.levelname, ('internal' if request.META.get('REMOTE_ADDR') in settings.INTERNAL_IPS else 'EXTERNAL'), record.getMessage() ) except Exception: subject = '%s: %s' % ( record.levelname, record.getMessage() ) request = None subject = self.format_subject(subject) # Since we add a nicely formatted traceback on our own, create a copy # of the log record without the exception data. no_exc_record = copy(record) no_exc_record.exc_info = None no_exc_record.exc_text = None if record.exc_info: exc_info = record.exc_info else: exc_info = (None, record.getMessage(), None) reporter = ExceptionReporter(request, is_email=True, *exc_info) error_message ="\n".join(reporter.get_traceback_text().strip().split("GET:")[0].splitlines()[-4:-1]) message = "%s\n\n%s" % (self.format(no_exc_record), error_message) html_message = reporter.get_traceback_html() if self.include_html else None self.send_mail(subject, message, fail_silently=True, html_message=html_message)
taigaio/taiga-back
taiga/base/utils/logs.py
Python
agpl-3.0
2,323
# -*- coding: UTF-8 -*- """ ``Gassst`` ----------------------- :Authors: Liron Levin :Affiliation: Bioinformatics core facility :Organization: National Institute of Biotechnology in the Negev, Ben Gurion University. .. Note:: This module was developed as part of a study led by Dr. Jacob Moran Gilad Short Description ~~~~~~~~~~~~~~~~~~~~~~~~~ A module for executing Gassst on a nucleotide fasta file. The search can be either on a sample fasta or on a project-wide fasta. It can use the fasta as a database or as a query. Requires ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ * fasta files in the following slot for sample-wise Gassst: * ``sample_data[<sample>]["fasta.nucl"]`` * or fasta files in the following slots for project-wise Gassst: * ``sample_data["fasta.nucl"]`` Output ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ * puts Gassst output files in the following slots for sample-wise Gassst: * ``sample_data[<sample>]["blast"]`` * ``sample_data[<sample>]["blast.nucl"]`` * puts fasta output files in the following slots for project-wise Gassst: * ``sample_data["blast"]`` * ``sample_data["blast.nucl"]`` Parameters that can be set ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .. csv-table:: :header: "Parameter", "Values", "Comments" :widths: 15, 10, 10 "scope", "project/sample", "Set if project-wide fasta.nucl file type should be used [project] the default is sample-wide fasta.nucl file type" Comments ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ * This module was tested on: ``Gassst v1.28`` * The following python packages are required: ``pandas`` * Only -d [database] or -i [query] not both * The Gassst module will generate blast like output with fields: ```"qseqid sallseqid qlen slen qstart qend sstart send length evalue sseq"`` Lines for parameter file ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ :: Step_Name: # Name of this step module: Gassst # Name of the module to use base: # Name of the step [or list of names] to run after [mast be after a fasta generating step] script_path: # Command for running the Gassst script # The Gassst module will generate blast like output with fields: # "qseqid sallseqid qlen slen qstart qend sstart send length evalue sseq" scope: # Set if project-wide fasta.nucl file type should be used [project] the default is sample-wide fasta.nucl file type qsub_params: -pe: # Number of CPUs to reserve for this analysis redirects: -h: # Max hits per query, for downstream best hit will be chosen! -i: # Only -d [database] or -i [query] not both -l: # Complexity_filter off -d: # Only -d [database] or -i [query] not both -n: # Number of CPUs running Gassst -p: # Minimum percentage of identity. Must be in the interval [0 100] References ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Rizk, Guillaume, and Dominique Lavenier. "GASSST: global alignment short sequence search tool." Bioinformatics 26.20 (2010): 2534-2540.‏ """ import os import sys import re from neatseq_flow.PLC_step import Step,AssertionExcept __author__ = "Liron Levin" __version__= "1.2.0" class Step_Gassst(Step): def step_specific_init(self): """ Called on intiation Good place for parameter testing. Wrong place for sample data testing """ self.shell = "bash" self.file_tag = ".Gassst.out" import inspect self.module_location=os.path.dirname(os.path.abspath(inspect.getsourcefile(lambda:0))) # Gassst can only work with nucleotide fasta self.params["fasta2use"] = "nucl" # Check that either db or query (not both) are set in redir_params: assert len(set(["-d","-i"]) & set(self.params["redir_params"].keys())) == 1, "In %s:\tYou must supply either 'db' or 'query'\n" % self.get_step_name() # Check that the -p argument is supplied assert "-p" in list(self.params["redir_params"].keys()), "In %s:\tYou must supply -p argument for Gassst \n" % self.get_step_name() def step_sample_initiation(self): """ A place to do initiation stages following setting of sample_data """ if "scope" in list(self.params.keys()): if self.params["scope"]=="project": self.step_sample_initiation_byproject() else: self.step_sample_initiation_bysample() else: self.step_sample_initiation_bysample() def step_sample_initiation_bysample(self): """ A place to do initiation stages following setting of sample_data This set of tests is performed for sample-level BLAST """ for sample in self.sample_data["samples"]: # Getting list of samples out of samples_hash if not "blast" in list(self.sample_data[sample].keys()): self.sample_data[sample]["blast"] = dict() for sample in self.sample_data["samples"]: # Getting list of samples out of samples_hash if not "blast.nucl" in list(self.sample_data[sample].keys()): self.sample_data[sample]["blast.nucl"] = dict() # Decide on locations of db and query if "-i" in list(self.params["redir_params"].keys()): assert "fasta.nucl" in list(self.sample_data[sample].keys()), "In %s:\tFor sample-as-DB , you need to have a fasta in the sample (sample %s).\nIf the query is a project fasta, set parameter 'scope' to 'project' \n" % (self.get_step_name(), sample) # Decide which fasta to use in Gassst: # "fasta" is not defined for the sample: assert "fasta.nucl" in list(self.sample_data[sample].keys()), "In %s:\tNo 'fasta.nucl' defined for sample %s.\nIf the query is a project fasta, use parameter 'scop: project'\n" % (self.get_step_name(),sample) pass def step_sample_initiation_byproject(self): """ A place to do initiation stages following setting of sample_data This set of tests is performed for project-level BLAST """ if not "blast" in list(self.sample_data.keys()): self.sample_data["project_data"]["blast"] = dict() if not "blast.nucl" in list(self.sample_data.keys()): self.sample_data["project_data"]["blast.nucl"] = dict() assert "fasta.nucl" in list(self.sample_data.keys()), "In %s:\tYou need a 'fasta.nucl' file defined to run Gassst.\nIf the 'fasta.nucl' files are per sample, use 'scope: sample' parameter.\n" % (self.get_step_name()) # Decide on locations of db and query if "-i" in list(self.params["redir_params"].keys()): assert "fasta.nucl" in list(self.sample_data.keys()), "In %s:\tFor sample-as-DB , you need to have a fasta.nucl in the sample .\n" % (self.get_step_name()) pass def create_spec_wrapping_up_script(self): """ Add stuff to check and agglomerate the output data """ if "scope" in list(self.params.keys()): if self.params["scope"]=="project": pass else: self.make_sample_file_index() # see definition below else: self.make_sample_file_index() # see definition below def build_scripts(self): """ This is the actual script building function """ if "scope" in list(self.params.keys()): if self.params["scope"]=="project": self.build_scripts_byproject() else: self.build_scripts_bysample() else: self.build_scripts_bysample() def build_scripts_bysample(self): """ Script building function for sample-level BLAST """ # Each iteration must define the following class variables: # spec_script_name # script for sample in self.sample_data["samples"]: # Getting list of samples out of samples_hash # Name of specific script: self.spec_script_name = self.set_spec_script_name(sample) self.script = "" # Make a dir for the current sample: sample_dir = self.make_folder_for_sample(sample) # This line should be left before every new script. It sees to local issues. # Use the dir it returns as the base_dir for this step. use_dir = self.local_start(sample_dir) # Define output filename output_filename = "".join([use_dir , sample , self.file_tag]) if "Gassst2blast.py" in os.listdir(self.module_location): self.params["redir_params"]["-m"]='0' self.script += self.get_script_const() # Define query and db files: # If db is defined by user, set the query to the correct 'fasta' if "-d" in list(self.params["redir_params"].keys()): self.script += "-i %s \\\n\t" % self.sample_data[sample]["fasta.nucl"] # If db is not defined by user, set the db to the correct blastdb, with 'fasta' # query must be set by user. assertion is made in step_specific_init() else: self.script += "-d %s \\\n\t" % self.sample_data[sample]["fasta.nucl"] self.script += "-o %s\n\n" % output_filename if "Gassst2blast.py" in os.listdir(self.module_location): self.script += "python %s \\\n\t" % os.path.join(self.module_location,"Gassst2blast.py") self.script += "-i %s \\\n\t" % output_filename self.script += "-o %s \\\n\t" % output_filename # Store BLAST result file: self.sample_data[sample]["blast"] = (sample_dir + os.path.basename(output_filename)) self.stamp_file(self.sample_data[sample]["blast"]) self.sample_data[sample]["blast.nucl"] = (sample_dir + os.path.basename(output_filename)) # Wrapping up function. Leave these lines at the end of every iteration: self.local_finish(use_dir,sample_dir) # Sees to copying local files to final destination (and other stuff) self.create_low_level_script() def build_scripts_byproject(self): """ Script building function for project-level BLAST """ # Each iteration must define the following class variables: # spec_script_name # script # Name of specific script: self.spec_script_name = self.set_spec_script_name() self.script = "" # This line should be left before every new script. It sees to local issues. # Use the dir it returns as the base_dir for this step. use_dir = self.local_start(self.base_dir) # Define output filename output_filename = "".join([use_dir , self.sample_data["Title"] , self.file_tag]) if "Gassst2blast.py" in os.listdir(self.module_location): self.params["redir_params"]["-m"]='0' self.script += self.get_script_const() # Define query and db files: # If db is defined by user, set the query to the correct 'fasta' if "-d" in list(self.params["redir_params"].keys()): self.script += "-i %s \\\n\t" % self.sample_data["project_data"]["fasta.nucl"] # If -d is not defined by user, set the -d to the correct fasta, with 'fasta2use' # -i must be set by user. assertion is made in step_specific_init() else: self.script += "-d %s \\\n\t" % self.sample_data["project_data"]["fasta.nucl"] self.script += "-o %s\n\n" % output_filename if "Gassst2blast.py" in os.listdir(self.module_location): self.script += "python %s \\\n\t" % os.path.join(self.module_location,"Gassst2blast.py") self.script += "-i %s \\\n\t" % output_filename self.script += "-o %s \\\n\t" % output_filename # Store BLAST result file: self.sample_data["project_data"]["blast"] = (self.base_dir + os.path.basename(output_filename)) self.stamp_file(self.sample_data["project_data"]["blast"]) self.sample_data["project_data"]["blast.nucl"] = (self.base_dir + os.path.basename(output_filename)) # Wrapping up function. Leave these lines at the end of every iteration: self.local_finish(use_dir,self.base_dir) # Sees to copying local files to final destination (and other stuff) self.create_low_level_script() def make_sample_file_index(self): """ Make file containing samples and target file names. This can be used by scripts called by create_spec_wrapping_up_script() to summarize the BLAST outputs. """ with open(self.base_dir + "Gassst_files_index.txt", "w") as index_fh: index_fh.write("Sample\tGassst_report\n") for sample in self.sample_data["samples"]: # Getting list of samples out of samples_hash index_fh.write("%s\t%s\n" % (sample,self.sample_data[sample]["blast"])) self.sample_data["project_data"]["BLAST_files_index"] = self.base_dir + "Gassst_files_index.txt"
bioinfo-core-BGU/neatseq-flow_modules
neatseq_flow_modules/Liron/Gassst_module/Gassst.py
Python
gpl-3.0
14,125
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.conf import settings from django_emailsupport.utils.imap import get_unread_messages, mark_seen from django_emailsupport.settings import NOTIFICATION_EMAIL_LIST from django_emailsupport.tasks import send_mail from models import Email, Submitter def sanitize_email(email): return email def notify_users(email): content = email.get_body() send_mail(email.subject, content, settings.DEFAULT_FROM_EMAIL, NOTIFICATION_EMAIL_LIST) def get_submitter_by_email(email, name): return Submitter.objects.get_or_create(address=sanitize_email(email), defaults={'name': name}) def download_and_save(): email_list = [] uid_list = [] for uid, message in get_unread_messages(): submitter_data = message.sent_from[-1] submitter, created = get_submitter_by_email(submitter_data['email'], submitter_data['name']) email = Email() email.submitter = submitter email.subject = message.subject try: email.body = message.body['plain'][-1] except IndexError: pass try: email.body_html = message.body['html'][-1] except IndexError: pass email_list.append(email) uid_list.append(uid) Email.objects.bulk_create(email_list) mark_seen(uid_list) for email in email_list: notify_users(email)
rosti-cz/django-emailsupport
django_emailsupport/processor.py
Python
mit
1,527
"""Provides PoseRegNet class that implements deep CNNs. PoseRegNet provides interface for building the CNN. PoseRegNetParams is the parametrization of these CNNs. Copyright 2015 Markus Oberweger, ICG, Graz University of Technology <oberweger@icg.tugraz.at> This file is part of DeepPrior. DeepPrior is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. DeepPrior is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with DeepPrior. If not, see <http://www.gnu.org/licenses/>. """ import theano.tensor as T from net.convpoollayer import ConvPoolLayer, ConvPoolLayerParams from net.hiddenlayer import HiddenLayer, HiddenLayerParams from net.dropoutlayer import DropoutLayer, DropoutLayerParams from net.netbase import NetBase, NetBaseParams from net.poollayer import PoolLayerParams from util.helpers import ReLU __author__ = "Markus Oberweger <oberweger@icg.tugraz.at>" __copyright__ = "Copyright 2015, ICG, Graz University of Technology, Austria" __credits__ = ["Markus Oberweger"] __license__ = "GPL" __version__ = "1.0" __maintainer__ = "Markus Oberweger" __email__ = "oberweger@icg.tugraz.at" __status__ = "Development" class PoseRegNetParams(NetBaseParams): def __init__(self, type=0, nChan=1, wIn=128, hIn=128, batchSize=128, numJoints=16, nDims=3): """ Init the parametrization :type type: int :param type: type of descr network """ super(PoseRegNetParams, self).__init__() self.batch_size = batchSize self.numJoints = numJoints self.nDims = nDims self.inputDim = (batchSize, nChan, hIn, wIn) if type == 0: # Try DeepPose CNN similar configuration self.layers.append(ConvPoolLayerParams(inputDim=(batchSize, nChan, hIn, wIn), # w,h,nChannel nFilters=8, filterDim=(5, 5), poolsize=(4, 4), activation=ReLU)) self.layers.append(ConvPoolLayerParams(inputDim=self.layers[-1].outputDim, nFilters=8, filterDim=(5, 5), poolsize=(2, 2), activation=ReLU)) self.layers.append(ConvPoolLayerParams(inputDim=self.layers[-1].outputDim, nFilters=8, filterDim=(3, 3), poolsize=(1, 1), activation=ReLU)) l3out = self.layers[-1].outputDim self.layers.append(HiddenLayerParams(inputDim=(l3out[0], l3out[1] * l3out[2] * l3out[3]), outputDim=(batchSize, 1024), activation=ReLU)) self.layers.append(DropoutLayerParams(inputDim=self.layers[-1].outputDim, outputDim=self.layers[-1].outputDim)) self.layers.append(HiddenLayerParams(inputDim=self.layers[-1].outputDim, outputDim=(batchSize, 1024), activation=ReLU)) self.layers.append(DropoutLayerParams(inputDim=self.layers[-1].outputDim, outputDim=self.layers[-1].outputDim)) self.layers.append(HiddenLayerParams(inputDim=self.layers[-1].outputDim, outputDim=(batchSize, numJoints * nDims), activation=None)) self.outputDim = self.layers[-1].outputDim elif type == 11: # Try DeepPose CNN similar configuration self.layers.append(ConvPoolLayerParams(inputDim=(batchSize, nChan, hIn, wIn), # w,h,nChannel nFilters=8, filterDim=(5, 5), poolsize=(4, 4), activation=ReLU)) self.layers.append(ConvPoolLayerParams(inputDim=self.layers[-1].outputDim, nFilters=8, filterDim=(5, 5), poolsize=(2, 2), activation=ReLU)) self.layers.append(ConvPoolLayerParams(inputDim=self.layers[-1].outputDim, nFilters=8, filterDim=(3, 3), poolsize=(1, 1), activation=ReLU)) l3out = self.layers[-1].outputDim self.layers.append(HiddenLayerParams(inputDim=(l3out[0], l3out[1] * l3out[2] * l3out[3]), outputDim=(batchSize, 1024), activation=ReLU)) self.layers.append(DropoutLayerParams(inputDim=self.layers[-1].outputDim, outputDim=self.layers[-1].outputDim)) self.layers.append(HiddenLayerParams(inputDim=self.layers[-1].outputDim, outputDim=(batchSize, 1024), activation=ReLU)) self.layers.append(DropoutLayerParams(inputDim=self.layers[-1].outputDim, outputDim=self.layers[-1].outputDim)) self.layers.append(HiddenLayerParams(inputDim=self.layers[-1].outputDim, outputDim=(batchSize, 30), activation=None)) self.layers.append(HiddenLayerParams(inputDim=self.layers[-1].outputDim, outputDim=(batchSize, numJoints * nDims), activation=None)) self.outputDim = self.layers[-1].outputDim else: raise NotImplementedError("not implemented") class PoseRegNet(NetBase): def __init__(self, rng, inputVar=None, cfgParams=None): """ :type cfgParams: DescriptorNetParams """ if cfgParams is None: raise Exception("Cannot create a Net without config parameters (ie. cfgParams==None)") if inputVar is None: inputVar = T.tensor4('x') # input variable elif isinstance(inputVar, str): inputVar = T.tensor4(inputVar) # input variable # create structure super(PoseRegNet, self).__init__(rng, inputVar, cfgParams)
moberweger/deep-prior
src/net/poseregnet.py
Python
gpl-3.0
7,467
""" Image Sequence Asset Plugins """
david-cattermole/assetQC
tests/test/standardAssets/imageSequence/__init__.py
Python
lgpl-3.0
37
# Greedy layerwise training of a 2 layer autoencoder (MLP) on Fashion MNIST # Code is based on # https://github.com/ageron/handson-ml2/blob/master/17_autoencoders_and_gans.ipynb import superimport import numpy as np import matplotlib.pyplot as plt import os figdir = "../figures" def save_fig(fname): plt.savefig(os.path.join(figdir, fname)) import tensorflow as tf from tensorflow import keras (X_train_full, y_train_full), (X_test, y_test) = keras.datasets.fashion_mnist.load_data() X_train_full = X_train_full.astype(np.float32) / 255 X_test = X_test.astype(np.float32) / 255 X_train, X_valid = X_train_full[:-5000], X_train_full[-5000:] y_train, y_valid = y_train_full[:-5000], y_train_full[-5000:] def rounded_accuracy(y_true, y_pred): return keras.metrics.binary_accuracy(tf.round(y_true), tf.round(y_pred)) def plot_image(image): plt.imshow(image, cmap="binary") plt.axis("off") def show_reconstructions(model, images=X_valid, n_images=5): reconstructions = model.predict(images[:n_images]) plt.figure(figsize=(n_images * 1.5, 3)) for image_index in range(n_images): plt.subplot(2, n_images, 1 + image_index) plot_image(images[image_index]) plt.subplot(2, n_images, 1 + n_images + image_index) plot_image(reconstructions[image_index]) def train_autoencoder(n_neurons, X_train, X_valid, loss, optimizer, n_epochs=10, output_activation=None, metrics=None): n_inputs = X_train.shape[-1] encoder = keras.models.Sequential([ keras.layers.Dense(n_neurons, activation="selu", input_shape=[n_inputs]) ]) decoder = keras.models.Sequential([ keras.layers.Dense(n_inputs, activation=output_activation), ]) autoencoder = keras.models.Sequential([encoder, decoder]) autoencoder.compile(optimizer, loss, metrics=metrics) autoencoder.fit(X_train, X_train, epochs=n_epochs, validation_data=[X_valid, X_valid]) return encoder, decoder, encoder(X_train), encoder(X_valid) tf.random.set_seed(42) np.random.seed(42) K = keras.backend X_train_flat = K.batch_flatten(X_train) # equivalent to .reshape(-1, 28 * 28) X_valid_flat = K.batch_flatten(X_valid) # Reconstruct binary image enc1, dec1, X_train_enc1, X_valid_enc1 = train_autoencoder( 100, X_train_flat, X_valid_flat, "binary_crossentropy", keras.optimizers.SGD(lr=1.5), output_activation="sigmoid", metrics=[rounded_accuracy]) # Reconstruct real-valued codes enc2, dec2, _, _ = train_autoencoder( 30, X_train_enc1, X_valid_enc1, "mse", keras.optimizers.SGD(lr=0.05), output_activation="selu") # Stack models, no fine tuning stacked_ae = keras.models.Sequential([ keras.layers.Flatten(input_shape=[28, 28]), enc1, enc2, dec2, dec1, keras.layers.Reshape([28, 28]) ]) show_reconstructions(stacked_ae) plt.show() # Fine tune stacked model end to end stacked_ae.compile(loss="binary_crossentropy", optimizer=keras.optimizers.SGD(lr=0.1), metrics=[rounded_accuracy]) history = stacked_ae.fit(X_train, X_train, epochs=10, validation_data=[X_valid, X_valid]) show_reconstructions(stacked_ae) plt.show()
probml/pyprobml
scripts/ae_layerwise_fashion_tf.py
Python
mit
3,215
import os from gi.repository import GObject, Caja class ColumnExtension(GObject.GObject, Caja.ColumnProvider, Caja.InfoProvider): def __init__(self): pass def get_columns(self): return Caja.Column(name="CajaPython::block_size_column", attribute="block_size", label="Block size", description="Get the block size"), def update_file_info(self, file): if file.get_uri_scheme() != 'file': return filename = file.get_location().get_path() file.add_string_attribute('block_size', str(os.stat(filename).st_blksize))
mate-desktop/python-caja
examples/block-size-column.py
Python
gpl-2.0
691
# # This is a Construct library which represents an # LC_CODE_SIGNATURE structure. Like all Construct # libraries, can be used for parsing or emitting # (Construct calls it 'building') # from construct import * import plistlib class PlistAdapter(Adapter): def _encode(self, obj, context): return plistlib.writePlistToString(obj) def _decode(self, obj, context): return plistlib.readPlistFromString(obj) # talk about overdesign. # magic is in the blob struct Expr = LazyBound("expr", lambda: Expr_) Blob = LazyBound("blob", lambda: Blob_) Hashes = LazyBound("hashes", lambda: Hashes_) Hashes_ = Array(lambda ctx: ctx['nSpecialSlots'] + ctx['nCodeSlots'], Bytes("hash", lambda ctx: ctx['hashSize'])) CodeDirectory = Struct("CodeDirectory", Anchor("cd_start"), UBInt32("version"), UBInt32("flags"), UBInt32("hashOffset"), UBInt32("identOffset"), UBInt32("nSpecialSlots"), UBInt32("nCodeSlots"), UBInt32("codeLimit"), UBInt8("hashSize"), UBInt8("hashType"), UBInt8("spare1"), UBInt8("pageSize"), UBInt32("spare2"), Pointer(lambda ctx: ctx['cd_start'] - 8 + ctx['identOffset'], CString('ident')), If(lambda ctx: ctx['version'] >= 0x20100, UBInt32("scatterOffset")), If(lambda ctx: ctx['version'] >= 0x20200, UBInt32("teamIDOffset")), If(lambda ctx: ctx['version'] >= 0x20200, Pointer(lambda ctx: ctx['cd_start'] - 8 + ctx['teamIDOffset'], CString('teamID'))), Pointer(lambda ctx: ctx['cd_start'] - 8 + ctx['hashOffset'] - ctx['hashSize'] * ctx['nSpecialSlots'], Hashes) ) Data = Struct("Data", UBInt32("length"), Bytes("data", lambda ctx: ctx['length']), Padding(lambda ctx: -ctx['length'] & 3), ) CertSlot = Enum(UBInt32("slot"), anchorCert=-1, leafCert=0, _default_=Pass, ) Match = Struct("Match", Enum(UBInt32("matchOp"), matchExists=0, matchEqual=1, matchContains=2, matchBeginsWith=3, matchEndsWith=4, matchLessThan=5, matchGreaterThan=6, matchLessEqual=7, matchGreaterEqual=8, ), If(lambda ctx: ctx['matchOp'] != 'matchExists', Data) ) expr_args = { 'opIdent': Data, 'opAnchorHash': Sequence("AnchorHash", CertSlot, Data), 'opInfoKeyValue': Data, 'opAnd': Sequence("And", Expr, Expr), 'opOr': Sequence("Or", Expr, Expr), 'opNot': Expr, 'opCDHash': Data, 'opInfoKeyField': Sequence("InfoKeyField", Data, Match), 'opEntitlementField': Sequence("EntitlementField", Data, Match), 'opCertField': Sequence("CertField", CertSlot, Data, Match), 'opCertGeneric': Sequence("CertGeneric", CertSlot, Data, Match), 'opTrustedCert': CertSlot, } Expr_ = Struct("Expr", Enum(UBInt32("op"), opFalse=0, opTrue=1, opIdent=2, opAppleAnchor=3, opAnchorHash=4, opInfoKeyValue=5, opAnd=6, opOr=7, opCDHash=8, opNot=9, opInfoKeyField=10, opCertField=11, opTrustedCert=12, opTrustedCerts=13, opCertGeneric=14, opAppleGenericAnchor=15, opEntitlementField=16, ), Switch("data", lambda ctx: ctx['op'], expr_args, default=Pass), ) Requirement = Struct("Requirement", Const(UBInt32("kind"), 1), Expr, ) Entitlement = Struct("Entitlement", # actually a plist PlistAdapter(Bytes("data", lambda ctx: ctx['_']['length'] - 8)), ) EntitlementsBlobIndex = Struct("BlobIndex", Enum(UBInt32("type"), kSecHostRequirementType=1, kSecGuestRequirementType=2, kSecDesignatedRequirementType=3, kSecLibraryRequirementType=4, ), UBInt32("offset"), Pointer(lambda ctx: ctx['_']['sb_start'] - 8 + ctx['offset'], Blob), ) Entitlements = Struct("Entitlements", # actually a kind of super blob Anchor("sb_start"), UBInt32("count"), Array(lambda ctx: ctx['count'], EntitlementsBlobIndex), ) BlobWrapper = Struct("BlobWrapper", OnDemand(Bytes("data", lambda ctx: ctx['_']['length'] - 8)), ) BlobIndex = Struct("BlobIndex", UBInt32("type"), UBInt32("offset"), If(lambda ctx: ctx['offset'], Pointer(lambda ctx: ctx['_']['sb_start'] - 8 + ctx['offset'], Blob)), ) SuperBlob = Struct("SuperBlob", Anchor("sb_start"), UBInt32("count"), Array(lambda ctx: ctx['count'], BlobIndex), ) Blob_ = Struct("Blob", Enum(UBInt32("magic"), CSMAGIC_REQUIREMENT=0xfade0c00, CSMAGIC_REQUIREMENTS=0xfade0c01, CSMAGIC_CODEDIRECTORY=0xfade0c02, CSMAGIC_ENTITLEMENT=0xfade7171, # actually, this is kSecCodeMagicEntitlement, and not defined in the C version CSMAGIC_BLOBWRAPPER=0xfade0b01, # and this isn't even defined in libsecurity_codesigning; it's in _utilities CSMAGIC_EMBEDDED_SIGNATURE=0xfade0cc0, CSMAGIC_DETACHED_SIGNATURE=0xfade0cc1, CSMAGIC_CODE_SIGN_DRS=0xfade0c05, _default_=Pass, ), UBInt32("length"), Peek(Switch("data", lambda ctx: ctx['magic'], {'CSMAGIC_REQUIREMENT': Requirement, 'CSMAGIC_REQUIREMENTS': Entitlements, 'CSMAGIC_CODEDIRECTORY': CodeDirectory, 'CSMAGIC_ENTITLEMENT': Entitlement, 'CSMAGIC_BLOBWRAPPER': BlobWrapper, 'CSMAGIC_EMBEDDED_SIGNATURE': SuperBlob, 'CSMAGIC_DETACHED_SIGNATURE': SuperBlob, 'CSMAGIC_CODE_SIGN_DRS': SuperBlob, })), OnDemand(Bytes('bytes', lambda ctx: ctx['length'] - 8)), )
CiNC0/Cartier
cartier-python-resign-linux/isign/macho_cs.py
Python
apache-2.0
7,340
# -*- coding: utf-8 -*- from __future__ import unicode_literals import json from django.shortcuts import render # Create your views here. from rest_framework import viewsets # from suggestions.serializers import SentenceSerializer from rest_framework.response import Response from rest_framework.views import APIView from rest_framework import status from suggester import suggester class SuggestionsViewSet(APIView): permission_classes = [] def post(self, request, *args, **kwargs): #data = json.loads(request.data)#request.POST["_content"]) query = { 'words': request.data["words"] if request.data["words"] else [], 'jargon': request.data["jargon"] if request.data["jargon"] else 'default' } suggestions = suggester.suggest(query) # suggestions = suggester.suggest(["the", "other"]) return Response(suggestions, status=status.HTTP_200_OK) # return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) class JargonsViewSet(APIView): permission_classes = [] def get(self, request, *args, **kwargs): models = [] for model in suggester.MODELS: models.append(suggester.MODELS[model]['name']) response = { 'models': models } return Response(response, status=status.HTTP_200_OK) class DefaultJargonViewSet(APIView): permission_classes = [] def get(self, request, *args, **kwargs): default_model = suggester.DEFAULT_MODEL['name'] return Response(default_model, status=status.HTTP_200_OK)
lahdo/sentence-suggester
back-end/suggestions/views.py
Python
mit
1,593
""" Copyright (c) 2015 Michael Bright and Bamboo HR LLC Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ # pylint: disable=no-member,too-few-public-methods import datetime from sqlalchemy.orm import relationship, backref from sqlalchemy import Column, String, ForeignKey, Integer, DateTime, Boolean, Text, Enum from rapid.lib import get_declarative_base from rapid.master.data.database.models.base.base_model import BaseModel from rapid.lib.constants import VcsReleaseStepType Base = get_declarative_base() class Release(BaseModel, Base): name = Column(String(255), nullable=False, index=True) date_created = Column(DateTime(), nullable=False, default=datetime.datetime.utcnow, index=True) status_id = Column(Integer, ForeignKey('statuses.id'), nullable=False, index=True) commit_id = Column(Integer, ForeignKey('commits.id'), nullable=False, index=True) integration_id = Column(Integer, ForeignKey('integrations.id'), index=True) status = relationship('Status') integration = relationship('Integration') commit = relationship('Commit', backref=backref('release', uselist=False)) details = relationship('ReleaseDetail', uselist=False, backref=backref('release')) class ReleaseDetail(BaseModel, Base): release_id = Column(Integer, ForeignKey('releases.id'), nullable=False, index=True) details = Column(Text) class StepIntegration(BaseModel, Base): step_id = Column(Integer, ForeignKey('steps.id'), nullable=False, index=True) integration_id = Column(Integer, ForeignKey('integrations.id'), nullable=False, index=True) class Step(BaseModel, Base): name = Column(String(100), nullable=False) custom_id = Column(String(25), nullable=False) status_id = Column(Integer, ForeignKey('statuses.id'), nullable=False, index=True) user_required = Column(Boolean, default=False, nullable=False) release_id = Column(Integer, ForeignKey('releases.id'), nullable=False, index=True) sort_order = Column(Integer, default=0) release = relationship("Release", lazy='subquery', backref="steps") status = relationship('Status') integrations = relationship("Integration", secondary="step_integrations") class StepUser(BaseModel, Base): step_id = Column(Integer, ForeignKey('steps.id'), nullable=False, index=True) user_id = Column(Integer, ForeignKey('users.id'), nullable=False, index=True) date_created = Column(DateTime(), nullable=False, default=datetime.datetime.utcnow) class StepUserComment(BaseModel, Base): step_user_id = Column(Integer, ForeignKey('step_users.id'), nullable=False) comment = Column(Text) class User(BaseModel, Base): name = Column(String(150), nullable=False) username = Column(String(150), nullable=False) active = Column(Boolean, default=True, nullable=False) class VcsRelease(BaseModel, Base): search_filter = Column(String(500), nullable=False) notification_id = Column(String(250), nullable=False) vcs_id = Column(Integer, ForeignKey('vcs.id'), nullable=False, index=True) auto_release = Column(Boolean, nullable=False, default=False) vcs = relationship('Vcs', lazy='subquery', backref='product_release') steps = relationship("VcsReleaseStep", backref='vcs_release') class VcsReleaseStep(BaseModel, Base): name = Column(String(250), nullable=False) custom_id = Column(String(250), nullable=False) user_required = Column(Boolean, default=False, nullable=False) sort_order = Column(Integer, default=0) type = Column(Enum(*list(map(lambda x: x.name, VcsReleaseStepType))), nullable=False, default='PRE') vcs_release_id = Column(Integer, ForeignKey('vcs_releases.id'), nullable=False, index=True) __all__ = ['Release', 'StepIntegration', 'Step', 'StepUser', 'StepUserComment', 'StepIntegration', 'User', 'VcsRelease', 'VcsReleaseStep']
BambooHR/rapid
rapid/release/data/models.py
Python
apache-2.0
4,322
"""Tests for closures.""" from pytype.tests import test_inference class ClosuresTest(test_inference.InferenceTest): """Tests for closures.""" def testBasicClosure(self): with self.Infer(""" def f(): x = 3 def g(): return x return g def caller(): return f()() caller() """, deep=False, solve_unknowns=False, extract_locals=True) as ty: self.assertTypesMatchPytd(ty, """ def f() -> function def caller() -> int """) def testClosureOnArg(self): with self.Infer(""" def f(x): def g(): return x return g def caller(): return f(3)() caller() """, deep=False, solve_unknowns=False, extract_locals=True) as ty: self.assertTypesMatchPytd(ty, """ def f(x: int) -> function def caller() -> int """) def testClosureWithArg(self): with self.Infer(""" def f(x): def g(y): return x[y] return g def caller(): return f([1.0])(0) caller() """, deep=False, solve_unknowns=False, extract_locals=True) as ty: self.assertTypesMatchPytd(ty, """ def f(x: list<float>) -> function def caller() -> float """) def testClosureSameName(self): with self.Infer(""" def f(): x = 1 y = 2 def g(): print y x = "foo" def h(): return x return h return g def caller(): return f()()() caller() """, deep=False, solve_unknowns=False, extract_locals=True) as ty: self.assertTypesMatchPytd(ty, """ def f() -> function def caller() -> str """) def testClosuresAdd(self): with self.Infer(""" def f(x): z = x+1 def g(y): return x+y+z return g def caller(): return f(1)(2) caller() """, deep=False, solve_unknowns=False, extract_locals=True) as ty: self.assertTypesMatchPytd(ty, """ def caller() -> int def f(x: int) -> function """) def testClosuresWithDefaults(self): with self.Infer(""" def f(x, y=13, z=43): def g(q, r=11): return x+y+z+q+r return g def t1(): return f(1)(1) def t2(): return f(1, 2)(1, 2) def t3(): return f(1, 2, 3)(1) t1() t2() t3() """, deep=False, solve_unknowns=False, extract_locals=True) as ty: self.assertTypesMatchPytd(ty, """ def f(x: int, ...) -> function def t1() -> int def t2() -> int def t3() -> int """) def testClosureScope(self): with self.Infer(""" def f(): x = ["foo"] def inner(): x[0] = "bar" return x return inner def g(funcptr): x = 5 def inner(): return x y = funcptr() return y def caller(): return g(f()) caller() """, deep=False, solve_unknowns=False, extract_locals=True) as ty: self.assertTypesMatchPytd(ty, """ def caller() -> list<str> def f() -> function def g(funcptr: function) -> list<str> """) def testDeepClosures(self): with self.Infer(""" def f1(a): b = a def f2(c): d = c def f3(e): f = e def f4(g): h = g return a+b+c+d+e+f+g+h return f4 return f3 return f2 def caller(): return f1(3)(4)(5)(6) caller() """, deep=False, solve_unknowns=False, extract_locals=True) as ty: self.assertTypesMatchPytd(ty, """ def f1(a: int) -> function def caller() -> int """) if __name__ == "__main__": test_inference.main()
pombredanne/pytype
pytype/tests/test_closures.py
Python
apache-2.0
3,855
from django.forms import widgets from rest_framework import serializers from app.models import Address, Company, Crime, House, Job, JobRating, JobSalary class AddressSerializer(serializers.ModelSerializer): class Meta: model = Address fields = ('street_address', 'city', 'country', 'longitude', 'latitude') class CompanySerializer(serializers.ModelSerializer): class Meta: model = Company fields = ('id', 'name') class CrimeSerializer(serializers.ModelSerializer): address = AddressSerializer() class Meta: model = Crime fields = ('id', 'address', 'incident_num', 'occurred', 'category', 'description') class HouseSerializer(serializers.ModelSerializer): address = AddressSerializer() class Meta: model = House fields = ('id', 'address', 'price', 'source') class JobSerializer(serializers.ModelSerializer): company = CompanySerializer() address = AddressSerializer() class Meta: model = Job fields = ('id', 'company', 'address', 'title', 'description') class JobRatingSerializer(serializers.ModelSerializer): job = JobSerializer() class Meta: model = JobRating fields = ('id', 'job', 'source', 'review', 'created') class JobSalarySerializer(serializers.ModelSerializer): job = JobSerializer() class Meta: model = JobRating fields = ('id', 'job', 'amount')
Ommy/Intern-Project
django/app/serializers.py
Python
mit
1,322
"""Multi-layer Perceptron """ # Authors: Issam H. Laradji <issam.laradji@gmail.com> # Andreas Mueller # Jiyuan Qian # License: BSD 3 clause import numpy as np from abc import ABCMeta, abstractmethod from scipy.optimize import fmin_l_bfgs_b import warnings from ..base import BaseEstimator, ClassifierMixin, RegressorMixin from ._base import ACTIVATIONS, DERIVATIVES, LOSS_FUNCTIONS from ._stochastic_optimizers import SGDOptimizer, AdamOptimizer from ..model_selection import train_test_split from ..externals import six from ..preprocessing import LabelBinarizer from ..utils import gen_batches, check_random_state from ..utils import shuffle from ..utils import check_array, check_X_y, column_or_1d from ..exceptions import ConvergenceWarning from ..utils.extmath import safe_sparse_dot from ..utils.validation import check_is_fitted from ..utils.multiclass import _check_partial_fit_first_call _STOCHASTIC_ALGOS = ['sgd', 'adam'] def _pack(coefs_, intercepts_): """Pack the parameters into a single vector.""" return np.hstack([l.ravel() for l in coefs_ + intercepts_]) class BaseMultilayerPerceptron(six.with_metaclass(ABCMeta, BaseEstimator)): """Base class for MLP classification and regression. Warning: This class should not be used directly. Use derived classes instead. """ @abstractmethod def __init__(self, hidden_layer_sizes, activation, algorithm, alpha, batch_size, learning_rate, learning_rate_init, power_t, max_iter, loss, shuffle, random_state, tol, verbose, warm_start, momentum, nesterovs_momentum, early_stopping, validation_fraction, beta_1, beta_2, epsilon): self.activation = activation self.algorithm = algorithm self.alpha = alpha self.batch_size = batch_size self.learning_rate = learning_rate self.learning_rate_init = learning_rate_init self.power_t = power_t self.max_iter = max_iter self.loss = loss self.hidden_layer_sizes = hidden_layer_sizes self.shuffle = shuffle self.random_state = random_state self.tol = tol self.verbose = verbose self.warm_start = warm_start self.momentum = momentum self.nesterovs_momentum = nesterovs_momentum self.early_stopping = early_stopping self.validation_fraction = validation_fraction self.beta_1 = beta_1 self.beta_2 = beta_2 self.epsilon = epsilon def _unpack(self, packed_parameters): """Extract the coefficients and intercepts from packed_parameters.""" for i in range(self.n_layers_ - 1): start, end, shape = self._coef_indptr[i] self.coefs_[i] = np.reshape(packed_parameters[start:end], shape) start, end = self._intercept_indptr[i] self.intercepts_[i] = packed_parameters[start:end] def _forward_pass(self, activations): """Perform a forward pass on the network by computing the values of the neurons in the hidden layers and the output layer. Parameters ---------- activations: list, length = n_layers - 1 The ith element of the list holds the values of the ith layer. with_output_activation : bool, default True If True, the output passes through the output activation function, which is either the softmax function or the logistic function """ hidden_activation = ACTIVATIONS[self.activation] # Iterate over the hidden layers for i in range(self.n_layers_ - 1): activations[i + 1] = safe_sparse_dot(activations[i], self.coefs_[i]) activations[i + 1] += self.intercepts_[i] # For the hidden layers if (i + 1) != (self.n_layers_ - 1): activations[i + 1] = hidden_activation(activations[i + 1]) # For the last layer output_activation = ACTIVATIONS[self.out_activation_] activations[i + 1] = output_activation(activations[i + 1]) return activations def _compute_loss_grad(self, layer, n_samples, activations, deltas, coef_grads, intercept_grads): """Compute the gradient of loss with respect to coefs and intercept for specified layer. This function does backpropagation for the specified one layer. """ coef_grads[layer] = safe_sparse_dot(activations[layer].T, deltas[layer]) coef_grads[layer] += (self.alpha * self.coefs_[layer]) coef_grads[layer] /= n_samples intercept_grads[layer] = np.mean(deltas[layer], 0) return coef_grads, intercept_grads def _loss_grad_lbfgs(self, packed_coef_inter, X, y, activations, deltas, coef_grads, intercept_grads): """Compute the MLP loss function and its corresponding derivatives with respect to the different parameters given in the initialization. Returned gradients are packed in a single vector so it can be used in l-bfgs Parameters ---------- packed_parameters : array-like A vector comprising the flattened coefficients and intercepts. X : {array-like, sparse matrix}, shape (n_samples, n_features) The input data. y : array-like, shape (n_samples,) The target values. activations: list, length = n_layers - 1 The ith element of the list holds the values of the ith layer. deltas : list, length = n_layers - 1 The ith element of the list holds the difference between the activations of the i + 1 layer and the backpropagated error. More specifically, deltas are gradients of loss with respect to z in each layer, where z = wx + b is the value of a particular layer before passing through the activation function coef_grad : list, length = n_layers - 1 The ith element contains the amount of change used to update the coefficient parameters of the ith layer in an iteration. intercept_grads : list, length = n_layers - 1 The ith element contains the amount of change used to update the intercept parameters of the ith layer in an iteration. Returns ------- loss : float grad : array-like, shape (number of nodes of all layers,) """ self._unpack(packed_coef_inter) loss, coef_grads, intercept_grads = self._backprop( X, y, activations, deltas, coef_grads, intercept_grads) self.n_iter_ += 1 grad = _pack(coef_grads, intercept_grads) return loss, grad def _backprop(self, X, y, activations, deltas, coef_grads, intercept_grads): """Compute the MLP loss function and its corresponding derivatives with respect to each parameter: weights and bias vectors. Parameters ---------- X : {array-like, sparse matrix}, shape (n_samples, n_features) The input data. y : array-like, shape (n_samples,) The target values. activations: list, length = n_layers - 1 The ith element of the list holds the values of the ith layer. deltas : list, length = n_layers - 1 The ith element of the list holds the difference between the activations of the i + 1 layer and the backpropagated error. More specifically, deltas are gradients of loss with respect to z in each layer, where z = wx + b is the value of a particular layer before passing through the activation function coef_grad : list, length = n_layers - 1 The ith element contains the amount of change used to update the coefficient parameters of the ith layer in an iteration. intercept_grads : list, length = n_layers - 1 The ith element contains the amount of change used to update the intercept parameters of the ith layer in an iteration. Returns ------- loss : float coef_grads : list, length = n_layers - 1 intercept_grads : list, length = n_layers - 1 """ n_samples = X.shape[0] # Forward propagate activations = self._forward_pass(activations) # Get loss loss_func_name = self.loss if loss_func_name == 'log_loss' and self.out_activation_ == 'logistic': loss_func_name = 'binary_log_loss' loss = LOSS_FUNCTIONS[loss_func_name](y, activations[-1]) # Add L2 regularization term to loss values = np.sum( np.array([np.dot(s.ravel(), s.ravel()) for s in self.coefs_])) loss += (0.5 * self.alpha) * values / n_samples # Backward propagate last = self.n_layers_ - 2 # The calculation of delta[last] here works with following # combinations of output activation and loss function: # sigmoid and binary cross entropy, softmax and categorical cross # entropy, and identity with squared loss deltas[last] = activations[-1] - y # Compute gradient for the last layer coef_grads, intercept_grads = self._compute_loss_grad( last, n_samples, activations, deltas, coef_grads, intercept_grads) # Iterate over the hidden layers for i in range(self.n_layers_ - 2, 0, -1): deltas[i - 1] = safe_sparse_dot(deltas[i], self.coefs_[i].T) inplace_derivative = DERIVATIVES[self.activation] inplace_derivative(activations[i], deltas[i - 1]) coef_grads, intercept_grads = self._compute_loss_grad( i - 1, n_samples, activations, deltas, coef_grads, intercept_grads) return loss, coef_grads, intercept_grads def _initialize(self, y, layer_units): # set all attributes, allocate weights etc for first call # Initialize parameters self.n_iter_ = 0 self.t_ = 0 self.n_outputs_ = y.shape[1] # Compute the number of layers self.n_layers_ = len(layer_units) # Output for regression if not isinstance(self, ClassifierMixin): self.out_activation_ = 'identity' # Output for multi class elif self.label_binarizer_.y_type_ == 'multiclass': self.out_activation_ = 'softmax' # Output for binary class and multi-label else: self.out_activation_ = 'logistic' # Initialize coefficient and intercept layers self.coefs_ = [] self.intercepts_ = [] for i in range(self.n_layers_ - 1): coef_init, intercept_init = self._init_coef(layer_units[i], layer_units[i + 1]) self.coefs_.append(coef_init) self.intercepts_.append(intercept_init) if self.algorithm in _STOCHASTIC_ALGOS: self.loss_curve_ = [] self._no_improvement_count = 0 if self.early_stopping: self.validation_scores_ = [] self.best_validation_score_ = -np.inf else: self.best_loss_ = np.inf def _init_coef(self, fan_in, fan_out): if self.activation == 'logistic': # Use the initialization method recommended by # Glorot et al. init_bound = np.sqrt(2. / (fan_in + fan_out)) elif self.activation in ('identity', 'tanh', 'relu'): init_bound = np.sqrt(6. / (fan_in + fan_out)) else: # this was caught earlier, just to make sure raise ValueError("Unknown activation function %s" % self.activation) coef_init = self._random_state.uniform(-init_bound, init_bound, (fan_in, fan_out)) intercept_init = self._random_state.uniform(-init_bound, init_bound, fan_out) return coef_init, intercept_init def _fit(self, X, y, incremental=False): # Make sure self.hidden_layer_sizes is a list hidden_layer_sizes = self.hidden_layer_sizes if not hasattr(hidden_layer_sizes, "__iter__"): hidden_layer_sizes = [hidden_layer_sizes] hidden_layer_sizes = list(hidden_layer_sizes) # Validate input parameters. self._validate_hyperparameters() if np.any(np.array(hidden_layer_sizes) <= 0): raise ValueError("hidden_layer_sizes must be > 0, got %s." % hidden_layer_sizes) X, y = self._validate_input(X, y, incremental) n_samples, n_features = X.shape # Ensure y is 2D if y.ndim == 1: y = y.reshape((-1, 1)) self.n_outputs_ = y.shape[1] layer_units = ([n_features] + hidden_layer_sizes + [self.n_outputs_]) # check random state self._random_state = check_random_state(self.random_state) if not hasattr(self, 'coefs_') or (not self.warm_start and not incremental): # First time training the model self._initialize(y, layer_units) # l-bfgs does not support mini-batches if self.algorithm == 'l-bfgs': batch_size = n_samples elif self.batch_size == 'auto': batch_size = min(200, n_samples) else: if self.batch_size < 1 or self.batch_size > n_samples: warnings.warn("Got `batch_size` less than 1 or larger than " "sample size. It is going to be clipped") batch_size = np.clip(self.batch_size, 1, n_samples) # Initialize lists activations = [X] activations.extend(np.empty((batch_size, n_fan_out)) for n_fan_out in layer_units[1:]) deltas = [np.empty_like(a_layer) for a_layer in activations] coef_grads = [np.empty((n_fan_in_, n_fan_out_)) for n_fan_in_, n_fan_out_ in zip(layer_units[:-1], layer_units[1:])] intercept_grads = [np.empty(n_fan_out_) for n_fan_out_ in layer_units[1:]] # Run the Stochastic optimization algorithm if self.algorithm in _STOCHASTIC_ALGOS: self._fit_stochastic(X, y, activations, deltas, coef_grads, intercept_grads, layer_units, incremental) # Run the LBFGS algorithm elif self.algorithm == 'l-bfgs': self._fit_lbfgs(X, y, activations, deltas, coef_grads, intercept_grads, layer_units) return self def _validate_hyperparameters(self): if not isinstance(self.shuffle, bool): raise ValueError("shuffle must be either True or False, got %s." % self.shuffle) if self.max_iter <= 0: raise ValueError("max_iter must be > 0, got %s." % self.max_iter) if self.alpha < 0.0: raise ValueError("alpha must be >= 0, got %s." % self.alpha) if (self.learning_rate in ["constant", "invscaling", "adaptive"] and self.learning_rate_init <= 0.0): raise ValueError("learning_rate_init must be > 0, got %s." % self.learning_rate) if self.momentum > 1 or self.momentum < 0: raise ValueError("momentum must be >= 0 and <= 1, got %s" % self.momentum) if not isinstance(self.nesterovs_momentum, bool): raise ValueError("nesterovs_momentum must be either True or False," " got %s." % self.nesterovs_momentum) if not isinstance(self.early_stopping, bool): raise ValueError("early_stopping must be either True or False," " got %s." % self.early_stopping) if self.validation_fraction < 0 or self.validation_fraction >= 1: raise ValueError("validation_fraction must be >= 0 and < 1, " "got %s" % self.validation_fraction) if self.beta_1 < 0 or self.beta_1 >= 1: raise ValueError("beta_1 must be >= 0 and < 1, got %s" % self.beta_1) if self.beta_2 < 0 or self.beta_2 >= 1: raise ValueError("beta_2 must be >= 0 and < 1, got %s" % self.beta_2) if self.epsilon <= 0.0: raise ValueError("epsilon must be > 0, got %s." % self.epsilon) # raise ValueError if not registered supported_activations = ('identity', 'logistic', 'tanh', 'relu') if self.activation not in supported_activations: raise ValueError("The activation '%s' is not supported. Supported " "activations are %s." % (self.activation, supported_activations)) if self.learning_rate not in ["constant", "invscaling", "adaptive"]: raise ValueError("learning rate %s is not supported. " % self.learning_rate) supported_algorithms = _STOCHASTIC_ALGOS + ["l-bfgs"] if self.algorithm not in supported_algorithms: raise ValueError("The algorithm %s is not supported. " " Expected one of: %s" % (self.algorithm, ", ".join(supported_algorithms))) def _fit_lbfgs(self, X, y, activations, deltas, coef_grads, intercept_grads, layer_units): # Store meta information for the parameters self._coef_indptr = [] self._intercept_indptr = [] start = 0 # Save sizes and indices of coefficients for faster unpacking for i in range(self.n_layers_ - 1): n_fan_in, n_fan_out = layer_units[i], layer_units[i + 1] end = start + (n_fan_in * n_fan_out) self._coef_indptr.append((start, end, (n_fan_in, n_fan_out))) start = end # Save sizes and indices of intercepts for faster unpacking for i in range(self.n_layers_ - 1): end = start + layer_units[i + 1] self._intercept_indptr.append((start, end)) start = end # Run LBFGS packed_coef_inter = _pack(self.coefs_, self.intercepts_) if self.verbose is True or self.verbose >= 1: iprint = 1 else: iprint = -1 optimal_parameters, self.loss_, d = fmin_l_bfgs_b( x0=packed_coef_inter, func=self._loss_grad_lbfgs, maxfun=self.max_iter, iprint=iprint, pgtol=self.tol, args=(X, y, activations, deltas, coef_grads, intercept_grads)) self._unpack(optimal_parameters) def _fit_stochastic(self, X, y, activations, deltas, coef_grads, intercept_grads, layer_units, incremental): if not incremental or not hasattr(self, '_optimizer'): params = self.coefs_ + self.intercepts_ if self.algorithm == 'sgd': self._optimizer = SGDOptimizer( params, self.learning_rate_init, self.learning_rate, self.momentum, self.nesterovs_momentum, self.power_t) elif self.algorithm == 'adam': self._optimizer = AdamOptimizer( params, self.learning_rate_init, self.beta_1, self.beta_2, self.epsilon) # early_stopping in partial_fit doesn't make sense early_stopping = self.early_stopping and not incremental if early_stopping: X, X_val, y, y_val = train_test_split( X, y, random_state=self._random_state, test_size=self.validation_fraction) if isinstance(self, ClassifierMixin): y_val = self.label_binarizer_.inverse_transform(y_val) else: X_val = None y_val = None n_samples = X.shape[0] if self.batch_size == 'auto': batch_size = min(200, n_samples) else: batch_size = np.clip(self.batch_size, 1, n_samples) try: for it in range(self.max_iter): X, y = shuffle(X, y, random_state=self._random_state) accumulated_loss = 0.0 for batch_slice in gen_batches(n_samples, batch_size): activations[0] = X[batch_slice] batch_loss, coef_grads, intercept_grads = self._backprop( X[batch_slice], y[batch_slice], activations, deltas, coef_grads, intercept_grads) accumulated_loss += batch_loss * (batch_slice.stop - batch_slice.start) # update weights grads = coef_grads + intercept_grads self._optimizer.update_params(grads) self.n_iter_ += 1 self.loss_ = accumulated_loss / X.shape[0] self.t_ += n_samples self.loss_curve_.append(self.loss_) if self.verbose: print("Iteration %d, loss = %.8f" % (self.n_iter_, self.loss_)) # update no_improvement_count based on training loss or # validation score according to early_stopping self._update_no_improvement_count(early_stopping, X_val, y_val) # for learning rate that needs to be updated at iteration end self._optimizer.iteration_ends(self.t_) if self._no_improvement_count > 2: # not better than last two iterations by tol. # stop or decrease learning rate if early_stopping: msg = ("Validation score did not improve more than " "tol=%f for two consecutive epochs." % self.tol) else: msg = ("Training loss did not improve more than tol=%f" " for two consecutive epochs." % self.tol) is_stopping = self._optimizer.trigger_stopping( msg, self.verbose) if is_stopping: break else: self._no_improvement_count = 0 if incremental: break if self.n_iter_ == self.max_iter: warnings.warn('Stochastic Optimizer: Maximum iterations' ' reached and the optimization hasn\'t ' 'converged yet.' % (), ConvergenceWarning) except KeyboardInterrupt: pass if early_stopping: # restore best weights self.coefs_ = self._best_coefs self.intercepts_ = self._best_intercepts def _update_no_improvement_count(self, early_stopping, X_val, y_val): if early_stopping: # compute validation score, use that for stopping self.validation_scores_.append(self.score(X_val, y_val)) if self.verbose: print("Validation score: %f" % self.validation_scores_[-1]) # update best parameters # use validation_scores_, not loss_curve_ # let's hope no-one overloads .score with mse last_valid_score = self.validation_scores_[-1] if last_valid_score < (self.best_validation_score_ + self.tol): self._no_improvement_count += 1 else: self._no_improvement_count = 0 if last_valid_score > self.best_validation_score_: self.best_validation_score_ = last_valid_score self._best_coefs = [c.copy() for c in self.coefs_] self._best_intercepts = [i.copy() for i in self.intercepts_] else: if self.loss_curve_[-1] > self.best_loss_ - self.tol: self._no_improvement_count += 1 else: self._no_improvement_count = 0 if self.loss_curve_[-1] < self.best_loss_: self.best_loss_ = self.loss_curve_[-1] def fit(self, X, y): """Fit the model to data matrix X and target y. Parameters ---------- X : {array-like, sparse matrix}, shape (n_samples, n_features) The input data. y : array-like, shape (n_samples,) The target values. Returns ------- self : returns a trained MLP model. """ return self._fit(X, y, incremental=False) @property def partial_fit(self): """Fit the model to data matrix X and target y. Parameters ---------- X : {array-like, sparse matrix}, shape (n_samples, n_features) The input data. y : array-like, shape (n_samples,) The target values. Returns ------- self : returns a trained MLP model. """ if self.algorithm not in _STOCHASTIC_ALGOS: raise AttributeError("partial_fit is only available for stochastic" " optimization algorithms. %s is not" " stochastic" % self.algorithm) return self._partial_fit def _partial_fit(self, X, y, classes=None): return self._fit(X, y, incremental=True) def _predict(self, X): """Predict using the trained model Parameters ---------- X : {array-like, sparse matrix}, shape (n_samples, n_features) The input data. Returns ------- y_pred : array-like, shape (n_samples,) or (n_samples, n_outputs) The decision function of the samples for each class in the model. """ X = check_array(X, accept_sparse=['csr', 'csc', 'coo']) # Make sure self.hidden_layer_sizes is a list hidden_layer_sizes = self.hidden_layer_sizes if not hasattr(hidden_layer_sizes, "__iter__"): hidden_layer_sizes = [hidden_layer_sizes] hidden_layer_sizes = list(hidden_layer_sizes) layer_units = [X.shape[1]] + hidden_layer_sizes + \ [self.n_outputs_] # Initialize layers activations = [X] for i in range(self.n_layers_ - 1): activations.append(np.empty((X.shape[0], layer_units[i + 1]))) # forward propagate self._forward_pass(activations) y_pred = activations[-1] return y_pred class MLPClassifier(BaseMultilayerPerceptron, ClassifierMixin): """Multi-layer Perceptron classifier. This algorithm optimizes the log-loss function using l-bfgs or gradient descent. Parameters ---------- hidden_layer_sizes : tuple, length = n_layers - 2, default (100,) The ith element represents the number of neurons in the ith hidden layer. activation : {'identity', 'logistic', 'tanh', 'relu'}, default 'relu' Activation function for the hidden layer. - 'identity', no-op activation, useful to implement linear bottleneck, returns f(x) = x - 'logistic', the logistic sigmoid function, returns f(x) = 1 / (1 + exp(-x)). - 'tanh', the hyperbolic tan function, returns f(x) = tanh(x). - 'relu', the rectified linear unit function, returns f(x) = max(0, x) algorithm : {'l-bfgs', 'sgd', 'adam'}, default 'adam' The algorithm for weight optimization. - 'l-bfgs' is an optimization algorithm in the family of quasi-Newton methods. - 'sgd' refers to stochastic gradient descent. - 'adam' refers to a stochastic gradient-based optimization algorithm proposed by Kingma, Diederik, and Jimmy Ba Note: The default algorithm 'adam' works pretty well on relatively large datasets (with thousands of training samples or more) in terms of both training time and validation score. For small datasets, however, 'l-bfgs' can converge faster and perform better. alpha : float, optional, default 0.0001 L2 penalty (regularization term) parameter. batch_size : int, optional, default 'auto' Size of minibatches for stochastic optimizers. If the algorithm is 'l-bfgs', the classifier will not use minibatch. When set to "auto", `batch_size=min(200, n_samples)` learning_rate : {'constant', 'invscaling', 'adaptive'}, default 'constant' Learning rate schedule for weight updates. - 'constant' is a constant learning rate given by 'learning_rate_init'. - 'invscaling' gradually decreases the learning rate ``learning_rate_`` at each time step 't' using an inverse scaling exponent of 'power_t'. effective_learning_rate = learning_rate_init / pow(t, power_t) - 'adaptive' keeps the learning rate constant to 'learning_rate_init' as long as training loss keeps decreasing. Each time two consecutive epochs fail to decrease training loss by at least tol, or fail to increase validation score by at least tol if 'early_stopping' is on, the current learning rate is divided by 5. Only used when ``algorithm='sgd'``. max_iter : int, optional, default 200 Maximum number of iterations. The algorithm iterates until convergence (determined by 'tol') or this number of iterations. random_state : int or RandomState, optional, default None State or seed for random number generator. shuffle : bool, optional, default True Whether to shuffle samples in each iteration. Only used when algorithm='sgd' or 'adam'. tol : float, optional, default 1e-4 Tolerance for the optimization. When the loss or score is not improving by at least tol for two consecutive iterations, unless `learning_rate` is set to 'adaptive', convergence is considered to be reached and training stops. learning_rate_init : double, optional, default 0.001 The initial learning rate used. It controls the step-size in updating the weights. Only used when algorithm='sgd' or 'adam'. power_t : double, optional, default 0.5 The exponent for inverse scaling learning rate. It is used in updating effective learning rate when the learning_rate is set to 'invscaling'. Only used when algorithm='sgd'. verbose : bool, optional, default False Whether to print progress messages to stdout. warm_start : bool, optional, default False When set to True, reuse the solution of the previous call to fit as initialization, otherwise, just erase the previous solution. momentum : float, default 0.9 Momentum for gradient descent update. Should be between 0 and 1. Only used when algorithm='sgd'. nesterovs_momentum : boolean, default True Whether to use Nesterov's momentum. Only used when algorithm='sgd' and momentum > 0. early_stopping : bool, default False Whether to use early stopping to terminate training when validation score is not improving. If set to true, it will automatically set aside 10% of training data as validation and terminate training when validation score is not improving by at least tol for two consecutive epochs. Only effective when algorithm='sgd' or 'adam' validation_fraction : float, optional, default 0.1 The proportion of training data to set aside as validation set for early stopping. Must be between 0 and 1. Only used if early_stopping is True beta_1 : float, optional, default 0.9 Exponential decay rate for estimates of first moment vector in adam, should be in [0, 1). Only used when algorithm='adam' beta_2 : float, optional, default 0.999 Exponential decay rate for estimates of second moment vector in adam, should be in [0, 1). Only used when algorithm='adam' epsilon : float, optional, default 1e-8 Value for numerical stability in adam. Only used when algorithm='adam' Attributes ---------- `classes_` : array or list of array of shape (n_classes,) Class labels for each output. `loss_` : float The current loss computed with the loss function. `label_binarizer_` : LabelBinarizer A LabelBinarizer object trained on the training set. `coefs_` : list, length n_layers - 1 The ith element in the list represents the weight matrix corresponding to layer i. `intercepts_` : list, length n_layers - 1 The ith element in the list represents the bias vector corresponding to layer i + 1. n_iter_ : int, The number of iterations the algorithm has ran. n_layers_ : int Number of layers. `n_outputs_` : int Number of outputs. `out_activation_` : string Name of the output activation function. Notes ----- MLPClassifier trains iteratively since at each time step the partial derivatives of the loss function with respect to the model parameters are computed to update the parameters. It can also have a regularization term added to the loss function that shrinks model parameters to prevent overfitting. This implementation works with data represented as dense numpy arrays or sparse scipy arrays of floating point values. References ---------- Hinton, Geoffrey E. "Connectionist learning procedures." Artificial intelligence 40.1 (1989): 185-234. Glorot, Xavier, and Yoshua Bengio. "Understanding the difficulty of training deep feedforward neural networks." International Conference on Artificial Intelligence and Statistics. 2010. He, Kaiming, et al. "Delving deep into rectifiers: Surpassing human-level performance on imagenet classification." arXiv preprint arXiv:1502.01852 (2015). Kingma, Diederik, and Jimmy Ba. "Adam: A method for stochastic optimization." arXiv preprint arXiv:1412.6980 (2014). """ def __init__(self, hidden_layer_sizes=(100,), activation="relu", algorithm='adam', alpha=0.0001, batch_size='auto', learning_rate="constant", learning_rate_init=0.001, power_t=0.5, max_iter=200, shuffle=True, random_state=None, tol=1e-4, verbose=False, warm_start=False, momentum=0.9, nesterovs_momentum=True, early_stopping=False, validation_fraction=0.1, beta_1=0.9, beta_2=0.999, epsilon=1e-8): sup = super(MLPClassifier, self) sup.__init__(hidden_layer_sizes=hidden_layer_sizes, activation=activation, algorithm=algorithm, alpha=alpha, batch_size=batch_size, learning_rate=learning_rate, learning_rate_init=learning_rate_init, power_t=power_t, max_iter=max_iter, loss='log_loss', shuffle=shuffle, random_state=random_state, tol=tol, verbose=verbose, warm_start=warm_start, momentum=momentum, nesterovs_momentum=nesterovs_momentum, early_stopping=early_stopping, validation_fraction=validation_fraction, beta_1=beta_1, beta_2=beta_2, epsilon=epsilon) self.label_binarizer_ = LabelBinarizer() def _validate_input(self, X, y, incremental): X, y = check_X_y(X, y, accept_sparse=['csr', 'csc', 'coo'], multi_output=True) if y.ndim == 2 and y.shape[1] == 1: y = column_or_1d(y, warn=True) self.label_binarizer_.fit(y) if not hasattr(self, 'classes_') or not incremental: self.classes_ = self.label_binarizer_.classes_ else: classes = self.label_binarizer_.classes_ if not np.all(np.in1d(classes, self.classes_)): raise ValueError("`y` has classes not in `self.classes_`." " `self.classes_` has %s. 'y' has %s." % (self.classes_, classes)) y = self.label_binarizer_.transform(y) return X, y def predict(self, X): """Predict using the multi-layer perceptron classifier Parameters ---------- X : {array-like, sparse matrix}, shape (n_samples, n_features) The input data. Returns ------- y : array-like, shape (n_samples,) or (n_samples, n_classes) The predicted classes. """ check_is_fitted(self, "coefs_") y_pred = self._predict(X) if self.n_outputs_ == 1: y_pred = y_pred.ravel() return self.label_binarizer_.inverse_transform(y_pred) @property def partial_fit(self): """Fit the model to data matrix X and target y. Parameters ---------- X : {array-like, sparse matrix}, shape (n_samples, n_features) The input data. y : array-like, shape (n_samples,) The target values. classes : array, shape (n_classes) Classes across all calls to partial_fit. Can be obtained via `np.unique(y_all)`, where y_all is the target vector of the entire dataset. This argument is required for the first call to partial_fit and can be omitted in the subsequent calls. Note that y doesn't need to contain all labels in `classes`. Returns ------- self : returns a trained MLP model. """ if self.algorithm not in _STOCHASTIC_ALGOS: raise AttributeError("partial_fit is only available for stochastic" " optimization algorithms. %s is not" " stochastic" % self.algorithm) return self._partial_fit def _partial_fit(self, X, y, classes=None): _check_partial_fit_first_call(self, classes) super(MLPClassifier, self)._partial_fit(X, y) return self def predict_log_proba(self, X): """Return the log of probability estimates. Parameters ---------- X : array-like, shape (n_samples, n_features) The input data. Returns ------- log_y_prob : array-like, shape (n_samples, n_classes) The predicted log-probability of the sample for each class in the model, where classes are ordered as they are in `self.classes_`. Equivalent to log(predict_proba(X)) """ y_prob = self.predict_proba(X) return np.log(y_prob, out=y_prob) def predict_proba(self, X): """Probability estimates. Parameters ---------- X : {array-like, sparse matrix}, shape (n_samples, n_features) The input data. Returns ------- y_prob : array-like, shape (n_samples, n_classes) The predicted probability of the sample for each class in the model, where classes are ordered as they are in `self.classes_`. """ check_is_fitted(self, "coefs_") y_pred = self._predict(X) if self.n_outputs_ == 1: y_pred = y_pred.ravel() if y_pred.ndim == 1: return np.vstack([1 - y_pred, y_pred]).T else: return y_pred class MLPRegressor(BaseMultilayerPerceptron, RegressorMixin): """Multi-layer Perceptron regressor. This algorithm optimizes the squared-loss using l-bfgs or gradient descent. Parameters ---------- hidden_layer_sizes : tuple, length = n_layers - 2, default (100,) The ith element represents the number of neurons in the ith hidden layer. activation : {'identity', 'logistic', 'tanh', 'relu'}, default 'relu' Activation function for the hidden layer. - 'identity', no-op activation, useful to implement linear bottleneck, returns f(x) = x - 'logistic', the logistic sigmoid function, returns f(x) = 1 / (1 + exp(-x)). - 'tanh', the hyperbolic tan function, returns f(x) = tanh(x). - 'relu', the rectified linear unit function, returns f(x) = max(0, x) algorithm : {'l-bfgs', 'sgd', 'adam'}, default 'adam' The algorithm for weight optimization. - 'l-bfgs' is an optimization algorithm in the family of quasi-Newton methods. - 'sgd' refers to stochastic gradient descent. - 'adam' refers to a stochastic gradient-based optimization algorithm proposed by Kingma, Diederik, and Jimmy Ba Note: The default algorithm 'adam' works pretty well on relatively large datasets (with thousands of training samples or more) in terms of both training time and validation score. For small datasets, however, 'l-bfgs' can converge faster and perform better. alpha : float, optional, default 0.0001 L2 penalty (regularization term) parameter. batch_size : int, optional, default 'auto' Size of minibatches for stochastic optimizers. If the algorithm is 'l-bfgs', the classifier will not use minibatch. When set to "auto", `batch_size=min(200, n_samples)` learning_rate : {'constant', 'invscaling', 'adaptive'}, default 'constant' Learning rate schedule for weight updates. - 'constant' is a constant learning rate given by 'learning_rate_init'. - 'invscaling' gradually decreases the learning rate ``learning_rate_`` at each time step 't' using an inverse scaling exponent of 'power_t'. effective_learning_rate = learning_rate_init / pow(t, power_t) - 'adaptive' keeps the learning rate constant to 'learning_rate_init' as long as training loss keeps decreasing. Each time two consecutive epochs fail to decrease training loss by at least tol, or fail to increase validation score by at least tol if 'early_stopping' is on, the current learning rate is divided by 5. Only used when algorithm='sgd'. max_iter : int, optional, default 200 Maximum number of iterations. The algorithm iterates until convergence (determined by 'tol') or this number of iterations. random_state : int or RandomState, optional, default None State or seed for random number generator. shuffle : bool, optional, default True Whether to shuffle samples in each iteration. Only used when algorithm='sgd' or 'adam'. tol : float, optional, default 1e-4 Tolerance for the optimization. When the loss or score is not improving by at least tol for two consecutive iterations, unless `learning_rate` is set to 'adaptive', convergence is considered to be reached and training stops. learning_rate_init : double, optional, default 0.001 The initial learning rate used. It controls the step-size in updating the weights. Only used when algorithm='sgd' or 'adam'. power_t : double, optional, default 0.5 The exponent for inverse scaling learning rate. It is used in updating effective learning rate when the learning_rate is set to 'invscaling'. Only used when algorithm='sgd'. verbose : bool, optional, default False Whether to print progress messages to stdout. warm_start : bool, optional, default False When set to True, reuse the solution of the previous call to fit as initialization, otherwise, just erase the previous solution. momentum : float, default 0.9 Momentum for gradient descent update. Should be between 0 and 1. Only used when algorithm='sgd'. nesterovs_momentum : boolean, default True Whether to use Nesterov's momentum. Only used when algorithm='sgd' and momentum > 0. early_stopping : bool, default False Whether to use early stopping to terminate training when validation score is not improving. If set to true, it will automatically set aside 10% of training data as validation and terminate training when validation score is not improving by at least tol for two consecutive epochs. Only effective when algorithm='sgd' or 'adam' validation_fraction : float, optional, default 0.1 The proportion of training data to set aside as validation set for early stopping. Must be between 0 and 1. Only used if early_stopping is True beta_1 : float, optional, default 0.9 Exponential decay rate for estimates of first moment vector in adam, should be in [0, 1). Only used when algorithm='adam' beta_2 : float, optional, default 0.999 Exponential decay rate for estimates of second moment vector in adam, should be in [0, 1). Only used when algorithm='adam' epsilon : float, optional, default 1e-8 Value for numerical stability in adam. Only used when algorithm='adam' Attributes ---------- `loss_` : float The current loss computed with the loss function. `coefs_` : list, length n_layers - 1 The ith element in the list represents the weight matrix corresponding to layer i. `intercepts_` : list, length n_layers - 1 The ith element in the list represents the bias vector corresponding to layer i + 1. n_iter_ : int, The number of iterations the algorithm has ran. n_layers_ : int Number of layers. `n_outputs_` : int Number of outputs. `out_activation_` : string Name of the output activation function. Notes ----- MLPRegressor trains iteratively since at each time step the partial derivatives of the loss function with respect to the model parameters are computed to update the parameters. It can also have a regularization term added to the loss function that shrinks model parameters to prevent overfitting. This implementation works with data represented as dense and sparse numpy arrays of floating point values. References ---------- Hinton, Geoffrey E. "Connectionist learning procedures." Artificial intelligence 40.1 (1989): 185-234. Glorot, Xavier, and Yoshua Bengio. "Understanding the difficulty of training deep feedforward neural networks." International Conference on Artificial Intelligence and Statistics. 2010. He, Kaiming, et al. "Delving deep into rectifiers: Surpassing human-level performance on imagenet classification." arXiv preprint arXiv:1502.01852 (2015). Kingma, Diederik, and Jimmy Ba. "Adam: A method for stochastic optimization." arXiv preprint arXiv:1412.6980 (2014). """ def __init__(self, hidden_layer_sizes=(100,), activation="relu", algorithm='adam', alpha=0.0001, batch_size='auto', learning_rate="constant", learning_rate_init=0.001, power_t=0.5, max_iter=200, shuffle=True, random_state=None, tol=1e-4, verbose=False, warm_start=False, momentum=0.9, nesterovs_momentum=True, early_stopping=False, validation_fraction=0.1, beta_1=0.9, beta_2=0.999, epsilon=1e-8): sup = super(MLPRegressor, self) sup.__init__(hidden_layer_sizes=hidden_layer_sizes, activation=activation, algorithm=algorithm, alpha=alpha, batch_size=batch_size, learning_rate=learning_rate, learning_rate_init=learning_rate_init, power_t=power_t, max_iter=max_iter, loss='squared_loss', shuffle=shuffle, random_state=random_state, tol=tol, verbose=verbose, warm_start=warm_start, momentum=momentum, nesterovs_momentum=nesterovs_momentum, early_stopping=early_stopping, validation_fraction=validation_fraction, beta_1=beta_1, beta_2=beta_2, epsilon=epsilon) def predict(self, X): """Predict using the multi-layer perceptron model. Parameters ---------- X : {array-like, sparse matrix}, shape (n_samples, n_features) The input data. Returns ------- y : array-like, shape (n_samples, n_outputs) The predicted values. """ check_is_fitted(self, "coefs_") y_pred = self._predict(X) if y_pred.shape[1] == 1: return y_pred.ravel() return y_pred def _validate_input(self, X, y, incremental): X, y = check_X_y(X, y, accept_sparse=['csr', 'csc', 'coo'], multi_output=True, y_numeric=True) if y.ndim == 2 and y.shape[1] == 1: y = column_or_1d(y, warn=True) return X, y
jblackburne/scikit-learn
sklearn/neural_network/multilayer_perceptron.py
Python
bsd-3-clause
49,978
from paste.util import doctest24 as doctest from paste.util.import_string import simple_import import os filenames = [ 'tests/test_template.txt', ] modules = [ 'paste.util.template', 'paste.util.looper', # This one opens up httpserver, which is bad: #'paste.auth.cookie', #'paste.auth.multi', #'paste.auth.digest', #'paste.auth.basic', #'paste.auth.form', #'paste.progress', 'paste.exceptions.serial_number_generator', 'paste.evalexception.evalcontext', 'paste.util.dateinterval', 'paste.util.quoting', 'paste.wsgilib', 'paste.url', 'paste.request', ] options = doctest.ELLIPSIS|doctest.REPORT_ONLY_FIRST_FAILURE def test_doctests(): for filename in filenames: filename = os.path.join( os.path.dirname(os.path.dirname(__file__)), filename) yield do_doctest, filename def do_doctest(filename): failure, total = doctest.testfile( filename, module_relative=False, optionflags=options) assert not failure, "Failure in %r" % filename def test_doctest_mods(): for module in modules: yield do_doctest_mod, module def do_doctest_mod(module): module = simple_import(module) failure, total = doctest.testmod( module, optionflags=options) assert not failure, "Failure in %r" % module if __name__ == '__main__': import sys import doctest args = sys.argv[1:] if not args: args = filenames for filename in args: doctest.testfile(filename, module_relative=False)
yongshengwang/builthue
desktop/core/ext-py/Paste-1.7.2/tests/test_doctests.py
Python
apache-2.0
1,568
""" node for 2 conv's paired together, which allows more flexible combinations of filter size and padding - specifically even filter sizes can have "same" padding """ import numpy as np import theano import theano.tensor as T import treeano import treeano.nodes as tn import canopy fX = theano.config.floatX @treeano.register_node("paired_conv") class PairedConvNode(treeano.WrapperNodeImpl): hyperparameter_names = ("inits", "filter_size", "num_filters", "conv_pad", "pad") children_container = treeano.core.DictChildrenContainerSchema( conv=treeano.core.ChildContainer, separator=treeano.core.ChildContainer, ) def architecture_children(self): children = self.raw_children() conv_node = children["conv"] separator_node = children["separator"] return [tn.SequentialNode( self.name + "_sequential", [canopy.node_utils.suffix_node(conv_node, "_1"), separator_node, canopy.node_utils.suffix_node(conv_node, "_2")])] def init_state(self, network): super(PairedConvNode, self).init_state(network) filter_size = network.find_hyperparameter(["filter_size"]) # calculate effective total filter size total_filter_size = tuple([fs * 2 - 1 for fs in filter_size]) # by default, do same padding pad = network.find_hyperparameter(["conv_pad", "pad"], "same") total_pad = tn.conv.conv_parse_pad(total_filter_size, pad) second_pad = tuple([p // 2 for p in total_pad]) first_pad = tuple([p - p2 for p, p2 in zip(total_pad, second_pad)]) conv_node_name = self.raw_children()["conv"].name network.set_hyperparameter(conv_node_name + "_1", "pad", first_pad) network.set_hyperparameter(conv_node_name + "_2", "pad", second_pad)
diogo149/treeano
treeano/sandbox/nodes/paired_conv.py
Python
apache-2.0
2,082
# -*- coding: utf-8 -*- # Dactylo -- A datasets activity streams logger # By: Emmanuel Raviart <emmanuel@raviart.com> # # Copyright (C) 2013 Etalab # http://github.com/etalab/dactylo # # This file is part of Dactylo. # # Dactylo is free software; you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # Dactylo is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. """Controllers for states""" import collections import datetime import json import logging from .. import contexts, conv, model, urls, wsgihelpers json_to_metrics = conv.pipe( conv.test_isinstance(dict), # conv.struct( # dict( # datasets = conv.pipe( # conv.test_isinstance(dict), # conv.struct( # dict( # count = conv.pipe( # conv.test_isinstance(int), # conv.test_greater_or_equal(0), # conv.not_none, # ), # weights = conv.pipe( # conv.test_isinstance(float), # conv.test_greater_or_equal(0.0), # conv.not_none, # ), # ), # ), # conv.not_none, # ), # ), # ), ) log = logging.getLogger(__name__) @wsgihelpers.wsgify def api1_get(req): ctx = contexts.Ctx(req) headers = wsgihelpers.handle_cross_origin_resource_sharing(ctx) assert req.method == 'GET', req.method params = req.GET inputs = dict( callback = params.get('callback'), context = params.get('context'), ) data, errors = conv.pipe( conv.struct( dict( callback = conv.pipe( conv.test_isinstance(basestring), conv.cleanup_line, ), context = conv.test_isinstance(basestring), ), ), )(inputs, state = ctx) if errors is not None: return wsgihelpers.respond_json(ctx, dict( apiVersion = '1.0', context = inputs['context'], error = dict( code = 400, # Bad Request errors = [ dict( location = key, message = error, ) for key, error in sorted(errors.iteritems()) ], # message will be automatically defined. ), method = req.script_name, params = inputs, url = req.url.decode('utf-8'), ), headers = headers, jsonp = inputs['callback'], ) return wsgihelpers.respond_json(ctx, collections.OrderedDict(sorted(dict( apiVersion = '1.0', context = data['context'], method = req.script_name, params = inputs, url = req.url.decode('utf-8'), value = model.metrics, ).iteritems())), headers = headers, jsonp = data['callback'], ) @wsgihelpers.wsgify def api1_set(req): ctx = contexts.Ctx(req) headers = wsgihelpers.handle_cross_origin_resource_sharing(ctx) assert req.method == 'POST', req.method inputs_converters = dict( # Shared secret between client and server api_key = conv.pipe( conv.test_isinstance(basestring), conv.input_to_token, conv.not_none, ), # For asynchronous calls context = conv.test_isinstance(basestring), # "value" is handled below. ) content_type = req.content_type if content_type is not None: content_type = content_type.split(';', 1)[0].strip() if content_type == 'application/json': inputs, error = conv.pipe( conv.make_input_to_json(), conv.test_isinstance(dict), )(req.body, state = ctx) if error is not None: return wsgihelpers.respond_json(ctx, collections.OrderedDict(sorted(dict( apiVersion = '1.0', error = collections.OrderedDict(sorted(dict( code = 400, # Bad Request errors = [error], message = ctx._(u'Invalid JSON in request POST body'), ).iteritems())), method = req.script_name, params = req.body, url = req.url.decode('utf-8'), ).iteritems())), headers = headers, ) inputs_converters.update(dict( value = conv.pipe( json_to_metrics, conv.not_none, ), )) else: # URL-encoded POST. inputs = dict(req.POST) inputs_converters.update(dict( value = conv.pipe( conv.make_input_to_json(), json_to_metrics, conv.not_none, ), )) data, errors = conv.struct(inputs_converters)(inputs, state = ctx) if errors is not None: return wsgihelpers.respond_json(ctx, collections.OrderedDict(sorted(dict( apiVersion = '1.0', context = inputs.get('context'), error = collections.OrderedDict(sorted(dict( code = 400, # Bad Request errors = [errors], message = ctx._(u'Bad parameters in request'), ).iteritems())), method = req.script_name, params = inputs, url = req.url.decode('utf-8'), ).iteritems())), headers = headers, ) api_key = data['api_key'] account = model.Account.find_one( dict( api_key = api_key, ), as_class = collections.OrderedDict, ) if account is None: return wsgihelpers.respond_json(ctx, collections.OrderedDict(sorted(dict( apiVersion = '1.0', context = data['context'], error = collections.OrderedDict(sorted(dict( code = 401, # Unauthorized message = ctx._('Unknown API Key: {}').format(api_key), ).iteritems())), method = req.script_name, params = inputs, url = req.url.decode('utf-8'), ).iteritems())), headers = headers, ) model.metrics = data['value'] message = unicode(json.dumps(model.metrics, encoding = 'utf-8', ensure_ascii = False, indent = 2)) for client in model.websocket_metrics_clients: client.send(message) return wsgihelpers.respond_json(ctx, collections.OrderedDict(sorted(dict( apiVersion = '1.0', context = data['context'], method = req.script_name, params = inputs, url = req.url.decode('utf-8'), value = model.metrics, ).iteritems())), headers = headers, ) @wsgihelpers.wsgify def api1_test(req): ctx = contexts.Ctx(req) headers = wsgihelpers.handle_cross_origin_resource_sharing(ctx) assert req.method == 'GET', req.method params = req.GET inputs = dict( callback = params.get('callback'), context = params.get('context'), ) data, errors = conv.pipe( conv.struct( dict( callback = conv.pipe( conv.test_isinstance(basestring), conv.cleanup_line, ), context = conv.test_isinstance(basestring), ), ), )(inputs, state = ctx) if errors is not None: return wsgihelpers.respond_json(ctx, dict( apiVersion = '1.0', context = inputs['context'], error = dict( code = 400, # Bad Request errors = [ dict( location = key, message = error, ) for key, error in sorted(errors.iteritems()) ], # message will be automatically defined. ), method = req.script_name, params = inputs, url = req.url.decode('utf-8'), ), headers = headers, jsonp = inputs['callback'], ) test_metrics = model.metrics.copy() test_metrics['test'] = datetime.datetime.utcnow().isoformat(), message = unicode(json.dumps(test_metrics, encoding = 'utf-8', ensure_ascii = False, indent = 2)) for client in model.websocket_metrics_clients: client.send(message) return wsgihelpers.respond_json(ctx, collections.OrderedDict(sorted(dict( apiVersion = '1.0', context = data['context'], method = req.script_name, params = inputs, url = req.url.decode('utf-8'), value = model.metrics, ).iteritems())), headers = headers, jsonp = data['callback'], ) def route_api1_class(environ, start_response): router = urls.make_router( ('GET', '^/?$', api1_get), ('GET', '^/test/?$', api1_test), ('POST', '^/?$', api1_set), ) return router(environ, start_response)
etalab/dactylo
dactylo/controllers/states.py
Python
agpl-3.0
10,349
from __future__ import unicode_literals import os from os import environ import sys import tempfile import psycopg2 import psycopg2.extras import urllib.parse as urlparse import json from oauth2client.service_account import ServiceAccountCredentials from openpyxl import Workbook, load_workbook import cmd2 from flask import Flask, request, abort from argparse import ArgumentParser import requests from linebot import ( LineBotApi, WebhookHandler, WebhookParser ) from linebot.exceptions import ( InvalidSignatureError ) from linebot.models import ( MessageEvent, TextMessage, TextSendMessage, SourceUser, SourceGroup, SourceRoom, TemplateSendMessage, ConfirmTemplate, MessageTemplateAction, ButtonsTemplate, URITemplateAction, PostbackTemplateAction, CarouselTemplate, CarouselColumn, PostbackEvent, StickerMessage, StickerSendMessage, LocationMessage, LocationSendMessage, ImageMessage, VideoMessage, AudioMessage, UnfollowEvent, FollowEvent, JoinEvent, LeaveEvent, BeaconEvent, ImageSendMessage,VideoSendMessage ) from linebot import ( LineBotApi, WebhookHandler, WebhookParser ) from linebot.exceptions import ( InvalidSignatureError ) from linebot.models import ( MessageEvent, TextMessage, TextSendMessage, SourceUser, SourceGroup, SourceRoom, TemplateSendMessage, ConfirmTemplate, MessageTemplateAction, ButtonsTemplate, URITemplateAction, PostbackTemplateAction, CarouselTemplate, CarouselColumn, PostbackEvent, StickerMessage, StickerSendMessage, LocationMessage, LocationSendMessage, ImageMessage, VideoMessage, AudioMessage, UnfollowEvent, FollowEvent, JoinEvent, LeaveEvent, BeaconEvent, ImageSendMessage,VideoSendMessage ) app = Flask(__name__) line_bot_api = LineBotApi(os.environ['channel_secret']) handler = WebhookHandler(os.environ['handler']) line_bot_api = LineBotApi(os.environ['channel_secret']) parser = WebhookParser(os.environ['handler']) url = urlparse.urlparse(os.environ['DATABASE_URL']) dbname = url.path[1:] user = url.username password = url.password host = url.hostname port = url.port conn = psycopg2.connect( dbname=dbname, user=user, password=password, host=host, port=port ) def get_prestige_for_champion(champ, sig): cur = None try: cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor) cur.execute("""SELECT prestige FROM prestige_table WHERE champ = %(champ)s AND sig = %(sig)s""", {"champ": champ, "sig": sig}) rows = cur.fetchall() for row in rows: return str(row['prestige']) # Returns a prestige value else: return None # Returns None if a champ or sig wasn't found except psycopg2.Error: conn.rollback() print("PostgreSQL Error: " + e.diag.message_primary) pass finally: if cur is not None: cur.close() class inputchamp(cmd2.Cmd): def __init__(self, line_bot_api,event,events,user): self.line_bot_api = line_bot_api self.event = event self.events=events self.user=user super(inputchamp, self).__init__() def splitname(self,line): line_bot_api=self.line_bot_api event=self.event events=self.events user=self.user print(user) eventText=event.message.text trigger="Mc3 inputchamp " s = eventText[eventText.find(trigger) + len(trigger):] # 4-nebula-4 30 pieces = s.split() # ['4-nebula-4', '30'] champ = pieces[0] sig = pieces[1] profile= line_bot_api.get_profile(user) name=(profile.display_name) print(name) print (champ) print (sig) champ_prestige = get_prestige_for_champion(champ, sig) if champ_prestige is None: line_bot_api.reply_message( event.reply_token, TextSendMessage(text="Oops! You've entered an invalid champion or signature level.")) # this breaks out of our branch without exiting the bot script cur = None try: print (champ_prestige) cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor) # get the user's information if it exists cur.execute("""SELECT lineid, summoner_name, champ_data FROM prestige_data WHERE lineid = %(lineid)s""", {"lineid":user}) rows = cur.fetchall() print (rows) # The user exists in the database and a result was returned for row in rows: lineid = row['lineid'] summoner_name = row['summoner_name'] champs = json.loads(row['champ_data']) # contains a list of the user's champs break # we should only have one result, but we'll stop just in case # The user does not exist in the database already else: lineid = user summoner_name = name champ_data = json.loads('{}') # start with an empty list of champs champs = {} # creates an empty Python list except BaseException: if cur is not None: cur.rollback() cur.close() finally: if cur is not None: cur.close() # either way, let's move on # this will make sure that the Summoner's name is always updated if their Line profile has changed summoner_name = name # add or update the user's champ champs[champ] = champ_prestige # put everything together and send it back to the database champ_data = json.dumps(champs) # Checks for an existing line ID and updates if it exists or adds if it doesn't cur = None try: cur = conn.cursor() cur.execute("""INSERT INTO prestige_data(lineid, summoner_name, champ_data) VALUES(%(lineid)s, %(summoner_name)s, %(champ_data)s) ON CONFLICT (lineid) DO UPDATE SET summoner_name = Excluded.summoner_name, champ_data = Excluded.champ_data;""", {"lineid": lineid, "summoner_name": summoner_name, "champ_data": champ_data}) conn.commit() line_bot_api.reply_message( event.reply_token, TextSendMessage(text=champ + " (" + champ_prestige + ") added")) except psycopg2.Error: conn.rollback() print("PostgreSQL Error: " + e.diag.message_primary) pass finally: if cur is not None: cur.close() def do_EOF(self, line): return True def process(self, line): return self.onecmd(line);
monhustla/line-bot-sdk-python
prestigetest.py
Python
apache-2.0
7,144
# # Solve a multi-commodity flow problem as python package. # # Implement core functionality needed to achieve modularity. # 1. Define the input data schema # 2. Define the output data schema # 3. Create a solve function that accepts a data set consistent with the input # schema and (if possible) returns a data set consistent with the output schema. # # Provides command line interface via ticdat.standard_main # For example, typing # python netflow.py -i netflow_sample_data.sql -o netflow_solution.sql # will read from the model stored in netflow_sample_data.sql # and write the solution to netflow_solution.sql from ticdat import TicDatFactory, standard_main from amplpy import AMPL from itertools import product # ------------------------ define the input schema -------------------------------- input_schema = TicDatFactory ( commodities = [["Name"],[]], nodes = [["Name"],[]], arcs = [["Source", "Destination"],["Capacity"]], cost = [["Commodity", "Source", "Destination"], ["Cost"]], inflow = [["Commodity", "Node"],["Quantity"]] ) # Define the foreign key relationships input_schema.add_foreign_key("arcs", "nodes", ['Source', 'Name']) input_schema.add_foreign_key("arcs", "nodes", ['Destination', 'Name']) input_schema.add_foreign_key("cost", "nodes", ['Source', 'Name']) input_schema.add_foreign_key("cost", "nodes", ['Destination', 'Name']) input_schema.add_foreign_key("cost", "commodities", ['Commodity', 'Name']) input_schema.add_foreign_key("inflow", "commodities", ['Commodity', 'Name']) input_schema.add_foreign_key("inflow", "nodes", ['Node', 'Name']) # Define the data types input_schema.set_data_type("arcs", "Capacity", min=0, max=float("inf"), inclusive_min=True, inclusive_max=True) input_schema.set_data_type("cost", "Cost", min=0, max=float("inf"), inclusive_min=True, inclusive_max=False) input_schema.set_data_type("inflow", "Quantity", min=-float("inf"), max=float("inf"), inclusive_min=False, inclusive_max=False) # The default-default of zero makes sense everywhere except for Capacity input_schema.set_default_value("arcs", "Capacity", float("inf")) # --------------------------------------------------------------------------------- # ------------------------ define the output schema ------------------------------- solution_schema = TicDatFactory( flow = [["Commodity", "Source", "Destination"], ["Quantity"]], parameters = [["Parameter"],["Value"]]) # --------------------------------------------------------------------------------- # ------------------------ solving section----------------------------------------- def solve(dat): """ core solving routine :param dat: a good ticdat for the input_schema :return: a good ticdat for the solution_schema, or None """ assert input_schema.good_tic_dat_object(dat) assert not input_schema.find_foreign_key_failures(dat) assert not input_schema.find_data_type_failures(dat) # copy the data over to amplpy.DataFrame objects, renaming the data fields as needed dat = input_schema.copy_to_ampl(dat, field_renamings={("arcs", "Capacity"): "capacity", ("cost", "Cost"): "cost", ("inflow", "Quantity"): "inflow"}) # for instructional purposes, the following code anticipates extreme sparsity and doesn't generate # conservation of flow records unless they are really needed ampl = AMPL() ampl.setOption('solver', 'gurobi') ampl.eval(""" set NODES; set ARCS within {i in NODES, j in NODES: i <> j}; set COMMODITIES; param capacity {ARCS} >= 0; set SHIPMENT_OPTIONS within {COMMODITIES,ARCS}; param cost {SHIPMENT_OPTIONS} > 0; set INFLOW_INDEX within {COMMODITIES,NODES}; param inflow {INFLOW_INDEX}; var Flow {SHIPMENT_OPTIONS} >= 0; minimize TotalCost: sum {(h,i,j) in SHIPMENT_OPTIONS} cost[h,i,j] * Flow[h,i,j]; subject to Capacity {(i,j) in ARCS}: sum {(h,i,j) in SHIPMENT_OPTIONS} Flow[h,i,j] <= capacity[i,j]; subject to Conservation {h in COMMODITIES, j in NODES: card {(h,i,j) in SHIPMENT_OPTIONS} > 0 or card {(h,j,i) in SHIPMENT_OPTIONS} > 0 or (h,j) in INFLOW_INDEX}: sum {(h,i,j) in SHIPMENT_OPTIONS} Flow[h,i,j] + (if (h,j) in INFLOW_INDEX then inflow[h,j]) = sum {(h,j,i) in SHIPMENT_OPTIONS} Flow[h,j,i]; """) input_schema.set_ampl_data(dat, ampl, {"nodes": "NODES", "arcs": "ARCS", "commodities": "COMMODITIES", "cost":"SHIPMENT_OPTIONS", "inflow":"INFLOW_INDEX"}) ampl.solve() if ampl.getValue("solve_result") != "infeasible": sln = solution_schema.copy_from_ampl_variables( {('flow' ,'Quantity'):ampl.getVariable("Flow")}) sln.parameters["Total Cost"] = ampl.getObjective('TotalCost').value() return sln # --------------------------------------------------------------------------------- # ------------------------ provide stand-alone functionality ---------------------- # when run from the command line, will read/write json/xls/csv/db/sql/mdb files if __name__ == "__main__": standard_main(input_schema, solution_schema, solve) # ---------------------------------------------------------------------------------
opalytics/opalytics-ticdat
examples/expert_section/ampl_original_examples/netflow/netflow.py
Python
bsd-2-clause
5,373
import json import math import urlparse from httplib import HTTPConnection, HTTPSConnection from urlparse import urlsplit from django.conf import settings from django.utils.http import is_safe_url from django.http.request import validate_host from django.contrib.auth.decorators import login_required from django.core.exceptions import ObjectDoesNotExist from django.core.serializers.json import DjangoJSONEncoder from django.http import HttpResponse, HttpResponseRedirect, HttpResponseNotAllowed, HttpResponseServerError from django.shortcuts import render_to_response, get_object_or_404, redirect from django.template import RequestContext from django.views.decorators.csrf import csrf_exempt from geonode.base.models import TopicCategory from geonode.geoserver.helpers import ogc_server_settings from geonode.layers.models import Layer from geonode.maps.models import Map, MapLayer from geonode.utils import forward_mercator, default_map_config from geonode.utils import llbbox_to_mercator from geonode.layers.views import _resolve_layer from geonode.maps.views import _resolve_map, _PERMISSION_MSG_VIEW from geonode.maps.views import snapshot_config from geonode.utils import DEFAULT_TITLE from geonode.utils import DEFAULT_ABSTRACT from .models import LayerStats from .forms import EndpointForm @csrf_exempt def proxy(request): PROXY_ALLOWED_HOSTS = getattr(settings, 'PROXY_ALLOWED_HOSTS', ()) host = None if 'geonode.geoserver' in settings.INSTALLED_APPS: from geonode.geoserver.helpers import ogc_server_settings hostname = (ogc_server_settings.hostname,) if ogc_server_settings else () PROXY_ALLOWED_HOSTS += hostname host = ogc_server_settings.netloc if 'url' not in request.GET: return HttpResponse("The proxy service requires a URL-encoded URL as a parameter.", status=400, content_type="text/plain" ) raw_url = request.GET['url'] url = urlsplit(raw_url) locator = str(url.path) if url.query != "": locator += '?' + url.query if url.fragment != "": locator += '#' + url.fragment if not settings.DEBUG: if not validate_host(url.hostname, PROXY_ALLOWED_HOSTS): return HttpResponse("DEBUG is set to False but the host of the path provided to the proxy service" " is not in the PROXY_ALLOWED_HOSTS setting.", status=403, content_type="text/plain" ) headers = {} if settings.SESSION_COOKIE_NAME in request.COOKIES and is_safe_url(url=raw_url, host=host): headers["Cookie"] = request.META["HTTP_COOKIE"] if request.method in ("POST", "PUT") and "CONTENT_TYPE" in request.META: headers["Content-Type"] = request.META["CONTENT_TYPE"] if url.scheme == 'https': conn = HTTPSConnection(url.hostname, url.port) else: conn = HTTPConnection(url.hostname, url.port) conn.request(request.method, locator, request.body, headers) result = conn.getresponse() # If we get a redirect, let's add a useful message. if result.status in (301, 302, 303, 307): response = HttpResponse(('This proxy does not support redirects. The server in "%s" ' 'asked for a redirect to "%s"' % (url, result.getheader('Location'))), status=result.status, content_type=result.getheader("Content-Type", "text/plain") ) response['Location'] = result.getheader('Location') else: response = HttpResponse( result.read(), status=result.status, content_type=result.getheader("Content-Type", "text/plain")) return response def ajax_layer_edit_check(request, layername): """ Check if the the layer style is editable. """ # TODO implement this #layer = get_object_or_404(Layer, typename=layername); #editable = request.user.has_perm("maps.change_layer", obj=layer) editable = True return HttpResponse( str(editable), status=200 if editable else 403 ) @login_required def ajax_layer_update(request, layername): """ Used to update layer bounds and gazetteer after an edit transaction. """ # TODO implement this! return HttpResponse('') @login_required def create_pg_layer(request): # TODO implement this! #return redirect('layer_upload') return HttpResponse('') @login_required def upload_layer(request): # TODO implement this! return HttpResponse('') def ajax_increment_layer_stats(request): if request.method != 'POST': return HttpResponseNotAllowed('Only POST is supported') print request.POST if request.POST['layername'] != '': layer_match = Layer.objects.filter(typename=request.POST['layername'])[:1] for l in layer_match: layerStats,created = LayerStats.objects.get_or_create(layer=l) layerStats.visits += 1 first_visit = True if request.session.get('visitlayer' + str(l.id), False): first_visit = False else: request.session['visitlayer' + str(l.id)] = True if first_visit or created: layerStats.uniques += 1 layerStats.save() return HttpResponse( status=200 ) def add_layer_wm(request): """ The view that returns the map composer opened to a given map and adds a layer on top of it. """ map_id = request.GET.get('map_id') layer_name = request.GET.get('layer_name') map_obj = _resolve_map( request, map_id, 'base.view_resourcebase', _PERMISSION_MSG_VIEW) return map_view_wm(request, str(map_obj.id), layer_name=layer_name) def map_view_wm(request, mapid, snapshot=None, layer_name=None, template='maps/map_view.html'): """ The view that returns the map composer opened to the map with the given map ID. """ map_obj = _resolve_map( request, mapid, 'base.view_resourcebase', _PERMISSION_MSG_VIEW) if 'access_token' in request.session: access_token = request.session['access_token'] else: access_token = None if snapshot is None: config = map_obj.viewer_json(request.user, access_token) else: config = snapshot_config(snapshot, map_obj, request.user, access_token) if layer_name: config = add_layers_to_map_config(request, map_obj, (layer_name, ), False) config = gxp2wm(config) return render_to_response(template, RequestContext(request, { 'config': json.dumps(config), 'map': map_obj, 'preview': getattr( settings, 'LAYER_PREVIEW_LIBRARY', '') })) def new_map_wm(request, template='maps/map_new.html'): config = new_map_config(request) config = gxp2wm(config) context_dict = { 'config': config, 'USE_GAZETTEER': settings.USE_GAZETTEER } context_dict["preview"] = getattr( settings, 'LAYER_PREVIEW_LIBRARY', '') if isinstance(config, HttpResponse): return config else: return render_to_response( template, RequestContext( request, context_dict)) def new_map_config(request): ''' View that creates a new map. If the query argument 'copy' is given, the initial map is a copy of the map with the id specified, otherwise the default map configuration is used. If copy is specified and the map specified does not exist a 404 is returned. ''' DEFAULT_MAP_CONFIG, DEFAULT_BASE_LAYERS = default_map_config(request) if 'access_token' in request.session: access_token = request.session['access_token'] else: access_token = None if request.method == 'GET' and 'copy' in request.GET: mapid = request.GET['copy'] map_obj = _resolve_map(request, mapid, 'base.view_resourcebase') map_obj.abstract = DEFAULT_ABSTRACT map_obj.title = DEFAULT_TITLE if request.user.is_authenticated(): map_obj.owner = request.user config = map_obj.viewer_json(request.user, access_token) del config['id'] else: if request.method == 'GET': params = request.GET elif request.method == 'POST': params = request.POST else: return HttpResponse(status=405) if 'layer' in params: bbox = None map_obj = Map(projection=getattr(settings, 'DEFAULT_MAP_CRS', 'EPSG:900913')) config = add_layers_to_map_config(request, map_obj, params.getlist('layer')) else: config = DEFAULT_MAP_CONFIG return json.dumps(config) def add_layers_to_map_config(request, map_obj, layer_names, add_base_layers=True): DEFAULT_MAP_CONFIG, DEFAULT_BASE_LAYERS = default_map_config(request) if 'access_token' in request.session: access_token = request.session['access_token'] else: access_token = None bbox = None layers = [] for layer_name in layer_names: try: layer = _resolve_layer(request, layer_name) except ObjectDoesNotExist: # bad layer, skip continue if not request.user.has_perm( 'view_resourcebase', obj=layer.get_self_resource()): # invisible layer, skip inclusion continue layer_bbox = layer.bbox # assert False, str(layer_bbox) if bbox is None: bbox = list(layer_bbox[0:4]) else: bbox[0] = min(bbox[0], layer_bbox[0]) bbox[1] = max(bbox[1], layer_bbox[1]) bbox[2] = min(bbox[2], layer_bbox[2]) bbox[3] = max(bbox[3], layer_bbox[3]) config = layer.attribute_config() # Add required parameters for a WM layer title = 'No title' if layer.title: title = layer.title config["title"] = title config["queryable"] = True config["srs"] = getattr( settings, 'DEFAULT_MAP_CRS', 'EPSG:900913') config["bbox"] = bbox if config["srs"] != 'EPSG:900913' \ else llbbox_to_mercator([float(coord) for coord in bbox]) if layer.storeType == "remoteStore": service = layer.service # Probably not a good idea to send the access token to every remote service. # This should never match, so no access token should be # sent to remote services. ogc_server_url = urlparse.urlsplit( ogc_server_settings.PUBLIC_LOCATION).netloc service_url = urlparse.urlsplit(service.base_url).netloc if access_token and ogc_server_url == service_url and 'access_token' not in service.base_url: url = service.base_url+'?access_token='+access_token else: url = service.base_url maplayer = MapLayer(map=map_obj, name=layer.alternate, ows_url=layer.ows_url, layer_params=json.dumps(config), visibility=True, source_params=json.dumps({ "ptype": service.ptype, "remote": True, "url": url, "name": service.name})) else: ogc_server_url = urlparse.urlsplit( ogc_server_settings.PUBLIC_LOCATION).netloc layer_url = urlparse.urlsplit(layer.ows_url).netloc if access_token and ogc_server_url == layer_url and 'access_token' not in layer.ows_url: url = layer.ows_url+'?access_token='+access_token else: url = layer.ows_url maplayer = MapLayer( map=map_obj, name=layer.alternate, ows_url=url, # use DjangoJSONEncoder to handle Decimal values layer_params=json.dumps(config, cls=DjangoJSONEncoder), visibility=True ) layers.append(maplayer) if bbox is not None: minx, miny, maxx, maxy = [float(coord) for coord in bbox] x = (minx + maxx) / 2 y = (miny + maxy) / 2 if getattr( settings, 'DEFAULT_MAP_CRS', 'EPSG:900913') == "EPSG:4326": center = list((x, y)) else: center = list(forward_mercator((x, y))) if center[1] == float('-inf'): center[1] = 0 BBOX_DIFFERENCE_THRESHOLD = 1e-5 # Check if the bbox is invalid valid_x = (maxx - minx) ** 2 > BBOX_DIFFERENCE_THRESHOLD valid_y = (maxy - miny) ** 2 > BBOX_DIFFERENCE_THRESHOLD if valid_x: width_zoom = math.log(360 / abs(maxx - minx), 2) else: width_zoom = 15 if valid_y: height_zoom = math.log(360 / abs(maxy - miny), 2) else: height_zoom = 15 map_obj.center_x = center[0] map_obj.center_y = center[1] map_obj.zoom = math.ceil(min(width_zoom, height_zoom)) map_obj.handle_moderated_uploads() if add_base_layers: layers_to_add = DEFAULT_BASE_LAYERS + layers else: layers_to_add = layers config = map_obj.viewer_json( request.user, access_token, *layers_to_add) config['fromLayer'] = True return config def gxp2wm(config): """ Convert a GeoNode map json or string config to the WorldMap client format. """ config_is_string = False # let's first see if it is a string, in which case must be converted to json if isinstance(config, basestring): config = json.loads(config) config_is_string = True topics = TopicCategory.objects.all() topicArray = [] for topic in topics: topicArray.append([topic.identifier, topic.gn_description]) topicArray.append(['General', 'General']) groups = set() config['topic_categories'] = topicArray config['proxy'] = '/proxy/?url=' # TODO check permissions here config['edit_map'] = True # 3 different layer types # # 1. background layer: group: background, ows_url: None # # 2. WM local layer: # ows_url: http://localhost:8080/geoserver/wms, # layer_params = {"selected": true, "title": "camer_hyd_basins_vm0_2007", "url": "http://localhost:8080/geoserver/wms", # "tiled": true, "detail_url": "http://worldmap.harvard.edu/data/geonode:camer_hyd_basins_vm0_2007", "local": true, # "llbbox": [-94.549682617, 9.553222656, -82.972412109, 18.762207031]} # # 3. WM remote layer (HH): # ows_url: http://192.168.33.15:8002/registry/hypermap/layer/13ff2fea-d479-4fc7-87a6-3eab7d349def/map/wmts/market/default_grid/$%7Bz%7D/$%7Bx%7D/$%7By%7D.png # layer_params = {"title": "market", "selected": true, "detail_url": "http://192.168.33.15:8002/registry/hypermap/layer/13ff2fea-d479-4fc7-87a6-3eab7d349def/", "local": false} # let's detect WM or HH layers and alter configuration as needed for layer_config in config['map']['layers']: is_wm = False is_hh = False source_id = layer_config['source'] source = config['sources'][source_id] if 'url' in source: source_url = source['url'] if settings.GEOSERVER_PUBLIC_LOCATION in source_url: if 'name' in layer_config: is_wm = True if 'registry/hypermap' in source_url: is_hh = True group = 'General' layer_config['tiled'] = True if is_wm: layer_config['local'] = True alternate = layer_config['name'] layer = Layer.objects.get(alternate=alternate) layer_config['url'] = layer.ows_url if 'styles' not in layer_config: #layer_config['styles'] = [str(unicode(style.name)) for style in layer.styles.all()] if layer.default_style: layer_config['styles'] = layer.default_style.name else: layer_config['styles'] = layer.styles.all()[0].name if layer.category: group = layer.category.gn_description layer_config["srs"] = getattr( settings, 'DEFAULT_MAP_CRS', 'EPSG:900913') bbox = layer.bbox[:-1] layer_config["bbox"] = bbox if layer_config["srs"] != 'EPSG:900913' \ else llbbox_to_mercator([float(coord) for coord in bbox]) if is_hh: layer_config['local'] = False layer_config['styles'] = '' hh_url = '%smap/wmts/%s/default_grid/${z}/${x}/${y}.png' % (layer_config['detail_url'], layer_config['name']) layer_config['url'] = hh_url if is_wm or is_hh: if 'group' not in layer_config: layer_config['group'] = group if group not in groups: groups.add(group) # TODO fix this accordingly to layer extent layer_config['llbbox'] = [-180,-90,180,90] # ml = layers.filter(name=layer_config['name']) # layer_config['url'] = ml[0].ows_url config['map']['groups'] = [] for group in groups: if group not in json.dumps(config['map']['groups']): config['map']['groups'].append({"expanded":"true", "group":group}) print json.dumps(config) if config_is_string: config = json.dumps(config) return config @login_required def add_endpoint(request): """ Let the user to add an endpoint for a remote service. """ if request.method == 'POST': endpoint_form = EndpointForm(request.POST) if endpoint_form.is_valid(): endpoint = endpoint_form.save(commit=False) endpoint.owner = request.user endpoint.save() return render_to_response( 'wm_extra/endpoint_added.html', RequestContext(request, { "endpoint": endpoint, }) ) else: logger.info('Error posting an endpoint') else: endpoint_form = EndpointForm() return render_to_response( 'wm_extra/endpoint_add.html', RequestContext(request, { "form": endpoint_form, }) ) def official_site(request, site): """ The view that returns the map composer opened to the map with the given urlsuffix site url. """ map_obj = get_object_or_404(Map,urlsuffix=site) return map_view_wm(request, str(map_obj.id))
cga-harvard/worldmap
wm_extra/views.py
Python
gpl-3.0
19,072
import sys import numpy as np from numpy import pi, sin, cos from scipy.optimize import leastsq import scipy, scipy.fftpack import six import cv2 if cv2.__version__[0] == "2": import cv2.cv as cv from pocpy.logpolar_opencv2 import * else: from pocpy.logpolar_opencv3 import * def zero_padding(src, dstshape, pos=(0, 0)): y, x = pos dst = np.zeros(dstshape) dst[y : src.shape[0] + y, x : src.shape[1] + x] = src return dst def pocfunc_model(alpha, delta1, delta2, r, u): N1, N2 = r.shape V1, V2 = list(six.moves.map(lambda x: 2 * x + 1, u)) return ( lambda n1, n2: alpha / (N1 * N2) * sin((n1 + delta1) * V1 / N1 * pi) * sin((n2 + delta2) * V2 / N2 * pi) / (sin((n1 + delta1) * pi / N1) * sin((n2 + delta2) * pi / N2)) ) def pocfunc(f, g, windowfunc=np.hanning, withlpf=False): m = np.floor(list(six.moves.map(lambda x: x / 2.0, f.shape))) u = list(six.moves.map(lambda x: x / 2.0, m)) # hanning window hy = windowfunc(f.shape[0]) hx = windowfunc(f.shape[1]) hw = hy.reshape(hy.shape[0], 1) * hx f = f * hw g = g * hw # compute 2d fft F = scipy.fftpack.fft2(f) G = scipy.fftpack.fft2(g) G_ = np.conj(G) R = F * G_ / np.abs(F * G_) if withlpf == True: R = scipy.fftpack.fftshift(R) lpf = np.ones(list(six.moves.map(lambda x: x + 1.0, m))) lpf = zero_padding(lpf, f.shape, u) R = R * lpf R = scipy.fftpack.fftshift(R) return scipy.fftpack.fftshift(np.real(scipy.fftpack.ifft2(R))) def poc(f, g, fitting_shape=(9, 9)): # compute phase-only correlation center = list(six.moves.map(lambda x: x / 2.0, f.shape)) m = np.floor(list(six.moves.map(lambda x: x / 2.0, f.shape))) u = list(six.moves.map(lambda x: x / 2.0, m)) r = pocfunc(f, g) # least-square fitting max_pos = np.argmax(r) peak = (max_pos // f.shape[1], max_pos % f.shape[1]) max_peak = r[peak[0], peak[1]] mf = list(six.moves.map(lambda x: int(x / 2), fitting_shape)) fitting_area = r[ peak[0] - mf[0] : peak[0] + mf[0] + 1, peak[1] - mf[1] : peak[1] + mf[1] + 1 ] p0 = [0.5, -(peak[0] - m[0]) - 0.02, -(peak[1] - m[1]) - 0.02] y, x = np.mgrid[-mf[0] : mf[0] + 1, -mf[1] : mf[1] + 1] y = y + peak[0] - m[0] x = x + peak[1] - m[1] errorfunction = lambda p: np.ravel( pocfunc_model(p[0], p[1], p[2], r, u)(y, x) - fitting_area ) plsq = leastsq(errorfunction, p0) return (plsq[0][0], plsq[0][1], plsq[0][2]) def ripoc(f, g, M=50, fitting_shape=(9, 9)): hy = np.hanning(f.shape[0]) hx = np.hanning(f.shape[1]) hw = hy.reshape(hy.shape[0], 1) * hx ff = f * hw gg = g * hw F = scipy.fftpack.fft2(ff) G = scipy.fftpack.fft2(gg) F = scipy.fftpack.fftshift(np.log(np.abs(F))) G = scipy.fftpack.fftshift(np.log(np.abs(G))) FLP = logpolar(F, (F.shape[0] / 2, F.shape[1] / 2), M) GLP = logpolar(G, (G.shape[0] / 2, G.shape[1] / 2), M) R = poc(FLP, GLP) angle = -R[1] / F.shape[0] * 360 scale = 1.0 - R[2] / 100 center = tuple(np.array(g.shape) / 2) rot = cv2.getRotationMatrix2D(center, -angle, 1.0 + (1.0 - scale)) g_dash = cv2.warpAffine(g, rot, (g.shape[1], g.shape[0]), flags=cv2.INTER_LANCZOS4) t = poc(f, g_dash) return (t[1], t[2], angle, scale)
daisukekobayashi/phase-only-correlation
pocpy/poc.py
Python
apache-2.0
3,384
from __future__ import absolute_import from openpyxl.descriptors import Integer, String, Typed from openpyxl.descriptors.serialisable import Serialisable from openpyxl.styles import ( Font, Fill, GradientFill, PatternFill, Border, Alignment, Protection, ) from openpyxl.xml.functions import localname, Element class NumFmt(Serialisable): numFmtId = Integer() formatCode = String() def __init__(self, numFmtId=None, formatCode=None, ): self.numFmtId = numFmtId self.formatCode = formatCode class DifferentialStyle(Serialisable): tagname = "dxf" __elements__ = ("font", "numFmt", "fill", "alignment", "border", "protection") font = Typed(expected_type=Font, allow_none=True) numFmt = Typed(expected_type=NumFmt, allow_none=True) fill = Typed(expected_type=Fill, allow_none=True) alignment = Typed(expected_type=Alignment, allow_none=True) border = Typed(expected_type=Border, allow_none=True) protection = Typed(expected_type=Protection, allow_none=True) def __init__(self, font=None, numFmt=None, fill=None, alignment=None, border=None, protection=None, extLst=None, ): self.font = font self.numFmt = numFmt self.fill = fill self.alignment = alignment self.border = border self.protection = protection self.extLst = extLst
Darthkpo/xtt
openpyxl/styles/differential.py
Python
mit
1,571
# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*- # # Copyright (C) 2015-2017 Canonical Ltd # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License version 3 as # published by the Free Software Foundation. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. import os import re import shutil import tarfile import tempfile from . import errors from ._base import FileBase class Tar(FileBase): def __init__( self, source, source_dir, source_tag=None, source_commit=None, source_branch=None, source_depth=None, source_checksum=None, ): super().__init__( source, source_dir, source_tag, source_commit, source_branch, source_depth, source_checksum, ) if source_tag: raise errors.SnapcraftSourceInvalidOptionError("tar", "source-tag") elif source_commit: raise errors.SnapcraftSourceInvalidOptionError("tar", "source-commit") elif source_branch: raise errors.SnapcraftSourceInvalidOptionError("tar", "source-branch") if source_depth: raise errors.SnapcraftSourceInvalidOptionError("tar", "source-depth") def provision(self, dst, clean_target=True, keep_tarball=False, src=None): # TODO add unit tests. if src: tarball = src else: tarball = os.path.join(self.source_dir, os.path.basename(self.source)) if clean_target: tmp_tarball = tempfile.NamedTemporaryFile().name shutil.move(tarball, tmp_tarball) shutil.rmtree(dst) os.makedirs(dst) shutil.move(tmp_tarball, tarball) self._extract(tarball, dst) if not keep_tarball: os.remove(tarball) def _extract(self, tarball, dst): with tarfile.open(tarball) as tar: def filter_members(tar): """Filters members and member names: - strips common prefix - bans dangerous names""" members = tar.getmembers() common = os.path.commonprefix([m.name for m in members]) # commonprefix() works a character at a time and will # consider "d/ab" and "d/abc" to have common prefix "d/ab"; # check all members either start with common dir for m in members: if not ( m.name.startswith(common + "/") or m.isdir() and m.name == common ): # commonprefix() didn't return a dir name; go up one # level common = os.path.dirname(common) break for m in members: if m.name == common: continue self._strip_prefix(common, m) # We mask all files to be writable to be able to easily # extract on top. m.mode = m.mode | 0o200 yield m tar.extractall(members=filter_members(tar), path=dst) def _strip_prefix(self, common, member): if member.name.startswith(common + "/"): member.name = member.name[len(common + "/") :] # strip leading '/', './' or '../' as many times as needed member.name = re.sub(r"^(\.{0,2}/)*", r"", member.name) # do the same for linkname if this is a hardlink if member.islnk() and not member.issym(): if member.linkname.startswith(common + "/"): member.linkname = member.linkname[len(common + "/") :] member.linkname = re.sub(r"^(\.{0,2}/)*", r"", member.linkname)
ubuntu-core/snapcraft
snapcraft/internal/sources/_tar.py
Python
gpl-3.0
4,273
import json import pytest from units.compat.mock import patch from ansible.modules.notification import slack from units.modules.utils import AnsibleExitJson, AnsibleFailJson, ModuleTestCase, set_module_args from ansible import module_utils class TestSlackModule(ModuleTestCase): def setUp(self): super(TestSlackModule, self).setUp() self.module = slack def tearDown(self): super(TestSlackModule, self).tearDown() @pytest.fixture def fetch_url_mock(self, mocker): return mocker.patch('ansible.module_utils.notification.slack.fetch_url') def test_without_required_parameters(self): """Failure must occurs when all parameters are missing""" with self.assertRaises(AnsibleFailJson): set_module_args({}) self.module.main() def test_invalid_old_token(self): """Failure if there is an old style token""" set_module_args({ 'token': 'test', }) with self.assertRaises(AnsibleFailJson): self.module.main() def test_sucessful_message(self): """tests sending a message. This is example 1 from the docs""" set_module_args({ 'token': 'XXXX/YYYY/ZZZZ', 'msg': 'test' }) with patch.object(slack, "fetch_url") as fetch_url_mock: fetch_url_mock.return_value = (None, {"status": 200}) with self.assertRaises(AnsibleExitJson): self.module.main() self.assertTrue(fetch_url_mock.call_count, 1) call_data = json.loads(fetch_url_mock.call_args[1]['data']) assert call_data['username'] == "Ansible" assert call_data['text'] == "test" assert fetch_url_mock.call_args[1]['url'] == "https://hooks.slack.com/services/XXXX/YYYY/ZZZZ" def test_failed_message(self): """tests failing to send a message""" set_module_args({ 'token': 'XXXX/YYYY/ZZZZ', 'msg': 'test' }) with patch.object(slack, "fetch_url") as fetch_url_mock: fetch_url_mock.return_value = (None, {"status": 404, 'msg': 'test'}) with self.assertRaises(AnsibleFailJson): self.module.main() def test_message_with_thread(self): """tests sending a message with a thread""" set_module_args({ 'token': 'XXXX/YYYY/ZZZZ', 'msg': 'test', 'thread_id': 100.00 }) with patch.object(slack, "fetch_url") as fetch_url_mock: fetch_url_mock.return_value = (None, {"status": 200}) with self.assertRaises(AnsibleExitJson): self.module.main() self.assertTrue(fetch_url_mock.call_count, 1) call_data = json.loads(fetch_url_mock.call_args[1]['data']) assert call_data['username'] == "Ansible" assert call_data['text'] == "test" assert call_data['thread_ts'] == 100.00 assert fetch_url_mock.call_args[1]['url'] == "https://hooks.slack.com/services/XXXX/YYYY/ZZZZ"
shepdelacreme/ansible
test/units/modules/notification/test_slack.py
Python
gpl-3.0
3,084
import calendar import functools import json import sys import tempfile import time import urllib import urllib2 from datetime import datetime from xml.dom import Node from xml.dom import minidom from xml.sax.saxutils import escape, quoteattr import httplib2 from sit_youtrack import youtrack def urlquote(s): return urllib.quote(utf8encode(s), safe="") def utf8encode(source): if isinstance(source, unicode): source = source.encode('utf-8') return source def relogin_on_401(f): @functools.wraps(f) def wrapped(self, *args, **kwargs): try: return f(self, *args, **kwargs) except youtrack.YouTrackException, e: if e.response.status not in (401, 403, 500): raise e self._login(*self._credentials) return f(self, *args, **kwargs) return wrapped class Connection(object): def __init__(self, url, login=None, password=None, proxy_info=None, api_key=None): self.http = httplib2.Http(disable_ssl_certificate_validation=True) if proxy_info is None else httplib2.Http( proxy_info=proxy_info, disable_ssl_certificate_validation=True) # Remove the last character of the url ends with "/" if url: url = url.rstrip('/') self.url = url self.baseUrl = url + "/rest" if api_key is None: self._credentials = (login, password) self._login(*self._credentials) else: self.headers = {'X-YouTrack-ApiKey': api_key} def _login(self, login, password): response, content = self.http.request( self.baseUrl + "/user/login?login=" + urllib.quote_plus(login) + "&password=" + urllib.quote_plus(password), 'POST', headers={'Content-Length': '0', 'Connection': 'keep-alive'}) if response.status != 200: raise youtrack.YouTrackException('/user/login', response, content) self.headers = {'Cookie': response['set-cookie'], 'Cache-Control': 'no-cache'} @relogin_on_401 def _req(self, method, url, body=None, ignoreStatus=None, content_type=None): headers = self.headers if method == 'PUT' or method == 'POST': headers = headers.copy() if content_type is None: content_type = 'application/xml; charset=UTF-8' headers['Content-Type'] = content_type headers['Content-Length'] = str(len(body)) if body else '0' response, content = self.http.request((self.baseUrl + url).encode('utf-8'), method, headers=headers, body=body) content = content.translate(None, '\0') if response.status != 200 and response.status != 201 and (ignoreStatus != response.status): raise youtrack.YouTrackException(url, response, content) return response, content def _reqXml(self, method, url, body=None, ignoreStatus=None): response, content = self._req(method, url, body, ignoreStatus) if response.has_key('content-type'): if (response["content-type"].find('application/xml') != -1 or response["content-type"].find( 'text/xml') != -1) and content is not None and content != '': try: return minidom.parseString(content) except Exception: return "" elif response['content-type'].find('application/json') != -1 and content is not None and content != '': try: return json.loads(content) except Exception: return "" if method == 'PUT' and ('location' in response.keys()): return 'Created: ' + response['location'] else: return content def _get(self, url): return self._reqXml('GET', url) def _put(self, url): return self._reqXml('PUT', url, '<empty/>\n\n') def getIssue(self, id): return youtrack.Issue(self._get("/issue/" + id), self) def createIssue(self, project, assignee, summary, description, priority=None, type=None, subsystem=None, state=None, affectsVersion=None, fixedVersion=None, fixedInBuild=None, permittedGroup=None): params = {'project': project, 'summary': summary} if description is not None: params['description'] = description if assignee is not None: params['assignee'] = assignee if priority is not None: params['priority'] = priority if type is not None: params['type'] = type if subsystem is not None: params['subsystem'] = subsystem if state is not None: params['state'] = state if affectsVersion is not None: params['affectsVersion'] = affectsVersion if fixedVersion is not None: params['fixVersion'] = fixedVersion if fixedInBuild is not None: params['fixedInBuild'] = fixedInBuild if permittedGroup is not None: params['permittedGroup'] = permittedGroup return self._req('PUT', '/issue', urllib.urlencode(params), content_type='application/x-www-form-urlencoded') def deleteIssue(self, issue_id): return self._req('DELETE', '/issue/%s' % issue_id) def get_changes_for_issue(self, issue): return [youtrack.IssueChange(change, self) for change in self._get("/issue/%s/changes" % issue).getElementsByTagName('change')] def getComments(self, id): response, content = self._req('GET', '/issue/' + id + '/comment') xml = minidom.parseString(content) return [youtrack.Comment(e, self) for e in xml.documentElement.childNodes if e.nodeType == Node.ELEMENT_NODE] def getAttachments(self, id): response, content = self._req('GET', '/issue/' + id + '/attachment') xml = minidom.parseString(content) return [youtrack.Attachment(e, self) for e in xml.documentElement.childNodes if e.nodeType == Node.ELEMENT_NODE] def getAttachmentContent(self, url): f = urllib2.urlopen(urllib2.Request(self.url + url, headers=self.headers)) return f def deleteAttachment(self, issue_id, attachment_id): return self._req('DELETE', '/issue/%s/attachment/%s' % (issue_id, attachment_id)) def createAttachmentFromAttachment(self, issueId, a): try: content = a.getContent() contentLength = None if 'content-length' in content.headers.dict: contentLength = int(content.headers.dict['content-length']) print 'Importing attachment for issue ', issueId try: print 'Name: ', utf8encode(a.name) except Exception, e: print e try: print 'Author: ', a.authorLogin except Exception, e: print e return self.importAttachment(issueId, a.name, content, a.authorLogin, contentLength=contentLength, contentType=content.info().type, created=a.created if hasattr(a, 'created') else None, group=a.group if hasattr(a, 'group') else '') except urllib2.HTTPError, e: print "Can't create attachment" try: err_content = e.read() issue_id = issueId attach_name = a.name attach_url = a.url if isinstance(err_content, unicode): err_content = err_content.encode('utf-8') if isinstance(issue_id, unicode): issue_id = issue_id.encode('utf-8') if isinstance(attach_name, unicode): attach_name = attach_name.encode('utf-8') if isinstance(attach_url, unicode): attach_url = attach_url.encode('utf-8') print "HTTP CODE: ", e.code print "REASON: ", err_content print "IssueId: ", issue_id print "Attachment filename: ", attach_name print "Attachment URL: ", attach_url except Exception: pass except Exception, e: try: print content.geturl() print content.getcode() print content.info() except Exception: pass raise e def _process_attachmnets(self, authorLogin, content, contentLength, contentType, created, group, issueId, name, url_prefix='/issue/'): if contentType is not None: content.contentType = contentType if contentLength is not None: content.contentLength = contentLength elif not isinstance(content, file): tmp = tempfile.NamedTemporaryFile(mode='w+b') tmp.write(content.read()) tmp.flush() tmp.seek(0) content = tmp # post_data = {'attachment': content} post_data = {name: content} headers = self.headers.copy() # headers['Content-Type'] = contentType # name without extension to workaround: http://youtrack.jetbrains.net/issue/JT-6110 params = { # 'name': os.path.splitext(name)[0], 'authorLogin': authorLogin.encode('utf-8'), } if group is not None: params["group"] = group if created is not None: params['created'] = created else: try: params['created'] = self.getIssue(issueId).created except youtrack.YouTrackException: params['created'] = str(calendar.timegm(datetime.now().timetuple()) * 1000) url = self.baseUrl + url_prefix + issueId + "/attachment?" + urllib.urlencode(params) r = urllib2.Request(url, headers=headers, data=post_data) # r.set_proxy('localhost:8888', 'http') try: res = urllib2.urlopen(r) except urllib2.HTTPError, e: if e.code == 201: return e.msg + ' ' + name raise e return res def createAttachment(self, issueId, name, content, authorLogin='', contentType=None, contentLength=None, created=None, group=''): return self._process_attachmnets(authorLogin, content, contentLength, contentType, created, group, issueId, name) def importAttachment(self, issue_id, name, content, authorLogin, contentType, contentLength, created=None, group=''): return self._process_attachmnets(authorLogin, content, contentLength, contentType, created, group, issue_id, name, '/import/') def getLinks(self, id, outwardOnly=False): response, content = self._req('GET', '/issue/' + urlquote(id) + '/link') xml = minidom.parseString(content) res = [] for c in [e for e in xml.documentElement.childNodes if e.nodeType == Node.ELEMENT_NODE]: link = youtrack.Link(c, self) if link.source == id or not outwardOnly: res.append(link) return res def getUser(self, login): """ http://confluence.jetbrains.net/display/YTD2/GET+user """ return youtrack.User(self._get("/admin/user/" + urlquote(login.encode('utf8'))), self) def createUser(self, user): """ user from getUser """ # self.createUserDetailed(user.login, user.fullName, user.email, user.jabber) self.importUsers([user]) def createUserDetailed(self, login, fullName, email, jabber): self.importUsers([{'login': login, 'fullName': fullName, 'email': email, 'jabber': jabber}]) # return self._put('/admin/user/' + login + '?' + # 'password=' + password + # '&fullName=' + fullName + # '&email=' + email + # '&jabber=' + jabber) def importUsers(self, users): """ Import users, returns import result (http://confluence.jetbrains.net/display/YTD2/Import+Users) Example: importUsers([{'login':'vadim', 'fullName':'vadim', 'email':'eee@ss.com', 'jabber':'fff@fff.com'}, {'login':'maxim', 'fullName':'maxim', 'email':'aaa@ss.com', 'jabber':'www@fff.com'}]) """ if len(users) <= 0: return known_attrs = ('login', 'fullName', 'email', 'jabber') xml = '<list>\n' for u in users: xml += ' <user ' + "".join(k + '=' + quoteattr(u[k]) + ' ' for k in u if k in known_attrs) + '/>\n' xml += '</list>' # TODO: convert response xml into python objects if isinstance(xml, unicode): xml = xml.encode('utf-8') return self._reqXml('PUT', '/import/users', xml, 400).toxml() def importIssuesXml(self, projectId, assigneeGroup, xml): return self._reqXml('PUT', '/import/' + urlquote(projectId) + '/issues?' + urllib.urlencode({'assigneeGroup': assigneeGroup}), xml, 400).toxml() def importLinks(self, links): """ Import links, returns import result (http://confluence.jetbrains.net/display/YTD2/Import+Links) Accepts result of getLinks() Example: importLinks([{'login':'vadim', 'fullName':'vadim', 'email':'eee@ss.com', 'jabber':'fff@fff.com'}, {'login':'maxim', 'fullName':'maxim', 'email':'aaa@ss.com', 'jabber':'www@fff.com'}]) """ xml = '<list>\n' for l in links: # ignore typeOutward and typeInward returned by getLinks() xml += ' <link ' + "".join(attr + '=' + quoteattr(l[attr]) + ' ' for attr in l if attr not in ['typeInward', 'typeOutward']) + '/>\n' xml += '</list>' # TODO: convert response xml into python objects res = self._reqXml('PUT', '/import/links', xml, 400) return res.toxml() if hasattr(res, "toxml") else res def importIssues(self, projectId, assigneeGroup, issues): """ Import issues, returns import result (http://confluence.jetbrains.net/display/YTD2/Import+Issues) Accepts retrun of getIssues() Example: importIssues([{'numberInProject':'1', 'summary':'some problem', 'description':'some description', 'priority':'1', 'fixedVersion':['1.0', '2.0'], 'comment':[{'author':'yamaxim', 'text':'comment text', 'created':'1267030230127'}]}, {'numberInProject':'2', 'summary':'some problem', 'description':'some description', 'priority':'1'}]) """ if len(issues) <= 0: return bad_fields = ['id', 'projectShortName', 'votes', 'commentsCount', 'historyUpdated', 'updatedByFullName', 'updaterFullName', 'reporterFullName', 'links', 'attachments', 'jiraId', 'entityId', 'tags'] tt_settings = self.getProjectTimeTrackingSettings(projectId) if tt_settings and tt_settings.Enabled and tt_settings.TimeSpentField: bad_fields.append(tt_settings.TimeSpentField) xml = '<issues>\n' issue_records = dict([]) for issue in issues: record = "" record += ' <issue>\n' comments = None if getattr(issue, "getComments", None): comments = issue.getComments() for issueAttr in issue: attrValue = issue[issueAttr] if attrValue is None: continue if isinstance(attrValue, unicode): attrValue = attrValue.encode('utf-8') if isinstance(issueAttr, unicode): issueAttr = issueAttr.encode('utf-8') if issueAttr == 'comments': comments = attrValue else: # ignore bad fields from getIssue() if issueAttr not in bad_fields: record += ' <field name="' + issueAttr + '">\n' if isinstance(attrValue, list) or getattr(attrValue, '__iter__', False): for v in attrValue: if isinstance(v, unicode): v = v.encode('utf-8') record += ' <value>' + escape(v.strip()) + '</value>\n' else: record += ' <value>' + escape(attrValue.strip()) + '</value>\n' record += ' </field>\n' if comments: for comment in comments: record += ' <comment' for ca in comment: val = comment[ca] if isinstance(ca, unicode): ca = ca.encode('utf-8') if isinstance(val, unicode): val = val.encode('utf-8') record += ' ' + ca + '=' + quoteattr(val) record += '/>\n' record += ' </issue>\n' xml += record issue_records[issue.numberInProject] = record xml += '</issues>' # print xml # TODO: convert response xml into python objects if isinstance(xml, unicode): xml = xml.encode('utf-8') if isinstance(assigneeGroup, unicode): assigneeGroup = assigneeGroup.encode('utf-8') url = '/import/' + urlquote(projectId) + '/issues?' + urllib.urlencode({'assigneeGroup': assigneeGroup}) if isinstance(url, unicode): url = url.encode('utf-8') result = self._reqXml('PUT', url, xml, 400) if (result == "") and (len(issues) > 1): for issue in issues: self.importIssues(projectId, assigneeGroup, [issue]) response = "" try: response = result.toxml().encode('utf-8') except: sys.stderr.write("can't parse response") sys.stderr.write("request was") sys.stderr.write(xml) return response item_elements = minidom.parseString(response).getElementsByTagName("item") if len(item_elements) != len(issues): sys.stderr.write(response) else: for item in item_elements: id = item.attributes["id"].value imported = item.attributes["imported"].value.lower() if imported == "true": print "Issue [ %s-%s ] imported successfully" % (projectId, id) else: sys.stderr.write("") sys.stderr.write("Failed to import issue [ %s-%s ]." % (projectId, id)) sys.stderr.write("Reason : ") sys.stderr.write(item.toxml()) sys.stderr.write("Request was :") if isinstance(issue_records[id], unicode): sys.stderr.write(issue_records[id].encode('utf-8')) else: sys.stderr.write(issue_records[id]) print "" return response def getProjects(self): projects = {} for e in self._get("/project/all").documentElement.childNodes: projects[e.getAttribute('shortName')] = e.getAttribute('name') return projects def getProject(self, projectId): """ http://confluence.jetbrains.net/display/YTD2/GET+project """ return youtrack.Project(self._get("/admin/project/" + urlquote(projectId)), self) def getProjectIds(self): response, content = self._req('GET', '/admin/project/') xml = minidom.parseString(content) return [e.getAttribute('id') for e in xml.documentElement.childNodes if e.nodeType == Node.ELEMENT_NODE] def getProjectAssigneeGroups(self, projectId): response, content = self._req('GET', '/admin/project/' + urlquote(projectId) + '/assignee/group') xml = minidom.parseString(content) return [youtrack.Group(e, self) for e in xml.documentElement.childNodes if e.nodeType == Node.ELEMENT_NODE] def getGroup(self, name): return youtrack.Group(self._get("/admin/group/" + urlquote(name.encode('utf-8'))), self) def getGroups(self): response, content = self._req('GET', '/admin/group') xml = minidom.parseString(content) return [youtrack.Group(e, self) for e in xml.documentElement.childNodes if e.nodeType == Node.ELEMENT_NODE] def deleteGroup(self, name): return self._req('DELETE', "/admin/group/" + urlquote(name.encode('utf-8'))) def getUserGroups(self, userName): response, content = self._req('GET', '/admin/user/%s/group' % urlquote(userName.encode('utf-8'))) xml = minidom.parseString(content) return [youtrack.Group(e, self) for e in xml.documentElement.childNodes if e.nodeType == Node.ELEMENT_NODE] def setUserGroup(self, user_name, group_name): if isinstance(user_name, unicode): user_name = user_name.encode('utf-8') if isinstance(group_name, unicode): group_name = group_name.encode('utf-8') response, content = self._req('POST', '/admin/user/%s/group/%s' % (urlquote(user_name), urlquote(group_name)), body='') return response def createGroup(self, group): content = self._put( '/admin/group/%s?autoJoin=false' % group.name.replace(' ', '%20')) return content def addUserRoleToGroup(self, group, userRole): url_group_name = urlquote(utf8encode(group.name)) url_role_name = urlquote(utf8encode(userRole.name)) response, content = self._req('PUT', '/admin/group/%s/role/%s' % (url_group_name, url_role_name), body=userRole.toXml()) return content def getRole(self, name): return youtrack.Role(self._get("/admin/role/" + urlquote(name)), self) def getRoles(self): response, content = self._req('GET', '/admin/role') xml = minidom.parseString(content) return [youtrack.Role(e, self) for e in xml.documentElement.childNodes if e.nodeType == Node.ELEMENT_NODE] def getGroupRoles(self, group_name): response, content = self._req('GET', '/admin/group/%s/role' % urlquote(group_name)) xml = minidom.parseString(content) return [youtrack.UserRole(e, self) for e in xml.documentElement.childNodes if e.nodeType == Node.ELEMENT_NODE] def createRole(self, role): url_role_name = urlquote(utf8encode(role.name)) url_role_dscr = '' if hasattr(role, 'description'): url_role_dscr = urlquote(utf8encode(role.description)) content = self._put('/admin/role/%s?description=%s' % (url_role_name, url_role_dscr)) return content def changeRole(self, role, new_name, new_description): url_role_name = urlquote(utf8encode(role.name)) url_new_name = urlquote(utf8encode(new_name)) url_new_dscr = urlquote(utf8encode(new_description)) content = self._req('POST', '/admin/role/%s?newName=%s&description=%s' % (url_role_name, url_new_name, url_new_dscr)) return content def addPermissionToRole(self, role, permission): url_role_name = urlquote(role.name) url_prm_name = urlquote(permission.name) content = self._req('POST', '/admin/role/%s/permission/%s' % (url_role_name, url_prm_name)) return content def getRolePermissions(self, role): response, content = self._req('GET', '/admin/role/%s/permission' % urlquote(role.name)) xml = minidom.parseString(content) return [youtrack.Permission(e, self) for e in xml.documentElement.childNodes if e.nodeType == Node.ELEMENT_NODE] def getPermissions(self): response, content = self._req('GET', '/admin/permission') xml = minidom.parseString(content) return [youtrack.Permission(e, self) for e in xml.documentElement.childNodes if e.nodeType == Node.ELEMENT_NODE] def getSubsystem(self, projectId, name): response, content = self._req('GET', '/admin/project/' + projectId + '/subsystem/' + urlquote(name)) xml = minidom.parseString(content) return youtrack.Subsystem(xml, self) def getSubsystems(self, projectId): response, content = self._req('GET', '/admin/project/' + projectId + '/subsystem') xml = minidom.parseString(content) return [youtrack.Subsystem(e, self) for e in xml.documentElement.childNodes if e.nodeType == Node.ELEMENT_NODE] def getVersions(self, projectId): response, content = self._req('GET', '/admin/project/' + urlquote(projectId) + '/version?showReleased=true') xml = minidom.parseString(content) return [self.getVersion(projectId, v.getAttribute('name')) for v in xml.documentElement.getElementsByTagName('version')] def getVersion(self, projectId, name): return youtrack.Version( self._get("/admin/project/" + urlquote(projectId) + "/version/" + urlquote(name)), self) def getBuilds(self, projectId): response, content = self._req('GET', '/admin/project/' + urlquote(projectId) + '/build') xml = minidom.parseString(content) return [youtrack.Build(e, self) for e in xml.documentElement.childNodes if e.nodeType == Node.ELEMENT_NODE] def getUsers(self, params={}): first = True users = [] position = 0 user_search_params = urllib.urlencode(params) while True: response, content = self._req('GET', "/admin/user/?start=%s&%s" % (str(position), user_search_params)) position += 10 xml = minidom.parseString(content) newUsers = [youtrack.User(e, self) for e in xml.documentElement.childNodes if e.nodeType == Node.ELEMENT_NODE] if not len(newUsers): return users users += newUsers def getUsersTen(self, start): response, content = self._req('GET', "/admin/user/?start=%s" % str(start)) xml = minidom.parseString(content) users = [youtrack.User(e, self) for e in xml.documentElement.childNodes if e.nodeType == Node.ELEMENT_NODE] return users def deleteUser(self, login): return self._req('DELETE', "/admin/user/" + urlquote(login.encode('utf-8'))) # TODO this function is deprecated def createBuild(self): raise NotImplementedError # TODO this function is deprecated def createBuilds(self): raise NotImplementedError def createProject(self, project): return self.createProjectDetailed(project.id, project.name, project.description, project.lead) def deleteProject(self, projectId): return self._req('DELETE', "/admin/project/" + urlquote(projectId)) def createProjectDetailed(self, projectId, name, description, projectLeadLogin, startingNumber=1): _name = name _desc = description if isinstance(_name, unicode): _name = _name.encode('utf-8') if isinstance(_desc, unicode): _desc = _desc.encode('utf-8') return self._put('/admin/project/' + projectId + '?' + urllib.urlencode({'projectName': _name, 'description': _desc + ' ', 'projectLeadLogin': projectLeadLogin, 'lead': projectLeadLogin, 'startingNumber': str(startingNumber)})) # TODO this function is deprecated def createSubsystems(self, projectId, subsystems): """ Accepts result of getSubsystems() """ for s in subsystems: self.createSubsystem(projectId, s) # TODO this function is deprecated def createSubsystem(self, projectId, s): return self.createSubsystemDetailed(projectId, s.name, s.isDefault, s.defaultAssignee if s.defaultAssignee != '<no user>' else '') # TODO this function is deprecated def createSubsystemDetailed(self, projectId, name, isDefault, defaultAssigneeLogin): self._put('/admin/project/' + projectId + '/subsystem/' + urlquote(name.encode('utf-8')) + "?" + urllib.urlencode({'isDefault': str(isDefault), 'defaultAssignee': defaultAssigneeLogin})) return 'Created' # TODO this function is deprecated def deleteSubsystem(self, projectId, name): return self._reqXml('DELETE', '/admin/project/' + projectId + '/subsystem/' + urlquote(name.encode('utf-8')) , '') # TODO this function is deprecated def createVersions(self, projectId, versions): """ Accepts result of getVersions() """ for v in versions: self.createVersion(projectId, v) # TODO this function is deprecated def createVersion(self, projectId, v): return self.createVersionDetailed(projectId, v.name, v.isReleased, v.isArchived, releaseDate=v.releaseDate, description=v.description) # TODO this function is deprecated def createVersionDetailed(self, projectId, name, isReleased, isArchived, releaseDate=None, description=''): params = {'description': description, 'isReleased': str(isReleased), 'isArchived': str(isArchived)} if releaseDate is not None: params['releaseDate'] = str(releaseDate) return self._put( '/admin/project/' + urlquote(projectId) + '/version/' + urlquote(name.encode('utf-8')) + "?" + urllib.urlencode(params)) def getIssues(self, projectId, filter, after, max): # response, content = self._req('GET', '/project/issues/' + urlquote(projectId) + "?" + response, content = self._req('GET', '/issue/byproject/' + urlquote(projectId) + "?" + urllib.urlencode({'after': str(after), 'max': str(max), 'filter': filter})) xml = minidom.parseString(content) return [youtrack.Issue(e, self) for e in xml.documentElement.childNodes if e.nodeType == Node.ELEMENT_NODE] def getNumberOfIssues(self, filter='', waitForServer=True): while True: urlFilterList = [('filter', filter)] finalUrl = '/issue/count?' + urllib.urlencode(urlFilterList) response, content = self._req('GET', finalUrl) result = eval(content.replace('callback', '')) numberOfIssues = result['value'] if (not waitForServer): return numberOfIssues if (numberOfIssues != -1): break time.sleep(5) return self.getNumberOfIssues(filter, False) def getAllSprints(self, agileID): response, content = self._req('GET', '/agile/' + agileID + "/sprints?") xml = minidom.parseString(content) return [(e.getAttribute('name'), e.getAttribute('start'), e.getAttribute('finish')) for e in xml.documentElement.childNodes if e.nodeType == Node.ELEMENT_NODE] def getAllIssues(self, filter='', after=0, max=999999, withFields=()): urlJobby = [('with', field) for field in withFields] + \ [('after', str(after)), ('max', str(max)), ('filter', filter)] response, content = self._req('GET', '/issue' + "?" + urllib.urlencode(urlJobby)) xml = minidom.parseString(content) return [youtrack.Issue(e, self) for e in xml.documentElement.childNodes if e.nodeType == Node.ELEMENT_NODE] def exportIssueLinks(self): response, content = self._req('GET', '/export/links') xml = minidom.parseString(content) return [youtrack.Link(e, self) for e in xml.documentElement.childNodes if e.nodeType == Node.ELEMENT_NODE] def executeCommand(self, issueId, command, comment=None, group=None, run_as=None, disable_notifications=False): if isinstance(command, unicode): command = command.encode('utf-8') params = {'command': command} if comment is not None: params['comment'] = comment if group is not None: params['group'] = group if run_as is not None: params['runAs'] = run_as if disable_notifications: params['disableNotifications'] = disable_notifications for p in params: if isinstance(params[p], unicode): params[p] = params[p].encode('utf-8') response, content = self._req('POST', '/issue/' + issueId + "/execute?" + urllib.urlencode(params), body='') return "Command executed" def getCustomField(self, name): return youtrack.CustomField(self._get("/admin/customfield/field/" + urlquote(name.encode('utf-8'))), self) def getCustomFields(self): response, content = self._req('GET', '/admin/customfield/field') xml = minidom.parseString(content) return [self.getCustomField(e.getAttribute('name')) for e in xml.documentElement.childNodes if e.nodeType == Node.ELEMENT_NODE] def createCustomField(self, cf): params = dict([]) if hasattr(cf, "defaultBundle"): params["defaultBundle"] = cf.defaultBundle if hasattr(cf, "attachBundlePolicy"): params["attachBundlePolicy"] = cf.attachBundlePolicy auto_attached = False if hasattr(cf, "autoAttached"): auto_attached = cf.autoAttached return self.createCustomFieldDetailed(cf.name, cf.type, cf.isPrivate, cf.visibleByDefault, auto_attached, params) def createCustomFieldDetailed(self, customFieldName, typeName, isPrivate, defaultVisibility, auto_attached=False, additional_params=dict([])): params = {'type': typeName, 'isPrivate': str(isPrivate), 'defaultVisibility': str(defaultVisibility), 'autoAttached': str(auto_attached)} params.update(additional_params) for key in params: if isinstance(params[key], unicode): params[key] = params[key].encode('utf-8') self._put('/admin/customfield/field/' + urlquote(customFieldName.encode('utf-8')) + '?' + urllib.urlencode(params), ) return "Created" def createCustomFields(self, cfs): for cf in cfs: self.createCustomField(cf) def getProjectCustomField(self, projectId, name): if isinstance(name, unicode): name = name.encode('utf8') return youtrack.ProjectCustomField( self._get("/admin/project/" + urlquote(projectId) + "/customfield/" + urlquote(name)) , self) def getProjectCustomFields(self, projectId): response, content = self._req('GET', '/admin/project/' + urlquote(projectId) + '/customfield') xml = minidom.parseString(content) return [self.getProjectCustomField(projectId, e.getAttribute('name')) for e in xml.getElementsByTagName('projectCustomField')] def createProjectCustomField(self, projectId, pcf): return self.createProjectCustomFieldDetailed(projectId, pcf.name, pcf.emptyText, pcf.params) def createProjectCustomFieldDetailed(self, projectId, customFieldName, emptyFieldText, params=None): if not len(emptyFieldText.strip()): emptyFieldText = u"No " + customFieldName if isinstance(customFieldName, unicode): customFieldName = customFieldName.encode('utf-8') _params = {'emptyFieldText': emptyFieldText} if params is not None: _params.update(params) for key in _params: if isinstance(_params[key], unicode): _params[key] = _params[key].encode('utf-8') return self._put( '/admin/project/' + projectId + '/customfield/' + urlquote(customFieldName) + '?' + urllib.urlencode(_params)) def deleteProjectCustomField(self, project_id, pcf_name): self._req('DELETE', '/admin/project/' + urlquote(project_id) + "/customfield/" + urlquote(pcf_name)) def getIssueLinkTypes(self): response, content = self._req('GET', '/admin/issueLinkType') xml = minidom.parseString(content) return [youtrack.IssueLinkType(e, self) for e in xml.getElementsByTagName('issueLinkType')] def createIssueLinkTypes(self, issueLinkTypes): for ilt in issueLinkTypes: return self.createIssueLinkType(ilt) def createIssueLinkType(self, ilt): return self.createIssueLinkTypeDetailed(ilt.name, ilt.outwardName, ilt.inwardName, ilt.directed) def createIssueLinkTypeDetailed(self, name, outwardName, inwardName, directed): if isinstance(name, unicode): name = name.encode('utf-8') if isinstance(outwardName, unicode): outwardName = outwardName.encode('utf-8') if isinstance(inwardName, unicode): inwardName = inwardName.encode('utf-8') return self._put('/admin/issueLinkType/' + urlquote(name) + '?' + urllib.urlencode({'outwardName': outwardName, 'inwardName': inwardName, 'directed': directed})) def getEvents(self, issue_id): return self._get('/event/issueEvents/' + urlquote(issue_id)) def getWorkItems(self, issue_id): try: response, content = self._req('GET', '/issue/%s/timetracking/workitem' % urlquote(issue_id)) xml = minidom.parseString(content) return [youtrack.WorkItem(e, self) for e in xml.documentElement.childNodes if e.nodeType == Node.ELEMENT_NODE] except youtrack.YouTrackException, e: print "Can't get work items.", str(e) return [] def createWorkItem(self, issue_id, work_item): xml = '<workItem>' xml += '<date>%s</date>' % work_item.date xml += '<duration>%s</duration>' % work_item.duration if hasattr(work_item, 'description') and work_item.description is not None: xml += '<description>%s</description>' % escape(work_item.description) if hasattr(work_item, 'worktype') and work_item.worktype is not None: xml += '<worktype><name>%s</name></worktype>' % work_item.worktype xml += '</workItem>' if isinstance(xml, unicode): xml = xml.encode('utf-8') self._reqXml('POST', '/issue/%s/timetracking/workitem' % urlquote(issue_id), xml) def importWorkItems(self, issue_id, work_items): xml = '' for work_item in work_items: xml += '<workItem>' xml += '<date>%s</date>' % work_item.date xml += '<duration>%s</duration>' % work_item.duration if hasattr(work_item, 'description') and work_item.description is not None: xml += '<description>%s</description>' % escape(work_item.description) if hasattr(work_item, 'worktype') and work_item.worktype is not None: xml += '<worktype><name>%s</name></worktype>' % work_item.worktype xml += '<author login=%s></author>' % quoteattr(work_item.authorLogin) xml += '</workItem>' if isinstance(xml, unicode): xml = xml.encode('utf-8') if xml: xml = '<workItems>' + xml + '</workItems>' self._reqXml('PUT', '/import/issue/%s/workitems' % urlquote(issue_id), xml) def getSearchIntelliSense(self, query, context=None, caret=None, options_limit=None): opts = {'filter': query} if context: opts['project'] = context if caret is not None: opts['caret'] = caret if options_limit is not None: opts['optionsLimit'] = options_limit return youtrack.IntelliSense( self._get('/issue/intellisense?' + urllib.urlencode(opts)), self) def getCommandIntelliSense(self, issue_id, command, run_as=None, caret=None, options_limit=None): opts = {'command': command} if run_as: opts['runAs'] = run_as if caret is not None: opts['caret'] = caret if options_limit is not None: opts['optionsLimit'] = options_limit return youtrack.IntelliSense( self._get('/issue/%s/execute/intellisense?%s' % (issue_id, urllib.urlencode(opts))), self) def getGlobalTimeTrackingSettings(self): try: cont = self._get('/admin/timetracking') return youtrack.GlobalTimeTrackingSettings(cont, xml) except youtrack.YouTrackException, e: if e.response.status != 404: raise e def getProjectTimeTrackingSettings(self, projectId): try: cont = self._get('/admin/project/' + projectId + '/timetracking') return youtrack.ProjectTimeTrackingSettings(cont, self) except youtrack.YouTrackException, e: if e.response.status != 404: raise e def setGlobalTimeTrackingSettings(self, daysAWeek=None, hoursADay=None): xml = '<timesettings>' if daysAWeek is not None: xml += '<daysAWeek>%d</daysAWeek>' % daysAWeek if hoursADay is not None: xml += '<hoursADay>%d</hoursADay>' % hoursADay xml += '</timesettings>' return self._reqXml('PUT', '/admin/timetracking', xml) def setProjectTimeTrackingSettings(self, projectId, estimateField=None, timeSpentField=None, enabled=None): if enabled is not None: xml = '<settings enabled="%s">' % str(enabled == True).lower() else: xml = '<settings>' if estimateField is not None and estimateField != '': xml += '<estimation name="%s"/>' % estimateField if timeSpentField is not None and timeSpentField != '': xml += '<spentTime name="%s"/>' % timeSpentField xml += '</settings>' return self._reqXml( 'PUT', '/admin/project/' + projectId + '/timetracking', xml) def getAllBundles(self, field_type): field_type = self.get_field_type(field_type) if field_type == "enum": tag_name = "enumFieldBundle" elif field_type == "user": tag_name = "userFieldBundle" else: tag_name = self.bundle_paths[field_type] names = [e.getAttribute("name") for e in self._get('/admin/customfield/' + self.bundle_paths[field_type]).getElementsByTagName( tag_name)] return [self.getBundle(field_type, name) for name in names] def get_field_type(self, field_type): if "[" in field_type: field_type = field_type[0:-3] return field_type def getBundle(self, field_type, name): field_type = self.get_field_type(field_type) response = self._get('/admin/customfield/%s/%s' % (self.bundle_paths[field_type], urlquote(name.encode('utf-8')))) return self.bundle_types[field_type](response, self) def renameBundle(self, bundle, new_name): response, content = self._req("POST", "/admin/customfield/%s/%s?newName=%s" % ( self.bundle_paths[bundle.get_field_type()], bundle.name, new_name), "", ignoreStatus=301) return response def createBundle(self, bundle): return self._reqXml('PUT', '/admin/customfield/' + self.bundle_paths[bundle.get_field_type()], body=bundle.toXml(), ignoreStatus=400) def deleteBundle(self, bundle): response, content = self._req("DELETE", "/admin/customfield/%s/%s" % ( self.bundle_paths[bundle.get_field_type()], bundle.name), "") return response def addValueToBundle(self, bundle, value): request = "" if bundle.get_field_type() != "user": request = "/admin/customfield/%s/%s/" % ( self.bundle_paths[bundle.get_field_type()], urlquote(bundle.name.encode('utf-8'))) if isinstance(value, str): request += urlquote(value) elif isinstance(value, unicode): request += urlquote(value.encode('utf-8')) else: request += urlquote(value.name.encode('utf-8')) + "?" params = dict() for e in value: if (e != "name") and (e != "element_name") and len(value[e]): if isinstance(value[e], unicode): params[e] = value[e].encode('utf-8') else: params[e] = value[e] if len(params): request += urllib.urlencode(params) else: request = "/admin/customfield/userBundle/%s/" % urlquote(bundle.name.encode('utf-8')) if isinstance(value, youtrack.User): request += "individual/%s/" % value.login elif isinstance(value, youtrack.Group): request += "group/%s/" % urlquote(value.name.encode('utf-8')) else: request += "individual/%s/" % urlquote(value) return self._put(request) def removeValueFromBundle(self, bundle, value): field_type = bundle.get_field_type() request = "/admin/customfield/%s/%s/" % (self.bundle_paths[field_type], bundle.name) if field_type != "user": request += urlquote(value.name) elif isinstance(value, youtrack.User): request += "individual/" + urlquote(value.login) else: request += "group/" + value.name response, content = self._req("DELETE", request, "", ignoreStatus=204) return response def getEnumBundle(self, name): return youtrack.EnumBundle(self._get("/admin/customfield/bundle/" + urlquote(name)), self) def createEnumBundle(self, eb): return self.createBundle(eb) def deleteEnumBundle(self, name): return self.deleteBundle(self.getEnumBundle(name)) def createEnumBundleDetailed(self, name, values): xml = '<enumeration name=\"' + name.encode('utf-8') + '\">' xml += ' '.join('<value>' + v + '</value>' for v in values) xml += '</enumeration>' return self._reqXml('PUT', '/admin/customfield/bundle', body=xml.encode('utf8'), ignoreStatus=400) def addValueToEnumBundle(self, name, value): return self.addValueToBundle(self.getEnumBundle(name), value) def addValuesToEnumBundle(self, name, values): return ", ".join(self.addValueToEnumBundle(name, value) for value in values) bundle_paths = { "enum": "bundle", "build": "buildBundle", "ownedField": "ownedFieldBundle", "state": "stateBundle", "version": "versionBundle", "user": "userBundle" } bundle_types = { "enum": lambda xml, yt: youtrack.EnumBundle(xml, yt), "build": lambda xml, yt: youtrack.BuildBundle(xml, yt), "ownedField": lambda xml, yt: youtrack.OwnedFieldBundle(xml, yt), "state": lambda xml, yt: youtrack.StateBundle(xml, yt), "version": lambda xml, yt: youtrack.VersionBundle(xml, yt), "user": lambda xml, yt: youtrack.UserBundle(xml, yt) }
softintouch/python-youtrack-api
sit_youtrack/youtrack/connection.py
Python
apache-2.0
48,679
from __future__ import absolute_import from __future__ import with_statement """ This module contains the component responsible for consuming messages from the broker, processing the messages and keeping the broker connections up and running. * :meth:`~Consumer.start` is an infinite loop, which only iterates again if the connection is lost. For each iteration (at start, or if the connection is lost) it calls :meth:`~Consumer.reset_connection`, and starts the consumer by calling :meth:`~Consumer.consume_messages`. * :meth:`~Consumer.reset_connection`, clears the internal queues, establishes a new connection to the broker, sets up the task consumer (+ QoS), and the broadcast remote control command consumer. Also if events are enabled it configures the event dispatcher and starts up the heartbeat thread. * Finally it can consume messages. :meth:`~Consumer.consume_messages` is simply an infinite loop waiting for events on the AMQP channels. Both the task consumer and the broadcast consumer uses the same callback: :meth:`~Consumer.receive_message`. * So for each message received the :meth:`~Consumer.receive_message` method is called, this checks the payload of the message for either a `task` key or a `control` key. If the message is a task, it verifies the validity of the message converts it to a :class:`celery.worker.job.TaskRequest`, and sends it to :meth:`~Consumer.on_task`. If the message is a control command the message is passed to :meth:`~Consumer.on_control`, which in turn dispatches the control command using the control dispatcher. It also tries to handle malformed or invalid messages properly, so the worker doesn't choke on them and die. Any invalid messages are acknowledged immediately and logged, so the message is not resent again, and again. * If the task has an ETA/countdown, the task is moved to the `eta_schedule` so the :class:`timer2.Timer` can schedule it at its deadline. Tasks without an eta are moved immediately to the `ready_queue`, so they can be picked up by the :class:`~celery.worker.mediator.Mediator` to be sent to the pool. * When a task with an ETA is received the QoS prefetch count is also incremented, so another message can be reserved. When the ETA is met the prefetch count is decremented again, though this cannot happen immediately because amqplib doesn't support doing broker requests across threads. Instead the current prefetch count is kept as a shared counter, so as soon as :meth:`~Consumer.consume_messages` detects that the value has changed it will send out the actual QoS event to the broker. * Notice that when the connection is lost all internal queues are cleared because we can no longer ack the messages reserved in memory. However, this is not dangerous as the broker will resend them to another worker when the channel is closed. * **WARNING**: :meth:`~Consumer.stop` does not close the connection! This is because some pre-acked messages may be in processing, and they need to be finished before the channel is closed. For celeryd this means the pool must finish the tasks it has acked early, *then* close the connection. """ import socket import sys import threading import traceback import warnings from celery.app import app_or_default from celery.datastructures import AttributeDict from celery.exceptions import NotRegistered from celery.utils import noop from celery.utils import timer2 from celery.utils.encoding import safe_repr from celery.worker import state from celery.worker.job import TaskRequest, InvalidTaskError from celery.worker.control.registry import Panel from celery.worker.heartbeat import Heart RUN = 0x1 CLOSE = 0x2 #: Prefetch count can't exceed short. PREFETCH_COUNT_MAX = 0xFFFF #: Error message for when an unregistered task is received. UNKNOWN_TASK_ERROR = """\ Received unregistered task of type %s. The message has been ignored and discarded. Did you remember to import the module containing this task? Or maybe you are using relative imports? Please see http://bit.ly/gLye1c for more information. The full contents of the message body was: %s """ #: Error message for when an invalid task message is received. INVALID_TASK_ERROR = """\ Received invalid task message: %s The message has been ignored and discarded. Please ensure your message conforms to the task message protocol as described here: http://bit.ly/hYj41y The full contents of the message body was: %s """ MESSAGE_REPORT_FMT = """\ body: %s {content_type:%s content_encoding:%s delivery_info:%s}\ """ class QoS(object): """Quality of Service for Channel. For thread-safe increment/decrement of a channels prefetch count value. :param consumer: A :class:`kombu.messaging.Consumer` instance. :param initial_value: Initial prefetch count value. :param logger: Logger used to log debug messages. """ prev = None def __init__(self, consumer, initial_value, logger): self.consumer = consumer self.logger = logger self._mutex = threading.RLock() self.value = initial_value def increment(self, n=1): """Increment the current prefetch count value by n.""" with self._mutex: if self.value: new_value = self.value + max(n, 0) self.value = self.set(new_value) return self.value def _sub(self, n=1): assert self.value - n > 1 self.value -= n def decrement(self, n=1): """Decrement the current prefetch count value by n.""" with self._mutex: if self.value: self._sub(n) self.set(self.value) return self.value def decrement_eventually(self, n=1): """Decrement the value, but do not update the qos. The MainThread will be responsible for calling :meth:`update` when necessary. """ with self._mutex: if self.value: self._sub(n) def set(self, pcount): """Set channel prefetch_count setting.""" if pcount != self.prev: new_value = pcount if pcount > PREFETCH_COUNT_MAX: self.logger.warning( "QoS: Disabled: prefetch_count exceeds %r" % ( PREFETCH_COUNT_MAX, )) new_value = 0 self.logger.debug("basic.qos: prefetch_count->%s" % new_value) self.consumer.qos(prefetch_count=new_value) self.prev = pcount return pcount def update(self): """Update prefetch count with current value.""" with self._mutex: return self.set(self.value) class Consumer(object): """Listen for messages received from the broker and move them to the ready queue for task processing. :param ready_queue: See :attr:`ready_queue`. :param eta_schedule: See :attr:`eta_schedule`. """ #: The queue that holds tasks ready for immediate processing. ready_queue = None #: Timer for tasks with an ETA/countdown. eta_schedule = None #: Enable/disable events. send_events = False #: Optional callback to be called when the connection is established. #: Will only be called once, even if the connection is lost and #: re-established. init_callback = None #: The current hostname. Defaults to the system hostname. hostname = None #: Initial QoS prefetch count for the task channel. initial_prefetch_count = 0 #: A :class:`celery.events.EventDispatcher` for sending events. event_dispatcher = None #: The thread that sends event heartbeats at regular intervals. #: The heartbeats are used by monitors to detect that a worker #: went off-line/disappeared. heart = None #: The logger instance to use. Defaults to the default Celery logger. logger = None #: The broker connection. connection = None #: The consumer used to consume task messages. task_consumer = None #: The consumer used to consume broadcast commands. broadcast_consumer = None #: The process mailbox (kombu pidbox node). pidbox_node = None #: The current worker pool instance. pool = None #: A timer used for high-priority internal tasks, such #: as sending heartbeats. priority_timer = None # Consumer state, can be RUN or CLOSE. _state = None def __init__(self, ready_queue, eta_schedule, logger, init_callback=noop, send_events=False, hostname=None, initial_prefetch_count=2, pool=None, app=None, priority_timer=None, controller=None): self.app = app_or_default(app) self.connection = None self.task_consumer = None self.controller = controller self.broadcast_consumer = None self.ready_queue = ready_queue self.eta_schedule = eta_schedule self.send_events = send_events self.init_callback = init_callback self.logger = logger self.hostname = hostname or socket.gethostname() self.initial_prefetch_count = initial_prefetch_count self.event_dispatcher = None self.heart = None self.pool = pool self.priority_timer = priority_timer or timer2.default_timer pidbox_state = AttributeDict(app=self.app, logger=logger, hostname=self.hostname, listener=self, # pre 2.2 consumer=self) self.pidbox_node = self.app.control.mailbox.Node(self.hostname, state=pidbox_state, handlers=Panel.data) conninfo = self.app.broker_connection() self.connection_errors = conninfo.connection_errors self.channel_errors = conninfo.channel_errors def start(self): """Start the consumer. Automatically survives intermittent connection failure, and will retry establishing the connection and restart consuming messages. """ self.init_callback(self) while self._state != CLOSE: try: self.reset_connection() self.consume_messages() except self.connection_errors: self.logger.error("Consumer: Connection to broker lost." + " Trying to re-establish the connection...", exc_info=sys.exc_info()) def consume_messages(self): """Consume messages forever (or until an exception is raised).""" self._debug("Starting message consumer...") self.task_consumer.consume() self._debug("Ready to accept tasks!") while self._state != CLOSE and self.connection: if self.qos.prev != self.qos.value: self.qos.update() try: self.connection.drain_events(timeout=1) except socket.timeout: pass except socket.error: if self._state != CLOSE: raise def on_task(self, task): """Handle received task. If the task has an `eta` we enter it into the ETA schedule, otherwise we move it the ready queue for immediate processing. """ if task.revoked(): return self.logger.info("Got task from broker: %s" % (task.shortinfo(), )) if self.event_dispatcher.enabled: self.event_dispatcher.send("task-received", uuid=task.task_id, name=task.task_name, args=safe_repr(task.args), kwargs=safe_repr(task.kwargs), retries=task.retries, eta=task.eta and task.eta.isoformat(), expires=task.expires and task.expires.isoformat()) if task.eta: try: eta = timer2.to_timestamp(task.eta) except OverflowError, exc: self.logger.error( "Couldn't convert eta %s to time stamp: %r. Task: %r" % ( task.eta, exc, task.info(safe=True)), exc_info=sys.exc_info()) task.acknowledge() else: self.qos.increment() self.eta_schedule.apply_at(eta, self.apply_eta_task, (task, )) else: state.task_reserved(task) self.ready_queue.put(task) def on_control(self, body, message): """Process remote control command message.""" try: self.pidbox_node.handle_message(body, message) except KeyError, exc: self.logger.error("No such control command: %s" % exc) except Exception, exc: self.logger.error( "Error occurred while handling control command: %r\n%r" % ( exc, traceback.format_exc()), exc_info=sys.exc_info()) self.reset_pidbox_node() def apply_eta_task(self, task): """Method called by the timer to apply a task with an ETA/countdown.""" state.task_reserved(task) self.ready_queue.put(task) self.qos.decrement_eventually() def _message_report(self, body, message): return MESSAGE_REPORT_FMT % (safe_repr(body), safe_repr(message.content_type), safe_repr(message.content_encoding), safe_repr(message.delivery_info)) def receive_message(self, body, message): """Handles incoming messages. :param body: The message body. :param message: The kombu message object. """ # need to guard against errors occurring while acking the message. def ack(): try: message.ack() except self.connection_errors + (AttributeError, ), exc: self.logger.critical( "Couldn't ack %r: %s reason:%r" % ( message.delivery_tag, self._message_report(body, message), exc)) try: body["task"] except (KeyError, TypeError): warnings.warn(RuntimeWarning( "Received and deleted unknown message. Wrong destination?!? \ the full contents of the message body was: %s" % ( self._message_report(body, message), ))) ack() return try: task = TaskRequest.from_message(message, body, ack, app=self.app, logger=self.logger, hostname=self.hostname, eventer=self.event_dispatcher) except NotRegistered, exc: self.logger.error(UNKNOWN_TASK_ERROR % ( exc, safe_repr(body)), exc_info=sys.exc_info()) ack() except InvalidTaskError, exc: self.logger.error(INVALID_TASK_ERROR % ( str(exc), safe_repr(body)), exc_info=sys.exc_info()) ack() else: self.on_task(task) def maybe_conn_error(self, fun): """Applies function but ignores any connection or channel errors raised.""" try: fun() except (AttributeError, ) + \ self.connection_errors + \ self.channel_errors: pass def close_connection(self): """Closes the current broker connection and all open channels.""" if self.task_consumer: self._debug("Closing consumer channel...") self.task_consumer = \ self.maybe_conn_error(self.task_consumer.close) if self.broadcast_consumer: self._debug("Closing broadcast channel...") self.broadcast_consumer = \ self.maybe_conn_error(self.broadcast_consumer.channel.close) if self.connection: self._debug("Closing broker connection...") self.connection = self.maybe_conn_error(self.connection.close) def stop_consumers(self, close_connection=True): """Stop consuming tasks and broadcast commands, also stops the heartbeat thread and event dispatcher. :keyword close_connection: Set to False to skip closing the broker connection. """ if not self._state == RUN: return if self.heart: # Stop the heartbeat thread if it's running. self.logger.debug("Heart: Going into cardiac arrest...") self.heart = self.heart.stop() self._debug("Cancelling task consumer...") if self.task_consumer: self.maybe_conn_error(self.task_consumer.cancel) if self.event_dispatcher: self._debug("Shutting down event dispatcher...") self.event_dispatcher = \ self.maybe_conn_error(self.event_dispatcher.close) self._debug("Cancelling broadcast consumer...") if self.broadcast_consumer: self.maybe_conn_error(self.broadcast_consumer.cancel) if close_connection: self.close_connection() def on_decode_error(self, message, exc): """Callback called if an error occurs while decoding a message received. Simply logs the error and acknowledges the message so it doesn't enter a loop. :param message: The message with errors. :param exc: The original exception instance. """ self.logger.critical( "Can't decode message body: %r (type:%r encoding:%r raw:%r')" % ( exc, message.content_type, message.content_encoding, safe_repr(message.body))) message.ack() def reset_pidbox_node(self): """Sets up the process mailbox.""" # close previously opened channel if any. if self.pidbox_node.channel: try: self.pidbox_node.channel.close() except self.connection_errors + self.channel_errors: pass if self.pool is not None and self.pool.is_green: return self.pool.spawn_n(self._green_pidbox_node) self.pidbox_node.channel = self.connection.channel() self.broadcast_consumer = self.pidbox_node.listen( callback=self.on_control) self.broadcast_consumer.consume() def _green_pidbox_node(self): """Sets up the process mailbox when running in a greenlet environment.""" conn = self._open_connection() self.pidbox_node.channel = conn.channel() self.broadcast_consumer = self.pidbox_node.listen( callback=self.on_control) self.broadcast_consumer.consume() try: while self.connection: # main connection still open? conn.drain_events() finally: conn.close() def reset_connection(self): """Re-establish the broker connection and set up consumers, heartbeat and the event dispatcher.""" self._debug("Re-establishing connection to the broker...") self.stop_consumers() # Clear internal queues to get rid of old messages. # They can't be acked anyway, as a delivery tag is specific # to the current channel. self.ready_queue.clear() self.eta_schedule.clear() # Re-establish the broker connection and setup the task consumer. self.connection = self._open_connection() self._debug("Connection established.") self.task_consumer = self.app.amqp.get_task_consumer(self.connection, on_decode_error=self.on_decode_error) # QoS: Reset prefetch window. self.qos = QoS(self.task_consumer, self.initial_prefetch_count, self.logger) self.qos.update() # receive_message handles incoming messages. self.task_consumer.register_callback(self.receive_message) # Setup the process mailbox. self.reset_pidbox_node() # Flush events sent while connection was down. prev_event_dispatcher = self.event_dispatcher self.event_dispatcher = self.app.events.Dispatcher(self.connection, hostname=self.hostname, enabled=self.send_events) if prev_event_dispatcher: self.event_dispatcher.copy_buffer(prev_event_dispatcher) self.event_dispatcher.flush() # Restart heartbeat thread. self.restart_heartbeat() # We're back! self._state = RUN def restart_heartbeat(self): """Restart the heartbeat thread. This thread sends heartbeat events at intervals so monitors can tell if the worker is off-line/missing. """ self.heart = Heart(self.priority_timer, self.event_dispatcher) self.heart.start() def _open_connection(self): """Establish the broker connection. Will retry establishing the connection if the :setting:`BROKER_CONNECTION_RETRY` setting is enabled """ # Callback called for each retry while the connection # can't be established. def _error_handler(exc, interval): self.logger.error("Consumer: Connection Error: %s. " % exc + "Trying again in %d seconds..." % interval) # remember that the connection is lazy, it won't establish # until it's needed. conn = self.app.broker_connection() if not self.app.conf.BROKER_CONNECTION_RETRY: # retry disabled, just call connect directly. conn.connect() return conn return conn.ensure_connection(_error_handler, self.app.conf.BROKER_CONNECTION_MAX_RETRIES) def stop(self): """Stop consuming. Does not close the broker connection, so be sure to call :meth:`close_connection` when you are finished with it. """ # Notifies other threads that this instance can't be used # anymore. self._state = CLOSE self._debug("Stopping consumers...") self.stop_consumers(close_connection=False) @property def info(self): """Returns information about this consumer instance as a dict. This is also the consumer related info returned by ``celeryctl stats``. """ conninfo = {} if self.connection: conninfo = self.connection.info() conninfo.pop("password", None) # don't send password. return {"broker": conninfo, "prefetch_count": self.qos.value} def _debug(self, msg, **kwargs): self.logger.debug("Consumer: %s" % (msg, ), **kwargs)
WoLpH/celery
celery/worker/consumer.py
Python
bsd-3-clause
23,217
# coding: utf-8 """ KubeVirt API This is KubeVirt API an add-on for Kubernetes. OpenAPI spec version: 1.0.0 Contact: kubevirt-dev@googlegroups.com Generated by: https://github.com/swagger-api/swagger-codegen.git """ from __future__ import absolute_import import os import sys import unittest import kubevirt from kubevirt.rest import ApiException from kubevirt.models.v1_secret_volume_source import V1SecretVolumeSource class TestV1SecretVolumeSource(unittest.TestCase): """ V1SecretVolumeSource unit test stubs """ def setUp(self): pass def tearDown(self): pass def testV1SecretVolumeSource(self): """ Test V1SecretVolumeSource """ # FIXME: construct object with mandatory attributes with example values #model = kubevirt.models.v1_secret_volume_source.V1SecretVolumeSource() pass if __name__ == '__main__': unittest.main()
kubevirt/client-python
test/test_v1_secret_volume_source.py
Python
apache-2.0
943
"""Graphical user interface to Delta-Elektronika SM-700 Series controllers.""" import sys import pyhard2.driver as drv import pyhard2.driver.virtual as virtual import pyhard2.driver.deltaelektronika as delta import pyhard2.ctrlr as ctrlr def createController(): """Initialize controller.""" config = ctrlr.Config("deltaelektronika", "SM-700") if not config.nodes: config.nodes, config.names = ([1], ["SM700"]) if config.virtual: driver = virtual.VirtualInstrument() iface = ctrlr.virtualInstrumentController(config, driver) else: driver = delta.Sm700Series(drv.Serial(config.port)) iface = ctrlr.Controller(config, driver) iface.addCommand(driver.source.voltage, "Voltage", poll=True, log=True) iface.addCommand(driver.source.current, "Current", poll=True, log=True) iface.populate() return iface def main(argv): """Start controller.""" from PyQt4 import QtGui app = QtGui.QApplication(argv) app.lastWindowClosed.connect(app.quit) iface = createController() iface.show() sys.exit(app.exec_()) if __name__ == "__main__": main(sys.argv)
Synss/pyhard2
pyhard2/ctrlr/deltaelektronika.py
Python
mit
1,159
# coding: utf-8 from __future__ import unicode_literals import re from .common import InfoExtractor from ..utils import orderedSet class CTVNewsIE(InfoExtractor): _VALID_URL = r'https?://(?:.+?\.)?ctvnews\.ca/(?:video\?(?:clip|playlist|bin)Id=|.*?)(?P<id>[0-9.]+)' _TESTS = [{ 'url': 'http://www.ctvnews.ca/video?clipId=901995', 'md5': '10deb320dc0ccb8d01d34d12fc2ea672', 'info_dict': { 'id': '901995', 'ext': 'mp4', 'title': 'Extended: \'That person cannot be me\' Johnson says', 'description': 'md5:958dd3b4f5bbbf0ed4d045c790d89285', 'timestamp': 1467286284, 'upload_date': '20160630', } }, { 'url': 'http://www.ctvnews.ca/video?playlistId=1.2966224', 'info_dict': { 'id': '1.2966224', }, 'playlist_mincount': 19, }, { 'url': 'http://www.ctvnews.ca/video?binId=1.2876780', 'info_dict': { 'id': '1.2876780', }, 'playlist_mincount': 100, }, { 'url': 'http://www.ctvnews.ca/1.810401', 'only_matching': True, }, { 'url': 'http://www.ctvnews.ca/canadiens-send-p-k-subban-to-nashville-in-blockbuster-trade-1.2967231', 'only_matching': True, }, { 'url': 'http://vancouverisland.ctvnews.ca/video?clipId=761241', 'only_matching': True, }] def _real_extract(self, url): page_id = self._match_id(url) def ninecninemedia_url_result(clip_id): return { '_type': 'url_transparent', 'id': clip_id, 'url': '9c9media:ctvnews_web:%s' % clip_id, 'ie_key': 'NineCNineMedia', } if page_id.isdigit(): return ninecninemedia_url_result(page_id) else: webpage = self._download_webpage('http://www.ctvnews.ca/%s' % page_id, page_id, query={ 'ot': 'example.AjaxPageLayout.ot', 'maxItemsPerPage': 1000000, }) entries = [ninecninemedia_url_result(clip_id) for clip_id in orderedSet( re.findall(r'clip\.id\s*=\s*(\d+);', webpage))] return self.playlist_result(entries, page_id)
linglung/ytdl
youtube_dl/extractor/ctvnews.py
Python
unlicense
2,276
# # Copyright (c) 2013-2018 Quarkslab. # This file is part of IRMA project. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License in the top-level directory # of this distribution and at: # # http://www.apache.org/licenses/LICENSE-2.0 # # No part of the project, including this file, may be copied, # modified, propagated, or distributed except according to the # terms contained in the LICENSE file. import os import logging import config.parser as config from irma.common.base.exceptions import IrmaFileSystemError, \ IrmaFtpError from tempfile import TemporaryFile log = logging.getLogger(__name__) def _get_ftp(): IrmaFTP = config.get_ftp_class() ftp_config = config.frontend_config['ftp_brain'] host = ftp_config.host port = ftp_config.port auth = ftp_config.auth key_path = ftp_config.key_path user = ftp_config.username pwd = ftp_config.password return IrmaFTP(host, port, auth, key_path, user, pwd) def upload_file(upload_path, file_path): try: with _get_ftp() as ftp: log.debug("file_ext_id: %s uploading file: %s", upload_path, file_path) if not os.path.isfile(file_path): reason = "File does not exist" log.error(reason) raise IrmaFileSystemError(reason) ftp.upload_file(upload_path, file_path) return except Exception as e: log.exception(type(e).__name__ + " : " + str(e)) reason = "Ftp upload Error" raise IrmaFtpError(reason) def download_file_data(filename): try: fobj = TemporaryFile() with _get_ftp() as ftp: log.debug("downloading file %s", filename) ftp.download_fobj(".", filename, fobj) return fobj except Exception as e: log.exception(type(e).__name__ + " : " + str(e)) reason = "Ftp download Error" raise IrmaFtpError(reason) def rename_file(srcname, dstname): try: with _get_ftp() as ftp: log.debug("file %s renaming to %s", srcname, dstname) ftp.rename(srcname, dstname) return except Exception as e: log.exception(type(e).__name__ + " : " + str(e)) reason = "Ftp upload Error" raise IrmaFtpError(reason)
quarkslab/irma
frontend/api/common/ftp.py
Python
apache-2.0
2,432
# coding=utf-8 """**SAFE (Scenario Assessment For Emergencies) - API** The purpose of the module is to provide a well defined public API for the packages that constitute the SAFE engine. Modules using SAFE should only need to import functions from here. Contact : ole.moller.nielsen@gmail.com .. note:: This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. """ __author__ = 'marco@opengis.ch' __revision__ = '$Format:%H$' __date__ = '05/10/2012' __copyright__ = ('Copyright 2012, Australia Indonesia Facility for ' 'Disaster Reduction') from safe.common.utilities import ugettext as tr DEFAULTS = dict() # https://www.cia.gov/library/publications/the-world-factbook/geos/xx.html # total population: 1.01 male(s)/female (2011 est.) DEFAULTS['FEMALE_RATIO'] = 0.50 # https://www.cia.gov/library/publications/the-world-factbook/geos/xx.html # Age structure: # 0-14 years: 26.3% (male 944,987,919/female 884,268,378) # 15-64 years: 65.9% (male 2,234,860,865/female 2,187,838,153) # 65 years and over: 7.9% (male 227,164,176/female 289,048,221) (2011 est.) # NOTE (MB) CIA can not do maths!!! this gives 100.1% # inaSAFE can, thus we remove 0.1% from the elderly # I wrote them and got this contact confirmation number: CTCU1K2 DEFAULTS['YOUTH_RATIO'] = 0.263 DEFAULTS['ADULT_RATIO'] = 0.659 DEFAULTS['ELDERLY_RATIO'] = 0.078 #Keywords key names DEFAULTS['FEMALE_RATIO_ATTR_KEY'] = 'female ratio attribute' DEFAULTS['FEMALE_RATIO_KEY'] = 'female ratio default' DEFAULTS['YOUTH_RATIO_ATTR_KEY'] = 'youth ratio attribute' DEFAULTS['YOUTH_RATIO_KEY'] = 'youth ratio default' DEFAULTS['ADULT_RATIO_ATTR_KEY'] = 'adult ratio attribute' DEFAULTS['ADULT_RATIO_KEY'] = 'adult ratio default' DEFAULTS['ELDERLY_RATIO_ATTR_KEY'] = 'elderly ratio attribute' DEFAULTS['ELDERLY_RATIO_KEY'] = 'elderly ratio default' DEFAULTS['AGGR_ATTR_KEY'] = 'aggregation attribute' DEFAULTS['NO_DATA'] = tr('No data') # noinspection PyUnresolvedReferences # this is used when we are in safe without access to qgis (e.g. web ) and is # monkey patched in safe_qgis.__init__ def get_defaults(default=None): """Get defaults for aggregation / post processing. :param default: Optional parameter if you only want a specific default. :type default: str :return: A single value (when default is passed) or a dict of values. :rtype: str, int, float, dict """ print "SAFE defaults CALL. If in QGIS this is a WRONG CALL" if default is None: return DEFAULTS elif default in DEFAULTS: return DEFAULTS[default] else: return None
drayanaindra/inasafe
safe/defaults.py
Python
gpl-3.0
2,781
# -*- coding: utf-8 -*- # Copyright (c) 2013 Federico Ruiz-Ugalde # Author: Federico Ruiz Ugalde <memeruiz at gmail.com> # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. from arcospyu.mypopen import MyPopen from arcospyu.dprint import iprint, dprint, eprint from time import sleep import signal import os def which(program): import os def is_exe(fpath): return os.path.isfile(fpath) and os.access(fpath, os.X_OK) fpath, fname = os.path.split(program) if fpath: if is_exe(program): return program else: for path in os.environ['PATH'].split(os.pathsep): path = path.strip('"') exe_file = os.path.join(path, program) if is_exe(exe_file): return exe_file return None class PManager(object): def __init__(self, processes_args=[]): self.processes_args = processes_args self.processes = [] self.processes_name_pos = [] def start(self): for process_args in self.processes_args: filename = which(process_args[0]) if filename and os.path.exists(filename) and os.access(filename, os.X_OK): dprint('Starting process: ', process_args[0]) self.processes.append(MyPopen([filename] + process_args[1:])) self.processes[-1].args = process_args else: eprint( 'Executable', process_args[0], ' not found, not starting process') # sleep(3) def monitor(self): stop = False while not stop: for process in self.processes: process.poll() if process.returncode is not None: eprint( 'Process: ', process.args[0], ' died! Closing all processes') stop = True sleep(0.5) # dprint('Looping') if stop: self.stop() def stop(self, sec=10): iprint('Sending signal ', signal.SIGTERM, ' to subprocesses') [process.send_signal2(signal.SIGTERM) for process in self.processes] iprint('Waiting processes to terminate') for process in self.processes: process.wait_and_kill(sec) iprint('Process: ', process.args[0], ' terminated') iprint('All processes terminated!')
arcoslab/arcospyu
lib/arcospyu/pmanager/pmanager.py
Python
gpl-3.0
3,054
#! /usr/bin/env python # Replaced references to re module # Replaced long with int # Workaround for missing "".translate() # Workaround for missing string muliplication # Workaround for javascript bit shifting is 32 bits (and behind the scenes on floats) """RFC 3548: Base16, Base32, Base64 Data Encodings""" # Modified 04-Oct-1995 by Jack Jansen to use binascii module # Modified 30-Dec-2003 by Barry Warsaw to add full RFC 3548 support #import re import struct import binascii __all__ = [ # Legacy interface exports traditional RFC 1521 Base64 encodings 'encode', 'decode', 'encodestring', 'decodestring', # Generalized interface for other encodings 'b64encode', 'b64decode', 'b32encode', 'b32decode', 'b16encode', 'b16decode', # Standard Base64 encoding 'standard_b64encode', 'standard_b64decode', # Some common Base64 alternatives. As referenced by RFC 3458, see thread # starting at: # # http://zgp.org/pipermail/p2p-hackers/2001-September/000316.html 'urlsafe_b64encode', 'urlsafe_b64decode', ] _translation = [chr(_x) for _x in range(256)] EMPTYSTRING = '' def _translate(s, altchars): translation = _translation[:] for k, v in altchars.items(): translation[ord(k)] = v #return s.translate(''.join(translation)) t = '' for c in s: t += translation[ord(c)] return t # Base64 encoding/decoding uses binascii def b64encode(s, altchars=None): """Encode a string using Base64. s is the string to encode. Optional altchars must be a string of at least length 2 (additional characters are ignored) which specifies an alternative alphabet for the '+' and '/' characters. This allows an application to e.g. generate url or filesystem safe Base64 strings. The encoded string is returned. """ # Strip off the trailing newline encoded = binascii.b2a_base64(s)[:-1] if altchars is not None: return _translate(encoded, {'+': altchars[0], '/': altchars[1]}) return encoded def b64decode(s, altchars=None): """Decode a Base64 encoded string. s is the string to decode. Optional altchars must be a string of at least length 2 (additional characters are ignored) which specifies the alternative alphabet used instead of the '+' and '/' characters. The decoded string is returned. A TypeError is raised if s were incorrectly padded or if there are non-alphabet characters present in the string. """ if altchars is not None: s = _translate(s, {altchars[0]: '+', altchars[1]: '/'}) try: return binascii.a2b_base64(s) except binascii.Error, msg: # Transform this exception for consistency raise TypeError(msg) def standard_b64encode(s): """Encode a string using the standard Base64 alphabet. s is the string to encode. The encoded string is returned. """ return b64encode(s) def standard_b64decode(s): """Decode a string encoded with the standard Base64 alphabet. s is the string to decode. The decoded string is returned. A TypeError is raised if the string is incorrectly padded or if there are non-alphabet characters present in the string. """ return b64decode(s) def urlsafe_b64encode(s): """Encode a string using a url-safe Base64 alphabet. s is the string to encode. The encoded string is returned. The alphabet uses '-' instead of '+' and '_' instead of '/'. """ return b64encode(s, '-_') def urlsafe_b64decode(s): """Decode a string encoded with the standard Base64 alphabet. s is the string to decode. The decoded string is returned. A TypeError is raised if the string is incorrectly padded or if there are non-alphabet characters present in the string. The alphabet uses '-' instead of '+' and '_' instead of '/'. """ return b64decode(s, '-_') # Base32 encoding/decoding must be done in Python _b32alphabet = { 0: 'A', 9: 'J', 18: 'S', 27: '3', 1: 'B', 10: 'K', 19: 'T', 28: '4', 2: 'C', 11: 'L', 20: 'U', 29: '5', 3: 'D', 12: 'M', 21: 'V', 30: '6', 4: 'E', 13: 'N', 22: 'W', 31: '7', 5: 'F', 14: 'O', 23: 'X', 6: 'G', 15: 'P', 24: 'Y', 7: 'H', 16: 'Q', 25: 'Z', 8: 'I', 17: 'R', 26: '2', } _b32tab = _b32alphabet.items() _b32tab.sort() _b32tab = [v for k, v in _b32tab] _b32rev = dict([(v, k) for k, v in _b32alphabet.items()]) def b32encode(s): """Encode a string using Base32. s is the string to encode. The encoded string is returned. """ parts = [] quanta, leftover = divmod(len(s), 5) # Pad the last quantum with zero bits if necessary if leftover: #s += ('\0' * (5 - leftover)) s += ("".ljust(5 - leftover, '\0')) quanta += 1 for i in range(quanta): # c1 and c2 are 16 bits wide, c3 is 8 bits wide. The intent of this # code is to process the 40 bits in units of 5 bits. So we take the 1 # leftover bit of c1 and tack it onto c2. Then we take the 2 leftover # bits of c2 and tack them onto c3. The shifts and masks are intended # to give us values of exactly 5 bits in width. c1, c2, c3 = struct.unpack('!HHB', s[i*5:(i+1)*5]) c2 += (c1 & 1) << 16 # 17 bits wide c3 += (c2 & 3) << 8 # 10 bits wide parts.extend([_b32tab[c1 >> 11], # bits 1 - 5 _b32tab[(c1 >> 6) & 0x1f], # bits 6 - 10 _b32tab[(c1 >> 1) & 0x1f], # bits 11 - 15 _b32tab[c2 >> 12], # bits 16 - 20 (1 - 5) _b32tab[(c2 >> 7) & 0x1f], # bits 21 - 25 (6 - 10) _b32tab[(c2 >> 2) & 0x1f], # bits 26 - 30 (11 - 15) _b32tab[c3 >> 5], # bits 31 - 35 (1 - 5) _b32tab[c3 & 0x1f], # bits 36 - 40 (1 - 5) ]) encoded = EMPTYSTRING.join(parts) # Adjust for any leftover partial quanta if leftover == 1: return encoded[:-6] + '======' elif leftover == 2: return encoded[:-4] + '====' elif leftover == 3: return encoded[:-3] + '===' elif leftover == 4: return encoded[:-1] + '=' return encoded def b32decode(s, casefold=False, map01=None): """Decode a Base32 encoded string. s is the string to decode. Optional casefold is a flag specifying whether a lowercase alphabet is acceptable as input. For security purposes, the default is False. RFC 3548 allows for optional mapping of the digit 0 (zero) to the letter O (oh), and for optional mapping of the digit 1 (one) to either the letter I (eye) or letter L (el). The optional argument map01 when not None, specifies which letter the digit 1 should be mapped to (when map01 is not None, the digit 0 is always mapped to the letter O). For security purposes the default is None, so that 0 and 1 are not allowed in the input. The decoded string is returned. A TypeError is raised if s were incorrectly padded or if there are non-alphabet characters present in the string. """ quanta, leftover = divmod(len(s), 8) if leftover: raise TypeError('Incorrect padding') # Handle section 2.4 zero and one mapping. The flag map01 will be either # False, or the character to map the digit 1 (one) to. It should be # either L (el) or I (eye). if map01: s = _translate(s, {'0': 'O', '1': map01}) if casefold: s = s.upper() # Strip off pad characters from the right. We need to count the pad # characters because this will tell us how many null bytes to remove from # the end of the decoded string. padchars = 0 # Replace regex code #mo = re.search('(?P<pad>[=]*)$', s) #if mo: # padchars = len(mo.group('pad')) # if padchars > 0: # s = s[:-padchars] # Replace with: padchars = len(s) s = s.rstrip('=') padchars -= len(s) # Now decode the full quanta parts = [] acc = 0 shift = 35 for c in s: val = _b32rev.get(c) if val is None: raise TypeError('Non-base32 digit found') #acc += _b32rev[c] << shift acc += _b32rev[c] * (2**shift) shift -= 5 if shift < 0: parts.append(binascii.unhexlify('%010x' % acc)) acc = 0 shift = 35 # Process the last, partial quanta last = binascii.unhexlify('%010x' % acc) if padchars == 0: last = '' # No characters elif padchars == 1: last = last[:-1] elif padchars == 3: last = last[:-2] elif padchars == 4: last = last[:-3] elif padchars == 6: last = last[:-4] else: raise TypeError('Incorrect padding') parts.append(last) return EMPTYSTRING.join(parts) # RFC 3548, Base 16 Alphabet specifies uppercase, but hexlify() returns # lowercase. The RFC also recommends against accepting input case # insensitively. def b16encode(s): """Encode a string using Base16. s is the string to encode. The encoded string is returned. """ return binascii.hexlify(s).upper() def b16decode(s, casefold=False): """Decode a Base16 encoded string. s is the string to decode. Optional casefold is a flag specifying whether a lowercase alphabet is acceptable as input. For security purposes, the default is False. The decoded string is returned. A TypeError is raised if s were incorrectly padded or if there are non-alphabet characters present in the string. """ if casefold: s = s.upper() # Replace regex code #if re.search('[^0-9A-F]', s): # raise TypeError('Non-base16 digit found') # Replace with: from __javascript__ import RegExp r = RegExp('[^0-9A-F]') if r.test(s): raise TypeError('Non-base16 digit found') return binascii.unhexlify(s) # Legacy interface. This code could be cleaned up since I don't believe # binascii has any line length limitations. It just doesn't seem worth it # though. MAXLINESIZE = 76 # Excluding the CRLF MAXBINSIZE = (MAXLINESIZE//4)*3 def encode(input, output): """Encode a file.""" raise NotImplementedError("encode: no file operations possible") #while True: # s = input.read(MAXBINSIZE) # if not s: # break # while len(s) < MAXBINSIZE: # ns = input.read(MAXBINSIZE-len(s)) # if not ns: # break # s += ns # line = binascii.b2a_base64(s) # output.write(line) def decode(input, output): """Decode a file.""" raise NotImplementedError("decode: no file operations possible") #while True: # line = input.readline() # if not line: # break # s = binascii.a2b_base64(line) # output.write(s) def encodestring(s): """Encode a string.""" pieces = [] for i in range(0, len(s), MAXBINSIZE): chunk = s[i : i + MAXBINSIZE] pieces.append(binascii.b2a_base64(chunk)) return "".join(pieces) def decodestring(s): """Decode a string.""" return binascii.a2b_base64(s) # Useable as a script... def test(): """Small test program""" import sys, getopt try: opts, args = getopt.getopt(sys.argv[1:], 'deut') except getopt.error, msg: sys.stdout = sys.stderr print msg print """usage: %s [-d|-e|-u|-t] [file|-] -d, -u: decode -e: encode (default) -t: encode and decode string 'Aladdin:open sesame'"""%sys.argv[0] sys.exit(2) func = encode for o, a in opts: if o == '-e': func = encode if o == '-d': func = decode if o == '-u': func = decode if o == '-t': test1(); return if args and args[0] != '-': func(open(args[0], 'rb'), sys.stdout) else: func(sys.stdin, sys.stdout) def test1(): s0 = "Aladdin:open sesame" s1 = encodestring(s0) s2 = decodestring(s1) print s0, repr(s1), s2 if __name__ == '__main__': test()
emk/pyjamas
pyjs/src/pyjs/lib/base64.py
Python
apache-2.0
12,139
# -*- coding: utf-8 -*- # # OpenERP Rent documentation build configuration file, created by # sphinx-quickstart on Tue Jun 21 10:14:21 2011. # # This file is execfile()d with the current directory set to its containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. import sys, os # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. #sys.path.insert(0, os.path.abspath('.')) # -- General configuration ----------------------------------------------------- # If your documentation needs a minimal Sphinx version, state it here. #needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be extensions # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. extensions = ['sphinx.ext.intersphinx', 'sphinx.ext.todo'] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix of source filenames. source_suffix = '.rst' # The encoding of source files. #source_encoding = 'utf-8-sig' # The master toctree document. master_doc = 'index' # General information about the project. project = u'OpenERP Rent' copyright = u'2011, Thibaut DIRLIK' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. version = '0.4' # The full version, including alpha/beta/rc tags. release = '0.4.0' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. #language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: #today = '' # Else, today_fmt is used as the format for a strftime call. #today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = [] # The reST default role (used for this markup: `text`) to use for all documents. #default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. #add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). #add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. #show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. #modindex_common_prefix = [] # -- Options for HTML output --------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. html_theme = 'jinja' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. #html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. html_theme_path = ['../themes'] # The name for this set of Sphinx documents. If None, it defaults to # "<project> v<release> documentation". #html_title = None # A shorter title for the navigation bar. Default is the same as html_title. #html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. #html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. #html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. #html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. #html_use_smartypants = True # Custom sidebar templates, maps document names to template names. #html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. #html_additional_pages = {} # If false, no module index is generated. #html_domain_indices = True # If false, no index is generated. #html_use_index = True # If true, the index is split into individual pages for each letter. #html_split_index = False # If true, links to the reST sources are added to the pages. #html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. #html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. #html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a <link> tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. #html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). #html_file_suffix = None # Output file base name for HTML help builder. htmlhelp_basename = 'OpenERPRentdoc' # -- Options for LaTeX output -------------------------------------------------- # The paper size ('letter' or 'a4'). #latex_paper_size = 'letter' # The font size ('10pt', '11pt' or '12pt'). #latex_font_size = '10pt' # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass [howto/manual]). latex_documents = [ ('index', 'OpenERPRent.tex', u'OpenERP Rent Documentation', u'Thibaut DIRLIK', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of # the title page. #latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. #latex_use_parts = False # If true, show page references after internal links. #latex_show_pagerefs = False # If true, show URL addresses after external links. #latex_show_urls = False # Additional stuff for the LaTeX preamble. #latex_preamble = '' # Documents to append as an appendix to all manuals. #latex_appendices = [] # If false, no module index is generated. #latex_domain_indices = True # -- Options for manual page output -------------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ ('index', 'openerprent', u'OpenERP Rent Documentation', [u'Thibaut DIRLIK'], 1) ] # Example configuration for intersphinx: refer to the Python standard library. intersphinx_mapping = {'http://docs.python.org/': None}
WE2BS/openerp-rent
docs/source/conf.py
Python
gpl-3.0
7,193
""" OAuth2 and OpenID Connect settings """ from __future__ import absolute_import, division, print_function, unicode_literals import provider.constants import provider.oauth2.forms import provider.scope from django.conf import settings # REQUIRED Issuer Identifider (the `iss` field in the `id_token`) OAUTH_OIDC_ISSUER = settings.OAUTH_OIDC_ISSUER # OAUTH/2 OpenID Connect scopes # Use bit-shifting so that scopes can be easily combined and checked. DEFAULT_SCOPE = 0 OPEN_ID_SCOPE = 1 << 0 PROFILE_SCOPE = 1 << 1 EMAIL_SCOPE = 1 << 2 COURSE_STAFF_SCOPE = 1 << 3 COURSE_INSTRUCTOR_SCOPE = 1 << 4 PERMISSIONS = 1 << 5 # Scope setting as required by django-oauth2-provider # The default scope value is SCOPES[0][0], which in this case is zero. # `django-oauth2-provider` considers a scope value of zero as empty, # ignoring its name when requested. SCOPES = ( (DEFAULT_SCOPE, 'default'), (OPEN_ID_SCOPE, 'openid'), (PROFILE_SCOPE, 'profile'), (EMAIL_SCOPE, 'email'), (COURSE_STAFF_SCOPE, 'course_staff'), (COURSE_INSTRUCTOR_SCOPE, 'course_instructor'), (PERMISSIONS, 'permissions') ) SCOPE_NAMES = [(name, name) for (value, name) in SCOPES] SCOPE_NAME_DICT = dict([(name, value) for (value, name) in SCOPES]) SCOPE_VALUE_DICT = dict([(value, name) for (value, name) in SCOPES]) # OpenID Connect claim handlers DEFAULT_ID_TOKEN_HANDLERS = ( 'oauth2_provider.oidc.handlers.BasicIDTokenHandler', 'oauth2_provider.oidc.handlers.ProfileHandler', 'oauth2_provider.oidc.handlers.EmailHandler', ) DEFAULT_USERINFO_HANDLERS = ( 'oauth2_provider.oidc.handlers.BasicUserInfoHandler', 'oauth2_provider.oidc.handlers.ProfileHandler', 'oauth2_provider.oidc.handlers.EmailHandler', ) ID_TOKEN_HANDLERS = getattr(settings, 'OAUTH_OIDC_ID_TOKEN_HANDLERS', DEFAULT_ID_TOKEN_HANDLERS) USERINFO_HANDLERS = getattr(settings, 'OAUTH_OIDC_USERINFO_HANDLERS', DEFAULT_USERINFO_HANDLERS) # Override django-oauth2-provider scopes (OAUTH_SCOPES) # # `provider.scopes` values are loaded from Django settings. However, # we don't want to rely on customizable settings, and instead use the # set of defaults above. One problem, is that in many places # `django-oauth2-provider` creates module globals with the values from # settings, making them hard to override. # # TODO: This solution is bit ugly, but viable for now. A better fix is # to make SCOPES be lazy evaluated at the django-oauth2-provider level # using django.utils.functional.lazy provider.constants.SCOPES = SCOPES provider.constants.DEFAULT_SCOPES = SCOPES provider.scope.SCOPES = SCOPES provider.scope.SCOPE_NAMES = SCOPE_NAMES provider.scope.SCOPE_NAME_DICT = SCOPE_NAME_DICT provider.scope.SCOPE_VALUE_DICT = SCOPE_VALUE_DICT provider.oauth2.forms.SCOPES = SCOPES provider.oauth2.forms.SCOPE_NAMES = SCOPE_NAMES AUTHORIZED_CLIENTS_SESSION_KEY = getattr(settings, 'OAUTH_OIDC_AUTHORIZED_CLIENTS_SESSION_KEY', 'authorized_clients')
edx/edx-oauth2-provider
edx_oauth2_provider/constants.py
Python
agpl-3.0
2,938
# encoding: utf8 from __future__ import unicode_literals # Source: collected from different resource on internet STOP_WORDS = set(""" ثھی خو گی اپٌے گئے ثہت طرف ہوبری پبئے اپٌب دوضری گیب کت گب ثھی ضے ہر پر اش دی گے لگیں ہے ثعذ ضکتے تھی اى دیب لئے والے یہ ثدبئے ضکتی تھب اًذر رریعے لگی ہوبرا ہوًے ثبہر ضکتب ًہیں تو اور رہب لگے ہوضکتب ہوں کب ہوبرے توبم کیب ایطے رہی هگر ہوضکتی ہیں کریں ہو تک کی ایک رہے هیں ہوضکتے کیطے ہوًب تت کہ ہوا آئے ضبت تھے کیوں ہو تب کے پھر ثغیر خبر ہے رکھ کی طب کوئی رریعے ثبرے خب اضطرذ ثلکہ خجکہ رکھ تب کی طرف ثراں خبر رریعہ اضکب ثٌذ خص کی لئے توہیں دوضرے کررہی اضکی ثیچ خوکہ رکھتی کیوًکہ دوًوں کر رہے خبر ہی ثرآں اضکے پچھلا خیطب رکھتے کے ثعذ تو ہی دورى کر یہبں آش تھوڑا چکے زکویہ دوضروں ضکب اوًچب ثٌب پل تھوڑی چلا خبهوظ دیتب ضکٌب اخبزت اوًچبئی ثٌبرہب پوچھب تھوڑے چلو ختن دیتی ضکی اچھب اوًچی ثٌبرہی پوچھتب تیي چلیں در دیتے ضکے اچھی اوًچے ثٌبرہے پوچھتی خبًب چلے درخبت دیر ضلطلہ اچھے اٹھبًب ثٌبًب پوچھتے خبًتب چھوٹب درخہ دیکھٌب ضوچ اختتبم اہن ثٌذ پوچھٌب خبًتی چھوٹوں درخے دیکھو ضوچب ادھر آئی ثٌذکرًب پوچھو خبًتے چھوٹی درزقیقت دیکھی ضوچتب ارد آئے ثٌذکرو پوچھوں خبًٌب چھوٹے درضت دیکھیں ضوچتی اردگرد آج ثٌذی پوچھیں خططرذ چھہ دش دیٌب ضوچتے ارکبى آخر ثڑا پورا خگہ چیسیں دفعہ دے ضوچٌب اضتعوبل آخر پہلا خگہوں زبصل دکھبئیں راضتوں ضوچو اضتعوبلات آدهی ثڑی پہلی خگہیں زبضر دکھبتب راضتہ ضوچی اغیب آًب ثڑے پہلےضی خلذی زبل دکھبتی راضتے ضوچیں اطراف آٹھ ثھر خٌبة زبل دکھبتے رکي ضیذھب افراد آیب ثھرا پہلے خواى زبلات دکھبًب رکھب ضیذھی اکثر ثب ہوا پیع خوًہی زبلیہ دکھبو رکھی ضیذھے اکٹھب ثھرپور تبزٍ خیطبکہ زصوں رکھے ضیکٌڈ اکٹھی ثبری ثہتر تر چبر زصہ دلچطپ زیبدٍ غبیذ اکٹھے ثبلا ثہتری ترتیت چبہب زصے دلچطپی ضبت غخص اکیلا ثبلترتیت ثہتریي تریي چبہٌب زقبئق دلچطپیبں ضبدٍ غذ اکیلی ثرش پبش تعذاد چبہے زقیتیں هٌبضت ضبرا غروع اکیلے ثغیر پبًب چکب زقیقت دو ضبرے غروعبت اگرچہ ثلٌذ پبًچ تن چکی زکن دور ضبل غے الگ پراًب تٌہب چکیں دوضرا ضبلوں صبف صسیر قجیلہ کوًطے لازهی هطئلے ًیب طریق کرتی کہتے صفر قطن کھولا لگتب هطبئل وار طریقوں کرتے کہٌب صورت کئی کھولٌب لگتی هطتعول وار طریقہ کرتے ہو کہٌب صورتسبل کئے کھولو لگتے هػتول ٹھیک طریقے کرًب کہو صورتوں کبفی هطلق ڈھوًڈا طور کرو کہوں صورتیں کبم کھولیں لگی هعلوم ڈھوًڈلیب طورپر کریں کہی ضرور کجھی کھولے لگے هکول ڈھوًڈًب ظبہر کرے کہیں ضرورت کرا کہب لوجب هلا ڈھوًڈو عذد کل کہیں کرتب کہتب لوجی هوکي ڈھوًڈی عظین کن کہے ضروری کرتبہوں کہتی لوجے هوکٌبت ڈھوًڈیں علاقوں کوتر کیے لوسبت هوکٌہ ہن لے ًبپطٌذ ہورہے علاقہ کورا کے رریعے لوسہ هڑا ہوئی هتعلق ًبگسیر ہوگئی علاقے کوروں گئی لو هڑًب ہوئے هسترم ًطجت ہو گئے علاوٍ کورٍ گرد لوگ هڑے ہوتی هسترهہ ًقطہ ہوگیب کورے گروپ لوگوں هہرثبى ہوتے هسطوش ًکبلٌب ہوًی عووهی کوطي گروٍ لڑکپي هیرا ہوچکب هختلف ًکتہ ہی فرد کوى گروہوں لی هیری ہوچکی هسیذ فی کوًطب گٌتی لیب هیرے ہوچکے هطئلہ ًوخواى یقیٌی قجل کوًطی لیٌب ًئی ہورہب لیں ًئے ہورہی ثبعث ضت """.split())
aikramer2/spaCy
spacy/lang/ur/stop_words.py
Python
mit
5,898
from lacore.source.chunked import ChunkedFile as BaseChunkedFile class ChunkedFile(BaseChunkedFile): maxchunk = 20971520
longaccess/longaccess-client
lacli/source/chunked.py
Python
apache-2.0
127
from unittest import TestCase from scrapy.contrib.spidermiddleware.depth import DepthMiddleware from scrapy.http import Response, Request from scrapy.spider import Spider from scrapy.statscol import StatsCollector from scrapy.utils.test import get_crawler class TestDepthMiddleware(TestCase): def setUp(self): self.spider = Spider('scrapytest.org') self.stats = StatsCollector(get_crawler()) self.stats.open_spider(self.spider) self.mw = DepthMiddleware(1, self.stats, True) def test_process_spider_output(self): req = Request('http://scrapytest.org') resp = Response('http://scrapytest.org') resp.request = req result = [Request('http://scrapytest.org')] out = list(self.mw.process_spider_output(resp, result, self.spider)) self.assertEquals(out, result) rdc = self.stats.get_value('request_depth_count/1', spider=self.spider) self.assertEquals(rdc, 1) req.meta['depth'] = 1 out2 = list(self.mw.process_spider_output(resp, result, self.spider)) self.assertEquals(out2, []) rdm = self.stats.get_value('request_depth_max', spider=self.spider) self.assertEquals(rdm, 1) def tearDown(self): self.stats.close_spider(self.spider, '')
ofanoyi/scrapy
scrapy/tests/test_spidermiddleware_depth.py
Python
bsd-3-clause
1,300
# -*- coding: utf-8 -*- from __future__ import absolute_import, print_function, unicode_literals from rest_framework import serializers from s_analyzer.apps.analyzers.machine_learning.models import SecurityDailyMachineLearningPrediction from s_analyzer.apps.analyzers.moving_average.models import SecurityDailyMovingAverage, SecurityDailyMovingAveragePeriod from s_analyzer.apps.market_data.models import Security, SecurityDailyData class SecuritySerializer(serializers.ModelSerializer): class Meta: model = Security fields = '__all__' class SecurityDailyDataSerializer(serializers.ModelSerializer): security = SecuritySerializer() class Meta: model = SecurityDailyData fields = '__all__' class SecurityDailyDataLiteSerializer(serializers.ModelSerializer): class Meta: model = SecurityDailyData fields = '__all__' def to_representation(self, obj): return int(obj.date.strftime("%s")) * 1000, obj.adjusted_closing_price class SecurityDailyMovingAveragePeriodSerializer(serializers.ModelSerializer): security = SecuritySerializer() class Meta: model = SecurityDailyMovingAveragePeriod fields = '__all__' class SecurityDailyMovingAverageSerializer(serializers.ModelSerializer): period = SecurityDailyMovingAveragePeriodSerializer() class Meta: model = SecurityDailyMovingAverage fields = '__all__' class SecurityDailyMovingAverageLiteSerializer(serializers.ModelSerializer): class Meta: model = SecurityDailyMovingAverage fields = '__all__' def to_representation(self, obj): return int(obj.date.strftime("%s")) * 1000, obj.average class SecurityDailyMachineLearningPredictionSerializer(serializers.ModelSerializer): period = SecurityDailyMovingAveragePeriodSerializer() class Meta: model = SecurityDailyMachineLearningPrediction fields = '__all__' class SecurityDailyMachineLearningPredictionLiteSerializer(serializers.ModelSerializer): class Meta: model = SecurityDailyMachineLearningPrediction fields = '__all__' def to_representation(self, obj): return int(obj.date.strftime("%s")) * 1000, obj.prediction
iaga84/securities-analyzer
src/s_analyzer/apps/rest/serializers.py
Python
mit
2,242
import os import time import signal import py.test from ..scheduler import TaskFinished, TaskSleep, TaskPause, TaskCancel from ..scheduler import Scheduler, Task from ..scheduler import PeriodicTask, ProcessTask, PidTask, GroupTask THIS_PATH = os.path.dirname(os.path.abspath(__file__)) SAMPLE_PROC = os.path.join(THIS_PATH, 'sample1.py') class MockTime(object): def __init__(self): self.current = 1000 def time(self): return self.current def sleep(self, delay): self.current += delay class TestTask(object): def test_tid(self): t1 = Task(lambda :None) t2 = Task(lambda :None) assert (t1.tid + 1) == t2.tid def test_str(self): t1 = Task(lambda :None) assert "Task" in str(t1) assert str(t1.tid) in str(t1) def test_cmp(self): t1 = Task(lambda :None) t2 = Task(lambda :None) # None None assert not (t1 < t2) assert not (t1 > t2) # 3 None t1.scheduled = 3 assert not (t1 < t2) assert (t1 > t2) # 3 3.5 t2.scheduled = 5 assert (t1 < t2) assert not (t1 > t2) def test_run_function(self): t1 = Task(lambda :None) assert isinstance(t1.run_iteration(), TaskFinished) def test_run_coroutine(self): t1 = Task(lambda : (yield)) assert t1.run_iteration() is None assert isinstance(t1.run_iteration(), TaskFinished) def test_cancel(self): t1 = Task(lambda :(yield)) t1.cancelled = True assert isinstance(t1.run_iteration(), TaskFinished) class TestPeriodicTask(object): def test_run(self, monkeypatch): mytime = MockTime() monkeypatch.setattr(time, 'time', mytime.time) periodic = PeriodicTask(10, Task, [lambda :None]) got = periodic.run_iteration() assert isinstance(got[0], Task) assert got[0].scheduled is None assert (mytime.current + 10) == periodic.scheduled assert isinstance(got[1], TaskSleep) class TestProcessTask(object): def test_str(self): t1 = ProcessTask(['python', 'xxx.py']) assert "ProcessTask" in str(t1) assert str(t1.tid) in str(t1) assert "python xxx" in str(t1) def test_run(self): t1 = ProcessTask(['python', SAMPLE_PROC, '0']) # not started yet assert t1.proc is None # first run starts the process got = t1.run_iteration() assert isinstance(got[0], TaskPause) while(t1.proc.poll() is None): time.sleep(0.02) # magic number :) assert "" == t1.outdata.getvalue() # second run does data post-processing, and finishes task assert isinstance(t1.run_iteration(), TaskFinished) assert 0 == t1.proc.returncode assert "done" == t1.outdata.getvalue().strip() def test_timeout(self, monkeypatch): mytime = MockTime() monkeypatch.setattr(time, 'time', mytime.time) timeout = 30 t1 = ProcessTask(['python', SAMPLE_PROC, '0'], 30) # not started yet assert t1.proc is None # first run starts the process got = t1.run_iteration() assert isinstance(got[1], Task) assert got[1].scheduled == (time.time() + timeout) # cancel timeout task got2 = t1.run_iteration() assert isinstance(got2, TaskCancel) def test_terminate(self): t1 = ProcessTask(['python', SAMPLE_PROC, '5']) t1.run_iteration() t1.terminate() while(t1.proc.poll() is None): time.sleep(0.02) # magic number :) # terminating the process does not cancel its post processing t1.run_iteration() assert -15 == t1.proc.returncode def test_terminate2(self): t1 = ProcessTask(['python', SAMPLE_PROC, '5']) t1.run_iteration() t1.terminate() while(t1.proc.poll() is None): time.sleep(0.02) t1.terminate() def test_terminate_exception(self, monkeypatch): # ignore errors trying to terminate a process that is gone t1 = ProcessTask(['python', SAMPLE_PROC, '5']) t1.run_iteration() t1.terminate() while(t1.proc.poll() is None): time.sleep(0.02) t1.proc.returncode = None #force kill signal to be executed t1.terminate() def test_kill(self): t1 = ProcessTask(['python', SAMPLE_PROC, '5']) t1.run_iteration() t1.kill() while(t1.proc.poll() is None): time.sleep(0.02) # magic number :) # terminating the process does not cancel its post processing t1.run_iteration() assert -9 == t1.proc.returncode def test_watchdog(self): t1 = ProcessTask(['python', SAMPLE_PROC, '5']) signal_received = [] t1.terminate = lambda :signal_received.append('term') t1.kill = lambda :signal_received.append('kill') watchdog = Task(t1._watchdog) got = watchdog.run_iteration() assert isinstance(got, TaskSleep) assert ['term'] == signal_received got2 = watchdog.run_iteration() assert isinstance(got2, TaskFinished) assert ['term', 'kill'] == signal_received def test_get_returncode(self, monkeypatch): monkeypatch.setattr(os, 'waitpid', lambda pid, opt: (pid, 0)) t1 = ProcessTask(['python', SAMPLE_PROC, '0']) # not started assert None == t1.get_returncode() t1.run_iteration() # done assert 0 == t1.get_returncode() # get only once assert None == t1.get_returncode() def test_get_returncode_exception(self, monkeypatch): # raised expection def do_raise(pid, opt): raise OSError() monkeypatch.setattr(os, 'waitpid', do_raise) t1 = ProcessTask(['python', SAMPLE_PROC, '0']) t1.run_iteration() assert None == t1.get_returncode() class TestPidTask(object): def pytest_funcarg__fake_sched(self, request): def fake_sched(): class Empty(object): def ready_task(self, task): self.ready.append(task) fake_sched = Empty() fake_sched.tasks = {} fake_sched.ready = [] finished = ProcessTask(['xxx']) finished._started = True finished.get_returncode = lambda :0 not_finished = ProcessTask(['xxx']) not_finished._started = True not_finished.get_returncode = lambda :None fake_sched.tasks['1'] = Task(lambda :None) fake_sched.tasks['2'] = finished fake_sched.tasks['3'] = not_finished return fake_sched return request.cached_setup(setup=fake_sched, scope="function") def test_str(self, fake_sched): t1 = PidTask(fake_sched) assert "PidTask" in str(t1) assert str(t1.tid) in str(t1) def test_run(self, fake_sched): t1 = PidTask(fake_sched) assert isinstance(t1.run_iteration(), TaskFinished) assert [fake_sched.tasks['2']] == t1.sched.ready class TestGroupTask(object): def test_run(self): t1 = Task(lambda :None) t2 = Task(lambda :None) tg = GroupTask([t1, t2]) # run & wait t1 got = tg.run_iteration() assert t1 == got[0] assert t1.tid == got[1].tid assert isinstance(got[1], TaskPause) # run & wait t2 got = tg.run_iteration() assert t2 == got[0] assert t2.tid == got[1].tid assert isinstance(got[1], TaskPause) # done got = tg.run_iteration() assert isinstance(got, TaskFinished) # ################################# Scheduler def pytest_funcarg__sched(request): # scheduler with fake(controlled) time functions def fake_sched(): return Scheduler(False) return request.cached_setup(setup=fake_sched, scope="function") class TestScheduler(object): def test_child_terminate(self): sched = Scheduler() assert 0 == len(sched.tasks) os.kill(os.getpid(), signal.SIGCHLD) assert 1 == len(sched.tasks) assert isinstance(sched.tasks.values()[0], PidTask) # restore default signal.signal(signal.SIGCHLD, signal.SIG_DFL) os.kill(os.getpid(), signal.SIGCHLD) assert 1 == len(sched.tasks) def test_add_task_ready(self, sched): t1 = Task(lambda :None) sched.add_task(t1) assert 1 == len(sched.tasks) assert t1 == sched.tasks[t1.tid] assert 1 == len(sched.ready) assert 0 == len(sched.waiting) def test_add_task_scheduled_delay(self, sched): tasks = [Task(lambda :None) for i in range(3)] sched.add_task(tasks[0], 20) sched.add_task(tasks[1], 1) sched.add_task(tasks[2], 10) assert 3 == len(sched.tasks) assert 0 == len(sched.ready) assert 3 == len(sched.waiting) # first element of waitng must be the next to be executed assert tasks[1] == sched.waiting[0] def test_add_task_scheduled_timestamp(self, sched): t1 = Task((lambda :None), scheduled=300) sched.add_task(t1) assert 1 == len(sched.tasks) assert 0 == len(sched.ready) assert 1 == len(sched.waiting) assert t1 == sched.waiting[0] def test_add_task_not_ready(self, sched): t1 = Task((lambda :None)) sched.add_task(t1, -1) assert 1 == len(sched.tasks) assert 0 == len(sched.ready) assert 0 == len(sched.waiting) def test_task_ready(self, sched): t1 = Task(lambda :None) sched.add_task(t1, -1) assert 0 == len(sched.ready) sched.ready_task(t1) assert 1 == len(sched.ready) sched.ready_task(t1) assert 1 == len(sched.ready) def test_run_task_Task(self, sched): t2 = Task(lambda :None) t1 = Task(lambda :(yield t2)) sched.add_task(t1) assert 1 == len(sched.tasks) assert 1 == len(sched.ready) assert 0 == len(sched.waiting) sched.run_task(sched.ready.popleft()) #t1 assert 2 == len(sched.tasks) assert 2 == len(sched.ready) # rescheduled t1 + t2 assert 0 == len(sched.waiting) def test_run_task_Sleep(self, sched): t1 = Task(lambda : (yield TaskSleep())) sched.add_task(t1) assert 1 == len(sched.tasks) assert 1 == len(sched.ready) assert 0 == len(sched.waiting) sched.run_task(sched.ready.popleft()) # t1 assert 1 == len(sched.tasks) assert 0 == len(sched.ready) assert 1 == len(sched.waiting) def test_run_task_Sleep_with_delay(self, sched, monkeypatch): mytime = MockTime() monkeypatch.setattr(time, 'time', mytime.time) t1 = Task(lambda : (yield TaskSleep(5))) sched.add_task(t1) assert 1 == len(sched.tasks) assert 1 == len(sched.ready) assert 0 == len(sched.waiting) sched.run_task(sched.ready.popleft()) #t1 assert 1 == len(sched.tasks) assert 0 == len(sched.ready) assert 1 == len(sched.waiting) assert (mytime.current + 5) == t1.scheduled def test_run_task_Pause(self, sched): t1 = Task(lambda :(yield TaskPause())) sched.add_task(t1) assert 1 == len(sched.tasks) assert 1 == len(sched.ready) assert 0 == len(sched.waiting) sched.run_task(t1) assert 1 == len(sched.tasks) assert 1 == len(sched.ready) assert 0 == len(sched.waiting) def test_run_task_Pause_depends(self, sched): t1 = Task(lambda : None) t2 = Task(lambda :(yield TaskPause(t1.tid))) sched.add_task(t1, -1) sched.add_task(t2, -1) sched.run_task(t2) assert [t2.tid] == t1.dependents assert 0 == len(sched.ready) # running t1 puts t2 as ready sched.run_task(t1) assert t1 == sched.ready[0] def test_run_task_Finish(self, sched): t1 = Task(lambda :None) sched.add_task(t1) assert 1 == len(sched.tasks) assert 1 == len(sched.ready) assert 0 == len(sched.waiting) sched.run_task(t1) assert 0 == len(sched.tasks) assert 1 == len(sched.ready) assert 0 == len(sched.waiting) def test_run_task_Cancelled(self, sched): t1 = Task(lambda :None) t2 = Task(lambda : (yield TaskCancel(t1.tid))) sched.add_task(t2) sched.add_task(t1) assert 2 == len(sched.tasks) assert 2 == len(sched.ready) assert 0 == len(sched.waiting) sched.run_task(sched.ready.popleft()) # t2 assert 2 == len(sched.tasks) assert 2== len(sched.ready) assert 0 == len(sched.waiting) assert t1.cancelled def test_run_task_Error(self, sched): # yield a class instead of instance t1 = Task(lambda : (yield TaskPause)) sched.add_task(t1) py.test.raises(Exception, sched.run_task, t1) def test_lock(self, sched): t1 = Task((lambda: (yield)), lock="lock_x") t2 = Task((lambda: (yield)), lock="lock_x") sched.add_task(t1) sched.add_task(t2) assert 0 == len(sched.locks) # t1 locks and starts sched.run_task(sched.ready.popleft()) # t1 assert 0 == len(sched.locks["lock_x"]) assert t1._started assert 2 == len(sched.ready) # t2 is locked / not started sched.run_task(sched.ready.popleft()) #t2 assert t2 == sched.locks["lock_x"][0] assert not t2._started assert 1 == len(sched.ready) # t1 finishes / free t2 sched.run_task(sched.ready.popleft()) # t1 assert "lock_x" not in sched.locks assert 1 == len(sched.ready) assert t2 == sched.ready[0] class TestSchedulerPool(object): def test_iteration_execute_one(self, sched): sched.add_task(Task(lambda :None)) sched.add_task(Task(lambda :None)) sched.loop_iteration() assert 1 == len(sched.ready) def test_iteration_waiting(self, sched, monkeypatch): mytime = MockTime() monkeypatch.setattr(time, 'time', mytime.time) t1 = Task(lambda :None) t2 = Task(lambda :None) t3 = Task(lambda :None) sched.add_task(t1) sched.add_task(t2, 40) sched.add_task(t3, 30) mytime.current += 35 sched.loop_iteration() assert t3 == sched.ready[0] assert t2 == sched.waiting[0] def test_iteration_sleep(self, sched, monkeypatch): mytime = MockTime() monkeypatch.setattr(time, 'time', mytime.time) monkeypatch.setattr(time, 'sleep', mytime.sleep) t1 = Task(Task(lambda :None)) mytime.current = 100 sched.add_task(t1, 40) sched.loop_iteration() assert 140 == mytime.current def test_loop_no_tasks(self, sched): def not_executed(): raise Exception('this must not be executed') sched.loop_iteration = not_executed sched.loop() # nothing raised ok def test_loop_with_tasks(self, sched): count = [] # count how many tasks were executed def count_run(): count.append(1) t1 = Task(count_run) t2 = Task(count_run) sched.add_task(t1) sched.add_task(t2) sched.loop() assert 2 == len(count)
schettino72/serveronduty
sodd/tests/test_scheduler.py
Python
mit
15,515
""" Given an array of words and a length L, format the text such that each line has exactly L characters and is fully (left and right) justified. You should pack your words in a greedy approach; that is, pack as many words as you can in each line. Pad extra spaces ' ' when necessary so that each line has exactly L characters. Extra spaces between words should be distributed as evenly as possible. If the number of spaces on a line do not divide evenly between words, the empty slots on the left will be assigned more spaces than the slots on the right. For the last line of text, it should be left justified and no extra space is inserted between words. For example, words: ["This", "is", "an", "example", "of", "text", "justification."] L: 16. Return the formatted lines as: [ "This is an", "example of text", "justification. " ] Note: Each word is guaranteed not to exceed L in length. click to show corner cases. Corner Cases: A line other than the last line might contain only one word. What should you do in this case? In this case, that line should be left-justified. """ class Solution(object): def fullJustify(self, words, maxWidth): """ :type words: List[str] :type maxWidth: int :rtype: List[str] """ i, n, L = 0, len(words), maxWidth ans, cur, curL = [], [], 0 for w in words: if curL + len(cur) + len(w) > L: insertion = (len(cur) -1 ) or 1 for i in range(L - curL): cur[i % insertion] += ' ' ans.append(''.join(cur)) cur, curL = [], 0 cur += [w] curL += len(w) return ans + [" ".join(cur) + (L-curL-len(cur)+1) * ' ']
dichen001/Go4Jobs
JackChen/Facebook/68. Text Justification.py
Python
gpl-3.0
1,757
# Playhouse: Making buildings into interactive displays using remotely controllable lights. # Copyright (C) 2014 John Eriksson, Arvid Fahlström Myrman, Jonas Höglund, # Hannes Leskelä, Christian Lidström, Mattias Palo, # Markus Videll, Tomas Wickman, Emil Öhman. # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. import tornado.ioloop import tornado.web import traceback import lightgames import manager password = None class GetException(Exception): def __init__(self, msg, response): super().__init__() self.msg = msg self.response = response class ConfigException(Exception): def __init__(self, msg): super().__init__() self.msg = msg def add_auth_cookie(headers): if manager.light_cookie and 'user' in manager.light_cookie: headers['Cookie'] = manager.light_cookie['user'].output(attrs=[], header='') return headers def get_data(client, path): print("GET %s" % path) try: client.request("GET", path, headers=add_auth_cookie({})) except ConnectionRefusedError as e: print("ConnectionRefusedError: [Errno 111] Connection refused") raise ConfigException(e) json = client.getresponse().read().decode() print("GET %s response:" % path, json) try: response = tornado.escape.json_decode(json) except ValueError: print("ValueError: Did not get json from server when requesting %s" % path) print(json) raise GetException("<p>Did not get json from server. Is the IP and port correct? Check the output in console</p>", json) else: if response.get('state',None) == 'success': return response else: raise GetException("<p>Unexpected answer from lamp-server.</p>" + "<p>" + str(response) + "</p>" + "<p>Expected 'state':'success'</p>", json) def update_config(cur_cfg, new_cfg, key): if key in new_cfg and cur_cfg[key] != new_cfg[key]: cur_cfg[key] = new_cfg[key] return True return False def use_statusmessage(func): def new_func(self, *args, **kwargs): tvars = { 'status': self.get_argument('status', 'message'), 'message': self.get_argument('msg', '') } return func(self, tvars, *args, **kwargs) return new_func class RequestHandler(tornado.web.RequestHandler): def get_current_user(self): if password is None: return "disabled" user_json = self.get_secure_cookie("user") if user_json: return tornado.escape.json_decode(user_json) else: return None class ConfigHandler(tornado.web.RequestHandler): def get(self): self.redirect("/config/setup") class ConfigLoginHandler(tornado.web.RequestHandler): @use_statusmessage @tornado.web.removeslash def get(self, tvars): tvars['next'] = self.get_argument("next", "/config") self.render('config_login.html', **tvars) def post(self): user = self.get_argument("username") pwd = self.get_argument("password",None) if user == 'admin' and pwd == password: self.set_current_user(user) self.redirect(self.get_argument("next")) else: self.set_current_user(None) self.redirect("login?next=%s&status=error&msg=%s" % ( self.get_argument("next"), "Wrong username or password")) def set_current_user(self, user): if user: print("User %s logged in" % user) self.set_secure_cookie("user", tornado.escape.json_encode(user), expires_days=None) else: self.clear_cookie("user") class SetupConfigHandler(RequestHandler): @use_statusmessage @tornado.web.removeslash @tornado.web.authenticated def get(self, tvars): def config(key): if key in manager.config: return manager.config[key] return None tvars['config'] = config tvars['connection_status'] = manager.client_status self.render("config_setup.html", **tvars) @tornado.web.authenticated def post(self): print('POST', self.request.body) cfg = {} for key in self.request.arguments.keys(): cfg[key] = self.get_argument(key) cfg['lampport'] = int(cfg['lampport']) cfg['serverport'] = int(cfg['serverport']) cfg['configport'] = int(cfg['configport']) status = "message" msg = "Setup saved" if any([update_config(manager.config, cfg, x) for x in ['lampdest', 'lampport']]): manager.client = manager.connect_lampserver() try: manager.fetch_grid_size() manager.client_status = "connected" msg = "Connected to lampserver" except: traceback.print_exc() manager.client_status = "error" status = "error" msg = "Failed to connect to lampserver" if update_config(manager.config, cfg, 'serverport'): msg = 'Web server port change requires a restart' if update_config(manager.config, cfg, 'configport'): msg = 'Web server port change requires a restart' if manager.config['serverport'] == manager.config['configport']: msg = 'Warning: Game server port and config server port are the same' status = 'error' update_config(manager.config, cfg, 'stream_embedcode') if any([update_config(manager.config['idle'], cfg, x) for x in ['animation_directory', 'cycle_interval', 'transition_time', 'color_off']]): msg = 'Idle animation changed, requires reloading the game' manager.save_config() self.redirect("setup?status=%s&msg=%s" % (status, msg)) class BridgeConfigHandler(RequestHandler): bridges = {} @use_statusmessage @tornado.web.removeslash @tornado.web.authenticated def get(self, tvars): if BridgeConfigHandler.bridges == {}: try: client = manager.connect_lampserver() response = get_data(client, "/bridges") BridgeConfigHandler.bridges = response['bridges'] except ConfigException as e: tvars['status'] = "error" tvars['message'] = e.msg except GetException as e: self.write(e.msg) return tvars['bridges'] = BridgeConfigHandler.bridges self.render('config_bridges.html', **tvars) @tornado.web.authenticated def post(self): print('POST', self.request.arguments) client = manager.connect_lampserver() headers = add_auth_cookie({'Content-Type': 'application/json'}) data = self.request.arguments if 'identify' in data: if not 'select' in data: self.redirect("bridges?status=message&msg=%s" % "You need to select a bridge.") request = {'alert': 'select'} request = tornado.escape.json_encode(request) for mac in data['select']: print("Identify POST:", "/bridges/"+mac.decode('utf-8')+"/lights/all", request) client.request("POST", "/bridges/"+mac.decode('utf-8')+"/lights/all", request, headers) response = client.getresponse().read().decode() print('Identify response:', response) response = tornado.escape.json_decode(response) if not response['state'] == 'success': print('Error when blinking', mac, response) break else: self.redirect('bridges') self.redirect("bridges?status=error&msg=%s" % response['errormessage'].capitalize()) elif 'add' in data: # Remove unneccesary whitespace and decode to utf-8 data['ip'] = data['ip'][0].strip().decode() if data['ip'] != '': request = {'ip': data['ip']} print('Add bridge:', request) json = tornado.escape.json_encode(request) client.request("POST", "/bridges/add", json, headers) response = client.getresponse().read().decode() response = tornado.escape.json_decode(response) if response['state'] == 'success': BridgeConfigHandler.bridges.update(response['bridges']) print('Added bridge:', response['bridges']) self.redirect('bridges') else: print("ERROR!", response) self.redirect("bridges?status=error&msg=%s" % response['errormessage'].capitalize()) else: print('No IP specified') self.redirect('bridges') elif 'remove' in data: if not 'select' in data: self.redirect("bridges?status=message&msg=%s" % "You need to select a bridge.") for mac in data['select']: print('Remove bridge', mac.decode()) client.request("DELETE", "/bridges/"+mac.decode(), {}, headers) response = client.getresponse().read().decode() print('Remove response:', response) response = tornado.escape.json_decode(response) if response['state'] == 'success': del BridgeConfigHandler.bridges[mac.decode()] self.redirect("bridges") else: print('Could not remove bridge.') print(response['errorcode'], response['errormessage']) self.redirect("bridges?status=error&msg=%s" % response['errormessage'].capitalize()) # Set bridges to None, to force it to get them in get() elif 'refresh' in data: BridgeConfigHandler.bridges = {} self.redirect('bridges') elif 'newUsername' in data and 'mac' in data: mac = data['mac'][0].decode() print("New username to", mac) client.request( "POST", "/bridges/" + mac + "/adduser", tornado.escape.json_encode({}), headers ) response = client.getresponse().read().decode() response = tornado.escape.json_decode(response) print(response) if response['state'] == 'success': BridgeConfigHandler.bridges[mac]['username'] = response['username'] BridgeConfigHandler.bridges[mac]['valid_username'] = response['valid_username'] if not response['valid_username']: BridgeConfigHandler.bridges[mac]['lights'] = -1 #self.write({'state': 'success'}) #self.redirect('bridges') else: print(response['errorcode'], response['errormessage']) #self.redirect("bridges?status=error&msg=%s" % response['errormessage'].capitalize()) self.write(response) elif 'search' in data: print('Search') request = tornado.escape.json_encode({'auto_add': True}) client.request('POST', '/bridges/search', request, headers) response = client.getresponse().read().decode() print(response) response = tornado.escape.json_decode(response) if response['state'] == 'success': self.redirect("bridges?status=message&msg=%s" % "Server begun searching, refresh bridges (using the button) after 20 s.") else: print(response['errorcode'], response['errormessage']) self.redirect("bridges?status=error&msg=%s" % response['errormessage'].capitalize()) else: print('Unknown request. What did you do?') self.redirect("bridges?status=message&msg=%s" % "Unknown request.") class GridConfigHandler(RequestHandler): grid = None bridges = None skipped = None activated = None changed = False def get_lights(self, client): if GridConfigHandler.bridges is None: response = get_data(client, '/bridges') GridConfigHandler.bridges = response['bridges'] bridges = GridConfigHandler.bridges lights = [] for mac in bridges: for light in range(bridges[mac]['lights']): lights.append({'mac':mac, 'lamp':light+1}) return lights def sendRequest(self, light, change): headers = add_auth_cookie({'Content-Type': 'application/json'}) request = tornado.escape.json_encode( [{'light' : light['lamp'], 'change': change}] ) print(">>> POST:", "/bridges/%s/lights" % light['mac'], request) manager.client.request("POST", "/bridges/%s/lights" % light['mac'], request, headers) response = manager.client.getresponse().read().decode() print('POST response:', response) @use_statusmessage @tornado.web.removeslash @tornado.web.authenticated def get(self, tvars): client = manager.connect_lampserver() tvars['activated'] = '' tvars['lamp'] = '' tvars['json_encode'] = tornado.escape.json_encode tvars['changed'] = GridConfigHandler.changed try: if GridConfigHandler.grid is None: response = get_data(client, '/grid') GridConfigHandler.grid = { k: response[k] for k in ('width', 'height', 'grid') } GridConfigHandler.skipped = [] grid = GridConfigHandler.grid lights = self.get_lights(client) except ConfigException as e: tvars['status'] = "error" tvars['message'] = e.msg tvars['skipped'] = [] tvars['grid'] = { 'width':0, 'height':0, 'grid':[] } self.render('config_grid.html', **tvars) return except GetException as e: self.write(e.msg) return ingrid = [cell for row in grid['grid'] for cell in row if cell != None] free = [c for c in lights if c not in ingrid and c not in GridConfigHandler.skipped] invalid = [c for c in ingrid if c not in lights] if len(free) > 0 and not all([all(row) for row in grid['grid']]): if GridConfigHandler.activated is None: # choose and activate one of the free lights GridConfigHandler.activated = free[0] choosen = GridConfigHandler.activated tvars['activated'] = tornado.escape.json_encode(choosen) tvars['lamp'] = choosen # set color to white self.sendRequest(choosen, {'on':True, 'sat':0, 'hue':0, 'bri':255}) tvars['free'] = free tvars['invalid'] = invalid tvars['skipped'] = GridConfigHandler.skipped tvars['grid'] = GridConfigHandler.grid self.render('config_grid.html', **tvars) @tornado.web.authenticated def post(self): headers = add_auth_cookie({'Content-Type': 'application/json'}) args = self.request.arguments status,msg = ('message','') load_game = None if 'changesize' in args: if GridConfigHandler.grid is not None: size = self.get_argument('grid_size').split('x') if len(size) == 2 and size[0].isdigit() and size[1].isdigit(): w, h = int(size[0]), int(size[1]) newgrid = [[None for _ in range(w)] for _ in range(h)] GridConfigHandler.grid['grid'] = newgrid GridConfigHandler.grid['width'] = w GridConfigHandler.grid['height'] = h msg = "Grid size changed to %dx%d" % (w,h) print(msg) GridConfigHandler.changed = True else: status,msg = ('error','Invalid size') elif 'clear' in args: if GridConfigHandler.grid is not None: w = GridConfigHandler.grid['width'] h = GridConfigHandler.grid['height'] newgrid = [[None for _ in range(w)] for _ in range(h)] GridConfigHandler.grid['grid'] = newgrid msg = "Grid cleared" GridConfigHandler.activated = None GridConfigHandler.changed = True elif 'placelamp' in args: coords = self.get_argument('coords').split(',') if GridConfigHandler.grid is not None and \ len(coords) == 2 and coords[0].isdigit() and coords[1].isdigit(): x,y = int(coords[0]), int(coords[1]) if y >= GridConfigHandler.grid['height'] or \ x >= GridConfigHandler.grid['width']: status,msg = ('error','Invalid position') else: if GridConfigHandler.grid['grid'][y][x] != None: lamp = GridConfigHandler.grid['grid'][y][x] GridConfigHandler.grid['grid'][y][x] = None # set color to red self.sendRequest(lamp, {'on':True, 'sat':255, 'hue':0, 'bri':255}) print('Lamp removed from %s' % coords) msg = 'Lamp removed from %d,%d' % (x,y) GridConfigHandler.changed = True elif self.get_argument('lamp') == '': status,msg = ('error','No activated lamp') else: GridConfigHandler.activated = None try: lamp = tornado.escape.json_decode( self.get_argument('lamp')) GridConfigHandler.grid['grid'][y][x] = lamp # set color to blue self.sendRequest(lamp, {'on':True, 'sat':255, 'hue':45000, 'bri':255}) print('Lamp %s placed at %s' % (lamp, coords)) msg = 'Lamp placed at %d,%d' % (x,y) GridConfigHandler.changed = True except ValueError: status,msg = ('error','Invalid lamp') else: status,msg = ('error','Invalid position') elif 'skip' in args: skip_name = self.get_argument('skip_name', 'skip') try: if skip_name == 'skip': GridConfigHandler.activated = None lamp = tornado.escape.json_decode( self.get_argument('lamp')) GridConfigHandler.skipped.append(lamp) self.sendRequest(lamp, {'on':False}) # turn off msg = 'Skipped lamp %s%%23%s' % (lamp['mac'], lamp['lamp']) else: skip_data = skip_name.split('#') skip_lamp = { 'mac': skip_data[0], 'lamp':int(skip_data[1]) } GridConfigHandler.skipped.remove(skip_lamp) msg = 'Unkipped lamp %s%%23%s' % (skip_lamp['mac'], skip_lamp['lamp']) except ValueError: status,msg = ('error','Invalid lamp') elif 'save' in args: request = tornado.escape.json_encode( GridConfigHandler.grid['grid'] ) print(">>> POST:", "/grid", request) manager.client.request('POST', '/grid', request, headers) response = manager.client.getresponse().read().decode() response = tornado.escape.json_decode(response) print('POST response:', response) if response['state'] == 'success': msg = 'Grid saved' manager.grid['width'] = GridConfigHandler.grid['width'] manager.grid['height'] = GridConfigHandler.grid['height'] GridConfigHandler.changed = False else: status,msg = ('error','Saving failed!') elif 'refresh' in args: GridConfigHandler.grid = None GridConfigHandler.bridges = None GridConfigHandler.activated = None GridConfigHandler.changed = False elif 'off' in args: load_game = 'off' elif 'test' in args: load_game = 'diagnostics' else: status,msg = ('error','Unknown request') if load_game: manager.config['game_name'] = load_game try: manager.load_game() msg = 'Game changed to: %s' % load_game except Exception as e: msg = 'Loading failed: %s' % e status = 'error' traceback.print_exc() self.redirect('grid?status=%s&msg=%s' % (status,msg)) class GameConfigHandler(RequestHandler): @use_statusmessage @tornado.web.removeslash @tornado.web.authenticated def get(self, tvars): tvars.update({ 'config_file': lightgames.Game.config_file, 'game_name': manager.config['game_name'], 'game_list': lightgames.get_games(manager.config['game_path']) }) tvars.update(lightgames.Game.template_vars) # Game defaults if manager.game != None: tvars.update(manager.game.template_vars) tvars['config_file'] = manager.game.config_file if 'title' not in tvars: tvars['title'] = tvars.get('module_name', "Untitled game") tvars['vars'] = tvars self.render('config_game.html', **tvars) @tornado.web.authenticated def post(self): backup = manager.config.copy() cfg = {} for key in self.request.arguments.keys(): cfg[key] = self.get_argument(key) print("Config: %s" % cfg) cfg['files'] = self.request.files load_game = False update_config(manager.config, cfg, 'game_name') if 'game_name' in cfg: load_game = True status = "message" if load_game: print("Changing or restarting game") try: manager.client = manager.connect_lampserver() manager.load_game() msg = "Game changed" if backup['game_name'] == manager.config['game_name']: msg = "Game restarted" except Exception as e: manager.config = backup msg = "Loading failed: %s" % e status = "error" traceback.print_exc() else: ret = manager.game.set_options(cfg) if ret is None: msg = "Settings saved" else: status = "error" msg = ret manager.save_config() self.redirect("game?status=%s&msg=%s" % (status, msg))
smab/playhouse-web
src/config.py
Python
agpl-3.0
24,018
from __future__ import print_function, division, absolute_import from fontTools.misc.py23 import * from fontTools.misc.textTools import safeEval from fontTools.misc.fixedTools import fixedToFloat as fi2fl, floatToFixed as fl2fi from .otBase import ValueRecordFactory import logging log = logging.getLogger(__name__) def buildConverters(tableSpec, tableNamespace): """Given a table spec from otData.py, build a converter object for each field of the table. This is called for each table in otData.py, and the results are assigned to the corresponding class in otTables.py.""" converters = [] convertersByName = {} for tp, name, repeat, aux, descr in tableSpec: tableName = name if name.startswith("ValueFormat"): assert tp == "uint16" converterClass = ValueFormat elif name.endswith("Count") or name.endswith("LookupType"): assert tp == "uint16" converterClass = ComputedUShort elif name == "SubTable": converterClass = SubTable elif name == "ExtSubTable": converterClass = ExtSubTable elif name == "FeatureParams": converterClass = FeatureParams else: if not tp in converterMapping: tableName = tp converterClass = Struct else: converterClass = converterMapping[tp] tableClass = tableNamespace.get(tableName) conv = converterClass(name, repeat, aux, tableClass) if name in ["SubTable", "ExtSubTable"]: conv.lookupTypes = tableNamespace['lookupTypes'] # also create reverse mapping for t in conv.lookupTypes.values(): for cls in t.values(): convertersByName[cls.__name__] = Table(name, repeat, aux, cls) if name == "FeatureParams": conv.featureParamTypes = tableNamespace['featureParamTypes'] conv.defaultFeatureParams = tableNamespace['FeatureParams'] for cls in conv.featureParamTypes.values(): convertersByName[cls.__name__] = Table(name, repeat, aux, cls) converters.append(conv) assert name not in convertersByName, name convertersByName[name] = conv return converters, convertersByName class _MissingItem(tuple): __slots__ = () try: from collections import UserList except: from UserList import UserList class _LazyList(UserList): def __getslice__(self, i, j): return self.__getitem__(slice(i, j)) def __getitem__(self, k): if isinstance(k, slice): indices = range(*k.indices(len(self))) return [self[i] for i in indices] item = self.data[k] if isinstance(item, _MissingItem): self.reader.seek(self.pos + item[0] * self.recordSize) item = self.conv.read(self.reader, self.font, {}) self.data[k] = item return item class BaseConverter(object): """Base class for converter objects. Apart from the constructor, this is an abstract class.""" def __init__(self, name, repeat, aux, tableClass): self.name = name self.repeat = repeat self.aux = aux self.tableClass = tableClass self.isCount = name.endswith("Count") self.isLookupType = name.endswith("LookupType") self.isPropagated = name in ["ClassCount", "Class2Count", "FeatureTag", "SettingsCount", "AxisCount"] def readArray(self, reader, font, tableDict, count): """Read an array of values from the reader.""" lazy = font.lazy and count > 8 if lazy: recordSize = self.getRecordSize(reader) if recordSize is NotImplemented: lazy = False if not lazy: l = [] for i in range(count): l.append(self.read(reader, font, tableDict)) return l else: l = _LazyList() l.reader = reader.copy() l.pos = l.reader.pos l.font = font l.conv = self l.recordSize = recordSize l.extend(_MissingItem([i]) for i in range(count)) reader.advance(count * recordSize) return l def getRecordSize(self, reader): if hasattr(self, 'staticSize'): return self.staticSize return NotImplemented def read(self, reader, font, tableDict): """Read a value from the reader.""" raise NotImplementedError(self) def writeArray(self, writer, font, tableDict, values): for i, value in enumerate(values): self.write(writer, font, tableDict, value, i) def write(self, writer, font, tableDict, value, repeatIndex=None): """Write a value to the writer.""" raise NotImplementedError(self) def xmlRead(self, attrs, content, font): """Read a value from XML.""" raise NotImplementedError(self) def xmlWrite(self, xmlWriter, font, value, name, attrs): """Write a value to XML.""" raise NotImplementedError(self) class SimpleValue(BaseConverter): def xmlWrite(self, xmlWriter, font, value, name, attrs): xmlWriter.simpletag(name, attrs + [("value", value)]) xmlWriter.newline() def xmlRead(self, attrs, content, font): return attrs["value"] class IntValue(SimpleValue): def xmlRead(self, attrs, content, font): return int(attrs["value"], 0) class Long(IntValue): staticSize = 4 def read(self, reader, font, tableDict): return reader.readLong() def write(self, writer, font, tableDict, value, repeatIndex=None): writer.writeLong(value) class ULong(IntValue): staticSize = 4 def read(self, reader, font, tableDict): return reader.readULong() def write(self, writer, font, tableDict, value, repeatIndex=None): writer.writeULong(value) class Short(IntValue): staticSize = 2 def read(self, reader, font, tableDict): return reader.readShort() def write(self, writer, font, tableDict, value, repeatIndex=None): writer.writeShort(value) class UShort(IntValue): staticSize = 2 def read(self, reader, font, tableDict): return reader.readUShort() def write(self, writer, font, tableDict, value, repeatIndex=None): writer.writeUShort(value) class UInt8(IntValue): staticSize = 1 def read(self, reader, font, tableDict): return reader.readUInt8() def write(self, writer, font, tableDict, value, repeatIndex=None): writer.writeUInt8(value) class UInt24(IntValue): staticSize = 3 def read(self, reader, font, tableDict): return reader.readUInt24() def write(self, writer, font, tableDict, value, repeatIndex=None): writer.writeUInt24(value) class ComputedUShort(UShort): def xmlWrite(self, xmlWriter, font, value, name, attrs): xmlWriter.comment("%s=%s" % (name, value)) xmlWriter.newline() class Tag(SimpleValue): staticSize = 4 def read(self, reader, font, tableDict): return reader.readTag() def write(self, writer, font, tableDict, value, repeatIndex=None): writer.writeTag(value) class GlyphID(SimpleValue): staticSize = 2 def readArray(self, reader, font, tableDict, count): glyphOrder = font.getGlyphOrder() gids = reader.readUShortArray(count) try: l = [glyphOrder[gid] for gid in gids] except IndexError: # Slower, but will not throw an IndexError on an invalid glyph id. l = [font.getGlyphName(gid) for gid in gids] return l def read(self, reader, font, tableDict): return font.getGlyphName(reader.readUShort()) def write(self, writer, font, tableDict, value, repeatIndex=None): writer.writeUShort(font.getGlyphID(value)) class FloatValue(SimpleValue): def xmlRead(self, attrs, content, font): return float(attrs["value"]) class DeciPoints(FloatValue): staticSize = 2 def read(self, reader, font, tableDict): return reader.readUShort() / 10 def write(self, writer, font, tableDict, value, repeatIndex=None): writer.writeUShort(int(round(value * 10))) class Fixed(FloatValue): staticSize = 4 def read(self, reader, font, tableDict): return fi2fl(reader.readLong(), 16) def write(self, writer, font, tableDict, value, repeatIndex=None): writer.writeLong(fl2fi(value, 16)) class Version(BaseConverter): staticSize = 4 def read(self, reader, font, tableDict): value = reader.readLong() assert (value >> 16) == 1, "Unsupported version 0x%08x" % value return fi2fl(value, 16) def write(self, writer, font, tableDict, value, repeatIndex=None): if value < 0x10000: value = fl2fi(value, 16) value = int(round(value)) assert (value >> 16) == 1, "Unsupported version 0x%08x" % value writer.writeLong(value) def xmlRead(self, attrs, content, font): value = attrs["value"] value = float(int(value, 0)) if value.startswith("0") else float(value) if value >= 0x10000: value = fi2fl(value, 16) return value def xmlWrite(self, xmlWriter, font, value, name, attrs): if value >= 0x10000: value = fi2fl(value, 16) if value % 1 != 0: # Write as hex value = "0x%08x" % fl2fi(value, 16) xmlWriter.simpletag(name, attrs + [("value", value)]) xmlWriter.newline() class Struct(BaseConverter): def getRecordSize(self, reader): return self.tableClass and self.tableClass.getRecordSize(reader) def read(self, reader, font, tableDict): table = self.tableClass() table.decompile(reader, font) return table def write(self, writer, font, tableDict, value, repeatIndex=None): value.compile(writer, font) def xmlWrite(self, xmlWriter, font, value, name, attrs): if value is None: if attrs: # If there are attributes (probably index), then # don't drop this even if it's NULL. It will mess # up the array indices of the containing element. xmlWriter.simpletag(name, attrs + [("empty", 1)]) xmlWriter.newline() else: pass # NULL table, ignore else: value.toXML(xmlWriter, font, attrs, name=name) def xmlRead(self, attrs, content, font): if "empty" in attrs and safeEval(attrs["empty"]): return None table = self.tableClass() Format = attrs.get("Format") if Format is not None: table.Format = int(Format) for element in content: if isinstance(element, tuple): name, attrs, content = element table.fromXML(name, attrs, content, font) else: pass # TODO Fill in items that are not set by XML. return table def __repr__(self): return "Struct of " + repr(self.tableClass) class Table(Struct): longOffset = False staticSize = 2 def readOffset(self, reader): return reader.readUShort() def writeNullOffset(self, writer): if self.longOffset: writer.writeULong(0) else: writer.writeUShort(0) def read(self, reader, font, tableDict): offset = self.readOffset(reader) if offset == 0: return None if offset <= 3: # XXX hack to work around buggy pala.ttf log.warning("offset is not 0, yet suspiciously low (%d). table: %s", offset, self.tableClass.__name__) return None table = self.tableClass() reader = reader.getSubReader(offset) if font.lazy: table.reader = reader table.font = font else: table.decompile(reader, font) return table def write(self, writer, font, tableDict, value, repeatIndex=None): if value is None: self.writeNullOffset(writer) else: subWriter = writer.getSubWriter() subWriter.longOffset = self.longOffset subWriter.name = self.name if repeatIndex is not None: subWriter.repeatIndex = repeatIndex writer.writeSubTable(subWriter) value.compile(subWriter, font) class LTable(Table): longOffset = True staticSize = 4 def readOffset(self, reader): return reader.readULong() class SubTable(Table): def getConverter(self, tableType, lookupType): tableClass = self.lookupTypes[tableType][lookupType] return self.__class__(self.name, self.repeat, self.aux, tableClass) def xmlWrite(self, xmlWriter, font, value, name, attrs): Table.xmlWrite(self, xmlWriter, font, value, None, attrs) class ExtSubTable(LTable, SubTable): def write(self, writer, font, tableDict, value, repeatIndex=None): writer.Extension = True # actually, mere presence of the field flags it as an Ext Subtable writer. Table.write(self, writer, font, tableDict, value, repeatIndex) class FeatureParams(Table): def getConverter(self, featureTag): tableClass = self.featureParamTypes.get(featureTag, self.defaultFeatureParams) return self.__class__(self.name, self.repeat, self.aux, tableClass) class ValueFormat(IntValue): staticSize = 2 def __init__(self, name, repeat, aux, tableClass): BaseConverter.__init__(self, name, repeat, aux, tableClass) self.which = "ValueFormat" + ("2" if name[-1] == "2" else "1") def read(self, reader, font, tableDict): format = reader.readUShort() reader[self.which] = ValueRecordFactory(format) return format def write(self, writer, font, tableDict, format, repeatIndex=None): writer.writeUShort(format) writer[self.which] = ValueRecordFactory(format) class ValueRecord(ValueFormat): def getRecordSize(self, reader): return 2 * len(reader[self.which]) def read(self, reader, font, tableDict): return reader[self.which].readValueRecord(reader, font) def write(self, writer, font, tableDict, value, repeatIndex=None): writer[self.which].writeValueRecord(writer, font, value) def xmlWrite(self, xmlWriter, font, value, name, attrs): if value is None: pass # NULL table, ignore else: value.toXML(xmlWriter, font, self.name, attrs) def xmlRead(self, attrs, content, font): from .otBase import ValueRecord value = ValueRecord() value.fromXML(None, attrs, content, font) return value class DeltaValue(BaseConverter): def read(self, reader, font, tableDict): StartSize = tableDict["StartSize"] EndSize = tableDict["EndSize"] DeltaFormat = tableDict["DeltaFormat"] assert DeltaFormat in (1, 2, 3), "illegal DeltaFormat" nItems = EndSize - StartSize + 1 nBits = 1 << DeltaFormat minusOffset = 1 << nBits mask = (1 << nBits) - 1 signMask = 1 << (nBits - 1) DeltaValue = [] tmp, shift = 0, 0 for i in range(nItems): if shift == 0: tmp, shift = reader.readUShort(), 16 shift = shift - nBits value = (tmp >> shift) & mask if value & signMask: value = value - minusOffset DeltaValue.append(value) return DeltaValue def write(self, writer, font, tableDict, value, repeatIndex=None): StartSize = tableDict["StartSize"] EndSize = tableDict["EndSize"] DeltaFormat = tableDict["DeltaFormat"] DeltaValue = value assert DeltaFormat in (1, 2, 3), "illegal DeltaFormat" nItems = EndSize - StartSize + 1 nBits = 1 << DeltaFormat assert len(DeltaValue) == nItems mask = (1 << nBits) - 1 tmp, shift = 0, 16 for value in DeltaValue: shift = shift - nBits tmp = tmp | ((value & mask) << shift) if shift == 0: writer.writeUShort(tmp) tmp, shift = 0, 16 if shift != 16: writer.writeUShort(tmp) def xmlWrite(self, xmlWriter, font, value, name, attrs): xmlWriter.simpletag(name, attrs + [("value", value)]) xmlWriter.newline() def xmlRead(self, attrs, content, font): return safeEval(attrs["value"]) converterMapping = { # type class "int16": Short, "uint8": UInt8, "uint16": UShort, "uint24": UInt24, "uint32": ULong, "Version": Version, "Tag": Tag, "GlyphID": GlyphID, "DeciPoints": DeciPoints, "Fixed": Fixed, "struct": Struct, "Offset": Table, "LOffset": LTable, "ValueRecord": ValueRecord, "DeltaValue": DeltaValue, }
MitchTalmadge/Emoji-Tools
src/main/resources/PythonScripts/fontTools/ttLib/tables/otConverters.py
Python
gpl-3.0
14,704
# -*- coding: utf-8 -*- """ netvisor.schemas.customers.get ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ :copyright: (c) 2013-2016 by Fast Monkeys Oy. :license: MIT, see LICENSE for more details. """ from marshmallow import Schema, fields from ..common import StringSchema from ..fields import Decimal class CustomerBaseInformationSchema(Schema): internal_identifier = fields.String(allow_none=True) external_identifier = fields.String(allow_none=True) name = fields.String() name_extension = fields.String(allow_none=True) customer_group_netvisor_key = fields.Integer() customer_group_name = fields.String() email = fields.String(allow_none=True) email_invoicing_address = fields.String(allow_none=True) phone_number = fields.String(allow_none=True) fax_number = fields.String(allow_none=True) home_page_uri = fields.String(allow_none=True) is_active = fields.Boolean() street_address = fields.String(allow_none=True) city = fields.String(allow_none=True) post_number = fields.String(allow_none=True) country = fields.Nested(StringSchema, allow_none=True) class CustomerFinvoiceDetailsSchema(Schema): finvoice_address = fields.String(allow_none=True) finvoice_router_code = fields.String(allow_none=True) class CustomerAdditionalInformationSchema(Schema): balance_limit = Decimal(allow_none=True) comment = fields.String(allow_none=True) reference_number = fields.String(allow_none=True) class CustomerContactDetailsSchema(Schema): contact_person = fields.String(allow_none=True) contact_person_email = fields.String(allow_none=True) contact_person_phone = fields.String(allow_none=True) class CustomerDeliveryDetailsSchema(Schema): delivery_name = fields.String(allow_none=True) delivery_street_address = fields.String(allow_none=True) delivery_post_number = fields.String(allow_none=True) delivery_city = fields.String(allow_none=True) class GetCustomerSchema(Schema): customer_base_information = fields.Nested(CustomerBaseInformationSchema) customer_additional_information = fields.Nested( CustomerAdditionalInformationSchema ) customer_contact_details = fields.Nested(CustomerContactDetailsSchema) customer_finvoice_details = fields.Nested(CustomerFinvoiceDetailsSchema) customer_delivery_details = fields.Nested(CustomerDeliveryDetailsSchema)
fastmonkeys/netvisor.py
netvisor/schemas/customers/get.py
Python
mit
2,408
# ***** BEGIN LICENSE BLOCK ***** # Version: MPL 1.1/GPL 2.0/LGPL 2.1 # # The contents of this file are subject to the Mozilla Public License Version # 1.1 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # http://www.mozilla.org/MPL/ # # Software distributed under the License is distributed on an "AS IS" basis, # WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License # for the specific language governing rights and limitations under the # License. # # The Original Code is configman # # The Initial Developer of the Original Code is # Mozilla Foundation # Portions created by the Initial Developer are Copyright (C) 2011 # the Initial Developer. All Rights Reserved. # # Contributor(s): # K Lars Lohn, lars@mozilla.com # Peter Bengtsson, peterbe@mozilla.com # # Alternatively, the contents of this file may be used under the terms of # either the GNU General Public License Version 2 or later (the "GPL"), or # the GNU Lesser General Public License Version 2.1 or later (the "LGPL"), # in which case the provisions of the GPL or the LGPL are applicable instead # of those above. If you wish to allow use of your version of this file only # under the terms of either the GPL or the LGPL, and not to allow others to # use your version of this file under the terms of the MPL, indicate your # decision by deleting the provisions above and replace them with the notice # and other provisions required by the GPL or the LGPL. If you do not delete # the provisions above, a recipient may use your version of this file under # the terms of any one of the MPL, the GPL or the LGPL. # # ***** END LICENSE BLOCK ***** import sys import ConfigParser from source_exceptions import (CantHandleTypeException, ValueException, NotEnoughInformationException) from .. import namespace from .. import converters as conv file_name_extension = 'ini' can_handle = (ConfigParser, ConfigParser.RawConfigParser, # just the base class, subclasses # will be detected too basestring, ) class LoadingIniFileFailsException(ValueException): pass class ValueSource(object): def __init__(self, source, config_manager=None, top_level_section_name='top_level'): self.delayed_parser_instantiation = False self.top_level_section_name = top_level_section_name if source is ConfigParser: try: app = config_manager.get_option_by_name('admin.application') source = "%s.%s" % (app.value.app_name, file_name_extension) except AttributeError: # we likely don't have the admin.application object set up yet. # we need to delay the instantiation of the ConfigParser # until later. if source is None: raise NotEnoughInformationException("Can't setup an ini " "file without knowing " "the file name") self.delayed_parser_instantiation = True return if (isinstance(source, basestring) and source.endswith(file_name_extension)): try: self.configparser = self._create_parser(source) except Exception, x: # FIXME: this doesn't give you a clue why it fail. # Was it because the file didn't exist (IOError) or because it # was badly formatted?? raise LoadingIniFileFailsException("Cannot load ini: %s" % str(x)) elif isinstance(source, ConfigParser.RawConfigParser): self.configparser = source else: raise CantHandleTypeException( "ConfigParser doesn't know how to handle %s." % source) @staticmethod def _create_parser(source): parser = ConfigParser.ConfigParser() parser.optionxform = str parser.read(source) return parser def get_values(self, config_manager, ignore_mismatches): """Return a nested dictionary representing the values in the ini file. In the case of this ValueSource implementation, both parameters are dummies.""" if self.delayed_parser_instantiation: try: app = config_manager.get_option_by_name('admin.application') source = "%s%s" % (app.value.app_name, file_name_extension) self.configparser = self._create_parser(source) self.delayed_parser_instantiation = False except AttributeError: # we don't have enough information to get the ini file # yet. we'll ignore the error for now return {} options = {} for a_section in self.configparser.sections(): if a_section == self.top_level_section_name: prefix = '' else: prefix = "%s." % a_section for an_option in self.configparser.options(a_section): name = '%s%s' % (prefix, an_option) options[name] = self.configparser.get(a_section, an_option) return options @staticmethod def write(option_iter, output_stream=sys.stdout): print >> output_stream, '[top_level]' for qkey, key, val in option_iter(): if isinstance(val, namespace.Namespace): print >> output_stream, '[%s]' % key print >> output_stream, '# %s\n' % val._doc else: print >> output_stream, '# name:', qkey print >> output_stream, '# doc:', val.doc print >> output_stream, '# converter:', \ conv.py_obj_to_str(val.from_string_converter) val_str = conv.option_value_str(val) print >> output_stream, '%s=%s\n' % (key, val_str)
twobraids/configman_orginal
configman/value_sources/for_configparse.py
Python
bsd-3-clause
6,143
"""The tests for the Apache Kafka component.""" from collections import namedtuple import pytest import homeassistant.components.apache_kafka as apache_kafka from homeassistant.const import STATE_ON from homeassistant.setup import async_setup_component from tests.async_mock import patch APACHE_KAFKA_PATH = "homeassistant.components.apache_kafka" PRODUCER_PATH = f"{APACHE_KAFKA_PATH}.AIOKafkaProducer" MIN_CONFIG = { "ip_address": "localhost", "port": 8080, "topic": "topic", } FilterTest = namedtuple("FilterTest", "id should_pass") MockKafkaClient = namedtuple("MockKafkaClient", "init start send_and_wait") @pytest.fixture(name="mock_client") def mock_client_fixture(): """Mock the apache kafka client.""" with patch(f"{PRODUCER_PATH}.start") as start, patch( f"{PRODUCER_PATH}.send_and_wait" ) as send_and_wait, patch(f"{PRODUCER_PATH}.__init__", return_value=None) as init: yield MockKafkaClient(init, start, send_and_wait) @pytest.fixture(autouse=True, scope="module") def mock_client_stop(): """Mock client stop at module scope for teardown.""" with patch(f"{PRODUCER_PATH}.stop") as stop: yield stop async def test_minimal_config(hass, mock_client): """Test the minimal config and defaults of component.""" config = {apache_kafka.DOMAIN: MIN_CONFIG} assert await async_setup_component(hass, apache_kafka.DOMAIN, config) await hass.async_block_till_done() assert mock_client.start.called_once async def test_full_config(hass, mock_client): """Test the full config of component.""" config = { apache_kafka.DOMAIN: { "filter": { "include_domains": ["light"], "include_entity_globs": ["sensor.included_*"], "include_entities": ["binary_sensor.included"], "exclude_domains": ["light"], "exclude_entity_globs": ["sensor.excluded_*"], "exclude_entities": ["binary_sensor.excluded"], }, } } config[apache_kafka.DOMAIN].update(MIN_CONFIG) assert await async_setup_component(hass, apache_kafka.DOMAIN, config) await hass.async_block_till_done() assert mock_client.start.called_once async def _setup(hass, filter_config): """Shared set up for filtering tests.""" config = {apache_kafka.DOMAIN: {"filter": filter_config}} config[apache_kafka.DOMAIN].update(MIN_CONFIG) assert await async_setup_component(hass, apache_kafka.DOMAIN, config) await hass.async_block_till_done() async def _run_filter_tests(hass, tests, mock_client): """Run a series of filter tests on apache kafka.""" for test in tests: hass.states.async_set(test.id, STATE_ON) await hass.async_block_till_done() if test.should_pass: mock_client.send_and_wait.assert_called_once() mock_client.send_and_wait.reset_mock() else: mock_client.send_and_wait.assert_not_called() async def test_allowlist(hass, mock_client): """Test an allowlist only config.""" await _setup( hass, { "include_domains": ["light"], "include_entity_globs": ["sensor.included_*"], "include_entities": ["binary_sensor.included"], }, ) tests = [ FilterTest("climate.excluded", False), FilterTest("light.included", True), FilterTest("sensor.excluded_test", False), FilterTest("sensor.included_test", True), FilterTest("binary_sensor.included", True), FilterTest("binary_sensor.excluded", False), ] await _run_filter_tests(hass, tests, mock_client) async def test_denylist(hass, mock_client): """Test a denylist only config.""" await _setup( hass, { "exclude_domains": ["climate"], "exclude_entity_globs": ["sensor.excluded_*"], "exclude_entities": ["binary_sensor.excluded"], }, ) tests = [ FilterTest("climate.excluded", False), FilterTest("light.included", True), FilterTest("sensor.excluded_test", False), FilterTest("sensor.included_test", True), FilterTest("binary_sensor.included", True), FilterTest("binary_sensor.excluded", False), ] await _run_filter_tests(hass, tests, mock_client) async def test_filtered_allowlist(hass, mock_client): """Test an allowlist config with a filtering denylist.""" await _setup( hass, { "include_domains": ["light"], "include_entity_globs": ["*.included_*"], "exclude_domains": ["climate"], "exclude_entity_globs": ["*.excluded_*"], "exclude_entities": ["light.excluded"], }, ) tests = [ FilterTest("light.included", True), FilterTest("light.excluded_test", False), FilterTest("light.excluded", False), FilterTest("sensor.included_test", True), FilterTest("climate.included_test", False), ] await _run_filter_tests(hass, tests, mock_client) async def test_filtered_denylist(hass, mock_client): """Test a denylist config with a filtering allowlist.""" await _setup( hass, { "include_entities": ["climate.included", "sensor.excluded_test"], "exclude_domains": ["climate"], "exclude_entity_globs": ["*.excluded_*"], "exclude_entities": ["light.excluded"], }, ) tests = [ FilterTest("climate.excluded", False), FilterTest("climate.included", True), FilterTest("switch.excluded_test", False), FilterTest("sensor.excluded_test", True), FilterTest("light.excluded", False), FilterTest("light.included", True), ] await _run_filter_tests(hass, tests, mock_client)
robbiet480/home-assistant
tests/components/apache_kafka/test_init.py
Python
apache-2.0
5,851
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations class Migration(migrations.Migration): dependencies = [ ('zerver', '0008_preregistrationuser_upper_email_idx'), ] operations = [ migrations.AlterField( model_name='userprofile', name='enable_stream_desktop_notifications', field=models.BooleanField(default=False), ), migrations.AlterField( model_name='userprofile', name='enable_stream_sounds', field=models.BooleanField(default=False), ), ]
sonali0901/zulip
zerver/migrations/0009_add_missing_migrations.py
Python
apache-2.0
630
# Copyright: Damien Elmes <anki@ichi2.net> # License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html import socket import time import traceback import gc from aqt.qt import * import aqt from anki import Collection from anki.sync import Syncer, RemoteServer, FullSyncer, MediaSyncer, \ RemoteMediaServer from anki.hooks import addHook, remHook from aqt.utils import tooltip, askUserDialog, showWarning, showText, showInfo # Sync manager ###################################################################### class SyncManager(QObject): def __init__(self, mw, pm): QObject.__init__(self, mw) self.mw = mw self.pm = pm def sync(self): if not self.pm.profile['syncKey']: auth = self._getUserPass() if not auth: return self.pm.profile['syncUser'] = auth[0] self._sync(auth) else: self._sync() def _sync(self, auth=None): # to avoid gui widgets being garbage collected in the worker thread, # run gc in advance self._didFullUp = False self._didError = False gc.collect() # create the thread, setup signals and start running t = self.thread = SyncThread( self.pm.collectionPath(), self.pm.profile['syncKey'], auth=auth, media=self.pm.profile['syncMedia']) t.event.connect(self.onEvent) self.label = _("Connecting...") self.mw.progress.start(immediate=True, label=self.label) self.sentBytes = self.recvBytes = 0 self._updateLabel() self.thread.start() while not self.thread.isFinished(): self.mw.app.processEvents() self.thread.wait(100) self.mw.progress.finish() if self.thread.syncMsg: showText(self.thread.syncMsg) if self.thread.uname: self.pm.profile['syncUser'] = self.thread.uname def delayedInfo(): if self._didFullUp and not self._didError: showInfo(_("""\ Your collection was successfully uploaded to AnkiWeb. If you use any other devices, please sync them now, and choose \ to download the collection you have just uploaded from this computer. \ After doing so, future reviews and added cards will be merged \ automatically.""")) self.mw.progress.timer(1000, delayedInfo, False) def _updateLabel(self): self.mw.progress.update(label="%s\n%s" % ( self.label, _("%(a)dkB up, %(b)dkB down") % dict( a=self.sentBytes // 1024, b=self.recvBytes // 1024))) def onEvent(self, evt, *args): pu = self.mw.progress.update if evt == "badAuth": tooltip( _("AnkiWeb ID or password was incorrect; please try again."), parent=self.mw) # blank the key so we prompt user again self.pm.profile['syncKey'] = None self.pm.save() elif evt == "corrupt": pass elif evt == "newKey": self.pm.profile['syncKey'] = args[0] self.pm.save() elif evt == "offline": tooltip(_("Syncing failed; internet offline.")) elif evt == "upbad": self._didFullUp = False self._checkFailed() elif evt == "sync": m = None; t = args[0] if t == "login": m = _("Syncing...") elif t == "upload": self._didFullUp = True m = _("Uploading to AnkiWeb...") elif t == "download": m = _("Downloading from AnkiWeb...") elif t == "sanity": m = _("Checking...") elif t == "findMedia": m = _("Syncing Media...") elif t == "upgradeRequired": showText(_("""\ Please visit AnkiWeb, upgrade your deck, then try again.""")) if m: self.label = m self._updateLabel() elif evt == "syncMsg": self.label = args[0] self._updateLabel() elif evt == "error": self._didError = True showText(_("Syncing failed:\n%s")% self._rewriteError(args[0])) elif evt == "clockOff": self._clockOff() elif evt == "checkFailed": self._checkFailed() elif evt == "mediaSanity": showWarning(_("""\ A problem occurred while syncing media. Please use Tools>Check Media, then \ sync again to correct the issue.""")) elif evt == "noChanges": pass elif evt == "fullSync": self._confirmFullSync() elif evt == "send": # posted events not guaranteed to arrive in order self.sentBytes = max(self.sentBytes, int(args[0])) self._updateLabel() elif evt == "recv": self.recvBytes = max(self.recvBytes, int(args[0])) self._updateLabel() def _rewriteError(self, err): if "Errno 61" in err: return _("""\ Couldn't connect to AnkiWeb. Please check your network connection \ and try again.""") elif "timed out" in err or "10060" in err: return _("""\ The connection to AnkiWeb timed out. Please check your network \ connection and try again.""") elif "code: 500" in err: return _("""\ AnkiWeb encountered an error. Please try again in a few minutes, and if \ the problem persists, please file a bug report.""") elif "code: 501" in err: return _("""\ Please upgrade to the latest version of Anki.""") # 502 is technically due to the server restarting, but we reuse the # error message elif "code: 502" in err: return _("AnkiWeb is under maintenance. Please try again in a few minutes.") elif "code: 503" in err: return _("""\ AnkiWeb is too busy at the moment. Please try again in a few minutes.""") elif "code: 504" in err: return _("504 gateway timeout error received. Please try temporarily disabling your antivirus.") elif "code: 409" in err: return _("Only one client can access AnkiWeb at a time. If a previous sync failed, please try again in a few minutes.") elif "10061" in err or "10013" in err or "10053" in err: return _( "Antivirus or firewall software is preventing Anki from connecting to the internet.") elif "10054" in err or "Broken pipe" in err: return _("Connection timed out. Either your internet connection is experiencing problems, or you have a very large file in your media folder.") elif "Unable to find the server" in err: return _( "Server not found. Either your connection is down, or antivirus/firewall " "software is blocking Anki from connecting to the internet.") elif "code: 407" in err: return _("Proxy authentication required.") elif "code: 413" in err: return _("Your collection or a media file is too large to sync.") elif "EOF occurred in violation of protocol" in err: return _("Error establishing a secure connection. This is usually caused by antivirus, firewall or VPN software, or problems with your ISP.") elif "certificate verify failed" in err: return _("Error establishing a secure connection. This is usually caused by antivirus, firewall or VPN software, or problems with your ISP.") return err def _getUserPass(self): d = QDialog(self.mw) d.setWindowTitle("Anki") d.setWindowModality(Qt.WindowModal) vbox = QVBoxLayout() l = QLabel(_("""\ <h1>Account Required</h1> A free account is required to keep your collection synchronized. Please \ <a href="%s">sign up</a> for an account, then \ enter your details below.""") % "https://ankiweb.net/account/login") l.setOpenExternalLinks(True) l.setWordWrap(True) vbox.addWidget(l) vbox.addSpacing(20) g = QGridLayout() l1 = QLabel(_("AnkiWeb ID:")) g.addWidget(l1, 0, 0) user = QLineEdit() g.addWidget(user, 0, 1) l2 = QLabel(_("Password:")) g.addWidget(l2, 1, 0) passwd = QLineEdit() passwd.setEchoMode(QLineEdit.Password) g.addWidget(passwd, 1, 1) vbox.addLayout(g) bb = QDialogButtonBox(QDialogButtonBox.Ok|QDialogButtonBox.Cancel) bb.button(QDialogButtonBox.Ok).setAutoDefault(True) bb.accepted.connect(d.accept) bb.rejected.connect(d.reject) vbox.addWidget(bb) d.setLayout(vbox) d.show() accepted = d.exec_() u = user.text() p = passwd.text() if not accepted or not u or not p: return return (u, p) def _confirmFullSync(self): diag = askUserDialog(_("""\ Your decks here and on AnkiWeb differ in such a way that they can't \ be merged together, so it's necessary to overwrite the decks on one \ side with the decks from the other. If you choose download, Anki will download the collection from AnkiWeb, \ and any changes you have made on your computer since the last sync will \ be lost. If you choose upload, Anki will upload your collection to AnkiWeb, and \ any changes you have made on AnkiWeb or your other devices since the \ last sync to this device will be lost. After all devices are in sync, future reviews and added cards can be merged \ automatically."""), [_("Upload to AnkiWeb"), _("Download from AnkiWeb"), _("Cancel")]) diag.setDefault(2) ret = diag.run() if ret == _("Upload to AnkiWeb"): self.thread.fullSyncChoice = "upload" elif ret == _("Download from AnkiWeb"): self.thread.fullSyncChoice = "download" else: self.thread.fullSyncChoice = "cancel" def _clockOff(self): showWarning(_("""\ Syncing requires the clock on your computer to be set correctly. Please \ fix the clock and try again.""")) def _checkFailed(self): showWarning(_("""\ Your collection is in an inconsistent state. Please run Tools>\ Check Database, then sync again.""")) def badUserPass(self): aqt.preferences.Preferences(self, self.pm.profile).dialog.tabWidget.\ setCurrentIndex(1) # Sync thread ###################################################################### class SyncThread(QThread): event = pyqtSignal(str, str) def __init__(self, path, hkey, auth=None, media=True): QThread.__init__(self) self.path = path self.hkey = hkey self.auth = auth self.media = media def run(self): # init this first so an early crash doesn't cause an error # in the main thread self.syncMsg = "" self.uname = "" try: self.col = Collection(self.path, log=True) except: self.fireEvent("corrupt") return self.server = RemoteServer(self.hkey) self.client = Syncer(self.col, self.server) self.sentTotal = 0 self.recvTotal = 0 # throttle updates; qt doesn't handle lots of posted events well self.byteUpdate = time.time() def syncEvent(type): self.fireEvent("sync", type) def syncMsg(msg): self.fireEvent("syncMsg", msg) def canPost(): if (time.time() - self.byteUpdate) > 0.1: self.byteUpdate = time.time() return True def sendEvent(bytes): self.sentTotal += bytes if canPost(): self.fireEvent("send", str(self.sentTotal)) def recvEvent(bytes): self.recvTotal += bytes if canPost(): self.fireEvent("recv", str(self.recvTotal)) addHook("sync", syncEvent) addHook("syncMsg", syncMsg) addHook("httpSend", sendEvent) addHook("httpRecv", recvEvent) # run sync and catch any errors try: self._sync() except: err = traceback.format_exc() self.fireEvent("error", err) finally: # don't bump mod time unless we explicitly save self.col.close(save=False) remHook("sync", syncEvent) remHook("syncMsg", syncMsg) remHook("httpSend", sendEvent) remHook("httpRecv", recvEvent) def _sync(self): if self.auth: # need to authenticate and obtain host key self.hkey = self.server.hostKey(*self.auth) if not self.hkey: # provided details were invalid return self.fireEvent("badAuth") else: # write new details and tell calling thread to save self.fireEvent("newKey", self.hkey) # run sync and check state try: ret = self.client.sync() except Exception as e: log = traceback.format_exc() err = repr(str(e)) if ("Unable to find the server" in err or "Errno 2" in err): self.fireEvent("offline") else: if not err: err = log self.fireEvent("error", err) return if ret == "badAuth": return self.fireEvent("badAuth") elif ret == "clockOff": return self.fireEvent("clockOff") elif ret == "basicCheckFailed" or ret == "sanityCheckFailed": return self.fireEvent("checkFailed") # full sync? if ret == "fullSync": return self._fullSync() # save and note success state if ret == "noChanges": self.fireEvent("noChanges") elif ret == "success": self.fireEvent("success") elif ret == "serverAbort": pass else: self.fireEvent("error", "Unknown sync return code.") self.syncMsg = self.client.syncMsg self.uname = self.client.uname # then move on to media sync self._syncMedia() def _fullSync(self): # if the local deck is empty, assume user is trying to download if self.col.isEmpty(): f = "download" else: # tell the calling thread we need a decision on sync direction, and # wait for a reply self.fullSyncChoice = False self.fireEvent("fullSync") while not self.fullSyncChoice: time.sleep(0.1) f = self.fullSyncChoice if f == "cancel": return self.client = FullSyncer(self.col, self.hkey, self.server.con) if f == "upload": if not self.client.upload(): self.fireEvent("upbad") else: self.client.download() # reopen db and move on to media sync self.col.reopen() self._syncMedia() def _syncMedia(self): if not self.media: return self.server = RemoteMediaServer(self.col, self.hkey, self.server.con) self.client = MediaSyncer(self.col, self.server) ret = self.client.sync() if ret == "noChanges": self.fireEvent("noMediaChanges") elif ret == "sanityCheckFailed": self.fireEvent("mediaSanity") else: self.fireEvent("mediaSuccess") def fireEvent(self, cmd, arg=""): self.event.emit(cmd, arg) # Monkey-patch httplib & httplib2 so we can get progress info ###################################################################### CHUNK_SIZE = 65536 import http.client, httplib2 from io import StringIO from anki.hooks import runHook print("fixme: _conn_request and _incrementalSend need updating for python3") # sending in httplib def _incrementalSend(self, data): """Send `data' to the server.""" if self.sock is None: if self.auto_open: self.connect() else: raise http.client.NotConnected() # if it's not a file object, make it one if not hasattr(data, 'read'): data = StringIO(data) while 1: block = data.read(CHUNK_SIZE) if not block: break self.sock.sendall(block) runHook("httpSend", len(block)) #http.client.HTTPConnection.send = _incrementalSend # receiving in httplib2 # this is an augmented version of httplib's request routine that: # - doesn't assume requests will be tried more than once # - calls a hook for each chunk of data so we can update the gui # - retries only when keep-alive connection is closed def _conn_request(self, conn, request_uri, method, body, headers): for i in range(2): try: if conn.sock is None: conn.connect() conn.request(method, request_uri, body, headers) except socket.timeout: raise except socket.gaierror: conn.close() raise httplib2.ServerNotFoundError( "Unable to find the server at %s" % conn.host) except httplib2.ssl_SSLError: conn.close() raise except socket.error as e: conn.close() raise except http.client.HTTPException: conn.close() raise try: response = conn.getresponse() except http.client.BadStatusLine: print("retry bad line") conn.close() conn.connect() continue except (socket.error, http.client.HTTPException): raise else: content = "" if method == "HEAD": response.close() else: buf = StringIO() while 1: data = response.read(CHUNK_SIZE) if not data: break buf.write(data) runHook("httpRecv", len(data)) content = buf.getvalue() response = httplib2.Response(response) if method != "HEAD": content = httplib2._decompressContent(response, content) return (response, content) #httplib2.Http._conn_request = _conn_request
Stvad/anki
aqt/sync.py
Python
agpl-3.0
18,494
"""Holds all pytee logic."""
KonishchevDmitry/pytee
pytee/__init__.py
Python
gpl-3.0
29
import numpy as np import tensorflow as tf from keras.models import Sequential from keras.layers import Dense, Dropout, Activation from keras.layers import Convolution2D, MaxPooling2D, Flatten from keras.optimizers import SGD, Adam, RMSprop from keras.utils import np_utils from keras.layers.normalization import BatchNormalization from keras.datasets import cifar10 from tensorflow.python.ops import control_flow_ops tf.python.control_flow_ops = control_flow_ops np.random.seed(1337) # for reproducibility n_classes = 10 flat_img_size = 32*32*3 pool_size = (2, 2) (X_train, y_train), (X_test, y_test) = cifar10.load_data() def normalize_color(image_data): """ Normalize the image data with Min-Max scaling to a range of [0.1, 0.9] :param image_data: The image data to be normalized :return: Normalized image data """ a = -0.5 b = +0.5 Xmin = 0.0 Xmax = 255.0 norm_img = np.empty_like(image_data, dtype=np.float32) norm_img = a + (image_data - Xmin)*(b-a)/(Xmax - Xmin) return norm_img X_train = normalize_color(X_train) X_test = normalize_color(X_test) Y_train = np_utils.to_categorical(y_train, n_classes) Y_test = np_utils.to_categorical(y_test, n_classes) model = Sequential() model.add(Convolution2D(16, 5, 5, border_mode='same', input_shape=(32, 32, 3))) model.add(BatchNormalization()) model.add(Activation('relu')) model.add(MaxPooling2D(pool_size=pool_size)) model.add(Convolution2D(64, 3, 3)) model.add(BatchNormalization()) model.add(Activation('relu')) model.add(MaxPooling2D(pool_size=pool_size)) model.add(Dropout(0.5)) model.add(Convolution2D(128, 3, 3)) model.add(BatchNormalization()) model.add(Activation('relu')) model.add(MaxPooling2D(pool_size=pool_size)) model.add(Dropout(0.5)) model.add(Flatten()) model.add(Dense(256, input_shape=(flat_img_size,), name='hidden1')) model.add(BatchNormalization()) model.add(Activation('relu')) model.add(Dense(n_classes, name='output')) model.add(BatchNormalization()) model.add(Activation('softmax')) adam = RMSprop(lr=0.01) model.compile(optimizer=adam, loss='categorical_crossentropy', metrics=['accuracy']) # 70% after 10 epochs history = model.fit(X_train, Y_train, batch_size=64, nb_epoch=10, validation_data=(X_test, Y_test), verbose=1) # history = model.fit_generator(datagen.flow(X_train, Y_train, batch_size=32), # samples_per_epoch=len(X_train), nb_epoch=nb_epoch)
thomasantony/CarND-Projects
Exercises/Term1/transfer-learning-lab/cifar10_test.py
Python
mit
2,442
# Copyright 2011 OpenStack Foundation # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import mock import webob from nova.api.openstack.compute.legacy_v2.contrib import multinic \ as multinic_v2 from nova.api.openstack.compute import multinic as multinic_v21 from nova import compute from nova import exception from nova import objects from nova import test from nova.tests.unit.api.openstack import fakes UUID = '70f6db34-de8d-4fbd-aafb-4065bdfa6114' last_add_fixed_ip = (None, None) last_remove_fixed_ip = (None, None) def compute_api_add_fixed_ip(self, context, instance, network_id): global last_add_fixed_ip last_add_fixed_ip = (instance['uuid'], network_id) def compute_api_remove_fixed_ip(self, context, instance, address): global last_remove_fixed_ip last_remove_fixed_ip = (instance['uuid'], address) def compute_api_get(self, context, instance_id, want_objects=False, expected_attrs=None): instance = objects.Instance() instance.uuid = instance_id instance.id = 1 instance.vm_state = 'fake' instance.task_state = 'fake' instance.obj_reset_changes() return instance class FixedIpTestV21(test.NoDBTestCase): controller_class = multinic_v21 validation_error = exception.ValidationError def setUp(self): super(FixedIpTestV21, self).setUp() fakes.stub_out_networking(self.stubs) fakes.stub_out_rate_limiting(self.stubs) self.stubs.Set(compute.api.API, "add_fixed_ip", compute_api_add_fixed_ip) self.stubs.Set(compute.api.API, "remove_fixed_ip", compute_api_remove_fixed_ip) self.stubs.Set(compute.api.API, 'get', compute_api_get) self.controller = self.controller_class.MultinicController() self.fake_req = fakes.HTTPRequest.blank('') def test_add_fixed_ip(self): global last_add_fixed_ip last_add_fixed_ip = (None, None) body = dict(addFixedIp=dict(networkId='test_net')) resp = self.controller._add_fixed_ip(self.fake_req, UUID, body=body) # NOTE: on v2.1, http status code is set as wsgi_code of API # method instead of status_int in a response object. if isinstance(self.controller, multinic_v21.MultinicController): status_int = self.controller._add_fixed_ip.wsgi_code else: status_int = resp.status_int self.assertEqual(status_int, 202) self.assertEqual(last_add_fixed_ip, (UUID, 'test_net')) def _test_add_fixed_ip_bad_request(self, body): self.assertRaises(self.validation_error, self.controller._add_fixed_ip, self.fake_req, UUID, body=body) def test_add_fixed_ip_empty_network_id(self): body = {'addFixedIp': {'network_id': ''}} self._test_add_fixed_ip_bad_request(body) def test_add_fixed_ip_network_id_bigger_than_36(self): body = {'addFixedIp': {'network_id': 'a' * 37}} self._test_add_fixed_ip_bad_request(body) def test_add_fixed_ip_no_network(self): global last_add_fixed_ip last_add_fixed_ip = (None, None) body = dict(addFixedIp=dict()) self._test_add_fixed_ip_bad_request(body) self.assertEqual(last_add_fixed_ip, (None, None)) @mock.patch.object(compute.api.API, 'add_fixed_ip') def test_add_fixed_ip_no_more_ips_available(self, mock_add_fixed_ip): mock_add_fixed_ip.side_effect = exception.NoMoreFixedIps(net='netid') body = dict(addFixedIp=dict(networkId='test_net')) self.assertRaises(webob.exc.HTTPBadRequest, self.controller._add_fixed_ip, self.fake_req, UUID, body=body) def test_remove_fixed_ip(self): global last_remove_fixed_ip last_remove_fixed_ip = (None, None) body = dict(removeFixedIp=dict(address='10.10.10.1')) resp = self.controller._remove_fixed_ip(self.fake_req, UUID, body=body) # NOTE: on v2.1, http status code is set as wsgi_code of API # method instead of status_int in a response object. if isinstance(self.controller, multinic_v21.MultinicController): status_int = self.controller._remove_fixed_ip.wsgi_code else: status_int = resp.status_int self.assertEqual(status_int, 202) self.assertEqual(last_remove_fixed_ip, (UUID, '10.10.10.1')) def test_remove_fixed_ip_no_address(self): global last_remove_fixed_ip last_remove_fixed_ip = (None, None) body = dict(removeFixedIp=dict()) self.assertRaises(self.validation_error, self.controller._remove_fixed_ip, self.fake_req, UUID, body=body) self.assertEqual(last_remove_fixed_ip, (None, None)) def test_remove_fixed_ip_invalid_address(self): body = {'removeFixedIp': {'address': ''}} self.assertRaises(self.validation_error, self.controller._remove_fixed_ip, self.fake_req, UUID, body=body) @mock.patch.object(compute.api.API, 'remove_fixed_ip', side_effect=exception.FixedIpNotFoundForSpecificInstance( instance_uuid=UUID, ip='10.10.10.1')) def test_remove_fixed_ip_not_found(self, _remove_fixed_ip): body = {'removeFixedIp': {'address': '10.10.10.1'}} self.assertRaises(webob.exc.HTTPBadRequest, self.controller._remove_fixed_ip, self.fake_req, UUID, body=body) class FixedIpTestV2(FixedIpTestV21): controller_class = multinic_v2 validation_error = webob.exc.HTTPBadRequest def test_remove_fixed_ip_invalid_address(self): # NOTE(cyeoh): This test is disabled for the V2 API because it is # has poorer input validation. pass class MultinicPolicyEnforcementV21(test.NoDBTestCase): def setUp(self): super(MultinicPolicyEnforcementV21, self).setUp() self.controller = multinic_v21.MultinicController() self.req = fakes.HTTPRequest.blank('') def test_add_fixed_ip_policy_failed(self): rule_name = "os_compute_api:os-multinic" self.policy.set_rules({rule_name: "project:non_fake"}) exc = self.assertRaises( exception.PolicyNotAuthorized, self.controller._add_fixed_ip, self.req, fakes.FAKE_UUID, body={'addFixedIp': {'networkId': fakes.FAKE_UUID}}) self.assertEqual( "Policy doesn't allow %s to be performed." % rule_name, exc.format_message()) def test_remove_fixed_ip_policy_failed(self): rule_name = "os_compute_api:os-multinic" self.policy.set_rules({rule_name: "project:non_fake"}) exc = self.assertRaises( exception.PolicyNotAuthorized, self.controller._remove_fixed_ip, self.req, fakes.FAKE_UUID, body={'removeFixedIp': {'address': "10.0.0.1"}}) self.assertEqual( "Policy doesn't allow %s to be performed." % rule_name, exc.format_message())
fnordahl/nova
nova/tests/unit/api/openstack/compute/test_multinic.py
Python
apache-2.0
7,864
resolution = (1920, 1080) FPS = 1/60 # 60 FPS master race or whatever
brooks42/LegendOfDuckHat
window/config.py
Python
gpl-3.0
71
"""Monitors home energy use for the ELIQ Online service.""" import asyncio from datetime import timedelta import logging import eliqonline import voluptuous as vol from homeassistant.components.sensor import PLATFORM_SCHEMA, SensorEntity from homeassistant.const import CONF_ACCESS_TOKEN, CONF_NAME, POWER_WATT from homeassistant.helpers.aiohttp_client import async_get_clientsession import homeassistant.helpers.config_validation as cv _LOGGER = logging.getLogger(__name__) CONF_CHANNEL_ID = "channel_id" DEFAULT_NAME = "ELIQ Online" ICON = "mdi:gauge" SCAN_INTERVAL = timedelta(seconds=60) UNIT_OF_MEASUREMENT = POWER_WATT PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_ACCESS_TOKEN): cv.string, vol.Required(CONF_CHANNEL_ID): cv.positive_int, vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, } ) async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Set up the ELIQ Online sensor.""" access_token = config.get(CONF_ACCESS_TOKEN) name = config.get(CONF_NAME, DEFAULT_NAME) channel_id = config.get(CONF_CHANNEL_ID) session = async_get_clientsession(hass) api = eliqonline.API(session=session, access_token=access_token) try: _LOGGER.debug("Probing for access to ELIQ Online API") await api.get_data_now(channelid=channel_id) except OSError as error: _LOGGER.error("Could not access the ELIQ Online API: %s", error) return False async_add_entities([EliqSensor(api, channel_id, name)], True) class EliqSensor(SensorEntity): """Implementation of an ELIQ Online sensor.""" def __init__(self, api, channel_id, name): """Initialize the sensor.""" self._name = name self._state = None self._api = api self._channel_id = channel_id @property def name(self): """Return the name of the sensor.""" return self._name @property def icon(self): """Return icon.""" return ICON @property def unit_of_measurement(self): """Return the unit of measurement of this entity, if any.""" return UNIT_OF_MEASUREMENT @property def state(self): """Return the state of the device.""" return self._state async def async_update(self): """Get the latest data.""" try: response = await self._api.get_data_now(channelid=self._channel_id) self._state = int(response["power"]) _LOGGER.debug("Updated power from server %d W", self._state) except KeyError: _LOGGER.warning("Invalid response from ELIQ Online API") except (OSError, asyncio.TimeoutError) as error: _LOGGER.warning("Could not connect to the ELIQ Online API: %s", error)
kennedyshead/home-assistant
homeassistant/components/eliqonline/sensor.py
Python
apache-2.0
2,820
import unittest import types from rest_framework_ccbv.inspector import Inspector, Attribute, Method from rest_framework import generics class TestInspector(unittest.TestCase): def setUp(self): self.klass = 'GenericAPIView' self.module = 'rest_framework.generics' self.inspector = Inspector(self.klass, self.module) def test_get_klass(self): self.assertEquals(self.inspector.get_klass(), getattr(generics, self.klass)) def test_first_ancestor_is_itself(self): self.assertEquals(self.inspector.get_klass_mro()[0].__name__, self.klass) def test_ancestor(self): self.assertEquals([x.__name__ for x in self.inspector.get_klass_mro()], [self.klass, 'APIView', 'View']) def test_attributes(self): self.assertIn(Attribute(name='serializer_class', value=None, classobject=None), self.inspector.get_attributes()) for attr in self.inspector.get_attributes(): self.assertFalse(attr.name.startswith('_')) self.assertFalse(isinstance(attr, types.MethodType)) def test_children(self): self.klass = 'ListModelMixin' self.module = 'rest_framework.mixins' self.inspector = Inspector(self.klass, self.module) self.assertItemsEqual([x.__name__ for x in self.inspector.get_children()], ['ListCreateAPIView', 'ListAPIView', 'ModelViewSet', 'ReadOnlyModelViewSet']) def test_direct_acenstors(self): self.klass = 'CreateAPIView' self.module = 'rest_framework.generics' self.inspector = Inspector(self.klass, self.module) self.assertItemsEqual([x.__name__ for x in self.inspector.get_direct_ancestors()], ['CreateModelMixin', 'GenericAPIView']) class TestMethod(unittest.TestCase): def setUp(self): class A(object): def method(self, *args, **kwargs): pass def method1(self, *args): pass def method2(self, **kwargs): pass def method3(self, a, b, **kwargs): pass def method4(self, a, b, *args): pass def method5(self, a, b, *args, **kwargs): pass def method6(self, a, b=3): pass def method7(self, a, b=3, *args): pass def method8(self, a=2, b=3, **kwargs): pass self.method = Method('method', A.method, A) self.method1 = Method('method1', A.method1, A) self.method2 = Method('method2', A.method2, A) self.method3 = Method('method3', A.method3, A) self.method4 = Method('method4', A.method4, A) self.method5 = Method('method5', A.method5, A) self.method6 = Method('method6', A.method6, A) self.method7 = Method('method7', A.method7, A) self.method8 = Method('method8', A.method8, A) def test_method(self): self.assertEqual(self.method.params_string(), 'self, *args, **kwargs') def test_method1(self): self.assertEqual(self.method1.params_string(), 'self, *args') def test_method2(self): self.assertEqual(self.method2.params_string(), 'self, **kwargs') def test_method3(self): self.assertEqual(self.method3.params_string(), 'self, a, b, **kwargs') def test_method4(self): self.assertEqual(self.method4.params_string(), 'self, a, b, *args') def test_method5(self): self.assertEqual(self.method5.params_string(), 'self, a, b, *args,' ' **kwargs') def test_method6(self): self.assertEqual(self.method6.params_string(), 'self, a, b=3') def test_method7(self): self.assertEqual(self.method7.params_string(), 'self, a, b=3, *args') def test_method8(self): self.assertEqual(self.method8.params_string(), 'self, a=2, b=3,' ' **kwargs')
aericson/cdrf.co
tests/test_inspector.py
Python
mit
4,310
"""Write initial TrueGrid files for one Sandia blade station. Usage ----- start an IPython (qt)console with the pylab flag: $ ipython qtconsole --pylab or $ ipython --pylab Then, from the prompt, run this script: |> %run sandia_blade_lib/prep_stnXX_mesh.py or |> import sandia_blade_lib/prep_stnXX_mesh Author: Perry Roth-Johnson Last updated: April 10, 2014 """ import matplotlib.pyplot as plt import lib.blade as bl import lib.poly_utils as pu from shapely.geometry import Polygon # SET THESE PARAMETERS ----------------- station_num = 20 # -------------------------------------- plt.close('all') # load the Sandia blade m = bl.MonoplaneBlade('Sandia blade SNL100-00', 'sandia_blade') # pre-process the station dimensions station = m.list_of_stations[station_num-1] station.airfoil.create_polygon() station.structure.create_all_layers() station.structure.save_all_layer_edges() station.structure.write_all_part_polygons() # plot the parts station.plot_parts() # access the structure for this station st = station.structure # upper spar cap ----------------------------------------------------------- label = 'upper spar cap' # create the bounding polygon usc = st.spar_cap.layer['upper'] is2 = st.internal_surface_2.layer['resin'] points_usc = [ (-0.75, usc.left[0][1]), # SparCap_upper.txt (-0.74000000, 0.78611464), # InternalSurface2_resin.txt ( 0.74000000, 0.69270959), # InternalSurface2_resin.txt ( 0.75, usc.right[1][1]), # SparCap_upper.txt ( 0.75, 1.3), (-0.75, 1.3) ] bounding_polygon = Polygon(points_usc) pu.plot_polygon(bounding_polygon, 'None', '#000000') # cut the new layer polygons pu.cut_plot_and_write_alt_layer(st.external_surface, 'triax', label, bounding_polygon) pu.cut_plot_and_write_alt_layer(st.external_surface, 'gelcoat', label, bounding_polygon) pu.cut_plot_and_write_alt_layer(st.internal_surface_2, 'resin', label, bounding_polygon) pu.cut_plot_and_write_alt_layer(st.internal_surface_2, 'triax', label, bounding_polygon) # lower spar cap ----------------------------------------------------------- label = 'lower spar cap' # create the bounding polygon lsc = st.spar_cap.layer['lower'] points_lsc = [ (-0.75,-1.6), ( 0.75,-1.6), (0.75000000, lsc.right[0][1]), # SparCap_lower.txt (0.74000000, -0.70717378), # InternalSurface2_resin.txt (-0.74000000, -1.00940891), # InternalSurface2_resin.txt (-0.75000000, lsc.left[1][1]) # SparCap_lower.txt ] bounding_polygon = Polygon(points_lsc) pu.plot_polygon(bounding_polygon, 'None', '#000000') # cut the new layer polygons pu.cut_plot_and_write_alt_layer(st.external_surface, 'triax', label, bounding_polygon) pu.cut_plot_and_write_alt_layer(st.external_surface, 'gelcoat', label, bounding_polygon) pu.cut_plot_and_write_alt_layer(st.internal_surface_2, 'resin', label, bounding_polygon) pu.cut_plot_and_write_alt_layer(st.internal_surface_2, 'triax', label, bounding_polygon) # TE reinforcement, upper 1 ------------------------------------------------ label = 'TE reinforcement, upper 1' # create the bounding polygon ter = st.TE_reinforcement.layer['foam'] is4 = st.internal_surface_4.layer['resin'] points_teu1 = [ (ter.top[0][0], 0.35), # TE_Reinforcement_foam.txt tuple(ter.top[0]), # TE_Reinforcement_foam.txt is4.polygon.interiors[0].coords[377-163], # InternalSurface4_resin.txt (3.54, 0.12), is4.polygon.interiors[0].coords[373-163], # InternalSurface4_resin.txt (3.73289035, 0.35) # InternalSurface4_resin.txt ] bounding_polygon = Polygon(points_teu1) pu.plot_polygon(bounding_polygon, 'None', '#000000') # cut the new layer polygons pu.cut_plot_and_write_alt_layer(st.external_surface, 'triax', label, bounding_polygon) pu.cut_plot_and_write_alt_layer(st.external_surface, 'gelcoat', label, bounding_polygon) pu.cut_plot_and_write_alt_layer(st.internal_surface_4, 'resin', label, bounding_polygon) pu.cut_plot_and_write_alt_layer(st.internal_surface_4, 'triax', label, bounding_polygon) pu.cut_plot_and_write_alt_layer(st.TE_reinforcement, 'foam', label, bounding_polygon) pu.cut_plot_and_write_alt_layer(st.TE_reinforcement, 'uniax', label, bounding_polygon) # TE reinforcement, lower 1 ------------------------------------------------ label = 'TE reinforcement, lower 1' # create the bounding polygon points_tel1 = [ (ter.bottom[0][0], -0.1), # TE_Reinforcement_foam.txt tuple(ter.bottom[1]), # TE_Reinforcement_foam.txt is4.polygon.interiors[0].coords[308-163], # InternalSurface4_resin.txt (3.54, 0.12), points_teu1[-2], # InternalSurface4_resin.txt (points_teu1[-1][0], -0.1) # InternalSurface4_resin.txt ] bounding_polygon = Polygon(points_tel1) pu.plot_polygon(bounding_polygon, 'None', '#000000') # cut the new layer polygons pu.cut_plot_and_write_alt_layer(st.external_surface, 'triax', label, bounding_polygon) pu.cut_plot_and_write_alt_layer(st.external_surface, 'gelcoat', label, bounding_polygon) pu.cut_plot_and_write_alt_layer(st.internal_surface_4, 'resin', label, bounding_polygon) pu.cut_plot_and_write_alt_layer(st.internal_surface_4, 'triax', label, bounding_polygon) pu.cut_plot_and_write_alt_layer(st.TE_reinforcement, 'foam', label, bounding_polygon) pu.cut_plot_and_write_alt_layer(st.TE_reinforcement, 'uniax', label, bounding_polygon) # TE reinforcement, upper 2 ------------------------------------------------ label = 'TE reinforcement, upper 2' # create the bounding polygon is4t = st.internal_surface_4.layer['triax'] points_teu2 = [ points_teu1[-1], points_teu1[-2], is4t.polygon.interiors[0].coords[201-77], # InternalSurface4_triax.txt is4t.polygon.exterior.coords[17-3], # InternalSurface4_triax.txt (is4t.polygon.exterior.coords[17-3][0], 0.35) # InternalSurface4_triax.txt ] bounding_polygon = Polygon(points_teu2) pu.plot_polygon(bounding_polygon, 'None', '#000000') # cut the new layer polygons pu.cut_plot_and_write_alt_layer(st.external_surface, 'triax', label, bounding_polygon) pu.cut_plot_and_write_alt_layer(st.external_surface, 'gelcoat', label, bounding_polygon) pu.cut_plot_and_write_alt_layer(st.internal_surface_4, 'resin', label, bounding_polygon) pu.cut_plot_and_write_alt_layer(st.internal_surface_4, 'triax', label, bounding_polygon) pu.cut_plot_and_write_alt_layer(st.TE_reinforcement, 'foam', label, bounding_polygon) pu.cut_plot_and_write_alt_layer(st.TE_reinforcement, 'uniax', label, bounding_polygon) # TE reinforcement, lower 2 ------------------------------------------------ label = 'TE reinforcement, lower 2' # create the bounding polygon points_tel2 = [ (points_teu2[0][0], -0.1), points_teu2[1], points_teu2[2], points_teu2[3], (points_teu2[3][0], -0.1) ] bounding_polygon = Polygon(points_tel2) pu.plot_polygon(bounding_polygon, 'None', '#000000') # cut the new layer polygons pu.cut_plot_and_write_alt_layer(st.external_surface, 'triax', label, bounding_polygon) pu.cut_plot_and_write_alt_layer(st.external_surface, 'gelcoat', label, bounding_polygon) pu.cut_plot_and_write_alt_layer(st.internal_surface_4, 'resin', label, bounding_polygon) pu.cut_plot_and_write_alt_layer(st.internal_surface_4, 'triax', label, bounding_polygon) pu.cut_plot_and_write_alt_layer(st.TE_reinforcement, 'foam', label, bounding_polygon) pu.cut_plot_and_write_alt_layer(st.TE_reinforcement, 'uniax', label, bounding_polygon) # TE reinforcement, upper 3 ------------------------------------------------ label = 'TE reinforcement, upper 3' # create the bounding polygon points_teu3 = [ points_teu2[-1], points_teu2[-2], ter.polygon.exterior.coords[0], # TE_Reinforcement_foam.txt (ter.polygon.exterior.coords[0][0], 0.35) # TE_Reinforcement_foam.txt ] bounding_polygon = Polygon(points_teu3) pu.plot_polygon(bounding_polygon, 'None', '#000000') # cut the new layer polygons pu.cut_plot_and_write_alt_layer(st.external_surface, 'triax', label, bounding_polygon) pu.cut_plot_and_write_alt_layer(st.external_surface, 'gelcoat', label, bounding_polygon) pu.cut_plot_and_write_alt_layer(st.TE_reinforcement, 'foam', label, bounding_polygon) pu.cut_plot_and_write_alt_layer(st.TE_reinforcement, 'uniax', label, bounding_polygon) # TE reinforcement, lower 3 ------------------------------------------------ label = 'TE reinforcement, lower 3' # create the bounding polygon points_tel3 = [ (points_teu3[0][0], -0.1), points_teu3[1], points_teu3[2], (points_teu3[2][0], -0.1) ] bounding_polygon = Polygon(points_tel3) pu.plot_polygon(bounding_polygon, 'None', '#000000') # cut the new layer polygons pu.cut_plot_and_write_alt_layer(st.external_surface, 'triax', label, bounding_polygon) pu.cut_plot_and_write_alt_layer(st.external_surface, 'gelcoat', label, bounding_polygon) pu.cut_plot_and_write_alt_layer(st.TE_reinforcement, 'foam', label, bounding_polygon) pu.cut_plot_and_write_alt_layer(st.TE_reinforcement, 'uniax', label, bounding_polygon) # TE reinforcement, upper 4 ------------------------------------------------ label = 'TE reinforcement, upper 4' # create the bounding polygon es = st.external_surface.layer['gelcoat'] teru = st.TE_reinforcement.layer['uniax'] points_teu4 = [ points_teu3[-1], points_teu3[-2], (4.32127500, 0.0025), # TE_Reinforcement_uniax.txt teru.polygon.exterior.coords[-2], # TE_Reinforcement_uniax.txt es.polygon.exterior.coords[-2], (4.4, 0.35) # TE_Reinforcement_uniax.txt ] bounding_polygon = Polygon(points_teu4) pu.plot_polygon(bounding_polygon, 'None', '#000000') # cut the new layer polygons pu.cut_plot_and_write_alt_layer(st.external_surface, 'triax', label, bounding_polygon) pu.cut_plot_and_write_alt_layer(st.external_surface, 'gelcoat', label, bounding_polygon) pu.cut_plot_and_write_alt_layer(st.TE_reinforcement, 'uniax', label, bounding_polygon) # TE reinforcement, lower 4 ------------------------------------------------ label = 'TE reinforcement, lower 4' # create the bounding polygon points_tel4 = [ (points_teu4[0][0], -0.1), points_teu4[1], points_teu4[2], teru.polygon.exterior.coords[-1], # TE_Reinforcement_uniax.txt es.polygon.exterior.coords[-1], (points_teu4[2][0], -0.1) ] bounding_polygon = Polygon(points_tel4) pu.plot_polygon(bounding_polygon, 'None', '#000000') # cut the new layer polygons pu.cut_plot_and_write_alt_layer(st.external_surface, 'triax', label, bounding_polygon) pu.cut_plot_and_write_alt_layer(st.external_surface, 'gelcoat', label, bounding_polygon) pu.cut_plot_and_write_alt_layer(st.TE_reinforcement, 'uniax', label, bounding_polygon) # LE panel ----------------------------------------------------------------- label = 'LE panel' # create the bounding polygon lep = st.LE_panel.layer['foam'] is1 = st.internal_surface_1.layer['resin'] points_le = [ (-3.00,-1.6), (-0.836,-1.6), tuple(lep.bottom[0]), # LE_Panel_foam.txt is1.polygon.interiors[0].coords[-2], # InternalSurface1_resin.txt (-1.5, 0.0), is1.polygon.interiors[0].coords[-1], # InternalSurface1_resin.txt tuple(lep.top[1]), # LE_Panel_foam.txt (-0.836, 1.3), (-3.00, 1.3) ] bounding_polygon = Polygon(points_le) pu.plot_polygon(bounding_polygon, 'None', '#000000') # cut the new layer polygons pu.cut_plot_and_write_alt_layer(st.external_surface, 'triax', label, bounding_polygon) pu.cut_plot_and_write_alt_layer(st.external_surface, 'gelcoat', label, bounding_polygon) pu.cut_plot_and_write_alt_layer(st.internal_surface_1, 'resin', label, bounding_polygon) pu.cut_plot_and_write_alt_layer(st.internal_surface_1, 'triax', label, bounding_polygon) # upper aft panel 1 ------------------------------------------------------- label = 'upper aft panel 1' # create the bounding polygon ap1u = st.aft_panel_1.layer['upper'] is3 = st.internal_surface_3.layer['resin'] points_ap1u = [ (0.836, 1.3), (ap1u.right[1][0], 1.3), # AftPanel1_upper.txt tuple(ap1u.right[1]), # AftPanel1_upper.txt (2.11772044, 0.48911607), # InternalSurface3_resin.txt (1.2, 0.5), is3.polygon.interiors[0].coords[-2], # InternalSurface3_resin.txt tuple(ap1u.left[0]) # AftPanel1_upper.txt ] bounding_polygon = Polygon(points_ap1u) pu.plot_polygon(bounding_polygon, 'None', '#000000') # cut the new layer polygons pu.cut_plot_and_write_alt_layer(st.external_surface, 'triax', label, bounding_polygon) pu.cut_plot_and_write_alt_layer(st.external_surface, 'gelcoat', label, bounding_polygon) pu.cut_plot_and_write_alt_layer(st.internal_surface_3, 'resin', label, bounding_polygon) pu.cut_plot_and_write_alt_layer(st.internal_surface_3, 'triax', label, bounding_polygon) # lower aft panel 1 ------------------------------------------------------- label = 'lower aft panel 1' # create the bounding polygon ap1l = st.aft_panel_1.layer['lower'] points_ap1l = [ (0.836, -1.6), (ap1l.right[0][0], -1.6), # AftPanel1_lower.txt tuple(ap1l.right[0]), # AftPanel1_lower.txt (2.11772044, -0.26291234), # InternalSurface3_resin.txt (1.2, -0.3), (0.84600000, -0.73214379), # InternalSurface3_resin.txt tuple(ap1l.left[1]) # AftPanel1_lower.txt ] bounding_polygon = Polygon(points_ap1l) pu.plot_polygon(bounding_polygon, 'None', '#000000') # cut the new layer polygons pu.cut_plot_and_write_alt_layer(st.external_surface, 'triax', label, bounding_polygon) pu.cut_plot_and_write_alt_layer(st.external_surface, 'gelcoat', label, bounding_polygon) pu.cut_plot_and_write_alt_layer(st.internal_surface_3, 'resin', label, bounding_polygon) pu.cut_plot_and_write_alt_layer(st.internal_surface_3, 'triax', label, bounding_polygon) # upper aft panel 2 ------------------------------------------------------- label = 'upper aft panel 2' # create the bounding polygon ap2u = st.aft_panel_2.layer['upper'] sw3br = st.shear_web_3.layer['biax, right'] points_ap2u = [ (sw3br.right[0][0], 1.3), (ap2u.right[1][0], 1.3), # AftPanel2_upper.txt tuple(ap2u.right[1]), # AftPanel2_upper.txt (ap2u.right[1][0], 0.18), (3.0, 0.2), is4.polygon.interiors[0].coords[-2], # InternalSurface4_resin.txt tuple(ap2u.left[0]) # AftPanel2_upper.txt ] bounding_polygon = Polygon(points_ap2u) pu.plot_polygon(bounding_polygon, 'None', '#000000') # cut the new layer polygons pu.cut_plot_and_write_alt_layer(st.external_surface, 'triax', label, bounding_polygon) pu.cut_plot_and_write_alt_layer(st.external_surface, 'gelcoat', label, bounding_polygon) pu.cut_plot_and_write_alt_layer(st.internal_surface_4, 'resin', label, bounding_polygon) pu.cut_plot_and_write_alt_layer(st.internal_surface_4, 'triax', label, bounding_polygon) # lower aft panel 2 ------------------------------------------------------- label = 'lower aft panel 2' # create the bounding polygon ap2l = st.aft_panel_2.layer['lower'] is4 = st.internal_surface_4.layer['resin'] sw3br = st.shear_web_3.layer['biax, right'] points_ap2l = [ (sw3br.right[0][0], -1.6), (ap2l.right[0][0], -1.6), # AftPanel2_lower.txt tuple(ap2l.right[0]), # AftPanel2_lower.txt (ap2l.right[0][0], 0.1), (3.0, 0.1), (2.22372044, -0.22534714), # InternalSurface4_resin.txt tuple(ap2l.left[1]) # AftPanel2_lower.txt ] bounding_polygon = Polygon(points_ap2l) pu.plot_polygon(bounding_polygon, 'None', '#000000') # cut the new layer polygons pu.cut_plot_and_write_alt_layer(st.external_surface, 'triax', label, bounding_polygon) pu.cut_plot_and_write_alt_layer(st.external_surface, 'gelcoat', label, bounding_polygon) pu.cut_plot_and_write_alt_layer(st.internal_surface_4, 'resin', label, bounding_polygon) pu.cut_plot_and_write_alt_layer(st.internal_surface_4, 'triax', label, bounding_polygon) # above shear web 1 ---------------------------------------------------------- label = 'above shear web 1' # create the bounding polygon points_asw1 = [ (-0.75, 2.1), (-0.75, 0.1), (-0.836, 0.1), (-0.836, 2.1) ] bounding_polygon = Polygon(points_asw1) pu.plot_polygon(bounding_polygon, 'None', '#000000') # cut the new layer polygons pu.cut_plot_and_write_alt_layer(st.external_surface, 'triax', label, bounding_polygon) pu.cut_plot_and_write_alt_layer(st.external_surface, 'gelcoat', label, bounding_polygon) # below shear web 1 ---------------------------------------------------------- label = 'below shear web 1' # create the bounding polygon points_bsw1 = [ (-0.75, -2.1), (-0.75, -0.1), (-0.836, -0.1), (-0.836, -2.1) ] bounding_polygon = Polygon(points_bsw1) pu.plot_polygon(bounding_polygon, 'None', '#000000') # cut the new layer polygons pu.cut_plot_and_write_alt_layer(st.external_surface, 'triax', label, bounding_polygon) pu.cut_plot_and_write_alt_layer(st.external_surface, 'gelcoat', label, bounding_polygon) # above shear web 2 ---------------------------------------------------------- label = 'above shear web 2' # create the bounding polygon points_asw2 = [ (0.75, 2.1), (0.75, 0.1), (0.836, 0.1), (0.836, 2.1) ] bounding_polygon = Polygon(points_asw2) pu.plot_polygon(bounding_polygon, 'None', '#000000') # cut the new layer polygons pu.cut_plot_and_write_alt_layer(st.external_surface, 'triax', label, bounding_polygon) pu.cut_plot_and_write_alt_layer(st.external_surface, 'gelcoat', label, bounding_polygon) # below shear web 2 ---------------------------------------------------------- label = 'below shear web 2' # create the bounding polygon points_bsw2 = [ (0.75, -2.1), (0.75, -0.1), (0.836, -0.1), (0.836, -2.1) ] bounding_polygon = Polygon(points_bsw2) pu.plot_polygon(bounding_polygon, 'None', '#000000') # cut the new layer polygons pu.cut_plot_and_write_alt_layer(st.external_surface, 'triax', label, bounding_polygon) pu.cut_plot_and_write_alt_layer(st.external_surface, 'gelcoat', label, bounding_polygon) # above shear web 3 ---------------------------------------------------------- label = 'above shear web 3' # create the bounding polygon sw3bl = st.shear_web_3.layer['biax, left'] points_asw3 = [ (sw3bl.left[0][0], 1.0), (sw3bl.left[0][0], 0.1), (sw3br.right[0][0], 0.1), (sw3br.right[0][0], 1.0) ] bounding_polygon = Polygon(points_asw3) pu.plot_polygon(bounding_polygon, 'None', '#000000') # cut the new layer polygons pu.cut_plot_and_write_alt_layer(st.external_surface, 'triax', label, bounding_polygon) pu.cut_plot_and_write_alt_layer(st.external_surface, 'gelcoat', label, bounding_polygon) # below shear web 3 ---------------------------------------------------------- label = 'below shear web 3' # create the bounding polygon points_bsw3 = [ (sw3bl.left[0][0], -1.0), (sw3bl.left[0][0], -0.1), (sw3br.right[0][0], -0.1), (sw3br.right[0][0], -1.0) ] bounding_polygon = Polygon(points_bsw3) pu.plot_polygon(bounding_polygon, 'None', '#000000') # cut the new layer polygons pu.cut_plot_and_write_alt_layer(st.external_surface, 'triax', label, bounding_polygon) pu.cut_plot_and_write_alt_layer(st.external_surface, 'gelcoat', label, bounding_polygon) # left of shear web 1 ------------------------------------------------------- label = 'left of shear web 1' # create the bounding polygon points_lsw1 = points_le[2:-2] bounding_polygon = Polygon(points_lsw1) pu.plot_polygon(bounding_polygon, 'None', '#000000') # cut the new layer polygons pu.cut_plot_and_write_alt_layer(st.internal_surface_1, 'resin', label, bounding_polygon) pu.cut_plot_and_write_alt_layer(st.internal_surface_1, 'triax', label, bounding_polygon) # right of shear web 1 ------------------------------------------------------- label = 'right of shear web 1' # create the bounding polygon points_rsw1 = [ points_usc[0], points_usc[1], (0.0, 0.0), points_lsc[-2], points_lsc[-1] ] bounding_polygon = Polygon(points_rsw1) pu.plot_polygon(bounding_polygon, 'None', '#000000') # cut the new layer polygons pu.cut_plot_and_write_alt_layer(st.internal_surface_2, 'resin', label, bounding_polygon) pu.cut_plot_and_write_alt_layer(st.internal_surface_2, 'triax', label, bounding_polygon) # left of shear web 2 ------------------------------------------------------- label = 'left of shear web 2' # create the bounding polygon points_lsw2 = [ points_usc[3], points_usc[2], (0.0, 0.0), points_lsc[3], points_lsc[2] ] bounding_polygon = Polygon(points_lsw2) pu.plot_polygon(bounding_polygon, 'None', '#000000') # cut the new layer polygons pu.cut_plot_and_write_alt_layer(st.internal_surface_2, 'resin', label, bounding_polygon) pu.cut_plot_and_write_alt_layer(st.internal_surface_2, 'triax', label, bounding_polygon) # right of shear web 2 ------------------------------------------------------- label = 'right of shear web 2' # create the bounding polygon points_rsw2 = [ points_ap1u[-1], points_ap1u[-2], (1.5, 0.0), points_ap1l[-2], points_ap1l[-1] ] bounding_polygon = Polygon(points_rsw2) pu.plot_polygon(bounding_polygon, 'None', '#000000') # cut the new layer polygons pu.cut_plot_and_write_alt_layer(st.internal_surface_3, 'resin', label, bounding_polygon) pu.cut_plot_and_write_alt_layer(st.internal_surface_3, 'triax', label, bounding_polygon) # left of shear web 3 ------------------------------------------------------- label = 'left of shear web 3' # create the bounding polygon points_lsw3 = [ points_ap1u[2], points_ap1u[3], (2.0, 0.0), points_ap1l[3], points_ap1l[2] ] bounding_polygon = Polygon(points_lsw3) pu.plot_polygon(bounding_polygon, 'None', '#000000') # cut the new layer polygons pu.cut_plot_and_write_alt_layer(st.internal_surface_3, 'resin', label, bounding_polygon) pu.cut_plot_and_write_alt_layer(st.internal_surface_3, 'triax', label, bounding_polygon) # right of shear web 3 ------------------------------------------------------- label = 'right of shear web 3' # create the bounding polygon points_rsw3 = [ points_ap2u[-1], points_ap2u[-2], (3.0, 0.15), points_ap2l[-2], points_ap2l[-1] ] bounding_polygon = Polygon(points_rsw3) pu.plot_polygon(bounding_polygon, 'None', '#000000') # cut the new layer polygons pu.cut_plot_and_write_alt_layer(st.internal_surface_4, 'resin', label, bounding_polygon) pu.cut_plot_and_write_alt_layer(st.internal_surface_4, 'triax', label, bounding_polygon) # show the plot plt.show() # write the TrueGrid input file for mesh generation --------------------- st.write_truegrid_inputfile( interrupt_flag=True, additional_layers=[ st.spar_cap.layer['upper'], st.spar_cap.layer['lower'], st.aft_panel_1.layer['upper'], st.aft_panel_1.layer['lower'], st.aft_panel_2.layer['upper'], st.aft_panel_2.layer['lower'], st.LE_panel.layer['foam'], st.shear_web_1.layer['biax, left'], st.shear_web_1.layer['foam'], st.shear_web_1.layer['biax, right'], st.shear_web_2.layer['biax, left'], st.shear_web_2.layer['foam'], st.shear_web_2.layer['biax, right'], st.shear_web_3.layer['biax, left'], st.shear_web_3.layer['foam'], st.shear_web_3.layer['biax, right'] ], alt_TE_reinforcement=True, soft_warning=False)
perryjohnson/biplaneblade
sandia_blade_lib/prep_stn20_mesh.py
Python
gpl-3.0
24,630
#!/usr/bin/env python2 # -*- coding: utf-8 -* #The MIT License (MIT) # Copyright (c) 2015 daite # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # The above copyright notice and this permission notice shall be included in all # copies or substantial portions of the Software. # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. from scrapy import Selector import codecs import requests import argparse import os class ImageDownloader: def __init__(self, root_dir_name): ''' :: init function :: set root directory path when given root dir name ''' self.root_dir_name = root_dir_name self.root_dir_path = os.path.join(os.getcwd(), root_dir_name) if not os.path.exists(self.root_dir_path): os.mkdir(self.root_dir_path) def get_image_urls(self, res_text): ''' :: getting image urls from response_text ''' self.img_urls = Selector(text=res_text).xpath('//a/@href').re('.*jpg$') return self.img_urls def get_description(self, res_text): ''' :: getting description from response_text ''' self.desc_contents = Selector(text=res_text).xpath('//blockquote/text()').extract() return self.desc_contents def save_stuff(self, sub_dir_name, img_urls, desc_contents, text_file_name='description.txt'): ''' :: save images and description each subdir ''' self.sub_dir_path = os.path.join(self.root_dir_path, sub_dir_name) self.sub_dir_desc_file_name = os.path.join(self.sub_dir_path, text_file_name) if not os.path.exists(self.sub_dir_path): os.mkdir(self.sub_dir_path) os.chdir(self.sub_dir_path) with codecs.open(self.sub_dir_desc_file_name, 'a', encoding='utf-8') as f: for content in desc_contents: f.write(content) for img_url in img_urls: cmd = 'wget -nc -t 1 %s &' %img_url os.system(cmd) os.chdir(self.root_dir_path) def multi_save_stuff(self, urlgen, start_num, end_num): ''' :: multi save function ''' for movie_num in range(start_num, end_num + 1): url = urlgen(movie_num) res_text = requests.get(url).text img_urls = self.get_image_urls(res_text) desc_contents = self.get_description(res_text) if not img_urls: print('No images!!!!') continue sub_dir_name = url.split('/')[-1].strip('.html') self.save_stuff(sub_dir_name, img_urls, desc_contents) if __name__ == '__main__': gana_urlgen = lambda x : 'http://blog.livedoor.jp/kirekawa39-siro/archives/200GANA-%d.html' %x siro_urlgen = lambda x : 'http://blog.livedoor.jp/kirekawa39-siro/archives/siro-%d.html' %x parser = argparse.ArgumentParser() parser.add_argument("start", type=int, help='start number') parser.add_argument("end", type=int, help='end number') parser.add_argument('-g', '--gana', help='download image from gana200', action="store_true") parser.add_argument('-s', '--siro', help='download image from siro', action="store_true") args = parser.parse_args() if args.gana: i = ImageDownloader('GANA200') i.multi_save_stuff(gana_urlgen, args.start, args.end) elif args.siro: i = ImageDownloader('SIRO') i.multi_save_stuff(siro_urlgen, args.start, args.end) else: parser.print_help() exit(1)
daite/JAVImageDownloader
jav_image_download.py
Python
mit
3,951
# Names scores # Problem 22 # Using names.txt (right click and 'Save Link/Target As...'), a 46K text file # containing over five-thousand first names, begin by sorting it into # alphabetical order. Then working out the alphabetical value for each name, # multiply this value by its alphabetical position in the list to obtain a # name score. # # For example, when the list is sorted into alphabetical order, COLIN, which # is worth 3 + 15 + 12 + 9 + 14 = 53, is the 938th name in the list. So, COLIN # would obtain a score of 938 × 53 = 49714. # # What is the total of all the name scores in the file? filename = input('enter source file for names: ') with open(filename) as source: read_lines = sorted(source.readlines()) def scoreString(s): """returns int of score of s""" total = 0 newstring = s.upper() print(newstring) for i in range(len(newstring) - 1): print('value of',newstring[i],'is',(int(ord(newstring[i]) - 64))) total += (ord(newstring[i]) - 64) print(total) return total #namescores = [] # #for i in range(len(read_lines)): # namescores.append(scoreString(read_lines[i]) * (i + 1)) # try it with a list comprehension namescores = [(scoreString(read_lines[i]) * (i + 1)) for i in range(len(read_lines))] print(sum(namescores))
rolandwong212/projecteuler
022.py
Python
gpl-3.0
1,315
from __future__ import print_function try: from setuptools import setup, find_packages except ImportError: from distutils.core import setup, find_packages import io import codecs import os import sys import iAnnotateSV here = os.path.abspath(os.path.dirname(__file__)) def read(*filenames, **kwargs): encoding = kwargs.get('encoding', 'utf-8') sep = kwargs.get('sep', '\n') buf = [] for filename in filenames: with io.open(filename, encoding=encoding) as f: buf.append(f.read()) return sep.join(buf) long_description = read('README.rst', 'CHANGES.rst') setup( name='iCallSV', version=iAnnotateSV.__version__, description='The module helps to call structural variants using NGS data set on human.', long_description=long_description, include_package_data=True, url='https://github.com/rhshah/iCallSV', download_url='https://github.com/rhshah/iCallSV/tarball/0.0.7', author=iAnnotateSV.__author__, author_email='rons.shah@gmail.com', license=iAnnotateSV.__license__, platforms='any', packages=['iCallSV'], install_requires=[ 'pandas==0.16.2', 'pysam==0.8.4', 'pyvcf=0.6.7', 'biopython=1.65', 'coloredlogs=5.2', ], zip_safe=False, classifiers=( 'Programming Language :: Python', 'Programming Language :: Python :: 2.7', 'License :: OSI Approved :: Apache License', 'Operating System :: OS Independent', 'Intended Audience :: Science/Research', 'Topic :: Scientific/Engineering :: Bio-Informatics', 'Development Status :: 3' ), )
mskcc/iCallSV
setup.py
Python
apache-2.0
1,644
# Copyright (c) 2001-2014, Canal TP and/or its affiliates. All rights reserved. # # This file is part of Navitia, # the software to build cool stuff with public transport. # # Hope you'll enjoy and contribute to this project, # powered by Canal TP (www.canaltp.fr). # Help us simplify mobility and open public transport: # a non ending quest to the responsive locomotion way of traveling! # # LICENCE: This program is free software; you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # # Stay tuned using # twitter @navitia # IRC #navitia on freenode # https://groups.google.com/d/forum/navitia # www.navitia.io from tyr.command.reload_at import ReloadAtCommand from tyr.command.at_reloader import AtReloader from tyr.command.reload_kraken import ReloadKrakenCommand from tyr.command.build_data import BuildDataCommand from tyr.command.load_data import LoadDataCommand import tyr.command.purge import tyr.command.cities import tyr.command.bounding_shape import tyr.command.import_last_dataset
stifoon/navitia
source/tyr/tyr/command/__init__.py
Python
agpl-3.0
1,575
from django.contrib.gis.geos import GEOSGeometry, GeometryCollection from arches.app.utils.betterJSONSerializer import JSONSerializer, JSONDeserializer class GeoUtils(object): def create_geom_collection_from_geojson(self, geojson): geoms = [] for feature in geojson['features']: geoms.append(GEOSGeometry(JSONSerializer().serialize(feature['geometry']))) return GeometryCollection(geoms) def get_bounds_from_geojson(self, geojson): """ Takes a geojson object with polygon(s) and returns the coordinates of the extent of the polygons. """ geom_collection = self.create_geom_collection_from_geojson(geojson) bounds = geom_collection.extent return bounds def get_centroid(self, geojson): """ Takes a geojson object with polygon(s) and returns its center point as geojson. """ geom_collection = self.create_geom_collection_from_geojson(geojson) centroid = geom_collection.centroid.geojson return JSONDeserializer().deserialize(centroid)
cvast/arches
arches/app/utils/geo_utils.py
Python
agpl-3.0
1,092
""" Generate a mitmproxy dump file. This script demonstrates how to generate a mitmproxy dump file, as it would also be generated by passing `-w` to mitmproxy. In contrast to `-w`, this gives you full control over which flows should be saved and also allows you to rotate files or log to multiple files in parallel. """ import random import sys from mitmproxy import io, http import typing # noqa class Writer: def __init__(self, path: str) -> None: self.f: typing.IO[bytes] = open(path, "wb") self.w = io.FlowWriter(self.f) def response(self, flow: http.HTTPFlow) -> None: if random.choice([True, False]): self.w.add(flow) def done(self): self.f.close() addons = [Writer(sys.argv[1])]
mitmproxy/mitmproxy
examples/addons/io-write-flow-file.py
Python
mit
750
import os import sys try: from django.conf import settings from django.test.utils import get_runner PROJECT_DIR = os.path.dirname(os.path.abspath(__file__)) settings.configure( STATICFILES_DIRS=( os.path.join(PROJECT_DIR, 'tests', 'dummy_content'), ), DATABASES={ "default": { "ENGINE": "django.db.backends.sqlite3", } }, INSTALLED_APPS=[ 'django.contrib.staticfiles', "svg_templatetag", ], TEMPLATES=[{ 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [], 'APP_DIRS': True, }], MIDDLEWARE_CLASSES=(), ) try: import django setup = django.setup except AttributeError: pass else: setup() except ImportError: import traceback traceback.print_exc() msg = "To fix this error, run: pip install -r requirements_test.txt" raise ImportError(msg) def run_tests(*test_args): if not test_args: test_args = ['tests'] # Run tests TestRunner = get_runner(settings) test_runner = TestRunner() failures = test_runner.run_tests(test_args) if failures: sys.exit(bool(failures)) if __name__ == '__main__': run_tests(*sys.argv[1:])
Mediamoose/django-svg-templatetag
runtests.py
Python
mit
1,352
# Copyright (c) 2012 OpenStack Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. policy_data = """ { "context_is_admin": "role:admin", "admin_or_owner": "is_admin:True or project_id:%(project_id)s", "default": "rule:admin_or_owner", "admin_api": "is_admin:True", "bay:create": "", "bay:delete": "", "bay:detail": "", "bay:get": "", "bay:get_all": "", "bay:update": "", "baymodel:create": "", "baymodel:delete": "", "baymodel:detail": "", "baymodel:get": "", "baymodel:get_all": "", "baymodel:update": "", "node:create": "", "node:delete": "", "node:detail": "", "node:get": "", "node:get_all": "", "node:update": "" } """ policy_data_compat_juno = """ { } """ def get_policy_data(compat): if not compat: return policy_data elif compat == 'juno': return policy_data_compat_juno else: raise Exception('Policy data for %s not available' % compat)
hongbin/magnum
magnum/tests/fake_policy.py
Python
apache-2.0
1,487
# -*- coding: utf-8 -*- # Copyright 2017 KMEE # Hendrix Costa <hendrix.costa@kmee.com.br> # License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl). from openerp.addons.financial.tests.financial_test_classes import \ FinancialTestCase class ManualFinancialProcess(FinancialTestCase): def setUp(self): self.financial_model = self.env['financial.move'] super(ManualFinancialProcess, self).setUp() def test_01_check_return_views(self): """Check if view is correctly called for python code""" # test for len(financial.move) == 1 financial_move_id = self.financial_model.search([], limit=1) action = financial_move_id.action_view_financial('2receive') self.assertEqual( action.get('display_name'), 'financial.move.debt.2receive.form (in financial)') self.assertEqual( action.get('res_id'), financial_move_id.id) action = financial_move_id.action_view_financial('2pay') self.assertEqual( action.get('display_name'), 'financial.move.debt.2pay.form (in financial)') self.assertEqual( action.get('res_id'), financial_move_id.id) # test for len(financial.move) > 1 financial_move_id = self.financial_model.search([], limit=2) action = financial_move_id.action_view_financial('2pay') self.assertEqual(action.get('domain')[0][2], financial_move_id.ids) # test for len(financial.move) < 1 action = self.financial_model.action_view_financial('2pay') self.assertEqual(action.get('type'), 'ir.actions.act_window_close')
thinkopensolutions/l10n-brazil
financial/tests/test_financial_move.py
Python
agpl-3.0
1,651
import contextlib from selenium.common.exceptions import NoSuchElementException, ElementNotVisibleException from selenium.common.exceptions import WebDriverException from selenium.webdriver.common.action_chains import ActionChains from selenium.webdriver.support.ui import WebDriverWait from os.path import join, abspath, dirname, exists import os import json BASE_DIR = abspath( join( abspath(dirname(__file__)), ".." ) ) def get_cookie_dir(): cookie_dir = join(BASE_DIR, '.cookie_dumps') if not exists(cookie_dir): os.makedirs(cookie_dir) return cookie_dir class Page(object): """ Base class for all Pages """ cookie_base_path = get_cookie_dir() def __init__(self, base_url, selenium): self.base_url = base_url self.selenium = selenium self.timeout = 60 self._selenium_root = self._root_element if getattr(self, '_root_element', None) else self.selenium @classmethod def make_cookie_dir_path(cls): temp_path = join(cls.cookie_base_path, cls.__module__.replace('.', os.sep)) if not exists(temp_path): os.makedirs(temp_path) return temp_path def exist_cookie(self): cookie_path = self._cookie_ab_path() return exists(cookie_path) def _cookie_ab_path(self): bath_path = self.make_cookie_dir_path() cookie_path = join(bath_path, self.__class__.__name__ + '.json') return cookie_path def dump_cookie(self): cookie_path = self._cookie_ab_path() if not exists(cookie_path): with open(cookie_path, 'w', encoding='utf-8') as f: json.dump(self.selenium.get_cookies(), f) def load_cookie(self): cookie_path = self._cookie_ab_path() with open(cookie_path, 'r', encoding='utf-8') as f: json_cookie = json.load(f) for c in json_cookie: self.selenium.add_cookie(c) @classmethod def clean_cookie(cls): temp_path = join(cls.cookie_base_path, cls.__module__.replace('.', os.sep)) if exists(temp_path): os.removedirs(temp_path) def get_base_url_request_cookie(self): cook_list = self.selenium.get_cookies() base_url = self.base_url[:-1] ret = {} for c in cook_list: if base_url in c['domain']: ret[c['name']] = c['value'] return ret @property def is_the_current_page(self): if getattr(self, '_page_title', None): page_title = self.page_title return self._page_title in page_title @property def current_url(self): return self.selenium.current_url def maximize_window(self): try: self.selenium.maximize_window() except WebDriverException: pass @property def page_title(self): WebDriverWait(self.selenium, self.timeout).until(lambda s: s.title) return self.selenium.title def get_relative_path(self, url): self.selenium.get(self.base_url + url) def is_element_visible(self, by, value): try: return self._selenium_root.find_element(by, value).is_displayed() except (NoSuchElementException, ElementNotVisibleException): # this will return a snapshot, which takes time. return False def is_element_present(self, by, value): self.selenium.implicitly_wait(0) try: self._selenium_root.find_element(by, value) return True except NoSuchElementException: # this will return a snapshot, which takes time. return False finally: # set back to where you once belonged self.selenium.implicitly_wait(10) def wait_for_element_to_be_visible(self, *locator): """Wait for an element to become visible""" self.selenium.implicitly_wait(0) try: WebDriverWait(self.selenium, self.timeout).until( lambda s: self._selenium_root.find_element(*locator).is_displayed()) finally: self.selenium.implicitly_wait(10) def wait_for_element_present(self, *locator): self.selenium.implicitly_wait(0) try: WebDriverWait(self.selenium, self.timeout).until( lambda s: self._selenium_root.find_element(*locator)) finally: self.selenium.implicitly_wait(10) def wait_for_element_not_present(self, *locator): self.selenium.implicitly_wait(0) try: WebDriverWait(self.selenium, self.timeout).until( lambda s: len(self._selenium_root.find_elements(*locator)) < 1) finally: self.selenium.implicitly_wait(10) def wait_for_ajax(self): self.selenium.implicitly_wait(0) try: WebDriverWait(self.selenium, self.timeout).until( lambda s: s.execute_script('return $.active == 0')) finally: self.selenium.implicitly_wait(10) def type_in_element(self, locator, text): """ Type a string into an element. This method clears the element first then types the string via send_keys. Arguments: locator -- a locator for the element text -- the string to type via send_keys """ text_fld = self._selenium_root.find_element(*locator) text_fld.clear() text_fld.send_keys(text) def find_element(self, *locator): return self._selenium_root.find_element(*locator) def find_elements(self, *locator): return self._selenium_root.find_elements(*locator) def go_back(self): self.selenium.back() def refresh(self): self.selenium.refresh() def switch_to_default_content(self): self.selenium.switch_to_default_content() def switch_to_frame(self, frame_loc): frame = self.find_element(*frame_loc) self.selenium.switch_to_frame(frame) @contextlib.contextmanager def focus_frame(self, frame_loc): self.switch_to_frame(frame_loc) yield self.switch_to_default_content() def hover(self, element): ActionChains(self.selenium).move_to_element(element).perform() class PageRegion(Page): def __init__(self, base_url, selenium, element): self._root_element = element Page.__init__(self, base_url, selenium)
malongge/selenium-pom
pages/page.py
Python
apache-2.0
6,638
import argparse from cStringIO import StringIO import json import os from shutil import rmtree from tempfile import mkdtemp import unittest import mock from cloudweatherreport.datastore import DataStore from cloudweatherreport.utils import temp_dir with mock.patch('deployer.utils.get_juju_major_version', return_value=1): # deployer (from bundletester) tries to call out to Juju CLI # on import to determine which major version of Juju it's using from cloudweatherreport import run from cloudweatherreport import model class TestRunner(unittest.TestCase): def setUp(self): self._pgvja = mock.patch.object(run, 'get_versioned_juju_api') self.mgvja = self._pgvja.start() self.addCleanup(self._pgvja.stop) self._pgjmv = mock.patch.object(run, 'get_juju_major_version', return_value=2) self.mgjmv = self._pgjmv.start() self.addCleanup(self._pgjmv.stop) @mock.patch.object(model.TestPlan, 'load_plans') def test_run(self, mload_plans): mload_plans.return_value = [mock.Mock(), mock.Mock()] runner = run.Runner('aws', False, mock.Mock()) runner.run_plan = mock.Mock() runner.run() self.assertEqual(runner.run_plan.call_args_list, [mock.call(p) for p in mload_plans.return_value]) def test_load_index(self): runner = run.Runner('aws', False, mock.Mock()) datastore = mock.Mock() datastore.read.return_value = '{"providers": ["foo"]}' datastore.exists.return_value = True r1 = runner.load_index(datastore) self.assertIsInstance(r1, model.ReportIndex) self.assertEqual(r1.providers, ['foo']) datastore.exists.return_value = False r2 = runner.load_index(datastore) self.assertIsInstance(r2, model.ReportIndex) self.assertEqual(r2.providers, []) @mock.patch.object(model.Report, 'upsert_benchmarks') def test_load_report(self, mupsert_benchmarks): runner = run.Runner('aws', False, mock.Mock(test_id='test')) test_plan = mock.Mock(bundle='bundle', bundle_name='name', url='example.com') test_plan.report_filename.return_value = 'filename' datastore = mock.Mock() datastore.read.return_value = '{"test_id": "foo"}' index = mock.Mock() index.find_previous_report.return_value = mock.Mock() datastore.exists.return_value = True r1 = runner.load_report(datastore, index, test_plan) self.assertIsInstance(r1, model.Report) self.assertEqual(r1.test_id, 'foo') datastore.exists.return_value = False r2 = runner.load_report(datastore, index, test_plan) self.assertIsInstance(r2, model.Report) self.assertEqual(r2.test_id, 'test') assert mupsert_benchmarks.called @mock.patch('cloudweatherreport.run.logging.error') @mock.patch('cloudweatherreport.run.connect_juju_client') def test_run_plan_no_env(self, mock_juju, mock_logging): mock_juju.return_value = None runner = run.Runner('aws', False, mock.Mock()) res = runner.run_plan(mock.Mock()) assert mock_juju.called assert mock_logging.called self.assertFalse(res) @mock.patch('cloudweatherreport.run.DataStore.get') @mock.patch('cloudweatherreport.run.get_provider_name') @mock.patch('cloudweatherreport.run.connect_juju_client') def test_run_plan(self, mock_juju, mock_provider, mock_datastore): env = mock.Mock(spec=['provider_name', 'info']) env.info.return_value = {"ProviderType": "foo"} mock_juju.return_value = env mock_provider.return_value = "foo-provider" tempdir = mkdtemp() ds = DataStore.get(tempdir) mock_datastore.return_value = ds with mock.patch.object(run.Runner, 'run_tests', return_value=mock.Mock()) as mock_result: with mock.patch.object(run.Runner, 'run_benchmarks', return_value="") as mock_benchmark: with mock.patch.object(run.Runner, 'load_index', return_value=mock.Mock()) as mock_index: with mock.patch.object(run.Runner, 'load_report', return_value=mock.Mock() ) as mock_load: mock_index.return_value.bundle_names. \ return_value = ['bundle'] mock_index.return_value.bundle_index_filename. \ return_value = 'bundle/index.html' runner = run.Runner('aws', False, mock.Mock()) runner.run_plan(mock.Mock()) rmtree(tempdir) # Assert we tried to get the Juju env run the tests and benchmarks # and load the report and index assert mock_juju.called assert mock_result.called assert mock_benchmark.called assert mock_index.called assert mock_load.called self.assertTrue(True) @mock.patch('cloudweatherreport.run.get_provider_name') @mock.patch('cloudweatherreport.run.connect_juju_client') def test_run_plan_fail(self, mock_juju, mock_provider): env = mock.Mock(spec=['provider_name', 'info']) env.info.return_value = {"ProviderType": "foo"} mock_juju.return_value = env mock_provider.return_value = "foo-provider" with mock.patch.object(run.Runner, 'run_tests', side_effect=Exception()) as mock_result: with mock.patch.object(run.Runner, 'run_benchmarks', return_value=""): runner = run.Runner('aws', False, mock.Mock()) res = runner.run_plan(mock.Mock()) # Assert we tried to get the Juju env run the tests but # since we failed to run the tests we return false assert mock_juju.called assert mock_result.called self.assertFalse(res) @mock.patch('bundletester.tester.main') @mock.patch.object(model.SuiteResult, 'from_bundletester_output') def test_run_tests(self, bt_out, tester_main): runner = run.Runner('aws', False, mock.Mock()) env = mock.Mock() status = mock.Mock() status.bundle_yaml.return_value = mock.Mock() tester_main.return_value = status bt_out.return_value = status plan = mock.Mock() result = runner.run_tests(env, plan) # You called bundle tester and got back the results assert tester_main.called assert bt_out.called # The result is the Mock returned by SuiteResult self.assertIsInstance(result, mock.Mock) @mock.patch('bundletester.tester.main') @mock.patch.object(model.SuiteResult, 'from_bundletester_output') def test_run_tests_with_args(self, bt_out, tester_main): args = run.parse_args( ['aws', 'test_plan', '--test-id', '1234', '--deploy-plan', 'foo', '--deploy-budget', 'bar', '--testdir', '/tmp/testdir']) runner = run.Runner('aws', False, args) env = mock.Mock(spec_set=['name', 'provider_name']) env.name = 'env-name' status = mock.Mock() status.bundle_yaml.return_value = mock.Mock(spec_set=[]) tester_main.return_value = status bt_out.return_value = status plan = mock.Mock(spec_set=['tests', 'bundle_file', 'bundle']) plan.tests = 'foo-tests' plan.bundle_file = 'foo-bundle-file' plan.bundle = 'foo-bundle' str_io = StringIO() with mock.patch('cloudweatherreport.run.StringIO', autospec=True, return_value=str_io) as string_mock: result = runner.run_tests(plan, env) expected_args = argparse.Namespace( bucket=None, bundle='foo-bundle-file', controllers=['aws'], deploy_budget='bar', deploy_plan='foo', deployment=None, dryrun=False, environment='env-name', exclude=None, failfast=True, juju_major_version=2, log_level='INFO', no_destroy=False, output=str_io, regenerate_index=False, remove_test=None, reporter='json', results_dir='results', results_per_bundle=40, s3_creds=None, s3_public=True, skip_implicit=False, test_id='1234', test_pattern=None, test_plan='test_plan', testdir='foo-bundle', tests='foo-tests', tests_yaml=None, verbose=False) tester_main.assert_called_once_with(expected_args) string_mock.assert_called_once_with() assert bt_out.called self.assertIsInstance(result, mock.Mock) @mock.patch('cloudweatherreport.run.logging.error') @mock.patch('cloudweatherreport.run.find_unit') def test_run_benchmarks_action_fail(self, mock_unit, mock_log_error): mock_unit.return_value = "unit/0" env = mock.Mock() plan = self.get_plan() runner = run.Runner('aws', False, mock.Mock()) runner.run_benchmarks(plan, env) assert mock_log_error.called @mock.patch('cloudweatherreport.run.find_unit', side_effect=Exception) def test_run_benchmarks_unit_not_found(self, mock_unit): mock_unit.return_value = None env = mock.Mock() plan = self.get_plan() runner = run.Runner('aws', False, mock.Mock()) with self.assertRaises(Exception): runner.run_benchmarks(plan, env) @mock.patch('cloudweatherreport.run.model.Benchmark.from_action') @mock.patch('cloudweatherreport.run.run_action') @mock.patch('cloudweatherreport.run.find_unit') def test_run_benchmarks(self, mock_unit, mock_action, mock_result): mock_unit.return_value = "unit/0" mock_action.return_value = mock.Mock() mock_result.return_value = "Result" env = mock.Mock() plan = self.get_plan() runner = run.Runner('aws', False, mock.Mock()) benchmarks = runner.run_benchmarks(plan, env) self.assertEqual(len(benchmarks), 3) def test_remove_test_by_bundle_name(self): index_json = { "providers": [ "GCE" ], "reports": [ { "bundle_name": "foo", "date": "2017-12-06T21:15:56", "results": { "AWS": "FAIL" }, "test_id": "11", "test_label": None, "url": None, }, { "bundle_name": "bar", "date": "2017-11-15T17:44:01", "results": { "Azure": "NONE" }, "test_id": "22", "test_label": None, "url": None, } ] } with temp_dir() as results_dir: full_index = os.path.join( results_dir, model.ReportIndex.full_index_filename_json) args = run.parse_args( ['aws', 'test_plan', '--remove-test', 'foo', "--results-dir", results_dir]) with open(full_index, 'w') as f: json.dump(index_json, f) runner = run.Runner(None, False, args) runner.remove_test_by_bundle_name() with open(full_index) as f: result_index = json.load(f) self.assertEqual(len(result_index["reports"]), 1) self.assertEqual(result_index["reports"][0]["bundle_name"], "bar") os.path.isfile(model.ReportIndex.full_index_filename_html) os.path.isfile(model.ReportIndex.summary_filename_html) os.path.isfile(model.ReportIndex.summary_filename_json) def get_plan(self): plan = model.TestPlan.from_dict({ 'bundle': 'bundle_name', 'bundle_file': 'bundle.yaml', 'tests': ['test1', 'test2'], 'benchmark': { 'unit/0': u'name1', 'unit/1': { u'name2': { 'param': 'value2', }, u'name3': { 'param': 'value3', }, }, }, }) return plan def test_parse_args_defaults(self): with mock.patch('os.getcwd') as mgetcwd: mgetcwd.return_value = '/foo' args = run.parse_args(['aws', 'test_plan', '--test-id', '1234']) expected = argparse.Namespace( bucket=None, bundle=None, controllers=['aws'], deploy_budget=None, deploy_plan=None, deployment=None, dryrun=False, exclude=None, failfast=True, juju_major_version=2, log_level='INFO', no_destroy=False, regenerate_index=False, remove_test=None, results_dir='results', results_per_bundle=40, s3_creds=None, s3_public=True, skip_implicit=False, test_id='1234', test_pattern=None, test_plan='test_plan', testdir='/foo', tests_yaml=None, verbose=False, ) self.assertEqual(args, expected) if __name__ == '__main__': unittest.main()
seman/cloud-weather-report
tests/test_run.py
Python
apache-2.0
13,858
#!/usr/bin/env python from __future__ import print_function import freenect import matplotlib.pyplot as mp import signal import frame_convert mp.ion() image_rgb = None image_depth = None keep_running = True def display_depth(dev, data, timestamp): global image_depth data = frame_convert.pretty_depth(data) mp.gray() mp.figure(1) if image_depth: image_depth.set_data(data) else: image_depth = mp.imshow(data, interpolation='nearest', animated=True) mp.draw() def display_rgb(dev, data, timestamp): global image_rgb mp.figure(2) if image_rgb: image_rgb.set_data(data) else: image_rgb = mp.imshow(data, interpolation='nearest', animated=True) mp.draw() def body(*args): if not keep_running: raise freenect.Kill def handler(signum, frame): global keep_running keep_running = False print('Press Ctrl-C in terminal to stop') signal.signal(signal.SIGINT, handler) freenect.runloop(depth=display_depth, video=display_rgb, body=body)
tpltnt/SimpleCV
scripts/install/win/OpenKinect/freenect-examples/demo_mp_async.py
Python
bsd-3-clause
1,071
# -*- coding: ascii -*- import logging from queue import Queue from threading import Thread, Event import socket class Queued_sender(Thread): '''envoie les actions sans saturer le medium de communication''' def __init__(self, socket): Thread.__init__(self) self.running= Event( ) self.queue = Queue() self.socket = socket self.logger = logging.getLogger("queued_sender") def add_action(self, action): '''Inutile, sauf si on change d'implmentation''' action = bytes(action+"\n", "ascii") self.queue.put(action, True, None) # block=True, timeout=None return len(action) # pour tre compatible avec Comm.sender() def run(self): while not self.running.isSet( ): action = self.queue.get(True, None) # block=True, timeout=None try: self.socket.send(action) except socket.timeout as message: self.logger.error ("Sender : timout %s" % message) except socket.error as message: self.logger.error ("Sender : socket error %s" % message) self.running.wait(0.01) # 50ms entre chaque message
7Robot/cerveau
ia/comm/queued_sender.py
Python
gpl-3.0
1,193
#!/usr/bin/env python # # Copyright 2009 Facebook # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Implementations of various third-party authentication schemes. All the classes in this file are class Mixins designed to be used with web.py RequestHandler classes. The primary methods for each service are authenticate_redirect(), authorize_redirect(), and get_authenticated_user(). The former should be called to redirect the user to, e.g., the OpenID authentication page on the third party service, and the latter should be called upon return to get the user data from the data returned by the third party service. They all take slightly different arguments due to the fact all these services implement authentication and authorization slightly differently. See the individual service classes below for complete documentation. Example usage for Google OpenID: class GoogleHandler(tornado.web.RequestHandler, tornado.auth.GoogleMixin): @tornado.web.asynchronous def get(self): if self.get_argument("openid.mode", None): self.get_authenticated_user(self.async_callback(self._on_auth)) return self.authenticate_redirect() def _on_auth(self, user): if not user: raise tornado.web.HTTPError(500, "Google auth failed") # Save the user with, e.g., set_secure_cookie() """ import base64 import binascii import cgi import hashlib import hmac import logging import time import urllib import urlparse import uuid from tornado import httpclient from tornado import escape from tornado.ioloop import IOLoop class OpenIdMixin(object): """Abstract implementation of OpenID and Attribute Exchange. See GoogleMixin below for example implementations. """ def authenticate_redirect(self, callback_uri=None, ax_attrs=["name","email","language","username"]): """Returns the authentication URL for this service. After authentication, the service will redirect back to the given callback URI. We request the given attributes for the authenticated user by default (name, email, language, and username). If you don't need all those attributes for your app, you can request fewer with the ax_attrs keyword argument. """ callback_uri = callback_uri or self.request.path args = self._openid_args(callback_uri, ax_attrs=ax_attrs) self.redirect(self._OPENID_ENDPOINT + "?" + urllib.urlencode(args)) def get_authenticated_user(self, callback): """Fetches the authenticated user data upon redirect. This method should be called by the handler that receives the redirect from the authenticate_redirect() or authorize_redirect() methods. """ # Verify the OpenID response via direct request to the OP args = dict((k, v[-1]) for k, v in self.request.arguments.iteritems()) args["openid.mode"] = u"check_authentication" url = self._OPENID_ENDPOINT http = httpclient.AsyncHTTPClient() http.fetch(url, self.async_callback( self._on_authentication_verified, callback), method="POST", body=urllib.urlencode(args)) def _openid_args(self, callback_uri, ax_attrs=[], oauth_scope=None): url = urlparse.urljoin(self.request.full_url(), callback_uri) args = { "openid.ns": "http://specs.openid.net/auth/2.0", "openid.claimed_id": "http://specs.openid.net/auth/2.0/identifier_select", "openid.identity": "http://specs.openid.net/auth/2.0/identifier_select", "openid.return_to": url, "openid.realm": self.request.protocol + "://" + self.request.host + "/", "openid.mode": "checkid_setup", } if ax_attrs: args.update({ "openid.ns.ax": "http://openid.net/srv/ax/1.0", "openid.ax.mode": "fetch_request", }) ax_attrs = set(ax_attrs) required = [] if "name" in ax_attrs: ax_attrs -= set(["name", "firstname", "fullname", "lastname"]) required += ["firstname", "fullname", "lastname"] args.update({ "openid.ax.type.firstname": "http://axschema.org/namePerson/first", "openid.ax.type.fullname": "http://axschema.org/namePerson", "openid.ax.type.lastname": "http://axschema.org/namePerson/last", }) known_attrs = { "email": "http://axschema.org/contact/email", "language": "http://axschema.org/pref/language", "username": "http://axschema.org/namePerson/friendly", } for name in ax_attrs: args["openid.ax.type." + name] = known_attrs[name] required.append(name) args["openid.ax.required"] = ",".join(required) if oauth_scope: args.update({ "openid.ns.oauth": "http://specs.openid.net/extensions/oauth/1.0", "openid.oauth.consumer": self.request.host.split(":")[0], "openid.oauth.scope": oauth_scope, }) return args def _on_authentication_verified(self, callback, response): if response.error or u"is_valid:true" not in response.body: logging.warning("Invalid OpenID response: %s", response.error or response.body) callback(None) return # Make sure we got back at least an email from attribute exchange ax_ns = None for name, values in self.request.arguments.iteritems(): if name.startswith("openid.ns.") and \ values[-1] == u"http://openid.net/srv/ax/1.0": ax_ns = name[10:] break def get_ax_arg(uri): if not ax_ns: return u"" prefix = "openid." + ax_ns + ".type." ax_name = None for name, values in self.request.arguments.iteritems(): if values[-1] == uri and name.startswith(prefix): part = name[len(prefix):] ax_name = "openid." + ax_ns + ".value." + part break if not ax_name: return u"" return self.get_argument(ax_name, u"") email = get_ax_arg("http://axschema.org/contact/email") name = get_ax_arg("http://axschema.org/namePerson") first_name = get_ax_arg("http://axschema.org/namePerson/first") last_name = get_ax_arg("http://axschema.org/namePerson/last") username = get_ax_arg("http://axschema.org/namePerson/friendly") locale = get_ax_arg("http://axschema.org/pref/language").lower() user = dict() name_parts = [] if first_name: user["first_name"] = first_name name_parts.append(first_name) if last_name: user["last_name"] = last_name name_parts.append(last_name) if name: user["name"] = name elif name_parts: user["name"] = u" ".join(name_parts) elif email: user["name"] = email.split("@")[0] if email: user["email"] = email if locale: user["locale"] = locale if username: user["username"] = username callback(user) class OAuthMixin(object): """Abstract implementation of OAuth. See TwitterMixin and FriendFeedMixin below for example implementations. """ def authorize_redirect(self, callback_uri=None, extra_params=None): """Redirects the user to obtain OAuth authorization for this service. Twitter and FriendFeed both require that you register a Callback URL with your application. You should call this method to log the user in, and then call get_authenticated_user() in the handler you registered as your Callback URL to complete the authorization process. This method sets a cookie called _oauth_request_token which is subsequently used (and cleared) in get_authenticated_user for security purposes. """ if callback_uri and getattr(self, "_OAUTH_NO_CALLBACKS", False): raise Exception("This service does not support oauth_callback") http = httpclient.AsyncHTTPClient() if getattr(self, "_OAUTH_VERSION", "1.0a") == "1.0a": http.fetch(self._oauth_request_token_url(callback_uri=callback_uri, extra_params=extra_params), self.async_callback( self._on_request_token, self._OAUTH_AUTHORIZE_URL, callback_uri)) else: http.fetch(self._oauth_request_token_url(), self.async_callback( self._on_request_token, self._OAUTH_AUTHORIZE_URL, callback_uri)) def get_authenticated_user(self, callback): """Gets the OAuth authorized user and access token on callback. This method should be called from the handler for your registered OAuth Callback URL to complete the registration process. We call callback with the authenticated user, which in addition to standard attributes like 'name' includes the 'access_key' attribute, which contains the OAuth access you can use to make authorized requests to this service on behalf of the user. """ request_key = self.get_argument("oauth_token") oauth_verifier = self.get_argument("oauth_verifier", None) request_cookie = self.get_cookie("_oauth_request_token") if not request_cookie: logging.warning("Missing OAuth request token cookie") callback(None) return self.clear_cookie("_oauth_request_token") cookie_key, cookie_secret = [base64.b64decode(i) for i in request_cookie.split("|")] if cookie_key != request_key: logging.warning("Request token does not match cookie") callback(None) return token = dict(key=cookie_key, secret=cookie_secret) if oauth_verifier: token["verifier"] = oauth_verifier http = httpclient.AsyncHTTPClient() http.fetch(self._oauth_access_token_url(token), self.async_callback( self._on_access_token, callback)) def _oauth_request_token_url(self, callback_uri= None, extra_params=None): consumer_token = self._oauth_consumer_token() url = self._OAUTH_REQUEST_TOKEN_URL args = dict( oauth_consumer_key=consumer_token["key"], oauth_signature_method="HMAC-SHA1", oauth_timestamp=str(int(time.time())), oauth_nonce=binascii.b2a_hex(uuid.uuid4().bytes), oauth_version=getattr(self, "_OAUTH_VERSION", "1.0a"), ) if getattr(self, "_OAUTH_VERSION", "1.0a") == "1.0a": if callback_uri: args["oauth_callback"] = urlparse.urljoin( self.request.full_url(), callback_uri) if extra_params: args.update(extra_params) signature = _oauth10a_signature(consumer_token, "GET", url, args) else: signature = _oauth_signature(consumer_token, "GET", url, args) args["oauth_signature"] = signature return url + "?" + urllib.urlencode(args) def _on_request_token(self, authorize_url, callback_uri, response): if response.error: raise Exception("Could not get request token") request_token = _oauth_parse_response(response.body) data = "|".join([base64.b64encode(request_token["key"]), base64.b64encode(request_token["secret"])]) self.set_cookie("_oauth_request_token", data) args = dict(oauth_token=request_token["key"]) if callback_uri: args["oauth_callback"] = urlparse.urljoin( self.request.full_url(), callback_uri) self.redirect(authorize_url + "?" + urllib.urlencode(args)) def _oauth_access_token_url(self, request_token): consumer_token = self._oauth_consumer_token() url = self._OAUTH_ACCESS_TOKEN_URL args = dict( oauth_consumer_key=consumer_token["key"], oauth_token=request_token["key"], oauth_signature_method="HMAC-SHA1", oauth_timestamp=str(int(time.time())), oauth_nonce=binascii.b2a_hex(uuid.uuid4().bytes), oauth_version=getattr(self, "_OAUTH_VERSION", "1.0a"), ) if "verifier" in request_token: args["oauth_verifier"]=request_token["verifier"] if getattr(self, "_OAUTH_VERSION", "1.0a") == "1.0a": signature = _oauth10a_signature(consumer_token, "GET", url, args, request_token) else: signature = _oauth_signature(consumer_token, "GET", url, args, request_token) args["oauth_signature"] = signature return url + "?" + urllib.urlencode(args) def _on_access_token(self, callback, response): if response.error: logging.warning("Could not fetch access token") callback(None) return access_token = _oauth_parse_response(response.body) user = self._oauth_get_user(access_token, self.async_callback( self._on_oauth_get_user, access_token, callback)) def _oauth_get_user(self, access_token, callback): raise NotImplementedError() def _on_oauth_get_user(self, access_token, callback, user): if not user: callback(None) return user["access_token"] = access_token callback(user) def _oauth_request_parameters(self, url, access_token, parameters={}, method="GET"): """Returns the OAuth parameters as a dict for the given request. parameters should include all POST arguments and query string arguments that will be sent with the request. """ consumer_token = self._oauth_consumer_token() base_args = dict( oauth_consumer_key=consumer_token["key"], oauth_token=access_token["key"], oauth_signature_method="HMAC-SHA1", oauth_timestamp=str(int(time.time())), oauth_nonce=binascii.b2a_hex(uuid.uuid4().bytes), oauth_version=getattr(self, "_OAUTH_VERSION", "1.0a"), ) args = {} args.update(base_args) args.update(parameters) if getattr(self, "_OAUTH_VERSION", "1.0a") == "1.0a": signature = _oauth10a_signature(consumer_token, method, url, args, access_token) else: signature = _oauth_signature(consumer_token, method, url, args, access_token) base_args["oauth_signature"] = signature return base_args class OAuth2Mixin(object): """Abstract implementation of OAuth v 2.""" def authorize_redirect(self, redirect_uri=None, client_id=None, client_secret=None, extra_params=None ): """Redirects the user to obtain OAuth authorization for this service. Some providers require that you register a Callback URL with your application. You should call this method to log the user in, and then call get_authenticated_user() in the handler you registered as your Callback URL to complete the authorization process. """ args = { "redirect_uri": redirect_uri, "client_id": client_id } if extra_params: args.update(extra_params) self.redirect(self._OAUTH_AUTHORIZE_URL + urllib.urlencode(args)) def _oauth_request_token_url(self, redirect_uri= None, client_id = None, client_secret=None, code=None, extra_params=None): url = self._OAUTH_ACCESS_TOKEN_URL args = dict( redirect_uri=redirect_uri, code=code, client_id=client_id, client_secret=client_secret, ) if extra_params: args.update(extra_params) return url + urllib.urlencode(args) class TwitterMixin(OAuthMixin): """Twitter OAuth authentication. To authenticate with Twitter, register your application with Twitter at http://twitter.com/apps. Then copy your Consumer Key and Consumer Secret to the application settings 'twitter_consumer_key' and 'twitter_consumer_secret'. Use this Mixin on the handler for the URL you registered as your application's Callback URL. When your application is set up, you can use this Mixin like this to authenticate the user with Twitter and get access to their stream: class TwitterHandler(tornado.web.RequestHandler, tornado.auth.TwitterMixin): @tornado.web.asynchronous def get(self): if self.get_argument("oauth_token", None): self.get_authenticated_user(self.async_callback(self._on_auth)) return self.authorize_redirect() def _on_auth(self, user): if not user: raise tornado.web.HTTPError(500, "Twitter auth failed") # Save the user using, e.g., set_secure_cookie() The user object returned by get_authenticated_user() includes the attributes 'username', 'name', and all of the custom Twitter user attributes describe at http://apiwiki.twitter.com/Twitter-REST-API-Method%3A-users%C2%A0show in addition to 'access_token'. You should save the access token with the user; it is required to make requests on behalf of the user later with twitter_request(). """ _OAUTH_REQUEST_TOKEN_URL = "http://api.twitter.com/oauth/request_token" _OAUTH_ACCESS_TOKEN_URL = "http://api.twitter.com/oauth/access_token" _OAUTH_AUTHORIZE_URL = "http://api.twitter.com/oauth/authorize" _OAUTH_AUTHENTICATE_URL = "http://api.twitter.com/oauth/authenticate" _OAUTH_NO_CALLBACKS = False def authenticate_redirect(self): """Just like authorize_redirect(), but auto-redirects if authorized. This is generally the right interface to use if you are using Twitter for single-sign on. """ http = httpclient.AsyncHTTPClient() http.fetch(self._oauth_request_token_url(), self.async_callback( self._on_request_token, self._OAUTH_AUTHENTICATE_URL, None)) def twitter_request(self, path, callback, access_token=None, post_args=None, **args): """Fetches the given API path, e.g., "/statuses/user_timeline/btaylor" The path should not include the format (we automatically append ".json" and parse the JSON output). If the request is a POST, post_args should be provided. Query string arguments should be given as keyword arguments. All the Twitter methods are documented at http://apiwiki.twitter.com/Twitter-API-Documentation. Many methods require an OAuth access token which you can obtain through authorize_redirect() and get_authenticated_user(). The user returned through that process includes an 'access_token' attribute that can be used to make authenticated requests via this method. Example usage: class MainHandler(tornado.web.RequestHandler, tornado.auth.TwitterMixin): @tornado.web.authenticated @tornado.web.asynchronous def get(self): self.twitter_request( "/statuses/update", post_args={"status": "Testing Tornado Web Server"}, access_token=user["access_token"], callback=self.async_callback(self._on_post)) def _on_post(self, new_entry): if not new_entry: # Call failed; perhaps missing permission? self.authorize_redirect() return self.finish("Posted a message!") """ # Add the OAuth resource request signature if we have credentials url = "http://api.twitter.com/1" + path + ".json" if access_token: all_args = {} all_args.update(args) all_args.update(post_args or {}) consumer_token = self._oauth_consumer_token() method = "POST" if post_args is not None else "GET" oauth = self._oauth_request_parameters( url, access_token, all_args, method=method) args.update(oauth) if args: url += "?" + urllib.urlencode(args) callback = self.async_callback(self._on_twitter_request, callback) http = httpclient.AsyncHTTPClient() if post_args is not None: http.fetch(url, method="POST", body=urllib.urlencode(post_args), callback=callback) else: http.fetch(url, callback=callback) def _on_twitter_request(self, callback, response): if response.error: logging.warning("Error response %s fetching %s", response.error, response.request.url) callback(None) return callback(escape.json_decode(response.body)) def _oauth_consumer_token(self): self.require_setting("twitter_consumer_key", "Twitter OAuth") self.require_setting("twitter_consumer_secret", "Twitter OAuth") return dict( key=self.settings["twitter_consumer_key"], secret=self.settings["twitter_consumer_secret"]) def _oauth_get_user(self, access_token, callback): callback = self.async_callback(self._parse_user_response, callback) self.twitter_request( "/users/show/" + access_token["screen_name"], access_token=access_token, callback=callback) def _parse_user_response(self, callback, user): if user: user["username"] = user["screen_name"] callback(user) class FriendFeedMixin(OAuthMixin): """FriendFeed OAuth authentication. To authenticate with FriendFeed, register your application with FriendFeed at http://friendfeed.com/api/applications. Then copy your Consumer Key and Consumer Secret to the application settings 'friendfeed_consumer_key' and 'friendfeed_consumer_secret'. Use this Mixin on the handler for the URL you registered as your application's Callback URL. When your application is set up, you can use this Mixin like this to authenticate the user with FriendFeed and get access to their feed: class FriendFeedHandler(tornado.web.RequestHandler, tornado.auth.FriendFeedMixin): @tornado.web.asynchronous def get(self): if self.get_argument("oauth_token", None): self.get_authenticated_user(self.async_callback(self._on_auth)) return self.authorize_redirect() def _on_auth(self, user): if not user: raise tornado.web.HTTPError(500, "FriendFeed auth failed") # Save the user using, e.g., set_secure_cookie() The user object returned by get_authenticated_user() includes the attributes 'username', 'name', and 'description' in addition to 'access_token'. You should save the access token with the user; it is required to make requests on behalf of the user later with friendfeed_request(). """ _OAUTH_VERSION = "1.0" _OAUTH_REQUEST_TOKEN_URL = "https://friendfeed.com/account/oauth/request_token" _OAUTH_ACCESS_TOKEN_URL = "https://friendfeed.com/account/oauth/access_token" _OAUTH_AUTHORIZE_URL = "https://friendfeed.com/account/oauth/authorize" _OAUTH_NO_CALLBACKS = True _OAUTH_VERSION = "1.0" def friendfeed_request(self, path, callback, access_token=None, post_args=None, **args): """Fetches the given relative API path, e.g., "/bret/friends" If the request is a POST, post_args should be provided. Query string arguments should be given as keyword arguments. All the FriendFeed methods are documented at http://friendfeed.com/api/documentation. Many methods require an OAuth access token which you can obtain through authorize_redirect() and get_authenticated_user(). The user returned through that process includes an 'access_token' attribute that can be used to make authenticated requests via this method. Example usage: class MainHandler(tornado.web.RequestHandler, tornado.auth.FriendFeedMixin): @tornado.web.authenticated @tornado.web.asynchronous def get(self): self.friendfeed_request( "/entry", post_args={"body": "Testing Tornado Web Server"}, access_token=self.current_user["access_token"], callback=self.async_callback(self._on_post)) def _on_post(self, new_entry): if not new_entry: # Call failed; perhaps missing permission? self.authorize_redirect() return self.finish("Posted a message!") """ # Add the OAuth resource request signature if we have credentials url = "http://friendfeed-api.com/v2" + path if access_token: all_args = {} all_args.update(args) all_args.update(post_args or {}) consumer_token = self._oauth_consumer_token() method = "POST" if post_args is not None else "GET" oauth = self._oauth_request_parameters( url, access_token, all_args, method=method) args.update(oauth) if args: url += "?" + urllib.urlencode(args) callback = self.async_callback(self._on_friendfeed_request, callback) http = httpclient.AsyncHTTPClient() if post_args is not None: http.fetch(url, method="POST", body=urllib.urlencode(post_args), callback=callback) else: http.fetch(url, callback=callback) def _on_friendfeed_request(self, callback, response): if response.error: logging.warning("Error response %s fetching %s", response.error, response.request.url) callback(None) return callback(escape.json_decode(response.body)) def _oauth_consumer_token(self): self.require_setting("friendfeed_consumer_key", "FriendFeed OAuth") self.require_setting("friendfeed_consumer_secret", "FriendFeed OAuth") return dict( key=self.settings["friendfeed_consumer_key"], secret=self.settings["friendfeed_consumer_secret"]) def _oauth_get_user(self, access_token, callback): callback = self.async_callback(self._parse_user_response, callback) self.friendfeed_request( "/feedinfo/" + access_token["username"], include="id,name,description", access_token=access_token, callback=callback) def _parse_user_response(self, callback, user): if user: user["username"] = user["id"] callback(user) class GoogleMixin(OpenIdMixin, OAuthMixin): """Google Open ID / OAuth authentication. No application registration is necessary to use Google for authentication or to access Google resources on behalf of a user. To authenticate with Google, redirect with authenticate_redirect(). On return, parse the response with get_authenticated_user(). We send a dict containing the values for the user, including 'email', 'name', and 'locale'. Example usage: class GoogleHandler(tornado.web.RequestHandler, tornado.auth.GoogleMixin): @tornado.web.asynchronous def get(self): if self.get_argument("openid.mode", None): self.get_authenticated_user(self.async_callback(self._on_auth)) return self.authenticate_redirect() def _on_auth(self, user): if not user: raise tornado.web.HTTPError(500, "Google auth failed") # Save the user with, e.g., set_secure_cookie() """ _OPENID_ENDPOINT = "https://www.google.com/accounts/o8/ud" _OAUTH_ACCESS_TOKEN_URL = "https://www.google.com/accounts/OAuthGetAccessToken" def authorize_redirect(self, oauth_scope, callback_uri=None, ax_attrs=["name","email","language","username"]): """Authenticates and authorizes for the given Google resource. Some of the available resources are: Gmail Contacts - http://www.google.com/m8/feeds/ Calendar - http://www.google.com/calendar/feeds/ Finance - http://finance.google.com/finance/feeds/ You can authorize multiple resources by separating the resource URLs with a space. """ callback_uri = callback_uri or self.request.path args = self._openid_args(callback_uri, ax_attrs=ax_attrs, oauth_scope=oauth_scope) self.redirect(self._OPENID_ENDPOINT + "?" + urllib.urlencode(args)) def get_authenticated_user(self, callback): """Fetches the authenticated user data upon redirect.""" # Look to see if we are doing combined OpenID/OAuth oauth_ns = "" for name, values in self.request.arguments.iteritems(): if name.startswith("openid.ns.") and \ values[-1] == u"http://specs.openid.net/extensions/oauth/1.0": oauth_ns = name[10:] break token = self.get_argument("openid." + oauth_ns + ".request_token", "") if token: http = httpclient.AsyncHTTPClient() token = dict(key=token, secret="") http.fetch(self._oauth_access_token_url(token), self.async_callback(self._on_access_token, callback)) else: OpenIdMixin.get_authenticated_user(self, callback) def _oauth_consumer_token(self): self.require_setting("google_consumer_key", "Google OAuth") self.require_setting("google_consumer_secret", "Google OAuth") return dict( key=self.settings["google_consumer_key"], secret=self.settings["google_consumer_secret"]) def _oauth_get_user(self, access_token, callback): OpenIdMixin.get_authenticated_user(self, callback) class FacebookMixin(object): """Facebook Connect authentication. To authenticate with Facebook, register your application with Facebook at http://www.facebook.com/developers/apps.php. Then copy your API Key and Application Secret to the application settings 'facebook_api_key' and 'facebook_secret'. When your application is set up, you can use this Mixin like this to authenticate the user with Facebook: class FacebookHandler(tornado.web.RequestHandler, tornado.auth.FacebookMixin): @tornado.web.asynchronous def get(self): if self.get_argument("session", None): self.get_authenticated_user(self.async_callback(self._on_auth)) return self.authenticate_redirect() def _on_auth(self, user): if not user: raise tornado.web.HTTPError(500, "Facebook auth failed") # Save the user using, e.g., set_secure_cookie() The user object returned by get_authenticated_user() includes the attributes 'facebook_uid' and 'name' in addition to session attributes like 'session_key'. You should save the session key with the user; it is required to make requests on behalf of the user later with facebook_request(). """ def authenticate_redirect(self, callback_uri=None, cancel_uri=None, extended_permissions=None): """Authenticates/installs this app for the current user.""" self.require_setting("facebook_api_key", "Facebook Connect") callback_uri = callback_uri or self.request.path args = { "api_key": self.settings["facebook_api_key"], "v": "1.0", "fbconnect": "true", "display": "page", "next": urlparse.urljoin(self.request.full_url(), callback_uri), "return_session": "true", } if cancel_uri: args["cancel_url"] = urlparse.urljoin( self.request.full_url(), cancel_uri) if extended_permissions: if isinstance(extended_permissions, basestring): extended_permissions = [extended_permissions] args["req_perms"] = ",".join(extended_permissions) self.redirect("http://www.facebook.com/login.php?" + urllib.urlencode(args)) def authorize_redirect(self, extended_permissions, callback_uri=None, cancel_uri=None): """Redirects to an authorization request for the given FB resource. The available resource names are listed at http://wiki.developers.facebook.com/index.php/Extended_permission. The most common resource types include: publish_stream read_stream email sms extended_permissions can be a single permission name or a list of names. To get the session secret and session key, call get_authenticated_user() just as you would with authenticate_redirect(). """ self.authenticate_redirect(callback_uri, cancel_uri, extended_permissions) def get_authenticated_user(self, callback): """Fetches the authenticated Facebook user. The authenticated user includes the special Facebook attributes 'session_key' and 'facebook_uid' in addition to the standard user attributes like 'name'. """ self.require_setting("facebook_api_key", "Facebook Connect") session = escape.json_decode(self.get_argument("session")) self.facebook_request( method="facebook.users.getInfo", callback=self.async_callback( self._on_get_user_info, callback, session), session_key=session["session_key"], uids=session["uid"], fields="uid,first_name,last_name,name,locale,pic_square," \ "profile_url,username") def facebook_request(self, method, callback, **args): """Makes a Facebook API REST request. We automatically include the Facebook API key and signature, but it is the callers responsibility to include 'session_key' and any other required arguments to the method. The available Facebook methods are documented here: http://wiki.developers.facebook.com/index.php/API Here is an example for the stream.get() method: class MainHandler(tornado.web.RequestHandler, tornado.auth.FacebookMixin): @tornado.web.authenticated @tornado.web.asynchronous def get(self): self.facebook_request( method="stream.get", callback=self.async_callback(self._on_stream), session_key=self.current_user["session_key"]) def _on_stream(self, stream): if stream is None: # Not authorized to read the stream yet? self.redirect(self.authorize_redirect("read_stream")) return self.render("stream.html", stream=stream) """ self.require_setting("facebook_api_key", "Facebook Connect") self.require_setting("facebook_secret", "Facebook Connect") if not method.startswith("facebook."): method = "facebook." + method args["api_key"] = self.settings["facebook_api_key"] args["v"] = "1.0" args["method"] = method args["call_id"] = str(long(time.time() * 1e6)) args["format"] = "json" args["sig"] = self._signature(args) url = "http://api.facebook.com/restserver.php?" + \ urllib.urlencode(args) http = httpclient.AsyncHTTPClient() http.fetch(url, callback=self.async_callback( self._parse_response, callback)) def _on_get_user_info(self, callback, session, users): if users is None: callback(None) return callback({ "name": users[0]["name"], "first_name": users[0]["first_name"], "last_name": users[0]["last_name"], "uid": users[0]["uid"], "locale": users[0]["locale"], "pic_square": users[0]["pic_square"], "profile_url": users[0]["profile_url"], "username": users[0].get("username"), "session_key": session["session_key"], "session_expires": session.get("expires"), }) def _parse_response(self, callback, response): if response.error: logging.warning("HTTP error from Facebook: %s", response.error) callback(None) return try: json = escape.json_decode(response.body) except: logging.warning("Invalid JSON from Facebook: %r", response.body) callback(None) return if isinstance(json, dict) and json.get("error_code"): logging.warning("Facebook error: %d: %r", json["error_code"], json.get("error_msg")) callback(None) return callback(json) def _signature(self, args): parts = ["%s=%s" % (n, args[n]) for n in sorted(args.keys())] body = "".join(parts) + self.settings["facebook_secret"] if isinstance(body, unicode): body = body.encode("utf-8") return hashlib.md5(body).hexdigest() class FacebookGraphMixin(OAuth2Mixin): _OAUTH_ACCESS_TOKEN_URL = "https://graph.facebook.com/oauth/access_token?" _OAUTH_AUTHORIZE_URL = "https://graph.facebook.com/oauth/authorize?" _OAUTH_NO_CALLBACKS = False def get_authenticated_user(self, redirect_uri, client_id, client_secret, code, callback): """ Handles the login for the Facebook user, returning a user object. Example usage: class FacebookGraphLoginHandler(LoginHandler, tornado.auth.FacebookGraphMixin): @tornado.web.asynchronous def get(self): if self.get_argument("code", False): self.get_authenticated_user( redirect_uri='/auth/facebookgraph/', client_id=self.settings["facebook_api_key"], client_secret=self.settings["facebook_secret"], code=self.get_argument("code"), callback=self.async_callback( self._on_login)) return self.authorize_redirect(redirect_uri='/auth/facebookgraph/', client_id=self.settings["facebook_api_key"], extra_params={"scope": "read_stream,offline_access"}) def _on_login(self, user): logging.error(user) self.finish() """ http = httpclient.AsyncHTTPClient() args = { "redirect_uri": redirect_uri, "code": code, "client_id": client_id, "client_secret": client_secret, } http.fetch(self._oauth_request_token_url(**args), self.async_callback(self._on_access_token, redirect_uri, client_id, client_secret, callback)) def _on_access_token(self, redirect_uri, client_id, client_secret, callback, response): session = { "access_token": cgi.parse_qs(response.body)["access_token"][-1], "expires": cgi.parse_qs(response.body)["expires"] } self.facebook_request( path="/me", callback=self.async_callback( self._on_get_user_info, callback, session), access_token=session["access_token"], fields="picture" ) def _on_get_user_info(self, callback, session, user): if user is None: callback(None) return callback({ "name": user["name"], "first_name": user["first_name"], "last_name": user["last_name"], "id": user["id"], "locale": user["locale"], "picture": user.get("picture"), "link": user["link"], "username": user.get("username"), "access_token": session["access_token"], "session_expires": session.get("expires"), }) def facebook_request(self, path, callback, access_token=None, post_args=None, **args): """Fetches the given relative API path, e.g., "/btaylor/picture" If the request is a POST, post_args should be provided. Query string arguments should be given as keyword arguments. An introduction to the Facebook Graph API can be found at http://developers.facebook.com/docs/api Many methods require an OAuth access token which you can obtain through authorize_redirect() and get_authenticated_user(). The user returned through that process includes an 'access_token' attribute that can be used to make authenticated requests via this method. Example usage: class MainHandler(tornado.web.RequestHandler, tornado.auth.FacebookGraphMixin): @tornado.web.authenticated @tornado.web.asynchronous def get(self): self.facebook_request( "/me/feed", post_args={"message": "I am posting from my Tornado application!"}, access_token=self.current_user["access_token"], callback=self.async_callback(self._on_post)) def _on_post(self, new_entry): if not new_entry: # Call failed; perhaps missing permission? self.authorize_redirect() return self.finish("Posted a message!") """ url = "https://graph.facebook.com" + path all_args = {} if access_token: all_args["access_token"] = access_token all_args.update(args) all_args.update(post_args or {}) if all_args: url += "?" + urllib.urlencode(all_args) callback = self.async_callback(self._on_facebook_request, callback) http = httpclient.AsyncHTTPClient() if post_args is not None: http.fetch(url, method="POST", body=urllib.urlencode(post_args), callback=callback) else: http.fetch(url, callback=callback) def _on_facebook_request(self, callback, response): if response.error: logging.warning("Error response %s fetching %s", response.error, response.request.url) callback(None) return callback(escape.json_decode(response.body)) def _oauth_signature(consumer_token, method, url, parameters={}, token=None): """Calculates the HMAC-SHA1 OAuth signature for the given request. See http://oauth.net/core/1.0/#signing_process """ parts = urlparse.urlparse(url) scheme, netloc, path = parts[:3] normalized_url = scheme.lower() + "://" + netloc.lower() + path base_elems = [] base_elems.append(method.upper()) base_elems.append(normalized_url) base_elems.append("&".join("%s=%s" % (k, _oauth_escape(str(v))) for k, v in sorted(parameters.items()))) base_string = "&".join(_oauth_escape(e) for e in base_elems) key_elems = [consumer_token["secret"]] key_elems.append(token["secret"] if token else "") key = "&".join(key_elems) hash = hmac.new(key, base_string, hashlib.sha1) return binascii.b2a_base64(hash.digest())[:-1] def _oauth10a_signature(consumer_token, method, url, parameters={}, token=None): """Calculates the HMAC-SHA1 OAuth 1.0a signature for the given request. See http://oauth.net/core/1.0a/#signing_process """ parts = urlparse.urlparse(url) scheme, netloc, path = parts[:3] normalized_url = scheme.lower() + "://" + netloc.lower() + path base_elems = [] base_elems.append(method.upper()) base_elems.append(normalized_url) base_elems.append("&".join("%s=%s" % (k, _oauth_escape(str(v))) for k, v in sorted(parameters.items()))) base_string = "&".join(_oauth_escape(e) for e in base_elems) key_elems = [urllib.quote(consumer_token["secret"], safe='~')] key_elems.append(urllib.quote(token["secret"], safe='~') if token else "") key = "&".join(key_elems) hash = hmac.new(key, base_string, hashlib.sha1) return binascii.b2a_base64(hash.digest())[:-1] def _oauth_escape(val): if isinstance(val, unicode): val = val.encode("utf-8") return urllib.quote(val, safe="~") def _oauth_parse_response(body): p = cgi.parse_qs(body, keep_blank_values=False) token = dict(key=p["oauth_token"][0], secret=p["oauth_token_secret"][0]) # Add the extra parameters the Provider included to the token special = ("oauth_token", "oauth_token_secret") token.update((k, p[k][0]) for k in p if k not in special) return token
gtzilla/fastFrag-utils
app_engine/fastfrag-converter/tornado/auth.py
Python
unlicense
46,603
# -*- coding: utf-8 -*- import codecs import collections import time class WebVtt: """ Create a webvtt formatted file """ def __init__(self, filename, regions=None): self.file = codecs.open(filename, "w", "utf_8_sig") self.file.write(u"WEBVTT\n") if regions is not None: if isinstance(regions, list): for region in regions: self.file.write(region + '\n') else: sef.file.write(regions + '\n') self.file.write(u"\n") self.pending = collections.deque() self.zero_time = time.time() def add_cue(self, text, id=None, region=None, duration=None): cue = Cue(self, text, id=id, region=region, duration=duration) self.pending.append(cue) if duration is not None: self.check_end() return(cue) def check_end(self): while len(self.pending) > 0 and self.pending[0].stop is not None: cue = self.pending.popleft() self.file.write(cue.get_string()) def __del__(self): for cue in self.pending: cue.end(False) self.check_end() self.file.close() class Cue: def __init__(self, parent, text, duration=None, id=None, region=None): self.parent = parent self.text = text self.id = id self.region = region self.start = time.time() - self.parent.zero_time if duration is None: self.stop = None else: self.stop = self.start + duration def end(self, check=True): self.stop = time.time() - self.parent.zero_time if check: self.parent.check_end() def set_region(self, region): self.region = region def get_string(self): if self.stop is None: return(u"") ret = u"" if self.id: ret += id + u"\n" ret += self.convert_time(self.start) + u" --> " + self.convert_time(self.stop) if self.region: ret += u" region:{region} ".format(region=self.region) ret += u"\n" ret += self.text + u"\n\n" return(ret) def convert_time(self, seconds): micro = int((seconds - int(seconds)) * 1000) seconds = int(seconds) minutes = int(seconds/60) seconds -= minutes*60 hours = int(minutes/60) minutes -= hours*60 return("{hours:02}:{minutes:02}:{seconds:02}.{micro:03}".format(hours=hours, minutes=minutes, seconds=seconds, micro=micro))
Robert904/mumblerecbot
webvtt.py
Python
gpl-3.0
2,752
#!/usr/bin/env python # -*- coding: utf-8 -*- from dump2data import * g_program_name = 'raw2data.py' g_date_str = '2014-12-19' g_version_str = 'v0.43' ####### Main Code Below: ####### sys.stderr.write(g_program_name+' '+g_version_str+' '+g_date_str) sys.stderr.write('\n') try: data_settings = DataSettings() misc_settings = MiscSettings() misc_settings.multi = False warning_strings = [] ParseArgs(sys.argv, misc_settings, data_settings, warning_strings) frame_coords = defaultdict(list) frame_coords_ixiyiz = defaultdict(list) frame_vects = defaultdict(list) frame_velocities = defaultdict(list) frame_xlo_str = frame_xhi_str = None frame_ylo_str = frame_yhi_str = None frame_zlo_str = frame_zhi_str = None frame_xy_str = frame_xz_str = frame_yz_str = None frame_natoms = -1 frame_timestep_str = '' i_atomid = i_atomtype = i_molid = -1 i_x = i_y = i_z = i_xu = i_yu = i_zu = -1 i_xs = i_ys = i_zs = i_xsu = i_ysu = i_zsu = -1 dump_column_names = [] #num_frames_in = -1 num_frames_out = 0 finished_reading_frame = False read_last_frame = False #in_coord_file = open('traj.raw','r') #in_coord_file = open('tmp_atom_coords.dat','r') in_coord_file = sys.stdin read_last_frame = False while True: line = '\n' while (line != '') and (line.strip() == ''): line = in_coord_file.readline() if line == '': # if EOF break frame_coords = defaultdict(list) while line.strip() != '': n_crds = len(frame_coords) #sys.stdout.write("n_crds="+str(n_crds)+": \""+line.strip()+"\"\n") frame_coords[str(n_crds+1)] = line.split() line = in_coord_file.readline() # Check to see if there are any blank lines at this location in the file # If there are, it means we are at a new "frame" (in the trajectory). # Skip over any blank line(s) separating this frame from the next frame # so that the next time we enter the loop, we are at the beginning # of a new frame. while (line != '') and (line.strip() == ''): line = in_coord_file.readline() # Parse the DATA file specified by the user # and replace appropriate lines or fields with # the corresponding text from the input file. if misc_settings.multi: out_file_name = data_settings.file_name + '.'\ + str(num_frames_out) sys.stderr.write(' (creating file \"'+out_file_name+'\")\n') out_file = open(out_file_name, 'w') else: out_file = sys.stdout WriteFrameToData(out_file, None, misc_settings, data_settings, frame_natoms, frame_coords, frame_coords_ixiyiz, frame_vects, frame_velocities, None, None, frame_xlo_str, frame_xhi_str, frame_ylo_str, frame_yhi_str, frame_zlo_str, frame_zhi_str, frame_xy_str, frame_xz_str, frame_yz_str) except (ValueError, InputError) as err: sys.stderr.write('\n'+str(err)+'\n') sys.exit(-1)
andeplane/lammps
tools/moltemplate/src/raw2data.py
Python
gpl-2.0
3,484
# -*- coding: utf-8 -*- import json import requests from abc import ABCMeta, abstractmethod from . import getLogger from .common import thumbprint, newJwk log = getLogger(__name__) class KeyNotFoundError(Exception): def __str__(self): return "Encrypion key not found: " + self.args[0] class KeyStoreABC(metaclass=ABCMeta): """Abstract base for JWK key stores.""" @abstractmethod def add(self, jwk): pass # pragma: no cover @abstractmethod def __getitem__(self, tp): pass # pragma: no cover def upload(self, jwk): self.add(jwk) class LocalKeyStore(KeyStoreABC): def __init__(self): self._keys = {} def add(self, jwk): tp = thumbprint(jwk) self._keys[tp] = jwk def __getitem__(self, tp): try: return self._keys[tp] except KeyError: raise KeyNotFoundError(tp) def __contains__(self, tp): return tp in self._keys _global_keystore = LocalKeyStore() def keystore(): global _global_keystore return _global_keystore def setKeyStore(keystore): global _global_keystore curr = _global_keystore _global_keystore = keystore return curr class RemoteKeyStore(LocalKeyStore): def __init__(self, url): self._url = url self._headers = {"content-type": "application/json"} super().__init__() def add(self, jwk): return super().add(jwk) def _post(self, url, headers=None, json=None): # pragma: no cover resp = requests.post(url, headers=headers, json=json, timeout=5) return resp def _get(self, url, headers=None): # pragma: no cover resp = requests.get(url, timeout=5) return resp def upload(self, jwk): tprint = thumbprint(jwk) resp = self._post(self._url, headers=self._headers, json=json.loads(jwk.export_public())) if resp.status_code != 201: log.error(resp) raise requests.RequestException(response=resp) elif resp.json()["kid"] != tprint: raise ValueError("'kid' changed on upload") else: log.debug("RemoteKeyStore.upload response: {}".format(resp.json())) self.add(jwk) return tprint def __getitem__(self, tp): try: return super().__getitem__(tp) except KeyNotFoundError: resp = self._get("/".join([self._url, tp])) if resp.status_code != 200: log.error(resp) raise jwk = newJwk(**resp.json()) self.add(jwk) return jwk
nicfit/Clique
clique/keystore.py
Python
lgpl-3.0
2,746
from unittest.mock import Mock, patch from .. import two_factor_gateways as mod def test_gateway_make_call(): gateway = mod.Gateway.__new__(mod.Gateway) # instantiate without __init__ gateway.from_number = "+16145551234" gateway.client = Mock(name="client") device = Mock(name="device") device.number.as_e164 = "+14155552671" with patch.object(mod, "Site") as Site: Site.objects.get_current().domain = "test.com" gateway.make_call(device, "70839") gateway.client.api.account.calls.create.assert_called_with( to="+14155552671", from_="+16145551234", url="https://test.com/twilio/inbound/two_factor/70839/?locale=en-us", method="GET", if_machine="Hangup", timeout=15, )
dimagi/commcare-hq
corehq/apps/hqwebapp/tests/test_two_factor_gateways.py
Python
bsd-3-clause
768
from numpy import array from explauto.utils.config import make_configuration from explauto.sensorimotor_model.nearest_neighbor import NearestNeighbor from explauto.sensorimotor_model.non_parametric import NonParametric from environment.mathEnv import MathEnvironment, mathEnv_config from agent.supervisor import Supervisor #from agent.zpdes_sup import ZPDESSupervisor class Config(object): def __init__(self, name = None, iterations = None, debug = False): ################################### EXPERIMENT CONFIG ################################### self.debug = debug if not debug: self.name = name or 'Experiment' self.init_rest_trial = False self.bootstrap = 0 self.bootstrap_range_div = 1. self.iter = iterations or 50 self.log_each = self.iter/10 #must be <= iter self.eval_at = range(1, self.iter +2, self.iter/10) self.n_eval = 10 self.eval_modes = ['explo'] # explo, inverse else: self.name = name or 'Experiment' self.init_rest_trial = False self.bootstrap = 20 self.bootstrap_range_div = 1. self.iter = 10 self.log_each = 1 self.eval_at = [10] self.n_eval = 10 self.eval_modes = ['explo_comp'] # explo, inverse self.gui = False self.audio = False ################################### AGENT CONFIG ################################### self.n_motors = 1 self.motor_dims = [0] self.sensori_dims = [1, 2] self.used_dims = 3 #self.n_motors = 3 #self.motor_dims = [0, 1, 2] #self.sensori_dims = [3, 4, 5, 6, 7, 8, 9] #self.used_dims = 10 self.sms = { 'LWLR': (NonParametric, {'fwd': 'LWLR', 'inv': 'L-BFGS-B', 'sigma':0.05, 'k':10}), 'NSLWLR': (NonParametric, {'fwd': 'NSLWLR', 'inv': 'L-BFGS-B', 'sigma':0.05, 'sigma_t':100, 'k':20}), 'NSNN': (NonParametric, {'fwd': 'NSNN', 'inv': 'L-BFGS-B', 'sigma':0.05, 'sigma_t':100, 'k':20}), 'WNN': (NonParametric, {'fwd': 'WNN', 'inv': 'WNN', 'sigma':0.05, 'k':5}), 'NSWNN': (NonParametric, {'fwd': 'NSWNN', 'inv': 'NSWNN', 'sigma':0.2, 'sigma_t':100, 'k':20}), 'knn1': (NearestNeighbor, {'sigma_ratio': 1. / 30.}), 'knn2': (NearestNeighbor, {'sigma_ratio': 1. / 30.}), } self.std_range = [0,1] self.mids = ['mod1', 'mod2', #'mod3', ] # module ids m1 = [0] s1 = [1] s2 = [2] self.m_spaces = dict(m1=m1) self.s_spaces = dict(s1=s1, s2=s2) self.operators = ["par", "seq"] self.modules = dict(mod1 = dict(m = m1, s = s1, m_list = [m1], operator = "par", babbling_name = 'goal', sm_name = 'knn1', im_name = 'TDDensity', from_log = None, #children = ['m1'], motor_babbling_n_iter=10), # # mod2 = dict(m = m2, # s = s1, # m_list = [m2], # operator = "par", # babbling_name = 'goal', # sm_name = 'knn1', # im_name = 'miscRandom', # from_log = None, # #children = ["mod1"], # motor_babbling_n_iter=10), mod2 = dict(m = s1, s = s2, m_list = [s1], operator = "par", babbling_name = 'goal', sm_name = 'knn1', im_name = 'miscRandom', from_log = None, #children = ["mod2"], motor_babbling_n_iter=10), ) # # self.learning = dict(training_mode = 'seq',#seq, par, comb # seq = dict(not_babbling_mode = 'updating',#updating, fixed # updating = {}), # par = dict(par_mode = 'exploring',#exploring, random # exploring = dict(exploring_mode = 'cma', # cma = {'n_points':20, 'sigma0':40, 'popsize':4}, # random = {'n_points':20})), # comb = dict()) self.eval_dims = s2 self.eval_explo_dims = s2 self.eval_range = array([[0.], [1.]]) self.eval_explo_eps = 0.002 self.eval_explo_comp_eps = 0.002 ################################### Process CONFIG ################################### self.m_mins = [0] self.m_maxs = [1] self.s_mins = [0, 0] self.s_maxs = [1, 1] self.agent = make_configuration(self.m_mins, self.m_maxs, self.s_mins, self.s_maxs) # self.supervisor_cls = ZPDESSupervisor # self.supervisor_config = dict(alpha_reward=0.5, # activate_succ_th=0.5, # deactivate_succ_th=0.8, # progress_window_size=20) self.supervisor_cls = Supervisor self.supervisor_config = dict() self.env_config = mathEnv_config self.env_cls = MathEnvironment print self.agent self.tag = self.name + '-_{}'.format(self.iter) self.log_dir = ''#determined later if __name__ == '__main__': print Config()
sebastien-forestier/explaupoppydiva
explaupoppydiva/config.py
Python
gpl-3.0
6,989
import numpy as np import glob import os from mobilenets import MobileNet model = MobileNet((224, 224, 3), alpha=1.0, weights=None) weight_path = 'weights/*.npy' fns = glob.glob(weight_path) for i, fn in enumerate(fns): print("File : ", i + 1, ": ", fn) conv1_bn = fns[0:4] conv1_wb = fns[4:5] dw10 = fns[5:15] dw11 = fns[15:25] dw12 = fns[25:35] dw13 = fns[35:45] dw1 = fns[45:55] dw2 = fns[55:65] dw3 = fns[65:75] dw4 = fns[75:85] dw5 = fns[85:95] dw6 = fns[95:105] dw7 = fns[105:115] dw8 = fns[115:125] dw9 = fns[125:135] fcn = fns[135:] print("\nInitial Conv-BN : ", conv1_wb, conv1_bn) dwlist = [dw1, dw2, dw3, dw4, dw5, dw6, dw7, dw8, dw9, dw10, dw11, dw12, dw13] for i, dw in enumerate(dwlist): print('DW block %d' % (i + 1), dw) print("Final dense layer", fcn) print() #for i, layer in enumerate(model.layers): # print(i, layer.name) # used for sanity check that all weights have been loaded layers = model.layers layer_has_weights = [(True if len(layer.get_weights()) != 0 else False) for layer in model.layers] layer_weights_saved = [False for _ in range(len(model.layers))] # these two layers will be loaded in the next line layer_weights_saved[1] = True layer_weights_saved[2] = True conv1_weights = [np.load(conv1_wb[0])] model.layers[1].set_weights(conv1_weights) conv1_bn = [np.load(fn) for fn in conv1_bn] model.layers[2].set_weights([conv1_bn[1], conv1_bn[0], conv1_bn[2], conv1_bn[3]]) print("\nLoaded initail conv weights") layer_index = 4 for i, dw in enumerate(dwlist): dw_weights = [np.load(dw[4])] dw_bn = [np.load(fn) for fn in dw[0:4]] pw_weights_biases = [np.load(dw[-1])] pw_bn = [np.load(fn) for fn in dw[5:-1]] model.layers[layer_index].set_weights(dw_weights) model.layers[layer_index + 1].set_weights([dw_bn[1], dw_bn[0], dw_bn[2], dw_bn[3]]) model.layers[layer_index + 3].set_weights(pw_weights_biases) model.layers[layer_index + 4].set_weights([pw_bn[1], pw_bn[0], pw_bn[2], pw_bn[3]]) # for sanity check, set True for all layers whise weights were changed layer_weights_saved[layer_index] = True layer_weights_saved[layer_index + 1] = True layer_weights_saved[layer_index + 3] = True layer_weights_saved[layer_index + 4] = True print('Loaded DW layer %d weights' % (i + 1)) layer_index += 6 fc_weights_bias = [np.load(fcn[1]), np.load(fcn[0])] fc_weights_bias[0] = fc_weights_bias[0][..., 1:] fc_weights_bias[1] = fc_weights_bias[1][1:] model.layers[-3].set_weights(fc_weights_bias) print("Loaded final conv classifier weights") layer_weights_saved[-3] = True model.save_weights('../weights/mobilenet_imagenet_tf.h5', overwrite=True) print("Model saved") # perform check that all weights that could have been loaded, Have been loaded! print('\nBegin sanity check...') for layer_id, has_weights in enumerate(layer_has_weights): if has_weights and not layer_weights_saved[layer_id]: # weights were not saved! report print("Layer id %d (%s) weights were not saved!" % (layer_id, model.layers[layer_id].name)) print("Sanity check complete!") for fn in glob.glob(weight_path): os.remove(fn)
titu1994/MobileNetworks
_weight_extraction/weight_load.py
Python
apache-2.0
3,167
#!/usr/bin/env python import spear # 1/ The tool tool = spear.tools.ISVTool # 2/ GMM Training n_gaussians = 512 iterk = 25 iterg_train = 25 end_acc = 0.0001 var_thd = 0.0001 update_weights = True update_means = True update_variances = True norm_KMeans = True # 3/ JFA Training ru = 100 # The dimensionality of the subspace relevance_factor = 4 n_iter_train = 10 n_iter_enrol = 1 # 4/ JFA Enrolment and scoring iterg_enrol = 1 convergence_threshold = 0.0001 variance_threshold = 0.0001 relevance_factor = 4 responsibilities_threshold = 0
guker/spear
config/tools/isv/isv_512g_u100.py
Python
gpl-3.0
542
# This technical data was produced for the U. S. Government under Contract No. W15P7T-13-C-F600, and # is subject to the Rights in Technical Data-Noncommercial Items clause at DFARS 252.227-7013 (FEB 2012) from django.test import TestCase from django.conf import settings from django.contrib.auth.models import User from django.core.urlresolvers import reverse from django.db.models.signals import post_save from django.test.client import Client from django.test.utils import override_settings from geoevents.operations.models import Event from geoevents.smts.signals import new_smts_category_from_event, send_email_to_smts def disconnect_signal(signal, receiver, sender): """ Disconnects signals. To use: disconnect_signal(<signal>, <receiver>, <model>) IE: disconnect_signal(post_save, send_email, Event) """ disconnect = getattr(signal, 'disconnect') disconnect(receiver, sender) def reconnect_signal(signal, receiver, sender): """ Reconnects disconnected signals. To use: reconnect_signal(<signal>, <receiver>, <model>) IE: reconnect_signal(post_save, send_email, Event) """ connect = getattr(signal, 'connect') connect(receiver, sender=sender) class R3TestCaseMixin(object): """ Mixin that stores logic to be run on all R3 test cases. All test cases should subclass this and call super().setUp. """ def setUp(self): disconnect_signal(post_save, send_email_to_smts, Event) disconnect_signal(post_save, new_smts_category_from_event, Event) # Adds usernames with various permissions self.admin_user_credentials = {"username": 'admin', "password": 'test'} self.non_admin_user_credentials = {"username": 'non_admin', "password": 'test'} self.admin_user = User.objects.create_superuser('admin', 'test@aol.com', 'test') self.non_admin_user = User.objects.create_user('non_admin', 'test2@aol.com', 'test') self.cac_user = User.objects.create_user('Michael.Smith.Test', 'test3@aol.com', 'test') self.crazy_char_user = User.objects.create_user('aA0.-@_ \'', 'test4@aol.com', 'test') # Adds current_api_version to test cases. self.current_api_version = settings.CURRENT_API_VERSION @override_settings(AUTHENTICATION_BACKENDS=('django.contrib.auth.backends.ModelBackend',)) class SimpleTest(R3TestCaseMixin, TestCase): """ Tests site wide views and core functionality. """ fixtures = ['initial_data_hold.json', 'maps.json'] def setUp(self): super(SimpleTest, self).setUp() def test_view_profile(self): '''Test if the profile view renders''' c = Client() c.login(username=self.admin_user.username, password='test') response = c.get(reverse('user-profile', kwargs={'username': self.admin_user.username})) self.failUnlessEqual(response.status_code, 200) def test_view_profile_cac_user(self): '''Test if the profile view renders for cac users''' c = Client() c.login(username='Michael.Smith.Test', password='test') response = c.get(reverse('user-profile', kwargs={'username': self.cac_user.username})) self.failUnlessEqual(response.status_code, 200) def test_view_profile_crazy_char_user(self): '''Test if the profile view renders for cac users''' c = Client() c.login(username='aA0.-@_ \'', password='test') response = c.get(reverse('user-profile', kwargs={'username': self.crazy_char_user.username})) self.failUnlessEqual(response.status_code, 200) def test_view_profile_404(self): '''Test if the profile view renders a 404 if supplied with non-existent username''' c = Client() c.login(username='admin_user', password='test') response = c.get(reverse('user-profile', kwargs={'username': 'admisdfn'})) self.failUnlessEqual(response.status_code, 404) def test_non_admin_view_profile(self): '''Test if a non-admin can view their own user profile''' c = Client() c.login(username='non_admin', password='test') response = c.get(reverse('user-profile', kwargs={'username': self.non_admin_user.username})) self.failUnlessEqual(response.status_code, 200) def test_perms_view_profile(self): '''Test if the non-admin can view someone else's profile''' c = Client() c.login(username='non_admin', password='test') response = c.get(reverse('user-profile', kwargs={'username': self.admin_user.username})) self.failUnlessEqual(response.status_code, 403) def test_admin_perms_view_profile(self): '''Test if the admin can view someone else's profile''' c = Client() c.login(username='admin', password='test') response = c.get(reverse('user-profile', kwargs={'username': self.non_admin_user.username})) self.failUnlessEqual(response.status_code, 200) def test_manage_profile(self): '''Test if the admin can post to his own profile''' c = Client() c.login(username='admin', password='test') response = c.post(reverse('user-profile', kwargs={'username': self.admin_user.username}), {'first_name': 'Admin', 'last_name': 'Last', 'email': 'test@aol.com'}) self.failUnlessEqual(response.status_code, 302) def test_perms_manage_profile(self): '''Test if the admin can post to his own profile''' c = Client() c.login(username='non_admin', password='test') response = c.post(reverse('user-profile', kwargs={'username': self.admin_user.username}), {'first_name': 'Admin', 'last_name': 'Last', 'email': 'test@aol.com'}) self.failUnlessEqual(response.status_code, 403) def test_login_logout(self): '''Tests the login/logout views''' c = Client() response = c.get(reverse('login')) self.failUnlessEqual(response.status_code, 200) response = c.get(reverse('logout')) self.failUnlessEqual(response.status_code, 200) def tearDown(self): self.admin_user.delete() self.non_admin_user.delete() @override_settings(AUTHENTICATION_BACKENDS=('django.contrib.auth.backends.ModelBackend',)) class CoreAPITests(R3TestCaseMixin, TestCase): """ Tests core api functionality. """ #TODO: abstract this class so user can provide any API and it will test common functionality fixtures = ['maps.json'] def setUp(self): super(CoreAPITests, self).setUp() self.api_endpoint = '{0}?format=json'.format( reverse('api_dispatch_list', args=[self.current_api_version, 'user'])) def test_user_api_get(self): c = Client(self.api_endpoint) response = c.get(self.api_endpoint) self.assertEqual(response.status_code, 405) def test_user_api_post(self): c = Client() response = c.post(self.api_endpoint, data={'username': 'test'}) self.assertEqual(response.status_code, 405)
jaycrossler/geo-events
geoevents/core/tests.py
Python
mit
7,049
# -------------------------------------------------------------------# # Released under the MIT license (https://opensource.org/licenses/MIT) # Contact: mrinal.haloi11@gmail.com # Enhancement Copyright 2016, Mrinal Haloi # -------------------------------------------------------------------# import random import os import tensorflow as tf from core.solver import Solver from env.environment import GymEnvironment, SimpleGymEnvironment from config.config import cfg # Set random seed tf.set_random_seed(123) random.seed(12345) def main(_): gpu_options = tf.GPUOptions(per_process_gpu_memory_fraction=0.4) with tf.Session(config=tf.ConfigProto(gpu_options=gpu_options)) as sess: if cfg.env_type == 'simple': env = SimpleGymEnvironment(cfg) else: env = GymEnvironment(cfg) if not os.path.exists('/tmp/model_dir'): os.mkdir('/tmp/model_dir') solver = Solver(cfg, env, sess, '/tmp/model_dir') solver.train() if __name__ == '__main__': tf.app.run()
n3011/deeprl
train_dqn.py
Python
mit
1,042
#!/usr/bin/env python import os import sys if __name__ == "__main__": os.environ.setdefault("DJANGO_SETTINGS_MODULE", "sitem.settings") from django.core.management import execute_from_command_line execute_from_command_line(sys.argv)
gencelo/tlog
app/manage.py
Python
mit
248
from django.contrib import admin from .models import Device, Interface, Inventory # Register your models here. admin.site.register(Device) admin.site.register(Interface) admin.site.register(Inventory)
clay584/chuck
chuck/inventory/admin.py
Python
mit
202
#!/usr/bin/python # $Id:$ """Rewrite the license header of source files. Usage: license.py file.py file.py dir/ dir/ ... """ import optparse import os import sys license = """# pyglet # Copyright (c) 2006-2008 Alex Holkner # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions # are met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in # the documentation and/or other materials provided with the # distribution. # * Neither the name of pyglet nor the names of its # contributors may be used to endorse or promote products # derived from this software without specific prior written # permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE # COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, # BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT # LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN # ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE.""" marker = '# ' + '-' * 76 license_lines = [marker] + license.split('\n') + [marker] def update_license(filename): """Open a Python source file and update the license header, writing it back in place.""" lines = [l.strip('\r\n') for l in open(filename).readlines()] if marker in lines: # Update existing license try: marker1 = lines.index(marker) marker2 = lines.index(marker, marker1 + 1) if marker in lines[marker2 + 1:]: raise ValueError() # too many markers lines = (lines[:marker1] + license_lines + lines[marker2 + 1:]) except ValueError: print("Can't update license in %s" % filename, file=sys.stderr) else: # Add license to unmarked file # Skip over #! if present if not lines: pass # Skip empty files elif lines[0].startswith('#!'): lines = lines[:1] + license_lines + lines[1:] else: lines = license_lines + lines open(filename, 'wb').write('\n'.join(lines) + '\n') op = optparse.OptionParser() op.add_option('--exclude', action='append', default=list()) options, args = op.parse_args() if len(args) < 1: print(__doc__, file=sys.stderr) sys.exit(0) for path in args: if os.path.isdir(path): for root, dirnames, filenames in os.walk(path): for dirname in dirnames: if dirname in options.exclude: dirnames.remove(dirname) for filename in filenames: if (filename.endswith('.py') and filename not in options.exclude): update_license(os.path.join(root, filename)) else: update_license(path)
bitcraft/pyglet
tools/license.py
Python
bsd-3-clause
3,724
from baseclass.SDetection import SDetection from sklearn.linear_model import LogisticRegression from sklearn.svm import SVC # from random import shuffle from sklearn.tree import DecisionTreeClassifier class DegreeSAD(SDetection): def __init__(self, conf, trainingSet=None, testSet=None, labels=None, fold='[1]'): super(DegreeSAD, self).__init__(conf, trainingSet, testSet, labels, fold) def buildModel(self): self.MUD = {} self.RUD = {} self.QUD = {} # computing MUD,RUD,QUD for training set sList = sorted(self.dao.trainingSet_i.iteritems(), key=lambda d: len(d[1]), reverse=True) maxLength = len(sList[0][1]) for user in self.dao.trainingSet_u: self.MUD[user] = 0 for item in self.dao.trainingSet_u[user]: self.MUD[user] += len(self.dao.trainingSet_i[item]) #/ float(maxLength) self.MUD[user]/float(len(self.dao.trainingSet_u[user])) lengthList = [len(self.dao.trainingSet_i[item]) for item in self.dao.trainingSet_u[user]] lengthList.sort(reverse=True) self.RUD[user] = lengthList[0] - lengthList[-1] lengthList = [len(self.dao.trainingSet_i[item]) for item in self.dao.trainingSet_u[user]] lengthList.sort() self.QUD[user] = lengthList[int((len(lengthList) - 1) / 4.0)] # computing MUD,RUD,QUD for test set for user in self.dao.testSet_u: self.MUD[user] = 0 for item in self.dao.testSet_u[user]: self.MUD[user] += len(self.dao.trainingSet_i[item]) #/ float(maxLength) for user in self.dao.testSet_u: lengthList = [len(self.dao.trainingSet_i[item]) for item in self.dao.testSet_u[user]] lengthList.sort(reverse=True) self.RUD[user] = lengthList[0] - lengthList[-1] for user in self.dao.testSet_u: lengthList = [len(self.dao.trainingSet_i[item]) for item in self.dao.testSet_u[user]] lengthList.sort() self.QUD[user] = lengthList[int((len(lengthList) - 1) / 4.0)] # preparing examples for user in self.dao.trainingSet_u: self.training.append([self.MUD[user], self.RUD[user], self.QUD[user]]) self.trainingLabels.append(self.labels[user]) for user in self.dao.testSet_u: self.test.append([self.MUD[user], self.RUD[user], self.QUD[user]]) self.testLabels.append(self.labels[user]) def predict(self): # classifier = LogisticRegression() # classifier.fit(self.training, self.trainingLabels) # pred_labels = classifier.predict(self.test) # print 'Logistic:' # print classification_report(self.testLabels, pred_labels) # # classifier = SVC() # classifier.fit(self.training, self.trainingLabels) # pred_labels = classifier.predict(self.test) # print 'SVM:' # print classification_report(self.testLabels, pred_labels) classifier = DecisionTreeClassifier(criterion='entropy') classifier.fit(self.training, self.trainingLabels) pred_labels = classifier.predict(self.test) print 'Decision Tree:' return pred_labels
Coder-Yu/SDLib
method/DegreeSAD.py
Python
gpl-3.0
3,260
import os from . import helpers SNIPPETS_ROOT = os.path.join(os.path.dirname(__file__), 'snippets') def get_snippet_content(snippet_name, **format_kwargs): """ Load the content from a snippet file which exists in SNIPPETS_ROOT """ filename = snippet_name + '.snippet' snippet_file = os.path.join(SNIPPETS_ROOT, filename) if not os.path.isfile(snippet_file): raise ValueError('could not find snippet with name ' + filename) ret = helpers.get_file_content(snippet_file) if format_kwargs: ret = ret.format(**format_kwargs) return ret
ajk8/hatchery
hatchery/snippets.py
Python
mit
578
UNKNOWN = 0 def print_grid(grid): for r in range(0, 9): for c in range(0, 9): if grid[r][c] == UNKNOWN: print('.', end='') else: print(grid[r][c], end='') print() return True def find_open_pos(grid): for r in range(0, 9): for c in range(0, 9): if grid[r][c] == UNKNOWN: return (r, c) return None def value_in_row(v, r, grid): for c in range(0, 9): if grid[r][c] == v: return True return False def value_in_column(v, c, grid): for r in range(0, 9): if grid[r][c] == v: return True return False def value_in_block(v, r, c, grid): base_r = r - r%3 base_c = c - c%3 for r2 in range(base_r, base_r + 3): for c2 in range(base_c, base_c + 3): if grid[r2][c2] == v: return True return False def can_add_value(v, r, c, grid): if value_in_row(v, r, grid): return False if value_in_column(v, c, grid): return False if value_in_block(v, r, c, grid): return False return True def solve_grid(grid): pos = find_open_pos(grid) if pos == None: return True r = pos[0] c = pos[1] for v in range(1, 10): if can_add_value(v, r, c, grid): grid[r][c] = v if solve_grid(grid): return True grid[r][c] = UNKNOWN return False def solve(): file = open('problem096_data.txt') sum = 0 while True: name = file.readline().rstrip('\n') if not name: break grid = [] for r in range(0, 9): row_str = file.readline() row = [] for c in range(0, 9): row.append(int(row_str[c])) grid.append(row) assert(solve_grid(grid) == True) top_left = grid[0][0] * 100 + grid[0][1] * 10 + grid[0][2] sum += top_left file.close() print(sum) solve()
brunorijsman/euler-problems-python
euler/problem096.py
Python
bsd-2-clause
1,767
# coding: utf-8 # Copyright (c) Pymatgen Development Team. # Distributed under the terms of the MIT License. import abc import os import glob import logging import json import warnings from monty.io import zopen from pymatgen.io.vasp.inputs import Incar, Potcar, Poscar from pymatgen.io.vasp.outputs import Vasprun, Oszicar, Dynmat from pymatgen.io.gaussian import GaussianOutput from pymatgen.entries.computed_entries import ComputedEntry, \ ComputedStructureEntry from monty.json import MSONable """ This module define the various drones used to assimilate data. """ __author__ = "Shyue Ping Ong" __copyright__ = "Copyright 2012, The Materials Project" __version__ = "1.0" __maintainer__ = "Shyue Ping Ong" __email__ = "shyuep@gmail.com" __date__ = "Mar 18, 2012" logger = logging.getLogger(__name__) class AbstractDrone(MSONable, metaclass=abc.ABCMeta): """ Abstract drone class that defines the various methods that must be implemented by drones. Because of the quirky nature of Python"s multiprocessing, the intermediate data representations has to be in the form of python primitives. So all objects that drones work with must be MSONable. All drones must also implement the standard MSONable as_dict() and from_dict API. """ @abc.abstractmethod def assimilate(self, path): """ Assimilate data in a directory path into a pymatgen object. Because of the quirky nature of Python"s multiprocessing, the object must support pymatgen's as_dict() for parallel processing. Args: path: directory path Returns: An assimilated object """ return @abc.abstractmethod def get_valid_paths(self, path): """ Checks if path contains valid data for assimilation, and then returns the valid paths. The paths returned can be a list of directory or file paths, depending on what kind of data you are assimilating. For example, if you are assimilating VASP runs, you are only interested in directories containing vasprun.xml files. On the other hand, if you are interested converting all POSCARs in a directory tree to cifs for example, you will want the file paths. Args: path: input path as a tuple generated from os.walk, i.e., (parent, subdirs, files). Returns: List of valid dir/file paths for assimilation """ return class VaspToComputedEntryDrone(AbstractDrone): """ VaspToEntryDrone assimilates directories containing vasp output to ComputedEntry/ComputedStructureEntry objects. There are some restrictions on the valid directory structures: 1. There can be only one vasp run in each directory. 2. Directories designated "relax1", "relax2" are considered to be 2 parts of an aflow style run, and only "relax2" is parsed. 3. The drone parses only the vasprun.xml file. Args: inc_structure (bool): Set to True if you want ComputedStructureEntries to be returned instead of ComputedEntries. parameters (list): Input parameters to include. It has to be one of the properties supported by the Vasprun object. See :class:`pymatgen.io.vasp.Vasprun`. If parameters is None, a default set of parameters that are necessary for typical post-processing will be set. data (list): Output data to include. Has to be one of the properties supported by the Vasprun object. """ def __init__(self, inc_structure=False, parameters=None, data=None): self._inc_structure = inc_structure self._parameters = {"is_hubbard", "hubbards", "potcar_spec", "potcar_symbols", "run_type"} if parameters: self._parameters.update(parameters) self._data = data if data else [] def assimilate(self, path): files = os.listdir(path) if "relax1" in files and "relax2" in files: filepath = glob.glob(os.path.join(path, "relax2", "vasprun.xml*"))[0] else: vasprun_files = glob.glob(os.path.join(path, "vasprun.xml*")) filepath = None if len(vasprun_files) == 1: filepath = vasprun_files[0] elif len(vasprun_files) > 1: # Since multiple files are ambiguous, we will always read # the one that it the last one alphabetically. filepath = sorted(vasprun_files)[-1] warnings.warn("%d vasprun.xml.* found. %s is being parsed." % (len(vasprun_files), filepath)) try: vasprun = Vasprun(filepath) except Exception as ex: logger.debug("error in {}: {}".format(filepath, ex)) return None entry = vasprun.get_computed_entry(self._inc_structure, parameters=self._parameters, data=self._data) # entry.parameters["history"] = _get_transformation_history(path) return entry def get_valid_paths(self, path): (parent, subdirs, files) = path if "relax1" in subdirs and "relax2" in subdirs: return [parent] if (not parent.endswith("/relax1")) and \ (not parent.endswith("/relax2")) and ( len(glob.glob(os.path.join(parent, "vasprun.xml*"))) > 0 or ( len(glob.glob(os.path.join(parent, "POSCAR*"))) > 0 and len(glob.glob(os.path.join(parent, "OSZICAR*"))) > 0) ): return [parent] return [] def __str__(self): return " VaspToComputedEntryDrone" def as_dict(self): return {"init_args": {"inc_structure": self._inc_structure, "parameters": self._parameters, "data": self._data}, "version": __version__, "@module": self.__class__.__module__, "@class": self.__class__.__name__} @classmethod def from_dict(cls, d): return cls(**d["init_args"]) class SimpleVaspToComputedEntryDrone(VaspToComputedEntryDrone): """ A simpler VaspToComputedEntryDrone. Instead of parsing vasprun.xml, it parses only the INCAR, POTCAR, OSZICAR and KPOINTS files, which are much smaller and faster to parse. However, much fewer properties are available compared to the standard VaspToComputedEntryDrone. Args: inc_structure (bool): Set to True if you want ComputedStructureEntries to be returned instead of ComputedEntries. Structure will be parsed from the CONTCAR. """ def __init__(self, inc_structure=False): self._inc_structure = inc_structure self._parameters = {"is_hubbard", "hubbards", "potcar_spec", "run_type"} def assimilate(self, path): files = os.listdir(path) try: files_to_parse = {} if "relax1" in files and "relax2" in files: for filename in ("INCAR", "POTCAR", "POSCAR"): search_str = os.path.join(path, "relax1", filename + "*") files_to_parse[filename] = glob.glob(search_str)[0] for filename in ("CONTCAR", "OSZICAR"): search_str = os.path.join(path, "relax2", filename + "*") files_to_parse[filename] = glob.glob(search_str)[-1] else: for filename in ( "INCAR", "POTCAR", "CONTCAR", "OSZICAR", "POSCAR", "DYNMAT" ): files = sorted(glob.glob(os.path.join(path, filename + "*"))) if len(files) < 1: continue if len(files) == 1 or filename == "INCAR" or \ filename == "POTCAR" or filename == "DYNMAT": files_to_parse[filename] = files[-1]\ if filename == "POTCAR" else files[0] elif len(files) > 1: # Since multiple files are ambiguous, we will always # use the first one for POSCAR and the last one # alphabetically for CONTCAR and OSZICAR. if filename == "POSCAR": files_to_parse[filename] = files[0] else: files_to_parse[filename] = files[-1] warnings.warn( "%d files found. %s is being parsed." % (len(files), files_to_parse[filename])) poscar, contcar, incar, potcar, oszicar, dynmat = [None]*6 if 'POSCAR' in files_to_parse: poscar = Poscar.from_file(files_to_parse["POSCAR"]) if 'CONTCAR' in files_to_parse: contcar = Poscar.from_file(files_to_parse["CONTCAR"]) if 'INCAR' in files_to_parse: incar = Incar.from_file(files_to_parse["INCAR"]) if 'POTCAR' in files_to_parse: potcar = Potcar.from_file(files_to_parse["POTCAR"]) if 'OSZICAR' in files_to_parse: oszicar = Oszicar(files_to_parse["OSZICAR"]) if 'DYNMAT' in files_to_parse: dynmat = Dynmat(files_to_parse["DYNMAT"]) param = {"hubbards":{}} if poscar is not None and incar is not None and "LDAUU" in incar: param["hubbards"] = dict(zip(poscar.site_symbols, incar["LDAUU"])) param["is_hubbard"] = ( incar.get("LDAU", True) and sum(param["hubbards"].values()) > 0 ) if incar is not None else False param["run_type"] = None if incar is not None: param["run_type"] = Vasprun.run_type # param["history"] = _get_transformation_history(path) param["potcar_spec"] = potcar.spec if potcar is not None else None energy = oszicar.final_energy if oszicar is not None else Vasprun.final_energy structure = contcar.structure if contcar is not None\ else poscar.structure initial_vol = poscar.structure.volume if poscar is not None else \ None final_vol = contcar.structure.volume if contcar is not None else \ None delta_volume = None if initial_vol is not None and final_vol is not None: delta_volume = (final_vol / initial_vol - 1) data = {"filename": path, "delta_volume": delta_volume} if dynmat is not None: data['phonon_frequencies'] = dynmat.get_phonon_frequencies() if self._inc_structure: entry = ComputedStructureEntry( structure, energy, parameters=param, data=data ) else: entry = ComputedEntry( structure.composition, energy, parameters=param, data=data ) return entry except Exception as ex: logger.debug("error in {}: {}".format(path, ex)) return None def __str__(self): return "SimpleVaspToComputedEntryDrone" def as_dict(self): return {"init_args": {"inc_structure": self._inc_structure}, "version": __version__, "@module": self.__class__.__module__, "@class": self.__class__.__name__} @classmethod def from_dict(cls, d): return cls(**d["init_args"]) class GaussianToComputedEntryDrone(AbstractDrone): """ GaussianToEntryDrone assimilates directories containing Gaussian output to ComputedEntry/ComputedStructureEntry objects. By default, it is assumed that Gaussian output files have a ".log" extension. Args: inc_structure (bool): Set to True if you want ComputedStructureEntries to be returned instead of ComputedEntries. parameters (list): Input parameters to include. It has to be one of the properties supported by the GaussianOutput object. See :class:`pymatgen.io.gaussianio GaussianOutput`. The parameters have to be one of python"s primitive types, i.e., list, dict of strings and integers. If parameters is None, a default set of parameters will be set. data (list): Output data to include. Has to be one of the properties supported by the GaussianOutput object. The parameters have to be one of python"s primitive types, i.e. list, dict of strings and integers. If data is None, a default set will be set. file_extensions (list): File extensions to be considered as Gaussian output files. Defaults to just the typical "log" extension. .. note:: Like the GaussianOutput class, this is still in early beta. """ def __init__(self, inc_structure=False, parameters=None, data=None, file_extensions=(".log",)): self._inc_structure = inc_structure self._parameters = {"functional", "basis_set", "charge", "spin_multiplicity", "route_parameters"} if parameters: self._parameters.update(parameters) self._data = {"stationary_type", "properly_terminated"} if data: self._data.update(data) self._file_extensions = file_extensions def assimilate(self, path): try: gaurun = GaussianOutput(path) except Exception as ex: logger.debug("error in {}: {}".format(path, ex)) return None param = {} for p in self._parameters: param[p] = getattr(gaurun, p) data = {} for d in self._data: data[d] = getattr(gaurun, d) if self._inc_structure: entry = ComputedStructureEntry(gaurun.final_structure, gaurun.final_energy, parameters=param, data=data) else: entry = ComputedEntry(gaurun.final_structure.composition, gaurun.final_energy, parameters=param, data=data) return entry def get_valid_paths(self, path): (parent, subdirs, files) = path return [os.path.join(parent, f) for f in files if os.path.splitext(f)[1] in self._file_extensions] def __str__(self): return " GaussianToComputedEntryDrone" def as_dict(self): return {"init_args": {"inc_structure": self._inc_structure, "parameters": self._parameters, "data": self._data, "file_extensions": self._file_extensions}, "version": __version__, "@module": self.__class__.__module__, "@class": self.__class__.__name__} @classmethod def from_dict(cls, d): return cls(**d["init_args"]) def _get_transformation_history(path): """ Checks for a transformations.json* file and returns the history. """ trans_json = glob.glob(os.path.join(path, "transformations.json*")) if trans_json: try: with zopen(trans_json[0]) as f: return json.load(f)["history"] except: return None return None
blondegeek/pymatgen
pymatgen/apps/borg/hive.py
Python
mit
15,762
# Copyright 2017 The Forseti Security Authors. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Blacklist Scanner Test""" from builtins import zip from unittest.mock import patch, Mock, MagicMock from tests.unittest_utils import ForsetiTestCase from google.cloud.forseti.common.gcp_type import instance from google.cloud.forseti.scanner.scanners import blacklist_scanner from google.cloud.forseti.scanner.audit import blacklist_rules_engine as bre from tests.scanner.test_data import fake_blacklist_scanner_data as fbsd from tests.unittest_utils import get_datafile_path def create_list_of_instence_network_interface_obj_from_data(): fake_instances_list = [] for data in fbsd.INSTANCE_DATA: fake_instances_list.append( instance.Instance( 'fake-instance', **data).create_network_interfaces()) return fake_instances_list class BlacklistScannerTest(ForsetiTestCase): @patch('google.cloud.forseti.scanner.audit.' + 'blacklist_rules_engine.urllib.request.urlopen') def test_get_blacklist_url(self, mock_urlopen): a = Mock() a.read.side_effect = [str.encode(fbsd.FAKE_BLACKLIST_SOURCE_1)] mock_urlopen.return_value = a output = bre.BlacklistRuleBook.get_and_parse_blacklist('') self.assertEqual(2, len(output)) self.assertEqual(sorted(fbsd.EXPECTED_BLACKLIST_1), sorted(output)) return output @patch('google.cloud.forseti.scanner.audit.' + 'blacklist_rules_engine.urllib.request.urlopen') def test_build_rule_book_from_local_yaml_file_works(self, mock_urlopen): """Test that a RuleBook is built correctly with a yaml file.""" a = Mock() a.read.side_effect = [str.encode(fbsd.FAKE_BLACKLIST_SOURCE_1), str.encode(fbsd.FAKE_BLACKLIST_SOURCE_2)] mock_urlopen.return_value = a rules_local_path = get_datafile_path(__file__, 'blacklist_test_rule.yaml') rules_engine = bre.BlacklistRulesEngine(rules_file_path=rules_local_path) rules_engine.build_rule_book({}) self.assertEqual(2, len(rules_engine.rule_book.resource_rules_map)) self.assertEqual('http://threatintel.localdomain/verybadips.txt', rules_engine.rule_book.rule_defs['rules'][0]['url']) @patch('google.cloud.forseti.scanner.audit.' + 'blacklist_rules_engine.urllib.request.urlopen') def test_blacklist_scanner_all_match(self, mock_urlopen): a = Mock() a.read.side_effect = [str.encode(fbsd.FAKE_BLACKLIST_SOURCE_1), str.encode(fbsd.FAKE_BLACKLIST_SOURCE_2)] mock_urlopen.return_value = a rules_local_path = get_datafile_path(__file__, 'blacklist_test_rule.yaml') scanner = blacklist_scanner.BlacklistScanner( {}, {}, MagicMock(), '', '', rules_local_path) netifs = create_list_of_instence_network_interface_obj_from_data() for netif, expected_violation in zip(netifs, fbsd.EXPECTED_VIOLATIONS): violation = scanner._find_violations([netif]) self.assertEqual(expected_violation, violation) if __name__ == '__main__': unittest.main()
forseti-security/forseti-security
tests/scanner/scanners/blacklist_scanner_test.py
Python
apache-2.0
3,729
"""postal_codes table added Revision ID: 13673746db16 Revises: 29fc422c56cb Create Date: 2015-04-27 17:08:52.775839 """ # revision identifiers, used by Alembic. revision = '13673746db16' down_revision = '29fc422c56cb' from alembic import op import sqlalchemy as sa import geoalchemy2 as ga from sqlalchemy.dialects import postgresql def upgrade(): ### commands auto generated by Alembic - please adjust! ### op.create_table('postal_codes', sa.Column('admin_id', sa.BIGINT(), nullable=False), sa.Column('postal_code', sa.TEXT(), nullable=False), sa.ForeignKeyConstraint(['admin_id'], [u'georef.admin.id'], name=u'postal_codes_admin_id_fkey'), schema='georef' ) op.execute("INSERT INTO georef.postal_codes (admin_id, postal_code) select id, post_code " "from georef.admin where post_code is not null") op.drop_column('admin', 'post_code', schema='georef') ### end Alembic commands ### def downgrade(): ### commands auto generated by Alembic - please adjust! ### op.add_column('admin', sa.Column('post_code', sa.TEXT(), nullable=True), schema='georef') op.execute("update georef.admin ga set post_code=pc.postal_code " "from georef.postal_codes pc where ga.id=pc.admin_id") op.drop_table('postal_codes', schema='georef') ### end Alembic commands ###
francois-vincent/navitia
source/sql/alembic/versions/13673746db16_postal_codes_table_added.py
Python
agpl-3.0
1,346
import json import responses from keen import exceptions from keen.client import KeenClient from keen.tests.base_test_case import BaseTestCase class CachedDatasetsTestCase(BaseTestCase): def setUp(self): super(CachedDatasetsTestCase, self).setUp() self.organization_id = "1234xxxx5678" self.project_id = "xxxx1234" self.read_key = "abcd5678read" self.master_key = "abcd5678master" self.client = KeenClient( project_id=self.project_id, read_key=self.read_key, master_key=self.master_key ) self.datasets = [ { "project_id": self.project_id, "organization_id": self.organization_id, "dataset_name": "DATASET_NAME_1", "display_name": "a first dataset wee", "query": { "project_id": self.project_id, "analysis_type": "count", "event_collection": "best collection", "filters": [ { "property_name": "request.foo", "operator": "lt", "property_value": 300, } ], "timeframe": "this_500_hours", "timezone": "US/Pacific", "interval": "hourly", "group_by": ["exception.name"], }, "index_by": ["project.id"], "last_scheduled_date": "2016-11-04T18:03:38.430Z", "latest_subtimeframe_available": "2016-11-04T19:00:00.000Z", "milliseconds_behind": 3600000, }, { "project_id": self.project_id, "organization_id": self.organization_id, "dataset_name": "DATASET_NAME_10", "display_name": "tenth dataset wee", "query": { "project_id": self.project_id, "analysis_type": "count", "event_collection": "tenth best collection", "filters": [], "timeframe": "this_500_days", "timezone": "UTC", "interval": "daily", "group_by": ["analysis_type"], }, "index_by": ["project.organization.id"], "last_scheduled_date": "2016-11-04T19:28:36.639Z", "latest_subtimeframe_available": "2016-11-05T00:00:00.000Z", "milliseconds_behind": 3600000, }, ] def test_get_all_raises_with_no_keys(self): client = KeenClient(project_id=self.project_id) with self.assertRaises(exceptions.InvalidEnvironmentError): client.cached_datasets.all() @responses.activate def test_get_all(self): keen_response = { "datasets": self.datasets, "next_page_url": ( "https://api.keen.io/3.0/projects/{0}/datasets?" "limit=LIMIT&after_name={1}" ).format(self.project_id, self.datasets[-1]['dataset_name']) } url = "{0}/{1}/projects/{2}/datasets".format( self.client.api.base_url, self.client.api.api_version, self.project_id ) responses.add( responses.GET, url, status=200, json=keen_response ) all_cached_datasets = self.client.cached_datasets.all() self.assertEquals(all_cached_datasets, keen_response) def test_get_one_raises_with_no_keys(self): client = KeenClient(project_id=self.project_id) with self.assertRaises(exceptions.InvalidEnvironmentError): client.cached_datasets.get() @responses.activate def test_get_one(self): keen_response = self.datasets[0] url = "{0}/{1}/projects/{2}/datasets/{3}".format( self.client.api.base_url, self.client.api.api_version, self.project_id, self.datasets[0]['dataset_name'] ) def test_create_raises_with_no_keys(self): client = KeenClient(project_id=self.project_id) with self.assertRaises(exceptions.InvalidEnvironmentError): client.cached_datasets.create( "NEW_DATASET", {}, "product.id", "My new dataset" ) def test_create_raises_with_read_key(self): client = KeenClient(project_id=self.project_id, read_key=self.read_key) with self.assertRaises(exceptions.InvalidEnvironmentError): client.cached_datasets.create( "NEW_DATASET", {}, "product.id", "My new dataset" ) @responses.activate def test_create(self): dataset_name = "NEW_DATASET" display_name = "My new dataset" query = { "project_id": "PROJECT ID", "analysis_type": "count", "event_collection": "purchases", "filters": [ { "property_name": "price", "operator": "gte", "property_value": 100 } ], "timeframe": "this_500_days", "timezone": None, "interval": "daily", "group_by": ["ip_geo_info.country"] } index_by = "product.id" keen_response = { "project_id": self.project_id, "organization_id": self.organization_id, "dataset_name": dataset_name, "display_name": display_name, "query": query, "index_by": index_by, "last_scheduled_date": "1970-01-01T00:00:00.000Z", "latest_subtimeframe_available": "1970-01-01T00:00:00.000Z", "milliseconds_behind": 3600000 } url = "{0}/{1}/projects/{2}/datasets/{3}".format( self.client.api.base_url, self.client.api.api_version, self.project_id, dataset_name ) responses.add(responses.PUT, url, status=201, json=keen_response) dataset = self.client.cached_datasets.create( dataset_name, query, index_by, display_name ) self.assertEqual(dataset, keen_response) def test_results_raises_with_no_keys(self): client = KeenClient(project_id=self.project_id) with self.assertRaises(exceptions.InvalidEnvironmentError): client.cached_datasets.results( "DATASET_ONE", "product.id", "this_100_days" ) @responses.activate def test_results(self): keen_response = { "result": [ { "timeframe": { "start": "2016-11-02T00:00:00.000Z", "end": "2016-11-02T00:01:00.000Z" }, "value": [ { "exception.name": "ValueError", "result": 20 }, { "exception.name": "KeyError", "result": 18 } ] }, { "timeframe": { "start": "2016-11-02T01:00:00.000Z", "end": "2016-11-02T02:00:00.000Z" }, "value": [ { "exception.name": "ValueError", "result": 1 }, { "exception.name": "KeyError", "result": 13 } ] } ] } dataset_name = self.datasets[0]["dataset_name"] index_by = self.project_id timeframe = "this_two_hours" url = "{0}/{1}/projects/{2}/datasets/{3}/results?index_by={4}&timeframe={5}".format( self.client.api.base_url, self.client.api.api_version, self.project_id, dataset_name, index_by, timeframe ) responses.add( responses.GET, url, status=200, json=keen_response, match_querystring=True ) results = self.client.cached_datasets.results( dataset_name, index_by, timeframe ) self.assertEqual(results, keen_response) @responses.activate def test_results_absolute_timeframe(self): keen_response = { "result": [ { "timeframe": { "start": "2016-11-02T00:00:00.000Z", "end": "2016-11-02T00:01:00.000Z" }, "value": [ { "exception.name": "ValueError", "result": 20 }, { "exception.name": "KeyError", "result": 18 } ] }, { "timeframe": { "start": "2016-11-02T01:00:00.000Z", "end": "2016-11-02T02:00:00.000Z" }, "value": [ { "exception.name": "ValueError", "result": 1 }, { "exception.name": "KeyError", "result": 13 } ] } ] } dataset_name = self.datasets[0]["dataset_name"] index_by = self.project_id timeframe = { "start": "2016-11-02T00:00:00.000Z", "end": "2016-11-02T02:00:00.000Z" } url = "{0}/{1}/projects/{2}/datasets/{3}/results?index_by={4}&timeframe={5}".format( self.client.api.base_url, self.client.api.api_version, self.project_id, dataset_name, index_by, json.dumps(timeframe) ) responses.add( responses.GET, url, status=200, json=keen_response, match_querystring=True ) results = self.client.cached_datasets.results( dataset_name, index_by, timeframe ) self.assertEqual(results, keen_response) @responses.activate def test_results_multiple_index_by(self): keen_response = { "result": [ { "timeframe": { "start": "2016-11-02T00:00:00.000Z", "end": "2016-11-02T00:01:00.000Z" }, "value": [ { "exception.name": "ValueError", "result": 20 }, { "exception.name": "KeyError", "result": 18 } ] }, { "timeframe": { "start": "2016-11-02T01:00:00.000Z", "end": "2016-11-02T02:00:00.000Z" }, "value": [ { "exception.name": "ValueError", "result": 1 }, { "exception.name": "KeyError", "result": 13 } ] } ] } dataset_name = self.datasets[0]["dataset_name"] index_by = [self.project_id, 'another_id'] timeframe = "this_two_hours" url = "{0}/{1}/projects/{2}/datasets/{3}/results?index_by={4}&timeframe={5}".format( self.client.api.base_url, self.client.api.api_version, self.project_id, dataset_name, json.dumps(index_by), timeframe ) responses.add( responses.GET, url, status=200, json=keen_response, match_querystring=True ) results = self.client.cached_datasets.results( dataset_name, index_by, timeframe ) self.assertEqual(results, keen_response) def test_delete_raises_with_no_keys(self): client = KeenClient(project_id=self.project_id) with self.assertRaises(exceptions.InvalidEnvironmentError): client.cached_datasets.delete("MY_DATASET_NAME") def test_create_raises_with_read_key(self): client = KeenClient(project_id=self.project_id, read_key=self.read_key) with self.assertRaises(exceptions.InvalidEnvironmentError): client.cached_datasets.delete("MY_DATASET_NAME") @responses.activate def test_delete(self): dataset_name = "MY_DATASET_NAME" url = "{0}/{1}/projects/{2}/datasets/{3}".format( self.client.api.base_url, self.client.api.api_version, self.project_id, dataset_name) responses.add(responses.DELETE, url, status=204) response = self.client.cached_datasets.delete(dataset_name) self.assertTrue(response)
keenlabs/KeenClient-Python
keen/tests/cached_datasets_tests.py
Python
mit
13,778