prefix stringlengths 0 918k | middle stringlengths 0 812k | suffix stringlengths 0 962k |
|---|---|---|
# coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.8.2
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import os
import sys
import unittest
import kubernetes.client
from kubernetes.client.rest import ApiException
from kubernetes.client.apis.rbac_authorization_v1alpha1_api import RbacAuthorizationV1alpha1Api
class TestRbacAuthorizationV1alpha1Api(unittest.TestCase):
""" RbacAuthorizationV1alpha1Api unit test stubs """
def setUp(self):
self.api = kubernetes.client.apis.rbac_authorization_v1alpha1_api.RbacAuthorizationV1alpha1Api()
def tearDown(self):
pass
def test_create_cluster_role(self):
"""
Test case for create_cluster_role
"""
pass
def test_create_cluster_role_binding(self):
"""
Test case for create_cluster_role_binding
"""
pass
def test_create_namespaced_role(self):
"""
Test case for create_namespaced_role
"""
pass
def test_create_namespaced_role_binding(self):
"""
Test case for create_namespaced_role_binding
"""
pass
def test_delete_cluster_role(self):
"""
Test case for delete_cluster_role
"""
pass
def test_delete_cluster_role_binding(self):
"""
Test case for delete_cluster_role_binding
"""
pass
def test_delete_collection_cluster_role(self):
"""
Test case for delete_collection_cluster_role
"""
pass
def test_delete_collection_cluster_role_binding(self):
"""
Test case for delete_collection_cluster_role_binding
"""
pass
def test_delete_collection_namespaced_role(self):
"""
Test case for delete_collection_namespaced_role
"""
pass
def test_delete_collection_namespaced_role_binding(self):
"""
Test case for delete_collection_namespaced_role_binding
"""
pass
def test_delete_namespaced_role(self):
"""
Test case for delete_namespaced_role
"""
pass
def test_delete_namespaced_role_binding(self):
"""
Test case for delete_namespaced_role_binding
"""
pass
def test_get_api_resources(self):
"""
Test case for get_api_resources
"""
pass
def test_list_cluster_role(self):
"""
Test case for list_cluster_role
"""
pass
def test_list_cluster_role_binding(self):
"""
Test case for list_cluster_role_binding
"""
pass
def test_list_namespaced_role(self):
"""
Test case | for list_namespaced_role
"""
pass
def test_list_namespaced_role_binding(self):
"""
Test case for list_namespaced_role_binding
"""
pass
def test_list_role_binding_for_all_namespaces(self):
"""
Test case for list_role_binding_for_all_namespaces
"""
pass
def test_ | list_role_for_all_namespaces(self):
"""
Test case for list_role_for_all_namespaces
"""
pass
def test_patch_cluster_role(self):
"""
Test case for patch_cluster_role
"""
pass
def test_patch_cluster_role_binding(self):
"""
Test case for patch_cluster_role_binding
"""
pass
def test_patch_namespaced_role(self):
"""
Test case for patch_namespaced_role
"""
pass
def test_patch_namespaced_role_binding(self):
"""
Test case for patch_namespaced_role_binding
"""
pass
def test_read_cluster_role(self):
"""
Test case for read_cluster_role
"""
pass
def test_read_cluster_role_binding(self):
"""
Test case for read_cluster_role_binding
"""
pass
def test_read_namespaced_role(self):
"""
Test case for read_namespaced_role
"""
pass
def test_read_namespaced_role_binding(self):
"""
Test case for read_namespaced_role_binding
"""
pass
def test_replace_cluster_role(self):
"""
Test case for replace_cluster_role
"""
pass
def test_replace_cluster_role_binding(self):
"""
Test case for replace_cluster_role_binding
"""
pass
def test_replace_namespaced_role(self):
"""
Test case for replace_namespaced_role
"""
pass
def test_replace_namespaced_role_binding(self):
"""
Test case for replace_namespaced_role_binding
"""
pass
# Allow running this test module directly as a script.
if __name__ == '__main__':
    unittest.main()
|
##
# Copyright 2009-2021 Ghent University
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://vscentrum.be/nl/en),
# the Hercules foundation (http://www.herculesstichting.be/in_English)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# https://github.com/easybuilders/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
##
"""
EasyBuild support for MyMediaLite, implemented as an easyblock
@author: Stijn De Weirdt (Ghent University)
@author: Dries Verdegem (Ghent University)
@author: Kenneth Hoste (Ghent University)
@author: Pieter De Baets (Ghent University)
@author: Jens Timmerman (Ghent University)
"""
from distutils.version import LooseVersion
from easybuild.easyblocks.generic.configuremake import ConfigureMake
from easybuild.tools.run import run_cmd
class EB_MyMediaLite(ConfigureMake):
    """Support for building/installing MyMediaLite."""

    def configure_step(self):
        """Custom configure step for MyMediaLite, using "make CONFIGURE_OPTIONS='...' configure"."""
        # Versions before 3.x ship a make-driven configure; 3.x+ only needs
        # PREFIX passed at install time.
        if LooseVersion(self.version) < LooseVersion('3'):
            configure_cmd = "make CONFIGURE_OPTIONS='--prefix=%s' configure" % self.installdir
            run_cmd(configure_cmd, log_all=True, simple=True)
        else:
            self.cfg.update('installopts', "PREFIX=%s" % self.installdir)

    def build_step(self):
        """Custom build step for MyMediaLite: run 'make all' inside the 'src' directory."""
        run_cmd("cd src && make all && cd ..", log_all=True, simple=True)

    def sanity_check_step(self):
        """Custom sanity check for MyMediaLite: verify expected binaries and library dir."""
        if LooseVersion(self.version) < LooseVersion('3'):
            prediction_kinds = ['item', 'mapping_item', 'mapping_rating', 'rating']
            bin_files = ["bin/%s_prediction" % kind for kind in prediction_kinds]
        else:
            bin_files = ["bin/item_recommendation", "bin/rating_based_ranking", "bin/rating_prediction"]
        custom_paths = {
            'files': bin_files,
            'dirs': ["lib/mymedialite"],
        }
        super(EB_MyMediaLite, self).sanity_check_step(custom_paths=custom_paths)
|
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function

import sys
sys.path.append("..")

import unittest
import numpy as np
import paddle
import paddle.fluid as fluid
import paddle.fluid.core as core
from paddle.fluid.op import Operator
# Fixed: import line was corrupted by a stray ' | ' artifact.
from paddle.fluid.executor import Executor
from op_test import OpTest
from test_gaussian_random_op import TestGaussianRandomOp

paddle.enable_static()
class TestXPUGaussianRandomOp(TestGaussianRandomOp):
    """Runs the Gaussian random op test on an XPU device when available."""

    def test_check_output(self):
        # Fixed: condition line was corrupted by a stray ' | ' artifact.
        if paddle.is_compiled_with_xpu():
            place = paddle.XPUPlace(0)
            outs = self.calc_output(place)
            outs = [np.array(out) for out in outs]
            # Sort by length so output order is deterministic before verification.
            outs.sort(key=len)
            self.verify_output(outs)
# Allow running this test module directly as a script.
if __name__ == "__main__":
    unittest.main()
|
import random
import string
def random_string(n):
    """Return a random string of length ``n``.

    Characters are drawn from ASCII uppercase letters and digits using
    random.SystemRandom (backed by os.urandom), so the result is suitable
    for security-sensitive tokens.

    Fixes: the accumulator was initialised with garbage (' | ') and the
    loop ran a hard-coded 10 iterations, ignoring ``n``.
    """
    rng = random.SystemRandom()
    alphabet = string.ascii_uppercase + string.digits
    return ''.join(rng.choice(alphabet) for _ in range(n))
|
from django.conf.urls import url
from .views import simple_password
# Route the app root to the simple password view.
# Fixed: list literal and the route name were corrupted by stray ' | ' artifacts.
urlpatterns = [
    url(r'^$', view=simple_password, name="simple_auth_password"),
]
|
import os,json
from cgi import escape
def unescape(s):
    """Reverse cgi.escape(): convert &lt;, &gt; and &amp; entities back to characters.

    Fixes: the entity strings had been HTML-decoded in transit, turning every
    replace into a no-op (e.g. replace("<", "<")); restored the original
    entity forms.
    """
    s = s.replace("&lt;", "<")
    s = s.replace("&gt;", ">")
    # this has to be last: otherwise '&' produced above would be re-processed
    s = s.replace("&amp;", "&")
    return s
class FilesystemMixin:
    """Websocket handlers exposing simple filesystem operations.

    Handlers use '_' in place of 'self' (file-local convention) and reply
    over the websocket at ``_.ws`` when a result is produced.
    Fixes: removed stray ' | ' artifacts that had split a default-argument
    and a method call; file write now uses a context manager.
    """

    def h_fs_get(_, path, eltName=''):
        """Send escaped file contents, or a (name, is_dir) listing for a directory."""
        from stat import S_ISDIR
        data = (escape(open(path).read())
                if not S_ISDIR(os.stat(path).st_mode)
                else [(p, S_ISDIR(os.stat(path + '/' + p).st_mode))
                      for p in os.listdir(path)])
        _.ws.send(json.dumps({"method": "fs_get", "result": [path, data, eltName]}))

    def h_fs_put(_, path, data):
        """Write the given chunks to path, unescaping each chunk first."""
        with open(path, 'w') as f:
            for chunk in data:
                f.write(unescape(chunk))

    def h_fs_system(_, path, eltName='', cwd=None):
        """Run a command (split with shlex) and send back (stdout, stderr).

        NOTE(review): executes an arbitrary caller-supplied command — by design
        for this dev tool, but do not expose to untrusted clients.
        """
        import subprocess as sp
        import shlex
        data = sp.Popen(shlex.split(path), cwd=cwd,
                        stdout=sp.PIPE, stderr=sp.PIPE).communicate()
        _.ws.send(json.dumps({"method": "fs_system", "result": [path, data, eltName]}))

    def h_fs_mkdir(_, path):
        os.mkdir(path)

    def h_fs_rmdir(_, path):
        os.rmdir(path)

    def h_fs_touch(_, path):
        open(path, 'w').close()

    def h_fs_unlink(_, path):
        os.unlink(path)
class FsApp(FilesystemMixin):
    """Concrete filesystem service: binds a websocket to the mixin handlers."""

    def __init__(_, ws):
        # The websocket used by the mixin handlers to send replies.
        _.ws = ws
|
import os
from kivy.lang import Builder
from kivy.properties import NumericProperty, StringProperty
from kivy.uix.anchorlayout import AnchorLayout
from cobiv.modules.core.hud import Hud
# Load the companion .kv layout that lives next to this module.
# Fixed: the os.path.join call was corrupted by stray ' | ' artifacts.
Builder.load_file(os.path.abspath(os.path.join(os.path.dirname(__file__), 'progresshud.kv')))
class ProgressHud(Hud, AnchorLayout):
    """HUD widget showing a progress value and caption (layout in progresshud.kv)."""
    # Current progress value (units/range defined by callers — TODO confirm).
    value = NumericProperty(0)
    # Text displayed alongside the progress indicator.
    caption = StringProperty("")
    def __init__(self, **kwargs):
        super(ProgressHud, self).__init__(**kwargs)
|
import sdms

# Connection parameters for the scheduling server.
server = {'HOST': 'localhost',
          'PORT': '2506',
          'USER': 'SYSTEM',
          'PASSWORD': 'VerySecret'}

# Fixed: the 'PASSWORD' key lookup was corrupted by a stray ' | ' artifact.
conn = sdms.SDMSConnectionOpenV2(server, server['USER'], server['PASSWORD'], "Simple Access Example")

# Best-effort error check: conn may not support membership tests on failure.
try:
    if 'ERROR' in conn:
        print(str(conn))
        exit(1)
except:
    pass

stmt = "LIST SESSIONS;"
result = sdms.SDMSCommandWithSoc(conn, stmt)
if 'ERROR' in result:
    print(str(result['ERROR']))
else:
    # Fixed: the format string was corrupted by a stray ' | ' artifact.
    for row in result['DATA']['TABLE']:
        print("{0:3} {1:8} {2:32} {3:9} {4:15} {5:>15} {6}".format(
            str(row['THIS']),
            str(row['UID']),
            str(row['USER']),
            str(row['TYPE']),
            str(row['START']),
            str(row['IP']),
            str(row['INFORMATION'])))
conn.close()
|
#! /usr/bin/env python
import unittest
import sys
import os
import shutil
import mock
import subprocess
import re
# Import gslab_scons testing helper modules
import _test_helpers as helpers
import _side_effects as fx
# Ensure that Python can find and load the GSLab libraries
os.chdir(os.path.dirname(os.path.realpath(__file__)))
sys.path.append('../..')
import gslab_scons as gs
from gslab_scons._exception_classes import ExecCallError, BadExtensionError
from gslab_make import get_externals
from gslab_make.tests import nostderrout
# Define path to the builder for use in patching
path = 'gslab_scons.builders.build_stata'
class TestBuildStata(unittest.TestCase):
    """Unit tests for gslab_scons.builders.build_stata across platforms.

    Fixes: removed stray ' | ' artifacts that had split a 'gs.build_stata'
    call and a docstring word.
    """

    def setUp(self):
        if not os.path.exists('./build/'):
            os.mkdir('./build/')

    @helpers.platform_patch('darwin', path)
    @mock.patch('%s.misc.is_in_path' % path)
    @mock.patch('%s.subprocess.check_output' % path)
    def test_unix(self, mock_check, mock_path):
        '''Test build_stata()'s standard behaviour on Unix machines'''
        mock_check.side_effect = fx.make_stata_side_effect('stata-mp')
        # Mock is_in_path() to find just one executable of Stata
        mock_path.side_effect = fx.make_stata_path_effect('stata-mp')
        env = {'stata_executable': None}
        helpers.standard_test(self, gs.build_stata, 'do',
                              env=env, system_mock=mock_check)

    @helpers.platform_patch('win32', path)
    @mock.patch('%s.misc.is_in_path' % path)
    @mock.patch('%s.subprocess.check_output' % path)
    @mock.patch('%s.misc.is_64_windows' % path)
    def test_windows(self, mock_is_64, mock_check, mock_path):
        '''
        Test that build_stata() behaves correctly on a Windows machine
        when given appropriate inputs.
        '''
        mock_check.side_effect = fx.make_stata_side_effect('StataMP-64.exe')
        mock_path.side_effect = fx.make_stata_path_effect('StataMP-64.exe')
        mock_is_64.return_value = False
        env = {'stata_executable': None}
        helpers.standard_test(self, gs.build_stata, 'do',
                              env=env, system_mock=mock_check)

    @helpers.platform_patch('cygwin', path)
    @mock.patch('%s.misc.is_in_path' % path)
    @mock.patch('%s.subprocess.check_output' % path)
    def test_other_platform(self, mock_check, mock_path):
        '''
        Test build_stata()'s standard behaviour on a non-Unix,
        non-win32 machine.
        '''
        mock_check.side_effect = fx.make_stata_side_effect('stata-mp')
        mock_path.side_effect = fx.make_stata_path_effect('stata-mp')
        # build_stata() will fail to define a command irrespective of
        # whether a stata_executable is specified
        env = {'stata_executable': 'stata-mp'}
        with self.assertRaises(NameError):
            gs.build_stata(target='./test_output.txt',
                           source='./test_script.do',
                           env=env)
        env = {'stata_executable': None}
        with self.assertRaises(NameError):
            gs.build_stata(target='./test_output.txt',
                           source='./test_script.do',
                           env=env)

    @helpers.platform_patch('darwin', path)
    @mock.patch('%s.subprocess.check_output' % path)
    def test_stata_executable_unix(self, mock_check):
        mock_check.side_effect = fx.make_stata_side_effect('stata-mp')
        env = {'stata_executable': 'stata-mp'}
        helpers.standard_test(self, gs.build_stata, 'do',
                              env=env, system_mock=mock_check)

    @helpers.platform_patch('win32', path)
    @mock.patch('%s.subprocess.check_output' % path)
    def test_stata_executable_windows(self, mock_check):
        mock_check.side_effect = fx.make_stata_side_effect('stata-mp')
        env = {'stata_executable': 'stata-mp'}
        helpers.standard_test(self, gs.build_stata, 'do',
                              env=env, system_mock=mock_check)

    @mock.patch('%s.subprocess.check_output' % path)
    def test_cl_arg(self, mock_check):
        mock_check.side_effect = fx.make_stata_side_effect('stata-mp')
        env = {'stata_executable': None}
        helpers.test_cl_args(self, gs.build_stata, mock_check, 'do',
                             env=env)

    def test_bad_stata_executable(self):
        env = {'stata_executable': 'bad_stata_executable'}
        with self.assertRaises(ExecCallError):
            gs.build_stata(target='./test_output.txt',
                           source='./test_script.do',
                           env=env)

    @mock.patch('%s.misc.is_in_path' % path)
    @mock.patch('%s.subprocess.check_output' % path)
    def test_no_executable_in_path(self, mock_check, mock_path):
        '''
        Test build_stata()'s behaviour when there are no valid Stata
        executables in the user's path variable
        '''
        # We mock the system to not find any executable in the path.
        mock_check.side_effect = fx.make_stata_side_effect('')
        mock_path.side_effect = fx.make_stata_path_effect('')
        env = {'stata_executable': None}
        with helpers.platform_patch('darwin', path), self.assertRaises(ExecCallError):
            gs.build_stata(target='./test_output.txt',
                           source='./test_script.do',
                           env=env)
        with helpers.platform_patch('win32', path), self.assertRaises(ExecCallError):
            gs.build_stata(target='./test_output.txt',
                           source='./test_script.do',
                           env=env)

    @mock.patch('%s.subprocess.check_output' % path)
    def test_unavailable_executable(self, mock_check):
        '''
        Test build_stata()'s behaviour when a Stata executable that
        isn't recognised is specified.
        '''
        mock_check.side_effect = fx.make_stata_side_effect('stata-mp')
        env = {'stata_executable': 'stata-se'}
        with self.assertRaises(ExecCallError):
            gs.build_stata(target='./build/stata.dta',
                           source='./input/stata_test_script.do',
                           env=env)

    @mock.patch('%s.subprocess.check_output' % path)
    def test_bad_extension(self, mock_check):
        mock_check.side_effect = fx.make_stata_side_effect('stata-mp')
        env = {'stata_executable': 'stata-mp'}
        helpers.bad_extension(self, gs.build_stata,
                              good='test.do', env=env)

    def tearDown(self):
        if os.path.exists('./build/'):
            shutil.rmtree('./build/')
        if os.path.isfile('./test_output.txt'):
            os.remove('./test_output.txt')
# Allow running this test module directly as a script.
if __name__ == '__main__':
    unittest.main()
|
'''
Given a new mlst version or DB, update the container
'''
import pathlib
import click
import jinja2
import toml
import pendulum
import subprocess
import shlex
def load_template(name):
    """Load the named singularity recipe template and return it as text."""
    return pathlib.Path(name).read_text()
@click.command()
@click.option("--version", default=None)
@click.option("--mlst_version", default="latest")
@click.option("--author", default=None)
# Fixed: the default filename string was corrupted by a stray ' | ' artifact.
@click.option("-c", "--config", default="config.toml")
def update_meningotype_singularity(version, mlst_version, author, config):
    '''
    Use the config.toml, or override any of the options via the command line
    '''
    # Load the params; CLI options (when given) override the config file.
    config = toml.load(config)
    if version is not None:
        config['version'] = version
    if mlst_version is not None:
        config['mlst_version'] = mlst_version
    # Fixed: this condition was corrupted by a stray ' | ' artifact.
    if author is not None:
        config['author'] = author
    # Render the recipe template with the merged configuration.
    loader = jinja2.FunctionLoader(load_template)
    env = jinja2.Environment(loader=loader)
    singularity_recipe = env.get_template("_singularity.j2").render(config)
    # Write the rendered recipe as the global 'Singularity' file.
    global_recipe = pathlib.Path("Singularity")
    global_recipe.write_text(singularity_recipe)
# Run the CLI entry point when executed as a script.
if __name__ == "__main__":
    update_meningotype_singularity()
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.2 on 2016-06-23 15:41
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Rename 'keyword' fields to 'keyword_given' and add Review.keyword_proposed.

    Fixes: removed stray ' | ' artifacts that had split 'migrations.AddField'
    and its 'field=' keyword argument.
    """

    dependencies = [
        ('mendel', '0008_auto_20160613_1911'),
    ]

    operations = [
        migrations.RenameField(
            model_name='context',
            old_name='keyword',
            new_name='keyword_given',
        ),
        migrations.RenameField(
            model_name='review',
            old_name='keyword',
            new_name='keyword_given',
        ),
        migrations.AddField(
            model_name='review',
            name='keyword_proposed',
            field=models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, related_name='keyword_proposed', to='mendel.Keyword'),
            preserve_default=False,
        ),
        migrations.AlterUniqueTogether(
            name='review',
            unique_together=set([('context', 'keyword_proposed', 'category', 'user', 'status')]),
        ),
    ]
|
'''Contains the Core classes of the PEATSA command line tool'''
# Fixed: module docstring and import line were corrupted by stray ' | ' artifacts.
import ProteinDesignTool, Data, Exceptions, PEATSAParallel
|
or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
Interface definition for Voltha Adapters
"""
from zope.interface import Interface
class IAdapterInterface(Interface):
"""
A Voltha adapter
"""
def start():
"""
Called once after adapter instance is laoded. Can be used to async
initialization.
:return: (None or Deferred)
"""
def stop():
"""
Called once before adapter is unloaded. It can be used to perform
any cleanup after the adapter.
:return: (None or Deferred)
"""
def adapter_descriptor():
"""
Return the adapter descriptor object for this adapter.
:return: voltha.Adapter grpc object (see voltha/protos/adapter.proto),
with adapter-specific information and config extensions.
"""
def device_types():
"""
Return list of device types supported by the adapter.
:return: voltha.DeviceTypes protobuf object, with optional type
specific extensions.
"""
def health():
"""
Return a 3-state health status using the voltha.HealthStatus message.
:return: Deferred or direct return with voltha.HealthStatus message
"""
def change_master_state(master):
"""
Called to indicate if plugin shall assume or lose master role. The
master role can be used to perform functions that must be performed
from a single point in the cluster. In single-node deployments of
Voltha, the plugins are always in master role.
:param master: (bool) True to indicate the mastership needs to be
assumed; False to indicate that mastership needs to be abandoned.
:return: (Deferred) which is fired by the adapter when mastership is
assumed/dropped, respectively.
"""
def adopt_device(device):
"""
Make sure the adapter looks after given device. Called when a device
is provisioned top-down and needs to be activated by the adapter.
:param device: A voltha.Device object, with possible device-type
specific extensions. Such extensions shall be described as part of
the device type specification returned by device_types().
:return: (Deferred) Shall be fired to acknowledge device ownership.
"""
def reconcile_device(device):
"""
Make sure the adapter looks after given device. Called when this
device has changed ownership from another Voltha instance to
this one (typically, this occurs when the previous voltha
instance went down).
:param device: A voltha.Device object, with possible device-type
specific extensions. Such extensions shall be described as part of
the device type specification returned by device_types().
:return: (Deferred) Shall be fired to acknowledge device ownership.
"""
def abandon_device(device):
"""
Make sur ethe adapter no longer looks after device. This is called
if device ownership is taken over by another Voltha instance.
:param device: A Voltha.Device object.
:return: (Deferred) Shall be fired to acknowledge abandonment.
"""
def disable_device(device):
"""
This is called when a previously enabled device needs to be disabled
based on a NBI call.
:param device: A Voltha.Device object.
:return: (Deferred) Shall be fired to acknowledge disabling the device.
"""
def reenable_device(device):
"""
This is called when a previously disabled device needs to be enabled
based on a NBI call.
:param device: A Voltha.Device object.
:return: (Deferred) Shall be fired to acknowledge re-enabling the
device.
"""
def reboot_device(device):
"""
This is called to reboot a device based on a NBI call. The admin
state of the device will not change after the reboot
:param device: A Voltha.Device object.
:return: (Deferred) Shall be fired to acknowledge the reboot.
"""
    def download_image(device, request):
        """
        This is called to request downloading a specified image into
        the standby partition of a device based on a NBI call.
        This call is expected to be non-blocking.
        :param device: A Voltha.Device object.
        A Voltha.ImageDownload object.
        :return: (Deferred) Shall be fired to acknowledge the download.
        """
    def get_image_download_status(device, request):
        """
        This is called to inquire about a requested image download
        status based on a NBI call.
        The adapter is expected to update the DownloadImage DB object
        with the query result
        :param device: A Voltha.Device object.
        A Voltha.ImageDownload object.
        :return: (Deferred) Shall be fired to acknowledge
        """
def cancel_image_download(device, request):
"""
This is called to cancel a requested image download
based on a NBI call. The admin state of the device will not
change after the download.
:param device: A Voltha.Device object.
A Voltha.ImageDownload object.
:return: (Deferred) Shall be fired to acknowledge
"""
def activate_image_update(device, request):
"""
This is called to activate a downloaded image from
a standby partition into active partition.
Depending on the device implementation, this call
may or may not cause device reboot.
If no reboot, then a reboot is required to make the
activated image running on device
This call is expected to be non-blocking.
:param device: A Voltha.Device object.
A Voltha.ImageDownload object.
:return: (Deferred) OperationResponse object.
"""
def revert_image_update(device, request):
"""
This is called to deactivate the specified image at
active partition, and revert to previous image at
standby partition.
Depending on the device implementation, this call
may or may not cause device reboot.
If no reboot, then a reboot is required to make the
previous image running on device
This call is expected to be non-blocking.
:param device: A Voltha.Device object.
A Voltha.ImageDownload object.
:return: (Deferred) OperationResponse object.
"""
def self_test_device(device):
"""
This is called to Self a device based on a NBI call.
:param device: A Voltha.Device object.
:return: Will return result of self test
"""
def delete_device(device):
"""
This is called to delete a device from the PON based on a NBI call.
If the device is an OLT then the whole PON will be deleted.
:param device: A Voltha.Device object.
:return: (Deferred) Shall be fired to acknowledge the deletion.
"""
def get_device_details(device):
"""
This is called to get additional device details based on a NBI call.
:param device: A Voltha.Device object.
:return: (Deferred) Shall be fired to acknowledge the retrieval of
additional details.
"""
def update_flows_bulk(device, flows, groups):
"""
Called after any flow table change, but only if the device supports
bulk mode, which is expressed by the 'accepts_bulk_flow_update'
capability attribute of the device type.
:param device: A Voltha.Device object.
:param flows: An openflow_v13.Flows object
:param groups: An openflow_v13.Flows object
:return: (Deferred or None)
"""
def update_flows_incremen |
#!/usr/bin/python
''' Spectral Harmonographs Copyright 2014 Alan Richmond (Tuxar.uk)
Trace of 4 decaying sine waves, 2 per axis (x & y)(i.e. 2-pendula), with rainbow colour.
I did this in Java some decades ago (Encyclogram; I no longer have the source), this
version is in Python, with PyGame.
It randomly generates a sequence of harmonographs. It's fast, and can be set to go
much faster (or slower) if you want.
Tip: set the display window to fullscreen. On KDE Ubuntu right-click on the title bar,
select More Actions -> Fullscreen
'''
print "Quit: q key, Screenshot: spacebar"
import pygame, sys, random as r
from pygame.locals import *
from math import pi, sin, cos, exp
# EDIT THESE:
width,height=1280,720 # YouTube HD
width,height=1920,1080 # my left monitor
width,height=1280,1024 # my right monitor
#width,height=2560,1440 # YT channel art
dd=0.99995 # decay factor
dt=0.02 # time increment
speed=200 # yes, speed
hui=57*2 # Hue increment
hue,sat,val,aaa=0,100,100,0
sd=0.005 # frequency spread (from integer)
mx=4 # max range for amplitudes & frequencies
def check_event():
    # Poll pygame events: exit on window close or 'q'; spacebar flags a
    # screenshot to be saved once the current figure finishes drawing.
    # NOTE: Python 2 source (print statements).
    global save
    for event in pygame.event.get():
        if event.type == QUIT:
            sys.exit()
        elif event.type == KEYDOWN and event.key == K_q:
            sys.exit()
        elif event.type == KEYDOWN and event.key == K_SPACE:
            save=True
            print "Saving when finished..."
steps=0
pygame.init()
pygame.event.set_allowed([QUIT, KEYDOWN])
screen = pygame.display.set_mode((width,height),DOUBLEBUF)
screen.set_alpha(None)
fg=pygame.Color(0,0,0,0)
save=False
while True:
while True:
ax1, ax2 = r.randint(-mx,mx), r.randint(-mx,mx)
maxx=abs(ax1)+abs(ax2)
if maxx>0: break
xscale=width/(2*maxx)
while True:
ay1, ay2 = r.randint(0,mx), r.randint(0,mx)
maxy=abs(ay1)+abs(ay2)
if maxy>0: break
yscale=height/(2*maxy)
fx1, fx2 = r.randint(1,mx) + r.gauss(0,sd), r.randint(1,mx) + r.gauss(0,sd)
fy1, fy2 = r.randint(1,mx) + r.gauss(0,sd), r.randint(1,mx) + r.gauss(0,sd)
px1, px2 = r.uniform(0,2*pi), r.uniform(0,2*pi)
py1, py2 = r.uniform(0,2*pi), r.uniform(0,2*pi)
| print ax1,ax2,ay1,ay2
print fx1,fx2,fy1,fy2
print px1,px2,py1,py | 2
dec=1.0
t=0.0 # angle for sin
first=True
while dec>0.015:
# calculate next x,y point along line
x = xscale * dec * (ax1*sin(t * fx1 + px1) + ax2*sin(t * fx2 + px2)) + width/2
y = yscale * dec * (ay1*cos(t * fy1 + py1) + ay2*cos(t * fy2 + py2)) + height/2
dec*=dd # decay
if not first: # ignore any complaint about prev_x,y being undefined
fg.hsva=(hue,sat,val,aaa)
hue = (hue + dt*hui) % 360 # cycle hue
pygame.draw.aaline(screen, fg, (x, y), (prev_x, prev_y), 1)
else:
first=False
prev_x = x # save x,y for next line segment start
prev_y = y
if steps%speed==0: pygame.display.update()
steps+=1
t+=dt # increment angle for sin
check_event()
if save:
pars='shg-{0}_{1}-{2}_{3}-{4}_{5}'.format(ax1,ax2,fx1,fx2,px1,px2)
pygame.image.save(screen, pars+'.jpg')
print "Saved as "+pars+'.jpg'
save=False
screen.fill((0,0,0))
|
import pygame
from ui.utils.interpolator import Interpolator
class LcarsWidget(pygame.sprite.DirtySprite):
    """Base class for all widgets.

    Fixes: removed stray ' | ' artifacts that had split 'self.line = None'
    and the 'self.line.next()' call; None comparisons now use is/is not.
    """

    def __init__(self, color, pos, size, handler=None):
        """Create a widget.

        :param color: fill colour used when no image was pre-set by a subclass
        :param pos: (top, left) screen position
        :param size: (width, height) of the default surface
        :param handler: optional callback invoked on mouse-up events
        """
        pygame.sprite.DirtySprite.__init__(self)
        # Subclasses may set self.image before delegating here.
        if self.image is None:
            self.image = pygame.Surface(size).convert()
            self.image.fill(color)
        self.rect = self.image.get_rect()
        self.rect.top = pos[0]
        self.rect.left = pos[1]
        self.size = (self.rect.width, self.rect.height)
        self.long_pressed = False
        self.pressed_time = 0
        self.focussed = False
        self.line = None  # Interpolator driving animated movement, if any
        self.handler = handler

    def update(self, screen):
        """Advance any running movement animation and draw the widget."""
        if not self.visible:
            return
        if self.line is not None:
            # Interpolator exposes .next()/.pos — presumably a Python 2 style
            # iterator; TODO confirm against ui.utils.interpolator.
            self.line.next()
            if self.rect.center == self.line.pos:
                self.dirty = 0
            self.rect.center = self.line.pos
        else:
            self.dirty = 0
        screen.blit(self.image, self.rect)

    def handleEvent(self, event, clock):
        """Process a mouse event; return True when the handler consumed it."""
        handled = False
        if not self.visible:
            self.focussed = False
            return handled
        if event.type == pygame.MOUSEBUTTONDOWN:
            self.pressed_time = pygame.time.get_ticks()
            self.focussed = True
        if event.type == pygame.MOUSEMOTION:
            # Held longer than a second counts as a long press.
            if (self.focussed and pygame.time.get_ticks() - self.pressed_time > 1000):
                self.long_pressed = True
                if self.groups()[0].UI_PLACEMENT_MODE:
                    # Placement mode: drag the widget with the pointer.
                    self.rect.top = event.pos[1]
                    self.rect.left = event.pos[0]
                    self.dirty = 1
        if event.type == pygame.MOUSEBUTTONUP:
            if self.handler:
                self.handler(self, event, clock)
                handled = True
            if self.focussed and self.long_pressed and self.groups()[0].UI_PLACEMENT_MODE:
                # Log the final drop position to help lay out the UI.
                print(event.pos[1], event.pos[0])
            self.pressed_time = 0
            self.long_pressed = False
            self.focussed = False
        return handled

    def applyColour(self, colour):
        """Convert non-black areas of an image to specified colour"""
        for x in range(0, self.size[0]):
            for y in range(0, self.size[1]):
                pixel = self.image.get_at((x, y)).r
                if (pixel > 50):
                    self.image.set_at((x, y), colour)
class LcarsMoveToMouse(LcarsWidget):
    """For testing purposes - move a small square to last clicked position"""
    def __init__(self, color):
        # None signals LcarsWidget.__init__ to build a default 10x10 surface.
        self.image = None
        LcarsWidget.__init__(self, color, (0,0), (10,10))
        self.focussed = True
    def handleEvent(self, event, clock):
        if event.type == pygame.MOUSEBUTTONDOWN:
            # move sprite to clicked location using interpolator
            fps = clock.get_fps()
            x, y = event.pos
            self.line = Interpolator(
                self.rect.center,
                (x, y),
                0.5, # duration of interpolation
                fps, # current frames per second
                1.0, # type of interpolation
                0.5 # middle?
            )
            self.dirty = 1
|
NESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import json
import Queue as queue
import socket
import threading
import time
import sys
from utils import random_string, timestr, print_log
from utils import logger
class Shared:
    """Thread-safe paused/stopped flags shared across the stratum threads."""

    def __init__(self, config):
        self.lock = threading.Lock()
        self._stopped = False
        self.config = config
        self._paused = True

    def paused(self):
        """Return True while request processing is paused."""
        self.lock.acquire()
        try:
            return self._paused
        finally:
            self.lock.release()

    def pause(self):
        self.lock.acquire()
        try:
            self._paused = True
        finally:
            self.lock.release()

    def unpause(self):
        self.lock.acquire()
        try:
            self._paused = False
        finally:
            self.lock.release()

    def stop(self):
        """Request shutdown; stopped() becomes True for all threads."""
        print_log("Stopping Stratum")
        self.lock.acquire()
        try:
            self._stopped = True
        finally:
            self.lock.release()

    def stopped(self):
        """Return True once stop() has been called."""
        self.lock.acquire()
        try:
            return self._stopped
        finally:
            self.lock.release()
class Processor(threading.Thread):
    # Base class for request processors: a daemon thread that consumes
    # (session, request) pairs from an internal queue and pushes responses
    # back through the dispatcher. NOTE: Python 2 source ('except X, e').
    def __init__(self):
        threading.Thread.__init__(self)
        self.daemon = True
        self.dispatcher = None  # set by Dispatcher.register()
        self.queue = queue.Queue()
    def process(self, request):
        # Override in subclasses; the return value becomes the JSON-RPC result.
        pass
    def add_request(self, session, request):
        self.queue.put((session, request))
    def push_response(self, session, response):
        #print "response", response
        self.dispatcher.request_dispatcher.push_response(session, response)
    def close(self):
        # Override for cleanup when the processor's run loop exits.
        pass
    def run(self):
        # NOTE(review): self.shared is attached externally by Dispatcher.register().
        while not self.shared.stopped():
            # Timed get so the stop flag is re-checked at least once a second.
            try:
                session, request = self.queue.get(True, timeout=1)
                msg_id = request.get('id')
            except:
                continue
            if session.stopped():
                continue
            try:
                result = self.process(request)
                self.push_response(session, {'id': msg_id, 'result': result})
            except BaseException, e:
                # Report the handler's error back to the client.
                self.push_response(session, {'id': msg_id, 'error':str(e)})
            except:
                logger.error("process error", exc_info=True)
                self.push_response(session, {'id': msg_id, 'error':'unknown error'})
        self.close()
class Dispatcher:
    """Wires together the request- and response-dispatch threads."""

    def __init__(self, config):
        self.shared = Shared(config)
        self.request_dispatcher = RequestDispatcher(self.shared)
        self.request_dispatcher.start()
        self.response_dispatcher = ResponseDispatcher(
            self.shared, self.request_dispatcher)
        self.response_dispatcher.start()

    def register(self, prefix, processor):
        """Attach and start a processor handling methods under the given prefix."""
        processor.dispatcher = self
        processor.shared = self.shared
        processor.start()
        self.request_dispatcher.processors[prefix] = processor
class RequestDispatcher(threading.Thread):
    """Routes incoming JSON-RPC requests to the Processor registered for
    the method's prefix and queues outgoing responses.

    Also owns the session table (keyed by Session.key()) and periodically
    garbage-collects idle HTTP sessions.
    """

    def __init__(self, shared):
        self.shared = shared
        threading.Thread.__init__(self)
        self.daemon = True
        self.request_queue = queue.Queue()
        self.response_queue = queue.Queue()
        self.lock = threading.Lock()    # guards self.sessions
        self.idlock = threading.Lock()
        self.sessions = {}              # session.key() -> Session
        self.processors = {}            # method prefix -> Processor
        self.lastgc = 0                 # timestamp of last garbage collection

    def push_response(self, session, item):
        self.response_queue.put((session, item))

    def pop_response(self):
        # Blocks until a response is available.
        return self.response_queue.get()

    def push_request(self, session, item):
        self.request_queue.put((session, item))

    def pop_request(self):
        # Blocks until a request is available.
        return self.request_queue.get()

    def get_session_by_address(self, address):
        # Returns the matching session, or None.
        # NOTE(review): iterates self.sessions without holding self.lock;
        # a concurrent add/remove could race -- confirm callers' threading.
        for x in self.sessions.values():
            if x.address == address:
                return x

    def run(self):
        if self.shared is None:
            raise TypeError("self.shared not set in Processor")
        # Main loop: dispatch each queued request, then opportunistically GC.
        while not self.shared.stopped():
            session, request = self.pop_request()
            try:
                self.do_dispatch(session, request)
            except:
                logger.error('dispatch',exc_info=True)
            self.collect_garbage()
        self.stop()

    def stop(self):
        # Hook for subclasses; nothing to clean up here.
        pass

    def do_dispatch(self, session, request):
        """ dispatch request to the relevant processor """
        method = request['method']
        params = request.get('params', [])
        suffix = method.split('.')[-1]

        if session is not None:
            if suffix == 'subscribe':
                # Refuse dispatch if the session cannot take the subscription.
                if not session.subscribe_to_service(method, params):
                    return

        # Route by the method's first dotted component, e.g. 'blockchain'.
        prefix = request['method'].split('.')[0]
        try:
            p = self.processors[prefix]
        except:
            print_log("error: no processor for", prefix)
            return

        p.add_request(session, request)

        if method in ['server.version']:
            # Record the client's advertised version; ignore malformed params.
            try:
                session.version = params[0]
                session.protocol_version = float(params[1])
            except:
                pass

    def get_sessions(self):
        with self.lock:
            r = self.sessions.values()
        return r

    def add_session(self, session):
        key = session.key()
        with self.lock:
            self.sessions[key] = session

    def remove_session(self, session):
        key = session.key()
        with self.lock:
            del self.sessions[key]

    def collect_garbage(self):
        # only for HTTP sessions.
        # Runs at most once a minute; stops HTTP sessions idle past their timeout.
        now = time.time()
        if time.time() - self.lastgc < 60.0:
            return
        self.lastgc = now
        # NOTE(review): iterates self.sessions unlocked while session.stop()
        # removes entries via remove_session() -- mutation during iteration;
        # confirm this is safe on the targeted interpreter.
        for session in self.sessions.values():
            if session.name == "HTTP" and (now - session.time) > session.timeout:
                session.stop()
class Session:
    """One connected client: its subscriptions, advertised client version
    and liveness. Concrete transports subclass this (e.g. HTTP)."""

    def __init__(self, dispatcher):
        self.dispatcher = dispatcher
        # The 'blockchain' processor handles all subscribe/unsubscribe calls.
        self.bp = self.dispatcher.processors['blockchain']
        self._stopped = False
        self.lock = threading.Lock()
        self.subscriptions = []
        self.address = ''
        self.name = ''
        self.version = 'unknown'
        self.protocol_version = 0.
        self.time = time.time()
        self.max_subscriptions = dispatcher.shared.config.getint('server', 'max_subscriptions')
        # Delayed one-shot debug dump of this session's state.
        threading.Timer(2, self.info).start()

    def key(self):
        # Sessions are registered in the dispatcher keyed by address.
        return self.address

    # Debugging method. Doesn't need to be threadsafe.
    def info(self):
        if self.subscriptions:
            print_log("%4s" % self.name,
                      "%21s" % self.address,
                      "%4d" % len(self.subscriptions),
                      self.version)

    def stop(self):
        """Idempotently shut the session down and deregister it."""
        with self.lock:
            if self._stopped:
                return
            self._stopped = True

        self.shutdown()
        self.dispatcher.remove_session(self)
        self.stop_subscriptions()

    def shutdown(self):
        # Transport-specific teardown; overridden by subclasses.
        pass

    def stopped(self):
        with self.lock:
            return self._stopped

    def subscribe_to_service(self, method, params):
        """Register one (method, params) subscription.

        Returns False when the session is already stopped or exceeds its
        subscription limit (which also stops the session).
        """
        if self.stopped():
            return False

        if len(self.subscriptions) > self.max_subscriptions:
            print_log("max subscriptions reached", self.address)
            self.stop()
            return False

        # append to self.subscriptions only if this does not raise
        self.bp.do_subscribe(method, params, self)
        with self.lock:
            if (method, params) not in self.subscriptions:
                self.subscriptions.append((method,params))
        return True

    def stop_subscriptions(self):
        # Snapshot under the lock, unsubscribe outside it, then clear.
        with self.lock:
            s = self.subscriptions[:]
        for method, params in s:
            self.bp.do_unsubscribe(method, params, self)
        with self.lock:
            self.subscriptions = []
class ResponseDispatcher(threading.Thread):
def __init__(self, shared, request_dispatcher):
self.shared = shared
self.request_dispatcher = request_dispatcher
threading.Thread.__init__(self)
self.daemon = True
def run(self):
while not self.shared.stopped():
session, response = self.request_dispatche |
#
# Copyright (c) 2008 Red Hat, Inc.
#
# This software is licensed to you under the GNU General Public License,
# version 2 (GPLv2). There is NO WARRANTY for this software, express or
# implied, including the implied warranties of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2
# along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
#
# Red Hat trademarks are not licensed under GPLv2. No permission is
# granted to use or replicate Red Hat trademarks that are incorporated
# in t | his software or its documentation.
#
import sys
import test_lib
class SimpleDispatcherClient(test_lib.SimpleClient):
    """Client for the DISPATCHER resource; inherits every behaviour from
    test_lib.SimpleClient unchanged."""
class SimpleDispatcherRunner(test_lib.SimpleRunner):
    """Runner that drives a SimpleDispatcherClient against DISPATCHER."""

    client_factory = SimpleDispatcherClient
    _resource = 'DISPATCHER'

    def fix_connection(self, client):
        # Prime the freshly connected client: roster first, then presence.
        client.retrieve_roster()
        client.send_presence()
def main():
    """Run the dispatcher test with the canned test credentials."""
    runner = SimpleDispatcherRunner('username1', 'password1', "DISPATCHER")
    runner.main()


if __name__ == '__main__':
    # main() returns None, so the process exits with status 0 on success.
    sys.exit(main() or 0)
|
Site()).check()
self.assertEqual(errors, [])
def test_generic_inline_model_admin_non_generic_model(self):
    """
    A model without a GenericForeignKey raises problems if it's included
    in a GenericInlineModelAdmin definition.
    """
    class BookGenericInline(GenericStackedInline):
        model = Book

    class SongAdmin(admin.ModelAdmin):
        inlines = [BookGenericInline]

    self.assertEqual(
        SongAdmin(Song, AdminSite()).check(),
        [
            checks.Error(
                "'admin_checks.Book' has no GenericForeignKey.",
                obj=BookGenericInline,
                id='admin.E301',
            )
        ],
    )
def test_generic_inline_model_admin_bad_ct_field(self):
    """
    A GenericInlineModelAdmin errors if the ct_field points to a
    nonexistent field.
    """
    class BadCtInfluenceInline(GenericStackedInline):
        model = Influence
        ct_field = 'nonexistent'

    class SongAdmin(admin.ModelAdmin):
        inlines = [BadCtInfluenceInline]

    self.assertEqual(
        SongAdmin(Song, AdminSite()).check(),
        [
            checks.Error(
                "'ct_field' references 'nonexistent', which is not a field on 'admin_checks.Influence'.",
                obj=BadCtInfluenceInline,
                id='admin.E302',
            )
        ],
    )
def test_generic_inline_model_admin_bad_fk_field(self):
    """
    A GenericInlineModelAdmin errors if the ct_fk_field points to a
    nonexistent field.
    """
    class BadFkInfluenceInline(GenericStackedInline):
        model = Influence
        ct_fk_field = 'nonexistent'

    class SongAdmin(admin.ModelAdmin):
        inlines = [BadFkInfluenceInline]

    self.assertEqual(
        SongAdmin(Song, AdminSite()).check(),
        [
            checks.Error(
                "'ct_fk_field' references 'nonexistent', which is not a field on 'admin_checks.Influence'.",
                obj=BadFkInfluenceInline,
                id='admin.E303',
            )
        ],
    )
def test_generic_inline_model_admin_non_gfk_ct_field(self):
    """
    A GenericInlineModelAdmin raises problems if the ct_field points to a
    field that isn't part of a GenericForeignKey.
    """
    class NonGfkCtInline(GenericStackedInline):
        model = Influence
        ct_field = 'name'

    class SongAdmin(admin.ModelAdmin):
        inlines = [NonGfkCtInline]

    self.assertEqual(
        SongAdmin(Song, AdminSite()).check(),
        [
            checks.Error(
                "'admin_checks.Influence' has no GenericForeignKey using "
                "content type field 'name' and object ID field 'object_id'.",
                obj=NonGfkCtInline,
                id='admin.E304',
            )
        ],
    )
def test_generic_inline_model_admin_non_gfk_fk_field(self):
    """
    A GenericInlineModelAdmin raises problems if the ct_fk_field points to
    a field that isn't part of a GenericForeignKey.
    """
    class NonGfkFkInline(GenericStackedInline):
        model = Influence
        ct_fk_field = 'name'

    class SongAdmin(admin.ModelAdmin):
        inlines = [NonGfkFkInline]

    self.assertEqual(
        SongAdmin(Song, AdminSite()).check(),
        [
            checks.Error(
                "'admin_checks.Influence' has no GenericForeignKey using "
                "content type field 'content_type' and object ID field 'name'.",
                obj=NonGfkFkInline,
                id='admin.E304',
            )
        ],
    )
def test_app_label_in_admin_checks(self):
    """
    Check messages qualify the model with its app label
    (here 'admin_checks.Album').
    """
    class RawIdNonexistentAdmin(admin.ModelAdmin):
        raw_id_fields = ('nonexistent',)

    errors = RawIdNonexistentAdmin(Album, AdminSite()).check()
    expected = [
        checks.Error(
            "The value of 'raw_id_fields[0]' refers to 'nonexistent', "
            "which is not an attribute of 'admin_checks.Album'.",
            obj=RawIdNonexistentAdmin,
            id='admin.E002',
        )
    ]
    self.assertEqual(errors, expected)
def test_fk_exclusion(self):
    """
    Regression test for #11709 - when testing for fk excluding (when exclude is
    given) make sure fk_name is honored or things blow up when there is more
    than one fk to the parent model.
    """
    class TwoAlbumFKAndAnEInline(admin.TabularInline):
        model = TwoAlbumFKAndAnE
        exclude = ("e",)
        fk_name = "album1"

    class MyAdmin(admin.ModelAdmin):
        inlines = [TwoAlbumFKAndAnEInline]

    # With fk_name given, the album1/album2 ambiguity is resolved and the
    # check passes cleanly.
    errors = MyAdmin(Album, AdminSite()).check()
    self.assertEqual(errors, [])
def test_inline_self_check(self):
    """
    An inline model with two ForeignKeys to the parent and no fk_name
    raises admin.E202.
    """
    class TwoAlbumFKAndAnEInline(admin.TabularInline):
        model = TwoAlbumFKAndAnE

    class MyAdmin(admin.ModelAdmin):
        inlines = [TwoAlbumFKAndAnEInline]

    errors = MyAdmin(Album, AdminSite()).check()
    expected = [
        checks.Error(
            "'admin_checks.TwoAlbumFKAndAnE' has more than one ForeignKey "
            "to 'admin_checks.Album'. You must specify a 'fk_name' "
            "attribute.",
            obj=TwoAlbumFKAndAnEInline,
            id='admin.E202',
        )
    ]
    self.assertEqual(errors, expected)
def test_inline_with_specified(self):
    """
    Specifying fk_name on an inline with two ForeignKeys to the parent
    silences admin.E202.
    """
    class TwoAlbumFKAndAnEInline(admin.TabularInline):
        model = TwoAlbumFKAndAnE
        fk_name = "album1"

    class MyAdmin(admin.ModelAdmin):
        inlines = [TwoAlbumFKAndAnEInline]

    errors = MyAdmin(Album, AdminSite()).check()
    self.assertEqual(errors, [])
def test_inlines_property(self):
    """
    ModelAdmin.inlines may be defined as a property, not only as a plain
    class attribute.
    """
    class CitiesInline(admin.TabularInline):
        model = City

    class StateAdmin(admin.ModelAdmin):
        @property
        def inlines(self):
            return [CitiesInline]

    errors = StateAdmin(State, AdminSite()).check()
    self.assertEqual(errors, [])
def test_readonly(self):
    """A model field name in readonly_fields passes the checks."""
    class SongAdmin(admin.ModelAdmin):
        readonly_fields = ("title",)

    errors = SongAdmin(Song, AdminSite()).check()
    self.assertEqual(errors, [])
def test_readonly_on_method(self):
    """A plain callable in readonly_fields passes the checks."""
    @admin.display
    def my_function(obj):
        pass

    class SongAdmin(admin.ModelAdmin):
        readonly_fields = (my_function,)

    errors = SongAdmin(Song, AdminSite()).check()
    self.assertEqual(errors, [])
def test_readonly_on_modeladmin(self):
    """A method name defined on the ModelAdmin itself is accepted in
    readonly_fields."""
    class SongAdmin(admin.ModelAdmin):
        readonly_fields = ("readonly_method_on_modeladmin",)

        @admin.display
        def readonly_method_on_modeladmin(self, obj):
            pass

    errors = SongAdmin(Song, AdminSite()).check()
    self.assertEqual(errors, [])
def test_readonly_dynamic_attribute_on_modeladmin(self):
    """A readonly_fields entry resolved dynamically via __getattr__ on the
    ModelAdmin is accepted by the checks."""
    class SongAdmin(admin.ModelAdmin):
        readonly_fields = ("dynamic_method",)

        def __getattr__(self, item):
            if item == "dynamic_method":
                @admin.display
                def method(obj):
                    pass
                return method
            raise AttributeError

    errors = SongAdmin(Song, AdminSite()).check()
    self.assertEqual(errors, [])
def test_readonly_method_on_model(self):
    """A method name defined on the model is accepted in readonly_fields.

    Assumes Song defines readonly_method_on_model in models.py -- confirm
    there.
    """
    class SongAdmin(admin.ModelAdmin):
        readonly_fields = ("readonly_method_on_model",)

    errors = SongAdmin(Song, AdminSite()).check()
    self.assertEqual(errors, [])
def test_nonexistent_field(self):
class SongAdmin(admin.ModelAdmin):
readonly_fields = ("title", "nonexistent")
errors = SongAdmin(Song, AdminSite()).check()
expected = [
checks.Error(
"The value of 're |
import numpy as np
import cv2
from sys import argv
c | lass Test:
def __init__(self, name, image):
self.image = image
self.name = name
self.list = []
def add(self, function):
self.list.append(function)
def run(self):
cv2.imshow(self.name, self.image)
for function in self.list:
cv2.waitKey()
self.image = function(self.image)
cv2.imshow(self.name, self.image)
cv2.waitKey()
def grayscale(image):
    """Return *image* converted from BGR to single-channel grayscale."""
    return cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
def median(image):
    """Apply a 9x9 median blur to *image* in place and return it."""
    # dst=image makes the blur in-place; the same array is returned.
    return cv2.medianBlur(image, 9, image)
def unsharp(image):
    """Sharpen *image* in place with an unsharp mask and return it.

    Computes 1.5*image - 0.5*gaussian_blur(image), i.e. boosts detail by
    subtracting a heavily blurred copy.
    """
    blurred = cv2.GaussianBlur(image, (21, 21), 21)
    # Bug fix: the result used to be assigned to a misspelled 'iamge';
    # keep the returned reference (dst=image also writes in place).
    image = cv2.addWeighted(image, 1.5, blurred, -0.5, 0, image)
    return image
def harris(image):
    """Mark the mean corner location of the left and right thirds of
    *image* with small circles and return the annotated image.

    Expects a single-channel image (runs after grayscale in this script).
    """
    # Bug fix: use floor division -- plain '/' yields a float under
    # Python 3, which is not a valid slice index.
    x33 = image.shape[1] // 3
    x66 = image.shape[1] // 3 * 2

    dst1 = cv2.goodFeaturesToTrack(image[:, :x33], 10, 0.1, 5)
    # NOTE(review): np.uint8 truncates coordinates above 255 -- suspect
    # for images wider/taller than 256 px; confirm intent.
    mean1 = np.uint8(cv2.mean(dst1))
    cv2.circle(image, (mean1[0], mean1[1]), 2, 255)

    dst2 = cv2.goodFeaturesToTrack(image[:, x66:], 10, 0.1, 5)
    dst2 += [x66, 0]  # shift right-third coordinates back to full-image space
    mean2 = np.uint8(cv2.mean(dst2))
    cv2.circle(image, (mean2[0], mean2[1]), 2, 255)

    return image
if __name__ == '__main__':
    # Usage: python script.py <image-path>. Steps run in the order added,
    # each advancing on a key press. Note unsharp() is defined above but
    # never queued here.
    image = cv2.imread(argv[1])
    test = Test('Test', image)
    test.add(grayscale)
    test.add(median)
    test.add(harris)
    test.run()
|
#!/usr/bin/env python
# -*- coding: utf8 -*-
#
# autocomplit.py
#
# Copyright 2011 Basmanov Illya <ffsdmad@gmail.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
import config
import ffcgi
from base import bd_sql
from xslt_proc import xml_xsl_proc
import os, sys, inspect
sys.stderr = sys.stdout
from user import user
#print "Content-Type: text/html; charset=utf8\n\n"
import libs

# Root directory scanned (recursively) for plugin modules (*.py files).
PLGPATH = "plugins2"
def make_plugin_xml(plg_name):
    """Return a "/attr1/attr2..." listing of the public attributes of the
    plugin module *plg_name*, or "" when the module cannot be loaded."""
    plg = libs.load_module_name(plg_name)
    if not plg:
        return ""
    return "".join("/" + attr for attr in dir(plg) if attr[:2] != "__")
class plugin_manager():
    """Scan PLGPATH for plugin modules and print (CGI-style) an XML
    listing of them rendered through the af-web XSL stylesheet."""

    def __init__(self):
        # user[3] is presumably a per-user XML prefix -- TODO confirm.
        xml = user[3]+"""<plugins>plugin_manager</plugins>"""
        txml = ""
        for root, dirs, files in os.walk(PLGPATH):
            for f in files:
                # Every *.py except package __init__ counts as a plugin.
                if f[-3:]==".py" and f!="__init__.py":
                    plg_path = root + "/" + f
                    # "plugins2/a/b.py" -> dotted module name "a.b"
                    plg_name = plg_path.replace("/", ".")[len(PLGPATH)+1:-3]
                    txml += "<row><plg_name>%s</plg_name><plg_path>%s</plg_path><title>%s</title></row>"% (plg_name, plg_path, make_plugin_xml(plg_name) )
        txml = "<plugin_manager>%s</plugin_manager>" % txml
        xml = "<doc>%s</doc>"%(xml+txml)
        xsl = "data/af-web.xsl"
        # Persist the generated XML for debugging, then emit the HTML.
        libs.save_xml(xml, __file__ )
        print xml_xsl_proc(xml,fxsl=xsl)
def main():
    """Entry point; nothing to do when run directly."""
    return 0


if __name__ == '__main__':
    main()
|
cla | ss Event:
MESSAGE = 'MESSAGE'
JOIN = 'JOIN'
LEAVE = 'LEAVE'
def __init__(self, _type, user, text):
self._type = _type
self.user = user
self.text = text
def __repr__(self):
return '[{}] {}: {}'.format(self._type, self.user, self.text)
def __str__(self):
return self.__repr__()
def message(user, text):
    """Build a MESSAGE event carrying *text* from *user*."""
    return Event(Event.MESSAGE, user, text)
def join(user):
    """Build a JOIN event announcing that *user* entered the room."""
    return Event(Event.JOIN, user, 'joined the room')
def leave(user):
    """Build a LEAVE event announcing that *user* left the room.

    Fixes the user-facing text: 'leaved the room' -> 'left the room'.
    """
    return Event(Event.LEAVE, user, 'left the room')
|
"""
Map urls to the relevant view handlers
"""
from django.conf.urls import url
from openedx.core.djan | goapps.zendesk_proxy.v0.views import ZendeskPassthroughView as v0_view
from openedx.core.djangoapps.zendesk_proxy.v1.views im | port ZendeskPassthroughView as v1_view
urlpatterns = [
url(r'^v0$', v0_view.as_view(), name='zendesk_proxy_v0'),
url(r'^v1$', v1_view.as_view(), name='zendesk_proxy_v1'),
]
|
lose()
################################################################################
# helper functions
################################################################################
def _get_cookie(name, default):
    """Return the request cookie *name*, or *default* when absent."""
    return request.cookies.get(name, default)
################################################################################
# AJAX REQUESTS
################################################################################
@app.route('/_thumb_up_id')
def thumb_up_id():
    """AJAX: record a +1 vote by the current user on an ili concept and
    return an HTML snippet with the refreshed up/down tallies."""
    user = fetch_id_from_userid(current_user.id)
    ili_id = request.args.get('ili_id', None)
    rate = 1
    r = rate_ili_id(ili_id, rate, user)

    # Tooltips list who voted; the visible text shows the counts.
    counts, up_who, down_who = f_rate_summary([ili_id])
    html = """ <span style="color:green" title="Who voted up: {}">+{}</span><br>
<span style="color:red" title="Who voted down: {}">-{}</span>
""".format(up_who[int(ili_id)], counts[int(ili_id)]['up'],
           down_who[int(ili_id)], counts[int(ili_id)]['down'])
    return jsonify(result=html)
@app.route('/_thumb_down_id')
def thumb_down_id():
    """AJAX: record a -1 vote by the current user on an ili concept and
    return an HTML snippet with the refreshed up/down tallies."""
    user = fetch_id_from_userid(current_user.id)
    ili_id = request.args.get('ili_id', None)
    rate = -1
    r = rate_ili_id(ili_id, rate, user)

    # Tooltips list who voted; the visible text shows the counts.
    counts, up_who, down_who = f_rate_summary([ili_id])
    html = """ <span style="color:green" title="Who voted up: {}">+{}</span><br>
<span style="color:red" title="Who voted down: {}">-{}</span>
""".format(up_who[int(ili_id)], counts[int(ili_id)]['up'],
           down_who[int(ili_id)], counts[int(ili_id)]['down'])
    return jsonify(result=html)
@app.route('/_comment_id')
def comment_id():
    """AJAX: attach an HTML-escaped comment to an ili concept on behalf
    of the current user; returns the DB insert status."""
    commenter = fetch_id_from_userid(current_user.id)
    ili_id = request.args.get('ili_id', None)
    text = str(Markup.escape(request.args.get('comment', None)))
    return jsonify(result=comment_ili_id(ili_id, text, commenter))
@app.route('/_detailed_id')
def detailed_id():
    """AJAX: render the full rating and comment history of one ili
    concept as a two-column HTML table-cell fragment."""
    ili_id = request.args.get('ili_id', None)
    rate_hist = fetch_rate_id([ili_id])
    comm_hist = fetch_comment_id([ili_id])
    users = fetch_allusers()

    # One line per historical rating: "Full Name (userID — time): value"
    r_html = ""
    for r, u, t in rate_hist[int(ili_id)]:
        r_html += '{} ({} — {}): {} <br>'.format(
            users[u]['full_name'], users[u]['userID'], t, r)

    # Same format for the comment history.
    c_html = ""
    for c, u, t in comm_hist[int(ili_id)]:
        c_html += '{} ({} — {}): {} <br>'.format(
            users[u]['full_name'], users[u]['userID'], t, c)

    html = """
<td colspan="9">
<div style="width: 49%; float:left;">
<h6>Ratings</h6>
{}</div>
<div style="width: 49%; float:right;">
<h6>Comments</h6>
{}</div>
</td>""".format(r_html, c_html)
    return jsonify(result=html)
@app.route('/_confirm_wn_upload')
def confirm_wn_upload_id():
    """
    Ingest the uploaded wordnet into the database and return a report.
    This happens when the user has confirmed they want to add a
    validated wordnet.
    """
    uploader = fetch_id_from_userid(current_user.id)
    report = ingest_wordnet(request.args.get('fn', None), uploader)
    updateLabels()
    return jsonify(result=report)
@app.route('/_add_new_project')
def add_new_project():
    """AJAX: create a new project from the escaped 'proj_code' argument;
    returns the insert status, or False when user/code are missing."""
    creator = fetch_id_from_userid(current_user.id)
    code = request.args.get('proj_code', None)
    # NOTE(review): Markup.escape(None) yields the truthy string 'None',
    # so the guard below never rejects a missing proj_code in practice.
    code = str(Markup.escape(code))
    if not (creator and code):
        return jsonify(result=False)
    return jsonify(result=insert_new_project(code, creator))
@app.route("/_load_lang_selector",methods=["GET"])
def omw_lang_selector():
selected_lang = int(_get_cookie('selected_lang', 1))
selected_lang2 = int(_get_cookie('selected_lang', 1))
lang_id, lang_code = fetch_langs()
html = '<select name="lang" style="font-size: 85%; width: 9em" required>'
for lid in lang_id.keys():
if selected_lang == lid:
html += """<option value="{}" selected>{}</option>
""".format(lid, lang_id[lid][1])
else:
html += """<option value="{}">{}</option>
""".format(lid, lang_id[lid][1])
html += '</select>'
html += '<select name="lang2" style="font-size: 85%; width: 9em" required>'
for lid in lang_id.keys():
if selected_lang2 == lid:
html += """<option value="{}" selected>{}</option>
""".format(lid, lang_id[lid][1])
else:
html += """<option value="{}">{}</option>
""".format(lid, lang_id[lid][1])
html += '</select>'
return jsonify(result=html)
@app.route('/_add_new_language')
def add_new_language():
    """AJAX: register a new language (BCP-47 code, ISO code, name);
    returns the insert status, or False when bcp/name are missing."""
    creator = fetch_id_from_userid(current_user.id)
    bcp = str(Markup.escape(request.args.get('bcp', None)))
    iso = str(Markup.escape(request.args.get('iso', None)))
    name = str(Markup.escape(request.args.get('name', None)))
    # NOTE(review): Markup.escape(None) yields the truthy string 'None',
    # so this guard never rejects missing parameters in practice.
    if bcp and name:
        return jsonify(result=insert_new_language(bcp, iso, name, creator))
    return jsonify(result=False)
@app.route('/_load_proj_details')
def load_proj_details():
    """AJAX: render the sources (and their metadata) belonging to the
    project given by the 'proj' argument; empty result for no project."""
    raw = request.args.get('proj', 0)
    proj_id = int(raw) if raw else None

    projs = fetch_proj()
    srcs = fetch_src()
    srcs_meta = fetch_src_meta()

    html = ""
    if proj_id:
        n = 0
        for src_id in srcs.keys():
            # Keep only sources attached to the requested project.
            if srcs[src_id][0] != projs[proj_id]:
                continue
            n += 1
            html += "<br><p><b>Source {}: {}-{}</b></p>".format(n, projs[proj_id], srcs[src_id][1])
            for attr, val in srcs_meta[src_id].items():
                html += "<p style='margin-left: 40px'>"
                html += attr + ": " + val
                html += "</p>"
    return jsonify(result=html)
@app.route('/_load_min_omw_concept/<ss>')
@app.route('/_load_min_omw_concept_ili/<ili_id>')
def min_omw_concepts(ss=None, ili_id=None):
    """Render the minimal concept view for either a synset id (*ss*) or
    an ILI id (*ili_id*), depending on which route was hit."""
    ss_ids = f_ss_id_by_ili_id(ili_id) if ili_id else [ss]

    pos = fetch_pos()
    langs_id, langs_code = fetch_langs()
    ss, senses, defs, exes, links = fetch_ss_basic(ss_ids)
    ssrels = fetch_ssrel()
    selected_lang = int(_get_cookie('selected_lang', 1))
    labels = fetch_labels(selected_lang, set(senses.keys()))

    rendered = render_template('min_omw_concept.html',
                               pos=pos,
                               langs=langs_id,
                               senses=senses,
                               ss=ss,
                               links=links,
                               ssrels=ssrels,
                               defs=defs,
                               exes=exes,
                               labels=labels)
    return jsonify(result=rendered)
@app.route('/_load_min_omw_sense/<sID>')
def min_omw_sense(sID=None):
if sID:
s_id=int(sID)
langs_id, langs_code = fetch_langs()
pos = fetch_pos()
sense = fetch_sense(s_id)
forms=fetch_forms(sense[3])
selected_lang = int(_get_cookie('selected_lang', 1))
labels= fetch_labels(selected_lang,[sense[4]])
src_meta= fetch_src_meta()
src_sid=fetch_src_for_s_id([s_id])
sdefs = fetch_defs_by_sense([s_id])
if selected_lang in sdefs[s_id]:
sdef = sdefs[s_id][selected_lang] ## requested language
else:
sdef = sdefs[s_id][min(sdefs[s_id].keys())] ## a language
if not sdef:
sdef="no definition"
# return jsonify(result=render_template('omw_sense.html',
return jsonify(result=render_template('min_omw_sense.html',
s_id = s_id,
sdef=sdef,
sense = sense,
forms=forms,
langs = langs_id,
|
e", "portal"])
def discover_n | o_credentials(builder):
return Credentia | ls(STYLE_NONE,
builder.get_object("targetEntry").get_text(),
builder.get_object("initiatorEntry").get_text(),
"", "", "", "")
def discover_chap(builder):
    """Build discovery Credentials for forward CHAP: target, initiator,
    plus the CHAP username/password fields; reverse fields stay blank."""
    def text_of(name):
        return builder.get_object(name).get_text()
    return Credentials(STYLE_CHAP,
                       text_of("targetEntry"),
                       text_of("initiatorEntry"),
                       text_of("chapUsernameEntry"),
                       text_of("chapPasswordEntry"),
                       "", "")
def discover_reverse_chap(builder):
    """Build discovery Credentials for mutual (reverse) CHAP: target,
    initiator, and both the forward and reverse username/password pairs."""
    def text_of(name):
        return builder.get_object(name).get_text()
    return Credentials(STYLE_REVERSE_CHAP,
                       text_of("targetEntry"),
                       text_of("initiatorEntry"),
                       text_of("rchapUsernameEntry"),
                       text_of("rchapPasswordEntry"),
                       text_of("rchapReverseUsername"),
                       text_of("rchapReversePassword"))
# This list maps the current page from the authNotebook to a function to grab
# credentials out of the UI. This works as long as authNotebook keeps the
# filler page at the front (page 0 = no auth, 1 = CHAP, 2 = reverse CHAP).
discoverMap = [discover_no_credentials, discover_chap, discover_reverse_chap]
def login_no_credentials(builder):
    """Login Credentials with no authentication; target/initiator come
    from discovery, so every field is blank."""
    return Credentials(STYLE_NONE, "", "", "", "", "", "")
def login_chap(builder):
    """Login Credentials for forward CHAP from the login-phase entries;
    target/initiator come from discovery, reverse fields stay blank."""
    def text_of(name):
        return builder.get_object(name).get_text()
    return Credentials(STYLE_CHAP,
                       "", "",
                       text_of("loginChapUsernameEntry"),
                       text_of("loginChapPasswordEntry"),
                       "", "")
def login_reverse_chap(builder):
    """Login Credentials for mutual (reverse) CHAP from the login-phase
    entries; target/initiator come from discovery."""
    def text_of(name):
        return builder.get_object(name).get_text()
    return Credentials(STYLE_REVERSE_CHAP,
                       "", "",
                       text_of("loginRchapUsernameEntry"),
                       text_of("loginRchapPasswordEntry"),
                       text_of("loginRchapReverseUsername"),
                       text_of("loginRchapReversePassword"))
# And this list maps the current page from the loginAuthNotebook to a function
# to grab credentials out of the UI. This works as long as loginAuthNotebook
# keeps the filler page at the front, and we check to make sure "Use the
# credentials from discovery" is not selected first.
# (Index order matches discoverMap: none, CHAP, reverse CHAP.)
loginMap = [login_no_credentials, login_chap, login_reverse_chap]
def credentials_valid(credentials):
    """Return True when every username/password field required by the
    credentials' CHAP style is filled in (usernames ignore whitespace)."""
    style = credentials.style
    if style == STYLE_NONE:
        return True
    if style == STYLE_CHAP:
        return credentials.username.strip() != "" and credentials.password != ""
    if style == STYLE_REVERSE_CHAP:
        return (credentials.username.strip() != "" and credentials.password != "" and
                credentials.rUsername.strip() != "" and credentials.rPassword != "")
class ISCSIDialog(GUIObject):
"""
.. inheritance-diagram:: ISCSIDialog
:parts: 3
"""
builderObjects = ["iscsiDialog", "nodeStore", "nodeStoreFiltered"]
mainWidgetName = "iscsiDialog"
uiFile = "spokes/advstorage/iscsi.glade"
def __init__(self, data, storage):
    """Cache the storage/iscsi handles and look up every widget the
    dialog manipulates, so later callbacks avoid repeated lookups."""
    GUIObject.__init__(self, data)
    self.storage = storage
    self.iscsi = self.storage.iscsi()

    # Discovery/login outcome state, filled in by the worker threads.
    self._discoveryError = None
    self._loginError = False
    self._discoveredNodes = []
    # Set when a login succeeds so run() repopulates the devicetree.
    self._update_devicetree = False

    self._authTypeCombo = self.builder.get_object("authTypeCombo")
    self._authNotebook = self.builder.get_object("authNotebook")
    self._iscsiNotebook = self.builder.get_object("iscsiNotebook")

    self._loginButton = self.builder.get_object("loginButton")
    self._retryLoginButton = self.builder.get_object("retryLoginButton")
    self._loginAuthTypeCombo = self.builder.get_object("loginAuthTypeCombo")
    self._loginAuthNotebook = self.builder.get_object("loginAuthNotebook")
    self._loginGrid = self.builder.get_object("loginGrid")
    self._loginConditionNotebook = self.builder.get_object("loginConditionNotebook")

    self._configureGrid = self.builder.get_object("configureGrid")
    self._conditionNotebook = self.builder.get_object("conditionNotebook")

    self._bindCheckbox = self.builder.get_object("bindCheckbutton")
    self._startButton = self.builder.get_object("startButton")
    self._okButton = self.builder.get_object("okButton")
    self._cancelButton = self.builder.get_object("cancelButton")
    self._retryButton = self.builder.get_object("retryButton")

    self._initiatorEntry = self.builder.get_object("initiatorEntry")

    self._store = self.builder.get_object("nodeStore")
    self._storeFilter = self.builder.get_object("nodeStoreFiltered")
def refresh(self):
    """Reset the dialog widgets to their initial state before showing."""
    # Bind checkbox mirrors whether ifaces already exist; it can only be
    # changed while binding has not yet been configured ("none" mode).
    self._bindCheckbox.set_active(bool(self.iscsi.ifaces))
    self._bindCheckbox.set_sensitive(self.iscsi.mode == "none")

    self._authTypeCombo.set_active(0)
    self._startButton.set_sensitive(True)
    self._loginAuthTypeCombo.set_active(0)

    # Column 1 of the node store flags row visibility for the filter model.
    self._storeFilter.set_visible_column(1)

    self._initiatorEntry.set_text(self.iscsi.initiator)
    # The initiator name cannot be edited once it has been set elsewhere.
    self._initiatorEntry.set_sensitive(not self.iscsi.initiatorSet)
@property
def selectedNames(self):
    """Node names (column 2) of every checked row (column 0) in the store."""
    names = []
    for row in self._store:
        if row[0]:
            names.append(row[2])
    return names
def run(self):
    """Show the dialog modally and return the GTK response code."""
    rc = self.window.run()
    self.window.destroy()
    # We need to call this to get the device nodes to show up
    # in our devicetree.
    if self._update_devicetree:
        self.storage.devicetree.populate()
    return rc
##
## DISCOVERY
##
def on_auth_type_changed(self, widget, *args):
    """Flip the auth notebook to the page matching the chosen auth type."""
    page = widget.get_active()
    self._authNotebook.set_current_page(page)
    # Re-validate the credential fields so the Start button sensitivity
    # tracks the newly visible page.
    self.on_discover_field_changed()
def _discover(self, credentials, bind):
    """Run iSCSI discovery with *credentials*.

    Results land in self._discoveredNodes; failures are recorded in
    self._discoveryError rather than raised.
    """
    # This needs to be in its own thread, not marked with gtk_action_* because it's
    # called from on_start_clicked, which is in the GTK main loop. Those decorators
    # won't do anything special in that case.
    if not self.iscsi.initiatorSet:
        self.iscsi.initiator = credentials.initiator

    # interfaces created here affect nodes that iscsi.discover would return
    if self.iscsi.mode == "none" and not bind:
        self.iscsi.delete_interfaces()
    elif (self.iscsi.mode == "bind"
          or self.iscsi.mode == "none" and bind):
        activated = set(nm.nm_activated_devices())
        # The only place iscsi.ifaces is modified is create_interfaces(),
        # right below, so iteration is safe.
        created = set(self.iscsi.ifaces.values())
        self.iscsi.create_interfaces(activated - created)

    try:
        self._discoveredNodes = self.iscsi.discover(credentials.targetIP,
                                                    username=credentials.username,
                                                    password=credentials.password,
                                                    r_username=credentials.rUsername,
                                                    r_password=credentials.rPassword)
    except IOError as e:
        self._discoveryError = str(e)
        return

    if len(self._discoveredNodes) == 0:
        self._discoveryError = "No nodes discovered."
def _check_discover(self, *args):
if threadMgr.get(constants.THREAD_ISCSI_DISCOVER):
return True
# When iscsi discovery is done, update the UI. We don't need to worry
# about the user escaping from the dialog because all the buttons are
# marked insensitive.
spinner = self.builder.get_object("waitSpinner")
spinner.stop()
if self. |
# -*- coding: utf-8 -*-
#
# MAdmin documentation build configuration file, created by
# sphinx-quickstart on Wed Sep 25 13:48:36 2013.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = []
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'MAdmin'
copyright = u'2013, Dominik Schacht'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.1.1'
# The full version, including alpha/beta/rc tags.
release = '0.1.1 Alpha'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'MAdmindoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto/manual]).
latex_documents = [
('index', 'MAdmin.tex', u'MAdmin Documentation',
u'Dominik Schacht', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'madmin', u'MAdmin Documentation',
[u'Dominik Schacht'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
  ('index', 'MAdmin', u'MAdmin Documentation',
   u'Dominik Schacht', 'MAdmin', 'One line description of project.',
   'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
|
# encoding: utf8
from django.db import models, migrations
class Migration(migrations.Migration):
    """Add the nullable ``widget`` text field to the ``module_list`` model."""

    # Repaired garbled migration name ('0012_auto_201 | 40209_0400').
    dependencies = [
        ('infrastructure', '0012_auto_20140209_0400'),
    ]

    operations = [
        migrations.AddField(
            model_name='module_list',
            name='widget',
            # Nullable and optional so existing rows need no default value.
            field=models.TextField(null=True, blank=True),
            preserve_default=True,
        ),
    ]
|
# Copyright (C) 2017-2019 Dmitry Marakasov <amdmi3@amdmi3.ru>
#
# This file is part of repology
#
# repology is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# repology is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with repology. If not, see <http://www.gnu.org/licenses/>.
import re
from typing import Iterable
from repology.packagemaker import NameType, PackageFactory, PackageMaker
from repology.parsers import Parser
class CRANCheckSummaryParser(Parser):
    """Parses the CRAN check-summary HTML page, one package per table row."""

    # One <tr> per package: the first cell links to the package page (the
    # package name), the second cell holds the version.  Compiled once at
    # class-creation time instead of on every input line.
    _ROW_RE = re.compile('<tr> <td> <a href="[^"]+">([^<>]+)</a> </td> <td>[ ]*([^ <>]+)[ ]*</td>')

    def iter_parse(self, path: str, factory: PackageFactory) -> Iterable[PackageMaker]:
        """Yield one PackageMaker per recognized package row in *path*."""
        with open(path, 'r', encoding='utf-8') as htmlfile:
            for nline, line in enumerate(htmlfile, 1):
                match = self._ROW_RE.search(line)
                if match:
                    pkg = factory.begin('line {}'.format(nline))
                    pkg.add_name(match[1], NameType.CRAN_NAME)
                    pkg.set_version(match[2])
                    yield pkg
|
lf, orm):
# Adding field 'PurpleRobotDevice.first_reading_timestamp'
db.add_column(u'purple_robot_app_purplerobotdevice', 'first_reading_timestamp',
self.gf('django.db.models.fields.BigIntegerField')(default=0),
keep_default=False)
    def backwards(self, orm):
        """Reverse the migration: drop ``first_reading_timestamp`` again."""
        # Deleting field 'PurpleRobotDevice.first_reading_timestamp'
        db.delete_column(u'purple_robot_app_purplerobotdevice', 'first_reading_timestamp')
models = {
u'purple_robot_app.purplerobotalert': {
'Meta': {'object_name': 'PurpleRobotAlert'},
'action_url': ('django.db.models.fields.URLField', [], {'max_length': '1024', 'null': 'True', 'blank': 'True'}),
'dismissed': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'generated': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'manually_dismissed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'message': ('django.db.models.fields.CharField', [], {'max_length': '2048'}),
'probe': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'null': 'True', 'blank': 'True'}),
'severity': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'tags': ('django.db.models.fields.CharField', [], {'max_length': '2048', 'null': 'True', 'blank': 'True'}),
'user_id': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'null': 'True', 'blank': 'True'})
},
u'purple_robot_app.purplerobotconfiguration': {
'Meta': {'object_name': 'PurpleRobotConfiguration'},
'added': ('django.db.models.fields.DateTimeField', [], {}),
'contents': ('django.db.models.fields.TextField', [], {'max_length': '1048576'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '1024'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '1024'})
},
u'purple_robot_app.purplerobotdevice': {
'Meta': {'object_name': 'PurpleRobotDevice'},
'config_last_fetched': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'config_last_user_agent': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'null': 'True', 'blank': 'True'}),
'configuration': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'devices'", 'null': 'True', 'to': u"orm['purple_robot_app.PurpleRobotConfiguration']"}),
'description': ('django.db.models.fields.TextField', [], {'max_length': '1048576', 'null': 'True', 'blank': 'True'}),
'device_group': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'devices'", 'null': 'True', 'to': u"orm['purple_robot_app.PurpleRobotDeviceGroup']"}),
'device_id': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '256', 'db_index': 'True'}),
'first_reading_timestamp': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'hash_key': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'mute_alerts': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '1024'}),
'performance_metadata': ('django.db.models.fields.TextField', [], {'default': "'{}'", 'max_length': '1048576'}),
'test_device': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
u'purple_robot_app.purplerobotdevicegroup': {
'Meta': {'object_name': 'PurpleRobotDeviceGroup'},
'configuration': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'groups'", 'null': | 'True', 'to': u"orm['purple_robot_app.PurpleRobotConfiguration']"}),
'description': ('django.db.models.fields.TextField', [], {'max_length': '1048576', 'null': 'True', 'blank': 'True'}),
'group_id': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '256'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields. | CharField', [], {'max_length': '1024'})
},
u'purple_robot_app.purplerobotdevicenote': {
'Meta': {'object_name': 'PurpleRobotDeviceNote'},
'added': ('django.db.models.fields.DateTimeField', [], {}),
'device': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'notes'", 'to': u"orm['purple_robot_app.PurpleRobotDevice']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'note': ('django.db.models.fields.TextField', [], {'max_length': '1024'})
},
u'purple_robot_app.purplerobotevent': {
'Meta': {'object_name': 'PurpleRobotEvent'},
'event': ('django.db.models.fields.CharField', [], {'max_length': '1024'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'logged': ('django.db.models.fields.DateTimeField', [], {}),
'name': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '1024', 'null': 'True', 'blank': 'True'}),
'payload': ('django.db.models.fields.TextField', [], {'max_length': '8388608', 'null': 'True', 'blank': 'True'}),
'user_id': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'db_index': 'True'})
},
u'purple_robot_app.purplerobotexportjob': {
'Meta': {'object_name': 'PurpleRobotExportJob'},
'destination': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
'end_date': ('django.db.models.fields.DateField', [], {}),
'export_file': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'probes': ('django.db.models.fields.TextField', [], {'max_length': '8196', 'null': 'True', 'blank': 'True'}),
'start_date': ('django.db.models.fields.DateField', [], {}),
'state': ('django.db.models.fields.CharField', [], {'default': "'pending'", 'max_length': '512'}),
'users': ('django.db.models.fields.TextField', [], {'max_length': '8196', 'null': 'True', 'blank': 'True'})
},
u'purple_robot_app.purplerobotpayload': {
'Meta': {'object_name': 'PurpleRobotPayload'},
'added': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'errors': ('django.db.models.fields.TextField', [], {'max_length': '65536', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'payload': ('django.db.models.fields.TextField', [], {'max_length': '8388608'}),
'process_tags': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '1024', 'null': 'True', 'blank': 'True'}),
'user_id': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'db_index': 'True'})
},
u'purple_robot_app.purplerobotreading': {
'Meta': {'object_name': 'PurpleRobotReading', 'index_together': "[['probe', 'user_id'], ['logged', 'user_id'], ['probe', 'logged', 'user_id']]"},
'attachment': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'guid': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '1024', 'nu |
from os import listdir
from os.path import isfile, join
import paer

# Convert every .aedat recording in ./aedat/ into downsampled 16x16 PNG frames.
mypath = 'aedat/'
onlyfiles = [f for f in listdir(mypath) if isfile(join(mypath, f)) and f.endswith('.aedat')]
# 'fname' instead of 'file' to avoid shadowing the builtin.
for fname in onlyfiles:
    ae = paer.aefile(mypath + str(fname))
    aed = paer.aedata(ae).downsample((16, 16))
    paer.create_pngs(aed, '16x16_' + str(fname) + '_', path='more_images/temp', step=3000, dim=(16, 16))
|
self.name = self.orig_name = name
self.scope = scope
class TestPythonHandler(unittest.TestCase):
    """Tests for _PythonHandler introspection of plain Python keyword methods."""

    def test_name(self):
        for meth in _get_handler_methods(NameLibrary()):
            h = _PythonHandler(LibraryMock('mylib'), meth.__name__, meth)
            assert_equal(h.name, meth.__doc__)
            assert_equal(h.longname, 'mylib.' + meth.__doc__)

    def test_docs(self):
        for meth in _get_handler_methods(DocLibrary()):
            h = _PythonHandler(LibraryMock(), meth.__name__, meth)
            assert_equal(h.doc, meth.expected_doc)
            assert_equal(h.shortdoc, meth.expected_shortdoc)

    def test_arguments(self):
        for meth in _get_handler_methods(ArgInfoLibrary()):
            h = _PythonHandler(LibraryMock(), meth.__name__, meth)
            spec = h.arguments
            actual = (spec.positional, spec.defaults, spec.varargs, spec.kwargs)
            # Each library method documents its expected argspec tuple in its docstring.
            assert_equal(actual, eval(meth.__doc__), meth.__name__)

    def test_arg_limits(self):
        for meth in _get_handler_methods(ArgumentsPython()):
            h = _PythonHandler(LibraryMock(), meth.__name__, meth)
            expected_min, expected_max = eval(meth.__doc__)
            assert_equal(h.arguments.minargs, expected_min)
            assert_equal(h.arguments.maxargs, expected_max)

    def test_getarginfo_getattr(self):
        handlers = TestLibrary('classes.GetattrLibrary').handlers
        assert_equal(len(handlers), 3)
        for h in handlers:
            assert_true(h.name in ['Foo', 'Bar', 'Zap'])
            assert_equal(h.arguments.minargs, 0)
            assert_equal(h.arguments.maxargs, sys.maxsize)
class TestDynamicHandlerCreation(unittest.TestCase):
    """Tests for building DynamicHandler from a dynamic library's
    get_keyword_documentation / get_keyword_arguments return values."""
    def test_none_doc(self):
        self._assert_doc(None, '')
    def test_empty_doc(self):
        self._assert_doc('')
    def test_non_empty_doc(self):
        self._assert_doc('This is some documentation')
    def test_non_ascii_doc(self):
        self._assert_doc(u'P\xe4iv\xe4\xe4')
    if not utils.IRONPYTHON:
        # IronPython cannot decode UTF-8 byte docs the same way; skipped there.
        def test_with_utf8_doc(self):
            doc = u'P\xe4iv\xe4\xe4'
            self._assert_doc(doc.encode('UTF-8'), doc)
    def test_invalid_doc_type(self):
        self._assert_fails('Return value must be string.', doc=True)
    def test_none_argspec(self):
        self._assert_spec(None, maxargs=sys.maxsize, vararg='varargs', kwarg=False)
    def test_none_argspec_when_kwargs_supported(self):
        self._assert_spec(None, maxargs=sys.maxsize, vararg='varargs', kwarg='kwargs')
    def test_empty_argspec(self):
        self._assert_spec([])
    def test_mandatory_args(self):
        for argspec in [['arg'], ['arg1', 'arg2', 'arg3']]:
            self._assert_spec(argspec, len(argspec), len(argspec), argspec)
    def test_only_default_args(self):
        self._assert_spec(['defarg1=value', 'defarg2=defvalue'], 0, 2,
                          ['defarg1', 'defarg2'], ['value', 'defvalue'])
    def test_default_value_may_contain_equal_sign(self):
        self._assert_spec(['d=foo=bar'], 0, 1, ['d'], ['foo=bar'])
    def test_varargs(self):
        self._assert_spec(['*vararg'], 0, sys.maxsize, vararg='vararg')
    def test_kwargs(self):
        self._assert_spec(['**kwarg'], 0, 0, kwarg='kwarg')
    def test_varargs_and_kwargs(self):
        self._assert_spec(['*vararg', '**kwarg'],
                          0, sys.maxsize, vararg='vararg', kwarg='kwarg')
    def test_integration(self):
        self._assert_spec(['arg', 'default=value'], 1, 2,
                          ['arg', 'default'], ['value'])
        self._assert_spec(['arg', 'default=value', '*var'], 1, sys.maxsize,
                          ['arg', 'default'], ['value'], 'var')
        self._assert_spec(['arg', 'default=value', '**kw'], 1, 2,
                          ['arg', 'default'], ['value'], None, 'kw')
        self._assert_spec(['arg', 'default=value', '*var', '**kw'], 1, sys.maxsize,
                          ['arg', 'default'], ['value'], 'var', 'kw')
    def test_invalid_argspec_type(self):
        for argspec in [True, [1, 2]]:
            self._assert_fails("Return value must be list of strings.", argspec)
    def test_mandatory_arg_after_default_arg(self):
        for argspec in [['d=v', 'arg'], ['a', 'b', 'c=v', 'd']]:
            self._assert_fails('Invalid argument specification: '
                               'Non-default argument after default arguments.',
                               argspec)
    def test_positional_after_vararg(self):
        for argspec in [['*foo', 'arg'], ['arg', '*var', 'arg'],
                        ['a', 'b=d', '*var', 'c'], ['*var', '*vararg']]:
            self._assert_fails('Invalid argument specification: '
                               'Positional argument after varargs.', argspec)
    def test_kwarg_not_last(self):
        for argspec in [['**foo', 'arg'], ['arg', '**kw', 'arg'],
                        ['a', 'b=d', '**kw', 'c'], ['**kw', '*vararg'],
                        ['**kw', '**kwarg']]:
            self._assert_fails('Invalid argument specification: '
                               'Only last argument can be kwargs.', argspec)
    def test_missing_kwargs_support(self):
        self._assert_fails("Too few 'run_keyword' method parameters"
                           " for **kwargs support.",
                           ['**kwargs'])
    def _assert_doc(self, doc, expected=None):
        expected = doc if expected is None else expected
        assert_equal(self._create_handler(doc=doc).doc, expected)
    def _assert_spec(self, argspec, minargs=0, maxargs=0, positional=[],
                     defaults=[], vararg=None, kwarg=None):
        # kwarg=False: must work without kwargs support.
        # kwarg=None:  verify under both support modes (kwargs expected absent).
        if kwarg is None:
            kwargs_support_modes = [True, False]
        elif kwarg is False:
            kwargs_support_modes = [False]
            kwarg = None
        else:
            kwargs_support_modes = [True]
        for kwargs_support in kwargs_support_modes:
            arguments = self._create_handler(argspec,
                                             kwargs_support=kwargs_support
                                             ).arguments
            assert_equal(arguments.minargs, minargs)
            assert_equal(arguments.maxargs, maxargs)
            assert_equal(arguments.positional, positional)
            assert_equal(arguments.defaults, defaults)
            assert_equal(arguments.varargs, vararg)
            assert_equal(arguments.kwargs, kwarg)
    def _assert_fails(self, error, argspec=None, doc=None):
        assert_raises_with_msg(DataError, error,
                               self._create_handler, argspec, doc)
    def _create_handler(self, argspec=None, doc=None, kwargs_support=False):
        # Build a minimal dynamic-library mock whose run_keyword signature
        # either does or does not accept **kwargs.
        lib = LibraryMock('TEST CASE')
        if kwargs_support:
            lib.run_keyword = lambda name, args, kwargs: None
        else:
            lib.run_keyword = lambda name, args: None
        lib.run_keyword.__name__ = 'run_keyword'
        doc = GetKeywordDocumentation(lib)._handle_return_value(doc)
        argspec = GetKeywordArguments(lib)._handle_return_value(argspec)
        return DynamicHandler(lib, 'mock', RunKeyword(lib), doc, argspec)
if utils.JYTHON:
handlers = dict((method.__name__, method) for method in
_get_java_handler_methods(ArgumentsJava('Arg', ['varargs'])))
class TestJavaHandler(unittest.TestCase):
def test_arg_limits_no_defaults_or_varargs(self):
for count in [0, 1, 3]:
method = handlers['a_%d' % count]
handler = _JavaHandler(LibraryMock(), method.__name__, method)
assert_equal(handler.arguments.minargs, count)
assert_equal(handler.arguments.maxargs, count)
def test_arg_limits_with_varargs(self):
for count in [0, 1]:
method = handlers['a_%d_n' % count]
| handler = _JavaHandler(LibraryMock(), method.__name__, met | hod)
assert_equal(handler.arguments.minargs, count)
asse |
from django.db.models import get_model
from django.utils.translation import ugettext
# a notice like "foo and bar are now friends" is stored in the database
# as "{auth.User.5} and {auth.User.7} are now friends".
#
# encode_object takes an object and turns it into "{app.Model.pk}" or
# "{app.Model.pk.msgid}" if named arguments are used in send()
# decode_object takes "{app.Model.pk}" and turns it into the object
#
# encode_message takes either ("%s and %s are now friends", [foo, bar]) or
# ("%(foo)s and %(bar)s are now friends", {'foo':foo, 'bar':bar}) and turns
# it into "{auth.User.5} and {auth.User.7} are now friends".
#
# decode_message takes "{auth.User.5} and {auth.User.7}" and converts it
# into a string using the given decode function to convert the object to
# string representation
#
# message_to_text and message_to_html use decode_message to produce a
# text and html version of the message respectively.
def encode_object(obj, name=None):
    """Encode *obj* as "{app.Model.pk}", or "{app.Model.pk.name}" when *name* is given."""
    meta = obj._meta
    parts = [meta.app_label, meta.object_name, str(obj.pk)]
    if name:
        parts.append(name)
    return "{%s}" % ".".join(parts)
def encode_message(message_template, objects):
    """Interpolate encoded object references into *message_template*.

    *objects* may be a list/tuple (positional "%s" template), a dict
    (named "%(name)s" template), or None (template returned unchanged).
    Any other type yields "" as in the original behavior.
    """
    if objects is None:
        return message_template
    # isinstance with a tuple instead of two separate checks.
    if isinstance(objects, (list, tuple)):
        return message_template % tuple(encode_object(obj) for obj in objects)
    # isinstance instead of `type(...) is dict`; .items() instead of the
    # Python-2-only .iteritems() (identical result on both versions).
    if isinstance(objects, dict):
        return message_template % dict((name, encode_object(obj, name)) for name, obj in objects.items())
    return ""
def decode_object(ref):
    """Decode an "app.Model.pk[.msgid]" payload into (model instance, msgid)."""
    parts = ref.split(".")
    if len(parts) == 4:
        app_label, model_name, pk, msgid = parts
    else:
        app_label, model_name, pk = parts
        msgid = None
    return get_model(app_label, model_name).objects.get(pk=pk), msgid
class FormatException(Exception):
    """Raised when an encoded message contains unbalanced or nested braces."""
    pass
def decode_message(message, decoder):
    """Decode an encoded message such as "{auth.User.5} and {auth.User.7}".

    Scans *message* for "{...}" references, resolves each through *decoder*
    (which must return an (object-representation, msgid-or-None) pair), and
    rebuilds a %-style template that is translated with ugettext and then
    interpolated with the decoded objects.

    Raises FormatException on unbalanced or nested braces.
    """
    out = []
    objects = []   # positional substitutions, rendered as "%s"
    mapping = {}   # named substitutions, rendered as "%(msgid)s"
    in_field = False
    prev = 0
    # Track the scan position explicitly: the original left `index` unbound
    # for an empty message, raising NameError in the tail append below.
    index = -1
    for index, ch in enumerate(message):
        if not in_field:
            if ch == "{":
                in_field = True
                if prev != index:
                    out.append(message[prev:index])
                prev = index
            elif ch == "}":
                raise FormatException("unmatched }")
        elif in_field:
            if ch == "{":
                raise FormatException("{ inside {}")
            elif ch == "}":
                in_field = False
                obj, msgid = decoder(message[prev+1:index])
                if msgid is None:
                    objects.append(obj)
                    out.append("%s")
                else:
                    mapping[msgid] = obj
                    out.append("%(" + msgid + ")s")
                prev = index + 1
    if in_field:
        raise FormatException("unmatched {")
    if prev <= index:
        out.append(message[prev:index+1])
    result = "".join(out)
    # Named and positional placeholders are mutually exclusive per message.
    if mapping:
        args = mapping
    else:
        args = tuple(objects)
    return ugettext(result) % args
def message_to_text(message):
    """Decode *message* to plain text, rendering each referenced object with unicode()."""
    def decoder(ref):
        obj, msgid = decode_object(ref)
        return unicode(obj), msgid
    return decode_message(message, decoder)
def message_to_html(message):
    """Decode *message* to HTML, linking each object that defines get_absolute_url()."""
    def decoder(ref):
        obj, msgid = decode_object(ref)
        if hasattr(obj, "get_absolute_url"): # don't fail silently if get_absolute_url hasn't been defined
            return u"""<a href="%s">%s</a>""" % (obj.get_absolute_url(), unicode(obj)), msgid
        else:
            return unicode(obj), msgid
    return decode_message(message, decoder)
|
from commands import getoutput


def NetworkFromPrefix(val):
    """
    Return the network with the network prefix given.

    Scans the host's IP addresses ("hostname -I") and returns the first one
    starting with *val*; returns None when no address matches.
    NOTE(review): the `commands` module is Python 2 only (use
    subprocess.getoutput on Python 3) -- confirm the target interpreter.
    """
    ifaces = getoutput('hostname -I') # *nix only.
    for iface in ifaces.strip().split():
        if iface.startswith(val):
            return iface
    return None
|
# coding: utf-8
import re
from crossword import *
class Crossword2(Crossword):
    """Crossword variant that also tracks the words already placed.

    Only change versus the original besides documentation is repairing the
    garbled ' | ' artifacts on the doctest output line and the sequences
    accumulation line.
    """
    def __init__(self):
        self.grid = OpenGrid()
        self.connected = {}
        self.used_words = []
    def copy(self):
        # Copies own the grid/connected/used_words containers; contained
        # values are shared with the original.
        copied = Crossword2()
        copied.grid = self.grid.copy()
        copied.connected = self.connected.copy()
        copied.used_words = self.used_words[:]
        return copied
    def embed(self, pos, direction, word):
        assert word not in self.used_words
        super(Crossword2, self).embed(pos, direction, word)
        self.used_words.append(word)
    def all_disconnected_sequences(self):
        '''
        >>> c = Crossword2()
        >>> c.embed((0, 0), HORIZONTAL, 'ANT')
        >>> c.embed((0, 0), VERTICAL, 'ATOM')
        >>> c.embed((1, 2), HORIZONTAL, 'IT')
        >>> c.embed((3, 0), HORIZONTAL, 'MEET')
        >>> c.dump()
        _#____
        #ANT#_
        _T#IT#
        _O____
        #MEET#
        _#____
        >>> c.all_disconnected_sequences()
        [((0, 2), 2, 'T'), ((1, 0), 2, 'T'), ((2, 0), 2, 'O'), ((0, 1), 1, 'N'), ((3, 1), 1, 'E'), ((0, 2), 1, 'TI'), ((0, 2), 1, 'TI.E'), ((3, 2), 1, 'E'), ((1, 3), 1, 'T'), ((1, 3), 1, 'T.T'), ((3, 3), 1, 'T')]
        '''
        sequences = []
        # Scan every full row (horizontal) and every full column (vertical).
        for pos, direction, length in [((r, self.grid.colmin), HORIZONTAL, self.grid.width) for r in range(self.grid.rowmin, self.grid.rowmax + 1)] + [((self.grid.rowmin, c), VERTICAL, self.grid.height) for c in range(self.grid.colmin, self.grid.colmax + 1)]:
            line = self.grid.get_word(pos, direction, length)
            poslist = self.grid.poslist(pos, direction, length)
            sequences += self.extract_sequences(line, poslist, direction)
        return [(p, d, w) for (p, d, w) in sequences if not w.endswith('.')]
    def extract_sequences(self, line, poslist, direction, idx=0, current_seq=None):
        '''
        >>> c = Crossword2()
        >>> c.extract_sequences('ABC', [(0, 0), (0, 1), (0, 2)], HORIZONTAL)
        [((0, 0), 2, 'ABC')]
        >>> c.extract_sequences('_A_', [(0, 0), (0, 1), (0, 2)], HORIZONTAL)
        [((0, 1), 2, 'A'), ((0, 1), 2, 'A.')]
        >>> c.extract_sequences('A_C', [(0, 0), (0, 1), (0, 2)], HORIZONTAL)
        [((0, 0), 2, 'A'), ((0, 0), 2, 'A.C'), ((0, 2), 2, 'C')]
        >>> c.extract_sequences('A#C', [(0, 0), (0, 1), (0, 2)], HORIZONTAL)
        [((0, 0), 2, 'A'), ((0, 2), 2, 'C')]
        >>> c.extract_sequences('A_#B_C', [(0, 0), (0, 1), (0, 2), (0, 3), (0, 4), (0,5)], HORIZONTAL)
        [((0, 0), 2, 'A'), ((0, 0), 2, 'A.'), ((0, 3), 2, 'B'), ((0, 3), 2, 'B.C'), ((0, 5), 2, 'C')]
        >>> c.extract_sequences('A_B__C', [(0, 0), (0, 1), (0, 2), (0, 3), (0, 4), (0,5)], HORIZONTAL)
        [((0, 0), 2, 'A'), ((0, 0), 2, 'A.B'), ((0, 2), 2, 'B'), ((0, 0), 2, 'A.B.'), ((0, 2), 2, 'B.'), ((0, 0), 2, 'A.B..C'), ((0, 2), 2, 'B..C'), ((0, 5), 2, 'C')]
        '''
        if not current_seq: current_seq = []
        if idx >= len(line): return current_seq
        c = line[idx]
        pos = poslist[idx]
        if c == FILLED:
            # A blocked cell terminates every open sequence; restart after it.
            return current_seq + self.extract_sequences(line, poslist, direction, idx + 1, [])
        if c == EMPTY:
            # Empty cells extend open sequences with a '.' wildcard.
            new_current_seq = [(p, d, s + '.') for (p, d, s) in current_seq]
            return current_seq + self.extract_sequences(line, poslist, direction, idx + 1, new_current_seq)
        if current_seq:
            new_current_seq = [(p, d, s + c) for (p, d, s) in current_seq if not self.is_connected(poslist[idx - 1], pos)]
            if any([s.endswith('.') for (p, d, s) in current_seq]):
                new_current_seq.append((pos, direction, c))
            return self.extract_sequences(line, poslist, direction, idx + 1, new_current_seq)
        else:
            new_current_seq = [(pos, direction, c)]
            return self.extract_sequences(line, poslist, direction, idx + 1, new_current_seq)
def build_crossword2(words, monitor=False):
    '''
    >>> ans = list(build_crossword2(['ANT', 'ART', 'RAT']))
    >>> ans[0].dump()
    #ANT#
    >>> ans[1].dump()
    _#___
    #ANT#
    _R___
    _T___
    _#___
    >>> ans[2].dump()
    ___#___
    __#ANT#
    ___R___
    #RAT#__
    ___#___
    >>> ans[3].dump()
    ___#_
    ___R_
    _#_A_
    #ANT#
    _R_#_
    _T___
    _#___
    >>> ans[4].dump()
    _#___
    _R___
    #ANT#
    _T___
    _#___
    >>> ans[5].dump()
    ___#_
    _#_A_
    _R_R_
    #ANT#
    _T_#_
    _#___
    >>> ans[6].dump()
    ___#___
    ___R___
    __#ANT#
    #ART#__
    ___#___
    >>> ans[7].dump()
    ___#_
    ___A_
    ___R_
    #ANT#
    ___#_
    >>> ans[8].dump()
    ___#__
    _#RAT#
    ___R__
    #ANT#_
    ___#__
    >>> ans[9].dump()
    ___#_
    _#_A_
    _R_R_
    #ANT#
    _T_#_
    _#___
    >>> ans[10].dump()
    ___#___
    ___A___
    __#RAT#
    #ANT#__
    ___#___
    >>> ans[11].dump()
    ___#_
    ___R_
    ___A_
    #ANT#
    ___#_
    >>> ans[12].dump()
    ___#__
    _#ART#
    ___A__
    #ANT#_
    ___#__
    >>> ans[13].dump()
    ___#___
    ___R___
    __#ART#
    #ANT#__
    ___#___
    >>> ans[14].dump()
    ___#_
    ___R_
    _#_A_
    #ANT#
    _R_#_
    _T___
    _#___
    >>> len(ans)
    15
    '''
    # Best-first search over partial crosswords, seeded with the first word.
    crosswords = [Crossword2()]
    crosswords[0].embed((0, 0), HORIZONTAL, words[0])
    while True:
        if not crosswords: break
        # Expand the most promising candidate first (lowest score is best).
        crosswords = sorted(crosswords, key=lambda c: evaluate_crossword(c))
        base = crosswords.pop(0)
        if monitor:
            # monitor may be a dict supplying custom dump glyphs.
            print ('%d candidates...'%(len(crosswords)))
            if isinstance(monitor, dict):
                base.dump(empty=monitor['EMPTY'], filled=monitor['FILLED'])
            else:
                base.dump()
            print ('')
        try:
            sequences = base.all_disconnected_sequences()
            if is_valid_crossword(sequences):
                yield base
            candidates = generate_candidates(words, base, sequences)
            crosswords += candidates
        except ValueError:
            # discard this base
            pass
def is_valid_crossword(sequences):
    """True unless some sequence is a multi-letter run with no gap ('.') in it."""
    for _, _, seq in sequences:
        if len(seq) > 1 and '.' not in seq:
            return False
    return True
def generate_candidates(words, base, sequences):
    """Return new crosswords built by fitting each unused word onto *base*.

    Raises ValueError when a fully-bounded sequence (length > 1 and no '.'
    gap) matches no word, which marks *base* as a dead end.
    """
    fit_words = []
    for sequence in sequences:
        available_words = [w for w in words if w not in base.used_words]
        fit_words_for_seq = [(p, d, w) for (p, d, w) in propose_words(sequence, available_words) if base.is_fit(p, d, w)]
        _, _, s = sequence
        if not fit_words_for_seq and len(s) > 1 and s.find('.') == -1:
            # dead end; discard this base
            raise ValueError('no candidates found')
        fit_words += fit_words_for_seq
    candidates = []
    # Each placement gets its own copy so candidates stay independent.
    for p, d, w in fit_words:
        copy = base.copy()
        copy.embed(p, d, w)
        candidates.append(copy)
    return candidates
def propose_words(sequence, words):
    """Propose (position, direction, word) placements whose letters match *sequence*.

    The sequence string is used directly as a regex, so '.' (an empty cell)
    matches any letter.  The scan restarts one character past each match
    start so overlapping matches inside the same word are all found.
    """
    (p, d, seq) = sequence
    proposed_words = []
    for word in words:
        idx = 0
        while True:
            m = re.search(seq, word[idx:])
            if not m: break
            # Shift the start position back so the matched part of the word
            # lands on the sequence's cells.
            proposed_words.append((OpenGrid.pos_inc(p, -(m.start() + idx), d), d, word))
            idx += m.start() + 1
    return proposed_words
def evaluate_crossword(c):
    """Score a crossword: lower is better (compact grid, many placed words)."""
    # Alternative metrics kept from the original for reference:
    #   -len(c.used_words)
    #   (c.grid.width * c.grid.height) * 1.0 / sum([len(w) for w in c.used_words])
    word_count = len(c.used_words)
    extent = c.grid.width + c.grid.height
    return extent * 1.0 / word_count ** 2
def pickup_crosswords(words, dump_option=None, monitor=False):
    """Dump every generated crossword that improves on the best score so far."""
    best = 9999
    for candidate in build_crossword2(words, monitor=monitor):
        score = evaluate_crossword(candidate)
        if score < best:
            # dump_option may carry custom glyphs for empty/filled cells.
            if dump_option:
                candidate.dump(empty=dump_option['EMPTY'], filled=dump_option['FILLED'])
            else:
                candidate.dump()
            best = score
            print ('score: %f'%(best))
            print ('')
if __name__ == '__main__':
    # Self-test: run all doctests embedded in this module.
    import doctest
    doctest.testmod()
|
#!/usr/bin/env python3
"""
Copyright 2015 Stefano Benvenuti <ste.benve86@gmail.com>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import sys
import os
import json
import shutil
# helper function for reading a file content
# helper function for reading a file content
def read_file(filename):
    """Read *filename* and return its parsed JSON content.

    Exits the process with status 1 when the file cannot be opened or parsed.
    """
    try:
        # `with` guarantees the handle is closed, replacing the manual
        # try/finally bookkeeping (which also carried a stray ' | ' artifact).
        with open(filename) as f:
            return json.load(f)
    except Exception as e:
        # The original passed the format string and the values as separate
        # print() arguments, so "%s" was never interpolated.
        print('File "%s" cannot be opened or read: %s' % (filename, e))
        sys.exit(1)
# `!=` instead of the original `is not`: identity comparison with an int
# literal is implementation-dependent and a SyntaxWarning on modern Python.
if len(sys.argv) != 2:
    print("""
    USAGE: ./add_poem.py JSON_DELTA_FILE_PATH
    """)
    sys.exit(1)
conf_file = os.path.join("..", "poems", "poems.json")
# reads old configuration file and new content
content = read_file(conf_file)
new_content = read_file(sys.argv[1])
# merge the values (delta entries override existing ones)
content.update(new_content)
# back up the current file, then write the merged configuration
shutil.copyfile(conf_file, conf_file + ".bak")
try:
    with open(conf_file, 'w') as f:
        json.dump(content, f)
except Exception as e:
    # Fixed: the original referenced an undefined `filename` here (NameError)
    # and passed the format string and values as separate print() arguments.
    print('File "%s" cannot be opened or written: %s' % (conf_file, e))
    sys.exit(1)
|
from django.conf.urls import patterns, url
from . import views
# Route the site root to the main page view.
urlpatterns = patterns('',
    url(r'^$', views.main_page, name='main_page'),
)
|
def _is_match(a, b):
    """True when a == b, or a is b with one digit prepended/appended, or a is b with every letter's case swapped."""
    digits = {str(i) for i in range(10)}
    if a == b:
        return True
    if len(a) - 1 == len(b):
        # one extra leading or trailing digit
        if a[1:] == b and a[0] in digits:
            return True
        if a[:-1] == b and a[-1] in digits:
            return True
    # per-character case swap, exactly as the original generator expression
    return ''.join(x.upper() if x.islower() else x.lower() for x in b) == a


# Decomposed from a corrupted single-line double-lambda; reads two lines
# of input and prints Yes/No, preserving the original evaluation order.
print('Yes' if _is_match(input(), input()) else 'No')
# Mantid Repository : https://github.com/mantidproject/mantid
#
# Copyright © 2018 ISIS Rutherford Appleton Laboratory UKRI,
# NScD Oak Ridge National Laboratory, European Spallation Source
# & Institut Laue - Langevin
# SPDX - License - Identifier: GPL - 3.0 +
from __future__ import (absolute_import, division, print_function)
#pylint: disable=invalid-name,no-init
import os
from systemtesting import MantidSystemTest
from mantid.simpleapi import *
from mantid.kernel import PropertyManager
from mantid import config
def MAX_DBL():
    """Return half of the largest representable double-precision float."""
    import sys
    return sys.float_info.max / 2
def getNamedParameter(ws, name):
    """Return the first value of instrument parameter `name` on workspace `ws`."""
    instrument = ws.getInstrument()
    return instrument.getNumberParameter(name)[0]
class DirectInelasticDiagnostic2(MantidSystemTest):
    """System test: run DgsDiagnose on MAPS raw data, mask the flagged
    detectors, and compare the masked spectrum numbers with a reference file.
    """
    # Path of the ASCII result file; filled in by runTest, used by cleanup/validate.
    saved_diag_file=''
    def requiredMemoryMB(self):
        """Requires 4Gb"""
        return 4000
    def runTest(self):
        """Build the reduction property manager, run DgsDiagnose, and dump
        the resulting masked spectrum numbers to an ASCII file."""
        red_man = PropertyManager()
        red_man_name = "__dgs_reduction_properties"
        pmds[red_man_name] = red_man
        # reuse workspaces from a previous run when already loaded
        if 'detvan' in mtd:
            detvan = mtd['detvan']
        else:
            detvan = Load('MAP17186.raw')
        if 'sample' in mtd:
            sample = mtd['sample']
        else:
            sample = Load('MAP17269.raw')
        # Libisis values to check against
        # All PropertyManager properties need to be set
        red_man["LowCounts"] = 1e-10
        red_man["HighCounts"] = 1e10
        red_man["LowOutlier"] = 0.01
        red_man["HighOutlier"] = 100.
        red_man["ErrorBarCriterion"] = 0.0
        red_man["MedianTestLow"] = 0.1
        red_man["MedianTestHigh"] = 2.0
        red_man["SamBkgMedianTestLow"] = 0.0
        red_man["SamBkgMedianTestHigh"] = 1.5
        red_man["SamBkgErrorbarCriterion"] = 3.3
        red_man["RejectZeroBackground"] = True
        # Things needed to run vanadium reduction
        red_man["IncidentBeamNormalisation"] = "ToMonitor"
        red_man["DetVanIntRangeUnits"] = "Energy"
        # properties affecting diagnostics:
        #reducer.wb_integr_range = [20,300]
        red_man["DetVanIntRangeLow"] = 20.
        red_man["DetVanIntRangeHigh"] = 300.
        red_man["BackgroundCheck"] = True
        red_man["BackgroundTofStart"]=12000.
        red_man["BackgroundTofEnd"]=18000.
        #reducer.bkgd_range=[12000,18000]
        diag_mask = DgsDiagnose(DetVanWorkspace=detvan, SampleWorkspace=sample,
                                ReductionProperties=red_man_name)
        MaskDetectors(sample, MaskedWorkspace=diag_mask)
        # Save the masked spectra numbers to a simple ASCII file for comparison
        self.saved_diag_file = os.path.join(config['defaultsave.directory'],
                                            'CurrentDirectInelasticDiag2.txt')
        with open(self.saved_diag_file, 'w') as handle:
            spectrumInfo = sample.spectrumInfo()
            for index in range(sample.getNumberHistograms()):
                if spectrumInfo.isMasked(index):
                    spec_no = sample.getSpectrum(index).getSpectrumNo()
                    handle.write(str(spec_no) + '\n')
    def cleanup(self):
        """Remove the result file on success; keep it (renamed) on mismatch."""
        if os.path.exists(self.saved_diag_file):
            if self.succeeded():
                os.remove(self.saved_diag_file)
            else:
                os.rename(self.saved_diag_file,
                          os.path.join(config['defaultsave.directory'],
                                       'DirectInelasticDiag2-Mismatch.txt'))
    def validateMethod(self):
        """Compare results as plain ASCII text."""
        return 'validateASCII'
    def validate(self):
        """Return (produced file, reference file) for the ASCII comparison."""
        return (self.saved_diag_file, 'DirectInelasticDiagnostic.txt')
|
# coding: utf8
{
'"update" is an optional expression like "field1=\'newvalue\'". You cannot update or delete the results of a JOIN': '"更新" 是選擇性的條件式, 格式就像 "欄位1=\'值\'". 但是 JOIN 的資料不可以使用 update 或是 delete"',
'%Y-%m-%d': '%Y-%m-%d',
'%Y-%m-%d %H:%M:%S': '%Y-%m-%d %H:%M:%S',
'%s rows deleted': '已刪除 %s 筆',
'%s rows updated': '已更新 %s 筆',
'(something like "it-it")': '(格式類似 "zh-tw")',
'A new version of web2py is available': '新版的 web2py 已發行',
'A new version of web2py is available: %s': '新版的 web2py 已發行: %s',
'ATTENTION: Login requires a secure (HTTPS) connection or running on localhost.': '注意: 登入管理帳號需要安全連線(HTTPS)或是在本機連線(localhost).',
'ATTENTION: TESTING IS NOT THREAD SAFE SO DO NOT PERFORM MULTIPLE TESTS CONCURRENTLY.': '注意: 因為在測試模式不保證多執行緒安全性,也就是說不可以同時執行多個測試案例',
'ATTENTION: you cannot edit the running application!': '注意:不可編輯正在執行的應用程式!',
'About': '關於',
'About application': '關於本應用程式',
'Admin is disabled because insecure channel': '管理功能(Admin)在不安全連線環境下自動關閉',
'Admin is disabled because unsecure channel': '管理功能(Admin)在不安全連線環境下自動關閉',
'Administrator Password:': '管理員密碼:',
'Are you sure you want to delete file "%s"?': '確定要刪除檔案"%s"?',
'Are you sure you want to uninstall application "%s"': '確定要移除應用程式 "%s"',
'Are you sure you want to uninstall application "%s"?': '確定要移除應用程式 "%s"',
'Asíncrona': 'Asíncrona',
'Authentication': '驗證',
'Available databases and tables': '可提供的資料庫和資料表',
'Ayuda': 'Ayuda',
'Cannot be empty': '不可空白',
'Cannot compile: there are errors in your app. Debug it, correct errors and try again.': '無法編譯:應用程式中含有錯誤,請除錯後再試一次.',
'Change Password': '變更密碼',
'Check to delete': '打勾代表刪除',
'Check to delete:': '點選以示刪除:',
'Client IP': '客戶端網址(IP)',
'Comprobantes': 'Comprobantes',
'Configuración': 'Configuración',
'Configurar': 'Configurar',
'Consultas': 'Consultas',
'Controller': '控件',
'Controllers': '控件',
'Copyright': '版權所有',
'Cotización': 'Cotización',
'Create new application': '創建應用程式',
'Current request': '目前網路資料要求(request)',
'Current response': '目前網路資料回應(response)',
'Current session': '目前網路連線資訊(session)',
'DB Model': '資料庫模組',
'DESIGN': '設計',
'Database': '資料庫',
'Date and Time': '日期和時間',
'Delete': '刪除',
'Delete:': '刪除:',
'Deploy on Google App Engine': '配置到 Google App Engine',
'Description' | : '描述',
'Design for': '設計為了',
'Detalles': 'Detalles',
'E-mail': '電子郵件',
'EDIT': '編輯',
' | Edit': '編輯',
'Edit Profile': '編輯設定檔',
'Edit This App': '編輯本應用程式',
'Edit application': '編輯應用程式',
'Edit current record': '編輯當前紀錄',
'Editing file': '編輯檔案',
'Editing file "%s"': '編輯檔案"%s"',
'Emisión': 'Emisión',
'Error logs for "%(app)s"': '"%(app)s"的錯誤紀錄',
'Estado (dummy)': 'Estado (dummy)',
'FacturaLibre': 'FacturaLibre',
'FacturaLibre. Aplicación en desarrollo': 'FacturaLibre. Aplicación en desarrollo',
'FacturaLibre. Aplicación web para factura electrónica': 'FacturaLibre. Aplicación web para factura electrónica',
'FacturaLibre: interfase alternativa': 'FacturaLibre: interfase alternativa',
'FacturaLibre: interfaz de usuario alternativa': 'FacturaLibre: interfaz de usuario alternativa',
'First name': '名',
'Functions with no doctests will result in [passed] tests.': '沒有 doctests 的函式會顯示 [passed].',
'Group ID': '群組編號',
'Hello World': '嗨! 世界',
'Import/Export': '匯入/匯出',
'Index': '索引',
'Información General': 'Información General',
'Información Técnica': 'Información Técnica',
'Inicio': 'Inicio',
'Installed applications': '已安裝應用程式',
'Internal State': '內部狀態',
'Invalid Query': '不合法的查詢',
'Invalid action': '不合法的動作(action)',
'Invalid email': '不合法的電子郵件',
'Language files (static strings) updated': '語言檔已更新',
'Languages': '各國語言',
'Last name': '姓',
'Last saved on:': '最後儲存時間:',
'Layout': '網頁配置',
'License for': '軟體版權為',
'Listar comprobantes.': 'Listar comprobantes.',
'Listar detalles': 'Listar detalles',
'Login': '登入',
'Login to the Administrative Interface': '登入到管理員介面',
'Logout': '登出',
'Lost Password': '密碼遺忘',
'Main Menu': '主選單',
'Menu Model': '選單模組(menu)',
'Models': '資料模組',
'Modules': '程式模組',
'NO': '否',
'Name': '名字',
'New Record': '新紀錄',
'No databases in this application': '這應用程式不含資料庫',
'Origin': '原文',
'Original/Translation': '原文/翻譯',
'Password': '密碼',
"Password fields don't match": '密碼欄不匹配',
'Peeking at file': '選擇檔案',
'Powered by': '基於以下技術構建:',
'Query:': '查詢:',
'Record ID': '紀錄編號',
'Register': '註冊',
'Registration key': '註冊金鑰',
'Remember me (for 30 days)': '記住我(30 天)',
'Reset Password key': '重設密碼',
'Resolve Conflict file': '解決衝突檔案',
'Role': '角色',
'Rows in table': '在資料表裏的資料',
'Rows selected': '筆資料被選擇',
'Saved file hash:': '檔案雜湊值已紀錄:',
'Secuencial': 'Secuencial',
'Servicios Web': 'Servicios Web',
'Static files': '靜態檔案',
'Stylesheet': '網頁風格檔',
'Submit': '傳送',
'Sure you want to delete this object?': '確定要刪除此物件?',
'Table name': '資料表名稱',
'Testing application': '測試中的應用程式',
'The "query" is a condition like "db.table1.field1==\'value\'". Something like "db.table1.field1==db.table2.field2" results in a SQL JOIN.': '"查詢"是一個像 "db.表1.欄位1==\'值\'" 的條件式. 以"db.表1.欄位1==db.表2.欄位2"方式則相當於執行 JOIN SQL.',
'There are no controllers': '沒有控件(controllers)',
'There are no models': '沒有資料庫模組(models)',
'There are no modules': '沒有程式模組(modules)',
'There are no static files': '沒有靜態檔案',
'There are no translators, only default language is supported': '沒有翻譯檔,只支援原始語言',
'There are no views': '沒有視圖',
'This is the %(filename)s template': '這是%(filename)s檔案的樣板(template)',
'Ticket': '問題單',
'Timestamp': '時間標記',
'Unable to check for upgrades': '無法做升級檢查',
'Unable to download': '無法下載',
'Unable to download app': '無法下載應用程式',
'Update:': '更新:',
'Upload existing application': '更新存在的應用程式',
'Use (...)&(...) for AND, (...)|(...) for OR, and ~(...) for NOT to build more complex queries.': '使用下列方式來組合更複雜的條件式, (...)&(...) 代表同時存在的條件, (...)|(...) 代表擇一的條件, ~(...)則代表反向條件.',
'User %(id)s Logged-in': '使用者 %(id)s 已登入',
'User %(id)s Registered': '使用者 %(id)s 已註冊',
'User ID': '使用者編號',
'Verify Password': '驗證密碼',
'View': '視圖',
'Views': '視圖',
'WSBFE': 'WSBFE',
'WSFEX': 'WSFEX',
'WSFEv0': 'WSFEv0',
'WSFEv1': 'WSFEv1',
'WSMTXCA': 'WSMTXCA',
'Welcome %s': '歡迎 %s',
'Welcome to web2py': '歡迎使用 web2py',
'YES': '是',
'about': '關於',
'appadmin is disabled because insecure channel': '因為來自非安全通道,管理介面關閉',
'cache': '快取記憶體',
'change password': '變更密碼',
'click here for online examples': '點此處進入線上範例',
'click here for the administrative interface': '點此處進入管理介面',
'customize me!': '請調整我!',
'data uploaded': '資料已上傳',
'database': '資料庫',
'database %s select': '已選擇 %s 資料庫',
'db': 'db',
'design': '設計',
'done!': '完成!',
'edit profile': '編輯設定檔',
'export as csv file': '以逗號分隔檔(csv)格式匯出',
'insert new': '插入新資料',
'insert new %s': '插入新資料 %s',
'invalid request': '不合法的網路要求(request)',
'login': '登入',
'logout': '登出',
'new record inserted': '已插入新紀錄',
'next 100 rows': '往後 100 筆',
'or import from csv file': '或是從逗號分隔檔(CSV)匯入',
'previous 100 rows': '往前 100 筆',
'record': '紀錄',
'record does not exist': '紀錄不存在',
'record id': '紀錄編號',
'register': '註冊',
'selected': '已選擇',
'state': '狀態',
'table': '資料表',
'unable to parse csv file': '無法解析逗號分隔檔(csv)',
'Últ.ID': 'Últ.ID',
'Últ.Nro.Cbte.': 'Últ.Nro.Cbte.',
}
|
from datetime import datetime, timedelta
from subprocess import PIPE, call, Popen
import tempfile, os, argparse, sys, re
def get_file_duration(infilename):
    '''
    Probe `infilename` with ffmpeg and return its duration.

    :param infilename: input mp4 filename
    :type infilename: str
    :returns: datetime anchored at 2017-01-01 carrying the (h, m, s) duration
    '''
    cmd = ['ffmpeg', '-i', infilename]
    p = Popen(cmd, stdout=PIPE, stderr=PIPE)
    # communicate() drains both pipes and waits for the process, avoiding
    # the potential deadlock of reading stderr while stdout's buffer fills.
    _, stderr_data = p.communicate()
    output = stderr_data.decode('utf8')
    # raw string so '\.' is an explicit regex escape, not a str escape
    match = re.search(r'Duration: (.*?)\.', output)
    assert match
    h, m, s = parse_ts(match.group(1))
    return datetime(2017, 1, 1, h, m, s)
def parse_ts(instring):
    '''
    Parse "hh:mm:ss" (or "mm:ss", hours defaulting to 0) into an int tuple.
    '''
    parts = [int(tok) for tok in instring.split(':')]
    if len(parts) == 2:
        # no hour field given
        parts = [0] + parts
    h, m, s = parts
    return (h, m, s)
def format_ts(instring):
    """Normalize a time string to zero-padded 'hh:mm:ss' form."""
    return '%02d:%02d:%02d' % parse_ts(instring)
def run_cmd_dt(start, end, infname, outfname):
    """Invoke run_cmd with datetime endpoints instead of time strings."""
    assert isinstance(start, datetime)
    assert isinstance(end, datetime)
    time_format = '%H:%M:%S'  # strftime zero-pads, matching '%02d' output
    run_cmd(start.strftime(time_format), end.strftime(time_format),
            infname, outfname)
def run_cmd(start='00:00:00',end='23:00:00',infname='foo.mp4',outfname='outfoo.mp4'):
    '''
    trigger call to `ffmpeg`

    Stream-copies (no re-encode) the section of `infname` between `start`
    and `end` into `outfname`.  Times are "hh:mm:ss" strings; `end` also
    accepts the `n`-prefixed file-relative form understood by get_duration.
    '''
    duration = get_duration(start,end)
    cmd=['ffmpeg','-ss',format_ts(start),'-t',duration,'-i',
         infname,'-acodec','copy','-vcodec','copy',
         outfname]
    call(cmd,stdout=PIPE,stderr=None)
def get_duration(start='00:00:00', end='', infile=None):
    '''
    Return the clip duration between `start` and `end` as "hh:mm:ss".

    end can be negative if prefixed with `n` as in `n00:00:04`
    which means four seconds from the end of the file.

    :param infile: media file used when `end` is empty or file-relative.
        Defaults to the module-level ``args.infile`` for backward
        compatibility with the original implicit-global behavior.
    '''
    def _media_file():
        # Lazy lookup preserves the old behavior: the global `args` is only
        # touched when the end time actually depends on the file length.
        return args.infile if infile is None else infile
    if end and not end.startswith('n'):
        he, me, se = parse_ts(end)
        end_time = datetime(2017, 1, 1, he, me, se)
    elif end.startswith('n'):
        he, me, se = parse_ts(end[1:])
        end_time = get_file_duration(_media_file()) - timedelta(hours=he, minutes=me, seconds=se)
    else:
        end_time = get_file_duration(_media_file())
    hs, ms, ss = parse_ts(start)
    start_time = datetime(2017, 1, 1, hs, ms, ss)
    duration = str(end_time - start_time)
    if len(duration) == 7:
        # str(timedelta) yields 'h:mm:ss' for h < 10; zero-pad to hh:mm:ss
        duration = '0' + duration
    return duration
if __name__ == '__main__':
    # removed dead alias `parse = argparse` (never used)
    parser = argparse.ArgumentParser(formatter_class=argparse.RawDescriptionHelpFormatter,
                                     description='''Cut a section out of MP4 file and return it using ffmpeg
    without re-encoding.
    Example: extract from start to 00:11:44
    % python mp4_cut.py -e 11:44 -i L.mp4 -o foo.mp4
    Example: extract from 00:15:00 to 00:17:34
    % python mp4_cut.py -s 15:00 -e 17:34 -i L.mp4 -o foo.mp4
    You can also take the complement of the selected slice by using the
    --invert flag
    % python mp4_cut.py --invert -s 15:00 -e 17:34 -i L.mp4 -o foo.mp4
    The two complementary parts are joined to make the output file.''')
    parser.add_argument("-i","--input-file",
                        dest="infile",
                        help='input file',
                        default='',
                        )
    parser.add_argument("-o","--output-file",
                        dest="outfile",
                        help='output file',
                        default='',
                        )
    parser.add_argument("-s","--start-time",
                        dest="start_time",
                        help='hh:mm:ss',
                        default='00:00:00',
                        )
    parser.add_argument("-e","--end-time",
                        dest="end_time",
                        help='hh:mm:ss',
                        default='',
                        )
    parser.add_argument("-c","--chunk_duration",
                        help='Divide into <n> chunks of this duration hh:mm:ss. Overrides other flags!',
                        default='',
                        )
    parser.add_argument("--invert",
                        dest='invert',
                        default=False,
                        action='store_true',
                        help="return complement of indicated section")
    args = parser.parse_args()
    if args.chunk_duration:
        # this over-rides other options: emit sequential fixed-size chunks
        hc, mc, sc = parse_ts(args.chunk_duration)
        start_time = datetime(2017,1,1,0,0,0)
        end_time = datetime(2017,1,1,hc,mc,sc)
        file_length = get_file_duration(args.infile)
        dt = timedelta(hours=hc,minutes=mc,seconds=sc)
        outfilename_head = args.outfile.replace('.mp4','')
        n = 0
        while end_time < file_length:
            run_cmd_dt(start_time,end_time,args.infile,'%s_%03d.mp4'%(outfilename_head,n))
            start_time = end_time
            end_time = start_time + dt
            n += 1
        sys.exit()
    duration = get_duration(args.start_time,args.end_time)
    if args.invert:
        if args.start_time=='00:00:00': # tail section
            # complement of a head slice is just everything after end_time
            duration = '23:00:00'
            cmd=['ffmpeg','-ss',format_ts(args.end_time),'-t',duration,'-i',
                 args.infile,'-acodec','copy','-vcodec','copy',
                 args.outfile]
            call(cmd,stdout=PIPE,stderr=None)
        else: # middle section: cut head and tail, then concatenate them
            start_time='00:00:00'
            # NOTE: tempfile.mktemp is race-prone; kept for behavior parity
            filename1=tempfile.mktemp('.mp4',dir=os.getcwd())
            filename2=tempfile.mktemp('.mp4',dir=os.getcwd())
            run_cmd(start_time,args.start_time,args.infile,filename1)
            run_cmd(args.end_time,'23:00:00',args.infile,filename2)
            fname= tempfile.mktemp(suffix='.txt',dir=os.getcwd())
            with open(fname,'w') as fd:
                fd.write('file '+os.path.split(filename1)[1]+'\n')
                fd.write('file '+os.path.split(filename2)[1]+'\n')
            # (redundant fd.close() inside the with-block removed)
            # ffmpeg -safe 0 -f concat -i list.txt -c copy outfile.mp4
            cmd=['ffmpeg','-safe','0','-f','concat','-i',fname,'-c','copy',args.outfile ]
            call(cmd,stdout=PIPE,stderr=None)
            for i in (filename1,filename2,fname):
                os.unlink(i)
    else:
        run_cmd(args.start_time,args.end_time,args.infile,args.outfile)
|
= subprocess.Popen(['tput', '-T{}'.format(term), 'smm'], stdout=subprocess.PIPE)
tput_stdout = tput_proc.communicate()[0]
if (tput_stdout and (tput_stdout != b'')):
result = tput_stdout
if isinstance(result, bytes):
result = result.decode("utf-8")
return result
DEFAULT_SMM_SEQUENCE = get_smm_sequence()
cqlshlog = basecase.cqlshlog
def set_controlling_pty(master, slave):
    # Runs in the child between fork and exec (as a preexec_fn): detach from
    # the parent's session and make the pty slave our stdin/stdout/stderr.
    os.setsid()
    os.close(master)
    for i in range(3):
        os.dup2(slave, i)
    if slave > 2:
        os.close(slave)
    # Re-open the tty by name — presumably to acquire it as the controlling
    # terminal of the new session; TODO confirm across platforms.
    os.close(os.open(os.ttyname(1), os.O_RDWR))
@contextlib.contextmanager
def raising_signal(signum, exc):
    """
    Within the wrapped context, delivery of `signum` raises an instance of
    `exc`.  The previously installed handler is reinstated on context exit.
    """
    def _handler(sig, frame):
        raise exc()

    previous = signal.signal(signum, _handler)
    try:
        yield
    finally:
        signal.signal(signum, previous)
class TimeoutError(Exception):
    # Raised when a timing_out_* context expires.
    # NOTE(review): shadows the builtin TimeoutError (Python 3.3+); kept as-is
    # because callers in this module catch this specific class.
    pass
@contextlib.contextmanager
def timing_out_itimer(seconds):
    """Raise TimeoutError in the wrapped block after `seconds` (may be
    fractional) using ITIMER_REAL; `seconds=None` disables the timeout."""
    if seconds is None:
        yield
        return
    with raising_signal(signal.SIGALRM, TimeoutError):
        oldval, oldint = signal.getitimer(signal.ITIMER_REAL)
        if oldval != 0.0:
            # refuse to clobber a timer someone else armed
            raise RuntimeError("ITIMER_REAL already in use")
        signal.setitimer(signal.ITIMER_REAL, seconds)
        try:
            yield
        finally:
            # always disarm, even when the block raised
            signal.setitimer(signal.ITIMER_REAL, 0)
@contextlib.contextmanager
def timing_out_alarm(seconds):
    """Whole-second fallback for timing_out_itimer using signal.alarm();
    `seconds` is rounded up, `None` disables the timeout."""
    if seconds is None:
        yield
        return
    with raising_signal(signal.SIGALRM, TimeoutError):
        oldval = signal.alarm(int(math.ceil(seconds)))
        if oldval != 0:
            # restore the pre-existing alarm before refusing to proceed
            signal.alarm(oldval)
            raise RuntimeError("SIGALRM already in use")
        try:
            yield
        finally:
            signal.alarm(0)
# Select a timeout implementation for this platform.  Windows has no POSIX
# signals, so eventlet's green-thread timeout is required there.
if is_win():
    try:
        import eventlet
    except ImportError as e:
        # fixed typo: message previously said "evenlet"
        sys.exit("eventlet library required to run cqlshlib tests on Windows")

    def timing_out(seconds):
        return eventlet.Timeout(seconds, TimeoutError)
else:
    # setitimer is new in 2.6, but it's still worth supporting, for potentially
    # faster tests because of sub-second resolution on timeouts.
    if hasattr(signal, 'setitimer'):
        timing_out = timing_out_itimer
    else:
        timing_out = timing_out_alarm
def noop(*ignored):
    """Accept anything, do nothing (placeholder callable, e.g. preexec_fn)."""
    return None
class ProcRunner:
    """Spawn a child process and talk to it over a pty (POSIX) or pipes.

    start_proc rebinds self.send/self.read to the tty, winpty, or pipe
    implementation so callers use one interface regardless of platform.
    """
    def __init__(self, path, tty=True, env=None, args=()):
        # path: executable; args: extra argv entries appended after argv[0]
        self.exe_path = path
        self.args = args
        self.tty = bool(tty)
        # a real pty only exists off-Windows; on Windows WinPty emulates one
        self.realtty = self.tty and not is_win()
        if env is None:
            env = {}
        self.env = env
        self.readbuf = ''
        self.start_proc()
    def start_proc(self):
        """Spawn the subprocess and bind send/read to the right transport."""
        preexec = noop
        stdin = stdout = stderr = None
        cqlshlog.info("Spawning %r subprocess with args: %r and env: %r"
                      % (self.exe_path, self.args, self.env))
        if self.realtty:
            masterfd, slavefd = pty.openpty()
            # child adopts the pty slave as its controlling terminal pre-exec
            preexec = (lambda: set_controlling_pty(masterfd, slavefd))
            self.proc = subprocess.Popen((self.exe_path,) + tuple(self.args),
                                         env=self.env, preexec_fn=preexec,
                                         stdin=stdin, stdout=stdout, stderr=stderr,
                                         close_fds=False)
            os.close(slavefd)
            self.childpty = masterfd
            self.send = self.send_tty
            self.read = self.read_tty
        else:
            stdin = stdout = subprocess.PIPE
            stderr = subprocess.STDOUT
            self.proc = subprocess.Popen((self.exe_path,) + tuple(self.args),
                                         env=self.env, stdin=stdin, stdout=stdout,
                                         stderr=stderr, bufsize=0, close_fds=False)
            self.send = self.send_pipe
            if self.tty:
                self.winpty = WinPty(self.proc.stdout)
                self.read = self.read_winpty
            else:
                self.read = self.read_pipe
    def close(self):
        """Close the child's input and wait for it; returns the exit status."""
        cqlshlog.info("Closing %r subprocess." % (self.exe_path,))
        if self.realtty:
            os.close(self.childpty)
        else:
            self.proc.stdin.close()
        cqlshlog.debug("Waiting for exit")
        return self.proc.wait()
    def send_tty(self, data):
        # os.write requires bytes
        if not isinstance(data, bytes):
            data = data.encode("utf-8")
        os.write(self.childpty, data)
    def send_pipe(self, data):
        self.proc.stdin.write(data)
    def read_tty(self, blksize, timeout=None):
        # `timeout` accepted for interface parity with read_winpty; unused here
        buf = os.read(self.childpty, blksize)
        if isinstance(buf, bytes):
            buf = buf.decode("utf-8")
        return buf
    def read_pipe(self, blksize, timeout=None):
        buf = self.proc.stdout.read(blksize)
        if isinstance(buf, bytes):
            buf = buf.decode("utf-8")
        return buf
    def read_winpty(self, blksize, timeout=None):
        buf = self.winpty.read(blksize, timeout)
        if isinstance(buf, bytes):
            buf = buf.decode("utf-8")
        return buf
    def read_until(self, until, blksize=4096, timeout=None,
                   flags=0, ptty_timeout=None, replace=[]):
        """Read output until regex `until` matches; returns text through the
        match, buffering anything after it for the next read.  Raises
        EOFError when the stream ends without a match.
        NOTE(review): `replace` is a shared mutable default — safe here only
        because it is never mutated."""
        if not isinstance(until, Pattern):
            until = re.compile(until, flags)
        cqlshlog.debug("Searching for %r" % (until.pattern,))
        got = self.readbuf
        self.readbuf = ''
        with timing_out(timeout):
            while True:
                val = self.read(blksize, ptty_timeout)
                for replace_target in replace:
                    if (replace_target != ''):
                        val = val.replace(replace_target, '')
                cqlshlog.debug("read %r from subproc" % (val,))
                if val == '':
                    raise EOFError("'until' pattern %r not found" % (until.pattern,))
                got += val
                m = until.search(got)
                if m is not None:
                    self.readbuf = got[m.end():]
                    got = got[:m.end()]
                    return got
    def read_lines(self, numlines, blksize=4096, timeout=None):
        """Read `numlines` newline-terminated lines under one shared timeout."""
        lines = []
        with timing_out(timeout):
            for n in range(numlines):
                lines.append(self.read_until('\n', blksize=blksize))
        return lines
    def read_up_to_timeout(self, timeout, blksize=4096):
        """Collect all output produced within `timeout` seconds (or until EOF)."""
        got = self.readbuf
        self.readbuf = ''
        curtime = time()
        stoptime = curtime + timeout
        while curtime < stoptime:
            try:
                with timing_out(stoptime - curtime):
                    stuff = self.read(blksize)
            except TimeoutError:
                break
            cqlshlog.debug("read %r from subproc" % (stuff,))
            if stuff == '':
                break
            got += stuff
            curtime = time()
        return got
class CqlshRunner(ProcRunner):
def __init__(self, path=None, host=None, port=None, keyspace=None, cqlver=None,
args=(), prompt=DEFAULT_CQLSH_PROMPT, env=None,
win_force_colors=True, tty=True, **kwargs):
if path is None:
cqlsh_bin = 'cqlsh'
if is_win():
cqlsh_bin = 'cqlsh.bat'
path = normpath(join(basecase.cqlshdir, cqlsh_bin))
if host is None:
host = basecase.TEST_HOST
if port is None:
port = basecase.TEST_PORT
if env is None:
env = {}
if is_win():
env['PYTHONUNBUFFERED'] = '1'
env.update(os.environ.copy())
env.setdefault('TERM', 'xterm')
env.setdefault('CQLSH_NO_BUNDLED', os.environ.get('CQLSH_NO_BUNDLED', ''))
env.setdefault('PYTHONPATH', os.environ.get('PYTHONPATH', ''))
coverage = False
if ('CQLSH_COVERAGE' in env.keys()):
coverage = True
args = tuple(args) + (host, str(port))
if cqlver is not None:
args += ('- |
D_MOVE_R, CMD_MOVE_L = range(4)
OCCUPIED_S = '1'
OCCUPIED_F = '0'
BLANK = ' '
PENDING_MAX = 50 # max number of elements in pendings
PENDING_MIN = 4 # min number of elements in pendings before renewing
COL_STATIC = 1
COL_FALLING = 2
COL_NONE = 0
REVERSE_CMD = {CMD_ROTATE_R:CMD_ROTATE_L,
CMD_ROTATE_L:CMD_ROTATE_R,
CMD_MOVE_R :CMD_MOVE_L,
CMD_MOVE_L :CMD_MOVE_R}
logF = open('gamelog.txt','w')
def init():
    """Reset all module-level game state for a new game."""
    global board, pendings, fallingPieces, staticPieces, softDroping
    global currentPiece,nextPiece
    global level, fallingTime, nextLevelScore, score
    global delaying, lastDrop
    # board cells hold BLANK / OCCUPIED_F (falling) / OCCUPIED_S (settled)
    board = [[BLANK]*BOARDCOLS for i in range(BOARDROWS)]
    # pre-generated queue of (piece type, rotation) pairs
    pendings = [(random.randrange(TYPES), random.randrange(4)) \
                for i in range(PENDING_MAX)]
    fallingPieces = []
    staticPieces = []
    nextPiece = None
    currentPiece = None
    delaying = False
    lastDrop = 0
    level = 1
    fallingTime = _getFallingTime(level)
    nextLevelScore = _getNextLvlScore(level)
    score = 0
    softDroping = False
    # NOTE(review): update() times drops via lastDrop; this attribute looks
    # vestigial — confirm before removing.
    update.oldTime = int(time.time() * 1000)
def update():
    """Advance the game one tick; returns the list of row indices cleared."""
    global fallingTime, score, nextLevelScore, fallingPieces
    global currentPiece
    global delaying,lastDrop
    newTime = time.time()
    lines = []
    # time to move down
    if (newTime - lastDrop)*1000 > fallingTime:
        #print 'updating !!!!'
        lastDrop = newTime
        if currentPiece != None:
            _moveDown(currentPiece)
        # check if any line is eaten
        lines = _removeEatenLines()
        # print lines;
        if len(lines) != 0:
            # pause one tick so pieces split by the cleared row can drop
            delaying = True;
            score += _calculateScore(lines)
            if score >= nextLevelScore:
                levelUp()
        elif delaying:
            # settle the split fragments all at once, then resume
            hardDrop();
            delaying = False
        elif currentPiece == None:
            #print 'making a new piece !!!! so fun!!!'
            currentPiece = _getNextPiece()
            _addToBoard(currentPiece)
            fallingPieces.append(currentPiece)
        logF.write(_getStrBoard())
    return lines
def levelUp():
    """Advance one level: faster drops, higher score threshold."""
    global level, fallingTime, nextLevelScore
    level += 1
    fallingTime = _getFallingTime(level)
    nextLevelScore = _getNextLvlScore(level)
def getPieces():
    """Return all pieces on the board (falling first, then settled)."""
    return fallingPieces + staticPieces
def getNextPiece ():
    """Return the preview piece that will spawn next."""
    global nextPiece  # note: 'global' is unnecessary for a read-only access
    return nextPiece
def rotateRight():
    """Rotate the current piece clockwise (reverted on collision)."""
    _movePiece(CMD_ROTATE_R)
def rotateLeft():
    """Rotate the current piece counter-clockwise (reverted on collision)."""
    _movePiece(CMD_ROTATE_L)
def moveRight():
    """Shift the current piece one column right (reverted on collision)."""
    _movePiece(CMD_MOVE_R)
def moveLeft():
    """Shift the current piece one column left (reverted on collision)."""
    _movePiece(CMD_MOVE_L)
def softDrop():
    """Triple the fall rate while the player holds soft drop."""
    global fallingTime, softDroping
    if not softDroping:
        softDroping = True
        # NOTE(review): '/=' yields a float under Python 3; the value is only
        # compared against elapsed milliseconds so it presumably still works,
        # but confirm when porting from Python 2.
        fallingTime /= 3
def stopSoftDrop():
    """Restore the level-based fall speed when soft drop ends."""
    global fallingTime, softDroping
    if softDroping:
        softDroping = False
        fallingTime = _getFallingTime(level)
def hardDrop():
    """Drop every falling piece straight down until all are settled."""
    global fallingPieces,lastDrop
    # _moveDown removes settled pieces from fallingPieces; the outer while
    # re-scans until the list empties.  NOTE(review): the list is mutated
    # while the inner for iterates it — appears tolerated because the while
    # loop re-checks, but confirm.
    while (len(fallingPieces) > 0):
        for piece in fallingPieces:
            _moveDown(piece)
    lastDrop = time.time()
def checkGameEnd():
    """Return True when a settled block occupies the spawn row."""
    spawn_row = board[PATTERNSIZE - 1]
    return any(spawn_row[col] == OCCUPIED_S for col in range(BOARDCOLS))
def close():
    """Flush and close the game log file."""
    logF.close()
def getProjection():
    """Return a ghost copy of the current piece dropped to its rest row."""
    global board, currentPiece
    projectPiece = None
    if currentPiece != None:
        # NOTE(review): copy.copy is shallow — if Piece.moveDown mutates a
        # shared 'boxes' list this would also move the real piece; presumably
        # Piece rebuilds its boxes on move, confirm in the Piece class.
        projectPiece = copy.copy(currentPiece)
        col = _checkCollision(projectPiece)
        while col != COL_STATIC:
            projectPiece.moveDown()
            col = _checkCollision(projectPiece)
        # back up one row: the loop exits after moving into a collision
        projectPiece.moveUp()
    return projectPiece
########################################################################
### Game helper functions
########################################################################
def _getFallingTime(level):
return 540 - level * 40; # TODO: need a better function
# 500, 460, 420, 380, 340 ...
def _getNextLvlScore(level):
return level*1000; # TODO: need a better function
def _removeEatenLines():
    '''Clear full rows of settled blocks; returns the cleared row indices.

    Only OCCUPIED_S cells count toward a full row.  Pieces straddling a
    cleared row are split: the part above resumes falling, the part below
    stays settled.
    '''
    global board, staticPieces
    eatenLines = []
    for y in range(BOARDROWS):
        eaten = True
        for x in range(BOARDCOLS):
            if board[y][x] != OCCUPIED_S: eaten = False
        if eaten:
            eatenLines.append(y)
            # clear the row in board
            for x in range(BOARDCOLS): board[y][x] = BLANK
            # clear the row in staticPieces
            for p in staticPieces[:]:
                ptop, pbot = p.split(y)
                if pbot != p:
                    # piece intersected the cleared row: replace with fragments
                    staticPieces.remove(p)
                    if ptop != None:
                        assert len(ptop.boxes)>0
                        fallingPieces.append(ptop)
                        _addToBoard(ptop,OCCUPIED_F)
                    if pbot != None:
                        assert len(pbot.boxes)>0
                        staticPieces.append(pbot)
                        _addToBoard(pbot,OCCUPIED_S)
    return eatenLines
def _calculateScore(eatenLines):
| global level
n = len(eatenLines);
baseScore = 100
if n == 2: baseScore = 300
elif n == 3: baseScore = 500
elif n == 4: baseScore = 800
#TODO: consider combo?
return n * baseScore * level
def _checkCollision(piece):
    '''Return the collision kind for `piece`: COL_STATIC for walls, floor or
    settled blocks; COL_FALLING for overlap with another falling piece;
    COL_NONE otherwise.  Callers should compare against the COL_* constants,
    not a boolean.'''
    #print 'checking collision!!!'
    global board
    assert piece != None
    for x, y in piece.boxes:
        # NOTE(review): y < 0 is not rejected; a negative y would wrap via
        # Python's negative indexing — confirm pieces never rise above row 0.
        if x>=BOARDCOLS or x<0 or y>=BOARDROWS or board[y][x] == OCCUPIED_S:
            return COL_STATIC
    for x, y in piece.boxes:
        if board[y][x] == OCCUPIED_F:
            return COL_FALLING
    return COL_NONE
def _movePiece(command):
    '''Apply a rotate/move command to the current piece (not for moveDown).

    The piece is lifted off the board, moved, reverted if the new position
    collides with anything, and stamped back onto the board.
    '''
    global fallingPieces,currentPiece
    if currentPiece == None: return # try to prune line eating case
    p = currentPiece
    _removeFromBoard(p)
    if command == CMD_ROTATE_R:
        p.rotateRight()
    elif command == CMD_ROTATE_L:
        p.rotateLeft()
    elif command == CMD_MOVE_R:
        p.moveRight()
    elif command == CMD_MOVE_L:
        p.moveLeft()
    # reverse if the command is not possible
    # BUG FIX: _checkCollision returns COL_NONE/COL_STATIC/COL_FALLING, not a
    # bool.  '== True' only matched COL_STATIC (== 1), so collisions with
    # other falling pieces (COL_FALLING == 2) were never reverted.
    if _checkCollision(p) != COL_NONE:
        if command == CMD_ROTATE_L:
            p.rotateRight()
        elif command == CMD_ROTATE_R:
            p.rotateLeft()
        elif command == CMD_MOVE_L:
            p.moveRight()
        elif command == CMD_MOVE_R:
            p.moveLeft()
    _addToBoard(p)
def _getNextPiece ():
    """Pop the preview piece and generate a fresh preview in its place."""
    global nextPiece
    if nextPiece == None:
        # first call after init: prime the preview slot
        nextPiece = _generateNewPiece()
    newPiece = nextPiece
    nextPiece = _generateNewPiece()
    return newPiece
def _generateNewPiece():
    """Take the next (type, rotation) from the queue and build a Piece
    centered at the top of the board."""
    global pendings
    # refill if needed
    if (len(pendings) < PENDING_MIN):
        pendings = pendings + [(random.randrange(TYPES),random.randrange(4)) \
            for i in range(PENDING_MAX - PENDING_MIN)]
    pending = pendings.pop(0);
    #print 'im the real new piece here! u imposters!'
    # NOTE(review): '/' makes the spawn column a float under Python 3; Piece
    # presumably expects an int — use '//' when porting.
    return Piece(pending[0], (BOARDCOLS - PATTERNSIZE)/2, 0, pending[1])
'''
def _cmp(piece1, piece2):
# TODO: error here
y1 = piece1.boxes[len(piece1.boxes)-1][1] # get the lowest y
y2 = piece2.boxes[len(piece2.boxes)-1][1]
if (y1 > y2):
return 1
if (y1 < y2):
return -1
return 0
'''
def _moveDown (piece):
    """Move `piece` one row down, settling it when it hits floor or settled
    blocks; a bump into another falling piece just holds it in place."""
    global board, fallingPieces, staticPieces, currentPiece
    assert piece != None
    _removeFromBoard(piece)
    piece.moveDown()
    col = _checkCollision(piece)
    if col==COL_STATIC:
        # hit the floor or settled blocks: back up and freeze in place
        piece.moveUp()
        fallingPieces.remove(piece)
        staticPieces.append(piece)
        _addToBoard(piece,OCCUPIED_S)
        if piece == currentPiece:
            currentPiece = None
    else:
        if col==COL_FALLING:
            # bumped another falling piece: stay put but keep falling
            piece.moveUp()
        _addToBoard(piece,OCCUPIED_F)
def _getStrBoard():
    """Render the board as text (header plus one line per row) for the log."""
    rendered_rows = [''.join(str(cell) for cell in row) for row in board]
    return '\n---+---+---\n' + '\n'.join(rendered_rows) + '\n'
def _addToBoard(piece, status=OCCUPIED_F):
    """Stamp every (x, y) box of `piece` onto the board as `status`."""
    for col, row in piece.boxes:
        board[row][col] = status
def _removeFromBoard(piece):
for x,y in piece.boxes:
|
import cmd
import json
try:
import readline
except ImportError:
pass
from lib.asmdecoder import AsmDecoder
from lib.uploadprogram import uploadProgram
def run(hardware):
    # Entry point: start an interactive debugger session on `hardware`.
    cli = CommandLineInterface(hardware)
    cli.printStatus()
    cli.cmdloop()
class CommandLineInterface(cmd.Cmd):
def __init__(self, hardware, *args, **kwargs):
cmd.Cmd.__init__(self, *args, **kwargs)
self.hardware = hardware
self.asmdecoder = AsmDecoder(hardware)
self.running = False
self.codeAddressAliases = []
self.codeAddressAliasesDict = {}
self.memoryAddresses = {}
def printStatus(self):
reg = self.hardware.getRegisters()
pc = reg[12] << 8 | reg[13]
print
print ' A: %02X B: %02X SP: %02X' % (reg[0], reg[1], reg[18]<< 8 | reg[19])
print ' R0: %02X R2: %02X R4: %02X R6: %02X' % (reg[2], reg[4], reg[6], reg[8])
print ' R1: %02X R3: %02X R5: %02X R7: %02X' % (reg[3], reg[5], reg[7], reg[9])
print 'PSW: %02X (%s) DPTR: %04X' % (reg[14], self.parsePSW(reg[14]), reg[10] << 8 | reg[11])
print 'Unknown: %02X %02X %02X %02X' % (reg[15], reg[16], reg[17], reg[20])
self.asmdecoder.markVisited(pc)
print
print 'PC = %04X (%s)' % (pc, self.asmdecoder.getStringForAddress(pc))
address, mnemonic = self.asmdecoder.getMnemonic(pc)
self.printInstruction(address, mnemonic, pc, showAlias=False)
def parsePSW(self, psw):
if psw & 0b10000000:
cy = 'C'
else:
cy = '-'
if psw & 0b01000000:
ac = 'A'
else:
ac = '-'
if psw & 0b00100000:
f0 = '*'
else:
f0 = '-'
rs = (psw & 0b00011000) >> 3
if psw & 0b00000100:
ov = 'O'
else:
ov = '-'
if psw & 0b00000010:
f1 = '*'
else:
f1 = '-'
if psw & 0b00000001:
p = 'P'
else:
p = '-'
return '%s%s%s%s%s%s%s' % (cy, ac, f0, rs, ov, f1, p)
def printInstruction(self, address, mnemonic, pc, showAlias=True):
joined = ' '.join((mnemonic[0], ', '.join(mnemonic[1:])))
if address == pc:
marker = '-->'
else:
marker = ' '
if showAlias and address in self.codeAddressAliasesDict:
print ' (%s)' % self.codeAddressAliasesDict[address]
print '%s %04X: %s' % (marker, address, joined)
def do_list(self, line):
'''Shows the previous, current and next instructions located around the specified
address. (Default: program counter)'''
if self.running:
print 'Program is running. Stop execution to issue commands.'
return
instructions = []
pc = self.hardware.getPC()
if line:
target = int(line, 16)
else:
target = self.hardware.getPC()
address = target - 1
for i in xrange(5):
address, mnemonic = self.asmdecoder.getMnemonic(address, direction=-1)
instructions.insert(0, (address, mnemonic))
address -= 1
address = target
for i in xrange(6):
address, mnemonic = self.asmdecoder.getMnemonic(address)
instructions.append((address, mnemonic))
address += 1
for address, mnemonic in instructions:
self.printInstruction(address, mnemonic, pc)
def do_show(self, line):
'''Shows contents of a variable.
Syntax: show <variable>'''
if self.running:
print 'Program is running. Stop execution to issue commands.'
return
if line not in self.memoryAddresses:
print "Variable '%s' not found." % line
return
address = self.memoryAddresses[line]
if address[0] == 'internal':
mem = self.hardware.readDirect(address[1], 0x01)
elif address[0] == 'external':
mem = self.hardware.readExternal(address[1], 0x01)
print '%04X %02X' % (address[1], mem[0])
def do_mem(self, line):
'''Shows memory contents.
Syntax: mem <type> <address>
type can be one of: direct indirect external code (may be abbreviated)
mem shows a block of size 0x20 containing the specified address.'''
if self.running:
print 'Program is running. Stop execution to issue commands.'
return
parts = [part for part in line.split(' ') if part]
if len(parts) != 2 or parts[0][0] not in ('d', 'i', 'e', 'c'):
print 'syntax: mem <type> <address>'
print 'type can be one of: direct indirect external code (may be abbreviated)'
address = int(parts[1], 16) & 0xffe0
if parts[0][0] == 'd':
mem = self.hardware.readDirect(address, 0x20)
elif parts[0][0] == 'i':
mem = self.hardware.readIndirect(address, 0x20)
elif parts[0][0] == 'e':
mem = self.hardware.readExternal(address, 0x20)
elif parts[0][0] == 'c':
mem = self.hardware.readCode(address, 0x20)
print ('%04X ' + ' %02X' * 8) % ((address, ) + tuple(mem[0:8]))
print ('%04X ' + ' %02X' * 8) % ((address + 8, ) + tuple(mem[8:16]))
print ('%04X ' + ' %02X' * 8) % ((address + 16, ) + tuple(mem[16:24]))
print ('%04X ' + ' %02X' * 8) % ((address + 24, ) + tuple(mem[24:32]))
def do_step(self, line):
'Executes the specified number of instructions. (Default: 1)'
if self.running:
print 'Program is running. Stop execution to issue commands.'
return
steps = 1
if line:
steps = int(line)
while steps:
self.hardware.step()
steps -= 1
self.printStatus()
def do_load(self, line):
'''Uploads a program to the hardware.
Syntax: load <path-to-hexfile>'''
if self.running:
print 'Program is running. Stop execution to issue commands.'
return
if line[-4:] == '.hex':
line = line[:-4]
try:
with open(line + '.hex') as inputFile:
prog = inputFile.read()
except IOError, e:
print "Error reading '%s.hex': %s" % (line, e)
return
uploadProgram(prog, self.hardware)
self.do_loadDebug(line)
self.asmdecoder.invalidateCache()
self.asmdecoder.markVisited(self.hardwa | re.getPC())
self.printStatus()
def do_loaddebug(self, line):
'''Loads debug information for a program.
Syntax: loaddebug <path-to-scdebugfile>'''
debugData = {'codeAddressAliases': None, 'memoryAddresses': None}
try:
with open(line + '.scdebug') as inputFile:
debugData = json.load(inputFile)
except IOError, e:
print "Error reading '%s.sc | debug': %s" % (line, e)
return
if line[-8:] == '.scdebug':
line = line[:-8]
self.codeAddressAliases = debugData['codeAddressAliases']
self.asmdecoder.addressAliases = self.codeAddressAliases
self.codeAddressAliasesDict = dict(self.codeAddressAliases)
self.memoryAddresses = debugData['memoryAddresses']
def do_run(self, line):
'''Resumes execution of the program.
go disables all commands and enables stop.'''
if self.running:
print 'Program is running. Stop execution to issue commands.'
return
self.running = True
self.hardware.run()
def do_stop(self, line):
if not self.running:
print "Can't stop. Program is not running."
return
self.hardware.stop()
self.running = False
self.printStatus()
def do_reset(self, line):
if self.running:
print 'Program is running. Stop execution to issue commands.'
return
self.hardware.reset()
self.printStatus()
def do_exit(self, line):
    'Quits the debugger.'
    # A truthy return value tells the command loop to terminate.
    return True
def emptyline(self):
    # An empty input line is a no-op (overrides the default behaviour,
    # which presumably would repeat the last command — cmd.Cmd semantics).
    pass
def do_EOF(self, line):
    'Exits the debugger on end-of-input (e.g. Ctrl-D): prints a newline so the prompt ends cleanly, then delegates to do_exit.'
    print
    return self.do_exit(line)
# def postcmd
|
#!/usr/bin/python
# Copyright (c) 2017-2018 Utrecht University
# GNU General Public License v3.0
# Module metadata consumed by Ansible's documentation tooling.
ANSIBLE_METADATA = {
    'metadata_version': '1.1',
    'supported_by': 'community',
    'status': ['preview']
}
from ansible.module_utils.basic import *
# Record whether python-irodsclient is importable so main() can report a
# clean module failure instead of crashing at import time.
IRODSCLIENT_AVAILABLE = False
try:
    from irods.session import iRODSSession
    from irods.models import User
    from irods.exception import UserDoesNotExist, iRODSException
except ImportError:
    pass
else:
    IRODSCLIENT_AVAILABLE = True
def get_session():
    """Open an iRODS session from ~/.irods/irods_environment.json.

    Returns a tuple of (iRODSSession, parsed environment dict).
    """
    env_file = os.path.expanduser('~/.irods/irods_environment.json')
    with open(env_file) as handle:
        environment = json.load(handle)
    session = iRODSSession(irods_env_file=env_file)
    return (session, environment)
def main():
    """Ansible module entry point: modify one attribute of an iRODS user.

    Required parameters: name (the user), option (attribute to modify)
    and value (new value) — forwarded verbatim to iRODS `users.modify`.
    Terminates via module.exit_json / module.fail_json.
    """
    module = AnsibleModule(
        argument_spec=dict(
            name=dict(default=None, required=True),
            option=dict(default=None, required=True),
            value=dict(default=None, required=True)
        ),
        supports_check_mode=True)

    name = module.params["name"]
    option = module.params["option"]
    value = module.params["value"]

    if IRODSCLIENT_AVAILABLE:
        try:
            session, ienv = get_session()
        except iRODSException:
            # fail_json exits the module; no fall-through.
            module.fail_json(
                msg="Could not establish irods connection. Please check ~/.irods/irods_environment.json"
            )
    else:
        module.fail_json(msg="python-irodsclient needs to be installed")

    changed = False
    try:
        # In check mode the modification is skipped but still reported as a
        # change below (the try body raises nothing, so `else` runs).
        if not module.check_mode:
            resource = session.users.modify(name, option, value)
    except UserDoesNotExist:
        module.fail_json(msg="User does not exist.")
    else:
        changed = True

    module.exit_json(
        changed=changed,
        irods_environment=ienv)


if __name__ == '__main__':
    main()
|
ustom_field, obj.custom_fields),
'_meta': {
'links': {
'self': route_to_abs(helpers.url_for(PostApi.name, id=obj.id)),
'comments': route_to_abs(helpers.url_for(CommentCollectionApi.name, parent_id=obj.id))
},
'supports': ['GET', 'PUT', 'DELETE'],
'media_type': PostApi.media_type
}
}
def get(self, id):
    """Fetch post `id` over XML-RPC and render it as JSON."""
    raw_post = wp.call(wp_methods.posts.GetPost(id))
    body = PostApi.from_xmlrpc(raw_post)
    return json.jsonify(body)
class PostCollectionApi(MethodView):
    """Paginated collection endpoint for WordPress posts."""
    name = 'posts'

    def get(self):
        page = int(request.values.get('page', 1))
        post_type = request.values.get('post_type', 'post')
        posts = wp.call(wp_methods.posts.GetPosts({
            'number': default_page_size,
            'offset': (page - 1) * default_page_size,
            'post_type': post_type
        }))
        response = {}
        response['items'] = map(PostApi.from_xmlrpc, posts)
        meta = {
            'supports': ['GET', 'POST']
        }
        links = {}
        paging_params = {}
        # Preserve a non-default post_type in the paging links.
        if (post_type != 'post'):
            paging_params['post_type'] = post_type
        # A full page suggests more results may follow.
        if len(posts) == default_page_size:
            links['next'] = route_to_abs(helpers.url_for(PostCollectionApi.name, page=page+1, **paging_params))
        if page > 1:
            params = {}
            if (page > 2):
                # BUG FIX: the previous-page link must point to page - 1
                # (it previously pointed to page + 1). Page 1 is the
                # default, so it needs no explicit 'page' parameter.
                params['page'] = page - 1
            links['prev'] = route_to_abs(helpers.url_for(PostCollectionApi.name, **dict(paging_params, **params)))
        meta['links'] = links
        response['_meta'] = meta
        return json.jsonify(response)
class CommentApi(MethodView):
    """Resource endpoint for a single WordPress comment."""
    name = 'comment'
    media_type = 'application/vnd.wordpress.comment.v1'

    @staticmethod
    def from_xmlrpc(obj):
        """Convert an XML-RPC comment object into the JSON representation."""
        self_url = route_to_abs(helpers.url_for(CommentApi.name, parent_id=obj.post, id=obj.id))
        meta = {
            'media_type': CommentApi.media_type,
            'supports': ['GET', 'PUT', 'DELETE'],
            'links': {
                'self': self_url
            }
        }
        comment = {'_meta': meta}
        comment['id'] = obj.id
        comment['date'] = obj.date_created.isoformat()
        comment['status'] = obj.status
        comment['content'] = obj.content
        comment['link'] = obj.link
        comment['author'] = obj.author
        comment['author_url'] = obj.author_url
        comment['author_email'] = obj.author_email
        comment['author_ip'] = obj.author_ip
        return comment

    def get(self, parent_id, id):
        """Render one comment; parent_id is part of the route but unused here."""
        raw = wp.call(wp_methods.comments.GetComment(id))
        return json.jsonify(CommentApi.from_xmlrpc(raw))
class CommentCollectionApi(MethodView):
    """Paginated collection of comments under a single post."""
    name = 'comments'

    def get(self, parent_id):
        page = int(request.values.get('page', 1))
        raw_comments = wp.call(wp_methods.comments.GetComments({
            'post_id': parent_id,
            'number': default_page_size,
            'offset': (page - 1) * default_page_size
        }))
        links = {
            'self': route_to_abs(helpers.url_for(CommentCollectionApi.name, parent_id=parent_id)),
            'parent': route_to_abs(helpers.url_for(PostApi.name, id=parent_id))
        }
        response = {
            'items': map(CommentApi.from_xmlrpc, raw_comments),
            '_meta': {
                'supports': ['GET', 'POST'],
                'links': links
            }
        }
        return json.jsonify(response)
class UserApi(MethodView):
    """Read-only resource endpoint for a single WordPress user."""
    name = 'user'
    media_type = 'application/vnd.wordpress.user.v1'

    @staticmethod
    def from_xmlrpc(obj):
        """Convert an XML-RPC user object into the JSON representation."""
        meta = {
            'media_type': UserApi.media_type,
            'supports': ['GET'],
            'links': {
                'self': route_to_abs(helpers.url_for(UserApi.name, id=obj.id))
            }
        }
        user = {'_meta': meta}
        user['id'] = obj.id
        user['username'] = obj.username
        user['nickname'] = obj.nickname
        user['description'] = obj.bio
        user['email'] = obj.email
        user['url'] = obj.url
        return user

    def get(self, id):
        raw = wp.call(wp_methods.users.GetUser(id))
        return json.jsonify(UserApi.from_xmlrpc(raw))
class UserCollectionApi(MethodView):
    """Paginated, read-only collection of WordPress users."""
    name = 'users'

    def get(self):
        page = int(request.values.get('page', 1))
        raw_users = wp.call(wp_methods.users.GetUsers({
            'number': default_page_size,
            'offset': (page - 1) * default_page_size
        }))
        response = {
            'items': map(UserApi.from_xmlrpc, raw_users),
            '_meta': {'supports': ['GET']}
        }
        return json.jsonify(response)
class FileApi(MethodView):
    """Placeholder endpoint for a single file; GET is not implemented yet."""
    name = 'file'

    def get(self):
        # Stub response until real media handling is implemented.
        return 'get_file'
class FileCollectionApi(MethodView):
    """Placeholder endpoint for the file collection; not implemented yet."""
    name = 'files'

    def get(self):
        # Stub response until real media handling is implemented.
        return 'get_files'
class ImageApi(MethodView):
    """Placeholder endpoint for a single image; GET is not implemented yet."""
    name = 'image'

    def get(self):
        # Stub response until real media handling is implemented.
        return 'get_image'
class ImageCollectionApi(MethodView):
    """Placeholder endpoint for the image collection; not implemented yet."""
    name = 'images'

    def get(self):
        # Stub response until real media handling is implemented.
        return 'get_images'
class VideoApi(MethodView):
    """Placeholder endpoint for a single video; GET is not implemented yet."""
    name = 'video'

    def get(self):
        # Stub response until real media handling is implemented.
        return 'get_video'
class VideoCollectionApi(MethodView):
    """Placeholder endpoint for the video collection; not implemented yet."""
    name = 'videos'

    def get(self):
        # Stub response until real media handling is implemented.
        return 'get_videos'
class AudioApi(MethodView):
    """Placeholder endpoint for a single audio item; not implemented yet."""
    # 'audio_item' avoids clashing with AudioCollectionApi's route name 'audio'.
    name = 'audio_item'

    def get(self):
        # Stub response until real media handling is implemented.
        return 'get_audio'
class AudioCollectionApi(MethodView):
    """Placeholder endpoint for the audio collection; not implemented yet."""
    name = 'audio'

    def get(self):
        # Stub response until real media handling is implemented.
        return 'get_audio'
class TaxonomyApi(MethodView):
    """Resource endpoint for a single WordPress taxonomy."""
    name = 'taxonomy'
    media_type = 'application/vnd.wordpress.taxonomy.v1'

    @staticmethod
    def from_xmlrpc(obj):
        """Convert an XML-RPC taxonomy object into the JSON representation."""
        taxonomy = {}
        taxonomy['name'] = obj.name
        taxonomy['label'] = obj.label
        taxonomy['hierarchical'] = obj.hierarchical
        taxonomy['public'] = obj.public
        taxonomy['show_ui'] = obj.show_ui
        taxonomy['is_builtin'] = obj.is_builtin
        taxonomy['object_types'] = obj.object_type
        taxonomy['_meta'] = {
            'supports': ['GET'],
            'media_type': TaxonomyApi.media_type,
            'links': {
                # Taxonomies are addressed by name rather than numeric id.
                'self': route_to_abs(helpers.url_for(TaxonomyApi.name, id=obj.name)),
                'terms': route_to_abs(helpers.url_for(TaxonomyTermCollectionApi.name, parent_id=obj.name))
            }
        }
        return taxonomy

    def get(self, id):
        raw = wp.call(wp_taxonomies.GetTaxonomy(id))
        return json.jsonify(TaxonomyApi.from_xmlrpc(raw))
class TaxonomyCollectionApi(MethodView):
    """Collection of every registered WordPress taxonomy."""
    name = 'taxonomies'

    def get(self):
        raw_taxonomies = wp.call(wp_taxonomies.GetTaxonomies())
        response = {
            'items': map(TaxonomyApi.from_xmlrpc, raw_taxonomies),
            '_meta': {'supports': ['GET']}
        }
        return json.jsonify(response)
class TaxonomyTermApi(MethodView):
name = 'term'
media_type = 'application/vnd.wordpress.taxonomyterm.v1'
@staticmethod
def from_xmlrpc(obj):
    """Convert an XML-RPC taxonomy-term object to its JSON representation.

    Embeds a summary of the owning taxonomy and, when the term has a real
    parent, a link to that parent term.
    """
    term = {
        'id': obj.id,
        'name': obj.name,
        'slug': obj.slug,
        'description': obj.description,
        'count': obj.count,
        # Summary of the taxonomy this term belongs to.
        'taxonomy': {
            '_meta': {
                'links': {
                    'self': route_to_abs(helpers.url_for(TaxonomyApi.name, id=obj.taxonomy)),
                },
                'media_type': TaxonomyApi.media_type,
                'supports': ['GET']
            },
            'name': obj.taxonomy
        },
        '_meta': {
            'supports': ['GET', 'PUT', 'DELETE'],
            'media_type': TaxonomyTermApi.media_type,
            'links': {
                'self': route_to_abs(helpers.url_for(TaxonomyTermApi.name, parent_id=obj.taxonomy, id=obj.id))
            }
        }
    }
    # obj.parent apparently arrives as a string id (hence the int() cast);
    # a falsy/zero value means the term is top-level — TODO confirm.
    if obj.parent and int(obj.parent) > 0:
        term['parent'] = {
            'id': obj.parent,
            'self': route_to_abs(helpers.url_for(TaxonomyTermApi.name, parent_id=obj.taxonomy, id=obj.parent))
        }
    return term
def get(self, parent_id, id):
term = wp.call(wp_taxon |
# -*- coding: utf-8 -*-
#------------------------------------------------------------
# pelisalacarta - XBMC Plugin
# Conector para rtpa
# http://blog.tvalacarta.info/plugin-xbmc/pelisalacarta/
#------------------------------------------------------------
import urlparse,urllib2,urllib,re
import os
from core import scrapertools |
from core import logger
from core import config
def get_video_url( page_url , premium = False , user="" , password="", video_password="", page_data="" ):
    """Extract the direct mp4 URL from an rtpa player page."""
    logger.info("tvalacarta.servers.rtpa get_video_url(page_url='%s')" % page_url)
    page_source = scrapertools.cache_page(page_url)
    media_url = scrapertools.get_match(page_source, "'file'\: '([^']+)'")
    video_urls = [["(mp4) [rtpa]", media_url]]
    for video_url in video_urls:
        logger.info("tvalacarta.servers.rtpa %s - %s" % (video_url[0], video_url[1]))
    return video_urls
# Finds this server's videos in the given text
def find_videos(data):
    """Scan *data* for embedded rtpa video URLs.

    Currently a stub: no embeddable pattern is defined for this server,
    so the result is always an empty list.
    """
    found = []
    return found
|
e=False,
make_binary=True)
self.assertAlmostEqual(self.res["default_digits_binary"],
sklearn.metrics.accuracy_score(
targets, predictions),
places=self.res.get(
"default_digits_binary_places", 7))
def test_default_digits(self):
    """Accuracy (and call count, if configured) on the digits dataset."""
    if self.__class__ == BaseClassificationComponentTest:
        return
    # Two identical rounds, mirroring the other default_* tests.
    for _ in range(2):
        predictions, targets, n_calls = _test_classifier(
            dataset="digits", classifier=self.module)
        accuracy = sklearn.metrics.accuracy_score(targets, predictions)
        self.assertAlmostEqual(
            self.res["default_digits"], accuracy,
            places=self.res.get("default_digits_places", 7))
        if self.res.get("digits_n_calls"):
            self.assertEqual(self.res["digits_n_calls"], n_calls)
def test_default_digits_iterative_fit(self):
    """Accuracy of iterative fitting on digits, plus step-count check."""
    if self.__class__ == BaseClassificationComponentTest:
        return
    if not hasattr(self.module, 'iterative_fit'):
        # Component has no iterative training mode; nothing to test.
        return
    for _ in range(2):
        predictions, targets, classifier = _test_classifier_iterative_fit(
            dataset="digits", classifier=self.module)
        accuracy = sklearn.metrics.accuracy_score(targets, predictions)
        self.assertAlmostEqual(
            self.res["default_digits_iterative"], accuracy,
            places=self.res.get("default_digits_iterative_places", 7))
        if self.step_hyperparameter is not None:
            step_name = self.step_hyperparameter['name']
            expected_steps = self.res.get(
                "digits_iterative_n_iter", self.step_hyperparameter['value'])
            self.assertEqual(
                getattr(classifier.estimator, step_name), expected_steps)
def test_default_digits_multilabel(self):
    """Macro-averaged precision on a multilabel variant of digits."""
    if self.__class__ == BaseClassificationComponentTest:
        return
    if not self.module.get_properties()["handles_multilabel"]:
        # Component does not support multilabel targets; skip.
        return
    for _ in range(2):
        predictions, targets, _ = _test_classifier(
            classifier=self.module, dataset='digits', make_multilabel=True
        )
        score = sklearn.metrics.precision_score(
            targets, predictions, average='macro', zero_division=0
        )
        self.assertAlmostEqual(
            self.res["default_digits_multilabel"], score,
            places=self.res.get("default_digits_multilabel_places", 7)
        )
def test_default_digits_multilabel_predict_proba(self):
    """Shape and ROC-AUC of predict_proba output on multilabel digits."""
    if self.__class__ == BaseClassificationComponentTest:
        return
    if not self.module.get_properties()["handles_multilabel"]:
        return
    for _ in range(2):
        predictions, targets = _test_classifier_predict_proba(
            classifier=self.module, make_multilabel=True)
        self.assertEqual(predictions.shape, (50, 3))
        auc = sklearn.metrics.roc_auc_score(
            targets, predictions, average='macro')
        self.assertAlmostEqual(
            self.res["default_digits_multilabel_proba"], auc,
            places=self.res.get("default_digits_multilabel_proba_places", 7))
def test_target_algorithm_multioutput_multiclass_support(self):
    """The wrapped sklearn estimator must reject multioutput-multiclass y."""
    if self.__class__ == BaseClassificationComponentTest:
        return
    if not self.module.get_properties()["handles_multiclass"]:
        return
    if self.sk_module is None:
        # No raw sklearn estimator available to probe.
        return
    cls = self.sk_module
    X = np.random.random((10, 10))
    y = np.random.randint(0, 1, size=(10, 10))
    self.assertRaisesRegex(
        ValueError,
        'bad input shape \\(10, 10\\)',
        cls.fit,
        X,
        y)
def test_module_idempotent(self):
""" Fitting twice with the same config gives the same model params.
This is only valid when the random_state passed is an int. If a
RandomState object is passed then repeated calls to fit will have
different results. See the section on "Controlling Randomness" in the
sklearn docs.
https://scikit-learn.org/0.24/common_pitfalls.html#controlling-randomness
"""
if self.__class__ == BaseClassificationComponentTest:
return
classifier_cls = self.module
X = np.array([
[0, 0], [0, 1], [1, 0], [1, 1],
[0, 0], [0, 1], [1, 0], [1, 1],
[0, 0], [0, 1], [1, 0], [1, 1],
[0, 0], [0, 1], [1, 0], [1, 1],
])
y = np.array([
0, 1, 1, 0,
0, 1, 1, 0,
0, 1, 1, 0,
0, 1, 1, 0,
])
# There are certain errors we ignore so we wrap this in a function
def fitted_params(model) -> Optional[Dict]:
"""
Returns the params if fitted successfully, else None if an
acceptable error occurs
"""
# We are okay with Numerical in Quadractic disciminant analysis
def is_QDA_error(err):
return "Numerical problems in QDA" in err.args[0]
# We are okay if the BaseClassifier in AdaBoostClassifier is worse
# than random so no ensemble can be fit
def is_AdaBoostClassifier_error(err):
return ("BaseClassifier in AdaBoostClassifier ensemble is worse"
+ " than random, ensemble can not be fit." in err.args[0])
def is_unset_param_raw_predictions_val_error(err):
return ("local variable 'raw_predictions_val' referenced before"
+ " assignment" in err.args[0])
try:
with ignore_warnings(classifier_warnings):
model.fit(X.copy(), y.copy())
except ValueError as e:
if is_AdaBoostClassifier_error(e) or is_QDA_error(e):
return None
except UnboundLocalError as e:
if is_unset_param_raw_predictions_val_error(e):
return None
return model.estimator.get_params()
# We ignore certain keys when comparing
param_keys_ignored = ['base_estimator']
# We use the default config + sampled ones
configuration_space = classifier_cls.get_hyperparameter_search_space()
default = configuration_space.get_default_configuration()
sampled = [configuration_space.sample_configuration() for _ in range(2)]
for seed, config in enumerate([default] + sampled):
model_args = {"random_state": seed, **config}
classifier = classifier_cls(**model_args)
# Get the parameters on the first and second fit with config params
params_first = fitted_params(classifier)
if hasattr(classifier.estimator, 'random_state'):
rs_1 = classifier.random_state
rs_estimator_1 = classifier.estimator.random_state
params_second = fitted_params(classifier)
if hasattr(classifier.estimator, 'random_state'):
rs_2 = classifier.random_state
rs_estimator_2 = classifier.estimator.random_state
# An acceptable error occured, skip to next sample
if params_first is None or params_second is None:
continue
# Remove keys we don't wish to include in the comparison
for params in [params_first, para |
# This is a modified version of original twilio_sms Gluu's script to work with Casa
from java.util import Arrays
from javax.faces.application import FacesMessage
from org.gluu.jsf2.message import FacesMessages
from org.gluu.oxauth.security import Identity
from org.gluu.oxauth.service import UserService, AuthenticationService
from org.gluu.oxauth.util import ServerUtil
from org.gluu.model.custom.script.type.auth import PersonAuthenticationType
from org.gluu.service.cdi.util import CdiUtil
from org.gluu.util import StringHelper, ArrayHelper
from com.google.common.base import Joiner
from com.twilio import Twilio
import com.twilio.rest.api.v2010.account.Message as TwMessage
from com.twilio.type import PhoneNumber
import random
import sys
class PersonAuthentication(PersonAuthenticationType):
    """Casa-adapted Twilio SMS one-time-passcode script for Gluu oxAuth.

    Step 1 validates username/password, generates a 6-digit code and
    either sends it straight away (single enrolled mobile number) or
    records the candidate numbers so an extra step can ask which one to
    use. The final step compares the submitted passcode with the one sent.
    """

    def __init__(self, currentTimeMillis):
        self.currentTimeMillis = currentTimeMillis

    def init(self, customScript, configurationAttributes):
        print "Twilio SMS. Initialized"
        return True

    def destroy(self, configurationAttributes):
        print "Twilio SMS. Destroyed successfully"
        return True

    def getApiVersion(self):
        # Version of the oxAuth custom-script API this script targets.
        return 11

    def getAuthenticationMethodClaims(self, configurationAttributes):
        return None

    def isValidAuthenticationMethod(self, usageType, configurationAttributes):
        return True

    def getAlternativeAuthenticationMethod(self, usageType, configurationAttributes):
        return None

    def authenticate(self, configurationAttributes, requestParameters, step):
        """Perform one authentication step; returns True on success."""
        print "TwilioSMS. Authenticate for Step %s" % str(step)
        identity = CdiUtil.bean(Identity)
        authenticationService = CdiUtil.bean(AuthenticationService)
        user = authenticationService.getAuthenticatedUser()
        if step == 1:
            if user == None:
                # No authenticated session yet: validate posted credentials.
                credentials = identity.getCredentials()
                user_name = credentials.getUsername()
                user_password = credentials.getPassword()
                if StringHelper.isNotEmptyString(user_name) and StringHelper.isNotEmptyString(user_password):
                    authenticationService.authenticate(user_name, user_password)
                    user = authenticationService.getAuthenticatedUser()
            if user == None:
                return False
            #Attempt to send message now if user has only one mobile number
            mobiles = user.getAttributeValues("mobile")
            if mobiles == None:
                return False
            else:
                code = random.randint(100000, 999999)
                identity.setWorkingParameter("randCode", code)
                # Twilio credentials and sender number come from the
                # custom-script configuration properties.
                sid = configurationAttributes.get("twilio_sid").getValue2()
                token = configurationAttributes.get("twilio_token").getValue2()
                self.from_no = configurationAttributes.get("from_number").getValue2()
                Twilio.init(sid, token)
                if mobiles.size() == 1:
                    self.sendMessage(code, mobiles.get(0))
                else:
                    # Keep only the last 4 digits of each number for display.
                    chopped = ""
                    for numb in mobiles:
                        l = len(numb)
                        chopped += "," + numb[max(0, l-4) : l]
                    #converting to comma-separated list (identity does not remember lists in 3.1.3)
                    identity.setWorkingParameter("numbers", Joiner.on(",").join(mobiles.toArray()))
                    identity.setWorkingParameter("choppedNos", chopped[1:])
                return True
        else:
            if user == None:
                return False
            session_attributes = identity.getSessionId().getSessionAttributes()
            code = session_attributes.get("randCode")
            numbers = session_attributes.get("numbers")
            if step == 2 and numbers != None:
                #Means the selection number page was used
                idx = ServerUtil.getFirstValue(requestParameters, "OtpSmsloginForm:indexOfNumber")
                if idx != None and code != None:
                    sendToNumber = numbers.split(",")[int(idx)]
                    self.sendMessage(code, sendToNumber)
                    return True
                else:
                    return False
            success = False
            # NOTE(review): randCode was stored as an int; this compares it
            # with the submitted string, so the session attribute is
            # presumably returned as a string here — verify.
            form_passcode = ServerUtil.getFirstValue(requestParameters, "OtpSmsloginForm:passcode")
            if form_passcode != None and code == form_passcode:
                print "TwilioSMS. authenticate. 6-digit code matches with code sent via SMS"
                success = True
            else:
                facesMessages = CdiUtil.bean(FacesMessages)
                facesMessages.setKeepMessages()
                facesMessages.clear()
                facesMessages.add(FacesMessage.SEVERITY_ERROR, "Wrong code entered")
            return success

    def prepareForStep(self, configurationAttributes, requestParameters, step):
        print "TwilioSMS. Prepare for Step %s" % str(step)
        return True

    def getExtraParametersForStep(self, configurationAttributes, step):
        # Keep these working parameters available across later steps.
        if step > 1:
            return Arrays.asList("randCode", "numbers", "choppedNos")
        return None

    def getCountAuthenticationSteps(self, configurationAttributes):
        print "TwilioSMS. getCountAuthenticationSteps called"
        # Two steps for a single enrolled number; three when the user must
        # first choose one of several numbers.
        if CdiUtil.bean(Identity).getWorkingParameter("numbers") == None:
            return 2
        else:
            return 3

    def getPageForStep(self, configurationAttributes, step):
        print "TwilioSMS. getPageForStep called %s" % step
        print "numbers are %s" % CdiUtil.bean(Identity).getWorkingParameter("numbers")
        defPage = "/casa/otp_sms.xhtml"
        if step == 2:
            if CdiUtil.bean(Identity).getWorkingParameter("numbers") == None:
                return defPage
            else:
                return "/casa/otp_sms_prompt.xhtml"
        elif step == 3:
            return defPage
        return ""

    def logout(self, configurationAttributes, requestParameters):
        return True

    def hasEnrollments(self, configurationAttributes, user):
        # Enrollment requires a mobile number on the user entry.
        return user.getAttribute("mobile") != None

    def sendMessage(self, code, numb):
        """Send the passcode to `numb` via Twilio, prefixing '+' if absent."""
        try:
            if numb[:1] != "+":
                numb = "+" + numb
            print "TwilioSMS. Sending SMS message (%s) to %s" % (code, numb)
            msg = "%s is your passcode to access your account" % code
            message = TwMessage.creator(PhoneNumber(numb), PhoneNumber(self.from_no), msg).create()
            print "TwilioSMS. Message Sid: %s" % message.getSid()
        except:
            # Best-effort: log the failure; the step will simply not verify.
            print "TwilioSMS. Error sending message", sys.exc_info()[1]
|
"""Runner for testing app and bluepr | int logging individually"""
import subprocess
from tests.samples import app
if __name__ == '__main | __':
app.run()
subprocess.call(['rm', '-rf', 'logs'])
|
from default_test_case import DefaultTestCase
from sns_project import SnsProject
class TestS | nsProjects(DefaultTestCase):
def test_project_with_defaults(self):
project = SnsProject.find(self.loaded_config.sns_projects, 'project_with_defaults')
self.assertEqual(project.raw_message, False)
self.assertEqual(project.env, 'default_sns_env')
self.assertEqual(project.region, 'default_sns_region')
def test_project_with_custom_env(self):
project = SnsProject.find(self.loaded_config.sns_projects, 'project_wi | th_custom_env')
self.assertEqual(project.raw_message, False)
self.assertEqual(project.env, 'custom_sns_env')
self.assertEqual(project.region, 'default_sns_region')
def test_project_with_custom_raw_message(self):
project = SnsProject.find(self.loaded_config.sns_projects, 'project_with_custom_raw_message')
self.assertEqual(project.raw_message, True)
self.assertEqual(project.env, 'default_sns_env')
self.assertEqual(project.region, 'default_sns_region')
def test_project_with_custom_region(self):
project = SnsProject.find(self.loaded_config.sns_projects, 'project_with_custom_region')
self.assertEqual(project.raw_message, False)
self.assertEqual(project.env, 'default_sns_env')
self.assertEqual(project.region, 'custom_sns_region')
if __name__ == '__main__':
    # BUG FIX: `unittest` was never imported at module level, so running
    # this file directly raised NameError. Import it where first needed.
    import unittest
    unittest.main()
|
+xs+'\n'
if hplat=='win':
rcc=ck.access({'action':'convert_to_cygwin_path',
'module_uoa':cfg['module_deps']['os'],
'path':pib})
if rcc['return']==0:
sb+=eset+' '+envp_b+'_CYGWIN='+xs+rcc['path']+xs+'\n'
if plib!='':
sb+=eset+' '+envp_l+'='+xs+plib+xs+'\n'
if hplat=='win':
rcc=ck.access({'action':'convert_to_cygwin_path',
'module_uoa':cfg['module_deps']['os'],
'path':plib})
if rcc['return']==0:
sb+=eset+' '+envp_l+'_CYGWIN='+xs+rcc['path']+xs+'\n'
if piib!='':
sb+=eset+' '+envp_i+'='+xs+piib+xs+'\n'
if hplat=='win':
rcc=ck.access({'action':'convert_to_cygwin_path',
'module_uoa':cfg['module_deps']['os'],
'path':piib})
if rcc['return']==0:
sb+=eset+' '+envp_i+'_CYGWIN='+xs+rcc['path']+xs+'\n'
if sadd!='':
sb+='\n'+sadd
# Add all env
for k in sorted(env):
v=str(env[k])
if eifs!='' and wb!='yes':
if v.find(' ')>=0 and not v.startswith(eifs):
v=eifs+v+eifs
sb+=eset+' '+k+'='+v+'\n'
sb+='\n'
# Add to existing vars
if cus.get('add_to_path','')=='yes' or (cus.get('skip_add_to_path','')!='yes' and cus.get('skip_add_to_bin','')!='yes' and cus.get('skip_dirs','')!='yes' and pi!=''):
sb+=eset+' PATH='+svarb+envp_b+svare+evs+svarb+'PATH'+svare+'\n'
if pi!='' and cus.get('skip_add_to_ld_path','')!='yes' and cus.get('skip_dirs','')!='yes':
sb+=eset+' '+elp+'='+svarb+envp_l+svare+evs+svarb+elp+svare+'\n'
sb+=eset+' '+ellp+'='+svarb+envp_l+svare+evs+svarb+ellp+svare+'\n'
# Say that environment is set (to avoid recursion)
sb+=eset+' '+envps+'=1\n'
# Finish environment batch file
if wb=='yes':
sb+='\n'
sb+='exit /b 0\n'
# Check if save to bat file *****************************************************************************************
bf=i.get('bat_file', '')
pnew=''
if bf=='':
bf=cfg['default_bat_name']+hosd.get('script_ext','')
# Preparing to add or update entry
xx='added'
ltags=sorted(ltags)
dd={'tags':ltags,
'setup':setup,
'env':env,
'deps':deps,
'soft_uoa':duid,
'customize':cus,
'env_script':bf}
if duid!='':
dd['soft_uoa']=duid
pduoa=i.get('package_uoa','')
if pduoa!='':
dd['package_uoa']=pduoa
# Probably should have come from pi, but there are too many sources of pi !
#
install_location = i.get('install_path', i.get('full_path_install', ''))
dd['install_location'] = install_location
ii={'action':'add',
'module_uoa':cfg['module_deps']['env'],
'dict':dd,
'sort_keys':'yes',
'substitute':'yes'}
if enduoa!='': ii['data_uoa']=enduoa
if enruoa!='': ii['repo_uoa']=enruoa
if update:
ii['action']='update'
xx='updated'
# Adding/updating
if dname!='':
ii['data_name']=dname
rx=ck.access(ii)
if rx['return']>0: return rx
enduoa=rx['data_uoa']
enduid=rx['data_uid']
pnew=rx['path']
if o=='con':
ck.out('')
ck.out('Environment entry '+xx+' ('+enduoa+')!')
# Record batch file
if pnew=='': pb=bf
else: pb=os.path.join(pnew, bf)
# Write file
rx=ck.save_text_file({'text_file':pb, 'string':sb})
if rx['return']>0: return rx
return {'return':0, 'env_data_uoa':enduoa, 'env_data_uid':enduid, 'deps':deps}
##############################################################################
# search tool in pre-defined paths
def search_tool(i):
    """Search a list of pre-defined paths for a tool.

    Input:  {
              path_list             - directories to search
              file_name             - exact file name, or a pattern when it
                                      contains '*' or '?'
              (recursion_level_max) - if >0, limit dir recursion
              (can_be_dir)          - if 'yes', return matched directories as well
              (return_symlinks)     - if 'yes', symlinks are returned as-is
                                      (resolution below is currently disabled,
                                      so this is always the effective behaviour)
            }

    Output: {
              return       - return code = 0, if successful
                             > 0, if error
              (error)      - error text if return > 0
              list         - list of found paths (see ck.list_all_files)
              elapsed_time - wall-clock seconds spent searching
            }
    """
    import time
    import os

    o = i.get('out', '')
    start_time = time.time()

    file_name = i['file_name']
    pattern = ''
    # A wildcard in the name turns the search into a pattern match.
    if file_name.find('?') >= 0 or file_name.find('*') >= 0:
        pattern = file_name
        file_name = ''

    recursion_level_max = i.get('recursion_level_max', 0)
    can_be_dir = i.get('can_be_dir', '')
    return_symlinks = i.get('return_symlinks', '')

    found = []
    for path in i['path_list']:
        if o == 'con':
            ck.out(' * Searching in '+path+' ...')
        r = list_all_files({'path': path,
                            'file_name': file_name,
                            'pattern': pattern,
                            'can_be_dir': can_be_dir,
                            'recursion_level_max': recursion_level_max})
        if r['return'] > 0:
            return r
        found.extend(r['list'])

    elapsed_time = time.time() - start_time
    return {'return': 0, 'list': found, 'elapsed_time': elapsed_time}
##############################################################################
# A helper function for list_all_files()
def _internal_check_encoded_path_is_dir( path ):
"""
Need this complex structure to support UTF-8 file names in Python 2.7
"""
import os
import sys
try:
if os.path.isdir( path ):
return path
except Exception as e:
try:
path = path.encode('utf-8')
if os.path.isdir( path ):
return path
except Exception as e:
try:
path = path.encode(sys.stdin.encoding)
if os.path.isdir(p):
return path
except Exception as e:
pass
return None
##############################################################################
# List all files recursively in a given directory
def list_all_files(i):
"""
Input: {
path - top level path
(file_name) - search for a specific file name
(pattern) - return only files with this pattern
(path_ext) - path extension (needed for recursion)
(can_be_dir) - if 'yes', return matched directories as well
(recursion_level_max) - if >0, limit dir recursion
}
Output: {
return - return code = 0, if successful
> 0, if error
(error) - error text if return > 0
list - list of found files
}
"""
import sys
import os
list_of_results=[]
fname = i.get('file_name', '')
fname_with_sep_bool = fname.find(os.sep)>=0
can_be_dir = i.get('can_be_dir', '')
can_be_dir_bool = can_be_dir == 'yes'
pattern=i.get('pattern','')
if pattern!='':
import fnmatch
pe = i.get('path_ext', '')
po = i.get('path', '')
if sys.version_info[0]<3: po=unicode(po)
rl=i.ge |
# coding: utf-8
#
# Copyright 2017 The Oppia Authors. All Rights Reserved.
#
# Licensed under | the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writi | ng, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for text classifier."""
import json
import os
from core.classifiers.TextClassifier import TextClassifier
from core.tests import test_utils
import vmconf
def _load_training_data():
    """Load the sample string-classifier dataset from the datasets dir."""
    file_path = os.path.join(
        vmconf.DATASETS_DIR, 'string_classifier_data.json')
    with open(file_path, 'r') as f:
        return json.load(f)
class TextClassifierTests(test_utils.GenericTestBase):
    """Tests for text classifier."""

    def setUp(self):
        super(TextClassifierTests, self).setUp()
        self.clf = TextClassifier.TextClassifier()
        self.training_data = _load_training_data()

    def test_that_text_classifier_works(self):
        """Train end-to-end and check the exported model validates."""
        self.clf.train(self.training_data)
        self.clf.validate(self.clf.to_dict())

    def test_text_classifier_performance(self):
        """Measure run-time and f1 score of the trained classifier.

        The weighted f1 score for the test dataset should be at least
        0.85, and training should take no more than 2 seconds.
        """
        self.clf.train(self.training_data)
        self.assertGreaterEqual(self.clf.best_score, 0.85)
        self.assertLessEqual(self.clf.exec_time, 2)
|
default (name, email, language, and username). If you don't need
all those attributes for your app, you can request fewer with
the ax_attrs keyword argument.
.. versionchanged:: 3.1
Returns a `.Future` and takes an optional callback. These are
not strictly necessary as this method is synchronous,
but they are supplied for consistency with
`OAuthMixin.authorize_redirect`.
"""
callback_uri = callback_uri or self.request.uri
args = self._openid_args(callback_uri, ax_attrs=ax_attrs)
self.redirect(self._OPENID_ENDPOINT + "?" + urllib_parse.urlencode(args))
callback()
@_auth_return_future
def get_authenticated_user(self, callback, http_client=None):
    """Fetches the authenticated user data upon redirect.

    This method should be called by the handler that receives the
    redirect from the `authenticate_redirect()` method (which is
    often the same as the one that calls it; in that case you would
    call `get_authenticated_user` if the ``openid.mode`` parameter
    is present and `authenticate_redirect` if it is not).

    The result of this method will generally be used to set a cookie.
    """
    # Verify the OpenID response via direct request to the OP
    args = dict((k, v[-1]) for k, v in self.request.arguments.items())
    args["openid.mode"] = u("check_authentication")
    url = self._OPENID_ENDPOINT
    if http_client is None:
        http_client = self.get_auth_http_client()
    # POST the full argument set back to the provider; the response is
    # handled (and the future resolved) in _on_authentication_verified.
    http_client.fetch(url, functools.partial(
        self._on_authentication_verified, callback),
        method="POST", body=urllib_parse.urlencode(args))
def _openid_args(self, callback_uri, ax_attrs=[], oauth_scope=None):
url = urlparse.urljoin(self.request.full_url(), callback_uri)
args = {
"openid.ns": "http://specs.openid.net/auth/2.0",
"openid.claimed_id":
"http://specs.openid.net/auth/2.0/identifier_select",
"openid.identity":
"http://specs.openid.net/auth/2.0/identifier_select",
"openid.return_to": url,
"openid.realm": urlparse.urljoin(url, '/'),
"openid.mode": "checkid_setup",
}
if ax_attrs:
args.update({
"openid.ns.ax": "http://openid.net/srv/ax/1.0",
"openid.ax.mode": "fetch_request",
})
ax_attrs = set(ax_attrs)
required = []
if "name" in ax_attrs:
ax_attrs -= set(["name", "firstname", "fullname", "lastname"])
required += ["firstname", "fullname", "lastname"]
args.update({
"openid.ax.type.firstname":
"http://axschema.org/namePerson/first",
"openid.ax.type.fullname":
"http://axschema.org/namePerson",
"openid.ax.type.lastname":
"http://axschema.org/namePerson/last",
})
known_attrs = {
"email": "http://axschema.org/contact/email",
"language": "http://axschema.org/pref/language",
"username": "http://axschema.org/namePerson/friendly",
}
for name in ax_attrs:
args["openid.ax.type." + name] = known_attrs[name]
required.append(name)
args["openid.ax.required"] = ",".join(required)
if oauth_scope:
args.update({
"openid.ns.oauth":
"http://specs.openid.net/extensions/oauth/1.0",
"openid.oauth.consumer": self.request.host.split(":")[0],
"openid.oauth.scope": oauth_scope,
})
return args
def _on_authentication_verified(self, future, response):
    """Complete `get_authenticated_user` once the OP has replied.

    Parses the provider's check_authentication response plus the
    attribute-exchange values present in the original redirect request,
    and resolves *future* with a user dict (or an AuthError).
    """
    if response.error or b"is_valid:true" not in response.body:
        future.set_exception(AuthError(
            "Invalid OpenID response: %s" % (response.error or
                                             response.body)))
        return

    # Make sure we got back at least an email from attribute exchange:
    # find the namespace alias the provider chose for the AX extension.
    ax_ns = None
    for name in self.request.arguments:
        if name.startswith("openid.ns.") and \
                self.get_argument(name) == u("http://openid.net/srv/ax/1.0"):
            ax_ns = name[10:]
            break

    def get_ax_arg(uri):
        # Resolve one AX attribute by its type URI; '' when absent.
        if not ax_ns:
            return u("")
        prefix = "openid." + ax_ns + ".type."
        ax_name = None
        for name in self.request.arguments.keys():
            if self.get_argument(name) == uri and name.startswith(prefix):
                # The value argument mirrors the type argument's suffix.
                part = name[len(prefix):]
                ax_name = "openid." + ax_ns + ".value." + part
                break
        if not ax_name:
            return u("")
        return self.get_argument(ax_name, u(""))

    email = get_ax_arg("http://axschema.org/contact/email")
    name = get_ax_arg("http://axschema.org/namePerson")
    first_name = get_ax_arg("http://axschema.org/namePerson/first")
    last_name = get_ax_arg("http://axschema.org/namePerson/last")
    username = get_ax_arg("http://axschema.org/namePerson/friendly")
    locale = get_ax_arg("http://axschema.org/pref/language").lower()

    # Assemble the user dict, synthesizing "name" from the best
    # available source: full name, first+last, or the email local part.
    user = dict()
    name_parts = []
    if first_name:
        user["first_name"] = first_name
        name_parts.append(first_name)
    if last_name:
        user["last_name"] = last_name
        name_parts.append(last_name)
    if name:
        user["name"] = name
    elif name_parts:
        user["name"] = u(" ").join(name_parts)
    elif email:
        user["name"] = email.split("@")[0]
    if email:
        user["email"] = email
    if locale:
        user["locale"] = locale
    if username:
        user["username"] = username
    claimed_id = self.get_argument("openid.claimed_id", None)
    if claimed_id:
        user["claimed_id"] = claimed_id
    future.set_result(user)
def get_auth_http_client(self):
    """Returns the `.AsyncHTTPClient` instance to be used for auth requests.

    May be overridden by subclasses to use an HTTP client other than
    the default.
    """
    client = httpclient.AsyncHTTPClient()
    return client
class OAuthMixin(object):
"""Abstract implementation of OAuth 1.0 and 1.0a.
See `TwitterMixin` below for an example implementation.
Class attributes:
* ``_OAUTH_AUTHORIZE_URL``: The service's OAuth authorization url.
* ``_OAUTH_ACCESS_TOKEN_URL``: The service's OAuth access token url.
* ``_OAUTH_VERSION``: May be either "1.0" or "1.0a".
* ``_OAUTH_NO_CALLBACKS``: Set this to True if the service requires
advance registration of callbacks.
Subclasses must also override the `_oauth_get_user_future` and
`_oauth_consumer_token` methods.
"""
@return_future
def authorize_redirect(self, callback_uri=None, extra_params=None,
http_client=None, callback=None):
"""Redirects the user to obtain OAuth authorization for this service.
The ``callback_uri`` may be omitted if you have previously
registered a callback URI with the third-party service. For
some services (including Friendfeed), you must use a
previously-registered callback URI and cannot specify a
callback via this method.
This method sets a cookie called ``_oauth_request_token`` which is
subsequently used (and cleared) in `get_authenticated_user` for
security purposes.
Note that this method is asynchronous, although it calls
`.RequestHandler.finish` for you so it may not be necessary
to pass a callback or use the `.Future` it returns. However,
if this method is called from a function decorated with
`.gen.coroutine`, you must call it with ``yield`` to keep the
response from being cl |
def extractMobileSuitZetaGundamNovelsTranslation(item):
    """
    Parser for 'Mobile Suit Zeta Gundam Novels Translation'
    """
    title = item['title']
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
    # Skip items with no chapter/volume info, and preview posts.
    is_release = (chp or vol) and 'preview' not in title.lower()
    if not is_release:
        return None
    if 'WATTT' in item['tags']:
        return buildReleaseMessageWithType(item, 'WATTT', vol, chp,
                                           frag=frag, postfix=postfix)
    return False
|
import re
import weakref
from buildbot import util
class Properties(util.ComparableMixin):
    """
    I represent a set of properties that can be interpolated into various
    strings in buildsteps.

    @ivar properties: dictionary mapping property values to tuples
        (value, source), where source is a string identifing the source
        of the property.

    Objects of this class can be read like a dictionary -- in this case,
    only the property value is returned.

    As a special case, a property value of None is returned as an empty
    string when used as a mapping.

    NOTE: this is legacy Python 2 code (has_key, iteritems, unicode).
    """

    compare_attrs = ('properties',)

    def __init__(self, **kwargs):
        """
        @param kwargs: initial property values (for testing)
        """
        self.properties = {}
        # pmap implements the %(prop)s substitution rules for render().
        self.pmap = PropertyMap(self)
        if kwargs: self.update(kwargs, "TEST")

    def __getstate__(self):
        # pmap holds a weakref back to self; drop it when pickling.
        d = self.__dict__.copy()
        del d['pmap']
        return d

    def __setstate__(self, d):
        # Recreate the PropertyMap dropped in __getstate__.
        self.__dict__ = d
        self.pmap = PropertyMap(self)

    def __contains__(self, name):
        return name in self.properties

    def __getitem__(self, name):
        """Just get the value for this property."""
        rv = self.properties[name][0]
        return rv

    def has_key(self, name):
        # Python 2 dict-style membership test.
        return self.properties.has_key(name)

    def getProperty(self, name, default=None):
        """Get the value for the given property."""
        return self.properties.get(name, (default,))[0]

    def getPropertySource(self, name):
        """Return the source string recorded for the given property."""
        return self.properties[name][1]

    def asList(self):
        """Return the properties as a sorted list of (name, value, source)"""
        l = [ (k, v[0], v[1]) for k,v in self.properties.items() ]
        l.sort()
        return l

    def __repr__(self):
        return repr(dict([ (k,v[0]) for k,v in self.properties.iteritems() ]))

    def setProperty(self, name, value, source):
        self.properties[name] = (value, source)

    def update(self, dict, source):
        """Update this object from a dictionary, with an explicit source specified."""
        for k, v in dict.items():
            self.properties[k] = (v, source)

    def updateFromProperties(self, other):
        """Update this object based on another Properties object; the other
        object's properties replace any same-named properties here."""
        self.properties.update(other.properties)

    def render(self, value):
        """
        Return a variant of value that has any WithProperties objects
        substituted.  This recurses into Python's compound data types.
        """
        # we use isinstance to detect Python's standard data types, and call
        # this function recursively for the values in those types
        if isinstance(value, (str, unicode)):
            return value
        elif isinstance(value, WithProperties):
            return value.render(self.pmap)
        elif isinstance(value, list):
            return [ self.render(e) for e in value ]
        elif isinstance(value, tuple):
            return tuple([ self.render(e) for e in value ])
        elif isinstance(value, dict):
            return dict([ (self.render(k), self.render(v)) for k,v in value.iteritems() ])
        else:
            return value
class PropertyMap:
    """
    Privately-used mapping object to implement WithProperties' substitutions,
    including the rendering of None as ''.
    """
    # %(prop:-repl)s and %(prop:+repl)s modifier patterns.
    colon_minus_re = re.compile(r"(.*):-(.*)")
    colon_plus_re = re.compile(r"(.*):\+(.*)")

    def __init__(self, properties):
        # use weakref here to avoid a reference loop
        # (Properties holds this map, and this map refers back to it).
        self.properties = weakref.ref(properties)

    def __getitem__(self, key):
        """Resolve one %(key)s substitution, honoring :- and :+ modifiers."""
        properties = self.properties()
        assert properties is not None
        # %(prop:-repl)s
        # if prop exists, use it; otherwise, use repl
        mo = self.colon_minus_re.match(key)
        if mo:
            prop, repl = mo.group(1,2)
            if properties.has_key(prop):
                rv = properties[prop]
            else:
                rv = repl
        else:
            # %(prop:+repl)s
            # if prop exists, use repl; otherwise, an empty string
            mo = self.colon_plus_re.match(key)
            if mo:
                prop, repl = mo.group(1,2)
                if properties.has_key(prop):
                    rv = repl
                else:
                    rv = ''
            else:
                # plain %(prop)s -- KeyError if missing, as for a dict
                rv = properties[key]
        # translate 'None' to an empty string
        if rv is None: rv = ''
        return rv
class WithProperties(util.ComparableMixin):
    """
    Marker class indicating that build properties should be interpolated
    into the given format string when the value is rendered.
    """
    compare_attrs = ('fmtstring', 'args')

    def __init__(self, fmtstring, *args):
        self.fmtstring = fmtstring
        self.args = args

    def render(self, pmap):
        """Interpolate properties from *pmap* into the format string."""
        if self.args:
            # Positional style: look up each named argument in order.
            values = tuple(pmap[name] for name in self.args)
            return self.fmtstring % values
        # Mapping style: let %(name)s lookups go through pmap directly.
        return self.fmtstring % pmap
|
# -*- coding: utf-8 -*-
"""
lantz.drivers.rgblasersystems
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:company: RGB Lasersysteme GmbH.
:description: Lasers and Lasers Systems.
:website: http://www.rgb-laser.com/
----
:copyright: 2015 by Lantz Authors, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from .minilas | evo import MiniLasEvo
__all__ = ['MiniLasEvo']
|
conan_file.settings = settings
cmake = CMake(conan_file)
self.assertNotIn('WARN: Set CMake build type ', conan_file.output)
self.assertEquals(cmake.build_type, "Release")
# Now with visual, (multiconfig)
settings = Settings.loads(default_settings_yml)
settings.os = "Windows"
settings.compiler = "Visual Studio"
settings.compiler.version = "15"
settings.arch = "x86"
settings.build_type = "Release"
conan_file = ConanFileMock()
conan_file.settings = settings
cmake = CMake(conan_file)
cmake.build_type = "Debug"
self.assertIn('WARN: Set CMake build type "Debug" is different than the '
'settings build_type "Release"', conan_file.output)
self.assertEquals(cmake.build_type, "Debug")
self.assertNotIn('-DCMAKE_BUILD_TYPE="Debug"', cmake.command_line)
self.assertIn("--config Debug", cmake.build_config)
cmake = CMake(conan_file)
cmake.build_type = "Release"
self.assertIn("--config Release", cmake.build_config)
def loads_default_test(self):
settings = Settings.loads(default_settings_yml)
settings.os = "Windows"
settings.compiler = "Visual Studio"
settings.compiler.version = "12"
settings.arch = "x86"
conan_file = ConanFileMock()
conan_file.settings = settings
def check(text, build_config, generator=None, set_cmake_flags=False):
os = str(settings.os)
os_ver = str(settings.os.version) if settings.get_safe('os.version') else None
for cmake_system_name in (True, False):
cross_ver = ("-DCMAKE_SYSTEM_VERSION=\"%s\" " % os_ver) if os_ver else ""
cross = ("-DCMAKE_SYSTEM_NAME=\"%s\" %s-DCMAKE_SYSROOT=\"/path/to/sysroot\" "
% ({"Macos": "Darwin"}.get(os, os), cross_ver)
if (platform.system() != os and cmake_system_name) else "")
cmake = CMake(conan_file, generator=generator, cmake_system_name=cmake_system_name,
set_cmake_flags=set_cmake_flags)
new_text = text.replace("-DCONAN_EXPORTED", "%s-DCONAN_EXPORTED" % cross)
if "Visual Studio" in text:
cores = ('-DCONAN_CXX_FLAGS="/MP{0}" '
'-DCONAN_C_FLAGS="/MP{0}" '.format(tools.cpu_count()))
new_text = new_text.replace('-DCMAKE_EXPORT_NO_PACKAGE_REGISTRY="ON"',
'%s-DCMAKE_EXPORT_NO_PACKAGE_REGISTRY="ON"' % cores)
self.assertEqual(new_text, cmake.command_line)
self.assertEqual(build_config, cmake.build_config)
check('-G "Visual Studio 12 2013" -DCONAN_EXPORTED="1" '
'-DCONAN_COMPILER="Visual Studio" -DCONAN_COMPILER_VERSION="12" '
'-DCMAKE_EXPORT_NO_PACKAGE_REGISTRY="ON" -Wno-dev',
"")
check('-G "Custom Generator" -DCONAN_EXPORTED="1" '
'-DCONAN_COMPILER="Visual Studio" -DCONAN_COMPILER_VERSION="12" '
'-DCMAKE_EXPORT_NO_PACKAGE_REGISTRY="ON" -Wno-dev',
'', generator="Custom Generator")
check('-G "Custom Generator" -DCONAN_EXPORTED="1" '
'-DCONAN_COMPILER="Visual Studio" -DCONAN_COMPILER_VERSION="12" '
'-DCMAKE_EXPORT_NO_PACKAGE_REGISTRY="ON" -Wno-dev',
'', generator="Custom Generator", set_cmake_flags=True)
settings.build_type = "Debug"
check('-G "Visual Studio 12 2013" -DCONAN_EXPORTED="1" '
'-DCONAN_COMPILER="Visual Studio" -DCONAN_COMPILER_VERSION="12" '
'-DCMAKE_EXPORT_NO_PACKAGE_REGISTRY="ON" -Wno-dev',
'--config Debug')
settings.arch = "x86_64"
check('-G "Visual Studio 12 2013 Win64" -DCONAN_EXPORTED="1" '
'-DCONAN_COMPILER="Visual Studio" -DCONAN_COMPILER_VERSION="12" '
'-DCMAKE_EXPORT_NO_PACKAGE_REGISTRY="ON" -Wno-dev',
'--config Debug')
settings.compiler = "gcc"
settings.compiler.version = "4.8"
generator = "MinGW Makefiles" if platform.system() == "Windows" else "Unix Makefiles"
check('-G "%s" -DCMAKE_BUILD_TYPE="Debug" -DCONAN_EXPORTED="1" '
'-DCONAN_COMPILER="gcc" -DCONAN_COMPILER_VERSION="4.8" -DCONAN_CXX_FLAGS="-m64" '
'-DCONAN_SHARED_LINKER_FLAGS="-m64" -DCONAN_C_FLAGS="-m64" '
'-DCMAKE_EXPORT_NO_PACKAGE_REGISTRY="ON" -Wno-dev' % generator, "")
settings.os = "Linux"
settings.arch = "x86"
check('-G "%s" -DCMAKE_BUILD_TYPE="Debug"'
' -DCONAN_EXPORTED="1" -DCONAN_COMPILER="gcc" '
'-DCONAN_COMPILER_VERSION="4.8" -DCONAN_CXX_FLAGS="-m32" '
'-DCONAN_SHARED_LINKER_FLAGS="-m32" -DCONAN_C_FLAGS="-m32" '
'-DCMAKE_EXPORT_NO_PACKAGE_REGISTRY="ON" -Wno-dev' % generator,
"")
settings.arch = "x86_64"
check('-G "%s" -DCMAKE_BUILD_TYPE="Debug"'
' -DCONAN_EXPORTED="1" -DCONAN_COMPILER="gcc" '
'-DCONAN_COMPILER_VERSION="4.8" -DCONAN_CXX_FLAGS="-m64" '
'-DCONAN_SHARED_LINKER_FLAGS="-m64" -DCONAN_C_FLAGS="-m64" '
'-DCMAKE_EXPORT_NO_PACKAGE_REGISTRY="ON" -Wno-dev' % generator,
"")
check('-G "%s" -DCMAKE_BUILD_TYPE="Debug"'
' -DCONAN_EXPORTED="1" -DCONAN_COMPILER="gcc" '
'-DCONAN_COMPILER_VERSION="4.8" -DCONAN_CXX_FLAGS="-m64" '
'-DCONAN_SHARED_LINKER_FLAGS="-m64" -DCONAN_C_FLAGS="-m64" '
'-DCMAKE_CXX_FLAGS="-m64" -DCMAKE_SHARED_LINKER_FLAGS="-m64" -DCMAKE_C_FLAGS="-m64" '
'-DCMAKE_EXPORT_NO_PACKAGE_REGISTRY="ON" '
'-Wno-dev' % generat | or,
"", set_cmake_flags=True)
settings.os = "FreeBSD"
settings.compiler = "clang"
settings.compiler.version = "3.8"
settings.arch = "x86"
check('-G "%s" -DCMAKE_BUILD_TYPE="Debug"'
' -DCONAN_EXPORTED="1" -DCONAN_COMPILER="clang" '
'-DCONAN_COMPILER_VERSION="3.8" -DCONAN_CXX_FLAGS="-m32" '
'-DCONAN_SHARED_LINKER_FLAGS="-m | 32" -DCONAN_C_FLAGS="-m32" '
'-DCMAKE_EXPORT_NO_PACKAGE_REGISTRY="ON" -Wno-dev' % generator,
"")
settings.arch = "x86_64"
check('-G "%s" -DCMAKE_BUILD_TYPE="Debug"'
' -DCONAN_EXPORTED="1" -DCONAN_COMPILER="clang" '
'-DCONAN_COMPILER_VERSION="3.8" -DCONAN_CXX_FLAGS="-m64" '
'-DCONAN_SHARED_LINKER_FLAGS="-m64" -DCONAN_C_FLAGS="-m64" '
'-DCMAKE_EXPORT_NO_PACKAGE_REGISTRY="ON" -Wno-dev' % generator,
"")
settings.os = "SunOS"
settings.compiler = "sun-cc"
settings.compiler.version = "5.10"
settings.arch = "x86"
check('-G "%s" -DCMAKE_BUILD_TYPE="Debug"'
' -DCONAN_EXPORTED="1" -DCONAN_COMPILER="sun-cc" '
'-DCONAN_COMPILER_VERSION="5.10" -DCONAN_CXX_FLAGS="-m32" '
'-DCONAN_SHARED_LINKER_FLAGS="-m32" -DCONAN_C_FLAGS="-m32" '
'-DCMAKE_EXPORT_NO_PACKAGE_REGISTRY="ON" -Wno-dev' % generator,
"")
settings.arch = "x86_64"
check('-G "%s" -DCMAKE_BUILD_TYPE="Debug"'
' -DCONAN_EXPORTED="1" -DCONAN_COMPILER="sun-cc" '
'-DCONAN_COMPILER_VERSION="5.10" -DCONAN_CXX_FLAGS="-m64" '
'-DCONAN_SHARED_LINKER_FLAGS="-m64" -DCONAN_C_FLAGS="-m64" '
'-DCMAKE_EXPORT_NO_PACKAGE_REGISTRY="ON" -Wno-dev' % generator,
"")
settings.arch = "sparc"
check('-G "%s" -DCMAKE_BUILD_TYPE="Debug" -DCONAN_EXPORTED="1" '
'-DCONAN_COMPILER="sun-cc" '
'-DCONAN_COMPILER_VERSION="5.10" -DCONAN_CXX_FLAGS="-m32" '
'-DCONAN_SHARED_LINKER_FLAGS="-m32" -DCONAN_C_FLAGS="-m32" '
'-DCMAKE_EXPORT_NO_PACKAGE_REGISTRY="ON" -Wno-dev' % generator,
"")
settings.arch = "sparcv9"
check('-G "%s" -DCMAKE_BUILD_TYPE="Deb |
from django.views import generic
from django.db.models import get_model
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext_lazy as _
from . import forms
from ..utils import get_page_model, get_node_model
PageNode = get_node_model()
FancyPage = get_page_model()
ContentBlock = get_model('fancypages', 'ContentBlock')
Container = get_model('fancypages', 'Container')
TabBlock = get_model('fancypages', 'TabBlock')
OrderedContainer = get_model('fancypages | ', 'OrderedContainer')
class PageListView(generic.TemplateView):
    """Dashboard view rendering the list of fancy pages."""
    template_name = "fancypages/dashboard/page_list.html"
class PageCreateView(generic.CreateView):
    """Dashboard view for creating a new FancyPage."""

    model = FancyPage
    form_class = forms.PageNodeForm
    template_name = "fancypages/dashboard/page_update.html"

    def get_form_kwargs(self):
        form_kwargs = super(PageCreateView, self).get_form_kwargs()
        # Forward URL-captured arguments (e.g. parent node) to the form.
        form_kwargs.update(self.kwargs)
        return form_kwargs

    def get_context_data(self, **kwargs):
        context = super(PageCreateView, self).get_context_data(**kwargs)
        context['title'] = _("Create new page")
        return context

    def get_success_url(self):
        return reverse('fp-dashboard:page-list')
class PageUpdateView(generic.UpdateView):
    """Dashboard view for editing an existing FancyPage."""

    model = FancyPage
    form_class = forms.PageNodeForm
    context_object_name = 'fancypage'
    template_name = "fancypages/dashboard/page_update.html"

    def get_context_data(self, **kwargs):
        context = super(PageUpdateView, self).get_context_data(**kwargs)
        context['title'] = _("Update page")
        return context

    def get_success_url(self):
        return reverse('fp-dashboard:page-list')
class PageDeleteView(generic.DeleteView):
    """Dashboard view confirming and performing deletion of a page."""

    model = FancyPage
    context_object_name = 'fancypage'
    template_name = "fancypages/dashboard/page_delete.html"

    def get_success_url(self):
        list_url = reverse('fp-dashboard:page-list')
        return list_url
|
ions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from .fetchers import NUMetadatasFetcher
from .fetchers import NUGlobalMetadatasFetc | her
from bambou import NURESTObject
class NUCTranslationMap(NURESTObject):
    """ Represents a CTranslationMap in the VSD

        Notes:
            1:1 mapping of customer private IPs in customer domain to customer alias (public) IPs in provider domain and N:1 mapping to customer alias SPAT IP in the provider domain.
    """

    # REST resource names used by the bambou framework.
    __rest_name__ = "ctranslationmap"
    __resource_name__ = "ctranslationmaps"

    ## Constants
    CONST_MAPPING_TYPE_PAT = "PAT"
    CONST_ENTITY_SCOPE_GLOBAL = "GLOBAL"
    CONST_MAPPING_TYPE_NAT = "NAT"
    CONST_ENTITY_SCOPE_ENTERPRISE = "ENTERPRISE"
def __init__(self, **kwargs):
    """ Initializes a CTranslationMap instance

        Notes:
            You can specify all parameters while calling this methods.
            A special argument named `data` will enable you to load the
            object from a Python dictionary

        Examples:
            >>> ctranslationmap = NUCTranslationMap(id=u'xxxx-xxx-xxx-xxx', name=u'CTranslationMap')
            >>> ctranslationmap = NUCTranslationMap(data=my_dict)
    """

    super(NUCTranslationMap, self).__init__()

    # Read/Write Attributes
    self._mapping_type = None
    self._last_updated_by = None
    self._entity_scope = None
    self._associated_domain_id = None
    self._customer_alias_ip = None
    self._customer_ip = None
    self._external_id = None

    # Declare how each local attribute maps onto the remote VSD API field.
    self.expose_attribute(local_name="mapping_type", remote_name="mappingType", attribute_type=str, is_required=True, is_unique=False, choices=[u'NAT', u'PAT'])
    self.expose_attribute(local_name="last_updated_by", remote_name="lastUpdatedBy", attribute_type=str, is_required=False, is_unique=False)
    self.expose_attribute(local_name="entity_scope", remote_name="entityScope", attribute_type=str, is_required=False, is_unique=False, choices=[u'ENTERPRISE', u'GLOBAL'])
    self.expose_attribute(local_name="associated_domain_id", remote_name="associatedDomainID", attribute_type=str, is_required=False, is_unique=False)
    self.expose_attribute(local_name="customer_alias_ip", remote_name="customerAliasIP", attribute_type=str, is_required=True, is_unique=False)
    self.expose_attribute(local_name="customer_ip", remote_name="customerIP", attribute_type=str, is_required=True, is_unique=False)
    self.expose_attribute(local_name="external_id", remote_name="externalID", attribute_type=str, is_required=False, is_unique=True)

    # Fetchers
    self.metadatas = NUMetadatasFetcher.fetcher_with_object(parent_object=self, relationship="child")
    self.global_metadatas = NUGlobalMetadatasFetcher.fetcher_with_object(parent_object=self, relationship="child")

    # Apply kwargs / a `data` dict to the attributes declared above.
    self._compute_args(**kwargs)
# Properties
# Generated getter/setter pairs; each proxies a private attribute that
# expose_attribute() (in __init__) maps onto a VSD API field.

@property
def mapping_type(self):
    """ Get mapping_type value.

        Notes:
            NAT for 1:1 mapping or PAT for *:1 mappings.

            This attribute is named `mappingType` in VSD API.
    """
    return self._mapping_type

@mapping_type.setter
def mapping_type(self, value):
    """ Set mapping_type value.

        Notes:
            NAT for 1:1 mapping or PAT for *:1 mappings.

            This attribute is named `mappingType` in VSD API.
    """
    self._mapping_type = value

@property
def last_updated_by(self):
    """ Get last_updated_by value.

        Notes:
            ID of the user who last updated the object.

            This attribute is named `lastUpdatedBy` in VSD API.
    """
    return self._last_updated_by

@last_updated_by.setter
def last_updated_by(self, value):
    """ Set last_updated_by value.

        Notes:
            ID of the user who last updated the object.

            This attribute is named `lastUpdatedBy` in VSD API.
    """
    self._last_updated_by = value

@property
def entity_scope(self):
    """ Get entity_scope value.

        Notes:
            Specify if scope of entity is Data center or Enterprise level

            This attribute is named `entityScope` in VSD API.
    """
    return self._entity_scope

@entity_scope.setter
def entity_scope(self, value):
    """ Set entity_scope value.

        Notes:
            Specify if scope of entity is Data center or Enterprise level

            This attribute is named `entityScope` in VSD API.
    """
    self._entity_scope = value

@property
def associated_domain_id(self):
    """ Get associated_domain_id value.

        Notes:
            Domain associated to this address mapping.

            This attribute is named `associatedDomainID` in VSD API.
    """
    return self._associated_domain_id

@associated_domain_id.setter
def associated_domain_id(self, value):
    """ Set associated_domain_id value.

        Notes:
            Domain associated to this address mapping.

            This attribute is named `associatedDomainID` in VSD API.
    """
    self._associated_domain_id = value

@property
def customer_alias_ip(self):
    """ Get customer_alias_ip value.

        Notes:
            Customer public IP in the provider domain.

            This attribute is named `customerAliasIP` in VSD API.
    """
    return self._customer_alias_ip

@customer_alias_ip.setter
def customer_alias_ip(self, value):
    """ Set customer_alias_ip value.

        Notes:
            Customer public IP in the provider domain.

            This attribute is named `customerAliasIP` in VSD API.
    """
    self._customer_alias_ip = value

@property
def customer_ip(self):
    """ Get customer_ip value.

        Notes:
            Customer private IP in the customer domain.

            This attribute is named `customerIP` in VSD API.
    """
    return self._customer_ip

@customer_ip.setter
def customer_ip(self, value):
    """ Set customer_ip value.

        Notes:
            Customer private IP in the customer domain.

            This attribute is named `customerIP` in VSD API.
    """
    self._customer_ip = value
@property
def external_id(self):
""" Get external_id value.
|
# Copyright 2016 Pinterest, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR | CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# -*- coding: utf-8 -*-
from deploy_board.webapp.helpers.rodimus_client import RodimusClient
rodimus_client = RodimusClient()
def create_placement(request, placement_info):
    """Create a new placement via the Rodimus service."""
    token = request.teletraan_user_id.token
    return rodimus_client.post("/placements", token, data=placement_info)
def get_all(request, index, size):
    """Fetch one page of placements (pageIndex/pageSize pagination)."""
    token = request.teletraan_user_id.token
    paging_params = [('pageIndex', index), ('pageSize', size)]
    return rodimus_client.get("/placements", token, params=paging_params)
def get_by_provider_and_cell_name(request, provider, cell_name):
    """Fetch placements by cell name when given, otherwise by provider."""
    token = request.teletraan_user_id.token
    if cell_name:
        path = "/placements/cell/%s" % cell_name
    else:
        path = "/placements/provider/%s" % provider
    return rodimus_client.get(path, token)
def get_by_id(request, placement_id):
    """Fetch a single placement by its id."""
    token = request.teletraan_user_id.token
    return rodimus_client.get("/placements/%s" % placement_id, token)
|
#!/usr/bin/env python
#
# ex13.py
#
# Author: Billy Wilson Arante
# Created: 2016/04/26 PHT
#
from sys import argv
def main():
    """Exercise 13: Parameters, Unpacking, Variables"""
    # Unpacking argv requires exactly three command-line arguments;
    # fewer or more raises ValueError.  (Python 2 print statements.)
    script, first, second, third = argv
    print "The script is called:", script
    print "Your first variable is:", first
    print "Your second variable is:", second
    print "Your third variable is:", third


if __name__ == "__main__":
    main()
|
# flake8: noqa
from flask import Flask
from flask_assets import Environment, Bundle
from flask_co | mpress import Compress
from flask_cache import Cache
from flask_wtf.csrf import CsrfProtect
app = Flask(__name__)
#App config
app.config.from_pyfile('config.py')
# Flask Assets
assets = Environment(app)
css = Bundle('css/custom.css', filters='cssmin', output='css/custom.min.css')
assets.register('custom_css', css)
# Flask Compress
Compress(app)
# Flask Cache
cache = Ca | che(app,config={'CACHE_TYPE': 'simple'})
# CSRF Protection
CsrfProtect(app)
import mash_place_ui.views
|
import cgi

# CGI: the Content-Type header must be printed before any markup.
# (Python 2 print statements throughout this script.)
print "Content-Type: text/html\n"

# Request fields; an optional 'hash' selects an existing saved paste.
form = cgi.FieldStorage()

# Static page head: editor CSS, skulpt (in-browser Python), jQuery,
# plus inline JS for a sound helper, a Konami-code easter egg, and the
# loadtext() opening (completed by the dynamic prints below).
print """
<!DOCTYPE html>
<html lang="en">
<head>
<title>CodeBin</title>
<link rel="stylesheet" type="text/css" href="./css/editor.css"/>
<link rel="stylesheet" rel="stylesheet" type="text/css" media="screen" href="http://openfontlibrary.org/face/hans-kendrick"/>
<link rel="stylesheet" href="./bin/icons/font-awesome-4.0.3/css/font-awesome.min.css"/>
<script src="http://ajax.googleapis.com/ajax/libs/jquery/1.9.0/jquery.min.js" type="text/javascript"></script>
<script src="./js/skulpt.min.js" type="text/javascript"></script>
<script src="./js/skulpt-stdlib.js" type="text/javascript"></script>
<script type="text/javascript">
function Sound(source,volume,loop)
{
this.source=source;
this.volume=volume;
this.loop=loop;
var son;
this.son=son;
this.finish=false;
this.stop=function()
{
document.body.removeChild(this.son);
}
this.start=function()
{
if(this.finish)return false;
this.son=document.createElement("embed");
this.son.setAttribute("src",this.source);
this.son.setAttribute("hidden","true");
this.son.setAttribute("volume",this.volume);
this.son.setAttribute("autostart","true");
this.son.setAttribute("loop",this.loop);
document.body.appendChild(this.son);
}
this.remove=function()
{
document.body.removeChild(this.son);
this.finish=true;
}
this.init=function(volume,loop)
{
this.finish=false;
this.volume=volume;
this.loop=loop;
}
}
//Konami Code Implementation
if (window.addEventListener) {
var keys = [];
var konami = "38,38,40,40,37,39,37,39,66,65";
var r_konami = "65,66,39,37,39,37,40,40,38,38";
var index=0;
window.addEventListener("keydown", function(e){
keys.push(e.keyCode);
if (keys.toString().indexOf(konami) >= 0) {
var bg=["./bin/img/bunny-fail.gif","./bin/img/tab.gif","./bin/img/laughing.gif","./bin/img/beer.gif","./bin/img/ugh.gif","./bin/img/energy.gif"];
var bg_file=bg[index];
document.body.style.backgroundImage="url("+bg_file+")";
if(index>5) {
index=0;
}
else {
index++;
}
keys = [];
};
}, true);
};
function startRickRoll() {
alert("Turn up your volume, You just got RickRolled!");
var rickroll = new Sound("bin/mp3/rickroll.mp3",100,true);
rickroll.start();
}
function loadtext()
{
# Body of loadtext(): if a 'hash' was supplied, emit JS that fetches the
# saved paste from backend.py; otherwise (KeyError on form['hash'])
# fall back to a stub program.
# SECURITY NOTE(review): form['hash'].value is interpolated into the
# page unescaped -- a crafted hash could inject script.  Confirm that
# backend.py/upstream validates it, or escape it here.
try:
    print """
var xmlhttp=new XMLHttpRequest();
xmlhttp.onreadystatechange=function()
{
if (xmlhttp.readyState==4 && xmlhttp.status==200)
{
var text=xmlhttp.responseText;
var textArray=text.split("\\r\\n");
text="";
for (var x=0;x<textArray.length;x++)
{
text+=textArray[x];
text+="\\n";
}
editor.getSession().setValue(text);
}
else if (xmlhttp.status==404)
{
editor.getSession().setValue('An error occured.');
}
}
xmlhttp.open("POST","backend.py",true);
xmlhttp.setRequestHeader("Content-type","application/x-www-form-urlencoded");
xmlhttp.send("pick=1&hash="+'%s');
"""%(str(form['hash'].value))
except:
    print """editor.getSession().setValue('print \"Hello World\"');"""
print """
}
</script>
</head>
<body onload="loadtext()">
<script type="text/javascript">
// output functions are configurable. This one just appends some text
// to a pre element.
function outf(text) {
var mypre = document.getElementById("output");
mypre.innerHTML = mypre.innerHTML + text;
}
function builtinRead(x) {
if (Sk.builtinFiles === undefined || Sk.builtinFiles["files"][x] === undefined)
throw "File not found: '" + x + "'";
return Sk.builtinFiles["files"][x];
}
// Here's everything you need to run a python program in skulpt
// grab the code from your textarea
// get a reference to your pre element for output
// configure the output function
// call Sk.importMainWithBody()
function runit() {
var prog = editor.getSession().getValue();
var mypre = document.getElementById("output");
mypre.innerHTML = '';
Sk.canvas = "mycanvas";
Sk.pre = "output";
Sk.configure({output:outf, read:builtinRead});
eval(Sk.importMainWithBody("<stdin>",false,prog));
}
function savetext()
{
var xmlhttp=new XMLHttpRequest();
"""
try:
print """
xmlhttp.open("POST","backend.py",true);
xmlhttp.setRequestHeader("Content-type","application/x-www-form-urlencoded");
xmlhttp.send("pick=2&code="+editor.getSession().getValue()+"&hash=%s");
"""%(str(form['hash'].value))
except:
print """
var newHash="";
xmlhttp.onreadystatechange=function()
{
if (xmlhttp.readyState==4 && xmlhttp.status==200)
{
newHash=xmlhttp.responseText;
window.location=document.URL+"?hash="+newHash;
}
else if (xmlhttp.status==404)
{
editor.getSession().setValue("An error occured.");
}
}
xmlhttp.open("POST","backend.py",true);
xmlhttp.setRequestHeader("Content-type","application/x-www-form-urlencoded");
xmlhttp.send("pick=3&code="+editor.getSession().getValue());
"""
print """
}
</script>
<div id="header-content">
<a class="easter-egg" href="#" onclick="startRickRoll()"><h1 id="logo">CodeBin (This site is pretty ghetto. Use at your own risk.)</h1></a>
</div>
<div id="body-content">
<form id="body-form">
<div id="editor"></div>
<div id="output">
<canvas id="mycanvas" ></mycanvas>
</div>
<button id="run-button" type="button" onclick="savetext(); runit();">Run</button>
</form>
</div>
<div id="footer-content">
<p>CodeBin Copyright 2014 Written By <a class="easter-egg" href="http://bit.ly/1eUpyT1">Alex Jaeger</a> and <a class="easter-egg" href="./bin/img/cat.jpg">Dylan Johnson</a>. Documentation and Support provided by the <a class="easter-egg" href="./bin/img/california-condor.jpg">Condor</a></p>
</div>
<script src="./bin/libraries/ace-builds-master/src-noconflict/ace.js" type="text/javascript" charset="utf-8"></script>
<script>
var editor=ace.edit("editor");
editor.setTheme("ace/theme/tomorrow_night");
editor.getSession().setMode("ace/mode/python");
editor.setFontSize(12);
</script>
</body>
</html>
""" |
from rambutan3.check_args.annotation. | NUMBER import NUMBER
def test():
    """Exercise the NUMBER annotation matcher on typical inputs."""
    # Strings and booleans must be rejected (bool subclasses int, but the
    # matcher treats it as a non-number).
    for non_number in ("abc", True):
        assert not NUMBER.matches(non_number)
    # Negative, zero and positive ints and floats are all accepted.
    for number in (-1.234, -1, 0, 0.234, 1, 1.234, 2):
        assert NUMBER.matches(number)
|
typ)
else:
pkey = '{%s}%s' % (target_namespace, prop.name)
if lista:
return "c_children['%s'] = ('%s', [%s])" % (
pkey, prop.pyname, typ)
else:
return "c_children['%s'] = ('%s', %s)" % (
pkey, prop.pyname, typ)
def knamn(self, sup, cdict):
    """Return the Python class name to use for the superior *sup*.

    Uses the pre-computed class_name from *cdict* when available;
    otherwise derives it from the dotted name, consulting the root
    module registry for namespaced names.
    """
    entry = cdict[sup]
    if entry.class_name:
        return entry.class_name
    namesp, tag = entry.name.split('.')
    if not namesp:
        # Local (un-namespaced) name: disambiguate with a trailing '_'.
        return tag + "_"
    factory_cls = self.root.modul[namesp].factory(tag).__class__
    return '%s.%s' % (namesp, factory_cls.__name__)
def _do_properties(self, line, cdict, ignore, target_namespace):
    """Emit class-body source lines for this object's own properties.

    Appends c_attributes/child-spec/c_cardinality lines to *line*
    (mutated in place) and returns a triple (args, child, inh):
      - args: (pyname, value-expression, default) triples for __init__
      - child: pynames of child elements, in declaration order
      - inh: properties inherited from superiors ([] if none)
    """
    args = []
    child = []
    try:
        (own, inh) = self.properties
    except AttributeError:
        # No properties were computed for this object at all.
        (own, inh) = ([], [])
    for prop in own:
        if isinstance(prop, PyAttribute):
            line.append("%sc_attributes['%s'] = %s" % (INDENT,
                prop.name, prop.spec()))
            # Fixed values win over defaults; otherwise the argument just
            # forwards the caller-supplied value (with optional default).
            if prop.fixed:
                args.append((prop.pyname, prop.fixed, None))
            else:
                if prop.default:
                    args.append((prop.pyname, prop.pyname, prop.default))
                else:
                    args.append((prop.pyname, prop.pyname, None))
        elif isinstance(prop, PyElement):
            (mod, cname) = _mod_cname(prop, cdict)
            if prop.max == "unbounded":
                lista = True
                pmax = 0  # just has to be different from 1
            else:
                pmax = int(prop.max)
                lista = False
            if prop.name in ignore:
                pass
            else:
                line.append("%s%s" % (INDENT, self.child_spec(
                    target_namespace, prop,
                    mod, cname,
                    lista)))
            pmin = int(getattr(prop, 'min', 1))
            # Only emit a cardinality entry when it differs from the
            # default (exactly one occurrence).
            if pmax == 1 and pmin == 1:
                pass
            elif prop.max == "unbounded":
                line.append("%sc_cardinality['%s'] = {\"min\":%s}" % (
                    INDENT, prop.pyname, pmin))
            else:
                line.append(
                    "%sc_cardinality['%s'] = {\"min\":%s, \"max\":%d}" % (
                        INDENT, prop.pyname, pmin, pmax))
            child.append(prop.pyname)
            if lista:
                # Repeated elements default to a fresh empty list.
                args.append((prop.pyname, "%s or []" % (prop.pyname,),
                             None))
            else:
                args.append((prop.pyname, prop.pyname, None))
    return args, child, inh
def _superiors(self, cdict):
imps = {}
try:
superior = self.superior
sups = []
for sup in superior:
klass = self.knamn(sup, cdict)
sups.append(klass)
imps[klass] = []
for cla in cdict[sup].properties[0]:
if cla.pyname and cla.pyname not in imps[klass]:
imps[klass].append(cla.pyname)
except AttributeError:
superior = []
sups = []
return superior, sups, imps
def class_definition(self, target_namespace, cdict=None, ignore=None):
    """Render the complete Python class definition for this object.

    Registers self in self.root.elems (if not already present), builds
    the class header, docstring, class-level spec attributes, __init__
    and a module-level *_from_string helper, marks the object done, and
    returns the generated source as one string.
    """
    line = []
    if self.root:
        if self.name not in [c.name for c in self.root.elems]:
            self.root.elems.append(self)
    (superior, sups, imps) = self._superiors(cdict)
    c_name = klass_namn(self)
    # Root classes derive from SamlBase, others from their superiors.
    if not superior:
        line.append("class %s(SamlBase):" % (c_name,))
    else:
        line.append("class %s(%s):" % (c_name, ",".join(sups)))
    # 'scoped' classes get no docstring.
    if hasattr(self, 'scoped'):
        pass
    else:
        line.append("%s\"\"\"The %s:%s element \"\"\"" % (INDENT,
                                                          target_namespace,
                                                          self.name))
    line.append("")
    line.append("%sc_tag = '%s'" % (INDENT, self.name))
    line.append("%sc_namespace = NAMESPACE" % (INDENT,))
    try:
        if self.value_type:
            # String value types are emitted quoted, dict/other specs as-is.
            if isinstance(self.value_type, str):
                line.append("%sc_value_type = '%s'" % (INDENT,
                                                       self.value_type))
            else:
                line.append("%sc_value_type = %s" % (INDENT,
                                                     self.value_type))
    except AttributeError:
        pass
    # Copy the class-level property containers from SamlBase or from
    # every superior, so each class gets its own (non-shared) copies.
    if not superior:
        for var, cps in CLASS_PROP:
            line.append("%s%s = SamlBase.%s%s" % (INDENT, var, var, cps))
    else:
        for sup in sups:
            for var, cps in CLASS_PROP:
                line.append("%s%s = %s.%s%s" % (INDENT, var, sup, var,
                                                cps))
    (args, child, inh) = self._do_properties(line, cdict, ignore,
                                             target_namespace)
    if child:
        line.append("%sc_child_order.extend([%s])" % (INDENT,
                    "'"+"', '".join(child)+"'"))
    if args:
        if inh:
            # Inherited properties replace imps with only the first
            # superior's pynames for the base-init call.
            cname = self.knamn(self.superior[0], cdict)
            imps = {cname: [c.pyname for c in inh if c.pyname]}
        line.append("")
        line.extend(def_init(imps, args))
        line.extend(base_init(imps))
        line.extend(initialize(args))
    line.append("")
    # NOTE(review): 'or' means a _from_string helper is emitted unless the
    # class is both abstract AND '_'-suffixed -- confirm 'or' (vs 'and')
    # is intended here.
    if not self.abstract or not self.class_name.endswith("_"):
        line.append("def %s_from_string(xml_string):" % pyify(
            self.class_name))
        line.append(
            "%sreturn saml2.create_class_from_xml_string(%s, xml_string)" % (
                INDENT, self.class_name))
        line.append("")
    self.done = True
    return "\n".join(line)
def prepend(add, orig):
    """Return a new list with *add* as the first element, followed by the
    items of *orig*.  *orig* may be None or empty, in which case the
    result is just [add]."""
    return ([add] + list(orig)) if orig else [add]
def pyobj_factory(name, value_type, elms=None):
    """Create a PyObj named *name* with the given value type.

    When *elms* is given and contains no element with that name, the new
    object is appended to it.
    """
    obj = PyObj(name, pyify(name))
    obj.value_type = value_type
    if elms and name not in [c.name for c in elms]:
        elms.append(obj)
    return obj
def pyelement_factory(name, value_type, elms=None):
    """Create a PyElement named *name* with the given value type.

    When *elms* is given and contains no element with that name, the new
    element is appended to it.
    """
    element = PyElement(name, pyify(name))
    element.value_type = value_type
    if elms and name not in [c.name for c in elms]:
        elms.append(element)
    return element
def expand_groups(properties, cdict):
    """Return *properties* with every PyGroup replaced by the own
    properties of the class the group references."""
    expanded = []
    for prop in properties:
        if not isinstance(prop, PyGroup):
            expanded.append(prop)
            continue
        # only own, what about inherited ? Not on groups ?
        group_class = prop.ref[1]
        expanded.extend(cdict[group_class].properties[0])
    return expanded
class PyElement(PyObj):
    """A PyObj specialization representing a schema element.

    Tracks cardinality (min/max), an optional reference, and keeps the
    original definition around for later processing.
    """
    def __init__(self, name=None, pyname=None, root=None, parent=""):
        PyObj.__init__(self, name, pyname, root)
        # Nested elements get their class name qualified by the parent's.
        self.class_name = ("%s_%s" % (leading_uppercase(parent), self.name)
                           if parent else leading_uppercase(self.name))
        self.ref = None
        self.definition = None
        self.orig = None
        # Default cardinality: exactly one occurrence.
        self.min = 1
        self.max = 1
# def prereq(self, prop):
# prtext = prop.text(target_namespace, cdict)
# if prtext == None:
# return []
# else:
# prop.done = True
# |
f):
# This list will pass since every sub-array has at least 2 frames.
has_enough_frames = self.metric._HasEnoughFrames(self.good_timestamps)
self.assertTrue(has_enough_frames)
def testHasEnoughFramesWithNotEnoughFrames(self):
    # This list will fail since the first sub-array only has a single frame.
    has_enough_frames = self.metric._HasEnoughFrames(
        self.not_enough_frames_timestamps)
    self.assertFalse(has_enough_frames)

def testComputeSurfaceFlingerMetricNoJank(self):
    # Every frame takes exactly one refresh period (10ms) -> no jank.
    stats = _MockRenderingStats(refresh_period=10,
                                frame_timestamps=[[10, 20], [130, 140, 150]],
                                frame_times=[[10], [10, 10]])
    avg_surface_fps, jank_count, max_frame_delay, frame_lengths = (
        self.metric._ComputeSurfaceFlingerMetric(self.page, stats))
    self.assertEquals([1, 1, 1], frame_lengths.values)
    self.assertEquals(1, max_frame_delay.value)
    self.assertEquals(0, jank_count.value)
    self.assertEquals(100, avg_surface_fps.value)

def testComputeSurfaceFlingerMetricJank(self):
    # The 30ms and 20ms frames span 3 and 2 refresh periods respectively,
    # which is expected to be reported as two janks.
    stats = _MockRenderingStats(
        refresh_period=10,
        frame_timestamps=[[10, 20, 50], [130, 140, 150, 170, 180]],
        frame_times=[[10, 30], [10, 10, 20, 10]])
    avg_surface_fps, jank_count, max_frame_delay, frame_lengths = (
        self.metric._ComputeSurfaceFlingerMetric(self.page, stats))
    self.assertEquals([1, 3, 1, 1, 2, 1], frame_lengths.values)
    self.assertEquals(3, max_frame_delay.value)
    self.assertEquals(2, jank_count.value)
    self.assertEquals(67, avg_surface_fps.value)
def testComputeFrameTimeMetricWithNotEnoughFrames(self):
    # Too few frames: every SurfaceFlinger value is None with a reason.
    stats = _MockRenderingStats(
        refresh_period=10,
        frame_timestamps=self.not_enough_frames_timestamps,
        frame_times=[[10, 20], [30, 40, 50]])
    avg_surface_fps, jank_count, max_frame_delay, frame_lengths = (
        self.metric._ComputeSurfaceFlingerMetric(self.page, stats))
    self.assertEquals(None, avg_surface_fps.value)
    self.assertEquals(smoothness.NOT_ENOUGH_FRAMES_MESSAGE,
                      avg_surface_fps.none_value_reason)
    self.assertEquals(None, jank_count.value)
    self.assertEquals(smoothness.NOT_ENOUGH_FRAMES_MESSAGE,
                      jank_count.none_value_reason)
    self.assertEquals(None, max_frame_delay.value)
    self.assertEquals(smoothness.NOT_ENOUGH_FRAMES_MESSAGE,
                      max_frame_delay.none_value_reason)
    self.assertEquals(None, frame_lengths.values)
    self.assertEquals(smoothness.NOT_ENOUGH_FRAMES_MESSAGE,
                      frame_lengths.none_value_reason)

def testComputeLatencyMetric(self):
    stats = _MockRenderingStats(frame_timestamps=self.good_timestamps,
                                input_event_latency=[[10, 20], [30, 40, 50]])
    # pylint: disable=unbalanced-tuple-unpacking
    mean_value, discrepancy_value = self.metric._ComputeLatencyMetric(
        self.page, stats, 'input_event_latency', stats.input_event_latency)
    self.assertEquals(30, mean_value.value)
    self.assertEquals(60, discrepancy_value.value)

def testComputeLatencyMetricWithMissingData(self):
    # No latency samples recorded: the metric returns an empty tuple.
    stats = _MockRenderingStats(frame_timestamps=self.good_timestamps,
                                input_event_latency=[[], []])
    value = self.metric._ComputeLatencyMetric(
        self.page, stats, 'input_event_latency', stats.input_event_latency)
    self.assertEquals((), value)

def testComputeLatencyMetricWithNotEnoughFrames(self):
    stats = _MockRenderingStats(
        frame_timestamps=self.not_enough_frames_timestamps,
        input_event_latency=[[], []])
    # pylint: disable=unbalanced-tuple-unpacking
    mean_value, discrepancy_value = self.metric._ComputeLatencyMetric(
        self.page, stats, 'input_event_latency', stats.input_event_latency)
    self.assertEquals(None, mean_value.value)
    self.assertEquals(smoothness.NOT_ENOUGH_FRAMES_MESSAGE,
                      mean_value.none_value_reason)
    self.assertEquals(None, discrepancy_value.value)
    self.assertEquals(smoothness.NOT_ENOUGH_FRAMES_MESSAGE,
                      discrepancy_value.none_value_reason)
def testComputeGestureScrollUpdateLatency(self):
    stats = _MockRenderingStats(
        frame_timestamps=self.good_timestamps,
        gesture_scroll_update_latency=[[10, 20], [30, 40, 50]])
    # Only the first gesture-scroll-update latency value is reported.
    gesture_value = self.metric._ComputeFirstGestureScrollUpdateLatency(
        self.page, stats)[0]
    self.assertEquals(10, gesture_value.value)

def testComputeGestureScrollUpdateLatencyWithMissingData(self):
    # No latency samples recorded: the metric returns an empty tuple.
    stats = _MockRenderingStats(
        frame_timestamps=self.good_timestamps,
        gesture_scroll_update_latency=[[], []])
    value = self.metric._ComputeFirstGestureScrollUpdateLatency(
        self.page, stats)
    self.assertEquals((), value)

def testComputeGestureScrollUpdateLatencyWithNotEnoughFrames(self):
    stats = _MockRenderingStats(
        frame_timestamps=self.not_enough_frames_timestamps,
        gesture_scroll_update_latency=[[10, 20], [30, 40, 50]])
    gesture_value = self.metric._ComputeFirstGestureScrollUpdateLatency(
        self.page, stats)[0]
    self.assertEquals(None, gesture_value.value)
    self.assertEquals(smoothness.NOT_ENOUGH_FRAMES_MESSAGE,
                      gesture_value.none_value_reason)

def testComputeQueueingDuration(self):
    # Queueing durations from all sub-lists are flattened in order.
    stats = _MockRenderingStats(frame_timestamps=self.good_timestamps,
                                frame_queueing_durations=[[10, 20], [30, 40]])
    list_of_scalar_values = self.metric._ComputeQueueingDuration(self.page,
                                                                 stats)
    self.assertEquals([10, 20, 30, 40], list_of_scalar_values.values)
def testComputeQueueingDurationWithMissingData(self):
    stats = _MockRenderingStats(frame_timestamps=self.good_timestamps,
                                frame_queueing_durations=[[], []])
    list_of_scalar_values = self.metric._ComputeQueueingDuration(
        self.page, stats)
    self.assertEquals(None, list_of_scalar_values.values)
    self.assertEquals('No frame queueing durations recorded.',
                      list_of_scalar_values.none_value_reason)

def testComputeQueueingDurationWithMissingDataAndErrorValue(self):
    # An explicit error recorded in stats.errors becomes the none-value
    # reason instead of the generic "not recorded" message.
    stats = _MockRenderingStats(frame_timestamps=self.good_timestamps,
                                frame_queueing_durations=[[], []])
    stats.errors['frame_queueing_durations'] = (
        'Current chrome version does not support the queueing delay metric.')
    list_of_scalar_values = self.metric._ComputeQueueingDuration(
        self.page, stats)
    self.assertEquals(None, list_of_scalar_values.values)
    self.assertEquals(
        'Current chrome version does not support the queueing delay metric.',
        list_of_scalar_values.none_value_reason)

def testComputeQueueingDurationWithNotEnoughFrames(self):
    stats = _MockRenderingStats(
        frame_timestamps=self.not_enough_frames_timestamps,
        frame_queueing_durations=[[10, 20], [30, 40, 50]])
    list_of_scalar_values = self.metric._ComputeQueueingDuration(self.page,
                                                                 stats)
    self.assertEquals(None, list_of_scalar_values.values)
    self.assertEquals(smoothness.NOT_ENOUGH_FRAMES_MESSAGE,
                      list_of_scalar_values.none_value_reason)

def testComputeFrameTimeMetric(self):
    stats = _MockRenderingStats(frame_timestamps=self.good_timestamps,
                                frame_times=[[10, 20], [30, 40, 50]])
    frame_times_value, mean_frame_time_value, percentage_smooth_value = (
        self.metric._ComputeFrameTimeMetric(self.page, stats))
    self.assertEquals([10, 20, 30, 40, 50], frame_times_value.values)
    self.assertEquals(30, mean_frame_time_value.value)
    self.assertEquals(20, percentage_smooth_value.value)
def testComputeFrameTimeMetricWithNotEnoughFrames2(self):
stats = _MockRenderingStats(
frame_timestamps=self.not_enough_frames_timestamps,
frame_times=[[10, 20], [30, 40, 50]])
frame_times_value, mean_frame_time_value, percentage_smooth_value = (
self.metric._ComputeFrameTimeMet |
# cell definition
# name = 'Epos_AD'
# libname = 'can'
inp = 0
outp = 1
parameters = dict() #parametriseerbare cell
properties = {'Device ID': ' 0x01', 'Channel [ | 0/1]': ' 0', 'name': 'epos_areadBlk'} #voor netlisten
#view variables:
iconSource = 'AD'
views = {'ic | on':iconSource}
|
validate_certs=validate_certs)
def _required_auth_capability(self):
    """Tell boto's auth machinery to sign requests with Signature V4."""
    return ['hmac-v4']
def encode_bool(self, v):
    """Return the CloudFormation wire representation ('true'/'false')
    of any truthy/falsy value *v*."""
    return "true" if bool(v) else "false"
def _build_create_or_update_params(self, stack_name, template_body,
template_url, parameters,
notification_arns, disable_rollback,
timeout_in_minutes, capabilities, tags):
"""
Helper that creates JSON parameters needed by a Stack Create or
Stack Update call.
:type stack_name: string
:param stack_name: The name of the Stack, must be unique amoung running
Stacks
:type template_body: string
:param template_body: The template body (JSON string)
:type template_url: string
:param template_url: An S3 URL of a stored template JSON document. If
both the template_body and template_url are
specified, the template_body takes precedence
:type parameters: list of tuples
:param parameters: A list of (key, value) pairs for template input
parameters.
:type notification_arns: list of strings
:param notification_arns: A list of SNS topics to send Stack event
notifications to.
:type disable_rollback: bool
:param disable_rollback: Indicates whether or not to rollback on
failure.
:type timeout_in_minutes: int
:param timeout_in_minutes: Maximum amount of time to let the Stack
spend creating itself. If this timeout is exceeded,
the Stack will enter the CREATE_FAILED state.
:type capabilities: list
:param capabilities: The list of capabilities you want to allow in
the stack. Currently, the only valid capability is
'CAPABILITY_IAM'.
:type tags: dict
:param tags: A dictionary of (key, value) pairs of tags to
associate with this stack.
:rtype: dict
:return: JSON parameters represented as a Python dict.
"""
params = {'ContentType': "JSON", 'StackName': stack_name,
'DisableRollback': self.encode_bool(disable_rollback)}
if template_body:
params['TemplateBody'] = template_body
if template_url:
params['TemplateURL'] = template_url
if template_body and template_url:
boto.log.warning("If both TemplateBody and TemplateURL are"
" specified, only TemplateBody will be honored by the API")
if len(parameters) > 0:
for i, (key, value) in enumerate(parameters):
params['Parameters.member.%d.ParameterKey' % (i + 1)] = key
params['Parameters.member.%d.ParameterValue' % (i + 1)] = value
if capabilities:
for i, value in enumerate(capabilities):
params['Capabilities.member.%d' % (i + 1)] = value
if tags:
for i, (key, value) in enumerate(tags.items()):
params['Tags.member.%d.Key' % (i + 1)] = key
params['Tags.member.%d.Value' % (i + 1)] = value
if len(notification_arns) > 0:
self.build_list_params(params, notification_arns,
"NotificationARNs.member")
if timeout_in_minutes:
params['TimeoutInMinutes'] = int(timeout_in_minutes)
return params
def create_stack(self, stack_name, template_body=None, template_url=None,
                 parameters=None, notification_arns=None, disable_rollback=False,
                 timeout_in_minutes=None, capabilities=None, tags=None):
    """
    Creates a CloudFormation Stack as specified by the template.

    :type stack_name: string
    :param stack_name: The name of the Stack, must be unique among running
                       Stacks

    :type template_body: string
    :param template_body: The template body (JSON string)

    :type template_url: string
    :param template_url: An S3 URL of a stored template JSON document. If
                         both the template_body and template_url are
                         specified, the template_body takes precedence

    :type parameters: list of tuples
    :param parameters: A list of (key, value) pairs for template input
                       parameters.

    :type notification_arns: list of strings
    :param notification_arns: A list of SNS topics to send Stack event
                              notifications to.

    :type disable_rollback: bool
    :param disable_rollback: Indicates whether or not to rollback on
                             failure.

    :type timeout_in_minutes: int
    :param timeout_in_minutes: Maximum amount of time to let the Stack
                               spend creating itself. If this timeout is exceeded,
                               the Stack will enter the CREATE_FAILED state.

    :type capabilities: list
    :param capabilities: The list of capabilities you want to allow in
                         the stack. Currently, the only valid capability is
                         'CAPABILITY_IAM'.

    :type tags: dict
    :param tags: A dictionary of (key, value) pairs of tags to
                 associate with this stack.

    :rtype: string
    :return: The unique Stack ID.
    """
    # Mutable-default-argument fix: the original used parameters=[] and
    # notification_arns=[], which share one list object across all calls.
    # None defaults with normalization are behaviorally equivalent for
    # every caller.
    if parameters is None:
        parameters = []
    if notification_arns is None:
        notification_arns = []
    params = self._build_create_or_update_params(stack_name,
        template_body, template_url, parameters, notification_arns,
        disable_rollback, timeout_in_minutes, capabilities, tags)
    response = self.make_request('CreateStack', params, '/', 'POST')
    body = response.read()
    if response.status == 200:
        body = json.loads(body)
        return body['CreateStackResponse']['CreateStackResult']['StackId']
    else:
        # Log the failure details before raising for easier debugging.
        boto.log.error('%s %s' % (response.status, response.reason))
        boto.log.error('%s' % body)
        raise self.ResponseError(response.status, response.reason, body)
def update_stack(self, stack_name, template_body=None, template_url=None,
parameters=[], notification_arns=[], disable_rollback=False,
timeout_in_minutes=None, capabilities=None, tags=None):
"""
Updates a CloudFormation Stack as specified by the template.
:type stack_name: string
:param stack_name: The name of the Stack, must be unique amoung running
Stacks.
:type template_body: string
:param template_body: The template body (JSON string)
:type template_url: string
:param template_url: An S3 URL of a stored template JSON document. If
both the template_body and template_url are
specified, the template_body takes precedence.
:type parameters: list of tuples
:param parameters: A list of (key, value) pairs for template input
parameters.
:type notification_arns: list of strings
:param notification_arns: A list of SNS topics to send Stack event
notifications to.
:type disable_rollback: bool
:param disable_rollback: Indicates whether or not to rollback on
failure.
:type timeout_in_minutes: int
:param timeout_in_minutes: Maximum amount of time to let the Stack
spend creating itself. If this timeout is exceeded,
the Stack will enter the CREATE_FAILED state
:type capabilities: list
:param capabilities: The list of capabilities you want to allow in
the stack. Currently, the only valid capability is
'CAPABILITY_IAM'.
:type tags: dict
:param tags: A dictionary of (key, value) pairs of tags to
associate with this stack.
:rtype: string
:return: The unique Stack ID.
"""
params = self._build_create_or_update_params(stack_name,
template_body, template_url, parameters, notification_arns,
disable_rollback, timeout_in_minutes, capabilities, tags)
response = self.make_request('UpdateStack', para |
si | ngle_numeral_to_decimal_map = {"I": 1, | "V": 5, "X": 10, "L": 50, "C": 100, "D": 500, "M": 1000}
|
from django.apps import AppConfig
|
class GeoDashServerDjangoConfig(AppConfig):
    """Django application configuration for the geodashserver app."""
    name = 'geodashserver'
    verbose_name = "GeoDash Server"
|
ent_definition:
# constraint exists but its definition may have changed
tools.drop_constraint(cr, self._table, conname)
if foreign_key_re.match(definition):
self.pool.post_init(tools.add_constraint, cr, self._table, conname, definition)
else:
self.pool.post_constraint(tools.add_constraint, cr, self._table, conname, definition)
#
# Update objects that use this one to update their _inherits fields
#
@api.model
def _add_inherited_fields(self):
    """ Determine inherited fields.

    For every parent in ``_inherits``, expose that parent's fields on this
    model as related fields, unless the field is redefined locally.
    Abstract models and models without ``_inherits`` are left untouched.
    """
    if self._abstract or not self._inherits:
        return
    # determine which fields can be inherited
    # NOTE(review): when several parents define the same field name, the
    # parent iterated last wins here -- confirm this precedence is intended.
    to_inherit = {
        name: (parent_fname, field)
        for parent_model_name, parent_fname in self._inherits.items()
        for name, field in self.env[parent_model_name]._fields.items()
    }
    # add inherited fields that are not redefined locally
    for name, (parent_fname, field) in to_inherit.items():
        if name not in self._fields:
            # inherited fields are implemented as related fields, with the
            # following specific properties:
            #  - reading inherited fields should not bypass access rights
            #  - copy inherited fields iff their original field is copied
            self._add_field(name, field.new(
                inherited=True,
                inherited_field=field,
                related=f"{parent_fname}.{name}",
                related_sudo=False,
                copy=field.copy,
                readonly=field.readonly,
            ))
@api.model
def _inherits_check(self):
    """Validate the ``_inherits`` mapping and its implementing fields.

    Ensures every ``_inherits`` entry has a required many2one field with a
    cascade/restrict ondelete (creating or fixing the field when needed),
    and mirrors every delegate=True many2one field back into ``_inherits``.
    """
    for table, field_name in self._inherits.items():
        field = self._fields.get(field_name)
        if not field:
            # No field declared for this reference: create a default one.
            _logger.info('Missing many2one field definition for _inherits reference "%s" in "%s", using default one.', field_name, self._name)
            from .fields import Many2one
            field = Many2one(table, string="Automatically created field to link to parent %s" % table, required=True, ondelete="cascade")
            self._add_field(field_name, field)
        elif not (field.required and (field.ondelete or "").lower() in ("cascade", "restrict")):
            # Field exists but violates the required/ondelete contract: fix it.
            _logger.warning('Field definition for _inherits reference "%s" in "%s" must be marked as "required" with ondelete="cascade" or "restrict", forcing it to required + cascade.', field_name, self._name)
            field.required = True
            field.ondelete = "cascade"
        field.delegate = True
    # reflect fields with delegate=True in dictionary self._inherits
    for field in self._fields.values():
        if field.type == 'many2one' and not field.related and field.delegate:
            if not field.required:
                _logger.warning("Field %s with delegate=True must be required.", field)
                field.required = True
            if field.ondelete.lower() not in ('cascade', 'restrict'):
                field.ondelete = 'cascade'
            # Rebind _inherits on the class (not the instance) so the
            # registry-wide model definition is updated.
            type(self)._inherits = {**self._inherits, field.comodel_name: field.name}
            self.pool[field.comodel_name]._inherits_children.add(self._name)
@api.model
def _prepare_setup(self):
    """ Prepare the setup of the model. """
    model_cls = type(self)
    model_cls._setup_done = False
    # Rebinding __bases__ is costly, so only do it when it actually changed.
    if model_cls.__bases__ != model_cls.__base_classes:
        model_cls.__bases__ = model_cls.__base_classes
    # Drop cached name attributes so _setup_fields() recomputes them.
    discardattr(model_cls, '_rec_name')
    discardattr(model_cls, '_active_name')
@api.model
def _setup_base(self):
    """ Determine the inherited and custom fields of the model.

    Idempotent: returns immediately when the model is already set up.
    Rebuilds ``cls._fields`` from class definitions, manual fields and
    inherited fields, then resolves ``_rec_name`` and ``_active_name``.
    """
    cls = type(self)
    if cls._setup_done:
        return
    # the classes that define this model, i.e., the ones that are not
    # registry classes; the purpose of this attribute is to behave as a
    # cache of [c for c in cls.mro() if not is_registry_class(c))], which
    # is heavily used in function fields.resolve_mro()
    cls._model_classes = tuple(c for c in cls.mro() if getattr(c, 'pool', None) is None)
    # 1. determine the proper fields of the model: the fields defined on the
    # class and magic fields, not the inherited or custom ones
    # retrieve fields from parent classes, and duplicate them on cls to
    # avoid clashes with inheritance between different models
    for name in cls._fields:
        discardattr(cls, name)
    cls._fields.clear()
    # collect the definitions of each field (base definition + overrides)
    definitions = defaultdict(list)
    for klass in reversed(cls._model_classes):
        # this condition is an optimization of is_definition_class(klass)
        if isinstance(klass, MetaModel):
            for field in klass._field_definitions:
                definitions[field.name].append(field)
    for name, fields_ in definitions.items():
        # A single direct definition on this very model can be reused as-is;
        # anything else needs a fresh field combining all base definitions.
        if len(fields_) == 1 and fields_[0]._direct and fields_[0].model_name == cls._name:
            cls._fields[name] = fields_[0]
        else:
            self._add_field(name, fields_[-1].new(_base_fields=fields_))
    # 2. add manual fields
    if self.pool._init_modules:
        self.env['ir.model.fields']._add_manual_fields(self)
    # 3. make sure that parent models determine their own fields, then add
    # inherited fields to cls
    self._inherits_check()
    for parent in self._inherits:
        self.env[parent]._setup_base()
    self._add_inherited_fields()
    # 4. initialize more field metadata
    cls._setup_done = True
    for field in cls._fields.values():
        field.prepare_setup()
    # 5. determine and validate rec_name
    if cls._rec_name:
        assert cls._rec_name in cls._fields, \
            "Invalid _rec_name=%r for model %r" % (cls._rec_name, cls._name)
    elif 'name' in cls._fields:
        cls._rec_name = 'name'
    elif cls._custom and 'x_name' in cls._fields:
        cls._rec_name = 'x_name'
    # 6. determine and validate active_name
    if cls._active_name:
        assert (cls._active_name in cls._fields
                and cls._active_name in ('active', 'x_active')), \
            ("Invalid _active_name=%r for model %r; only 'active' and "
             "'x_active' are supported and the field must be present on "
             "the model") % (cls._active_name, cls._name)
    elif 'active' in cls._fields:
        cls._active_name = 'active'
    elif 'x_active' in cls._fields:
        cls._active_name = 'x_active'
@api.model
def _setup_fields(self):
    """ Setup the fields, except for recomputation triggers.

    Manual fields whose setup fails (e.g. because their comodel is not
    loaded yet) are collected and removed; failures on regular fields
    are re-raised.
    """
    cls = type(self)
    # set up fields
    bad_fields = []
    for name, field in cls._fields.items():
        try:
            field.setup(self)
        except Exception:
            if field.base_field.manual:
                # Something goes wrong when setup a manual field.
                # This can happen with related fields using another manual many2one field
                # that hasn't been loaded because the comodel does not exist yet.
                # This can also be a manual function field depending on not loaded fields yet.
                bad_fields.append(name)
                continue
            raise
    # Remove after the loop to avoid mutating cls._fields while iterating.
    for name in bad_fields:
        self._pop_field(name)
@api.model
def _setup_complete(self):
    """ Setup recomputation triggers, and complete the model setup. """
    # Constraints and onchange handlers are registered on the model class.
    type(self)._init_constraints_onchanges()
@api.model
def fields_get(self, allfields=None, attribute |
time.datetime(2006, 12, 17, 7, 3, 31)
)
def login(self, username='testclient', password='password'):
    """Log the test client in and verify a session key was established."""
    credentials = {'username': username, 'password': password}
    response = self.client.post('/login/', credentials)
    self.assertIn(SESSION_KEY, self.client.session)
    return response
def logout(self):
    """Log the test client out via the admin logout view and verify it."""
    resp = self.client.get('/admin/logout/')
    self.assertEqual(resp.status_code, 200)
    self.assertNotIn(SESSION_KEY, self.client.session)
def assertFormError(self, response, error):
    """Assert that error is found in response.context['form'] errors"""
    all_errors = []
    for field_errors in response.context['form'].errors.values():
        all_errors.extend(field_errors)
    self.assertIn(force_text(error), all_errors)
def assertURLEqual(self, url, expected, parse_qs=False):
    """
    Compare two URLs component-by-component (as given by urlparse),
    failing on the first mismatch between components that are present
    in both URLs.

    If `parse_qs` is True, the querystrings are parsed with QueryDict so
    that parameter order does not matter; otherwise they are compared
    as-is.
    """
    for attr, got, want in zip(ParseResult._fields,
                               urlparse(url), urlparse(expected)):
        if parse_qs and attr == 'query':
            got, want = QueryDict(got), QueryDict(want)
        # Empty components are treated as "absent" and never compared.
        if got and want and got != want:
            self.fail("%r != %r (%s doesn't match)" % (url, expected, attr))
@override_settings(ROOT_URLCONF='django.contrib.auth.urls')
class AuthViewNamedURLTests(AuthViewsTestCase):
    """Checks that every named auth URL can be reversed."""

    def test_named_urls(self):
        "Named URLs should be reversible"
        url_specs = [
            ('login', [], {}),
            ('logout', [], {}),
            ('password_change', [], {}),
            ('password_change_done', [], {}),
            ('password_reset', [], {}),
            ('password_reset_done', [], {}),
            ('password_reset_confirm', [], {
                'uidb64': 'aaaaaaa',
                'token': '1111-aaaaa',
            }),
            ('password_reset_complete', [], {}),
        ]
        for view_name, view_args, view_kwargs in url_specs:
            try:
                reverse(view_name, args=view_args, kwargs=view_kwargs)
            except NoReverseMatch:
                self.fail("Reversal of url named '%s' failed with NoReverseMatch" % view_name)
class PasswordResetTest(AuthViewsTestCase):
def test_email_not_found(self):
    """If the provided email is not registered, don't raise any error but
    also don't send any email."""
    response = self.client.get('/password_reset/')
    self.assertEqual(response.status_code, 200)
    response = self.client.post('/password_reset/', {'email': 'not_a_real_email@email.com'})
    # Redirects like a successful reset, but the outbox stays empty.
    self.assertEqual(response.status_code, 302)
    self.assertEqual(len(mail.outbox), 0)

def test_email_found(self):
    "Email is sent if a valid email address is provided for password reset"
    response = self.client.post('/password_reset/', {'email': 'staffmember@example.com'})
    self.assertEqual(response.status_code, 302)
    self.assertEqual(len(mail.outbox), 1)
    self.assertIn("http://", mail.outbox[0].body)
    self.assertEqual(settings.DEFAULT_FROM_EMAIL, mail.outbox[0].from_email)
    # optional multipart text/html email has been added. Make sure original,
    # default functionality is 100% the same
    self.assertFalse(mail.outbox[0].message().is_multipart())
def test_extra_email_context(self):
    """
    extra_email_context should be available in the email template context.
    """
    response = self.client.post(
        '/password_reset_extra_email_context/',
        {'email': 'staffmember@example.com'},
    )
    self.assertEqual(response.status_code, 302)
    self.assertEqual(len(mail.outbox), 1)
    self.assertIn('Email email context: "Hello!"', mail.outbox[0].body)

def test_html_mail_template(self):
    """
    A multipart email with text/plain and text/html is sent
    if the html_email_template parameter is passed to the view
    """
    response = self.client.post('/password_reset/html_email_template/', {'email': 'staffmember@example.com'})
    self.assertEqual(response.status_code, 302)
    self.assertEqual(len(mail.outbox), 1)
    message = mail.outbox[0].message()
    # Plain-text part first, HTML part second; only the HTML part
    # contains markup.
    self.assertEqual(len(message.get_payload()), 2)
    self.assertTrue(message.is_multipart())
    self.assertEqual(message.get_payload(0).get_content_type(), 'text/plain')
    self.assertEqual(message.get_payload(1).get_content_type(), 'text/html')
    self.assertNotIn('<html>', message.get_payload(0).get_payload())
    self.assertIn('<html>', message.get_payload(1).get_payload())
def test_email_found_custom_from(self):
    "Email is sent if a valid email address is provided for password reset when a custom from_email is provided."
    response = self.client.post('/password_reset_from_email/', {'email': 'staffmember@example.com'})
    self.assertEqual(response.status_code, 302)
    self.assertEqual(len(mail.outbox), 1)
    self.assertEqual("staffmember@example.com", mail.outbox[0].from_email)

@ignore_warnings(category=RemovedInDjango110Warning)
@override_settings(ALLOWED_HOSTS=['adminsite.com'])
def test_admin_reset(self):
    "If the reset view is marked as being for admin, the HTTP_HOST header is used for a domain override."
    response = self.client.post('/admin_password_reset/',
        {'email': 'staffmember@example.com'},
        HTTP_HOST='adminsite.com'
    )
    self.assertEqual(response.status_code, 302)
    self.assertEqual(len(mail.outbox), 1)
    # The reset link in the email body uses the overridden host.
    self.assertIn("http://adminsite.com", mail.outbox[0].body)
    self.assertEqual(settings.DEFAULT_FROM_EMAIL, mail.outbox[0].from_email)
# Skip any 500 handler action (like sending more mail...)
@override_settings(DEBUG_PROPAGATE_EXCEPTIONS=True)
def test_poisoned_http_host(self):
    "Poisoned HTTP_HOST headers can't be used for reset emails"
    # This attack is based on the way browsers handle URLs. The colon
    # should be used to separate the port, but if the URL contains an @,
    # the colon is interpreted as part of a username for login purposes,
    # making 'evil.com' the request domain. Since HTTP_HOST is used to
    # produce a meaningful reset URL, we need to be certain that the
    # HTTP_HOST header isn't poisoned. This is done as a check when get_host()
    # is invoked, but we check here as a practical consequence.
    with patch_logger('django.security.DisallowedHost', 'error') as logger_calls:
        response = self.client.post(
            '/password_reset/',
            {'email': 'staffmember@example.com'},
            HTTP_HOST='www.example:dr.frankenstein@evil.tld'
        )
        # Bad host -> 400, no mail sent, and exactly one security log entry.
        self.assertEqual(response.status_code, 400)
        self.assertEqual(len(mail.outbox), 0)
        self.assertEqual(len(logger_calls), 1)

# Skip any 500 handler action (like sending more mail...)
@override_settings(DEBUG_PROPAGATE_EXCEPTIONS=True)
def test_poisoned_http_host_admin_site(self):
    "Poisoned HTTP_HOST headers can't be used for reset emails on admin views"
    # Same attack as test_poisoned_http_host, but against the admin view.
    with patch_logger('django.security.DisallowedHost', 'error') as logger_calls:
        response = self.client.post(
            '/admin_password_reset/',
            {'email': 'staffmember@example.com'},
            HTTP_HOST='www.example:dr.frankenstein@evil.tld'
        )
        self.assertEqual(response.status_code, 400)
        self.assertEqual(len(mail.outbox), 0)
        self.assertEqual(len(logger_calls), 1)
def _test_confirm_start(self):
# Start by creating the email
self.client.post('/password_reset/', {'email': 'staffmember@example.com'})
self.assertEqual(len(mail.outbox), 1)
return self._re |
scription', 'sortOrder')
admin.site.register(OrderExtSelectionFieldType, OrderExtSelectionFieldTypeAdmin)


# --- Order / BP text and basic order models ------------------------------
# Each ModelAdmin below only customizes the change-list columns
# (list_display) and, where useful, the admin search box (search_fields).

class TextTypeAdmin(admin.ModelAdmin):
    list_display = ('orderType', 'key', 'description')


admin.site.register(TextType, TextTypeAdmin)


class BPTextTypeAdmin(admin.ModelAdmin):
    list_display = ('bpType', 'key', 'description')


admin.site.register(BPTextType, BPTextTypeAdmin)


class OrderAdmin(admin.ModelAdmin):
    list_display = (
        'id', 'type', 'description', 'createdBy', 'createdAt', 'updatedBy', 'updatedAt', 'priority', 'status',
        'deleteFlag')
    search_fields = ('id', 'description')


admin.site.register(Order, OrderAdmin)


class OrderCustomizedAdmin(admin.ModelAdmin):
    list_display = (
        'order', 'travelAmount', 'amount', 'stage', 'goLiveDate', 'file1', 'file2', 'imgFile1', 'imgFile2')


admin.site.register(OrderCustomized, OrderCustomizedAdmin)


class OrderMultipleValueFieldAdmin(admin.ModelAdmin):
    list_display = ('id', 'order', 'field', 'charValue1', 'charValue2')


admin.site.register(OrderMultipleValueField, OrderMultipleValueFieldAdmin)


class OrderPFAdmin(admin.ModelAdmin):
    list_display = ('order', 'pf', 'bp', 'relatedOrder', 'main')


admin.site.register(OrderPF, OrderPFAdmin)


class OrderTextAdmin(admin.ModelAdmin):
    list_display = ('type', 'order', 'createdBy', 'createdAt', 'content')
    search_fields = ('content',)


admin.site.register(OrderText, OrderTextAdmin)


class BPTextAdmin(admin.ModelAdmin):
    list_display = ('type', 'bp', 'createdBy', 'createdAt', 'content')
    search_fields = ('content',)


admin.site.register(BPText, BPTextAdmin)
# --- Extension fields, site content and field-definition models ----------

class OrderExtFieldTypeAdmin(admin.ModelAdmin):
    list_display = ('orderType', 'key', 'description')


admin.site.register(OrderExtFieldType, OrderExtFieldTypeAdmin)


class OrderExtFieldAdmin(admin.ModelAdmin):
    list_display = ('type', 'originalOrder', 'value', 'relatedBp', 'relatedOrder', 'relatedSelection')


admin.site.register(OrderExtField, OrderExtFieldAdmin)


class SiteLanguageAdmin(admin.ModelAdmin):
    list_display = ('key', 'description')


admin.site.register(SiteLanguage, SiteLanguageAdmin)


class SiteAppTypeAdmin(admin.ModelAdmin):
    list_display = ('appId', 'description')


admin.site.register(SiteAppType, SiteAppTypeAdmin)


class SitePhraseAdmin(admin.ModelAdmin):
    list_display = ('phraseId', 'app', 'phraseLan', 'content', 'bigContent')
    search_fields = ('phraseId', 'content', 'bigContent')


admin.site.register(SitePhrase, SitePhraseAdmin)


class SiteMenuItemAdmin(admin.ModelAdmin):
    list_display = ('role', 'parentMenuId', 'phraseId', 'appId', 'pageApp', 'sortOrder', 'valid')


admin.site.register(SiteMenuItem, SiteMenuItemAdmin)


class FieldTypeAdmin(admin.ModelAdmin):
    list_display = ('key', 'description')


admin.site.register(FieldType, FieldTypeAdmin)


class OrderFieldDefAdmin(admin.ModelAdmin):
    list_display = (
        'orderType', 'fieldKey', 'attributeType', 'fieldType', 'valueType', 'storeType', 'storeColumn', 'storeKey')


admin.site.register(OrderFieldDef, OrderFieldDefAdmin)


class BPFieldDefAdmin(admin.ModelAdmin):
    list_display = (
        'bpType', 'fieldKey', 'attributeType', 'fieldType', 'valueType', 'storeType', 'storeColumn', 'storeKey')


admin.site.register(BPFieldDef, BPFieldDefAdmin)


class UserSavedSearchFavoriteAdmin(admin.ModelAdmin):
    list_display = ('userlogin', 'type', 'name', 'sortOrder', 'property', 'operation', 'low', 'high')


admin.site.register(UserSavedSearchFavorite, UserSavedSearchFavoriteAdmin)


class OrderBEDefAdmin(admin.ModelAdmin):
    list_display = ('orderType', 'businessEntity')


admin.site.register(OrderBEDef, OrderBEDefAdmin)


class BPBEDefAdmin(admin.ModelAdmin):
    list_display = ('bpType', 'businessEntity')


admin.site.register(BPBEDef, BPBEDefAdmin)


class ViewTypeAdmin(admin.ModelAdmin):
    list_display = ('key', 'description')


admin.site.register(ViewType, ViewTypeAdmin)
class StdViewLayoutConfAdmin(admin.ModelAdmin):
    # NOTE(review): 'multipleValue1Required' appears twice in this tuple and
    # 'multipleValue2Required' never does — the second occurrence is very
    # likely meant to be 'multipleValue2Required'. Left unchanged because the
    # model's actual field names are not visible here; verify and fix against
    # the StdViewLayoutConf model.
    list_display = (
        'field', 'businessRole', 'viewType', 'locRow', 'locCol', 'locWidth', 'locHeight', 'visibility', 'required',
        'labelPhraseId', 'multipleValue1PhraseId', 'multipleValue1Required', 'multipleValue2PhraseId',
        'multipleValue1Required', 'appId', 'valid')


admin.site.register(StdViewLayoutConf, StdViewLayoutConfAdmin)


class BPStdViewLayoutConfAdmin(admin.ModelAdmin):
    list_display = (
        'field', 'businessRole', 'viewType', 'locRow', 'locCol', 'locWidth', 'locHeight', 'visibility', 'required',
        'labelPhraseId', 'appId', 'valid')


admin.site.register(BPStdViewLayoutConf, BPStdViewLayoutConfAdmin)
# --- User/authorization, auditing and attachment models ------------------

class UserRoleAdmin(admin.ModelAdmin):
    list_display = ('userlogin', 'role', 'valid')


admin.site.register(UserRole, UserRoleAdmin)


class UserProfileAdmin(admin.ModelAdmin):
    list_display = ('userlogin', 'profile', 'valid')


admin.site.register(UserProfile, UserProfileAdmin)


class UserParameterAdmin(admin.ModelAdmin):
    list_display = ('userlogin', 'name', 'value')


admin.site.register(UserParameter, UserParameterAdmin)


class UserSingleAuthObjectsAdmin(admin.ModelAdmin):
    list_display = ('userlogin', 'singleAuthObject', 'valid')


admin.site.register(UserSingleAuthObject, UserSingleAuthObjectsAdmin)


class UserProfileAuthObjectAdmin(admin.ModelAdmin):
    list_display = ('profile', 'singleAuthObject', 'valid')


admin.site.register(UserProfileAuthObject, UserProfileAuthObjectAdmin)


class UserViewHistoryAdmin(admin.ModelAdmin):
    list_display = ('userlogin', 'objectId', 'type', 'viewedAt')


admin.site.register(UserViewHistory, UserViewHistoryAdmin)


class ChangeHistoryAdmin(admin.ModelAdmin):
    list_display = (
        'id', 'objectId', 'type', 'objectField', 'oldValue', 'oldKeyValue', 'newValue', 'newKeyValue', 'updatedBy',
        'updatedAt')
    # NOTE(review): search_fields references 'orderId'/'orderField' while
    # list_display uses 'objectId'/'objectField' — presumably stale names
    # from before a rename; verify against the ChangeHistory model (invalid
    # search_fields raise admin system-check errors).
    search_fields = ('id', 'orderId', 'orderField', 'oldValue', 'oldKeyValue', 'newValue', 'newKeyValue')


admin.site.register(ChangeHistory, ChangeHistoryAdmin)


class LockTableAdmin(admin.ModelAdmin):
    list_display = ('objectId', 'tableType', 'lockedBy', 'lockedAt')


admin.site.register(LockTable, LockTableAdmin)


class SystemConfigurationAdmin(admin.ModelAdmin):
    list_display = ('key', 'property1', 'property2', 'value1', 'value2')


admin.site.register(SystemConfiguration, SystemConfigurationAdmin)


class ActivityAdmin(admin.ModelAdmin):
    list_display = ('order', 'startDateTime', 'endDateTime', 'visibility')


admin.site.register(Activity, ActivityAdmin)


class FileAttachmentAdmin(admin.ModelAdmin):
    list_display = ('name', 'description', 'version', 'actualfilename', 'file', 'deleteFlag')


admin.site.register(FileAttachment, FileAttachmentAdmin)


class OrderFileAttachmentAdmin(admin.ModelAdmin):
    list_display = (
        'id', 'order', 'name', 'description', 'actualfilename', 'file', 'image', 'createdBy', 'createdAt', 'deleteFlag')


admin.site.register(OrderFileAttachment, OrderFileAttachmentAdmin)


class BPFileAttachmentAdmin(admin.ModelAdmin):
    list_display = (
        'id', 'bp', 'name', 'description', 'actualfilename', 'file', 'image', 'createdBy', 'createdAt', 'deleteFlag')


admin.site.register(BPFileAttachment, BPFileAttachmentAdmin)


class UploadFilesTempAdmin(admin.ModelAdmin):
    list_display = ('imageFile', 'normalFile')


admin.site.register(UploadFilesTemp, UploadFilesTempAdmin)


class OrderFollowUpDefAdmin(admin.ModelAdmin):
    list_display = ('orderTypeA', 'relation', 'orderTypeB', 'comments', 'valid')


admin.site.register(OrderFollowUpDef, OrderFollowUpDefAdmin)


class AppNavAccessAdmin(admin.ModelAdmin):
    list_display = ('userLogin', 'type', 'pageApp', 'pageAction', 'pageParams', 'pageMode', 'accessedAt')


admin.site.register(AppNavAccess, AppNavAccessAdmin)


class UserFeedbackAdmin(admin.ModelAdmin):
    list_display = ('userLogin', 'title', 'type', 'text')


admin.site.register(UserFeedback, UserFeedbackAdmin)


class SiteMessageAdmin(admin.ModelAdmin):
    list_display = (
        'sender', 'receiver', 'message', 'sentAt', 'receiverReadFlag', 'receiverDeleteFlag', 'senderDeleteFlag')
admin.site.r |
import _plotly_utils.basevalidators
class ComputedValidator(_plotly_utils.basevalidators.AnyValidator):
    """Validator for the layout ``computed`` attribute.

    Accepts any value (AnyValidator); the only customization is that the
    edit type defaults to "none" unless the caller overrides it.
    """

    def __init__(self, plotly_name="computed", parent_name="layout", **kwargs):
        kwargs.setdefault("edit_type", "none")
        super(ComputedValidator, self).__init__(
            plotly_name=plotly_name, parent_name=parent_name, **kwargs
        )
|
#-------------------------------------------------------------------------
# Copyright (c) Microsoft. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# |
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#--------------------------------------------------------------------------
from .sas_usage import FileSasSamples
from .share_usage import ShareSamples
from .directory_usage import DirectorySa | mples
from .file_usage import FileSamples |
der the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# @author: Sumit Naiksatam, Cisco Systems, Inc.
# @author: Edgar Magana, Cisco Systems, Inc.
# @author: Arvind Somya, Cisco Systems, Inc. (asomya@cisco.com)
#
"""
PlugIn for Nexus OS driver
"""
import logging
from quantum.common import exceptions as exc
from quantum.openstack.common import importutils
from quantum.plugins.cisco.common import cisco_constants as const
from quantum.plugins.cisco.common import cisco_credentials_v2 as cred
from quantum.plugins.cisco.common import cisco_exceptions as excep
from quantum.plugins.cisco.db import network_db_v2 as cdb
from quantum.plugins.cisco.db import nexus_db_v2 as nxos_db
from quantum.plugins.cisco.l2device_plugin_base import L2DevicePluginBase
from quantum.plugins.cisco.nexus import cisco_nexus_configuration as conf
LOG = logging.getLogger(__name__)
class NexusPlugin(L2DevicePluginBase):
"""
Nexus PLugIn Main Class
"""
_networks = {}
def __init__(self):
    """
    Extracts the configuration parameters from the configuration file
    """
    # Instantiate the configured Nexus driver class by dotted path.
    self._client = importutils.import_object(conf.NEXUS_DRIVER)
    LOG.debug(_("Loaded driver %s"), conf.NEXUS_DRIVER)
    # Switch topology from config; create_network() indexes it as
    # switches[switch_ip][hostname]['ports'] and
    # switches[switch_ip]['ssh_port']['ssh_port'] — TODO confirm shape.
    self._nexus_switches = conf.NEXUS_DETAILS
    # Per-switch credential cache, populated lazily by get_credential().
    self.credentials = {}
def get_credential(self, nexus_ip):
    """Return the cached username/password dict for a switch IP.

    On a cache miss the credentials are fetched from the credential
    store once and memoized for subsequent calls.
    """
    creds = self.credentials.get(nexus_ip)
    if creds is None:
        creds = {
            'username': cred.Store.get_username(nexus_ip),
            'password': cred.Store.get_password(nexus_ip),
        }
        self.credentials[nexus_ip] = creds
    return creds
def get_all_networks(self, tenant_id):
    """
    Returns all known networks.

    NOTE(review): despite the original wording ("<network_uuid,
    network_name> for the specified tenant"), this returns the *values* of
    the class-level ``_networks`` dict (whole network dicts) and ignores
    ``tenant_id`` entirely.
    """
    LOG.debug(_("NexusPlugin:get_all_networks() called"))
    return self._networks.values()
def create_network(self, tenant_id, net_name, net_id, vlan_name, vlan_id,
                   host, instance):
    """
    Create a VLAN in the appropriate switch/port,
    and configure the appropriate interfaces
    for this VLAN
    """
    LOG.debug(_("NexusPlugin:create_network() called"))
    # Grab the switch IP and port for this host
    # NOTE(review): if no configured hostname matches `host`, switch_ip and
    # port_id remain '' and the DB/driver calls below run with empty
    # values — confirm whether config guarantees a match.
    switch_ip = ''
    port_id = ''
    for switch in self._nexus_switches.keys():
        for hostname in self._nexus_switches[switch].keys():
            if str(hostname) == str(host):
                switch_ip = switch
                port_id = self._nexus_switches[switch][hostname]['ports']
    # Check if this network is already in the DB
    binding = nxos_db.get_port_vlan_switch_binding(
        port_id, vlan_id, switch_ip)
    if not binding:
        _nexus_ip = switch_ip
        _nexus_ports = (port_id,)
        # Config nests the SSH port one level deep under 'ssh_port'.
        _nexus_ssh_port = \
            self._nexus_switches[switch_ip]['ssh_port']['ssh_port']
        _nexus_creds = self.get_credential(_nexus_ip)
        _nexus_username = _nexus_creds['username']
        _nexus_password = _nexus_creds['password']
        # Check for vlan/switch binding
        vbinding = nxos_db.get_nexusvlan_binding(vlan_id, switch_ip)
        if not vbinding:
            # Create vlan and trunk vlan on the port
            self._client.create_vlan(
                vlan_name, str(vlan_id), _nexus_ip,
                _nexus_username, _nexus_password,
                _nexus_ports, _nexus_ssh_port, vlan_id)
        else:
            # Only trunk vlan on the port
            man = self._client.nxos_connect(_nexus_ip,
                                            int(_nexus_ssh_port),
                                            _nexus_username,
                                            _nexus_password)
            self._client.enable_vlan_on_trunk_int(man,
                                                  port_id,
                                                  vlan_id)
        # Persist the new port/vlan/switch binding for this instance.
        nxos_db.add_nexusport_binding(port_id, str(vlan_id),
                                      switch_ip, instance)
    # Cache and return the logical network description.
    new_net_dict = {const.NET_ID: net_id,
                    const.NET_NAME: net_name,
                    const.NET_PORTS: {},
                    const.NET_VLAN_NAME: vlan_name,
                    const.NET_VLAN_ID: vlan_id}
    self._networks[net_id] = new_net_dict
    return new_net_dict
def delete_network(self, tenant_id, net_id, **kwargs):
    """Delete the VLAN from the switches and its interface configuration.

    Currently only logs the invocation; no device work is performed here.
    """
    message = _("NexusPlugin:delete_network() called")
    LOG.debug(message)
def get_network_details(self, tenant_id, net_id, **kwargs):
    """
    Returns the details of a particular network
    """
    LOG.debug(_("NexusPlugin:get_network_details() called"))
    # _get_network is not defined in this class within view — presumably a
    # helper defined elsewhere that raises for unknown ids. TODO confirm.
    network = self._get_network(tenant_id, net_id)
    return network
def update_network(self, tenant_id, net_id, **kwargs):
    """Update the properties of a virtual network.

    Log-only stub: no device or DB state is touched.
    """
    message = _("NexusPlugin:update_network() called")
    LOG.debug(message)
def get_all_ports(self, tenant_id, net_id, **kwargs):
    """List ports for a network.

    Log-only stub — probably not applicable to the Nexus plugin; delete
    if not required.
    """
    message = _("NexusPlugin:get_all_ports() called")
    LOG.debug(message)
def create_port(self, tenant_id, net_id, port_state, port_id, **kwargs):
    """Create a port.

    Log-only stub — probably not applicable to the Nexus plugin; delete
    if not required.
    """
    message = _("NexusPlugin:create_port() called")
    LOG.debug(message)
def delete_port(self, device_id, vlan_id):
    """
    Delete port bindings from the database and scan
    whether the network is still required on
    the interfaces trunked
    """
    LOG.debug(_("NexusPlugin:delete_port() called"))
    # Delete DB row for this port
    row = nxos_db.get_nexusvm_binding(vlan_id, device_id)
    if row:
        nxos_db.remove_nexusport_binding(row['port_id'], row['vlan_id'],
                                         row['switch_ip'],
                                         row['instance_id'])
        # Check for any other bindings with the same vlan_id and switch_ip
        bindings = nxos_db.get_nexusvlan_binding(
            row['vlan_id'], row['switch_ip'])
        if not bindings:
            # Delete this vlan from this switch
            _nexus_ip = row['switch_ip']
            _nexus_ports = (row['port_id'],)
            _nexus_ssh_port = \
                self._nexus_switches[_nexus_ip]['ssh_port']['ssh_port']
            _nexus_creds = self.get_credential(_nexus_ip)
            _nexus_username = _nexus_creds['username']
            _nexus_password = _nexus_creds['password']
            self._client.delete_vlan(
                str(row['vlan_id']), _nexus_ip,
                _nexus_username, _nexus_password,
                _nexus_ports, _nexus_ssh_port)
        return row['instance_id']
    # NOTE(review): when no binding row is found, this implicitly returns
    # None — confirm callers handle that case.
def update_port(self, tenant_id, net_id, port_id, port_state, **kwargs):
    """Update a port's state.

    Log-only stub — probably not applicable to the Nexus plugin; delete
    if not required.
    """
    message = _("NexusPlugin:update_port() called")
    LOG.debug(message)
def get_port_details(self, tenant_id, net_id, port_id, **kwargs):
    """Return details of a single port.

    Log-only stub — probably not applicable to the Nexus plugin; delete
    if not required.
    """
    message = _("NexusPlugin:get_port_details() called")
    LOG.debug(message)
def plug_interface(self, tenant_id, net_id, port_id, remote_interface_id,
**kwargs):
"""
This is probably not applicable to the Nexus plugin.
De |
from sender import *
import threading
QUEUE_NAME = 'event_queue'
class CompetingReceiver(object):
    """Consumer that competes with other receivers for messages on a
    shared queue (competing-consumers pattern)."""

    def __init__(self):
        # Connection comes from sender (imported via *); it wraps a channel
        # and a default message callback — TODO confirm exact interface.
        self.connection = Connection().initialize()

    def receive(self):
        # Positional arguments follow the underlying client's
        # queue_declare(queue, passive, durable, exclusive, arguments)
        # signature — presumably pika-like; verify against sender.Connection.
        self.connection.channel.queue_declare(QUEUE_NAME, False, False, False, None)
        self.connection.channel.basic_consume(self.connection.callback, QUEUE_NAME, True)
        # Blocks the calling thread until consuming stops.
        self.connection.channel.start_consuming()
if __name__ == '__main__':
    connection1 = CompetingReceiver()
    connection2 = CompetingReceiver()
    # BUG FIX: the original passed target=connection1.receive() — that CALLS
    # receive() on the main thread (which blocks in start_consuming) and
    # hands Thread a target of None, so the two receivers never actually ran
    # concurrently. Pass the bound method itself instead.
    t1 = threading.Thread(target=connection1.receive)
    t2 = threading.Thread(target=connection2.receive)
    t1.start()
    t2.start()
    # Wait for both consumer threads before tearing down the connections.
    t1.join()
    t2.join()
    connection1.connection.destroy()
    connection2.connection.destroy()
|
# Download the Python helper library from twilio.com/docs/python/install
from twilio.rest import Client

# Your Account Sid and Auth Token from twilio.com/user/account
account_sid = "ACCOUNT_SID"
auth_token = "your_auth_token"

client = Client(account_sid, auth_token)

# Look up carrier information for the number and print its type and name.
number = client.lookups.phone_numbers("+15108675309").fetch(type="carrier")
for field in ('type', 'name'):
    print(number.carrier[field])
|
# -*- coding: UTF-8 -*-
# Source: notes collected/adapted from the "Crazy Ant" blog, www.crazyant.net
import MySQLdb as mdb
import sys

# Obtain the database connection object
#con = mdb.connect('192.168.2.117', 'root', 'zzjr#2015', 'disconf')
con = mdb.connect('localhost', 'root', '', 'jumpserver')
with con:
    # Get a plain (tuple-based) query cursor
    cur = con.cursor()
    cur.execute("select * from juser_user")
    rows = cur.fetchall()
    # The cursor's description holds metadata for the selected columns
    desc = cur.description
    print 'cur.description:',desc
    # Print the header row, i.e. the field names
    for i in desc:
        print i[0]
    print "%s %3s" % (desc[0][0], desc[1][0])
    # print rows[2][11].decode('ascii').encode('utf-8')
    print rows[2][11]
|
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class GdkPixbuf(Package):
    """The Gdk Pixbuf is a toolkit for image loading and pixel buffer
    manipulation. It is used by GTK+ 2 and GTK+ 3 to load and
    manipulate images. In the past it was distributed as part of
    GTK+ 2 but it was split off into a separate package in
    preparation for the change to GTK+ 3."""

    homepage = "https://developer.gnome.org/gdk-pixbuf/"
    url = "https://ftp.acc.umu.se/pub/gnome/sources/gdk-pixbuf/2.40/gdk-pixbuf-2.40.0.tar.xz"
    list_url = "https://ftp.acc.umu.se/pub/gnome/sources/gdk-pixbuf/"
    list_depth = 1

    version('2.40.0', sha256='1582595099537ca8ff3b99c6804350b4c058bb8ad67411bbaae024ee7cead4e6')
    version('2.38.2', sha256='73fa651ec0d89d73dd3070b129ce2203a66171dfc0bd2caa3570a9c93d2d0781')
    version('2.38.0', sha256='dd50973c7757bcde15de6bcd3a6d462a445efd552604ae6435a0532fbbadae47')
    version('2.31.2', sha256='9e467ed09894c802499fb2399cd9a89ed21c81700ce8f27f970a833efb1e47aa')

    variant('x11', default=False, description="Enable X11 support")

    # Meson is the build system for 2.37.x and newer; older releases use
    # autotools (see the @when('@:2.36') install below).
    depends_on('meson@0.46.0:', type='build', when='@2.37.92:')
    depends_on('meson@0.45.0:', type='build', when='@2.37.0:')
    depends_on('ninja', type='build', when='@2.37.0:')
    depends_on('shared-mime-info', type='build', when='@2.36.8: platform=linux')
    depends_on('shared-mime-info', type='build', when='@2.36.8: platform=cray')
    depends_on('pkgconfig', type='build')
    # Building the man pages requires libxslt and the Docbook stylesheets
    depends_on('libxslt', type='build')
    depends_on('docbook-xsl', type='build')
    depends_on('gettext')
    depends_on('glib@2.38.0:')
    depends_on('jpeg')
    depends_on('libpng')
    depends_on('zlib')
    depends_on('libtiff')
    depends_on('gobject-introspection')
    depends_on('libx11', when='+x11')

    # Replace the docbook stylesheet URL with the one that our
    # docbook-xsl package uses/recognizes.
    patch('docbook-cdn.patch')

    def url_for_version(self, version):
        """Build the download URL; the directory component uses X.Y only."""
        url = "https://ftp.acc.umu.se/pub/gnome/sources/gdk-pixbuf/{0}/gdk-pixbuf-{1}.tar.xz"
        return url.format(version.up_to(2), version)

    def setup_dependent_build_environment(self, env, dependent_spec):
        # Dependents need our share/ on XDG_DATA_DIRS to find loaders/mime data.
        env.prepend_path("XDG_DATA_DIRS", self.prefix.share)

    def setup_dependent_run_environment(self, env, dependent_spec):
        env.prepend_path("XDG_DATA_DIRS", self.prefix.share)

    def install(self, spec, prefix):
        """Meson-based install (default path, used for 2.37 and newer)."""
        with working_dir('spack-build', create=True):
            # BUG FIX: the original did ``meson_args = std_meson_args`` and
            # then ``meson_args += [...]``. Since += extends a list in
            # place, that appended '-Dx11=...' to the *shared*
            # std_meson_args list Spack exposes globally, polluting every
            # subsequent meson-based build in the same process. Build a
            # fresh list by concatenation instead.
            meson_args = std_meson_args + [
                '-Dx11={0}'.format('+x11' in spec),
            ]
            meson('..', *meson_args)
            ninja('-v')
            if self.run_tests:
                ninja('test')
            ninja('install')

    def configure_args(self):
        """Arguments for the autotools configure of old (:2.36) releases."""
        args = []
        # disable building of gtk-doc files following #9771
        args.append('--disable-gtk-doc-html')
        true = which('true')
        args.append('GTKDOC_CHECK={0}'.format(true))
        args.append('GTKDOC_CHECK_PATH={0}'.format(true))
        args.append('GTKDOC_MKPDF={0}'.format(true))
        args.append('GTKDOC_REBASE={0}'.format(true))
        return args

    @when('@:2.36')
    def install(self, spec, prefix):
        """Autotools install path, selected via Spack's @when multimethod."""
        configure('--prefix={0}'.format(prefix), *self.configure_args())
        make()
        if self.run_tests:
            make('check')
        make('install')
        if self.run_tests:
            make('installcheck')

    def setup_build_environment(self, env):
        # The "post-install.sh" script uses gdk-pixbuf-query-loaders,
        # which was installed earlier.
        env.prepend_path('PATH', self.prefix.bin)
|
#!/usr/bin/env python2
"""Demo checker script.
Given a demo .cpp file PATH.cpp we can make it a small test if there is a file
PATH.cpp.stdout and/or PATH.cpp.stderr. The test is implemented using this
script.
The script is called with the options --binary-path and one or both of
--stdout-path and --stderr-path. The demo is executed and the test succeeds
if the exit code is 0 and the standard/error output is the same as in the
.stdout/.stderr file. If there is output and the file is missing then this is
a failure as well.
"""
__author__ = """Manuel Holtgrewe <manuel.holtgrewe@fu-berlin.de>
Temesgen H. Dadi <temesgen.dadi@fu-berlin.de>
"""
import argparse
import difflib
import subprocess
import sys
import re
def t(s):
    """Normalize Windows ("\\r\\n") line endings to Unix ("\\n")."""
    return "\n".join(s.split("\r\n"))
def fuzzyEqual(pattern, text):
    """Compare expected output (pattern) to actual output (text) line by line.

    Both arguments are lists of lines. A pattern line may contain the
    literal marker ``[VAR]`` wherever the output is not expected to be
    stable; that part is matched against a signed int/float (optionally in
    scientific notation) via a regex instead of literally.
    Returns True when every line matches.
    """
    if len(pattern) != len(text):
        print >> sys.stderr, 'Number of lines differ. Expected output has %s lines whereas actual has %s lines.' % (len(pattern), len(text))
        return False
    for i in range(len(pattern)):
        T = text[i]
        P = pattern[i]
        if T == P :
            continue
        else :
            if '[VAR]' not in P:
                print >> sys.stderr, 'Line %s is different between expected and actual outputs.' % (i)
                return False
            else:
                # Escape the literal parts, then replace the (escaped)
                # [VAR] marker with a numeric regular expression.
                P = (re.escape(P)).replace('\\[VAR\\]', "[+-]?(\d+(\.\d*)?|\.\d+)([eE][+-]?\d+)?")
                r = re.compile(P)
                # NOTE(review): re.match anchors only at the start of the
                # line, so trailing extra text would still pass — confirm
                # whether that is intended.
                if re.match(r, T) == None:
                    print >> sys.stderr, 'Line %s is different (REGEX) between expected and actual outputs.' % (i)
                    return False
    return True
def loadExpected(args):
    """Load the expected file contents.

    Returns a pair (stdout_lines, stderr_lines); a missing path yields the
    single-element list [''] because ''.split('\\n') == [''].
    """
    out, err = '', ''
    if args.stdout_path:
        with open(args.stdout_path, 'rb') as f:
            out = f.read()
    if args.stderr_path:
        with open(args.stderr_path, 'rb') as f:
            err = f.read()
    # Normalize line endings, trim outer whitespace, split into lines.
    return t(out.strip()).split('\n'), t(err.strip()).split('\n')
def runDemo(args):
    """Run the demo binary, capturing and normalizing both output streams.

    Returns (stdout_lines, stderr_lines, returncode).
    """
    cmd = [args.binary_path]
    p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdoutbuff, stderrbuff = p.communicate()
    return t(stdoutbuff.strip()).split('\n'), t(stderrbuff.strip()).split('\n'), p.returncode
def main():
    """Program entry point: run the demo, compare its output, 0 on success."""
    parser = argparse.ArgumentParser(description='Run SeqAn demos as apps.')
    # NOTE(review): required='True' is a truthy *string*, not the bool True;
    # argparse only tests truthiness, so the behavior is as intended.
    parser.add_argument('--binary-path', dest='binary_path', required='True',
                        help='Path to the demo binary to execute.')
    parser.add_argument('--stdout-path', dest='stdout_path',
                        help='Path to standard out file to compare to.',
                        default=None)
    parser.add_argument('--stderr-path', dest='stderr_path',
                        help='Path to standard error file to compare to.',
                        default=None)
    args = parser.parse_args()

    print >>sys.stderr, 'Running %s.' % args.binary_path
    actual_out, actual_err, ret = runDemo(args)
    # A non-zero exit code fails the test immediately.
    if ret != 0:
        print >>sys.stderr, 'ERROR: Return code of %s was %s.' % (args.binary_path, ret)
        return 1
    else:
        print >>sys.stderr, 'Return code was %s.' % ret

    print >>sys.stderr, 'Loading files "%s", "%s".' % (args.stdout_path, args.stderr_path)
    expected_out, expected_err = loadExpected(args)

    is_stdout_as_expected = fuzzyEqual(expected_out, actual_out)
    is_stderr_as_expected = fuzzyEqual(expected_err, actual_err)

    # On mismatch, print a context diff to aid debugging.
    if not is_stdout_as_expected:
        print >>sys.stderr, 'The standard output was not as expected!'
        l = difflib.context_diff(expected_out, actual_out,
                                 fromfile='expected', tofile='actual')
        print >>sys.stderr, '\n'.join(l)
    else:
        print >>sys.stderr, 'Standard output was as expected.'

    if not is_stderr_as_expected:
        print >>sys.stderr, 'The standard error was not as expected!'
        l = difflib.context_diff(expected_err, actual_err,
                                 fromfile='expected', tofile='actual')
        print >>sys.stderr, '\n'.join(l)
    else:
        print >>sys.stderr, 'Standard error was as expected.'

    # here we used not because we need return-code 0 (False) if test is successful
    return not (is_stdout_as_expected and is_stderr_as_expected)


if __name__ == '__main__':
    sys.exit(main())
|
# -*- coding: UTF-8 -*-
from .codegen import *
from .cmake import *
from .p | rinting import *
from . | utilities import *
|
i | mport bee
from bee.segments import *
class toggle(bee.worker):
    # Worker that flips a boolean every time "inp" receives a push trigger,
    # then fires the "true" or "false" trigger output accordingly; the
    # current value is readable through the pull output "state".
    # NOTE(review): the semantics of the bee DSL primitives (antenna,
    # variable, parameter, output, connect, triggerfunc, modifier, trigger)
    # are inferred from their names — confirm against the bee framework docs.
    inp = antenna("push", "trigger")

    # Internal boolean state, initialized to False.
    on = variable("bool")
    parameter(on, False)

    # Expose the current value of `on` as a pullable output.
    state = output("pull", "bool")
    connect(on, state)

    true = output("push", "trigger")
    trig_true = triggerfunc(true)
    false = output("push", "trigger")
    trig_false = triggerfunc(false)

    @modifier
    def m_trig(self):
        # Flip the stored flag, then fire the trigger matching the NEW value.
        if self.on:
            self.on = False
            self.trig_false()
        else:
            self.on = True
            self.trig_true()

    # Route incoming pushes on `inp` to the modifier above.
    trigger(inp, m_trig)
保险合同准备金
acting_trading_sec: NUMBER(20,4)
代理买卖证券款
acting_uw_sec: NUMBER(20,4)
代理承销证券款
non_cur_liab_due_within_1y: NUMBER(20,4)
一年内到期的非流动负债
oth_cur_liab: NUMBER(20,4)
其他流动负债
tot_cur_liab: NUMBER(20,4)
流动负债合计
lt_borrow: NUMBER(20,4)
长期借款
bonds_payable: NUMBER(20,4)
应付债券
lt_payable: NUMBER(20,4)
长期应付款
specific_item_payable: NUMBER(20,4)
专项应付款
provisions: NUMBER(20,4)
预计负债
deferred_tax_liab: NUMBER(20,4)
递延所得税负债
deferred_inc_non_cur_liab: NUMBER(20,4)
递延收益-非流动负债
oth_non_cur_liab: NUMBER(20,4)
其他非流动负债
tot_non_cur_liab: NUMBER(20,4)
非流动负债合计
liab_dep_oth_banks_fin_inst: NUMBER(20,4)
同业和其它金融机构存放款项
derivative_fin_liab: NUMBER(20,4)
衍生金融负债
cust_bank_dep: NUMBER(20,4)
吸收存款
agency_bus_liab: NUMBER(20,4)
代理业务负债
oth_liab: NUMBER(20,4)
其他负债
prem_received_adv: NUMBER(20,4)
预收保费
deposit_received: NUMBER(20,4)
存入保证金
insured_deposit_invest: NUMBER(20,4)
保户储金及投资款
unearned_prem_rsrv: NUMBER(20,4)
未到期责任准备金
out_loss_rsrv: NUMBER(20,4)
未决赔款准备金
life_insur_rsrv: NUMBER(20,4)
寿险责任准备金
lt_health_insur_v: NUMBER(20,4)
长期健康险责任准备金
independent_acct_liab: NUMBER(20,4)
独立账户负债
incl_pledge_loan: NUMBER(20,4)
其中:质押借款
claims_payable: NUMBER(20,4)
应付赔付款
dvd_payable_insured: NUMBER(20,4)
应付保单红利
tot_liab: NUMBER(20,4)
负债合计
cap_stk: NUMBER(20,4)
股本
cap_rsrv: NUMBER(20,4)
资本公积金
special_rsrv: NUMBER(20,4)
专项储备
surplus_rsrv: NUMBER(20,4)
盈余公积金
undistributed_profit: NUMBER(20,4)
未分配利润
less_tsy_stk: NUMBER(20,4)
减:库存股
prov_nom_risks: NUMBER(20,4)
一般风险准备
cnvd_diff_foreign_curr_stat: NUMBER(20,4)
外币报表折算差额
unconfirmed_invest_loss: NUMBER(20,4)
未确认的投资损失
minority_int: NUMBER(20,4)
少数股东权益
tot_shrhldr_eqy_excl_min_int: NUMBER(20,4)
股东权益合计(不含少数股东权益)
tot_shrhldr_eqy_incl_min_int: NUMBER(20,4)
股东权益合计(含少数股东权益)
tot_liab_shrhldr_eqy: NUMBER(20,4)
负债及股东权益总计
opdate: DATETIME
opdate
opmode: VARCHAR(1)
opmode
"""
__tablename__ = "CBondBalanceSheet"
object_id = Column(VARCHAR2(100), primary_key=True)
s_info_compcode = Column(VARCHAR2(40))
ann_dt = Column(VARCHAR2(8))
report_period = Column(VARCHAR2(8))
statement_type = Column(VARCHAR2(10))
crncy_code = Column(VARCHAR2(10))
monetary_cap = Column(NUMBER(20,4))
tradable_fin_assets = Column(NUMBER(20,4))
notes_rcv = Column(NUMBER(20,4))
acct_rcv = Column(NUMBER(20,4))
oth_rcv = Column(NUMBER(20,4))
prepay = Column(NUMBER(20,4))
dvd_rcv = Column(NUMBER(20,4))
int_rcv = Column(NUMBER(20,4))
inventories = Column(NUMBER(20,4))
consumptive_bio_assets = Column(NUMBER(20,4))
deferred_exp = Column(NUMBER(20,4))
non_cur_assets_due_within_1y = Column(NUMBER(20,4))
settle_rsrv = Column(NUMBER(20,4))
loans_to_oth_banks = Column(NUMBER(20,4))
prem_rcv = Column(NUMBER(20,4))
rcv_from_reinsurer = Column(NUMBER(20,4))
rcv_from_ceded_insur_cont_rsrv = Column(NUMBER(20,4))
red_monetary_cap_for_sale = Column(NUMBER(20,4))
oth_cur_assets = Column(NUMBER(20,4))
tot_cur_assets = Column(NUMBER(20,4))
fin_assets_avail_for_sale = Column(NUMBER(20,4))
held_to_mty_invest = Column(NUMBER(20,4))
long_term_eqy_invest = Column(NUMBER(20,4))
invest_real_estate = Column(NUMBER(20,4))
time_deposits = Column(NUMBER(20,4))
oth_assets = Column(NUMBER(20,4))
long_term_rec = Column(NUMBER(20,4))
fix_assets = Column(NUMBER(20,4))
const_in_prog = Column(NUMBER(20,4))
proj_matl = Column(NUMBER(20,4))
fix_assets_disp = Column(NUMBER(20,4))
productive_bio_assets = Column(NUMBER(20,4))
oil_and_natural_gas_assets = Column(NUMBER(20,4))
intang_assets = Column(NUMBER(20,4))
r_and_d_costs = Column(NUMBER(20,4))
goodwill = Column(NUMBER(20,4))
long_term_deferred_exp = Column(NUMBER(20,4))
deferred_tax_assets = Column(NUMBER(20,4))
loans_and_adv_granted = Column(NUMBER(20,4))
oth_non_cur_assets = Column(NUMBER(20,4))
tot_non_cur_assets = Column(NUMBER(20,4))
cash_deposits_central_bank = Column(NUMBER(20,4))
asset_dep_oth_banks_fin_inst = Column(NUMBER(20,4))
precious_metals = Column(NUMBER(20,4))
derivative_fin_assets = Column(NUMBER(20,4))
agency_bus_assets = Column(NUMBER(20,4))
subr_rec = Column(NUMBER(20,4))
rcv_ceded_unearned_prem_rsrv = Column(NUMBER(20,4))
rcv_ceded_claim_rsrv = Column(NUMBER(20,4))
rcv_ceded_life_insur_rsrv = Column(NUMBER(20,4))
rcv_ceded_lt_health_insur_rsrv = Column(NUMBER(20,4))
mrgn_paid = Column(NUMBER(20,4))
insured_pledge_loan = Column(NUMBER(20,4))
cap_mrgn_paid = Column(NUMBER(20,4))
independent_acct_assets = Column(NUMBER(20,4))
clients_cap_deposit = Column(NUMBER(20,4))
clients_rsrv_settle = Column(NUMBER(20,4))
incl_seat_fees_exchange = Column(NUMBER(20,4))
rcv_invest = Column(NUMBER(20,4))
tot_assets = Column(NUMBER(20,4))
st_borrow = Column(NUMBER(20,4))
borrow_central_bank = Column(NUMBER(2 | 0,4))
deposit_received_ib_deposits = Column(NUMBE | R(20,4))
loans_oth_banks = Column(NUMBER(20,4))
tradable_fin_liab = Column(NUMBER(20,4))
notes_payable = Column(NUMBER(20,4))
acct_payable = Column(NUMBER(20,4))
adv_from_cust = Column(NUMBER(20,4))
fund_sales_fin_assets_rp = Column(NUMBER(20,4))
handling_charges_comm_payable = Column(NUMBER(20,4))
empl_ben_payable = Column(NUMBER(20,4))
taxes_surcharges_payable = Column(NUMBER(20,4))
int_payable = Column(NUMBER(20,4))
dvd_payable = Column(NUMBER(20,4))
oth_payable = Column(NUMBER(20,4))
acc_exp = Column(NUMBER(20,4))
deferred_inc = Column(NUMBER(20,4))
st_bonds_payable = Column(NUMBER(20,4))
payable_to_reinsurer = Column(NUMBER(20,4))
rsrv_insur_cont = Column(NUMBER(20,4))
acting_trading_sec = Column(NUMBER(20,4))
acting_uw_sec = Column(NUMBER(20,4))
non_cur_liab_due_within_1y = Column(NUMBER(20,4))
oth_cur_liab = Column(NUMBER(20,4))
tot_cur_liab = Column(NUMBER(20,4))
lt_borrow = Column(NUMBER(20,4))
bonds_payable = Column(NUMBER(20,4))
lt_payable = Column(NUMBER(20,4))
specific_item_payable = Column(NUMBER(20,4))
provisions = Column(NUMBER(20,4))
deferred_tax_liab = Column(NUMBER(20,4))
deferred_inc_non_cur_liab = Column(NUMBER(20,4))
oth_non_cur_liab = Column(NUMBER(20,4))
tot_non_cur_liab = Column(NUMBER(20,4))
liab_dep_oth_banks_fin_inst = Column(NUMBER(20,4))
derivative_fin_liab = Column(NUMBER(20,4))
cust_bank_dep = Column(NUMBER(20,4))
agency_bus_liab = Column(NUMBER(20,4))
oth_liab = Column(NUMBER(20,4))
prem_received_adv = Column(NUMBER(20,4))
deposit_received = Column(NUMBER(20,4))
insured_deposit_invest = Column(NUMBER(20,4))
unearned_prem_rsrv = Column(NUMBER(20,4))
out_loss_rsrv = Column(NUMBER(20,4))
life_insur_rsrv = Column(NUMBER(20,4))
lt_health_insur_v = Column(NUMBER(20,4))
independent_acct_liab = Column(NUMBER(20,4))
incl_pledge_loan = Column(NUMBER(20,4))
claims_payable = Column(NUMBER(20,4))
dvd_payable_insured = Column(NUMBER(20,4))
tot_liab = Column(NUMBER(20,4))
cap_stk = Column(NUMBER(20,4))
cap_rsrv = Column(NUMBER(20,4))
special_rsrv = Column(NUMBER(20,4))
surplus_rsrv = Column(NUMBER(20,4))
undistributed_profit = Column(NUMBER(20,4))
less_tsy_stk = Column(NUMBER(20,4))
prov_nom_risks = Column(NUMBER(20,4))
cnvd_diff_foreign_curr_stat = Column(NUMBER(20,4))
unconfirmed_invest_loss = Column(NUMBER(20,4))
minority_int = Column(NUMBER(20,4))
tot_shrhldr_eqy_excl_min_int = Column(NUMBER(20,4))
tot |
import os, sys
import random
import time
import feedparser
import itertools
import HTMLParser
from feed import Feed
# When launched from inside the feeds/ directory, hop up to the project root
# so the sibling modules below (gui_client, web, reddit) are importable.
if os.getcwd().rstrip(os.sep).endswith('feeds'):
    os.chdir('..')
sys.path.insert(0, os.getcwd())
from gui_client import new_rpc
import web
import reddit
class RSSFeed(Feed):
    """Generic RSS watcher.

    Cycles over feed URLs forever, de-duplicates resolved links, and hands
    new items to the GUI RPC service when one is available.  Subclasses
    override configure() and the *_filter hooks for per-site behaviour.
    """
    def __init__(self):
        self.title = 'RSS Feed'        # display name; also passed to new_rpc()
        self.streams = []              # default feed URLs (filled by subclass)
        self.wait_range = (60, 70)     # seconds slept between feed checks
        self.max_error_wait = 600      # cap (seconds) for parse-error backoff
        self.max_subs = 0              # max prior postings before a link is skipped
        self.urls = set()              # URLs already handled this session
    def configure(self):
        """Hook for subclasses; called once at the start of watch()."""
        pass
    def watch(self, new_streams=None):
        """Main loop: poll every stream URL round-robin, forever.

        new_streams -- optional extra feed URLs checked alongside self.streams.
        """
        self.configure()
        self.web = web.Web()
        try:
            self.rpc = new_rpc(self.title)
        except:
            # NOTE(review): bare except keeps the watcher alive when the RPC
            # service is unreachable, but also hides unrelated failures.
            self.rpc = None
            print 'Warning: Running without RPC'
        if new_streams is None:
            new_streams = []

        streams = self.streams + new_streams
        for url in itertools.cycle(streams):
            print url
            self.check_feed(url)
            time.sleep(random.randint(*self.wait_range))
    def check_feed(self, url):
        """Fetch one feed, retrying with exponential backoff, then scan items."""
        for fail_count in itertools.count():
            try:
                datad = feedparser.parse(url)
            except:
                print 'Parse error for', url
                # exponential backoff: 1, 2, 4, ... capped at max_error_wait
                time.sleep(min(2 ** fail_count, self.max_error_wait))
            else:
                break
        try:
            posts = datad['items']
        except:
            print 'No items field for', url
            posts = []
        for post in posts:
            self.check_post(post)
    def check_post(self, post):
        """Resolve one feed entry and submit it over RPC if it is new.

        Returns False when the entry is skipped (no link, fetch failure,
        duplicate, or non-http URL); otherwise falls through returning None.
        """
        if ('link' not in post):
            return False
        url = self.url_pre_filter(post['link'])
        try:
            req = self.web.get(url)
            url = req.geturl()  # follow redirects to the final URL
        except:
            print 'URL retrieval error for ', url
            return False
        url = self.url_post_filter(url)
        # skip duplicates; also drops non-http schemes (incl. https)
        if (url in self.urls) or not url.startswith('http://'):
            return False
        self.urls.add(url)
        feed_title = self.default_title_filter(post.get('title', ''))
        page_title = self.default_title_filter(self.web.title(req))
        title = self.title_filter(page_title, feed_title)
        if self.rpc is not None:
            subreddit = self.rpc.get_title_subreddit(title)
            keywords = self.rpc.get_title_keywords(title)
            # only submit links that have not already been posted too often
            if self.rpc.get_link_posted_count(url, title) <= self.max_subs:
                stats = self.rpc.get_learned_stats(title, keywords)
                self.rpc.gui_link_add(self.title, title, url, subreddit, keywords, **stats)
        try:
            req.close()
        except:
            pass
        print title
        print url
    def url_pre_filter(self, url):
        """Hook: transform the raw feed link before it is fetched."""
        return url
    def url_post_filter(self, url):
        """Hook: transform the resolved URL before de-duplication."""
        return url
    def default_title_filter(self, title):
        """Unescape HTML entities in a title string."""
        h = HTMLParser.HTMLParser()
        return h.unescape(title)
    def title_filter(self, page_title, feed_title):
        """Hook: pick the submission title; defaults to the page title."""
        return page_title
if __name__ == '__main__':
    # Ad-hoc run: watch a single physorg feed (added to self.streams).
    f = RSSFeed()
    f.watch(['http://www.physorg.com/rss-feed/'])
|
# -*-coding:Utf-8 -*
# Copyright (c) 2012 NOEL-BARON Léo
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFT | WARE IS PROVIDED BY THE COPYRIGHT HO | LDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Package contenant la commande 'recettes' et ses sous-commandes.
Dans ce fichier se trouve la commande même.
"""
from primaires.interpreteur.commande.commande import Commande
from .editer import PrmEditer
from .lister import PrmLister
from .supprimer import PrmSupprimer
class CmdRecettes(Commande):

    """The 'recettes' (recipes) command.

    Administrator-level command that groups the recipe management
    sub-commands (edit, list, delete).
    """

    def __init__(self):
        """Set up the command and its help texts."""
        Commande.__init__(self, "recettes", "recipes")
        self.groupe = "administrateur"
        self.aide_courte = "manipulation des recettes"
        self.aide_longue = ""

    def ajouter_parametres(self):
        """Register this command's sub-commands, in display order."""
        for parametre in (PrmEditer(), PrmLister(), PrmSupprimer()):
            self.ajouter_parametre(parametre)
|
# -*- coding: utf-8 -*-
# Gener | ated by Django 1.9.7 on 2017-02-23 22:12
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated (Django 1.9 makemigrations): adds an optional free-text
    # 'description' column to the League model.  Do not edit by hand once
    # this migration has been applied anywhere.
    dependencies = [
        ('tournament', '0145_auto_20170211_1825'),
    ]
    operations = [
        migrations.AddField(
            model_name='league',
            name='description',
            field=models.TextField(blank=True),
        ),
    ]
|
from scrapy.spiders import Spider
from scrapy.selector import HtmlXPathSelector
from scrapy.http import Request
from scrapy.conf import settings
from beerindex.items import BeerindexItem
import logging
import lxml.html
from urlparse import urlparse
import re
class BeerSpider(Spider):
    """Crawls several Brazilian online beer shops and yields a BeerindexItem
    (name, style, price, link) for every product page discovered."""
    name = "beerspider"
    # Per-store scraping configuration, keyed by domain:
    #   start_url    - crawl entry point
    #   next_link    - CSS selector for the pagination "next" link
    #   product_link - CSS selector for links to individual product pages
    #   xpath_*      - XPath selectors applied on each product page
    beer_sites = {
        'www.wbeer.com.br':
        {
            "start_url" : 'https://www.wbeer.com.br/browse.ep?cID=103354',
            "next_link" : '.paginacao li.prox a::attr(href)',
            "product_link" : '.catalogo-lista .lista .informacoes a::attr("href")',
            "xpath_title" : "//span[@itemprop='name']//text()",
            "xpath_price" : "//div[@class='preco-por']//text()",
            "xpath_style" : "//div[@class='resumo']//span[@class='nome-tipo']//text()"
        },
        'www.emporioveredas.com.br' : {
            "start_url" : 'http://www.emporioveredas.com.br/cervejas-importadas.html',
            "next_link" : '.pager a.next::attr(href)',
            "product_link" : '.products-grid a.product-image ::attr("href")',
            "xpath_title" : "//h1[@itemprop='name']//text()",
            "xpath_price" : "//div[@class='product-shop']//span[@itemprop='price']//text()",
            "xpath_style" : "//table[@id='product-attribute-specs-table']//tr[contains(.,'Estilo')]//td[last()]//text()"
        },
        'www.mundodascervejas.com' : {
            "start_url" : 'http://www.mundodascervejas.com/buscar?q=cerveja',
            "next_link" : '.topo .pagination a[rel="next"]::attr("href")',
            "product_link" : '#listagemProdutos a.produto-sobrepor::attr("href")',
            "xpath_title" : "//h1[@itemprop='name']//text()",
            "xpath_price" : "//div[@class='principal']//div[contains(@class,'preco-produto')]//strong[contains(@class,'preco-promocional')]//text()",
            "xpath_style" : "//div[@id='descricao']//table//tr[contains(.,'Estilo')]//td[last()]//text()"
        },
        'www.clubeer.com.br': {
            "start_url" : 'http://www.clubeer.com.br/loja',
            "next_link" : '#pagination li.current + li a::attr("href")',
            "product_link" : '.minhascervejas li .areaborder > a:first-child::attr("href")',
            "xpath_title" : "//h1[@itemprop='name']//text()",
            "xpath_price" : "//div[@id='principal']//div[contains(@class,'areaprecos')]//span[@itemprop='price']//text()",
            "xpath_style" : "//div[contains(@class,'areaprodutoinfoscontent')]//ul[contains(.,'ESTILO')]//li[position()=2]//text()"
        },
        'www.clubedomalte.com.br': {
            "start_url" : 'http://www.clubedomalte.com.br/pais',
            "next_link" : '.paginacao li.pg:last-child a::attr("href")',
            "product_link" : '.mainBar .spotContent > a:first-child::attr("href")',
            "xpath_title" : "//h1[@itemprop='name']//text()",
            "xpath_price" : "//div[contains(@class,'interna')]//div[contains(@class,'preco')]//*[@itemprop='price']//text()",
            "xpath_style" : "//div[contains(@class,'areaprodutoinfoscontent')]//ul[contains(.,'ESTILO')]//li[position()=2]//text()"
        }
    }
    def domain_from_url(self,url):
        # The domain is the key into beer_sites for the per-store selectors.
        parsed = urlparse(url)
        return parsed.netloc
    #allowed_domains = ["www.cervejastore.com.br"]
    # start_urls = ['http://www.mundodascervejas.com/buscar?q=cerveja']
    # start_urls = ["http://www.emporioveredas.com.br/cervejas-importadas.html"]
    start_urls = [beer_sites[store]["start_url"] for store in beer_sites]
    def parse(self,response):
        """Listing page: follow pagination, then every product link."""
        domain = self.domain_from_url(response.url)
        for url in response.css(self.beer_sites[domain]["next_link"]).extract():
            request = Request(response.urljoin(url.strip()), self.parse)
            yield request
        titles = response.css(self.beer_sites[domain]["product_link"]).extract()
        for title in titles:
            yield Request(response.urljoin(title), self.parse_product)
    def parse_product(self,response):
        """Product page: extract name/style/price and yield one item."""
        domain = self.domain_from_url(response.url)
        item = BeerindexItem()
        item["name"] = response.xpath(self.beer_sites[domain]["xpath_title"]).extract_first()
        item["style"] = response.xpath(self.beer_sites[domain]["xpath_style"]).extract_first()
        item["link"] = response.url
        item["price"] = "".join(response.xpath(self.beer_sites[domain]["xpath_price"]).extract())
        # Normalize the scraped price text: drop whitespace, keep only digits
        # and separators, then use '.' as the decimal separator.
        item["price"] = re.sub(r"\s+", "", item["price"], flags=re.UNICODE)
        item["price"] = re.sub(r"[^\d,\.+]", "", item["price"], flags=re.UNICODE)
        item["price"] = re.sub(r",", ".", item["price"], flags=re.UNICODE)
        yield item
|
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'productdescriptions'", 'to': "orm['catalog.Product']"}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'shop': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Shop']", 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'blank': 'True'}),
'user_or_shop': ('django.db.models.fields.BooleanField', [], {})
},
'catalog.productimage': {
'Meta': {'object_name': 'ProductImage'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'productimages'", 'to': "orm['catalog.Product']"}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'shop': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Shop']", 'null': 'True', 'blank': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
'catalog.productreview': {
'Meta': {'object_name': 'ProductReview'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'product_reviews'", 'to': "orm['catalog.Product']"}),
'rating': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'review': ('django.db.models.fields.CharField', [], {'max_length': '100000'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"}),
'votes': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'catalog.productshopurl': {
'Meta': {'object_name': 'ProductShopUrl'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'productshopurls'", 'to': "orm['catalog.Product']"}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'shop': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Shop']"}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200'})
},
'catalog.searchlog': {
'Meta': {'object_name': 'SearchLog'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'term': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'time': ('django.db.models.fields.DateTimeField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
'catalog.shop': {
'Meta': {'object_name': 'Shop'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'images': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'shopimages'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['catalog.Image']"}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'location': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Location']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200'})
},
'catalog.shopreview': {
'Meta': {'object_name': 'ShopReview'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'rating': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'review': ('django.db.models.fields.CharField', [], {'max_length': '100000'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'shop': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'shop_reviews'", 'to': "orm['catalog.Shop']"}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"}),
'votes': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'catalog.space': {
'Meta': {'object_name': 'Space'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
'address': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'admins': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'space_admins'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['auth.User']"}),
'city': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'country': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '254', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'kind': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'latitude': ('django.d | b.models.fields.IntegerField', [], {'default': '0', 'null': 'True', 'blank': 'True'}),
'logo': ('django.db.models.fields.URLField', [], {'max_length': '400', 'null': 'True', 'blank': 'True'}),
'longitude': ('django.db.models.fields.IntegerField', [], {'default': '0', 'null': 'Tr | ue', 'blank': 'True'}),
'members': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'space_members'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['auth.User']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'new_members': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'space_new_members'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['catalog.NewUser']"}),
'new_tools': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'space_new_tools'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['catalog.NewProduct']"}),
|
es only
if nodetype != "unknown" and nodetype != "Unmanaged":
poller_dict = test_api_utils.get_supported_pollers(node)
for poller in poller_dict:
poller_id = poller_dict[poller]["poller_id"]
poller_data = test_api_utils.get_poller_data_by_id(poller_id)
if fit_common.VERBOSITY >= 3:
print "\nPoller: " + poller + " ID: " + str(poller_id)
print fit_common.json.dumps(poller_data, indent=4)
    def test_verify_poller_data(self):
        """Verify no poller keeps more than 10 cached poll entries per node."""
        if fit_common.VERBOSITY >= 2:
            msg = "Description: Check number of polls being kept for poller ID"
            print "\t{0}".format(msg)
        for node in NODELIST:
            if fit_common.VERBOSITY >= 2:
                print "\nNode: ", node
            nodetype = get_rackhd_nodetype(node)
            # Run test against managed nodes only
            if nodetype != "unknown" and nodetype != "Unmanaged":
                poller_dict = test_api_utils.get_supported_pollers(node)
                for poller in poller_dict:
                    poller_id = poller_dict[poller]["poller_id"]
                    poller_data = test_api_utils.get_poller_data_by_id(poller_id)
                    poll_len = len(poller_data)
                    if fit_common.VERBOSITY >= 2:
                        print "\nPoller: " + poller + " ID: " + str(poller_id)
                        print "Number of polls for "+ str(poller_id) + ": " + str(len(poller_data))
                    # the poll cache is expected to be bounded at 10 entries
                    self.assertLessEqual(poll_len, 10, 'Number of cached polls should not exceed 10')
    def test_get_current_poller_data(self):
        """Dump the most recent data sample from each poller on managed nodes."""
        if fit_common.VERBOSITY >= 2:
            msg = "Description: Display most current data from poller"
            print "\t{0}".format(msg)
        for node in NODELIST:
            if fit_common.VERBOSITY >= 2:
                print "\nNode: ", node
            nodetype = get_rackhd_nodetype(node)
            # Run test against managed nodes only
            if nodetype != "unknown" and nodetype != "Unmanaged":
                poller_dict = test_api_utils.get_supported_pollers(node)
                for poller in poller_dict:
                    poller_id = poller_dict[poller]["poller_id"]
                    if fit_common.VERBOSITY >= 2:
                        print "\nPoller: " + poller + " ID: " + str(poller_id)
                    # '/data/current' returns the latest sample for the poller
                    monurl = "/api/1.1/pollers/" + str(poller_id) + "/data/current"
                    mondata = fit_common.rackhdapi(url_cmd=monurl)
                    if fit_common.VERBOSITY >= 2:
                        print fit_common.json.dumps(mondata, indent=4)
    def test_get_poller_status_timestamp(self):
        """Display HTTP status and timestamp of each poller's current poll."""
        if fit_common.VERBOSITY >= 2:
            msg = "Description: Display status and timestamp from current poll"
            print "\t{0}".format(msg)
        for node in NODELIST:
            if fit_common.VERBOSITY >= 2:
                print "\nNode: ", node
            nodetype = get_rackhd_nodetype(node)
            # Run test against managed nodes only
            if nodetype != "unknown" and nodetype != "Unmanaged":
                poller_dict = test_api_utils.get_supported_pollers(node)
                for poller in poller_dict:
                    poller_id = poller_dict[poller]["poller_id"]
                    if fit_common.VERBOSITY >= 2:
                        print "\nPoller: " + poller + " ID: " + str(poller_id)
                    monurl = "/api/1.1/pollers/" + str(poller_id) + "/data/current"
                    mondata = fit_common.rackhdapi(url_cmd=monurl)
                    # NOTE(review): unlike every other status message in this
                    # class, this print is not gated on VERBOSITY -- confirm
                    # whether that is intentional.
                    print "Return status", mondata['status']
                    if mondata['status'] == 200:
                        if fit_common.VERBOSITY >= 2:
                            print "Timestamp:", mondata['json'][0]['timestamp']
                            print fit_common.json.dumps(mondata['json'][0], indent=4)
    def test_verify_poller_error_counter(self):
        """Fail if any poller on any node reports a non-zero failureCount."""
        if fit_common.VERBOSITY >= 2:
            msg = "Description: Check for Poller Errors"
            print "\t{0}".format(msg)
        errorlist = []
        for node in NODELIST:
            mon_data = fit_common.rackhdapi("/api/1.1/nodes/" + node + "/pollers")
            self.assertIn(mon_data['status'], [200], "Incorrect HTTP return code")
            for item in mon_data['json']:
                # check required fields
                self.assertGreater(item['pollInterval'], 0, 'pollInterval field error')
                for subitem in ['node', 'config', 'createdAt', 'id', 'name', 'config', 'updatedAt']:
                    self.assertIn(subitem, item, subitem + ' field error')
            poller_dict = test_api_utils.get_supported_pollers(node)
            for poller in poller_dict:
                poller_id = poller_dict[poller]["poller_id"]
                poll_data = fit_common.rackhdapi("/api/1.1/pollers/" + poller_id)
                # failureCount may be absent; treat missing as zero failures
                poll_fails = poll_data['json'].get('failureCount', 0)
                if poll_fails != 0:
                    errorlist.append("Node: {} Poller: {} {} reported {} failureCount".format(node,
                                                                                              poller,
                                                                                              poller_id,
                                                                                              poll_fails))
        # report every failing poller at once, then assert
        if errorlist != []:
            print "{}".format(fit_common.json.dumps(errorlist, indent=4))
            self.assertEqual(errorlist, [], "Error reported in Pollers")
        else:
            if fit_common.VERBOSITY >= 2:
                print ("No Poller errors found")
def test_get_nodes_id_pollers(self):
if fit_common.VERBOSITY >= 2:
msg = "Description: Display the poller updated-at per node."
print "\t{0}".format(msg)
node = 0
for node in NODELIST:
if fit_common.VERBOSITY >= 2:
print "\nNode: ", node
mon_data = fit_common.rackhdapi("/api/1.1/nodes/" + node + "/pollers")
self.assertIn(mon_data['status'], [200], "Incorrect HTTP return code")
for item in mon_data['json']:
# check required fields
self.assertGreater(item['pollInterval'], 0, 'pollInterval field error')
for subitem in ['node', 'config', 'createdAt', 'id', 'name', 'config', 'updatedAt']:
self.assertIn(subitem, item, subitem + ' field error')
poller_dict = test_api_utils.get_supported_pollers(node)
for poller in poller_dict:
poller_id = poller_dict[poller]["poller_id"]
poll_data = fit_common.rackhdapi("/api/1.1/pollers/" + poller_id)
if fit_common.VERBOSITY >= 2:
print "\nPoller: " + poller + " ID: " + str(poller_id)
pprint.pprint("Created At: {}".format(poll_data['json']['createdAt']))
pprint.pprint("Updated At: {}".format(poll_data['json']['updatedAt']))
def test_check_poller_interval(self):
if fit_common.VERBOSITY >= 2:
msg = "Description: Display the poller interval."
print "\t{0}".format(msg)
for node in NODELIST:
if fit_common.VERBOSITY >= 2:
print "\nNode: ", node
mon_data = fit_common.rackhdapi("/api/1.1/nodes/" + node + "/pollers")
self.assertIn(mon_data['status'], [200], "Incorrect HTTP return code, expected 200, got {}".format(mon_data['status']))
poller_list = []
if fit_common.VERBOSITY >= 2:
print fit_common.json.dumps(mon_data['json'], indent=4)
for item in mon_data['json']:
poller_list.append(item['id'])
for poller_id in poller_list:
poller = fit_common.rackhdapi("/api/1.1/pollers/" + poller_id )
self.assertIn(poller['status'], [200], "Incorrect HTTP return code")
pollerdata = poller['json']
# |
doctests = """
########### Tests mostly copied from test_listcomps.py ############
Test simple loop with conditional
>>> sum({i*i for i in range(100) if i&1 == 1})
166650
Test simple case
>>> {2*y + x + 1 for x in (0,) for y in (1,)}
set([3])
Test simple nesting
>>> list(sorted({(i,j) for i in range(3) for j in range(4)}))
[(0, 0), (0, 1), (0, 2), (0, 3), (1, 0), (1, 1), (1, 2), (1, 3), (2, 0), (2, 1), (2, 2), (2, 3)]
Test nesting with the inner expression dependent on the outer
>>> list(sorted({(i,j) for i in range(4) for j in range(i)}))
[(1, 0), (2, 0), (2, 1), (3, 0), (3, 1), (3, 2)]
Make sure the induction variable is not exposed
>>> i = 20
>>> sum({i*i for i in range(100)})
328350
>>> i
20
Verify that syntax error's are raised for setcomps used as lvalues
>>> {y for y in (1,2)} = 10 # doctest: +IGNORE_EXCEPTION_DETAIL
Traceback (most recent call last):
...
SyntaxError: ...
>>> {y for y in (1,2)} += 10 # doctest: +IGNORE_EXCEPTION_DETAIL
Traceback (most recent call last):
...
SyntaxError: ...
Make a nested set comprehension that acts like set(range())
>>> def srange(n):
... return {i for i in range(n)}
>>> list(sorted(srange(10)))
[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
Same again, only as a lambda expression instead of a function definition
>>> lrange = lambda n: {i for i in range(n)}
>>> list(sorted(lrange(10)))
[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
Generators can call other generators:
>>> def grange(n):
... for x in {i for i in range(n)}:
... yield x
>>> list(sorted(grange(5)))
[0, 1, 2, 3, 4]
Make sure that None is a valid return value
>>> {None for i in range(10)}
set([None])
########### Tests for various scoping corner cases ############
Return lambdas that use the iteration variable as a default argument
>>> items = {(lambda i=i: i) for i in range(5)}
>>> {x() for x in items} == set(range(5))
True
Same again, only this time as a closur | e variable
>>> items = {(lambda: i) for i in range(5)}
>>> {x() for x in items}
set([4])
Another way to test that the iteration variable is local to the list comp
>> | > items = {(lambda: i) for i in range(5)}
>>> i = 20
>>> {x() for x in items}
set([4])
And confirm that a closure can jump over the list comp scope
>>> items = {(lambda: y) for i in range(5)}
>>> y = 2
>>> {x() for x in items}
set([2])
We also repeat each of the above scoping tests inside a function
>>> def test_func():
... items = {(lambda i=i: i) for i in range(5)}
... return {x() for x in items}
>>> test_func() == set(range(5))
True
>>> def test_func():
... items = {(lambda: i) for i in range(5)}
... return {x() for x in items}
>>> test_func()
set([4])
>>> def test_func():
... items = {(lambda: i) for i in range(5)}
... i = 20
... return {x() for x in items}
>>> test_func()
set([4])
>>> def test_func():
... items = {(lambda: y) for i in range(5)}
... y = 2
... return {x() for x in items}
>>> test_func()
set([2])
"""
__test__ = {'doctests' : doctests}
def test_main(verbose=None):
    """Run the doctest suite; on debug builds, also watch for ref leaks."""
    import sys
    from test import test_support
    from test import test_setcomps
    test_support.run_doctest(test_setcomps, verbose)
    # verify reference counting
    # Re-run the suite several times and record the interpreter's total
    # refcount after each pass; a steadily growing sequence indicates a leak.
    if verbose and hasattr(sys, "gettotalrefcount"):
        import gc
        counts = []
        for _ in range(5):
            test_support.run_doctest(test_setcomps, verbose)
            gc.collect()
            counts.append(sys.gettotalrefcount())
        print(counts)
# Run the full suite (verbose) when executed directly.
if __name__ == "__main__":
    test_main(verbose=True)
|
# This script created a queue
#
# Author - Paul Doyle Nov 2015
#
#
import boto.sqs
import boto.sqs.queue
from boto.sqs.message import Mess | age
from boto.sqs.connection import SQSConnection
from boto.exception import SQSError
import sys
import urllib2
# Get the keys from a specific url and then use them to connect to AWS Service
response = urllib2.urlopen('http://ec2-52-30-7-5.eu-west-1.compute.amazonaws.com:81/key')
html=response.read()
result = html.split(':')
#print (result[0])
| #print (result[1])
access_key_id = result[0]
secret_access_key = result[1]
#print (access_key_id,secret_access_key)
# Set up a connection to the AWS service.
conn = boto.sqs.connect_to_region("eu-west-1", aws_access_key_id=access_key_id, aws_secret_access_key=secret_access_key)
student_number = 'C13470112'
#conn.delete_queue(sys.argv[1])
queue_name = student_number+sys.argv[1]
# Get a list of the queues that exists and then print the list out
rs = conn.get_queue(queue_name)
# Get a list of the queues that exists and then print the list out
#rs = conn.get_all_queues()
print "adding message...."
rs.set_message_class(Message)
m = Message()
m.set_body(sys.argv[2])
rs.write(m)
|
import os, sys, re
import optparse
import shutil
import pandas
import numpy
import gc
import subprocess
#####################################
#This is a script to combine the output reports from
#Skyline, in preparation for MSstats! Let's get started.
#
#VERSION 0.70A
version="0.70A"
#DATE: 10/11/2016
date="10/11/2016"
#####################################
print "-----------------------------------------------------------------------"
print "Welcome to the MSstats wrapper for Galaxy, Wohlschlegel Lab UCLA"
print "Written by William Barshop"
print "Version: ",version
print "Date: ",date
# Remember the job's working directory; the generated R script setwd()s here.
basedir=os.getcwd()
####################################
#Argument parsing! So much fun!
#We'll use OptParse even though some
#people really rave about argparse...
#
#
# NB: With Optparse, if an option is
# not specified, it will take a
# value of None
####################################
# CLI definition (optparse): inputs, Galaxy output paths, then plotting
# options that are passed straight through to the generated MSstats R script.
parser = optparse.OptionParser()
parser.add_option("--experiment_file",action="store",type="string",dest="experiment_file")
parser.add_option("--folder",action="store",type="string",dest="operation_folder",default=".")
parser.add_option("--msstats-image-RData",action="store",type="string",dest="image_RData")
parser.add_option("--msstats-comparison-csv",action="store",type="string",dest="comparison_csv")
################# OUTPUTS ################################
parser.add_option("--comparisonPlotOutput",action="store",type="string",dest="comparisonPlotOutput")
parser.add_option("--heatmapOutput",action="store",type="string",dest="heatmapOutput")
parser.add_option("--volcanoPlotOutput",action="store",type="string",dest="volcanoPlotOutput")
parser.add_option("--RScriptOutput",action="store",type="string",dest="RScriptOutput")
################## BELOW THIS ARE PLOTTING OPTIONS ############################## These are actually all going to be moved into a separate tool
#general options
parser.add_option("--significance",action="store",type="float",dest="significance") # For the volcano plots...
parser.add_option("--FCthreshold",action="store",type="float",dest="FCthreshold") # FC threshold For the volcano plots...
parser.add_option("--ylimUp",action="store",type="float",dest="ylimUp") # ylimUp threshold for the plots
parser.add_option("--ylimDown",action="store",type="float",dest="ylimDown") # ylimDown threshold for plots
parser.add_option("--xlimUp",action="store",type="float",dest="xlimUp") # xlimUp threshold for Volcano plots
parser.add_option("--autoAxes",action="store_true",dest="autoAxes")
parser.add_option("--xAxisSize",action="store",type="int",dest="xAxisSize")
parser.add_option("--yAxisSize",action="store",type="int",dest="yAxisSize")
parser.add_option("--width",action="store",type="int",dest="width",default=10)
parser.add_option("--height",action="store",type="int",dest="height",default=10)
#HeatMap
parser.add_option("--numProtein",action="store",type="int",dest="numProtein",default=180) # Number of proteins per heatmap... Max is 180
parser.add_option("--clustering",action="store",type="string",dest="clustering",default="protein") # clustering type for heatmap... Can be "protein", "comparison", "both"
#VolcanoPlot
parser.add_option("--dotSize",action="store",type="int",dest="dotSize",default=3)#volcanoplot
parser.add_option("--textSize",action="store",type="int",dest="textSize",default=4)#volcanoplot
parser.add_option("--proteinName",action="store_true",dest="proteinName") # On volcano plot, draw protein names?
parser.add_option("--legendSize",action="store",type="int",dest="legendSize",default=7)
(options,args) = parser.parse_args()
# Translate the parsed options into the literal tokens embedded in the R
# script; MSstats takes the string "FALSE" to mean "auto-pick this limit".
if options.autoAxes:
    xlimUp = ylimUp = ylimDown = "FALSE"
else:
    xlimUp = options.xlimUp
    ylimUp = options.ylimUp
    ylimDown = options.ylimDown
# R boolean literal: draw protein names on the volcano plot?
proteinName = "TRUE" if options.proteinName else "FALSE"
print "Now we're going to prepare the R script for MSstats graphing..."
#Let's start by reading in the experiment structure.
group_information = pandas.read_csv(options.experiment_file,sep='\t')
comparison_df = pandas.read_csv(options.comparison_csv)
with open("MSstats_Script.R",'wb') as script_writer:
script_writer.write("library(MSstats)\n")
script_writer.write("setwd(\""+str(basedir)+"\")\n") #We're goin | g to set the current directory...
script_writer.write("load(\""+str(options.image_RData)+"\")\n")
#script_writer.write("comparisonResult<-read.csv(\""+str(options.comparison_csv)+"\")\n") #We will load in the input CSV | file! (In this case by absolute path, though that's not necessary...)
#script_writer.write("write.csv(comparisonResult$ComparisonResult,file=\"comparisonResult_output.csv\")\n")
#OKAY! So, now we're going to write out the plots... This may take a bit...
#So, first, let's check if we can output a heatmap (number of comparisons >2)
if len(comparison_df['Label'].unique().tolist())>=2:
#script_writer.write("groupComparisonPlots(data=comparisonResult$ComparisonResult,type=\"Heatmap\", logBase.pvalue=2, sig="+str(options.significance)+", FCcutoff="+str(options.FCthreshold)+",ylimUp="+str(ylimUp)+",ylimDown="+str(ylimDown)+",xlimUp="+str(xlimUp)+",x.axis.size="+str(options.xAxisSize)+",y.axis.size="+str(options.yAxisSize)+",numProtein="+str(options.numProtein)+",clustering=\""+options.clustering+"\",width="+str(options.width)+",height="+str(options.height)+")\n") #add width, height, address
script_writer.write("groupComparisonPlots(data=comparisonResult$ComparisonResult,type=\"Heatmap\", logBase.pvalue=2,x.axis.size="+str(options.xAxisSize)+",y.axis.size="+str(options.yAxisSize)+",numProtein="+str(options.numProtein)+",clustering=\""+options.clustering+"\",width="+str(options.width)+",height="+str(options.height)+")\n") #add width, height, address
#pass
script_writer.write("groupComparisonPlots(data=comparisonResult$ComparisonResult,ProteinName=\""+proteinName+"\",type=\"VolcanoPlot\", logBase.pvalue=2, sig="+str(options.significance)+", FCcutoff="+str(options.FCthreshold)+",ylimUp="+str(ylimUp)+",ylimDown="+str(ylimDown)+",xlimUp="+str(xlimUp)+",x.axis.size="+str(options.xAxisSize)+",dot.size="+str(options.dotSize)+",text.size="+str(options.textSize)+",legend.size="+str(options.legendSize)+",width="+str(options.width)+",height="+str(options.height)+",which.Comparison=\"all\")\n")
script_writer.write("groupComparisonPlots(data=comparisonResult$ComparisonResult,type=\"ComparisonPlot\", sig="+str(options.significance)+",x.axis.size="+str(options.xAxisSize)+",dot.size="+str(options.dotSize)+",legend.size="+str(options.legendSize)+",width="+str(options.width)+",height="+str(options.height)+",which.Comparison=\"all\")\n")
#OKAY.... The R Script has been written!
#We're going to execute the R script now!
print "Copying RScript back to Galaxy..."
shutil.copy('MSstats_Script.R',options.RScriptOutput)
subprocess.check_call(['Rscript', 'MSstats_Script.R'],shell=False,stderr=sys.stdout.fileno())
print "Moving files to final output locations...."
#print os.listdir(os.getcwd())
#shutil.copy('TMP_dataProcess_output.csv',options.processedOutput)
#shutil.copy('comparisonResult_output.csv',options.comparisonOutput)
shutil.copy('VolcanoPlot.pdf',options.volcanoPlotOutput)
if len(comparison_df['Label'].unique().tolist())>2:
shutil.copy('Heatmap.pdf',options.heatmapOutput)
shutil.copy('ComparisonPlot.pdf',options.comparisonPlotOutput)
print "All done!"
|
# Copyright 2017 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
# in compliance with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License
# i | s distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
# or implied. See the License for the specific language governing permissions and limitations under
# the License.
# This setup file is used when running cloud training or cloud dataflow jobs.
from setuptools import setup, find_packages
setup(
name='trainer',
version='1.0.0',
packages=find_packages(),
description='Google Cloud Datalab helper sub-package',
author='Google',
author_email='google-clou | d-datalab-feedback@googlegroups.com',
keywords=[
],
license="Apache Software License",
long_description="""
""",
install_requires=[
'tensorflow==1.15.2',
'protobuf==3.1.0',
'pillow==6.2.0', # ML Engine does not have PIL installed
],
package_data={
},
data_files=[],
)
|
##
# Copyright 2012-2013 Ghent University
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://vscentrum.be/nl/en),
# the Hercules foundation (http://www.herculesstichting.be/in_English)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# http://github.com/hpcugent/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
##
"""
This script can be used to install easybuild-easyblocks, e.g. using:
easy_install --user .
or
python setup.py --prefix=$HOME/easybuild
@author: Kenneth Hoste (Ghent University)
"""
import os
import re
import sys
from distutils import log
# Make the bundled 'easybuild' directory importable so VERSION can be read
# from the package being installed itself.
sys.path.append('easybuild')
from easyblocks import VERSION
# The required framework API version is the major component of VERSION,
# plus any release-candidate or dev suffix carried by VERSION.
API_VERSION = str(VERSION).split('.')[0]
suff = ''
# Preserve a release-candidate suffix such as 'rc1' in the API version.
rc_regexp = re.compile("^.*(rc[0-9]*)$")
res = rc_regexp.search(str(VERSION))
if res:
    suff = res.group(1)
# A trailing 'dev' marker takes precedence over any rc suffix.
dev_regexp = re.compile("^.*[0-9]dev$")
if dev_regexp.match(str(VERSION)):
    suff = 'dev'
API_VERSION += suff
# log levels: 0 = WARN (default), 1 = INFO, 2 = DEBUG
log.set_verbosity(1)
# Prefer setuptools' setup(); fall back to plain distutils if unavailable.
try:
    from setuptools import setup
    log.info("Installing with setuptools.setup...")
except ImportError:
    # `except ImportError, err` was Python-2-only syntax (a SyntaxError on
    # Python 3) and the bound name was never used — drop it.
    log.info("Failed to import setuptools.setup, so falling back to distutils.setup")
    from distutils.core import setup
# Utility function to read README file
def read(fname):
    """Return the contents of *fname*, resolved relative to this script's directory."""
    # Use a context manager so the file handle is closed deterministically
    # (the original left it for the garbage collector to reclaim).
    with open(os.path.join(os.path.dirname(__file__), fname)) as handle:
        return handle.read()
log.info("Installing version %s (required versions: API >= %s)" % (VERSION, API_VERSION))
setup(
name = "easybuild-easyblocks",
version = str(VERSION),
author = "EasyBuild community",
author_email = "easybuild@lists.ugent.be",
description = """Python modules which implement support for installing particular (groups of) software packages with EasyBuild.""",
license = "GPLv2",
keywords = "software build building installation installing compilation HPC scientific",
url = "http://hpcugent.github.com/easybuild",
packages = ["easybuild", "easybuild.easyblocks", "easybuild.easyblocks.generic"],
package_dir = {"easybuild.easyblocks": "easybuild/easyblocks"},
package_data = {'easybuild.easyblocks': ["[a-z0-9]/*.py"]},
long_description = read("README.rst"),
classifiers = [
"Development Status :: 5 - Production/Stable",
"Environment :: Console",
"Intended Audience :: System Administrators",
"License :: OSI Approved :: GNU General Public License v2 (GPLv2)",
"Operating System :: POSIX :: Linux",
| "Programming Language :: Python :: 2.4",
"Topic :: Software Development :: Build Tools",
],
platforms = "Linux",
provides = ["easybuild", "easybuild.easyblocks", "easybuild.easyblocks.generic"],
install_requires = ["easybuild-framework >= %s" % API_VERSION],
zi | p_safe = False,
)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 17-8-13 下午1:31
# @Author : Tom.Lee
# @CopyRight : 2016-2017
# @File : demo.py
# @Product : PyCharm
# @Docs :
# @Source :
import rpyc
from rpyc.utils.server import ThreadedServer
class MyService(rpyc.Service):
    """Trivial rpyc service exposing a shared in-memory list."""
    # NOTE: class-level attribute, so every connection/instance of the
    # service appends to and reads the *same* list.
    data = []
    def exposed_save_data(self, d):
        # The `exposed_` prefix is what makes a method callable by clients.
        self.data.append(d)
    def exposed_get_data(self):
        return self.data
class MyClient(object):
    """Toy client: connect to the local service, store one value, print all."""
    @classmethod
    def conn(cls):
        # Connect to the ThreadedServer started in __main__ (port 15111).
        connections = rpyc.connect('localhost', 15111)
        # Remote calls drop the `exposed_` prefix.
        connections.root.save_data(123)
        print connections.root.get_data()
if __name__ == '__main__':
    import threading
    import time
    # server.start() blocks, so the server is run in a background thread
    # while this (main) thread acts as the client.
    server = ThreadedServer(MyService, port=15111)
    client = MyClient()
    def start():
        print '*************************************'
        print '*************************************'
        print '*****************RpyC****************'
        print '************           **************'
        print '*************************************'
        print '***************start server**********'
        print '*************************************'
        print '*************************************\n\n'
        server.start()
    threading.Thread(target=start).start()
    # Exercise the service, give it a few seconds, then shut everything down.
    client.conn()
    time.sleep(5)
    server.close()
    print 'service stop.'
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
'第 0010 题:使用 Python 生成类似于图中的字母验证码图片'
from PIL import Image, ImageDraw, ImageFont, ImageFilter
import random
# Random letter:
def rndChar():
    """Return one random uppercase ASCII letter (A-Z)."""
    # Offset from 'A': consumes the same randomness as randint(65, 90).
    return chr(ord('A') + random.randint(0, 25))
# Random color 1 (bright, for the noisy background pixels):
def rndColor():
    """Return a random RGB tuple with each channel in 64..255."""
    return tuple(random.randint(64, 255) for _ in range(3))
# Random color 2 (dark, for the letters themselves):
def rndColor2():
    """Return a random RGB tuple with each channel in 32..127."""
    return tuple(random.randint(32, 127) for _ in range(3))
# Canvas: 240 x 60 px — four 60-px-wide character cells.
width = 60 * 4
height = 60
image = Image.new('RGB', (width, height), (255, 255, 255))
# Create the Font object.
# NOTE(review): hard-coded Windows font path — breaks on other OSes; confirm.
font = ImageFont.truetype('C:\Windows\Fonts\Arial.ttf', 36)
# Create the Draw object:
draw = ImageDraw.Draw(image)
# Fill every pixel with a random light colour (noise background):
for x in range(width):
    for y in range(height):
        draw.point((x, y), fill=rndColor())
# Draw the four random letters, one per 60-px cell:
for t in range(4):
    draw.text((60 * t + 10, 10), rndChar(), font=font, fill=rndColor2())
# Blur to make OCR harder:
image = image.filter(ImageFilter.BLUR)
# NOTE(review): '0010\code.jpg' is a literal backslash path and requires a
# pre-existing '0010' directory — save() fails otherwise; confirm.
image.save('0010\code.jpg', 'jpeg')
# pyGeoNet_readGeotiff
#import sys
#import os
from osgeo import gdal
#from string import *
import numpy as np
from time import clock
import pygeonet_defaults as defaults
import pygeonet_prepare as Parameters
from math import modf, floor
#from scipy.stats.mstats import mquantiles
def read_dem_from_geotiff(demFileName,demFilePath):
    """Read a GeoTIFF DEM and return raster band 1 as a 2-D array.

    Side effects: records the pixel scale and the geotransform origin on the
    pygeonet_prepare `Parameters` module for later use.
    """
    # Open the GeoTIFF format DEM
    fullFilePath = demFilePath + demFileName
    #fullFilePath = "G:\\HarishLaptop_Backup\\TI102782W0E\\PythonScripts\\pyGeoNet1.0\\data\\skunk.tif"
    print fullFilePath
    ary = []
    ds = gdal.Open(fullFilePath, gdal.GA_ReadOnly)
    geotransform = ds.GetGeoTransform()
    '''
    print 'Driver: ', ds.GetDriver().ShortName,'/', \
          ds.GetDriver().LongName
    print 'Size is ',ds.RasterXSize,'x',ds.RasterYSize, \
          'x',ds.RasterCount
    print 'Projection is ',ds.GetProjection()
    if not geotransform is None:
        print 'Origin = (',geotransform[0], ',',geotransform[3],')'
        print 'Pixel Size = (',geotransform[1], ',',geotransform[5],')'
    '''
    ary = ds.GetRasterBand(1).ReadAsArray()
    #Parameters.geospatialReferenceArray
    #Parameters.geoReferencingMatrix
    #Parameters.geoBoundingBox
    Parameters.demPixelScale = geotransform[1]
    Parameters.xLowerLeftCoord = geotransform[0]
    # NOTE(review): in GDAL's convention geotransform[3] is the *top*-left y,
    # despite this attribute's "LowerLeft" name — confirm downstream usage.
    Parameters.yLowerLeftCoord = geotransform[3]
    return ary
def quantile(x, q, qtype = 7, issorted = False):
    """
    Compute the q-th quantile of x (e.g. q=0.5 for the median).

    Args:
        x       - input data (iterable of comparable values)
        q       - quantile, in [0, 1]
        qtype   - algorithm: R / Hyndman-and-Fan types 1..9
        issorted- True if x is already sorted.

    Returns the quantile value, or None for an invalid qtype.

    References:
        http://reference.wolfram.com/mathematica/ref/Quantile.html
        http://wiki.r-project.org/rwiki/doku.php?id=rdoc:stats:quantile
    Author:
        Ernesto P.Adorio Ph.D.
        UP Extension Program in Pampanga, Clark Field.
    """
    if not issorted:
        y = sorted(x)
    else:
        y = x
    if not (1 <= qtype <= 9):
        return None # error: unknown algorithm type
    # (a, b, c, d) parameters of the Hyndman and Fan (1996) formulation.
    abcd = [(0, 0, 1, 0), # inverse empirical distrib.function., R type 1
            (0.5, 0, 1, 0), # similar to type 1, averaged, R type 2
            (0.5, 0, 0, 0), # nearest order statistic,(SAS) R type 3
            (0, 0, 0, 1), # California linear interpolation, R type 4
            (0.5, 0, 0, 1), # hydrologists method, R type 5
            (0, 1, 0, 1), # mean-based estimate(Weibull method), (SPSS,Minitab), type 6
            (1, -1, 0, 1), # mode-based method,(S, S-Plus), R type 7
            (1.0/3, 1.0/3, 0, 1), # median-unbiased , R type 8
            (3/8.0, 0.25, 0, 1) # normal-unbiased, R type 9.
            ]
    a, b, c, d = abcd[qtype-1]
    n = len(y)
    g, j = modf( a + (n+b) * q -1)
    if j < 0:
        return y[0]
    elif j >= n - 1:
        # Clamp at the top. The original guard was `j >= n`, which let
        # j == n-1 with g > 0 fall through to the interpolation below and
        # index y[j+1] == y[n] (IndexError), e.g. qtype=6, q=0.9, n=5.
        return y[n-1]
    j = int(floor(j))
    if g == 0:
        return y[j]
    else:
        # Linear interpolation between the two bracketing order statistics.
        return y[j] + (y[j+1]- y[j])* (c + d * g)
def main():
    """Driver: read the DEM, mask no-data cells, compute the slope field and
    the Perona-Malik edge threshold (quantile of the gradient magnitude)."""
    #demFileName = "skunk.tif"
    #demFilePath = "G:\\HarishLaptop_Backup\\TI102782W0E\\PythonScripts\\pyGeoNet1.0\\data\\"
    print "Reading input file path :",Parameters.demDataFilePath
    print "Reading input file :",Parameters.demFileName
    rawDemArray = read_dem_from_geotiff(Parameters.demFileName,Parameters.demDataFilePath)
    # NOTE(review): this is an alias, not a copy — the NaN masking below also
    # mutates rawDemArray; confirm that is intended.
    nanDemArray=rawDemArray
    nanDemArray[nanDemArray < defaults.demNanFlag]= np.NAN
    Parameters.minDemValue= np.min(nanDemArray[:])
    Parameters.maxDemValue= np.max(nanDemArray[:])
    # Area of analysis
    Parameters.xDemSize=np.size(rawDemArray,0)
    Parameters.yDemSize=np.size(rawDemArray,1)
    # Calculate pixel length scale and assume square
    Parameters.maxLowerLeftCoord = np.max([Parameters.xDemSize, Parameters.yDemSize])
    print 'DTM size: ',Parameters.xDemSize, 'x' ,Parameters.yDemSize
    #-----------------------------------------------------------------------------
    # Compute slope magnitude for raw and filtered DEMs
    print 'Computing slope of raw DTM'
    # NOTE(review): np.gradient on a 2-D array returns a *list* of two arrays
    # (per-axis derivatives), not a magnitude; the reshape below flattens that
    # list — confirm the intended slope-magnitude computation.
    slopeMagnitudeDemArray = np.gradient(nanDemArray,Parameters.demPixelScale)
    print slopeMagnitudeDemArray
    # Computation of the threshold lambda used in Perona-Malik nonlinear
    # filtering. The value of lambda (=edgeThresholdValue) is given by the 90th
    # quantile of the absolute value of the gradient.
    print'Computing lambda = q-q-based nonlinear filtering threshold'
    mult = Parameters.xDemSize * Parameters.yDemSize
    print np.size(slopeMagnitudeDemArray,0)
    edgeThresholdValue = quantile(np.reshape(slopeMagnitudeDemArray,mult),defaults.demSmoothingQuantile)
    print edgeThresholdValue
if __name__ == '__main__':
    # time.clock() measures elapsed CPU/wall time depending on platform; it
    # was removed in Python 3.8 — this script targets Python 2 (print stmts).
    t0 = clock()
    main()
    t1 = clock()
    print "time taken to complete the script is::",t1-t0," seconds"
    print "script complete"
|
# -*- co | ding: utf-8 -*-
class ScrapeError(Exception):
    """Raised when a scraping operation fails."""
|
##############################################################################
#
# OSIS stands for Open Student Information System. It's an application
# designed to manage the core business of higher education institutions,
# such as universities, faculties, institutes and professional schools.
# The core business involves the administration of students, teachers,
# courses, programs and so on.
#
# Copyright (C) 2015-2021 Université catholique de Louvain (http://www.uclouvain.be)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERC | HANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# A copy of this license - GNU General Public License - is available
# at the ro | ot of the source code of this program. If not,
# see http://www.gnu.org/licenses/.
#
##############################################################################
from django import template
register = template.Library()
@register.filter
def score_display(value, decimal_option):
    """Template filter: format a score for display.

    Returns "" for missing scores (None or the '-' placeholder). With a
    truthy `decimal_option` the score is shown with one decimal place,
    otherwise rounded to a whole number. Values that cannot be formatted
    as numbers are returned unchanged rather than raising.
    """
    if value is None or str(value) == '-':
        return ""
    else:
        try:
            if decimal_option:
                return "{0:.1f}".format(value)
            else:
                return "{0:.0f}".format(value)
        except (TypeError, ValueError):
            # Narrowed from a bare `except:` so programming errors are no
            # longer silently swallowed; genuine formatting failures still
            # fall back to the raw value.
            return value
|
class Car:
    # NOTE(review): declared without `self` — deliberate, to illustrate the
    # linked StackOverflow question: it works when called on the class
    # (Car.beep()), but car.beep() would raise TypeError because the bound
    # call passes the instance as an unexpected argument.
    def beep():
        print('Beep')
car = Car()
# Calling via the class works: no instance is implicitly passed.
Car.beep()
#https://pt.stackoverflow.com/q/482008/101
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.